code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
## module error_choleski
''' L= choleski(a).
Choleski decomposition: [L][L]transpose = [a].
'''
from numarray import array
from math import sqrt
def choleski(a):
    '''
    Return the lower-triangular Choleski factor L of matrix a, so that
    [L][L]transpose = [a].

    a : square, symmetric, positive-definite matrix as a list of rows.
    Returns a new n x n list of lists; a is not modified.
    Raises ValueError (from math.sqrt) if a is not positive definite.
    '''
    n = len(a)
    # Create zero matrix for L.
    L = [[0.0] * n for _ in range(n)]
    # Cholesky-Banachiewicz scheme, filling L row by row.
    # range() replaces the Python-2-only xrange() so the function runs
    # under both Python 2 and Python 3 (identical behaviour here).
    for i in range(n):
        for k in range(i + 1):
            # Partial dot product of rows i and k of L over j < k.
            tmp_sum = sum(L[i][j] * L[k][j] for j in range(k))
            if i == k:  # Diagonal elements
                # LaTeX: l_{kk} = \sqrt{ a_{kk} - \sum^{k-1}_{j=1} l^2_{kj}}
                L[i][k] = sqrt(a[i][i] - tmp_sum)
            else:
                # LaTeX: l_{ik} = \frac{1}{l_{kk}} \left( a_{ik} - \sum^{k-1}_{j=1} l_{ij} l_{kj} \right)
                L[i][k] = (1.0 / L[k][k] * (a[i][k] - tmp_sum))
    return L
| Python |
#===============================================================================
# encoding utf-8
# author :kanchan mahajan
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Elelement Methods
#
# CAUTION: This code is only for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#===============================================================================
import sys
sys.path.append(r"C:\Python27\Lib\site-packages\mayavi")
from mayavi import mlab
from math import cos,sin,sqrt
'''
Module for plotting the beams
'''
def plot_undeformed_line(x,y,z):
    '''
    Draw the undeformed structure as a 3D line.

    x, y, z: sequences of the global x/y/z coordinates of the points,
             in connection order.
    Returns the mayavi pipeline object created by mlab.plot3d.
    '''
    # Slightly transparent so the deformed shape drawn on top stands out.
    return mlab.plot3d(x, y, z, opacity=.43)
def plot_deformed_line(X,Y,Z,S):
    '''
    Draw the deformed structure as a 3D line coloured by a scalar field.

    X, Y, Z: sequences of global coordinates with the nodal deformation added.
    S: one scalar per point (total deformation, USUM), same length as X/Y/Z,
       used to colour the line.
    Returns the mayavi pipeline object created by mlab.plot3d.
    '''
    return mlab.plot3d(X, Y, Z, S)
def plot_deformed_nodes(X,Y,Z):
    '''
    Draw the deformed node positions as 3D points.

    X, Y, Z: sequences of global coordinates with the nodal deformation added.
    Returns the mayavi pipeline object created by mlab.points3d.
    '''
    # The old local node_size = 1/(10*len(X)) was computed but never passed
    # to mlab.points3d, so the dead assignment has been removed; the marker
    # size is the fixed scale_factor below.
    return mlab.points3d(X, Y, Z, scale_factor=0.15, opacity=.50)
def plot_undeformed_nodes(x,y,z):
    '''
    Draw the undeformed node positions as 3D points.

    x, y, z: sequences of the global x/y/z coordinates of the points.
    Returns the mayavi pipeline object created by mlab.points3d.
    '''
    # The old local node_size = 1/(10*len(x)) was computed but never passed
    # to mlab.points3d, so the dead assignment has been removed; the marker
    # size is the fixed scale_factor below (very transparent so the deformed
    # nodes dominate visually).
    return mlab.points3d(x, y, z, scale_factor=0.15, opacity=.10)
def get_deformed_XYZS(u,defu):
    '''
    Build deformed coordinates and a per-node deformation magnitude.

    u: flat list of original node coordinates [x1,y1,z1,x2,y2,z2,...]
    defu: flat list of nodal deformations [ux1,uy1,uz1,ux2,...], same layout.

    Returns (X, Y, Z, S): the displaced coordinates per axis, and S, the
    Euclidean norm of each node's deformation vector (total deformation).
    '''
    X, Y, Z = [], [], []
    for axis, out in enumerate((X, Y, Z)):
        # Stride-3 slices pick one coordinate axis out of the flat lists.
        for pos, d in zip(u[axis::3], defu[axis::3]):
            out.append(pos + d)
    S = []
    for ux, uy, uz in zip(defu[0::3], defu[1::3], defu[2::3]):
        S.append(sqrt(ux * ux + uy * uy + uz * uz))
    return X, Y, Z, S
def get_undeformed_XYZ(u):
    '''
    Split a flat coordinate list into separate x, y and z sequences.

    u: flat list of original node coordinates [x1,y1,z1,x2,y2,z2,...]
    Returns (x, y, z) as the three stride-3 slices of u.
    '''
    return u[0::3], u[1::3], u[2::3]
def prepare_u_from_solver(uFromSolver,uRed):
    '''
    Re-insert the DOFs removed for the solver as zeros so the array size is
    again compatible for plotting.

    uFromSolver: result list coming directly from the solver; modified in
                 place and also returned.
    uRed: iterable of the (1-based) DOF numbers removed before solving.
    '''
    for dof in sorted(uRed):
        # dof is 1-based, list positions are 0-based.
        uFromSolver.insert(dof - 1, 0)
    return uFromSolver
def _remove_rotations(uFromSolver):
'''
Removing rotz from the array from solver and inserting 0 for z value
for plotting inn the cartesian co-ordinate system
uFromSolver: result coming directly from the solver
'''
#tmp_u=[[x+cos(r),y+sin(r),0] for x,y,r in zip(defu[0::3],defu[1::3],defu[2::3])]
tmp_u=[[x,y,0] for x,y,r in zip(uFromSolver[0::3],uFromSolver[1::3],uFromSolver[2::3])]
tmp_u=[a for aa in tmp_u for a in aa]
return tmp_u
def _get_aligned_u(u,udef,elements):
'''
node numbering might not be sequential and has to be adjusted to that of the connections in defined elements
u and udef are reshuffled according to the connections defined for elements
u: original node x,y,z locations in a sequential form one after another
udef: deformation at each node ux,uy,uz from solver in a sequential form one after another
elements: elementconnections from the input data
'''
aligned_nodes=list()
for element in elements:
for nodeNumber in element:
if nodeNumber not in aligned_nodes:
aligned_nodes.append(nodeNumber)
tmp_u=list()
tmp_udef=list()
for node in aligned_nodes:
start=(node-1)*3
end=start+3
tmp_u.append(u[start:end])
tmp_udef.append(udef[start:end])
tmp_u=[a for aa in tmp_u for a in aa]
tmp_udef=[a for aa in tmp_udef for a in aa]
return tmp_u,tmp_udef
@mlab.show
def plot_deformation(u,udef,uRed,elements=list()):
    '''
    Top-level plot: draw the undeformed and the deformed structure in one
    scene, everything attached as children of the undeformed line.

    u: flat list of original node coordinates [x1,y1,z1,...]
    udef: flat solver result [ux1,uy1,rotz1,...] (rotations still included)
    uRed: DOFs that were removed for the solver (re-inserted here as zeros)
    elements: element connectivity from the input data; when given, node
              data is reordered to follow it
    '''
    # Prepare the solver output: strip rotations, restore the removed DOFs.
    udef = _remove_rotations(udef)
    udef = prepare_u_from_solver(udef, uRed)
    if elements:
        u, udef = _get_aligned_u(u, udef, elements)
    # Undeformed geometry.
    x, y, z = get_undeformed_XYZ(u)
    undeformed_line = plot_undeformed_line(x, y, z)
    mlab.orientation_axes(undeformed_line, xlabel="X", ylabel="Y", zlabel="Z")
    undeformed_line.add_child(plot_undeformed_nodes(x, y, z))
    # Deformed geometry, coloured by total deformation.
    X, Y, Z, S = get_deformed_XYZS(u, udef)
    deformed_line = plot_deformed_line(X, Y, Z, S)
    mlab.colorbar(deformed_line, title='Deformation', orientation='vertical')
    undeformed_line.add_child(deformed_line)
    undeformed_line.add_child(plot_deformed_nodes(X, Y, Z))
#===============================================================================
# TODO : Check and try to run the animation its not working for now
#===============================================================================
def plot23(u,udef):
    '''
    Plot one undeformed/deformed frame and return the root pipeline object.

    Same drawing steps as plot_deformation, but without the solver-output
    preparation; used by the (experimental) animation.
    '''
    x, y, z = get_undeformed_XYZ(u)
    root = plot_undeformed_line(x, y, z)
    mlab.orientation_axes(root, xlabel="X", ylabel="Y", zlabel="Z")
    root.add_child(plot_undeformed_nodes(x, y, z))
    X, Y, Z, S = get_deformed_XYZS(u, udef)
    deformed_line = plot_deformed_line(X, Y, Z, S)
    mlab.colorbar(deformed_line, title='Deformation', orientation='vertical')
    root.add_child(deformed_line)
    root.add_child(plot_deformed_nodes(X, Y, Z))
    return root
@mlab.animate
def anim_deformation(u,udef,frames=10):
    '''
    Animate the deformation by scaling udef from 1/frames up to the full
    value over the given number of frames.

    NOTE(review): mlab.animate expects a generator, but the yield below is
    commented out, so this animation does not work yet (see the TODO above).
    '''
    for i in range(frames):
        # Scale factor runs from `frames` down to 1, so the plotted
        # deformation grows towards the full udef on the last frame.
        factor=float(frames)-i
        tmp_udef=[a/factor for a in udef]
        plot23(u,tmp_udef)
        #mlab.colorbar(undeformed_line,title='Deformation', orientation='vertical')
        #yield
def _test23(numberOfNodes=10):
    '''
    Build a simple straight-line test model with a parabolic y-deflection
    and feed it to the (experimental) animation.
    '''
    zeros = [0] * numberOfNodes
    # Nodes lie on the x axis: (0,0,0), (1,0,0), ...
    u = [coord for node in zip(range(numberOfNodes), zeros, zeros) for coord in node]
    # Deflection only in y, growing quadratically along the beam.
    deflection = [y ** 2 / (numberOfNodes * 5.) for y in u[0::3]]
    udef = [coord for node in zip(zeros, deflection, zeros) for coord in node]
    anim_deformation(u, udef)
def _test(numberOfNodes=10):
    '''
    Build a simple straight-line test model with a parabolic y-deflection
    and plot it via plot_deformation (no reduced DOFs).
    '''
    uRed = dict()
    zeros = [0] * numberOfNodes
    # Nodes lie on the x axis: (0,0,0), (1,0,0), ...
    u = [coord for node in zip(range(numberOfNodes), zeros, zeros) for coord in node]
    # Deflection only in y, growing quadratically along the beam.
    deflection = [y ** 2 / (numberOfNodes * 5.) for y in u[0::3]]
    udef = [coord for node in zip(zeros, deflection, zeros) for coord in node]
    plot_deformation(u, udef, uRed)
if __name__=="__main__":
    # Run the self-test plot when executed as a script.
    # (A stray "| Python |" extraction artifact fused onto this line has
    # been removed -- it made the call a syntax error.)
    _test()
'''
#===============================================================================
# encoding utf-8
# author :michael dettling
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Elelement Methods
#
# CAUTION: This code is only for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#===============================================================================
'''
#TESTCOMMENT
import sys
import os
mydirpath=os.path.abspath("..")
sys.path.append(mydirpath)
from Reading_and_Stiffness import objectUtils
#===============================================================================
# import sys
# mydirpath='A:\\ProjectWork_GC\\trunk\\03_FEM_code\\'
# sys.path.append(mydirpath)
# from Reading_and_Stiffness import input_reader
# K,M,R = input_reader._test()
# mat_size = K.get_size()
# import processing_BC
#===============================================================================
#u = processing_BC.u_vector()
#F = processing_BC.force_vector()
#print mat_size
#print R.boundary_conditions #represents which DOF are fixed, e.g. 1, 2, 3 means all 3 Rotations are fixed
#print R.node_sets #represents the node sets where I have BCs and Loads
#from smd_10 import stiffness_mat_dict as K
#u = [0, 1, 2, 7, 8]
#print 'K =', K
#print 'u =', u
'''
========================================================================================================
-----------------------------PROGRAM PART FOR THE ROW AND COLUMN REDUCTION------------------------------
ROW REDUCTION IS WORKING!!!!!!!!!!!!!!!!!!!!!!!!!!!!
COLUMN REDUCTION IS WORKING!!!!!!!!!!!!!!!!!!!!!!!!!
-------------------FOR DOCUMENTATION---------------------------------------
The function reduction needs two variables to work. First is the matrix (in the
following named K) which
has to be reduced by the function reduction and the second is the rows/columns which has
to be deleted. This rows is in the fem-python program stored in the displacement
vector. This two variables need to be known to start the program.
In the following the program is described.
In the first for-loop the iteration is done (line 5). The number of iterations is given by
the length of the displacement vector. The fact that only u=0 boundary conditions
are allowed helps in this case. Then a temporary reduced K-matrix will be created (line 6).
The for-loop in line 7 and the for-loop in line 8 are going through the K-matrix
starting from the line that has to be reduced until the last line of the matrix.
The arguments in line 9 and 10 deleting the rows in the K_red matrix with respect to
the number out of the displacement vector. The deletion starts at the number mentioned before
until the last row of the K_red matrix is reached. This means that the K_red matrix contains
at this stage of the program only the upper part of the matrix. The arguments in
line 11 and 12 are renumbering the lines (reducing the number of the row by -1)
and store this lines in the temporary
reduced matrix. Line 14 copies the temporary reduced matrix in the reduced matrix
named K_red. Then the procedure counts up one and starts with the next row (line 7).
At this stage, only the rows have been deleted. That means that we have an
unsymmetric matrix. To fix this the program needs to reduce the columns as well.
This is done in the second part of the module. Starting in line 15, a column reduced
temporary matrix is introduced named K_red_temp_c. This is an empty dictionary similar to the K_red_temp
in line 6. The module is doing the similar operations what have been described
before to delete the rows. At the end in line 23, the temporary matrix is copied
to the K_red dictionary (K_red.update(K_red_temp_c) to have the reduced matrix named K_red.
The module then returns the K_red dictionary that contains only the non 0 entries
in the following form: K_red = {(i,j): value}
This Matrix is then used in the solvers as stiffness matrix or mass matrix.
========================================================================================================
'''
def reduction(Matrix, disp_vector):
    '''
    Delete the rows and columns listed in disp_vector from a sparse matrix.

    Matrix: sparse matrix in coordinate form {(row, col): value} (an
            objectUtils.myDict with 1-based indices); it is not modified.
    disp_vector: list of the (1-based) row/column numbers to remove -- in
                 this program the fixed DOFs (u = 0 boundary conditions),
                 which is why the entries can simply be dropped.
                 Assumed sorted ascending (u_vector sorts it) so the
                 back-to-front walk below removes the highest index first --
                 TODO confirm for other callers.
    Returns a new myDict with only the non-zero entries of the reduced
    matrix, renumbered so the indices stay contiguous from 1.
    '''
    matrix_size=Matrix.get_size()[0]
    # Work on a myDict copy so the input matrix stays intact.
    K_red=objectUtils.myDict()
    K_red.update(Matrix)
    # disp_vector[-iter-1] walks the fixed DOFs from the last entry to the
    # first, so earlier deletions do not shift the indices still to remove.
    for iter in range(0,len(disp_vector),1):
        K_red_temp={}
        # ---- row reduction ----
        for i in range(disp_vector[-iter-1],matrix_size+1,1):
            for k in range(1,matrix_size+1,1):
                # Drop the row belonging to the fixed DOF ...
                if (disp_vector[-iter-1],k) in K_red:
                    del K_red[disp_vector[-iter-1],k]
                # ... and shift every row below it up by one.
                if (i,k) in K_red:
                    K_red_temp.update({(i-1,k):K_red[i,k]})
                    del K_red[i,k]
        # Merge the renumbered rows back in.
        K_red.update(K_red_temp)
        K_red_temp_c={}
        # ---- column reduction (same scheme, transposed) ----
        for k in range(disp_vector[-iter-1],matrix_size+1,1):
            for i in range(1,matrix_size+1,1):
                if (i,disp_vector[-iter-1]) in K_red:
                    del K_red[i,disp_vector[-iter-1]]
                if (i,k) in K_red:
                    K_red_temp_c.update({(i,k-1):K_red[i,k]})
                    del K_red[i,k]
        K_red.update(K_red_temp_c)
    return K_red
'''def mprint(mat_size, mat_print):
for i in range(0,mat_size+1,1):
for k in range(0,mat_size+1,1):
#print i,k
if (i,k) not in mat_print:
mat_print.update({(i,k):"_"})
for x in range(0,mat_size+1,1):
for y in range(0,mat_size+1,1):
if y == mat_size:
print mat_print[x,y]
else:
print mat_print[x,y],
'''
def _test():
    '''
    Self-test: read the example input, build the stiffness matrix, reduce
    it by the boundary-condition DOFs and print the matrices before and
    after.  Requires the Reading_and_Stiffness and BC_and_Reduction
    packages one directory up.  Python 2 only (print statements).
    '''
    import os,sys
    # Make the sibling packages importable when run from this directory.
    sys.path.append(os.path.abspath('..'))
    from Reading_and_Stiffness import input_reader
    K,M,R = input_reader._test()
    from BC_and_Reduction import processing_BC
    print 'K =', K
    print 'K_array ='
    input_reader.print_stiff_array(K.ToArray())
    # Fixed DOFs derived from the boundary conditions and node sets.
    uRed=processing_BC.u_vector(R.boundary_conditions,R.node_sets)
    reducedStiffnesMatrix=reduction(K,uRed)
    print 'MatrixSize ',reducedStiffnesMatrix.get_size()
    print 'K_red array ='
    input_reader.print_stiff_array(reducedStiffnesMatrix.ToArray())
if __name__=="__main__":
    # Run the self-test when executed as a script.
    # (A stray "| Python |" extraction artifact fused onto this line has
    # been removed -- it made the call a syntax error.)
    _test()
# Example 10x10 sparse stiffness matrix in coordinate form
# {(row, col): value} with a tridiagonal pattern and 0-based indices;
# used as test data for the reduction module.  A stray "| Python |"
# extraction artifact after the closing brace has been removed -- it made
# the assignment a syntax error.
stiffness_mat_dict={
    (0, 0): 0,
    (0, 1): 0,
    (1, 0): 1,
    (1, 1): 1,
    (1, 2): 1,
    (2, 1): 2,
    (2, 2): 2,
    (2, 3): 2,
    (3, 2): 3,
    (3, 3): 3,
    (3, 4): 3,
    (4, 3): 4,
    (4, 4): 4,
    (4, 5): 4,
    (5, 4): 5,
    (5, 5): 5,
    (5, 6): 5,
    (6, 5): 6,
    (6, 6): 6,
    (6, 7): 6,
    (7, 6): 7,
    (7, 7): 7,
    (7, 8): 7,
    (8, 7): 8,
    (8, 8): 8,
    (8, 9): 8,
    (9, 8): 9,
    (9, 9): 9,
}
'''
===============================================================================
# encoding utf-8
# author :michael dettling
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Elelement Methods
# This part processes the BC
#
# CAUTION: This code is only for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
===============================================================================
'''
#===============================================================================
# import sys
#mydirpath='A:\\ProjectWork_GC\\trunk\\03_FEM_code\\'
#sys.path.append(mydirpath)
#===============================================================================
#===============================================================================
# from Reading_and_Stiffness import input_reader
# K,M,R = input_reader._test()
# mat_size = K.get_size()
#===============================================================================
#print mat_size[0]
#print R.boundary_conditions
#print R.node_sets['Set-load']
#print 'R.loads Value =', R.loads['Set-load']['Value']
#print 'R.loads DOF =', R.loads
#print 'Value =', R.boundary_conditions['Set-load']
#print mat_size
'''
-------------------------------------------------------------------------------
Displacement vector u is created in the following part
Following text for documentation:
---------------------------------
a checks the Set Name of the Boundary conditions
b checks the Node Set, which has the same name as the applied bc and includes
the node numbers
The next part writes, for every fixed node, the entry for DOF i=1 into the u-vector.
That goes through all nodes
then i counts one more and the second and after that the third DOF will be written
into the u-vector
-------------------------------------------------------------------------------
'''
def u_vector(boundary_conditions,node_sets):
    '''
    Build the sorted list of fixed global DOF numbers from the boundary
    conditions.

    boundary_conditions: {set_name: {'DOFs': [dof, ...], ...}}
    node_sets: {set_name: [node_number, ...]}
    For every BC set that has a node set of the same name, each constrained
    DOF of each node is converted to its global (1-based) position
    (node - 1) * 3 + dof.
    Returns those positions as an ascending list.
    '''
    positions=[]
    for bc_name in boundary_conditions:
        for set_name in node_sets:
            if set_name != bc_name:
                continue
            for dof in boundary_conditions[bc_name]['DOFs']:
                for node in node_sets[bc_name]:
                    positions.append((node - 1) * 3 + dof)
    positions.sort()
    return positions
'''
-------------------------------------------------------------------------------
Force vector F is created in the following part
Following text for documentation:
---------------------------------
a checks for the set name in the LOADS BC and b checks for the representing set
in the node_sets. If these two values are identical then the writing of the F-vector
starts.
k goes through the nodes sets of a, so the F-vector writing will start with the
first node of the node_sets values
All the if codes are checking, which force is applied, e.g. if the value 1 is given
an x-force is applied and the F-vector will get the value of the x-force at the
node position.
-------------------------------------------------------------------------------
'''
def force_vector(loads,node_sets):
    '''
    Build the force vector in sparse form {global_dof_position: value}.

    loads: {set_name: {dof_number: force_value, ...}} where dof_number is
           1..6 as used by the input format (the original code handled the
           six numbers in six identical branches)
    node_sets: {set_name: [node_number, ...]}
    For every load set that has a node set of the same name, each load DOF
    of each node is mapped to its global (1-based) position
    (node - 1) * 3 + dof.
    Returns the resulting dict.  Keys of loads[set] outside 1..6 (e.g.
    auxiliary entries such as 'Value') are ignored, exactly as in the
    original branch-per-DOF version.
    '''
    F = {}
    for load_name in loads:
        for set_name in node_sets:
            if set_name != load_name:
                continue
            for node in node_sets[load_name]:
                for dof in loads[load_name]:
                    # The six original if-branches all computed
                    # (node - 1) * 3 + dof; collapsed into one formula.
                    # Membership test (not a range comparison) so non-int
                    # keys are skipped without a TypeError.
                    if dof in (1, 2, 3, 4, 5, 6):
                        F[(node - 1) * 3 + dof] = loads[load_name][dof]
    return F
def get_reduced_force_vector(full_force_vector,boundary_conditions,node_sets):
    '''
    Remove the fixed DOFs from the full force vector and return the
    non-zero remainder in sparse form {index_in_reduced_vector: value}.

    full_force_vector: flat list of force values, one per global DOF; it is
                       not modified (the original mutated a copy anyway).
    boundary_conditions: {set_name: {'DOFs': [dof, ...], ...}}
    node_sets: {set_name: [node_number, ...]}

    Fixes over the previous version:
    - dict.items() instead of the Python-2-only iteritems();
    - fixed DOF positions are collected first and deleted in descending
      order, so earlier deletions no longer shift the positions still to
      be removed;
    - enumerate() instead of list.index(), which returned the first
      occurrence and therefore mis-keyed duplicated force values.
    NOTE(review): the position (node-1)*3+DOF is 1-based like in u_vector
    but is used as a 0-based list index here -- confirm against the caller.
    '''
    remaining = list(full_force_vector)
    # Collect every fixed DOF position first ...
    fixed_positions = set()
    for name, val in boundary_conditions.items():
        for node in node_sets.get(name, list()):
            for DOF in val['DOFs']:
                fixed_positions.add((node - 1) * 3 + DOF)
    # ... then delete from the highest position down so the remaining
    # positions stay valid while deleting.
    for position in sorted(fixed_positions, reverse=True):
        del remaining[position]
    # Keep only the non-zero entries, keyed by their reduced index.
    f_reduced = dict()
    for index, entry in enumerate(remaining):
        if entry:
            f_reduced[index] = entry
    return f_reduced
# print 'F =', F
#===============================================================================
# for the testing this can be used where the total module is executed
#===============================================================================
def _test():
    '''
    Self-test: read the example input and print the fixed-DOF vector and
    the force vector derived from its boundary conditions and loads.
    Requires the Reading_and_Stiffness package one directory up.
    Python 2 only (print statements).
    '''
    import os,sys
    # Make the sibling package importable when run from this directory.
    sys.path.append(os.path.abspath('..'))
    from Reading_and_Stiffness import input_reader
    K,M,R = input_reader._test()
    uRed=u_vector(R.boundary_conditions,R.node_sets)
    print 'uRed =', uRed
    fRed=force_vector(R.loads,R.node_sets)
    print 'fRed =', fRed
if __name__ == '__main__':
    # Run the self-test when executed as a script.
    # (A stray "| Python |" extraction artifact fused onto this line has
    # been removed -- it made the call a syntax error.)
    _test()
#===============================================================================
# encoding utf-8
# author :kanchan mahajan
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Elelement Methods
#
# CAUTION: This code is only for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#===============================================================================
'''
This module implements the assembly of the mass matrix as well as stiffness matrix
assembly is performed with the help of the elemental mass and stiffness matrices respectively
Elements module provides the basic Elemental stiffness matrices depending on the name of Element
in ABAQUS programm currently implemented elements can be seen in ##$$Elements.py module
Assembly is a class for creating object which can share some specific data about the
elements as well as input data which was read by ##$$input_reader.py module
arguments necessary for the object creation
input_data : object created by ##$$reader class from ##$$input_reader.py
for now the assembly object is focused on the geometry of beam/bar which has specific (circular) cross section
mentioned in the input data and has some length from the calculated from the dependent node location of each element.
These two variables then can give other necessary properties like volume nd mass of the element
this mass has been used for assembling the Mass matrix
The Elemental matrices are based on specific variables defined in function create_element
in the object which can be used to extend the assembly afterwards as well and then adding those to the
assembled matrices
assemble_mat function return the stiffness matrix as well as mass matrix respectively
arguments necessary for assemble_mat
for assembly element type has to be passed to the assemble_mat function and in turn which is passed to
create_element matrix which created that specific element matrices depending on the type of the element
and getting appropriate properties from ##$$Element_mapping
'''
from Elements import Element_mapping
import re
from pprint import pprint
import objectUtils
from timeit import timeit
from errors import guarded_call
import math
class Assembly:
    '''
    Assembly is an object which is responsible for creating the assembled
    matrices -- stiffness as well as mass at the moment.
    The objects are based on the dict format defined in the objectUtils module.
    Usage (input_data is an object created by the input_reader.reader() class)::
        OBJ=Assembly(input_data)
        stiffnessMatrix,massMatrix=OBJ.assemble_mat(elementType)
    both of these objects will have all the methods defined in the myDict
    object from the objectUtils module.
    '''
    def __init__(self, input_data):
        '''
        @argument
        input_data : input object from input reader
        '''
        self.data = input_data
    #@guarded_call
    def assemble_mat(self, elementType):
        '''
        returns assembled mass and stiffness matrices in co-ordinate form
        @arguments:
        elementType : element type is a string for particular element name
        NOTE(review): the elementType argument is immediately overwritten
        with self.data.elementType below, so the passed value is never
        used -- confirm whether that is intended.
        '''
        k_mat = None
        m_mat=None
        elements = self.data.elements
        elementType = self.data.elementType
        # Sum the elemental matrices over all elements; myDict.__add__
        # merges the coordinate dicts entry by entry.
        for element in elements:
            elemental_stiffness,elemental_mass=self.create_element(element, elementType)
            # NOTE(review): "not k_mat" is also true for an empty matrix
            # dict, not only for None.
            if not k_mat:
                k_mat = elemental_stiffness
            else:
                k_mat = k_mat + elemental_stiffness
            if not m_mat:
                m_mat = elemental_mass
            else:
                m_mat = m_mat + elemental_mass
        return k_mat,m_mat
    #@guarded_call
    def create_element(self, element, elementType):
        '''
        Returns elemental stiffness and mass matrix.
        The general elemental stiffness and mass matrices (in terms of
        E, A, l, I, m) are defined in the Elements.py file; the matrices
        are composed by substituting the values for those variables.
        '''
        element_standard_props = Element_mapping[elementType]
        N_dof = element_standard_props["NodalDOFs"]
        # Template matrices whose entries are expressions in the local
        # names l, A, E, mu, rho, m, I bound below.
        elementalMatrix = element_standard_props["stiffnessMatrix"]
        elementalMassMatrix = element_standard_props["massMatrix"]
        element_mat_Object = objectUtils.element_mat()
        element_mass_matrix_Object=objectUtils.element_mat()
        l, A, E, mu, rho, m,I = self.get_element_properties(element)
        #print A,I
        for first_node in element:
            for second_node in element:
                for d_i in range(1, N_dof + 1):
                    for d_j in range(1, N_dof + 1):
                        #print first_node,second_node,d_i,d_j
                        # s_* : global (structure) DOF indices, 1-based.
                        s_i = (first_node - 1) * N_dof + d_i
                        s_j = (second_node - 1) * N_dof + d_j
                        # e_* : local indices into the template matrix,
                        # based on the node's position inside the element.
                        e_i = element.index(first_node) * N_dof + d_i
                        e_j = element.index(second_node) * N_dof + d_j
                        # NOTE(review): eval() executes the template entry
                        # as Python code using the locals above -- this is
                        # only safe for trusted Elements definitions.
                        if (e_i,e_j) in elementalMatrix:
                            if (s_i, s_j) not in element_mat_Object:
                                element_mat_Object[(s_i, s_j)] = eval(str(elementalMatrix[(e_i, e_j)]))
                            else:
                                element_mat_Object[(s_i, s_j)] = element_mat_Object[(s_i, s_j)] + eval(str(elementalMatrix[(e_i, e_j)]))
                        if (e_i,e_j) in elementalMassMatrix:
                            if (s_i, s_j) not in element_mass_matrix_Object:
                                element_mass_matrix_Object[(s_i, s_j)] = eval(str(elementalMassMatrix[(e_i, e_j)]))
                            else:
                                element_mass_matrix_Object[(s_i, s_j)] = element_mass_matrix_Object[(s_i, s_j)] + eval(str(elementalMassMatrix[(e_i, e_j)]))
        return element_mat_Object,element_mass_matrix_Object
    #@guarded_call
    def get_length(self, element):
        '''
        returns length of the bar/beam element
        @arguments
        element : element should be a list of nodes for specific element
        '''
        nodes = self.data.nodes
        # Euclidean distance between the element's two end nodes
        # (node numbers are 1-based, the nodes list is 0-based).
        return math.sqrt(sum([x ** 2 for x in [x1 - x2 for x1, x2 in zip(nodes[element[0] - 1], nodes[element[1] - 1])]]))
    #@guarded_call
    def get_element_properties(self, element):
        '''
        returns length,area,Elastic modulus,poissons ratio,density,mass,moment of inertia
        of specific element in the above order
        @arguments
        element : element should be a list of nodes for specific element
        '''
        # Defaults keep the tuple well-defined, although every value is
        # reassigned immediately below.
        l, area, E, mu, rho, m,I=0.,0.,0.,0.,0.,0.,0.
        l = self.get_length(element)
        section = self.get_section(element)
        material = self.get_material(section)
        I=self.get_moment(section)
        area = self.get_area(section)
        E = material["EMOD"]
        mu = material["POISSON"]
        rho = material["density"]
        # Element mass from volume (area * length) times density.
        m = area * l * rho
        return l, area, E, mu, rho, m,I
    #@guarded_call
    def get_area(self, section):
        '''
        returns area of the circular cross section, given that its defined in the input file
        @arguments
        section : a dictionary which should contain a key radius with a defined value
        This is an internal function
        '''
        radius = section["radius"]
        # A = pi * r^2 (circular cross sections only).
        return math.pi * (radius ** 2)
    #@guarded_call
    def get_moment(self, section):
        '''
        returns moment of ineria of the circular cross section, given that its defined in the input file
        @arguments
        section : a dictionary which should contain a key radius with a defined value
        This is an internal function
        '''
        radius = section["radius"]
        # I = pi * r^4 / 4 (circular cross sections only).
        return math.pi * (radius ** 4)/4.
    #@guarded_call
    def get_material(self, section):
        '''
        returns a material dictionary containing information about material used for specific section
        @arguments
        section : a dictionary which should contain a key "material" naming a material
        This is an internal function
        NOTE(review): implicitly returns None when section is empty --
        callers such as get_element_properties would then fail on
        material["EMOD"].
        '''
        if section:
            materialName = section.get("material")
            return self.data.materials[materialName]
    #@guarded_call
    def get_section(self, element):
        '''
        returns a section dictionary with all neccessary information read from the
        input file
        @arguments
        element : element should be a list of nodes for specific element
        This is an internal function
        '''
        # Map the element's list position to its actual element number
        # from the input file.
        ele_number_map = self.get_element_mapping()
        ele_index = self.data.elements.index(element)
        ele_index = ele_number_map[ele_index]
        eleSetName=None
        # Drop duplicated element sets first so the lookup is unambiguous.
        self.eliminate_double_Entries()
        # dict.iteritems() is Python 2 only.
        for name, element_numbers in self.data.elementSets.iteritems():
            if ele_index in element_numbers:
                eleSetName=name
        if eleSetName:
            for name, section in self.data.sections.iteritems():
                if eleSetName == section["elset"].strip():
                    return section
        # No matching set/section found: empty dict.
        return dict()
    #@guarded_call
    def get_element_mapping(self):
        '''
        returns a mapping of the element numbers to actual element numbers
        this function is a helper function
        since in many cases the element numbering may not start from 1 and might start from any number
        input reader is not saving any inforamtion on the element numbering hence its necessary to
        map the list of elements stored in the input_data object to the actual element numbering in the
        input file for the purpose of assigning the section properties for the elements
        This is an internal function
        '''
        if not self.data.elementSets:
            return dict()
        # Find the overall smallest and largest element number over all
        # element sets.
        min_element_number=None
        max_element_number=None
        for name, element_numbers in self.data.elementSets.iteritems():
            if min_element_number==None:
                min_element_number = min(element_numbers)
            elif min_element_number > min(element_numbers):
                min_element_number = min(element_numbers)
            if max_element_number==None:
                max_element_number=max(element_numbers)
            elif max_element_number < max(element_numbers):
                max_element_number = max(element_numbers)
        # Map 0-based list index -> actual element number.  Assumes the
        # numbering is contiguous between min and max -- TODO confirm.
        tmp1 = range(min_element_number, max_element_number+1)
        tmp2 = map(lambda x:x - min_element_number, tmp1)
        ele_number_map = dict()
        for entry in tmp2 :
            ele_number_map[entry] = tmp1[entry]
        return ele_number_map
    #@guarded_call
    def eliminate_double_Entries(self):
        '''
        This function eliminates double entries in the element sets.
        If there were element sets with the same elements in them it would
        be ambiguous which one is to be used for further calculations, so
        only the first set seen with a given element list is kept.
        '''
        temp_elementSets=dict()
        for name, element_numbers in self.data.elementSets.iteritems():
            if element_numbers not in temp_elementSets.values():
                temp_elementSets[name]= element_numbers
        self.data.elementSets=temp_elementSets
| Python |
class myDict(dict):
    '''
    dictionary representation of the matrix: keys are (row, column) tuples
    of ints (1-based), values are the matrix entries.
    this class is only for a general matrix while for a symmetric matrix
    another subclass can be derived from this
    V 1
    '''
    def __init__(self, **kwargs):
        # NOTE(review): keyword-argument names are strings, but _check_key
        # only accepts (int, int) tuples, so __setitem__ silently drops
        # every kwarg here -- confirm whether this constructor path is
        # actually used anywhere.
        for arg in kwargs:
            self.__setitem__(arg, kwargs.get(arg))
        pass
    #---------------- Overriding Methods ------------------
    def __setitem__(self, key, val):
        # Keys that are not (int, int) tuples are silently ignored.
        if self._check_key(key):
            super(myDict, self).__setitem__(key, val)
    def __add__(self, arg):
        # Entry-wise matrix addition; missing entries count as 0.
        # Returns an empty myDict when the operands are incompatible.
        result = myDict()
        if self._check_compatibility(arg):
            for key in self.keys():
                result[key] = self[key]
            for key in arg.keys():
                if key in result: result[key] += arg[key]
                else:
                    result[key] = arg[key]
        return result
    def __sub__(self, arg):
        # Entry-wise matrix subtraction; missing entries count as 0.
        # Returns an empty myDict when the operands are incompatible.
        result = myDict()
        if self._check_compatibility(arg):
            for key in self.keys():
                result[key] = self[key]
            for key in arg.keys():
                if key in result: result[key] -= arg[key]
                else:
                    result[key] = -arg[key]
        return result
    #---------------- Callable Methods ------------------
    def check_symmetry(self):
        '''
        checks if the matrix is symmetric or not
        returns bool accordingly
        '''
        for key in self.keys():
            reverse_key = (key[1], key[0])
            if reverse_key in self.keys():
                if self.get(key) != self.get(reverse_key):
                    return False
            else:
                # log the warning that the matrix does not have the entries and has to be considered as symmetric one
                pass
        return True
    def fill_symmetry_elements(self):
        # Mirror every entry across the diagonal where the mirrored key is
        # still missing (completes a triangle into a full matrix).
        for key in self.keys():
            reverse_key = (key[1], key[0])
            if reverse_key not in self.keys():
                self.__setitem__(reverse_key, self[key])
    def delete_item(self, key):
        '''
        deleting the specific item and modifying all the indices accordingly
        (removes the whole row and the whole column of the entry)
        @args:
        key : tuple (<row_number>,<column_number>)
        key must be present in dict
        return bool value if successful
        '''
        if key not in self.keys():
            return False
        row_index = key[0]
        column_index = key[1]
        self._delete_row(row_index)
        self._delete_column(column_index)
        return True
    def get_size(self):
        '''
        gets the size of the matrix
        returns a tuple like (row,column)
        (largest row/column index present; raises ValueError when empty)
        '''
        number_of_rows = max([x[0] for x in self.keys()])
        number_of_columns = max([x[1] for x in self.keys()])
        return number_of_rows, number_of_columns
    def ToArray(self):
        '''
        returns 2D Array to represent the current object with 0 filled as in sparse matrices
        '''
        row, column = self.get_size()
        matrix_2d_array = list()
        for i in range(1, row + 1):
            matrix_2d_array.append(list())
            for j in range(1, column + 1):
                if (i , j) in self.keys():
                    matrix_2d_array[-1].append(self[(i, j)])
                else:
                    # Missing sparse entries are rendered as 0.
                    matrix_2d_array[-1].append(0.)
        return matrix_2d_array
    #---------------- Private Methods ------------------
    def _check_key(self, key):
        '''
        checks key for type,length and integer tuple
        '''
        if not isinstance(key, tuple):
            return False
        elif len(key) != 2:
            return False
        elif not isinstance(key[0], int) or not isinstance (key[1], int) :
            return False
        else:
            return True
    def _check_compatibility(self, arg):
        '''
        checks the type and size of objects
        '''
        if not isinstance(arg, myDict) :
            # raise not supported
            return False
        elif self.get_size() != arg.get_size():
            # raise not equal size
            return False
        else:
            return True
    def _delete_row(self, row):
        '''
        deletes row (internal method)
        @args :
        row : int -- number of the row to be deleted
        '''
        self._internal_delete(row, "row")
    def _delete_column(self, column):
        '''
        deletes column (internal method)
        @args :
        column : int -- number of the column to be deleted
        '''
        self._internal_delete(column, "column")
    def _internal_delete(self, index, flag):
        '''
        should not be used externally in any case
        Shifts every entry past `index` down by one along the axis selected
        by flag ("row" or "column").
        NOTE(review): entries whose index equals `index` are copied over
        unchanged (see the commented-out elif below), so the deleted
        row/column itself is not removed here -- confirm intended.
        NOTE(review): iterating self.keys() while deleting relies on the
        Python 2 behaviour of keys() returning a list copy.
        '''
        if flag == "row":j = 0
        elif flag == "column":j = 1
        else:
            raise NotImplementedError
        if not isinstance(index, int):
            return False
        else:
            tmp = myDict()
            for key in self.keys():
                if key[j] > index:
                    if j == 0 :modified_key = (key[0] - 1, key[1])
                    if j == 1 :modified_key = (key[0] , key[1] - 1)
                    tmp[modified_key] = self[key]
                else:
                    tmp[key] = self[key]
                del self[key]
            # elif key[0] == index:
            #     del self[key]
            self.update(tmp)
class element_mat(myDict):
    '''
    Elemental-matrix variant of myDict.

    A myDict is assumed to be indexed densely from 1 up to its largest
    row/column number, but an elemental matrix may start and end at any
    node numbers, so get_size counts the distinct indices instead of
    taking the maximum.  Every entry of an elemental matrix is assumed
    to be non-zero.
    '''
    def get_size(self):
        '''Return (rows, columns) as counts of distinct indices used.'''
        distinct_rows = set(key[0] for key in self.keys())
        distinct_cols = set(key[1] for key in self.keys())
        return len(distinct_rows), len(distinct_cols)
def create_element(nodes):
    '''
    Build a crude elemental matrix for the given node numbers: 1 on the
    diagonal and -1 between nodes whose numbers differ by exactly one.
    '''
    element_matrix = element_mat()
    for row_node in nodes:
        for col_node in nodes:
            entry = None
            if row_node == col_node:
                entry = 1
            if abs(row_node - col_node) == 1:
                entry = -1
            if entry:
                element_matrix[(row_node, col_node)] = entry
    return element_matrix
| Python |
#===============================================================================
# encoding utf-8
# author :kanchan mahajan
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is only for educational purposes and not for commercial purposes
# DISCLAIMER : The author is not responsible for any results pertaining to any analysis
#===============================================================================
'''
This module defines allowed/defined element types for the program
general structure of the element definition is explained as below::
{"NumberOfNodes":
<number of nodes that element has>,
"stiffnessMatrix":
        <stiffness matrix (co-ordinate form) in terms of Elasticity modulus as E, Area of element as A, length as l, moment of inertia as I>,
    "massMatrix":
        <mass matrix (co-ordinate form) in terms of m, the mass of the total element>,
"NodalDOFs":
<number of DOFs each node has>,
"cornerNodeIndices":
<a list of the indices as where corner nodes are located in input file for specific element(list starts with 1)>,
"midNodeIndices":
<a list of the indices as where mid side nodes are located in input file for specific element(list starts with 1)>,
"DOF_mapping":
<Mapping of the dofs to the actual numbering that should be used in the program>,
}
for further more understanding please refer to the module
'''
#===============================================================================
# Registry of element types supported by the program, keyed by the ABAQUS
# element name.  Matrix entries are expression strings evaluated later with
# E (Young's modulus), A (area), l (length), I (moment of inertia),
# rho (density) and m (element mass) bound to concrete values.
#===============================================================================
Element_mapping={
    "C3D20":
        # 20-node brick: placeholder entry, stiffness matrix not defined yet
        {"NumberOfNodes":20,
         "stiffnessMatrix":{},
         "NodalDOFs":3,
         },
    "B31":
        # 2-node beam: dummy numeric stiffness entries (position markers only)
        {"NumberOfNodes":2,
         "stiffnessMatrix":{(1,1):11,(1,2):12,(1,3):13,(1,4):14,
                            (2,1):21,(2,2):22,(2,3):23,(2,4):24,
                            (3,1):31,(3,2):32,(3,3):33,(3,4):34,
                            (4,1):41,(4,2):42,(4,3):43,(4,4):44,},
         "NodalDOFs":2,
         "cornerNodeIndices":[1,2],
         "midNodeIndices":[3],
         },
    #===================================================================
    # file generated from abaqus gives element B23 for beam element
    # whie made from FEModeler gives B31 element
    # what is the difference ??
    #===================================================================
    "B23":
        # 2-node Euler-Bernoulli beam, 3 DOFs per node (u, v, rotation)
        {"NumberOfNodes":2,
         "stiffnessMatrix":{(1,1):"E*A/l",(1,2):"0.",(1,3):"0.",(1,4):"-E*A/l",(1,5):"0.",(1,6):"0.",
                            (2,1):"0.",(2,2):"12*E*I/l**3",(2,3):"6*E*I/l**2",(2,4):"0.",(2,5):"-12*E*I/l**3",(2,6):"6*E*I/l**2",
                            (3,1):"0.",(3,2):"6*E*I/l**2",(3,3):"4*E*I/l",(3,4):"0.",(3,5):"-6*E*I/l**2",(3,6):"2*E*I/l",
                            (4,1):"-E*A/l",(4,2):"0.",(4,3):"0.",(4,4):"E*A/l",(4,5):"0.",(4,6):"0.",
                            (5,1):"0.",(5,2):"-12*E*I/l**3",(5,3):"-6*E*I/l**2",(5,4):"0.",(5,5):"12*E*I/l**3",(5,6):"-6*E*I/l**2",
                            (6,1):"0.",(6,2):"6*E*I/l**2",(6,3):"2*E*I/l",(6,4):"0.",(6,5):"-6*E*I/l**2",(6,6):"4*E*I/l"},
         # lumped-mass alternative kept for reference:
         #"massMatrix":{(1,1):"m/4",(2,2):"m/4",(3,3):"I/2",(4,4):"m/4",(5,5):"m/4",(6,6):"I/2"},
         # Consistent mass matrix (rho*A*l/420 scaling).
         # BUG FIX: (3,2) was "rho*A*l*l/19" (a rounded 22/420), which made
         # the consistent mass matrix asymmetric; it must mirror (2,3).
         "massMatrix":{(1,1):"rho*A*l/3",(1,4):"70*rho*A*l/420",
                       (2,2):"156*rho*A*l/420",(2,3):"22*l*rho*A*l/420",(2,5):"54*rho*A*l/420",(2,6):"-13*l*rho*A*l/420",
                       (3,2):"22*l*rho*A*l/420",(3,3):"rho*A*l*l*l/105",(3,5):"13*l*rho*A*l/420",(3,6):"-3*l*l*rho*A*l/420",
                       (4,1):"70*rho*A*l/420",(4,4):"rho*A*l/3",
                       (5,2):"54*rho*A*l/420",(5,3):"13*l*rho*A*l/420",(5,5):"156*rho*A*l/420",(5,6):"-22*l*rho*A*l/420",
                       (6,2):"-13*l*rho*A*l/420",(6,3):"-3*l*l*rho*A*l/420",(6,5):"-22*l*rho*A*l/420",(6,6):"4*l*l*rho*A*l/420"},
         "NodalDOFs":3,
         "cornerNodeIndices":[1,2],
         "midNodeIndices":[3],
         "DOF_mapping":{1:1,2:2,6:3}
         },
    "B21":
        # 2-node beam with a simple lumped mass matrix
        {"NumberOfNodes":2,
         "stiffnessMatrix":{(1,1):"E*A/l",(1,4):"-E*A/l",
                            (2,2):"12*E*I/l**3",(2,3):"6*E*I/l**2",(2,5):"-12*E*I/l**3",(2,6):"6*E*I/l**2",
                            (3,2):"6*E*I/l**2",(3,3):"4*E*I/l",(3,5):"-6*E*I/l**2",(3,6):"2*E*I/l",
                            (4,1):"-E*A/l",(4,4):"E*A/l",
                            (5,2):"-12*E*I/l**3",(5,3):"-6*E*I/l**2",(5,5):"12*E*I/l**3",(5,6):"-6*E*I/l**2",
                            (6,2):"6*E*I/l**2",(6,3):"2*E*I/l",(6,5):"-6*E*I/l**2",(6,6):"4*E*I/l"},
         "massMatrix":{(1,1):"m/4",(2,2):"m/4",(3,3):"I/2",(4,4):"m/4",(5,5):"m/4",(6,6):"I/2"},
         "NodalDOFs":3,
         "cornerNodeIndices":[1,2],
         "midNodeIndices":[3],
         "DOF_mapping":{1:1,2:2,6:3}
         },
    }
#===============================================================================
# encoding utf-8
# author :kanchan mahajan
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Elelement Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#===============================================================================
#===============================================================================
# Debug flag: when True, a guarded call drops into pdb post-mortem after
# printing the exception it caught
#===============================================================================
_DEBUG=True
#===============================================================================
# PrintIO flag: when True, the arguments passed to and the value returned
# from every guarded function call are printed
#===============================================================================
_PrintIO=False
def error_string(*args):
    '''
    Build a single error message by concatenating the string form of
    every argument.

    @param args: arbitrary objects; each one is converted with str().
                 An argument whose conversion fails contributes the
                 placeholder text "cannot convert argument to string".
    @return: the concatenated message string
    '''
    str_msg = ""
    for arg in args:
        try:
            str_msg += str(arg)
        except Exception:
            # BUG FIX: was "str_msg =+ ..." -- unary plus on a str raises
            # TypeError and would have discarded the message built so far
            str_msg += "cannot convert argument to string"
    return str_msg
def guarded_call(func,*args,**kwargs):
    '''
    Decorator that routes function calls through a single guarded entry
    point so tracing and error handling stay centralized during
    development.

    Depending on the module-level flags, the wrapper prints the call
    arguments and return value (_PrintIO) and opens a pdb post-mortem
    session when the wrapped function raises (_DEBUG).
    '''
    def calling_funct(*call_args, **call_kwargs):
        global _DEBUG
        global _PrintIO
        try:
            if _PrintIO:
                print("Invoking {0} wih arguments {1} and {2}".format(func.func_name, call_args, call_kwargs))
            result = func(*call_args, **call_kwargs)
            if _PrintIO:
                print("{0} return value is {1}".format(func.func_name, result))
            return result
        except Exception as e:
            print(error_string(e))
            if _DEBUG:
                import pdb
                pdb.post_mortem()
    return calling_funct
| Python |
#===============================================================================
# encoding utf-8
# author :kanchan mahajan
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Elelement Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#===============================================================================
# Input-file extensions the reader currently understands
_AllowedInputType=["inp"]
# Source programs whose input-file dialects can be parsed
_AllowedMadeFrom=["abaqus","Ansys"]
# Target programs the parsed data may be prepared for
_AllowedIntendedFor=["abaqus"]
def myError(*args):
    '''
    Concatenate the string form of every argument, each followed by a
    tab.  Falls back to an argument's ToString() method when str() fails.
    '''
    message = ""
    for arg in args:
        try:
            message += str(arg) + "\t"
        except:
            message += arg.ToString() + "\t"
    return message
class ReadingTokens:
    '''
    Token definitions used to split an input file into blocks.

    ReadingToken([fileType,[MadeFrom,[IntendedFor]]])
    fileType : extension of the input file
    MadeFrom : software that generated the file
    IntendedFor : software the file is meant to be used with

    Available tokens:
    part definition block (unused in the current code)::
        self.PartStartToken, self.PartEndToken
    node definition block::
        self.NodeStartToken, self.NodeEndToken
    element definition block::
        self.ElementStartToken, self.ElementEndToken
    material properties block::
        self.MaterialStartToken, self.MaterialEndToken
    displacement boundary-condition block::
        self.BCStartToken, self.BCEndToken
    load definition block::
        self.LoadStartToken, self.LoadEndToken
    '''
    def __init__(self, fileType="inp", MadeFrom="abaqus", IntendedFor="abaqus"):
        if fileType in _AllowedInputType:
            self.fileType = fileType
        else:
            print (fileType,"is not accepted by the code at the moment")
        if MadeFrom in _AllowedMadeFrom:
            self.MadeFrom = MadeFrom
        else:
            print ("File from ",MadeFrom,"is not yet supported")
        if IntendedFor in _AllowedIntendedFor:
            self.IntendedFor = IntendedFor
        else:
            print (IntendedFor,"input files are yet to be supported")
        self._defineReadTokens()
    def _defineReadTokens(self):
        '''
        Internal helper: choose the token strings that match the file
        type and originating software selected at construction time.
        '''
        if self.fileType == "inp" and self.MadeFrom == "abaqus":
            self.PartStartToken = "*Part"
            self.PartEndToken = "*End Part"
            self.NodeStartToken = "*Node"
            self.NodeEndToken = "*Element, type"
            self.ElementStartToken = "*Element, type"
            self.ElementEndToken = "*End Part"
            self.MaterialStartToken = "*Material"
            self.MaterialEndToken = "**"
            self.BCStartToken = "** BOUNDARY"
            self.BCEndToken = "** --"
            self.LoadStartToken = "** LOADS"
            self.LoadEndToken = "** OUTPUT"
        elif self.fileType == "inp" and self.MadeFrom == "Ansys":
            self.PartStartToken = None
            self.PartEndToken = None
            self.NodeStartToken = "*NODE"
            self.NodeEndToken = "**"
            self.ElementStartToken = "*ELEMENT"
            self.ElementEndToken = "**"
            self.MaterialStartToken = "*ELEMENT"
            self.MaterialEndToken = "**"
| Python |
#===============================================================================
# encoding utf-8
# author :kanchan mahajan
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Elelement Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#===============================================================================
class myDict(dict):
    '''
    Sparse-matrix representation based on a dictionary.

    Entries are stored as {(row, column): value} with 1-based integer
    indices.  This class models a general matrix; symmetric variants can
    be derived from it.
    V 1
    '''
    def __init__(self, **kwargs):
        '''
        Populate the matrix from keyword arguments.

        NOTE(review): keyword names are strings, so _check_key rejects
        every one of them -- entries effectively must be added through
        item assignment after construction.  Kept for compatibility.
        '''
        for arg in kwargs:
            self.__setitem__(arg, kwargs.get(arg))
    #---------------- Overriding Methods ------------------
    def __setitem__(self, key, val):
        '''Store val only when key is a valid (row, column) tuple of ints.'''
        if self._check_key(key):
            super(myDict, self).__setitem__(key, val)
    def __add__(self, arg):
        '''
        Entry-wise matrix addition; returns a new myDict.
        Returns an empty myDict when arg is not a compatible matrix.
        '''
        result = myDict()
        if self._check_compatibility(arg):
            for key in self.keys():
                result[key] = self[key]
            for key in arg.keys():
                if key in result:
                    result[key] += arg[key]
                else:
                    result[key] = arg[key]
        return result
    def __sub__(self, arg):
        '''
        Entry-wise matrix subtraction; returns a new myDict.
        Returns an empty myDict when arg is not a compatible matrix.
        '''
        result = myDict()
        if self._check_compatibility(arg):
            for key in self.keys():
                result[key] = self[key]
            for key in arg.keys():
                if key in result:
                    result[key] -= arg[key]
                else:
                    result[key] = -arg[key]
        return result
    #---------------- Callable Methods ------------------
    def check_symmetry(self):
        '''
        Return True when every stored (i, j) entry equals its (j, i)
        mirror.  A missing mirror is treated as symmetric, since the
        matrix may store only one triangle.
        '''
        for key in self.keys():
            reverse_key = (key[1], key[0])
            # membership test on the dict itself (O(1)), not on keys()
            if reverse_key in self:
                if self.get(key) != self.get(reverse_key):
                    return False
            else:
                # entry stored on one side only: considered symmetric
                pass
        return True
    def fill_symmetry_elements(self):
        '''Insert every missing (j, i) mirror entry, copying the (i, j) value.'''
        # snapshot the keys: new entries are inserted while looping
        for key in list(self.keys()):
            reverse_key = (key[1], key[0])
            if reverse_key not in self:
                self.__setitem__(reverse_key, self[key])
    def get_size(self):
        '''
        gets the size of the matrix
        returns a tuple like (row,column)
        '''
        number_of_rows = max([x[0] for x in self.keys()])
        number_of_columns = max([x[1] for x in self.keys()])
        return number_of_rows, number_of_columns
    def ToArray(self):
        '''
        returns 2D Array representing the current object with 0. filled
        in for absent entries, as in sparse matrices
        '''
        row, column = self.get_size()
        matrix_2d_array = list()
        for i in range(1, row + 1):
            matrix_2d_array.append(list())
            for j in range(1, column + 1):
                matrix_2d_array[-1].append(self.get((i, j), 0.))
        return matrix_2d_array
    #---------------- Private Methods ------------------
    def _check_key(self, key):
        '''
        checks key for type, length and integer tuple
        '''
        if not isinstance(key, tuple):
            return False
        elif len(key) != 2:
            return False
        elif not isinstance(key[0], int) or not isinstance(key[1], int):
            return False
        else:
            return True
    def _check_compatibility(self, arg):
        '''
        checks the type of the other operand
        (the size check is deliberately disabled in this version)
        '''
        if not isinstance(arg, myDict):
            # raise not supported
            return False
        else:
            return True
    def _delete_row(self, row):
        '''
        deletes row (internal method)
        @args :
            row : int -- number of the row to be deleted
        '''
        self._internal_delete(row, "row")
    def _delete_column(self, column):
        '''
        deletes column (internal method)
        @args :
            column : int -- number of the column to be deleted
        '''
        self._internal_delete(column, "column")
    def _internal_delete(self, index, flag):
        '''
        Remove row or column *index* in place and shift all higher
        indices down by one; should not be used externally in any case.

        BUG FIX: the previous version never removed the deleted
        row/column and re-inserted unshifted duplicates of the entries
        beyond it.
        '''
        if flag == "row":
            j = 0
        elif flag == "column":
            j = 1
        else:
            raise NotImplementedError
        if not isinstance(index, int):
            return False
        shifted = {}
        # iterate over a snapshot because entries are deleted while looping
        for key in list(self.keys()):
            if key[j] == index:
                # the entry lies on the deleted row/column: drop it
                del self[key]
            elif key[j] > index:
                # the entry lies beyond the deleted index: shift it down
                if j == 0:
                    modified_key = (key[0] - 1, key[1])
                else:
                    modified_key = (key[0], key[1] - 1)
                shifted[modified_key] = self[key]
                del self[key]
            # entries before the deleted index keep their position
        self.update(shifted)
class element_mat(myDict):
    '''
    Elemental matrix derived from myDict.

    Unlike a myDict, which is assumed to be indexed from 1 up to its
    highest row/column number, an elemental matrix may start and end at
    arbitrary node numbers, so get_size counts distinct indices rather
    than taking a maximum.  All entries are assumed to be non-zero.
    '''
    def get_size(self):
        '''Return (rows, columns) as counts of the distinct indices used.'''
        row_indices = set(key[0] for key in self.keys())
        col_indices = set(key[1] for key in self.keys())
        return len(row_indices), len(col_indices)
| Python |
#===============================================================================
# encoding utf-8
# author :kanchan mahajan
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Elelement Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#===============================================================================
'''
This module is for parsing the ABAQUS input file and then creating an appropriate
variables for the further calculations
'''
import os
from global_constants import ReadingTokens
from Elements import Element_mapping
from errors import guarded_call
from assembly import Assembly
class reader():
    '''
    reader is a class which implements the reading mechanism for the input file
    creating an object of reader class is fairly easy task
    given that the input file fits the given requirements
    @params:
        necessary arguments
        filepath : file path of the file to be read
        Optional arguments:
        MadeFrom : Input file is prepared(written) from software (default value : "abaqus")
        fileType : extension of the file (default value : "inp")
        IntendedFor : Input file is Intended(written) for software (default value : "abaqus")
    also it will create two empty lists for the node and elements
    '''
    def __init__(self, filePath=None, fileType="inp",MadeFrom="abaqus",IntendedFor="abaqus"):
        # tokens delimit the blocks (nodes, elements, materials, BCs, loads)
        # inside the input file
        self.tokens=ReadingTokens(fileType, MadeFrom, IntendedFor)
        self.data = None
        self.nodes = []
        self.elements = []
        self.elementSets = dict()
        self.node_sets=dict()
        self.sections=dict()
        self.materials=dict()
        self.boundary_conditions=dict()
        self.loads=dict()
        self.filePath = filePath
        self.elementType = None
        #=======================================================================
        # currently reading the file inside init itself but can be removed at later steps based on the requirements
        #=======================================================================
        self._read()
    def _read(self):
        '''
        this is an internal function which checks the availibility of the input file and if file not available then produces an error
        and then it just reads the file in string format
        eventually it calls another internal function known as _sort for sequentially sorting the read data
        '''
        f = open(self.filePath, 'r')
        try:
            self.data = f.readlines()
        except:
            raise IOError("Cannot read from the specified file")
        finally:
            f.close()
        # map() yields a list under Python 2; the index searches below rely
        # on self.data being an indexable list of cleaned lines
        self.data = map(remove_endline, self.data)
        self._sort_data()
    def _sort_data(self):
        '''
        This is also an internal function which will sort the read data sequentially
        depending on the tokens created in the object initiation
        these tokens are ABAQUS input file specific
        '''
        # locate and process the node definition block
        nodeStart = _get_index(self.data,self.tokens.NodeStartToken)
        nodeEnd = _get_index(self.data,self.tokens.NodeEndToken, start=nodeStart + 1)
        self._process_nodeBlock(self.data[nodeStart + 1:nodeEnd])
        # locate and process the element block (the header line itself is
        # passed along because it carries the element type)
        elementStart = _get_index(self.data,self.tokens.ElementStartToken)
        elementEnd = _get_index(self.data,self.tokens.ElementEndToken, start=elementStart + 1)
        self._process_elementBlock(self.data[elementStart + 1:elementEnd], self.data[elementStart])
        # material block includes its header line
        materialStart=_get_index(self.data,self.tokens.MaterialStartToken)
        materialEnd = _get_index(self.data,self.tokens.MaterialEndToken, start=materialStart + 1)
        self._process_material_block(self.data[materialStart :materialEnd])
        self._process_nodal_set()
        # boundary-condition and load blocks; the start offsets skip the
        # token line(s) before searching for the closing token
        BCStart=_get_index(self.data,self.tokens.BCStartToken)
        BCEnd = _get_index(self.data,self.tokens.BCEndToken, start=BCStart + 2)
        self._process_boundary_conditions(self.data[BCStart :BCEnd])
        LoadStart=_get_index(self.data,self.tokens.LoadStartToken)
        LoadEnd = _get_index(self.data,self.tokens.LoadEndToken, start=LoadStart + 3)
        self._process_LoadBlock(self.data[LoadStart :LoadEnd])
        # translate the ABAQUS DOF numbering of the BCs into the program's own
        self.apply_DOF_mapping(self.boundary_conditions)
    def _process_elementBlock(self, element_list, element_line):
        '''
        This is an internal function
        this function separates and convert the elements block from the data read in previously
        after that it processes the element set definitions which are used afterwards to assign the
        section as well as the material properties
        also it processes section definitions and save those in python dict format
        Example::
            elements=[[1, 3], [3, 2]]
            elementSets={'_PICKEDSET2': [22, 42]}
            sections={' Profile-1':
                        {'elset': '_PICKEDSET2',
                        'radius': 2.0,
                        'temperature': 'GRADIENTS',
                        'section': 'CIRC',
                        'material': 'STRUCTURALSTEEL',
                        'orientation': [0.0, 0.0, -1.0]}
                        }
        '''
        # the element type is the text after "=" in "*Element, type=..."
        self.elementType = element_line.split("=")[-1]
        cornerNodes= Element_mapping[self.elementType]["cornerNodeIndices"]
        self.elementalStiffnessMatrix= Element_mapping[self.elementType]["stiffnessMatrix"]
        # a manual enumerator is used so that multi-line records can consume
        # their continuation lines with .next()
        element_enumerator = enumerate(element_list)
        for entry in element_enumerator:
            if "ELEMENT" in entry[-1]:
                continue
            if "*Elset" in entry[-1]:
                tmp=entry[-1].split(",")
                elementSetName = tmp[1].split("=")[-1]
                elementNumbers=element_enumerator.next()[-1]
                elementNumbers=[int(x) for x in elementNumbers.split(",")]
                # NOTE(review): assumes the Elset data line lists "first, last"
                # of a contiguous range -- confirm for generate-type Elsets
                self.elementSets[elementSetName]=range(elementNumbers[0],elementNumbers[1]+1)
            #===================================================================
            # *Beam Section, elset=_PickedSet2, material=STRUCTURALSTEEL, temperature=GRADIENTS, section=CIRC
            # 2.
            # 0.,0.,-1.
            #===================================================================
            elif "** Section" in entry[-1]:
                tmp=entry[-1].split(",")
                sectionName = tmp[0].split(":")[-1]
                # the section record spans three lines: keywords, radius,
                # orientation vector -- consumed here in order
                sectionDetails=element_enumerator.next()[-1]
                sectionDetails=sectionDetails.split(",")
                sectionDetails=[x.strip() for x in sectionDetails]
                sectionDetails=dict([x.split("=") for x in sectionDetails[1:]])
                sectionRadius=float(element_enumerator.next()[-1])
                sectionOrientation=[float(x) for x in element_enumerator.next()[-1].split(",")]
                sectionDetails.update({"radius":sectionRadius,"orientation":sectionOrientation})
                self.sections[sectionName]=sectionDetails
            else:
                # a trailing comma means the connectivity continues on the
                # next physical line
                if entry[-1].endswith(','):
                    try:
                        tmp = entry[-1] + element_enumerator.next()[-1]
                    except StopIteration:
                        continue
                else:
                    tmp = entry[-1]
                try:
                    tmp = tmp.split(',')
                    # keep only the corner-node fields of "eid, n1, n2, ..."
                    # NOTE(review): tmp.index(x) returns the first occurrence,
                    # which can mis-select fields when two of them are the
                    # identical string -- verify for degenerate connectivity
                    self.elements.append([int(x) for x in tmp if tmp.index(x) in cornerNodes])
                except:
                    raise ValueError(str(tmp) + " cannot be converted to int")
    def _process_nodeBlock(self, node_list):
        '''
        This is an internal function
        it converts the node positions in a list and the index of each sublist is the index of the node
        Example::
            nodes=[
                    [0.0, 0.0, 0.0],
                    [100.0, 0.0, 0.0],
                    [50.0, 0.0, 0.0]
                    ]
        '''
        for line in node_list:
            try:
                # drop the node id (field 0); keep only the coordinates
                self.nodes.append([float(x) for x in line.split(',')[1:] ])
                # pad 2D nodes with a zero z-coordinate
                if len(self.nodes[-1])==2:
                    self.nodes[-1].append(0.)
            except:
                raise ValueError(line + " cannot be converted to float")
    def _process_material_block(self,material_block):
        '''
        This is an internal function
        Material block definition should be limited to only Elasticity modulus, Poisson ratio and density
        and the material block should not contain any empty lines
        also the sequence of the definitions should be Elastic property first and then Density
        if otherwise the reading mechanism will fail and there wont be any further processing
        Example::
            materials={'STRUCTURALSTEEL':
                        {
                        'behaviour': 'Elastic',
                        'EMOD': 210000.0,
                        'POISSON': 0.3,
                        'density': 7.85e-06
                        }
                        }
        '''
        # the supported layout is exactly five lines:
        # name / behaviour / E,nu / density keyword / density value
        if len(material_block)==5:
            material_name=material_block[0].split(",")
            material_name=material_name[-1].split("=")[-1]
            self.materials[material_name]=dict()
            material_behaviour=material_block[1].replace("*","")
            self.materials[material_name]["behaviour"]=material_behaviour
            material_props=material_block[2].split(",")
            self.materials[material_name]["EMOD"]=float(material_props[0])
            self.materials[material_name]["POISSON"]=float(material_props[1])
            self.materials[material_name]["density"]=float(material_block[4])
        else:
            print len(material_block)
            raise NotImplementedError("Material Not supported")
    #===========================================================================
    # *Nset, nset=Set-fix, instance=PART-1-1
    # 1,
    # *Nset, nset=Set-load, instance=PART-1-1
    # 2,
    #===========================================================================
    def _process_nodal_set(self):
        '''
        This is an internal function
        It processes nodal sets in the input file which are used for applying the boundary conditions afterwards
        Example::
            nodeSets={'SET-FIX': [1],
                    'SET-LOAD': [2]}
        '''
        nset_index=_get_index(self.data,"*Nset",return_all=True)
        self.node_sets=dict()
        for index in nset_index:
            nodal_set=self.data[index]
            nodal_set_name=nodal_set.split(",")[1].split("=")[-1]
            # the node numbers are on the single line following the header
            nodes=self.data[index+1]
            self.node_sets[nodal_set_name]=list()
            for node_number in nodes.split(","):
                try:
                    self.node_sets[nodal_set_name].append(int(node_number))
                except:
                    # trailing commas produce empty fields; skip them
                    continue
    #===========================================================================
    # ** BOUNDARY CONDITIONS
    # **
    # ** Name: BC-fix Type: Displacement/Rotation
    # *Boundary
    # Set-fix, 1, 1
    # Set-fix, 2, 2
    # Set-fix, 6, 6
    # ** ----------------------------------------------------------------
    # **
    #===========================================================================
    def _process_boundary_conditions(self,BC_block):
        '''
        This is an internal function
        It processes the displacement type of boundary conditions and stores them
        according to the nodal sets to which they are assigned and afterward interpreted accordingly
        Example::
            boundary_conditions={'SET-FIX':
                                    {
                                    'DOFs': [1, 2, 3],
                                    'Type': ' Displacement/Rotation'
                                    }
                                    }
        '''
        starts=_get_index(BC_block,"** Name",return_all=True)
        for start in starts:
            # a BC record ends at the next "** Name" header, or at the
            # closing "** --" separator for the last record
            end=_get_index(BC_block,"** Name",start=start+2)
            if end==None:
                end=_get_index(BC_block,"** --",start=start+1)
            bc=BC_block[start:end]
            bc_name_type=bc[0].split("Name:")[-1]
            bc_name_type=bc_name_type.split("Type:")
            bc_type=bc_name_type[-1]
            if bc[1]=="*Boundary":
                for line in bc[2:end]:
                    try:
                        # data lines read "nset, firstDOF, lastDOF"
                        nset_dofs=line.split(",")
                        nset=nset_dofs[0]
                        if nset not in self.boundary_conditions:
                            self.boundary_conditions[nset]=dict()
                            self.boundary_conditions[nset]["DOFs"]=list()
                        self.boundary_conditions[nset]["DOFs"]+=range(int(nset_dofs[1]),int(nset_dofs[2])+1)
                        self.boundary_conditions[nset]["Type"]=bc_type
                    except Exception as e:
                        print "{0} occurred in boundary conditions block with {1}".format(e,nset_dofs)
                        continue
    #===========================================================================
    # ** LOADS
    # **
    # ** Name: Load-1   Type: Concentrated force
    # *Cload
    # Set-load, 2, -1000.
    # **
    #===========================================================================
    def _process_LoadBlock(self,LoadBlock):
        '''
        This is an internal function
        It processes the displacement type of boundary conditions and stores them according to the nodal sets
        to which they are assigned and afterward interpreted accordingly
        Example::
            loads={'SET-LOAD': {2: -100.0, 'Type': ' Concentrated force'}}
        '''
        DOF_mapping=Element_mapping[self.elementType]["DOF_mapping"]
        starts=_get_index(LoadBlock,"** Name",return_all=True)
        for start in starts:
            end=_get_index(LoadBlock,"** Name",start=start+2)
            if end==None:
                end=_get_index(LoadBlock,"**",start=start+1)
            load=LoadBlock[start:end]
            load_name_type=load[0].split("Name:")[-1]
            load_name_type=load_name_type.split("Type:")
            load_type=load_name_type[-1]
            # matches both "*Cload" and "*Dload" style keywords
            if "load" in load[1]:
                for line in load[2:end]:
                    try:
                        # data lines read "nset, DOF, magnitude"; the DOF is
                        # translated through the element's DOF mapping and
                        # magnitudes on the same mapped DOF accumulate
                        nset_dofs=line.split(",")
                        nset=nset_dofs[0]
                        key=DOF_mapping.get(int(nset_dofs[1]))
                        if nset not in self.loads:
                            self.loads[nset]=dict()
                        if "Type" not in self.loads[nset]:
                            self.loads[nset]["Type"]=""
                        if key not in self.loads[nset]:
                            self.loads[nset][key]=0.
                        self.loads[nset][key]+=float(nset_dofs[2])
                        self.loads[nset]["Type"]=load_type
                    except Exception as e:
                        print "{0} occurred in boundary material block".format(e)
                        continue
    def apply_DOF_mapping(self,dict_object):
        '''
        This function is for converting the DOF read from the input file to the desired DOF mapping
        as defined in the Elements.py module
        '''
        dof_mapping=Element_mapping[self.elementType]["DOF_mapping"]
        for key,value in dict_object.iteritems():
            old_Dofs =value["DOFs"]
            # DOFs without a mapping entry map to None and are silently dropped
            dict_object[key]["DOFs"]=sorted([x for x in map(dof_mapping.get,old_Dofs) if x])
def remove_endline(line):
    '''
    Strip newline, tab and carriage-return characters from a line read
    from the input file.
    '''
    for unwanted in ('\n', '\t', '\r'):
        line = line.replace(unwanted, '')
    return line
def _get_index(data,word,start=0,return_all=False):
tmp=[data.index(a,start) for a in data[start:] if a.startswith(word)]
if not return_all and tmp:
return tmp[0]
elif return_all and tmp:
return tmp
else:
return None
@guarded_call
def _test():
    '''
    Smoke test: read a sample 2D beam input file and assemble its
    stiffness and mass matrices.
    '''
    input_file = os.path.join("..", "Reading_and_Stiffness", "input_files", "2d_beam_10DOF-1_a.inp")
    input_reader = reader(input_file, MadeFrom="abaqus")
    model_assembly = Assembly(input_reader)
    stiff_mat, mass_mat = model_assembly.assemble_mat(input_reader.elementType)
    return stiff_mat, mass_mat, input_reader
def print_stiff_array(stiff_array):
    '''
    Print one row of the (dense) stiffness matrix per line.
    '''
    for row in stiff_array:
        print(row)
# Run the smoke test when this module is executed directly.
if __name__=="__main__":
    _test()
| Python |
'''
# -*- coding: utf-8 -*-
#==============================================================================
# # author :Pisarn Pasutanon
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is only for educational purposes and not for commercial purposes
# DISCLAIMER : The author is not responsible for any results pertaining to any analysis
#==============================================================================
L= choleski(a).
Incomplete Choleski decomposition: [L][L]transpose = [a].
based on Chunlei Xu module
'''
from math import sqrt
import numpy as np
from numpy import linalg
import Toarray
def choleski(a):
    '''
    Cholesky decomposition: return lower-triangular L with [L][L]^T = [a].

    @param a: symmetric positive-definite matrix (nested sequence or
              numpy array, shape n x n)
    @return: numpy array L, lower triangular
    @raise ValueError: via math.sqrt when a is not positive definite

    PORTABILITY FIX: xrange() is Python-2 only; range() iterates
    identically here and also works under Python 3.
    '''
    n = len(a)
    L = np.zeros((n, n))
    for i in range(n):
        for k in range(i + 1):
            tmp_sum = sum(L[i][j] * L[k][j] for j in range(k))
            if i == k:  # diagonal entry
                L[i][k] = sqrt(a[i][i] - tmp_sum)
            else:       # strictly lower entry
                L[i][k] = (1.0 / L[k][k] * (a[i][k] - tmp_sum))
    return L
#==============================================================================
#Incomplete Cholesky decomposition
#a = stiffness matrix
#n = size of matrix
#L = Lower Triangular matrix
#==============================================================================
def in_choleski(a):#incomplete Cholesky decomposition
    '''
    Incomplete Cholesky decomposition: like choleski(), but entries of L
    whose corresponding a[i][k] is zero are left at zero, preserving the
    sparsity pattern of [a] (useful as a preconditioner).

    @param a: symmetric positive-definite matrix, shape n x n
    @return: numpy array L, lower triangular, with the sparsity of a

    PORTABILITY FIX: xrange() is Python-2 only; range() iterates
    identically here and also works under Python 3.
    '''
    n = len(a)
    L = np.zeros((n, n), dtype=float)
    for i in range(n):
        for k in range(i + 1):
            tmp_sum = sum(L[i][j] * L[k][j] for j in range(k))
            if i == k:
                L[i][k] = sqrt(a[i][i] - tmp_sum)
            elif a[i][k] != 0.:
                L[i][k] = (1.0 / L[k][k] * (a[i][k] - tmp_sum))
            else:
                # keep the zero pattern of a
                L[i][k] = 0.
    return L
| Python |
'''
# -*- coding: utf-8 -*-
#==============================================================================
# # author :Pisarn Pasutanon
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#==============================================================================
'''
import sys
# NOTE(review): hard-coded absolute Windows path -- this breaks on any
# other machine; consider deriving the path from __file__ instead.
mydirpath='D:\\pop\\Project-Work-ACM\\googlecode\\trunk\\03_FEM_code\\'
sys.path.append(mydirpath)
#from Reading_and_Stiffness import input_reader
'''
#==============================================================================
# import reduced stiffness matrix
#==============================================================================
'''
#Kn,M = input_reader._test()
#print Kn
#force = {66:-1000}
#import numpy as np
#a=np.array(Kn.ToArray())
#aa = copy.deepcopy(a)#original matrix
class directsolve_LU():
    '''
    Direct solver for K.x = force using an LU (Doolittle) decomposition
    on a sparse matrix stored as a {(row, col): value} dict (1-based).

    Workflow: solve() decomposes K in place, then runs forward and
    backward substitution; the displacements end up in self.x.
    '''
    def __init__(self, Kn, force):
        self.K = Kn                # stiffness matrix as {(i, j): value}
        self.U = dict()            # upper triangle, filled by Upper_tri_dict (debug)
        self.L = dict()            # lower triangle, filled by Lower_tri_dict (debug)
        self.force = force         # load vector as {dof: value}
        self.row = [x[0] for x in self.K.keys()]
        self.column = [x[1] for x in self.K.keys()]
        self.size = max(self.row)  # assumed number of DOFs
    def LUdecomp_np(self, a):#looping the converted array from dict
        # In-place Doolittle LU on a numpy array: the multipliers are stored
        # in the strictly-lower triangle, the U factor in the upper triangle.
        self.a = a
        n= len(self.a)
        for j in range (0,n-1):
            for i in range (j+1,n):
                if self.a[i,j] != 0.0:
                    l = self.a[i,j]/self.a[j,j]
                    self.a[i,j+1:n] = self.a[i,j+1:n] - l*self.a[j,j+1:n]
                    self.a[i,j] = l
        return self.a
    def LUdecomp_dict(self): # looping the dictionary
        # Same Doolittle elimination driven directly on the sparse dict;
        # missing entries default to 0. (or 1. for a missing pivot).
        # NOTE: has_key() is Python-2 only.
        for j in range (1,self.size):
            for i in range (j+1,self.size+1):
                if self.K.has_key((i,j)):
                    if self.K.get((i,j),0.0) != 0.0:
                        l = self.K.get((i,j),0.)/self.K.get((j,j),1.)
                        for ii in range (j+1,self.size+1):
                            self.K[(i,ii)] = self.K.get((i,ii),0.) - l*self.K.get((j,ii),0.)
                        self.K[(i,j)] = l
                else:
                    continue
        #print self.K
        return self.K
    def Upper_tri_dict(self):
        # Extract U (diagonal and above) from the decomposed K -- debug aid.
        keys = sorted([i for i in self.K.keys() if i[0]<i[1] or i[0]==i[1]])
        for j in keys:
            self.U.update({j:self.K.get(j)})
        return self.U
    def Lower_tri_dict(self):
        # Extract L (below the diagonal, with an explicit unit diagonal)
        # from the decomposed K -- debug aid.
        keys = sorted([i for i in self.K.keys() if i[0]>i[1] or i[0]==i[1]])
        for j in keys:
            if j[0] == j[1]:
                self.L.update({j:1.})
            else:
                self.L.update({j:self.K.get(j)})
        return self.L
    #==============================================================================
    #Forward and backward substitution
    # Ly = force ---> get y , then using to solve Ux=y ---> x
    #==============================================================================
    def forward_dict(self):
        # Solve L.y = force by forward substitution; L carries an implicit
        # unit diagonal, hence the division by 1.
        # NOTE(review): for i == 1 the inner loop is empty, so y[1] is never
        # assigned and backward_dict later falls back to 0. -- confirm intended.
        self.y = dict()
        for i in range (1,self.size+1):
            if i in self.row:
                tmp = 0.
                for j in range (1, i):
                    if j in self.column:
                        #print (i,j)
                        if j == i:
                            # NOTE(review): unreachable -- j only runs up to i-1
                            tmp += self.K.get((i,j),1.)* self.y.get((j),0.)
                        else:
                            tmp += self.K.get((i,j),0.)* self.y.get((j),0.)
                        self.y[i] = (self.force.get((i),0.) - tmp)/1
                    else:
                        continue
            else:
                continue
    def backward_dict(self):
        # Solve U.x = y by backward substitution.  x[i] is re-assigned on
        # every j; the last assignment (full tmp) is the final value.  The
        # j == i term contributes 0 because x[i] is not yet in self.x when
        # .get defaults to 0.
        self.x = dict()
        for i in range (self.size, 0, -1):
            if i in self.row:
                tmp = 0.
                for j in range (i, self.size+1):
                    if j in self.column:
                        #print (i,j)
                        tmp += self.K.get((i,j),1.)*self.x.get((j),0.)
                        #print tmp
                        self.x[i] = (self.y.get((i),0.) - tmp)/self.K.get((i,i),1.)
                        #print self.x[i]
                    else:
                        continue
            else:
                continue
    def solve(self):
        # Full pipeline: decompose K in place, then substitute.
        #LUdecomp_np()
        self.LUdecomp_dict()
        #self.Upper_tri_dict()#use to check
        #self.Lower_tri_dict()#use to check
        self.forward_dict()
        self.backward_dict()
    #self.x is the displacement in the dictionary format
def _test():
    """Smoke test: build the reduced stiffness matrix and force vector with
    the project's readers, solve with directsolve_LU, print the result."""
    import sys
    import os
    # Make the sibling packages importable when run from this directory.
    mydirpath=os.path.abspath("..")
    sys.path.append(mydirpath)
    from BC_and_Reduction import processing_BC
    from Reading_and_Stiffness import input_reader
    reduced_stiffness,mass_matrix,input_data = input_reader._test()
    # Sample of what input_reader._test() returns (kept for reference):
    #===========================================================================
    # reduced_stiffness={(1, 1): 1.004528000000000E+011,
    # (4, 1): -5.022640000000001E+010,
    # (1, 4): -5.022640000000001E+010,
    # (2, 2): 4.815513600000000E+008,
    # (5, 2): -2.407756800000000E+008,
    # (2, 5): -2.407756800000000E+008,
    # (6, 2): 6.019392000000000E+009,
    # (2, 6): 6.019392000000000E+009,
    # (3, 3): 4.012928000000000E+011,
    # (5, 3): -6.019392000000000E+009,
    # (3, 5): -6.019392000000000E+009,
    # (6, 3): 1.003232000000000E+011,
    # (3, 6): 1.003232000000000E+011,
    # (4, 4): 5.022640000000001E+010,
    # (5, 5): 2.407756800000000E+008,
    # (6, 5): -6.019392000000000E+009,
    # (5, 6): -6.019392000000000E+009,
    # (6, 6): 2.006464000000000E+011}
    #===========================================================================
    #print stiff_matrix
    #print input_data.loads
    # Assemble the sparse {dof: value} force dict from the model loads.
    F = processing_BC.force_vector(input_data.loads,input_data.node_sets)
    solve_object=directsolve_LU(reduced_stiffness,F)
    solve_object.solve()
    print solve_object.x
if __name__=="__main__":
    _test()
| Python |
# -*- coding: utf-8 -*-
"""
Created on Sat Mar 01 08:01:05 2014
@author: Pisarn
"""
def ToArray(dic):
    """Expand a sparse {(row, col): value} dict into a dense 2-D list.

    Indices in ``dic`` are 1-based; missing entries become 0.0.  The
    result has max-row rows and max-col columns.

    CAUTION: if an exploit_symmetry_* method has been used, only one
    triangle is stored and this yields a wrong (non-symmetric) result;
    use fill_symmetry_elements before calling this.
    """
    row = max(k[0] for k in dic)
    column = max(k[1] for k in dic)
    matrix_2d_array = list()
    for i in range(1, row+1):
        # dic.get avoids the per-cell `(i, j) in dic.keys()` scan of the
        # original (keys() builds a list on every probe in Python 2),
        # turning the fill into a plain O(rows*cols) pass.
        matrix_2d_array.append([dic.get((i, j), 0.) for j in range(1, column+1)])
    return matrix_2d_array
'''
# -*- coding: utf-8 -*-
#==============================================================================
# # author :Pisarn Pasutanon
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is only for educational purposes and not for commercial use
# DISCLAIMER : The author is not responsible for any result pertaining to a particular analysis
#==============================================================================
'''
import direct_solve
import numpy as np
#==============================================================================
# Example: solve Kn.x = force with the dict-based LU solver.  Only the
# upper triangle of the symmetric matrix is entered; the mirror entries
# are generated by fill_symmetry_elements() below.
#==============================================================================
def ToArray(dic):
    """Expand a sparse {(row, col): value} dict into a dense 2-D list.

    Indices are 1-based; missing entries become 0.0.
    CAUTION: if an exploit_symmetry_* method has been used this yields a
    wrong result; call fill_symmetry_elements first.
    """
    # Parameter renamed from `dict` (shadowed the builtin) to match the
    # sibling Toarray module; positional callers are unaffected.
    row = max(k[0] for k in dic)
    column = max(k[1] for k in dic)
    matrix_2d_array = list()
    for i in range(1, row+1):
        matrix_2d_array.append([dic.get((i, j), 0.) for j in range(1, column+1)])
    return matrix_2d_array
def fill_symmetry_elements():
    """Mirror every stored (i, j) entry of the module-level Kn to (j, i)
    so the solver sees the full symmetric matrix."""
    # list() snapshot: mutating Kn while iterating its live key view
    # would raise RuntimeError under Python 3.
    for key in list(Kn.keys()):
        reverse_key = (key[1], key[0])
        if reverse_key not in Kn:
            Kn[reverse_key] = Kn[key]
Kn = {(1,1):2., (1,2):-2., (1,5):-1.,
      (2,2):3., (2,3):-2.,
      (3,3):5., (3,4) :-3.,
      (4,4):10., (4,5):4.,
      (5,5):10.}
force = {2:1}#Force-vector
# BUG FIX: the original called fill_symmetry_elements() *before* its def
# statement had executed, raising NameError at import time.  Definitions
# now precede the example code.
fill_symmetry_elements()
test = direct_solve.directsolve_LU(Kn, force)
test.solve()
#Decomp_K=np.array(ToArray(test.Kn))
#Upper = np.array(ToArray(test.U))
#Lower = np.array(ToArray(test.L))
'''
# -*- coding: utf-8 -*-
#==============================================================================
# # author :Pisarn Pasutanon
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#==============================================================================
'''
import numpy as np
from math import sqrt
'''
#==============================================================================
# Example
#==============================================================================
data = [float(i) for i in range (0,13)]
col = [0,1,4,1,2,4,1,3,4,5,3,4,5]#column
ia = [0,1,3,6,10,12,13]#pointer+1
x = [0,1,2,3,4,5]
#Ax = y
'''
class iterative():
    """Conjugate-gradient solver for a sparse dict stiffness matrix.

    stiffness : {(row, col): value}, 1-based indices; the full symmetric
                matrix (both triangles) must be stored.
    force     : {dof: value} right-hand side, 1-based dof numbers.
    tol       : 2-norm of the residual at which the iteration stops.

    solve() builds a 1-padded CSR representation and runs CG; the solution
    is the numpy column vector self.x (row 0 is unused padding).
    """
    def __init__(self, stiffness, force, tol=1.0e-9):
        self.tol = tol
        self.stiffness = stiffness
        self.force = force
        #self.size = self.stiffness.get_size()[0] # if use dict
        self.size = max([i[0] for i in self.stiffness.keys()])
        # CSR arrays carry a dummy 0-th entry so all indices stay 1-based.
        self.data = [0.]#nonzero elements
        self.col = [0]#column index
        self.ia = [0]#pointer for row
        self.x = np.zeros(self.size+1, dtype=float).reshape(self.size+1, 1)
        self.b = np.zeros(self.size+1, dtype=float).reshape(self.size+1, 1)
        self.i = 0  # iterations actually performed
    def solve(self):
        """Convert the inputs and run conjugate gradients; result in self.x."""
        self.vector()
        self.csr()
        self.conjGrad()
    #==============================================================================
    # Convert force vector dict into numpy array
    #==============================================================================
    def vector(self):
        # items() instead of Python-2-only iteritems(); sorted() kept for a
        # deterministic traversal order.
        for dof, value in sorted(self.force.items()):
            self.b.put(dof, value)
    #==============================================================================
    # Convert the stiffness matrix into (1-padded) CSR format
    #==============================================================================
    def csr(self):
        row_of = [0]
        for key, value in sorted(self.stiffness.items()):
            self.data.append(value)
            self.col.append(key[1])
            row_of.append(key[0])
        # A row pointer is emitted whenever the row number increases
        # (this assumes every row 1..size holds at least one entry).
        # Leftover debug prints from the original were removed.
        for i in range(1, len(row_of)):
            if row_of[i] > row_of[i-1]:
                self.ia.append(i)
        self.ia.append(i+1)  # final sentinel: one past the last stored entry
    #==============================================================================
    # Matrix multiplication
    #==============================================================================
    def multi(self, vector):
        """CSR matrix-vector product; returns a (size+1, 1) column vector."""
        self.y = [0.]
        n = len(self.ia)-1
        for i in range(1, n):
            start = self.ia[i]
            stop = self.ia[i+1]
            tmp = 0.
            for k in range(start, stop):
                # BUG FIX: index the scalar explicitly.  The original
                # accumulated 1-element sub-arrays, making np.array(self.y)
                # a ragged sequence, which modern numpy rejects.
                tmp = tmp + self.data[k]*vector[self.col[k], 0]
            self.y.append(tmp)
        return np.array(self.y).reshape(n, 1)
    #==============================================================================
    # Conjugate gradient method
    # res = residual, p = search direction
    # alpha = step length, beta = improvement
    #==============================================================================
    def conjGrad(self):
        n = len(self.b)-1
        max_it = n*2
        res = self.b - self.multi(self.x)  # r0 = b - A.x0
        p = res.copy()
        for i in range(1, n+1):
            u = self.multi(p)
            alpha = np.dot(p.T, res)/np.dot(p.T, u)
            # Explicit [0][0]: float() of an ndim>0 array is deprecated
            # and scheduled for removal in numpy.
            al = float(alpha[0][0])
            self.x += (al*p)
            res = self.b - self.multi(self.x)
            if (sqrt(np.dot(res.T, res)[0][0])) < self.tol or i == max_it:#termination criterias
                break
            else:
                # beta enforces A-conjugacy of the next search direction.
                beta = -np.dot(res.T, u)/np.dot(p.T, u)
                be = float(beta[0][0])
                p = res + be*p
        self.i = i
'''
def _test():
import sys
import os
mydirpath=os.path.abspath("..")
sys.path.append(mydirpath)
from BC_and_Reduction import processing_BC
from Reading_and_Stiffness import input_reader
reduced_stiffness,mass_matrix,input_data = input_reader._test()
print reduced_stiffness
print input_data.loads
F = processing_BC.force_vector(input_data.loads,input_data.node_sets)
solve_object=iterative(reduced_stiffness,F,tol=1.0e-6)
solve_object.solve()
print solve_object.x
if __name__=="__main__":
_test()
'''
| Python |
'''
# -*- coding: utf-8 -*-
#==============================================================================
# # author :Pisarn Pasutanon
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is only for educational purposes and not for commercial use
# DISCLAIMER : The author is not responsible for any result pertaining to a particular analysis
#==============================================================================
'''
import numpy as np
from math import sqrt
from scipy import linalg
import In_choleski as cho
class iterative_num():#using numpy array
    """Preconditioned conjugate-gradient solver on dense numpy arrays.

    stiffness : square 2-D array-like (symmetric positive definite expected)
    force     : 1-D array-like right-hand side, same length as stiffness
    tol       : termination threshold on the (preconditioned) residual norm

    The preconditioner is the incomplete Cholesky factor produced by the
    project module In_choleski.
    """
    def __init__(self, stiffness, force, tol):
        self.tol = tol
        self.stiffness = stiffness
        self.L_mat = cho.in_choleski(self.stiffness)#perform incomplete Cholesky
        self.L_tran = np.transpose(self.L_mat)
        self.size = len(self.stiffness)
        # Column vectors (size x 1): b = force, x = solution (starts at 0).
        self.b = np.array (force,dtype=float).reshape(self.size,1)
        self.x = np.zeros(self.size, dtype = float).reshape(self.size,1)
        self.i=0  # number of iterations actually performed
    def solve(self):
        """Run the solver; the result is left in self.x."""
        #self.conjGrad() #Conjugate Gradient
        self.Pre_conjGrad() #Conjugate Gradient with precondition
    #==============================================================================
    # Conjugate gradient method(with numpy)
    # res = residual
    # b = RHS vector (Force)
    # x = Solution vector (displacement)
    # p = search direction
    # alpha = step length
    # beta = improvement
    #==============================================================================
    def conjGrad(self):
        """Plain (unpreconditioned) CG; kept as an alternative to Pre_conjGrad."""
        n = len(self.b)-1
        max_it = n*2
        res = np.dot(self.stiffness,self.x) - self.b
        # Skip the iteration entirely if x0 already solves the system.
        if (np.dot(res.T,res) != 0.):
            p = res
            for i in range(1,max_it+1):
                u = np.dot(self.stiffness,p)
                alpha = - np.dot(res.T,res)/np.dot(p.T,u)
                al = float(alpha)
                self.x += (al*p)
                res_o = res
                # Cheap residual update: r_new = r_old + alpha*A*p.
                res = res_o + al*np.dot(self.stiffness,p)
                if(sqrt(np.dot(res.T,res)[0][0])) < self.tol or i == max_it:
                    break
                else:
                    beta = np.dot(res.T,res)/np.dot(res_o.T,res_o)
                    be = float(beta)
                    p = res+be*p
        return self.x
    #==============================================================================
    # Precondition Conjugate gradient method
    # res = residual
    # b = RHS vector (force)
    # x = Solution vector (displacement)
    # L = Lower Triangular matrix from incomplete Cholesky decomposition
    # r = modified residual with preconditioner
    # p = search direction
    # alpha = step length
    # beta = improvement
    #==============================================================================
    def Pre_conjGrad(self):
        """CG preconditioned with the incomplete Cholesky factor self.L_mat."""
        n = len(self.b)-1
        max_it = n*2
        # NOTE(review): forming L^-1 explicitly is O(n^3); triangular solves
        # would be cheaper.  Acceptable for an educational code.
        L_inv = Inverse(self.L_mat)
        L_inv_tran = np.transpose(L_inv)
        res = np.dot(self.stiffness,self.x) - self.b
        if (np.dot(res.T,res) != 0.):
            r = np.dot(L_inv,res)
            p = np.dot(L_inv_tran,r)
            for i in range(1,max_it+1):
                u = np.dot(self.stiffness,p)
                alpha = - np.dot(np.transpose(r),r)/np.dot(np.transpose(p),u)
                al = float(alpha)
                self.x += (al*p)
                r_o = r
                r = r_o + al*np.dot(L_inv,np.dot(self.stiffness,p))
                if(sqrt(np.dot(np.transpose(r),r)[0][0])) < self.tol or i == max_it:
                    break
                else:
                    beta = np.dot(np.transpose(r),r)/np.dot(np.transpose(r_o),r_o)
                    be = float(beta)
                    p_o = p
                    p = np.dot(L_inv_tran,r)+be*p_o
        # NOTE(review): if the initial residual was exactly zero, `i` is
        # unbound here and this raises NameError.
        self.i=i
def _test():
    """Smoke test: assemble the reduced system via the project readers and
    solve it with the preconditioned conjugate-gradient solver."""
    import sys
    import os
    # Make the sibling packages importable when run from this directory.
    mydirpath=os.path.abspath("..")
    sys.path.append(mydirpath)
    from BC_and_Reduction import processing_BC
    from Reading_and_Stiffness import input_reader
    reduced_stiffness,mass_matrix,input_data = input_reader._test()
    print reduced_stiffness
    print input_data.loads
    # Assemble the sparse {dof: value} force dict from the model loads.
    F = processing_BC.force_vector(input_data.loads,input_data.node_sets)
    # NOTE(review): iterative_num len()s and matrix-multiplies its stiffness
    # argument like a dense array, while input_reader appears to return a
    # dict elsewhere in this project -- verify the expected type.
    solve_object=iterative_num(reduced_stiffness,F,tol=1.0e-9)
    solve_object.solve()
    print solve_object.x
if __name__=="__main__":
    _test()
#==============================================================================
# ## module Inverse_L from Chunlei Xu
#==============================================================================
def Inverse_L(L):
    """Invert a lower-triangular matrix given as a list of row lists.

    Column i of the inverse is obtained by forward-substituting the unit
    vector e_i through L.  Returns a new n x n list of lists; L itself is
    not modified.
    """
    def solveLb(L, b):
        """Forward substitution: solve L.x = b for lower-triangular L."""
        n = len(L)
        if len(b) != n:
            # BUG FIX: `raise ValueError, "msg"` is Python-2-only syntax
            # (a SyntaxError under Python 3); the call form below is valid
            # in both Python 2.7 and Python 3.
            raise ValueError("incompatible dimensions")
        x = [0.0] * n
        for i in range(n):
            S = b[i]
            for j in range(i):
                S -= L[i][j]*x[j]
            x[i] = S/L[i][i]
        return x
    n = len(L)
    b = [0.0] * n
    invL = [[0.0] * n for i in range(n)]
    for i in range(n):
        # Reuse one RHS buffer: set e_i, solve, then clear the bit again.
        b[i] = 1.0
        x = solveLb(L, b)
        for j in range(n):
            invL[j][i] = x[j]
        b[i] = 0.0
    return invL
#==============================================================================
# ## module Inverse_L
#==============================================================================
def Inverse(L):
    """Dense matrix inverse, delegated to scipy.linalg (LAPACK-backed)."""
    inverse = linalg.inv(L)
    return inverse
'''
# -*- coding: utf-8 -*-
#==============================================================================
# # author :Pisarn Pasutanon
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#==============================================================================
'''
import numpy as np
import copy
import sys
mydirpath='D:\\pop\\Project-Work-ACM\\googlecode\\trunk\\03_FEM_code\\'
sys.path.append(mydirpath)
#==============================================================================
# Direct solver for LDLT
#==============================================================================
class directsolve_LDL():
    """Direct skyline solver for a symmetric matrix via LDL^T factorisation.

    K     : {(row, col): value} sparse dict, 1-based indices (symmetric).
    force : {dof: value} sparse right-hand side, 1-based dof numbers.

    solve() leaves the displacement in the dict self.x.
    """
    def __init__(self, K, force):
        self.K = K
        self.force = force
        self.row = [x[0] for x in self.K.keys()]
        self.column = [x[1] for x in self.K.keys()]
        self.size = max(self.row)
        self.m = list()          # skyline: first nonzero row of each column
        self.Kn = dict()         # working copy that receives the L/D factors
        self.temp_g = dict()     # intermediate g-quantities of the factorisation
        self.diagonal = dict()   # diagonal (D) entries after factorisation
        # NOTE(review): the line below only *references* the bound method,
        # it never calls it -- a harmless no-op.  Callers invoke solve()
        # explicitly (see _test); probably meant self.solve() or nothing.
        self.solve
    def m_vect(self):
        """Record, for every column j, the row index of its first nonzero
        entry (the skyline profile) in self.m."""
        self.g = list()
        self.column = list()
        self.m_dict = dict()
        for j in range (1, self.size+1):
            for i in range (1, self.size+1):
                # NOTE(review): sorted(self.K) rebuilds a sorted key list on
                # every probe; plain membership would not need the sort.
                if (i,j) in sorted(self.K):
                    if self.K[(i,j)] != 0:
                        self.g.append(i)
                        self.column.append(j)
                        self.m_dict.update({(i,j):self.K[(i,j)]})
                        break
        self.m += self.g
    #==============================================================================
    # Fill zero entry to the members within skyline
    #==============================================================================
    def fill(self):
        """Copy the upper triangle of K into Kn, inserting explicit zeros
        for every in-skyline position."""
        for j in range (1, self.size+1):
            for i in range (self.m[j-1], j+1):
                # Precedence note: this reads as ((in K and i<j) or i==j).
                if (i,j) in self.K.keys() and i<j or i==j:
                    self.Kn.update({(i,j):self.K[(i,j)]})
                elif (i,j) in self.K.keys() and i>j:
                    # NOTE(review): unreachable -- i never exceeds j in this
                    # loop (range stops at j); kept as-is.
                    self.Kn.__delitem__((i,j))
                else:
                    self.Kn.update({(i,j):0.})
    #==============================================================================
    # Diagonal dictionary
    #==============================================================================
    def diagonal_dict(self):
        """Collect the (i, i) entries of the factorised Kn, i.e. D."""
        # NOTE: iteritems() is Python 2 only.
        for key, value in self.Kn.iteritems():
            if key[0] == key[1]:
                self.diagonal.update({key: value})
    #==============================================================================
    # Perform LDLT factorisation
    # tmp = temporary values
    # temp_g = intermediate quantities (dictionary)
    #==============================================================================
    def LDLcomp(self):
        """Column-wise LDL^T factorisation of Kn inside the skyline.

        Afterwards Kn holds L (stored in the upper triangle, unit diagonal
        implied) and D on the diagonal.
        """
        for j in range (2, self.size+1):
            for i in range (self.m[j-1], j):
                tmp= 0.
                if (i,j) in self.Kn.keys():
                    if i == self.m[j-1]:
                        # First in-skyline entry of the column: no summation.
                        self.temp_g[(i,j)] = self.Kn[(i,j)]
                    else:
                        for k in range (1, i):
                            if (self.Kn.get((k,i),0) != 0):
                                tmp += (self.Kn.get((k,i),0.)*self.temp_g.get((k,j),0.))
                            else:
                                continue
                        self.temp_g[(i,j)] = self.Kn.get((i,j),0.) - tmp
                    # l_ij = g_ij / d_ii ; the column diagonal is updated
                    # incrementally: d_jj -= l_ij * g_ij.
                    self.Kn[(i,j)] = self.temp_g[(i,j)]/self.Kn.get((i,i),1.)
                    self.Kn[(j,j)] = self.Kn.get((j,j),0.) - self.Kn.get((i,j),0.)*self.temp_g.get((i,j),0.)
    #==============================================================================
    # Forward substitution:
    # Ax = Force ---> (LDLT)x = Force -----> let (DLT)x = y
    # Ly = Force ---> solve for y
    #==============================================================================
    def forward_dict(self):
        """Forward substitution L.y = force (unit diagonal of L implied)."""
        self.y = dict()
        s = min(self.column)
        # Seed the first unknown directly from the force vector.
        self.y.update({s:self.force.get((1),0.)})
        for j in range (1, self.size+1):
            if j in self.column:
                tmp = 0.
                for i in range (1, j):
                    if i in self.row:
                        # NOTE(review): i == j never holds in this loop
                        # (range stops at j-1), so the first branch is dead.
                        if i == j:
                            tmp += self.Kn.get((i,j),1.)* self.y.get((i),0.)
                        else:
                            tmp += self.Kn.get((i,j),0.)* self.y.get((i),0.)
                        self.y[j] = (self.force.get((j),0.) - tmp)/1
                    else:
                        continue
            else:
                continue
    #==============================================================================
    # Backsubstitution:
    # (DLT)x = y ----- (LT)x = V2 where V2 is inverse(D)*y
    # ----- finally solve for x (displacement)
    #==============================================================================
    def backsub(self):
        """Apply D^-1, then back-substitute through L^T over the skyline."""
        self.x = dict()
        self.d_inv = dict()
        self.displacement = dict() #displacement(intermediate result)
        # NOTE(review): self.displacement is never used below -- only the
        # local variable `displacement` is.
        det = 1
        for v in self.diagonal.values(): #determinant of diagonal elements
            det = det*v
        for j in range (1, self.size+1):
            # 1/d_jj computed as (product of the other diagonals) / det.
            temp2 = 1
            l = [v for v in self.diagonal.keys() if v != (j,j)]
            for i in l:
                temp2 = temp2*self.diagonal[i]
            self.d_inv.update({(j,j):temp2/det})
        for key in self.d_inv.keys():
            # V2 = D^-1 * y
            temp3 = self.d_inv[(key)]*self.y.get((key[0]),0.)
            self.x.update({key[0]: temp3})
        # Back substitution through L^T, sweeping columns right-to-left and
        # carrying the most recent solved value in `displacement`.
        displacement = self.x[len(self.x)]
        self.x.update({self.size:displacement})
        for j in range (self.size, 0,-1):
            for i in range(self.m[j-1], j):
                vv = self.x[(i)]-self.Kn[(i,j)]*displacement
                self.x.update({i:vv})
                displacement = self.x[i]
    def solve(self):
        """Run the full pipeline; the displacement dict ends up in self.x."""
        self.m_vect() #search first nonzero element
        self.fill() #fill between the first nonzero and diagonal element with zero
        self.LDLcomp() #perform LDLT
        self.diagonal_dict() #diagonal elements after LDLT
        self.forward_dict()
        self.backsub()
        #self.x is the displacement in the dictionary format
def _test():
    """Smoke test: solve the stiffness system produced by the project
    readers with the skyline LDL^T solver and print the displacements."""
    import sys
    import os
    # Make the sibling packages importable when run from this directory.
    mydirpath=os.path.abspath("..")
    sys.path.append(mydirpath)
    from BC_and_Reduction import processing_BC
    from Reading_and_Stiffness import input_reader
    stiff_matrix,mass_matrix,input_data = input_reader._test()
    print stiff_matrix
    print input_data.loads
    # Assemble the sparse {dof: value} force dict from the model loads.
    F = processing_BC.force_vector(input_data.loads,input_data.node_sets)
    solve_object=directsolve_LDL(stiff_matrix,F)
    solve_object.solve()
    print solve_object.x
if __name__=="__main__":
    _test()
| Python |
'''
# -*- coding: utf-8 -*-
#==============================================================================
# # author :Pisarn Pasutanon
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#==============================================================================
'''
#import sys
#mydirpath='D:\\pop\\Project-Work-ACM\\googlecode\\trunk\\03_FEM_code\\Eigensolver'
#sys.path.append(mydirpath)
import numpy as np
import In_choleski as cho
import Toarray
def Directsolve_cholesky(reduced_stiffness,F):
    """Solve K.x = F by dense Cholesky factorisation (K = T.T^t).

    reduced_stiffness : {(row, col): value} sparse dict, 1-based indices,
                        symmetric positive definite.
    F                 : {dof: value} force dict, 1-based dof numbers.
    Returns the displacement as an (n, 1) numpy array (0-based rows).
    """
    reduced_stiff = np.array(Toarray.ToArray(reduced_stiffness))
    n = len(reduced_stiff)
    force = np.array([0]*n, dtype=float).reshape(n, 1)
    for dof, value in F.items():
        # BUG FIX: dof numbers are 1-based (as in the other solvers of this
        # project) while the dense arrays are 0-based; the original wrote
        # force[dof], shifting every load down one row and raising
        # IndexError for dof == n.
        force[dof-1] = value
    T = cho.choleski(reduced_stiff)       # lower-triangular factor
    T_tran = np.transpose(T)
    y = np.array([0]*n, dtype=float).reshape(n, 1)
    x = np.array([0]*n, dtype=float).reshape(n, 1)
    # Forward substitution: T.y = force
    for i in range(n):
        y[i] = (force[i] - sum(T[i][j]*y[j] for j in range(i)))/T[i][i]
    # Back substitution: T^t.x = y
    # BUG FIX: the original started at i == n (range(n, -1, -1)), one past
    # the last valid row, raising IndexError on the first iteration.
    for i in range(n-1, -1, -1):
        tmp = 0.
        for j in range(i, n):
            tmp += T_tran[i][j]*x[j]
        x[i] = (y[i] - tmp)/T_tran[i][i]
    return x
def _test():
    """Smoke test: read the model, build the force vector and run the
    dense Cholesky solver."""
    import sys
    import os
    # Make the sibling packages importable when run from this directory.
    mydirpath=os.path.abspath("..")
    sys.path.append(mydirpath)
    from BC_and_Reduction import processing_BC
    from Reading_and_Stiffness import input_reader
    reduced_stiffness,mass_matrix,input_data = input_reader._test()
    print reduced_stiffness
    print input_data.loads
    # Assemble the sparse {dof: value} force dict from the model loads.
    F = processing_BC.force_vector(input_data.loads,input_data.node_sets)
    Directsolve_cholesky(reduced_stiffness,F)
    #solve_object.solve()
    #print x
if __name__=="__main__":
    _test()
'''
# -*- coding: utf-8 -*-
#==============================================================================
# # author :Pisarn Pasutanon
# module : this module is a part of educational project
# and the calculations here are done according to conventional theory
# book of Finite Element Methods
#
# CAUTION: This code is onlly for educational purpose and not for commercial purpose
# DISCLAIMER : Author is not responsible any result pertaining certain analysis
#==============================================================================
'''
import numpy as np
import ConjG_solve as CG
# Example: 5x5 symmetric system with *both* triangles stored -- the CG
# solver expects the full matrix, unlike the LU example which mirrors the
# upper triangle on the fly.
#Example: Kn = stiffness
Kn = {(1,1):2., (1,2):-2., (1,5):-1.,
      (2,1):-2., (2,2):3., (2,3):-2.,
      (3,2):-2, (3,3):5., (3,4) :-3.,
      (4,3):-3, (4,4):10., (4,5):4.,
      (5,1):-1, (5,4):4, (5,5):10.}
force = {2:1}#Force-vector
#==============================================================================
# test
#==============================================================================
test = CG.iterative(Kn, force, tol=1.0e-9)
test.solve()
#check
# Expected displacements for dofs 1..5 (exact solution of the system above).
print'solution is [636,619,292,74,34]'
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
################################################################################
#
# qooxdoo - the new era of web development
#
# http://qooxdoo.org
#
# Copyright:
# 2008 - 2012 1&1 Internet AG, Germany, http://www.1und1.de
#
# License:
# LGPL: http://www.gnu.org/licenses/lgpl.html
# EPL: http://www.eclipse.org/org/documents/epl-v10.php
# See the LICENSE file in the project's top-level directory for details.
#
# Authors:
# * Thomas Herchenroeder (thron7)
#
################################################################################
##
# This is a stub proxy for the real generator.py
##
import sys, os, re, subprocess, codecs, optparse
CMD_PYTHON = sys.executable
QOOXDOO_PATH = '../../../../projects/tools/qooxdoo-2.1.1-sdk'
QX_PYLIB = "tool/pylib"
##
# A derived OptionParser class that ignores unknown options (The parent
# class raises in those cases, and stops further processing).
# We need this, as we are only interested in -c/--config on this level, and
# want to ignore pot. other options.
#
class IgnoringUnknownOptionParser(optparse.OptionParser):
    """OptionParser that skips unknown/ambiguous options instead of
    erroring out, so unrecognised flags pass through to the real
    generator untouched."""
    ##
    # <rargs> is the raw argument list. The original _process_args mutates
    # rargs, processing options into <values> and copying interspersed args
    # into <largs>. This overridden version ignores unknown or ambiguous
    # options.
    def _process_args(self, largs, rargs, values):
        # Keep re-invoking the parent until rargs is exhausted.
        # NOTE(review): this assumes the parent consumed the offending
        # token before raising; otherwise the loop would never advance --
        # confirm against the optparse internals of the target Python.
        while rargs:
            try:
                optparse.OptionParser._process_args(self, largs, rargs, values)
            except (optparse.BadOptionError, optparse.AmbiguousOptionError):
                pass
def parseArgs():
    """Parse the command line, tolerating unknown flags.

    Only -c/--config and -v/--verbose matter at this stub level; every
    other option is ignored here and forwarded to the real generator.
    Returns the (options, leftover_args) pair from optparse.
    """
    parser = IgnoringUnknownOptionParser(add_help_option=False)
    parser.add_option("-c", "--config", dest="config", metavar="CFGFILE",
                      default="config.json", help="path to configuration file")
    parser.add_option("-v", "--verbose", dest="verbose", action="store_true",
                      default=False, help="run in verbose mode")
    options, args = parser.parse_args(sys.argv[1:])
    return options, args
ShellOptions, ShellArgs = parseArgs()
# this is from misc.json, duplicated for decoupling
_eolComment = re.compile(r'(?<![a-zA-Z]:)//.*$', re.M) # double $ for string.Template
_mulComment = re.compile(r'/\*.*?\*/', re.S)
def stripComments(s):
b = _eolComment.sub('',s)
b = _mulComment.sub('',b)
return b
def getQxPath():
    """Locate the qooxdoo SDK directory.

    Resolution order: the QOOXDOO_PATH environment variable, then the
    "let.QOOXDOO_PATH" entry of the config file (JSON parse first, regex
    scan as fallback), then the hard-coded default.  The result is
    normalised relative to this script's directory.
    """
    path = QOOXDOO_PATH
    # OS env takes precedence
    # NOTE: os.environ.has_key() is Python 2 only.
    if os.environ.has_key("QOOXDOO_PATH"):
        path = os.environ["QOOXDOO_PATH"]
    # else use QOOXDOO_PATH from config.json
    else:
        config_file = ShellOptions.config
        if os.path.exists(config_file):
            # try json parsing with qx json
            if not path.startswith('${'): # template macro has been resolved
                sys.path.insert(0, os.path.join(path, QX_PYLIB))
                try:
                    from misc import json
                    got_json = True
                except:
                    # Deliberate best-effort: any import failure just means
                    # we fall back to the regex scan below.
                    got_json = False
                got_path = False
                if got_json:
                    config_str = codecs.open(config_file, "r", "utf-8").read()
                    #config_str = stripComments(config_str) # not necessary under demjson
                    config = json.loads(config_str)
                    p = config.get("let")
                    if p:
                        p = p.get("QOOXDOO_PATH")
                        if p:
                            path = p
                            got_path = True
                # regex parsing - error prone
                if not got_path:
                    qpathr=re.compile(r'"QOOXDOO_PATH"\s*:\s*"([^"]*)"\s*,?')
                    conffile = codecs.open(config_file, "r", "utf-8")
                    aconffile = conffile.readlines()
                    for line in aconffile:
                        mo = qpathr.search(line)
                        if mo:
                            path = mo.group(1)
                            break # assume first occurrence is ok
    # Resolve relative paths against the directory containing this script.
    path = os.path.normpath(os.path.join(os.path.dirname(os.path.abspath(sys.argv[0])), path))
    return path
os.chdir(os.path.dirname(os.path.abspath(sys.argv[0]))) # switch to skeleton dir
qxpath = getQxPath()
REAL_GENERATOR = os.path.join(qxpath, 'tool', 'bin', 'generator.py')
# Bail out early with a readable message if the SDK is not where expected.
if not os.path.exists(REAL_GENERATOR):
    print "Cannot find real generator script under: \"%s\"; aborting" % REAL_GENERATOR
    sys.exit(1)
elif ShellOptions.verbose:
    print "\nInvoking real generator under %s ..." % REAL_GENERATOR
# Re-run the real generator with the same interpreter and all original args.
argList = []
argList.append(CMD_PYTHON)
argList.append(REAL_GENERATOR)
argList.extend(sys.argv[1:])
if sys.platform == "win32":
    # On Windows only arguments that actually contain spaces get quoted.
    argList1=[]
    for arg in argList:
        if arg.find(' ')>-1:
            argList1.append('"%s"' % arg)
        else:
            argList1.append(arg)
    argList = argList1
else:
    argList = ['"%s"' % x for x in argList] # quote argv elements
cmd = " ".join(argList)
# shell=True because the command line is a single pre-quoted string.
retval = subprocess.call(cmd, shell=True)
# After a "build" job, additionally package the app with node-webkit.
if "build" in ShellArgs:
    subprocess.call("nodewebkit.py", shell=True)
sys.exit(retval)
| Python |
import shutil, os
import tarfile
import json
def concat(dst, src1, src2):
    """Write the bytes of src1 followed by src2 into the file dst.

    Used to glue the node-webkit runtime binary and the app's .nw archive
    into one self-running executable.
    """
    # 'with' guarantees all three handles are closed (and dst flushed)
    # even on error; the original leaked the two source file objects and
    # relied on CPython refcounting.
    with open(dst, 'wb') as destination:
        with open(src1, 'rb') as first:
            shutil.copyfileobj(first, destination)
        with open(src2, 'rb') as second:
            shutil.copyfileobj(second, destination)
def tarLinuxFiles(tarname, src):
    """Pack the Linux runtime files from *src* into the gzipped tar *tarname*.

    Only the three files node-webkit needs at runtime are included, stored
    at the archive root (arcname drops the src/ prefix).
    """
    # Context manager closes (and finalises) the archive even if add()
    # raises; the original left the TarFile open on error.
    with tarfile.open(tarname, "w:gz") as tar:
        for name in ["libffmpegsumo.so", "muvconf", "nw.pak"]:
            tar.add(src + "/" + name, arcname=name)
print ''
print '>>> Build node-webkit'
print ' - Load package.json'
# NOTE: file() is Python 2 only; the version string drives every artifact name.
package = json.load(file("package.json"))
version = package['version']
# Output layout under ./nw : one sub-directory per target platform.
root_dir = "nw"
root_win32 = root_dir + "/win32";
root_lin32 = root_dir + "/lin32";
root_lin64 = root_dir + "/lin64";
# Location prefix of the unpacked node-webkit runtimes (platform suffix
# appended where used below).
root_nw = "../../Tools/node-webkit-v0.4.2-"
tarname_x32 = root_dir + "/muvconf-" + version + "-ia32.tar.gz"
tarname_x64 = root_dir + "/muvconf-" + version + "-x64.tar.gz"
nw_name = root_dir + "/muvconf-" + version + ".nw"
winzip_x32 = root_dir + "/muvconf-" + version + "-x86"
if not os.path.exists(root_dir):
    os.makedirs(root_dir)
print ' - Remove old files'
# Clear every artifact of a previous run so stale files never ship.
if os.path.exists(nw_name):
    os.remove(nw_name)
if os.path.exists(root_win32):
    shutil.rmtree(root_win32)
if os.path.exists(root_lin32):
    shutil.rmtree(root_lin32)
if os.path.exists(root_lin64):
    shutil.rmtree(root_lin64)
if os.path.exists(tarname_x32):
    os.remove(tarname_x32)
if os.path.exists(tarname_x64):
    os.remove(tarname_x64)
print ' - Create .nw file'
# The .nw package is simply a zip of the build/ tree plus package.json.
shutil.copyfile("package.json", "build/package.json")
shutil.make_archive(root_dir + "/muvconf", format="zip", root_dir="build")
shutil.move(root_dir + "/muvconf.zip", nw_name)
print ' - Build Windows x32'
# Windows: the executable is the nw.exe runtime with the .nw zip appended.
shutil.copytree(root_nw + "win-ia32", root_win32)
concat(root_win32 + "/muvconf.exe", root_win32 + "/nw.exe", nw_name)
os.remove(root_win32 + "/nw.exe")
os.remove(root_win32 + "/nwsnapshot.exe")
shutil.make_archive(winzip_x32, format="zip", root_dir=root_win32)
def buildLinux(root_lin, nwextra, tarname):
    """Assemble one Linux bundle: copy the runtime, append the .nw archive
    to the nw binary, drop the unused tools, then tar the result.

    root_lin : destination directory for this architecture
    nwextra  : suffix of the node-webkit runtime dir (e.g. "linux-ia32")
    tarname  : path of the .tar.gz archive to produce
    """
    shutil.copytree(root_nw + nwextra, root_lin)
    concat(root_lin + "/muvconf", root_lin + "/nw", nw_name)
    os.remove(root_lin + "/nw")
    os.remove(root_lin + "/nwsnapshot")
    tarLinuxFiles(tarname, root_lin)
print ' - Build Linux x32'
buildLinux(root_lin32, "linux-ia32", tarname_x32);
print ' - Build Linux x64'
buildLinux(root_lin64, "linux-x64", tarname_x64);
| Python |
from ResourceManager import *
from Rendering import *
import GameClient
print "Python Scripting Loaded."
## Don't hardcode the base... instead get the "gameRoot" environment variable
# NOTE(review): resourceService is not defined in this script -- presumably
# injected into the namespace by the C++ host (or one of the star-imports
# above); confirm before editing.
resourceService.addResourceLocation("C:\\dev\\Zen\\examples\\taBBall\\resources",
    "FileSystem", "Pong", True)
print "Added resource location"
## This is done in C++ for now
## Create the Left Wall
##wall = resourceService.loadResource("cube.mesh", "WallLeft")
##wall.setMaterialName("wall")
##wall.setNormaliseNormals(True)
##node = sceneService.createChildNode("WallLeft")
##node.attachResource(wall)
##node.setPosition(-95.0, 0.0, 0.0)
##node.setScale(0.05, 1.45, 0.1)
| Python |
from ResourceManager import *
from Rendering import *
import GameClient
#Establish the root skin directory
# NOTE(review): MySkin / MySkin2 are never assigned in this script; unless
# the host engine (or a star-import above) injects them, the first "if"
# raises NameError -- confirm how the skin-selection flags are provided.
if MySkin:
    addResourceLocation("~/resources/ui/skins/MySkin", "FileSystem", "KoZ", False)
elif MySkin2:
    addResourceLocation("~/resources/ui/skins/MySkin2", "FileSystem", "KoZ", False)
else:
    addResourceLocation("~/resources/ui/skins/Default", "FileSystem", "KoZ", False)
#Establish the icon directory for skill icons, etc.
# NOTE(review): this unconditionally re-adds the MySkin directory even when
# a different skin was selected above -- looks like a copy/paste slip;
# verify the intended icon path.
addResourceLocation("~/resources/ui/skins/MySkin", "FileSystem", "KoZ", False)
| Python |
import csv
# Dump character.csv row by row (Python 2 script: binary "rb" mode for
# csv.reader and print statements are the 2.x conventions).
# NOTE(review): the file handle is never closed -- it relies on CPython
# refcounting; wrap in a with-block if this outgrows a throwaway script.
reader = csv.reader(open("character.csv", "rb"))
for row in reader:
    print row
# Keep the console window open until the user confirms.
raw_input("press <enter>")
| Python |
#Defines is where all C++ calls are converted into readable Python variables
#Check out www.indiezen.org/wiki/wiki/KoZ/Scripts for the full complete list of available arguments.
from ResourceManager import *
from Rendering import *
import GameClient
#Allows the addition of a resource location with the following arguments
#(path, resource container type, resource container, recursive)
# NOTE(review): gameClient is not defined in this file -- it is expected to
# come from one of the star-imports above.  The bound method is re-exported
# as a module-level shortcut for the other scripts.
addResourceLocation = gameClient().getGUIResourceService().addResourceLocation
| Python |
from ResourceManager import *
from Rendering import *
import GameClient
print "Python Scripting Loaded."
## Don't hardcode the base... instead get the "gameRoot" environment variable
# NOTE(review): here gameClient is used as an object (no call), while the
# sibling Defines script calls gameClient() -- one of the two usages is
# probably wrong; confirm the API exposed by the host.
gameClient.getRenderingResourceService().addResourceLocation("~/resources",
    "FileSystem", "KoZ", True)
print "Added resource location"
## TODO Execute Client.py (TR - I Don't know how todo this... do you?)
| Python |
"""Convert Wavefront OBJ / MTL files into Three.js (JSON model version, to be used with web worker based ascii / binary loader)
-------------------------
How to use this converter
-------------------------
python convert_obj_three.py -i infile.obj -o outfile.js [-m "morphfiles*.obj"] [-c "morphcolors*.obj"] [-a center|centerxz|top|bottom|none] [-s smooth|flat] [-t ascii|binary] [-d invert|normal] [-b] [-e]
Notes:
- flags
-i infile.obj input OBJ file
-o outfile.js output JS file
-m "morphfiles*.obj" morph OBJ files (can use wildcards, enclosed in quotes multiple patterns separate by space)
-c "morphcolors*.obj" morph colors OBJ files (can use wildcards, enclosed in quotes multiple patterns separate by space)
-a center|centerxz|top|bottom|none model alignment
-s smooth|flat smooth = export vertex normals, flat = no normals (face normals computed in loader)
-t ascii|binary export ascii or binary format (ascii has more features, binary just supports vertices, faces, normals, uvs and materials)
-d invert|normal invert transparency
-b bake material colors into face colors
-e export edges
-x 10.0 scale and truncate
- by default:
use smooth shading (if there were vertex normals in the original model)
will be in ASCII format
original model is assumed to use non-inverted transparency / dissolve (0.0 fully transparent, 1.0 fully opaque)
no face colors baking
no edges export
- binary conversion will create two files:
outfile.js (materials)
outfile.bin (binary buffers)
--------------------------------------------------
How to use generated JS file in your HTML document
--------------------------------------------------
<script type="text/javascript" src="Three.js"></script>
...
<script type="text/javascript">
...
// load ascii model
var jsonLoader = new THREE.JSONLoader();
jsonLoader.load( { model: "Model_ascii.js", callback: function( geometry ) { createScene( geometry) } } );
// load binary model
var binLoader = new THREE.BinaryLoader();
binLoader.load( { model: "Model_bin.js", callback: function( geometry ) { createScene( geometry) } } );
function createScene( geometry ) {
var mesh = new THREE.Mesh( geometry, new THREE.MeshFaceMaterial() );
}
...
</script>
-------------------------------------
Parsers based on formats descriptions
-------------------------------------
http://en.wikipedia.org/wiki/Obj
http://en.wikipedia.org/wiki/Material_Template_Library
-------------------
Current limitations
-------------------
- for the moment, only diffuse color and texture are used
(will need to extend shaders / renderers / materials in Three)
- texture coordinates can be wrong in canvas renderer
(there is crude normalization, but it doesn't
work for all cases)
- smoothing can be turned on/off only for the whole mesh
----------------------------------------------
How to get proper OBJ + MTL files with Blender
----------------------------------------------
0. Remove default cube (press DEL and ENTER)
1. Import / create model
2. Select all meshes (Select -> Select All by Type -> Mesh)
3. Export to OBJ (File -> Export -> Wavefront .obj) [*]
- enable following options in exporter
Material Groups
Rotate X90
Apply Modifiers
High Quality Normals
Copy Images
Selection Only
Objects as OBJ Objects
UVs
Normals
Materials
Edges
- select empty folder
- give your exported file name with "obj" extension
- click on "Export OBJ" button
4. Your model is now all files in this folder (OBJ, MTL, number of images)
- this converter assumes all files staying in the same folder,
(OBJ / MTL files use relative paths)
- for WebGL, textures must be power of 2 sized
[*] If OBJ export fails (Blender 2.54 beta), patch your Blender installation
following instructions here:
http://www.blendernation.com/2010/09/12/blender-2-54-beta-released/
------
Author
------
AlteredQualia http://alteredqualia.com
"""
import fileinput
import operator
import random
import os.path
import getopt
import sys
import struct
import math
import glob
# #####################################################
# Configuration
# #####################################################
# Module-level defaults; each one may be overridden by a command-line
# flag in the __main__ section at the bottom of this file.
ALIGN = "none" # center centerxz bottom top none
SHADING = "smooth" # smooth flat
TYPE = "ascii" # ascii binary
TRANSPARENCY = "normal" # normal invert
TRUNCATE = False # -x flag: emit pre-scaled, truncated integer vertices
SCALE = 1.0 # scale factor applied when TRUNCATE is on
BAKE_COLORS = False # -b flag: bake material colors into face colors
EXPORT_EDGES = False # -e flag: export unique edge list
# default colors for debugging (each material gets one distinct color):
# white, red, green, blue, yellow, cyan, magenta
COLORS = [0xeeeeee, 0xee0000, 0x00ee00, 0x0000ee, 0xeeee00, 0x00eeee, 0xee00ee]
# #####################################################
# Templates
# #####################################################
TEMPLATE_FILE_ASCII = u"""\
// Converted from: %(fname)s
// vertices: %(nvertex)d
// faces: %(nface)d
// normals: %(nnormal)d
// colors: %(ncolor)d
// uvs: %(nuv)d
// materials: %(nmaterial)d
// edges: %(nedge)d
//
// Generated with OBJ -> Three.js converter
// http://github.com/alteredq/three.js/blob/master/utils/exporters/convert_obj_three.py
var model = {
"version" : 2,
"scale" : %(scale)f,
"materials": [%(materials)s],
"vertices": [%(vertices)s],
"morphTargets": [%(morphTargets)s],
"morphColors": [%(morphColors)s],
"normals": [%(normals)s],
"colors": [%(colors)s],
"uvs": [[%(uvs)s]],
"faces": [%(faces)s],
"edges" : [%(edges)s]
};
postMessage( model );
close();
"""
TEMPLATE_FILE_BIN = u"""\
// Converted from: %(fname)s
// vertices: %(nvertex)d
// faces: %(nface)d
// materials: %(nmaterial)d
//
// Generated with OBJ -> Three.js converter
// http://github.com/alteredq/three.js/blob/master/utils/exporters/convert_obj_three.py
var model = {
"version" : 1,
"materials": [%(materials)s],
"buffers": "%(buffers)s"
};
postMessage( model );
close();
"""
TEMPLATE_VERTEX = "%f,%f,%f"
TEMPLATE_VERTEX_TRUNCATE = "%d,%d,%d"
TEMPLATE_N = "%.5g,%.5g,%.5g"
TEMPLATE_UV = "%.5g,%.5g"
TEMPLATE_COLOR = "%.3g,%.3g,%.3g"
TEMPLATE_COLOR_DEC = "%d"
TEMPLATE_EDGE = "%d,%d"
TEMPLATE_MORPH_VERTICES = '\t{ "name": "%s", "vertices": [%s] }'
TEMPLATE_MORPH_COLORS = '\t{ "name": "%s", "colors": [%s] }'
# #####################################################
# Utils
# #####################################################
def file_exists(filename):
    """Check that *filename* can actually be opened for reading.

    More reliable than a bare existence test because it also verifies
    read permission (links and permissions magic on Unix filesystems).

    @rtype: boolean
    """
    try:
        handle = open(filename, 'r')
    except IOError:
        return False
    handle.close()
    return True
def get_name(fname):
    """Derive a model name from a file path ("path/fname.js" -> "fname")."""
    base = os.path.basename(fname)
    root, _ext = os.path.splitext(base)
    return root
def bbox(vertices):
    """Compute the axis-aligned bounding box of a vertex array.

    Returns a dict mapping 'x'/'y'/'z' to [min, max] pairs; an empty
    input yields zero ranges on every axis.
    """
    if not vertices:
        return { 'x':[0,0], 'y':[0,0], 'z':[0,0] }
    xs = [v[0] for v in vertices]
    ys = [v[1] for v in vertices]
    zs = [v[2] for v in vertices]
    return { 'x': [min(xs), max(xs)],
             'y': [min(ys), max(ys)],
             'z': [min(zs), max(zs)] }
def translate(vertices, t):
    """Translate every vertex in *vertices* in-place by vector *t*."""
    for v in vertices:
        v[0] += t[0]
        v[1] += t[1]
        v[2] += t[2]
def center(vertices):
    """Shift the model in-place so its bounding-box midpoint is the origin."""
    bb = bbox(vertices)
    mid = [bb[axis][0] + (bb[axis][1] - bb[axis][0]) / 2.0
           for axis in ('x', 'y', 'z')]
    translate(vertices, [-mid[0], -mid[1], -mid[2]])
def top(vertices):
    """Align the model's top with the floor (Y-axis) and center it in X/Z.

    Mutates *vertices* in place.
    """
    bb = bbox(vertices)
    cx = bb['x'][0] + (bb['x'][1] - bb['x'][0]) / 2.0
    cz = bb['z'][0] + (bb['z'][1] - bb['z'][0]) / 2.0
    translate(vertices, [-cx, -bb['y'][1], -cz])
def bottom(vertices):
    """Align the model's bottom with the floor (Y-axis) and center it in X/Z.

    Mutates *vertices* in place.
    """
    bb = bbox(vertices)
    cx = bb['x'][0] + (bb['x'][1] - bb['x'][0]) / 2.0
    cz = bb['z'][0] + (bb['z'][1] - bb['z'][0]) / 2.0
    translate(vertices, [-cx, -bb['y'][0], -cz])
def centerxz(vertices):
    """Center the model around X and Z only, leaving Y untouched.

    Mutates *vertices* in place.
    """
    bb = bbox(vertices)
    cx = bb['x'][0] + (bb['x'][1] - bb['x'][0]) / 2.0
    cz = bb['z'][0] + (bb['z'][1] - bb['z'][0]) / 2.0
    translate(vertices, [-cx, 0, -cz])
def normalize(v):
    """Scale 3d vector *v* to unit length in-place (no-op for zero vectors)."""
    length = math.sqrt(v[0] ** 2 + v[1] ** 2 + v[2] ** 2)
    if not length:
        return
    v[0] /= length
    v[1] /= length
    v[2] /= length
def veckey3(v):
    """Hashable key for a 3d vector: components rounded to 6 decimals."""
    return tuple(round(c, 6) for c in v[:3])
# #####################################################
# MTL parser
# #####################################################
def texture_relative_path(fullpath):
    """Strip directories: textures are referenced by bare filename only."""
    return os.path.basename(fullpath)
def parse_mtl(fname):
    """Parse MTL file.

    Returns a dict mapping material identifiers to property dicts whose
    keys mirror the Three.js JSON material fields (colorDiffuse,
    mapDiffuse, transparency, ...).

    NOTE(review): assumes the file begins with a "newmtl" line; a
    property line before the first "newmtl" would raise NameError on
    `identifier` -- confirm inputs are always well-formed.
    """
    materials = {}
    for line in fileinput.input(fname):
        chunks = line.split()
        if len(chunks) > 0:
            # Material start
            # newmtl identifier
            if chunks[0] == "newmtl" and len(chunks) == 2:
                identifier = chunks[1]
                if not identifier in materials:
                    materials[identifier] = {}
            # Diffuse color
            # Kd 1.000 1.000 1.000
            if chunks[0] == "Kd" and len(chunks) == 4:
                materials[identifier]["colorDiffuse"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
            # Ambient color
            # Ka 1.000 1.000 1.000
            if chunks[0] == "Ka" and len(chunks) == 4:
                materials[identifier]["colorAmbient"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
            # Specular color
            # Ks 1.000 1.000 1.000
            if chunks[0] == "Ks" and len(chunks) == 4:
                materials[identifier]["colorSpecular"] = [float(chunks[1]), float(chunks[2]), float(chunks[3])]
            # Specular coefficient
            # Ns 154.000
            if chunks[0] == "Ns" and len(chunks) == 2:
                materials[identifier]["specularCoef"] = float(chunks[1])
            # Transparency
            # Tr 0.9 or d 0.9
            if (chunks[0] == "Tr" or chunks[0] == "d") and len(chunks) == 2:
                # -d invert flips the dissolve convention (see module docstring)
                if TRANSPARENCY == "invert":
                    materials[identifier]["transparency"] = 1.0 - float(chunks[1])
                else:
                    materials[identifier]["transparency"] = float(chunks[1])
            # Optical density
            # Ni 1.0
            if chunks[0] == "Ni" and len(chunks) == 2:
                materials[identifier]["opticalDensity"] = float(chunks[1])
            # Diffuse texture
            # map_Kd texture_diffuse.jpg
            if chunks[0] == "map_Kd" and len(chunks) == 2:
                materials[identifier]["mapDiffuse"] = texture_relative_path(chunks[1])
            # Ambient texture
            # map_Ka texture_ambient.jpg
            if chunks[0] == "map_Ka" and len(chunks) == 2:
                materials[identifier]["mapAmbient"] = texture_relative_path(chunks[1])
            # Specular texture
            # map_Ks texture_specular.jpg
            if chunks[0] == "map_Ks" and len(chunks) == 2:
                materials[identifier]["mapSpecular"] = texture_relative_path(chunks[1])
            # Alpha texture
            # map_d texture_alpha.png
            if chunks[0] == "map_d" and len(chunks) == 2:
                materials[identifier]["mapAlpha"] = texture_relative_path(chunks[1])
            # Bump texture
            # map_bump texture_bump.jpg or bump texture_bump.jpg
            if (chunks[0] == "map_bump" or chunks[0] == "bump") and len(chunks) == 2:
                materials[identifier]["mapBump"] = texture_relative_path(chunks[1])
            # Illumination
            # illum 2
            #
            # 0. Color on and Ambient off
            # 1. Color on and Ambient on
            # 2. Highlight on
            # 3. Reflection on and Ray trace on
            # 4. Transparency: Glass on, Reflection: Ray trace on
            # 5. Reflection: Fresnel on and Ray trace on
            # 6. Transparency: Refraction on, Reflection: Fresnel off and Ray trace on
            # 7. Transparency: Refraction on, Reflection: Fresnel on and Ray trace on
            # 8. Reflection on and Ray trace off
            # 9. Transparency: Glass on, Reflection: Ray trace off
            # 10. Casts shadows onto invisible surfaces
            if chunks[0] == "illum" and len(chunks) == 2:
                materials[identifier]["illumination"] = int(chunks[1])
    return materials
# #####################################################
# OBJ parser
# #####################################################
def parse_vertex(text):
    """Parse one OBJ face-vertex chunk into its index triple.

    Possible formats: "v", "v/t", "v/t/n", "v//n".
    Missing indices come back as 0 (OBJ indices themselves are 1-based).
    """
    chunks = text.split("/")
    result = { 'v': int(chunks[0]), 't': 0, 'n': 0 }
    if len(chunks) > 1 and chunks[1]:
        result['t'] = int(chunks[1])
    if len(chunks) > 2 and chunks[2]:
        result['n'] = int(chunks[2])
    return result
def parse_obj(fname):
    """Parse OBJ file.

    Returns (faces, vertices, uvs, normals, materials, mtllib):
      faces     -- list of dicts of 1-based index lists plus face state
                   ('material', 'group', 'object', 'smooth')
      vertices  -- list of [x, y, z]
      uvs       -- list of [u, v, w] (w defaults to 0)
      normals   -- list of [x, y, z] (possibly not unit length)
      materials -- dict of material name -> integer index (order of first use)
      mtllib    -- MTL filename referenced by the OBJ, or ""

    NOTE(review): the state variable `object` shadows the builtin, and
    the face loop reuses `v` after the vertex branch -- harmless here,
    but renaming would aid readability.
    """
    vertices = []
    normals = []
    uvs = []
    faces = []
    materials = {}
    mcounter = 0
    mcurrent = 0
    mtllib = ""
    # current face state
    group = 0
    object = 0
    smooth = 0
    for line in fileinput.input(fname):
        chunks = line.split()
        if len(chunks) > 0:
            # Vertices as (x,y,z) coordinates
            # v 0.123 0.234 0.345
            if chunks[0] == "v" and len(chunks) == 4:
                x = float(chunks[1])
                y = float(chunks[2])
                z = float(chunks[3])
                vertices.append([x,y,z])
            # Normals in (x,y,z) form; normals might not be unit
            # vn 0.707 0.000 0.707
            if chunks[0] == "vn" and len(chunks) == 4:
                x = float(chunks[1])
                y = float(chunks[2])
                z = float(chunks[3])
                normals.append([x,y,z])
            # Texture coordinates in (u,v[,w]) coordinates, w is optional
            # vt 0.500 -1.352 [0.234]
            if chunks[0] == "vt" and len(chunks) >= 3:
                u = float(chunks[1])
                v = float(chunks[2])
                w = 0
                if len(chunks)>3:
                    w = float(chunks[3])
                uvs.append([u,v,w])
            # Face
            if chunks[0] == "f" and len(chunks) >= 4:
                vertex_index = []
                uv_index = []
                normal_index = []
                for v in chunks[1:]:
                    vertex = parse_vertex(v)
                    if vertex['v']:
                        vertex_index.append(vertex['v'])
                    if vertex['t']:
                        uv_index.append(vertex['t'])
                    if vertex['n']:
                        normal_index.append(vertex['n'])
                faces.append({
                    'vertex':vertex_index,
                    'uv':uv_index,
                    'normal':normal_index,
                    'material':mcurrent,
                    'group':group,
                    'object':object,
                    'smooth':smooth,
                    })
            # Group
            if chunks[0] == "g" and len(chunks) == 2:
                group = chunks[1]
            # Object
            if chunks[0] == "o" and len(chunks) == 2:
                object = chunks[1]
            # Materials definition
            if chunks[0] == "mtllib" and len(chunks) == 2:
                mtllib = chunks[1]
            # Material
            if chunks[0] == "usemtl" and len(chunks) == 2:
                material = chunks[1]
                # assign indices in order of first use
                if not material in materials:
                    mcurrent = mcounter
                    materials[material] = mcounter
                    mcounter += 1
                else:
                    mcurrent = materials[material]
            # Smooth shading
            if chunks[0] == "s" and len(chunks) == 2:
                smooth = chunks[1]
    return faces, vertices, uvs, normals, materials, mtllib
# #####################################################
# Generator - faces
# #####################################################
def setBit(value, position, on):
    """Return *value* with the bit at *position* set (on truthy) or cleared."""
    mask = 1 << position
    if on:
        return value | mask
    return value & ~mask
def generate_face(f, fc):
    """Serialize one face into the compact Three.js JSON face stream.

    f  -- face record from parse_obj
    fc -- matching face record used for the baked color lookup
    Returns a comma-separated string: type bitmask first, then the data
    fields in the exact order expected by THREE.JSONLoader.
    """
    is_quad = len(f['vertex']) != 3
    n_vertices = 4 if is_quad else 3
    has_material = True  # for the moment OBJs without materials get default material
    has_face_uvs = False  # not supported in OBJ
    has_face_vertex_uvs = len(f['uv']) >= n_vertices
    has_face_normals = False  # don't export any face normals (computed in engine)
    has_face_vertex_normals = len(f["normal"]) >= n_vertices and SHADING == "smooth"
    has_face_colors = BAKE_COLORS
    has_face_vertex_colors = False  # not supported in OBJ

    # bit layout must match JSONLoader: quad, material, faceUvs,
    # faceVertexUvs, faceNormals, faceVertexNormals, faceColors, faceVertexColors
    flags = [is_quad, has_material, has_face_uvs, has_face_vertex_uvs,
             has_face_normals, has_face_vertex_normals,
             has_face_colors, has_face_vertex_colors]
    face_type = 0
    for bit, flag in enumerate(flags):
        face_type = setBit(face_type, bit, flag)

    # order is important, must match order in JSONLoader
    data = [face_type]
    # indices are clamped to n_vertices in case of polygons bigger than quads
    data.extend(f['vertex'][i] - 1 for i in range(n_vertices))
    data.append(f['material'])
    if has_face_vertex_uvs:
        data.extend(f['uv'][i] - 1 for i in range(n_vertices))
    if has_face_vertex_normals:
        data.extend(f['normal'][i] - 1 for i in range(n_vertices))
    if has_face_colors:
        data.append(fc['material'])
    return ",".join(map(str, data))
# #####################################################
# Generator - chunks
# #####################################################
def hexcolor(c):
    """Pack an RGB triplet of floats in [0,1] into a 24-bit 0xRRGGBB int."""
    r = int(c[0] * 255)
    g = int(c[1] * 255)
    b = int(c[2] * 255)
    return (r << 16) + (g << 8) + b
def generate_vertex(v, option_vertices_truncate, scale):
    """Format one vertex; truncated mode pre-scales and emits integers."""
    if option_vertices_truncate:
        return TEMPLATE_VERTEX_TRUNCATE % (scale * v[0], scale * v[1], scale * v[2])
    return TEMPLATE_VERTEX % (v[0], v[1], v[2])
def generate_normal(n):
    """Format a normal as "x,y,z" with 5 significant digits."""
    return TEMPLATE_N % (n[0], n[1], n[2])
def generate_uv(uv):
    """Format a UV pair; V is flipped (1 - v), presumably converting the
    OBJ bottom-left UV origin to top-left -- confirm against the loader."""
    return TEMPLATE_UV % (uv[0], 1.0 - uv[1])
def generate_color_rgb(c):
    """Format an RGB float triplet as "r,g,b"."""
    return TEMPLATE_COLOR % (c[0], c[1], c[2])
def generate_color_decimal(c):
    """Format an RGB float triplet as a single decimal 24-bit integer."""
    return TEMPLATE_COLOR_DEC % hexcolor(c)
def generate_edge(e):
    """Format an edge as "a,b" (integer vertex indices)."""
    return TEMPLATE_EDGE % (e[0], e[1])
# #####################################################
# Morphs
# #####################################################
def generate_morph_vertex(name, vertices):
    """Render one morph target as a named JSON vertex list."""
    parts = [generate_vertex(v, TRUNCATE, SCALE) for v in vertices]
    return TEMPLATE_MORPH_VERTICES % (name, ",".join(parts))
def generate_morph_color(name, colors):
    """Render one morph color set as a named JSON color list."""
    parts = [generate_color_rgb(c) for c in colors]
    return TEMPLATE_MORPH_COLORS % (name, ",".join(parts))
def extract_material_colors(materials, mtlfilename, basename):
    """Build the list of diffuse colors ordered by material index.

    Materials missing a diffuse color fall back to red ([1,0,0]) so the
    problem is visible in the rendered model.
    """
    if not materials:
        materials = { 'default': 0 }
    mtl = create_materials(materials, mtlfilename, basename)
    indexed = sorted(
        (materials[name], props.get("colorDiffuse", [1, 0, 0]))
        for name, props in mtl.items()
        if name in materials
    )
    return [color for _index, color in indexed]
def extract_face_colors(faces, material_colors):
    """Map each face to the diffuse color of its material index."""
    return [material_colors[face['material']] for face in faces]
def generate_morph_targets(morphfiles, n_vertices, infile):
    """Parse morph-target OBJs matched by *morphfiles* glob patterns.

    Only morphs whose vertex count equals the base model's *n_vertices*
    are accepted; matching ones are aligned the same way as the base
    model (global ALIGN).  Returns the JSON "morphTargets" fragment,
    or "" when nothing usable was found.
    """
    # flag kept for future use: when True, the base model itself is not
    # re-added as a morph target even if the glob matches it
    skipOriginalMorph = False
    norminfile = os.path.normpath(infile)
    morphVertexData = []
    for mfilepattern in morphfiles.split():
        matches = glob.glob(mfilepattern)
        matches.sort()
        for path in matches:
            normpath = os.path.normpath(path)
            if normpath != norminfile or not skipOriginalMorph:
                name = os.path.basename(normpath)
                morphFaces, morphVertices, morphUvs, morphNormals, morphMaterials, morphMtllib = parse_obj(normpath)
                n_morph_vertices = len(morphVertices)
                if n_vertices != n_morph_vertices:
                    print "WARNING: skipping morph [%s] with different number of vertices [%d] than the original model [%d]" % (name, n_morph_vertices, n_vertices)
                else:
                    # align the morph exactly like the base model
                    if ALIGN == "center":
                        center(morphVertices)
                    elif ALIGN == "centerxz":
                        centerxz(morphVertices)
                    elif ALIGN == "bottom":
                        bottom(morphVertices)
                    elif ALIGN == "top":
                        top(morphVertices)
                    morphVertexData.append((get_name(name), morphVertices))
                    print "adding [%s] with %d vertices" % (name, n_morph_vertices)
    morphTargets = ""
    if len(morphVertexData):
        morphTargets = "\n%s\n\t" % ",\n".join(generate_morph_vertex(name, vertices) for name, vertices in morphVertexData)
    return morphTargets
def generate_morph_colors(colorfiles, n_vertices, n_faces):
    """Parse morph-color OBJs matched by *colorfiles* glob patterns.

    A morph color map is only accepted when both its vertex and face
    counts match the base model.  Returns (morphColors, colorFaces,
    materialColors): the JSON "morphColors" fragment plus the first
    accepted map's faces/colors, which are later baked into face colors.
    """
    morphColorData = []
    colorFaces = []
    materialColors = []
    for mfilepattern in colorfiles.split():
        matches = glob.glob(mfilepattern)
        matches.sort()
        for path in matches:
            normpath = os.path.normpath(path)
            name = os.path.basename(normpath)
            morphFaces, morphVertices, morphUvs, morphNormals, morphMaterials, morphMtllib = parse_obj(normpath)
            n_morph_vertices = len(morphVertices)
            n_morph_faces = len(morphFaces)
            if n_vertices != n_morph_vertices:
                print "WARNING: skipping morph color map [%s] with different number of vertices [%d] than the original model [%d]" % (name, n_morph_vertices, n_vertices)
            elif n_faces != n_morph_faces:
                print "WARNING: skipping morph color map [%s] with different number of faces [%d] than the original model [%d]" % (name, n_morph_faces, n_faces)
            else:
                morphMaterialColors = extract_material_colors(morphMaterials, morphMtllib, normpath)
                morphFaceColors = extract_face_colors(morphFaces, morphMaterialColors)
                morphColorData.append((get_name(name), morphFaceColors))
                # take first color map for baking into face colors
                if len(colorFaces) == 0:
                    colorFaces = morphFaces
                    materialColors = morphMaterialColors
                print "adding [%s] with %d face colors" % (name, len(morphFaceColors))
    morphColors = ""
    if len(morphColorData):
        morphColors = "\n%s\n\t" % ",\n".join(generate_morph_color(name, colors) for name, colors in morphColorData)
    return morphColors, colorFaces, materialColors
# #####################################################
# Edges
# #####################################################
def edge_hash(a, b):
    """Order-independent key for the edge between unique-vertex ids a and b."""
    lo, hi = (a, b) if a <= b else (b, a)
    return "%d_%d" % (lo, hi)
def add_unique_edge(a, b, edge_set, edges):
    """Append edge (a,b) unless one between the same unique vertices exists.

    a, b     -- [unique_id, original_index] pairs (see compute_edges)
    edge_set -- set of already-seen unique-id keys (mutated)
    edges    -- output list of [min_idx, max_idx] pairs (mutated)
    """
    key = edge_hash(a[0], b[0])
    if key in edge_set:
        return
    edge_set.add(key)
    edges.append([min(a[1], b[1]), max(a[1], b[1])])
def compute_edges(faces, vertices):
    """Build a sorted list of unique [i, j] vertex-index edges from faces.

    Vertices are first deduplicated by position (via veckey3 rounding) so
    coincident vertices from different faces yield a single edge.  Each
    edge stores original vertex indices, smaller one first.
    """
    edges = []
    # compute unique vertices
    unique_vertices = {}
    vertex_count = 0
    for i, v in enumerate(vertices):
        key = veckey3(v)
        if key not in unique_vertices:
            # maps position key -> [unique id, first original index]
            unique_vertices[key] = [vertex_count, i]
            vertex_count += 1
    # find edges between unique vertices
    edge_set = set()
    for f in faces:
        vertex_indices = f["vertex"]
        unique_indices = []
        for vi in vertex_indices:
            v = vertices[vi - 1]  # OBJ face indices are 1-based
            key = veckey3(v)
            unique_indices.append(unique_vertices[key])
        if len(unique_indices) == 3:
            a = unique_indices[0]
            b = unique_indices[1]
            c = unique_indices[2]
            add_unique_edge(a, b, edge_set, edges)
            add_unique_edge(b, c, edge_set, edges)
            add_unique_edge(a, c, edge_set, edges)
        elif len(unique_indices) == 4:
            a = unique_indices[0]
            b = unique_indices[1]
            c = unique_indices[2]
            d = unique_indices[3]
            # this should be inside edge of quad, should it go in?
            # add_unique_edge(b, d, edge_set, edges)
            add_unique_edge(a, b, edge_set, edges)
            add_unique_edge(a, d, edge_set, edges)
            add_unique_edge(b, c, edge_set, edges)
            add_unique_edge(c, d, edge_set, edges)
    edges.sort()
    return edges
# #####################################################
# Materials
# #####################################################
def generate_color(i):
    """Debug color (24-bit int) for material index *i*.

    Indices inside the fixed COLORS palette get stable, hardcoded colors
    with a well defined ordering; higher indices get random colors (seed
    the RNG with a deterministic value beforehand for stable output).
    """
    try:
        return COLORS[i]
    except IndexError:
        return int(0xffffff * random.random())
def value2string(v):
    """Serialize a Python value into its JS literal form.

    Strings are double-quoted unless they already look like hex color
    literals ("0x..."); booleans map to JS lowercase true/false;
    everything else falls back to str().

    Fix: use isinstance() instead of exact type() comparison, so str
    subclasses are quoted correctly as well.
    """
    if isinstance(v, str) and not v.startswith("0x"):
        return '"%s"' % v
    if isinstance(v, bool):
        return str(v).lower()
    return str(v)
def generate_materials(mtl, materials):
    """Generate JS array of materials objects

    JS material objects are basically prettified one-to-one
    mappings of MTL properties in JSON format.

    mtl       -- material name -> property dict (from create_materials)
    materials -- material name -> integer index (from parse_obj)
    Returns the index-sorted JS object literals joined by commas.
    """
    mtl_array = []
    for m in mtl:
        if m in materials:
            index = materials[m]
            # add debug information
            #  materials should be sorted according to how
            #  they appeared in OBJ file (for the first time)
            #  this index is identifier used in face definitions
            mtl[m]['DbgName'] = m
            mtl[m]['DbgIndex'] = index
            mtl[m]['DbgColor'] = generate_color(index)
            if BAKE_COLORS:
                mtl[m]['vertexColors'] = "face"
            mtl_raw = ",\n".join(['\t"%s" : %s' % (n, value2string(v)) for n,v in sorted(mtl[m].items())])
            mtl_string = "\t{\n%s\n\t}" % mtl_raw
            mtl_array.append([index, mtl_string])
    return ",\n\n".join([m for i,m in sorted(mtl_array)])
def generate_mtl(materials):
    """Create placeholder material records (used when no MTL file exists)."""
    mtl = {}
    for name, index in materials.items():
        mtl[name] = {
            'DbgName': name,
            'DbgIndex': index,
            'DbgColor': generate_color(index),
        }
    return mtl
def generate_materials_string(materials, mtlfilename, basename):
    """Build the final JS materials array string.

    A single dummy 'default' material is substituted when the OBJ
    declared none, so faces always have a material to reference.
    """
    mats = materials if materials else { 'default': 0 }
    return generate_materials(create_materials(mats, mtlfilename, basename), mats)
def create_materials(materials, mtlfilename, basename):
    """Parse MTL file and create mapping between its materials and OBJ materials.

    Eventual edge cases are handled here (missing materials, missing MTL file).

    materials   -- material name -> index mapping from the OBJ
    mtlfilename -- MTL path as referenced inside the OBJ (may be "")
    basename    -- OBJ path; the MTL is resolved relative to its directory
    Returns material name -> property dict, defaults overridden by MTL data.
    """
    random.seed(42) # to get well defined color order for debug colors
    # default materials with debug colors for when
    # there is no specified MTL / MTL loading failed,
    # or if there were no materials / null materials
    mtl = generate_mtl(materials)
    if mtlfilename:
        # create full pathname for MTL (included from OBJ)
        path = os.path.dirname(basename)
        fname = os.path.join(path, mtlfilename)
        if file_exists(fname):
            # override default materials with real ones from MTL
            # (where they exist, otherwise keep defaults)
            mtl.update(parse_mtl(fname))
        else:
            print "Couldn't find [%s]" % fname
    return mtl
# #####################################################
# Faces
# #####################################################
def is_triangle_flat(f):
    """Triangle without usable smooth normals and without UVs."""
    if len(f['vertex']) != 3:
        return False
    uses_normals = f["normal"] and SHADING == "smooth"
    return not uses_normals and not f['uv']
def is_triangle_flat_uv(f):
    """Triangle without usable smooth normals but with 3 UV indices."""
    if len(f['vertex']) != 3:
        return False
    uses_normals = f["normal"] and SHADING == "smooth"
    return not uses_normals and len(f['uv']) == 3
def is_triangle_smooth(f):
    """Triangle with vertex normals under smooth shading and no UVs."""
    if len(f['vertex']) != 3:
        return False
    return f["normal"] and SHADING == "smooth" and not f['uv']
def is_triangle_smooth_uv(f):
    """Triangle with vertex normals under smooth shading and 3 UV indices."""
    if len(f['vertex']) != 3:
        return False
    return f["normal"] and SHADING == "smooth" and len(f['uv']) == 3
def is_quad_flat(f):
    """Quad without usable smooth normals and without UVs."""
    if len(f['vertex']) != 4:
        return False
    uses_normals = f["normal"] and SHADING == "smooth"
    return not uses_normals and not f['uv']
def is_quad_flat_uv(f):
    """Quad without usable smooth normals but with 4 UV indices."""
    if len(f['vertex']) != 4:
        return False
    uses_normals = f["normal"] and SHADING == "smooth"
    return not uses_normals and len(f['uv']) == 4
def is_quad_smooth(f):
    """Quad with vertex normals under smooth shading and no UVs."""
    if len(f['vertex']) != 4:
        return False
    return f["normal"] and SHADING == "smooth" and not f['uv']
def is_quad_smooth_uv(f):
    """Quad with vertex normals under smooth shading and 4 UV indices."""
    if len(f['vertex']) != 4:
        return False
    return f["normal"] and SHADING == "smooth" and len(f['uv']) == 4
def sort_faces(faces):
    """Bucket faces by shape (tri/quad) x shading x UV presence.

    Returns a dict of lists keyed by bucket name.  Classifier order
    matches the original elif chain; a face matching no classifier
    (e.g. a polygon beyond a quad) is dropped, as before.
    """
    classifiers = [
        ('triangles_flat', is_triangle_flat),
        ('triangles_flat_uv', is_triangle_flat_uv),
        ('triangles_smooth', is_triangle_smooth),
        ('triangles_smooth_uv', is_triangle_smooth_uv),
        ('quads_flat', is_quad_flat),
        ('quads_flat_uv', is_quad_flat_uv),
        ('quads_smooth', is_quad_smooth),
        ('quads_smooth_uv', is_quad_smooth_uv),
    ]
    data = dict((key, []) for key, _pred in classifiers)
    for face in faces:
        for key, pred in classifiers:
            if pred(face):
                data[key].append(face)
                break
    return data
# #####################################################
# API - ASCII converter
# #####################################################
def convert_ascii(infile, morphfiles, colorfiles, outfile):
    """Convert infile.obj to outfile.js

    Here is where everything happens. If you need to automate conversions,
    just import this file as Python module and call this method.

    Reads the OBJ (plus optional morph-target / morph-color OBJ sets),
    applies the global ALIGN / SHADING / BAKE_COLORS / EXPORT_EDGES /
    TRUNCATE / SCALE settings, and writes one ASCII JSON model file.
    """
    if not file_exists(infile):
        print "Couldn't find [%s]" % infile
        return
    # parse OBJ / MTL files
    faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
    n_vertices = len(vertices)
    n_faces = len(faces)
    # align model
    if ALIGN == "center":
        center(vertices)
    elif ALIGN == "centerxz":
        centerxz(vertices)
    elif ALIGN == "bottom":
        bottom(vertices)
    elif ALIGN == "top":
        top(vertices)
    # generate normals string (flat shading exports none; the loader
    # computes face normals itself)
    nnormal = 0
    normals_string = ""
    if SHADING == "smooth":
        normals_string = ",".join(generate_normal(n) for n in normals)
        nnormal = len(normals)
    # extract morph vertices
    morphTargets = generate_morph_targets(morphfiles, n_vertices, infile)
    # extract morph colors
    morphColors, colorFaces, materialColors = generate_morph_colors(colorfiles, n_vertices, n_faces)
    # generate colors string; fall back to the model's own materials
    # when no usable color morph was found
    ncolor = 0
    colors_string = ""
    if len(colorFaces) < len(faces):
        colorFaces = faces
        materialColors = extract_material_colors(materials, mtllib, infile)
    if BAKE_COLORS:
        colors_string = ",".join(generate_color_decimal(c) for c in materialColors)
        ncolor = len(materialColors)
    # generate edges string
    nedge = 0
    edges_string = ""
    if EXPORT_EDGES:
        edges = compute_edges(faces, vertices)
        nedge = len(edges)
        edges_string = ",".join(generate_edge(e) for e in edges)
    # generate ascii model string
    text = TEMPLATE_FILE_ASCII % {
        "name" : get_name(outfile),
        "fname" : infile,
        "nvertex" : len(vertices),
        "nface" : len(faces),
        "nuv" : len(uvs),
        "nnormal" : nnormal,
        "ncolor" : ncolor,
        "nmaterial" : len(materials),
        "nedge" : nedge,
        "materials" : generate_materials_string(materials, mtllib, infile),
        "normals" : normals_string,
        "colors" : colors_string,
        "uvs" : ",".join(generate_uv(uv) for uv in uvs),
        "vertices" : ",".join(generate_vertex(v, TRUNCATE, SCALE) for v in vertices),
        "morphTargets" : morphTargets,
        "morphColors" : morphColors,
        "faces" : ",".join(generate_face(f, fc) for f, fc in zip(faces, colorFaces)),
        "edges" : edges_string,
        "scale" : SCALE
        }
    out = open(outfile, "w")
    out.write(text)
    out.close()
    print "%d vertices, %d faces, %d materials" % (len(vertices), len(faces), len(materials))
# #############################################################################
# API - Binary converter
# #############################################################################
def convert_binary(infile, outfile):
    """Convert infile.obj to outfile.js + outfile.bin

    Writes a small JS file referencing a binary buffer file.  The binary
    layout (little-endian) is: signature, byte-size metadata, counts,
    then vertices, normals, uvs and eight face sections grouped by
    sort_faces.  Field order here must match THREE.BinaryLoader exactly.
    """
    if not file_exists(infile):
        print "Couldn't find [%s]" % infile
        return
    binfile = get_name(outfile) + ".bin"
    faces, vertices, uvs, normals, materials, mtllib = parse_obj(infile)
    # align model
    if ALIGN == "center":
        center(vertices)
    elif ALIGN == "centerxz":
        centerxz(vertices)
    elif ALIGN == "bottom":
        bottom(vertices)
    elif ALIGN == "top":
        top(vertices)
    sfaces = sort_faces(faces)
    # ###################
    # generate JS file
    # ###################
    text = TEMPLATE_FILE_BIN % {
        "name" : get_name(outfile),
        "materials" : generate_materials_string(materials, mtllib, infile),
        "buffers" : binfile,
        "fname" : infile,
        "nvertex" : len(vertices),
        "nface" : len(faces),
        "nmaterial" : len(materials)
        }
    out = open(outfile, "w")
    out.write(text)
    out.close()
    # ###################
    # generate BIN file
    # ###################
    if SHADING == "smooth":
        nnormals = len(normals)
    else:
        nnormals = 0
    # NOTE(review): `buffer` shadows the builtin of the same name
    buffer = []
    # header
    # ------
    header_bytes = struct.calcsize('<8s')
    header_bytes += struct.calcsize('<BBBBBBBB')
    header_bytes += struct.calcsize('<IIIIIIIIIII')
    # signature
    signature = struct.pack('<8s', 'Three.js')
    # metadata (all data is little-endian)
    vertex_coordinate_bytes = 4
    normal_coordinate_bytes = 1
    uv_coordinate_bytes = 4
    vertex_index_bytes = 4
    normal_index_bytes = 4
    uv_index_bytes = 4
    material_index_bytes = 2
    # header_bytes            unsigned char   1
    # vertex_coordinate_bytes unsigned char   1
    # normal_coordinate_bytes unsigned char   1
    # uv_coordinate_bytes     unsigned char   1
    # vertex_index_bytes      unsigned char   1
    # normal_index_bytes      unsigned char   1
    # uv_index_bytes          unsigned char   1
    # material_index_bytes    unsigned char   1
    bdata = struct.pack('<BBBBBBBB', header_bytes,
                        vertex_coordinate_bytes,
                        normal_coordinate_bytes,
                        uv_coordinate_bytes,
                        vertex_index_bytes,
                        normal_index_bytes,
                        uv_index_bytes,
                        material_index_bytes)
    # nvertices       unsigned int    4
    # nnormals        unsigned int    4
    # nuvs            unsigned int    4
    # ntri_flat       unsigned int    4
    # ntri_smooth     unsigned int    4
    # ntri_flat_uv    unsigned int    4
    # ntri_smooth_uv  unsigned int    4
    # nquad_flat      unsigned int    4
    # nquad_smooth    unsigned int    4
    # nquad_flat_uv   unsigned int    4
    # nquad_smooth_uv unsigned int    4
    ndata = struct.pack('<IIIIIIIIIII', len(vertices),
                        nnormals,
                        len(uvs),
                        len(sfaces['triangles_flat']),
                        len(sfaces['triangles_smooth']),
                        len(sfaces['triangles_flat_uv']),
                        len(sfaces['triangles_smooth_uv']),
                        len(sfaces['quads_flat']),
                        len(sfaces['quads_smooth']),
                        len(sfaces['quads_flat_uv']),
                        len(sfaces['quads_smooth_uv']))
    buffer.append(signature)
    buffer.append(bdata)
    buffer.append(ndata)
    # 1. vertices
    # ------------
    # x float   4
    # y float   4
    # z float   4
    for v in vertices:
        data = struct.pack('<fff', v[0], v[1], v[2])
        buffer.append(data)
    # 2. normals
    # ---------------
    # x signed char 1
    # y signed char 1
    # z signed char 1
    if SHADING == "smooth":
        for n in normals:
            # quantize unit normals to signed bytes, rounding to nearest
            normalize(n)
            data = struct.pack('<bbb', math.floor(n[0]*127+0.5),
                               math.floor(n[1]*127+0.5),
                               math.floor(n[2]*127+0.5))
            buffer.append(data)
    # 3. uvs
    # -----------
    # u float   4
    # v float   4
    for uv in uvs:
        data = struct.pack('<ff', uv[0], 1.0-uv[1])
        buffer.append(data)
    # 4. flat triangles
    # ------------------
    # a unsigned int   4
    # b unsigned int   4
    # c unsigned int   4
    # m unsigned short 2
    for f in sfaces['triangles_flat']:
        vi = f['vertex']
        data = struct.pack('<IIIH',
                           vi[0]-1, vi[1]-1, vi[2]-1,
                           f['material'])
        buffer.append(data)
    # 5. smooth triangles
    # -------------------
    # a  unsigned int   4
    # b  unsigned int   4
    # c  unsigned int   4
    # m  unsigned short 2
    # na unsigned int   4
    # nb unsigned int   4
    # nc unsigned int   4
    for f in sfaces['triangles_smooth']:
        vi = f['vertex']
        ni = f['normal']
        data = struct.pack('<IIIHIII',
                           vi[0]-1, vi[1]-1, vi[2]-1,
                           f['material'],
                           ni[0]-1, ni[1]-1, ni[2]-1)
        buffer.append(data)
    # 6. flat triangles uv
    # --------------------
    # a  unsigned int   4
    # b  unsigned int   4
    # c  unsigned int   4
    # m  unsigned short 2
    # ua unsigned int   4
    # ub unsigned int   4
    # uc unsigned int   4
    for f in sfaces['triangles_flat_uv']:
        vi = f['vertex']
        ui = f['uv']
        data = struct.pack('<IIIHIII',
                           vi[0]-1, vi[1]-1, vi[2]-1,
                           f['material'],
                           ui[0]-1, ui[1]-1, ui[2]-1)
        buffer.append(data)
    # 7. smooth triangles uv
    # ----------------------
    # a  unsigned int   4
    # b  unsigned int   4
    # c  unsigned int   4
    # m  unsigned short 2
    # na unsigned int   4
    # nb unsigned int   4
    # nc unsigned int   4
    # ua unsigned int   4
    # ub unsigned int   4
    # uc unsigned int   4
    for f in sfaces['triangles_smooth_uv']:
        vi = f['vertex']
        ni = f['normal']
        ui = f['uv']
        data = struct.pack('<IIIHIIIIII',
                           vi[0]-1, vi[1]-1, vi[2]-1,
                           f['material'],
                           ni[0]-1, ni[1]-1, ni[2]-1,
                           ui[0]-1, ui[1]-1, ui[2]-1)
        buffer.append(data)
    # 8. flat quads
    # ------------------
    # a unsigned int   4
    # b unsigned int   4
    # c unsigned int   4
    # d unsigned int   4
    # m unsigned short 2
    for f in sfaces['quads_flat']:
        vi = f['vertex']
        data = struct.pack('<IIIIH',
                           vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1,
                           f['material'])
        buffer.append(data)
    # 9. smooth quads
    # -------------------
    # a  unsigned int   4
    # b  unsigned int   4
    # c  unsigned int   4
    # d  unsigned int   4
    # m  unsigned short 2
    # na unsigned int   4
    # nb unsigned int   4
    # nc unsigned int   4
    # nd unsigned int   4
    for f in sfaces['quads_smooth']:
        vi = f['vertex']
        ni = f['normal']
        data = struct.pack('<IIIIHIIII',
                           vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1,
                           f['material'],
                           ni[0]-1, ni[1]-1, ni[2]-1, ni[3]-1)
        buffer.append(data)
    # 10. flat quads uv
    # ------------------
    # a  unsigned int   4
    # b  unsigned int   4
    # c  unsigned int   4
    # d  unsigned int   4
    # m  unsigned short 2
    # ua unsigned int   4
    # ub unsigned int   4
    # uc unsigned int   4
    # ud unsigned int   4
    for f in sfaces['quads_flat_uv']:
        vi = f['vertex']
        ui = f['uv']
        data = struct.pack('<IIIIHIIII',
                           vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1,
                           f['material'],
                           ui[0]-1, ui[1]-1, ui[2]-1, ui[3]-1)
        buffer.append(data)
    # 11. smooth quads uv
    # -------------------
    # a  unsigned int   4
    # b  unsigned int   4
    # c  unsigned int   4
    # d  unsigned int   4
    # m  unsigned short 2
    # na unsigned int   4
    # nb unsigned int   4
    # nc unsigned int   4
    # nd unsigned int   4
    # ua unsigned int   4
    # ub unsigned int   4
    # uc unsigned int   4
    # ud unsigned int   4
    for f in sfaces['quads_smooth_uv']:
        vi = f['vertex']
        ni = f['normal']
        ui = f['uv']
        data = struct.pack('<IIIIHIIIIIIII',
                           vi[0]-1, vi[1]-1, vi[2]-1, vi[3]-1,
                           f['material'],
                           ni[0]-1, ni[1]-1, ni[2]-1, ni[3]-1,
                           ui[0]-1, ui[1]-1, ui[2]-1, ui[3]-1)
        buffer.append(data)
    # write the binary buffer next to the JS file
    path = os.path.dirname(outfile)
    fname = os.path.join(path, binfile)
    out = open(fname, "wb")
    out.write("".join(buffer))
    out.close()
# #############################################################################
# Helpers
# #############################################################################
def usage():
    """Print command-line usage for the converter.

    Covers every option the getopt spec below actually accepts; the original
    message omitted -b/--bakecolors, -e/--edges and -x/--truncatescale.
    The parenthesized print works identically in Python 2 and parses in Python 3.
    """
    print("Usage: %s -i filename.obj -o filename.js [-m morphfiles*.obj] [-c morphcolors*.obj] [-a center|top|bottom] [-s flat|smooth] [-t binary|ascii] [-d invert|normal] [-b] [-e] [-x truncatescale]" % os.path.basename(sys.argv[0]))
# #####################################################
# Main
# #####################################################
if __name__ == "__main__":

    # Parse parameters from the command line.
    # NOTE: the original short-option string listed "b" twice ("hbei:m:c:b:o:...");
    # getopt always matched the first, argument-less "b", so the shadowed "b:"
    # was dead and has been removed (no behavior change).
    try:
        opts, args = getopt.getopt(sys.argv[1:], "hbei:m:c:o:a:s:t:d:x:", ["help", "bakecolors", "edges", "input=", "morphs=", "colors=", "output=", "align=", "shading=", "type=", "dissolve=", "truncatescale="])

    except getopt.GetoptError:
        usage()
        sys.exit(2)

    infile = outfile = ""
    morphfiles = ""
    colorfiles = ""

    for o, a in opts:
        if o in ("-h", "--help"):
            usage()
            sys.exit()

        elif o in ("-i", "--input"):
            infile = a

        elif o in ("-m", "--morphs"):
            morphfiles = a

        elif o in ("-c", "--colors"):
            colorfiles = a

        elif o in ("-o", "--output"):
            outfile = a

        elif o in ("-a", "--align"):
            # invalid values silently keep the module-level default
            if a in ("top", "bottom", "center", "centerxz", "none"):
                ALIGN = a

        elif o in ("-s", "--shading"):
            if a in ("flat", "smooth"):
                SHADING = a

        elif o in ("-t", "--type"):
            if a in ("binary", "ascii"):
                TYPE = a

        elif o in ("-d", "--dissolve"):
            if a in ("normal", "invert"):
                TRANSPARENCY = a

        elif o in ("-b", "--bakecolors"):
            BAKE_COLORS = True

        elif o in ("-e", "--edges"):
            EXPORT_EDGES = True

        elif o in ("-x", "--truncatescale"):
            TRUNCATE = True
            SCALE = float(a)

    # input and output are mandatory; everything else has module-level defaults
    if infile == "" or outfile == "":
        usage()
        sys.exit(2)

    # single-argument print() behaves identically under Python 2 and parses in Python 3
    print("Converting [%s] into [%s] ..." % (infile, outfile))

    if morphfiles:
        print("Morphs [%s]" % morphfiles)

    if colorfiles:
        print("Colors [%s]" % colorfiles)

    if TYPE == "ascii":
        convert_ascii(infile, morphfiles, colorfiles, outfile)

    elif TYPE == "binary":
        convert_binary(infile, outfile)
| Python |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Based on import_obj.py
# Contributors: alteredq
"""
Blender importer for Three.js (ASCII JSON format).
"""
import os
import time
import json
import bpy
import mathutils
from mathutils.geometry import tesselate_polygon
from io_utils import load_image, unpack_list, unpack_face_list
# #####################################################
# Generators
# #####################################################
def setColor(c, t):
    """Copy an (r, g, b) triple *t* into a Blender color object *c* in place."""
    c.r, c.g, c.b = t[0], t[1], t[2]
def create_texture(filename, modelpath):
    """Return a new Blender IMAGE texture loaded from *filename* under *modelpath*.

    The texture is created even when the image file cannot be loaded; in that
    case it simply has no image assigned.
    """
    tex = bpy.data.textures.new(filename, type='IMAGE')
    img = load_image(filename, modelpath)
    has_data = False  # kept for parity with the original; the value is never used
    if img:
        tex.image = img
        has_data = img.has_data
    return tex
def create_materials(data, modelpath):
    """Create Blender materials from the parsed JSON "materials" array.

    data: parsed model dictionary (only its "materials" list is read).
    modelpath: directory of the model file, used to resolve texture paths.
    Returns the list of newly created bpy materials, in JSON order (face
    material indices refer into this list).
    """
    materials = []
    materials_data = data.get("materials", [])
    for i, m in enumerate(materials_data):
        name = m.get("DbgName", "material_%d" % i)

        # NOTE(review): colorAmbient and mapLightmap are read here but never
        # applied below — confirm whether they were meant to be wired up.
        colorAmbient = m.get("colorAmbient", None)
        colorDiffuse = m.get("colorDiffuse", None)
        colorSpecular = m.get("colorSpecular", None)
        alpha = m.get("transparency", 1.0)
        specular_hardness = m.get("specularCoef", 0)
        mapDiffuse = m.get("mapDiffuse", None)
        mapLightmap = m.get("mapLightmap", None)

        vertexColorsType = m.get("vertexColors", False)
        useVertexColors = False
        if vertexColorsType:
            useVertexColors = True

        material = bpy.data.materials.new(name)
        # custom add-on property consumed by the matching exporter
        material.THREE_useVertexColors = useVertexColors

        if colorDiffuse:
            setColor(material.diffuse_color, colorDiffuse)
            material.diffuse_intensity = 1.0
        if colorSpecular:
            setColor(material.specular_color, colorSpecular)
            material.specular_intensity = 1.0
        if alpha < 1.0:
            material.alpha = alpha
            material.use_transparency = True
        if specular_hardness:
            material.specular_hardness = specular_hardness
        if mapDiffuse:
            # wire the diffuse map into a fresh texture slot with UV mapping
            texture = create_texture(mapDiffuse, modelpath)
            mtex = material.texture_slots.add()
            mtex.texture = texture
            mtex.texture_coords = 'UV'
            mtex.use = True
            mtex.use_map_color_diffuse = True
            material.active_texture = texture

        materials.append(material)
    return materials
def create_mesh_object(name, vertices, materials, face_data, flipYZ, recalculate_normals):
    """Build a Blender mesh + object from parsed model data and link it to the scene.

    name: object/mesh name.
    vertices: list of (x, y, z) coordinates.
    materials: bpy materials from create_materials() (JSON order).
    face_data: dictionary produced by extract_faces().
    flipYZ: swap Y/Z on stored vertex normals (mirrors the importer's axis flip).
    recalculate_normals: let Blender recompute normals instead of using stored ones.
    """
    faces = face_data["faces"]
    vertexNormals = face_data["vertexNormals"]
    vertexColors = face_data["vertexColors"]
    vertexUVs = face_data["vertexUVs"]
    faceMaterials = face_data["materials"]
    faceColors = face_data["faceColors"]

    # no explicit edges; from_pydata derives them from the faces
    edges = []

    # Create a new mesh
    me = bpy.data.meshes.new(name)
    me.from_pydata(vertices, edges, faces)

    # Handle normals
    if not recalculate_normals:
        me.update(calc_edges = True)

    if face_data["hasVertexNormals"]:
        print("setting vertex normals")
        for fi in range(len(faces)):
            if vertexNormals[fi]:
                #print("setting face %i with %i vertices" % (fi, len(normals[fi])))
                # if me.update() is called after setting vertex normals
                # setting face.use_smooth overrides these normals
                #  - this fixes weird shading artefacts (seems to come from sharing
                #    of vertices between faces, didn't find a way how to set vertex normals
                #    per face use of vertex as opposed to per vertex),
                #  - probably this just overrides all custom vertex normals
                #  - to preserve vertex normals from the original data
                #    call me.update() before setting them
                me.faces[fi].use_smooth = True
                if not recalculate_normals:
                    for j in range(len(vertexNormals[fi])):
                        vertexNormal = vertexNormals[fi][j]
                        x = vertexNormal[0]
                        y = vertexNormal[1]
                        z = vertexNormal[2]
                        if flipYZ:
                            # Y-up (Three.js) -> Z-up (Blender)
                            tmp = y
                            y = -z
                            z = tmp
                        # flip normals (this make them look consistent with the original before export)
                        #x = -x
                        #y = -y
                        #z = -z
                        vi = me.faces[fi].vertices[j]
                        me.vertices[vi].normal.x = x
                        me.vertices[vi].normal.y = y
                        me.vertices[vi].normal.z = z

    if recalculate_normals:
        me.update(calc_edges = True)

    # Handle colors
    if face_data["hasVertexColors"]:
        print("setting vertex colors")
        me.vertex_colors.new("vertex_color_layer_0")
        for fi in range(len(faces)):
            if vertexColors[fi]:
                face_colors = me.vertex_colors[0].data[fi]
                # color1..color4 correspond to the face's corners in vertex order
                face_colors = face_colors.color1, face_colors.color2, face_colors.color3, face_colors.color4
                for vi in range(len(vertexColors[fi])):
                    r = vertexColors[fi][vi][0]
                    g = vertexColors[fi][vi][1]
                    b = vertexColors[fi][vi][2]
                    face_colors[vi].r = r
                    face_colors[vi].g = g
                    face_colors[vi].b = b

    elif face_data["hasFaceColors"]:
        print("setting vertex colors from face colors")
        me.vertex_colors.new("vertex_color_layer_0")
        for fi in range(len(faces)):
            if faceColors[fi]:
                r = faceColors[fi][0]
                g = faceColors[fi][1]
                b = faceColors[fi][2]
                face_colors = me.vertex_colors[0].data[fi]
                face_colors = face_colors.color1, face_colors.color2, face_colors.color3, face_colors.color4
                # flat face color: paint every corner of the face the same
                for vi in range(len(faces[fi])):
                    face_colors[vi].r = r
                    face_colors[vi].g = g
                    face_colors[vi].b = b

    # Handle uvs
    if face_data["hasVertexUVs"]:
        print("setting vertex uvs")
        for li, layer in enumerate(vertexUVs):
            me.uv_textures.new("uv_layer_%d" % li)
            for fi in range(len(faces)):
                if layer[fi]:
                    uv_face = me.uv_textures[li].data[fi]
                    face_uvs = uv_face.uv1, uv_face.uv2, uv_face.uv3, uv_face.uv4
                    for vi in range(len(layer[fi])):
                        u = layer[fi][vi][0]
                        v = layer[fi][vi][1]
                        face_uvs[vi].x = u
                        # Blender's V axis is inverted relative to the exported data
                        face_uvs[vi].y = 1.0 - v
                    # assign the material's image so textured viewport shading works
                    active_texture = materials[faceMaterials[fi]].active_texture
                    if active_texture:
                        uv_face.use_image = True
                        uv_face.image = active_texture.image

    # Handle materials # 1
    if face_data["hasMaterials"]:
        print("setting materials (mesh)")
        for m in materials:
            me.materials.append(m)
        print("setting materials (faces)")
        for fi in range(len(faces)):
            # -1 marks "no material" for this face
            if faceMaterials[fi] >= 0:
                me.faces[fi].material_index = faceMaterials[fi]

    # Create a new object
    ob = bpy.data.objects.new(name, me)
    ob.data = me                         # link the mesh data to the object
    scene = bpy.context.scene            # get the current scene
    scene.objects.link(ob)               # link the object into the scene
    ob.location = scene.cursor_location  # position object at 3d-cursor
# #####################################################
# Faces
# #####################################################
def extract_faces(data):
    """Decode the flat "faces" array of a Three.js JSON model (format version 2).

    Each face record starts with a bitmask saying which attribute indices follow:
      bit 0: quad (else triangle)     bit 4: face normal
      bit 1: material index           bit 5: per-vertex normals
      bit 2: face uv                  bit 6: face color
      bit 3: per-vertex uvs           bit 7: per-vertex colors
    Returns a dictionary of parallel per-face lists (None entries for faces
    lacking an attribute) plus has* summary flags.
    """
    result = {
        "faces" : [],
        "materials" : [],
        "faceUVs" : [],
        "vertexUVs" : [],
        "faceNormals" : [],
        "vertexNormals" : [],
        "faceColors" : [],
        "vertexColors" : [],
        "hasVertexNormals" : False,
        "hasVertexUVs" : False,
        "hasVertexColors" : False,
        "hasFaceColors" : False,
        "hasMaterials" : False
    }

    faces = data.get("faces", [])
    normals = data.get("normals", [])
    colors = data.get("colors", [])

    offset = 0
    zLength = len(faces)

    # disregard empty arrays
    nUvLayers = 0
    for layer in data["uvs"]:
        if len(layer) > 0:
            nUvLayers += 1
            result["faceUVs"].append([])
            result["vertexUVs"].append([])

    while ( offset < zLength ):
        # first entry of every face record is its attribute bitmask
        type = faces[ offset ]
        offset += 1

        isQuad = isBitSet( type, 0 )
        hasMaterial = isBitSet( type, 1 )
        hasFaceUv = isBitSet( type, 2 )
        hasFaceVertexUv = isBitSet( type, 3 )
        hasFaceNormal = isBitSet( type, 4 )
        hasFaceVertexNormal = isBitSet( type, 5 )
        hasFaceColor = isBitSet( type, 6 )
        hasFaceVertexColor = isBitSet( type, 7 )

        #print("type", type, "bits", isQuad, hasMaterial, hasFaceUv, hasFaceVertexUv, hasFaceNormal, hasFaceVertexNormal, hasFaceColor, hasFaceVertexColor)

        result["hasVertexUVs"] = result["hasVertexUVs"] or hasFaceVertexUv
        result["hasVertexNormals"] = result["hasVertexNormals"] or hasFaceVertexNormal
        result["hasVertexColors"] = result["hasVertexColors"] or hasFaceVertexColor
        result["hasFaceColors"] = result["hasFaceColors"] or hasFaceColor
        result["hasMaterials"] = result["hasMaterials"] or hasMaterial

        # vertices
        if isQuad:
            a = faces[ offset ]
            offset += 1
            b = faces[ offset ]
            offset += 1
            c = faces[ offset ]
            offset += 1
            d = faces[ offset ]
            offset += 1
            face = [a, b, c, d]
            nVertices = 4
        else:
            a = faces[ offset ]
            offset += 1
            b = faces[ offset ]
            offset += 1
            c = faces[ offset ]
            offset += 1
            face = [a, b, c]
            nVertices = 3

        result["faces"].append(face)

        # material
        if hasMaterial:
            materialIndex = faces[ offset ]
            offset += 1
        else:
            materialIndex = -1
        result["materials"].append(materialIndex)

        # uvs (the "uvs" layers are stored flat as [u0, v0, u1, v1, ...])
        for i in range(nUvLayers):
            faceUv = None
            if hasFaceUv:
                uvLayer = data["uvs"][ i ]
                uvIndex = faces[ offset ]
                offset += 1
                u = uvLayer[ uvIndex * 2 ]
                v = uvLayer[ uvIndex * 2 + 1 ]
                faceUv = [u, v]
            result["faceUVs"][i].append(faceUv)

            if hasFaceVertexUv:
                uvLayer = data["uvs"][ i ]
                vertexUvs = []
                for j in range(nVertices):
                    uvIndex = faces[ offset ]
                    offset += 1
                    u = uvLayer[ uvIndex * 2 ]
                    v = uvLayer[ uvIndex * 2 + 1 ]
                    vertexUvs.append([u, v])
                result["vertexUVs"][i].append(vertexUvs)

        # normals (indices into the flat [x, y, z, ...] "normals" array)
        if hasFaceNormal:
            normalIndex = faces[ offset ] * 3
            offset += 1
            x = normals[ normalIndex ]
            y = normals[ normalIndex + 1 ]
            z = normals[ normalIndex + 2 ]
            faceNormal = [x, y, z]
        else:
            faceNormal = None
        result["faceNormals"].append(faceNormal)

        if hasFaceVertexNormal:
            vertexNormals = []
            for j in range(nVertices):
                normalIndex = faces[ offset ] * 3
                offset += 1
                x = normals[ normalIndex ]
                y = normals[ normalIndex + 1 ]
                z = normals[ normalIndex + 2 ]
                vertexNormals.append( [x, y, z] )
        else:
            vertexNormals = None
        result["vertexNormals"].append(vertexNormals)

        # colors (indices into the packed 0xRRGGBB "colors" array)
        if hasFaceColor:
            colorIndex = faces[ offset ]
            offset += 1
            faceColor = hexToTuple( colors[ colorIndex ] )
        else:
            faceColor = None
        result["faceColors"].append(faceColor)

        if hasFaceVertexColor:
            vertexColors = []
            for j in range(nVertices):
                colorIndex = faces[ offset ]
                offset += 1
                color = hexToTuple( colors[ colorIndex ] )
                vertexColors.append( color )
        else:
            vertexColors = None
        result["vertexColors"].append(vertexColors)

    return result
# #####################################################
# Utils
# #####################################################
def hexToTuple(hexColor):
    """Convert a packed 0xRRGGBB integer into an (r, g, b) float tuple in [0, 1]."""
    return tuple(((hexColor >> shift) & 0xff) / 255.0 for shift in (16, 8, 0))
def isBitSet(value, position):
    """Return the isolated bit (truthy when bit *position* is set in *value*)."""
    mask = 1 << position
    return value & mask
def splitArray(data, chunkSize):
    """Split *data* into consecutive chunks of *chunkSize* items.

    The last chunk may be shorter. Unlike the original loop-based version,
    an empty *data* yields [] rather than [[]], so callers never receive a
    spurious empty chunk.
    """
    return [data[i:i + chunkSize] for i in range(0, len(data), chunkSize)]
def extract_json_string(text):
    """Pull the JSON object assigned to ``var model = ...`` out of a worker script.

    Extracts everything between the assignment marker and the last closing
    brace that precedes the ``postMessage`` call.
    """
    begin_marker = "var model ="
    end_marker = "postMessage"
    begin = text.find(begin_marker) + len(begin_marker)
    limit = text.find(end_marker)
    close_brace = text.rfind("}", begin, limit)
    return text[begin:close_brace + 1].strip()
def get_name(filepath):
    """Return the file name of *filepath* without directory or extension."""
    base = os.path.basename(filepath)
    stem, _ext = os.path.splitext(base)
    return stem
def get_path(filepath):
    """Return the directory portion of *filepath* (empty string if none)."""
    directory = os.path.dirname(filepath)
    return directory
# #####################################################
# Parser
# #####################################################
def load(operator, context, filepath, option_flip_yz = True, recalculate_normals = True):
    """Import a Three.js ASCII JSON model file into the current Blender scene.

    operator, context: Blender operator plumbing (operator is unused here).
    filepath: path to the .js worker file containing "var model = {...}".
    option_flip_yz: convert from Three.js Y-up to Blender Z-up.
    recalculate_normals: let Blender recompute normals instead of stored ones.
    Returns {'FINISHED'} so it can serve as an operator execute() result.
    """
    print('\nimporting %r' % filepath)

    time_main = time.time()

    print("\tparsing JSON file...")

    time_sub = time.time()

    # 'rU' was deprecated since Python 3.4 and REMOVED in 3.11 (raises
    # ValueError); plain 'r' already performs universal-newline translation
    # in Python 3. The with-statement also guarantees the handle is closed
    # even if read() raises.
    with open(filepath, 'r') as file:
        rawcontent = file.read()

    json_string = extract_json_string(rawcontent)

    data = json.loads( json_string )

    time_new = time.time()
    print('parsing %.4f sec' % (time_new - time_sub))

    time_sub = time_new

    # flip YZ
    vertices = splitArray(data["vertices"], 3)

    if option_flip_yz:
        vertices[:] = [(v[0], -v[2], v[1]) for v in vertices]

    # extract faces
    face_data = extract_faces(data)

    # deselect all
    bpy.ops.object.select_all(action='DESELECT')

    nfaces = len(face_data["faces"])
    nvertices = len(vertices)
    # floor division keeps these stats as ints (the original's true division
    # produced floats that %i silently truncated)
    nnormals = len(data.get("normals", [])) // 3
    ncolors = len(data.get("colors", [])) // 3
    # NOTE(review): "uvs" is a list of layers, so this count is approximate
    nuvs = len(data.get("uvs", [])) // 2
    nmaterials = len(data.get("materials", []))

    print('\tbuilding geometry...\n\tfaces:%i, vertices:%i, vertex normals: %i, vertex uvs: %i, vertex colors: %i, materials: %i ...' % (
        nfaces, nvertices, nnormals, nuvs, ncolors, nmaterials ))

    # Create materials
    materials = create_materials(data, get_path(filepath))

    # Create new obj
    create_mesh_object(get_name(filepath), vertices, materials, face_data, option_flip_yz, recalculate_normals)

    scene = bpy.context.scene
    scene.update()

    time_new = time.time()

    print('finished importing: %r in %.4f sec.' % (filepath, (time_new - time_main)))
    return {'FINISHED'}
if __name__ == "__main__":
    # NOTE(review): register() is not defined anywhere in this importer module;
    # running the file directly raises NameError. This looks copied from a
    # Blender add-on template — confirm whether register() is provided elsewhere.
    register()
| Python |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# Based on export_obj.py and export_ply.py
# Contributors: Mr.doob, Kikko, alteredq
"""
Blender exporter for Three.js (ASCII JSON format).
TODO
- export scene
- copy used images to folder where exported file goes
- binary format
"""
import bpy
import mathutils
import os
import os.path
import math
import operator
import random
# #####################################################
# Configuration
# #####################################################
# Scene-level fallbacks used when a Blender scene doesn't provide its own
# background/transform/camera/light values.
DEFAULTS = {
    "bgcolor" : [0, 0, 0],
    "bgalpha" : 1.0,

    "position" : [0, 0, 0],
    "rotation" : [-math.pi/2, 0, 0],
    "scale" : [1, 1, 1],

    "camera" :
    {
        "name" : "default_camera",
        "type" : "perspective",
        "near" : 1,
        "far" : 10000,
        "fov" : 60,
        "aspect": 1.333,
        "position" : [0, 0, 10],
        "target" : [0, 0, 0]
    },

    "light" :
    {
        "name" : "default_light",
        "type" : "directional",
        "direction" : [0, 1, 1],
        "color" : [1, 1, 1],
        "intensity" : 0.8
    }
}

# default colors for debugging (each material gets one distinct color):
# white, red, green, blue, yellow, cyan, magenta
COLORS = [0xeeeeee, 0xee0000, 0x00ee00, 0x0000ee, 0xeeee00, 0x00eeee, 0xee00ee]

# #####################################################
# Templates - scene
# #####################################################
# NOTE: template bodies are emitted verbatim into the generated .js file;
# the %(...)s placeholders are filled via str.__mod__ by the generator code.
TEMPLATE_SCENE_ASCII = """\
/* Converted from: %(fname)s
*
* File generated with Blender 2.56 Exporter
* https://github.com/alteredq/three.js/tree/master/utils/exporters/blender/
*
* objects: %(nobjects)s
* geometries: %(ngeometries)s
* materials: %(nmaterials)s
* textures: %(ntextures)s
*/
var scene = {
"type" : "scene",
"urlBaseType" : "relativeToScene",
%(sections)s
"transform" :
{
"position" : %(position)s,
"rotation" : %(rotation)s,
"scale" : %(scale)s
},
"defaults" :
{
"bgcolor" : %(bgcolor)s,
"bgalpha" : %(bgalpha)f,
"camera" : %(defcamera)s
}
}
postMessage( scene );
close();
"""

TEMPLATE_SECTION = """
"%s" :
{
%s
},
"""

TEMPLATE_OBJECT = """\
%(object_id)s : {
"geometry" : %(geometry_id)s,
"groups" : [ %(group_id)s ],
"materials" : [ %(material_id)s ],
"position" : %(position)s,
"rotation" : %(rotation)s,
"quaternion": %(quaternion)s,
"scale" : %(scale)s,
"visible" : %(visible)s,
"castsShadow" : %(castsShadow)s,
"meshCollider" : %(meshCollider)s,
"trigger" : %(trigger)s
}"""

TEMPLATE_EMPTY = """\
%(object_id)s : {
"groups" : [ %(group_id)s ],
"position" : %(position)s,
"rotation" : %(rotation)s,
"quaternion": %(quaternion)s,
"scale" : %(scale)s,
"trigger" : %(trigger)s
}"""

TEMPLATE_GEOMETRY_LINK = """\
%(geometry_id)s : {
"type" : "ascii_mesh",
"url" : %(model_file)s
}"""

TEMPLATE_GEOMETRY_EMBED = """\
%(geometry_id)s : {
"type" : "embedded_mesh",
"id" : %(embed_id)s
}"""

TEMPLATE_TEXTURE = """\
%(texture_id)s : {
"url": %(texture_file)s
}"""

TEMPLATE_MATERIAL_SCENE = """\
%(material_id)s : {
"type": %(type)s,
"parameters": { %(parameters)s }
}"""

TEMPLATE_CAMERA_PERSPECTIVE = """\
%(camera_id)s : {
"type" : "perspective",
"fov" : %(fov)f,
"aspect": %(aspect)f,
"near" : %(near)f,
"far" : %(far)f,
"position": %(position)s,
"target" : %(target)s
}"""

TEMPLATE_CAMERA_ORTHO = """\
%(camera_id)s: {
"type" : "ortho",
"left" : %(left)f,
"right" : %(right)f,
"top" : %(top)f,
"bottom": %(bottom)f,
"near" : %(near)f,
"far" : %(far)f,
"position": %(position)s,
"target" : %(target)s
}"""

TEMPLATE_LIGHT_DIRECTIONAL = """\
%(light_id)s: {
"type" : "directional",
"direction" : %(direction)s,
"color" : %(color)d,
"intensity" : %(intensity).2f
}"""

TEMPLATE_LIGHT_POINT = """\
%(light_id)s: {
"type" : "point",
"position" : %(position)s,
"color" : %(color)d,
"intensity" : %(intensity).3f
}"""

# small value formatters shared by the scene templates
TEMPLATE_VEC4 = '[ %f, %f, %f, %f ]'
TEMPLATE_VEC3 = '[ %f, %f, %f ]'
TEMPLATE_VEC2 = '[ %f, %f ]'
TEMPLATE_STRING = '"%s"'
TEMPLATE_HEX = "0x%06x"

# #####################################################
# Templates - model
# #####################################################
TEMPLATE_FILE_ASCII = """\
/*
* File generated with Blender 2.56 Exporter
* https://github.com/mrdoob/three.js/tree/master/utils/exporters/blender/
*
* vertices: %(nvertex)d
* faces: %(nface)d
* normals: %(nnormal)d
* uvs: %(nuv)d
* colors: %(ncolor)d
* materials: %(nmaterial)d
* edges: %(nedges)d
*
*/
var model = {
%(model)s
};
postMessage( model );
close();
"""

TEMPLATE_MODEL_ASCII = """\
"version" : 2,
"scale" : %(scale)f,
"materials": [%(materials)s],
"vertices": [%(vertices)s],
"morphTargets": [],
"normals": [%(normals)s],
"colors": [%(colors)s],
"uvs": [[%(uvs)s]],
"faces": [%(faces)s],
"edges" : [%(edges)s]
"""

# per-element formatters used by the generate_* helpers below
TEMPLATE_VERTEX = "%f,%f,%f"
TEMPLATE_VERTEX_TRUNCATE = "%d,%d,%d"
TEMPLATE_N = "%f,%f,%f"
TEMPLATE_UV = "%f,%f"
#TEMPLATE_C = "0x%06x"
TEMPLATE_C = "%d"
TEMPLATE_EDGE = "%d,%d"
# #####################################################
# Utils
# #####################################################
def veckey3(x, y, z):
    """Quantize three coordinates to 6 decimal places for use as a dict key."""
    return tuple(round(component, 6) for component in (x, y, z))
def veckey3d(v):
    """Quantized key for an object exposing .x/.y/.z (e.g. a mathutils.Vector)."""
    coords = (v.x, v.y, v.z)
    return veckey3(*coords)
def veckey2d(v):
    """Quantize a 2-component UV to 6 decimal places for use as a dict key."""
    u, w = v[0], v[1]
    return round(u, 6), round(w, 6)
def get_normal_indices(v, normals, mesh):
    """Map each vertex index in *v* to its normal's index in the *normals* table."""
    mesh_vertices = mesh.vertices
    return [normals[veckey3d(mesh_vertices[index].normal)] for index in v]
def get_uv_indices(face_index, uvs, mesh):
    """Look up the table index of every UV on the given face of the active UV layer."""
    active_data = mesh.uv_textures.active.data
    return [uvs[veckey2d(coord)] for coord in active_data[face_index].uv]
def get_color_indices(face_index, colors, mesh):
    """Look up the table index of each corner color on the given face."""
    layer = mesh.vertex_colors.active.data
    entry = layer[face_index]
    corner_colors = (entry.color1, entry.color2, entry.color3, entry.color4)
    return [colors[hexcolor(corner)] for corner in corner_colors]
def rgb2int(rgb):
    """Pack an (r, g, b) float triple in [0, 1] into a single 0xRRGGBB integer."""
    red = int(rgb[0] * 255)
    green = int(rgb[1] * 255)
    blue = int(rgb[2] * 255)
    return (red << 16) + (green << 8) + blue
# #####################################################
# Utils - files
# #####################################################
def write_file(fname, content):
    """Write *content* to *fname*, replacing any existing file.

    Uses a with-statement so the handle is closed even when the write raises
    (the original leaked the handle on error).
    """
    with open(fname, "w") as out:
        out.write(content)
def ensure_folder_exist(foldername):
    """Create folder (with whole path) if it doesn't exist yet.

    The original probed os.access(..., R_OK|W_OK|X_OK), which was racy and
    re-ran makedirs (and crashed) for folders that exist but lack some
    permission bit; makedirs(exist_ok=True) handles both cases atomically.
    """
    os.makedirs(foldername, exist_ok=True)
def ensure_extension(filepath, extension):
    """Append *extension* unless *filepath* already ends with it (case-insensitive)."""
    if filepath.lower().endswith(extension):
        return filepath
    return filepath + extension
def generate_mesh_filename(meshname, filepath):
    """Derive a per-mesh output path by inserting the mesh name: <path>.<mesh><ext>."""
    root, ext = os.path.splitext(os.path.normpath(filepath))
    return "%s.%s%s" % (root, meshname, ext)
# #####################################################
# Utils - alignment
# #####################################################
def bbox(vertices):
    """Compute the axis-aligned bounding box of a vertex array.

    Returns {'x': [min, max], 'y': [min, max], 'z': [min, max]};
    all-zero ranges when the array is empty.
    """
    if not vertices:
        return { 'x':[0,0], 'y':[0,0], 'z':[0,0] }
    xs = [vert.co.x for vert in vertices]
    ys = [vert.co.y for vert in vertices]
    zs = [vert.co.z for vert in vertices]
    return { 'x':[min(xs), max(xs)], 'y':[min(ys), max(ys)], 'z':[min(zs), max(zs)] }
def translate(vertices, t):
    """Translate array of vertices in place by vector *t* = (tx, ty, tz).

    Direct iteration replaces the original C-style index loop
    (``for i in range(len(vertices))``); mutation order is unchanged.
    """
    for vertex in vertices:
        vertex.co.x += t[0]
        vertex.co.y += t[1]
        vertex.co.z += t[2]
def center(vertices):
    """Shift the model in place so the middle of its bounding box sits at the origin."""
    bb = bbox(vertices)
    xmin, xmax = bb['x']
    ymin, ymax = bb['y']
    zmin, zmax = bb['z']
    translate(vertices, [-(xmin + (xmax - xmin) / 2.0),
                         -(ymin + (ymax - ymin) / 2.0),
                         -(zmin + (zmax - zmin) / 2.0)])
def top(vertices):
    """Shift the model in place so its bounding-box top touches the floor (Y-axis),
    centered around X and Z."""
    bb = bbox(vertices)
    xmin, xmax = bb['x']
    zmin, zmax = bb['z']
    cx = xmin + (xmax - xmin) / 2.0
    cy = bb['y'][1]
    cz = zmin + (zmax - zmin) / 2.0
    translate(vertices, [-cx, -cy, -cz])
def bottom(vertices):
    """Shift the model in place so its bounding-box bottom touches the floor (Y-axis),
    centered around X and Z."""
    bb = bbox(vertices)
    xmin, xmax = bb['x']
    zmin, zmax = bb['z']
    cx = xmin + (xmax - xmin) / 2.0
    cy = bb['y'][0]
    cz = zmin + (zmax - zmin) / 2.0
    translate(vertices, [-cx, -cy, -cz])
# #####################################################
# Elements rendering
# #####################################################
def hexcolor(c):
    """Pack a float (r, g, b) color in [0, 1] into a 0xRRGGBB integer."""
    red = int(c[0] * 255)
    green = int(c[1] * 255)
    blue = int(c[2] * 255)
    return (red << 16) + (green << 8) + blue
def generate_vertices(vertices, option_vertices_truncate, option_vertices):
    """Serialize all vertices as one comma-joined string ('' when export is off)."""
    if not option_vertices:
        return ""
    chunks = [generate_vertex(vert, option_vertices_truncate) for vert in vertices]
    return ",".join(chunks)
def generate_vertex(v, option_vertices_truncate):
    """Format one vertex; truncation renders the coordinates as integers."""
    template = TEMPLATE_VERTEX_TRUNCATE if option_vertices_truncate else TEMPLATE_VERTEX
    return template % (v.co.x, v.co.y, v.co.z)
def generate_normal(n):
    """Format one normal with the shared float template."""
    nx, ny, nz = n[0], n[1], n[2]
    return TEMPLATE_N % (nx, ny, nz)
def generate_vertex_color(c):
    """Format one packed integer vertex color."""
    return TEMPLATE_C % (c,)
def generate_uv(uv):
    """Format one UV pair, flipping V (Blender's V axis is inverted vs Three.js)."""
    u, v = uv[0], uv[1]
    return TEMPLATE_UV % (u, 1.0 - v)
def generate_edge(e):
    """Format one edge as its two vertex indices."""
    first, second = e.vertices[0], e.vertices[1]
    return TEMPLATE_EDGE % (first, second)
# #####################################################
# Model exporter - faces
# #####################################################
def setBit(value, position, on):
    """Return *value* with bit *position* set (on truthy) or cleared (on falsy)."""
    mask = 1 << position
    if on:
        return value | mask
    return value & ~mask
def generate_faces(normals, uvs, colors, mesh, option_normals, option_colors, option_uv_coords, option_materials, flipyz, option_faces):
    """Serialize every face of *mesh* as one comma-joined string ('' when disabled)."""
    if not option_faces:
        return ""
    parts = [generate_face(face, index, normals, uvs, colors, mesh,
                           option_normals, option_colors, option_uv_coords,
                           option_materials, flipyz)
             for index, face in enumerate(mesh.faces)]
    return ",".join(parts)
def generate_face(f, faceIndex, normals, uvs, colors, mesh, option_normals, option_colors, option_uv_coords, option_materials, flipyz):
    """Serialize one face into the flat comma-separated index format (version 2).

    The leading value is a bitmask telling JSONLoader which attribute index
    blocks follow; the blocks must appear in exactly the order listed below.
    """
    isTriangle = ( len(f.vertices) == 3 )

    if isTriangle:
        nVertices = 3
    else:
        nVertices = 4

    hasMaterial = option_materials

    hasFaceUvs = False # not supported in Blender
    hasFaceVertexUvs = option_uv_coords

    hasFaceNormals = False # don't export any face normals (as they are computed in engine)
    hasFaceVertexNormals = option_normals

    hasFaceColors = False # not supported in Blender
    hasFaceVertexColors = option_colors

    faceType = 0
    faceType = setBit(faceType, 0, not isTriangle)
    faceType = setBit(faceType, 1, hasMaterial)
    faceType = setBit(faceType, 2, hasFaceUvs)
    faceType = setBit(faceType, 3, hasFaceVertexUvs)
    faceType = setBit(faceType, 4, hasFaceNormals)
    faceType = setBit(faceType, 5, hasFaceVertexNormals)
    faceType = setBit(faceType, 6, hasFaceColors)
    faceType = setBit(faceType, 7, hasFaceVertexColors)

    faceData = []

    # order is important, must match order in JSONLoader
    #   face type
    #   vertex indices
    #   material index
    #   face uvs index
    #   face vertex uvs indices
    #   face color index
    #   face vertex colors indices

    faceData.append(faceType)

    # must clamp in case on polygons bigger than quads
    for i in range(nVertices):
        index = f.vertices[i]
        faceData.append(index)

    if hasMaterial:
        faceData.append( f.material_index )

    if hasFaceVertexUvs:
        uv = get_uv_indices(faceIndex, uvs, mesh)
        for i in range(nVertices):
            index = uv[i]
            faceData.append(index)

    if hasFaceVertexNormals:
        n = get_normal_indices(f.vertices, normals, mesh)
        for i in range(nVertices):
            index = n[i]
            faceData.append(index)

    if hasFaceVertexColors:
        c = get_color_indices(faceIndex, colors, mesh)
        for i in range(nVertices):
            index = c[i]
            faceData.append(index)

    return ",".join( map(str, faceData) )
# #####################################################
# Model exporter - normals
# #####################################################
def extract_vertex_normals(mesh, option_normals):
    """Build {quantized-normal-key: insertion-index} over all face vertices.

    Returns (dict, count); ({}, 0) when normal export is disabled.
    """
    if not option_normals:
        return {}, 0
    normals = {}
    for face in mesh.faces:
        for vertex_index in face.vertices:
            key = veckey3d(mesh.vertices[vertex_index].normal)
            if key not in normals:
                normals[key] = len(normals)
    return normals, len(normals)
def generate_normals(normals, option_normals):
    """Serialize quantized normal keys in insertion-index order."""
    if not option_normals:
        return ""
    ordered = sorted(normals.items(), key=operator.itemgetter(1))
    return ",".join(generate_normal(key) for key, _index in ordered)
# #####################################################
# Model exporter - vertex colors
# #####################################################
def extract_vertex_colors(mesh, option_colors):
    """Build {packed-color: insertion-index} over all face corners of the active layer.

    Returns (dict, count); ({}, 0) when color export is disabled.
    """
    if not option_colors:
        return {}, 0
    colors = {}
    layer = mesh.vertex_colors.active.data
    for face_index, _face in enumerate(mesh.faces):
        entry = layer[face_index]
        for corner in (entry.color1, entry.color2, entry.color3, entry.color4):
            key = hexcolor(corner)
            if key not in colors:
                colors[key] = len(colors)
    return colors, len(colors)
def generate_vertex_colors(colors, option_colors):
    """Serialize packed vertex colors in insertion-index order."""
    if not option_colors:
        return ""
    ordered = sorted(colors.items(), key=operator.itemgetter(1))
    return ",".join(generate_vertex_color(key) for key, _index in ordered)
# #####################################################
# Model exporter - UVs
# #####################################################
def extract_uvs(mesh, option_uv_coords):
    """Build {quantized-uv: insertion-index} over the active UV layer.

    Returns (dict, count); ({}, 0) when UV export is disabled.
    """
    if not option_uv_coords:
        return {}, 0
    uvs = {}
    layer = mesh.uv_textures.active.data
    for face_index, _face in enumerate(mesh.faces):
        for uv in layer[face_index].uv:
            key = veckey2d(uv)
            if key not in uvs:
                uvs[key] = len(uvs)
    return uvs, len(uvs)
def generate_uvs(uvs, option_uv_coords):
    """Serialize quantized UV keys in insertion-index order."""
    if not option_uv_coords:
        return ""
    ordered = sorted(uvs.items(), key=operator.itemgetter(1))
    return ",".join(generate_uv(key) for key, _index in ordered)
# #####################################################
# Model exporter - materials
# #####################################################
def generate_color(i):
    """Debug color for material index *i*.

    Indices inside the fixed COLORS palette get a well-defined color; beyond
    that the color is random (caller must seed the RNG with a deterministic
    value beforehand to get a reproducible ordering).
    """
    if i < len(COLORS):
        return COLORS[i]
    return int(0xffffff * random.random())
def generate_mtl(materials):
    """Generate a dummy (debug) material record for every material name.

    materials: {name: index}. Returns {name: properties-dict}.
    """
    return {
        name: {
            "DbgName": name,
            "DbgIndex": index,
            "DbgColor": generate_color(index),
            "vertexColors": False,
        }
        for name, index in materials.items()
    }
def value2string(v):
    """Render a Python value as a JS literal fragment.

    Strings are double-quoted unless they already look like hex literals
    ("0x..."); booleans become lowercase true/false; anything else uses str().
    isinstance() replaces the original's non-idiomatic ``type(v) == str/bool``
    comparisons (bool is never a str, so the check order is unchanged in effect).
    """
    if isinstance(v, str) and not v.startswith("0x"):
        return '"%s"' % v
    if isinstance(v, bool):
        return str(v).lower()
    return str(v)
def generate_materials(mtl, materials, draw_type):
    """Generate JS array of materials objects

    mtl: {name: properties-dict}; materials: {name: index}; draw_type is the
    Blender object draw type (BOUNDS/WIRE force wireframe debug materials).
    Returns (joined-string, material-count), ordered by material index.
    """
    mtl_array = []
    for m in mtl:
        index = materials[m]

        # add debug information
        #  materials should be sorted according to how
        #  they appeared in OBJ file (for the first time)
        #  this index is identifier used in face definitions
        mtl[m]['DbgName'] = m
        mtl[m]['DbgIndex'] = index
        mtl[m]['DbgColor'] = generate_color(index)

        if draw_type in [ "BOUNDS", "WIRE" ]:
            mtl[m]['wireframe'] = True
            mtl[m]['DbgColor'] = 0xff0000

        # sorted key order makes the emitted JS deterministic
        mtl_raw = ",\n".join(['\t"%s" : %s' % (n, value2string(v)) for n,v in sorted(mtl[m].items())])
        mtl_string = "\t{\n%s\n\t}" % mtl_raw
        mtl_array.append([index, mtl_string])

    return ",\n\n".join([m for i,m in sorted(mtl_array)]), len(mtl_array)
def extract_materials(mesh, scene, option_colors):
    """Collect exportable properties of every real material attached to *mesh*.

    Returns {material_name: properties-dict}; color channels are premultiplied
    by their Blender intensity factors. Empty material slots are skipped.
    """
    world = scene.world

    materials = {}
    for m in mesh.materials:
        if m:
            materials[m.name] = {}
            material = materials[m.name]

            material['colorDiffuse'] = [m.diffuse_intensity * m.diffuse_color[0],
                                        m.diffuse_intensity * m.diffuse_color[1],
                                        m.diffuse_intensity * m.diffuse_color[2]]

            material['colorSpecular'] = [m.specular_intensity * m.specular_color[0],
                                         m.specular_intensity * m.specular_color[1],
                                         m.specular_intensity * m.specular_color[2]]

            # ambient is scaled by the world's ambient color (black if no world)
            world_ambient_color = [0, 0, 0]
            if world:
                world_ambient_color = world.ambient_color
            material['colorAmbient'] = [m.ambient * world_ambient_color[0],
                                        m.ambient * world_ambient_color[1],
                                        m.ambient * world_ambient_color[2]]

            material['transparency'] = m.alpha

            # not sure about mapping values to Blinn-Phong shader
            # Blender uses INT from [1,511] with default 0
            # http://www.blender.org/documentation/blender_python_api_2_54_0/bpy.types.Material.html#bpy.types.Material.specular_hardness
            material["specularCoef"] = m.specular_hardness

            if m.active_texture and m.active_texture.type == 'IMAGE' and m.active_texture.image:
                # export just the base file name; the loader resolves it
                # relative to the model file
                fn = bpy.path.abspath(m.active_texture.image.filepath)
                fn = os.path.normpath(fn)
                fn_strip = os.path.basename(fn)
                material['mapDiffuse'] = fn_strip

            material["vertexColors"] = m.THREE_useVertexColors and option_colors

            # can't really use this reliably to tell apart Phong from Lambert
            # as Blender defaults to non-zero specular color
            #if m.specular_intensity > 0.0 and (m.specular_color[0] > 0 or m.specular_color[1] > 0 or m.specular_color[2] > 0):
            #    material['shading'] = "Phong"
            #else:
            #    material['shading'] = "Lambert"

            material['shading'] = m.THREE_materialType

    return materials
def generate_materials_string(mesh, scene, option_colors, draw_type):
    """Serialize all materials of *mesh* for the ASCII model format.

    Returns (materials_string, material_count).
    """
    random.seed(42)  # to get well defined color order for debug materials
    name_to_index = {}
    if mesh.materials:
        for slot, mat in enumerate(mesh.materials):
            if mat:
                name_to_index[mat.name] = slot
            else:
                # Empty slots still occupy an index; give them a placeholder.
                name_to_index["undefined_dummy_%0d" % slot] = slot
    if not name_to_index:
        # Mesh without any material slots: single default dummy material.
        name_to_index = {'default': 0}
    # Start from dummy debug materials, then overlay real material data.
    mtl = generate_mtl(name_to_index)
    mtl.update(extract_materials(mesh, scene, option_colors))
    return generate_materials(mtl, name_to_index, draw_type)
# #####################################################
# ASCII model generator
# #####################################################
def generate_ascii_model(mesh, scene,
                         option_vertices,
                         option_vertices_truncate,
                         option_faces,
                         option_normals,
                         option_edges,
                         option_uv_coords,
                         option_materials,
                         option_colors,
                         align_model,
                         flipyz,
                         option_scale,
                         draw_type):
    """Assemble the full ASCII (JSON-like) model text for *mesh*.

    Returns (text, model_string): the complete file contents and the
    bare model payload (used when embedding meshes into a scene file).
    """
    # Work on a copy so alignment doesn't mutate the mesh data.
    vertices = mesh.vertices[:]
    # align_model: 0 = none, 1 = center, 2 = bottom, 3 = top.
    if align_model == 1:
        center(vertices)
    elif align_model == 2:
        bottom(vertices)
    elif align_model == 3:
        top(vertices)
    normals, nnormal = extract_vertex_normals(mesh, option_normals)
    colors, ncolor = extract_vertex_colors(mesh, option_colors)
    uvs, nuv = extract_uvs(mesh, option_uv_coords)
    materials_string = ""
    nmaterial = 0
    edges_string = ""
    nedges = 0
    if option_materials:
        materials_string, nmaterial = generate_materials_string(mesh, scene, option_colors, draw_type)
    if option_edges:
        nedges = len(mesh.edges)
        edges_string = ",".join(generate_edge(e) for e in mesh.edges)
    # Inner model payload (vertices/faces/etc. sections).
    model_string = TEMPLATE_MODEL_ASCII % {
        "scale" : option_scale,
        "uvs" : generate_uvs(uvs, option_uv_coords),
        "normals" : generate_normals(normals, option_normals),
        "colors" : generate_vertex_colors(colors, option_colors),
        "materials" : materials_string,
        "vertices" : generate_vertices(vertices, option_vertices_truncate, option_vertices),
        "faces" : generate_faces(normals, uvs, colors, mesh, option_normals, option_colors, option_uv_coords, option_materials, flipyz, option_faces),
        "edges" : edges_string
    }
    # Outer wrapper adds the statistics header.
    text = TEMPLATE_FILE_ASCII % {
        "nvertex" : len(mesh.vertices),
        "nface" : len(mesh.faces),
        "nuv" : nuv,
        "nnormal" : nnormal,
        "ncolor" : ncolor,
        "nmaterial" : nmaterial,
        "nedges" : nedges,
        "model" : model_string
    }
    return text, model_string
# #####################################################
# Model exporter - export single mesh
# #####################################################
def generate_mesh_string(obj, scene,
                         option_vertices,
                         option_vertices_truncate,
                         option_faces,
                         option_normals,
                         option_edges,
                         option_uv_coords,
                         option_materials,
                         option_colors,
                         align_model,
                         flipyz,
                         option_scale,
                         export_single_model):
    """Convert one Blender object into its ASCII model text.

    Creates a temporary evaluated mesh (modifiers applied), optionally
    bakes the world transform / YZ flip into it, generates the text, and
    removes the temporary mesh again.
    Returns (text, model_string) from generate_ascii_model.
    Raises Exception when Blender cannot produce mesh data for *obj*.
    """
    # collapse modifiers into mesh
    mesh = obj.create_mesh(scene, True, 'RENDER')
    if not mesh:
        raise Exception("Error, could not get mesh data from object [%s]" % obj.name)
    # that's what Blender's native export_obj.py does
    # to flip YZ
    if export_single_model:
        # Bake world transform + -90deg X rotation only for standalone
        # models; scene export keeps object transforms in the scene file.
        X_ROT = mathutils.Matrix.Rotation(-math.pi/2, 4, 'X')
        mesh.transform(X_ROT * obj.matrix_world)
    mesh.calc_normals()
    mesh.transform(mathutils.Matrix.Scale(option_scale, 4))
    faceUV = (len(mesh.uv_textures) > 0)
    vertexUV = (len(mesh.sticky) > 0)
    vertexColors = len(mesh.vertex_colors) > 0
    # Disable options the mesh data cannot satisfy.
    if not vertexColors:
        option_colors = False
    if (not faceUV) and (not vertexUV):
        option_uv_coords = False
    if faceUV:
        active_uv_layer = mesh.uv_textures.active
        if not active_uv_layer:
            option_uv_coords = False
    if vertexColors:
        active_col_layer = mesh.vertex_colors.active
        if not active_col_layer:
            option_colors = False
    text, model_string = generate_ascii_model(mesh, scene,
                                              option_vertices,
                                              option_vertices_truncate,
                                              option_faces,
                                              option_normals,
                                              option_edges,
                                              option_uv_coords,
                                              option_materials,
                                              option_colors,
                                              align_model,
                                              flipyz,
                                              option_scale,
                                              obj.draw_type)
    # remove temp mesh
    bpy.data.meshes.remove(mesh)
    return text, model_string
def export_mesh(obj, scene, filepath,
                option_vertices,
                option_vertices_truncate,
                option_faces,
                option_normals,
                option_edges,
                option_uv_coords,
                option_materials,
                option_colors,
                align_model,
                flipyz,
                option_scale,
                export_single_model):
    """Export a single mesh object to *filepath* as a Three.js ASCII model."""
    text, _model = generate_mesh_string(obj, scene,
                                        option_vertices,
                                        option_vertices_truncate,
                                        option_faces,
                                        option_normals,
                                        option_edges,
                                        option_uv_coords,
                                        option_materials,
                                        option_colors,
                                        align_model,
                                        flipyz,
                                        option_scale,
                                        export_single_model)
    write_file(filepath, text)
    print("writing", filepath, "done")
# #####################################################
# Scene exporter - render elements
# #####################################################
def generate_vec4(vec):
    """Format the first four components of *vec* via TEMPLATE_VEC4."""
    x, y, z, w = vec[0], vec[1], vec[2], vec[3]
    return TEMPLATE_VEC4 % (x, y, z, w)
def generate_vec3(vec):
    """Format the first three components of *vec* via TEMPLATE_VEC3."""
    x, y, z = vec[0], vec[1], vec[2]
    return TEMPLATE_VEC3 % (x, y, z)
def generate_vec2(vec):
    """Format the first two components of *vec* via TEMPLATE_VEC2."""
    x, y = vec[0], vec[1]
    return TEMPLATE_VEC2 % (x, y)
def generate_hex(number):
    """Render *number* through the shared hexadecimal template."""
    return TEMPLATE_HEX % (number,)
def generate_string(s):
    """Render *s* through the shared string template (adds quoting)."""
    return TEMPLATE_STRING % (s,)
def generate_string_list(src_list):
    """Comma-separate every item of *src_list* rendered as a quoted string."""
    return ", ".join(map(generate_string, src_list))
def generate_section(label, content):
    """Render one labeled scene-file section via TEMPLATE_SECTION."""
    section = TEMPLATE_SECTION % (label, content)
    return section
def get_mesh_filename(mesh):
    """Derive the .js filename for a mesh entry from its data-block name."""
    return "%s.js" % sanitize(mesh["data"]["name"])
def generate_material_id_list(materials):
    """Collect the name of every material (slot) in *materials*, in order."""
    return [material.name for material in materials]
def generate_group_id_list(obj):
    """Names of every Blender group that contains *obj* (by object name)."""
    return [group.name for group in bpy.data.groups if obj.name in group.objects]
def generate_bool_property(property):
    """Serialize a Python truth value as a JSON boolean literal."""
    return "true" if property else "false"
# #####################################################
# Scene exporter - objects
# #####################################################
def generate_objects(data):
    """Serialize every exportable scene object as a scene-file entry.

    Mesh objects flagged for geometry export become TEMPLATE_OBJECT
    entries (with geometry/material references); empties and meshes with
    geometry export disabled become TEMPLATE_EMPTY entries.
    Returns (joined_chunks, object_count).
    """
    chunks = []
    for obj in data["objects"]:
        if obj.type == "MESH" and obj.THREE_exportGeometry:
            object_id = obj.name
            # Objects with modifiers get private geometry (named after the
            # object); otherwise geometry is shared via the mesh data name.
            if len(obj.modifiers) > 0:
                geo_name = obj.name
            else:
                geo_name = obj.data.name
            geometry_id = "geo_%s" % geo_name
            material_ids = generate_material_id_list(obj.material_slots)
            group_ids = generate_group_id_list(obj)
            # Decompose the world matrix into components Three.js understands.
            position, quaternion, scale = obj.matrix_world.decompose()
            rotation = quaternion.to_euler("XYZ")
            material_string = ""
            if len(material_ids) > 0:
                material_string = generate_string_list(material_ids)
            group_string = ""
            if len(group_ids) > 0:
                group_string = generate_string_list(group_ids)
            castsShadow = obj.THREE_castsShadow
            meshCollider = obj.THREE_meshCollider
            triggerType = obj.THREE_triggerType
            # Shadow casters / colliders are helper geometry: keep them
            # in the scene but not rendered.
            visible = True
            #if obj.draw_type in ["BOUNDS", "WIRE"] and (meshCollider or castsShadow):
            if meshCollider or castsShadow:
                visible = False
            geometry_string = generate_string(geometry_id)
            object_string = TEMPLATE_OBJECT % {
                "object_id"   : generate_string(object_id),
                "geometry_id" : geometry_string,
                "group_id"    : group_string,
                "material_id" : material_string,
                "position"    : generate_vec3(position),
                "rotation"    : generate_vec3(rotation),
                "quaternion"  : generate_vec4(quaternion),
                "scale"       : generate_vec3(scale),
                "castsShadow"  : generate_bool_property(castsShadow),
                "meshCollider" : generate_bool_property(meshCollider),
                "trigger"      : generate_string(triggerType),
                "visible"      : generate_bool_property(visible)
            }
            chunks.append(object_string)
        elif obj.type == "EMPTY" or (obj.type == "MESH" and not obj.THREE_exportGeometry):
            # Transform-only entries: no geometry or materials.
            object_id = obj.name
            group_ids = generate_group_id_list(obj)
            position, quaternion, scale = obj.matrix_world.decompose()
            rotation = quaternion.to_euler("XYZ")
            group_string = ""
            if len(group_ids) > 0:
                group_string = generate_string_list(group_ids)
            triggerType = obj.THREE_triggerType
            object_string = TEMPLATE_EMPTY % {
                "object_id" : generate_string(object_id),
                "group_id"  : group_string,
                "position"  : generate_vec3(position),
                "rotation"  : generate_vec3(rotation),
                "quaternion": generate_vec4(quaternion),
                "scale"     : generate_vec3(scale),
                "trigger"   : generate_string(triggerType),
            }
            chunks.append(object_string)
    return ",\n\n".join(chunks), len(chunks)
# #####################################################
# Scene exporter - geometries
# #####################################################
def generate_geometries(data):
    """Serialize one geometry entry per unique mesh used in the scene.

    Geometry is deduplicated by name (object name when modifiers are
    present, otherwise the shared mesh-data name). Entries either embed
    the mesh payload or link to an external model file, depending on
    data["embed_meshes"].
    Returns (joined_chunks, geometry_count).
    """
    chunks = []
    geo_set = set()  # names already emitted (deduplication)
    for obj in data["objects"]:
        if obj.type == "MESH" and obj.THREE_exportGeometry:
            if len(obj.modifiers) > 0:
                name = obj.name
            else:
                name = obj.data.name
            if name not in geo_set:
                geometry_id = "geo_%s" % name
                if data["embed_meshes"]:
                    # Reference into the scene file's own "embeds" section.
                    embed_id = "emb_%s" % name
                    geometry_string = TEMPLATE_GEOMETRY_EMBED % {
                        "geometry_id" : generate_string(geometry_id),
                        "embed_id"  : generate_string(embed_id)
                    }
                else:
                    # Reference an external per-mesh .js file next to the scene.
                    model_filename = os.path.basename(generate_mesh_filename(name, data["filepath"]))
                    geometry_string = TEMPLATE_GEOMETRY_LINK % {
                        "geometry_id" : generate_string(geometry_id),
                        "model_file"  : generate_string(model_filename)
                    }
                chunks.append(geometry_string)
                geo_set.add(name)
    return ",\n\n".join(chunks), len(chunks)
# #####################################################
# Scene exporter - textures
# #####################################################
def generate_textures_scene(data):
    """Serialize a scene texture entry for every image datablock.

    Returns (joined_chunks, texture_count).
    """
    # TODO: extract just textures actually used by some objects in the scene
    chunks = [
        TEMPLATE_TEXTURE % {
            "texture_id"   : generate_string(img.name),
            "texture_file" : generate_string(extract_texture_filename(img)),
        }
        for img in bpy.data.images
    ]
    return ",\n\n".join(chunks), len(chunks)
def extract_texture_filename(image):
    """Resolve an image datablock's path and return just its basename."""
    resolved = os.path.normpath(bpy.path.abspath(image.filepath))
    return os.path.basename(resolved)
# #####################################################
# Scene exporter - materials
# #####################################################
def extract_material_data(m, option_colors):
    """Extract export parameters for one Blender material (scene export).

    Unlike extract_materials (per-mesh, single diffuse map), this walks
    all texture slots and fills up to three maps: diffuse, light, normal.
    Returns a plain dict consumed by generate_material_string.
    """
    world = bpy.context.scene.world
    material = { 'name': m.name }
    # Premultiply intensity into the RGB channels.
    material['colorDiffuse'] = [m.diffuse_intensity * m.diffuse_color[0],
                                m.diffuse_intensity * m.diffuse_color[1],
                                m.diffuse_intensity * m.diffuse_color[2]]
    material['colorSpecular'] = [m.specular_intensity * m.specular_color[0],
                                 m.specular_intensity * m.specular_color[1],
                                 m.specular_intensity * m.specular_color[2]]
    # Ambient derives from the world color (black without a world).
    world_ambient_color = [0, 0, 0]
    if world:
        world_ambient_color = world.ambient_color
    material['colorAmbient'] = [m.ambient * world_ambient_color[0],
                                m.ambient * world_ambient_color[1],
                                m.ambient * world_ambient_color[2]]
    material['transparency'] = m.alpha
    # not sure about mapping values to Blinn-Phong shader
    # Blender uses INT from [1,511] with default 0
    # http://www.blender.org/documentation/blender_python_api_2_54_0/bpy.types.Material.html#bpy.types.Material.specular_hardness
    material["specularCoef"] = m.specular_hardness
    material['mapDiffuse'] = ""
    material['mapLight'] = ""
    material['mapNormal'] = ""
    material["vertexColors"] = m.THREE_useVertexColors and option_colors
    # just take first textures of each, for the moment three.js materials can't handle more
    for i in range(len(m.texture_slots)):
        ts = m.texture_slots[i]
        if ts:
            t = ts.texture
            if ts.use and t.type == 'IMAGE':
                name = t.image.name
                if t.use_normal_map:
                    material['mapNormal'] = name
                else:
                    # First non-normal image is diffuse, second is lightmap.
                    if not material['mapDiffuse']:
                        material['mapDiffuse'] = name
                    else:
                        material['mapLight'] = name
                # All three slots filled: nothing more to collect.
                if material['mapDiffuse'] and material['mapNormal'] and material['mapLight']:
                    break
    #if m.specular_intensity > 0.0 and (m.specular_color[0] > 0 or m.specular_color[1] > 0 or m.specular_color[2] > 0):
    #    material['shading'] = "Phong"
    #else:
    #    material['shading'] = "Lambert"
    material['shading'] = m.THREE_materialType
    return material
def generate_material_string(material):
    """Serialize one extracted material dict into a scene material entry.

    Maps the Blender-side shading choice onto a Three.js material class
    and assembles its JSON parameter list.
    """
    type_map = {
        "Lambert" : "MeshLambertMaterial",
        "Phong"   : "MeshPhongMaterial"
    }
    shading = material.get("shading", "Lambert")
    # Unknown shading values fall back to the most basic material class.
    material_type = type_map.get(shading, "MeshBasicMaterial")
    params = ['"color": %d' % rgb2int(material["colorDiffuse"])]
    params.append('"opacity": %.2g' % material["transparency"])
    if shading == "Phong":
        params.append('"ambient": %d' % rgb2int(material["colorAmbient"]))
        params.append('"specular": %d' % rgb2int(material["colorSpecular"]))
        params.append('"shininess": %.1g' % material["specularCoef"])
    # Optional texture maps, emitted only when present.
    for source_key, json_key in (("mapDiffuse", "map"),
                                 ("mapLight", "lightMap"),
                                 ("mapNormal", "normalMap")):
        if material[source_key]:
            params.append('"%s": %s' % (json_key, generate_string(material[source_key])))
    if material['vertexColors']:
        params.append('"vertexColors": "vertex"')
    return TEMPLATE_MATERIAL_SCENE % {
        "material_id" : generate_string(material["name"]),
        "type"        : generate_string(material_type),
        "parameters"  : ", ".join(params)
    }
def generate_materials_scene(data):
    """Serialize every material datablock in the file.

    Returns (joined_chunks, material_count).
    """
    # TODO: extract just materials actually used by some objects in the scene
    chunks = [
        generate_material_string(extract_material_data(m, data["use_colors"]))
        for m in bpy.data.materials
    ]
    return ",\n\n".join(chunks), len(chunks)
# #####################################################
# Scene exporter - cameras
# #####################################################
def generate_cameras(data):
    """Serialize scene cameras (perspective or ortho) for the scene file.

    Falls back to DEFAULTS["camera"] when no cameras are supplied.
    Returns the joined camera chunks, or "" when camera export is disabled.
    """
    if not data["use_cameras"]:
        return ""
    cameras = data.get("cameras", [])
    if not cameras:
        cameras.append(DEFAULTS["camera"])
    chunks = []
    for camera in cameras:
        if camera["type"] == "perspective":
            camera_string = TEMPLATE_CAMERA_PERSPECTIVE % {
                "camera_id" : generate_string(camera["name"]),
                "fov"       : camera["fov"],
                "aspect"    : camera["aspect"],
                "near"      : camera["near"],
                "far"       : camera["far"],
                "position"  : generate_vec3(camera["position"]),
                "target"    : generate_vec3(camera["target"])
            }
        elif camera["type"] == "ortho":
            camera_string = TEMPLATE_CAMERA_ORTHO % {
                "camera_id" : generate_string(camera["name"]),
                "left"      : camera["left"],
                "right"     : camera["right"],
                "top"       : camera["top"],
                "bottom"    : camera["bottom"],
                "near"      : camera["near"],
                "far"       : camera["far"],
                "position"  : generate_vec3(camera["position"]),
                "target"    : generate_vec3(camera["target"])
            }
        else:
            # Fix: an unrecognized camera type previously appended the
            # previous camera's string again (or raised NameError when it
            # was the first camera). Skip unknown types instead.
            continue
        chunks.append(camera_string)
    return ",\n\n".join(chunks)
# #####################################################
# Scene exporter - lights
# #####################################################
def generate_lights(data):
    """Serialize scene lights (directional or point) for the scene file.

    Falls back to DEFAULTS["light"] when no lights are supplied.
    Returns the joined light chunks, or "" when light export is disabled.
    """
    if not data["use_lights"]:
        return ""
    lights = data.get("lights", [])
    if not lights:
        lights.append(DEFAULTS["light"])
    chunks = []
    for light in lights:
        if light["type"] == "directional":
            light_string = TEMPLATE_LIGHT_DIRECTIONAL % {
                "light_id"  : generate_string(light["name"]),
                "direction" : generate_vec3(light["direction"]),
                #"color" : generate_hex(rgb2int(light["color"])),
                "color"     : rgb2int(light["color"]),
                "intensity" : light["intensity"]
            }
        elif light["type"] == "point":
            light_string = TEMPLATE_LIGHT_POINT % {
                "light_id"  : generate_string(light["name"]),
                "position"  : generate_vec3(light["position"]),
                #"color" : generate_hex(rgb2int(light["color"])),
                "color"     : rgb2int(light["color"]),
                "intensity" : light["intensity"]
            }
        else:
            # Fix: an unrecognized light type previously appended the
            # previous light's string again (or raised NameError when it
            # was the first light). Skip unknown types instead.
            continue
        chunks.append(light_string)
    return ",\n\n".join(chunks)
# #####################################################
# Scene exporter - embedded meshes
# #####################################################
def generate_embeds(data):
    """Render embedded mesh payloads as '"emb_<name>": {...}' entries.

    Returns "" when mesh embedding is disabled.
    """
    if not data["embed_meshes"]:
        return ""
    embeds = data["embeds"]
    chunks = ['"emb_%s": {%s}' % (name, embeds[name]) for name in embeds]
    return ",\n\n".join(chunks)
# #####################################################
# Scene exporter - generate ASCII scene
# #####################################################
def generate_ascii_scene(data):
    """Assemble the full ASCII scene file text from all sub-generators.

    Empty sections (no cameras, no lights, ...) are omitted from the
    output entirely.
    """
    objects, nobjects = generate_objects(data)
    geometries, ngeometries = generate_geometries(data)
    textures, ntextures = generate_textures_scene(data)
    materials, nmaterials = generate_materials_scene(data)
    cameras = generate_cameras(data)
    lights = generate_lights(data)
    embeds = generate_embeds(data)
    sections = [
        ["objects",    objects],
        ["geometries", geometries],
        ["textures",   textures],
        ["materials",  materials],
        ["cameras",    cameras],
        ["lights",     lights],
        ["embeds",     embeds]
    ]
    chunks = []
    for label, content in sections:
        if content:
            chunks.append(generate_section(label, content))
    sections_string = "\n".join(chunks)
    default_camera = ""
    if data["use_cameras"]:
        default_camera = generate_string("default_camera")
    parameters = {
        "fname"       : data["source_file"],
        "sections"    : sections_string,
        "bgcolor"     : generate_vec3(DEFAULTS["bgcolor"]),
        "bgalpha"     : DEFAULTS["bgalpha"],
        # NOTE(review): default_camera is already wrapped by generate_string
        # above and is wrapped again here — double quoting; confirm the
        # template expects that.
        "defcamera"   : generate_string(default_camera),
        "nobjects"    : nobjects,
        "ngeometries" : ngeometries,
        "ntextures"   : ntextures,
        "nmaterials"  : nmaterials,
        "position"    : generate_vec3(DEFAULTS["position"]),
        "rotation"    : generate_vec3(DEFAULTS["rotation"]),
        "scale"       : generate_vec3(DEFAULTS["scale"])
    }
    text = TEMPLATE_SCENE_ASCII % parameters
    return text
def export_scene(scene, filepath, flipyz, option_colors, option_lights, option_cameras, option_embed_meshes, embeds):
    """Generate the ASCII scene description and write it to *filepath*."""
    data = {
        "scene"       : scene,
        "objects"     : scene.objects,
        "embeds"      : embeds,
        "source_file" : os.path.basename(bpy.data.filepath),
        "filepath"    : filepath,
        "flipyz"      : flipyz,
        "use_colors"  : option_colors,
        "use_lights"  : option_lights,
        "use_cameras" : option_cameras,
        "embed_meshes": option_embed_meshes
    }
    write_file(filepath, generate_ascii_scene(data))
# #####################################################
# Main
# #####################################################
def save(operator, context, filepath = "",
         option_flip_yz = True,
         option_vertices = True,
         option_vertices_truncate = False,
         option_faces = True,
         option_normals = True,
         option_edges = False,
         option_uv_coords = True,
         option_materials = True,
         option_colors = True,
         align_model = 0,
         option_export_scene = False,
         option_lights = False,
         option_cameras = False,
         option_scale = 1.0,
         option_embed_meshes = True):
    """Entry point for the exporter operator.

    Either exports the whole scene (scene file plus embedded or linked
    mesh files) or just the active object as a single model file.
    Returns {'FINISHED'} for Blender's operator protocol.
    Raises Exception when single-object export has no active object.
    """
    filepath = ensure_extension(filepath, '.js')
    scene = context.scene
    # Leave edit/sculpt mode so mesh data is up to date.
    if scene.objects.active:
        bpy.ops.object.mode_set(mode='OBJECT')
    if option_export_scene:
        geo_set = set()  # geometry names already exported (deduplication)
        embeds = {}
        for obj in scene.objects:
            if obj.type == "MESH" and obj.THREE_exportGeometry:
                # create extra copy of geometry with applied modifiers
                # (if they exist)
                if len(obj.modifiers) > 0:
                    name = obj.name
                # otherwise can share geometry
                else:
                    name = obj.data.name
                if name not in geo_set:
                    if option_embed_meshes:
                        # Keep the model payload to embed into the scene file.
                        # Note: align_model and export_single_model are forced
                        # off for scene export (transforms live in the scene).
                        text, model_string = generate_mesh_string(obj, scene,
                                                                  option_vertices,
                                                                  option_vertices_truncate,
                                                                  option_faces,
                                                                  option_normals,
                                                                  option_edges,
                                                                  option_uv_coords,
                                                                  option_materials,
                                                                  option_colors,
                                                                  False,
                                                                  option_flip_yz,
                                                                  option_scale,
                                                                  False)
                        embeds[name] = model_string
                    else:
                        # Write each unique mesh to its own .js file.
                        fname = generate_mesh_filename(name, filepath)
                        export_mesh(obj, scene, fname,
                                    option_vertices,
                                    option_vertices_truncate,
                                    option_faces,
                                    option_normals,
                                    option_edges,
                                    option_uv_coords,
                                    option_materials,
                                    option_colors,
                                    False,
                                    option_flip_yz,
                                    option_scale,
                                    False)
                    geo_set.add(name)
        export_scene(scene, filepath, option_flip_yz, option_colors, option_lights, option_cameras, option_embed_meshes, embeds)
    else:
        # Single-model export: requires an active object.
        obj = context.object
        if not obj:
            raise Exception("Error, Select 1 active object or select 'export scene'")
        export_mesh(obj, scene, filepath,
                    option_vertices,
                    option_vertices_truncate,
                    option_faces,
                    option_normals,
                    option_edges,
                    option_uv_coords,
                    option_materials,
                    option_colors,
                    align_model,
                    option_flip_yz,
                    option_scale,
                    True)
    return {'FINISHED'}
| Python |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# ################################################################
# Init
# ################################################################
# To support reload properly, try to access a package var,
# if it's there, reload everything
# If bpy is already bound, this module is being re-executed (F8 reload in
# Blender); reload the submodules so code changes take effect.
if "bpy" in locals():
    import imp
    if "export_threejs" in locals():
        imp.reload(export_threejs)
    if "import_threejs" in locals():
        imp.reload(import_threejs)
import bpy
from bpy.props import *
from io_utils import ExportHelper, ImportHelper
# ################################################################
# Custom properties
# ################################################################
# Per-object export flags, shown in the OBJECT_PT_hello panel and read by
# the exporter (export_threejs).
bpy.types.Object.THREE_castsShadow = bpy.props.BoolProperty()
bpy.types.Object.THREE_meshCollider = bpy.props.BoolProperty()
bpy.types.Object.THREE_exportGeometry = bpy.props.BoolProperty(default = True)
THREE_trigger_types = [("None", "None", "None"), ("Small", "Small", "Small"), ("Large", "Large", "Large")]
bpy.types.Object.THREE_triggerType = EnumProperty(name = "Trigger type", description = "Trigger type", items = THREE_trigger_types, default = "None")
# Per-material export flags, shown in the MATERIAL_PT_hello panel.
bpy.types.Material.THREE_useVertexColors = bpy.props.BoolProperty()
THREE_material_types = [("Basic", "Basic", "Basic"), ("Phong", "Phong", "Phong"), ("Lambert", "Lambert", "Lambert")]
bpy.types.Material.THREE_materialType = EnumProperty(name = "Material type", description = "Material type", items = THREE_material_types, default = "Lambert")
class OBJECT_PT_hello( bpy.types.Panel ):
    # Object-properties panel exposing the THREE_* export flags.
    bl_label = "THREE"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_context = "object"

    def draw(self, context):
        layout = self.layout
        obj = context.object
        # One row per control, top to bottom.
        layout.row().label(text="Selected object: " + obj.name )
        layout.row().prop( obj, "THREE_exportGeometry", text="Export geometry" )
        layout.row().prop( obj, "THREE_castsShadow", text="Casts shadow" )
        layout.row().prop( obj, "THREE_meshCollider", text="Mesh collider" )
        layout.row().prop( obj, "THREE_triggerType", text="Trigger type" )
class MATERIAL_PT_hello( bpy.types.Panel ):
    # Material-properties panel exposing the THREE_* material flags.
    bl_label = "THREE"
    bl_space_type = "PROPERTIES"
    bl_region_type = "WINDOW"
    bl_context = "material"

    def draw(self, context):
        layout = self.layout
        mat = context.material
        # One row per control, top to bottom.
        layout.row().label(text="Selected material: " + mat.name )
        layout.row().prop( mat, "THREE_materialType", text="Material type" )
        layout.row().prop( mat, "THREE_useVertexColors", text="Use vertex colors" )
# ################################################################
# Importer
# ################################################################
class ImportTHREEJS(bpy.types.Operator, ImportHelper):
    '''Load a Three.js ASCII JSON model'''
    # NOTE: the docstring above doubles as the operator tooltip in Blender.
    bl_idname = "import.threejs"
    bl_label = "Import Three.js"
    filename_ext = ".js"
    # File browser filter and user-facing import options.
    filter_glob = StringProperty(default="*.js", options={'HIDDEN'})
    option_flip_yz = BoolProperty(name="Flip YZ", description="Flip YZ", default=True)
    recalculate_normals = BoolProperty(name="Recalculate normals", description="Recalculate vertex normals", default=True)

    def execute(self, context):
        # Deferred import keeps add-on registration cheap.
        import io_mesh_threejs.import_threejs
        return io_mesh_threejs.import_threejs.load(self, context, **self.properties)

    def draw(self, context):
        # File-browser side panel: one row per option.
        layout = self.layout
        row = layout.row()
        row.prop(self.properties, "option_flip_yz")
        row = layout.row()
        row.prop(self.properties, "recalculate_normals")
# ################################################################
# Exporter - settings
# ################################################################
# Name of the JSON file (placed in Blender's temp dir) that caches the
# export dialog settings between invocations.
SETTINGS_FILE_EXPORT = "threejs_settings_export.js"
import os
import json
def file_exists(filename):
    """Return true if file exists and accessible for reading.

    Should be safer than just testing for existence due to links and
    permissions magic on Unix filesystems.
    @rtype: boolean
    """
    try:
        with open(filename, 'r'):
            pass
        return True
    except IOError:
        return False
def get_settings_fullpath():
    # Settings live in Blender's session temp directory, so they survive
    # repeated dialog invocations but not necessarily Blender restarts.
    return os.path.join(bpy.app.tempdir, SETTINGS_FILE_EXPORT)
def save_settings_export(properties):
    """Persist the current export-dialog options as JSON in the temp dir.

    Counterpart of restore_settings_export; keys must stay in sync.
    """
    settings = {
        "option_export_scene" : properties.option_export_scene,
        "option_embed_meshes" : properties.option_embed_meshes,
        "option_lights" : properties.option_lights,
        "option_cameras" : properties.option_cameras,
        "option_flip_yz" : properties.option_flip_yz,
        "option_materials" : properties.option_materials,
        "option_normals" : properties.option_normals,
        "option_colors" : properties.option_colors,
        "option_uv_coords" : properties.option_uv_coords,
        "option_edges" : properties.option_edges,
        "option_faces" : properties.option_faces,
        "option_vertices" : properties.option_vertices,
        "option_vertices_truncate" : properties.option_vertices_truncate,
        "option_scale" : properties.option_scale,
        "align_model" : properties.align_model
    }
    fname = get_settings_fullpath()
    # Fix: the file handle was previously opened and never closed;
    # use a context manager so the data is flushed and the handle released.
    with open(fname, "w") as f:
        json.dump(settings, f)
def restore_settings_export(properties):
    """Load previously saved export-dialog options onto *properties*.

    Missing files or missing keys fall back to the documented defaults,
    so a fresh install behaves the same as an explicit reset.
    """
    settings = {}
    fname = get_settings_fullpath()
    if file_exists(fname):
        # Fix: the settings file was previously left open after reading;
        # use a context manager to close it deterministically.
        with open(fname, "r") as f:
            settings = json.load(f)
    properties.option_vertices = settings.get("option_vertices", True)
    properties.option_vertices_truncate = settings.get("option_vertices_truncate", False)
    properties.option_faces = settings.get("option_faces", True)
    properties.option_normals = settings.get("option_normals", True)
    properties.option_edges = settings.get("option_edges", False)
    properties.option_colors = settings.get("option_colors", True)
    properties.option_uv_coords = settings.get("option_uv_coords", True)
    properties.option_materials = settings.get("option_materials", True)
    properties.align_model = settings.get("align_model", "None")
    properties.option_scale = settings.get("option_scale", 1.0)
    properties.option_flip_yz = settings.get("option_flip_yz", True)
    properties.option_export_scene = settings.get("option_export_scene", False)
    properties.option_embed_meshes = settings.get("option_embed_meshes", True)
    properties.option_lights = settings.get("option_lights", False)
    properties.option_cameras = settings.get("option_cameras", False)
# ################################################################
# Exporter
# ################################################################
class ExportTHREEJS(bpy.types.Operator, ExportHelper):
    '''Export selected object / scene for Three.js (ASCII JSON format).'''
    # NOTE: the docstring above doubles as the operator tooltip in Blender.
    bl_idname = "export.threejs"
    bl_label = "Export Three.js"
    filename_ext = ".js"
    # Geometry options.
    option_vertices = BoolProperty(name = "Vertices", description = "Export vertices", default = True)
    option_vertices_deltas = BoolProperty(name = "Deltas", description = "Delta vertices", default = False)
    option_vertices_truncate = BoolProperty(name = "Truncate", description = "Truncate vertices", default = False)
    option_faces = BoolProperty(name = "Faces", description = "Export faces", default = True)
    option_faces_deltas = BoolProperty(name = "Deltas", description = "Delta faces", default = False)
    option_normals = BoolProperty(name = "Normals", description = "Export normals", default = True)
    option_edges = BoolProperty(name = "Edges", description = "Export edges", default = False)
    # Material options.
    option_colors = BoolProperty(name = "Colors", description = "Export vertex colors", default = True)
    option_uv_coords = BoolProperty(name = "UVs", description = "Export texture coordinates", default = True)
    option_materials = BoolProperty(name = "Materials", description = "Export materials", default = True)
    # General settings.
    align_types = [("None","None","None"), ("Center","Center","Center"), ("Bottom","Bottom","Bottom"), ("Top","Top","Top")]
    align_model = EnumProperty(name = "Align model", description = "Align model", items = align_types, default = "None")
    option_scale = FloatProperty(name = "Scale", description = "Scale vertices", min = 0.01, max = 1000.0, soft_min = 0.01, soft_max = 1000.0, default = 1.0)
    option_flip_yz = BoolProperty(name = "Flip YZ", description = "Flip YZ", default = True)
    # Scene export (beta) options.
    option_export_scene = BoolProperty(name = "Scene", description = "Export scene", default = False)
    option_embed_meshes = BoolProperty(name = "Embed", description = "Embed meshes", default = True)
    option_lights = BoolProperty(name = "Lights", description = "Export default scene lights", default = False)
    option_cameras = BoolProperty(name = "Cameras", description = "Export default scene cameras", default = False)

    def invoke(self, context, event):
        # Pre-fill the dialog with the last-used settings.
        restore_settings_export(self.properties)
        return ExportHelper.invoke(self, context, event)

    @classmethod
    def poll(cls, context):
        # Exporter is only available when some object is active.
        return context.active_object != None

    def execute(self, context):
        print("Selected: " + context.active_object.name)
        if not self.properties.filepath:
            raise Exception("filename not set")
        # Remember the chosen settings for the next invocation.
        save_settings_export(self.properties)
        filepath = self.filepath
        # Deferred import keeps add-on registration cheap.
        import io_mesh_threejs.export_threejs
        return io_mesh_threejs.export_threejs.save(self, context, **self.properties)

    def draw(self, context):
        # File-browser side panel layout: geometry / materials / settings / beta.
        layout = self.layout
        row = layout.row()
        row.label(text="Geometry:")
        row = layout.row()
        row.prop(self.properties, "option_vertices")
        row = layout.row()
        # Truncation only makes sense when vertices are exported at all.
        row.enabled = self.properties.option_vertices
        # row.prop(self.properties, "option_vertices_deltas")
        row.prop(self.properties, "option_vertices_truncate")
        layout.separator()
        row = layout.row()
        row.prop(self.properties, "option_faces")
        row = layout.row()
        row.enabled = self.properties.option_faces
        # row.prop(self.properties, "option_faces_deltas")
        layout.separator()
        row = layout.row()
        row.prop(self.properties, "option_normals")
        layout.separator()
        row = layout.row()
        row.prop(self.properties, "option_edges")
        layout.separator()
        row = layout.row()
        row.label(text="Materials:")
        row = layout.row()
        row.prop(self.properties, "option_uv_coords")
        row.prop(self.properties, "option_colors")
        row = layout.row()
        row.prop(self.properties, "option_materials")
        layout.separator()
        row = layout.row()
        row.label(text="Settings:")
        row = layout.row()
        row.prop(self.properties, "align_model")
        row = layout.row()
        row.prop(self.properties, "option_flip_yz")
        row.prop(self.properties, "option_scale")
        layout.separator()
        row = layout.row()
        row.label(text="Beta:")
        row = layout.row()
        row.prop(self.properties, "option_export_scene")
        row.prop(self.properties, "option_lights")
        row.prop(self.properties, "option_cameras")
        row = layout.row()
        row.prop(self.properties, "option_embed_meshes")
        layout.separator()
# ################################################################
# Common
# ################################################################
def menu_func_export(self, context):
    """File > Export menu entry; pre-fills the path from the .blend name."""
    default_path = bpy.data.filepath.replace(".blend", ".js")
    op = self.layout.operator(ExportTHREEJS.bl_idname, text="Three.js (.js)")
    op.filepath = default_path
def menu_func_import(self, context):
    """File > Import menu entry for the Three.js importer."""
    layout = self.layout
    layout.operator(ImportTHREEJS.bl_idname, text="Three.js (.js)")
def register():
    """Hook the exporter and importer into Blender's File menus."""
    bpy.types.INFO_MT_file_import.append(menu_func_import)
    bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
    """Remove the exporter and importer entries from Blender's File menus."""
    bpy.types.INFO_MT_file_import.remove(menu_func_import)
    bpy.types.INFO_MT_file_export.remove(menu_func_export)
# Allow running this add-on file directly (e.g. from Blender's text editor).
if __name__ == "__main__":
    register()
| Python |
#!/usr/bin/env python
import os
import tempfile
files = [
'js/Error.js',
'js/lib/Logger.js',
'js/lib/Stats.js',
'js/lib/gui.min.js',
'js/lib/color.js',
'js/lib/js-signals.min.js',
'js/lib/Tween.js',
'js/lib/ThreeWebGL.js',
'js/lib/ThreeExtras.js',
'js/lib/LoadingBar.js',
'js/lib/RequestAnimationFrame.js',
'js/lib/Sequencer.js',
'js/lib/Tune.js',
'js/lib/Utils.js',
'js/lib/UgcObject.js',
'js/lib/UgcHandler.js',
'js/lib/Gee.js',
'js/lib/Swell.js',
'js/lib/WonderWall.js',
'js/lib/Heart.js',
'js/effects/ClearEffect.js',
'js/effects/FadeInEffect.js',
'js/effects/FadeOutEffect.js',
'js/effects/RenderEffect.js',
'js/effects/NoiseEffect.js',
'js/effects/BloomEffect.js',
'js/effects/HeatEffect.js',
'js/effects/PaintEffect.js',
'js/effects/PaintEffectPrairie.js',
'js/effects/PaintEffectDunes.js',
'js/effects/PaintEffectVideo.js',
'js/effects/PaintDarkEffect.js',
'js/effects/OverlayEffect.js',
'js/effects/PointerEffect.js',
'js/effects/PointerImageEffect.js',
'js/worlds/CityWorld.js',
'js/worlds/PrairieWorld.js',
'js/worlds/DunesWorld.js',
'js/worlds/shaders/CityShader.js',
'js/worlds/shaders/DunesShader.js',
'js/worlds/shaders/CloudsShader.js',
'js/worlds/shaders/UgcShader.js',
'js/worlds/cameras/DunesCamera.js',
'js/worlds/cameras/DunesCameraFreeExplore.js',
'js/worlds/cameras/PrairieCameraFreeExplore.js',
'js/worlds/cameras/CityCameraFreeExplore.js',
'js/worlds/triggers/Trigger.js',
'js/soups/CitySoup.js',
'js/soups/PrairieSoup.js',
'js/soups/DunesSoup.js',
'js/soups/elements/Ribbon.js',
'js/soups/elements/AnimalRandom.js',
'js/soups/elements/AnimalRandom_old.js',
'js/soups/elements/Ribbons.js',
'js/soups/elements/Vectors.js',
'js/soups/elements/Particles.js',
'js/soups/elements/AnimalSwarm.js',
'js/soups/elements/AnimalSwarm2.js',
'js/soups/elements/AnimalSwarm_dunes.js',
'js/soups/elements/AnimalInFrontOfCamera.js',
'js/soups/elements/Trail.js',
'js/soups/elements/CollisionScene.js',
'js/soups/elements/TrailShader.js',
'js/soups/elements/Stragglers.js',
'js/video/VideoPlane.js',
'js/video/VideoPlayer.js',
'js/video/VideoShader.js',
'js/video/VideoShots.js',
'js/sequences/City.js',
'js/sequences/Prairie.js',
'js/sequences/Dunes.js',
'js/sections/Section.js',
'js/sections/LauncherSection.js',
'js/sections/FilmSection.js',
'js/sections/RelauncherSection.js',
'js/sections/ExplorationSection.js',
'js/sections/UgcSection.js',
'js/sections/launcher/Clouds.js',
'js/sections/ugc/UgcIntro.js',
'js/sections/ugc/ColorPicker.js',
'js/sections/ugc/UgcUI.js',
'js/sections/ugc/UgcObjectCreator.js',
'js/sections/ugc/UgcSoupCreator.js',
'js/sections/ugc/objectcreator/VoxelPainter.js',
'js/Footer.js',
'js/Shortcuts.js',
'js/Main.js'
]
# Concatenate every source (in dependency order) into a temporary main.js,
# run it through the Closure compiler, then remove the temporary file.
parts = []
for item in files:
    # Fixed: sources were opened and never closed (handle leak).
    with open('../deploy/' + item, 'r') as src_file:
        parts.append(src_file.read())
# Fixed: repeated `string +=` is quadratic; a single join is linear and
# produces the identical "<file1>\n<file2>\n...<fileN>\n" payload.
string = "\n".join(parts) + "\n"
with open('main.js', 'w') as tmp_file:
    tmp_file.write(string)
os.system("java -jar compiler.jar --language_in=ECMASCRIPT5 --js main.js --js_output_file ../deploy/main.min.js")
os.unlink("main.js")  # comment this line if you want to make sense of the errors
| Python |
#!/usr/bin/python2.6
#
# Simple http server to emulate api.playfoursquare.com
import logging
import shutil
import sys
import urlparse
import SimpleHTTPServer
import BaseHTTPServer
class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """Handle playfoursquare.com requests, for testing.

  Every known API path is answered with a canned XML capture file; anything
  else gets a 404.  POST is treated exactly like GET.
  """

  # Request path -> capture file served in response.
  _CAPTURES = {
      '/v1/venue': '../captures/api/v1/venue.xml',
      '/v1/addvenue': '../captures/api/v1/venue.xml',
      '/v1/venues': '../captures/api/v1/venues.xml',
      '/v1/user': '../captures/api/v1/user.xml',
      '/v1/checkcity': '../captures/api/v1/checkcity.xml',
      '/v1/checkins': '../captures/api/v1/checkins.xml',
      '/v1/cities': '../captures/api/v1/cities.xml',
      '/v1/switchcity': '../captures/api/v1/switchcity.xml',
      '/v1/tips': '../captures/api/v1/tips.xml',
      '/v1/checkin': '../captures/api/v1/checkin.xml',
      '/history/12345.rss': '../captures/api/v1/feed.xml',
  }

  def do_GET(self):
    """Serve the canned capture matching the requested URL."""
    logging.warn('do_GET: %s, %s', self.command, self.path)
    url = urlparse.urlparse(self.path)
    logging.warn('do_GET: %s', url)
    # Fixed: the query string was parsed into unused locals; dropped.
    response = self.handle_url(url)
    if response is not None:  # fixed: identity comparison with None
      self.send_200()
      shutil.copyfileobj(response, self.wfile)
      response.close()  # fixed: don't leak the capture file handle
      self.wfile.close()

  # POST requests are served identically to GET requests.
  do_POST = do_GET

  def handle_url(self, url):
    """Return an open capture file for `url`, or send a 404 and return None."""
    path = self._CAPTURES.get(url.path)
    if path is None:
      self.send_error(404)
    else:
      logging.warn('Using: %s' % path)
      return open(path)

  def send_200(self):
    """Write a 200 OK status line with an XML content type."""
    self.send_response(200)
    self.send_header('Content-type', 'text/xml')
    self.end_headers()
def main():
if len(sys.argv) > 1:
port = int(sys.argv[1])
else:
port = 8080
server_address = ('0.0.0.0', port)
httpd = BaseHTTPServer.HTTPServer(server_address, RequestHandler)
sa = httpd.socket.getsockname()
print "Serving HTTP on", sa[0], "port", sa[1], "..."
httpd.serve_forever()
# Script entry point: serve until interrupted.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
import os
import subprocess
import sys
# Java source roots that receive the generated type and parser classes.
BASEDIR = '../main/src/com/joelapenna/foursquare'
# Directory of capture XML files, one per API type.
TYPESDIR = '../captures/types/v1'

# Specific capture files may be named on the command line;
# default to regenerating from every capture.
captures = sys.argv[1:]
if not captures:
  captures = os.listdir(TYPESDIR)

for f in captures:
  # foo_bar.xml -> FooBar.java / FooBarParser.java
  basename = f.split('.')[0]
  javaname = ''.join([c.capitalize() for c in basename.split('_')])
  fullpath = os.path.join(TYPESDIR, f)
  typepath = os.path.join(BASEDIR, 'types', javaname + '.java')
  parserpath = os.path.join(BASEDIR, 'parsers', javaname + 'Parser.java')
  # Shell out so each generator's stdout is redirected to its target file.
  cmd = 'python gen_class.py %s > %s' % (fullpath, typepath)
  print cmd
  subprocess.call(cmd, stdout=sys.stdout, shell=True)
  cmd = 'python gen_parser.py %s > %s' % (fullpath, parserpath)
  print cmd
  subprocess.call(cmd, stdout=sys.stdout, shell=True)
| Python |
#!/usr/bin/python
"""
Pull a oAuth protected page from foursquare.
Expects ~/.oget to contain (one on each line):
CONSUMER_KEY
CONSUMER_KEY_SECRET
USERNAME
PASSWORD
Don't forget to chmod 600 the file!
"""
import httplib
import os
import re
import sys
import urllib
import urllib2
import urlparse
import user
from xml.dom import pulldom
from xml.dom import minidom
import oauth
"""From: http://groups.google.com/group/foursquare-api/web/oauth
@consumer = OAuth::Consumer.new("consumer_token","consumer_secret", {
:site => "http://foursquare.com",
:scheme => :header,
:http_method => :post,
:request_token_path => "/oauth/request_token",
:access_token_path => "/oauth/access_token",
:authorize_path => "/oauth/authorize"
})
"""
SERVER = 'api.foursquare.com:80'
CONTENT_TYPE_HEADER = {'Content-Type' :'application/x-www-form-urlencoded'}
SIGNATURE_METHOD = oauth.OAuthSignatureMethod_HMAC_SHA1()
AUTHEXCHANGE_URL = 'http://api.foursquare.com/v1/authexchange'
def parse_auth_response(auth_response):
    """Extract (oauth_token, oauth_token_secret) from an authexchange reply."""
    token = re.search('<oauth_token>(.*)</oauth_token>',
                      auth_response).group(1)
    secret = re.search('<oauth_token_secret>(.*)</oauth_token_secret>',
                       auth_response).group(1)
    return (token, secret)
def create_signed_oauth_request(username, password, consumer):
    """Build an HMAC-SHA1-signed POST request for the authexchange endpoint."""
    params = {'fs_username': username, 'fs_password': password}
    request = oauth.OAuthRequest.from_consumer_and_token(
        consumer, http_method='POST', http_url=AUTHEXCHANGE_URL,
        parameters=params)
    request.sign_request(SIGNATURE_METHOD, consumer, None)
    return request
def main():
  """Fetch the OAuth-protected foursquare URL in argv[1] and print the body.

  Credentials come from ~/.oget; after the first successful token exchange
  the file is rewritten with the access token appended (6 lines, not 4),
  so later runs skip the exchange.
  """
  url = urlparse.urlparse(sys.argv[1])
  # Nevermind that the query can have repeated keys.
  parameters = dict(urlparse.parse_qsl(url.query))
  password_file = open(os.path.join(user.home, '.oget'))
  lines = [line.strip() for line in password_file.readlines()]
  if len(lines) == 4:
    # No cached token yet: consumer key/secret plus username/password only.
    cons_key, cons_key_secret, username, password = lines
    access_token = None
  else:
    cons_key, cons_key_secret, username, password, token, secret = lines
    access_token = oauth.OAuthToken(token, secret)
  consumer = oauth.OAuthConsumer(cons_key, cons_key_secret)
  if not access_token:
    # One-time authexchange: trade username/password for an access token.
    oauth_request = create_signed_oauth_request(username, password, consumer)
    connection = httplib.HTTPConnection(SERVER)
    headers = {'Content-Type' :'application/x-www-form-urlencoded'}
    connection.request(oauth_request.http_method, AUTHEXCHANGE_URL,
                       body=oauth_request.to_postdata(), headers=headers)
    auth_response = connection.getresponse().read()
    token = parse_auth_response(auth_response)
    access_token = oauth.OAuthToken(*token)
    # Persist the token (clobbers ~/.oget with the 6-line form).
    open(os.path.join(user.home, '.oget'), 'w').write('\n'.join((
        cons_key, cons_key_secret, username, password, token[0], token[1])))
  # Sign and send the actual request.
  oauth_request = oauth.OAuthRequest.from_consumer_and_token(consumer,
      access_token, http_method='POST', http_url=url.geturl(),
      parameters=parameters)
  oauth_request.sign_request(SIGNATURE_METHOD, consumer, access_token)
  connection = httplib.HTTPConnection(SERVER)
  connection.request(oauth_request.http_method, oauth_request.to_url(),
                     body=oauth_request.to_postdata(), headers=CONTENT_TYPE_HEADER)
  print connection.getresponse().read()
  #print minidom.parse(connection.getresponse()).toprettyxml(indent=' ')
#print minidom.parse(connection.getresponse()).toprettyxml(indent=' ')
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/python
import datetime
import sys
import textwrap
import common
from xml.dom import pulldom
PARSER = """\
/**
* Copyright 2009 Joe LaPenna
*/
package com.joelapenna.foursquare.parsers;
import com.joelapenna.foursquare.Foursquare;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareParseException;
import com.joelapenna.foursquare.types.%(type_name)s;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Auto-generated: %(timestamp)s
*
* @author Joe LaPenna (joe@joelapenna.com)
* @param <T>
*/
public class %(type_name)sParser extends AbstractParser<%(type_name)s> {
private static final Logger LOG = Logger.getLogger(%(type_name)sParser.class.getCanonicalName());
private static final boolean DEBUG = Foursquare.PARSER_DEBUG;
@Override
public %(type_name)s parseInner(XmlPullParser parser) throws XmlPullParserException, IOException,
FoursquareError, FoursquareParseException {
parser.require(XmlPullParser.START_TAG, null, null);
%(type_name)s %(top_node_name)s = new %(type_name)s();
while (parser.nextTag() == XmlPullParser.START_TAG) {
String name = parser.getName();
%(stanzas)s
} else {
// Consume something we don't understand.
if (DEBUG) LOG.log(Level.FINE, "Found tag that we don't recognize: " + name);
skipSubTree(parser);
}
}
return %(top_node_name)s;
}
}"""
BOOLEAN_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(Boolean.valueOf(parser.nextText()));
"""
GROUP_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new GroupParser(new %(sub_parser_camel_case)s()).parse(parser));
"""
COMPLEX_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new %(parser_name)s().parse(parser));
"""
STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(parser.nextText());
"""
def main():
    """Generate a Java parser for the capture file named on the command line."""
    parsed = common.WalkNodesForAttributes(sys.argv[1])
    GenerateClass(*parsed)
def GenerateClass(type_name, top_node_name, attributes):
  """Print the Java parser source for one capture type to stdout.

  type_name: the type of object the parser returns
  top_node_name: the name of the object the parser returns,
      per common.WalkNodesForAttributes
  attributes: {name: (type, [child])} mapping from the same helper.
  """
  stanzas = []
  for name in sorted(attributes):
    typ, children = attributes[name]
    replacements = Replacements(top_node_name, name, typ, children)
    # Pick the "} else if" stanza template matching the attribute's type.
    if typ == common.BOOLEAN:
      stanzas.append(BOOLEAN_STANZA % replacements)
    elif typ == common.GROUP:
      stanzas.append(GROUP_STANZA % replacements)
    elif typ in common.COMPLEX:
      stanzas.append(COMPLEX_STANZA % replacements)
    else:
      stanzas.append(STANZA % replacements)
  if stanzas:
    # pop off the extranious } else for the first conditional stanza.
    stanzas[0] = stanzas[0].replace('} else ', '', 1)
  # NOTE(review): this reuses the *last* loop values of name/typ; for an
  # empty `attributes` dict it raises NameError -- confirm callers never
  # pass an empty mapping.  (PARSER itself only uses type_name, timestamp,
  # top_node_name and stanzas, so the specific name/typ do not matter.)
  replacements = Replacements(top_node_name, name, typ, [None])
  replacements['stanzas'] = '\n'.join(stanzas).strip()
  print PARSER % replacements
def Replacements(top_node_name, name, typ, children):
    """Build the template-substitution dict for one attribute stanza.

    children[0], when truthy, names the element type of a Group attribute;
    otherwise the sub-parser name is derived from the (depluralized)
    attribute name itself.
    """
    def pascal(s):
        return ''.join(word.capitalize() for word in s.split('_'))

    type_name = pascal(top_node_name)      # CameCaseClassName
    camel_name = pascal(name)              # CamelCaseClassName
    attribute_name = camel_name.lower().capitalize()
    field_name = 'm' + camel_name          # Java member-field convention
    if children[0]:
        sub_parser_camel_case = children[0] + 'Parser'
    else:
        sub_parser_camel_case = camel_name[:-1] + 'Parser'
    return {
        'type_name': type_name,
        'name': name,
        'top_node_name': top_node_name,
        'camel_name': camel_name,
        'parser_name': typ + 'Parser',
        'attribute_name': attribute_name,
        'field_name': field_name,
        'typ': typ,
        'timestamp': datetime.datetime.now(),
        'sub_parser_camel_case': sub_parser_camel_case,
        'sub_type': children[0],
    }
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/python
import logging
from xml.dom import minidom
from xml.dom import pulldom
BOOLEAN = "boolean"
STRING = "String"
GROUP = "Group"
# Interfaces that all FoursquareTypes implement.
DEFAULT_INTERFACES = ['FoursquareType']
# Interfaces that specific FoursqureTypes implement.
INTERFACES = {
}
DEFAULT_CLASS_IMPORTS = [
]
CLASS_IMPORTS = {
# 'Checkin': DEFAULT_CLASS_IMPORTS + [
# 'import com.joelapenna.foursquare.filters.VenueFilterable'
# ],
# 'Venue': DEFAULT_CLASS_IMPORTS + [
# 'import com.joelapenna.foursquare.filters.VenueFilterable'
# ],
# 'Tip': DEFAULT_CLASS_IMPORTS + [
# 'import com.joelapenna.foursquare.filters.VenueFilterable'
# ],
}
COMPLEX = [
'Group',
'Badge',
'Beenhere',
'Checkin',
'CheckinResponse',
'City',
'Credentials',
'Data',
'Mayor',
'Rank',
'Score',
'Scoring',
'Settings',
'Stats',
'Tags',
'Tip',
'User',
'Venue',
]
TYPES = COMPLEX + ['boolean']
def WalkNodesForAttributes(path):
  """Parse the xml file getting all attributes.
  <venue>
    <attribute>value</attribute>
  </venue>
  Returns:
    type_name - The java-style name the top node will have. "Venue"
    top_node_name - unadultured name of the xml stanza, probably the type of
        java class we're creating. "venue"
    attributes - {'attribute': 'value'}
  """
  doc = pulldom.parse(path)
  type_name = None
  top_node_name = None
  attributes = {}
  # level > 0 means we are inside a COMPLEX element and skipping its subtree.
  level = 0
  for event, node in doc:
    # For skipping parts of a tree.
    if level > 0:
      if event == pulldom.END_ELEMENT:
        level-=1
        logging.warn('(%s) Skip end: %s' % (str(level), node))
        continue
      elif event == pulldom.START_ELEMENT:
        logging.warn('(%s) Skipping: %s' % (str(level), node))
        level+=1
        continue
    if event == pulldom.START_ELEMENT:
      logging.warn('Parsing: ' + node.tagName)
      # Get the type name to use.
      if type_name is None:
        # First start-element is the document root; derive both names.
        type_name = ''.join([word.capitalize()
                             for word in node.tagName.split('_')])
        top_node_name = node.tagName
        logging.warn('Found Top Node Name: ' + top_node_name)
        continue
      typ = node.getAttribute('type')
      child = node.getAttribute('child')
      # We don't want to walk complex types.
      if typ in COMPLEX:
        logging.warn('Found Complex: ' + node.tagName)
        level = 1
      elif typ not in TYPES:
        # Anything unrecognized is treated as a plain String attribute.
        logging.warn('Found String: ' + typ)
        typ = STRING
      else:
        logging.warn('Found Type: ' + typ)
      logging.warn('Adding: ' + str((node, typ)))
      # setdefault: only the first occurrence of a tag name is recorded.
      attributes.setdefault(node.tagName, (typ, [child]))
  logging.warn('Attr: ' + str((type_name, top_node_name, attributes)))
  return type_name, top_node_name, attributes
| Python |
#!/usr/bin/python
import xml.parsers.expat;
import sys;
import re;
parser=xml.parsers.expat.ParserCreate('UTF-8');
values_en = {}
values_lang = {}
values_hash = {}
name=''
def parse(lang, values):
  """Accumulate <string name=...>text</string> pairs from a strings.xml.

  lang: 'en' reads res/values/strings.xml, anything else reads the
      res/values-<lang>/ variant.
  values: dict updated in place.  Expat may deliver a tag's character data
      in several chunks, hence the += accumulation keyed on the module-level
      `name` that the handlers share.
  """
  def start_element(n, attrs):
    global name;
    if n != u'string': return
    name=attrs[u'name']
  def end_element(n):
    global name;
    name=''
  def char_data(value):
    global name;
    if name == '': return;
    if not name in values: values[name] = u'';
    values[name] += value;
  p = xml.parsers.expat.ParserCreate()
  p.StartElementHandler = start_element
  p.EndElementHandler = end_element
  p.CharacterDataHandler = char_data
  if lang == 'en':
    f=open('res/values/strings.xml');
  else:
    f=open('res/values-%s/strings.xml' % lang);
  p.ParseFile(f);
def parse_R(file, values):
  """Collect resource ids from a generated R.java into `values`.

  Each line of the form 'public static final int <name>=0x<hex>;' adds
  values[<name>] = '<hex>'.  Exits the process when the file cannot be read.
  """
  try:
    # Fixed: the file handle was never closed.
    with open(file) as src:
      for line in src:
        match = re.search(".*public static final int (.*)=0x(.*);", line)
        if match:
          values[match.group(1)] = match.group(2)
  except IOError:
    # Fixed: a bare `except:` silently exited on *any* error (even typos in
    # this function); only I/O failures should abort the build.
    sys.exit(1)
# Build the English string table and the resource-id table from R.java.
parse('en', values_en);
parse_R('gen/com/volosyukivan/R.java', values_lang);

page=open('html/key.html').read();

# First pass: replace each English string in the page with a unique
# placeholder, longest strings first so substrings cannot clobber a match.
for num,(key,orig) in enumerate(
    sorted(values_en.iteritems(),
           key=lambda x:len(x[1]), reverse=True)):
  if not key in values_lang: continue;
  replacement = '##//$$$%s$$$//##' % num;
  values_hash[key] = replacement;
  page = page.replace(orig, replacement);

# Second pass: swap each placeholder for '$' + the Android resource id,
# to be resolved to localized text at runtime.
for key,repl in values_lang.iteritems():
  if not key in values_hash: continue;
  orig = values_hash[key];
  replacement = '$' + values_lang[key];
  page = page.replace(orig, replacement);

# Only rewrite the output when it actually changed, keeping the build
# from touching the file (and retriggering dependents) unnecessarily.
old = None
try:
  old = open("res/raw/key.html").read();
except:
  pass
if (old != page):
  open("res/raw/key.html", "w").write(page.encode('UTF-8'));
| Python |
#! /usr/bin/env python
# encoding: utf-8
# waf 1.6.10
VERSION='0.3.3'
import sys
APPNAME='p2t'
top = '.'
out = 'build'
CPP_SOURCES = ['poly2tri/common/shapes.cc',
'poly2tri/sweep/cdt.cc',
'poly2tri/sweep/advancing_front.cc',
'poly2tri/sweep/sweep_context.cc',
'poly2tri/sweep/sweep.cc',
'testbed/main.cc']
from waflib.Tools.compiler_cxx import cxx_compiler
cxx_compiler['win32'] = ['g++']
#Platform specific libs
if sys.platform == 'win32':
# MS Windows
sys_libs = ['glfw', 'opengl32']
elif sys.platform == 'darwin':
# Apple OSX
sys_libs = ['glfw', 'OpenGL']
else:
# GNU/Linux, BSD, etc
sys_libs = ['glfw', 'GL']
def options(opt):
    """Register command-line options: only the C++ compiler ones are needed."""
    print(' set_options')
    opt.load('compiler_cxx')
def configure(conf):
    """Detect the C++ compiler and record the project's flags and libraries."""
    print(' calling the configuration')
    conf.load('compiler_cxx')
    env = conf.env
    env.CXXFLAGS = ['-O3', '-ffast-math']
    env.DEFINES_P2T = ['P2T']
    env.LIB_P2T = sys_libs  # platform-specific GL/glfw libs chosen above
def build(bld):
    """Compile the poly2tri sources plus the testbed into the p2t program."""
    print(' building')
    bld.program(features='cxx cxxprogram',
                source=CPP_SOURCES,
                target='p2t',
                uselib='P2T')
| Python |
import sys
# Guard: the package relies on 2.5+ language features.
if sys.version_info < (2, 5):
    raise Exception('Fenton requires Python 2.5 or higher.')

# Prefer setuptools, falling back to plain distutils.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup

setup(name = 'Fenton',
      version = '0.1',
      description = 'Fenton is for apps',
      author = 'Adrian Dries',
      author_email = 'adries@gmail.com',
      url = 'http://',
      packages = ['fenton'],
      license = 'MIT License',
      long_description = 'Fenton is for apps',
      classifiers = [
          "Development Status :: 4 - Beta",
          "Intended Audience :: Developers",
          "License :: OSI Approved :: MIT License",
          "Programming Language :: Python",
          "Programming Language :: Python :: 2",
          "Topic :: Database :: Front-Ends",
          "Operating System :: OS Independent",
      ],
)
| Python |
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
__test__ = False
import os
import sys
import time
import logging
import threading
import traceback
from unittest import _WritelnDecorator
from nose.config import Config
from nose.core import TestProgram
from nose.util import isclass, odict
from nose.plugins import DefaultPluginManager
from fenton import script
from fenton import reloader
from fenton.console import colours
from fenton.logging import patch_getlogger
from fenton.script import Monitor, run_with_reloader
VERBOSITY=3
def once(ign):
    """Run the test suite a single time (no reloader).

    ign: ignored; kept for the script-dispatch call signature.
    The nose logging-config file comes from argv[1].
    """
    setup_logging()
    runtests(sys.argv[1], False)
def testconfig(conffile):
    """Build a nose Config writing to stderr, with `conffile` as its
    logging configuration and the default plugin set enabled."""
    cfg = Config()
    cfg.stream = sys.stderr  # StringIO()
    cfg.plugins = DefaultPluginManager()
    cfg.loggingConfig = conffile
    #cfg.stopOnError = True
    return cfg
def runtests(conffile, exit=True):
    """Run the nose suite once with doctests enabled.

    conffile: logging configuration file handed to nose.
    exit: when True, TestProgram exits the process with the result.
    Returns True when the run succeeded.
    """
    argv = ['--with-doctest']
    config = testconfig(conffile)
    runner = ConsoleRunner(config, VERBOSITY)
    print >>sys.stderr, "" # blank line
    tp = TestProgram(argv=argv, config=config, exit=exit, testRunner=runner)
    success = tp.success
    # Drop the TestProgram promptly; it holds the whole loaded suite.
    del tp
    return success
def setup_logging():
    """Install fenton's patched getLogger and give the root logger a
    default stderr handler."""
    patch_getlogger()
    logging.basicConfig()
def install_crash_dummy(pdb=False):
    """Install a rich sys.excepthook.

    Prefers IPython's verbose traceback formatter when importable
    (call_pdb=pdb); otherwise, when pdb is True, falls back to a hook that
    drops into the debugger.  With neither available nor requested, the
    default hook is left untouched.
    """
    try:
        import IPython.ultraTB
        f = IPython.ultraTB.FormattedTB(mode='Verbose', color_scheme='Linux', call_pdb=pdb)
    except Exception:
        # Fixed: a bare `except:` also swallowed SystemExit/KeyboardInterrupt.
        if not pdb:
            return
        def f(*args, **kw):
            import pdb
            pdb.set_trace()
    sys.excepthook = f
C = colours(hasattr(sys.stderr, 'fileno') and os.isatty(sys.stderr.fileno()))
got_gtk = False
got_others = False
try:
import pygtk
except ImportError:
# other GUIs?
pass
else:
if os.environ.get('DISPLAY'):
pygtk.require("2.0")
import gtk
got_gtk = True
class ConsoleRunner:
    """Combined nose test runner/result object.

    Prints coloured per-test progress (dots or names, depending on
    verbosity), then the error/failure details and a summary, to the
    stream taken from the nose config.
    """

    separator1 = '=' * 70
    separator2 = '-' * 70
    # Fixed: startTest() reads self.started, but the attribute was only ever
    # created inside run(); defining it on the class makes standalone use of
    # the result object safe and marks "timer not started yet".
    started = None

    def __init__(self, config, verbosity, errorClasses=None):
        # Fixed: the original errorClasses={} mutable default was shared
        # between every instance constructed without the argument.
        self.errorClasses = {} if errorClasses is None else errorClasses
        self.config = config
        self.stream = _WritelnDecorator(config.stream)
        self.dots = verbosity == 1
        self.show_tests = verbosity > 1
        self.show_descriptions = verbosity > 2
        self.failures = []
        self.errors = []
        self.testsRun = 0
        self.shouldStop = 0

    def __repr__(self):
        return "<%s%s run=%i errors=%i failures=%i>" % \
            (self.__class__.__module__, self.__class__.__name__,
             self.testsRun, len(self.errors),
             len(self.failures))

    # unittest TestResult API

    def startTest(self, test):
        # Start the wall-clock timer on the very first test.
        if self.started is None:
            self.started = time.time()
        self.testsRun += 1
        if self.show_tests:
            self.stream.write(self.getDescription(test))
            self.stream.write(" ... ")

    def stopTest(self, test):
        "Called when the given test has been run"
        pass

    def stop(self):
        "Indicates that the tests should be aborted"
        self.shouldStop = True

    def addSuccess(self, test):
        if self.show_tests:
            self.stream.writeln("%sOK%s" % (C.Green, C.Normal))
        elif self.dots:
            self.stream.write('%s.%s' % (C.Green, C.Normal))

    def addError(self, test, err):
        """Overrides normal addError to add support for
        errorClasses. If the exception is a registered class, the
        error will be added to the list for that class, not errors.
        """
        ec, ev, tb = err
        exc_info = self._exc_info_to_string(err, test)
        for cls, (storage, label, isfail) in self.errorClasses.items():
            if isclass(ec) and issubclass(ec, cls):
                storage.append((test, exc_info))
                if self.show_tests:
                    self.stream.writeln(label)
                elif self.dots:
                    self.stream.write(label[:1])
                return
        self.errors.append((test, exc_info))
        cols = (C.LightRed, C.Normal)
        if self.show_tests:
            self.stream.writeln('%sERROR%s' % cols)
        elif self.dots:
            self.stream.write('%sE%s' % cols)

    def addFailure(self, test, err):
        self.failures.append((test, self._exc_info_to_string(err, test)))
        cols = (C.Red, C.Normal)
        if self.show_tests:
            self.stream.writeln("%sFAIL%s" % cols)
        elif self.dots:
            self.stream.write('%sF%s' % cols)

    def wasSuccessful(self):
        """Overrides to check that there are no errors in errorClasses
        lists that are marked as errors that should cause a run to
        fail.
        """
        if self.errors or self.failures:
            return False
        for cls in self.errorClasses.keys():
            storage, label, isfail = self.errorClasses[cls]
            if not isfail:
                continue
            if storage:
                return False
        return True

    def _exc_info_to_string(self, err, test):
        """Converts a sys.exc_info()-style tuple of values into a string."""
        exctype, value, tb = err
        # Skip test runner traceback levels
        while tb and self._is_relevant_tb_level(tb):
            tb = tb.tb_next
        if exctype is test.failureException:
            # Skip assert*() traceback levels
            length = self._count_relevant_tb_levels(tb)
            return ''.join(traceback.format_exception(exctype, value, tb, length))
        return ''.join(traceback.format_exception(exctype, value, tb))

    def _is_relevant_tb_level(self, tb):
        # Fixed: dict.has_key is deprecated (and removed in Python 3).
        return '__unittest' in tb.tb_frame.f_globals

    def _count_relevant_tb_levels(self, tb):
        length = 0
        while tb and not self._is_relevant_tb_level(tb):
            length += 1
            tb = tb.tb_next
        return length

    def run(self, test):
        """Run the (plugin-wrapped) test, print the report, return self."""
        # nose business
        wrapper = self.config.plugins.prepareTest(test)
        if wrapper is not None:
            test = wrapper
        # plugins can decorate or capture the output stream
        wrapped = self.config.plugins.setOutputStream(self.config.stream)
        if wrapped is not None:
            self.config.stream = wrapped
        self.started = time.time()
        test(self)
        self.report()
        self.config.plugins.finalize(self)
        return self

    def report(self):
        finished = time.time()
        self.printErrors()
        self.printSummary(self.started, finished)

    def getDescription(self, test):
        if self.show_descriptions:
            return test.shortDescription() or str(test)
        else:
            return str(test)

    def printErrors(self):
        """Overrides to print all errorClasses errors as well."""
        # unittest
        if self.dots or self.show_tests:
            self.stream.writeln()
        self.printErrorList('%sERROR%s' % (C.LightRed, C.Normal), self.errors)
        self.printErrorList('%sFAIL%s' % (C.Red, C.Normal), self.failures)
        # nose
        for cls in self.errorClasses.keys():
            storage, label, isfail = self.errorClasses[cls]
            self.printErrorList(label, storage)
        self.config.plugins.report(self.stream)

    def printErrorList(self, flavour, errors):
        for test, err in errors:
            self.stream.writeln("%s%s%s" % (C.Normal, self.separator1, C.Normal))
            self.stream.writeln("%s: %s" % (flavour, str(test)))
            self.stream.writeln("%s%s%s" % (C.Normal, self.separator2, C.Normal))
            self.stream.writeln("%s" % err)

    def printSummary(self, start, stop):
        """Called by the test runner to print the final summary of test
        run results.
        """
        write = self.stream.write
        writeln = self.stream.writeln
        taken = float(stop - start)
        run = self.testsRun
        plural = run != 1 and "s" or ""
        writeln("%s%s%s" % (C.Normal, self.separator2, C.Normal))
        writeln("Ran %s test%s in %.3fs" % (run, plural, taken))
        writeln()
        if not self.wasSuccessful():
            write("%sFAILED%s (" % (C.Yellow, C.Normal))
            summary = odict()
            summary['%sfailures%s' % (C.Red, C.Normal)] = len(self.failures)
            summary['%serrors%s' % (C.LightRed, C.Normal)] = len(self.errors)
            for cls in self.errorClasses.keys():
                storage, label, isfail = self.errorClasses[cls]
                if not isfail:
                    continue
                summary[label] = len(storage)
            any = False
            for label, count in summary.items():
                if not count:
                    continue
                if any:
                    write(", ")
                write("%s=%s" % (label, count))
                any = True
            writeln(")")
        else:
            writeln("%sOK%s" % (C.Green, C.Normal))
        writeln()
def icon(name):
    """Absolute path of a bundled status-icon image under media/img/."""
    here = os.path.dirname(os.path.abspath(__file__))
    return os.path.join(here, 'media', 'img', name)
class Icon:
icons = {
None: icon('grey.png'),
True: icon('green.png'),
False:icon('red.png')
}
def __init__(self, handler):
self.handler = handler
init = getattr(self, 'init', None)
if init is not None:
init()
def show(self, status):
icon = self.icons[status]
self.change_icon(icon)
def onclick(self, icon):
self.handler()
def change_icon(self, icon):
print "showing " + icon
class GtkIcon(Icon):
    """GTK status-bar icon; runs the gtk main loop on a background thread."""

    def init(self):
        self.icon = gtk.status_icon_new_from_file(self.icons[None])
        self.icon.connect('activate', self.onclick)
        gtk.gdk.threads_init()
        threading.Thread(target=gtk.main).start()

    def change_icon(self, img):
        self.icon.set_from_file(img)
if got_gtk:
icon_class = GtkIcon
else:
icon_class = Icon
suite = script.Suite(parser=script.suite_parser('test/config.ini'))
RELOAD_KEY = 'fenton_TEST_RELOADER'
@suite.builtin
def testrunner(self):
    """Entry point: reloader children (marked via RELOAD_KEY in the
    environment) run the tests under the reloader; the parent process
    runs the monitor instead."""
    if not os.environ.get(RELOAD_KEY):
        return run_monitor(self)
    def run():
        run_tests(self)
    return run_with_reloader(run, RELOAD_KEY)
def run_tests(self):
    """One reloader-child run: watch files, run nose, echo captured logs.

    Returns int(success).  NOTE(review): used as an exit status this
    inverts the usual shell convention (1 on success) -- presumably the
    Monitor side expects it; confirm against fenton.script.
    """
    # reloader watches for modifications to loaded modules
    reloader.watch_file(self.config_file)
    reloader.watch_file(__file__)
    # patch nose to collect names of files
    # that might have syntax errors, so we can watch them too
    from nose.selector import Selector
    orig_wantFile = Selector.wantFile
    def new_wantFile(self, file):
        want = orig_wantFile(self, file)
        if want:
            reloader.watch_file(file)
        return want
    Selector.wantFile = new_wantFile
    config = testconfig(self.config_file)
    result = runtests(self.config_file, False)
    # print any logging
    import logging
    logs = (getattr(h, 'baseFilename', None) for h in logging.root.handlers)
    logs = [f for f in logs if f and os.path.getsize(f)]
    if logs:
        sys.stderr.write("Log:\n\n")
        for f in logs:
            sys.stderr.write(open(f).read())
    return int(result)
def run_monitor(self):
    """Drive the reloader Monitor, surfacing pass/fail on a status icon.

    Clicking the icon SIGHUPs the child process to force a re-run.
    """
    import os, signal
    def onclick():
        os.kill(m.proc.pid, signal.SIGHUP)
    status_icon = icon_class(onclick)
    def report(arg):
        # The child reports '0'/'1'; map to False/True for the icon.
        status_icon.show(bool(int(arg)))
    def finish():
        status_icon.show(None)
    m = Monitor(RELOAD_KEY, report, finish)
    return m.run()
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import thread
import fenton
# descriptor decorator
# descriptor decorator
class resource:
    """Descriptor that lazily obtains a per-owner resource.

    Class-level access returns the factory itself; instance access asks the
    owner's get_resource() cache, keyed by the factory's name.
    """

    def __init__(self, factory):
        self.factory = factory
        self.id = getattr(factory, '__name__', str(factory))

    def __get__(self, obj, Class):
        if obj is None:
            return self.factory
        return obj.get_resource(self.id, self.factory)
class toggle:
    """Data descriptor holding a single (class-shared) value.

    The wrapped callback fires only when the value actually flips from one
    non-None value to a different one; the initial None -> value assignment
    and same-object re-assignments are silent.
    """

    def __init__(self, f):
        import threading
        self.value = None
        self.lock = threading.Lock()
        self.f = f

    def __get__(self, obj, Class):
        return self.value

    def __set__(self, obj, value):
        previous = self.value
        with self.lock:
            self.value = value
        changed = previous is not None and previous is not value
        if changed:
            self.f(obj, value)
class ThreadCheckError(Exception):
    """Raised when a resource is used from a thread other than its owner."""
    pass
class ThreadChecker:
    """Records the thread that created a resource; evaluating the checker in
    a boolean context verifies the current thread is still that owner.

    name: resource id, used in error/log output.
    throw: when true, cross-thread access raises ThreadCheckError instead of
        merely evaluating false.
    """

    def __init__(self, name, throw):
        self.name = name
        self.owner = thread.get_ident()
        self.throw = throw

    def __bool__(self):
        if thread.get_ident() == self.owner:
            return True
        if self.throw:
            raise ThreadCheckError(self)
        return False

    # Fixed: this module runs on Python 2 (it imports `thread`), where truth
    # testing calls __nonzero__, not __bool__ -- without this alias the check
    # never ran and every cross-thread access passed silently.
    __nonzero__ = __bool__

    def __repr__(self):
        from fenton import util
        x = util.uint(self.owner)
        y = util.uint(thread.get_ident())
        return '<ThreadChecker: %s owner=%x this=%x>' % (self.name, x, y)
class BaseContext:
    """Per-request unit of work.

    Caches resources keyed by factory name, queues deletions, and provides
    transaction semantics via the `with` statement.  NOTE(review): most of
    the model operations delegate to fenton.getmeta(); their exact contract
    lives in fenton.model.
    """

    # Convenience pass-throughs to the owning request.
    app = property(lambda x:x.request.app)
    user = property(lambda x:x.request.user)
    tz = property(lambda x:x.request.tz)
    config = property(lambda x:x.request.app.config)

    def __init__(self, request=None):
        from fenton import util
        self.screen = None
        self.request = request
        self.id = util.uniqid()
        self._resources = {}

    def __getstate__(self):
        # Pickling drops the live request and any open resources.
        d = self.__dict__.copy()
        d['request'] = None
        d['_resources'] = {}
        return d

    def __repr__(self):
        return '%s<%s>' % (self.__class__.__name__, self.id)

    # context manager for 'with' statement
    def __enter__(self):
        self.tx_start()
        return self

    def __exit__(self, *errors):
        # Roll back on exception; otherwise flush deletions and commit,
        # rolling back (and re-raising) if the commit path itself fails.
        if any(errors):
            self.tx_rollback()
        else:
            try:
                self._flush_deleted()
                self.tx_finish()
                self.tx_commit()
            except:
                self.tx_rollback()
                raise
class BaseApp:
    """Application base class.

    Owns the configuration dict and the container's thread pool, runs the
    security/model bootstrap, and creates per-request contexts.
    """

    # Context class instantiated per request; subclasses may override.
    context_class = BaseContext

    def __init__(self, container, config=None):
        self.container = container
        self.threadpool = container.get_threadpool()
        self.config = config = config or {}
        self.debug = bool(config.get('debug'))
        self.configure()
        # Security and model compilation run after the subclass hook so
        # configure() can influence them.
        from fenton import security
        security.initialize(self)
        from fenton import model
        model.compile_all()

    def configure(self):
        # Subclass hook, called before security/model initialization.
        pass

    def get(self, id):
        """Look up a registered model entry by id."""
        import fenton.model
        return fenton.model.REGISTRY.get(id)

    def source_dirs(self):
        """Directories containing app sources: the subclass's and fenton's."""
        import os, sys
        mod = sys.modules[self.__class__.__module__]
        return [os.path.dirname(mod.__file__),
                os.path.dirname(__file__)]

    def create_context(self, request):
        """Build a fresh context (of context_class) for `request`."""
        return self.context_class(request)

    def get_login_url(self, quoted_url):
        # No login flow by default; subclasses return a redirect URL.
        return None

    def call_async(self, f):
        """Run f on the thread pool, or inline when no pool is configured."""
        if not self.threadpool:
            return f()
        def work(f=f, name=str(f), tracker=self.threadpool.worker_tracker):
            # Record the task name against the worker thread for monitoring.
            tracker[thread.get_ident()][1] = name
            f()
        self.threadpool.add_task(work)
class Shell:
    """Thin wrapper around an app exposing its config and debug flag."""
    def __init__(self, app):
        self.app = app
    @property
    def config(self):
        return self.app.config
    @property
    def debug(self):
        return self.app.debug
class Request:
    """Abstract request: key names shared with the web encoding layer,
    authentication hooks, and screen-history bookkeeping."""
    # Session/query-string key names.
    user_key = '~'
    model_key = '_'
    bind_key = '__'
    history_key = '^'
    context_key = '$'
    context_prefix = '_'
    history = None
    max_history = 32
    def new_context(self):
        return self.app.create_context(self)
    def close(self):
        pass
    def authenticate(self, user):
        user.authenticate(self)
        self.user = user
    def unauthenticate(self, user):
        user.unauthenticate(self)
        self.user = None
    def add_history(self, screen):
        """Append *screen* to the navigation history.

        A screen with the same path as the latest entry replaces it (so a
        revisit updates args instead of stacking duplicates); the list is
        capped at max_history entries."""
        history = self.get_history()
        if history is None:
            return
        latest = history and history[-1]
        path = screen.get_path()
        if not path:
            return
        args = screen.get_args()
        if latest and latest[0] == path:
            history[-1] = (path, args)
        else:
            history.append((path, args))
        while len(history) > self.max_history:
            del history[0]
    def get_history(self):
        return self.history
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import re
import time
import uuid
from sqlalchemy import event
from sqlalchemy.ext.declarative import DeclarativeMeta
from fenton import util
from fenton import view
from fenton import model
from fenton import types
from fenton import getmeta
from fenton import timestamp
from fenton.db import METADATA, sql, orm
GUID_ATTR = '_object_guid'
COMMENT_ATTR = '__comment__'
DbModel = None
def make_code(name, corpus, r=re.compile('[^A-Z]+')):
    """Derive a short upper-case code for *name* not already in *corpus*.

    The name is upper-cased and split into words; prefixes of each word are
    grown one character at a time (round-robin across words) until an unused
    combination appears.  If every combination is taken, numeric suffixes
    are appended as a last resort.
    """
    words = r.split(name.upper())
    n = len(words)
    # Short names start with two-letter prefixes, longer ones with one.
    first = 1 if n < 3 else 0
    taken = [first] * n
    limits = [len(w) for w in words]
    cursor = n - 1
    while taken != limits:
        name = ''.join(w[:taken[i] + 1] for (i, w) in enumerate(words))
        if name not in corpus:
            return name
        if taken[cursor] < limits[cursor]:
            taken[cursor] += 1
        cursor = (cursor + 1) % n
    # All prefix combinations exhausted: disambiguate numerically.
    count = 0
    while name in corpus:
        count += 1
        name += str(count)
    return name
def child_relation(child_ref, backref=None, **kw):
    """ORM relation for parent->child links: children are deleted with the
    parent (delete-orphan cascade) and the child-side backref is the
    '_<name>_attached' slot consumed by ParentProperty."""
    if backref is not None:
        backref = '_%s_attached' % backref
    return orm.relation(child_ref,
                        backref=backref,
                        cascade='all,delete-orphan',
                        passive_deletes=True,
                        **kw)
def parent_property(*args, **kw):
    """Factory for ParentProperty descriptors."""
    return ParentProperty(*args, **kw)
def seeds(Class):
    """Decorator registering the function as a seed-data loader run after
    *Class*'s table is created."""
    def decorator(f):
        _register_seed_func(Class, f)
        return f
    return decorator
def _insert_metainfo(cx, Class=None):
    """Seed hook: ensure a meta_object_class row exists mapping *Class*'s
    classid to its table name (skipped for classes without a classid)."""
    if Class is None:
        Class = cx.Class
    classid = getmeta(Class).classid
    if not classid:
        return
    tablename = Class.__tablename__
    table = MetaObjectClass.__table__
    # Already registered?
    if table.select(table.c.classid==classid, bind=cx.bind).scalar():
        return
    print 'INSERT metadata (%s, %s)' % (classid, tablename)
    q = table.insert(bind=cx.bind)
    q.values(classid=classid, tablename=tablename).execute()
def _register_seed_func(Class, func):
    """Run *func* with a SeedContext right after *Class*'s table is created."""
    @event.listens_for(Class.__table__, 'after_create')
    def seed(table, bind, *x, **y):
        func(SeedContext(Class, table, bind))
        bind.db.flush()
def _print_create(t, *x, **y):
    # DDL event listener: trace table creation on the console.
    print 'CREATE TABLE', t.name
def _itercsv(f, filter=None, skipheader=True):
import csv
header = 0
for row in csv.reader(f):
if skipheader and not header:
header = 1
continue
if filter:
row = filter(row)
if row:
yield row
def GUID():
    """Column type for object GUIDs: PostgreSQL UUID round-tripping uuid.UUID."""
    from sqlalchemy.dialects import postgresql
    return postgresql.UUID(as_uuid=True)
def _prepare_attrs(self, attrs):
    """Metaclass helper: collect fenton.types.Type declarations from the
    class body into __attrs__, instrument them, and normalize a flat
    __fields__ tuple into the nested (('', '', names),) form."""
    self.__attrs__ = attrs.get('__attrs__', {})
    for k, v in attrs.iteritems():
        # A trailing comma turns the declaration into a 1-tuple; catch it.
        if isinstance(v, tuple) and len(v)==1 and isinstance(v[0], types.Type):
            raise TypeError('errant comma at %s.%s' % (self.__name__, k))
        if isinstance(v, types.Type):
            v.instrument(self, k)
            self.__attrs__[k] = v
    fields = getattr(self, '__fields__', None) or ()
    if fields and isinstance(fields[0], basestring):
        fields = (('', '', fields),)
    self.__fields__ = fields
def _init_metainfo(self):
    """Metaclass helper for DbModel subclasses: add the GUID column, the
    _meta relation to MetaObjectInstance, and an optional comment field."""
    if DbModel in self.__bases__ and GUID_ATTR not in self.__dict__:
        guid_col = sql.Column(GUID_ATTR, GUID(), nullable=False, unique=True)
        # Force the GUID column to sort first in the table definition.
        guid_col._creation_order = -1
        setattr(self, GUID_ATTR, guid_col)
        MI = MetaObjectInstance
        # View-only join to the audit metadata row with the same GUID.
        self._meta = orm.relation(MI,
                                  primaryjoin=(guid_col==MI.object_guid),
                                  foreign_keys=[MI.object_guid],
                                  #innerjoin=True,
                                  #lazy='joined',
                                  passive_deletes='all',
                                  uselist=False,
                                  viewonly=True,
                                  cascade=None)
    mattrs = self.__attrs__
    if COMMENT_ATTR not in mattrs and COMMENT_ATTR not in self.__dict__:
        setattr(self, COMMENT_ATTR, None)
        mattrs[COMMENT_ATTR] = types.Text(required=False,
                                          showview=False,
                                          label='Add comment')
    # Unless comments are disabled (attr explicitly False), append a
    # comment-entry fieldset to the form layout.
    if getattr(self, COMMENT_ATTR, False) is not False:
        self.__fields__ = tuple(list(self.__fields__)
                                + [(COMMENT_ATTR, ' ', (COMMENT_ATTR,))])
def _init_events(self):
    """Wire per-class events: metadata seeding, context attach on ORM load,
    and CREATE TABLE tracing."""
    _register_seed_func(self, _insert_metainfo)
    event.listen(self, 'load', _set_context)
    event.listen(self.__table__, 'before_create', _print_create)
def _set_context(obj, _):
    # ORM 'load' listener: bind the loading session's context to the object.
    m = getmeta(obj, False)
    if m:
        m.set_context(orm.object_session(obj).context, obj)
class ModelMetaclass(type):
    """Metaclass for non-DB models: runs _prepare_attrs over the class body."""
    def __init__(self, name, bases, attrs):
        _prepare_attrs(self, attrs)
        return type.__init__(self, name, bases, attrs)
class DbMetaclass(DeclarativeMeta):
    """Declarative metaclass for DbModel subclasses.

    The first class created (DbModel itself, while the module-level DbModel
    global is still None) gets plain declarative treatment; every later
    subclass is run through attr preparation, metainfo setup and event
    wiring.
    """
    def __init__(self, name, bases, attrs):
        superinit = DeclarativeMeta.__init__
        if DbModel is None:
            # Bootstrapping DbModel itself.
            superinit(self, name, bases, attrs)
        else:
            _prepare_attrs(self, attrs)
            _init_metainfo(self)
            superinit(self, name, bases, attrs)
            _init_events(self)
    def __call__(Class, *args, **kw):
        # BUG FIX: constructor arguments were previously dropped, so
        # instantiating a model with any argument raised TypeError here.
        # Forward them to the declarative constructor.
        self = DeclarativeMeta.__call__(Class, *args, **kw)
        if getattr(Class, GUID_ATTR, None) is not None:
            # Every freshly-constructed instance gets its own object GUID.
            setattr(self, GUID_ATTR, uuid.uuid4())
        return self
class ParentProperty:
    """Descriptor exposing an object's parent reference.

    The value lives in one of two instance slots: '_<name>_attached'
    (maintained by the ORM backref created in child_relation) or
    '_<name>_unattached' (set directly through this descriptor).  The
    attribute name is discovered lazily by scanning the owning class.
    """
    def __init__(self):
        self.name = None
    @property
    def attached_name(self):
        return '_%s_attached' % self.name
    @property
    def unattached_name(self):
        return '_%s_unattached' % self.name
    def __compile(self, Class):
        # Find which class attribute this descriptor instance is bound to.
        for klass in Class.mro():
            for attr, value in klass.__dict__.items():
                if value is self:
                    self.name = attr
                    return
        raise TypeError('Name not found for parent_property on %s' % Class)
    def __get__(self, obj, Class):
        if self.name is None:
            self.__compile(Class)
        if obj is None:
            return getattr(Class, self.attached_name)
        attached = getattr(obj, self.attached_name)
        if attached:
            return attached
        return getattr(obj, self.unattached_name, None)
    def __set__(self, obj, x):
        if self.name is None:
            self.__compile(type(obj))
        setattr(obj, self.unattached_name, x)
class Lookup:
    """Mixin for code/label lookup tables: default ordering, uniqueness
    check on the code column, and automatic code generation from the label."""
    # Attribute used for ordering and as source for make_code().
    __order_attr__ = 'label'
    def __title__(self):
        return getattr(self, self.__order_attr__, None) or '(new?)'
    @classmethod
    def __filter__(Class, q):
        return q.order_by(getattr(Class, Class.__order_attr__))
    @classmethod
    def symbols(Class, cx):
        """Map of code -> instance for every row, as a dictobject."""
        key = getmeta(Class).key[0]
        db = getmeta(Class).get_db(cx)
        q = db.query(Class)
        return util.dictobject(dict((getattr(l, key), l) for l in q))
    def after_update_event(self, form):
        """Reject a code already used by a *different* row (GUID differs)."""
        if not form:
            return
        M = getmeta(self)
        T = self.__table__
        C = T.c[M.key[0]]
        c = getattr(self, M.key[0])
        U = T.c._object_guid
        u = self._object_guid
        q = sql.select([sql.exists().where((C==c) & (U!=u))])
        if self._cx.db.execute(q).scalar():
            raise types.Invalid('That code is already used')
    def before_store_event(self):
        if not self._is_persistent():
            self.update_code()
    def update_code(self):
        """Generate a unique code from the order attribute if none was set."""
        key = getmeta(self).key[0]
        if getattr(self, key):
            return
        name = getattr(self, self.__order_attr__)
        if name:
            sel = sql.select([getattr(type(self), key)])
            codes = set(r[0] for r in self._db.execute(sel))
            code = make_code(name, codes)
        else:
            code = None
        setattr(self, key, code)
class TransientMeta(model.ModelMeta):
    """Meta for transient (non-persisted) models."""
    def do_store_model(self, context, model):
        # Transient models implement their own storage action.
        return model.do_store()
    def do_thaw_model(self, context, obj):
        super(TransientMeta, self).do_thaw_model(context, obj)
        # Give every declared attribute a chance to restore itself.
        m = getmeta(obj)
        for k in m.attributes:
            m.attributes[k].send_thaw(context, obj)
class DbMeta(model.DbMeta):
    """Meta for SQLAlchemy-backed models; adds polymorphic-mapping helpers."""
    get_db = lambda x,cx: cx.db
    def get_polymorphic_attr(self):
        """Name of the discriminator column, or None if not polymorphic."""
        mapper = orm.class_mapper(self.model_class)
        on = mapper.polymorphic_on
        return on is not None and on.key or None
    def get_polymorphic_id(self):
        mapper = orm.class_mapper(self.model_class)
        return mapper.polymorphic_identity
    def get_polymorphic_meta(self, polyid):
        """Meta of the subclass mapped to discriminator value *polyid*."""
        mapper = orm.class_mapper(self.model_class)
        return getmeta(mapper.polymorphic_map[polyid].class_)
    # unused
    def polymogrify(self, obj):
        """Forcibly rebase *obj* onto this meta's class (ORM state included)."""
        obj.__class__ = self.model_class
        from sqlalchemy.orm.attributes import instance_state, manager_of_class
        instance_state(obj).manager = manager_of_class(self.model_class)
        return obj
    # unused
    def get_polymorphic_keys(self):
        mapper = orm.class_mapper(self.model_class)
        return mapper.polymorphic_map.keys()
    # unused
    def get_polymorphic_update(self, obj, newid):
        """UPDATE statement switching *obj*'s discriminator to *newid*."""
        mapper = orm.object_mapper(obj).base_mapper
        attr = self.get_polymorphic_attr()
        update = mapper.local_table.update().values({attr:newid})
        for col in mapper.primary_key:
            update = update.where(col == (getattr(obj, col.key)))
        return update
class ModelBase:
    """Common behavior for all models: context access, key-based equality,
    display helpers and attribute-set event dispatch."""
    # Shortcuts to the context the object is bound to.
    _cx = property(lambda x:getmeta(x).get_context(x))
    _db = property(lambda x:x._cx.db)
    _rq = property(lambda x:x._cx.request)
    _meta = None
    def __eq__(self, other):
        # NOTE(review): __eq__ without __hash__; under Python 2 the default
        # identity hash remains, so equal objects may hash differently.
        if type(self) is not type(other):
            return False
        key = getmeta(self).key
        k1 = [getattr(self, k) for k in key]
        k2 = [getattr(other, k) for k in key]
        return k1 == k2
    def __str__(self):
        """Title when available, else the key values, else the meta title."""
        meta = getmeta(self)
        ids = [getattr(self, k, None) for k in meta.key]
        if all(x is not None for x in ids):
            title = getattr(self, '__title__', None)
            if isinstance(title, basestring):
                return title
            elif title:
                return title() or ''
            else:
                return ', '.join(map(str, ids))
        else:
            return meta.title
    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, str(self))
    def set_attribute_event(self, key, value, form):
        """Dispatch to a set_attribute_<key> hook if one exists; returns
        whether a hook handled the assignment."""
        f = getattr(self, 'set_attribute_'+key, None)
        if f:
            f(value, form)
        return bool(f)
@model.IterMeta.declare(metaclass=ModelMetaclass)
class Iter(ModelBase, model.ModelIterator):
    """Base for iterator models (collections rendered as screens)."""
    pass
@TransientMeta.declare(metaclass=ModelMetaclass)
class Transient(ModelBase):
    """Base for models that live only for the request/session."""
    pass
@DbMeta.declare(metadata=METADATA, metaclass=DbMetaclass)
class DbModel(ModelBase):
    """Base for persistent models: attribute changes and deletions are
    recorded through the context's history logger."""
    def set_attribute_event(self, key, value, form):
        if key == COMMENT_ATTR:
            # Comments are only logged, never stored as a real attribute.
            self._cx.history_logger.log_comment(self, value)
            return True
        meta = getmeta(self)
        old = getattr(self, key)
        if not meta.attributes[key].is_equal(value, old):
            new_str = meta.attributes[key].format_value(value, self._cx)
            if old is None:
                old_str = '(None)'
            else:
                old_str = meta.attributes[key].format_value(old, self._cx)
            self._cx.history_logger.log_update(self, key, old_str, new_str)
        return ModelBase.set_attribute_event(self, key, value, form)
    def after_delete_event(self):
        self._cx.history_logger.log_delete(self)
    def _is_persistent(self):
        """True once the instance has an identity in the database."""
        state = orm.attributes.instance_state(self)
        return state.key is not None
class Nav(Transient):
    """Navigation node: renders links to its viewable children."""
    __classid__ = None
    __view__ = view.NavScreen
    def get_links(self):
        """Yield child views that pass their security check and have a URL."""
        for x in self.get_children():
            v = self._cx.get_view(x)
            if v.check() and v.get_url():
                yield v
    def get_children(self):
        return sorted([x for x in getmeta(self).get_children(self._cx)],
                      key=lambda x:str(x))
class CsvData(Transient):
    """Transient model rendered as a downloadable CSV attachment."""
    __view__ = view.Attachment
    def attachment_type(self):
        return 'text/csv; charset=UTF-8'
    def write_attachment_data(self, file):
        """Write csv_rows() (supplied by subclasses) as UTF-8 encoded CSV."""
        import csv
        w = csv.writer(file)
        for row in self.csv_rows():
            row = [unicode(x).encode('UTF-8') for x in row]
            w.writerow(row)
class Comment(Transient):
    """Stand-alone comment form attached to another model object."""
    __classid__ = None
    __edit__ = view.CommentScreen
    __fields__ = (
        'comment',
        )
    obj = types.String(editable=False)
    comment = types.Text(required=True)
    def do_store(self):
        # Log the comment against the target, then re-store the target.
        self._cx.history_logger.log_comment(self.obj, self.comment)
        getmeta(self.obj).store_model(self._cx, self.obj)
class SeedContext:
    """Helper handed to seed functions after a table is created: wraps the
    class meta, table and bind, and proxies everything else to the context."""
    def __init__(self, Class, table, bind):
        self.meta = getmeta(Class)
        self.Class = Class
        self.table = table
        self.bind = bind
        self.db = bind.db
        self.context = bind.context
        self.app = bind.context.app
    def __getattr__(self, name):
        # Fall through to the request context for anything not defined here.
        return getattr(self.context, name)
    def seed(self, data, *columns):
        """Insert rows from a '|'-separated text block; the first line is
        used as column names when *columns* is not given."""
        data = [line.strip().split('|')
                for line in data.strip().splitlines()]
        if not columns:
            columns = data[0]
            data = data[1:]
        for row in data:
            self.insert(**dict(zip(columns, row)))
    def insert(self, **values):
        """Insert one row, via the model layer when the class declares
        attributes, else straight into the table."""
        print 'INSERT', self.table.name, '\n   ',
        print ',\n    '.join('%s=%s'%v for v in values.items()
                             if v[0]!= GUID_ATTR)
        if self.meta.attributes:
            return self.meta.construct_model(self.context, **values)
        else:
            ins = self.table.insert(bind=self.bind)
            ins.values(values).execute()
    def run(self, *qs, **kw):
        """Execute raw statements in order; returns the last result."""
        rs = None
        for stmt in qs:
            print stmt
            rs = self.bind.execute(stmt, **kw)
        return rs
    def copy(self, rowiter, columns=None):
        """Bulk-insert rows positionally matched against the table columns.

        NOTE(review): a GUID is appended to each row, yet the value loop
        skips the GUID column while indexing rows by the full column
        enumeration -- verify the positional mapping for tables where the
        GUID column is not last."""
        has_guid = GUID_ATTR in self.table.c
        if has_guid:
            if columns:
                columns.append(GUID_ATTR)
            def addguid(rowiter):
                for row in rowiter:
                    yield list(row) + [str(uuid.uuid4())]
            rowiter = addguid(rowiter)
        if columns:
            columns = '(%s)' % ','.join(columns)
        for row in rowiter:
            values = {}
            for i, col in enumerate(self.table.columns):
                if col.key != GUID_ATTR:
                    values[col.key] = row[i] or None
            self.insert(**values)
    def copycsv(self, f, columns=None, filter=None, skipheader=True):
        """Like copy() but sourcing rows from a CSV file object."""
        csv = _itercsv(f, filter, skipheader)
        return self.copy(csv, columns)
### History stuff
class PendingEvent:
    """One buffered history event: everything that happened to a single
    object before the logger flushes it to the audit tables."""
    # 'UPDATE' or 'DELETE' while pending ('CREATE'/'COMMENT' decided at flush).
    action = None
    # Optional free-text comment attached to the event.
    comment = None
    def __init__(self):
        # attribute name -> (old string, new string)
        self.attrs = {}
class HistoryLogger:
    """Buffers model-change events per object GUID and writes them to the
    meta_object_* audit tables on flush()."""
    def __init__(self, context):
        self.context = context
        # GUID -> PendingEvent
        self.histories = {}
    def get_event(self, obj):
        """Return (creating if needed) the pending event for *obj*; None for
        objects without a classid or GUID (not auditable)."""
        classid = getmeta(obj).classid
        if not classid:
            return
        guid = getattr(obj, GUID_ATTR, None)
        if not guid:
            return
        e = self.histories.get(guid, None)
        if e is None:
            e = self.histories[guid] = PendingEvent()
            e.classid = classid
            e.guid = guid
            e.action = 'UPDATE'
            e.instance = obj._meta
            e.userid = self.context.user.username
        return e
    def log_update(self, obj, attr, old, new):
        e = self.get_event(obj)
        if e:
            e.attrs[attr] = (old, new)
    def log_comment(self, obj, text):
        e = self.get_event(obj)
        if e:
            e.comment = text is not None and text.strip() or None
    def log_delete(self, obj):
        e = self.get_event(obj)
        if e:
            e.action = 'DELETE'
    def store_event(self, pe, ts):
        """Persist one pending event, creating the MetaObjectInstance audit
        row for first-time objects."""
        instance = pe.instance
        action = pe.action
        if instance is None:
            # No audit row yet: treat as the object's creation.
            action = 'CREATE'
            instance = MetaObjectInstance()
            instance.object_guid = pe.guid
            instance.classid = pe.classid
            instance.created_at = ts
            instance.created_by = pe.userid
            # NOTE(review): updated_at/by are stamped only on CREATE here;
            # existing rows are never refreshed -- confirm whether these two
            # lines should sit outside the `if`.
            instance.updated_at = ts
            instance.updated_by = pe.userid
        ev = MetaObjectEvent()
        ev.action = action
        ev.timestamp = ts
        ev.userid = pe.userid
        ev.comment_text = pe.comment
        if pe.attrs:
            for k, (old, new) in pe.attrs.items():
                at = MetaObjectAttr()
                at.attr_name = k
                #at.attr_old = old
                #at.attr_new = new
                at.attr_value = new
                ev.attrs.append(at)
        ev.instance = instance
        getmeta(ev).store_model(self.context, ev)
    def flush(self):
        """Write out all buffered events; events with no attribute changes
        survive only as comments or deletes."""
        history = self.histories
        if not history:
            return
        self.histories = {}
        ts = timestamp.utcnow()
        for guid, event in history.iteritems():
            if not event.attrs:
                if event.comment:
                    event.action = 'COMMENT'
                elif event.action != 'DELETE':
                    continue
            self.store_event(event, ts)
class MetaObjectClass(DbModel):
    """Registry row mapping a model classid to its table name."""
    __classid__ = _object_guid = None
    __tablename__ = 'meta_object_class'
    classid = sql.Column(sql.String, primary_key=True)
    tablename = sql.Column(sql.String, nullable=False)
    instances = child_relation('MetaObjectInstance', back_populates='_class')
    meta = property(lambda x:DbMeta.meta_map[x.classid])
class MetaObjectInstance(DbModel):
    """Audit row tracking one persistent object: creation/update stamps plus
    its event history."""
    __classid__ = _object_guid = None
    __tablename__ = 'meta_object_instance'
    object_guid = sql.Column(GUID(), primary_key=True)
    version = sql.Column(sql.BigInteger(), nullable=False)
    classid = sql.Column(sql.String,
                         sql.ForeignKey('meta_object_class.classid'),
                         nullable=False)
    created_at = sql.Column(sql.DateTime(timezone=True), nullable=False)
    created_by = sql.Column(sql.String, nullable=False)
    updated_at = sql.Column(sql.DateTime(timezone=True), nullable=False)
    updated_by = sql.Column(sql.String, nullable=False)
    events = child_relation('MetaObjectEvent',
                            order_by=lambda:MetaObjectEvent.timestamp,
                            back_populates='instance')
    comments = property(lambda x:[e
                                  for e in x.events
                                  if e.comment_text])
    _class = orm.relation('MetaObjectClass', back_populates='instances')
    meta = property(lambda x:DbMeta.meta_map[x.classid])
    __mapper_args__ = {
        'version_id_col': version,
        # Microsecond wall-clock as the optimistic-lock version.
        'version_id_generator': lambda x:int(time.time() * 1000000)
    }
    def get_object(self):
        """Load the live object this audit row describes (or None)."""
        q = self._db.query(self.meta.model_class)
        return q.filter_by(_object_guid=self.object_guid).first()
    _obj = util.undefined
    @property
    def obj(self):
        # Lazy cache of get_object() (caches None too).
        if self._obj is util.undefined:
            self._obj = self.get_object()
        return self._obj
class MetaObjectEvent(DbModel):
    """One audit event (create/update/delete/comment) on an instance."""
    __classid__ = _object_guid = None
    __tablename__ = 'meta_object_event'
    object_guid = sql.Column(GUID(),
                             sql.ForeignKey('meta_object_instance.object_guid'),
                             primary_key=True)
    userid = sql.Column(sql.String(), primary_key=True)
    timestamp = sql.Column(sql.DateTime(timezone=True), primary_key=True)
    action = sql.Column(sql.String(), nullable=False)
    comment_text = sql.Column(sql.Text())
    instance = orm.relation('MetaObjectInstance', back_populates='events')
    attrs = child_relation('MetaObjectAttr', back_populates='event')
    obj = property(lambda x:x.instance.obj)
    meta = property(lambda x:x.instance.meta)
    @property
    def when(self):
        """Event time formatted in the context's timezone."""
        ts = self.timestamp.astimezone(self._cx.tz)
        return ts.strftime('%d/%m/%y %H:%M')
class MetaObjectAttr(DbModel):
    """Per-attribute change record belonging to one event."""
    __classid__ = _object_guid = None
    __tablename__ = 'meta_object_attr'
    object_guid = sql.Column(GUID(), primary_key=True)
    userid = sql.Column(sql.String(), primary_key=True)
    timestamp = sql.Column(sql.DateTime(timezone=True), primary_key=True)
    attr_name = sql.Column(sql.String(), nullable=False, primary_key=True)
    attr_value = sql.Column(sql.String(), nullable=False)
    #attr_old = sql.Column(sql.String(), nullable=False)
    #attr_new = sql.Column(sql.String(), nullable=False)
    event = orm.relation('MetaObjectEvent', back_populates='attrs')
    __table_args__ = (sql.ForeignKeyConstraint(['userid', 'timestamp', 'object_guid'],
                                               ['meta_object_event.userid',
                                                'meta_object_event.timestamp',
                                                'meta_object_event.object_guid']),)
    @property
    def summary(self):
        """Human-readable 'Label: value' line for history display."""
        return '%s: %s' % (self.label, self.attr_value or 'None')
    @property
    def meta(self):
        # Declared attribute type for this attr name, if still declared.
        return self.event.instance.meta.attributes.get(self.attr_name)
    @property
    def label(self):
        return self.meta and self.meta.get_label() or self.attr_name
# Disabled draft/lock tables kept for reference; never created.
if 0:
    class MetaObjectDraft(DbModel):
        __classid__ = _object_guid = None
        __tablename__ = 'meta_object_draft'
        object_guid = sql.Column(GUID(), primary_key=True)
        userid = sql.Column(sql.String(), nullable=False)
        timestamp = sql.Column(sql.DateTime(timezone=True), nullable=False)
        data = sql.Column(sql.LargeBinary(), nullable=False)
    class MetaObjectLock(DbModel):
        __classid__ = _object_guid = None
        __tablename__ = 'meta_object_lock'
        object_guid = sql.Column(GUID(), primary_key=True)
        draft_guid = sql.Column(GUID(),
                                sql.ForeignKey('meta_object_draft.object_guid'),
                                nullable=False)
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import json
import urlparse
import os.path as op
import cPickle as pickle
import webob
import webob.exc
from weberror.errormiddleware import ErrorMiddleware
from fenton import app
from fenton import util
from fenton import types
from fenton import getmeta
from fenton import logging
from fenton import security
LONGTIMEAGO=-10**9
CHUNK_SIZE = 4096 * 16
DEFAULT_ENCODING = 'UTF-8'
def stack(app):
    """Build the full WSGI stack around *app*: rendering shell, error and
    session middleware, cookie cleanup, optional static file serving,
    URL-prefix handling, HTTPS forcing and debug helpers."""
    if app.debug:
        print app
    config = app.config
    config.setdefault('beaker.session.key', 's')
    config.setdefault('beaker.session.secret', 'fenton-very-secret')
    config['beaker.session.data_dir'] = config['work_dir']
    # Optional signing/encryption of pickled session contexts.
    encrypt_key = config.get('fenton.encrypt_key')
    sign_key = config.get('fenton.sign_key')
    if sign_key:
        signer = security.get_signer(sign_key)
    else:
        logging.log.info('no sign key, not signing session')
        signer = None
    if encrypt_key:
        crypter = security.get_crypter(encrypt_key)
    else:
        logging.log.info('no encrypt key, not encrypting session')
        crypter = None
    dumper = Dumper(Pickler, signer, crypter)
    renderer = mako_renderer(app)
    stack = shell = Shell(app, renderer, dumper)
    stack = error_middleware(stack, config)
    stack = session_middleware(stack, config)
    stack = clean_cookies(stack)
    # Serve ./static dirs ourselves unless a CDN prefix is configured.
    if not config.get('static_prefix'):
        from paste.cascade import Cascade
        from paste.urlparser import StaticURLParser
        dirs = [op.join(d, 'static') for d in app.source_dirs()]
        statics = [StaticURLParser(d) for d in dirs]
        stack = Cascade(statics + [stack])
    if shell.prefix:
        stack = prefix_middleware(stack, shell.prefix)
    stack = force_https(stack)
    stack = zap_favicon(stack)
    # Optional artificial latency for testing slow connections.
    delay = config.get('delay_response', 0)
    if delay:
        stack = delay_response(stack, float(delay))
    return stack
def debug_request(rq):
    """Log method, path and headers of *rq* with console colors."""
    url = rq.url[len(rq.host_url):]
    from fenton import console
    parts = ['%s %s' % (rq.method, console.colors.Green(url))]
    for name, value in sorted(rq.headers.items()):
        parts.append('%s: %s' % (console.colors.LightGray(name), value))
    logging.log.debug('\n'.join(parts))
def clean_cookies(app):
    """WSGI middleware that rewrites ':' in the Cookie header to a
    placeholder before the session layer parses it."""
    def middleware(environ, start_response):
        raw = environ.get('HTTP_COOKIE')
        if raw:
            environ['HTTP_COOKIE'] = raw.replace(':', '__colon__')
        return app(environ, start_response)
    return middleware
def zap_favicon(app):
    """WSGI middleware answering /favicon.ico with an empty 404 so the
    request never reaches the application."""
    def middleware(environ, start_response):
        if environ['PATH_INFO'] == '/favicon.ico':
            start_response('404 Not found',
                           [('content-type', 'text/plain'), ('Content-Length', '0')])
            return []
        return app(environ, start_response)
    return middleware
def delay_response(app, s=1):
    """WSGI middleware adding a random sleep of up to *s* seconds per
    request, to simulate a slow connection."""
    def middleware(environ, start_response):
        import time, random
        time.sleep(random.random() * s)
        return app(environ, start_response)
    return middleware
def force_https(app):
    """WSGI middleware switching the URL scheme to https when the proxy
    sets the X-Force-Https header."""
    def middleware(env, resp):
        if env.get('HTTP_X_FORCE_HTTPS'):
            env['wsgi.url_scheme'] = 'https'
        return app(env, resp)
    return middleware
def session_middleware(*args, **kw):
    """Beaker session middleware (configured via 'beaker.session.*' keys)."""
    from beaker.middleware import SessionMiddleware
    return SessionMiddleware(*args, **kw)
def prefix_middleware(app, prefix):
    """Mount *app* under *prefix*: the prefix is moved into SCRIPT_NAME and
    the bare root is redirected to the prefix."""
    import re
    matcher = re.compile('^' + prefix)
    def middleware(env, start_response):
        url = env['PATH_INFO']
        if not url or url == '/':
            start_response('301 Elsewhere', [('Location', prefix)])
            return ['Location: ' + prefix]
        if matcher.match(url):
            env['SCRIPT_NAME'] = prefix
            env['PATH_INFO'] = matcher.sub('', url) or '/'
        return app(env, start_response)
    return middleware
def error_middleware(app, config):
    """Interactive debugger in debug mode, email-notifying handler otherwise."""
    E = config.get('debug') and debug_middleware or ErrorNotifier
    return E(app, config)
def debug_middleware(app, config):
    """weberror EvalException wrapper that also logs response status/headers
    (and echoes XHR bodies) for every non-debugger request."""
    from weberror.evalexception import EvalException
    app = EvalException(app, config, xmlhttp_key=Request.xmlhttp_key,
                        templating_formatters=[format_mako_error])
    from fenton import console
    g = console.colors.LightGray
    def debug_response(env, start_response):
        def new_start_response(status, headers, exc_info=None):
            lines = ['%s: %s' % (g(k), v) for (k,v) in sorted(headers)]
            logging.log.debug('%s\n%s\n', status, '\n'.join(lines))
            rq = env.get('fenton.request')
            if rq and rq.XHR:
                print rq.response.body
            return start_response(status, headers, exc_info)
        return new_start_response
    def _app(env, start_response):
        # Leave the debugger's own URLs alone.
        if not env['PATH_INFO'].startswith('/_debug/'):
            start_response = debug_response(env, start_response)
        return app(env, start_response)
    return _app
class ErrorNotifier(ErrorMiddleware):
    """weberror middleware configured from the app config; XHR clients get a
    JSON error body, browsers get the rendered server-error page."""
    def __init__(self, app, config):
        default_message = 'An internal server error occurred'
        kw = {}
        kw['debug'] = config.get('debug') or False
        kw['error_email'] = config['email_to']
        kw['smtp_server'] = config['smtp_server']
        kw['from_address'] = config['error_email_from']
        kw['error_subject_prefix'] = config.get('error_subject_prefix',
                                                'Web error: ')
        kw['error_message'] = config.get('error_message', default_message)
        kw['error_log'] = config.get('error_log', None)
        super(ErrorNotifier, self).__init__(app, config, **kw)
    def exception_handler(self, exc_info, environ):
        """Delegate to weberror's handler (its body is discarded), then build
        a client-facing response of our own."""
        discard = super(ErrorNotifier, self).exception_handler(exc_info, environ)
        request = environ['fenton.request']
        if request.XHR:
            request.response.headers['content-type'] = 'application/json'
            return json.dumps({'error': 'server error'})
        else:
            from fenton import widgets
            return request.render(widgets.server_error())
def mako_renderer(app):
    """Template lookup over each source dir's ./templates, escaping output
    by default and caching compiled modules in work_dir (except in debug)."""
    from mako.lookup import TemplateLookup
    from mako.runtime import Namespace
    # Nicer repr for template namespaces in debugger output.
    Namespace.__repr__ = lambda x: '<Namespace %s>' % x.name
    config = app.config
    dirs = [op.join(d, 'templates') for d in app.source_dirs()]
    work_dir = op.join(config['work_dir'], 'templates')
    return TemplateLookup(directories=dirs,
                          input_encoding=DEFAULT_ENCODING,
                          output_encoding=config.get('www_encoding',
                                                     DEFAULT_ENCODING),
                          imports=['import markupsafe'],
                          default_filters=['markupsafe.escape_silent'],
                          error_handler=mako_errorhandler,
                          module_directory=not app.debug and work_dir or None)
def mako_errorhandler(context, e):
    """Mark exceptions originating inside mako-generated code so that
    format_mako_error can render them specially, then re-raise."""
    import sys
    t = sys.exc_info()[2]
    # Walk to the innermost frame to see where the error really happened.
    while t.tb_next:
        t = t.tb_next
    f = t.tb_frame.f_globals.get('__file__')
    if f and 'mako' in f:
        try:
            e.__mako = True
        except:
            pass
    raise
def format_mako_error(e):
    '''Format a Mako exception as HTML'''
    import mako.exceptions as mx
    if hasattr(e, '__mako') or isinstance(e, mx.MakoException):
        template = mx.html_error_template()
        return template.render(full=False, css=True, error=e)
class Response(webob.Response):
    def __delete_cookie(self, key, path='/', domain=None):
        """Expire a cookie by setting it empty with a max-age far in the past."""
        self.set_cookie(key, '', path=path,
                        domain=domain,
                        max_age=LONGTIMEAGO)
    @property
    def user_headers(self):
        """Headers worth carrying across an abort/redirect: cookies and x-*."""
        return [(k, v) for (k, v) in self.headerlist
                if k.lower() == 'set-cookie'
                or k[:2].lower() == 'x-']
class Request(app.Request):
    """Concrete WSGI request wrapping a webob.Request, with session access,
    per-request context storage, rendering and redirect helpers."""
    xmlhttp_key = '__xhr'
    response_class = Response
    session_key = 'beaker.session'
    default_charset = DEFAULT_ENCODING
    # Limits for contexts stored in the session.
    context_maxlen = 10
    heartbeat_seconds = 300
    context_expire_seconds = 900
    app = property(lambda x:x.shell.app)
    dumper = property(lambda x:x.shell.dumper)
    config = property(lambda x:x.shell.app.config)
    __session = None
    @property
    def session(self):
        """Beaker session if the middleware provided one, else a plain dict
        private to this request."""
        if self.session_key in self.environ:
            return self.environ[self.session_key]
        if not self.__session:
            self.__session = {}
        return self.__session
    def __init__(self, environ, shell):
        self.environ = environ
        self.shell = shell
        self.charset = (shell.charset or self.default_charset)
        self.__request = webob.Request(environ, charset=self.charset)
        self.MSIE = 'MSIE' in self.headers.get('user-agent', '')
        self.XHR = self.headers.get('x-requested-with', '').lower() == 'xmlhttprequest'
    def __getattr__(self, k):
        # Anything not defined here falls through to the webob request.
        return getattr(self.__request, k)
    def __repr__(self):
        return '<%s at 0x%x [%r]>' % (self.__class__.__name__,
                                      id(self),
                                      self.__request)
    def __str__(self):
        return str(self.__request)
    def timer(self):
        """Elapsed time since 'fenton.started' was stamped on the environ."""
        t = self.environ['fenton.started']
        return util.timer() - t
    def log(self, msg, *args):
        # Prefix debug lines with the elapsed time in milliseconds.
        t = '[%0.1f] ' % (self.timer() * 1000)
        logging.log.debug(t+msg, *args)
    @property
    def tz(self):
        """Client timezone from the session/'tzoffset' cookie (minutes),
        falling back to the app default when unknown."""
        offset = self.session.get('tz')
        if offset is None:
            logging.log.debug('no tz in session, getting from cookie')
            offset = self.cookies.get('tzoffset')
            self.session['tz'] = offset
        if offset is None:
            logging.log.debug('no tzoffset in cookie!')
            return self.app.tz
        #if not offset:
        #    offset = self.params.get('____tzoffset')
        from fenton import timestamp
        # The offset is negated (JS getTimezoneOffset is west-positive --
        # TODO confirm that is where the cookie value comes from).
        return offset and timestamp.FixedOffset(-int(offset)) or None
    @util.lazyattr
    def context_id(self):
        """Context id embedded in the URL path ('/_<id>'), if any."""
        path = self.path_info and self.path_info[1:]
        if path and path.startswith(self.context_prefix):
            return path[len(self.context_prefix):]
    @util.lazyattr
    def vars(self):
        """Decoded request parameters, merged with a JSON body when the
        content type indicates one."""
        from fenton import form
        vars = util.varsobject()
        vars.update(form.decode(self.__request.params.mixed()))
        ct = self.content_type.lower()
        if 'text/javascript' in ct or 'application/json' in ct:
            body = self.body
            if body:
                try:
                    obj = json.loads(body)
                except ValueError, e:
                    logging.log.error('JSON decoding error: %s, %s', e, body)
                    raise
                vars.update(obj)
        return vars
    @util.lazyattr
    def action(self):
        """Name of the submitted action, taken from the bind ('__')
        parameter and removed from vars."""
        action = self.__request.params.get(self.bind_key, None)
        if not action and self.bind_key in self.vars:
            action = self.vars[self.bind_key]
        action = action and [x[0] for x in action.items() if x[1]]
        action = action and action[0]
        self.vars.pop(self.bind_key, None)
        return action
    __response = None
    @property
    def response(self):
        """Lazily-created Response using this request's charset."""
        if self.__response is None:
            self.__response = self.response_class()
            self.__response.charset = self.charset
        return self.__response
    @property
    def model_vars(self):
        """Parameters addressed to the model (the '_' namespace)."""
        return self.vars.get(self.model_key, {})
    @property
    def messages(self):
        # URL -> flash message shown on the next visit; kept in the session.
        return self.session.setdefault('messages', {})
    def refresh(self, screen):
        """Redirect back to *screen*, stashing its message for redisplay."""
        url = screen.get_url()
        if url:
            self.messages[url] = screen.screen_message
        else:
            url = self.context_location(screen.context)
        self.redirect(url)
    def render(self, widget):
        """Render *widget* through its mako template; uncachable widgets get
        no-cache headers."""
        if self.app.debug or not widget.cachable:
            cc = self.response.cache_control
            cc.no_store = cc.no_cache = cc.must_revalidate = True
            self.response.expires = 0
            self.response.pragma = 'no-cache'
        path = self.get_template_file(widget)
        tmod = self.shell.renderer.get_template(path)
        tdef = tmod.get_def(widget.template_name)
        return tdef.render(widget, request=self)
    def get_template_file(self, widget):
        # 'pkg.widget' -> '/pkg/widget.mako'
        return '/%s.mako' % widget.template_file.replace('.', '/')
    def update(self, request):
        """Adopt state from an earlier request (used when thawing contexts)."""
        self.__response = request.response
        self.vars.update(request.vars)
        self.user = request.user
        self.session.update(request.session)
    def context_location(self, context):
        return self.app_url('%s%s' % (self.context_prefix, context.id))
    def get_history(self):
        return self.session.setdefault(self.history_key, [])
def get_referrer(self):
if self.referer:
scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url)
if self.host == netloc and path.startswith(self.shell.prefix):
path = path[len(self.shell.prefix)+1:]
if path and not path.startswith(self.context_prefix):
e = self.environ.copy()
e['PATH_INFO'] = path
e['QUERY_STRING'] = query
ref = self.shell.new_request(e)
return (path, ref.model_vars)
    @property
    def stored_contexts(self):
        # {context id: (stored-at time, dumped or live context)}
        try:
            return self.session.setdefault(self.context_key, {})
        except:
            logging.log.error('Error getting contexts')
            logging.log.exception()
            return {}
    def decode_context(self):
        """Restore the context addressed by the URL from the session, or
        None when missing or undecodable."""
        id = self.context_id
        if id not in self.stored_contexts:
            logging.log.error('context %s not found in session %s', id, self.session.id)
            return None
        t, context = self.stored_contexts[id]
        if self.dumper:
            context = self.dumper.loads(context)
        if context is None:
            logging.log.error('Failed loading context from session')
            return None
        context.thaw(self)
        return context
    def store_context(self, context):
        """Serialize *context* into the session under its id."""
        self.purge_contexts()
        id = context.id
        if self.dumper:
            context = self.dumper.dumps(context)
        self.stored_contexts[id] = util.timer(), context
    def purge_contexts(self, max_age=None, max_len=None):
        """Drop stored contexts older than *max_age* once more than
        *max_len* are kept; the current context is never dropped."""
        if max_age is None:
            max_age = self.context_expire_seconds
        if max_len is None:
            max_len = self.context_maxlen
        if len(self.stored_contexts) <= max_len:
            return
        # .items() is a list snapshot under py2, so deleting while
        # iterating is safe here.
        for id, (t, _) in self.stored_contexts.items():
            if id == self.context_id:
                continue
            t = util.timer() - t
            if t > max_age:
                del self.stored_contexts[id]
    @util.property
    def user():
        """Current user, stored in the session; anonymous when unset."""
        def fget(self):
            return self.session.get(self.user_key) or security.ANONYMOUS
        def fset(self, u):
            self.session[self.user_key] = u
    def abort(self, code, **kw):
        """Raise the webob HTTP error for *code*, keeping cookie/x- headers."""
        Ex = webob.exc.status_map[code]
        ex = Ex(headers=self.response.user_headers, **kw).exception
        ex.body = ' %s ' % code
        raise ex
    def redirect(self, url, code=302):
        """Redirect to *url*; XHR clients get an x-location header instead
        of a real 3xx response."""
        if not url:
            raise ValueError('empty location')
        if not urlparse.urlsplit(url).scheme:
            url = self.app_url(url)
        if self.XHR:
            self.response.headers['x-location'] = str(url)
        else:
            self.abort(code=code, location=url)
    def absolute_url(self, url):
        """Fully-qualified URL for *url* (already-absolute URLs pass through)."""
        if urlparse.urlparse(url).scheme:
            return url
        return self.host_url + self.app_url(url)
    def app_url(self, url):
        """Prepend the shell's URL prefix unless already rooted or prefixed."""
        if url and url[0] == '/':
            return url
        prefix = self.shell.prefix
        if prefix and not url.startswith(prefix):
            url = ('/'.join((prefix, url))).replace('//', '/')
        return url
    def static_url(self, url):
        """Static asset URL: CDN prefix when configured, else app-relative."""
        if urlparse.urlparse(url).scheme:
            return url
        prefix = self.config.get('static_prefix')
        if prefix:
            return '%s/%s' % (prefix, url)
        else:
            return self.app_url(url)
    def redirect_login(self, url=None):
        """Send the user to login, round-tripping the original URL."""
        import urllib
        url = urllib.quote_plus(url or self.url)
        self.redirect(self.app.get_login_url(url))
def new(self, **kw):
    """Rebuild the current URL, overriding any of scheme/netloc/path/
    query/fragment via keyword arguments; falsy overrides fall back to
    the current URL's component.

    Fix: ``getattr(old.part)`` -> ``getattr(old, part)`` -- the original
    raised AttributeError on every call because SplitResult has no
    attribute named 'part'.
    """
    old = urlparse.urlsplit(self.url)
    parts = 'scheme', 'netloc', 'path', 'query', 'fragment'
    new = [kw.get(part) or getattr(old, part) for part in parts]
    return urlparse.urlunsplit(new)
class Closing:
    """Wrap an iterable with a close() callback, as the WSGI spec expects:
    the server iterates the body, then calls close() for cleanup."""

    def __init__(self, it, close):
        self.it = it
        self._close = close

    def __iter__(self):
        return iter(self.it)

    def close(self):
        return self._close()
class BaseHandler:
    """Base request handler: authorize, invoke the action, render.

    Subclasses supply ``priv`` (privilege to check), ``obj`` (object the
    privilege applies to), ``context`` and ``invoke()``.
    """
    # Convenience proxy to the request held by the handler's context.
    request = property(lambda x: x.context.request)
    # Flipped to True once invoke() has run; used by subclass __repr__s.
    invoked = False

    def handle(self):
        """Run the full pipeline: check, invoke, render."""
        self.check()
        self.invoke()
        self.invoked = True
        self.render()

    def check(self):
        """Check self.priv against the context; raise NotAllowed on failure.

        The first check runs without an object; security may demand a
        second check against the concrete object via RecheckWithObject.
        """
        try:
            ok = security.check(self.priv, self.context, None)
        except security.RecheckWithObject:
            ok = security.check(self.priv, self.context, self.obj)
        if not ok:
            logging.log.error('security failed: %s %s', self.priv, self.context.user)
            raise security.NotAllowed

    def render(self):
        """Write the screen to the response: JSON vars for XHR, a redirect
        after a context-bound POST (post/redirect/get), else rendered markup."""
        if not self.context.screen:
            raise AssertionError('Nothing to render')
        if self.request.XHR:
            self.request.response.headers['content-type'] = 'application/json'
            rsp = json.dumps(self.context.screen.vars)
        elif self.request.method == 'POST' and self.request.context_id:
            # redirect() aborts for non-XHR requests, so rsp is not read here
            self.request.redirect(self.request.url)
        else:
            rsp = self.context.screen.render()
        # screen vars are one-shot: cleared after each render
        self.context.screen.vars = {}
        if rsp is not None:
            self.request.response.body = rsp

    def finish(self):
        """Persist the context so follow-up requests can resume it."""
        self.request.store_context(self.context)
class NewScreenHandler(BaseHandler):
    """Handles a fresh GET for a model screen (no continued context)."""
    # Privilege required: the meta's 'view' privilege.
    priv = property(lambda x: x.meta.get_priv('view'))

    def __init__(self, context, meta, arg):
        self.context = context
        self.meta = meta
        self.arg = arg  # opaque id used to reconstruct the model object

    def __repr__(self):
        if self.invoked:
            return 'Got::%s' % self.context.screen
        else:
            return 'Get::%s::view' % self.meta

    # Lazily reconstructed model object (name-mangled cache slot).
    __obj = None

    @property
    def obj(self):
        if self.__obj is None:
            obj = self.meta.reconstruct(self.arg, self.context)
            self.__obj = obj
            # the reconstructed object may be a subclass; adopt its meta
            self.meta = getmeta(obj)
        return self.__obj

    def invoke(self):
        """Build the view screen for the object and wire up request state
        (incoming vars, history, referer, one-shot flash message)."""
        cx = self.context
        screen = getmeta(self.obj).get_view(cx, self.obj, cx.screen)
        if screen is None:
            raise security.NotAllowed
        cx.screen = screen
        args = cx.request.model_vars
        if args:
            screen.update(args)
        cx.request.add_history(screen)
        screen.referer = cx.request.get_referrer()
        url = screen.get_url()
        # pop: screen messages are displayed exactly once
        screen.screen_message = cx.request.messages.pop(url, None)
class CurrentScreenHandler(BaseHandler):
    """Re-displays the screen of a continued context (GET with a context
    id); no action is invoked."""
    obj = property(lambda self: self.context.screen.model)
    priv = property(lambda self: self.context.screen.priv)

    def __init__(self, context):
        self.context = context

    def __repr__(self):
        return 'Re-Get::%s' % self.context.screen

    def invoke(self):
        # Nothing to do: rendering the existing screen suffices.
        pass
class BindingHandler(CurrentScreenHandler):
    """Invokes a screen action binding (f, *x, **y) on a continued POST."""
    def __init__(self, context, f, x, y):
        self.context = context
        self.f = f  # the bound callable for the action
        self.x = x  # positional arguments
        self.y = y  # keyword arguments

    def __repr__(self):
        # Render as a call expression, e.g. module.func(1, k=2).
        x = ', '.join(map(repr, self.x))
        y = ', '.join('%s=%r' % p for p in self.y.iteritems())
        z = ', '.join((x and [x] or []) + (y and [y] or []))
        f = self.f
        if hasattr(f, 'func_name'):
            f = '%s.%s' % (f.__module__, f.__name__)
        return '%s(%s)' % (f, z)

    def invoke(self):
        self.thaw_args()
        return self.f(*self.x, **self.y)

    def thaw_args(self):
        """Re-attach model arguments to the live context before the call.

        Plain values (no meta) and classes are skipped.
        """
        objs = list(self.x or []) + (self.y or {}).values()
        for x in objs:
            m = getmeta(x, False)
            if m and not isinstance(x, type):
                m.thaw_model(self.context, x)
class DebugInfo:
    """Snapshot of request state attached to tracebacks (via
    __traceback_supplement__) for richer error pages."""

    def __init__(self, request, handler=None):
        self.request = request
        self.env = request.environ
        self.user = request.user
        self.action = request.action
        self.handler = handler

    def extraData(self):
        """Return the extra-data mapping the traceback formatter displays."""
        key = ('important', 'Fenton vars')
        info = {'user': self.user,
                'handler': self.handler and str(self.handler)}
        return {key: info}
class Shell(app.Shell):
    """WSGI entry point: turns environ into a Request, dispatches it to a
    handler, and serializes the response."""

    def __init__(self, app, renderer, dumper=None):
        self.app = app
        self.renderer = renderer
        self.dumper = dumper  # optional context serializer (see Dumper)
        self.prefix = app.config.get('url_prefix')
        self.charset = app.config.get('www_charset') or Request.default_charset

    def __call__(self, environ, start_response):
        """The WSGI callable."""
        environ['fenton.started'] = util.timer()
        environ['fenton.shell'] = self
        environ['fenton.app'] = self.app
        environ['fenton.request'] = request = self.new_request(environ)
        if 0 and self.debug:  # debug tracing disabled; kept for development
            debug_request(request)
        else:
            url = request.url.replace(request.host_url, '')
            logging.log.info('%s %s', request.method, url)
            if request.params:
                logging.log.info('%s', request.params)
        try:
            response = self.respond(request)
        except webob.exc.HTTPException, e:
            # webob HTTP exceptions double as WSGI responses
            response = e
        except:
            logging.log.exception()
            raise
        finally:
            # must be here and not in closer so session is written
            # to storage before iterating app response
            request.session.save()
        if response is None:
            raise AssertionError('No response')
        response.headers['X-Execution-Time'] = '%.1f ms' % (request.timer() * 1000)
        # Closing defers request.close until the server finishes iterating.
        return Closing(response(environ, start_response), request.close)

    def new_request(self, environ):
        """Factory hook; subclasses may return a Request subclass."""
        return Request(environ, self)

    def respond(self, request):
        """Resolve a handler and run it, mapping security and validation
        failures onto HTTP-level outcomes."""
        __traceback_supplement__ = DebugInfo, request
        handler = self._get_handler(request)
        __traceback_supplement__ += (handler,)
        if handler is None:
            return self._handle_not_found(request)
        close = handler.context.close
        try:
            handler.handle()
            request.close = close
        except security.NotAuthenticated:
            self._handle_not_authenticated(request)
        except security.NotAllowed:
            self._handle_not_allowed(request)
        except types.Invalid, e:
            if self.debug:
                raise
            logging.log.exception()
            self._handle_not_found(request)
        finally:
            # persist the context even on failure
            handler.finish()
        if not self.debug:
            request.close = close
        return request.response

    def _handle_not_authenticated(self, request):
        request.redirect_login(request.url)

    def _handle_not_allowed(self, request):
        request.abort(401)

    def _handle_not_found(self, request):
        request.abort(404)

    def _get_handler(self, request):
        """Pick a handler: fresh resolve when there is no context id,
        otherwise continue the stored context per HTTP method."""
        if not request.context_id:
            return self._resolve(request)
        if request.method == 'GET':
            return self._continued_as_get(request)
        if request.method == 'POST':
            return self._continued_as_post(request)

    def _resolve(self, rq):
        """Map the URL path to an app-level handler or a model screen."""
        path = rq.path_info and rq.path_info[1:]
        tmp = self.app.resolve(path)
        if tmp:
            return tmp(rq.new_context())
        try:
            meta, arg = self.app.get(path)
        except KeyError:
            return None
        return NewScreenHandler(rq.new_context(), meta, arg)

    def _continued_as_get(self, request):
        context = request.decode_context()
        if not context:
            return None
        return CurrentScreenHandler(context)

    def _continued_as_post(self, request):
        """POST against a stored context invokes one of its screen's
        action bindings; any missing piece falls through to 404."""
        if not request.action:
            logging.log.error('POST to context and no action!')
            return None
        context = request.decode_context()
        if not context:
            logging.log.error('Failed decoding context')
            return None
        binding = context.screen.get_binding(request.action)
        if not binding:
            logging.log.error('screen has no binding: %s', request.action)
            return None
        return BindingHandler(context, *binding)
class Pickler:
    """Thin serializer facade over pickle at the highest protocol."""

    @staticmethod
    def dumps(obj):
        return pickle.dumps(obj, protocol=-1)

    loads = staticmethod(pickle.loads)
class Dumper:
    """Pipeline for persisting objects: serialize, then optionally
    encrypt, then optionally sign.  loads() reverses the steps in the
    opposite order (verify, decrypt, deserialize)."""

    def __init__(self, serializer=Pickler, signer=None, crypter=None):
        self.serializer = serializer
        self.crypter = crypter
        self.signer = signer

    def dumps(self, obj):
        blob = self.serializer.dumps(obj)
        if self.crypter:
            blob = self.crypter.encrypt(blob)
        if self.signer:
            blob = self.signer.sign(blob)
        return blob

    def loads(self, data):
        blob = data
        if self.signer:
            blob = self.signer.verify(blob)
        if self.crypter:
            blob = self.crypter.decrypt(blob)
        return self.serializer.loads(blob)
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import re
import sys
import decimal
import datetime
import pytz
try:
import json
except:
import simplejson as json
from fenton import util
from fenton import widgets
from fenton import timestamp
# TODO: date format configuration
def format_date(value):
    """Format a date object as e.g. '5 Jan 2020'.

    Fix: the original contained an unreachable second ``return`` (a
    weekday-prefixed format) after the first, plus an unused weekday
    lookup; the dead code is removed with no behavior change.
    """
    return '%d %s' % (value.day, value.strftime('%b %Y'))
def format_time(value):
    """Render a time as HH:MM, appending :SS only when seconds are set."""
    pattern = '%H:%M:%S' if value.second else '%H:%M'
    return value.strftime(pattern)
def format_days(d):
    """Humanize a day count: the coarsest unit whose count exceeds one
    wins (years, then months, then weeks), else plain days."""
    for divisor, unit in ((365.25, ' years'), (30.4, ' months'), (7.0, ' weeks')):
        count = int(d / divisor)
        if count > 1:
            return str(count) + unit
    return str(d) + ' days'
def as_sequence(value):
    """Coerce *value* into something list-like.

    None -> []; strings and non-iterables -> single-element list; lists,
    tuples and other iterables are returned unchanged.

    Fix: probe iterability with iter() instead of a ``for`` loop -- the
    old probe silently consumed the first element of one-shot iterators
    (e.g. generators) before handing them back to the caller.
    """
    if value is None:
        return []
    if isinstance(value, basestring):
        return [value]
    if isinstance(value, (list, tuple)):
        return value
    try:
        iter(value)
    except TypeError:
        return [value]
    return value
def localize(f):
    """Decorator: convert the aware timestamp returned by *f* (a zero-arg
    method) into the owner's ``self.tz``; falsy results pass through."""
    import functools
    @functools.wraps(f)
    def wrapper(self):
        value = f(self)
        if not value:
            return value
        return value.astimezone(self.tz)
    return wrapper
def localdate(f):
    """Decorator: like localize(), but reduce the converted timestamp to
    its date component; falsy results pass through."""
    import functools
    @functools.wraps(f)
    def wrapper(self):
        value = f(self)
        if not value:
            return value
        return value.astimezone(self.tz).date()
    return wrapper
def localtime(f):
    """Decorator: like localize(), but reduce the converted timestamp to
    its time-of-day component; falsy results pass through."""
    import functools
    @functools.wraps(f)
    def wrapper(self):
        value = f(self)
        if not value:
            return value
        return value.astimezone(self.tz).time()
    return wrapper
def pair_property(name):
    """Descriptor exposing attributes '<name>_0' and '<name>_1' as a 2-list."""
    return multi_property(lambda i:'%s_%d'%(name, i), 2)
class multi_property(object):
    """Descriptor exposing *size* related attributes as one list.

    ``translate(i)`` maps a slot index to the backing attribute name.
    Reading returns the backing values as a list (works on the class
    itself as well as instances); assigning distributes a sequence back
    onto the backing attributes.
    """

    def __init__(self, translate, size):
        self.translate = translate
        self.size = size

    def __get__(self, obj, owner):
        # Class-level access (obj is None) reads the class attributes.
        target = owner if obj is None else obj
        return [getattr(target, self.translate(i)) for i in range(self.size)]

    def __set__(self, obj, value):
        for i in range(self.size):
            setattr(obj, self.translate(i), value[i])
class Invalid(Exception):
    """Validation failure.

    Attributes:
        message -- human-readable text; defaults to the decamelled class name
        valid   -- partially valid value recovered during validation, if any
        errors  -- nested per-item errors (used by ListErrors)
        input   -- the raw input that failed
    """
    message = None

    def __init__(self, message=None, valid=None, errors=None, input=None):
        if message:
            self.message = message
        self.valid = valid
        self.errors = errors
        self.input = input
        # unlike normal objects, exceptions are
        # unpickled by calling the constructor with whatever
        # is in the 'args' attribute
        # therefore, we must set this attribute
        self.args = message, valid, errors, input

    def get_message(self):
        # Default message: the class name, decamelled and title-cased.
        return self.message or util.decamel(self.__class__.__name__).replace('_', ' ').title()

    def __str__(self):
        return self.get_message()

    def __unicode__(self):
        # Normalize the message to unicode, decoding byte strings as UTF-8.
        msg = self.get_message()
        if isinstance(msg, unicode):
            return msg
        elif isinstance(msg, str):
            return msg.decode('utf8')
        else:
            return unicode(msg)
class NoSuchOption(Invalid):
    """Raised when a submitted value is not among a Lookup's options."""
    pass
class ListErrors(Invalid):
    """Aggregate of per-item Invalid errors raised by List.reconstruct."""
    pass
class Type:
    """Base metadata for a model attribute: widgets, validation hooks,
    labels, and (de)serialization between python values and strings."""
    # static type configuration
    view_widget = widgets.string_view
    edit_widget = widgets.string_edit
    multi_view_widget = widgets.simple_list_view
    multi_edit_widget = widgets.simple_list_edit
    # instance vars
    showview = True       # render in read-only forms
    showedit = True       # render in edit forms
    editable = True
    required = True
    hint = None
    label = None
    default = None
    meta = None
    key = None            # attribute name on the owning model (set_owner)
    widget_config = None
    num_args = 1          # how many URL arguments this type consumes
    thaw = False          # whether values need re-attaching to a context

    def __init__(self, impl=None, **kw):
        self.impl = impl
        self.__dict__.update(kw)
        # a field that never shows an edit widget cannot be editable
        if not self.showedit:
            self.editable = False
        self.widget_config = self.widget_config or {}

    def __call__(self, impl):
        'decorator interface; impl is property'
        if self.impl is not None:
            raise TypeError('impl %s already supplied' % self.impl)
        self.impl = impl
        return self

    def instrument(self, Class, name):
        # Install the backing implementation on the model class.
        setattr(Class, name, self.impl)

    def set_owner(self, meta, key):
        self.owner = meta
        self.key = key
        self.resolve()

    def get_value(self, context, obj, key):
        return getattr(obj, key)

    def resolve(self):
        # Hook for subclasses that defer work until the owner is known.
        pass

    def get_default(self, obj):
        return self.default

    def get_label(self):
        # Derive a label from the attribute key when none is configured.
        return self.label or self.key.replace('_', ' ').capitalize()

    def get_hint(self):
        return self.hint

    def get_validator(self):
        # The type validates itself by default; subclasses may return None.
        return self

    def get_widget(self, field):
        args = self.get_widget_args(field)
        return self.get_widget_class(field.editable)(None, field=field, **args)

    def get_widget_args(self, field):
        return dict(**self.widget_config)

    def get_widget_class(self, showedit):
        return showedit and self.edit_widget or self.view_widget

    def get_multi_widget_class(self, showedit):
        return showedit and self.multi_edit_widget or self.multi_view_widget

    def send_thaw(self, context, obj):
        # Hook: re-attach referenced values to the live context.
        pass

    def format_value(self, value, context):
        # Human-readable rendering; None is handled separately.
        if value is None:
            return self.format_empty(value, context)
        else:
            return self.format_nonempty(value, context)

    def format_empty(self, value, context):
        return u''

    def format_nonempty(self, value, context):
        return unicode(value)

    def is_equal(self, l, r):
        return l == r

    def simplify(self, value, context):
        'convert python object to string or serializable'
        if value is None:
            return ''
        return unicode(value)

    def reconstruct(self, value, context):
        'convert string or serializable to python object'
        if value == '':
            return None
        return value
class Primitive(Type):
    # Marker base: tags attributes backed by python primitive values.
    'wrapper for python primitive types'
class Lookup(Type):
    """Type whose values come from an option list (see Enum/Reference).

    other_key: sentinel option value enabling a free-form 'Other...' input.
    option_filter: callable or model-method name that narrows the options
        per form instance.
    """
    other_key = False
    other_label = 'Other...'
    option_filter = None

    def get_options(self, field):
        """Yield (value, label) pairs, optionally filtered via the model."""
        opts = [x[0] for x in self.options]
        owner = field.form.model
        if self.option_filter:
            if isinstance(self.option_filter, basestring):
                # string names a filter method on the model
                filter = getattr(field.form.model, self.option_filter)
            else:
                filter = lambda q: self.option_filter(owner, q)
            opts = filter(opts)
        for x in opts:
            yield x, x
class ResolutionError(Exception):
    """Raised when a string model-class reference cannot be resolved."""
    pass
class AbstractReference(Type):
    """Base for attributes referencing another model class.

    model_class may be the class itself, a zero-arg callable (deferred
    import), or a string name resolved lazily in resolve().
    """
    thaw = True
    thaw_depends = None  # attribute keys to thaw before this one

    def __init__(self, model_class, impl=None, **kw):
        self.model_class = model_class
        self.meta = None  # filled in by resolve()
        Type.__init__(self, impl, **kw)

    def __repr__(self):
        n = self.__class__.__name__
        return '<%s meta=%s>' % (n, self.meta)

    def resolve(self):
        """Turn a lazy model_class spec into a real class and cache its meta.

        String names resolve against, in order: the owner's meta_map, its
        class_map, then (dotted or plain) attribute lookup on the owning
        model's module.
        """
        from fenton import getmeta
        if isinstance(self.model_class, type(lambda:0)):
            # callable indirection defers the class until resolution time
            self.model_class = self.model_class()
        elif isinstance(self.model_class, basestring):
            name = self.model_class
            if name in self.owner.meta_map:
                self.model_class = self.owner.meta_map[name].model_class
            elif name in self.owner.class_map:
                self.model_class = self.owner.class_map[name]
            else:
                module = sys.modules[self.owner.model_class.__module__]
                if '.' in name:
                    module, name = util.get_modattr(name)
                try:
                    self.model_class = getattr(module, name)
                except AttributeError, e:
                    c = self.owner.model_class
                    msg = '"%s" in %s.%s::%s' % (name, c.__module__, c.__name__, self.key)
                    raise ResolutionError(msg)
        self.meta = getmeta(self.model_class)

    def send_thaw(self, context, obj):
        """Re-attach the referenced value (and its prerequisites) to context."""
        from fenton import getmeta
        if self.thaw:
            if self.thaw_depends:
                # thaw dependencies first so this attribute can resolve
                for k in self.thaw_depends:
                    getmeta(obj).attributes[k].send_thaw(context, obj)
            x = getattr(obj, self.key, None)
            if x is not None:
                getmeta(x).thaw_model(context, x)
class Reference(AbstractReference, Lookup):
    """Reference edited by selecting among existing target objects."""
    option_filter = None
    view_widget = widgets.reference_view
    edit_widget = widgets.single_option_select
    multi_view_widget = widgets.reference_list_view
    multi_edit_widget = widgets.simple_select_list
    link = False #True
    # Arity mirrors the target meta (composite ids consume several args).
    num_args = property(lambda x:x.meta.num_args)

    def get_options(self, field):
        """Yield (id, object) pairs for the selectable target objects."""
        owner = field.form.model
        if self.option_filter:
            if isinstance(self.option_filter, basestring):
                # string names a filter method on the model
                filter = getattr(field.form.model, self.option_filter)
            else:
                filter = lambda q: self.option_filter(owner, q)
        else:
            filter = None
        for obj in self.meta.iter_model(field.context, filter):
            yield self.meta.get_id(field.context, obj), obj

    def simplify(self, value, context):
        # Delegate to the target meta: the stored form is the object's id.
        return self.meta.simplify(value, context)

    def reconstruct(self, value, context):
        if not value:
            return None
        if self.other_key and value == self.other_key:
            # keep the sentinel; the free-form value arrives separately
            return self.other_key
        return self.meta.reconstruct(value, context)
class Child(AbstractReference):
    """Composed reference: the child object's own form is rendered inline
    instead of a select widget."""
    view_widget = widgets.child_single_view
    edit_widget = widgets.child_single_edit
    multi_view_widget = widgets.child_list_view
    multi_edit_widget = widgets.child_list_edit
    link_column = False
    show_header = True

    def get_validator(self):
        # Child values are edited through their own nested form, so the
        # parent form performs no validation on them.
        return None
class Aggregate(Type):
    """Base for container types; *of* is the element type, given as an
    instance, a class, or a string model reference (normalized here)."""
    meta = property(lambda x:x.of.meta)

    def __init__(self, of, impl=None, **kw):
        if isinstance(of, type):
            of = of()  # allow passing the element type class itself
        if isinstance(of, basestring):
            of = Reference(of)  # a string names a referenced model class
        self.of = of
        Type.__init__(self, impl, **kw)

    def is_equal(self, l, r):
        # Order-insensitive comparison; None is treated as empty.
        return set(l or []) == set(r or [])
class List(Aggregate):
    """Homogeneous collection of ``of``-typed values."""
    index_attr = None
    columns = None
    link_column = False
    show_header = True

    def __repr__(self):
        return '<list of=%s>' % self.of

    def set_owner(self, meta, key):
        # propagate ownership to the element type as well
        self.of.set_owner(meta, key)
        self.owner = meta
        self.key = key

    def format_nonempty(self, value, context):
        if not value:
            return ''
        return ', '.join(self.of.format_nonempty(x, context)
                         for x in value)

    def get_default(self, obj):
        # fresh list per object -- never share a mutable default
        return list()

    def get_validator(self):
        # validate only when the element type does
        v = self.of.get_validator()
        return v and self or None

    def get_widget_class(self, showedit):
        return self.of.get_multi_widget_class(showedit)

    def send_thaw(self, context, obj):
        if self.thaw:
            thaw = self.of.send_thaw
            xs = getattr(obj, self.key, None)
            for x in xs or []:
                thaw(context, x)

    def simplify(self, value, context):
        # preserve set-ness of the container in the simplified form
        cons = isinstance(value, set) and set or list
        value = as_sequence(value)
        if not value:
            return []
        return cons(self.of.get_validator().simplify(item, context)
                    for item in value)

    def reconstruct(self, value, context):
        """Validate each item, collecting per-item errors into ListErrors.

        NOTE(review): the return value passes through set(), so order is
        lost and duplicates collapse -- confirm callers expect set
        semantics.  Also, the ListErrors arguments do not line up with
        Invalid.__init__'s (message, valid, errors, input) -- verify.
        """
        #as_set = isinstance(value, set)
        value = as_sequence(value or [])
        if not value:
            return []
        result = []
        errors = []
        for item in value:
            good = util.undefined
            try:
                good = self.of.get_validator().reconstruct(item, context)
            except Invalid, error:
                errors.append(error)
                good = error.valid
            if good is not util.undefined:
                result.append(good)
        #if as_set:
        #    result = set(result)
        if errors:
            raise ListErrors(value, context, result, errors)
        return list(set(result))
class Tuple(Aggregate):
    """Fixed-width aggregate whose slots all share the element type.

    Fix: format_value/reconstruct/simplify referenced ``self.type``,
    which Tuple never defined (Aggregate stores the element type as
    ``self.of``), so every call raised AttributeError.  ``type`` is now
    an alias for ``of``, mirroring Pair.type.
    """
    labels = None
    # Alias so the methods below (and templates) can use .type, like Pair.
    type = property(lambda self: self.of)

    class view_widget(widgets.field_widget):
        class_id = 'tuple_view'
        template_file = 'widgets'
        labels = property(lambda x: x.field.type.labels)

    class edit_widget(widgets.field_widget):
        class_id = 'tuple_edit'
        template_file = 'widgets'
        labels = property(lambda x: x.field.type.labels)

    def format_value(self, value, context):
        # Format each slot; an all-empty tuple renders as ''.
        values = [self.type.format_value(v, context)
                  for v in value]
        if not any(values):
            return ''
        return ', '.join(values)

    def reconstruct(self, values, context):
        return tuple(self.type.reconstruct(v.strip(), context)
                     for v in values)

    def simplify(self, values, context):
        return tuple(self.type.simplify(v, context)
                     for v in values)
class Pair(Aggregate):
    """Two values of the same element type (e.g. a from/to range)."""
    of = None      # element type; may be a class, instantiated lazily below
    labels = None

    @util.lazyattr
    def type(self):
        # normalize: instantiate the element type on first access
        if isinstance(self.of, type):
            self.of = self.of()
        return self.of

    class view_widget(widgets.field_widget):
        class_id = 'pair_view'
        template_file = 'widgets'
        labels = property(lambda x:x.field.type.labels)

    class edit_widget(widgets.field_widget):
        class_id = 'pair_edit'
        template_file = 'widgets'
        labels = property(lambda x:x.field.type.labels)

    def format_value(self, value, context):
        values = (self.type.format_value(value[0], context),
                  self.type.format_value(value[1], context))
        if not any(values):
            return ''
        return ', '.join(values)

    def reconstruct(self, value, context):
        l, r = [v.strip() for v in value]
        return (self.type.reconstruct(l, context),
                self.type.reconstruct(r, context))

    def simplify(self, value, context):
        return (self.type.simplify(value[0], context),
                self.type.simplify(value[1], context))
class String(Primitive):
    """Free-text attribute stored as unicode; blank input becomes None."""
    def reconstruct(self, value, context):
        value = value.strip()
        return value and unicode(value) or None

    def simplify(self, value, context):
        # Identity: values are already strings (or None).
        return value
        #return value and unicode(value.strip()) or u''
class Text(String):
    """Multi-line string, rendered with textarea-style widgets."""
    edit_widget = widgets.textbox_edit
    view_widget = widgets.text_view
class MaskedString(String):
    """String rendered with input-mask widgets.

    NOTE(review): the masking behavior lives entirely in the widgets --
    nothing here alters the value; confirm against widgets.string_mask_edit.
    """
    view_widget = widgets.string_mask_view
    edit_widget = widgets.string_mask_edit
class Code(String):
    """String restricted to an allowed character class, case-normalized."""
    edit_widget = widgets.code_edit
    chars = 'A-Z0-9_.-'   # regex character class of allowed characters
    case = 'upper'        # str method used to normalize case ('' disables)

    def reconstruct(self, value, context):
        """Strip, normalize case, and drop characters outside self.chars;
        blank input becomes None."""
        cleaned = value.strip()
        if not cleaned:
            return None
        if self.case:
            cleaned = getattr(cleaned, self.case)()
        return re.sub('[^%s]+' % self.chars, '', cleaned)
class Integer(Primitive):
    """Whole-number input type."""

    def reconstruct(self, value, context):
        """Parse a stripped string to int; '' -> None, garbage -> Invalid."""
        text = value.strip()
        if not text:
            return None
        try:
            return int(text)
        except (ValueError, TypeError):
            raise Invalid('Not a number: %s' % text)
class NotANumber(Invalid):
    """Numeric parse failure.

    Fix: the get_message() override duplicated Invalid.get_message
    verbatim, so it is removed -- the inherited implementation already
    derives the identical 'Not A Number' message from the class name.
    """
    pass
class Enum(Lookup):
    """Value constrained to a static (value, label) option list."""
    view_widget = widgets.reference_view
    edit_widget = widgets.single_option_select
    multi_view_widget = widgets.simple_list_view
    multi_edit_widget = widgets.simple_select_list
    options = None  # subclasses/instances supply ((value, label), ...)
    link = False

    def format_nonempty(self, value, context):
        # Show the label; unknown values fall back to the raw value.
        return dict(self.options).get(value, value)

    def reconstruct(self, value, context):
        if not value:
            return None
        opts = [x[0] for x in self.options]
        # other_key permits values outside the declared options
        if value not in opts and not self.other_key:
            raise Invalid('No such option: %s' % value)
        return value

    def get_default(self, obj):
        # Default to the first declared option when none is configured.
        if self.default is None:
            return self.options[0][0]
        else:
            return self.default
class Boolean(Enum):
    """Yes/no choice posted as the strings 'True'/'False'."""
    edit_widget = widgets.boolean_edit
    options = ((True, 'Yes'), (False, 'No'))
    # Reversed so list position doubles as truth value:
    # strvalues == ['False', 'True'], hence index('True') == 1 -> True.
    strvalues = [str(x[0]) for x in reversed(options)]

    def reconstruct(self, value, context):
        # NOTE: empty input yields None (unset), not False.
        if not value:
            return None
        try:
            return bool(self.strvalues.index(value))
        except ValueError:
            raise Invalid('Expected True or False')
class ExpectedTrueOrFalse(Invalid):
    """Boolean parse failure (message derives from the class name)."""
    pass
class Number(Primitive):
    """Numeric input: parses floats, collapsing whole values to int."""

    def reconstruct(self, value, context):
        """Parse *value*; '' -> None, garbage (including NaN) -> Invalid."""
        text = value.strip()
        if not text:
            return None
        try:
            number = float(text)
            try:
                whole = int(number)
            except OverflowError:
                whole = None  # inf / -inf have no integer form
            if number == whole:
                return whole
            return number
        except ValueError:
            # raised by float() on garbage, or by int() on NaN
            raise Invalid('Not a number: %s' % text)
class Decimal(Number):
    """Exact decimal input.

    Fix: the original fed Number.reconstruct's float result into
    decimal.Decimal, importing binary floating-point artifacts (e.g.
    Decimal(0.1) != Decimal('0.1')) -- and Decimal(float) raises
    TypeError before Python 2.7.  The Decimal is now built from the
    validated input string, which is exact.
    """
    def reconstruct(self, value, context):
        text = value.strip()
        if not text:
            return None
        # Delegate validation (raises Invalid on non-numeric input) ...
        Number.reconstruct(self, text, context)
        # ... but construct from the string so the value stays exact.
        return decimal.Decimal(text)
class Date(Primitive):
    """Calendar date posted as ISO 'YYYY-MM-DD'."""
    # Built in two steps: the string is immediately replaced by its
    # compiled form.  Anchored only at the start; trailing garbage is
    # left for datetime.date() to reject.
    regexp = '\d{4}(-\d{2}){2}'
    regexp = re.compile(regexp)
    view_widget = widgets.date_view
    edit_widget = widgets.date_edit

    def format_nonempty(self, value, context):
        return format_date(value)

    def simplify(self, value, context):
        # Emit the canonical ISO form.
        if not value:
            return ''
        return '%04d-%02d-%02d' % (value.year, value.month, value.day)

    def reconstruct(self, value, context):
        if not value:
            return None
        value = value.strip()
        if not value:
            return None
        if not self.regexp.match(value):
            raise Invalid('Not a date: %s' % value)
        try:
            args = map(int, value.split('-'))
            return datetime.date(*args)
        except Exception, e:
            # out-of-range components etc.; surface datetime's message
            raise Invalid(message=str(e))
class NotADate(Invalid):
    """Date parse failure (message derives from the class name)."""
    pass
class Time(Primitive):
regexp = re.compile('^(\d\d?)([:\.]?\d\d) *([aApP][mM])?$')
edit_widget = widgets.time_edit
def reconstruct(self, value, context):
if not value:
return None
m = self.regexp.match(value)
if not m:
raise Invalid
hour, minute, meridian = m.groups()
hour = hour and int(hour) or 0
minute = minute and int(minute[-2:]) or 0
meridian = meridian and meridian.upper()
if hour > 23:
raise Invalid('Hour out of range')
if minute > 61:
raise Invalid('Minute out of range')
if meridian == 'PM' and hour < 12:
hour += 12
try:
return datetime.time(hour, minute)
except Exception, e:
raise Invalid
def simplify(self, value, context):
return self.format_value(value, context)
def format_nonempty(self, value, context):
s = u'%02d:%02d' % (value.hour, value.minute)
if value.second:
s += u':%02d' % value.second
return s
class DateTime(Primitive):
    """Naive timestamp posted as 'YYYY-MM-DDTHH:MM[:SS]'."""

    def reconstruct(self, value, context):
        # Length > 16 means the optional seconds component is present.
        try:
            fmt = '%Y-%m-%dT%H:%M:%S' if len(value) > 16 else '%Y-%m-%dT%H:%M'
            return datetime.datetime.strptime(value, fmt)
        except Exception:
            raise Invalid

    def simplify(self, value, context):
        if not value:
            return ''
        pattern = '%Y-%m-%dT%H:%M' + (':%S' if value.second else '')
        return unicode(value.strftime(pattern))
class LocalDateTime(Primitive):
    """Timezone-aware timestamp, displayed in a local zone."""
    view_widget = widgets.local_datetime_view
    edit_widget = widgets.local_datetime_edit
    follows_from = None
    get_tz = None  # optional callable obj -> tzinfo for display conversion

    def get_value(self, context, obj, key):
        value = getattr(obj, key)
        if not value:
            return None
        tz = self.get_tz and self.get_tz(obj)
        if tz:
            value = value.astimezone(tz)
        return value

    def simplify(self, value, context):
        """ISO 8601 with a +-HH:MM offset.

        NOTE(review): assumes *value* is timezone-aware; a naive datetime
        yields an empty %z and a malformed offset.  reconstruct() rejects
        naive input, but confirm no other writer bypasses it.
        """
        if not value:
            return ''
        iso_fmt = '%Y-%m-%dT%H:%M:%S'
        z = value.strftime('%z')
        z = z[:3] + ':' + z[3:]
        return value.strftime(iso_fmt) + z

    def reconstruct(self, value, context):
        if not value:
            return None
        try:
            ts = timestamp.parse(value)
        except Exception, e:
            raise Invalid(str(e))
        if not ts.tzinfo:
            # naive timestamps are rejected outright
            raise Invalid('No timezone')
        return ts

    def format_nonempty(self, value, context):
        d = self.format_date(value, context)
        t = self.format_time(value, context)
        #z = self.format_tz(value, context)
        z = value.strftime('%Z')
        return '%s %s %s' % (d, t, z)

    def format_date(self, value, context):
        return format_date(value)

    def format_time(self, value, context):
        return format_time(value)
def utc_from_local(ts, tz):
    """Attach *tz* to the naive datetime *ts*, raising Invalid when the
    wall-clock time is DST-ambiguous.

    NOTE(review): the original took no parameters, referenced the
    undefined names ``ts``/``tz`` and returned None; this reconstructs
    the evident intent (pytz's is_dst=None localization).  The zero-arg
    form could never have been called successfully.
    """
    try:
        return tz.localize(ts, is_dst=None)
    except pytz.AmbiguousTimeError:
        raise Invalid('Ambiguous time')
class Minutes(Primitive):
    """Duration in minutes, written like '1h30m', '90', or '1h'.

    Fix: the minutes part of the pattern is now optional, so hour-only
    input such as '2h' parses to 120 (previously it raised Invalid,
    even though format_hhmm emits values in exactly that family).
    """
    regexp = re.compile('^((\d+)\s*h)?\s*((\d+)\s*m?)?$')

    def format_hhmm(self, value):
        """'1h05m' style; 0 stays '0', negatives keep a leading '-'."""
        if value == 0:
            return '0'
        sign = ''
        if value < 0:
            value = -value
            sign = '-'
        h, m = divmod(value, 60)
        return '%s%dh%02dm' % (sign, h, m)

    def format_decimal(self, value):
        """Decimal hours, e.g. 90 -> '1.50h'."""
        if value == 0:
            return '0'
        return '%.2fh' % (value / 60.0)

    def format_nonempty(self, value, context):
        if value == 0:
            return '0'
        return self.format_hhmm(value)

    def reconstruct(self, value, context):
        value = value.strip()
        if not value:
            return None
        match = self.regexp.match(value)
        if match is None:
            raise Invalid('Value not in format (12h)34(m)', context)
        hh, h, mm, m = match.groups()
        if not any((hh, h, mm, m)):
            return 0
        return int(h or 0) * 60 + int(m or 0)

    def simplify(self, value, state):
        if value is None:
            return ''
        return self.format_hhmm(value)
class IsoWeek(Primitive):
    """ISO year-week value written 'YYYY-W' (e.g. '2020-5'); parses to an
    isoweek instance."""
    reg = re.compile('^(\d{4})-(\d{1,2})$')

    def reconstruct(self, value, context):
        match = self.reg.match(value)
        if not match:
            raise Invalid
        y, m = map(int, match.groups())
        # ISO years have at most 53 weeks
        if 0 < m < 54:
            return isoweek(y, m)
        raise Invalid

    def simplify(self, value, context):
        return '%d-%d' % (value.year, value.week)
    format_nonempty = simplify
class isoweek:
    """Lightweight (year, ISO week) pair; knows the Monday it starts on."""

    def __init__(self, year, week):
        self.year = year
        self.week = week

    def __repr__(self):
        return 'isoweek<%s, %s>' % (self.year, self.week)

    @property
    def monday(self):
        # http://en.wikipedia.org/wiki/ISO_week_date#Calculation
        jan4_weekday = datetime.date(self.year, 1, 4).isoweekday()
        delta = datetime.timedelta(self.week * 7 - (jan4_weekday + 3))
        return datetime.date(self.year, 1, 1) + delta
class Period(Primitive):
    """Date span posted as 'YYYY-MM-DD-N' (start date plus day count)."""
    regexp = re.compile('\d{4}(-\d{2}){2}-\d+')
    fmt = '%04d-%02d-%02d-%d'

    def reconstruct(self, value, context):
        if not self.regexp.match(value):
            raise Invalid
        try:
            y, m, d, n = map(int, value.split('-'))
            d = datetime.date(y, m, d)
        except Exception, e:
            # wrong arity or out-of-range date components
            raise Invalid
        else:
            return period(d, n)

    def simplify(self, value, context):
        t = value.first
        days = value.delta.days
        return self.fmt % (t.year, t.month, t.day, days)
class period:
    """Inclusive span of dates: a first day plus a day count.

    *arg* may be a day count (int), a timedelta-like object (.days), or
    an end date; negative spans are normalized so ``first`` is always
    the earlier endpoint.  Iteration yields days + 1 dates, including
    both endpoints.

    Fix: Python 2 does not derive ``!=`` from ``__eq__``, so ``__ne__``
    is defined explicitly; previously two equal periods still compared
    unequal with ``!=`` (identity comparison).
    """

    def __init__(self, first, arg):
        try:
            days = int(arg)
        except (TypeError, ValueError):
            try:
                days = arg.days  # timedelta-like
            except AttributeError:
                try:
                    days = (arg - first).days  # end date
                except (TypeError, ValueError):
                    raise TypeError('Invalid argument %r of type %r' % (arg, type(arg)))
        if days < 0:
            # normalize: walk the start back so the span is non-negative
            days = -days
            first = first - datetime.timedelta(days)
        self.first = first
        self.days = days

    def __len__(self):
        return self.days

    def __iter__(self):
        # both endpoints included: days + 1 dates in total
        for i in range(self.days + 1):
            yield self.first + datetime.timedelta(i)

    def __getitem__(self, i):
        return list(self)[i]

    def __repr__(self):
        return 'period(%s, %d)' % (self.first, self.days)

    def __str__(self):
        return '%s -- %s' % (self.first, self.last)

    def __iadd__(self, arg):
        if isinstance(arg, int):
            arg = datetime.timedelta(arg)
        self.first += arg
        return self

    def __isub__(self, arg):
        self += -arg
        return self

    def __add__(self, arg):
        if isinstance(arg, int):
            arg = datetime.timedelta(arg)
        result = self.__class__(self.first, self.days)
        result += arg
        return result

    def __sub__(self, arg):
        return self + -arg

    def __eq__(self, value):
        if not isinstance(value, self.__class__):
            return False
        return self.first == value.first and self.days == value.days

    def __ne__(self, value):
        # Python 2 needs this spelled out; mirror __eq__.
        return not self.__eq__(value)

    @property
    def delta(self):
        return datetime.timedelta(self.days)

    @property
    def last(self):
        return self.first + datetime.timedelta(self.days)
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
from fenton import util
from fenton import types
from fenton import getmeta
from fenton import security
EMPTY = (None, [], '', {})
# called only by web.Request.vars
# and therefore created only by form inputs and URLs
def decode(indict, dch='.', lch='-'):
    """Expand a flat form/query dict into nested dicts and lists.

    Keys are split on *dch* into nesting levels; a segment ending in
    *lch* plus an integer marks a list slot (e.g. 'a.b-0').  Inverse of
    encode().  Segments whose list index is not an integer are dropped.
    """
    result = {}
    sort_keys = set()  # key paths that address lists and need ordering
    for key, value in indict.iteritems():
        keys = key.split(dch)
        new_keys = []
        for key in keys:
            if lch in key:
                key, index = key.split(lch, 1)
                try:
                    index = int(index)
                except ValueError:
                    continue  # malformed index: skip this segment
                new_keys.append(key)
                sort_keys.add(tuple(new_keys))
                new_keys.append(index)
            else:
                new_keys.append(key)
        if not new_keys:
            continue
        # walk/create nested dicts down to the final key
        place = result
        for i in range(len(new_keys)-1):
            try:
                if not isinstance(place[new_keys[i]], dict):
                    # a scalar already lives here; keep it under None
                    place[new_keys[i]] = {None: place[new_keys[i]]}
                place = place[new_keys[i]]
            except KeyError:
                place[new_keys[i]] = {}
                place = place[new_keys[i]]
        # merge the value with whatever is already at the leaf
        if new_keys[-1] in place:
            if isinstance(place[new_keys[-1]], dict):
                place[new_keys[-1]][None] = value
            elif isinstance(place[new_keys[-1]], list):
                if isinstance(value, list):
                    place[new_keys[-1]].extend(value)
                else:
                    place[new_keys[-1]].append(value)
            else:
                if isinstance(value, list):
                    place[new_keys[-1]] = [place[new_keys[-1]]]
                    place[new_keys[-1]].extend(value)
                else:
                    place[new_keys[-1]] = [place[new_keys[-1]], value]
        else:
            place[new_keys[-1]] = value
    # convert index-keyed dicts into ordered lists, deepest paths first
    keys = sorted(sort_keys, cmp=lambda a,b:-cmp(len(a),len(b)))
    for key in keys:
        to_sort = result
        source = None
        last_key = None
        for sub_key in key:
            source = to_sort
            last_key = sub_key
            to_sort = to_sort[sub_key]
        if None in to_sort:
            # un-indexed values sort first (treated as index 0)
            noneVals = [(0, x) for x in to_sort[None]]
            del to_sort[None]
            noneVals.extend(to_sort.items())
            to_sort = noneVals
        else:
            to_sort = to_sort.items()
        to_sort.sort()
        to_sort = [v for k, v in to_sort]
        source[last_key] = to_sort
    return result
# called only by view.get_url() (view.py and system.py)
def encode(inval, prefix='', dch='.', lch='-', outval=None):
    """Flatten a nested dict/list structure into a flat {key: value}
    dict, joining dict keys with *dch* and list indices with *lch*.
    Inverse of decode().

    Fix: the list branch reassigned *prefix* inside its loop, so indices
    accumulated ('a-0', 'a-0-1', 'a-0-1-2', ...) instead of producing
    'a-0', 'a-1', 'a-2'.
    """
    if outval is None:
        outval = {}
    if isinstance(inval, dict):
        for key, value in inval.items():
            if key is None:
                name = prefix  # a None key holds the value at this level
            elif not prefix:
                name = key
            else:
                name = '%s%s%s' % (prefix, dch, key)
            encode(value, name, dch, lch, outval)
    elif isinstance(inval, list):
        for i, item in enumerate(inval):
            encode(item, '%s%s%i' % (prefix, lch, i), dch, lch, outval)
    else:
        outval[prefix] = inval
    return outval
class RequiredValueMissing(types.Invalid):
    """Raised per key by Form.validate when a required field is empty."""
    message = 'Required value missing'
class Form:
    """Binds a model's metadata to a concrete form: builds Field objects
    per attribute (subject to security checks), serializes current
    values, and validates incoming data in two passes."""

    def __init__(self, context, model, editable, errors):
        self.context = context
        self.model = model
        self.editable = editable
        self.errors = errors  # shared mapping: key -> Invalid or None
        self.pushback_keys = []
        self.meta = getmeta(model)
        self.groups = self._groups()
        from itertools import chain
        fs = chain(*[fs for (k, h, fs) in self.groups])
        self.fields = dict((f.key, f) for f in fs)
        key = self.meta.key
        # keys participating in validation: editable, validatable, and
        # not part of the model's identity key
        self.keys = [k for (k, f) in self.fields.iteritems()
                     if f.editable
                     and f.type.get_validator()
                     and k not in key]

    @util.yieldlist
    def _fields(self, keys, offset):
        """Yield Field objects for *keys* that pass the show check; field
        indices continue from *offset*."""
        i = 0
        meta = getmeta(self.model)
        for k in keys:
            type = meta.attributes[k]
            if self.editable:
                check = type.showedit
            else:
                check = type.showview
            if security.check(check, self.context, self.model):
                # per-request flags, gated by security
                editable = self.editable and security.check(type.editable,
                                                            self.context,
                                                            self.model)
                required = self.editable and security.check(type.required,
                                                            self.context,
                                                            self.model)
                yield Field(self, i+offset, type, editable, required)
                i += 1

    @util.yieldlist
    def _groups(self):
        """Yield (groupkey, header, fields) per meta field group,
        skipping groups whose fields are all hidden."""
        count = 0
        for groupkey, header, keys in self.meta.fields:
            fields = self._fields(keys, count)
            if fields:
                count += len(fields)
                yield groupkey, header, fields

    def get_args(self):
        """Simplified (string-form) values for all validatable keys."""
        return dict((k, self.fields[k].get_simplified())
                    for k in self.keys)

    def pushback(self, *keys):
        """Mark *keys* to be re-sent to the client, clearing their errors."""
        for key in keys:
            self.errors[key] = None
            self.pushback_keys.append(key)

    def get_pushback(self):
        # returns None when nothing was pushed back
        if self.pushback_keys:
            return dict((k, self.fields[k].get_string())
                        for k in self.pushback_keys)

    # called only by view.update()
    def validate(self, incoming, only=None):
        """Two-pass validation of the *incoming* string dict.

        Pass 1 (formal): reconstruct each editable field's python value,
        recording Invalid errors per key.  Pass 2 (semantic): enforce
        required-ness and push values into the model via
        meta.update_model, which may itself raise Invalid.
        """
        orig = {}
        reconstructed = {}
        meta = self.meta
        # first loop: formal validation: str -> py
        for k in only or self.fields.keys():
            field = self.fields.get(k)
            if not field or not field.editable:
                continue
            orig[k] = field.get_value()
            self.errors[k] = None
            v = field.type.get_validator()
            if not v:
                # no validator (e.g. Child): pass the current value through
                reconstructed[k] = orig[k]
                continue
            value = incoming.get(k, '')
            try:
                reconstructed[k] = v.reconstruct(value, self.context)
            except types.Invalid, e:
                self.errors[k] = e
                if e.input is None:
                    e.input = value  # echo the failing input back
        # second loop: semantic validation
        keys = reconstructed.keys()
        missing = []
        while keys:
            key = keys.pop(0)
            #print 'form.update:', key
            field = self.fields[key]
            value = reconstructed.get(key)
            if self.errors.get(key):
                continue  # already failed formal validation
            if field.required and value in EMPTY:
                self.errors[key] = RequiredValueMissing(input=value)
                missing.append(key)
                continue
            try:
                meta.update_model(self.context, self.model, {key: value}, self)
            except types.Invalid, e:
                self.errors[key] = e
                if e.input is None:
                    e.input = incoming.get(key)
            else:
                self.errors[key] = None
class Field:
    """View/validation wrapper around one model attribute in a form.

    Bridges the attribute's type object (label, widget, validator,
    formatting) with the owning form's error and pushback state.
    """
    # Fully-qualified input name: '<model_key>.<attr key>' (computed lazily).
    name = util.lazyattr(lambda x:x.request.model_key + '.' + x.key)
    # Conveniences delegating to the owning form / context.
    request = property(lambda x:x.context.request)
    context = property(lambda x:x.form.context)
    error = property(lambda x:x.form.errors.get(x.key))
    label = property(lambda x:x.type.get_label())
    hint = property(lambda x:x.type.get_hint())
    def __init__(self, form, index, type, editable, required):
        self.form = form
        self.index = index
        self.type = type
        self.key = type.key
        self.editable = editable
        self.required = required
    def get_value(self):
        """Current model value for this field."""
        return self.type.get_value(self.context, self.form.model, self.key)
    def get_simplified(self):
        """Wire-format value: a failed input is echoed back verbatim,
        otherwise the validator simplifies the current model value."""
        e = self.form.errors.get(self.key)
        if e is not None:
            return e.input
        v = self.type.get_validator()
        return v.simplify(self.get_value(), self.context)
    def get_formatted(self):
        """Human-readable rendering of the model value."""
        return self.type.format_value(self.get_value(), self.context)
    def get_string(self):
        # Editable fields round-trip the raw wire value; read-only
        # fields show the display form instead.
        return (self.editable and self.get_simplified or self.get_formatted)()
    @util.lazyattr
    def widget(self):
        # Widget construction deferred until first access.
        return self.type.get_widget(self)
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import os
import sys
import time
import os.path as op
import fenton.app
MAXFD = 1024
def get_config(filename, vars=None):
    """Load the [fenton] section of `filename` as a dict.

    Values spelling 'off', 'no' or 'false' (any case) become False;
    everything else is kept as the raw string.  Raises RuntimeError if
    the file cannot be read.
    """
    import ConfigParser
    parser = ConfigParser.ConfigParser(vars)
    if not parser.read(filename):
        raise RuntimeError('Config not found: ' + filename)
    falsy = ('off', 'no', 'false')
    config = {}
    for key, value in parser.items('fenton'):
        if value and value.lower() in falsy:
            config[key] = False
        else:
            config[key] = value
    return config
def _stop_daemon(pidfile):
    """Terminate the daemon recorded in `pidfile`.

    Returns 0 on success or nothing-to-do, 1/2 when the PID file was
    stale (2 if it also could not be deleted), 3 when the process
    survived ten SIGTERMs.
    """
    if not op.exists(pidfile):
        print 'No PID file exists in %s' % pidfile
        return 0
    pid = _read_pid(pidfile)
    if not pid:
        # Stale file: the recorded process is gone; clean it up.
        print 'removing invalid pid file %s' % pidfile
        try:
            os.unlink(pidfile)
        except (OSError, IOError), e:
            print 'Could not delete: %s' % e
            return 2
        return 1
    # Send SIGTERM once a second until the process disappears.
    for _ in range(10):
        if not _read_pid(pidfile):
            break
        import signal
        os.kill(pid, signal.SIGTERM)
        time.sleep(1)
    else:
        print 'failed to kill daemon process %s' % pid
        return 3
    if op.exists(pidfile):
        os.unlink(pidfile)
    return 0
def _write_pid(pidfile):
    """Record the current PID in `pidfile` and arrange for the file to
    be removed when this process exits."""
    import atexit
    current = os.getpid()
    f = open(pidfile, 'w')
    try:
        f.write(str(current))
    finally:
        f.close()
    atexit.register(_remove_pidfile, current, pidfile)
def _read_pid(pidfile):
pid = _read_pidfile(pidfile)
if pid:
try:
os.kill(int(pid), 0)
return pid
except OSError, e:
import errno
if e.errno == errno.EPERM:
return pid
def _read_pidfile(pidfile):
if not op.exists(pidfile):
return None
try:
with open(pidfile) as f:
return int(f.read().strip())
except (ValueError, IOError):
return None
def _remove_pidfile(written_pid, pidfile):
current_pid = os.getpid()
if written_pid != current_pid:
# A forked process must be exiting, not the process that
# wrote the PID file
return
if not op.exists(pidfile):
return
with open(pidfile) as f:
content = f.read().strip()
try:
pid_in_file = int(content)
except ValueError:
pass
else:
if pid_in_file != current_pid:
msg = 'Unexpected PID %s in file %s (expected %s)' % (pid_in_file,
pidfile,
current_pid)
print msg
return
try:
os.unlink(pidfile)
return
except OSError, e:
# Record, but don't give traceback
print 'Cannot remove PID file: %s' % e
# well, at least lets not leave the invalid PID around...
try:
with open(pidfile, 'w') as f:
f.write('')
except OSError, e:
print 'Stale PID left in file: %s (%e)' % (pidfile, e)
else:
print 'Stale PID removed'
class Suite:
    """Command-line front end: parses the global options, resolves the
    requested sub-command by name, loads the config and dispatches.

    `factory` is the application factory; it must expose `config_file`
    and `__name__`.
    """
    def __init__(self, factory, **vars):
        import optparse
        self.factory = factory
        # Extra variables interpolated into the ini file.
        self.vars = vars
        self.name = factory.__name__
        commands = [ShellCommand,
                    DevServer,
                    RestartDaemon,
                    StartDaemon,
                    StopDaemon]
        self.commands = dict((s.name, s()) for s in commands)
        usage='%prog [global-options] COMMAND [command-options]'
        # add_help_option=False: -h is re-added below as a plain flag so
        # "help COMMAND" can be handled by dispatch() itself.
        p = self.parser = optparse.OptionParser(add_help_option=False,
                                                usage=usage)
        p.add_option(
            '-c', '--config',
            action='store',
            metavar='FILE.ini',
            dest='config_file',
            default = factory.config_file
            )
        p.add_option(
            '-h', '--help',
            action='store_true',
            dest='do_help',
            help='Show this help message')
        # Everything after the command name belongs to the command.
        p.disable_interspersed_args()
    _main = None
    def main(self, f=None):
        'call from __main__'
        if f is not None:
            # Optional: register `f` as the implicit 'main' command and
            # pretend it was requested on the command line.
            if self._main is not None:
                raise AssertionError('main already registered')
            self._main = f
            self.install_command_func('main', f)
            sys.argv.append('main')
        from fenton import util
        util.normalize_path()
        args = sys.argv[1:]
        status = 255
        try:
            status = self.dispatch(args)
        except (SystemExit, KeyboardInterrupt), e:
            status = 0
        except:
            # Any other failure: print the traceback and exit with the
            # initial 255 status.
            import traceback
            traceback.print_exc()
        sys.stdout.flush()
        sys.exit(status)
    def dispatch(self, args):
        """Parse args, resolve the command, load config, run. Returns
        the process exit status."""
        options, args = self.parser.parse_args(args)
        if not args or options.do_help:
            args = ['help'] + args
        name = args.pop(0)
        if name == 'help':
            name = args and args.pop(0) or None
            return self.help(name)
        try:
            command = self.commands[name]
        except KeyError:
            print 'ERROR: unknown command \'%s\'' % name
            self.list_commands()
            return 1
        # Config is loaded only for real commands, not for help.
        self.config_file = cf = op.realpath(options.config_file)
        self.config = get_config(cf, vars=self.vars)
        return self.run_command(command, args)
    def run_command(self, command, args):
        # Inject ourselves so the command can reach config/factory.
        command.suite = self
        command.parse_args(args)
        return command.run() or 0
    def list_commands(self):
        print '\nCommands:'
        longest = max(map(len, self.commands.keys()))
        for name, command in sorted(self.commands.items()):
            padded = ('%%-%ds' % longest) % name
            print ' %s %s' % (padded, command.__doc__)
    def help(self, name=None):
        """Show global help, or a command's help (optparse prints it and
        raises SystemExit from parse_args(['-h']))."""
        status = 0
        if name is None:
            self.parser.print_help()
            self.list_commands()
        elif name not in self.commands:
            status = 1
        else:
            self.commands[name].parse_args(['-h'])
        return status
    def add_command(self, cmd):
        self.commands[cmd.name] = cmd
    def install_command_func(self, name, func):
        """Wrap a bare function as a Command instance named `name`."""
        cmd = Command()
        cmd.__doc__ = func.__doc__
        cmd.name = name
        cmd.action = lambda:func(cmd)
        self.commands[cmd.name] = cmd
        return func
    def command(self, arg):
        """Decorator: register a function as a command, either with an
        explicit name (@suite.command('x')) or derived from its name."""
        if isinstance(arg, basestring):
            return lambda f:self.install_command_func(arg, f)
        name = arg.__name__.lower().replace('_', '-')
        return self.install_command_func(name, arg)
class Request(fenton.app.Request):
    """Synthetic request used by CLI commands (no HTTP involved)."""
    def __init__(self, app, cmd):
        import getpass
        import pytz
        from fenton import security
        self.app = app
        self.cmd = cmd
        self.args = cmd.args
        # The OS account running the command acts as the user.
        self.user = security.SystemUser(getpass.getuser())
        self.environ = os.environ
        # NOTE(review): assumes a Debian-style /etc/timezone; this will
        # raise IOError on other platforms -- confirm deployment targets.
        self.tz = pytz.timezone(open('/etc/timezone').read().strip())
class Command:
    """Base class for sub-commands.

    `suite` is injected by Suite.run_command before parse_args()/run().
    app/request/context are created lazily, each building on the
    previous one (context -> request -> app).
    """
    # Name-mangled caches backing the lazy app/request/context properties.
    __app = __rq = __cx = None
    description = None
    # Sequence of (args, kwargs) pairs fed to OptionParser.add_option.
    parameters = ()
    factory = property(lambda x:x.suite.factory)
    config = property(lambda x:x.suite.config)
    config_file = property(lambda x:x.suite.config_file)
    def __init__(self):
        import optparse
        self.parser = optparse.OptionParser()
        for args, kw in self.parameters:
            self.parser.add_option(*args, **kw)
    @property
    def app(self):
        # Application instance, built once from the suite's factory.
        if self.__app is None:
            self.__app = self.factory(self, self.config)
        return self.__app
    @property
    def request(self):
        if self.__rq is None:
            self.__rq = Request(self.app, self)
        return self.__rq
    @property
    def context(self):
        if self.__cx is None:
            self.__cx = self.request.new_context()
        return self.__cx
    def run(self):
        # Default run: configure logging, then delegate to action().
        self.setup_logging()
        return self.action()
    def parse_args(self, args):
        """Parse command-specific options into self.options/self.args."""
        self.parser.usage = '%%prog [global-options] %s [options]\n%s' % (self.name, self.__doc__)
        if self.description:
            import textwrap
            desc = self.description
            desc = textwrap.dedent(desc)
            self.parser.description = desc
        self.options, self.args = self.parser.parse_args(args)
    def setup_logging(self):
        import logging.config
        # The suite's .ini config doubles as the logging configuration.
        logging.config.fileConfig(self.config_file)
    def get_threadpool(self):
        # Overridden by daemon commands; None means no pool.
        pass
class ShellCommand(Command):
    'Run an interactive python shell'
    name = 'shell'
    def action(self):
        """Drop into an interactive console with the application,
        request and context pre-bound as locals."""
        from fenton import console
        namespace = dict(context=self.context,
                         request=self.request,
                         application=self.app)
        console.interactive(namespace)
class ServerCommand(Command):
    """Shared base for commands that run the WSGI server."""
    def serve(self):
        """Build the WSGI stack and serve it on the configured address
        (defaults: 0.0.0.0:8000); blocks until the server stops."""
        from fenton import web, wsgi, logging
        host = self.config.get('server_addr') or '0.0.0.0'
        port = int(self.config.get('server_port') or 8000)
        application = web.stack(self.app)
        server = wsgi.CherryPyWSGIServer((host, port), application)
        logging.log.info('serving on %s:%s', host, port)
        server.start()
class DevServer(ServerCommand):
    'Run an auto-reloading webserver'
    # Set in the re-executed child's environment so it knows it is the
    # serving worker rather than the supervising reloader.
    environ_key = 'WITH_FENTON_SERVER_RELOADER'
    name = 'dev'
    def run(self):
        from fenton import reloader
        if os.environ.get(self.environ_key):
            # Child: watch sources (plus the config file) and serve.
            self.setup_logging()
            reloader.watch(op.realpath(self.config_file))
            reloader.wait()
            self.serve()
        else:
            # Parent: spawn/respawn the child until it exits with a
            # status other than the reloader code.
            return reloader.run(self.environ_key)
class RestartDaemon(Command):
    'Restart daemon'
    name = 'restart'
    def run(self):
        """Re-invoke this script with 'stop', then with 'start'.

        BUG FIX: only the argv token equal to 'restart' is rewritten.
        The old str.replace() over the whole command line also mangled
        any other argument containing the substring 'restart' (e.g. a
        config path like /etc/restarter.ini).
        """
        def reinvoke(action):
            # Rebuild the command line, swapping just the command word.
            args = [action if a == 'restart' else a for a in sys.argv]
            return os.system('%s %s' % (sys.executable, ' '.join(args)))
        reinvoke('stop')
        return reinvoke('start')
class DaemonFailure(Exception):
    """Startup failed (e.g. a daemon is already running)."""
    pass
class StartDaemon(ServerCommand):
    'Start a daemon'
    name = 'start'
    parameters = (
        (('--nodaemon',),
         dict(dest='daemon',
              default=True,
              action='store_false',
              help='Do not fork')),
        (('--pidfile',),
         dict(dest='pidfile',
              metavar='FILE',
              help='Save PID to file')),
        (('--logfile',),
         dict(dest='logfile',
              metavar='FILE',
              help='Save output to the given log file (redirects stdout)')))
    def run(self):
        """Daemonize (unless --nodaemon), set up the PID file and log
        rotation, then serve until interrupted."""
        import locale
        locale.setlocale(locale.LC_ALL, '')
        # `enc` seeds the log file's encoding (may be None -> 'ASCII').
        lang, enc = locale.getlocale()
        # Command-line option wins over the config file.
        pidfile = self.options.pidfile or self.config.get('pidfile')
        if pidfile:
            # Touch the file up front so permission errors fail fast,
            # before any fork.
            with open(pidfile, 'a'):
                pass
        logfile = self.options.logfile or self.config.get('logfile')
        logger = None
        if logfile:
            with open(logfile, 'a'):
                pass
            from fenton import util
            maxsize = self.config.get('logfile.maxsize') or '0'
            maxsize = maxsize and util.bytesize(maxsize)
            logger = RotatingLogFile(logfile, 'a', enc)
            if maxsize:
                # Rotate eagerly at startup if already over the limit.
                logger.rotate(maxsize)
        if self.options.daemon:
            try:
                self._fork(pidfile)
            except DaemonFailure, e:
                print str(e)
                return
        # Only the final (grand-child) process reaches this point when
        # daemonized, so the PID written here is the daemon's.
        if pidfile:
            _write_pid(pidfile)
        if logger:
            sys.stdout = sys.stderr = logger
        self.setup_logging()
        try:
            return self.serve()
        except (SystemExit, KeyboardInterrupt), e:
            if str(e):
                msg = ' ' + str(e)
            else:
                msg = ''
            print 'Exiting%s (-v to see traceback)' % msg
    def get_threadpool(self):
        from paste.httpserver import ThreadPool
        return ThreadPool(5)
    def _fork(self, pidfile):
        """Classic double-fork daemonization with fd cleanup.

        Raises DaemonFailure when a live daemon already owns `pidfile`.
        """
        pid = _read_pid(pidfile)
        if pid:
            msg = 'Daemon is already running (PID: %s from PID file %s)'
            raise DaemonFailure(msg % (pid, pidfile))
        pid = os.fork()
        if pid:
            # The forked process also has a handle on resources, so we
            # *don't* want proper termination of the process, we just
            # want to exit quick (which os._exit() does)
            os._exit(0)
        # Make this the session leader
        os.setsid()
        # Fork again for good measure!
        pid = os.fork()
        if pid:
            os._exit(0)
        import resource
        maxfd = resource.getrlimit(resource.RLIMIT_NOFILE)[1]
        if maxfd == resource.RLIM_INFINITY:
            maxfd = MAXFD
        # close all file descriptors
        for fd in range(0, maxfd):
            try:
                os.close(fd)
            except OSError: # fd wasn't open to begin with (ignored)
                pass
        if (hasattr(os, 'devnull')):
            REDIRECT_TO = os.devnull
        else:
            REDIRECT_TO = '/dev/null'
        os.open(REDIRECT_TO, os.O_RDWR) # stdin
        # Duplicate stdin to stdout and stderr
        os.dup2(0, 1) # stdout
        os.dup2(0, 2) # stderr
class StopDaemon(Command):
'Stop a running daemon'
name = 'stop'
parameters = (
(('--pidfile',),
dict(dest='pidfile',
metavar='FILE',
help='Save PID to file')),)
def run(self):
pidfile = self.config.get('pidfile') or self.options.pidfile
result = _stop_daemon(pidfile)
if result:
print 'Could not stop daemon'
return result
class RotatingLogFile:
    """File-like sink (used as the daemon's stdout/stderr) supporting
    explicit size-based rotation via rotate().

    Thread-safe: open/rotate are guarded by a lock, double-checked in
    open() to keep the hot write path lock-free.
    """
    # strftime pattern appended to the filename on rotation.
    suffix = '%Y%m%d'
    def __init__(self, filename, mode='w', encoding=None):
        import threading
        self.lock = threading.Lock()
        self.filename = filename
        self.mode = mode
        self.encoding = encoding or 'ASCII'
        # Underlying file object, opened lazily.
        self.fd = None
    def open(self):
        # Lazily open the underlying file exactly once (double-checked).
        if self.fd is None:
            with self.lock:
                if self.fd is None:
                    self.fd = open(self.filename, self.mode)
        return self.fd
    def write(self, text):
        """Write `text` (encoding unicode) and flush immediately."""
        f = self.open()
        if isinstance(text, unicode):
            text = text.encode(self.encoding)
        f.write(text)
        f.flush()
    def writelines(self, text):
        f = self.open()
        f.writelines(text)
        f.flush()
    def flush(self):
        self.open().flush()
    def rotate(self, size):
        """Rename the log to filename.YYYYMMDD once it exceeds `size`
        bytes; a fresh file is opened on the next write.

        At most one rotation per day: if today's rotated name already
        exists, nothing happens.
        """
        if not os.path.exists(self.filename):
            return
        stat = os.stat(self.filename)
        if stat.st_size < size:
            return
        t = time.localtime()
        newfile = self.filename + "." + time.strftime(self.suffix, t)
        if os.path.exists(newfile):
            return
        with self.lock:
            if self.fd is not None:
                self.fd.close()
                self.fd = None
            os.rename(self.filename, newfile)
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import sys
import logging
DEBUG0 = logging.DEBUG
DEBUG1 = DEBUG0+1
DEBUG2 = DEBUG0+2
DEBUG3 = DEBUG0+3
logging.addLevelName(DEBUG0, 'DEBUG0')
logging.addLevelName(DEBUG1, 'DEBUG1')
logging.addLevelName(DEBUG2, 'DEBUG2')
logging.addLevelName(DEBUG3, 'DEBUG3')
getLogger = logging.getLogger
def color_detect(*args, **kw):
    """Choose a log formatter: colorized per-thread output when the
    console supports color, a plain logging.Formatter otherwise."""
    from fenton import console
    plain = logging.Formatter(
        '%(asctime)s %(levelname)-5.5s [%(name)s] %(message)s')
    pretty = PrettyThreadFormatter(*args, **kw)
    if console.colors.capable():
        return pretty
    return plain
def _calling_frame(offset=0):
    # Module name of the frame `offset` levels above the caller of the
    # public logging helper.  NOTE(review): the fixed +3 skips
    # _calling_frame, _getlogger and one wrapper frame -- do not insert
    # intermediate calls without adjusting it.
    return sys._getframe(offset+3).f_globals.get('__name__', None)
def _getlogger(offset=0):
    # Logger named after the calling module (frame-based lookup).
    return getLogger(_calling_frame(offset))
@apply
def debugcolor():
    # `apply` runs the def immediately: the module-level name
    # `debugcolor` is bound to the inner closure f, a counter cycling
    # 1, 2, 3, 0, ... used to vary the DEBUG level (and hence color)
    # from call to call.
    d = [0]
    def f():
        d[0] = (d[0] + 1) % 4
        return d[0]
    return f
def log_exception(msg=None):
    """Log the active exception with a colorized traceback.

    Exceptions carrying a true `_logged` attribute (set elsewhere) are
    skipped to avoid duplicate reports.
    """
    E, e, tb = sys.exc_info()
    if getattr(e, '_logged', False):
        return
    # offset=1: attribute the record to our caller's module.
    log = _getlogger(1)
    prefix = '%s\n' % (msg or '')
    from fenton.util import color_tb
    trace = ''.join(color_tb(tb))
    if e is None:
        postfix = E
    else:
        postfix = '%s: %s' % (E.__name__, e)
    msg = '%s%s%s\n' % (prefix, trace, postfix)
    log.error(msg)
def log_stack(msg=None, limit=None):
    """Log the current call stack (colorized), at most `limit` frames."""
    from fenton.util import color_stack
    log = _getlogger()
    prefix = '%s\n' % (msg or '')
    # Start at _getframe(2) so the helper's own frames are excluded.
    trace = ''.join(color_stack(f=sys._getframe(2), limit=limit))
    msg = '%s%s\n' % (prefix, trace)
    log.error(msg)
class PrettyFormatter(logging.Formatter):
    """Console log formatter that colorizes the logger name per level.

    Subclasses may set _format_prefix(record) to prepend extra text.
    """
    _format_prefix = None
    # Lazily-built level -> colorizer map (see the `colors` property).
    _colors = None
    def formatTime(self, rec, datefmt=None):
        """Render the record time as HH:MM:SS.mmm.

        BUG FIX: accept (and ignore) `datefmt` so the signature stays
        compatible with logging.Formatter.formatTime, which the base
        class invokes as formatTime(record, datefmt); the old two-arg
        override raised TypeError whenever that path was hit.
        """
        import time
        t = time.localtime(rec.created)
        return '%s.%03d' % (time.strftime('%H:%M:%S', t), rec.msecs)
    def format(self, rec):
        # Colorize only the logger name; the message itself stays plain.
        colorize = self.colors.get(rec.levelno)
        msg = '%s %s' % (colorize(rec.name), rec.getMessage())
        prefix = self._format_prefix and self._format_prefix(rec) or ''
        return prefix + msg
    @property
    def colors(self):
        # Built lazily so importing this module doesn't pull in the
        # console/color machinery.
        if self._colors is None:
            from fenton import console
            self._colors = {
                logging.CRITICAL: console.colors.LightRed,
                logging.ERROR: console.colors.Yellow,
                logging.WARNING: console.colors.Brown,
                logging.INFO: console.colors.LightGreen,
                logging.DEBUG: console.colors.LightBlue,
                DEBUG1: console.colors.LightPurple,
                DEBUG2: console.colors.Purple,
                DEBUG3: console.colors.Blue,
                logging.NOTSET: console.colors.White
            }
        return self._colors
class PrettyThreadFormatter(PrettyFormatter):
    """PrettyFormatter that prefixes each record with the current thread
    id, colorized per thread so interleaved output from different
    threads is visually separable."""
    def _format_prefix(self, rec):
        import thread
        from fenton import console
        from fenton.util import byteswap, uint
        # uint() normalizes the (possibly negative) thread id;
        # byteswap(id) seeds the color choice -- presumably so nearby
        # ids get distinct colors; verify against fenton.util.
        id = uint(thread.get_ident())
        return '%s ' % console.colors.random('%x'%id, byteswap(id))
class LazyLogger:
    """Module-level logger proxy: every call resolves a logger named
    after the *calling* module via frame inspection, so this single
    shared instance serves all modules."""
    def __getattr__(self, attr):
        # Any other logging method is delegated to the caller's logger.
        return getattr(_getlogger(), attr)
    def exception(self, msg=None):
        log_exception(msg)
    def stack(self, msg=None, limit=None):
        log_stack(msg, limit)
    def debug(self, *args, **kw):
        # Rotate through DEBUG0..DEBUG3 so consecutive debug lines get
        # different colors in the pretty formatter.
        _getlogger().log(DEBUG0 + debugcolor(), *args, **kw)
    def caller(self, msg, *args, **kw):
        """Debug-log `msg` prefixed with the caller's file:line
        (`level` keyword selects how far up the stack to look)."""
        from fenton.util import getcaller
        level = kw.pop('level', 1)
        scope = getcaller(level)
        msg = ('[%s:%s] ' % (scope['file'], scope['line'])) + msg
        _getlogger().log(DEBUG0, msg, *args, **kw)
# Shared module-level instance used as `from fenton.logging import log`.
log = LazyLogger()
| Python |
__metaclass__ = type
import os
import sys
import time
import signal
import threading
import subprocess
RELOADER_CODE = 125
def watch(f):
    """Add `f` (absolutized) to the reloader's watch list."""
    path = os.path.abspath(f)
    Watcher.extra.append(path)
def wait():
    """Spawn the watcher thread; the process later exits with
    RELOADER_CODE when any watched file changes."""
    watcher = Watcher()
    watcher.start()
def run(key):
    """Supervisor loop: re-exec this script with `key` set in the child
    environment, restarting the child whenever it exits with
    RELOADER_CODE and propagating any other exit status."""
    exe = sys.executable
    args = [exe] + sys.argv
    while True:
        proc = None
        environ = os.environ.copy()
        # The child detects this variable and serves instead of
        # supervising (see DevServer.run).
        environ[key] = 'true'
        try:
            signal.signal(signal.SIGTERM, _handle_sigterm)
            proc = subprocess.Popen(args, env=environ)
            status = proc.wait()
            # Child finished normally; nothing left to kill in finally.
            proc = None
        except KeyboardInterrupt:
            sys.stderr.write('\nStopping\n')
            sys.stderr.flush()
            return 1
        finally:
            # If we are unwinding while the child is still running,
            # forward SIGTERM to it.
            if proc is not None and hasattr(os, 'kill'):
                try:
                    os.kill(proc.pid, signal.SIGTERM)
                except (OSError, IOError):
                    pass
        if status != RELOADER_CODE:
            return status
def _handle_sigterm(signo, frame):
    # Translate SIGTERM into SystemExit so finally blocks run.
    raise SystemExit
class Watcher(threading.Thread):
    """Daemon thread that polls loaded module files (plus `extra`) for
    modification and terminates the process with RELOADER_CODE when any
    of them changes, so the supervising loop restarts it."""
    # Additional absolute paths to monitor (see watch()).
    extra = []
    def _set_daemon(self):
        # Pre-2.6 threading API hook: run as a daemon thread.
        return True
    def run(self, interval=1):
        self.mtimes = {}
        self.changed = None
        while self._check():
            time.sleep(interval)
        sys.stderr.write('\n%s changed\n' % self.changed)
        os._exit(RELOADER_CODE)
    def _files(self):
        """Yield each loaded module's source path, then the extras."""
        for module in sys.modules.values():
            path = getattr(module, '__file__', None)
            if not path:
                continue
            if path.endswith('.pyc'):
                path = path[:-1]
            yield path
        for path in self.extra:
            yield path
    def _check(self):
        """Record first-seen mtimes; return False once any file has a
        newer mtime than previously recorded."""
        for path in self._files():
            try:
                st = os.stat(path)
                mtime = st and st.st_mtime or 0
            except (OSError, IOError):
                continue
            known = self.mtimes.get(path)
            if known is None:
                self.mtimes[path] = mtime
            elif known < mtime:
                self.changed = path
                return False
        return True
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
from fenton import data
from fenton import view
from fenton import util
from fenton import types
from fenton import getmeta
from fenton import widgets
from fenton import security
def perms(default=None, debug=False, **kw):
    """Build a permission map covering the standard operations, with
    `default` as the fallback privilege and keyword overrides winning.
    `debug` is accepted for backward compatibility and unused here.
    """
    base = {}
    for operation in ('list', 'view', 'edit', 'clone', 'create', 'delete'):
        base[operation] = default
    return util.newdict(base, kw)
def public(**kw):
    """Permission map whose 'view' operation is open to the public."""
    return perms(view=security.PUBLIC, **kw)
def prepare_static(cx):
    'compress css/js'
    import subprocess, os
    # Candidate static dirs across all source trees; first hit wins.
    dirs = [os.path.join(d, 'static') for d in cx.app.source_dirs()]
    compr = cx.app.config['compressor.jar']
    outpath = cx.app.config['compress.out']
    ### javascript: pipe every widget js file through the compressor
    ### jar in a single pass.
    out = 'pack-%s.js' % cx.app.static_version
    out = os.path.join(outpath, 'js', out)
    p = subprocess.Popen(['java', '-jar', compr, '--type', 'js', '-o', out],
                         stdin=subprocess.PIPE,
                         stdout=subprocess.PIPE)
    for fn in widgets.get_all_js():
        for d in dirs:
            fp = os.path.join(d, fn)
            if not os.path.exists(fp):
                continue
            with open(fp) as f:
                for line in f:
                    # '/*!' comments survive compression; drop the bang
                    # so they get stripped too.
                    line = line.replace('/*!', '/*')
                    p.stdin.write(line)
                p.stdin.write('\n')
    p.communicate()
    p.wait()
    ### css: whitespace-collapsing concatenation.
    out = 'pack-%s.css' % cx.app.static_version
    out = os.path.join(outpath, 'css', out)
    import re
    sre = re.compile('\s+')
    with open(out, 'wb') as fout:
        for fn in widgets.get_all_css():
            for d in dirs:
                fp = os.path.join(d, fn)
                if not os.path.exists(fp):
                    continue
                # BUG FIX: the css inputs were opened without ever being
                # closed (leaked descriptors); use a context manager as
                # the js pass above already does.
                with open(fp) as f:
                    fout.write(sre.sub(' ', f.read()))
                fout.write(' ')
    return
    # for future upgrades method -- intentionally unreachable for now
    from fenton import web
    r = web.mako_renderer(cx.app)
    for fn in widgets.ALL_MAKO:
        path = '/%s.mako' % fn.replace('.', '/')
        r.get_template(path)
class External(view.Screen):
    """Pseudo-screen that immediately redirects the browser to `url`."""
    priv = True
    bindings = {}
    has_error = False
    def __init__(self, cx, url, label=None):
        self.context = cx
        assert url
        self.url = url
        self.label = label
    def get_url(self):
        """Target of the redirect."""
        return self.url
    def get_title(self):
        """Optional human-readable label."""
        return self.label
    def render(self):
        # Rendering this screen is simply an HTTP redirect.
        self.request.redirect(self.url)
class LoginScreen(view.FormDialog):
    """Login dialog: username/password with Login and Help actions."""
    # One-shot failure message rendered next to the buttons.
    login_message = None
    bind_update = None
    def build_dialog_buttons(self, panel):
        widgets.submit_button(panel,
                              type='primary',
                              label='Login',
                              id=self.bind(self.do_login))
        widgets.submit_button(panel,
                              type='secondary',
                              label='Help',
                              id=self.bind(self.do_help))
        widgets.message_button(panel, text=self.login_message)
    def get_url(self):
        """Screen URL, carrying the post-login destination as a
        url=... query parameter when one is set on the model."""
        path = 'system/login'
        url = self.request.app_url(path)
        if self.model.url:
            from fenton import form
            from urllib import quote_plus as q
            from urlparse import urlsplit, urlunsplit
            args = form.encode({'url': self.model.url})
            qs = '&'.join('%s=%s'%(k,q(v)) for (k, v) in args.items() if v)
            scheme, host, path, _, frag = urlsplit(url)
            url = urlunsplit((scheme, host, path, qs, frag))
        return url
    def do_login(self):
        """Validate the form, authenticate, then advance or show
        'Failed'."""
        self.update()
        if not self.has_error:
            ok = self.model.authenticate()
            if ok:
                return self.login_ok(ok)
            self.login_message = 'Failed'
    def login_ok(self, ok):
        # Either show the landing screen for the ok-model, or bounce
        # straight to the originally requested URL.
        if ok.show_landing():
            vs = getmeta(ok).view_screen
            self.replace(vs(self.context, ok, self.priv, None))
        else:
            self.replace(External(self.context, self.model.url))
    def do_help(self):
        obj = self.model.get_help()
        vs = getmeta(obj).view_screen
        self.replace(vs(self.context, obj, self.priv, self))
class LoginOkScreen(view.CustomDialog):
    """Post-login landing dialog: continue, plus optional change-password
    / switch-user actions when the model allows them."""
    screen_title = 'DEFAULT LOGIN OK'
    def build_dialog_buttons(self, panel):
        widgets.submit_button(panel,
                              type='primary',
                              label='Continue',
                              id=self.bind(self.do_continue))
        if self.model.allow_chpasswd():
            widgets.submit_button(panel,
                                  type='secondary',
                                  label='Change password',
                                  id=self.bind(self.go_chpasswd))
        if self.model.allow_switch():
            widgets.submit_button(panel,
                                  type='secondary',
                                  label='Switch user',
                                  id=self.bind(self.go_switch_user))
    def do_continue(self):
        # Redirect to the URL the user originally asked for.
        self.replace(External(self.context, self.model.url))
    def go_chpasswd(self):
        # Hand off to the change-password screen, passing the return URL
        # as the query string.
        url = '?'.join(map(self.request.absolute_url,
                           ('system/chpasswd', self.model.url)))
        self.request.redirect(url)
    def go_switch_user(self):
        obj = self.model.get_switcher()
        vs = getmeta(obj).view_screen
        self.replace(vs(self.context, obj, True, self))
class LoginHelpScreen(view.CustomDialog):
    """Help dialog reachable from the login screen."""
    registrable = False
    def build_dialog_buttons(self, panel):
        widgets.submit_button(panel,
                              type='primary',
                              label='Back to login',
                              id=self.bind(self.pop))
        if self.model.allow_chpasswd():
            widgets.submit_button(panel,
                                  type='secondary',
                                  label='Change password',
                                  id=self.bind(self.to_chpasswd))
        else:
            widgets.text(panel, 'Password change not available')
    def to_chpasswd(self):
        abs = self.request.absolute_url
        url = abs('system/chpasswd') + '?' + abs(self.model.url)
        self.replace(External(self.context, url))
class LogoutScreen(view.Dialog):
    """Logout confirmation dialog."""
    screen_title = 'DEFAULT LOGOUT'
    def build_dialog(self, sheet):
        widgets.screen_widget(sheet, template_file=self.template_file)
    def build_dialog_buttons(self, panel):
        widgets.submit_button(panel,
                              type='primary',
                              label='Logout',
                              id=self.bind(self.do_logout))
        widgets.submit_button(panel,
                              type='secondary',
                              label='Cancel',
                              id=self.bind(self.do_cancel))
    def do_logout(self):
        ok = self.model.logout()
        vs = getmeta(ok).view_screen
        self.replace(vs(self.context, ok, self.priv))
    def do_cancel(self):
        # Abort: bounce back to where the user came from.
        self.replace(External(self.context, self.model.url))
class LogoutOkScreen(view.Dialog):
    """Post-logout confirmation; heartbeat polling disabled (the
    session has ended -- presumably; confirm against view.Dialog)."""
    heartbeat = False
    def build_dialog(self, sheet):
        widgets.screen_widget(sheet, template_file=self.template_file)
class ChpasswdForm(view.FormDialog):
    """Change-password dialog with inline error reporting."""
    screen_title = 'Change password'
    path = 'system/chpasswd'
    # One-shot error message; cleared after each render.
    error = None
    bind_update = None
    def build_dialog(self, sheet):
        msg = '''
        The password is case-sensitive, and may
        include letters, digits, spaces, and
        punctuation.
        '''
        widgets.screen_title(sheet, title='Change password')
        widgets.textblock(sheet, msg)
        widgets.textblock(sheet, 'It must be at least 8 characters.')
        widgets.model_form(sheet, groups=self.get_form().groups)
    def build_dialog_buttons(self, panel):
        widgets.submit_button(panel,
                              type='primary',
                              label='Change password',
                              id=self.bind(self.do_chpasswd))
        widgets.submit_button(panel,
                              type='secondary',
                              label='Cancel',
                              id=self.bind(self.do_cancel))
        widgets.message_button(panel, text=self.error)
        self.error = None
    def do_chpasswd(self):
        """Validate the form, attempt the change, and map each failure
        class to a user-facing message (details go to the log)."""
        self.update()
        if self.has_error:
            return
        from fenton.ext import directory
        from fenton import logging
        try:
            self.model.chpasswd()
        except security.PoorPassword, e:
            logging.log.error('chpasswd: %s', e)
            self.error = e.args and e.args[0] or 'New password is too simple'
        except (security.LoginFailed, directory.AccountLocked), e:
            # NOTE(review): one generic message for both failure kinds --
            # presumably to avoid leaking account state.
            self.error = 'Authentication failed'
            logging.log.error('chpasswd: %s', e)
        except Exception, e:
            logging.log.exception()
            self.error = 'Server error: please contact administrator'
            logging.log.error('chpasswd: server error: %s', e)
        else:
            logging.log.info('chpasswd: changed password for user %s', self.model.username)
            ok = self.model.get_ok()
            vs = getmeta(ok).view_screen
            self.replace(vs(self.context, ok, self.priv, self))
    def do_cancel(self):
        self.replace(External(self.context, self.model.url))
class ChpasswdOkScreen(view.Dialog):
    """Confirmation dialog after a successful password change."""
    screen_title = 'Password changed'
    def build_dialog(self, panel):
        msg = '''
        Your password has been changed
        '''
        widgets.textblock(panel, msg)
    def build_dialog_buttons(self, panel):
        widgets.submit_button(panel,
                              type='primary',
                              label='Continue',
                              id=self.bind(self.do_continue))
    def do_continue(self):
        self.replace(External(self.context, self.model.url))
class SwitchUserForm(view.FormDialog):
    """Privileged dialog for assuming another user's identity."""
    screen_title = 'Switch user'
    bind_update = None
    # One-shot failure message; cleared after each render.
    error = None
    def build_dialog_buttons(self, panel):
        widgets.message_button(panel, text=self.error)
        widgets.submit_button(panel,
                              id=self.bind(self.do_switch),
                              label='Switch user',
                              type='primary')
        widgets.submit_button(panel,
                              id=self.bind(self.do_cancel),
                              label='Cancel',
                              type='secondary')
        self.error = None
    def do_switch(self):
        """Validate the form and attempt the identity switch."""
        self.update()
        if self.has_error:
            return
        result = self.model.switch_user()
        if not result:
            self.error = 'Failed'
            return
        screen = getmeta(result).view_screen
        self.replace(screen(self.context, result, self.priv, None))
    def do_cancel(self):
        """Abandon the switch and return to the calling URL."""
        self.replace(External(self.context, self.model.url))
class Login(data.Transient):
    """Login form model: username/password plus the post-login URL."""
    __classid__ = None
    __permissions__ = public()
    __fields__ = ('username', 'password')
    __view__ = LoginScreen
    url = types.String()
    username = types.String(widget_config={'keypress_triggers': False})
    password = types.MaskedString(widget_config={'keypress_triggers': False})
    def authenticate(self):
        """Attempt authentication; return an ok-model or None."""
        try:
            self._cx.authenticate(self.username, self.password.encode('utf-8'))
        except security.LoginFailed, e:
            return None
        return self.get_ok()
    def get_ok(self):
        # Subclasses must override; the unreachable return documents
        # the intended implementation shape.
        raise NotImplementedError
        return self._cx.get(LoginOk, url=self.url)
    def get_help(self):
        return self._cx.get(LoginHelp, url=self.url)
    def after_load_event(self):
        """Determine the post-login destination and short-circuit if the
        user is already authenticated."""
        rq = self._rq
        if rq.vars.url:
            self.url = rq.vars.url
        else:
            # Fall back to a same-host, non-internal ('/_') referer,
            # then the app home page.
            ref = rq.referer
            ref = ref and rq.host in ref and '/_' not in ref and ref
            self.url = ref or self._cx.app.home_url or '/'
        try:
            self._cx.user.verify_authenticated(self._cx)
        except security.NotAuthenticated:
            self._cx.unauthenticate()
        else:
            # Already logged in: skip the login form entirely.
            rq.redirect(self.url)
class LoginOk(data.Transient):
    """Successful-login marker model; subclasses enable extras via the
    allow_*/show_landing hooks."""
    __classid__ = None
    __permissions__ = perms(view=security.AUTHENTICATED)
    __view__ = LoginOkScreen
    url = None
    def __str__(self):
        return 'Login OK'
    def allow_chpasswd(self):
        # Offer a change-password button? (default: no)
        return False
    def allow_switch(self):
        # Offer a switch-user button? (default: no)
        return False
    def show_landing(self):
        # Show the landing dialog instead of redirecting? (default: no)
        return False
    def get_switcher(self):
        # Subclasses must override; the unreachable return documents
        # the intended implementation shape.
        raise NotImplementedError
        return self._cx.get(SwitchUser, url=self.url)
class LoginHelp(data.Transient):
    """Backing model for the login help screen."""
    __classid__ = None
    __permissions__ = public()
    __view__ = LoginHelpScreen
    url = types.String()
    def allow_chpasswd(self):
        # NOTE(review): returns None (falsy) where LoginOk returns
        # False; callers only truth-test it, so this looks intentional.
        return
class Logout(data.Transient):
    """Logout confirmation model."""
    __classid__ = None
    __view__ = LogoutScreen
    __permissions__ = public()
    def after_load_event(self):
        # NOTE(review): uses self._rq.user / self._rq.app here where
        # Login uses the self._cx equivalents -- confirm both expose
        # the same attributes.
        try:
            self._rq.user.verify_authenticated(self._cx)
        except security.NotAuthenticated:
            # Nothing to log out of: go home.
            self._rq.redirect(self._cx.app.home_url)
        else:
            self.url = self._rq.referer or self._rq.app.home_url
    def logout(self):
        self._cx.unauthenticate()
        return self.get_ok()
    def get_ok(self):
        return self._cx.get(LogoutOk)
class LogoutOk(data.Transient):
    """Post-logout confirmation model."""
    __classid__ = None
    __permissions__ = public()
class ChpasswdOk(data.Transient):
    """Post-password-change confirmation model."""
    __classid__ = None
    __permissions__ = public()
    __view__ = ChpasswdOkScreen
    url = types.String()
class Chpasswd(data.Transient):
    """Change-password form model backed by the LDAP directory."""
    __classid__ = None
    __view__ = ChpasswdForm
    __permissions__ = public()
    __fields__ = (
        'username',
        'pw0',
        'pw1',
        'pw2',
        )
    url = types.String()
    username = types.String()
    pw0 = types.MaskedString(label='Current/old password')
    pw1 = types.MaskedString(label='New password')
    pw2 = types.MaskedString(label='Confirm new password')
    def chpasswd(self):
        """Apply the password change.

        Raises security.NoUser, security.PoorPassword (mismatch or too
        short), or whatever change_password itself raises.
        """
        ldap_user = self._cx.app.auth.get_user(self.username)
        if ldap_user is None:
            raise security.NoUser(self.username)
        pw0, pw1, pw2 = self.pw0, self.pw1, self.pw2
        if not security.streq(pw1, pw2):
            raise security.PoorPassword('Passwords do not match')
        if len(pw1) < 8:
            raise security.PoorPassword('New password is too short')
        ldap_user.change_password(pw0, pw1)
        # Keep the crowd mirror in sync with the new credentials.
        from fenton.ext import crowd
        crowd.get_synchronized_user(self._cx, ldap_user, pw1)
    def get_ok(self):
        return self._cx.get(ChpasswdOk, url=self.url)
class SwitchUser(data.Transient):
    """Switch-identity form model (restricted permission)."""
    __classid__ = None
    __fields__ = ('username',)
    __permissions__ = perms(view=security.RESTRICTED)
    __view__ = SwitchUserForm
    username = types.String()
    url = types.String()
    def switch_user(self):
        # Returns the ok-model on success, None otherwise.
        if self._cx.switch_user(self.username):
            return self.get_ok()
    def get_ok(self):
        # Subclasses must override; the unreachable return documents
        # the intended implementation shape.
        raise NotImplementedError
        return self._cx.get(LoginOk, url=self.url)
class Ping(data.Transient):
    """Public health-check endpoint rendering plain-text 'OK'."""
    __permissions__ = public()
    __view__ = view.Plain
    content_type = 'text/plain'
    def __str__(self):
        return 'OK'
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import sqlalchemy.orm as orm
from fenton import util
from fenton import view
from fenton import types
from fenton import logging
def compile_all():
    """Compile every registered meta, then register them, then compile
    the accumulated SQLAlchemy mappers.  Two separate passes so all
    metas are fully compiled before any registration happens."""
    metas = AbstractMeta.all_metas
    for meta in metas:
        meta.compile()
    for meta in metas:
        meta.register()
    orm.compile_mappers()
def create_class(name=None, bases=None, metaclass=None, **kw):
    """Create and return a new class from keyword attributes.

    Defaults: name '<dynamic base class>', bases (object,), metaclass
    type.  The chosen metaclass is also stored on the class as
    __metaclass__ (old-style convention) and __module__ is forced to
    '<dynamic>' so the class is not attributed to this module.
    """
    if name is None:
        name = '<dynamic base class>'
    if bases is None:
        bases = (object,)
    if metaclass is None:
        metaclass = type
    attrs = dict(kw, __metaclass__=metaclass)
    cls = metaclass(name, bases, attrs)
    cls.__module__ = '<dynamic>'
    return cls
def create_base_class(metainit,
                      metaclass=type,
                      classfactory=create_class,
                      **kw):
    """Create a base class whose subclasses automatically invoke
    `metainit(subclass)` when they are defined.

    Implemented by wrapping `metaclass` in a submeta whose __init__
    fires metainit for every class created through it -- except the
    base class itself.
    """
    base_class = None
    class submeta(metaclass):
        supermeta = metaclass
        def __init__(self, name, bases, attrs):
            metaclass.__init__(self, name, bases, attrs)
            # base_class is still None while the base itself is being
            # constructed; only genuine subclasses trigger metainit.
            if base_class and self is not base_class:
                metainit(self)
    kw['metaclass'] = submeta
    base_class = classfactory(**kw)
    return base_class
def _idlist(objs):
    """Flatten `objs` into a list of primitive key values: objects that
    carry a meta are expanded (recursively) into their key attribute
    values; anything else is kept as-is."""
    out = []
    for obj in objs:
        meta = getmeta(obj, False)
        if meta:
            out.extend(_idlist(getattr(obj, k) for k in meta.key))
        else:
            out.append(obj)
    return out
class IncorrectArgs(types.Invalid):
    """Validation error: wrong number/shape of identifier arguments."""
    pass
class NoInstance(types.Invalid):
    """Validation error: no instance matches the given identifier."""
    def get_message(self):
        return 'No instance: %s' % (self.input)
class Registry:
    """Path registry mapping separator-joined paths to objects.

    Lookup is longest-prefix ("greedy"): get('/a/b/c') returns the
    object registered at the deepest matching prefix together with the
    unmatched remainder.
    """
    class KeyExists(Exception):
        'Attempt to register an existing key'
    def __init__(self, sep='/'):
        self.__items = {}
        self.__sep = sep
    def __contains__(self, path):
        return path in self.__items
    def set(self, path, obj, force=False):
        """Register `obj` at the normalized path (also stamped onto
        obj.path).  Raises KeyExists on duplicates unless `force`, in
        which case the re-registration is logged and allowed."""
        normalized = self.join(self.split(path))
        obj.path = normalized
        if normalized in self.__items:
            if not force:
                raise self.KeyExists(normalized)
            logging.log.warn("Re-registration: '%s'" % normalized)
        self.__items[normalized] = obj
        return obj
    def get(self, path):
        """Return (object, rest) for the longest registered prefix of
        `path`; raise KeyError when nothing matches."""
        if not path:
            path = self.__sep
        parts = self.split(path)
        # Walk from the full path down to its first segment so the
        # deepest registration wins.
        for end in range(len(parts), 0, -1):
            key = self.join(parts[:end])
            if key in self.__items:
                return self.__items[key], self.join(parts[end:])
        raise KeyError(path)
    def split(self, path):
        """Split on the separator, dropping a leading empty segment."""
        parts = path.split(self.__sep)
        if parts and not parts[0]:
            del parts[0]
        return parts
    def join(self, parts):
        return self.__sep.join(parts)
    def register(self, meta, *path):
        """Convenience wrapper: set(join(path), meta)."""
        self.set(self.join(path), meta)
    def __repr__(self):
        keys = sorted(self.__items.keys())
        return '<Registry (%s)>' % ', '.join('%r' % k for k in keys)
class AbstractMeta:
    """Per-model metadata: attributes, permissions, hierarchy, screens.

    One instance is attached to each model class (under a private
    attribute) and collected in the class-level registries below.
    """
    model_class = None
    fields = None
    privs = None
    # Global registries shared by all metas.
    all_metas = []
    meta_map = {}
    class_map = {}
    # Private attribute names used to stash context/meta on instances.
    __context_attr = '____fenton_model_context____'
    __meta_attr = '____fenton_model_meta____'
    def __init__(self, model_class):
        # Attach this meta to the model class and index it globally.
        setattr(model_class, self.__meta_attr, self)
        self.__children = []
        self.__parents = ()
        self.model_class = model_class
        self.all_metas.append(self)
        self.class_map[model_class.__name__] = self
        # NOTE(review): `classid` is not defined in this part of the
        # class -- presumably provided by subclasses; confirm.
        if self.classid:
            self.meta_map[self.classid] = self
    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__,
                             util.classname(self.model_class))
    @classmethod
    def getmeta(Class, obj, throw=True):
        """Return the meta for `obj` (meta instance passes through).

        With throw=False, returns None instead of propagating the
        AttributeError for objects that carry no meta.
        """
        if isinstance(obj, AbstractMeta):
            return obj
        try:
            return getattr(obj, Class.__meta_attr)
        except AttributeError:
            if throw:
                raise
            return None
    # decorator
    @classmethod
    def declare(Class, **kw):
        """Class-decorator form of create_base."""
        def classdecorator(newclass):
            return Class.create_base(bases=(newclass,), **kw)
        return classdecorator
    @classmethod
    def create_base(Class, **kw):
        """Create a base class whose subclasses automatically receive a
        meta of this Class (see create_base_class)."""
        if 'bases' not in kw:
            kw['bases'] = (Class.base_class,)
        factory = kw.pop('classfactory', Class.base_class_factory)
        metaclass = kw.pop('metaclass', None) or Class.metaclass
        return create_base_class(Class,
                                 classfactory=factory,
                                 metaclass=metaclass,
                                 **kw)
def compile(self):
mc = self.model_class
### compile permissions
self.privs = getattr(mc, '__permissions__', {})
### compile model key
key = getattr(mc, '__key__', None) or ()
if not isinstance(key, (tuple, list)):
key = (key,)
self.key = key
### compile model attributes
attrs = self.attributes = {}
for c in reversed(mc.__mro__):
attrs.update(getattr(c, '__attrs__', {}))
for k, t in attrs.iteritems():
t.set_owner(self, k)
ischild = False
### compile hierarchy
parents = mc.__dict__.get('__parent__', None)
if isinstance(parents, type(lambda:0)):
parents = parents()
if not isinstance(parents, tuple):
parents = parents and (parents,) or ()
self.__parents = parents
for parent in parents:
if isinstance(parent, basestring):
parent = self.attributes.get(parent)
parent = parent and parent.meta
#ischild = True
elif isinstance(parent, type):
parent = getmeta(parent)
if parent and isinstance(parent, AbstractMeta):
parent.add_child(self)
### compile form
self.fields = getattr(mc, '__fields__', None) or ()
for a, b, fs in self.fields:
for f in fs:
if f not in self.attributes:
raise TypeError('Missing attr %s in %s' % (f, mc))
# compile screens
view_screen = view.ViewScreen
if ischild:
edit_screen = view.SubEditScreen
create_screen = view.SubCreateScreen
else:
edit_screen = view.EditScreen
create_screen = view.CreateScreen
self.view_screen = getattr(mc, '__view__', view_screen)
self.edit_screen = getattr(mc, '__edit__', edit_screen)
self.create_screen = getattr(mc, '__create__', create_screen)
def register(self):
path = self.get_class_path()
if path and path not in REGISTRY:
REGISTRY.register(self, path)
def add_child(self, child):
if child not in self.__children:
self.__children.append(child)
def get_context(self, obj):
return getattr(obj, self.__context_attr)
def set_context(self, context, obj):
setattr(obj, self.__context_attr, context)
def get_id(self, context, obj):
return self.simplify(obj, context)
def get_class_path(self):
if not self.classid:
return None
if self.get_polymorphic_attr():
meta = filter(None, [getmeta(C, False) for C in self.model_class.__mro__])[-1]
else:
meta = self
path = meta.classid.replace('_', '-').split('.')
return REGISTRY.join(path)
def get_object_path(self, context, obj):
path = self.get_class_path()
if not path:
return None
id = self.get_id(context, obj)
if id:
return REGISTRY.join((path, id))
return path
@util.lazyattr
def key_validators(self):
return tuple(self.attributes[k].get_validator() for k in self.key)
@util.lazyattr
def num_args(self):
return sum(v.num_args for v in self.key_validators)
def get_view(self, context, obj, parent=None):
priv = self.get_priv('view')
return self.view_screen(context, obj, priv, parent)
def get_checked_view(self, context, obj, parent=None):
v = self.get_view(context, obj, parent)
if v.check():
return v
def __get__view__(self, context, obj, parent=None):
priv = self.get_priv('view')
vs = self.view_screen
from fenton import security
if vs and security.check(priv, context):
v = vs(context, obj, priv, parent)
if v.get_path():
return v
def get_parents(self, context, obj):
result = []
for p in self.__parents:
if isinstance(p, basestring):
p = getattr(obj, p, None)
if p:
if isinstance(p, type):
p = getmeta(p).load_model(context, None)
result.append(p)
return tuple(result)
def get_children(self, context):
# XXX args?
args = ()
for meta in self.__children:
yield meta.load_model(context, args)
def get_priv(self, key):
return self.privs and self.privs.get(key) or None
def send_event(self, __obj, __event, *args, **kw):
f = getattr(__obj, __event + '_event', None)
if f:
return f(*args, **kw)
return False
def get_polymorphic_attr(self):
pass
def get_polymorphic_id(self):
pass
def get_polymorphic_meta(self, polyattr, polyid):
raise NotImplementedError
def construct_child(self, context, obj, key, index=None):
subtype = self.attributes[key]
submeta = subtype.meta
# the child must deal with attaching the parent correctly
pattr = submeta.__parents[0]
attrs = {pattr: obj}
# should be:
# attrs = {'__parent__': obj}
attr = submeta.get_polymorphic_attr()
if attr:
attrs[attr] = submeta.get_polymorphic_id()
if index is not None and subtype.index_attr:
attrs[subtype.index_attr] = index
# attrs[subtype.key_attr] = key
return submeta.construct_model(context, **attrs)
def delete_child(self, context, obj, key, index):
if index is not None:
subtype = self.attributes[key]
items = getattr(obj, key)
del items[index]
# reindex children
if subtype.index_attr:
for i in range(index, len(items)):
setattr(items[index], subtype.index_attr, i)
else:
delattr(obj, key)
def store_child(self, context, obj, child, key, index):
subtype = self.attributes[key]
if index is None:
val = child
else:
val = getattr(obj, key)
if index == len(getattr(obj, key)):
val.append(child)
# log history
self.set_attribute(context, obj, key, val, None)
def construct_model(self, context, **args):
polyattr = self.get_polymorphic_attr()
polyid = polyattr and args.get(polyattr) or None
if polyid:
meta = self.get_polymorphic_meta(polyid)
else:
meta = self
obj = meta.do_construct_model(context, args)
meta.send_event(obj, 'after_construct')
return obj
def do_construct_model(self, context, args):
raise NotImplementedError
def purge_model(self, context, obj):
return obj
def set_attribute(self, context, obj, key, value, form=None):
if not self.send_event(obj, 'set_attribute', key, value, form):
setattr(obj, key, value)
def update_model(self, context, obj, vars, form=None):
self.send_event(obj, 'before_update', form)
self.do_update_model(context, obj, vars, form)
self.send_event(obj, 'after_update', form)
def do_update_model(self, context, obj, args, form):
for k in args:
self.set_attribute(context, obj, k, args[k], form)
def polyswitch(self, context, obj, args):
polyattr = self.get_polymorphic_attr()
polyid = polyattr and args.get(polyattr) or None
if polyid and getattr(obj, polyattr) != polyid:
newmeta = self.get_polymorphic_meta(polyid)
# preserve old attributes ???
return newmeta.construct_model(context)
return obj
def load_model(self, context, args):
obj = self.do_load_model(context, args)
self.send_event(obj, 'after_load')
return obj
def do_load_model(self, context, args):
raise NotImplementedError
def iter_model(self, context, filter):
raise NotImplementedError
def store_model(self, context, obj):
self.thaw_model(context, obj)
self.send_event(obj, 'before_store')
obj = self.do_store_model(context, obj)
self.send_event(obj, 'after_store')
return obj
def do_store_model(self, context, model):
raise NotImplementedError
def thaw_model(self, context, obj):
self.send_event(obj, 'before_thaw')
self.do_thaw_model(context, obj)
self.send_event(obj, 'after_thaw')
return obj
def do_thaw_model(self, context, obj):
self.set_context(context, obj)
return obj
def delete_model(self, context, obj):
self.send_event(obj, 'before_delete')
self.do_delete_model(context, obj)
self.send_event(obj, 'after_delete')
def do_delete_model(self, context, model):
raise NotImplementedError
def format_attribute(self, obj, attr, context):
v = getattr(obj, attr, None)
if v is not None:
return self.attributes[attr].format_value(v, context)
else:
return ''
def reconstruct(self, arg, context):
if arg:
args = REGISTRY.split(arg)
else:
args = ()
if len(args) != self.num_args:
logging.log.info('%r: Expected %d args, received %d' % (self, self.num_args, len(args)))
raise IncorrectArgs(arg, context)
py_args = []
i = 0
# recursively validate the arguments
for v in self.key_validators:
n = getattr(v, 'num_args', 1)
x = REGISTRY.join(args[i:i+n])
py_args.append(v.reconstruct(x, context))
i += n
if i != self.num_args:
msg = i < self.num_args and 'few' or 'many'
logging.log.info('Too %s arguments: expected %d, got %d' % (msg, self.num_args, i))
raise IncorrectArgs(arg, context)
obj = self.load_model(context, py_args)
if obj is None:
raise NoInstance(input=arg)
return obj
def simplify(self, obj, context):
if obj is None:
return ''
bits = (unicode(v.simplify(getattr(obj, k), context))
for (k, v) in zip(self.key, self.key_validators))
return REGISTRY.join(bits)
getmeta = AbstractMeta.getmeta
class ModelMeta(AbstractMeta):
    """Meta for plain in-memory (non-persistent) model classes."""
    metaclass = type
    base_class = object
    base_class_factory = staticmethod(create_class)
    def do_construct_model(self, context, args):
        """Instantiate the model, bind it to *context*, apply *args*."""
        obj = self.model_class()
        self.do_thaw_model(context, obj)
        if args:
            self.do_update_model(context, obj, args, form=None)
        return obj
    def do_load_model(self, context, args):
        # No storage: "loading" just constructs a fresh instance with the
        # key values filled in.
        if args:
            args = dict(zip(self.key, args))
        return self.do_construct_model(context, args)
    @util.lazyattr
    def title(self):
        """Human-readable title derived from the class name."""
        return util.decamel(self.model_class.__name__).replace('_', ' ').title()
    @util.lazyattr
    def classid(self):
        """Dotted registry id: explicit __classid__ or prefix.name."""
        if '__classid__' in self.model_class.__dict__:
            return self.model_class.__classid__
        prefix = self._classid_prefix()
        name = self._classid_name()
        if prefix:
            name = '%s.%s' % (prefix, name)
        return name
    @property
    def module(self):
        # The module that defined the model class.
        import sys
        return sys.modules[self.model_class.__module__]
    def _classid_prefix(self):
        # Priority: class override, module override, then the module path
        # (minus its top-level package) decamelled and lowercased.
        if hasattr(self.model_class, '__model_prefix__'):
            return self.model_class.__model_prefix__
        if hasattr(self.module, '__fenton_prefix__'):
            return self.module.__fenton_prefix__
        prefix = self.module.__name__.split('.')[1:]
        return '.'.join(map(util.decamel, prefix)).lower()
    def _classid_name(self):
        if hasattr(self.model_class, '__model_name__'):
            return self.model_class.__model_name__
        return util.decamel(self.model_class.__name__)
class ModelIterator:
    """Base model for list screens: iterating it yields the submeta's models."""
    filter = None           # optional callable applied to the backend query
    link_column = None
    list_columns = []
    __view__ = view.ListScreen
    # The meta of the listed model class (set via IterMeta.add_child).
    submeta = property(lambda x:getmeta(x).submeta)
    def __iter__(self):
        context = getmeta(self).get_context(self)
        return iter(self.submeta.iter_model(context, self.filter))
    def __title__(self):
        return getmeta(self).title
class IterMeta(ModelMeta):
    """Meta for iterator (list) models; delegates privileges to its submeta."""
    submeta = None
    get_priv = property(lambda x:x.submeta.get_priv)
    base_class = ModelIterator
    def add_child(self, child):
        # The first registered child becomes the listed model's meta.
        if not self.submeta:
            self.submeta = child
class DbMeta(ModelMeta):
    """Meta for SQLAlchemy-backed models (declarative mapping)."""
    @staticmethod
    def base_class_factory(**kw):
        # declarative_base takes the base class via 'cls', not 'bases'.
        if 'bases' in kw:
            kw['cls'] = kw.pop('bases')
        from sqlalchemy.ext.declarative import declarative_base
        return declarative_base(**kw)
    @util.classproperty
    def metaclass(self):
        from sqlalchemy.ext.declarative import DeclarativeMeta
        return DeclarativeMeta
    def get_db(self, context):
        """Return the SQLAlchemy session for *context* (backend-specific)."""
        raise NotImplementedError
    def _query(self, context):
        return self.get_db(context).query(self.model_class)
    def do_load_model(self, context, args):
        # _idlist presumably normalizes the key tuple for Query.get -- it is
        # defined elsewhere in this module.
        if args:
            args = _idlist(args)
        else:
            args = ()
        obj = self._query(context).get(args)
        if obj is not None:
            self.set_context(context, obj)
        return obj
    def iter_model(self, context, filter):
        """Build the listing query: eager-join _meta, default filter, *filter*."""
        q = self._query(context)
        if self.model_class._meta:
            eager = orm.contains_eager(self.model_class._meta)
            q = q.join(self.model_class._meta).options(eager)
        q = self.get_default_filter()(q)
        if filter:
            q = filter(q)
        return q
    def get_default_filter(self):
        return getattr(self.model_class, '__filter__', lambda q: q)
    def do_thaw_model(self, context, obj):
        #print 'do_thaw: <%s 0x%x>' % (type(obj).__name__, id(obj))
        db = self.get_db(context)
        # FIXME: add() is wrong
        db.add(obj)
        self.set_context(context, obj)
        return obj
    def do_store_model(self, context, obj):
        db = self.get_db(context)
        db.add(obj)
        return obj
    def do_delete_model(self, context, obj):
        self.get_db(context).delete(obj)
    def purge_model(self, context, obj):
        """Discard pending changes by refreshing *obj* from the database."""
        db = self.get_db(context)
        if obj not in db.new:
            try:
                db.refresh(obj)
            except sql.exc.InvalidRequestError:
                pass
        return obj
REGISTRY = Registry()
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import os
import sqlalchemy as sql
from sqlalchemy import orm
# Shared SQLAlchemy metadata for the upgrade machinery's own tables.
METADATA = sql.MetaData()
# Environment flag marking an in-progress upgrade (see upgrading()).
ENV_KEY = 'FENTON_UPGRADING'
# One row per applied schema upgrade; max(version) is the current schema.
SCHEMA_HISTORY = sql.Table(
    'SCHEMA_HISTORY',
    METADATA,
    sql.Column('version', sql.Integer(), primary_key=True, nullable=False),
    sql.Column('author', sql.String(), nullable=False),
    sql.Column('stamp', sql.DateTime(timezone=False), nullable=False),
)
def initialize(app):
    """Build the app's DB session factory; log an error if an upgrade is pending."""
    from fenton import logging
    # Presumably sets the PostgreSQL client timezone for this process.
    # NOTE(review): os.putenv does not update os.environ, so Python code
    # reading os.environ['PGTZ'] will not see this -- confirm intended.
    os.putenv('PGTZ', 'UTC')
    name = app.config['fenton.db'] + '.db.'
    engine = sql.engine_from_config(app.config, name)
    app.upgrade_path = app.config['upgrade.path']
    if not upgrading():
        con = engine.connect()
        if find_upgrade(con, app.upgrade_path):
            logging.log.error('upgrade pending')
        con.close()
    DB = orm.sessionmaker(bind=engine, autoflush=False)
    DB.bind = engine
    return DB
# command
def upgrade_status(cx):
    '0 => no upgrade; 99 => online; 100 => offline'
    pending = find_upgrade(cx.db, cx.app.upgrade_path)
    if not pending:
        return 0
    return 99 if pending.online else 100
# command
def upgrade(cx, commit=False):
    """Apply the next pending schema upgrade.

    Without commit=True this is a dry run: NoCommit is raised inside the
    transaction context to force a rollback after the scripts have run.
    Returns 1 when there is nothing to upgrade.
    """
    up = find_upgrade(cx.db, cx.app.upgrade_path)
    if not up:
        print 'Nothing to do'
        return 1
    cx = UpgradeContext(cx)
    try:
        with cx:
            do_upgrade(cx, up)
            print 'Upgraded to version', up.version
            if not commit:
                raise NoCommit
    except NoCommit:
        print 'ROLLBACK'
    else:
        print 'COMMIT'
# decorator
def post_upgrade(f):
    """Decorator: register *f* to run after every upgrade (see do_upgrade)."""
    UpgradeContext.post_upgraders.append(f)
    return f
def upgrading(set=None):
    """Query (no argument) or raise (truthy argument) the upgrade flag.

    The flag lives in the process environment and, once set, cannot be
    cleared through this function.
    """
    if not set:
        return bool(os.environ.get(ENV_KEY))
    os.environ[ENV_KEY] = 'true'
def get_version(con):
    """Return the highest applied schema version, or 0 for a fresh database."""
    v = SCHEMA_HISTORY.c.version
    q = sql.select([v]).order_by(v.desc()).limit(1)
    return con.execute(q).scalar() or 0
def get_script(v, p):
    """Wrap upgrade file *p* (version *v*) in the matching script class."""
    extension = os.path.splitext(p)[1][1:].lower()
    if extension == 'sql':
        return SqlScript(v, p)
    return PyScript(v, p)
def do_upgrade(cx, upgrade):
    """Run *upgrade* plus all registered post-upgrade hooks, then record it."""
    for f in [upgrade] + cx.post_upgraders:
        print f
        f(cx)
        cx.flush()
    # Record the new schema version and who applied it.
    q = SCHEMA_HISTORY.insert().values(version=upgrade.version,
                                       author=cx.user.username)
    cx.execute(q)
    cx.flush()
def grant(cx, privs, user):
    """GRANT *privs* on every public table, view and sequence to *user*.

    NOTE(review): *privs* and *user* are interpolated into the SQL string;
    this is only safe because they come from the upgrade operator, never
    from untrusted input -- confirm callers.
    """
    run = cx.bind.execute
    # relkind r/v/S = ordinary table, view, sequence (PostgreSQL catalog).
    objs = run('''
       SELECT relname
       FROM pg_catalog.pg_class c,
            pg_catalog.pg_namespace n
       WHERE c.relkind IN ('r', 'v', 'S')
         AND n.oid = c.relnamespace
         AND n.nspname = 'public' ''')
    objs = [row[0] for row in objs]
    for obj in objs:
        run('GRANT %s ON "%s" TO "%s" ' % (privs, obj, user))
def find_upgrade(con, path, _memo=[]):
    """Scan *path* for the script matching the next schema version.

    Returns an Upgrade instance or None.  The result is computed once per
    process: _memo is a deliberate mutable default used as a cache.
    """
    if _memo:
        return _memo[1]
    _memo.append(None)
    OP = os.path
    upgrades = {}
    pending = None
    version = get_version(con) + 1
    # NOTE(review): on a missing/non-directory path this only prints;
    # os.walk below then simply yields nothing.
    if not OP.exists(path):
        print 'Directory %s does not exist' % path
    elif not OP.isdir(path):
        print 'Not a directory:', path
    v = None
    for d, _, files in os.walk(path, followlinks=True):
        for n in files:
            p = OP.join(d, n)
            if not OP.isfile(p):
                continue
            # A file named <int>.<ext> targets that version; 'next.<ext>'
            # targets whatever the next version is.
            v = OP.splitext(n)[0]
            try:
                v = int(v)
            except ValueError:
                if v == 'next':
                    v = version
                else:
                    continue
            if v < version:
                continue
            if v in upgrades:
                print 'Duplicate upgrade:', p
                continue
            upgrades[v] = p
    try:
        pending = upgrades.pop(version)
    except KeyError:
        pass
    else:
        pending = get_script(version, pending)
    # Anything left targets a version beyond the next one.
    if upgrades:
        print 'Skipping unexpected upgrades:'
        for v, p in sorted(upgrades.iteritems()):
            print p
    _memo.append(pending)
    return _memo[1]
class NoCommit(Exception):
    """Raised inside upgrade() to force a rollback (dry-run marker)."""
class Upgrade:
    """A schema upgrade script located at *path*, targeting *version*."""

    def __init__(self, version, path):
        self.version = version
        self.path = path
        self.init()

    def init(self):
        """Subclass hook run at construction time."""
        pass

    def __call__(self, cx):
        # Applying the upgrade delegates to the subclass's execute().
        return self.execute(cx)

    def __repr__(self):
        return '<Version %d:%s>' % (self.version, self.path)
class PyScript(Upgrade):
    """An upgrade written as a Python file defining upgrade(cx).

    The script may set a module-level 'online' flag (default False).
    """
    online = property(lambda x:x.module.online)
    def init(self):
        # Compile and run the script inside a fresh throwaway module so it
        # can define upgrade() and override 'online' without polluting us.
        src = open(self.path, 'rb').read()
        code = compile(src, self.path, 'exec')
        import imp
        self.module = mod = imp.new_module('version-%d' % self.version)
        self.module.online = False
        mod.__file__ = self.path
        exec code in mod.__dict__
    def execute(self, cx):
        self.module.upgrade(cx)
class SqlScript(Upgrade):
    """An upgrade written as a raw .sql file."""
    def execute(self, cx):
        # Bug fix: the base class stores the location in self.path; there is
        # no 'filename' attribute, so the old self.filename always raised
        # AttributeError.  Also: close the file, and don't shadow the
        # module-level 'sql' (sqlalchemy) alias with a local.
        with open(self.path) as f:
            script = f.read()
        # Double '%' so driver-level paramstyle interpolation leaves it alone.
        cx.run(script.replace('%', '%%'))
class UpgradeContext:
    """Wraps a request context with an upgrade-capable DB connection.

    Unknown attributes are proxied to the wrapped context; entering/exiting
    delegates to the wrapped context's transaction handling.
    """
    post_upgraders = []   # hooks registered via @post_upgrade
    def __init__(self, cx):
        self.__cx = cx
        # Rebind the app's session factory to the privileged upgrade engine.
        name = cx.app.config['upgrade.db'] + '.db.'
        engine = sql.engine_from_config(cx.app.config, name)
        cx.app.db.configure(bind=engine, autoflush=True)
        self.context = cx
        self.bind = cx.bind = cx.db.connection()
        self.db = self.bind.db = cx.db
        self.flush = cx.db.flush
    def __getattr__(self, name):
        # Fall through to the wrapped context for everything else.
        return getattr(self.__cx, name)
    def __enter__(self):
        return self.__cx.__enter__()
    def __exit__(self, *errors):
        return self.__cx.__exit__(*errors)
    def execute(self, q, **kw):
        """Flush pending ORM state, then execute *q* on the raw connection."""
        self.flush()
        return self.bind.execute(q, **kw)
    def runmany(self, *qq, **kw):
        """Execute each statement in *qq*, echoing strings; returns the last result."""
        r = None
        for q in qq:
            if isinstance(q, basestring):
                print q % (kw or {})
                print
            r = self.execute(q, **kw)
        return r
    def run(self, qq, **kw):
        """Execute a statement, a ';'-separated SQL string, or a list of either."""
        if isinstance(qq, basestring):
            qq = [q for q in qq.split(';\n') if q.strip()]
        elif not isinstance(qq, list):
            qq = [qq]
        return self.runmany(*qq, **kw)
    def create(self, *classes):
        """CREATE the tables of *classes* (or everything when none given)."""
        ts = [c.__table__ for c in classes]
        METADATA.create_all(bind=self.bind, tables=ts or None)
    def drop_columns(self, *colnames):
        """Drop 'table.column' entries; accepts one whitespace-separated string."""
        if len(colnames) == 1 and '\n' in colnames[0]:
            colnames = colnames[0].split()
        for n in colnames:
            self.run('ALTER TABLE %s DROP %s CASCADE' % tuple(n.split('.', 1)))
    def drop_tables(self, *tablenames):
        """Drop tables (and their metainfo rows); accepts one multi-line string."""
        if len(tablenames) == 1 and '\n' in tablenames[0]:
            tablenames = tablenames[0].split()
        for t in tablenames:
            self.run('DROP TABLE %s CASCADE' % t)
            self.delete_metainfo(t)
    def insert_metainfo(self, *classes):
        from fenton import data
        for c in classes:
            data._insert_metainfo(self, c)
    def delete_metainfo(self, *tablenames):
        from fenton import data
        C = data.MetaObjectClass
        q = C.__table__.delete()
        for t in tablenames:
            self.run(q.where(C.tablename == t))
| Python |
#!/usr/bin/python
# -*- coding: ascii -*-
###########################################################################
# PBKDF2.py - PKCS#5 v2.0 Password-Based Key Derivation
#
# Copyright (C) 2007, 2008 Dwayne C. Litzenberger <dlitz@dlitz.net>
# All rights reserved.
#
# Permission to use, copy, modify, and distribute this software and its
# documentation for any purpose and without fee is hereby granted,
# provided that the above copyright notice appear in all copies and that
# both that copyright notice and this permission notice appear in
# supporting documentation.
#
# THE AUTHOR PROVIDES THIS SOFTWARE ``AS IS'' AND ANY EXPRESSED OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES
# OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED.
# IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT
# NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Country of origin: Canada
#
###########################################################################
# Sample PBKDF2 usage:
# from Crypto.Cipher import AES
# from PBKDF2 import PBKDF2
# import os
#
# salt = os.urandom(8) # 64-bit salt
# key = PBKDF2("This passphrase is a secret.", salt).read(32) # 256-bit key
# iv = os.urandom(16) # 128-bit IV
# cipher = AES.new(key, AES.MODE_CBC, iv)
# ...
#
# Sample crypt() usage:
# from PBKDF2 import crypt
# pwhash = crypt("secret")
# alleged_pw = raw_input("Enter password: ")
# if pwhash == crypt(alleged_pw, pwhash):
# print "Password good"
# else:
# print "Invalid password"
#
###########################################################################
# History:
#
# 2007-07-27 Dwayne C. Litzenberger <dlitz@dlitz.net>
# - Initial Release (v1.0)
#
# 2007-07-31 Dwayne C. Litzenberger <dlitz@dlitz.net>
# - Bugfix release (v1.1)
# - SECURITY: The PyCrypto XOR cipher (used, if available, in the _strxor
# function in the previous release) silently truncates all keys to 64
# bytes. The way it was used in the previous release, this would only be
# problem if the pseudorandom function that returned values larger than
# 64 bytes (so SHA1, SHA256 and SHA512 are fine), but I don't like
# anything that silently reduces the security margin from what is
# expected.
#
# 2008-06-17 Dwayne C. Litzenberger <dlitz@dlitz.net>
# - Compatibility release (v1.2)
# - Add support for older versions of Python (2.2 and 2.3).
#
###########################################################################
__version__ = "1.2"
from struct import pack
from binascii import b2a_hex
from random import randint
import string
try:
# Use PyCrypto (if available)
from Crypto.Hash import HMAC, SHA as SHA1
except ImportError:
# PyCrypto not available. Use the Python standard library.
import hmac as HMAC
import sha as SHA1
def strxor(a, b):
    """Return the character-wise XOR of *a* and *b* (truncated to the shorter)."""
    out = []
    for x, y in zip(a, b):
        out.append(chr(ord(x) ^ ord(y)))
    return "".join(out)
def b64encode(data, chars="+/"):
    """Base64-encode *data* with *chars* substituted for the standard '+/'.

    Relies on the Python 2 str 'base64' codec; strips the trailing newline.
    """
    tt = string.maketrans("+/", chars)
    return data.encode('base64').replace("\n", "").translate(tt)
class PBKDF2(object):
    """PBKDF2.py : PKCS#5 v2.0 Password-Based Key Derivation

    This implementation takes a passphrase and a salt (and optionally an
    iteration count, a digest module, and a MAC module) and provides a
    file-like object from which an arbitrarily-sized key can be read.

    If the passphrase and/or salt are unicode objects, they are encoded as
    UTF-8 before they are processed.

    The idea behind PBKDF2 is to derive a cryptographic key from a
    passphrase and a salt.

    PBKDF2 may also be used as a strong salted password hash.  The
    'crypt' function is provided for that purpose.

    Remember: Keys generated using PBKDF2 are only as strong as the
    passphrases they are derived from.
    """

    def __init__(self, passphrase, salt, iterations=1000,
                 digestmodule=SHA1, macmodule=HMAC):
        self.__macmodule = macmodule
        self.__digestmodule = digestmodule
        self._setup(passphrase, salt, iterations, self._pseudorandom)

    def _pseudorandom(self, key, msg):
        """Pseudorandom function.  e.g. HMAC-SHA1"""
        return self.__macmodule.new(key=key, msg=msg,
                                    digestmod=self.__digestmodule).digest()

    def read(self, bytes):
        """Read the specified number of key bytes."""
        if self.closed:
            raise ValueError("file-like object is closed")

        # Generate whole blocks with __f until we have enough material,
        # then buffer any surplus for the next read() call.
        size = len(self.__buf)
        blocks = [self.__buf]
        i = self.__blockNum
        while size < bytes:
            i += 1
            if i > 0xffffffffL or i < 1:
                # We could return "" here, but
                raise OverflowError("derived key too long")
            block = self.__f(i)
            blocks.append(block)
            size += len(block)
        buf = "".join(blocks)
        retval = buf[:bytes]
        self.__buf = buf[bytes:]
        self.__blockNum = i
        return retval

    def __f(self, i):
        # RFC 2898 function F: U_1 = PRF(P, S || INT(i)),
        # U_j = PRF(P, U_{j-1}); result is the XOR of U_1..U_c.
        # i must fit within 32 bits
        assert 1 <= i <= 0xffffffffL
        U = self.__prf(self.__passphrase, self.__salt + pack("!L", i))
        result = U
        for j in xrange(2, 1+self.__iterations):
            U = self.__prf(self.__passphrase, U)
            result = strxor(result, U)
        return result

    def hexread(self, octets):
        """Read the specified number of octets. Return them as hexadecimal.

        Note that len(obj.hexread(n)) == 2*n.
        """
        return b2a_hex(self.read(octets))

    def _setup(self, passphrase, salt, iterations, prf):
        # Sanity checks:

        # passphrase and salt must be str or unicode (in the latter
        # case, we convert to UTF-8)
        if isinstance(passphrase, unicode):
            passphrase = passphrase.encode("UTF-8")
        if not isinstance(passphrase, str):
            raise TypeError("passphrase must be str or unicode")
        if isinstance(salt, unicode):
            salt = salt.encode("UTF-8")
        if not isinstance(salt, str):
            raise TypeError("salt must be str or unicode")

        # iterations must be an integer >= 1
        if not isinstance(iterations, (int, long)):
            raise TypeError("iterations must be an integer")
        if iterations < 1:
            raise ValueError("iterations must be at least 1")

        # prf must be callable
        if not callable(prf):
            raise TypeError("prf must be callable")

        self.__passphrase = passphrase
        self.__salt = salt
        self.__iterations = iterations
        self.__prf = prf
        self.__blockNum = 0
        self.__buf = ""
        self.closed = False

    def close(self):
        """Close the stream."""
        # Drop references to the secret material so it can be collected.
        if not self.closed:
            del self.__passphrase
            del self.__salt
            del self.__iterations
            del self.__prf
            del self.__blockNum
            del self.__buf
            self.closed = True
def crypt(word, salt=None, iterations=None):
    """PBKDF2-based unix crypt(3) replacement.

    The number of iterations specified in the salt overrides the 'iterations'
    parameter.

    The effective hash length is 192 bits.

    Hash format: $p5k2$<iterations-hex>$<salt>$<b64-hash>, where an empty
    iterations field means the default of 400.
    """
    # Generate a (pseudo-)random salt if the user hasn't provided one.
    if salt is None:
        salt = _makesalt()

    # salt must be a string or the us-ascii subset of unicode
    if isinstance(salt, unicode):
        salt = salt.encode("us-ascii")
    if not isinstance(salt, str):
        raise TypeError("salt must be a string")

    # word must be a string or unicode (in the latter case, we convert to UTF-8)
    if isinstance(word, unicode):
        word = word.encode("UTF-8")
    if not isinstance(word, str):
        raise TypeError("word must be a string or unicode")

    # Try to extract the real salt and iteration count from the salt
    if salt.startswith("$p5k2$"):
        (iterations, salt, dummy) = salt.split("$")[2:5]
        if iterations == "":
            iterations = 400
        else:
            converted = int(iterations, 16)
            # Reject non-canonical encodings (uppercase, leading zeros).
            if iterations != "%x" % converted:  # lowercase hex, minimum digits
                raise ValueError("Invalid salt")
            iterations = converted
            if not (iterations >= 1):
                raise ValueError("Invalid salt")

    # Make sure the salt matches the allowed character set
    allowed = "abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ0123456789./"
    for ch in salt:
        if ch not in allowed:
            raise ValueError("Illegal character %r in salt" % (ch,))

    # The default iteration count (400) is encoded as an empty field.
    if iterations is None or iterations == 400:
        iterations = 400
        salt = "$p5k2$$" + salt
    else:
        salt = "$p5k2$%x$%s" % (iterations, salt)
    rawhash = PBKDF2(word, salt, iterations).read(24)
    return salt + "$" + b64encode(rawhash, "./")
# Add crypt as a static method of the PBKDF2 class
# This makes it easier to do "from PBKDF2 import PBKDF2" and still use
# crypt.
PBKDF2.crypt = staticmethod(crypt)
def _makesalt():
    """Return a 48-bit pseudorandom salt for crypt().

    This function is not suitable for generating cryptographic secrets.
    """
    # Three random 16-bit words, base64-encoded with the ./ alphabet.
    binarysalt = "".join([pack("@H", randint(0, 0xffff)) for i in range(3)])
    return b64encode(binarysalt, "./")
def test_pbkdf2():
    """Module self-test; raises RuntimeError on any mismatch."""
    from binascii import a2b_hex

    #
    # Test vectors from RFC 3962
    #

    # Test 1
    result = PBKDF2("password", "ATHENA.MIT.EDUraeburn", 1).read(16)
    expected = a2b_hex("cdedb5281bb2f801565a1122b2563515")
    if result != expected:
        raise RuntimeError("self-test failed")

    # Test 2
    result = PBKDF2("password", "ATHENA.MIT.EDUraeburn", 1200).hexread(32)
    expected = ("5c08eb61fdf71e4e4ec3cf6ba1f5512b"
                "a7e52ddbc5e5142f708a31e2e62b1e13")
    if result != expected:
        raise RuntimeError("self-test failed")

    # Test 3
    result = PBKDF2("X"*64, "pass phrase equals block size", 1200).hexread(32)
    expected = ("139c30c0966bc32ba55fdbf212530ac9"
                "c5ec59f1a452f5cc9ad940fea0598ed1")
    if result != expected:
        raise RuntimeError("self-test failed")

    # Test 4
    result = PBKDF2("X"*65, "pass phrase exceeds block size", 1200).hexread(32)
    expected = ("9ccad6d468770cd51b10e6a68721be61"
                "1a8b4d282601db3b36be9246915ec82a")
    if result != expected:
        raise RuntimeError("self-test failed")

    #
    # Other test vectors
    #

    # Chunked read (must agree with a single 40-byte read)
    f = PBKDF2("kickstart", "workbench", 256)
    result = f.read(17)
    result += f.read(17)
    result += f.read(1)
    result += f.read(2)
    result += f.read(3)
    expected = PBKDF2("kickstart", "workbench", 256).read(40)
    if result != expected:
        raise RuntimeError("self-test failed")

    #
    # crypt() test vectors
    #

    # crypt 1
    result = crypt("cloadm", "exec")
    expected = '$p5k2$$exec$r1EWMCMk7Rlv3L/RNcFXviDefYa0hlql'
    if result != expected:
        raise RuntimeError("self-test failed")

    # crypt 2
    result = crypt("gnu", '$p5k2$c$u9HvcT4d$.....')
    expected = '$p5k2$c$u9HvcT4d$Sd1gwSVCLZYAuqZ25piRnbBEoAesaa/g'
    if result != expected:
        raise RuntimeError("self-test failed")

    # crypt 3
    result = crypt("dcl", "tUsch7fU", iterations=13)
    expected = "$p5k2$d$tUsch7fU$nqDkaxMDOFBeJsTSfABsyn.PYUXilHwL"
    if result != expected:
        raise RuntimeError("self-test failed")

    # crypt 4 (unicode)
    result = crypt(u'\u0399\u03c9\u03b1\u03bd\u03bd\u03b7\u03c2',
                   '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ')
    expected = '$p5k2$$KosHgqNo$9mjN8gqjt02hDoP0c2J0ABtLIwtot8cQ'
    if result != expected:
        raise RuntimeError("self-test failed")
if __name__ == '__main__':
test_pbkdf2()
# vim:set ts=4 sw=4 sts=4 expandtab:
| Python |
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import os
import sys
class colors:
    """ANSI terminal color helper; replaced below by a singleton instance.

    One method per entry in 'codes' (e.g. Red(text)) is attached
    dynamically after the class definition.
    """
    @staticmethod
    def capable():
        # Only colorize when stderr is a real terminal.
        return (hasattr(sys.stderr, 'fileno') and
                os.isatty(sys.stderr.fileno()))
    def random(self, text, choice=None):
        """Wrap *text* in a (pseudo-)randomly chosen color."""
        import random
        if not self.capable():
            return text
        if choice is None:
            choice = random.randint(0,10000)
        i = choice % 16
        # Python 2: dict.keys() is an indexable list (order is arbitrary).
        func = self.codes.keys()[i]
        func = getattr(self, func)
        return func(text)
    normal = '\033[0m'      # reset sequence
    esc = '\033[%sm'        # SGR escape template
    codes = dict([
        ("Black"       , "0;30"),
        ("Red"         , "0;31"),
        ("Green"       , "0;32"),
        ("Brown"       , "0;33"),
        ("Blue"        , "0;34"),
        ("Purple"      , "0;35"),
        ("Cyan"        , "0;36"),
        ("LightGray"   , "0;37"),
        ("DarkGray"    , "1;30"),
        ("LightRed"    , "1;31"),
        ("LightGreen"  , "1;32"),
        ("Yellow"      , "1;33"),
        ("LightBlue"   , "1;34"),
        ("LightPurple" , "1;35"),
        ("LightCyan"   , "1;36"),
        ("White"       , "1;37")
        ])
# Attach one wrapper method per named color (colors.Red(text), ...).
# makefunc() exists to bind 'value' per iteration, avoiding the classic
# late-binding-closure pitfall.
for name, value in colors.codes.items():
    def makefunc():
        esc = colors.esc % value
        def wrap(self, text):
            if self.capable():
                return "%s%s%s" % (esc, text, colors.normal)
            return text
        return wrap
    setattr(colors, name, makefunc())
# Replace the class with a ready-to-use singleton instance.
colors = colors()
def get_ipython(locs):
    """Return an IPython mainloop callable seeded with namespace *locs*.

    Raises ImportError when IPython is not installed (see get_shell).
    """
    from IPython.Shell import IPShell
    shell = IPShell(user_ns=locs, argv=[])
    return shell.mainloop
def get_bpython(locs):
    """Return a bpython launcher callable seeded with namespace *locs*."""
    from bpython.cli import main
    return lambda:main([], locs)
def get_builtin(locs):
    """Return an interact callable backed by the stdlib code module."""
    import code
    console = code.InteractiveConsole(locals=locs)
    try:
        # Importing readline enables line editing/history when available.
        import readline
    except ImportError:
        pass
    return console.interact
def get_shell(namespace):
    """Return the best available interactive shell callable for *namespace*.

    Tries IPython first, falling back to the stdlib console.  (Cleanup:
    removed the unused 'shell = None' local and dead commented-out code.)
    """
    candidates = (get_ipython,)
    for factory in candidates:
        try:
            return factory(namespace)
        except ImportError:
            pass
    return get_builtin(namespace)
def interactive(namespace):
    """Launch an interactive shell seeded with *namespace*."""
    return get_shell(namespace)()
| Python |
from __future__ import absolute_import
import sys
import unittest
from fenton.util import decamel
__test__ = False
__metaclass__ = type
TestCase = unittest.TestCase
class T(unittest.TestCase):
    # Minimal concrete TestCase whose bound assert*/fail* methods are
    # re-exported as module-level helpers below.
    __test__ = False
    def runTest(self):
        pass
def test(f):
    """Decorator: prefix a function's name with 'test_' so runners collect it."""
    f.__name__ = 'test_' + f.__name__
    return f
# keep test-collection tools from treating the decorator itself as a test
test.__test__ = False
# Re-export every bound assert*/fail* method of a throwaway TestCase both
# under its original camelCase name (assertEqual) and a snake_case alias
# (assert_equal).  Uses the Python 2 'exec' statement.
t = T()
for k in dir(t):
    if k.startswith('assert') or k.startswith('fail'):
        j = decamel(k[0].upper() + k[1:])
        if j != 'assert':
            exec k + " = t." + k
            exec decamel(k[0].upper() + k[1:]) + " = t." + k
def assert_is(first, second, msg=None):
    """Assert that *first* and *second* are the same object."""
    if not first is second:
        raise t.failureException, \
              (msg or '%r is not %r' % (first, second))
def assert_is_not(first, second, msg=None):
if not first is second:
raise t.failureException, \
(msg or '%r is %r' % (first, second))
def assert_isinstance(first, second, msg=None):
    """Assert that *first* is an instance of class (or tuple) *second*."""
    if not isinstance(first, second):
        raise t.failureException, \
              (msg or "%r is not instance of %r" % (first, second))
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import time
import urllib2
import datetime
from xml.etree import cElementTree as etree
import sqlalchemy as sql
import sqlalchemy.orm as orm
from sqlalchemy.ext.associationproxy import association_proxy
from sqlalchemy.orm.collections import column_mapped_collection as coldict
from fenton import types
from fenton import logging
from fenton import security
from fenton import timestamp
# Crowd directory: there is only one
CROWD_DIRECTORY_ID = 32769
CROWD_COOKIE = 'crowd.token_key'
def initialize(app):
    """Wire the Crowd DB session factory and SOAP client into *app*."""
    config = app.config
    engine = sql.engine_from_config(config, prefix='crowd.db.')
    app.get_crowd_db = orm.sessionmaker(bind=engine, autoflush=False)
    # Strip the 'crowd.' prefix from the client settings, excluding the
    # 'crowd.db.*' keys already consumed by the engine above.
    config = dict((k.replace('crowd.', ''), config[k])
                  for k in config
                  if k.startswith('crowd.')
                  and not k.startswith('crowd.db.'))
    app.crowd_client = CrowdClient(**config)
    CrowdUser.tz = app.tz
    Principal.tz = app.tz
def declarative(**kw):
    """Class decorator factory: rebuild the class on a declarative_base."""
    from sqlalchemy.ext.declarative import declarative_base
    def decorate(C):
        return declarative_base(cls=C, **kw)
    return decorate
def synchronize(cx):
    """Mirror the master user directory into the Crowd database.

    Users/groups no longer present in the master are deleted, except the
    bootstrap entries in keep_users/keep_groups.
    """
    keep_users = ['crowd-admin']
    keep_groups = ['crowd-administrators']
    db = cx.crowd_db
    users_left = set(db.query(Principal).all())
    groups_left = set(db.query(Group).all())
    for master in cx.app.auth.iter_users():
        cu = Principal._ensure(db, master.username)
        cu.synchronize(master, None)
        # Anything still in *_left after the loop is stale.
        users_left.discard(cu)
        groups_left.difference_update(cu.groups)
    for u in users_left:
        if u.username not in keep_users:
            logging.log.warn('deleting user %s', u.username)
            db.delete(u)
    for g in groups_left:
        if g.group_name not in keep_groups:
            logging.log.warn('deleting group %s', g.group_name)
            db.delete(g)
def get_authenticated_user(context, username, password):
    """Look up *username* in the Crowd database and verify *password*.

    A password of None skips the credential check (pre-authenticated
    caller).  Raises NoUser / BadUser / LoginFailed accordingly.
    """
    principal = Principal._get(context.crowd_db, username)
    if principal is None:
        raise security.NoUser
    if principal.active != 'T':
        raise security.BadUser
    if password is not None and not principal.authenticate(password):
        raise security.LoginFailed
    return CrowdUser(principal)
def get_synchronized_user(context, master, password):
    """Mirror the *master* directory record into Crowd, stamp the
    last-authenticated time, and return the resulting CrowdUser."""
    principal = Principal._ensure(context.crowd_db, master.username)
    principal.synchronize(master, password)
    principal.set_authenticate_stamp()
    return CrowdUser(principal)
def qn(ns, tag):
    """Return the Clark-notation qualified name '{ns}tag'."""
    return '{' + ns + '}' + tag
def qe(ns, name, parent=None, **attrs):
    """Create a namespace-qualified element; attach it to *parent* when
    one is given, otherwise return a free-standing element."""
    qname = qn(ns, name)
    if parent is None:
        return etree.Element(qname, **attrs)
    return etree.SubElement(parent, qname, **attrs)
class SoapError(Exception):
    """Raised when a SOAP request fails or returns a malformed response."""
    pass
class SoapService:
    """Minimal SOAP 1.1 client: wraps a payload in Envelope/Body, POSTs
    it to *url*, and returns the first element of the response Body."""
    ns_soap = 'http://schemas.xmlsoap.org/soap/envelope/'
    def __init__(self, url, debug=False):
        self.url = url
        self.debug = debug  # when set, log the full request/response XML
    def el(self, *args, **kw):
        # Shorthand for an element in the SOAP envelope namespace.
        return qe(self.ns_soap, *args, **kw)
    def send(self, rq, msg):
        """POST *msg* using request *rq*; return the raw response XML."""
        if self.debug:
            logging.log.debug('SEND:\n%s' % msg)
        try:
            response = urllib2.urlopen(rq, msg)
        except urllib2.HTTPError, e:
            # SOAP faults arrive as HTTP 500 with an XML body.
            if e.code == 500 and e.headers.subtype == 'xml':
                msg = e.read()
            else:
                msg = str(e)
            raise SoapError(msg)
        xml = response.read()
        if self.debug:
            logging.log.debug('RECV:\n%s' % xml)
        if not response.headers.subtype == 'xml':
            raise SoapError('The response was not XML')
        return xml
    def call(self, action, payload):
        """Invoke SOAP *action* with *payload* and return the first child
        of the response Body."""
        rq = urllib2.Request(self.url)
        rq.add_header('SOAPAction', action)
        rq.add_header('Content-Type', 'text/xml')
        envelope = self.el('Envelope')
        body = self.el('Body', envelope)
        body.append(payload)
        response = self.send(rq, etree.tostring(envelope))
        if not response:
            raise SoapError('Empty response')
        return etree.fromstring(response).find(body.tag)[0]
class CrowdClient:
    """SOAP client for the Atlassian Crowd SecurityServer service.

    Implements application authentication plus principal-token
    create/authenticate/validate/invalidate calls, and carries the
    cookie parameters used for single sign-on.
    """
    ns_server = 'urn:SecurityServer'
    ns_auth = 'http://authentication.integration.crowd.atlassian.com'
    ns_xsi = 'http://www.w3.org/1999/XMLSchema-instance'
    def __init__(self, url, appname, appkey,
                 cookie_name=CROWD_COOKIE,
                 cookie_path='/',
                 cookie_domain=None,
                 cookie_secure=False,
                 debug=False):
        self.appname = appname
        self.appkey = appkey
        self.cookie_name = cookie_name
        self.cookie_path = cookie_path
        self.cookie_domain = cookie_domain
        self.cookie_secure = cookie_secure
        self.service = SoapService(url, debug)
    def f_el(self, name, parent=None, **attrs):
        # Element in the SecurityServer (RPC) namespace.
        return qe(self.ns_server, name, parent, **attrs)
    def v_el(self, name, parent=None, **attrs):
        # Element in the authentication value-object namespace.
        return qe(self.ns_auth, name, parent, **attrs)
    def _authenticateApplication(self):
        """Authenticate this application; return the <out> token element
        that must accompany every subsequent call."""
        action = 'authenticateApplication'
        request = self.f_el(action)
        cred = self.v_el('credential')
        self.v_el('credential', cred).text = self.appkey
        self.v_el('encryptedCredential', cred).text = 'false'
        name = self.v_el('name')
        name.text = self.appname
        nil = {qn(self.ns_xsi, 'nil'): 'true'}
        vf = self.v_el('validationFactors', **nil)
        arg0 = self.f_el('in0', request)
        # Child order matters to the SOAP endpoint.
        for x in [cred, name, vf]:
            arg0.append(x)
        response = self.service.call(action, request)
        return list(response.getiterator(qn(self.ns_server, 'out')))[0]
    def authenticatePrincipal(self, username, password, user_agent=None, remote_addr=None):
        """Verify *password* for *username*; returns the new token string."""
        action = 'authenticatePrincipal'
        msg = self.f_el(action)
        appcred = self._authenticateApplication()
        arg0 = self.f_el('in0', msg)
        arg0.append(appcred[0])
        arg0.append(appcred[1])
        arg1 = self.f_el('in1', msg)
        self.v_el('application', arg1).text = self.appname
        self.v_el('name', arg1).text = username
        cred = self.v_el('credential', arg1)
        self.v_el('credential', cred).text = password
        # Validation factors tie the token to the client's UA/address.
        vfs = self.v_el('validationFactors', arg1)
        vf = self.v_el('ValidationFactor', vfs)
        self.v_el('name', vf).text = 'User-Agent'
        self.v_el('value', vf).text = user_agent
        vf = self.v_el('ValidationFactor', vfs)
        self.v_el('name', vf).text = 'remote_address'
        self.v_el('value', vf).text = remote_addr
        vf = self.v_el('ValidationFactor', vfs)
        self.v_el('name', vf).text = 'X-Forwarded-For'
        self.v_el('value', vf).text = ''
        response = self.service.call(action, msg)
        return list(response.getiterator(qn(self.ns_server, 'out')))[0].text
    def createPrincipalToken(self, username, user_agent=None, remote_addr=None):
        """Create an SSO token for *username* without a password check."""
        action = 'createPrincipalToken'
        msg = self.f_el(action)
        appcred = self._authenticateApplication()
        arg0 = self.f_el('in0', msg)
        arg0.append(appcred[0])
        arg0.append(appcred[1])
        arg1 = self.f_el('in1', msg)
        arg1.text = username
        arg2 = self.f_el('in2', msg)
        vf = self.v_el('ValidationFactor', arg2)
        self.v_el('name', vf).text = 'User-Agent'
        self.v_el('value', vf).text = user_agent or ''
        vf = self.v_el('ValidationFactor', arg2)
        self.v_el('name', vf).text = 'remote_address'
        self.v_el('value', vf).text = remote_addr or ''
        vf = self.v_el('ValidationFactor', arg2)
        self.v_el('name', vf).text = 'X-Forwarded-For'
        self.v_el('value', vf).text = ''
        response = self.service.call(action, msg)
        return list(response.getiterator(qn(self.ns_server, 'out')))[0].text
    def isValidPrincipalToken(self, token, user_agent=None, remote_addr=None):
        """True when *token* is still valid for this UA/address pair."""
        action = 'isValidPrincipalToken'
        msg = self.f_el(action)
        appcred = self._authenticateApplication()
        arg0 = self.f_el('in0', msg)
        arg0.append(appcred[0])
        arg0.append(appcred[1])
        arg1 = self.f_el('in1', msg)
        arg1.text = token
        arg2 = self.f_el('in2', msg)
        vf = self.v_el('ValidationFactor', arg2)
        self.v_el('name', vf).text = 'User-Agent'
        self.v_el('value', vf).text = user_agent
        vf = self.v_el('ValidationFactor', arg2)
        self.v_el('name', vf).text = 'remote_address'
        self.v_el('value', vf).text = remote_addr
        vf = self.v_el('ValidationFactor', arg2)
        self.v_el('name', vf).text = 'X-Forwarded-For'
        self.v_el('value', vf).text = ''
        response = self.service.call(action, msg)
        text = list(response.getiterator(qn(self.ns_server, 'out')))[0].text
        return text == 'true'
    def invalidatePrincipalToken(self, token):
        """Invalidate an SSO *token* (logout)."""
        action = 'invalidatePrincipalToken'
        msg = self.f_el(action)
        appcred = self._authenticateApplication()
        arg0 = self.f_el('in0', msg)
        arg0.append(appcred[0])
        arg0.append(appcred[1])
        arg1 = self.f_el('in1', msg)
        arg1.text = token
        self.service.call(action, msg)
# end class CrowdClient
class NewClient:
default_host = 'localhost:8095'
path = '/crowd/rest/usermanagement/1/'
def __init__(self, appname, appkey, host=None, debug=False):
self.host = host or self.default_host
self.auth = ('%s:%s' % (appname, appkey)).encode('base64').replace('\n', '')
def createPrincipalToken(self, username, user_agent=None, remote_addr=None):
pass
def isValidPrincipalToken(self, token, user_agent=None, remote_addr=None):
pass
def invalidatePrincipalToken(self, token):
pass
def request(self, method, path, data=None):
import httplib
h = httplib.HTTPConnection(self.host)
headers = {
'Authorization': 'Basic ' + self.auth,
'Accept': 'application/json',
'Connection': 'close'
}
if data:
headers['Content-Type'] = 'application/json'
data = types.json.dumps(data)
try:
h.request(method, self.path + path, data, headers)
r = h.getresponse()
except socket.error, e:
raise URLError(e)
rsp = r.read()
if 200 <= r.status <= 299:
return types.json.loads(rsp)
raise SoapError(rsp)
class CrowdUser(security.User):
    """Security-layer user backed by a Crowd Principal; manages the Crowd
    SSO token cookie and its periodic re-validation."""
    username = name = groups = tz = None
    def __init__(self, user):
        # *user* is a Principal; copy out what the security layer needs.
        self.username = user.username
        self.name = user.display_name
        self.mail = user.email_address
        self.groups = set(user.groupnames)
        self.password_ttl = user.password_ttl
        self.password_stamp = user.password_stamp
        self.touch()
    def __repr__(self):
        return 'CrowdUser(%s)' % self.username
    def password_change_due(self, grace=None):
        """True when the password expires within *grace*; falsy grace
        disables the check entirely."""
        if not grace:
            return False
        return self.password_ttl and self.password_ttl < grace
    @property
    def password_age(self):
        # None when the last password-change time is unknown.
        if not self.password_stamp:
            return None
        now = timestamp.localnow(self.tz)
        return now - self.password_stamp
    def authenticate(self, request):
        """On login: create a Crowd SSO token and set it as a cookie.
        No-op unless crowd_single_signon is configured."""
        app = request.app
        if not app.config.get('crowd_single_signon'):
            return
        cc = app.crowd_client
        ua = request.user_agent
        ra = request.remote_addr
        token = cc.createPrincipalToken(self.username, ua, ra)
        self.crowd_token = token
        response = getattr(request, 'response', None)
        if response:
            response.set_cookie(cc.cookie_name, token,
                                domain=cc.cookie_domain,
                                path=cc.cookie_path,
                                secure=cc.cookie_secure,
                                httponly=True)
    def unauthenticate(self, request):
        """On logout: invalidate the SSO token and delete the cookie."""
        cc = request.app.crowd_client
        token = request.cookies.get(cc.cookie_name)
        if not token:
            return
        cc.invalidatePrincipalToken(token)
        response = getattr(request, 'response', None)
        if response:
            response.delete_cookie(cc.cookie_name, path='/')
    def touch(self):
        # Remember when the token was last verified (epoch seconds).
        self._authenticated_ttl = int(time.time())
    def verify_authenticated(self, context):
        """Re-validate the SSO token at most every security.verify.ttl
        seconds; raise NotAuthenticated when missing or invalid."""
        if not context.request.app.config.get('crowd_single_signon'):
            return True
        now = int(time.time())
        app = context.app
        cc = app.crowd_client
        previous = self._authenticated_ttl
        ttl = int(app.config.get('security.verify.ttl') or 0)
        if now - previous < ttl:
            return True  # verified recently enough; skip the SOAP call
        token = context.request.cookies.get(cc.cookie_name)
        if not token:
            raise security.NotAuthenticated
        u = context.request.user_agent
        r = context.request.remote_addr
        if not cc.isValidPrincipalToken(token, u, r):
            context.request.user = security.ANONYMOUS
            raise security.NotAuthenticated
        self.touch()
        return True
@declarative()
class CrowdObject:
    """Declarative base for Crowd tables; adds lookup helpers keyed on
    each subclass's lower-cased name column (its ``key`` attribute)."""
    @classmethod
    def _get(Class, db, name):
        """Return the row named *name* in the Crowd directory, or None."""
        q = db.query(Class).filter_by(directory_id=CROWD_DIRECTORY_ID)
        return q.filter(getattr(Class, Class.key) == name.lower()).first()
    @classmethod
    def _ensure(Class, db, name):
        """Return the row named *name*, creating and adding it if absent."""
        x = Class._get(db, name)
        if x is None:
            logging.log.info('creating %s for %s', Class.__name__, name)
            x = Class(name)
            db.add(x)
        return x
class Attribute(CrowdObject):
    """Row in cwd_user_attribute: a name/value pair attached to a user."""
    __tablename__ = 'cwd_user_attribute'
    id = sql.Column(sql.Integer(), primary_key=True)
    attribute_name = sql.Column(sql.String(length=255), nullable=False)
    attribute_value = sql.Column(sql.String(length=255))
    attribute_lower_value = sql.Column(sql.String(length=255))
    user_id = sql.Column(sql.Integer(),
                         sql.ForeignKey('cwd_user.id'),
                         nullable=False)
    directory_id = sql.Column(sql.Integer(), nullable=False)
    def __init__(self, name, value):
        self.attribute_name = name
        self.attribute_value = value
        # Crowd also stores a lower-cased copy for case-insensitive search.
        self.attribute_lower_value = value and str(value).lower() or None
        self.directory_id = CROWD_DIRECTORY_ID
    def __repr__(self):
        return 'crowd.Attribute(%s=%s)' % (self.attribute_name, self.attribute_value)
class Principal(CrowdObject):
    """Row in Crowd's cwd_user table, with helpers for SSHA credential
    handling and synchronization from the master directory."""
    tz = None  # set by initialize(); used to localize password_stamp
    key = 'lower_user_name'
    __tablename__ = 'cwd_user'
    id = sql.Column(sql.Integer(), primary_key=True)
    user_name = sql.Column(sql.String(length=255), nullable=False)
    lower_user_name = sql.Column(sql.String(length=255), nullable=False)
    active = sql.Column(sql.CHAR(1), nullable=False)
    created_date = sql.Column(sql.DateTime(timezone=False), nullable=False)
    updated_date = sql.Column(sql.DateTime(timezone=False), nullable=False)
    first_name = sql.Column(sql.String(length=255))
    lower_first_name = sql.Column(sql.String(length=255))
    last_name = sql.Column(sql.String(length=255))
    lower_last_name = sql.Column(sql.String(length=255))
    display_name = sql.Column(sql.String(length=255))
    lower_display_name = sql.Column(sql.String(length=255))
    email_address = sql.Column(sql.String(length=255))
    lower_email_address = sql.Column(sql.String(length=255))
    directory_id = sql.Column(sql.Integer(), nullable=False)
    credential = sql.Column(sql.String(length=255))
    memberships = orm.relation('Membership',
                               cascade='all,delete-orphan',
                               backref='user')
    _attributes = orm.relation('Attribute',
                               cascade='all,delete-orphan',
                               collection_class=coldict(Attribute.attribute_name))
    # dict-like view: attribute_name -> attribute_value
    attributes = association_proxy('_attributes', 'attribute_value')
    groups = association_proxy('memberships', 'group')
    groupnames = property(lambda x:[g.group_name for g in x.groups])
    @property
    def password_stamp(self):
        # Crowd stores the change time as epoch milliseconds.
        stamp = self.attributes.get('passwordLastChanged')
        if stamp:
            return datetime.datetime.fromtimestamp(int(stamp)/1000.0, self.tz)
    password_ttl = None  # populated by synchronize() from the master record
    username = property(lambda x:x.user_name)
    cred_prefix = '{SSHA}'
    def __init__(self, username, clearpass=None):
        # NOTE(review): clearpass is accepted but never used here;
        # credentials are set via set_credential()/synchronize() — confirm.
        self.directory_id = CROWD_DIRECTORY_ID
        self.user_name = username
        self.lower_user_name = username.lower()
        self.created_date = datetime.datetime.now()
        self.updated_date = datetime.datetime.now()
        self.active = 'T'
        self.attributes['requiresPasswordChange'] = 'false'
        self.attributes['invalidPasswordAttempts'] = '0'
    def __repr__(self):
        return 'crowd.Principal(%s)' % self.user_name
    def set_credential(self, clearpass):
        """Store an SSHA hash of *clearpass*, or clear it when falsy."""
        # NOTE(review): 'index' is not referenced elsewhere in this class;
        # its purpose is unclear — confirm before removing.
        self.index = 0
        if clearpass:
            hashed = security.mkhash(clearpass, impl=security.sha1impl)
            self.credential = self.cred_prefix + hashed
            self.set_authenticate_stamp()
        else:
            self.credential = None
    def compare_credential(self, clearpass):
        """True when *clearpass* matches the stored SSHA credential."""
        if self.credential and clearpass:
            cred = self.credential[len(self.cred_prefix):]
            return security.cmphash(clearpass, cred, impl=security.sha1impl)
        return False
    def authenticate(self, clearpass):
        """Verify *clearpass*; raises BadUser/BadPassword, returns True."""
        if not self.credential:
            raise security.BadUser
        if not self.compare_credential(clearpass):
            raise security.BadPassword
        self.set_authenticate_stamp()
        return True
    def set_authenticate_stamp(self):
        # Crowd keeps the last-auth time as epoch milliseconds.
        self.attributes['lastAuthenticated'] = str(int(time.time()*1000))
    def synchronize(self, master, clearpass=None):
        """Copy names/mail/active/groups (and optionally the password)
        from the *master* directory record into this Crowd row."""
        if clearpass is not None and not self.compare_credential(clearpass):
            self.set_credential(clearpass)
        self.display_name = master.displayname
        self.first_name = x = master.givenname
        self.lower_first_name = x and x.lower()
        self.last_name = x = master.sn
        self.lower_last_name = x and x.lower()
        self.email_address = x = master.mail
        self.lower_email_address = x and x.lower()
        self.password_ttl = master.password_ttl
        if master.password_stamp:
            t = master.password_stamp
            # datetime -> epoch milliseconds
            t = int(time.mktime(t.timetuple()) * 1000 + t.microsecond / 1000.0)
            self.attributes['passwordLastChanged'] = str(t)
        self.active = master.disabled and 'F' or 'T'
        if master.groups:
            db = orm.object_session(self)
            new = set(Group._ensure(db, n) for n in set(master.groups))
            old = set(self.groups)
            # Drop memberships gone upstream, then add the new ones.
            self.memberships = [m for m in self.memberships
                                if m.group not in old - new]
            for g in new - old:
                self.memberships.append(Membership(g, self))
# end class Principal
class Group(CrowdObject):
    """Row in Crowd's cwd_group table."""
    key = 'lower_group_name'
    __tablename__ = 'cwd_group'
    id = sql.Column(sql.Integer(), primary_key=True, nullable=False)
    group_name = sql.Column(sql.String(length=255), primary_key=True, nullable=False)
    lower_group_name = sql.Column(sql.String(length=255), primary_key=True, nullable=False)
    active = sql.Column(sql.CHAR(1), nullable=False)
    is_local = sql.Column(sql.CHAR(1), nullable=False)
    created_date = sql.Column(sql.DateTime(timezone=False), nullable=False)
    updated_date = sql.Column(sql.DateTime(timezone=False), nullable=False)
    description = sql.Column(sql.String(length=255))
    group_type = sql.Column(sql.String(length=32))
    directory_id = sql.Column(sql.Integer())
    memberships = orm.relation('Membership',
                               cascade='all,delete-orphan',
                               backref='group')
    members = property(lambda x:[m.user for m in x.memberships])
    def __init__(self, name):
        self.group_name = name
        self.lower_group_name = name.lower()
        self.directory_id = CROWD_DIRECTORY_ID
        self.active = 'T'
        self.is_local = 'F'
        self.created_date = datetime.datetime.now()
        self.updated_date = datetime.datetime.now()
        self.description = None
        self.group_type = 'GROUP'
    def __repr__(self):
        return 'crowd.Group(%s)' % self.group_name
# end class Group
class Membership(CrowdObject):
    """Link row (cwd_membership) placing a Principal in a Group."""
    __tablename__ = 'cwd_membership'
    id = sql.Column(sql.Integer(), primary_key=True)
    parent_id = sql.Column(sql.Integer())
    child_id = sql.Column(sql.Integer())
    membership_type = sql.Column(sql.String(length=32))
    group_type = sql.Column(sql.String(length=32))
    parent_name = sql.Column(sql.String(length=255), nullable=False)
    lower_parent_name = sql.Column(sql.String(length=255), nullable=False)
    child_name = sql.Column(sql.String(length=255), nullable=False)
    lower_child_name = sql.Column(sql.String(length=255), nullable=False)
    directory_id = sql.Column(sql.Integer(), nullable=False)
    def __init__(self, group, user):
        # Crowd convention: parent = group, child = user.
        self.group = group
        self.user = user
        self.parent_name = group.group_name
        self.lower_parent_name = group.lower_group_name
        self.child_name = user.user_name
        self.lower_child_name = user.lower_user_name
        self.directory_id = CROWD_DIRECTORY_ID
        self.group_type = 'GROUP'
        self.membership_type = 'GROUP_USER'
    __table_args__ = (
        sql.ForeignKeyConstraint(['directory_id', 'lower_parent_name'],
            ['cwd_group.directory_id', 'cwd_group.lower_group_name']),
        sql.ForeignKeyConstraint(['directory_id', 'lower_child_name'],
            ['cwd_user.directory_id', 'cwd_user.lower_user_name']),
    )
# end class Membership
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
# TODO: get messages from win32 error codes
import re
import os
import sys
import uuid
import pytz
import ldap
import datetime
from fenton import util
from fenton import insecure
from fenton import security
# HRESULT subcodes seen from the Windows agent on password operations.
AD_ERR_AUTH_FAIL = 0x80070056
AD_ERR_POOR_PASSWORD = 0x800708c5
AD_ERR_ACCOUNT_LOCKED = 0x80070775
# Largest AD FILETIME value, used by mstimestamp() to mean "never".
MS_MAX_TIMESTAMP = 9223372036854775807
# AD FILETIME epoch: 100ns intervals counted from 1601-01-01 UTC.
MS_EPOCH = datetime.datetime(1601, 1, 1, tzinfo=pytz.UTC)
# Matches AD generalized-time strings (YYYYMMDDHHMMSS...); used by
# mstimestring().  (Not a raw string; \d still parses as intended.)
MSTSRE = re.compile('(\d\d\d\d)(\d\d)(\d\d)(\d\d)(\d\d)(\d\d).*')
class AccountLocked(security.SecurityException):
    """Raised when AD reports the account is locked out."""
    pass
class WindowsError(security.SecurityException):
    """Unmapped Windows/AD error: (code, message, detail).

    NOTE(review): this shadows the builtin WindowsError on Windows
    Python — confirm whether that is intentional before renaming.
    """
    pass
class LdapUser:
    """A user record fetched from the directory via LDAP.

    The attribute dict (already type-converted by the Authenticator's
    get_attributes/convert_types) is merged into the instance __dict__,
    so e.g. self.samaccountname resolves directly.
    """
    mail = altmail = None
    def __init__(self, conn, attrs):
        self.conn = conn
        self._attributes = attrs
        self.__dict__.update(attrs)
        #attrs = dict((k.lower(), v) for (k, v) in attrs.iteritems())
        #self.__dict__.update(attrs)
        #self.__attributes = attrs
        self.dn = self.distinguishedname
        self.password_stamp = self.pwdlastset
        self.account_expires = self.accountexpires
        self.disabled = False
        if self.password_expires:
            # Disabled when explicitly disabled, the account has expired,
            # or the password has expired without the no-expiry UAC flag.
            now = self.conn.now()
            ac_exp = self.account_expires
            pw_exp = self.password_expires
            no_expire = ADS_UF_DONT_EXPIRE_PASSWD in self.uac_flags
            self.disabled = (self.account_disabled
                             or (ac_exp and ac_exp < now)
                             or (pw_exp and pw_exp < now and not no_expire))
    @property
    def must_change_password(self):
        # A missing pwdlastset with an expiring password means a change
        # is forced at next logon.
        return (not self.password_stamp and
                ADS_UF_DONT_EXPIRE_PASSWD not in self.uac_flags)
    def set_password(self, password, reset=False):
        """Administratively set the password via the Windows agent;
        *reset* forces a change at next logon."""
        return self.conn.winagent.call(_set_password,
                                       self.conn.domain,
                                       self.dn,
                                       password,
                                       reset)
    def change_password(self, old, new):
        """Change the password as the user, mapping Windows error
        subcodes to security exceptions."""
        try:
            return self.conn.winagent.call(_change_password,
                                           self.conn.domain,
                                           self.username,
                                           old, new)
        except insecure.winerror, e:
            code, msg, detail, other = e.args[0]
            code = code & 0xffffffff
            subcode = detail and detail[5] & 0xffffffff
            if subcode == AD_ERR_AUTH_FAIL:
                raise security.BadPassword
            if subcode == AD_ERR_POOR_PASSWORD:
                raise security.PoorPassword
            if subcode == AD_ERR_ACCOUNT_LOCKED:
                raise AccountLocked
            raise WindowsError(code, msg, subcode)
    def __repr__(self):
        return '<LdapUser %s>' % (self.username)
    # can't remember what this is for
    # obviously it orders users by lastname then firstname
    # but I can't remember what for
    def __cmp__(self, other):
        return cmp(self.fullname.lower(), other.fullname.lower())
    username = property(lambda x:x.samaccountname.lower())
    @property
    def fullname(self):
        # "SURNAME, Given" when both parts exist, otherwise the CN name.
        sn = self.sn.upper() if self.sn else ''
        gn = self.givenname if self.givenname else ''
        if sn and gn:
            return '%s, %s' % (sn, gn)
        else:
            return self.name
class _worker:
    """One bound LDAP connection.  Unknown attributes (url, bind_dn,
    orgbase, pw_max_age, ...) delegate to the parent Authenticator via
    __getattr__."""
    def __init__(self, parent):
        self.parent = parent
        self.tz = parent.tz
        # self.url / self.bind_dn / self.bind_pw resolve on the parent.
        self.handle = ldap.initialize(self.url)
        self.bind(self.bind_dn, self.bind_pw)
    def __getattr__(self, name):
        return getattr(self.parent, name)
    def bind(self, dn, pw):
        self.handle.simple_bind_s(dn, pw)
    def unbind(self):
        self.handle.unbind_s()
    def __search(self, *args, **kw):
        # Name-mangled: raw synchronous search.
        return self.handle.search_s(*args, **kw)
    def _search_all(self, base, q, *a, **k):
        """Subtree search; list of attribute dicts with lower-cased keys."""
        result = self.__search(base, ldap.SCOPE_SUBTREE, q, *a, **k)
        if not result:
            return []
        return [dict((k.lower(), v) for k, v in item[1].items())
                for item in result]
    def _search_one(self, base, scope, q, *a, **k):
        """Search returning only the first entry's attribute dict
        (keys lower-cased), or None."""
        result = self.__search(base, scope, q, *a, **k)
        if not result:
            return None
        return dict((k.lower(), v) for k, v in result[0][1].items())
    def get_base(self, q, *a, **k):
        return self._search_one(self.orgbase, ldap.SCOPE_SUBTREE, q, *a, **k)
    def get_root(self, q, *a, **k):
        return self._search_one(self.root, ldap.SCOPE_SUBTREE, q, *a, **k)
    def get_dn(self, username):
        """Resolve a samaccountname to its DN, or None when absent."""
        q = 'samaccountname='+username
        result = self.__search(self.orgbase, ldap.SCOPE_SUBTREE, q, [''])
        if not result:
            return None
        return result[0][0]
    def authenticate(self, username, password):
        """Bind as *username* to verify *password*.

        Raises NoUser, or LdapFailure (re-raised with the original
        traceback via the three-argument raise).
        """
        try:
            dn = self.get_dn(username)
            if not dn:
                raise security.NoUser
            self.bind(dn, password)
        except ldap.error, e:
            raise LdapFailure, LdapFailure(e), sys.exc_info()[2]
    def get_user(self, id, groups=True):
        """Build an LdapUser from a DN or samaccountname; groups=False
        skips the (slower) nested group resolution."""
        info = self.get_attributes(id)
        if not info:
            return None
        user = LdapUser(self, info)
        user.groups = groups and self.get_groups(user) or None
        return user
    def get_groups(self, user, allgroups=False):
        """Transitive closure of the user's group names (lower-cased
        samaccountnames), chasing nested memberOf links breadth-first."""
        if allgroups:
            get = self.get_root
        else:
            get = self.get_base
        groups = set()
        seen = set()
        chase = [x.lower() for x in user._attributes.get('memberof') or []]
        while chase:
            next = []
            for dn in chase:
                group = get('distinguishedname='+dn, ['samaccountname', 'memberof'])
                sam = group and group.get('samaccountname')
                sam = sam and sam[0] or None
                if sam:
                    seen.add(dn)
                    groups.add(sam.lower())
                    parents = [x.lower() for x in group.get('memberof') or []]
                    next.extend(p for p in parents if p not in seen)
            chase = next
        return groups
    def get_attributes(self, dn):
        """Fetch and type-convert all attributes for *dn* (or username);
        adds derived uac_*/password_*/altmail entries.  None if absent."""
        if '=' not in dn:
            dn = self.get_dn(dn)
            if not dn:
                return None
        # Escape parens so the DN is safe inside the LDAP filter.
        dn = dn.replace('(', '\\(').replace(')', '\\)')
        attrs = self.get_base('distinguishedname='+dn)
        if not attrs:
            return None
        self.parent.convert_types(attrs)
        uac = attrs['useraccountcontrol']
        uac_items = set()
        uac_flags = set()
        if uac:
            for k, d in UAC_FIELDS.items():
                if uac & d:
                    uac_items.add(k)
                    uac_flags.add(d)
        attrs['uac_items'] = uac_items
        attrs['uac_flags'] = uac_flags
        attrs['account_disabled'] = uac and bool(uac & ADS_UF_ACCOUNTDISABLE)
        no_expire = uac and bool(uac & ADS_UF_DONT_EXPIRE_PASSWD)
        now = self.parent.now()
        lastset = attrs.get('pwdlastset') # or None -> never set or must change
        expires = not no_expire and lastset and self.pw_max_age + lastset
        attrs['password_expires'] = expires
        attrs['password_ttl'] = expires and expires - now
        attrs['altmail'] = None
        alt = attrs.get('altrecipient')
        if alt:
            # Follow the altRecipient DN to its SMTP targetAddress.
            alt = self.get_base('distinguishedname='+alt[0])
            if alt and 'targetaddress' in alt:
                alt = alt['targetaddress'][0].replace('SMTP:', '')
            else:
                alt = None
            attrs['altmail'] = alt
        return attrs
class Problem(Exception):
    """Generic directory problem."""
    pass
class ServerDown(Problem):
    """The LDAP server could not be reached."""
    pass
def _set_password(domain, dn, password, reset):
    """Executed on the Windows agent: administratively set a password via
    ADSI; *reset* forces a change at next logon (pwdLastSet = 0)."""
    from win32com.client import GetObject
    lobj = GetObject('LDAP://%s' % dn)
    #wobj = GetObject('WinNT://%s/%s,user' % (domain, lobj.samaccountname))
    #wobj.SetPassword(password)
    lobj.SetPassword(password)
    if reset:
        lobj.pwdLastSet = 0
    lobj.setinfo()
# must use WinNT:// to get correct error response
def _change_password(domain, username, old, new):
    """Executed on the Windows agent: change a password as the user.

    Uses the WinNT:// provider (per the comment above) so the detailed
    error subcodes mapped in LdapUser.change_password are produced.
    """
    from win32com.client import GetObject
    obj = GetObject('WinNT://%s/%s,user' % (domain, username))
    obj.ChangePassword(old, new)
def _current_sessions():
    """Executed on the Windows agent: list (computer, user) pairs for the
    server's open sessions.

    NOTE(review): the server name 'lynx' is hard-coded — confirm.
    """
    from win32com.client import GetObject
    sessions = GetObject('WinNT://lynx/LanManServer').Sessions()
    return [(s.Computer.lower(), s.Name.lower()) for s in sessions]
def lookup(host):
    """Best-effort pretty (ip, host) pair for a session's computer name.

    Bracketed names are returned untouched (with an empty host part).
    Raw 192.168.* addresses are reverse-resolved to a short host name;
    anything else is forward-resolved to an IP.  Resolution errors
    propagate; callers are expected to handle them.
    """
    import socket
    if host[0] == '[':
        return host, ''
    if host.startswith('192.168'):
        ip = host
        # gethostbyaddr returns (name, aliases, addresses); take only the
        # primary name.  Bug fix: previously the whole tuple leaked
        # through when the name contained no dot, making the '%s' format
        # below raise TypeError.
        host = socket.gethostbyaddr(ip)[0]
        if '.' in host:
            host = host.split('.')[0]
        host = '(%s)' % host
        ip = ' ' + ip
    else:
        ip = '(%s)' % socket.gethostbyname(host)
    return ip, host
def domain_sessions(rq):
    """Print the server's current sessions, one '%ip %host %user' line
    each, sorted, resolving names best-effort via lookup()."""
    sessions = rq.app.auth.winagent.call(_current_sessions)
    def f():
        for (host, user) in sessions:
            try:
                ip, host = lookup(host)
            except Exception, e:
                # Resolution failure is non-fatal: show the raw value.
                ip = host
                host = '-'
            yield ip, host, user.split('\\')[0]
    for (ip, host, user) in sorted(f()):
        print '%-16s %-20s %s' % (ip, host, user)
class Authenticator:
    """Active Directory authenticator.

    Reads ``directory.*`` configuration, performs lookups over LDAP
    (via _worker) and password operations via a Windows agent
    (insecure.Client), and converts raw AD attribute values to Python
    types.
    """
    # Domain policy values, resolved lazily through domain_info.
    lockout_duration = property(lambda x:x.domain_info['lockout_duration'])
    lockout_threshold = property(lambda x:x.domain_info['lockout_threshold'])
    pw_max_age = property(lambda x:x.domain_info['pw_max_age'])
    pw_min_len = property(lambda x:x.domain_info['pw_min_len'])
    pw_history_len = property(lambda x:x.domain_info['pw_history_len'])
    pw_properties = property(lambda x:x.domain_info['pw_properties'])
    # Attribute converters; each takes (self, raw_value_list) as returned
    # by python-ldap and is dispatched through _user_fields below.
    string =lambda s,x:x[0].decode('UTF-8')
    strings = lambda s,xs: [x.decode('UTF-8') for x in xs]
    ident = lambda s,x: x
    just = lambda s,x: x[0]
    num = lambda s,x: int(x[0])
    guid = lambda s,x: uuid.UUID(bytes=x[0])
    def from_msinterval(self, x):
        # AD intervals are counted in 100-nanosecond units.
        return datetime.timedelta(microseconds=long(x)/10)
    def mstimestamp(self, x):
        """Convert an AD FILETIME (100ns ticks since MS_EPOCH) to a local
        datetime; 0 -> None, MS_MAX_TIMESTAMP -> datetime.max."""
        x = long(x[0])
        if x == 0:
            return None
        if x == MS_MAX_TIMESTAMP:
            return datetime.datetime.max.replace(tzinfo=self.tz)
        return (MS_EPOCH + self.from_msinterval(x)).astimezone(self.tz)
    def mstimestring(self, x, r=MSTSRE):
        """Convert an AD generalized-time string (UTC) to local datetime."""
        x = x[0]
        t = datetime.datetime(*map(int, r.match(x).groups()))
        return pytz.UTC.localize(t).astimezone(self.tz)
    def convert_types(self, attrs):
        """In-place: run each _user_fields converter; missing values
        become explicit None entries."""
        for k, f in self._user_fields.items():
            v = attrs.get(k)
            attrs[k] = f(self, v) if v is not None else None
    _user_fields = dict(
        accountexpires = mstimestamp,
        badpasswordtime = mstimestamp,
        lastlogon = mstimestamp,
        lastlogontimestamp = mstimestamp,
        # NOTE(review): AD's attribute is spelled 'lockouttime'; this key
        # looks misspelled and so would never match — confirm.
        lockoutime = mstimestamp,
        pwdlastset = mstimestamp,
        badpwdcount = num,
        whenchanged = mstimestring,
        whencreated = mstimestring,
        useraccountcontrol = num,
        description = string,
        cn = string,
        displayname = string,
        givenname = string,
        name = string,
        distinguishedname = string,
        samaccountname = string,
        sn = string,
        userprincipalname = string,
        #c = string,
        #co = string,
        #company = string,
        #department = string,
        #directreports = strings,
        #homedirectory = string,
        #homedrive = string,
        #homemdb = string,
        #homemta = string,
        homephone = string,
        #l = string,
        mail = string,
        mailnickname = string,
        #manager = string,
        mobile = string,
        scriptpath = string,
        #showinaddressbook = strings,
        title = string,
        telephonenumber = string,
        #memberof = strings
        )
    def __init__(self, config):
        def get(k):
            return config['directory.'+k]
        self.config = config
        self.root = get('rootdn')
        self.domain = get('ntdomain')
        self.orgbase = get('orgbase')
        self.userbase = get('userbase')
        self.bind_dn = get('bind_dn')
        self.bind_pw = get('bind_pw')
        self.tz = config['tz']
        host = get('host')
        secret = get('secret')
        self.url = 'ldap://%s:%s' % (host, get('ldapport'))
        self.winagent = insecure.Client(secret, host, int(get('winport')))
    def connect(self):
        """Return a bound _worker connection; raises ServerDown."""
        try:
            return _worker(self)
        except ldap.SERVER_DOWN:
            raise ServerDown
    def now(self):
        from fenton import timestamp
        return timestamp.localnow(self.tz)
    @util.lazyattr
    def domain_info(self):
        """Fetch (once, cached by lazyattr) the domain password and
        lockout policy from the domain object."""
        w = self.connect()
        info = w._search_one(self.root, ldap.SCOPE_BASE, 'objectclass=domain')
        if not info:
            raise Problem('no domain info')
        lockout_duration = info['lockoutduration'][0]
        lockout_window = info['lockoutobservationwindow'][0]
        lockout_threshold = info['lockoutthreshold'][0]
        pw_max_age = info['maxpwdage'][0]
        pw_min_len = info['minpwdlength'][0]
        pw_history_len = info['pwdhistorylength'][0]
        pw_properties = info['pwdproperties'][0]
        # Durations are stored negated in AD; abs() normalizes them.
        return dict(
            lockout_duration = self.from_msinterval(abs(int(lockout_duration))),
            lockout_window = self.from_msinterval(abs(int(lockout_window))),
            lockout_threshold = abs(int(lockout_threshold)),
            pw_max_age = self.from_msinterval(abs(int(pw_max_age))),
            pw_min_len = int(pw_min_len),
            pw_history_len = int(pw_history_len),
            pw_properties = int(pw_properties),
        )
    def __set_user_password(self, username, password, reset=False):
        # NOTE(review): _set_user_password is not defined in this module
        # (only _set_password exists); calling this raises NameError.
        # Looks like dead or broken code — confirm.
        return self.winagent.call(_set_user_password, self.domain, username, password, reset)
    def __change_user_password(self, username, old, new):
        # NOTE(review): _change_user_password is likewise undefined (only
        # _change_password exists) — confirm before use.
        try:
            return self.winagent.call(_change_user_password, self.domain, username, old, new)
        except insecure.winerror, e:
            code, msg, detail, other = e.args[0]
            code = code & 0xffffffff
            if not detail:
                raise
            subcode = detail[5] & 0xffffffff
            if subcode == AD_ERR_AUTH_FAIL:
                raise security.BadPassword
            if subcode == AD_ERR_POOR_PASSWORD:
                raise security.PoorPassword
            if subcode == AD_ERR_ACCOUNT_LOCKED:
                raise AccountLocked
            raise WindowsError(code, msg, detail)
    def get_user(self, username, password=None):
        """Look up *username*; when *password* is given, verify it first
        (raising on failure)."""
        w = self.connect()
        if password is not None:
            w.authenticate(username, password)
        return w.get_user(username)
    def iter_users(self):
        """Yield every LdapUser under userbase, ordered by DN."""
        w = self.connect()
        dns = w._search_all(self.userbase,
                            'objectclass=user',
                            ['distinguishedname'])
        for dn in sorted(x['distinguishedname'][0] for x in dns):
            yield w.get_user(dn)
class LdapFailure(security.LoginFailed):
    """LoginFailed wrapper preserving the underlying python-ldap error."""
    def __init__(self, orig):
        # python-ldap packs a dict with 'desc'/'info' keys into args[0].
        self.msg = orig.args[0].get('desc')
        self.detail = orig.args[0].get('info')
        self.orig = orig
    def __str__(self):
        return self.msg
# userAccountControl flags (on user object)
# Bit values of the AD useraccountcontrol attribute.
ADS_UF_SCRIPT = 0x1
ADS_UF_ACCOUNTDISABLE = 0x2
ADS_UF_HOMEDIR_REQUIRED = 0x8
ADS_UF_LOCKOUT = 0x10
ADS_UF_PASSWD_NOTREQD = 0x20
ADS_UF_PASSWD_CANT_CHANGE = 0x40
ADS_UF_ENCRYPTED_TEXT_PASSWORD_ALLOWED = 0x80
ADS_UF_TEMP_DUPLICATE_ACCOUNT = 0x100
ADS_UF_NORMAL_ACCOUNT = 0x200
ADS_UF_INTERDOMAIN_TRUST_ACCOUNT = 0x800
ADS_UF_WORKSTATION_TRUST_ACCOUNT = 0x1000
ADS_UF_SERVER_TRUST_ACCOUNT = 0x2000
ADS_UF_DONT_EXPIRE_PASSWD = 0x10000
ADS_UF_MNS_LOGON_ACCOUNT = 0x20000
ADS_UF_SMARTCARD_REQUIRED = 0x40000
ADS_UF_TRUSTED_FOR_DELEGATION = 0x80000
ADS_UF_NOT_DELEGATED = 0x100000
ADS_UF_USE_DES_KEY_ONLY = 0x200000
ADS_UF_DONT_REQUIRE_PREAUTH = 0x400000
ADS_UF_PASSWORD_EXPIRED = 0x800000
ADS_UF_TRUSTED_TO_AUTHENTICATE_FOR_DELEGATION = 0x1000000
# Map of flag name -> bit value for every ADS_UF_* constant above.
UAC_FIELDS = dict((k, eval(k)) for k in dir()
                  if k.startswith('ADS_UF_'))
# pwdProperties flags (on domain object)
DOMAIN_PASSWORD_COMPLEX = 1
DOMAIN_PASSWORD_NO_ANON_CHANGE = 2
DOMAIN_PASSWORD_NO_CLEAR_CHANGE = 4
DOMAIN_LOCKOUT_ADMINS = 8
DOMAIN_PASSWORD_STORE_CLEARTEXT = 16
DOMAIN_REFUSE_PASSWORD_CHANGE = 32
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import os
import string
import random
# Punctuation characters eligible for generated passwords.
USABLE_PUNCTUATION = '''~!@#$%^&*()-=+[];:'",.<>?'''
def randint(max):
    """Return a random integer in the inclusive range [0, max]."""
    return random.randrange(0, max + 1)
class Generator:
    """Memorable-password generator: two random words joined by a random
    number or punctuation character, padded up to a minimum length."""
    def __init__(self, words):
        self.words = words
        self.digits = string.digits
        self.punct = USABLE_PUNCTUATION
    def getword(self, min=1, max=100):
        """Pick a random word whose length lies within [min, max].

        NOTE(review): loops forever if no word in self.words fits.
        """
        word = ''
        while len(word) < min or len(word) > max:
            word = random.choice(self.words)
        return word
    def generate(self, minlen=8):
        """Return a fresh password of at least *minlen* characters."""
        words = [self.getword(5,12),
                 self.getword(3,6)]
        #self.getword(1,4)]
        # Capitalize one of the two words at random.
        i = randint(1)
        words[i] = words[i].capitalize()
        pun = random.choice(self.punct)
        num = str(randint(10**randint(2)))  # 0..100, width varies
        joins = [pun, num]
        out = words.pop()
        out += joins.pop(randint(1))
        out += words.pop()
        #out += joins.pop()
        #out += words.pop()
        # Pad with extra character + word until long enough.
        while len(out) < minlen:
            out += random.choice(self.digits + self.punct)
            out += random.choice(self.words)
        return out
    # str(), repr() and calling the instance all produce a new password.
    __repr__ = __str__ = __call__ = generate
def open_file(name='words'):
    """Build a Generator from a word-list file.

    *name* is a file stem; '.txt' is appended, and relative paths are
    resolved next to this module.  One word per line; surrounding
    whitespace is stripped.
    """
    fname = name + '.txt'
    if not os.path.isabs(fname):
        d = os.path.dirname(__file__)
        fname = os.path.join(d, fname)
    # Fix: use a context manager so the file handle is closed promptly
    # (map(string.strip, open(fname)) leaked the open handle), and prefer
    # the str method over the deprecated string.strip function.
    with open(fname) as f:
        words = [line.strip() for line in f]
    return Generator(words)
if __name__ == '__main__':
    # Demo: print one generated password using the bundled word list.
    print open_file().generate()
| Python |
if 0:
    # NOTE(review): dead code — guarded by `if 0:` so it never executes.
    # Read-only declarative mappings over the information_schema views,
    # apparently kept for reference.
    class schema_table(DbModel):
        __classid__ = _object_guid = None
        __tablename__ = 'tables'
        table_schema = sql.Column(sql.String(), primary_key=True)
        table_name = sql.Column(sql.String(), primary_key=True)
        __table_args__ = {'schema': 'information_schema'}
    class schema_column(DbModel):
        __classid__ = _object_guid = None
        __tablename__ = 'columns'
        table_schema = sql.Column(sql.String(), primary_key=True)
        table_name = sql.Column(sql.String(), primary_key=True)
        column_name = sql.Column(sql.String(), primary_key=True)
        ordinal_position = sql.Column(sql.Integer())
        is_nullable = sql.Column(sql.String())
        data_type = sql.Column(sql.String())
        table = orm.relation('schema_table', backref='columns')
        __table_args__ = (sql.ForeignKeyConstraint(['table_schema', 'table_name'],
                              ['information_schema.tables.table_schema',
                               'information_schema.tables.table_name']),
                          {'schema': 'information_schema'})
    class schema_constraint(DbModel):
        __classid__ = _object_guid = None
        __tablename__ = 'table_constraints'
        constraint_schema = sql.Column(sql.String(), primary_key=True)
        constraint_name = sql.Column(sql.String(), primary_key=True)
        table_schema = sql.Column(sql.String(), primary_key=True)
        table_name = sql.Column(sql.String(), primary_key=True)
        constraint_type = sql.Column(sql.String())
        table = orm.relation('schema_table', backref='constraints')
        __table_args__ = (sql.ForeignKeyConstraint(['table_schema', 'table_name'],
                              ['information_schema.tables.table_schema',
                               'information_schema.tables.table_name']),
                          {'schema': 'information_schema'})
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import time
import Queue
import smtplib
import threading
from email import encoders
from email.header import make_header
from email.utils import make_msgid
from email.mime.audio import MIMEAudio
from email.mime.base import MIMEBase
from email.mime.image import MIMEImage
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
__metaclass__ = type
class Message:
    """One email message (plain and/or HTML body, optional attachments).

    Attachments are stored as (filename, content-id) pairs; str(message)
    renders the complete MIME document.
    """
    def __init__(self, To=None, From=None, Cc=None, Bcc=None,
                 Date=None, Subject=None, TextBody=None, HtmlBody=None,
                 attachments=None, charset='US-ASCII'):
        self.attachments = []
        for a in attachments or []:
            self.add_attachment(a)
        self.To = To
        self.Cc = Cc
        self.Bcc = Bcc
        self.From = From
        self.Subject = Subject
        self.TextBody = TextBody
        self.HtmlBody = HtmlBody
        # RFC-2822-style date header; defaults to "now" in GMT.
        self.Date = Date or time.strftime('%a, %d %b %Y %H:%M:%S %z', time.gmtime())
        self.charset = charset
        self.message_id = make_msgid()
    def add_attachment(self, attachment):
        """Accept a filename or a (filename, content-id) pair."""
        if isinstance(attachment, basestring):
            self.attachments.append((attachment, None))
        else:
            try:
                filename, cid = attachment
            # ValueError added: unpacking a wrong-length sequence raises
            # ValueError, which the original let escape.
            except (TypeError, IndexError, ValueError):
                self.attachments.append((attachment, None))
            else:
                self.attachments.append((filename, cid))
    def __str__(self):
        if self.attachments:
            msg = self._multipart()
        else:
            msg = self._text()
        self._write_headers(msg)
        return msg.as_string()
    def _text(self):
        # Plain text, or multipart/alternative when HTML is present.
        if self.HtmlBody:
            return self._with_html()
        return MIMEText(self.TextBody, 'plain', self.charset)
    def _with_html(self):
        # Plain part first so clients prefer the HTML part.
        outer = MIMEMultipart('alternative')
        outer.attach(MIMEText(self.TextBody, 'plain', self.charset))
        outer.attach(MIMEText(self.HtmlBody, 'html', self.charset))
        return outer
    def _write_headers(self, msg):
        msg['Date'] = self.Date
        msg['From'] = self.From
        msg['Message-Id'] = self.message_id
        subject = self.Subject
        # Case-insensitive compare: the default charset is 'US-ASCII',
        # which the original compared against 'us-ascii' and therefore
        # always re-encoded the subject.
        if self.charset.lower() != 'us-ascii':
            subject = unicode(self.Subject, self.charset)
            subject = str(make_header([(subject, self.charset)]))
        msg['Subject'] = subject
        to = self.To
        if not isinstance(to, basestring):
            to = ', '.join(list(to))
        msg['To'] = to
        cc = self.Cc
        if cc and not isinstance(cc, basestring):
            cc = ', '.join(list(cc))
        if cc:
            msg['Cc'] = cc
    def _multipart(self):
        msg = MIMEMultipart('related')
        msg.preamble = self.Subject
        if self.HtmlBody:
            outer = MIMEMultipart('alternative')
            part1 = MIMEText(self.TextBody, 'plain', self.charset)
            part1.add_header('Content-Disposition', 'inline')
            part2 = MIMEText(self.HtmlBody, 'html', self.charset)
            part2.add_header('Content-Disposition', 'inline')
            outer.attach(part1)
            outer.attach(part2)
            msg.attach(outer)
        else:
            msg.attach(MIMEText(self.TextBody, 'plain', self.charset))
        for filename, cid in self.attachments:
            # The original passed (filename, cid) straight into
            # put_attachment(body, ctype, ...), so ctype.split('/') blew
            # up; load the file and guess its content type instead.
            msg.attach(self._load_attachment(filename, cid))
        return msg
    def _load_attachment(self, filename, cid):
        """Read *filename* and wrap it via put_attachment()."""
        import os, mimetypes
        ctype, encoding = mimetypes.guess_type(filename)
        if ctype is None or encoding is not None:
            ctype = 'application/octet-stream'
        f = open(filename, 'rb')
        try:
            body = f.read()
        finally:
            f.close()
        return self.put_attachment(body, ctype,
                                   filename=os.path.basename(filename),
                                   cid=cid)
    def put_attachment(self, body, ctype, filename=None, cid=None):
        """Wrap raw *body* of MIME type *ctype* in the right MIME part.

        A cid makes the part inline (referencable from the HTML body);
        otherwise it is a named attachment.
        """
        maintype, subtype = ctype.split('/', 1)
        M = {'text': MIMEText,
             'image': MIMEImage,
             'audio': MIMEAudio}.get(maintype)
        if M:
            msg = M(body, _subtype=subtype)
        else:
            msg = MIMEBase(maintype, subtype)
            msg.set_payload(body)
            encoders.encode_base64(msg)
        if cid:
            msg.add_header('Content-ID', '<%s>' % cid)
            msg.add_header('Content-Disposition', 'inline')
        else:
            msg.add_header('Content-Disposition', 'attachment', filename=filename)
        return msg
    def attach(self, filename, cid=None):
        """Queue a file (by name) for attachment."""
        self.attachments.append((filename, cid))
    def __iter__(self):
        # A single message iterates as itself so Mailer.send() accepts
        # either one Message or an iterable of them.
        yield self
class Mailer:
    """Thin SMTP delivery wrapper."""
    def __init__(self, host='localhost', port=25, use_tls=False,
                 username=None, password=None):
        self.host = host
        self.port = port
        self.use_tls = use_tls
        self.username = username
        self.password = password
    def login(self, username, password):
        """Set the credentials used by subsequent send() calls."""
        self.username = username
        self.password = password
    def send(self, messages):
        """Deliver *messages* (one Message or an iterable of them).

        Fixes: the connection is now closed even when a send raises, and
        STARTTLS is negotiated whenever use_tls is set (the original only
        did so when credentials were also present).
        """
        server = smtplib.SMTP(self.host, self.port)
        try:
            if self.use_tls is True:
                server.ehlo()
                server.starttls()
                server.ehlo()
            if self.username and self.password:
                server.login(self.username, self.password)
            for m in messages:
                self._send(server, m)
        finally:
            server.quit()
    def _send(self, server, msg):
        """Send one message to the union of its To/Cc/Bcc recipients."""
        to = msg.To or []
        to = isinstance(to, basestring) and [to] or list(to)
        cc = msg.Cc or []
        cc = isinstance(cc, basestring) and [cc] or list(cc)
        bcc = msg.Bcc or []
        bcc = isinstance(bcc, basestring) and [bcc] or list(bcc)
        rcpts = to + cc + bcc
        server.sendmail(msg.From, rcpts, str(msg))
class Manager(threading.Thread):
'''
Send email in the background
if a message was succesfully sent, self.status[msg.message_id] contains
a tuple (True/False, code, message)
'''
def __init__(self, mailer=None, callback=None):
threading.Thread.__init__(self)
self.mailer = mailer
self.queue = Queue.Queue()
self.send = self.queue.put
self.callback = callback
self.running = True
self.results = {}
def halt(self):
self.running = False
def run(self):
while self.running:
msg = self.queue.get(block=True)
if msg is None:
break
for m in msg:
try:
self.status[m.message_id] = (False, -1, 'Not sent')
self.mailer.send(m)
self.status[m.message_id] = (True, 0, 'Sent successfully')
except Exception, e:
args = e.args
if len(args) < 2:
args = (-1, e.args[0])
self.status[m.message_id] = (False, args[0], args[1])
if self.callback is not None:
try:
self.callback(m)
except:
pass
self.queue.task_done()
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import os
import re
import sys
import time
import string
import inspect
import threading
import functools
from collections import defaultdict
# Highest-resolution wall clock per platform (Python 2 idiom:
# time.clock on Windows, time.time elsewhere).
if sys.platform == 'win32':
    timer = time.clock
else:
    timer = time.time
# Saved before this module's own property() override (defined below).
builtin_property = property
# python language utils
#######################
def classname(obj):
    """Return the fully-qualified dotted name of *obj*'s class (or of
    *obj* itself when it is already a class)."""
    cls = obj if isinstance(obj, type) else type(obj)
    return '%s.%s' % (cls.__module__, cls.__name__)
class classproperty:
    """Read-only property evaluated against the owning class rather than
    the instance."""
    def __init__(self, fget):
        self.fget = fget
    def __get__(self, instance, owner):
        # The instance is ignored; the getter always receives the class.
        return self.fget(owner)
class classinstancemethod:
    """Descriptor: a method bound to the class when accessed on the
    class, or to the instance when accessed on an instance.
    Python 2 only: relies on the removed `new` module."""
    def __init__(self, func):
        self.func = func
    def __get__(self, instance, owner):
        import new
        if instance is None:
            return new.instancemethod(self.func, owner, owner.__class__)
        else:
            return new.instancemethod(self.func, instance, owner)
class classinstanceproperty:
    """Property whose getter receives both the owner class and the
    instance (None on class access)."""
    def __init__(self, fget):
        self.fget = fget
    def __get__(self, instance, owner):
        return self.fget(owner, instance)
def getvar(name, offset=1):
    """Search enclosing call frames (starting *offset* frames up) for a
    local variable *name*; raise NameError when it is nowhere bound."""
    frame = sys._getframe(offset)
    while frame is not None:
        if name in frame.f_locals:
            return frame.f_locals[name]
        frame = frame.f_back
    raise NameError('Dynamic variable %s not found' % name)
def reprargs(*x, **y):
    """Render a call's arguments as source text, e.g.
    reprargs(1, a=2) -> "1, a=2".  (Python 2: dict.iteritems.)"""
    x = ', '.join(map(repr, x))
    y = ', '.join('%s=%r'%p for p in y.iteritems())
    return ', '.join((x and [x] or []) + (y and [y] or []))
class dynamic_var_getter:
    """Attribute proxy over getvar(): `dynamic.foo` looks up a local
    variable `foo` in the caller's frame stack."""
    def __getattr__(self, name):
        # offset 2: skip this frame and getvar's own frame.
        return getvar(name, 2)
# Singleton entry point for dynamically-scoped variable lookup.
dynamic = dynamic_var_getter()
def wrapwith(wrapper):
    """Decorator factory: post-process the wrapped function's return
    value through *wrapper*."""
    def decorate(wrapped):
        @functools.wraps(wrapped)
        def inner(*args, **kw):
            result = wrapped(*args, **kw)
            return wrapper(result)
        return inner
    return decorate
def yieldlist(f):
    """Decorator: materialize a generator function's output as a list."""
    @functools.wraps(f)
    def wrapper(*args, **kw):
        return list(f(*args, **kw))
    return wrapper
class synchronized:
    """Method decorator serializing all calls through one shared lock.

    Accessed on the class it returns the descriptor itself; accessed on
    an instance it returns a bound wrapper holding the lock per call.
    """
    def __init__(self, func):
        self.lock = threading.Lock()
        self.func = func
    def __get__(self, obj, type):
        if obj is None:
            return self
        return self.bound(obj)
    def bound(self, __obj):
        @functools.wraps(self.func)
        def wrapper(*__args, **__kw):
            self.lock.acquire()
            try:
                return self.func(__obj, *__args, **__kw)
            finally:
                self.lock.release()
        wrapper.__name__ += ' (synchronized)'
        return wrapper
# UNUSED
class synchronized_property:
    """Data descriptor guarding writes with a lock (reads are unlocked).

    NOTE(review): __get__'s parameters are named (type, obj) but the
    descriptor protocol passes (obj, type); harmless since both are
    ignored — confirm before reusing this class.
    """
    def __init__(self, value=None):
        self.value = value
        self.lock = threading.Lock()
    def __get__(self, type, obj):
        return self.value
    def __set__(self, obj, value):
        with self.lock:
            self.value = value
def normalize_path(f=None):
    """Remove the __main__ script's directory from sys.path, in place.

    NOTE(review): parameter *f* is unused — kept for call compatibility.
    """
    mf = sys.modules['__main__'].__file__
    mp = os.path.dirname(os.path.realpath(mf))
    sys.path[:] = [p for p in sys.path if p != mp]
def property(fget=None, fset=None, fdel=None, doc=None):
    """Enhanced replacement for the builtin property (which it shadows;
    the original is saved as builtin_property above).

    A getter that takes arguments behaves as a plain accessor property;
    a zero-argument function is handed to fancy_property(), which pulls
    fget/fset/fdel out of its local scope.
    """
    # if function takes a parameter, assume it is 'self'
    # and use this as a simple accessor property
    if inspect.getargspec(fget)[0]:
        return builtin_property(fget, fset=fset, fdel=fdel,
                                doc=doc or inspect.getdoc(fget))
    assert not fset and not fdel
    return fancy_property(fget)
def fancy_property(func):
    """Build a property from fget/fset/fdel functions defined inside
    *func*'s body, by lifting their code objects out of co_consts.
    Python 2 only: uses the removed `new` module and func_code attrs."""
    # else function takes no parameters and defines
    # fget, fset, fdel in local scope
    # do some hackery to get the fget, fset, fdel
    funcs = set(['fget', 'fset', 'fdel'])
    import new
    kw = {}
    code = func.func_code
    globs = func.func_globals
    kw = dict((c.co_name, new.function(c, globs))
              for c in code.co_consts
              if type(c) is type(code) and c.co_name in funcs)
    kw['doc'] = inspect.getdoc(func)
    return builtin_property(**kw)
def topsort(pairs):
    """Topologically order dependency *pairs* (src, dst).

    Returns the edges in a valid processing order, or [] when the graph
    contains a cycle.
    """
    successors = {}
    indegree = {}
    for src, dst in pairs:
        successors.setdefault(src, [])
        successors.setdefault(dst, [])
        successors[src].append(dst)
        indegree.setdefault(src, 0)
        indegree.setdefault(dst, 0)
        indegree[dst] += 1
    ready = [node for node in successors if not indegree[node]]
    ordered = []
    while ready:
        node = ready.pop()
        for child in successors[node]:
            ordered.append((node, child))
            indegree[child] -= 1
            if not indegree[child]:
                ready.append(child)
        del successors[node]
    # Unvisited nodes remain only when a cycle prevented processing.
    return ordered if not successors else []
# Deliberately NOT an AttributeError subclass: lazyattr re-raises
# factory failures as this type so getattr()/hasattr() don't mask them.
class LazyAttributeError(Exception):
    '''An exception for lazy attributes so that AttributeErrors thrown
    don't get caught by getattr() trickery'''
class lazyattr:
    """Descriptor: compute the attribute once via *factory*, then cache
    it on the instance under a mangled name.  (Python 2 only: uses the
    three-argument raise statement.)"""
    def __init__(self, factory):
        self.factory = factory
        name = getattr(factory, '__name__', None)
        if not name or name == '<lambda>':
            name = str(factory)
        # Cache slot on the instance; unlikely to collide.
        self.name = '____lazy____' + name
        # Whether the factory expects the instance as its argument.
        self.hasparam = inspect.getargspec(factory)[0]
    def __get__(self, obj, type):
        if obj is None:
            return self
        try:
            x = getattr(obj, self.name)
        except AttributeError:
            try:
                if self.hasparam:
                    x = self.factory(obj)
                else:
                    x = self.factory()
            except AttributeError, e:
                # Re-raise as LazyAttributeError (original traceback kept)
                # so the outer getattr() above cannot swallow it.
                raise LazyAttributeError, LazyAttributeError(e), sys.exc_info()[2]
            setattr(obj, self.name, x)
        return x
    def __set__(self, obj, v):
        setattr(obj, self.name, v)
def listgen(f):
    """Decorator: call *f* and return its iterable result as a list.

    Functional duplicate of yieldlist(), kept for compatibility.
    """
    @functools.wraps(f)
    def collect(*args, **kw):
        return list(f(*args, **kw))
    return collect
def newdict(*ds, **kw):
    """Merge any number of dicts plus keyword overrides into a new dict.

    Later sources win on key collisions; *kw* wins over all of *ds*.
    The original built an intermediate pair list via sum() — quadratic
    list concatenation, and dependent on Python 2's list-returning
    dict.items(); dict.update is linear and version-agnostic.
    """
    out = {}
    for d in ds:
        out.update(d)
    out.update(kw)
    return out
class dictobject(dict):
    """dict whose items are also readable/writable as attributes."""
    def __getattr__(self, k):
        try:
            return self[k]
        except KeyError:
            # Raise AttributeError (not KeyError) so hasattr(), copy and
            # pickle protocols behave correctly on missing attributes.
            raise AttributeError(k)
    def __setattr__(self, k, v):
        self[k] = v
class defaultdictobject(defaultdict):
    """defaultdict with attribute-style item access; missing attributes
    trigger the default factory like missing keys do."""
    def __getattr__(self, k):
        return self[k]
    def __setattr__(self, k, v):
        self[k] = v
class varsobject(defaultdictobject):
    """defaultdictobject whose missing keys default to None, rendered
    with a dict-literal style repr."""
    def __init__(self, default=lambda: None):
        defaultdictobject.__init__(self, default)
    def __repr__(self):
        body = ', '.join('%s: %r' % pair for pair in self.items())
        return '{' + body + '}'
# Forward declaration: the metaclass below tests it to tell the
# bootstrap class statement apart from later "subclass" statements.
symbol = None
class symbol:
    """Named constant with identity equality.  After this class exists,
    any `class FOO(symbol)` statement yields a symbol INSTANCE named
    'FOO' instead of a new class (see the metaclass)."""
    def __repr__(self):
        return '<symbol %s>' % self.name
    def __str__(self):
        return self.name
    def __init__(self, name):
        self.name = name
    def __eq__(self, other):
        return self is other
    class __metaclass__(type):
        def __new__(Class, name, bases, members):
            if symbol:
                # symbol already exists: turn the class statement into
                # an instance of it.
                return symbol(name)
            return type.__new__(Class, name, bases, members)
def install_object(obj, module, name=None, overwrite=False):
    """Bind *obj* as an attribute of an already-imported *module*,
    rewriting its __name__/__module__ to match; anonymous objects get a
    random name.  Raises AttributeError on collision unless overwrite."""
    mod = sys.modules[module]
    if name is None:
        name = 'anonymous_' + uniqid()
    if hasattr(mod, name) and not overwrite:
        raise AttributeError('Not overwriting %s.%s' % (module, name))
    obj.__name__ = name
    obj.__module__ = module
    setattr(mod, name, obj)
def maptree(visit, tree, iter=iter):
    """Depth-first post-order traversal: recurse into iter(tree) first,
    then call visit(tree) on the node itself.

    *iter* must yield an empty iterable for leaf nodes.
    """
    for child in iter(tree):
        maptree(visit, child, iter)
    visit(tree)
def doall(fs):
    """Return a function invoking every callable in *fs* with the same
    arguments; return values are discarded."""
    def fanout(*args, **kw):
        for func in fs:
            func(*args, **kw)
    return fanout
def rollover_midnight(ts, refts, threshold=None):
    """Push *ts* forward one day when it falls before *refts* (minus an
    optional grace *threshold*), i.e. treat it as past midnight."""
    import datetime
    grace = threshold or datetime.timedelta(0)
    if ts < refts - grace:
        return ts + datetime.timedelta(1)
    return ts
def format_datetime(dt):
    """Format *dt* roughly like an RFC-1123 date string.

    NOTE(review): the output contains no month and uses a two-digit
    year ('%y'), e.g. "Wed, 5 24 13:00:00 GMT".  Likely intended:
    dt.strftime('%a, %d %b %Y %H:%M:%S GMT%z') — confirm with consumers
    before changing.
    """
    return '%s, %d %s' % (dt.strftime('%a'), dt.day,
                          dt.strftime('%y %H:%M:%S GMT%z'))
#
# debugging utils
##################
def extract_tb(tb, limit=None):
    """Walk a traceback into a list of frame-info dicts, innermost last.

    Each entry carries lineno, filename, name, locals and modname.
    *limit* defaults to sys.tracebacklimit when that attribute is set.
    """
    if limit is None:
        limit = getattr(sys, 'tracebacklimit', None)
    frames = []
    depth = 0
    while tb is not None and (limit is None or depth < limit):
        frame = tb.tb_frame
        frames.append(dict(lineno=tb.tb_lineno,
                           filename=frame.f_code.co_filename,
                           name=frame.f_code.co_name,
                           locals=frame.f_locals,
                           modname=frame.f_globals.get('__name__', None)))
        tb = tb.tb_next
        depth += 1
    return frames
def extract_stack(f=None, limit=None):
    """Capture the current (or given) call stack as frame-info dicts,
    outermost first; mirror image of extract_tb()."""
    if f is None:
        # Raise/catch to reach the caller's frame portably.
        try:
            raise ZeroDivisionError
        except ZeroDivisionError:
            f = sys.exc_info()[2].tb_frame.f_back
    if limit is None:
        limit = getattr(sys, 'tracebacklimit', None)
    frames = []
    depth = 0
    while f is not None and (limit is None or depth < limit):
        frames.append(dict(lineno=f.f_lineno,
                           filename=f.f_code.co_filename,
                           name=f.f_code.co_name,
                           locals=f.f_locals,
                           modname=f.f_globals.get('__name__', None)))
        f = f.f_back
        depth += 1
    frames.reverse()
    return frames
def format_list(frames, color=False):
    """Render extract_tb()/extract_stack() frame dicts as indented text
    lines (with the source line appended when linecache finds it),
    optionally ANSI-colored via fenton.console."""
    import linecache
    g = r = y = lambda x: x
    if color:
        from fenton.console import colors
        g = colors.Green
        r = colors.Red
        y = colors.Yellow
    rendered = []
    for frame in frames:
        text = '  %s:%s %s\n' % (g(frame['modname']),
                                 r(frame['lineno']),
                                 y(frame['name']))
        src = linecache.getline(frame['filename'], frame['lineno'])
        if src:
            text += '    %s\n' % src.strip()
        rendered.append(text)
    return rendered
def color_tb(tb, limit=None):
    """ANSI-colored rendering of a traceback's frames."""
    return format_list(extract_tb(tb, limit), color=True)
def color_stack(f=None, limit=None):
    """ANSI-colored rendering of the current (or given) call stack."""
    return format_list(extract_stack(f, limit), color=True)
def formattable(data, n):
    """Yield each row of *data* as left-justified strings, padded to the
    widest value in each of the *n* columns."""
    widths = [max(len(str(row[col])) for row in data) for col in range(n)]
    for row in data:
        yield ['%-*s' % (widths[col], row[col]) for col in range(n)]
# inspection
def _getmethod(frame):
    """Best-effort reverse lookup of the method a *frame* is executing.

    Returns (defining class, self, function, attribute name) or None.
    Python 2 only: relies on im_func/func_code attributes.
    """
    f = frame
    c = frame.f_code
    if not c.co_varnames:
        # can't be method, no arguments!
        return None
    selfname = c.co_varnames[0]
    if selfname in f.f_locals:
        self = f.f_locals[selfname]
    elif 'self' in f.f_locals:
        self = f.f_locals['self']
    else:
        # can't find 'self'
        return None
    if not hasattr(self, '__class__'):
        # first arg not an instance
        return None
    if isinstance(self, type):
        Class = self
    else:
        Class = self.__class__
    # Scan the MRO for an attribute whose code object matches the frame;
    # properties are checked through their fget/fset/fdel functions.
    for O in Class.__mro__:
        for name in O.__dict__:
            try:
                attr = getattr(O, name)
            except:
                continue
            if type(attr) is builtin_property:
                for func in 'fget', 'fset', 'fdel':
                    func = getattr(attr, func)
                    if func:
                        func = getattr(func, 'im_func', func)
                        if func.func_code is c:
                            return O, self, func, name
            func = getattr(attr, 'im_func', attr)
            if getattr(func, 'func_code', None) is c:
                return O, self, attr, name
    return None
def frameinfo(f=None):
    """Describe a frame (default: the caller's) as a dict with class,
    self, module, file, line, function and method keys; class/method are
    None when the frame is not executing a method."""
    if f is None:
        f = sys._getframe(1)
    c = f.f_code
    stuff = _getmethod(f)
    if stuff:
        Class, self, func, attr = stuff
        modname = Class.__module__
    else:
        Class = self = func = attr = None
        modname = f.f_globals.get('__name__')
    return {'class': Class,
            'self': self,
            'module': modname,
            'file': c.co_filename,
            'line': f.f_lineno,
            'function': func and func.func_name or c.co_name,
            'method': attr}
def getcaller(offset=0):
    """frameinfo() for the caller's caller, *offset* frames further up."""
    f = sys._getframe(2+offset)
    return frameinfo(f)
# (attribute path, caller, file) -> frame info; filled by attrwatcher
# instrumentation and dumped by printcallers().
_callers = {}
def attrwatcher(T):
    """Build a __getattribute__ replacement that records which callers
    touch attributes of type-name *T* (thread-local re-entrancy guard
    prevents recursion through getcaller())."""
    loc = threading.local()
    #_callers.append((T, callers))
    def getattribute(self, name):
        if not getattr(loc, 'stop', False):
            loc.stop = True
            fr = getcaller()
            cl = fr['class'] and (fr['class'].__name__ + '.') or ''
            attr = cl + (fr['method'] or fr['function'] or '?')
            k = (T+'.'+name, attr, fr['file'])
            if k not in _callers:
                _callers[k] = fr
            loc.stop = False
        return object.__getattribute__(self, name)
    return getattribute
def printcallers():
    """Dump the attrwatcher table as an aligned text report (Python 2
    print statements); paths are shortened for readability."""
    if not _callers:
        return
    strip = os.path.abspath(os.path.dirname(os.path.dirname(__file__))) + '/'
    data = []
    for k in sorted(_callers):
        fr = _callers[k]
        name, attr, fn = k
        fn = fn.replace(strip, '')
        if 'site-packages' in fn:
            fn = fn[fn.index('site-packages')+14:]
        elif 'mako.py' in fn:
            fn = os.path.basename(fn)
        data.append((name, attr, fn, fr['line']))
    print '\n'
    for row in formattable(data, 4):
        print '  '.join(row)
    print '\n'
# misc bits and bobs
def uint(x):
    """Truncate *x* to an unsigned 32-bit value."""
    return x & 0xffffffff
def byteswap(x):
    """Reverse the byte order of a 32-bit value.

    NOTE(review): values >= 0xfffffffe map to None — the original
    `>= 0xffffffff-1` cutoff also excludes 0xfffffffe; confirm intended.
    """
    if x >= 0xffffffff-1:
        return None
    import struct
    big_endian = struct.pack('>I', x)
    return struct.unpack('<I', big_endian)[0]
class sequence:
    """Lazily-started monotonic counter; *start* may be an int or a
    callable producing one.  Python 2 only: relies on the iterator
    .next attribute."""
    def __init__(self, start):
        self.start = start
        self.next = None
    def __call__(self):
        if not self.next:
            import itertools
            start = self.start
            if callable(start):
                start = start()
            self.next = itertools.count(start).next
        return self.next()
def camel(t):
    """under_score -> UnderScore (title-case each _-separated word)."""
    marker = '\x00'
    return ''.join(t.replace('_', marker).title().split(marker))
# Backward-compatible alias.
recamel = camel
_camel_re = re.compile('^([A-Z][a-z0-9_]*)')
def decamel(s, sep='_'):
    'FooBar -> foo_bar, NotANumber -> not_a_number'
    words = []
    while True:
        m = _camel_re.match(s)
        if m is None:
            break
        word = m.groups()[0]
        words.append(word.lower())
        s = s.replace(word, '', 1)
    if words:
        return sep.join(words)
    return s
def escape(v):
    """HTML-escape *v* (None becomes u''); non-strings are coerced via
    unicode().  Python 2 only: cgi.escape and basestring/unicode."""
    import cgi
    if v is None:
        return u''
    if not isinstance(v, basestring):
        v = unicode(v)
    return unicode(cgi.escape(v, True))
def scalar(x):
    """Collapse a length-1 sequence to its element; otherwise return the
    items as a tuple."""
    if len(x) == 1:
        return x[0]
    return tuple(x)
# some useful functions
def identity(x):
    'Always return x (the identity function)'
    return x
def void(x):
    'Always return None, ignoring x'
    return None
def traverse(x, path):
    """Follow dotted attribute *path* from *x*: traverse(x, 'a.b') is
    x.a.b.  Uses functools.reduce (the module is already imported at the
    top of this file) because bare reduce() is a Python 2-only builtin.
    """
    return functools.reduce(getattr, path.split('.'), x)
def traverser(path):
    '''
    Given an attribute path (a.b.c)
    return a function that traverses the path
    (eg, x.a.b.c)
    '''
    def walk(x):
        return traverse(x, path)
    return walk
def alias(path):
    """Read-only property delegating to the dotted attribute *path*."""
    return builtin_property(traverser(path))
def noimpl(f):
    """Decorator: replace *f* with a stub raising NotImplementedError."""
    @functools.wraps(f)
    def stub(*a, **k):
        raise NotImplementedError(f.__name__)
    return stub
def int_to_base(n, alphabet):
    """Render integer *n* in the base defined by *alphabet* (digit i is
    alphabet[i]); negative values get a leading '-'."""
    base = len(alphabet)
    q = abs(n)
    digits = []
    while True:
        q, r = divmod(q, base)
        digits.append(alphabet[r])
        if q == 0:
            break
    sign = '-' if n < 0 else ''
    return sign + ''.join(reversed(digits))
def base_to_int(s, alphabet):
    """Inverse of int_to_base(); raises TypeError on an empty string."""
    sign = 1
    if s and s[0] == '-':
        s = s[1:]
        sign = -1
    if not s:
        raise TypeError('empty string')
    base = len(alphabet)
    n = 0
    for ch in s:
        n = n * base + alphabet.index(ch)
    return n * sign
# Explicit ASCII alphabets: the original used string.uppercase and
# string.lowercase, which are locale-dependent (and removed in Python 3)
# and would silently corrupt base62/base36 encodings under non-C locales.
_B62 = string.digits + string.ascii_uppercase + string.ascii_lowercase
_B36 = string.digits + string.ascii_uppercase
def to_base62(n, alphabet=_B62):
    """Encode an integer as base62 (0-9A-Za-z)."""
    return int_to_base(n, alphabet)
def from_base62(s, alphabet=_B62):
    """Decode a base62 string to an integer."""
    return base_to_int(s, alphabet)
def to_base36(n, alphabet=_B36):
    """Encode an integer as base36 (0-9A-Z)."""
    return int_to_base(n, alphabet)
def from_base36(s, alphabet=_B36):
    """Decode a base36 string to an integer."""
    return base_to_int(s, alphabet)
# Size suffixes in increasing powers of 1024 (kilo .. yotta).
_suffixes = 'kmgtpezy'
_bytere = re.compile('^(\d+)([%s]b?)?$' % _suffixes)
def bytesize(s, suffixes=_suffixes, r=_bytere):
    """Parse sizes like '10', '10k' or '3mb' into a byte count (powers
    of 1024); return None when *s* does not match."""
    m = r.match(s.strip().lower())
    if m is None:
        return None
    digits, suffix = m.groups()
    value = int(digits)
    if not suffix:
        return value
    exponent = suffixes.index(suffix[:1]) + 1
    return value * 1024 ** exponent
# Standard and URL/web-safe base64 alternative characters.
_ENC_ALTCHARS='+/'
_WEB_ALTCHARS='._'
_STRIP_RE=re.compile('=*$')
def enc64(s, alt=_ENC_ALTCHARS, strip=False):
    """Base64-encode *s* (Python 2 str semantics); strip=True removes
    trailing '=' padding."""
    import base64
    ret = base64.b64encode(s, alt)
    if not strip:
        return ret
    return _STRIP_RE.sub('', ret)
def dec64(s, alt=_ENC_ALTCHARS, pad=False):
    """Inverse of enc64(); pad=True restores stripped '=' padding."""
    import base64
    if pad:
        s = s+((4-len(s))%4)*'='
    return base64.b64decode(s, alt)
def uniqid(size=16):
    """URL-safe random identifier built from *size* bytes of os.urandom."""
    chars = os.urandom(size)
    # chars = uuid.uuid4().bytes
    # chars = ''.join(chr(random.randint(0, 256)) for _ in range(size))
    return enc64(chars, _WEB_ALTCHARS, strip=True)
# UNDEFINED: an object that cannot be used, except as a placeholder
class Undefined(Exception):
    """Raised by every operation attempted on the UNDEFINED placeholder."""
    pass
def throws(name):
    """Make a function called *name* whose only behavior is to raise
    Undefined."""
    def thrower(*x, **y):
        raise Undefined
    thrower.__name__ = name
    return thrower
@apply
def undefined():
    """The UNDEFINED placeholder: every operation on it raises Undefined.

    Built through a metaclass whose __new__/__init__ demand a sentinel
    argument, so the object cannot even be subclassed by accident.
    (Python 2 only: uses the removed `apply` builtin.)
    """
    def throw(*a, **k):
        raise Undefined
    class undefmeta(type):
        def __new__(Self, name, bases, attrs, sentinel=None):
            if sentinel is None:
                raise Undefined
            return type.__new__(Self, name, bases, attrs)
        def __init__(self, name, bases, attrs, sentinel=None):
            if sentinel is None:
                raise Undefined
        def __repr__(self):
            return 'UNDEFINED'
    # Every special-method name gets a thrower; kept as one whitespace-
    # separated string for compactness.
    names = '''
        getattribute abs add and base bases basicsize call class cmp
        coerce contains delattr delete delitem delslice dict dictoffset
        div divmod doc enter eq exit flags float floordiv ge get
        getformat getitem getnewargs getslice gt hash hex iadd iand idiv
        ifloordiv ilshift imod imul index int invert ior ipow irshift
        isub itemsize iter itruediv ixor le len long lshift lt mod
        module mro mul name ne neg nonzero objclass oct or pos pow radd
        rand rdiv rdivmod reduce reduce_ex reversed rfloordiv rlshift
        rmod rmul ror rpow rrshift rshift rsub rtruediv rxor self set
        setattr setformat setitem setslice str sub subclasses truediv
        weakref weakrefoffset xor
        '''.split()
    members = dict(('__%s__' % k, throws('__%s__' % k)) for k in names)
    return undefmeta('undefined', (object,), members, 1)
def get_modattr(impname):
    """Split dotted 'pkg.mod.attr' into (imported module object, 'attr')."""
    parts = impname.split('.')
    modname = '.'.join(parts[:-1])
    __import__(modname)
    return sys.modules[modname], parts[-1]
class fileiter:
    """Adapt an iterator of strings into a minimal file-like .read()."""
    def __init__(self, it):
        self.it = it
        self.buf = []          # already-pulled, not-yet-returned text
        self.readcount = 0     # total chars currently buffered
    def read(self, size=None):
        """Read *size* chars, or everything remaining when size is None.

        Fixes two defects in the original: a line pulled from the
        iterator was silently dropped whenever the buffer already held
        enough data, and read(None) ignored previously buffered text.
        """
        if size is None:
            rest = ''.join(self.buf) + ''.join(self.it)
            self.buf = []
            self.readcount = 0
            return rest
        if self.readcount <= size:
            for line in self.it:
                # Buffer first, THEN test — nothing is ever discarded.
                self.consume(line)
                if self.readcount > size:
                    break
        return self.flush(size)
    def consume(self, line):
        # Buffer one line and account for its length.
        self.buf.append(line)
        self.readcount += len(line)
    def flush(self, size):
        # Return the first *size* buffered chars, keep the remainder.
        out = ''.join(self.buf)
        self.buf = [out[size:]]
        self.readcount = len(self.buf[0])
        return out[:size]
#################################################################
## ##
## unused stuff follows ##
## ##
#################################################################
def get_module(qualname, throw_missing=True, create=False):
    """Import and return module *qualname*.

    throw_missing=False returns None for a missing module (ImportErrors
    raised INSIDE an existing module still propagate); create=True
    registers an empty placeholder module when it is missing.

    Fixes: the original fell through after a successful fresh import and
    returned None (create=False) or clobbered the just-imported real
    module with an empty imp module (create=True).
    """
    if create:
        throw_missing = False
    if qualname in sys.modules:
        return sys.modules[qualname]
    try:
        __import__(qualname)
    except ImportError:
        # traceback of length 1 indicates module not found
        # rather than an import error raised inside module
        if throw_missing or sys.exc_info()[2].tb_next is not None:
            raise
    else:
        return sys.modules[qualname]
    if not create:
        return None
    import imp
    module = imp.new_module(qualname)
    sys.modules[qualname] = module
    return module
def func():
    # Test fixture for getcaller(): a plain module-level call site.
    return This().attr()
# Frames recorded by the This fixture below, inspected by testcaller().
frames = []
class This:
    """Test fixture: records the calling frame (raw and via getcaller())
    on every attribute access."""
    def __getattribute__(self, name):
        frames.append(sys._getframe(1))
        frames.append(getcaller())
        return super(This, self).__getattribute__(name)
    def attr(self):
        pass
class Base:
    """Test fixture: inherited-method call site for getcaller()."""
    def basemeth(self):
        return This().attr()
class Other(Base):
    """Test fixture: instance/class/static method call sites."""
    def instmeth(self):
        return This().attr()
    @classmethod
    def clmeth(self):
        return This().attr()
    @staticmethod
    def stmeth():
        return This().attr()
def testcaller():
    """Manual smoke test: evaluate each call-site flavor and print the
    frame getcaller() reported for it (Python 2 print statement)."""
    s = '''
    func()
    Other().basemeth()
    Other().instmeth()
    Other().clmeth()
    Other.clmeth()
    Other.stmeth()
    '''
    for line in s.strip().split():
        eval(line)
        print line, frames[-1]
| Python |
import sys
import pdb
__metaclass__ = type
class Debugger(pdb.Pdb):
    """pdb.Pdb with richer stack entries: appends argument/return reprs
    and up to three source lines of context.  Python 2 only: imports the
    `repr` module (renamed reprlib in Python 3)."""
    def format_stack_entry(self, frame_lineno, lprefix=': '):
        import linecache, repr
        frame, lineno = frame_lineno
        filename = self.canonic(frame.f_code.co_filename)
        s = '%s(%r)' % (filename, lineno)
        if frame.f_code.co_name:
            s = s + frame.f_code.co_name
        else:
            s = s + "<lambda>"
        if '__args__' in frame.f_locals:
            args = frame.f_locals['__args__']
        else:
            args = None
        if args:
            s = s + repr.repr(args)
        else:
            s = s + '()'
        if '__return__' in frame.f_locals:
            rv = frame.f_locals['__return__']
            s = s + '->'
            s = s + repr.repr(rv)
        from functools import partial
        get = partial(linecache.getline, filename)
        # Show the current line plus the two lines before it.
        n = lineno
        lines = [get(x) for x in (n-2,n-1,n) if get(x)]
        if lines:
            s = s + '\n    ' + '\n    '.join([l.strip() for l in lines])
        return s
def set_trace():
    """Drop into the enhanced Debugger at the caller's frame."""
    Debugger().set_trace(sys._getframe().f_back)
| Python |
"""
(Possibly marginally secure) RPC server
Packet is:
octets meaning
4: seclen = length of secret
<seclen>: secret
4: msglen = length of message
<msglen>: message (code)
integers are big-endian, 32 bits.
A minimal windows service
Based on _Python Programming on win32_, pp347+
install by calling with argument "install"
"""
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import os
import sys
import pickle
import select
import logging
import threading
from SocketServer import (TCPServer,
StreamRequestHandler)
from cStringIO import StringIO
logging.basicConfig()
log = logging.getLogger(__name__)
try:
import win32serviceutil
import win32service
import win32event
HAVE_WIN32 = True
except ImportError:
HAVE_WIN32 = False
class Packet:
    """Wire format: >I secret length, secret, >I message length, message
    (all integers big-endian 32-bit)."""
    class Error(Exception):
        pass
    def __init__(self, secret, msg):
        self.secret = secret
        self.msg = msg
        self.seclen = len(str(secret))
        self.msglen = len(str(msg))
        self.bytes = None          # lazily-rendered wire form
    def format(self):
        """Render the packet into its wire representation."""
        import struct
        parts = (struct.pack(">I", self.seclen), self.secret,
                 struct.pack(">I", self.msglen), self.msg)
        return "%s%s%s%s" % parts
    def __str__(self):
        # Render once, then serve the cached bytes.
        if not self.bytes:
            self.bytes = self.format()
        return self.bytes
    def __len__(self):
        return len(str(self))
    @staticmethod
    def read(secret, stream):
        """Parse one packet from *stream*, validating *secret*; return
        the message payload or raise Packet.Error."""
        import struct
        seclen = struct.unpack(">I", stream.read(4))[0]
        if seclen != len(secret):
            raise Packet.Error("Incorrect secret size")
        if stream.read(seclen) != secret:
            raise Packet.Error("Incorrect secret")
        msglen = struct.unpack(">I", stream.read(4))[0]
        return stream.read(msglen)
class Client:
    """Client side of the (marginally secure) RPC protocol: sends source
    code in a Packet and unpickles the result.

    SECURITY: recv() calls pickle.loads on data from the server — only
    use against a fully trusted endpoint.
    """
    def __init__(self, secret, server, port=1666):
        self.secret = secret
        self.seclen = len(str(secret))
        self.server = server
        self.port = port
    def connect(self):
        import socket
        s = socket.socket()
        s.connect((self.server, self.port))
        return s
    def send(self, msg):
        """Ship one raw source message; return the unpickled result."""
        socket = self.connect()
        msglen = len(str(msg))
        packet = Packet(self.secret, msg)
        try:
            sent = send_packet(socket, packet)
            if sent != len(packet):
                raise Exception("Failed to send whole packet")
            out = self.recv(socket)
        finally:
            socket.close()
        return out
    def call(self, func, *args):
        """Remote-execute *func* by shipping its source plus a call with
        repr()'d *args*; re-raises returned error instances."""
        import inspect
        f = func.__name__
        args = ','.join(map(repr, args))
        source = inspect.getsource(func)
        source += "\n\n__return__ = %s(%s)\n" % (f, args)
        result = self.send(source)
        if isinstance(result, error):
            raise result
        return result
    def recv(self, socket):
        # Drain the socket until EOF, then unpickle the full payload.
        buf = []
        bufsize = 1024
        while True:
            data = socket.recv(bufsize)
            if data:
                buf.append(data)
            else:
                break
        return pickle.loads(''.join(buf))
def send_packet(socket, data):
    """Send all of str(*data*) over *socket*, honoring partial sends;
    return the number of bytes written.

    Fixes: the original sliced the remaining buffer by the CUMULATIVE
    total instead of the bytes sent in the current call, dropping data
    after the second partial send.
    """
    data = str(data)
    total = 0
    while data:
        sent = socket.send(data)
        total += sent
        data = data[sent:]
    return total
# Originals saved so execute() can restore the std streams and os._exit
# after running sandboxed code that rebinds them.
real_stdout = sys.stdout
real_stdin = sys.stdin
real_stderr = sys.stderr
real_exit = os._exit
def dummy_exit(status):
    """os._exit stand-in while execute() runs: log instead of dying."""
    log_error("Attempt to call os._exit()")
# error classes for pickling
# (re-registered below under 'fenton.insecure' so the client side can
# unpickle them without this module installed under the same name)
class error(Exception): pass
class winerror(error): pass
class strerror(error): pass
def execute(msg, stdin):
    """Exec remote source *msg* with redirected std streams and return
    the pickled value of its __return__ variable (or a pickled error).

    SECURITY: arbitrary code execution is this service's PURPOSE; access
    is gated only by the shared secret and IP allow-list.
    (Python 2 only: exec statement syntax.)
    """
    sys.stdin = stdin
    sys.stdout = StringIO()
    sys.stderr = StringIO()
    os._exit = dummy_exit
    env = {'__return__': None}
    ret = None
    try:
        exec msg in env
        ret = env['__return__']
    except:
        # global function, set below
        ret = handle_error(*sys.exc_info())
    finally:
        sys.stdin = real_stdin
        sys.stdout = real_stdout
        sys.stderr = real_stderr
        os._exit = real_exit
    try:
        ret = pickle.dumps(ret)
    except:
        # Unpicklable result: fall back to a stringified error wrapper.
        ret = strerror(map(str, sys.exc_info()[:2]))
        ret = pickle.dumps(ret)
    return ret
class Handler(StreamRequestHandler):
    """Per-connection handler: authenticate the packet, execute its
    payload, write back the pickled result."""
    def handle(self):
        try:
            msg = Packet.read(self.server.secret, self.rfile)
        except Packet.Error:
            log_error("Error reading packet")
            self.connection.close()
            return
        ret = execute(msg, self.rfile)
        self.wfile.write(ret)
class Server(TCPServer):
    """TCPServer restricted to an IP allow-list (allowed_clients is
    attached by make_server())."""
    allow_reuse_address = True
    def verify_request(self, request, client):
        # client is (host, port); only whitelisted hosts may connect.
        return client[0] in self.allowed_clients
def select_server(event, secret, address, clients):
    """Build (but don't start) a thread that serves requests while
    *event* stays set, polling with a 1s select timeout so clearing the
    event stops it promptly."""
    def run(event=event):
        server = make_server(secret, address, clients)
        socket = server.fileno()
        while event.isSet():
            ready = select.select([socket], [], [], 1)
            if socket in ready[0]:
                server.handle_request()
    return threading.Thread(target=run)
def make_server(secret, addr, clients):
    """Build a Server listening on 'host:port' *addr* with the shared
    *secret* and IP allow-list *clients* attached."""
    addr, port = addr.split(':')
    port = int(port)
    s = Server((addr, port), Handler)
    s.secret = secret
    s.seclen = len(secret)
    s.allowed_clients = tuple(clients)
    return s
def gen_secret(length):
    """Generate a random alphanumeric shared secret of *length* chars.

    Fixes: the original drew via random.sample (non-cryptographic PRNG,
    sampling without replacement from a multiplied pool) over the
    locale-dependent string.letters; SystemRandom is os.urandom-backed
    and ascii_letters is locale-independent.
    """
    import string, random
    chars = string.ascii_letters + string.digits
    rng = random.SystemRandom()
    return ''.join(rng.choice(chars) for _ in range(length))
def test_server(secret=None):
    """Manual test: run a server on 0.0.0.0:6666 with a printed secret
    (Python 2 print statement); blocks forever."""
    if secret is None:
        secret = gen_secret(20)
    print "secret:", secret
    allowed = ['127.0.0.1','192.168.0.1']
    serv = make_server(secret, '0.0.0.0:6666', allowed)
    serv.serve_forever()
def test_client(secret, server=None, port=1666):
    """Manual test: round-trip a remote expression and check the result."""
    cli = Client(secret, server or '', port)
    msg = "import string; __return__ = string.printable"
    result = cli.send(msg)
    import string
    assert result == string.printable
# Remote-execution smoke-test payloads (shipped via Client.call).
def test_good(*args):
    return "GOOD! " + str(args)
def test_exc(*args):
    # Deliberately raises TypeError on the server side.
    return None + 1 + args
def test_exit(*args):
    # Verifies the server survives an attempted os._exit().
    import os
    os._exit(1)
def _tb(args):
import traceback
tb = traceback.format_exc()
return "\n\n".join(list(args) + [tb])
def handle_error_str(E, e, tb):
    """Fallback error handler: log, then wrap as a picklable strerror."""
    log_error(str(e))
    return strerror((str(E), str(e)))
# Replaced by the COM-aware win32 handler below when available.
handle_error = handle_error_str
if HAVE_WIN32:
    def log_win32(*args):
        """Log via the Windows service event log, traceback appended."""
        import servicemanager
        servicemanager.LogErrorMsg(_tb(args))
    log_error = log_win32
    def handle_error_win32(E, e, tb):
        """Wrap COM errors as picklable winerror; else fall back."""
        import pythoncom
        if isinstance(e, pythoncom.com_error):
            return winerror(tuple(e))
        return handle_error_str(E, e, tb)
    handle_error = handle_error_win32
    class AppService(win32serviceutil.ServiceFramework):
        """Windows service wrapper running the select_server() loop,
        configured from winagent.ini next to this module."""
        _svc_name_ = "MyAppAgent"
        _svc_display_name_ = "My App Windows agent"
        def __init__(self, args):
            win32serviceutil.ServiceFramework.__init__(self, args)
            self.hWaitStop = win32event.CreateEvent(None, 0, 0, None)
        def get_config(self):
            """Read (secret, listen address, allowed clients) from the
            [myapp] section of winagent.ini."""
            dir = os.path.dirname(os.path.abspath(__file__))
            import ConfigParser
            config = ConfigParser.ConfigParser()
            config.read(os.path.join(dir, 'winagent.ini'))
            secret = config.get('myapp', 'secret')
            listen = config.get('myapp', 'listen')
            clients = config.get('myapp', 'clients').split()
            return secret, listen, clients
        def SvcStop(self):
            self.ReportServiceStatus(win32service.SERVICE_STOP_PENDING)
            win32event.SetEvent(self.hWaitStop)
        def SvcDoRun(self):
            event = threading.Event()
            event.set()
            secret, listen, clients = self.get_config()
            thread = select_server(event, secret, listen, clients)
            thread.start()
            win32event.WaitForSingleObject(self.hWaitStop, win32event.INFINITE)
            # Clearing the event makes the server thread's poll loop exit.
            event.clear()
            thread.join()
else:
    import logging
    log = logging.getLogger(__name__)
    def log_logger(*args):
        #log.error(_tb(args))
        log.error(*args)
    log_error = log_logger
def dummy_module(name, *objs):
    """Create (and register in sys.modules) fresh modules for every
    prefix of dotted *name*, attach *objs* to the leaf module, and
    return it."""
    import imp
    parent = None
    parts = name.split('.')
    for i in range(len(parts)):
        qualname = '.'.join(parts[:i+1])
        mod = sys.modules[qualname] = imp.new_module(qualname)
        if parent is not None:
            setattr(parent, parts[i], mod)
        parent = mod
    for obj in objs:
        setattr(parent, obj.__name__, obj)
        obj.__module__ = name
    return parent
name = 'fenton.insecure'
if __name__ != name and name not in sys.modules:
    # make the error classes picklable under 'fenton.insecure'
    # (clients unpickle them by that module path, not by this file's name)
    dummy_module(name, error, strerror, winerror)
if __name__ == "__main__":
    args = sys.argv[1:]
    if HAVE_WIN32:
        # Default path: service management (install/start/...); 'test'
        # falls through to the plain test server below.
        if not args or args[0] != 'test':
            win32serviceutil.HandleCommandLine(AppService)
            sys.exit()
        args.pop(0)
    if args:
        sec = args[0]
    else:
        sec = None
    test_server(sec)
| Python |
from markupsafe import Markup, escape_silent as escape
__metaclass__ = type
empty = 'area base basefont br col frame hr img input isindex link meta param'
empty = set(empty.split())
def _make_tag(tag, args, kw):
    """Build an escaped HTML element as a Markup string.

    Children come from *args* or from the special '_' keyword (mutually
    exclusive); '_close' controls the closing tag; attribute values of
    None are skipped; known-empty tags self-close when childless.

    Fixes: dict.has_key / dict.iteritems (both removed in Python 3)
    replaced with the `in` operator and items(), identical behavior.
    """
    if '_' in kw:
        assert not args, "The special '_' keyword argument cannot be used "\
                'in conjunction with non-keyword arguments'
        args = kw.pop('_')
    close = kw.pop('_close', True)
    attrs = [' %s="%s"' % (k.lower(), escape(v))
             for k, v in sorted(kw.items())
             if v is not None]
    start = '<%s%s' % (tag, ''.join(attrs))
    if not args and tag in empty and close:
        content = ''
        end = ' />'
    else:
        start += '>'
        content = ''.join(escape(x) for x in args)
        end = close and '</%s>'%tag or ''
    return Markup(start+content+end)
class Literal:
    """Callable wrapping its arguments in markupsafe.Markup (i.e. marks
    text as pre-escaped); base class for Tag."""
    def __call__(self, *args):
        return Markup(*args)
    def __html__(self):
        raise NotImplementedError
class Tag(Literal):
    """Callable HTML tag: Tag('div')(child, id='x') renders an escaped
    <div> element via _make_tag()."""
    def __init__(self, name):
        self.name = name
    # __self naming avoids clashing with a 'self' keyword attribute.
    def __call__(__self, *args, **kw):
        return _make_tag(__self.name, args, kw)
    def __str__(self):
        return Markup('<%s />' % self.name)
    def __html__(self):
        return str(self)
class Builder:
    """HTML.div(...), HTML.span(...) style element factory; tag objects
    are created on first access and cached in the instance __dict__ (so
    __getattr__ only fires once per tag name)."""
    literal = Literal()
    def __getattr__(self, name):
        if name.startswith('_'):
            raise AttributeError
        x = self.__dict__[name] = Tag(name.lower())
        return x
    def __call__(self, *args):
        return Markup(''.join(escape(x) for x in args))
    def tag(__self, __tag, *args, **kw):
        return _make_tag(__tag, args, kw)
HTML = Builder()
| Python |
import re
import datetime
# Largest accepted timezone offset, in minutes (i.e. +/- 24 hours).
MAX_OFFSET = 1440
# Permissive ISO-8601-style timestamp matcher.  Every component after the
# year is optional; named groups come back None when absent.
DT_RX = re.compile(
    r'^(?P<year>[0-9]{4})'                              # YYYY
    r'(-(?P<month>[0-9]{2})'                            # -MM
    r'(-(?P<day>[0-9]{2})'                              # -DD
    r'([T: ]'                                           # 'T', ':' or ' '
    r'(?P<hour>[0-9]{2}):(?P<minute>[0-9]{2})'          # HH:MM
    r'(:(?P<second>[0-9]{2})'                           # :SS
    r'(\.(?P<fraction>[0-9]+))?)?'                      # .s...
    r'(?P<tzstring>(?P<tzutc>Z)'                        # 'Z'
    r'|(?P<tzsign>[-+])'                                # '+' or '-'
    r'(?P<tzhour>[0-9]{2}):(?P<tzminute>[0-9]{2})'      # ZZ:ZZ
    r')?'                                               # close optional tzstring
    r')?'                                               # close optional time
    r')?'                                               # close optional day
    r')?$'                                              # close optional month
    )
# Indirect clocks so timetravel()/reset() can substitute fake time sources.
_now = datetime.datetime.now
_utcnow = datetime.datetime.utcnow
def reset():
    """Undo timetravel(): point the module clocks back at the real time."""
    global _now, _utcnow
    _now, _utcnow = datetime.datetime.now, datetime.datetime.utcnow
def timetravel(now, freeze=False):
    """Testing hook: shift the module clocks by an offset, or freeze them.

    now    -- an aware datetime to travel to
    freeze -- when true, the clocks return this instant forever;
              otherwise they keep ticking, offset from real time.
    """
    global _now, _utcnow
    utc = now.astimezone(UTC)
    # Start from the real clock so repeated travels do not compound.
    reset()
    if freeze:
        print 'Time frozen at', now
        # Default-argument binding captures the frozen instants once.
        _now = lambda t=now.replace(tzinfo=None):t
        _utcnow = lambda t=utc.replace(tzinfo=None):t
    else:
        print 'Time travel to', now
        offset = utcnow0() - utc
        from datetime import datetime
        # Bind the real clock and the offset as defaults; each call ticks.
        _now = lambda f=datetime.now,o=offset:f()-o
        _utcnow = lambda f=datetime.utcnow,o=offset:f()-o
def parse(string, tzinfo=None):
    """Parse *string* into a datetime, timezone-aware when an offset is given.

    tzinfo -- optional factory taking an offset in minutes
              (defaults to FixedOffset).  Raises Invalid on bad input.
    """
    fields = parse_tuple(string)
    offset = fields[-1]
    if offset is None:
        tz = None
    else:
        factory = tzinfo or FixedOffset
        tz = factory(offset)
    return datetime.datetime(*fields[:-1], **{'tzinfo': tz})
def parse_tuple(string):
    """Parse a timestamp string into an 8-tuple.

    Returns (year, month, day, hour, minute, second, microsecond, offset)
    where offset is minutes east of UTC, or None when no zone was given.
    Raises Invalid for unparsable input or an offset >= MAX_OFFSET minutes.
    """
    match = DT_RX.match(string)
    if not match:
        raise Invalid("Invalid timestamp: '%s'" % string)
    g = match.groupdict()
    # Missing components default to the start of their range.
    year = int(g['year'])
    month = int(g['month'] or 1)
    day = int(g['day'] or 1)
    hour = int(g['hour'] or 0)
    minute = int(g['minute'] or 0)
    second = int(g['second'] or 0)
    # '.6789' -> 678900 microseconds (scaled via a '0.xxxx' float).
    fraction = int(float('0.%s' % (g['fraction'] or 0)) * 1000000)
    if g['tzutc']:
        offset = 0
    elif g['tzstring']:
        zone_hours = int(g['tzhour'] or 0)
        zone_minutes = int(g['tzminute'] or 0)
        if g['tzsign'] == '-':
            zone_hours, zone_minutes = -zone_hours, -zone_minutes
        offset = zone_hours * 60 + zone_minutes
    else:
        offset = None
    if offset and abs(offset) >= MAX_OFFSET:
        raise Invalid('timezone offset is too large', offset)
    return (year, month, day, hour, minute, second, fraction, offset)
def _mkname(m):
    """Canonical zone name for an offset of *m* minutes, e.g. 'UTC+01:30'."""
    sign = '+' if m >= 0 else '-'
    hours, minutes = divmod(abs(m), 60)
    return 'UTC%s%02d:%02d' % (sign, hours, minutes)
UTC = None  # replaced below with the FixedOffset(0) singleton
class Invalid(Exception):
    '''Raised when there is a problem parsing a date string'''
class FixedOffset(datetime.tzinfo):
    """A tzinfo with a constant offset from UTC and no DST."""
    def __new__(cls, minutes, name=None):
        # Fold every zero-offset instance into the shared UTC singleton.
        if UTC is not None and minutes == 0:
            return UTC
        # Bug fix: (minutes, name) were previously forwarded to
        # tzinfo.__new__/object.__new__, which rejects extra arguments on
        # Python 3 (and merely ignored them on Python 2).  __init__
        # receives them; __new__ must not.
        return datetime.tzinfo.__new__(cls)
    def __init__(self, minutes, name=None):
        assert minutes is not None
        self.minutes = minutes
        self.name = name or _mkname(self.minutes)
        self.offset = datetime.timedelta(minutes=minutes)
    def __reduce__(self):
        # Pickle as a constructor call so unpickling re-folds into UTC.
        return FixedOffset, (self.minutes, self.name)
    def __str__(self):
        return self.name
    def __repr__(self):
        return '<FixedOffset %s>' % self.name
    def tzname(self, dt=None):
        return self.name
    def utcoffset(self, dt):
        return self.offset
    def dst(self, dt):
        # Fixed-offset zones never observe daylight saving.
        return datetime.timedelta(0)
    def localize(self, dt):
        # Attach this zone to a naive datetime (no wall-clock shift).
        assert not dt.tzinfo
        return dt.replace(tzinfo=self)
    def normalize(self, dt):
        # Bug fix: was `assert tzinfo`, a NameError at runtime; the intent
        # (mirroring localize) is to require an already-aware datetime.
        assert dt.tzinfo
        return dt.replace(tzinfo=self)
    def now(self):
        # Current time in this zone, honouring the timetravel() hooks.
        if self.minutes:
            return UTC.now().astimezone(self)
        else:
            return self.localize(_utcnow())
    def now0(self):
        # Current time with microseconds zeroed.
        return self.now().replace(microsecond=0)
    def now00(self):
        # Current time with seconds and microseconds zeroed.
        return self.now().replace(microsecond=0, second=0)
UTC = FixedOffset(0)
# Module-level convenience clocks; all honour timetravel()/reset().
now = lambda:_now()
utcnow = UTC.now
utcnow0 = UTC.now0
utcnow00 = UTC.now00
# Aware extremes of the representable datetime range.
maxz = UTC.localize(datetime.datetime.max)
minz = UTC.localize(datetime.datetime.min)
# Local-time helpers: convert the (possibly travelled) UTC now into *tz*.
today = lambda tz: utcnow().astimezone(tz).date()
localnow = lambda tz: utcnow().astimezone(tz)
localnow0 = lambda tz: utcnow0().astimezone(tz)
localnow00 = lambda tz: utcnow00().astimezone(tz)
def test():
valid = '''
2000-01-01T00:00:00.000000 = 2000
2000-01-01T00:00:00.000000 = 2000-01
2000-01-02T00:00:00.000000 = 2000-01-02
2000-01-02T03:04:00.000000 = 2000-01-02T03:04
2000-01-02T03:04:05.000000 = 2000-01-02 03:04:05
2000-01-02T03:04:05.600000 = 2000-01-02T03:04:05.6
2000-01-02T03:04:05.670000 = 2000-01-02 03:04:05.67
2000-01-02T03:04:05.678000 = 2000-01-02T03:04:05.678
2000-01-02T03:04:05.678900 = 2000-01-02T03:04:05.6789
2000-01-02T03:04:00.000000+0000 = 2000-01-02T03:04Z
2000-01-02T03:04:05.000000+0000 = 2000-01-02 03:04:05Z
2000-01-02T03:04:05.600000+0000 = 2000-01-02T03:04:05.6Z
2000-01-02T03:04:05.670000+0000 = 2000-01-02 03:04:05.67Z
2000-01-02T03:04:05.678000+0000 = 2000-01-02T03:04:05.678Z
2000-01-02T03:04:00.000000+0900 = 2000-01-02T03:04+09:00
2000-01-02T03:04:05.000000-0930 = 2000-01-02 03:04:05-09:30
2000-01-02T03:04:05.600000+1000 = 2000-01-02T03:04:05.6+10:00
2000-01-02T03:04:05.670000-1030 = 2000-01-02 03:04:05.67-10:30
2000-01-02T03:04:05.678000+1100 = 2000-01-02T03:04:05.678+11:00
'''
invalid = '''
2000-
2000-1
2000-1-2
2000-1-2T3:04
2000-1-2T03:04
2000-01-02T3:04
2000-01-02T3:04
2000-01-02T0304
2000-01-02T0345
2000-01-02T03456
2000-01-02T03:04+1
2000-01-02T03:04-10
2000-01-02T03:04Z+10:00
'''
ok = True
for s in [' '] + invalid.strip().split('\n'):
test = s.strip()
e = ''
passed = False
try:
parsed = parse(test)
except Invalid:
passed = True
except:
passed = False
if not passed:
print 'FAIL:', test
for s in valid.strip().split('\n'):
check, test = s.strip().split(' = ')
try:
parsed = parse(test)
assert check == parsed.strftime('%Y-%m-%dT%H:%M:%S.%f%z')
except:
ok = False
print 'FAIL:', test
print ok and 'PASS' or 'FAIL'
if __name__ == '__main__':
test()
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import markupsafe
from fenton import util
from fenton import getmeta
# Bundled jQuery path, plus a CDN alternative.  NOTE(review): the two refer
# to different jQuery versions (1.7.1 vs 1.4.4) -- confirm which is intended.
JQUERY_LOCAL = 'fenton/js/jquery-1.7.1.js'
JQUERY_CDN = 'https://ajax.googleapis.com/ajax/libs/jquery/1.4.4/jquery.min.js'
# FIXME belongs elsewhere
def iter_views(context, iter):
    """Yield the view for each object in *iter* that resolves to one."""
    candidates = (getmeta(obj).get_view(context, obj) for obj in iter)
    return (view for view in candidates if view)
# Global registries of static assets; widget subclasses feed these through
# their metaclass (see widget.__metaclass__ below).
ALL_CSS = set()
ALL_JS = set()
ALL_MAKO = set(['default'])
# Core assets, always emitted first and in this exact order.
core_css = ['fenton/css/reset.css',
            'fenton/css/fenton-core.css',
            'fenton/css/jquery.notify.css']
core_js = [JQUERY_LOCAL,
           'fenton/js/fenton-core.js',
           'fenton/js/jquery.bgiframe.js',
           'fenton/js/jquery.notify.js']
def get_all_css():
    """All stylesheets to emit: core sheets first, registered extras after."""
    extras = ALL_CSS - set(core_css)
    return core_css + list(extras)
def get_all_js():
    """All scripts to emit: core scripts first, registered extras after."""
    extras = ALL_JS - set(core_js)
    return core_js + list(extras)
def register_css(css):
    """Register a stylesheet path (string) or several (iterable); None is a no-op."""
    paths = [css] if isinstance(css, basestring) else css
    if paths:
        ALL_CSS.update(paths)
def register_js(js):
    """Register a script path (string) or several (iterable); None is a no-op."""
    paths = [js] if isinstance(js, basestring) else js
    if paths:
        ALL_JS.update(paths)
def register_mako(f):
    """Register a mako template name (string) or several (iterable); None is a no-op."""
    names = [f] if isinstance(f, basestring) else f
    if names:
        ALL_MAKO.update(names)
class widget:
    """Base class for all screen widgets.

    Widgets form a tree (parent/children) rooted at a screen; the metaclass
    registers each subclass's static assets at class-creation time.
    """
    template_file = 'fenton_core'
    # Identity used for CSS classes, template and JS lookup.
    class_id = property(lambda x:x.__class__.__name__)
    template_name = property(lambda x:util.decamel(x.class_id))
    js_class = property(lambda x:util.decamel(x.class_id))
    class __metaclass__(type):
        # Py2 metaclass: every subclass registers its css/js/templates
        # into the module-level asset registries on definition.
        def __init__(self, name, bases, attrs):
            type.__init__(self, name, bases, attrs)
            register_css(self.css)
            register_js(self.js)
            register_mako(self.template_file)
    js = css = None
    # Explicit id (from kw) or one allocated lazily from the screen.
    __id = None
    id = util.lazyattr(lambda x:x.__id or x.screen.next_id())
    # Conveniences delegating to the owning screen.
    app = property(lambda x:x.screen.app)
    model = property(lambda x:x.screen.model)
    context = property(lambda x:x.screen.context)
    request = property(lambda x:x.screen.request)
    debug = property(lambda x:x.screen.app.debug)
    config = property(lambda x:x.screen.app.config)
    cachable = False
    render = True
    hidden = False
    # Optional per-parent hook applied to descendants during compile().
    compile_child = None
    def __init__(self, parent=None, **kw):
        self.children = []
        self.set_parent(parent)
        self.__id = kw.pop('id', None)
        # Remaining keywords become instance attributes.
        self.__dict__.update(kw)
    def __getstate__(self):
        raise TypeError('Widgets may not be pickled')
    def init(self):
        # Per-widget setup hook, run during compile().
        pass
    def get_url(self, obj):
        v = getmeta(obj).get_view(self.context, obj)
        return v and v.get_url()
    @property
    def css_class(self):
        return ' '.join(util.decamel(x).replace('_', '-')
                        for x in self.css_classes() or [])
    def css_classes(self):
        return [self.class_id]
    __parent = None
    @property
    def parent(self):
        return self.__parent
    def set_parent(self, parent):
        # Parent may be another widget (we join its tree) or a screen
        # (we become a root-level widget of that screen).
        self.__parent = parent
        if isinstance(parent, widget):
            self.screen = parent.screen
            parent.children.append(self)
        else:
            self.screen = parent
    def compile(self):
        # Depth-first walk: init() each widget, then apply every
        # compile_child hook collected from its ancestors.
        def _compile(w, compilers):
            cc = w.compile_child
            w.init()
            compilers = cc and compilers + [cc] or compilers
            for child in w:
                _compile(child, compilers)
            for f in compilers:
                f(w)
        return _compile(self, [])
    def __iter__(self):
        return iter(self.children)
    @property
    def args(self):
        # Client-side arguments; falsy values are dropped, empty -> None.
        args = self.get_args() or {}
        return dict((k, v) for (k, v) in args.iteritems() if v) or None
    def get_args(self):
        pass
    def get_link(self, obj, label=None):
        # Link widget for *obj* when it has a URL, plain text otherwise.
        v = self.context.get_view(obj)
        if not label:
            label = str(obj)
        view = getmeta(obj).get_view(self.context, obj)
        url = view and view.get_url()
        if url:
            return link(self, url=url, label=label, link_class='ref')
        else:
            return text(self, label)
class screen_widget(widget):
    # Widget whose identity is borrowed from its screen's class.
    class_id = property(lambda x:x.screen.__class__.__name__)
class custom_widget(widget):
    # Widget with an optional explicit name overriding the class-derived id.
    name = None
    class_id = property(lambda x:x.name or x.__class__.__name__)
class _collector(widget):
    # Root widget that records (id, js_class, args) for every compiled
    # descendant; the screen ships this list to the client side.
    widgets = util.lazyattr(lambda:[])
    def compile_child(self, child):
        self.widgets.append((child.id, child.js_class, child.args))
class screen_frame(widget):
    """Outermost page widget: asset URLs, page CSS classes, bootstrap JS."""
    title = None
    pack_static = property(lambda x:x.app.config.get('pack_static'))
    def stylesheet_urls(self):
        # Packed: one versioned bundle.  Unpacked: each sheet with a
        # timestamp query string as a cache-buster.
        if self.pack_static:
            url = 'css/pack-%s.css' % self.app.static_version
            return [self.request.static_url(url)]
        else:
            s = '?%d' % util.timer()
            return (self.request.static_url(url)+s for url in get_all_css())
    def javascript_urls(self):
        # Same bundling scheme as stylesheet_urls, for scripts.
        if self.pack_static:
            url = 'js/pack-%s.js' % self.app.static_version
            return [self.request.static_url(url)]
        else:
            s = '?%d' % util.timer()
            return (self.request.static_url(url)+s for url in get_all_js())
    def get_args(self):
        return {'heartbeat_id': self.screen.bind_heartbeat(),
                'heartbeat_seconds': self.request.heartbeat_seconds,
                'expire_seconds': self.request.context_expire_seconds,
                'update_bind': self.screen.bind_update and self.screen.bind_update()}
    def css_classes(self):
        # Body-level classes describing release, model, screen and mode.
        tag = ''
        series = self.app.release_series
        if series:
            tag = 'release-%s' % series
        return [tag,
                self.model.__class__.__name__,
                self.screen.__class__.__name__,
                self.class_id,
                self.debug and 'debug' or '',
                self.screen.editable and 'editable' or 'readonly']
    @property
    def context_location(self):
        return self.request.context_location(self.context)
    def install_script(self):
        # Bootstrap call: '<namespace>({...vars as JSON...})'.
        vars = {
            'logger_bind': self.screen.bind_logger(),
            'error_message': self.app.default_error_message,
            'xmlhttp_key': self.request.xmlhttp_key,
            'model_key': self.request.model_key,
            'bind_key': self.request.bind_key,
            'debug': self.debug,
        }
        vars.update(self.screen.vars)
        from fenton.types import json
        return '%s(%s)' % (self.screen.namespace,
                           json.dumps(vars, indent=self.debug and 2 or None))
class header_panel(widget):
    """Page header: breadcrumb trail, logo, release info, session links."""
    title = None
    max_crumbs = 5
    crumb_prefix = '»'
    crumb_ellipsis = '…'
    def _screen_trail(self):
        # Ancestor screens, nearest first.
        x = self.screen.get_parent()
        while x:
            yield x
            x = x.get_parent()
    @property
    def screen_trail(self):
        # Breadcrumbs, oldest first; an over-long trail is truncated with
        # an ellipsis placeholder (None marks the gap).
        crumbs = list(self._screen_trail())
        if len(crumbs) > self.max_crumbs:
            crumbs = crumbs[:(self.max_crumbs-1)] + [None]
        for v in reversed(crumbs):
            if v is None:
                yield text(self, markupsafe.Markup(self.crumb_ellipsis))
            elif self.screen.modal:
                # Modal screens must not navigate away: titles only.
                yield text(self, v.get_title())
            else:
                yield link(self, url=v.get_url(), label=v.get_title(), link_class='crumb')
    @property
    def logo_html(self):
        return self.app.logo_html(self.screen)
    @property
    def logo_image(self):
        return self.request.static_url(self.app.global_logo)
    @property
    def logo_url(self):
        return self.request.absolute_url(self.app.home_url)
    @property
    def logout_url(self):
        return self.request.absolute_url(self.app.logout_url)
    @property
    def help_url(self):
        return self.request.absolute_url(self.app.help_url)
    @property
    def release_label(self):
        return self.app.release_label or ''
    @property
    def release_version(self):
        return self.app.release_version or ''
    @property
    def session_links(self):
        # User name plus a logout link (suppressed inside modals).
        if self.request.user.authenticated:
            yield text(self, self.request.user.name)
            if not self.screen.modal:
                #yield link(self, label='profile', url=self.logout_url)
                #if self.screen.check(security.RESTRICTED):
                # yield link(self, label='switch', url=self.logout_url)
                yield link(self, label='logout', url=self.logout_url)
        else:
            yield text(self, 'Not logged in')
class action_panel(widget):
    # Panel rendered with the shared action-panel template.
    template_name = 'action_panel'
class body_panel(widget):
    template_name = 'bare_panel'
class content_panel(widget):
    template_name = 'content_panel'
# Concrete layout panels: fixed css classes, template from the base.
class left_actions(action_panel):
    css_class = 'action-panel left-panel'
class top_actions(content_panel):
    css_class = 'action-panel top-panel'
class right_body(body_panel):
    css_class = 'body-panel right-panel'
class sub_body(body_panel):
    css_class = 'body-panel sub-panel'
class sheet_body(content_panel):
    css_class = 'body-panel sheet-panel'
class link(widget):
    # Hyperlink widget; url/label arrive via keyword attributes.
    link_class=None
    text_class=None
class parent_link(link):
    pass
class text(widget):
    # Plain text widget.
    text_class = None
    def __init__(self, parent, text, *args, **kw):
        widget.__init__(self, parent, *args, **kw)
        self.text = text
    def __unicode__(self):
        return unicode(self.text)
class screen_title(widget):
    title = None
    @property
    def render(self):
        # Only rendered when a title was supplied.
        return bool(self.title)
class model_list(widget):
    """Tabular widget over an iterable model: header row plus linked rows."""
    meta = property(lambda x:x.model.submeta)
    list_items = property(lambda x:list(x.screen.model))
    offset = 0
    columns = None
    # False: no link column; None: link the first column; otherwise the
    # named column is linked.
    link_column = False
    fixed_column = None
    @property
    def show_header(self):
        return len(self.columns or []) > 1 and bool(self.list_items)
    def iter_rows(self):
        if not self.columns:
            return []
        return self._iter_rows()
    def _iter_rows(self):
        link_col = self.link_column
        if link_col is None:
            link_col = self.columns[0]
        for i, view in enumerate(iter_views(self.context, self.list_items)):
            url = view.get_url()
            row = []
            for j, k in enumerate(self.columns):
                label = self.meta.format_attribute(view.model, k, self.context)
                if link_col is not None and link_col == k:
                    cell = link(self, label=label, url=url, link_class='ref')
                else:
                    cell = text(self, label, url=None)
                row.append((j, cell))
            yield i, row
    def iter_headers(self):
        # Column labels from the attribute metadata, in column order.
        for key in self.columns:
            t = self.meta.attributes[key]
            yield t.get_label()
class field_widget(widget):
    """Base widget for a single form field; proxies the field's metadata."""
    key = property(lambda x:x.field.key)
    name = property(lambda x:x.field.name)
    type = property(lambda x:x.field.type)
    hint = property(lambda x:x.field.hint)
    error = property(lambda x:x.field.error)
    index = property(lambda x:x.field.index)
    label = property(lambda x:x.field.label)
    value = property(lambda x:x.field.get_value())
    required = property(lambda x:x.field.required)
    editable = property(lambda x:x.field.editable)
    formatted = property(lambda x:x.field.get_formatted())
    simplified = property(lambda x:x.get_simplified())
    change_on_dirty = True
    change_refreshes = False
    @property
    def args(self):
        # Bug fix: get_args() used to be called twice here, with the first
        # result assigned and then discarded; fetch it once.
        extra = self.get_args() or {}
        return dict(field=self.field.key,
                    change_refreshes=self.change_refreshes,
                    change_on_dirty=self.change_on_dirty,
                    **extra)
    def css_classes(self):
        return (self.class_id, self.type.__class__.__name__)
    def get_simplified(self):
        # NOTE: the and/or idiom maps any falsy simplified value (e.g. 0)
        # to '' as well as None -- preserved as existing behaviour.
        v = self.field.get_simplified()
        return v is not None and v or ''
class string_edit(field_widget):
    # Single-line text input; keypresses mark the form dirty by default.
    keypress_triggers = True
    def get_args(self):
        return {'keypress_triggers_dirty': self.keypress_triggers}
class code_edit(string_edit):
    # Short code input rendered with the string_edit template but with
    # case/character constraints from the field type.
    template_name = 'string_edit'
    def get_args(self):
        return {'case': self.type.case,
                'chars': self.type.chars}
class textbox_edit(string_edit):
    # Multi-line input with a default size.
    columns = 50
    rows = 5
class date_edit(string_edit):
    js = ['fenton/js/jquery.dateinput.js']
    css = ['fenton/css/jquery.dateinput.css']
    def format_date(self):
        # e.g. '2 Jan 2000'; None when the field has no value.
        if not self.value:
            return None
        d = self.value.day
        return '%d %s' % (d, self.value.strftime('%b %Y'))
class time_edit(string_edit):
    def format_time(self):
        return self.field.get_formatted()
class minutes_edit(string_edit):
    pass
class local_datetime_view(field_widget):
    """Read-only datetime shown in the model's local timezone."""
    @property
    def tz(self):
        return self.type.get_tz(self.model)
    @property
    def formatted_date(self):
        return self.formatted and self.type.format_date(self.value, self.context) or ''
    @property
    def formatted_time(self):
        return self.formatted and self.type.format_time(self.value, self.context) or ''
    @property
    def formatted_tz(self):
        return str(self.type.get_tz(self.model))
class local_datetime_edit(date_edit, local_datetime_view):
    """Editable local datetime; ships the zone offset to the client."""
    @property
    def simplified(self):
        # Bypass date_edit's version; the view's simplification applies.
        return local_datetime_view.get_simplified(self)
    @property
    def tzoffset(self):
        return self.tz.minutes
    def get_args(self):
        args = {'tzoffset': self.tzoffset}
        if self.type.follows_from:
            # Chain this input after another datetime field on the form.
            follow_field = self.field.form.fields[self.type.follows_from]
            args['follows_from'] = follow_field.widget.id
        return args
class option_list(field_widget):
    """Base for choice widgets: enumerates (id, key, label, selected)."""
    other_key = property(lambda x:x.type.other_key)
    other_label = property(lambda x:x.type.other_label)
    multiple=False
    def _iter_options(self):
        return self.get_type().get_options(self.field)
    def iter_options(self):
        type = self.get_type()
        has_selected = False
        for i, (key, obj) in enumerate(self._iter_options()):
            # Single-select: only the first matching option is marked.
            selected = self.is_selected(obj) and (self.multiple or not has_selected)
            id = '%s_select_option_%d' % (self.field.key, i)
            yield id, key, type.format_value(obj, self.context), selected
            has_selected |= (selected or False)
class simple_select_list(option_list):
    """Multi-select list over the element type of a collection field."""
    multiple = True
    scrolling = True
    scroll_height = None
    def get_type(self):
        # Options come from the collection's element type.
        return self.type.of
    def is_selected(self, value):
        # Selected when the current selection is non-empty and contains it.
        selection = self.value
        return bool(selection and value in selection)
class single_option_select(option_list):
    """Drop-down over the field's own type, with a leading null option."""
    null_option_empty = 'Select...'
    null_option_value = '(none)'
    def get_null_option(self):
        # Placeholder text depends on whether a value is already set.
        if not self.field.get_value():
            return self.null_option_empty
        #elif not self.field.required:
        else:
            return self.null_option_value
    def _iter_options(self):
        # Null option first (key ''), then the type's options.
        if self.get_null_option():
            yield '', self.get_null_option()
        for x in option_list._iter_options(self):
            yield x
    def get_args(self):
        return {'other_key': self.other_key,
                'null_option_empty': self.null_option_empty,
                'null_option_value': self.null_option_value}
    def get_type(self):
        return self.type
    def is_selected(self, value):
        return self.value is not None and value == self.value
class radio_select(single_option_select):
    # Radio-button variant; required fields get no null option once set.
    orient = 'vertical'
    null_option_value = None
    null_option_empty = None
    def get_null_option(self):
        if not self.field.get_value() and self.field.required:
            return self.null_option_empty
        elif not self.field.required:
            return self.null_option_value
class boolean_edit(radio_select):
    # Two-option radio rendered horizontally; compared by truthiness.
    class_id = 'radio_select'
    orient = 'horizontal'
    def is_selected(self, value):
        return self.value is not None and bool(value) == bool(self.value)
class reference_view(field_widget):
    """Read-only reference: a link to the referenced object's view, or
    plain text when modal/unlinkable/empty."""
    def init(self):
        label = self.type.format_value(self.value, self.context)
        if self.screen.modal or not self.type.link or not self.value:
            self.link = text(self, text=str(label))
        elif self.value:
            view = getmeta(self.value).get_view(self.context, self.value)
            self.link = link(self, url=view.get_url(), label=label, link_class='ref')
class reference_list_view(model_list, field_widget):
    # Table of referenced objects; columns come from the field type.
    list_items = property(lambda x:x.value)
    meta = property(lambda x:x.type.of.meta)
    columns = property(lambda x:x.type.columns)
    link_column = property(lambda x:x.type.link_column)
    @property
    def show_header(self):
        return self.type.show_header and len(self.columns or []) > 1 and bool(self.list_items)
class child_single_view(model_list, field_widget):
    # Single child object rendered as a one-row table.
    meta = property(lambda x:x.type.meta)
    columns = property(lambda x:x.type.columns)
    link_column = property(lambda x:x.type.link_column)
    @property
    def list_items(self):
        v = self.field.get_value()
        if v is None:
            return []
        return [v]
    @property
    def show_header(self):
        return self.type.show_header and len(self.columns or []) > 1 and bool(self.list_items)
class child_subedit_widget(field_widget):
    """Mixin providing create/edit/delete bindings for child objects."""
    meta = property(lambda x:x.type.meta)
    columns = property(lambda x:x.type.columns)
    link_column = property(lambda x:x.type.link_column)
    addable = True
    @property
    def render(self):
        # Only rendered when the user may view the child type.
        priv = self.meta.get_priv('view')
        from fenton import security
        return security.check(priv, self.context, self.value)
    def bind_create(self, index=None):
        return self.screen.bind_create_child(self.key, index)
    def bind_edit(self, index=None):
        # index is None for single children, a list index otherwise.
        if index is None:
            value = self.value
        else:
            value = self.value[index]
        return self.screen.bind_edit_child(value, self.key, index)
    def bind_delete(self, index=None):
        if index is None:
            value = self.value
        else:
            value = self.value[index]
        return self.screen.bind_delete_child(value, self.key, index)
class child_single_edit(child_single_view, child_subedit_widget):
    """Editable single child: one row with edit/delete, add when empty."""
    columns = property(lambda x:x.type.columns)
    link_column = property(lambda x:x.type.link_column)
    def init(self):
        self.addable = self.value is None
    def add_bind(self):
        return self.bind_create()
    def iter_rows(self):
        for i, row in child_single_view.iter_rows(self):
            yield i, row, self.bind_edit(), self.bind_delete()
class child_list_view(model_list, field_widget):
    # Read-only table over a list-valued child field.
    meta = property(lambda x:x.type.of.meta)
    list_items = property(lambda x:x.value)
    columns = property(lambda x:x.type.columns)
    link_column = property(lambda x:x.type.link_column)
    @property
    def show_header(self):
        return self.type.show_header and len(self.columns or []) > 1 and bool(self.list_items)
class child_list_edit(child_list_view, child_subedit_widget):
    """Editable child list: per-row edit/delete binds plus append."""
    def iter_rows(self):
        # self.i tracks the row count so add_bind appends at the end.
        self.i = 0
        for i, row in child_list_view.iter_rows(self):
            self.i += 1
            yield i, row, self.bind_edit(i), self.bind_delete(i)
    def add_bind(self):
        return self.bind_create(self.i)
class toolbar_button(widget):
    """Action button; args drive the client-side button behaviour."""
    confirm = None
    button_class = property(lambda x:x.type + '-action')
    disabled = False
    dirty_disables = False
    def get_args(self):
        return {'disabled': self.disabled,
                'confirm': self.confirm,
                'dirty_disables': self.dirty_disables}
class toggle_button(field_widget):
    # On/off button; labels are supplied as widget keywords.
    disabled = False
    def get_args(self):
        return {'label_on': self.label_on,
                'label_off': self.label_off}
def navbuttons_navigate(screen, n):
    # Bound callback: step the screen's model by n and redraw.
    screen.model += n
    screen.refresh()
class navbuttons(toolbar_button):
    """Back/forward arrows; a bind is offered only when the model accepts
    the step (probed via __iadd__(n, True))."""
    label_back = u'\N{LEFTWARDS ARROW}'
    label_forward = u'\N{RIGHTWARDS ARROW}'
    @util.lazyattr
    def bind_back(self):
        if self.model.__iadd__(-1, True):
            return self.screen.bind(navbuttons_navigate, self.screen, -1)
    @util.lazyattr
    def bind_forward(self):
        if self.model.__iadd__(1, True):
            return self.screen.bind(navbuttons_navigate, self.screen, 1)
class submit_button(widget):
    button_type = 'submit'
    button_class = property(lambda x:x.type + '-action')
class button_bar(widget):
    @property
    def render(self):
        # Only rendered when it actually contains buttons.
        return bool(self)
class filter_field(widget):
    # Wraps a form field's own widget for use in a filter panel.
    def init(self):
        self.field.widget.set_parent(self)
    def css_classes(self):
        return self.children[0].css_classes()
class model_form(widget):
    # Expands form groups into field_group children.
    def init(self):
        for key, header, fields in self.groups:
            field_group(self, key=key.replace('_', '-'), header=header, fields=fields)
class field_group(widget):
    # Adopts each field's widget as a child of this group.
    def init(self):
        for f in self.fields:
            f.widget.set_parent(self)
class server_error(widget):
    template_file = 'error'
## template-only widgets
# The classes below carry no Python behaviour of their own; each exists to
# select a template/css identity via the widget machinery.
class dialog_buttons(widget):
    pass
class textblock(text):
    pass
class link_list(widget):
    pass
class spacer(widget):
    pass
class message(widget):
    pass
class string_view(field_widget):
    pass
class text_view(field_widget):
    pass
class string_mask_view(string_view):
    pass
class string_mask_edit(string_edit):
    pass
class date_view(field_widget):
    pass
class datetime_view(field_widget):
    pass
class datetime_edit(field_widget):
    pass
class simple_list_view(field_widget):
    pass
class simple_list_edit(field_widget):
    pass
class boolean_view(field_widget):
    pass
class date_period_view(field_widget):
    pass
class date_period_edit(field_widget):
    pass
class model_history(widget):
    pass
class message_button(text):
    pass
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import urlparse
import functools
from fenton import util
from fenton import getmeta
from fenton import widgets
from fenton import logging
class MethodWrapper:
    """Stand-in for a bound method that stores (instance, method name) and
    resolves the function at call time -- so bindings survive even if the
    class's function object is replaced later."""
    __func = property(lambda x:getattr(x.__self, x.__name))
    def __init__(self, m):
        f = m.im_func
        # Reject decorated/renamed methods: lookup-by-name must round-trip
        # to the very same function object.
        if getattr(m.im_class, f.__name__).im_func is not f:
            raise TypeError('method %s is not known by that name (decorated?)' % f.__name__)
        self.__self = m.im_self
        self._name = self.__name = f.__name__
        # Copy __name__/__doc__ etc. from the wrapped function.
        functools.wraps(f)(self)
    def __call__(self, *args, **kw):
        return self.__func(*args, **kw)
    def __repr__(self):
        return '<%s.%s>' % (self.__self,
                            self.__name)
class Screen:
    """Base class for all screens: widget construction, callable bindings,
    URL/path resolution and the parent/replace navigation model."""
    form_error_msg = 'Form has errors (click icon for detail)'
    namespace = 'Fenton'
    # Conveniences delegating to the request context.
    request = property(lambda x:x.context.request)
    app = property(lambda x:x.request.app)
    debug = property(lambda x:x.request.app.debug)
    meta = property(lambda x:getmeta(x.model))
    has_error = property(lambda x:any(x.errors.values()))
    parent = None
    screen_title = None
    template_file = None
    screen_message = None
    model = None
    referer = None
    bind_update = None
    # Registerable screens can be addressed by URL path.
    registerable = False
    heartbeat = True
    editable = False
    modal = False
    def __init__(self, context, model, priv=None,
                 parent=None, key=None, index=None):
        self.context = context
        self.priv = priv
        self.parent = parent
        self.model = model
        self.errors = {}
        self.bindings = {}
        # Set when this screen edits a child attribute of the parent model.
        self.child_key = key
        self.child_index = index
    def __repr__(self):
        n = self.__class__.__name__
        m = type(self.model).__name__
        return '<%s(%s)>' % (n, m)
    # Monotonic per-screen counter for widget/binding ids.
    __current_id = 0
    def __seq(self):
        self.__current_id += 1
        return self.__current_id
    def next_id(self):
        return 'x0' + util.to_base62(self.__seq())
    def bind(self, func, *args, **kw):
        # Bound methods are wrapped so the binding stores (obj, name)
        # rather than a raw function object.
        if hasattr(func, 'im_self'):
            func = MethodWrapper(func)
        return self._bind(func, *args, **kw)
    def bind_logger(self):
        return self.bind(self.logmsg)
    def logmsg(self):
        # Client-side log relay: record the 'msg' request parameter.
        msg = self.request.params.get('msg')
        logging.log.info('[JS] %s', msg)
    def bind_heartbeat(self):
        if self.heartbeat:
            return self.bind(self.heartbeat_ping)
    def heartbeat_ping(self):
        pass
    def get_form(self):
        from fenton import form
        return form.Form(self.context, self.model, self.editable, self.errors)
    def get_priv(self, name):
        # Hook: subclasses map a privilege name to a privilege object.
        pass
    def check(self, priv='view'):
        if isinstance(priv, basestring):
            priv = self.get_priv(priv)
        from fenton import security
        return security.check(priv, self.context, self.model)
    def _bind(self, f, *args, **kw):
        # Register a callable under a fresh id; the client posts this id
        # back to invoke it.
        key = self.next_id()
        self.bindings[key] = (f, args, kw)
        return key
    def get_binding(self, key):
        if key not in self.bindings:
            return None
        return self.bindings.get(key)
    def get_parent(self):
        return self.parent
    def get_path(self):
        # URL path for this screen, or None when not addressable.
        if not self.registerable:
            return None
        if self.model is None:
            return None
        return getmeta(self.model).get_object_path(self.context, self.model)
    def get_link(self, parent):
        return widgets.link(parent, url=self.get_url(), label=self.get_title())
    def get_url(self):
        path = self.get_path()
        if not path:
            return None
        if path == '/': path = ''
        url = self.request.app_url(path)
        args = self.get_args()
        if args:
            # Encode screen args into the query string under model_key.
            from fenton import form
            args = form.encode({self.request.model_key: args})
            qs = '&'.join('%s=%s'%pair for pair in args.items() if pair[1])
            scheme, host, path, _, frag = urlparse.urlsplit(url)
            url = urlparse.urlunsplit((scheme, host, path, qs, frag))
        return url
    def add_callback(self, name):
        self.vars.setdefault('run_callbacks', []).append(name)
    def callback_expr(self, expr):
        self.vars['callback_expr'] = expr
    def refresh(self):
        self.request.refresh(self)
    def replace(self, screen):
        # Swap the context's current screen for *screen* and redraw.
        self.context.screen = screen
        screen.refresh()
    def pop(self):
        p = self.get_parent()
        assert p
        self.replace(p)
    def thaw(self):
        # Re-attach the model (and ancestors' models) to a live session.
        if self.model is not None:
            getmeta(self.model).thaw_model(self.context, self.model)
        if self.parent:
            self.parent.thaw()
    def render(self, widget=None):
        if widget is None:
            widget = self.get_widget()
        # Compile under a collector so client-side widget specs are gathered.
        p = widgets._collector(self)
        widget.set_parent(p)
        p.compile()
        self.vars['widgets'] = p.widgets
        return self.request.render(widget)
    def update(self, args=None):
        pass
    def get_title(self):
        return self.screen_title
    def set_message(self, type, msg):
        self.screen_message = (type, msg)
    def get_message(self):
        # One-shot message: cleared after being read once.
        msg = self.screen_message
        if not msg and self.has_error:
            msg = ('error', self.form_error_msg)
        self.screen_message = None
        return msg
    def get_args(self):
        return self.get_form().get_args()
    @util.lazyattr
    def vars(self):
        return self.get_vars()
    def get_vars(self):
        return {}
    def get_widget(self):
        # Assemble the page: frame + header + actions + body.
        self.vars['message'] = self.get_message()
        frame = widgets.screen_frame(self, title=self.get_title())
        self.build_header(frame)
        self.build_actions(frame)
        self.build_body(frame)
        return frame
    def build_header(self, frame):
        widgets.header_panel(frame, title=self.get_title())
    def build_actions(self, frame):
        panel = widgets.left_actions(frame)
        self.left_actions(panel)
    def build_body(self, frame):
        body = widgets.right_body(frame)
        self.right_body(body)
    def left_actions(self, panel):
        pass
    def right_body(self, body):
        pass
class Plain(Screen):
    """Raw-content screen: the model supplies content type and body."""
    registerable = True
    def render(self):
        self.request.response.content_type = self.model.content_type
        return str(self.model)
class CustomScreen(Screen):
    """Screen rendered from a screen-specific template."""
    registerable = True
    def build_body(self, frame):
        widgets.screen_widget(frame, template_file=self.template_file)
class Dialog(Screen):
    """Sheet-style screen with dialog content plus a button bar, no side
    actions panel."""
    registerable = True
    def build_body(self, frame):
        sheet = widgets.sheet_body(frame)
        self.build_dialog(sheet)
        self.build_dialog_buttons(widgets.button_bar(sheet))
    def build_actions(self, frame):
        pass
    def build_dialog(self, sheet):
        pass
    def build_dialog_buttons(self, sheet):
        pass
class ModelScreen(Screen):
    """Screen over one model object: form updates plus parent discovery
    through the navigation history."""
    def bind_update(self):
        return self.bind(self.do_update_form)
    def do_update_form(self):
        # Validate posted values and push errors/pushback to the client.
        f = self.update()
        es = dict((k, v and str(v)) for (k,v) in f.errors.items())
        self.vars['form_errors'] = es
        self.vars['form_error_msg'] = self.form_error_msg
        for (k, v) in es.items():
            if v is not None:
                logging.log.debug('[form.error] %s: %s', k, v)
        pushback = f.get_pushback()
        if pushback:
            self.vars['form_pushback'] = pushback
    def update(self, args=None):
        # args=None: take request vars; a trigger field restricts
        # validation to just that field.
        if args is None:
            args = self.request.model_vars
            trigger = self.request.params.get('____trigger_field')
            args = trigger and {trigger: args.get(trigger)} or args
            only = trigger and [trigger]
        else:
            only = args.keys()
        f = self.get_form()
        f.validate(args, only)
        if type(self.model) is not type(f.model):
            # class changed
            self.add_callback('refresh_screen')
            self.errors = {}
        self.model = f.model
        return f
    def get_title(self):
        return str(self.model)
    __parent = util.undefined
    def get_parent(self):
        # Explicit parent wins; otherwise resolve (once) from history.
        if self.parent:
            return self.parent
        if self.__parent is util.undefined:
            self.__parent = self._get_parent_from_history()
        return self.__parent
    def get_parents(self):
        # Candidate parent views, updated from history args when seen there.
        parents = self.meta.get_parents(self.context, self.model)
        history = self.get_history()
        history = dict(reversed(history))
        for p in parents:
            v = self.context.get_view(p)
            p = v.get_path()
            if p:
                if p in history:
                    v.update(history[p])
                yield v
    def get_history(self):
        history = self.request.get_history()
        if self.referer:
            history = history + [self.referer]
        return history
    def _get_parent_from_history(self):
        # Pick the most recently visited candidate parent; fall back to the
        # first candidate (or the last examined view) when none matches.
        parents = self.meta.get_parents(self.context, self.model)
        if not parents:
            return None
        history = self.get_history()
        if not history:
            return self.context.get_view(parents[0])
        for hpath, hargs in reversed(history):
            for p in parents:
                v = self.context.get_view(p)
                vpath = v and v.get_path()
                if not vpath:
                    continue
                if vpath == hpath:
                    v.update(hargs)
                    return v
        # NOTE(review): deliberate fallback to the last examined view.
        return v
    def get_priv(self, name):
        return self.meta.get_priv(name)
class Attachment(ModelScreen):
    """Streams the model as a file-download response."""
    registerable = True
    editable = True
    def get_args(self):
        return self.get_form().get_args()
    def render(self):
        rs = self.request.response
        rs.headers['Content-Type'] = self.model.attachment_type()
        rs.headers['Content-Disposition'] = 'attachment;filename="%s"' % self.model.attachment_filename()
        self.model.write_attachment_data(rs.body_file)
class IterScreen(ModelScreen):
    """Filterable listing screen: every form update triggers a redraw."""
    editable = True
    def do_update_form(self):
        ModelScreen.do_update_form(self)
        self.refresh()
    def build_filter_form(self, panel):
        for key, header, fields in self.get_form().groups:
            for field in fields:
                widgets.filter_field(panel, field=field)
class FormScreen(ModelScreen):
    """Form-based screen over one model, with create/edit/delete bindings
    for child objects reached through the model's attributes."""
    editable = False
    def get_args(self):
        return None
    def right_body(self, body):
        widgets.model_form(body, groups=self.get_form().groups)
        if not self.editable:
            # Read-only views of persisted models also show their history.
            if getattr(self.model, '_meta', None):
                widgets.model_history(body)
    def _child_priv(self, key, name):
        # Privilege governing action *name* (create/edit/delete) on the
        # child attribute *key*; attributes without meta are unrestricted.
        # (Factored out of the three bind_*_child methods, which repeated
        # this lookup verbatim.)
        type = self.meta.attributes[key]
        if hasattr(type, 'meta'):
            return type.meta.get_priv(name)
        return True
    def bind_create_child(self, key, index):
        if self.check(self._child_priv(key, 'create')):
            return self.bind(self.do_create_child, key, index)
    def bind_edit_child(self, obj, key, index):
        if self.check(self._child_priv(key, 'edit')):
            return self.bind(self.do_edit_child, obj, key, index)
    def bind_delete_child(self, obj, key, index):
        if self.check(self._child_priv(key, 'delete')):
            return self.bind(self.do_delete_child, obj, key, index)
    def do_edit_child(self, obj, key, index):
        # Flush pending edits, then switch to the child's edit screen.
        self.update()
        Screen = self.meta.attributes[key].meta.edit_screen
        self.replace(Screen(self.context, obj, True, self, key, index))
    def do_create_child(self, key, index):
        self.update()
        obj = self.meta.construct_child(self.context, self.model, key, index)
        Screen = self.meta.attributes[key].meta.create_screen
        self.replace(Screen(self.context, obj, True, self, key, index))
    def do_delete_child(self, obj, key, index):
        self.meta.delete_child(self.context, self.model, key, index)
        self.update({key: getattr(self.model, key)})
        self.set_message('info', 'Removed %s' % obj)
        self.refresh()
    def store_child(self, obj, key, index):
        # Persist a child edited on a sub-screen back into this model.
        self.meta.store_child(self.context, self.model, obj, key, index)
        self.update({key: getattr(self.model, key)})
class EditableScreen(FormScreen):
    """Modal form screen with an explicit commit/cancel workflow.
    Subclasses override commit_model / get_commit_screen /
    get_commit_message to control where the edit lands."""
    modal = True
    editable = True
    commit_label = 'Finish'
    def left_actions(self, panel):
        # Unsaved-changes badge, then the commit and cancel buttons.
        widgets.text(panel, text='UNSAVED', text_class='alert')
        widgets.toolbar_button(panel,
            type='primary',
            label=self.commit_label,
            dirty_disables=True,
            bind=self.bind(self.do_commit))
        widgets.toolbar_button(panel,
            type='secondary',
            label='Cancel',
            bind=self.bind(self.do_cancel))
    def do_cancel(self):
        # Throw away the draft model and return to the previous screen.
        getmeta(self.model).purge_model(self.context, self.model)
        self.pop()
    def do_commit(self):
        # NOTE(review): obj is assigned but never used here.
        obj = self.model
        self.update()
        if self.has_error:
            # Validation failed: stay on this screen and redraw.
            return self.refresh()
        self.commit_model()
        # instead, call parent.commit_model()
        # and have it set the screen status ?
        screen = self.get_commit_screen()
        msg = self.get_commit_message()
        if msg:
            screen.set_message(*msg)
        self.replace(screen)
    def get_commit_message(self):
        # Optional (status, text) tuple shown on the commit screen.
        pass
    def get_commit_screen(self):
        # Default: return to the parent screen after committing.
        return self.get_parent()
    def commit_model(self):
        # self.parent.commit_model(self.model,...)
        with self.context:
            self.model = self.meta.store_model(self.context, self.model)
# concrete classes
class NavScreen(ModelScreen):
    """Screen that renders the model's links as a navigation list."""
    registerable = True
    def right_body(self, body):
        container = widgets.link_list(body)
        self.build_links(container)
    def build_links(self, container):
        """Add one link widget per (label, url) pair."""
        for label, url in self.get_links():
            absolute = self.request.absolute_url(url)
            widgets.link(container, url=absolute, label=label)
    def get_links(self):
        """Collect (title, url) pairs from the model's link views."""
        pairs = []
        for view in self.model.get_links():
            pairs.append((view.get_title(), view.get_url()))
        return pairs
class ListScreen(IterScreen):
    """Screen listing a collection model as a table."""
    registerable = True
    def left_actions(self, panel):
        """Offer a New button (when permitted) plus the filter form."""
        if self.check('create'):
            widgets.toolbar_button(panel, type='primary', label='New',
                                   bind=self.bind(self.do_create))
        self.build_filter_form(panel)
    def right_body(self, body):
        """Render the list table, choosing columns from the model, then a
        legacy __list_columns__ attribute, then the first attribute."""
        submeta = self.model.submeta
        columns = self.model.list_columns
        if not columns:
            # backward compat
            columns = getattr(submeta.model_class, '__list_columns__', [])
        if not columns:
            # fallback
            columns = [submeta.attributes.keys()[0]]
        widgets.model_list(body,
                           columns=columns,
                           link_column=getattr(self.model, 'link_column', False))
    def do_create(self):
        """Construct a fresh child model and open its create screen."""
        obj = self.model.submeta.construct_model(self.context)
        Screen = self.model.submeta.create_screen
        self.replace(Screen(self.context, obj, self.priv, self))
class ViewScreen(FormScreen):
    """Read-only model screen with edit/delete/clone actions."""
    registerable = True
    editable = False
    def parent_links(self, panel):
        """Render a breadcrumb link for each viewable parent."""
        for parent in self.get_parents():
            if not parent.check():
                continue
            widgets.parent_link(panel, url=parent.get_url(), label=parent.get_title())
    def left_actions(self, panel):
        """Parent breadcrumbs plus Edit/Delete buttons as permitted."""
        self.parent_links(panel)
        if self.check('edit'):
            widgets.toolbar_button(panel,
                type='primary',
                label='Edit',
                bind=self.bind(self.do_edit, self.model))
        if self.check('delete'):
            widgets.toolbar_button(panel,
                type='primary',
                label='Delete',
                confirm='Delete %s?' % self.model,
                bind=self.bind(self.do_delete, self.model))
    def do_edit(self, obj=None):
        """Open the edit screen for *obj* (defaults to this model)."""
        obj = self.model if obj is None else obj
        Screen = getmeta(obj).edit_screen
        self.replace(Screen(self.context, obj, self.priv, self))
    def do_delete(self, obj=None):
        """Delete *obj* (defaults to this model) and pop back."""
        obj = self.model if obj is None else obj
        with self.context:
            getmeta(obj).delete_model(self.context, obj)
        self.get_parent().set_message('ok', 'Deleted %s' % obj)
        self.pop()
    def do_clone(self, obj=None):
        """Open a clone screen pre-filled with *obj*'s field values."""
        obj = self.model if obj is None else obj
        new = getmeta(obj).construct_model(self.context)
        screen = CloneScreen(self.context, new, self.priv, self)
        form = CloneScreen(self.context, obj, self.priv, self).form
        values = dict((k, form.get_value(k)) for k in form.fields)
        screen.update(values)
        self.replace(screen)
class EditScreen(EditableScreen):
    """Editable screen for an existing model."""
    def get_title(self):
        return '(editing)'
    def get_commit_message(self):
        return ('ok', 'Saved %s' % self.model)
class CloneScreen(EditableScreen):
    """Editable screen for a model copied from an existing one."""
    def get_title(self):
        return '(cloning)'
    def get_commit_message(self):
        return ('ok', 'Cloned %s' % self.model)
class CreateScreen(EditableScreen):
    """Editable screen for a newly constructed model."""
    def get_title(self):
        return '(new)'
    def get_commit_screen(self):
        # After creation, land on the model's own view screen rather
        # than the parent screen.
        meta = getmeta(self.model)
        return meta.get_view(self.context, self.model)
    def get_commit_message(self):
        return ('ok', 'Created %s' % self.model)
class SubEditScreen(EditScreen):
    # Edits a child object in place; commit hands the draft back to the
    # owning parent screen instead of persisting to storage directly.
    def left_actions(self, panel):
        # Only a Back button: the draft is committed to the parent
        # screen, so there is no separate Cancel action here.
        widgets.text(panel, text='UNSAVED', text_class='alert')
        widgets.toolbar_button(panel,
            type='primary',
            label='Back',
            dirty_disables=True,
            bind=self.bind(self.do_commit))
    def get_title(self):
        return str(self.model)
    def commit_model(self):
        # Store the draft on the parent; the parent persists it later.
        self.parent.store_child(self.model, self.child_key, self.child_index)
    def get_commit_message(self):
        return 'info', 'Updated %s (draft)' % self.model
class SubCreateScreen(SubEditScreen):
    """Create screen for a child object. The draft is stored on the
    parent screen via the commit_model inherited from SubEditScreen
    (the previous byte-identical override has been removed)."""
    def get_title(self):
        """Child title, suffixed with its 1-based position when indexed."""
        index = self.child_index
        # Conditional expression instead of the old `and/or` idiom.
        suffix = ' (%d)' % (index + 1) if index is not None else ''
        return str(self.model) + suffix
    def get_commit_message(self):
        return 'info', 'Added %s (draft)' % self.model
class CommentScreen(EditableScreen):
    """Editable screen used for attaching a comment."""
    def get_title(self):
        return '(add comment)'
    def get_commit_message(self):
        return ('ok', 'Added comment')
class FormDialog(Dialog, EditableScreen):
    # Dialog variant of an editable screen: the model form renders
    # inside the dialog sheet instead of the page body.
    def build_dialog(self, sheet):
        """Render the model form into the dialog sheet."""
        widgets.model_form(sheet, groups=self.get_form().groups)
class CustomDialog(Dialog):
    # Dialog rendered from a custom template rather than a model form.
    def build_dialog(self, sheet):
        """Render this screen's template into the dialog sheet.
        Expects the subclass to provide a template_file attribute."""
        widgets.screen_widget(sheet, template_file=self.template_file)
| Python |
def getmeta(*x, **y):
    """Proxy for fenton.model.getmeta.
    The import happens lazily (inside the call) to avoid a circular
    import at module load time; each call also rebinds fenton.getmeta
    to the real implementation before delegating to it."""
    import fenton, fenton.model
    fenton.getmeta = fenton.model.getmeta
    return fenton.getmeta(*x, **y)
| Python |
"""A high-speed, production ready, thread pooled, generic HTTP server.
Simplest example on how to use this module directly
(without using CherryPy's application machinery)::
from cherrypy import wsgiserver
def my_crazy_app(environ, start_response):
status = '200 OK'
response_headers = [('Content-type','text/plain')]
start_response(status, response_headers)
return ['Hello world!']
server = wsgiserver.CherryPyWSGIServer(
('0.0.0.0', 8070), my_crazy_app,
server_name='www.cherrypy.example')
The CherryPy WSGI server can serve as many WSGI applications
as you want in one instance by using a WSGIPathInfoDispatcher::
d = WSGIPathInfoDispatcher({'/': my_crazy_app, '/blog': my_blog_app})
server = wsgiserver.CherryPyWSGIServer(('0.0.0.0', 80), d)
Want SSL support? Just set server.ssl_adapter to an SSLAdapter instance.
This won't call the CherryPy engine (application side) at all, only the
HTTP server, which is independent from the rest of CherryPy. Don't
let the name "CherryPyWSGIServer" throw you; the name merely reflects
its origin, not its coupling.
For those of you wanting to understand internals of this module, here's the
basic call flow. The server's listening thread runs a very tight loop,
sticking incoming connections onto a Queue::
server = CherryPyWSGIServer(...)
server.start()
while True:
tick()
# This blocks until a request comes in:
child = socket.accept()
conn = HTTPConnection(child, ...)
server.requests.put(conn)
Worker threads are kept in a pool and poll the Queue, popping off and then
handling each connection in turn. Each connection can consist of an arbitrary
number of requests and their responses, so we run a nested loop::
while True:
conn = server.requests.get()
conn.communicate()
-> while True:
req = HTTPRequest(...)
req.parse_request()
-> # Read the Request-Line, e.g. "GET /page HTTP/1.1"
req.rfile.readline()
read_headers(req.rfile, req.inheaders)
req.respond()
-> response = app(...)
try:
for chunk in response:
if chunk:
req.write(chunk)
finally:
if hasattr(response, "close"):
response.close()
if req.close_connection:
return
"""
CRLF = '\r\n'
import os
import Queue
import re
quoted_slash = re.compile("(?i)%2F")
import rfc822
import socket
import sys
if 'win' in sys.platform and not hasattr(socket, 'IPPROTO_IPV6'):
socket.IPPROTO_IPV6 = 41
try:
import cStringIO as StringIO
except ImportError:
import StringIO
_fileobject_uses_str_type = isinstance(socket._fileobject(None)._rbuf, basestring)
import threading
import time
import traceback
from urllib import unquote
from urlparse import urlparse
import warnings
import errno
def plat_specific_errors(*errnames):
    """Return error numbers for all errors in errnames on this platform.
    The 'errno' module contains different global constants depending on
    the specific platform (OS). This function will return the list of
    numeric values for a given list of potential names.
    """
    available = dir(errno)
    # Use a dict keyed by errno value to drop duplicates (several names
    # may map to the same number on a given platform).
    numbers = {}
    for name in errnames:
        if name in available:
            numbers[getattr(errno, name)] = None
    return numbers.keys()
# Errno values meaning "system call interrupted" (POSIX and WinSock).
socket_error_eintr = plat_specific_errors("EINTR", "WSAEINTR")
# Errors that mean the client went away or the socket is unusable;
# connection handlers treat these as a normal end of the connection.
socket_errors_to_ignore = plat_specific_errors(
    "EPIPE",
    "EBADF", "WSAEBADF",
    "ENOTSOCK", "WSAENOTSOCK",
    "ETIMEDOUT", "WSAETIMEDOUT",
    "ECONNREFUSED", "WSAECONNREFUSED",
    "ECONNRESET", "WSAECONNRESET",
    "ECONNABORTED", "WSAECONNABORTED",
    "ENETRESET", "WSAENETRESET",
    "EHOSTDOWN", "EHOSTUNREACH",
    )
# NOTE(review): these strings sit in a list of ints; presumably some
# caller matches them against str(exc) for timeouts that carry no
# numeric errno -- confirm against the connection-handling code.
socket_errors_to_ignore.append("timed out")
socket_errors_to_ignore.append("The read operation timed out")
# Errors that signal "no data yet" on a non-blocking socket.
socket_errors_nonblocking = plat_specific_errors(
    'EAGAIN', 'EWOULDBLOCK', 'WSAEWOULDBLOCK')
# Headers whose repeated occurrences may be folded into a single
# comma-separated value (RFC 2616 section 4.2).
comma_separated_headers = ['Accept', 'Accept-Charset', 'Accept-Encoding',
    'Accept-Language', 'Accept-Ranges', 'Allow', 'Cache-Control',
    'Connection', 'Content-Encoding', 'Content-Language', 'Expect',
    'If-Match', 'If-None-Match', 'Pragma', 'Proxy-Authenticate', 'TE',
    'Trailer', 'Transfer-Encoding', 'Upgrade', 'Vary', 'Via', 'Warning',
    'WWW-Authenticate']
def read_headers(rfile, hdict=None):
    """Read headers from the given stream into the given header dict.
    If hdict is None, a new header dict is created. Returns the populated
    header dict.
    Headers which are repeated are folded together using a comma if their
    specification so dictates.
    This function raises ValueError when the read bytes violate the HTTP spec.
    You should probably return "400 Bad Request" if this happens.
    """
    if hdict is None:
        hdict = {}
    while True:
        line = rfile.readline()
        if not line:
            # Stream ended before the blank line terminating the headers.
            raise ValueError("Illegal end of headers.")
        if line == CRLF:
            # Blank line: normal end of the header block.
            return hdict
        if not line.endswith(CRLF):
            raise ValueError("HTTP requires CRLF terminators")
        if line[0] in ' \t':
            # Continuation of the previous header's value; k/hname keep
            # their values from the previous iteration.
            v = line.strip()
        else:
            try:
                k, v = line.split(":", 1)
            except ValueError:
                raise ValueError("Illegal header line.")
            # TODO: what about TE and WWW-Authenticate?
            k = k.strip().title()
            v = v.strip()
            hname = k
        if k in comma_separated_headers:
            existing = hdict.get(hname)
            if existing:
                v = ", ".join((existing, v))
        hdict[hname] = v
class MaxSizeExceeded(Exception):
    """Raised when a request line, header block, or body exceeds its
    configured maximum size."""
    pass
class SizeCheckWrapper(object):
    """Wraps a file-like object, raising MaxSizeExceeded if too large."""
    def __init__(self, rfile, maxlen):
        self.rfile = rfile
        self.maxlen = maxlen
        self.bytes_read = 0
    def _check_length(self):
        # A falsy maxlen (0/None) disables the limit entirely.
        if self.maxlen and self.bytes_read > self.maxlen:
            raise MaxSizeExceeded()
    def read(self, size=None):
        chunk = self.rfile.read(size)
        self.bytes_read += len(chunk)
        self._check_length()
        return chunk
    def readline(self, size=None):
        if size is not None:
            chunk = self.rfile.readline(size)
            self.bytes_read += len(chunk)
            self._check_length()
            return chunk
        # No size given: pull the line in bounded pieces so a huge line
        # trips the limit instead of being buffered whole.
        # See http://www.cherrypy.org/ticket/421
        pieces = []
        while True:
            chunk = self.rfile.readline(256)
            self.bytes_read += len(chunk)
            self._check_length()
            pieces.append(chunk)
            if len(chunk) < 256 or chunk[-1:] == "\n":
                return ''.join(pieces)
    def readlines(self, sizehint=0):
        # Modeled on StringIO.readlines.
        lines = []
        total = 0
        while True:
            line = self.readline()
            if not line:
                return lines
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                return lines
    def close(self):
        self.rfile.close()
    def __iter__(self):
        return self
    def next(self):
        chunk = self.rfile.next()
        self.bytes_read += len(chunk)
        self._check_length()
        return chunk
class KnownLengthRFile(object):
    """Wraps a file-like object, returning an empty string when exhausted."""
    def __init__(self, rfile, content_length):
        self.rfile = rfile
        # Bytes of entity body still unread; reads never go past this.
        self.remaining = content_length
    def read(self, size=None):
        """Read up to *size* bytes (all remaining bytes if size is None)."""
        if self.remaining == 0:
            return ''
        if size is None:
            size = self.remaining
        else:
            size = min(size, self.remaining)
        data = self.rfile.read(size)
        self.remaining -= len(data)
        return data
    def readline(self, size=None):
        """Read one line, never beyond the declared content length."""
        if self.remaining == 0:
            return ''
        if size is None:
            size = self.remaining
        else:
            size = min(size, self.remaining)
        data = self.rfile.readline(size)
        self.remaining -= len(data)
        return data
    def readlines(self, sizehint=0):
        # Shamelessly stolen from StringIO
        # NOTE(review): sizehint is also passed to readline() as a max
        # line size -- preserved for backward compatibility.
        total = 0
        lines = []
        line = self.readline(sizehint)
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline(sizehint)
        return lines
    def close(self):
        self.rfile.close()
    def __iter__(self):
        return self
    def __next__(self):
        data = next(self.rfile)
        self.remaining -= len(data)
        return data
    # BUGFIX: this module targets Python 2 (cf. SizeCheckWrapper.next),
    # where the iterator protocol calls next(), not __next__(); without
    # this alias, iterating a KnownLengthRFile raised TypeError there.
    next = __next__
class ChunkedRFile(object):
    """Wraps a file-like object, returning an empty string when exhausted.
    This class is intended to provide a conforming wsgi.input value for
    request entities that have been encoded with the 'chunked' transfer
    encoding.
    """
    def __init__(self, rfile, maxlen, bufsize=8192):
        self.rfile = rfile
        self.maxlen = maxlen
        self.bytes_read = 0
        self.buffer = ''
        self.bufsize = bufsize
        self.closed = False
    def _fetch(self):
        """Read the next chunk-size line and chunk data into self.buffer."""
        if self.closed:
            return
        line = self.rfile.readline()
        self.bytes_read += len(line)
        if self.maxlen and self.bytes_read > self.maxlen:
            raise MaxSizeExceeded("Request Entity Too Large", self.maxlen)
        line = line.strip().split(";", 1)
        try:
            chunk_size = line.pop(0)
            chunk_size = int(chunk_size, 16)
        except ValueError:
            raise ValueError("Bad chunked transfer size: " + repr(chunk_size))
        if chunk_size <= 0:
            # A zero-size chunk marks the end of the entity.
            self.closed = True
            return
        ## if line: chunk_extension = line[0]
        if self.maxlen and self.bytes_read + chunk_size > self.maxlen:
            raise IOError("Request Entity Too Large")
        chunk = self.rfile.read(chunk_size)
        self.bytes_read += len(chunk)
        self.buffer += chunk
        crlf = self.rfile.read(2)
        if crlf != CRLF:
            raise ValueError(
                 "Bad chunked transfer coding (expected '\\r\\n', "
                 "got " + repr(crlf) + ")")
    def read(self, size=None):
        """Read up to *size* bytes (everything when size is falsy)."""
        data = ''
        while True:
            if size and len(data) >= size:
                return data
            if not self.buffer:
                self._fetch()
                if not self.buffer:
                    # EOF
                    return data
            if size:
                remaining = size - len(data)
                data += self.buffer[:remaining]
                self.buffer = self.buffer[remaining:]
            else:
                # BUGFIX: the buffer must be drained here, otherwise this
                # loop never terminated (buffer stayed non-empty forever).
                data += self.buffer
                self.buffer = ''
    def readline(self, size=None):
        """Read one line (including the newline), or up to *size* bytes."""
        data = ''
        while True:
            if size and len(data) >= size:
                return data
            if not self.buffer:
                self._fetch()
                if not self.buffer:
                    # EOF
                    return data
            newline_pos = self.buffer.find('\n')
            if size:
                if newline_pos == -1:
                    remaining = size - len(data)
                    data += self.buffer[:remaining]
                    self.buffer = self.buffer[remaining:]
                else:
                    # BUGFIX: consume *through* the newline and return.
                    # The old code stopped short of '\n', leaving it at
                    # the buffer head and making no further progress.
                    remaining = min(size - len(data), newline_pos + 1)
                    data += self.buffer[:remaining]
                    self.buffer = self.buffer[remaining:]
                    if data[-1:] == '\n':
                        return data
            else:
                if newline_pos == -1:
                    # BUGFIX: drain the buffer so the next iteration can
                    # fetch more data instead of spinning forever.
                    data += self.buffer
                    self.buffer = ''
                else:
                    data += self.buffer[:newline_pos + 1]
                    self.buffer = self.buffer[newline_pos + 1:]
                    return data
    def readlines(self, sizehint=0):
        # Shamelessly stolen from StringIO
        total = 0
        lines = []
        line = self.readline(sizehint)
        while line:
            lines.append(line)
            total += len(line)
            if 0 < sizehint <= total:
                break
            line = self.readline(sizehint)
        return lines
    def read_trailer_lines(self):
        """Yield trailer header lines following the terminating chunk."""
        if not self.closed:
            raise ValueError(
                "Cannot read trailers until the request body has been read.")
        while True:
            line = self.rfile.readline()
            if not line:
                # No more data--illegal end of headers
                raise ValueError("Illegal end of headers.")
            self.bytes_read += len(line)
            if self.maxlen and self.bytes_read > self.maxlen:
                raise IOError("Request Entity Too Large")
            if line == CRLF:
                # Normal end of headers
                break
            if not line.endswith(CRLF):
                raise ValueError("HTTP requires CRLF terminators")
            yield line
    def close(self):
        self.rfile.close()
    def __iter__(self):
        # BUGFIX: the old implementation referenced an undefined name
        # 'sizehint', raising NameError on first use. Iterate line by
        # line until EOF instead.
        line = self.readline()
        while line:
            yield line
            line = self.readline()
class HTTPRequest(object):
    """An HTTP Request (and response).
    A single HTTP connection may consist of multiple request/response pairs.
    """
    server = None
    """The HTTPServer object which is receiving this request."""
    conn = None
    """The HTTPConnection object on which this request connected."""
    inheaders = {}
    """A dict of request headers."""
    outheaders = []
    """A list of header tuples to write in the response."""
    ready = False
    """When True, the request has been parsed and is ready to begin generating
    the response. When False, signals the calling Connection that the response
    should not be generated and the connection should close."""
    close_connection = False
    """Signals the calling Connection that the request should close. This does
    not imply an error! The client and/or server may each request that the
    connection be closed."""
    chunked_write = False
    """If True, output will be encoded with the "chunked" transfer-coding.
    This value is set automatically inside send_headers."""
    def __init__(self, server, conn):
        self.server= server
        self.conn = conn
        self.ready = False
        self.started_request = False
        self.scheme = "http"
        if self.server.ssl_adapter is not None:
            self.scheme = "https"
        # Use the lowest-common protocol in case read_request_line errors.
        self.response_protocol = 'HTTP/1.0'
        self.inheaders = {}
        self.status = ""
        self.outheaders = []
        self.sent_headers = False
        self.close_connection = False
        self.chunked_read = False
        self.chunked_write = False
    def parse_request(self):
        """Parse the next HTTP request start-line and message-headers."""
        # Wrap the connection's rfile so oversized request lines/headers
        # raise MaxSizeExceeded instead of buffering without bound.
        self.rfile = SizeCheckWrapper(self.conn.rfile,
                                      self.server.max_request_header_size)
        try:
            self.read_request_line()
        except MaxSizeExceeded:
            self.simple_response("414 Request-URI Too Long",
                "The Request-URI sent with the request exceeds the maximum "
                "allowed bytes.")
            return
        try:
            success = self.read_request_headers()
        except MaxSizeExceeded:
            self.simple_response("413 Request Entity Too Large",
                "The headers sent with the request exceed the maximum "
                "allowed bytes.")
            return
        else:
            if not success:
                return
        self.ready = True
    def read_request_line(self):
        """Read and parse the Request-Line, setting uri/method/path/qs
        and negotiating the response protocol version."""
        # HTTP/1.1 connections are persistent by default. If a client
        # requests a page, then idles (leaves the connection open),
        # then rfile.readline() will raise socket.error("timed out").
        # Note that it does this based on the value given to settimeout(),
        # and doesn't need the client to request or acknowledge the close
        # (although your TCP stack might suffer for it: cf Apache's history
        # with FIN_WAIT_2).
        request_line = self.rfile.readline()
        # Set started_request to True so communicate() knows to send 408
        # from here on out.
        self.started_request = True
        if not request_line:
            # Force self.ready = False so the connection will close.
            self.ready = False
            return
        if request_line == CRLF:
            # RFC 2616 sec 4.1: "...if the server is reading the protocol
            # stream at the beginning of a message and receives a CRLF
            # first, it should ignore the CRLF."
            # But only ignore one leading line! else we enable a DoS.
            request_line = self.rfile.readline()
            if not request_line:
                self.ready = False
                return
        if not request_line.endswith(CRLF):
            self.simple_response("400 Bad Request", "HTTP requires CRLF terminators")
            return
        try:
            method, uri, req_protocol = request_line.strip().split(" ", 2)
        except ValueError:
            self.simple_response("400 Bad Request", "Malformed Request-Line")
            return
        self.uri = uri
        self.method = method
        # uri may be an abs_path (including "http://host.domain.tld");
        scheme, authority, path = self.parse_request_uri(uri)
        if '#' in path:
            self.simple_response("400 Bad Request",
                                 "Illegal #fragment in Request-URI.")
            return
        if scheme:
            self.scheme = scheme
        qs = ''
        if '?' in path:
            path, qs = path.split('?', 1)
        # Unquote the path+params (e.g. "/this%20path" -> "/this path").
        # http://www.w3.org/Protocols/rfc2616/rfc2616-sec5.html#sec5.1.2
        #
        # But note that "...a URI must be separated into its components
        # before the escaped characters within those components can be
        # safely decoded." http://www.ietf.org/rfc/rfc2396.txt, sec 2.4.2
        # Therefore, "/this%2Fpath" becomes "/this%2Fpath", not "/this/path".
        try:
            atoms = [unquote(x) for x in quoted_slash.split(path)]
        except ValueError, ex:
            self.simple_response("400 Bad Request", ex.args[0])
            return
        path = "%2F".join(atoms)
        self.path = path
        # Note that, like wsgiref and most other HTTP servers,
        # we "% HEX HEX"-unquote the path but not the query string.
        self.qs = qs
        # Compare request and server HTTP protocol versions, in case our
        # server does not support the requested protocol. Limit our output
        # to min(req, server). We want the following output:
        #     request    server    actual written   supported response
        #     protocol   protocol  response protocol    feature set
        # a     1.0        1.0           1.0                1.0
        # b     1.0        1.1           1.1                1.0
        # c     1.1        1.0           1.0                1.0
        # d     1.1        1.1           1.1                1.1
        # Notice that, in (b), the response will be "HTTP/1.1" even though
        # the client only understands 1.0. RFC 2616 10.5.6 says we should
        # only return 505 if the _major_ version is different.
        # NOTE(review): indexing [5] and [7] assumes single-digit
        # "HTTP/x.y" version numbers.
        rp = int(req_protocol[5]), int(req_protocol[7])
        sp = int(self.server.protocol[5]), int(self.server.protocol[7])
        if sp[0] != rp[0]:
            self.simple_response("505 HTTP Version Not Supported")
            return
        self.request_protocol = req_protocol
        self.response_protocol = "HTTP/%s.%s" % min(rp, sp)
    def read_request_headers(self):
        """Read self.rfile into self.inheaders. Return success."""
        # then all the http headers
        try:
            read_headers(self.rfile, self.inheaders)
        except ValueError, ex:
            self.simple_response("400 Bad Request", ex.args[0])
            return False
        mrbs = self.server.max_request_body_size
        if mrbs and int(self.inheaders.get("Content-Length", 0)) > mrbs:
            self.simple_response("413 Request Entity Too Large",
                "The entity sent with the request exceeds the maximum "
                "allowed bytes.")
            return False
        # Persistent connection support
        if self.response_protocol == "HTTP/1.1":
            # Both server and client are HTTP/1.1
            if self.inheaders.get("Connection", "") == "close":
                self.close_connection = True
        else:
            # Either the server or client (or both) are HTTP/1.0
            if self.inheaders.get("Connection", "") != "Keep-Alive":
                self.close_connection = True
        # Transfer-Encoding support
        te = None
        if self.response_protocol == "HTTP/1.1":
            te = self.inheaders.get("Transfer-Encoding")
            if te:
                te = [x.strip().lower() for x in te.split(",") if x.strip()]
        if te:
            for enc in te:
                if enc == "chunked":
                    self.chunked_read = True
                else:
                    # Note that, even if we see "chunked", we must reject
                    # if there is an extension we don't recognize.
                    self.simple_response("501 Unimplemented")
                    self.close_connection = True
                    return False
        # From PEP 333:
        # "Servers and gateways that implement HTTP 1.1 must provide
        # transparent support for HTTP 1.1's "expect/continue" mechanism.
        # This may be done in any of several ways:
        #   1. Respond to requests containing an Expect: 100-continue request
        #      with an immediate "100 Continue" response, and proceed normally.
        #   2. Proceed with the request normally, but provide the application
        #      with a wsgi.input stream that will send the "100 Continue"
        #      response if/when the application first attempts to read from
        #      the input stream. The read request must then remain blocked
        #      until the client responds.
        #   3. Wait until the client decides that the server does not support
        #      expect/continue, and sends the request body on its own.
        #      (This is suboptimal, and is not recommended.)
        #
        # We used to do 3, but are now doing 1. Maybe we'll do 2 someday,
        # but it seems like it would be a big slowdown for such a rare case.
        if self.inheaders.get("Expect", "") == "100-continue":
            # Don't use simple_response here, because it emits headers
            # we don't want. See http://www.cherrypy.org/ticket/951
            msg = self.server.protocol + " 100 Continue\r\n\r\n"
            try:
                self.conn.wfile.sendall(msg)
            except socket.error, x:
                if x.args[0] not in socket_errors_to_ignore:
                    raise
        return True
    def parse_request_uri(self, uri):
        """Parse a Request-URI into (scheme, authority, path).
        Note that Request-URI's must be one of::
            Request-URI    = "*" | absoluteURI | abs_path | authority
        Therefore, a Request-URI which starts with a double forward-slash
        cannot be a "net_path"::
            net_path      = "//" authority [ abs_path ]
        Instead, it must be interpreted as an "abs_path" with an empty first
        path segment::
            abs_path      = "/"  path_segments
            path_segments = segment *( "/" segment )
            segment       = *pchar *( ";" param )
            param         = *pchar
        """
        if uri == "*":
            return None, None, uri
        i = uri.find('://')
        if i > 0 and '?' not in uri[:i]:
            # An absoluteURI.
            # If there's a scheme (and it must be http or https), then:
            # http_URL = "http:" "//" host [ ":" port ] [ abs_path [ "?" query ]]
            scheme, remainder = uri[:i].lower(), uri[i + 3:]
            authority, path = remainder.split("/", 1)
            return scheme, authority, path
        if uri.startswith('/'):
            # An abs_path.
            return None, None, uri
        else:
            # An authority.
            return None, uri, None
    def respond(self):
        """Call the gateway and write its iterable output."""
        mrbs = self.server.max_request_body_size
        if self.chunked_read:
            self.rfile = ChunkedRFile(self.conn.rfile, mrbs)
        else:
            cl = int(self.inheaders.get("Content-Length", 0))
            if mrbs and mrbs < cl:
                if not self.sent_headers:
                    self.simple_response("413 Request Entity Too Large",
                        "The entity sent with the request exceeds the maximum "
                        "allowed bytes.")
                return
            self.rfile = KnownLengthRFile(self.conn.rfile, cl)
        self.server.gateway(self).respond()
        if (self.ready and not self.sent_headers):
            self.sent_headers = True
            self.send_headers()
        if self.chunked_write:
            # Terminating zero-length chunk for chunked responses.
            self.conn.wfile.sendall("0\r\n\r\n")
    def simple_response(self, status, msg=""):
        """Write a simple response back to the client."""
        status = str(status)
        buf = ["Content-Length: %s\r\n" % len(msg),
               "Content-Type: text/plain\r\n"]
        if status[:3] in ("413", "414"):
            # Request Entity Too Large / Request-URI Too Long
            self.close_connection = True
            if self.response_protocol == 'HTTP/1.1':
                # This will not be true for 414, since read_request_line
                # usually raises 414 before reading the whole line, and we
                # therefore cannot know the proper response_protocol.
                buf.append("Connection: close\r\n")
            else:
                # HTTP/1.0 had no 413/414 status nor Connection header.
                # Emit 400 instead and trust the message body is enough.
                status = "400 Bad Request"
        buf.append(CRLF)
        if msg:
            if isinstance(msg, unicode):
                msg = msg.encode("ISO-8859-1")
            buf.append(msg)
        status_line = self.server.protocol + " " + status + CRLF
        try:
            self.conn.wfile.sendall(status_line + "".join(buf))
        except socket.error, x:
            if x.args[0] not in socket_errors_to_ignore:
                raise
    def write(self, chunk):
        """Write unbuffered data to the client."""
        if self.chunked_write and chunk:
            # chunk-size in hex, CRLF, chunk data, CRLF (RFC 2616 3.6.1).
            buf = [hex(len(chunk))[2:], CRLF, chunk, CRLF]
            self.conn.wfile.sendall("".join(buf))
        else:
            self.conn.wfile.sendall(chunk)
    def send_headers(self):
        """Assert, process, and send the HTTP response message-headers.
        You must set self.status, and self.outheaders before calling this.
        """
        hkeys = [key.lower() for key, value in self.outheaders]
        status = int(self.status[:3])
        if status == 413:
            # Request Entity Too Large. Close conn to avoid garbage.
            self.close_connection = True
        elif "content-length" not in hkeys:
            # "All 1xx (informational), 204 (no content),
            # and 304 (not modified) responses MUST NOT
            # include a message-body." So no point chunking.
            if status < 200 or status in (204, 205, 304):
                pass
            else:
                if (self.response_protocol == 'HTTP/1.1'
                    and self.method != 'HEAD'):
                    # Use the chunked transfer-coding
                    self.chunked_write = True
                    self.outheaders.append(("Transfer-Encoding", "chunked"))
                else:
                    # Closing the conn is the only way to determine len.
                    self.close_connection = True
        if "connection" not in hkeys:
            if self.response_protocol == 'HTTP/1.1':
                # Both server and client are HTTP/1.1 or better
                if self.close_connection:
                    self.outheaders.append(("Connection", "close"))
            else:
                # Server and/or client are HTTP/1.0
                if not self.close_connection:
                    self.outheaders.append(("Connection", "Keep-Alive"))
        if (not self.close_connection) and (not self.chunked_read):
            # Read any remaining request body data on the socket.
            # "If an origin server receives a request that does not include an
            # Expect request-header field with the "100-continue" expectation,
            # the request includes a request body, and the server responds
            # with a final status code before reading the entire request body
            # from the transport connection, then the server SHOULD NOT close
            # the transport connection until it has read the entire request,
            # or until the client closes the connection. Otherwise, the client
            # might not reliably receive the response message. However, this
            # requirement is not be construed as preventing a server from
            # defending itself against denial-of-service attacks, or from
            # badly broken client implementations."
            remaining = getattr(self.rfile, 'remaining', 0)
            if remaining > 0:
                self.rfile.read(remaining)
        if "date" not in hkeys:
            self.outheaders.append(("Date", rfc822.formatdate()))
        if "server" not in hkeys:
            self.outheaders.append(("Server", self.server.server_name))
        buf = [self.server.protocol + " " + self.status + CRLF]
        for k, v in self.outheaders:
            buf.append(k + ": " + v + CRLF)
        buf.append(CRLF)
        self.conn.wfile.sendall("".join(buf))
class NoSSLError(Exception):
    """Raised when a client speaks plain HTTP to an HTTPS socket."""
class FatalSSLAlert(Exception):
    """Raised when the SSL implementation signals a fatal alert."""
if not _fileobject_uses_str_type:
class CP_fileobject(socket._fileobject):
"""Faux file object attached to a socket object."""
        def sendall(self, data):
            """Sendall for non-blocking sockets.
            Loops until every byte of *data* has been handed to send().
            """
            while data:
                try:
                    bytes_sent = self.send(data)
                    data = data[bytes_sent:]
                except socket.error, e:
                    # Busy-retry on EAGAIN/EWOULDBLOCK; any other socket
                    # error is fatal and propagates to the caller.
                    if e.args[0] not in socket_errors_nonblocking:
                        raise
        def send(self, data):
            # Single raw send(); may write fewer bytes than len(data) --
            # callers (sendall) must check the returned count.
            return self._sock.send(data)
def flush(self):
if self._wbuf:
buffer = "".join(self._wbuf)
self._wbuf = []
self.sendall(buffer)
        def recv(self, size):
            """Receive up to *size* bytes, retrying when the call is
            interrupted (EINTR) or would block on a non-blocking socket."""
            while True:
                try:
                    return self._sock.recv(size)
                except socket.error, e:
                    if (e.args[0] not in socket_errors_nonblocking
                        and e.args[0] not in socket_error_eintr):
                        raise
        def read(self, size=-1):
            """Read *size* bytes, or until EOF when size < 0, buffering
            leftover data from recv() in self._rbuf (a StringIO)."""
            # Use max, disallow tiny reads in a loop as they are very inefficient.
            # We never leave read() with any leftover data from a new recv() call
            # in our internal buffer.
            rbufsize = max(self._rbufsize, self.default_bufsize)
            # Our use of StringIO rather than lists of string objects returned by
            # recv() minimizes memory usage and fragmentation that occurs when
            # rbufsize is large compared to the typical return value of recv().
            buf = self._rbuf
            buf.seek(0, 2)  # seek end
            if size < 0:
                # Read until EOF
                self._rbuf = StringIO.StringIO()  # reset _rbuf.  we consume it via buf.
                while True:
                    data = self.recv(rbufsize)
                    if not data:
                        break
                    buf.write(data)
                return buf.getvalue()
            else:
                # Read until size bytes or EOF seen, whichever comes first
                buf_len = buf.tell()
                if buf_len >= size:
                    # Already have size bytes in our buffer?  Extract and return.
                    buf.seek(0)
                    rv = buf.read(size)
                    self._rbuf = StringIO.StringIO()
                    self._rbuf.write(buf.read())
                    return rv
                self._rbuf = StringIO.StringIO()  # reset _rbuf.  we consume it via buf.
                while True:
                    left = size - buf_len
                    # recv() will malloc the amount of memory given as its
                    # parameter even though it often returns much less data
                    # than that.  The returned data string is short lived
                    # as we copy it into a StringIO and free it.  This avoids
                    # fragmentation issues on many platforms.
                    data = self.recv(left)
                    if not data:
                        break
                    n = len(data)
                    if n == size and not buf_len:
                        # Shortcut.  Avoid buffer data copies when:
                        # - We have no data in our buffer.
                        # AND
                        # - Our call to recv returned exactly the
                        #   number of bytes we were asked to read.
                        return data
                    if n == left:
                        buf.write(data)
                        del data  # explicit free
                        break
                    assert n <= left, "recv(%d) returned %d bytes" % (left, n)
                    buf.write(data)
                    buf_len += n
                    del data  # explicit free
                    #assert buf_len == buf.tell()
                return buf.getvalue()
        def readline(self, size=-1):
            """Read one line, up to *size* bytes when *size* >= 0.

            Checks the internal StringIO buffer first; excess bytes past
            the newline (or *size*) are saved back into self._rbuf.
            """
            buf = self._rbuf
            buf.seek(0, 2) # seek end
            if buf.tell() > 0:
                # check if we already have it in our buffer
                buf.seek(0)
                bline = buf.readline(size)
                if bline.endswith('\n') or len(bline) == size:
                    self._rbuf = StringIO.StringIO()
                    self._rbuf.write(buf.read())
                    return bline
                del bline
            if size < 0:
                # Read until \n or EOF, whichever comes first
                if self._rbufsize <= 1:
                    # Speed up unbuffered case
                    buf.seek(0)
                    buffers = [buf.read()]
                    self._rbuf = StringIO.StringIO() # reset _rbuf. we consume it via buf.
                    data = None
                    recv = self.recv
                    while data != "\n":
                        data = recv(1)
                        if not data:
                            break
                        buffers.append(data)
                    return "".join(buffers)
                buf.seek(0, 2) # seek end
                self._rbuf = StringIO.StringIO() # reset _rbuf. we consume it via buf.
                while True:
                    data = self.recv(self._rbufsize)
                    if not data:
                        break
                    nl = data.find('\n')
                    if nl >= 0:
                        nl += 1
                        buf.write(data[:nl])
                        self._rbuf.write(data[nl:])
                        del data
                        break
                    buf.write(data)
                return buf.getvalue()
            else:
                # Read until size bytes or \n or EOF seen, whichever comes first
                buf.seek(0, 2) # seek end
                buf_len = buf.tell()
                if buf_len >= size:
                    buf.seek(0)
                    rv = buf.read(size)
                    self._rbuf = StringIO.StringIO()
                    self._rbuf.write(buf.read())
                    return rv
                self._rbuf = StringIO.StringIO() # reset _rbuf. we consume it via buf.
                while True:
                    data = self.recv(self._rbufsize)
                    if not data:
                        break
                    left = size - buf_len
                    # did we just receive a newline?
                    nl = data.find('\n', 0, left)
                    if nl >= 0:
                        nl += 1
                        # save the excess data to _rbuf
                        self._rbuf.write(data[nl:])
                        if buf_len:
                            buf.write(data[:nl])
                            break
                        else:
                            # Shortcut. Avoid data copy through buf when returning
                            # a substring of our first recv().
                            return data[:nl]
                    n = len(data)
                    if n == size and not buf_len:
                        # Shortcut. Avoid data copy through buf when
                        # returning exactly all of our first recv().
                        return data
                    if n >= left:
                        buf.write(data[:left])
                        self._rbuf.write(data[left:])
                        break
                    buf.write(data)
                    buf_len += n
                    #assert buf_len == buf.tell()
                return buf.getvalue()
else:
    class CP_fileobject(socket._fileobject):
        """Faux file object attached to a socket object.

        This variant keeps its read buffer as a plain string in
        self._rbuf (rather than a StringIO).
        """
        def sendall(self, data):
            """Sendall for non-blocking sockets."""
            while data:
                try:
                    bytes_sent = self.send(data)
                    data = data[bytes_sent:]
                except socket.error, e:
                    # Retry when the socket reports a non-blocking error code.
                    if e.args[0] not in socket_errors_nonblocking:
                        raise
        def send(self, data):
            # Delegate a single send to the wrapped socket.
            return self._sock.send(data)
        def flush(self):
            # Join the pending write buffer and push it in one sendall().
            if self._wbuf:
                buffer = "".join(self._wbuf)
                self._wbuf = []
                self.sendall(buffer)
        def recv(self, size):
            # Retry recv() on non-blocking/EINTR error codes; re-raise others.
            while True:
                try:
                    return self._sock.recv(size)
                except socket.error, e:
                    if (e.args[0] not in socket_errors_nonblocking
                        and e.args[0] not in socket_error_eintr):
                        raise
        def read(self, size=-1):
            # Read up to *size* bytes (to EOF when *size* < 0), keeping any
            # excess in the string buffer self._rbuf.
            if size < 0:
                # Read until EOF
                buffers = [self._rbuf]
                self._rbuf = ""
                if self._rbufsize <= 1:
                    recv_size = self.default_bufsize
                else:
                    recv_size = self._rbufsize
                while True:
                    data = self.recv(recv_size)
                    if not data:
                        break
                    buffers.append(data)
                return "".join(buffers)
            else:
                # Read until size bytes or EOF seen, whichever comes first
                data = self._rbuf
                buf_len = len(data)
                if buf_len >= size:
                    self._rbuf = data[size:]
                    return data[:size]
                buffers = []
                if data:
                    buffers.append(data)
                self._rbuf = ""
                while True:
                    left = size - buf_len
                    recv_size = max(self._rbufsize, left)
                    data = self.recv(recv_size)
                    if not data:
                        break
                    buffers.append(data)
                    n = len(data)
                    if n >= left:
                        # Keep the surplus bytes for the next read.
                        self._rbuf = data[left:]
                        buffers[-1] = data[:left]
                        break
                    buf_len += n
                return "".join(buffers)
        def readline(self, size=-1):
            # Read one line (bounded by *size* when *size* >= 0); excess
            # bytes after the newline are saved back into self._rbuf.
            data = self._rbuf
            if size < 0:
                # Read until \n or EOF, whichever comes first
                if self._rbufsize <= 1:
                    # Speed up unbuffered case
                    assert data == ""
                    buffers = []
                    while data != "\n":
                        data = self.recv(1)
                        if not data:
                            break
                        buffers.append(data)
                    return "".join(buffers)
                nl = data.find('\n')
                if nl >= 0:
                    nl += 1
                    self._rbuf = data[nl:]
                    return data[:nl]
                buffers = []
                if data:
                    buffers.append(data)
                self._rbuf = ""
                while True:
                    data = self.recv(self._rbufsize)
                    if not data:
                        break
                    buffers.append(data)
                    nl = data.find('\n')
                    if nl >= 0:
                        nl += 1
                        self._rbuf = data[nl:]
                        buffers[-1] = data[:nl]
                        break
                return "".join(buffers)
            else:
                # Read until size bytes or \n or EOF seen, whichever comes first
                nl = data.find('\n', 0, size)
                if nl >= 0:
                    nl += 1
                    self._rbuf = data[nl:]
                    return data[:nl]
                buf_len = len(data)
                if buf_len >= size:
                    self._rbuf = data[size:]
                    return data[:size]
                buffers = []
                if data:
                    buffers.append(data)
                self._rbuf = ""
                while True:
                    data = self.recv(self._rbufsize)
                    if not data:
                        break
                    buffers.append(data)
                    left = size - buf_len
                    nl = data.find('\n', 0, left)
                    if nl >= 0:
                        nl += 1
                        self._rbuf = data[nl:]
                        buffers[-1] = data[:nl]
                        break
                    n = len(data)
                    if n >= left:
                        self._rbuf = data[left:]
                        buffers[-1] = data[:left]
                        break
                    buf_len += n
                return "".join(buffers)
class HTTPConnection(object):
    """An HTTP connection (active socket).
    server: the Server object which received this connection.
    socket: the raw socket object (usually TCP) for this connection.
    makefile: a fileobject class for reading from the socket.
    """
    # Remote endpoint; filled in by HTTPServer.tick() for TCP sockets.
    remote_addr = None
    remote_port = None
    # WSGI environ entries supplied by the SSL adapter, if any.
    ssl_env = None
    rbufsize = -1
    RequestHandlerClass = HTTPRequest
    def __init__(self, server, sock, makefile=CP_fileobject):
        self.server = server
        self.socket = sock
        self.rfile = makefile(sock, "rb", self.rbufsize)
        self.wfile = makefile(sock, "wb", -1)
    def communicate(self):
        """Read each request and respond appropriately."""
        request_seen = False
        try:
            while True:
                # (re)set req to None so that if something goes wrong in
                # the RequestHandlerClass constructor, the error doesn't
                # get written to the previous request.
                req = None
                req = self.RequestHandlerClass(self.server, self)
                # This order of operations should guarantee correct pipelining.
                req.parse_request()
                if not req.ready:
                    # Something went wrong in the parsing (and the server has
                    # probably already made a simple_response). Return and
                    # let the conn close.
                    return
                request_seen = True
                req.respond()
                if req.close_connection:
                    return
        except socket.error, e:
            errnum = e.args[0]
            if errnum == 'timed out':
                # Don't error if we're between requests; only error
                # if 1) no request has been started at all, or 2) we're
                # in the middle of a request.
                # See http://www.cherrypy.org/ticket/853
                if (not request_seen) or (req and req.started_request):
                    # Don't bother writing the 408 if the response
                    # has already started being written.
                    if req and not req.sent_headers:
                        try:
                            req.simple_response("408 Request Timeout")
                        except FatalSSLAlert:
                            # Close the connection.
                            return
            elif errnum not in socket_errors_to_ignore:
                if req and not req.sent_headers:
                    try:
                        req.simple_response("500 Internal Server Error",
                                            format_exc())
                    except FatalSSLAlert:
                        # Close the connection.
                        return
            return
        except (KeyboardInterrupt, SystemExit):
            raise
        except FatalSSLAlert:
            # Close the connection.
            return
        except NoSSLError:
            if req and not req.sent_headers:
                # Unwrap our wfile
                self.wfile = CP_fileobject(self.socket._sock, "wb", -1)
                req.simple_response("400 Bad Request",
                    "The client sent a plain HTTP request, but "
                    "this server only speaks HTTPS on this port.")
                self.linger = True
        except Exception:
            if req and not req.sent_headers:
                try:
                    req.simple_response("500 Internal Server Error", format_exc())
                except FatalSSLAlert:
                    # Close the connection.
                    return
    # When True, close() skips the socket shutdown so the client gets a
    # chance to read the whole response (set in the NoSSLError handler).
    linger = False
    def close(self):
        """Close the socket underlying this connection."""
        self.rfile.close()
        if not self.linger:
            # Python's socket module does NOT call close on the kernel socket
            # when you call socket.close(). We do so manually here because we
            # want this server to send a FIN TCP segment immediately. Note this
            # must be called *before* calling socket.close(), because the latter
            # drops its reference to the kernel socket.
            if hasattr(self.socket, '_sock'):
                self.socket._sock.close()
            self.socket.close()
        else:
            # On the other hand, sometimes we want to hang around for a bit
            # to make sure the client has a chance to read our entire
            # response. Skipping the close() calls here delays the FIN
            # packet until the socket object is garbage-collected later.
            # Someday, perhaps, we'll do the full lingering_close that
            # Apache does, but not today.
            pass
def format_exc(limit=None):
    """Like print_exc() but return a string. Backport for Python 2.3."""
    exc_type, exc_value, exc_tb = sys.exc_info()
    try:
        lines = traceback.format_exception(exc_type, exc_value, exc_tb, limit)
        return ''.join(lines)
    finally:
        # Break the reference cycle created by holding the traceback.
        exc_type = exc_value = exc_tb = None
# Sentinel pushed onto the request Queue to tell a WorkerThread to exit.
_SHUTDOWNREQUEST = None
class WorkerThread(threading.Thread):
    """Thread which continuously polls a Queue for Connection objects.
    Due to the timing issues of polling a Queue, a WorkerThread does not
    check its own 'ready' flag after it has started. To stop the thread,
    it is necessary to stick a _SHUTDOWNREQUEST object onto the Queue
    (one for each running WorkerThread).
    """
    conn = None
    """The current connection pulled off the Queue, or None."""
    server = None
    """The HTTP Server which spawned this thread, and which owns the
    Queue and is placing active connections into it."""
    ready = False
    """A simple flag for the calling server to know when this thread
    has begun polling the Queue."""
    def __init__(self, server):
        self.ready = False
        self.server = server
        threading.Thread.__init__(self)
    def run(self):
        # Serve connections from the queue until the _SHUTDOWNREQUEST
        # sentinel arrives; Ctrl-C/SystemExit is handed to the server.
        try:
            self.ready = True
            while True:
                conn = self.server.requests.get()
                if conn is _SHUTDOWNREQUEST:
                    return
                self.conn = conn
                try:
                    conn.communicate()
                finally:
                    conn.close()
                    self.conn = None
        except (KeyboardInterrupt, SystemExit), exc:
            self.server.interrupt = exc
class ThreadPool(object):
"""A Request Queue for the CherryPyWSGIServer which pools threads.
ThreadPool objects must provide min, get(), put(obj), start()
and stop(timeout) attributes.
"""
def __init__(self, server, min=10, max=-1):
self.server = server
self.min = min
self.max = max
self._threads = []
self._queue = Queue.Queue()
self.get = self._queue.get
def start(self):
"""Start the pool of threads."""
for i in range(self.min):
self._threads.append(WorkerThread(self.server))
for worker in self._threads:
worker.setName("CP Server " + worker.getName())
worker.start()
for worker in self._threads:
while not worker.ready:
time.sleep(.1)
def _get_idle(self):
"""Number of worker threads which are idle. Read-only."""
return len([t for t in self._threads if t.conn is None])
idle = property(_get_idle, doc=_get_idle.__doc__)
def put(self, obj):
self._queue.put(obj)
if obj is _SHUTDOWNREQUEST:
return
def grow(self, amount):
"""Spawn new worker threads (not above self.max)."""
for i in range(amount):
if self.max > 0 and len(self._threads) >= self.max:
break
worker = WorkerThread(self.server)
worker.setName("CP Server " + worker.getName())
self._threads.append(worker)
worker.start()
def shrink(self, amount):
"""Kill off worker threads (not below self.min)."""
# Grow/shrink the pool if necessary.
# Remove any dead threads from our list
for t in self._threads:
if not t.isAlive():
self._threads.remove(t)
amount -= 1
if amount > 0:
for i in range(min(amount, len(self._threads) - self.min)):
# Put a number of shutdown requests on the queue equal
# to 'amount'. Once each of those is processed by a worker,
# that worker will terminate and be culled from our list
# in self.put.
self._queue.put(_SHUTDOWNREQUEST)
def stop(self, timeout=5):
# Must shut down threads here so the code that calls
# this method can know when all threads are stopped.
for worker in self._threads:
self._queue.put(_SHUTDOWNREQUEST)
# Don't join currentThread (when stop is called inside a request).
current = threading.currentThread()
if timeout and timeout >= 0:
endtime = time.time() + timeout
while self._threads:
worker = self._threads.pop()
if worker is not current and worker.isAlive():
try:
if timeout is None or timeout < 0:
worker.join()
else:
remaining_time = endtime - time.time()
if remaining_time > 0:
worker.join(remaining_time)
if worker.isAlive():
# We exhausted the timeout.
# Forcibly shut down the socket.
c = worker.conn
if c and not c.rfile.closed:
try:
c.socket.shutdown(socket.SHUT_RD)
except TypeError:
# pyOpenSSL sockets don't take an arg
c.socket.shutdown()
worker.join()
except (AssertionError,
# Ignore repeated Ctrl-C.
# See http://www.cherrypy.org/ticket/691.
KeyboardInterrupt), exc1:
pass
# Select a platform-appropriate prevent_socket_inheritance implementation:
# fcntl/FD_CLOEXEC on POSIX, SetHandleInformation via ctypes on Windows,
# or a no-op when neither facility is available.
try:
    import fcntl
except ImportError:
    try:
        from ctypes import windll, WinError
    except ImportError:
        def prevent_socket_inheritance(sock):
            """Dummy function, since neither fcntl nor ctypes are available."""
            pass
    else:
        def prevent_socket_inheritance(sock):
            """Mark the given socket fd as non-inheritable (Windows)."""
            if not windll.kernel32.SetHandleInformation(sock.fileno(), 1, 0):
                raise WinError()
else:
    def prevent_socket_inheritance(sock):
        """Mark the given socket fd as non-inheritable (POSIX)."""
        fd = sock.fileno()
        old_flags = fcntl.fcntl(fd, fcntl.F_GETFD)
        fcntl.fcntl(fd, fcntl.F_SETFD, old_flags | fcntl.FD_CLOEXEC)
class SSLAdapter(object):
    """Base class for SSL driver library adapters.
    Required methods:
        * ``wrap(sock) -> (wrapped socket, ssl environ dict)``
        * ``makefile(sock, mode='r', bufsize=-1) -> socket file object``
    """
    def __init__(self, certificate, private_key, certificate_chain=None):
        self.certificate = certificate
        self.private_key = private_key
        self.certificate_chain = certificate_chain
    def wrap(self, sock):
        """Wrap the given socket; must be overridden in a subclass."""
        # Bug fix: 'raise NotImplemented' raised a TypeError, because
        # NotImplemented is a sentinel value, not an exception type.
        raise NotImplementedError
    def makefile(self, sock, mode='r', bufsize=-1):
        """Return a file-like wrapper for sock; must be overridden."""
        raise NotImplementedError
class HTTPServer(object):
    """An HTTP server."""
    _bind_addr = "127.0.0.1"
    _interrupt = None
    gateway = None
    """A Gateway instance."""
    minthreads = None
    """The minimum number of worker threads to create (default 10)."""
    maxthreads = None
    """The maximum number of worker threads to create (default -1 = no limit)."""
    server_name = None
    """The name of the server; defaults to socket.gethostname()."""
    protocol = "HTTP/1.1"
    """The version string to write in the Status-Line of all HTTP responses.
    For example, "HTTP/1.1" is the default. This also limits the supported
    features used in the response."""
    request_queue_size = 5
    """The 'backlog' arg to socket.listen(); max queued connections (default 5)."""
    shutdown_timeout = 5
    """The total time, in seconds, to wait for worker threads to cleanly exit."""
    timeout = 10
    """The timeout in seconds for accepted connections (default 10)."""
    version = "CherryPy/3.2.0rc1"
    """A version string for the HTTPServer."""
    software = None
    """The value to set for the SERVER_SOFTWARE entry in the WSGI environ.
    If None, this defaults to ``'%s Server' % self.version``."""
    ready = False
    """An internal flag which marks whether the socket is accepting connections."""
    max_request_header_size = 0
    """The maximum size, in bytes, for request headers, or 0 for no limit."""
    max_request_body_size = 0
    """The maximum size, in bytes, for request bodies, or 0 for no limit."""
    nodelay = True
    """If True (the default since 3.1), sets the TCP_NODELAY socket option."""
    ConnectionClass = HTTPConnection
    """The class to use for handling HTTP connections."""
    ssl_adapter = None
    """An instance of SSLAdapter (or a subclass).
    You must have the corresponding SSL driver library installed."""
    def __init__(self, bind_addr, gateway, minthreads=10, maxthreads=-1,
                 server_name=None):
        self.bind_addr = bind_addr
        self.gateway = gateway
        # One worker-thread pool shared by all connections.
        self.requests = ThreadPool(self, min=minthreads or 1, max=maxthreads)
        if not server_name:
            server_name = socket.gethostname()
        self.server_name = server_name
    def __str__(self):
        # e.g. "module.HTTPServer(('0.0.0.0', 8080))"
        return "%s.%s(%r)" % (self.__module__, self.__class__.__name__,
                              self.bind_addr)
    def _get_bind_addr(self):
        return self._bind_addr
    def _set_bind_addr(self, value):
        # Validate TCP addresses; '' and None hosts are rejected (below).
        if isinstance(value, tuple) and value[0] in ('', None):
            # Despite the socket module docs, using '' does not
            # allow AI_PASSIVE to work. Passing None instead
            # returns '0.0.0.0' like we want. In other words:
            # host AI_PASSIVE result
            # '' Y 192.168.x.y
            # '' N 192.168.x.y
            # None Y 0.0.0.0
            # None N 127.0.0.1
            # But since you can get the same effect with an explicit
            # '0.0.0.0', we deny both the empty string and None as values.
            raise ValueError("Host values of '' or None are not allowed. "
                             "Use '0.0.0.0' (IPv4) or '::' (IPv6) instead "
                             "to listen on all active interfaces.")
        self._bind_addr = value
    bind_addr = property(_get_bind_addr, _set_bind_addr,
        doc="""The interface on which to listen for connections.
        For TCP sockets, a (host, port) tuple. Host values may be any IPv4
        or IPv6 address, or any valid hostname. The string 'localhost' is a
        synonym for '127.0.0.1' (or '::1', if your hosts file prefers IPv6).
        The string '0.0.0.0' is a special IPv4 entry meaning "any active
        interface" (INADDR_ANY), and '::' is the similar IN6ADDR_ANY for
        IPv6. The empty string or None are not allowed.
        For UNIX sockets, supply the filename as a string.""")
    def start(self):
        """Run the server forever."""
        # We don't have to trap KeyboardInterrupt or SystemExit here,
        # because cherrypy.server already does so, calling self.stop() for us.
        # If you're using this server with another framework, you should
        # trap those exceptions in whatever code block calls start().
        self._interrupt = None
        if self.software is None:
            self.software = "%s Server" % self.version
        # SSL backward compatibility
        if (self.ssl_adapter is None and
            getattr(self, 'ssl_certificate', None) and
            getattr(self, 'ssl_private_key', None)):
            warnings.warn(
                "SSL attributes are deprecated in CherryPy 3.2, and will "
                "be removed in CherryPy 3.3. Use an ssl_adapter attribute "
                "instead.",
                DeprecationWarning
            )
            try:
                from cherrypy.wsgiserver.ssl_pyopenssl import pyOpenSSLAdapter
            except ImportError:
                pass
            else:
                self.ssl_adapter = pyOpenSSLAdapter(
                    self.ssl_certificate, self.ssl_private_key,
                    getattr(self, 'ssl_certificate_chain', None))
        # Select the appropriate socket
        if isinstance(self.bind_addr, basestring):
            # AF_UNIX socket
            # So we can reuse the socket...
            try: os.unlink(self.bind_addr)
            except: pass
            # So everyone can access the socket...
            try: os.chmod(self.bind_addr, 0777)
            except: pass
            info = [(socket.AF_UNIX, socket.SOCK_STREAM, 0, "", self.bind_addr)]
        else:
            # AF_INET or AF_INET6 socket
            # Get the correct address family for our host (allows IPv6 addresses)
            host, port = self.bind_addr
            try:
                info = socket.getaddrinfo(host, port, socket.AF_UNSPEC,
                                          socket.SOCK_STREAM, 0, socket.AI_PASSIVE)
            except socket.gaierror:
                if ':' in self.bind_addr[0]:
                    info = [(socket.AF_INET6, socket.SOCK_STREAM,
                             0, "", self.bind_addr + (0, 0))]
                else:
                    info = [(socket.AF_INET, socket.SOCK_STREAM,
                             0, "", self.bind_addr)]
        self.socket = None
        msg = "No socket could be created"
        for res in info:
            af, socktype, proto, canonname, sa = res
            try:
                self.bind(af, socktype, proto)
            except socket.error, msg:
                if self.socket:
                    self.socket.close()
                self.socket = None
                continue
            break
        if not self.socket:
            raise socket.error(msg)
        # Timeout so KeyboardInterrupt can be caught on Win32
        self.socket.settimeout(1)
        self.socket.listen(self.request_queue_size)
        # Create worker threads
        self.requests.start()
        self.ready = True
        while self.ready:
            self.tick()
            if self.interrupt:
                while self.interrupt is True:
                    # Wait for self.stop() to complete. See _set_interrupt.
                    time.sleep(0.1)
                if self.interrupt:
                    raise self.interrupt
    def bind(self, family, type, proto=0):
        """Create (or recreate) the actual socket object."""
        self.socket = socket.socket(family, type, proto)
        prevent_socket_inheritance(self.socket)
        self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        if self.nodelay and not isinstance(self.bind_addr, str):
            self.socket.setsockopt(socket.IPPROTO_TCP, socket.TCP_NODELAY, 1)
        if self.ssl_adapter is not None:
            self.socket = self.ssl_adapter.bind(self.socket)
        # If listening on the IPV6 any address ('::' = IN6ADDR_ANY),
        # activate dual-stack. See http://www.cherrypy.org/ticket/871.
        if (family == socket.AF_INET6
            and self.bind_addr[0] in ('::', '::0', '::0.0.0.0')):
            try:
                self.socket.setsockopt(socket.IPPROTO_IPV6, socket.IPV6_V6ONLY, 0)
            except (AttributeError, socket.error):
                # Apparently, the socket option is not available in
                # this machine's TCP stack
                pass
        self.socket.bind(self.bind_addr)
    def tick(self):
        """Accept a new connection and put it on the Queue."""
        try:
            s, addr = self.socket.accept()
            if not self.ready:
                return
            prevent_socket_inheritance(s)
            if hasattr(s, 'settimeout'):
                s.settimeout(self.timeout)
            makefile = CP_fileobject
            ssl_env = {}
            # if ssl cert and key are set, we try to be a secure HTTP server
            if self.ssl_adapter is not None:
                try:
                    s, ssl_env = self.ssl_adapter.wrap(s)
                except NoSSLError:
                    msg = ("The client sent a plain HTTP request, but "
                           "this server only speaks HTTPS on this port.")
                    buf = ["%s 400 Bad Request\r\n" % self.protocol,
                           "Content-Length: %s\r\n" % len(msg),
                           "Content-Type: text/plain\r\n\r\n",
                           msg]
                    wfile = CP_fileobject(s, "wb", -1)
                    try:
                        wfile.sendall("".join(buf))
                    except socket.error, x:
                        if x.args[0] not in socket_errors_to_ignore:
                            raise
                    return
                if not s:
                    return
                makefile = self.ssl_adapter.makefile
            conn = self.ConnectionClass(self, s, makefile)
            if not isinstance(self.bind_addr, basestring):
                # optional values
                # Until we do DNS lookups, omit REMOTE_HOST
                if addr is None: # sometimes this can happen
                    # figure out if AF_INET or AF_INET6.
                    if len(s.getsockname()) == 2:
                        # AF_INET
                        addr = ('0.0.0.0', 0)
                    else:
                        # AF_INET6
                        addr = ('::', 0)
                conn.remote_addr = addr[0]
                conn.remote_port = addr[1]
            conn.ssl_env = ssl_env
            self.requests.put(conn)
        except socket.timeout:
            # The only reason for the timeout in start() is so we can
            # notice keyboard interrupts on Win32, which don't interrupt
            # accept() by default
            return
        except socket.error, x:
            if x.args[0] in socket_error_eintr:
                # I *think* this is right. EINTR should occur when a signal
                # is received during the accept() call; all docs say retry
                # the call, and I *think* I'm reading it right that Python
                # will then go ahead and poll for and handle the signal
                # elsewhere. See http://www.cherrypy.org/ticket/707.
                return
            if x.args[0] in socket_errors_nonblocking:
                # Just try again. See http://www.cherrypy.org/ticket/479.
                return
            if x.args[0] in socket_errors_to_ignore:
                # Our socket was closed.
                # See http://www.cherrypy.org/ticket/686.
                return
            raise
    def _get_interrupt(self):
        return self._interrupt
    def _set_interrupt(self, interrupt):
        # Temporarily mark True so start() spins until stop() completes,
        # then store the real exception for start() to re-raise.
        self._interrupt = True
        self.stop()
        self._interrupt = interrupt
    interrupt = property(_get_interrupt, _set_interrupt,
                         doc="Set this to an Exception instance to "
                             "interrupt the server.")
    def stop(self):
        """Gracefully shutdown a server that is serving forever."""
        self.ready = False
        sock = getattr(self, "socket", None)
        if sock:
            if not isinstance(self.bind_addr, basestring):
                # Touch our own socket to make accept() return immediately.
                try:
                    host, port = sock.getsockname()[:2]
                except socket.error, x:
                    if x.args[0] not in socket_errors_to_ignore:
                        # Changed to use error code and not message
                        # See http://www.cherrypy.org/ticket/860.
                        raise
                else:
                    # Note that we're explicitly NOT using AI_PASSIVE,
                    # here, because we want an actual IP to touch.
                    # localhost won't work if we've bound to a public IP,
                    # but it will if we bound to '0.0.0.0' (INADDR_ANY).
                    for res in socket.getaddrinfo(host, port, socket.AF_UNSPEC,
                                                  socket.SOCK_STREAM):
                        af, socktype, proto, canonname, sa = res
                        s = None
                        try:
                            s = socket.socket(af, socktype, proto)
                            # See http://groups.google.com/group/cherrypy-users/
                            # browse_frm/thread/bbfe5eb39c904fe0
                            s.settimeout(1.0)
                            s.connect((host, port))
                            s.close()
                        except socket.error:
                            if s:
                                s.close()
            if hasattr(sock, "close"):
                sock.close()
            self.socket = None
        self.requests.stop(self.shutdown_timeout)
class Gateway(object):
    """Base class for gateways between an HTTP request and an application."""
    def __init__(self, req):
        self.req = req
    def respond(self):
        """Process the current request; must be overridden in a subclass."""
        # Bug fix: 'raise NotImplemented' raised a TypeError, because
        # NotImplemented is a sentinel value, not an exception type.
        raise NotImplementedError
# These may either be wsgiserver.SSLAdapter subclasses or the string names
# of such classes (in which case they will be lazily loaded).
# Keys are the lowercase driver names accepted by get_ssl_adapter_class().
ssl_adapters = {
    'builtin': 'cherrypy.wsgiserver.ssl_builtin.BuiltinSSLAdapter',
    'pyopenssl': 'cherrypy.wsgiserver.ssl_pyopenssl.pyOpenSSLAdapter',
}
def get_ssl_adapter_class(name='pyopenssl'):
    """Look up an SSL adapter by name, lazily importing dotted-path entries."""
    adapter = ssl_adapters[name.lower()]
    if not isinstance(adapter, basestring):
        return adapter

    # Resolve a "package.module.ClassName" string to the class object.
    last_dot = adapter.rfind(".")
    attr_name = adapter[last_dot + 1:]
    mod_path = adapter[:last_dot]

    mod = sys.modules.get(mod_path)
    if mod is None:
        # Never imported, or registered as None in sys.modules.
        # The last [''] is important.
        mod = __import__(mod_path, globals(), locals(), [''])

    try:
        return getattr(mod, attr_name)
    except AttributeError:
        raise AttributeError("'%s' object has no attribute '%s'"
                             % (mod_path, attr_name))
# -------------------------------- WSGI Stuff -------------------------------- #
class CherryPyWSGIServer(HTTPServer):
    """An HTTPServer which dispatches requests to a WSGI application."""

    wsgi_version = (1, 1)

    def __init__(self, bind_addr, wsgi_app, numthreads=10, server_name=None,
                 max=-1, request_queue_size=5, timeout=10, shutdown_timeout=5):
        self.requests = ThreadPool(self, min=numthreads or 1, max=max)
        self.wsgi_app = wsgi_app
        self.gateway = wsgi_gateways[self.wsgi_version]
        self.bind_addr = bind_addr
        self.server_name = server_name or socket.gethostname()
        self.request_queue_size = request_queue_size
        self.timeout = timeout
        self.shutdown_timeout = shutdown_timeout

    def _get_numthreads(self):
        # The pool's minimum thread count doubles as "numthreads".
        return self.requests.min

    def _set_numthreads(self, value):
        self.requests.min = value

    numthreads = property(_get_numthreads, _set_numthreads)
class WSGIGateway(Gateway):
def __init__(self, req):
self.req = req
self.started_response = False
self.env = self.get_environ()
def get_environ(self):
"""Return a new environ dict targeting the given wsgi.version"""
raise NotImplemented
def respond(self):
response = self.req.server.wsgi_app(self.env, self.start_response)
try:
for chunk in response:
# "The start_response callable must not actually transmit
# the response headers. Instead, it must store them for the
# server or gateway to transmit only after the first
# iteration of the application return value that yields
# a NON-EMPTY string, or upon the application's first
# invocation of the write() callable." (PEP 333)
if chunk:
if isinstance(chunk, unicode):
chunk = chunk.encode('ISO-8859-1')
self.write(chunk)
finally:
if hasattr(response, "close"):
response.close()
def start_response(self, status, headers, exc_info = None):
"""WSGI callable to begin the HTTP response."""
# "The application may call start_response more than once,
# if and only if the exc_info argument is provided."
if self.started_response and not exc_info:
raise AssertionError("WSGI start_response called a second "
"time with no exc_info.")
self.started_response = True
# "if exc_info is provided, and the HTTP headers have already been
# sent, start_response must raise an error, and should raise the
# exc_info tuple."
if self.req.sent_headers:
try:
raise exc_info[0], exc_info[1], exc_info[2]
finally:
exc_info = None
self.req.status = status
for k, v in headers:
if not isinstance(k, str):
raise TypeError("WSGI response header key %r is not a byte string." % k)
if not isinstance(v, str):
raise TypeError("WSGI response header value %r is not a byte string." % v)
self.req.outheaders.extend(headers)
return self.write
def write(self, chunk):
"""WSGI callable to write unbuffered data to the client.
This method is also used internally by start_response (to write
data from the iterable returned by the WSGI application).
"""
if not self.started_response:
raise AssertionError("WSGI write called before start_response.")
if not self.req.sent_headers:
self.req.sent_headers = True
self.req.send_headers()
self.req.write(chunk)
class WSGIGateway_10(WSGIGateway):
    """A Gateway class to interface HTTPServer with WSGI 1.0.x apps."""

    def get_environ(self):
        """Return a new environ dict targeting the given wsgi.version"""
        req = self.req
        server = req.server

        env = {
            # set a non-standard environ entry so the WSGI app can know what
            # the *real* server protocol is (and what features to support).
            # See http://www.faqs.org/rfcs/rfc2145.html.
            'ACTUAL_SERVER_PROTOCOL': server.protocol,
            'PATH_INFO': req.path,
            'QUERY_STRING': req.qs,
            'REMOTE_ADDR': req.conn.remote_addr or '',
            'REMOTE_PORT': str(req.conn.remote_port or ''),
            'REQUEST_METHOD': req.method,
            'REQUEST_URI': req.uri,
            'SCRIPT_NAME': '',
            'SERVER_NAME': server.server_name,
            # Bah. "SERVER_PROTOCOL" is actually the REQUEST protocol.
            'SERVER_PROTOCOL': req.request_protocol,
            'SERVER_SOFTWARE': server.software,
            'wsgi.errors': sys.stderr,
            'wsgi.input': req.rfile,
            'wsgi.multiprocess': False,
            'wsgi.multithread': True,
            'wsgi.run_once': False,
            'wsgi.url_scheme': req.scheme,
            'wsgi.version': (1, 0),
        }

        bind = server.bind_addr
        if isinstance(bind, basestring):
            # AF_UNIX. This isn't really allowed by WSGI, which doesn't
            # address unix domain sockets. But it's better than nothing.
            env["SERVER_PORT"] = ""
        else:
            env["SERVER_PORT"] = str(bind[1])

        # CONTENT_TYPE/CONTENT_LENGTH
        for name, value in req.inheaders.iteritems():
            env["HTTP_" + name.upper().replace("-", "_")] = value
        for cgi_name in ("CONTENT_TYPE", "CONTENT_LENGTH"):
            value = env.pop("HTTP_" + cgi_name, None)
            if value is not None:
                env[cgi_name] = value

        if req.conn.ssl_env:
            env.update(req.conn.ssl_env)

        return env
class WSGIGateway_11(WSGIGateway_10):
    """A Gateway class to interface HTTPServer with WSGI 1.1 apps."""

    def get_environ(self):
        """Return a new environ dict declaring wsgi.version (1, 1)."""
        environ = WSGIGateway_10.get_environ(self)
        environ['wsgi.version'] = (1, 1)
        return environ
class WSGIGateway_u0(WSGIGateway_10):
    """A Gateway class producing a unicode ('u', 0) WSGI environ."""
    def get_environ(self):
        """Return a new environ dict targeting the given wsgi.version"""
        req = self.req
        env_10 = WSGIGateway_10.get_environ(self)
        # Decode all keys to unicode; latin-1 maps bytes 1:1 to code points.
        env = dict([(k.decode('ISO-8859-1'), v) for k, v in env_10.iteritems()])
        env[u'wsgi.version'] = ('u', 0)
        # Request-URI
        env.setdefault(u'wsgi.url_encoding', u'utf-8')
        try:
            for key in [u"PATH_INFO", u"SCRIPT_NAME", u"QUERY_STRING"]:
                env[key] = env_10[str(key)].decode(env[u'wsgi.url_encoding'])
        except UnicodeDecodeError:
            # Fall back to latin 1 so apps can transcode if needed.
            env[u'wsgi.url_encoding'] = u'ISO-8859-1'
            for key in [u"PATH_INFO", u"SCRIPT_NAME", u"QUERY_STRING"]:
                env[key] = env_10[str(key)].decode(env[u'wsgi.url_encoding'])
        # Decode remaining byte-string values, except the raw request URI
        # and the input stream.
        for k, v in sorted(env.items()):
            if isinstance(v, str) and k not in ('REQUEST_URI', 'wsgi.input'):
                env[k] = v.decode('ISO-8859-1')
        return env
# Map each supported wsgi.version tuple to its Gateway implementation.
wsgi_gateways = {
    (1, 0): WSGIGateway_10,
    (1, 1): WSGIGateway_11,
    ('u', 0): WSGIGateway_u0,
}
class WSGIPathInfoDispatcher(object):
    """A WSGI dispatcher for dispatch based on the PATH_INFO.

    apps: a dict or list of (path_prefix, app) pairs.
    """
    def __init__(self, apps):
        try:
            apps = apps.items()
        except AttributeError:
            pass
        # Sort the apps by len(path), descending.
        # Using key=/reverse= instead of the Python-2-only cmp= argument
        # (and sorted() instead of list.sort()) also works when dict.items()
        # returns a view. Ordering among equal-length prefixes is
        # irrelevant: at most one equal-length prefix can match a path.
        apps = sorted(apps, key=lambda pair: len(pair[0]), reverse=True)
        # The path_prefix strings must start, but not end, with a slash.
        # Use "" instead of "/".
        self.apps = [(p.rstrip("/"), a) for p, a in apps]
    def __call__(self, environ, start_response):
        """Dispatch to the app with the longest matching path prefix."""
        path = environ["PATH_INFO"] or "/"
        for p, app in self.apps:
            # The apps list should be sorted by length, descending.
            if path.startswith(p + "/") or path == p:
                # Shift the matched prefix from PATH_INFO to SCRIPT_NAME
                # on a copy so the caller's environ is untouched.
                environ = environ.copy()
                environ["SCRIPT_NAME"] = environ["SCRIPT_NAME"] + p
                environ["PATH_INFO"] = path[len(p):]
                return app(environ, start_response)
        start_response('404 Not Found', [('Content-Type', 'text/plain'),
                                         ('Content-Length', '0')])
        return ['']
| Python |
# forward-compat boilerplate
from __future__ import absolute_import
from __future__ import with_statement
__metaclass__ = type
import os
import hmac
import hashlib
from fenton import util
DEFAULT_TTL = 600 # seconds
NONCELEN = 8  # bytes of random nonce appended to hashes and messages
MACLEN = 20   # length of a SHA-1 digest / HMAC-SHA1, in bytes
def initialize(app):
    # Store the application's config on BuiltinUser so that builtin-user
    # lookups (see get_builtin) can read the 'security.builtin.*' settings.
    BuiltinUser.config = app.config
def get_builtin(username, password):
    """Return a BuiltinUser for *username*, or None if not a builtin user.

    When *password* is not None it is compared (via streq) against the
    configured 'security.builtin.<username>.password'; a mismatch raises
    BadPassword.
    """
    builtins = (BuiltinUser.config.get('security.builtin.users') or '').split()
    if username not in builtins:
        return None
    pw = BuiltinUser.config.get('security.builtin.%s.password' % username) or ''
    if password is not None and not streq(password, pw):
        raise BadPassword
    return BuiltinUser(username=username)
def sha1impl(secret, nonce):
    """Return the raw SHA-1 digest of *secret* + *nonce*."""
    if isinstance(secret, unicode):
        # Hash bytes, not characters.
        secret = secret.encode('UTF-8')
    return hashlib.sha1(secret + nonce).digest()
def secure_impl(secret, nonce):
    """Slower, stronger alternative to sha1impl: 20-byte PBKDF2, 1000 rounds."""
    return make_key(secret, nonce, size=20, rounds=1000)
def streq(left, right):
    """Compare two strings without short-circuiting on the first mismatch.

    Every character pair is examined so the comparison time does not
    reveal *where* the strings first differ.

    Bug fix: the original only checked that each character of ``left``
    matched the corresponding character of ``right``; because zip()
    truncates to the shorter argument, it wrongly returned True whenever
    ``left`` was a prefix of ``right`` (e.g. a prefix of the real
    password was accepted). An explicit length check closes that hole.
    """
    if len(left) != len(right):
        return False
    return sum(l == r for (l, r) in zip(left, right)) == len(left)
def cmphash(secret, stored, impl=sha1impl):
    """Return True if *secret* hashes to the base64-encoded *stored* value.

    The decoded stored value is digest(20 bytes) + nonce; re-hash the
    secret with that nonce and compare via streq.
    """
    nonce = stored.decode('base64')[20:]
    hashed = mkhash(secret, nonce, impl)
    # Right-pad to the stored length so streq compares equal-length strings.
    fmt = '%%-%ds' % len(stored)
    hashed = fmt % hashed
    return streq(hashed, stored)
def mkhash(secret, nonce=None, impl=sha1impl):
    """Hash *secret* with a nonce (random NONCELEN bytes if not given).

    Returns base64 text whose decoded layout is digest + nonce.
    """
    if nonce is None:
        nonce = os.urandom(NONCELEN)
    return (impl(secret, nonce) + nonce).encode('base64').strip()
# functions for integrating with windows NTLM
def _hmac(secret, msg):
    """HMAC-SHA1 digest (raw bytes) of *msg* under *secret*."""
    mac = hmac.new(secret, msg, hashlib.sha1)
    return mac.digest()
def encode_message(secret, msg, nonce=None):
    """MAC-then-encode *msg*: base64 of HMAC(nonce+msg) + nonce + msg.

    A random nonce of NONCELEN bytes is generated when none is given.
    The inverse operation is decode_message().

    Fix: removed an unreachable second ``return`` statement that was
    dead code after the first return.
    """
    if nonce is None:
        nonce = os.urandom(NONCELEN)
    return util.enc64(_hmac(secret, nonce+msg) + nonce + msg)
def decode_message(secret, string, noncelen=NONCELEN):
    """Verify and unwrap a token produced by encode_message().

    Decoded layout: MAC(20 bytes) + nonce(noncelen) + msg.  Returns the
    message when the MAC is valid, None otherwise.
    """
    raw = util.dec64(string, '+/')
    mac, rest = raw[:MACLEN], raw[MACLEN:]
    nonce, msg = rest[:noncelen], rest[noncelen:]
    if not streq(mac, _hmac(secret, nonce+msg)):
        return None
    return msg
def pretend_ntlm(secret, itoken, user=None, noncelen=NONCELEN):
    """Answer an NTLM-style challenge token for local testing.

    *itoken* decodes to MAC(20) + nonce(noncelen) + url; the MAC covers
    everything after itself.  On a valid MAC, returns a response token
    binding the same nonce to a username (the current OS user when
    *user* is not given), in the layout encode_message() uses.
    Returns None when the MAC does not verify.
    """
    ibytes = util.dec64(itoken)
    mac = ibytes[:MACLEN]
    # The MAC is computed over the whole remainder (nonce + url).
    macmsg = ibytes[MACLEN:]
    if not streq(mac, _hmac(secret, macmsg)):
        return None
    nonce = ibytes[MACLEN:MACLEN+noncelen]
    # NOTE(review): url is extracted but never used below -- presumably
    # kept for symmetry with the .NET counterpart sketched in the
    # comments that follow this function; confirm.
    url = ibytes[MACLEN+noncelen:]
    if user is None:
        # Fall back to the OS login name of the current process.
        import getpass
        user = getpass.getuser()
    return util.enc64(_hmac(secret, nonce+user) + nonce + user)
# dim macobj as new HMACSHA1(enc.GetBytes(secret))
# dim itoken = Request.Parameter('_').replace('-', '+')
# itoken = System.Convert.FromBase64String(itoken)
# dim nonce as left(itoken, noncelen)
# dim enc as new UTF8Encoding
# dim msg = enc.GetBytes(nonce & user)
# dim mac = macobj.ComputeHash(msg)
# dim ret() as Byte
# redim ret(mac.Length + msg.Length - 1)
# System.Buffer.BlockCopy(mac, 0, ret, 0, mac.Length)
# System.Buffer.BlockCopy(msg, 0, ret, mac.Length, msg.Length)
# otoken = System.Convert.ToBase64String(ret)
def check(priv, context, obj=None):
    """Evaluate a privilege spec of any supported form against *context*.

    *priv* may be a literal True/False/None, a string naming a dynamic
    relation (wrapped in Dynamic), or a callable/Priv instance taking
    (context, obj).  The outcome is always coerced to bool.
    """
    if priv in (True, False, None):
        outcome = priv
    elif isinstance(priv, basestring):
        outcome = Dynamic(priv).check(context, obj)
    else:
        outcome = priv(context, obj)
    return bool(outcome)
def get_crypter(key, impl=None, noncelen=32):
    """Return a Crypter for *key*, defaulting to the preferred cipher impl."""
    chosen = impl or _default_impl
    return Crypter(key, chosen, noncelen)
def get_signer(key, hasher=hashlib.sha1):
    """Return a Signer bound to *key* (HMAC-SHA1 by default)."""
    return Signer(key, hasher=hasher)
def make_key(password, salt, rounds=1, size=32):
    """Derive *size* key bytes from *password* and *salt* via PBKDF2."""
    from fenton import PBKDF2
    derived = PBKDF2.PBKDF2(password, salt, rounds)
    return derived.read(size)
class Priv:
    """Base class for composable privilege checks.

    Subclasses implement check(context, obj).  Instances can be combined
    with boolean operators -- ``a & b`` (AND), ``a | b`` (OR), ``~a``
    (NOT) -- and called directly like functions.
    """
    def __call__(self, context, obj=None):
        # Calling an instance delegates to its check() implementation.
        return self.check(context, obj)
    def __and__(self, other):
        return AND((self, other))
    def __or__(self, other):
        return OR((self, other))
    def __invert__(self):
        return NOT(self)
class _AuthenticatedPriv(Priv):
    """Privilege that only requires the user to be authenticated."""
    def __repr__(self):
        return '<AUTHENTICATED>'
    def check(self, context, obj):
        # Raises NotAuthenticated for anonymous users (see AnonymousUser).
        return context.user.verify_authenticated(context)
class _RestrictedPriv(Priv):
    """Privilege restricted to the configured superuser group(s).

    ``security.superusers`` is treated as a whitespace-separated list of
    group names, mirroring the other group lists in this module (e.g.
    ``security.builtin.groups.default``).

    Fix: the raw config string used to be passed to has_privilege() as a
    single privilege name, so a multi-name setting could never match any
    group; it is now split into individual names first.  A single-name
    setting behaves exactly as before.
    """
    def __repr__(self):
        return '<RESTRICTED>'
    def check(self, context, obj):
        # Raises NotAuthenticated for anonymous users.
        context.user.verify_authenticated(context)
        supers = context.app.config.get('security.superusers')
        if not supers:
            return False
        return context.user.has_privilege(*supers.split())
class OR(Priv):
    """Disjunction of privilege specs: passes when any operand passes.

    Fix (consistency): operands are now evaluated through the
    module-level check(), matching AND.check, so literals
    (True/False/None) and relation-name strings are accepted as
    operands, not just Priv instances/callables.
    """
    def __init__(self, operands):
        # At least one operand is required; any() over () would be False
        # but an empty OR is almost certainly a construction bug.
        assert operands
        self.operands = operands
    def check(self, context, obj):
        return any(check(f, context, obj) for f in self.operands)
    def __or__(self, other):
        # Chaining a | b | c keeps one flat OR rather than nesting.
        return OR(self.operands + (other,))
    def __repr__(self):
        return ' | '.join('(%r)' % x for x in self.operands)
class AND(Priv):
    """Conjunction of privilege specs: passes only when every operand passes."""
    def __init__(self, operands):
        # At least one operand is required; all() over () would be
        # vacuously true otherwise.
        assert operands
        self.operands = operands
    def check(self, context, obj):
        # Operands go through the module-level check(), so literals and
        # relation-name strings are accepted, not just callables.
        return all(check(f, context, obj) for f in self.operands)
    def __and__(self, other):
        # Chaining a & b & c keeps one flat AND rather than nesting.
        return AND(self.operands + (other,))
    def __repr__(self):
        return ' & '.join('(%r)' % x for x in self.operands)
class NOT(Priv):
    """Negation of a privilege spec."""
    def __init__(self, operand):
        self.operand = operand
    def check(self, context, obj):
        return not self.operand(context, obj)
    def __invert__(self):
        # Double negation unwraps straight back to the original operand.
        return self.operand
    def __repr__(self):
        return '!%r' % self.operand
class Static(Priv):
    """Privilege satisfied by holding one fixed, named privilege/group."""
    def __init__(self, privilege):
        self.privilege = privilege
    def check(self, context, obj):
        # Raises NotAuthenticated for anonymous users.
        context.user.verify_authenticated(context)
        return context.user.has_privilege(self.privilege)
    def __repr__(self):
        return '~%s' % self.privilege
class Dynamic(Priv):
    """Privilege decided by calling a named relation method on the target.

    check() looks up ``self.relation`` on *obj* and calls it; a missing
    method counts as denied.  Raises RecheckWithObject when no object is
    available yet, so callers can retry once they have one.
    """
    def __init__(self, relation):
        self.relation = relation
    def check(self, context, obj):
        context.user.verify_authenticated(context)
        if obj is None:
            # Cannot decide without a target; signal the caller to retry.
            raise RecheckWithObject
        f = getattr(obj, self.relation, None)
        # A missing relation yields None (falsy) rather than an error.
        return f and f()
    def __repr__(self):
        return ':%s' % self.relation
class RecheckWithObject(Exception):
    """Raised by Dynamic.check when it needs a target object to decide."""
    pass
class InvalidSignature(Exception):
    """Raised by Signer.verify when a MAC does not match."""
    pass
class SecurityException(Exception):
    """Base class for user-facing security errors.

    str() renders the CamelCase class name as words (via util.decamel),
    followed by the first constructor argument if one was given.
    """
    def __unicode__(self):
        return unicode(str(self))
    def __str__(self):
        # e.g. BadPassword -> "Bad password"
        msg = util.decamel(self.__class__.__name__, sep=' ').capitalize()
        if self.args:
            msg += ': ' + str(self.args[0])
        return msg
# Concrete error conditions; the class name doubles as the message text.
class LoginFailed(SecurityException): pass
class NoUser(LoginFailed): pass
class BadUser(LoginFailed): pass
class BadPassword(LoginFailed): pass
class PoorPassword(LoginFailed): pass
class NotAllowed(SecurityException): pass
class NotAuthenticated(SecurityException): pass
class SessionExpired(NotAuthenticated): pass
class User:
    """Base class for principals.

    Subclasses implement the authenticate / unauthenticate /
    verify_authenticated lifecycle; privileges are modelled as a set of
    group names returned by get_privileges().
    """
    # Display name defaults to the login name.
    name = property(lambda x:x.username)
    # Set of group names, or None when unknown.
    groups = None
    # Whether this principal counts as logged in.
    authenticated = True
    # Password bookkeeping; subclasses may fill these in.
    password_stamp = None
    password_ttl = None
    password_age = None
    def __init__(self, username):
        self.username = username
    def __repr__(self):
        return '%s(%s)' % (self.__class__.__name__, self.username)
    def authenticate(self, request):
        raise NotImplementedError
    def unauthenticate(self, context):
        raise NotImplementedError
    def verify_authenticated(self, context):
        raise NotImplementedError
    def get_privileges(self):
        # Default: privileges are exactly the group memberships.
        return self.groups
    def has_privilege(self, *required):
        """Return True when the user holds any of the *required* names."""
        has = self.get_privileges()
        # Intersection of required names with the held set; empty -> False.
        return bool(has and (set(required) & has))
class AnonymousUser(User):
    """Singleton principal for requests with no login.

    All privilege queries raise NotAuthenticated; __new__ guarantees a
    single shared instance (see the ANONYMOUS module constant).
    """
    name = 'Anonymous'
    username = '<anonymous>'
    authenticated = False
    __instance = None
    def __init__(self):
        # Overrides User.__init__: the username is a fixed class attribute.
        pass
    def __repr__(self):
        return self.username
    def __new__(Class):
        # Lazily create and cache the one shared instance.
        if not Class.__instance:
            Class.__instance = User.__new__(Class)
        return Class.__instance
    def unauthenticate(self, context):
        # Logging out an anonymous user is trivially successful.
        return True
    def has_privilege(self, *required):
        raise NotAuthenticated
    def verify_authenticated(self, context):
        raise NotAuthenticated
class SystemUser(User):
    """Principal for code acting on the system's own behalf.

    Always authenticated and granted every privilege (see the SYSTEM
    module constant).
    """
    name = 'System'
    def authenticate(self, request):
        return True
    def unauthenticate(self, context):
        return True
    def verify_authenticated(self, context):
        return True
    def has_privilege(self, *required):
        return True
class BuiltinUser(User):
    """User declared directly in the application configuration.

    The class-level ``config`` mapping is injected by initialize();
    group membership is read from ``security.builtin.*`` settings.
    """
    password_ttl = None
    # Populated by initialize(); empty mapping until then.
    config = {}
    def __init__(self, username):
        self.username = username
    def authenticate(self, request):
        return True
    def unauthenticate(self, context):
        return True
    def verify_authenticated(self, context):
        return True
    @property
    def groups(self):
        """Union of the default builtin groups and this user's own groups."""
        default = (self.config.get('security.builtin.groups.default') or '').split()
        mine = (self.config.get('security.builtin.%s.groups' % self.username) or '').split()
        return set(default) | set(mine)
class RemoteUser(User):
    # NOTE(review): no behaviour of its own yet; presumably a marker type
    # for principals authenticated outside this module -- confirm intent.
    pass
ALLOWED = PUBLIC = True
DENIED = PRIVATE = False
AUTHENTICATED = _AuthenticatedPriv()
RESTRICTED = _RestrictedPriv()
ANONYMOUS = AnonymousUser()
SYSTEM = SystemUser('<system>')
# Registry of optional cipher backends: name -> (encrypt, decrypt).
# Each backend is registered only if its library imports cleanly, so the
# module still loads (with fewer algorithms) on hosts missing the
# crypto C extensions.  Crypter looks implementations up by name here.
_crypto_impl = {}
_default_impl = 'pycrypto_aes'
try:
    from pycryptopp.cipher.aes import AES as aes_cpp
except ImportError:
    pass
else:
    def do_cryptopp(key, data):
        # Stream-style interface: the same call transforms data in both
        # directions, so it is registered as both encrypt and decrypt.
        return aes_cpp(key).process(data)
    _crypto_impl['cryptopp_aes'] = (do_cryptopp, do_cryptopp)
try:
    from Crypto.Cipher import AES
except ImportError:
    pass
else:
    # NOTE(review): CFB mode without an explicit IV relies on PyCrypto's
    # legacy default IV; acceptable only because Crypter derives a fresh
    # key per message -- confirm before reusing keys across messages.
    def enc_aes(key, data):
        return AES.new(key, AES.MODE_CFB).encrypt(data)
    def dec_aes(key, data):
        return AES.new(key, AES.MODE_CFB).decrypt(data)
    _crypto_impl['pycrypto_aes'] = (enc_aes, dec_aes)
try:
    from Crypto.Cipher import Blowfish
except ImportError:
    pass
else:
    def enc_blowfish(key, data):
        return Blowfish.new(key, Blowfish.MODE_CFB).encrypt(data)
    def dec_blowfish(key, data):
        return Blowfish.new(key, Blowfish.MODE_CFB).decrypt(data)
    _crypto_impl['pycrypto_blowfish'] = (enc_blowfish, dec_blowfish)
class Crypter:
    """Symmetric encrypt/decrypt with a per-message derived key.

    Each message is prefixed with a random nonce; the actual cipher key
    is derived from the master key and that nonce via make_key(), so the
    master key never keys the cipher directly and every message is
    encrypted under a distinct key.
    """
    def __init__(self, key, impl, noncelen=32):
        # Name-mangled to keep the master key off casual introspection.
        self.__key = key
        self.noncelen = noncelen
        try:
            self._enc, self._dec = _crypto_impl[impl]
        except KeyError:
            msg = 'algo %s not found, available algos: %s'
            raise Exception(msg % (impl, _crypto_impl.keys()))
    def key(self, nonce):
        """Derive the per-message cipher key for *nonce*."""
        return make_key(self.__key, nonce)
    def encrypt(self, data):
        """Return nonce + ciphertext for *data* under a fresh random nonce."""
        nonce = os.urandom(self.noncelen)
        key = self.key(nonce)
        return nonce + self._enc(key, data)
    def decrypt(self, data):
        """Invert encrypt(): split off the leading nonce, decrypt the rest."""
        nonce = data[:self.noncelen]
        data = data[self.noncelen:]
        key = self.key(nonce)
        return self._dec(key, data)
class Signer:
    """HMAC-based sign/verify wrapper.

    sign() prefixes the MAC to the value; verify() strips and checks it.
    NOTE(review): the default hasher here is sha256 while get_signer()
    defaults to sha1 -- tokens only round-trip when both sides use the
    same hasher; confirm which default is intended.
    """
    def __init__(self, key, hasher=hashlib.sha256):
        # Name-mangled to keep the key off casual introspection.
        self.__key = key
        self.hasher = hasher
        # Digest length determines where the MAC ends in verify().
        self.hashlen = len(hasher('').digest())
    def verify(self, val):
        """Return the payload of a signed value; raise InvalidSignature otherwise."""
        sig = val[:self.hashlen]
        val = val[self.hashlen:]
        expect = hmac.new(self.__key, val, self.hasher).digest()
        # Constant-time-style comparison; see streq().
        if not streq(expect, sig):
            raise InvalidSignature
        return val
    def sign(self, val):
        """Return MAC + val, verifiable with verify()."""
        sig = hmac.new(self.__key, val, self.hasher).digest()
        return sig + val
| Python |
"""
Read and write ZIP files.
"""
# Improved by Chortos-2 in 2010 (added bzip2 support)
import struct, os, time, sys, shutil
import binascii, cStringIO, stat
import io
import re
try:
import zlib # We may need its compression method
crc32 = zlib.crc32
except ImportError:
zlib = None
crc32 = binascii.crc32
try:
import bz2 # We may need its compression method
except ImportError:
bz2 = None
__all__ = ["BadZipfile", "error", "ZIP_STORED", "ZIP_DEFLATED", "is_zipfile",
"ZipInfo", "ZipFile", "PyZipFile", "LargeZipFile", "ZIP_BZIP2" ]
class BadZipfile(Exception):
    """Raised when a file is not a ZIP archive or its structure is corrupt."""
    pass
class LargeZipFile(Exception):
    """
    Raised when writing a zipfile, the zipfile requires ZIP64 extensions
    and those extensions are disabled (allowZip64=False).
    """
error = BadZipfile # The exception raised by this module
ZIP64_LIMIT = (1 << 31) - 1
ZIP_FILECOUNT_LIMIT = 1 << 16
ZIP_MAX_COMMENT = (1 << 16) - 1
# constants for Zip file compression methods
ZIP_STORED = 0
ZIP_DEFLATED = 8
ZIP_BZIP2 = 12
# Other ZIP compression methods not supported
# Below are some formats and associated data for reading/writing headers using
# the struct module. The names and structures of headers/records are those used
# in the PKWARE description of the ZIP file format:
# http://www.pkware.com/documents/casestudies/APPNOTE.TXT
# (URL valid as of January 2008)
# The "end of central directory" structure, magic number, size, and indices
# (section V.I in the format document)
structEndArchive = "<4s4H2LH"
stringEndArchive = "PK\005\006"
sizeEndCentDir = struct.calcsize(structEndArchive)
_ECD_SIGNATURE = 0
_ECD_DISK_NUMBER = 1
_ECD_DISK_START = 2
_ECD_ENTRIES_THIS_DISK = 3
_ECD_ENTRIES_TOTAL = 4
_ECD_SIZE = 5
_ECD_OFFSET = 6
_ECD_COMMENT_SIZE = 7
# These last two indices are not part of the structure as defined in the
# spec, but they are used internally by this module as a convenience
_ECD_COMMENT = 8
_ECD_LOCATION = 9
# The "central directory" structure, magic number, size, and indices
# of entries in the structure (section V.F in the format document)
structCentralDir = "<4s4B4HL2L5H2L"
stringCentralDir = "PK\001\002"
sizeCentralDir = struct.calcsize(structCentralDir)
# indexes of entries in the central directory structure
_CD_SIGNATURE = 0
_CD_CREATE_VERSION = 1
_CD_CREATE_SYSTEM = 2
_CD_EXTRACT_VERSION = 3
_CD_EXTRACT_SYSTEM = 4
_CD_FLAG_BITS = 5
_CD_COMPRESS_TYPE = 6
_CD_TIME = 7
_CD_DATE = 8
_CD_CRC = 9
_CD_COMPRESSED_SIZE = 10
_CD_UNCOMPRESSED_SIZE = 11
_CD_FILENAME_LENGTH = 12
_CD_EXTRA_FIELD_LENGTH = 13
_CD_COMMENT_LENGTH = 14
_CD_DISK_NUMBER_START = 15
_CD_INTERNAL_FILE_ATTRIBUTES = 16
_CD_EXTERNAL_FILE_ATTRIBUTES = 17
_CD_LOCAL_HEADER_OFFSET = 18
# The "local file header" structure, magic number, size, and indices
# (section V.A in the format document)
structFileHeader = "<4s2B4HL2L2H"
stringFileHeader = "PK\003\004"
sizeFileHeader = struct.calcsize(structFileHeader)
_FH_SIGNATURE = 0
_FH_EXTRACT_VERSION = 1
_FH_EXTRACT_SYSTEM = 2
_FH_GENERAL_PURPOSE_FLAG_BITS = 3
_FH_COMPRESSION_METHOD = 4
_FH_LAST_MOD_TIME = 5
_FH_LAST_MOD_DATE = 6
_FH_CRC = 7
_FH_COMPRESSED_SIZE = 8
_FH_UNCOMPRESSED_SIZE = 9
_FH_FILENAME_LENGTH = 10
_FH_EXTRA_FIELD_LENGTH = 11
# The "Zip64 end of central directory locator" structure, magic number, and size
structEndArchive64Locator = "<4sLQL"
stringEndArchive64Locator = "PK\x06\x07"
sizeEndCentDir64Locator = struct.calcsize(structEndArchive64Locator)
# The "Zip64 end of central directory" record, magic number, size, and indices
# (section V.G in the format document)
structEndArchive64 = "<4sQ2H2L4Q"
stringEndArchive64 = "PK\x06\x06"
sizeEndCentDir64 = struct.calcsize(structEndArchive64)
_CD64_SIGNATURE = 0
_CD64_DIRECTORY_RECSIZE = 1
_CD64_CREATE_VERSION = 2
_CD64_EXTRACT_VERSION = 3
_CD64_DISK_NUMBER = 4
_CD64_DISK_NUMBER_START = 5
_CD64_NUMBER_ENTRIES_THIS_DISK = 6
_CD64_NUMBER_ENTRIES_TOTAL = 7
_CD64_DIRECTORY_SIZE = 8
_CD64_OFFSET_START_CENTDIR = 9
def _check_zipfile(fp):
    """Return True when *fp* contains a readable end-of-central-directory record."""
    try:
        # A truthy end record means the magic number was found.
        return bool(_EndRecData(fp))
    except IOError:
        return False
def is_zipfile(filename):
    """Quickly see if a file is a ZIP file by checking the magic number.
    The filename argument may be a file or file-like object too.
    """
    try:
        if hasattr(filename, "read"):
            # Already an open file-like object; probe it in place.
            return _check_zipfile(fp=filename)
        with open(filename, "rb") as fp:
            return _check_zipfile(fp)
    except IOError:
        # Unreadable path or seek failure: treat as "not a zipfile".
        return False
def _EndRecData64(fpin, offset, endrec):
    """
    Read the ZIP64 end-of-archive records and use that to update endrec.

    *offset* is the (negative) position of the classic end record
    relative to the end of the file.  Returns *endrec* unchanged when no
    ZIP64 locator/record is present; raises BadZipfile for multi-disk
    archives.
    """
    try:
        # The ZIP64 locator, if present, sits immediately before the
        # classic end-of-central-directory record.
        fpin.seek(offset - sizeEndCentDir64Locator, 2)
    except IOError:
        # If the seek fails, the file is not large enough to contain a ZIP64
        # end-of-archive record, so just return the end record we were given.
        return endrec
    data = fpin.read(sizeEndCentDir64Locator)
    sig, diskno, reloff, disks = struct.unpack(structEndArchive64Locator, data)
    if sig != stringEndArchive64Locator:
        # No locator signature: plain (non-ZIP64) archive.
        return endrec
    if diskno != 0 or disks != 1:
        raise BadZipfile("zipfiles that span multiple disks are not supported")
    # Assume no 'zip64 extensible data'
    fpin.seek(offset - sizeEndCentDir64Locator - sizeEndCentDir64, 2)
    data = fpin.read(sizeEndCentDir64)
    sig, sz, create_version, read_version, disk_num, disk_dir, \
            dircount, dircount2, dirsize, diroffset = \
            struct.unpack(structEndArchive64, data)
    if sig != stringEndArchive64:
        return endrec
    # Update the original endrec using data from the ZIP64 record
    endrec[_ECD_SIGNATURE] = sig
    endrec[_ECD_DISK_NUMBER] = disk_num
    endrec[_ECD_DISK_START] = disk_dir
    endrec[_ECD_ENTRIES_THIS_DISK] = dircount
    endrec[_ECD_ENTRIES_TOTAL] = dircount2
    endrec[_ECD_SIZE] = dirsize
    endrec[_ECD_OFFSET] = diroffset
    return endrec
def _EndRecData(fpin):
    """Return data from the "End of Central Directory" record, or None.
    The data is a list of the nine items in the ZIP "End of central dir"
    record followed by a tenth item, the file seek offset of this record."""
    # Determine file size
    fpin.seek(0, 2)
    filesize = fpin.tell()
    # Check to see if this is ZIP file with no archive comment (the
    # "end of central directory" structure should be the last item in the
    # file if this is the case).
    try:
        fpin.seek(-sizeEndCentDir, 2)
    except IOError:
        # File shorter than the end record itself cannot be a ZIP file.
        return None
    data = fpin.read()
    if data[0:4] == stringEndArchive and data[-2:] == "\000\000":
        # the signature is correct and there's no comment, unpack structure
        endrec = struct.unpack(structEndArchive, data)
        endrec=list(endrec)
        # Append a blank comment and record start offset
        endrec.append("")
        endrec.append(filesize - sizeEndCentDir)
        # Try to read the "Zip64 end of central directory" structure
        return _EndRecData64(fpin, -sizeEndCentDir, endrec)
    # Either this is not a ZIP file, or it is a ZIP file with an archive
    # comment.  Search the end of the file for the "end of central directory"
    # record signature. The comment is the last item in the ZIP file and may be
    # up to 64K long.  It is assumed that the "end of central directory" magic
    # number does not appear in the comment.
    maxCommentStart = max(filesize - (1 << 16) - sizeEndCentDir, 0)
    fpin.seek(maxCommentStart, 0)
    data = fpin.read()
    # rfind: use the *last* occurrence of the magic, closest to the comment.
    start = data.rfind(stringEndArchive)
    if start >= 0:
        # found the magic number; attempt to unpack and interpret
        recData = data[start:start+sizeEndCentDir]
        endrec = list(struct.unpack(structEndArchive, recData))
        comment = data[start+sizeEndCentDir:]
        # check that comment length is correct
        if endrec[_ECD_COMMENT_SIZE] == len(comment):
            # Append the archive comment and start offset
            endrec.append(comment)
            endrec.append(maxCommentStart + start)
            # Try to read the "Zip64 end of central directory" structure
            return _EndRecData64(fpin, maxCommentStart + start - filesize,
                                 endrec)
    # Unable to find a valid end of central directory structure
    return
class ZipInfo (object):
    """Class with attributes describing each file in the ZIP archive.

    Uses __slots__ because archives may hold very many members; one
    instance exists per member, filled in either by __init__ (when
    writing) or by ZipFile._RealGetContents (when reading).
    """
    __slots__ = (
        'orig_filename',
        'filename',
        'date_time',
        'compress_type',
        'comment',
        'extra',
        'create_system',
        'create_version',
        'extract_version',
        'reserved',
        'flag_bits',
        'volume',
        'internal_attr',
        'external_attr',
        'header_offset',
        'CRC',
        'compress_size',
        'file_size',
        '_raw_time',
    )
    def __init__(self, filename="NoName", date_time=(1980,1,1,0,0,0)):
        self.orig_filename = filename # Original file name in archive
        # Terminate the file name at the first null byte. Null bytes in file
        # names are used as tricks by viruses in archives.
        null_byte = filename.find(chr(0))
        if null_byte >= 0:
            filename = filename[0:null_byte]
        # This is used to ensure paths in generated ZIP files always use
        # forward slashes as the directory separator, as required by the
        # ZIP format specification.
        if os.sep != "/" and os.sep in filename:
            filename = filename.replace(os.sep, "/")
        self.filename = filename # Normalized file name
        self.date_time = date_time # year, month, day, hour, min, sec
        # Standard values:
        self.compress_type = ZIP_STORED # Type of compression for the file
        self.comment = "" # Comment for each file
        self.extra = "" # ZIP extra data
        if sys.platform == 'win32':
            self.create_system = 0 # System which created ZIP archive
        else:
            # Assume everything else is unix-y
            self.create_system = 3 # System which created ZIP archive
        self.create_version = 20 # Version which created ZIP archive
        self.extract_version = 20 # Version needed to extract archive
        self.reserved = 0 # Must be zero
        self.flag_bits = 0 # ZIP flag bits
        self.volume = 0 # Volume number of file header
        self.internal_attr = 0 # Internal attributes
        self.external_attr = 0 # External file attributes
        # Other attributes are set by class ZipFile:
        # header_offset Byte offset to the file header
        # CRC CRC-32 of the uncompressed file
        # compress_size Size of the compressed file
        # file_size Size of the uncompressed file
    def FileHeader(self):
        """Return the per-file (local) header as a string."""
        dt = self.date_time
        # Pack the timestamp into the two 16-bit MS-DOS date/time fields
        # (2-second resolution for seconds).
        dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
        dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
        if self.flag_bits & 0x08:
            # Set these to zero because we write them after the file data
            CRC = compress_size = file_size = 0
        else:
            CRC = self.CRC
            compress_size = self.compress_size
            file_size = self.file_size
        extra = self.extra
        if file_size > ZIP64_LIMIT or compress_size > ZIP64_LIMIT:
            # File is larger than what fits into a 4 byte integer,
            # fall back to the ZIP64 extension (extra-field header id 1).
            fmt = '<HHQQ'
            extra = extra + struct.pack(fmt,
                    1, struct.calcsize(fmt)-4, file_size, compress_size)
            # The 32-bit fields are set to the all-ones sentinel; readers
            # then take the real sizes from the ZIP64 extra field.
            file_size = 0xffffffff
            compress_size = 0xffffffff
            # ZIP64 requires at least version 4.5 to extract.
            self.extract_version = max(45, self.extract_version)
            self.create_version = max(45, self.extract_version)
        filename, flag_bits = self._encodeFilenameFlags()
        header = struct.pack(structFileHeader, stringFileHeader,
                 self.extract_version, self.reserved, flag_bits,
                 self.compress_type, dostime, dosdate, CRC,
                 compress_size, file_size,
                 len(filename), len(extra))
        return header + filename + extra
    def _encodeFilenameFlags(self):
        """Return (encoded filename, flag bits), setting the UTF-8 flag
        (0x800) when the name does not fit in ASCII."""
        if isinstance(self.filename, unicode):
            try:
                return self.filename.encode('ascii'), self.flag_bits
            except UnicodeEncodeError:
                return self.filename.encode('utf-8'), self.flag_bits | 0x800
        else:
            return self.filename, self.flag_bits
    def _decodeFilename(self):
        """Decode the stored filename, honouring the UTF-8 flag bit."""
        if self.flag_bits & 0x800:
            return self.filename.decode('utf-8')
        else:
            return self.filename
    def _decodeExtra(self):
        # Try to decode the extra field.
        extra = self.extra
        unpack = struct.unpack
        # The extra field is a sequence of (tag, length, data) records.
        while extra:
            tp, ln = unpack('<HH', extra[:4])
            if tp == 1:
                # ZIP64 record: contains up to three 64-bit counts.
                if ln >= 24:
                    counts = unpack('<QQQ', extra[4:28])
                elif ln == 16:
                    counts = unpack('<QQ', extra[4:20])
                elif ln == 8:
                    counts = unpack('<Q', extra[4:12])
                elif ln == 0:
                    counts = ()
                else:
                    raise RuntimeError, "Corrupt extra field %s"%(ln,)
                idx = 0
                # ZIP64 extension (large files and/or large archives):
                # each 32-bit field holding the all-ones sentinel takes
                # its real value from the next count, in a fixed order.
                if self.file_size in (0xffffffffffffffffL, 0xffffffffL):
                    self.file_size = counts[idx]
                    idx += 1
                if self.compress_size == 0xFFFFFFFFL:
                    self.compress_size = counts[idx]
                    idx += 1
                if self.header_offset == 0xffffffffL:
                    old = self.header_offset
                    self.header_offset = counts[idx]
                    idx+=1
            # Skip to the next record regardless of tag.
            extra = extra[ln+4:]
class _ZipDecrypter:
    """Class to handle decryption of files stored within a ZIP archive.
    ZIP supports a password-based form of encryption. Even though known
    plaintext attacks have been found against it, it is still useful
    to be able to get data out of such a file.
    Usage:
        zd = _ZipDecrypter(mypwd)
        plain_char = zd(cypher_char)
        plain_text = map(zd, cypher_text)
    """
    def _GenerateCRCTable():
        """Generate a CRC-32 table.
        ZIP encryption uses the CRC32 one-byte primitive for scrambling some
        internal keys. We noticed that a direct implementation is faster than
        relying on binascii.crc32().
        """
        poly = 0xedb88320
        table = [0] * 256
        for i in range(256):
            crc = i
            for j in range(8):
                if crc & 1:
                    crc = ((crc >> 1) & 0x7FFFFFFF) ^ poly
                else:
                    crc = ((crc >> 1) & 0x7FFFFFFF)
            table[i] = crc
        return table
    # Computed once at class-definition time and shared by all instances.
    crctable = _GenerateCRCTable()
    def _crc32(self, ch, crc):
        """Compute the CRC32 primitive on one byte."""
        return ((crc >> 8) & 0xffffff) ^ self.crctable[(crc ^ ord(ch)) & 0xff]
    def __init__(self, pwd):
        # Fixed initial key values from the PKWARE encryption spec.
        self.key0 = 305419896
        self.key1 = 591751049
        self.key2 = 878082192
        # Mix every password byte into the key state.
        for p in pwd:
            self._UpdateKeys(p)
    def _UpdateKeys(self, c):
        # PKWARE key schedule: CRC step, truncated LCG step, CRC step.
        self.key0 = self._crc32(c, self.key0)
        self.key1 = (self.key1 + (self.key0 & 255)) & 4294967295
        self.key1 = (self.key1 * 134775813 + 1) & 4294967295
        self.key2 = self._crc32(chr((self.key1 >> 24) & 255), self.key2)
    def __call__(self, c):
        """Decrypt a single character."""
        c = ord(c)
        k = self.key2 | 2
        # Keystream byte derived from key2; XOR recovers the plain byte.
        c = c ^ (((k * (k^1)) >> 8) & 255)
        c = chr(c)
        # The *decrypted* byte feeds back into the key state.
        self._UpdateKeys(c)
        return c
class ZipExtFile(io.BufferedIOBase):
    """File-like object for reading an archive member.
    Is returned by ZipFile.open().
    """
    # Max size supported by decompressor.
    # NOTE(review): due to operator precedence this evaluates to 1 << 30,
    # not (1 << 31) - 1; harmless as a read cap, but not the value the
    # expression seems to intend.
    MAX_N = 1 << 31 - 1
    # Read from compressed files in 4k blocks.
    MIN_READ_SIZE = 4096
    # Search for universal newlines or line chunks.
    PATTERN = re.compile(r'^(?P<chunk>[^\r\n]+)|(?P<newline>\n|\r\n?)')
    def __init__(self, fileobj, mode, zipinfo, decrypter=None):
        self._fileobj = fileobj
        self._decrypter = decrypter
        self._compress_type = zipinfo.compress_type
        self._compress_size = zipinfo.compress_size
        # Compressed bytes still unread from the underlying file.
        self._compress_left = zipinfo.compress_size
        if self._compress_type == ZIP_DEFLATED:
            # -15: raw deflate stream with no zlib header/trailer.
            self._decompressor = zlib.decompressobj(-15)
        elif self._compress_type == ZIP_BZIP2:
            self._decompressor = bz2.BZ2Decompressor()
            # bzip2 operates on large blocks; read much bigger chunks.
            self.MIN_READ_SIZE = 900000
        # Compressed bytes read but not yet fed to the decompressor.
        self._unconsumed = ''
        # Decompressed bytes not yet handed to the caller.
        self._readbuffer = ''
        self._offset = 0
        self._universal = 'U' in mode
        self.newlines = None
        # Adjust read size for encrypted files since the first 12 bytes
        # are for the encryption/password information.
        if self._decrypter is not None:
            self._compress_left -= 12
        self.mode = mode
        self.name = zipinfo.filename
        if hasattr(zipinfo, 'CRC'):
            self._expected_crc = zipinfo.CRC
            self._running_crc = crc32(b'') & 0xffffffff
        else:
            # No stored CRC available; integrity cannot be verified.
            self._expected_crc = None
    def readline(self, limit=-1):
        """Read and return a line from the stream.
        If limit is specified, at most limit bytes will be read.
        """
        if not self._universal and limit < 0:
            # Shortcut common case - newline found in buffer.
            i = self._readbuffer.find('\n', self._offset) + 1
            if i > 0:
                line = self._readbuffer[self._offset: i]
                self._offset = i
                return line
        if not self._universal:
            return io.BufferedIOBase.readline(self, limit)
        line = ''
        while limit < 0 or len(line) < limit:
            readahead = self.peek(2)
            if readahead == '':
                return line
            #
            # Search for universal newlines or line chunks.
            #
            # The pattern returns either a line chunk or a newline, but not
            # both. Combined with peek(2), we are assured that the sequence
            # '\r\n' is always retrieved completely and never split into
            # separate newlines - '\r', '\n' due to coincidental readaheads.
            #
            match = self.PATTERN.search(readahead)
            newline = match.group('newline')
            if newline is not None:
                if self.newlines is None:
                    self.newlines = []
                if newline not in self.newlines:
                    self.newlines.append(newline)
                self._offset += len(newline)
                # Universal mode normalizes every newline style to '\n'.
                return line + '\n'
            chunk = match.group('chunk')
            if limit >= 0:
                chunk = chunk[: limit - len(line)]
            self._offset += len(chunk)
            line += chunk
        return line
    def peek(self, n=1):
        """Returns buffered bytes without advancing the position."""
        if n > len(self._readbuffer) - self._offset:
            chunk = self.read(n)
            # Rewind over what read() consumed so the position is unchanged.
            self._offset -= len(chunk)
        # Return up to 512 bytes to reduce allocation overhead for tight loops.
        return self._readbuffer[self._offset: self._offset + 512]
    def readable(self):
        return True
    def read(self, n=-1):
        """Read and return up to n bytes.
        If the argument is omitted, None, or negative, data is read and returned until EOF is reached..
        """
        buf = ''
        if n is None:
            n = -1
        while True:
            if n < 0:
                data = self.read1(n)
            elif n > len(buf):
                data = self.read1(n - len(buf))
            else:
                return buf
            if len(data) == 0:
                return buf
            buf += data
    def _update_crc(self, newdata, eof):
        # Update the CRC using the given data.
        if self._expected_crc is None:
            # No need to compute the CRC if we don't have a reference value
            return
        self._running_crc = crc32(newdata, self._running_crc) & 0xffffffff
        # Check the CRC if we're at the end of the file
        if eof and self._running_crc != self._expected_crc:
            raise BadZipfile("Bad CRC-32 for file %r" % self.name)
    def read1(self, n):
        """Read up to n bytes with at most one read() system call."""
        # Simplify algorithm (branching) by transforming negative n to large n.
        if n < 0 or n is None:
            n = self.MAX_N
        # Bytes available in read buffer.
        len_readbuffer = len(self._readbuffer) - self._offset
        # Read from file.
        if self._compress_left > 0 and n > len_readbuffer + len(self._unconsumed):
            nbytes = n - len_readbuffer - len(self._unconsumed)
            nbytes = max(nbytes, self.MIN_READ_SIZE)
            nbytes = min(nbytes, self._compress_left)
            data = self._fileobj.read(nbytes)
            self._compress_left -= len(data)
            if data and self._decrypter is not None:
                # The decrypter is stateful, so decrypt byte-by-byte.
                data = ''.join(map(self._decrypter, data))
            if self._compress_type == ZIP_STORED:
                self._update_crc(data, eof=(self._compress_left==0))
                self._readbuffer = self._readbuffer[self._offset:] + data
                self._offset = 0
            else:
                # Prepare deflated bytes for decompression.
                self._unconsumed += data
        # Handle unconsumed data.
        if (len(self._unconsumed) > 0 and n > len_readbuffer and
            self._compress_type == ZIP_DEFLATED):
            data = self._decompressor.decompress(
                self._unconsumed,
                max(n - len_readbuffer, self.MIN_READ_SIZE)
            )
            # Bytes the decompressor did not need yet are kept for later.
            self._unconsumed = self._decompressor.unconsumed_tail
            eof = len(self._unconsumed) == 0 and self._compress_left == 0
            if eof:
                data += self._decompressor.flush()
            self._update_crc(data, eof=eof)
            self._readbuffer = self._readbuffer[self._offset:] + data
            self._offset = 0
        elif (len(self._unconsumed) > 0 and n > len_readbuffer and
              self._compress_type == ZIP_BZIP2):
            # BZ2Decompressor buffers internally; feed it everything and
            # clear our copy.
            # NOTE(review): unlike the STORED and DEFLATED paths, this
            # branch never calls _update_crc, so bzip2 members are not
            # CRC-checked -- confirm whether that is intentional.
            data = self._decompressor.decompress(self._unconsumed)
            self._unconsumed = ''
            self._readbuffer = self._readbuffer[self._offset:] + data
            self._offset = 0
        # Read from buffer.
        data = self._readbuffer[self._offset: self._offset + n]
        self._offset += len(data)
        return data
class ZipFile:
""" Class with methods to open, read, write, close, list zip files.
z = ZipFile(file, mode="r", compression=ZIP_STORED, allowZip64=False)
file: Either the path to the file, or a file-like object.
If it is a path, the file will be opened and closed by ZipFile.
mode: The mode can be either read "r", write "w" or append "a".
compression: ZIP_STORED (no compression), ZIP_DEFLATED (requires zlib),
or ZIP_BZIP2 (requires bz2).
allowZip64: if True ZipFile will create files with ZIP64 extensions when
needed, otherwise it will raise an exception when this would
be necessary.
"""
fp = None # Set here since __del__ checks it
def __init__(self, file, mode="r", compression=ZIP_STORED, allowZip64=False):
"""Open the ZIP file with mode read "r", write "w" or append "a"."""
if mode not in ("r", "w", "a"):
raise RuntimeError('ZipFile() requires mode "r", "w", or "a"')
if compression == ZIP_STORED:
pass
elif compression == ZIP_DEFLATED:
if not zlib:
raise RuntimeError,\
"Compression requires the (missing) zlib module"
elif compression == ZIP_BZIP2:
if not bz2:
raise RuntimeError,\
"Compression requires the (missing) bz2 module"
else:
raise RuntimeError, "That compression method is not supported"
self._allowZip64 = allowZip64
self._didModify = False
self.debug = 0 # Level of printing: 0 through 3
self.NameToInfo = {} # Find file info given name
self.filelist = [] # List of ZipInfo instances for archive
self.compression = compression # Method of compression
self.mode = key = mode.replace('b', '')[0]
self.pwd = None
self.comment = ''
# Check if we were passed a file-like object
if isinstance(file, basestring):
self._filePassed = 0
self.filename = file
modeDict = {'r' : 'rb', 'w': 'wb', 'a' : 'r+b'}
try:
self.fp = open(file, modeDict[mode])
except IOError:
if mode == 'a':
mode = key = 'w'
self.fp = open(file, modeDict[mode])
else:
raise
else:
self._filePassed = 1
self.fp = file
self.filename = getattr(file, 'name', None)
if key == 'r':
self._GetContents()
elif key == 'w':
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
elif key == 'a':
try:
# See if file is a zip file
self._RealGetContents()
# seek to start of directory and overwrite
self.fp.seek(self.start_dir, 0)
except BadZipfile:
# file is not a zip file, just append
self.fp.seek(0, 2)
# set the modified flag so central directory gets written
# even if no files are added to the archive
self._didModify = True
else:
if not self._filePassed:
self.fp.close()
self.fp = None
raise RuntimeError, 'Mode must be "r", "w" or "a"'
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
self.close()
def _GetContents(self):
"""Read the directory, making sure we close the file if the format
is bad."""
try:
self._RealGetContents()
except BadZipfile:
if not self._filePassed:
self.fp.close()
self.fp = None
raise
def _RealGetContents(self):
"""Read in the table of contents for the ZIP file."""
fp = self.fp
try:
endrec = _EndRecData(fp)
except IOError:
raise BadZipfile("File is not a zip file")
if not endrec:
raise BadZipfile, "File is not a zip file"
if self.debug > 1:
print endrec
size_cd = endrec[_ECD_SIZE] # bytes in central directory
offset_cd = endrec[_ECD_OFFSET] # offset of central directory
self.comment = endrec[_ECD_COMMENT] # archive comment
# "concat" is zero, unless zip was concatenated to another file
concat = endrec[_ECD_LOCATION] - size_cd - offset_cd
if endrec[_ECD_SIGNATURE] == stringEndArchive64:
# If Zip64 extension structures are present, account for them
concat -= (sizeEndCentDir64 + sizeEndCentDir64Locator)
if self.debug > 2:
inferred = concat + offset_cd
print "given, inferred, offset", offset_cd, inferred, concat
# self.start_dir: Position of start of central directory
self.start_dir = offset_cd + concat
fp.seek(self.start_dir, 0)
data = fp.read(size_cd)
fp = cStringIO.StringIO(data)
total = 0
while total < size_cd:
centdir = fp.read(sizeCentralDir)
if centdir[0:4] != stringCentralDir:
raise BadZipfile, "Bad magic number for central directory"
centdir = struct.unpack(structCentralDir, centdir)
if self.debug > 2:
print centdir
filename = fp.read(centdir[_CD_FILENAME_LENGTH])
# Create ZipInfo instance to store file information
x = ZipInfo(filename)
x.extra = fp.read(centdir[_CD_EXTRA_FIELD_LENGTH])
x.comment = fp.read(centdir[_CD_COMMENT_LENGTH])
x.header_offset = centdir[_CD_LOCAL_HEADER_OFFSET]
(x.create_version, x.create_system, x.extract_version, x.reserved,
x.flag_bits, x.compress_type, t, d,
x.CRC, x.compress_size, x.file_size) = centdir[1:12]
x.volume, x.internal_attr, x.external_attr = centdir[15:18]
# Convert date/time code to (year, month, day, hour, min, sec)
x._raw_time = t
x.date_time = ( (d>>9)+1980, (d>>5)&0xF, d&0x1F,
t>>11, (t>>5)&0x3F, (t&0x1F) * 2 )
x._decodeExtra()
x.header_offset = x.header_offset + concat
x.filename = x._decodeFilename()
self.filelist.append(x)
self.NameToInfo[x.filename] = x
# update total bytes read from central directory
total = (total + sizeCentralDir + centdir[_CD_FILENAME_LENGTH]
+ centdir[_CD_EXTRA_FIELD_LENGTH]
+ centdir[_CD_COMMENT_LENGTH])
if self.debug > 2:
print "total", total
def namelist(self):
"""Return a list of file names in the archive."""
l = []
for data in self.filelist:
l.append(data.filename)
return l
    def infolist(self):
        """Return a list of class ZipInfo instances for files in the
        archive."""
        # Note: this is the live internal list, not a copy -- callers
        # must not mutate it.
        return self.filelist
    def printdir(self):
        """Print a table of contents for the zip file.

        One line per member: name, modification date, uncompressed size.
        """
        print "%-46s %19s %12s" % ("File Name", "Modified ", "Size")
        for zinfo in self.filelist:
            # date_time is (year, month, day, hour, min, sec)
            date = "%d-%02d-%02d %02d:%02d:%02d" % zinfo.date_time[:6]
            print "%-46s %s %12d" % (zinfo.filename, date, zinfo.file_size)
    def testzip(self):
        """Read all the files and check the CRC.

        Returns the name of the first member whose CRC check fails,
        or None (implicit) when every member decompresses cleanly.
        """
        chunk_size = 2 ** 20
        for zinfo in self.filelist:
            try:
                # Read by chunks, to avoid an OverflowError or a
                # MemoryError with very large embedded files.
                f = self.open(zinfo.filename, "r")
                # ZipExtFile verifies the CRC as it decompresses; a
                # mismatch surfaces as BadZipfile during read().
                while f.read(chunk_size):     # Check CRC-32
                    pass
            except BadZipfile:
                return zinfo.filename
def getinfo(self, name):
"""Return the instance of ZipInfo given 'name'."""
info = self.NameToInfo.get(name)
if info is None:
raise KeyError(
'There is no item named %r in the archive' % name)
return info
    def setpassword(self, pwd):
        """Set default password for encrypted files.

        Used by read()/open() when a member is encrypted and no
        explicit password argument is supplied.
        """
        self.pwd = pwd
    def read(self, name, pwd=None):
        """Return file bytes (as a string) for name.

        pwd: optional password for an encrypted member; open() falls
        back to the archive-level default set via setpassword().
        """
        return self.open(name, "r", pwd).read()
    def open(self, name, mode="r", pwd=None):
        """Return file-like object for 'name'.

        name: member name or a ZipInfo instance.
        mode: "r", "U" or "rU" (universal-newlines variants).
        pwd:  password for encrypted members (defaults to self.pwd).
        Raises BadZipfile for corrupt headers, RuntimeError for a bad
        mode, a closed archive, or a missing/wrong password.
        """
        if mode not in ("r", "U", "rU"):
            raise RuntimeError, 'open() requires mode "r", "U", or "rU"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to read ZIP archive that was already closed"
        # Only open a new file for instances where we were not
        # given a file object in the constructor
        if self._filePassed:
            zef_file = self.fp
        else:
            zef_file = open(self.filename, 'rb')
        # Make sure we have an info object
        if isinstance(name, ZipInfo):
            # 'name' is already an info object
            zinfo = name
        else:
            # Get info object for name
            zinfo = self.getinfo(name)
        zef_file.seek(zinfo.header_offset, 0)
        # Skip the file header:
        fheader = zef_file.read(sizeFileHeader)
        if fheader[0:4] != stringFileHeader:
            raise BadZipfile, "Bad magic number for file header"
        fheader = struct.unpack(structFileHeader, fheader)
        fname = zef_file.read(fheader[_FH_FILENAME_LENGTH])
        if fheader[_FH_EXTRA_FIELD_LENGTH]:
            zef_file.read(fheader[_FH_EXTRA_FIELD_LENGTH])
        # The local header must agree with the central directory; a
        # mismatch indicates a corrupt or tampered archive.
        if fname != zinfo.orig_filename:
            raise BadZipfile, \
                      'File name in directory "%s" and header "%s" differ.' % (
                          zinfo.orig_filename, fname)
        # check for encrypted flag & handle password
        is_encrypted = zinfo.flag_bits & 0x1
        zd = None
        if is_encrypted:
            if not pwd:
                pwd = self.pwd
            if not pwd:
                raise RuntimeError, "File %s is encrypted, " \
                      "password required for extraction" % name
            zd = _ZipDecrypter(pwd)
            # The first 12 bytes in the cypher stream is an encryption header
            # used to strengthen the algorithm. The first 11 bytes are
            # completely random, while the 12th contains the MSB of the CRC,
            # or the MSB of the file time depending on the header type
            # and is used to check the correctness of the password.
            bytes = zef_file.read(12)
            h = map(zd, bytes[0:12])
            if zinfo.flag_bits & 0x8:
                # compare against the file type from extended local headers
                check_byte = (zinfo._raw_time >> 8) & 0xff
            else:
                # compare against the CRC otherwise
                check_byte = (zinfo.CRC >> 24) & 0xff
            if ord(h[11]) != check_byte:
                raise RuntimeError("Bad password for file", name)
        return ZipExtFile(zef_file, mode, zinfo, zd)
def extract(self, member, path=None, pwd=None):
"""Extract a member from the archive to the current working directory,
using its full name. Its file information is extracted as accurately
as possible. `member' may be a filename or a ZipInfo object. You can
specify a different directory using `path'.
"""
if not isinstance(member, ZipInfo):
member = self.getinfo(member)
if path is None:
path = os.getcwd()
return self._extract_member(member, path, pwd)
def extractall(self, path=None, members=None, pwd=None):
"""Extract all members from the archive to the current working
directory. `path' specifies a different directory to extract to.
`members' is optional and must be a subset of the list returned
by namelist().
"""
if members is None:
members = self.namelist()
for zipinfo in members:
self.extract(zipinfo, path, pwd)
    def _extract_member(self, member, targetpath, pwd):
        """Extract the ZipInfo object 'member' to a physical
        file on the path targetpath.

        Returns the final filesystem path of the extracted file or
        directory.
        """
        # build the destination pathname, replacing
        # forward slashes to platform specific separators.
        # Strip trailing path separator, unless it represents the root.
        if (targetpath[-1:] in (os.path.sep, os.path.altsep)
            and len(os.path.splitdrive(targetpath)[1]) > 1):
            targetpath = targetpath[:-1]
        # don't include leading "/" from file name if present
        if member.filename[0] == '/':
            targetpath = os.path.join(targetpath, member.filename[1:])
        else:
            targetpath = os.path.join(targetpath, member.filename)
        # NOTE(review): a member name containing ".." components can
        # still escape targetpath after normpath ("zip slip") -- confirm
        # archives are trusted before extracting.
        targetpath = os.path.normpath(targetpath)
        # Create all upper directories if necessary.
        upperdirs = os.path.dirname(targetpath)
        if upperdirs and not os.path.exists(upperdirs):
            os.makedirs(upperdirs)
        # A trailing slash marks a directory entry: ensure it exists,
        # there is no file data to copy.
        if member.filename[-1] == '/':
            if not os.path.isdir(targetpath):
                os.mkdir(targetpath)
            return targetpath
        source = self.open(member, pwd=pwd)
        target = file(targetpath, "wb")
        shutil.copyfileobj(source, target)
        source.close()
        target.close()
        return targetpath
    def _writecheck(self, zinfo):
        """Check for errors before writing a file to the archive.

        Raises RuntimeError for a bad mode, a closed archive, or an
        unavailable/unknown compression method; raises LargeZipFile
        when ZIP64 would be required but is not enabled.
        """
        if zinfo.filename in self.NameToInfo:
            if self.debug:      # Warning for duplicate names
                print "Duplicate name:", zinfo.filename
        if self.mode not in ("w", "a"):
            raise RuntimeError, 'write() requires mode "w" or "a"'
        if not self.fp:
            raise RuntimeError, \
                  "Attempt to write ZIP archive that was already closed"
        # zlib/bz2 are module-level names that are None when the
        # corresponding import failed.
        if zinfo.compress_type == ZIP_DEFLATED and not zlib:
            raise RuntimeError, \
                  "Compression requires the (missing) zlib module"
        if zinfo.compress_type == ZIP_BZIP2 and not bz2:
            raise RuntimeError, \
                  "Compression requires the (missing) bz2 module"
        if zinfo.compress_type not in (ZIP_STORED, ZIP_DEFLATED, ZIP_BZIP2):
            raise RuntimeError, \
                  "That compression method is not supported"
        if zinfo.file_size > ZIP64_LIMIT:
            if not self._allowZip64:
                raise LargeZipFile("Filesize would require ZIP64 extensions")
        if zinfo.header_offset > ZIP64_LIMIT:
            if not self._allowZip64:
                raise LargeZipFile("Zipfile size would require ZIP64 extensions")
    def write(self, filename, arcname=None, compress_type=None):
        """Put the bytes from filename into the archive under the name
        arcname.

        compress_type overrides the archive-level default compression.
        Directories become zero-length entries with a trailing slash.
        CRC and sizes are patched back into the local header once the
        data has been streamed in.
        """
        if not self.fp:
            raise RuntimeError(
                  "Attempt to write to ZIP archive that was already closed")
        st = os.stat(filename)
        isdir = stat.S_ISDIR(st.st_mode)
        mtime = time.localtime(st.st_mtime)
        date_time = mtime[0:6]
        # Create ZipInfo instance to store file information
        if arcname is None:
            arcname = filename
        # Archive names are drive-less and have no leading separators.
        arcname = os.path.normpath(os.path.splitdrive(arcname)[1])
        while arcname[0] in (os.sep, os.altsep):
            arcname = arcname[1:]
        if isdir:
            arcname += '/'
        zinfo = ZipInfo(arcname, date_time)
        zinfo.external_attr = (st[0] & 0xFFFF) << 16L      # Unix attributes
        if compress_type is None:
            zinfo.compress_type = self.compression
        else:
            zinfo.compress_type = compress_type
        zinfo.file_size = st.st_size
        zinfo.flag_bits = 0x00
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        self._writecheck(zinfo)
        self._didModify = True
        if isdir:
            # Directory entry: header only, no data to stream.
            zinfo.file_size = 0
            zinfo.compress_size = 0
            zinfo.CRC = 0
            self.filelist.append(zinfo)
            self.NameToInfo[zinfo.filename] = zinfo
            self.fp.write(zinfo.FileHeader())
            return
        with open(filename, "rb") as fp:
            # Must overwrite CRC and sizes with correct data later
            zinfo.CRC = CRC = 0
            zinfo.compress_size = compress_size = 0
            zinfo.file_size = file_size = 0
            self.fp.write(zinfo.FileHeader())
            if zinfo.compress_type == ZIP_DEFLATED:
                # -15: raw deflate stream, no zlib header/trailer.
                cmpr = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                     zlib.DEFLATED, -15)
            elif zinfo.compress_type == ZIP_BZIP2:
                cmpr = bz2.BZ2Compressor()
            else:
                cmpr = None
            # Stream the file in 8 KB chunks, accumulating the CRC and
            # both sizes as we go.
            while 1:
                buf = fp.read(1024 * 8)
                if not buf:
                    break
                file_size = file_size + len(buf)
                CRC = crc32(buf, CRC) & 0xffffffff
                if cmpr:
                    buf = cmpr.compress(buf)
                    compress_size = compress_size + len(buf)
                self.fp.write(buf)
        if cmpr:
            buf = cmpr.flush()
            compress_size = compress_size + len(buf)
            self.fp.write(buf)
            zinfo.compress_size = compress_size
        else:
            zinfo.compress_size = file_size
        zinfo.CRC = CRC
        zinfo.file_size = file_size
        # Seek backwards and write CRC and file sizes
        position = self.fp.tell()       # Preserve current position in file
        # header_offset + 14 is the fixed offset of the CRC field in
        # the local file header.
        self.fp.seek(zinfo.header_offset + 14, 0)
        self.fp.write(struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
              zinfo.file_size))
        self.fp.seek(position, 0)
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
    def writestr(self, zinfo_or_arcname, bytes, compress_type=None):
        """Write a file into the archive. The contents is the string
        'bytes'. 'zinfo_or_arcname' is either a ZipInfo instance or
        the name of the file in the archive."""
        if not isinstance(zinfo_or_arcname, ZipInfo):
            zinfo = ZipInfo(filename=zinfo_or_arcname,
                            date_time=time.localtime(time.time())[:6])
            zinfo.compress_type = self.compression
            # Plain-name entries default to rw------- permissions.
            zinfo.external_attr = 0600 << 16
        else:
            zinfo = zinfo_or_arcname
        if not self.fp:
            raise RuntimeError(
                  "Attempt to write to ZIP archive that was already closed")
        if compress_type is not None:
            zinfo.compress_type = compress_type
        zinfo.file_size = len(bytes)            # Uncompressed size
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        self._writecheck(zinfo)
        self._didModify = True
        zinfo.CRC = crc32(bytes) & 0xffffffff   # CRC-32 checksum
        if zinfo.compress_type == ZIP_DEFLATED:
            # -15: raw deflate stream, no zlib header/trailer.
            co = zlib.compressobj(zlib.Z_DEFAULT_COMPRESSION,
                 zlib.DEFLATED, -15)
            bytes = co.compress(bytes) + co.flush()
            zinfo.compress_size = len(bytes)    # Compressed size
        elif zinfo.compress_type == ZIP_BZIP2:
            co = bz2.BZ2Compressor()
            bytes = co.compress(bytes) + co.flush()
            zinfo.compress_size = len(bytes)    # Compressed size
        else:
            zinfo.compress_size = zinfo.file_size
        # NOTE(review): header_offset was already assigned above and
        # nothing is written to self.fp in between, so this repeated
        # assignment is a harmless redundancy.
        zinfo.header_offset = self.fp.tell()    # Start of header bytes
        self.fp.write(zinfo.FileHeader())
        self.fp.write(bytes)
        self.fp.flush()
        if zinfo.flag_bits & 0x08:
            # Write CRC and file sizes after the file data
            self.fp.write(struct.pack("<LLL", zinfo.CRC, zinfo.compress_size,
                  zinfo.file_size))
        self.filelist.append(zinfo)
        self.NameToInfo[zinfo.filename] = zinfo
    def __del__(self):
        """Call the "close()" method in case the user forgot."""
        # Safe to call repeatedly: close() returns early when self.fp
        # is already None.
        self.close()
    def close(self):
        """Close the file, and for mode "w" and "a" write the ending
        records.

        Writes the central directory (one record per member), the
        optional ZIP64 end records when limits are exceeded, and the
        end-of-central-directory record including the archive comment.
        """
        if self.fp is None:
            return
        if self.mode in ("w", "a") and self._didModify: # write ending records
            count = 0
            pos1 = self.fp.tell()
            for zinfo in self.filelist:         # write central directory
                count = count + 1
                dt = zinfo.date_time
                # Pack date/time back into the 16-bit DOS fields.
                dosdate = (dt[0] - 1980) << 9 | dt[1] << 5 | dt[2]
                dostime = dt[3] << 11 | dt[4] << 5 | (dt[5] // 2)
                extra = []
                if zinfo.file_size > ZIP64_LIMIT \
                        or zinfo.compress_size > ZIP64_LIMIT:
                    # Oversized values go to the ZIP64 extra field; the
                    # fixed-width fields hold the 0xffffffff sentinel.
                    extra.append(zinfo.file_size)
                    extra.append(zinfo.compress_size)
                    file_size = 0xffffffff
                    compress_size = 0xffffffff
                else:
                    file_size = zinfo.file_size
                    compress_size = zinfo.compress_size
                if zinfo.header_offset > ZIP64_LIMIT:
                    extra.append(zinfo.header_offset)
                    header_offset = 0xffffffffL
                else:
                    header_offset = zinfo.header_offset
                extra_data = zinfo.extra
                if extra:
                    # Append a ZIP64 field to the extra's
                    extra_data = struct.pack(
                            '<HH' + 'Q'*len(extra),
                            1, 8*len(extra), *extra) + extra_data
                    # Version 4.5 is the minimum for ZIP64.
                    extract_version = max(45, zinfo.extract_version)
                    create_version = max(45, zinfo.create_version)
                else:
                    extract_version = zinfo.extract_version
                    create_version = zinfo.create_version
                try:
                    filename, flag_bits = zinfo._encodeFilenameFlags()
                    centdir = struct.pack(structCentralDir,
                     stringCentralDir, create_version,
                     zinfo.create_system, extract_version, zinfo.reserved,
                     flag_bits, zinfo.compress_type, dostime, dosdate,
                     zinfo.CRC, compress_size, file_size,
                     len(filename), len(extra_data), len(zinfo.comment),
                     0, zinfo.internal_attr, zinfo.external_attr,
                     header_offset)
                except DeprecationWarning:
                    # Dump the offending values for diagnosis, then
                    # re-raise.
                    print >>sys.stderr, (structCentralDir,
                     stringCentralDir, create_version,
                     zinfo.create_system, extract_version, zinfo.reserved,
                     zinfo.flag_bits, zinfo.compress_type, dostime, dosdate,
                     zinfo.CRC, compress_size, file_size,
                     len(zinfo.filename), len(extra_data), len(zinfo.comment),
                     0, zinfo.internal_attr, zinfo.external_attr,
                     header_offset)
                    raise
                self.fp.write(centdir)
                self.fp.write(filename)
                self.fp.write(extra_data)
                self.fp.write(zinfo.comment)
            pos2 = self.fp.tell()
            # Write end-of-zip-archive record
            centDirCount = count
            centDirSize = pos2 - pos1
            centDirOffset = pos1
            if (centDirCount >= ZIP_FILECOUNT_LIMIT or
                centDirOffset > ZIP64_LIMIT or
                centDirSize > ZIP64_LIMIT):
                # Need to write the ZIP64 end-of-archive records
                zip64endrec = struct.pack(
                        structEndArchive64, stringEndArchive64,
                        44, 45, 45, 0, 0, centDirCount, centDirCount,
                        centDirSize, centDirOffset)
                self.fp.write(zip64endrec)
                zip64locrec = struct.pack(
                        structEndArchive64Locator,
                        stringEndArchive64Locator, 0, pos2, 1)
                self.fp.write(zip64locrec)
                centDirCount = min(centDirCount, 0xFFFF)
                centDirSize = min(centDirSize, 0xFFFFFFFF)
                centDirOffset = min(centDirOffset, 0xFFFFFFFF)
            # check for valid comment length
            if len(self.comment) >= ZIP_MAX_COMMENT:
                # NOTE(review): msg is assigned but never printed or
                # used -- presumably a print was intended here.
                if self.debug > 0:
                    msg = 'Archive comment is too long; truncating to %d bytes' \
                          % ZIP_MAX_COMMENT
                self.comment = self.comment[:ZIP_MAX_COMMENT]
            endrec = struct.pack(structEndArchive, stringEndArchive,
                                 0, 0, centDirCount, centDirCount,
                                 centDirSize, centDirOffset, len(self.comment))
            self.fp.write(endrec)
            self.fp.write(self.comment)
            self.fp.flush()
        if not self._filePassed:
            self.fp.close()
        self.fp = None
class PyZipFile(ZipFile):
    """Class to create ZIP archives with Python library files and packages."""

    def writepy(self, pathname, basename = ""):
        """Add all files from "pathname" to the ZIP archive.

        If pathname is a package directory, search the directory and
        all package subdirectories recursively for all *.py and enter
        the modules into the archive. If pathname is a plain
        directory, listdir *.py and enter all modules. Else, pathname
        must be a Python *.py file and the module will be put into the
        archive. Added modules are always module.pyo or module.pyc.
        This method will compile the module.py into module.pyc if
        necessary.
        """
        dir, name = os.path.split(pathname)
        if os.path.isdir(pathname):
            initname = os.path.join(pathname, "__init__.py")
            if os.path.isfile(initname):
                # This is a package directory, add it
                if basename:
                    basename = "%s/%s" % (basename, name)
                else:
                    basename = name
                if self.debug:
                    print "Adding package in", pathname, "as", basename
                fname, arcname = self._get_codename(initname[0:-3], basename)
                if self.debug:
                    print "Adding", arcname
                self.write(fname, arcname)
                dirlist = os.listdir(pathname)
                # __init__.py was already added above.
                dirlist.remove("__init__.py")
                # Add all *.py files and package subdirectories
                for filename in dirlist:
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if os.path.isdir(path):
                        if os.path.isfile(os.path.join(path, "__init__.py")):
                            # This is a package directory, add it
                            self.writepy(path, basename)  # Recursive call
                    elif ext == ".py":
                        fname, arcname = self._get_codename(path[0:-3],
                                         basename)
                        if self.debug:
                            print "Adding", arcname
                        self.write(fname, arcname)
            else:
                # This is NOT a package directory, add its files at top level
                if self.debug:
                    print "Adding files from directory", pathname
                for filename in os.listdir(pathname):
                    path = os.path.join(pathname, filename)
                    root, ext = os.path.splitext(filename)
                    if ext == ".py":
                        fname, arcname = self._get_codename(path[0:-3],
                                         basename)
                        if self.debug:
                            print "Adding", arcname
                        self.write(fname, arcname)
        else:
            if pathname[-3:] != ".py":
                raise RuntimeError, \
                      'Files added with writepy() must end with ".py"'
            fname, arcname = self._get_codename(pathname[0:-3], basename)
            if self.debug:
                print "Adding file", arcname
            self.write(fname, arcname)

    def _get_codename(self, pathname, basename):
        """Return (filename, archivename) for the path.

        Given a module name path, return the correct file path and
        archive name, compiling if necessary. For example, given
        /python/lib/string, return (/python/lib/string.pyc, string).
        """
        file_py = pathname + ".py"
        file_pyc = pathname + ".pyc"
        file_pyo = pathname + ".pyo"
        # Prefer an up-to-date .pyo; otherwise (re)compile the .pyc
        # when it is stale or missing.
        if os.path.isfile(file_pyo) and \
           os.stat(file_pyo).st_mtime >= os.stat(file_py).st_mtime:
            fname = file_pyo    # Use .pyo file
        elif not os.path.isfile(file_pyc) or \
             os.stat(file_pyc).st_mtime < os.stat(file_py).st_mtime:
            import py_compile
            if self.debug:
                print "Compiling", file_py
            try:
                py_compile.compile(file_py, file_pyc, None, True)
            except py_compile.PyCompileError,err:
                # Compilation failure is reported but not fatal; the
                # (possibly stale) .pyc name is still returned.
                print err.msg
            fname = file_pyc
        else:
            fname = file_pyc
        archivename = os.path.split(fname)[1]
        if basename:
            archivename = "%s/%s" % (basename, archivename)
        return (fname, archivename)
def main(args = None):
    """Command-line interface: -l list, -t test, -e extract, -c create."""
    import textwrap
    USAGE=textwrap.dedent("""\
        Usage:
            zipfile.py -l zipfile.zip # Show listing of a zipfile
            zipfile.py -t zipfile.zip # Test if a zipfile is valid
            zipfile.py -e zipfile.zip target # Extract zipfile into target dir
            zipfile.py -c zipfile.zip src ... # Create zipfile from sources
        """)
    if args is None:
        args = sys.argv[1:]
    if not args or args[0] not in ('-l', '-c', '-e', '-t'):
        print USAGE
        sys.exit(1)
    if args[0] == '-l':
        if len(args) != 2:
            print USAGE
            sys.exit(1)
        zf = ZipFile(args[1], 'r')
        zf.printdir()
        zf.close()
    elif args[0] == '-t':
        if len(args) != 2:
            print USAGE
            sys.exit(1)
        zf = ZipFile(args[1], 'r')
        badfile = zf.testzip()
        if badfile:
            print("The following enclosed file is corrupted: {!r}".format(badfile))
        print "Done testing"
    elif args[0] == '-e':
        if len(args) != 3:
            print USAGE
            sys.exit(1)
        zf = ZipFile(args[1], 'r')
        out = args[2]
        for path in zf.namelist():
            # Strip a leading "./" so extraction lands directly in `out`.
            if path.startswith('./'):
                tgt = os.path.join(out, path[2:])
            else:
                tgt = os.path.join(out, path)
            tgtdir = os.path.dirname(tgt)
            if not os.path.exists(tgtdir):
                os.makedirs(tgtdir)
            with open(tgt, 'wb') as fp:
                fp.write(zf.read(path))
        zf.close()
    elif args[0] == '-c':
        if len(args) < 3:
            print USAGE
            sys.exit(1)
        def addToZip(zf, path, zippath):
            # Recursively add files; directories are walked, any other
            # filesystem object is silently skipped.
            if os.path.isfile(path):
                zf.write(path, zippath, ZIP_DEFLATED)
            elif os.path.isdir(path):
                for nm in os.listdir(path):
                    addToZip(zf,
                            os.path.join(path, nm), os.path.join(zippath, nm))
            # else: ignore
        zf = ZipFile(args[1], 'w', allowZip64=True)
        for src in args[2:]:
            addToZip(zf, src, os.path.basename(src))
        zf.close()
# Allow running this module directly as a small zip CLI tool.
if __name__ == "__main__":
    main()
| Python |
import os.path
import re
import shutil
import struct
import subprocess
import sys
import zipfile2 as zipfile
import bz2
def import_boto():
    """Import boto's S3 classes and awscreds into module globals.

    Prints install instructions and re-raises if boto is missing;
    exits the process if awscreds.py (aws credentials) is missing.
    """
    global Key, S3Connection, awscreds
    try:
        from boto.s3.key import Key
        from boto.s3.connection import S3Connection
    except ImportError:
        # Catch only ImportError: the previous bare `except:` also
        # swallowed KeyboardInterrupt/SystemExit and masked unrelated
        # bugs inside boto.
        print("You need boto library (http://code.google.com/p/boto/)")
        print("svn checkout http://boto.googlecode.com/svn/trunk/ boto")
        print("cd boto; python setup.py install")
        raise
    try:
        import awscreds
    except ImportError:
        print("awscreds.py file needed with access and secret globals for aws access")
        sys.exit(1)
def log(s):
    """Print *s* and flush stdout so progress shows up immediately,
    even when output is piped or redirected."""
    print(s)
    sys.stdout.flush()
def group(list, size):
    """Yield consecutive slices of *list*, each at most *size* items long."""
    total = len(list)
    start = 0
    while start < total:
        yield list[start:start + size]
        start += size
def uniquify(array):
    """Return the distinct elements of *array* as a list.

    Element order is not preserved (set-based deduplication).
    """
    return [item for item in set(array)]
def test_for_flag(args, arg, has_data=False):
if arg not in args:
return None if has_data else False
if not has_data:
args.remove(arg)
return True
ix = args.index(arg)
if ix == len(args) - 1:
return None
data = args[ix + 1]
args.pop(ix + 1)
args.pop(ix)
return data
# Public S3 bucket all uploads go to.
S3_BUCKET = "kjkpub"
# Lazily created singleton connection; see s3connection().
g_s3conn = None
def s3connection():
    """Return the singleton S3Connection, creating it on first use."""
    global g_s3conn
    if g_s3conn is None:
        import_boto()
        # awscreds is imported into module globals by import_boto().
        g_s3conn = S3Connection(awscreds.access, awscreds.secret, True)
    return g_s3conn
def s3PubBucket():
    """Return the public S3 bucket object used for all uploads."""
    return s3connection().get_bucket(S3_BUCKET)
def ul_cb(sofar, total):
    """Upload progress callback: print bytes transferred so far."""
    print("So far: %d, total: %d" % (sofar , total))
def s3UploadFilePublic(local_file_name, remote_file_name):
    """Upload a local file to S3 under remote_file_name and make it
    world-readable."""
    log("s3 upload '%s' as '%s'" % (local_file_name, remote_file_name))
    bucket = s3PubBucket()
    k = Key(bucket)
    k.key = remote_file_name
    # ul_cb prints periodic progress during the transfer.
    k.set_contents_from_filename(local_file_name, cb=ul_cb)
    k.make_public()
def s3UploadDataPublic(data, remote_file_name):
    """Upload an in-memory string to S3 under remote_file_name and make
    it world-readable."""
    log("s3 upload data as '%s'" % remote_file_name)
    bucket = s3PubBucket()
    k = Key(bucket)
    k.key = remote_file_name
    k.set_contents_from_string(data)
    k.make_public()
def ensure_s3_doesnt_exist(remote_file_path):
    """Exit the process if remote_file_path already exists in the bucket.

    Guards against accidentally overwriting a published release.
    """
    bucket = s3PubBucket()
    if not bucket.get_key(remote_file_path):
        return
    print("'%s' already exists on s3" % remote_file_path)
    sys.exit(1)
def ensure_path_exists(path):
    """Exit the process with an error message unless *path* exists."""
    if not os.path.exists(path):
        print("path '%s' doesn't exist" % path)
        sys.exit(1)
def verify_started_in_right_directory():
    """Exit unless run from the top of the source tree.

    Checks for scripts/build-release.py both relative to the current
    directory and via an absolute path.
    """
    p1 = os.path.join("scripts", "build-release.py")
    p2 = os.path.join(os.getcwd(), "scripts", "build-release.py")
    if not (os.path.exists(p1) and os.path.exists(p2)):
        print("This script must be run from top of the source tree")
        sys.exit(1)
# like cmdrun() but throws an exception on failure
def run_cmd_throw(*args):
    """Run external command *args* and return (stdout, stderr).

    On a non-zero exit code, echoes the captured output for diagnosis
    and raises Exception.
    """
    cmd = " ".join(args)
    print("\nrun_cmd_throw: '%s'" % cmd)
    proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.PIPE)
    out, err = proc.communicate()
    errcode = proc.returncode
    if errcode != 0:
        print("Failed with error code %d" % errcode)
        print("Stdout:")
        print(out)
        print("Stderr:")
        print(err)
        raise Exception("'%s' failed with error code %d" % (cmd, errcode))
    return (out, err)
# Parse output of svn info and return revision number indicated by
# "Last Changed Rev" field or, if that doesn't exist, by "Revision" field
def parse_svninfo_out(txt):
    """Return the svn revision (as a string) parsed from `svn info` output."""
    for pattern in (r'(?m)^Last Changed Rev: (\d+)', r'(?m)^Revision: (\d+)'):
        matches = re.findall(pattern, txt)
        if matches:
            return matches[0]
    raise Exception("parse_svn_info_out() failed to parse '%s'" % txt)
# version line is in the format:
# #define CURR_VERSION 1.1
def extract_sumatra_version(file_path):
    """Return the CURR_VERSION string (e.g. "1.1") found in *file_path*.

    Raises IndexError if the file contains no CURR_VERSION definition.
    """
    # Use a with-block so the file handle is closed deterministically;
    # the previous open(file_path).read() leaked the handle until GC.
    with open(file_path) as fo:
        content = fo.read()
    ver = re.findall(r'CURR_VERSION (\d+(?:\.\d+)*)', content)[0]
    return ver
def zip_file_add(dst_zip_file, src, src_name=None, compress=True, append=False):
    """Add file *src* to the zip archive *dst_zip_file*.

    src_name: name stored inside the archive; defaults to basename(src).
    compress: DEFLATE the data when True, store uncompressed otherwise.
    append:   open the archive in append mode instead of overwriting.
    """
    mode = "a" if append else "w"
    compression = zipfile.ZIP_DEFLATED if compress else zipfile.ZIP_STORED
    if src_name is None:
        src_name = os.path.basename(src)
    zf = zipfile.ZipFile(dst_zip_file, mode, compression)
    try:
        zf.write(src, src_name)
    finally:
        # Always close so the central directory is written and the
        # handle is not leaked even when write() raises.
        zf.close()
# build the .zip with installer data, will be included as part of
# Installer.exe resources
def build_installer_data(dir):
    """Build <dir>/InstallerData.zip holding the files Installer.exe embeds.

    Uses BZIP2 compression (ZIP_BZIP2 comes from the zipfile2 module
    imported as `zipfile` at the top of this file).
    """
    zf = zipfile.ZipFile(os.path.join(dir, "InstallerData.zip"), "w", zipfile.ZIP_BZIP2)
    # The no-MuPDF build is stored under the canonical exe name.
    exe = os.path.join(dir, "SumatraPDF-no-MuPDF.exe")
    zf.write(exe, "SumatraPDF.exe")
    for f in ["libmupdf.dll", "npPdfViewer.dll", "PdfFilter.dll", "PdfPreview.dll", "uninstall.exe"]:
        zf.write(os.path.join(dir, f), f)
    font_path = os.path.join("mupdf", "fonts", "droid", "DroidSansFallback.ttf")
    zf.write(font_path, "DroidSansFallback.ttf")
    zf.close()
| Python |
#!/usr/bin/python
"""
Build LevelDB dlls, build a zip and upload to s3.
Command line arguments:
-test : run all tests
-upload : upload the zip
"""
import os
import os.path
import shutil
import sys
import time
import re
import json
from util import log, run_cmd_throw, test_for_flag, s3UploadFilePublic, import_boto
from util import s3UploadDataPublic, ensure_s3_doesnt_exist, ensure_path_exists
from util import zip_file_add
# This is version that LevelDB reports. It's in
# http://code.google.com/p/leveldb/source/browse/include/leveldb/db.h
# under kMajorVersion, kMinorVersion
gVersion = "1.2"
# Incrementally increasing revision that identifies my revisions.
# They might happen because of merging new code from LevelDB (they don't always
# update kMinorVersion after changing code) or making changes to my port.
gRevision = 1
# Combined human-readable version, e.g. "1.2 rev 1".
gVer = "%s rev %d" % (gVersion, gRevision)
# The format of release notes is:
# - a list for each version
# - first element of the list is version
# - second element is a date on which the release was made
# - rest are html fragments that will be displayed as <li> items on a html page
gReleaseNotes = [
 ["1.2 rev 1", "2011-??-??",
 "first release",
 "based on <a href='http://code.google.com/p/leveldb/source/detail?r=299ccedfeca1fb3497978c288e76008a5c08e899'>http://code.google.com/p/leveldb/source/detail?r=299ccedfeca1fb3497978c288e76008a5c08e899</a>",
 "<a href='http://kjkpub.s3.amazonaws.com/software/leveldb/rel/LevelDB-1.2-rev-1.zip'>LevelDB-1.2-rev-1.zip</a>"]
]
# Command-line flags, consumed destructively from sys.argv by test_for_flag().
args = sys.argv[1:]
upload = test_for_flag(args, "-upload") or test_for_flag(args, "upload")
# we force test if we're uploading
test = test_for_flag(args, "-test") or test_for_flag(args, "test") or upload
def usage():
    """Print CLI usage and exit with a non-zero status."""
    print("build.py [-test][-upload]")
    sys.exit(1)
# Directory inside the S3 bucket where release zips are published.
s3_dir = "software/leveldb/rel"
def s3_zip_name():
    """Return the full S3 key for this release's zip file."""
    basename = "LevelDB-%s-rev-%d.zip" % (gVersion, gRevision)
    return "%s/%s" % (s3_dir, basename)
def zip_name():
    """Return the local file name of this release's zip."""
    return "LevelDB-" + gVersion + "-rev-" + str(gRevision) + ".zip"
# Artifacts a successful build must produce (each with a matching .pdb).
dll_file = "libleveldb.dll"
dbbench_exe = "db_bench.exe"
# All test executables; run_tests() executes each one.
test_exes = ["filename_test.exe", "db_test.exe", "corruption_test.exe", "arena_test.exe", "coding_test.exe", "env_test.exe", "memenv_test.exe", "version_edit_test.exe", "c_test.exe", "skiplist_test.exe", "version_set_test.exe", "cache_test.exe", "crc32c_test.exe", "dbformat_test.exe", "log_test.exe", "write_batch_test.exe", "table_test.exe"]
build_files = test_exes + [dll_file] + [dbbench_exe]
def verify_build_ok(build_dir):
    """Abort (via ensure_path_exists) unless every expected build
    artifact and its matching .pdb exists in *build_dir*."""
    for name in build_files:
        artifact = os.path.join(build_dir, name)
        ensure_path_exists(artifact)
        # Every exe/dll must ship with its debug symbols.
        ensure_path_exists(os.path.splitext(artifact)[0] + ".pdb")
def run_tests(build_dir):
    """Execute every test exe in *build_dir*, then the benchmark.

    run_cmd_throw() aborts the run on the first failing executable.
    """
    total = len(test_exes)
    for curr, name in enumerate(test_exes, start=1):
        exe = os.path.join(build_dir, name)
        print("Running test %d/%d %s" % (curr, total, exe))
        out, err = run_cmd_throw(exe)
        print(out + err)
    bench = os.path.join(build_dir, dbbench_exe)
    print("Running %s" % bench)
    run_cmd_throw(bench)
def build_and_test(build_dir, target):
    """Build *target* via build.bat, verify artifacts, optionally run tests.

    Tests run only when the module-level `test` flag is set.
    """
    #shutil.rmtree(build_dir, ignore_errors=True)
    run_cmd_throw("cmd.exe", "/c", "build.bat", target)
    verify_build_ok(build_dir)
    if test: run_tests(build_dir)
def build_zip():
    """Assemble the release zip: readme, public headers, 32/64-bit dlls."""
    zip_file_add(zip_name(), "zip-readme.txt", "readme.txt", compress=True, append=True)
    # Public headers go under include/leveldb/ inside the archive.
    include_path = os.path.join("..", "include", "leveldb")
    for f in os.listdir(include_path):
        p = os.path.join(include_path, f)
        zip_file_add(zip_name(), p, "include/leveldb/" + f, compress=True, append=True)
    # The same three artifacts exist for both builds; only the source
    # and destination directories differ (previously two copy-pasted
    # loops).
    dll_files = ["libleveldb.dll", "libleveldb.lib", "libleveldb.pdb"]
    for dll_dir, zip_dir in (("rel", "32bit"), ("rel64bit", "64bit")):
        for f in dll_files:
            p = os.path.join(dll_dir, f)
            zippath = zip_dir + "/" + f
            zip_file_add(zip_name(), p, zippath, compress=True, append=True)
def build_s3_js():
    """Return the javascript snippet describing the latest release.

    Uploaded next to the release so web pages can query version info.
    """
    lines = [
        'var latestVer = "%s";' % gVer,
        'var builtOn = "%s";' % time.strftime("%Y-%m-%d"),
        'var zipUrl = "http://kjkpub.s3.amazonaws.com/%s";' % s3_zip_name(),
        'var relNotes = %s;' % json.dumps(gReleaseNotes),
    ]
    return "\n".join(lines) + "\n"
def upload_to_s3():
    """Upload the release zip and refresh the 'latest version' js file."""
    s3UploadFilePublic(zip_name(), s3_zip_name())
    jstxt = build_s3_js()
    s3UploadDataPublic(jstxt, "sumatrapdf/sumatralatest.js")
def main():
    """Entry point: build 32- and 64-bit targets, zip, optionally upload."""
    if len(args) != 0:
        usage()
    if upload:
        import_boto()
        # Fail fast, before the lengthy build, if the release exists.
        ensure_s3_doesnt_exist(s3_zip_name())
    # Run from the script's own directory so relative paths resolve.
    mydir = os.path.dirname(os.path.realpath(__file__))
    print(mydir)
    os.chdir(mydir)
    build_and_test("rel", "Just32rel")
    build_and_test("rel64bit", "Just64rel")
    build_zip()
    if upload: upload_to_s3()
# Script entry point.
if __name__ == "__main__":
    main()
| Python |
#!/usr/bin/env python
"""
tesshelper.py -- Utility operations to compare, report stats, and copy
public headers for tesseract 3.0x VS2008 Project
$RCSfile: tesshelper.py,v $ $Revision: 7ca575b377aa $ $Date: 2012/03/07 17:26:31 $
"""
r"""
Requires:
python 2.7 or greater: activestate.com
http://www.activestate.com/activepython/downloads
because using the new argparse module and new literal set syntax (s={1, 2}) .
General Notes:
--------------
Format for a .vcproj file entry:
<File
RelativePath="..\src\allheaders.h"
>
</File>
"""
epilogStr = r"""
Examples:
Assume that tesshelper.py is in c:\buildfolder\tesseract-3.02\vs2008,
which is also the current directory. Then,
python tesshelper .. compare
will compare c:\buildfolder\tesseract-3.02 "library" directories to the
libtesseract Project
(c:\buildfolder\tesseract-3.02\vs2008\libtesseract\libtesseract.vcproj).
python tesshelper .. report
will display summary stats for c:\buildfolder\tesseract-3.02 "library"
directories and the libtesseract Project.
python tesshelper .. copy ..\..\include
will copy all "public" libtesseract header files to
c:\buildfolder\include.
python tesshelper .. clean
will clean the vs2008 folder of all build directories, and .user, .suo,
.ncb, and other temp files.
"""
# imports of python standard library modules
# See Python Documentation | Library Reference for details
import collections
import glob
import argparse
import os
import re
import shutil
import sys
# ====================================================================
# Version stamp derived from the RCS $Date$ keyword above.
VERSION = "1.0 %s" % "$Date: 2012/03/07 17:26:31 $".split()[1]
# Location of the libtesseract project relative to the tesseract root.
PROJ_SUBDIR = r"vs2008\libtesseract"
PROJFILE = "libtesseract.vcproj"
# Output files for ready-to-paste .vcproj <File> items (see tessCompare).
NEWHEADERS_FILENAME = "newheaders.txt"
NEWSOURCES_FILENAME = "newsources.txt"
fileNodeTemplate = \
''' <File
RelativePath="..\..\%s"
>
</File>
'''
# ====================================================================
def getProjectfiles(libTessDir, libProjectFile, nTrimChars):
    """Return sets of all, c, h, and resources files in libtesseract Project.

    Returns a 4-tuple:
      projectFilesSet -- every project file as a root-relative,
                         lower-cased path (first nTrimChars trimmed)
      projectHFiles   -- .h entries exactly as written in the .vcproj
      projectCFiles   -- .c/.cpp entries as written in the .vcproj
      projectRFiles   -- .rc entries as written in the .vcproj
    """
    #extract filenames of header & source files from the .vcproj
    projectCFiles = set()
    projectHFiles = set()
    projectRFiles = set()
    projectFilesSet = set()
    f = open(libProjectFile, "r")
    data = f.read()
    f.close()
    # Pull every RelativePath="..." attribute (case-insensitive).
    projectFiles = re.findall(r'(?i)RelativePath="(\.[^"]+)"', data)
    for projectFile in projectFiles:
        root, ext = os.path.splitext(projectFile.lower())
        if ext == ".c" or ext == ".cpp":
            projectCFiles.add(projectFile)
        elif ext == ".h":
            projectHFiles.add(projectFile)
        elif ext == ".rc":
            projectRFiles.add(projectFile)
        else:
            print "unknown file type: %s" % projectFile
        # Normalize to a tesseract-root-relative path so results are
        # directly comparable with getTessLibFiles().
        relativePath = os.path.join(libTessDir, projectFile)
        relativePath = os.path.abspath(relativePath)
        relativePath = relativePath[nTrimChars:].lower()
        projectFilesSet.add(relativePath)
    return projectFilesSet, projectHFiles, projectCFiles, projectRFiles
def getTessLibFiles(tessDir, nTrimChars):
    """Return set of all libtesseract files in tessDir.

    Paths are returned relative to the tesseract root (the leading
    nTrimChars characters are trimmed off) and lower-cased.
    """
    libDirs = [
        "api",
        "ccmain",
        "ccstruct",
        "ccutil",
        "classify",
        "cube",
        "cutil",
        "dict",
        r"neural_networks\runtime",
        "opencl",
        "textord",
        "viewer",
        "wordrec",
        #"training",
        r"vs2008\port",
        r"vs2008\libtesseract",
        ]
    # Collect every .c/.cpp/.h/.rc file from each "library" directory.
    tessFiles = set()
    for subDir in libDirs:
        baseDir = os.path.join(tessDir, subDir)
        for filetype in ("*.c", "*.cpp", "*.h", "*.rc"):
            for match in glob.glob(os.path.join(baseDir, filetype)):
                relativePath = os.path.abspath(match)[nTrimChars:].lower()
                tessFiles.add(relativePath)
    return tessFiles
# ====================================================================
def tessCompare(tessDir):
    '''Compare libtesseract Project files and actual "sub-library" files.

    Prints files that exist on disk but are missing from the Project
    ("extra") and files listed in the Project but absent on disk
    ("dead").  Ready-to-paste <File> items for the extra files are
    written to newheaders.txt / newsources.txt.
    '''
    vs2008Dir = os.path.join(tessDir, "vs2008")
    libTessDir = os.path.join(vs2008Dir, "libtesseract")
    libProjectFile = os.path.join(libTessDir,"libtesseract.vcproj")
    tessAbsDir = os.path.abspath(tessDir)
    # +1 so trimmed relative paths don't begin with a separator.
    nTrimChars = len(tessAbsDir)+1
    print 'Comparing VS2008 Project "%s" with\n "%s"' % (libProjectFile,
                                                         tessAbsDir)
    projectFilesSet, projectHFiles, projectCFiles, projectRFiles = \
        getProjectfiles(libTessDir, libProjectFile, nTrimChars)
    tessFiles = getTessLibFiles(tessDir, nTrimChars)
    extraFiles = tessFiles - projectFilesSet
    print "%2d Extra files (in %s but not in Project)" % (len(extraFiles),
                                                          tessAbsDir)
    headerFiles = []
    sourceFiles = []
    sortedList = list(extraFiles)
    sortedList.sort()
    for filename in sortedList:
        root, ext = os.path.splitext(filename.lower())
        if ext == ".h":
            headerFiles.append(filename)
        else:
            sourceFiles.append(filename)
        print " %s " % filename
    print
    print "%2d new header file items written to %s" % (len(headerFiles),
                                                       NEWHEADERS_FILENAME)
    headerFiles.sort()
    with open(NEWHEADERS_FILENAME, "w") as f:
        for filename in headerFiles:
            f.write(fileNodeTemplate % filename)
    print "%2d new source file items written to %s" % (len(sourceFiles),
                                                       NEWSOURCES_FILENAME)
    sourceFiles.sort()
    with open(NEWSOURCES_FILENAME, "w") as f:
        for filename in sourceFiles:
            f.write(fileNodeTemplate % filename)
    print
    deadFiles = projectFilesSet - tessFiles
    print "%2d Dead files (in Project but not in %s" % (len(deadFiles),
                                                        tessAbsDir)
    sortedList = list(deadFiles)
    sortedList.sort()
    for filename in sortedList:
        print " %s " % filename
# ====================================================================
def tessReport(tessDir):
"""Report summary stats on "sub-library" files and libtesseract Project file."""
vs2008Dir = os.path.join(tessDir, "vs2008")
libTessDir = os.path.join(vs2008Dir, "libtesseract")
libProjectFile = os.path.join(libTessDir,"libtesseract.vcproj")
tessAbsDir = os.path.abspath(tessDir)
nTrimChars = len(tessAbsDir)+1
projectFilesSet, projectHFiles, projectCFiles, projectRFiles = \
getProjectfiles(libTessDir, libProjectFile, nTrimChars)
tessFiles = getTessLibFiles(tessDir, nTrimChars)
print 'Summary stats for "%s" library directories' % tessAbsDir
folderCounters = {}
for tessFile in tessFiles:
tessFile = tessFile.lower()
folder, head = os.path.split(tessFile)
file, ext = os.path.splitext(head)
typeCounter = folderCounters.setdefault(folder, collections.Counter())
typeCounter[ext[1:]] += 1
folders = folderCounters.keys()
folders.sort()
totalFiles = 0
totalH = 0
totalCPP = 0
totalOther = 0
print
print " total h cpp"
print " ----- --- ---"
for folder in folders:
counters = folderCounters[folder]
nHFiles = counters['h']
nCPPFiles = counters['cpp']
total = nHFiles + nCPPFiles
totalFiles += total
totalH += nHFiles
totalCPP += nCPPFiles
print " %5d %3d %3d %s" % (total, nHFiles, nCPPFiles, folder)
print " ----- --- ---"
print " %5d %3d %3d" % (totalFiles, totalH, totalCPP)
print
print 'Summary stats for VS2008 Project "%s"' % libProjectFile
print " %5d %s" %(len(projectHFiles), "Header files")
print " %5d %s" % (len(projectCFiles), "Source files")
print " %5d %s" % (len(projectRFiles), "Resource files")
print " -----"
print " %5d" % (len(projectHFiles) + len(projectCFiles) + len(projectRFiles), )
# ====================================================================
def copyIncludes(fileSet, description, tessDir, includeDir):
"""Copy set of files to specified include dir."""
print
print 'Copying libtesseract "%s" headers to %s' % (description, includeDir)
print
sortedList = list(fileSet)
sortedList.sort()
count = 0
errList = []
for includeFile in sortedList:
filepath = os.path.join(tessDir, includeFile)
if os.path.isfile(filepath):
shutil.copy2(filepath, includeDir)
print "Copied: %s" % includeFile
count += 1
else:
print '***Error: "%s" doesn\'t exist"' % filepath
errList.append(filepath)
print '%d header files successfully copied to "%s"' % (count, includeDir)
if len(errList):
print "The following %d files were not copied:"
for filepath in errList:
print " %s" % filepath
def tessCopy(tessDir, includeDir):
    '''Copy all "public" libtesseract Project header files to include directory.
    Preserves directory hierarchy.'''
    # Core public C/C++ API headers.
    baseIncludeSet = {
        r"api\baseapi.h",
        r"api\capi.h",
        r"api\apitypes.h",
        r"ccstruct\publictypes.h",
        r"ccmain\thresholder.h",
        r"ccutil\host.h",
        r"ccutil\basedir.h",
        r"ccutil\tesscallback.h",
        r"ccutil\unichar.h",
        r"ccutil\platform.h",
        }
    # Headers pulled in (transitively) by strngs.h.
    strngIncludeSet = {
        r"ccutil\strngs.h",
        r"ccutil\memry.h",
        r"ccutil\host.h",
        r"ccutil\serialis.h",
        r"ccutil\errcode.h",
        r"ccutil\fileerr.h",
        #r"ccutil\genericvector.h",
        }
    # Headers needed by the result/page iterator API.
    resultIteratorIncludeSet = {
        r"ccmain\ltrresultiterator.h",
        r"ccmain\pageiterator.h",
        r"ccmain\resultiterator.h",
        r"ccutil\genericvector.h",
        r"ccutil\tesscallback.h",
        r"ccutil\errcode.h",
        r"ccutil\host.h",
        r"ccutil\helpers.h",
        r"ccutil\ndminx.h",
        r"ccutil\params.h",
        r"ccutil\unicharmap.h",
        r"ccutil\unicharset.h",
        }
    # Headers needed by genericvector.h (currently unused; kept for reference).
    genericVectorIncludeSet = {
        r"ccutil\genericvector.h",
        r"ccutil\tesscallback.h",
        r"ccutil\errcode.h",
        r"ccutil\host.h",
        r"ccutil\helpers.h",
        r"ccutil\ndminx.h",
        }
    # Headers needed by blobs.h (currently unused; kept for reference).
    blobsIncludeSet = {
        r"ccstruct\blobs.h",
        r"ccstruct\rect.h",
        r"ccstruct\points.h",
        r"ccstruct\ipoints.h",
        r"ccutil\elst.h",
        r"ccutil\host.h",
        r"ccutil\serialis.h",
        r"ccutil\lsterr.h",
        r"ccutil\ndminx.h",
        r"ccutil\tprintf.h",
        r"ccutil\params.h",
        r"viewer\scrollview.h",
        r"ccstruct\vecfuncs.h",
        }
    # Non-header support files copied to includeDir itself (not the
    # tesseract/ subdirectory).
    extraFilesSet = {
        #r"vs2008\include\stdint.h",
        r"vs2008\include\leptonica_versionnumbers.vsprops",
        r"vs2008\include\tesseract_versionnumbers.vsprops",
        }
    # Public headers go into includeDir\tesseract; create it if needed.
    tessIncludeDir = os.path.join(includeDir, "tesseract")
    if os.path.isfile(tessIncludeDir):
        print 'Aborting: "%s" is a file not a directory.' % tessIncludeDir
        return
    if not os.path.exists(tessIncludeDir):
        os.mkdir(tessIncludeDir)
    #fileSet = baseIncludeSet | strngIncludeSet | genericVectorIncludeSet | blobsIncludeSet
    fileSet = baseIncludeSet | strngIncludeSet | resultIteratorIncludeSet
    copyIncludes(fileSet, "public", tessDir, tessIncludeDir)
    copyIncludes(extraFilesSet, "extra", tessDir, includeDir)
# ====================================================================
def tessClean(tessDir):
    '''Clean vs2008 folder of all build directories and certain temp files.

    Interactive: asks for confirmation first, then whether to only list
    the items that would be deleted (the default) or really delete them.
    '''
    vs2008Dir = os.path.join(tessDir, "vs2008")
    vs2008AbsDir = os.path.abspath(vs2008Dir)
    answer = raw_input(
        'Are you sure you want to clean the\n "%s" folder (Yes/No) [No]? ' %
        vs2008AbsDir)
    if answer.lower() not in ("yes",):
        return
    answer = raw_input('Only list the items to be deleted (Yes/No) [Yes]? ')
    answer = answer.strip()
    # Anything except an explicit "no" keeps the safe list-only mode.
    listOnly = answer.lower() not in ("no",)
    for rootDir, dirs, files in os.walk(vs2008AbsDir):
        for buildDir in ("LIB_Release", "LIB_Debug", "DLL_Release", "DLL_Debug"):
            if buildDir in dirs:
                # Remove from dirs so os.walk doesn't descend into the
                # tree we are about to delete.
                dirs.remove(buildDir)
                absBuildDir = os.path.join(rootDir, buildDir)
                if listOnly:
                    print "Would remove: %s" % absBuildDir
                else:
                    print "Removing: %s" % absBuildDir
                    shutil.rmtree(absBuildDir)
        if rootDir == vs2008AbsDir:
            # Top level: keep only the solution, this script and the readme.
            for file in files:
                if file.lower() not in ("tesseract.sln",
                                        "tesshelper.py",
                                        "readme.txt"):
                    absPath = os.path.join(rootDir, file)
                    if listOnly:
                        print "Would remove: %s" % absPath
                    else:
                        print "Removing: %s" % absPath
                        os.remove(absPath)
        else:
            # Subdirectories: drop Visual Studio user/IntelliSense files
            # and editor backup files ending in "~".
            for file in files:
                root, ext = os.path.splitext(file)
                if ext.lower() in (".suo",
                                   ".ncb",
                                   ".user",
                                   ) or (
                                   len(ext)>0 and ext[-1] == "~"):
                    absPath = os.path.join(rootDir, file)
                    if listOnly:
                        print "Would remove: %s" % absPath
                    else:
                        print "Removing: %s" % absPath
                        os.remove(absPath)
# ====================================================================
def validateTessDir(tessDir):
    """Check that tessDir is a valid tesseract directory.

    argparse type-callable: returns tessDir unchanged on success, raises
    ArgumentTypeError when the directory or its project file is missing.
    """
    projFile = os.path.join(tessDir, PROJ_SUBDIR, PROJFILE)
    checks = ((tessDir, os.path.isdir, 'Directory'),
              (projFile, os.path.isfile, 'Project file'))
    for path, exists, kind in checks:
        if not exists(path):
            raise argparse.ArgumentTypeError('%s "%s" doesn\'t exist.' % (kind, path))
    return tessDir
def validateDir(dir):
    """Check that dir is a valid directory named include.

    argparse type-callable: returns dir unchanged on success, raises
    ArgumentTypeError otherwise.
    """
    if not os.path.isdir(dir):
        raise argparse.ArgumentTypeError('Directory "%s" doesn\'t exist.' % dir)
    # Compare the final path component of the absolute path.
    tail = os.path.basename(os.path.abspath(dir))
    if tail.lower() != "include":
        raise argparse.ArgumentTypeError('Include directory "%s" must be named "include".' % tail)
    return dir
def main ():
    """Parse the command line and dispatch to the selected sub-command."""
    parser = argparse.ArgumentParser(
        epilog=epilogStr,
        formatter_class=argparse.RawDescriptionHelpFormatter)
    parser.add_argument("--version", action="version",
                        version="%(prog)s " + VERSION)
    # tessDir is validated (directory + project file) while parsing.
    parser.add_argument('tessDir', type=validateTessDir,
                        help="tesseract installation directory")
    subparsers = parser.add_subparsers(
        dest="subparser_name",
        title="Commands")
    parser_changes = subparsers.add_parser('compare',
                                           help="compare libtesseract Project with tessDir")
    parser_changes.set_defaults(func=tessCompare)
    parser_report = subparsers.add_parser('report',
                                          help="report libtesseract summary stats")
    parser_report.set_defaults(func=tessReport)
    parser_copy = subparsers.add_parser('copy',
                                        help="copy public libtesseract header files to includeDir")
    parser_copy.add_argument('includeDir', type=validateDir,
                             help="Directory to copy header files to.")
    parser_copy.set_defaults(func=tessCopy)
    parser_clean = subparsers.add_parser('clean',
                                         help="clean vs2008 folder of build folders and .user files")
    parser_clean.set_defaults(func=tessClean)
    #kludge because argparse has no ability to set default subparser
    # (exactly one positional arg given -> assume it is tessDir, default
    # the command to 'compare')
    if (len(sys.argv) == 2):
        sys.argv.append("compare")
    args = parser.parse_args()
    #handle commands
    # 'copy' is the only command that takes a second argument.
    if args.func == tessCopy:
        args.func(args.tessDir, args.includeDir)
    else:
        args.func(args.tessDir)
# Script entry point.
if __name__ == '__main__' :
    main()
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2012 Zdenko Podobný
# Author: Zdenko Podobný
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Simple python demo script of tesseract-ocr 3.02 c-api
"""
import os
import sys
import ctypes
# Demo variables
lang = "eng"
filename = "../phototest.tif"
libpath = "/usr/local/lib64/"
libpath_w = "../vs2008/DLL_Release/"
TESSDATA_PREFIX = os.environ.get('TESSDATA_PREFIX')
if not TESSDATA_PREFIX:
TESSDATA_PREFIX = "../"
if sys.platform == "win32":
libname = libpath_w + "libtesseract302.dll"
libname_alt = "libtesseract302.dll"
os.environ["PATH"] += os.pathsep + libpath_w
else:
libname = libpath + "libtesseract.so.3.0.2"
libname_alt = "libtesseract.so.3"
try:
tesseract = ctypes.cdll.LoadLibrary(libname)
except:
try:
tesseract = ctypes.cdll.LoadLibrary(libname_alt)
except WindowsError, err:
print("Trying to load '%s'..." % libname)
print("Trying to load '%s'..." % libname_alt)
print(err)
exit(1)
tesseract.TessVersion.restype = ctypes.c_char_p
tesseract_version = tesseract.TessVersion()[:4]
# We need to check library version because libtesseract.so.3 is symlink
# and can point to other version than 3.02
if float(tesseract_version) < 3.02:
print("Found tesseract-ocr library version %s." % tesseract_version)
print("C-API is present only in version 3.02!")
exit(2)
api = tesseract.TessBaseAPICreate()
rc = tesseract.TessBaseAPIInit3(api, TESSDATA_PREFIX, lang);
if (rc):
tesseract.TessBaseAPIDelete(api)
print("Could not initialize tesseract.\n")
exit(3)
text_out = tesseract.TessBaseAPIProcessPages(api, filename, None , 0);
result_text = ctypes.string_at(text_out)
print result_text
| Python |
"""Run the vacalc acceptance tests with pybot.

Outputs go to <this dir>/results; the test data is in <this dir>/vacalc.
"""
import os
import subprocess
from os.path import dirname, join

basedir = dirname(__file__)
cmd = ['pybot', '--outputdir', join(basedir, 'results'), join(basedir, 'vacalc')]
# Bug fix: the entry separator was hard-coded to ':', which is wrong on
# Windows; os.pathsep is ':' on POSIX and ';' on Windows.
pythonpath = os.pathsep.join([join(basedir, 'lib'), join(basedir, '..', 'src')])
# NOTE(review): env= replaces the whole child environment, so pybot sees
# only PYTHONPATH (no PATH etc.) -- presumably intentional isolation;
# confirm it still launches on all platforms.
subprocess.call(' '.join(cmd), shell=True, env={'PYTHONPATH': pythonpath})
| Python |
import os
import sys
import subprocess
import datetime
import tempfile
import vacalc
class VacalcLibrary(object):
    """Test library that drives vacalc.py as a subprocess.

    Method names double as keywords; presumably used from Robot Framework
    test data (TODO confirm against the test suites).
    """

    def __init__(self):
        # Temp-dir database path, handed to the subprocess via VACALC_DB.
        self._db_file = os.path.join(tempfile.gettempdir(),
                                     'vacalc-atestdb.csv')

    def count_vacation(self, startdate, year):
        # In-process shortcut: compute vacation days without spawning the CLI.
        resource = vacalc.Employee('Test Resource', startdate)
        return vacalc.Vacation(resource.startdate, int(year)).days

    def clear_database(self):
        # Remove the shared test database so test cases start clean.
        if os.path.isfile(self._db_file):
            print 'Removing %s' % self._db_file
            os.remove(self._db_file)

    def add_employee(self, name, startdate):
        self._run('add_employee', name, startdate)

    def get_employee(self, name):
        self._run('get_employee', name)

    def show_vacation(self, name, year):
        self._run('show_vacation', name, year)

    def _run(self, command, *args):
        """Run 'python vacalc.py <command> <args...>' and capture its output."""
        cmd = [sys.executable, vacalc.__file__, command] + list(args)
        print subprocess.list2cmdline(cmd)
        proc = subprocess.Popen(cmd, stdout=subprocess.PIPE,
                                stderr=subprocess.STDOUT,
                                env={'VACALC_DB': self._db_file})
        # stdout (stderr merged in) becomes the status checked by tests.
        self._status = proc.stdout.read().strip()
        print self._status

    def status_should_be(self, status):
        # Assert on the output captured by the latest _run call.
        if self._status != status:
            raise AssertionError("Expected status to be '%s' but it was '%s'"
                                 % (status, self._status))
| Python |
from __future__ import with_statement
import os
import sys
import csv
import datetime
import tempfile
class VacalcError(Exception):
    """Base error for all vacalc-specific failures."""
class EmployeeStore(object):
    """Store of employees keyed by name, optionally persisted to a CSV file."""

    def __init__(self, db_file):
        self._db_file = db_file
        has_db = bool(db_file) and os.path.isfile(db_file)
        self._employees = self._read_employees(db_file) if has_db else {}

    def _read_employees(self, path):
        """Load every employee row from the CSV file at path."""
        with open(path) as db:
            loaded = [Employee(row[0], row[1]) for row in csv.reader(db)]
        # Later rows win on duplicate names, matching sequential insertion.
        return dict((emp.name, emp) for emp in loaded)

    def get_employee(self, name):
        employee = self._employees.get(name)
        if employee is None:
            raise VacalcError("Employee '%s' not found" % name)
        return employee

    def get_all_employees(self):
        return self._employees.values()

    def add_employee(self, employee):
        if employee.name in self._employees:
            raise VacalcError("Employee '%s' already exists in the system" %
                              employee.name)
        self._employees[employee.name] = employee
        self._serialize(employee)

    def _serialize(self, employee):
        """Append the employee to the CSV db; no-op when no db file is set."""
        if not self._db_file:
            return
        with open(self._db_file, 'a') as db:
            csv.writer(db, lineterminator='\n').writerow(
                [employee.name, employee.startdate])
class Employee(object):
    """An employee: a name plus the employment start date."""

    def __init__(self, name, startdate):
        self.name = name
        # startdate arrives as a 'YYYY-MM-DD' string.
        self.startdate = self._parse_date(startdate)

    def _parse_date(self, datestring):
        year, month, day = (int(part) for part in datestring.split('-'))
        return datetime.date(year, month, day)
class Vacation(object):
    """Vacation-day entitlement for one employee in one holiday credit year.

    The credit year starts in April (credit_start_month); employees with
    more than a full credit year of service get the maximum, later
    starters get nothing, and others accrue per worked month.
    """
    max_vacation = 12 * 2.5
    no_vacation = 0
    vacation_per_month = 2
    credit_start_month = 4
    work_days_required= 14

    def __init__(self, empstartdate, vacation_year):
        self.days = self._calculate_vacation(empstartdate, vacation_year)

    def _calculate_vacation(self, start, year):
        if self._has_worked_longer_than_year(start, year):
            return self.max_vacation
        if self._started_after_holiday_credit_year_ended(start, year):
            return self.no_vacation
        return self.vacation_per_month * self._count_working_months(start)

    def _has_worked_longer_than_year(self, start, year):
        elapsed = year - start.year
        return elapsed > 1 or (elapsed == 1 and
                               start.month < self.credit_start_month)

    def _started_after_holiday_credit_year_ended(self, start, year):
        if start.year > year:
            return True
        return year == start.year and start.month >= self.credit_start_month

    def _count_working_months(self, start):
        months = self.credit_start_month - start.month
        if months <= 0:
            months += 12
        # A partial first month only counts with enough working days.
        penalty = 1 if self._first_month_has_too_few_working_days(start) else 0
        return months - penalty

    def _first_month_has_too_few_working_days(self, start):
        worked = 0
        current = start
        while current is not None:
            if self._is_working_day(current):
                worked += 1
            current = self._next_date(current)
        return worked < self.work_days_required

    def _is_working_day(self, date):
        # Monday..Friday are weekdays 0..4.
        return date.weekday() < 5

    def _next_date(self, date):
        try:
            return date.replace(day=date.day+1)
        except ValueError:
            # Day overflowed past the end of the month.
            return None
class VacationCalculator(object):
    """Facade exposing the vacalc commands on top of an EmployeeStore."""

    def __init__(self, employeestore):
        self._employeestore = employeestore

    def show_vacation(self, name, year):
        startdate = self._employeestore.get_employee(name).startdate
        vacation = Vacation(startdate, int(year))
        return "%s has %d vacation days in year %s" \
               % (name, vacation.days, year)

    def add_employee(self, name, startdate):
        new_employee = Employee(name, startdate)
        self._employeestore.add_employee(new_employee)
        return "Successfully added employee '%s'." % new_employee.name

    def get_employee(self, name):
        found = self._employeestore.get_employee(name)
        return '%s: start date %s' % (found.name, found.startdate)
def main(args):
    """Dispatch a vacalc command.

    args is [command, arg1, ...] where command names a public
    VacationCalculator method; returns that method's result string.
    """
    # Database location is overridable (e.g. by tests) via VACALC_DB.
    db_file = os.environ.get('VACALC_DB', os.path.join(tempfile.gettempdir(),
                                                       'vacalcdb.csv'))
    try:
        cmd = getattr(VacationCalculator(EmployeeStore(db_file)), args[0])
        return cmd(*args[1:])
    except (AttributeError, TypeError):
        # Unknown command name (AttributeError) or wrong argument count
        # (TypeError).  NOTE(review): this also swallows these exceptions
        # when raised *inside* a valid command -- confirm that is intended.
        raise VacalcError('invalid command or arguments')
# Command-line entry point: print the command result and exit 0, or print
# the error message and exit 1 on a vacalc failure.
if __name__ == '__main__':
    try:
        print main(sys.argv[1:])
        sys.exit(0)
    except VacalcError, err:
        print err
        sys.exit(1)
| Python |
# Variable file: module-level names become variables for the consumer
# (presumably a Robot Framework variable file -- confirm against usage).
VALUE_FROM_VAR_FILE='Expected Value'
| Python |
def this_keyword_is_in_funnylib():
    """Print 'jee'; presumably a demo keyword for library imports (per its name)."""
    print 'jee'
| Python |
from Queue import Queue
from threading import Event
try:
    from multiprocessing.managers import BaseManager
except ImportError:
    # multiprocessing appeared in Python 2.6.  On older interpreters
    # substitute a stub that fails loudly on any attempted use.
    class Python26Required(object):
        def __call__(self, *args):
            raise RuntimeError('Requires Python > 2.6')
        def __getattr__(self, name):
            raise RuntimeError('Requires Python > 2.6')
    BaseManager = Python26Required()
class _create_caching_getter(object):
def __init__(self, clazz):
self._clazz = clazz
self._objects = {}
def __call__(self, key):
if key not in self._objects:
self._objects[key] = self._clazz()
return self._objects[key]
class Communicate(object):
    """Library for communication between processes.
    For example this can be used to handle communication between processes of the Parallel robot library.
    Requires Python 2.6
    Example:
    Process 1 test file:
    | *Settings* |
    | Library | Communicate |
    | *Test Cases* |
    | Communicator |
    | | [Setup] | Start Communication Service |
    | | Send Message To | my message queue | hello world! |
    | | ${message}= | Receive Message From | other message queue |
    | | Should Be Equal | ${message} | hello! |
    | | [Teardown] | Stop Communication Service |
    Process 2 test file:
    | *Settings* |
    | Library | Communicate | ${process 1 ip address if on a different machine} |
    | *Test Cases* |
    | Helloer |
    | | ${message}= | Receive Message From | my message queue |
    | | Should Be Equal | ${message} | hello world! |
    | | Send Message To | other message queue | hello! |
    """

    def __init__(self, address='127.0.0.1', port=2187):
        """
        `address` of the communication server.
        `port` of the communication server.
        """
        self._address = address
        self._port = int(port)
        # Shared secret for BaseManager connections; both ends must match.
        self._authkey = 'live long and prosper'
        self._queue = None
        self._connected = False

    def _connect(self):
        # Client side: attach to a manager server started by another process.
        self._create_manager().connect()
        self._connected = True

    def start_communication_service(self):
        """Starts a communication server that will be used to share messages and objects between processes.
        """
        # Server side: register real factories so the queues and events
        # live in this process; clients get proxies to them.
        self._create_manager(_create_caching_getter(Queue),
                             _create_caching_getter(Event)).start()
        self._connected = True

    def stop_communication_service(self):
        """Stops a started communication server.
        This ensures that the server and the messages that it has don't influence the next tests.
        To ensure that this keyword really happens place this in the teardown section.
        """
        self._manager.shutdown()
        self._connected = False

    def _create_manager(self, queue_getter=None, event_getter=None):
        # With getters left as None (client side) the methods are still
        # registered so proxies work; objects are only created server side.
        BaseManager.register('get_queue', queue_getter)
        BaseManager.register('get_event', event_getter)
        self._manager = BaseManager((self._address, self._port), self._authkey)
        return self._manager

    def send_message_to(self, queue_id, value):
        """Send a message to a message queue.
        `queue_id` is the identifier for the queue.
        `value` is the message. This can be a string, a number or any serializable object.
        Example:
        In one process
        | Send Message To | my queue | hello world! |
        ...
        In another process
        | ${message}= | Receive Message From | my queue |
        | Should Be Equal | ${message} | hello world! |
        """
        self._get_queue(queue_id).put(value)

    def receive_message_from(self, queue_id, timeout=None):
        """Receive and consume a message from a message queue.
        By default this keyword will block until there is a message in the queue.
        `queue_id` is the identifier for the queue.
        `timeout` is the time out in seconds to wait.
        Returns the value from the message queue. Fails if timeout expires.
        Example:
        In one process
        | Send Message To | my queue | hello world! |
        ...
        In another process
        | ${message}= | Receive Message From | my queue |
        | Should Be Equal | ${message} | hello world! |
        """
        timeout = float(timeout) if timeout is not None else None
        return self._get_queue(queue_id).get(timeout=timeout)

    def _get_queue(self, queue_id):
        # Lazily connect so importing the library needs no running server.
        if not self._connected:
            self._connect()
        return self._manager.get_queue(queue_id)

    def wait_for_event(self, event_id, timeout=None):
        """Waits until event with `event_id` is signaled.
        Fails if optional timeout expires.
        `timeout` is the time out in seconds to wait.
        Example:
        In one process
        | Wait For Event | my event |
        ...
        In another process
        | Signal Event | my event |
        """
        timeout = float(timeout) if timeout is not None else None
        self._get_event(event_id).wait(timeout=timeout)
        #NOTE! If Event#clear is ever exposed it has to be secured (for example r/w lock) that none
        #of the processes can do it while another is at this position.
        # wait() returning without the event set means the timeout expired.
        if not self._get_event(event_id).isSet():
            raise Exception('Timeout')

    def signal_event(self, event_id):
        """Signals an event.
        If a process is waiting for this event it will stop waiting after the signal.
        `event` is the identifier for the event.
        Example:
        In one process
        | Wait For Event | my event |
        ...
        In another process
        | Signal Event | my event |
        """
        return self._get_event(event_id).set()

    def _get_event(self, event_id):
        # Lazily connect so importing the library needs no running server.
        if not self._connected:
            self._connect()
        return self._manager.get_event(event_id)
| Python |
# Copyright 2008-2011 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import with_statement
import subprocess
import time
from random import randint
import os
import re
import sys
from robot.libraries import BuiltIn
from robot.utils import html_escape, ArgumentParser
from robot.version import get_version
class Parallel(object):
    """
    Library for executing tests in parallel from inside of a robot test case.
    Tests are executed in subprocesses.
    You can add arguments to all parallel test runs from `library importing`,
    for a set of parallel tests with `Add Arguments For Parallel Tests` and
    for an individual parallel test by passing the arguments in `Start Parallel Test`.
    The following command line arguments (also from argument files) are automatically
    passed to parallel tests:
    --loglevel, --runmode, --pythonpath, --variable, --variablefile
    Example:
    | *Settings* |
    | Library | Parallel | pybot |
    | *Test Cases* |
    | Runner |
    | | Run Parallel Tests | Hello | World |
    | Hello |
    | | [Tags] | parallel |
    | | Log | Hello ${WORLD} |
    | World |
    | | [Tags] | parallel |
    | | Log | ${HELLO} World |
    `pybot --exclude parallel --variable HELLO:Hello --variable WORLD:World .`
    """

    def __init__(self, runner_script, *arguments):
        """
        `runner_script` is pybot or jybot or a custom script.
        `arguments` are default arguments given to every test execution.
        Example:
        | Library | Parallel | pybot | --variable | variable:value | --loglevel | DEBUG |
        """
        self._script = runner_script
        self._arguments = self._get_arguments(arguments)
        self._processes = []
        self._data_source = None

    def _get_arguments(self, additional_arguments):
        # Re-parse this run's own command line (sys.argv) so selected
        # options can be forwarded verbatim to the child test runs.
        options,_ = ArgumentParser(_get_cmd_arguments()).parse_args(sys.argv[1:], argfile='argumentfile', unescape='escape')
        args = []
        for arg in ['loglevel', 'runmode', 'pythonpath', 'variable', 'variablefile']:
            args += self._get_type_arguments(options, arg)
        args += list(additional_arguments)
        return args

    def _get_type_arguments(self, options, key):
        # Turn one parsed option back into '--key value' pairs; options
        # that can repeat (e.g. --variable) are already lists.
        value = options[key]
        args = []
        if value is not None:
            if not isinstance(value, list):
                value = [value]
            for var in value:
                args += ['--%s' % key, var]
        return args

    def add_arguments_for_parallel_tests(self, *arguments):
        """Adds `arguments` to be used when parallel test is started.
        `arguments` is a list of arguments to pass to parallel executions.
        In the following example variable my_var is used in both of the tests
        started with the keyword `Run Parallel Tests`:
        | Add Arguments For Parallel Tests | --variable | my_var:value |
        | Run Parallel Tests | Test | Another Test |
        """
        self._arguments += list(arguments)

    def set_data_source_for_parallel_tests(self, data_source):
        """Sets data source which is used when parallel tests are started.
        `data_source` is path to file which contains the test/tests which are
        started/executed with keywords `Start Parallel Test` or `Run Parallel
        Tests`.
        If tests to be executed are in the same suite and Robot Framework 2.5
        or later is used, there is no need to use this keyword as `data_source`
        can be automatically resolved.
        Examples:
        | Set Data Source For Parallel Tests | ${CURDIR}${/}my_parallel_suite.txt |
        | Start Parallel Test | My Parallel Test |
        | Wait All Parallel Tests |
        """
        self._data_source = data_source

    def start_parallel_test(self, test_name, *arguments):
        """Starts executing test with given `test_name` and `arguments`.
        `arguments` is a list of Robot Framework command line arguments passed to
        the started test execution. It should not include data source. Use
        `Set Data Source For Parallel Tests` keyword for setting the data
        source. Additional arguments can also be set in library import and with
        `Add Arguments For Parallel Tests` keyword.
        Returns a process object that represents this execution.
        Example:
        | Set Data Source For Parallel Tests | MySuite.txt |
        | Start Parallel Test | Test From My Suite |
        | Set Data Source For Parallel Tests | MyFriendsSuite.txt |
        | Start Parallel Test | Test From My Friends Suite |
        | Wait All Parallel Tests |
        """
        if self._data_source is None:
            # Default to the suite file the currently running test came from.
            self._data_source = BuiltIn.BuiltIn().replace_variables('${SUITE_SOURCE}')
        process = _ParaRobo(test_name, self._data_source,
                            self._arguments+list(arguments))
        process.run(self._script)
        self._processes.append(process)
        return process

    def run_parallel_tests(self, *test_names):
        """Executes all given tests parallel and wait those to be ready.
        Arguments can be set with keyword `Add Arguments For Parallel Tests`
        and data source with keyword `Set Data Source For Parallel Tests`.
        Example:
        | Add Arguments For Parallel Tests | --variable | SOME_VARIABLE:someValue |
        | Set Data Source For Parallel Tests | MySuite.txt |
        | Run Parallel Tests | My Parallel Test | My Another Parallel Test |
        When the parallel tests are from different data sources see the example in `Start Parallel Test`.
        """
        processes = []
        for name in test_names:
            processes += [self.start_parallel_test(name)]
        self.wait_parallel_tests(*processes)

    def wait_parallel_tests(self, *processes):
        """Waits given `processes` to be ready and fails if any of the tests failed.
        `Processes` are list of test execution processes returned from keyword
        `Start Parallel Test`.
        Example
        | ${test 1}= | Start Parallel Test | First Test |
        | ${test 2}= | Start Parallel Test | Test That Runs All The Time |
        | Wait Parallel Tests | ${test 1} |
        | ${test 3}= | Start Parallel Test | Third Test |
        | Wait Parallel Tests | ${test 2} | ${test 3} |
        """
        failed = []
        for process in processes:
            # Collect all failures before raising so every child is
            # waited for and reported.
            if process.wait() != 0:
                failed += [process.test]
            process.report()
            self._processes.remove(process)
        if failed:
            raise AssertionError("Following tests failed:\n%s" % "\n".join(failed))

    def wait_all_parallel_tests(self):
        """Wait all started test executions to be ready and fails if any of those failed."""
        self.wait_parallel_tests(*self._processes)

    def stop_all_parallel_tests(self):
        """Forcefully stops all the test executions.
        NOTE: Requires Python 2.6 or later.
        """
        for process in self._processes:
            process.stop_test_execution()
        self._processes = []
class _ParaRobo(object):
def __init__(self, test, data_source, arguments):
self.test = test
self._data_source = data_source
self._args = arguments
self._built_in = BuiltIn.BuiltIn()
id = self._create_id()
self._output = 'output_%s.xml' % id
self._log = 'log_%s.html' % id
self._output_dir = self._built_in.replace_variables("${OUTPUT DIR}")
self._monitor_out = os.path.join(self._output_dir, 'monitor_%s.txt' % id)
@property
def _suite_name(self):
name = os.path.splitext(os.path.basename(self._data_source))[0]
name = name.split('__', 1)[-1] # Strip possible prefix
name = name.replace('_', ' ').strip()
if name.islower():
name = name.title()
return name
def _create_id(self):
return "%s_%s" % (randint(0, 10000), time.strftime('%Y%m%d_%H%m%S.')+\
('%03d' % (int(time.time()*1000) % 1000)))
def run(self, script):
self._monitor_file = open(self._monitor_out, 'w')
cmd = [script,
'--outputdir', self._output_dir,
'--output', self._output,
'--report', 'None',
'--log', self._log,
'--monitorcolors', 'off',
'--test', self.test]+\
self._args + [self._data_source]
print "Starting test execution: %s" % " ".join(cmd)
self._process = subprocess.Popen(cmd,
shell=os.sep == '\\',
stdout=self._monitor_file,
stderr=self._monitor_file,
env=self._get_environment_variables())
def _get_environment_variables(self):
environment_variables = os.environ.copy()
if environment_variables.has_key("ROBOT_SYSLOG_FILE"):
del(environment_variables["ROBOT_SYSLOG_FILE"])
return environment_variables
def wait(self):
rc = self._process.wait()
self._monitor_file.close()
return rc
def report(self):
with open(self._monitor_out, 'r') as monitor_file:
monitor_output = monitor_file.read()
try:
os.remove(self._monitor_out)
except:
pass
match = re.search('^Log: (.*)$', monitor_output, re.MULTILINE)
monitor_output = self._replace_stdout_log_message_levels(monitor_output)
monitor_output = html_escape(monitor_output)
if match:
monitor_output = monitor_output.replace(match.group(1), '<a href="%s#test_%s.%s">%s</a>' % (self._log, self._suite_name, self.test, match.group(1)))
monitor_output = self._add_colours(monitor_output)
print "*HTML* %s" % monitor_output
def _replace_stdout_log_message_levels(self, output):
for level in ['TRACE', 'WARN', 'DEBUG', 'INFO', 'HTML']:
output = output.replace('\n*%s*' % level, '\n *%s*' % level)
return output
def _add_colours(self, output):
for name, colour in [("PASS", "pass"), ("FAIL", "fail"), ("ERROR", "fail")]:
output = output.replace(' %s ' % name, ' <span class="%s">%s</span> ' % (colour, name))
return output
def stop_test_execution(self):
try:
self._process.terminate()
except AttributeError:
pass
self.report()
def _get_cmd_arguments():
    """Extract Robot's command line usage text from robot/runner.py."""
    import robot
    robot_dir = os.path.dirname(os.path.abspath(robot.__file__))
    with open(os.path.join(robot_dir, 'runner.py'), 'r') as runner_file:
        source = runner_file.read()
    # The usage text is the module docstring of runner.py.
    return re.search('"""(.+)"""', source, re.DOTALL).group(1)
| Python |
# -*- python -*-
# ex: set syntax=python:
import os
# Subversion trunk that every build checks out and tests.
ROBOT_FRAMEWORK_REPOSITORY = 'http://robotframework.googlecode.com/svn/trunk/'
# This is the dictionary that the buildmaster pays attention to. We also use
# a shorter alias to save typing.
c = BuildmasterConfig = {}
####### BUILDSLAVES
from buildbot.buildslave import BuildSlave
# Single slave ("debian-py2.4") connecting with password "robotci".
c['slaves'] = [BuildSlave("debian-py2.4", "robotci")]
c['slavePortnum'] = 9989
####### CHANGESOURCES
from buildbot.changes.svnpoller import SVNPoller
# Poll Subversion for new commits every 180 seconds.
c['change_source'] = SVNPoller(ROBOT_FRAMEWORK_REPOSITORY, pollinterval=180)
####### SCHEDULERS
from buildbot.scheduler import Scheduler
c['schedulers'] = []
# Build once the tree has been stable for 180 seconds after a change.
c['schedulers'].append(Scheduler(name="all", branch=None, treeStableTimer=180,
                                 builderNames=["PybotTests"]))
####### BUILDERS
# the 'builders' list defines the Builders. Each one is configured with a
# dictionary, using the following keys:
# name (required): the name used to describe this builder
# slavename (required): which slave to use, must appear in c['bots']
# builddir (required): which subdirectory to run the builder in
# factory (required): a BuildFactory to define how the build is run
# periodicBuildTime (optional): if set, force a build every N seconds
from buildbot.process import factory
from buildbot.steps.source import SVN
from buildbot.steps.shell import ShellCommand
from buildbot.steps.master import MasterShellCommand
from buildbot.steps.transfer import FileUpload
import glob
# Archive the slave uploads and the directory the master unpacks it to.
OUTPUT_ARCHIVE = 'outputs.zip'
RESULT_DIR = 'results'
class ReportGenerator(MasterShellCommand):
    """Master-side step: run ./generate_reports.sh on the uploaded results,
    then attach the produced report and log files as HTML logs of the step."""

    def __init__(self, **kwargs):
        # NOTE(review): **kwargs is accepted but not forwarded to
        # MasterShellCommand.__init__ -- confirm that is intentional.
        command = ['./generate_reports.sh', RESULT_DIR]
        MasterShellCommand.__init__(self, command)
        self.addFactoryArguments(command=command)

    def finished(self, results):
        # Rewrite report links from "log.html" files on disk to the
        # attached "log" HTML logs served by the status pages.
        report = open(RESULT_DIR + '/report.html').read().replace('<a href="log.html',
                                                                  '<a href="log')
        self.addHTMLLog('report', report)
        self.addHTMLLog('log', open(RESULT_DIR + '/log.html').read())
        # Also attach any split sub-logs (log-*.html), in name order.
        for sublog in sorted(glob.glob(RESULT_DIR + '/log-*.html')):
            self.addHTMLLog(os.path.basename(sublog), open(sublog).read())
        return MasterShellCommand.finished(self, results)
f1 = factory.BuildFactory()
f1.addStep(SVN(svnurl=ROBOT_FRAMEWORK_REPOSITORY))
f1.addStep(ShellCommand(command=['python', './install.py', 'in'],
                        description='Installing',
                        descriptionDone='Install'))
# Run acceptance tests in 'buildbot' mode with a one hour timeout.
f1.addStep(ShellCommand(command=['atest/run_atests.py', 'buildbot', 'python',
                                 '--monitorcolors off',
                                 '--exclude manual',
                                 'atest/robot/'],
                        description='Robot Tests',
                        descriptionDone='Robot Tests',
                        timeout=60*60))
# Upload the zipped output XMLs to the master, then generate reports there.
f1.addStep(FileUpload(slavesrc='atest/results/' + OUTPUT_ARCHIVE,
                      masterdest=RESULT_DIR +'/'+ OUTPUT_ARCHIVE))
f1.addStep(ReportGenerator())

b1 = {'name': "PybotTests",
      'slavename': "debian-py2.4",
      'builddir': "pybot-build",
      'factory': f1}
c['builders'] = [b1]

####### STATUS TARGETS
from buildbot.status import html
c['status'] = []
c['status'].append(html.WebStatus(http_port=8010))

from buildbot.status import mail
c['status'].append(mail.MailNotifier(fromaddr="buildbot@robot.radiaatto.ri.fi",
                                     extraRecipients=["robotframework-commit@googlegroups.com"],
                                     sendToInterestedUsers=False,
                                     relayhost='10.127.0.12'))
#
# from buildbot.status import words
# c['status'].append(words.IRC(host="irc.example.com", nick="bb",
#                              channels=["#example"]))
#
# from buildbot.status import client
# c['status'].append(client.PBListener(9988))

####### DEBUGGING OPTIONS
# if you set 'debugPassword', then you can connect to the buildmaster with
# the diagnostic tool in contrib/debugclient.py . From this tool, you can
# manually force builds and inject changes, which may be useful for testing
# your buildmaster without actually committing changes to your repository (or
# before you have a functioning 'sources' set up). The debug tool uses the
# same port number as the slaves do: 'slavePortnum'.
c['debugPassword'] = "passwd"

# if you set 'manhole', you can ssh into the buildmaster and get an
# interactive python shell, which may be useful for debugging buildbot
# internals. It is probably only useful for buildbot developers. You can also
# use an authorized_keys file, or plain telnet.
#from buildbot import manhole
#c['manhole'] = manhole.PasswordManhole("tcp:9999:interface=127.0.0.1",
#                                       "admin", "password")

####### PROJECT IDENTITY
# the 'projectName' string will be used to describe the project that this
# buildbot is working on. For example, it is used as the title of the
# waterfall HTML page. The 'projectURL' string will be used to provide a link
# from buildbot HTML pages to your project's home page.
c['projectName'] = "Robot Framework"
c['projectURL'] = "http://robotframework.org/"

# the 'buildbotURL' string should point to the location where the buildbot's
# internal web server (usually the html.Waterfall page) is visible. This
# typically uses the port number set in the Waterfall 'status' entry, but
# with an externally-visible host name which the buildbot cannot figure out
# without some help.
c['buildbotURL'] = "http://robot.radiaatto.ri.fi:8080/"
| Python |
#!/usr/bin/env python
"""A tool for creating data driven test case for Robot Framework
Usage: testgen.py variablefile template output
This script reads the variable and template files and generates a test suite
which has all test cases found in the template multiplied with all the rows of
the variable file. Suite settings, variables and user keywords from the template
file are serialized as is.
Currently, the input files must be in tsv (tab separated values) format. Also
the output file is written in tsv. The variables file must have a format
demonstrated in the example below, e.g. header row, followed by a row with the
names of the variables, and on the subsequent rows the values for the
variables.
Options:
-h -? --help Print this usage instruction.
Example:
<<template.tsv>>
* Settings *
Documentation Example data driven suite
* Test Cases *
Example Test Keyword ${arg1} ${arg2}
* User Keywords *
Keyword [Arguments] ${val1} ${val2}
Log Many ${val1} ${val2}
<<variables.tsv>>
* Variables *
${arg1} ${arg2}
value1 value2
value11 value22
Given above files, command
python testgen.py variables.tsv template.tsv output.tsv
produces following test suite:
<<output.tsv>>
* Settings *
Documentation Example data driven suite
* Test Cases *
Example Test 1 Keyword value1 value2
Example Test 2 Keyword value11 value22
* User Keywords *
Keyword [Arguments] ${val1} ${val2}
Log Many ${val1} ${val2}
"""
import sys
import os
import csv
from robot.parsing.model import FileSuite
from robot.parsing.tsvreader import TsvReader
from robot.errors import DataError, Information
from robot import utils
class TestGeneratingSuite(FileSuite):
    """Suite that serializes itself multiplied with rows of variables.

    Each test case in the template is written once per variable row, with
    variable names in keyword names/args replaced by the row's values.
    Settings, variables and user keywords are serialized as-is.
    """

    def serialize(self, variables, serializer):
        self._serialize_settings(serializer)
        self._serialize_variables(serializer)
        self._serialize_tests(variables, serializer)
        self._serialize_keywords(serializer)

    def _serialize_settings(self, serializer):
        # Documentation, metadata, fixture settings and imports, in order.
        serializer.start_settings()
        if self.doc:
            serializer.setting('Documentation', self.doc)
        for name, value in self.metadata.items():
            serializer.setting('Meta: %s' % name, [value])
        for name in ['Default Tags', 'Force Tags', 'Suite Setup',
                     'Suite Teardown', 'Test Setup', 'Test Teardown',
                     ]:
            value = self._get_setting(self, name)
            if value:
                serializer.setting(name, value)
        for imp in self.imports:
            serializer.setting(imp.name, imp._item.value)
        serializer.end_settings()

    def _serialize_variables(self, serializer):
        serializer.start_variables()
        for var in self.variables:
            serializer.variable(var.name, var.value)
        serializer.end_variables()

    def _serialize_tests(self, variables, serializer):
        # Writes every template test once per variable row; generated tests
        # are named '<original name> <N>' with N starting from 1.
        serializer.start_testcases()
        for test in self.tests:
            orig_name = test.name
            for index, vars in enumerate(variables):
                test.name = '%s %d' % (orig_name, (index+1))
                serializer.start_testcase(test)
                if test.doc:
                    serializer.setting('Documentation', [test.doc])
                for name in ['Setup', 'Tags', 'Timeout']:
                    value = self._get_setting(test, name)
                    if value is not None:
                        serializer.setting(name, value)
                for kw in test.keywords:
                    # Variable replacement happens only in test keywords.
                    data = self._replace_variables(vars, [kw.name] + kw.args)
                    serializer.keyword(data)
                if test.teardown is not None:
                    serializer.setting('Teardown', test.teardown)
                serializer.end_testcase()
        serializer.end_testcases()

    def _serialize_keywords(self, serializer):
        # User keywords are serialized without variable replacement.
        serializer.start_keywords()
        for uk in self.user_keywords:
            serializer.start_keyword(uk)
            args = self._format_args(uk.args, uk.defaults, uk.varargs)
            if args:
                serializer.setting('Arguments', args)
            if uk.doc:
                serializer.setting('Documentation', uk.doc)
            if uk.timeout is not None:
                serializer.setting('Timeout', uk.timeout)
            for kw in uk.keywords:
                serializer.keyword([kw.name] + kw.args)
            if uk.return_value:
                serializer.setting('Return Value', uk.return_value)
        serializer.end_keywords()

    def _replace_variables(self, variables, data):
        # Plain textual replacement: every occurrence of each variable name
        # (e.g. '${arg1}') in every cell is replaced with its value.
        replaced = []
        for elem in data:
            for key in variables:
                if key in elem:
                    elem = elem.replace(key, variables[key])
            replaced.append(elem)
        return replaced

    def _get_setting(self, item, name):
        # Maps a setting name like 'Suite Setup' to attribute 'suite_setup'.
        return getattr(item, name.lower().replace(' ', '_'))

    def _format_args(self, args, defaults, varargs):
        # Formats an argument spec back to '${arg}', '${arg}=default' and
        # the possible trailing '${varargs}'.
        parsed = []
        if args:
            parsed.extend(list(args))
        if defaults:
            for i, value in enumerate(defaults):
                # Default values apply to the last len(defaults) arguments.
                index = len(args) - len(defaults) + i
                parsed[index] = parsed[index] + '=' + value
        if varargs:
            parsed.append(varargs)
        return parsed
class VariableIterator(object):
    """Parses a tsv variable file and iterates over its value rows.

    Each iteration yields a dict mapping variable names (from the first
    row of the variable table) to the values on one subsequent row. Note
    that iterating consumes the rows: a second pass yields nothing.
    """

    def __init__(self, varfile):
        self._variable_mapping = {}
        self._variables = []
        TsvReader().read(varfile, self)

    def __iter__(self):
        while self._variables:
            row = self._variables.pop(0)
            yield dict((name, row[index])
                       for name, index in self._variable_mapping.items())

    def start_table(self, name):
        # TsvReader feeds rows to add_row only when this returns True.
        return name.lower().strip() == 'variables'

    def add_row(self, row):
        if self._variable_mapping:
            self._variables.append(row)
        else:
            # The first row defines variable names; remember each column.
            for index, name in enumerate(row):
                self._variable_mapping[name] = index
class AbstractFileWriter(object):
    """Base class for serializing test data into a fixed-width table file.

    Subclasses implement _write_header_row_impl and _write_normal_row_impl.
    The current test case / user keyword name is tracked so that it is
    written only into the first column of its first row.
    """

    def __init__(self, path, cols):
        # cols is the total number of columns every written row is padded to.
        self._output = open(path, 'wb')
        self._cols = cols
        self._tc_name = None
        self._uk_name = None

    def start_settings(self):
        self._write_header_row(['Setting', 'Value'])

    def end_settings(self):
        self._write_empty_row()

    def start_variables(self):
        self._write_header_row(['Variable', 'Value'])

    def end_variables(self):
        self._write_empty_row()

    def start_testcases(self):
        self._write_header_row(['Test Case', 'Action', 'Argument'])

    def end_testcases(self):
        self._write_empty_row()

    def start_testcase(self, testcase):
        self._tc_name = testcase.name

    def end_testcase(self):
        # If the name was never consumed (test had no rows), write it alone.
        if self._tc_name:
            self._write_normal_row([self._tc_name])
            self._tc_name = None
        self._write_empty_row()

    def start_keywords(self):
        self._write_header_row(['Keyword', 'Action', 'Argument'])

    def end_keywords(self):
        self._write_empty_row()
        self._output.close()

    def start_keyword(self, userkeyword):
        self._uk_name = userkeyword.name

    def end_keyword(self):
        # If the name was never consumed (keyword had no rows), write it alone.
        if self._uk_name:
            self._write_normal_row([self._uk_name])
            self._uk_name = None
        self._write_empty_row()

    def setting(self, name, value):
        if self._tc_name is None and self._uk_name is None:
            self._write_normal_row([name] + value)
        else:  # TC and UK settings
            row = [self._get_tc_or_uk_name(), '[%s]' % name] + value
            self._write_normal_row(row, indent=1)

    def variable(self, name, value):
        self._write_normal_row([name] + value)

    def keyword(self, keyword):
        name = self._get_tc_or_uk_name()
        # TODO: When adding support for PARALLEL, FOR, etc. need to use
        # different indent when inside indented block
        self._write_normal_row([name] + keyword, indent=1)

    def _write_header_row(self, row):
        # Pad the header by repeating its last cell to the full column count.
        row += [row[-1]] * (self._cols - len(row))
        self._write_header_row_impl(row)

    def _write_normal_row(self, row, indent=0):
        # Rows longer than the column count are continued on subsequent
        # rows marked with '...' after the indent columns.
        firstrow = True
        while True:
            if firstrow:
                current = row[:self._cols]
                row = row[self._cols:]
                firstrow = False
            else:
                current = ['']*indent + ['...'] + row[:self._cols-indent-1]
                row = row[self._cols-indent-1:]
            self._escape_empty_trailing_cells(current)
            current += [''] * (self._cols - len(current))
            self._write_normal_row_impl(current)
            if not row:
                break

    def _write_empty_row(self):
        self._write_normal_row([])

    def _escape_empty_trailing_cells(self, row):
        # A trailing empty cell would be lost when parsing, so escape it.
        if len(row) > 0 and row[-1] == '':
            row[-1] = '\\'

    def _get_title(self, path):
        dire, base = os.path.split(path)
        if base.lower() == '__init__.html':
            path = dire
        return utils.printable_name_from_path(path)

    def _write_header_row_impl(self, row):
        raise NotImplementedError

    def _write_normal_row_impl(self, row):
        raise NotImplementedError
class TsvFileWriter(AbstractFileWriter):
    """Serializes test data rows into a tab separated file with 8 columns."""

    def __init__(self, path):
        AbstractFileWriter.__init__(self, path, 8)
        self._writer = csv.writer(self._output, dialect='excel-tab')

    def _write_header_row_impl(self, row):
        # Header cells are surrounded with asterisks, e.g. '*Setting*'.
        cells = ['*%s*' % cell for cell in row]
        self._writer.writerow(cells)

    def _write_normal_row_impl(self, row):
        encoded = [cell.encode('UTF-8') for cell in row]
        self._writer.writerow(encoded)

    def _get_tc_or_uk_name(self):
        # The test case / user keyword name is written only once; reset it
        # after the first use so continuation rows get an empty first cell.
        if self._tc_name:
            name, self._tc_name = self._tc_name, ''
        elif self._uk_name:
            name, self._uk_name = self._uk_name, ''
        else:
            name = ''
        return name
def generate_suite(cliargs):
    """Generates a data driven test suite based on command line arguments.

    cliargs must contain variable file, template file and output file paths.
    The output is always written in tsv format.
    """
    opts, (varfile, templatefile, outfile) = _process_args(cliargs)
    suite = TestGeneratingSuite(templatefile)
    variables = VariableIterator(open(varfile))
    if not outfile.endswith('tsv'):
        outfile += '.tsv'
    suite.serialize(variables, TsvFileWriter(outfile))
def _process_args(cliargs):
    """Parses command line arguments; exits on --help or invalid usage.

    Returns (options, [varfile, templatefile, outfile, ...]).
    """
    ap = utils.ArgumentParser(__doc__, arg_limits=(3, sys.maxint))
    try:
        opts, paths = ap.parse_args(cliargs, help='help', check_args=True)
    except Information, msg:
        # --help was used; print the usage text and exit with rc 1.
        exit(msg=str(msg))
    except DataError, err:
        exit(error=str(err))
    return opts, paths
def exit(rc=0, error=None, msg=None):
    """Prints an error or a message and terminates the process.

    Errors exit with rc 255 (unless a non-zero rc was given), plain
    messages with rc 1.  NOTE(review): shadows the builtin exit().
    """
    if error:
        print error, "\n\nUse '--help' option to get usage information."
        if rc == 0:
            rc = 255
    if msg:
        print msg
        rc = 1
    sys.exit(rc)
# Allow running the generator as a standalone script.
if __name__ == '__main__':
    generate_suite(sys.argv[1:])
| Python |
import datetime
from vacalc.employeestore import Employee

def calculate_vacation(startdate, vacation_year, exp_vacation_days):
    """Verifies Employee.count_vacation against the expected day count.

    startdate: employment start date as a 'yyyy-mm-dd' string.
    vacation_year: year to count vacation for (string or int).
    exp_vacation_days: expected number of vacation days (string or int).
    Raises AssertionError on an invalid date or a mismatching count.
    """
    try:
        sdate = datetime.date(*(int(item) for item in startdate.split('-')))
    except Exception, err:
        raise AssertionError('Invalid time format %s' % err)
    actual_days = Employee('Test Employee', sdate).count_vacation(int(vacation_year))
    if actual_days != int(exp_vacation_days):
        raise AssertionError('%s != %s' % (exp_vacation_days, actual_days))
| Python |
from __future__ import with_statement
import os
import csv
import datetime
class VacalcError(RuntimeError): pass
class EmployeeStore(object):
    """Stores employees by name, optionally persisted in a csv file.

    Each csv row contains an employee name and start date (yyyy-mm-dd).
    """

    def __init__(self, db_file):
        # db_file may be None/empty, in which case nothing is persisted.
        self._db_file = db_file
        if self._db_file and os.path.isfile(self._db_file):
            self._employees = self._read_employees(self._db_file)
        else:
            self._employees = {}

    def _read_employees(self, path):
        employees = {}
        with open(path) as db:
            for row in csv.reader(db):
                employee = Employee(row[0], self._parse_date(row[1]))
                employees[employee.name] = employee
        return employees

    def refresh(self):
        # Re-reads employees from the database file.
        self.__init__(self._db_file)

    def get_employee(self, name):
        """Returns the named employee or raises VacalcError."""
        try:
            return self._employees[name]
        except KeyError:
            raise VacalcError("Employee '%s' not found." % name)

    def get_all_employees(self):
        return self._employees.values()

    def add_employee(self, name, startdate):
        """Adds and persists a new employee.

        Raises VacalcError on a duplicate name or an invalid start date.
        """
        if name in self._employees:
            raise VacalcError("Employee '%s' already exists in the system."
                              % name)
        employee = Employee(name, self._parse_date(startdate))
        self._employees[employee.name] = employee
        self._serialize(employee)
        return employee

    def _serialize(self, employee):
        # Appends the employee to the csv file; no-op without a db file.
        if not self._db_file:
            return
        with open(self._db_file, 'a') as db:
            writer = csv.writer(db, lineterminator='\n')
            writer.writerow([employee.name, employee.startdate])

    def _parse_date(self, datestring):
        # Parses 'yyyy-mm-dd' into datetime.date, converting failures
        # into VacalcErrors with user friendly messages.
        if not datestring:
            raise VacalcError('No start date given.')
        try:
            year, month, day = (int(item) for item in datestring.split('-'))
        except ValueError:
            raise VacalcError('Invalid start date.')
        try:
            return datetime.date(year, month, day)
        except ValueError, err:
            raise VacalcError(err.args[0].capitalize() + '.')
class Employee(object):
    """Calculates earned vacation days from the employment start date.

    Vacation accrues per month worked before the credit year ends in
    April (credit_start_month). A full year of work gives max_vacation
    days; the first, partial month counts only if it contains at least
    work_days_required working days (Mon-Fri).
    """
    max_vacation = int(12 * 2.5)
    no_vacation = 0
    vacation_per_month = 2
    credit_start_month = 4
    work_days_required = 14

    def __init__(self, name, startdate):
        self.name = name
        self.startdate = startdate

    def count_vacation(self, year):
        """Return the number of vacation days earned for *year*."""
        return self._count_vacation(self.startdate, year)

    def _count_vacation(self, startdate, year):
        if self._has_worked_longer_than_year(startdate, year):
            return self.max_vacation
        if self._started_after_holiday_credit_year_ended(startdate, year):
            return self.no_vacation
        return self.vacation_per_month * self._count_working_months(startdate)

    def _has_worked_longer_than_year(self, start, year):
        elapsed = year - start.year
        if elapsed > 1:
            return True
        return elapsed == 1 and start.month < self.credit_start_month

    def _started_after_holiday_credit_year_ended(self, start, year):
        if start.year > year:
            return True
        return start.year == year and start.month >= self.credit_start_month

    def _count_working_months(self, start):
        months = self.credit_start_month - start.month
        if months <= 0:
            months += 12
        if self._first_month_has_too_few_working_days(start):
            months -= 1
        return months

    def _first_month_has_too_few_working_days(self, start):
        count = 0
        date = start
        while date is not None:
            if self._is_working_day(date):
                count += 1
            date = self._next_date(date)
        return count < self.work_days_required

    def _is_working_day(self, date):
        # Monday..Friday have weekday() values 0..4.
        return date.weekday() < 5

    def _next_date(self, date):
        # Returns None at the end of the month (day overflow raises).
        try:
            return date.replace(day=date.day+1)
        except ValueError:
            return None
| Python |
from vacalcapp import VacalcApplication
| Python |
from javax.swing import JFrame, JList, JPanel, JLabel, JTextField, JButton, Box, BoxLayout, JTable
from javax.swing.event import ListSelectionListener
from javax.swing.table import AbstractTableModel
from java.awt.event import ActionListener
from java.awt import FlowLayout, BorderLayout, Dimension, Font, Color
class VacalcFrame(object):
    """Main application window: employee overview plus a details panel."""

    def __init__(self, employees):
        self._frame = JFrame('Vacation Calculator',
                             defaultCloseOperation=JFrame.EXIT_ON_CLOSE)
        self._frame.setContentPane(self._create_ui(employees))
        self._frame.pack()

    def _create_ui(self, employees):
        # The details panel is created up front but added to the frame only
        # when the welcome panel is dismissed (see _ensure_details_shown).
        panel = JPanel(layout=FlowLayout())
        self._overview = EmployeeOverview(employees, self)
        self._details = EmployeeDetails(employees)
        self._welcome = Welcome()
        panel.add(self._overview)
        panel.add(self._welcome)
        return panel

    def show(self):
        self._frame.setVisible(True)

    def employee_selected(self, employee):
        # Callback from EmployeeOverview.
        self._ensure_details_shown()
        self._details.show_employee(employee)

    def edit_new_employee(self):
        # Callback from EmployeeOverview.
        self._ensure_details_shown()
        self._details.edit_new_employee()

    def _ensure_details_shown(self):
        # Replaces the welcome panel with the details panel on first use.
        if self._welcome:
            self._frame.contentPane.remove(self._welcome)
            self._frame.contentPane.add(self._details)
            self._frame.pack()
            self._welcome = None
class EmployeeOverview(JPanel):
    """Panel containing the employee list and a 'New Employee' button."""

    def __init__(self, employees, overview_listener):
        JPanel.__init__(self, layout=BorderLayout())
        self._listener = overview_listener
        self._employee_list = self._create_employee_list(employees)
        new_emp_btn = self._create_new_employee_button()
        self.add(self._employee_list.widget, BorderLayout.PAGE_START)
        self.add(new_emp_btn, BorderLayout.PAGE_END)

    def _create_employee_list(self, employees):
        # NOTE: local name 'list' shadows the builtin.
        list = EmployeeList(employees)
        list.add_selection_listener(ListenerFactory(ListSelectionListener,
                                                    self._list_item_selected))
        return list

    def _create_new_employee_button(self):
        btn = JButton('New Employee', name='new_employee_button')
        btn.addActionListener(ListenerFactory(ActionListener, self._new_employee))
        return btn

    def _list_item_selected(self, event):
        self._listener.employee_selected(self._employee_list.selected_employee())

    def _new_employee(self, event):
        self._employee_list.clear_selection()
        self._listener.edit_new_employee()
class EmployeeList(object):
    """Wraps a JList of employee names, kept in sync with the controller."""

    def __init__(self, employees):
        self._employees = employees
        self._employees.add_change_listener(self)
        self._list = JList(preferredSize=(200, 200), name='employee_list')
        self._populate_list()

    def _populate_list(self):
        self._list.setListData(self._employee_names())

    def _employee_names(self):
        return [e.name for e in self._employees.all()]

    def add_selection_listener(self, listener):
        self._list.addListSelectionListener(listener)

    def selected_employee(self):
        # Relies on list items being in the same order as employees.all().
        return self._employees.all()[self._list.getSelectedIndex()]

    def employee_added(self, employee):
        # Change listener callback: refresh and select the new entry.
        self._populate_list()
        self._list.setSelectedValue(employee.name, True)

    def adding_employee_failed(self, error):
        # Change listener callback: errors are shown by the details panel.
        pass

    def clear_selection(self):
        self._list.clearSelection()

    @property
    def widget(self):
        return self._list
class EmployeeDetails(JPanel):
    """Panel for viewing a selected employee and adding new employees."""

    def __init__(self, employees):
        JPanel.__init__(self, preferredSize=(400, 200))
        layout = BoxLayout(self, BoxLayout.Y_AXIS)
        self.setLayout(layout)
        self._employees = employees
        employees.add_change_listener(self)
        self._create_status_label()
        self._create_name_editor()
        self._create_start_date_editor()
        self._create_save_button()
        self._create_vacation_display()
        self._adding_employee = False

    def _create_status_label(self):
        self._status_label = JLabel(name='status_label',
                                    font=Font(Font.SANS_SERIF, Font.PLAIN, 11))
        self.add(self._status_label)
        self._add_with_padding(self._status_label, 5)

    def _create_name_editor(self):
        self.add(JLabel(text='Employee Name:'))
        self._name_editor = FixedHeightTextField('name_input')
        self._add_with_padding(self._name_editor, 5)

    def _create_start_date_editor(self):
        self.add(JLabel(text='Start Date (yyyy-mm-dd):'))
        self._start_date_editor = FixedHeightTextField('start_input')
        self._add_with_padding(self._start_date_editor, 5)

    def _create_save_button(self):
        self._save_button = JButton('Save', name='save_button', visible=False)
        self._save_button.addActionListener(ListenerFactory(ActionListener,
                                                            self._save_button_pushed))
        self._add_with_padding(self._save_button, 5)

    def _create_vacation_display(self):
        # Vacation table is currently disabled; see commented-out code.
        # self._display = JTable()
        # self._header = self._display.getTableHeader()
        # self.add(self._header)
        # self.add(self._display)
        pass

    def _add_with_padding(self, component, padding):
        self.add(component)
        self.add(Box.createRigidArea(Dimension(0, padding)))

    def show_employee(self, employee):
        # Shows an existing employee in read-only mode.
        self._name_editor.setText(employee.name)
        self._start_date_editor.setText(str(employee.startdate))
        self._name_editor.setEditable(False)
        self._start_date_editor.setEditable(False)
        self._save_button.setVisible(False)
        # Keep the status message visible right after adding an employee.
        if self._adding_employee:
            self._adding_employee = False
        else:
            self._status_label.setText('')
        # self._display.setVisible(True)
        # self._display.setModel(VacationTableModel(employee))
        # self._header.setVisible(True)

    def edit_new_employee(self):
        # Clears the editors and makes them writable for a new employee.
        self._name_editor.setText('')
        self._start_date_editor.setText('')
        self._name_editor.setEditable(True)
        self._start_date_editor.setEditable(True)
        self._save_button.setVisible(True)
        # self._display.setVisible(False)
        # self._header.setVisible(False)
        self._adding_employee = True

    def _save_button_pushed(self, event):
        self._employees.add(self._name_editor.getText(),
                            self._start_date_editor.getText())

    def employee_added(self, employee):
        # Change listener callback: show a success message.
        self._status_label.setForeground(Color.BLACK)
        self._status_label.setText("Employee '%s' was added successfully." % employee.name)
        self._save_button.setVisible(False)

    def adding_employee_failed(self, reason):
        # Change listener callback: show the failure reason in red.
        self._status_label.setForeground(Color.RED)
        self._status_label.setText(reason)
class FixedHeightTextField(JTextField):
    """JTextField that does not stretch vertically in a BoxLayout."""

    def __init__(self, name):
        JTextField.__init__(self, name=name)
        # Cap the maximum height to the preferred height.
        prefsize = self.preferredSize
        maxsize = self.maximumSize
        self.setMaximumSize(Dimension(maxsize.width, prefsize.height))
class Welcome(JPanel):
    """Placeholder panel shown before any employee is selected or added."""

    def __init__(self):
        JPanel.__init__(self, preferredSize=(400,200))
        self.add(JLabel('VaCalc v0.1'))
class VacationTableModel(AbstractTableModel):
    """Single-row table model: year and the employee's earned vacation days.

    NOTE(review): the year is hard-coded to 2010.
    """
    _columns = ['Year', 'Vacation']

    def __init__(self, employee):
        self._employee = employee

    def getColumnName(self, index):
        return self._columns[index]

    def getColumnCount(self):
        return 2

    def getRowCount(self):
        return 1

    def getValueAt(self, row, col):
        if col == 0:
            return '2010'
        return '%s days' % self._employee.count_vacation(2010)
def ListenerFactory(interface, func):
    """Creates an instance of the given Java listener interface with its
    single method implemented by *func*.

    NOTE(review): assumes the interface declares exactly one method that
    java.lang.Object lacks; with several, an arbitrary one is picked.
    """
    from java.lang import Object
    method = list(set(dir(interface)) - set(dir(Object)))[0]
    return type('Listener', (interface,), {method: func})()
| Python |
import os
import tempfile
from org.robotframework.vacalc import VacationCalculator
from vacalc.ui import VacalcFrame
from vacalc.employeestore import EmployeeStore, VacalcError
class VacalcApplication(VacationCalculator):
    """Application entry point wiring the store and UI together."""

    def create(self):
        # The database file can be overridden with the VACALC_DB env var.
        default_db = os.path.join(tempfile.gettempdir(), 'vacalcdb.csv')
        self._db_file= os.environ.get('VACALC_DB', default_db)
        # NOTE(review): _size is not used in this class -- presumably read
        # elsewhere (superclass or tests); confirm before removing.
        self._size = os.stat(self._db_file).st_size if os.path.exists(self._db_file) else 0
        self._store = EmployeeStore(self._db_file)
        self._frame = VacalcFrame(EmployeeController(self._store))
        self._frame.show()
class EmployeeController(object):
    """Mediates between the UI and the EmployeeStore, notifying listeners
    of successful and failed additions."""

    def __init__(self, employeestore):
        self._store = employeestore
        self._change_listeners = []

    def all(self):
        return self._store.get_all_employees()

    def add(self, name, startdate):
        # Every registered listener is told about success or failure.
        try:
            employee = self._store.add_employee(name, startdate)
        except VacalcError, err:
            for l in self._change_listeners:
                l.adding_employee_failed(unicode(err))
        else:
            for l in self._change_listeners:
                l.employee_added(employee)

    def add_change_listener(self, listener):
        self._change_listeners.append(listener)
| Python |
from robot import run as run_robot
import cProfile
import pstats
import sys

# Profiles a Robot Framework test run and prints the collected statistics.
# The data source can be given as the first command line argument; the old
# hard-coded developer path is the fallback for backwards compatibility.
if len(sys.argv) > 1:
    datasource = sys.argv[1]
else:
    datasource = '/home/husa/workspace/robotframework/atest/testdata/misc/'
filename = 'robot.profile'
cProfile.run('run_robot(%r)' % datasource, filename)
p = pstats.Stats(filename)
# sort_stats(-1) sorts by standard name (the old numeric sort API).
p.strip_dirs().sort_stats(-1).print_stats()
| Python |
#!/usr/bin/env python
"""Script to generate atest runners based on data files.
Usage: %s path/to/data.file
"""
from __future__ import with_statement
import sys, os

if len(sys.argv) != 2:
    print __doc__ % os.path.basename(sys.argv[0])
    sys.exit(1)
# The runner is written to the same relative location under atest/robot
# as the data file has under atest/testdata.
inpath = os.path.abspath(sys.argv[1])
outpath = inpath.replace(os.path.join('atest', 'testdata'),
                         os.path.join('atest', 'robot'))
dirname = os.path.dirname(outpath)
if not os.path.exists(dirname):
    # NOTE(review): os.mkdir creates only one directory level -- assumes
    # the parent directory already exists.
    os.mkdir(dirname)
with open(inpath) as input:
    tests = []
    process = False
    for line in input.readlines():
        line = line.rstrip()
        if line.startswith('*'):
            # Table header: collect names only inside test case tables.
            name = line.replace('*', '').replace(' ', '').upper()
            process = name in ('TESTCASE', 'TESTCASES')
        elif process and line and line[0] != ' ':
            tests.append(line.split(' ')[0])
with open(outpath, 'w') as output:
    path = inpath.split(os.path.join('atest', 'testdata'))[1][1:]
    output.write("""*** Settings ***
Suite Setup Run Tests ${EMPTY} %s
Force Tags regression pybot jybot
Resource atest_resource.txt
*** Test Cases ***
""" % path.replace(os.sep, '/'))
    # Each found test gets a standard 'Check Test Case' runner test.
    for test in tests:
        output.write(test + '\n Check Test Case ${TESTNAME}\n\n')
print outpath
| Python |
#!/usr/bin/env python
"""A script for running Robot Framework's acceptance tests.
Usage: run_atests.py interpreter [options] datasource(s)
Data sources are paths to directories or files under `robot` folder.
Available options are the same that can be used with Robot Framework.
See its help (e.g. `pybot --help`) for more information.
The specified interpreter is used by acceptance tests under `robot` to
run test cases under `testdata`. It can be simply `python` or `jython`
(if they are in PATH) or to a path a selected interpreter (e.g.
`/usr/bin/python26`). Note that this script itself must always be
executed with Python.
Examples:
$ atest/run_atests.py python --test example atest/robot
$ atest/run_atests.py /usr/bin/jython25 atest/robot/tags/tag_doc.txt
"""
import signal
import subprocess
import os.path
import shutil
import glob
import sys
from zipfile import ZipFile, ZIP_DEFLATED
CURDIR = os.path.dirname(os.path.abspath(__file__))
RESULTDIR = os.path.join(CURDIR, 'results')
# Make the development version of Robot Framework importable.
sys.path.insert(0, os.path.join(CURDIR, '..', 'src'))
import robot

# Command line options for the acceptance test run. 'SP' is an escaped
# space (see --escape below) so the values survive joining with spaces;
# %(...)s placeholders are filled in atests().
ARGUMENTS = ' '.join('''
--doc RobotSPFrameworkSPacceptanceSPtests
--reporttitle RobotSPFrameworkSPTestSPReport
--logtitle RobotSPFrameworkSPTestSPLog
--metadata Interpreter:%(INTERPRETER)s
--metadata Platform:%(PLATFORM)s
--variable INTERPRETER:%(INTERPRETER)s
--variable STANDALONE_JYTHON:NO
--pythonpath %(PYTHONPATH)s
--include %(RUNNER)s
--outputdir %(OUTPUTDIR)s
--output output.xml
--report report.html
--log log.html
--escape space:SP
--escape star:STAR
--escape paren1:PAR1
--escape paren2:PAR2
--critical regression
--noncritical to_be_clarified
--noncritical static_html_checks
--SuiteStatLevel 3
--TagStatCombine jybotNOTpybot
--TagStatCombine pybotNOTjybot
--TagStatExclude pybot
--TagStatExclude jybot
'''.strip().splitlines())
def atests(interpreter, *params):
    """Runs acceptance tests with the given interpreter and extra params.

    Returns the return code of the executed test run.
    """
    if os.path.isdir(RESULTDIR):
        shutil.rmtree(RESULTDIR)
    # 'and/or' idiom instead of a conditional expression (old Python compat).
    runner = ('jython' in os.path.basename(interpreter) and 'jybot'
              or 'pybot')
    args = ARGUMENTS % {
        'PYTHONPATH' : os.path.join(CURDIR, 'resources'),
        'OUTPUTDIR' : RESULTDIR,
        'INTERPRETER': interpreter,
        'PLATFORM': sys.platform,
        'RUNNER': runner
    }
    if os.name == 'nt':
        args += ' --exclude nonwindows'
    if sys.platform == 'darwin' and runner == 'pybot':
        args += ' --exclude nonmacpython'
    runner = os.path.join(os.path.dirname(robot.__file__), 'runner.py')
    command = '%s %s %s %s' % (sys.executable, runner, args, ' '.join(params))
    print 'Running command\n%s\n' % command
    sys.stdout.flush()
    # Ignore Ctrl-C here so the child test process handles interruption.
    signal.signal(signal.SIGINT, signal.SIG_IGN)
    return subprocess.call(command.split())
def buildbot(interpreter, *params):
    """Runs acceptance tests for the CI and archives output XMLs to a zip.

    Returns the return code of the test run.
    """
    params = '--log NONE --report NONE --SplitOutputs 1'.split() + list(params)
    rc = atests(interpreter, *params)
    zippath = os.path.join(RESULTDIR, 'outputs.zip')
    zipfile = ZipFile(zippath, 'w', compression=ZIP_DEFLATED)
    for output in glob.glob(os.path.join(RESULTDIR, '*.xml')):
        zipfile.write(output, os.path.basename(output))
    zipfile.close()
    print 'Archive:', zippath
    return rc
# rc 251 signals invalid usage; otherwise the test run's rc is propagated.
if __name__ == '__main__':
    if len(sys.argv) == 1 or '--help' in sys.argv:
        print __doc__
        rc = 251
    elif sys.argv[1] == 'buildbot':
        rc = buildbot(*sys.argv[2:])
    else:
        rc = atests(*sys.argv[1:])
    sys.exit(rc)
| Python |
def get_variables(*args):
    """Robot Framework dynamic variable file entry point.

    Exposes the given arguments both joined as a single string and as a
    list variable (via the LIST__ prefix convention).
    """
    joined = ' '.join(args)
    return {'PPATH_VARFILE_2': joined,
            'LIST__PPATH_VARFILE_2': args}
| Python |
PPATH_VARFILE = "Variable from varible file in PYTHONPATH" | Python |
# Collection values exposed as variables -- presumably asserted by tests
# elsewhere, so do not change them.
list1 = [1, 2, 3, 4, 'foo', 'bar']
dictionary1 = {'a': 1}
dictionary2 = {'a': 1, 'b': 2}
| Python |
class ParameterLibrary:
    """Test library whose import parameters can be queried with a keyword."""

    def __init__(self, host='localhost', port='8080'):
        self.host, self.port = host, port

    def parameters(self):
        """Returns the host and port given when the library was imported."""
        return (self.host, self.port)
# Attributes presumably exposed as variables in tests -- keep values as-is.
some_string = 'Hello, World!'

class _SomeObject:
    # No behavior of its own; an instance is exposed below as some_object.
    pass

some_object = _SomeObject()
library = "It should be OK to have an attribute with same name as the module"

def keyword_from_submodule(arg='World'):
    """Returns a greeting for *arg*, defaulting to 'World'."""
    greeting = "Hello, %s!" % arg
    return greeting
| Python |
class TraceLogArgsLibrary(object):
    """Keywords with different argument signatures for trace log tests.

    Argument names are part of the tested interface and must not change.
    """

    def only_mandatory(self, mand1, mand2):
        """Keyword with two mandatory arguments."""

    def mandatory_and_default(self, mand, default="default value"):
        """Keyword with one mandatory and one default-valued argument."""

    def multiple_default_values(self, a=1, a2=2, a3=3, a4=4):
        """Keyword with several default-valued arguments."""

    def mandatory_and_varargs(self, mand, *varargs):
        """Keyword with a mandatory argument followed by varargs."""

    def return_object_with_invalid_repr(self):
        """Returns an object whose __repr__ yields a unicode string."""
        return InvalidRepr()

    def return_object_with_non_ascii_string_repr(self):
        """Returns an object whose __repr__ contains a non-ASCII byte."""
        return ByteRepr()
class InvalidRepr:
    """Helper whose repr() is a unicode string (invalid on Python 2)."""

    def __repr__(self):
        result = u'Hyv\xe4'
        return result
class ByteRepr:
    """Helper whose repr() contains a non-ASCII byte."""

    def __repr__(self):
        result = 'Hyv\xe4'
        return result
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.