code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
"""DisjointSets.py
Description:
Based on details provided by: http://en.wikipedia.org/wiki/Disjoint-set_data_structure
and modified based on notes of CS 473 Algorithms course by University of Illinois.
Finally adapted to Kruskal problem presented by course 75.29 Teoria de Algoritmos on
University of Buenos Aires.
Authors:
Garay, Ignacio
Liguori, Ariel
Musumeci, Pablo
"""
# Global registry mapping each stored value to its Node, so FindSet can
# look up the tree node for a given value.
valueNodeDict = {}
class Node():
    '''A node in a union-find tree: a value, a parent link and a rank.

    Creating a Node registers it in the module-level valueNodeDict so that
    FindSet can later locate the node for a given value.
    '''
    def __init__(self, value, parent, rank):
        self.value = value    # payload identifying this element
        self.parent = parent  # parent Node in the tree (a root points to itself)
        self.rank = rank      # upper bound on the height of this subtree
        valueNodeDict[value] = self
    def __str__(self):
        # Bug fix: the original returned str(value) using the bare name
        # `value`, which raises NameError; the node's own value is meant.
        return str(self.value)
class Universe():
    '''Tracks the root node of every disjoint set created so far.'''
    def __init__(self):
        # Roots of all trees registered via addSet.
        self.sets = []
    def addSet(self, root):
        '''Record a new tree root in the universe.'''
        self.sets.append(root)
# Single module-level Universe that MakeSet registers every new root with.
U = Universe()
def internalFindSet(x):
    '''Return the root of x's tree, compressing the path as it goes.

    Every node visited on the way up is re-parented directly onto the
    root, which keeps subsequent lookups nearly O(1).
    '''
    # First pass: climb to the root (the node that is its own parent).
    root = x
    while root.parent is not root:
        root = root.parent
    # Second pass: point every node on the path straight at the root.
    while x is not root:
        x.parent, x = root, x.parent
    return root
def MakeSet(x):
    '''Create a singleton set containing x.

    The new node is its own parent (i.e. a root) with rank 0, and the
    root is registered with the global Universe.  All steps are O(1).
    '''
    singleton = Node(x, None, 0)
    singleton.parent = singleton  # a root points at itself
    U.addSet(singleton)
def FindSet(x):
    '''Look up the node registered for value x and return its tree root.'''
    return internalFindSet(valueNodeDict[x])
def Union(x, y):
    '''Destructively merge the sets holding values x and y (union by rank).

    The root of lower rank is attached beneath the root of higher rank;
    on a tie the surviving root's rank grows by one.
    '''
    root_x = FindSet(x)
    root_y = FindSet(y)
    if root_x.rank > root_y.rank:
        root_y.parent = root_x
    else:
        root_x.parent = root_y
        if root_x.rank == root_y.rank:
            root_y.rank += 1
import re
import heapq
from UnionFind import UnionFind
from DisjointSets import *
class Edges:
    '''A weighted edge joining two vertices (undirected by convention).'''
    def __init__(self, vertex1, vertex2, weight):
        # Endpoints and weight are stored exactly as supplied.
        self.vertex1 = vertex1
        self.vertex2 = vertex2
        self.weight = weight
class Vertex:
    '''A graph vertex wrapping an arbitrary value.'''
    def __init__(self, value):
        self.value = value
    def __str__(self):
        # The string form is the raw value itself (assumed to be a string).
        return self.value
class Grafo:
def __str__(self):
#Imprime el contenido de los vertices y las aristas
s = "Vertex -> Edges\n"
for k, v in self.grafo.iteritems():
s+= "%s -> %s\n" % (k, v)
return s
def __init__(self):
self.grafo = {}
def __init__(self, filePointer):
#Construye el grafo a partir de parsear el archivo
self.grafo = {}
for line in filePointer:
lines = line.replace(' ','').split(':')
# Guardo el vertice actual
verticeActual = lines[0]
if (not self.isVertex(Vertex(verticeActual))):
self.addVertex(Vertex(verticeActual))
num = ""
verticeVecino = ""
cadena = lines[1]
# Proceso la linea
for i in range(len(cadena)):
char = cadena[i]
if (char == ')'):
# Agrego arista entre VActual y V que estoy procesando
x = int(num)
print "Agrego arista %s , %s , %s" % (verticeActual, verticeVecino, x)
#self.addEdge(verticeActual, verticeVecino, x)
self.addEdge( Edges(Vertex(verticeActual), Vertex(verticeVecino), x) )
print "("+verticeActual +","+ verticeVecino+"," +num+")"
elif (char == '('):
num = ""
elif (char == ','):
verticeVecino = num
if (not self.isVertex(Vertex(verticeVecino))):
#self.addVertex(verticeVecino)
self.addVertex(Vertex(verticeVecino))
num = ""
else:
num += char
def addVertex(self, vertice):
#Agrega un vertice al grafo
self.grafo[vertice.value] = {}
def delVertex(self, vertice):
#Si el vertice esta en el grafo, lo remueve
try:
self.grafo.pop(vertice)
return True
except KeyError:
#El vertice no estan en el grafo
return False
def isVertex(self, vertice):
#Retorna true si el vertice esta en el grafo
try:
self.grafo[vertice.value]
return True
except KeyError:
return False
# def addEdge(self, vertice1, vertice2, arista):
#Agrega una arista si los vertices existen
# try:
# self.grafo[vertice1][vertice2] = arista
# self.grafo[vertice2][vertice1] = arista
# print "V1: ",self.grafo[vertice1]
# print "V2: ",self.grafo[vertice2]
# return True
# except KeyError:
# print "Error al agregar arista"
#Los vertices no estan en el grafo
# return False
def addEdge(self, edge):
#Agrega una arista si los vertices existen
try:
self.grafo[edge.vertex1.value][edge.vertex2.value] = edge.weight
self.grafo[edge.vertex2.value][edge.vertex1.value] = edge.weight
print "V1: ",self.grafo[edge.vertex1.value]
print "V2: ",self.grafo[edge.vertex2.value]
return True
except KeyError:
print "Error al agregar arista"
#Los vertices no estan en el grafo
return False
def delEdge(self, vertice1, vertice2):
#Remueve la arista del grafo
try:
self.grafo[vertice][vertice2].pop()
return True
except KeyError:
#Los vertices no estan en el grafo
return False
def getEdge(self, vertice1, vertice2):
try:
return self.grafo[vertice1][vertice2]
except KeyError:
#Los vertices no estan en el grafo
print "Los vertices no estan en el grafo"
return False
# def getMST(self):
# subtrees = UnionFind()
# tree = []
# edges = []
# for u in self.getAllVertex():
# for v in self.getVecinos(u):
##Agrego todas las aristas a un heap
# heapq.heappush(edges, (self.getEdge(u,v),u,v))
# print "Edges", edges
# min = heapq.heappop(edges)
# try:
# while (min):
# if subtrees[u] != subtrees[v]:
# tree.append((u,v))
# subtrees.union(u,v)
# min = heapq.heappop(edges)
# except IndexError:
# return tree
# Kruskal Definition
# - create a forest F (a set of trees), where each vertex in the graph is a separate tree
# - create a set S containing all the edges in the graph
# - while S is nonempty and F is not yet spanning
# remove an edge with minimum weight from S
# if that edge connects two different trees, then add it to the forest, combining the trees,
# otherwise discard that edge.
# - At the termination of the algorithm, the forest has only one component (ST).
def kruskal(self):
#for vertexs in getAllVertex()
#for edges in getVecinos
edges = []
for u in self.getAllVertex():
for v in self.getVecinos(u):
heapq.heappush(edges, (self.getEdge(u,v),Edges(u,v,self.getEdge(u,v))) )
T = [] #this contains all the edges in the tree
#run makeset on all the vertices
for vertex in self.getAllVertex():
MakeSet(vertex)
while edges:
min_edge = heapq.heappop(edges)[1]
if FindSet(min_edge.vertex1) is not FindSet(min_edge.vertex2):
#perform a union and add this edge to the Tree
T.append(min_edge)
Union(min_edge.vertex1, min_edge.vertex2)
return T
#return True
def getAllVertex(self):
# Devuelve una lista con todos los vertices
return self.grafo.keys()
def getAllEdges(self):
edges = []
for u in self.getAllVertex():
for v in self.getVecinos(u):
heapq.heappush(edges, Edges(u,v,self.getEdge(u,v)))
return edges
def getVecinos(self, vertex):
try:
return self.grafo[vertex]
except KeyError:
return False
| Python |
"""Graph.py
Description:
Simple, undirected and connected graph implementation for course 75.29
Teoria de Algoritmos at University of Buenos Aires. Provide simple graph
operations, calculates the minimum spanning tree using Kruskal algorithm
and work with Union and Set dataStructures defined on DisjointSet code,
which represent a Union by rank and uses path compression.
Authors:
Garay, Ignacio
Liguori, Ariel
Musumeci, Pablo
"""
import re
import heapq
from DisjointSets import *
class Edges:
    '''A weighted, undirected edge between two vertices.'''
    def __init__(self, vertex1, vertex2, weight):
        self.vertex1 = vertex1
        self.vertex2 = vertex2
        self.weight = weight
    def __str__(self):
        # Rendered as "v1 -> v2 : weight".
        return "%s -> %s : %s" % (self.vertex1, self.vertex2, self.weight)
class Vertex:
    '''A graph vertex wrapping an arbitrary value.'''
    def __init__(self, value):
        self.value = value
    def __str__(self):
        # The string form is the raw value itself (assumed to be a string).
        return self.value
class Graph:
def __str__(self):
#Imprime el contenido de los vertices y las aristas
s = "Vertex -> Edges\n"
for k, v in self.graph.iteritems():
s+= "%s -> %s\n" % (k, v)
return s
def __init__(self):
self.graph = {}
def __init__(self, filePointer):
#Construye el graph a partir de parsear el archivo
self.graph = {}
for line in filePointer:
lines = line.replace(' ','').split(':')
# Guardo el vertice actual
verticeActual = lines[0]
if (not self.isVertex(Vertex(verticeActual))):
self.addVertex(Vertex(verticeActual))
num = ""
verticeVecino = ""
cadena = lines[1]
# Proceso la linea
for i in range(len(cadena)):
char = cadena[i]
if (char == ')'):
# Agrego arista entre VActual y V que estoy procesando
x = int(num)
#print "Agrego arista %s , %s , %s" % (verticeActual, verticeVecino, x)
self.addEdge( Edges(Vertex(verticeActual), Vertex(verticeVecino), x) )
#print "("+verticeActual +","+ verticeVecino+"," +num+")"
elif (char == '('):
num = ""
elif (char == ','):
verticeVecino = num
if (not self.isVertex(Vertex(verticeVecino))):
self.addVertex(Vertex(verticeVecino))
num = ""
else:
num += char
def addVertex(self, vertice):
#Agrega un vertice al graph
self.graph[vertice.value] = {}
def delVertex(self, vertice):
#Si el vertice esta en el graph, lo remueve
try:
self.graph.pop(vertice)
return True
except KeyError:
#El vertice no estan en el graph
return False
def isVertex(self, vertice):
#Retorna true si el vertice esta en el geafo
try:
self.graph[vertice.value]
return True
except KeyError:
return False
def addEdge(self, edge):
#Agrega una arista si los vertices existen
try:
self.graph[edge.vertex1.value][edge.vertex2.value] = edge.weight
self.graph[edge.vertex2.value][edge.vertex1.value] = edge.weight
#print "V1: ",self.graph[edge.vertex1.value]
#print "V2: ",self.graph[edge.vertex2.value]
return True
except KeyError:
print "Error trying to add edge: Vertexs doesn't belong to graph"
#Los vertices no estan en el graph
return False
def delEdge(self, vertice1, vertice2):
#Remueve la arista del graph
try:
self.graph[vertice][vertice2].pop()
return True
except KeyError:
#Los vertices no estan en el graph
print "Error: Vertexs doesn't belong to graph"
return False
def getEdge(self, vertice1, vertice2):
try:
return self.graph[vertice1][vertice2]
except KeyError:
#Los vertices no estan en el grafo
print "Error: Vertexs doesn't belong to graph"
return False
# Kruskal Definition
# - create a forest F (a set of trees), where each vertex in the graph is a separate tree
# - create a set S containing all the edges in the graph
# - while S is nonempty and F is not yet spanning
# remove an edge with minimum weight from S
# if that edge connects two different trees, then add it to the forest, combining the trees,
# otherwise discard that edge.
# - At the termination of the algorithm, the forest has only one component (ST).
# Complexity order: O (E log E) + O(V) + O(E)
# - Since G is connected |E| >= |V| --> O( E log E) + O(E) + O(E) --> O(E log E)
# - |E| <= |V|^2 -> log |E| = O(2 log V) = O(log V)
# - Finally the worst case analysis is: O ( E log V)
def kruskal(self):
edges = []
# E=Edges; V=Vertex
# O(|E| log E)
for u in self.getAllVertex(): #O(1)
for v in self.getVecinos(u): #O(1)
heapq.heappush(edges, (self.getEdge(u,v),Edges(u,v,self.getEdge(u,v))) ) #O(1)
T = [] #this contains all the edges in the tree
# O( |V| * MakeSet) = O(V)
for vertex in self.getAllVertex():
MakeSet(vertex) #O(1) See dijoint set for verification.
#O(E)
while edges:
min_edge = heapq.heappop(edges)[1] #O(E)
if FindSet(min_edge.vertex1) is not FindSet(min_edge.vertex2): #O(2 log E) = O(log E)
#perform a union and add this edge to the Tree
T.append(min_edge) #O(1)
Union(min_edge.vertex1, min_edge.vertex2) #O(log E)
return T
def getAllVertex(self):
# Devuelve una lista con todos los vertices
return self.graph.keys()
def getAllEdges(self):
edges = []
for u in self.getAllVertex():
for v in self.getVecinos(u):
heapq.heappush(edges, Edges(u,v,self.getEdge(u,v)))
return edges
def getVecinos(self, vertex):
try:
return self.graph[vertex]
except KeyError:
print "Error: Vertex doesn't belong to graph"
return False
| Python |
from Graph import Graph
def main():
graphFile = raw_input("Enter the file name containing a valid graph:")
filep = open(graphFile)
graph = Graph(filep)
print "Evaluating the graph:\n"
print graph
T = graph.kruskal()
print "Min. Spanning tree: \n"
for edges in T:
print edges
return 0
if __name__ == '__main__':
main()
| Python |
"""UnionFind.py
Union-find data structure. Based on Josiah Carlson's code,
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/215912
with significant additional changes by D. Eppstein.
"""
class UnionFind:
    """Weighted quick-union with path compression over hashable items.

    X[item] returns the representative (name) of the set containing item,
    creating a fresh singleton if the item was never seen before.
    X.union(item1, item2, ...) merges the sets of all given items under
    the root whose tree is heaviest.
    """
    def __init__(self):
        """Start with no known items."""
        self.weights = {}   # root -> number of items in its tree
        self.parents = {}   # item -> parent item (roots map to themselves)
    def __getitem__(self, item):
        """Return the representative of the set containing `item`."""
        parents = self.parents
        if item not in parents:
            # First sighting: the item forms its own singleton set.
            parents[item] = item
            self.weights[item] = 1
            return item
        # Collect the chain of ancestors up to (and including) the root.
        trail = [item]
        while parents[trail[-1]] != trail[-1]:
            trail.append(parents[trail[-1]])
        root = trail[-1]
        # Compress: every node on the trail now points straight at the root.
        for node in trail:
            parents[node] = root
        return root
    def __iter__(self):
        """Yield every item this structure has ever seen."""
        return iter(self.parents)
    def union(self, *objects):
        """Merge the sets containing each of `objects` into a single set."""
        roots = [self[obj] for obj in objects]
        # The surviving root is the one with the largest tree (ties broken
        # by comparing the roots themselves via the max() over pairs).
        heaviest = max([(self.weights[r], r) for r in roots])[1]
        for r in roots:
            if r == heaviest:
                continue
            self.weights[heaviest] += self.weights[r]
            self.parents[r] = heaviest
| Python |
"""DisjointSets.py
Description:
Based on details provided by: http://en.wikipedia.org/wiki/Disjoint-set_data_structure
and modified based on notes of CS 473 Algorithms course by University of Illinois.
Finally adapted to Kruskal problem presented by course 75.29 Teoria de Algoritmos on
University of Buenos Aires.
Authors:
Garay, Ignacio
Liguori, Ariel
Musumeci, Pablo
"""
# Global registry mapping each stored value to its Node, so FindSet can
# look up the tree node for a given value.
valueNodeDict = {}
class Node():
    '''A node in a union-find tree: a value, a parent link and a rank.

    Creating a Node registers it in the module-level valueNodeDict so that
    FindSet can later locate the node for a given value.
    '''
    def __init__(self, value, parent, rank):
        self.value = value    # payload identifying this element
        self.parent = parent  # parent Node in the tree (a root points to itself)
        self.rank = rank      # upper bound on the height of this subtree
        valueNodeDict[value] = self
    def __str__(self):
        # Bug fix: the original returned str(value) using the bare name
        # `value`, which raises NameError; the node's own value is meant.
        return str(self.value)
class Universe():
    '''Tracks the root node of every disjoint set created so far.'''
    def __init__(self):
        # Roots of all trees registered via addSet.
        self.sets = []
    def addSet(self, root):
        '''Record a new tree root in the universe.'''
        self.sets.append(root)
# Single module-level Universe that MakeSet registers every new root with.
U = Universe()
def internalFindSet(x):
    '''Return the root of x's tree, compressing the path as it goes.

    Every node visited on the way up is re-parented directly onto the
    root, which keeps subsequent lookups nearly O(1).
    '''
    # First pass: climb to the root (the node that is its own parent).
    root = x
    while root.parent is not root:
        root = root.parent
    # Second pass: point every node on the path straight at the root.
    while x is not root:
        x.parent, x = root, x.parent
    return root
def MakeSet(x):
    '''Create a singleton set containing x.

    The new node is its own parent (i.e. a root) with rank 0, and the
    root is registered with the global Universe.  All steps are O(1).
    '''
    singleton = Node(x, None, 0)
    singleton.parent = singleton  # a root points at itself
    U.addSet(singleton)
def FindSet(x):
    '''Look up the node registered for value x and return its tree root.'''
    return internalFindSet(valueNodeDict[x])
def Union(x, y):
    '''Destructively merge the sets holding values x and y (union by rank).

    The root of lower rank is attached beneath the root of higher rank;
    on a tie the surviving root's rank grows by one.
    '''
    root_x = FindSet(x)
    root_y = FindSet(y)
    if root_x.rank > root_y.rank:
        root_y.parent = root_x
    else:
        root_x.parent = root_y
        if root_x.rank == root_y.rank:
            root_y.rank += 1
#!/usr/bin/env python
import sys
class node:
    '''A stream request: `bits` to transmit within `time`, tagged by an id.

    Ordering (via __cmp__, Python 2 semantics) compares by bits only.
    '''
    def __init__(self, number, bits=0, time=0):
        self.bits = bits    # payload size of the stream
        self.time = time    # time slot length of the stream
        self.id = number    # caller-assigned identifier
    def __cmp__(self, other):
        # 1, -1 or 0 depending on bits only (cmp-style contract).
        return (self.bits > other.bits) - (self.bits < other.bits)
    def __str__(self):
        return "(ID: %s bits: %s time: %s)" % (self.id, self.bits, self.time)
    def __repr__(self):
        return self.__str__()
# Linear feasibility check: a stream set is schedulable iff the total bits
# fit within total time * capacity.  `streams` is a sequence of node-like
# objects exposing .bits and .time.  Runs in O(n).
def schedule_valid_l(streams, restriction):
    '''Return True when the whole stream set fits under `restriction`.'''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    total_time = 0
    total_bits = 0
    for stream in streams:
        assert stream.bits >= 0 and stream.time >= 0, "Invalid element in stream"
        total_time += stream.time
        total_bits += stream.bits
    return total_bits <= total_time * restriction
# Improved scheduler: greedy linear pass, falling back to a min-heap of
# rejected streams.  `streams` holds node-like objects (.bits, .time).
# Best case O(n) when nothing is rejected; worst case O(n log n).
def schedule_improved(streams, restriction):
    '''Return a feasible ordering of `streams`, or [] if none exists.

    Bug fixes relative to the original:
      * heappush/heappop are heapq module functions, not list methods
        (the original called visited.heappush / visited.heappop,
        which raises AttributeError).
      * the drain loop ran while n >= 0, popping from an empty heap and
        raising IndexError even on success; it now runs while n > 0.
      * the infeasibility test used >=, rejecting sets that exactly meet
        capacity; it now uses >, consistent with the acceptance test.
    '''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    waiting = streams[:]
    visited = []        # min-heap buffer of streams rejected in the first pass
    solution_set = []
    t = 0               # accumulated time of accepted streams
    b = 0               # accumulated bits of accepted streams
    n = len(streams)    # streams still unplaced
    # Linear pass: tentatively add each stream; back it out if it busts
    # capacity and park it on the heap for a second attempt.
    for elem in waiting:
        t += elem.time
        b += elem.bits
        if b <= restriction * t:
            solution_set.append(elem)
            n -= 1
        else:
            t -= elem.time
            b -= elem.bits
            heapq.heappush(visited, elem)
    # Second pass: retry rejected streams from smallest bits upward.
    while n > 0:
        elem = heapq.heappop(visited)
        t += elem.time
        b += elem.bits
        if b > restriction * t:
            # Even the smallest leftover stream cannot fit: no valid set.
            return []
        solution_set.append(elem)
        n -= 1
    return solution_set
# Sort-based scheduler.  `streams` holds node-like objects (.bits, .time).
# Sorting dominates: O(n log n) worst case, n = number of streams.
def schedule(streams, restriction):
    '''Return the streams ordered by bits if feasible, else [].'''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    elapsed = 0
    transmitted = 0
    solution_set = []
    # Timsort: worst case O(n log n).  Greedily take streams smallest-first.
    for elem in sorted(streams, key=lambda st: st.bits):
        assert elem.bits >= 0 and elem.time >= 0, "Invalid element in stream"
        elapsed += elem.time
        transmitted += elem.bits
        if transmitted > elapsed * restriction:
            # Capacity exceeded: the set has no valid configuration.
            return []
        solution_set.append(elem)
    return solution_set
def manage_l(streams, r):
print "Validation through linear algorithm. No visible solution available"
if (schedule_valid_l(streams,r)):
print "The set is a valid one."
else:
print "The set has no valid configuration."
def manage_n(streams, r):
print "Validation through n log n algorithm. Final solution available"
sol = schedule(streams, r)
if (len(sol) != 0):
print "The set is a valid one."
print "Solution:"
print sol
else:
print "The set has no valid configuration."
def format_input(inp):
    '''Parse the input file `inp` and return (restriction, list-of-node).

    Expected layout: a first line "r: <int>" with the capacity
    restriction, then one line per stream: "<id>: (<bits>, <time>)".

    Fixes relative to the original: the file handle is closed (with
    block), a discarded duplicate node(...) construction was removed, and
    the Python-2-only map/filter indexing became list comprehensions.
    '''
    with open(inp, "r") as fdesc:
        lines = fdesc.readlines()
    # Drop blank lines and strip trailing newlines.
    elements = [s.replace("\n", "") for s in lines if s != "\n"]
    # The first non-blank line carries the restriction, e.g. "r: 42".
    r = int([tok for tok in elements[0].split() if tok != "r:"][0])
    streams = []
    for line in elements[1:]:
        sline = line.split()
        num = int(sline[0].replace(":", ""))
        b = int(sline[1].replace("(", "").replace(",", ""))
        t = int(sline[2].replace(")", ""))
        streams.append(node(num, b, t))
    return r, streams
def main():
if len(sys.argv) != 3:
print "Uso correcto: schedule.py [-l o -n] archivo"
sys.exit(1)
args = {"-l": False, "-n": False}
sys.argv.pop(0)
inp = ""
for elem in sys.argv:
if elem in args:
args[elem] = True
else:
inp = elem
streams = []
r = 0
try:
r, streams = format_input(inp)
except:
print "Invalid or incorrect file"
if args["-l"]:
manage_l(streams, r)
if args["-n"]:
manage_n(streams, r)
main()
| Python |
#!/usr/bin/env python
import sys
class node:
    '''A stream request: `bits` to transmit within `time`, tagged by an id.

    Ordering (via __cmp__, Python 2 semantics) compares by bits only.
    '''
    def __init__(self, number, bits=0, time=0):
        self.bits = bits    # payload size of the stream
        self.time = time    # time slot length of the stream
        self.id = number    # caller-assigned identifier
    def __cmp__(self, other):
        # 1, -1 or 0 depending on bits only (cmp-style contract).
        return (self.bits > other.bits) - (self.bits < other.bits)
    def __str__(self):
        return "(ID: %s bits: %s time: %s)" % (self.id, self.bits, self.time)
    def __repr__(self):
        return self.__str__()
# Linear feasibility check: a stream set is schedulable iff the total bits
# fit within total time * capacity.  `streams` is a sequence of node-like
# objects exposing .bits and .time.  Runs in O(n).
def schedule_valid_l(streams, restriction):
    '''Return True when the whole stream set fits under `restriction`.'''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    total_time = 0
    total_bits = 0
    for stream in streams:
        assert stream.bits >= 0 and stream.time >= 0, "Invalid element in stream"
        total_time += stream.time
        total_bits += stream.bits
    return total_bits <= total_time * restriction
# Improved scheduler: greedy linear pass, falling back to a min-heap of
# rejected streams.  `streams` holds node-like objects (.bits, .time).
# Best case O(n) when nothing is rejected; worst case O(n log n).
def schedule_improved(streams, restriction):
    '''Return a feasible ordering of `streams`, or [] if none exists.

    Bug fixes relative to the original:
      * heappush/heappop are heapq module functions, not list methods
        (the original called visited.heappush / visited.heappop,
        which raises AttributeError).
      * the drain loop ran while n >= 0, popping from an empty heap and
        raising IndexError even on success; it now runs while n > 0.
      * the infeasibility test used >=, rejecting sets that exactly meet
        capacity; it now uses >, consistent with the acceptance test.
    '''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    waiting = streams[:]
    visited = []        # min-heap buffer of streams rejected in the first pass
    solution_set = []
    t = 0               # accumulated time of accepted streams
    b = 0               # accumulated bits of accepted streams
    n = len(streams)    # streams still unplaced
    # Linear pass: tentatively add each stream; back it out if it busts
    # capacity and park it on the heap for a second attempt.
    for elem in waiting:
        t += elem.time
        b += elem.bits
        if b <= restriction * t:
            solution_set.append(elem)
            n -= 1
        else:
            t -= elem.time
            b -= elem.bits
            heapq.heappush(visited, elem)
    # Second pass: retry rejected streams from smallest bits upward.
    while n > 0:
        elem = heapq.heappop(visited)
        t += elem.time
        b += elem.bits
        if b > restriction * t:
            # Even the smallest leftover stream cannot fit: no valid set.
            return []
        solution_set.append(elem)
        n -= 1
    return solution_set
# Sort-based scheduler.  `streams` holds node-like objects (.bits, .time).
# Sorting dominates: O(n log n) worst case, n = number of streams.
def schedule(streams, restriction):
    '''Return the streams ordered by bits if feasible, else [].'''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    elapsed = 0
    transmitted = 0
    solution_set = []
    # Timsort: worst case O(n log n).  Greedily take streams smallest-first.
    for elem in sorted(streams, key=lambda st: st.bits):
        assert elem.bits >= 0 and elem.time >= 0, "Invalid element in stream"
        elapsed += elem.time
        transmitted += elem.bits
        if transmitted > elapsed * restriction:
            # Capacity exceeded: the set has no valid configuration.
            return []
        solution_set.append(elem)
    return solution_set
def manage_l(streams, r):
print "Validation through linear algorithm. No visible solution available"
if (schedule_valid_l(streams,r)):
print "The set is a valid one."
else:
print "The set has no valid configuration."
def manage_n(streams, r):
print "Validation through n log n algorithm. Final solution available"
sol = schedule(streams, r)
if (len(sol) != 0):
print "The set is a valid one."
print "Solution:"
print sol
else:
print "The set has no valid configuration."
def format_input(inp):
    '''Parse the input file `inp` and return (restriction, list-of-node).

    Expected layout: a first line "r: <int>" with the capacity
    restriction, then one line per stream: "<id>: (<bits>, <time>)".

    Fixes relative to the original: the file handle is closed (with
    block), a discarded duplicate node(...) construction was removed, and
    the Python-2-only map/filter indexing became list comprehensions.
    '''
    with open(inp, "r") as fdesc:
        lines = fdesc.readlines()
    # Drop blank lines and strip trailing newlines.
    elements = [s.replace("\n", "") for s in lines if s != "\n"]
    # The first non-blank line carries the restriction, e.g. "r: 42".
    r = int([tok for tok in elements[0].split() if tok != "r:"][0])
    streams = []
    for line in elements[1:]:
        sline = line.split()
        num = int(sline[0].replace(":", ""))
        b = int(sline[1].replace("(", "").replace(",", ""))
        t = int(sline[2].replace(")", ""))
        streams.append(node(num, b, t))
    return r, streams
def main():
if len(sys.argv) != 3:
print "Uso correcto: schedule.py [-l o -n] archivo"
sys.exit(1)
args = {"-l": False, "-n": False}
sys.argv.pop(0)
inp = ""
for elem in sys.argv:
if elem in args:
args[elem] = True
else:
inp = elem
streams = []
r = 0
try:
r, streams = format_input(inp)
except:
print "Invalid or incorrect file"
if args["-l"]:
manage_l(streams, r)
if args["-n"]:
manage_n(streams, r)
main()
| Python |
#!/usr/bin/env python
import sys
class node:
    '''A stream request: `bits` to transmit within `time`, tagged by an id.

    Ordering (via __cmp__, Python 2 semantics) compares by bits only.
    '''
    def __init__(self, number, bits=0, time=0):
        self.bits = bits    # payload size of the stream
        self.time = time    # time slot length of the stream
        self.id = number    # caller-assigned identifier
    def __cmp__(self, other):
        # 1, -1 or 0 depending on bits only (cmp-style contract).
        return (self.bits > other.bits) - (self.bits < other.bits)
    def __str__(self):
        return "(ID: %s bits: %s time: %s)" % (self.id, self.bits, self.time)
    def __repr__(self):
        return self.__str__()
# Linear feasibility check: a stream set is schedulable iff the total bits
# fit within total time * capacity.  `streams` is a sequence of node-like
# objects exposing .bits and .time.  Runs in O(n).
def schedule_valid_l(streams, restriction):
    '''Return True when the whole stream set fits under `restriction`.'''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    total_time = 0
    total_bits = 0
    for stream in streams:
        assert stream.bits >= 0 and stream.time >= 0, "Invalid element in stream"
        total_time += stream.time
        total_bits += stream.bits
    return total_bits <= total_time * restriction
# Improved scheduler: greedy linear pass, falling back to a min-heap of
# rejected streams.  `streams` holds node-like objects (.bits, .time).
# Best case O(n) when nothing is rejected; worst case O(n log n).
def schedule_improved(streams, restriction):
    '''Return a feasible ordering of `streams`, or [] if none exists.

    Bug fixes relative to the original:
      * heappush/heappop are heapq module functions, not list methods
        (the original called visited.heappush / visited.heappop,
        which raises AttributeError).
      * the drain loop ran while n >= 0, popping from an empty heap and
        raising IndexError even on success; it now runs while n > 0.
      * the infeasibility test used >=, rejecting sets that exactly meet
        capacity; it now uses >, consistent with the acceptance test.
    '''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    waiting = streams[:]
    visited = []        # min-heap buffer of streams rejected in the first pass
    solution_set = []
    t = 0               # accumulated time of accepted streams
    b = 0               # accumulated bits of accepted streams
    n = len(streams)    # streams still unplaced
    # Linear pass: tentatively add each stream; back it out if it busts
    # capacity and park it on the heap for a second attempt.
    for elem in waiting:
        t += elem.time
        b += elem.bits
        if b <= restriction * t:
            solution_set.append(elem)
            n -= 1
        else:
            t -= elem.time
            b -= elem.bits
            heapq.heappush(visited, elem)
    # Second pass: retry rejected streams from smallest bits upward.
    while n > 0:
        elem = heapq.heappop(visited)
        t += elem.time
        b += elem.bits
        if b > restriction * t:
            # Even the smallest leftover stream cannot fit: no valid set.
            return []
        solution_set.append(elem)
        n -= 1
    return solution_set
# Sort-based scheduler.  `streams` holds node-like objects (.bits, .time).
# Sorting dominates: O(n log n) worst case, n = number of streams.
def schedule(streams, restriction):
    '''Return the streams ordered by bits if feasible, else [].'''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    elapsed = 0
    transmitted = 0
    solution_set = []
    # Timsort: worst case O(n log n).  Greedily take streams smallest-first.
    for elem in sorted(streams, key=lambda st: st.bits):
        assert elem.bits >= 0 and elem.time >= 0, "Invalid element in stream"
        elapsed += elem.time
        transmitted += elem.bits
        if transmitted > elapsed * restriction:
            # Capacity exceeded: the set has no valid configuration.
            return []
        solution_set.append(elem)
    return solution_set
def manage_l(streams, r):
print "Validation through linear algorithm. No visible solution available"
if (schedule_valid_l(streams,r)):
print "The set is a valid one."
else:
print "The set has no valid configuration."
def manage_n(streams, r):
print "Validation through n log n algorithm. Final solution available"
sol = schedule(streams, r)
if (len(sol) != 0):
print "The set is a valid one."
print "Solution:"
print sol
else:
print "The set has no valid configuration."
def format_input(inp):
    '''Parse the input file `inp` and return (restriction, list-of-node).

    Expected layout: a first line "r: <int>" with the capacity
    restriction, then one line per stream: "<id>: (<bits>, <time>)".

    Fixes relative to the original: the file handle is closed (with
    block), a discarded duplicate node(...) construction was removed, and
    the Python-2-only map/filter indexing became list comprehensions.
    '''
    with open(inp, "r") as fdesc:
        lines = fdesc.readlines()
    # Drop blank lines and strip trailing newlines.
    elements = [s.replace("\n", "") for s in lines if s != "\n"]
    # The first non-blank line carries the restriction, e.g. "r: 42".
    r = int([tok for tok in elements[0].split() if tok != "r:"][0])
    streams = []
    for line in elements[1:]:
        sline = line.split()
        num = int(sline[0].replace(":", ""))
        b = int(sline[1].replace("(", "").replace(",", ""))
        t = int(sline[2].replace(")", ""))
        streams.append(node(num, b, t))
    return r, streams
def main():
if len(sys.argv) != 3:
print "Uso correcto: schedule.py [-l o -n] archivo"
sys.exit(1)
args = {"-l": False, "-n": False}
sys.argv.pop(0)
inp = ""
for elem in sys.argv:
if elem in args:
args[elem] = True
else:
inp = elem
streams = []
r = 0
try:
r, streams = format_input(inp)
except:
print "Invalid or incorrect file"
if args["-l"]:
manage_l(streams, r)
if args["-n"]:
manage_n(streams, r)
main()
| Python |
#!/usr/bin/env python
import sys
class node:
    '''A stream request: `bits` to transmit within `time`, tagged by an id.

    Ordering (via __cmp__, Python 2 semantics) compares by bits only.
    '''
    def __init__(self, number, bits=0, time=0):
        self.bits = bits    # payload size of the stream
        self.time = time    # time slot length of the stream
        self.id = number    # caller-assigned identifier
    def __cmp__(self, other):
        # 1, -1 or 0 depending on bits only (cmp-style contract).
        return (self.bits > other.bits) - (self.bits < other.bits)
    def __str__(self):
        return "(ID: %s bits: %s time: %s)" % (self.id, self.bits, self.time)
    def __repr__(self):
        return self.__str__()
# Linear feasibility check: a stream set is schedulable iff the total bits
# fit within total time * capacity.  `streams` is a sequence of node-like
# objects exposing .bits and .time.  Runs in O(n).
def schedule_valid_l(streams, restriction):
    '''Return True when the whole stream set fits under `restriction`.'''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    total_time = 0
    total_bits = 0
    for stream in streams:
        assert stream.bits >= 0 and stream.time >= 0, "Invalid element in stream"
        total_time += stream.time
        total_bits += stream.bits
    return total_bits <= total_time * restriction
# Improved scheduler: greedy linear pass, falling back to a min-heap of
# rejected streams.  `streams` holds node-like objects (.bits, .time).
# Best case O(n) when nothing is rejected; worst case O(n log n).
def schedule_improved(streams, restriction):
    '''Return a feasible ordering of `streams`, or [] if none exists.

    Bug fixes relative to the original:
      * heappush/heappop are heapq module functions, not list methods
        (the original called visited.heappush / visited.heappop,
        which raises AttributeError).
      * the drain loop ran while n >= 0, popping from an empty heap and
        raising IndexError even on success; it now runs while n > 0.
      * the infeasibility test used >=, rejecting sets that exactly meet
        capacity; it now uses >, consistent with the acceptance test.
    '''
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    assert len(streams) > 0, "Empty stream"
    waiting = streams[:]
    visited = []        # min-heap buffer of streams rejected in the first pass
    solution_set = []
    t = 0               # accumulated time of accepted streams
    b = 0               # accumulated bits of accepted streams
    n = len(streams)    # streams still unplaced
    # Linear pass: tentatively add each stream; back it out if it busts
    # capacity and park it on the heap for a second attempt.
    for elem in waiting:
        t += elem.time
        b += elem.bits
        if b <= restriction * t:
            solution_set.append(elem)
            n -= 1
        else:
            t -= elem.time
            b -= elem.bits
            heapq.heappush(visited, elem)
    # Second pass: retry rejected streams from smallest bits upward.
    while n > 0:
        elem = heapq.heappop(visited)
        t += elem.time
        b += elem.bits
        if b > restriction * t:
            # Even the smallest leftover stream cannot fit: no valid set.
            return []
        solution_set.append(elem)
        n -= 1
    return solution_set
# O(n log n) scheduler: processes the streams in ascending order of bits
# (sorted/Timsort) and accepts them while the accumulated (bits, time) pair
# keeps the set valid. Returns the ordered solution, or [] when the set has
# no valid configuration.
def schedule(streams, restriction):
    # Negative capacities are rejected outright.
    assert restriction >= 0, "Restriction less than 0 (%d)" % restriction
    # Testing aid only; could later become `if len(streams) == 0: return []`.
    assert len(streams) > 0, "Empty stream"
    ordered = sorted(streams, key=lambda stream: stream.bits)
    accepted = []
    elapsed = 0
    sent = 0
    for stream in ordered:
        assert stream.bits >= 0 and stream.time >= 0, "Invalid element in stream"
        elapsed += stream.time
        sent += stream.bits
        if sent > elapsed * restriction:
            # Capacity exceeded: no valid configuration exists.
            return []
        accepted.append(stream)
    return accepted
def manage_l(streams, r):
print "Validation through linear algorithm. No visible solution available"
if (schedule_valid_l(streams,r)):
print "The set is a valid one."
else:
print "The set has no valid configuration."
def manage_n(streams, r):
print "Validation through n log n algorithm. Final solution available"
sol = schedule(streams, r)
if (len(sol) != 0):
print "The set is a valid one."
print "Solution:"
print sol
else:
print "The set has no valid configuration."
# Parses the input file: the first non-blank line holds "r: <capacity>",
# every following line holds "<id>: (<bits>, <time>)". Returns the capacity
# and the list of parsed stream nodes.
def format_input(inp):
    fdesc = open(inp, "r")
    try:
        lines = fdesc.readlines()
    finally:
        fdesc.close()  # fix: the file descriptor was never closed
    elements = [s.replace("\n", "") for s in lines if s != "\n"]
    r = int([s for s in elements[0].split() if s != "r:"][0])
    streams = []
    elements.pop(0)
    for line in elements:
        sline = line.split()
        num = int(sline[0].replace(":", ""))
        b = int(sline[1].replace("(", "").replace(",", ""))
        t = int(sline[2].replace(")", ""))
        # fix: a node was built twice per line and the first one discarded
        streams.append(node(num, b, t))
    return r, streams
def main():
if len(sys.argv) != 3:
print "Uso correcto: schedule.py [-l o -n] archivo"
sys.exit(1)
args = {"-l": False, "-n": False}
sys.argv.pop(0)
inp = ""
for elem in sys.argv:
if elem in args:
args[elem] = True
else:
inp = elem
streams = []
r = 0
try:
r, streams = format_input(inp)
except:
print "Invalid or incorrect file"
if args["-l"]:
manage_l(streams, r)
if args["-n"]:
manage_n(streams, r)
main()
| Python |
#!/usr/bin/python
import sys
class Edge(object):
    """A directed edge of the flow network with a fixed capacity."""
    def __init__(self, u, v, w):
        # Endpoints and capacity; the residual twin `redge` is attached
        # later by FlowNetwork.add_edge.
        self.source = u
        self.sink = v
        self.capacity = w
    def __repr__(self):
        return "%s->%s:%s" % (self.source, self.sink, self.capacity)
class FlowNetwork(object):
def __init__(self, path_file):
fd = None
self.adj = {}
self.flow = {}
try:
fd = open(path_file)
except(IOerror):
print "no existe el archivo a procesar"
sys.exit(1)
try:
self.add_vertex("s")
self.add_vertex("t")
lines = fd.readlines()
elements = map(lambda s: s.replace("\n", ""), filter(lambda s: s != "\n", lines))
self.n,self.m = map(lambda s: int(s), elements.pop(0).split(","))
Cn = map(lambda s: int(s), elements.pop(0). split(","))
Pn = map(lambda s: int(s), elements.pop(0). split(","))
for element in elements:
task, nadjs = element.split(":")
adjs = nadjs.split(",")
self.add_vertex("T"+task)
for adj in adjs:
if adj not in self.adj["s"]:
self.add_vertex("A"+adj)
self.add_edge("A"+adj, "t", Cn[int(adj)-1])
self.add_edge("T"+task, "A"+adj, float("inf"))
self.add_edge("s","T"+task, Pn[int(task)-1])
fd.close()
except:
print "error construyendo la red de transporte. Archivo corrupto"
sys.exit(1)
def add_vertex(self, vertex):
self.adj[vertex] = []
def get_edges(self, v):
return self.adj[v]
def add_edge(self, u, v, w=0):
if u == v:
raise ValueError("u == v")
edge = Edge(u,v,w)
redge = Edge(v,u,0)
edge.redge = redge
redge.redge = edge
self.adj[u].append(edge)
self.adj[v].append(redge)
self.flow[edge] = 0
self.flow[redge] = 0
def find_path(self, source, sink, path):
if source == sink:
return path
for edge in self.get_edges(source):
residual = edge.capacity - self.flow[edge]
if residual > 0 and not (edge,residual) in path:
result = self.find_path( edge.sink, sink, path + [(edge,residual)] )
if result != None:
return result
def max_flow(self, source = "s", sink = "t"):
path = self.find_path(source, sink, [])
while path != None:
flow = min(res for edge,res in path)
for edge,res in path:
self.flow[edge] += flow
self.flow[edge.redge] -= flow
path = self.find_path(source, sink, [])
# print [self.flow[edge] for edge in self.get_edges(source)]
return sum(self.flow[edge] for edge in self.get_edges(source))
def __repr__(self):
rep = []
inv_edges = [ s.sink for s in self.get_edges("t") if s.redge.capacity == self.flow[s.redge] ]
for inv_edge in inv_edges:
for edge in filter(lambda s: s.capacity == 0, self.get_edges(inv_edge)):
rep.append(edge.sink)
return " - ".join(rep)
# Script entry: builds the flow network from the file named in argv[1],
# computes the maximum flow and prints the chosen projects.
try:
    fl = FlowNetwork(sys.argv[1])
    # fl.max_flow()
    print "gasto maximo: " + str(fl.max_flow())
    print "proyectos a realizar : " + str(fl)
except:
    # NOTE(review): this bare except also hides a missing argv[1]
    # (IndexError) and any later failure under the same message.
    print "argumentos incorrectos"
| Python |
#!/usr/bin/python
import sys
class Edge(object):
    """A directed edge of the flow network with a fixed capacity."""
    def __init__(self, u, v, w):
        # Endpoints and capacity; the residual twin `redge` is attached
        # later by FlowNetwork.add_edge.
        self.source = u
        self.sink = v
        self.capacity = w
    def __repr__(self):
        return "%s->%s:%s" % (self.source, self.sink, self.capacity)
class FlowNetwork(object):
def __init__(self, path_file):
fd = None
self.adj = {}
self.flow = {}
try:
fd = open(path_file)
except(IOerror):
print "no existe el archivo a procesar"
sys.exit(1)
try:
self.add_vertex("s")
self.add_vertex("t")
lines = fd.readlines()
elements = map(lambda s: s.replace("\n", ""), filter(lambda s: s != "\n", lines))
self.n,self.m = map(lambda s: int(s), elements.pop(0).split(","))
Cn = map(lambda s: int(s), elements.pop(0). split(","))
Pn = map(lambda s: int(s), elements.pop(0). split(","))
for element in elements:
task, nadjs = element.split(":")
adjs = nadjs.split(",")
self.add_vertex("T"+task)
for adj in adjs:
if adj not in self.adj["s"]:
self.add_vertex("A"+adj)
self.add_edge("A"+adj, "t", Cn[int(adj)-1])
self.add_edge("T"+task, "A"+adj, float("inf"))
self.add_edge("s","T"+task, Pn[int(task)-1])
fd.close()
except:
print "error construyendo la red de transporte. Archivo corrupto"
sys.exit(1)
def add_vertex(self, vertex):
self.adj[vertex] = []
def get_edges(self, v):
return self.adj[v]
def add_edge(self, u, v, w=0):
if u == v:
raise ValueError("u == v")
edge = Edge(u,v,w)
redge = Edge(v,u,0)
edge.redge = redge
redge.redge = edge
self.adj[u].append(edge)
self.adj[v].append(redge)
self.flow[edge] = 0
self.flow[redge] = 0
def find_path(self, source, sink, path):
if source == sink:
return path
for edge in self.get_edges(source):
residual = edge.capacity - self.flow[edge]
if residual > 0 and not (edge,residual) in path:
result = self.find_path( edge.sink, sink, path + [(edge,residual)] )
if result != None:
return result
def max_flow(self, source = "s", sink = "t"):
path = self.find_path(source, sink, [])
while path != None:
flow = min(res for edge,res in path)
for edge,res in path:
self.flow[edge] += flow
self.flow[edge.redge] -= flow
path = self.find_path(source, sink, [])
# print [self.flow[edge] for edge in self.get_edges(source)]
return sum(self.flow[edge] for edge in self.get_edges(source))
def __repr__(self):
rep = []
inv_edges = [ s.sink for s in self.get_edges("t") if s.redge.capacity == self.flow[s.redge] ]
for inv_edge in inv_edges:
for edge in filter(lambda s: s.capacity == 0, self.get_edges(inv_edge)):
rep.append(edge.sink)
return " - ".join(rep)
# Script entry: builds the flow network from the file named in argv[1],
# computes the maximum flow and prints the chosen projects.
try:
    fl = FlowNetwork(sys.argv[1])
    # fl.max_flow()
    print "gasto maximo: " + str(fl.max_flow())
    print "proyectos a realizar : " + str(fl)
except:
    # NOTE(review): this bare except also hides a missing argv[1]
    # (IndexError) and any later failure under the same message.
    print "argumentos incorrectos"
| Python |
# Indices into a building triple (left, height, right) and, for the first
# two, into a skyline keypoint (x, height).
LEFT = 0
HEIGHT = 1
RIGHT = 2
def skyline(buildings):
    '''Divide-and-conquer skyline: recursively computes the keypoint list of
    each half of the building list and merges the two keypoint streams while
    tracking the current height contributed by each side.

    buildings -- non-empty list of (left, height, right) triples.
    Returns a list of (x, height) keypoints.
    NOTE(review): appears to assume the buildings are ordered by left edge;
    confirm with callers.
    '''
    if len(buildings) == 1:
        # Base case: one building contributes its rise and its drop to 0.
        keypoints= []
        keypoints.append((buildings[0][LEFT],buildings[0][HEIGHT]))
        keypoints.append((buildings[0][RIGHT],0))
        return keypoints
    keypoints = []
    # Recurse on the two halves (integer division keeps both non-empty).
    ListA = skyline(buildings[(len(buildings)/2):])
    ListB = skyline(buildings[:(len(buildings)/2)])
    CurrentHeightA = 0
    CurrentHeightB = 0
    CurrentX = 0
    # Merge: always consume the keypoint with the smaller x, emitting the
    # max of the two sides' current heights at that x.
    while ListA and ListB:
        elemA = ListA[0]
        elemB = ListB[0]
        if elemA[LEFT] < elemB[LEFT]:
            CurrentHeightA = elemA[HEIGHT]
            CurrentX = elemA[LEFT]
            keypoints.append((CurrentX, max(CurrentHeightA,CurrentHeightB)))
            del ListA[0]
        else:
            CurrentX = elemB[LEFT]
            CurrentHeightB = elemB[HEIGHT]
            keypoints.append((CurrentX, max(CurrentHeightA,CurrentHeightB)))
            del ListB[0]
    # Drain the leftover side; its keypoints can be copied verbatim because
    # the exhausted side already dropped back to height 0.
    if not ListA:
        while ListB:
            elemB = ListB[0]
            keypoints.append(elemB)
            del ListB[0];
    if not ListB:
        while ListA:
            elemA = ListA[0]
            keypoints.append(elemA)
            del ListA[0];
    return keypoints
def mergeBuildings(buildings):
    '''Collapses consecutive keypoints sharing the same height, keeping only
    the points where the skyline actually changes level.'''
    filtered = []
    previous_height = 0
    for point in buildings:
        if point[HEIGHT] == previous_height:
            continue
        filtered.append(point)
        previous_height = point[HEIGHT]
    return filtered
if __name__ == '__main__':
    # Reads one "L,H,R" triple per line from a user-supplied file and prints
    # the merged skyline keypoints.
    # NOTE(review): the file handle is never closed.
    buildingsFile = raw_input("Enter the file name containing a valid graph:")
    filep = open(buildingsFile)
    text=filep.read()
    #Splitting into the (L,H,R) building triples or L-terns
    buildings = text.strip('\n').split('\n')
    #Splitting each triple to a list of 3 values and casting them to integers
    buildings = [map(int,x.split(',')) for x in buildings]
    print "\n"
    print mergeBuildings(skyline(buildings))
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
class logger:
    '''Tiny debug logger writing dash-joined records, one per line, to a
    file named "LOG" in the working directory.'''
    def __init__(self):
        # The file stays open for the logger's whole lifetime.
        self.logfile = open("LOG", "w")
    def dump(self, *data):
        # Stringify every field and join with "-" so a record fits one line.
        self.logfile.write("-".join(map(str, data)) + "\n")
    def __del__(self):
        self.logfile.close()
# Module-level logger shared by the (currently commented-out) debug dumps.
mylogger = logger()
def inventory(W, ms, cs, co, kmax):
    """Greedy inventory planning.

    W    -- per-month requirement (W[k] units needed in month k)
    ms   -- maximum storage capacity
    cs   -- storage cost per unit per month
    co   -- fixed cost of placing an order
    kmax -- number of months to plan (months 1..kmax-1 are iterated)

    Returns (U, cost): U[k] is the amount to order in month k and cost the
    total expense. Each month either enlarges the most recent order (when
    storing the surplus is cheaper than a new order and fits in ms) or
    places a fresh order.
    """
    # fix: removed the unused X list - it was only referenced by
    # commented-out debug dumps. xrange -> range keeps Python 2 behaviour
    # while staying portable.
    Xlast = 0            # units already added on top of the last order
    Xant = 0             # stock carried into the current month
    U = [0 for i in range(kmax + 1)]
    cost = co            # the initial order in month 0 always pays co
    lastbuy = 0          # month of the most recent order
    U[0] = W[0]
    for k in range(1, kmax):
        dif = W[k] - Xant
        if dif > 0:
            # Cheaper to enlarge the previous order and store the surplus?
            if co > dif * cs * (k - lastbuy + 1) and (dif + Xlast <= ms):
                cost += dif * cs
                U[lastbuy] += dif
                Xlast += dif
                Xant += dif
            else:
                # Place a new order this month.
                cost += co
                lastbuy = k
                U[k] = dif
        Xant = Xant + U[k] - W[k]
        assert Xant <= ms, "problema con los X[k], alguno fue mal calculado"
    return U, cost
def format_input(inp):
    '''Reads the data file: four integer header lines (n, s, c, k) followed
    by one integer requirement per line. Returns (n, s, c, k, W).

    fix: the map/filter pipeline is replaced by comprehensions - identical
    in Python 2, but the old code relied on map() returning a list (it
    called .pop on it), which breaks under Python 3's lazy map.
    '''
    fdesc = open(inp, "r")
    lines = fdesc.readlines()
    fdesc.close()
    cleaned = [line.replace("\n", "") for line in lines if line != "\n"]
    n = int(cleaned.pop(0))
    s = int(cleaned.pop(0))
    c = int(cleaned.pop(0))
    k = int(cleaned.pop(0))
    W = [int(line) for line in cleaned]
    return n, s, c, k, W
def main():
sys.argv.pop(0)
if len(sys.argv) != 1:
sys.exit(1,"uso incorrecto del scrip. Uso correcto: ./inventory.py ARCHIVODATOS")
n = s = c = k = W = None
try:
n, s, c, k, W = format_input(sys.argv.pop(0))
except:
sys.exit(1, "error en el archivo de datos")
U,costo = inventory(W,s,c,k,n)
# for i,compra in enumerate(J):
# print "J en el mes " + str(i) + ":" + str(compra)
for i,compra in enumerate(U):
print "compra en el mes " + str(i) + ":" + str(compra)
print "costo total: " + str(costo)
return 0
if __name__ == '__main__':
main()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
class logger:
    '''Tiny debug logger writing dash-joined records, one per line, to a
    file named "LOG" in the working directory.'''
    def __init__(self):
        # The file stays open for the logger's whole lifetime.
        self.logfile = open("LOG", "w")
    def dump(self, *data):
        # Stringify every field and join with "-" so a record fits one line.
        self.logfile.write("-".join(map(str, data)) + "\n")
    def __del__(self):
        self.logfile.close()
# Module-level logger shared by the (currently commented-out) debug dumps.
mylogger = logger()
def inventory(W, ms, cs, co, kmax):
    """Greedy inventory planning.

    W    -- per-month requirement (W[k] units needed in month k)
    ms   -- maximum storage capacity
    cs   -- storage cost per unit per month
    co   -- fixed cost of placing an order
    kmax -- number of months to plan (months 1..kmax-1 are iterated)

    Returns (U, cost): U[k] is the amount to order in month k and cost the
    total expense. Each month either enlarges the most recent order (when
    storing the surplus is cheaper than a new order and fits in ms) or
    places a fresh order.
    """
    # fix: removed the unused X list - it was only referenced by
    # commented-out debug dumps. xrange -> range keeps Python 2 behaviour
    # while staying portable.
    Xlast = 0            # units already added on top of the last order
    Xant = 0             # stock carried into the current month
    U = [0 for i in range(kmax + 1)]
    cost = co            # the initial order in month 0 always pays co
    lastbuy = 0          # month of the most recent order
    U[0] = W[0]
    for k in range(1, kmax):
        dif = W[k] - Xant
        if dif > 0:
            # Cheaper to enlarge the previous order and store the surplus?
            if co > dif * cs * (k - lastbuy + 1) and (dif + Xlast <= ms):
                cost += dif * cs
                U[lastbuy] += dif
                Xlast += dif
                Xant += dif
            else:
                # Place a new order this month.
                cost += co
                lastbuy = k
                U[k] = dif
        Xant = Xant + U[k] - W[k]
        assert Xant <= ms, "problema con los X[k], alguno fue mal calculado"
    return U, cost
def format_input(inp):
    '''Reads the data file: four integer header lines (n, s, c, k) followed
    by one integer requirement per line. Returns (n, s, c, k, W).

    fix: the map/filter pipeline is replaced by comprehensions - identical
    in Python 2, but the old code relied on map() returning a list (it
    called .pop on it), which breaks under Python 3's lazy map.
    '''
    fdesc = open(inp, "r")
    lines = fdesc.readlines()
    fdesc.close()
    cleaned = [line.replace("\n", "") for line in lines if line != "\n"]
    n = int(cleaned.pop(0))
    s = int(cleaned.pop(0))
    c = int(cleaned.pop(0))
    k = int(cleaned.pop(0))
    W = [int(line) for line in cleaned]
    return n, s, c, k, W
def main():
sys.argv.pop(0)
if len(sys.argv) != 1:
sys.exit(1,"uso incorrecto del scrip. Uso correcto: ./inventory.py ARCHIVODATOS")
n = s = c = k = W = None
try:
n, s, c, k, W = format_input(sys.argv.pop(0))
except:
sys.exit(1, "error en el archivo de datos")
U,costo = inventory(W,s,c,k,n)
# for i,compra in enumerate(J):
# print "J en el mes " + str(i) + ":" + str(compra)
for i,compra in enumerate(U):
print "compra en el mes " + str(i) + ":" + str(compra)
print "costo total: " + str(costo)
return 0
if __name__ == '__main__':
main()
| Python |
# coding=latin-1
'''
@author: Leonardo Val <lval@ucu.edu.uy>
'''
class Game(object): ###########################################################
    ''' Abstract base for every game component. An instance is a snapshot of
        a match: board, pieces, participating players and whatever else is
        needed to continue the game.
    '''
    def __init__(self, *players):
        ''' Stores the participating player roles (e.g. 'Xs'/'Os' or
            'Whites'/'Blacks'). These are roles, not the agents in charge of
            moving. Subclasses must accept an empty or None players list as
            a default option. Any hashable type works; str is recommended.
        '''
        self.players = players
    def moves(self):
        ''' All valid moves of the current state as {player:[move]}. A
            disabled player has an empty list or no entry at all; a finished
            game yields an empty dict or None. Moves can be any hashable
            type, str recommended. Base implementation: None.
        '''
        return None
    def results(self):
        ''' Final results as {player:float}: 0 for a draw, positive for
            victory, negative for defeat; empty/None while the game is still
            running. The base implementation reports a draw for everyone.
        '''
        return {player: 0 for player in self.players}
    def next(self, **moves):
        ''' The state reached by applying moves ({player:move}); None when a
            move is invalid or the game has ended. Base implementation: None.
        '''
        return None
    def __hash__(self):
        # States hash through their repr, so equal-looking states collide.
        return hash(repr(self))
def match(game, *agents_list, **agents):
    ''' A match controller in the form of a generator. Participating agents can
        be specified either as a list (agents_list) or pairs player=agent. If
        the list is used, agents are assigned in the same order as the game
        players.
        The generator returns tuples. First (0, agents, initial game state).
        After that (move_number, moves, game state) for each move.
        Finally (None, results, final game state).
        The generator handles the match, asking the enabled agents to move,
        keeping track of game states and notifying all agents as needed.
    '''
    # Positional agents are mapped onto the game's players in order.
    for player, agent in zip(game.players, agents_list):
        agents[player] = agent
    for player, agent in agents.iteritems(): # Tells all agents the match begins.
        agent.match_begins(player, game)
    move_num = 0
    yield (move_num, agents, game)
    results = game.results()
    while not results: # Game is not over.
        # fix: removed a leftover debug statement that printed 'holaaa'
        # whenever game.moves() returned None.
        moves = dict([(p, agents[p].decision(game)) for p, ms in game.moves().iteritems() if ms])
        game = game.next(**moves)
        for player, agent in agents.iteritems(): # Tells all agents about the moves.
            agent.match_moves(game, **moves)
        move_num += 1
        yield (move_num, moves, game)
        results = game.results()
    for player, agent in agents.iteritems(): # Tells all agents the match ends.
        agent.match_ends(game)
    yield (None, results, game)
def run_match(game, *agents_list, **agents):
    ''' Plays the match to completion; returns (results, final game state).
    '''
    for move_num, data, state in match(game, *agents_list, **agents):
        if move_num is None: # Final tuple reached: the match is over.
            return (data, state)
    return (None, game) # Should not happen.
################################################################################ | Python |
# coding=latin-1
''' Contests are sets of matches between many agents. Its purpose its to
evaluate each agents in comparison with the others.
Each contests its arranged in a different way.
@author: Leonardo Val <lval@ucu.edu.uy>
'''
import itertools, random, collections
from _base import match
from _utils import randgen
from five_field_kono import Five_field_kono
class Stats(): #################################################################
    ''' Statistics accumulator for Contest classes.
    '''
    def __init__(self):
        # Factory producing counters that start at 0.
        lambda0 = lambda: 0
        # Ordered so __str__ emits columns in a stable order. 'keys' maps
        # each agent/player to a printable name; the rest are counters.
        self._stats = collections.OrderedDict([
            ('keys', {}),
            ('matches_played', collections.defaultdict(lambda0)),
            ('matches_won', collections.defaultdict(lambda0)),
            ('matches_lost', collections.defaultdict(lambda0)),
            ('result_sum', collections.defaultdict(lambda0)),
            ('result_sum2', collections.defaultdict(lambda0))
        ])
        # Exposes each stat dict as an attribute (self.keys, self.matches_won,
        # ...). The attributes ALIAS the _stats entries, so clear() empties
        # both views at once.
        self.__dict__.update(self._stats)
    def clear(self):
        ''' Clears all statistics.
        '''
        for stat in self._stats.itervalues():
            stat.clear()
    def inc(self, stat, key):
        # Adds 1 to stat[key]; returns the new value.
        return self.add(stat, key, 1)
    def add(self, stat, key, value):
        # Accumulates value into stat[key]; returns the new total.
        new_value = stat[key] + value
        stat[key] = new_value
        return new_value
    def process(self, agents, match_num, move_num, d, game):
        ''' Accumulates statistics for each tuple generated by the contest.
        '''
        if move_num is None: # Finished match.
            results = d
            # Every result is credited twice: once under the agent object
            # and once under the player role it was filling.
            for player, agent in agents.iteritems():
                self.keys.setdefault(agent, agent.name)
                self.keys.setdefault(player, str(player))
                self.inc(self.matches_played, agent)
                self.inc(self.matches_played, player)
                result = results[player]
                if result: # Nonzero result means match is not a draw.
                    if result > 0: # Victory.
                        self.inc(self.matches_won, agent)
                        self.inc(self.matches_won, player)
                    if result < 0: # Defeat.
                        self.inc(self.matches_lost, agent)
                        self.inc(self.matches_lost, player)
                self.add(self.result_sum, agent, result)
                self.add(self.result_sum2, agent, result ** 2)
                self.add(self.result_sum, player, result)
                self.add(self.result_sum2, player, result ** 2)
    def __str__(self):
        ''' Prints the statistics gathered in tabular form.
        '''
        keys = self.keys.keys()
        keys.sort(lambda a1, a2: cmp(a1, a2))
        return ','.join(self._stats.iterkeys()) +'\n'+ '\n'.join(
            [','.join([str(stat[key]) for stat in self._stats.itervalues()]) for key in keys])
class Contest(object): #########################################################
    ''' Base class for all contests. Defines a common statistics gathering and
        contest's matches handling.
    '''
    def __init__(self, game, agents, stats=None):
        self.game = game
        self.agents = list(agents)
        # A shared Stats accumulator may be injected; otherwise a fresh one.
        self.stats = Stats() if stats is None else stats
    def run(self, matches):
        ''' Receives a list of matches, given as tuples (game, agents) where
            agent {player:agent}. This method run each of the matches, returning
            every step of each match.
            The last item returned is a tuple (None, None, stats, game) where
            stats is a dict with the statistics gathered for each agent.
        '''
        self.stats.clear() # Erases previous statistics.
        # zip with xrange(10**5) numbers the matches and caps them at 100000.
        for match_num, (game, agents) in zip(xrange(10**5), matches):
            for move_num, d, g in match(game, **agents):
                self.stats.process(agents, match_num, move_num, d, g)
                yield (match_num, move_num, d, g)
        yield (None, None, self.stats, self.game)
    def log(self, matches=None):
        ''' Transforms a contest generator into a line generator, that
            can be used to display in the screen or write in a file.
        '''
        for n1, n2, a, _ in self.run(matches):
            if n1 is None:
                # Final tuple: dump the gathered statistics as CSV-ish lines.
                # NOTE(review): here `a` is the Stats object, which defines
                # no iterkeys/itervalues of its own - this branch looks
                # written for a plain dict of stats; confirm before relying
                # on log().
                yield 'Agent,'+ ','.join([n for n in a.iterkeys()])
                for agent in self.agents:
                    yield agent.name +','+ ','.join([str(stat[agent]) for stat in a.itervalues()])
            elif n2 == 0 or n2 is None:
                # First tuple of a match: show the player/agent assignment.
                yield '[%d]: %s' % (n1, ', '.join(['%s:%s' % i for i in a.iteritems()]))
            else:
                yield '[%d] #%d %s' % (n1, n2, a)
def complete(contest):
    ''' Runs the whole contest silently and returns its final statistics. '''
    for match_num, _, data, _ in contest.run():
        if match_num is None: # Last tuple: the contest has finished.
            return data
class AllAgainstAll_Contest(Contest): ##########################################
    ''' All agents play count matches against all other agents, in all possible
        combinations.
    '''
    def __init__(self, game, agents, count=1):
        Contest.__init__(self, game, agents)
        # Matches to play for every seating permutation.
        self.count = count
    def run(self):
        players = self.game.players
        # Every ordered assignment of agents to the game's player roles.
        arrays = itertools.permutations(self.agents, len(players))
        matches = [(self.game, dict(zip(players, array))) for array in arrays for _ in xrange(self.count)]
        return Contest.run(self, matches)
class Sampling_Contest(Contest): ###############################################
    ''' Built so each agent will play a match with upto count randomly selected
        opponents. Be warned that depending on the agents number and count,
        some agents may play less matches that count.
    '''
    def __init__(self, game, agents, random=None, count=1):
        Contest.__init__(self, game, agents)
        # Accepts a random.Random-like instance, an int seed, or None.
        self.random = randgen(random)
        self.count = count
    def matches(self):
        # Remaining matches to play, per agent.
        control = dict([(agent, self.count) for agent in self.agents])
        players = self.game.players
        player_count = len(players)
        # Keep generating while more agents than player slots remain.
        while len(control) > player_count:
            agents = control.keys()
            self.random.shuffle(agents)
            for array in itertools.cycle(itertools.permutations(agents, player_count)):
                yield (self.game, dict(zip(players, array)))
                remove_agents = False
                # Decrement each participant's quota; drop exhausted agents.
                for agent in array:
                    agent_count = control.pop(agent) - 1
                    if agent_count > 0:
                        control[agent] = agent_count
                    else:
                        remove_agents = True
                if remove_agents:
                    # Re-shuffle the surviving agents before continuing.
                    break
    def run(self):
        return Contest.run(self, self.matches())
class Sort_Contest(Contest): ###################################################
    ''' Agents are sorted using count matches between them and comparing the
        results. Which matches and how many times each agent plays depends on
        the sort algorithm. Shuffling the agents list is recommended.
        This is only usable with 2 player games.
    '''
    def __init__(self, game, agents, count=1):
        Contest.__init__(self, game, agents)
        self.count = count
        # Recorded (agents, trace) pairs produced while sorting.
        self.__matches__ = []
    def comp_fun(self, agent1, agent2):
        # Comparator used by sort(): plays count matches between the two
        # agents, records every match trace in __matches__ and compares the
        # two agents' summed results.
        players = self.game.players
        agents = dict(zip(players, [agent1, agent2]))
        results_agent1 = []
        results_agent2 = []
        for _ in xrange(self.count):
            m = list(match(self.game, **agents))
            self.__matches__.append((agents, m))
            # The final tuple of a match carries the results dict.
            _, results, _ = m[-1]
            results_agent1.append(results[players[0]])
            results_agent2.append(results[players[1]])
        return sum(results_agent1) - sum(results_agent2)
    def run(self):
        self.__matches__ = []
        # Sorting triggers comp_fun, which plays and records the matches.
        self.agents.sort(self.comp_fun)
        self.stats.clear() # Erases previous statistics.
        # Replay the recorded traces through the statistics accumulator.
        for match_num, (agents, moves) in zip(xrange(len(self.__matches__)), self.__matches__):
            for move_num, d, g in moves:
                self.stats.process(agents, match_num, move_num, d, g)
                yield (match_num, move_num, d, g)
        yield (None, None, self.stats, self.game)
class Pyramid_Contest(Contest): ################################################
    ''' Agents play count matches againts other. The winner gets to the next
        round, and so on until the contest has one winner.
    '''
    def __init__(self, game, agents, count=1):
        Contest.__init__(self, game, agents)
        self.count = count
    def run(self):
        # NOTE(review): unimplemented - the sketch below lives inside a
        # string literal, so run() currently does nothing and returns None.
        ''' TODO
        self.__matches__ = []
        players = self.game.players
        winners = list(self.agents)
        iter_agents = itertools.cycle(winners)
        matches = [[iter_agents.next() for _ in xrange(len(players))] for _ in xrange(round(len(winners) / 2))]
        '''
if __name__ == '__main__': #####################################################
    from tictactoe import TicTacToe
    from _agents import RandomAgent, MiniMaxAgent, AlphaBetaAgent
    from heuristicas import *
    rnd = random.Random()
    # Wrapper exposing the destination-distance heuristic (no genotype).
    heuristicaSinGenotipo = heuristic_wrap()
    agentes = [AlphaBetaAgent('MiniMaxAgent_%05d' % i, 3, rnd,heuristic=heuristicaSinGenotipo.heuristicaDistanciaDestino)for i in xrange(1)]
    #agentes.extend([RandomAgent(rnd, 'RandomAgent_%05d' % i) for i in xrange(1)])
    agentes.extend([AlphaBetaAgent('MiniMaxAgent_%05d' % i, 3, rnd,heuristic=heuristicaSinGenotipo.heuristicaDistanciaDestino) for i in xrange(1)])
    # NOTE(review): both agents are built with the same name pattern, so the
    # printed statistics cannot be told apart by name - confirm if intended.
    # Round-robin between the two agents on Five Field Kono; prints stats.
    print complete(AllAgainstAll_Contest(Five_field_kono(), agentes, 1))
| Python |
# coding=latin-1
''' Implementacion del juego Toads and Frogs
@author: Mariana Maceiras <mmaceira@ucu.edu.uy>
'''
from _utils import resultado, print_board, coord_id
from _base import Game
from _tests.test_games import GameTest
class Toads_Frogs(Game):
    ''' Game component for Toads and Frogs
    '''
    # Toads ('T') travel rightwards, Frogs ('F') leftwards on a 1-D board.
    PLAYERS = ('Toads','Frogs')
    def __init__(self, board=None, enabled=0, chips_per_player=3, empty_spaces=2):
        Game.__init__(self, *Toads_Frogs.PLAYERS)
        if board:
            self.board = board
        else:
            # Default layout: toads on the left, a gap, frogs on the right.
            self.board = 'T' * chips_per_player + '_' * empty_spaces + 'F' * chips_per_player
        # Index into PLAYERS of the side to move (0 = Toads, 1 = Frogs).
        self.enabled = enabled
    class _Move(int):
        # A move is a board index; it is displayed as a board coordinate
        # through coord_id.
        def __str__(self):
            return coord_id(0,self)
        def __repr__(self):
            return str(self)
    def moves(self):
        if not self.enabled: # Toads move
            # A toad may slide into an adjacent empty cell ('T_') or jump a
            # single frog into an empty cell ('TF_').
            moves = [self._Move(pos) for pos in xrange(len(self.board)) if self.board[pos:].startswith('T_') or self.board[pos:].startswith('TF_')]
        else: # Frogs move
            # Mirror image, scanning leftwards: slide '_F' or jump '_TF'.
            moves = [self._Move(pos) for pos in xrange(len(self.board)) if self.board[:pos+1].endswith('_F') or self.board[:pos+1].endswith('_TF')]
        if moves:
            return { self.players[self.enabled]: moves }
        return None
    def results(self):
        # There is no draw in this game
        enabled_player = self.players[self.enabled]
        if not self.enabled:
            moves = 'T_' in self.board or 'TF_' in self.board
        else:
            moves = '_F' in self.board or '_TF' in self.board
        if not moves:
            # The side to move has no moves and loses (-1 via resultado).
            return resultado(enabled_player, self.players, -1)
        return None
    def next(self, **moves):
        board_list = list(self.board)
        enabled_player = self.players[self.enabled]
        move = moves[enabled_player]
        # Vacate the origin cell, then land one cell away (slide) or two
        # cells away (jump over an occupied neighbour).
        board_list[move] = '_'
        if not self.enabled: # A toad moves
            position = move+1 if board_list[move+1] == '_' else move+2
        else: # A frog moves
            position = move-1 if board_list[move-1] == '_' else move-2
        board_list[position] = enabled_player[0]
        # Hand the turn to the other side.
        return Toads_Frogs(''.join(board_list), (self.enabled + 1) % 2)
    def __str__(self):
        return print_board(self.board, 1, len(self.board)+1)
    def __repr__(self):
        # e.g. "T[TT_F F]": side to move plus the raw board string.
        return '%s[%s]' % (self.players[self.enabled][0], self.board)
class Test_Toads_Frogs(GameTest):
    ''' Toads and Frogs testcases
    '''
    def test_basic(self):
        # Generic sanity checks: zero-sum results and exactly one enabled
        # player per turn.
        self.basic_test(Toads_Frogs, zero_sum=True, enabled_players=1)
    def test_trace(self):
        # Scripted 3-chip/2-gap game won by Frogs (Toads run out of moves).
        self.trace_test_text(Toads_Frogs(None, 0, 3, 2), '''\
T[TTT__FFF] Toads a3
F[TT_T_FFF] Frogs a6
T[TT_TF_FF] Toads a2
F[T_TTF_FF] Frogs a7
T[T_TTFF_F] Toads a1
F[_TTTFF_F] Frogs a8
''', Toads=-1, Frogs=1)
        # Scripted 2-chip/1-gap game won by Toads.
        self.trace_test_text(Toads_Frogs(None, 0, 2, 1), '''\
T[TT_FF] Toads a2
F[T_TFF] Frogs a4
T[TFT_F] Toads a3
F[TF_TF] Frogs a5
T[TFFT_] Toads a4
''', Toads=1, Frogs=-1)
if __name__ == '__main__':
    from _agents import RandomAgent, FileAgent
    from _base import run_match, match
    # Plays one random-vs-random game on a 5+5 chip, 4-gap board, echoing
    # every move and finally the result and the last board.
    for move_number, moves, game_state in match(Toads_Frogs(None, 0, 5, 4), RandomAgent(name='Agent1'), RandomAgent(name='Agent2')):
        if move_number is not None:
            print '%d: %s -> %r' % (move_number, moves, game_state)
        else:
            print 'Result: %s' % (moves)
    print 'Final board: %r' % (game_state)
    #run_match(Toads_Frogs(None, 0, 5, 4), RandomAgent(), FileAgent(name='Human'))
| Python |
# coding=latin-1
'''
@author: Leonardo Val <lval@ucu.edu.uy>
'''
import sys
import random
from utils import *
from _utils import randgen
class Agent(object): ###########################################################
    ''' Base class for agents participating in games.
    '''
    def __init__(self, name):
        self.name = name
        self.player = None # Role assigned when a match begins.
    def decision(self, game, *moves):
        ''' The agent's move choice. When no candidate moves are supplied,
            they are looked up in the game state for this agent's player;
            returns None when the player has no available move.
        '''
        if moves:
            return self._decision(moves)
        available = game.moves()
        candidates = available.get(self.player, None) if available else ()
        if not candidates:
            return None
        return self._decision(candidates)
    def _decision(self, moves):
        ''' Performs the actual choice of move; meant to be overridden by
            subclasses.
        '''
        return moves[0] # Please do not use this default implementation.
    def match_begins(self, player, game):
        ''' Notifies the agent that a match is starting and which role it
            plays. Not called again until the match ends.
        '''
        self.player = player
    def match_moves(self, game, **moves):
        ''' Notifies the agent of the moves ({agent:move}) just applied and
            of the resulting game state.
        '''
        pass
    def match_ends(self, game):
        ''' Notifies the agent that its match finished with the given final
            game state.
        '''
        pass
    def __str__(self):
        return '%s(%s)' % (self.name, self.player)
    def __hash__(self):
        # Agents hash by name (used as statistics keys).
        return self.name.__hash__()
class FileAgent(Agent): ########################################################
    ''' An agent that takes his moves from a file and keeps record of the match
        in another one. It is also used like a user interface using standard
        input and output.
    '''
    def __init__(self, in_file=None, out_file=None, name='FileAgent'):
        Agent.__init__(self, name)
        # The defaults turn the agent into an interactive console player.
        if in_file is None:
            in_file = sys.stdin
        self.in_file = in_file
        if out_file is None:
            out_file = sys.stdout
        self.out_file = out_file
    def __print_state__(self, game):
        # Writes the board so every line of it reads as a '#' comment.
        self.out_file.write('#\t%s\n' % str(game).replace('\n', '\n#\t'))
    def match_begins(self, player, game):
        Agent.match_begins(self, player, game)
        self.out_file.write('# %s starts a match.\n' % (self,))
        self.__print_state__(game)
        self.out_file.flush()
    def match_moves(self, game, **moves):
        for player, move in moves.iteritems():
            self.out_file.write('# %s moves %s.\n' % (player, move))
        self.__print_state__(game)
        self.out_file.flush()
    def match_ends(self, game):
        result = game.results()[self.player]
        outcome = 'defeat' if result < 0 else 'victory' if result > 0 else 'draw'
        self.out_file.write('# %s ends the match with %s (%.4f).\n' % (self, outcome, result))
        self.out_file.flush()
    def _decision(self, moves):
        ''' Writes all available moves and reads the decision from in_file. Each
            move must be in a separate line. All lines starting with a '#' are
            ignored.
        '''
        self.out_file.write('# Available moves for %s: ' % self)
        self.out_file.write(' '.join([str(move) for move in moves]))
        self.out_file.write('\n')
        line = self.in_file.readline().strip()
        while line:
            if not line.startswith('#'):
                # The first input line exactly matching a move's str wins.
                for move in [move for move in moves if str(move) == line]:
                    return move
            line = self.in_file.readline().strip()
        # EOF or blank line before a valid choice was read.
        return None
class RandomAgent(Agent): ######################################################
    ''' An agent that chooses uniformly at random among the available moves.
    '''
    def __init__(self, random=None, name='RandomAgent'):
        Agent.__init__(self, name)
        # Accepts a random.Random-like instance, an integer seed, or None
        # (randgen builds a fresh generator in the latter cases).
        self.random = randgen(random)
    def _decision(self, moves):
        # Uniform choice over the candidate moves.
        return self.random.choice(moves)
class ElProAgent(Agent): ######################################################
    ''' A greedy agent: scores every reachable board with the destination
        distance heuristic and picks, uniformly at random, one of the moves
        with the best (lowest) score.
        (fix: the docstring wrongly said the agent "moves randomly" - it had
        been copy-pasted from RandomAgent.)
    '''
    def __init__(self, random=None, name='ElproAgent'):
        Agent.__init__(self, name)
    def _decision(self, moves):
        # Lower heuristic scores are better; start above any real score.
        puntaje = 999999
        movimientos = []
        # fix: dropped the dead `mejorMovimiento` variable - it was assigned
        # but never read.
        for movimiento in moves:
            tableroFuturo = hacerMovimiento(global_board.board, movimiento, fichas[self.player])
            puntajeTablero = heuristicaDistanciaDestino(tableroFuturo, fichas[self.player])
            if puntajeTablero == puntaje:
                # Tie with the current best: keep it as a candidate.
                movimientos.append(movimiento)
            if puntajeTablero < puntaje:
                # Strictly better: restart the candidate list.
                movimientos = [movimiento]
                puntaje = puntajeTablero
        global_random.shuffle(movimientos)
        return global_random.choice(movimientos)
class MiniMaxAgent(Agent): #####################################################
    ''' Plain heuristic MiniMax search cut off at a fixed horizon.
    '''
    def __init__(self, name="MiniMaxAgent", horizon=3, random=None, heuristic=None, genotipo=None):
        Agent.__init__(self, name)
        self.horizon = horizon
        # randgen accepts a random.Random instance, an integer seed, or None.
        self.random = randgen(random)
        self.__heuristic__ = heuristic
        self.genotipo_heuristic = genotipo

    def decision(self, game, *moves):
        ''' Evaluates every legal move with minimax and returns a uniformly
        random choice among those achieving the maximum value.
        '''
        valued = [(move, self._minimax(game.next(**{self.player: move}), 1))
                  for move in game.moves()[self.player]]
        best = max([val for (_, val) in valued])
        return self.random.choice([move for (move, val) in valued if val == best])

    def terminal_value(self, game, depth):
        ''' Returns a result if node is terminal or maximum depth has been
        reached. Else returns None.
        '''
        results = game.results()
        if results:
            return results[self.player]
        if depth >= self.horizon:
            return self.heuristic(game, depth)
        return None

    def _minimax(self, game, depth):
        value = self.terminal_value(game, depth)
        if value is not None:
            return value
        moves = game.moves()
        active_player = moves.keys()[0]  # Assumes there is only one active player.
        children = [self._minimax(game.next(**{active_player: move}), depth + 1)
                    for move in moves[active_player]]
        # Maximize on our own turns, minimize on the opponent's.
        return max(children) if active_player == self.player else min(children)

    def heuristic(self, game, depth):
        ''' This method implements the heuristic for the minimax algorithm. If
        no implementation is provided it returns a random value in [-1, 1).
        This default behaviour usually should not be applied in a game.
        '''
        if self.__heuristic__:
            return self.__heuristic__(self, game, depth)
        return self.random.random() * 2 - 1
INFINITE = 0x7FFFFFFF  # Largest signed 32-bit int; used as +/- infinity by the pruning searches below.
class AlphaBetaAgent(MiniMaxAgent):
    ''' An agent implementing MiniMax with alpha-beta pruning.
    <http://en.wikipedia.org/wiki/Alpha-beta_pruning>
    '''
    def __init__(self, name="AlphaBetaAgent", horizon=3, random=None, heuristic=None):
        MiniMaxAgent.__init__(self, name, horizon, random, heuristic)

    def _minimax(self, game, depth, alpha=-INFINITE, beta=INFINITE):
        ''' Alpha-beta search: alpha is the best value the maximizing player
        (self) has secured so far, beta the minimizer's; subtrees falling
        outside the (alpha, beta) window are pruned.

        Fix: removed a leftover debug call that rendered every visited node
        to minimax.html (printTableroHTML), slowing the search enormously;
        no sibling agent (MiniMaxAgent, NegaScoutAgent) renders nodes.
        '''
        result = self.terminal_value(game, depth)
        if result is not None:
            return result
        moves = game.moves()
        active_player = moves.keys()[0]  # Assumes there is only one active player.
        if active_player == self.player:
            # Maximizing node: raise alpha; prune once the window closes.
            for move in moves[active_player]:
                value = self._minimax(game.next(**{active_player: move}), depth + 1, alpha, beta)
                if alpha < value:
                    alpha = value
                if beta <= alpha:
                    break
            return alpha
        else:
            # Minimizing node: lower beta; prune once the window closes.
            for move in moves[active_player]:
                value = self._minimax(game.next(**{active_player: move}), depth + 1, alpha, beta)
                if beta > value:
                    beta = value
                if beta <= alpha:
                    break
            return beta
class NegaScoutAgent(MiniMaxAgent):
    ''' An agent implementing NegaScout or Principal Variation Search, an
    optimization of alpha-beta pruning used with NegaMax.
    <http://en.wikipedia.org/wiki/Negascout>
    '''
    def __init__(self, name="NegaScoutAgent", horizon=3, random=None, heuristic=None):
        MiniMaxAgent.__init__(self, name, horizon, random, heuristic)
    def _minimax(self, game, depth, alpha=-INFINITE, beta=INFINITE):
        # NOTE(review): textbook NegaScout negates the value returned by each
        # recursive call (a = -negascout(child, -b, -alpha)); here the child
        # value is used un-negated while the window *is* negated.  Confirm
        # against the game's value convention before trusting this agent.
        result = self.terminal_value(game, depth)
        if not result is None: return result
        b = beta  # null-window upper bound; the first child gets a full window
        moves = game.moves()
        active_player = moves.keys()[0] # Assumes there is only one active player.
        first = True
        for move in moves[active_player]:
            # Probe with the (possibly null) window.
            a = self._minimax(game.next(**{active_player: move}), depth + 1, -b, -alpha)
            if alpha < a < beta and not first: # Check if null-window failed high.
                a = self._minimax(game.next(**{active_player: move}), depth + 1, -beta, -alpha) # Full re-search.
            if alpha < a: alpha = a
            if beta <= alpha: return alpha # Beta cut-off.
            b = alpha + 1 # Set new null window.
            first = False
        return alpha
class TraceAgent(Agent): #######################################################
    ''' An agent that reenacts (and records) a move trace.
    '''
    def __init__(self, trace=None, proxy=None, name='TraceAgent'):
        Agent.__init__(self, name)
        # Bug fix: the original declared trace=[] (a mutable default), so all
        # TraceAgents built without an explicit trace shared -- and appended
        # to -- the very same list.  Each instance now gets a fresh list.
        self.trace = [] if trace is None else trace
        self.proxy = proxy

    def match_begins(self, player, game):
        Agent.match_begins(self, player, game)
        self._current_move = 0  # index of the next trace entry to replay

    def _decision(self, moves):
        ''' Returns the next move in the current trace. Else it uses the proxy
        agent to get a new move, recording it in the agents trace.
        '''
        self._current_move += 1
        if self._current_move - 1 < len(self.trace):
            return self.trace[self._current_move - 1]
        else:
            move = self.proxy._decision(moves)
            self.trace.append(move)
            return move
INFINITE = 0x7FFFFFFF  # NOTE(review): duplicate re-definition of the INFINITE constant declared earlier in this module.
################################################################################
from utils import *
# Normalization base: the heuristic accumulator is divided by this value.
HEURISTIC_MAX_VALUE = 350.0
# Per-column weights applied to row scores; central columns weigh the most
# (the board is 5 columns wide).
PORCENTAJES_COLUMNAS = [0.25, 0.50, 1.0, 0.50,0.25]
class heuristic_wrap():
    '''Wraps the board-evaluation heuristic, optionally parameterized by a
    per-row score table (genotipo) evolved by the genetic algorithm.'''
    def __init__(self, genotipo = None):
        # genotipo: optional list of per-row scores; None falls back to the
        # fixed distance-based scoring in puntajeFilaJugador.
        self.genotipo = genotipo
    def heuristicaDistanciaDestino(self, agente, game, depth):
        '''Scores `game` for the player who made the last move: pieces already
        on a goal square earn the maximum per-piece share, other pieces earn
        their row score weighted by the column weight.  The total is divided
        by HEURISTIC_MAX_VALUE and negated when that player is not `agente`.'''
        acumulador = 0
        for i in range(filas):
            for j in range(filas):
                # Player who moved last: game.enabled has already advanced to
                # the next player, so step back one turn.
                jugador = enabled_map[(game.enabled + 1) % 2]
                if game.board[i][j] == jugador:
                    # If the piece already sits on a goal square, award the
                    # maximum per-piece share for it.
                    if estaEnPosicionFinal(i, j, jugador, game.board):
                        acumulador += HEURISTIC_MAX_VALUE / game.chips_per_player
                    else:
                        puntaje = puntajeFilaJugador(i, jugador, self.genotipo)
                        acumulador += puntaje * PORCENTAJES_COLUMNAS[j]
        resultado = acumulador / HEURISTIC_MAX_VALUE
        # `jugador` deliberately leaks out of the loop above (last value set).
        return resultado if jugador == agente.player else - resultado
def puntajeFilaJugador(fila, jugador, genotipo):
    '''Returns the score of row `fila` for `jugador`.

    With a genotipo (list of per-row scores) the evolved value is used.
    Otherwise black (NEGRAS, advancing toward higher rows) scores the row
    index, and white (BLANCAS, advancing toward row 0) scores 4 - row.
    Returns None for an unknown player without a genotipo (preserving the
    original implicit fall-through, now made explicit).
    '''
    if genotipo is not None:  # idiom fix: was `!= None`
        return genotipo[fila]
    if jugador == NEGRAS:
        return fila
    if jugador == BLANCAS:
        return 4 - fila
    return None
| Python |
'''
Created on Mar 29, 2011
@author: diego
'''
from globals import *
from fileUtils import *
from threading import Thread
import time
def printTablero(tablero):
    '''Prints the board row by row to stdout, framed by blank lines.

    Compatibility fix: `print tablero[i]` (Python-2-only syntax) became
    `print(tablero[i])`, which behaves identically under Python 2 for a
    single argument and keeps the module importable under Python 3.
    '''
    print("\n")
    for i in range(filas):
        print(tablero[i])
    print("\n")
def printTableroHTML(tablero, file = "output.html", root = False, sleepTime = 1):
    '''Renders the board as an auto-refreshing HTML table and writes it to
    `file`; when `root` is True an iframe with the minimax debug view is
    embedded.  Does nothing unless HTML output is enabled in config.'''
    if not config.print_html_activated:
        return
    time.sleep(sleepTime)
    pieces = ["<html><head><meta http-equiv=\"refresh\" content=\"0\" ></head><table><tr><td> <table align=\"left\" border=\"8\" ><tr><td></td><td>0</td><td>1</td><td>2</td><td>3</td><td>4</td></tr>"]
    for i in range(filas):
        pieces.append("<tr><td>"+str(i)+"</td>")
        for j in range(filas):
            celda = tablero[i][j]
            if celda == NEGRAS:
                pieces.append("<td><img src=\"img/NEGRAS.png\"/></td>")
            elif celda == BLANCAS:
                pieces.append("<td><img src=\"img/BLANCAS.png\"/></td>")
            else:
                pieces.append("<td><img src=\"img/blank.png\"/></td>")
        pieces.append("</tr>")
    pieces.append("</table></td>")
    if root:
        pieces.append("<td><iframe align=\"right\" width=\"600px\" height=\"600px\" src=\"minimax.html\" frameborder=\"0\" ></td>")
    pieces.append("</tr></table></html>")
    writeToOutput("".join(pieces), file)
class _Move():
def __init__(self,tupleOrigen,tupleDestino):
self.origen = tupleOrigen
self.destino = tupleDestino
def __str__(self):
return 'Origen: ' +str(self.origen[0])+ '-' + str(self.origen[1])+' destino: '+str(self.destino[0])+'-'+str(self.destino[1])
def __repr__(self):
return 'Origen: ' +str(self.origen[0])+ '-' + str(self.origen[1])+' destino: '+str(self.destino[0])+'-'+str(self.destino[1])
def iniciarTablero():
    '''Builds the initial 5x5 board: black (NEGRAS) fills the top row plus the
    outer cells of row 1; white (BLANCAS) mirrors that at the bottom.'''
    tablero = [[0 for _ in range(filas)] for _ in range(filas)]
    for columna in range(filas):
        tablero[0][columna] = NEGRAS
        tablero[4][columna] = BLANCAS
    # Extra corner pieces on the second and fourth rows.
    tablero[1][0] = NEGRAS
    tablero[1][4] = NEGRAS
    tablero[3][0] = BLANCAS
    tablero[3][4] = BLANCAS
    return tablero
def esJugador(fila, columna, jugador, tablero):
    '''True when the cell at (fila, columna) holds a piece of `jugador`.'''
    celda = tablero[fila][columna]
    return celda == jugador
def estaEnPosicionFinal(fila, columna, jugador, tablero):
    '''True when (fila, columna) is a goal square for `jugador`, i.e. one of
    the opponent's starting positions; False otherwise.

    Bug fix: the original implicitly returned None when the square was not a
    goal (both falsy, but an explicit bool is returned now); the manual scan
    loop became a membership test.  `tablero` is unused but kept for
    interface compatibility.
    '''
    if jugador == NEGRAS:
        listaPosiciones = posicionesBlancas
    elif jugador == BLANCAS:
        listaPosiciones = posicionesNegras
    else:
        return False
    return (fila, columna) in listaPosiciones
def obtenerPosicionesOponente(jugador):
    '''Returns the opponent's starting squares: black targets white's start
    positions and everyone else targets black's.'''
    return posicionesBlancas if jugador == NEGRAS else posicionesNegras
def lugarDisponible(fila, columna, tablero):
    '''True when the cell at (fila, columna) is empty (holds 0).'''
    celda = tablero[fila][columna]
    return celda == 0
def lugarDisponibleTupla(tupla, tablero):
    '''True when the cell addressed by the (row, col) tuple is empty.'''
    fila, columna = tupla
    return tablero[fila][columna] == 0
def obtenerFichasJugador(jugador, tablero):
    '''Returns the (row, col) positions of every piece of `jugador`, scanning
    rows top-to-bottom and columns left-to-right.

    Generalized: iterates over the actual board dimensions instead of the
    module-level `filas` constant, so any square board size works.
    '''
    return [(i, j)
            for i, fila in enumerate(tablero)
            for j, celda in enumerate(fila)
            if celda == jugador]
# Checks whether `jugador` (NEGRAS or BLANCAS) has won.
def verificarGano(jugador, tablero):
    '''True when `jugador` occupies every one of the opponent's starting
    squares -- the winning condition of Five Field Kono.

    Improvement: all() short-circuits on the first unoccupied goal square;
    the original always AND-ed its way through the whole list.
    '''
    return all(esJugador(pos[0], pos[1], jugador, tablero)
               for pos in obtenerPosicionesOponente(jugador))
def obtenerOponente(jugador):
    '''Returns the opposing color for `jugador`.'''
    return BLANCAS if jugador == NEGRAS else NEGRAS
# Returns the list of destination tuples the piece at (fila, columna) may move to.
def obtenerPosiblesMovimientosFicha(fila, columna, tablero):
    '''Returns the empty diagonal destinations for the piece at
    (fila, columna); an empty list when the square itself holds no piece.

    The ordering (up-left, up-right, down-right, down-left) is preserved
    because the interactive game indexes moves by their position in this
    list.  Improvements: the four copy-pasted bound checks collapsed into a
    loop, and the hard-coded board size 5 now comes from the board itself.
    '''
    results = []
    if tablero[fila][columna] == 0:
        return results  # empty square: nothing to move
    size = len(tablero)
    # Clockwise starting at the upper-left diagonal -- order is part of the API.
    candidatos = [(fila - 1, columna - 1), (fila - 1, columna + 1),
                  (fila + 1, columna + 1), (fila + 1, columna - 1)]
    for destino in candidatos:
        f, c = destino
        if 0 <= f < size and 0 <= c < size and tablero[f][c] == 0:
            results.append(destino)
    return results
def obtenerPosiblesMovimientosJugador(jugador, tablero):
    '''Returns every legal move for `jugador` as a list of _Move objects.'''
    movimientos = []
    for origen in obtenerFichasJugador(jugador, tablero):
        destinos = obtenerPosiblesMovimientosFicha(origen[0], origen[1], tablero)
        movimientos.extend(_Move(origen, destino) for destino in destinos)
    return movimientos
def testGlobalBoard():
    # Smoke test: dumps the module-level global_board to stdout.
    printTablero(global_board)
def hacerMovimiento(tablero, move, jugador):
    '''Returns a new board equal to `tablero` with `move` (an object exposing
    .origen and .destino (row, col) tuples) applied for `jugador`; the input
    board is left untouched.

    Generalized: the copy uses the board's own dimensions instead of the
    module-level `filas` constant.
    '''
    nuevoTablero = [list(fila) for fila in tablero]  # row-wise shallow copy
    nuevoTablero[move.origen[0]][move.origen[1]] = 0
    nuevoTablero[move.destino[0]][move.destino[1]] = jugador
    return nuevoTablero
#def movimientosToMoves(listaTuplas):
# movesARetornar = []
# for tupla in listaTuplas:
# movesARetornar.append(_Move(tupla))
# return movesARetornar
| Python |
'''
Created on Mar 30, 2011
@author: diego
'''
def writeToOutput(data, file = "output.html"):
    '''Writes `data` to `file`, overwriting any previous content.  IO failures
    are reported to stdout instead of propagating (best-effort HTML logging).

    Fixes: `with` guarantees the handle is closed even when write() raises
    (the original leaked it in that case), and the parenthesized print works
    identically under Python 2 for a single argument while staying valid
    Python 3.
    '''
    try:
        with open(file, 'w') as fileHandler:
            fileHandler.write(data)
    except IOError:
        print("Error al escribir")
'''
Created on Mar 29, 2011
@author: diego
'''
from utils import *
iniciarTablero()
printTableroHTML()
print("Ingrese la posicion de la ficha que desea mover seguido del numero de posicion disponible.")
print("Una ficha se indica con una tupla. Ej: (1,2,3) es la ficha en fila 1 columna 2")
print("Ejemplo de ingreso: (1,2,3) Indica que se desea hacer el movimiento numero 3 para la ficha situada en (1,2)")
print("Movimientos disponibles: Los movimientos disponibles dependen de los lugares a donde se pueda mover la ficha. Los movimientos disponibles son basados en 0, en sentido horario y comiezan con el movimiento de arriba a la izquierda")
print("\n")
print("--------------------------------------------------------------------------------------------------")
print("\n")
nadieGano = True
while nadieGano:
try:
fila = raw_input("fila >")
columna = raw_input("columna >")
pos = (int(fila), int(columna))
if (not type(pos)==tuple):
print (type(pos))
print ("Debe ingresar una tupla indicando la ficha que desea mover")
continue
print("Tablero en "+fila +", "+columna +" = "+ str(tablero[pos[0]][pos[1]]))
if tablero[pos[0]][pos[1]] == 0:
print("Ese lugar esta disponible")
continue
movimientosDisponibles = obtenerPosiblesMovimientosFicha(pos[0], pos[1])
if movimientosDisponibles.__len__() == 0:
print("No hay movimientos disponibles para esa ficha")
continue
print "Movimientos disponibles: "+movimientosDisponibles.__str__()
movimiento = raw_input("indice movimiento >")
except ValueError:
print "Los numeros que ingrese deben ser numericos"
continue
movimiento = int(movimiento)
if movimiento < 0 or movimiento > 3:
print("El numero de movimiento debe ser entre 0 y 3")
continue
siguientePos = movimientosDisponibles[movimiento]
ficha = tablero[pos[0]][pos[1]]
tablero[pos[0]][pos[1]] = 0
tablero[siguientePos[0]][siguientePos[1]] = ficha
if verificarGano(ficha):
nadieGano = False
if ficha == 'M':
print "Fin de partida, Maquina ganador"
if ficha == 'H':
print "Fin de partida, Humano ganador"
printTableroHTML()
| Python |
from pyevolve import G1DList, Crossovers, Mutators
from pyevolve import GSimpleGA
from pyevolve import Selectors
from pyevolve import Statistics
from pyevolve import DBAdapters
from heuristicas import *
from pyevolve import Initializators, Mutators, Consts
import five_field_kono
from juegos._contests import *
from juegos._agents import RandomAgent, MiniMaxAgent, AlphaBetaAgent
import pyevolve
from five_field_kono import FiveFieldKonoMiniMaxAgent
import sys
# Shared RNG for the agents below.  NOTE(review): `random` is not imported in
# this module directly -- presumably it arrives via one of the star imports
# above; verify.
rnd = random.Random()
# GA evaluation function: higher scores mean stronger chromosomes.
# (The original comment about "zero'ed chromosomes" was a leftover from the
# pyevolve example this was adapted from.)
def eval_func(chromosome):
    # Play a 5-round all-against-all contest between an agent using the
    # chromosome-parameterized heuristic and a default opponent; fitness is
    # 3 points per win plus 1 per draw (football-style scoring).
    heuristica = heuristic_wrap(chromosome)
    agenteEvaluador = FiveFieldKonoMiniMaxAgent('MiniMaxAgent_%05d' % 1, 1, rnd,heuristic=heuristica.heuristicaDistanciaDestino)
    agentes = []
    agentes.append(agenteEvaluador)
    agenteContrincante = FiveFieldKonoMiniMaxAgent('MiniMaxAgent_%05d' %2, 0, rnd)
    agentes.append(agenteContrincante)
    stats = complete(AllAgainstAll_Contest(Five_field_kono(), agentes, 5))
    partidos_jugados = float(stats._stats['matches_played'][agenteEvaluador])
    partidos_ganados = float(stats._stats['matches_won'][agenteEvaluador])
    partidos_perdidos = float(stats._stats['matches_lost'][agenteEvaluador])
    # Draws = games neither won nor lost.
    partidos_empatados = partidos_jugados - (partidos_ganados + partidos_perdidos)
    resultado = (3* partidos_ganados + partidos_empatados )
    print str(chromosome.genomeList) + ' - ' +str(resultado) +'. Partidos jugados: '+str(partidos_jugados)+'. Partidos ganados: '+str(partidos_ganados)+'. Jugador: '+agenteEvaluador.player + ' partidos empatados: '+str(partidos_empatados) +' partidos perdidos: '+str(partidos_perdidos)
    return resultado
def run_main():
    # Genome: 5 integers in [0, 50] -- one score per board row, consumed by
    # heuristic_wrap / puntajeFilaJugador.
    genome = G1DList.G1DList(5)
    genome.setParams( rangemin=0, rangemax=50)
    genome.initializator.set(Initializators.G1DListInitializatorInteger)
    genome.mutator.set(Mutators.G1DListMutatorIntegerRange)
    genome.crossover.set(Crossovers.G1DListCrossoverTwoPoint)
    genome.evaluator.set(eval_func)
    # GA: maximize eval_func over 20 generations of 6 individuals.
    ga = GSimpleGA.GSimpleGA(genome)
    ga.setMinimax(Consts.minimaxType["maximize"])
    ga.setPopulationSize(6)
    ga.setElitism(True)
    ga.setGenerations(20)
    ga.setMutationRate(0.02)
    ga.setCrossoverRate(1.0)
    ga.selector.set(Selectors.GRouletteWheel)
    #ga.selector.set(Selectors.GRouletteWheel)
    # This DBAdapter is to create graphs later, it'll store statistics in
    # a SQLite db file
    sqlite_adapter = DBAdapters.DBSQLite(identify="ex1", resetDB=True)
    ga.setDBAdapter(sqlite_adapter)
    # Evolve, dumping statistics every generation.
    ga.evolve(freq_stats=1)
    best = ga.bestIndividual()
    print best
    print "\nBest individual score: %.2f\n" % (best.score,)
if __name__ == "__main__":
    run_main()
    # Enable the pyevolve logging system
    # NOTE(review): this runs only after run_main() returns, so it cannot
    # affect the evolution above -- likely a leftover; confirm intent before
    # moving or removing it.
    pyevolve.logEnable()
# Genome instance, 1D List of 50 elements
#genome = G1DList.G1DList(5)
# Sets the range max and min of the 1D List
#genome.setParams(rangemin=1, rangemax=50)
# The evaluator function (evaluation function)
# Genetic Algorithm Instance
# Set the Roulette Wheel selector method, the number of generations and
# the termination criteria
#ga.terminationCriteria.set(GSimpleGA.ConvergenceCriteria)
# Sets the DB Adapter, the resetDB flag will make the Adapter recreate
# the database and erase all data every run, you should use this flag
# just in the first time, after the pyevolve.db was created, you can
# omit it.
# Do the evolution, with stats dump
# frequency of 20 generations
| Python |
# This code is part of Pyevolve.
# Require matplotlib v.0.98.5.0+
from optparse import OptionParser
from optparse import OptionGroup
# Column indexes of rows fetched from the `statistics` table of the pyevolve
# SQLite database (see DBAdapters.DBSQLite); used by parse().
STAT = {
   "identify" : 0, "generation" : 1, "rawMin" : 2,
   "fitMin" : 3, "rawDev" : 4, "fitMax" : 5,
   "rawMax" : 6, "fitAve" : 7, "rawVar" : 8,
   "rawAve" : 9
}
# Column indexes of rows fetched from the `population` table; used by parsePop().
POP = {
   "identify" : 0, "generation" : 1, "individual" : 2,
   "fitness" : 3, "raw" : 4
}
def parse(line_record, field):
    '''Returns the `field` column of a statistics-table row (see STAT).'''
    column = STAT[field]
    return line_record[column]
def parsePop(line_record, field):
    '''Returns the `field` column of a population-table row (see POP).'''
    column = POP[field]
    return line_record[column]
def graph_pop_heatmap_raw(all, minimize, colormap="jet", filesave=None):
    '''Heat map of per-individual raw scores, one row per generation.
    Saves to `filesave` when given, otherwise opens an interactive window.
    Relies on pylab/matplotlib being imported by the __main__ block below.'''
    pylab.imshow(all, aspect="equal", interpolation="gaussian", cmap=matplotlib.cm.__dict__[colormap])
    pylab.title("Plot of pop. raw scores along the generations")
    pylab.xlabel('Population')
    pylab.ylabel('Generations')
    pylab.grid(True)
    pylab.colorbar()
    if filesave:
        pylab.savefig(filesave)
        print "Graph saved to %s file !" % (filesave,)
    else:
        pylab.show()
def graph_pop_heatmap_fitness(all, minimize, colormap="jet", filesave=None):
    '''Heat map of per-individual fitness scores, one row per generation.
    Identical to graph_pop_heatmap_raw except for the title.'''
    pylab.imshow(all, aspect="equal", interpolation="gaussian", cmap=matplotlib.cm.__dict__[colormap])
    pylab.title("Plot of pop. fitness scores along the generations")
    pylab.xlabel('Population')
    pylab.ylabel('Generations')
    pylab.grid(True)
    pylab.colorbar()
    if filesave:
        pylab.savefig(filesave)
        print "Graph saved to %s file !" % (filesave,)
    else:
        pylab.show()
def graph_diff_raw(all, minimize, filesave=None):
    '''Two stacked plots of per-generation spread: raw max-min difference on
    top, fitness max-min difference below, each with its peak annotated.
    Reads the evolution id from the module-global `options` (set in __main__).'''
    x = []
    diff_raw_y = []
    diff_fit_y = []
    for it in all:
        x.append(parse(it, "generation"))
        diff_raw_y.append(parse(it, "rawMax") - parse(it, "rawMin"))
        diff_fit_y.append(parse(it, "fitMax") - parse(it, "fitMin"))
    pylab.figure()
    pylab.subplot(211)
    pylab.plot(x, diff_raw_y, "g", label="Raw difference", linewidth=1.2)
    pylab.fill_between(x, diff_raw_y, color="g", alpha=0.1)
    # Annotate the generation with the widest raw spread.
    diff_raw_max= max(diff_raw_y)
    gen_max_raw = x[diff_raw_y.index(diff_raw_max)]
    pylab.annotate("Maximum (%.2f)" % (diff_raw_max,), xy=(gen_max_raw, diff_raw_max), xycoords='data',
                xytext=(-150, -20), textcoords='offset points',
                arrowprops=dict(arrowstyle="->",
                                connectionstyle="arc"),
                )
    pylab.xlabel("Generation (#)")
    pylab.ylabel("Raw difference")
    pylab.title("Plot of evolution identified by '%s'" % (options.identify))
    pylab.grid(True)
    pylab.legend(prop=FontProperties(size="smaller"))
    pylab.subplot(212)
    pylab.plot(x, diff_fit_y, "b", label="Fitness difference", linewidth=1.2)
    pylab.fill_between(x, diff_fit_y, color="b", alpha=0.1)
    # Annotate the generation with the widest fitness spread.
    diff_fit_max= max(diff_fit_y)
    gen_max_fit = x[diff_fit_y.index(diff_fit_max)]
    pylab.annotate("Maximum (%.2f)" % (diff_fit_max,), xy=(gen_max_fit, diff_fit_max), xycoords='data',
                xytext=(-150, -20), textcoords='offset points',
                arrowprops=dict(arrowstyle="->",
                                connectionstyle="arc"),
                )
    pylab.xlabel("Generation (#)")
    pylab.ylabel("Fitness difference")
    pylab.grid(True)
    pylab.legend(prop=FontProperties(size="smaller"))
    if filesave:
        pylab.savefig(filesave)
        print "Graph saved to %s file !" % (filesave,)
    else:
        pylab.show()
def graph_maxmin_raw(all, minimize, filesave=None):
    '''Plots raw max/min/avg/std-dev per generation with the best value and
    std-dev extremes annotated.  `minimize` flips which extreme counts as
    "best".  Reads the evolution id from the module-global `options`.'''
    x = []
    max_y = []
    min_y = []
    std_dev_y = []
    avg_y = []
    for it in all:
        x.append(parse(it, "generation"))
        max_y.append(parse(it, "rawMax"))
        min_y.append(parse(it, "rawMin"))
        std_dev_y.append(parse(it, "rawDev"))
        avg_y.append(parse(it, "rawAve"))
    pylab.figure()
    pylab.plot(x, max_y, "g", label="Max raw", linewidth=1.2)
    pylab.plot(x, min_y, "r", label="Min raw", linewidth=1.2)
    pylab.plot(x, avg_y, "b", label="Avg raw", linewidth=1.2)
    pylab.plot(x, std_dev_y, "k", label="Std Dev raw", linewidth=1.2)
    pylab.fill_between(x, min_y, max_y, color="g", alpha=0.1, label="Diff max/min")
    # Best raw value: smallest min when minimizing, largest max otherwise.
    if minimize: raw_max = min(min_y)
    else: raw_max= max(max_y)
    if minimize: gen_max = x[min_y.index(raw_max)]
    else: gen_max = x[max_y.index(raw_max)]
    min_std = min(std_dev_y)
    gen_min_std = x[std_dev_y.index(min_std)]
    max_std = max(std_dev_y)
    gen_max_std = x[std_dev_y.index(max_std)]
    if minimize: annot_label = "Minimum (%.2f)" % (raw_max,)
    else: annot_label = "Maximum (%.2f)" % (raw_max,)
    pylab.annotate(annot_label, xy=(gen_max, raw_max), xycoords='data',
                xytext=(8, 15), textcoords='offset points',
                arrowprops=dict(arrowstyle="->",
                                connectionstyle="arc"),
                )
    pylab.annotate("Min StdDev (%.2f)" % (min_std,), xy=(gen_min_std, min_std), xycoords='data',
                xytext=(8, 15), textcoords='offset points',
                arrowprops=dict(arrowstyle="->",
                                connectionstyle="arc"),
                )
    pylab.annotate("Max StdDev (%.2f)" % (max_std,), xy=(gen_max_std, max_std), xycoords='data',
                xytext=(8, 15), textcoords='offset points',
                arrowprops=dict(arrowstyle="->",
                                connectionstyle="arc"),
                )
    pylab.xlabel("Generation (#)")
    pylab.ylabel("Raw score")
    pylab.title("Plot of evolution identified by '%s' (raw scores)" % (options.identify))
    pylab.grid(True)
    pylab.legend(prop=FontProperties(size="smaller"))
    if filesave:
        pylab.savefig(filesave)
        print "Graph saved to %s file !" % (filesave,)
    else:
        pylab.show()
def graph_maxmin_fitness(all, minimize, filesave=None):
    '''Plots fitness max/min/avg per generation with the best value annotated.
    `minimize` flips which extreme counts as "best".  Reads the evolution id
    from the module-global `options`.'''
    x = []
    max_y = []
    min_y = []
    avg_y = []
    for it in all:
        x.append(parse(it, "generation"))
        max_y.append(parse(it, "fitMax"))
        min_y.append(parse(it, "fitMin"))
        avg_y.append(parse(it, "fitAve"))
    pylab.figure()
    pylab.plot(x, max_y, "g", label="Max fitness")
    pylab.plot(x, min_y, "r", label="Min fitness")
    pylab.plot(x, avg_y, "b", label="Avg fitness")
    pylab.fill_between(x, min_y, max_y, color="g", alpha=0.1, label="Diff max/min")
    # Best fitness value: smallest min when minimizing, largest max otherwise.
    if minimize: raw_max = min(min_y)
    else: raw_max = max(max_y)
    if minimize: gen_max = x[min_y.index(raw_max)]
    else: gen_max = x[max_y.index(raw_max)]
    if minimize: annot_label = "Minimum (%.2f)" % (raw_max,)
    else: annot_label = "Maximum (%.2f)" % (raw_max,)
    pylab.annotate(annot_label, xy=(gen_max, raw_max), xycoords='data',
                xytext=(8, 15), textcoords='offset points',
                arrowprops=dict(arrowstyle="->",
                                connectionstyle="arc"),
                )
    pylab.xlabel("Generation (#)")
    pylab.ylabel("Fitness score")
    pylab.title("Plot of evolution identified by '%s' (fitness scores)" % (options.identify))
    pylab.grid(True)
    pylab.legend(prop=FontProperties(size="smaller"))
    if filesave:
        pylab.savefig(filesave)
        print "Graph saved to %s file !" % (filesave,)
    else:
        pylab.show()
def graph_errorbars_raw(all, minimize, filesave=None):
x = []
y = []
yerr_max = []
yerr_min = []
for it in all:
x.append(parse(it, "generation"))
y.append(parse(it, "rawAve"))
ymax = parse(it, "rawMax") - parse(it, "rawAve")
ymin = parse(it, "rawAve") - parse(it, "rawMin")
yerr_max.append(ymax)
yerr_min.append(ymin)
pylab.figure()
pylab.errorbar(x, y, [yerr_min, yerr_max], ecolor="g")
pylab.xlabel('Generation (#)')
pylab.ylabel('Raw score Min/Avg/Max')
pylab.title("Plot of evolution identified by '%s' (raw scores)" % (options.identify))
pylab.grid(True)
if filesave:
pylab.savefig(filesave)
print "Graph saved to %s file !" % (filesave,)
else:
pylab.show()
def graph_errorbars_fitness(all, minimize, filesave=None):
x = []
y = []
yerr_max = []
yerr_min = []
for it in all:
x.append(parse(it, "generation"))
y.append(parse(it, "fitAve"))
ymax = parse(it, "fitMax") - parse(it, "fitAve")
ymin = parse(it, "fitAve") - parse(it, "fitMin")
yerr_max.append(ymax)
yerr_min.append(ymin)
pylab.figure()
pylab.errorbar(x, y, [yerr_min, yerr_max], ecolor="g")
pylab.xlabel('Generation (#)')
pylab.ylabel('Fitness score Min/Avg/Max')
pylab.title("Plot of evolution identified by '%s' (fitness scores)" % (options.identify))
pylab.grid(True)
if filesave:
pylab.savefig(filesave)
print "Graph saved to %s file !" % (filesave,)
else:
pylab.show()
def graph_compare_raw(all, minimize, id_list, filesave=None):
    '''Overlays the raw max/min bands of several evolutions (one entry of
    `all` and `id_list` per evolution, at most 6 -- one color each).  When
    minimizing, the min curve is labeled; otherwise the max curve.'''
    colors_list = ["g", "b", "r", "k", "m", "y"]
    index = 0
    pylab.figure()
    for it_out in all:
        x = []
        max_y = []
        min_y = []
        for it in it_out:
            x.append(parse(it, "generation"))
            max_y.append(parse(it, "rawMax"))
            min_y.append(parse(it, "rawMin"))
        # Highlight (label + thicker line) the curve that matters for the
        # optimization direction; draw the other one faintly.
        if minimize:
            pylab.plot(x, max_y, colors_list[index], linewidth=0.05)
            pylab.plot(x, min_y, colors_list[index], label="Raw min (%s)" % (id_list[index],), linewidth=1.3)
        else:
            pylab.plot(x, max_y, colors_list[index], label="Raw max (%s)" % (id_list[index],), linewidth=1.3)
            pylab.plot(x, min_y, colors_list[index], linewidth=0.05)
        pylab.fill_between(x, min_y, max_y, color=colors_list[index], alpha=0.06,)
        index += 1
    pylab.xlabel("Generation (#)")
    pylab.ylabel("Raw score")
    pylab.title("Plot of evolution identified by '%s' (raw scores)" % ('many',))
    pylab.grid(True)
    pylab.legend(prop=FontProperties(size="smaller"))
    if filesave:
        pylab.savefig(filesave)
        print "Graph saved to %s file !" % (filesave,)
    else:
        pylab.show()
def graph_compare_fitness(all, minimize, id_list, filesave=None):
    '''Overlays the fitness max/min bands of several evolutions (one entry of
    `all` and `id_list` per evolution, at most 6 -- one color each).  When
    minimizing, the min curve is labeled; otherwise the max curve.'''
    colors_list = ["g", "b", "r", "k", "m", "y"]
    index = 0
    pylab.figure()
    for it_out in all:
        x = []
        max_y = []
        min_y = []
        for it in it_out:
            x.append(parse(it, "generation"))
            max_y.append(parse(it, "fitMax"))
            min_y.append(parse(it, "fitMin"))
        # Highlight the curve that matters for the optimization direction.
        if minimize:
            pylab.plot(x, max_y, colors_list[index], linewidth=0.05)
            pylab.plot(x, min_y, colors_list[index], label="Fitness min (%s)" % (id_list[index],), linewidth=1.3)
        else:
            pylab.plot(x, max_y, colors_list[index], label="Fitness max (%s)" % (id_list[index],), linewidth=1.3)
            pylab.plot(x, min_y, colors_list[index], linewidth=0.05)
        pylab.fill_between(x, min_y, max_y, color=colors_list[index], alpha=0.06,)
        index += 1
    pylab.xlabel("Generation (#)")
    pylab.ylabel("Fitness score")
    pylab.title("Plot of evolution identified by '%s' (fitness scores)" % ('many',))
    pylab.grid(True)
    pylab.legend(prop=FontProperties(size="smaller"))
    if filesave:
        pylab.savefig(filesave)
        print "Graph saved to %s file !" % (filesave,)
    else:
        pylab.show()
if __name__ == "__main__":
    from pyevolve import __version__ as pyevolve_version
    from pyevolve import __author__ as pyevolve_author
    # True once a population heat-map (-8/-9) is requested; those graphs read
    # the `population` table instead of `statistics`.
    popGraph = False
    print "Pyevolve %s - Graph Plot Tool" % (pyevolve_version,)
    print "By %s\n" % (pyevolve_author,)
    # --- Command-line definition ---
    parser = OptionParser()
    parser.add_option("-f", "--file", dest="dbfile",
                    help="Database file to read (default is 'pyevolve.db').", metavar="FILENAME", default="pyevolve.db")
    parser.add_option("-i", "--identify", dest="identify",
                    help="The identify of evolution.", metavar="IDENTIFY")
    parser.add_option("-o", "--outfile", dest="outfile",
                    help="""Write the graph image to a file (don't use extension, just the filename, default is png format, but you can change using --extension (-e) parameter).""",
                    metavar="OUTFILE")
    parser.add_option("-e", "--extension", dest="extension",
                    help="""Graph image file format. Supported options (formats) are: emf, eps, pdf, png, ps, raw, rgba, svg, svgz. Default is 'png'.""",
                    metavar="EXTENSION", default="png")
    parser.add_option("-g", "--genrange", dest="genrange",
                    help="""This is the generation range of the graph, ex: 1:30 (interval between 1 and 30).""",
                    metavar="GENRANGE")
    parser.add_option("-c", "--colormap", dest="colormap",
                    help="""Sets the Color Map for the graph types 8 and 9. Some options are: summer, bone, gray, hot, jet, cooper, spectral. The default is 'jet'.""",
                    metavar="COLORMAP", default="jet")
    parser.add_option("-m", "--minimize", action="store_true",
                    help="Sets the 'Minimize' mode, default is the Maximize mode. This option makes sense if you are minimizing your evaluation function.", dest="minimize")
    group = OptionGroup(parser, "Graph types", "This is the supported graph types")
    group.add_option("-0", action="store_true", help="Write all graphs to files. Graph types: 1, 2, 3, 4 and 5.", dest="all_graphs")
    group.add_option("-1", action="store_true", help="Error bars graph (raw scores).", dest="errorbars_raw")
    group.add_option("-2", action="store_true", help="Error bars graph (fitness scores).", dest="errorbars_fitness")
    group.add_option("-3", action="store_true", help="Max/min/avg/std. dev. graph (raw scores).", dest="maxmin_raw")
    group.add_option("-4", action="store_true", help="Max/min/avg graph (fitness scores).", dest="maxmin_fitness")
    group.add_option("-5", action="store_true", help="Raw and Fitness min/max difference graph.", dest="diff_raw")
    group.add_option("-6", action="store_true", help="Compare best raw score of two or more evolutions (you must specify the identify comma-separed list with --identify (-i) parameter, like 'one, two, three'), the maximum is 6 items.", dest="compare_raw")
    group.add_option("-7", action="store_true", help="Compare best fitness score of two or more evolutions (you must specify the identify comma-separed list with --identify (-i) parameter, like 'one, two, three'), the maximum is 6 items.", dest="compare_fitness")
    group.add_option("-8", action="store_true", help="Show a heat map of population raw score distribution between generations.", dest="pop_heatmap_raw")
    group.add_option("-9", action="store_true", help="Show a heat map of population fitness score distribution between generations.", dest="pop_heatmap_fitness")
    parser.add_option_group(group)
    (options, args) = parser.parse_args()
    # An identify without any graph-type flag is an error.
    if options.identify and (not options.errorbars_raw
       and not options.errorbars_fitness
       and not options.maxmin_raw
       and not options.maxmin_fitness
       and not options.diff_raw
       and not options.all_graphs
       and not options.compare_raw
       and not options.pop_heatmap_raw
       and not options.pop_heatmap_fitness
       and not options.compare_fitness):
        parser.error("You must choose one graph type !")
    if (not options.identify) or (not options.dbfile):
        parser.print_help()
        exit()
    print "Loading modules...."
    import os.path
    if not os.path.exists(options.dbfile):
        print "Database file '%s' not found !" % (options.dbfile, )
        exit()
    # Heavy imports are deferred until after argument validation; the graph
    # functions above rely on these module-level names.
    import pylab
    from matplotlib.font_manager import FontProperties
    import matplotlib.cm
    import sqlite3
    import math
    import os
    print "Loading database and creating graph..."
    identify_list = options.identify.split(",")
    identify_list = map(str.strip, identify_list)
    all = None
    # --- Population heat maps: read the per-individual `population` table ---
    if options.pop_heatmap_raw or options.pop_heatmap_fitness:
        conn = sqlite3.connect(options.dbfile)
        c = conn.cursor()
        if options.genrange:
            genrange = options.genrange.split(":")
            ret = c.execute("select distinct generation from population where identify = ? and generation between ? and ?", (options.identify, genrange[0], genrange[1]))
        else:
            ret = c.execute("select distinct generation from population where identify = ?", (options.identify,))
        generations = ret.fetchall()
        if len(generations) <= 0:
            print "No generation data found for the identify '%s' !" % (options.identify,)
            exit()
        all = []
        # One row of scores per generation.
        for gen in generations:
            pop_tmp = []
            ret = c.execute("select * from population where identify = ? and generation = ?", ( options.identify, gen[0]))
            ret_fetch = ret.fetchall()
            for it in ret_fetch:
                if options.pop_heatmap_raw:
                    pop_tmp.append(parsePop(it, "raw"))
                else:
                    pop_tmp.append(parsePop(it, "fitness"))
            all.append(pop_tmp)
        ret.close()
        conn.close()
        if len(all) <= 0:
            print "No statistic data found for the identify '%s' !" % (options.identify,)
            exit()
        print "%d generations found !" % (len(all),)
        popGraph = True
    # --- Single evolution: read the `statistics` table ---
    if len(identify_list) == 1 and not popGraph:
        if options.compare_raw or options.compare_fitness:
            parser.error("You can't use this graph type with only one identify !")
        conn = sqlite3.connect(options.dbfile)
        c = conn.cursor()
        if options.genrange:
            genrange = options.genrange.split(":")
            ret = c.execute("select * from statistics where identify = ? and generation between ? and ?", (options.identify, genrange[0], genrange[1]))
        else:
            ret = c.execute("select * from statistics where identify = ?", (options.identify,))
        all = ret.fetchall()
        ret.close()
        conn.close()
        if len(all) <= 0:
            print "No statistic data found for the identify '%s' !" % (options.identify,)
            exit()
        print "%d generations found !" % (len(all),)
    # --- Several evolutions (comparison graphs): one statistics set each ---
    elif len(identify_list) > 1 and not popGraph:
        all = []
        if (not options.compare_raw) and (not options.compare_fitness):
            parser.error("You can't use many ids with this graph type !")
        conn = sqlite3.connect(options.dbfile)
        c = conn.cursor()
        for item in identify_list:
            if options.genrange:
                genrange = options.genrange.split(":")
                ret = c.execute("select * from statistics where identify = ? and generation between ? and ?", (item, genrange[0], genrange[1]))
            else:
                ret = c.execute("select * from statistics where identify = ?", (item,))
            fetchall = ret.fetchall()
            if len(fetchall) > 0:
                all.append(fetchall)
        ret.close()
        conn.close()
        if len(all) <= 0:
            print "No statistic data found for the identify list '%s' !" % (options.identify,)
            exit()
        print "%d identify found !" % (len(all),)
    # --- Dispatch to the requested graph type(s) ---
    if options.errorbars_raw:
        if options.outfile: graph_errorbars_raw(all, options.minimize, options.outfile + "." + options.extension)
        else: graph_errorbars_raw(all, options.minimize)
    if options.errorbars_fitness:
        if options.outfile: graph_errorbars_fitness(all, options.minimize, options.outfile + "." + options.extension)
        else: graph_errorbars_fitness(all, options.minimize)
    if options.maxmin_raw:
        if options.outfile: graph_maxmin_raw(all, options.minimize, options.outfile + "." + options.extension)
        else: graph_maxmin_raw(all, options.minimize)
    if options.maxmin_fitness:
        if options.outfile: graph_maxmin_fitness(all, options.minimize, options.outfile + "." + options.extension)
        else: graph_maxmin_fitness(all, options.minimize)
    if options.diff_raw:
        if options.outfile: graph_diff_raw(all, options.minimize, options.outfile + "." + options.extension)
        else: graph_diff_raw(all, options.minimize)
    if options.all_graphs:
        # Render graph types 1-5 into a per-identify directory.
        all_graph_functions = [graph_errorbars_raw, graph_errorbars_fitness, graph_maxmin_raw,
                               graph_maxmin_fitness, graph_diff_raw]
        if options.outfile:
            parser.error("You can't specify one file to all graphs !")
        dirname = "graphs_" + options.identify
        if not os.path.isdir(dirname):
            os.mkdir(dirname)
        for graph in all_graph_functions:
            filename = dirname + "/"
            filename += options.identify + "_" + graph.__name__[6:]
            filename += "." + options.extension
            graph(all, options.minimize, filename)
        print "\n\tDone ! The graphs was saved in the directory '%s'" % (dirname)
    if options.compare_raw:
        if options.outfile: graph_compare_raw(all, options.minimize, identify_list, options.outfile + "." + options.extension)
        else: graph_compare_raw(all, options.minimize, identify_list )
    if options.compare_fitness:
        if options.outfile: graph_compare_fitness(all, options.minimize, identify_list, options.outfile + "." + options.extension)
        else: graph_compare_fitness(all, options.minimize, identify_list )
    if options.pop_heatmap_raw:
        if options.outfile: graph_pop_heatmap_raw(all, options.minimize, options.colormap, options.outfile + "." + options.extension)
        else: graph_pop_heatmap_raw(all, options.minimize, options.colormap)
    if options.pop_heatmap_fitness:
        if options.outfile: graph_pop_heatmap_fitness(all, options.minimize, options.colormap, options.outfile + "." + options.extension)
        else: graph_pop_heatmap_fitness(all, options.minimize, options.colormap)
| Python |
'''
Created on Apr 8, 2011
@author: diego
'''
from _tests.test_games import GameTest
from juegos._agents import AlphaBetaAgent, Agent
from juegos._base import Game
from juegos._utils import resultado
from utils import *
class Five_field_kono(Game):
    '''Five Field Kono board game: each side tries to move all of its
    pieces onto the opponent's starting squares.'''
    PLAYERS = (BLANCAS,NEGRAS)
    def __init__(self, board=None, enabled = 0, chips_per_player=7, empty_spaces=11):
        # board: pre-built board to resume from (a fresh one is created
        # otherwise); enabled: index into PLAYERS of the side to move;
        # the remaining parameters describe the standard 5x5 setup.
        Game.__init__(self,*Five_field_kono.PLAYERS)
        if board:
            self.board = board # the board already comes assembled
            setGlobalBoard(self.board)
        else:
            self.board = iniciarTablero()
            setGlobalBoard(self.board)
        self.enabled = enabled
        self.chips_per_player = chips_per_player
    def moves(self):
        '''Return {enabled player: legal moves}, or None once the game
        has a result.'''
        if self.results(): # In order to avoid returning both moves and results.
            return None
        moves = obtenerPosiblesMovimientosJugador(enabled_map[self.enabled], self.board)
        return {self.players[self.enabled]:moves}
    def next(self,**moves):
        # Apply the enabled player's move and hand the turn to the other side.
        enabled_player = self.players[self.enabled]
        move = moves[enabled_player]
        nuevoTablero = hacerMovimiento(self.board, move, enabled_player)
        return Five_field_kono(nuevoTablero, (self.enabled + 1)% 2)
    def results(self):
        '''Return whether a player has won the match, as a
        {player: result} dictionary, or None while nobody has won yet.'''
        # NOTE(review): when NEGRAS wins this builds resultado(BLANCAS, ..., -1)
        # and vice versa -- presumably "the loser scores -1" in this zero-sum
        # framework; confirm against juegos._utils.resultado.
        if verificarGano(NEGRAS, self.board):
            return resultado(BLANCAS,self.players,-1)
            '''resultado(ganador,listaJugadores,resultado)'''
        elif verificarGano(BLANCAS, self.board):
            return resultado(NEGRAS,self.players,-1)
        # Too many plies: declare the match drawn (score 0).
        if config.cantidad_jugadas_index >= config.cota_cantidad_jugadas:
            print "limite de jugadas excedido"
            return resultado(NEGRAS,self.players,0)
        # A side with no legal moves also ends the game with score 0.
        if obtenerPosiblesMovimientosJugador(obtenerOponente(NEGRAS), self.board).__len__()==0:
            return resultado(NEGRAS,self.players,0)
        if obtenerPosiblesMovimientosJugador(obtenerOponente(BLANCAS), self.board).__len__()==0:
            return resultado(NEGRAS,self.players,0)
        return None
    def testBlancasGanan(self):
        # Debug helper: rig the board into an almost-won layout (whites on
        # row 0, blacks on row 4, plus a few extra pieces).
        for i in range(filas):
            self.board[0][i] = BLANCAS
            self.board[1][i] = 0
            self.board[2][i] = 0
            self.board[3][i] = 0
            self.board[4][i] = NEGRAS
        self.board[1][0] = BLANCAS
        self.board[1][4] = BLANCAS
        self.board[2][2] = NEGRAS
        self.board[3][3] = NEGRAS
    def __str__(self):
        return 'five field kono'
    def __repr__(self):
        # e.g. "B[<board>]": first letter of the side to move plus the board.
        return '%s[%s]' % (self.players[self.enabled][0], self.board)
class Test_Five_Field_Kono(GameTest):
    ''' Five field kono testcases
    '''
    def test_basic(self):
        # Run the framework's generic sanity checks for a two-player,
        # zero-sum game where player index 0 moves first.
        self.basic_test(Five_field_kono, zero_sum=True, enabled_players=0)
INFINITE = 0x7FFFFFFF  # sentinel +infinity for the alpha-beta window
class FiveFieldKonoMiniMaxAgent(AlphaBetaAgent):
    '''Alpha-beta agent for Five Field Kono that keeps the global move
    counter in `config` up to date and can dump the board to HTML while
    searching.'''
    def match_moves(self, game, **moves):
        # Count every ply played so Five_field_kono.results() can cut
        # off over-long matches.
        config.cantidad_jugadas_index += 1
    def match_begins(self, player, game):
        # Reset the ply counter at the start of each match.
        config.cantidad_jugadas_index = 0
        AlphaBetaAgent.match_begins(self, player, game)
        self.game = game
    def match_ends(self, game):
        self.game = game
    def __init__(self, name="FiveFieldKonoMiniMax", horizon=3, random=None, heuristic=None):
        # BUG FIX: this was spelled __init_ so it never ran and
        # construction silently fell through to AlphaBetaAgent.__init__.
        AlphaBetaAgent.__init__(self, name, horizon, random, heuristic)
    def _minimax(self, game, depth, alpha=-INFINITE, beta=INFINITE):
        printTableroHTML(game.board, "minimax.html", sleepTime =0)
        #print "\t"*(-1 + depth), "->", fichasReverse[enabled_map[game.enabled]]
        # BUG FIX: forward alpha/beta instead of dropping them, otherwise
        # the pruning window was reset to its defaults on every recursive
        # call (assumes AlphaBetaAgent._minimax takes the same signature,
        # which this override mirrors).
        return AlphaBetaAgent._minimax(self, game, depth, alpha, beta)
if __name__ == '__main__':
    # Demo match: a minimax agent using a distance-to-goal heuristic
    # against a purely random agent, printing each move and the result.
    from juegos._agents import RandomAgent, FileAgent, MiniMaxAgent, AlphaBetaAgent
    from juegos._base import run_match, match
    from heuristicas import *
    from utils import *
    rnd = random.Random()
    heuristica = heuristic_wrap([40,30,20,10,5])
    agenteEvaluador = FiveFieldKonoMiniMaxAgent('MiniMaxAgent_%05d' % 1, 3, rnd,heuristic=heuristica.heuristicaDistanciaDestino)
    for move_number, moves, game_state in match(Five_field_kono(),agenteEvaluador,RandomAgent(rnd, 'RandomAgent_%05d') ):
        if move_number is not None:
            print '%d: %s -> %r' % (move_number, moves, game_state)
        else:
            # A None move number marks the terminal record with the result.
            print 'Result: %s' % (moves)
    print 'Final board: %r' % (game_state)
| Python |
'''
Created on Mar 29, 2011
@author: diego
'''
import random

# Board dimension: the board is filas x filas (5x5).
filas = 5
# Marker for the black pieces on the board.
NEGRAS = 'N'
# Marker for the white pieces on the board.
BLANCAS = 'B'
# Starting squares occupied by the black ("machine") pieces.
posicionesNegras = [(0,0),(0,1),(0,2),(0,3),(0,4),(1,0),(1,4)]
# Starting squares occupied by the white (human) pieces -- mirror image.
posicionesBlancas = [(4,0),(4,1),(4,2),(4,3),(4,4),(3,0),(3,4)]
# Name -> marker and marker -> name lookup tables.
fichas = {'NEGRAS':NEGRAS,'BLANCAS':BLANCAS}
fichasReverse = {NEGRAS:'NEGRAS',BLANCAS:'BLANCAS'}
# Turn index -> marker of the side to move.
enabled_map = {0: BLANCAS, 1: NEGRAS}
# Seeded RNG so matches are reproducible.
global_random = random.Random(26)
class Config():
    '''Match-level runtime settings shared across the game modules.'''
    def __init__(self):
        # Hard cap on the number of plies before a match is cut short.
        self.cota_cantidad_jugadas = 2000
        # Running ply counter for the match in progress.
        self.cantidad_jugadas_index = 0
        # Whether boards get dumped to HTML while searching.
        self.print_html_activated = False
class globalBoard():
    '''Mutable holder so every module can share one current board.'''
    def __init__(self,board):
        self.board = board
# Module-wide singletons: the shared board holder and the settings object.
global_board = globalBoard(None)
config = Config()

def setGlobalBoard(tablero):
    '''Publish `tablero` as the shared global board.'''
    global_board.board = tablero

def getGlobalBoard():
    '''Return the board last published via setGlobalBoard().'''
    return global_board.board
| Python |
from tree import tree_node as node
from tree import tree_node
from tree import tree_edge as egde
from tree import tree
from copy import deepcopy
class specie(tree_node):
    '''Tree node representing a species: `r` is its assigned character
    (a letter, or a whole sequence before per-position splitting) and
    `R` the candidate container used by the Fitch/likelihood passes.'''
    def __init__(self, r = None, name = None, theta = None):
        node.__init__(self)
        self.R = set()      # candidate assignments; filled by the R() passes
        self.name = name
        self.theta = theta  # substitution weights (letter -> letter -> weight)
        if r:
            # A leaf arrives with a fixed character: seed R with it.
            self.r = r
            self.R.add(r)
        else:
            self.r = None
    def copy(self):
        # NOTE(review): the clone shares self.R (no set() copy), so mutating
        # the clone's R mutates the original's -- confirm this aliasing is
        # intended (genetic_tree.copy rebuilds R with set(...) itself).
        s = specie(self.r, self.name)
        s.R = self.R
        return s
class genetic_tree(tree):
    '''A tree of `specie` nodes that can be copied together with the
    per-node r/R assignment state.'''
    def copy(self):
        '''Structural copy: rebuild nodes (matched by name) and edges,
        then carry over each node's r and a fresh copy of its R set.'''
        t = genetic_tree()
        for edg in self.edges:
            s = edg.source
            d = edg.destination
            # Reuse the clone if this endpoint was copied already.
            s_in_tree = t.get_node(self.by_name(s.name))
            d_in_tree = t.get_node(self.by_name(d.name))
            if not s_in_tree: s_in_tree = tree_node(name = s.name)
            if not d_in_tree: d_in_tree = tree_node(name = d.name)
            t.add_edge(s_in_tree, d_in_tree)
        root = self.get_root()
        new_root = t.get_node(self.by_name(root.name))
        t.root = new_root
        for specie in self.nodes:
            new_tree_specie = t.get_node(self.by_name(specie.name))
            new_tree_specie.r = specie.r
            new_tree_specie.R = set(specie.R)
        return t
    def __cmp__(self, other):
        # Order trees by their textual assignment dump.
        return cmp(self.__str__(), other.__str__())
    def __hash__(self):
        # NOTE(review): hashing by identity while __cmp__ compares content
        # means equal-looking trees hash differently; this is what lets
        # several alternative assignments coexist inside one set.
        return id(self)
    def __repr__(self):
        # Map each node name to its currently assigned character.
        return str(dict((node.name, node.r ) for node in self.nodes))
    def __str__(self):
        return str(self.__repr__())
| Python |
from graph import node, edge, graph
from copy import deepcopy, copy
class tree_node(node):
    '''Graph node specialised for trees: tracks its father and sons.'''
    def __init__(self, name = None):
        node.__init__(self)
        self.sons = []      # children, in insertion order
        self.father = None  # wired up by tree_edge when the node is linked
        self.name = name
class tree_edge(edge):
    '''Directed tree edge; creating one also wires the father/son links.'''
    def __init__(self, source, destination, data = None):
        edge.__init__(self, [source], [destination])
        # Side effect: register the child under its parent and back.
        source.sons.append(destination)
        destination.father = source
        self.source = source
        self.destination = destination
class tree(graph):
    '''Rooted tree built on top of graph.  When adding edges you either
    pass nodes already in the tree or brand-new ones, which get
    registered automatically.'''
    def add_edge(self, source, destination):
        # Register both endpoints, then link them with a tree_edge
        # (which also sets the father/sons pointers).
        for nod in [source] + [destination]:
            if not nod in self.nodes:
                self.nodes.add(nod)
        edg = tree_edge(source, destination)
        self.edges.add(edg)
    def get_root(self):
        return self.root
    def copy(self):
        '''Structural copy: clone nodes by name and re-add every edge.'''
        t = tree()
        for edg in self.edges:
            s = edg.source
            d = edg.destination
            # Reuse the clone if this endpoint was copied already.
            s_in_tree = t.get_node(self.by_name(s.name))
            d_in_tree = t.get_node(self.by_name(d.name))
            if not s_in_tree: s_in_tree = tree_node(name = s.name)
            if not d_in_tree: d_in_tree = tree_node(name = d.name)
            t.add_edge(s_in_tree, d_in_tree)
        root = self.get_root()
        new_root = t.get_node(self.by_name(root.name))
        t.root = new_root
        return t
    def get_leaf(self):
        '''Descend from the root -- the inner loop leaves `nod` on the
        LAST son of each level -- and return the first leaf reached.'''
        nod = self.get_root()
        while not self.is_leaf(nod):
            for n in nod.sons:
                nod = n
                continue
        return nod
    @classmethod
    def is_leaf(self,nod):
        # A node with no sons is a leaf.
        if not nod.sons:return True
        return False
    @classmethod
    def is_root(self,nod):
        # A node with no father is the root.
        if not nod.father:return True
        return False
if __name__ == '__main__':
    # Smoke test: build a complete binary tree of depth 3 and print the
    # leaf found by get_leaf() (the right-most one, v7).
    tr = tree()
    v1 = tree_node(name = 'v1')
    v2 = tree_node(name = 'v2')
    v3 = tree_node(name = 'v3')
    v4 = tree_node(name = 'v4')
    v5 = tree_node(name = 'v5')
    v6 = tree_node(name = 'v6')
    v7 = tree_node(name = 'v7')
    tr.root = v1
    tr.add_edge(v1, v2)
    tr.add_edge(v1, v3)
    tr.add_edge(v2, v4)
    tr.add_edge(v2, v5)
    tr.add_edge(v3, v6)
    tr.add_edge(v3, v7)
    v = tr.get_leaf()
    print v.name
| Python |
class node:
    '''A graph vertex carrying an arbitrary payload in `data`.'''
    def __init__(self, data = None):
        self.data = data
class edge:
    '''A directed hyper-edge from a list of source nodes to a list of
    destination nodes, with an optional payload.'''
    def __init__(self, sources, destinations, data = None):
        self.sources = sources
        self.destinations = destinations
        self.data = data
class graph:
    '''A directed hyper-graph: vertices in `nodes`, hyper-edges in
    `edges`, plus an optional distinguished `root`.'''
    def __init__(self):
        self.root = None
        self.nodes = set()
        self.edges = set()
    def add_node(self, data):
        '''Wrap `data` in a fresh node, register it and hand it back.'''
        fresh = node(data)
        self.nodes.add(fresh)
        return fresh
    def add_edge(self, sources, destinations):
        '''when adding edges: or you get a nodes from graph, or you are
        adding new nodes to graph'''
        # Any endpoint not yet registered becomes part of the graph.
        self.nodes.update(sources.union(destinations))
        self.edges.add(edge(sources, destinations))
    def get_node(self, key = None, door = None):
        '''Return the first node satisfying predicate `key`, else None.'''
        return next((nod for nod in self.nodes if key(nod)), None)
    def get_edge(self, key):
        '''Return the first edge satisfying predicate `key`, else None.'''
        return next((edg for edg in self.edges if key(edg)), None)
    def by_name(self, name):
        '''Build a predicate matching nodes/edges whose .name == name.'''
        return lambda item: item.name == name
| Python |
from fitch import fitch_tree
from graph.genetic_tree import specie, genetic_tree
# The DNA alphabet (name kept as-is -- "alpabet" -- for compatibility).
alpabet = ['A','C','T','G']
class aml_tree(fitch_tree):
    '''Likelihood-weighted variant of the Fitch tree: node.R becomes a
    letter -> score dictionary instead of a candidate set.'''
    def __init__(self):
        # Deliberately skip fitch_tree.__init__; R() is driven manually.
        pass
    def R(self, tr):
        '''Fill node.R bottom-up for every node of `tr`.

        For a leaf, R is the indicator dict of its fixed letter.  For an
        internal node (assumed binary), R[x] = max_y(theta[x][y]*son0.R[y])
        + max_y(theta[x][y]*son1.R[y]) -- presumably a Sankoff/max-likelihood
        style dynamic-programming pass; confirm against course notes.'''
        def helper(node):
            if node.r:
                # Leaf with a fixed letter: indicator distribution.
                node.R = {}
                for letter in alpabet:
                    node.R[letter] = 0
                node.R[node.r] = 1
                return
            else:
                # Recurse first so both sons' R dicts are ready.
                for son in node.sons:
                    helper(son)
                node.R = {'A':0,'C':0,'T':0,'G':0}
                for letter1 in alpabet:
                    # Best weighted contribution from the first son...
                    val1 = 0
                    for letter2 in alpabet:
                        val = node.theta[letter1][letter2]
                        val *= node.sons[0].R[letter2]
                        if val >= val1: val1 = val
                    # ...and from the second son.
                    val2 = 0
                    for letter2 in alpabet:
                        val = node.theta[letter1][letter2]
                        val *= node.sons[1].R[letter2]
                        if val >= val2: val2 = val
                    node.R[letter1] = val1 + val2
                return
        print 'going to helper'
        helper(tr.get_root())
if __name__ == '__main__':
    # Smoke test: one substitution table shared by all nodes, over a
    # depth-3 binary tree with single-letter leaves.
    theta = {'A':{'A':0.4,'C':0.4,'G':0.1,'T':0.1},'C':{'A':0.4,'C':0.4,'G':0.1,'T':0.1},'G':{'A':0.4,'C':0.4,'G':0.1,'T':0.1},'T':{'A':0.4,'C':0.4,'G':0.1,'T':0.1}}
    tr = genetic_tree()
    v1 = specie(name = 'v1', theta = theta)
    v2 = specie(name = 'v2', theta = theta)
    v3 = specie(name = 'v3', theta = theta)
    v4 = specie('A', name = 'v4', theta = theta)
    v5 = specie('C', name = 'v5', theta = theta)
    v6 = specie('G', name = 'v6', theta = theta)
    v7 = specie('T', name = 'v7', theta = theta)
    print v7.theta
    tr.root = v1
    tr.add_edge(v1, v2)
    tr.add_edge(v1, v3)
    tr.add_edge(v2, v4)
    tr.add_edge(v2, v5)
    tr.add_edge(v3, v6)
    tr.add_edge(v3, v7)
    aml = aml_tree()
    print 'not in __init'
    print aml.R(tr)
    print tr.get_root().R
    for node in tr.nodes:
        print node.name, node.R
| Python |
# Ad-hoc experiment script: builds a small genetic tree, fetches a node
# by name, and demonstrates a countdown recursion.
from genetic_tree import *

tr = genetic_tree()
v1 = specie(name = 'v1')
v2 = specie(name = 'v2')
v3 = specie(name = 'v3')
v4 = specie('A', name = 'v4')
v5 = specie('A', name = 'v5')
v6 = specie('A', name = 'v6')
v7 = specie('G', name = 'v7')
tr.add_edge(v1, v2)
tr.add_edge(v1, v3)
tr.add_edge(v2, v4)
tr.add_edge(v2, v5)
tr.root = v1
tr.add_edge(v3, v6)
tr.add_edge(v3, v7)
v = tr.get_node(tr.by_name('v1'))
print type(v)
print v.R

def ni(x):
    '''Print x, x-1, ... and announce 'done' once a negative is reached.'''
    print x
    if x < 0:
        print 'done'
        return
    ni(x-1)

ni(5)
| Python |
from graph.genetic_tree import genetic_tree
from graph.genetic_tree import specie
from graph.tree import tree
class fitch_tree(tree):
    '''Runs Fitch's small-parsimony algorithm over a genetic_tree whose
    leaves carry equal-length sequences, enumerating every most
    parsimonious assignment for the internal nodes.'''
    def __init__(self, tr):
        self.gtr = tr
        self.possible_r_trees = []  # per position: set of fully assigned trees
        self.possible_trees = []    # cross product, as name -> sequence dicts
        self.len = len(self.gtr.get_leaf().r)  # sequence length (leaves assumed equal)
        self.score = 0              # parsimony score accumulated over positions
        for i in range(self.len):
            # Work on a copy restricted to position i of every sequence.
            index_tree = self.gtr.copy()
            for node in index_tree.nodes:
                if node.r:
                    node.r = node.r[i]
                    node.R = set([node.r])
            self.R(index_tree)  # bottom-up candidate sets
            self.possible_r_trees.append(set([index_tree]))
            # Top-down enumeration replaces the undecided tree with one
            # fully assigned tree per optimal choice.
            self.r(index_tree.get_root(),index_tree, self.possible_r_trees[i])
        self.get_possible_trees()
    def get_possible_trees(self):
        '''Combine the per-position assignments into full sequences:
        afterwards every element of possible_trees maps node name ->
        reconstructed sequence.'''
        self.names = []
        for node in self.gtr.nodes:
            self.names.append(node.name)
        dic = {}
        for name in self.names:
            dic[name] = ''
        self.possible_trees.append(dic)
        for i in range(self.len):
            new_list = []
            for place in self.possible_trees:
                for itree in self.possible_r_trees[i]:
                    # Extend each partial sequence with this position's letter.
                    new_dic = dict(place)
                    for node in itree.nodes:
                        new_dic[node.name] += node.r
                    new_list.append(new_dic)
            self.possible_trees = new_list
    def R(self, tr):
        '''Fitch bottom-up pass: node.R is the intersection of the sons'
        candidate sets when non-empty, otherwise their union (counting
        one substitution into the score).'''
        # NOTE(review): tr.score is reset here but the increments below go
        # to self.score, so the total accumulates across all positions.
        tr.score = 0
        def helper(node):
            if node.R:
                return node.R  # leaves were pre-seeded with their letter
            else:
                inter = set()
                union = set()
                # NOTE(review): with more than two sons an empty running
                # intersection falls back into the seeding branch -- only
                # binary nodes are handled exactly as Fitch prescribes.
                for son in node.sons:
                    if inter:
                        inter = inter.intersection(helper(son))
                    else:
                        inter = helper(son)
                    union = union.union(helper(son))
                if inter:#is not empty
                    node.R = inter
                else:
                    self.score += 1
                    node.R = union
                return node.R
        helper(tr.get_root())
    def r(self, node, tri ,trees):
        '''Top-down pass: fix node.r everywhere, forking the tree (a copy
        is added to `trees`) whenever several choices from node.R are
        equally parsimonious.'''
        leaf = genetic_tree.is_leaf
        is_root = genetic_tree.is_root
        if leaf(node):
            return  # leaves are already assigned
        elif node.r:
            # Already fixed on this branch of the enumeration: descend.
            for son in node.sons:
                self.r(son, tri, trees)
        elif is_root(node):
            # The root may take any candidate letter; fork one copy per
            # choice and drop the undecided original from the set.
            for r_ in node.R:
                node.r = r_
                new_tree = tri.copy()
                trees.add(new_tree)
                self.r(new_tree.get_root(),new_tree, trees)
            trees.remove(tri)
        else:
            if node.father.r not in node.R:
                # Father's letter unavailable here: every candidate costs
                # one substitution, so fork per candidate as for the root.
                for r_ in node.R:
                    node.r = r_
                    new_tree = tri.copy()
                    trees.add(new_tree)
                    self.r(new_tree.get_root(),new_tree, trees)
                trees.remove(tri)
            else:
                # Inheriting the father's letter is free, hence optimal.
                node.r = node.father.r
                for son in node.sons:
                    self.r(son, tri, trees)
if __name__ == '__main__':
    # Smoke test: four pairwise-different single-letter leaves under a
    # depth-3 binary tree; prints every optimal assignment and the score.
    tr = genetic_tree()
    v1 = specie(name = 'v1')
    v2 = specie(name = 'v2')
    v3 = specie(name = 'v3')
    v4 = specie('A', name = 'v4')
    v5 = specie('C', name = 'v5')
    v6 = specie('G', name = 'v6')
    v7 = specie('T', name = 'v7')
    tr.root = v1
    tr.add_edge(v1, v2)
    tr.add_edge(v1, v3)
    tr.add_edge(v2, v4)
    tr.add_edge(v2, v5)
    tr.add_edge(v3, v6)
    tr.add_edge(v3, v7)
    fitch = fitch_tree(tr)
    # NOTE(review): the loop variable shadows the imported `tree` class.
    for tree in fitch.possible_trees:
        print tree
    print 'fitch.score', fitch.score
| Python |
from fitch import *

if __name__ == '__main__':
    # Exercise fitch_tree on four hand-built trees (questions a-d),
    # printing every most-parsimonious internal assignment for each.
    print 'a)'
    tr = genetic_tree()
    v1 = specie(name = 'v1')
    v2 = specie(name = 'v2')
    v3 = specie(name = 'v3')
    v4 = specie('AA', name = 'v4')
    print 'v4: AA'
    v5 = specie('AC', name = 'v5')
    print 'v5: AC'
    v6 = specie('CG', name = 'v6')
    print 'v6: CG'
    v7 = specie('GC', name = 'v7')
    print 'v4: GC'  # NOTE(review): label typo -- this is v7's sequence
    tr.root = v1
    tr.add_edge(v1, v2)
    tr.add_edge(v1, v3)
    tr.add_edge(v2, v4)
    tr.add_edge(v2, v5)
    tr.add_edge(v3, v6)
    tr.add_edge(v3, v7)
    fitch = fitch_tree(tr)
    # NOTE(review): the loop variable shadows the builtin `list`.
    for list in fitch.possible_trees:
        line = ''
        for name, letter in list.items():
            line += name+': '+letter+', '
        print line[:-2]
    print 'b)'
    tr = genetic_tree()
    v1 = specie(name = 'v1')
    v2 = specie(name = 'v2')
    v3 = specie(name = 'v3')
    v4 = specie('GA', name = 'v4')
    print v4.name+':', v4.r
    v5 = specie('AC', name = 'v5')
    print v5.name+':', v5.r
    v6 = specie('AG', name = 'v6')
    print v6.name+':', v6.r
    v7 = specie('CA', name = 'v7')
    print v7.name+':', v7.r
    tr.root = v1
    tr.add_edge(v1, v2)
    tr.add_edge(v1, v3)
    tr.add_edge(v2, v4)
    tr.add_edge(v2, v5)
    tr.add_edge(v3, v6)
    tr.add_edge(v3, v7)
    fitch = fitch_tree(tr)
    for tree in fitch.possible_trees:
        line = ''
        for name, r in tree.items():
            line += name+': '+ r +', '
        print line[:-2]
    print 'c)'
    tr = genetic_tree()
    v1 = specie(name = 'v1')
    v2 = specie(name = 'v2')
    v3 = specie(name = 'v3')
    v4 = specie('AA', name = 'v4')
    print v4.name+':', v4.r
    v5 = specie('GA', name = 'v5')
    print v5.name+':', v5.r
    v6 = specie('CA', name = 'v6')
    print v6.name+':', v6.r
    v7 = specie('TA', name = 'v7')
    print v7.name+':', v7.r
    tr.root = v1
    tr.add_edge(v1, v2)
    tr.add_edge(v1, v3)
    tr.add_edge(v2, v4)
    tr.add_edge(v2, v5)
    tr.add_edge(v3, v6)
    tr.add_edge(v3, v7)
    fitch = fitch_tree(tr)
    # A hand-made optimal assignment that Fitch's enumeration does not emit.
    non_fitch = {'v1':'CA', 'v2':'CA', 'v3':'CA', 'v4':'AA', 'v5':'GA', 'v6': 'CA', 'v7':'TA', }
    for list in fitch.possible_trees:
        line = ''
        #print line
        for name, letter in list.items():
            line += name+': '+letter+', '
        print line[:-2]
    print 'these are all fitch assignments.'
    print 'and their most parsimony score is', str(fitch.score)+'.'
    print 'another assignment is:'
    line = ''
    for name, letter in non_fitch.items():
        line += name+': '+letter+', '
    print line[:-2]
    print 'd)'
    tr = genetic_tree()
    v1 = specie(name = 'v1')
    v2 = specie(name = 'v2')
    v3 = specie(name = 'v3')
    v4 = specie('AAAG', name = 'v4')
    print v4.name+':', v4.r
    v5 = specie('AAGA', name = 'v5')
    print v5.name+':', v5.r
    v6 = specie('AGAA', name = 'v6')
    print v6.name+':', v6.r
    v7 = specie('GAAA', name = 'v7')
    print v7.name+':', v7.r
    tr.root = v1
    tr.add_edge(v1, v2)
    tr.add_edge(v1, v3)
    tr.add_edge(v2, v4)
    tr.add_edge(v2, v5)
    tr.add_edge(v3, v6)
    tr.add_edge(v3, v7)
    fitch = fitch_tree(tr)
    for list in fitch.possible_trees:
        line = ''
        for name, letter in list.items():
            line += name+': '+letter+', '
        print line[:-2]
| Python |
from graph.genetic_tree import genetic_tree as tree
from graph.genetic_tree import specie
is_leaf = tree.is_leaf
'''
Q4
'''
def are_isomorphic(tree1, tree2):
    '''Return True when tree1 and tree2 are isomorphic: their branching
    structures match under some reordering of each node's children, and
    corresponding leaves carry equal names.

    Internal node names are ignored; only shape and leaf names matter.'''
    def leaf(n):
        # Same criterion as tree.is_leaf: a node with no sons is a leaf.
        return not n.sons
    def dosomorphic(node1, node2):
        # Shapes must agree: leaf-vs-internal mismatch or differing arity
        # can never be isomorphic.
        if leaf(node1) != leaf(node2) or len(node1.sons) != len(node2.sons):
            return False
        if leaf(node1):
            return node1.name == node2.name
        # BUG FIX: the original indexed node2.sons[1] for arities other
        # than 2, crashing on single-child nodes and comparing only one
        # pair otherwise.  Try every pairing of the children instead.
        from itertools import permutations
        for perm in permutations(node2.sons):
            if all(dosomorphic(a, b) for a, b in zip(node1.sons, perm)):
                return True
        return False
    return dosomorphic(tree1.get_root(), tree2.get_root())
if __name__ == '__main__':
    # Compare two trees of identical shape whose leaf names differ:
    # expected output is False.
    tr = tree()
    v1 = specie(name = 'v1')
    v2 = specie(name = 'v2')
    v3 = specie(name = 'v3')
    v4 = specie('AA', name = 'v4')
    print 'v4: AA'
    v5 = specie('AC', name = 'v5')
    print 'v5: AC'
    v6 = specie('CG', name = 'v6')
    print 'v6: CG'
    v7 = specie('GC', name = 'v7')
    print 'v4: GC'  # NOTE(review): label typo -- this is v7's sequence
    tr.root = v1
    tr.add_edge(v1, v2)
    tr.add_edge(v1, v3)
    tr.add_edge(v2, v4)
    tr.add_edge(v2, v5)
    tr.add_edge(v3, v6)
    tr.add_edge(v3, v7)
    tr2 = tree()
    v1 = specie(name = 'v1')
    v2 = specie(name = 'v2')
    v3 = specie(name = 'v')
    v4 = specie('GA', name = 'a')
    print v4.name+':', v4.r
    v5 = specie('AC', name = 'b')
    print v5.name+':', v5.r
    v6 = specie('AG', name = 'c')
    print v6.name+':', v6.r
    v7 = specie('CA', name = 'v7')
    print v7.name+':', v7.r
    tr2.root = v1
    tr2.add_edge(v1, v2)
    tr2.add_edge(v1, v3)
    tr2.add_edge(v2, v4)
    tr2.add_edge(v2, v5)
    tr2.add_edge(v3, v6)
    tr2.add_edge(v3, v7)
    print are_isomorphic(tr, tr2)
#!/usr/bin/python2.5
"""Launcher for the Fitness Record Book.

Exports the packaging-time version/service through the environment,
ensures the per-user data directory exists, moves into it and hands
control to the application's main()."""
from os import makedirs, chdir, environ
from os.path import join, expanduser

version = "<unknown>" # set during packaging
service = "fitness" # set during packaging
environ["FITNESS_VERSION"] = version
environ["FITNESS_SERVICE"] = service

home = expanduser(join("~", ".fitness"))
try:
    makedirs(home)
except OSError:
    # BUG FIX: was a bare `except:`, which also swallowed SystemExit and
    # KeyboardInterrupt.  The directory usually exists already; any other
    # filesystem failure will surface on chdir() below.
    pass
chdir(home)

import fitness.fitness
fitness.fitness.main()
| Python |
import gtk
import time
import datetime
try:
import hildon
except:
from hildonstub import hildon
class MyFloat(float):
    '''Float that renders itself as a one-decimal gtk.Entry and selects
    its content on focus so the user can overtype immediately.'''
    def entry(self,dialog):
        # Build the text entry pre-filled with this value.
        entry = gtk.Entry()
        self.setentry(entry)
        entry.connect("focus-in-event", self.focus_in_event)
        return entry
    def focus_in_event(self,widget,event):
        # Select everything so typing replaces the old value.
        print 'select',widget.get_text()
        widget.select_region(0,-1)
    def setentry(self,entry):
        # One decimal place, e.g. "72.5".
        txt='%.1f'%self
        entry.set_text(txt)
class MyInt(int):
    '''Int that renders itself as a plain gtk.Entry.'''
    def entry(self,dialog):
        entry = gtk.Entry()
        self.setentry(entry)
        return entry
    def setentry(self,entry):
        txt=str(self)
        entry.set_text(txt)
# repr and init must use same format
datefmt='%d-%b-%y'
class MyDateEditor(hildon.DateEditor):
    '''DateEditor that exposes its current date as text in `datefmt`.'''
    def get_text(self):
        dt=datetime.date(*self.get_date())
        return dt.strftime(datefmt)
class Date(object):
    '''Wrapper around datetime.date that parses/prints using the shared
    `datefmt` and plugs into the dialog "entry" protocol.'''
    def __init__(self,year,month=None,day=None):
        """Create using either a 3-tuplet or a string with the exact same
        format used in repr"""
        if isinstance(year,datetime.date):
            # Already a date object: adopt it as-is.
            self.dt=year
        elif year=="today":
            self.dt=datetime.date.today()
        else:
            if isinstance(year,str):
                # Single-string form: parse it with the shared format.
                s=year
                t=time.strptime(s,datefmt)
                year,month,day = t[0:3]
            self.dt=datetime.date(year,month,day)
        assert self.dt
    def __repr__(self):
        """ US string representation of date.
        TODO: get local format from OS"""
        return self.dt.strftime(datefmt)
    def __cmp__(self,other):
        return cmp(self.dt,other.dt)
    def __sub__(self,other):
        # Delegates to datetime.date subtraction (yields a timedelta).
        return self.dt-other.dt
    def get_date(self):
        return (self.dt.year,self.dt.month,self.dt.day)
    def entry(self,dialog):
        # Dates are edited through a calendar widget, not a text entry.
        entry=MyDateEditor()
        entry.set_date(*self.get_date())
        return entry
    def setentry(self,entry):
        # On purpose dont do anything because we dont want date to be modified
        # by completion selection
        pass
class Combo(object):
    '''Base class for closed-choice fields rendered as a combo box.
    Subclasses override `list` with the allowed option strings.'''
    list=[]     # NOTE(review): class attribute shadows the builtin `list`
    active=-1   # index of the selected option; stays -1 when txt not found
    def __init__(self,txt=None):
        # Locate txt among the options and remember its index.
        for idx,l in enumerate(self.list):
            if l==txt:
                self.active=idx
                break
    def __str__(self):
        return self.list[self.active]
    def entry(self,dialog):
        e=gtk.combo_box_new_text()
        for l in self.list:
            e.append_text(l)
        e.set_active(self.active)
        # All other entries in the dialog box will be text entries (gtk.Entry)
        # and their text value will be taken using get_text method.
        e.get_text=e.get_active_text
        return e
    def setentry(self,entry):
        entry.set_active(self.active)
class PAUnit(Combo):
    '''Unit choices for physical-activity records.'''
    list=["Item","Minute","Mile"]
class FoodUnit(Combo):
    '''Unit choices for food records.'''
    list=['Item','Tsp','Tbsp','Cup','Ounce','Slice','Bowl']
class Completion(object):
    # An envelope to hold text that can be entered with gtk.Entry and
    # inline completion drawn from the parent dialog's stored entries.
    def __init__(self,txt=''):
        self.txt=txt
    def entry(self,dialog):
        self.dialog=dialog
        p=dialog.parentDialog
        self.parentDialog=p
        l=p.dictlist  # completion model: previously entered descriptions
        entry = gtk.Entry()
        entry.set_text(self.txt)
        completion = gtk.EntryCompletion()
        completion.set_inline_completion(True)
        completion.set_model(l)
        entry.set_completion(completion)
        completion.set_text_column(0)
        completion.connect("insert-prefix", self.insert_cb)
        return entry
    def insert_cb(self,completion,prefix):
        # When a known description is completed, refill the dialog's other
        # entry widgets from the stored object for that description.
        print 'insert',prefix
        d=self.dialog
        p=self.parentDialog
        o=p.dict.get(prefix,None)
        if o:
            for r,l in enumerate(d.attributes):
                attr=o.__getattribute__(l)
                attr.setentry(d.entries[r])
    def __str__(self):
        return self.txt
    def setentry(self,entry):
        pass
| Python |
# Hildon Stub
import pygtk
pygtk.require('2.0')
import gtk, pango
import time
import datetime
class Program(object):
    '''Stub for hildon.Program: accepts windows and signal hookups but
    does nothing with them.'''
    def __init__(self):
        pass
    def add_window(self,window):
        pass
    def connect(self,event,cb):
        pass
class HildonWidget(object):
    '''Base stub widget: run() and destroy() are no-ops.'''
    def run(self):
        pass
    def destroy(self):
        pass
class CalendarPopup(HildonWidget):
    '''Stub for hildon.CalendarPopup: a modal gtk.Dialog wrapping a
    gtk.Calendar plus demo flag/font/signal panes.  After the dialog is
    closed, get_date() returns the picked (year, month, day) with a
    1-based month.  Note: self.window holds the gtk.Calendar widget,
    self.root_window the dialog itself.'''
    DEF_PAD = 10
    DEF_PAD_SMALL = 5
    TM_YEAR_BASE = 1900
    # Bit positions for the gtk.Calendar display-option mask.
    calendar_show_header = 0
    calendar_show_days = 1
    calendar_month_change = 2
    calendar_show_week = 3
    def calendar_date_to_string(self):
        year, month, day = self.window.get_date()
        # gtk.Calendar months are 0-based; mktime expects 1-based.
        mytime = time.mktime((year, month+1, day, 0, 0, 0, 0, 0, -1))
        return time.strftime("%x", time.localtime(mytime))
    def calendar_set_signal_strings(self, sig_str):
        # Shift the three "recent signals" labels down by one slot.
        # NOTE(review): gtk.Label has get_text(), not get() -- confirm
        # these .get() calls actually work in the target PyGTK.
        prev_sig = self.prev_sig.get()
        self.prev2_sig.set_text(prev_sig)
        prev_sig = self.last_sig.get()
        self.prev_sig.set_text(prev_sig)
        self.last_sig.set_text(sig_str)
    def calendar_month_changed(self, widget):
        buffer = "month_changed: %s" % self.calendar_date_to_string()
        self.calendar_set_signal_strings(buffer)
    def calendar_day_selected(self, widget):
        buffer = "day_selected: %s" % self.calendar_date_to_string()
        self.calendar_set_signal_strings(buffer)
    def calendar_day_selected_double_click(self, widget):
        buffer = "day_selected_double_click: %s"
        buffer = buffer % self.calendar_date_to_string()
        self.calendar_set_signal_strings(buffer)
        # Double click toggles the day's marked state.
        year, month, day = self.window.get_date()
        if self.marked_date[day-1] == 0:
            self.window.mark_day(day)
            self.marked_date[day-1] = 1
        else:
            self.window.unmark_day(day)
            self.marked_date[day-1] = 0
    def calendar_prev_month(self, widget):
        buffer = "prev_month: %s" % self.calendar_date_to_string()
        self.calendar_set_signal_strings(buffer)
    def calendar_next_month(self, widget):
        buffer = "next_month: %s" % self.calendar_date_to_string()
        self.calendar_set_signal_strings(buffer)
    def calendar_prev_year(self, widget):
        buffer = "prev_year: %s" % self.calendar_date_to_string()
        self.calendar_set_signal_strings(buffer)
    def calendar_next_year(self, widget):
        buffer = "next_year: %s" % self.calendar_date_to_string()
        self.calendar_set_signal_strings(buffer)
    def calendar_set_flags(self):
        # Fold the five checkbox settings into the calendar option bitmask.
        options = 0
        for i in range(5):
            if self.settings[i]:
                options = options + (1<<i)
        if self.window:
            self.window.display_options(options)
    def calendar_toggle_flag(self, toggle):
        # Find which checkbox fired and flip the matching setting.
        j = 0
        for i in range(5):
            if self.flag_checkboxes[i] == toggle:
                j = i
        self.settings[j] = not self.settings[j]
        self.calendar_set_flags()
    def calendar_font_selection_ok(self, button):
        self.font = self.font_dialog.get_font_name()
        if self.window:
            font_desc = pango.FontDescription(self.font)
            if font_desc:
                self.window.modify_font(font_desc)
    def calendar_select_font(self, button):
        # Lazily build the font dialog; a second click hides/destroys it.
        if not self.font_dialog:
            window = gtk.FontSelectionDialog("Font Selection Dialog")
            self.font_dialog = window
            window.set_position(gtk.WIN_POS_MOUSE)
            window.connect("destroy", self.font_dialog_destroyed)
            window.ok_button.connect("clicked",
                                     self.calendar_font_selection_ok)
            window.cancel_button.connect_object("clicked",
                                                lambda wid: wid.destroy(),
                                                self.font_dialog)
        window = self.font_dialog
        if not (window.flags() & gtk.VISIBLE):
            window.show()
        else:
            window.destroy()
            self.font_dialog = None
    def font_dialog_destroyed(self, data=None):
        self.font_dialog = None
    def __init__(self,parent_window,year,month,day):
        self.dt=(year,month,day)
        flags = [
            "Show Heading",
            "Show Day Names",
            "No Month Change",
            "Show Week Numbers",
        ]
        self.window = None
        self.font = None
        self.font_dialog = None
        self.flag_checkboxes = 5*[None]
        self.settings = 5*[0]
        self.marked_date = 31*[0]
        # Modal dialog hosting the calendar plus the demo panes.
        window=gtk.Dialog("Options",parent_window,
            gtk.DIALOG_MODAL | gtk.DIALOG_DESTROY_WITH_PARENT | gtk.DIALOG_NO_SEPARATOR,
            (gtk.STOCK_CANCEL, gtk.RESPONSE_REJECT,
             gtk.STOCK_OK, gtk.RESPONSE_ACCEPT
             ))
        #window = gtk.Window(gtk.WINDOW_TOPLEVEL)
        self.root_window=window
        window.set_title("Calendar")
        #window.set_border_width(5)
        #window.connect("destroy", self.delete_event)
        #window.set_resizable(False)
        #vbox = gtk.VBox(False, self.DEF_PAD)
        #window.add(vbox)
        vbox=window.vbox
        # The top part of the window, Calendar, flags and fontsel.
        hbox = gtk.HBox(False, self.DEF_PAD)
        vbox.pack_start(hbox, True, True, self.DEF_PAD)
        hbbox = gtk.HButtonBox()
        hbox.pack_start(hbbox, False, False, self.DEF_PAD)
        hbbox.set_layout(gtk.BUTTONBOX_SPREAD)
        hbbox.set_spacing(5)
        # Calendar widget
        frame = gtk.Frame("Calendar")
        hbbox.pack_start(frame, False, True, self.DEF_PAD)
        calendar = gtk.Calendar()
        # gtk.Calendar wants a 0-based month.
        calendar.select_month(month-1, year)
        self.window = calendar
        self.calendar_set_flags()
        calendar.mark_day(day)
        self.marked_date[day-1] = 1
        frame.add(calendar)
        calendar.connect("month_changed", self.calendar_month_changed)
        calendar.connect("day_selected", self.calendar_day_selected)
        calendar.connect("day_selected_double_click",
                         self.calendar_day_selected_double_click)
        calendar.connect("prev_month", self.calendar_prev_month)
        calendar.connect("next_month", self.calendar_next_month)
        calendar.connect("prev_year", self.calendar_prev_year)
        calendar.connect("next_year", self.calendar_next_year)
        separator = gtk.VSeparator()
        hbox.pack_start(separator, False, True, 0)
        vbox2 = gtk.VBox(False, self.DEF_PAD)
        hbox.pack_start(vbox2, False, False, self.DEF_PAD)
        # Build the Right frame with the flags in
        frame = gtk.Frame("Flags")
        vbox2.pack_start(frame, True, True, self.DEF_PAD)
        vbox3 = gtk.VBox(True, self.DEF_PAD_SMALL)
        frame.add(vbox3)
        for i in range(len(flags)):
            toggle = gtk.CheckButton(flags[i])
            toggle.connect("toggled", self.calendar_toggle_flag)
            vbox3.pack_start(toggle, True, True, 0)
            self.flag_checkboxes[i] = toggle
        # Build the right font-button
        button = gtk.Button("Font...")
        button.connect("clicked", self.calendar_select_font)
        vbox2.pack_start(button, False, False, 0)
        # Build the Signal-event part.
        frame = gtk.Frame("Signal events")
        vbox.pack_start(frame, True, True, self.DEF_PAD)
        vbox2 = gtk.VBox(True, self.DEF_PAD_SMALL)
        frame.add(vbox2)
        hbox = gtk.HBox (False, 3)
        vbox2.pack_start(hbox, False, True, 0)
        label = gtk.Label("Signal:")
        hbox.pack_start(label, False, True, 0)
        self.last_sig = gtk.Label("")
        hbox.pack_start(self.last_sig, False, True, 0)
        hbox = gtk.HBox (False, 3)
        vbox2.pack_start(hbox, False, True, 0)
        label = gtk.Label("Previous signal:")
        hbox.pack_start(label, False, True, 0)
        self.prev_sig = gtk.Label("")
        hbox.pack_start(self.prev_sig, False, True, 0)
        hbox = gtk.HBox (False, 3)
        vbox2.pack_start(hbox, False, True, 0)
        label = gtk.Label("Second previous signal:")
        hbox.pack_start(label, False, True, 0)
        self.prev2_sig = gtk.Label("")
        hbox.pack_start(self.prev2_sig, False, True, 0)
        bbox = gtk.HButtonBox ()
        vbox.pack_start(bbox, False, False, 0)
        bbox.set_layout(gtk.BUTTONBOX_END)
        button = gtk.Button("Close")
        button.connect("clicked", self.delete_event)
        bbox.add(button)
        button.set_flags(gtk.CAN_DEFAULT)
        button.grab_default()
        window.show_all()
    def delete_event(self, widget=None): #, event, data=None):
        # Read the picked date back (month converted to 1-based) and
        # close the dialog.
        self.dt=self.window.get_date()
        self.dt=(self.dt[0],self.dt[1]+1,self.dt[2])
        self.root_window.destroy()
        print self.get_date()
        return False
    def get_date(self):
        return self.dt
    def run(self):
        self.root_window.run()
        self.delete_event()
class Window(gtk.Window):
    '''Stub for hildon.Window: a plain gtk.Window whose menu bar either
    lives inside the window (single_window) or in its own toplevel.'''
    # If false, the menu will be held in a seperate small window
    single_window=True
    def __init__(self):
        gtk.Window.__init__(self)
        if self.single_window:
            # Menu bar and content share one vertical box in this window.
            self.vbox = gtk.VBox(False, 0)
            gtk.Window.add(self,self.vbox)
            self.vbox.show()
            self.mwindow=self
        else:
            # create a seperate window just for the menu bar
            self.mwindow = gtk.Window(gtk.WINDOW_TOPLEVEL)
            self.mwindow.set_size_request(100, 20)
            self.mwindow.set_title("Menu")
            self.mwindow.connect("delete_event", lambda w,e: gtk.main_quit())
        self.menu_bar = gtk.MenuBar()
        # In single-window mode mwindow is self, so this goes through the
        # overridden add() below and lands in the vbox.
        self.mwindow.add(self.menu_bar)
        self.menu_bar.show()
    def add(self,widget):
        if self.single_window:
            self.vbox.pack_start(widget,False,False)
        else:
            gtk.Window.add(self,widget)
    def set_menu(self,menu):
        # Wrap the given menu under a single "Menu" root item.
        root_menu=menu
        root_menu = gtk.MenuItem("Menu")
        root_menu.show()
        root_menu.set_submenu(menu)
        self.menu_bar.append(root_menu)
        self.mwindow.show()
class DateEditor(gtk.Button):
    '''Stub for hildon.DateEditor: a button showing the current date;
    clicking it pops up a CalendarPopup to pick a new one.'''
    fmt='%d-%b-%y'
    def __init__(self):
        gtk.Button.__init__(self)
        self.connect("clicked", self.clicked_callback)
        self.year,self.month,self.day=(2007,12,31)
        self.draw()
    def clicked_callback(self,data=None):
        # Let the user pick a date, then adopt it and refresh the label.
        dialog = CalendarPopup (None, self.year, self.month, self.day)
        dialog.run()
        self.year,self.month,self.day=dialog.get_date()
        dialog.destroy()
        self.draw()
    def draw(self):
        # Show the current date as the button label.
        dt=datetime.date(self.year,self.month,self.day)
        self.set_label(dt.strftime(self.fmt)) # date2string(dt))
    def set_date(self,year,month,day):
        self.year,self.month,self.day=(year,month,day)
        self.draw()
    def get_date(self):
        return (self.year,self.month,self.day)
class HildonStub(object):
    '''Namespace object mimicking the hildon module with the stubs above.'''
    def __init__(self):
        self.Program = Program
        self.HildonWidget = HildonWidget
        self.CalendarPopup = CalendarPopup
        self.Window = Window
        self.DateEditor = DateEditor

# Import target: `from hildonstub import hildon` drops in for real hildon.
hildon = HildonStub()
| Python |
#!/usr/bin/env python2.5
# TODO dictonary of stored values should have a key based on description and
# unit. For this to work there should be an interactive recall of data from
# the dictonary which is based also on unit selected in dialog box
# there is no problem in upgrading existing CSV files because the _dict
# files already have the unit in each line.
# TODO Numeric fields should have the NumLock turned on by default.
# This is done with gtk.GetEntry.set_input_mode which is a Hildon extension.
# Sadly this is not in the current version of pymaemo c1.0-2
# TODO remove old records in DateObjList accoring to OptionsDialog.history
# TODO add to OptionsDialog the folder location for CSV files
# TODO DateObj.run.Del
# TODO current manual date range selection is buggy. Instead popup a dialog box in which Start/End dates are selected and validated.
# TODO validate date range
# TODO mark the location of input errors in dialog
# TODO localization
# The below license appears in the About dialog-box
license = """Fitness Record Book
2007-8 Ehud (Udi) Ben-Reuven & Ofer Barkai
Derived from:
Copyright 1997 Eric W. Sink
LEGAL DISCLAIMER - The author(s) of this software are not medical
practitioners of any kind. We do not have the education, experience,
license, credentials or desire to provide health-related advice. You
should consult a physician before undertaking any activities which are
intended to improve your health. By using this software, you agree
that we cannot be held responsible for any changes in your physical
condition or health.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
import gtk
try:
import osso
except:
from ossostub import osso
try:
import hildon
except:
from hildonstub import hildon
# The version and service name of the application
from os import getenv
version = getenv("FITNESS_VERSION", "<unknown>")
service = getenv("FITNESS_SERVICE", "fitness")
# the application main window launches lists which in turn launch
# dialogs which in turn are made from items
from items import *
from dialogs import *
from lists import *
class AboutDialog(gtk.AboutDialog):
    """Modal "About" box shown from the menu and on first run.

    Construction is self-contained: the dialog is populated, run modally
    and destroyed before __init__ returns, so callers simply write
    AboutDialog() for its side effect.
    """
    def __init__(self):
        gtk.AboutDialog.__init__(self)
        #self.set_size_request(*SZ)
        self.set_logo_icon_name("fitness")
        self.set_name("Fitness Record Book")
        self.set_version(version)
        self.set_website("http://benreuven.com/udi/diet")
        self.set_comments("You must agree to the license\nbefore using this program")
        self.set_authors(["Eric W. Sink","Ehud (Udi) Ben-Reuven","Ofer Barkai"])
        self.set_copyright("""Copyright (c) 1997 Eric W. Sink\nCopyright (c) 2000-4 Ehud (Udi) Ben-Reuven\nCopyright (c) 2007-8 Ehud (Udi) Ben-Reuven & Ofer Barkai""")
        self.set_license(license)
        # run modally right away and tear down; the object is not kept
        self.run()
        self.destroy()
class FitnessApp(hildon.Program):
    """Main application window for the Fitness Record Book.

    Shows calorie totals over a user-selected date range and hosts the
    Food / Physical-Activity / Weight list dialogs.  Data is persisted
    through the osso Autosave mechanism.
    """
    def dialog_callback(self, widget, data):
        # The button index selects which list dialog to open.
        if data == 0:
            self.foodDialog.run(self.window)
        elif data == 1:
            self.paDialog.run(self.window)
        else:
            self.weightDialog.run(self.window)
        self.draw()
    def dtrange_callback(self, widget, data):
        # Preset range buttons: 0 = today, 1 = yesterday, anything else = week.
        if data == 0:
            self.today()
        elif data == 1:
            self.yesterday()
        else:
            self.week()
        self.draw()
    def draw(self):
        """Recompute the summary for the selected date range and refresh labels."""
        start = Date(*self.sbutton.get_date())
        end = Date(*self.ebutton.get_date())
        days = (end - start).days + 1
        eaten = self.foodDialog.cal_in_range(start, end)
        burned = self.paDialog.cal_in_range(start, end)
        metabolism = days * self.weightDialog.last_weight() * self.optionsDialog.met
        balance = eaten - burned
        net = metabolism - balance
        behavioral = balance / days / self.optionsDialog.met
        remaining = days * self.optionsDialog.weight * self.optionsDialog.met - balance
        # first five labels are plain calorie/weight figures
        for label, value in zip(self.values,
                                (eaten, burned, metabolism, net, behavioral)):
            label.set_text('%.1f' % value)
        self.values[5].set_text(str(days))
        self.values[6].set_text('%.1f' % remaining)
    def save(self, user_data=None):
        """Autosave callback: persist options and all three record lists."""
        self.optionsDialog.save()
        self.foodDialog.save()
        self.paDialog.save()
        self.weightDialog.save()
    def newfood_response(self, widget):
        # Open the "new food" dialog directly on the main window.
        self.foodDialog.parent_window = self.window
        self.foodDialog.dialog = self.window
        self.foodDialog.new_event(None)
    def save_response(self, widget):
        self.force_save()
    def menuitem_response(self, widget, data):
        # Menu order: 0 = Save, 1 = Options, 2 = About, 3 = Close.
        if data == 1:
            self.optionsDialog.run(self.window)
            #self.draw()
        elif data == 0:
            self.force_save()
        elif data == 2:
            AboutDialog()
        elif data == 3:
            self.quit(None)
    def today(self):
        """Select today as both ends of the date range."""
        day = datetime.date.today()
        self.sbutton.set_date(day.year, day.month, day.day)
        self.ebutton.set_date(day.year, day.month, day.day)
    def yesterday(self):
        """Select yesterday as both ends of the date range."""
        day = datetime.date.today() - datetime.timedelta(1)
        self.sbutton.set_date(day.year, day.month, day.day)
        self.ebutton.set_date(day.year, day.month, day.day)
    def week(self):
        """Select the range from the start of this week (Monday) to today."""
        day = datetime.date.today()
        self.ebutton.set_date(day.year, day.month, day.day)
        day -= datetime.timedelta(day.weekday())
        self.sbutton.set_date(day.year, day.month, day.day)
    # Called whenever the application is sent to background or brought to
    # the foreground; data is flushed on the way to the background so the
    # process can safely hibernate.
    def topmost_change(self, arg, user_data):
        if self.get_is_topmost():
            self.set_can_hibernate(False)
        else:
            self.autosave.force_autosave()
            self.set_can_hibernate(True)
    def updateobj(self, obj):
        """Record that user data changed and refresh the summary."""
        self.autosave.userdata_changed()
        self.draw()
    def force_save(self):
        self.autosave.force_autosave()
    def quit(self, evt):
        self.force_save()
        gtk.main_quit()
    def date_change(self, gobject, property_spec, isstart):
        self.draw()
    def __init__(self):
        hildon.Program.__init__(self)
        self.context = osso.Context(service, version, False)
        self.autosave = osso.Autosave(self.context)
        # because of a bug in osso.Autosave a callback argument must be passed
        self.autosave.set_autosave_callback(self.save, 1)
        self.connect("notify::is-topmost", self.topmost_change)
        gtk.set_application_name("")
        self.window = hildon.Window()
        self.window.set_size_request(*SZ)
        self.window.set_title("Fitness Record Book")
        self.window.connect("destroy", self.quit)
        self.add_window(self.window)
        # application menu ("Save" deliberately first)
        menu = gtk.Menu()
        for idx, caption in enumerate(["Save","Options...","About...","Close"]):
            item = gtk.MenuItem(caption)
            menu.append(item)
            item.connect("activate", self.menuitem_response, idx)
            item.show()
        self.window.set_menu(menu)
        menu.show()
        # main layout: preset buttons / date editors / summary / dialog buttons
        table = gtk.Table(11, 3, False)
        row = 0
        for col, caption in enumerate(["Today","Yesterday","This Week"]):
            btn = gtk.Button(caption)
            btn.connect("clicked", self.dtrange_callback, col)
            table.attach(btn, col, col + 1, row, row + 2)
            btn.show()
        row += 2
        editor = hildon.DateEditor()
        self.sbutton = editor
        # FIXME there is no DateEditor event indicating the date changed,
        # so listen on the year/month/day properties instead
        editor.connect("notify::year", self.date_change, True)
        editor.connect("notify::month", self.date_change, True)
        editor.connect("notify::day", self.date_change, True)
        table.attach(editor, 0, 1, row, row + 1)
        editor.show()
        thru = gtk.Label("thru")
        table.attach(thru, 1, 2, row, row + 1)
        thru.show()
        editor = hildon.DateEditor()
        self.ebutton = editor
        editor.connect("notify::year", self.date_change, False)
        editor.connect("notify::month", self.date_change, False)
        editor.connect("notify::day", self.date_change, False)
        table.attach(editor, 2, 3, row, row + 1)
        editor.show()
        row += 1
        self.values = []
        for caption in ["Calories In","PA Calories","Metabolism","Net Calories",
                        "Behaviorial Weight","Days in Range","Cals Left to Eat"]:
            lbl = gtk.Label(caption)
            lbl.set_alignment(0, 0)
            table.attach(lbl, 0, 2, row, row + 1)
            lbl.show()
            lbl = gtk.Label("0.0")
            lbl.set_alignment(0, 0)
            table.attach(lbl, 2, 3, row, row + 1)
            lbl.show()
            self.values.append(lbl)
            row += 1
        for col, caption in enumerate(["Food","PA","Weight"]):
            btn = gtk.Button(caption)
            btn.connect("clicked", self.dialog_callback, col)
            table.attach(btn, col, col + 1, row, row + 2)
            btn.show()
        row = row + 2
        btn = gtk.Button("New Food")
        self.window.my_focus = btn
        btn.connect("clicked", self.newfood_response)
        table.attach(btn, 0, 2, row, row + 2)
        btn.show()
        btn = gtk.Button("Save")
        btn.connect("clicked", self.save_response)
        table.attach(btn, 2, 3, row, row + 2)
        btn.show()
        row = row + 2
        self.window.add(table)
        table.show()
        self.window.show()
        self.optionsDialog = OptionsDialog(self)
        self.weightDialog = WeightList(self)
        self.weightDialog.build_run(self.window)
        self.paDialog = PAList(self)
        self.paDialog.build_run(self.window)
        self.foodDialog = FoodList(self)
        self.foodDialog.build_run(self.window)
        self.load()
        if self.optionsDialog.is_new:
            # first run: show the license, then force the options dialog
            AboutDialog()
            self.optionsDialog.run(self.window)
        self.window.set_focus(self.window.my_focus)
    def load(self):
        self.today()
        self.draw()
    def run(self):
        gtk.main()
def main():
    """Create the application object and enter the GTK main loop."""
    application = FitnessApp()
    application.run()
if __name__=='__main__':
main()
| Python |
#!/usr/bin/env python2.5
# TODO dictionary of stored values should have a key based on description and
# unit. For this to work there should be an interactive recall of data from
# the dictionary which is based also on unit selected in dialog box
# there is no problem in upgrading existing CSV files because the _dict
# files already have the unit in each line.
# TODO Numeric fields should have the NumLock turned on by default.
# This is done with gtk.GetEntry.set_input_mode which is a Hildon extension.
# Sadly this is not in the current version of pymaemo c1.0-2
# TODO remove old records in DateObjList according to OptionsDialog.history
# TODO add to OptionsDialog the folder location for CSV files
# TODO DateObj.run.Del
# TODO current manual date range selection is buggy. Instead popup a dialog box in which Start/End dates are selected and validated.
# TODO validate date range
# TODO mark the location of input errors in dialog
# TODO localization
# The below license appears in the About dialog-box
license = """Fitness Record Book
2007-8 Ehud (Udi) Ben-Reuven & Ofer Barkai
Derived from:
Copyright 1997 Eric W. Sink
LEGAL DISCLAIMER - The author(s) of this software are not medical
practitioners of any kind. We do not have the education, experience,
license, credentials or desire to provide health-related advice. You
should consult a physician before undertaking any activities which are
intended to improve your health. By using this software, you agree
that we cannot be held responsible for any changes in your physical
condition or health.
This program is free software; you can redistribute it and/or
modify it under the terms of the GNU General Public License
as published by the Free Software Foundation; either version 2
of the License, or (at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; if not, write to the Free Software
Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
import gtk
try:
import osso
except:
from ossostub import osso
try:
import hildon
except:
from hildonstub import hildon
# The version and service name of the application
from os import getenv
version = getenv("FITNESS_VERSION", "<unknown>")
service = getenv("FITNESS_SERVICE", "fitness")
# the application main window launches lists which in turn launch
# dialogs which in turn are made from items
from items import *
from dialogs import *
from lists import *
class AboutDialog(gtk.AboutDialog):
    """Modal "About" box shown from the menu and on first run.

    Construction is self-contained: the dialog is populated, run modally
    and destroyed before __init__ returns, so callers simply write
    AboutDialog() for its side effect.
    """
    def __init__(self):
        gtk.AboutDialog.__init__(self)
        #self.set_size_request(*SZ)
        self.set_logo_icon_name("fitness")
        self.set_name("Fitness Record Book")
        self.set_version(version)
        self.set_website("http://benreuven.com/udi/diet")
        self.set_comments("You must agree to the license\nbefore using this program")
        self.set_authors(["Eric W. Sink","Ehud (Udi) Ben-Reuven","Ofer Barkai"])
        self.set_copyright("""Copyright (c) 1997 Eric W. Sink\nCopyright (c) 2000-4 Ehud (Udi) Ben-Reuven\nCopyright (c) 2007-8 Ehud (Udi) Ben-Reuven & Ofer Barkai""")
        self.set_license(license)
        # run modally right away and tear down; the object is not kept
        self.run()
        self.destroy()
class FitnessApp(hildon.Program):
    """Main application window for the Fitness Record Book.

    Shows calorie totals over a user-selected date range and hosts the
    Food / Physical-Activity / Weight list dialogs.  Data is persisted
    through the osso Autosave mechanism.
    """
    def dialog_callback(self, widget, data):
        # The button index selects which list dialog to open.
        if data == 0:
            self.foodDialog.run(self.window)
        elif data == 1:
            self.paDialog.run(self.window)
        else:
            self.weightDialog.run(self.window)
        self.draw()
    def dtrange_callback(self, widget, data):
        # Preset range buttons: 0 = today, 1 = yesterday, anything else = week.
        if data == 0:
            self.today()
        elif data == 1:
            self.yesterday()
        else:
            self.week()
        self.draw()
    def draw(self):
        """Recompute the summary for the selected date range and refresh labels."""
        start = Date(*self.sbutton.get_date())
        end = Date(*self.ebutton.get_date())
        days = (end - start).days + 1
        eaten = self.foodDialog.cal_in_range(start, end)
        burned = self.paDialog.cal_in_range(start, end)
        metabolism = days * self.weightDialog.last_weight() * self.optionsDialog.met
        balance = eaten - burned
        net = metabolism - balance
        behavioral = balance / days / self.optionsDialog.met
        remaining = days * self.optionsDialog.weight * self.optionsDialog.met - balance
        # first five labels are plain calorie/weight figures
        for label, value in zip(self.values,
                                (eaten, burned, metabolism, net, behavioral)):
            label.set_text('%.1f' % value)
        self.values[5].set_text(str(days))
        self.values[6].set_text('%.1f' % remaining)
    def save(self, user_data=None):
        """Autosave callback: persist options and all three record lists."""
        self.optionsDialog.save()
        self.foodDialog.save()
        self.paDialog.save()
        self.weightDialog.save()
    def newfood_response(self, widget):
        # Open the "new food" dialog directly on the main window.
        self.foodDialog.parent_window = self.window
        self.foodDialog.dialog = self.window
        self.foodDialog.new_event(None)
    def save_response(self, widget):
        self.force_save()
    def menuitem_response(self, widget, data):
        # Menu order: 0 = Save, 1 = Options, 2 = About, 3 = Close.
        if data == 1:
            self.optionsDialog.run(self.window)
            #self.draw()
        elif data == 0:
            self.force_save()
        elif data == 2:
            AboutDialog()
        elif data == 3:
            self.quit(None)
    def today(self):
        """Select today as both ends of the date range."""
        day = datetime.date.today()
        self.sbutton.set_date(day.year, day.month, day.day)
        self.ebutton.set_date(day.year, day.month, day.day)
    def yesterday(self):
        """Select yesterday as both ends of the date range."""
        day = datetime.date.today() - datetime.timedelta(1)
        self.sbutton.set_date(day.year, day.month, day.day)
        self.ebutton.set_date(day.year, day.month, day.day)
    def week(self):
        """Select the range from the start of this week (Monday) to today."""
        day = datetime.date.today()
        self.ebutton.set_date(day.year, day.month, day.day)
        day -= datetime.timedelta(day.weekday())
        self.sbutton.set_date(day.year, day.month, day.day)
    # Called whenever the application is sent to background or brought to
    # the foreground; data is flushed on the way to the background so the
    # process can safely hibernate.
    def topmost_change(self, arg, user_data):
        if self.get_is_topmost():
            self.set_can_hibernate(False)
        else:
            self.autosave.force_autosave()
            self.set_can_hibernate(True)
    def updateobj(self, obj):
        """Record that user data changed and refresh the summary."""
        self.autosave.userdata_changed()
        self.draw()
    def force_save(self):
        self.autosave.force_autosave()
    def quit(self, evt):
        self.force_save()
        gtk.main_quit()
    def date_change(self, gobject, property_spec, isstart):
        self.draw()
    def __init__(self):
        hildon.Program.__init__(self)
        self.context = osso.Context(service, version, False)
        self.autosave = osso.Autosave(self.context)
        # because of a bug in osso.Autosave a callback argument must be passed
        self.autosave.set_autosave_callback(self.save, 1)
        self.connect("notify::is-topmost", self.topmost_change)
        gtk.set_application_name("")
        self.window = hildon.Window()
        self.window.set_size_request(*SZ)
        self.window.set_title("Fitness Record Book")
        self.window.connect("destroy", self.quit)
        self.add_window(self.window)
        # application menu ("Save" deliberately first)
        menu = gtk.Menu()
        for idx, caption in enumerate(["Save","Options...","About...","Close"]):
            item = gtk.MenuItem(caption)
            menu.append(item)
            item.connect("activate", self.menuitem_response, idx)
            item.show()
        self.window.set_menu(menu)
        menu.show()
        # main layout: preset buttons / date editors / summary / dialog buttons
        table = gtk.Table(11, 3, False)
        row = 0
        for col, caption in enumerate(["Today","Yesterday","This Week"]):
            btn = gtk.Button(caption)
            btn.connect("clicked", self.dtrange_callback, col)
            table.attach(btn, col, col + 1, row, row + 2)
            btn.show()
        row += 2
        editor = hildon.DateEditor()
        self.sbutton = editor
        # FIXME there is no DateEditor event indicating the date changed,
        # so listen on the year/month/day properties instead
        editor.connect("notify::year", self.date_change, True)
        editor.connect("notify::month", self.date_change, True)
        editor.connect("notify::day", self.date_change, True)
        table.attach(editor, 0, 1, row, row + 1)
        editor.show()
        thru = gtk.Label("thru")
        table.attach(thru, 1, 2, row, row + 1)
        thru.show()
        editor = hildon.DateEditor()
        self.ebutton = editor
        editor.connect("notify::year", self.date_change, False)
        editor.connect("notify::month", self.date_change, False)
        editor.connect("notify::day", self.date_change, False)
        table.attach(editor, 2, 3, row, row + 1)
        editor.show()
        row += 1
        self.values = []
        for caption in ["Calories In","PA Calories","Metabolism","Net Calories",
                        "Behaviorial Weight","Days in Range","Cals Left to Eat"]:
            lbl = gtk.Label(caption)
            lbl.set_alignment(0, 0)
            table.attach(lbl, 0, 2, row, row + 1)
            lbl.show()
            lbl = gtk.Label("0.0")
            lbl.set_alignment(0, 0)
            table.attach(lbl, 2, 3, row, row + 1)
            lbl.show()
            self.values.append(lbl)
            row += 1
        for col, caption in enumerate(["Food","PA","Weight"]):
            btn = gtk.Button(caption)
            btn.connect("clicked", self.dialog_callback, col)
            table.attach(btn, col, col + 1, row, row + 2)
            btn.show()
        row = row + 2
        btn = gtk.Button("New Food")
        self.window.my_focus = btn
        btn.connect("clicked", self.newfood_response)
        table.attach(btn, 0, 2, row, row + 2)
        btn.show()
        btn = gtk.Button("Save")
        btn.connect("clicked", self.save_response)
        table.attach(btn, 2, 3, row, row + 2)
        btn.show()
        row = row + 2
        self.window.add(table)
        table.show()
        self.window.show()
        self.optionsDialog = OptionsDialog(self)
        self.weightDialog = WeightList(self)
        self.weightDialog.build_run(self.window)
        self.paDialog = PAList(self)
        self.paDialog.build_run(self.window)
        self.foodDialog = FoodList(self)
        self.foodDialog.build_run(self.window)
        self.load()
        if self.optionsDialog.is_new:
            # first run: show the license, then force the options dialog
            AboutDialog()
            self.optionsDialog.run(self.window)
        self.window.set_focus(self.window.my_focus)
    def load(self):
        self.today()
        self.draw()
    def run(self):
        gtk.main()
def main():
    """Create the application object and enter the GTK main loop."""
    application = FitnessApp()
    application.run()
if __name__=='__main__':
main()
| Python |
#!/usr/bin/python2.5
# Launcher for the Fitness Record Book.
#
# Publishes the packaged version/service identifiers through the
# environment, makes sure the per-user data directory exists and starts
# the application from inside it (the app keeps its CSV files in the
# current working directory).
from os import makedirs, chdir, environ
from os.path import join, expanduser
version = "<unknown>" # set during packaging
service = "fitness" # set during packaging
environ["FITNESS_VERSION"] = version
environ["FITNESS_SERVICE"] = service
home = expanduser(join("~", ".fitness"))
try:
    makedirs(home)
except OSError:
    # the directory normally exists already; a real failure will surface
    # in the chdir() below.  (Was a bare except, which also swallowed
    # KeyboardInterrupt/SystemExit.)
    pass
chdir(home)
import fitness.fitness
fitness.fitness.main()
| Python |
#
# Create a debian package
#
from sys import argv
from os import mkdir, makedirs, chdir, chmod, getcwd, walk, remove, rmdir, \
environ, popen
from os.path import getmtime, getsize, join, basename, dirname
from base64 import b64encode
from StringIO import StringIO
name = "fitness"
serviceprefix = "com.googlecode.FitnessRecordBook"
arch = "all"
size = "0"
pyfiles = ["fitness.py", "lists.py", "dialogs.py", "items.py"]
control = """Package: %s
Version: %s
Section: user/tools
Priority: optional
Architecture: %s
Installed-Size: %s
Maintainer: obb 770 <obb770@gmail.com>
Depends: python2.5, python2.5-runtime, maemo-select-menu-location
Description: Calorie counter application for the Internet Tablet
Manage your diet by keeping account of food and
physical activity.
.
Web site: http://benreuven.com/udi/diet
Maemo-Icon-26:
%s
"""
servicefilename = "%s.%s.service" % (serviceprefix, name)
servicefile = """[D-BUS Service]
Name=%s.%s
Exec=/usr/bin/%s
""" % (serviceprefix, name, name)
desktopfilename = "%s.desktop" % (name,)
desktopfile = """[Desktop Entry]
Encoding=UTF-8
Version=1.0
Type=Application
Name=Fitness
Exec=/usr/bin/%s
Icon=%s
StartupWMClass=%s
X-Window-Icon=%s
X-Window-Icon-Dimmed=%s
X-Osso-Service=%s.%s
X-Osso-Type=application/x-executable
""" % (name, name, name, name, name, serviceprefix, name)
# version: in order of preference take the command line argument, the
# FITNESS_VERSION environment variable, or "1." + the svn revision
if len(argv) > 1:
    version = argv[1]
elif "FITNESS_VERSION" in environ:
    version = environ["FITNESS_VERSION"]
else:
    pipe = popen("svn info | sed -n -e 's/Revision: *//p'", "rb")
    revision = pipe.read()
    pipe.close()
    version = "1." + revision[:-1]
deb = "%s_%s_%s" % (name, version, arch)
def copy(src, open_src, dst, open_dst):
    """Copy a stream to another stream in 4 KB chunks.

    src/dst are either file-like objects or path names; open_src /
    open_dst say whether this function must open (and afterwards close)
    the corresponding side itself.
    """
    if open_src:
        src = file(src, "rb")
    if open_dst:
        dst = file(dst, "wb")
    chunk = src.read(4096)
    while chunk:
        dst.write(chunk)
        chunk = src.read(4096)
    if open_dst:
        dst.close()
    if open_src:
        src.close()
def archive(arname, files):
    """Write *files* into a Unix ar(1) archive named *arname*.

    Used to assemble the final .deb (which is an ar archive).  Members
    are stored with uid/gid 0 and mode 100644; odd-sized members get the
    one-byte newline padding the ar format requires.
    """
    out = file(arname, "wb")
    out.write("!<arch>\n")
    for member in files:
        # fixed-width ar member header; \140\n is the header magic
        out.write("%-16s%-12d0 0 100644 %-10d\140\n" %
                  (member, getmtime(member), getsize(member)))
        copy(member, True, out, False)
        if getsize(member) % 2 != 0:
            out.write("\n")
    out.close()
def set_mode(filename, executable=False):
    """chmod *filename* to rw-r--r--, adding execute bits when *executable*."""
    import stat
    bits = stat.S_IRUSR | stat.S_IRGRP | stat.S_IROTH | stat.S_IWUSR
    if executable:
        bits |= stat.S_IXUSR | stat.S_IXGRP | stat.S_IXOTH
    chmod(filename, bits)
def install(src, dst, executable=False):
    """Copy the open stream *src* to path *dst*, creating parent dirs.

    Closes *src* and normalizes the permissions of the installed file.
    """
    try:
        makedirs(dirname(dst))
    except:
        # parent directory already exists
        pass
    copy(src, False, dst, True)
    src.close()
    set_mode(dst, executable)
def py_install(src, datadir, dst):
    """Byte-compile source file *src* and install the .pyo under *datadir*.

    Mirrors py_compile.compile(): a compiled module is the import magic,
    a little-endian 32-bit source mtime, then the marshalled code object.
    *src* may be None to produce an empty module (a package __init__).
    """
    from time import time
    mtime = time()
    code = ""
    if (src):
        mtime = getmtime(src)
        f = file(src, "U")
        code = f.read()
        f.close()
        # compile() needs a terminating newline
        if code and code[-1] != "\n":
            code += "\n"
    mtime = int(mtime)
    out = StringIO()
    from imp import get_magic
    out.write(get_magic())
    # the source timestamp, little endian
    out.write(chr(mtime & 0xff))
    out.write(chr((mtime >> 8) & 0xff))
    out.write(chr((mtime >> 16) & 0xff))
    out.write(chr((mtime >> 24) & 0xff))
    from marshal import dumps
    out.write(dumps(compile(code, "/" + dst, "exec")))
    out.seek(0, 0)
    install(out, join(datadir, dst + "o"))
def tar(tarname, root):
    # Create a gzipped tarball of the tree under *root*.  Every member is
    # owned by root:root and its name is forced to start with "./" (the
    # "linda" package checker requires it).
    import tarfile
    tf = tarfile.open(tarname, "w:gz")
    cwd = getcwd()
    chdir(root)
    def add(path):
        # Add one file or directory with root ownership.
        ti = tf.gettarinfo(path)
        ti.uid = 0
        ti.uname = "root"
        ti.gid = 0
        ti.gname = "root"
        # TarInfo.tobuf() normalizes the path and removes the initial "./"
        # this causes the "linda" tool to fail.
        # Add "./" by intercepting the method and fixing the tar buffer
        tobuf = ti.tobuf
        def mytobuf(posix=False):
            # Patch the 512-byte header in place: prepend "./" to the name
            # field (offsets 0-100), keep the fields up to the checksum
            # (offset 148), blank the checksum field and recompute it.
            # NOTE(review): the blank between the slices should span the
            # 8-byte checksum field (offsets 148-156) - verify the literal.
            buf = tobuf(posix)
            if not buf.startswith("./"):
                if len(ti.name) > 98:
                    raise Exception(
                        "tar: path length must be shorter than 98 chars")
                buf = "./" + buf[:(98 - 512)] + buf[(100 - 512):(148 - 512)] + \
                    " " + buf[(156 - 512):]
                chksum = tarfile.calc_chksums(buf)[0]
                # checksum is a 6-digit octal followed by NUL at offset 148
                buf = buf[:-364] + "%06o\0" % chksum + buf[-357:]
                ti.buf = buf
            return buf
        ti.tobuf = mytobuf
        if ti.isreg():
            f = file(path, "rb")
            tf.addfile(ti, f)
            f.close()
        else:
            tf.addfile(ti)
    for top, dirs, files in walk("."):
        add(top)
        for f in files:
            add(join(top, f))
    chdir(cwd)
    tf.close()
def rm(path):
    """Delete *path*, whether file or directory tree; ignore all errors."""
    try:
        remove(path)
    except:
        pass
    else:
        return
    # not a plain file: remove the tree bottom-up
    try:
        for top, dirs, files in walk(path, topdown=False):
            for name in files:
                remove(join(top, name))
            for name in dirs:
                rmdir(join(top, name))
        rmdir(path)
    except:
        pass
def du(path):
    """Return the total size of all files under *path* in KB, rounded up."""
    total = 0
    for top, dirs, files in walk(path):
        total += sum(getsize(join(top, f)) for f in files)
    return (total + 1023) // 1024
def md5sum(path, md5file):
    """Write an md5sums manifest for every file under *path* to *md5file*.

    Paths are emitted relative to *path* without the leading "./", in the
    "<hexdigest> <path>" form dpkg expects.
    """
    import md5
    cwd = getcwd()
    chdir(path)
    for top, dirs, files in walk("."):
        for name in files:
            digest = md5.new()
            f = file(join(top, name), "rb")
            while True:
                block = f.read(4096)
                if not block:
                    break
                digest.update(block)
            f.close()
            md5file.write("%s %s\n" % (digest.hexdigest(), join(top, name)[2:]))
    chdir(cwd)
# ---- build the package: data tree, control tree, then the ar archive ----
# start from a clean slate
rm(deb + '.deb')
rm(deb)
mkdir(deb)
# data
# set the version and service name
# (rewrite the launcher script, substituting the packaging-time values)
f = file(name, "rb")
s = StringIO()
for line in f:
    if line.startswith('version = "<unknown>"'):
        s.write('version = "%s"\n' % (version,))
    elif line.startswith('service = "fitness"'):
        s.write('service = "%s.%s"\n' % (serviceprefix, name))
    else:
        s.write(line)
s.seek(0)
f.close()
install(s, join(deb, "data", "usr", "bin", name), True)
# byte-compile the package: an empty __init__ plus all the sources
pkg_dir = join("usr", "lib", "python2.5", "site-packages", name)
py_install(None, join(deb, "data"), join(pkg_dir, "__init__.py"))
for pyfile in pyfiles:
    py_install(pyfile, join(deb, "data"), join(pkg_dir, pyfile))
# application icons in the three sizes hildon uses
icon_dir = join(deb, "data", "usr", "share", "icons", "hicolor")
install(file(name + "_26x26.png", "rb"),
        join(icon_dir, "26x26", "hildon", name + ".png"))
install(file(name + "_40x40.png", "rb"),
        join(icon_dir, "40x40", "hildon", name + ".png"))
install(file(name + "_64x64.png", "rb"),
        join(icon_dir, "scalable", "hildon", name + ".png"))
# desktop entry and D-BUS service file
install(StringIO(desktopfile),
        join(deb, "data", "usr", "share", "applications", "hildon",
             desktopfilename))
install(StringIO(servicefile),
        join(deb, "data", "usr", "share", "dbus-1", "services",
             servicefilename))
install(file("README.txt", "rb"),
        join(deb, "data", "usr", "share", "doc", name, "copyright"))
chdir(deb)
tar("data.tar.gz", "data")
chdir("..")
# control
mkdir(join(deb, "control"))
# size
size = "%d" % (du(join(deb, "data")),)
# icon
# base64-encode the 26x26 icon folded to 69 columns; continuation lines
# get a leading space (debian control-field format)
f = file(join(icon_dir, "26x26", "hildon", name + ".png"), "rb")
icon = f.read()
f.close()
icon_chars = []
for i, c in enumerate(b64encode(icon)):
    icon_chars.append(c)
    if (i + 1) % 69 == 0:
        icon_chars.append("\n")
        icon_chars.append(" ")
icon = "".join(icon_chars)
install(StringIO(control % (name, version, arch, size, icon)),
        join(deb, "control", "control"))
md5sums = join(deb, "control", "md5sums")
md5file = file(md5sums, "wb")
md5sum(join(deb, "data"), md5file)
md5file.close()
set_mode(md5sums)
# postinst: refresh the icon cache and, on a fresh install only, let the
# user pick the application's menu location
install(StringIO("""#!/bin/sh
gtk-update-icon-cache -f /usr/share/icons/hicolor
if [ "$1" = "configure" -a "$2" = "" ]; then
maemo-select-menu-location %s.desktop tana_fi_extras
fi
""" % (name,)), join(deb, "control", "postinst"), True)
chdir(deb)
tar("control.tar.gz", "control")
chdir("..")
install(StringIO("2.0\n"), join(deb, "debian-binary"));
# finally wrap the three members into the .deb (an ar archive)
chdir(deb)
archive(join("..",deb + ".deb"),
        ("debian-binary", "control.tar.gz", "data.tar.gz"))
chdir("..")
rm(deb)
| Python |
#!/usr/bin/env python
#
# This is a quick hack to enable uploading and deleting files on your
# googlecode project. It borrows from googlecode_upload.py and from libgmail
# for details on how to upload files and obtain the SID cookie by signing in
# to google accounts.
# The script probes the Subversion configuration files for authentication
# information and caches the google account SID cookie in the current
# working directory.
#
from urllib import urlencode
from urllib2 import urlopen, Request, HTTPError
from base64 import b64encode
from sys import argv, stdin, stdout
from getpass import getpass
from re import match
from os import listdir, remove, getenv
from os.path import join, expanduser, exists, basename
def getcred(userprompt, defaultuser=None):
    """Interactively ask for a user name and password.

    An empty answer to the user-name prompt selects *defaultuser*.
    """
    hint = ""
    if defaultuser:
        hint = " [%s]" % (defaultuser,)
    stdout.write("%s%s: " % (userprompt, hint))
    stdout.flush()
    answer = stdin.readline().rstrip()
    if answer == "":
        answer = defaultuser
    return answer, getpass()
def getsvn(project, force=True):
    """Look up cached Subversion credentials for *project*.

    Scans the svn.simple auth cache under $SUBVERSION_DIR (default
    ~/.subversion) for a realm that mentions *project*.  When nothing is
    found: prompt interactively if *force*, else return (None, None).
    """
    svndir = getenv("SUBVERSION_DIR", None) or expanduser('~/.subversion')
    authdir = join(svndir, "auth", "svn.simple")
    user, password = None, None
    if exists(authdir):
        for hash in listdir(authdir):
            f = file(join(authdir, hash))
            entries = f.read().splitlines()
            f.close()
            # the cache format alternates: K <len> / key / V <len> / value
            record = {}
            for i in xrange(0, len(entries) - 1, 4):
                record[entries[i + 1]] = entries[i + 3]
            if record['svn:realmstring'].find(project) >= 0:
                user = record['username']
                password = record['password']
                break
    if not user and force:
        return getcred("Subversion user")
    return user, password
def getsid(defaultuser=None, force_login=False):
    """Return a google-accounts SID cookie, cached in ./googlesid.

    With *force_login* the cache is discarded and a fresh sign-in is
    performed.  Raises HTTPError(400) when no SID cookie comes back
    (bad credentials).
    """
    SIDFILE = "googlesid"
    if exists(SIDFILE):
        if force_login:
            remove(SIDFILE)
        else:
            f = file(SIDFILE)
            sid = f.readline().rstrip()
            f.close()
            return sid
    user, password = getcred("Gmail user", defaultuser)
    url = "https://www.google.com/accounts/ServiceLoginBoxAuth"
    page = urlopen(url, urlencode((("Email", user), ("Passwd", password))))
    for cookie in page.info().getheader("set-cookie").split(","):
        if cookie.startswith("SID="):
            sid = cookie.split(";")[0]
            break
    else:
        # no SID cookie in the response: the sign-in was rejected
        he = HTTPError(url, 400, "Bad credentials", None, None)
        he.url = url
        raise he
    f = file(SIDFILE, "wb")
    f.write(sid)
    f.close()
    return sid
def delete_do(project, filename, force_login=False):
    """Delete *filename* from the project downloads; return (status, reason).

    Scrapes the delete confirmation page for the token/pagegen form
    fields and posts them back.  On a stale cached SID it retries once
    with a forced login.
    """
    sid = getsid(getsvn(project, False)[0], force_login)
    url = "http://code.google.com/p/%s/downloads/" % (project,)
    req = Request(url + "delete?filename=" + filename, headers={"Cookie": sid})
    page = " ".join(urlopen(req).read().splitlines())
    try:
        token = match(r'.*name=token\s+value="([^"]*)"', page).groups()[0]
        pagegen = match(r'.*name=pagegen\s+value="([^"]*)"', page).groups()[0]
    except:
        # the form is missing - probably a bad/expired SID
        if force_login:
            return 400, "Too many failures"
        return delete_do(project, filename, True)
    req = Request(url + "delete.do",
                  data=urlencode((("token", token),
                                  ("pagegen", pagegen),
                                  ("filename", filename),
                                  ("delete", "Delete Download"))),
                  headers={"Cookie": sid})
    urlopen(req)
    return 200, "Deleted"
def delete(project, filename):
status, reason = None, None
try:
status, reason = delete_do(project, filename)
except HTTPError, e:
return e.code, e.msg, e.url
return (status, reason,
"%s.googlecode.com/files/%s" % (project, filename))
def upload_do(filename, project, user, password, summary, labels):
url = "https://%s.googlecode.com/files" % (project, )
req = Request(url)
BOUNDARY = "theboundary"
req.add_header("Content-type",
"multipart/form-data; boundary=%s" % BOUNDARY)
req.add_header("Authorization",
"Basic %s" % b64encode(":".join((user, password))))
data = []
fields = [("summary", summary)]
if labels != None:
fields.extend([("label", label) for label in labels])
for name, value in fields:
data.append("--" + BOUNDARY)
data.append('Content-Disposition: form-data; name="%s"' % name)
data.append("")
data.append(value)
data.append("--" + BOUNDARY)
data.append('Content-Disposition: form-data; name="filename"' +
'; filename="%s"' % basename(filename))
data.append("")
f = file(filename, "rb")
data.append(f.read())
f.close()
data.append("--" + BOUNDARY + "--")
data.append("")
req.add_data("\r\n".join(data))
location = None
try:
urlopen(req)
except HTTPError, e:
location = e.info().getheader("location")
if not location:
location = e.url
return e.code, e.msg, location
def upload(project, filename, summary="", labels=None):
    """Upload with cached svn credentials; return (status, reason, info)."""
    user, password = getsvn(project)
    if user and password:
        return upload_do(filename, project, user, password, summary, labels)
    return 400, "No svn credentials", project
def main():
    # Command line entry point: dispatch to delete() or upload(); the
    # default result is the usage message.
    result = (0, """
Usage: %s delete <project> <file-name>
%s upload <project> <file-name> <summary> [<label>,<label>,...]
"""
    % (argv[0], argv[0]), "")
    if len(argv) == 4 and argv[1] == "delete":
        result = delete(argv[2], argv[3])
    elif len(argv) >= 5 and argv[1] == "upload":
        labels = None
        if len(argv) >= 6:
            # labels come as a single comma-separated argument
            labels = argv[5].split(",")
        result = upload(argv[2], argv[3], argv[4], labels)
    print "%d %s %s" % result
if __name__ == "__main__":
main()
| Python |
#!/usr/bin/env python
#
# This is a quick hack to enable uploading and deleting files on your
# googlecode project. It borrows from googlecode_upload.py and from libgmail
# for details on how to upload files and obtain the SID cookie by signing in
# to google accounts.
# The script probes the Subversion configuration files for authentication
# information and caches the google account SID cookie in the current
# working directory.
#
from urllib import urlencode
from urllib2 import urlopen, Request, HTTPError
from base64 import b64encode
from sys import argv, stdin, stdout
from getpass import getpass
from re import match
from os import listdir, remove, getenv
from os.path import join, expanduser, exists, basename
def getcred(userprompt, defaultuser=None):
    """Interactively ask for a user name and password.

    An empty answer to the user-name prompt selects *defaultuser*.
    """
    hint = ""
    if defaultuser:
        hint = " [%s]" % (defaultuser,)
    stdout.write("%s%s: " % (userprompt, hint))
    stdout.flush()
    answer = stdin.readline().rstrip()
    if answer == "":
        answer = defaultuser
    return answer, getpass()
def getsvn(project, force=True):
    """Look up cached Subversion credentials for *project*.

    Scans the svn.simple auth cache under $SUBVERSION_DIR (default
    ~/.subversion) for a realm that mentions *project*.  When nothing is
    found: prompt interactively if *force*, else return (None, None).
    """
    svndir = getenv("SUBVERSION_DIR", None) or expanduser('~/.subversion')
    authdir = join(svndir, "auth", "svn.simple")
    user, password = None, None
    if exists(authdir):
        for hash in listdir(authdir):
            f = file(join(authdir, hash))
            entries = f.read().splitlines()
            f.close()
            # the cache format alternates: K <len> / key / V <len> / value
            record = {}
            for i in xrange(0, len(entries) - 1, 4):
                record[entries[i + 1]] = entries[i + 3]
            if record['svn:realmstring'].find(project) >= 0:
                user = record['username']
                password = record['password']
                break
    if not user and force:
        return getcred("Subversion user")
    return user, password
def getsid(defaultuser=None, force_login=False):
    """Return a google-accounts SID cookie, cached in ./googlesid.

    With *force_login* the cache is discarded and a fresh sign-in is
    performed.  Raises HTTPError(400) when no SID cookie comes back
    (bad credentials).
    """
    SIDFILE = "googlesid"
    if exists(SIDFILE):
        if force_login:
            remove(SIDFILE)
        else:
            f = file(SIDFILE)
            sid = f.readline().rstrip()
            f.close()
            return sid
    user, password = getcred("Gmail user", defaultuser)
    url = "https://www.google.com/accounts/ServiceLoginBoxAuth"
    page = urlopen(url, urlencode((("Email", user), ("Passwd", password))))
    for cookie in page.info().getheader("set-cookie").split(","):
        if cookie.startswith("SID="):
            sid = cookie.split(";")[0]
            break
    else:
        # no SID cookie in the response: the sign-in was rejected
        he = HTTPError(url, 400, "Bad credentials", None, None)
        he.url = url
        raise he
    f = file(SIDFILE, "wb")
    f.write(sid)
    f.close()
    return sid
def delete_do(project, filename, force_login=False):
    """Delete *filename* from the project downloads; return (status, reason).

    Scrapes the delete confirmation page for the token/pagegen form
    fields and posts them back.  On a stale cached SID it retries once
    with a forced login.
    """
    sid = getsid(getsvn(project, False)[0], force_login)
    url = "http://code.google.com/p/%s/downloads/" % (project,)
    req = Request(url + "delete?filename=" + filename, headers={"Cookie": sid})
    page = " ".join(urlopen(req).read().splitlines())
    try:
        token = match(r'.*name=token\s+value="([^"]*)"', page).groups()[0]
        pagegen = match(r'.*name=pagegen\s+value="([^"]*)"', page).groups()[0]
    except:
        # the form is missing - probably a bad/expired SID
        if force_login:
            return 400, "Too many failures"
        return delete_do(project, filename, True)
    req = Request(url + "delete.do",
                  data=urlencode((("token", token),
                                  ("pagegen", pagegen),
                                  ("filename", filename),
                                  ("delete", "Delete Download"))),
                  headers={"Cookie": sid})
    urlopen(req)
    return 200, "Deleted"
def delete(project, filename):
status, reason = None, None
try:
status, reason = delete_do(project, filename)
except HTTPError, e:
return e.code, e.msg, e.url
return (status, reason,
"%s.googlecode.com/files/%s" % (project, filename))
def upload_do(filename, project, user, password, summary, labels):
url = "https://%s.googlecode.com/files" % (project, )
req = Request(url)
BOUNDARY = "theboundary"
req.add_header("Content-type",
"multipart/form-data; boundary=%s" % BOUNDARY)
req.add_header("Authorization",
"Basic %s" % b64encode(":".join((user, password))))
data = []
fields = [("summary", summary)]
if labels != None:
fields.extend([("label", label) for label in labels])
for name, value in fields:
data.append("--" + BOUNDARY)
data.append('Content-Disposition: form-data; name="%s"' % name)
data.append("")
data.append(value)
data.append("--" + BOUNDARY)
data.append('Content-Disposition: form-data; name="filename"' +
'; filename="%s"' % basename(filename))
data.append("")
f = file(filename, "rb")
data.append(f.read())
f.close()
data.append("--" + BOUNDARY + "--")
data.append("")
req.add_data("\r\n".join(data))
location = None
try:
urlopen(req)
except HTTPError, e:
location = e.info().getheader("location")
if not location:
location = e.url
return e.code, e.msg, location
def upload(project, filename, summary="", labels=None):
    """Upload *filename* using the stored svn credentials of *project*."""
    user, password = getsvn(project)
    if user and password:
        return upload_do(filename, project, user, password, summary, labels)
    return 400, "No svn credentials", project
def main():
    """Command-line entry point.

    delete <project> <file-name>                          - remove a download
    upload <project> <file-name> <summary> [<labels>]     - add a download
    Prints "<status> <reason> <url>" on one line; prints the usage text
    (with status 0) when the arguments do not match either form.
    """
    result = (0, """
Usage: %s delete <project> <file-name>
       %s upload <project> <file-name> <summary> [<label>,<label>,...]
"""
              % (argv[0], argv[0]), "")
    if len(argv) == 4 and argv[1] == "delete":
        result = delete(argv[2], argv[3])
    elif len(argv) >= 5 and argv[1] == "upload":
        labels = None
        if len(argv) >= 6:
            # Labels arrive as one comma-separated argument.
            labels = argv[5].split(",")
        result = upload(argv[2], argv[3], argv[4], labels)
    print "%d %s %s" % result
if __name__ == "__main__":
    main()
| Python |
class Context(object):
    """Stand-in for the osso Context: it only records the application
    identity passed to it."""
    def __init__(self, name, version, flag):
        # Store the constructor arguments verbatim; the stub has no behavior.
        self.name = name
        self.version = version
        self.flag = flag
class Autosave(object):
    """Stand-in for the osso Autosave service.

    Instead of scheduling a deferred save like the real service, the stub
    invokes the registered callback immediately.
    """
    def __init__(self, context):
        self.context = context
        # No callback registered yet.
        self.cb = None
    def set_autosave_callback(self, cb, data=None):
        """Register *cb* to be invoked on save; *data* is accepted for API
        compatibility but unused."""
        self.cb = cb
    def userdata_changed(self):
        """Report a data change; the stub saves right away."""
        self.force_autosave()
    def force_autosave(self):
        """Invoke the registered callback unconditionally."""
        self.cb()
class OSSOStub:
    """Minimal replacement for the osso module on non-Maemo systems."""
    def __init__(self):
        # Expose the stub classes under the names the real module provides.
        self.Context = Context
        self.Autosave = Autosave
# Module-level instance so callers can use "osso.Context(...)" unchanged.
osso = OSSOStub()
import gtk
import csv
# the application main window launches lists which in turn launch
# dialogs which in turn are made from items
from items import *
from dialogs import *
class DateObjList(Dialog):
    """Managing objects that have a date field
    """
    # When subclassing, override the following:
    title="Date"
    objclass=DateObj
    column_names = ['Date']
    fname="fitness_dates.csv"
    def updateobj(self,obj):
        """Accept an edited object; first-time objects are added to the list."""
        if obj.is_new:
            obj.is_new=False
            self.liststore.append([obj])
        Dialog.updateobj(self,obj)
    def load(self):
        """Load all objects from self.fname into the liststore.

        Raises IOError when the file does not exist (caught in __init__).
        """
        f = open(self.fname,"rb")
        r = csv.reader(f)
        for row in r:
            obj = self.objclass(self)
            obj.load(row)
            self.liststore.append([obj])
        f.close()
    def save(self):
        """Write every object in the liststore back to self.fname."""
        f = open(self.fname,"wb")
        w=csv.writer(f)
        for row in self.liststore:
            row[0].save(w)
        f.close()
    def __init__(self,parentDialog):
        self.parentDialog=parentDialog
        # When subclassing, override the tuple with appropriate method to
        # display the content of each column
        self.cell_data_funcs = (self.cell_date,) #Note that this must be a tuple
        self.liststore = gtk.ListStore(object)
        try:
            self.load()
        except IOError:
            # No data file yet - start with an empty list.
            pass
    def cell_date(self, column, cell, model, iter):
        """Extract the date string from each object in the list, and place it
        in a GUI cell which is part of the Date column
        """
        obj=model.get_value(iter, 0)
        cell.set_property('text', str(obj.date))
    def new_event(self,widget):
        """Add a new DateObj to the list when the New button is pressed"""
        obj = self.objclass(self)
        obj.newvalues()
        self.edit_obj(obj)
    def date_sort(self, model, iter1, iter2):
        """Sort method used to keep the objects in the list sorted in descending
        order """
        obj1=model.get_value(iter1, 0)
        obj2=model.get_value(iter2, 0)
        if obj1 and obj2:
            return cmp(obj1,obj2)
        else:
            return 1 #When adding a new entry, one of the objs is None.
    def build_run(self,parent_window):
        """Build the sorted TreeView and its columns."""
        # create the TreeView
        sm = gtk.TreeModelSort(self.liststore)
        sm.set_sort_func(0,self.date_sort)
        sm.set_sort_column_id(0, gtk.SORT_DESCENDING)
        self.treeview = gtk.TreeView(sm)
        # create the TreeViewColumns to display the data
        self.tvcolumn = [None] * len(self.column_names)
        for n in range(len(self.column_names)):
            cell = gtk.CellRendererText()
            self.tvcolumn[n] = gtk.TreeViewColumn(self.column_names[n], cell)
            self.tvcolumn[n].set_cell_data_func(cell, self.cell_data_funcs[n])
            self.treeview.append_column(self.tvcolumn[n])
        self.treeview.connect('row-activated', self.edit)
    def run(self,parent_window):
        """Run the dialog window for managing the list of objects
        parent_window - the window from which this window was launched
        """
        self.parent_window=parent_window
        self.make_dialog(parent_window,OKCancel=False)
        win=self.dialog
        # NOTE(review): self.treeview is used below, but the build_run() call
        # here is commented out - presumably the treeview is built by a
        # caller; verify before changing.
        #self.build_run(parent_window)
        self.scrolledwindow = gtk.ScrolledWindow()
        self.scrolledwindow.set_policy(gtk.POLICY_AUTOMATIC,gtk.POLICY_AUTOMATIC)
        self.scrolledwindow.add(self.treeview)
        win.vbox.pack_start(self.scrolledwindow)
        # Add Total/New buttons at the bottom
        win.bTotal = gtk.Button('Back')
        win.bTotal.connect('clicked', self.cancel_event)
        win.bNew = gtk.Button('New')
        win.bNew.connect('clicked', self.new_event)
        win.hbox.pack_start(win.bTotal, True, True)
        win.hbox.pack_start(win.bNew, True, True)
        win.vbox.pack_end(win.hbox, False)
        self.endrun()
    def destroy(self, widget, data=None):
        # Detach the treeview so it can be re-parented by the next window.
        self.scrolledwindow.remove(self.treeview)
        Dialog.destroy(self,widget,data)
    def edit(self, treeview, path, column):
        """Edit an entry when an item in the list is double clicked"""
        model = treeview.get_model()
        iter = model.get_iter(path)
        obj= model.get_value(iter, 0)
        self.edit_obj(obj)
    def edit_obj(self,obj):
        """Open the object's own edit dialog, then re-sort the list."""
        # run the edit dialog of the object
        obj.run(self.dialog)
        # The date of the object may have been changed and the entire list
        # needs to be resorted.
        # Rebuild the sorted list of objects and plug them into tree view.
        sm = gtk.TreeModelSort(self.liststore)
        sm.set_sort_func(0,self.date_sort)
        sm.set_sort_column_id(0, gtk.SORT_DESCENDING)
        self.treeview.set_model(sm)
class WeightList(DateObjList):
    """Manage all weight entries"""
    objclass = Weight
    title = "Weight"
    column_names = ['Date', 'Weight']
    fname = "fitness_weights.csv"
    def __init__(self, parentDialog):
        DateObjList.__init__(self, parentDialog)
        # Must be assigned after the super's init, which sets its own default.
        self.cell_data_funcs = (self.cell_date, self.cell_weight)
    def last_weight(self):
        """Return the weight of the most recent entry; used to estimate the
        current metabolism of your body."""
        latest = None
        for row in self.liststore:
            entry = row[0]
            if latest is None or entry > latest:
                latest = entry
        if latest is None:
            # TODO take value from goal weight
            return 81.
        return latest.weight
    def cell_weight(self, column, cell, model, iter):
        """Render the weight of one object into its Weight-column cell."""
        entry = model.get_value(iter, 0)
        cell.set_property('text', '%.1f' % entry.weight)
class CalList(DateObjList):
    """Manage all Cal entries"""
    objclass=Cal
    title="Cal"
    column_names = ['Date', 'Desc', 'Cal']
    # NOTE: fname and dict_fname are supplied by the subclasses
    # (PAList / FoodList).
    def __init__(self,parentDialog):
        # for all objects' names (desc item) keep the latest object
        self.dict={}
        # and build a liststore of these names
        self.dictlist=gtk.ListStore(str)
        DateObjList.__init__(self,parentDialog)
        # This should come after the super's init because it overrides it.
        self.cell_data_funcs = (self.cell_date, self.cell_desc, self.cell_cal)
    def loadobjname(self,obj):
        """Remember obj under its description; each name appears once in
        dictlist, and dict keeps the most recent object for that name."""
        name=str(obj.desc)
        if name not in self.dict:
            self.dictlist.append([name])
        self.dict[name]=obj
    def updateobj(self,obj):
        """Record the object's description, then update the list as usual."""
        self.loadobjname(obj)
        DateObjList.updateobj(self,obj)
    def cell_desc(self, column, cell, model, iter):
        """Extract the description string from each object in the list,
        and place it in a GUI cell which is part of the Desc column"""
        obj=model.get_value(iter, 0)
        cell.set_property('text', obj.desc)
    def cell_cal(self, column, cell, model, iter):
        """Compute the total calories from each object in the list, and place it
        in a GUI cell which is part of the Cal column"""
        obj=model.get_value(iter, 0)
        cell.set_property('text', '%.1f'%(obj.cals()))
    def cal_in_range(self,sdate,edate):
        """ Return the sum of calories inside the date range (inclusive)"""
        calsum=0.
        for row in self.liststore:
            obj=row[0]
            if obj.date >= sdate and obj.date <= edate:
                calsum += obj.cals()
        return calsum
    def edit_obj(self,obj):
        # No extra behavior; kept as an explicit override point.
        DateObjList.edit_obj(self,obj)
    def load(self):
        """Load the dated entries, then the name-completion dictionary."""
        DateObjList.load(self)
        f = open(self.dict_fname,"rb")
        r = csv.reader(f)
        for row in r:
            obj = self.objclass(self)
            obj.load(row)
            self.loadobjname(obj)
        f.close()
    def save(self):
        """Save the dated entries, then the name-completion dictionary."""
        DateObjList.save(self)
        f = open(self.dict_fname,"wb")
        w=csv.writer(f)
        #for (name,obj) in self.dict.iteritems():
        #cls=obj.types[0]
        #attr=obj.attributes[0]
        #obj.__setattr__(attr)=cls(name)
        for obj in self.dict.itervalues():
            obj.save(w)
        f.close()
class PAList(CalList):
    """Physical-activity entries: a CalList persisted to its own files."""
    objclass=PA
    title="PA"
    fname="fitness_pas.csv"
    dict_fname="fitness_pa_dict.csv"
class FoodList(CalList):
    """Food entries: a CalList persisted to its own files."""
    objclass=Food
    title="Food"
    fname="fitness_foods.csv"
    dict_fname="fitness_food_dict.csv"
| Python |
import gtk
import csv
try:
import hildon
except:
from hildonstub import hildon
# the application main window launches lists which in turn launch
# dialogs which in turn are made from items
from items import *
# All windows will have the same size
SZ=(600,400)  # (width, height) in pixels
class Dialog(object):
    """Dialog box for editing values. Assuming all values are float.
    Each value has a label in the GUI and an attribute in this object."""
    #List below the labels, names and types of the attributes
    labels = []
    attributes = []
    types = []
    def __init__(self,parentDialog):
        self.parentDialog=parentDialog
        # is_new marks objects created via New and not yet stored in a list.
        self.is_new = False
    def updateobj(self,obj):
        # NOTE(review): the obj argument is ignored and self is propagated
        # upward instead; subclasses appear to rely on this - verify before
        # changing.
        self.parentDialog.updateobj(self)
    def make_dialog(self,parent_window,OKCancel=True):
        """Make the edit dialog box without running it. This can be extended
        by sub class
        """
        self.parent_window=parent_window
        #self.parent_window.hide_all()
        # Dont use gtk.Dialog and dont use modal because Nokia
        # NOTE(review): despite the comment above, set_modal(True) is called
        # below - confirm which behavior is intended.
        win=gtk.Window()
        win.set_title("Fitness Record Book")
        win.set_transient_for(self.parent_window)
        win.set_modal(True)
        win.connect("delete_event", self.delete_event)
        win.connect("destroy", self.destroy)
        win.vbox=gtk.VBox()
        win.add(win.vbox)
        ##win.vbox.show()
        # create a box for the bottom row of keys.
        win.hbox = gtk.HBox()
        win.hbox.set_size_request(-1,60)
        if OKCancel:
            win.bOK = gtk.Button('OK')
            win.bOK.connect('clicked', self.ok_event)
            win.bCancel = gtk.Button('Cancel')
            win.bCancel.connect('clicked', self.cancel_event)
            win.hbox.pack_start(win.bOK, True, True)
            ##win.bOK.show()
            win.hbox.pack_start(win.bCancel, True, True)
            ##win.bCancel.show()
        win.vbox.pack_end(win.hbox, False)
        ##win.hbox.show()
        self.dialog=win
        self.dialog.set_size_request(*SZ)
    def delete_event(self, widget, event, data=None):
        # Returning False lets the window be destroyed.
        return False
    def destroy(self, widget, data=None):
        """Bring the parent window back when this dialog goes away."""
        self.parent_window.show_all()
        self.parent_window.present()
        try:
            self.parent_window.set_focus(self.parent_window.my_focus)
        except:
            # Best-effort focus restore: the parent may not define my_focus.
            # NOTE(review): bare except also hides programming errors.
            pass
    def cancel_event(self, widget, data=None):
        self.dialog.destroy()
    def ok_event(self, widget, data=None):
        """Validate every entry; on success store the values and close.

        Returns True when accepted, False when any entry failed to parse
        (the dialog then stays open for the user to correct it).
        """
        temp_value=[]
        try:
            for i,attr in enumerate(self.attributes):
                # find the class of the attribue
                cls=self.types[i]
                entry=self.entries[i]
                # cast the text in the Entry widget to the class
                # this could generate an exception on a bad entry
                value=cls(entry.get_text())
                temp_value.append(value)
        except:
            # If there were problems, dont destroy the window and the user will
            # have to continue and play with it.
            # NOTE(review): bare except - consider narrowing to the cast
            # exceptions so real bugs are not silenced.
            return False
        for attr,value in zip(self.attributes,temp_value):
            self.__setattr__(attr,value)
        self.dialog.destroy()
        self.updateobj(self)
        return True
    def run(self,parent_window):
        """Build and show the edit dialog: one labelled entry per attribute."""
        self.make_dialog(parent_window)
        table = gtk.Table(3, 2, False)
        self.entries=[]
        for r,l in enumerate(self.labels):
            attr=self.__getattribute__(self.attributes[r])
            label = gtk.Label(l)
            label.set_alignment(0, 0)
            table.attach(label,0,1,r,r+1)
            #label.show()
            entry=attr.entry(self)
            self.entries.append(entry)
            table.attach(entry,1,2,r,r+1)
            #entry.show()
        self.dialog.vbox.pack_start(table, False, False, 0)
        #table.show()
        self.endrun()
    def endrun(self):
        """Show this dialog and hide the launching window."""
        self.dialog.show_all()
        self.parent_window.hide_all()
    def newvalues(self):
        """Initialize all attributes from the class defaults and mark the
        object as new."""
        self.is_new=True
        for i,value in enumerate(self.values):
            self.__setattr__(self.attributes[i],
                             self.types[i](value))
class OptionsDialog(Dialog):
    """Dialog box for editing the application options.

    The values are persisted as a single row in fitness_options.csv.
    """
    labels = ["Metabolism (KCal/Kg/day)","Goal weight (Kg)","History (days)"]
    attributes = ["met","weight","history"]
    types=[MyFloat,MyFloat,MyInt]
    values=[18.,77.,30]
    def __init__(self,parentDialog):
        Dialog.__init__(self,parentDialog)
        try:
            self.load()
        except IOError:
            # First run: no options file yet, start from the defaults.
            self.newvalues()
    def save(self):
        """Write the current option values as one csv row."""
        f = open("fitness_options.csv","wb")
        csv.writer(f).writerow([self.__getattribute__(attr) for attr in self.attributes])
        f.close()
    def load(self):
        """Read the option values back from disk.

        Raises IOError when the file does not exist; returns True otherwise.
        (The old "if not f: return False" guard was unreachable - open()
        raises instead of returning a false value - and has been removed.)
        """
        f = open("fitness_options.csv","rb")
        r = csv.reader(f)
        for row in r:
            for i,value in enumerate(row):
                value=self.types[i](value)
                self.__setattr__(self.attributes[i],value)
        f.close()
        return True
class DateObj(Dialog):
    """An object holding information assigned to a specific date, such as a
    weight measurement, food eaten or a physical activity.  Several objects
    may share the same date.
    """
    def __cmp__(self,other):
        """Order DateObjs chronologically; DateObjList's sort relies on it."""
        return cmp(self.date,other.date)
    def save(self,w):
        """Append this object's attribute values as one row of csv writer w."""
        row = [self.__getattribute__(attr) for attr in self.attributes]
        w.writerow(row)
    def load(self,row):
        """Restore the attributes from one csv row, casting each field to
        its declared type."""
        for i,raw in enumerate(row):
            self.__setattr__(self.attributes[i],self.types[i](raw))
    def run(self,parent_window):
        Dialog.run(self,parent_window)
        # Skip past the date field: the user usually edits the next entry.
        self.entries[1].grab_focus()
        #self.parent_window.hide()
class Weight(DateObj):
    """Single weight entry"""
    # Field layout consumed by the generic Dialog/DateObj machinery:
    labels = ["Date","Weight"]
    attributes = ["date","weight"]
    types=[Date,MyFloat]
    values=["today",0.] # use latest value
class Cal(DateObj):
    """Single cal entry"""
    labels = ["Date","Desc","Quantity","Unit","Cal/Unit"]
    attributes = ["date","desc","quant","unit","calunit"]
    # NOTE: `types` is supplied by the subclasses (PA/Food), which choose
    # their own Completion/Unit classes.
    values = ["today","", 0., "", 0.]
    def cals(self):
        """Total calories of this entry: quantity times calories per unit."""
        return self.quant * self.calunit
class PA(Cal):
    # Physical-activity entry: same fields as Cal with PA-specific types.
    types=[Date,Completion,MyFloat,PAUnit,MyFloat]
class Food(Cal):
    # Food entry: same fields as Cal with food-specific types.
    types=[Date,Completion,MyFloat,FoodUnit,MyFloat]
| Python |
# -*- coding: utf-8 -*-
import pygame #@UnresolvedImport
from pygame.locals import *
from vec2d import vec2d
from enemy import Enemy
from enemy import BossBlackfiskEnemy
from enemy import BossTaggfiskEnemy
from player import PlayerShip
from shots import BaseShot
from powerup import Powerup
import random
import sys
import time
from powerup import Powerup
from creep import Creep
import os
class Game(object):
    """Main application object: intro screen, game loop and end screens."""
    SCREEN_SIZE = SCREEN_WIDTH, SCREEN_HEIGHT = 640, 480
    SCORE_RECT = pygame.Rect(10, 10, 150, 60)
    def __init__(self):
        """Start the game.
        Load sounds and graphics.
        """
        pygame.init()
        self.screen = pygame.display.set_mode(self.SCREEN_SIZE, 0, 32)
        pygame.display.set_caption('Fishwars > Intro')
        # Load sounds
        pygame.mixer.music.load(os.path.join('sound','soundtrack.mp3'))
        # Volume is given between 0 and 1
        pygame.mixer.music.set_volume(0.5)
        pygame.mixer.music.play(-1)
        self.coin = pygame.mixer.Sound(os.path.join('sound','102_coin.wav'))
        self.coin.set_volume(0.3)
        self.bubble = pygame.mixer.Sound(os.path.join('sound','bubbla_liten.wav'))
        # Load graphics
        self.clock = pygame.time.Clock()
        self.my_font = pygame.font.SysFont('arial', 20)
        self.intropic = pygame.image.load(os.path.join('images','intro.png'))
        self.background_image = pygame.image.load(os.path.join('images','sea-background.jpg'))
        self.foreground_image = pygame.image.load(os.path.join('images','foreground.png'))
        self.fore_foreground_image = pygame.image.load(os.path.join('images','fore_foreground.png'))
    def quit(self):
        """Quit the game."""
        pygame.quit()
        sys.exit()
    def initialize(self):
        """Create all objects and reset the backgrounds etc."""
        pygame.display.set_caption('Fishwars > Gogo! ^^')
        # Game settings
        self.paused = False
        self.player_score = 0
        # Create sprite groups for the enemies
        self.enemy_grp = pygame.sprite.Group()
        self.bonus_enemy_grp = pygame.sprite.Group()
        self.spawn_bonus_monsterfiskar()
        self.creeps = pygame.sprite.Group()
        # Create the player
        self.player = PlayerShip(self.screen)
        self.player_grp = pygame.sprite.Group()
        # Used for collision handling
        self.player_grp.add(self.player)
        # Powerups!
        self.powerup_grp = pygame.sprite.Group()
        font = pygame.font.SysFont('Arial Black', 80)
        self.gameOverImage = font.render("GAME OVER", True, (255,0,0))
        self.woo = font.render("YOU WIN! :)", True, (0,255,0))
        self.wooo = font.render("WELL DONE!", True, (0,255,0))
        mindre_font = pygame.font.SysFont('Arial Black', 25)
        self.gameOverRetry = mindre_font.render("Back to main menu? [y/n]", True, (255,0,0))
        # Reset all background scroll offsets
        self.bg_x = 0
        self.fg_x = 0
        self.ffg_x = 0
    def spawn_bonus_monsterfiskar(self):
        """Spawn the five fish that powerups can be earned from!
        They come in a row with 50 px between each.
        """
        random_y = random.randint(0,self.SCREEN_HEIGHT-100)
        x = self.SCREEN_WIDTH
        for i in range(5):
            self.bonus_enemy_grp.add(Enemy(screen=self.screen,
                        img_filename=os.path.join('images','monsterfisk.png'),
                        init_position=(x, random_y)
                        ))
            # Next enemy slightly to the side
            x += 50
    def spawn_monster(self, antal, pref=None, lila=None):
        """Spawn regular mobs.
        @param antal: How many mobs
        @param pref: Which type (index into the image list)
        @param lila: When set, spawn the purple taggfisk instead
        """
        x = self.SCREEN_WIDTH
        all_monsters = [os.path.join('images','dygaddaa.png'),
                        os.path.join('images','taggfisk.png'),
                        os.path.join('images','monsterfisk.png'),
                        os.path.join('images','taggfisk-blue.png'),
                        os.path.join('images','taggfisk-green.png')]
        if pref:
            img = all_monsters[pref]
        elif lila:
            img = os.path.join('images','taggfisk-lila.png')
        else:
            img = random.choice(all_monsters)
        for i in range(antal):
            random_y = random.randint(0,self.SCREEN_HEIGHT-100)
            temp = Enemy(screen=self.screen,
                        img_filename=img,
                        init_position=(x, random_y)
                        )
            temp.set_speed(random.randint(-7, -3))
            self.enemy_grp.add(temp)
            # Next enemy slightly to the side
            x += 60
    def spawn_creeps(self):
        """Spawn one bouncing creep.
        Random direction at initialization.
        """
        random_y = random.randint(0,self.SCREEN_HEIGHT-100)
        self.creeps.add(Creep(self.screen,
                        os.path.join('images','new_creep.png'),
                        # Position
                        ( self.SCREEN_WIDTH, random_y),
                        # Direction
                        ( random.choice([-1, -0.7]),
                          random.choice([-0.7, 0, 0.7])),
                        # Speed
                        0.1))
    def draw_instructions(self):
        """Draw the game's instructions.
        Toggled with the space key on the start screen.
        """
        INSTR_RECT = pygame.Rect(20, 240, 200, 205)
        # Draw the white transparent box
        transparent_box = pygame.Surface((INSTR_RECT.w, INSTR_RECT.h))
        transparent_box.fill(pygame.Color(255, 255, 255))
        transparent_box.set_alpha(50)
        self.screen.blit(transparent_box, INSTR_RECT)
        # Draw the instructions
        my_font = pygame.font.SysFont('arial', 20)
        INSTR_RECT = INSTR_RECT.move(10, 5)
        instructions = ['Instructions:',
                        '[w] up',
                        '[a] left',
                        '[s] right',
                        '[d] down',
                        '[Enter] shoot',
                        '[Space] pause',
                        '[Escape] exit']
        for instruction in instructions:
            msg = my_font.render(instruction, True, pygame.Color('white'))
            self.screen.blit(msg, INSTR_RECT)
            # Move the position down for the next instruction
            INSTR_RECT = INSTR_RECT.move(0, msg.get_height())
    def draw_rimmed_box(self, box_rect, box_color,
                        rim_width=0,
                        rim_color=pygame.Color('black')):
        """ Draw a rimmed box. The rim is drawn outside the box."""
        if rim_width:
            # Compute the rim's edges
            rim_rect = pygame.Rect(box_rect.left - rim_width,
                        box_rect.top - rim_width,
                        box_rect.width + rim_width * 2,
                        box_rect.height + rim_width * 2)
            # And draw the rim
            pygame.draw.rect(self.screen, rim_color, rim_rect)
        # Then draw the box itself
        pygame.draw.rect(self.screen, box_color, box_rect)
    def draw_score(self):
        """Draw the player's score in the upper-left corner."""
        score = 'Score: ' + str(self.player_score)
        score_msg = self.my_font.render(score, True, pygame.Color('white'))
        self.screen.blit(score_msg, self.SCORE_RECT)
    def intro(self):
        """The game's intro loop.
        Plays music. Shows the logo; pressing space shows the
        instructions. Enter starts the game!
        """
        show_instructions = False
        while True:
            # Cap the game at 30 FPS
            time_passed = self.clock.tick(30)
            # Check the player's input
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.quit()
                elif event.type == pygame.KEYDOWN:
                    if event.key == pygame.locals.K_ESCAPE:
                        self.quit()
                    elif event.key == pygame.K_SPACE:
                        show_instructions = not show_instructions
                    elif event.key == pygame.locals.K_RETURN:
                        self.coin.play()
                        # The sound plays slowly.. so wait a little
                        time.sleep(1)
                        self.initialize()
                        self.run()
            # Draw the intro image
            self.screen.blit(self.intropic, (0, 0))
            # If the player wants to see the instructions, draw them on top
            if show_instructions:
                self.draw_instructions()
            # Update the screen last
            pygame.display.flip()
    def run(self):
        """The game's main loop."""
        timer = 0
        bosstimer = 0
        creep_timer = 0
        self.bosstime = False
        self.bonus_active = False
        self.bonus_done = False
        self.big_bad_boss = None
        self.phase_two = False
        self.second_boss = False
        self.other_boss = None
        bonus_timer = 0
        while True:
            # Limit frame speed to 30 FPS
            time_passed = self.clock.tick(30)
            timer += time_passed
            bosstimer += time_passed
            bonus_timer += time_passed
            creep_timer += time_passed
            # Check the game timers
            # TODO: something cleaner without all these if statements!
            # Make random monsters every second!
            if(timer>1000 and not self.bosstime and not self.second_boss):
                r = random.randint(0,100)
                # Chance that a new monster spawns every second!
                if(r<65):
                    self.spawn_monster(1)
                timer = 0
            # Bonus after X seconds
            if(bonus_timer>13000 and not self.player.power==5 and not self.bosstime):
                self.spawn_bonus_monsterfiskar();
                bonus_timer = 0
            # BOSS AFTER 20 SECONDS
            if(bosstimer>20000 and not self.bosstime and not self.phase_two):
                y = 50
                x = self.SCREEN_WIDTH
                self.bosstime = True
                self.big_bad_boss = BossTaggfiskEnemy(screen=self.screen,
                        img_filename=os.path.join('images','taggfisk-mindre.png'),
                        init_position=(x, y))
                self.enemy_grp.add(self.big_bad_boss)
                bosstimer = 0
            # IF BOSS MAKE EXTRA MOBS
            if(timer>1000 and self.bosstime):
                r = random.randint(0,100)
                if(r<25):
                    self.spawn_monster(1, 1)
                timer = 0
            # CHECK IF BOSS DEAD
            if(self.big_bad_boss and self.big_bad_boss.is_dead() and self.bosstime):
                self.bosstime = False
                self.phase_two = True
                bosstimer = 0
            # And x seconds after the first boss dies the next one arrives!
            if bosstimer > 20000 and self.phase_two and not self.second_boss:
                self.second_boss = True
                y = 50
                x = self.SCREEN_WIDTH
                self.other_boss = BossBlackfiskEnemy(screen=self.screen,
                        img_filename=os.path.join('images','bigger_blackfisk.png'),
                        init_position=(x, y))
                self.enemy_grp.add(self.other_boss)
            if creep_timer>1000 and self.phase_two and not self.second_boss:
                r = random.randint(0,100)
                # 25% chance that a new monster spawns every second!
                if(r<25):
                    self.spawn_creeps()
                creep_timer = 0
                r_lila = random.randint(0, 100)
                if(r_lila<5):
                    self.spawn_monster(1, lila=True)
            if self.other_boss:
                if self.other_boss.is_dead():
                    self.endloop()
            # CHECK PLAYER INPUT ----------------------------------------
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.quit()
                elif event.type == pygame.KEYDOWN:
                    # If Escape - quit the game
                    if event.key == pygame.locals.K_ESCAPE:
                        self.quit()
                    # Ship movement
                    elif event.key == pygame.locals.K_a:
                        self.player.x = -6
                    elif event.key == pygame.locals.K_d:
                        self.player.x = 6
                    elif event.key == pygame.locals.K_w:
                        self.player.y = -6
                    elif event.key == pygame.locals.K_s:
                        self.player.y = 6
                    # If Return - play a sound and fire the weapon!
                    elif event.key == pygame.locals.K_RETURN:
                        self.bubble.play()
                        self.player.fire_weapon()
                # On key release - stop moving the ship
                elif event.type == pygame.locals.KEYUP:
                    if event.key == pygame.locals.K_a:
                        self.player.x = 0
                    elif event.key == pygame.locals.K_d:
                        self.player.x = 0
                    elif event.key == pygame.locals.K_w:
                        self.player.y = 0
                    elif event.key == pygame.locals.K_s:
                        self.player.y = 0
            # UPDATE ALL OBJECTS -------------------------------------------
            self.player_grp.update()
            self.player.playershots_grp.update()
            self.powerup_grp.update(time_passed)
            self.bonus_enemy_grp.update(time_passed)
            self.enemy_grp.update(time_passed)
            if self.big_bad_boss:
                self.big_bad_boss.shot_grp.update(time_passed)
            if self.other_boss:
                self.other_boss.shot_grp.update(time_passed)
            for creep in self.creeps:
                creep.update(time_passed)
            # Scroll the backgrounds x pixels left per update
            self.bg_x -= 0.5
            self.fg_x -= 2
            self.ffg_x -= 2.5
            # If the whole background has scrolled past, start over
            # TODO: does not seem to work!
            if(self.bg_x==-4167):
                print "change bg"
                self.bg_x = 0
            if(self.fg_x==-8000):
                print "change fg"
                self.fg_x = 0
            # CHECK COLLISIONS ----------------------------------------
            for hit in pygame.sprite.groupcollide(self.player_grp, self.enemy_grp, 1, 1):
                self.game_over()
            for hit in pygame.sprite.groupcollide(self.player_grp, self.bonus_enemy_grp, 1, 1):
                self.game_over()
            for hit in pygame.sprite.groupcollide(self.player_grp, self.creeps, 1, 1):
                self.game_over()
            if self.big_bad_boss:
                for hit in pygame.sprite.groupcollide(self.player_grp, self.big_bad_boss.shot_grp, 1, 1):
                    self.game_over()
            if self.other_boss:
                for hit in pygame.sprite.groupcollide(self.player_grp, self.other_boss.shot_grp, 1, 1):
                    self.game_over()
            # Check enemy / player-shot collisions
            for enemy in pygame.sprite.groupcollide(self.enemy_grp, self.player.playershots_grp, 0, 1):
                self.player_score += 1000
                enemy.decrease_health(1)
            for enemy in pygame.sprite.groupcollide(self.creeps, self.player.playershots_grp, 0, 1):
                self.player_score += 1000
                enemy.decrease_health(1)
            # Check bonus-enemy / player-shot collisions
            for enemy in pygame.sprite.groupcollide(self.bonus_enemy_grp, self.player.playershots_grp, 0, 1):
                # Give the player points
                self.player_score += 1000
                # And damage the enemy
                enemy.decrease_health(1)
                if len(self.bonus_enemy_grp)==1 and not self.bonus_active:
                    self.bonus_active = True
                    self.powerup_grp.add(Powerup(self.screen, (enemy.rect.x, enemy.rect.y)))
            # Check powerup / player collisions
            for powerup in pygame.sprite.groupcollide(self.powerup_grp, self.player_grp, 1, 0):
                powerup.powerup_sound.play()
                self.bonus_active = False
                self.player.power += 2
            # DRAW ALL OBJECTS ------------------------------------------------
            # The background must be drawn first = furthest back
            self.screen.blit(self.background_image, (self.bg_x, 0))
            self.screen.blit(self.foreground_image, (self.fg_x, 0))
            self.player_grp.draw(self.screen)
            self.powerup_grp.draw(self.screen)
            for creep in self.creeps:
                creep.draw()
            for enemy in self.bonus_enemy_grp:
                enemy.draw()
            for enemy in self.enemy_grp:
                enemy.draw()
            if self.big_bad_boss:
                for shot in self.big_bad_boss.shot_grp:
                    shot.rita()
            if self.other_boss:
                for shot in self.other_boss.shot_grp:
                    shot.rita()
            self.player.playershots_grp.draw(self.screen)
            # Plants in the foreground! ^__^
            self.screen.blit(self.fore_foreground_image, (self.ffg_x, 0))
            self.draw_score()
            # BOUNDARY TEST
            #self.player.show_boundary()
            #for shot in self.playershots_grp:
            #    shot.show_boundary()
            #for enemy in self.enemy_grp:
            #    enemy.show_boundary()
            # Flip the display
            pygame.display.flip()
    def game_over(self):
        """Game over loop."""
        while True:
            # Check player inputs
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.quit()
                elif event.type == pygame.KEYDOWN:
                    # If Escape - quit the game
                    if event.key == pygame.locals.K_ESCAPE:
                        self.quit()
                    elif event.key == pygame.locals.K_y:
                        self.intro()
                    elif event.key == pygame.locals.K_n:
                        self.quit()
            self.screen.blit(self.gameOverImage, (45,170))
            self.screen.blit(self.gameOverRetry, (135,280))
            pygame.display.flip()
    def endloop(self):
        """Victory (end of game) loop."""
        while True:
            # Check player inputs
            for event in pygame.event.get():
                if event.type == pygame.QUIT:
                    self.quit()
                elif event.type == pygame.KEYDOWN:
                    # If Escape - quit the game
                    if event.key == pygame.locals.K_ESCAPE:
                        self.quit()
                    elif event.key == pygame.locals.K_y:
                        self.intro()
                    elif event.key == pygame.locals.K_n:
                        self.quit()
            self.screen.blit(self.woo, (45,100))
            self.screen.blit(self.wooo, (45,190))
            pygame.display.flip()
def main():
    """Create the game object and enter the intro screen."""
    Game().intro()
if __name__ == "__main__":
    main()
########################################################################
import operator
import math
class vec2d(object):
"""2d vector class, supports vector and scalar operators,
and also provides a bunch of high level functions
"""
__slots__ = ['x', 'y']
def __init__(self, x_or_pair, y = None):
if y == None:
self.x = x_or_pair[0]
self.y = x_or_pair[1]
else:
self.x = x_or_pair
self.y = y
def __len__(self):
return 2
def __getitem__(self, key):
if key == 0:
return self.x
elif key == 1:
return self.y
else:
raise IndexError("Invalid subscript "+str(key)+" to vec2d")
def __setitem__(self, key, value):
if key == 0:
self.x = value
elif key == 1:
self.y = value
else:
raise IndexError("Invalid subscript "+str(key)+" to vec2d")
# String representaion (for debugging)
def __repr__(self):
return 'vec2d(%s, %s)' % (self.x, self.y)
# Comparison
def __eq__(self, other):
if hasattr(other, "__getitem__") and len(other) == 2:
return self.x == other[0] and self.y == other[1]
else:
return False
def __ne__(self, other):
if hasattr(other, "__getitem__") and len(other) == 2:
return self.x != other[0] or self.y != other[1]
else:
return True
def __nonzero__(self):
return self.x or self.y
# Generic operator handlers
def _o2(self, other, f):
"Any two-operator operation where the left operand is a vec2d"
if isinstance(other, vec2d):
return vec2d(f(self.x, other.x),
f(self.y, other.y))
elif (hasattr(other, "__getitem__")):
return vec2d(f(self.x, other[0]),
f(self.y, other[1]))
else:
return vec2d(f(self.x, other),
f(self.y, other))
def _r_o2(self, other, f):
"Any two-operator operation where the right operand is a vec2d"
if (hasattr(other, "__getitem__")):
return vec2d(f(other[0], self.x),
f(other[1], self.y))
else:
return vec2d(f(other, self.x),
f(other, self.y))
def _io(self, other, f):
"inplace operator"
if (hasattr(other, "__getitem__")):
self.x = f(self.x, other[0])
self.y = f(self.y, other[1])
else:
self.x = f(self.x, other)
self.y = f(self.y, other)
return self
# Addition
def __add__(self, other):
if isinstance(other, vec2d):
return vec2d(self.x + other.x, self.y + other.y)
elif hasattr(other, "__getitem__"):
return vec2d(self.x + other[0], self.y + other[1])
else:
return vec2d(self.x + other, self.y + other)
__radd__ = __add__
def __iadd__(self, other):
if isinstance(other, vec2d):
self.x += other.x
self.y += other.y
elif hasattr(other, "__getitem__"):
self.x += other[0]
self.y += other[1]
else:
self.x += other
self.y += other
return self
# Subtraction
def __sub__(self, other):
if isinstance(other, vec2d):
return vec2d(self.x - other.x, self.y - other.y)
elif (hasattr(other, "__getitem__")):
return vec2d(self.x - other[0], self.y - other[1])
else:
return vec2d(self.x - other, self.y - other)
def __rsub__(self, other):
if isinstance(other, vec2d):
return vec2d(other.x - self.x, other.y - self.y)
if (hasattr(other, "__getitem__")):
return vec2d(other[0] - self.x, other[1] - self.y)
else:
return vec2d(other - self.x, other - self.y)
def __isub__(self, other):
if isinstance(other, vec2d):
self.x -= other.x
self.y -= other.y
elif (hasattr(other, "__getitem__")):
self.x -= other[0]
self.y -= other[1]
else:
self.x -= other
self.y -= other
return self
# Multiplication
def __mul__(self, other):
if isinstance(other, vec2d):
return vec2d(self.x*other.x, self.y*other.y)
if (hasattr(other, "__getitem__")):
return vec2d(self.x*other[0], self.y*other[1])
else:
return vec2d(self.x*other, self.y*other)
__rmul__ = __mul__
def __imul__(self, other):
if isinstance(other, vec2d):
self.x *= other.x
self.y *= other.y
elif (hasattr(other, "__getitem__")):
self.x *= other[0]
self.y *= other[1]
else:
self.x *= other
self.y *= other
return self
# Division
# NOTE(review): operator.div only exists in Python 2. On Python 3 the
# __div__/__rdiv__/__idiv__ trio is never invoked by the interpreter and
# would raise AttributeError if called directly — confirm target version.
def __div__(self, other):
    """Classic (Python 2) division, component-wise via _o2."""
    return self._o2(other, operator.div)
def __rdiv__(self, other):
    """Reflected classic division: other / self."""
    return self._r_o2(other, operator.div)
def __idiv__(self, other):
    """In-place classic division."""
    return self._io(other, operator.div)
def __floordiv__(self, other):
    """Component-wise floor division."""
    return self._o2(other, operator.floordiv)
def __rfloordiv__(self, other):
    """Reflected floor division: other // self."""
    return self._r_o2(other, operator.floordiv)
def __ifloordiv__(self, other):
    """In-place floor division."""
    return self._io(other, operator.floordiv)
def __truediv__(self, other):
    """Component-wise true (float) division via _o2."""
    return self._o2(other, operator.truediv)
def __rtruediv__(self, other):
    """Reflected true division: other / self."""
    return self._r_o2(other, operator.truediv)
def __itruediv__(self, other):
    """In-place true division; mutates self and returns it.

    BUG FIX: previously delegated to operator.floordiv, so `v /= x`
    silently truncated to floor division. Now uses operator.truediv,
    consistent with __truediv__/__rtruediv__.
    """
    return self._io(other, operator.truediv)
# Modulo
def __mod__(self, other):
    """Component-wise modulo via _o2."""
    return self._o2(other, operator.mod)
def __rmod__(self, other):
    """Reflected modulo: other % self."""
    return self._r_o2(other, operator.mod)
def __divmod__(self, other):
    """Component-wise divmod.

    BUG FIX: the old code referenced operator.divmod, which does not
    exist in any Python version (hence the former @UndefinedVariable
    markers) and raised AttributeError at call time. The builtin divmod
    is the correct callable.
    """
    return self._o2(other, divmod)
def __rdivmod__(self, other):
    """Reflected component-wise divmod: divmod(other, self)."""
    return self._r_o2(other, divmod)
# Exponentation
def __pow__(self, other):
    """Component-wise exponentiation via _o2."""
    return self._o2(other, operator.pow)
def __rpow__(self, other):
    """Reflected exponentiation: other ** self."""
    return self._r_o2(other, operator.pow)
# Bitwise operators
def __lshift__(self, other):
    """Component-wise left shift."""
    return self._o2(other, operator.lshift)
def __rlshift__(self, other):
    """Reflected left shift: other << self."""
    return self._r_o2(other, operator.lshift)
def __rshift__(self, other):
    """Component-wise right shift."""
    return self._o2(other, operator.rshift)
def __rrshift__(self, other):
    """Reflected right shift: other >> self."""
    return self._r_o2(other, operator.rshift)
def __and__(self, other):
    """Component-wise bitwise AND."""
    return self._o2(other, operator.and_)
__rand__ = __and__  # AND is commutative, so the forward form is reused
def __or__(self, other):
    """Component-wise bitwise OR."""
    return self._o2(other, operator.or_)
__ror__ = __or__  # OR is commutative
def __xor__(self, other):
    """Component-wise bitwise XOR."""
    return self._o2(other, operator.xor)
__rxor__ = __xor__  # XOR is commutative
# Unary operations
def __neg__(self):
    """Return a new vector with both components negated."""
    return vec2d(operator.neg(self.x), operator.neg(self.y))
def __pos__(self):
    """Return a new vector with unary plus applied to both components."""
    return vec2d(operator.pos(self.x), operator.pos(self.y))
def __abs__(self):
    """Return a new vector with the absolute value of each component."""
    return vec2d(abs(self.x), abs(self.y))
def __invert__(self):
    """~v is defined here as the negated vector (same result as -v)."""
    return vec2d(-self.x, -self.y)
# vectory functions
def get_length_sqrd(self):
    """Return the squared magnitude (avoids the sqrt of get_length)."""
    return self.x**2 + self.y**2
def get_length(self):
    """Return the Euclidean magnitude of the vector."""
    return math.sqrt(self.x**2 + self.y**2)
def __setlength(self, value):
    """Scale the vector so its magnitude becomes `value`.

    NOTE(review): raises ZeroDivisionError on a zero-length vector.
    """
    length = self.get_length()
    self.x *= value/length
    self.y *= value/length
length = property(get_length, __setlength, None, "gets or sets the magnitude of the vector")
def rotate(self, angle_degrees):
    """Rotate this vector in place by angle_degrees (counter-clockwise)."""
    theta = math.radians(angle_degrees)
    c, s = math.cos(theta), math.sin(theta)
    # Tuple assignment evaluates the RHS first, so both components use
    # the pre-rotation values.
    self.x, self.y = self.x * c - self.y * s, self.x * s + self.y * c
def rotated(self, angle_degrees):
    """Return a new vector equal to this one rotated by angle_degrees."""
    theta = math.radians(angle_degrees)
    c, s = math.cos(theta), math.sin(theta)
    return vec2d(self.x * c - self.y * s, self.x * s + self.y * c)
def get_angle(self):
    """Return the vector's angle in degrees (0 for the zero vector)."""
    if (self.get_length_sqrd() == 0):
        return 0
    return math.degrees(math.atan2(self.y, self.x))
def __setangle(self, angle_degrees):
    """Keep the current magnitude but point the vector at angle_degrees."""
    self.x = self.length
    self.y = 0
    self.rotate(angle_degrees)
angle = property(get_angle, __setangle, None, "gets or sets the angle of a vector")
def get_angle_between(self, other):
    """Return the signed angle in degrees from self to other (any 2-sequence)."""
    cross = self.x*other[1] - self.y*other[0]
    dot = self.x*other[0] + self.y*other[1]
    return math.degrees(math.atan2(cross, dot))
def normalized(self):
    """Return a unit-length copy (or a plain copy if zero-length)."""
    length = self.length
    if length != 0:
        return self/length
    return vec2d(self)
def normalize_return_length(self):
    """Normalize in place and return the previous magnitude."""
    length = self.length
    if length != 0:
        self.x /= length
        self.y /= length
    return length
def perpendicular(self):
    """Return this vector rotated 90 degrees counter-clockwise."""
    return vec2d(-self.y, self.x)
def perpendicular_normal(self):
    """Return a unit-length perpendicular (or a copy of self if zero-length)."""
    length = self.length
    if length != 0:
        return vec2d(-self.y/length, self.x/length)
    return vec2d(self)
def dot(self, other):
    """Return the dot product with any 2-sequence, as a float."""
    return float(self.x*other[0] + self.y*other[1])
def get_distance(self, other):
    """Return the Euclidean distance to another point/vector."""
    return math.sqrt((self.x - other[0])**2 + (self.y - other[1])**2)
def get_dist_sqrd(self, other):
    """Return the squared distance (cheaper than get_distance)."""
    return (self.x - other[0])**2 + (self.y - other[1])**2
def projection(self, other):
    """Project self onto `other`.

    NOTE(review): divides by other's squared length, so a zero `other`
    raises ZeroDivisionError.
    """
    other_length_sqrd = other[0]*other[0] + other[1]*other[1]
    projected_length_times_other_length = self.dot(other)
    return other*(projected_length_times_other_length/other_length_sqrd)
def cross(self, other):
    """Return the 2D cross product (z-component of the 3D cross product)."""
    return self.x*other[1] - self.y*other[0]
def interpolate_to(self, other, range):
    """Linear interpolation: self + (other - self) * range.

    NOTE(review): the `range` parameter shadows the builtin of the same
    name inside this method.
    """
    return vec2d(self.x + (other[0] - self.x)*range, self.y + (other[1] - self.y)*range)
def convert_to_basis(self, x_vector, y_vector):
    """Express self in the (x_vector, y_vector) basis.

    NOTE(review): the formula is only valid for orthogonal basis
    vectors — confirm callers never pass a skewed basis.
    """
    return vec2d(self.dot(x_vector)/x_vector.get_length_sqrd(), self.dot(y_vector)/y_vector.get_length_sqrd())
def __getstate__(self):
    """Pickle support: serialize as a plain [x, y] list."""
    return [self.x, self.y]
def __setstate__(self, dict):
    """Pickle support: restore x and y from the serialized pair.

    NOTE(review): the parameter name shadows the builtin `dict`.
    """
    self.x, self.y = dict
########################################################################
## Unit Testing ##
########################################################################
# Self-test harness: runs only when this module is executed directly.
if __name__ == "__main__":
    import unittest
    import pickle
    ####################################################################
    class UnitTestVec2D(unittest.TestCase):
        """Regression tests for vec2d (written against the Python 2 era
        unittest API: assert_/assertEquals)."""
        def setUp(self):
            pass
        def testCreationAndAccess(self):
            # Attribute and index access are interchangeable.
            v = vec2d(111,222)
            self.assert_(v.x == 111 and v.y == 222)
            v.x = 333
            v[1] = 444
            self.assert_(v[0] == 333 and v[1] == 444)
        def testMath(self):
            # Forward arithmetic against scalars, sequences and vec2d.
            v = vec2d(111,222)
            self.assertEqual(v + 1, vec2d(112,223))
            self.assert_(v - 2 == [109,220])
            self.assert_(v * 3 == (333,666))
            self.assert_(v / 2.0 == vec2d(55.5, 111))
            self.assert_(v / 2 == (55, 111))
            self.assert_(v ** vec2d(2,3) == [12321, 10941048])
            self.assert_(v + [-11, 78] == vec2d(100, 300))
            self.assert_(v / [11,2] == [10,111])
        def testReverseMath(self):
            # Reflected operators (scalar/sequence on the left).
            v = vec2d(111,222)
            self.assert_(1 + v == vec2d(112,223))
            self.assert_(2 - v == [-109,-220])
            self.assert_(3 * v == (333,666))
            self.assert_([222,999] / v == [2,4])
            self.assert_([111,222] ** vec2d(2,3) == [12321, 10941048])
            self.assert_([-11, 78] + v == vec2d(100, 300))
        def testUnary(self):
            v = vec2d(111,222)
            v = -v
            self.assert_(v == [-111,-222])
            v = abs(v)
            self.assert_(v == [111,222])
        def testLength(self):
            # Magnitude getter/setter and normalization on a 3-4-5 triangle.
            v = vec2d(3,4)
            self.assert_(v.length == 5)
            self.assert_(v.get_length_sqrd() == 25)
            self.assert_(v.normalize_return_length() == 5)
            self.assert_(v.length == 1)
            v.length = 5
            self.assert_(v == vec2d(3,4))
            v2 = vec2d(10, -2)
            self.assert_(v.get_distance(v2) == (v - v2).get_length())
        def testAngles(self):
            # Rotation and the angle property must agree.
            v = vec2d(0, 3)
            self.assertEquals(v.angle, 90)
            v2 = vec2d(v)
            v.rotate(-90)
            self.assertEqual(v.get_angle_between(v2), 90)
            v2.angle -= 90
            self.assertEqual(v.length, v2.length)
            self.assertEquals(v2.angle, 0)
            self.assertEqual(v2, [3, 0])
            self.assert_((v - v2).length < .00001)
            self.assertEqual(v.length, v2.length)
            v2.rotate(300)
            self.assertAlmostEquals(v.get_angle_between(v2), -60)
            v2.rotate(v2.get_angle_between(v))
            angle = v.get_angle_between(v2)
            self.assertAlmostEquals(v.get_angle_between(v2), 0)
        def testHighLevel(self):
            basis0 = vec2d(5.0, 0)
            basis1 = vec2d(0, .5)
            v = vec2d(10, 1)
            self.assert_(v.convert_to_basis(basis0, basis1) == [2, 2])
            self.assert_(v.projection(basis0) == (10, 0))
            self.assert_(basis0.dot(basis1) == 0)
        def testCross(self):
            lhs = vec2d(1, .5)
            rhs = vec2d(4,6)
            self.assert_(lhs.cross(rhs) == 4)
        def testComparison(self):
            # Equality works across vec2d, tuples and lists; mismatched
            # lengths and scalars compare unequal.
            int_vec = vec2d(3, -2)
            flt_vec = vec2d(3.0, -2.0)
            zero_vec = vec2d(0, 0)
            self.assert_(int_vec == flt_vec)
            self.assert_(int_vec != zero_vec)
            self.assert_((flt_vec == zero_vec) == False)
            self.assert_((flt_vec != int_vec) == False)
            self.assert_(int_vec == (3, -2))
            self.assert_(int_vec != [0, 0])
            self.assert_(int_vec != 5)
            self.assert_(int_vec != [3, -2, -5])
        def testInplace(self):
            # In-place operators must mutate the same object (keep identity)
            # and match the equivalent out-of-place expression.
            inplace_vec = vec2d(5, 13)
            inplace_ref = inplace_vec
            inplace_src = vec2d(inplace_vec)
            inplace_vec *= .5
            inplace_vec += .5
            inplace_vec /= (3, 6)
            inplace_vec += vec2d(-1, -1)
            alternate = (inplace_src*.5 + .5)/vec2d(3,6) + [-1, -1]
            self.assertEquals(inplace_vec, inplace_ref)
            self.assertEquals(inplace_vec, alternate)
        def testPickle(self):
            # Round-trip through pickle via __getstate__/__setstate__.
            testvec = vec2d(5, .3)
            testvec_str = pickle.dumps(testvec)
            loaded_vec = pickle.loads(testvec_str)
            self.assertEquals(testvec, loaded_vec)
    ####################################################################
    unittest.main()
########################################################################
# -*- coding: utf-8 -*-
import pygame #@UnresolvedImport
from vec2d import vec2d
import os
class Creep(pygame.sprite.Sprite):
    """Represents an enemy creep that wanders and bounces off the walls."""
    # Static: explosion sound shared by all creeps, loaded lazily once.
    explosion_sound = None
    def __init__(self, screen, img_filename, init_position,
                 init_direction, speed):
        """Create a new creep.

        @param screen: Surface the creep is drawn on.
        @param img_filename: image file for the creep.
        @param init_position: start position.
        @param init_direction: start direction.
        @param speed: speed in pixels/ms.
        """
        pygame.sprite.Sprite.__init__(self)
        if Creep.explosion_sound is None:
            # Load the sound only ONCE, into the class-level attribute.
            Creep.explosion_sound = pygame.mixer.Sound(os.path.join('sound','bomb_explosion.wav'))
        self.explosion_sound = Creep.explosion_sound
        self.explosion_sound.set_volume(0.2)
        self.health = 5
        self.state = Creep.ALIVE
        self.screen = screen
        self.speed = speed
        self.explosion_image = pygame.image.load(os.path.join('images','boom.png')).convert_alpha()
        self.explosion_timer = 0
        # The original (un-rotated) image.
        self.base_image = pygame.image.load(img_filename).convert_alpha()
        # The image that gets rotated each frame.
        self.image = self.base_image
        # Rect is needed for collision handling.
        self.rect = self.image.get_rect()
        # Start position, as a vector.
        self.pos = vec2d(init_position)
        # Start direction, as a normalized vector.
        self.direction = vec2d(init_direction).normalized()
    def is_alive(self):
        """True while the creep is alive or still exploding."""
        return self.state in (Creep.ALIVE, Creep.EXPLODING)
    def update(self, time_passed):
        """Update the creep.

        @param time_passed: time in ms since the previous update.
        """
        if self.state == Creep.ALIVE:
            # Point the sprite along its direction. rotate() takes a surface
            # and an angle; rotation is counter-clockwise, so negative angles
            # rotate clockwise. We use a negative angle because the y axis
            # is inverted in pygame.
            self.image = pygame.transform.rotate(
                self.base_image, -self.direction.angle)
            # Displacement = normalized direction * distance (speed * time).
            displacement = vec2d(
                self.direction.x * self.speed * time_passed,
                self.direction.y * self.speed * time_passed)
            # Apply the new position.
            self.pos += displacement
            # Refresh the rect for collision handling.
            self.rect = self.image.get_rect()
            self.rect.x = self.pos.x
            self.rect.y = self.pos.y
            # Bounce off the walls.
            self.image_w, self.image_h = self.image.get_size()
            # Shrink the screen bounds by the creep's width/height, so the
            # centered sprite bounces slightly before the screen edge.
            bounds_rect = self.screen.get_rect().inflate(
                -self.image_w, -self.image_h)
            # Outside the left edge?
            if self.pos.x < bounds_rect.left:
                # Clamp to the edge...
                self.pos.x = bounds_rect.left
                # ...and flip the direction vector.
                self.direction.x *= -1
            elif self.pos.x > bounds_rect.right:
                self.pos.x = bounds_rect.right
                self.direction.x *= -1
            elif self.pos.y < bounds_rect.top:
                self.pos.y = bounds_rect.top
                self.direction.y *= -1
            elif self.pos.y > bounds_rect.bottom:
                self.pos.y = bounds_rect.bottom
                self.direction.y *= -1
        elif self.state == Creep.EXPLODING:
            self.explosion_timer += time_passed
            if self.explosion_timer > 100:
                self.explosion_sound.play()
                self.state = Creep.DEAD
                self.kill()
        elif self.state == Creep.DEAD:
            pass
    def draw(self):
        """Draw the creep on the Surface given at construction time."""
        if self.state == Creep.ALIVE:
            # Center the sprite on self.pos, since the image changes size
            # as it rotates.
            draw_pos = self.image.get_rect().move(
                # x: shift left by half the image width
                self.pos.x - self.image_w / 2,
                # y: shift up by half the image height
                self.pos.y - self.image_h / 2)
            # Blit the creep image onto the screen, centered.
            self.screen.blit(self.image, draw_pos)
        elif self.state == Creep.EXPLODING:
            # Center the explosion image over the creep.
            draw_pos = self.explosion_image.get_rect().move(
                # x: offset by half the width difference
                self.rect.x - abs(((self.image.get_width()-self.explosion_image.get_width()) / 2)),
                # y: offset by half the height difference
                self.rect.y - abs(((self.image.get_height()-self.explosion_image.get_height()) / 2)))
            self.screen.blit(self.explosion_image, draw_pos)
        elif self.state == Creep.DEAD:
            pass
    def decrease_health(self, n):
        """Reduce health by n; explode when it runs out.

        BUG FIX: the check was `self.health == 0`, so a hit with n > 1
        could push health past zero without ever triggering the
        explosion. Use <= 0, matching Enemy.decrease_health.
        """
        self.health -= n
        if self.health <= 0:
            self.explode()
    def explode(self):
        """Switch to the EXPLODING state."""
        self.state = Creep.EXPLODING
    #----------- PRIVATE STATE --------------------------------#
    # The states a creep can be in:
    # ALIVE: happily moving around.
    # EXPLODING: brief phase just before death.
    # DEAD: dead and inactive.
    (ALIVE, EXPLODING, DEAD) = range(3)
| Python |
# -*- coding: utf-8 -*-
import pygame #@UnresolvedImport
from vec2d import vec2d
from shots import BaseShot
import os
class PlayerShip(pygame.sprite.Sprite):
    """The player's ship sprite."""
    def __init__(self, screen):
        """Set up the ship image, hitbox, movement state and shot group."""
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        # The ship image.
        self.image = pygame.image.load(os.path.join('images','mort.png')).convert_alpha()
        # Rect is needed for collision handling.
        self.rect = self.image.get_rect()
        self.rect.center = (100, 220)
        self.x = 0
        self.y = 0
        self.power = 1  # number of simultaneous shots: 1, 3 or 5
        # Collects the player's shots.
        self.playershots_grp = pygame.sprite.Group()
    def update(self):
        """Called once per game-loop iteration.

        Moves the ship by (x, y) and keeps it inside the screen bounds.
        """
        self.rect.move_ip(self.x, self.y)
        max_x = self.screen.get_width()
        max_y = self.screen.get_height()
        # Clamp horizontally so the ship cannot leave the screen.
        if self.rect.left < 0:
            self.rect.left = 0
        elif self.rect.right > max_x:
            self.rect.right = max_x
        # Clamp vertically.
        if self.rect.top < 0:
            self.rect.top = 0
        elif self.rect.bottom >= max_y:
            self.rect.bottom = max_y
    def show_boundary(self):
        """Draw a red outline around the ship's rect (debugging aid)."""
        pygame.draw.rect(self.screen, pygame.Color('Red'), self.rect, 1)
    def fire_weapon(self):
        """Fire the weapon.

        Starts with one bubble; the next power level fires three, then
        five, each with its own vertical velocity from the table below.
        """
        y_velocities = [0, 1.5, -1.5, 3, -3]
        start_x = self.rect.x + 35
        start_y = self.rect.y + 7
        x_velocity = 7
        for i in range(self.power):
            shot = BaseShot(self.screen, start_x, start_y,
                            x_velocity, y_velocities[i])
            self.playershots_grp.add(shot)
| Python |
# -*- coding: utf-8 -*-
import pygame #@UnresolvedImport
import random
from vec2d import vec2d
import os
#------------------------------------------------------------------------------
# BaseShot - Spelarens vanliga bubbelskott
#
#------------------------------------------------------------------------------
class BaseShot(pygame.sprite.Sprite):
    """Player basic shots (the bubble)."""
    def __init__(self, screen, init_x, init_y, dir_x=0, dir_y=0):
        """Constructor.

        init_x/init_y is the start position;
        dir_x/dir_y is direction-and-speed in pixels per update.
        """
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        # TODO! Use a static (class-level) image for the shot class.
        self.image = pygame.image.load(os.path.join('images','bubbla.png')).convert_alpha()
        self.rect = self.image.get_rect()
        self.rect.x = init_x
        self.rect.y = init_y
        self.dir_x = dir_x
        self.dir_y = dir_y
    def update(self):
        """Move the shot by its velocity.

        dir_x is the x-axis velocity; dir_y is the y-axis velocity.
        """
        self.rect.move_ip(self.dir_x, self.dir_y)
    def show_boundary(self):
        """Draw a red outline around the shot's rect (debugging aid)."""
        pygame.draw.rect(self.screen, pygame.Color('Red'), self.rect, 1)
#------------------------------------------------------------------------------
# BossShot
#
#
#------------------------------------------------------------------------------
class BossShot(BaseShot):
    """The boss's straight, flickering projectile."""
    def __init__(self, screen, init_x, init_y):
        # Call the parent constructor.
        BaseShot.__init__(self, screen, init_x, init_y)
        # Create a 16x16 rect as the hitbox.
        self.rect = pygame.Rect(init_x, init_y, 16, 16)
    def update(self, time_passed):
        # Moves 5 px to the left each update; time_passed is unused here.
        self.rect.move_ip(-5, 0)
    def rita(self):
        """For lazers! Draws a blinking ball.

        Changes color every update so it flickers.
        """
        r = random.randint(0, 255)
        g = random.randint(0, 255)
        b = random.randint(0, 255)
        # Draw the circle using the middle of the hitbox (the Rect) as
        # its center.
        # NOTE(review): arguments are passed as Color(r, b, g) -- green
        # and blue swapped relative to their names; harmless since all
        # three values are random, but confirm intent.
        pygame.draw.circle(self.screen, pygame.Color(r,b,g), (self.rect.x+8,self.rect.y+8), 8)
        # Test the hitbox:
        # pygame.draw.rect(self.screen, pygame.Color('Red'), self.rect, 1)
#------------------------------------------------------------------------------
# VektorShot
#
#
#------------------------------------------------------------------------------
class VektorShot(pygame.sprite.Sprite):
    """A shot that travels along an arbitrary (normalized) direction."""
    def __init__(self, screen, init_position, init_direction, speed):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        # Create a 16x16 rect for drawing and collision handling.
        init_x, init_y = init_position
        self.rect = pygame.Rect(init_x, init_y, 16, 16)
        # Start position, as a vector.
        self.pos = vec2d(init_position)
        # Start direction, as a normalized vector.
        self.direction = vec2d(init_direction).normalized()
        self.speed = speed
    def update(self, time_passed):
        # Displacement = normalized direction * distance (speed * time).
        displacement = vec2d(
            self.direction.x * self.speed * time_passed,
            self.direction.y * self.speed * time_passed)
        # Apply the new position.
        self.pos += displacement
        # Refresh the object's rect -- for collisions.
        self.rect.x = self.pos.x
        self.rect.y = self.pos.y
    def rita(self):
        """Draw a flickering circle (random color each frame)."""
        r = random.randint(0, 255)
        g = random.randint(0, 255)
        b = random.randint(0, 255)
        # Draw the circle with pos as its center -- i.e. using the middle
        # of the hitbox (the Rect) as origin.
        # NOTE(review): arguments passed as Color(r, b, g) -- green/blue
        # swapped; harmless since the values are random.
        pygame.draw.circle(self.screen, pygame.Color(r,b,g), (int(self.pos.x),int(self.pos.y)), 8)
| Python |
# -*- coding: utf-8 -*-
import pygame #@UnresolvedImport
from vec2d import vec2d
import random
from shots import BossShot
from shots import VektorShot
import os
#------------------------------------------------------------------------------------------------------
# Vanliga mobs.
#
#------------------------------------------------------------------------------------------------------
class Enemy(pygame.sprite.Sprite):
    """Represents a regular enemy."""
    # The states the enemy can be in, i.e. 0, 1 or 2.
    (ALIVE, EXPLODING, DEAD) = range(3)
    # Static: shared explosion sound, loaded lazily once.
    explosion_sound = None
    def __init__(self, screen, img_filename, init_position):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        # The original image.
        self.image = pygame.image.load(img_filename).convert_alpha()
        self.explosion_image = pygame.image.load(os.path.join('images','boom.png')).convert_alpha()
        if Enemy.explosion_sound is None:
            # Load the sound only ONCE, into the class-level attribute.
            Enemy.explosion_sound = pygame.mixer.Sound(os.path.join('sound','bomb_explosion.wav'))
        self.explosion_sound = Enemy.explosion_sound
        self.explosion_sound.set_volume(0.2)
        # Rect is needed for collision handling.
        self.rect = self.image.get_rect()
        self.rect.x, self.rect.y = init_position
        self.state = Enemy.ALIVE
        self.speed = -3  # negative: moves leftwards
        self.health = 3
        self.explosion_timer = 0
    def set_speed(self, new_speed):
        """Set the horizontal speed (negative = leftwards)."""
        self.speed = new_speed
    def decrease_health(self, amount):
        """Decrease health by the given amount.

        Starts exploding once health reaches zero (or below).
        """
        self.health -= amount
        if(self.health<=0):
            self.state = Enemy.EXPLODING
    def update(self, time_passed):
        """Called once per game-loop iteration.

        Moves the enemy, or advances the explosion timer. The explosion
        lasts 100 ms, after which the enemy is removed.
        """
        if self.state == Enemy.ALIVE:
            # Apply the new position.
            self.rect.move_ip(self.speed, 0)
            if(self.rect.x < -55):
                # Kill it once it has left the screen.
                self.kill()
        elif self.state == Enemy.EXPLODING:
            self.explosion_timer += time_passed
            if self.explosion_timer > 100:
                self.state = Enemy.DEAD
                self.explosion_sound.play()
                self.kill()
        elif self.state == Enemy.DEAD:
            self.kill()
    def show_boundary(self):
        """Draw a red outline around the enemy's rect (debugging aid)."""
        pygame.draw.rect(self.screen, pygame.Color('Red'), self.rect, 1)
    def draw(self):
        """Draw the enemy on the screen.

        Two rendering modes depending on state:
        ALIVE
        EXPLODING
        """
        if self.state == Enemy.ALIVE:
            self.screen.blit(self.image, self.rect)
        elif self.state == Enemy.EXPLODING:
            # Center the explosion image over the enemy,
            draw_pos = self.explosion_image.get_rect().move(
                # x: offset by half the width difference
                self.rect.x - abs(((self.image.get_width()-self.explosion_image.get_width()) / 2)),
                # y: offset by half the height difference
                self.rect.y - abs(((self.image.get_height()-self.explosion_image.get_height()) / 2)))
            self.screen.blit(self.explosion_image, draw_pos)
#------------------------------------------------------------------------------------------------------
# Taggfisk-boss.
#
#
#------------------------------------------------------------------------------------------------------
class BossTaggfiskEnemy(Enemy):
    """First boss (the "spike fish"). Inherits from the Enemy base class.

    Has a different hitbox: only hits on the mouth count.
    Fires BossShot projectiles (and diagonal VektorShots).
    Starts with 100 hp.
    """
    def __init__(self, screen, img_filename, init_position):
        Enemy.__init__(self, screen, img_filename, init_position)
        self.arriving = pygame.mixer.Sound(os.path.join('sound','boss_arrive.wav'))
        self.arriving.play()
        self.explosion_image_big = pygame.image.load(os.path.join('images','taggfisk-explosion.png')).convert_alpha()
        self.traffad_image = pygame.image.load(os.path.join('images','taggfisk-mindre_hit.png')).convert_alpha()
        self.stay = False
        self.health = 100
        # Keep a separate rect for drawing, and shrink self.rect into a
        # smaller hitbox (the mouth).
        self.draw_rect = self.rect
        self.rect = pygame.Rect(self.rect.x, self.rect.y+135, 20, 70)
        self.direction_y = 2
        self.ishurt = False
        self.ishurt_counter = 2
        self.hurtsound = pygame.mixer.Sound(os.path.join('sound','boss_hit.wav'))
        self.shoot_timer = 0
        self.vektor_shoot_timer = 0
        self.shot_grp = pygame.sprite.Group()
        self.vektorshot_grp = pygame.sprite.Group()
    def is_dead(self):
        """Return True once the boss has run out of health.

        BUG FIX: was `self.health == 0` -- a hit that took health below
        zero made this return False forever. Use <= 0, matching the
        threshold decrease_health itself uses.
        """
        return self.health <= 0
    def update(self, time_passed):
        """Drive the boss in to about 1/3 of the screen width, then make
        it patrol up and down while shooting.

        Behaviour depends on state: ALIVE / EXPLODING / DEAD.
        """
        if self.state == BossTaggfiskEnemy.ALIVE:
            if self.stay:
                # Patrol vertically, reversing at the top/bottom margins.
                if(self.draw_rect.y > self.screen.get_height()-370):
                    self.direction_y *= -1
                elif(self.draw_rect.y < 10):
                    self.direction_y *= -1
                self.draw_rect.move_ip(0,self.direction_y)
                self.rect.move_ip(0,self.direction_y)
            elif (self.draw_rect.x > 320):
                # Still driving in from the right.
                self.draw_rect.move_ip(-3,0)
                self.rect.move_ip(-3,0)
            else:
                self.stay = True
            # Advance the shot timers.
            self.shoot_timer += time_passed
            self.vektor_shoot_timer += time_passed
            # Every 700 ms the boss fires a blinking ball.
            if (self.shoot_timer > 700):
                self.shoot_timer = 0
                # Position is based on the boss's hitbox = top-left corner
                # of the mouth, moved down 25 px so the shot leaves from
                # the middle.
                self.shot_grp.add(BossShot(self.screen, self.rect.x, self.rect.y+25))
            # Sometimes it also fires diagonally!
            if (self.vektor_shoot_timer > 400):
                self.vektor_shoot_timer = 0
                self.shot_grp.add(VektorShot(self.screen,
                                             (self.rect.x, self.rect.y+25),
                                             (-1,
                                              random.uniform(-1,1)),
                                             0.2
                                             ))
        elif self.state == BossTaggfiskEnemy.EXPLODING:
            self.explosion_timer += time_passed
            if self.explosion_timer > 145:
                self.state = Enemy.DEAD
                self.explosion_sound.play()
                self.kill()
        elif self.state == BossTaggfiskEnemy.DEAD:
            self.kill()
    def decrease_health(self, amount):
        """Reduce health, play the hurt sound and flag the hit flash."""
        self.hurtsound.play()
        self.health -= amount
        self.ishurt = True
        if(self.health<=0):
            self.state = BossTaggfiskEnemy.EXPLODING
    def draw(self):
        """Draw the boss plus its health bar.

        Rendering depends on state: ALIVE / EXPLODING.
        """
        if self.state == BossTaggfiskEnemy.ALIVE:
            # Flash the hit image while the boss is hurt.
            if self.ishurt:
                self.ishurt_counter -= 1
                self.screen.blit(self.traffad_image, self.draw_rect)
                self.screen.fill(pygame.Color('Red'), self.rect)
                self.screen.blit(self.explosion_image, self.rect.move(0,random.randint(1,40)))
            else:
                self.screen.blit(self.image, self.draw_rect)
            if self.ishurt_counter == 0:
                self.ishurt_counter = 3
                self.ishurt = False
            # Draw a 100x20 px health bar.
            health_bar_x = self.draw_rect.x -20
            health_bar_y = self.draw_rect.y
            self.screen.fill(pygame.Color('red'),
                             (health_bar_x, health_bar_y, 100, 20))
            self.screen.fill(pygame.Color('green'),
                             (health_bar_x, health_bar_y,
                              self.health, 20))
        # Draw the big explosion!
        elif self.state == BossTaggfiskEnemy.EXPLODING:
            self.screen.blit(self.explosion_image_big, self.draw_rect)
#------------------------------------------------------------------------------------------------------
# Bläckfisk-boss.
#
#
#------------------------------------------------------------------------------------------------------
class BossBlackfiskEnemy(BossTaggfiskEnemy):
    """Second boss (the "octopus").

    Reuses BossTaggfiskEnemy's drive-in/patrol movement, but sprays a
    fan of VektorShot bullets every two seconds.
    """
    def __init__(self, screen, img_filename, init_position):
        BossTaggfiskEnemy.__init__(self, screen, img_filename, init_position)
        self.traffad_image = pygame.image.load(os.path.join('images','bigger_blackfisk_hit.png')).convert_alpha()
        self.rect = pygame.Rect(self.rect.x, self.rect.y-30, 20, 120)
    def update(self, time_passed):
        """Drive the boss in to about 1/3 of the screen, then patrol up
        and down while shooting.

        Behaviour depends on state: ALIVE / EXPLODING / DEAD.
        """
        if self.state == BossBlackfiskEnemy.ALIVE:
            if self.stay:
                # Patrol vertically, reversing at the top/bottom margins.
                if(self.draw_rect.y > self.screen.get_height()-370):
                    self.direction_y *= -1
                elif(self.draw_rect.y < 10):
                    self.direction_y *= -1
                self.draw_rect.move_ip(0,self.direction_y)
                self.rect.move_ip(0,self.direction_y)
            elif (self.draw_rect.x > 320):
                # Still driving in from the right.
                self.draw_rect.move_ip(-3,0)
                self.rect.move_ip(-3,0)
            else:
                self.stay = True
            # Advance the shot timer.
            self.vektor_shoot_timer += time_passed
            # Shoot when it is time.
            if (self.vektor_shoot_timer > 2000):
                self.vektor_shoot_timer = 0
                # Spray a fan of shots!
                for y in self.frange(-1, 1, 0.20):
                    self.shot_grp.add(VektorShot(self.screen,
                                                 (self.rect.x+200, self.rect.y+55),
                                                 (-1, y),
                                                 0.2
                                                 ))
        elif self.state == BossBlackfiskEnemy.EXPLODING:
            self.explosion_timer += time_passed
            if self.explosion_timer > 145:
                self.state = Enemy.DEAD
                self.explosion_sound.play()
                self.kill()
        elif self.state == BossBlackfiskEnemy.DEAD:
            self.kill()
    def draw(self):
        """Draw the boss plus its health bar.

        Rendering depends on state: ALIVE / EXPLODING.
        """
        if self.state == BossTaggfiskEnemy.ALIVE:
            # Draw the hit image while the boss is hurt.
            if self.ishurt:
                self.ishurt_counter -= 1
                self.screen.blit(self.traffad_image, self.draw_rect)
                self.screen.blit(self.explosion_image, self.rect.move(0,random.randint(1,80)))
            else:
                self.screen.blit(self.image, self.draw_rect)
            if self.ishurt_counter == 0:
                self.ishurt_counter = 3
                self.ishurt = False
            # Draw a 100x20 px health bar.
            health_bar_x = self.draw_rect.x -20
            health_bar_y = self.draw_rect.y
            self.screen.fill(pygame.Color('red'),
                             (health_bar_x, health_bar_y, 100, 20))
            self.screen.fill(pygame.Color('green'),
                             (health_bar_x, health_bar_y,
                              self.health, 20))
        # Draw the big explosion!
        elif self.state == BossTaggfiskEnemy.EXPLODING:
            self.screen.blit(self.explosion_image_big, self.draw_rect)
    def frange(self, start, end=None, inc=None):
        """Like range(), but with float increments.

        IDIOM FIX: `== None` replaced with `is None` (PEP 8); behaviour
        is otherwise unchanged.
        """
        if end is None:
            end = start + 0.0
            start = 0.0
        if inc is None:
            inc = 1.0
        L = []
        while 1:
            next = start + len(L) * inc
            if inc > 0 and next >= end:
                break
            elif inc < 0 and next <= end:
                break
            L.append(next)
        return L
| Python |
# -*- coding: utf-8 -*-
import pygame #@UnresolvedImport
from vec2d import vec2d
import os
'''
Created on 2 jun 2010
@author: Ingemar
'''
class Powerup(pygame.sprite.Sprite):
    """Represents a powerup: a blinking pearl that bounces around."""
    def __init__(self, screen, init_position):
        pygame.sprite.Sprite.__init__(self)
        self.screen = screen
        # !TODO! load via os.path.join etc.
        self.image_normal = pygame.image.load(os.path.join('images','pearl-normal.png')).convert_alpha()
        self.image = self.image_normal
        self.image_blink = pygame.image.load(os.path.join('images','pearl-blink.png')).convert_alpha()
        self.powerup_sound = pygame.mixer.Sound(os.path.join('sound','powerup.wav'))
        self.powerup_sound.set_volume(0.8)
        # Rect is needed for collision handling.
        self.rect = self.image.get_rect()
        # The start position.
        self.rect.x, self.rect.y = init_position
        self.blink_timer = 0
        # Start position, as a vector.
        self.pos = vec2d(init_position)
        # Start direction, as a normalized vector.
        self.direction = vec2d((-1,-1)).normalized()
        self.speed = 0.15
    def update(self, time_passed):
        """Move, bounce off the screen edges and toggle the blink image."""
        self.blink_timer += time_passed
        # Displacement = normalized direction * distance (speed * time).
        displacement = vec2d(
            self.direction.x * self.speed * time_passed,
            self.direction.y * self.speed * time_passed)
        # Apply the new position.
        self.pos += displacement
        # Refresh the rect for collision handling.
        self.rect = self.image.get_rect()
        self.rect.x = self.pos.x
        self.rect.y = self.pos.y
        # Bounce off the walls.
        bounds_rect = self.screen.get_rect()
        # Outside the left edge?
        if self.pos.x < bounds_rect.left-5:
            # Clamp to the edge...
            self.pos.x = bounds_rect.left-5
            # ...and flip the direction vector.
            self.direction.x *= -1
        elif self.pos.x > bounds_rect.right-35:
            self.pos.x = bounds_rect.right-35
            self.direction.x *= -1
        elif self.pos.y < bounds_rect.top-5:
            self.pos.y = bounds_rect.top-5
            self.direction.y *= -1
        elif self.pos.y > bounds_rect.bottom-35:
            self.pos.y = bounds_rect.bottom-35
            self.direction.y *= -1
        # Swap images every 200 ms to make the pearl blink.
        if self.blink_timer>200:
            self.blink_timer = 0
            if self.image == self.image_normal:
                self.image = self.image_blink
            else:
                self.image = self.image_normal
| Python |
# -*- coding: utf-8 -*-
"""
Created---------------------------------------------------------------------------------------------
Importing things:-----------------------------------------------------------------------------------
----------------------------------------------------------------------------------------------------
"""
import sys , os , MySQLdb
from PIL import Image
import qrcode
try:
    from karekodyeniyeni import Ui_Form
except ImportError:
    # BUG FIX: the old message blamed "QRCode", but the import that fails
    # here is the generated UI module. Also narrowed the bare except to
    # ImportError so unrelated errors are not swallowed.
    print("The UI module 'karekodyeniyeni' could not be imported. Regenerate it with pyuic4.")
    raise SystemExit
try:
    from PyQt4 import QtGui
    from PyQt4 import QtCore
except ImportError:
    # BUG FIX: the old message was garbled ("pyqt4-dev-tools'? To install...");
    # also narrowed the bare except to ImportError.
    print("PyQt4 is not installed. To install:\nsudo apt-get install pyqt4-dev-tools")
    raise SystemExit
class MyForm(QtGui.QWidget):
    """Main window: saves inventory rows to MySQL and renders a QR code."""
    def __init__(self):
        super(MyForm, self).__init__()
        self.ui = Ui_Form()
        self.ui.setupUi(self)
        #self.ui.pushButton_2.clicked.connect(self.dbvericek)
        self.ui.pushButton_2.clicked.connect(self.dbkayit)
    # QR-code generator
    def kare_olustur(self, name, num, loc, arc):
        """Build the "name-num-loc-arc" payload, render it as a QR PNG,
        show it in label_7 and refresh the table from the database."""
        degisken = name + "-" +str(num) + "-" + loc + "-" + arc
        img = qrcode.make(str(degisken))
        img.save(str(degisken) + '.png')
        self.ui.label_7.setPixmap(QtGui.QPixmap(degisken + ".png"))
        self.dbvericek()
    # Read the form fields, insert a DB row, then generate the QR code.
    def dbkayit(self):
        """Insert the form values into demirbas2, then trigger QR creation.

        SECURITY FIX: the INSERT previously built SQL with %-string
        interpolation of user input (SQL injection); it now passes the
        values as query parameters. Also closes the connection via
        try/finally so it is not leaked on error.
        """
        name = self.ui.lineEdit.text()
        num = self.ui.spinBox.value()
        loc = self.ui.lineEdit_3.text()
        arc = self.ui.lineEdit_4.text()
        db = MySQLdb.connect(host="localhost",user="root",passwd="01400140",db="atasam")
        try:
            cursor = db.cursor()
            cursor.execute(
                "INSERT INTO demirbas2(dbname , dbnum , dbloc , dbarc) VALUES(%s, %s, %s, %s)",
                (name, num, loc, arc))
            db.commit()
        finally:
            db.close()
        self.kare_olustur(name, num, loc, arc)
    # Fetch all rows from the database and push them into the table widget.
    def dbvericek(self):
        """Select every row from demirbas2 and feed it to tabloolustur."""
        dbcek = MySQLdb.connect(host="localhost",user="root",passwd="01400140",db="atasam")
        try:
            cursor = dbcek.cursor()
            cursor.execute("SELECT * FROM demirbas2")
            row = cursor.fetchall()
            sqllsatirsayisi = cursor.rowcount
        finally:
            dbcek.close()
        for i in row:
            dbgname = i[0]
            dbgnum = i[1]
            dbgloc = i[2]
            dbgarc = i[3]
            self.tabloolustur(sqllsatirsayisi, dbgname, dbgnum, dbgloc, dbgarc)
    # Build an empty table; filling it with the fetched values is a stub.
    def tabloolustur(self, sqllsatirsayisi, dbgname, dbgnum, dbgloc, dbgarc):
        """Size the table widget to the row count; item filling is still
        a placeholder (writes a dummy string into cell 1,1)."""
        self.table = QtGui.QTableWidget()
        self.ui.tableWidget.setRowCount(sqllsatirsayisi)
        self.ui.tableWidget.setColumnCount(4)
        self.ui.tableWidget.setItem(1, 1, QtGui.QTableWidgetItem("sdasdasdas"))
if __name__ == "__main__":
    # Launch the Qt application only when run as a script. Previously the
    # GUI started unconditionally as an import side effect; the exit code
    # of app.exec_() is now also propagated to the shell.
    app = QtGui.QApplication(sys.argv)
    f = MyForm()
    f.show()
    sys.exit(app.exec_())
| Python |
# -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'karekodyeni.ui'
#
# Created: Thu Dec 5 12:52:36 2013
# by: PyQt4 UI code generator 4.9.1
#
# WARNING! All changes made in this file will be lost!
from PyQt4 import QtCore, QtGui
try:
    # PyQt4 with QString support: use Qt's own UTF-8 conversion.
    _fromUtf8 = QtCore.QString.fromUtf8
except AttributeError:
    # API v2 builds expose no QString; plain Python strings pass through.
    _fromUtf8 = lambda s: s
class Ui_Form(object):
    """UI class generated by pyuic4 from karekodyeni.ui.

    NOTE: auto-generated code -- do not edit by hand; regenerate from the
    .ui file instead (see the "changes will be lost" warning above).
    """
    def setupUi(self, Form):
        """Create and lay out every widget of the fixed-size 634x370 form."""
        Form.setObjectName(_fromUtf8("Form"))
        Form.resize(634, 370)
        sizePolicy = QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Preferred)
        sizePolicy.setHorizontalStretch(6)
        sizePolicy.setVerticalStretch(0)
        sizePolicy.setHeightForWidth(Form.sizePolicy().hasHeightForWidth())
        Form.setSizePolicy(sizePolicy)
        Form.setMinimumSize(QtCore.QSize(634, 370))
        Form.setMaximumSize(QtCore.QSize(634, 370))
        self.tabWidget = QtGui.QTabWidget(Form)
        self.tabWidget.setGeometry(QtCore.QRect(9, 9, 616, 342))
        self.tabWidget.setObjectName(_fromUtf8("tabWidget"))
        # --- tab 1: record-entry form (labels + line edits + QR preview) ---
        self.tab = QtGui.QWidget()
        self.tab.setObjectName(_fromUtf8("tab"))
        self.layoutWidget = QtGui.QWidget(self.tab)
        self.layoutWidget.setGeometry(QtCore.QRect(9, 9, 596, 292))
        self.layoutWidget.setObjectName(_fromUtf8("layoutWidget"))
        self.gridLayout_4 = QtGui.QGridLayout(self.layoutWidget)
        self.gridLayout_4.setMargin(0)
        self.gridLayout_4.setObjectName(_fromUtf8("gridLayout_4"))
        self.gridLayout = QtGui.QGridLayout()
        self.gridLayout.setObjectName(_fromUtf8("gridLayout"))
        self.lineEdit_3 = QtGui.QLineEdit(self.layoutWidget)
        self.lineEdit_3.setMinimumSize(QtCore.QSize(175, 25))
        self.lineEdit_3.setMaximumSize(QtCore.QSize(175, 25))
        self.lineEdit_3.setObjectName(_fromUtf8("lineEdit_3"))
        self.gridLayout.addWidget(self.lineEdit_3, 2, 1, 1, 1)
        self.label = QtGui.QLabel(self.layoutWidget)
        self.label.setMinimumSize(QtCore.QSize(115, 25))
        self.label.setMaximumSize(QtCore.QSize(115, 25))
        self.label.setAlignment(QtCore.Qt.AlignCenter)
        self.label.setObjectName(_fromUtf8("label"))
        self.gridLayout.addWidget(self.label, 0, 0, 1, 1)
        self.label_2 = QtGui.QLabel(self.layoutWidget)
        self.label_2.setMinimumSize(QtCore.QSize(115, 25))
        self.label_2.setMaximumSize(QtCore.QSize(115, 25))
        self.label_2.setAlignment(QtCore.Qt.AlignCenter)
        self.label_2.setObjectName(_fromUtf8("label_2"))
        self.gridLayout.addWidget(self.label_2, 1, 0, 1, 1)
        self.lineEdit_4 = QtGui.QLineEdit(self.layoutWidget)
        self.lineEdit_4.setMinimumSize(QtCore.QSize(175, 25))
        self.lineEdit_4.setMaximumSize(QtCore.QSize(175, 25))
        self.lineEdit_4.setObjectName(_fromUtf8("lineEdit_4"))
        self.gridLayout.addWidget(self.lineEdit_4, 3, 1, 1, 1)
        self.label_3 = QtGui.QLabel(self.layoutWidget)
        self.label_3.setMinimumSize(QtCore.QSize(115, 25))
        self.label_3.setMaximumSize(QtCore.QSize(115, 25))
        self.label_3.setAlignment(QtCore.Qt.AlignCenter)
        self.label_3.setObjectName(_fromUtf8("label_3"))
        self.gridLayout.addWidget(self.label_3, 2, 0, 1, 1)
        self.label_4 = QtGui.QLabel(self.layoutWidget)
        self.label_4.setMinimumSize(QtCore.QSize(115, 25))
        self.label_4.setMaximumSize(QtCore.QSize(115, 25))
        self.label_4.setLayoutDirection(QtCore.Qt.LeftToRight)
        self.label_4.setAutoFillBackground(False)
        self.label_4.setAlignment(QtCore.Qt.AlignCenter)
        self.label_4.setObjectName(_fromUtf8("label_4"))
        self.gridLayout.addWidget(self.label_4, 3, 0, 1, 1)
        self.lineEdit = QtGui.QLineEdit(self.layoutWidget)
        self.lineEdit.setMinimumSize(QtCore.QSize(175, 25))
        self.lineEdit.setMaximumSize(QtCore.QSize(175, 25))
        self.lineEdit.setObjectName(_fromUtf8("lineEdit"))
        self.gridLayout.addWidget(self.lineEdit, 0, 1, 1, 1)
        self.spinBox = QtGui.QSpinBox(self.layoutWidget)
        self.spinBox.setObjectName(_fromUtf8("spinBox"))
        self.gridLayout.addWidget(self.spinBox, 1, 1, 1, 1)
        self.gridLayout_4.addLayout(self.gridLayout, 0, 0, 1, 1)
        # QR-code preview label (filled at runtime by kare_olustur).
        self.label_7 = QtGui.QLabel(self.layoutWidget)
        self.label_7.setMinimumSize(QtCore.QSize(290, 290))
        self.label_7.setMaximumSize(QtCore.QSize(290, 290))
        self.label_7.setText(_fromUtf8(""))
        self.label_7.setPixmap(QtGui.QPixmap(_fromUtf8("emre.png")))
        self.label_7.setScaledContents(True)
        self.label_7.setObjectName(_fromUtf8("label_7"))
        self.gridLayout_4.addWidget(self.label_7, 0, 1, 2, 1)
        self.gridLayout_2 = QtGui.QGridLayout()
        self.gridLayout_2.setObjectName(_fromUtf8("gridLayout_2"))
        spacerItem = QtGui.QSpacerItem(20, 100, QtGui.QSizePolicy.Minimum, QtGui.QSizePolicy.Expanding)
        self.gridLayout_2.addItem(spacerItem, 0, 0, 1, 1)
        spacerItem1 = QtGui.QSpacerItem(98, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_2.addItem(spacerItem1, 1, 0, 1, 1)
        self.pushButton_3 = QtGui.QPushButton(self.layoutWidget)
        self.pushButton_3.setEnabled(True)
        self.pushButton_3.setObjectName(_fromUtf8("pushButton_3"))
        self.gridLayout_2.addWidget(self.pushButton_3, 1, 1, 1, 1)
        self.pushButton_2 = QtGui.QPushButton(self.layoutWidget)
        self.pushButton_2.setEnabled(True)
        self.pushButton_2.setObjectName(_fromUtf8("pushButton_2"))
        self.gridLayout_2.addWidget(self.pushButton_2, 1, 2, 1, 1)
        self.gridLayout_4.addLayout(self.gridLayout_2, 1, 0, 1, 1)
        self.tabWidget.addTab(self.tab, _fromUtf8(""))
        # --- tab 2: 4-column listing table with delete/print buttons ---
        self.tab_2 = QtGui.QWidget()
        self.tab_2.setObjectName(_fromUtf8("tab_2"))
        self.tableWidget = QtGui.QTableWidget(self.tab_2)
        self.tableWidget.setGeometry(QtCore.QRect(10, 10, 591, 241))
        self.tableWidget.setSelectionBehavior(QtGui.QAbstractItemView.SelectRows)
        self.tableWidget.setObjectName(_fromUtf8("tableWidget"))
        self.tableWidget.setColumnCount(4)
        self.tableWidget.setRowCount(0)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(0, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(1, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(2, item)
        item = QtGui.QTableWidgetItem()
        self.tableWidget.setHorizontalHeaderItem(3, item)
        self.layoutWidget1 = QtGui.QWidget(self.tab_2)
        self.layoutWidget1.setGeometry(QtCore.QRect(10, 260, 591, 29))
        self.layoutWidget1.setObjectName(_fromUtf8("layoutWidget1"))
        self.gridLayout_3 = QtGui.QGridLayout(self.layoutWidget1)
        self.gridLayout_3.setMargin(0)
        self.gridLayout_3.setObjectName(_fromUtf8("gridLayout_3"))
        spacerItem2 = QtGui.QSpacerItem(308, 20, QtGui.QSizePolicy.Expanding, QtGui.QSizePolicy.Minimum)
        self.gridLayout_3.addItem(spacerItem2, 0, 0, 1, 1)
        self.pushButton = QtGui.QPushButton(self.layoutWidget1)
        self.pushButton.setObjectName(_fromUtf8("pushButton"))
        self.gridLayout_3.addWidget(self.pushButton, 0, 1, 1, 1)
        self.pushButton_4 = QtGui.QPushButton(self.layoutWidget1)
        self.pushButton_4.setObjectName(_fromUtf8("pushButton_4"))
        self.gridLayout_3.addWidget(self.pushButton_4, 0, 2, 1, 1)
        self.tabWidget.addTab(self.tab_2, _fromUtf8(""))
        self.label_6 = QtGui.QLabel(Form)
        self.label_6.setGeometry(QtCore.QRect(260, 350, 151, 17))
        font = QtGui.QFont()
        font.setPointSize(8)
        font.setBold(True)
        font.setItalic(False)
        font.setWeight(75)
        self.label_6.setFont(font)
        self.label_6.setObjectName(_fromUtf8("label_6"))
        self.retranslateUi(Form)
        self.tabWidget.setCurrentIndex(0)
        QtCore.QMetaObject.connectSlotsByName(Form)
        Form.setTabOrder(self.lineEdit, self.spinBox)
        Form.setTabOrder(self.spinBox, self.lineEdit_3)
        Form.setTabOrder(self.lineEdit_3, self.lineEdit_4)
        Form.setTabOrder(self.lineEdit_4, self.pushButton_2)
        Form.setTabOrder(self.pushButton_2, self.pushButton_3)
        Form.setTabOrder(self.pushButton_3, self.tabWidget)
        Form.setTabOrder(self.tabWidget, self.pushButton)
        Form.setTabOrder(self.pushButton, self.tableWidget)
        Form.setTabOrder(self.tableWidget, self.pushButton_4)
    def retranslateUi(self, Form):
        """Set all user-visible (Turkish) texts; called from setupUi."""
        Form.setWindowTitle(QtGui.QApplication.translate("Form", "Form", None, QtGui.QApplication.UnicodeUTF8))
        self.label.setText(QtGui.QApplication.translate("Form", "Malzeme Cinsi", None, QtGui.QApplication.UnicodeUTF8))
        self.label_2.setText(QtGui.QApplication.translate("Form", "Adet", None, QtGui.QApplication.UnicodeUTF8))
        self.label_3.setText(QtGui.QApplication.translate("Form", "Yeri", None, QtGui.QApplication.UnicodeUTF8))
        self.label_4.setText(QtGui.QApplication.translate("Form", "Açıklama", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_3.setText(QtGui.QApplication.translate("Form", "Temizle", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_2.setText(QtGui.QApplication.translate("Form", "Kaydet", None, QtGui.QApplication.UnicodeUTF8))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab), QtGui.QApplication.translate("Form", "Kayıt", None, QtGui.QApplication.UnicodeUTF8))
        item = self.tableWidget.horizontalHeaderItem(0)
        item.setText(QtGui.QApplication.translate("Form", "Malzeme", None, QtGui.QApplication.UnicodeUTF8))
        item = self.tableWidget.horizontalHeaderItem(1)
        item.setText(QtGui.QApplication.translate("Form", "Adet", None, QtGui.QApplication.UnicodeUTF8))
        item = self.tableWidget.horizontalHeaderItem(2)
        item.setText(QtGui.QApplication.translate("Form", "Buluduğu Yer", None, QtGui.QApplication.UnicodeUTF8))
        item = self.tableWidget.horizontalHeaderItem(3)
        item.setText(QtGui.QApplication.translate("Form", "Açıklama", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton.setText(QtGui.QApplication.translate("Form", "Sil", None, QtGui.QApplication.UnicodeUTF8))
        self.pushButton_4.setText(QtGui.QApplication.translate("Form", "Yazdır", None, QtGui.QApplication.UnicodeUTF8))
        self.tabWidget.setTabText(self.tabWidget.indexOf(self.tab_2), QtGui.QApplication.translate("Form", "Liste", None, QtGui.QApplication.UnicodeUTF8))
        self.label_6.setText(QtGui.QApplication.translate("Form", "Powered by ATASAM", None, QtGui.QApplication.UnicodeUTF8))
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-06-04
@author: shell.xu
'''
import sys
import logging
import traceback
import random
import pyweb
def test_json(request, post):
if 'count' not in request.session: request.session['count'] = 0
else: request.session['count'] += 1
li = [request.session['count'],]
for i in xrange(1, 100): li.append(random.randint(0, 100))
return li
def test_post(request):
request.recv_body()
print 'client send:', request.get_body()
response = request.make_response()
response.append_body('client send: %s' % request.get_body())
return response
# Module-level wiring: memcache-backed sessions and the URL dispatcher.
mc = pyweb.Memcache()
mc.add_server('localhost')
# Sessions stored in memcache with a 300 second lifetime.
sess = pyweb.MemcacheSession(mc, 300)
dis = pyweb.Dispatch([
    ['^/pyweb/files/(?P<filepath>.*)', pyweb.StaticFile('~/')],
    ['^/pyweb/tpl/(?P<filepath>.*)', pyweb.TemplateFile('.')],
    ['^/pyweb/post.*', test_post],
    ['^/pyweb/.*', sess, pyweb.J, test_json],
    ])
# dis = pyweb.MemcacheCache(mc, dis)
# Wrap the dispatcher in a small in-memory response cache (20 entries).
dis = pyweb.MemoryCache(20, dis)
def main(fastcgi = False, unix_sock = False, daemon = True):
    """Start the demo server.

    @param fastcgi: serve FastCGI instead of plain HTTP
    @param unix_sock: listen on the unix socket 'test.sock' instead of TCP
    @param daemon: run under pyweb.Daemon with pidfile 'test.pid'
    """
    pyweb.set_log()
    if fastcgi: serve = pyweb.FastCGIServer(dis)
    else: serve = pyweb.HttpServer(dis)
    if daemon:
        # NOTE: the boolean parameter is rebound to the Daemon instance here.
        daemon = pyweb.Daemon(serve)
        daemon.lock_pidfile('test.pid')
        try:
            if unix_sock: serve.listen_unix('test.sock', reuse = True)
            else: serve.listen(reuse = True)
            try: daemon.run()
            except KeyboardInterrupt: print 'exit.'
        finally: daemon.free_pidfile()
    else:
        if unix_sock: serve.listen_unix('test.sock', reuse = True)
        else: serve.listen(reuse = True)
        try: serve.run()
        except KeyboardInterrupt: print 'exit.'
if __name__ == '__main__':
    # 'fastcgi' as the first CLI argument selects the FastCGI front-end.
    fastcgi = len(sys.argv) > 1 and sys.argv[1] == 'fastcgi'
    main(fastcgi, daemon = False)
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-09-29
@author: shell.xu
'''
import os
from distutils.core import setup
setup(name = 'pyweb', version = os.environ['VERSION'], url = 'http://shell909090.com/',
author = 'Shell.E.Xu', author_email = 'shell909090@gmail.com',
maintainer = 'Shell.E.Xu', maintainer_email = 'shell909090@gmail.com',
license = 'MIT', description = 'A web framework written by python.',
packages = ['pyweb'])
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-06-04
@author: shell.xu
'''
import sys
import logging
import traceback
import pyweb
def test_google():
request = pyweb.HttpRequest.make_request('http://www.google.com/')
response = pyweb.http_client(request)
print response.get_body()
def test_self():
request = pyweb.HttpRequest.make_request(
'http://localhost:8080/pyweb/post/')
request.append_body('abcde')
response = pyweb.http_client(request)
print response.get_body()
if __name__ == '__main__':
    test_self()
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-06-04
@author: shell.xu
'''
from __future__ import with_statement
import socket
import logging
import traceback
from contextlib import contextmanager
from urlparse import urlparse
import ebus
import esock
import daemon
import basehttp
import template
class HttpRequest(basehttp.HttpMessage):
    ''' Http request object.
    @ivar timeout: timeout object attached by the server
    @ivar verb: request method
    @ivar url: raw request path as sent by the client
    @ivar version: request HTTP version
    @ivar urls: usually present; result of parsing url
    @ivar hostname: host name
    @ivar responsed: set by the response once a reply starts, blocking a second reply
    @ivar url_match: may exist; set by Dispatch with the url match result
    @ivar cookie: may exist; set by Session to hold cookie data and changes
    @ivar session: may exist; set by Session; dict whose content must be json-serializable '''
    VERBS = ['OPTIONS', 'GET', 'HEAD', 'POST', 'PUT', 'DELETE', 'TRACE', 'CONNECT']
    VERSIONS = ['HTTP/1.0', 'HTTP/1.1']
    @classmethod
    def make_request(cls, url, self = None):
        ''' Build a request from a url, for client use (nothing is received). '''
        if self is None: self = cls(None)
        urls = urlparse(url)
        if urls.port: port = int(urls.port)
        elif urls.scheme.lower() == 'https': port = 443
        else: port = 80
        self.sockaddr = [urls.hostname, port, urls.username, urls.password]
        self.verb, self.version = 'GET', 'HTTP/1.1'
        if not urls.query: self.url = urls.path
        else: self.url = '%s?%s' % (urls.path, urls.query)
        return self
    def load_header(self):
        ''' Read the request line and headers; normally not called by users. '''
        info = self.recv_headers()
        if len(info) < 3: raise basehttp.BadRequestError(info)
        self.verb, self.url, self.version = \
            info[0].upper(), info[1], info[2].upper()
        self.proc_header()
    def proc_header(self):
        ''' Post-process the request line; normally not called by users. '''
        if self.url.startswith('/') or self.url.lower().find('://') != -1:
            self.urls = urlparse(self.url)
            self.hostname = self.urls.netloc
        else: self.hostname, self.urls = self.url, {}
        if self.verb not in self.VERBS: raise basehttp.MethodNotAllowedError(self.verb)
        if self.version not in self.VERSIONS:
            raise basehttp.HttpException(505, self.version)
    def get_params(self):
        ''' Parameters from the GET query string. '''
        return basehttp.get_params_dict(self.urls.query)
    def post_params(self):
        ''' Parameters from the POST body (receives the body). '''
        self.recv_body()
        return basehttp.get_params_dict(self.get_body())
    def make_header(self):
        ''' Serialize the request line and headers. '''
        return self.make_headers([self.verb, self.url, self.version])
    def make_response(self, code = 200):
        ''' Build the response object for this request.
        @param code: response status code, default 200 '''
        response = HttpResponse(self, code)
        # Keep-alive is disabled on explicit close, server errors, and pre-1.1.
        if self.get_header('connection', '').lower() == 'close' or \
            code >= 500 or self.version.upper() != 'HTTP/1.1':
            response.connection = False
        return response
    def make_redirect(self, url, code = 302):
        ''' Build a redirect response pointing at url. '''
        response = self.make_response(code)
        response.set_header('location', url)
        return response
class HttpResponse(basehttp.HttpMessage):
    ''' Http response object.
    @ivar request: the request being answered
    @ivar connection: whether to keep the connection open, default True
    @ivar code: status code
    @ivar cache: cache slot, currently unused '''
    def __init__(self, request, code):
        ''' Build a response for request with status code. '''
        super(HttpResponse, self).__init__(request.sock)
        self.request, self.connection = request, True
        self.header_sended, self.body_sended = False, False
        self.code, self.version, self.cache = code, request.version, None
        self.phrase = basehttp.DEFAULT_PAGES[code][0]
    def load_header(self):
        ''' Receive the status line and headers from a remote server
        instead of building them locally (client mode). '''
        info = self.recv_headers()
        self.version, self.code, self.phrase = \
            info[0].upper(), int(info[1]), ''.join(info[2:])
        trans_code = self.get_header('transfer-encoding', 'identity')
        self.chunk_mode = trans_code != 'identity'
    def make_header(self):
        # Serialize the status line and headers.
        return self.make_headers([self.version, str(self.code), self.phrase,])
    def send_header(self, auto = False):
        ''' Send the response header (only once).
        @param auto: when True, fill in content-length if missing. '''
        if self.header_sended: return
        self.request.responsed = True
        if auto and 'content-length' not in self.header:
            self.set_header('content-length', self.body_len())
        self.sock.sendall(self.make_header())
        self.header_sended = True
    def append_body(self, data):
        ''' Buffer response data, coerced to a utf-8 byte string. '''
        if isinstance(data, unicode): data = data.encode('utf-8', 'ignore')
        if not isinstance(data, str): data = str(data)
        self.content.append(data)
    def finish(self):
        ''' Finish the response; nothing may be sent or appended afterwards. '''
        if not self.header_sended: self.send_header(True)
        if not self.body_sended and self.content:
            for data in self.content: self.send_body(data)
        self.body_sended = True
    # Attributes serialized by pack/unpack (used by the response caches).
    pack_fields = ['header', 'content', 'chunk_mode', 'body_recved',
                   'connection', 'header_sended', 'body_sended', 'code',
                   'version', 'cache', 'phrase']
    def pack(self):
        ''' Snapshot pack_fields into a list (for caching). '''
        return [getattr(self, n) for n in self.pack_fields]
    def unpack(self, objs):
        ''' Restore a snapshot produced by pack(). '''
        for n, v in zip(self.pack_fields, objs): setattr(self, n, v)
class HttpServer(esock.EpollSocket):
    ''' HTTP front-end: accepts connections and feeds requests to app.
    @cvar BREAK_CONN: when True, close the connection after every request
    @cvar RESPONSE_DEBUG: when True, error pages include the traceback '''
    BREAK_CONN, RESPONSE_DEBUG = False, True
    RequestCls = HttpRequest
    def __init__(self, app):
        super(HttpServer, self).__init__()
        # app is the WSGI-like callable; per-request timeout is 60 seconds.
        self.app, self.timeout = app, 60
    def handler(self, sock):
        # Per-connection loop: serve requests until the peer or app closes.
        while True:
            request = self.RequestCls(sock)
            response = self.process_request(request)
            if response is None: break
            if daemon.weblog: daemon.weblog.log_req(request, response)
            if not response.connection or self.BREAK_CONN: break
    def process_request(self, request):
        ''' Read one request, run the app under a timeout, send the result.
        @return: the response, or None when the connection must be dropped. '''
        try: request.load_header()
        except(EOFError, socket.error): return None
        try:
            logging.debug(request.make_header()[:-4])
            request.timeout = ebus.bus.set_timeout(self.timeout,
                basehttp.TimeoutError)
            try: response = self.app(request)
            finally: request.timeout.cancel()
            if not response: response = request.make_response(500)
        except(EOFError, socket.error): return None
        except basehttp.HttpException, err:
            response = self.err_handler(request, err, err.args[0])
        except Exception, err:
            response = self.err_handler(request, err)
            logging.exception('app unknown error')
        if not response: return None
        try: response.finish()
        except: return None
        logging.debug(response.make_header()[:-4])
        return response
    # Inline error-page template, rendered by err_handler.
    tpl = template.Template(template = '<html><head><title>{%=res.phrase%}</title></head><body><h1>{%=code%} {%=res.phrase%}</h1><h3>{%=default_pages[code][1]%}</h3>{%if res_dbg:%}<br/>Debug Info:<br/>{%if len(err.args) > 1:%}{%="%s<br/>" % str(err.args[1:])%}{%end%}{%="<pre>%s</pre>" % debug_info%}{%end%}</body></html>')
    def err_handler(self, request, err, code = 500):
        ''' Render an error page for err; returns None when a response
        has already been started for this request. '''
        if hasattr(request, 'responsed'): return None
        response = request.make_response(code)
        info = {'res': response, 'code': code, 'res_dbg': self.RESPONSE_DEBUG,
                'err': err, 'debug_info': ''.join(traceback.format_exc()),
                'default_pages': basehttp.DEFAULT_PAGES}
        response.append_body(self.tpl.render(info))
        return response
class SockFactory(object):
    """Default connection factory for http_client: hands out a fresh
    esock.EpollSocket per request and guarantees it is closed again."""
    @contextmanager
    def item(self):
        s = esock.EpollSocket()
        try:
            yield s
        finally:
            s.close()
    def connect(self, sock, sockaddr):
        """Connect *sock* to (host, port) taken from a sockaddr list."""
        sock.connect(sockaddr[0], sockaddr[1])
# Shared factory instance used when http_client is called without one.
default_sock_factory = SockFactory()
# Status codes whose responses never carry a body (RFC 2616): 1xx, 204, 304.
code_has_nobody = [100, 101, 204, 304]
def http_client(request, sock_factory = default_sock_factory):
    ''' Send a request and return the fully-received response.
    @param request: HttpRequest built with make_request (sockaddr is set)
    @param sock_factory: provides and closes the connection socket
    @return: HttpResponse with headers parsed and body received '''
    with sock_factory.item() as request.sock:
        sock_factory.connect(request.sock, request.sockaddr)
        if request.content:
            request.set_header('content-length', str(request.body_len()))
        request.sock.sendall(request.make_header())
        for data in request.content: request.send_body(data)
        response = request.make_response()
        response.load_header()
        # HEAD responses and 1xx/204/304 never carry a body.
        response.recv_body(request.verb != 'HEAD' and \
            response.code not in code_has_nobody)
        return response
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-10-21
@author: shell.xu
@todo: 目前只实现了同余算法,没有实现一致性哈希算法。
'''
from __future__ import with_statement
import socket
import binascii
import esock
def k_node_mod(srvs, k):
    ''' Pick the server responsible for key k by CRC32 modulo.
    @param srvs: list of servers
    @param k: key object (byte string)
    @return: one element of srvs
    '''
    if len(srvs) == 1:
        return srvs[0]
    return srvs[binascii.crc32(k) % len(srvs)]
class ContConnectException(Exception):
    """Raised when talking to a memcache server fails at the socket level."""
    pass

# Server-selection strategy used by Memcache; swap in a consistent-hashing
# implementation here once one exists (see the module @todo).
k_node_func = k_node_mod
class Memcache(object):
    ''' Client driver for memcache (text protocol). '''
    def __init__(self): self.srvs = []
    def add_server(self, host, port = 11211, max_size = 10000):
        ''' Add a server.
        @param host: server address
        @param port: port, default 11211
        @param max_size: maximum number of connections to this server '''
        self.srvs.append(esock.EpollSocketPool(host, port, max_size))
    def _server_response(self, conn):
        ''' Read one response line and split into (command, rest);
        raises on server-reported errors. '''
        line = conn.recv_until('\r\n')
        cmd, sp, data = line.partition(' ')
        if cmd == 'ERROR': raise Exception()
        elif cmd in ('CLIENT_ERROR', 'SERVER_ERROR'): raise Exception(data)
        return cmd, data
    def _cmd_to_put(self, cmd, k, v, f, exp):
        # Shared implementation of the storage commands (add/set/replace).
        if isinstance(v, unicode): v = v.encode('utf-8')
        try:
            with k_node_func(self.srvs, k).item() as conn:
                conn.sendall('%s %s %d %d %d\r\n%s\r\n' % (cmd, k, f, exp, len(v), v))
                cmd, data = self._server_response(conn)
                return cmd == 'STORED'
        except socket.error: raise ContConnectException()
    def add(self, k, v, f = 0, exp = 0):
        ''' Store k-v only if the key does not exist yet. '''
        return self._cmd_to_put('add', k, v, f, exp)
    def set(self, k, v, f = 0, exp = 0):
        ''' Store k-v unconditionally. '''
        return self._cmd_to_put('set', k, v, f, exp)
    def replace(self, k, v, f = 0, exp = 0):
        ''' Store k-v only if the key already exists. '''
        return self._cmd_to_put('replace', k, v, f, exp)
    def get(self, k):
        ''' Fetch a key.
        @return: (flags, value), or (0, None) on a miss. '''
        try:
            with k_node_func(self.srvs, k).item() as conn:
                conn.sendall('get %s\r\n' % k)
                cmd, data = self._server_response(conn)
                if cmd == 'END': return 0, None
                assert(cmd == 'VALUE')
                kr, f, l = data.split()
                d = conn.recv_length(int(l)+2)[:-2]
                cmd, data = self._server_response(conn)
                assert(cmd == 'END' and k == kr)
                return f, d
        except socket.error: raise ContConnectException()
    def _cmd_to_one(self, k, *params):
        # Send a one-line command to the server owning key k.
        try:
            with k_node_func(self.srvs, k).item() as conn:
                # BUGFIX: the command must be terminated with CRLF (the
                # memcached text protocol line terminator) -- without it,
                # delete/incr/decr stalled waiting for a reply.  The
                # broadcast variant _cmd_to_all already appended it.
                conn.sendall(' '.join(params) + '\r\n')
                return self._server_response(conn)
        except socket.error: raise ContConnectException()
    def delete(self, k, exp = 0):
        ''' Delete a key; returns True when it existed. '''
        cmd, data = self._cmd_to_one(k, 'delete', k, str(exp))
        return cmd == 'DELETED'
    def incr(self, k, v):
        ''' Increment a key's value.
        @return: the new value, or None when the key is missing. '''
        cmd, data = self._cmd_to_one(k, 'incr', k, str(v))
        if cmd == 'NOT_FOUND': return None
        return int(cmd)
    def decr(self, k, v):
        ''' Decrement a key's value.
        @return: the new value, or None when the key is missing. '''
        cmd, data = self._cmd_to_one(k, 'decr', k, str(v))
        if cmd == 'NOT_FOUND': return None
        return int(cmd)
    def _cmd_to_all(self, *params):
        # Broadcast a one-line command to every server, yielding responses.
        try:
            for srv in self.srvs:
                with srv.item() as conn:
                    conn.sendall(' '.join(params) + '\r\n')
                    yield self._server_response(conn)
        except socket.error: raise ContConnectException()
    def flush_all(self, exp = 0):
        ''' Discard all data on every server. '''
        for cmd, data in self._cmd_to_all('flush_all'): pass
    def version(self, exp = 0):
        ''' Collect the version string of every server.
        @return: list of versions, in the order the servers were added. '''
        rslt = []
        for cmd, data in self._cmd_to_all('version'):
            assert(cmd == 'VERSION')
            rslt.append(data)
        return rslt
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-06-04
@author: shell.xu
'''
import struct
import logging
import http
def nvpair_data(data, b):
    """Decode one FastCGI name-value length field starting at offset b.

    Per the FastCGI spec a length is either a single byte < 128, or four
    bytes big-endian with the top bit of the first byte set.

    @param data: raw record content (byte string)
    @param b: offset of the length field
    @return: (offset just past the field, decoded length)
    """
    if ord(data[b]) < 128: return b + 1, ord(data[b])
    # BUGFIX: the original unpacked little-endian ('<L') and applied the
    # 0x7fffffff mask to the byte slice (a TypeError); the wire format is
    # big-endian and the mask belongs on the decoded integer.
    return b + 4, struct.unpack('>L', data[b : b + 4])[0] & 0x7fffffff
def nvpair(data, b):
    """Decode one FastCGI name-value pair at offset b.

    @return: (offset just past the pair, name, value)
    """
    b, name_len = nvpair_data(data, b)
    b, value_len = nvpair_data(data, b)
    name_end = b + name_len
    name = data[b : name_end]
    value = data[name_end : name_end + value_len]
    return name_end + value_len, name, value
def begin_request(self, reqid, content):
    """Handle a FCGI_BEGIN_REQUEST record: reset the header dict, remember
    the request id and whether the web server wants the connection reused.
    Only the RESPONDER role (1) is supported."""
    self.fcgi_header = {}
    self.fcgi_reqid = reqid
    role, flags = struct.unpack('>HB', content[:3])
    assert(role == 1)
    self.fcgi_keep_conn = flags & 1 == 1
def params(self, reqid, content):
    """Handle a FCGI_PARAMS record: decode name-value pairs into
    fcgi_header, mirroring the request line and HTTP_* headers onto the
    request object.  Returns True on the empty record that terminates
    the parameter stream."""
    if len(content) == 0:
        return True
    pos = 0
    while pos < len(content):
        pos, name, value = nvpair(content, pos)
        self.fcgi_header[name] = value
        if name == 'REQUEST_METHOD':
            self.verb = value
        elif name == 'REQUEST_URI':
            self.url = value
        elif name == 'SERVER_PROTOCOL':
            self.version = value
        elif name.startswith('HTTP_'):
            self.add_header(name[5:].lower().replace('_', '-'), value)
def stdin(self, reqid, content):
    """Handle a FCGI_STDIN record: an empty record marks end-of-body,
    anything else is appended to the request body."""
    if content:
        self.append_body(content)
    else:
        self.end_body()
class FcgiRequest(http.HttpRequest):
    '''
    @ivar fcgi_header: FastCGI parameter headers
    @ivar fcgi_reqid: FastCGI request id
    @ivar fcgi_keep_conn: whether the web server reuses the connection
    '''
    # Record handlers by type: 1=BEGIN_REQUEST, 4=PARAMS, 5=STDIN.
    RECORD_FUNCS = {1:begin_request, 4:params, 5:stdin}
    def recv_record(self):
        ''' Read one FastCGI record (8-byte header, content, padding).
        @return: (record type, request id, content bytes) '''
        data = self.sock.recv_length(8)
        ver, tp, reqid, cont_len, pad_len, r = struct.unpack('>BBHHBB', data)
        logging.debug('recv_record %s %s %s' % (tp, reqid, cont_len))
        content = self.sock.recv_length(cont_len)
        self.sock.recv_length(pad_len)
        return tp, reqid, content
    def load_header(self):
        ''' Consume records until the parameter stream terminates, then
        read STDIN records until the body is complete. '''
        while True:
            tp, reqid, content = self.recv_record()
            func = self.RECORD_FUNCS.get(tp, None)
            if not func: raise Exception('record %d %d' % (tp, reqid))
            if func(self, reqid, content): break
        while not self.body_recved:
            tp, reqid, content = self.recv_record()
            assert(tp == 5)
            stdin(self, reqid, content)
        self.proc_header()
    def make_response(self, code = 200):
        ''' Build the FastCGI response object for this request.
        @param code: response status code, default 200 '''
        response = FcgiResponse(self, code)
        if not self.fcgi_keep_conn or code >= 500: response.connection = False
        return response
class FcgiResponse(http.HttpResponse):
    """HttpResponse specialised for FastCGI: all output is framed in FCGI
    records (type 6 = STDOUT, type 3 = END_REQUEST)."""
    def fcgi_record(self, tp, data):
        """Frame data as one FastCGI record of type tp for this request."""
        reqid = self.request.fcgi_reqid
        return struct.pack('>BBHHBB', 1, tp, reqid, len(data), 0, 0) + data
    def make_header(self):
        """Serialize the response headers as a STDOUT record."""
        return self.fcgi_record(6, self.make_headers(None))
    def send_body(self, data):
        """Send data as STDOUT records of at most 32 KiB payload each
        (a record's content-length field is only 16 bits wide)."""
        if self.body_sended: return
        if isinstance(data, unicode): data = data.encode('utf-8')
        if not isinstance(data, str): data = str(data)
        # BUGFIX: the original computed the chunk end as ``1 << 15 + b``
        # which, by operator precedence, is ``1 << (15 + b)`` -- so any
        # body over 32 KiB produced one oversized chunk that overflowed
        # the 16-bit record length.  The end offset is ``b + (1 << 15)``.
        for b in range(0, len(data), 1 << 15):
            e = min(b + (1 << 15), len(data))
            self.sock.sendall(self.fcgi_record(6, data[b:e]))
    def finish(self):
        """Flush headers and body, then close the request with an empty
        STDOUT record followed by END_REQUEST."""
        if not self.header_sended: self.send_header(True)
        if not self.body_sended and self.content:
            self.send_body(''.join(self.content))
        self.body_sended = True
        data = self.fcgi_record(6, '') + self.fcgi_record(3, '\0' * 8)
        self.sock.sendall(data)
class FcgiServer(http.HttpServer):
    """HttpServer variant that speaks FastCGI: requests are parsed by
    FcgiRequest, which in turn builds FcgiResponse objects."""
    RequestCls = FcgiRequest
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-11-12
@author: shell.xu
'''
import sys
import time
import heapq
import logging
from greenlet import greenlet
from contextlib import contextmanager
# Prefer the third-party "epoll" module; fall back to the stdlib select
# module's epoll support.  The two front-ends take poll timeouts in
# different units (milliseconds vs seconds), hence timout_factor.
try:
    import epoll
    epoll_factory = epoll.poll
    timout_factor = 1000
    python_epoll = True
except ImportError:
    import select as epoll
    epoll_factory = epoll.epoll
    timout_factor = 1.0
    python_epoll = False
class TimeOutException(Exception):
    """Default exception delivered to a greenlet whose timeout fires."""
    pass
class TimeoutObject(object):
    ''' A pending timeout: fire time, target greenlet and exception class.

    Equality is identity, so a specific instance can be located inside the
    bus queues; __cmp__ orders by fire time for the heap (Python 2 only).
    '''
    def __init__(self, timeout, gr, exp):
        self.t = timeout
        self.gr = gr
        self.exp = exp
    def __eq__(self, o):
        return self is o
    def __cmp__(self, o):
        return self.t > o.t
    def cancel(self):
        ''' Deregister this timeout from the global bus. '''
        bus.unset_timeout(self)
class EpollBus(object):
    ''' Cooperative greenlet scheduler multiplexed over an epoll loop.
    @ivar fdrmap: fd -> greenlet waiting to read it
    @ivar fdwmap: fd -> greenlet waiting to write it
    @ivar queue: run queue of (greenlet, switch args)
    @ivar wait_for_end: greenlet -> list of greenlets awaiting its death
    @ivar tol: timeout heap; ftol: new timeouts not yet heapified '''
    def __init__(self):
        self.fdrmap, self.fdwmap = {}, {}
        self.queue, self.wait_for_end = [], {}
        self.tol, self.ftol = [], []
        self.init_poll()
    def init_poll(self):
        ''' Initialize the poll object; must be called again after fork(). '''
        self.poll = epoll_factory()
        # NOTE(review): the stdlib branch binds modify() and the third-party
        # one register() -- looks inverted at first glance; confirm against
        # the python-epoll API before touching it.
        if not python_epoll: self._epoll_modify = self.poll.modify
        else: self._epoll_modify = self.poll.register
    def _setpoll(self, fd):
        ''' Sync the fd's registered poll events with fdrmap/fdwmap. '''
        ev = epoll.POLLHUP
        if fd in self.fdrmap: ev |= epoll.POLLIN
        if fd in self.fdwmap: ev |= epoll.POLLOUT
        # modify() fails with IOError when the fd is not registered yet.
        try: self._epoll_modify(fd, ev)
        except IOError: self.poll.register(fd, ev)
    def wait_for_read(self, fd):
        ''' Block the current greenlet until fd is readable. '''
        self.fdrmap[fd] = greenlet.getcurrent()
        self._setpoll(fd)
        self.schedule()
        try: del self.fdrmap[fd]
        except KeyError: pass
        self._setpoll(fd)
    def wait_for_write(self, fd):
        ''' Block the current greenlet until fd is writable. '''
        self.fdwmap[fd] = greenlet.getcurrent()
        self._setpoll(fd)
        self.schedule()
        try: del self.fdwmap[fd]
        except KeyError: pass
        self._setpoll(fd)
    def unreg(self, fd):
        ''' Deregister fd completely; no further events will fire for it. '''
        try: del self.fdwmap[fd]
        except KeyError: pass
        try: del self.fdrmap[fd]
        except KeyError: pass
        try: self.poll.unregister(fd)
        except (IOError, KeyError): pass
    def set_timeout(self, timeout, exp = TimeOutException):
        ''' Register a timeout for the current greenlet.
        @return: the timeout object (use its cancel() to disarm). '''
        gr = greenlet.getcurrent()
        ton = TimeoutObject(time.time() + timeout, gr, exp)
        self.ftol.append(ton)
        return ton
    def unset_timeout(self, ton):
        ''' Cancel a greenlet's timeout.
        @param ton: the timeout object returned by set_timeout '''
        if ton in self.ftol: self.ftol.remove(ton)
        elif ton in self.tol:
            try:
                self.tol.remove(ton)
                heapq.heapify(self.tol)
            except ValueError: pass
    def _pre_switch(self):
        # Fold freshly-added timeouts into the heap before switching away.
        while self.ftol: heapq.heappush(self.tol, self.ftol.pop())
    def add_queue(self, gr, *args):
        ''' Append a greenlet to the run queue.
        @param gr: greenlet to be scheduled
        @param args: arguments to switch into it with
        @return: True when the queue is getting long (>50), so pollers
            can stop collecting events and start draining. '''
        self.queue.append((gr, args))
        return len(self.queue) > 50
    def fork_gr(self, func, *args):
        ''' Create a new greenlet running func(*args) and queue it.
        @param func: main function
        @param args: its arguments '''
        gr = greenlet(self._gr_root)
        self.add_queue(gr, func, *args)
        return gr
    def _gr_root(self, func, *args):
        # Root frame of forked greenlets: run func, then wake any waiters.
        try:
            curgr = greenlet.getcurrent()
            rslt = func(*args)
            gr_waits = self.wait_for_end.pop(curgr, None)
            if gr_waits is not None:
                for gr in gr_waits: self.add_queue(gr)
            return rslt
        except KeyboardInterrupt: raise
        except: logging.exception('unknown error')
    def switch_out(self, gr, *args):
        ''' Manually switch into gr's context; may eventually switch back.
        @param gr: greenlet to switch into
        @param args: switch arguments '''
        if not gr: return
        self.add_queue(greenlet.getcurrent())
        self._pre_switch()
        gr.switch(*args)
    def wait_for_gr(self, gr):
        ''' Wait until gr finishes.  The awaited greenlet must have been
        created with fork_gr. '''
        curgr = greenlet.getcurrent()
        if gr.dead: return
        if gr not in self.wait_for_end: self.wait_for_end[gr] = []
        if curgr not in self.wait_for_end[gr]:
            self.wait_for_end[gr].append(curgr)
        while not gr.dead: self.schedule()
    def _switch_queue(self):
        ''' Drain the run queue until it is empty or the current greenlet
        is scheduled.  Unless an fd event was registered beforehand, the
        current context will not be switched back automatically.
        @return: None when the queue emptied; the scheduled (gr, args)
            entry when the current greenlet itself came up. '''
        gr = greenlet.getcurrent()
        while self.queue:
            q = self.queue[-1]
            if q[0].dead: self.queue.pop()
            elif q[0] == gr: return self.queue.pop()
            else:
                self._pre_switch()
                q[0].switch(*q[1])
    def _fire_timeout(self):
        ''' Fire every expired timeout.
        @return: interval until the next timeout, scaled to the poll
            implementation's unit, or -1 when none are pending. '''
        t = time.time()
        while self.tol and t > self.tol[0].t:
            next = heapq.heappop(self.tol)
            self.throw_gr_exp(next.gr, next.exp)
        if not self.tol: return -1
        return (self.tol[0].t - t) * timout_factor
    def throw_gr_exp(self, gr, exp):
        ''' Throw an exception into gr; control returns to the current
        greenlet afterwards via the run queue. '''
        if not gr: return
        self.add_queue(greenlet.getcurrent())
        gr.throw(exp)
    def _load_poll(self, timeout = -1):
        ''' Poll for fd events and queue the greenlets registered on the
        ready fds.
        @param timeout: interval until the next timeout fires. '''
        for fd, ev in self.poll.poll(timeout):
            # print 'event come', fd, ev
            if ev & epoll.POLLHUP:
                # Peer hung up: fail both waiters and drop the fd.
                self.throw_gr_exp(self.fdwmap.get(fd, None), EOFError)
                self.throw_gr_exp(self.fdrmap.get(fd, None), EOFError)
                self.unreg(fd)
                # TODO: close here
            elif ev & epoll.POLLIN and fd in self.fdrmap:
                if self.add_queue(self.fdrmap[fd]): break
            elif ev & epoll.POLLOUT and fd in self.fdwmap:
                if self.add_queue(self.fdwmap[fd]): break
            else: self._setpoll(fd)
    def schedule(self):
        ''' Enter the scheduler; the caller is not switched back
        automatically (only via fd events, timeouts or the run queue). '''
        while not self._switch_queue():
            self._load_poll(self._fire_timeout())
# Process-wide singleton scheduler used by the whole framework.
bus = EpollBus()
class TokenPool(object):
    ''' Token pool: a greenlet takes one token from the pool; when the
    tokens run out it blocks until another greenlet releases one.
    Usage:
        token = TokenPool(10)
        with token.item():
            do things... '''
    def __init__(self, max_item): self.token, self.gr_wait = max_item, []
    @contextmanager
    def item(self):
        # Acquire a token (blocking in the scheduler while none are left),
        # yield, then release it and wake one waiter if we were the first
        # token to come back.
        gr = greenlet.getcurrent()
        while self.token == 0:
            if gr not in self.gr_wait: self.gr_wait.append(gr)
            bus.schedule()
        self.token -= 1
        try: yield
        finally:
            self.token += 1
            if self.token == 1 and self.gr_wait:
                bus.switch_out(self.gr_wait.pop())
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-09-27
@author: shell.xu
'''
from __future__ import with_statement
import os
class TemplateCode(object):
    ''' Compiles template text into python source: literal runs become
    write() calls and {% ... %} directives become statements, with the
    current indent level tracked in self.deep. '''
    def __init__(self): self.deep, self.rslt, self.defs = 0, [], []
    def str(self, s):
        # Emit a literal chunk as a write() of a triple-quoted string.
        if s: self.rslt.append(u'%swrite(u\'\'\'%s\'\'\')' % (u'\t' * self.deep, s))
    def code(self, s):
        # Emit one {% ... %} directive, if it maps to a statement.
        r = self.map_code(s)
        if r: self.rslt.append(r)
    def map_code(self, s):
        ''' Translate one directive to an indented python line, or None
        when it only affects compiler state (end/def/include/import). '''
        s, tab = s.strip(), self.deep
        if s.startswith(u'='): s = u'write(%s)' % s[1:]
        elif s.startswith(u'end'):
            self.deep -= 1
            return
        elif s.startswith(u'for') or s.startswith(u'if'): self.deep += 1
        elif s.startswith(u'el'):
            # else/elif: emitted one level shallower than the body.
            tab -= 1
        elif s.startswith(u'def'):
            self.defs.append(s + u'\n')
            return
        elif s.startswith(u'include'):
            self.include(s[8:])
            return
        elif s.startswith(u'import'):
            self.defs.append(s + u'\n')
            return
        return u'%s%s' % (u'\t' * tab, s)
    def include(self, filepath):
        ''' Inline another template file at this point. '''
        with open(filepath, 'r') as tfile:
            self.process(tfile.read().decode('utf-8'))
    def process(self, s):
        ''' Split template text on {% %} markers and feed the pieces to
        str()/code().  Raises when a {% has no matching %}. '''
        while True:
            i = s.partition(u'{%')
            if not i[1]: break
            if i[0].strip(): self.str(i[0])
            t = i[2].partition(u'%}')
            if not t[1]: raise Exception('not match')
            self.code(t[0])
            s = t[2]
        self.str(s)
    def get_code(self): return u'\n'.join(self.rslt)
class Template(object):
    '''
    Template object: compiles a template and renders it with variables.
    Code:
        info = {'r': r, 'objs': [(1, 2), (3, 4)]}
        response.append_body(tpl.render(info))
    Template:
        <html><head><title>{%=r.get('a', 'this is title')%}</title></head>
        <body><table><tr><td>col1</td><td>col2</td></tr>
        {%for i in objs:%}<tr><td>{%=i[0]%}</td><td>{%=i[1]%}</td></tr>{%end%}
        </table></body></html>
    '''
    def __init__(self, filepath = None, template = None, env = None):
        '''
        @param filepath: file path; load and compile from the file
        @param template: string; compile the string directly
        @param env: globals used when compiling {%def%}/{%import%} blocks
        '''
        if not env: env = globals()
        self.tc, self.env = TemplateCode(), env
        if filepath: self.loadfile(filepath)
        elif template: self.loadstr(template)
    def loadfile(self, filepath):
        ''' Read and compile a template file (mtime kept for reload). '''
        self.modify_time = os.stat(filepath).st_mtime
        self.tc = TemplateCode()
        with open(filepath, 'r') as tfile: self.loadstr(tfile.read())
    def loadstr(self, template):
        ''' Compile a template string into executable code objects. '''
        if isinstance(template, str): template = template.decode('utf-8')
        self.tc.process(template)
        self.htmlcode, self.defcodes = compile(self.tc.get_code(), '', 'exec'), {}
        # {%def%}/{%import%} blocks are executed once, into defcodes.
        for i in self.tc.defs:
            eval(compile(i, '', 'exec'), self.env, self.defcodes)
    def reload(self, filepath):
        ''' When loaded from a file, recompile if the file has changed. '''
        if not hasattr(self, 'modify_time') or \
            os.stat(filepath).st_mtime > self.modify_time:
            self.loadfile(filepath)
    def render(self, kargs):
        ''' Render the template with kargs as its variable scope.
        @return: the rendered text as a unicode string. '''
        b = []
        kargs['write'] = lambda x: b.append(unicode(x))
        eval(self.htmlcode, self.defcodes, kargs)
        return u''.join(b)
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-09-27
@author: shell.xu
'''
from __future__ import with_statement
import os
import stat
import urllib
import logging
from os import path
from datetime import datetime
import basehttp
import template
import apps
def get_stat_str(mode):
    ''' Map an st_mode value to a short type tag: d(ir), f(ile), l(ink),
    s(ocket); empty string for anything else. '''
    checks = ((stat.S_ISDIR, "d"), (stat.S_ISREG, "f"),
              (stat.S_ISLNK, "l"), (stat.S_ISSOCK, "s"))
    return ''.join(tag for test, tag in checks if test(mode))
def calc_path(filepath, base_dir):
    ''' Resolve a url-encoded request path against base_dir.

    @param filepath: url-encoded path from the request match
    @param base_dir: canonical absolute base directory
    @return: (url_path, real_path) with real_path fully canonicalized
    @raise basehttp.HttpException: 403 when the path escapes base_dir '''
    url_path = urllib.unquote(filepath)
    real_path = path.join(base_dir, url_path.lstrip('/'))
    real_path = path.abspath(path.realpath(real_path))
    # BUG FIX: a bare prefix test let "/base2/x" pass a "/base" check;
    # require equality or a path-separator boundary after base_dir.
    if real_path != base_dir and not real_path.startswith(base_dir + os.sep):
        raise basehttp.HttpException(403)
    return url_path, real_path
class StaticFile(object):
    ''' Handler that serves static files from a base directory. '''
    MIME = basehttp.MIME
    PIPE_LENGTH = 512 * 1024                 # files this big are streamed
    DEFAULT_INDEX_SET = ['index.htm', 'index.html']
    def __init__(self, base_dir, show_directory = True, index_set = None):
        '''
        @param base_dir: root directory to serve
        @param show_directory: when True, requesting a directory lists it
        @param index_set: index file names tried for directory requests
        '''
        base_dir = path.expanduser(base_dir)
        self.base_dir = path.abspath(path.realpath(base_dir))
        self.show_directory = show_directory
        if index_set: self.index_set = index_set
        else: self.index_set = self.DEFAULT_INDEX_SET
    def __call__(self, request):
        url_path, real_path = calc_path(request.url_match['filepath'], self.base_dir)
        logging.debug("StaticFile: %s requested and %s hit." % \
                          (request.urls.path, real_path))
        if path.isdir(real_path):
            return self.dir_app(request, url_path, real_path)
        else: return self.file_app(request, real_path)
    def file_app(self, request, real_path):
        ''' Serve one file; small files are buffered (and marked cacheable),
        large files are streamed in 4K pieces. '''
        if not os.access(real_path, os.R_OK):
            raise basehttp.NotFoundError(real_path)
        file_stat = os.lstat(real_path)
        modify = request.get_header("if-modified-since")
        if modify:
            modify = basehttp.get_http_date(modify)
            # BUG FIX: 304 is correct when the file has NOT changed since
            # the client's date (mtime <= header date); the original
            # comparison was inverted and also compared against None when
            # the header date failed to parse.
            if modify and datetime.fromtimestamp(file_stat.st_mtime) <= modify:
                raise basehttp.HttpException(304)
        response = request.make_response()
        content_type = self.MIME.get(path.splitext(real_path)[1], "text/html")
        response.set_header("content-type", content_type)
        modify = basehttp.make_http_date(datetime.fromtimestamp(file_stat.st_mtime))
        response.set_header("last-modified", modify)
        if file_stat.st_size < self.PIPE_LENGTH:
            # small file: buffer the whole body so the cache layer can keep it
            with open(real_path, "rb") as datafile:
                response.append_body(datafile.read())
            response.cache = 300
        else:
            # large file: send headers now, then stream the body
            response.set_header("content-length", os.stat(real_path)[6])
            response.send_header()
            with open(real_path, "rb") as datafile:
                while True:
                    data = datafile.read(4096)
                    if len(data) == 0: break
                    response.send_body(data)
            response.body_sended = True
            response.connection = False
        return response
    # directory-listing template: one table row per entry
    tpl = template.Template(template = '{%import os%}{%from os import path%}<html><head><meta http-equiv="Content-Type" content="text/html; charset=utf-8"/></head><body><table><thead><tr><td>file name</td><td>file mode</td><td>file size</td></tr></thead><tbody>{%for name in namelist:%}{%name=name.decode("utf-8")%}{%stat_info = os.lstat(path.join(real_path, name))%}<tr><td><a href="{%=path.join(url_path, name).replace(os.sep, "/")%}">{%=name%}</a></td><td>{%=get_stat_str(stat_info.st_mode)%}</td><td>{%=stat_info.st_size%}</td></tr>{%end%}</tbody></table></body>')
    def dir_app(self, request, url_path, real_path):
        ''' Serve a directory: prefer an index file, otherwise render a
        listing when show_directory allows it. '''
        for index_file in self.index_set:
            test_path = path.join(real_path, index_file)
            if os.access(test_path, os.R_OK):
                return self.file_app(request, test_path)
        if not self.show_directory: raise basehttp.NotFoundError(real_path)
        if not os.access(real_path, os.X_OK): raise basehttp.NotFoundError(real_path)
        response = request.make_response()
        namelist = os.listdir(real_path)
        namelist.sort()
        info = {'namelist': namelist, 'get_stat_str': get_stat_str,
                'real_path': real_path, 'url_path': url_path}
        response.append_body(self.tpl.render(info))
        response.connection = False
        response.cache = 300
        return response
class TemplateFile(object):
    '''
    Automatic template publishing: every file under the given directory is
    compiled as a template, and recompiled when it changes on disk.
    Example:
        ['^/html/(?P<filepath>.*)', pyweb.TemplateFile('~/tpl/')],
    '''
    def __init__(self, base_dir):
        ''' @param base_dir: root directory containing the templates '''
        base_dir = path.expanduser(base_dir)
        self.base_dir = path.abspath(path.realpath(base_dir))
        self.cache = {}  # real_path -> compiled Template
    def __call__(self, request, *param):
        url_path, real_path = calc_path(request.url_match['filepath'], self.base_dir)
        if not path.isfile(real_path): raise basehttp.HttpException(403)
        if real_path not in self.cache:
            self.cache[real_path] = template.Template(filepath = real_path)
        else: self.cache[real_path].reload(real_path)
        tplfile = self.cache[real_path]
        query_info = basehttp.get_params_dict(request.urls.query)
        funcname = query_info.get('func', None)
        if funcname:
            # ?func=name calls a def from the template as a JSON endpoint
            funcobj = tplfile.defcodes.get(funcname, None)
            if not funcobj: raise basehttp.NotFoundError()
            response = apps.J(request, funcobj, *param)
        else:
            response = request.make_response()
            info = {'request': request, 'response': response, 'param': param}
            response.append_body(tplfile.render(info))
        return response
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-09-27
@author: shell.xu
'''
from __future__ import with_statement
import re
import time
import heapq
import urllib
import random
import cPickle
import logging
import traceback
import simplejson as json
import basehttp
import memcache
def J(request, func, *params):
    ''' JSON wrapper: parses the posted parameters, calls the wrapped
    function, serializes its return value as JSON and fills the response.
    Mapping example: ['^/def.*', pyweb.J, test_json]
    Code:
        def test_json(request, json):
            li = []
            for i in xrange(1, 100): li.append(random.randint(0, 100))
            return li
    '''
    try:
        post = request.post_params()
        obj = func(request, post, *params)
        # a None return is treated as a server-side failure
        if obj is None: code, content = 500, 'function return None'
        else: code, content = 200, json.dumps(obj)
    except Exception, err:
        logging.exception('json func error.')
        code, content = 500, str(err)
    response = request.make_response(code)
    response.append_body(content)
    return response
def redirect(request, url, *params):
    ''' Handler answering every request with a redirect to url; extra
    mapping parameters are accepted and ignored.
    Mapping example: ['^/def.*', pyweb.redirect, '/abc'] '''
    return request.make_redirect(url)
class Dispatch(object):
    ''' Dispatcher: runs the handler of the first regex rule matching the
    request path.
    Example: dispatch = pyweb.Dispatch([
        ['^/json/list_money.*', pyweb.J, list_money],
        ['^/json/add_money.*', pyweb.J, add_money],
        ['.*', hello_kitty],
    ]) '''
    def __init__(self, urlmap = None):
        '''
        @param urlmap: list of rules matched first to last; each rule holds:
            item 1: the url regex (compiled automatically when a string),
            item 2: the handler,
            remaining items: extra positional handler arguments; the regex
            groupdict is stored on the request as url_match.
        '''
        self.urlmap = []
        if urlmap: self.urlmap = [self.re_url_obj(obj) for obj in urlmap]
    def re_url_obj(self, obj):
        # compile string patterns in place
        if isinstance(obj[0], (str, unicode)): obj[0] = re.compile(obj[0])
        return obj
    def func_wrapper(self, url, *args):
        ''' Decorator registering the wrapped function as the handler of url. '''
        def get_func(func):
            # BUG FIX: args is a tuple; "[..] + args" raised TypeError
            # (list + tuple); convert before concatenating.
            self.urlmap.append([re.compile(url), func] + list(args))
            return func
        return get_func
    def json_wrapper(self, url, *args):
        ''' Decorator registering the wrapped function for url behind J. '''
        def get_func(func):
            # BUG FIX: same list + tuple concatenation as func_wrapper
            self.urlmap.append([re.compile(url), J, func] + list(args))
            return func
        return get_func
    def __call__(self, request):
        for obj in self.urlmap:
            m = obj[0].match(request.urls.path)
            if not m: continue
            request.url_match = m.groupdict()
            return obj[1](request, *obj[2:])
        raise basehttp.NotFoundError()
class Cache(object):
    ''' Base class for response caches; subclasses must override get_data
    and set_data. '''
    def __init__(self, app = None): self.app = app
    def __call__(self, request, *params):
        # try to serve a previously pickled response for this path
        pd = self.get_data(request.urls.path)
        if pd:
            response = request.make_response()
            response.unpack(cPickle.loads(pd))
            return response
        # miss: run the wrapped app, or the app passed as first parameter
        if self.app: response = self.app(request, *params)
        else: response = params[0](request, *params[1:])
        # responses with a cache lifetime are pickled and stored
        if response and response.cache is not None:
            response.set_header('cache-control', 'max-age=%d' % response.cache)
            pd = cPickle.dumps(response.pack(), 2)
            self.set_data(request.urls.path, pd, response.cache)
        return response
class MemcacheCache(Cache):
    ''' Cache implementation backed by memcache. '''
    def __init__(self, mc, app = None):
        super(MemcacheCache, self).__init__(app)
        self.mc = mc
    def get_data(self, k):
        ''' Get the cached object stored under key k; best-effort, a dead
        memcache is treated as a miss. '''
        try: f, data = self.mc.get('cache:' + k)
        except memcache.ContConnectException:
            logging.error('memcache can\'t connect')
            return None
        return data
    def set_data(self, k, v, exp):
        ''' Store object v under key k for exp seconds; failures are logged
        and otherwise ignored. '''
        try: self.mc.set('cache:' + k, v, exp = exp)
        except memcache.ContConnectException:
            logging.error('memcache can\'t connect')
class ObjHeap(object):
    ''' LRU object cache container.
    thx for Evan Prodromou <evan@bad.dynu.ca>. '''
    class __node(object):
        def __init__(self, k, v, f): self.k, self.v, self.f = k, v, f
        # BUG FIX: the original __cmp__ returned a bool (0 or 1), so it
        # never reported "less than" and heap ordering -- hence LRU
        # eviction -- was broken.  A proper __lt__ restores the ordering
        # and also works on Python 3, where __cmp__ is ignored.
        def __lt__(self, o): return self.f < o.f
    def __init__(self, size):
        ''' Initialize the container.
        @param size: maximum number of cached objects '''
        # f is a monotonically increasing access counter; the node with
        # the smallest f is the least recently used entry.
        self.size, self.f = size, 0
        self.__dict, self.__heap = {}, []
    def __len__(self): return len(self.__dict)
    def __contains__(self, k): return k in self.__dict
    def __setitem__(self, k, v):
        if k in self.__dict:
            # existing key: update value, mark as most recently used
            n = self.__dict[k]
            n.v = v
            self.f += 1
            n.f = self.f
            heapq.heapify(self.__heap)
        else:
            # evict least recently used entries until there is room
            while len(self.__heap) >= self.size:
                del self.__dict[heapq.heappop(self.__heap).k]
                # reset counters after eviction to keep them bounded
                self.f = 0
                for n in self.__heap: n.f = 0
            n = self.__node(k, v, self.f)
            self.__dict[k] = n
            heapq.heappush(self.__heap, n)
    def __getitem__(self, k):
        n = self.__dict[k]
        # reading marks the entry as most recently used
        self.f += 1
        n.f = self.f
        heapq.heapify(self.__heap)
        return n.v
    def __delitem__(self, k):
        n = self.__dict[k]
        del self.__dict[k]
        self.__heap.remove(n)
        heapq.heapify(self.__heap)
        return n.v
    def __iter__(self):
        # yield keys from least to most recently used; ending the
        # generator replaces the old "raise StopIteration", which is a
        # RuntimeError on Python 3.7+ (PEP 479)
        c = self.__heap[:]
        while len(c): yield heapq.heappop(c).k
class MemoryCache(Cache):
    ''' In-process response cache backed by the LRU ObjHeap. '''
    def __init__(self, size, app = None):
        ''' Build a memory-backed cache, suited to a small set of hot objects.
        @param size: maximum number of cached objects '''
        super(MemoryCache, self).__init__(app)
        self.oh = ObjHeap(size)
    def get_data(self, k):
        ''' Return the cached object for key k, or None when it is absent
        or has expired. '''
        try:
            value, deadline = self.oh[k]
        except KeyError:
            return None
        if deadline >= time.time(): return value
        # expired: drop the stale entry
        del self.oh[k]
        return None
    def set_data(self, k, v, exp):
        ''' Cache object v under key k for exp seconds. '''
        self.oh[k] = (v, time.time() + exp)
random.seed()

# characters allowed in a session id
alpha = '0123456789abcdefghijklmnopqrstuvwxyzABCDEFGHIJKLMNOPQRSTUVWXYZ+/'

def get_rnd_sess():
    ''' Build a 32-character random session id drawn from alpha. '''
    return ''.join(random.sample(alpha, 32))
class Cookie(object):
    ''' Wraps the request cookie header as a mutable mapping, tracking
    which keys changed so only those are sent back to the client. '''
    def __init__(self, cookie):
        ''' Parse the raw cookie data.
        @param cookie: raw Cookie header string (may be None or empty) '''
        self.v = basehttp.get_params_dict(cookie, ';') if cookie else {}
        self.m = set()  # names modified during this request
    def get(self, k, d): return self.v.get(k, d)
    def __contains__(self, k): return k in self.v
    def __getitem__(self, k): return self.v[k]
    def __delitem__(self, k):
        self.m.add(k)
        del self.v[k]
    def __setitem__(self, k, v):
        self.m.add(k)
        self.v[k] = v
    def set_cookie(self):
        ''' Build the Set-Cookie header values for the modified keys.
        NOTE(review): a deleted key stays in self.m but not in self.v, so
        it would raise KeyError here -- confirm deletions are never
        followed by set_cookie. '''
        return ['%s=%s' % (k, urllib.quote(self.v[k])) for k in self.m]
class Session(object):
    ''' Base class managing per-client sessions; subclasses implement
    set_data and get_data. '''
    def __init__(self, timeout, app = None):
        self.app, self.exp = app, timeout
    def __call__(self, request, *params):
        request.cookie = Cookie(request.header.get('cookie', None))
        sessionid = request.cookie.get('sessionid', '')
        if not sessionid:
            # first visit: assign a fresh session id via cookie
            sessionid = get_rnd_sess()
            request.cookie['sessionid'] = sessionid
            data = None
        else: data = self.get_data(sessionid)
        if not data: request.session = {}
        else: request.session = cPickle.loads(data)
        if self.app: response = self.app(request, *params)
        else: response = params[0](request, *params[1:])
        # persist whatever the handler left in request.session
        self.set_data(sessionid, cPickle.dumps(request.session, 2))
        set_cookie = request.cookie.set_cookie()
        if set_cookie: response.header['Set-Cookie'] = set_cookie
        return response
class MemcacheSession(Session):
    ''' Session backed by memcache: reads and writes session data keyed by
    the sessionid cookie.
    Usable directly as an app, or as a mapping wrapper; the app takes
    precedence.
    Example:
        mc = pyweb.Memcache()
        mc.add_server('localhost')
    As an app:
        dispatch = pyweb.Dispatch(...)
        app = pyweb.MemcacheSession(mc, 300, dispatch)
    As a mapping wrapper:
        sess = pyweb.MemcacheSession(mc, 300)
        dispatch = pyweb.Dispatch([ ['.*', sess, hello_kitty], ]) '''
    def __init__(self, mc, timeout, app = None):
        super(MemcacheSession, self).__init__(timeout, app)
        self.mc = mc
    def get_data(self, sessionid):
        # best effort: an unreachable memcache means an empty session
        try: f, data = self.mc.get('sess:%s' % sessionid)
        except memcache.ContConnectException:
            logging.error('memcache can\'t connect')
            return None
        return data
    def set_data(self, sessionid, data):
        try: self.mc.set('sess:%s' % sessionid, data, exp = self.exp)
        except memcache.ContConnectException:
            logging.error('memcache can\'t connect')
class MongoSession(Session):
    ''' Session backend meant to store data in MongoDB.  Unimplemented. '''
    def __init__(self, conn, timeout, app = None):
        # BUG FIX: the original called super(MemcacheSession, self),
        # which raises TypeError (self is not a MemcacheSession); super
        # must name this class.
        super(MongoSession, self).__init__(timeout, app)
        self.conn = conn
    # TODO: Mongo backend not implemented; the methods below still refer
    # to self.mc, which this class never sets.
    def get_data(self, sessionid):
        f, data = self.mc.get('sess:%s' % sessionid)
        return data
    def set_data(self, sessionid, data):
        self.mc.set('sess:%s' % sessionid, data, exp = self.exp)
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-09-29
@author: shell.xu
'''
from apps import J, redirect, Dispatch, MemcacheCache, MemoryCache
from apps import MemcacheSession, MongoSession
from basehttp import *
from daemon import Daemon, set_weblog, set_log
from ebus import TimeOutException, bus, TokenPool
from esock import EpollSocket
from fcgi import FcgiServer
from files import StaticFile, TemplateFile
from http import HttpRequest, HttpResponse, HttpServer, http_client
from memcache import Memcache
from template import Template
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-06-04
@author: shell.xu
'''
import os
import errno
import socket
from greenlet import greenlet
from contextlib import contextmanager
import ebus
class SockBase(object):
    ''' Buffered socket wrapper; recv_rest holds bytes already read from
    the socket but not yet consumed by the caller. '''
    buffer_size = 65536
    def __init__(self, sock = None, socktype = socket.AF_INET, reuse = True):
        if sock: self.sock = sock
        else: self.sock = socket.socket(socktype, socket.SOCK_STREAM)
        if reuse:
            self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        # subclasses may define setsock() to tune the raw socket
        if hasattr(self, 'setsock'): self.setsock()
        self.recv_rest = ""
    def fileno(self): return self.sock.fileno()
    def listen(self, addr = '', port = 8080, listen_queue = 50, **kargs):
        self.sockaddr = (addr, port)
        self.sock.bind(self.sockaddr)
        self.sock.listen(listen_queue)
    def listen_unix(self, sockpath = '', listen_queue = 50, **kargs):
        self.sockaddr = sockpath
        # remove a stale socket file left from a previous run
        try: os.remove(sockpath)
        except OSError: pass
        self.sock.bind(self.sockaddr)
        self.sock.listen(listen_queue)
    def connect(self, hostaddr, port):
        self.sockaddr = (hostaddr, port)
        self.sock.connect(self.sockaddr)
    def close(self): self.sock.close()
    def sendall(self, data):
        return self.sock.sendall(data)
    def recv(self, size):
        # raises EOFError when the peer closed the connection
        data = self.sock.recv(size)
        if len(data) == 0: raise EOFError
        return data
    def datas(self):
        ''' Iterate over incoming data chunks.
        Usage:
            for data in sock.datas():
                do things with data...
        '''
        if self.recv_rest:
            d, self.recv_rest = self.recv_rest, ''
            yield d
        while True:
            d = self.sock.recv(self.buffer_size)
            if len(d) == 0: raise StopIteration
            yield d
    def recv_until(self, break_str = "\r\n\r\n"):
        ''' Read until break_str is seen.
        @param break_str: the delimiter string.
        @return: data read so far excluding break_str; anything read past
                 the delimiter stays buffered in recv_rest. '''
        while self.recv_rest.rfind(break_str) == -1:
            self.recv_rest += self.recv(self.buffer_size)
        data, part, self.recv_rest = self.recv_rest.partition(break_str)
        return data
    def recv_length(self, length):
        ''' Call recv until exactly length bytes are available.
        @param length: number of bytes to return. '''
        while len(self.recv_rest) < length:
            self.recv_rest += self.recv(length - len(self.recv_rest))
        if len(self.recv_rest) != length:
            data, self.recv_rest = self.recv_rest[:length], self.recv_rest[length:]
        else: data, self.recv_rest = self.recv_rest, ''
        return data
class EpollSocket(SockBase):
''' 使用ebus调度的socket对象。 '''
def setsock(self): self.sock.setblocking(0)
conn_errset = set((errno.EINPROGRESS, errno.EALREADY, errno.EWOULDBLOCK))
def connect(self, hostaddr, port, timeout = 60):
''' 连接某个主机
@param hostaddr: 主机名
@param port: 端口号
@param timeout: 超时时间
'''
self.sockaddr = (hostaddr, port)
ton = ebus.bus.set_timeout(timeout)
try:
try:
while True:
err = self.sock.connect_ex(self.sockaddr)
if not err: return
elif err in self.conn_errset:
ebus.bus.wait_for_write(self.sock.fileno())
else: raise socket.error(err, errno.errorcode[err])
except ebus.TimeOutException:
raise socket.error(111, no.errorcode[111])
finally: ton.cancel()
def close(self):
''' 关闭端口,包括unreg fd。 '''
ebus.bus.unreg(self.sock.fileno())
super(EpollSocket, self).close()
def send(self, data, flags = 0):
''' 对原生send的封装 '''
while True:
try: return self.sock.send(data, flags)
except socket.error, err:
if err.args[0] != errno.EAGAIN: raise
ebus.bus.wait_for_write(self.sock.fileno())
def sendall(self, data, flags = 0):
''' 对原生sendall的封装 '''
tail, len_data = self.send(data, flags), len(data)
while tail < len_data: tail += self.send(data[tail:], flags)
def recv(self, size):
''' 对原生recv的封装 '''
while True:
try:
data = self.sock.recv(size)
if len(data) == 0: raise EOFError
return data
except socket.error, err:
if err.args[0] != errno.EAGAIN: raise
ebus.bus.wait_for_read(self.sock.fileno())
def datas(self):
''' 通过迭代器,获得数据对象。
用法:
for data in sock.datas():
do things with data...
'''
if self.recv_rest:
data, self.recv_rest = self.recv_rest, ''
yield data
while True:
try:
data = self.sock.recv(self.buffer_size)
if len(data) == 0: raise StopIteration
yield data
except socket.error, err:
if err.args[0] != errno.EAGAIN: raise
ebus.bus.wait_for_read(self.sock.fileno())
def accept(self):
''' 对原生accept的封装 '''
while True:
try: return self.sock.accept()
except socket.error, err:
if err.args[0] != errno.EAGAIN: raise
ebus.bus.wait_for_read(self.sock.fileno())
def run(self):
''' 对某个监听中的端口,接受连接,并调用on_accept方法。 '''
ebus.bus.init_poll()
self.gr = greenlet.getcurrent()
try:
while True:
ebus.bus.wait_for_read(self.sock.fileno())
s, addr = self.accept()
ebus.bus.fork_gr(self.on_accept, s, addr)
finally: ebus.bus.unreg(self.sock.fileno())
def on_accept(self, s, addr):
''' 协程起点,处理某个sock。
@param s: 基于epoll的socket对象。
@param addr: accept的地址。 '''
sock = EpollSocket(s)
try:
sock.from_addr, sock.server = addr, self
sock.gr = greenlet.getcurrent()
self.handler(sock)
finally: sock.close()
class ObjPool(object):
    ''' Object pool: callers borrow an object and block until one is
    released when the pool is exhausted.
    Subclasses must override create() and unbind().
    Usage:
        objpool = ObjPool(10)
        with objpool.item() as obj:
            do things with obj... '''
    def __init__(self, max_item):
        self.max_item = max_item
        # pool: idle objects; count: objects currently borrowed;
        # gr_wait: greenlets blocked waiting for a free slot
        self.pool, self.count, self.gr_wait = [], 0, []
    @contextmanager
    def item(self):
        ''' Borrow an object for the duration of a with-block; waiters are
        parked on gr_wait and woken by the releasing greenlet. '''
        gr = greenlet.getcurrent()
        while self.count >= self.max_item:
            if gr not in self.gr_wait: self.gr_wait.append(gr)
            bus.schedule()
        if not self.pool: self.pool.append(self.create())
        self.count += 1
        obj = self.pool.pop()
        try: yield obj
        finally:
            self.unbind(obj)
            self.pool.append(obj)
            self.count -= 1
            # pool just went from full to one free slot: wake one waiter
            if self.count == self.max_item - 1 and self.gr_wait:
                bus.switch_out(self.gr_wait.pop())
    def create(self):
        ''' Return a new pooled object; override in subclasses. '''
        pass
    def unbind(self, obj):
        ''' Detach obj from the current greenlet (commonly a socket unreg);
        override in subclasses.
        BUG FIX: the signature was unbind(self), but item() calls
        self.unbind(obj), so every release raised TypeError. '''
        pass
class EpollSocketPool(ObjPool):
    ''' Connection pool built from epoll-driven sockets. '''
    def __init__(self, host, port, max_size):
        super(EpollSocketPool, self).__init__(max_size)
        self.sockaddr = (host, port)
    def create(self):
        ''' Open a new connection to the pooled address. '''
        conn = EpollSocket()
        conn.connect(*self.sockaddr)
        return conn
    def unbind(self, sock):
        ''' Detach the socket's fd from the event bus on release. '''
        ebus.bus.unreg(sock.fileno())
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-10-24
@author: shell.xu
'''
from __future__ import with_statement
import os
import sys
import time
import fcntl
import signal
import logging
import datetime
from os import path
# module-level singleton pointing at the running Daemon; read by handler
daemon = None

def handler(signum, frame):
    ''' SIGTERM handler for the master: stop restarting workers and
    terminate the current ones. '''
    global daemon
    if signum == signal.SIGTERM:
        daemon.running = False
        # BUG FIX: the loop read a bare "workers" name (NameError); the
        # worker pid list lives on the daemon singleton.
        # NOTE(review): entries are removed from daemon.workers by the
        # os.wait() loop in Daemon.run, which this handler interrupts --
        # confirm the loop can actually terminate with live workers.
        while len(daemon.workers) > 0:
            for i in daemon.workers: os.kill(i, signal.SIGTERM)
            time.sleep(1)
class Daemon(object):
def __init__(self, server):
global daemon
self.server, self.workers = server, []
self.running = True
self.daemon = self
def get_cpu(self):
with open("/proc/cpuinfo", "r") as cpu_file:
cpuinfo = cpu_file.readlines()
self.cpu = len(filter(lambda x: x.startswith("processor"), cpuinfo))
def run(self, num = 0):
if num == 0:
if not hasattr(self, 'cpu'): self.get_cpu()
num = self.cpu
for i in xrange(0, num): self.workers.append(self.run_child())
self.running = True
while True:
pid, st = os.wait()
self.workers.remove(pid)
if self.running: self.workers.append(self.run_child())
def run_child(self):
pid = os.fork()
if pid < 0: raise OSError('fork failed')
if pid: return pid
self.server.run()
def lock_pidfile(self, pidfile):
self.pidfile = pidfile
try:
os.stat(pidfile)
raise Exception('a instance running')
except OSError: pass
self.fdpid = os.open(pidfile, os.O_RDWR | os.O_CREAT, 0600)
fcntl.lockf(self.fdpid, fcntl.LOCK_EX)
os.write(self.fdpid, str(os.getpid()))
def free_pidfile(self):
os.close(self.fdpid)
try: os.remove(self.pidfile)
except OSError: pass
def daemonize(self, root_dir = '/tmp'):
pid = os.fork()
if pid < 0: raise OSError('fork failed')
if pid > 0: sys.exit(-1)
os.setsid()
for i in xrange(0, 3): os.close(i)
fdnul = os.open('/dev/null', os.O_RDWR) # this is fd:0
if fdnul < 0: sys.exit(-1)
for i in xrange(1, 3): os.dup2(fdnul, i)
os.umask(027)
os.chdir(root_dir)
signal.signal(signal.SIGTERM, handler)
weblog = None  # process-wide access logger, created lazily by set_weblog
DATEFMT = "%Y%m%d %H:%M:%S"
LOGFORMAT = "[%(asctime)s]%(name)s:%(levelname)s:%(message)s"

class ApacheLog(object):
    ''' Apache combined-style access logger writing to a path or an
    already-open stream. '''
    def __init__(self, filepath):
        if hasattr(filepath, 'write'):
            # already a writable stream: use it directly
            self.logfile = filepath
        else:
            self.filepath = path.expanduser(filepath)
            self.logfile = open(self.filepath, "a")
    def _get_time(self):
        return datetime.datetime.now().strftime(DATEFMT)
    def log_req(self, req, res):
        ''' Append one combined-format line for a request/response pair. '''
        fields = (req.sock.from_addr[0], "-", self._get_time(), req.verb,
                  req.url, req.version, res.code, res.body_len(),
                  req.get_header('referer', '-'),
                  req.get_header('user-agent', '-'))
        self.logfile.write('%s - %s [%s] "%s %s %s" %d %s "%s" "%s"\r\n' % fields)
        self.logfile.flush()

def set_weblog(filepath):
    ''' Create the global access logger once; later calls are no-ops. '''
    global weblog
    if not weblog: weblog = ApacheLog(filepath)

def set_log(filepath = None, level = logging.INFO):
    ''' Configure the stdlib logging module with the package formats. '''
    kargs = {'level': level, 'format': LOGFORMAT, 'datefmt': DATEFMT}
    if filepath: kargs['filename'] = filepath
    logging.basicConfig(**kargs)
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
'''
@date: 2010-06-04
@author: shell.xu
'''
import socket
import urllib
from datetime import datetime
from urlparse import urlparse
class HttpException(Exception):
    ''' Base of the HTTP error hierarchy; args begin with the status code. '''
    pass

class BadRequestError(HttpException):
    ''' 400 Bad Request. '''
    def __init__(self, *params):
        HttpException.__init__(self, 400, *params)

class NotFoundError(HttpException):
    ''' 404 Not Found. '''
    def __init__(self, *params):
        HttpException.__init__(self, 404, *params)

class MethodNotAllowedError(HttpException):
    ''' 405 Method Not Allowed. '''
    def __init__(self, *params):
        HttpException.__init__(self, 405, *params)

class NotAcceptableError(HttpException):
    ''' 406 Not Acceptable. '''
    def __init__(self, *params):
        HttpException.__init__(self, 406, *params)

class TimeoutError(HttpException):
    ''' 408 Request Timeout. '''
    def __init__(self, *params):
        HttpException.__init__(self, 408, *params)

class BadGatewayError(HttpException):
    ''' 502 Bad Gateway. '''
    def __init__(self, *params):
        HttpException.__init__(self, 502, *params)
# Default (status line, explanatory body) text per HTTP status code, used
# when generating error pages.
DEFAULT_PAGES = {
    100:('Continue', 'Request received, please continue'),
    101:('Switching Protocols',
         'Switching to new protocol; obey Upgrade header'),
    200:('OK', ''),
    201:('Created', 'Document created, URL follows'),
    202:('Accepted', 'Request accepted, processing continues off-line'),
    203:('Non-Authoritative Information', 'Request fulfilled from cache'),
    204:('No Content', 'Request fulfilled, nothing follows'),
    205:('Reset Content', 'Clear input form for further input.'),
    206:('Partial Content', 'Partial content follows.'),

    300:('Multiple Choices', 'Object has several resources -- see URI list'),
    301:('Moved Permanently', 'Object moved permanently -- see URI list'),
    302:('Found', 'Object moved temporarily -- see URI list'),
    303:('See Other', 'Object moved -- see Method and URL list'),
    304:('Not Modified', 'Document has not changed since given time'),
    305:('Use Proxy',
         'You must use proxy specified in Location to access this resource.'),
    307:('Temporary Redirect', 'Object moved temporarily -- see URI list'),

    400:('Bad Request', 'Bad request syntax or unsupported method'),
    401:('Unauthorized', 'No permission -- see authorization schemes'),
    402:('Payment Required', 'No payment -- see charging schemes'),
    403:('Forbidden', 'Request forbidden -- authorization will not help'),
    404:('Not Found', 'Nothing matches the given URI'),
    405:('Method Not Allowed', 'Specified method is invalid for this server.'),
    406:('Not Acceptable', 'URI not available in preferred format.'),
    407:('Proxy Authentication Required',
         'You must authenticate with this proxy before proceeding.'),
    408:('Request Timeout', 'Request timed out; try again later.'),
    409:('Conflict', 'Request conflict.'),
    410:('Gone', 'URI no longer exists and has been permanently removed.'),
    411:('Length Required', 'Client must specify Content-Length.'),
    412:('Precondition Failed', 'Precondition in headers is false.'),
    413:('Request Entity Too Large', 'Entity is too large.'),
    414:('Request-URI Too Long', 'URI is too long.'),
    415:('Unsupported Media Type', 'Entity body in unsupported format.'),
    416:('Requested Range Not Satisfiable', 'Cannot satisfy request range.'),
    417:('Expectation Failed', 'Expect condition could not be satisfied.'),

    500:('Internal Server Error', 'Server got itself in trouble'),
    501:('Not Implemented', 'Server does not support this operation'),
    502:('Bad Gateway', 'Invalid responses from another server/proxy.'),
    503:('Service Unavailable',
         'The server cannot process the request due to a high load'),
    504:('Gateway Timeout',
         'The gateway server did not receive a timely response'),
    505:('HTTP Version Not Supported', 'Cannot fulfill request.'),
    }
# File-extension to content-type map used when serving static files.
MIME = {
    '.htm':"text/html", '.html':"text/html", ".txt":"text/plain",
    ".xhtm":"application/xhtml+xml", ".xhtml":"application/xhtml+xml",
    ".xsit":"text/xml", ".xsl":"text/xml", ".xml":"text/xml",
    ".gif":"image/gif", ".jpg":"image/jpeg", ".jpeg":"image/jpeg",
    ".png":"image/png", ".tif":"image/tiff", ".tiff":"image/tiff",
    ".wav":"audio/x-wav",
    ".gz":"application/x-gzip", ".bz2":"application/x-bzip2",
    ".tar":"application/x-tar", ".zip":"application/zip"
    }
class HttpMessage(object):
    ''' Base class for HTTP messages (requests and responses).
    @ivar header: header dict; lower-cased name -> value, or a list of
                  values when the same name was added more than once '''
    def __init__(self, sock):
        ''' @param sock: socket the message is read from / written to '''
        self.sock, self.header, self.content = sock, {}, []
        self.chunk_mode, self.body_recved = False, False
    def set_header(self, k, v):
        ''' Set a header, replacing whatever was there. '''
        self.header[k.lower()] = v
    def add_header(self, k, v):
        ''' Add a header value; repeated names accumulate into a list. '''
        k = k.lower()
        if k not in self.header: self.header[k] = v
        elif hasattr(self.header[k], 'append'): self.header[k].append(v)
        else: self.header[k] = [self.header[k], v]
    def get_header(self, k, v = None):
        ''' Return the first value of header k, or v when absent. '''
        l = self.header.get(k.lower(), None)
        if l is None: return v
        # BUG FIX: the original tested isinstance(v, list) -- the default
        # argument -- so multi-valued headers came back as the whole list;
        # the stored value is what must be tested.
        if isinstance(l, list): return l[0]
        else: return l
    def recv_headers(self):
        ''' Read and parse the header block from the socket; returns the
        start line split on whitespace. '''
        lines = self.sock.recv_until().splitlines()
        part = None
        for line in lines[1:]:
            if not line.startswith(' ') and not line.startswith('\t'):
                part = line.partition(":")
                if not part[1]: raise BadRequestError(line)
                self.add_header(part[0], part[2].strip())
            else:
                # obs-fold continuation line: attach to the previous header.
                # BUG FIX: a continuation before any header used to raise
                # NameError; it is now reported as a bad request.
                if part is None: raise BadRequestError(line)
                self.add_header(part[0], line[1:])
        return lines[0].split()
    def make_headers(self, start_line_info):
        ''' Serialize the start line and headers into wire format. '''
        if not start_line_info: lines = []
        else: lines = [" ".join(start_line_info)]
        for k, l in self.header.items():
            k = '-'.join([t.capitalize() for t in k.split('-')])
            if hasattr(l, '__iter__'):
                for v in l: lines.append("%s: %s" %(k, v))
            else: lines.append("%s: %s" %(k, l))
        return "\r\n".join(lines) + "\r\n\r\n"
    def body_len(self): return sum([len(i) for i in self.content])
    def append_body(self, data): self.content.append(data)
    def end_body(self): self.body_recved = True
    def send_body(self, data):
        ''' Send one body fragment, chunk-framed when in chunk mode. '''
        if isinstance(data, unicode): data = data.encode('utf-8')
        if not isinstance(data, str): data = str(data)
        if not self.chunk_mode: self.sock.sendall(data)
        else: self.sock.sendall('%x\r\n%s\r\n' %(len(data), data))
    def get_body(self): return ''.join(self.content)
    def recv_body(self, hasbody = False):
        ''' Receive the message body through append_body, honoring chunked
        transfer-encoding and content-length. '''
        if self.body_recved: return
        if self.get_header('transfer-encoding', 'identity') != 'identity':
            # chunked: size line, data, trailing CRLF, until a zero chunk
            chunk_size = 1
            while chunk_size != 0:
                chunk = self.sock.recv_until('\r\n').split(';')
                chunk_size = int(chunk[0], 16)
                self.append_body(self.sock.recv_length(chunk_size + 2)[:-2])
        elif 'content-length' in self.header:
            length = int(self.get_header('content-length'))
            for data in self.sock.datas():
                self.append_body(data)
                length -= len(data)
                if length <= 0: break
        elif hasbody:
            # no framing at all: read until the peer closes
            try:
                for d in self.sock.datas(): self.append_body(d)
            except (EOFError, socket.error): pass
        self.end_body()
# Accepted header date formats.  BUG FIX: the only format lacked the comma
# and "GMT" of RFC 1123 ("Sun, 06 Nov 1994 08:49:37 GMT"), so real
# If-Modified-Since headers never parsed and emitted Last-Modified values
# were non-standard.  The RFC 1123 form is listed first (make_http_date
# uses it); the old comma-less form is kept as a parsing fallback.
HTTP_DATE_FMTS = ["%a, %d %b %Y %H:%M:%S GMT", "%a %d %b %Y %H:%M:%S"]
def get_http_date(date_str):
    ''' Parse an HTTP date header into a naive datetime; returns None when
    no known format matches. '''
    for fmt in HTTP_DATE_FMTS:
        try: return datetime.strptime(date_str, fmt)
        except ValueError: pass
def make_http_date(date_obj):
    ''' Format a datetime in the RFC 1123 form used in HTTP headers. '''
    return date_obj.strftime(HTTP_DATE_FMTS[0])
def get_params_dict(data, sp = '&'):
    ''' Split a query-style string into a dict of url-decoded values.
    @param data: raw parameter string (may be empty or None)
    @param sp: pair separator, '&' for query strings, ';' for cookies '''
    if not data: return {}
    params = {}
    for pair in data.split(sp):
        key, _, raw = pair.partition('=')
        params[key] = urllib.unquote(raw)
    return params
| Python |
#!/usr/bin/python
import sys
import os
import os.path
import ConfigParser
import csv
import httplib
cwd = os.path.abspath((os.path.dirname(sys.argv[0])))
config_file = os.path.join(cwd, 'fetch_samples.conf')
#print 'config file: %s' % config_file
config = ConfigParser.RawConfigParser()
config.read(config_file)
samples_dir = os.path.abspath(os.path.join(cwd, config.get('dirs', 'samples_dir')))
if not os.access(samples_dir, os.W_OK):
os.mkdirs(samples_dir)
lang_file = os.path.abspath(os.path.join(cwd, config.get('languages', 'lang_file')))
lang_reader = csv.reader(open(lang_file), delimiter='|')
for row in lang_reader:
lang = row[2]
if not lang:
continue
sample_path = '/ig/api?weather=Omsk&hl=%s' % lang
connection = httplib.HTTPConnection('www.google.com')
connection.request('GET', sample_path)
response = connection.getresponse()
encoding = response.getheader('Content-Type', 'text/xml; charset=utf8').split(';')[-1].split('=')[-1].strip()
sample_file = os.path.join(samples_dir, ('%s.%s.xml' % (lang, encoding)))
print '%s -> %s' % (sample_path, sample_file)
file = open(sample_file, 'w')
file.write(response.read())
file.close()
connection.close()
| Python |
#!/usr/bin/python
import sys
import os
import os.path
import ConfigParser
from xml.etree import ElementTree
# Resolve paths relative to the script location and load configuration.
cwd = os.path.abspath((os.path.dirname(sys.argv[0])))
config_file = os.path.join(cwd, 'analyze_samples.conf')
#print 'config file: %s' % config_file
config = ConfigParser.RawConfigParser()
config.read(config_file)
samples_dir = os.path.abspath(os.path.join(cwd, config.get('dirs', 'samples_dir')))
# Each sample file is named <lang>.<encoding>.xml; parse it with its
# declared encoding and print a few localized fields.
for file in os.listdir(samples_dir):
    (lang, encoding) = file.split('.')[0:2]
    #print lang
    #content = unicode(open(os.path.abspath(os.path.join(samples_dir, file))).read(), encoding)
    #print content
    try:
        xml = ElementTree.parse(os.path.abspath(os.path.join(samples_dir, file)),
                                ElementTree.XMLParser(encoding=encoding))
    except Exception as e:
        # skip files the parser cannot handle, but report them
        print file, e
        continue
    unit = next(xml.iter('unit_system')).get('data')
    wind = next(xml.iter('wind_condition')).get('data')
    humidity = next(xml.iter('humidity')).get('data')
    print ('%s\t%s\t%s\t%s' % (lang, unit, wind, humidity)).encode('utf-8')
| Python |
import pygame
from sys import exit
from pygame.locals import *
import os
import random
# Center the game window on the desktop, then initialize pygame and the
# 600x480 main surface.
os.environ["SDL_VIDEO_CENTERED"] = "1"
pygame.init()
tela = pygame.display.set_mode((600, 480), 0, 32)
# BUG FIX: pygame.init() was called a second time here; the duplicate,
# redundant call has been removed.
pygame.display.set_caption("FittingBricks")
pygame.mouse.set_visible(False)
#funcao que faz os marcadores surgirem aleatoriamente.
def bloco_aleatorio():
    """Return the filename of one of the seven block sprites, chosen at random."""
    return "bloco%d.png" % random.randint(1, 7)
# Images: background screens for the menus plus an initial random block sprite.
fundo1 = pygame.image.load("./Imagens/tela1.1.jpg")
fundo2 = pygame.image.load("./Imagens/tela2.jpg")
fundo3 = pygame.image.load("./Imagens/tela3.jpg")
fundo4 = pygame.image.load("./Imagens/tela4.jpg")
peca = pygame.image.load("Imagens" + os.sep + bloco_aleatorio())
# Game music.
pygame.mixer.init()  # bug fix: was `pygame.mixer.init` (attribute access only, never called)
pygame.mixer.music.load('./sons/Tetris1.mp3')
pygame.mixer.music.play(-1)
toca = True  # music-playing flag, toggled with the K_m key in the menus
#Funcao do menu sobre
def menu_sobre():
    """Show the 'about' screen; ESC returns to the caller.

    Bug fixes: the second `elif event.type == KEYDOWN` branch was
    unreachable (duplicate of the condition above it), so K_m and
    K_RETURN never worked; and toggling the music assigned `toca`
    without declaring it global, which would raise UnboundLocalError.
    """
    global toca
    sobre_on = True
    pygame.display.update()
    while sobre_on:
        for event in pygame.event.get():
            if event.type == QUIT:
                exit()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    sobre_on = False
                elif event.key == K_m:
                    # Toggle the background music on/off.
                    if toca:
                        pygame.mixer.music.pause()
                        toca = False
                    else:
                        pygame.mixer.music.unpause()
                        toca = True
                elif event.key == K_RETURN:
                    pygame.display.set_mode((800, 480), 0, 32)
                    tela.blit(fundo4, (0, 0))
                    pygame.display.update()
#Funcao do menu das instrucoes
def menu_instrucao():
    """Show the instructions screen; ESC returns to the caller.

    Bug fixes: the second `elif event.type == KEYDOWN` branch was
    unreachable (duplicate of the condition above it), so K_m and
    K_RETURN never worked; and toggling the music assigned `toca`
    without declaring it global, which would raise UnboundLocalError.
    """
    global toca
    instr_on = True
    tela.blit(fundo2, (0, 0))
    pygame.display.update()
    while instr_on:
        for event in pygame.event.get():
            if event.type == QUIT:
                exit()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    instr_on = False
                elif event.key == K_m:
                    # Toggle the background music on/off.
                    if toca:
                        pygame.mixer.music.pause()
                        toca = False
                    else:
                        pygame.mixer.music.unpause()
                        toca = True
                elif event.key == K_RETURN:
                    pygame.display.set_mode((800, 480), 0, 32)
                    tela.blit(fundo2, (0, 0))
                    pygame.display.update()
#Funcao do menu de creditos
def menu_creditos():
    """Show the credits screen; ESC returns to the caller.

    Bug fixes: the second `elif event.type == KEYDOWN` branch was
    unreachable (duplicate of the condition above it), so K_m and
    K_RETURN never worked; and toggling the music assigned `toca`
    without declaring it global, which would raise UnboundLocalError.
    """
    global toca
    creditos_on = True
    tela.blit(fundo3, (0, 0))
    pygame.display.update()
    while creditos_on:
        for event in pygame.event.get():
            if event.type == QUIT:
                exit()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    creditos_on = False
                elif event.key == K_m:
                    # Toggle the background music on/off.
                    if toca:
                        pygame.mixer.music.pause()
                        toca = False
                    else:
                        pygame.mixer.music.unpause()
                        toca = True
                elif event.key == K_RETURN:
                    pygame.display.set_mode((800, 480), 0, 32)
                    tela.blit(fundo3, (0, 0))
                    pygame.display.update()
#Funcao do menu principal\inicial
def menu_principal():
    """Main menu loop: UP/DOWN move the marker over 5 options, ENTER activates.

    Options: 1 = play (imports and runs tetrix), 2 = instructions,
    3 = credits, 4 = about, 5 = quit. K_m toggles the music.

    Bug fix: wrapping upwards from the first option previously jumped to
    option 4 instead of the last option (5).
    """
    global toca
    opcao = 1
    # Marker pixel positions for each of the five menu entries.
    opcoes = {1: (143, 120), 2: (143, 188), 3: (143, 256), 4: (143, 327), 5: (143, 395)}
    menu_aberto = True
    tela.blit(fundo1, (0, 0))
    peca = pygame.image.load("Imagens" + os.sep + bloco_aleatorio())
    tela.blit(peca, (opcoes[opcao]))
    pygame.display.update()
    while menu_aberto:
        for event in pygame.event.get():
            if event.type == QUIT:
                exit()
            elif event.type == KEYDOWN:
                if event.key == K_m:
                    # Toggle the background music on/off.
                    if toca:
                        pygame.mixer.music.pause()
                        toca = False
                    else:
                        pygame.mixer.music.unpause()
                        toca = True
                if event.key == K_DOWN:
                    # A fresh random marker sprite on each move down.
                    peca = pygame.image.load("Imagens" + os.sep + bloco_aleatorio())
                    opcao += 1
                    if opcao > 5:
                        opcao = 1
                elif event.key == K_UP:
                    opcao -= 1
                    if opcao < 1:
                        opcao = 5  # bug fix: was 4, which skipped the last option
                elif event.key == K_RETURN:
                    if opcao == 1:
                        # Deferred import: the game module runs pygame code on import.
                        import tetrix
                        tetrix.main()
                        menu_aberto = False
                    elif opcao == 2:
                        menu_instrucao()
                    elif opcao == 3:
                        menu_creditos()
                    elif opcao == 4:
                        menu_sobre()
                    elif opcao == 5:
                        exit()
                elif event.key == K_ESCAPE:
                    exit()
        if menu_aberto:
            pygame.display.update()
            tela.blit(fundo1, (0, 0))
            tela.blit(peca, (opcoes[opcao]))
menu_principal()
| Python |
import pygame
pygame.init()
# NOTE(review): set_repeat takes (delay_ms, interval_ms); (640, 480) looks
# like a copy-paste of the window size -- confirm the intended key repeat.
pygame.key.set_repeat(640,480)
import random
import os
def main():
    """Run a Tetris session: set up the board, then loop play() rounds.

    State shared between the nested helpers lives in globals: `bg` (a
    snapshot of the screen used for dirty-rect erasing), `grid` (board
    rows as 12-bit masks with side walls 0b100000000001 == 2049 and a
    solid floor row 4095), `speed`/`speedx` (fall-timer period, ms) and
    `score`.
    """
    txtcolor = (250,250,250)
    tela = pygame.display.set_mode((640,480))
    tela.fill((255,255,255))
    erase = pygame.Rect(0,0,120,85)
    mafont0 = pygame.font.SysFont('calibri',15) ; mafont1 = pygame.font.SysFont('calibri',56) ; mafont2 = pygame.font.SysFont('calibri',20); mafont3 = pygame.font.SysFont('calibri',20)
    tela.blit(pygame.image.load('./Imagens/fundo2.jpg'),(0,0))
    mask = pygame.image.load('./Imagens/mask.png')
    # Piece table: one tuple per tetromino, 4 rotations each, every rotation
    # given as 4 rows of 4-bit masks (set bit == filled cell).
    matriz = (((0,0,0,15),(4,4,4,4),(0,0,0,15),(2,2,2,2)),
              ((0,0,6,6),(0,0,6,6),(0,0,6,6),(0,0,6,6)),
              ((0,0,2,14),(0,4,4,6),(0,0,7,4),(0,6,2,2)),
              ((0,0,7,1),(0,2,2,6),(0,0,4,7),(0,6,4,4)),
              ((0,0,6,12),(0,4,6,2),(0,0,3,6),(0,4,6,2)),
              ((0,0,12,6),(0,2,6,4),(0,0,6,3),(0,2,6,4)),
              ((0,0,2,7),(0,4,6,4),(0,0,7,2),(0,2,6,2)))
    class Objet:
        # A falling piece: board position (x, y), on-screen rect, rotation
        # index and colour. Constructing one also (re)arms the fall timer
        # (USEREVENT fires every `speed` milliseconds).
        def __init__(self,matriz,index,couleur):
            self.x = 4 ; self.y = 0
            self.rect = pygame.Rect(80,0,80,80)
            self.objet = matriz
            self.index = index
            self.couleur = couleur
            self.make_image()
            pygame.time.set_timer(pygame.USEREVENT,speed)
        def make_image(self):
            # Render the current rotation into an 80x80 per-pixel-alpha
            # surface, one 20x20 tile per set bit of each row mask.
            surface = pygame.Surface((80,80)).convert_alpha()
            surface.fill((0,0,0,0))
            for i,x in enumerate(self.objet[self.index]):
                if x:
                    for j,y in enumerate(bin(x)[2:].zfill(4)):
                        if y=='1':
                            surface.fill(self.couleur,(j*20,i*20,20,20))
                            surface.blit(mask,(j*20,i*20))
            self.image = surface
        def draw(self):
            # Dirty-rect redraw: restore the background behind the previous
            # position, then blit the piece at its current rect.
            erase.topleft = (self.rect.x-20,self.rect.y-5)
            tela.blit(bg,erase.topleft,erase)
            tela.blit(self.image,self.rect.topleft)
            tela.blit(bg,(20,0),(20,0,200,80))
            pygame.display.update((erase,(20,0,200,80)))
        def update(self):
            # Event loop for a single piece, from spawn until it lands.
            def shift(direction):
                # Move sideways only if the shifted bit pattern does not
                # collide with anything in `grid`.
                ref = [int(i*2**(8-self.x-direction)) for i in self.objet[self.index]]
                if not any(a&b for a,b in zip(ref,grid[self.y:])):
                    self.x += direction ; self.rect.x += direction*20 ; self.draw()
            def down():
                # Advance 2px; on a 20px cell boundary first test the rows
                # below. Returns 0 when the piece can fall no further.
                if self.rect.y%20==0:
                    ref = [int(i*2**(8-self.x)) for i in self.objet[self.index]]
                    if not any(a&b for a,b in zip(ref,grid[self.y+1:])):
                        self.y += 1 ; self.rect.y += 2 ; self.draw()
                        return 1
                    else: return 0
                else :
                    self.rect.y += 2
                    self.draw()
                    return 1
            def rotate():
                # Rotate only if the next rotation would not collide.
                ref = [int(i*2**(8-self.x)) for i in self.objet[(self.index+1)%4]]
                if not any(a&b for a,b in zip(ref,grid[self.y:])):
                    self.index = (self.index+1)%4
                    self.make_image()
                    self.draw()
            while True:
                e = pygame.event.wait().type
                if e == pygame.USEREVENT:
                    if not down() : break
                elif e == pygame.KEYDOWN:
                    keys = pygame.key.get_pressed()
                    if keys[pygame.K_LEFT]:shift(-1)
                    elif keys[pygame.K_RIGHT]:shift(1)
                    elif keys[pygame.K_UP]:rotate()
                    elif keys[pygame.K_ESCAPE]:import menu
                    elif keys[pygame.K_p]:
                        # Pause: freeze until any key, then repaint the scene.
                        tela.blit(mafont3.render('PAUSE',1,txtcolor),(30,200))
                        tela.blit(mafont3.render('Pressione uma tecla para iniciar',1,txtcolor),(30,225))
                        pygame.display.flip()
                        while pygame.event.wait().type != pygame.KEYDOWN : pass
                        tela.blit(bg,(0,0))
                        tela.blit(self.image,self.rect.topleft)
                        tela.blit(bg,(20,0),(20,0,200,80))
                        pygame.display.flip()
                    elif keys[pygame.K_DOWN]:
                        # Soft drop: keep stepping down while the key is held.
                        self.rect.y = int(self.rect.y)
                        while down() and pygame.event.poll().type != pygame.KEYUP: pygame.time.wait(2)
                elif e == pygame.QUIT:
                    pygame.event.post(pygame.event.Event(pygame.QUIT))
                    pygame.event.post(pygame.event.Event(pygame.QUIT))
                    break
    def grid_update(obj):
        # Merge a landed piece into `grid`, clear any full rows (mask 4095),
        # update score/speed and refresh the side-panel counters.
        global bg,speed,score,speedx
        sc = 0
        ref = [int(i*2**(8-obj.x)) for i in obj.objet[obj.index]]
        for a,i in enumerate(ref):
            grid[obj.y+a]=grid[obj.y+a]|i
            if grid[obj.y+a]==4095:
                sc += 100
                del(grid[obj.y+a])
                grid.insert(0,2049)
                tela.blit(tela,(20,100),(20,80,200,(obj.y+a)*20-80))
        score += sc ; speedx += sc*0.000225 ; speedx -= 0.1 ; speed = int(speedx)
        if speed == 0:
            # Fall-timer period hit 0 (acknowledged bug): force a quit event.
            tela.blit(mafont3.render('Velocidade Max',1,txtcolor),(30,150))
            tela.blit(mafont2.render('bug nao corrigido ainda',1,txtcolor),(30,175))
            tela.blit(mafont3.render('ENTER para recomecar o jogo',1,txtcolor),(27,230))
            pygame.event.clear()
            pygame.event.post(pygame.event.Event(pygame.QUIT))
        tela.fill((255,255,255),(236,239,164,15))
        tela.blit(mafont0.render(str(50-speed),1,(25,50,250)),(236,239))
        tela.fill((255,255,255),(236,289,164,15))
        tela.blit(mafont0.render(str(score),1,(25,50,250)),(236,289))
        pygame.display.flip()
        bg = tela.copy()
    def play():
        # One round: reset state, show the "press a key" prompt, then drop
        # pieces until one lands at the very top (game over) or the window
        # is closed.
        global bg,grid,speed,score,speedx
        speedx = 50 ; speed = 50 ; score = 0
        tela.fill((0,0,0),(20,80,200,400))
        grid = [2049]*24+[4095]
        tela.fill((255,255,255),(236,239,164,15)) ; tela.fill((255,255,255),(236,289,164,15)) ; tela.fill((0,0,0),(221,120,80,80))
        tela.blit(mafont0.render(str(1),1,(25,50,250)),(236,239)) ; tela.blit(mafont0.render(str(0),1,(25,50,250)),(236,289))
        next = Objet(matriz[random.randint(0,5)],random.randint(0,3),(random.randint(100,255),random.randint(0,255),random.randint(0,255)))
        tela.blit(next.image,(221,120))
        tela.blit(mafont3.render('Pressione uma tecla para iniciar',1,txtcolor),(30,200))
        pygame.display.flip()
        tela.fill((0,0,0),(20,80,200,400))
        while True:
            if pygame.event.wait().type == pygame.QUIT:return
            elif pygame.event.wait().type == pygame.KEYDOWN:break
        while pygame.event.wait().type != pygame.QUIT:
            # NOTE(review): `next` shadows the builtin; it holds the preview piece.
            truc = next
            next = Objet(matriz[random.randint(0,6)],random.randint(0,3),(random.randint(200,255),random.randint(100,255),random.randint(0,255)))
            tela.fill((0,0,0),(221,120,80,80))
            tela.blit(next.image,(221,120))
            pygame.display.flip()
            bg = tela.copy()
            truc.update()
            if truc.y==0 :
                tela.blit(mafont3.render('PERDEU !',1,txtcolor),(30,200))
                tela.blit(mafont3.render('ENTER para recomecar o jogo',1,txtcolor),(27,230))
                pygame.display.flip()
                break
            grid_update(truc)
    play()
    # Post-game loop: ENTER restarts, ESC leaves, closing the window quits.
    while pygame.event.wait().type != pygame.QUIT:
        if pygame.key.get_pressed()[pygame.K_RETURN]: play()
        if pygame.key.get_pressed()[pygame.K_ESCAPE]: return
if __name__ == "__main__": main()
| Python |
import pygame
from sys import exit
from pygame.locals import *
import os
import random
# Center the game window on the desktop (must be set before set_mode).
os.environ["SDL_VIDEO_CENTERED"] = "1"
pygame.init()
# Main surface used by every menu function below.
tela = pygame.display.set_mode((640, 480), 0, 32)
pygame.init()  # NOTE(review): duplicate init call; redundant but harmless
pygame.display.set_caption("FittingBricks")
pygame.mouse.set_visible(False)
def bloco_aleatorio():
    """Return the filename of one of the seven block sprites, chosen at random."""
    return "bloco%d.png" % random.randint(1, 7)
# Images: background screens for the menus plus an initial random block sprite.
fundo1 = pygame.image.load("./Imagens/tela1.1.jpg")
fundo2 = pygame.image.load("./Imagens/tela2.jpg")
fundo3 = pygame.image.load("./Imagens/tela3.jpg")
fundo4 = pygame.image.load("./Imagens/tela4.jpg")
peca = pygame.image.load("Imagens" + os.sep + bloco_aleatorio())
# Game music.
pygame.mixer.init()  # bug fix: was `pygame.mixer.init` (attribute access only, never called)
pygame.mixer.music.load('./sons/Tetris1.mp3')
pygame.mixer.music.play(-1)
toca = True  # music-playing flag, toggled with the K_m key in the menus
#Funcao do menu sobre
def menu_sobre():
    """Show the 'about' screen; ESC returns to the caller.

    Bug fixes: the second `elif event.type == KEYDOWN` branch was
    unreachable (duplicate of the condition above it), so K_m and
    K_RETURN never worked; and toggling the music assigned `toca`
    without declaring it global, which would raise UnboundLocalError.
    """
    global toca
    sobre_on = True
    pygame.display.update()
    while sobre_on:
        for event in pygame.event.get():
            if event.type == QUIT:
                exit()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    sobre_on = False
                elif event.key == K_m:
                    # Toggle the background music on/off.
                    if toca:
                        pygame.mixer.music.pause()
                        toca = False
                    else:
                        pygame.mixer.music.unpause()
                        toca = True
                elif event.key == K_RETURN:
                    pygame.display.set_mode((800, 480), 0, 32)
                    tela.blit(fundo4, (0, 0))
                    pygame.display.update()
#Funcao do menu das instrucoes
def menu_instrucao():
    """Show the instructions screen; ESC returns to the caller.

    Bug fixes: the second `elif event.type == KEYDOWN` branch was
    unreachable (duplicate of the condition above it), so K_m and
    K_RETURN never worked; and toggling the music assigned `toca`
    without declaring it global, which would raise UnboundLocalError.
    """
    global toca
    instr_on = True
    tela.blit(fundo2, (0, 0))
    pygame.display.update()
    while instr_on:
        for event in pygame.event.get():
            if event.type == QUIT:
                exit()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    instr_on = False
                elif event.key == K_m:
                    # Toggle the background music on/off.
                    if toca:
                        pygame.mixer.music.pause()
                        toca = False
                    else:
                        pygame.mixer.music.unpause()
                        toca = True
                elif event.key == K_RETURN:
                    pygame.display.set_mode((800, 480), 0, 32)
                    tela.blit(fundo2, (0, 0))
                    pygame.display.update()
#Funcao do menu de creditos
def menu_creditos():
    """Show the credits screen; ESC returns to the caller.

    Bug fixes: the second `elif event.type == KEYDOWN` branch was
    unreachable (duplicate of the condition above it), so K_m and
    K_RETURN never worked; and toggling the music assigned `toca`
    without declaring it global, which would raise UnboundLocalError.
    """
    global toca
    creditos_on = True
    tela.blit(fundo3, (0, 0))
    pygame.display.update()
    while creditos_on:
        for event in pygame.event.get():
            if event.type == QUIT:
                exit()
            elif event.type == KEYDOWN:
                if event.key == K_ESCAPE:
                    creditos_on = False
                elif event.key == K_m:
                    # Toggle the background music on/off.
                    if toca:
                        pygame.mixer.music.pause()
                        toca = False
                    else:
                        pygame.mixer.music.unpause()
                        toca = True
                elif event.key == K_RETURN:
                    pygame.display.set_mode((800, 480), 0, 32)
                    tela.blit(fundo3, (0, 0))
                    pygame.display.update()
#Funcao do menu principal\inicial
def menu_principal():
    """Main menu loop: UP/DOWN move the marker over 5 options, ENTER activates.

    Options: 1 = play (imports and runs tetrix), 2 = instructions,
    3 = credits, 4 = about, 5 = quit. K_m toggles the music.

    Bug fix: wrapping upwards from the first option previously jumped to
    option 4 instead of the last option (5).
    """
    global toca
    opcao = 1
    # Marker pixel positions for each of the five menu entries.
    opcoes = {1: (143, 120), 2: (143, 188), 3: (143, 256), 4: (143, 327), 5: (143, 395)}
    menu_aberto = True
    tela.blit(fundo1, (0, 0))
    peca = pygame.image.load("Imagens" + os.sep + bloco_aleatorio())
    tela.blit(peca, (opcoes[opcao]))
    pygame.display.update()
    while menu_aberto:
        for event in pygame.event.get():
            if event.type == QUIT:
                exit()
            elif event.type == KEYDOWN:
                if event.key == K_m:
                    # Toggle the background music on/off.
                    if toca:
                        pygame.mixer.music.pause()
                        toca = False
                    else:
                        pygame.mixer.music.unpause()
                        toca = True
                if event.key == K_DOWN:
                    # A fresh random marker sprite on each move down.
                    peca = pygame.image.load("Imagens" + os.sep + bloco_aleatorio())
                    opcao += 1
                    if opcao > 5:
                        opcao = 1
                elif event.key == K_UP:
                    opcao -= 1
                    if opcao < 1:
                        opcao = 5  # bug fix: was 4, which skipped the last option
                elif event.key == K_RETURN:
                    if opcao == 1:
                        # Deferred import: the game module runs pygame code on import.
                        import tetrix
                        tetrix.main()
                        menu_aberto = False
                    elif opcao == 2:
                        menu_instrucao()
                    elif opcao == 3:
                        menu_creditos()
                    elif opcao == 4:
                        menu_sobre()
                    elif opcao == 5:
                        exit()
                elif event.key == K_ESCAPE:
                    exit()
        if menu_aberto:
            pygame.display.update()
            tela.blit(fundo1, (0, 0))
            tela.blit(peca, (opcoes[opcao]))
menu_principal()
| Python |
import pygame
pygame.init()
# NOTE(review): set_repeat takes (delay_ms, interval_ms); (640, 480) looks
# like a copy-paste of a window size -- confirm the intended key repeat.
pygame.key.set_repeat(640,480)
import random
import os
from sys import exit
# Code copied and edited (from the sibling tetrix module).
def main():
    """Run a Tetris session: set up the board, then loop play() rounds.

    State shared between the nested helpers lives in globals: `bg` (a
    snapshot of the screen used for dirty-rect erasing), `grid` (board
    rows as 12-bit masks with side walls 0b100000000001 == 2049 and a
    solid floor row 4095), `speed`/`speedx` (fall-timer period, ms) and
    `score`.
    """
    txtcolor = (250,250,250)
    tela = pygame.display.set_mode((670,510))
    tela.fill((255,255,255))
    erase = pygame.Rect(0,0,120,85)
    mafont0 = pygame.font.SysFont('calibri',15) ; mafont1 = pygame.font.SysFont('calibri',56) ; mafont2 = pygame.font.SysFont('calibri',14); mafont3 = pygame.font.SysFont('calibri',14)
    tela.blit(pygame.image.load('./Imagens/fundo2.jpg'),(0,0))
    mask = pygame.image.load('./Imagens/mask.png')
    # Piece table: one tuple per tetromino, 4 rotations each, every rotation
    # given as 4 rows of 4-bit masks (set bit == filled cell).
    matriz = (((0,0,0,15),(4,4,4,4),(0,0,0,15),(2,2,2,2)),
              ((0,0,6,6),(0,0,6,6),(0,0,6,6),(0,0,6,6)),
              ((0,0,2,14),(0,4,4,6),(0,0,7,4),(0,6,2,2)),
              ((0,0,7,1),(0,2,2,6),(0,0,4,7),(0,6,4,4)),
              ((0,0,6,12),(0,4,6,2),(0,0,3,6),(0,4,6,2)),
              ((0,0,12,6),(0,2,6,4),(0,0,6,3),(0,2,6,4)),
              ((0,0,2,7),(0,4,6,4),(0,0,7,2),(0,2,6,2)))
    class Objet:
        # A falling piece: board position (x, y), on-screen rect, rotation
        # index and colour. Constructing one also (re)arms the fall timer
        # (USEREVENT fires every `speed` milliseconds).
        def __init__(self,matriz,index,couleur):
            self.x = 4 ; self.y = 0
            self.rect = pygame.Rect(80,0,80,80)
            self.objet = matriz
            self.index = index
            self.couleur = couleur
            self.make_image()
            pygame.time.set_timer(pygame.USEREVENT,speed)
        def make_image(self):
            # Render the current rotation into an 80x80 per-pixel-alpha
            # surface, one 20x20 tile per set bit of each row mask.
            surface = pygame.Surface((80,80)).convert_alpha()
            surface.fill((0,0,0,0))
            for i,x in enumerate(self.objet[self.index]):
                if x:
                    for j,y in enumerate(bin(x)[2:].zfill(4)):
                        if y=='1':
                            surface.fill(self.couleur,(j*20,i*20,20,20))
                            surface.blit(mask,(j*20,i*20))
            self.image = surface
        def draw(self):
            # Dirty-rect redraw: restore the background behind the previous
            # position, then blit the piece at its current rect.
            erase.topleft = (self.rect.x-20,self.rect.y-5)
            tela.blit(bg,erase.topleft,erase)
            tela.blit(self.image,self.rect.topleft)
            tela.blit(bg,(20,0),(20,0,200,80))
            pygame.display.update((erase,(20,0,200,80)))
        def update(self):
            # Event loop for a single piece, from spawn until it lands.
            def shift(direction):
                # Move sideways only if the shifted bit pattern does not
                # collide with anything in `grid`.
                ref = [int(i*2**(8-self.x-direction)) for i in self.objet[self.index]]
                if not any(a&b for a,b in zip(ref,grid[self.y:])):
                    self.x += direction ; self.rect.x += direction*20 ; self.draw()
            def down():
                # Advance 2px; on a 20px cell boundary first test the rows
                # below. Returns 0 when the piece can fall no further.
                if self.rect.y%20==0:
                    ref = [int(i*2**(8-self.x)) for i in self.objet[self.index]]
                    if not any(a&b for a,b in zip(ref,grid[self.y+1:])):
                        self.y += 1 ; self.rect.y += 2 ; self.draw()
                        return 1
                    else: return 0
                else :
                    self.rect.y += 2
                    self.draw()
                    return 1
            def rotate():
                # Rotate only if the next rotation would not collide.
                ref = [int(i*2**(8-self.x)) for i in self.objet[(self.index+1)%4]]
                if not any(a&b for a,b in zip(ref,grid[self.y:])):
                    self.index = (self.index+1)%4
                    self.make_image()
                    self.draw()
            while True:
                e = pygame.event.wait().type
                if e == pygame.USEREVENT:
                    if not down() : break
                elif e == pygame.KEYDOWN:
                    keys = pygame.key.get_pressed()
                    if keys[pygame.K_LEFT]:shift(-1)
                    elif keys[pygame.K_RIGHT]:shift(1)
                    elif keys[pygame.K_UP]:rotate()
                    elif keys[pygame.K_ESCAPE]:import menu
                    elif keys[pygame.K_p]:
                        # Pause: freeze until any key, then repaint the scene.
                        tela.blit(mafont3.render('PAUSE',1,txtcolor),(30,200))
                        tela.blit(mafont3.render('Pressione uma tecla para iniciar',1,txtcolor),(30,225))
                        pygame.display.flip()
                        while pygame.event.wait().type != pygame.KEYDOWN : pass
                        tela.blit(bg,(0,0))
                        tela.blit(self.image,self.rect.topleft)
                        tela.blit(bg,(20,0),(20,0,200,80))
                        pygame.display.flip()
                    elif keys[pygame.K_DOWN]:
                        # Soft drop: keep stepping down while the key is held.
                        self.rect.y = int(self.rect.y)
                        while down() and pygame.event.poll().type != pygame.KEYUP: pygame.time.wait(2)
                elif e == pygame.QUIT:
                    pygame.event.post(pygame.event.Event(pygame.QUIT))
                    pygame.event.post(pygame.event.Event(pygame.QUIT))
                    break
    def grid_update(obj):
        # Merge a landed piece into `grid`, clear any full rows (mask 4095),
        # update score/speed and refresh the side-panel counters.
        global bg,speed,score,speedx
        sc = 0
        ref = [int(i*2**(8-obj.x)) for i in obj.objet[obj.index]]
        for a,i in enumerate(ref):
            grid[obj.y+a]=grid[obj.y+a]|i
            if grid[obj.y+a]==4095:
                sc += 100
                del(grid[obj.y+a])
                grid.insert(0,2049)
                tela.blit(tela,(20,100),(20,80,200,(obj.y+a)*20-80))
        score += sc ; speedx += sc*0.000225 ; speedx -= 0.1 ; speed = int(speedx)
        if speed == 0:
            # Fall-timer period hit 0 (acknowledged bug): force a quit event.
            tela.blit(mafont3.render('Velocidade Max',1,txtcolor),(30,150))
            tela.blit(mafont2.render('bug nao corrigido ainda',1,txtcolor),(30,175))
            tela.blit(mafont3.render('ENTER para recomecar o jogo',1,txtcolor),(27,230))
            pygame.event.clear()
            pygame.event.post(pygame.event.Event(pygame.QUIT))
        tela.fill((255,255,255),(236,239,164,15))
        tela.blit(mafont0.render(str(50-speed),1,(25,50,250)),(236,239))
        tela.fill((255,255,255),(236,289,164,15))
        tela.blit(mafont0.render(str(score),1,(25,50,250)),(236,289))
        pygame.display.flip()
        bg = tela.copy()
    def play():
        # One round: reset state, show the "press a key" prompt, then drop
        # pieces until one lands at the very top (game over) or the window
        # is closed.
        global bg,grid,speed,score,speedx
        speedx = 50 ; speed = 50 ; score = 0
        tela.fill((0,0,0),(20,80,200,400))
        grid = [2049]*24+[4095]
        tela.fill((255,255,255),(236,239,164,15)) ; tela.fill((255,255,255),(236,289,164,15)) ; tela.fill((0,0,0),(221,120,80,80))
        tela.blit(mafont0.render(str(1),1,(25,50,250)),(236,239)) ; tela.blit(mafont0.render(str(0),1,(25,50,250)),(236,289))
        next = Objet(matriz[random.randint(0,5)],random.randint(0,3),(random.randint(100,255),random.randint(0,255),random.randint(0,255)))
        tela.blit(next.image,(221,120))
        tela.blit(mafont3.render('Pressione uma tecla para iniciar',1,txtcolor),(30,200))
        pygame.display.flip()
        tela.fill((0,0,0),(20,80,200,400))
        while True:
            if pygame.event.wait().type == pygame.QUIT:exit()
            elif pygame.event.wait().type == pygame.KEYDOWN:break
        while pygame.event.wait().type != pygame.QUIT:
            # NOTE(review): `next` shadows the builtin; it holds the preview piece.
            truc = next
            next = Objet(matriz[random.randint(0,6)],random.randint(0,3),(random.randint(200,255),random.randint(100,255),random.randint(0,255)))
            tela.fill((0,0,0),(221,120,80,80))
            tela.blit(next.image,(221,120))
            pygame.display.flip()
            bg = tela.copy()
            truc.update()
            if truc.y==0 :
                tela.blit(mafont3.render('PERDEU !',1,txtcolor),(30,200))
                tela.blit(mafont3.render('ENTER para recomecar o jogo',1,txtcolor),(27,230))
                pygame.display.flip()
                break
            grid_update(truc)
    play()
    # Post-game loop: ENTER restarts, ESC leaves, closing the window quits.
    while pygame.event.wait().type != pygame.QUIT:
        if pygame.key.get_pressed()[pygame.K_RETURN]: play()
        if pygame.key.get_pressed()[pygame.K_ESCAPE]: return
if __name__ == "__main__": main()
| Python |
import pygame,os
from pygame.locals import *
from sys import exit
# Minimal falling-block demo: a single piece drifts down; arrow keys
# move/rotate it, DOWN drops it faster, ESC or closing the window quits.
pygame.init()
tela = pygame.display.set_mode((800,600))
tela.fill((255,255,255))
pygame.display.set_caption('FittingBricks')
icon = pygame.image.load("imagens" + os.sep + "icone.png").convert_alpha()
pygame.display.set_icon(icon)
pygame.mouse.set_visible(False)
bloco_varia_y = 0.09  # fall speed in pixels per loop iteration (busy loop, no clock)
bloco_imagem = pygame.image.load("imagens" + os.sep + 'bloco6.png').convert_alpha()
bloco_width,bloco_height = bloco_imagem.get_size()
bloco_x,bloco_y = ((800/2 - bloco_width/2),bloco_height)
while True:
    for event in pygame.event.get():
        if event.type == QUIT:
            exit()
        elif event.type == KEYDOWN:
            if event.key == K_UP:
                # Rotate 90 degrees and refresh the cached dimensions.
                bloco_imagem = pygame.transform.rotate(bloco_imagem,90)
                bloco_width,bloco_height = bloco_imagem.get_size()
            if event.key == K_RIGHT:
                bloco_x += 20
            if event.key == K_LEFT:
                bloco_x -= 20
            if event.key == K_DOWN:
                bloco_y += 15
            if event.key == K_ESCAPE:
                exit()
    if bloco_y + bloco_height >= 600:
        # Bug fix: the piece was previously parked at y=600 (entirely below
        # the window) and never redrawn; clamp it to rest on the bottom edge.
        bloco_y = 600 - bloco_height
        tela.fill((255,255,255))
        tela.blit(bloco_imagem,(bloco_x,bloco_y))
    else:
        tela.fill((255,255,255))
        bloco_y += bloco_varia_y
        tela.blit(bloco_imagem,(bloco_x,bloco_y))
    pygame.display.update()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'afshar@google.com (Ali Afshar)'
# Add the library location to the path
import sys
sys.path.insert(0, 'lib')
import os
import httplib2
import sessions
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from apiclient.discovery import build
from apiclient.http import MediaInMemoryUpload
from apiclient.http import MediaUpload
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
from oauth2client.client import AccessTokenRefreshError
from oauth2client.appengine import CredentialsProperty
from oauth2client.appengine import StorageByKeyName
from oauth2client.appengine import simplejson as json
# OAuth 2.0 scopes this app requires: Drive per-file access plus the two
# userinfo scopes used to identify the user (see RedirectAuth below).
ALL_SCOPES = ('https://www.googleapis.com/auth/drive.file '
              'https://www.googleapis.com/auth/userinfo.email '
              'https://www.googleapis.com/auth/userinfo.profile')
def SibPath(name):
  """Build the path of a file that sits next to this source file.

  Args:
    name: Name of sibling file.
  Returns:
    Path to sibling file.
  """
  here = os.path.dirname(__file__)
  return os.path.join(here, name)
# Load the secret that is used for client side sessions.
# Create one of these for yourself with, for example:
#   python -c "import os; print os.urandom(64)" > session-secret
# NOTE(review): read at import time; a missing file aborts module load.
SESSION_SECRET = open(SibPath('session.secret')).read()
class Credentials(db.Model):
  """Datastore entity for storing OAuth2.0 credentials.

  The CredentialsProperty is provided by the Google API Python Client, and is
  used by the Storage classes to store OAuth 2.0 credentials in the data store.
  """
  # Opaque credentials blob, managed via StorageByKeyName keyed by user id.
  credentials = CredentialsProperty()
def CreateService(service, version, creds):
  """Create a Google API service.

  Load an API service from a discovery document and authorize it with the
  provided credentials.

  Args:
    service: Service name (e.g 'drive', 'oauth2').
    version: Service version (e.g 'v1').
    creds: Credentials used to authorize service.
  Returns:
    Authorized Google API service.
  """
  # Build an Http transport and authorize it with the supplied credentials,
  # then construct the service client from its discovery document.
  transport = httplib2.Http()
  creds.authorize(transport)
  return build(service, version, http=transport)
class DriveState(object):
"""Store state provided by Drive."""
def __init__(self, state):
"""Create a new instance of drive state.
Parse and load the JSON state parameter.
Args:
state: State query parameter as a string.
"""
if state:
state_data = json.loads(state)
self.action = state_data['action']
self.ids = map(str, state_data.get('ids', []))
else:
self.action = 'create'
self.ids = []
@classmethod
def FromRequest(cls, request):
"""Create a Drive State instance from an HTTP request.
Args:
cls: Type this class method is called against.
request: HTTP request.
"""
return DriveState(request.get('state'))
class BaseDriveHandler(webapp.RequestHandler):
  """Base request handler for drive applications.

  Adds Authorization support for Drive.
  """
  def CreateOAuthFlow(self):
    """Create OAuth2.0 flow controller.

    This controller can be used to perform all parts of the OAuth 2.0 dance
    including exchanging an Authorization code.

    Args:
      request: HTTP request to create OAuth2.0 flow for
    Returns:
      OAuth2.0 Flow instance suitable for performing OAuth2.0.
    """
    # Scope is deliberately left blank here; RedirectAuth assigns ALL_SCOPES
    # before step 1 of the flow when the redirect does not come from Drive.
    flow = flow_from_clientsecrets('client_secrets.json', scope='')
    # Dynamically set the redirect_uri based on the request URL. This is extremely
    # convenient for debugging to an alternative host without manually setting the
    # redirect URI.
    flow.redirect_uri = self.request.url.split('?', 1)[0].rsplit('/', 1)[0]
    return flow
  def GetCodeCredentials(self):
    """Create OAuth 2.0 credentials by extracting a code and performing OAuth2.0.

    The authorization code is extracted from the URI parameters. If it is absent,
    None is returned immediately. Otherwise, if it is present, it is used to
    perform step 2 of the OAuth 2.0 web server flow.

    Once a token is received, the user information is fetched from the userinfo
    service and stored in the session. The token is saved in the datastore against
    the user ID received from the userinfo service.

    Args:
      request: HTTP request used for extracting an authorization code and the
          session information.
    Returns:
      OAuth2.0 credentials suitable for authorizing clients or None if
      Authorization could not take place.
    """
    # Other frameworks use different API to get a query parameter.
    code = self.request.get('code')
    if not code:
      # returns None to indicate that no code was passed from Google Drive.
      return None
    # Auth flow is a controller that is loaded with the client information,
    # including client_id, client_secret, redirect_uri etc
    oauth_flow = self.CreateOAuthFlow()
    # Perform the exchange of the code. If there is a failure with exchanging
    # the code, return None.
    try:
      creds = oauth_flow.step2_exchange(code)
    except FlowExchangeError:
      return None
    # Create an API service that can use the userinfo API. Authorize it with our
    # credentials that we gained from the code exchange.
    users_service = CreateService('oauth2', 'v2', creds)
    # Make a call against the userinfo service to retrieve the user's information.
    # In this case we are interested in the user's "id" field.
    userid = users_service.userinfo().get().execute().get('id')
    # Store the user id in the user's cookie-based session.
    session = sessions.LilCookies(self, SESSION_SECRET)
    session.set_secure_cookie(name='userid', value=userid)
    # Store the credentials in the data store using the userid as the key.
    StorageByKeyName(Credentials, userid, 'credentials').put(creds)
    return creds
  def GetSessionCredentials(self):
    """Get OAuth 2.0 credentials for an HTTP session.

    If the user has a user id stored in their cookie session, extract that value
    and use it to load that user's credentials from the data store.

    Args:
      request: HTTP request to use session from.
    Returns:
      OAuth2.0 credentials suitable for authorizing clients.
    """
    # Try to load the user id from the session
    session = sessions.LilCookies(self, SESSION_SECRET)
    userid = session.get_secure_cookie(name='userid')
    if not userid:
      # return None to indicate that no credentials could be loaded from the
      # session.
      return None
    # Load the credentials from the data store, using the userid as a key.
    creds = StorageByKeyName(Credentials, userid, 'credentials').get()
    # if the credentials are invalid, return None to indicate that the credentials
    # cannot be used.
    if creds and creds.invalid:
      return None
    return creds
  def RedirectAuth(self):
    """Redirect a handler to an authorization page.

    Used when a handler fails to fetch credentials suitable for making Drive API
    requests. The request is redirected to an OAuth 2.0 authorization approval
    page and on approval, are returned to application.

    Args:
      handler: webapp.RequestHandler to redirect.
    """
    flow = self.CreateOAuthFlow()
    # Manually add the required scopes. Since this redirect does not originate
    # from the Google Drive UI, which automatically sets the scopes that are
    # listed in the API Console.
    flow.scope = ALL_SCOPES
    # Create the redirect URI by performing step 1 of the OAuth 2.0 web server
    # flow.
    uri = flow.step1_get_authorize_url(flow.redirect_uri)
    # Perform the redirect.
    self.redirect(uri)
class MainPage(BaseDriveHandler):
  """Web handler for the main page.

  Handles requests and returns the user interface for Open With and Create
  cases. Responsible for parsing the state provided from the Drive UI and acting
  appropriately.
  """
  def get(self):
    """Handle GET for Create New and Open With.

    This creates an authorized client, and checks whether a resource id has
    been passed or not. If a resource ID has been passed, this is the Open
    With use-case, otherwise it is the Create New use-case.
    """
    # Fetch the credentials by extracting an OAuth 2.0 authorization code from
    # the request URL. If the code is not present, redirect to the OAuth 2.0
    # authorization URL.
    creds = self.GetCodeCredentials()
    if not creds:
      return self.RedirectAuth()
    # Extract the numerical portion of the client_id from the stored value in
    # the OAuth flow. You could also store this value as a separate variable
    # somewhere.
    # NOTE(review): assumes ids shaped like "123-abc.apps.googleusercontent.com",
    # keeping only the leading numeric part -- confirm against the console value.
    client_id = self.CreateOAuthFlow().client_id.split('.')[0].split('-')[0]
    # Generate a state instance for the request, this includes the action, and
    # the file id(s) that have been sent from the Drive user interface.
    drive_state = DriveState.FromRequest(self.request)
    if drive_state.action == 'open':
      file_ids = [str(i) for i in drive_state.ids]
    else:
      # Create New: render the template with a single empty file id.
      file_ids = ['']
    self.RenderTemplate(file_ids=file_ids, client_id=client_id)
  def RenderTemplate(self, **context):
    """Render a named template in a context.

    Args:
      name: Template name.
      context: Keyword arguments to render as template variables.
    """
    self.response.headers['Content-Type'] = 'text/html'
    self.response.out.write(template.render('index.html', context))
class ServiceHandler(BaseDriveHandler):
"""Web handler for the service to read and write to Drive."""
  def post(self):
    """Called when HTTP POST requests are received by the web application.

    The POST body is JSON which is deserialized and used as values to create a
    new file in Drive. The authorization access token for this action is
    retrieved from the data store.
    """
    # Create a Drive service
    service = self.CreateDrive()
    if service is None:
      return
    # Load the data that has been posted as JSON.
    # NOTE(review): RequestJSON/RespondJSON are defined elsewhere in this file
    # (not in this chunk).
    data = self.RequestJSON()
    # Create a new file data structure.
    resource = {
      'title': data['title'],
      'description': data['description'],
      'mimeType': data['mimeType'],
    }
    try:
      # Make an insert request to create a new file. A MediaInMemoryUpload
      # instance is used to upload the file body.
      # NOTE(review): MediaInMemoryUpload must be imported from apiclient.http;
      # the top-of-file import pulls in MediaUpload only, so this raises
      # NameError until that import is added.
      resource = service.files().insert(
          body=resource,
          media_body=MediaInMemoryUpload(data.get('content', ''),
                                         data['mimeType']),
      ).execute()
      # Respond with the new file id as JSON.
      self.RespondJSON(resource['id'])
    except AccessTokenRefreshError:
      # In cases where the access token has expired and cannot be refreshed
      # (e.g. manual token revoking) redirect the user to the authorization page
      # to authorize.
      self.RedirectAuth()
  def get(self):
    """Called when HTTP GET requests are received by the web application.

    Use the query parameter file_id to fetch the required file's metadata then
    content and return it as a JSON object.

    Since DrEdit deals with text files, it is safe to dump the content directly
    into JSON, but this is not the case with binary files, where something like
    Base64 encoding is more appropriate.
    """
    # Create a Drive service
    service = self.CreateDrive()
    if service is None:
      return
    try:
      # Requests are expected to pass the file_id query parameter.
      file_id = self.request.get('file_id')
      if file_id:
        # Fetch the file metadata by making the service.files().get method of
        # the Drive API.
        f = service.files().get(id=file_id).execute()
        downloadUrl = f.get('downloadUrl')
        # If a download URL is provided in the file metadata, use it to make an
        # authorized request to fetch the file ontent. Set this content in the
        # data to return as the 'content' field. If there is no downloadUrl,
        # just set empty content.
        if downloadUrl:
          # Reuses the service's already-authorized transport for the download.
          resp, f['content'] = service._http.request(downloadUrl)
        else:
          f['content'] = ''
      else:
        # No file_id supplied: respond with JSON null.
        f = None
      # Generate a JSON response with the file data and return to the client.
      self.RespondJSON(f)
    except AccessTokenRefreshError:
      # Catch AccessTokenRefreshError which occurs when the API client library
      # fails to refresh a token. This occurs, for example, when a refresh token
      # is revoked. When this happens the user is redirected to the
      # Authorization URL.
      self.RedirectAuth()
def put(self):
    """Called when HTTP PUT requests are received by the web application.

    The PUT body is JSON which is deserialized and used as values to update
    a file in Drive. The authorization access token for this action is
    retrieved from the data store.
    """
    # Create a Drive service; None means an auth redirect was issued.
    service = self.CreateDrive()
    if service is None:
        return
    # Load the data that has been posted as JSON.
    data = self.RequestJSON()
    try:
        # Create a new file data structure.
        resource = {
            'title': data['title'] or 'Untitled Document',
            'description': data['description'],
            'mimeType': data['mimeType'],
        }
        # Make an update request to update the file. A MediaInMemoryUpload
        # instance is used to upload the file body. Because of a limitation,
        # this request must be made in two parts, the first to update the
        # metadata, and the second to update the body.
        resource = service.files().update(
            id=data['resource_id'],
            newRevision=False,
            body=resource,
            media_body=None,
        ).execute()
        resource = service.files().update(
            id=data['resource_id'],
            newRevision=True,
            body=None,
            media_body=MediaInMemoryUpload(data.get('content', ''),
                                           data['mimeType']),
        ).execute()
        # Respond with the updated file id as JSON.
        self.RespondJSON(resource['id'])
    except AccessTokenRefreshError:
        # In cases where the access token has expired and cannot be refreshed
        # (e.g. manual token revoking) redirect the user to the authorization
        # page to authorize.
        self.RedirectAuth()
def CreateDrive(self):
    """Create a Drive client instance.

    The service can only ever retrieve the credentials from the session.

    Returns:
        Authorized Drive service instance, or None (after scheduling an
        authorization redirect) when the session holds no credentials.
    """
    # For the service, the session holds the credentials.
    creds = self.GetSessionCredentials()
    if creds:
        # If the session contains credentials, use them to create a Drive
        # service instance.
        return CreateService('drive', 'v1', creds)
    else:
        # If no credentials could be loaded from the session, redirect the
        # user to the authorization page.
        self.RedirectAuth()
def RedirectAuth(self):
    """Redirect the client to an OAuth 2.0 authorization approval page.

    Used when a handler fails to fetch credentials suitable for making Drive
    API requests. On approval the user is returned to the application. The
    redirect target is delivered as a JSON payload for client-side handling.
    """
    flow = self.CreateOAuthFlow()
    # Manually add the required scopes, since this redirect does not
    # originate from the Google Drive UI (which automatically sets the
    # scopes that are listed in the API Console).
    flow.scope = ALL_SCOPES
    # Create the redirect URI by performing step 1 of the OAuth 2.0 web
    # server flow.
    uri = flow.step1_get_authorize_url(flow.redirect_uri)
    # Perform the redirect.
    self.RespondJSON({'redirect': uri})
def RespondJSON(self, data):
    """Write `data` to the HTTP response serialized as JSON.

    Args:
        data: Any JSON-serializable value to send to the client.
    """
    self.response.headers['Content-Type'] = 'application/json'
    payload = json.dumps(data)
    self.response.out.write(payload)
def RequestJSON(self):
    """Deserialize the HTTP request body as JSON.

    Returns:
        The parsed request body, or None when the body is empty.
    """
    raw = self.request.body
    if not raw:
        return None
    return json.loads(raw)
class MediaInMemoryUpload(MediaUpload):
    """MediaUpload for a chunk of bytes held in memory.

    Construct an instance and pass it as the media_body parameter of an API
    request method, for example when uploading plain text content.
    """

    def __init__(self, body, mimetype='application/octet-stream',
                 chunksize=256*1024, resumable=False):
        """Create a new MediaBytesUpload.

        Args:
            body: string, Bytes of body content.
            mimetype: string, Mime-type of the file or default of
                'application/octet-stream'.
            chunksize: int, File will be uploaded in chunks of this many
                bytes. Only used if resumable=True.
            resumable: bool, True if this is a resumable upload. False means
                upload in a single request.
        """
        self._body = body
        self._mimetype = mimetype
        self._resumable = resumable
        self._chunksize = chunksize

    def chunksize(self):
        """Chunk size for resumable uploads.

        Returns:
            Chunk size in bytes.
        """
        return self._chunksize

    def mimetype(self):
        """Mime type of the body.

        Returns:
            Mime type.
        """
        return self._mimetype

    def size(self):
        """Size of upload.

        Returns:
            Size of the body.
        """
        return len(self._body)

    def resumable(self):
        """Whether this upload is resumable.

        Returns:
            True if resumable upload or False.
        """
        return self._resumable

    def getbytes(self, begin, length):
        """Get bytes from the media.

        Args:
            begin: int, offset from beginning of file.
            length: int, number of bytes to read, starting at begin.

        Returns:
            A string of bytes read. May be shorter than length if EOF was
            reached first.
        """
        return self._body[begin:begin + length]
# Create a WSGI application suitable for running on App Engine.
application = webapp.WSGIApplication(
    [('/', MainPage), ('/svc', ServiceHandler)],
    # XXX Set to False in production.
    debug=True
)


def main():
    """Main entry point for executing a request with this handler."""
    run_wsgi_app(application)


if __name__ == "__main__":
    main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2012 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = 'afshar@google.com (Ali Afshar)'
import os
import httplib2
import sessions
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from apiclient.discovery import build_from_document
from apiclient.http import MediaUpload
from oauth2client import client
from oauth2client.appengine import CredentialsProperty
from oauth2client.appengine import StorageByKeyName
from oauth2client.appengine import simplejson as json
# Base URL for Google API endpoints.
APIS_BASE = 'https://www.googleapis.com'

# Space-delimited OAuth scopes: per-file Drive access plus the user's email
# and profile (the user id keys the stored credentials).
ALL_SCOPES = ('https://www.googleapis.com/auth/drive.file '
              'https://www.googleapis.com/auth/userinfo.email '
              'https://www.googleapis.com/auth/userinfo.profile')

# Query-string parameter names used during the OAuth dance.
CODE_PARAMETER = 'code'
STATE_PARAMETER = 'state'

# Secrets and discovery documents read once at module import time.
# NOTE(review): the open() handles are never closed; harmless for one-shot
# import-time reads but worth tidying with a context manager.
SESSION_SECRET = open('session.secret').read()
DRIVE_DISCOVERY_DOC = open('drive.json').read()
USERS_DISCOVERY_DOC = open('users.json').read()
class Credentials(db.Model):
    """Datastore entity for storing OAuth2.0 credentials."""
    # Serialized oauth2client credentials; entities are keyed by user id via
    # StorageByKeyName (see GetCodeCredentials/GetSessionCredentials).
    credentials = CredentialsProperty()
def CreateOAuthFlow(request):
    """Create an OAuth2.0 flow controller from the bundled client secrets.

    Args:
        request: HTTP request used to derive the flow's redirect URI.

    Returns:
        OAuth2.0 Flow instance suitable for performing OAuth2.0.
    """
    # Scope is assigned later by callers (see RedirectAuth); start empty.
    oauth_flow = client.flow_from_clientsecrets('client-debug.json', scope='')
    # Redirect back to this same URL, minus query string and trailing slash.
    base_url = request.url.split('?', 1)[0]
    oauth_flow.redirect_uri = base_url.rstrip('/')
    return oauth_flow
def GetCodeCredentials(request):
    """Exchange an authorization code, if present, for OAuth2.0 credentials.

    Side effects: stores the authenticated user's id in a secure session
    cookie and persists the credentials in the datastore keyed by that id.

    Args:
        request: HTTP request used for extracting an authorization code.

    Returns:
        OAuth2.0 credentials suitable for authorizing clients, or None when
        the request carries no authorization code.
    """
    code = request.get(CODE_PARAMETER)
    if not code:
        return None
    oauth_flow = CreateOAuthFlow(request)
    creds = oauth_flow.step2_exchange(code)
    # Identify the user so the credentials can be stored under their id.
    users_service = CreateService(USERS_DISCOVERY_DOC, creds)
    userid = users_service.userinfo().get().execute().get('id')
    request.session.set_secure_cookie(name='userid', value=userid)
    StorageByKeyName(Credentials, userid, 'credentials').put(creds)
    return creds
def GetSessionCredentials(request):
    """Load stored OAuth2.0 credentials for the session's user, if any.

    Args:
        request: HTTP request whose session identifies the user.

    Returns:
        Valid OAuth2.0 credentials, or None when the session has no user id,
        no stored credentials exist, or the stored credentials are invalid.
    """
    userid = request.session.get_secure_cookie(name='userid')
    if not userid:
        return None
    creds = StorageByKeyName(Credentials, userid, 'credentials').get()
    if not creds:
        return None
    if creds.invalid:
        return None
    return creds
def CreateService(discovery_doc, creds):
    """Create an authorized Google API service from a discovery document.

    Args:
        discovery_doc: Discovery doc used to configure the service.
        creds: Credentials used to authorize the service.

    Returns:
        Authorized Google API service.
    """
    authorized_http = httplib2.Http()
    creds.authorize(authorized_http)
    return build_from_document(discovery_doc, APIS_BASE, http=authorized_http)
def RedirectAuth(handler):
    """Send the handler's client to an OAuth2.0 authorization page.

    Args:
        handler: webapp.RequestHandler to redirect.
    """
    oauth_flow = CreateOAuthFlow(handler.request)
    # This redirect does not originate from the Drive UI, so request all
    # required scopes explicitly.
    oauth_flow.scope = ALL_SCOPES
    authorize_url = oauth_flow.step1_get_authorize_url(oauth_flow.redirect_uri)
    handler.redirect(authorize_url)
def CreateDrive(handler):
    """Create a fully authorized Drive service for this handler.

    Tries code-based credentials first (fresh OAuth approval), then falls
    back to credentials stored for the session. With neither available the
    client is redirected to the authorization page.

    Args:
        handler: RequestHandler from which the drive service is generated.

    Returns:
        Authorized drive service, or None after scheduling an auth redirect.
    """
    request = handler.request
    request.session = sessions.LilCookies(handler, SESSION_SECRET)
    creds = GetCodeCredentials(request) or GetSessionCredentials(request)
    if not creds:
        RedirectAuth(handler)
        return None
    return CreateService(DRIVE_DISCOVERY_DOC, creds)
def ServiceEnabled(view):
    """Decorator injecting an authorized Drive service into an HTML handler.

    Args:
        view: HTTP request handler method taking (handler, service).

    Returns:
        Decorated handler which receives the service as a parameter.
    """
    def ServiceDecoratedView(handler, view=view):
        # NOTE(review): when CreateDrive issues a redirect it returns None
        # and the view is still invoked with service=None — confirm views
        # tolerate that.
        service = CreateDrive(handler)
        body = view(handler, service)
        handler.response.headers['Content-Type'] = 'text/html'
        handler.response.out.write(body)
    return ServiceDecoratedView
def ServiceEnabledJson(view):
    """Decorator injecting a Drive service and parsed JSON body into a handler.

    Args:
        view: HTTP request handler method taking (handler, service, data).

    Returns:
        Decorated handler whose return value is serialized to JSON.
    """
    def ServiceDecoratedView(handler, view=view):
        service = CreateDrive(handler)
        raw = handler.request.body
        data = json.loads(raw) if raw else None
        payload = json.dumps(view(handler, service, data))
        handler.response.headers['Content-Type'] = 'application/json'
        handler.response.out.write(payload)
    return ServiceDecoratedView
class DriveState(object):
    """Parsed representation of the state parameter supplied by Drive."""

    def __init__(self, state):
        self.ParseState(state)

    @classmethod
    def FromRequest(cls, request):
        """Build a DriveState from an HTTP request's state query parameter.

        Args:
            cls: Type this class method is called against.
            request: HTTP request.
        """
        return DriveState(request.get(STATE_PARAMETER))

    def ParseState(self, state):
        """Populate action/ids from either a JSON or a plain state payload.

        Args:
            state: State parameter to parse.
        """
        if state.startswith('{'):
            self.ParseJsonState(state)
            return
        self.ParsePlainState(state)

    def ParseJsonState(self, state):
        """Parse a JSON-encoded state payload.

        Args:
            state: State parameter to parse.
        """
        decoded = json.loads(state)
        self.action = decoded['action']
        self.ids = map(str, decoded.get('ids', []))

    def ParsePlainState(self, state):
        """Interpret a bare resource id ('open') or empty state ('create').

        Args:
            state: State parameter to parse.
        """
        if not state:
            self.action = 'create'
            self.ids = []
        else:
            self.action = 'open'
            self.ids = [state]
class MediaInMemoryUpload(MediaUpload):
    """In-memory MediaUpload over a byte string.

    Pass an instance as the media_body parameter of an API request method,
    for example when uploading plain-text content already held in memory.
    """

    def __init__(self, body, mimetype='application/octet-stream',
                 chunksize=256*1024, resumable=False):
        """Create a new in-memory upload.

        Args:
            body: string, bytes of the body content.
            mimetype: string, MIME type of the content (defaults to
                'application/octet-stream').
            chunksize: int, chunk size in bytes; only used when
                resumable=True.
            resumable: bool, True to upload resumably, False to upload in a
                single request.
        """
        self._chunksize = chunksize
        self._resumable = resumable
        self._mimetype = mimetype
        self._body = body

    def chunksize(self):
        """Return the chunk size in bytes used for resumable uploads."""
        return self._chunksize

    def mimetype(self):
        """Return the MIME type of the body."""
        return self._mimetype

    def size(self):
        """Return the total size of the body in bytes."""
        return len(self._body)

    def resumable(self):
        """Return True when this upload should be performed resumably."""
        return self._resumable

    def getbytes(self, begin, length):
        """Return up to `length` bytes of the body starting at `begin`.

        Args:
            begin: int, offset from the beginning of the body.
            length: int, number of bytes to read.

        Returns:
            A string of bytes; may be shorter than `length` at EOF.
        """
        return self._body[begin:begin + length]
def RenderTemplate(name, **context):
    """Render a named template in a context.

    Args:
        name: Template name.
        context: Keyword arguments to render as template variables.

    Returns:
        The rendered template as a string.
    """
    return template.render(name, context)
| Python |
"""
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import base64
import urllib
import time
import random
import urlparse
import hmac
import binascii
import httplib2
try:
from urlparse import parse_qs
parse_qs # placate pyflakes
except ImportError:
# fall back for Python 2.5
from cgi import parse_qs
try:
from hashlib import sha1
sha = sha1
except ImportError:
# hashlib was added in Python 2.5
import sha
import _version
__version__ = _version.__version__
# OAuth protocol version advertised as oauth_version.
OAUTH_VERSION = '1.0'  # Hi Blaine!
# Defaults applied by Request and Server when not specified explicitly.
HTTP_METHOD = 'GET'
SIGNATURE_METHOD = 'PLAINTEXT'
class Error(RuntimeError):
    """Base class for OAuth failures raised by this module."""

    def __init__(self, message='OAuth error occurred.'):
        self._message = message

    @property
    def message(self):
        """Read-only message accessor (sidesteps the 2.6 deprecation of
        BaseException.message)."""
        return self._message

    def __str__(self):
        return self.message
class MissingSignature(Error):
    """Raised when a request to be verified carries no oauth_signature."""
    pass
def build_authenticate_header(realm=''):
    """Return the optional WWW-Authenticate header for a 401 response.

    Args:
        realm: Protection realm to advertise (may be empty).

    Returns:
        Single-entry dict mapping 'WWW-Authenticate' to the OAuth challenge.
    """
    challenge = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': challenge}
def build_xoauth_string(url, consumer, token=None):
    """Build an XOAUTH string for use in SMTP/IMAP authentication.

    Args:
        url: Protected-resource URL the string grants access to.
        consumer: Consumer credentials used to sign the request.
        token: Optional access Token included in the signature.

    Returns:
        A 'GET <url> <comma-joined signed parameters>' XOAUTH payload.
    """
    request = Request.from_consumer_and_token(consumer, token,
                                              "GET", url)
    signing_method = SignatureMethod_HMAC_SHA1()
    request.sign_request(signing_method, consumer, token)
    params = []
    # Emit parameters sorted by name, skipping unset (None) values.
    for k, v in sorted(request.iteritems()):
        if v is not None:
            params.append('%s="%s"' % (k, escape(v)))
    return "%s %s %s" % ("GET", url, ','.join(params))
def to_unicode(s):
    """Convert to unicode, raise exception with instructive error
    message if s is not unicode, ascii, or utf-8.

    Args:
        s: A unicode object, or a str holding ascii/utf-8 bytes.

    Returns:
        A unicode object.

    Raises:
        TypeError: when s is neither unicode nor str, or is a str whose
            bytes are not valid utf-8.
    """
    if not isinstance(s, unicode):
        if not isinstance(s, str):
            raise TypeError('You are required to pass either unicode or string here, not: %r (%s)' % (type(s), s))
        try:
            s = s.decode('utf-8')
        except UnicodeDecodeError, le:
            raise TypeError('You are required to pass either a unicode object or a utf-8 string here. You passed a Python string object which contained non-utf-8: %r. The UnicodeDecodeError that resulted from attempting to interpret it as utf-8 was: %s' % (s, le,))
    return s
def to_utf8(s):
    """Encode s as utf-8 bytes; raises TypeError for non-text input
    (via to_unicode)."""
    return to_unicode(s).encode('utf-8')
def to_unicode_if_string(s):
    """Decode s to unicode when it is a string; pass other values through."""
    if isinstance(s, basestring):
        return to_unicode(s)
    else:
        return s
def to_utf8_if_string(s):
    """Encode s to utf-8 bytes when it is a string; pass other values through."""
    if isinstance(s, basestring):
        return to_utf8(s)
    else:
        return s
def to_unicode_optional_iterator(x):
    """Decode x, or each element of iterable x, to unicode.

    Raise TypeError if x is a str containing non-utf8 bytes or if x is
    an iterable which contains such a str. Non-iterable, non-string values
    are returned untouched.
    """
    if isinstance(x, basestring):
        return to_unicode(x)
    try:
        l = list(x)
    except TypeError, e:
        # Non-iterable values (ints, None, ...) pass through unchanged; the
        # assert guards against swallowing unrelated TypeErrors.
        assert 'is not iterable' in str(e)
        return x
    else:
        return [ to_unicode(e) for e in l ]
def to_utf8_optional_iterator(x):
    """Encode x (a string), or each string element of iterable x, to utf-8.

    Raise TypeError if x is a str containing non-utf8 bytes or if x is an
    iterable which contains such a str. Non-iterable, non-string values are
    returned untouched.
    """
    if isinstance(x, basestring):
        return to_utf8(x)
    try:
        l = list(x)
    except TypeError, e:
        # Non-iterable values pass through unchanged.
        assert 'is not iterable' in str(e)
        return x
    else:
        # NOTE(review): unlike the unicode variant, elements here go through
        # to_utf8_if_string, so non-string elements survive unconverted.
        return [ to_utf8_if_string(e) for e in l ]
def escape(s):
    """Percent-encode a URL value, escaping '/' as well; only '~' is kept
    literal, per the OAuth parameter-encoding rules."""
    return urllib.quote(s.encode('utf-8'), safe='~')
def generate_timestamp():
    """Return the current time as whole seconds since the epoch (UTC)."""
    now = time.time()
    return int(now)
def generate_nonce(length=8):
    """Return a pseudorandom string of `length` decimal digits."""
    digits = []
    for _ in range(length):
        digits.append(str(random.randint(0, 9)))
    return ''.join(digits)
def generate_verifier(length=8):
    """Return a pseudorandom verifier string of `length` decimal digits."""
    return ''.join(str(random.randint(0, 9)) for _ in range(length))
class Consumer(object):
    """OAuth consumer ("client") credentials.

    The consumer is the third-party service accessing protected resources
    on behalf of an end user. The service provider issues it a *key*, sent
    with every request to identify the consumer, and a *secret*, used only
    when signing requests to prove the request really came from that
    registered consumer. These credentials are what the consumer uses to
    start the OAuth authorization process (requesting a request token).
    """

    key = None
    secret = None

    def __init__(self, key, secret):
        self.key = key
        self.secret = secret
        # Reject unusable credentials up front.
        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")

    def __str__(self):
        payload = {
            'oauth_consumer_key': self.key,
            'oauth_consumer_secret': self.secret,
        }
        return urllib.urlencode(payload)
class Token(object):
    """An OAuth credential used to request authorization or a protected
    resource.

    Tokens in OAuth comprise a *key* and a *secret*. The key is included in
    requests to identify the token being used, but the secret is used only
    in the signature, to prove that the requester is who the server gave
    the token to.

    When first negotiating the authorization, the consumer asks for a
    *request token* that the live user authorizes with the service
    provider. The consumer then exchanges the request token for an *access
    token* that can be used to access protected resources.
    """

    # Identifying key sent with each request.
    key = None
    # Secret used only when signing; never sent in the clear.
    secret = None
    # Optional OAuth 1.0a callback URL plus its confirmation flag ('true').
    callback = None
    callback_confirmed = None
    # OAuth 1.0a verifier issued after user approval.
    verifier = None

    def __init__(self, key, secret):
        self.key = key
        self.secret = secret
        if self.key is None or self.secret is None:
            raise ValueError("Key and secret must be set.")

    def set_callback(self, callback):
        # Recording a callback also marks it confirmed, per the 1.0a flow.
        self.callback = callback
        self.callback_confirmed = 'true'

    def set_verifier(self, verifier=None):
        # Accept a server-issued verifier, or generate a pseudorandom one.
        if verifier is not None:
            self.verifier = verifier
        else:
            self.verifier = generate_verifier()

    def get_callback_url(self):
        # When both callback and verifier are set, return the callback with
        # oauth_verifier appended to its query string; otherwise return the
        # callback unchanged (possibly None).
        if self.callback and self.verifier:
            # Append the oauth_verifier.
            parts = urlparse.urlparse(self.callback)
            scheme, netloc, path, params, query, fragment = parts[:6]
            if query:
                query = '%s&oauth_verifier=%s' % (query, self.verifier)
            else:
                query = 'oauth_verifier=%s' % self.verifier
            return urlparse.urlunparse((scheme, netloc, path, params,
                                        query, fragment))
        return self.callback

    def to_string(self):
        """Returns this token as a plain string, suitable for storage.

        The resulting string includes the token's secret, so you should never
        send or store this string where a third party can read it.
        """
        data = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret,
        }
        if self.callback_confirmed is not None:
            data['oauth_callback_confirmed'] = self.callback_confirmed
        return urllib.urlencode(data)

    @staticmethod
    def from_string(s):
        """Deserializes a token from a string like one returned by
        `to_string()`.

        Raises:
            ValueError: when s is empty, unparseable, or lacks oauth_token
                or oauth_token_secret.
        """
        if not len(s):
            raise ValueError("Invalid parameter string.")
        params = parse_qs(s, keep_blank_values=False)
        if not len(params):
            raise ValueError("Invalid parameter string.")
        try:
            key = params['oauth_token'][0]
        except Exception:
            raise ValueError("'oauth_token' not found in OAuth request.")
        try:
            secret = params['oauth_token_secret'][0]
        except Exception:
            raise ValueError("'oauth_token_secret' not found in "
                             "OAuth request.")
        token = Token(key, secret)
        try:
            token.callback_confirmed = params['oauth_callback_confirmed'][0]
        except KeyError:
            pass  # 1.0, no callback confirmed.
        return token

    def __str__(self):
        return self.to_string()
def setter(attr):
    """Build a property from a setter function.

    The wrapped function's name doubles as the instance-dict slot: reads
    come from self.__dict__[name] (raising AttributeError when unset),
    writes go through `attr` itself, and `del` clears the slot.
    """
    name = attr.__name__

    def read(self):
        if name not in self.__dict__:
            raise AttributeError(name)
        return self.__dict__[name]

    def clear(self):
        del self.__dict__[name]

    return property(read, attr, clear)
class Request(dict):
    """The parameters and information for an HTTP request, suitable for
    authorizing with OAuth credentials.

    When a consumer wants to access a service's protected resources, it does
    so using a signed HTTP request identifying itself (the consumer) with
    its key, and providing an access token authorized by the end user to
    access those resources.

    The dict payload holds the request's (possibly multi-valued)
    parameters, OAuth ones included.
    """

    version = OAUTH_VERSION

    def __init__(self, method=HTTP_METHOD, url=None, parameters=None,
                 body='', is_form_encoded=False):
        # `url` goes through the property below, which also derives
        # self.normalized_url for signing.
        if url is not None:
            self.url = to_unicode(url)
        self.method = method
        if parameters is not None:
            # Coerce keys and (possibly iterable) values to unicode.
            for k, v in parameters.iteritems():
                k = to_unicode(k)
                v = to_unicode_optional_iterator(v)
                self[k] = v
        self.body = body
        self.is_form_encoded = is_form_encoded

    @setter
    def url(self, value):
        # Property setter: stores the raw URL and derives normalized_url —
        # scheme://host/path with default ports, query and fragment removed —
        # which is what the signature base string uses.
        self.__dict__['url'] = value
        if value is not None:
            scheme, netloc, path, params, query, fragment = urlparse.urlparse(value)
            # Exclude default port numbers.
            if scheme == 'http' and netloc[-3:] == ':80':
                netloc = netloc[:-3]
            elif scheme == 'https' and netloc[-4:] == ':443':
                netloc = netloc[:-4]
            if scheme not in ('http', 'https'):
                raise ValueError("Unsupported URL %s (%s)." % (value, scheme))
            # Normalized URL excludes params, query, and fragment.
            self.normalized_url = urlparse.urlunparse((scheme, netloc, path, None, None, None))
        else:
            self.normalized_url = None
            self.__dict__['url'] = None

    @setter
    def method(self, value):
        # HTTP method is stored upper-cased, as signing requires.
        self.__dict__['method'] = value.upper()

    def _get_timestamp_nonce(self):
        # Raises KeyError when oauth_timestamp/oauth_nonce are absent.
        return self['oauth_timestamp'], self['oauth_nonce']

    def get_nonoauth_parameters(self):
        """Get any non-OAuth parameters."""
        return dict([(k, v) for k, v in self.iteritems()
                     if not k.startswith('oauth_')])

    def to_header(self, realm=''):
        """Serialize as a header for an HTTPAuth request."""
        # Only oauth_* parameters belong in the Authorization header.
        oauth_params = ((k, v) for k, v in self.items()
                        if k.startswith('oauth_'))
        stringy_params = ((k, escape(str(v))) for k, v in oauth_params)
        header_params = ('%s="%s"' % (k, v) for k, v in stringy_params)
        params_header = ', '.join(header_params)
        auth_header = 'OAuth realm="%s"' % realm
        if params_header:
            auth_header = "%s, %s" % (auth_header, params_header)
        return {'Authorization': auth_header}

    def to_postdata(self):
        """Serialize as post data for a POST request."""
        d = {}
        for k, v in self.iteritems():
            d[k.encode('utf-8')] = to_utf8_optional_iterator(v)
        # tell urlencode to deal with sequence values and map them correctly
        # to resulting querystring. for example self["k"] = ["v1", "v2"] will
        # result in 'k=v1&k=v2' and not k=%5B%27v1%27%2C+%27v2%27%5D
        return urllib.urlencode(d, True).replace('+', '%20')

    def to_url(self):
        """Serialize as a URL for a GET request."""
        base_url = urlparse.urlparse(self.url)
        try:
            query = base_url.query
        except AttributeError:
            # must be python <2.5
            query = base_url[4]
        query = parse_qs(query)
        # Merge this request's parameters into the URL's existing query.
        for k, v in self.items():
            query.setdefault(k, []).append(v)
        try:
            scheme = base_url.scheme
            netloc = base_url.netloc
            path = base_url.path
            params = base_url.params
            fragment = base_url.fragment
        except AttributeError:
            # must be python <2.5
            scheme = base_url[0]
            netloc = base_url[1]
            path = base_url[2]
            params = base_url[3]
            fragment = base_url[5]
        url = (scheme, netloc, path, params,
               urllib.urlencode(query, True), fragment)
        return urlparse.urlunparse(url)

    def get_parameter(self, parameter):
        # Like dict.get, but raises oauth Error when the parameter is unset.
        ret = self.get(parameter)
        if ret is None:
            raise Error('Parameter not found: %s' % parameter)
        return ret

    def get_normalized_parameters(self):
        """Return a string that contains the parameters that must be signed."""
        items = []
        for key, value in self.iteritems():
            # oauth_signature itself is never part of the base string.
            if key == 'oauth_signature':
                continue
            # 1.0a/9.1.1 states that kvp must be sorted by key, then by value,
            # so we unpack sequence values into multiple items for sorting.
            if isinstance(value, basestring):
                items.append((to_utf8_if_string(key), to_utf8(value)))
            else:
                try:
                    value = list(value)
                except TypeError, e:
                    assert 'is not iterable' in str(e)
                    items.append((to_utf8_if_string(key), to_utf8_if_string(value)))
                else:
                    items.extend((to_utf8_if_string(key), to_utf8_if_string(item)) for item in value)
        # Include any query string parameters from the provided URL
        query = urlparse.urlparse(self.url)[4]
        url_items = self._split_url_string(query).items()
        url_items = [(to_utf8(k), to_utf8(v)) for k, v in url_items if k != 'oauth_signature' ]
        items.extend(url_items)
        items.sort()
        encoded_str = urllib.urlencode(items)
        # Encode signature parameters per Oauth Core 1.0 protocol
        # spec draft 7, section 3.6
        # (http://tools.ietf.org/html/draft-hammer-oauth-07#section-3.6)
        # Spaces must be encoded with "%20" instead of "+"
        return encoded_str.replace('+', '%20').replace('%7E', '~')

    def sign_request(self, signature_method, consumer, token):
        """Set the signature parameter to the result of sign."""
        if not self.is_form_encoded:
            # according to
            # http://oauth.googlecode.com/svn/spec/ext/body_hash/1.0/oauth-bodyhash.html
            # section 4.1.1 "OAuth Consumers MUST NOT include an
            # oauth_body_hash parameter on requests with form-encoded
            # request bodies."
            self['oauth_body_hash'] = base64.b64encode(sha(self.body).digest())
        if 'oauth_consumer_key' not in self:
            self['oauth_consumer_key'] = consumer.key
        if token and 'oauth_token' not in self:
            self['oauth_token'] = token.key
        self['oauth_signature_method'] = signature_method.name
        self['oauth_signature'] = signature_method.sign(self, consumer, token)

    @classmethod
    def make_timestamp(cls):
        """Get seconds since epoch (UTC)."""
        return str(int(time.time()))

    @classmethod
    def make_nonce(cls):
        """Generate pseudorandom number."""
        return str(random.randint(0, 100000000))

    @classmethod
    def from_request(cls, http_method, http_url, headers=None, parameters=None,
                     query_string=None):
        """Combines multiple parameter sources.

        Precedence (last wins): Authorization header, then query_string,
        then the URL's own query parameters, merged over `parameters`.
        Returns None when no parameters are found at all.
        """
        if parameters is None:
            parameters = {}
        # Headers
        if headers and 'Authorization' in headers:
            auth_header = headers['Authorization']
            # Check that the authorization header is OAuth.
            if auth_header[:6] == 'OAuth ':
                auth_header = auth_header[6:]
                try:
                    # Get the parameters from the header.
                    header_params = cls._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    raise Error('Unable to parse OAuth parameters from '
                                'Authorization header.')
        # GET or POST query string.
        if query_string:
            query_params = cls._split_url_string(query_string)
            parameters.update(query_params)
        # URL parameters.
        param_str = urlparse.urlparse(http_url)[4]  # query
        url_params = cls._split_url_string(param_str)
        parameters.update(url_params)
        if parameters:
            return cls(http_method, http_url, parameters)
        return None

    @classmethod
    def from_consumer_and_token(cls, consumer, token=None,
                                http_method=HTTP_METHOD, http_url=None, parameters=None,
                                body='', is_form_encoded=False):
        # Pre-populate the standard OAuth parameters; caller-supplied
        # parameters win on conflict.
        if not parameters:
            parameters = {}
        defaults = {
            'oauth_consumer_key': consumer.key,
            'oauth_timestamp': cls.make_timestamp(),
            'oauth_nonce': cls.make_nonce(),
            'oauth_version': cls.version,
        }
        defaults.update(parameters)
        parameters = defaults
        if token:
            parameters['oauth_token'] = token.key
            if token.verifier:
                parameters['oauth_verifier'] = token.verifier
        return Request(http_method, http_url, parameters, body=body,
                       is_form_encoded=is_form_encoded)

    @classmethod
    def from_token_and_callback(cls, token, callback=None,
                                http_method=HTTP_METHOD, http_url=None, parameters=None):
        # Build a request carrying a token (and optional callback), e.g. for
        # the authorization step.
        if not parameters:
            parameters = {}
        parameters['oauth_token'] = token.key
        if callback:
            parameters['oauth_callback'] = callback
        return cls(http_method, http_url, parameters)

    @staticmethod
    def _split_header(header):
        """Turn Authorization: header into parameters."""
        params = {}
        parts = header.split(',')
        for param in parts:
            # Ignore realm parameter.
            if param.find('realm') > -1:
                continue
            # Remove whitespace.
            param = param.strip()
            # Split key-value.
            param_parts = param.split('=', 1)
            # Remove quotes and unescape the value.
            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
        return params

    @staticmethod
    def _split_url_string(param_str):
        """Turn URL string into parameters."""
        parameters = parse_qs(param_str.encode('utf-8'), keep_blank_values=True)
        # Flatten: keep only the first value of each multi-valued parameter.
        for k, v in parameters.iteritems():
            parameters[k] = urllib.unquote(v[0])
        return parameters
class Client(httplib2.Http):
    """OAuthClient is a worker to attempt to execute a request.

    Subclasses httplib2.Http; each outgoing request is signed with the
    configured consumer/token before being delegated to the parent class.
    """

    def __init__(self, consumer, token=None, cache=None, timeout=None,
                 proxy_info=None):
        # Validate credential types up front so misuse fails fast.
        if consumer is not None and not isinstance(consumer, Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, Token):
            raise ValueError("Invalid token.")
        self.consumer = consumer
        self.token = token
        # Default signing strategy; override via set_signature_method().
        self.method = SignatureMethod_HMAC_SHA1()
        httplib2.Http.__init__(self, cache=cache, timeout=timeout, proxy_info=proxy_info)

    def set_signature_method(self, method):
        # Replace the signing strategy; must be a SignatureMethod instance.
        if not isinstance(method, SignatureMethod):
            raise ValueError("Invalid signature method.")
        self.method = method

    def request(self, uri, method="GET", body='', headers=None,
                redirections=httplib2.DEFAULT_MAX_REDIRECTS, connection_type=None):
        """Sign and send an HTTP request; see httplib2.Http.request."""
        DEFAULT_POST_CONTENT_TYPE = 'application/x-www-form-urlencoded'
        if not isinstance(headers, dict):
            headers = {}
        if method == "POST":
            headers['Content-Type'] = headers.get('Content-Type',
                                                  DEFAULT_POST_CONTENT_TYPE)
        is_form_encoded = \
            headers.get('Content-Type') == 'application/x-www-form-urlencoded'
        # Form-encoded bodies contribute their parameters to the signature.
        if is_form_encoded and body:
            parameters = parse_qs(body)
        else:
            parameters = None
        req = Request.from_consumer_and_token(self.consumer,
            token=self.token, http_method=method, http_url=uri,
            parameters=parameters, body=body, is_form_encoded=is_form_encoded)
        req.sign_request(self.method, self.consumer, self.token)
        # Derive the realm (scheme://host) for the Authorization header.
        # NOTE: urllib.splittype/splithost are Python 2 internal helpers.
        schema, rest = urllib.splittype(uri)
        if rest.startswith('//'):
            hierpart = '//'
        else:
            hierpart = ''
        host, rest = urllib.splithost(rest)
        realm = schema + ':' + hierpart + host
        # Attach the OAuth parameters where the request shape dictates:
        # form posts carry them in the body, GETs in the URL, everything
        # else in the Authorization header.
        if is_form_encoded:
            body = req.to_postdata()
        elif method == "GET":
            uri = req.to_url()
        else:
            headers.update(req.to_header(realm=realm))
        return httplib2.Http.request(self, uri, method=method, body=body,
            headers=headers, redirections=redirections,
            connection_type=connection_type)
class Server(object):
    """A skeletal implementation of a service provider, providing protected
    resources to requests from authorized consumers.

    This class implements the logic to check requests for authorization. You
    can use it with your web server or web framework to protect certain
    resources with OAuth.
    """

    timestamp_threshold = 300  # In seconds, five minutes.
    version = OAUTH_VERSION
    signature_methods = None

    def __init__(self, signature_methods=None):
        # Map of signature-method name -> SignatureMethod instance.
        self.signature_methods = signature_methods or {}

    def add_signature_method(self, signature_method):
        """Register a SignatureMethod under its name; returns the registry."""
        self.signature_methods[signature_method.name] = signature_method
        return self.signature_methods

    def verify_request(self, request, consumer, token):
        """Verifies an api call and checks all the parameters.

        Raises Error/MissingSignature on a bad version, timestamp or
        signature; on success returns the non-oauth request parameters.
        """
        self._check_version(request)
        self._check_signature(request, consumer, token)
        parameters = request.get_nonoauth_parameters()
        return parameters

    def build_authenticate_header(self, realm=''):
        """Optional support for the authenticate header."""
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}

    def _check_version(self, request):
        """Verify the correct version of the request for this server."""
        version = self._get_version(request)
        if version and version != self.version:
            raise Error('OAuth version %s not supported.' % str(version))

    def _get_version(self, request):
        """Return the version of the request for this server."""
        try:
            version = request.get_parameter('oauth_version')
        # Was a bare `except:`; narrowed so SystemExit/KeyboardInterrupt
        # are not swallowed. An absent parameter defaults to our version.
        except Exception:
            version = OAUTH_VERSION
        return version

    def _get_signature_method(self, request):
        """Figure out the signature with some defaults."""
        try:
            signature_method = request.get_parameter('oauth_signature_method')
        except Exception:  # was bare `except:`
            signature_method = SIGNATURE_METHOD

        try:
            # Get the signature method object.
            signature_method = self.signature_methods[signature_method]
        except KeyError:  # was bare `except:`; only the lookup can fail here
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise Error('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))

        return signature_method

    def _get_verifier(self, request):
        return request.get_parameter('oauth_verifier')

    def _check_signature(self, request, consumer, token):
        """Validate timestamp and signature; raise on any mismatch."""
        timestamp, nonce = request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        signature_method = self._get_signature_method(request)

        try:
            signature = request.get_parameter('oauth_signature')
        except Exception:  # was bare `except:`
            raise MissingSignature('Missing oauth_signature.')

        # Validate the signature.
        valid = signature_method.check(request, consumer, token, signature)
        if not valid:
            key, base = signature_method.signing_base(request, consumer, token)
            raise Error('Invalid signature. Expected signature base '
                        'string: %s' % base)

    def _check_timestamp(self, timestamp):
        """Verify that timestamp is recentish."""
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise Error('Expired timestamp: given %d and now %s has a '
                        'greater difference than threshold %d' % (timestamp, now,
                        self.timestamp_threshold))
class SignatureMethod(object):
    """A way of signing requests.

    The OAuth protocol lets consumers and service providers pick a way to sign
    requests. This interface shows the methods expected by the other `oauth`
    modules for signing requests. Subclass it and implement its methods to
    provide a new way to sign requests.
    """

    def signing_base(self, request, consumer, token):
        """Calculates the string that needs to be signed.

        Returns a 2-tuple of (signing key, message to sign). The message may
        be surfaced in error output to help clients debug their software.
        """
        raise NotImplementedError

    def sign(self, request, consumer, token):
        """Returns the signature for the given request, based on the consumer
        and token also provided.

        Implementations should build the message via signing_base() so that
        debugging output stays useful.
        """
        raise NotImplementedError

    def check(self, request, consumer, token, signature):
        """Returns whether the given signature is the correct signature for
        the given consumer and token signing the given request."""
        return self.sign(request, consumer, token) == signature
class SignatureMethod_HMAC_SHA1(SignatureMethod):
    """HMAC-SHA1 request signing, per the OAuth 1.0 spec."""

    name = 'HMAC-SHA1'

    def signing_base(self, request, consumer, token):
        """Build the (key, base string) pair used for HMAC-SHA1 signing."""
        if not hasattr(request, 'normalized_url') or request.normalized_url is None:
            raise ValueError("Base URL for request is not set.")

        # Base string: method, URL, and normalized params, each escaped.
        parts = (
            escape(request.method),
            escape(request.normalized_url),
            escape(request.get_normalized_parameters()),
        )

        # Key: consumer secret, '&', then token secret (empty if no token).
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        return key, '&'.join(parts)

    def sign(self, request, consumer, token):
        """Builds the base signature string."""
        key, raw = self.signing_base(request, consumer, token)
        digest = hmac.new(key, raw, sha)
        # Calculate the digest base 64, dropping the trailing newline.
        return binascii.b2a_base64(digest.digest())[:-1]
class SignatureMethod_PLAINTEXT(SignatureMethod):
    """PLAINTEXT signing: the signature is just the shared secrets."""

    name = 'PLAINTEXT'

    def signing_base(self, request, consumer, token):
        """Concatenates the consumer key and secret with the token's
        secret."""
        sig = '%s&' % escape(consumer.secret)
        if token:
            sig += escape(token.secret)
        # Key and message are the same string for PLAINTEXT.
        return sig, sig

    def sign(self, request, consumer, token):
        """The PLAINTEXT signature is the signing base itself."""
        key, raw = self.signing_base(request, consumer, token)
        return raw
| Python |
# This is the version of this source code.

manual_verstr = "1.5"    # Manually-maintained major.minor version.
auto_build_num = "211"   # Build number appended by the build automation.

verstr = manual_verstr + "." + auto_build_num
try:
    # Prefer pyutil's richer Version object when it is installed.
    from pyutil.version_class import Version as pyutil_Version
    __version__ = pyutil_Version(verstr)
except (ImportError, ValueError):
    # Maybe there is no pyutil installed.
    from distutils.version import LooseVersion as distutils_Version
    __version__ = distutils_Version(verstr)
| Python |
"""
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import oauth2
import imaplib
class IMAP4_SSL(imaplib.IMAP4_SSL):
    """IMAP wrapper for imaplib.IMAP4_SSL that implements XOAUTH."""

    def authenticate(self, url, consumer, token):
        """Authenticate to the server with the XOAUTH mechanism, using the
        given OAuth consumer and token to build the XOAUTH string."""
        if consumer is not None and not isinstance(consumer, oauth2.Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, oauth2.Token):
            raise ValueError("Invalid token.")

        # imaplib passes a server challenge to the authobject; XOAUTH
        # ignores it and always sends the same signed string.
        xoauth = lambda challenge: oauth2.build_xoauth_string(url, consumer, token)
        imaplib.IMAP4_SSL.authenticate(self, 'XOAUTH', xoauth)
| Python |
"""
The MIT License
Copyright (c) 2007-2010 Leah Culver, Joe Stump, Mark Paschal, Vic Fryzel
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import oauth2
import smtplib
import base64
class SMTP(smtplib.SMTP):
    """SMTP wrapper for smtplib.SMTP that implements XOAUTH."""

    def authenticate(self, url, consumer, token):
        """Issue an AUTH XOAUTH command built from the OAuth consumer/token."""
        if consumer is not None and not isinstance(consumer, oauth2.Consumer):
            raise ValueError("Invalid consumer.")
        if token is not None and not isinstance(token, oauth2.Token):
            raise ValueError("Invalid token.")

        xoauth = oauth2.build_xoauth_string(url, consumer, token)
        self.docmd('AUTH', 'XOAUTH %s' % base64.b64encode(xoauth))
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Command-line tools for authenticating via OAuth 2.0
Do the OAuth 2.0 Web Server dance for a command line application. Stores the
generated credentials in a common file that is used by other example apps in
the same directory.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = ['run']
import BaseHTTPServer
import gflags
import socket
import sys
from client import FlowExchangeError
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
FLAGS = gflags.FLAGS

# Command-line flags controlling how the OAuth redirect is captured: by
# default a local web server is started on the first free port from
# auth_host_port; --noauth_local_webserver falls back to copy/paste ('oob').
gflags.DEFINE_boolean('auth_local_webserver', True,
                      ('Run a local web server to handle redirects during '
                       'OAuth authorization.'))

gflags.DEFINE_string('auth_host_name', 'localhost',
                     ('Host name to use when running a local web server to '
                      'handle redirects during OAuth authorization.'))

gflags.DEFINE_multi_int('auth_host_port', [8080, 8090],
                        ('Port to use when running a local web server to '
                         'handle redirects during OAuth authorization.'))
class ClientRedirectServer(BaseHTTPServer.HTTPServer):
    """A server to handle OAuth 2.0 redirects back to localhost.

    Waits for a single request and parses the query parameters
    into query_params and then stops serving.
    """
    # Overwritten per-request by ClientRedirectHandler.do_GET with the
    # query parameters of the redirect (e.g. 'code' or 'error').
    query_params = {}
class ClientRedirectHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """A handler for OAuth 2.0 redirects back to localhost.

    Waits for a single request and parses the query parameters
    into the servers query_params and then stops serving.
    """

    def do_GET(self):
        """Handle a GET request.

        Parses the query parameters, stashes them on the owning server and
        prints a completion message. Note that we can't detect whether an
        error occurred.
        """
        self.send_response(200)
        self.send_header("Content-type", "text/html")
        self.end_headers()

        # Everything after the first '?' is the OAuth redirect's query string.
        raw_query = self.path.split('?', 1)[-1]
        self.server.query_params = dict(parse_qsl(raw_query))

        self.wfile.write("<html><head><title>Authentication Status</title></head>")
        self.wfile.write("<body><p>The authentication flow has completed.</p>")
        self.wfile.write("</body></html>")

    def log_message(self, format, *args):
        """Do not log messages to stdout while running as command line program."""
        pass
def run(flow, storage):
    """Core code for a command-line application.

    Args:
      flow: Flow, an OAuth 2.0 Flow to step through.
      storage: Storage, a Storage to store the credential in.

    Returns:
      Credentials, the obtained credential.
    """
    if FLAGS.auth_local_webserver:
        success = False
        port_number = 0
        # Try each candidate port in turn until one can be bound.
        for port in FLAGS.auth_host_port:
            port_number = port
            try:
                httpd = ClientRedirectServer((FLAGS.auth_host_name, port),
                                             ClientRedirectHandler)
            except socket.error, e:
                # Port unavailable; best-effort, fall through to the next one.
                pass
            else:
                success = True
                break
        # If no port could be bound, degrade to the out-of-band flow below.
        FLAGS.auth_local_webserver = success

    if FLAGS.auth_local_webserver:
        oauth_callback = 'http://%s:%s/' % (FLAGS.auth_host_name, port_number)
    else:
        # 'oob' asks the provider to show the user a code to copy/paste.
        oauth_callback = 'oob'
    authorize_url = flow.step1_get_authorize_url(oauth_callback)

    print 'Go to the following link in your browser:'
    print authorize_url
    print

    if FLAGS.auth_local_webserver:
        print 'If your browser is on a different machine then exit and re-run this'
        print 'application with the command-line parameter '
        print '--noauth_local_webserver.'
        print

    code = None
    if FLAGS.auth_local_webserver:
        # Block until the provider redirects the browser back to us once.
        httpd.handle_request()
        if 'error' in httpd.query_params:
            sys.exit('Authentication request was rejected.')
        if 'code' in httpd.query_params:
            code = httpd.query_params['code']
        else:
            print 'Failed to find "code" in the query parameters of the redirect.'
            sys.exit('Try running with --noauth_local_webserver.')
    else:
        code = raw_input('Enter verification code: ').strip()

    try:
        credential = flow.step2_exchange(code)
    except FlowExchangeError, e:
        sys.exit('Authentication has failed: %s' % e)

    # Persist the credential and attach the store so refreshes are saved.
    storage.put(credential)
    credential.set_store(storage)
    print 'Authentication successful.'

    return credential
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
"""Multi-credential file store with lock support.
This module implements a JSON credential store where multiple
credentials can be stored in one file. That file supports locking
both in a single process and across processes.
The credentials themselves are keyed off of:
* client_id
* user_agent
* scope
The format of the stored data is like so:
{
'file_version': 1,
'data': [
{
'key': {
'clientId': '<client id>',
'userAgent': '<user agent>',
'scope': '<scope>'
},
'credential': {
# JSON serialized Credentials.
}
}
]
}
"""
__author__ = 'jbeda@google.com (Joe Beda)'
import base64
import fcntl
import logging
import os
import threading
try: # pragma: no cover
import simplejson
except ImportError: # pragma: no cover
try:
# Try to import from django, should work on App Engine
from django.utils import simplejson
except ImportError:
# Should work for Python2.6 and higher.
import json as simplejson
from client import Storage as BaseStorage
from client import Credentials
logger = logging.getLogger(__name__)

# A dict from 'filename'->_MultiStore instances; one shared store per file.
_multistores = {}
# Guards creation of and lookup in _multistores across threads.
_multistores_lock = threading.Lock()
class Error(Exception):
    """Base exception type for errors raised by this module."""
    pass
class NewerCredentialStoreError(Error):
    """Raised when the credential store is a newer version than supported."""
    pass
def get_credential_storage(filename, client_id, user_agent, scope,
                           warn_on_readonly=True):
    """Get a Storage instance for a credential.

    Args:
      filename: The JSON file storing a set of credentials
      client_id: The client_id for the credential
      user_agent: The user agent for the credential
      scope: string or list of strings, Scope(s) being requested
      warn_on_readonly: if True, log a warning if the store is readonly

    Returns:
      An object derived from client.Storage for getting/setting the
      credential.
    """
    # Canonicalize the path so every caller shares one _MultiStore per file.
    filename = os.path.realpath(os.path.expanduser(filename))
    # `with` replaces the manual acquire/try/finally/release; same semantics.
    with _multistores_lock:
        multistore = _multistores.setdefault(
            filename, _MultiStore(filename, warn_on_readonly))
    # A list of scopes is normalized to one space-delimited string key.
    if isinstance(scope, list):  # was `type(scope) is list`
        scope = ' '.join(scope)
    return multistore._get_storage(client_id, user_agent, scope)
class _MultiStore(object):
    """A file backed store for multiple credentials."""

    def __init__(self, filename, warn_on_readonly=True):
        """Initialize the class.

        This will create the file if necessary.
        """
        self._filename = filename
        self._thread_lock = threading.Lock()
        self._file_handle = None
        self._read_only = False
        self._warn_on_readonly = warn_on_readonly

        self._create_file_if_needed()

        # Cache of deserialized store. This is only valid after the
        # _MultiStore is locked or _refresh_data_cache is called. This is
        # of the form of:
        #
        # (client_id, user_agent, scope) -> OAuth2Credential
        #
        # If this is None, then the store hasn't been read yet.
        self._data = None

    class _Storage(BaseStorage):
        """A Storage object that knows how to read/write a single credential."""

        def __init__(self, multistore, client_id, user_agent, scope):
            self._multistore = multistore
            self._client_id = client_id
            self._user_agent = user_agent
            self._scope = scope

        def acquire_lock(self):
            """Acquires any lock necessary to access this Storage.

            This lock is not reentrant.
            """
            self._multistore._lock()

        def release_lock(self):
            """Release the Storage lock.

            Trying to release a lock that isn't held will result in a
            RuntimeError.
            """
            self._multistore._unlock()

        def locked_get(self):
            """Retrieve credential.

            The Storage lock must be held when this is called.

            Returns:
              oauth2client.client.Credentials
            """
            credential = self._multistore._get_credential(
                self._client_id, self._user_agent, self._scope)
            if credential:
                # Re-attach this Storage so future refreshes are written back.
                credential.set_store(self)
            return credential

        def locked_put(self, credentials):
            """Write a credential.

            The Storage lock must be held when this is called.

            Args:
              credentials: Credentials, the credentials to store.
            """
            self._multistore._update_credential(credentials, self._scope)

    def _create_file_if_needed(self):
        """Create an empty file if necessary.

        This method will not initialize the file. Instead it implements a
        simple version of "touch" to ensure the file has been created.
        """
        if not os.path.exists(self._filename):
            # Restrict the new file to owner read/write (umask 0177).
            old_umask = os.umask(0177)
            try:
                open(self._filename, 'a+').close()
            finally:
                os.umask(old_umask)

    def _lock(self):
        """Lock the entire multistore."""
        self._thread_lock.acquire()
        # Check to see if the file is writeable.
        if os.access(self._filename, os.W_OK):
            self._file_handle = open(self._filename, 'r+')
            # Cross-process exclusivity via an fcntl byte-range lock.
            fcntl.lockf(self._file_handle.fileno(), fcntl.LOCK_EX)
        else:
            # Cannot open in read/write mode. Open only in read mode.
            self._file_handle = open(self._filename, 'r')
            self._read_only = True
            if self._warn_on_readonly:
                logger.warn('The credentials file (%s) is not writable. Opening in '
                            'read-only mode. Any refreshed credentials will only be '
                            'valid for this run.' % self._filename)
        if os.path.getsize(self._filename) == 0:
            logger.debug('Initializing empty multistore file')
            # The multistore is empty so write out an empty file.
            self._data = {}
            self._write()
        elif not self._read_only or self._data is None:
            # Only refresh the data if we are read/write or we haven't
            # cached the data yet. If we are readonly, we assume it isn't
            # changing out from under us and that we only have to read it
            # once. This prevents us from whacking any new access keys that
            # we have cached in memory but were unable to write out.
            self._refresh_data_cache()

    def _unlock(self):
        """Release the lock on the multistore."""
        if not self._read_only:
            # The fcntl lock was only taken in the read/write branch of _lock.
            fcntl.lockf(self._file_handle.fileno(), fcntl.LOCK_UN)
        self._file_handle.close()
        self._thread_lock.release()

    def _locked_json_read(self):
        """Get the raw content of the multistore file.

        The multistore must be locked when this is called.

        Returns:
          The contents of the multistore decoded as JSON.
        """
        assert self._thread_lock.locked()
        self._file_handle.seek(0)
        return simplejson.load(self._file_handle)

    def _locked_json_write(self, data):
        """Write a JSON serializable data structure to the multistore.

        The multistore must be locked when this is called.

        Args:
          data: The data to be serialized and written.
        """
        assert self._thread_lock.locked()
        if self._read_only:
            # Best-effort store: silently skip writes in read-only mode.
            return
        self._file_handle.seek(0)
        simplejson.dump(data, self._file_handle, sort_keys=True, indent=2)
        self._file_handle.truncate()

    def _refresh_data_cache(self):
        """Refresh the contents of the multistore.

        The multistore must be locked when this is called.

        Raises:
          NewerCredentialStoreError: Raised when a newer client has written the
            store.
        """
        self._data = {}
        try:
            raw_data = self._locked_json_read()
        except Exception:
            logger.warn('Credential data store could not be loaded. '
                        'Will ignore and overwrite.')
            return

        version = 0
        try:
            version = raw_data['file_version']
        except Exception:
            logger.warn('Missing version for credential data store. It may be '
                        'corrupt or an old version. Overwriting.')
        if version > 1:
            raise NewerCredentialStoreError(
                'Credential file has file_version of %d. '
                'Only file_version of 1 is supported.' % version)

        credentials = []
        try:
            credentials = raw_data['data']
        except (TypeError, KeyError):
            pass

        for cred_entry in credentials:
            try:
                (key, credential) = self._decode_credential_from_json(cred_entry)
                self._data[key] = credential
            except:
                # If something goes wrong loading a credential, just ignore it
                logger.info('Error decoding credential, skipping', exc_info=True)

    def _decode_credential_from_json(self, cred_entry):
        """Load a credential from our JSON serialization.

        Args:
          cred_entry: A dict entry from the data member of our format

        Returns:
          (key, cred) where the key is the key tuple and the cred is the
            OAuth2Credential object.
        """
        raw_key = cred_entry['key']
        client_id = raw_key['clientId']
        user_agent = raw_key['userAgent']
        scope = raw_key['scope']
        key = (client_id, user_agent, scope)
        credential = None
        # Round-trip through dumps so Credentials.new_from_json gets a string.
        credential = Credentials.new_from_json(simplejson.dumps(cred_entry['credential']))
        return (key, credential)

    def _write(self):
        """Write the cached data back out.

        The multistore must be locked.
        """
        raw_data = {'file_version': 1}
        raw_creds = []
        raw_data['data'] = raw_creds
        for (cred_key, cred) in self._data.items():
            raw_key = {
                'clientId': cred_key[0],
                'userAgent': cred_key[1],
                'scope': cred_key[2]
            }
            raw_cred = simplejson.loads(cred.to_json())
            raw_creds.append({'key': raw_key, 'credential': raw_cred})
        self._locked_json_write(raw_data)

    def _get_credential(self, client_id, user_agent, scope):
        """Get a credential from the multistore.

        The multistore must be locked.

        Args:
          client_id: The client_id for the credential
          user_agent: The user agent for the credential
          scope: A string for the scope(s) being requested

        Returns:
          The credential specified or None if not present
        """
        key = (client_id, user_agent, scope)
        return self._data.get(key, None)

    def _update_credential(self, cred, scope):
        """Update a credential and write the multistore.

        This must be called when the multistore is locked.

        Args:
          cred: The OAuth2Credential to update/set
          scope: The scope(s) that this credential covers
        """
        key = (cred.client_id, cred.user_agent, scope)
        self._data[key] = cred
        self._write()

    def _get_storage(self, client_id, user_agent, scope):
        """Get a Storage object to get/set a credential.

        This Storage is a 'view' into the multistore.

        Args:
          client_id: The client_id for the credential
          user_agent: The user agent for the credential
          scope: A string for the scope(s) being requested

        Returns:
          A Storage object that can be used to get/set this cred
        """
        return self._Storage(self, client_id, user_agent, scope)
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""An OAuth 2.0 client.
Tools for interacting with OAuth 2.0 protected resources.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import base64
import clientsecrets
import copy
import datetime
import httplib2
import logging
import os
import sys
import time
import urllib
import urlparse
HAS_OPENSSL = False
try:
from oauth2client.crypt import Signer
from oauth2client.crypt import make_signed_jwt
from oauth2client.crypt import verify_signed_jwt_with_certs
HAS_OPENSSL = True
except ImportError:
pass
try: # pragma: no cover
import simplejson
except ImportError: # pragma: no cover
try:
# Try to import from django, should work on App Engine
from django.utils import simplejson
except ImportError:
# Should work for Python2.6 and higher.
import json as simplejson
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
# Determine if we can write to the file system, and if we can use a local file
# cache behind httplib2.
if hasattr(os, 'tempnam'):
    # Put cache files in the directory '.cache'.
    CACHED_HTTP = httplib2.Http('.cache')
else:
    CACHED_HTTP = httplib2.Http()

logger = logging.getLogger(__name__)

# Expiry is stored in RFC3339 UTC format
EXPIRY_FORMAT = '%Y-%m-%dT%H:%M:%SZ'

# Which certs to use to validate id_tokens received.
# NOTE: the name misspells "VERIFICATION"; kept as-is for compatibility.
ID_TOKEN_VERIFICATON_CERTS = 'https://www.googleapis.com/oauth2/v1/certs'
class Error(Exception):
    """Root exception type for everything raised by this module."""
    pass
class FlowExchangeError(Error):
    """Raised when exchanging an authorization grant for an access token fails."""
    pass
class AccessTokenRefreshError(Error):
    """Raised when an attempt to refresh an expired access token fails."""
    pass
class UnknownClientSecretsFlowError(Error):
    """Raised when the client secrets file names an unknown OAuth 2.0 flow type."""
    pass
class AccessTokenCredentialsError(Error):
    """Raised because holding only the access_token means no refresh is possible."""
    pass
class VerifyJwtTokenError(Error):
    """Could not retrieve certificates for validation."""
    pass
def _abstract():
raise NotImplementedError('You need to override this function')
class Credentials(object):
    """Base class for all Credentials objects.

    Subclasses must define an authorize() method that applies the credentials to
    an HTTP transport.

    Subclasses must also specify a classmethod named 'from_json' that takes a JSON
    string as input and returns an instantiated Credentials object.
    """

    # Members holding live objects that must never be serialized.
    NON_SERIALIZED_MEMBERS = ['store']

    def authorize(self, http):
        """Take an httplib2.Http instance (or equivalent) and
        authorizes it for the set of credentials, usually by
        replacing http.request() with a method that adds in
        the appropriate headers and then delegates to the original
        Http.request() method.
        """
        _abstract()

    def _to_json(self, strip):
        """Utility function for creating a JSON representation of an instance of Credentials.

        Args:
          strip: array, An array of names of members to not include in the JSON.

        Returns:
          string, a JSON representation of this instance, suitable to pass to
          from_json().
        """
        t = type(self)
        d = copy.copy(self.__dict__)
        for member in strip:
            del d[member]
        # datetime is not JSON serializable; flatten it to an EXPIRY_FORMAT string.
        if 'token_expiry' in d and isinstance(d['token_expiry'], datetime.datetime):
            d['token_expiry'] = d['token_expiry'].strftime(EXPIRY_FORMAT)
        # Add in information we will need later to reconstitute this instance.
        d['_class'] = t.__name__
        d['_module'] = t.__module__
        return simplejson.dumps(d)

    def to_json(self):
        """Creating a JSON representation of an instance of Credentials.

        Returns:
          string, a JSON representation of this instance, suitable to pass to
          from_json().
        """
        return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)

    @classmethod
    def new_from_json(cls, s):
        """Utility class method to instantiate a Credentials subclass from a JSON
        representation produced by to_json().

        Args:
          s: string, JSON from to_json().

        Returns:
          An instance of the subclass of Credentials that was serialized with
          to_json().
        """
        data = simplejson.loads(s)
        # Find and call the right classmethod from_json() to restore the object,
        # using the '_module'/'_class' breadcrumbs written by _to_json().
        module = data['_module']
        m = __import__(module, fromlist=module.split('.')[:-1])
        kls = getattr(m, data['_class'])
        from_json = getattr(kls, 'from_json')
        return from_json(s)
class Flow(object):
    """Common base type for all OAuth 2.0 flow implementations."""
    pass
class Storage(object):
    """Base class for all Storage objects.

    Store and retrieve a single credential. This class supports locking
    such that multiple processes and threads can operate on a single
    store.
    """

    def acquire_lock(self):
        """Acquires any lock necessary to access this Storage.

        This lock is not reentrant. The base implementation is a no-op.
        """
        pass

    def release_lock(self):
        """Release the Storage lock.

        Trying to release a lock that isn't held will result in a
        RuntimeError. The base implementation is a no-op.
        """
        pass

    def locked_get(self):
        """Retrieve credential.

        The Storage lock must be held when this is called.

        Returns:
          oauth2client.client.Credentials
        """
        _abstract()

    def locked_put(self, credentials):
        """Write a credential.

        The Storage lock must be held when this is called.

        Args:
          credentials: Credentials, the credentials to store.
        """
        _abstract()

    def get(self):
        """Retrieve credential, handling locking internally.

        The Storage lock must *not* be held when this is called.

        Returns:
          oauth2client.client.Credentials
        """
        self.acquire_lock()
        try:
            return self.locked_get()
        finally:
            # Always release, even if locked_get raised.
            self.release_lock()

    def put(self, credentials):
        """Write a credential, handling locking internally.

        Args:
          credentials: Credentials, the credentials to store.
        """
        self.acquire_lock()
        try:
            self.locked_put(credentials)
        finally:
            # Always release, even if locked_put raised.
            self.release_lock()
class OAuth2Credentials(Credentials):
"""Credentials object for OAuth 2.0.
Credentials can be applied to an httplib2.Http object using the authorize()
method, which then adds the OAuth 2.0 access token to each request.
OAuth2Credentials objects may be safely pickled and unpickled.
"""
def __init__(self, access_token, client_id, client_secret, refresh_token,
             token_expiry, token_uri, user_agent, id_token=None):
    """Create an instance of OAuth2Credentials.

    This constructor is not usually called by the user, instead
    OAuth2Credentials objects are instantiated by the OAuth2WebServerFlow.

    Args:
      access_token: string, access token.
      client_id: string, client identifier.
      client_secret: string, client secret.
      refresh_token: string, refresh token.
      token_expiry: datetime, when the access_token expires.
      token_uri: string, URI of token endpoint.
      user_agent: string, The HTTP User-Agent to provide for this application.
      id_token: object, The identity of the resource owner.

    Notes:
      store: callable, A callable that when passed a Credential
        will store the credential back to where it came from.
        This is needed to store the latest access_token if it
        has expired and been refreshed.
    """
    self.access_token = access_token
    self.client_id = client_id
    self.client_secret = client_secret
    self.refresh_token = refresh_token
    # Storage is attached later via set_store(); never serialized.
    self.store = None
    self.token_expiry = token_expiry
    self.token_uri = token_uri
    self.user_agent = user_agent
    self.id_token = id_token

    # True if the credentials have been revoked or expired and can't be
    # refreshed.
    self.invalid = False
def to_json(self):
    """Serialize to JSON, omitting the members in NON_SERIALIZED_MEMBERS."""
    return self._to_json(Credentials.NON_SERIALIZED_MEMBERS)
@classmethod
def from_json(cls, s):
    """Instantiate a Credentials object from a JSON description of it. The JSON
    should have been produced by calling .to_json() on the object.

    Args:
      s: string, JSON from to_json().

    Returns:
      An instance of a Credentials subclass.
    """
    data = simplejson.loads(s)
    if 'token_expiry' in data and not isinstance(data['token_expiry'],
                                                 datetime.datetime):
        try:
            data['token_expiry'] = datetime.datetime.strptime(
                data['token_expiry'], EXPIRY_FORMAT)
        # Was a bare `except:`. strptime raises ValueError for a malformed
        # string and TypeError for a non-string; either degrades to
        # "no known expiry" instead of failing deserialization.
        except (TypeError, ValueError):
            data['token_expiry'] = None
    retval = OAuth2Credentials(
        data['access_token'],
        data['client_id'],
        data['client_secret'],
        data['refresh_token'],
        data['token_expiry'],
        data['token_uri'],
        data['user_agent'],
        data.get('id_token', None))
    retval.invalid = data['invalid']
    return retval
@property
def access_token_expired(self):
    """True if the credential is expired or invalid.

    If the token_expiry isn't set, we assume the token doesn't expire.
    """
    if self.invalid:
        return True
    if not self.token_expiry:
        return False

    now = datetime.datetime.utcnow()
    expired = now >= self.token_expiry
    if expired:
        logger.info('access_token is expired. Now: %s, token_expiry: %s',
                    now, self.token_expiry)
    return expired
def set_store(self, store):
    """Set the Storage for the credential.

    Args:
      store: Storage, an implementation of Storage object.
        This is needed to store the latest access_token if it
        has expired and been refreshed. This implementation uses
        locking to check for updates before updating the
        access_token.
    """
    self.store = store
def _updateFromCredential(self, other):
    """Overwrite this Credential's state with another instance's state."""
    state = other.__getstate__()
    self.__dict__.update(state)
def __getstate__(self):
    """Trim the state down to something that can be pickled."""
    # The attached Storage holds live handles/locks and is not picklable.
    state = dict(self.__dict__)
    del state['store']
    return state
def __setstate__(self, state):
    """Reconstitute the state of the object from being pickled."""
    self.__dict__.update(state)
    # The store is never pickled (see __getstate__); re-attach via set_store().
    self.store = None
def _generate_refresh_request_body(self):
    """Generate the body that will be used in the refresh request."""
    # Form-encoded refresh_token grant, per the OAuth 2.0 token endpoint.
    # NOTE: this is the Python 2 urllib.urlencode.
    body = urllib.urlencode({
        'grant_type': 'refresh_token',
        'client_id': self.client_id,
        'client_secret': self.client_secret,
        'refresh_token': self.refresh_token,
        })
    return body
def _generate_refresh_request_headers(self):
    """Generate the headers that will be used in the refresh request."""
    headers = {'content-type': 'application/x-www-form-urlencoded'}
    # Only advertise a user-agent when the application supplied one.
    if self.user_agent is not None:
        headers['user-agent'] = self.user_agent
    return headers
def _refresh(self, http_request):
    """Refreshes the access_token.

    This method first checks by reading the Storage object if available.
    If a refresh is still needed, it holds the Storage lock until the
    refresh is completed.

    Args:
        http_request: callable, matching the signature of
            httplib2.Http.request, used to make the refresh request.
    """
    if not self.store:
        # No shared Storage: refresh unconditionally.
        self._do_refresh_request(http_request)
    else:
        # Another process/thread may already have refreshed and stored a
        # newer token; re-read under the lock before hitting the network.
        self.store.acquire_lock()
        try:
            new_cred = self.store.locked_get()
            if (new_cred and not new_cred.invalid and
                new_cred.access_token != self.access_token):
                logger.info('Updated access_token read from Storage')
                self._updateFromCredential(new_cred)
            else:
                self._do_refresh_request(http_request)
        finally:
            # Release even if the refresh raised, or every later refresh
            # would deadlock on the Storage lock.
            self.store.release_lock()
def _do_refresh_request(self, http_request):
    """Refresh the access_token using the refresh_token.

    Args:
        http_request: callable, matching the signature of
            httplib2.Http.request, used to make the refresh request.

    Raises:
        AccessTokenRefreshError: When the refresh fails.
    """
    body = self._generate_refresh_request_body()
    headers = self._generate_refresh_request_headers()

    logger.info('Refreshing access_token')
    resp, content = http_request(
        self.token_uri, method='POST', body=body, headers=headers)
    if resp.status == 200:
        # TODO(jcgregorio) Raise an error if loads fails?
        d = simplejson.loads(content)
        self.access_token = d['access_token']
        self.refresh_token = d.get('refresh_token', self.refresh_token)
        if 'expires_in' in d:
            self.token_expiry = datetime.timedelta(
                seconds=int(d['expires_in'])) + datetime.datetime.utcnow()
        else:
            self.token_expiry = None
        if self.store:
            # Persist the fresh token so other consumers of the Storage
            # pick it up without refreshing again.
            self.store.locked_put(self)
    else:
        # An {'error':...} response body means the token is expired or
        # revoked, so we flag the credentials as such.
        logger.error('Failed to retrieve access token: %s', content)
        error_msg = 'Invalid response %s.' % resp['status']
        try:
            d = simplejson.loads(content)
        except ValueError:
            # Body wasn't JSON; fall back to the generic message.
            d = None
        if d is not None and 'error' in d:
            error_msg = d['error']
            self.invalid = True
            if self.store:
                self.store.locked_put(self)
        raise AccessTokenRefreshError(error_msg)
def authorize(self, http):
    """Authorize an httplib2.Http instance with these credentials.

    Args:
        http: An instance of httplib2.Http
            or something that acts like it.

    Returns:
        A modified instance of http that was passed in.

    Example:

        h = httplib2.Http()
        h = credentials.authorize(h)

    You can't create a new OAuth subclass of httplib2.Authentication
    because it never gets passed the absolute URI, which is needed for
    signing. So instead we have to overload 'request' with a closure
    that adds in the Authorization header and then calls the original
    version of 'request()'.
    """
    request_orig = http.request

    # The closure that will replace 'httplib2.Http.request'.
    def new_request(uri, method='GET', body=None, headers=None,
                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                    connection_type=None):
        if not self.access_token:
            logger.info('Attempting refresh to obtain initial access_token')
            self._refresh(request_orig)

        # Modify the request headers to add the appropriate
        # Authorization header.
        if headers is None:
            headers = {}
        headers['authorization'] = 'OAuth ' + self.access_token
        if self.user_agent is not None:
            if 'user-agent' in headers:
                headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
            else:
                headers['user-agent'] = self.user_agent

        # Removed stray debug logging of the URI and headers: the headers
        # include the Authorization bearer token, so logging them leaked
        # the credential into application logs.
        resp, content = request_orig(uri, method, body, headers,
                                     redirections, connection_type)
        if resp.status == 401:
            # The token may have just expired: refresh once and retry the
            # request with the new token.
            logger.info('Refreshing due to a 401')
            self._refresh(request_orig)
            headers['authorization'] = 'OAuth ' + self.access_token
            return request_orig(uri, method, body, headers,
                                redirections, connection_type)
        else:
            return (resp, content)

    http.request = new_request
    return http
class AccessTokenCredentials(OAuth2Credentials):
    """Credentials object for OAuth 2.0.

    Credentials can be applied to an httplib2.Http object using the
    authorize() method, which then signs each request from that object
    with the OAuth 2.0 access token. This set of credentials is for the
    use case where you have acquired an OAuth 2.0 access_token from
    another place such as a JavaScript client or another web
    application, and wish to use it from Python. Because only the
    access_token is present it can not be refreshed and will in time
    expire.

    AccessTokenCredentials objects may be safely pickled and unpickled.

    Usage:
        credentials = AccessTokenCredentials('<an access token>',
            'my-user-agent/1.0')
        http = httplib2.Http()
        http = credentials.authorize(http)

    Exceptions:
        AccessTokenCredentialsExpired: raised when the access_token expires or is
            revoked.
    """

    def __init__(self, access_token, user_agent):
        """Create an instance of OAuth2Credentials

        This is one of the few types of Credentials that you should construct,
        Credentials objects are usually instantiated by a Flow.

        Args:
            access_token: string, access token.
            user_agent: string, The HTTP User-Agent to provide for this
                application.

        Notes:
            store: callable, a callable that when passed a Credential
                will store the credential back to where it came from.
        """
        # client_id/client_secret/refresh_token/token_expiry/token_uri are
        # all None: this credential cannot be refreshed by design.
        super(AccessTokenCredentials, self).__init__(
            access_token,
            None,
            None,
            None,
            None,
            None,
            user_agent)

    @classmethod
    def from_json(cls, s):
        """Rebuild an AccessTokenCredentials from its JSON representation."""
        data = simplejson.loads(s)
        retval = AccessTokenCredentials(
            data['access_token'],
            data['user_agent'])
        return retval

    def _refresh(self, http_request):
        # There is no refresh_token, so refreshing is impossible by design.
        raise AccessTokenCredentialsError(
            "The access_token is expired or invalid and can't be refreshed.")
class AssertionCredentials(OAuth2Credentials):
    """Abstract Credentials object used for OAuth 2.0 assertion grants.

    This credential does not require a flow to instantiate because it
    represents a two legged flow, and therefore has all of the required
    information to generate and refresh its own access tokens. It must
    be subclassed to generate the appropriate assertion string.

    AssertionCredentials objects may be safely pickled and unpickled.
    """

    def __init__(self, assertion_type, user_agent,
                 token_uri='https://accounts.google.com/o/oauth2/token',
                 **unused_kwargs):
        """Constructor for AssertionFlowCredentials.

        Args:
            assertion_type: string, assertion type that will be declared to the
                auth server
            user_agent: string, The HTTP User-Agent to provide for this
                application.
            token_uri: string, URI for token endpoint. For convenience
                defaults to Google's endpoints but any OAuth 2.0 provider can
                be used.
        """
        # No access/refresh token or client id/secret: tokens are obtained
        # by posting a freshly generated assertion instead.
        super(AssertionCredentials, self).__init__(
            None,
            None,
            None,
            None,
            None,
            token_uri,
            user_agent)
        self.assertion_type = assertion_type

    def _generate_refresh_request_body(self):
        """Build the token-endpoint body for the assertion grant."""
        assertion = self._generate_assertion()

        body = urllib.urlencode({
            'assertion_type': self.assertion_type,
            'assertion': assertion,
            'grant_type': 'assertion',
        })

        return body

    def _generate_assertion(self):
        """Generate the assertion string that will be used in the access token
        request.

        Must be overridden by subclasses; the base implementation delegates
        to the module's _abstract() helper.
        """
        _abstract()
if HAS_OPENSSL:
# PyOpenSSL is not a prerequisite for oauth2client, so if it is missing then
# don't create the SignedJwtAssertionCredentials or the verify_id_token()
# method.
class SignedJwtAssertionCredentials(AssertionCredentials):
    """Credentials object used for OAuth 2.0 Signed JWT assertion grants.

    This credential does not require a flow to instantiate because it
    represents a two legged flow, and therefore has all of the required
    information to generate and refresh its own access tokens.
    """

    MAX_TOKEN_LIFETIME_SECS = 3600  # 1 hour in seconds

    def __init__(self,
                 service_account_name,
                 private_key,
                 scope,
                 private_key_password='notasecret',
                 user_agent=None,
                 token_uri='https://accounts.google.com/o/oauth2/token',
                 **kwargs):
        """Constructor for SignedJwtAssertionCredentials.

        Args:
            service_account_name: string, id for account, usually an email
                address.
            private_key: string, private key in P12 format.
            scope: string or list of strings, scope(s) of the credentials being
                requested.
            private_key_password: string, password for private_key.
            user_agent: string, HTTP User-Agent to provide for this application.
            token_uri: string, URI for token endpoint. For convenience
                defaults to Google's endpoints but any OAuth 2.0 provider can
                be used.
            kwargs: kwargs, Additional parameters to add to the JWT token, for
                example prn=joe@example.org.
        """
        super(SignedJwtAssertionCredentials, self).__init__(
            'http://oauth.net/grant_type/jwt/1.0/bearer',
            user_agent,
            token_uri=token_uri,
            )

        if type(scope) is list:
            scope = ' '.join(scope)
        self.scope = scope

        self.private_key = private_key
        self.private_key_password = private_key_password
        self.service_account_name = service_account_name
        self.kwargs = kwargs

    @classmethod
    def from_json(cls, s):
        """Rebuild a SignedJwtAssertionCredentials from its JSON form.

        Bug fix: the previous implementation passed private_key_password
        positionally where the constructor expects scope (and vice versa),
        and passed the kwargs dict as an extra positional argument, which
        raised TypeError. Keyword arguments keep every value bound to the
        right parameter.
        """
        data = simplejson.loads(s)
        retval = SignedJwtAssertionCredentials(
            data['service_account_name'],
            data['private_key'],
            data['scope'],
            private_key_password=data['private_key_password'],
            user_agent=data['user_agent'],
            token_uri=data['token_uri'],
            **data['kwargs'])
        retval.invalid = data['invalid']
        return retval

    def _generate_assertion(self):
        """Generate the assertion that will be used in the request."""
        now = long(time.time())
        payload = {
            'aud': self.token_uri,
            'scope': self.scope,
            'iat': now,
            'exp': now + SignedJwtAssertionCredentials.MAX_TOKEN_LIFETIME_SECS,
            'iss': self.service_account_name
        }
        payload.update(self.kwargs)
        logging.debug(str(payload))

        return make_signed_jwt(
            Signer.from_string(self.private_key, self.private_key_password),
            payload)
def verify_id_token(id_token, audience, http=None,
                    cert_uri=ID_TOKEN_VERIFICATON_CERTS):
    """Verifies a signed JWT id_token.

    Args:
        id_token: string, A Signed JWT.
        audience: string, The audience 'aud' that the token should be for.
        http: httplib2.Http, instance to use to make the HTTP request. Callers
            should supply an instance that has caching enabled.
        cert_uri: string, URI of the certificates in JSON format to
            verify the JWT against.

    Returns:
        The deserialized JSON in the JWT.

    Raises:
        oauth2client.crypt.AppIdentityError if the JWT fails to verify.
        VerifyJwtTokenError: if the certificates cannot be fetched.
    """
    if http is None:
        # Default to the module-level cached Http so repeated verifications
        # don't re-fetch the certificates every time.
        http = CACHED_HTTP

    resp, content = http.request(cert_uri)

    if resp.status == 200:
        certs = simplejson.loads(content)
        return verify_signed_jwt_with_certs(id_token, certs, audience)
    else:
        raise VerifyJwtTokenError('Status code: %d' % resp.status)
def _urlsafe_b64decode(b64string):
    """Decode a URL-safe base64 string that may lack '=' padding.

    JWT segments strip their trailing '=' padding, which the base64
    module requires; this restores exactly the missing amount.

    Args:
        b64string: string, URL-safe base64-encoded data, possibly with the
            trailing padding removed.

    Returns:
        The decoded byte string.
    """
    # Guard against unicode strings, which base64 can't handle.
    b64string = b64string.encode('ascii')
    # -len % 4 is 0 when the length is already a multiple of 4, so no
    # spurious padding is appended (the old `4 - len % 4` added four '='s
    # in that case). b'=' keeps bytes + bytes consistent.
    padded = b64string + b'=' * (-len(b64string) % 4)
    return base64.urlsafe_b64decode(padded)
def _extract_id_token(id_token):
    """Extract the JSON payload from a JWT.

    Does the extraction w/o checking the signature.

    Args:
        id_token: string, OAuth 2.0 id_token.

    Returns:
        object, The deserialized JSON payload.

    Raises:
        VerifyJwtTokenError: if the token is not of the
            header.payload.signature form.
    """
    segments = id_token.split('.')

    if (len(segments) != 3):
        raise VerifyJwtTokenError(
            'Wrong number of segments in token: %s' % id_token)

    # Segment 1 is the claims payload: base64url-encoded JSON.
    return simplejson.loads(_urlsafe_b64decode(segments[1]))
class OAuth2WebServerFlow(Flow):
    """Does the Web Server Flow for OAuth 2.0.

    OAuth2Credentials objects may be safely pickled and unpickled.
    """

    def __init__(self, client_id, client_secret, scope, user_agent=None,
                 auth_uri='https://accounts.google.com/o/oauth2/auth',
                 token_uri='https://accounts.google.com/o/oauth2/token',
                 **kwargs):
        """Constructor for OAuth2WebServerFlow.

        Args:
            client_id: string, client identifier.
            client_secret: string client secret.
            scope: string or list of strings, scope(s) of the credentials being
                requested.
            user_agent: string, HTTP User-Agent to provide for this application.
            auth_uri: string, URI for authorization endpoint. For convenience
                defaults to Google's endpoints but any OAuth 2.0 provider can
                be used.
            token_uri: string, URI for token endpoint. For convenience
                defaults to Google's endpoints but any OAuth 2.0 provider can
                be used.
            **kwargs: dict, The keyword arguments are all optional and required
                parameters for the OAuth calls.
        """
        self.client_id = client_id
        self.client_secret = client_secret
        if type(scope) is list:
            scope = ' '.join(scope)
        self.scope = scope
        self.user_agent = user_agent
        self.auth_uri = auth_uri
        self.token_uri = token_uri
        # 'access_type=offline' asks the provider for a refresh_token by
        # default; callers may override it via **kwargs.
        self.params = {
            'access_type': 'offline',
        }
        self.params.update(kwargs)
        self.redirect_uri = None

    def step1_get_authorize_url(self, redirect_uri='oob'):
        """Returns a URI to redirect to the provider.

        Args:
            redirect_uri: string, Either the string 'oob' for a non-web-based
                application, or a URI that handles the callback from
                the authorization server.

        If redirect_uri is 'oob' then pass in the
        generated verification code to step2_exchange,
        otherwise pass in the query parameters received
        at the callback uri to step2_exchange.
        """
        self.redirect_uri = redirect_uri
        query = {
            'response_type': 'code',
            'client_id': self.client_id,
            'redirect_uri': redirect_uri,
            'scope': self.scope,
        }
        query.update(self.params)
        # Merge our parameters into any query string already present on
        # auth_uri, then rebuild the URL.
        parts = list(urlparse.urlparse(self.auth_uri))
        query.update(dict(parse_qsl(parts[4])))  # 4 is the index of the query part
        parts[4] = urllib.urlencode(query)
        return urlparse.urlunparse(parts)

    def step2_exchange(self, code, http=None):
        """Exchanges a code for OAuth2Credentials.

        Args:
            code: string or dict, either the code as a string, or a dictionary
                of the query parameters to the redirect_uri, which contains
                the code.
            http: httplib2.Http, optional http instance to use to do the fetch

        Returns:
            An OAuth2Credentials on success.

        Raises:
            FlowExchangeError: if the token endpoint returns a non-200
                response.
        """
        if not (isinstance(code, str) or isinstance(code, unicode)):
            # Caller passed the callback query parameters; extract the code.
            code = code['code']

        body = urllib.urlencode({
            'grant_type': 'authorization_code',
            'client_id': self.client_id,
            'client_secret': self.client_secret,
            'code': code,
            'redirect_uri': self.redirect_uri,
            'scope': self.scope,
        })
        headers = {
            'content-type': 'application/x-www-form-urlencoded',
        }

        if self.user_agent is not None:
            headers['user-agent'] = self.user_agent

        if http is None:
            http = httplib2.Http()
        resp, content = http.request(self.token_uri, method='POST', body=body,
                                     headers=headers)
        if resp.status == 200:
            # TODO(jcgregorio) Raise an error if simplejson.loads fails?
            d = simplejson.loads(content)
            access_token = d['access_token']
            refresh_token = d.get('refresh_token', None)
            token_expiry = None
            if 'expires_in' in d:
                token_expiry = datetime.datetime.utcnow() + datetime.timedelta(
                    seconds=int(d['expires_in']))

            if 'id_token' in d:
                d['id_token'] = _extract_id_token(d['id_token'])

            # NOTE(review): this logs the raw token response (including the
            # access token) at INFO level -- consider redacting.
            logger.info('Successfully retrieved access token: %s' % content)
            return OAuth2Credentials(access_token, self.client_id,
                                     self.client_secret, refresh_token,
                                     token_expiry,
                                     self.token_uri, self.user_agent,
                                     id_token=d.get('id_token', None))
        else:
            logger.error('Failed to retrieve access token: %s' % content)
            error_msg = 'Invalid response %s.' % resp['status']
            try:
                d = simplejson.loads(content)
                if 'error' in d:
                    error_msg = d['error']
            # NOTE(review): bare except silently swallows all errors here,
            # including SystemExit/KeyboardInterrupt -- should be narrowed.
            except:
                pass

            raise FlowExchangeError(error_msg)
def flow_from_clientsecrets(filename, scope, message=None):
    """Create a Flow from a clientsecrets file.

    Will create the right kind of Flow based on the contents of the
    clientsecrets file or will raise InvalidClientSecretsError for unknown
    types of Flows.

    Args:
        filename: string, File name of client secrets.
        scope: string or list of strings, scope(s) to request.
        message: string, A friendly string to display to the user if the
            clientsecrets file is missing or invalid. If message is provided
            then sys.exit will be called in the case of an error. If message
            is not provided then clientsecrets.InvalidClientSecretsError will
            be raised.

    Returns:
        A Flow object.

    Raises:
        UnknownClientSecretsFlowError if the file describes an unknown kind
            of Flow.
        clientsecrets.InvalidClientSecretsError if the clientsecrets file is
            invalid.
    """
    try:
        client_type, client_info = clientsecrets.loadfile(filename)
        if client_type in [clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED]:
            return OAuth2WebServerFlow(
                client_info['client_id'],
                client_info['client_secret'],
                scope,
                None,  # user_agent
                client_info['auth_uri'],
                client_info['token_uri'])
    except clientsecrets.InvalidClientSecretsError:
        if message:
            sys.exit(message)
        else:
            raise
    else:
        # Reached only when loadfile succeeded but the flow type was not
        # recognized above. Bug fix: the format operator was '*' (sequence
        # repetition) instead of '%', which raised TypeError instead of the
        # intended UnknownClientSecretsFlowError message.
        raise UnknownClientSecretsFlowError(
            'This OAuth 2.0 flow is unsupported: "%s"' % client_type)
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for OAuth.
Utilities for making it easier to work with OAuth 2.0
credentials.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import threading
try: # pragma: no cover
import simplejson
except ImportError: # pragma: no cover
try:
# Try to import from django, should work on App Engine
from django.utils import simplejson
except ImportError:
# Should work for Python2.6 and higher.
import json as simplejson
from client import Storage as BaseStorage
from client import Credentials
class Storage(BaseStorage):
    """Store and retrieve a single credential to and from a file."""

    def __init__(self, filename):
        self._filename = filename
        self._lock = threading.Lock()

    def acquire_lock(self):
        """Acquires any lock necessary to access this Storage.

        This lock is not reentrant.
        """
        self._lock.acquire()

    def release_lock(self):
        """Release the Storage lock.

        Trying to release a lock that isn't held will result in a
        RuntimeError.
        """
        self._lock.release()

    def locked_get(self):
        """Retrieve Credential from file.

        Returns:
            oauth2client.client.Credentials, or None if the file is missing
            or does not contain valid credential JSON.
        """
        credentials = None
        try:
            # 'with' guarantees the handle is closed even if read() raises;
            # the previous code leaked the file handle on a read error.
            with open(self._filename, 'r') as f:
                content = f.read()
        except IOError:
            return credentials

        try:
            credentials = Credentials.new_from_json(content)
            credentials.set_store(self)
        except ValueError:
            # File contents were not valid credential JSON; behave as if
            # no credentials are stored.
            pass
        return credentials

    def locked_put(self, credentials):
        """Write Credentials to file.

        Args:
            credentials: Credentials, the credentials to store.
        """
        # 'with' closes (and flushes) the file even if to_json() or the
        # write raises.
        with open(self._filename, 'w') as f:
            f.write(credentials.to_json())
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""OAuth 2.0 utilities for Django.
Utilities for using OAuth 2.0 in conjunction with
the Django datastore.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import oauth2client
import base64
import pickle
from django.db import models
from oauth2client.client import Storage as BaseStorage
class CredentialsField(models.Field):
    """Django model field that stores an oauth2client Credentials object.

    Stored in the database as a base64-encoded pickle.
    """

    # SubfieldBase makes Django run to_python() on every assignment, not
    # only when loading from the database.
    __metaclass__ = models.SubfieldBase

    def get_internal_type(self):
        # Backed by a TEXT column holding the base64-encoded pickle.
        return "TextField"

    def to_python(self, value):
        """Convert a DB value (base64 pickle) back into a Credentials."""
        if not value:
            return None
        if isinstance(value, oauth2client.client.Credentials):
            return value
        # NOTE(review): unpickles database content -- assumes the DB is
        # trusted; pickle.loads on untrusted data is unsafe.
        return pickle.loads(base64.b64decode(value))

    def get_db_prep_value(self, value, connection, prepared=False):
        """Serialize a Credentials to its base64-pickle DB representation."""
        return base64.b64encode(pickle.dumps(value))
class FlowField(models.Field):
    """Django model field that stores an oauth2client Flow object.

    Stored in the database as a base64-encoded pickle.
    """

    # SubfieldBase makes Django run to_python() on every assignment, not
    # only when loading from the database.
    __metaclass__ = models.SubfieldBase

    def get_internal_type(self):
        # Backed by a TEXT column holding the base64-encoded pickle.
        return "TextField"

    def to_python(self, value):
        """Convert a DB value (base64 pickle) back into a Flow."""
        if value is None:
            return None
        if isinstance(value, oauth2client.client.Flow):
            return value
        # NOTE(review): unpickles database content -- assumes the DB is
        # trusted; pickle.loads on untrusted data is unsafe.
        return pickle.loads(base64.b64decode(value))

    def get_db_prep_value(self, value, connection, prepared=False):
        """Serialize a Flow to its base64-pickle DB representation."""
        return base64.b64encode(pickle.dumps(value))
class Storage(BaseStorage):
    """Store and retrieve a single credential to and from
    the datastore.

    This Storage helper presumes the Credentials
    have been stored as a CredentialsField
    on a db model class.
    """

    def __init__(self, model_class, key_name, key_value, property_name):
        """Constructor for Storage.

        Args:
            model_class: db.Model, model class
            key_name: string, key name for the entity that has the credentials
            key_value: string, key value for the entity that has the
                credentials
            property_name: string, name of the property that is a
                CredentialsProperty
        """
        self.model_class = model_class
        self.key_name = key_name
        self.key_value = key_value
        self.property_name = property_name

    def locked_get(self):
        """Retrieve Credential from datastore.

        Returns:
            oauth2client.Credentials, or None if no matching entity exists.
        """
        credential = None

        # Look up the entity by the configured key field/value pair.
        query = {self.key_name: self.key_value}
        entities = self.model_class.objects.filter(**query)
        if len(entities) > 0:
            credential = getattr(entities[0], self.property_name)
            if credential and hasattr(credential, 'set_store'):
                # Attach ourselves so refreshed tokens get written back.
                credential.set_store(self)
        return credential

    def locked_put(self, credentials):
        """Write a Credentials to the datastore.

        Args:
            credentials: Credentials, the credentials to store.

        NOTE(review): this always constructs a fresh model instance; if
        key_name is not the model's primary key this may insert a duplicate
        row instead of updating the existing one -- confirm against the
        model definition.
        """
        args = {self.key_name: self.key_value}
        entity = self.model_class(**args)
        setattr(entity, self.property_name, credentials)
        entity.save()
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Google App Engine
Utilities for making it easier to use OAuth 2.0 on Google App Engine.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import base64
import httplib2
import logging
import pickle
import time
try: # pragma: no cover
import simplejson
except ImportError: # pragma: no cover
try:
# Try to import from django, should work on App Engine
from django.utils import simplejson
except ImportError:
# Should work for Python2.6 and higher.
import json as simplejson
import clientsecrets
from client import AccessTokenRefreshError
from client import AssertionCredentials
from client import Credentials
from client import Flow
from client import OAuth2WebServerFlow
from client import Storage
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.api.app_identity import app_identity
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import login_required
from google.appengine.ext.webapp.util import run_wsgi_app
OAUTH2CLIENT_NAMESPACE = 'oauth2client#ns'
class InvalidClientSecretsError(Exception):
    """The client_secrets.json file is malformed or missing required
    fields."""
    pass
class AppAssertionCredentials(AssertionCredentials):
    """Credentials object for App Engine Assertion Grants

    This object will allow an App Engine application to identify itself to
    Google and other OAuth 2.0 servers that can verify assertions. It can be
    used for the purpose of accessing data stored under an account assigned
    to the App Engine application itself. The algorithm used for generating
    the assertion is the Signed JSON Web Token (JWT) algorithm. Additional
    details can be found at the following link:

    http://self-issued.info/docs/draft-jones-json-web-token.html

    This credential does not require a flow to instantiate because it
    represents a two legged flow, and therefore has all of the required
    information to generate and refresh its own access tokens.
    """

    def __init__(self, scope,
                 audience='https://accounts.google.com/o/oauth2/token',
                 assertion_type='http://oauth.net/grant_type/jwt/1.0/bearer',
                 token_uri='https://accounts.google.com/o/oauth2/token',
                 **kwargs):
        """Constructor for AppAssertionCredentials

        Args:
            scope: string, scope of the credentials being requested.
            audience: string, The audience, or verifier of the assertion. For
                convenience defaults to Google's audience.
            assertion_type: string, Type name that will identify the format of
                the assertion string. For convenience, defaults to the JSON
                Web Token (JWT) assertion type string.
            token_uri: string, URI for token endpoint. For convenience
                defaults to Google's endpoints but any OAuth 2.0 provider can
                be used.
        """
        self.scope = scope
        self.audience = audience
        self.app_name = app_identity.get_service_account_name()

        super(AppAssertionCredentials, self).__init__(
            assertion_type,
            None,
            token_uri)

    @classmethod
    def from_json(cls, json):
        """Rebuild an AppAssertionCredentials from its JSON representation.

        Bug fix: this previously instantiated AccessTokenCredentials (whose
        constructor takes entirely different arguments), which raised a
        TypeError; it now constructs the correct class.
        """
        data = simplejson.loads(json)
        retval = AppAssertionCredentials(
            data['scope'],
            data['audience'],
            data['assertion_type'],
            data['token_uri'])
        return retval

    def _generate_assertion(self):
        """Build and sign the JWT assertion via App Engine's app identity."""
        header = {
            'typ': 'JWT',
            'alg': 'RS256',
        }

        now = int(time.time())
        claims = {
            'aud': self.audience,
            'scope': self.scope,
            'iat': now,
            'exp': now + 3600,
            'iss': self.app_name,
        }

        jwt_components = [base64.b64encode(simplejson.dumps(seg))
                          for seg in [header, claims]]

        base_str = ".".join(jwt_components)
        # sign_blob returns (key_name, signature); only the signature is
        # needed for the JWT's third segment.
        key_name, signature = app_identity.sign_blob(base_str)
        jwt_components.append(base64.b64encode(signature))
        return ".".join(jwt_components)
class FlowProperty(db.Property):
    """App Engine datastore Property for Flow.

    Utility property that allows easy storage and retrieval of an
    oauth2client.Flow
    """

    # Tell what the user type is.
    data_type = Flow

    # For writing to datastore.
    def get_value_for_datastore(self, model_instance):
        """Pickle the Flow into a datastore Blob."""
        flow = super(FlowProperty,
                     self).get_value_for_datastore(model_instance)
        return db.Blob(pickle.dumps(flow))

    # For reading from datastore.
    def make_value_from_datastore(self, value):
        """Unpickle a datastore Blob back into a Flow (or None)."""
        if value is None:
            return None
        return pickle.loads(value)

    def validate(self, value):
        # Accept None or any Flow instance; reject everything else.
        if value is not None and not isinstance(value, Flow):
            raise db.BadValueError('Property %s must be convertible '
                                   'to a FlowThreeLegged instance (%s)' %
                                   (self.name, value))
        return super(FlowProperty, self).validate(value)

    def empty(self, value):
        # Treat None/falsy as "no flow stored".
        return not value
class CredentialsProperty(db.Property):
    """App Engine datastore Property for Credentials.

    Utility property that allows easy storage and retrieval of
    oauth2client.Credentials
    """

    # Tell what the user type is.
    data_type = Credentials

    # For writing to datastore.
    def get_value_for_datastore(self, model_instance):
        """Serialize the Credentials to a JSON blob for the datastore."""
        # Removed leftover debug logging ('get: Got type ...') that spammed
        # the application log on every datastore write.
        cred = super(CredentialsProperty,
                     self).get_value_for_datastore(model_instance)
        if cred is None:
            cred = ''
        else:
            cred = cred.to_json()
        return db.Blob(cred)

    # For reading from datastore.
    def make_value_from_datastore(self, value):
        """Deserialize a JSON blob back into a Credentials (or None)."""
        if value is None:
            return None
        if len(value) == 0:
            return None
        try:
            credentials = Credentials.new_from_json(value)
        except ValueError:
            # Blob was not valid credential JSON; treat as empty.
            credentials = None
        return credentials

    def validate(self, value):
        value = super(CredentialsProperty, self).validate(value)
        if value is not None and not isinstance(value, Credentials):
            raise db.BadValueError('Property %s must be convertible '
                                   'to a Credentials instance (%s)' %
                                   (self.name, value))
        return value
class StorageByKeyName(Storage):
    """Store and retrieve a single credential to and from
    the App Engine datastore.

    This Storage helper presumes the Credentials
    have been stored as a CredentialsProperty
    on a datastore model class, and that entities
    are stored by key_name.
    """

    def __init__(self, model, key_name, property_name, cache=None):
        """Constructor for Storage.

        Args:
            model: db.Model, model class
            key_name: string, key name for the entity that has the credentials
            property_name: string, name of the property that is a
                CredentialsProperty
            cache: memcache, a write-through cache to put in front of the
                datastore
        """
        self._model = model
        self._key_name = key_name
        self._property_name = property_name
        self._cache = cache

    def locked_get(self):
        """Retrieve Credential from datastore.

        Returns:
            oauth2client.Credentials
        """
        if self._cache:
            json = self._cache.get(self._key_name)
            if json:
                return Credentials.new_from_json(json)

        credential = None
        entity = self._model.get_by_key_name(self._key_name)
        if entity is not None:
            credential = getattr(entity, self._property_name)
            if credential and hasattr(credential, 'set_store'):
                credential.set_store(self)
                if self._cache:
                    # Bug fix: this referenced the undefined name
                    # 'credentials' (plural) and raised NameError whenever a
                    # cache was configured; the local variable is
                    # 'credential'.
                    self._cache.set(self._key_name, credential.to_json())

        return credential

    def locked_put(self, credentials):
        """Write a Credentials to the datastore.

        Args:
            credentials: Credentials, the credentials to store.
        """
        entity = self._model.get_or_insert(self._key_name)
        setattr(entity, self._property_name, credentials)
        entity.put()
        if self._cache:
            # Keep the write-through cache in sync with the datastore.
            self._cache.set(self._key_name, credentials.to_json())
class CredentialsModel(db.Model):
    """Storage for OAuth 2.0 Credentials

    Storage of the model is keyed by the user.user_id().
    """
    # The serialized OAuth 2.0 credentials for this user.
    credentials = CredentialsProperty()
class OAuth2Decorator(object):
"""Utility for making OAuth 2.0 easier.
Instantiate and then use with oauth_required or oauth_aware
as decorators on webapp.RequestHandler methods.
Example:
decorator = OAuth2Decorator(
client_id='837...ent.com',
client_secret='Qh...wwI',
scope='https://www.googleapis.com/auth/plus')
class MainHandler(webapp.RequestHandler):
@decorator.oauth_required
def get(self):
http = decorator.http()
# http is authorized with the user's Credentials and can be used
# in API calls
"""
def __init__(self, client_id, client_secret, scope,
auth_uri='https://accounts.google.com/o/oauth2/auth',
token_uri='https://accounts.google.com/o/oauth2/token',
message=None):
"""Constructor for OAuth2Decorator
Args:
client_id: string, client identifier.
client_secret: string client secret.
scope: string or list of strings, scope(s) of the credentials being
requested.
auth_uri: string, URI for authorization endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
token_uri: string, URI for token endpoint. For convenience
defaults to Google's endpoints but any OAuth 2.0 provider can be used.
message: Message to display if there are problems with the OAuth 2.0
configuration. The message may contain HTML and will be presented on the
web interface for any method that uses the decorator.
"""
self.flow = OAuth2WebServerFlow(client_id, client_secret, scope, None,
auth_uri, token_uri)
self.credentials = None
self._request_handler = None
self._message = message
self._in_error = False
def _display_error_message(self, request_handler):
request_handler.response.out.write('<html><body>')
request_handler.response.out.write(self._message)
request_handler.response.out.write('</body></html>')
def oauth_required(self, method):
"""Decorator that starts the OAuth 2.0 dance.
Starts the OAuth dance for the logged in user if they haven't already
granted access for this application.
Args:
method: callable, to be decorated method of a webapp.RequestHandler
instance.
"""
def check_oauth(request_handler, *args):
if self._in_error:
self._display_error_message(request_handler)
return
user = users.get_current_user()
# Don't use @login_decorator as this could be used in a POST request.
if not user:
request_handler.redirect(users.create_login_url(
request_handler.request.uri))
return
# Store the request URI in 'state' so we can use it later
self.flow.params['state'] = request_handler.request.url
self._request_handler = request_handler
self.credentials = StorageByKeyName(
CredentialsModel, user.user_id(), 'credentials').get()
if not self.has_credentials():
return request_handler.redirect(self.authorize_url())
try:
method(request_handler, *args)
except AccessTokenRefreshError:
return request_handler.redirect(self.authorize_url())
return check_oauth
def oauth_aware(self, method):
    """Decorator that sets up for OAuth 2.0 dance, but doesn't do it.

    Does all the setup for the OAuth dance, but doesn't initiate it.
    This decorator is useful if you want to create a page that knows
    whether or not the user has granted access to this application.
    From within a method decorated with @oauth_aware the has_credentials()
    and authorize_url() methods can be called.

    Args:
        method: callable, to be decorated method of a webapp.RequestHandler
            instance.
    """
    def setup_oauth(request_handler, *args):
        if self._in_error:
            self._display_error_message(request_handler)
            return
        user = users.get_current_user()
        # Don't use @login_decorator as this could be used in a POST request.
        if not user:
            request_handler.redirect(users.create_login_url(
                request_handler.request.uri))
            return
        # Record where to return after the dance, in case the handler
        # chooses to start it via authorize_url().
        self.flow.params['state'] = request_handler.request.url
        self._request_handler = request_handler
        # Load stored credentials so has_credentials() reflects this user.
        self.credentials = StorageByKeyName(
            CredentialsModel, user.user_id(), 'credentials').get()
        # Unlike @oauth_required, the wrapped method always runs.
        method(request_handler, *args)
    return setup_oauth
def has_credentials(self):
    """True if for the logged in user there are valid access Credentials.

    Must only be called from with a webapp.RequestHandler subclassed method
    that had been decorated with either @oauth_required or @oauth_aware.
    """
    if self.credentials is None:
        return False
    return not self.credentials.invalid
def authorize_url(self):
    """Returns the URL to start the OAuth dance.

    Must only be called from with a webapp.RequestHandler subclassed method
    that had been decorated with either @oauth_required or @oauth_aware.
    """
    callback = self._request_handler.request.relative_url('/oauth2callback')
    url = self.flow.step1_get_authorize_url(callback)
    user = users.get_current_user()
    # Stash the pickled flow in memcache keyed by user id so OAuth2Handler
    # can recover it when the authorization server redirects back.
    memcache.set(user.user_id(), pickle.dumps(self.flow),
                 namespace=OAUTH2CLIENT_NAMESPACE)
    return url
def http(self):
    """Returns an authorized http instance.

    Must only be called from within an @oauth_required decorated method, or
    from within an @oauth_aware decorated method where has_credentials()
    returns True.
    """
    transport = httplib2.Http()
    return self.credentials.authorize(transport)
class OAuth2DecoratorFromClientSecrets(OAuth2Decorator):
    """An OAuth2Decorator that builds from a clientsecrets file.

    Uses a clientsecrets file as the source for all the information when
    constructing an OAuth2Decorator.

    Example:

        decorator = OAuth2DecoratorFromClientSecrets(
            os.path.join(os.path.dirname(__file__), 'client_secrets.json')
            scope='https://www.googleapis.com/auth/plus')

        class MainHandler(webapp.RequestHandler):
            @decorator.oauth_required
            def get(self):
                http = decorator.http()
                # http is authorized with the user's Credentials and can be
                # used in API calls
    """

    def __init__(self, filename, scope, message=None):
        """Constructor

        Args:
            filename: string, File name of client secrets.
            scope: string, Space separated list of scopes.
            message: string, A friendly string to display to the user if the
                clientsecrets file is missing or invalid. The message may
                contain HTML and will be presented on the web interface for
                any method that uses the decorator.
        """
        try:
            client_type, client_info = clientsecrets.loadfile(filename)
            # Only web and installed clients carry the fields the base
            # decorator needs.
            if client_type not in [clientsecrets.TYPE_WEB, clientsecrets.TYPE_INSTALLED]:
                raise InvalidClientSecretsError('OAuth2Decorator doesn\'t support this OAuth 2.0 flow.')
            super(OAuth2DecoratorFromClientSecrets,
                  self).__init__(
                      client_info['client_id'],
                      client_info['client_secret'],
                      scope,
                      client_info['auth_uri'],
                      client_info['token_uri'],
                      message)
        except clientsecrets.InvalidClientSecretsError:
            # NOTE(review): the InvalidClientSecretsError raised above is only
            # caught here if it is the same class as
            # clientsecrets.InvalidClientSecretsError — confirm the
            # module-level import. On this path the base __init__ never runs,
            # so only _in_error/_message are usable afterwards.
            self._in_error = True
            if message is not None:
                self._message = message
            else:
                self._message = "Please configure your application for OAuth 2.0"
def oauth2decorator_from_clientsecrets(filename, scope, message=None):
    """Creates an OAuth2Decorator populated from a clientsecrets file.

    Args:
        filename: string, File name of client secrets.
        scope: string, Space separated list of scopes.
        message: string, A friendly string to display to the user if the
            clientsecrets file is missing or invalid. The message may contain
            HTML and will be presented on the web interface for any method
            that uses the decorator.

    Returns: An OAuth2Decorator
    """
    # Thin convenience wrapper around the class constructor.
    decorator = OAuth2DecoratorFromClientSecrets(filename, scope,
                                                 message=message)
    return decorator
class OAuth2Handler(webapp.RequestHandler):
    """Handler for the redirect_uri of the OAuth 2.0 dance."""

    @login_required
    def get(self):
        """Finish the OAuth dance, store credentials, redirect to 'state'."""
        error = self.request.get('error')
        if error:
            errormsg = self.request.get('error_description', error)
            self.response.out.write(
                'The authorization request failed: %s' % errormsg)
        else:
            user = users.get_current_user()
            # Fetch the cached flow BEFORE unpickling: memcache entries can
            # expire or be evicted, and pickle.loads(None) raises TypeError
            # before the 'if flow' guard below could ever run.
            pickled_flow = memcache.get(user.user_id(),
                                        namespace=OAUTH2CLIENT_NAMESPACE)
            if pickled_flow is not None:
                flow = pickle.loads(pickled_flow)
            else:
                flow = None
            # This code should be ammended with application specific error
            # handling. Remaining case to consider:
            # 1. What if the step2_exchange fails?
            if flow:
                credentials = flow.step2_exchange(self.request.params)
                StorageByKeyName(
                    CredentialsModel, user.user_id(),
                    'credentials').put(credentials)
                self.redirect(str(self.request.get('state')))
            else:
                # Flow missing from memcache: ask the user to restart the
                # dance from the original page instead of serving a crash or
                # a blank page.
                self.response.out.write(
                    'The authorization request could not be completed. '
                    'Please try again.')
# Route the OAuth 2.0 redirect URI to the handler that finishes the dance.
application = webapp.WSGIApplication([('/oauth2callback', OAuth2Handler)])
def main():
    """Entry point for App Engine's CGI environment."""
    run_wsgi_app(application)
| Python |
#!/usr/bin/python2.4
# -*- coding: utf-8 -*-
#
# Copyright (C) 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import base64
import hashlib
import logging
import time
from OpenSSL import crypto
try: # pragma: no cover
import simplejson
except ImportError: # pragma: no cover
try:
# Try to import from django, should work on App Engine
from django.utils import simplejson
except ImportError:
# Should work for Python2.6 and higher.
import json as simplejson
# Validation tolerances for JWT timestamp claims
# (used by verify_signed_jwt_with_certs below).
CLOCK_SKEW_SECS = 300  # 5 minutes in seconds
AUTH_TOKEN_LIFETIME_SECS = 300  # 5 minutes in seconds
MAX_TOKEN_LIFETIME_SECS = 86400  # 1 day in seconds
class AppIdentityError(Exception):
    """Raised when a JWT fails verification (structure, signature, timing
    or audience)."""
class Verifier(object):
    """Verifies the signature on a message."""

    def __init__(self, pubkey):
        """Constructor.

        Args:
            pubkey, OpenSSL.crypto.PKey, The public key to verify with.
        """
        self._pubkey = pubkey

    def verify(self, message, signature):
        """Verifies a message against a signature.

        Args:
            message: string, The message to verify.
            signature: string, The signature on the message.

        Returns:
            True if message was singed by the private key associated with the
            public key that this object was constructed with.
        """
        try:
            crypto.verify(self._pubkey, signature, message, 'sha256')
        except:
            # Any OpenSSL failure means the signature did not check out.
            return False
        return True

    @staticmethod
    def from_string(key_pem, is_x509_cert):
        """Construct a Verified instance from a string.

        Args:
            key_pem: string, public key in PEM format.
            is_x509_cert: bool, True if key_pem is an X509 cert, otherwise it
                is expected to be an RSA key in PEM format.

        Returns:
            Verifier instance.

        Raises:
            OpenSSL.crypto.Error if the key_pem can't be parsed.
        """
        if is_x509_cert:
            loader = crypto.load_certificate
        else:
            # An RSA private-key PEM is accepted here; verification uses its
            # public half.
            loader = crypto.load_privatekey
        return Verifier(loader(crypto.FILETYPE_PEM, key_pem))
class Signer(object):
    """Signs messages with a private key."""

    def __init__(self, pkey):
        """Constructor.

        Args:
            pkey, OpenSSL.crypto.PKey, The private key to sign with.
        """
        self._key = pkey

    def sign(self, message):
        """Signs a message.

        Args:
            message: string, Message to be signed.

        Returns:
            string, The signature of the message for the given key.
        """
        return crypto.sign(self._key, message, 'sha256')

    @staticmethod
    def from_string(key, password='notasecret'):
        """Construct a Signer instance from a string.

        Args:
            key: string, private key in P12 format.
            password: string, password for the private key file.

        Returns:
            Signer instance.

        Raises:
            OpenSSL.crypto.Error if the key can't be parsed.
        """
        p12 = crypto.load_pkcs12(key, password)
        return Signer(p12.get_privatekey())
def _urlsafe_b64encode(raw_bytes):
return base64.urlsafe_b64encode(raw_bytes).rstrip('=')
def _urlsafe_b64decode(b64string):
# Guard against unicode strings, which base64 can't handle.
b64string = b64string.encode('ascii')
padded = b64string + '=' * (4 - len(b64string) % 4)
return base64.urlsafe_b64decode(padded)
def _json_encode(data):
return simplejson.dumps(data, separators = (',', ':'))
def make_signed_jwt(signer, payload):
    """Make a signed JWT.

    See http://self-issued.info/docs/draft-jones-json-web-token.html.

    Args:
        signer: crypt.Signer, Cryptographic signer.
        payload: dict, Dictionary of data to convert to JSON and then sign.

    Returns:
        string, The JWT for the payload.
    """
    header = {'typ': 'JWT', 'alg': 'RS256'}
    segments = [
        _urlsafe_b64encode(_json_encode(header)),
        _urlsafe_b64encode(_json_encode(payload)),
    ]
    # The signature covers "<header>.<payload>" and becomes the third segment.
    signature = signer.sign('.'.join(segments))
    segments.append(_urlsafe_b64encode(signature))
    logging.debug(str(segments))
    return '.'.join(segments)
def verify_signed_jwt_with_certs(jwt, certs, audience):
    """Verify a JWT against public certs.

    See http://self-issued.info/docs/draft-jones-json-web-token.html.

    Args:
        jwt: string, A JWT.
        certs: dict, Dictionary where values of public keys in PEM format.
        audience: string, The audience, 'aud', that this JWT should contain.
            If None then the JWT's 'aud' parameter is not verified.

    Returns:
        dict, The deserialized JSON payload in the JWT.

    Raises:
        AppIdentityError if any checks are failed.
    """
    segments = jwt.split('.')
    if (len(segments) != 3):
        raise AppIdentityError(
            'Wrong number of segments in token: %s' % jwt)
    # The signature covers "<header>.<payload>" exactly as transmitted.
    signed = '%s.%s' % (segments[0], segments[1])
    signature = _urlsafe_b64decode(segments[2])
    # Parse token.
    json_body = _urlsafe_b64decode(segments[1])
    try:
        parsed = simplejson.loads(json_body)
    except:
        raise AppIdentityError('Can\'t parse token: %s' % json_body)
    # Check signature: accept the token if ANY supplied cert validates it
    # (keys rotate, so keyname is not matched against the JWT header).
    verified = False
    for (keyname, pem) in certs.items():
        verifier = Verifier.from_string(pem, True)
        if (verifier.verify(signed, signature)):
            verified = True
            break
    if not verified:
        raise AppIdentityError('Invalid token signature: %s' % jwt)
    # Check creation timestamp, allowing CLOCK_SKEW_SECS of clock drift.
    iat = parsed.get('iat')
    if iat is None:
        raise AppIdentityError('No iat field in token: %s' % json_body)
    earliest = iat - CLOCK_SKEW_SECS
    # Check expiration timestamp.
    now = long(time.time())  # Python 2 'long'; this module targets 2.x.
    exp = parsed.get('exp')
    if exp is None:
        raise AppIdentityError('No exp field in token: %s' % json_body)
    # Reject tokens claiming a lifetime beyond the allowed maximum.
    if exp >= now + MAX_TOKEN_LIFETIME_SECS:
        raise AppIdentityError(
            'exp field too far in future: %s' % json_body)
    latest = exp + CLOCK_SKEW_SECS
    if now < earliest:
        raise AppIdentityError('Token used too early, %d < %d: %s' %
                               (now, earliest, json_body))
    if now > latest:
        raise AppIdentityError('Token used too late, %d > %d: %s' %
                               (now, latest, json_body))
    # Check audience.
    if audience is not None:
        aud = parsed.get('aud')
        if aud is None:
            raise AppIdentityError('No aud field in token: %s' % json_body)
        if aud != audience:
            raise AppIdentityError('Wrong recipient, %s != %s: %s' %
                                   (aud, audience, json_body))
    return parsed
| Python |
# Copyright (C) 2011 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for reading OAuth 2.0 client secret files.
A client_secrets.json file contains all the information needed to interact with
an OAuth 2.0 protected service.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
try: # pragma: no cover
import simplejson
except ImportError: # pragma: no cover
try:
# Try to import from django, should work on App Engine
from django.utils import simplejson
except ImportError:
# Should work for Python2.6 and higher.
import json as simplejson
# Properties that make a client_secrets.json file valid.
TYPE_WEB = 'web'
TYPE_INSTALLED = 'installed'

# Schema per client type: 'required' lists keys that must be present in the
# client info dict; 'string' lists keys that must not still hold an
# unconfigured '[[template]]' placeholder value.
VALID_CLIENT = {
    TYPE_WEB: {
        'required': [
            'client_id',
            'client_secret',
            'redirect_uris',
            'auth_uri',
            'token_uri'],
        'string': [
            'client_id',
            'client_secret'
        ]
    },
    TYPE_INSTALLED: {
        'required': [
            'client_id',
            'client_secret',
            'redirect_uris',
            'auth_uri',
            'token_uri'],
        'string': [
            'client_id',
            'client_secret'
        ]
    }
}
class Error(Exception):
    """Base error for this module."""
class InvalidClientSecretsError(Error):
    """Format of ClientSecrets file is invalid."""
def _validate_clientsecrets(obj):
    """Validate a parsed clientsecrets structure.

    Args:
        obj: dict, deserialized contents of a client_secrets JSON file;
            expected to have exactly one top-level key naming the client
            type.

    Returns:
        (client_type, client_info) tuple.

    Raises:
        InvalidClientSecretsError: if the structure is malformed, the type
            is unknown, a required property is missing, or a string property
            is still a '[[template]]' placeholder.
    """
    if obj is None or len(obj) != 1:
        raise InvalidClientSecretsError('Invalid file format.')
    # next(iter(...)) works on both Python 2 and 3; the original
    # obj.keys()[0] breaks on Python 3, where keys() is a non-indexable view.
    client_type = next(iter(obj))
    if client_type not in VALID_CLIENT:
        raise InvalidClientSecretsError('Unknown client type: %s.' % client_type)
    client_info = obj[client_type]
    for prop_name in VALID_CLIENT[client_type]['required']:
        if prop_name not in client_info:
            raise InvalidClientSecretsError(
                'Missing property "%s" in a client type of "%s".' % (prop_name,
                                                                     client_type))
    for prop_name in VALID_CLIENT[client_type]['string']:
        # '[[' marks an unreplaced template placeholder.
        if client_info[prop_name].startswith('[['):
            raise InvalidClientSecretsError(
                'Property "%s" is not configured.' % prop_name)
    return client_type, client_info
def load(fp):
    """Parse a clientsecrets file object and validate its contents."""
    return _validate_clientsecrets(simplejson.load(fp))
def loads(s):
    """Parse a clientsecrets JSON string and validate its contents."""
    return _validate_clientsecrets(simplejson.loads(s))
def loadfile(filename):
    """Read, parse, and validate a clientsecrets file by name.

    Args:
        filename: string, path to the client secrets file.

    Returns:
        (client_type, client_info) tuple.

    Raises:
        InvalidClientSecretsError: if the file cannot be opened or its
            contents are invalid.
    """
    try:
        # open() replaces the file() builtin, which was removed in Python 3;
        # 'with' guarantees the handle is closed, like the original
        # try/finally did.
        with open(filename, 'r') as fp:
            obj = simplejson.load(fp)
    except IOError:
        raise InvalidClientSecretsError('File not found: "%s"' % filename)
    return _validate_clientsecrets(obj)
| Python |
import Cookie
import datetime
import time
import email.utils
import calendar
import base64
import hashlib
import hmac
import re
import logging
# Ripped from the Tornado Framework's web.py
# http://github.com/facebook/tornado/commit/39ac6d169a36a54bb1f6b9bf1fdebb5c9da96e09
#
# Tornado is licensed under the Apache Licence, Version 2.0
# (http://www.apache.org/licenses/LICENSE-2.0.html).
#
# Example:
# from vendor.prayls.lilcookies import LilCookies
# cookieutil = LilCookies(self, application_settings['cookie_secret'])
# cookieutil.set_secure_cookie(name = 'mykey', value = 'myvalue', expires_days= 365*100)
# cookieutil.get_secure_cookie(name = 'mykey')
class LilCookies:
    """Reads plain and HMAC-signed cookies for a webapp request handler.

    Ripped from the Tornado Framework's web.py (see module comment above for
    attribution and a usage example). Signed cookies have the form
    "value|timestamp|signature" where the signature is an HMAC-SHA1 over
    (name, value, timestamp) keyed by cookie_secret.
    """

    @staticmethod
    def _utf8(s):
        # Cookie headers must be byte strings; encode unicode as UTF-8.
        if isinstance(s, unicode):
            return s.encode("utf-8")
        assert isinstance(s, str)
        return s

    @staticmethod
    def _time_independent_equals(a, b):
        # Constant-time comparison so signature checks don't leak the length
        # of a matching prefix through timing differences.
        if len(a) != len(b):
            return False
        result = 0
        for x, y in zip(a, b):
            result |= ord(x) ^ ord(y)
        return result == 0

    @staticmethod
    def _signature_from_secret(cookie_secret, *parts):
        """ Takes a secret salt value to create a signature for values in the `parts` param."""
        hash = hmac.new(cookie_secret, digestmod=hashlib.sha1)
        for part in parts: hash.update(part)
        return hash.hexdigest()

    @staticmethod
    def _signed_cookie_value(cookie_secret, name, value):
        """ Returns a signed value for use in a cookie.

        This is helpful to have in its own method if you need to re-use this function for other needs. """
        timestamp = str(int(time.time()))
        value = base64.b64encode(value)
        signature = LilCookies._signature_from_secret(cookie_secret, name, value, timestamp)
        return "|".join([value, timestamp, signature])

    @staticmethod
    def _verified_cookie_value(cookie_secret, name, signed_value):
        """Returns the un-encrypted value given the signed value if it validates, or None."""
        value = signed_value
        if not value: return None
        parts = value.split("|")
        if len(parts) != 3: return None
        signature = LilCookies._signature_from_secret(cookie_secret, name, parts[0], parts[1])
        if not LilCookies._time_independent_equals(parts[2], signature):
            logging.warning("Invalid cookie signature %r", value)
            return None
        timestamp = int(parts[1])
        # Reject cookies signed more than 31 days ago even if otherwise valid.
        if timestamp < time.time() - 31 * 86400:
            logging.warning("Expired cookie %r", value)
            return None
        try:
            return base64.b64decode(parts[0])
        except:
            return None

    def __init__(self, handler, cookie_secret):
        """You must specify the cookie_secret to use any of the secure methods.
        It should be a long, random sequence of bytes to be used as the HMAC
        secret for the signature.
        """
        if len(cookie_secret) < 45:
            raise ValueError("LilCookies cookie_secret should at least be 45 characters long, but got `%s`" % cookie_secret)
        self.handler = handler
        self.request = handler.request
        self.response = handler.response
        self.cookie_secret = cookie_secret

    def cookies(self):
        """A dictionary of Cookie.Morsel objects."""
        if not hasattr(self, "_cookies"):
            self._cookies = Cookie.BaseCookie()
            if "Cookie" in self.request.headers:
                try:
                    self._cookies.load(self.request.headers["Cookie"])
                except:
                    # Unparseable header: drop everything rather than serve
                    # half-parsed cookies.
                    self.clear_all_cookies()
        return self._cookies

    def get_cookie(self, name, default=None):
        """Gets the value of the cookie with the given name, else default."""
        if name in self.cookies():
            return self._cookies[name].value
        return default

    def set_cookie(self, name, value, domain=None, expires=None, path="/",
                   expires_days=None, **kwargs):
        """Sets the given cookie name/value with the given options.

        Additional keyword arguments are set on the Cookie.Morsel
        directly.
        See http://docs.python.org/library/cookie.html#morsel-objects
        for available attributes.
        """
        name = LilCookies._utf8(name)
        value = LilCookies._utf8(value)
        if re.search(r"[\x00-\x20]", name + value):
            # Don't let us accidentally inject bad stuff
            raise ValueError("Invalid cookie %r: %r" % (name, value))
        if not hasattr(self, "_new_cookies"):
            self._new_cookies = []
        new_cookie = Cookie.BaseCookie()
        self._new_cookies.append(new_cookie)
        new_cookie[name] = value
        if domain:
            new_cookie[name]["domain"] = domain
        if expires_days is not None and not expires:
            expires = datetime.datetime.utcnow() + datetime.timedelta(days=expires_days)
        if expires:
            timestamp = calendar.timegm(expires.utctimetuple())
            new_cookie[name]["expires"] = email.utils.formatdate(
                timestamp, localtime=False, usegmt=True)
        if path:
            new_cookie[name]["path"] = path
        for k, v in kwargs.iteritems():
            new_cookie[name][k] = v
        # The 2 lines below were not in Tornado. Instead, they output all their cookies to the headers at once before a response flush.
        for vals in new_cookie.values():
            self.response.headers._headers.append(('Set-Cookie', vals.OutputString(None)))

    def clear_cookie(self, name, path="/", domain=None):
        """Deletes the cookie with the given name."""
        # An expiry in the past makes the browser discard the cookie.
        expires = datetime.datetime.utcnow() - datetime.timedelta(days=365)
        self.set_cookie(name, value="", path=path, expires=expires,
                        domain=domain)

    def clear_all_cookies(self):
        """Deletes all the cookies the user sent with this request."""
        for name in self.cookies().iterkeys():
            self.clear_cookie(name)

    def set_secure_cookie(self, name, value, expires_days=30, **kwargs):
        """Signs and timestamps a cookie so it cannot be forged.

        To read a cookie set with this method, use get_secure_cookie().
        """
        value = LilCookies._signed_cookie_value(self.cookie_secret, name, value)
        self.set_cookie(name, value, expires_days=expires_days, **kwargs)

    def get_secure_cookie(self, name, value=None):
        """Returns the given signed cookie if it validates, or None."""
        if value is None: value = self.get_cookie(name)
        return LilCookies._verified_cookie_value(self.cookie_secret, name, value)

    def _cookie_signature(self, *parts):
        # BUG FIX: forward *parts to the signer. The original dropped them,
        # so this helper returned the same digest for every input.
        return LilCookies._signature_from_secret(self.cookie_secret, *parts)
| Python |
# Copyright (C) 2007 Joe Gregorio
#
# Licensed under the MIT License
"""MIME-Type Parser
This module provides basic functions for handling mime-types. It can handle
matching mime-types against a list of media-ranges. See section 14.1 of the
HTTP specification [RFC 2616] for a complete explanation.
http://www.w3.org/Protocols/rfc2616/rfc2616-sec14.html#sec14.1
Contents:
- parse_mime_type(): Parses a mime-type into its component parts.
- parse_media_range(): Media-ranges are mime-types with wild-cards and a 'q'
quality parameter.
- quality(): Determines the quality ('q') of a mime-type when
compared against a list of media-ranges.
- quality_parsed(): Just like quality() except the second parameter must be
pre-parsed.
- best_match(): Choose the mime-type with the highest quality ('q')
from a list of candidates.
"""
__version__ = '0.1.3'
__author__ = 'Joe Gregorio'
__email__ = 'joe@bitworking.org'
__license__ = 'MIT License'
__credits__ = ''
def parse_mime_type(mime_type):
    """Parses a mime-type into its component parts.

    Carves up a mime-type and returns a tuple of the (type, subtype, params)
    where 'params' is a dictionary of all the parameters for the media range.
    For example, the media range 'application/xhtml;q=0.5' would get parsed
    into:

        ('application', 'xhtml', {'q': '0.5'})
    """
    pieces = mime_type.split(';')
    params = {}
    for param in pieces[1:]:
        # A parameter without '=' raises ValueError here, matching the
        # original dict-of-pairs construction.
        key, value = [s.strip() for s in param.split('=', 1)]
        params[key] = value
    full_type = pieces[0].strip()
    # Java URLConnection class sends an Accept header that includes a
    # single '*'. Turn it into a legal wildcard.
    if full_type == '*':
        full_type = '*/*'
    (maintype, subtype) = full_type.split('/')
    return (maintype.strip(), subtype.strip(), params)
def parse_media_range(range):
    """Parse a media-range into its component parts.

    Carves up a media range and returns a tuple of the (type, subtype,
    params) where 'params' is a dictionary of all the parameters for the
    media range. For example, the media range 'application/*;q=0.5' would
    get parsed into:

        ('application', '*', {'q': '0.5'})

    In addition this function also guarantees that there is a value for 'q'
    in the params dictionary, filling it in with a proper default if
    necessary.
    """
    (maintype, subtype, params) = parse_mime_type(range)
    # 'in' replaces the Python-2-only dict.has_key(). As in the original, a
    # missing, empty, zero, or out-of-range q collapses to the default '1'
    # (q=0 is deliberately treated as unset here).
    if ('q' not in params or not params['q'] or
            not float(params['q']) or float(params['q']) > 1 or
            float(params['q']) < 0):
        params['q'] = '1'
    return (maintype, subtype, params)
def fitness_and_quality_parsed(mime_type, parsed_ranges):
    """Find the best match for a mime-type amongst parsed media-ranges.

    Find the best match for a given mime-type against a list of media_ranges
    that have already been parsed by parse_media_range(). Returns a tuple of
    the fitness value and the value of the 'q' quality parameter of the best
    match, or (-1, 0) if no match was found. Just as for quality_parsed(),
    'parsed_ranges' must be a list of parsed media ranges.
    """
    best_fitness = -1
    best_fit_q = 0
    (target_type, target_subtype, target_params) = parse_media_range(mime_type)
    for (type, subtype, params) in parsed_ranges:
        type_match = (type == target_type or
                      type == '*' or
                      target_type == '*')
        subtype_match = (subtype == target_subtype or
                         subtype == '*' or
                         target_subtype == '*')
        if type_match and subtype_match:
            # Count target parameters (other than 'q') matched exactly by
            # this range. sum() over a generator replaces reduce() (removed
            # from builtins in Python 3); items()/'in' replace the
            # Python-2-only iteritems()/has_key() with identical behavior.
            param_matches = sum(
                1 for (key, value) in target_params.items()
                if key != 'q' and key in params and value == params[key])
            # Exact type beats wildcard (100), exact subtype beats wildcard
            # (10), then parameter matches break ties.
            fitness = 100 if type == target_type else 0
            fitness += 10 if subtype == target_subtype else 0
            fitness += param_matches
            if fitness > best_fitness:
                best_fitness = fitness
                best_fit_q = params['q']
    return best_fitness, float(best_fit_q)
def quality_parsed(mime_type, parsed_ranges):
    """Find the best match for a mime-type amongst parsed media-ranges.

    Find the best match for a given mime-type against a list of media_ranges
    that have already been parsed by parse_media_range(). Returns the 'q'
    quality parameter of the best match, 0 if no match was found. This
    function behaves the same as quality() except that 'parsed_ranges' must
    be a list of parsed media ranges.
    """
    (fitness, q) = fitness_and_quality_parsed(mime_type, parsed_ranges)
    return q
def quality(mime_type, ranges):
    """Return the quality ('q') of a mime-type against a list of media-ranges.

    Returns the quality 'q' of a mime-type when compared against the
    media-ranges in ranges. For example:

        >>> quality('text/html','text/*;q=0.3, text/html;q=0.7,
                  text/html;level=1, text/html;level=2;q=0.4, */*;q=0.5')
        0.7
    """
    parsed = []
    for r in ranges.split(','):
        parsed.append(parse_media_range(r))
    return quality_parsed(mime_type, parsed)
def best_match(supported, header):
    """Return mime-type with the highest quality ('q') from list of candidates.

    Takes a list of supported mime-types and finds the best match for all the
    media-ranges listed in header. The value of header must be a string that
    conforms to the format of the HTTP Accept: header. The value of
    'supported' is a list of mime-types. The list of supported mime-types
    should be sorted in order of increasing desirability, in case of a
    situation where there is a tie.

        >>> best_match(['application/xbel+xml', 'text/xml'],
                       'text/*;q=0.5,*/*; q=0.1')
        'text/xml'
    """
    parsed_header = [parse_media_range(r)
                     for r in _filter_blank(header.split(','))]
    weighted_matches = []
    # The list position is part of the sort key so that later (more
    # desirable) entries win ties after sorting.
    for pos, mime_type in enumerate(supported):
        fq = fitness_and_quality_parsed(mime_type, parsed_header)
        weighted_matches.append((fq, pos, mime_type))
    weighted_matches.sort()
    (best_fq, _ignored, best_type) = weighted_matches[-1]
    # A best quality of 0 means nothing matched.
    return best_type if best_fq[1] else ''
def _filter_blank(i):
for s in i:
if s.strip():
yield s
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Classes to encapsulate a single HTTP request.
The classes implement a command pattern, with every
object supporting an execute() method that does the
actuall HTTP request.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
# Public names exported by this module. NOTE: the original list was missing
# the comma after 'HttpMock', so implicit string concatenation produced the
# bogus entry 'HttpMockset_user_agent' and silently un-exported both
# 'HttpMock' and 'set_user_agent' from star-imports.
__all__ = [
    'HttpRequest', 'RequestMockBuilder', 'HttpMock',
    'set_user_agent', 'tunnel_patch'
]
import StringIO
import copy
import gzip
import httplib2
import mimeparse
import mimetypes
import os
import urllib
import urlparse
import uuid
from anyjson import simplejson
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
from email.parser import FeedParser
from errors import BatchError
from errors import HttpError
from errors import ResumableUploadError
from errors import UnexpectedBodyError
from errors import UnexpectedMethodError
from model import JsonModel
class MediaUploadProgress(object):
    """Status of a resumable upload."""

    def __init__(self, resumable_progress, total_size):
        """Constructor.

        Args:
            resumable_progress: int, bytes sent so far.
            total_size: int, total bytes in complete upload.
        """
        self.resumable_progress = resumable_progress
        self.total_size = total_size

    def progress(self):
        """Percent of upload completed, as a float in [0.0, 1.0].

        Returns 0.0 for a zero-byte upload; the original divided
        unconditionally and raised ZeroDivisionError in that case.
        """
        if self.total_size == 0:
            return 0.0
        return float(self.resumable_progress) / float(self.total_size)
class MediaUpload(object):
    """Describes a media object to upload.

    Base class that defines the interface of MediaUpload subclasses.
    """

    def getbytes(self, begin, end):
        raise NotImplementedError()

    def size(self):
        raise NotImplementedError()

    def chunksize(self):
        raise NotImplementedError()

    def mimetype(self):
        # Default content type when a subclass does not override.
        return 'application/octet-stream'

    def resumable(self):
        return False

    def _to_json(self, strip=None):
        """Utility function for creating a JSON representation of a
        MediaUpload.

        Args:
            strip: array, An array of names of members to not include in the
                JSON.

        Returns:
            string, a JSON representation of this instance, suitable to pass
            to from_json().
        """
        d = copy.copy(self.__dict__)
        if strip is not None:
            for member in strip:
                del d[member]
        # Record the concrete class so new_from_json() can locate it again.
        cls = type(self)
        d['_class'] = cls.__name__
        d['_module'] = cls.__module__
        return simplejson.dumps(d)

    def to_json(self):
        """Create a JSON representation of an instance of MediaUpload.

        Returns:
            string, a JSON representation of this instance, suitable to pass
            to from_json().
        """
        return self._to_json()

    @classmethod
    def new_from_json(cls, s):
        """Utility class method to instantiate a MediaUpload subclass from a
        JSON representation produced by to_json().

        Args:
            s: string, JSON from to_json().

        Returns:
            An instance of the subclass of MediaUpload that was serialized
            with to_json().
        """
        data = simplejson.loads(s)
        # Import the module recorded at serialization time, look up the
        # concrete class, and delegate to its from_json() factory.
        module = data['_module']
        m = __import__(module, fromlist=module.split('.')[:-1])
        kls = getattr(m, data['_class'])
        from_json = getattr(kls, 'from_json')
        return from_json(s)
class MediaFileUpload(MediaUpload):
    """A MediaUpload for a file.

    Construct a MediaFileUpload and pass as the media_body parameter of the
    method. For example, if we had a service that allowed uploading images:

        media = MediaFileUpload('smiley.png', mimetype='image/png',
                                chunksize=1000, resumable=True)
        service.objects().insert(
            bucket=buckets['items'][0]['id'],
            name='smiley.png',
            media_body=media).execute()
    """

    def __init__(self, filename, mimetype=None, chunksize=10000,
                 resumable=False):
        """Constructor.

        Args:
            filename: string, Name of the file.
            mimetype: string, Mime-type of the file. If None then a mime-type
                will be guessed from the file extension.
            chunksize: int, File will be uploaded in chunks of this many
                bytes. Only used if resumable=True.
            resumable: bool, True if this is a resumable upload. False means
                upload in a single request.
        """
        self._filename = filename
        self._size = os.path.getsize(filename)
        # File handle is opened lazily on the first getbytes() call.
        self._fd = None
        if mimetype is None:
            (mimetype, _encoding) = mimetypes.guess_type(filename)
        self._mimetype = mimetype
        self._chunksize = chunksize
        self._resumable = resumable

    def mimetype(self):
        """Mime type of the file (given or guessed from the extension)."""
        return self._mimetype

    def size(self):
        """Total size of the file in bytes."""
        return self._size

    def chunksize(self):
        """Upload chunk size in bytes (resumable uploads only)."""
        return self._chunksize

    def resumable(self):
        """Whether this upload uses the resumable protocol."""
        return self._resumable

    def getbytes(self, begin, length):
        """Get bytes from the media.

        Args:
            begin: int, offset from beginning of file.
            length: int, number of bytes to read, starting at begin.

        Returns:
            A string of bytes read. May be shorter than length if EOF was
            reached first.
        """
        if self._fd is None:
            self._fd = open(self._filename, 'rb')
        self._fd.seek(begin)
        return self._fd.read(length)

    def to_json(self):
        """Create a JSON representation of this MediaFileUpload.

        Returns:
            string, a JSON representation of this instance, suitable to pass
            to from_json().
        """
        # The open file handle cannot be serialized; drop it.
        return self._to_json(['_fd'])

    @staticmethod
    def from_json(s):
        """Rebuild a MediaFileUpload from a to_json() string."""
        d = simplejson.loads(s)
        return MediaFileUpload(d['_filename'], d['_mimetype'],
                               d['_chunksize'], d['_resumable'])
class HttpRequest(object):
"""Encapsulates a single HTTP request."""
def __init__(self, http, postproc, uri,
             method='GET',
             body=None,
             headers=None,
             methodId=None,
             resumable=None):
    """Constructor for an HttpRequest.

    Args:
        http: httplib2.Http, the transport object to use to make a request
        postproc: callable, called on the HTTP response and content to
            transform it into a data object before returning, or raising an
            exception on an error.
        uri: string, the absolute URI to send the request to
        method: string, the HTTP method to use
        body: string, the request body of the HTTP request,
        headers: dict, the HTTP request headers
        methodId: string, a unique identifier for the API method being
            called.
        resumable: MediaUpload, None if this is not a resumable request.
    """
    self.uri = uri
    self.method = method
    self.body = body
    self.headers = headers or {}
    self.methodId = methodId
    self.http = http
    self.postproc = postproc
    self.resumable = resumable
    # Pull the multipart boundary out of the content-type header.
    # NOTE(review): this reads the 'headers' parameter (which may be None),
    # not self.headers — confirm callers always pass a dict when a
    # content-type matters.
    major, minor, params = mimeparse.parse_mime_type(
        headers.get('content-type', 'application/json'))
    # Terminating multipart boundary get a trailing '--' appended.
    self.multipart_boundary = params.get('boundary', '').strip('"') + '--'
    # If this was a multipart resumable, the size of the non-media part.
    self.multipart_size = 0
    # The resumable URI to send chunks to.
    self.resumable_uri = None
    # The bytes that have been uploaded.
    self.resumable_progress = 0
    self.total_size = 0
    if resumable is not None:
        if self.body is not None:
            self.multipart_size = len(self.body)
        else:
            self.multipart_size = 0
        # Total bytes for the whole resumable upload: the media itself plus
        # the non-media multipart body plus the terminating boundary marker.
        self.total_size = (
            self.resumable.size() +
            self.multipart_size +
            len(self.multipart_boundary))
def execute(self, http=None):
"""Execute the request.
Args:
http: httplib2.Http, an http object to be used in place of the
one the HttpRequest request object was constructed with.
Returns:
A deserialized object model of the response body as determined
by the postproc.
Raises:
apiclient.errors.HttpError if the response was not a 2xx.
httplib2.Error if a transport error has occured.
"""
if http is None:
http = self.http
if self.resumable:
body = None
while body is None:
_, body = self.next_chunk(http)
return body
else:
resp, content = http.request(self.uri, self.method,
body=self.body,
headers=self.headers)
if resp.status >= 300:
raise HttpError(resp, content, self.uri)
return self.postproc(resp, content)
def next_chunk(self, http=None):
"""Execute the next step of a resumable upload.
Can only be used if the method being executed supports media uploads and
the MediaUpload object passed in was flagged as using resumable upload.
Example:
media = MediaFileUpload('smiley.png', mimetype='image/png',
chunksize=1000, resumable=True)
request = service.objects().insert(
bucket=buckets['items'][0]['id'],
name='smiley.png',
media_body=media)
response = None
while response is None:
status, response = request.next_chunk()
if status:
print "Upload %d%% complete." % int(status.progress() * 100)
Returns:
(status, body): (ResumableMediaStatus, object)
The body will be None until the resumable media is fully uploaded.
"""
if http is None:
http = self.http
if self.resumable_uri is None:
start_headers = copy.copy(self.headers)
start_headers['X-Upload-Content-Type'] = self.resumable.mimetype()
start_headers['X-Upload-Content-Length'] = str(self.resumable.size())
start_headers['Content-Length'] = '0'
resp, content = http.request(self.uri, self.method,
body="",
headers=start_headers)
if resp.status == 200 and 'location' in resp:
self.resumable_uri = resp['location']
else:
raise ResumableUploadError("Failed to retrieve starting URI.")
if self.body:
begin = 0
data = self.body
else:
begin = self.resumable_progress - self.multipart_size
data = self.resumable.getbytes(begin, self.resumable.chunksize())
# Tack on the multipart/related boundary if we are at the end of the file.
if begin + self.resumable.chunksize() >= self.resumable.size():
data += self.multipart_boundary
headers = {
'Content-Range': 'bytes %d-%d/%d' % (
self.resumable_progress, self.resumable_progress + len(data) - 1,
self.total_size),
}
resp, content = http.request(self.resumable_uri, 'PUT',
body=data,
headers=headers)
if resp.status in [200, 201]:
return None, self.postproc(resp, content)
elif resp.status == 308:
# A "308 Resume Incomplete" indicates we are not done.
self.resumable_progress = int(resp['range'].split('-')[1]) + 1
if self.resumable_progress >= self.multipart_size:
self.body = None
if 'location' in resp:
self.resumable_uri = resp['location']
else:
raise HttpError(resp, content, self.uri)
return MediaUploadProgress(self.resumable_progress, self.total_size), None
def to_json(self):
"""Returns a JSON representation of the HttpRequest."""
d = copy.copy(self.__dict__)
if d['resumable'] is not None:
d['resumable'] = self.resumable.to_json()
del d['http']
del d['postproc']
return simplejson.dumps(d)
@staticmethod
def from_json(s, http, postproc):
"""Returns an HttpRequest populated with info from a JSON object."""
d = simplejson.loads(s)
if d['resumable'] is not None:
d['resumable'] = MediaUpload.new_from_json(d['resumable'])
return HttpRequest(
http,
postproc,
uri=d['uri'],
method=d['method'],
body=d['body'],
headers=d['headers'],
methodId=d['methodId'],
resumable=d['resumable'])
class BatchHttpRequest(object):
  """Batches multiple HttpRequest objects into a single HTTP request."""
  def __init__(self, callback=None, batch_uri=None):
    """Constructor for a BatchHttpRequest.
    Args:
      callback: callable, A callback to be called for each response, of the
        form callback(id, response). The first parameter is the request id, and
        the second is the deserialized response object.
      batch_uri: string, URI to send batch requests to.
    """
    if batch_uri is None:
      batch_uri = 'https://www.googleapis.com/batch'
    self._batch_uri = batch_uri
    # Global callback to be called for each individual response in the batch.
    self._callback = callback
    # A map from id to (request, callback) pairs.
    self._requests = {}
    # List of request ids, in the order in which they were added.
    self._order = []
    # The last auto generated id.
    self._last_auto_id = 0
    # Unique ID on which to base the Content-ID headers.
    self._base_id = None
  def _id_to_header(self, id_):
    """Convert an id to a Content-ID header value.
    Args:
      id_: string, identifier of individual request.
    Returns:
      A Content-ID header with the id_ encoded into it. A UUID is prepended to
      the value because Content-ID headers are supposed to be universally
      unique.
    """
    # Lazily created so every batch instance gets its own UUID.
    if self._base_id is None:
      self._base_id = uuid.uuid4()
    return '<%s+%s>' % (self._base_id, urllib.quote(id_))
  def _header_to_id(self, header):
    """Convert a Content-ID header value to an id.
    Presumes the Content-ID header conforms to the format that _id_to_header()
    returns.
    Args:
      header: string, Content-ID header value.
    Returns:
      The extracted id value.
    Raises:
      BatchError if the header is not in the expected format.
    """
    if header[0] != '<' or header[-1] != '>':
      raise BatchError("Invalid value for Content-ID: %s" % header)
    if '+' not in header:
      raise BatchError("Invalid value for Content-ID: %s" % header)
    base, id_ = header[1:-1].rsplit('+', 1)
    return urllib.unquote(id_)
  def _serialize_request(self, request):
    """Convert an HttpRequest object into a string.
    Args:
      request: HttpRequest, the request to serialize.
    Returns:
      The request as a string in application/http format.
    """
    # Construct status line
    parsed = urlparse.urlparse(request.uri)
    # Rebuild only the path/params/query portion for the request line.
    request_line = urlparse.urlunparse(
        (None, None, parsed.path, parsed.params, parsed.query, None)
        )
    status_line = request.method + ' ' + request_line + ' HTTP/1.1\n'
    major, minor = request.headers.get('content-type', 'text/plain').split('/')
    msg = MIMENonMultipart(major, minor)
    headers = request.headers.copy()
    # MIMENonMultipart adds its own Content-Type header.
    if 'content-type' in headers:
      del headers['content-type']
    for key, value in headers.iteritems():
      msg[key] = value
    msg['Host'] = parsed.netloc
    msg.set_unixfrom(None)
    if request.body is not None:
      msg.set_payload(request.body)
    body = msg.as_string(False)
    # Strip off the \n\n that the MIME lib tacks onto the end of the payload.
    if request.body is None:
      body = body[:-2]
    return status_line + body
  def _deserialize_response(self, payload):
    """Convert string into httplib2 response and content.
    Args:
      payload: string, headers and body as a string.
    Returns:
      A pair (resp, content) like would be returned from httplib2.request.
    """
    # Strip off the status line
    status_line, payload = payload.split('\n', 1)
    protocol, status, reason = status_line.split(' ')
    # Parse the rest of the response
    parser = FeedParser()
    parser.feed(payload)
    msg = parser.close()
    msg['status'] = status
    # Create httplib2.Response from the parsed headers.
    resp = httplib2.Response(msg)
    resp.reason = reason
    # e.g. 'HTTP/1.1' becomes version 11.
    resp.version = int(protocol.split('/', 1)[1].replace('.', ''))
    content = payload.split('\r\n\r\n', 1)[1]
    return resp, content
  def _new_id(self):
    """Create a new id.
    Auto incrementing number that avoids conflicts with ids already used.
    Returns:
      string, a new unique id.
    """
    self._last_auto_id += 1
    while str(self._last_auto_id) in self._requests:
      self._last_auto_id += 1
    return str(self._last_auto_id)
  def add(self, request, callback=None, request_id=None):
    """Add a new request.
    Every callback added will be paired with a unique id, the request_id. That
    unique id will be passed back to the callback when the response comes back
    from the server. The default behavior is to have the library generate its
    own unique id. If the caller passes in a request_id then they must ensure
    uniqueness for each request_id, and if they are not unique an exception is
    raised. Callers should either supply all request_ids or never supply a
    request id, to avoid such an error.
    Args:
      request: HttpRequest, Request to add to the batch.
      callback: callable, A callback to be called for this response, of the
        form callback(id, response). The first parameter is the request id, and
        the second is the deserialized response object.
      request_id: string, A unique id for the request. The id will be passed to
        the callback with the response.
    Returns:
      None
    Raises:
      BatchError if a resumable request is added to a batch.
      KeyError if the request_id is not unique.
    """
    if request_id is None:
      request_id = self._new_id()
    if request.resumable is not None:
      raise BatchError("Resumable requests cannot be used in a batch request.")
    if request_id in self._requests:
      raise KeyError("A request with this ID already exists: %s" % request_id)
    self._requests[request_id] = (request, callback)
    self._order.append(request_id)
  def execute(self, http=None):
    """Execute all the requests as a single batched HTTP request.
    Args:
      http: httplib2.Http, an http object to be used in place of the one the
        HttpRequest request object was constructed with. If one isn't supplied
        then use a http object from the requests in this batch.
    Returns:
      None
    Raises:
      apiclient.errors.HttpError if the response was not a 2xx.
      httplib2.Error if a transport error has occurred.
    """
    if http is None:
      # Borrow the transport from the first request that carries one.
      for request_id in self._order:
        request, callback = self._requests[request_id]
        if request is not None:
          http = request.http
          break
    if http is None:
      raise ValueError("Missing a valid http object.")
    msgRoot = MIMEMultipart('mixed')
    # msgRoot should not write out its own headers
    setattr(msgRoot, '_write_headers', lambda self: None)
    # Add all the individual requests.
    for request_id in self._order:
      request, callback = self._requests[request_id]
      msg = MIMENonMultipart('application', 'http')
      msg['Content-Transfer-Encoding'] = 'binary'
      msg['Content-ID'] = self._id_to_header(request_id)
      body = self._serialize_request(request)
      msg.set_payload(body)
      msgRoot.attach(msg)
    body = msgRoot.as_string()
    headers = {}
    headers['content-type'] = ('multipart/mixed; '
                               'boundary="%s"') % msgRoot.get_boundary()
    resp, content = http.request(self._batch_uri, 'POST', body=body,
                                 headers=headers)
    if resp.status >= 300:
      raise HttpError(resp, content, self._batch_uri)
    # Now break up the response and process each one with the correct postproc
    # and trigger the right callbacks.
    boundary, _ = content.split(None, 1)
    # Prepend with a content-type header so FeedParser can handle it.
    header = 'Content-Type: %s\r\n\r\n' % resp['content-type']
    content = header + content
    parser = FeedParser()
    parser.feed(content)
    respRoot = parser.close()
    if not respRoot.is_multipart():
      raise BatchError("Response not in multipart/mixed format.")
    parts = respRoot.get_payload()
    for part in parts:
      request_id = self._header_to_id(part['Content-ID'])
      headers, content = self._deserialize_response(part.get_payload())
      # TODO(jcgregorio) Remove this temporary hack once the server stops
      # gzipping individual response bodies.
      if content[0] != '{':
        gzipped_content = content
        content = gzip.GzipFile(
          fileobj=StringIO.StringIO(gzipped_content)).read()
      request, cb = self._requests[request_id]
      postproc = request.postproc
      # NOTE(review): the per-part response parsed above (bound to 'headers')
      # is discarded and postproc receives the outer batch 'resp', so
      # individual response status codes are never surfaced. Looks like it
      # should pass the deserialized per-part response -- confirm upstream
      # before changing.
      response = postproc(resp, content)
      if cb is not None:
        cb(request_id, response)
      if self._callback is not None:
        self._callback(request_id, response)
class HttpRequestMock(object):
  """Mock of HttpRequest.
  Do not construct directly, instead use RequestMockBuilder.
  """
  def __init__(self, resp, content, postproc):
    """Constructor for HttpRequestMock
    Args:
      resp: httplib2.Response, the response to emulate coming from the request
      content: string, the response body
      postproc: callable, the post processing function usually supplied by
                the model class. See model.JsonModel.response() as an example.
    """
    # Fall back to a canned 200 OK response when none is supplied.
    self.resp = resp if resp is not None else httplib2.Response(
        {'status': 200, 'reason': 'OK'})
    self.content = content
    self.postproc = postproc
    # Mirror the mapping's 'reason' entry onto the attribute callers read.
    if 'reason' in self.resp:
      self.resp.reason = self.resp['reason']
  def execute(self, http=None):
    """Execute the request.
    Same behavior as HttpRequest.execute(), but the response is
    mocked and not really from an HTTP request/response.
    """
    return self.postproc(self.resp, self.content)
class RequestMockBuilder(object):
  """A simple mock of HttpRequest
  The constructor takes a dictionary mapping request methodIds to tuples of
  (httplib2.Response, content, opt_expected_body) that should be returned
  when that method is called. Passing None for the httplib2.Response makes
  the mock produce a 200 OK response.
  If an opt_expected_body (str or dict) is provided, it will be compared to
  the body and UnexpectedBodyError will be raised on inequality.
  Example:
    response = '{"data": {"id": "tag:google.c...'
    requestBuilder = RequestMockBuilder(
      {
        'plus.activities.get': (None, response),
      }
    )
    apiclient.discovery.build("plus", "v1", requestBuilder=requestBuilder)
  Methods without a supplied response return a 200 OK with an empty-object
  body, or raise an exception when check_unexpected is set to True. The
  methodId is taken from the rpcName in the discovery document.
  For more details see the project wiki.
  """
  def __init__(self, responses, check_unexpected=False):
    """Constructor for RequestMockBuilder
    The constructed object should be a callable object
    that can replace the class HttpResponse.
    responses - A dictionary that maps methodIds into tuples
                of (httplib2.Response, content). The methodId
                comes from the 'rpcName' field in the discovery
                document.
    check_unexpected - A boolean setting whether or not UnexpectedMethodError
                       should be raised on unsupplied method.
    """
    self.responses = responses
    self.check_unexpected = check_unexpected
  def __call__(self, http, postproc, uri, method='GET', body=None,
               headers=None, methodId=None, resumable=None):
    """Implements the callable interface that discovery.build() expects
    of requestBuilder, which is to build an object compatible with
    HttpRequest.execute(). See that method for the description of the
    parameters and the expected response.
    """
    # Guard clause: no canned response registered for this method.
    if methodId not in self.responses:
      if self.check_unexpected:
        raise UnexpectedMethodError(methodId)
      model = JsonModel(False)
      return HttpRequestMock(None, '{}', model.response)
    response = self.responses[methodId]
    resp, content = response[:2]
    if len(response) > 2:
      # Test the body against the supplied expected_body.
      expected_body = response[2]
      if bool(expected_body) != bool(body):
        # Not expecting a body and provided one
        # or expecting a body and not provided one.
        raise UnexpectedBodyError(expected_body, body)
      if isinstance(expected_body, str):
        # Compare as parsed JSON so key ordering does not matter.
        expected_body = simplejson.loads(expected_body)
        body = simplejson.loads(body)
      if body != expected_body:
        raise UnexpectedBodyError(expected_body, body)
    return HttpRequestMock(resp, content, postproc)
class HttpMock(object):
  """Mock of httplib2.Http that replays a canned response read from a file."""
  def __init__(self, filename, headers=None):
    """
    Args:
      filename: string, absolute filename to read response from
      headers: dict, header to return with response
    """
    if headers is None:
      headers = {'status': '200 OK'}
    # Use open() instead of the deprecated file() builtin (removed in
    # Python 3), and guarantee the handle is closed even if read() raises.
    f = open(filename, 'r')
    try:
      self.data = f.read()
    finally:
      f.close()
    self.headers = headers
  def request(self, uri,
              method='GET',
              body=None,
              headers=None,
              redirections=1,
              connection_type=None):
    """Return the canned (headers, body) pair; all arguments are ignored."""
    return httplib2.Response(self.headers), self.data
class HttpMockSequence(object):
  """Mock of httplib2.Http
  Replays a scripted sequence of responses: each call to request() consumes
  the next (headers, body) pair from the sequence supplied at construction
  time. Use as if an httplib2.Http instance.
    http = HttpMockSequence([
      ({'status': '401'}, ''),
      ({'status': '200'}, '{"access_token":"1/3w","expires_in":3600}'),
      ({'status': '200'}, 'echo_request_headers'),
      ])
    resp, content = http.request("http://examples.com")
  There are special values you can pass in for content to trigger
  behaviours that are helpful in testing.
  'echo_request_headers' means return the request headers in the response body
  'echo_request_headers_as_json' means return the request headers in
     the response body
  'echo_request_body' means return the request body in the response body
  'echo_request_uri' means return the request uri in the response body
  """
  def __init__(self, iterable):
    """
    Args:
      iterable: iterable, a sequence of pairs of (headers, body)
    """
    self._iterable = iterable
  def request(self, uri,
              method='GET',
              body=None,
              headers=None,
              redirections=1,
              connection_type=None):
    resp_headers, content = self._iterable.pop(0)
    # Substitute the echo markers with the corresponding request datum.
    echoes = (
        ('echo_request_headers', lambda: headers),
        ('echo_request_headers_as_json', lambda: simplejson.dumps(headers)),
        ('echo_request_body', lambda: body),
        ('echo_request_uri', lambda: uri),
        )
    for marker, produce in echoes:
      if content == marker:
        content = produce()
        break
    return httplib2.Response(resp_headers), content
def set_user_agent(http, user_agent):
  """Set the user-agent on every request.
  Args:
    http - An instance of httplib2.Http
        or something that acts like it.
    user_agent: string, the value for the user-agent header.
  Returns:
    A modified instance of http that was passed in.
  Example:
    h = httplib2.Http()
    h = set_user_agent(h, "my-app-name/6.0")
  Most of the time the user-agent will be set doing auth, this is for the rare
  cases where you are accessing an unauthenticated endpoint.
  """
  request_orig = http.request
  # The closure that will replace 'httplib2.Http.request'.
  def new_request(uri, method='GET', body=None, headers=None,
                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                  connection_type=None):
    """Inject the configured user-agent, then delegate to the original."""
    headers = {} if headers is None else headers
    if 'user-agent' in headers:
      # Prepend ours to any user-agent the caller already set.
      headers['user-agent'] = '%s %s' % (user_agent, headers['user-agent'])
    else:
      headers['user-agent'] = user_agent
    return request_orig(uri, method, body, headers,
                        redirections, connection_type)
  http.request = new_request
  return http
def tunnel_patch(http):
  """Tunnel PATCH requests over POST.
  Args:
    http - An instance of httplib2.Http
        or something that acts like it.
  Returns:
    A modified instance of http that was passed in.
  Example:
    h = httplib2.Http()
    h = tunnel_patch(h)
  Useful if you are running on a platform that doesn't support PATCH.
  Apply this last if you are using OAuth 1.0, as changing the method
  will result in a different signature.
  """
  request_orig = http.request
  # The closure that will replace 'httplib2.Http.request'.
  def new_request(uri, method='GET', body=None, headers=None,
                  redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                  connection_type=None):
    """Rewrite PATCH requests as POST with an X-HTTP-Method-Override header."""
    if headers is None:
      headers = {}
    if method == 'PATCH':
      # An OAuth 1.0 signature covers the method, so tunnelling after
      # signing would invalidate it -- warn the caller.
      if 'oauth_token' in headers.get('authorization', ''):
        logging.warning(
            'OAuth 1.0 request made with Credentials after tunnel_patch.')
      # The override header tells the server to treat this POST as a PATCH.
      headers['x-http-method-override'] = "PATCH"
      method = 'POST'
    resp, content = request_orig(uri, method, body, headers,
                        redirections, connection_type)
    return resp, content
  http.request = new_request
  return http
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for OAuth.
Utilities for making it easier to work with OAuth.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import copy
import httplib2
import logging
import oauth2 as oauth
import urllib
import urlparse
from anyjson import simplejson
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
class Error(Exception):
  """Base error for this module."""


class RequestError(Error):
  """Error occurred during request."""


class MissingParameter(Error):
  """A parameter required by the OAuth flow was not supplied."""


class CredentialsInvalidError(Error):
  """The credentials are invalid, e.g. they have been revoked."""
def _abstract():
raise NotImplementedError('You need to override this function')
def _oauth_uri(name, discovery, params):
  """Look up the OAuth URI from the discovery
  document and add query parameters based on
  params.
  name - The name of the OAuth URI to lookup, one
      of 'request', 'access', or 'authorize'.
  discovery - Portion of discovery document the describes
      the OAuth endpoints.
  params - Dictionary that is used to form the query parameters
      for the specified URI.
  """
  if name not in ('request', 'access', 'authorize'):
    raise KeyError(name)
  endpoint = discovery[name]
  # Only pass along the parameters this endpoint declares.
  query = dict((key, params[key])
               for key in endpoint['parameters'].keys()
               if key in params)
  return endpoint['url'] + '?' + urllib.urlencode(query)
class Credentials(object):
  """Base class for all Credentials objects.
  Subclasses must define an authorize() method
  that applies the credentials to an HTTP transport.
  """
  def authorize(self, http):
    """Apply these credentials to an httplib2.Http instance (or
    equivalent), usually by replacing http.request() with a closure
    that adds the appropriate headers before delegating to the
    original Http.request() method.
    """
    _abstract()
class Flow(object):
  """Base class for all Flow objects."""
class Storage(object):
  """Base class for all Storage objects.
  Store and retrieve a single credential.
  """
  def get(self):
    """Retrieve the stored credential.
    Returns:
      apiclient.oauth.Credentials
    """
    _abstract()
  def put(self, credentials):
    """Persist a credential.
    Args:
      credentials: Credentials, the credentials to store.
    """
    _abstract()
class OAuthCredentials(Credentials):
  """Credentials object for OAuth 1.0a
  """
  def __init__(self, consumer, token, user_agent):
    """
    consumer - An instance of oauth.Consumer.
    token - An instance of oauth.Token constructed with
      the access token and secret.
    user_agent - The HTTP User-Agent to provide for this application.
    """
    self.consumer = consumer
    self.token = token
    self.user_agent = user_agent
    self.store = None
    # True if the credentials have been revoked
    self._invalid = False
  @property
  def invalid(self):
    """True if the credentials are invalid, such as being revoked."""
    # getattr() guards against instances unpickled from a state saved
    # before the _invalid attribute existed.
    return getattr(self, "_invalid", False)
  def set_store(self, store):
    """Set the storage for the credential.
    Args:
      store: callable, a callable that when passed a Credential
        will store the credential back to where it came from.
        This is needed to store the latest access_token if it
        has been revoked.
    """
    self.store = store
  def __getstate__(self):
    """Trim the state down to something that can be pickled."""
    d = copy.copy(self.__dict__)
    # The store callable is not picklable; it is restored to None on load.
    del d['store']
    return d
  def __setstate__(self, state):
    """Reconstitute the state of the object from being pickled."""
    self.__dict__.update(state)
    self.store = None
  def authorize(self, http):
    """Authorize an httplib2.Http instance with these Credentials
    Args:
      http - An instance of httplib2.Http
          or something that acts like it.
    Returns:
      A modified instance of http that was passed in.
    Example:
      h = httplib2.Http()
      h = credentials.authorize(h)
    You can't create a new OAuth
    subclass of httplib2.Authentication because
    it never gets passed the absolute URI, which is
    needed for signing. So instead we have to overload
    'request' with a closure that adds in the
    Authorization header and then calls the original version
    of 'request()'.
    """
    request_orig = http.request
    signer = oauth.SignatureMethod_HMAC_SHA1()
    # The closure that will replace 'httplib2.Http.request'.
    def new_request(uri, method='GET', body=None, headers=None,
                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                    connection_type=None):
      """Modify the request headers to add the appropriate
      Authorization header."""
      # Redirects are followed manually so each redirected URI can be
      # re-signed with a fresh OAuth Authorization header.
      response_code = 302
      http.follow_redirects = False
      while response_code in [301, 302]:
        req = oauth.Request.from_consumer_and_token(
            self.consumer, self.token, http_method=method, http_url=uri)
        req.sign_request(signer, self.consumer, self.token)
        if headers is None:
          headers = {}
        headers.update(req.to_header())
        if 'user-agent' in headers:
          headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
        else:
          headers['user-agent'] = self.user_agent
        resp, content = request_orig(uri, method, body, headers,
                                     redirections, connection_type)
        response_code = resp.status
        if response_code in [301, 302]:
          uri = resp['location']
      # Update the stored credential if it becomes invalid.
      if response_code == 401:
        logging.info('Access token no longer valid: %s' % content)
        self._invalid = True
        if self.store is not None:
          self.store(self)
        raise CredentialsInvalidError("Credentials are no longer valid.")
      return resp, content
    http.request = new_request
    return http
class TwoLeggedOAuthCredentials(Credentials):
  """Two Legged Credentials object for OAuth 1.0a.
  The Two Legged object is created directly, not from a flow. Once you
  authorize an httplib2.Http instance you can change the requestor and that
  change will propagate to the authorized httplib2.Http instance. For example:
    http = httplib2.Http()
    http = credentials.authorize(http)
    credentials.requestor = 'foo@example.info'
    http.request(...)
    credentials.requestor = 'bar@example.info'
    http.request(...)
  """
  def __init__(self, consumer_key, consumer_secret, user_agent):
    """
    Args:
      consumer_key: string, An OAuth 1.0 consumer key
      consumer_secret: string, An OAuth 1.0 consumer secret
      user_agent: string, The HTTP User-Agent to provide for this application.
    """
    self.consumer = oauth.Consumer(consumer_key, consumer_secret)
    self.user_agent = user_agent
    self.store = None
    # email address of the user to act on the behalf of.
    self._requestor = None
  @property
  def invalid(self):
    """True if the credentials are invalid, such as being revoked.
    Always returns False for Two Legged Credentials.
    """
    return False
  def getrequestor(self):
    # Accessor for the 'requestor' property defined below.
    return self._requestor
  def setrequestor(self, email):
    # Mutator for the 'requestor' property defined below.
    self._requestor = email
  requestor = property(getrequestor, setrequestor, None,
      'The email address of the user to act on behalf of')
  def set_store(self, store):
    """Set the storage for the credential.
    Args:
      store: callable, a callable that when passed a Credential
        will store the credential back to where it came from.
        This is needed to store the latest access_token if it
        has been revoked.
    """
    self.store = store
  def __getstate__(self):
    """Trim the state down to something that can be pickled."""
    d = copy.copy(self.__dict__)
    # The store callable is not picklable; it is restored to None on load.
    del d['store']
    return d
  def __setstate__(self, state):
    """Reconstitute the state of the object from being pickled."""
    self.__dict__.update(state)
    self.store = None
  def authorize(self, http):
    """Authorize an httplib2.Http instance with these Credentials
    Args:
      http - An instance of httplib2.Http
          or something that acts like it.
    Returns:
      A modified instance of http that was passed in.
    Example:
      h = httplib2.Http()
      h = credentials.authorize(h)
    You can't create a new OAuth
    subclass of httplib2.Authentication because
    it never gets passed the absolute URI, which is
    needed for signing. So instead we have to overload
    'request' with a closure that adds in the
    Authorization header and then calls the original version
    of 'request()'.
    """
    request_orig = http.request
    signer = oauth.SignatureMethod_HMAC_SHA1()
    # The closure that will replace 'httplib2.Http.request'.
    def new_request(uri, method='GET', body=None, headers=None,
                    redirections=httplib2.DEFAULT_MAX_REDIRECTS,
                    connection_type=None):
      """Modify the request headers to add the appropriate
      Authorization header."""
      # Redirects are followed manually so each redirected URI can be
      # re-signed with a fresh OAuth Authorization header.
      response_code = 302
      http.follow_redirects = False
      while response_code in [301, 302]:
        # add in xoauth_requestor_id=self._requestor to the uri
        if self._requestor is None:
          raise MissingParameter(
              'Requestor must be set before using TwoLeggedOAuthCredentials')
        parsed = list(urlparse.urlparse(uri))
        q = parse_qsl(parsed[4])
        q.append(('xoauth_requestor_id', self._requestor))
        parsed[4] = urllib.urlencode(q)
        uri = urlparse.urlunparse(parsed)
        req = oauth.Request.from_consumer_and_token(
            self.consumer, None, http_method=method, http_url=uri)
        req.sign_request(signer, self.consumer, None)
        if headers is None:
          headers = {}
        headers.update(req.to_header())
        if 'user-agent' in headers:
          headers['user-agent'] = self.user_agent + ' ' + headers['user-agent']
        else:
          headers['user-agent'] = self.user_agent
        resp, content = request_orig(uri, method, body, headers,
                                     redirections, connection_type)
        response_code = resp.status
        if response_code in [301, 302]:
          uri = resp['location']
      if response_code == 401:
        logging.info('Access token no longer valid: %s' % content)
        # Do not store the invalid state of the Credentials because
        # being 2LO they could be reinstated in the future.
        raise CredentialsInvalidError("Credentials are invalid.")
      return resp, content
    http.request = new_request
    return http
class FlowThreeLegged(Flow):
  """Does the Three Legged Dance for OAuth 1.0a.
  """
  def __init__(self, discovery, consumer_key, consumer_secret, user_agent,
               **kwargs):
    """
    discovery - Section of the API discovery document that describes
        the OAuth endpoints.
    consumer_key - OAuth consumer key
    consumer_secret - OAuth consumer secret
    user_agent - The HTTP User-Agent that identifies the application.
    **kwargs - The keyword arguments are all optional and required
        parameters for the OAuth calls.
    """
    self.discovery = discovery
    self.consumer_key = consumer_key
    self.consumer_secret = consumer_secret
    self.user_agent = user_agent
    self.params = kwargs
    self.request_token = {}
    required = {}
    # Collect every non-oauth parameter the discovery document marks as
    # required, across all the endpoint descriptions.
    for uriinfo in discovery.itervalues():
      for name, value in uriinfo['parameters'].iteritems():
        if value['required'] and not name.startswith('oauth_'):
          required[name] = 1
    for key in required.iterkeys():
      if key not in self.params:
        raise MissingParameter('Required parameter %s not supplied' % key)
  def step1_get_authorize_url(self, oauth_callback='oob'):
    """Returns a URI to redirect to the provider.
    oauth_callback - Either the string 'oob' for a non-web-based application,
        or a URI that handles the callback from the authorization
        server.
    If oauth_callback is 'oob' then pass in the
    generated verification code to step2_exchange,
    otherwise pass in the query parameters received
    at the callback uri to step2_exchange.
    """
    consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
    client = oauth.Client(consumer)
    headers = {
        'user-agent': self.user_agent,
        'content-type': 'application/x-www-form-urlencoded'
    }
    body = urllib.urlencode({'oauth_callback': oauth_callback})
    uri = _oauth_uri('request', self.discovery, self.params)
    resp, content = client.request(uri, 'POST', headers=headers,
                                   body=body)
    if resp['status'] != '200':
      logging.error('Failed to retrieve temporary authorization: %s', content)
      raise RequestError('Invalid response %s.' % resp['status'])
    # The response body holds the url-encoded request token and secret.
    self.request_token = dict(parse_qsl(content))
    auth_params = copy.copy(self.params)
    auth_params['oauth_token'] = self.request_token['oauth_token']
    return _oauth_uri('authorize', self.discovery, auth_params)
  def step2_exchange(self, verifier):
    """Exchanges an authorized request token
    for OAuthCredentials.
    Args:
      verifier: string, dict - either the verifier token, or a dictionary
        of the query parameters to the callback, which contains
        the oauth_verifier.
    Returns:
      The Credentials object.
    """
    if not (isinstance(verifier, str) or isinstance(verifier, unicode)):
      # A dict of callback query parameters was passed in.
      verifier = verifier['oauth_verifier']
    token = oauth.Token(
        self.request_token['oauth_token'],
        self.request_token['oauth_token_secret'])
    token.set_verifier(verifier)
    consumer = oauth.Consumer(self.consumer_key, self.consumer_secret)
    client = oauth.Client(consumer, token)
    headers = {
        'user-agent': self.user_agent,
        'content-type': 'application/x-www-form-urlencoded'
    }
    uri = _oauth_uri('access', self.discovery, self.params)
    resp, content = client.request(uri, 'POST', headers=headers)
    if resp['status'] != '200':
      logging.error('Failed to retrieve access token: %s', content)
      raise RequestError('Invalid response %s.' % resp['status'])
    oauth_params = dict(parse_qsl(content))
    token = oauth.Token(
        oauth_params['oauth_token'],
        oauth_params['oauth_token_secret'])
    return OAuthCredentials(consumer, token, self.user_agent)
| Python |
#!/usr/bin/python2.4
#
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model objects for requests and responses.
Each API may support one or more serializations, such
as JSON, Atom, etc. The model classes are responsible
for converting between the wire format and the Python
object representation.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import gflags
import logging
import urllib
from anyjson import simplejson
from errors import HttpError
FLAGS = gflags.FLAGS
gflags.DEFINE_boolean('dump_request_response', False,
'Dump all http server requests and responses. '
)
def _abstract():
raise NotImplementedError('You need to override this function')
class Model(object):
  """Model base class.

  All Model classes should implement this interface.
  The Model serializes and de-serializes between a wire
  format such as JSON and a Python object representation.
  """

  def request(self, headers, path_params, query_params, body_value):
    """Updates outgoing requests with a serialized body.

    Args:
      headers: dict, request headers
      path_params: dict, parameters that appear in the request path
      query_params: dict, parameters that appear in the query
      body_value: object, the request body as a Python object, which must be
                  serializable.
    Returns:
      A tuple of (headers, path_params, query, body)
        headers: dict, request headers
        path_params: dict, parameters that appear in the request path
        query: string, query part of the request URI
        body: string, the body serialized in the desired wire format.
    """
    # Abstract: raises NotImplementedError until overridden by a subclass.
    _abstract()

  def response(self, resp, content):
    """Convert the response wire format into a Python object.

    Args:
      resp: httplib2.Response, the HTTP response headers and status
      content: string, the body of the HTTP response

    Returns:
      The body de-serialized as a Python object.

    Raises:
      apiclient.errors.HttpError if a non 2xx response is received.
    """
    # Abstract: raises NotImplementedError until overridden by a subclass.
    _abstract()
class BaseModel(Model):
  """Base model class.

  Subclasses should provide implementations for the "serialize" and
  "deserialize" methods, as well as values for the following class attributes.

  Attributes:
    accept: The value to use for the HTTP Accept header.
    content_type: The value to use for the HTTP Content-type header.
    no_content_response: The value to return when deserializing a 204 "No
        Content" response.
    alt_param: The value to supply as the "alt" query parameter for requests.
  """

  # Subclasses must override all four of these class attributes.
  accept = None
  content_type = None
  no_content_response = None
  alt_param = None

  def _log_request(self, headers, path_params, query, body):
    """Logs debugging information about the request if requested."""
    # Gated on the --dump_request_response flag so normal runs stay quiet.
    if FLAGS.dump_request_response:
      logging.info('--request-start--')
      logging.info('-headers-start-')
      for h, v in headers.iteritems():
        logging.info('%s: %s', h, v)
      logging.info('-headers-end-')
      logging.info('-path-parameters-start-')
      for h, v in path_params.iteritems():
        logging.info('%s: %s', h, v)
      logging.info('-path-parameters-end-')
      logging.info('body: %s', body)
      logging.info('query: %s', query)
      logging.info('--request-end--')

  def request(self, headers, path_params, query_params, body_value):
    """Updates outgoing requests with a serialized body.

    Args:
      headers: dict, request headers
      path_params: dict, parameters that appear in the request path
      query_params: dict, parameters that appear in the query
      body_value: object, the request body as a Python object, which must be
                  serializable by simplejson.
    Returns:
      A tuple of (headers, path_params, query, body)
        headers: dict, request headers
        path_params: dict, parameters that appear in the request path
        query: string, query part of the request URI
        body: string, the body serialized as JSON
    """
    query = self._build_query(query_params)
    headers['accept'] = self.accept
    headers['accept-encoding'] = 'gzip, deflate'
    # Append the client identifier to any caller-supplied user-agent string.
    if 'user-agent' in headers:
      headers['user-agent'] += ' '
    else:
      headers['user-agent'] = ''
    headers['user-agent'] += 'google-api-python-client/1.0'
    # Only set a content-type / serialize when there is actually a body.
    if body_value is not None:
      headers['content-type'] = self.content_type
      body_value = self.serialize(body_value)
    self._log_request(headers, path_params, query, body_value)
    return (headers, path_params, query, body_value)

  def _build_query(self, params):
    """Builds a query string.

    Args:
      params: dict, the query parameters

    Returns:
      The query parameters properly encoded into an HTTP URI query string,
      always prefixed with '?'.
    """
    # NOTE(review): this mutates the caller's params dict when alt_param is
    # set — confirm no caller relies on params being unchanged.
    if self.alt_param is not None:
      params.update({'alt': self.alt_param})
    astuples = []
    for key, value in params.iteritems():
      if type(value) == type([]):
        # Repeated parameter: emit one key=value pair per list element.
        for x in value:
          x = x.encode('utf-8')
          astuples.append((key, x))
      else:
        # Encode unicode values to UTF-8 bytes before urlencoding.
        if getattr(value, 'encode', False) and callable(value.encode):
          value = value.encode('utf-8')
        astuples.append((key, value))
    return '?' + urllib.urlencode(astuples)

  def _log_response(self, resp, content):
    """Logs debugging information about the response if requested."""
    if FLAGS.dump_request_response:
      logging.info('--response-start--')
      for h, v in resp.iteritems():
        logging.info('%s: %s', h, v)
      if content:
        logging.info(content)
      logging.info('--response-end--')

  def response(self, resp, content):
    """Convert the response wire format into a Python object.

    Args:
      resp: httplib2.Response, the HTTP response headers and status
      content: string, the body of the HTTP response

    Returns:
      The body de-serialized as a Python object.

    Raises:
      apiclient.errors.HttpError if a non 2xx response is received.
    """
    self._log_response(resp, content)
    # Error handling is TBD, for example, do we retry
    # for some operation/error combinations?
    if resp.status < 300:
      if resp.status == 204:
        # A 204: No Content response should be treated differently
        # to all the other success states
        return self.no_content_response
      return self.deserialize(content)
    else:
      logging.debug('Content from bad request was: %s' % content)
      raise HttpError(resp, content)

  def serialize(self, body_value):
    """Perform the actual Python object serialization.

    Args:
      body_value: object, the request body as a Python object.

    Returns:
      string, the body in serialized form.
    """
    # Abstract: subclasses must implement the concrete wire format.
    _abstract()

  def deserialize(self, content):
    """Perform the actual deserialization from response string to Python
    object.

    Args:
      content: string, the body of the HTTP response

    Returns:
      The body de-serialized as a Python object.
    """
    # Abstract: subclasses must implement the concrete wire format.
    _abstract()
class JsonModel(BaseModel):
  """Model class for JSON.

  Serializes and de-serializes between JSON and the Python
  object representation of HTTP request and response bodies.
  """

  accept = 'application/json'
  content_type = 'application/json'
  alt_param = 'json'

  def __init__(self, data_wrapper=False):
    """Construct a JsonModel.

    Args:
      data_wrapper: boolean, wrap requests and responses in a data wrapper
    """
    self._data_wrapper = data_wrapper

  def serialize(self, body_value):
    """Serialize body_value to JSON, adding a {'data': ...} envelope when
    the data-wrapper feature is on and the body is an unwrapped dict."""
    needs_wrap = (self._data_wrapper
                  and isinstance(body_value, dict)
                  and 'data' not in body_value)
    if needs_wrap:
      body_value = {'data': body_value}
    return simplejson.dumps(body_value)

  def deserialize(self, content):
    """Parse JSON content, unwrapping a top-level 'data' envelope if present."""
    body = simplejson.loads(content)
    if isinstance(body, dict) and 'data' in body:
      body = body['data']
    return body

  @property
  def no_content_response(self):
    """Value returned when a 204 No Content response is deserialized."""
    return {}
class RawModel(JsonModel):
  """Model class for requests that don't return JSON.

  Requests are still serialized as JSON (inherited from JsonModel), but the
  response body is handed back verbatim instead of being parsed.
  """

  accept = '*/*'
  content_type = 'application/json'
  alt_param = None

  def deserialize(self, content):
    """Return the raw response body unchanged."""
    return content

  @property
  def no_content_response(self):
    """Value returned when a 204 No Content response is deserialized."""
    return ''
class ProtocolBufferModel(BaseModel):
  """Model class for protocol buffers.

  Serializes and de-serializes the binary protocol buffer sent in the HTTP
  request and response bodies.
  """

  accept = 'application/x-protobuf'
  content_type = 'application/x-protobuf'
  alt_param = 'proto'

  def __init__(self, protocol_buffer):
    """Constructs a ProtocolBufferModel.

    The serialized protocol buffer returned in an HTTP response will be
    de-serialized using the given protocol buffer class.

    Args:
      protocol_buffer: The protocol buffer class used to de-serialize a
          response from the API.
    """
    self._protocol_buffer = protocol_buffer

  def serialize(self, body_value):
    """Serialize a protocol buffer message to its binary wire format."""
    return body_value.SerializeToString()

  def deserialize(self, content):
    """Parse binary wire-format bytes into a protocol buffer message."""
    return self._protocol_buffer.FromString(content)

  @property
  def no_content_response(self):
    """An empty message instance, returned for 204 No Content responses."""
    return self._protocol_buffer()
def makepatch(original, modified):
  """Create a patch object.

  Some methods support PATCH, an efficient way to send updates to a resource.
  This method allows the easy construction of patch bodies by looking at the
  differences between a resource before and after it was modified.

  Args:
    original: object, the original deserialized resource
    modified: object, the modified deserialized resource

  Returns:
    An object that contains only the changes from original to modified, in a
    form suitable to pass to a PATCH method.

  Example usage:
    item = service.activities().get(postid=postid, userid=userid).execute()
    original = copy.deepcopy(item)
    item['object']['content'] = 'This is updated.'
    service.activities.patch(postid=postid, userid=userid,
      body=makepatch(original, item)).execute()
  """
  patch = {}
  # items() instead of iteritems(): identical semantics on Python 2, and the
  # function also runs unchanged on Python 3.
  for key, original_value in original.items():
    modified_value = modified.get(key, None)
    if modified_value is None:
      # Use None to signal that the element is deleted
      patch[key] = None
    elif original_value != modified_value:
      if isinstance(original_value, dict):
        # Recursively descend objects
        patch[key] = makepatch(original_value, modified_value)
      else:
        # In the case of simple types or arrays we just replace
        patch[key] = modified_value
    else:
      # Don't add anything to patch if there's no change
      pass
  # Anything present only in the modified resource is an addition.
  for key in modified:
    if key not in original:
      patch[key] = modified[key]
  return patch
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for discovery based APIs
A client library for Google's discovery based APIs.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
__all__ = [
'build', 'build_from_document'
]
import copy
import httplib2
import logging
import os
import random
import re
import uritemplate
import urllib
import urlparse
import mimeparse
import mimetypes
try:
from urlparse import parse_qsl
except ImportError:
from cgi import parse_qsl
from apiclient.anyjson import simplejson
from apiclient.errors import HttpError
from apiclient.errors import InvalidJsonError
from apiclient.errors import MediaUploadSizeError
from apiclient.errors import UnacceptableMimeTypeError
from apiclient.errors import UnknownApiNameOrVersion
from apiclient.errors import UnknownLinkType
from apiclient.http import HttpRequest
from apiclient.http import MediaFileUpload
from apiclient.http import MediaUpload
from apiclient.model import JsonModel
from apiclient.model import RawModel
from apiclient.schema import Schemas
from email.mime.multipart import MIMEMultipart
from email.mime.nonmultipart import MIMENonMultipart
URITEMPLATE = re.compile('{[^}]*}')
VARNAME = re.compile('[a-zA-Z0-9_-]+')
DISCOVERY_URI = ('https://www.googleapis.com/discovery/v1/apis/'
'{api}/{apiVersion}/rest')
DEFAULT_METHOD_DOC = 'A description of how to use this function'
# Query parameters that work, but don't appear in discovery
STACK_QUERY_PARAMETERS = ['trace', 'fields', 'pp', 'prettyPrint', 'userIp',
'userip', 'strict']
RESERVED_WORDS = ['and', 'assert', 'break', 'class', 'continue', 'def', 'del',
'elif', 'else', 'except', 'exec', 'finally', 'for', 'from',
'global', 'if', 'import', 'in', 'is', 'lambda', 'not', 'or',
'pass', 'print', 'raise', 'return', 'try', 'while' ]
def _fix_method_name(name):
if name in RESERVED_WORDS:
return name + '_'
else:
return name
def _write_headers(self):
# Utility no-op method for multipart media handling
pass
def _add_query_parameter(url, name, value):
"""Adds a query parameter to a url
Args:
url: string, url to add the query parameter to.
name: string, query parameter name.
value: string, query parameter value.
Returns:
Updated query parameter. Does not update the url if value is None.
"""
if value is None:
return url
else:
parsed = list(urlparse.urlparse(url))
q = parse_qsl(parsed[4])
q.append((name, value))
parsed[4] = urllib.urlencode(q)
return urlparse.urlunparse(parsed)
def key2param(key):
  """Converts key names into parameter names.

  For example, converting "max-results" -> "max_results".
  Non-alphanumeric characters become underscores; a leading 'x' is added
  when the key does not start with a letter.
  """
  prefix = '' if key[0].isalpha() else 'x'
  return prefix + ''.join(c if c.isalnum() else '_' for c in key)
def build(serviceName, version,
          http=None,
          discoveryServiceUrl=DISCOVERY_URI,
          developerKey=None,
          model=None,
          requestBuilder=HttpRequest):
  """Construct a Resource for interacting with an API.

  Construct a Resource object for interacting with
  an API. The serviceName and version are the
  names from the Discovery service.

  Args:
    serviceName: string, name of the service
    version: string, the version of the service
    http: httplib2.Http, transport used to fetch the discovery document.
      A default Http instance is created when omitted.
    discoveryServiceUrl: string, a URI Template that points to
      the location of the discovery service. It should have two
      parameters {api} and {apiVersion} that when filled in
      produce an absolute URI to the discovery document for
      that service.
    developerKey: string, key obtained
      from https://code.google.com/apis/console
    model: apiclient.Model, converts to and from the wire format
    requestBuilder: apiclient.http.HttpRequest, encapsulator for
      an HTTP request

  Returns:
    A Resource object with methods for interacting with
    the service.

  Raises:
    UnknownApiNameOrVersion: if the discovery service reports 404.
    HttpError: for any other non-2xx discovery response.
    InvalidJsonError: if the discovery document is not valid JSON.
  """
  params = {
      'api': serviceName,
      'apiVersion': version
      }

  if http is None:
    http = httplib2.Http()

  requested_url = uritemplate.expand(discoveryServiceUrl, params)

  # REMOTE_ADDR is defined by the CGI spec [RFC3875] as the environment
  # variable that contains the network address of the client sending the
  # request. If it exists then add that to the request for the discovery
  # document to avoid exceeding the quota on discovery requests.
  if 'REMOTE_ADDR' in os.environ:
    requested_url = _add_query_parameter(requested_url, 'userIp',
                                         os.environ['REMOTE_ADDR'])
  logging.info('URL being requested: %s' % requested_url)

  resp, content = http.request(requested_url)

  if resp.status == 404:
    raise UnknownApiNameOrVersion("name: %s version: %s" % (serviceName,
                                                            version))
  if resp.status >= 400:
    raise HttpError(resp, content, requested_url)

  # Parsed only as validation here; build_from_document() re-parses content.
  try:
    service = simplejson.loads(content)
  except ValueError:
    # The exception object itself was never used; log the offending body.
    logging.error('Failed to parse as JSON: ' + content)
    raise InvalidJsonError()

  filename = os.path.join(os.path.dirname(__file__), 'contrib',
                          serviceName, 'future.json')
  # open() instead of the deprecated file() builtin, and guarantee the
  # handle is closed even when read() fails.
  try:
    f = open(filename, 'r')
    try:
      future = f.read()
    finally:
      f.close()
  except IOError:
    # No future.json for this service; proceed without it.
    future = None

  return build_from_document(content, discoveryServiceUrl, future,
                             http, developerKey, model, requestBuilder)
def build_from_document(
    service,
    base,
    future=None,
    http=None,
    developerKey=None,
    model=None,
    requestBuilder=HttpRequest):
  """Create a Resource for interacting with an API.

  Same as `build()`, but constructs the Resource object from a discovery
  document that it is given, as opposed to retrieving one over HTTP.

  Args:
    service: string, discovery document
    base: string, base URI for all HTTP requests, usually the discovery URI
    future: string, discovery document with future capabilities
    http: httplib2.Http, An instance of httplib2.Http or something that acts
      like it that HTTP requests will be made through.
    developerKey: string, Key for controlling API usage, generated
      from the API Console.
    model: Model class instance that serializes and
      de-serializes requests and responses.
    requestBuilder: Takes an http request and packages it up to be executed.

  Returns:
    A Resource object with methods for interacting with
    the service.
  """
  service = simplejson.loads(service)
  base = urlparse.urljoin(base, service['basePath'])

  # The optional future document carries auth discovery information.
  future = simplejson.loads(future) if future else {}
  auth_discovery = future.get('auth', {})

  schema = Schemas(service)

  if model is None:
    features = service.get('features', [])
    model = JsonModel('dataWrapper' in features)

  resource = createResource(http, base, model, requestBuilder, developerKey,
                            service, future, schema)

  def auth_method():
    """Discovery information about the authentication the API uses."""
    return auth_discovery

  resource.auth_discovery = auth_method

  return resource
def _cast(value, schema_type):
"""Convert value to a string based on JSON Schema type.
See http://tools.ietf.org/html/draft-zyp-json-schema-03 for more details on
JSON Schema.
Args:
value: any, the value to convert
schema_type: string, the type that value should be interpreted as
Returns:
A string representation of 'value' based on the schema_type.
"""
if schema_type == 'string':
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
elif schema_type == 'integer':
return str(int(value))
elif schema_type == 'number':
return str(float(value))
elif schema_type == 'boolean':
return str(bool(value)).lower()
else:
if type(value) == type('') or type(value) == type(u''):
return value
else:
return str(value)
MULTIPLIERS = {
"KB": 2 ** 10,
"MB": 2 ** 20,
"GB": 2 ** 30,
"TB": 2 ** 40,
}
def _media_size_to_long(maxSize):
"""Convert a string media size, such as 10GB or 3TB into an integer."""
if len(maxSize) < 2:
return 0
units = maxSize[-2:].upper()
multiplier = MULTIPLIERS.get(units, 0)
if multiplier:
return int(maxSize[:-2]) * multiplier
else:
return int(maxSize)
def createResource(http, baseUrl, model, requestBuilder,
                   developerKey, resourceDesc, futureDesc, schema):
  # Dynamically builds a Resource class from the discovery description
  # (resourceDesc), attaching one generated method per described API method,
  # plus nested collection resources and *_next pagination helpers.

  class Resource(object):
    """A class for interacting with a resource."""

    def __init__(self):
      self._http = http
      self._baseUrl = baseUrl
      self._model = model
      self._developerKey = developerKey
      self._requestBuilder = requestBuilder

  def createMethod(theclass, methodName, methodDesc, futureDesc):
    # Generates an instance method on theclass that performs the API call
    # described by methodDesc, including parameter validation, URL expansion
    # and (optionally) media upload handling.
    methodName = _fix_method_name(methodName)
    pathUrl = methodDesc['path']
    httpMethod = methodDesc['httpMethod']
    methodId = methodDesc['id']

    mediaPathUrl = None
    accept = []
    maxSize = 0
    if 'mediaUpload' in methodDesc:
      mediaUpload = methodDesc['mediaUpload']
      mediaPathUrl = mediaUpload['protocols']['simple']['path']
      mediaResumablePathUrl = mediaUpload['protocols']['resumable']['path']
      accept = mediaUpload['accept']
      maxSize = _media_size_to_long(mediaUpload.get('maxSize', ''))

    # Synthesize parameter descriptions for undocumented stack query
    # parameters and for the request body / media body.
    if 'parameters' not in methodDesc:
      methodDesc['parameters'] = {}
    for name in STACK_QUERY_PARAMETERS:
      methodDesc['parameters'][name] = {
          'type': 'string',
          'location': 'query'
          }

    if httpMethod in ['PUT', 'POST', 'PATCH']:
      methodDesc['parameters']['body'] = {
          'description': 'The request body.',
          'type': 'object',
          'required': True,
          }
      if 'request' in methodDesc:
        methodDesc['parameters']['body'].update(methodDesc['request'])
      else:
        methodDesc['parameters']['body']['type'] = 'object'
      if 'mediaUpload' in methodDesc:
        methodDesc['parameters']['media_body'] = {
            'description': 'The filename of the media request body.',
            'type': 'string',
            'required': False,
            }
        # With media upload available, the body becomes optional.
        methodDesc['parameters']['body']['required'] = False

    argmap = {} # Map from method parameter name to query parameter name
    required_params = [] # Required parameters
    repeated_params = [] # Repeated parameters
    pattern_params = {}  # Parameters that must match a regex
    query_params = [] # Parameters that will be used in the query string
    path_params = {} # Parameters that will be used in the base URL
    param_type = {} # The type of the parameter
    enum_params = {} # Allowable enumeration values for each parameter

    if 'parameters' in methodDesc:
      for arg, desc in methodDesc['parameters'].iteritems():
        param = key2param(arg)
        argmap[param] = arg

        if desc.get('pattern', ''):
          pattern_params[param] = desc['pattern']
        if desc.get('enum', ''):
          enum_params[param] = desc['enum']
        if desc.get('required', False):
          required_params.append(param)
        if desc.get('repeated', False):
          repeated_params.append(param)
        if desc.get('location') == 'query':
          query_params.append(param)
        if desc.get('location') == 'path':
          path_params[param] = param
        param_type[param] = desc.get('type', 'string')

    # Any variable appearing in the URI template is a path parameter,
    # even if the discovery document listed it as a query parameter.
    for match in URITEMPLATE.finditer(pathUrl):
      for namematch in VARNAME.finditer(match.group(0)):
        name = key2param(namematch.group(0))
        path_params[name] = name
        if name in query_params:
          query_params.remove(name)

    def method(self, **kwargs):
      # Validate keyword arguments against the discovery description.
      for name in kwargs.iterkeys():
        if name not in argmap:
          raise TypeError('Got an unexpected keyword argument "%s"' % name)

      for name in required_params:
        if name not in kwargs:
          raise TypeError('Missing required parameter "%s"' % name)

      for name, regex in pattern_params.iteritems():
        if name in kwargs:
          if isinstance(kwargs[name], basestring):
            pvalues = [kwargs[name]]
          else:
            pvalues = kwargs[name]
          for pvalue in pvalues:
            if re.match(regex, pvalue) is None:
              raise TypeError(
                  'Parameter "%s" value "%s" does not match the pattern "%s"' %
                  (name, pvalue, regex))

      for name, enums in enum_params.iteritems():
        if name in kwargs:
          if kwargs[name] not in enums:
            raise TypeError(
                'Parameter "%s" value "%s" is not an allowed value in "%s"' %
                (name, kwargs[name], str(enums)))

      # Split validated arguments into query and path parameters, casting
      # each value to its schema-declared string form.
      actual_query_params = {}
      actual_path_params = {}
      for key, value in kwargs.iteritems():
        to_type = param_type.get(key, 'string')
        # For repeated parameters we cast each member of the list.
        if key in repeated_params and type(value) == type([]):
          cast_value = [_cast(x, to_type) for x in value]
        else:
          cast_value = _cast(value, to_type)
        if key in query_params:
          actual_query_params[argmap[key]] = cast_value
        if key in path_params:
          actual_path_params[argmap[key]] = cast_value
      body_value = kwargs.get('body', None)
      media_filename = kwargs.get('media_body', None)

      if self._developerKey:
        actual_query_params['key'] = self._developerKey

      model = self._model
      # If there is no schema for the response then presume a binary blob.
      if 'response' not in methodDesc:
        model = RawModel()

      headers = {}
      headers, params, query, body = model.request(headers,
          actual_path_params, actual_query_params, body_value)

      expanded_url = uritemplate.expand(pathUrl, params)
      url = urlparse.urljoin(self._baseUrl, expanded_url + query)

      resumable = None
      multipart_boundary = ''

      if media_filename:
        # Convert a simple filename into a MediaUpload object.
        if isinstance(media_filename, basestring):
          (media_mime_type, encoding) = mimetypes.guess_type(media_filename)
          if media_mime_type is None:
            raise UnknownFileType(media_filename)
          if not mimeparse.best_match([media_mime_type], ','.join(accept)):
            raise UnacceptableMimeTypeError(media_mime_type)
          media_upload = MediaFileUpload(media_filename, media_mime_type)
        elif isinstance(media_filename, MediaUpload):
          media_upload = media_filename
        else:
          raise TypeError('media_filename must be str or MediaUpload.')
        if media_upload.resumable():
          resumable = media_upload

        # Check the maxSize
        if maxSize > 0 and media_upload.size() > maxSize:
          raise MediaUploadSizeError("Media larger than: %s" % maxSize)

        # Use the media path uri for media uploads
        if media_upload.resumable():
          expanded_url = uritemplate.expand(mediaResumablePathUrl, params)
        else:
          expanded_url = uritemplate.expand(mediaPathUrl, params)
        url = urlparse.urljoin(self._baseUrl, expanded_url + query)

        if body is None:
          # This is a simple media upload
          headers['content-type'] = media_upload.mimetype()
          # NOTE(review): this re-expands the resumable path even for
          # non-resumable uploads but never reassigns url — looks like a
          # leftover; confirm before removing.
          expanded_url = uritemplate.expand(mediaResumablePathUrl, params)
          if not media_upload.resumable():
            body = media_upload.getbytes(0, media_upload.size())
        else:
          # This is a multipart/related upload.
          msgRoot = MIMEMultipart('related')
          # msgRoot should not write out its own headers
          setattr(msgRoot, '_write_headers', lambda self: None)

          # attach the body as one part
          msg = MIMENonMultipart(*headers['content-type'].split('/'))
          msg.set_payload(body)
          msgRoot.attach(msg)

          # attach the media as the second part
          msg = MIMENonMultipart(*media_upload.mimetype().split('/'))
          msg['Content-Transfer-Encoding'] = 'binary'

          if media_upload.resumable():
            # This is a multipart resumable upload, where a multipart payload
            # looks like this:
            #
            #  --===============1678050750164843052==
            #  Content-Type: application/json
            #  MIME-Version: 1.0
            #
            #  {'foo': 'bar'}
            #  --===============1678050750164843052==
            #  Content-Type: image/png
            #  MIME-Version: 1.0
            #  Content-Transfer-Encoding: binary
            #
            #  <BINARY STUFF>
            #  --===============1678050750164843052==--
            #
            # In the case of resumable multipart media uploads, the <BINARY
            # STUFF> is large and will be spread across multiple PUTs.  What we
            # do here is compose the multipart message with a random payload in
            # place of <BINARY STUFF> and then split the resulting content into
            # two pieces, text before <BINARY STUFF> and text after <BINARY
            # STUFF>. The text after <BINARY STUFF> is the multipart boundary.
            # In apiclient.http the HttpRequest will send the text before
            # <BINARY STUFF>, then send the actual binary media in chunks, and
            # then will send the multipart delimeter.
            payload = hex(random.getrandbits(300))
            msg.set_payload(payload)
            msgRoot.attach(msg)
            body = msgRoot.as_string()
            body, _ = body.split(payload)
            resumable = media_upload
          else:
            payload = media_upload.getbytes(0, media_upload.size())
            msg.set_payload(payload)
            msgRoot.attach(msg)
            body = msgRoot.as_string()

          multipart_boundary = msgRoot.get_boundary()
          headers['content-type'] = ('multipart/related; '
                                     'boundary="%s"') % multipart_boundary

      logging.info('URL being requested: %s' % url)
      return self._requestBuilder(self._http,
                                  model.response,
                                  url,
                                  method=httpMethod,
                                  body=body,
                                  headers=headers,
                                  methodId=methodId,
                                  resumable=resumable)

    # Assemble the generated method's docstring from the discovery
    # description of the method and its parameters.
    docs = [methodDesc.get('description', DEFAULT_METHOD_DOC), '\n\n']
    if len(argmap) > 0:
      docs.append('Args:\n')
      for arg in argmap.iterkeys():
        if arg in STACK_QUERY_PARAMETERS:
          continue
        repeated = ''
        if arg in repeated_params:
          repeated = ' (repeated)'
        required = ''
        if arg in required_params:
          required = ' (required)'
        paramdesc = methodDesc['parameters'][argmap[arg]]
        paramdoc = paramdesc.get('description', 'A parameter')
        if '$ref' in paramdesc:
          docs.append(
              ('  %s: object, %s%s%s\n    The object takes the'
              ' form of:\n\n%s\n\n') % (arg, paramdoc, required, repeated,
                schema.prettyPrintByName(paramdesc['$ref'])))
        else:
          paramtype = paramdesc.get('type', 'string')
          docs.append('  %s: %s, %s%s%s\n' % (arg, paramtype, paramdoc, required,
                                              repeated))
        enum = paramdesc.get('enum', [])
        enumDesc = paramdesc.get('enumDescriptions', [])
        if enum and enumDesc:
          docs.append('    Allowed values\n')
          for (name, desc) in zip(enum, enumDesc):
            docs.append('      %s - %s\n' % (name, desc))
    if 'response' in methodDesc:
      docs.append('\nReturns:\n  An object of the form\n\n    ')
      docs.append(schema.prettyPrintSchema(methodDesc['response']))

    setattr(method, '__doc__', ''.join(docs))
    setattr(theclass, methodName, method)

  def createNextMethodFromFuture(theclass, methodName, methodDesc, futureDesc):
    """ This is a legacy method, as only Buzz and Moderator use the future.json
    functionality for generating _next methods. It will be kept around as long
    as those API versions are around, but no new APIs should depend upon it.
    """
    methodName = _fix_method_name(methodName)
    methodId = methodDesc['id'] + '.next'

    def methodNext(self, previous):
      """Retrieve the next page of results.

      Takes a single argument, 'body', which is the results
      from the last call, and returns the next set of items
      in the collection.

      Returns:
        None if there are no more items in the collection.
      """
      if futureDesc['type'] != 'uri':
        raise UnknownLinkType(futureDesc['type'])

      try:
        # Walk the location path into the previous response to find the
        # next-page URI.
        p = previous
        for key in futureDesc['location']:
          p = p[key]
        url = p
      except (KeyError, TypeError):
        return None

      url = _add_query_parameter(url, 'key', self._developerKey)

      headers = {}
      headers, params, query, body = self._model.request(headers, {}, {}, None)

      logging.info('URL being requested: %s' % url)
      # NOTE(review): this response is discarded and the same URL is then
      # wrapped in a request builder below — the GET here looks like a
      # redundant extra network call; confirm before removing.
      resp, content = self._http.request(url, method='GET', headers=headers)

      return self._requestBuilder(self._http,
                                  self._model.response,
                                  url,
                                  method='GET',
                                  headers=headers,
                                  methodId=methodId)

    setattr(theclass, methodName, methodNext)

  def createNextMethod(theclass, methodName, methodDesc, futureDesc):
    # NOTE(review): futureDesc is unused here (the caller passes methodName
    # in that position); kept for signature parity with the other factories.
    methodName = _fix_method_name(methodName)
    methodId = methodDesc['id'] + '.next'

    def methodNext(self, previous_request, previous_response):
      """Retrieves the next page of results.

      Args:
        previous_request: The request for the previous page.
        previous_response: The response from the request for the previous page.

      Returns:
        A request object that you can call 'execute()' on to request the next
        page. Returns None if there are no more items in the collection.
      """
      # Retrieve nextPageToken from previous_response
      # Use as pageToken in previous_request to create new request.

      if 'nextPageToken' not in previous_response:
        return None

      request = copy.copy(previous_request)

      pageToken = previous_response['nextPageToken']
      parsed = list(urlparse.urlparse(request.uri))
      q = parse_qsl(parsed[4])

      # Find and remove old 'pageToken' value from URI
      newq = [(key, value) for (key, value) in q if key != 'pageToken']
      newq.append(('pageToken', pageToken))
      parsed[4] = urllib.urlencode(newq)
      uri = urlparse.urlunparse(parsed)

      request.uri = uri

      logging.info('URL being requested: %s' % uri)

      return request

    setattr(theclass, methodName, methodNext)

  # Add basic methods to Resource
  if 'methods' in resourceDesc:
    for methodName, methodDesc in resourceDesc['methods'].iteritems():
      if futureDesc:
        future = futureDesc['methods'].get(methodName, {})
      else:
        future = None
      createMethod(Resource, methodName, methodDesc, future)

  # Add in nested resources
  if 'resources' in resourceDesc:

    def createResourceMethod(theclass, methodName, methodDesc, futureDesc):
      # Attaches an accessor that lazily constructs the nested resource.
      methodName = _fix_method_name(methodName)

      def methodResource(self):
        return createResource(self._http, self._baseUrl, self._model,
                              self._requestBuilder, self._developerKey,
                              methodDesc, futureDesc, schema)

      setattr(methodResource, '__doc__', 'A collection resource.')
      setattr(methodResource, '__is_resource__', True)
      setattr(theclass, methodName, methodResource)

    for methodName, methodDesc in resourceDesc['resources'].iteritems():
      if futureDesc and 'resources' in futureDesc:
        future = futureDesc['resources'].get(methodName, {})
      else:
        future = {}
      createResourceMethod(Resource, methodName, methodDesc, future)

  # Add <m>_next() methods to Resource
  if futureDesc and 'methods' in futureDesc:
    for methodName, methodDesc in futureDesc['methods'].iteritems():
      if 'next' in methodDesc and methodName in resourceDesc['methods']:
        createNextMethodFromFuture(Resource, methodName + '_next',
                                   resourceDesc['methods'][methodName],
                                   methodDesc['next'])
  # Add _next() methods
  # Look for response bodies in schema that contain nextPageToken, and methods
  # that take a pageToken parameter.
  if 'methods' in resourceDesc:
    for methodName, methodDesc in resourceDesc['methods'].iteritems():
      if 'response' in methodDesc:
        responseSchema = methodDesc['response']
        if '$ref' in responseSchema:
          responseSchema = schema.get(responseSchema['$ref'])
        hasNextPageToken = 'nextPageToken' in responseSchema.get('properties',
                                                                 {})
        hasPageToken = 'pageToken' in methodDesc.get('parameters', {})
        if hasNextPageToken and hasPageToken:
          createNextMethod(Resource, methodName + '_next',
                           resourceDesc['methods'][methodName],
                           methodName)

  return Resource()
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for OAuth.
Utilities for making it easier to work with OAuth 1.0 credentials.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import pickle
import threading
from apiclient.oauth import Storage as BaseStorage
class Storage(BaseStorage):
  """Store and retrieve a single credential to and from a file."""

  def __init__(self, filename):
    """
    Args:
      filename: string, path of the file used to persist the credential.
    """
    self._filename = filename
    self._lock = threading.Lock()

  def get(self):
    """Retrieve Credential from file.

    Returns:
      apiclient.oauth.Credentials, or None when the file is missing or its
      contents cannot be unpickled (deliberate best-effort behavior).
    """
    self._lock.acquire()
    try:
      # Binary mode: pickle data is bytes, not text.
      f = open(self._filename, 'rb')
      try:
        credentials = pickle.loads(f.read())
      finally:
        f.close()
      credentials.set_store(self.put)
    except Exception:
      # Was a bare except; narrowed so KeyboardInterrupt/SystemExit are no
      # longer swallowed, while still returning None for any read failure.
      credentials = None
    finally:
      # Always release the lock, even on unexpected errors.
      self._lock.release()
    return credentials

  def put(self, credentials):
    """Write a pickled Credentials to file.

    Args:
      credentials: Credentials, the credentials to store.
    """
    self._lock.acquire()
    try:
      f = open(self._filename, 'wb')
      try:
        f.write(pickle.dumps(credentials))
      finally:
        f.close()
    finally:
      # The original leaked the lock if open() or write() raised.
      self._lock.release()
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import apiclient
import base64
import pickle
from django.db import models
class OAuthCredentialsField(models.Field):
  """Django model field holding an apiclient OAuth Credentials object.

  The credential is pickled and base64-encoded into a VARCHAR column on
  write; the inverse transform is applied on read.
  """

  __metaclass__ = models.SubfieldBase

  def db_type(self):
    # Opaque base64 text, so a plain VARCHAR column suffices.
    return 'VARCHAR'

  def to_python(self, value):
    """Convert a database value back into a Credentials object."""
    # None passes through, and values already deserialized (assigned
    # directly on the model) are returned unchanged.
    if value is None or isinstance(value, apiclient.oauth.Credentials):
      return value
    return pickle.loads(base64.b64decode(value))

  def get_db_prep_value(self, value):
    """Serialize a Credentials object for database storage."""
    return base64.b64encode(pickle.dumps(value))
class FlowThreeLeggedField(models.Field):
  """Django model field holding an apiclient FlowThreeLegged object.

  The flow is pickled and base64-encoded into a VARCHAR column on write;
  the inverse transform is applied on read.
  """

  __metaclass__ = models.SubfieldBase

  def db_type(self):
    # Opaque base64 text, so a plain VARCHAR column suffices.
    return 'VARCHAR'

  def to_python(self, value):
    """Convert a database value back into a FlowThreeLegged object."""
    # Removed a leftover debug statement (print "In to_python", value)
    # that wrote to stdout on every attribute access.
    if value is None:
      return None
    if isinstance(value, apiclient.oauth.FlowThreeLegged):
      # Already deserialized (e.g. assigned directly on the model).
      return value
    return pickle.loads(base64.b64decode(value))

  def get_db_prep_value(self, value):
    """Serialize a FlowThreeLegged object for database storage."""
    return base64.b64encode(pickle.dumps(value))
| Python |
# Copyright (C) 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities for Google App Engine
Utilities for making it easier to use the
Google API Client for Python on Google App Engine.
"""
__author__ = 'jcgregorio@google.com (Joe Gregorio)'
import pickle
from google.appengine.ext import db
from apiclient.oauth import OAuthCredentials
from apiclient.oauth import FlowThreeLegged
class FlowThreeLeggedProperty(db.Property):
  """Utility property that allows easy
  storage and retrieval of an
  apiclient.oauth.FlowThreeLegged.

  The flow is pickled into a db.Blob on write and unpickled on read.
  """

  # Tell the datastore API what the user-visible type is.
  data_type = FlowThreeLegged

  # For writing to datastore.
  def get_value_for_datastore(self, model_instance):
    """Pickle the flow into a Blob for storage."""
    flow = super(FlowThreeLeggedProperty,
                 self).get_value_for_datastore(model_instance)
    return db.Blob(pickle.dumps(flow))

  # For reading from datastore.
  def make_value_from_datastore(self, value):
    """Unpickle a stored Blob back into a FlowThreeLegged."""
    if value is None:
      return None
    return pickle.loads(value)

  def validate(self, value):
    """Ensure the value is a FlowThreeLegged (or None).

    Raises:
      db.BadValueError: if value is neither None nor a FlowThreeLegged.
    """
    if value is not None and not isinstance(value, FlowThreeLegged):
      # Fixed: BadValueError was referenced unqualified, which raised a
      # NameError instead of the intended validation error. The class
      # lives on the google.appengine.ext.db module.
      raise db.BadValueError('Property %s must be convertible '
                             'to a FlowThreeLegged instance (%s)' %
                             (self.name, value))
    return super(FlowThreeLeggedProperty, self).validate(value)

  def empty(self, value):
    # Any falsy value counts as empty.
    return not value
class OAuthCredentialsProperty(db.Property):
  """Utility property that allows easy
  storage and retrieval of
  apiclient.oauth.OAuthCredentials.

  The credential is pickled into a db.Blob on write and unpickled on
  read.
  """

  # Tell the datastore API what the user-visible type is.
  data_type = OAuthCredentials

  # For writing to datastore.
  def get_value_for_datastore(self, model_instance):
    """Pickle the credential into a Blob for storage."""
    cred = super(OAuthCredentialsProperty,
                 self).get_value_for_datastore(model_instance)
    return db.Blob(pickle.dumps(cred))

  # For reading from datastore.
  def make_value_from_datastore(self, value):
    """Unpickle a stored Blob back into an OAuthCredentials."""
    if value is None:
      return None
    return pickle.loads(value)

  def validate(self, value):
    """Ensure the value is an OAuthCredentials (or None).

    Raises:
      db.BadValueError: if value is neither None nor an OAuthCredentials.
    """
    if value is not None and not isinstance(value, OAuthCredentials):
      # Fixed: BadValueError was referenced unqualified, which raised a
      # NameError instead of the intended validation error. The class
      # lives on the google.appengine.ext.db module.
      raise db.BadValueError('Property %s must be convertible '
                             'to an OAuthCredentials instance (%s)' %
                             (self.name, value))
    return super(OAuthCredentialsProperty, self).validate(value)

  def empty(self, value):
    # Any falsy value counts as empty.
    return not value
class StorageByKeyName(object):
  """Store and retrieve a single credential to and from
  the App Engine datastore.

  This Storage helper presumes the Credentials have been stored as a
  CredentialsProperty on a datastore model class, and that entities are
  stored by key_name.
  """

  def __init__(self, model, key_name, property_name):
    """Constructor for Storage.

    Args:
      model: db.Model, model class
      key_name: string, key name for the entity that has the credentials
      property_name: string, name of the property that is a
        CredentialsProperty
    """
    self.model = model
    self.key_name = key_name
    self.property_name = property_name

  def _entity(self):
    # Fetch (creating on first use) the entity holding the credential.
    return self.model.get_or_insert(self.key_name)

  def get(self):
    """Retrieve Credential from datastore.

    Returns:
      Credentials
    """
    credential = getattr(self._entity(), self.property_name)
    # Wire the credential back to this store so refreshed tokens are
    # persisted automatically (when the object supports it).
    if credential and hasattr(credential, 'set_store'):
      credential.set_store(self.put)
    return credential

  def put(self, credentials):
    """Write a Credentials to the datastore.

    Args:
      credentials: Credentials, the credentials to store.
    """
    entity = self._entity()
    setattr(entity, self.property_name, credentials)
    entity.put()
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.