code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
import threading
import socket
import pickle
from communication import *
import walker
import random
import upload
class Server(threading.Thread):
    """Listening thread for the P2P protocol (Python 2 codebase).

    Accepts TCP connections on ``port``; each request is a pickled
    ``(name, cmd, data)`` tuple and is dispatched to the peer manager
    (``pman``) and file manager (``fman``).

    NOTE(review): requests are deserialized with ``pickle.loads`` on bytes
    received from the network -- unsafe against untrusted peers.
    """

    def __init__(self, pman, fman, port):
        threading.Thread.__init__(self)
        self._port = port   # TCP port to listen on
        self._pman = pman   # peer manager: plist/nlist bookkeeping
        self._fman = fman   # file manager: file lists and transfers

    def msg(self, cmd, data=None):
        """Serialize a reply as (own peer name, cmd, data)."""
        return pickle.dumps((self._pman.name, cmd, data))

    def run(self):
        inp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        inp.bind(("", self._port))
        inp.listen(5)
        while 1:
            data = ""
            conn, addr = inp.accept()
            # Read the full request; the sender signals completion by
            # shutting down its write side (recv then returns "").
            while 1:
                randominput = conn.recv(1024)
                if not randominput: break
                data += randominput
            name, cmd, data = pickle.loads(data)
            ##If request is a "Hello" message, return plist
            if cmd == HELLO:
                conn.send(self.msg(OK, self._pman.plist))
                ##Retrieve senders info, add to plist if not already present
                self._pman.pappend(data)
            ##Check whether peer is able to accept a new neighbor, add if possible
            elif cmd == NEIGHBOR:
                res = self._pman.nappend(data)
                if res == ACCEPTED:
                    res = self.msg(ACCEPTED, self._fman.flist)
                    conn.send(res)
                else:
                    conn.send(self.msg(REJECTED))
            ##Dropped as neighbor
            elif cmd == DROPPED:
                self._pman.nremove(data)
                conn.send(self.msg(OK))
                # Re-acquire neighbors if we just lost the last one.
                if len(self._pman.nlist) < 1:
                    self._pman.getneighbors()
            ##Return neighborhood list
            elif cmd == NLIST:
                conn.send(self.msg(OK, self._pman.nlist))
            ##Return number of neighbors
            elif cmd == NNEIGHBORS:
                conn.send(self.msg(OK, len(self._pman.nlist)))
            ##Reply pong to acknowledge peer is alive
            elif cmd == PING:
                # A ping may piggyback the sender's file change list.
                if data is not None:
                    self._fman.processchangelist(name, data)
                conn.send(self.msg(PONG))
            ##Update neighbors file list
            elif cmd == NFLIST:
                if self._pman.isneighbor(name):
                    self._fman.addnflist(name, data)
                conn.send(self.msg(OK))
            ##Requested file has been found
            elif cmd == FOUND:
                conn.send(self.msg(OK))
                fname, peer = data
                self._pman.file_found(fname, peer)
            ##Find request sent by neighbor
            elif cmd == FIND:
                conn.send(self.msg(OK))
                fname, ttl, req = data
                peer = self._fman.find(fname)
                if peer is not None:
                    # We have the file: notify the original requester.
                    pname, neighbors, ip, port = req
                    fromname, cmd, data = comm(ip, port, self._pman.name, FOUND, (fname, peer))
                else:
                    # Forward the random walk to a neighbor other than the
                    # sender while TTL remains.
                    ttl = ttl - 1
                    if ttl != 0 and len(self._pman.nlist) > 1:
                        prec = random.choice(self._pman.nlist)
                        # NOTE(review): loops forever if every neighbor is
                        # the sender -- only partially guarded by len > 1.
                        while prec[0] == name:
                            prec = random.choice(self._pman.nlist)
                        walk = walker.Walker(ttl, fname, prec, self._pman.plist[0], req)
                        walk.run()
            ##Download request by a peer
            elif cmd == GET:
                conn.send(self.msg(OK))
                fname, peer = data
                pname, neightbor, ip, port = peer
                up = upload.Uploader(ip, port, fname, self._pman)
                up.setDaemon(1)
                up.start()
            ##Incoming file content
            elif cmd == FILE:
                conn.send(self.msg(OK))
                fname, content = data
                self._fman.write_file(fname, content)
            conn.shutdown(socket.SHUT_WR)
            conn.close()
| Python |
import os
import sys
import peermanager
import filemanager
import pinger
import server
##Pretty print peer
def out_peer(peer):
    """Print one peer 4-tuple as ``name (neighbors, ip:port)``."""
    name, neighbors, ip, port = peer
    # Single parenthesized argument: identical output on Python 2 and 3.
    print(name + " (" + str(neighbors) + ", " + ip + ":" + str(port) + ")")
##Pretty print peer list
def out_list(plist):
    """Pretty-print every peer tuple in ``plist``, one per line."""
    for peer in plist:
        out_peer(peer)
##Main method including the basic user interface to interact with program
def main(name, neighbors, port):
    """Start a peer node (server + pinger daemon threads) and run the
    interactive console UI.

    Commands: hello, plist, nlist [-o FILE], flist, find, exit, random,
    nflists, cd, get, fget, walker.
    """
    pman = peermanager.PeerManager(name, neighbors, port)
    fman = filemanager.FileManager(pman)
    pman.fman = fman
    serve = server.Server(pman, fman, port)
    ping = pinger.Pinger(pman)
    walker_ttl = 3
    walker_k = 3
    serve.setDaemon(1)
    ping.setDaemon(1)
    serve.start()
    ping.start()
    while 1:
        try:
            strings = raw_input(name + "> ").split()
        except EOFError:
            # BUG FIX: the original 'pass' left 'strings' unbound (NameError
            # on first EOF) or stale; treat EOF as quitting the UI loop.
            break
        if not strings:
            # BUG FIX: blank input would raise IndexError on strings[0].
            continue
        if strings[0] == "hello":
            ip, port = strings[1].split(":")
            pman.hello(ip, int(port))
            print("Peer list:")
            out_list(pman.plist)
            print("\nNeighborhood:")
            out_list(pman.nlist)
        elif strings[0] == "plist":
            out_list(pman.plist)
        elif strings[0] == "nlist":
            # Collect peer names; '-o FILE' dumps the graphviz graph to FILE.
            peers = []
            out = False
            i = 1
            while i < len(strings):
                if strings[i] != "-o":
                    peers.append(strings[i])
                elif strings[i] == "-o":
                    out = True
                    f = open(strings[i + 1], "w")
                    if len(peers) == 0: peers.append(pman.name)
                    f.write(pman.creategraph(peers))
                    f.close()
                    i += 1  # BUG FIX: skip the filename so it is not parsed as a peer
                i += 1
            if not out:
                if len(peers) == 0: peers.append(pman.name)
                print(pman.creategraph(peers))
        elif strings[0] == "flist":
            flist = fman.getstructflist()
            print("File list:")
            for fil in flist:
                print(fil[0][0] + " " + ", ".join(fil[1:]))
        elif strings[0] == "find":
            print("Searching...")
            def out(fname, peer):
                print("File found: " + fname + " -> " + str(peer))
            pman.init_search(strings[1], walker_ttl, walker_k, out)
        elif strings[0] == "exit":
            sys.exit(0)
        elif strings[0] == "random":
            filemanager.addrandom(fman)
            print("Random files added from " + fman.dir)
        elif strings[0] == "nflists":
            print(fman.nflists)
        elif strings[0] == "cd":
            fman._dir += strings[1] + "/"
            print(fman._dir)
        elif strings[0] == "get":
            ip, port = strings[1].split(":")
            print("Download started...")
            fman.fileget(ip, int(port), strings[2])
            print("Download finished")
        elif strings[0] == "fget":
            print("Searching...")
            # Default-arg binding captures pman/fman for the callback.
            def download(fname, peer, pman = pman, fman = fman):
                print(fname + " " + str(peer) + " " + fman._dir)
                name, neighbors, ip, port = peer
                if name != pman.name:
                    print("Download started...")
                    print(ip + ", " + str(port) + ", " + fname)
                    fman.fileget(ip, port, fname)
                    print("Download finished")
                else:
                    print("You already have the file")
            pman.init_search(strings[1], walker_ttl, walker_k, download)
        elif strings[0] == "walker":
            walker_k = int(strings[1])
            walker_ttl = int(strings[2])
            print("Walker: k=" + str(walker_k) + " ttl=" + str(walker_ttl))

if __name__ == "__main__":
    main(sys.argv[1], int(sys.argv[2]), int(sys.argv[3]))
| Python |
import sys
def main(amount):
    """Print an ``nlist`` console command naming peers p1..p<amount>,
    directed to the output file trial.dot."""
    res = "nlist"
    # Cleaner than the original's in-loop 'i += 1' renumbering trick.
    for i in range(1, amount + 1):
        res += " p" + str(i)
    res += " -o trial.dot"
    print(res)

if __name__ == "__main__":
    main(int(sys.argv[1]))
| Python |
import gc
import subprocess
import random
import time

# Spawn five peer processes, each greeting the bootstrap node at
# 127.0.0.1:5990 through its stdin.
gc.disable()
name = 2
port = 5991
processes = []  # NOTE(review): never appended to -- spawned Popen handles are dropped
for i in range(5):
    p = subprocess.Popen(['python', 'main.py', 'p' + str(name), str(random.randint(1, 10)), str(port)], shell=False, stdin=subprocess.PIPE)
    # Feed the 'hello' command and close stdin (communicate waits for exit).
    p.communicate(input='hello 127.0.0.1:5990')[0]
    name += 1
    port += 1
    time.sleep(2)
| Python |
from communication import *
class Walker:
    """One hop of a random-walk file search: forwards a FIND request to a
    chosen neighbor on behalf of the original requester."""

    def __init__(self, ttl, fname, receiverpeer, senderpeer, requester):
        self._rec_peer = receiverpeer    # peer tuple the walk is forwarded to
        self._sender_peer = senderpeer   # this node's own peer tuple
        self._requester = requester      # original requester's peer tuple
        self._fname = fname              # file name being searched for
        self._ttl = ttl                  # remaining hops

    def run(self):
        """Send the FIND message to the receiver peer."""
        name, neighbors, ip, port = self._rec_peer
        fromname, cmd, data = comm(ip, port, self._sender_peer[0], FIND,
                                   (self._fname, self._ttl, self._requester))
import os
import sys
import time
import socket
import pickle
import random
import threading
class Peer(threading.Thread):
    """Early monolithic peer implementation (Python 2): discovery via string
    commands ('hello', 'neighbor', 'nlist', 'ping') with pickled payloads.

    NOTE(review): pickle.loads on network data is unsafe for untrusted peers.
    """

    ##Initiate the peer.
    def __init__(self, name, neighbors, port):
        threading.Thread.__init__(self)
        self._name = name              # this peer's name
        self._neighbors = neighbors    # max number of neighbors accepted
        self._port = port              # listening port
        # plist[0] is always this peer's own 4-tuple.
        self._plist = [(name, neighbors, "127.0.0.1", port)]
        self._nlist = []               # current neighbors (4-tuples)
        self._dir = os.getcwd() + "/files/"
        self._flist = [name]           # shared files, first entry is own name

    ##Hello method which also searches for neighbors
    def hello(self, ip, port):
        self.traverseplist(self.boot(ip, port))
        self.getneighbors()

    ##Bootstrapping for Hello method for peer discovery with bootstrap ip
    def boot(self, ip, port):
        out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        out.connect((ip, port))
        out.send("hello")
        out.send(pickle.dumps(self._plist[0]))
        out.shutdown(socket.SHUT_WR)
        ##Receive plist from connected peer
        data = ""
        while 1:
            inp = out.recv(1024)
            if not inp: break
            data += inp
        out.close()
        ##Reconstruct bufferedplist from bootstrapped ip and return
        return self.construct(data)

    ##Traverse each peer in plist with "Hello" requests - recursive for the win
    def traverseplist(self, plist):
        print(self._plist)
        for name, neighbors, ip, port in plist:
            print(ip)
            print(port)
            if not self.isintuplelist(name, self._plist):
                self._plist.append((name, neighbors, ip, port))
                self.traverseplist(self.boot(ip, port))

    ##Get neighbors by asking peers with most to fewest max neighbors.
    def getneighbors(self):
        i = 0
        priolist = self.createprioritylist()
        while i < len(priolist) and len(self._nlist) != self._neighbors:
            name, neighbors, ip, port = priolist[i]
            out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
            out.connect((ip, port))
            out.send("neighbor")
            out.send(pickle.dumps(self._plist[0]))
            out.shutdown(socket.SHUT_WR)
            ##Receive reply from peer
            data = ""
            while 1:
                inp = out.recv(1024)
                if not inp: break
                data += inp
            out.close()
            ##Interpret data and add neighbor
            if data.find("accepted") != -1:
                self._nlist.append(priolist[i])
            i += 1

    ##Creates a list with neighborhood priorities
    def createprioritylist(self):
        priolist = list(self._plist)
        priolist.pop(0)  # exclude ourselves
        priolist.sort(self.compare)  # Python 2 cmp-style sort
        return priolist

    ##Compare two peers by max number of neighbors - used by createprioritylist
    def compare(self, p1, p2):
        return cmp(p1[1], p2[1])  # Python 2 builtin

    ## Pings a neighbor every 60 seconds to check it is still alive;
    ## unresponsive neighbors are removed from the nlist.
    def pingneighbors(self):
        i = 0
        while 1:
            if len(self._nlist) != 0:
                name, neighbors, ip, port = self._nlist[i % len(self._nlist)]
                out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
                out.connect((ip, port))
                out.send("ping")
                out.send(pickle.dumps(self._flist))
                out.shutdown(socket.SHUT_WR)
                data = ""
                while 1:
                    out.settimeout(5.0)
                    # NOTE(review): a timeout raises socket.timeout, which is
                    # not caught here -- the empty-data check below only fires
                    # when the peer closes the connection immediately.
                    inp = out.recv(1024)
                    if not inp: break
                    data += inp
                # BUG FIX: original used 'data is ""' (identity on a literal);
                # equality is the correct emptiness test.
                if data == "":
                    print("neighbor removed: " + str(self._nlist[i % len(self._nlist)]))
                    self._nlist.pop(i % len(self._nlist))
                out.close()
                i += 1
                print(data)
            time.sleep(60)

    ##Deconstruct plist
    def deconstruct(self, alist):
        plistpickle = pickle.dumps(alist)
        return plistpickle

    ##Reconstruct plist
    def construct(self, plistpickle):
        plist = pickle.loads(plistpickle)
        return plist

    ##Method for checking whether peer is already present in a given 4-tuple list
    def isintuplelist(self, name, list):
        for name2, neighbors, ip, port in list:
            if name == name2:
                return 1
        return 0

    ##Method for creating graphviz document
    def creategraph(self, peers):
        selectedpeers = []
        neighborhood = []
        # Resolve requested names against the known peer list.
        for name, neighbors, ip, port in self._plist:
            for p in peers:
                if name == p:
                    selectedpeers.append((name, neighbors, ip, port))
        # Collect undirected edges between selected peers only.
        for apeer in selectedpeers:
            nlist = self.getnlist(apeer)
            for npeer in nlist:
                if not (apeer, npeer) in neighborhood and not (npeer, apeer) in neighborhood and npeer in selectedpeers:
                    neighborhood.append((apeer, npeer))
        graph = "graph network {\n"
        for name, neighbors, ip, port in selectedpeers:
            graph += "\"" + name + "(" + str(neighbors) + ")\";\n"
        for apeer, npeer in neighborhood:
            graph += "\"" + apeer[0] + "(" + str(apeer[1]) + ")\" -- \"" + npeer[0] + "(" + str(npeer[1]) + ")\";\n"
        graph += "}\n"
        return graph

    ##Method for tedious socket opening and closing to retrieve nlist for creategraph method
    def getnlist(self, peer):
        name, neighbors, ip, port = peer
        out = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        out.connect((ip, port))
        out.send("nlist")
        out.shutdown(socket.SHUT_WR)
        data = ""
        while 1:
            inp = out.recv(1024)
            if not inp: break
            data += inp
        out.close()
        nlist = pickle.loads(data)
        return nlist

    ##Create flist with random number of files from a given directory.
    ##Files are represented by (name, size) 2-tuples.
    def addrandomfiles(self):
        files = os.listdir(self._dir)
        amount = random.randint(1, len(files))
        for i in range(amount):
            name, size = files[i], os.stat(self._dir + files[i]).st_size
            self._flist.append((name, size))

    ##Listening method running in thread to constantly listen for requests
    def listen(self):
        inp = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        inp.bind(("", self._port))
        inp.listen(5)
        while 1:
            data = ""
            conn, addr = inp.accept()
            while 1:
                randominput = conn.recv(1024)
                if not randominput: break
                data += randominput
            ##If request is a "Hello" message, return plist
            if data.find("hello") != -1:
                conn.send(self.deconstruct(self._plist))
                conn.shutdown(socket.SHUT_WR)
                ##Retrieve senders info, add to plist if not already present
                buff = pickle.loads(data[5:])
                if not self.isintuplelist(buff[0], self._plist):
                    self._plist.append(buff)
            ##Check whether peer is able to accept a new neighbor, add if possible
            if data.find("neighbor") != -1:
                buff = pickle.loads(data[8:])
                if len(self._nlist) < self._neighbors:
                    if not self.isintuplelist(buff[0], self._nlist):
                        conn.send("accepted")
                        self._nlist.append(buff)
                conn.shutdown(socket.SHUT_WR)
            ##Return neighborhood list
            if data == "nlist":
                conn.send(self.deconstruct(self._nlist))
                conn.shutdown(socket.SHUT_WR)
            ##Reply pong to acknowledge peer is alive
            if data.find("ping") != -1:
                ##TODO: We get the peers flist here. Implement handling of this.
                conn.send("pong")
                conn.shutdown(socket.SHUT_WR)
            print(data)
            conn.close()
##Main method including the basic user interface to interact with program
def main(name, neighbors, port):
    """Start a Peer with a daemon listener thread and run the console UI.

    Commands: hello, plist, nlist [-o FILE], flist, find, exit, continue.
    """
    peer = Peer(name, int(neighbors), int(port))
    listen = threading.Thread(name="listen", target=peer.listen)
    listen.setDaemon(1)
    #ping = threading.Thread(name="ping", target=peer.pingneighbors)
    #ping.setDaemon(1)
    listen.start()
    #ping.start()
    while 1:
        try:
            strings = raw_input(name + "> ").split()
        except EOFError:
            break
        if not strings:
            # Guard: blank input would raise IndexError on strings[0].
            continue
        if strings[0] == "hello":
            ip, port = strings[1].split(":")
            print(ip)
            print(port)
            peer.hello(ip, int(port))
        elif strings[0] == "plist":
            print(peer._plist)
        elif strings[0] == "nlist" and len(strings) > 1:
            peers = []
            i = 1
            while i < len(strings):
                if strings[i] != "-o":
                    peers.append(strings[i])
                elif strings[i] == "-o":
                    f = open(strings[i + 1], "w")
                    f.write(peer.creategraph(peers))
                    f.close()
                    i += 1  # BUG FIX: skip the filename so it is not parsed as a peer
                i += 1
        elif strings[0] == "nlist":
            print(peer._nlist)
        elif strings[0] == "flist":
            peer.addrandomfiles()
            print(peer._flist)
        elif strings[0] == "find":
            print("Not yet implemented")
        elif strings[0] == "exit":
            sys.exit(0)
        elif strings[0] == "continue":
            # Busy-wait forever; used when driven by a test harness.
            while 1: pass

if __name__ == "__main__":
    main(sys.argv[1], sys.argv[2], sys.argv[3])
| Python |
import sys
import main
import time
import random
import threading
class test(threading.Thread):
    """Thread that runs a whole peer node in-process via main.main()."""

    def __init__(self, name, port):
        threading.Thread.__init__(self)
        self._name = name
        self._port = port

    def run(self):
        p = main.main("p" + str(self._name), random.randint(1, 10), self._port)

# Spawn three in-process peers, then park the main thread.
name = 2
port = 5991
for i in range(3):
    t = test(name, port)
    t.start()
    # NOTE(review): sys.stdin is read-only -- this write raises IOError and
    # cannot feed the 'hello' command to the peers; likely a leftover.
    sys.stdin.write("hello 127.0.0.1:5990")
    name += 1
    port += 1
    time.sleep(3)
while 1:
    pass
| Python |
# Mayan Calendar Structure initialization.
from mayan_cal import Base
# Class and Type initialization.
from types import ClassType
# Snapshot of the module namespace; instantiate_singletons() mutates it,
# replacing class objects by their singleton instances.
# NOTE(review): this shadows the vars() builtin within this module.
vars = vars()

def instantiate_singletons():
    """Instantiate each class in this module that derives from Base but is
    not one of the intermediate models between Base and the data classes.

    Each instance is assigned back to the module under the class's name,
    making every data class a singleton.  Done in phases so that
    assignment/resolution of qualities happens per instance, not per class:

      First phase:  instantiate seasons, assign to color-directions.
      Second phase: instantiate colors, timecells, chakras and digits.
      Third phase:  instantiate the tribes, tones and lunar phases.
    """
    for (n, v) in vars.items():
        # Only old-style classes derived from Base qualify (Python 2).
        if type(v) is not ClassType or not issubclass(v, Base):
            continue
        # Skip the intermediate models themselves.
        if v in (Base, ColorDirection, Chakra, Digit, Timecell, Tribe,
                 Tone, LunarPhase, Castle, Season):
            continue
        vars[n] = v()
## Intermediary class models.
class Season(Base):
    class Meta:
        attributes = ('name', 'color', 'guardian', 'aspects')

class ColorDirection(Base):
    class Meta:
        attributes = ('color', 'direction', 'effect', 'season')
    index = dict()  # Shared by subclasses with no conflicts.

class Chakra(Base):
    class Meta:
        attributes = ('name', 'greek_name', 'position')
    index = dict()  # Shared by subclasses with no conflicts.

class Timecell(Base):
    class Meta:
        attributes = ('name',)
    index = dict()  # Shared by subclasses with no conflicts.

class Digit(Base):
    class Meta:
        attributes = ('name',)
    index = dict()  # Shared by subclasses with no conflicts.

class Tribe(Base):
    class Meta:
        attributes = ('number', 'name', 'color', 'chakra', 'digit',
                      'timecell', 'action', 'power', 'essence', 'birthday')
        # XXX pprint.saferepr fails!
        # + ('antipode', 'analog', 'occult')
    index = dict()  # Shared by subclasses with no conflicts.

    def __call__(self, birthday):
        # Calling the singleton tags it with a birthday and returns itself.
        self.birthday = birthday
        return self

class Tone(Base):
    class Meta:
        attributes = ('number', 'name', 'power', 'action', 'essence')
    index = dict()  # Shared by subclasses with no conflicts.

class LunarPhase(Base):
    class Meta:
        attributes = ('name', 'phase', 'attributes', 'effects')

class Castle(Base):
    class Meta:
        attributes = ('name', 'color', 'effect')
## Seasons -- associated with color-directions.
class SeasonOfTheDog(Season):
    name = 'White Northern Dog Season of Heart'
    guardian = 'dog'
    aspects = ('emotion', 'mammality', 'heart')

class SeasonOfTheSerpent(Season):
    name = 'Red Eastern Serpent Season of Instinct'
    guardian = 'serpent'
    aspects = ('instinct', 'primality')

class SeasonOfTheEagle(Season):
    name = 'Blue Western Eagle Season of Vision'
    guardian = 'eagle'
    aspects = ('mental', 'planetary', 'visionary')

class SeasonOfTheSun(Season):
    name = 'Yellow Southern Sun Season of Enlightenment'
    guardian = 'sun'
    aspects = ('enlightenment', 'solarity')

# Instantiate seasons for color-directions.
# Todo: reverse assignment of colors to the seasons in trailing resolution phase.
instantiate_singletons()
## Colors -- shared by tribes
# Colors have corresponding directions, and the two are always associated
# with each other.  They are also associated with the four seasons.
# (The Season classes referenced below are already singleton instances at
# this point -- see instantiate_singletons() above.)
class WhiteNorth(ColorDirection):
    color = 'White'
    direction = 'North'
    effect = 'refines'
    season = SeasonOfTheDog

class RedEast(ColorDirection):
    color = 'Red'
    direction = 'East'
    effect = 'initiates'
    season = SeasonOfTheSerpent

class BlueWest(ColorDirection):
    color = 'Blue'
    direction = 'West'
    effect = 'transforms'
    season = SeasonOfTheEagle

class YellowSouth(ColorDirection):
    color = 'Yellow'
    direction = 'South'
    effect = 'ripens'
    season = SeasonOfTheSun
## Chakras -- shared by tribes
class Crown(Chakra):
    name = 'Crown'
    greek_name = 'DALI'
    position = 7

class ThirdEye(Chakra):
    name = 'Third Eye'
    greek_name = 'GAMMA'
    position = 6

class Throat(Chakra):
    name = 'Throat'
    greek_name = 'ALPHA'
    position = 5

class Heart(Chakra):
    name = 'Heart'
    greek_name = 'SILIO'
    position = 4

class SolarPlexus(Chakra):
    name = 'Solar Plexus'
    greek_name = 'LIMI'
    position = 3

class SecondChakra(Chakra):
    name = 'Second Chakra'
    greek_name = 'KALI'
    position = 2

class Root(Chakra):
    name = 'Root'
    greek_name = 'SELI'
    position = 1

## Timecells -- shared by tribes
class Input(Timecell):
    name = 'INPUT'

class Store(Timecell):
    name = 'STORE'

class Process(Timecell):
    name = 'PROCESS'

class Output(Timecell):
    name = 'OUTPUT'

class Matrix(Timecell):
    name = 'MATRIX'
## Digits -- Unique to tribe
# NOTE(review): only RightMiddleFinger carries attributes; the rest are
# bare placeholders, presumably pending the same treatment.
class RightThumb(Digit):
    pass

class RightIndexFinger(Digit):
    pass

class RightMiddleFinger(Digit):
    name = 'right middle finger'
    side = 'right'
    place = 'middle'
    kind = 'finger'
    # code = ??

class RightRingFinger(Digit):
    pass

class RightPinkyFinger(Digit):
    pass

class LeftThumb(Digit):
    pass

class LeftIndexFinger(Digit):
    pass

class LeftMiddleFinger(Digit):
    pass

class LeftRingFinger(Digit):
    pass

class LeftPinkyFinger(Digit):
    pass

class RightBigToe(Digit):
    pass

class RightIndexToe(Digit):
    pass

class RightMiddleToe(Digit):
    pass

class RightRingToe(Digit):
    pass

class RightPinkyToe(Digit):
    pass

class LeftBigToe(Digit):
    pass

class LeftIndexToe(Digit):
    pass

class LeftMiddleToe(Digit):
    pass

class LeftRingToe(Digit):
    pass

class LeftPinkyToe(Digit):
    pass

# Instantiate constructs defined up to this point.
instantiate_singletons()
## Tribes -- 20 Solar Seals.
# Note that tribe #0 == #20 (YellowSun). See the resolution phase below.
# ! Also, the glyphs for BlueNight and BlueMonkey look similar.
# The antipode/analog/occult attributes are name strings here; they are
# resolved to Tribe instances in the resolution phase below.
class RedDragon(Tribe):
    number = 1
    name = 'Red Dragon'
    color = RedEast
    chakra = Throat
    digit = RightIndexFinger
    timecell = Input
    action = 'nurtures'
    power = 'birth'
    essence = 'being'
    # Need to be resolved.
    antipode = 'BlueNight'
    analog = 'WhiteMirror'
    occult = 'YellowSun'

class WhiteWind(Tribe):
    number = 2
    name = 'White Wind'
    color = WhiteNorth
    chakra = Heart
    digit = RightMiddleFinger
    timecell = Input
    action = 'communicates'
    power = 'spirit'
    essence = 'breath'
    antipode = 'YellowHuman'
    analog = 'RedEarth'
    occult = 'BlueStorm'

class BlueNight(Tribe):
    number = 3
    name = 'Blue Night'
    color = BlueWest
    chakra = SolarPlexus
    digit = RightRingFinger
    timecell = Input
    action = 'dreams'
    power = 'intuition'
    essence = 'abundance'
    antipode = 'RedSkywalker'
    analog = 'YellowStar'
    occult = 'WhiteMirror'

class YellowSeed(Tribe):
    number = 4
    name = 'Yellow Seed'
    color = YellowSouth
    chakra = Root
    digit = RightPinkyFinger
    timecell = Input
    action = 'targets'
    power = 'flowering'
    essence = 'awareness'
    antipode = 'WhiteWizard'
    analog = 'BlueEagle'
    occult = 'RedEarth'

class RedSerpent(Tribe):
    number = 5
    name = 'Red Serpent'
    color = RedEast
    chakra = Crown
    digit = RightBigToe
    timecell = Store
    action = 'survives'
    power = 'life force'
    essence = 'instinct'
    antipode = 'BlueEagle'
    analog = 'WhiteWizard'
    occult = 'YellowWarrior'

class WhiteWorldbridger(Tribe):
    number = 6
    name = 'White Worldbridger'
    color = WhiteNorth
    chakra = Throat
    digit = RightIndexToe
    timecell = Store
    action = 'equalizes'
    power = 'death'
    essence = 'opportunity'
    antipode = 'YellowWarrior'
    analog = 'RedSkywalker'
    occult = 'BlueEagle'

class BlueHand(Tribe):
    number = 7
    name = 'Blue Hand'
    color = BlueWest
    chakra = Heart
    digit = RightMiddleToe
    timecell = Store
    action = 'knows'
    power = 'accomplishment'
    essence = 'healing'
    antipode = 'RedEarth'
    analog = 'YellowHuman'
    occult = 'WhiteWizard'

class YellowStar(Tribe):
    number = 8
    name = 'Yellow Star'
    color = YellowSouth
    chakra = SolarPlexus
    digit = RightRingToe
    timecell = Store
    action = 'beautifies'
    power = 'elegance'
    essence = 'art'
    antipode = 'WhiteMirror'
    analog = 'BlueNight'
    occult = 'RedSkywalker'

class RedMoon(Tribe):
    number = 9
    name = 'Red Moon'
    color = RedEast
    chakra = Root
    digit = RightPinkyToe
    timecell = Process
    action = 'purifies'
    power = 'universal water'
    essence = 'flower'
    antipode = 'BlueStorm'
    analog = 'WhiteDog'
    occult = 'YellowHuman'

class WhiteDog(Tribe):
    number = 10
    name = 'White Dog'
    color = WhiteNorth
    chakra = Crown
    digit = LeftThumb
    timecell = Process
    action = 'loves'
    power = 'loyalty'
    essence = 'heart'
    antipode = 'YellowSun'
    analog = 'RedMoon'
    occult = 'BlueMonkey'

class BlueMonkey(Tribe):
    number = 11
    name = 'Blue Monkey'
    color = BlueWest
    chakra = Throat
    digit = LeftIndexFinger
    timecell = Process
    action = 'plays'
    power = 'magic'
    essence = 'illusion'
    antipode = 'RedDragon'
    analog = 'YellowWarrior'
    occult = 'WhiteDog'

class YellowHuman(Tribe):
    number = 12
    name = 'Yellow Human'
    color = YellowSouth
    chakra = Heart
    digit = LeftMiddleFinger
    timecell = Process
    action = 'influences'
    power = 'free will'
    essence = 'wisdom'
    antipode = 'WhiteWind'
    analog = 'BlueHand'
    occult = 'RedMoon'

class RedSkywalker(Tribe):
    number = 13
    name = 'Red Skywalker'
    color = RedEast
    chakra = SolarPlexus
    digit = LeftRingFinger
    timecell = Output
    action = 'explores'
    power = 'space'
    essence = 'wakefulness'
    antipode = 'BlueNight'
    analog = 'WhiteWorldbridger'
    occult = 'YellowStar'

class WhiteWizard(Tribe):
    number = 14
    name = 'White Wizard'
    color = WhiteNorth
    chakra = Root
    digit = LeftPinkyFinger
    timecell = Output
    action = 'enchants'
    power = 'timelessness'
    essence = 'receptivity'
    antipode = 'YellowSeed'
    analog = 'RedSerpent'
    occult = 'BlueHand'

class BlueEagle(Tribe):
    number = 15
    name = 'Blue Eagle'
    color = BlueWest
    chakra = Crown
    digit = LeftBigToe
    timecell = Output
    action = 'creates'
    power = 'vision'
    essence = 'mind'
    antipode = 'RedSerpent'
    analog = 'YellowSeed'
    occult = 'WhiteWorldbridger'

class YellowWarrior(Tribe):
    number = 16
    name = 'Yellow Warrior'
    color = YellowSouth
    chakra = Throat
    digit = LeftIndexToe
    timecell = Output
    action = 'questions'
    power = 'intelligence'
    essence = 'fearlessness'
    antipode = 'WhiteWorldbridger'
    analog = 'BlueMonkey'
    occult = 'RedSerpent'

class RedEarth(Tribe):
    number = 17
    name = 'Red Earth'
    color = RedEast
    chakra = Heart
    digit = LeftMiddleToe
    timecell = Matrix
    action = 'evolves'
    power = 'synchronicity'
    essence = 'navigation'
    antipode = 'BlueHand'
    analog = 'WhiteWind'
    occult = 'YellowSeed'

class WhiteMirror(Tribe):
    number = 18
    name = 'White Mirror'
    color = WhiteNorth
    chakra = SolarPlexus
    digit = LeftRingToe
    timecell = Matrix
    action = 'evolves'
    power = 'endlessness'
    essence = 'order'
    antipode = 'YellowStar'
    analog = 'RedDragon'
    occult = 'BlueNight'

class BlueStorm(Tribe):
    number = 19
    name = 'Blue Storm'
    color = BlueWest
    chakra = Root
    digit = LeftPinkyToe
    timecell = Matrix
    action = 'catalyzes'
    power = 'self-generation'
    essence = 'energy'
    antipode = 'RedMoon'
    analog = 'YellowSun'
    occult = 'WhiteWind'

class YellowSun(Tribe):
    number = 20 # Also, 0.
    name = 'Yellow Sun'
    color = YellowSouth
    chakra = Crown
    digit = RightThumb
    timecell = Matrix
    action = 'enlightens'
    power = 'universal fire'
    essence = 'life'
    antipode = 'WhiteDog'
    analog = 'BlueStorm'
    occult = 'RedDragon'
## Tones
class Magnetic(Tone):
    number = 1
    name = 'Magnetic'
    power = 'unify'
    action = 'attract'
    essence = 'purpose'

class Lunar(Tone):
    number = 2
    name = 'Lunar'
    power = 'polarize'
    action = 'stabilize'
    essence = 'challenge'

class Electric(Tone):
    number = 3
    name = 'Electric'
    power = 'activate'
    action = 'bond'
    essence = 'service'

class SelfExisting(Tone):
    number = 4
    name = 'Self-Existing'
    power = 'define'
    action = 'measure'
    essence = 'form'

class Overtone(Tone):
    number = 5
    name = 'Overtone'
    power = 'empower'
    action = 'command'
    essence = 'radiance'

class Rhythmic(Tone):
    number = 6
    name = 'Rhythmic'
    power = 'organize'
    action = 'balance'
    essence = 'equality'

class Resonant(Tone):
    number = 7
    name = 'Resonant'
    power = 'channel'
    action = 'inspire'
    essence = 'attunement'

class Galactic(Tone):
    number = 8
    name = 'Galactic'
    power = 'harmonize'
    action = 'model'
    essence = 'integrity'

class Solar(Tone):
    number = 9
    name = 'Solar'
    power = 'pulse'
    action = 'realize'
    essence = 'intention'

class Planetary(Tone):
    number = 10
    name = 'Planetary'
    power = 'perfect'
    action = 'produce'
    essence = 'manifestation'

class Spectral(Tone):
    number = 11
    name = 'Spectral'
    power = 'dissolve'
    action = 'release'
    essence = 'liberation'

class Crystal(Tone):
    number = 12
    name = 'Crystal'
    power = 'dedicate'
    action = 'universalize'
    essence = 'cooperation'

class Cosmic(Tone):
    number = 13
    name = 'Cosmic'
    power = 'endure'
    action = 'transcend'
    essence = 'presence'
## Lunar Phases
# phase is the fraction of the cycle; negative values mark the waning half.
class NewMoon(LunarPhase):
    name = 'New Moon'
    phase = 0.0 # (Or, -1.0)
    attributes = ('instinct', 'new beginning', 'seed')
    effects = ('emerge', 'renew', 'generate', 'plan', 'establish', 'initiate', 'awaken')

class Crescent(LunarPhase):
    name = 'Crescent'
    phase = 0.25
    attributes = ('foundation', 'resistance', 'resources')
    effects = ('venture', 'gather', 'fortify', 'prepare', 'filter', 'externalize',
               'advance', 'expand')

class FirstQuarter(LunarPhase):
    name = 'First Quarter'
    phase = 0.5
    attributes = ('extension', 'courage', 'balance')
    effects = ('organize', 'confront', 'motivate', 'assert', 'materialize', 'form',
               'empower', 'pursue')

class Gibbous(LunarPhase):
    name = 'Gibbous'
    phase = 0.75
    attributes = ('analysis', 'strategy', 'focus')
    effects = ('mature', 'develop', 'articulate', 'perfect', 'refine', 'discern',
               'evaluate', 'observe')

class FullMoon(LunarPhase):
    name = 'Full Moon'
    phase = 1.0
    attributes = ('illumination', 'revelation', 'climax')
    effects = ('celebrate', 'beautify', 'produce', 'receive', 'express',
               'distribute', 'intuit', 'surrender', 'fulfill')

class Disseminating(LunarPhase):
    name = 'Disseminating'
    phase = -0.25
    attributes = ('wisdom', 'self-love', 'returning')
    effects = ('heal', 'share', 'teach', 'communicate', 'demonstrate',
               'release', 'review', 'amend')

class LastQuarter(LunarPhase):
    name = 'Last Quarter'
    phase = -0.5
    attributes = ('shifting', 'visionary')
    effects = ('purify', 'assimilate', 'meditate', 'rest', 'reap',
               'recuperate', 'honor', 'reflect', 'evolve')

class Balsamic(LunarPhase):
    name = 'Balsamic'
    phase = -0.75
    attributes = ('compost', 'dormancy', 'dreamtime', 'prayer')
    effects = ('integrate', 'allow', 'nourish', 'dissolve', 'aspire')
# Initiate again for tribes, tones and phases.
instantiate_singletons()
# Resolution and back-referencing.
for (n, v) in vars.items():
if isinstance(v, Tribe):
v.index[n] = v
v.index[v.number] = v
# Cyclic -- YellowSun.
if v.number == 20:
v.index[0] = v
# Many-to-many.
v.color.index.setdefault(n, []).append(v)
v.chakra.index.setdefault(n, []).append(v)
v.timecell.index.setdefault(n, []).append(v)
# One-to-one.
v.digit.tribe = v
v.digit.index[n] = v.digit
v.antipode = vars[v.antipode]
v.analog = vars[v.analog]
v.occult = vars[v.occult]
if isinstance(v, Tone):
v.index[n] = v
v.index[v.number] = v
del vars, n, v
class Day(Base):
    # Pre-calculated store for day information.
    # An affirmation is derived from a Day.
    class Meta:
        attributes = ('tribe', 'tone', 'guide')

    def __init__(self, day, tribe, tone, guide):
        self.day = day      # The date
        self.tribe = tribe  # solar seal singleton for this day
        self.tone = tone    # galactic tone singleton for this day
        self.guide = guide  # guiding tribe

    def gettribe(self):
        """Accessor kept for existing callers (attribute is also public)."""
        return self.tribe
| Python |
# Copyright 2008 Clint Banis. All rights reserved. Not for reproduction.
from mayan_cal import gregarian_date
from mayan_cal.affirmation import daily_affirmation, todays_affirmation
# Shorthand aliases for the affirmation API.
af = daily_affirmation
today = todays_affirmation()

class Friend:
    """A person whose daily affirmation is derived from their birthday."""

    def __init__(self, year, month, day):
        self.birthday = gregarian_date(year, month, day)
        self.af = daily_affirmation(self.birthday)

    def __str__(self):
        return str(self.af)

clint = Friend(1983, 7, 26)
| Python |
#!/usr/local/bin/python
# Extra library path baked into the frozen executable's module search path.
INCLUDE_PATH = 'C:\\Documents and Settings\\Clint\\My Documents\\pythonlib'

# distutils
def setup():
    """Build the Windows executable with cx_Freeze.

    NOTE(review): the local import rebinds the name 'setup' inside this
    function to cx_Freeze's setup -- confusing but functional.
    """
    from cx_Freeze import setup, Executable
    import sys
    setup(name = "setMayanWallpaper",
          version = "0.1",
          description = "Mayan Windows Desktop Wallpaper rendering.",
          executables = [Executable("mayan_cal/script.py",
                                    path = sys.path + [INCLUDE_PATH])],
          packages = ['mayan_cal'])
# Modules bundled into the frozen build.
MODULES = ('mayan_cal', 'mayan_cal.structure', 'mayan_cal.calculate',
           'mayan_cal.affirmation', 'mayan_cal.drawing',
           'mayan_cal.desktop')

def doublequote_repr(o):
    """Return str(o) wrapped in double quotes, for Windows shell arguments.

    Prefixing a single quote forces repr to emit a double-quoted literal;
    slicing off the leading quote pair leaves the escaped body plus the
    closing double quote.
    """
    return '"' + repr("'" + str(o))[2:]
def main():
    """Freeze the wallpaper script and assemble its distribution folder
    by shelling out to freeze, svn export, mkdir and cp."""
    # Todo: Don't use --include-modules, use --package
    target_dir = "Mayan Calendar"
    target_name = "MayanWallpaper.exe"
    prolog = "cmd /C \"python C:\\cygwin\\bin\\freeze --include-path=. --include-modules="
    modules = ','.join(MODULES)
    epilog = " mayan_cal\\script.py --target-dir=%s --target-name=%s\"" % \
             (doublequote_repr(target_dir), doublequote_repr(target_name))
    from os import system as system_command
    from os.path import join as joinpath
    verbose = True
    def shell_command(cmd):
        # Echo each command before running it when verbose.
        if verbose:
            print(cmd)
        return system_command(cmd)
    mkdir = lambda path: shell_command("mkdir -p " + doublequote_repr(path))
    copy = lambda path, *files: shell_command("cp -Rf %s %s" % \
                (" ".join(doublequote_repr(f) for f in files),
                 doublequote_repr(path)))
    export = lambda tree, path: shell_command(("svn export " + \
                "file:///repository/trunk/MayanCalendar/%s" + \
                " %s") % (tree, doublequote_repr(path)))
    # Run cx freeze.
    shell_command(prolog + modules + epilog)
    # Copy glyph data files.
    export("glyphs", joinpath(target_dir, "glyphs"))
    # Make a temporary directory tree.
    mkdir(joinpath(target_dir, "affirmations", "daily"))
    # Copy preferences ini and shortcut.
    copy(target_dir, "Preferences.reg", "Wallpaper Update.lnk")
    # Generate post-install script for putting data files and output folder
    # into Program Files, and updating the registry with the config file.
    # Generate short-cuts.
    # Generate schedule-task.
    # Generate registry-config.
    # Platform.createShortcut(joinpath("dist", "Mayan Wallpaper Update"),
    #                         target_path = 'mayanCal.exe', run_in = '',
    #                         '--verbose')

if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Clint Banis.
# All rights reserved.
#
# Draws a daily affirmation picture with tribe and tone glyphs and
# affirmation text.
#
# The Glyph class organizes the operations against folders.
# Operations include:
# - Generating a wallpaper image for the day.
# - Setting the wallpaper via the Windows Shell API.
# - Managing generated files.
#
# --
#
# GENERAL DESIGN:
# - Object oriented drawing
# - Paints the tone and tribe glyphs, the fully toned name of the day,
# and the lines of the daily affirmation across the bottom.
# - Allows soft backgrounds and a highlighted time prompt showing the
# Gregarian Calendar day.
# - Font draw resizing.
#
# --
#
# TODO:
# - Download new font (papyrus)
# - Larger font size
#
# - Site specific path configuration: this will befound in script.Application
#
from os.path import join as joinpath, exists
from os import listdir
from PIL.ImageFont import truetype
# Prefer a mkdir variant that creates intermediate subdirectories.
# BUG FIX: the os module exposes 'makedirs', not 'mkdirs', so the old
# 'from os import mkdirs' always raised ImportError and the single-level
# fallback ran instead -- breaking save_image for nested OUTPUT paths.
try: from os import makedirs as mkdirs
except ImportError:
    def mkdirs(d):
        # Last-resort fallback: creates only the final path component.
        from os import mkdir
        return mkdir(d)
from pdb import set_trace as debug
from mayan_cal import GALACTIC_CONSTANT # Reference here because we're insecure.
from mayan_cal.affirmation import Affirmation, daily_affirmation, todays_affirmation
class COLOR:
    """RGB color constants used throughout the wallpaper renderer."""

    # Primaries.
    BLACK  = (0, 0, 0)
    WHITE  = (255, 255, 255)
    RED    = (255, 0, 0)
    BLUE   = (0, 0, 255)
    YELLOW = (255, 255, 0)
    CYAN   = (0, 255, 255)

    # Soft background tints.
    WHITE_BG  = (225, 225, 240)
    RED_BG    = (255, 165, 165)
    BLUE_BG   = (100, 100, 250)
    YELLOW_BG = (100, 225, 225)

    # Cardinal directions take the opposite color's background tint.
    NORTH_BG = BLUE_BG
    EAST_BG  = YELLOW_BG
    WEST_BG  = WHITE_BG
    SOUTH_BG = RED_BG

    # Fill colors for the time prompt, per direction.
    NORTH_FILL = (55, 55, 250)
    EAST_FILL  = (0, 200, 200)
    WEST_FILL  = (225, 225, 240)
    SOUTH_FILL = (225, 50, 50)

    ## MAGENTA = (200, 200, 0 ) # XXX
    ## DATE_STR_FILL = MAGENTA
class Backend(object):
    """Input/output source for all drawings and configuration.

    All Wallpaper instances are tied to a Backend for loading glyph
    images and fonts and for saving rendered output.  The class is
    abstract in the sense that its FOLDERS attribute must be configured
    (via the keyword arguments of __init__) with TRIBE_INPUT,
    TONE_INPUT and OUTPUT paths.
    """
    # Recognized glyph image extensions, in search order.
    INPUT_SUFFIXES = ('png', 'jpg', 'jpeg')
    # Extension used for generated wallpaper files.
    OUTPUT_SUFFIX = 'png'
    class AbstractStateError(Exception):
        # Used as the assert message in Folders.verify_state.
        pass
    class Folders:
        """Filesystem layout: where glyphs come from and output goes."""
        BASE = joinpath('.') # ('mayan_cal')
        GLYPHS = joinpath(BASE, 'glyphs')
        TRIBE_INPUT = joinpath(GLYPHS, 'tribes')
        TONE_INPUT = joinpath(GLYPHS, 'tones')
        OUTPUT = joinpath('affirmations', 'daily')
        FONTFILE_PATH = joinpath(GLYPHS, 'pilfont')
        def __init__(self, output_dir = None, glyph_dir = None,
                     tribe_dir = None, tone_dir = None,
                     fontfile_path = None):
            """Override class-level default paths per keyword.

            When glyph_dir is given it takes precedence: tribe_dir and
            tone_dir are then treated as subdirectory names under it.
            """
            # First, program the output dir, and the tribe and tone
            # directories separately.
            if output_dir:
                self.OUTPUT = output_dir
            if tribe_dir:
                self.TRIBE_INPUT = tribe_dir
            if tone_dir:
                self.TONE_INPUT = tone_dir
            if glyph_dir:
                self.GLYPHS = glyph_dir
                # If the glyph dir is set, then the tribe and tone
                # folders are appended instead, overriding the above.
                self.TRIBE_INPUT = joinpath(glyph_dir, tribe_dir or 'tribes')
                self.TONE_INPUT = joinpath(glyph_dir, tone_dir or 'tones' )
            if fontfile_path:
                self.FONTFILE_PATH = fontfile_path
            self.verify_state()
        def verify_state(self):
            """Assert every required folder attribute is present."""
            # Move into FOLDERS implementation.
            # NOTE(review): AbstractStateError is defined on Backend, not
            # Folders -- evaluating the assert message on failure would
            # itself raise AttributeError.  Confirm intent.
            for attr in ('TRIBE_INPUT', 'TONE_INPUT', 'OUTPUT', 'FONTFILE_PATH'):
                assert hasattr(self, attr), \
                    self.AbstractStateError('%s not set!' % attr)
    def __init__(self, **kwd):
        # Assigning to the 'folders' property builds a Folders instance.
        self.folders = (kwd)
    def get_folders(self):
        # May be None before the first assignment.
        return getattr(self, '_folders', None)
    def set_folders(self, kwd):
        self._folders = Backend.Folders(**kwd)
    # Both spellings expose the same configured Folders object.
    folders = FOLDERS = property(get_folders, set_folders)
    def search_filename(self, name, input_folder, suffixes = None):
        """Find the file for *name* in *input_folder*.

        Matching is case-insensitive and ignores spaces in both the
        requested name and the candidate filenames; each allowed suffix
        is tried in turn.  Raises NameError when nothing matches.
        """
        # XXX Document this function better.
        if suffixes is None:
            suffixes = self.INPUT_SUFFIXES
        processed_name = name.lower().replace(' ', '')
        for candidate in listdir(input_folder):
            processed_candidate = candidate.lower().replace(' ', '')
            if processed_candidate.startswith(processed_name):
                for suffix in suffixes:
                    if '%s.%s' % (processed_name, suffix) == processed_candidate:
                        return joinpath(input_folder, candidate)
        raise NameError(name) # XXX Should raise OSError(errno = errno.ENOENT)
    def get_tribe_glyph_filename(self, tribe):
        """Resolve the image path for *tribe* (matched by its .name)."""
        return self.search_filename(tribe.name, self.FOLDERS.TRIBE_INPUT)
    def get_tone_glyph_filename(self, tone):
        """Resolve the image path for *tone* (matched by its .name)."""
        return self.search_filename(tone.name, self.FOLDERS.TONE_INPUT)
    # Do I want to support this?
    def get_wallpaper(self, factory, day):
        """Build a wallpaper for *day* via *factory*, bound to this backend."""
        return factory(day, backend = self)
    # Image operations.
    def load_tribe_glyph(self, tribe):
        # Load and return an image based on tribe.
        return load_glyph(self.get_tribe_glyph_filename(tribe))
    def load_tone_glyph(self, tone):
        # Load and return an image based on the tone.
        return load_glyph(self.get_tone_glyph_filename(tone))
    def load_font(self, filename = None, size = 12):
        """Load the truetype font; returns None when the file is missing."""
        try: return truetype(filename or self.FOLDERS.FONTFILE_PATH, size)
        except IOError, e:
            from errno import ENOENT
            # The message is set by the imaging library.  A missing font
            # file is tolerated: the caller then draws with font=None.
            if e.errno != ENOENT and e.message != 'cannot open resource':
                raise
    # PIL operation.
    def save_image(self, im, filename):
        """Save *im* under OUTPUT/*filename*, creating the output folder
        on demand and recording a checksum of the written file."""
        if not exists(self.FOLDERS.OUTPUT):
            mkdirs(self.FOLDERS.OUTPUT)
        path = joinpath(self.FOLDERS.OUTPUT, filename)
        im.save(path)
        from mayan_cal.desktop import file_checksum
        self.last_checksum = file_checksum(path)
class Wallpaper:
    """One day's affirmation wallpaper: glyphs, title, text, picture.

    The actual drawing and saving is implemented at module level (via
    CANVAS); this class assembles the inputs and caches the rendered
    picture.  Property descriptors cache attributes like 'picture'.
    """
    def __init__(self, day, backend = None, **kwd):
        # day: an Affirmation instance; extra keywords (e.g. bgcolor)
        # are stored directly on the instance.
        assert isinstance(day, Affirmation)
        self.backend = backend
        self.day = day
        self.__dict__.update(kwd)
    # Assembly shortcuts.
    def get_title(self):
        """'<tone> <tribe>' heading for the day."""
        return '%s %s' % (self.day.tone.name, self.day.tribe.name)
    def get_affirmation_lines(self):
        return affirmation_lines(self.day)
    title = property(get_title)
    # Iterating a Wallpaper yields its affirmation text lines.
    __iter__ = get_affirmation_lines
    def load_tribe_glyph(self):
        return self.backend.load_tribe_glyph(self.day.tribe)
    def load_tone_glyph(self):
        return self.backend.load_tone_glyph(self.day.tone)
    tribe_glyph = property(load_tribe_glyph)
    tone_glyph = property(load_tone_glyph)
    def get_tribe_background(self):
        """Background color: an explicit 'bgcolor' wins over the tribe's."""
        # Should this be rendered from the CANVAS?
        if hasattr(self, 'bgcolor'):
            return self.bgcolor
        # Case sensitive!
        return CANVAS.BACKGROUNDS.get(self.day.tribe.color.color)
    tribe_background = property(get_tribe_background)
    def get_prompt_color(self):
        return CANVAS.TIME_PROMPT.get(self.day.tribe.color.color)
    def load_picture(self):
        """Render the wallpaper image (uncached) via the CANVAS routine."""
        d = self.day.day
        p = d.strftime
        # e.g. 'Monday, July 26th 1983' -- th() supplies the ordinal suffix.
        time_prompt = p('%%A, %%B %s%s %%Y' % (p('%e').lstrip(), th(d.day)))
        return CANVAS(self.backend.load_font(),
                      self.get_tribe_background(),
                      (self.get_title(),
                       self.get_affirmation_lines()),
                      (self.load_tribe_glyph(),
                       self.load_tone_glyph()),
                      time_prompt,
                      self.get_prompt_color())
    def get_picture(self):
        # Cached render: draw once, reuse thereafter.
        try: return self._picture
        except AttributeError:
            pict = self._picture = self.load_picture()
            return pict
    def del_picture(self):
        # Drop the cache; harmless when nothing was rendered yet.
        try: del self._picture
        except AttributeError: pass
    picture = property(fget = get_picture, fdel = del_picture)
    def get_filename(self):
        """Dated output filename, e.g. 'MayanWallpaper-Jul26th1983.png'."""
        return 'MayanWallpaper-%s.%s' % (self.day.day.strftime('%b%dth%Y'),
                                         self.backend.OUTPUT_SUFFIX)
    @property
    def full_path(self):
        return joinpath(self.backend.FOLDERS.OUTPUT, self.filename)
    filename = property(get_filename)
    def save_image(self, im):
        return self.backend.save_image(im, self.get_filename())
    @property
    def saved(self):
        """Render, save into the backend's output folder, return the image."""
        im = self.load_picture()
        self.save_image(im)
        return im
    @property
    def full_saved_path(self):
        # Evaluating .saved performs the render-and-save side effect.
        (self.saved)
        return self.full_path
    def install_desktop(self):
        # Install based on module implementation.
        from mayan_cal.desktop import install_wallpaper
        return install_wallpaper(wallpaper = self)
# Move this into affirmations.py?
def affirmation_lines(af):
    """Yield display lines from *af*.

    *af* is an iterable of word tokens where a bare '\n' token marks a
    line break; each yielded line is the intervening words joined by
    single spaces.  A trailing partial line is yielded as well.
    """
    words = []
    for token in af:
        if token != '\n':
            words.append(token)
            continue
        yield ' '.join(words)
        words = []
    if words:
        yield ' '.join(words)
# Implementation of drawing.
from PIL import Image, ImageDraw, ImageFont
def load_glyph(filename):
    """Open a glyph image file and return it as a PIL Image."""
    return Image.open(filename)
X, Y = W, H = 0, 1
class CANVAS(object):
    """Layout constants and drawing routine for a single wallpaper.

    Note: CANVAS(...) does not return a CANVAS instance -- __new__
    runs draw_affirmation and returns the finished PIL image.
    """
    # Backgrounds keyed by tribe color name OR direction name, mapped
    # to the 'opposite' direction's tint (see COLOR).
    BACKGROUNDS = {'White' : COLOR.NORTH_BG, 'North' : COLOR.NORTH_BG,
                   'Red' : COLOR.EAST_BG, 'East' : COLOR.EAST_BG,
                   'Blue' : COLOR.WEST_BG, 'West' : COLOR.WEST_BG,
                   'Yellow' : COLOR.SOUTH_BG, 'South' : COLOR.SOUTH_BG}
    # Fill color of the date prompt, keyed the same way.
    TIME_PROMPT = {'White' : COLOR.NORTH_FILL, 'North' : COLOR.NORTH_FILL,
                   'Red' : COLOR.EAST_FILL, 'East' : COLOR.EAST_FILL,
                   'Blue' : COLOR.WEST_FILL, 'West' : COLOR.WEST_FILL,
                   'Yellow' : COLOR.SOUTH_FILL, 'South' : COLOR.SOUTH_FILL}
    # Canvas dimensions in pixels.
    SIZE = (400, 350)
    # NOTE(review): both components use SIZE[X]; CENTER[Y] is never
    # read below, so the apparent typo is currently harmless -- confirm
    # before relying on CENTER[Y].
    CENTER = (SIZE[X] / 2, SIZE[X] / 2)
    TONE_NEWSIZE = (85, 165)
    TRIBE_NEWSIZE = (200, 200)
    # Pixel color treated as transparent when masking glyphs.
    TRANSPARENT = COLOR.WHITE
    class PADDING:
        class GLYPH:
            XAXIS = 10
        class TEXT:
            XAXIS = 4
            YAXIS = 4
    # Tribe glyph: horizontally centered, nudged right by 10% of its
    # half-width, 40px from the top.
    TRIBE_OFFSET = (CENTER[X] - \
                    (TRIBE_NEWSIZE[X] / 2) + \
                    int(TRIBE_NEWSIZE[X] / 2 * .1), \
                    40)
    # Tone glyph: immediately left of the tribe glyph.
    TONE_OFFSET = (TRIBE_OFFSET[X] - TONE_NEWSIZE[X] - PADDING.GLYPH.XAXIS, 55)
    class TITLE_OFFSET:
        YAXIS = 15
    class AFFIRMATION_OFFSET:
        YAXIS = 255
    def __new__(self, *args, **kwd):
        # Build a throwaway instance and return the drawn image instead.
        i = object.__new__(self)
        return i.draw_affirmation(*args, **kwd)
    # Object instance methods.
    def draw_affirmation(self, font, bgcolor, (title, affirmation),
                         (tribe_glyph, tone_glyph), time_prompt, prompt_fill):
        """Assemble a new graphic with the tribe and tone glyphs and
        draw the affirmation text around them; returns a PIL image.

        (Python 2 tuple parameters.)  Eventually this should go into
        the class as a picture method; it lives here because of its
        reliance on PIL.
        """
        # Create a new canvas.
        im = Image.new('RGB', self.SIZE, bgcolor)
        # Draw the tone glyph center, left, transparently.
        tone_glyph = tone_glyph.resize(self.TONE_NEWSIZE)
        # Generate transparency mask from tone_glyph.
        mask = transparent_mask(tone_glyph)
        im.paste(tone_glyph, box = self.TONE_OFFSET, mask = mask)
        # Draw the tribe glyph, center, overlapping.
        tribe_glyph = tribe_glyph.resize(self.TRIBE_NEWSIZE)
        im.paste(tribe_glyph, box = self.TRIBE_OFFSET)
        # Create drawing context for text.
        draw = ImageDraw.Draw(im)
        # Draw the title string, top, horizontally centered, overlapping.
        draw.text((self.center_text_xaxis(draw, title), self.TITLE_OFFSET.YAXIS),
                  title, fill = COLOR.BLACK, font = font)
        # Draw the affirmation lines, bottom spanning vertical, overlapping.
        yaxis = self.AFFIRMATION_OFFSET.YAXIS
        for line in affirmation:
            draw.text((self.center_text_xaxis(draw, line), yaxis),
                      line, fill = COLOR.BLACK, font = font)
            yaxis += draw.textsize(line)[Y] + self.PADDING.TEXT.YAXIS # Padding
        # Date prompt: inset at the bottom-right of the tribe glyph's box.
        metrics = draw.textsize(time_prompt)
        draw.text((self.TRIBE_OFFSET[X] + self.TRIBE_NEWSIZE[X] \
                   - metrics[X] - self.PADDING.TEXT.XAXIS * 2,
                   self.TRIBE_OFFSET[Y] + self.TRIBE_NEWSIZE[Y] \
                   - metrics[Y] - self.PADDING.TEXT.YAXIS * 2),
                  time_prompt, fill = prompt_fill, font = font)
        return im
    def center_text_xaxis(self, draw, text):
        """X coordinate that horizontally centers *text* on the canvas."""
        return self.CENTER[X] - (draw.textsize(text)[X] / 2)
    def draw_resized_text(self, text, im, box, font = None, bgcolor = None):
        """Emulate font-sizing by doing PIL resize and paste."""
        if font is None:
            import ImageFont
            font = ImageFont.load_default()
        # Draw into a temporary box. (padding?)
        # bgcolor - maybe use an alpha-friendly blend against original image?
        tmp = Image.new(im.mode, font.getsize(text))
        draw = ImageDraw.Draw(tmp)
        draw.text((0, 0), text)
        # Resize the temporary image to the dimensions given by the box.
        fullsize = (box[2]-box[0], box[3]-box[1])
        # NOTE(review): Image.resize returns a NEW image; this result is
        # discarded, so tmp is pasted at its original size -- confirm
        # whether 'tmp = tmp.resize(fullsize)' was intended.
        tmp.resize(fullsize) # ANTIALIAS?
        # Now paste it into the bounding-box on the original image.
        im.paste(tmp, box = box)
def transparent_mask(glyph, bgcolor = CANVAS.TRANSPARENT, opacity = 255):
    """Give *glyph* an alpha channel hiding pixels that match *bgcolor*.

    Returns the RGBA glyph, which is then usable as its own paste mask.
    """
    # This works too!
    if glyph.mode != 'RGBA':
        glyph = glyph.convert('RGBA')
    # XXX Otherwise, modify THIS image's alpha band.
    def alphaMask((r, g, b, a)):
        # (Python 2 tuple parameter.)  0 = fully transparent.
        if (r, g, b) == bgcolor:
            return 0
        # XXX I'm not even really sure providing an opacity here
        # will effect the transparency.
        return opacity
    alpha = Image.new('1', glyph.size)
    alpha.putdata(map(alphaMask, glyph.getdata()))
    glyph.putalpha(alpha)
    return glyph
def saveImageTo(original, destination):
    """Re-save the image at *original* under *destination*; the output
    format is inferred by PIL from the destination filename."""
    # Suggest mode via filename.
    Image.open(original).save(destination)
    return destination
def th(day):
    """English ordinal suffix for *day*: 1 -> 'st', 22 -> 'nd', 13 -> 'th'."""
    if int(day / 10) == 1:
        # The teens (10-19) all take 'th'.
        return 'th'
    return {1: 'st', 2: 'nd', 3: 'rd'}.get(day % 10, 'th')
# XXX Local Application.
Store = Backend()
class Today(Wallpaper):
    """Wallpaper for today's affirmation, saved under a fixed filename."""
    def __init__(self, backend = Store):
        Wallpaper.__init__(self, todays_affirmation(), backend = backend)
    def get_filename(self):
        # Overwrites the same file each day instead of a dated name.
        return 'today.%s' % self.backend.OUTPUT_SUFFIX
# Day passed is an affirmation.
def draw(day = None, backend = Store, wallpaper = None):
    """Render and save a wallpaper; returns the saved PIL image.

    Precedence: an explicit *wallpaper*, else *day*, else today.
    *backend* supplies the folders for glyph input and image output.
    """
    if wallpaper is None:
        # BUG FIX: the old 'day is None and Today or Wallpaper(...)'
        # expression yielded the Today *class* (never instantiated) when
        # day was None, so '.saved' below produced the property object
        # instead of rendering and saving anything.
        if day is None:
            wallpaper = Today(backend = backend)
        else:
            wallpaper = Wallpaper(day, backend = backend)
    # Load Picture from Canvas and save to outgoing directory.
    return wallpaper.saved
# Draw a full calendar month.
# Todo: parameterized font.
def month_days(year, month):
    """Yield gregarian_date objects for each actual day of the month.

    Relies on gregarian_date raising ValueError for an out-of-range
    day number to terminate short months.
    """
    from mayan_cal import gregarian_date
    day = 1
    try:
        while day <= 31:
            yield gregarian_date(year, month, day)
            day += 1
    except ValueError:
        pass
def daily_month_image(day, backend = Store, **kwd):
    """Render *day*'s wallpaper scaled to one month-grid cell.

    Returns (resized image, wallpaper).  Extra keywords (e.g. bgcolor)
    pass through to the Wallpaper.
    """
    wallpaper = Wallpaper(daily_affirmation(day), backend = backend, **kwd)
    # Force wallpaper to delete stored picture because we're doing bad things to it.
    im = wallpaper.picture
    # (The property's deleter already tolerates a missing cache, so the
    # try here is belt-and-braces.)
    try: del wallpaper.picture
    except AttributeError: pass
    return im.resize((MONTHDAY_WIDTH, MONTHDAY_HEIGHT)), wallpaper
# Target desktop resolution; a month grid is 7 columns x 5 rows of
# cells sized from it.  (Python 2 integer division.)
RESOLUTION = (1280, 1024)
MONTHDAY_WIDTH = RESOLUTION[X] / 7
MONTHDAY_HEIGHT = RESOLUTION[Y] / 5
def month_day_images(year, month, backend = Store, **kwd):
    """Yield (day, cell image, '<tone> <tribe>' title) for each day of
    the given month."""
    for current in month_days(year, month):
        cell, paper = daily_month_image(current, backend = backend, **kwd)
        title = '%s %s' % (paper.day.tone.name, paper.day.tribe.name)
        yield (current, cell, title)
def draw_month_images(images):
    """Paste per-day cell images into a 7-wide calendar grid.

    *images* yields (day, image, name) triples; the month/year label is
    drawn bottom-right using the last day seen.  Progress is printed to
    stdout.
    """
    canvas = Image.new('RGB', RESOLUTION, COLOR.WHITE)
    x = y = w = 0
    for (day, im, name) in images:
        w += 1 # Todo: get weekday name.
        print '#%3d' % w, 'Pasting [%s] %-35s' % (day, name), 'Into (%3d, %3d)' % (x, y)
        canvas.paste(im, box = (x, y))
        # Rows wrap on calendar-day multiples of 7 (not on the column
        # counter w, which only tracks position-in-week).
        if 0 == (day.day % 7):
            # End of week.
            x = 0
            y += MONTHDAY_HEIGHT
        else:
            x += MONTHDAY_WIDTH
        if w == 7:
            w = 0
    # Label with the month name from the last pasted day.
    month_name = day.strftime('%B %Y')
    draw = ImageDraw.Draw(canvas)
    w, h = draw.textsize(month_name)
    draw.text((canvas.size[X] - w - 8, canvas.size[Y] - h - 8), month_name, fill = COLOR.BLACK)
    return canvas
def draw_month(year, month, backend = Store):
    """Compose the full month-grid image for *year*/*month*."""
    return draw_month_images(month_day_images(year, month, backend = backend))
def draw_year(year, backend = Store):
    """Yield (month number, month-grid image) for each month of *year*.

    BUG FIX: the *backend* argument was accepted but never forwarded to
    draw_month, so a non-default backend was silently ignored.
    """
    for month in xrange(1, 13):
        yield (month, draw_month(year, month, backend = backend))
# Annual calendar wallpapers -- These backgrounds are indexed later in the procedure.
TRIBE_BACKGROUNDS = [COLOR.RED_BG, COLOR.WHITE_BG, COLOR.BLUE_BG, COLOR.YELLOW_BG]
# Maps a tribe color name to its index in TRIBE_BACKGROUNDS.
TRIBE_OFFSET = ['Red', 'White', 'Blue', 'Yellow'].index
def cycle_color(start, bg = TRIBE_BACKGROUNDS):
    """Endlessly yield the colors of *bg*, beginning at index *start*."""
    while True:
        for color in bg[start:] + bg[:start]:
            yield color
def draw_galactic_year(new_day, backend = Store):
    """Paste one cell per day of the galactic year into a 13x20 grid.

    Cells fill top-to-bottom, then left-to-right; the cell background
    cycles through TRIBE_BACKGROUNDS starting from the first day's
    tribe color.  Stops after GALACTIC_CONSTANT days.
    """
    canvas = Image.new('RGB', (MONTHDAY_WIDTH * 13, MONTHDAY_HEIGHT * 20), COLOR.WHITE)
    x = y = i = 0
    # Todo: Start iterating from new_day.{day, month, year}
    for m in xrange(1, 13):
        if i >= GALACTIC_CONSTANT:
            break # For sho.
        for day in month_days(new_day.year, m):
            i += 1
            if i == 1:
                # On the very first iteration, we temporarily construct an
                # affirmation simply to gain its kin-qualified tribe color
                # to start the cycle.
                offset = TRIBE_OFFSET(daily_affirmation(day).tribe.color.color)
                next_bgcolor = cycle_color(offset).next
            im, wallpaper = daily_month_image(day, backend = backend,
                                              bgcolor = next_bgcolor())
            name = wallpaper.day.tone.name + ' ' + wallpaper.day.tribe.name
            print '#%3d' % i, 'Pasting [%s] %-35s' % (day, name), 'Into (%3d, %3d)' % (x, y)
            canvas.paste(im, box = (x * MONTHDAY_WIDTH, y * MONTHDAY_HEIGHT))
            # Advance downward; wrap to the next column after 20 rows.
            if y == 19:
                y = 0
                x += 1
            else:
                y += 1
            if x == 13:
                x = 0
    return canvas
def draw_text_lines(draw, origin, text,
                    padding = CANVAS.PADDING.TEXT.YAXIS,
                    color = COLOR.BLACK):
    """Draw newline-separated *text* starting at *origin*, advancing
    down by each line's height plus *padding*."""
    offset = 0
    for row in text.split('\n'):
        draw.text((origin[X], origin[Y] + offset), row, fill = color)
        offset += draw.textsize(row)[Y] + padding
def draw_galactic_info_chart(new_day, backend = Store):
    """Compose an info chart: year grid on the left, descriptive text
    and the day's wallpaper glyph on the right.

    Pass the FIRST day of the year!
    """
    canvas = Image.new('RGB', (1000, 800), color = COLOR.WHITE)
    HEIGHT = 700
    OFFSET = 50
    # Left pane - Draw full calendar year, scaled to HEIGHT keeping aspect.
    im = draw_galactic_year(new_day, backend = backend)
    ratio = (float(HEIGHT) / im.size[Y])
    resizex = int(ratio * im.size[X])
    im = im.resize((resizex, HEIGHT))
    canvas.paste(im, box = (OFFSET, OFFSET, im.size[X] + OFFSET, im.size[Y] + OFFSET))
    # New vertical alignment.
    rightpaneleft = resizex + (OFFSET * 2)
    # Right pane - Text about first date of year.
    draw = ImageDraw.Draw(canvas)
    text = new_day.strftime('%B %d' + th(new_day.day) + ' %Y')
    text += ' - First day of the New Year'
    text += '\n'
    text += daily_affirmation(new_day).tribe.color.season.name
    draw_text_lines(draw, (rightpaneleft, OFFSET), text, color = COLOR.BLACK)
    # New vertical alignment.
    glyphposy = (draw.textsize(text)[Y] * 2) + (OFFSET * 2)
    # Draw wallpaper glyph, scaled to 400px wide keeping aspect.
    im = Wallpaper(daily_affirmation(new_day), backend = backend).picture
    im = im.resize((400, int((400.0 / im.size[X]) * im.size[Y])))
    canvas.paste(im, box = (rightpaneleft , glyphposy ,
                            rightpaneleft + im.size[X],
                            glyphposy + im.size[Y]))
    undertextposy = glyphposy + im.size[Y] + (OFFSET * 4)
    draw.text((rightpaneleft, undertextposy),
              'Destiny Oracle', fill = COLOR.BLACK)
    return canvas
def watermark_wallpaper(image, watermark = None, box = (),
                        bgcolor = CANVAS.TRANSPARENT, opacity = 255):
    """Paste *watermark* into *box* on *image*, with *bgcolor* pixels
    made transparent; returns the modified *image*."""
    # Scale the watermark to fit the bounding box.
    target_size = (box[2] - box[0], box[3] - box[1])
    scaled = watermark.resize(target_size)
    # Mask out the background color, then paste translucently in place.
    mask = transparent_mask(scaled, bgcolor = bgcolor, opacity = opacity)
    image.paste(scaled, box = box, mask = mask)
    return image
def getbbox((x, y), (w, h)):
    # (Python 2 tuple parameters.)  Convert an origin plus size into a
    # PIL (left, upper, right, lower) bounding box.
    return (x, y, x + w, y + h)
def watermark_resolution(image, watermark = None,
                         resolution = (None, None),
                         width = None, height = None,
                         **kwd):
    """Stamp *watermark* (width x height) at the upper-right corner of
    *image*, positioned for the given screen *resolution*."""
    # From upper right, inset by a fixed margin.
    OFFSET = 15
    box = getbbox((resolution[X] - width - OFFSET, OFFSET), (width, height))
    return watermark_wallpaper(image, watermark = watermark, box = box, **kwd)
def compositeWatermark(wallpaper, watermark, resolution, output, **kwd):
    """Open *wallpaper*, resize it to *resolution*, stamp *watermark*
    at the upper right, and save the composite to *output*.

    'width'/'height' keywords size the watermark (default 200x180);
    any remaining keywords flow through to watermark_resolution.
    """
    wallpaper_im = Image.open(wallpaper).resize(resolution)
    watermark_im = Image.open(watermark)
    # pop() replaces the old get-then-del dance; same semantics, and the
    # remaining kwd no longer carries width/height downstream.
    width = kwd.pop('width', 200)
    height = kwd.pop('height', 180)
    composite = watermark_resolution(wallpaper_im, watermark = watermark_im,
                                     resolution = resolution,
                                     width = width, height = height,
                                     **kwd)
    composite.save(output)
    return composite
def draw_destiny_castle(day, bgcolor = COLOR.WHITE):
    """Start a destiny-castle drawing for *day* (star-mark and spirals
    are still TODO); returns a blank 400x300 canvas."""
    # XXX Todo: bypass affirmations and use calculate functions directly.
    af = daily_affirmation(day)
    tribe, tone = af.tribe, af.tone
    # BUG FIX: Image.new takes (mode, size, color); the old call passed
    # 400 and 300 as separate positional args plus an unsupported
    # 'bgcolor' keyword, which raises TypeError.
    canvas = Image.new('RGB', (400, 300), bgcolor)
    # Draw star-mark.
    # Draw containing spirals.
    return canvas
| Python |
#!/bin/python
# Copyright 2008 Clint Banis. All rights reserved.
#
# This script serves as the master module getting the software path right
# before importing all sub modules into its namespace.
#
# The main function is to run the Application ware which implements a
# command-line configurable implementation of the desktop rendering.
#
# USAGE:
# mayan_cal/script.py --day=1983,7,26 --install-wallpaper --watermark=auto
#
# This script is compiled as the frozen executable entry point giving access
# to the rest of the functionality.
#
__version__ = '1.0'
from sys import path as system_path
from os.path import exists, splitext, join as joinpath
from os.path import sep as PATH_SEP, dirname, basename
from os import chdir, environ
from time import sleep
from traceback import print_exc as traceback
from optparse import OptionParser
from pdb import set_trace as debug, runcall
IMPORTS = '''
from mayan_cal import gregarian_date
from mayan_cal.affirmation import daily_affirmation
from mayan_cal.drawing import Backend, Wallpaper, saveImageTo, compositeWatermark
from mayan_cal.desktop import Platform
'''
DESKTOP_RESOLUTION = (1280, 1200)
STANDARD_ENDING = '\Local Settings\Application Data\Microsoft\Wallpaper1.bmp'
def define_application():
# Use via setup_shell after importing necessary mayan_cal modules.
## When application programming doesn't need to be deferred until after
## the shell (system package paths and cwd) is setup, this becomes a
## global construct.
global Application
class Application(Backend):
'Provides a command-line front end for utilizing the Wallpaper interface.'
class CommandLine(OptionParser):
USAGE = '%prog'
VERSION = ('%%prog %s' % __version__)
def __init__(self):
    """Build the option parser: defaults, then control, input, output
    and debug options.  Dest names derive from the long option unless
    given explicitly."""
    usage = Application.CommandLine.USAGE
    version = Application.CommandLine.VERSION
    OptionParser.__init__(self, usage = usage, version = version)
    self.set_defaults(day = todays_date(),
                      render_bitmap = True,
                      affirmation_file = None,
                      watermark_background = 'preferred',
                      install_wallpaper = True)
    # Control operations.
    self.add_option("-p", "--preferred-background",
                    help = "Install a preferred background into the Windows Registry.\n")
    # Input options.
    self.add_option("-d", "--day", dest = "day",
                    help = "Specify source day.\n")
    self.add_option("-f", "--affirmation-file", "--file",
                    help = "Specify source affirmation wallpaper rendering.\n")
    self.add_option("-m", "--watermark-background", "--watermark",
                    help = "Program watermark filename or date specifier.\n")
    self.add_option("-t", "--watermark-mode",
                    help = "program watermark positioning and translucence")
    self.add_option("-s", "--screen-resolution",
                    help = "specify resulting background width-height constraints")
    # Output options.
    self.add_option("-r", "--render-bitmap", "--render-bmp", "--bmp",
                    dest = "render_bitmap",
                    action = "store_true",
                    help = "perform Windows bitmap conversion")
    self.add_option("--no-render-bitmap",
                    dest = "render_bitmap",
                    action = "store_false",
                    help = "do not perform bitmap conversion")
    self.add_option("-i", "--install-wallpaper",
                    dest = "install_wallpaper",
                    action = "store_true",
                    help = "install wallpaper onto the Windows desktop")
    self.add_option("--no-install-wallpaper",
                    dest = "install_wallpaper",
                    action = "store_false",
                    help = "do not install wallpaper onto Windows desktop")
    self.add_option("-o", "--output-dir",
                    help = "Location to put temporary drawing files.")
    self.add_option("--tribe-dir",
                    help = "Location of tribe glyphs.")
    self.add_option("--tone-dir",
                    help = "Location of tone glyphs.")
    self.add_option("-g", "--glyph-dir",
                    help = "Location of tribe and tone glyph directories.")
    self.add_option("-v", "--verbose",
                    dest = "verbose", action = "store_true",
                    help = "be verbose when operating")
    self.add_option("-q", "--quiet",
                    dest = "verbose", action = "store_false",
                    help = "be non-verbose when operating")
    # Internal Debug
    self.add_option("-e", "--debug",
                    action = "store_true",
                    help = "Enter internal debugging of the run frame.")
def __init__(self):
    """Seed the Backend folders from platform (registry) configuration.

    self[...] reads keys via getConfigUserSystem and may yield None,
    in which case Folders falls back to its class defaults.
    """
    # Initial folder set to platform configuration:
    Backend.__init__(self, output_dir = self['WallpaperTempDir'],
                     glyph_dir = self['MayanGlyphs' ],
                     tribe_dir = self['MayanTribeDir' ],
                     tone_dir = self['MayanToneDir' ])
# Now this doesn't make the most sense: This class wants to reuse
# the parser, so that you can pass multiple shell invocations through
# one Application via a gui front-end, but the above application state
# isn't formalized in a way that the application can be reset between
# runs. No big deal.
@property
def cmdln_parser(self):
    """parse_args bound to a lazily created, reused CommandLine parser."""
    # Make one instance!
    try: return self._parser
    except AttributeError:
        # See how this encapsulates the parser routine!
        p = self._parser = self.CommandLine().parse_args
        return p
    ## return self.CommandLine().parse_args
def get_shell_options(self, argv = None):
    """Parse command-line options; defaults to the process arguments.

    Returns the (options, args) pair from optparse.

    BUG FIX: sys.argv includes the program name at index 0, but
    OptionParser.parse_args expects the bare argument list; the old
    code passed the full argv, making the script path show up as a
    stray positional argument.  An explicitly supplied *argv* is
    still passed through unchanged.
    """
    if argv is None:
        from sys import argv
        argv = argv[1:]
    return self.cmdln_parser(argv)
def run(self, argv = None, options = None, args = None):
    """Execute one command-line invocation.

    Resolves a 'target' image path through successive stages
    (affirmation render, watermark composition, bitmap conversion),
    then optionally installs it as the Windows desktop wallpaper.
    """
    if options is None:
        assert args is None
        (options, args) = self.get_shell_options(argv = argv)
    if options.debug:
        # Start tracing this frame.
        debug()
    # First see if the preferred background is specified, and if so,
    # install the argument in the reg/config.
    # NOTE(review): the result 'bg' is unused and no early exit occurs
    # (the sys_exit in get_preferred_background is commented out) --
    # confirm whether processing should continue in that case.
    bg = self.get_preferred_background(options.preferred_background,
                                       options.verbose)
    # Alternately do folder set to any directories specified on command line.
    # NOTE(review): falls back to 'MayanTribes'/'MayanTones' registry keys
    # while __init__ reads 'MayanTribeDir'/'MayanToneDir' -- verify which
    # spelling the registry actually uses.
    self.folders = dict(output_dir = options.output_dir or self['WallpaperTempDir'],
                        glyph_dir = options.glyph_dir or self['MayanGlyphs'],
                        tribe_dir = options.tribe_dir or self['MayanTribes'],
                        tone_dir = options.tone_dir or self['MayanTones'])
    # Determine either wallpaper or day, not both.
    # assert (bool(options.affirmation_file) ^ bool(type(options.day) is not date))
    target = self.get_affirmation_file(options.day, options.verbose) \
             or options.affirmation_file
    if options.verbose:
        self.log("\t%r" % target)
    if options.screen_resolution or True: # NOTE(review): 'or True' makes this unconditional.
        # Resolve options.SCREEN_RESOLUTION from arguments.
        # This is so following functions can take advantage of
        # the setting. The resize action can occur if no others
        # utilized the resolution.
        options.SCREEN_RESOLUTION = DESKTOP_RESOLUTION
    target = self.get_watermark_background(options.watermark_background,
                                           target, # As source.
                                           options.SCREEN_RESOLUTION,
                                           options.verbose) or target
    if options.render_bitmap:
        # Convert to Windows Bitmap and store somewhere.
        if options.verbose:
            self.log("Converting to Windows Bitmap...")
        converted_path = self['CurrentWallpaperPathname'] or 'CurrentWallpaper.bmp'
        converted_path = joinpath(self.FOLDERS.OUTPUT, converted_path)
        # rename_extension(target, 'bmp')
        target = saveImageTo(target, converted_path)
        if options.verbose:
            self.log("\t%r" % target)
    # Install to Windows desktop option.
    if options.install_wallpaper:
        if options.verbose:
            self.log("Installing Windows background desktop wallpaper...")
            self.log("\t%r" % target)
        Platform.installWallpaper(target)
def get_preferred_background(self, bg, verbose):
    """Install, remove, or resolve the PreferredBackground setting.

    *bg* may be a filename, 'auto' (read the currently installed
    system wallpaper from the registry), or 'none' (remove the
    setting).  Returns the resolved value (possibly None).
    """
    if bg:
        if bg == 'auto':
            # Determine from currently installed system wallpaper.
            bg = self.getConfigUserSystem('Wallpaper')
            # BUG FIX: 'assert bg, RuntimeError(...)' used assert for
            # input validation, which is stripped under -O and raised
            # AssertionError (with an exception instance as message)
            # rather than the intended RuntimeError.
            if not bg:
                raise RuntimeError("No automatic background determinable!")
        elif bg == 'none':
            bg = None
        if bg is None:
            # Remove preferred background option from registry.
            # If the registry value is "none", this doesn't happen.
            if verbose:
                self.log("Removing Preferred Background setting from registry...")
            # Todo: Fail safe.
            self.delConfigUserSystem('PreferredBackground')
        else:
            if verbose:
                self.log("Setting Preferred Background to %r..." % bg)
            # XXX Requires absolute path normalization upon background path.
            self.setConfigUserSystem('PreferredBackground', bg)
    return bg
def get_affirmation_file(self, day, verbose):
    """Render and save the affirmation wallpaper for *day*.

    *day* may be a gregarian_date or a date string parseable by
    parse_datestring.  Returns the saved file path, or None when no
    day is given.
    """
    if day:
        # Configure affirmation via Wallpaper.
        if type(day) is gregarian_date:
            af = daily_affirmation(day)
        else:
            # CLEANUP: replaced a leftover debug 'print day' and an
            # 'elif True:' with a plain else branch.
            # Attempt to construct a date from the string.
            af = daily_affirmation(self.parse_datestring(day))
        # Use day programming to construct Wallpaper rendering.
        wallpaper = Wallpaper(af, backend = self)
        if verbose:
            self.log("Generating Affirmation Wallpaper file...")
        # Evaluating full_saved_path executes the save, ensuring a
        # rendered file exists on disk.
        return (wallpaper.full_saved_path)
def get_watermark_background(self, watermark, source, resolution, verbose):
    """Compose the day's wallpaper (*source*) as a watermark over the
    preferred background image.

    *watermark* may be a filename, 'auto'/'preferred' (read from the
    registry), or 'none'/None to skip composition.  Returns the path
    of the composite file, or None when nothing was composed.
    """
    if watermark:
        if type(watermark) is str and watermark.lower() in ("auto", "preferred"):
            watermark = self.getConfigUserSystem('PreferredBackground')
            if not watermark:
                watermark = None
                if verbose:
                    self.log("No Preferred Background found!")
            else:
                if verbose:
                    self.log("Using Preferred Background Watermark %r." % watermark)
        if type(watermark) is str and watermark.lower() == "none":
            watermark = None
    if watermark:
        # Compose a watermarked image.
        if verbose:
            self.log("Composing Background with Wallpaper Watermark...")
        filename, ext = splitext(basename(source))
        bg = splitext(basename(watermark))[0]
        composition = '%s on %s%s' % (filename, bg, ext)
        # Draw the temporary watermarked background image file next to
        # the source wallpaper.
        bg = joinpath(dirname(source), composition)
        # Argument order: the preferred background is the base canvas
        # and the day's wallpaper (*source*) is pasted as the watermark.
        compositeWatermark(watermark,
                           source,
                           resolution,
                           bg)
        if verbose:
            self.log("\t%r" % bg)
        return bg
# Month-name lookup for parse_datestring.
# BUG FIX: 'october' previously mapped to 9 (a duplicate of september);
# it must map to 10.
month_names = {'january': 1, 'february': 2, 'march': 3, 'april': 4,
               'may': 5, 'june': 6, 'july': 7, 'august': 8,
               'september': 9, 'october': 10, 'november': 11, 'december': 12}
def parse_datestring(self, datestr):
    """Parse '<month> <day> <year>' into a gregarian_date.

    The month may be given as an English name (case-insensitive) or a
    number.
    """
    month, day, year = datestr.split()
    year = int(year)
    day = int(day)
    month = self.month_names.get(month.lower()) or int(month)
    return gregarian_date(year, month, day)
# Registry key prefix for per-user configuration (None off-Windows or
# when the Platform module lacks DesktopKey).
ConfigKey = getattr(Platform, 'DesktopKey', None)
# ConfigKey += 'MayanCalendar\\'
def getConfigUserSystem(self, key, default = None):
    """Read *key* from the registry; *default* when unset/unavailable."""
    if Platform and self.ConfigKey:
        try: return Platform.RegistryAPI[self.ConfigKey + key]
        except WindowsError:
            pass
    return default
def setConfigUserSystem(self, key, value):
    """Write *key* = *value* into the registry (no-op off-Windows)."""
    if Platform and self.ConfigKey:
        try: Platform.RegistryAPI[self.ConfigKey + key] = value
        except WindowsError:
            raise # What else?
# Dict-style access sugar: self['Key'] / self['Key'] = value.
__getitem__ = getConfigUserSystem
__setitem__ = setConfigUserSystem
def delConfigUserSystem(self, key):
    # TODO: not implemented -- removing the preferred background
    # setting (see get_preferred_background) is currently a no-op.
    pass
def log(self, message):
    """Write *message* to self.logStream, defaulting to stdout."""
    try: stream = getattr(self, 'logStream')
    except AttributeError:
        from sys import stdout as stream
    print >> stream, message
def runForLog(self, function, *args, **kwd):
    """Run *function* capturing everything self.log writes; return the
    captured text.  The previous logStream (if any) is restored."""
    # AttributeError doubles as the 'no previous stream' sentinel.
    try: prev = self.logStream
    except AttributeError:
        prev = AttributeError
    from cStringIO import StringIO
    buf = self.logStream = StringIO()
    # Ignores value of function.
    try: function(*args, **kwd)
    finally:
        if prev is AttributeError:
            del self.logStream
        else:
            self.logStream = prev
    return buf.getvalue()
return Application
def todays_date():
    """Return today's date as a gregarian_date."""
    # Time might be relative, but not on the desktop.
    from time import localtime, mktime
    timestamp = mktime(localtime())
    return gregarian_date.fromtimestamp(timestamp)
def rename_extension(filename, new_ext):
    """Return *filename* with its extension replaced by *new_ext*."""
    stem = splitext(filename)[0]
    return '%s.%s' % (stem, new_ext)
def make_application(*args, **kwd):
    """Define the (late-bound) Application class and instantiate it."""
    app = define_application()
    return app(*args, **kwd)
def setup_shell(script = None):
    """Prepare the runtime environment, then import the project names.

    Adds the package's parent directory to sys.path (and chdirs there)
    so 'mayan_cal' is importable, then executes IMPORTS into this
    module's globals.  (Python 2 exec-statement syntax.)
    """
    if script is None:
        script = globals().get('__file__', None)
    if script:
        package_path = dirname(dirname(script)).strip()
        if package_path == '':
            package_path = '.'
        # Makes this product module accessible to the script environment.
        system_path.append(package_path)
        if package_path != '.':
            chdir(package_path)
    exec IMPORTS in globals(), globals()
def main():
    """Entry point: set up paths/imports, then run the Application."""
    setup_shell()
    try:
        # from pdb import runcall as apply
        apply(make_application().run)
    except SystemExit:
        pass
    except:
        # This is to facilitate watching the console error during headless
        # (scheduled) invocation.  Don't do this for production
        # environments (via Scheduled Task).
        traceback()
        # Pause so the console stays open; typing 'debug' drops into pdb.
        try: line = raw_input(' > ')
        except KeyboardInterrupt:
            pass
        else:
            if line == 'debug':
                debug()
if __name__ == '__main__':
main()
| Python |
#!/usr/local/bin/python
INCLUDE_PATH = 'C:\\Documents and Settings\\Clint\\My Documents\\pythonlib'
# distutils
def setup():
    """cx_Freeze build definition (alternative to the shell-driven main).

    INCLUDE_PATH supplies the site-specific python library location.
    """
    from cx_Freeze import setup, Executable
    import sys
    setup(name = "setMayanWallpaper",
          version = "0.1",
          description = "Mayan Windows Desktop Wallpaper rendering.",
          executables = [Executable("mayan_cal/script.py",
                                    path = sys.path + [INCLUDE_PATH])],
          packages = ['mayan_cal'])
MODULES = ('mayan_cal', 'mayan_cal.structure', 'mayan_cal.calculate',
'mayan_cal.affirmation', 'mayan_cal.drawing',
'mayan_cal.desktop')
def doublequote_repr(o):
    """Wrap str(o) in double quotes for shell commands.

    Prefixing an apostrophe forces repr to pick double-quote
    delimiters; the leading two characters (the delimiter plus the
    apostrophe) are then replaced by a single double quote.
    """
    quoted = repr("'" + str(o))
    return '"' + quoted[2:]
def main():
    """Drive the freeze build: run the freeze tool, then export/copy the
    data files (glyphs, affirmations, preferences) into the target dir."""
    # Todo: Don't use --include-modules, use --package
    target_dir = "Mayan Calendar"
    target_name = "MayanWallpaper.exe"
    prolog = "cmd /C \"python C:\\cygwin\\bin\\freeze --include-path=. --include-modules="
    modules = ','.join(MODULES)
    epilog = " mayan_cal\\script.py --target-dir=%s --target-name=%s\"" % \
             (doublequote_repr(target_dir), doublequote_repr(target_name))
    from os import system as system_command
    from os.path import join as joinpath
    verbose = True
    def shell_command(cmd):
        # Echo the command when verbose, then run it via the shell.
        if verbose:
            print cmd
        return system_command(cmd)
    # Thin cygwin-tool wrappers built on shell_command.
    mkdir = lambda path: shell_command("mkdir -p " + doublequote_repr(path))
    copy = lambda path, *files: shell_command("cp -Rf %s %s" % \
           (" ".join(doublequote_repr(f) for f in files),
            doublequote_repr(path)))
    export = lambda tree, path: shell_command(("svn export " + \
             "file:///repository/trunk/MayanCalendar/%s" + \
             " %s") % (tree, doublequote_repr(path)))
    # Run cx freeze.
    shell_command(prolog + modules + epilog)
    # Copy glyph data files.
    export("glyphs", joinpath(target_dir, "glyphs"))
    # Make a temporary directory tree.
    mkdir(joinpath(target_dir, "affirmations", "daily"))
    # Copy preferences ini and shortcut.
    copy(target_dir, "Preferences.reg", "Wallpaper Update.lnk")
    # Generate post-install script for putting data files and output folder
    # into Program Files, and updating the registry with the config file.
    # Generate short-cuts.
    # Generate schedule-task.
    # Generate registry-config.
    # Platform.createShortcut(joinpath("dist", "Mayan Wallpaper Update"),
    #                         target_path = 'mayanCal.exe', run_in = '',
    #                         '--verbose')
if __name__ == '__main__':
    main()
| Python |
#!/bin/python
# Copyright 2008 Clint Banis. All rights reserved.
#
# This script serves as the master module getting the software path right
# before importing all sub modules into its namespace.
#
# The main function runs the Application, which implements a
# command-line-configurable front end for the desktop rendering.
#
# USAGE:
# mayan_cal/script.py --day=1983,7,26 --install-wallpaper --watermark=auto
#
# This script is compiled as the frozen executable entry point giving access
# to the rest of the functionality.
#
__version__ = '1.0'
from sys import path as system_path
from os.path import exists, splitext, join as joinpath
from os.path import sep as PATH_SEP, dirname, basename
from os import chdir, environ
from time import sleep
from traceback import print_exc as traceback
from optparse import OptionParser
from pdb import set_trace as debug, runcall
# Deferred package imports: exec'd into globals() by setup_shell() once
# sys.path and the cwd have been fixed up (the frozen exe starts outside
# the package directory).
IMPORTS = '''
from mayan_cal import gregarian_date
from mayan_cal.affirmation import daily_affirmation
from mayan_cal.drawing import Backend, Wallpaper, saveImageTo, compositeWatermark
from mayan_cal.desktop import Platform
'''
# Fallback screen (width, height) used when sizing the watermark composite.
DESKTOP_RESOLUTION = (1280, 1200)
# NOTE(review): the backslashes below survive unescaped only because \L, \A,
# \M and \W are not recognized escape sequences -- fragile; a raw string or
# doubled backslashes would be safer.  (Currently unused in this view.)
STANDARD_ENDING = '\Local Settings\Application Data\Microsoft\Wallpaper1.bmp'
def define_application():
# Use via setup_shell after importing necessary mayan_cal modules.
## When application programming doesn't need to be deferred until after
## the shell (system package paths and cwd) is setup, this becomes a
## global construct.
global Application
class Application(Backend):
'Provides a command-line front end for utilizing the Wallpaper interface.'
class CommandLine(OptionParser):
USAGE = '%prog'
VERSION = ('%%prog %s' % __version__)
def __init__(self):
usage = Application.CommandLine.USAGE
version = Application.CommandLine.VERSION
OptionParser.__init__(self, usage = usage, version = version)
self.set_defaults(day = todays_date(),
render_bitmap = True,
affirmation_file = None,
watermark_background = 'preferred',
install_wallpaper = True)
# Control operations.
self.add_option("-p", "--preferred-background",
help = "Install a preferred background into the Windows Registry.\n")
# Input options.
self.add_option("-d", "--day", dest = "day",
help = "Specify source day.\n")
self.add_option("-f", "--affirmation-file", "--file",
help = "Specify source affirmation wallpaper rendering.\n")
self.add_option("-m", "--watermark-background", "--watermark",
help = "Program watermark filename or date specifier.\n")
self.add_option("-t", "--watermark-mode",
help = "program watermark positioning and translucence")
self.add_option("-s", "--screen-resolution",
help = "specify resulting background width-height constraints")
# Output options.
self.add_option("-r", "--render-bitmap", "--render-bmp", "--bmp",
dest = "render_bitmap",
action = "store_true",
help = "perform Windows bitmap conversion")
self.add_option("--no-render-bitmap",
dest = "render_bitmap",
action = "store_false",
help = "do not perform bitmap conversion")
self.add_option("-i", "--install-wallpaper",
dest = "install_wallpaper",
action = "store_true",
help = "install wallpaper onto the Windows desktop")
self.add_option("--no-install-wallpaper",
dest = "install_wallpaper",
action = "store_false",
help = "do not install wallpaper onto Windows desktop")
self.add_option("-o", "--output-dir",
help = "Location to put temporary drawing files.")
self.add_option("--tribe-dir",
help = "Location of tribe glyphs.")
self.add_option("--tone-dir",
help = "Location of tone glyphs.")
self.add_option("-g", "--glyph-dir",
help = "Location of tribe and tone glyph directories.")
self.add_option("-v", "--verbose",
dest = "verbose", action = "store_true",
help = "be verbose when operating")
self.add_option("-q", "--quiet",
dest = "verbose", action = "store_false",
help = "be non-verbose when operating")
# Internal Debug
self.add_option("-e", "--debug",
action = "store_true",
help = "Enter internal debugging of the run frame.")
def __init__(self):
# Initial folder set to platform configuration:
Backend.__init__(self, output_dir = self['WallpaperTempDir'],
glyph_dir = self['MayanGlyphs' ],
tribe_dir = self['MayanTribeDir' ],
tone_dir = self['MayanToneDir' ])
# Now this doesn't make the most sense: This class wants to reuse
# the parser, so that you can pass multiple shell invocations throgh
# one Application via a gui front-end, but the above application state
# isn't formalized in a way that the application can be reset between
# runs. No big deal.
@property
def cmdln_parser(self):
# Make one instance!
try: return self._parser
except AttributeError:
# See how this encapsulates the parser routine!
p = self._parser = self.CommandLine().parse_args
return p
## return self.CommandLine().parse_args
def get_shell_options(self, argv = None):
if argv is None:
from sys import argv
return self.cmdln_parser(argv)
def run(self, argv = None, options = None, args = None):
# This function attempt to resolve a 'target' after post-image-processing,
# which becomes the path required for a Windows Registry Desktop install.
if options is None:
assert args is None
(options, args) = self.get_shell_options(argv = argv)
if options.debug:
# Start tracing this frame.
debug()
# First see if the preferred background is specified, and if so,
# do nothing but install the argument in the reg/config and exit.
bg = self.get_preferred_background(options.preferred_background,
options.verbose)
# Alternately do folder set to any directories specified on command line.
self.folders = dict(output_dir = options.output_dir or self['WallpaperTempDir'],
glyph_dir = options.glyph_dir or self['MayanGlyphs'],
tribe_dir = options.tribe_dir or self['MayanTribes'],
tone_dir = options.tone_dir or self['MayanTones'])
# Determine either wallpaper or day, not both.
# assert (bool(options.affirmation_file) ^ bool(type(options.day) is not date))
target = self.get_affirmation_file(options.day, options.verbose) \
or options.affirmation_file
if options.verbose:
self.log("\t%r" % target)
if options.screen_resolution or True:
# Resolve options.SCREEN_RESOLUTION from arguments.
# This is so following functions can take advantage of
# the setting. The resize action can occur if no others
# utilized the resolution.
options.SCREEN_RESOLUTION = DESKTOP_RESOLUTION
target = self.get_watermark_background(options.watermark_background,
target, # As source.
options.SCREEN_RESOLUTION,
options.verbose) or target
if options.render_bitmap:
# Convert to Windows Bitmap and store somewhere.
if options.verbose:
self.log("Converting to Windows Bitmap...")
converted_path = self['CurrentWallpaperPathname'] or 'CurrentWallpaper.bmp'
converted_path = joinpath(self.FOLDERS.OUTPUT, converted_path)
# rename_extension(target, 'bmp')
target = saveImageTo(target, converted_path)
if options.verbose:
self.log("\t%r" % target)
# Install to Windows desktop option.
if options.install_wallpaper:
if options.verbose:
self.log("Installing Windows background desktop wallpaper...")
self.log("\t%r" % target)
Platform.installWallpaper(target)
def get_preferred_background(self, bg, verbose):
if bg:
if bg == 'auto':
# Determine from currently installed system wallpaper.
## converted = self.getConfigUserSystem('ConvertedWallpaper')
bg = self.getConfigUserSystem('Wallpaper')
## if current.endswith(STANDARD_ENDING) and converted != current:
## bg = converted
## else:
## raise RuntimeError("No automatic background determinable:\n\t%r\n\t%r" % (converted, current))
assert bg, RuntimeError("No automatic background determinable!")
elif bg == 'none':
bg = None
if bg is None:
# Remove preferred background option from registry.
# If the registry value is "none", this doesn't happen.
if verbose:
self.log("Removing Preferred Background setting from registry...")
# Todo: Fail safe.
self.delConfigUserSystem('PreferredBackground')
else:
if verbose:
self.log("Setting Preferred Background to %r..." % bg)
# XXX Requires absolute path normalization upon background path.
self.setConfigUserSystem('PreferredBackground', bg)
## from sys import exit as sys_exit
## sys_exit(0) # Success
return bg
def get_affirmation_file(self, day, verbose):
# Formulate the first target based on the affirmations wallpaper
# created for the day, which might be parsed from the command line.
if day:
# Configure affirmation via Wallpaper.
if type(day) is gregarian_date:
af = daily_affirmation(day)
elif True:
# Attempt to construct a date from the string.
print day
af = daily_affirmation(self.parse_datestring(day))
# Use day programming to construct Wallpaper rendering.
wallpaper = Wallpaper(af, backend = self)
# This executes the save function to ensure a rendered file.
if verbose:
self.log("Generating Affirmation Wallpaper file...")
return (wallpaper.full_saved_path)
def get_watermark_background(self, watermark, source, resolution, verbose):
if watermark:
if type(watermark) is str and watermark.lower() in ("auto", "preferred"):
watermark = self.getConfigUserSystem('PreferredBackground')
if not watermark:
watermark = None
if verbose:
self.log("No Preferred Background found!")
else:
if verbose:
self.log("Using Preferred Background Watermark %r." % watermark)
if type(watermark) is str and watermark.lower() == "none":
watermark = None
if watermark:
# Compose a watermarked image.
if verbose:
self.log("Composing Background with Wallpaper Watermark...")
filename, ext = splitext(basename(source))
bg = splitext(basename(watermark))[0]
composition = '%s on %s%s' % (filename, bg, ext)
# Draw the temporary watermarked background image file.
bg = joinpath(dirname(source), composition)
compositeWatermark(watermark,
source,
resolution,
bg)
if verbose:
self.log("\t%r" % bg)
return bg
month_names = {'january': 1, 'february': 2, 'march': 3, 'april': 4,
'may': 5, 'june': 6, 'july': 7, 'august': 8,
'september': 9, 'october': 9, 'november': 11, 'december': 12}
def parse_datestring(self, datestr):
month, day, year = datestr.split()
year = int(year)
day = int(day)
month = self.month_names.get(month.lower()) or int(month)
return gregarian_date(year, month, day)
ConfigKey = getattr(Platform, 'DesktopKey', None)
# ConfigKey += 'MayanCalendar\\'
def getConfigUserSystem(self, key, default = None):
if Platform and self.ConfigKey:
try: return Platform.RegistryAPI[self.ConfigKey + key]
except WindowsError:
pass
return default
def setConfigUserSystem(self, key, value):
if Platform and self.ConfigKey:
try: Platform.RegistryAPI[self.ConfigKey + key] = value
except WindowsError:
raise # What else?
__getitem__ = getConfigUserSystem
__setitem__ = setConfigUserSystem
def delConfigUserSystem(self, key):
pass
def log(self, message):
try: stream = getattr(self, 'logStream')
except AttributeError:
from sys import stdout as stream
print >> stream, message
def runForLog(self, function, *args, **kwd):
try: prev = self.logStream
except AttributeError:
prev = AttributeError
from cStringIO import StringIO
buf = self.logStream = StringIO()
# Ignores value of function.
try: function(*args, **kwd)
finally:
if prev is AttributeError:
del self.logStream
else:
self.logStream = prev
return buf.getvalue()
return Application
def todays_date():
    """Return the current local day as a gregarian_date."""
    # Time might be relative, but not on the desktop.
    from time import localtime, mktime
    stamp = mktime(localtime())
    return gregarian_date.fromtimestamp(stamp)
def rename_extension(filename, new_ext):
    """Return *filename* with its extension replaced by *new_ext*."""
    root = splitext(filename)[0]
    return root + '.' + new_ext
def make_application(*args, **kwd):
    """Instantiate the lazily defined Application class."""
    return define_application()(*args, **kwd)
def setup_shell(script = None):
    """Prepare the runtime environment, then import the package modules.

    Adds the package's parent directory (derived from *script*, default
    __file__) to sys.path and chdirs there, so the frozen executable can
    find mayan_cal; finally exec's IMPORTS into this module's globals.
    """
    if script is None:
        script = globals().get('__file__', None)
    if script:
        # Two dirname()s: script lives at <package_path>/mayan_cal/script.py.
        package_path = dirname(dirname(script)).strip()
        if package_path == '':
            package_path = '.'
        # Makes this product module accessible to the script environment.
        system_path.append(package_path)
        if package_path != '.':
            chdir(package_path)
    exec IMPORTS in globals(), globals()
def main():
    """Script entry point: set up the package path, then run the Application.

    On any unexpected error the traceback is printed and the console waits
    for input so the error stays visible during headless (scheduled)
    invocation; typing 'debug' drops into pdb.
    """
    setup_shell()
    try:
        # from pdb import runcall as apply
        # NOTE(review): py2 builtin apply() == make_application().run() with
        # no arguments; the commented import swaps in pdb tracing.
        apply(make_application().run)
    except SystemExit:
        pass
    except:
        # This is to facilitate watching the console error during headless (scheduled) invocation.
        # Don't do this for production environments (via Scheduled Task).
        traceback()
        try: line = raw_input(' > ')
        except KeyboardInterrupt:
            pass
        else:
            if line == 'debug':
                debug()
if __name__ == '__main__':
    main()
| Python |
from mayan_cal import today
from mayan_cal.structure import Day
from mayan_cal.calculate import get_tribe_tone, get_guiding_power
class Template(object):
    """Affirmation text template.

    Template(values) -> formatted string; Template(values, kind='parts')
    -> generator of words/placeholder substitutions.  (Construction never
    yields a Template instance -- __new__ returns the rendering directly.)
    """
    STRING = \
'''I %(TONE_POWER)s in order to %(TRIBE_ACTION)s
%(TONE_ACTION_TENSED)s %(TRIBE_ESSENCE)s
I seal the %(TIMECELL)s of %(TRIBE_POWER)s
With the %(TONE_NAME)s tone of %(TONE_ESSENCE)s
I am guided by the power of %(GUIDING_POWER)s
'''
    # Are all these parts really necessary? Not really; just use
    # Template.STRING % Affirmation(date).formatted_values
    class Part:
        """A named placeholder; ``part % values`` looks the name up."""
        def __init__(self, name):
            self.name = name
        def __mod__(self, values):
            return values[self.name]
    TONE_NAME = Part('TONE_NAME')
    TONE_ACTION = Part('TONE_ACTION')
    TONE_POWER = Part('TONE_POWER')
    TONE_ESSENCE = Part('TONE_ESSENCE')
    TONE_ACTION_TENSED = Part('TONE_ACTION_TENSED')
    TRIBE_ACTION = Part('TRIBE_ACTION')
    TRIBE_POWER = Part('TRIBE_POWER')
    TRIBE_ESSENCE = Part('TRIBE_ESSENCE')
    TIMECELL = Part('TIMECELL')
    GUIDING_POWER = Part('GUIDING_POWER')
    NEWLINE = '\n'
    # Literal words interleaved with Part placeholders, mirroring STRING.
    parts = ('I', TONE_POWER, 'in order to', TRIBE_ACTION, NEWLINE,
             TONE_ACTION_TENSED, TRIBE_ESSENCE, NEWLINE,
             'I seal the', TIMECELL, 'of', TRIBE_POWER, NEWLINE,
             'With the', TONE_NAME, 'tone of', TONE_ESSENCE, NEWLINE,
             'I am guided by the power of', GUIDING_POWER, NEWLINE)
    def __new__(cls, values, kind = 'string'):
        if kind == 'parts':
            return ((piece % values or piece) if isinstance(piece, cls.Part)
                    else piece
                    for piece in cls.parts)
        if kind == 'string':
            return cls.STRING % values
TENSE = 'ing'
def tensed_word_form(action):
    """Return *action* in its -ing form, dropping trailing 'e's.

    e.g. 'create' -> 'creating', 'seal' -> 'sealing', '' -> 'ing'.
    """
    # Create a grammatically correct word-form by stripping
    # trailing Es.
    i = len(action)
    # The i > 0 guard fixes a crash: an empty or all-'e' word previously
    # let i go negative, wrapping the index around and finally raising
    # IndexError.
    while i > 0 and action[i - 1] in ('e', 'E'):
        i -= 1
    return action[:i] + TENSE
# XXX Action/Power TENSING
class Affirmation(Day):
    """A Day that renders itself as an affirmation (string or word parts)."""
    @property
    def values(self):
        # Raw template substitutions drawn from the day's tone/tribe/guide.
        tone = self.tone
        tribe = self.tribe
        return dict(TONE_NAME = tone.name,
                    TONE_ACTION = tone.action,
                    TONE_POWER = tone.power,
                    TONE_ESSENCE = tone.essence,
                    TONE_ACTION_TENSED = tensed_word_form(tone.action),
                    TRIBE_ACTION = tribe.action,
                    TRIBE_POWER = tribe.power,
                    TRIBE_ESSENCE = tribe.essence,
                    TIMECELL = tribe.timecell.name,
                    GUIDING_POWER = self.guide)
    @property
    def formatted_values(self):
        # Uppercased string forms, as the Template expects.
        return dict((name, str(value).upper())
                    for (name, value) in self.values.iteritems())
    @property
    def as_string(self):
        return Template(self.formatted_values, kind = 'string')
    @property
    def as_parts(self):
        return Template(self.formatted_values, kind = 'parts')
    def __str__(self):
        return self.as_string
    def __iter__(self):
        return self.as_parts
def daily_affirmation(day):
    """Build the Affirmation for *day* (a gregarian date)."""
    tribe, tone = get_tribe_tone(day)
    return Affirmation(day, tribe, tone, get_guiding_power(day))
def todays_affirmation():
    """Affirmation for the current local day."""
    day = today()
    return daily_affirmation(day)
| Python |
# Standard library requirements.
# Note that datetime only supports years back to 1 AD.
# Todo: set calendar starting epoch.
from pprint import saferepr
from datetime import date as gregorian_date
# xxx 'gregarian' is a long-standing misspelling of 'gregorian'; both
# aliases are kept so existing imports keep working.
from datetime import date as gregarian_date
from time import localtime as localnow
from time import gmtime as gmnow
# Which should I use..?
# Local time is the chosen convention (vs. gmnow); today() depends on it.
now = localnow
def checkGregarianDate(date):
    """Raise TypeError unless *date* is exactly a gregarian_date.

    The original used ``assert`` for input validation (a no-op under -O)
    with a TypeError *instance* as the assertion message; raise the
    clearly intended TypeError explicitly instead.
    """
    if type(date) is not gregarian_date:
        raise TypeError(type(date).__name__)
def today():
    """Return the current local day as a gregarian_date."""
    year, month, mday = now()[:3]
    return gregarian_date(year, month, mday)
def birthday(gmdate, timezone):
    # Return a date converted from GM timezone to local timezone.
    # NOTE(review): unimplemented stub -- always returns None.
    pass
# Mayan Constants
GALACTIC_CONSTANT = 260   # Days in the tzolkin cycle; kin numbers wrap at this.
NUMERICAL_BASE = 20       # Tribe count; kin % NUMERICAL_BASE selects the tribe.
HIGHEST_TONE = 13         # Tone count; kin % HIGHEST_TONE selects the tone.
# Base object class.
_missing = object() # Sentinel distinguishing "attribute absent" from None.
class Base:
    """Mixin whose repr is driven by the subclass's Meta.attributes list."""
    class Meta:
        attributes = ()
    def __repr__(self):
        # XXX pprint.saferepr doesn't actually protect against recursion
        # for instance-type repr.
        shown = []
        for name in self.Meta.attributes:
            value = getattr(self, name, _missing)
            if value is not _missing:
                shown.append('%s=%s' % (name, saferepr(value)))
        shown = ' '.join(shown)
        return '<%s%s%s>' % (self.__class__.__name__,
                             shown and ' ' or '', shown)
# XXX Remove debugging facility when not needed.
from pdb import set_trace as debug, runcall
# Inject debug()/runcall() into the builtin namespace so every module can use
# them without importing pdb.  __builtins__ is a dict in __main__ but a module
# object elsewhere, hence the two-way lookup.
__ns__ = type(__builtins__) is dict and __builtins__ or __builtins__.__dict__
__ns__.update(debug = debug, runcall = runcall)
| Python |
## Windows (TM) Desktop Wallpaper Install.
# Copyright 2008 Clint Banis. All rights reserved.
#
# Built on a Windows XP machine.
try:
    class Windows:
        '''Windows desktop wallpaper installation: registry writes plus a
        SystemParametersInfo signal to make the desktop reload the image.'''
        # According to http://support.microsoft.com/default.aspx?scid=97142
        SPI_SETDESKWALLPAPER = 20
        # Module bound as a class attribute; accessed as self.ctypes below.
        import ctypes
        # Registry settings.
        StretchWallpaperStyle = '2'
        CenterWallpaperStyle = '0'
        TileWallpaperOff = '0'
        DefaultWallpaperStyle = StretchWallpaperStyle
        DesktopKey = 'HKCU\\Control Panel\\Desktop\\'
        # NOTE: the classmethods below name their first parameter 'self',
        # but it is bound to the class.
        @classmethod
        def installWallpaper(self, path):
            '''Write *path* into the registry, then signal the desktop.'''
            self.installRegistry(path)
            self.signalDesktop(path)
        @classmethod
        def signalDesktop(self, path):
            'Signal the Windows Desktop to change the Background Wallpaper.'
            # Active State article #435877.
            #
            # Registers a change event with the windows desktop subsystem causing it
            # to reload the background wallpaper image from the path being set.
            #
            # This should be done after all wallpaper registry changes are made.
            #
            self.ctypes.windll.user32.SystemParametersInfoA \
                (self.SPI_SETDESKWALLPAPER, 0, path, 0)
        @classmethod
        def installRegistry(self, path):
            '''Write the wallpaper path and style values under DesktopKey.'''
            a = self.RegistryAPI
            d = self.DesktopKey
            a[d + 'Wallpaper'         ] = path
            a[d + 'OriginalWallpaper' ] = path
            a[d + 'ConvertedWallpaper'] = path
            a[d + 'WallpaperStyle'    ] = self.DefaultWallpaperStyle
            a[d + 'TileWallpaper'     ] = self.TileWallpaperOff
        class RegistryAPI:
            # Concise winreg providing key-value get-set item descriptors.
            import _winreg
            allPermissions = (_winreg.KEY_ALL_ACCESS|
                              _winreg.KEY_CREATE_LINK|
                              _winreg.KEY_CREATE_SUB_KEY|
                              _winreg.KEY_ENUMERATE_SUB_KEYS|
                              _winreg.KEY_EXECUTE|
                              _winreg.KEY_NOTIFY|
                              _winreg.KEY_QUERY_VALUE|
                              _winreg.KEY_READ|
                              _winreg.KEY_SET_VALUE|
                              _winreg.KEY_WRITE)
            keyOpenFlags = (_winreg.KEY_SET_VALUE|_winreg.KEY_QUERY_VALUE)
            # Lookup.
            key_path_aliases = {'HKCU': 'HKEY_CURRENT_USER'}
            def get_key_path(self, path):
                '''Split 'HKCU\\a\\b' into (hive constant, 'a\\b').'''
                assert type(path) is str
                path = path.replace('/', '\\')
                first = path.find('\\')
                if first >= 0:
                    key = path[:first]
                    path = path[first+1:]
                    # Resolve key.
                    key = self.key_path_aliases.get(key, key)
                    assert key.startswith('HKEY_')
                    return (getattr(self._winreg, key), path)
                # NOTE(review): bare paths yield a 2-tuple here, which
                # get_key_path_name propagates -- callers that unpack three
                # values would fail; confirm bare paths never occur.
                return (None, path)
            def get_key_path_name(self, path):
                '''Split into (hive, subkey path, value name).'''
                key, path = self.get_key_path(path)
                last = path.rfind('\\')
                if last >= 0:
                    return (key, path[:last], path[last+1:])
                return (key, path)
            # Registry access.
            def open_key_name(self, path):
                '''Open the subkey for get/set access; return (handle, name).'''
                key, path, name = self.get_key_path_name(path)
                return (self._winreg.OpenKey(key, path, 0, self.keyOpenFlags), name)
            def close_with_key_access(self, key, function, *args, **kwd):
                # Run *function* with the open key, always closing it after.
                try: return function(key, *args, **kwd)
                finally:
                    self._winreg.CloseKey(key)
            # Accessors.
            def getwith_key_value(self, key, name):
                (data, kind) = self._winreg.QueryValueEx(key, name)
                # assert kind == self._winreg.REG_SZ
                return str(data)
            def setwith_key_value(self, key, name, value):
                # Always writes as REG_SZ (string) data.
                return self._winreg.SetValueEx(key, name, 0, self._winreg.REG_SZ, value)
            def get_key_value(self, name):
                key, name = self.open_key_name(name)
                return self.close_with_key_access(key, self.getwith_key_value, name)
            def set_key_value(self, name, value):
                key, name = self.open_key_name(name)
                return self.close_with_key_access(key, self.setwith_key_value, name, value)
            __getitem__ = get_key_value
            __setitem__ = set_key_value
        # Singleton instance replaces the class, enabling Windows.RegistryAPI[path].
        RegistryAPI = RegistryAPI()
        @classmethod
        def makeShortcut(self, shortcut_path, target_path, *args, **kwd):
            '''Create a .lnk shell shortcut pointing at *target_path*.

            NOTE(review): *args/**kwd are accepted but never used, and
            '.save()' (lowercase) relies on COM dispatch resolving the
            WScript 'Save' method -- confirm on a Windows box.
            '''
            import win32com.client
            shell = win32com.client.Dispatch("WScript.Shell")
            if len(shortcut_path) > 4 and shortcut_path[-4:].lower() != '.lnk':
                shortcut_path = shortcut_path + '.lnk'
            shortcut = shell.CreateShortCut(shortcut_path)
            shortcut.Targetpath = target_path
            # Put arguments in the .Arguments property.
            # What about run-in?
            shortcut.save()
            return shortcut
# http://www.blog.pythonlibrary.org/?p=21
## # link.py
## # From a demo by Mark Hammond, corrupted by Mike Fletcher
## from win32com.shell import shell
## import pythoncom, os
##
## class PyShortcut:
## def __init__( self ):
## self._base = pythoncom.CoCreateInstance(
## shell.CLSID_ShellLink, None,
## pythoncom.CLSCTX_INPROC_SERVER, shell.IID_IShellLink
## )
## def load( self, filename ):
## # Get an IPersist interface
## # which allows save/restore of object to/from files
## self._base.QueryInterface( pythoncom.IID_IPersistFile ).Load( filename )
## def save( self, filename ):
## self._base.QueryInterface( pythoncom.IID_IPersistFile ).Save( filename,
## 0 )
## def __getattr__( self, name ):
## if name != "_base":
## return getattr( self._base, name )
##
## if __name__=='__main__':
## import sys
## file = sys.argv[1]
## shortcut = PyShortcut()
## if os.path.exists( file ):
## # load and dump info from file...
## shortcut.load( file )
## # now print data...
## print 'Shortcut in file %s to
## file:\n\t%s\nArguments:\n\t%s\nDescription:\n\t%s\nWorking
## Directory:\n\t%s'%(
## file,
## shortcut.GetPath(shell.SLGP_SHORTPATH)[0],
## shortcut.GetArguments(),
## shortcut.GetDescription(),
## shortcut.GetWorkingDirectory()
## )
## else:
## # create the shortcut using rest of args...
## data = map( None, sys.argv[2:], ("SetPath", "SetArguments",
## "SetDescription", "SetWorkingDirectory") )
## for value, function in data:
## if value and function:
## # call function on each non-null value
## getattr( shortcut, function)( value )
## shortcut.save( file )
## import win32com.client
## import winshell
##
## userDesktop = winshell.desktop()
## shell = win32com.client.Dispatch('WScript.Shell')
##
## shortcut = shell.CreateShortCut(userDesktop + '\\Zimbra Webmail.lnk')
## shortcut.Targetpath = r'C:\Program Files\Mozilla Firefox\firefox.exe'
## shortcut.Arguments = 'http://mysite.com/auth/preauth.php'
## shortcut.WorkingDirectory = r'C:\Program Files\Mozilla Firefox'
## shortcut.save()
## import _winreg
## ffkey = _winreg.OpenKey( _winreg.HKEY_LOCAL_MACHINE, 'SOFTWARE\\Mozilla\\Mozilla Firefox')
## ffver = _winreg.QueryValueEx( ffkey, 'CurrentVersion' )[0]
## print ffver
## ffmainkey = _winreg.OpenKey( ffkey, sub + "\\Main" )
## ffpath = _winreg.QueryValueEx( ffmainkey, 'PathToExe' )[0]
## _winreg.CloseKey( ffkey )
## _winreg.CloseKey( ffmainkey )
## import _winreg
## print _winreg.QueryValue(_winreg.HKEY_CLASSES_ROOT,
## 'FirefoxURL\shell\open\command')
## import win32com.client
## import winshell
##
## shortcut = shell.CreateShortCut(userDesktop + '\\MyShortcut.lnk')
## shortcut.TargetPath = r'Program Files\Mozilla Firefox\firefox.exe'
## shortcut.Arguments = r'http://mysite.com/auth/preauth.php'
## shortcut.WorkingDirectory = r'C:\Program Files\Mozilla Firefox'
## shortcut.save()
## import os
## import winshell
##
## winshell.CreateShortcut (
## Path=os.path.join (winshell.desktop (), "Zimbra Monkeys.lnk"),
## Target=r"c:\Program Files\Mozilla Firefox\firefox.exe",
## Arguments="http://mysite.com/auth/preauth.php",
## Description="Open http://localhost with Firefox",
## StartIn=r'C:\Program Files\Mozilla Firefox'
## )
# Scheduled Tasks.
## import pythoncom, win32api
## import time
## from win32com.taskscheduler import taskscheduler
##
## def create_daily_task(name, cmd, hour=None, minute=None):
## """creates a daily task"""
## cmd = cmd.split()
## ts = pythoncom.CoCreateInstance(taskscheduler.CLSID_CTaskScheduler,None,
## pythoncom.CLSCTX_INPROC_SERVER,
## taskscheduler.IID_ITaskScheduler)
##
## if '%s.job' % name not in ts.Enum():
## task = ts.NewWorkItem(name)
##
## task.SetApplicationName(cmd[0])
## task.SetParameters(' '.join(cmd[1:]))
## task.SetPriority(taskscheduler.REALTIME_PRIORITY_CLASS)
## task.SetFlags(taskscheduler.TASK_FLAG_RUN_ONLY_IF_LOGGED_ON)
## task.SetAccountInformation('', None)
## ts.AddWorkItem(name, task)
## run_time = time.localtime(time.time() + 300)
## tr_ind, tr = task.CreateTrigger()
## tt = tr.GetTrigger()
## tt.Flags = 0
## tt.BeginYear = int(time.strftime('%Y', run_time))
## tt.BeginMonth = int(time.strftime('%m', run_time))
## tt.BeginDay = int(time.strftime('%d', run_time))
## if minute is None:
## tt.StartMinute = int(time.strftime('%M', run_time))
## else:
## tt.StartMinute = minute
## if hour is None:
## tt.StartHour = int(time.strftime('%H', run_time))
## else:
## tt.StartHour = hour
## tt.TriggerType = int(taskscheduler.TASK_TIME_TRIGGER_DAILY)
## tr.SetTrigger(tt)
## pf = task.QueryInterface(pythoncom.IID_IPersistFile)
## pf.Save(None,1)
## task.Run()
## else:
## raise KeyError("%s already exists" % name)
##
## task = ts.Activate(name)
## exit_code, startup_error_code = task.GetExitCode()
## return win32api.FormatMessage(startup_error_code)
## Pywin32 comes with a module that lets you do this, win32com.taskscheduler.
## You can use PyITaskScheduler.SetTargetComputer to access tasks on remote
## machines.
except:
    # Off-Windows (no ctypes.windll / _winreg) the class body fails and the
    # platform is marked unavailable.  NOTE(review): this bare except also
    # silently swallows genuine errors (typos) in the class body.
    Windows = False
# Platform is the Windows class on Windows, False elsewhere; callers test it.
Platform = Windows
# Used in computing file state in converted wallpapers.
def file_checksum(path):
    """Return the hex MD5 digest of the file at *path*.

    Fixes over the original: the file is opened in binary mode (text mode
    performed newline translation, corrupting checksums of binary data such
    as wallpapers), the handle is closed deterministically, hashlib is
    preferred over the long-deprecated md5 module, and the dead EOFError
    handler (file.read returns '' at EOF, it does not raise) is gone.
    """
    # Faster RCs possible.
    try:
        from hashlib import md5
    except ImportError:
        from md5 import md5  # Pre-2.5 fallback.
    BUFSIZE = 2 ** 14 # 16384
    hash = md5()
    stream = open(path, 'rb')
    try:
        while True:
            buf = stream.read(BUFSIZE)
            if not buf:
                break
            hash.update(buf)
    finally:
        stream.close()
    return hash.hexdigest()
| Python |
# Galactic Harmonic Module.
from mayan_cal import GALACTIC_CONSTANT, NUMERICAL_BASE, HIGHEST_TONE
from mayan_cal import gregarian_date, checkGregarianDate
from mayan_cal.structure import Tribe, Tone
def digit_order():
    """Yield each Tribe's digit, ordered by tribe number."""
    numbers = [key for key in Tribe.index.keys() if type(key) is int]
    numbers.sort()
    for number in numbers:
        yield Tribe.index[number].digit
# Todo: Complete conversion algorithm.
# This conversion chart was generated by print_year_conversion().
# Maps a gregorian year to its conversion number; every 52 years three
# years converge on the same number (each row is year, year+52, year+104).
# Fixed two typos that broke the 52-year pattern AND silently shadowed
# other entries via duplicate dict keys: 1997 -> 1897 (1997 correctly
# maps to 97 below) and 1992 -> 1892 (1992 correctly maps to 92).
# NOTE: calculate_year_conversion() regenerates this dict from
# conversion_to_year at import time anyway.
year_conversion_chart = {1897: 257, 1949: 257, 2001: 257,
                         1902: 2, 1954: 2, 2006: 2,
                         1876: 132, 1928: 132, 1980: 132,
                         1907: 7, 1959: 7, 2011: 7,
                         1881: 137, 1933: 137, 1985: 137,
                         1860: 12, 1912: 12, 1964: 12,
                         1886: 142, 1938: 142, 1990: 142,
                         1865: 17, 1917: 17, 1969: 17,
                         1891: 147, 1943: 147, 1995: 147,
                         1870: 22, 1922: 22, 1974: 22,
                         1896: 152, 1948: 152, 2000: 152,
                         1875: 27, 1927: 27, 1979: 27,
                         1901: 157, 1953: 157, 2005: 157,
                         1880: 32, 1932: 32, 1984: 32,
                         1906: 162, 1958: 162, 2010: 162,
                         1885: 37, 1937: 37, 1989: 37,
                         1859: 167, 1911: 167, 1963: 167,
                         1890: 42, 1942: 42, 1994: 42,
                         1864: 172, 1916: 172, 1968: 172,
                         1895: 47, 1947: 47, 1999: 47,
                         1869: 177, 1921: 177, 1973: 177,
                         1900: 52, 1952: 52, 2004: 52,
                         1874: 182, 1926: 182, 1978: 182,
                         1905: 57, 1957: 57, 2009: 57,
                         1879: 187, 1931: 187, 1983: 187,
                         1858: 62, 1910: 62, 1962: 62,
                         1884: 192, 1936: 192, 1988: 192,
                         1863: 67, 1915: 67, 1967: 67,
                         1889: 197, 1941: 197, 1993: 197,
                         1868: 72, 1920: 72, 1972: 72,
                         1894: 202, 1946: 202, 1998: 202,
                         1873: 77, 1925: 77, 1977: 77,
                         1899: 207, 1951: 207, 2003: 207,
                         1878: 82, 1930: 82, 1982: 82,
                         1904: 212, 1956: 212, 2008: 212,
                         1883: 87, 1935: 87, 1987: 87,
                         1909: 217, 1961: 217, 2013: 217,
                         1888: 92, 1940: 92, 1992: 92,
                         1862: 222, 1914: 222, 1966: 222,
                         1893: 97, 1945: 97, 1997: 97,
                         1867: 227, 1919: 227, 1971: 227,
                         1898: 102, 1950: 102, 2002: 102,
                         1872: 232, 1924: 232, 1976: 232,
                         1903: 107, 1955: 107, 2007: 107,
                         1877: 237, 1929: 237, 1981: 237,
                         1908: 112, 1960: 112, 2012: 112,
                         1882: 242, 1934: 242, 1986: 242,
                         1861: 117, 1913: 117, 1965: 117,
                         1887: 247, 1939: 247, 1991: 247,
                         1866: 122, 1918: 122, 1970: 122,
                         1892: 252, 1944: 252, 1996: 252,
                         1871: 127, 1923: 127, 1975: 127}
# Inverse chart: conversion number -> the three years (y, y+52, y+104)
# sharing it.  Fixed two entries that violated the 52-year spacing every
# other row obeys: 252 began with 1992 (should be 1892; 1992 belongs to 92)
# and 257 began with 1997 (should be 1897; 1997 belongs to 97).
conversion_to_year = {
    # 2, 7 are wrapped around below.
    # 3 -1
    # 1 -2
    # 0 -1
    # 2 +2
    # 4 +2
    # 10
    12: [1860, 1912, 1964],
    17: [1865, 1917, 1969],
    22: [1870, 1922, 1974],
    27: [1875, 1927, 1979],
    32: [1880, 1932, 1984],
    37: [1885, 1937, 1989],
    42: [1890, 1942, 1994],
    47: [1895, 1947, 1999],
    52: [1900, 1952, 2004],
    57: [1905, 1957, 2009],
    # 10
    62: [1858, 1910, 1962],
    67: [1863, 1915, 1967],
    72: [1868, 1920, 1972],
    77: [1873, 1925, 1977],
    82: [1878, 1930, 1982],
    87: [1883, 1935, 1987],
    92: [1888, 1940, 1992],
    97: [1893, 1945, 1997],
    102: [1898, 1950, 2002],
    107: [1903, 1955, 2007],
    112: [1908, 1960, 2012],
    # 10
    117: [1861, 1913, 1965],
    122: [1866, 1918, 1970],
    127: [1871, 1923, 1975],
    132: [1876, 1928, 1980],
    137: [1881, 1933, 1985],
    142: [1886, 1938, 1990],
    147: [1891, 1943, 1995],
    152: [1896, 1948, 2000],
    157: [1901, 1953, 2005],
    162: [1906, 1958, 2010],
    # 11
    167: [1859, 1911, 1963],
    172: [1864, 1916, 1968],
    177: [1869, 1921, 1973],
    182: [1874, 1926, 1978],
    187: [1879, 1931, 1983],
    192: [1884, 1936, 1988],
    197: [1889, 1941, 1993],
    202: [1894, 1946, 1998],
    207: [1899, 1951, 2003],
    212: [1904, 1956, 2008],
    217: [1909, 1961, 2013],
    222: [1862, 1914, 1966],
    227: [1867, 1919, 1971],
    232: [1872, 1924, 1976],
    237: [1877, 1929, 1981],
    242: [1882, 1934, 1986],
    247: [1887, 1939, 1991],
    252: [1892, 1944, 1996],
    257: [1897, 1949, 2001],
    2: [1902, 1954, 2006],
    7: [1907, 1959, 2011],
    }
# !! Make sure important files aren't overwritten with this naming scheme !!
# Variable name and module path used by print_year_conversion() when it
# writes the generated chart out as importable Python source.
YEAR_CONVERSION_CHART_VARIABLE_NAME = 'year_conversion_chart'
YEAR_CONVERSION_CHART_FILENAME = 'mayan_cal/%s.py' % (YEAR_CONVERSION_CHART_VARIABLE_NAME)
def calculate_year_conversion():
    """Rebuild the year_conversion_chart module global (by the name in
    YEAR_CONVERSION_CHART_VARIABLE_NAME) from conversion_to_year."""
    mapping = dict()
    for (conversion, years) in conversion_to_year.iteritems():
        for year in years:
            mapping[year] = conversion
    globals()[YEAR_CONVERSION_CHART_VARIABLE_NAME] = mapping
calculate_year_conversion()
def print_year_conversion(stream = None, variable_name = YEAR_CONVERSION_CHART_VARIABLE_NAME):
    """Write conversion_to_year as a '<variable_name> = {...}' assignment.

    With no *stream*, writes YEAR_CONVERSION_CHART_FILENAME.  Fix: a stream
    we opened ourselves is now closed on the way out -- the original leaked
    the handle, risking an unflushed chart file.
    """
    # Initialize stream.
    close_when_done = False
    if stream is None:
        stream = open(YEAR_CONVERSION_CHART_FILENAME, 'w')
        close_when_done = True
    try:
        # Create a tab constant (aligns continuation lines under the '{').
        tab = len(variable_name) + len(' = {')
        tab = ' ' * tab
        # Print variable assignment header.
        w = stream.write
        w(variable_name)
        w(' = {')
        items = conversion_to_year.items()
        p = len(items)
        pSub1 = p - 1
        # Iterate items and years tracking indices and upper bounds.
        for i in xrange(0, p):
            c, s = items[i]
            x = len(s)
            xSub1 = x - 1
            for y in xrange(0, x):
                if y == 0 and i != 0:
                    w(tab)
                # Write formatted entry.
                w('%d: %3d' % (s[y], c))
                # Write line/section terminators.
                if i == pSub1:
                    w(y == xSub1 and '}\n' or ', ')
                else:
                    w(y == xSub1 and ',\n' or ', ')
    finally:
        if close_when_done:
            stream.close()
# There is no need to extrapolate for other gregarian months, so this
# will remain a static chart.
# Cumulative day-of-year offsets per month (non-leap years).  Oct-Dec
# appear pre-reduced modulo GALACTIC_CONSTANT (273 -> 13, 304 -> 44,
# 334 -> 74) so the summed total stays inside one wrap of the 260-day
# cycle handled in get_kin_number.
month_conversion_chart = {1: 0, 2: 31, 3: 59,
                          4: 90, 5: 120, 6: 151,
                          7: 181, 8: 212, 9: 243,
                          10: 13, 11: 44, 12: 74}
def get_year_conversion(year):
    """Return the conversion number for *year*.

    Every 52 years, three years converge on the same conversion number.
    Raises ValueError for years outside the chart (the original used an
    ``assert`` -- stripped under -O -- with a ValueError instance as its
    message, so ValueError was clearly the intent).
    """
    try:
        return year_conversion_chart[year]
    except KeyError:
        raise ValueError(year)
def get_month_conversion(month):
    """Return the cumulative-day conversion for *month* (1..12).

    Raises ValueError for out-of-range months (replacing the strippable
    ``assert`` whose message was a ValueError instance).
    """
    try:
        return month_conversion_chart[month]
    except KeyError:
        raise ValueError(month)
def get_kin_number(day):
    """Convert a gregarian date to its kin number within the 260-day cycle.

    Raises TypeError (via checkGregarianDate) for non-date input and
    ValueError for unchartered years/months.
    """
    checkGregarianDate(day)
    # (A duplicate `assert type(day) is gregarian_date` was redundant with
    # checkGregarianDate and has been removed.)
    total = get_year_conversion(day.year) + get_month_conversion(day.month) + day.day
    if total < 0:
        raise ValueError('Conversion algorithm yielded negative value! (%d)' % total)
    # Reduce modulo the cycle.  A single subtraction was not always enough:
    # the largest chart values (257 + 243 + 30 = 530) exceed two cycles.
    return total % GALACTIC_CONSTANT
def get_tribe_tone(day):
    """Return the (Tribe, Tone) pair for the gregarian date *day*."""
    kin = get_kin_number(day)
    # Tone numbers run 1..13, so a zero remainder means the highest tone.
    tone_number = (kin % HIGHEST_TONE) or HIGHEST_TONE
    return (Tribe.index[kin % NUMERICAL_BASE], Tone.index[tone_number])
def get_guiding_power(kin_number):
    # The guiding power rotates with each mayan day from the seed
    # for each tribe and then repeating. The day must be converted
    # to a 260-day mayan calendar date first.
    # TODO: unimplemented stub -- currently always returns None.
    pass
class GuidingPowerFile(list):
    """Parse a guiding-power file into a list of tribe numbers."""

    def __init__(self, filename):
        self.start_date = None
        list.__init__(self, self.parse(filename))

    def parse(self, filename):
        """Yield the tribe number for each power line in *filename*.

        '#' starts a comment; '[date]' section headers are parsed but gap
        detection and validation are still TODO.
        """
        gaps = []
        for raw in open(filename):
            pos = raw.find('#')
            if pos >= 0:
                raw = raw[:pos]
            line = raw.strip()
            if not line:
                continue
            if line.startswith('['):
                assert line[-1] == ']'
                date = self.parse_datestring(line[1:-1])
                # TODO: detect gaps / determine start_date.
            else:
                tribe = self.power_index[line.lower()]
                # TODO: validate / verify against gaps.
                yield tribe

    def parse_datestring(self, datestring):
        # TODO: unimplemented stub.
        pass

    # Lowercased power name -> tribe number, built once at class creation.
    power_index = dict((tribe.power, tribe.number)
                       for tribe in Tribe.index.itervalues()
                       if isinstance(tribe, Tribe))
def analyze_guiding_powers(filename):
    """Load *filename* as a GuidingPowerFile and return it."""
    powers = GuidingPowerFile(filename)
    return powers
| Python |
from django.conf.urls.defaults import *

# Routes for the fbauthfacade app: anything under graph/ is handled by
# the view_graph view (old-style string view reference).
urlpatterns = patterns('fbauthfacade.views',
    (r'graph/(.*)', 'view_graph'),
)
| Python |
from django.utils import simplejson as json
from django.http import HttpResponseRedirect, HttpResponse
from fbauthfacade import AUTH_CODE, ACCESS_TOKEN
from urllib import urlencode, splitquery
from cgi import parse_qsl
def _get_request_param(request, name):
    """Return required GET parameter *name*; raises KeyError when missing."""
    value = request.GET[name]
    return value
def view_graph(request, path = None):
    """Dispatch facade graph requests: the two OAuth endpoints get special
    handling, everything else is served as a fake graph object."""
    if path == 'oauth/access_token':
        return view_oauth_access_token(request)
    elif path == 'oauth/authorize':
        return view_oauth_authorize(request)
    # Otherwise, handle normal graph object accesses.
    payload = _get_graph_data(path)
    return HttpResponse(json.dumps(payload, indent = 1))
def view_oauth_authorize(request):
    """Fake Facebook authorize endpoint: redirect straight back to the
    caller's redirect_uri with the canned AUTH_CODE attached."""
    # client_id is accepted but ignored by this facade.
    target = _get_request_param(request, 'redirect_uri')
    url = rebuild_query(target, dict(code = AUTH_CODE))
    return HttpResponseRedirect(url)
def view_oauth_access_token(request):
    """Fake token exchange: trade the canned AUTH_CODE for ACCESS_TOKEN.

    Responds with an urlencoded body like the real endpoint.  Fixed: a
    wrong code now yields an explicit 400 response instead of falling off
    the end and returning None (which makes Django raise a ValueError).
    """
    # client_id / client_secret / redirect_uri are accepted but unchecked.
    code = _get_request_param(request, 'code')
    if code == AUTH_CODE:
        response = dict(access_token = ACCESS_TOKEN)
        return HttpResponse(urlencode(response))
    return HttpResponse(urlencode(dict(error = 'invalid_code')), status = 400)
def _get_graph_data(object):
return dict(name = object, id = 1234,
link = '/facebook/%s' % object)
def simple_parse_qs(qs):
    """Parse a query string, keeping only the LAST value of repeated keys."""
    # dict() over the (name, value) pairs makes later pairs win, which is
    # exactly the 'last value only' contract.
    return dict(parse_qsl(qs))
def rebuild_query(original, args):
    """Merge *args* into *original*'s query string and rebuild the URL.

    Existing parameters are preserved; entries in *args* win on conflict.
    """
    (path, query) = splitquery(original)
    if query is None:
        merged = args
    else:
        merged = simple_parse_qs(query)
        merged.update(args)
    return '%s?%s' % (path, urlencode(merged))
| Python |
# Canned OAuth credentials served by the facade; clients compare against
# these exact values.
AUTH_CODE = 'the-auth-code'
ACCESS_TOKEN = 'abcdefgh'
| Python |
from html import *
class Site:
    # The base page template is registered as a side effect of evaluating
    # this class body.
    RegisterTemplate('site/base/page.html',
                     DOCTYPE() + HTML(HEAD(TITLE(Block('page_title')),
                                           ExternalStylesheet('/style.css')),
                                      BODY(DIV(Block('page_detail'),
                                               id = 'container'))))

    class Page:
        # Deferred template pieces bound to the registered base template.
        Base = Child.Model('site/base/page.html')
        Title = Block.Model('page_title')
        Detail = Block.Model('page_detail')

        class Article:
            # Template variables resolved from an 'article' context value.
            Title = Variable.Name.article.title
            Content = Variable.Name.article.content

        # Page.Content: a two-cell table presenting the article.
        Content = TABLE.Model(TR.Model(TD.Model(Article.Title, className = 'caption'),
                                       TD.Model(Article.Content, className = 'article')))
        # Full page prototype: base template with title and detail filled in.
        Prototype = Base.Model(Title.Model(Article.Title, ' | Site'),
                               Detail.Model(Content))

        @classmethod
        def View(self, request = None, **values):
            # Render the prototype with *values* and pretty-print the markup.
            return prettify(render(self.Prototype(), **values))
# Demo: render an example page at import time (Python 2 print statement).
print Site.Page.View(article = dict(title = 'Article #1',
                                    content = 'A bit of content.'))
| Python |
# In-Memory Template Repository & Loader.
from django.template import loader, TemplateDoesNotExist
# In-memory store mapping template names to TemplateRepresentation objects.
_template_repository = dict()

# Registration.
def RegisterTemplate(name, tmpl):
    """Register *tmpl* under *name*; bare strings are wrapped first."""
    if isinstance(tmpl, basestring):
        tmpl = TemplateRepresentation(tmpl, name = name)
    assert isinstance(tmpl, TemplateRepresentation)
    _template_repository[name] = tmpl
# Querying.
def get_template_query(template_name, template_dirs = None):
    """Yield the registered template for *template_name*, if any."""
    found = _template_repository.get(template_name)
    if found is not None:
        yield found
def get_template_name(name, kind = 'template'):
    """Return the canonical '<kind>:<name>' identifier."""
    return ':'.join((kind, name))
class TemplateRepresentation(object):
    """A named, in-memory template source held by the repository."""

    def __init__(self, template, name = '', kind = ''):
        self._template = template   # raw template text
        self._name = name           # registered name
        self._kind = kind           # namespace prefix for get_name()
    def get_name(self):
        # Full identifier, e.g. 'template:site/base/page.html'.
        return get_template_name(self._name, kind = self._kind)
    def get_source(self):
        # The raw template text.
        return self._template
    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.get_name())
def load_template_source(template_name, template_dirs = None):
    """Django loader hook backed by the in-memory repository.

    Accepts either a TemplateRepresentation directly (returns its source)
    or a registered name (returns (source, display_name)); otherwise
    raises TemplateDoesNotExist.
    """
    if isinstance(template_name, TemplateRepresentation):
        return template_name.get_source()
    for result in get_template_query(template_name, template_dirs = template_dirs):
        # Return the first (and only) match.
        return (result.get_source(), result.get_name())
    # Fixed: use TemplateDoesNotExist already imported at module top instead
    # of re-importing django.template locally with py2-only raise syntax.
    raise TemplateDoesNotExist(template_name)
load_template_source.is_usable = True
# Installation.
def install():
    """Hook this module's loader into Django's template machinery.

    (The historical direct manipulation of loader.template_source_loaders
    was superseded by ensure_exists.)
    """
    ensure_exists()
def ensure_exists():
    # HACK: Make sure that our loader is part of the system loaders,
    # otherwise another part of the site (besides turbine) initiated
    # template subsystem. We force the reload.
    #
    try: from django.template import loader
    except ImportError: pass
    else:
        try: ldrs = loader.template_source_loaders
        except AttributeError: pass
        else:
            if ldrs is not None and load_template_source not in ldrs:
                # Force reload:
                loader.template_source_loaders = None
                # What if we're not in TEMPLATE_LOADERS setting?
                # Force mechanism to setup custom loaders per application,
                # then insert our own.
                # find_template('') rebuilds the loader tuple from settings;
                # the expected TemplateDoesNotExist is swallowed on purpose.
                try: loader.find_template('')
                except TemplateDoesNotExist: pass
                # We're pretty sure they're in a tuple by now.
                loader.template_source_loaders += (load_template_source,)
| Python |
# Front End.
def render(templateString, **values):
    """Render *templateString* through Django's template engine with the
    given context values."""
    from django.template import Template, Context
    context = Context(values)
    return Template(templateString).render(context)
from xml.sax import make_parser
from xml.sax.handler import ContentHandler, ErrorHandler, property_lexical_handler
from xml.sax.xmlreader import InputSource
try: from cStringIO import StringIO
except ImportError: from StringIO import StringIO
def ParseString(source, handler):
    """SAX-parse *source* (a string) with *handler* wired up as the
    content, error, AND lexical handler."""
    parser = make_parser()
    parser.setContentHandler(handler)
    parser.setErrorHandler(handler)
    parser.setProperty(property_lexical_handler, handler)
    wrapped = InputSource()
    wrapped.setByteStream(StringIO(source))
    parser.parse(wrapped)
# Indentation unit used by XMLPrettifier.  NOTE(review): whitespace may
# have been collapsed in transit -- the original literal may have been wider.
TAB = ' '
class XMLPrettifier(object, ContentHandler, ErrorHandler):
    """SAX-driven XML pretty-printer.

    NOTE: __new__ returns a *string* (the prettified output), so calling
    XMLPrettifier(source) yields str, not an instance.
    """

    # Lexical Handler
    # Events deliberately ignored.
    comment = startCDATA = endCDATA = endDTD = None
    def startDTD(self, name, publicId, systemId):
        # Re-emit the doctype, skipping absent public/system ids.
        args = (name, publicId, systemId)
        self << '<!DOCTYPE %s>\n' % ' '.join(map(str, filter(None, args)))

    # ContentHandler
    def startElement(self, name, attrs):
        # Non-namespace events delegate to the NS handlers.
        self.startElementNS(name, '', attrs)
    def endElement(self, name):
        self.endElementNS(name, '')
    def startElementNS(self, name, qname, attrs):
        self.flushCharacters()
        attrs = ' '.join('%s="%s"' % nv for nv in attrs.items())
        self << '<%s%s%s>\n' % (name, attrs and ' ' or '', attrs)
        self += 1   # deepen indentation for children
    def endElementNS(self, name, qname):
        self.flushCharacters()
        self -= 1
        self << '</%s>\n' % name
    def characters(self, content):
        # Buffer text until the next structural event.
        self.addCharacters(content)
    def processingInstruction(self, target, data):
        self << '<?%s %s?>\n' % (target, data)

    # ErrorHandler:
    def handleError(self, exception):
        # Show the offending source line with a caret under the column.
        linenr = exception.getLineNumber()
        column = exception.getColumnNumber()
        lines = self.source.split('\n')
        line = lines[linenr - 1]
        print 'Fatal error parsing line %d, column %d:\n%s\n%s^' % \
              (linenr, column, line, ' ' * (column - 1))
    error = warning = fatalError = handleError

    def __new__(self, *args, **kwd):
        # Build the instance, run the parse in __init__, then collapse the
        # object to its output string.
        o = object.__new__(self)
        o.__init__(*args, **kwd)
        return str(o)
    def __init__(self, source, destbuf = None, tab = TAB):
        self.source = source
        if destbuf is None:
            destbuf = StringIO()
        self.destbuf = destbuf    # output buffer
        self.contentbuf = ''      # pending character data
        self.indent = 0           # current nesting depth
        self.tab = tab
        ParseString(source, self) # drives every handler callback above

    @property
    def indenttab(self):
        # Cached indentation prefix; invalidated whenever depth changes.
        try: return self._indenttab
        except AttributeError:
            t = self._indenttab = self.tab * self.indent
            return t
    def resetindenttab(self):
        try: del self._indenttab
        except AttributeError: pass
    def __iadd__(self, value):
        # self += n  ==> indent deeper by n.
        self.resetindenttab()
        self.indent += int(value)
        return self
    def __isub__(self, value):
        # self -= n  ==> outdent by n.
        self.resetindenttab()
        self.indent -= int(value)
        return self
    def __lshift__(self, value):
        # self << text  ==> write text at the current indentation.
        self.destbuf.write(self.indenttab)
        self.destbuf.write(value)
        return self

    def addCharacters(self, content):
        self.contentbuf += content
    def flushCharacters(self):
        content = self.contentbuf
        self.contentbuf = ''
        # Normalize whitespace.
        for line in content.split('\n'):
            if line:
                self << ' '.join(line.split())
                self.destbuf.write('\n')

    def __str__(self):
        return self.destbuf.getvalue()
def prettify(html):
    # Pretty-print *html*; XMLPrettifier.__new__ returns the result string.
    return XMLPrettifier(html)
| Python |
# Django Template and HTML Generation.
# --
# To know my deed: tis better to not know myself.
# Liver of blaspheming Jew.
# Thine linen bedsheets of countenance do fear!
# Your royal preparation makes us hear something.
#
from pdb import runcall
# Unique sentinel distinguishing "argument omitted" from an explicit None.
Absent = object()
def traceOff(function):
    """Mark *function* so debug tracers skip it; returns it unchanged."""
    function.__trace_off__ = True
    return function
# Base Generator Routines & Types.
from types import GeneratorType as generator
def toTemplateString(result):
    """Recursively flatten lists/tuples/generators into one string; any
    other value is simply str()-ed."""
    if type(result) in (list, tuple, generator):
        pieces = [toTemplateString(element) for element in result]
        return ''.join(pieces)
    return str(result)
class Model:
    """Deferred call: captures a callable plus partial args/kwargs.

    Calling the model (or str()-ing it) invokes the target with merged
    arguments and flattens the result to a template string.
    """

    def __init__(self, type, *args, **kwd):
        self.type = type
        self.args = args
        self.kwd = kwd

    @staticmethod
    def MergeDictionary(d1, d2):
        # Non-destructive merge; d2 wins on key conflicts.
        merged = d1.copy()
        merged.update(d2)
        return merged

    def __call__(self, *args, **kwd):
        combined_args = self.args + args
        combined_kwd = self.MergeDictionary(self.kwd, kwd)
        return toTemplateString(self.type(*combined_args, **combined_kwd))
    __str__ = __call__

    def Model(self, *args, **kwd):
        # Derive a further-specialized model from this one.
        return self.__class__(self, *args, **kwd)
def toQuotedString(s):
    """Return *s* wrapped in double quotes, repr-escaping the contents."""
    escaped = repr('"' + s)[2:-1]
    return '"%s"' % escaped
def Element(element):
    """Decorator: flatten the element's (possibly nested) yield stream into
    a string on call, and attach a .Model factory for deferred rendering."""
    def wrapping(*args, **kwd):
        return toTemplateString(element(*args, **kwd))
    def thisModel(*args, **kwd):
        return Model(element, *args, **kwd)
    wrapping.Model = thisModel
    return wrapping
def Parameters(*args):
    """Bundle positional arguments into a parameters tuple."""
    return tuple(args)
@Element
def Tag(name, *parameters):
    """Render a django {% ... %} tag with optional parameters."""
    rendered = ' '.join(map(str, parameters))
    yield '{%% %s%s%s %%}' % (name, rendered and ' ' or '', rendered)
@Element
def ClosedTag(name, parameters, *content, **kwd):
    # Render {% name params %} content {% end-tag %}.
    yield Tag(name, *parameters)
    yield content
    # 'closing' is required: True -> 'end<name>'; a callable -> computed
    # end-tag name; a string -> used verbatim; any other value suppresses
    # the end tag entirely.
    closing = kwd.pop('closing')
    if closing is True:
        yield Tag('end' + name)
    elif callable(closing):
        yield Tag(str(closing(name)))
    elif isinstance(closing, basestring):
        yield Tag(closing)
@Element
def Child(parent, *content):
    """Render a child template: {% extends "parent" %} then the content."""
    yield Tag('extends', toQuotedString(parent))
    yield content
@Element
def Block(name, *content):
    """Render {% block name %} ... {% endblock name %}."""
    kwd = dict(closing = 'endblock ' + name)
    yield ClosedTag('block', Parameters(name), *content, **kwd)
@Element
def Variable(name, *filters):
    """Render a {{ name|filter|... }} variable reference."""
    yield '{{ ' + name
    for filt in filters:
        yield '|'
        yield filt
    yield ' }}'
class VariableGenerator(object):
    """Builds dotted variable names attribute-by-attribute, e.g.
    Variable.Name.article.title renders 'article.title'."""

    def __init__(self, value, *filters):
        self._value = value
        self._filters = filters

    def __getattr__(self, name):
        # Only non-underscore names extend the dotted path.  Fixed: the
        # original fell back to object.__getattr__, which does not exist --
        # the AttributeError it raised was accidental and mis-labelled.
        if name.startswith('_'):
            raise AttributeError(name)
        value = self._value
        return self.__class__('%s%s%s' % (value and value or '', value and '.' or '', name),
                              *self._filters)

    def __add__(self, f):
        # Append a single filter.  Fixed: `self._filters += f` raised
        # TypeError for any non-tuple filter; append a 1-tuple instead.
        self._filters += (f,)
        return self

    def __str__(self):
        return Variable(self._value, *self._filters)
# Expose the generator as Variable.Name and hide the class itself.
Variable.Name = VariableGenerator('')
del VariableGenerator
@Element
def Filter(name, parameter = Absent):
    """Render a filter reference, optionally with a quoted parameter."""
    if parameter is not Absent:
        return '%s:%s' % (name, toQuotedString(parameter))
    return str(name)
class HTMLElement:
    """Factory for one HTML tag: instances are callable renderers, and
    carry a .Model for deferred rendering."""

    def __init__(self, name, *args, **kwd):
        self.name = name
        # 'or False' coerces an explicit None flag to False.
        self.self_closing = kwd.pop('self_closing', False) or False
        self.Model = Model(self, *args, **kwd)

    @classmethod
    def TranslateAttributes(self, attributes):
        # Map python-safe attribute spellings to their HTML names.
        for (name, value) in attributes.iteritems():
            yield (self.TRANSLATIONS.get(name, name), value)
    TRANSLATIONS = dict(className = 'class')

    @Element
    def __call__(self, *content, **attributes):
        # Render '<name attrs>content</name>', or '<name attrs />' when
        # self-closing (content is then disallowed).
        attributes = ' '.join('%s="%s"' % (name, value) for (name, value) in self.TranslateAttributes(attributes))
        if self.self_closing:
            assert not content
            yield '<%s%s%s />' % (self.name, attributes and ' ' or '', attributes)
        else:
            yield '<%s%s%s>' % (self.name, attributes and ' ' or '', attributes)
            yield content
            yield '</%s>' % self.name
# Registry mapping doctype nicknames to their formal identifiers.
_registered_doctypes = {'': 'HTML'}

def DOCTYPE(name = ''):
    """Render a <!DOCTYPE> declaration, resolving registered nicknames;
    unknown names pass through unchanged."""
    resolved = _registered_doctypes.get(name, name)
    return '<!DOCTYPE %s>' % resolved
# Wire the in-memory repository into Django's template loading at import
# time, then re-export the element library and rendering front end.
import template_repository
template_repository.install()
from lib import *
from rendering import *
from template_repository import RegisterTemplate
| Python |
# HTML Element Construction Library.
from html import HTMLElement
# Inject an HTMLElement factory for each supported tag into this module's
# namespace, so `from lib import *` exports DIV, TABLE, TR, ...
vars().update(dict((_, HTMLElement(_)) for _ in
                   ['HTML', 'HEAD', 'TITLE',
                    'SCRIPT', 'LINK', 'META',
                    'BODY', 'DIV', 'P', 'SPAN', 'HR', 'BR',
                    'H1', 'H2', 'H3', 'H4', 'H5', 'H6',
                    'TABLE', 'TR', 'TD']))
def ExternalStylesheet(href, media = None):
    """Render a <link rel="stylesheet"> element for *href*, with an
    optional media query."""
    attributes = dict(href = href, rel = 'stylesheet', type = 'text/css')
    if media is not None:
        attributes['media'] = media
    return LINK(**attributes)
# Django Template Filters & Tag Construction Library.
| Python |
#!python
# Parsing.
import re
index_pattern = re.compile(r'(?:(?P<vnum>\d+)\.zon)|\$$')

def parseindex(index_name):
    """Read a zone index file, returning its numeric vnums as a list.

    Entries look like '1234.zon'; the terminating '$' line and anything
    unrecognized are skipped.
    """
    vnums = []
    for line in open(index_name).readlines():
        match = index_pattern.match(line)
        if match is None:
            continue
        vnum = match.group('vnum')
        # The '$' alternative leaves the group as None -- filter it out.
        if type(vnum) is str and vnum.isdigit():
            vnums.append(int(vnum))
    return vnums
def segment(array, nr):
    """Split *array* into roughly *nr* equal chunks, yielding each chunk.

    The final chunk carries any remainder.  Fixes: floor division is now
    explicit (// gives the same result the py2 integer '/' did, and is
    py3-safe), and a zero-length trailing chunk is no longer emitted when
    the length divides evenly.
    """
    nr = int(nr)
    ln = len(array)
    step = ln // nr
    assert step
    x = 0
    while (ln - x) >= step:
        yield array[x:x+step]
        x += step
    if x < ln:
        # Remainder shorter than a full step.
        yield array[x:]
def nr_segments(total_size, max_size, allowance):
    """How many segments keep each under *max_size* with fractional
    headroom *allowance* (0..1).  Returns a float."""
    usable = (1 - float(allowance)) * max_size
    return total_size / usable
# Build.
def writeIndex(filename, index):
    """Write *index* vnums as '<vnum>.zon' lines, ending with the '$'
    sentinel line."""
    lines = ['%d.zon\n' % vnum for vnum in index]
    lines.append('$\n')
    fl = open(filename, 'w')
    fl.writelines(lines)
    fl.flush()
    fl.close()
from os.path import dirname, basename, join as joinpath, isdir, isfile
from os import mkdir, system
from tempfile import gettempdir
def getDirectory(base, *parts):
    """Ensure the directory chain base/parts... exists; return leaf path."""
    if not isdir(base) and base:
        mkdir(base)
    for part in parts:
        base = joinpath(base, part)
        if not isdir(base):
            mkdir(base)
    return base
def getBuildSegmentBase(build_name, nr):
    """Temp-dir path for build segment *nr*: <tmpdir>/<build_name>/NNNN."""
    segment_dir = '%04d' % nr
    return getDirectory(gettempdir(), build_name, segment_dir)
## def getZoneSpan(base, vnum):
## # Todo: build this once!
## from stuphlib.wldlib import WorldLoader
## from stuphlib.dblib import LocalFileIndex
## loader = WorldLoader(LocalFileIndex(base))
##
## zone = loader.createZone(vnum)
## return zone.span
def copy_file(src, dest):
    """Copy *src* to *dest* via /bin/cp when the source exists.

    NOTE(review): paths are interpolated into a shell command -- unsafe for
    names containing quotes; acceptable for the numeric names used here.
    """
    if isfile(src):
        system('/bin/cp "%s" "%s"' % (src, dest))
def remove_directory(path):
    """Recursively delete *path* (best-effort, shell rm -Rf)."""
    system('/bin/rm -Rf "%s"' % path)
def zip_archive(name, source):
print 'zip_archive: %r -> %r' % (source, name)
# Zip up contents without containing segmentation folder.
system('cd "%s"; zip -c "%s" .' % (source, name))
# system('zip -c "%s" "%s"' % (name, source))
# World-file extensions; each segment gets one sub-directory per extension.
FIXES = ('zon', 'wld', 'obj', 'mob', 'shp', 'sch')
def prepare_segment((build_name, world_base, index_name, archive_base), (segment, nr)):
#
# This builds a temporary directory and then zips it to file.
# Todo: compile zip archive directly without intermediate fs.
#
try:
# Rebuild a world index segment.
build_base = getBuildSegmentBase(build_name, nr)
print 'build_base: %r' % build_base
# Build parts containers.
parts = {}
for fix in FIXES:
parts[fix] = getDirectory(build_base, fix)
# Build zon index.
writeIndex(joinpath(parts['zon'], index_name), segment)
# This is not true: Spanned contents go into one file!
## # Must load the zone record to discover zone span!
## span = getZoneSpan(world_base, vnum)
# Build parts.
for fix in FIXES:
for vnum in segment:
this = '%d.%s' % (vnum, fix)
copy_file(joinpath(world_base, fix, this),
joinpath(parts[fix], this))
# Zip Archive.
zip_archive(joinpath(archive_base, '%04d.zip' % nr),
build_base)
finally:
# Remove temporary build directories.
remove_directory(build_base)
def prepare_segmented_index(index_file, build_name, size_limits):
    """Split the zone index at *index_file* into numbered zip segments.

    size_limits: (total_size, max_size, allowance) -- see nr_segments().
    (Fixed: the py2-only tuple parameter is unpacked in the body; call
    sites are unchanged.)
    """
    (total_size, max_size, allowance) = size_limits
    world_base = dirname(dirname(index_file))
    index_name = basename(index_file)
    archive_base = getDirectory(world_base, build_name)
    total = nr_segments(total_size, max_size, allowance)
    nr = 1
    for seg in segment(parseindex(index_file), total):
        prepare_segment((build_name, world_base, index_name, archive_base),
                        (seg, nr))
        nr += 1
# Test -- Calculate index segments where each (average) size
# will fit within maximum allowed size, with room to spare.
TOTAL_SIZE = 4676642      # observed total world size, in bytes
MAX_SIZE = (1 << 20)      # 1 MiB per segment
ALLOWANCE = 0.25          # keep 25% headroom per segment
# INDEX_FILE = r'H:/StuphMUD/lib/world/zon/index'
BUILD_NAME = 'stuphworld-segments'
BUILD_PARTS = [BUILD_NAME] # 'only'
def parse_cmdln(argv):
    """Parse command-line arguments; returns (options, positional_args)."""
    from optparse import OptionParser
    return OptionParser().parse_args(argv)
def main(argv = None):
    """Entry point: segment the index file named by the first argument."""
    (options, positional) = parse_cmdln(argv)
    build_label = '-'.join(BUILD_PARTS)
    prepare_segmented_index(positional[0], build_label,
                            (TOTAL_SIZE, MAX_SIZE, ALLOWANCE))

if __name__ == '__main__':
    main()
| Python |
#!python
# Parsing.
import re
index_pattern = re.compile(r'(?:(?P<vnum>\d+)\.zon)|\$$')

def parseindex(index_name):
    """Read a zone index file, returning its numeric vnums as a list.

    Entries look like '1234.zon'; the terminating '$' line and anything
    unrecognized are skipped.
    """
    vnums = []
    for line in open(index_name).readlines():
        match = index_pattern.match(line)
        if match is None:
            continue
        vnum = match.group('vnum')
        # The '$' alternative leaves the group as None -- filter it out.
        if type(vnum) is str and vnum.isdigit():
            vnums.append(int(vnum))
    return vnums
def segment(array, nr):
    """Split *array* into roughly *nr* equal chunks, yielding each chunk.

    The final chunk carries any remainder.  Fixes: floor division is now
    explicit (// gives the same result the py2 integer '/' did, and is
    py3-safe), and a zero-length trailing chunk is no longer emitted when
    the length divides evenly.
    """
    nr = int(nr)
    ln = len(array)
    step = ln // nr
    assert step
    x = 0
    while (ln - x) >= step:
        yield array[x:x+step]
        x += step
    if x < ln:
        # Remainder shorter than a full step.
        yield array[x:]
def nr_segments(total_size, max_size, allowance):
    """How many segments keep each under *max_size* with fractional
    headroom *allowance* (0..1).  Returns a float."""
    usable = (1 - float(allowance)) * max_size
    return total_size / usable
# Build.
def writeIndex(filename, index):
    """Write *index* vnums as '<vnum>.zon' lines, ending with the '$'
    sentinel line."""
    lines = ['%d.zon\n' % vnum for vnum in index]
    lines.append('$\n')
    fl = open(filename, 'w')
    fl.writelines(lines)
    fl.flush()
    fl.close()
from os.path import dirname, basename, join as joinpath, isdir, isfile
from os import mkdir, system
from tempfile import gettempdir
def getDirectory(base, *parts):
    """Ensure the directory chain base/parts... exists; return leaf path."""
    if not isdir(base) and base:
        mkdir(base)
    for part in parts:
        base = joinpath(base, part)
        if not isdir(base):
            mkdir(base)
    return base
def getBuildSegmentBase(build_name, nr):
    """Temp-dir path for build segment *nr*: <tmpdir>/<build_name>/NNNN."""
    segment_dir = '%04d' % nr
    return getDirectory(gettempdir(), build_name, segment_dir)
## def getZoneSpan(base, vnum):
## # Todo: build this once!
## from stuphlib.wldlib import WorldLoader
## from stuphlib.dblib import LocalFileIndex
## loader = WorldLoader(LocalFileIndex(base))
##
## zone = loader.createZone(vnum)
## return zone.span
def copy_file(src, dest):
    """Copy *src* to *dest* via /bin/cp when the source exists.

    NOTE(review): paths are interpolated into a shell command -- unsafe for
    names containing quotes; acceptable for the numeric names used here.
    """
    if isfile(src):
        system('/bin/cp "%s" "%s"' % (src, dest))
def remove_directory(path):
    """Recursively delete *path* (best-effort, shell rm -Rf)."""
    system('/bin/rm -Rf "%s"' % path)
def zip_archive(name, source):
print 'zip_archive: %r -> %r' % (source, name)
# Zip up contents without containing segmentation folder.
system('cd "%s"; zip -c "%s" .' % (source, name))
# system('zip -c "%s" "%s"' % (name, source))
# World-file extensions; each segment gets one sub-directory per extension.
FIXES = ('zon', 'wld', 'obj', 'mob', 'shp', 'sch')
def prepare_segment((build_name, world_base, index_name, archive_base), (segment, nr)):
#
# This builds a temporary directory and then zips it to file.
# Todo: compile zip archive directly without intermediate fs.
#
try:
# Rebuild a world index segment.
build_base = getBuildSegmentBase(build_name, nr)
print 'build_base: %r' % build_base
# Build parts containers.
parts = {}
for fix in FIXES:
parts[fix] = getDirectory(build_base, fix)
# Build zon index.
writeIndex(joinpath(parts['zon'], index_name), segment)
# This is not true: Spanned contents go into one file!
## # Must load the zone record to discover zone span!
## span = getZoneSpan(world_base, vnum)
# Build parts.
for fix in FIXES:
for vnum in segment:
this = '%d.%s' % (vnum, fix)
copy_file(joinpath(world_base, fix, this),
joinpath(parts[fix], this))
# Zip Archive.
zip_archive(joinpath(archive_base, '%04d.zip' % nr),
build_base)
finally:
# Remove temporary build directories.
remove_directory(build_base)
def prepare_segmented_index(index_file, build_name, size_limits):
    """Split the zone index at *index_file* into numbered zip segments.

    size_limits: (total_size, max_size, allowance) -- see nr_segments().
    (Fixed: the py2-only tuple parameter is unpacked in the body; call
    sites are unchanged.)
    """
    (total_size, max_size, allowance) = size_limits
    world_base = dirname(dirname(index_file))
    index_name = basename(index_file)
    archive_base = getDirectory(world_base, build_name)
    total = nr_segments(total_size, max_size, allowance)
    nr = 1
    for seg in segment(parseindex(index_file), total):
        prepare_segment((build_name, world_base, index_name, archive_base),
                        (seg, nr))
        nr += 1
# Test -- Calculate index segments where each (average) size
# will fit within maximum allowed size, with room to spare.
TOTAL_SIZE = 4676642      # observed total world size, in bytes
MAX_SIZE = (1 << 20)      # 1 MiB per segment
ALLOWANCE = 0.25          # keep 25% headroom per segment
# INDEX_FILE = r'H:/StuphMUD/lib/world/zon/index'
BUILD_NAME = 'stuphworld-segments'
BUILD_PARTS = [BUILD_NAME] # 'only'
def parse_cmdln(argv):
    """Parse command-line arguments; returns (options, positional_args)."""
    from optparse import OptionParser
    return OptionParser().parse_args(argv)
def main(argv = None):
    """Entry point: segment the index file named by the first argument."""
    (options, positional) = parse_cmdln(argv)
    build_label = '-'.join(BUILD_PARTS)
    prepare_segmented_index(positional[0], build_label,
                            (TOTAL_SIZE, MAX_SIZE, ALLOWANCE))

if __name__ == '__main__':
    main()
| Python |
from xmlrpclib import ServerProxy, Fault
from optparse import OptionParser
from config import ServerConfiguration
from config import create_download_request
class ClientProxy:
    """Thin XML-RPC client bound to an address and optional port."""

    # Network Model.
    def __init__(self, address, port = None):
        self.address = address
        self.port = port

    def getProxyUrl(self):
        """Base URL of the remote endpoint."""
        if self.port is None:
            suffix = ''
        else:
            suffix = ':%d' % int(self.port)
        return 'http://%s%s' % (self.address, suffix)

    def getProxy(self):
        return ServerProxy(self.getProxyUrl())

    def invoke(self, action, *args):
        """Call remote method *action* with *args*."""
        remote_method = getattr(self.getProxy(), action)
        return remote_method(*args)
class DownloadControllerClient(ClientProxy):
    """Typed wrappers over the remote download.* / server.* actions."""

    # Validate Specific Actions -- Interpret Success/Result Results.
    def initiate(self, url, name, **values):
        req = create_download_request(url, name, **values)
        return self.invoke('download.initiate', req)
    def cancel(self, name, complete = None, delete = None):
        return self.invoke('download.cancel', name, complete, delete)
    def pause(self, name):
        return self.invoke('download.pause', name)
    def resume(self, name):
        return self.invoke('download.resume', name)
    def complete(self, name, delete = None):
        # Fixed: forward the delete flag (it was silently dropped before,
        # making --delete a no-op for the complete command).
        return self.invoke('download.complete', name, delete)
    def status(self, name):
        return self.invoke('download.status', name)
    def query(self, criteria):
        return self.invoke('download.query', criteria)
    def restart(self, name):
        return self.invoke('download.restart', name)
    def server_status(self):
        return self.invoke('server.status')
    def server_start(self):
        return self.invoke('server.start')
    def server_stop(self):
        return self.invoke('server.stop')
    def server_shutdown(self):
        return self.invoke('server.shutdown')
# Utilizes Controller Client Internally.
# Fallback argv[0] used when no program name is supplied.
DEFAULT_CLIENT_PROGRAM_NAME = 'downmgr-client'
class CommandClient:
    """Base class for one client command: parses its own options, then
    invokes the controller client."""

    # Configure and invoke commands.
    class UsageError(Exception):
        # Raised to surface a usage message; carries the originating client.
        def __init__(self, message = None, client = None):
            self._message = message
            self.client = client
        def getUsage(self):
            if self._message:
                return self._message
            return self.client.getUsage()

    def usageError(self, message):
        # Abort command processing with a usage message.
        raise self.UsageError(message, client = self)

    def __init__(self, config, controller, prog = None):
        # NOTE(review): *config* is accepted but never stored -- confirm
        # whether subclasses were meant to have access to it.
        self.prog = prog or DEFAULT_CLIENT_PROGRAM_NAME
        self.controller = controller

    def getParser(self):
        # Lazily build and cache the OptionParser on first use.
        try: return self.parser
        except AttributeError:
            parser = OptionParser(prog = self.prog)
            self.setupCmdlnParser(parser)
            self.parser = parser
            return parser

    def parseCmdln(self, args):
        argv = list(args)
        return self.getParser().parse_args(argv)

    @classmethod
    def getUsage(self):
        # Todo: integrate command-parser.
        return (self.__doc__)

    def setupCmdlnParser(self, parser):
        # Common options first, then the subclass's PARSER_OPTIONS.
        parser.add_option('-C', '--config-file')
        parser.add_option('-p', '--port')
        parser.add_option('--remote-address')
        for (opt_args, opt_kwd) in self.PARSER_OPTIONS:
            parser.add_option(*opt_args, **opt_kwd)

    def doCommand(self, args):
        (options, args) = self.parseCmdln(args)
        return self.doCommandWithArgs(options, args)

    # Override these:
    PARSER_OPTIONS = []
    def doCommandWithArgs(self, options, args):
        pass
def Option(*args, **kwd):
    """Bundle add_option() arguments for a PARSER_OPTIONS entry."""
    packaged = (args, kwd)
    return packaged
class SimpleDownloadActionClient(CommandClient):
    """Command that resolves a download name/id and applies self.ACTION."""

    # Simple Download Control.
    PARSER_OPTIONS = [Option('--name' ),
                      Option('--idnum', type = int)]

    def getName(self, options, args):
        """Pick the target: --idnum first, then --name, then the first
        positional argument; raises a usage error when all are absent."""
        name = options.idnum
        if name is None:
            name = options.name
        if name is None:
            if not args:
                self.usageError('Expected download <name-or-id>!')
            name = args[0]
        return name

    def doCommandWithArgs(self, options, args):
        target = self.getName(options, args)
        getattr(self.controller, self.ACTION)(target)
class SimpleServerActionClient(CommandClient):
def doCommandWithArgs(self, options, args):
action = getattr(self.controller, self.ACTION)
self.displayResponse(action())
def displayResponse(self, response):
if response is not None:
print '%s Response: %r' % (self.__class__.__name__, response)
# Command Implementations.
# Todo: Document.
class Initiate(CommandClient):
    'Start a download by building a description.'
    PARSER_OPTIONS = [Option('--name' ),
                      Option('--url' ),
                      Option('--group' ),
                      Option('--filename'),
                      Option('--folder' ),
                      Option('--order' )]

    def doCommandWithArgs(self, options, args):
        # The URL may come from --url or the first positional argument.
        url = options.url
        if not url:
            if not args:
                return self.usageError('Expected <url> argument!')
            url = args[0]
        self.controller.initiate(url, options.name,
                                 order = options.order,
                                 group = options.group,
                                 filename = options.filename,
                                 folder = options.folder)
class Query(CommandClient):
QUERYABLE = ['name', 'url', 'group', 'filename', 'folder', 'order', 'state']
PARSER_OPTIONS = [Option('--%s' % q) for q in QUERYABLE]
def doCommandWithArgs(self, options, args):
criteria = [(k, getattr(options, k)) for k in self.QUERYABLE]
criteria = dict((k, v) for (k, v) in criteria if v is not None)
self.displayResponse(self.controller.query(criteria))
def displayResponse(self, downloads):
print '%s Matches:' % self.__class__.__name__
for idnum in downloads:
print ' #[%4d]' % idnum
class Cancel(SimpleDownloadActionClient):
    """Cancel a download, optionally completing/deleting it."""
    PARSER_OPTIONS = list(SimpleDownloadActionClient.PARSER_OPTIONS)
    PARSER_OPTIONS.append(Option('--complete', action = 'store_true'))
    PARSER_OPTIONS.append(Option('--delete' , action = 'store_true'))

    def doCommandWithArgs(self, options, args):
        # Fixed: getName requires (options, args); the bare self.getName()
        # call raised TypeError before.
        name = self.getName(options, args)
        self.controller.cancel(name, options.complete, options.delete)
# Trivial per-download actions: each maps straight onto a controller method.
class Pause(SimpleDownloadActionClient):
    ACTION = 'pause'
class Resume(SimpleDownloadActionClient):
    ACTION = 'resume'
class Status(SimpleDownloadActionClient):
    ACTION = 'status'
class Complete(SimpleDownloadActionClient):
    """Mark a download complete, optionally deleting its file."""
    PARSER_OPTIONS = list(SimpleDownloadActionClient.PARSER_OPTIONS)
    PARSER_OPTIONS.append(Option('--delete' , action = 'store_true'))

    def doCommandWithArgs(self, options, args):
        # Fixed: pass (options, args) to getName -- the no-argument call
        # raised TypeError before.
        name = self.getName(options, args)
        self.controller.complete(name, options.delete)
class Restart(SimpleDownloadActionClient):
    ACTION = 'restart'

# Whole-server actions: each displays whatever the server returns.
class ServerStatus(SimpleServerActionClient):
    ACTION = 'server_status'
class ServerStart(SimpleServerActionClient):
    ACTION = 'server_start'
class ServerShutdown(SimpleServerActionClient):
    ACTION = 'server_shutdown'
class ServerStop(SimpleServerActionClient):
    ACTION = 'server_stop'
class ServerConsole(CommandClient):
    """Interactive remote-eval loop against the server's debug.evaluate."""

    def doCommandWithArgs(self, options, args):
        import readline   # imported for its raw_input line-editing side effect
        from sys import stdout
        output = stdout.write
        proxy = self.controller.getProxy()
        evaluate = proxy.debug.evaluate
        while True:
            # Ctrl-D (EOF) ends the session.
            try: syntax = raw_input(' +> ')
            except EOFError:
                print
                break
            # Remote faults are displayed, not fatal.
            try: output(evaluate(syntax))
            except Fault, e:
                output(e.faultString)
# Command Registration.
# Maps the command word (argv[1]) to its CommandClient implementation.
CLIENT_COMMANDS = dict(initiate = Initiate, cancel = Cancel,
                       pause = Pause, resume = Resume,
                       restart = Restart, complete = Complete,
                       status = Status, query = Query)
CLIENT_COMMANDS['server-status'] = ServerStatus
CLIENT_COMMANDS['server-start'] = ServerStart
CLIENT_COMMANDS['server-shutdown'] = ServerShutdown
CLIENT_COMMANDS['server-stop'] = ServerStop
CLIENT_COMMANDS['debug-console'] = ServerConsole
def getCommandUsage():
    """Build the multi-line help text listing every client command."""
    def _getCommandUsage(command, clientClass):
        # NOTE(review): the registered command word is immediately replaced
        # by the class name -- confirm that is the intended display.
        command = clientClass.__name__
        usage = clientClass.getUsage() or ''
        usage = filter(None, usage.split('\n'))
        if usage:
            TAB = '\n '
            usage = '%s%s\n' % (TAB, TAB.join(usage))
            return ' %s:%s' % (command, usage)
        return ' %s' % command
    commands = '\n'.join(_getCommandUsage(name, cls) for (name, cls)
                         in CLIENT_COMMANDS.iteritems())
    return ('Client Commands:\n' + commands)
def doClientCommand(*argv):
    # Front End.
    # XXX: further usage merging with server.
    # Returns True when argv[1] selected a client command (handled here or
    # help shown); otherwise returns None so the caller can fall back to
    # server startup.
    if len(argv) > 1 and not argv[1].startswith('--'):
        argv = list(argv)
        clientFactory = CLIENT_COMMANDS.get(argv[1])
        if clientFactory:
            config = ServerConfiguration()
            ctlr = DownloadControllerClient(config.address, config.port)
            client = clientFactory(config, ctlr, prog = argv[0])
            # Usage problems and remote faults are reported, not raised.
            try: client.doCommand(argv[2:])
            except CommandClient.UsageError, e:
                print e.getUsage()
            except Fault, e:
                print e.faultString
        else: # help
            print getCommandUsage()
        return True
| Python |
#!python
from server import doRunServer
from client import doClientCommand
def main(argv = None):
    # Route startup: known client command words are handled by
    # doClientCommand; anything else falls through to the server.
    if argv is None:
        # Default to the process arguments.
        from sys import argv
    # Analyze Server or Client Startup Mode.
    if not doClientCommand(*argv):
        doRunServer(*argv)

if __name__ == '__main__':
    main()
| Python |
from SimpleXMLRPCServer import SimpleXMLRPCServer, SimpleXMLRPCRequestHandler
from ConfigParser import ConfigParser, DEFAULTSECT, NoOptionError
from cStringIO import StringIO
from time import time as now
from optparse import OptionParser
from xmlrpclib import Fault, loads as loads_xmlrpc, dumps as dumps_xmlrpc
from config import ServerConfiguration
from config import create_random_name
from config import parse_safe_string
from config import reraise
from config import debug
from platform import getNativePlatform
# Process-control backend (spawn/kill/pause/resume) for this OS.
NativePlatform = getNativePlatform()
class Download:
    """One managed download: wraps a Request and tracks process state."""

    # Todo: group order.
    class Group:
        # Placeholder for download grouping / ordering.
        pass

    class Request:
        """Attribute bag of download parameters (url, name, filename, ...)."""

        @classmethod
        def parse(self, descr):
            # Multi-request: *descr* is INI-style text; each section yields
            # one Request, defaulting 'name' to the section title.
            cfg = ConfigParser()
            cfg.readfp(StringIO(descr))
            for section in cfg.sections():
                values = dict((k, cfg.get(section, k)) for \
                              k in cfg.options(section))
                # Pre-Validation.
                if 'name' not in values:
                    # XXX Is this right?
                    if section is not DEFAULTSECT:
                        values['name'] = section
                assert 'url' in values
                yield self(**values)

        def __init__(self, **values):
            # Record which keys were provided, skipping private '__' names.
            keys = self.__keys = []
            for (k, v) in values.iteritems():
                if not k.startswith('__'):
                    keys.append(k)
                    setattr(self, k, v)
            # Post-Validation.
            if 'filename' not in keys:
                self.filename = None

        def __repr__(self):
            values = ', '.join('%s: %r' % (k, getattr(self, k)) \
                               for k in self.__keys)
            return '%s {%s}' % (self.__class__.__name__, values)

    class Selector:
        """Filter predicate over downloads, built from a criteria dict."""

        class Criterium:
            # Subclass hook for rich matching.
            def match(self, value):
                pass

        def __init__(self, criteria):
            # Todo: filter/translate only allowed query keys.
            self.criteria = criteria

        def __call__(self, downloads):
            # Yield only downloads satisfying every criterium.
            for dnl in downloads:
                if self.match(dnl):
                    yield dnl

        def match(self, dnl):
            # All criteria must match; a missing attribute fails the match.
            if self.criteria:
                for (k, c) in self.criteria.iteritems():
                    try: v = getattr(dnl, k)
                    except AttributeError:
                        return False
                    if issubclass(c.__class__, self.Criterium):
                        if not c.match(v):
                            return False
                    elif v != c:
                        return False
            return True

    # Lifecycle states.
    DOWNLOAD_CANCELLED = -1
    DOWNLOAD_FINISHED = 0
    DOWNLOAD_INITIALIZED = 1
    DOWNLOAD_PAUSED = 2
    DOWNLOAD_ACTIVE = 3
    _state_names = {DOWNLOAD_CANCELLED : 'Cancelled',
                    DOWNLOAD_FINISHED : 'Finished',
                    DOWNLOAD_INITIALIZED : 'Initialized',
                    DOWNLOAD_PAUSED : 'Paused',
                    DOWNLOAD_ACTIVE : 'Active'}

    def __init__(self, manager, req):
        self.manager = manager
        self.req = req
        # Encapsulate the request, but activate settings:
        self.idnum = manager.create_download_id()
        self.name = req.name or manager.create_download_name()
        # Start method is separate from construction (for object serialization).
        self.state = self.DOWNLOAD_INITIALIZED
        self.pid = None
        self.start_time = now()

    def download_args(self):
        # Arguments handed to the platform spawner; filename is optional.
        args = dict(url = self.req.url)
        if self.req.filename:
            args['filename'] = self.req.filename
        return args

    def start(self):
        # Spawn only from the pristine INITIALIZED state.
        if self.state in [self.DOWNLOAD_INITIALIZED]:
            self.pid = self.manager.spawn_download(self.download_args())
            self.state = self.DOWNLOAD_ACTIVE

    def cancel(self, complete = False, delete = False):
        # Abort.
        if self.state != self.DOWNLOAD_FINISHED:
            # Kill the worker only when one is actually running.
            if self.state in [self.DOWNLOAD_ACTIVE, self.DOWNLOAD_PAUSED]:
                self.manager.kill_download(self.pid)
            self.state = self.DOWNLOAD_CANCELLED
            if complete:
                self.complete(delete = delete)

    def pause(self):
        if self.state == self.DOWNLOAD_ACTIVE:
            self.manager.pause_download(self.pid)
            self.state = self.DOWNLOAD_PAUSED

    def resume(self):
        if self.state == self.DOWNLOAD_PAUSED:
            self.manager.resume_download(self.pid)
            self.state = self.DOWNLOAD_ACTIVE

    def reap(self, status):
        # Worker process exited; *status* is currently unused.
        self.state = self.DOWNLOAD_FINISHED

    def complete(self, delete = False):
        # Todo: process delete flag.
        # Only FINISHED or CANCELLED downloads may be completed (removed).
        if self.state not in [self.DOWNLOAD_FINISHED, self.DOWNLOAD_CANCELLED]:
            # State-Error.
            raise ValueError(self.state)
        self.manager.remove_download(self)

    # Description.
    def get_status(self):
        return dict(state = self._state_names[self.state],
                    duration = now() - self.start_time)

    def __repr__(self):
        return '%s (%r)' % (self.__class__.__name__, self.req)
class EventSink:
    """Gives its owner an `event` attribute whose dotted attribute path
    accumulates an underscore-joined event name; calling the path fires
    manager.fireEvent(name, ...) when the manager defines it."""
    class Dispatch:
        """One node of the dotted event path (e.g. event.server.reap)."""
        def __init__(self, manager, *parts):
            self.manager = manager
            self.name = self.buildEventName(parts)
        def __getattr__(self, segment):
            # Extend the event path by one more segment.
            return self.__class__(self.manager, self.name, segment)
        def __call__(self, *args, **kwd):
            # Managers without fireEvent silently swallow the event.
            try:
                fire = self.manager.fireEvent
            except AttributeError:
                return None
            return fire(self.name, *args, **kwd)
        @classmethod
        def buildEventName(self, parts):
            # Empty segments (the root's '') are dropped before joining.
            return '_'.join(part for part in parts if part)
    def __init__(self, manager = None):
        self.event = self.Dispatch(manager or self)
class PluginManager(EventSink):
    """Owns the plugin list; fireEvent offers each event to every plugin,
    and a plugin answers by raising Outcome -- its value becomes the event
    result and later plugins are skipped."""
    # Todo: Configuration options.
    def __init__(self, config):
        # NOTE(review): 'config' is currently unused (see Todo above).
        EventSink.__init__(self)
        self.plugins = []
    # Invocation Framework.
    class Outcome(Exception):
        # Control-flow exception carrying a plugin's event result.
        def __init__(self, outcome):
            self.outcome = outcome
        def getOutcome(self):
            return self.outcome
    def fireEvent(self, name, *args, **kwd):
        """Dispatch to plugins in install order; returns the first Outcome
        value raised, or None when no plugin claims the event."""
        for p in self.plugins:
            try: p.handleEvent(self, name, *args, **kwd)
            except self.Outcome, o:
                return o.getOutcome()
    @classmethod
    def eventOutcome(self, *args, **kwd):
        # Never returns: plugins call this to answer the current event.
        raise self.Outcome(*args, **kwd)
    # Installation Framework.
    def installPlugin(self, plugin):
        # Todo: Delegate into plugins??
        if plugin not in self.plugins:
            self.plugins.append(plugin)
            plugin.install(self)
    def uninstallPlugin(self, plugin):
        if plugin in self.plugins:
            self.plugins.remove(plugin)
            plugin.uninstall(self)
    def reloadPlugin(self, plugin):
        pass
    class Plugin:
        '''
        Base class for plugins.  Events map to same-named methods; use
        eventOutcome to answer one.  Example:

        class Custom(PluginManager.Plugin):
            def parse_request(self, descr):
                return self.eventOutcome(True)
            def create_download(self, req):
                return self.eventOutcome(True)
            def download_created(self, dnl):
                return self.eventOutcome(True)
            def start_download(self, dnl):
                return self.eventOutcome(True)
            def resume_download(self, dnl):
                return self.eventOutcome(True)
            def download_status(self, dnl):
                return self.eventOutcome(True)
        '''
        def handleEvent(self, manager, name, *args, **kwd):
            # Missing or non-callable attributes mean "event not handled".
            action = getattr(self, name, None)
            if callable(action):
                return action(manager, *args, **kwd)
        def eventOutcome(self, *args, **kwd):
            return PluginManager.eventOutcome(*args, **kwd)
        def install(self, manager):
            return True
        def uninstall(self, manager):
            return True
    # RPC Methods.
    def install(self):
        pass
    def uninstall(self):
        pass
    def reload(self):
        pass
class DownloadManager(NativePlatform, PluginManager):
    """Tracks Download objects (indexed by name, id and child pid) and
    implements the download.* XMLRPC verbs, firing plugin events around
    each operation so plugins may override the default behavior."""
    # Todo: Download persistance.
    # Todo: Permissive Capabilities.
    # Todo: SIGCHLD reaper.
    NAME_LENGTH = 32
    NAME_CHARACTERS = 'abcdef0123456789'
    def __init__(self, config):
        PluginManager.__init__(self, config)
        NativePlatform.__init__(self, config)
        self.downloadsByName = {}
        self.downloadsByIdnum = {}
        self.downloadsByPID = {}
    @property
    def downloads(self):
        return self.downloadsByIdnum.itervalues()
    def create_download_id(self):
        # Next id above the highest in use (ids are not recycled in-session).
        ids = [dnl.idnum for dnl in self.downloads]
        return max(ids) + 1 if ids else 0
    def create_download_name(self):
        # Random hex names; retry on the (unlikely) collision.
        while True:
            name = create_random_name(self.NAME_LENGTH, self.NAME_CHARACTERS)
            if name not in self.downloadsByName:
                return name
    def get_download(self, name):
        """Look up by integer id or string name; other types yield None."""
        if type(name) is int:
            return self.downloadsByIdnum[name]
        if type(name) is str:
            return self.downloadsByName[name]
    def remove_download(self, dnl):
        del self.downloadsByIdnum[dnl.idnum]
        del self.downloadsByName[dnl.name]
        del self.downloadsByPID[dnl.pid]
    def reap(self, signr, stack_frame):
        # If this fails, it was never meant to be.
        for (pid, status) in self.cleanupChildProcesses():
            dnl = self.downloadsByPID.get(pid)
            if dnl is not None:
                dnl.reap(status)
                self.event.server.reap.process(dnl, status, stack_frame)
    # XMLRPC Methods.
    def download(self, descr):
        """Parse descr into requests and start a Download for each; returns
        the list of new download ids.  Raises NameError on duplicate names."""
        request_list = self.event.parse.request(descr)
        if request_list is None:
            request_list = Download.Request.parse(descr)
        request_list = list(request_list)
        rql = self.event.prepare.request(request_list)
        if rql is not None:
            request_list = rql
        ids = []
        for req in request_list:
            if req.name in self.downloadsByName:
                raise NameError(req.name)
            dnl = self.event.create.download(req)
            if dnl is None:
                dnl = Download(self, req)
            self.downloadsByName[dnl.name] = dnl
            self.downloadsByIdnum[dnl.idnum] = dnl
            if not self.event.start.download(dnl):
                dnl.start()
            self.downloadsByPID[dnl.pid] = dnl
            self.event.download.created(dnl)
            ids.append(dnl.idnum)
        return ids
    def cancel(self, name, complete = None, delete = None):
        dnl = self.get_download(name)
        if not self.event.cancel.download(dnl):
            dnl.cancel(complete = complete, delete = delete)
    def pause(self, name):
        dnl = self.get_download(name)
        if not self.event.pause.download(dnl):
            dnl.pause()
    def resume(self, name):
        dnl = self.get_download(name)
        if not self.event.resume.download(dnl):
            # Bug fix: this previously called dnl.pause(), so a paused
            # download could never actually be resumed over RPC.
            dnl.resume()
    def status(self, name):
        dnl = self.get_download(name)
        return self.event.download.status(dnl) or dnl.get_status()
    def complete(self, name, delete = False):
        # Finishes the download by removing it from manager.
        # (Download.complete itself calls remove_download and returns None.)
        dnl = self.get_download(name)
        self.event.download.complete(dnl)
        if dnl.complete(delete = delete):
            self.remove_download(dnl)
    def query(self, criteria):
        # Return download ids matching criteria.
        select = Download.Selector(criteria)
        return [dnl.idnum for dnl in select(self.downloads)]
    def restart(self, name):
        pass
class ServerControl(EventSink):
SERVER_CLOSED = 0
SERVER_CLOSING = 2
SERVER_STOPPED = 1
SERVER_STOPPING = 3
SERVER_RUNNING = 4
_state_names = {SERVER_CLOSED : 'Closed' ,
SERVER_CLOSING : 'Closing' ,
SERVER_STOPPED : 'Stopped' ,
SERVER_STOPPING : 'Stopping',
SERVER_RUNNING : 'Running' }
_state_codes = {'Closed' : SERVER_CLOSED ,
'Closing' : SERVER_CLOSING ,
'Stopped' : SERVER_STOPPED ,
'Stopping' : SERVER_STOPPING,
'Running ' : SERVER_RUNNING }
def __init__(self):
EventSink.__init__(self, self)
def serve_cooperatively(self):
self.set_state(self.SERVER_RUNNING)
while True:
state = self.get_state()
if state != self.SERVER_RUNNING:
break
# Todo: implement long timeouts (for catching server shutdowns.)
try: self.handle_request()
except NativePlatform.SelectError, e:
if NativePlatform.isInterruptError(e):
pass
except KeyboardInterrupt:
self.set_state(self.SERVER_STOPPING)
break
if state == self.SERVER_CLOSING:
self.server_shutdown()
self.set_state(self.SERVER_CLOSED)
else:
self.set_state(self.SERVER_STOPPED)
def get_state(self):
try: return self.__state
except AttributeError:
return self.SERVER_STOPPED
def set_state(self, state):
state = self._changeState(state)
self.__state = int(state)
def _changeState(self, state):
# Operate EventSink.
change = self.event.server.state.change(self._state_names[state])
if change is not None:
change = self._state_codes.get(change)
if change is not None:
state = change
return state
# RPC Methods.
def srv_start(self):
if self.get_state() == self.SERVER_STOPPED:
nth(self.serve_cooperatively, ())
def srv_stop(self):
self.set_state(self.SERVER_STOPPING)
def srv_shutdown(self):
self.set_state(self.SERVER_CLOSING)
def srv_state(self):
return self._state_names[self.get_state()]
class XMLRPC(SimpleXMLRPCServer):
    # Customized server generates better faultString (for debugging purposes)
    # and also defines unregister_function, RequestHandler (for plugin dynamicity).
    from SimpleHTTPServer import SimpleHTTPRequestHandler
    class RequestHandler(SimpleXMLRPCRequestHandler, SimpleHTTPRequestHandler):
        def handle(self):
            # Todo: implement bypass (or WSGI!)
            # Offer the raw request to plugins first, then do normal XMLRPC.
            self.server.event.handle.server.request(self)
            return SimpleXMLRPCRequestHandler.handle(self)
    def __init__(self, config):
        # allow_none so RPC methods may return None; request logging off.
        SimpleXMLRPCServer.__init__(self, (config.address, config.port),
                                    allow_none = True, logRequests = False,
                                    requestHandler = self.RequestHandler)
    def _marshaled_dispatch(self, data, dispatch_method = None):
        """Like the base implementation, but non-Fault errors become
        Fault(1, <full traceback>) -- see getFaultString."""
        # debug()
        try:
            params, method = loads_xmlrpc(data)
            if dispatch_method is not None:
                response = dispatch_method(method, params)
            else:
                response = self._dispatch(method, params)
            response = (response,)
            response = dumps_xmlrpc(response, methodresponse=1,
                                    allow_none=self.allow_none, encoding=self.encoding)
        except Fault, fault:
            response = dumps_xmlrpc(fault, allow_none=self.allow_none,
                                    encoding=self.encoding)
        except:
            # Intentionally broad: any dispatch error is marshaled back to
            # the caller rather than killing the request handler.
            response = dumps_xmlrpc(Fault(1, self.getFaultString()),
                                    allow_none=self.allow_none, encoding=self.encoding)
        return response
    def getFaultString(self):
        ## import sys
        ## return "%s:%s" % (sys.exc_type, sys.exc_value)
        from traceback import format_exc
        return format_exc()
    def unregister_function(self, name):
        # Inverse of register_function (used by plugin uninstall).
        del self.funcs[name]
class DownloadServer(XMLRPC, ServerControl, DownloadManager):
    """Concrete server: XMLRPC transport + lifecycle control + download
    management, with the RPC namespace registered at construction."""
    def __init__(self, config):
        XMLRPC.__init__(self, config)
        DownloadManager.__init__(self, config)
        self.register_download_functions()
    def register_download_functions(self):
        # (bound method, published RPC name) pairs.
        registrations = [
            (self.download,      'download.initiate'),
            (self.cancel,        'download.cancel'),
            (self.pause,         'download.pause'),
            (self.resume,        'download.resume'),
            (self.status,        'download.status'),
            (self.complete,      'download.complete'),
            (self.query,         'download.query'),
            (self.restart,       'download.restart'),
            (self.srv_state,     'server.status'),
            (self.srv_start,     'server.start'),
            (self.srv_shutdown,  'server.shutdown'),
            (self.srv_stop,      'server.stop'),
            (self.install,       'plugin.install'),
            (self.uninstall,     'plugin.uninstall'),
            (self.reload,        'plugin.reload')]
        for (method, rpcName) in registrations:
            self.register_function(method, rpcName)
# Builtin Plugins.
class Logging(PluginManager.Plugin):
# Formula.
def getTimeString(self):
from datetime import datetime
now = datetime.today()
return '%s/%s/%s %s:%s:%s' % (now.month, now.day, now.year,
now.hour, now.minute, now.second)
def log(self, msg, level = 0):
print '[%s] :: %s' % (self.getTimeString(), msg)
# Download Events.
def parse_request(self, manager, descr):
descr = '\n '.join(descr.split('\n'))
self.log('Parsing Description:\n {\n %s\n }' % descr)
def prepare_request(self, manager, req):
self.log('Preparing Request: {%r}' % req)
def create_download(self, manager, req):
self.log('Creating Download: {%r}'% req)
def download_created(self, manager, dnl):
self.log('Download Created: {%r}' % dnl)
def start_download(self, manager, dnl):
self.log('Starting Download: {%r}' % dnl)
def cancel_download(self, manager, dnl):
self.log('Cancelling Download: {%r}' % dnl)
def pause_download(self, manager, dnl):
self.log('Pausing Download: {%r}' % dnl)
def resume_download(self, manager, dnl):
self.log('Resuming Download: {%r}' % dnl)
def download_status(self, manager, dnl):
self.log('Download Status: {%r}' % dnl)
# Server Events.
def server_state_change(self, manager, state):
self.log('Server State Change: %s' % state)
## def handle_server_request(self, manager, request):
## self.log('Handling Request: {%r}' % request)
def server_reap_process(self, manager, dnl, status, stack_frame):
self.log('Server Reap Process [#%d]: %s' % (status, stack_frame))
class Debugging(PluginManager.Plugin):
    """Built-in plugin exposing debug.evaluate: execute python source inside
    the running server process (development tool)."""
    # Shared evaluation namespace; persists across calls.
    _ns_locals = {}
    def install(self, manager):
        self.manager = manager
        manager.register_function(self.evaluate, 'debug.evaluate')
    def uninstall(self, manager):
        self.manager = None
        manager.unregister_function('debug.evaluate')
    def evaluate(self, *args, **kwd):
        # Expose the manager (and the last request, below) to the code.
        self._ns_locals['manager'] = self.manager
        from downmgr.config import evaluate
        return evaluate(self._ns_locals, *args, **kwd)
    def handle_server_request(self, manager, request):
        self._ns_locals['request'] = request
# Front End.
def parse_cmdln(argv = None):
    """Build the server's option parser and parse argv (None = sys.argv)."""
    parser = OptionParser()
    # Server Options.
    for optionArgs in (('-C', '--config-file'),
                       ('--bind-address',),
                       ('-p', '--port'),
                       ('--wget',)):
        parser.add_option(*optionArgs)
    # Builtin plugin toggles.
    parser.add_option('--logging', action = 'store_true')
    parser.add_option('--debugging', action = 'store_true')
    return parser.parse_args(argv)
def doRunServer(*argv):
    """Configure and launch the download server (blocks in the serve loop)."""
    (options, args) = parse_cmdln(list(argv))
    config = ServerConfiguration()
    # Command-line switches override configuration defaults.
    # (Note: optparse values are strings; port stays a string here.)
    overrides = ((options.bind_address, 'address'),
                 (options.port, 'port'),
                 (options.wget, 'wgetbin'))
    for (value, attribute) in overrides:
        if value:
            setattr(config, attribute, value)
    global dnl_server
    dnl_server = DownloadServer(config)
    if options.logging:
        dnl_server.installPlugin(Logging())
    if options.debugging:
        dnl_server.installPlugin(Debugging())
    dnl_server.serve_cooperatively()
| Python |
# Utility.
from compiler import parse as parse_code
from random import choice
from cStringIO import StringIO
from pdb import set_trace as debug
from sys import exc_info as getSystemException
class Configuration:
    """Simple attribute bag: keyword arguments become attributes."""
    def __init__(self, **kwd):
        for (key, value) in kwd.items():
            setattr(self, key, value)
class ServerConfiguration(Configuration):
    # Defaults for the download server; doRunServer overrides these from
    # the command line (--bind-address, --port, --wget).
    address = '127.0.0.1'
    port = 5000
    wgetbin = 'wget'
def create_random_name(length, characters):
    """Return a random string of `length` symbols drawn from `characters`."""
    picked = [choice(characters) for _ in range(length)]
    return ''.join(picked)
def parse_safe_string(string):
'Evaluate string encoded in python syntax (but nothing else).'
try: return parse_code(string).doc
except SyntaxError:
pass
def create_download_request(url, name, **kwd):
    """Render a single-section INI description for one download request."""
    # Todo: build multi-download request. Here??
    lines = ['[%s]' % (name or '?'), 'url: %s' % url]
    for (optionName, value) in kwd.items():
        if value is not None:
            lines.append('%s: %r' % (optionName, value))
    return '\n'.join(lines) + '\n'
def reraise(exc = (None, None, None)):
(type, value, tb) = exc
if value is None:
(type, value, tb) = getSystemException()
raise (type, value, tb)
# Debugging.
def evaluate(ns_locals, source):
    '''
    Evaluate python code directly within (RPC) server!

    Compiles `source` in 'single' mode, executes it against __main__'s
    globals and the caller-supplied ns_locals, and returns whatever the
    code printed (stdout and stderr are both captured).  Syntax errors
    are returned as the exception's args; runtime errors are printed
    into the captured output.

    SECURITY: executes arbitrary code -- development/debugging use only.
    '''
    ## global lastSourceCode
    ## lastSourceCode = source
    if not source.strip():
        return ''
    try: code = compile(source, '<web>', 'single')
    except SyntaxError, e:
        return e.args
    import __main__ as main
    ns_globals = main.__dict__
    from cStringIO import StringIO as newBuffer
    output = newBuffer()
    import sys
    stdout = sys.stdout
    stderr = sys.stderr
    # Capture everything the executed code prints (or raises).
    sys.stdout = output
    sys.stderr = output
    # Todo: redirect stdin
    try: exec code in ns_globals, ns_locals
    except:
        from traceback import print_exc
        print_exc()
    finally:
        # Always restore the real streams, even on error.
        sys.stdout = stdout
        sys.stderr = stderr
    return output.getvalue()
| Python |
from os import spawnl, P_NOWAIT
from os import waitpid as waitForProcessExit
from signal import signal as installSignalHandler
from select import error as SelectError
from errno import EINTR, ECHILD
def kill(pid):
    """Terminate a process by pid on Win32 via TerminateProcess."""
    import win32api
    # Access right 1 == PROCESS_TERMINATE; handles are not inherited.
    processHandle = win32api.OpenProcess(1, 0, pid)
    return (win32api.TerminateProcess(processHandle, 0) != 0)
def isInterruptError(e):
    """True when the OS error represents an interrupted system call."""
    errorCode = e.args[0]
    return errorCode == EINTR
def isReapComplete(e):
    """True when the OS error means there are no children left to reap."""
    errorCode = e.args[0]
    return errorCode == ECHILD
class NativePlatform:
    """Win32 process-control backend (wget via spawnl).

    NOTE(review): several names used below are NOT imported in this module
    (WNOHANG, theReapSignal, send_signal, theKillSignal, thePauseSignal,
    theResumeSignal); the signal-based methods would raise NameError if
    invoked.  Compare with the POSIX sibling module -- the module-level
    kill() above looks like the intended win32 terminate path."""
    def __init__(self, config):
        # Path to the external wget executable.
        self.wgetbin = config.wgetbin
        # XXX How to install SIGCHLD?
        # installSignalHandler(self.reap)
    def reap(self, signr, stack_frame):
        # NOTE(review): theReapSignal is undefined here, and
        # cleanupChildProcesses() lacks the self. qualifier -- confirm
        # against the POSIX implementation.
        if signr == theReapSignal:
            for (pid, status) in cleanupChildProcesses():
                pass
    def cleanupChildProcesses(self):
        # Drain exited children without blocking.
        # NOTE(review): WNOHANG is not imported in this module.
        while True:
            try: (pid, status) = waitForProcessExit(-1, WNOHANG)
            except OSError, e:
                if isReapComplete(e):
                    break
                reraise()
            yield (pid, status >> 8)
    # Not really needed, since using spawn*
    ## def safe_shell_string(self, string):
    ##     return "'%s'" % string
    def spawn_download(self, kwd):
        # Launch 'wget -c <url> [-O filename]' without blocking.
        # NOTE(review): spawnl's third argument becomes the child's argv[0];
        # as written, '-c' fills that slot -- confirm intent.
        args = ['-c', kwd['url']]
        if 'filename' in kwd:
            args.append('-O')
            args.append(kwd['filename'])
        return spawnl(P_NOWAIT, self.wgetbin, *args)
    def kill_download(self, pid):
        # NOTE(review): send_signal/theKillSignal are undefined on win32.
        send_signal(pid, theKillSignal)
    def pause_download(self, pid):
        send_signal(pid, thePauseSignal)
    def resume_download(self, pid):
        send_signal(pid, theResumeSignal)
| Python |
from os import spawnl, P_NOWAIT, WNOHANG
from os import kill as send_signal
from os import waitpid as waitForProcessExit
from signal import SIGKILL as theKillSignal
from signal import SIGSTOP as thePauseSignal
from signal import SIGCONT as theResumeSignal
from signal import SIGCHLD as theReapSignal
from signal import signal as installSignalHandler
from select import error as SelectError
from errno import EINTR, ECHILD
def isInterruptError(e):
    """True when the OS error represents an interrupted system call."""
    errorCode = e.args[0]
    return errorCode == EINTR
def isReapComplete(e):
    """True when the OS error means there are no children left to reap."""
    errorCode = e.args[0]
    return errorCode == ECHILD
class NativePlatform:
def __init__(self, config):
self.wgetbin = config.wgetbin
installSignalHandler(theReapSignal, self.reap)
def reap(self, signr, stack_frame):
if signr == theReapSignal:
for (pid, status) in cleanupChildProcesses():
pass
def cleanupChildProcesses(self):
while True:
try: (pid, status) = waitForProcessExit(-1, WNOHANG)
except OSError, e:
if isReapComplete(e):
break
reraise()
yield (pid, status >> 8)
# Not really needed, since using spawn*
## def safe_shell_string(self, string):
## return "'%s'" % string
def spawn_download(self, kwd):
args = ['-c', kwd['url']]
if 'filename' in kwd:
args.append('-O')
args.append(kwd['filename'])
return spawnl(P_NOWAIT, self.wgetbin, *args)
def kill_download(self, pid):
send_signal(pid, theKillSignal)
def pause_download(self, pid):
send_signal(pid, thePauseSignal)
def resume_download(self, pid):
send_signal(pid, theResumeSignal)
| Python |
from os import name as os_platform_name
def getNativePlatform():
    """Import the <package>.<os.name> module and return its NativePlatform
    class; raises SystemError when no such platform module exists."""
    moduleName = '%s.%s' % (__name__, os_platform_name)
    try:
        platformModule = __import__(moduleName, globals(), locals(), [''])
    except ImportError:
        # XXX Differentiate between ImportErrors because the module doesn't
        # exist and those raised by errors in its init code.
        raise SystemError('Platform not supported: %r' % os_platform_name)
    return platformModule.NativePlatform
| Python |
#!python
from server import doRunServer
from client import doClientCommand
def main(argv = None):
    """Entry point: dispatch to client mode when the command line names a
    client command, otherwise start the server."""
    if argv is None:
        import sys
        argv = sys.argv
    if not doClientCommand(*argv):
        doRunServer(*argv)
if __name__ == '__main__':
main()
| Python |
# Storage Management Backend and Interfaces
__all__ = ['StorageManagement', 'StorageUnit', 'UserStorage']
import shelve
import re
from .runtime import *
from .architecture import *
class StorageManagement(Component):
    """Shelve-backed storage component: opens the application database and
    hands out typed StorageUnit interfaces via getStorage."""
    DEFAULT_SHELF_NAME = '.application.db'
    def __init__(self, application):
        Component.__init__(self, application)
        # Shelf location comes from the --db-path option (addCmdlnOptions).
        db_path = application.getConfigOption('db-path')
        self.shelf = shelve.open(self.getPathName(db_path))
    @classmethod
    def addCmdlnOptions(self, parser):
        parser.add_option('--db-path', '--db', default = self.DEFAULT_SHELF_NAME)
        parser.add_option('--dump-db')
    def getStorage(self, key, interface = None):
        """Load (or create) the unit for `key` and wrap it in its interface."""
        unit = StorageUnit.ReadStore(self.shelf, key)
        return key.getStorageInterface(self.shelf, unit, interface)
    def dumpDB(self, filename):
        # Dump the entire shelf contents as indented JSON.
        from json import dump as dumpJson
        db = dict(self.shelf.iteritems())
        fl = open(filename, 'wb')
        dumpJson(db, fl, indent = 2)
        fl.flush()
        fl.close()
    class AuxiliaryApi(ServiceBase):
        # Kind of a toy api for exploring a parked store.
        NAME = 'Storage::Aux'
        def lookupUnitAccess(self, name):
            # Map a unit-class name to its StorageUnit subclass.
            if name == 'UserStorage':
                return UserStorage
            elif name == 'RightsStorage':
                return security.RightsManagement.RightsStorage
            raise NameError(name)
        def buildAuxiliaryStore(self, argv):
            class AuxApp:
                # Auxiliary application.
                logMessages = []
                log = logMessages.append
            # Build app and storage.
            from optparse import OptionParser
            parser = OptionParser()
            StorageManagement.addCmdlnOptions(parser)
            (options, args) = parser.parse_args(argv)
            # NOTE(review): StorageManagement.__init__ accepts only
            # (application); passing options/args here raises TypeError,
            # and AuxApp defines no getConfigOption -- confirm intent.
            storage = StorageManagement(AuxApp(), options, args)
            storage.application.storage = storage
            return storage
class StorageUnit:
    """A persistence unit stored in a shelf under a realm-qualified key.
    Subclasses set STORAGE_REALM and may extend Interface."""
    @classmethod
    def Open(self, app, *args, **kwd):
        """Open this unit type through the application's storage manager."""
        return app.storage.getStorage(self.Key(*args, **kwd))
    def __init__(self, key, store, unit):
        self.key = key
        self.store = store
        self.unit = unit
    def synchronize(self):
        """Write the in-memory unit back to the shelf."""
        self.WriteStore(self.store, self.key, self.unit)
    @classmethod
    def getInterface(self, key, store, unit):
        return self.Interface(self(key, store, unit))
    @classmethod
    def ReadStore(self, store, key, create_unit = None):
        """Load the unit for key, creating (and persisting) it on first use.

        create_unit, when callable, builds the initial unit value;
        otherwise a fresh empty dict is used."""
        unitName = key.getUnitName()
        try: return store[unitName]
        except KeyError:
            # Bug fix: call the factory; previously the callable itself was
            # stored in the shelf instead of the unit it would have built.
            unit = create_unit() if callable(create_unit) else {}
            store[unitName] = unit
            store.sync()
            return unit
    @classmethod
    def WriteStore(self, store, key, unit):
        store[key.getUnitName()] = unit
        store.sync()
    STORAGE_REALM = '----'
    class StringKey:
        """Key addressing one unit: a storage class (realm) plus a string."""
        def __init__(self, storageClass, value):
            self.storageClass = storageClass
            self.value = value
        def getStorageInterface(self, store, unit, interface):
            # NOTE(review): the 'interface' argument is currently unused.
            return self.storageClass.getInterface(self, store, unit)
        def getUnitName(self):
            return '%s[%s]' % (self.storageClass.STORAGE_REALM, self.value)
    @classmethod
    def Key(self, *args, **kwd):
        return self.StringKey(self, *args, **kwd)
    @classmethod
    def GetUnitNamePattern(self):
        # Matches 'REALM[value]' shelf keys, capturing the value as 'unit'.
        pattern = '^%s\[(?P<unit>.*?)\]$' % re.escape(self.STORAGE_REALM)
        return re.compile(pattern)
    @classmethod
    def MatchKeys(self, keys):
        """Yield (shelfKey, unitValue) for the keys in this realm."""
        p = self.GetUnitNamePattern().match
        for k in keys:
            m = p(k)
            if m is not None:
                yield (k, m.groupdict()['unit'])
    class Interface:
        """Accessor facade over a StorageUnit; usable as a context manager
        that synchronizes the store on exit."""
        def __init__(self, unit):
            self.unit = unit
        def getKey(self):
            return self.unit.key
        def synchronizeStore(self):
            self.unit.synchronize()
        def getValue(self, name, default = None):
            return self.unit.unit.get(name, default)
        def setValue(self, name, value):
            self.unit.unit[name] = value
        def __enter__(self):
            return self
        def __exit__(self, etype = None, value = None, tb = None):
            self.synchronizeStore()
class UserStorage(StorageUnit):
    """Per-user storage unit: authentication secret plus role membership."""
    STORAGE_REALM = 'UserStorage'
    class Interface(StorageUnit.Interface):
        AUTH_SECRET_KEY = 'auth-secret-key'
        def checkAccess(self, digest):
            # Basically hash username against secret key and compare to digest.
            # Makes the assumption that the key string-value is the same as the username.
            secretKey = self.getValue(self.AUTH_SECRET_KEY, '')
            calc = security.CalculateDigest(secretKey, self.getKey().value)
            return calc == digest
        def changeSecretKey(self, secretKey):
            self.setValue(self.AUTH_SECRET_KEY, secretKey)
        USER_ROLES = 'user-roles'
        PUBLIC_ROLE = 'PUBLIC'
        def getUserRoles(self):
            # Every user implicitly holds the PUBLIC role.
            return setListAdd(self.getValue(self.USER_ROLES), self.PUBLIC_ROLE)
        def setUserRoles(self, *roles):
            self.setValue(self.USER_ROLES, list(roles))
        def addUserRole(self, role):
            self.setUserRoles(*setListAdd(self.getUserRoles(), role))
        def removeUserRole(self, role):
            self.setUserRoles(*setListRemove(self.getUserRoles(), role))
# This stuff should go into runtime (misc)
def setListAdd(thisSet, item):
    """Treat a list as a set: ensure item is present (in place); None
    stands for the empty set."""
    members = [] if thisSet is None else thisSet
    if item not in members:
        members.append(item)
    return members
def setListRemove(thisSet, item):
    """Treat a list as a set: drop the first occurrence of item (in
    place); None stands for the empty set."""
    if thisSet is None:
        return []
    if item in thisSet:
        thisSet.remove(item)
    return thisSet
## REGEX_SPECIAL_CHARS = r'()[].*$^\?+'
## REGEX_SPECIAL_CHARS = [(c, r'\%s' % c) for c in REGEX_SPECIAL_CHARS]
##
## def escape_regex(s):
## # Escape a regular expression string so that it matches literally.
## # Todo: speed this up.
## for (c, r) in REGEX_SPECIAL_CHARS:
## s = s.replace(c, r)
##
## return s
def searchAllStorageUnits(store, *unitClasses):
    """Yield a descriptor (unitClass, key, name, Open) for every unit of
    the given classes present in the store."""
    allKeys = store.keys()
    for unitClass in unitClasses:
        for (shelfKey, unitName) in unitClass.MatchKeys(allKeys):
            def opener(app, uc = unitClass, n = unitName):
                # Default arguments bind the loop variables at definition time.
                return uc.Open(app, n)
            yield Synthetic(unitClass = unitClass,
                            key = shelfKey, name = unitName,
                            Open = opener)
def showStorageUnits(units):
    """Print one 'realm: name' line per unit descriptor (see
    searchAllStorageUnits)."""
    for u in units:
        print '%-30s: %s' % (u.unitClass.STORAGE_REALM, u.name)
from . import security
| Python |
# Dealing with the Platform/Runtime Core
# todo:
# break into package:
# core.py (Most of these objects)
# utility.py (Misc functional routines)
#
# other platform-related modules
#
__all__ = ['Object', 'Synthetic', 'breakOn', 'set_trace', 'runcall',
'getSystemException', 'getCurrentRuntimeFrame', 'showTraceback',
'printException', 'basename', 'expanduser', 'expandvars', 'nth',
'getCurrentSystemTime', 'LookupObject', 'MethodCall', 'Method',
'threading', 'NewBuffer', 'sys', 'extractTraceback',
'getCapitalizedName', 'synchronized', 'synchronizedWith',
'getAttributeChain', 'Shuttle', 'buildCmdln',
'Print', 'PrintTo', 'Throw', 'Catch', 'repeat',
'sendSignal', 'thisProcessId', 'contextmanager']
from os.path import basename, expanduser, expandvars
from sys import exc_info as getSystemException, _getframe as getCurrentRuntimeFrame
from traceback import print_exc as printException, extract_tb
from thread import start_new_thread as _nth
from time import time as getCurrentSystemTime
from StringIO import StringIO as NewBuffer
class Unimplemented:
    """Callable placeholder that defers an import failure until use."""
    def __init__(self, name, cause):
        self.name = name
        self.cause = cause
    def __call__(self, *args, **kwd):
        # Surface the original import problem at call time.
        raise NotImplementedError(self.cause)
def ImportImplementation(moduleName, objectName = ''):
    """Import moduleName (returning the deepest submodule via fromlist);
    on failure return an Unimplemented stub carrying the error."""
    try:
        return __import__(moduleName, fromlist = [objectName])
    except ImportError as e:
        return Unimplemented(moduleName, e)
## sendSignal = ImportImplementation('os', 'kill')
## thisProcessId = ImportImplementation('os', 'getpid')
# Fall back to Unimplemented stubs on platforms lacking os.kill/os.getpid.
try: from os import kill as sendSignal
except ImportError, e:
    sendSignal = Unimplemented('sendSignal', e)
try: from os import getpid as thisProcessId
except ImportError, e:
    thisProcessId = Unimplemented('thisProcessId', e)
import threading
import sys
from .debugging import breakOn, set_trace, runcall
class Object:
    """Mixin providing a declarative __repr__ driven by Meta.Attributes."""
    class Meta:
        # Attribute descriptors: a name, 'name()' (call the looked-up
        # value), a callable taking the instance, or a (name, callable) pair.
        Attributes = []
        def __init__(self, *attributes):
            self.Attributes = attributes
        @classmethod
        def toString(self, instance):
            """Render instance's declared attributes as 'name = value' pairs."""
            def getAttr(name):
                if callable(name):
                    attr = name(instance)
                    name = ''
                elif isinstance(name, (list, tuple)):
                    (name, function) = name
                    assert callable(function)
                    attr = function(instance)
                elif name.endswith('()'):
                    # Bug fix: strip the '()' marker before the lookup
                    # (getattr on the literal 'name()' always failed).
                    attr = getattr(instance, name[:-2])
                    if callable(attr):
                        attr = attr()
                else:
                    # Bug fix: read from the instance, not the Meta class.
                    attr = getattr(instance, name)
                return (name, attr)
            return ', '.join('%s = %r' % getAttr(n) for n in self.Attributes)
    # Serialization??
    def __repr__(self):
        attrs = self.Meta.toString(self)
        return '<%s%s%s>' % (self.__class__.__name__,
                             attrs and ' ' or '', attrs)
    # __str__ = __repr__
class Synthetic:
    """Ad-hoc record: keyword arguments become attributes."""
    def __init__(self, **kwd):
        self.__dict__ = kwd
    def __repr__(self):
        return 'Synthetic: %r' % self.__dict__
    def update(self, values = None, **kwd):
        """Merge attributes from another Synthetic or a dict, then from
        any keyword arguments (keywords win)."""
        source = values.__dict__ if isinstance(values, Synthetic) else values
        if isinstance(source, dict):
            self.__dict__.update(source)
        self.__dict__.update(kwd)
# Should probably go in architecture:
class Method:
    """Accumulates a dotted name through attribute access; calling the node
    invokes the supplied call(name, ...) delegate.  (Should be called
    "Node" or similar, since it is not always a method.)"""
    def __init__(self, name, call, getprop = None):
        self.__name = name
        self.__call = call
        self.__getprop = getprop
    def __getattr__(self, segment):
        dotted = '%s.%s' % (self.__name, segment)
        return Method(dotted, self.__call, self.__getprop)
    def __call__(self, *args, **kwd):
        return self.__call(self.__name, *args, **kwd)
    @classmethod
    def GetProperty(self, methodObject):
        # Not this easy: should call another __getprop method that doesn't
        # rely on __getattr__ on the remote server, but something adjacent
        # to callEntityMethod.  (Name mangling reaches the privates here.)
        return methodObject.__getprop(methodObject.__name)
class MethodCall:
    """Root factory: attribute access yields Method nodes bound to the
    given call / get-property delegates."""
    def __init__(self, callMethod, getProperty = None):
        self.__callMethod = callMethod
        self.__getProperty = getProperty
    def __getattr__(self, name):
        return Method(name, self.__callMethod, self.__getProperty)
from types import MethodType, InstanceType
def getObjectAttribute(object, name):
try: return getattr(object, name)
except AttributeError:
if isinstance(object, InstanceType):
return getInstanceAttribute(object, name)
def walkAllBases(cls):
    """Return the set of cls plus every (transitive) base class."""
    seen = set()
    pending = [cls]
    while pending:
        current = pending.pop()
        if current not in seen:
            seen.add(current)
            pending.extend(current.__bases__)
    return seen
def getInstanceAttribute(object, name):
cls = object.__class__
try: member = getattr(cls, name)
except AttributeError:
for cls in walkAllBases(cls): # Rebinding variable 'cls'
try: member = getattr(cls, name)
except AttributeError:
pass
else:
raise AttributeError(name)
if isinstance(member, MethodType):
# Bind it.
if member.im_self is None:
return MethodType(object, object, cls)
return member
def getAttributeChain(object, name):
    """Resolve a dotted path ('a.b.c') one segment at a time using
    getObjectAttribute."""
    target = object
    for segment in name.split('.'):
        target = getObjectAttribute(target, segment)
    return target
def LookupObject(name, raise_import_errors = False):
    'A clever routine that can import an object from a system module from any attribute depth.'
    parts = name.split('.')
    moduleObject = None
    module = None
    n = len(parts)
    x = 0
    # Phase 1: import successively deeper module paths until one fails;
    # 'module' is the deepest importable module, 'moduleObject' its parent.
    # NOTE(review): when the WHOLE dotted path is importable, phase 2 is
    # skipped and the PARENT module is returned -- confirm intent.
    while x < n:
        name = '.'.join(parts[:x+1])
        moduleObject = module
        # Try to find the module that contains the object.
        try: module = __import__(name, globals(), locals(), [''])
        except ImportError:
            break
    # No top-level module could be imported.
        x += 1
    if moduleObject is None:
        # moduleObject stays None only when the loop stopped on the first
        # part: x == 1 means the single-part name imported fine.
        if x == 1:
            # There was no sub-object name specified: normally,
            # we don't want namespaces, but let's allow it now.
            return module
        raise ImportError(name)
    # Phase 2: resolve the remaining dotted parts as attributes.
    object = moduleObject
    while x < n:
        # Now look for the object value in the module.
        # If an attribute can't be found -- this is where we raise the original import-error?
        ## if raise_import_errors:
        ##     module = __import__(name, globals(), locals(), [''])
        object = getattr(object, parts[x])
        x += 1
    return object
def getCapitalizedName(name):
    """Return name with its first character upper-cased.

    Unlike str.capitalize(), the remainder keeps its original case; an
    empty string is returned unchanged (the old name[0] raised IndexError)."""
    return name[:1].upper() + name[1:]
def showTraceback():
    """Print a compact trace of the CURRENT call stack (oldest frame
    first), one 'file(line)function()' entry per line."""
    # Start from the caller's frame, then walk outward.
    f = getCurrentRuntimeFrame(1)
    stack = []
    while f:
        stack.append(f)
        f = f.f_back
    tb = []
    stack.reverse()
    for f in stack:
        co = f.f_code
        tb.append('%s(%d)%s()' % (basename(co.co_filename), f.f_lineno, co.co_name))
    print '\n'.join(tb) + '\n'
# Memoized [(len, path)] view of sys.path, rebuilt when sys.path changes.
_sorted_system_paths_memo = None #: Internal
# Snapshot of sys.path used to detect staleness of the memo above.
_sorted_system_paths_key = set()
def isSortedSystemPathMemoOutdated():
    """True when the sorted-path memo must be rebuilt (first use, or
    sys.path changed); falls through to None (falsy) when it is current."""
    if _sorted_system_paths_memo is None:
        return True
    fresh_key = set(sys.path)
    global _sorted_system_paths_key
    if _sorted_system_paths_key != fresh_key:
        # Remember the new sys.path snapshot for the next staleness check.
        _sorted_system_paths_key = fresh_key
        return True
def getSortedSystemPaths():
    """Return memoized [(len(path), path)] pairs for sys.path, sorted by
    ASCENDING length.

    NOTE(review): the old trailing comment asked 'descending length?';
    ascending means the shortest matching prefix wins in stripSystemPath --
    confirm which order is intended."""
    if isSortedSystemPathMemoOutdated():
        global _sorted_system_paths_memo
        _sorted_system_paths_memo = [(len(p), p) for p in sys.path]
        _sorted_system_paths_memo.sort(lambda a, b:cmp(a[0], b[0])) # ascending length
    return _sorted_system_paths_memo
# This shouldn't have to change even if new paths are added.
# Memo: filename -> stripped filename (never invalidated).
_stripped_path_memo = {}
def stripSystemPath(filename, system_paths):
    """Strip the first matching sys.path prefix (plus separator) from
    filename; system_paths is the [(len, path)] list produced by
    getSortedSystemPaths().  Results are memoized."""
    try: return _stripped_path_memo[filename]
    except KeyError:
        for (n, p) in system_paths:
            if filename.startswith(p):
                if p:
                    # Chop off the path this module was loaded from.
                    stripped = filename[n+1:]
                    break
        else:
            # Nothing found?? Probably ''
            stripped = filename
        _stripped_path_memo[filename] = stripped
        return stripped
def extractTraceback(tb, limit = None, fullpaths = False):
    """Extract a traceback like traceback.extract_tb; unless fullpaths is
    set, the sys.path prefix is stripped from each frame's filename."""
    stack = extract_tb(tb, limit = limit)
    if fullpaths:
        return stack
    system_paths = getSortedSystemPaths()
    for (index, frame) in enumerate(stack):
        (filename, lineno, name, line) = frame
        stack[index] = (stripSystemPath(filename, system_paths),
                        lineno, name, line)
    return stack
def synchronized(function):
    """Decorator: serialize every call to function behind a single lock."""
    guard = threading.Lock()
    def synchronizedFunction(*args, **kwd):
        guard.acquire()
        try:
            return function(*args, **kwd)
        finally:
            guard.release()
    return synchronizedFunction
def synchronizedWith(object, recursive = False):
    """Decorator factory: serialize decorated calls on a lock stored on
    `object`, or on a fresh anonymous lock when object is None; recursive
    selects an RLock."""
    def buildLock():
        return threading.RLock() \
               if recursive \
               else threading.Lock()
    if object is None:
        # Anonymous lock.
        __synch_lock = buildLock()
        def getLock():
            return __synch_lock
    else:
        # Control-object-accessible lock.
        def getLock():
            return object.__synch_lock
        # Create the lock attribute on first use.
        try: getLock()
        except AttributeError:
            object.__synch_lock = buildLock()
    def makeSynchronizer(function):
        def synchronizedFunctionWith(*args, **kwd):
            with getLock():
                return function(*args, **kwd)
        return synchronizedFunctionWith
    return makeSynchronizer
class Shuttle:
    '''A curried callable: remembers a function plus partial positional and
    keyword arguments, merging in any further arguments at call time
    (call-time keywords override curried ones).'''
    def __init__(self, function, *args, **kwd):
        self.function = function
        self.curriedArgs = args
        self.curriedKwd = kwd
    def __call__(self, *moreArgs, **moreKwd):
        merged = dict(self.curriedKwd)
        merged.update(moreKwd)
        return self.function(*(self.curriedArgs + moreArgs), **merged)
def buildCmdlnWithBase(baseArgs, baseOptions, *args, **kwd):
    '''Build an argv-style list: "--name=value" switches followed by the
    positional arguments, merging *args*/*kwd* over the base collections.

    A value of True yields a bare "--name" flag; list/tuple values repeat
    the switch once per element; underscores in names become dashes.
    '''
    args = list(baseArgs) + list(args)
    opts = baseOptions.copy()
    opts.update(kwd)
    def iterateOptions(opts):
        # Expand list/tuple values into one (name, value) pair per element.
        for (n, v) in opts.iteritems():
            if isinstance(v, (list, tuple)):
                for x in v:
                    yield (n, x)
            else:
                yield (n, v)
    def optionToSwitch((n, v)):
        # Render one (name, value) pair as a long-option switch.
        n = n.replace('_', '-')
        if v is True:
            return '--%s' % n
        return '--%s=%s' % (n, v)
    cmdln = map(optionToSwitch, iterateOptions(opts))
    cmdln.extend(args)
    return cmdln
def buildCmdln(*args, **kwd):
    # Convenience form of buildCmdlnWithBase with no base args or options.
    return buildCmdlnWithBase((), {}, *args, **kwd)
def nth(function, *args, **kwd):
    # Thin wrapper packaging the call arguments for _nth (from the runtime
    # star-import); the actual semantics live in _nth -- see that module.
    return _nth(function, args, kwd)
def Catch(f, *catches, **others):
    '''Call f() with declarative exception handlers.

    catches: (ExceptionType, handler) pairs; the first handler whose type
    matches receives (etype, value, tb).  Keyword "Except" is a catch-all
    handler and "Finally" always runs afterwards.  Unhandled exceptions are
    re-raised with their original traceback.
    '''
    try: return f()
    except:
        from sys import exc_info
        (etype, value, tb) = exc_info()
        for (ctype, cf) in catches:
            if isinstance(value, ctype):
                return cf(etype, value, tb)
        try: excall = others['Except']
        except KeyError: raise etype, value, tb
        else: return excall(etype, value, tb)
    finally:
        try: fin = others['Finally']
        except KeyError: pass
        else: fin()
def Print(*args):
    # Function form of the print statement: space-joined str() of each arg.
    print ' '.join(map(str, args))
def PrintTo(stream, *args):
    # Like Print, but directed at the given stream ("print >>" form).
    print >> stream, ' '.join(map(str, args))
def Throw(etype, *args, **kwd):
    '''Construct an instance of exception type *etype* with the given
    arguments and raise it (expression-friendly raise).'''
    error = etype(*args, **kwd)
    raise error
def repeat(f, n, filter = None):
    # Evaluate f(0..n-1), pruned by the optional filter predicate.  The
    # parameter shadows the builtin filter, hence the aliased import.
    from __builtin__ import filter as ff
    return ff(filter, (f(x) for x in xrange(n)))
# Sorry: require this:
from contextlib import contextmanager
| Python |
# Data Format Encoding
__all__ = ['IncreasePackageSize', 'PackageSizeTerminated', 'EncodePackageSize',
'Packager', 'Unpackager', 'Serializable', 'Mapping', 'Compress',
'EntitySpace']
__others__ = ['unpackMessage', 'packMessage', 'unpackMessageSize', 'unpackMessageBuffer',
'encodeMessage', 'encodeCommand', 'encodeResponse', 'encodeException',
'encodePackageSize', 'encodePackage', 'flattenCommandMessage']
import types
from .runtime import *
from .architecture import Serializable
# Encoding Model
def getInstanceClassName(instance):
    '''Return the fully-qualified "module.ClassName" of *instance*'s class.'''
    cls = instance.__class__
    return '.'.join((cls.__module__, cls.__name__))
class Table(dict):
    '''Dispatch table mapping a key (a type object or tag character) to a
    handler function, registered through the .matching() decorator factory.
    Flags given to the constructor act as defaults underneath any flags
    given at registration time.'''
    def __init__(self, **preflags):
        self.preflags = preflags
    def matching(self, type, **flags):
        # Decorator factory: register the decorated function under *type*
        # and tag it with the merged flag set.
        merged = dict(self.preflags)
        merged.update(flags)
        def makeDispatcher(function):
            self[type] = function
            function._packaging_flags = merged
            return function
        return makeDispatcher
    @classmethod
    def IsFlagged(self, function, *flags):
        '''True when *function* was registered with every named flag truthy.'''
        flagged = getattr(function, '_packaging_flags', None)
        if flagged is None:
            return False
        return all(flagged.get(f) for f in flags)
COMPRESSION_LEVEL = {1: 'zlib', 2: 'gzip', 3: 'bz2'}  # level number -> codec name
def Compress(packed, level):
    # Compress *packed* with the codec for *level* and prefix the result
    # with "Z<level>"; any unknown level (e.g. False) returns the data
    # unchanged.  NOTE: str.encode('zlib'/'gzip'/'bz2') is a Python 2
    # codec feature.
    try: packed = packed.encode(COMPRESSION_LEVEL[level])
    except KeyError: return packed
    else: return 'Z%d%s' % (level, packed)
class Packager:
    '''Serializes Python values into the tagged wire format.

    Every value is written as a one-letter tag with a "#"-terminated
    payload.  Non-primitive values are memoized by id(): the first
    occurrence is prefixed "K<mark>#", later occurrences collapse to a
    back-reference "M<mark>#", so shared and cyclic structures are sent
    once.  Objects of unknown type are registered with the entity map and
    sent as "R<refId>#" references.
    '''
    def __init__(self, entityMap, buf = None, memo = None):
        self.entityMap = entityMap
        self.reset(buf = buf, memo = memo)
    def reset(self, buf = None, memo = None):
        self.buf = NewBuffer() if buf is None else buf
        # Bug fix: the memo parameter was accepted but silently ignored.
        self.memo = {} if memo is None else memo
    def compress(self, packed, level):
        # A callable level acts as a custom compression policy; otherwise
        # defer to the module-level Compress (unknown level = no-op).
        if callable(level):
            return level(packed)
        return Compress(packed, level)
    def pack(self, value, compression = False):
        '''Serialize *value* into the buffer and return the (optionally
        compressed) packed string.'''
        self.packObject(value, self.buf.write)
        packed = self.buf.getvalue()
        return self.compress(packed, compression)
    packaging = Table()
    def packObject(self, object, write):
        # Todo: write a version that pre-scans for memoized/marking
        # requirements to cut down on space (marks are currently reserved
        # for every single non-primitive object).
        if isinstance(object, Method):
            # SUPER-SPECIAL CASE: send this method request inline.
            result = Method.GetProperty(object)
            self.packObject(result, write)
            return
        mark = False
        try: p = self.packaging[type(object)]
        except KeyError:
            p = self.__class__.packUnknown
        else:
            if not Table.IsFlagged(p, 'primitive'):
                # Memoize non-primitives so repeats become back-references.
                vId = id(object)
                try: mark = self.memo[vId]
                except KeyError:
                    mark = len(self.memo)
                    self.memo[vId] = mark
                else:
                    # Already packed: emit a reference to its mark.
                    write('M%d#' % mark)
                    return
        if mark is not False:
            write('K%d#' % mark)
        p(self, object, write)
    def packUnknown(self, object, write):
        # No handler for this type: register it and send an entity reference.
        refId = self.entityMap.getObjectRef(object)
        write('R%d#' % refId)
    def packProxy(self, value, write):
        write('R%d#' % value.refId)
    # Basic Primitives.
    @packaging.matching(types.BooleanType, primitive = True)
    def packBoolean(self, value, write):
        write('T' if value else 'F')
    @packaging.matching(types.NoneType, primitive = True)
    def packNone(self, value, write):
        write('N')
    # Numerical Types.
    @packaging.matching(types.IntType, primitive = True)
    def packInteger(self, value, write):
        write('I%d#' % value)
    @packaging.matching(types.FloatType, primitive = True)
    def packDecimal(self, value, write):
        write('D%f#' % value)
    @packaging.matching(types.LongType, primitive = True)
    def packLong(self, value, write):
        # Bug fix: this handler was also named packInteger, silently
        # shadowing the IntType handler on the class body (the dispatch
        # table itself was unaffected).
        write('L%ld#' % value)
    try:
        @packaging.matching(types.ComplexType, primitive = True)
        def packComplex(self, value, write):
            # NOTE(review): %d truncates the float components, so only
            # integral complex values round-trip -- confirm intended.
            write('J%d+%d#' % (value.real, value.imag))
    except AttributeError:
        pass
    # String Types.
    @packaging.matching(types.StringType)
    def packString(self, value, write):
        write('S%d#%s' % (len(value), value))
    @packaging.matching(types.UnicodeType)
    def packUnicode(self, value, write):
        write('U%d#%s' % (len(value), value))
    # Structure Types.
    @packaging.matching(types.TupleType)
    def packTuple(self, value, write):
        write('P%d#' % len(value))
        for v in value:
            self.packObject(v, write)
    @packaging.matching(types.ListType)
    def packList(self, value, write):
        write('W%d#' % len(value))
        for v in value:
            self.packObject(v, write)
    @packaging.matching(types.DictType)
    def packDict(self, value, write):
        write('A%d#' % len(value))
        for (n, v) in value.iteritems():
            self.packObject(n, write)
            self.packObject(v, write)
    # Object Types.
    @packaging.matching(types.InstanceType)
    def packInstance(self, value, write):
        if isinstance(value, EntitySpace.Proxy):
            # Handle this separately from packUnknown, because proxies
            # must never be re-registered as new entities.
            self.packProxy(value, write)
        elif isinstance(value, EntitySpace.Binding):
            self.packProxy(EntitySpace.Binding.GetProxy(value), write)
        elif isinstance(value, Serializable):
            n = getInstanceClassName(value)
            write('H%d#%s' % (len(n), n))
            # Bug fix: getattr without a default raised AttributeError for
            # instances that define no __getstate__.
            if getattr(value, '__getstate__', None):
                write('S')
                state = value.__getstate__()
                self.packObject(state, write)
            else:
                write('D')
                self.packObject(value.__dict__, write)
        else:
            self.packUnknown(value, write)
class Unpackager:
    '''Decodes strings produced by Packager back into Python values,
    resolving memo back-references and entity references as it goes.'''
    def __init__(self, entityMap, buf = None, memo = None):
        self.entityMap = entityMap
        self.reset(buf = buf, memo = memo)
    def reset(self, buf = None, memo = None):
        self.buf = NewBuffer() if buf is None else buf
        # Bug fix: the memo parameter was accepted but silently ignored.
        self.memo = {} if memo is None else memo
    def uncompress(self, packed, level):
        # Unknown levels pass the payload through untouched.
        try: return packed.decode(COMPRESSION_LEVEL[level])
        except KeyError:
            return packed
    def unpack(self, m):
        '''Decode one message, transparently handling a "Z<level>" prefix.'''
        if m[0] == 'Z':
            # Bug fix: this called the non-existent self.decompress().
            m = self.uncompress(m[2:], int(m[1]))
        return self.unpackBuffer(NewBuffer(m))
    def unpackBuffer(self, buf):
        r = buf.read
        def readHash():
            # Accumulate characters up to (excluding) the "#" terminator.
            b = ''
            while True:
                c = r(1)
                if c == '':
                    raise EOFError('Expected #')
                if c == '#':
                    return b
                b += c
        self.read = r
        self.readHash = readHash
        return self.unpackObject()
    def unpackObject(self):
        c = self.read(1)
        if c == 'M':
            # Back-reference to a previously-memoized object.
            mark = int(self.readHash())
            return self.memo[mark]
        elif c == 'K':
            # This record is marked for later back-references.
            mark = int(self.readHash())
            c = self.read(1)
        else:
            mark = False
        # Dispatch on the one-letter type tag.
        u = self.packaging[c]
        object = u(self, self.readHash)
        if mark is not False:
            # Remember this object.
            self.memo[mark] = object
        return object
    packaging = Table()
    # Basic Primitives.
    @packaging.matching('N')
    def unpackNone(self, readHash):
        return None
    @packaging.matching('T')
    def unpackTrue(self, readHash):
        return True
    @packaging.matching('F')
    def unpackFalse(self, readHash):
        return False
    # Numerical Types.
    @packaging.matching('I')
    def unpackInteger(self, readHash):
        return int(readHash())
    @packaging.matching('D')
    def unpackDecimal(self, readHash):
        return float(readHash())
    @packaging.matching('L')
    def unpackLong(self, readHash):
        # Bug fix: this handler was also named unpackInteger, shadowing the
        # 'I' handler on the class body (dispatch itself was unaffected).
        return long(readHash())
    try:
        ComplexType = types.ComplexType
        @packaging.matching('J')
        def unpackComplex(self, readHash):
            (real, imag) = readHash().split('+')
            real = int(real)
            imag = int(imag)
            return self.ComplexType(real, imag)
    except AttributeError:
        pass
    # String Types.
    @packaging.matching('S')
    def unpackString(self, readHash):
        return str(self.read(int(readHash())))
    @packaging.matching('U')
    def unpackUnicode(self, readHash):
        return unicode(self.read(int(readHash())))
    # Structure Types.
    @packaging.matching('P')
    def unpackTuple(self, readHash):
        uo = self.unpackObject
        return tuple(uo() for x in xrange(int(readHash())))
    @packaging.matching('W')
    def unpackList(self, readHash):
        uo = self.unpackObject
        return [uo() for x in xrange(int(readHash()))]
    @packaging.matching('A')
    def unpackDict(self, readHash):
        uo = self.unpackObject
        # Left-to-right evaluation reads the key record before its value.
        return dict((uo(), uo()) for x in xrange(int(readHash())))
    # Object Types.
    @packaging.matching('H')
    def deserializeInstance(self, readHash):
        # Read class name and try to find/import it.
        className = self.read(int(readHash()))
        classObject = LookupClassObject(className)
        # Instantiate with nullary constructor.
        instance = classObject()
        # Reconstruct state via __setstate__ ('S') or raw __dict__ ('D').
        c = self.read(1)
        if c == 'S':
            # Bug fix: unpackObject takes no arguments (readHash was
            # wrongly passed here and below).
            state = self.unpackObject()
            instance.__setstate__(state)
        elif c == 'D':
            instance.__dict__ = self.unpackObject()
        else:
            raise TypeError('Instance deserialization type: %r' % c)
        # Bug fix: the reconstructed instance was never returned.
        return instance
    # Reference Types.
    @packaging.matching('R')
    def deferenceEntity(self, readHash):
        return self.entityMap.getObjectOrProxy(int(readHash()))
class Mapping(dict):
    '''Maps entity reference ids to live objects (or proxies) on one
    endpoint of the connection.'''
    class Proxy:
        # Stand-in for a remote object, identified only by its refId.
        def __init__(self, refId):
            self.refId = refId
        def __repr__(self):
            return '<%s: %s>' % (self.__class__.__name__, self.refId)
    class Binding(MethodCall):
        # An entity proxy that has been bound to a controlling endpoint.
        def __init__(self, proxy, callEntityMethod, getEntityProperty):
            MethodCall.__init__(self, self.__callProxyMethod, self.__getProxyProperty)
            self.__proxy = proxy
            self.__callEntityMethod = callEntityMethod
            self.__getEntityProperty = getEntityProperty
        def __repr__(self):
            return '<%s: %r>' % (self.__class__.__name__, self.__proxy)
        def __callProxyMethod(self, name, *args, **kwd):
            # Route a method invocation at the bound endpoint.
            return self.__callEntityMethod(self.__proxy, name, *args, **kwd)
        def __getProxyProperty(self, name):
            return self.__getEntityProperty(self.__proxy, name)
        def __call__(self, *args, **kwd):
            return self.__callEntityMethod(self.__proxy, '__call__', *args, **kwd)
        @classmethod
        def GetProxy(self, binding):
            # Return the private variable; name-mangled access works here
            # because this classmethod is defined inside Binding itself.
            return binding.__proxy
    def getObjectRef(self, object):
        # Register *object* under its id() (first sighting only) and
        # return that id as its wire reference.
        oId = id(object)
        if oId not in self:
            self[oId] = object
        return oId
    def getObjectOrProxy(self, oId):
        # Resolve an incoming reference; unknown ids become cached proxies.
        try: return self[oId]
        except KeyError:
            proxy = self[oId] = self.Proxy(oId)
            return proxy
    def getObjectOrError(self, oId):
        # Strict resolution: unknown ids are an error, not a proxy.
        try: return self[oId]
        except KeyError:
            raise ValueError(oId)
# Mixins:
class EntityPackaging:
    '''Mixin: serialize/deserialize messages through this entity map,
    applying the channel's configured compression by default.'''
    compression_level = None  # default: no compression policy installed
    def packMessage(self, message, compression = None):
        level = self.compression_level if compression is None else compression
        return Packager(self).pack(message, compression = level)
    def unpackMessage(self, message):
        return Unpackager(self).unpack(message)
class EntityCoding:
    '''Mixin: frame packed messages with a length prefix and build the
    standard command / response / exception message shapes.'''
    def encodePackageSize(self, size):
        return EncodePackageSize(size)
    def encodePackage(self, package):
        # Decimal length prefix followed by the raw package bytes.
        return self.encodePackageSize(len(package)) + package
    def encodeMessage(self, message):
        return self.encodePackage(self.packMessage(message))
    def flattenCommandMessage(self, command, (serialId, flags), args, kwd):
        # assert isinstance(command, basestring)
        # Shape: [command, serialId, flags, args?, kwd?] -- empty
        # args/kwd are omitted entirely.
        message = [command, serialId, flags]
        if args:
            message.append(args)
        if kwd:
            message.append(kwd)
        return message
    def encodeSerialCommand(self, command, (serialId, flags), *args, **kwd):
        flat = self.flattenCommandMessage(command, (serialId, flags), args, kwd)
        return self.encodeMessage(flat)
    def encodeNonSerialCommand(self, command, *args, **kwd):
        # serialId None / flags 0 marks a command expecting no paired reply.
        return self.encodeSerialCommand(command, (None, 0), *args, **kwd)
    def encodeResponse(self, serialId, response):
        # [serialId, [True, value]] marks a successful result...
        return self.encodeMessage([serialId, [True, response]])
    def encodeException(self, serialId, (etype, value, tb)):
        # ...[serialId, [False, exc-info]] marks a failure.
        return self.encodeMessage([serialId, [False, (etype, value, tb)]])
class EntitySpace(Mapping, EntityPackaging, EntityCoding):
    # Complete endpoint data channel: entity map + packaging + wire coding.
    pass
# Data Segment Routines
# These do not work because I don't understand byte-order:
## def EncodePackageSize(size):
## buf = ''
## lo = 0
## while size:
## lo = (size & 255)
## buf += chr(lo)
## size >>= 8
##
## if (lo & 128):
## buf += chr(0)
##
## return buf
##
## def IncreasePackageSize(size, addend):
## # low endian (?)
## return (size << 8) + ord(addend)
##
## def PackageSizeTerminated(addend):
## return not (ord(addend) & (128))
# Less packed style:
def EncodePackageSize(size):
    '''Encode a package length as its decimal digits terminated by ".".'''
    return str(size) + '.'
def IncreasePackageSize(size, addend):
    '''Fold one decimal-digit character into the running *size*;
    non-digit characters (such as the "." terminator) leave it unchanged.'''
    if not addend.isdigit():
        return size
    return size * 10 + int(addend)
def PackageSizeTerminated(addend):
    # "." terminates the decimal size prefix produced by EncodePackageSize.
    return addend == '.'
# Other Routines
def unpackMessageSize(buf):
    '''Decode the decimal size prefix of *buf*.

    Returns (size, index) where index is the position of the terminating
    "." (or of the last scanned character when no terminator appears).
    '''
    size = 0
    size_length = 0
    # Idiom fix: enumerate replaces the index loop (and the Python-2-only
    # xrange); behavior is identical.
    for (size_length, c) in enumerate(buf):
        size = IncreasePackageSize(size, c)
        if PackageSizeTerminated(c):
            break
    return (size, size_length)
def unpackMessageBuffer(buf):
    '''Decode a size-prefixed package ("<digits>.<payload>") and unpack
    the payload as a message.'''
    (size, i) = unpackMessageSize(buf)
    # Bug fix: i indexes the "." terminator itself; the payload starts just
    # past it.  Slicing from i kept the "." and made the length assertion
    # fail for every well-formed package.
    buf = buf[i+1:]
    assert len(buf) == size
    return unpackMessage(buf)
defaultSpace = EntitySpace()                # shared module-level entity space
packMessage = defaultSpace.packMessage      # convenience module-level coders
unpackMessage = defaultSpace.unpackMessage  # (both bound to defaultSpace)
from marshal import loads as unpackMessageBinary
from marshal import dumps as packMessageBinary
# Testing
def inspectPackedMessage(message):
    # Debug helper: pretty-print the structure of a packed message string.
    buf = NewBuffer(message)
    inspectPackedBuffer(buf)
def writeIndent(indent, message, stream = None, tab = '  '):
    '''Write *message* to *stream* (default: sys.stdout), prefixed by
    *indent* copies of *tab* and followed by a newline.'''
    if stream is None:
        from sys import stdout as stream
    stream.write('%s%s\n' % (tab * indent, message))
def inspectPackedBuffer(buf, indent = 0):
    '''Recursively pretty-print one packed record from *buf*, indenting
    nested structures one level per container.  Debug aid for the
    Packager wire format.'''
    r = buf.read
    def readHash():
        # Accumulate characters up to (excluding) the "#" terminator.
        b = ''
        while True:
            c = r(1)
            if c == '':
                raise EOFError('Expected #')
            if c == '#':
                return b
            b += c
    c = r(1)
    if c:
        if c == 'K':
            writeIndent(indent, 'Mark: #%d' % int(readHash()))
            inspectPackedBuffer(buf, indent = indent + 1)
        elif c == 'M':
            writeIndent(indent, '[ Marked #%s ]' % readHash())
        elif c in 'NTF':
            # Bug fix: the whole tag->constant mapping was printed instead
            # of the single constant selected by the tag character.
            writeIndent(indent, str(dict(N = None, T = True, F = False)[c]))
        elif c in 'IDLJ':
            writeIndent(indent, '%s: %r' % (dict(I = 'Integer',
                                                 D = 'Float',
                                                 L = 'Long',
                                                 J = 'Complex')[c],
                                            readHash()))
        elif c in 'SU':
            n = int(readHash())
            writeIndent(indent, '%s: %s' % (dict(S = 'String',
                                                 U = 'Unicode')[c],
                                            r(n)))
        elif c in 'PW':
            n = int(readHash())
            writeIndent(indent, '%s (%d):' % (dict(P = 'Tuple',
                                                   W = 'List')[c],
                                              n))
            indent += 1
            for x in xrange(n):
                inspectPackedBuffer(buf, indent = indent)
            indent -= 1
        elif c == 'A':
            n = int(readHash())
            writeIndent(indent, 'Dict (%d):' % n)
            indent += 1
            for x in xrange(n):
                writeIndent(indent, 'Key:')
                inspectPackedBuffer(buf, indent = indent + 1)
                writeIndent(indent, 'Value:')
                inspectPackedBuffer(buf, indent = indent + 1)
            indent -= 1
        elif c == 'H':
            writeIndent(indent, 'Instance:')
            indent += 1
            writeIndent(indent, 'Class Name:')
            inspectPackedBuffer(buf, indent = indent + 1)
            c = r(1)
            assert c
            if c in 'SD':
                writeIndent(indent, '%s:' % dict(S = 'State',
                                                 D = 'Dict')[c])
                inspectPackedBuffer(buf, indent = indent + 1)
            else:
                raise TypeError('Instance deserialization type: %r' % c)
        elif c == 'R':
            writeIndent(indent, 'Entity Reference: #%s' % readHash())
        else:
            # Unknown tag: dump the remainder verbatim.
            writeIndent(indent, c + buf.read())
def test():
    # Round-trip smoke test: pack a structure containing shared references
    # and an unknown object, dump its wire form, then unpack and print it.
    o = object()
    structure = [[5, 'Hi There', 8.43, (6j + 1)], ()]
    s2 = [structure, [structure]]
    data = [o, o, dict(n = o, structure = structure,
                       s2 = s2)]
    b = packMessage(data)
    inspectPackedMessage(b)
    # defaultSpace.clear()
    value = unpackMessage(b)
    from pprint import pprint
    pprint(value)
if __name__ == '__main__':
test()
| Python |
#!python
from . import Client, Authorization, Fault, ClientOptions, Synthetic, User, Endpoint
from . import printFault, pdb, ApplyCompressionChannel, CompressIfWorthIt
# Front End.
def openClient(setup, options, ns):
    '''Establish the client connection per command-line options, storing
    the resulting client on *setup* and session objects into the
    interactive namespace *ns*.

    Exactly one mode applies: support-dir, endpoint URL, service-partner
    lookup, or a straight host:port connect (with optional digest login).
    '''
    if options.support_dir:
        # Run in 'support mode'
        assert options.username
        user = User(options.username, options.secret_key)
        group = user[options.support_dir]
        ns['user'] = user
        ns['group'] = group
        # NOTE(review): bare attribute access -- has no effect; presumably
        # meant to leave setup.client as None.  Confirm intent.
        setup.client
    elif options.endpoint:
        # Connect to a specific service using endpoint-url form.
        assert options.username
        user = User(options.username, options.secret_key)
        endpoint = Endpoint.Parse(options.endpoint)
        api = endpoint.open(user)
        ns['user'] = user
        ns['endpoint'] = endpoint
        ns['api'] = api
        # NOTE(review): bare attribute access with no effect (as in the
        # support-dir branch) -- confirm intent.
        setup.client
    elif options.service_partner_name:
        # Use the service-partner-lookup on the service manager.
        from pentacle.bus.partners import OpenPartneredClient
        setup.byline = 'SERVICE-MANAGER FAULT'
        setup.client = OpenPartneredClient(options.service_partner_name,
                                           **optionalArgs(options, 'username', 'port'))
    else:
        # Straight connect:
        print 'Connecting to [%s:%d]...' % (options.address, options.port)
        client = Client.Open(options.address, options.port)
        ApplyCompressionChannel(client.dataChannel,
                                options.compression_level,
                                options.compression_threshold)
        username = options.username
        if username:
            print 'Logging in %s...' % username
            auth = Authorization(username, options.secret_key)
            auth.Authenticate(client)
        setup.client = client
def setupClient(setup, options, ns):
    '''Populate the interactive-console namespace *ns* with the client and
    any option-selected remote service APIs.'''
    # Setup interactive namespace.
    import pentacle
    import sys
    client = setup.client
    ns['peer'] = client
    ns['client'] = client
    ns['call'] = client.call if client is not None else None
    ns['options'] = options
    ns['pentacle'] = pentacle
    ns['sys'] = sys
    ns['g'] = Synthetic(**globals())
    # Note: Some of these options are incompatible with --support-dir and --endpoint
    if options.open_system_api:
        system = client.api.open('System::Debugging')
        e = system.invoke.evaluate  # NOTE(review): redundant; reassigned below.
        ns['system'] = system
        ns['e'] = e = system.invoke.evaluate
        ns['x'] = x = system.invoke.execute
        if options.enable_timing_tests:
            # Round-trip timing helpers against the remote evaluator.
            def testTiming(n = 2017):
                return len(e('"A" * %d' % n))
            def timeIt(n, number, repeat = 1):
                from timeit import repeat as tFunc
                return tFunc(lambda:testTiming(n),
                             number = number,
                             repeat = repeat)
            ns['tt'] = testTiming
            ns['ti'] = timeIt
    elif options.open_spline_api:
        def PrintUptime(svcmgr):
            print 'Uptime: %(running_duration)s' % svcmgr.invoke.GetManagerStats()
        from ..bus import ServiceManagerName
        ns['spline'] = client.api.open(ServiceManagerName)
        ns['PrintUptime'] = PrintUptime
    elif options.open_api:
        # "--open-api VAR:Api::Name" binds the opened api to ns[VAR].
        (variable, name) = options.open_api.split(':', 1)
        ns[variable] = client.api.open(name)
def optionalArgs(options, *names):
    '''Collect the named attributes of *options* that are set (not None)
    into a keyword dictionary; missing and None-valued attributes are
    skipped entirely.'''
    return dict(
        (name, getattr(options, name))
        for name in names
        if getattr(options, name, None) is not None)
def main(argv = None):
    '''Command-line entry point: parse options, connect, optionally drop
    into an interactive console, and report faults to stdout.'''
    (options, args) = ClientOptions.parseCmdln(argv)
    if options.debug:
        pdb.set_trace()
    ns = dict()
    setup = Synthetic(byline = 'SERVER FAULT',
                      client = None)
    try:
        # Start connection.
        openClient(setup, options, ns)
        # Handle Session.
        ## if options.api_test:
        ##     testApi(client)
        if options.examine:
            # readline is optional line-editing sugar for the console.
            try: import readline
            except ImportError:
                pass
            # Setup interactive namespace.
            setupClient(setup, options, ns)
            from code import InteractiveConsole as IC
            ic = IC(locals = ns)
            ic.interact()
        if options.quit:
            print 'Quitting Application...'
            setup.client.call.stopApplication()
    except Fault, fault:
        printFault(fault, cause = True, byline = setup.byline)
    except KeyboardInterrupt:
        print 'Console Break'
## def testApi(client):
## with client.api('API::Management') as api:
## print 'Loading ClockService...'
## print api.loadNewServiceApi('pentacle.application.ClockService').NAME
##
## print 'Loading SystemDebug...'
## print api.loadNewServiceApi('pentacle.application.SystemDebug').NAME
##
## print 'Loading ObjectDirectory...'
## print api.loadNewServiceApi('pentacle.application.ObjectDirectory').NAME
##
## print 'Loading AuxiliaryApi...'
## print api.loadNewServiceApi('pentacle.storage.StorageManagement.AuxiliaryApi').NAME
##
## print 'Start Ticking...'
## with client.api('ClockService::API') as clock:
## clock.setClockTicking(10000)
## print 'Current Clock Time:', clock.getClockTime()
##
## print 'Testing Debug...'
## # client.debug = 1
## with client.api('System::Debugging') as debug:
## try: debug.fail()
## except Fault, fault:
## printFault(fault)
##
## print 'Testing Object Directory...'
## with client.api('System::Directory') as directory:
## N = directory.newObject
## directory.setObject('Feature', N(Aspect = N(Scope = N(Name = 'A System Feature Aspect Scope'))))
##
## print directory.getObject('Feature').Aspect.Scope.Name
##
## print 'Testing Auxiliary Storage'
## with client.api('Storage::Aux') as aux:
## db = aux.buildAuxiliaryStore(buildCmdln(db_path = '~/.pentacle/application.db'))
## user = aux.lookupUnitAccess('UserStorage')
## fraun = user.Open(db.application, 'fraun')
##
## # change user-fraun
## print dict(fraun.unit.unit.items())
if __name__ == '__main__':
main()
| Python |
# Pentacle Client Support (User-Auth-Group)
'''
from pentacle.client import support
user = support.User('login', 'secret')
group = user['~/services']
group.managerConfig(port = 7040)
group.serviceConfig('docmgr',
partner_name = 'Westmetal Documents',
service_api = '@westmetal/document/browser')
group.docmgr.openUrl('http://google.com')
'''
from . import Session, Client, CalculateDigest
from ..config import Configuration, INI
from ..application import Application
from ..runtime import *
from ..bus import ServiceManagerName
from os.path import expanduser, expandvars, join as joinpath
from os import makedirs
from errno import ENOENT, EEXIST
from collections import namedtuple
import re
def ensure_directory(paths):
    '''Create directory *paths* (and any missing parents), tolerating
    its prior existence.  Any OSError other than EEXIST propagates.'''
    try:
        makedirs(paths)
    except OSError as exc:
        if exc.errno != EEXIST:
            # Bug fix: "raise (etype, value, tb)" raised the bare tuple's
            # first element without its value/traceback; a bare re-raise
            # preserves the original exception intact.
            raise
MANAGER_PREFIX = '.manager'   # config-file basename for the manager settings
MANAGER_SECTION = 'Manager'   # INI section holding the manager address/port
SERVICE_SECTION = 'Service'   # INI section holding per-service settings
class Group(object):
    # A Group manages a group of service-partner clients configured
    # using a directory of files that describe their services and relationship
    # to a Spline Manager.
    #
    # The primary purpose is to expose a simplified object-oriented service
    # access to a no-brainer external configuration system.
    #
    def __init__(self, main_path, interpolate = True, user = None):
        self._main_path = main_path
        self._interpolate = interpolate  # expand ~ and $VARS in paths
        self._user = user                # User credentials for connections
        self._services = dict()          # name -> ServiceConf cache
    def _getPathFromMain(self, *args):
        # Join *args* onto the (optionally expanded) group directory.
        path = self._main_path
        if self._interpolate:
            path = expandvars(expanduser(path))
        return joinpath(path, *args)
    class NoConfigFile(IOError):
        # Raised when a named .cfg file is absent from the group directory.
        pass
    def _getConfigFromMain(self, name = MANAGER_PREFIX):
        '''Load "<name>.cfg" from the group directory as a Configuration.'''
        # NOTE(review): the file object is handed to FromFileObject without
        # an explicit close -- verify it closes the handle itself.
        try: fileObj = open(self._getPathFromMain('%s.cfg' % name))
        except IOError as exc:
            if exc.errno == ENOENT:
                raise self.NoConfigFile(name)
            # Bug fix: "raise (etype, value, tb)" raised a bare tuple and
            # lost the error details; a bare re-raise preserves everything.
            raise
        return Configuration.FromFileObject(fileObj)
    def _getManagerFullAddress(self):
        # Todo: some kind of unification with server configs (sections?)
        cfg = self._getConfigFromMain()
        cfg = cfg.ConfigSet(cfg, section = MANAGER_SECTION, simplify = True)
        return (cfg.address or 'localhost', cfg.port or Application.DEFAULT_PORT)
    def _getServiceFullAddress(self, partner_name):
        # Ask the manager (spline) where the named partner's service listens.
        (address, port) = self._getManagerFullAddress()
        with Session(self._user.Open(address, port)) as mgr:
            with mgr.api(ServiceManagerName) as spline:
                info = spline.GetPartnerInfo(partner_name)
                return (address, info['service_port'])
    def _getServiceConfigData(self, name):
        cfg = self._getConfigFromMain(name)
        cfg = cfg.ConfigSet(cfg, section = SERVICE_SECTION, simplify = True)
        service_api = cfg.service_api
        partner_name = cfg.partner_name
        # (Maybe do this validation earlier??)
        assert service_api
        assert partner_name
        return dict(service_api = service_api,
                    partner_name = partner_name)
    def _getServiceConf(self, name):
        # Cached per-service configuration accessor.
        try: return self._services[name]
        except KeyError:
            s = self._services[name] = self.ServiceConf(self, name)
            return s
    def _getServiceAccess(self, name):
        return self._getServiceConf(name).access
    def __getattr__(self, name):
        # Get a service object; underscored names fall back to normal lookup.
        if name.startswith('_'):
            return object.__getattribute__(self, name)
        return self._getServiceAccess(name)
    def __setitem__(self, name, value):
        # Write a config file.
        value = str(value)
        with open(self._getPathFromMain(name) + '.cfg', 'wb') as fl:
            fl.write(value)
            fl.flush()
    def serviceConfig(self, name, **kwd):
        '''Write "<name>.cfg" declaring a service partnership.'''
        ensure_directory(self._getPathFromMain())
        self[name] = INI(**{SERVICE_SECTION: kwd})
    def managerConfig(self, **kwd):
        '''Write the manager address/port config file.'''
        ensure_directory(self._getPathFromMain())
        self[MANAGER_PREFIX] = INI(**{MANAGER_SECTION: kwd})
    class ServiceConf:
        # One named service: resolves its config + address, caches the api.
        def __init__(self, group, name):
            self._group = group
            self._name = name
        def getConfigData(self):
            # Collect data for connecting to the service-api
            data = self._group._getServiceConfigData(self._name)
            (address, service_port) = self._group._getServiceFullAddress(data['partner_name'])
            return (address, service_port, data['service_api'])
        def openApi(self):
            # OpenPartneredClient -- Make the connection now, return the
            # invocator to the remote service api by name.
            (address, service_port, service_api) = self.getConfigData()
            assert isinstance(service_port, int)
            client = self._group._user.Open(address, service_port)
            return client.api.open(service_api)
        @property
        def access(self):
            # Lazily open (then cache) the remote api invocator.
            try: return self._api.invoke
            except AttributeError:
                api = self._api = self.openApi()
                return api.invoke
    # ok, and toString or INI-building auto-write functionality.
class Authorization:
    '''Username + secret-key credentials able to open and authenticate a
    client connection via a digest login.'''
    def __init__(self, username, secretKey):
        self.username = username
        self.secretKey = secretKey
    def Open(self, hostname, port):
        '''Connect to hostname:port and log in; returns the client.'''
        return self.Authenticate(Client.Open(hostname, port))
    def Authenticate(self, client):
        '''Log this user in over an already-open *client*; returns it.'''
        digest = CalculateDigest(self.secretKey, self.username)
        client.call.login(self.username, digest)
        return client
class User(Authorization):
    '''Credentials plus a cache of named Group configurations; user[path]
    returns (creating on first use) the Group rooted at that directory,
    with this user bound in as its credential source.'''
    def Group(self, main_path, *args, **kwd):
        if not hasattr(self, '_groups'):
            self._groups = {}
        cache = self._groups
        if main_path not in cache:
            # Bind this user as the group's credential source.
            kwd['user'] = self
            cache[main_path] = Group(main_path, *args, **kwd)
        return cache[main_path]
    def __getitem__(self, main_path):
        return self.Group(main_path)
class APICall:
    '''Remote-api access object for a client connection.

    apicall(name) builds a context-managed APIMethod (opens the api on
    __enter__, closes on __exit__); apicall.open(name) opens the api
    immediately and returns an APIHandle.
    '''
    class APIHandle:
        # An opened remote api; close() (or garbage collection) sends
        # the closeApi command back over the connection.
        class APIHandleCall:
            # Attribute access builds remote Method invocators on demand.
            def __init__(self, call, getprop = None):
                self.__call = call
                self.__getprop = getprop
            def __getattr__(self, name):
                return Method(name, self.__call, self.__getprop)
            def __repr__(self):
                # NOTE(review): self.__api is not an attribute of this
                # class; __getattr__ converts the mangled name into a
                # Method invocator, so this repr is misleading -- verify
                # whether it belongs on APIHandle instead.
                return '<%s: %r>' % (self.__class__.__name__, self.__api)
        def __init__(self, api, name):
            self.api = api
            self.name = name
            # Q: Why not 'call'? to match Client<-MethodCall
            self.invoke = self.APIHandleCall(api.call, api.getprop)
            self.closed = False
        def __repr__(self):
            return '<%s: %s>' % (self.__class__.__name__, self.name)
        def __del__(self):
            self.close()
        def close(self):
            # Idempotent: only the first close sends the command.
            if not self.closed:
                self.api.call('closeApi')
                self.closed = True
    class APIMethod(MethodCall):
        # Context-managed api session; attribute access yields remote
        # Method invocators just like APIHandleCall.
        def __init__(self, name, call, getprop = None):
            self.__name = name
            self.__call = call
            self.__getprop = getprop
        def __repr__(self):
            return '<%s: %s>' % (self.__class__.__name__, self.__name)
        def __getattr__(self, name):
            return Method(name, self.__call, self.__getprop)
        # Context Control:
        def __enter__(self):
            self.__call('openApi', self.__name)
            return self
        def __exit__(self, etype = None, value = None, tb = None):
            self.__call('closeApi')
    def __init__(self, call, getprop = None):
        self.call = call
        self.getprop = getprop
    def __call__(self, name):
        return self.APIMethod(name, self.call, self.getprop)
    def __repr__(self):
        return '<%s>' % (self.__class__.__name__)
    def open(self, name):
        # Open the named api now and hand back a live handle for it.
        self.call('openApi', name)
        return self.APIHandle(self, name)
class PentacleServiceEndpoint(namedtuple('PentacleServiceEndpoint', 'scheme host port service')):
    '''A parsed "scheme:host:port/Service" endpoint descriptor.

    Covers the 'netloc' part only (username/password are provided
    separately); "//" is excluded and the leading "/" of the service path
    is unnecessary.  (XXX no good for services that start with digits.)
    '''
    # scheme ":" host ":" digits, then the remainder (usually "/Api::Name").
    _pentacle_endpoint = re.compile('^([^/:]+):([^/?]*):([0-9]+)(.*)$')
    __slots__ = ()
    @classmethod
    def Parse(self, endpoint):
        match = self._pentacle_endpoint.match(endpoint)
        assert match is not None
        return self(*match.groups())
    def getendpoint(self):
        # The tuple itself supplies the four fields in declaration order.
        return '%s:%s:%s%s' % self
    def open(self, user):
        '''Connect as *user* and open the named service api.'''
        assert self.scheme.lower() == 'pentacle'
        connection = user.Open(self.host, int(self.port))
        return connection.api.open(self.service)
Endpoint = PentacleServiceEndpoint
def printFault(fault, cause = False, byline = 'REMOTE FAULT:'):
    # todo: move into misc.
    # Print a remote fault report (with traceback).  With cause=True, the
    # locally-active exception is printed first via printException (from
    # the runtime star-import).
    if cause:
        print 'Fault Caused Locally By:'
        printException()
        print
    print byline
    print fault.toString(tb = True)
class CompressIfWorthIt:
THRESHOLD = .70
All = Object()
def __init__(self, threshold = None, level = None):
self.threshold = self.THRESHOLD
self.level = level if level is not None else self.All
def __iter__(self):
if self.level is self.All:
yield 3
# yield 2
yield 1
else:
yield self.level
def worthIt(self, z, u):
print 'WORTH IT??: (%d/%d) %f' % (z, u, float(z) / u)
# This probably will only be worth it for messages of a certain size.
return (float(z) / u) < self.threshold
def __call__(self, data):
data_length = len(data)
for level in self:
compressed = Compress(data, level)
if self.worthIt(len(compressed), data_length):
return compressed
return data
def ApplyCompressionChannel(channel, level = None, threshold = None):
    '''Install a CompressIfWorthIt policy as *channel*'s compression level.

    level: int, digit-string, or the string 'all' (try every level).
    threshold: a percentage string like "70%", converted to the ratio a
    compressed payload must beat for compression to be kept.
    '''
    if level is not None:
        if isinstance(level, basestring):
            if level.isdigit():
                level = int(level)
            else:
                assert level == 'all'
    if isinstance(threshold, basestring):
        # Fix: raw string stops "\d" relying on the legacy invalid-escape
        # fallback.  (Module-level "import re" suffices; the redundant
        # local import was removed.)
        m = re.match(r'^(\d+)%$', threshold)
        assert m is not None
        threshold = int(m.groups()[0]) / 100.0
    channel.compression_level = CompressIfWorthIt(threshold, level)
| Python |
# Peer Data Client
from ..encoding import *
from ..runtime import *
from ..network import *
from ..security import *
from ..packaging import *
from ..application import *
from ..architecture import *
import pdb
import socket
from contextlib import closing as Session
DEFAULT_PORT = Application.DEFAULT_PORT
DEFAULT_HOST = 'localhost'
class ClientOptions(Component):
    '''Declares and parses the command-line options for the client tool.'''
    @classmethod
    def addCmdlnOptions(self, parser):
        '''Register every client option on *parser* (an OptionParser),
        in declaration order so --help output is unchanged.'''
        for (flags, kwd) in [
                (('--port',), dict(default = DEFAULT_PORT, type = int)),
                (('--service-partner-name',), {}),
                (('--support-dir',), {}),
                (('--endpoint',), {}),
                (('--address',), dict(default = DEFAULT_HOST)),
                (('--username',), {}),
                (('--secret-key',), dict(default = '')),
                (('--api-test',), dict(action = 'store_true')),
                (('--quit',), dict(action = 'store_true')),
                (('--examine',), dict(action = 'store_true')),
                (('--compression-level',), {}),
                (('--compression-threshold',), {}),
                (('--open-system-api',), dict(action = 'store_true')),
                (('--enable-timing-tests',), dict(action = 'store_true')),
                (('--open-spline-api',), dict(action = 'store_true')),
                (('-g', '--debug'), dict(action = 'store_true')),
                (('--open-api',), {}),
                ]:
            parser.add_option(*flags, **kwd)
    @classmethod
    def getCmdlnParser(self):
        from optparse import OptionParser
        parser = OptionParser()
        self.addCmdlnOptions(parser)
        return parser
    @classmethod
    def parseCmdln(self, argv = None):
        return self.getCmdlnParser().parse_args(argv)
class Client(PackageReader, asyncore.dispatcher_with_send, AsyncoreControl):
@classmethod
def Open(self, address, port, wait = True):
# The preferred way to open a client connection.
handler = self()
handler.openConnection(address, port)
if wait:
handler.WaitForConnection()
return handler
def __init__(self, *args, **kwd):
'''\
Creates a new Client instance based on asyncore.dispatcher_with_send.
Arguments and keywords are passed directly to that base class constructor.
This constructor also sets up the `entity-space <EntitySpace>`__ data channel
for handling packages, and builds locking versions of network functions for
synchronizing the user's thread with the asyncore thread.
This constructor also composes two access objects:
* call `MethodCall`
* api `APICall <pentacle.client.APICall>`__
Which allow for smooth remoting syntax, pointing back to the Client
communication routines.
'''
asyncore.dispatcher_with_send.__init__(self, *args, **kwd)
PackageReader.__init__(self)
self.dataChannel = EntitySpace()
self.deferred_responses = {} # Ordered?
self.command_nr = 0
deferredMutex = synchronizedWith(None, recursive = True)
self.pushResponse = deferredMutex(self.pushResponse)
self.getWaitState = deferredMutex(self.getWaitState)
self.call = MethodCall(self.callCommand, self.getEntityProperty) #: X{MethodCall}
self.api = APICall(self.callCommand, self.getEntityProperty) #: X{APICall}
def __repr__(self):
cls = self.__class__
if self.addr:
return '<%s.%s: [%s:%d]>' % (cls.__module__, cls.__name__) + self.addr
return '<%s.%s>' % (cls.__module__, cls.__name__)
def openConnection(self, address, port):
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connectWait = threading.Event()
self.connectError = None
self.connect((address, port))
self.startAsyncore() # if necessary.
## def close(self):
## import pdb; pdb.set_trace()
## # raise RuntimeError('closing?')
def WaitForConnection(self):
while not self.connectWait.isSet():
self.connectWait.wait()
if self.connected:
return True
if self.connectError:
raise self.connectError
# Asyncore Handlers.
def handle_connect(self):
self.connectWait.set()
def handle_read(self):
try: PackageReader.handle_read(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
def handle_write_event(self):
try: asyncore.dispatcher_with_send.handle_write_event(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
def handle_expt_event(self):
# Strangely, this is called after a failed write for refused connections.
# So we're racing on connectError (and double connectWait.set, btw)
try: asyncore.dispatcher_with_send.handle_expt_event(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
# Command Invocation.
def sendSerialCommand(self, command, (serialId, flags), *args, **kwd):
data = self.dataChannel.encodeSerialCommand(command, (serialId, flags), *args, **kwd)
# DEBUG('COMMAND[#%d]: %s' % (serialId, data))
self.send(data)
def newSerialId(self):
try: return self.command_nr
finally: self.command_nr += 1
def callCommand(self, command, *args, **kwd):
serialId = self.newSerialId()
self.sendSerialCommand(command, (serialId, 0), *args, **kwd)
DEBUG(' sent command', serialId, command)
response = self.waitForResponse(serialId)
DEBUG(' command response:', response)
return interpretResponse(response,
callEntityMethod = self.callEntityMethod,
getEntityProperty = self.getEntityProperty)
rpc_callEntityMethod = 'callEntityMethod'
def callEntityMethod(self, proxy, *args, **kwd):
return self.callCommand(self.rpc_callEntityMethod, proxy.refId, *args, **kwd)
rpc_getEntityProperty = 'getEntityProperty'
def getEntityProperty(self, proxy, name):
return self.callCommand(self.rpc_getEntityProperty, proxy.refId, name)
# Data Communications.
def handleIncomingPackage(self, package):
# DEBUG('RECEIVED-PACKAGE:', package)
response = Response.FromPackage(package, self.dataChannel)
if response is not None:
serialId = response.serialId
if serialId is None:
# Do some default handling.
try: print interpretResponse(response)
except Fault, fault:
print fault
else:
# DEBUG('PUSHING RESPONSE:', response)
self.pushResponse(serialId, response)
def pushResponse(self, serialId, response):
# Notify waiter.
try: waiting = self.deferred_responses[serialId]
except KeyError:
# DEBUG('GOT RESPONSE BEFORE WAITER: [#%d]' % serialId)
waiting = [None, response]
self.deferred_responses[serialId] = waiting
else:
# DEBUG('POSTING RESPONSE TO WAITER: [#%d]' % serialId)
del self.deferred_responses[serialId]
waiting[1] = response
waiting[0].set()
def getWaitState(self, serialId):
try: waiting = self.deferred_responses[serialId]
except KeyError:
waiting = [threading.Event(), None]
self.deferred_responses[serialId] = waiting
return waiting
def waitForResponse(self, serialId, timeout = 0.4):
waiting = self.getWaitState(serialId)
if waiting[0] is not None:
wait = waiting[0]
# DEBUG('WAITING FOR [#%d]...' % serialId)
while not wait.isSet():
wait.wait(timeout = timeout)
return waiting[1]
## def write_data(self, data):
## print '...', data
## return PackageReader.write_data(self, data)
from support import *
| Python |
#!python
from . import Client, Authorization, Fault, ClientOptions, Synthetic, User, Endpoint
from . import printFault, pdb, ApplyCompressionChannel, CompressIfWorthIt
# Front End.
def openClient(setup, options, ns):
    """Open the client session described by *options*, recording the live
    client on *setup* and exposing useful objects through *ns*."""
    if options.support_dir:
        # 'Support mode': open a user's storage group directly.
        assert options.username
        user = User(options.username, options.secret_key)
        ns['user'] = user
        ns['group'] = user[options.support_dir]
        setup.client  # NOTE(review): bare attribute access; looks like a no-op -- confirm intent.
    elif options.endpoint:
        # Connect to one specific service using endpoint-url form.
        assert options.username
        user = User(options.username, options.secret_key)
        endpoint = Endpoint.Parse(options.endpoint)
        api = endpoint.open(user)
        ns['user'] = user
        ns['endpoint'] = endpoint
        ns['api'] = api
        setup.client  # NOTE(review): bare attribute access; looks like a no-op -- confirm intent.
    elif options.service_partner_name:
        # Resolve the partner through the service manager.
        from pentacle.bus.partners import OpenPartneredClient
        setup.byline = 'SERVICE-MANAGER FAULT'
        setup.client = OpenPartneredClient(options.service_partner_name,
                                           **optionalArgs(options, 'username', 'port'))
    else:
        # Plain host:port connection, with optional compression and login.
        print('Connecting to [%s:%d]...' % (options.address, options.port))
        connection = Client.Open(options.address, options.port)
        ApplyCompressionChannel(connection.dataChannel,
                                options.compression_level,
                                options.compression_threshold)
        if options.username:
            print('Logging in %s...' % options.username)
            Authorization(options.username, options.secret_key).Authenticate(connection)
        setup.client = connection
def setupClient(setup, options, ns):
    """Populate the interactive-console namespace *ns* from the opened
    client, optionally opening debugging/spline/custom APIs per *options*."""
    # Setup interactive namespace.
    import pentacle
    import sys
    client = setup.client
    ns['peer'] = client
    ns['client'] = client
    ns['call'] = client.call if client is not None else None
    ns['options'] = options
    ns['pentacle'] = pentacle
    ns['sys'] = sys
    ns['g'] = Synthetic(**globals())
    # Note: Some of these options are incompatible with --support-dir and --endpoint
    if options.open_system_api:
        # Expose the remote debugging API plus evaluate/execute shorthands.
        system = client.api.open('System::Debugging')
        e = system.invoke.evaluate
        ns['system'] = system
        ns['e'] = e = system.invoke.evaluate
        ns['x'] = x = system.invoke.execute
        if options.enable_timing_tests:
            # Round-trip micro-benchmarks over the remote evaluate call.
            def testTiming(n = 2017):
                return len(e('"A" * %d' % n))
            def timeIt(n, number, repeat = 1):
                from timeit import repeat as tFunc
                return tFunc(lambda:testTiming(n),
                             number = number,
                             repeat = repeat)
            ns['tt'] = testTiming
            ns['ti'] = timeIt
    elif options.open_spline_api:
        def PrintUptime(svcmgr):
            print 'Uptime: %(running_duration)s' % svcmgr.invoke.GetManagerStats()
        from ..bus import ServiceManagerName
        ns['spline'] = client.api.open(ServiceManagerName)
        ns['PrintUptime'] = PrintUptime
    elif options.open_api:
        # --open-api takes 'variable:Api::Name' and binds the handle into ns.
        (variable, name) = options.open_api.split(':', 1)
        ns[variable] = client.api.open(name)
def optionalArgs(options, *names):
    """Collect the named attributes of *options* that are set (not None)
    into a keyword dict suitable for **kwargs forwarding."""
    found = ((name, getattr(options, name, None)) for name in names)
    return dict((name, value) for (name, value) in found if value is not None)
def main(argv = None):
    """Command-line entry point: parse options, open the session, and
    optionally drop into an interactive console."""
    (options, args) = ClientOptions.parseCmdln(argv)
    if options.debug:
        pdb.set_trace()
    ns = dict()
    setup = Synthetic(byline = 'SERVER FAULT',
                      client = None)
    try:
        # Start connection.
        openClient(setup, options, ns)
        # Handle Session.
        ## if options.api_test:
        ##     testApi(client)
        if options.examine:
            # Best-effort readline support for the interactive console.
            try: import readline
            except ImportError:
                pass
            # Setup interactive namespace.
            setupClient(setup, options, ns)
            from code import InteractiveConsole as IC
            ic = IC(locals = ns)
            ic.interact()
        if options.quit:
            print 'Quitting Application...'
            setup.client.call.stopApplication()
    except Fault, fault:
        printFault(fault, cause = True, byline = setup.byline)
    except KeyboardInterrupt:
        print 'Console Break'
## def testApi(client):
## with client.api('API::Management') as api:
## print 'Loading ClockService...'
## print api.loadNewServiceApi('pentacle.application.ClockService').NAME
##
## print 'Loading SystemDebug...'
## print api.loadNewServiceApi('pentacle.application.SystemDebug').NAME
##
## print 'Loading ObjectDirectory...'
## print api.loadNewServiceApi('pentacle.application.ObjectDirectory').NAME
##
## print 'Loading AuxiliaryApi...'
## print api.loadNewServiceApi('pentacle.storage.StorageManagement.AuxiliaryApi').NAME
##
## print 'Start Ticking...'
## with client.api('ClockService::API') as clock:
## clock.setClockTicking(10000)
## print 'Current Clock Time:', clock.getClockTime()
##
## print 'Testing Debug...'
## # client.debug = 1
## with client.api('System::Debugging') as debug:
## try: debug.fail()
## except Fault, fault:
## printFault(fault)
##
## print 'Testing Object Directory...'
## with client.api('System::Directory') as directory:
## N = directory.newObject
## directory.setObject('Feature', N(Aspect = N(Scope = N(Name = 'A System Feature Aspect Scope'))))
##
## print directory.getObject('Feature').Aspect.Scope.Name
##
## print 'Testing Auxiliary Storage'
## with client.api('Storage::Aux') as aux:
## db = aux.buildAuxiliaryStore(buildCmdln(db_path = '~/.pentacle/application.db'))
## user = aux.lookupUnitAccess('UserStorage')
## fraun = user.Open(db.application, 'fraun')
##
## # change user-fraun
## print dict(fraun.unit.unit.items())
# Script entry point: run the client front-end when executed directly.
if __name__ == '__main__':
    main()
| Python |
#!python
# Peer Data Client
from .encoding import *
from .runtime import *
from .network import *
from .security import *
from .packaging import *
from .application import *
from .architecture import *
import pdb
import socket
from contextlib import closing as Session
def DEBUG(*args):
    """Debug print hook; disabled (re-enable the trailing expression to log)."""
    pass # print ' '.join(map(str, args))
class APICall:
    """Callable factory producing APIMethod handles bound to a client's
    command-invocation routines (call / getprop)."""
    def __init__(self, call, getprop = None):
        self.call = call
        self.getprop = getprop
    def __call__(self, name):
        # Bugfix: APIMethod is the module-level class defined below; the
        # original referenced it as self.APIMethod, which raises
        # AttributeError because APICall defines no such attribute.
        return APIMethod(name, self.call, self.getprop)
    def __repr__(self):
        return '<%s>' % (self.__class__.__name__)
class APIHandle:
    """Handle for an opened remote API; remote methods are reached through
    .invoke.<name>(...).  Closing (or garbage collection) issues 'closeApi'."""
    class APIHandleCall:
        # Attribute proxy: unknown names become remote Method handles.
        def __init__(self, call, getprop = None):
            self.__call = call
            self.__getprop = getprop
        def __getattr__(self, name):
            return Method(name, self.__call, self.__getprop)
        def __repr__(self):
            # NOTE(review): self.__api is never assigned in this class, so
            # this repr raises AttributeError if invoked -- confirm intent.
            return '<%s: %r>' % (self.__class__.__name__, self.__api)
    def __init__(self, api, name):
        self.api = api
        self.name = name
        # Q: Why not 'call'? to match Client<-MethodCall
        self.invoke = self.APIHandleCall(api.call, api.getprop)
        self.closed = False
    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.name)
    def __del__(self):
        # Best-effort close on garbage collection.
        self.close()
    def close(self):
        # Send 'closeApi' at most once.
        if not self.closed:
            self.api.call('closeApi')
            self.closed = True
    def open(self, name):
        # NOTE(review): APIHandle defines neither 'call' nor a nested
        # 'APIHandle' attribute; this method looks misplaced (perhaps it
        # belongs on APICall or Client) -- confirm before use.
        self.call('openApi', name)
        return self.APIHandle(self, name)
class APIMethod(MethodCall):
    """Named remote API accessor; unknown attributes become remote method
    handles, and the object is usable as a context manager that opens the
    API on entry and closes it on exit."""
    def __init__(self, name, call, getprop = None):
        # Name-mangled private state; __getattr__ only fires for names
        # NOT set here.
        self.__name = name
        self.__call = call
        self.__getprop = getprop
    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.__name)
    def __getattr__(self, name):
        # Unknown attributes become remote method handles.
        return Method(name, self.__call, self.__getprop)
    # Context Control:
    def __enter__(self):
        self.__call('openApi', self.__name)
        return self
    def __exit__(self, etype = None, value = None, tb = None):
        self.__call('closeApi')
# Module-level debug flag; toggled via setDebug().
debug = False
def setDebug(state):
    """Set the module-wide debug flag."""
    global debug
    debug = state
class AsyncoreControl:
    """Mixin that runs the shared asyncore loop in a single worker thread,
    guarded by a class-wide lock."""
    TIMEOUT = 0.3 # Good responsiveness
    _asyncore_run_lock = threading.Lock()
    @classmethod
    def runAsyncore(self):
        """Serve asyncore.loop in short timeslices until the map empties."""
        with self._asyncore_run_lock:
            # XXX this needs to be atomic with the lock, but I don't want
            # to have to lock for every socket map check. So, try not to
            # start a new connection immediately after the last one closes.
            # (I could probably use some other threading object)
            while asyncore.socket_map:
                ## if debug:
                ##     import pdb; pdb.set_trace()
                asyncore.loop(timeout = self.TIMEOUT,
                              count = 1) # , use_poll = True)
    @classmethod
    def startAsyncore(self):
        """Spawn runAsyncore in a new thread unless one is already serving
        (non-blocking acquire probes whether the loop is running)."""
        if self._asyncore_run_lock.acquire(False):
            nth(self.runAsyncore)
            self._asyncore_run_lock.release()
class Client(PackageReader, asyncore.dispatcher_with_send, AsyncoreControl):
@classmethod
def Open(self, address, port, wait = True):
handler = self()
handler.openConnection(address, port)
if wait:
handler.WaitForConnection()
return handler
def __init__(self, *args, **kwd):
asyncore.dispatcher_with_send.__init__(self, *args, **kwd)
PackageReader.__init__(self)
self.dataChannel = EntitySpace()
self.deferred_responses = {} # Ordered?
self.command_nr = 0
deferredMutex = synchronizedWith(None, recursive = True)
self.pushResponse = deferredMutex(self.pushResponse)
self.getWaitState = deferredMutex(self.getWaitState)
self.call = MethodCall(self.callCommand, self.getEntityProperty)
self.api = APICall(self.callCommand, self.getEntityProperty)
def openConnection(self, address, port):
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connectWait = threading.Event()
self.connectError = None
self.connect((address, port))
self.startAsyncore()
def handle_connect(self):
self.connectWait.set()
def handle_read(self):
try: PackageReader.handle_read(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
def handle_write_event(self):
try: asyncore.dispatcher_with_send.handle_write_event(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
def handle_expt_event(self):
# Strangely, this is called after a failed write for refused connections.
# So we're racing on connectError (and double connectWait.set, btw)
try: asyncore.dispatcher_with_send.handle_expt_event(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
def WaitForConnection(self):
while not self.connectWait.isSet():
self.connectWait.wait()
if self.connected:
return True
if self.connectError:
raise self.connectError
def __repr__(self):
cls = self.__class__
if self.addr:
return '<%s.%s: [%s:%d]>' % (cls.__module__, cls.__name__) + self.addr
return '<%s.%s>' % (cls.__module__, cls.__name__)
def sendSerialCommand(self, command, (serialId, flags), *args, **kwd):
data = self.dataChannel.encodeSerialCommand(command, (serialId, flags), *args, **kwd)
DEBUG('COMMAND[#%d]: %s' % (serialId, data))
self.send(data)
def newSerialId(self):
try: return self.command_nr
finally: self.command_nr += 1
def callCommand(self, command, *args, **kwd):
serialId = self.newSerialId()
self.sendSerialCommand(command, (serialId, 0), *args, **kwd)
response = self.waitForResponse(serialId)
return interpretResponse(response,
callEntityMethod = self.callEntityMethod,
getEntityProperty = self.getEntityProperty)
rpc_callEntityMethod = 'callEntityMethod'
def callEntityMethod(self, proxy, *args, **kwd):
return self.callCommand(self.rpc_callEntityMethod, proxy.refId, *args, **kwd)
rpc_getEntityProperty = 'getEntityProperty'
def getEntityProperty(self, proxy, name):
return self.callCommand(self.rpc_getEntityProperty, proxy.refId, name)
def handleIncomingPackage(self, package):
DEBUG('RECEIVED-PACKAGE:', package)
response = Response.FromPackage(package, self.dataChannel)
if response is not None:
serialId = response.serialId
if serialId is None:
# Do some default handling.
try: print interpretResponse(response)
except Fault, fault:
print fault
else:
DEBUG('PUSHING RESPONSE:', response)
self.pushResponse(serialId, response)
def pushResponse(self, serialId, response):
# Notify waiter.
try: waiting = self.deferred_responses[serialId]
except KeyError:
DEBUG('GOT RESPONSE BEFORE WAITER: [#%d]' % serialId)
waiting = [None, response]
self.deferred_responses[serialId] = waiting
else:
DEBUG('POSTING RESPONSE TO WAITER: [#%d]' % serialId)
del self.deferred_responses[serialId]
waiting[1] = response
waiting[0].set()
def getWaitState(self, serialId):
try: waiting = self.deferred_responses[serialId]
except KeyError:
waiting = [threading.Event(), None]
self.deferred_responses[serialId] = waiting
return waiting
def waitForResponse(self, serialId, timeout = 0.4):
waiting = self.getWaitState(serialId)
if waiting[0] is not None:
wait = waiting[0]
DEBUG('WAITING FOR [#%d]...' % serialId)
while not wait.isSet():
wait.wait(timeout = timeout)
return waiting[1]
## def write_data(self, data):
## print '...', data
## return PackageReader.write_data(self, data)
class Authorization:
    """Username/secret-key credential pair that can open and/or
    authenticate a Client connection."""
    def __init__(self, username, secretKey):
        self.username = username
        self.secretKey = secretKey
    def Open(self, hostname, port):
        # Connect first, then log in over the fresh connection.
        return self.Authenticate(Client.Open(hostname, port))
    def Authenticate(self, client):
        # Digest-based login; never sends the raw secret key.
        digest = CalculateDigest(self.secretKey, self.username)
        client.call.login(self.username, digest)
        return client
def printFault(fault, cause = False):
    """Print a remote fault; when *cause* is set, first dump the local
    exception that triggered it."""
    if cause:
        print('Fault Caused Locally By:')
        printException()
        print('')
    print('REMOTE FAULT:')
    print(fault.toString(tb = True))
class CompressIfWorthIt:
    """Compression policy callable: compress a payload only when the
    compressed/uncompressed size ratio beats *threshold*.

    threshold -- maximum acceptable ratio (defaults to THRESHOLD).
    level -- a specific compression level, or All (default) to try 3 then 1.
    """
    THRESHOLD = .70
    All = Object()
    def __init__(self, threshold = None, level = None):
        # Bugfix: honor the caller-supplied threshold; the original always
        # assigned the class default, silently ignoring the argument (so
        # --compression-threshold had no effect).
        self.threshold = threshold if threshold is not None else self.THRESHOLD
        self.level = level if level is not None else self.All
    def __iter__(self):
        # Yield the candidate compression levels to attempt, in order.
        if self.level is self.All:
            yield 3
            # yield 2
            yield 1
        else:
            yield self.level
    def worthIt(self, z, u):
        # z: compressed size, u: uncompressed size.
        print('WORTH IT??: (%d/%d) %f' % (z, u, float(z) / u))  # debug trace
        # This probably will only be worth it for messages of a certain size.
        return (float(z) / u) < self.threshold
    def __call__(self, data):
        """Return compressed data at the first worthwhile level, else data."""
        data_length = len(data)
        for level in self:
            compressed = Compress(data, level)
            if self.worthIt(len(compressed), data_length):
                return compressed
        return data
# Front End.
DEFAULT_PORT = Application.DEFAULT_PORT # default service port from the application layer
DEFAULT_HOST = 'localhost' # connect locally unless --address is given
def ApplyCompressionChannel(channel, level = None, threshold = None):
    """Install a CompressIfWorthIt policy on *channel*.

    level -- an int, a digit string, or 'all' (try each level in turn).
    threshold -- a float ratio or a percentage string such as '70%'.
    """
    if level is not None:
        if isinstance(level, basestring):
            if level.isdigit():
                level = int(level)
            else:
                assert level == 'all'
    if isinstance(threshold, basestring):
        import re
        # Raw string so the backslash is a regex digit class, not a Python
        # string escape (non-raw '\d' is fragile / warns on newer Pythons).
        m = re.match(r'^(\d+)%$', threshold)
        assert m is not None
        threshold = int(m.groups()[0]) / 100.0
    channel.compression_level = CompressIfWorthIt(threshold, level)
class ClientOptions(Component):
    """Command-line option schema for the stand-alone client console."""
    @classmethod
    def addCmdlnOptions(self, parser):
        """Register all client options on an optparse parser."""
        # Connection / session selection.
        parser.add_option('--port', default = DEFAULT_PORT, type = int)
        parser.add_option('--service-partner-name')
        parser.add_option('--address', default = DEFAULT_HOST)
        parser.add_option('--username')
        parser.add_option('--secret-key', default = '')
        # Behavior toggles.
        parser.add_option('--api-test', action = 'store_true')
        parser.add_option('--quit', action = 'store_true')
        parser.add_option('--examine', action = 'store_true')
        parser.add_option('--compression-level')
        parser.add_option('--compression-threshold')
        parser.add_option('--open-system-api', action = 'store_true')
        parser.add_option('--enable-timing-tests', action = 'store_true')
        parser.add_option('--open-spline-api', action = 'store_true')
        parser.add_option('-g', '--debug', action = 'store_true')
        parser.add_option('--open-api')
    @classmethod
    def getCmdlnParser(self):
        """Build an OptionParser pre-loaded with the client options."""
        from optparse import OptionParser
        parser = OptionParser()
        self.addCmdlnOptions(parser)
        return parser
    @classmethod
    def parseCmdln(self, argv = None):
        """Parse *argv* (defaults to sys.argv) into (options, args)."""
        parser = self.getCmdlnParser()
        return parser.parse_args(argv)
def testApi(client):
    """Smoke-test a series of remote APIs over *client*: load services,
    tick the clock, exercise fault handling, the object directory, and
    auxiliary storage."""
    with client.api('API::Management') as api:
        print 'Loading ClockService...'
        print api.loadNewServiceApi('pentacle.application.ClockService').NAME
        print 'Loading SystemDebug...'
        print api.loadNewServiceApi('pentacle.application.SystemDebug').NAME
        print 'Loading ObjectDirectory...'
        print api.loadNewServiceApi('pentacle.application.ObjectDirectory').NAME
        print 'Loading AuxiliaryApi...'
        print api.loadNewServiceApi('pentacle.storage.StorageManagement.AuxiliaryApi').NAME
    print 'Start Ticking...'
    with client.api('ClockService::API') as clock:
        clock.setClockTicking(10000)
        print 'Current Clock Time:', clock.getClockTime()
    print 'Testing Debug...'
    # client.debug = 1
    with client.api('System::Debugging') as debug:
        # Expect the remote call to raise a Fault; print it locally.
        try: debug.fail()
        except Fault, fault:
            printFault(fault)
    print 'Testing Object Directory...'
    with client.api('System::Directory') as directory:
        N = directory.newObject
        directory.setObject('Feature', N(Aspect = N(Scope = N(Name = 'A System Feature Aspect Scope'))))
        print directory.getObject('Feature').Aspect.Scope.Name
    print 'Testing Auxiliary Storage'
    with client.api('Storage::Aux') as aux:
        db = aux.buildAuxiliaryStore(buildCmdln(db_path = '~/.pentacle/application.db'))
        user = aux.lookupUnitAccess('UserStorage')
        fraun = user.Open(db.application, 'fraun')
        # change user-fraun
        print dict(fraun.unit.unit.items())
def setupClient(client, options):
    """Build and return the interactive-console namespace for *client*,
    optionally opening debugging/spline/custom APIs per *options*."""
    import pentacle
    import sys
    ns = dict(peer = client, client = client,
              call = client.call, options = options,
              pentacle = pentacle, sys = sys,
              g = Synthetic(**globals()))
    if options.open_system_api:
        # Expose the remote debugging API plus evaluate/execute shorthands.
        system = client.api.open('System::Debugging')
        e = system.invoke.evaluate
        ns['system'] = system
        ns['e'] = e = system.invoke.evaluate
        ns['x'] = x = system.invoke.execute
        if options.enable_timing_tests:
            # Round-trip micro-benchmarks over the remote evaluate call.
            def testTiming(n = 2017):
                return len(e('"A" * %d' % n))
            def timeIt(n, number, repeat = 1):
                from timeit import repeat as tFunc
                return tFunc(lambda:testTiming(n),
                             number = number,
                             repeat = repeat)
            ns['tt'] = testTiming
            ns['ti'] = timeIt
    elif options.open_spline_api:
        def PrintUptime(svcmgr):
            print 'Uptime: %(running_duration)s' % svcmgr.invoke.GetManagerStats()
        ns['spline'] = client.api.open('ServiceManager::Spline')
        ns['PrintUptime'] = PrintUptime
    elif options.open_api:
        # --open-api takes 'variable:Api::Name' and binds the handle into ns.
        (variable, name) = options.open_api.split(':', 1)
        ns[variable] = client.api.open(name)
    return ns
def optionalArgs(options, *names):
    """Collect the named attributes of *options* that are set (not None)
    into a keyword dict suitable for **kwargs forwarding."""
    found = ((name, getattr(options, name, None)) for name in names)
    return dict((name, value) for (name, value) in found if value is not None)
def main(argv = None):
    """Command-line entry point for the stand-alone client console."""
    (options, args) = ClientOptions.parseCmdln(argv)
    # Start connection.
    if options.service_partner_name:
        # Resolve the target through the service manager's partner lookup.
        try:
            from pentacle.bus.partners import OpenPartneredClient
            client = OpenPartneredClient(options.service_partner_name,
                                         **optionalArgs(options, 'username', 'port'))
        except Fault, fault:
            print 'SERVICE-MANAGER FAULT:\n%s' % fault.toString(True)
            return
    else:
        # Straight host:port connection with optional compression policy.
        print 'Connecting to [%s:%d]...' % (options.address, options.port)
        client = Client.Open(options.address, options.port)
        ApplyCompressionChannel(client.dataChannel,
                                options.compression_level,
                                options.compression_threshold)
    if options.debug:
        pdb.set_trace()
    # Handle Session.
    username = options.username
    if username:
        try:
            print 'Logging in %s...' % username
            auth = Authorization(username, options.secret_key)
            auth.Authenticate(client)
            if options.api_test:
                testApi(client)
            if options.examine:
                # Best-effort readline support for the interactive console.
                try: import readline
                except ImportError:
                    pass
                ns = setupClient(client, options)
                import pentacle, sys
                from code import InteractiveConsole as IC
                ic = IC(locals = ns)
                ic.interact()
            if options.quit:
                print 'Quitting Application...'
                client.call.stopApplication()
        except Fault, fault:
            printFault(fault, cause = True)
        except KeyboardInterrupt:
            print 'Console Break'
# Script entry point: run the client console when executed directly.
if __name__ == '__main__':
    main()
| Python |
# Asyncore-driven Network.
__all__ = ['AddNetworkCmdlnOptions', 'HostNetwork', 'Peer',
'AsyncoreControl', 'asyncore', 'socket']
import asyncore
import socket
import errno
from .architecture import *
from .packaging import *
from .storage import *
from .runtime import *
from .encoding import *
from .security import *
def DEBUG(log, *args):
    """Debug logging hook; disabled (re-enable the trailing expression to
    route messages through *log*)."""
    pass # log(' '.join(map(str, args)))
# errnos that mean "this port cannot be bound here" -- skip to the next port.
NOBIND = [errno.EPERM, errno.EADDRINUSE]
## # Todo: this could be more robust
## if sys.platform == 'cygwin':
##     NOBIND = errno.EPERM
## elif sys.platform == 'win32':
##     NOBIND = errno.WSAEACESS
## else:
##     raise SystemError('Unknown platform for bind error: %s' % sys.platform)
def OpenAvailablePort(portRange, bindMethod):
    """Try each port in *portRange* with *bindMethod* until one binds;
    return the bound port.  Raises SystemError when none is available."""
    for candidate in portRange:
        if bindMethod(candidate):
            return candidate
    raise SystemError('Could find no available port')
class PortBindingMethod:
    """Strategy base: attempt to bind a socket-like object to one port.
    __call__ returns True on success; falls through (returning None) when
    the port is unavailable per NOBIND; subclasses implement bind(port)."""
    def __init__(self, socket, address):
        # Note: 'socket' here is the socket-like object to bind, not the module.
        self.socket = socket
        self.address = address
    def __call__(self, port):
        # Should be the same for asyncore dispatcher and regular system socket
        try: self.bind(port)
        except socket.error, e:
            # Re-raise anything that isn't a plain "port unavailable" error.
            if e.errno not in NOBIND:
                raise
        else:
            self.socket.listen(5)
            return True
class DispatcherBindMethod(PortBindingMethod):
    """Port-binding strategy for an asyncore dispatcher."""
    def bind(self, port):
        endpoint = (self.address, port)
        self.socket.bind(endpoint)
class SocketBindMethod(PortBindingMethod):
    """Port-binding strategy for a plain system socket."""
    @classmethod
    def New(self, bindAddress):
        # Convenience constructor: fresh TCP socket, bound later via __call__.
        return self(socket.socket(socket.AF_INET, socket.SOCK_STREAM), bindAddress)
    def bind(self, port):
        # Bugfix: socket.bind takes a single (address, port) tuple; the
        # original passed two positional arguments, raising TypeError
        # (compare DispatcherBindMethod above, which does this correctly).
        self.socket.bind((self.address, port))
class InterruptableSocket:
    """Mixin that wakes a blocked asyncore poll by connecting a loopback
    socket pair to itself, authenticated with a one-shot secret key."""
    class InterruptPortConfig:
        # Loopback listener configuration for the self-interrupt channel.
        keySize = 64
        internalPortRange = [30700, 30799]
        address = '127.0.0.1'
        @classmethod
        def OpenAvailable(self, mother, port_ranger = xrange):
            """Bind *mother* to the first free port in internalPortRange."""
            bindMethod = DispatcherBindMethod(mother, self.address)
            try: return OpenAvailablePort(port_ranger(*self.internalPortRange), bindMethod)
            except SystemError, e:
                raise SystemError('%s in range: %s' % (e, self.internalPortRange))
    class InterruptOpener(asyncore.dispatcher):
        """Loopback listener + outbound peer that handshake via a secret key."""
        class InterruptPeer(asyncore.dispatcher_with_send):
            # Outbound side: connects back to our own listener and sends the key.
            def __init__(self, opener, socketMap, (host, port)):
                self.__opener = opener
                asyncore.dispatcher_with_send.__init__(self, map = socketMap)
                self.connectToPort(host, port)
            def connectToPort(self, host, port):
                self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
                self.connect((host, port))
            def handle_connect(self):
                # Send the key.
                self.send(self.__opener.getSecretKey())
        class InterruptIncoming(asyncore.dispatcher):
            # Inbound side: verifies the key, then consumes interrupt bytes.
            def __init__(self, opener, socketMap, sock):
                self.__opener = opener
                self.data_buffer = NewBuffer()
                asyncore.dispatcher.__init__(self, sock = sock, map = socketMap)
            def writable(self):
                # Read-only endpoint.
                return False
            def handle_read(self):
                # Initial state -- read secret key to determine that this is our own.
                buf = self.data_buffer
                keySize = self.__opener.getSecretKeySize()
                readSize = keySize - self.data_buffer.tell()
                data = self.recv(readSize)
                if data == '':
                    # todo: log error?
                    self.close()
                    return
                buf.write(data)
                pos = buf.tell()
                if pos > keySize:
                    # todo: log error?
                    self.close()
                elif pos == keySize:
                    value = buf.getvalue()
                    buf.truncate(0)
                    if self.__opener.submitSecretKeyReponse(value):
                        # Key accepted, switch to interruptable mode.
                        self.handle_read = self.handleInterruptRead
            def handleInterruptRead(self):
                ## print 'Receiving Interrupt:',
                ## print repr(self.recv(1))
                self.recv(1)
        # Handshake states.
        INTERRUPT_NOTREADY = 0
        INTERRUPT_FAILED = 1
        INTERRUPT_CONNECTED = 2
        def __init__(self, socketMap, portConfig, secretKey):
            asyncore.dispatcher.__init__(self, map = socketMap)
            self.socketMap = socketMap
            self.__secretKey = secretKey
            self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
            self.__state = self.INTERRUPT_NOTREADY
            self.__wait = threading.Event()
            port = self.openInterruptPort(portConfig)
            # print 'Interruptable Connection Port: %s' % port
            self.__peer = self.InterruptPeer(self, socketMap, (portConfig.address, port))
        def handle_accept(self):
            (conn, addr) = self.accept()
            self.InterruptIncoming(self, self.socketMap, conn)
        def openInterruptPort(self, portConfig):
            return portConfig.OpenAvailable(self)
        def getSecretKey(self):
            return self.__secretKey
        def getSecretKeySize(self):
            return len(self.__secretKey)
        # NOTE(review): method name misspells 'Response'; call sites match,
        # so renaming would be an interface change.
        def submitSecretKeyReponse(self, value):
            """Accept or reject the received key; returns True on match."""
            if value != self.__secretKey:
                # Fail -- why?
                self.failSecretKeyResponse()
                return False
            self.switchIntoInterruptMode()
            return True
        def failSecretKeyResponse(self):
            # Stop listening and record failure; wake any status waiter.
            self.close()
            self.__state = self.INTERRUPT_FAILED
            self.__wait.set()
        def switchIntoInterruptMode(self):
            # Listener no longer needed once the interrupt pair is connected.
            self.close()
            # print 'Enabling Interrupt Mode.'
            self.__state = self.INTERRUPT_CONNECTED
            self.__wait.set()
        def getStatus(self):
            """Block until the handshake resolves; return the final state."""
            while not self.__wait.isSet():
                self.__wait.wait()
            return self.__state
        def isConnected(self):
            return self.__state == self.INTERRUPT_CONNECTED
        def sendInterrupt(self):
            # A single byte is enough to wake the blocked poll.
            if self.isConnected():
                self.__peer.send('\x00')
    def initialize_interrupt(self, portConfig, socketMap):
        """Create the interrupt channel once; no-op if already connected."""
        try:
            if self.__opener.isConnected():
                return
        except AttributeError:
            pass
        secretKey = GenerateSecretKey(portConfig.keySize)
        self.__opener = opener = self.InterruptOpener(socketMap, portConfig, secretKey)
    def interruptTimeout(self):
        # Always wait before we send the interrupt, because we want it to succeed
        # in the event that the poll timeout is forever.
        if self.__opener.getStatus() == self.__opener.INTERRUPT_CONNECTED:
            self.__opener.sendInterrupt()
    interruptConfig = InterruptPortConfig
class InterruptableSignal:
    """Signal-based alternative to InterruptableSocket (experimental)."""
    # Can't seem to get this one to work.
    interruptConfig = None
    def initialize_interrupt(self, config, map):
        # No channel setup needed for the signal-based approach.
        pass
    if sys.platform == 'cygwin':
        # from signal import SIGPIPE as INTERRUPT_SIGNAL
        from signal import SIGIO as INTERRUPT_SIGNAL
        def interruptTimeout(self):
            if self.shouldInterrupt():
                print 'Interrupting Network...'
                sendSignal(thisProcessId(), self.INTERRUPT_SIGNAL)
                # modify interruption rate...
    else:
        # Non-cygwin platforms: interrupts are a no-op.
        def interruptTimeout(self):
            pass
Interruptable = InterruptableSocket
# Note on asyncore's poll/set
#
# Select limits the number of descriptors to FD_SETSIZE, which doesn't allow for
# massive numbers of connections, whereas poll is variable (and much less limited).
#
# However, poll is not implemented on all systems, making the tradeoff one between
# portability and capacity.
#
def AddNetworkCmdlnOptions(application, parser):
    """Register the host-network command-line options on *parser*."""
    parser.add_option('--port', default = application.DEFAULT_PORT)
    parser.add_option('--bind-address', default = HostNetwork.BIND_ADDRESS)
    # parser.add_option('--no-network', action = 'store_true')
class AsyncoreControl:
    """Mixin that serializes ownership of the single shared asyncore loop
    thread behind a class-wide lock (exposed as a context manager)."""
    # Inheriting both HostNetwork and Client so that dual-functioning service
    # partners can cooperatively serve i/o flow.
    class AlreadyServing(RuntimeError):
        # Raised internally when another thread already owns the loop.
        pass
    # Todo: combine timing/responsiveness control into this class, or, some
    # complex algorithm for cutover to host-style cycling?
    TIMEOUT = 0.3 # Good responsiveness
    _asyncore_run_lock = threading.Lock()
    ## def asyncoreLock(self):
    ##     return self._asyncore_run_lock
    @classmethod
    @contextmanager
    def asyncoreLock(self, blocking = False):
        """Context manager over the run lock; non-blocking acquisition
        raises AlreadyServing when the loop is already owned."""
        if not self._asyncore_run_lock.acquire(blocking):
            # Pretty much, some other part of the application decided to start
            # serving the single asyncore batch (probably the client) -- let it go.
            raise self.AlreadyServing
        try: yield
        finally: self._asyncore_run_lock.release()
    @classmethod
    def startAsyncore(self):
        """Spawn runAsyncore in a new thread unless one is already serving."""
        try:
            with self.asyncoreLock():
                # When the context exits, lock is released, waiting ex-thread begins.
                nth(self.runAsyncore)
        except self.AlreadyServing:
            pass
    @classmethod
    def waitForAsyncoreEnd(self):
        """Block until the serving thread releases the run lock."""
        with self.asyncoreLock(True):
            # Effectively join at when the lock is momentarily free.
            pass
    @classmethod
    def runAsyncore(self):
        """Drive asyncore.loop in short timeslices until the map empties."""
        with self.asyncoreLock(True):
            # XXX this needs to be atomic with the lock, but I don't want
            # to have to lock for every socket map check. So, try not to
            # start a new connection immediately after the last one closes.
            #
            # (I could probably use some other threading object)
            while asyncore.socket_map:
                asyncore.loop(timeout = self.TIMEOUT,
                              count = 1) # , use_poll = True)
class HostNetwork(asyncore.dispatcher, Interruptable, AsyncoreControl, Object):
    # (Not an architectural component, but an engine subsystem)
    #
    # The listening ("mother") socket: accepts inbound peer connections,
    # wraps them in ConnectionClass (Peer by default), and drives the
    # engine's asyncore poll cycle.
    BIND_ADDRESS = '0.0.0.0'
    def __init__(self, engine, port, address = None, ConnectionClass = None):
        # *port* is either an int, or the string 'auto' to scan PORT_RANGE
        # for a free port when the mother socket is opened.
        # We want to separate network hosts, but unfortunately the partner
        # architecture uses Client asyncore, so we're just always going to
        # use default map.
        self.socketMap = None # asyncore.socket_map -- {}
        asyncore.dispatcher.__init__(self, map = self.socketMap)
        if isinstance(port, int):
            self.port_auto = False
            self.port = port
        else:
            assert port.lower() == 'auto'
            self.port_auto = True
            self.port = None # Chosen later by open_mother_socket().
        self.address = address is None and self.BIND_ADDRESS or address
        self.connections = [] # Live peer connection objects.
        self.engine = engine
        self.ConnectionClass = ConnectionClass or Peer
        self.nextTimeout = self.networkTiming().next # Poll-timeout generator.
        self.nextWakeupTime = None
    PORT_RANGE = [30300, 30650]
    def configurablePortRange(self):
        # Todo: from self.engine.application.configuration??
        return xrange(*self.PORT_RANGE)
    def open_mother_socket(self, log = False):
        # Create, bind and listen on the accepting socket; no-op when the
        # socket already exists.  *log* may be a callable used for reporting.
        if self.socket is None:
            self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
            self.set_reuse_addr()
            if self.port_auto:
                # Scan the configured range for the first bindable port.
                bindMethod = DispatcherBindMethod(self, self.address)
                portRange = self.configurablePortRange()
                try: self.port = OpenAvailablePort(portRange, bindMethod)
                except SystemError, e:
                    raise SystemError('%s in range: %s' % (e, portRange))
            else:
                self.bind((self.address, self.port))
            self.listen(5)
            if log:
                log('Mother Socket Opened [%s:%s]' % (self.address, self.port))
            self.initialize_interrupt(self.interruptConfig, self.socketMap)
    def handle_accept(self):
        # asyncore callback for a new inbound connection.
        # NOTE(review): the local 'socket' shadows the socket module here.
        (socket, addr) = self.accept()
        self.newConnection(socket, addr)
    def newConnection(self, socket, address = None):
        # Wrap an accepted socket in ConnectionClass and register it.
        conn = self.ConnectionClass(self, socket, address)
        self.connections.append(conn)
        self.engine.logMessage('New Connection %r' % conn)
    def closeConnection(self, conn):
        # Unregister a peer; tolerates peers that never fully registered.
        if conn in self.connections:
            # Error occurred before newConnection was called.
            # XXX This doesn't really make sense.. newConnection
            # should always be called (synchronously) before close...
            self.connections.remove(conn)
            self.engine.logMessage('Disconnected %r' % conn)
    def pollCycle(self):
        # One asyncore poll step.  Records when the poll would time out on
        # its own so shouldInterrupt() can decide whether a wakeup pays off.
        timeout = self.nextTimeout()
        self.nextWakeupTime = None if timeout is None else \
            getCurrentSystemTime() + timeout
        asyncore.loop(timeout = timeout,
                      map = self.socketMap,
                      count = 1) # , use_poll = True)
    # XXX Why are there still network blockages?
    BASE_NETWORK_TIMEOUT = 1.0 # None # 5.0 # 0.1 # Very responsive
    def networkTiming(self):
        # Generator of successive poll timeouts (currently a constant stream).
        # todo: make adaptable network timeout:
        # If it's being interrupted alot, then decrease the timeout
        # If it's not being interrupted much, increase timeout
        while True:
            yield self.BASE_NETWORK_TIMEOUT
    responsiveness_threshold = 1.0 # Poor responsiveness
    def timeUntilWakeup(self):
        # Seconds until the current poll would wake up by itself; raises
        # ValueError when no timed poll is in progress.
        if self.nextWakeupTime is None:
            raise ValueError('Coma')
        return (self.nextWakeupTime - getCurrentSystemTime())
    def shouldInterrupt(self):
        # True when the poll would otherwise sleep longer than the
        # responsiveness threshold; falls through (None) when unknown.
        try:
            if self.timeUntilWakeup() > self.responsiveness_threshold:
                return True
        except ValueError:
            pass
class SynchronizedSendingDispatcher(asyncore.dispatcher_with_send):
    # A dispatcher_with_send whose send path is thread-safe: both the engine
    # thread and the network thread may touch the outgoing buffer, so the
    # buffer-mutating methods are wrapped in a recursive mutex.
    def __init__(self, *args, **kwd):
        asyncore.dispatcher_with_send.__init__(self, *args, **kwd)
        # Dedicated monitor object; initiate_send/send are rebound to
        # lock-holding wrappers on this *instance* (the class methods are
        # left untouched).
        self.__sending = Object()
        bufferMutex = synchronizedWith(self.__sending, recursive = True)
        self.initiate_send = bufferMutex(self.initiate_send)
        self.send = bufferMutex(self.send)
    # SENDBUF_PACKET_SIZE = Variable
    # 512 is the default, but it's no good if even most trivial exceptions
    # occur, because it will bump against the nonresponsive network poll.
    # So, it should be variable: either the peer can change it, or it can
    # adapt to current network performance.
    ## SENDBUF_SIZE = 2048 # 512
    ## def initiate_send(self):
    ##     num_sent = 0
    ##     num_sent = asyncore.dispatcher.send(self, self.out_buffer[:self.SENDBUF_SIZE])
    ##     self.out_buffer = self.out_buffer[num_sent:]
    ##     print '[ SENT %d ]' % num_sent
    ##
    ##     # Effectively, this is called while network is polling (sleep),
    ##     # but just sending on the socket of changing .out_buffer isn't
    ##     # enough (since writable is only called before the timeout).
    ##     # So the network thread doesn't wake up unless a signal causes
    ##     # it to EINTR.  Here we are over in the engine, because all
    ##     # functions are just thrown to it indiscriminately, so we're just
    ##     # going to have to interrupt it!
    ##     return num_sent
    ## def send(self, data):
    ##     ## if self.debug:
    ##     ##     self.log_info('sending %s' % repr(data))
    ##     self.out_buffer = self.out_buffer + data
    ##     x = self.initiate_send()
    ##     print '[ SENT %d ] %s' % (x, self.socket.send)
    ##     return x
class Peer(PackageReader, SynchronizedSendingDispatcher, Object):
loggedInEvent = Event('peer-logged-in')
def __init__(self, network, socket, address):
SynchronizedSendingDispatcher.__init__(self, sock = socket, map = network.socketMap)
PackageReader.__init__(self)
self.network = network
self.client_address = address
self.deferred_response = {}
self.mode = network.engine.application.InitialMode()
self.dataChannel = EntitySpace()
def __repr__(self):
addr = self.client_address
return '[%s:%d] %r' % (addr[0], addr[1], self.mode)
def handle_close(self):
# showTraceback()
self.network.closeConnection(self)
self.close()
def handleIncomingPackage(self, package):
self.log('package', 'INCOMING-PACKAGE: %s' % package)
self.log('engine', 'ENGINE-MESSAGE-QUEUE:\n%s' % '\n'.join(map(str, self.network.engine.messageQueue.queue)))
try: cmd = Command.FromPackage(package, self.dataChannel)
except:
#@breakOn
def handleMalformedPackage(package):
print
print 'MALFORMED PACKAGE:'
from .encoding import inspectPackedMessage
inspectPackedMessage(package)
# This should alert the client (but we don't push, even if the request is flawed).
# We should just close it.
printException()
handleMalformedPackage(package)
else:
if cmd is not None:
self.network.engine.postMessage(self.PeerCommandMessage(self, cmd))
class PeerCommandMessage(Engine.Message):
def __init__(self, peer, command):
self.peer = peer
self.command = command
def __repr__(self):
return '<%s: %r>' % (self.__class__.__name__,
self.command)
def dispatch(self, engine):
self.peer.dispatchCommand(engine, self.command)
class ResponseDeferred(Exception):
def __init__(self, tracked = True):
self.tracked = tracked
def bind(self, peer, cmd):
self.peer = peer
self.command = command
return self
def finish(self):
if self.tracked:
self.peer.finishDeferredResponse(self.command.serialId)
def response(self, engine, response):
self.peer.handleCommandResponse(engine, self.command, response)
self.finish()
def exception(self, engine, (etype, value, tb)):
self.peer.handleCommandException(engine, self.command, (etype, value, tb))
self.finish()
#@breakOn
def dispatchCommand(self, engine, cmd):
# This should really be part of the application, since it makes decisions
# about package contents (exception traceback??)
self.log('command', 'COMMAND: %s' % cmd)
with engine.Controller(self):
# The client kwd names could interfere with the actual method params.
try: response = self.mode.interpretCommand(engine, self, cmd.command, *cmd.args, **cmd.kwd)
except self.ResponseDeferred, e:
d = e.bind(self, cmd)
if e.tracked:
nr = cmd.serialId
assert nr is not None
assert nr not in self.deferred_response
self.deferred_response[nr] = d
except:
self.log('command', 'EXCEPTION')
self.handleCommandException(engine, cmd, getSystemException())
self.network.interruptTimeout()
else:
self.log('command', 'RESPONSE: %s' % repr(response))
self.handleCommandResponse(engine, cmd, response)
self.network.interruptTimeout()
def log(self, logType, message):
self.network.engine.application.logLevel(logType, message)
## def sendLater(self, data):
## # XXX Why does this just go away?
## # Multithreading problem?
## self.out_buffer += data
## def sendNow(self, data):
## self.socket.send(data)
def handleCommandResponse(self, engine, cmd, response):
data = self.dataChannel.encodeResponse(cmd.serialId, response)
self.log('package', 'RESPONSE-PACKAGE: %r' % data)
self.send(data)
def handleCommandException(self, engine, cmd, (etype, value, tb)):
etype = etype.__name__
value = str(value)
tb = extractTraceback(tb) # Limit this to privileged mode? (application)
DEBUG(engine.log, 'EXCEPTION: (%s) %s' % (etype, value))
self.send(self.dataChannel.encodeException(cmd.serialId, (etype, value, tb)))
def finishDeferredResponse(self, serialId):
del self.deferred_response[serialId]
# For tracked deferments:
def succeedDeferredResponse(self, engine, serialId, response):
d = self.deferred_response[serialId]
d.response(engine, response)
def failDeferredResponse(self, engine, serialId, (etype, value, tb)):
d = self.deferred_response[serialId]
d.exception(engine, (etype, value, tb))
def login(self, engine, username, authKey):
assert isinstance(username, basestring)
storage = UserStorage.Open(engine.application, username)
if storage.checkAccess(authKey):
# Login succeeds!
self.loggedInEvent(engine, self)
return engine.application.LoggedInMode(self, username)
| Python |
# Package Object Model
__all__ = ['Command', 'Response', 'Fault',
'PackageReader', 'NotAvailableError', 'interpretResponse']
from os import SEEK_SET, SEEK_END
from .encoding import *
from .runtime import *
class Command:
    """A remote command unpacked from a wire package.

    Carries the command name, an optional serial id (used to match the
    eventual Response), transport flags, and positional/keyword arguments.
    """
    @classmethod
    def FromPackage(self, package, entitySpace):
        """Decode *package* via *entitySpace*; the message must be a
        3-to-5-element list matching __init__'s parameters."""
        msg = entitySpace.unpackMessage(package)
        if isinstance(msg, list):
            assert len(msg) in (3, 4, 5)
            return self(*msg)
        raise TypeError(type(msg))
    def __init__(self, command, serialId, flags, args = (), kwd = None):
        assert isinstance(command, basestring)
        # Bug fix: 'kwd' previously defaulted to a shared mutable {} --
        # any caller mutating self.kwd would have leaked into later commands.
        if kwd is None:
            kwd = {}
        if isinstance(args, dict):
            # If there are no positional args then:
            # "shift these [parameters] back right."
            kwd = args
            args = ()
        else:
            assert isinstance(args, (list, tuple))
        assert isinstance(kwd, dict)
        self.command = command
        self.serialId = serialId
        self.flags = flags # encrypted, compressed, encoded, etc.
        self.args = args
        self.kwd = kwd
    def __repr__(self):
        return self.CommandString(self.command, self.serialId, self.args, self.kwd)
    @classmethod
    def CommandString(self, cmd, nr, args, kwd):
        # Render "cmd[nr](arg, ..., name = value, ...)" for logging.
        args = ', '.join(map(repr, args))
        kwd = ', '.join('%s = %r' % (nv) for nv in kwd.iteritems())
        if nr is None:
            return '%s(%s%s%s)' % (cmd, args, args and kwd and ', ' or '', kwd)
        else:
            return '%s[%d](%s%s%s)' % (cmd, nr, args, args and kwd and ', ' or '', kwd)
class Response:
    """A success-or-failure reply to a Command, keyed by the command's serialId.

    On success, .result holds the returned value; on failure, the flattened
    remote exception is exposed as .error_type/.error_value/.error_traceback.
    """
    @classmethod
    def FromPackage(self, package, entitySpace):
        """Decode *package*; the message must be [serialId, [success, result]]."""
        msg = entitySpace.unpackMessage(package)
        if isinstance(msg, list):
            # Bug fix: compare lengths with ==, not 'is' -- identity on ints
            # only works by accident of CPython's small-int cache.
            assert len(msg) == 2
            (serialId, response) = msg
            assert len(response) == 2
            return self(serialId, *response)
        raise TypeError(type(msg))
    def __init__(self, serialId, success, result):
        self.serialId = serialId
        self.success = success
        if success:
            self.result = result
        else:
            # Failure carries a (type-name, value-string, traceback) triple.
            self.error_type = result[0]
            self.error_value = result[1]
            self.error_traceback = result[2]
    def __repr__(self):
        to = (' to [#%s]' % self.serialId) if self.serialId is not None else ''
        if self.success:
            return '%s%s -> %r' % (self.__class__.__name__, to, self.result)
        else:
            return '%s%s -> %s: %s' % (self.__class__.__name__, to,
                                       self.error_type, self.error_value)
# Peer Network I/O
# PackageReader state-machine states: first the variable-length size header
# is decoded, then that many payload bytes are consumed as one package.
READING_SIZE = 1
READING_PACKAGE = 2
class NotAvailableError(EOFError):
    """Raised when fewer bytes are buffered than a read requested."""
    def __init__(self, nbytes, available):
        message = 'Bytes requested: %d, bytes available: %d' % (nbytes, available)
        EOFError.__init__(self, message)
        self.nbytes = nbytes       # How many bytes the caller asked for.
        self.available = available # How many were actually buffered.
class PackageReader:
    # Incremental decoder for length-prefixed packages: a variable-length
    # big-integer size header followed by that many payload bytes.
    # Mixin: subclasses provide recv()/handle_close() (asyncore) and
    # handleIncomingPackage().
    def __init__(self):
        self.data_buffer = NewBuffer() # Accumulates raw network bytes.
        self.state = READING_SIZE
        self.read_point = None  # Consumed-byte offset while in read mode.
        self.pkg_size = 0       # Package size decoded so far.
        self.pkg_size_length = 0  # Number of size-header bytes consumed.
    def flip_buffer(self):
        # Toggle the buffer between write (append) mode and read mode.
        if self.read_point is None:
            # Go into read mode.
            self.read_point = 0
        else:
            # Rewrite the buffer, go back into write mode.
            # (Discard everything before read_point, keep the unconsumed tail.)
            db = self.data_buffer
            db.seek(self.read_point, SEEK_SET)
            self.data_buffer = NewBuffer(db.read())
            self.read_point = None
    def read_bytes(self, nbytes):
        # Consume exactly nbytes, or raise NotAvailableError with the buffer
        # position restored so the same read can be retried after more data.
        n = self.data_buffer.tell()
        d = self.data_buffer.read(nbytes)
        x = len(d)
        if x != nbytes:
            self.data_buffer.seek(n, SEEK_SET)
            raise NotAvailableError(nbytes, x)
        self.read_point += nbytes
        return d
    def write_data(self, data):
        # Append incoming bytes at the end, then rewind to the start.
        # print '...', data
        b = self.data_buffer
        b.seek(0, SEEK_END)
        b.write(data)
        b.seek(0, SEEK_SET)
    def increasePackageSize(self, addend):
        # Fold one more header byte into the accumulated package size.
        self.pkg_size = IncreasePackageSize(self.pkg_size, addend)
    def packageSizeTerminated(self, addend):
        # True when this header byte marks the end of the size integer.
        return PackageSizeTerminated(addend)
    READ_SIZE = 1024
    def handle_read(self):
        # asyncore callback: pull bytes off the socket and run the state
        # machine, emitting each complete package via handleIncomingPackage.
        ## if self.debug:
        ##     import pdb; pdb.set_trace()
        data = self.recv(self.READ_SIZE)
        if data == '':
            # Remote end closed the connection.
            self.handle_close()
            return
        self.write_data(data)
        self.flip_buffer()
        # Network-sensitive data reading state machine:
        try:
            # Try to read all packages.
            while True:
                if self.state == READING_SIZE:
                    while True:
                        # Read variable-length big integer:
                        size_addend = self.read_bytes(1)
                        # Increase current package size.
                        self.increasePackageSize(size_addend)
                        if self.packageSizeTerminated(size_addend):
                            # Total package size received -- Go on to read package.
                            self.state = READING_PACKAGE
                            break
                        # Increase and continue.
                        self.pkg_size_length += 1
                if self.state == READING_PACKAGE: # Why test?
                    package = self.read_bytes(self.pkg_size)
                    # Reset for the next package before dispatching this one.
                    self.state = READING_SIZE
                    self.pkg_size = 0
                    self.pkg_size_length = 0
                    self.handleIncomingPackage(package)
        except NotAvailableError:
            # Not enough buffered yet -- resume on the next handle_read.
            pass
        finally:
            self.flip_buffer()
class Fault(Exception):
    """Client-side surrogate for an exception raised on the remote end.

    Carries the flattened (type-name, value-string, extracted-traceback)
    triple from a failed Response.
    """
    def __init__(self, etype, value, tb):
        self.error_type = etype
        self.error_value = value
        self.error_traceback = tb
        Exception.__init__(self, self.toString())
    def toString(self, tb = False):
        """Render as a 'Type: value' one-liner, or -- when *tb* is true --
        as a full traceback-style listing."""
        summary = '%s: %s' % (self.error_type, self.error_value)
        if not tb:
            return summary
        listing = ['Traceback (most recent call last):']
        for (filename, lineno, name, line) in self.error_traceback:
            listing.append(' File "%s", line %d, in %s' % (filename, lineno, name))
            listing.append(' %s' % line)
        listing.append(summary)
        return '\n'.join(listing)
def interpretResponse(response, callEntityMethod = None, getEntityProperty = None):
    """Unwrap a Response: return its result on success, raise a Fault on failure.

    Remote-entity proxies in a successful result are wrapped in a Binding so
    method calls and property reads route back through the given callbacks.
    """
    if not response.success:
        raise Fault(response.error_type,
                    response.error_value,
                    response.error_traceback)
    result = response.result
    if isinstance(result, EntitySpace.Proxy):
        return EntitySpace.Binding(result, callEntityMethod, getEntityProperty)
    return result
| Python |
#!python
# -*- coding: UTF-8 -*-
# Service Partner LINE: A Manager Daemon for the Pentacle System Bus
# Copyright 2011 Clint Banis & Penobscot Robotics. All rights reserved.
# Pronunciation: splīn
#
from ..application import bootServer, Application
from ..architecture import ServiceBase, Event
from ..config import Configuration
from ..client import Client, ClientOptions, Session, Authorization
from ..runtime import breakOn, getCurrentSystemTime, Object, LookupObject, thisProcessId, nth
from ..network import SocketBindMethod, AsyncoreControl
from ..storage import StorageUnit
from ..packaging import Fault
from .. import buildApplicationVersion
from . import __version__ as busVersion
from . import *
from .services import *
import socket
import errno
# Todo: Merge this functionality with pentacle application.
# (Note that this is a subsystem identity, actually, specific to the service API)
SERVICE_NAME = 'SplineBus'
# Full version banner reported by ServiceManager.Identify().
__service_identity__ = buildApplicationVersion(SERVICE_NAME, busVersion)
class ServiceManager(ServiceBase):
NAME = ServiceManagerName
Methods = ['Identify', 'RegisterAsPartner', 'GetManagerStats', 'GetPartnerInfo']
# Managing runlevels:
class PartnerManager:
class PartnerStorage(StorageUnit):
STORAGE_REALM = 'PartnerStorage'
class Interface(StorageUnit.Interface):
def getServicePort(self):
return self.getValue('servicePort')
def setServicePort(self, port):
self.setValue('servicePort', port)
def getBootTime(self):
return self.getValue('bootTime')
def setBootTime(self, time):
self.setValue('bootTime', time)
def __init__(self, svcMgr, apiMgr):
# Associate service and API managers with routines accessible to partner services.
self.svcMgr = svcMgr
self.apiMgr = apiMgr
def OpenStorage(self, partnerName):
return self.PartnerStorage.Open(self.apiMgr.application, partnerName)
DYNAMIC_PORT_RANGE = [30750, 31000]
def getDynamicPort(self, bindAddress = '127.0.0.1'):
bindMethod = SocketBindMethod.New(bindAddress)
try: return OpenAvailablePort(xrange(*self.DYNAMIC_PORT_RANGE), bindMethod)
except SystemError, e:
raise SystemError('%s in range: %s' % (e, self.DYNAMIC_PORT_RANGE))
def getManagerPort(self):
return self.apiMgr.application.network.port
# Actions.
PartnerBooted = Event('spline-partner-booted')
def partnerBooted(self, partner):
self.PartnerBooted(self.apiMgr.application.engine, partner)
PartnerInjected = Event('spline-partner-injected')
def partnerInjected(self, partner):
self.PartnerInjected(self.apiMgr.application.engine, partner)
PartnerUpdated = Event('spline-partner-updated')
def partnerUpdated(self, partner):
self.PartnerUpdated(self.apiMgr.application.engine, partner)
def Activate(self, apiMgr):
# Configurate the service manager.
cfg = apiMgr.application.config.getSectionObject('Spline')
self.partners = {}
# Provide unique access to the manager from partner service handlers.
partnerManager = self.PartnerManager(self, apiMgr)
loadOrder = []
for pCfgName in cfg.getOptionMultiple('partner'):
if pCfgName:
# todo: fail on load?
pCfg = Configuration.FromFile(pCfgName)
partner = Partner.FromConfig(pCfg)
if partner.name in self.partners:
pass # log error
else:
self.partners[partner.name] = partner
loadOrder.append(partner.name)
# Load state of currently-running service.
partner.RestoreTrackedState(partnerManager)
# Stats.
self.activationTime = getCurrentSystemTime()
if cfg.getOption('public-access', simplify = True):
apiMgr.application.security.grantPublicAction('access-api:%s' % self.NAME)
# External Partner Connections.
#@breakOn
def doRegisterAsPartner(configString, digest = None):
# Load configuration.
pCfg = Configuration.FromString(configString)
partner = Partner.FromConfig(pCfg)
name = partner.name
# Find by name in already-loaded partners.
try: match = self.partners[name]
except KeyError:
# Inject new partner with configured custom handler.
self.partners[name] = partner
loadOrder.append(partner)
apiMgr.application.log('Injecting Partner: %s' % name)
partner.inject(partnerManager, pCfg)
else:
# Match authorization key.
if not match.isAuthorized(partner, digest, configString):
raise AuthorizationError('Not authorized: %s' % name)
# Update configuration.
apiMgr.application.log('Updating Partner: %s' % name)
match.updateStatus(partnerManager, pCfg)
def doGetLoadOrder():
return list(loadOrder) # copy
# Defined inline to protect cellular variables.
self.RegisterAsPartner = doRegisterAsPartner
self.GetLoadOrder = doGetLoadOrder
# Do in another thread.
@nth
def bootPartners():
# Boot partner runlevels.
for n in loadOrder:
# todo: ignore/log problems? disabled partner state?
apiMgr.application.log('Booting Partner: %s' % n)
self.partners[n].boot(partnerManager)
## def Deactivate(self):
## # Auto-kill.
## pass
def Identify(self):
return __service_identity__
def GetManagerStats(self):
return dict(running_duration = getCurrentSystemTime() - self.activationTime,
partners = self.partners.keys(),
process_id = thisProcessId())
def GetPartnerInfo(self, name):
return self.partners[name].GetInfo()
# Startup.
def isInstanceRunning(options):
    """Probe options.address:port for an already-running Spline instance.

    Returns True when a recognized server answers; raises UnknownServer when
    something else answers; lets socket.error propagate when nothing listens.
    """
    # First attempt to see if an instance is already running.
    with Session(Authorization(options.username or '',
                               options.secret_key or '') \
                 .Open(options.address, options.port)) as client:
        DEBUG('Opening Spline API for Identification...')
        with client.api(ServiceManager.NAME) as spline:
            # Bug fix: capture the identity -- it was previously an unbound
            # name in the UnknownServer raise below (NameError).
            identity = spline.Identify()
            if isRecognizedServer(identity, APPLICATION_NAME):
                # Already started -- do nothing.
                return True
            raise UnknownServer(identity)
def Start(config_file, argv = None):
    # Boot a Spline instance from *config_file* unless one is already
    # running; serve until the host network ends, then auto-kill partners.
    cfg = Configuration.FromCmdln(argv, config_file, ClientOptions, Application)
    try: running = isInstanceRunning(cfg.set)
    except socket.error, e:
        # Connection refused (and friends) just means nothing is listening.
        if e.errno not in [errno.EAGAIN, errno.EBADF, errno.ECONNREFUSED]:
            raise
        running = False
    except (UnknownServer, AuthenticationError), e:
        print '%s: %s' % (e.__class__.__name__, e)
        return
    except Fault, fault:
        print 'FAULT ON CONNECT:\n%s' % fault.toString(True)
        return
    if not running:
        # Boot up a new instance.
        DEBUG('Booting Spline...')
        boot = bootServer # breakOn(bootServer)
        app = boot(config_file = config_file)
        # Ensure the ServiceManager is installed -- this blocks the
        # rest of the application (including engine-message processing).
        svcMgr = app.api.loadServiceApiObject(ServiceManager)
        # Want to start up the host network:
        DEBUG('services booted')
        AsyncoreControl.waitForAsyncoreEnd()
        DEBUG('Spline host-ready')
        app.run()
        # What about shutdown?? We may want to kill off some children...
        # Or, at least notify listeners. I suppose notification would
        # have to be done within the loop, so really a timed shutdown.
        #
        # (Or, do this within Deactivate)
        for partnerName in svcMgr.GetLoadOrder():
            svcMgr.partners[partnerName].doAutokill()
if __name__ == '__main__':
    # import pdb; pdb.set_trace()
    # Script entry point: boot (or attach to) the bus with the default config.
    Start(getDefaultSplineConfigFilename())
| Python |
#!python
# -*- coding: UTF-8 -*-
# Service Partner LINE: A Manager Daemon for the Pentacle System Bus
# Copyright 2011 Clint Banis & Penobscot Robotics. All rights reserved.
# Pronunciation: splīn
#
from ..application import bootServer, Application
from ..architecture import ServiceBase, Event
from ..config import Configuration
from ..client import Client, ClientOptions, Session, Authorization
from ..runtime import breakOn, getCurrentSystemTime, Object, LookupObject, thisProcessId, nth
from ..network import SocketBindMethod, AsyncoreControl
from ..storage import StorageUnit
from ..packaging import Fault
from .. import buildApplicationVersion
from . import __version__ as busVersion
from . import *
from .services import *
import socket
import errno
# Todo: Merge this functionality with pentacle application.
# (Note that this is a subsystem identity, actually, specific to the service API)
SERVICE_NAME = 'SplineBus'
# Full version banner reported by ServiceManager.Identify().
__service_identity__ = buildApplicationVersion(SERVICE_NAME, busVersion)
class ServiceManager(ServiceBase):
NAME = ServiceManagerName
Methods = ['Identify', 'RegisterAsPartner', 'GetManagerStats', 'GetPartnerInfo']
# Managing runlevels:
class PartnerManager:
class PartnerStorage(StorageUnit):
STORAGE_REALM = 'PartnerStorage'
class Interface(StorageUnit.Interface):
def getServicePort(self):
return self.getValue('servicePort')
def setServicePort(self, port):
self.setValue('servicePort', port)
def getBootTime(self):
return self.getValue('bootTime')
def setBootTime(self, time):
self.setValue('bootTime', time)
def __init__(self, svcMgr, apiMgr):
# Associate service and API managers with routines accessible to partner services.
self.svcMgr = svcMgr
self.apiMgr = apiMgr
def OpenStorage(self, partnerName):
return self.PartnerStorage.Open(self.apiMgr.application, partnerName)
DYNAMIC_PORT_RANGE = [30750, 31000]
def getDynamicPort(self, bindAddress = '127.0.0.1'):
bindMethod = SocketBindMethod.New(bindAddress)
try: return OpenAvailablePort(xrange(*self.DYNAMIC_PORT_RANGE), bindMethod)
except SystemError, e:
raise SystemError('%s in range: %s' % (e, self.DYNAMIC_PORT_RANGE))
def getManagerPort(self):
return self.apiMgr.application.network.port
# Actions.
PartnerBooted = Event('spline-partner-booted')
def partnerBooted(self, partner):
self.PartnerBooted(self.apiMgr.application.engine, partner)
PartnerInjected = Event('spline-partner-injected')
def partnerInjected(self, partner):
self.PartnerInjected(self.apiMgr.application.engine, partner)
PartnerUpdated = Event('spline-partner-updated')
def partnerUpdated(self, partner):
self.PartnerUpdated(self.apiMgr.application.engine, partner)
def Activate(self, apiMgr):
# Configurate the service manager.
cfg = apiMgr.application.config.getSectionObject('Spline')
self.partners = {}
# Provide unique access to the manager from partner service handlers.
partnerManager = self.PartnerManager(self, apiMgr)
loadOrder = []
for pCfgName in cfg.getOptionMultiple('partner'):
if pCfgName:
# todo: fail on load?
pCfg = Configuration.FromFile(pCfgName)
partner = Partner.FromConfig(pCfg)
if partner.name in self.partners:
pass # log error
else:
self.partners[partner.name] = partner
loadOrder.append(partner.name)
# Load state of currently-running service.
partner.RestoreTrackedState(partnerManager)
# Stats.
self.activationTime = getCurrentSystemTime()
if cfg.getOption('public-access', simplify = True):
apiMgr.application.security.grantPublicAction('access-api:%s' % self.NAME)
# External Partner Connections.
#@breakOn
def doRegisterAsPartner(configString, digest = None):
# Load configuration.
pCfg = Configuration.FromString(configString)
partner = Partner.FromConfig(pCfg)
name = partner.name
# Find by name in already-loaded partners.
try: match = self.partners[name]
except KeyError:
# Inject new partner with configured custom handler.
self.partners[name] = partner
loadOrder.append(partner)
apiMgr.application.log('Injecting Partner: %s' % name)
partner.inject(partnerManager, pCfg)
else:
# Match authorization key.
if not match.isAuthorized(partner, digest, configString):
raise AuthorizationError('Not authorized: %s' % name)
# Update configuration.
apiMgr.application.log('Updating Partner: %s' % name)
match.updateStatus(partnerManager, pCfg)
def doGetLoadOrder():
return list(loadOrder) # copy
# Defined inline to protect cellular variables.
self.RegisterAsPartner = doRegisterAsPartner
self.GetLoadOrder = doGetLoadOrder
# Do in another thread.
@nth
def bootPartners():
# Boot partner runlevels.
for n in loadOrder:
# todo: ignore/log problems? disabled partner state?
apiMgr.application.log('Booting Partner: %s' % n)
self.partners[n].boot(partnerManager)
## def Deactivate(self):
## # Auto-kill.
## pass
def Identify(self):
return __service_identity__
def GetManagerStats(self):
return dict(running_duration = getCurrentSystemTime() - self.activationTime,
partners = self.partners.keys(),
process_id = thisProcessId())
def GetPartnerInfo(self, name):
return self.partners[name].GetInfo()
# Startup.
def isInstanceRunning(options):
    """Probe options.address:port for an already-running Spline instance.

    Returns True when a recognized server answers; raises UnknownServer when
    something else answers; lets socket.error propagate when nothing listens.
    """
    # First attempt to see if an instance is already running.
    with Session(Authorization(options.username or '',
                               options.secret_key or '') \
                 .Open(options.address, options.port)) as client:
        DEBUG('Opening Spline API for Identification...')
        with client.api(ServiceManager.NAME) as spline:
            # Bug fix: capture the identity -- it was previously an unbound
            # name in the UnknownServer raise below (NameError).
            identity = spline.Identify()
            if isRecognizedServer(identity, APPLICATION_NAME):
                # Already started -- do nothing.
                return True
            raise UnknownServer(identity)
def Start(config_file, argv = None):
    # Boot a Spline instance from *config_file* unless one is already
    # running; serve until the host network ends, then auto-kill partners.
    cfg = Configuration.FromCmdln(argv, config_file, ClientOptions, Application)
    try: running = isInstanceRunning(cfg.set)
    except socket.error, e:
        # Connection refused (and friends) just means nothing is listening.
        if e.errno not in [errno.EAGAIN, errno.EBADF, errno.ECONNREFUSED]:
            raise
        running = False
    except (UnknownServer, AuthenticationError), e:
        print '%s: %s' % (e.__class__.__name__, e)
        return
    except Fault, fault:
        print 'FAULT ON CONNECT:\n%s' % fault.toString(True)
        return
    if not running:
        # Boot up a new instance.
        DEBUG('Booting Spline...')
        boot = bootServer # breakOn(bootServer)
        app = boot(config_file = config_file)
        # Ensure the ServiceManager is installed -- this blocks the
        # rest of the application (including engine-message processing).
        svcMgr = app.api.loadServiceApiObject(ServiceManager)
        # Want to start up the host network:
        DEBUG('services booted')
        AsyncoreControl.waitForAsyncoreEnd()
        DEBUG('Spline host-ready')
        app.run()
        # What about shutdown?? We may want to kill off some children...
        # Or, at least notify listeners. I suppose notification would
        # have to be done within the loop, so really a timed shutdown.
        #
        # (Or, do this within Deactivate)
        for partnerName in svcMgr.GetLoadOrder():
            svcMgr.partners[partnerName].doAutokill()
if __name__ == '__main__':
    # import pdb; pdb.set_trace()
    # Script entry point: boot (or attach to) the bus with the default config.
    Start(getDefaultSplineConfigFilename())
| Python |
# Service Manager Components.
from ..application import Application
from ..security import CalculateDigest, GenerateSecretKey
from ..runtime import Object, LookupObject, getCurrentSystemTime, sendSignal, breakOn, contextmanager
from ..client import Client, Session
from ..network import AsyncoreControl
from . import *
import signal
import socket
import errno
import sys
import os
from os.path import join as joinpath, basename, splitext
# Byte-length of generated partner secret keys (see Partner.__init__).
DEFAULT_AUTHKEY_SIZE = 32
# Sentinel meaning 'no signal configured' (accepted and returned by ValidateSignal).
NOSIG = None
# Utility Routines.
# todo: move into runtime.utility
def DefaultPidFile(name, exepath):
    """Build the conventional pidfile path for partner *name* run from *exepath*.

    The name is lowercased with whitespace collapsed to dashes; the
    executable contributes its extension-less basename.
    """
    slug = '-'.join(name.split()).lower()
    exe = splitext(basename(exepath))[0]
    return joinpath('/tmp', 'proc', '%s-%s.pid' % (slug, exe))
def ValidateSignal(name):
    """Normalize a signal spec to a validated signal number.

    *name* may be NOSIG (returned unchanged), a symbolic name like 'term'
    (resolved via getattr, so an unknown name raises AttributeError), a
    digit string, or an int.  Numeric inputs are checked against the real
    SIG* constants; UnknownSignalName is raised for an unknown number.
    """
    if name == NOSIG:
        return NOSIG
    if isinstance(name, basestring):
        if name.isdigit():
            sigNr = int(name)
        else:
            return getattr(signal, 'SIG%s' % name.upper())
    else:
        assert isinstance(name, int)
        sigNr = name
    # Validate the number against the real SIG* constants.
    # Bug fix: the loop previously reused 'name' as its variable, so the
    # UnknownSignalName below reported a signal-module attribute instead of
    # the caller's original input.
    for signame in dir(signal):
        if len(signame) > 3 and signame.startswith('SIG') and signame[3] != '_':
            if getattr(signal, signame) == sigNr:
                return sigNr
    raise UnknownSignalName(name)
@contextmanager
def TemporaryWorkingDir(workingDir):
    """Context manager: chdir into *workingDir* for the duration of the block.

    A None *workingDir* makes this a no-op; otherwise the previous working
    directory is always restored, even on error.
    """
    if workingDir is None:
        yield
        return
    previousDir = os.getcwd()
    os.chdir(workingDir)
    try:
        yield
    finally:
        os.chdir(previousDir)
# Partner Class Implementations
class Partner(Object):
    """Abstract base class for a service partner tracked by the Spline bus.

    Holds identity (name), the shared-secret authorization key, and
    runtime state (service port, boot time) persisted via PartnerStorage.
    """
    @classmethod
    def FromConfig(self, cfg):
        """Factory: choose the concrete Partner subclass from the config."""
        # Load enough options to distinguish partner type.
        main = cfg.getSectionObject('Partner')
        handlerName = main.get('handler')
        if handlerName:
            if handlerName == 'adhoc':
                # Yes, a little workaround, cuz we're not sure how spline is packaged.
                return AdhocPartner.FromConfig(cfg)
            handler = LookupObject(handlerName)
            if handler is None:
                raise NameError(handlerName)
            # Pass it to another object.
            return handler.FromConfig(cfg)
        progr = cfg.getSectionObject('Program')
        builtin = progr.get('builtin')
        # Another special type -- for straight pentacle servers.
        if builtin == 'pentacle-server':
            return PentaclePartner.FromConfig(cfg)
        return ProgramPartner.FromConfig(cfg)
    def __init__(self, cfg, name, auth):
        if auth in ['auto']: # [None]:
            # Generate a fresh shared secret when configured as 'auto'.
            auth = GenerateSecretKey(DEFAULT_AUTHKEY_SIZE)
        # Config
        self.config = cfg
        self.name = name
        self.auth = auth
        self.weblink = cfg.getSectionOption('weblink', 'Application')
        # Stats
        self.bootTime = getCurrentSystemTime()
    def OpenStorage(self, mgr):
        return mgr.OpenStorage(self.name)
    def isAuthorized(self, partner, digest, *payload):
        # The registrant proves knowledge of our secret by digesting payload.
        return CalculateDigest(self.auth, *payload) == digest
    def GetUptime(self):
        return getCurrentSystemTime() - self.bootTime
    def GetInfo(self):
        return dict(name = self.name,
                    boottime = self.bootTime,
                    uptime = self.GetUptime(),
                    weblink = self.weblink)
    def inject(self, injectionMgr, cfg):
        """First-time registration of an externally-booted partner."""
        self.bootTime = getCurrentSystemTime()
        self.TrackState(injectionMgr, cfg)
        injectionMgr.partnerInjected(self)
    def updateStatus(self, updateMgr, cfg):
        """Re-registration of an already-known partner."""
        self.TrackState(updateMgr, cfg)
        updateMgr.partnerUpdated(self)
    def doAutokill(self):
        # Overridden by managed subclasses; base partners aren't killable.
        pass
    def TrackState(self, mgr, cfg):
        """Absorb runtime state from *cfg* into memory and persist it."""
        # Update state in memory.
        progr = cfg.getSectionObject('Program')
        # Bug fix: int() was previously applied before the emptiness check
        # (port = int(progr.get('service-port'))), so a missing or empty
        # 'service-port' option raised instead of passing through falsy.
        port = progr.get('service-port')
        self.service_port = int(port) if port else port
        weblink = cfg.getSectionOption('weblink', 'Application')
        if weblink is not None:
            self.weblink = weblink
        bootTime = progr.get('boot-time')
        if bootTime is not None:
            # Todo: time format.
            self.bootTime = int(bootTime)
        self.state = progr.get('state')
        # Update state in records.
        with self.OpenStorage(mgr) as i:
            i.setBootTime(self.bootTime)
            i.setServicePort(self.service_port)
    def RestoreTrackedState(self, mgr):
        """Reload persisted boot time / service port from storage, if present."""
        # Load current information from records.
        with self.OpenStorage(mgr) as i:
            bootTime = i.getBootTime()
            if bootTime is not None:
                self.bootTime = bootTime
            servicePort = i.getServicePort()
            if servicePort is not None:
                self.service_port = servicePort
class AdhocPartner(Partner):
    'An external previously-unknown process that identifies and registers itself.'
    @classmethod
    def FromConfig(self, cfg):
        # Just load enough to identify and authorize.
        main = cfg.getSectionObject('Partner')
        name = main.get('name')
        auth = main.get('authorization')
        assert name
        return self(cfg, name, auth)
    # Introspection metadata consumed by the runtime Object machinery.
    Meta = Object.Meta('name', 'state')
    def __init__(self, cfg, name, auth):
        Partner.__init__(self, cfg, name, auth)
    def GetInfo(self):
        # Extend base info with the registered service port.
        # NOTE(review): self.service_port is only set by TrackState /
        # RestoreTrackedState -- confirm registration always precedes this.
        base = Partner.GetInfo(self)
        base['service_port'] = self.service_port
        return base
class ProgramPartner(Partner):
'A Spline-managed managed sub-process service.'
@classmethod
def FromConfig(self, cfg):
# A regular program-partner.
main = cfg.getSectionObject('Partner')
name = main.get('name')
auth = main.get('authorization')
dependencies = main.getOptionMultiple('dependencies')
progr = cfg.getSectionObject('Program')
exepath = progr.get('exe-path')
cmdln_args = [a for a in progr.getOptionMultiple('args') if a is not None]
pidfile = progr.get('pidfile')
port = progr.get('service-port')
working_dir = progr.get('working-dir')
autokill = progr.get('autokill')
environ = cfg.getSectionObject('Environ').asDict()
sigs = cfg.getSectionObject('Signals')
signals = self.Signals(kill = sigs.get('kill-signal'),
suspend = sigs.get('suspend-signal'),
resume = sigs.get('resume-signal'),
reload = sigs.get('reload-signal'))
# ... and various runlevel events
# Validate and construct.
assert name
assert exepath
if pidfile is None:
pidfile = DefaultPidFile(name, exepath)
if port:
if port != 'auto':
port = int(port)
return self(cfg, name, auth, exepath, cmdln_args, environ, pidfile,
port, working_dir, signals, autokill)
class Signals(Object):
if sys.platform == 'cygwin': # or.. linux
from signal import SIGKILL as DEFAULT_KILL, SIGSTOP as DEFAULT_SUSPEND, \
SIGCONT as DEFAULT_RESUME, SIGUSR1 as DEFAULT_RELOAD
elif sys.platform == 'win32':
from signal import SIGABRT as DEFAULT_KILL
DEFAULT_SUSPEND = DEFAULT_RESUME = DEFAULT_RELOAD = -1
def __init__(self, **kwd):
self.kill = ValidateSignal(kwd.pop('kill' , self.DEFAULT_KILL))
self.suspend = ValidateSignal(kwd.pop('suspend', self.DEFAULT_SUSPEND))
self.resume = ValidateSignal(kwd.pop('resume' , self.DEFAULT_RESUME))
self.reload = ValidateSignal(kwd.pop('reload' , self.DEFAULT_RELOAD))
def copy(self):
return dict(kill = self.kill, suspend = self.suspend,
resume = self.resume, reload = self.reload)
Meta = Object.Meta('name', 'exepath', 'pidfile', 'service_port')
def __init__(self, cfg, name, auth, exepath, cmdln_args, environ, pidfile,
port, working_dir, signals, autokill):
Partner.__init__(self, cfg, name, auth)
self.exepath = exepath
self.cmdln_args = cmdln_args
self.environ = environ
self.pidfile = pidfile
self.service_port = port
self.working_dir = working_dir
self.signals = signals
self.autokill = autokill
def GetInfo(self):
base = Partner.GetInfo(self)
base.update(dict(exepath = self.exepath,
cmdln_args = self.cmdln_args,
pidfile = self.pidfile,
service_port = self.service_port,
signals = self.signals.copy()))
return base
def doAutokill(self):
# todo: override this for PentaclePartner, and have it connect and issue stopApplication.
if self.autokill and self.signals.kill and isinstance(self.processId, int):
sendSignal(self.processId, self.signals.kill)
def ReadPidfile(self):
if self.pidfile:
# Strategy: read only enough to identify an integer.
try:
contents = open(self.pidfile).read(20)
if contents.isdigit():
return int(contents)
except IOError, e:
if e.errno != errno.ENOENT:
raise
def WritePidfile(self, pid):
if self.pidfile:
pid = int(pid)
try:
fl = open(self.pidfile, 'w')
print >> fl, pid
fl.flush()
fl.close()
except IOError, e:
# Usually directory doesn't exist.
if e.errno != errno.ENOENT:
raise
def ValidateProcess(self):
# Is the program running with the pid as saved, the same executable?
# (Recommended for specific executables, not generic instances like python or shell scripts)
pid = self.ReadPidfile()
if isinstance(pid, int):
# Yep, relying on /proc
try: runningExe = open('/proc/%s/exename' % pid).read()
except IOError: pass
else: return runningExe == self.exepath
def checkRunlevelActively(self):
# First, check the service-port.
# todo: catch errors, also skip it if there is no service_port configured.
# this means merging with isInstanceRunning code (probably put into bus)
if isinstance(self.service_port, int):
# Must match this partner name as a feature. Necessary because service-
# ports might be reassigned, especially when partner sets are changed.
partnerName = PartnerNameFeature(self.name)
try:
with Session(Client.Open('localhost', self.service_port)) as client:
DEBUG(' recognizing:', self.service_port, '(%s)' % self.name)
if isRecognizedServer(client.call.Identify(), appFeatures = [partnerName]):
# Todo: send updated service manager info (like port),
# since obviously it was rebooted out from under partner.
return True
except socket.error, e:
if e.errno not in [errno.EAGAIN, errno.EBADF, errno.ECONNREFUSED]:
raise
# We'll want to use superior host network.
# -- Yeah but host already started, spline-side.
## print 'client-side asyncore wait'
## AsyncoreControl.waitForAsyncoreEnd()
## print '...asyncore done'
# Then check pidfile and determine its up-time.
if self.ValidateProcess() is not None:
return True
def setupExecutablePath(self, bootMgr):
return self.exepath
def setupCommandLineArgs(self, bootMgr):
return self.cmdln_args
def setupBootEnvironment(self, bootMgr):
env = os.environ.copy()
env['SPLINE_PORT'] = '%s' % (bootMgr.getManagerPort() or '')
return env
#@breakOn
def boot(self, bootMgr):
if not self.checkRunlevelActively():
# Otherwise, just boot and build those things
# todo: allow fork-spawn to cause it to orphan? We don't want it to get SIGHUP or TERM or whatever
DEBUG(' booting:', self.name)
exepath = self.setupExecutablePath(bootMgr)
args = self.setupCommandLineArgs(bootMgr)
environ = self.setupBootEnvironment(bootMgr)
cmdln = [exepath] + list(args)
with TemporaryWorkingDir(self.working_dir):
pid = os.spawnve(os.P_NOWAIT, exepath, cmdln, environ)
# todo: be sensitive to the event that the subprocess didn't successfully boot.
# (that it somehow crashed). This isn't necessarily possible if it's orphaned,
# (or, if we're starting NOWAIT) but I guess we can assume some algorithm that
# more or less expects a startup answer within an amount of time.
#
# Of course, this requires more sophisticated signal/reap-chld handling.
self.processId = pid
self.WritePidfile(pid)
# Push broadcast event to engine
bootMgr.partnerBooted(self)
class PentaclePartner(ProgramPartner):
    'Specifically a Pentacle sub-server managed by Spline.'

    Meta = Object.Meta('name', 'service_port', 'config_file')

    @classmethod
    def FromConfig(self, cfg):
        # Configure a special program partner that merely invokes another
        # pentacle server instance, passing it another configuration.
        partnerSect = cfg.getSectionObject('Partner')
        programSect = cfg.getSectionObject('Program')
        partnerName = partnerSect.get('name')
        authKey = partnerSect.get('authorization')
        servicePort = programSect.get('service-port')
        autokill = programSect.get('autokill')
        # Validate and construct.
        assert partnerName
        if servicePort and servicePort != 'auto':
            servicePort = int(servicePort)
        # Handle inline configuration.
        configFile = programSect.get('pentacle-config')
        if configFile == 'builtin':
            configFile = cfg.filename
        return self(cfg, partnerName, authKey, servicePort, configFile, autokill)

    def __init__(self, cfg, name, auth, port, config_file, autokill):
        # Boot via the current interpreter; no exe/args/pidfile of its own.
        ProgramPartner.__init__(self, cfg, name, auth,
                                sys.executable, None, None,
                                None, port, None,
                                self.Signals(), autokill)
        self.config_file = config_file

    def setupCommandLineArgs(self, bootMgr):
        # The partnered application main.
        return ['-m', busPartnerMain()]

    def setupBootEnvironment(self, bootMgr):
        # This would be a good place to paste in the partner authorization code,
        # so it can be used by the bus partner main.
        env = ProgramPartner.setupBootEnvironment(self, bootMgr)
        env[Application.PENTACLE_CONFIG_ENV_VAR] = self.config_file or ''
        env[PENTACLE_PARTNER_NAME_ENV_VAR] = self.name or ''
        env[PENTACLE_PARTNER_AUTH_ENV_VAR] = self.auth or ''
        return env

    def GetInfo(self):
        base = Partner.GetInfo(self) # Skip Program
        base.update(dict(service_port = self.service_port,
                         config_file = self.config_file))
        return base
| Python |
# Package entry point: delegate straight to the package Main.
from . import Main
if __name__ == '__main__':
    Main()
| Python |
# Not sure if this should go here: what exactly is it doing?
# Managing multiple partners on localhost?
# Providing a network interface to start them up on workstations? (that's spline's job)
from ...services import PentaclePartner
class PartnerCluster(PentaclePartner):
    # Unimplemented stub: construction always fails (see __init__).
    @classmethod
    def FromConfig(self, cfg):
        # NOTE(review): ignores cfg and calls the no-arg constructor, which
        # unconditionally raises NotImplementedError.
        return self()
    def __init__(self):
        raise NotImplementedError
| Python |
# Client Bus -- Service Partner processes use these routines for backtalk.
# Also, non-partner processes can use these routines to access services.
from ...application import Application
from ...packaging import Fault
from ...client import Session, Client, Authorization
from ...config import INI
from .. import *
import os
import socket
import errno
# todo:
# Not share the same application database files (directly)
# Smooth over all configuration
# How much autonomy does a service have if the manage isn't running?
# Should it boot the manager? No, but we can at least run and
# then how do we communicate the port to spline-tracked state?
# Adhoc Service Partners
BASE = INI(Partner = dict(handler = 'adhoc')) # Err why adhoc again??
def partnerConf(name, port, **other):
    # Prepares a message that will be used to update the service manager partner records.
    # NOTE(review): this writes the key 'service_port' while the partner
    # readers fetch 'service-port' (hyphen) -- confirm the INI layer
    # normalizes underscore/hyphen, otherwise the port is never picked up.
    return str(BASE + dict(Partner = dict(name = name),
                           Program = dict(service_port = port)) \
               + other)
def registerAs(partnerConfig, authKey, mgrOptions = None):
    # Register the calling process as a partner with the Spline service
    # manager: sign the serialized partner config text with authKey and
    # submit it over an authenticated session.
    assert isinstance(partnerConfig, basestring)
    if mgrOptions is None:
        mgrOptions = getDefaultSplineConfig().set
    from pentacle.client import Authorization, CalculateDigest
    digest = CalculateDigest(authKey, partnerConfig)
    # XXX This requires some kind of login to access the api, but
    # it's kind of hard to access this mode information that way,
    # so we do another hmac digest here?? Todo: Clean this up.
    auth = Authorization(mgrOptions.username or '', mgrOptions.secret_key or '')
    with Session(auth.Open(mgrOptions.address, mgrOptions.port)) as client:
        with client.api(ServiceManagerName) as spline:
            spline.RegisterAsPartner(partnerConfig, digest)
def getManagerOptions(appConfig, section = 'Manager'):
    """Build a ConfigSet view over the given section of the app config
    (really just trying to get another ConfigSet object)."""
    return appConfig.ConfigSet(appConfig, section = section, simplify = True)
class PartneredApplication(Application):
    """A Pentacle server application that registers itself with the Spline
    service manager when booted (see UpdateRegistration)."""
    @classmethod
    def Main(self, argv = None):
        # A Pentacle server application that boots as a partner.
        app = self.Boot(argv)
        app.UpdateRegistration()
        app.run()
    class InitialMode(Application.InitialMode):
        def doIdentify(self, engine, peer):
            # See bus.isRecognizedServer and ProgramPartner.checkRunlevelActivity
            return '%s; partnerName = %s' % (engine.application.version,
                                             engine.application.partnerName)
    def IntegratePartnerName(self, partnerName):
        # Remember our manager-assigned identity (reported via doIdentify).
        self.partnerName = partnerName
    def getConfiguration(self):
        # Extra sections to fold into the registration message.
        # return dict(Application = dict(weblink = ''))
        return dict()
    def UpdateRegistration(self):
        # Report our service port back to the manager; failures to reach the
        # manager are reported but do not abort startup.
        # Also, get the spline manager port from the environment.
        partnerName = os.environ.get(PENTACLE_PARTNER_NAME_ENV_VAR)
        if partnerName:
            # Necessary for uniquely identifying partners by Spline.
            self.IntegratePartnerName(partnerName)
            # The primary payload is the service port.
            conf = self.getConfiguration()
            try: registerAs(partnerConf(partnerName, self.network.port, **conf),
                            os.environ.get(PENTACLE_PARTNER_AUTH_ENV_VAR, ''),
                            mgrOptions = getManagerOptions(self.config))
            except Fault, fault:
                print 'REGISTRATION ERROR:\n%s' % fault.toString(True)
            except socket.error, e:
                if e.errno in [errno.EAGAIN, errno.EBADF, errno.ECONNREFUSED]:
                    print 'SERVICE MANAGER NOT AVAILABLE (%s)' % errno.errorcode[e.errno]
                else:
                    raise
# Module-level entry point alias.
Main = PartneredApplication.Main
# Client Services -- put in pentacle.client/support??
def OpenPartneredClient(partner_name, cfg = None, **kwd):
    # Look up a partner's service port via the Spline manager, then open a
    # direct client connection to that partnered service.
    # NOTE(review): returns None when the reported service_port is not an
    # int -- callers must be prepared for that.
    if cfg is None:
        section = 'SplineClient'
        cfg = INI(**{section: dict(address = 'localhost',
                                   port = kwd.pop('port', Application.DEFAULT_PORT))})
        cfg += {section: kwd}
        cfg = cfg.toConfigObject(default_section = section)
    options = cfg.set
    # First, connect to the manager.
    auth = Authorization(options.username or '', options.secret_key or '')
    with Session(auth.Open(options.address, options.port)) as client:
        with client.api(ServiceManagerName) as spline:
            info = spline.GetPartnerInfo(partner_name)
            service_port = info['service_port']
    # Next, connect to the partnered service.
    if isinstance(service_port, int):
        return Client.Open(options.address, service_port)
def partnerBreakConsole(partnerName, cfg = None):
    """Open a remote break-debug console on a partnered service."""
    # A pretty specific way of break-debugging any partnered service.
    cfg = getDefaultSplineConfig(cfg)
    # BUGFIX: `options` was referenced without ever being bound (NameError
    # at runtime).  Mirror OpenPartneredClient and take the option set from
    # the resolved configuration.
    options = cfg.set
    auth = Authorization(options.username or '', options.secret_key or '')
    partneredClient = OpenPartneredClient(partnerName, cfg)
    with Session(auth.Authenticate(partneredClient)) as client:
        with client.api('Console::Remote') as console:
            # NOTE(review): `pentacle.shell` is not imported in this module's
            # visible imports -- confirm it arrives via the star-imports.
            pentacle.shell.InteractWith(console)
| Python |
# Copyright 2011 Clint Banis
import os
__version__ = 0.1
# Well-known API name under which the Spline service manager is exposed.
ServiceManagerName = 'ServiceManager::SplineBus'
# Explicit public API of this package.
__all__ = ['__version__', 'ServiceManagerName', 'isRecognizedServer',
           'SPLINE_CONFIG_ENV_VAR', 'DEFAULT_SPLINE_CONFIG_FILE',
           'PENTACLE_PARTNER_NAME_ENV_VAR', 'PENTACLE_PARTNER_AUTH_ENV_VAR',
           'getDefaultSplineConfigFilename', 'getDefaultSplineConfig',
           'busPartnerMain', 'PartnerNameFeature',
           'AuthenticationError', 'UnknownServer', 'UnknownSignalName']
class AuthenticationError(Exception):
    """Raised when a peer fails authentication."""
    pass
class UnknownServer(Exception):
    """Raised when a server cannot be identified/recognized."""
    pass
class UnknownSignalName(NameError):
    """Raised for a signal name that cannot be resolved to a number."""
    pass
# Environment variable naming the Spline config file, and its fallback path.
SPLINE_CONFIG_ENV_VAR = 'SPLINE_CONFIG'
DEFAULT_SPLINE_CONFIG_FILE = '~/.spline/application.cfg'
# Environment variables through which a booted partner sub-process learns
# its identity and authorization key.
PENTACLE_PARTNER_NAME_ENV_VAR = 'PENTACLE_PARTNER_NAME'
PENTACLE_PARTNER_AUTH_ENV_VAR = 'PENTACLE_PARTNER_AUTH'
def getDefaultSplineConfigFilename(default_file = None):
    """Resolve the spline config path: environment override first, then the
    caller's default, then the package-wide fallback file."""
    candidate = os.environ.get(SPLINE_CONFIG_ENV_VAR, default_file)
    return candidate or DEFAULT_SPLINE_CONFIG_FILE
def getDefaultSplineConfig(cfg = None, default_file = None):
    # Pass an existing config through unchanged; otherwise load from the
    # default filename.
    # NOTE(review): `Configuration` is not imported in this module's visible
    # code (only `os` is) -- confirm it is in scope, else this raises
    # NameError when cfg is None.
    if cfg is None:
        cfgfile = getDefaultSplineConfigFilename(default_file)
        cfg = Configuration.FromFile(cfgfile)
    return cfg
def PartnerNameFeature(name):
    """Return the (key, value) feature pair identifying a partner by name,
    as matched by isRecognizedServer."""
    return 'partnerName', name
def isRecognizedServer(ident, appName = None, appVersion = None, appFeatures = None):
    """Decide whether an Identify() string matches the given criteria.

    Returns True when all supplied criteria match, False when a criterion
    fails, and None when `ident` is not in a form we understand.
    """
    if isinstance(ident, basestring):
        parts = [p.strip() for p in ident.split(';')]
        if parts:
            # The first part is 'name/version'.
            # BUGFIX: a malformed first part (missing '/', non-numeric
            # version) used to raise ValueError; treat it as unrecognized.
            try:
                (name, version) = parts[0].split('/')
                version = float(version)
            except ValueError:
                return None
            name = name.lower()
            if isinstance(appName, (list, tuple)):
                if name not in (n.lower() for n in appName):
                    return False
            elif isinstance(appName, basestring):
                if name != appName.lower():
                    return False
            if isinstance(appVersion, (list, tuple)):
                if version < appVersion:
                    return False
            if isinstance(appFeatures, (list, tuple)):
                # Remaining parts are 'key' or 'key=value' features.
                foundFeatures = dict()
                for p in parts[1:]:
                    feature = p.split('=', 1)
                    if len(feature) == 1:
                        foundFeatures[feature[0].strip()] = True
                    elif len(feature) == 2:
                        foundFeatures[feature[0].strip()] = feature[1].strip()
                # Must match all these features.
                for f in appFeatures:
                    if isinstance(f, basestring):
                        if f not in foundFeatures:
                            return False
                    elif isinstance(f, (list, tuple)):
                        (fname, fvalue) = f
                        # BUGFIX: an absent required feature used to raise
                        # KeyError here; treat it as a non-match instead.
                        if foundFeatures.get(fname) != fvalue:
                            return False
                # todo: detect feature versions?
            # Pretty much recognize based on the criteria.
            return True
    # Did not identify itself in a way we can understand.
    return None
# The module name used for booting partner main application entry.
def busPartnerMain():
    # Dotted module name handed to `python -m` when booting a partnered
    # sub-process (see PentaclePartner.setupCommandLineArgs).
    from partners import main # relative
    return main.__name__
| Python |
# Process Architecture
# Todo:
# Move into runtime/core
#
__all__ = ['Engine', 'Event', 'Component', 'Serializable',
'BaseMode', 'SubcommandMode', 'UnboundApiMode',
'ServiceBase']
import Queue
from .runtime import *
# Event Driver
class Engine(Object):
    """Queue-driven message pump: callers post Message objects (or
    exceptions) and run() dispatches them until Stop is raised."""
    def __init__(self, application, timeout = None):
        self.application = application
        self.controllerStack = []
        self.messageQueue = Queue.Queue()
        self.timeout = timeout
    class Stop(SystemExit):
        # Posted/raised to terminate run().
        pass
    class Message(Object): # Shouldn't this be called Action?
        def dispatch(self, engine):
            raise NotImplementedError
    # Main Application Loop.
    def run(self):
        try:
            while True:
                try: msg = self.messageQueue.get(timeout = self.timeout)
                except Queue.Empty:
                    continue
                if isinstance(msg, BaseException):
                    # Exceptions posted to the queue are re-raised here;
                    # this is how stop() terminates the loop.
                    raise msg
                elif isinstance(msg, self.Message):
                    self.dispatchMessage(msg)
        except self.Stop:
            pass
    def postMessage(self, msg):
        self.messageQueue.put(msg)
    def stop(self):
        self.messageQueue.put(self.Stop())
    def dispatchMessage(self, msg):
        # Dispatch one message, logging (not propagating) its errors.
        try: msg.dispatch(self)
        except self.Stop:
            raise
        except:
            self.logException('Exception handling: %r' % msg)
    @contextmanager
    def Controller(self, ctlr):
        # Scoped push/pop of the active controller.
        self.controllerStack.append(ctlr)
        try: yield
        finally:
            self.controllerStack.pop()
    def getController(self):
        # Top of the controller stack, or None when empty.
        try: return self.controllerStack[-1]
        except IndexError:
            pass # raise self.NoController
    # Logging
    def log(self, message):
        self.application.log(message)
    def logException(self, message = None):
        # Post the current traceback (with optional prefix) to the log.
        from traceback import format_exc
        tb = format_exc()
        # BUGFIX: message defaults to None, which made `message + tb` raise
        # TypeError and lose the traceback being reported.
        self.logMessage((message or '') + tb)
    class LogMessage(Message):
        def __init__(self, message):
            self.message = message
        def dispatch(self, engine):
            engine.log(self.message)
    def logMessage(self, message):
        self.postMessage(self.LogMessage(message))
class Event(Object):
class Result(RuntimeError): # Why error? Should be like BaseException
def __init__(self, result):
self.result = result
def __init__(self, name):
self.name = name
self.listeners = []
def __repr__(self):
return '<%s: %s>' % (self.__class__.__name__, self.name)
def addListener(self, listener):
self.listeners.append(listener)
def removeListenser(self, listener):
self.listeners.remove(listener)
def __iadd__(self, listener):
self.addListener(listener)
return self
def __isub__(self, listener):
self.removeListener(listener)
return self
def Listen(self, function):
# Decorator.
self += function
return function
def broadcast(self, *args, **kwd):
for n in self.listeners:
try: n(self, *args, **kwd)
except self.Result, r:
return r.result
except:
# ??
printException()
# Post a dispatchable message:
class EventBroadcast(Engine.Message):
def __init__(self, event, args, kwd):
self.event = event
self.args = args
self.kwd = kwd
def __repr__(self):
return '<%s: %r>' % (self.__class__.__name__, self.event)
def dispatch(self, engine):
self.event.broadcast(engine, *self.args, **self.kwd)
def __call__(self, engine, *args, **kwd):
engine.postMessage(self.EventBroadcast(self, args, kwd))
class Component(Object):
    """Base for application components: holds the owning application and
    offers path expansion plus a command-line option hook."""
    def __init__(self, application):
        self.application = application
    @classmethod
    def getPathName(self, path):
        # Expand ~user and $VAR references in one pass.
        return expanduser(expandvars(path))
    @classmethod
    def addCmdlnOptions(self, parser):
        # Subclasses contribute their command-line options here.
        pass
# Input/Command Modes
class BaseMode(Object):
    """Base input/command mode; modes form a stack via previousMode."""
    def __init__(self, previous = None):
        self.previousMode = previous
    def popMode(self, peer):
        # Restore the peer to the mode active before this one.
        peer.mode = self.previousMode
class UnknownCommandError(NameError):
    """Raised (optionally) for a command name no mode can interpret."""
    pass
class SubcommandMode(BaseMode):
    """Mode that dispatches commands to do<CapitalizedName> methods."""
    def interpretCommand(self, engine, peer, name, *args, **kwd):
        # Note: the kwd names could interfere with the method impl params.
        action = getattr(self, 'do%s' % getCapitalizedName(name), None)
        if callable(action):
            return action(engine, peer, *args, **kwd)
        return self.defaultCommand(engine, peer, name, *args, **kwd)
    def defaultCommand(self, engine, peer, name, *args, **kwd):
        # Fallback for unhandled commands; currently a silent no-op.
        # if self._previous_mode:
        #     return self._previous_mode.interpretCommand(engine, peer, name, *args, **kwd)
        # Note: most notably, all unhandled commands do this: are you logged in to the api?
        # raise UnknownCommandError(name) ??
        pass
class UnboundApiMode(SubcommandMode):
    """Mode exposing remote-entity access: call a method on, or read a
    property of, an object previously proxied into the peer's data channel."""
    def _resolveEntity(self, peer, refId):
        # Map a proxy refId back to the live object, or fail with ApiError.
        # (Extracted: this lookup was duplicated in both do-methods below.)
        try: return peer.dataChannel.getObjectOrError(refId)
        except ValueError:
            # The object wasn't proxied?? It must have gone away.
            raise ApiManagement.ApiError("The object referred to by #%d doesn't exist" % refId)
    def doCallEntityMethod(self, engine, peer, refId, methodName, *args, **kwd):
        # Got the real object: call its method, straight-away!
        object = self._resolveEntity(peer, refId)
        method = getAttributeChain(object, methodName)
        return method(*args, **kwd)
    def doGetEntityProperty(self, engine, peer, refId, propertyName):
        # Return property on real object.
        object = self._resolveEntity(peer, refId)
        return getAttributeChain(object, propertyName)
    def doCloseApi(self, engine, peer):
        peer.mode.popMode(peer)
# Encoding
class Serializable:
    """Marker interface for objects that may cross the encoding layer."""
    # Interface
    pass
# Services
def ApiMethod(function):
    """Decorator: mark *function* as exposed through the service API."""
    setattr(function, 'apiMethod_Exposed', True)
    return function
def isApiMethod(function):
    # True when the function carries the ApiMethod exposure marker.
    return getattr(function, 'apiMethod_Exposed', False)
# Thoughts on exposing the Service API:
#
# Leverage the existing Meta model for runtime Objects. This means
# declaring exposed methods through the Meta, or, declaring unsafe
# methods that shouldn't be exposed.
#
# Define exposed methods using an explicit member, like 'Methods' on
# the service class.
# Of course, expose methods using decorators.
#
# Consider wrapping objects returned to limit their exposure, as well,
# all defined by the service-api map (in the Meta).
# Wrap using a decorator is most obvious, easiest.
# What about doing a default Activate or ctor to scan those
# specified in Meta?
#
class ServiceBase(Object):
    """Base class for exposed services: a context manager that resolves an
    attribute-chain name into a callable, denying reserved attributes."""
    NAME = 'DefaultService'
    def Activate(self, apiMgr):
        # Bind the service to the managing api layer.
        self.apiMgr = apiMgr
    @contextmanager
    def __call__(self, engine, peer, name):
        # Setup: Put engine/peer into externally-accessible context?
        ## if not self.isExposedMethod(name):
        ##     raise AttributeError(name)
        if self.isReservedAttribute(name):
            raise AttributeError(name)
        yield getAttributeChain(self, name)
    @classmethod
    def isReservedAttribute(self, name):
        # Pretty sure this is enough to just deny external access to the application,
        # while leaving it open internally, since this is the api gateway.
        return name in ['application', 'Activate', 'Deactivate', '__init__']
    ## @classmethod
    ## def isExposedMethod(self, name):
    ##     return isApiMethod(getattr(self, name, None))
| Python |
#!python
# Front End.
# Script entry: delegate to the application's main().
from .application import main
if __name__ == '__main__':
    main()
| Python |
#!python
# Peer Data Client
from .encoding import *
from .runtime import *
from .network import *
from .security import *
from .packaging import *
from .application import *
from .architecture import *
import pdb
import socket
from contextlib import closing as Session
def DEBUG(*args):
    # Debug trace hook, disabled by default; re-enable the print to trace.
    pass # print ' '.join(map(str, args))
class APICall:
    """Callable factory: APICall(name) yields a method-call handle bound to
    the underlying call/getprop channel functions."""
    def __init__(self, call, getprop = None):
        self.call = call
        self.getprop = getprop
    def __call__(self, name):
        # NOTE(review): APICall itself defines no APIMethod attribute; the
        # module-level APIMethod class is not reachable via `self.` -- confirm
        # how this resolves (e.g. a subclass attribute) before relying on it.
        return self.APIMethod(name, self.call, self.getprop)
    def __repr__(self):
        return '<%s>' % (self.__class__.__name__)
class APIHandle:
    """Handle on a remotely-opened API: method calls go through `invoke`,
    and close() issues 'closeApi' (also on garbage collection)."""
    class APIHandleCall:
        def __init__(self, call, getprop = None):
            self.__call = call
            self.__getprop = getprop
        def __getattr__(self, name):
            # NOTE(review): `Method` is not defined in this module's visible
            # code -- presumably supplied by the star-imports; confirm.
            return Method(name, self.__call, self.__getprop)
        def __repr__(self):
            # NOTE(review): self.__api (mangled _APIHandleCall__api) is never
            # assigned anywhere visible, so this repr would raise
            # AttributeError -- verify before use.
            return '<%s: %r>' % (self.__class__.__name__, self.__api)
    def __init__(self, api, name):
        self.api = api
        self.name = name
        # Q: Why not 'call'? to match Client<-MethodCall
        self.invoke = self.APIHandleCall(api.call, api.getprop)
        self.closed = False
    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.name)
    def __del__(self):
        self.close()
    def close(self):
        # Idempotent: only the first close sends 'closeApi'.
        if not self.closed:
            self.api.call('closeApi')
            self.closed = True
    def open(self, name):
        # NOTE(review): APIHandle defines no `call` attribute (only
        # self.api.call) -- this method looks misplaced/broken; confirm.
        self.call('openApi', name)
        return self.APIHandle(self, name)
class APIMethod(MethodCall):
    """Context-managed API scope: entering sends 'openApi', attribute access
    yields remote method stubs, exiting sends 'closeApi'."""
    def __init__(self, name, call, getprop = None):
        self.__name = name
        self.__call = call
        self.__getprop = getprop
    def __repr__(self):
        return '<%s: %s>' % (self.__class__.__name__, self.__name)
    def __getattr__(self, name):
        # NOTE(review): `Method` is not defined in this module's visible
        # code -- presumably supplied by the star-imports; confirm.
        return Method(name, self.__call, self.__getprop)
    # Context Control:
    def __enter__(self):
        self.__call('openApi', self.__name)
        return self
    def __exit__(self, etype = None, value = None, tb = None):
        self.__call('closeApi')
# Module-wide debug flag, toggled via setDebug().
debug = False
def setDebug(state):
    # Set the module-wide debug flag.
    global debug
    debug = state
class AsyncoreControl:
    """Runs the asyncore poll loop on a background thread, ensuring at most
    one loop runs at a time (guarded by a class-level lock)."""
    TIMEOUT = 0.3 # Good responsiveness
    _asyncore_run_lock = threading.Lock()
    @classmethod
    def runAsyncore(self):
        # Poll until the asyncore socket map empties out.
        with self._asyncore_run_lock:
            # XXX this needs to be atomic with the lock, but I don't want
            # to have to lock for every socket map check.  So, try not to
            # start a new connection immediately after the last one closes.
            # (I could probably use some other threading object)
            while asyncore.socket_map:
                ## if debug:
                ##     import pdb; pdb.set_trace()
                asyncore.loop(timeout = self.TIMEOUT,
                              count = 1) # , use_poll = True)
    @classmethod
    def startAsyncore(self):
        # Start the loop thread only if one isn't already running (the
        # non-blocking acquire doubles as the "already running" test).
        if self._asyncore_run_lock.acquire(False):
            nth(self.runAsyncore)
            self._asyncore_run_lock.release()
class Client(PackageReader, asyncore.dispatcher_with_send, AsyncoreControl):
@classmethod
def Open(self, address, port, wait = True):
handler = self()
handler.openConnection(address, port)
if wait:
handler.WaitForConnection()
return handler
def __init__(self, *args, **kwd):
asyncore.dispatcher_with_send.__init__(self, *args, **kwd)
PackageReader.__init__(self)
self.dataChannel = EntitySpace()
self.deferred_responses = {} # Ordered?
self.command_nr = 0
deferredMutex = synchronizedWith(None, recursive = True)
self.pushResponse = deferredMutex(self.pushResponse)
self.getWaitState = deferredMutex(self.getWaitState)
self.call = MethodCall(self.callCommand, self.getEntityProperty)
self.api = APICall(self.callCommand, self.getEntityProperty)
def openConnection(self, address, port):
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connectWait = threading.Event()
self.connectError = None
self.connect((address, port))
self.startAsyncore()
def handle_connect(self):
self.connectWait.set()
def handle_read(self):
try: PackageReader.handle_read(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
def handle_write_event(self):
try: asyncore.dispatcher_with_send.handle_write_event(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
def handle_expt_event(self):
# Strangely, this is called after a failed write for refused connections.
# So we're racing on connectError (and double connectWait.set, btw)
try: asyncore.dispatcher_with_send.handle_expt_event(self)
except socket.error, e:
self.connectError = e
self.connectWait.set()
def WaitForConnection(self):
while not self.connectWait.isSet():
self.connectWait.wait()
if self.connected:
return True
if self.connectError:
raise self.connectError
def __repr__(self):
cls = self.__class__
if self.addr:
return '<%s.%s: [%s:%d]>' % (cls.__module__, cls.__name__) + self.addr
return '<%s.%s>' % (cls.__module__, cls.__name__)
def sendSerialCommand(self, command, (serialId, flags), *args, **kwd):
data = self.dataChannel.encodeSerialCommand(command, (serialId, flags), *args, **kwd)
DEBUG('COMMAND[#%d]: %s' % (serialId, data))
self.send(data)
def newSerialId(self):
try: return self.command_nr
finally: self.command_nr += 1
def callCommand(self, command, *args, **kwd):
serialId = self.newSerialId()
self.sendSerialCommand(command, (serialId, 0), *args, **kwd)
response = self.waitForResponse(serialId)
return interpretResponse(response,
callEntityMethod = self.callEntityMethod,
getEntityProperty = self.getEntityProperty)
rpc_callEntityMethod = 'callEntityMethod'
def callEntityMethod(self, proxy, *args, **kwd):
return self.callCommand(self.rpc_callEntityMethod, proxy.refId, *args, **kwd)
rpc_getEntityProperty = 'getEntityProperty'
def getEntityProperty(self, proxy, name):
return self.callCommand(self.rpc_getEntityProperty, proxy.refId, name)
def handleIncomingPackage(self, package):
DEBUG('RECEIVED-PACKAGE:', package)
response = Response.FromPackage(package, self.dataChannel)
if response is not None:
serialId = response.serialId
if serialId is None:
# Do some default handling.
try: print interpretResponse(response)
except Fault, fault:
print fault
else:
DEBUG('PUSHING RESPONSE:', response)
self.pushResponse(serialId, response)
def pushResponse(self, serialId, response):
# Notify waiter.
try: waiting = self.deferred_responses[serialId]
except KeyError:
DEBUG('GOT RESPONSE BEFORE WAITER: [#%d]' % serialId)
waiting = [None, response]
self.deferred_responses[serialId] = waiting
else:
DEBUG('POSTING RESPONSE TO WAITER: [#%d]' % serialId)
del self.deferred_responses[serialId]
waiting[1] = response
waiting[0].set()
def getWaitState(self, serialId):
try: waiting = self.deferred_responses[serialId]
except KeyError:
waiting = [threading.Event(), None]
self.deferred_responses[serialId] = waiting
return waiting
def waitForResponse(self, serialId, timeout = 0.4):
waiting = self.getWaitState(serialId)
if waiting[0] is not None:
wait = waiting[0]
DEBUG('WAITING FOR [#%d]...' % serialId)
while not wait.isSet():
wait.wait(timeout = timeout)
return waiting[1]
## def write_data(self, data):
## print '...', data
## return PackageReader.write_data(self, data)
class Authorization:
    """Username + secret-key credentials used to log a client session in."""
    def __init__(self, username, secretKey):
        self.username = username
        self.secretKey = secretKey
    def Open(self, hostname, port):
        # Connect, then immediately authenticate the new client.
        freshClient = Client.Open(hostname, port)
        return self.Authenticate(freshClient)
    def Authenticate(self, client):
        # Log in with a digest of the username under our secret key.
        digest = CalculateDigest(self.secretKey, self.username)
        client.call.login(self.username, digest)
        return client
def printFault(fault, cause = False):
    # Print a remote fault; when `cause` is set, first print the local
    # traceback that triggered it.
    if cause:
        print 'Fault Caused Locally By:'
        printException()
        print
    print 'REMOTE FAULT:'
    print fault.toString(tb = True)
class CompressIfWorthIt:
THRESHOLD = .70
All = Object()
def __init__(self, threshold = None, level = None):
self.threshold = self.THRESHOLD
self.level = level if level is not None else self.All
def __iter__(self):
if self.level is self.All:
yield 3
# yield 2
yield 1
else:
yield self.level
def worthIt(self, z, u):
print 'WORTH IT??: (%d/%d) %f' % (z, u, float(z) / u)
# This probably will only be worth it for messages of a certain size.
return (float(z) / u) < self.threshold
def __call__(self, data):
data_length = len(data)
for level in self:
compressed = Compress(data, level)
if self.worthIt(len(compressed), data_length):
return compressed
return data
# Front End.
# Connection defaults mirrored from the application, used by ClientOptions.
DEFAULT_PORT = Application.DEFAULT_PORT
DEFAULT_HOST = 'localhost'
def ApplyCompressionChannel(channel, level = None, threshold = None):
    # Configure a channel's compression policy from command-line strings:
    # level is a digit or 'all'; threshold is a percentage like '70%'.
    if level is not None:
        if isinstance(level, basestring):
            if level.isdigit():
                level = int(level)
            else:
                assert level == 'all'
        if isinstance(threshold, basestring):
            import re
            m = re.match('^(\d+)%$', threshold)
            assert m is not None
            threshold = int(m.groups()[0]) / 100.0
        channel.compression_level = CompressIfWorthIt(threshold, level)
class ClientOptions(Component):
    """Command-line option schema and parser for the client front end."""
    @classmethod
    def addCmdlnOptions(self, parser):
        # Connection / session options.
        parser.add_option('--port', default = DEFAULT_PORT, type = int)
        parser.add_option('--service-partner-name')
        parser.add_option('--address', default = DEFAULT_HOST)
        parser.add_option('--username')
        parser.add_option('--secret-key', default = '')
        # Behavior flags.
        parser.add_option('--api-test', action = 'store_true')
        parser.add_option('--quit', action = 'store_true')
        parser.add_option('--examine', action = 'store_true')
        parser.add_option('--compression-level')
        parser.add_option('--compression-threshold')
        parser.add_option('--open-system-api', action = 'store_true')
        parser.add_option('--enable-timing-tests', action = 'store_true')
        parser.add_option('--open-spline-api', action = 'store_true')
        parser.add_option('-g', '--debug', action = 'store_true')
        parser.add_option('--open-api')
    @classmethod
    def getCmdlnParser(self):
        from optparse import OptionParser
        parser = OptionParser()
        self.addCmdlnOptions(parser)
        return parser
    @classmethod
    def parseCmdln(self, argv = None):
        # Returns the (options, args) pair from optparse.
        parser = self.getCmdlnParser()
        return parser.parse_args(argv)
def testApi(client):
    # Smoke-test a connected client by loading and exercising a series of
    # server-side service APIs (clock, debug, directory, aux storage).
    with client.api('API::Management') as api:
        print 'Loading ClockService...'
        print api.loadNewServiceApi('pentacle.application.ClockService').NAME
        print 'Loading SystemDebug...'
        print api.loadNewServiceApi('pentacle.application.SystemDebug').NAME
        print 'Loading ObjectDirectory...'
        print api.loadNewServiceApi('pentacle.application.ObjectDirectory').NAME
        print 'Loading AuxiliaryApi...'
        print api.loadNewServiceApi('pentacle.storage.StorageManagement.AuxiliaryApi').NAME
    print 'Start Ticking...'
    with client.api('ClockService::API') as clock:
        clock.setClockTicking(10000)
        print 'Current Clock Time:', clock.getClockTime()
    print 'Testing Debug...'
    # client.debug = 1
    with client.api('System::Debugging') as debug:
        # The remote fail() is expected to fault; print it locally.
        try: debug.fail()
        except Fault, fault:
            printFault(fault)
    print 'Testing Object Directory...'
    with client.api('System::Directory') as directory:
        N = directory.newObject
        directory.setObject('Feature', N(Aspect = N(Scope = N(Name = 'A System Feature Aspect Scope'))))
        print directory.getObject('Feature').Aspect.Scope.Name
    print 'Testing Auxiliary Storage'
    with client.api('Storage::Aux') as aux:
        db = aux.buildAuxiliaryStore(buildCmdln(db_path = '~/.pentacle/application.db'))
        user = aux.lookupUnitAccess('UserStorage')
        fraun = user.Open(db.application, 'fraun')
        # change user-fraun
        print dict(fraun.unit.unit.items())
def setupClient(client, options):
import pentacle
import sys
ns = dict(peer = client, client = client,
call = client.call, options = options,
pentacle = pentacle, sys = sys,
g = Synthetic(**globals()))
if options.open_system_api:
system = client.api.open('System::Debugging')
e = system.invoke.evaluate
ns['system'] = system
ns['e'] = e = system.invoke.evaluate
ns['x'] = x = system.invoke.execute
if options.enable_timing_tests:
def testTiming(n = 2017):
return len(e('"A" * %d' % n))
def timeIt(n, number, repeat = 1):
from timeit import repeat as tFunc
return tFunc(lambda:testTiming(n),
number = number,
repeat = repeat)
ns['tt'] = testTiming
ns['ti'] = timeIt
elif options.open_spline_api:
def PrintUptime(svcmgr):
print 'Uptime: %(running_duration)s' % svcmgr.invoke.GetManagerStats()
ns['spline'] = client.api.open('ServiceManager::Spline')
ns['PrintUptime'] = PrintUptime
elif options.open_api:
(variable, name) = options.open_api.split(':', 1)
ns[variable] = client.api.open(name)
return ns
def optionalArgs(options, *names):
    # Collect the requested option attributes into a keyword dict, omitting
    # any that are missing or unset (None).  Useful for forwarding only the
    # options the caller actually supplied.
    pairs = ((n, getattr(options, n, None)) for n in names)
    return dict((n, v) for (n, v) in pairs if v is not None)
def main(argv = None):
    # Client entry point: parse the command line, connect (either through a
    # named service partner or directly to address:port), then — when a
    # username is given — authenticate and run the optional actions:
    # api self-test, interactive console, remote application shutdown.
    (options, args) = ClientOptions.parseCmdln(argv)
    # Start connection.
    if options.service_partner_name:
        try:
            from pentacle.bus.partners import OpenPartneredClient
            client = OpenPartneredClient(options.service_partner_name,
                                         **optionalArgs(options, 'username', 'port'))
        except Fault, fault:
            print 'SERVICE-MANAGER FAULT:\n%s' % fault.toString(True)
            return
    else:
        print 'Connecting to [%s:%d]...' % (options.address, options.port)
        client = Client.Open(options.address, options.port)
        # Compression is configured only on the direct-connection channel.
        ApplyCompressionChannel(client.dataChannel,
                                options.compression_level,
                                options.compression_threshold)
    if options.debug:
        # NOTE(review): pdb is assumed imported at module level — confirm.
        pdb.set_trace()
    # Handle Session.
    username = options.username
    if username:
        try:
            print 'Logging in %s...' % username
            auth = Authorization(username, options.secret_key)
            auth.Authenticate(client)
            if options.api_test:
                testApi(client)
            if options.examine:
                # readline is optional; it only improves console editing.
                try: import readline
                except ImportError:
                    pass
                ns = setupClient(client, options)
                import pentacle, sys
                from code import InteractiveConsole as IC
                ic = IC(locals = ns)
                ic.interact()
            if options.quit:
                # Ask the remote application to shut itself down.
                print 'Quitting Application...'
                client.call.stopApplication()
        except Fault, fault:
            printFault(fault, cause = True)
        except KeyboardInterrupt:
            print 'Console Break'
if __name__ == '__main__':
main()
| Python |
#!python
# Front End.
from .application import main
if __name__ == '__main__':
main()
| Python |
# Network and Application Config
__all__ = ['Configuration']
from ConfigParser import ConfigParser, DEFAULTSECT, NoSectionError, NoOptionError
from optparse import OptionParser, Values
from os.path import dirname, abspath, normpath
import re
from .architecture import Component
from .runtime import Object, NewBuffer
# Todo: This module should require tons of documentation. For example, vectored
# options, defaults operation (they're not interpolated further using Common),
# when optparse-derived default values are lists (and how they're semi-flattened),
# config.set (and its default_section passed through factories), simplification
# layers, and the priority of these option forms:
def getCmdlnOptionForms(name):
    # Spellings tried when resolving a command-line option: the name as
    # given, then with dashes rewritten to underscores (the optparse
    # attribute/destination form).
    for form in (name, name.replace('-', '_')):
        yield form
def getSectionOptionForms(name):
    # Spellings tried when resolving an ini-section option: the name as
    # given, then with underscores rewritten to dashes (the ini-file form).
    for form in (name, name.replace('_', '-')):
        yield form
class DeferredDefaultValues(Values, object):
    # optparse Values subclass that remembers which options were explicitly
    # assigned, so untouched parser defaults can be overridden later (e.g.
    # by a config file) via getDefaultSensitive.
    class DefaultValue(Exception):
        # Carries the default value out of getDefaultSensitive when the
        # option was never explicitly set.
        def __init__(self, value):
            self.value = value
    def __init__(self, *args, **kwd):
        Values.__init__(self, *args, **kwd)
        self.__attrSet_counters = {} # Avoid counting defaults init.
    def __setattr__(self, name, value):
        # Track the number of times this is set.
        # (Note: __attrSet_counters is name-mangled, so the counters dict
        # itself is also stored through this path.)
        try: self.__attrSet_counters[name] = self.__attrSet_counters.get(name, 0) + 1
        except AttributeError:
            # Ignore non-existence counters variable during defaults initialization.
            pass
        self.__dict__[name] = value
    def getDefaultSensitive(self, name):
        '''
        Return the option value only if it was explicitly set.
        Otherwise, raise an exception with the default value.
        '''
        try: value = self.__dict__[name]
        except KeyError:
            raise AttributeError(name)
        if not self.__attrSet_counters.get(name, 0):
            # Raise the default value as an exception if it's not already set.
            raise self.DefaultValue(value)
        return value
    @classmethod
    def FromParserDefaults(self, parser):
        # Get normal default values from parser, but then convert to a simple
        # dict type for initialization of our class impl.
        values = parser.get_default_values()
        values = dict((n, getattr(values, n)) for n in parser.defaults.iterkeys())
        return self(values)
class Simplifications:
    # Value-coercion policies applied to raw option strings.
    @classmethod
    def Get(self, status):
        # Map a simplify= argument to a coercion function: True selects the
        # standard level-one coercion; a callable is used verbatim
        # (previous or custom simplifications); anything else is identity.
        if status is True:
            return self.levelOneSimplification
        return status if callable(status) else self.noSimplification
    @staticmethod
    def noSimplification(value):
        # Identity policy: hand the raw value back unchanged.
        return value
    @staticmethod
    def levelOneSimplification(value):
        # Coerce common string spellings: boolean words and unsigned-integer
        # strings.  Non-strings and anything unrecognized pass through.
        if not isinstance(value, basestring):
            return value
        lowered = value.lower()
        if lowered in ('true', 'yes', 'on'):
            return True
        if lowered in ('false', 'no', 'off'):
            return False
        return int(value) if value.isdigit() else value
# Please, don't even try to match negative numbers.
MATCH_OPTION_VECTOR = re.compile('(.*?)\.(\d+)').match
class Configuration(Component):
    # Layered configuration: command-line options (which always override)
    # merged over ConfigParser ini sections, with %-interpolation variables
    # drawn from the [Common] section plus config-file/config-dir builtins.
    Meta = Object.Meta('filename')
    @classmethod
    def FromCmdln(self, argv, default_config_file, *components, **kwd):
        # Build the cmdln-option parser, load components, then pass to ini.
        parser = OptionParser()
        parser.add_option('-C', '--config-file', default = default_config_file)
        for c in components:
            # Components may return a replacement parser; otherwise they
            # mutate the given one.
            newP = c.addCmdlnOptions(parser)
            if newP is not None:
                parser = newP
        values = DeferredDefaultValues.FromParserDefaults(parser)
        (options, args) = parser.parse_args(argv, values = values)
        return self.FromCmdlnOptions(options, **kwd)
    @classmethod
    def FromCmdlnOptions(self, options, **kwd):
        # NOTE(review): getPathName is not defined in this module —
        # presumably provided by Component; confirm.
        config_file = self.getPathName(options.config_file)
        cfg = ConfigParser()
        cfg.read([config_file])
        return self(config_file, cfg, options, **kwd)
    @classmethod
    def FromString(self, string, **kwd):
        # Parse configuration text directly from a string (no filename).
        cfg = ConfigParser()
        cfg.readfp(NewBuffer(string))
        return self(None, cfg, **kwd)
    @classmethod
    def FromFile(self, filename, **kwd):
        cfg = ConfigParser()
        cfg.read([filename])
        return self(filename, cfg, **kwd)
    @classmethod
    def FromFileObject(self, fileObj, **kwd):
        cfg = ConfigParser()
        cfg.readfp(fileObj)
        return self(fileObj.name, cfg, **kwd)
    def __init__(self, filename, cfg, options = None, default_section = None):
        self.filename = filename or ''
        self.cfg = cfg
        # options: a DeferredDefaultValues (or None when no cmdln involved).
        self.options = options
        # Attribute-style access bound to the default section, simplified.
        self.set = self.ConfigSet(self, section = default_section, simplify = True)
        self.loadVariables()
        # Configure the configurator:
        # self.simplify = True
    class ConfigSet: # (Object)
        # Facade: cfg.set.some_option looks up 'some_option' in the bound
        # section with the bound simplification policy.
        def __init__(self, cfgObj, section = None, simplify = False):
            self.__cfgObj = cfgObj
            self.__section = section
            self.__simplify = simplify
        def __getattr__(self, name):
            return self.__cfgObj.getOption(name, section = self.__section,
                                           simplify = self.__simplify)
    def loadVariables(self):
        # Build the %-interpolation variable map handed to ConfigParser.get:
        # config-file/config-dir builtins plus everything in [Common].
        self.variables = {} # Because asDict uses it!
        cfgpath = abspath(normpath(self.filename))
        self.variables['config-file'] = cfgpath
        self.variables['config-dir'] = dirname(cfgpath)
        self.variables.update(self.getSectionObject('Common').asDict())
    def getOption(self, name, section = None, default = None, simplify = False):
        # Command-line always overrides.
        simplify = Simplifications.Get(simplify)
        if self.options is not None:
            for form in getCmdlnOptionForms(name):
                try: return simplify(self.options.getDefaultSensitive(form))
                except DeferredDefaultValues.DefaultValue, d:
                    # Option exists but was never explicitly set: remember
                    # its parser default and fall through to the ini file.
                    default = d.value
                    break
                except AttributeError:
                    continue
        return self.getSectionOption(name, section, default = default, simplify = simplify)
    def getOptionMultiple(self, name, section = None, default = None, simplify = False):
        # Get all possible option values, merging.
        simplify = Simplifications.Get(simplify)
        result = []
        if self.options is not None:
            for form in getCmdlnOptionForms(name):
                try: oneOpt = simplify(self.options.getDefaultSensitive(form))
                except DeferredDefaultValues.DefaultValue, d:
                    default = d.value
                    break
                except AttributeError:
                    continue
                else:
                    # NOTE(review): list-valued cmdln options are neither
                    # appended nor break the loop here — apparently deferred
                    # to the section vector below; confirm intent.
                    if not isinstance(oneOpt, list):
                        result.append(oneOpt)
                        # Just one option.
                        break
        result.extend(self.getSectionOptionVector(name, section = section, default = default, simplify = simplify))
        return result
    __getitem__ = getOptionMultiple
    ALLOWED_SETOPTION_KWD = set(['simplify', 'section'])
    def getOptionSet(self, *names, **kwd):
        # Generator yielding getOption for each name; kwd restricted to
        # section/simplify.
        assert self.ALLOWED_SETOPTION_KWD.issuperset(kwd.keys())
        for n in names:
            yield self.getOption(n, **kwd)
    # Sections.
    def getSectionOption(self, name, section = None, default = None, simplify = False):
        # Plain single-value ini lookup (with interpolation variables);
        # tries both name spellings; absent option yields the raw default.
        if section is None:
            section = DEFAULTSECT
        simplify = Simplifications.Get(simplify)
        for form in getSectionOptionForms(name):
            try: return simplify(self.cfg.get(section, form, vars = self.variables))
            except (NoSectionError, NoOptionError):
                continue
        return default # don't simplify
    def getSectionOptionVector(self, name, section = None, default = None, simplify = False):
        # Yield vectored option values 'name.0', 'name.1', ... from the
        # section; missing indices yield simplify(None).  When the section
        # or the option is absent, yield the (semi-flattened, unsimplified)
        # default instead.
        try: sopts = self.cfg.options(section)
        except NoSectionError: pass
        else:
            top = [0]
            def ov(sopts):
                # Collect index -> raw value for matching vectored options,
                # recording the highest index seen in top[0].
                t = {}
                x = 0
                for n in sopts:
                    m = MATCH_OPTION_VECTOR(n)
                    if m is not None:
                        (o, i) = m.groups()
                        if o == name:
                            if i:
                                i = int(i)
                                if i > x:
                                    x = i
                            else:
                                i = 0
                            t[i] = self.cfg.get(section, n, vars = self.variables)
                top[0] = x
                return t
            simplify = Simplifications.Get(simplify)
            t = ov(sopts)
            if t:
                for i in xrange(top[0] + 1):
                    yield simplify(t.get(i))
                # Python 2 generator termination; under PEP 479 (py3.7+)
                # this would need to be a plain return.
                raise StopIteration
        # If nothing was found, use default(/semi-flattened), but DON'T SIMPLIFY.
        if isinstance(default, (list, tuple)):
            for v in default:
                yield v
        else:
            yield default
    def getSectionObject(self, name):
        return self.Section(self, name)
    class Section(Object):
        # Lightweight view binding a Configuration to one section name.
        Meta = Object.Meta('name')
        def __init__(self, cfgObj, name):
            self.cfgObj = cfgObj
            self.name = name
        def getOption(self, opt, **kwd):
            return self.cfgObj.getOption(opt, section = self.name, **kwd)
        get = getOption
        def getOptionMultiple(self, name, **kwd):
            return self.cfgObj.getOptionMultiple(name, section = self.name, **kwd)
        __getitem__ = getOptionMultiple
        def options(self):
            return self.cfgObj.cfg.options(self.name)
        def asDict(self, multiple = False, **kwd):
            # multiple=True goes through the merging/vector path (honoring
            # kwd); otherwise raw ConfigParser values, absent section -> {}.
            if multiple:
                return dict((n, self.getOptionMultiple(n, **kwd)) for n in self.options())
            # More basic: (no simplification)
            cfg = self.cfgObj.cfg
            vars = self.cfgObj.variables
            g = cfg.get
            s = self.name
            try: return dict((n, g(s, n, vars = vars)) for n in cfg.options(s))
            except NoSectionError:
                return dict()
    def sectionNames(self):
        return self.cfg.sections()
    def __iter__(self):
        return iter(self.sectionNames())
# This is another layer aimed at making configuration more convenient.
# Todo: move all this into ini.py??
SECTION_MATCH = re.compile(r'^\s*\[([^]]+)\]\s*$').match
VALUEPAIR_MATCH = re.compile('^\s*([^:]+)\s*:\s*(.*)$').match
COMMENT_MATCH = re.compile(r'^\s*;|#').match
def parseINI(inputSource):
    '''
    Parse an iterable of ini-format lines into a dict of dicts:
    {section-name: {option-name: value}}.

    Comment lines (";" or "#") are ignored.  Value pairs appearing before
    any [section] header are filed under ConfigParser's DEFAULTSECT.
    '''
    ini = {}
    sectionName = None
    # Bug fix: currentSection must be initialized before the loop —
    # previously a value-pair line preceding any [section] header raised
    # UnboundLocalError on the `currentSection is None` test below.
    currentSection = None
    for line in inputSource:
        # Ignore comments.
        if COMMENT_MATCH(line):
            continue
        # Match section header.
        m = SECTION_MATCH(line)
        if m is not None:
            n = m.groups()[0]
            if n != sectionName:
                sectionName = n
                currentSection = ini.setdefault(n, {})
            continue
        # Match value pair line.
        m = VALUEPAIR_MATCH(line)
        if m is not None:
            (name, value) = m.groups()
            if currentSection is None:
                # Implicit leading section: use the ConfigParser default.
                sectionName = DEFAULTSECT
                currentSection = ini.setdefault(sectionName, {})
            currentSection[name] = value.rstrip()
    return ini
def buildSectionINI(__name, **values):
    '''
    Render one ini section named *__name* from keyword values.

    Values are automatically de-simplified: booleans become 'true'/'false',
    numbers become their string form, INI.Value objects are resolve()d, and
    list/tuple values are emitted as vectored options ('name.0: ...').
    Underscores in option names are rewritten to dashes.
    '''
    def denormOne(v):
        # De-simplify a single scalar back to its ini string form.
        if isinstance(v, bool):
            return 'true' if v else 'false'
        if isinstance(v, (int, long, float)):
            return str(v)
        if isinstance(v, INI.Value):
            v = v.resolve()
        assert isinstance(v, basestring)
        assert '\n' not in v
        return v
    def norm():
        # Normalize every value to a string or a list of strings.
        # Bug fix: the original merge branch assigned into a list with a
        # string key (`e[n] = v`, a TypeError) and asserted away list
        # values that rebuild() below explicitly supports; vectored values
        # are now de-simplified element-wise.
        r = {}
        for (n, v) in values.iteritems():
            if isinstance(v, (list, tuple)):
                r[n] = [denormOne(x) for x in v]
            else:
                r[n] = denormOne(v)
        return r
    def rebuild():
        for (n, v) in norm().iteritems():
            n = n.replace('_', '-')
            if isinstance(v, list):
                for i in xrange(len(v)):
                    yield '%s.%d: %s' % (n, i, v[i])
            else:
                yield '%s: %s' % (n, v)
    return '[%s]\n%s\n' % (__name, '\n'.join(rebuild()))
def buildConfigINI(*sections):
    # Join (name, values-dict) pairs into a complete ini document, one
    # rendered section per pair.
    parts = [buildSectionINI(name, **values) for (name, values) in sections]
    return '\n'.join(parts)
class INI:
    '''
    In-memory ini model: {section: {option: value-or-list}} with
    'section.option' item access.  Option names are normalized to use
    dashes in place of underscores.
    '''
    class Value:
        # Marker base for objects that resolve() to an ini string at build
        # time (see buildSectionINI).
        pass
    @classmethod
    def FromINI(self, source):
        # Accept ini text or a readable file-like object.
        if isinstance(source, basestring):
            source = NewBuffer(source)
        else:
            assert hasattr(source, 'read')
        return self(**parseINI(source))
    def toConfigObject(self, **kwd):
        # Round-trip through text into a full Configuration object.
        return Configuration.FromString(self.build(), **kwd)
    def __init__(self, **init):
        self.sections = dict()
        for (n, s) in init.iteritems():
            assert isinstance(s, dict)
            for (k, v) in s.iteritems():
                k = k.replace('_', '-')
                self.sections.setdefault(n, {})[k] = v
    def __getitem__(self, name):
        # name: 'section.option' or a (section, option) pair.
        if isinstance(name, basestring):
            name = name.split('.', 1)
        (section, option) = name
        option = option.replace('_', '-')
        return self.sections[section][option]
    def __setitem__(self, name, value):
        if isinstance(name, basestring):
            name = name.split('.', 1)
        (section, option) = name
        option = option.replace('_', '-')
        if isinstance(value, (list, tuple)):
            # Vectored option: store a (copied) list of scalars.
            assert all(isinstance(v, (basestring, int, long, float, bool)) for v in value)
            self.sections.setdefault(section, {})[option] = list(value)
        else:
            assert isinstance(value, (basestring, int, long, float, bool))
            self.sections.setdefault(section, {})[option] = value
    def __delitem__(self, name):
        # 'section' deletes the whole section; 'section.option' one option.
        name = name.split('.', 1)
        if len(name) == 1:
            del self.sections[name[0]]
        else:
            (section, option) = name
            option = option.replace('_', '-')
            del self.sections[section][option]
    def build(self):
        return buildConfigINI(*self.sections.iteritems())
    def buildSection(self, name):
        # Bug fix: the section name must be passed positionally —
        # buildSectionINI(**values) alone omitted the required __name
        # argument and raised TypeError on every call.
        return buildSectionINI(name, **self.sections[name])
    __str__ = build
    def __iter__(self):
        return self.sections.iterkeys()
    def options(self, name):
        return self.sections[name].iterkeys()
    def __iadd__(self, other):
        # Merge other's options into self (other wins on conflicts).
        if isinstance(other, dict):
            other = INI(**other)
        for section in other:
            for o in other.options(section):
                name = '%s.%s' % (section, o)
                self[name] = other[name]
        return self
    def __add__(self, other):
        new = INI()
        new += self
        new += other
        return new
    def copy(self):
        # NOTE(review): list values remain shared (shallow) between the
        # copy and the original — confirm callers tolerate that.
        return INI(**self.sections)
| Python |
# Application
__all__ = ['Application', 'ApiManagement']
from code import InteractiveConsole as IC
import sys
import os
from .architecture import *
from .network import *
from .security import *
from .storage import *
from .runtime import *
from .config import *
from . import __version__ as pentacleVersion
from . import buildApplicationVersion
APPLICATION_NAME = 'PentacleApp'
__identity__ = buildApplicationVersion(APPLICATION_NAME, pentacleVersion)
class Application(Object): # (Engine)
    # Top-level server application: wires configuration, storage, security
    # and the service-api layer together, then drives an Engine in the main
    # thread plus an optional host network poll loop in a background thread.
    DEFAULT_PORT = 7040
    DEFAULT_CONFIG_FILE = '~/.pentacle/application.cfg'
    PENTACLE_CONFIG_ENV_VAR = 'PENTACLE_CONFIG'
    def __init__(self, config):
        self.config = config
        self.version = self.getConfigOption('version', default = __identity__)
        self.storage = StorageManagement(self)
        self.security = RightsManagement(self)
        self.api = ApiManagement(self)
        # Todo: somethings like this could be updated live.
        self.logTypes = self.getConfigOption('log-types') or []
        # Build and activate Network and Engine.
        self.engine = Engine(self.getConfigOption('engine-timeout'))
        self.engine.application = self
        if self.useNetwork():
            (address, port) = self.getConfigOptionSet('bind-address', 'port',
                                                      section = 'Network',
                                                      simplify = True)
            def logNetworkBoot(msg):
                # Prefix boot-time network messages with the app version.
                self.logNetwork('%s: %s' % (self.version, msg))
            self.network = HostNetwork(self.engine, port, address = address)
            self.network.open_mother_socket(logNetworkBoot)
    def log(self, message):
        print message
    audit = log
    def logLevel(self, logType, message):
        # Emit only when logType was configured (or 'all' is).
        if logType in self.logTypes or 'all' in self.logTypes:
            self.log(message)
    def logException(self, message = None):
        if message:
            self.log(message)
        printException()
    def logNetwork(self, message):
        # self.logLevel('network', message)
        self.log(message)
    def getConfigOption(self, name, first = True, **kwd):
        # [Application]-section lookup; first=True unwraps the merged list
        # to its first element (returning None when the option is absent).
        kwd.setdefault('section', 'Application')
        result = self.config.getOptionMultiple(name, **kwd)
        try: return result[0] if first else result
        except IndexError:
            pass
    def getConfigOptionSet(self, *names, **kwd):
        kwd.setdefault('section', 'Application')
        return self.config.getOptionSet(*names, **kwd)
    def getConfigOptionMultiple(self, *args, **kwd):
        kwd.setdefault('section', 'Application')
        return self.config.getOptionMultiple(*args, **kwd)
    # Runtime Management.
    debug = False
    def networkThread(self):
        # Background poll loop; runs until _running_network is cleared by
        # stopNetwork (or another instance already holds the asyncore lock).
        # todo: move all this into network..?
        # import pdb; pdb.set_trace()
        self._running_network = True
        try:
            with self.network.asyncoreLock():
                while getattr(self, '_running_network', False):
                    ## if self.debug:
                    ## import pdb; pdb.set_trace()
                    # print 'Poll Cycle'
                    self.network.pollCycle()
        except self.network.AlreadyServing:
            # Quietly ignore and terminate loop.
            del self._running_network
        except:
            # Broad catch: log any poll-loop failure without killing the
            # process.  Pass to application log??
            import traceback
            traceback.print_exc()
    def profileEngine(self):
        # When profile-name is configured, run the engine under the profiler
        # and dump stats there; returns True so runEngine skips the plain run.
        profileName = self.getConfigOption('profile-name')
        if profileName:
            from profile import Profile
            profile = Profile()
            try: profile.runcall(self.engine.run)
            finally:
                profile.dump_stats(profileName)
            return True
    def runEngine(self):
        if not self.profileEngine():
            self.engine.run()
    def stopNetwork(self):
        # Clearing the flag ends networkThread's loop; the timeout interrupt
        # wakes the poll if it is blocked.
        try: del self._running_network
        except AttributeError: pass
        self.network.interruptTimeout()
    def stop(self):
        self.stopNetwork()
        self.engine.stop()
    def run(self):
        # Run network in thread.
        DEBUG('running network')
        nth(self.networkThread)
        DEBUG('running engine')
        # Run engine driver in main thread.
        try: self.runEngine()
        except KeyboardInterrupt:
            self.log('Console Interrupt')
            self.stop()
        else:
            self.log('Engine Stopped')
    # Front End.
    @classmethod
    def addCmdlnOptions(self, parser):
        parser.add_option('--emhw', action = 'store_true')
        parser.add_option('--profile-name')
        parser.add_option('--log-type', action = 'append',
                          default = [], dest = 'log_types')
        # Make some bad assumptions about ClientOptions interoperability.
        # This came from a need to blend application options with client
        # options in spline, so it could be configured as an app on the
        # cmdln. Todo: come up with a better way to do this.
        from optparse import OptionConflictError
        try: parser.add_option('--debug', action = 'store_true')
        except OptionConflictError: pass
        else: AddNetworkCmdlnOptions(self, parser)
    @classmethod
    def Boot(self, argv = None, config_file = None):
        # Build a Configuration from cmdln + config file (path taken from
        # the PENTACLE_CONFIG env var when not given) and construct the app.
        if config_file is None:
            config_file = os.environ.get(self.PENTACLE_CONFIG_ENV_VAR,
                                         self.DEFAULT_CONFIG_FILE)
        config = Configuration.FromCmdln(argv, config_file,
                                         self, StorageManagement,
                                         RightsManagement, ApiManagement,
                                         default_section = 'Application')
        if config.set.debug: # or True:
            import pdb; pdb.set_trace()
        return self(config)
    def useNetwork(self):
        # Determine if the given configuration/options need to host peers.
        if self.getConfigOption('emhw', simplify = True):
            return False
        if self.getConfigOption('dump-db'):
            return False
        # if self.getConfigOption('no-network'):
        # return False
        return True
    # Todo: Maybe split this into the server-class application and a cmdln app (for emhw)
    @classmethod
    def Main(self, argv = None):
        # Process entry point: boot, then dispatch on mode — emergency
        # console (emhw), storage dump, or the normal engine run.
        global app
        app = self.Boot(argv)
        if app.getConfigOption('emhw'):
            # Emergency Mode Holigraphic Wizard
            try: import readline
            except ImportError:
                pass
            import pentacle
            ic = IC(locals = dict(app = app, pentacle = pentacle,
                                  g = Synthetic(**globals())))
            ic.interact()
        elif app.getConfigOption('dump-db'):
            outfile = app.getConfigOption('dump-db')
            app.log('Dumping Storage DB to: %r' % outfile)
            app.storage.dumpDB(outfile)
        else:
            app.run()
# Interaction Modes
class InitialMode(SubcommandMode):
    # Pre-authentication protocol mode: exposes only identify and login.
    def popMode(self, peer):
        if self.previousMode is not None:
            # Cannot go beyond initial mode (if there isn't one).
            BaseMode.popMode(self, peer)
    def doIdentify(self, engine, peer):
        # Report the application's version/identity string.
        return engine.application.version
    def doLogin(self, engine, peer, username, authKey):
        # Credential checking is delegated to the peer; on success swap in
        # the mode it returns.  Returns False explicitly on failure.
        mode = peer.login(engine, username, authKey)
        if mode is None:
            return False
        peer.mode = mode
        engine.log('Logged In: %r -> %s' % (peer, username))
# Note: Override this class if you want to shape your application and still have authentication.
# But, you don't really need to since it's secure. What you want to do is activate service apis
# and grant access to them.
#
# Todo: consider subclassing UnboundApiMode, so that another mode doesn't have to be open
# in order to enjoy object method remoting benefits.
#
class LoggedInMode(InitialMode):
    # Post-authentication mode: session, api-opening and administrative
    # commands for a single authenticated username.
    Meta = Object.Meta('username')
    def __init__(self, peer, username):
        BaseMode.__init__(self, peer)
        self.username = username
    def getStorage(self, engine):
        return UserStorage.Open(engine.application, self.username)
    # Actions:
    def doWhoami(self, engine, peer):
        return self.username
    def doLogout(self, engine, peer):
        self.popMode(peer)
    def doStillConnected(self, engine, peer):
        # Liveness probe.
        return True
    def doOpenApi(self, engine, peer, apiName = None):
        # No name: enter the unbound remoting mode.  Otherwise bind the
        # named api object (permission-checked); denied requests are
        # silently ignored.
        if apiName is None:
            peer.mode = UnboundApiMode(self)
        else:
            apiObject = engine.application.api.getUserApiObject(self.username, apiName)
            if apiObject is not None:
                # Why not engine.application.api.ApiMode??
                peer.mode = ApiManagement.ApiMode(self, apiObject)
    # Todo: move into account-service api
    def doChangeSecretKey(self, engine, peer, secretKey):
        # Note: You wouldn't really want to pass this over the network.
        # todo: diffie-hellman?
        storage = self.getStorage(engine)
        storage.changeSecretKey(secretKey)
    # Todo: move into application-control service api
    def doStopApplication(self, engine, peer):
        # engine.getController().mode.username
        app = engine.application
        if app.security.isUserActionPermitted(self.username, 'stop-application'):
            app.stop()
    # Todo: move into security-control service api
    def doGrantPermission(self, engine, peer, actionName, permissionName, userPrincipalName, withGrant = False):
        # Grant action:permission to another principal; the caller must
        # hold the corresponding with-grant permission, which may also be
        # passed along.
        security = engine.application.security
        thePerm = '%s:%s' % (actionName, permissionName)
        withGrantPerm = 'with-grant:' + thePerm
        if security.isUserActionPermitted(self.username, withGrantPerm):
            security.grantUserAction(userPrincipalName, thePerm)
            if withGrant:
                security.grantUserAction(userPrincipalName, withGrantPerm)
    def doRevokePermission(self, engine, peer, permissionName, userPrincipalName, actionName):
        raise NotImplementedError
    # High-Powered: (used for testing)
    def doPowerUp(self, engine, peer):
        if engine.application.security.isUserActionPermitted(self.username, 'wizard-mode'):
            peer.mode = self.WizardMode(self)
    class WizardMode(SubcommandMode):
        # Powerful Mode -- UNCHECKED ACCESS.
        def doPowerDown(self, engine, peer):
            self.popMode(peer)
        def doMe(self, engine, peer):
            return peer
main = Application.Main
bootServer = Application.Boot
class ApiManagement(Component):
    '''
    Registry of service-api objects, loaded from configuration
    ([Services] service-path / service-api) or plugged in at runtime,
    with per-user access checks for handing apis to peers.
    '''
    class ServiceManager(ServiceBase): # Different from Spline.ServiceManager
        NAME = 'API::Management'
        # Allow access to dynamically load in new api services.
        def loadNewServiceApi(self, serviceApi):
            return self.apiMgr.loadServiceApiObject(serviceApi)
        def getAllApiNames(self):
            return self.apiMgr.apiDirectory.keys()
        def isApiAvailable(self, name):
            return name in self.apiMgr.apiDirectory
        def getAvailableMethods(self, serviceName):
            return self.apiMgr.getAvailableMethods(serviceName)
        # ! High-powered:
        def plugApi(self, name, api):
            return self.apiMgr.plugApi(name, api)
        def unplugApi(self, name):
            return self.apiMgr.unplugApi(name)
        def addSystemPath(self, path):
            return self.apiMgr.addSystemPath(path)
    @classmethod
    def addCmdlnOptions(self, parser):
        parser.add_option('--service-path', action = 'append', default = [])
        parser.add_option('--service-api', action = 'append', default = [])
    def __init__(self, application):
        self.application = application
        self.apiDirectory = {}
        # Extend sys.path first so configured service modules can import,
        # then install the built-in manager and the configured apis.
        for path in application.getConfigOptionMultiple('service-path', section = 'Services'):
            self.addSystemPath(path)
        self.loadServiceApiObject(self.ServiceManager)
        for serviceApi in application.getConfigOptionMultiple('service-api', section = 'Services'):
            self.loadServiceApiObject(serviceApi)
    def addSystemPath(self, newPath):
        if newPath not in sys.path:
            sys.path.append(newPath)
    def loadServiceApiObject(self, apiClass):
        # Accept a class or a dotted name; instantiate and register under
        # the instance's NAME.  Returns the instance.
        if isinstance(apiClass, basestring):
            apiClass = LookupObject(apiClass)
        # assert issubclass(apiClass, ServiceBase)
        api = apiClass()
        self.plugApi(api.NAME, api)
        return api
    def plugApi(self, name, api):
        # First registration wins; re-plugging an existing name is a no-op.
        if name not in self.apiDirectory:
            self.application.log('Installing API: %s' % name)
            self.apiDirectory[name] = api
            try: activate = api.Activate
            except AttributeError: pass
            else: activate(self)
    def unplugApi(self, name):
        try: api = self.apiDirectory[name]
        except KeyError: pass
        else:
            try: deactivate = api.Deactivate
            except AttributeError: pass
            else: deactivate()
            # Bug fix: actually remove the entry — previously the api stayed
            # registered after deactivation, so it could never be unplugged
            # nor re-plugged under the same name.
            del self.apiDirectory[name]
    def getAvailableMethods(self, serviceName):
        # Bug fix: this referenced undefined names (username/apiName) and
        # raised NameError on every call; it now looks up serviceName
        # directly.  Access control is applied by getUserApiObject when an
        # api is actually handed to a peer.
        try: apiObject = self.apiDirectory[serviceName]
        except KeyError: raise NameError(serviceName)
        # Return set of names in API exposed by service.
        return [name for name in dir(apiObject) if apiObject.isExposedMethod(name)]
    def getUserApiObject(self, username, apiName):
        # Permission-checked lookup: returns None when access is denied
        # (and raises KeyError for an unknown api name).
        if self.application.security.isUserActionPermitted(username, 'access-api:%s' % apiName):
            return self.apiDirectory[apiName]
    class ApiError(RuntimeError):
        pass
    class ApiMode(UnboundApiMode):
        # Some Entity RPC
        def __init__(self, peer, apiObject):
            BaseMode.__init__(self, peer)
            self.apiObject = apiObject
        #@breakOn
        def defaultCommand(self, engine, peer, name, *args, **kwd):
            # Resolve the named exposed method under the api's context
            # manager and invoke it with the peer's arguments.
            with self.apiObject(engine, peer, name) as method:
                if not callable(method):
                    # ApiError?
                    raise TypeError('Method not callable: %s' % name)
                return method(*args, **kwd)
| Python |
# Local debugging.
from pdb import set_trace, runcall
def breakOn(function):
def debugCall(*args, **kwd):
set_trace()
return function(*args, **kwd)
print 'DEBUGGING ON', functionName(function)
return debugCall
def functionName(function):
    # Render a qualified "module.name" label for a callable, substituting
    # '?' for either attribute when it is missing.
    mod = getattr(function, '__module__', '?')
    name = getattr(function, '__name__', '?')
    return '%s.%s' % (mod, name)
| Python |
#!/usr/bin/env python
# Django management entry point (legacy django-admin style).
# NOTE(review): execute_manager was removed in Django 1.6 — this script
# assumes an old Django; confirm the pinned version.
from django.core.management import execute_manager
import imp
try:
    imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
    sys.exit(1)
import settings
if __name__ == "__main__":
    execute_manager(settings)
| Python |
#!/usr/bin/env python
# Django management entry point (legacy django-admin style).
# NOTE(review): execute_manager was removed in Django 1.6 — this script
# assumes an old Django; confirm the pinned version.
from django.core.management import execute_manager
import imp
try:
    imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
    sys.exit(1)
import settings
if __name__ == "__main__":
    execute_manager(settings)
| Python |
from django.conf.urls.defaults import patterns, include, url
from django.conf import settings
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# Routes are resolved against the project's views module, derived from
# settings.PROJECT_MODULE.
urlpatterns = patterns(settings.PROJECT_MODULE + '.views',
    # Examples:
    # url(r'^$', 'dj.views.home', name='home'),
    # url(r'^dj/', include('dj.foo.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    # url(r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    # url(r'^admin/', include(admin.site.urls)),
    # Spline Management.
    url(r'^status$', 'viewStatus'),
    url(r'^$', 'viewStatus'),
)
# Static media (images/js/css) is served straight from FALCON_MEDIA_ROOT.
urlpatterns += patterns('django.views.static',
    url(r'^(?P<path>(?:images|js|css)/.*)', 'serve',
        dict(document_root = settings.FALCON_MEDIA_ROOT)))
import re
# Strips any leading/trailing slashes from a docs url base.
CLEAN_DOCS_URL = re.compile(r'^/*(.*?)/*$')
cleanDocsUrl = lambda u: CLEAN_DOCS_URL.match(u).group(1)
def ConfigureDocumentation(path, urlbase = None):
    # Mount a static-file route serving the documentation tree at *path*
    # under /<urlbase>/ (default 'docs'; surrounding slashes stripped).
    # Returns dict(link = <mounted base>) for templates to reference.
    from os.path import abspath
    docRoot = abspath(path)
    base = 'docs' if urlbase is None else cleanDocsUrl(urlbase)
    global urlpatterns
    staticRoute = url(r'^%s/(?P<path>.*)' % base, 'serve',
                      dict(document_root = docRoot))
    urlpatterns += patterns('django.views.static', staticRoute)
    return dict(link = base)
| Python |
# Django settings for falcon project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@example.com'),
)
MANAGERS = ADMINS
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '', # Or path to database file if using sqlite3.
        'USER': '', # Not used with sqlite3.
        'PASSWORD': '', # Not used with sqlite3.
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/media/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash.
# Examples: "http://media.lawrence.com/media/", "http://example.com/media/"
MEDIA_URL = ''
# Absolute path to the directory static files should be collected to.
# Don't put anything in this directory yourself; store your static files
# in apps' "static/" subdirectories and in STATICFILES_DIRS.
# Example: "/home/media/media.lawrence.com/static/"
STATIC_ROOT = ''
# URL prefix for static files.
# Example: "http://media.lawrence.com/static/"
STATIC_URL = '/static/'
# URL prefix for admin static files -- CSS, JavaScript and images.
# Make sure to use a trailing slash.
# Examples: "http://foo.com/static/admin/", "/static/admin/".
# NOTE(review): ADMIN_MEDIA_PREFIX was removed in Django 1.4 — confirm the
# pinned Django version.
ADMIN_MEDIA_PREFIX = '/static/admin/'
# Additional locations of static files
STATICFILES_DIRS = (
    # Put strings here, like "/home/html/static" or "C:/www/django/static".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# List of finder classes that know how to find static files in
# various locations.
STATICFILES_FINDERS = (
    'django.contrib.staticfiles.finders.FileSystemFinder',
    'django.contrib.staticfiles.finders.AppDirectoriesFinder',
    # 'django.contrib.staticfiles.finders.DefaultStorageFinder',
)
# XXX Make this unique, and don't share it with anybody.
# NOTE(review): a SECRET_KEY committed to source control should be rotated
# and loaded from the environment instead.
SECRET_KEY = 'jg!byy&v(&=d-a9pxcamg52c3q7t)o5i-1-so7s5re9ze599wl'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)
# Derive the project package name from this settings module's own package.
PROJECT_MODULE = '.'.join(__name__.split('.')[:-1])
ROOT_URLCONF = PROJECT_MODULE + '.urls'
from os.path import dirname, join as joinpath
# Media lives alongside this settings module.
FALCON_MEDIA_ROOT = joinpath(dirname(__file__), 'media')
from . import TEMPLATES_DIR as FALCON_VIEW_TEMPLATES
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    FALCON_VIEW_TEMPLATES,
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Uncomment the next line to enable the admin:
    # 'django.contrib.admin',
    # Uncomment the next line to enable admin documentation:
    # 'django.contrib.admindocs',
)
# A sample logging configuration. The only tangible logging
# performed by this configuration is to send an email to
# the site admins on every HTTP 500 error.
# See http://docs.djangoproject.com/en/dev/topics/logging for
# more details on how to customize your logging configuration.
LOGGING = {
    'version': 1,
    'disable_existing_loggers': False,
    'handlers': {
        'mail_admins': {
            'level': 'ERROR',
            'class': 'django.utils.log.AdminEmailHandler'
        }
    },
    'loggers': {
        'django.request': {
            'handlers': ['mail_admins'],
            'level': 'ERROR',
            'propagate': True,
        },
    }
}
| Python |
# Management Views.
from django.shortcuts import render_to_response
from django.template import Template, Context
from django.http import HttpResponse
from . import getTemplateFile
from ....packaging import Fault
from datetime import datetime
def render_template_response(template_name, **values):
    '''Render the named management template with *values* as its context.

    The template file is resolved through getTemplateFile; the rendered
    content is returned wrapped in an HttpResponse.
    '''
    template_file = getTemplateFile(template_name)
    # Bug fix: the template file handle was previously never closed.
    with open(template_file) as template:
        template_source = template.read()
    t = Template(template_source)
    content = t.render(Context(values))
    return HttpResponse(content)
# Short alias used by the views below.
render = render_template_response
def viewStatus(request):
    '''Render the management status page for the web controller.

    Also services the ``action`` query parameter ('reload-views' /
    'shutdown-manager') -- acknowledged below as a horrible place for
    actions, preserved as-is.
    '''
    wc = request.META['web.controller']
    messages = []
    # Horrible place for actions (in a status view).
    action = request.GET.get('action')
    try:
        if action == 'reload-views':
            wc.reloadViews()
        elif action == 'shutdown-manager':
            wc.shutdownManager()
    except Fault, f:
        messages.append(f.toString(True))
    # get the remote application.config repr
    mgrConfig = '(not yet available)'
    # Generator reading ``stats`` and ``spline`` from the enclosing scope;
    # both are assigned below, BEFORE the generator is consumed by list().
    def partnersInfo():
        nr = 0
        for p in stats['partners']:
            info = spline.GetPartnerInfo(p)
            # Best-effort: a partner may have no readable config file.
            try: info['configuration'] = open(info['config_file']).read()
            except (IOError, KeyError): pass
            try: boottime = info['boottime']
            except KeyError:
                boottime = '(Unknown)'
            else:
                boottime = datetime.fromtimestamp(boottime)
                # NOTE(review): '(Unknown)' is never written back to the
                # info dict on the KeyError branch -- possibly a misplaced
                # indent; confirm intended.
                info['boottime'] = boottime.ctime()
            info['nr'] = nr
            nr += 1
            yield info
    spline = wc.managerApi
    try:
        stats = spline.GetManagerStats()
        partners = list(partnersInfo())
        version = spline.Identify()
    except Fault, f:
        return HttpResponse(f.toString(True), content_type = 'text/plain')
    return render('status.html',
                  webControl = wc,
                  documentation = request.META['web.documentation'],
                  messages = messages,
                  spline = dict(version = version,
                                partners = partners,
                                mgrConfig = mgrConfig,
                                stats = stats))
| Python |
# Django Web Server Implementation.
import sys, os
from StringIO import StringIO
from django.core.management.validation import get_validation_errors
from django.core.handlers.wsgi import WSGIHandler
from django.conf import settings
from django.utils import translation
from django.core.servers.basehttp import AdminMediaHandler, WSGIServerException, \
WSGIRequestHandler, WSGIServer
class ValidationError(Exception):
    '''Raised when one or more Django models fail validation.'''
def ValidactivateOnce(app = None):
    '''Validate Django models and activate the configured language, once
    per process.  Raises ValidationError when any model fails validation.'''
    # The guard flag is stashed on the ``sys`` module so it survives a
    # reload of this module within the same process.
    if not getattr(sys, '__django_validactivated', False):
        s = StringIO()
        num_errors = get_validation_errors(s, app)
        if num_errors:
            raise ValidationError("One or more models did not validate:\n%s" % s.getvalue())
        translation.activate(settings.LANGUAGE_CODE)
        sys.__django_validactivated = True
def getWSGIServerExceptionMessage(e):
    '''Translate a WSGIServerException into a friendly one-line message.

    The exception is expected to wrap a socket error whose first arg is
    an errno; unrecognized shapes fall back to str(e).
    '''
    # Use helpful error messages instead of ugly tracebacks.
    friendly = {
        13: "You don't have permission to access that port.",
        98: "That port is already in use.",
        99: "That IP address can't be assigned-to.",
    }
    try:
        code = e.args[0].args[0]
        return friendly[code]
    except (AttributeError, KeyError):
        return str(e)
class FalconManagementServer(WSGIServer):
    '''WSGI server hosting the Django management UI for a web controller.

    Configuration comes from the application's [DjangoServer] section
    (hostname, port, admin_media_path, docs_path, docs_url).
    '''
    def __init__(self, controller):
        # Point Django at our sibling settings module, then validate the
        # models / activate the language exactly once per process.
        os.environ['DJANGO_SETTINGS_MODULE'] = __name__ + '.settings'
        ValidactivateOnce()
        config = controller.application.config
        config = config.ConfigSet(config, section = 'DjangoServer', simplify = True)
        wsgi_handler = WSGIHandler()
        # Optionally serve the Django admin media through the WSGI stack.
        admin_media_path = config.admin_media_path
        if admin_media_path:
            wsgi_handler = AdminMediaHandler(wsgi_handler, admin_media_path)
        # Fugh: config layer eww.. this will end up defaulting to Manager settings
        # because we're using a ConfigSet -- obviously not what's wanted, so SET THESE!
        hostname = config.hostname or 'localhost'
        port = config.port
        assert port
        from urls import ConfigureDocumentation
        self.documentation = ConfigureDocumentation(config.docs_path, config.docs_url)
        self.controller = controller
        WSGIServer.__init__(self, (hostname, port), WSGIRequestHandler)
        self.set_app(wsgi_handler)
    def setup_environ(self):
        # Called on successful server_bind, and passed down through the application
        # ware to the (WSGI) request class and is stored in the META attribute.
        WSGIServer.setup_environ(self)
        self.base_environ['web.controller'] = self.controller
        self.base_environ['web.documentation'] = self.documentation
    def server_handle_request(self):
        # Delegate to the ServerControl mixin (wired up by BuildServerClass),
        # translating server exceptions into friendly log messages.
        try: return self.baseControlClass.server_handle_request(self)
        except WSGIServerException, e:
            self.log(getWSGIServerExceptionMessage(e))
# Project and Views Support.
from os.path import join as joinpath, dirname

# Directory of the management view templates, next to this module.
TEMPLATES_DIR = joinpath(dirname(__file__), 'templates')

def getTemplateFile(name):
    '''Return the absolute path of the template called *name*.'''
    return joinpath(TEMPLATES_DIR, name)
| Python |
# Spline Web Controller.
from ...architecture import ServiceBase
from ...runtime import Object, nth, contextmanager
from ...client import Authorization, Session
from ...bus import ServiceManagerName
from ...bus.partners import getManagerOptions
from errno import EINTR
from types import ClassType as newClassObject
from select import error as select_error
import platform
HTML_PROLOGUE = '''\
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml"><head><meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
'''
# NOTE(review): the simple prologue below immediately overrides the XHTML
# one above -- presumably a deliberate simplification, but confirm intent
# before removing either definition.
HTML_PROLOGUE = '<html><head>\n'
class Management(ServiceBase):
    '''Spline web management service: runs the Django-based admin UI
    in-process and bridges it to the service manager partner.'''
    NAME = 'PenobscotRobotics[Spline::Web::Management]'
    SERVER_NAME = 'SplineManagementServer'
    class Controller(Object):
        # Used by views, etc.
        def __init__(self, application, service):
            self.application = application
            self.service = service
            self.document = dict(start = HTML_PROLOGUE)
        @property
        def hostName(self):
            # Network name of the machine we're running on.
            return platform.node()
        @property
        def managerSession(self):
            # Lazily open (and cache) an authorized client session to the
            # service manager, using credentials from the application config.
            try: return self.cachedManagerSession
            except AttributeError:
                mgrOptions = getManagerOptions(self.application.config)
                auth = Authorization(mgrOptions.username or '',
                                     mgrOptions.secret_key or '')
                client = auth.Open(mgrOptions.address, mgrOptions.port)
                self.cachedManagerSession = client
                return client
        @property
        def managerApi(self):
            'Open a connection to the Spline service manager partner.'
            # Todo: catch faults and produce synthetic tracebacks for django debug??
            # Since this is a context-manager, we can catch errors (like disconnections)
            # and re-connect. No wait -- the best thing is a connection-alive checker.
            try: return self.cachedManagerApi.invoke
            except AttributeError:
                api = self.managerSession.api.open(ServiceManagerName)
                self.cachedManagerApi = api
                return api.invoke
        def reloadViews(self):
            # Hot-reload the Django view and url modules (dev convenience).
            from dj import views, urls
            reload(views); reload(urls)
        def shutdownManager(self):
            # Close the cached API handle (if any), then ask the manager
            # application to stop.
            try: self.cachedManagerApi.close()
            except AttributeError: pass
            return self.managerSession.call.stopApplication()
    def Activate(self, apiMgr):
        # Build the web controller, construct the composed server class and
        # start serving in a background thread.
        ServiceBase.Activate(self, apiMgr)
        webController = self.Controller(apiMgr.application, self)
        serverClass = BuildServerClass(self.SERVER_NAME)
        self.server = serverClass(webController)
        self.server.server_start()
        apiMgr.application.log('%s Server -- Port %d' % \
                               (self.NAME, self.server.server_address[1]))
        # OMFG -- todo: open if configured this way.
        from os import environ
        environ['DISPLAY'] = '1' # X-based browser.
        import webbrowser
        webbrowser.open('http://%s:%s' % self.server.server_address)
    def getWebServerInfo(self):
        # Expose the bound port for introspection over the bus.
        return dict(port = self.server.server_address[1])
# Server Construction.
class ServerControl:
def set_running(self, value = True):
self.__running = value
def is_running(self):
try: return (self.__running)
except AttributeError:
return False
## def process_request_thread(self, *args, **kwd):
## import pdb; pdb.set_trace()
## return ThreadingMixin.process_request_thread(self, *args, **kwd)
def server_loop(self):
while self.is_running():
# import pdb; pdb.set_trace()
self.server_handle_request()
def server_handle_request(self):
try: self.handle_request()
except select_error, e:
if e.args[0] != EINTR:
# A process signal was sent, ignore and continue.
(etype, value, tb) = sys.exc_info()
raise etype, value, tb
except KeyboardInterrupt:
self.server_stop()
def server_start(self):
self.set_running(True)
nth(self.server_loop)
def server_stop(self):
self.set_running(False)
def server_shutdown(self):
# XXX!
self.server_stop()
self.server_close()
def runningState(self):
return self.is_running() and 'RUNNING' or 'STOPPED'
def BuildServerClass(ServerName):
    # This abstracts the server implementation somewhat.
    # Compose a classic class from the Django-based management server, the
    # threading mixin, and the ServerControl loop; baseControlClass lets
    # FalconManagementServer.server_handle_request delegate to the mixin.
    from SocketServer import ThreadingMixIn as ServerCompartment
    from falcon import FalconManagementServer
    return newClassObject(ServerName,
                          (FalconManagementServer,
                           ServerCompartment,
                           ServerControl),
                          dict(baseControlClass = ServerControl))
| Python |
# Active System Object Registry & Directory
# Builtin Pentacle Service Partner
'''
Pentacle Bus Active Database Partner
====================================
This module contains the ActiveRegistry service for a Pentacle database service partner.
Use the API Manager to load this service by specifying the class's fully qualified name.
(Relies on SQLObject.)
'''
from ..architecture import ServiceBase
from ..storage import StorageUnit
from ..runtime import Object
import os, errno
from os.path import join as joinpath, normpath, abspath
from types import ClassType as buildClass
class ActiveRegistry(ServiceBase):
    '''
    ActiveRegistry (Service)
    ========================
    This class is loaded as a Pentacle service, and activates itself.
    See the documentation for the `Activate` method to find out how to
    configure it.
    Note that the service name for this class is::
        PenobscotRobotics[Registry::Active]
    '''
    NAME = 'PenobscotRobotics[Registry::Active]'
    # Default directory (relative to the application) holding the *.db files.
    DEFAULT_DB_CONTAINER = 'ardbs'
    class RegistryStorage(StorageUnit):
        'A storage unit definition for a toplevel active registry entry.'
        STORAGE_REALM = 'ActiveRegistry'
    def Activate(self, apiMgr):
        # Resolve the database container path from configuration and create
        # it, tolerating an already-existing directory.
        ServiceBase.Activate(self, apiMgr)
        self.dbContainerPath = apiMgr.application \
            .getConfigOption('root-path', default = self.DEFAULT_DB_CONTAINER)
        try: os.makedirs(self.dbContainerPath)
        except OSError, e:
            if e.errno != errno.EEXIST:
                raise
    def OpenDirectory(self, name):
        'Pass the name of the toplevel registry entry, and the storage interface is returned.'
        return self.RegistryStorage.Open(self.apiMgr.application, name)
    def OpenDB(self, name = None):
        'Return a newly-opened (or existing) active database session object.'
        # No name means the shared anonymous in-memory database.
        if name is None:
            return ActiveDB.OpenMemory(self)
        return ActiveDB.Open(self, name)
class ActiveDB(Object):
    '''A named (or anonymous in-memory) SQLObject database session.

    Sessions are cached per name in ``_open_cache``; obtain them via
    ``Open``/``OpenMemory`` (or ``ActiveRegistry.OpenDB``) rather than by
    constructing directly.  Tables are declared with ``defineTable`` and
    accessed through ``openTable``.
    '''
    Meta = Object.Meta('name')
    # Cache of already-opened sessions, keyed by database name.
    _open_cache = {}
    # Sentinel cache key for the single shared in-memory database.
    _memory = object()
    @classmethod
    def Open(self, registry, name):
        'Return the (cached) on-disk database session called *name*.'
        try: return self._open_cache[name]
        except KeyError:
            import sqlobject
            uri = self.getUriFor(registry, name)
            conn = sqlobject.connectionForURI(uri)
            db = self._open_cache[name] = self(registry, sqlobject, name, conn)
            return db
    @classmethod
    def OpenMemory(self, registry):
        'Return the (cached) anonymous sqlite in-memory session.'
        try: return self._open_cache[self._memory]
        except KeyError:
            import sqlobject
            conn = sqlobject.connectionForURI('sqlite:///:memory:')
            # Bug fix: this previously referenced an undefined ``name``
            # variable; the in-memory session simply has no name.
            db = self._open_cache[self._memory] = self(registry, sqlobject, None, conn)
            return db
    @classmethod
    def getUriFor(self, registry, name):
        'Build the sqlite connection URI for database *name*.'
        # todo: also configure the protocol here? Could use sqlalchemy+mssql
        path = joinpath(registry.dbContainerPath, name + '.db')
        path = abspath(normpath(path))
        # Open the connection with a unique class registry for each db.
        # todo: should we be escaping the db name for this parameter value?
        return 'sqlite://%s?registry=%s' % (path, name)
    def __init__(self, registry, sqlobject, name, conn):
        self.registry = registry
        self.sqlobject = sqlobject
        self.name = name
        self.conn = conn
        # name -> Table.Schema for every table defined on this session.
        self.tables = {}
        self.sqlObjectBaseClass = sqlobject.SQLObject
        self.sqlObjectCols = sqlobject.col
    class Table(Object):
        'A connected table: pairs a Schema with its live SQLObject class.'
        Meta = Object.Meta('schema')
        class Schema(Object):
            # An externally-defined table.
            Meta = Object.Meta('name', ['columns', lambda schema:len(schema.columns)])
            class Column(Object):
                Meta = Object.Meta('name', ['data-type', lambda col:'%s(%s)' % (col.data_type, col.size)])
                def __init__(self, name, data_type, size = 0, fixed = None,
                             nullable = False, index = None, constraints = ()):
                    # NOTE(review): fixed/nullable/index/constraints are
                    # accepted but not yet honored by build().
                    self.name = name
                    self.data_type = data_type
                    self.size = size
                # Abstract column type name -> SQLObject column class name.
                COLUMN_TYPE_MAP = dict(string = 'StringCol',
                                       integer = 'IntCol',
                                       pickle = 'PickleCol')
                def build(self, sqlObjectCols):
                    # Build the actual SQLObject column.
                    colClass = getattr(sqlObjectCols, self.COLUMN_TYPE_MAP[self.data_type])
                    return (self.name, colClass())
                @classmethod
                def define(self, colDef):
                    'Build a Column from a (name, args[, kwd]) sequence.'
                    if isinstance(colDef, (tuple, list)):
                        # Unpack from schema definition.
                        if len(colDef) == 3:
                            (name, args, kwd) = colDef
                        elif len(colDef) == 2:
                            (name, args) = colDef
                            # Bug fix: ``kwd`` was previously left unbound
                            # on this branch, raising NameError below.
                            kwd = {}
                        else:
                            raise IndexError(repr(colDef))
                    else:
                        raise TypeError(type(colDef).__name__)
                    # Construct this column specification.
                    return self(name, *args, **kwd)
            def __init__(self, name, *columns):
                self.name = name
                self.columns = columns
                self.table = None # One connected SQLObject class per schema.
            def build(self, db):
                # Builds the SQLObject class definition for this schema.
                return buildClass(db.getTableName(self.name),
                                  (db.sqlObjectBaseClass,),
                                  dict(c.build(db.sqlObjectCols)
                                       for c in self.columns))
            def open(self, db):
                'Connect (and lazily create) the table; returns its class.'
                if self.table is None:
                    self.table = db.tableConnectImpl(self.build(db))
                return self.table
            def drop(self, db):
                # Bug fix: previously referenced a nonexistent ``tableObj``
                # attribute.  open() guarantees a connected class to drop
                # (note it creates the table first if absent; net no-op).
                db.tableDropImpl(self.open(db))
        def __init__(self, db, schema):
            self.schema = schema
            self.tableObj = schema.open(db)
        # Accessors.
        def newRecord(self, **kwd):
            'Insert and return a new row built from keyword values.'
            return self.tableObj(**kwd)
        def getRecord(self, **kwd):
            'Select rows matching the keyword equality constraints.'
            return self.tableObj.selectBy(**kwd)
    # Helpers.
    def getTableName(self, name):
        # Note: the classes will be differentiated via class registry.
        return name
    def tableConnectImpl(self, t):
        # Bind the class to our connection, creating the table on demand.
        t._connection = self.conn
        if not t.tableExists():
            t.createTable()
        return t
    def tableDropImpl(self, t):
        if t.tableExists():
            t.dropTable()
    # Accessors.
    def tableNeedsDefinition(self, name):
        '''True when *name* has not yet been defined on this session.

        Bug fix: this previously returned the opposite (``name in
        self.tables``), contradicting both its name and the documented
        usage example below the class.
        '''
        return name not in self.tables
    def defineColumn(self, name, *args, **kwd):
        'Package a column definition tuple for defineTable.'
        return (name, args, kwd)
    def defineTable(self, name, *columns):
        'Declare a table schema (once); returns its name.'
        # what about alter table??
        assert self.tableNeedsDefinition(name)
        columns = map(self.Table.Schema.Column.define, columns)
        self.tables[name] = self.Table.Schema(name, *columns)
        return name
    def openTable(self, name):
        'Connect the named table and return a Table accessor for it.'
        return self.Table(self, self.tables[name])
    def dropTable(self, name):
        # Bug fix: previously ``self.tables[name].schema.drop(self)`` --
        # but the stored object *is* the Schema.
        self.tables[name].drop(self)
    def listTables(self):
        'Names of all defined tables.'
        return self.tables.keys()
    def close(self):
        # Placeholder; connections are currently left to the GC.
        pass
## with client.api('PenobscotRobotics[Registry::Active]') as registry:
## with closing(registry.OpenDB('mine')) as db:
## db.defineTable('main',
## db.defineColumn('systemName', 'string'),
## db.defineColumn('version', 'integer'))
##
## tableDefs = registry.OpenDirectory('my/table/definitions')
## for (name, columns, records) in tableDefs.iteritems():
## if db.tableNeedsDefinition(name):
## db.defineTable(name, *columns)
##
## t = db.openTable(name)
## for r in records:
## t.newRecord(**r)
| Python |
# A task-scheduling implementation.
# todo: a more unique name
from ..architecture import ServiceBase
from ..runtime import nth, Object
class CronManager(ServiceBase):
    '''Task-scheduling service: pairs configured commands with schedule
    strings and runs due tasks from a background thread.

    Scheduling itself is still stubbed (a fixed 10-second idle tick).
    '''
    NAME = 'System::Cron::Manager'
    class Task(Object):
        'A single scheduled command.'
        @classmethod
        def FromConfig(self, command, schedule):
            'Build a task from its configured command and schedule strings.'
            task = self(command)
            task.setSchedule(schedule)
            return task
        def __init__(self, command):
            self.command = command
        def setSchedule(self, scheduleString):
            # TODO: parse the schedule string -- currently a stub.
            pass
        def run(self):
            # TODO: execute self.command -- currently a stub.
            pass
    def Activate(self, apiMgr):
        'Load tasks from configuration and start the scheduler thread.'
        ServiceBase.Activate(self, apiMgr)
        # Bug fix: ``sleep`` was used below without ever being imported in
        # this module (NameError in the scheduler thread).
        from time import sleep
        self.tasks = dict()
        # Load tasks from configuration: [Tasks] maps names to commands,
        # [Schedule] maps the same names to schedule strings; only names
        # present in both sections become tasks.
        taskCommands = apiMgr.application.getConfigSection('Tasks').asDict()
        schedule = apiMgr.application.getConfigSection('Schedule')
        for option in schedule.options():
            try: cmd = taskCommands[option]
            except KeyError:
                continue
            self.tasks[option] = self.Task.FromConfig(cmd, schedule.getOption(option))
        def delaySchedule():
            # todo: calculate next delay from tasks.
            while True:
                yield ([], 10)
        waitForNextTask = delaySchedule().next
        # Start timing.
        def runScheduler():
            while True:
                (tasks, delay) = waitForNextTask()
                sleep(delay)
                for t in tasks:
                    # todo: post as apiMgr.application.engine message.
                    t.run()
        nth(runScheduler)
| Python |
# Westmetal Document Services
from pentacle.architecture import ServiceBase
from webbrowser import get as getBrowser
class DocumentBrowserManager(ServiceBase):
    '''Westmetal service exposing the local ``webbrowser`` module so that
    peers can open URLs on this machine.'''
    NAME = '@westmetal/document/browser'
    # Symbolic window-target names -> webbrowser 'new' codes.
    WINDOW = {'normal': 0, 'new': 1, 'tab': 2}
    def openUrl(self, url, window = 'normal', autoraise = True, browser = None):
        '''Open *url* in the (optionally named) local browser.

        window selects same-window / new-window / new-tab behaviour.
        '''
        target = self.WINDOW[window]
        handle = getBrowser(browser)
        handle.open(url, new = target, autoraise = bool(autoraise))
| Python |
from ..architecture import ServiceBase, Engine
from ..runtime import Synthetic, LookupObject, getCurrentSystemTime, contextmanager, breakOn
# (Test) Services.
class ClockService(ServiceBase):
    '''(Test) service measuring elapsed time from a caller-set epoch.'''
    NAME = 'ClockService::API'
    def setClockTicking(self, clockTime):
        # Record the reference time getClockTime measures from.
        self.clockTime = clockTime
    def getClockTime(self):
        # Elapsed time since setClockTicking; AttributeError if the clock
        # was never started.
        return getCurrentSystemTime() - self.clockTime
class SystemDebug(ServiceBase):
NAME = 'System::Debugging'
def stopAndReload(self):
# Hmm. Quit the engine, wait for it to close, reload
# all pentacle modules and restart??
pass
def copyover(self): # New args??
self.application.engine.postMessage(CopyoverMessage())
def enableTracing(self):
set_trace()
def fail(self, message = 'Force Fail'):
raise RuntimeError(message)
# lookupObject = staticmethod(LookupObject)
def lookupObject(self, name):
return LookupObject(name)
def evaluate(self, source):
code = compile(source, '', 'eval')
return eval(source, globals(), globals())
def execute(self, source):
code = compile(source, '', 'exec')
exec code in globals(), globals()
def compile(self, source, filename, mode):
return compile(source, filename, mode)
def hotModule(self, code = None, name = None, filename = None, reload = False):
from types import CodeType, ModuleType
if isinstance(code, basestring):
code = compile(code, filename or '', 'exec')
else:
assert isinstance(code, CodeType)
from sys import modules
if name is None:
raise NotImplementedError('RandomModuleName()')
name = RandomModuleName(modules)
# New
mod = ModuleType(name)
modules[name] = mod
else:
try: mod = modules[name]
except KeyError:
# New
mod = ModuleType(name)
modules[name] = mod
else:
assert reload
ns = mod.__dict__
if code:
exec code in ns, ns
return mod
class CopyoverMessage(Engine.Message):
    '''Engine message requesting an in-place re-exec of the process.

    Currently a design stub: dispatch() imports what it would need and
    sketches the intended sequence in comments, but performs no work.
    '''
    def dispatch(self):
        from os import execve
        from sys import argv
        # Shut down the network:
        # Stop receiving/handling any new commands.
        # Flush remaining output buffers.
        # Write session map to disk (to hold onto connections)
        # Initiate execve(argv)
class ObjectDirectory(ServiceBase):
    '''Process-wide name/object registry, stored on the ``sys`` module so
    it survives reloads of this module.'''
    NAME = 'System::Directory'
    def getWholeDirectory(self):
        'Return (creating on first use) the shared directory dict.'
        import sys
        try:
            return sys._object_directory
        except AttributeError:
            directory = {}
            sys._object_directory = directory
            return directory
    def getObject(self, name):
        'Look up *name* in the directory; None when absent.'
        return self.getWholeDirectory().get(name)
    def setObject(self, name, value):
        'Bind *name* to *value* in the shared directory.'
        self.getWholeDirectory()[name] = value
    # Mapping-style aliases.
    __getitem__ = getObject
    __setitem__ = setObject
    def newObject(self, **values):
        'Build an ad-hoc attribute bag from keyword values.'
        return Synthetic(**values)
class RemoteConsole(ServiceBase):
    # Redirect stdin/stdout, provide parallel interaction thread
    # (because the streams shall be bound to engine/network comm)
    NAME = 'Console::Remote'
    # This should grab console for only the requesting peer (assuming there is one),
    # and automatically release it on disconnect. For now, it's exposed to everyone!
    # NOTE(review): ``NewBuffer`` is neither defined nor imported anywhere
    # in this module -- the accessors below will NameError until it is
    # supplied; confirm where it is meant to come from.
    def getStdoutBuffer(self):
        'Lazily create and return the captured-stdout buffer.'
        try: return self.stdout_buffer
        except AttributeError:
            self.stdout_buffer = buf = NewBuffer()
            return buf
    def getStderrBuffer(self):
        'Lazily create and return the captured-stderr buffer.'
        try: return self.stderr_buffer
        except AttributeError:
            self.stderr_buffer = buf = NewBuffer()
            return buf
    def getStdinChannel(self):
        'Lazily create and return the remote-stdin channel.'
        try: return self.stdin_channel
        except AttributeError:
            self.stdin_channel = channel = NewBuffer()
            return channel
    def readStdout(self, *args, **kwd):
        'Read captured stdout output.'
        return self.getStdoutBuffer().read(*args, **kwd)
    def readStderr(self, *args, **kwd):
        'Read captured stderr output.'
        # Bug fix: this previously read from the *stdout* buffer.
        return self.getStderrBuffer().read(*args, **kwd)
    def writeStdin(self, *args, **kwd):
        'Feed input to the redirected stdin channel.'
        return self.getStdinChannel().write(*args, **kwd)
    @contextmanager
    def focus(self):
        '''Temporarily swap sys.std{out,err,in} for this console's buffers,
        restoring the originals on exit.'''
        # Todo: Shouldn't allow this more than once...
        # Bug fix: ``sys`` was never imported in this module.
        import sys
        # NOTE(review): this also rebinds sys.__stdout__/__stderr__/__stdin__,
        # clobbering the interpreter's pristine streams -- confirm intended.
        stdout = sys.__stdout__ = sys.stdout
        stderr = sys.__stderr__ = sys.stderr
        stdin = sys.__stdin__ = sys.stdin
        sys.stdout = self.getStdoutBuffer()
        sys.stderr = self.getStderrBuffer()
        sys.stdin = self.getStdinChannel()
        try: yield
        finally:
            sys.stdout = stdout
            sys.stderr = stderr
            sys.stdin = stdin
    def open(self):
        'Enter focus() manually; returns the exit callable that restores the streams.'
        cx = self.focus()
        cx.__enter__()
        return cx.__exit__
class Tunnel(ServiceBase):
    '''Service for opening outbound pentacle client connections on behalf
    of a peer.'''
    NAME = 'Pentacle::Tunnel'
    def openClient(self, address, port):
        'Open and return a new pentacle Client connected to (address, port).'
        from pentacle.client import Client
        return Client.Open(address, port)
| Python |
# Pentacle O-O P2P
#
# Copyright (C) 2011 Clint Banis
# Author: Clint Banis <cbanis@gmail.com>
# URL: <http://www.penobscotrobotics.us>
'''
Object-Oriented Peer-to-Peer RPC access library and server application framework.
:author: `Clint Banis <gmail.com>`__
:requires: Python 2.6+
:version: 0.2
:group Application: application, config, debugging, security, storage
:group Core: architecture, encoding, packaging, runtime
:group Networking: network, client, server
:group Service Partner Bus: bus, bus.partners, bus.services, bus.spline
:bug: Some synchronization issues cause intermittent network failures
:todo: Optimize the package format to eliminate unnecessary marks
:todo:
:see: `the Pentacle code repository <http://frauncache.googlecode.com>`__
:see: `Penobscot Robotics, Co.<http://www.penobscotrobotics.us>`__
:copyright: |copy| 2011 Clint Banis & Penobscot Robotics
.. |copy| unicode:: 0xA9 .. copyright sign
'''
__author__ = 'Clint Banis' # __contact__
__copyright__ = '2011 Clint Banis & Penobscot Robotics' # __license__
__version__ = 0.1
__docformat__ = 'restructuredtext en'
__url__ = 'http://www.penobscotrobotics.us/cms/page/pentacle'
def buildApplicationVersion(appName, busVersion):
    "Combine an application name and bus version into 'name/version'."
    return str(appName) + '/' + str(busVersion)
def DEBUG(*args):
    # Global debug hook; currently a no-op (uncomment the print to enable).
    pass # print '[DEBUG]', ' '.join(map(str, args))
# Install DEBUG as a builtin so every module can call it without importing.
import __builtin__ as builtin
builtin.DEBUG = DEBUG
| Python |
# Security Management
__all__ = ['RightsManagement', 'CalculateDigest', 'GenerateSecretKey']
from hmac import HMAC
import random
from .architecture import *
from .storage import *
def CalculateDigest(iv, *values):
    '''Return the hexadecimal HMAC digest of *values*, keyed with *iv*.

    Each value is folded into a single HMAC (module default hash) in order.
    '''
    mac = HMAC(iv)
    for value in values:
        mac.update(value)
    return mac.hexdigest()
def GenerateSecretKey(keySize, rng = None):
    '''Return *keySize* random bytes (as a string) for use as a secret key.

    rng: optional random.Random-compatible generator (handy for tests);
    a fresh one is created when omitted.  NOTE(review): for production key
    material a cryptographic source (e.g. os.urandom) is preferable to
    random.Random -- confirm requirements before hardening.
    '''
    if rng is None:
        rng = random.Random()
    # ``range`` instead of py2-only ``xrange``: behaviorally identical
    # here and keeps the helper portable.
    return ''.join(chr(rng.getrandbits(8)) for x in range(keySize))
class RightsManagement(Component):
    '''Role-based rights management over per-principal RightsStorage units.

    Permissions are named actions granted or explicitly denied per role;
    a user's effective rights are the union of his roles' grants, with any
    explicit denial acting as a veto.
    '''
    class SecurityException(Exception):
        'Raised when an action is not permitted for a principal.'
        def __init__(self, action, principal = None):
            self.action = action
            self.principal = principal
            Exception.__init__(self, '%s denied %s' % (principal or '--', action))
    class ExplicitlyDenied(SecurityException):
        'Signals an action found on an explicit deny list.'
        pass
    def getStorage(self, role):
        'Open the rights storage unit for *role*.'
        return self.RightsStorage.Open(self.application, role)
    def getUserRoles(self, username):
        "Return the roles recorded in the user's storage."
        storage = UserStorage.Open(self.application, username)
        return storage.getUserRoles()
    def isActionPermitted(self, roles, actionName):
        '''True when any role in *roles* grants *actionName*.

        An explicit denial on any role vetoes the action immediately.
        All outcomes are audited.
        '''
        if not isinstance(roles, (list, tuple)):
            roles = [roles]
        if not roles:
            # Robustness fix: with no roles nothing is permitted (the audit
            # line below would otherwise NameError on ``thisRole``).
            return False
        permitted = False
        for thisRole in roles:
            storage = self.getStorage(thisRole)
            try: test = storage.isActionPermitted(actionName, explicit = True)
            except self.ExplicitlyDenied:
                self.application.audit('Action %s explicitly denied for %s' % (actionName, thisRole))
                return False
            else:
                if test:
                    permitted = True
        self.application.audit('Action %s %s for %s' % (actionName, permitted and 'permitted' or 'denied', thisRole))
        return permitted
    def isUserActionPermitted(self, username, actionName):
        "Does any of the user's roles permit the action?"
        roles = self.getUserRoles(username)
        return self.isActionPermitted(roles, actionName)
    def checkActionPermitted(self, role, actionName):
        '''Raise SecurityException unless *role* permits *actionName*.
        Bug fix: the test was previously inverted (raised on success).'''
        if not self.isActionPermitted(role, actionName):
            raise self.SecurityException(actionName, role)
    def checkUserActionPermitted(self, username, actionName):
        '''Raise SecurityException unless the user may perform the action.
        Bug fix: the test was previously inverted (raised on success).'''
        if not self.isUserActionPermitted(username, actionName):
            raise self.SecurityException(actionName, username)
    def grantAction(self, roleName, actionName):
        'Grant the action to the role (audited).'
        with self.getStorage(roleName) as access:
            if access.grant(actionName):
                self.application.audit('Permission Granted: %s for %s' % (actionName, roleName))
    def revokePermittedAction(self, roleName, actionName):
        'Remove a previously granted action (audited).'
        with self.getStorage(roleName) as access:
            if access.revokeGranted(actionName):
                self.application.audit('Granted Permission Revoked: %s for %s' % (actionName, roleName))
    def revokeDeniedAction(self, roleName, actionName):
        'Remove a previous explicit denial (audited).'
        with self.getStorage(roleName) as access:
            if access.revokeDenied(actionName):
                self.application.audit('Denied Permission Revoked: %s for %s' % (actionName, roleName))
    def denyAction(self, roleName, actionName):
        'Explicitly deny the action for the role (audited).'
        with self.getStorage(roleName) as access:
            if access.deny(actionName):
                self.application.audit('Permission Denied: %s for %s' % (actionName, roleName))
    def grantPublicAction(self, actionName):
        'Grant the action on the shared public role.'
        return self.grantAction(UserStorage.Interface.PUBLIC_ROLE, actionName)
    def getUserPrincipalName(self, username):
        "Map a username to that user's personal principal (role) name."
        return 'user-' + username
    def grantUserAction(self, username, actionName):
        "Grant the action directly on the user's personal principal."
        return self.grantAction(self.getUserPrincipalName(username), actionName)
    def enableSuperuser(self, roleName):
        'Give the role the allow-everything flag.'
        with self.getStorage(roleName) as access:
            access.enableSuperuser()
class RightsStorage(StorageUnit):
    'Persistent per-principal (role) permission records.'
    STORAGE_REALM = 'RightsStorage'
    class Interface(StorageUnit.Interface):
        # Storage keys holding the two action-name lists.
        PERMITTED_ACTIONS = 'permitted-actions'
        DENIED_ACTIONS = 'explicitly-denied-actions'
        def isAllPermitted(self):
            'Superuser flag: every action allowed.'
            return self.getValue('granted-all-permissions') == 'true'
        def isNonePermitted(self):
            'Lockdown flag: every action refused.'
            return self.getValue('granted-no-permissions') == 'true'
        def enableSuperuser(self):
            'Turn on the allow-everything flag for this principal.'
            self.setValue('granted-all-permissions', 'true')
        def isActionExplicitlyDenied(self, actionName):
            'Is the action on the explicit deny list?'
            return actionName in self.getValue(self.DENIED_ACTIONS, [])
        def isActionPermitted(self, actionName, explicit = False):
            '''Decide whether *actionName* is allowed for this principal.

            Lockdown and superuser flags take precedence; an explicit
            denial either raises (explicit=True) or returns False.
            '''
            # Superuser/Lockdown:
            if self.isNonePermitted():
                return False
            if self.isAllPermitted():
                return True
            if self.isActionExplicitlyDenied(actionName):
                if explicit:
                    raise RightsManagement.ExplicitlyDenied(actionName)
                return False
            return actionName in self.getValue(self.PERMITTED_ACTIONS, [])
        def changePermission(self, typeName, actionName, grant):
            '''Insert (grant=True) or remove (grant=False) *actionName* in
            the list stored under *typeName*; persists and returns True.'''
            actions = self.getValue(typeName, [])
            present = actionName in actions
            if grant and not present:
                actions.append(actionName)
            elif present and not grant:
                actions.remove(actionName)
            self.setValue(typeName, actions)
            return True
        def grant(self, actionName):
            'Add the action to the permitted list.'
            return self.changePermission(self.PERMITTED_ACTIONS, actionName, True)
        def deny(self, actionName):
            'Add the action to the explicit deny list.'
            return self.changePermission(self.DENIED_ACTIONS, actionName, True)
        def revokeGranted(self, actionName):
            'Remove the action from the permitted list.'
            return self.changePermission(self.PERMITTED_ACTIONS, actionName, False)
        def revokeDenied(self, actionName):
            'Remove the action from the explicit deny list.'
            return self.changePermission(self.DENIED_ACTIONS, actionName, False)
| Python |
from com import skype
from sj.plugin import SkypeJython
from sj.utils import parseCmdln, getChatList
from sj.utils import examine as MyExamine
import sys
# Default values for the command-line options understood by parseCmdln:
# which chat to attach to (topic / user_id), UI and lifecycle flags, and
# the module whose namespace seeds the shell's globals.
DefaultOptions = {'topic': 'Jython',
                  'user_id': '',
                  'windows': False,
                  'inspect': False,
                  'no_shell': False,
                  'plugin_id': '',
                  'show_options': False,
                  'store_connector_events': False,
                  'globals_module': 'sj.plugin'}
def main(argv = None):
    '''Entry point: parse options, attach to Skype and start the shell.

    Flow: optionally dump options / chat windows, build the SkypeJython
    shell (unless --no-shell), then either drop into an inspection REPL
    or install the system-tray resource.
    '''
    options = parseCmdln(argv, **DefaultOptions)
    # Keep the process alive while the Skype listener thread runs.
    skype.Skype.setDaemon(False)
    try:
        if options.show_options:
            print options
        if options.windows:
            print getChatList()
            # XXX: shutdown skype connection??
            return
        if not options.no_shell:
            global shell
            shell = SkypeJython(options)
        if options.inspect:
            if options.no_shell:
                ns_locals = {}
                ns_globals = globals()
            else:
                ns_locals = shell.getLocalNamespace()
                ns_globals = shell.getGlobalNamespace()
            # interact(ns_locals, ns_globals)
            MyExamine(ns_locals, ns_globals)
            sys.exit(0)
        else:
            # NOTE(review): with --no-shell but without --inspect, ``shell``
            # is unbound here -- confirm that combination is rejected
            # upstream (in parseCmdln) or guard it.
            try: from org.fraun import SkypeJython as ResourceClass
            except ImportError: pass
            else: shell.setupSystemTray(ResourceClass.getClass())
    except skype.NotAttachedException:
        print '** Not Attached'
| Python |
from com import skype
from java.awt import SystemTray
from java.awt import Image, Toolkit
from java.awt import PopupMenu, MenuItem, TrayIcon
from java.awt import AWTException
from java.awt.event import ActionListener
import sys
import re
from sj.utils import *
from sj.utils import _capture, _count_lines
# Chat statuses counted as "active" conversations when scanning for an
# existing chat to attach to (see SkypeJython.getChatSession).
ACTIVE_STATUSES = [skype.Chat.Status.DIALOG,
                   skype.Chat.Status.LEGACY_DIALOG,
                   skype.Chat.Status.MULTI_SUBSCRIBED]
# A plugin id is a bracketed identifier, e.g. '[my-plugin_1]'.
PLUGIN_ID_PATTERN = re.compile(r'\[([a-zA-Z0-9_\-]*)\]')
def validatePluginId(pluginId):
    '''Return the identifier inside the leading '[...]' of *pluginId*,
    or the empty string when it does not start with a bracketed id.'''
    match = PLUGIN_ID_PATTERN.match(pluginId)
    return match.group(1) if match is not None else ''
def importModuleNamespace(module_name):
    '''Import *module_name* (dotted names allowed) and return its dict.'''
    # The [''] fromlist makes __import__ hand back the leaf module rather
    # than the top-level package.
    imported = __import__(module_name, globals(), locals(), [''])
    return imported.__dict__
def resolveGlobalsFromModule(module_name):
    '''Map a configured module name to a globals dict for the shell.

    '--' yields a fresh empty namespace; an empty/false name yields this
    module's own globals; anything else is imported and its dict returned.
    '''
    if module_name == '--':
        return {}
    if module_name:
        return importModuleNamespace(module_name)
    return globals()
_command_table = {}
def registerCommand(function, *namelist):
for name in namelist:
_command_table[name] = function
def parseCommand(command):
    '''Split a raw command line into (command-word, argument-string).

    A non-alphabetic leading character ('!', ';', ...) is its own
    one-character command.  (Note: the two-part split branch returns a
    list, matching historical behaviour.)
    '''
    command = command.lstrip()
    if not command:
        return ('', '')
    leader = command[0]
    if not leader.isalpha():
        return (leader, command[1:])
    parts = command.split(' ', 1)
    if len(parts) == 2:
        return parts
    return (parts[0], '')
def findCommand(command):
    'Resolve a raw command line to (handler-or-None, command, argstr).'
    (name, argstr) = parseCommand(command)
    handler = _command_table.get(name.lower())
    return (handler, name, argstr)
class CommandEvent:
    '''A parsed command invocation: the command name plus its arguments
    in both raw (argstr) and whitespace-split (args) form.'''
    def __init__(self, commandName, argstr = ''):
        self.commandName = commandName
        self.argstr = argstr
        self.args = argstr.split()
        # Lowercased first argument, or '' when there are no arguments.
        self.larg = self.args[0].lower() if self.args else ''
def doBangCommand(shell, event):
    'Magical command makes special non-alphanumerics work.'
    word = event.larg
    if not word:
        return
    fullName = event.commandName + word
    handler = _command_table.get(fullName)
    if not callable(handler):
        return
    # Drop the command word itself from the argument text.
    remainder = event.argstr.lstrip()[len(word):]
    return handler(shell, CommandEvent(fullName, remainder))
def doPythonCommand(shell, event):
    'Evaluate the remainder of the line as Python in the shell.'
    source = event.argstr.lstrip()
    shell.processPythonCommand(source)
def doQuitShell(shell, event):
    'Terminate the shell session.'
    shell.quitShell()
def doEcho(shell, event):
    'Echo the argument text back into the chat.'
    text = event.argstr.lstrip()
    shell.sendMessage(text, type = 'cash')
def doChatSessionWindows(shell, event):
    'Send the list of open chat sessions into the chat.'
    shell.sendMessage(getChatList())
def doShowConnectorEvents(shell, event):
    'Send the stored connector event messages (one per line) to the chat.'
    # Bug fix: this previously read ``self.events`` -- there is no ``self``
    # in a module-level handler; the stored events live on the shell.
    shell.sendMessage('\n'.join([ev.message for ev in shell.events]),
                      type = 'cash')
def doInteractiveShell(shell, event):
    'Drop into the interactive (console) shell.'
    shell.interactiveShell()
def doInterplug(shell, event):
    'Run an inter-plugin command; relay any string result to the chat.'
    outcome = shell.processInterplug(event.args)
    if isinstance(outcome, basestring):
        shell.sendMessage(outcome, type = 'mp')
def doRawConnectorCommand(shell, event):
    'Pass the raw argument text straight to the Skype connector.'
    c = skype.connector.Connector.getInstance()
    # Bug fix: previously read ``shell.argstr`` -- the argument text lives
    # on the event, not the shell.
    c.execute(event.argstr.lstrip())
def doHelp(shell, event):
    '''Describe each named command, or list every registered command when
    no names are given.

    Bug fix: the original used ``for/else`` -- since the loop never
    breaks, the full command listing was ALWAYS sent, even after
    describing specific commands.
    '''
    if not event.args:
        shell.sendMessage(', '.join(_command_table.keys()))
        return
    for command in event.args:
        action = _command_table.get(command.lower())
        name = getattr(action, '__name__', '')
        doc = getattr(action, '__doc__', '')
        shell.sendMessage('%s - %s\n%s' % (name, command, doc))
# Command-table wiring: bind each handler above to its chat alias(es).
registerCommand(doBangCommand        , '!') # '#', '%', '$'
registerCommand(doPythonCommand      , ';', '!eval')
registerCommand(doQuitShell          , '!quit')
registerCommand(doEcho               , '!echo')
registerCommand(doChatSessionWindows , '!windows')
registerCommand(doShowConnectorEvents, '!events')
registerCommand(doInteractiveShell   , '!interactive-shell')
registerCommand(doInterplug          , '!inter-plug')
registerCommand(doRawConnectorCommand, '!raw')
registerCommand(doHelp               , '!help')
class SkypeJython:
    """Chat-driven Python shell bound to a Skype chat session (Jython).

    Incoming chat messages that were not sent by this shell itself are
    parsed as commands; results are sent back into the same chat.

    NOTE(review): setupSystemTray references ``JythonShell`` (see below),
    which is not defined in this module -- likely a leftover from a class
    rename; confirm before relying on the tray feature.
    """
    class JythonMessageAdapter(skype.ChatMessageAdapter):
        # Feeds chat traffic for our chat session back into the shell.
        def __init__(self, shell):
            self.shell = shell
        def chatMessageSent(self, sent):
            if sent.getChat().equals(self.shell.chat):
                self.shell.incomingMessage(sent.getContent())
        def chatMessageReceived(self, sent):
            if sent.getChat().equals(self.shell.chat):
                self.shell.incomingMessage(sent.getContent())
    class ConnectorAdapter(skype.connector.AbstractConnectorListener):
        # Records low-level connector events on the shell for later review.
        def __init__(self, shell):
            self.shell = shell
        def messageReceived(self, message): # ConnectorMessageEvent
            # self.shell.log('Received-Message: %r' % message)
            self.shell.addEvent(message)
        def messageSent(self, message): # ConnectorMessageEvent
            # self.shell.log('Message-Sent: %r' % message)
            self.shell.addEvent(message)
        def statusChanged(self, status): # ConnectorStatusEvent
            # self.shell.log('Status-Changed: %r' % status)
            self.shell.addEvent(status)
    def __init__(self, options):
        # Wire the shell to a chat session and (optionally) the connector.
        self.events = []
        self.plugin_id = validatePluginId('[%s]' % options.plugin_id)
        self.plugin_actions = {}
        self.ns_locals = {}
        self.ns_globals = resolveGlobalsFromModule(options.globals_module)
        # messaging doubles as a counter of our own pending sends (False == 0);
        # incomingMessage uses it to skip echoes of messages we sent.
        self.messaging = False
        self.chat = self.getChatSession(options)
        self.chat.open()
        adapter = self.JythonMessageAdapter(self)
        skype.Skype.addChatMessageListener(adapter)
        if options.store_connector_events:
            adapter = self.ConnectorAdapter(self)
            connector = skype.connector.Connector.getInstance()
            connector.addConnectorListener(adapter)
        self.sendMessage('%s Connected' % self.__class__.__name__,
                         type = 'handshake')
    def addEvent(self, object):
        # Append a connector event for later '!events' review.
        self.events.append(object)
    def lastEventMessage(self):
        # Message text of the most recent connector event.
        return self.events[-1].message
    def log(self, message):
        print message
    def getChatSession(self, options):
        # Prefer a direct user chat; otherwise reuse an active chat whose
        # window title matches the topic, or create a new topic chat.
        if options.user_id:
            return skype.Skype.chat(options.user_id)
        topic = options.topic
        if topic:
            # Try to find a topic already existing:
            for chat in skype.Skype.getAllChats():
                # XXX What if windowTitle is different??
                # And topic is write-only!!
                if chat.status not in ACTIVE_STATUSES:
                    continue
                if chat.windowTitle == topic:
                    return chat
            return skype.Skype.chat(topic)
    def quitShell(self):
        # Announce then terminate the process.
        self.sendMessage('Quitting %s' % self.__class__.__name__,
                         type = 'bye')
        from sys import exit
        exit(0)
    def sendMessage(self, message, type = None):
        # XXX synchronized!
        # Bump the self-echo counter before sending (see incomingMessage).
        self.messaging += 1
        if type is not None:
            message = '(%s) %s' % (type, message)
        self.chat.send(message)
    def incomingMessage(self, message):
        # XXX synchronized!
        # Parse messages from others; swallow one echo per message we sent.
        if not self.messaging:
            self.parseCommand(message)
        else:
            self.messaging -= 1
    def parseCommand(self, command):
        # findCommand resolves the leading token into a handler (module-level).
        (action, command, argstr) = findCommand(command)
        if callable(action):
            action(self, CommandEvent(command, argstr))
    def processInterplug(self, args):
        # Handle '!inter-plug' directives: module loading, or dispatch of a
        # plugin action addressed to our plugin id.
        if not self.plugin_id or not len(args):
            return
        directive = args[0]
        if directive == 'load-module':
            if len(args) != 2:
                return 'Expected 1 argument naming module, not %r' % ' '.join(args[1:])
            try: self.plugin_actions = importModuleNamespace(args[1])
            except Exception, e:
                return '%s: %s' % (e.__class__.__name__, e)
            return 'Loaded Inter-Plug Module: %r' % args[1]
        plugin_id = validatePluginId(directive)
        if plugin_id == self.plugin_id:
            command = args[1]
            args = args[2:]
            action = self.plugin_actions.get(command)
            # Only functions explicitly marked pluggable may be invoked.
            if callable(action) and getattr(action, 'pluggable', False):
                return action(self, *args)
    def interactiveShell(self):
        from sj.utils import MyExamine
        MyExamine() # XXX Do this in another thread if not already running!
    def processPythonCommand(self, source):
        # 'single' for one-liners (echoes expression values), 'exec' for blocks.
        input_type = 'single'
        if '\n' in source: # or _count_lines(source) > 1
            input_type = 'exec'
        (success, result) = self.evaluatePython(source, input_type = input_type)
        if success:
            (result, output) = result
            if output:
                self.sendMessage(str(output), type = 'cash')
            if result is not None:
                self.sendMessage(str(result), type = 'cash')
        else:
            self.sendMessage(str(result), type = 'bug')
    def evaluatePython(self, source, input_type = 'single'):
        # Compile and run *source* in the shell namespaces with stdout and
        # stderr captured.  Returns (True, (value, output)) on success or
        # (False, error_text) on a syntax or runtime error.
        try: code = compile(source, '<skype conversation>', input_type)
        except SyntaxError, e:
            (etype, value, tb) = sys.exc_info()
            return (False, '%s: %s' % (etype, value))
        def execute_python(code, locals, globals):
            if input_type in ('eval', 'single'):
                return eval(code, globals, locals)
            else:
                exec code in globals, locals
        ns_locals = self.getLocalNamespace()
        ns_globals = self.getGlobalNamespace()
        try: result = _capture(execute_python, code, ns_locals, ns_globals)
        except Exception, e:
            output = str(e)
            return (False, output)
        return (True, result)
    def getLocalNamespace(self):
        # Expose the shell itself to evaluated code as 'shell'.
        self.ns_locals['shell'] = self
        return self.ns_locals
    def getGlobalNamespace(self):
        return self.ns_globals
    class SystrayActionListener(ActionListener):
        # Routes AWT menu action commands through the shell's command parser.
        def __init__(self, shell):
            self.shell = shell
        def actionPerformed(self, event):
            try: self.shell.parseCommand(event.getActionCommand())
            except skype.SkypeException, se:
                self.shell.log('SkypeException while parsing command %s: %s' % \
                               (event.getActionCommand(), se))
                se.printStackTrace()
                # sys.exit(1)
    def getResourceURLContent(self, name, ResourceClass):
        # XXX ResourceURLContent hasn't been ported.
        raise NotImplementedError
        # Unreachable until the port exists:
        data = ResourceURLContent(ResourceClass, name)
        return data.getURL()
    # Icon resource looked up relative to ResourceClass.
    SYSTRAY_IMAGE = 'resources/icon/skypejython.png'
    def setupSystemTray(self, ResourceClass):
        # Install a tray icon with a Quit menu wired to '!quit'.
        # NOTE(review): 'JythonShell' below is undefined in this module --
        # presumably should be this class (or self.SystrayActionListener).
        if SystemTray.isSupported():
            url = self.getResourceURLContent(self.SYSTRAY_IMAGE, ResourceClass)
            image = Toolkit.getDefaultToolkit().getImage(url)
            listener = JythonShell.SystrayActionListener(self)
            popup = PopupMenu()
            item = MenuItem('Quit')
            item.setActionCommand('!quit')
            item.addActionListener(listener)
            popup.add(item)
            trayIcon = TrayIcon(image, 'SkypeJython', popup)
            trayIcon.addActionListener(listener)
            tray = SystemTray.getSystemTray()
            try: tray.add(trayIcon)
            except AWTException, e:
                self.log('System tray icon not available.')
| Python |
# Compatibility shim for very old Pythons (pre-2.3) that lack the
# True/False builtins: fall back to plain integers.
try: True, False
except NameError:
    (True, False) = (1, 0)
class JavaStringIO:
    """Minimal StringIO substitute for Jython environments where the real
    StringIO is unavailable (and java.lang.StringBuffer raised
    IllegalAccessException -- see the original note).

    Improvement: fragments are accumulated in a list and joined on demand,
    avoiding quadratic repeated string concatenation; the public
    write/getvalue interface is unchanged.
    """
    def __init__(self):
        # Written fragments, joined lazily by getvalue().
        self._chunks = []
    def write(self, string):
        self._chunks.append(string)
    def getvalue(self):
        return ''.join(self._chunks)
def newStringBuffer():
    """Return the best available string buffer: the StringIO module's
    implementation when importable, otherwise the JavaStringIO fallback."""
    try:
        from StringIO import StringIO
    except ImportError:
        return JavaStringIO()
    else:
        return StringIO()
def _capture(function, *args, **kwd):
    # Call *function* with stdout AND stderr redirected into one buffer;
    # return (value, captured_output).  On failure the streams are restored
    # first and the original exception is re-raised with its traceback.
    buf = newStringBuffer()
    import sys
    stdout = sys.stdout
    stderr = sys.stderr
    sys.stdout = buf
    sys.stderr = buf
    try: value = function(*args, **kwd)
    except:
        value = None
        # Consider returning error state with output and flag,
        # instead of reraising..?
        sys.stdout = stdout
        sys.stderr = stderr
        # NOTE: exc_info() returns (type, value, traceback); the names below
        # are swapped, but the Python 2 three-argument raise passes them
        # through in the received order, so the re-raise is still correct.
        (evalue, etype, tb) = sys.exc_info()
        raise evalue, etype, tb
    sys.stdout = stdout
    sys.stderr = stderr
    return (value, buf.getvalue())
def _count_lines(string):
nr = 0
ln = len(string)
i = 0
while i < ln:
i = string.find('\n', i)
if i < 0:
break
nr += 1
i += 1
return nr
def printTraceback():
    # Minimal traceback printer for the current exception (avoids importing
    # the traceback module); walks the tb_next chain frame by frame.
    from sys import exc_info
    (etype, value, tb) = exc_info()
    print 'Traceback:'
    while tb:
        f = tb.tb_frame
        code = f.f_code
        print ' File "%s", line %d, in %s' % \
              (code.co_filename, f.f_lineno, code.co_name)
        tb = tb.tb_next
    print '%s: %s' % (etype, value)
def interact(ns_locals = None, ns_globals = None):
    # Tiny REPL over the given namespaces; Ctrl-D (EOFError) exits.
    if ns_locals is None:
        ns_locals = {}
    if ns_globals is None:
        ns_globals = globals()
    try:
        while True:
            line = raw_input('>>> ')
            try:
                code = compile(line, '<stdin>', 'single')
                # NOTE(review): eval's signature is eval(code, globals,
                # locals); passing ns_locals first puts it in the globals
                # slot -- confirm this swap is intended.
                result = eval(code, ns_locals, ns_globals)
                if result is not None:
                    print repr(result)
            except:
                printTraceback()
    except EOFError:
        print
class Options:
    """Mutable bag of named options plus positional arguments.

    Keyword arguments become attributes; item assignment normalizes the
    name (strip whitespace, dashes to underscores) before storing it; the
    ``+=`` operator collects a positional argument.
    """
    def __init__(self, **kwd):
        self.__dict__.update(kwd)
        self.__args = []
    def __iadd__(self, arg):
        # options += 'value' appends a positional argument.
        self.__args.append(arg)
        return self
    def __setitem__(self, name, value):
        key = str(name).strip().replace('-', '_')
        self.__dict__[key] = value
    def __str__(self):
        # NOTE: iterating __dict__ also exposes the mangled _Options__args
        # entry -- preserved from the original rendering.
        lines = ['%s: %s' % pair for pair in self.__dict__.items()]
        lines.append('-Positional:')
        lines.extend(' ' + arg for arg in self.__args)
        return '\n'.join(lines)
def parseCmdln(argv = None, **defaults):
    """Parse a simple command line into an Options object.

    ``--name=value`` sets an option to the stripped value, a bare
    ``--name`` sets it True, single-dash flags are ignored, and every
    other token is collected positionally.
    """
    if argv is None:
        from sys import argv
    options = Options(**defaults)
    for token in argv[1:]:
        if token.startswith('--'):
            stripped = token[2:]
            if not stripped:
                continue
            pieces = stripped.split('=', 1)
            if len(pieces) == 2:
                options[pieces[0]] = pieces[1].strip()
            else:
                options[pieces[0]] = True
        elif token.startswith('-'):
            pass
        else:
            options += token
    return options
def old_examine(ns_locals, ns_globals):
    """Fallback: open a stdlib InteractiveConsole over the namespaces,
    exposing the globals dict to it as 'globs'."""
    from code import InteractiveConsole
    ns_locals['globs'] = ns_globals
    console = InteractiveConsole(locals = ns_locals)
    console.interact(banner = '')
# Developer-machine path that examine() appends to sys.path so the optional
# my.interactive helpers can be imported.
COMMON_TOOLS_PATH = r'C:\Users\Clint\Documents\Projects\CommonTools\Packages'
def examine(ns_locals, ns_globals):
    """Open the richest available interactive examiner over the namespaces,
    falling back to old_examine when my.interactive is not importable."""
    import sys
    if COMMON_TOOLS_PATH not in sys.path:
        sys.path.append(COMMON_TOOLS_PATH)
    try:
        from my.interactive import ExamineEx
    except ImportError:
        return old_examine(ns_locals, ns_globals)
    else:
        return ExamineEx(ns_locals, ns_globals)
def getChatList():
    """Return a one-chat-per-line formatted summary of every Skype chat."""
    return '\n'.join('%-30s : (%-20s) : %s' %
                     (chat.id, chat.status, chat.windowTitle)
                     for chat in skype.Skype.getAllChats())
| Python |
def pluggable(*functions):
    """Mark each function as invocable through the inter-plug dispatcher
    (processInterplug only calls actions whose .pluggable flag is true)."""
    for function in functions:
        function.pluggable = True
_outgoing = {}
def BUILD(shell, name, operation, data = ''):
if operation == 'APPEND':
if name in _outgoing:
_outgoing[name].append(data)
else:
_outgoing[name] = [data]
def SENDMSG(shell, name, subject, *recipients):
    """Finish the outgoing message *name* and send it to each recipient
    through the inter-plug channel; a no-op when no such message exists.
    The payload is zlib-compressed then base64-encoded (Python 2 str
    codecs)."""
    lines = _outgoing.pop(name, None)
    if lines is None:
        return
    payload = '\n'.join(lines).encode('zlib').encode('base64')
    for recipient in recipients:
        shell.sendMessage('!inter-plug [%s] RECVMSG %s %s' % \
                          (recipient, subject, payload))
# Messages received from peers, keyed by subject; each value is line list.
_incoming = {}
def RECVMSG(shell, subject, data):
    """Decode an inter-plug payload (base64 then zlib, Python 2 str codecs)
    and file its lines under *subject*; return an acknowledgement string."""
    lines = data.decode('base64').decode('zlib').split('\n')
    _incoming[subject] = lines
    return 'Received %r' % subject
# Expose the message builders to the inter-plug dispatcher.
pluggable(BUILD, SENDMSG, RECVMSG)
| Python |
## /*******************************************************************************
## * Copyright (c) 2006-2007 Koji Hisano <hisano@gmail.com> - UBION Inc. Developer
## * Copyright (c) 2006-2007 UBION Inc. <http://www.ubion.co.jp/>
## *
## * Copyright (c) 2006-2007 Skype Technologies S.A. <http://www.skype.com/>
## *
## * Skype4Java is licensed under either the Apache License, Version 2.0 or
## * the Eclipse Public License v1.0.
## * You may use it freely in commercial and non-commercial products.
## * You may obtain a copy of the licenses at
## *
## * the Apache License - http://www.apache.org/licenses/LICENSE-2.0
## * the Eclipse Public License - http://www.eclipse.org/legal/epl-v10.html
## *
## * If it is possible to cooperate with the publicity of Skype4Java, please add
## * links to the Skype4Java web site <https://developer.skype.com/wiki/Java_API>
## * in your web site or documents.
## *
## * Contributors: Koji Hisano - initial API and implementation
## ******************************************************************************/
## package com.skype.sample;
##
## import java.io.IOException;
## import java.io.PrintWriter;
## import java.io.Writer;
##
## import org.eclipse.swt.SWT;
## import org.eclipse.swt.events.SelectionAdapter;
## import org.eclipse.swt.events.SelectionEvent;
## import org.eclipse.swt.layout.GridData;
## import org.eclipse.swt.layout.GridLayout;
## import org.eclipse.swt.widgets.Button;
## import org.eclipse.swt.widgets.Display;
## import org.eclipse.swt.widgets.Shell;
## import org.eclipse.swt.widgets.Text;
##
## import com.skype.connector.Connector;
## import com.skype.connector.ConnectorException;
## import com.skype.connector.MessageProcessor;
## import com.skype.connector.osx.OSXConnector;
##
## public class SkypeTracer extends Shell {
## public static void main(final String args[]) throws Exception {
## OSXConnector.disableSkypeEventLoop();
##
## final Display display = Display.getDefault();
## SkypeTracer shell = new SkypeTracer(display, SWT.SHELL_TRIM);
## shell.layout();
## shell.open();
## while(!shell.isDisposed()) {
## if(!display.readAndDispatch()) {
## display.sleep();
## }
## }
## }
##
## public SkypeTracer(Display display, int style) throws ConnectorException {
## super(display, style);
## createContents();
## }
##
## private void createContents() throws ConnectorException {
## setText("Skype Tracer");
## setSize(400, 300);
## final GridLayout gridLayout = new GridLayout();
## gridLayout.numColumns = 2;
## setLayout(gridLayout);
##
## final Text fromSkype = new Text(this, SWT.V_SCROLL | SWT.MULTI | SWT.READ_ONLY | SWT.BORDER);
## fromSkype.setLayoutData(new GridData(GridData.FILL, GridData.FILL, true, true, 2, 1));
## new Thread() {
## public void run() {
## Connector.getInstance().setDebugOut(new PrintWriter(new Writer() {
## @Override
## public void write(char[] cbuf, int off, int len) throws IOException {
## final String appended = new String(cbuf, off, len);
## Display.getDefault().asyncExec(new Runnable() {
## public void run() {
## if (!fromSkype.isDisposed()) {
## fromSkype.append(appended);
## }
## }
## });
## }
##
## @Override
## public void flush() throws IOException {
## // Do nothing
## }
##
## @Override
## public void close() throws IOException {
## // Do nothing
## }
## }));
## try {
## Connector.getInstance().setDebug(true);
## } catch(ConnectorException e) {
## }
## }
## }.start();
##
## final Text toSkype = new Text(this, SWT.BORDER);
## toSkype.setLayoutData(new GridData(GridData.FILL, GridData.CENTER, true, false));
##
## final Button send = new Button(this, SWT.NONE);
## send.addSelectionListener(new SelectionAdapter() {
## public void widgetSelected(SelectionEvent event) {
## final String command = toSkype.getText();
## new Thread() {
## @Override
## public void run() {
## try {
## Connector.getInstance().execute(command, new MessageProcessor() {
## @Override
## protected void messageReceived(String message) {
## releaseLock();
## }
## });
## } catch(ConnectorException e) {
## e.printStackTrace();
## }
## }
## }.start();
## }
## });
## send.setText("&Send");
## }
##
## @Override
## protected void checkSubclass() {
## // Disable the check that prevents subclassing of SWT components
## }
## }
from com.skype.connector import Connector
from com.skype.connector import ConnectorException;
from com.skype.connector import MessageProcessor as SkypeMessageProcessor;
from com.skype.connector.osx import OSXConnector;
from java.io import PrintWriter
from java.io import Writer as JavaWriter
from sys import stdout
class Shell:
class Writer(JavaWriter):
def write(cbuf, offet, length): # char[], int, int throws IOException
stdout.write(str(cbuf[offset:length]))
class MessageProcessor(SkypeMessageProcessor):
def messageReceived(self, message):
# self.releaseLock()
print message
def __init__(self, connector):
self.writer = self.Writer()
self.message_processor = self.MessageProcessor()
self.connector = connector
connector.setDebugOut(PrintWriter(self.writer))
def run(self):
OSXConnector.disableSkypeEventLoop()
try:
while True:
command = raw_input('skype> ').strip()
if command:
try: self.connector.execute(command, self.message_processor)
except ConnectorException, e:
e.printStackTrace()
except EOFError:
print
def main(argv = None):
    """Build a Shell around the singleton connector and run its loop."""
    Shell(Connector.getInstance()).run()
if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/env python
# Standard pre-1.4 Django manage.py bootstrap: import the sibling settings
# module and hand it to execute_manager.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| Python |
from django.conf.urls.defaults import *
# URL routes for the fbauthfacade app.  Order matters: the empty pattern
# is a catch-all and must stay last.
urlpatterns = patterns('fbauthfacade.views',
    (r'graph/(.*)', 'view_graph'),
    (r'login$', 'view_login'),
    (r'logout$', 'view_logout'),
    (r'', 'default_view'),
)
| Python |
from django.utils import simplejson as json
from django.http import HttpResponseRedirect, HttpResponse
from django.http import HttpResponseBadRequest, HttpResponseServerError
from urllib import urlencode, splitquery
from cgi import parse_qsl
from traceback import format_exc
import re
def _get_request_param(request, name):
return request.GET[name]
def view_graph(request, path = None):
    """Dispatch facade Graph-API requests: the two OAuth endpoints get
    dedicated handlers, anything else is a plain object access.  Any
    failure is reported as a 500 carrying the traceback (dev aid)."""
    oauth_handlers = {'oauth/access_token': view_oauth_access_token,
                      'oauth/authorize': view_oauth_authorize}
    try:
        special = oauth_handlers.get(path)
        if special is not None:
            return special(request)
        # Otherwise, handle normal graph object accesses.
        return handle_object(request, path)
    except:
        return HttpResponseServerError(format_exc())
# In-memory OAuth bookkeeping (lost whenever the dev server reloads):
_authorization_codes = {}   # auth code -> user id
_access_tokens = {}         # user id -> access token
_access_tokens_reverse = {} # access token -> user id
from random import choice
from string import uppercase, lowercase, digits
# Digits are repeated so they are weighted more heavily in auth codes.
_auth_code_alphabet = uppercase + (digits * 5)
_access_token_alphabet = uppercase + digits
NR_AUTH_CODE = 5
NR_ACCESS_TOKEN = 10
def generateCode(alphabet, nr):
    """Return a random string of *nr* characters drawn from *alphabet*."""
    return ''.join(choice(alphabet) for _ in range(nr))
def generateAuthCode():
    """Return a fresh authorization code not currently outstanding."""
    while True:
        candidate = generateCode(_auth_code_alphabet, NR_AUTH_CODE)
        if candidate not in _authorization_codes:
            return candidate
        # XXX program a break
def generateAccessToken():
    """Return a random access token (uniqueness is not enforced)."""
    token = generateCode(_access_token_alphabet, NR_ACCESS_TOKEN)
    return token
def checkAccess(user_id, access_token):
    # Access control is deliberately short-circuited: token state lives
    # only in memory and the dev server reloads modules, so a strict check
    # would spuriously fail after every reload.
    return True # since runtime doesn't persist over dev server reloads
    # Unreachable strict comparison, kept for a future persistent store:
    return _access_tokens.get(user_id) == access_token
def view_oauth_authorize(request):
    # Facade for the OAuth authorize endpoint: redirect anonymous callers
    # to the fake login page, otherwise mint an auth code + access token
    # and bounce back to the caller's redirect_uri with ?code=...
    # client_id
##    for (name, value) in request.GET.iteritems():
##        print ' %s: %r' % (name, str(value)[:100])
    user_id = _get_fb_user_cookie(request)
    if user_id is None:
        # Perform login.
        args = dict()
        redirect_uri = request.GET.get('redirect_uri')
        if redirect_uri is not None:
            args['redirect_uri'] = redirect_uri
        url = rebuild_query('/facebook/login', args)
        return HttpResponseRedirect(url)
    # Perform authorization.
    code = generateAuthCode()
    _authorization_codes[code] = user_id
    access_token = generateAccessToken()
    _access_tokens[user_id] = access_token
    _access_tokens_reverse[access_token] = user_id
    # redirect_uri is required at this point (KeyError -> 500 via view_graph).
    redirect_uri = _get_request_param(request, 'redirect_uri')
    url = rebuild_query(redirect_uri, dict(code = code))
    return HttpResponseRedirect(url)
def view_oauth_access_token(request):
    # Facade for the OAuth token endpoint: exchange a one-shot auth code
    # for the user's access token.  An unknown code yields an empty 200
    # (implicit None return renders via view_graph's handler).
    # client_id
    # client_secret
    # redirect_uri
    code = _get_request_param(request, 'code')
    user_id = _authorization_codes.get(code)
    if user_id is not None:
        access_token = _access_tokens.get(user_id)
        del _authorization_codes[code]
        response = dict(access_token = access_token)
        # Ugh, which is it?
        # Form-encoded body chosen over JSON here (see commented alternative).
        response = urlencode(response)
        # response = json.dumps(response)
        return HttpResponse(response)
# r'(?:([^/]+)/+)*([^/]+)?'
# r'/*(?:([^/]+)/*)*'
def _get_path_parts(path):
return path.split('/')
def handle_object(request, path):
    # Fake Graph object access: '<id>/feed' posts a wall message (after the
    # stubbed access check); a bare '<id>' returns a minimal profile dict.
    parts = _get_path_parts(path)
    data = dict()
    if len(parts):
        data['name'] = parts[0]
        if len(parts) > 1:
            if parts[1] == 'feed':
                access_token = request.REQUEST.get('access_token')
                user_id = parts[0]
                # Numeric ids are normalized to int; other ids pass through.
                try: user_id = int(user_id)
                except ValueError: pass
                if not checkAccess(user_id, access_token):
                    response = dict(message = 'Invalid access: %r' % access_token)
                    return HttpResponseBadRequest(json.dumps(response, indent = 1))
                doUserPostMessage(user_id, request.REQUEST.get('message'))
        else:
            # Profile access: resolve the caller from the access token.
            access_token = request.GET.get('access_token')
            user_id = _access_tokens_reverse.get(access_token)
            data['link'] = '/facebook/%s' % parts[0] # or user_id
            data['id'] = user_id
    return HttpResponse(json.dumps(data, indent = 1))
def doUserPostMessage(user_id, message):
    # Stub: record the wall post by logging it to the console.
    print '[User #%s:Post] %s' % (user_id, message)
from django import template
# Inline Django template for the fake login/profile page: shows a login
# form (POSTing fb_sig_user) when anonymous, otherwise the logged-in user,
# plus a cookie dump and a logout button.
LOGIN_SCREEN = '''
{% if have_user %}
<h2>User #{{ fb_user }} Logged In!</h2>
{% else %}
<h2>Log In:</h2>
<form action="/facebook/login" method="post">
<input type="input" name="fb_sig_user" value="{{ fb_sig_user }}" />
<input type="hidden" name="redirect_uri" value="{{ redirect_uri }}" />
<input type="submit" />
</form>
{% endif %}
<p>fb_sig_user: {{ fb_sig_user }}</p>
<table>
{% for cookie in cookies %}
<tr><td>{{ cookie.0|escape }}</td><td style="border: solid thin;">{{ cookie.1|escape }}</td></tr>
{% endfor %}
</table>
<form action="/facebook/logout" method="get">
<input type="submit" value="Logout" />
</form>
'''
# Cookie name used to remember the logged-in facade user id.
FB_USER_ID_VAR = 'facade_fb_user'
def view_login(request):
    # Fake login page.  POST with fb_sig_user logs the user in (sets the
    # cookie) and optionally redirects; GET renders the login/profile
    # template for the current cookie state.
    # Allow a initial login.
    redirect_uri = request.REQUEST.get('redirect_uri')
    # Generate appropriate response type (page or redirect) based on method.
    fb_user = None
    if request.method == 'POST':
        if redirect_uri is not None:
            response = HttpResponseRedirect(redirect_uri)
        else:
            response = HttpResponse()
        fb_sig_user = request.POST.get('fb_sig_user')
        if fb_sig_user is not None:
            fb_sig_user = fb_sig_user.strip()
            # Blank POST value falls back to the GET parameter.
            if not fb_sig_user:
                fb_sig_user = request.GET.get('fb_sig_user')
        if fb_sig_user is not None:
            # Set cookie -- Logged In:
            fb_sig_user = int(fb_sig_user)
            fb_user = fb_sig_user
            _set_fb_user_cookie(response, fb_user)
    else:
        fb_sig_user = request.GET.get('fb_sig_user')
        response = HttpResponse()
    # Draw login/profile screen.
    if fb_user is None:
        fb_user = _get_fb_user_cookie(request)
    # Process response (building page content if not redirect).
    if request.method == 'GET':
        t = template.Template(LOGIN_SCREEN)
        cx = template.Context(dict(fb_user = fb_user,
                                   have_user = bool(fb_user is not None),
                                   fb_sig_user = fb_sig_user or '',
                                   redirect_uri = redirect_uri,
                                   cookies = _get_cookies(request.COOKIES)))
        response.content = t.render(cx)
    return response
def view_logout(request):
    """Clear the login cookie; redirect when a redirect_uri is supplied."""
    redirect_uri = request.REQUEST.get('redirect_uri') # GET, really
    if redirect_uri is None:
        response = HttpResponse()
    else:
        response = HttpResponseRedirect(redirect_uri)
    _delete_fb_user_cookie(response)
    return response
def _set_fb_user_cookie(response, user_id):
    """Persist the logged-in facade user id on the response."""
    cookie_value = str(user_id)
    response.set_cookie(FB_USER_ID_VAR, value = cookie_value)
def _get_fb_user_cookie(request):
    """Return the facade user id cookie value, or None when not logged in."""
    cookies = request.COOKIES
    return cookies.get(FB_USER_ID_VAR)
def _delete_fb_user_cookie(response):
    """Remove the facade login cookie from the response (logout)."""
    response.delete_cookie(FB_USER_ID_VAR)
def _get_cookies(cookies):
    """Iterate (name, value) pairs of the request cookies.

    Cookies newly set on the response object are not included."""
    return cookies.iteritems()
def default_view(request):
    """Debug catch-all: dump the request's cookie dict as plain text."""
    body = str(request.COOKIES)
    return HttpResponse(body)
# Utilities.
def simple_parse_qs(qs):
    """Parse a query string keeping only the LAST value for repeated keys.

    dict() over parse_qsl pairs has exactly that last-wins behavior."""
    return dict(parse_qsl(qs))
def rebuild_query(original, args):
    """Merge *args* into *original*'s query string and rebuild the URL."""
    # Reduce original query string to dictionary, update with existing
    # dictionary, then rebuild new path with query string.
    (path, query) = splitquery(original)
    if query is None:
        merged = args
    else:
        merged = simple_parse_qs(query)
        merged.update(args)
    return '%s?%s' % (path, urlencode(merged))
| Python |
# Fixed credential constants -- presumably shared fixtures for exercising
# the facade's OAuth flow; confirm against their consumers.
AUTH_CODE = 'the-auth-code'
ACCESS_TOKEN = 'abcdefgh'
| Python |
#!/usr/bin/env python
# Standard pre-1.4 Django manage.py bootstrap (second project copy).
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| Python |
from django.conf.urls.defaults import *
from django.contrib import admin
# Register every installed app's admin.py with the admin site.
admin.autodiscover()
# from django_site import viewSiteIndex
# Minimal inline template for the root page: renders a link list from the
# URLS context variable supplied by viewSiteIndex.
SITE_INDEX = '''<html>
<head>
<title>Site Index</title>
</head>
<body>
<h2>Site Index</h2>
<ul>
{% for url in URLS %}
<li><a href="{{ url.href }}">{{ url.title }}</a></li>
{% endfor %}
</ul>
</body>
</html>'''
def viewSiteIndex(request):
    """Render the site index template over the known site URLs."""
    from django.http import HttpResponse
    from django.template import Template, Context
    context = Context(dict(URLS = getSiteUrls()))
    return HttpResponse(Template(SITE_INDEX).render(context))
def getSiteUrls():
    """Describe the site's top-level apps for the index page."""
    return [{'href': 'facebook/',
             'title': 'Facebook Auth Facade'}]
# Root URL routes.  NOTE(review): the two comments below say "Uncomment",
# yet both lines are already active -- confirm admin/docs are meant to be
# enabled.
urlpatterns = patterns('',
    (r'^$', viewSiteIndex),
    (r'^facebook/', include('fbauthfacade.urls')),
    # Uncomment the admin/doc line below and add 'django.contrib.admindocs'
    # to INSTALLED_APPS to enable admin documentation:
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    (r'^admin/(.*)', admin.site.root),
)
| Python |
# Django settings for django_site project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    ('Clint Banis', 'cbanis@gmail.com'),
)
MANAGERS = ADMINS
# Pre-1.2 single-database settings style.
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'ancillary.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = False
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = 'C:\\Program Files\\Python2.5\\Lib\\site-packages\\django\\contrib\\admin\\media\\'
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/personal/admin/media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/personal/admin/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = '-#rb457+x3t*$x(oc8c5=u3c8+tc_st_p)st&76fe-!c=o-i(f'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
# 'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)
# NOTE(review): header says project 'django_site' but the URLconf module is
# 'django_ancillary' -- confirm the intended project name.
ROOT_URLCONF = 'django_ancillary.urls'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'django.contrib.admindocs',
    'fbauthfacade',
)
| Python |
# Patch for google/appengine/tools/dev_appserver.py
def GetLastState():
    """Return the most recently registered old state wrapped in State, or
    None when no state has been registered.

    ``from sys import OldState`` raises ImportError when the attribute is
    absent, which doubles as the no-state signal."""
    try:
        from sys import OldState
    except ImportError:
        return None
    return State(OldState[-1])
def RegisterOldState(**state):
    """Push a copy of the keyword state onto sys.OldState (creating the
    stack on first use) and return it wrapped in a State view."""
    try:
        stack = sys.OldState
    except AttributeError:
        stack = sys.OldState = []
    snapshot = state.copy()
    stack.append(snapshot)
    return State(snapshot)
class State:
    # Attribute-style view over a dict: the dict is adopted as the
    # instance __dict__, so reads/writes are shared with the original
    # mapping in both directions.
    def __init__(self, kwd):
        self.__dict__ = kwd
def DoInLastState(function, *args, **kwd):
    # Run *function* under the previously registered stdio state (if any),
    # first registering the current streams so the call can itself nest;
    # restores this frame's streams afterwards.  No-op (returns None) when
    # no old state exists.
    os = GetLastState()
    if os is not None:
        # Push operating state
        this = RegisterOldState(stdin = sys.stdin,
                                stdout = sys.stdout)
        try:
            sys.stdin = os.stdin
            sys.stdout = os.stdout
            return function(*args, **kwd)
        finally:
            sys.stdin = this.stdin
            sys.stdout = this.stdout
def PDebug():
    """Start pdb attached to the previously-registered (outer) stdio."""
    from pdb import set_trace
    DoInLastState(set_trace)
def ExecuteCGI(root_path,
               handler_path,
               cgi_path,
               env,
               infile,
               outfile,
               module_dict,
               exec_script=ExecuteOrImportScript,
               register_old_state=RegisterOldState):
    # Illustrative excerpt of the dev_appserver.ExecuteCGI patch: only the
    # inserted register_old_state(...) call is shown.  The old_* locals and
    # ExecuteOrImportScript exist in the real function's body; the "# ..."
    # markers stand for the elided original code.  Not runnable standalone.
    # ...
    register_old_state(module_dict = old_module_dict,
                       builtin = old_builtin,
                       argv = old_argv,
                       stdin = old_stdin,
                       stdout = old_stdout,
                       env = old_env,
                       cwd = old_cwd,
                       file_type = old_file_type)
    # ...
# ... | Python |
# State Code:
def GetOldStates():
    """Return the process-wide stack of saved states, stored on the sys
    module so it survives module reloads; created empty on first use."""
    import sys
    if not hasattr(sys, 'oldStates'):
        sys.oldStates = []
    return sys.oldStates
def RegisterOldState(**kwd):
    """Push a new saved state onto the front of the stack."""
    GetOldStates().insert(0, kwd)
def PopOldState():
    """Discard the most recently saved state (IndexError when empty)."""
    GetOldStates().pop(0)
def GetLastState():
    """Return the most recently saved state (IndexError when empty)."""
    states = GetOldStates()
    return states[0]
def DoInLastState(function, *args, **kwd):
    # Run *function* with stdin/stdout swapped to the most recently saved
    # state's streams; this frame's streams are always restored afterwards
    # via the finally clause.
    state = GetLastState()
    import sys
    thisStdin = sys.stdin
    thisStdout = sys.stdout
    sys.stdin = state['stdin']
    sys.stdout = state['stdout']
    try: return function(*args, **kwd)
    finally:
        sys.stdin = thisStdin
        sys.stdout = thisStdout
# ExecuteCGI Patch:
## RegisterOldState(sys_modules = old_module_dict,
## builtins = old_builtin,
## argv = old_argv,
## stdin = old_stdin,
## stdout = old_stdout,
## environ = old_env,
## cwd = old_cwd,
## file_type = old_file_type)
#
## PopOldState()
# And site usage:
try: from google.appengine.tools.dev_appserver import DoInLastState
except ImportError:
info('DevAppserver debugging not available!')
unavailable = True
else:
unavailable = not isDevelopmentServer()
if unavailable:
def DoInLastState(function, *args, **kwd):
return function(*args, **kwd)
def PDebug(function, *args, **kwd):
return function(*args, **kwd)
def PTrace(function):
return function
else:
from pdb import runcall, set_trace
def PDebug(function, *args, **kwd):
return DoInLastState(runcall, function, *args, **kwd)
def PTrace(function):
def tracedFunction(*args, **kwd):
set_trace()
return function(*args, **kwd)
def statefulFunction(*args, **kwd):
return DoInLastState(tracedFunction, *args, **kwd)
statefulFunction.__name__ = function.__name__
statefulFunction.__doc__ = function.__doc__
return statefulFunction
| Python |
# Rename bulkloader progress/result db and its corresponding internal keying.
import sqlite3
from sys import stderr
from pdb import runcall
# Cached bulkloader signature table name; resolved lazily below.
SIGNATURE_TABLE_NAME = None
def get_signature_table_name():
    # NOTE: with the ``global`` declaration in effect, the ``from ...
    # import`` below binds SIGNATURE_TABLE_NAME at module level rather than
    # locally, so a successful import also populates the cache.
    global SIGNATURE_TABLE_NAME
    try: from google.appengine.tools.bulkloader import SIGNATURE_TABLE_NAME
    except ImportError:
        # Fallback to the well-known default when the SDK isn't importable.
        SIGNATURE_TABLE_NAME = 'bulkloader_database_signature'
    return SIGNATURE_TABLE_NAME
def parse_signature(sigstring):
    """Parse a bulkloader signature string into a dict.

    Each line of the form 'name: value' (splitting on the FIRST colon)
    contributes an entry; lines without a colon or with an empty name are
    skipped.  Keys and values are whitespace-stripped str()s."""
    data = {}
    for raw in sigstring.split('\n'):
        head, sep, tail = raw.strip().partition(':')
        if not sep:
            continue
        key = head.strip()
        if key:
            data[str(key)] = str(tail.strip())
    return data
def read_signature(conn):
    """Return the raw signature string stored in the signature table
    (IndexError-free: relies on exactly one row existing)."""
    cursor = conn.cursor()
    cursor.execute('select value from %s limit 1' % get_signature_table_name())
    return cursor.fetchone()[0]
def change_signature(conn, signature):
    """Replace the stored signature row with *signature* and commit."""
    previous = read_signature(conn)
    conn.execute('delete from %s where value = ?' % get_signature_table_name(),
                 (previous,))
    conn.execute('insert into %s (value) values (?)' % get_signature_table_name(),
                 (signature,))
    conn.commit()
def load_signature(conn):
    """Read and parse the stored signature; None when nothing is stored."""
    raw = read_signature(conn)
    if raw:
        return parse_signature(raw)
from google.appengine.tools.bulkloader import _MakeSignature
# How does result_db_line acquire 'result_db: ' value if download is rendered as False?!?
## def _MakeSignature(app_id=None,
## url=None,
## kind=None,
## db_filename=None,
## perform_map=None,
## download=None,
## has_header=None,
## result_db_filename=None,
## dump=None,
## restore=None):
## """Returns a string that identifies the important options for the database."""
## if download:
## result_db_line = 'result_db: %s' % result_db_filename
## else:
## result_db_line = ''
## return u"""
## app_id: %s
## url: %s
## kind: %s
## download: %s
## map: %s
## dump: %s
## restore: %s
## progress_db: %s
## has_header: %s
## %s
## """ % (app_id, url, kind, download, perform_map, dump, restore, db_filename,
## has_header, result_db_line)
def build_signature(sig):
    # Rebuild a bulkloader signature string from a dict produced by
    # parse_signature, via the SDK's _MakeSignature.  The result_db line is
    # deliberately blanked and any leftover 'result_db: ' prefix stripped
    # (see the commented-out _MakeSignature source above for why).
    # result_db_line = 'result_db: %s' % sig['result_db_filename']
    # result_db_line = sig.get('result_db', '')
    result_db_line = ''
    sigstring = _MakeSignature(app_id = sig['app_id'],
                               url = sig['url'],
                               kind = sig['kind'],
                               db_filename = sig['progress_db'],
                               perform_map = sig['map'],
                               download = sig['download'],
                               has_header = sig['has_header'],
                               result_db_filename = result_db_line,
                               dump = sig['dump'],
                               restore = sig['restore'])
    # XXX HACK XXX
    sigstring = sigstring.replace('result_db: ', '')
    # !! Actually, the
    # XXX Why does error show it in one order and the ResumeError in another?
    # bulkloader.py:1656
    return sigstring
_sqlite_master_table_name = 'SQLite_Master'
_sqlite_master_table_name = 'sqlite_master'
def detect_db_file_type(conn):
    """Classify a bulkloader db by whether it holds a progress or result table."""
    cursor = conn.cursor()
    cursor.execute('select name from %s' % _sqlite_master_table_name)
    for (table_name,) in cursor.fetchall():
        if table_name in ('progress', 'result'):
            return str(table_name)
def replace_db_name_kind(string, kind = '', db_file_type = ''):
    """Interpolate $KIND / $DB_FILE_TYPE placeholders in a filename template."""
    # todo: normpath?
    expanded = string.replace('$KIND', kind)
    expanded = expanded.replace('$DB_FILE_TYPE', db_file_type)
    return str(expanded)
from os import rename as rename_file
from sys import exc_info
from errno import ENOENT, EEXIST
from os.path import dirname
from os import makedirs
def ensureDirectoryExists(path):
try: makedirs(path)
except OSError, e:
if e.args[0] != EEXIST:
(etype, value, tb) = exc_info()
raise etype, value, tb
else:
print 'Created %r' % path
def doRenameFile(old_filename, new_filename):
ensureDirectoryExists(dirname(new_filename))
try: rename_file(old_filename, new_filename)
except OSError, e:
if e.args[0] == ENOENT:
return False
(etype, value, tb) = exc_info()
raise etype, value, tb
else:
print 'Moved %r into %r' % (old_filename, new_filename)
return True
class DbFile:
    """A bulkloader progress/result sqlite database plus its parsed signature."""
    def __init__(self, filename, conn = None):
        self.filename = filename
        self.conn = conn
        self.open_database()
        conn = self.conn
        # 'progress' or 'result', detected from the tables actually present.
        self.db_file_type = detect_db_file_type(conn)
        # Parsed signature mapping (see load_signature / parse_signature).
        self.signature = load_signature(conn)
    def close_database(self):
        # Idempotent: safe to call when already closed.
        if self.conn is not None:
            self.conn.close()
            self.conn = None
    def open_database(self):
        # NOTE(review): relies on a module-level sqlite3 import defined
        # elsewhere in this file -- confirm.
        if self.conn is None:
            self.conn = sqlite3.connect(self.filename)
    def build_signature(self):
        """Regenerate the signature string from the parsed signature mapping."""
        return build_signature(self.signature)
    def get_signature_value(self, name):
        return str(self.signature.get(name))
    def __getitem__(self, name):
        # db['kind'] etc. read straight from the signature mapping.
        return self.get_signature_value(name)
    def get_kind(self):
        return self['kind']
    def get_progress_db(self):
        return self['progress_db']
    def change_progress_db(self, new_filename, interpolate = False, rename = False):
        """Point the stored signature at *new_filename*.

        interpolate: expand $KIND / $DB_FILE_TYPE in the template first.
        rename: also move the database file itself on disk.
        """
        if interpolate:
            new_filename = replace_db_name_kind(new_filename, self.get_kind(),
                                                self.db_file_type)
        if new_filename == self.get_progress_db():
            return
        self.signature['progress_db'] = str(new_filename)
        change_signature(self.conn, self.build_signature())
        if rename:
            # Close around the move so sqlite releases its file handle.
            self.close_database()
            try:
                if doRenameFile(self.filename, new_filename):
                    self.filename = new_filename
            finally:
                self.open_database()
    def inspection(self):
        """Multi-line human-readable summary of this db file."""
        return '\n'.join([self.filename, self.db_file_type, '---',
                          self.build_signature()])
def get_db_filelist(options, args):
    """Yield the --db-file option (when given) followed by positional args."""
    if options.db_file:
        yield options.db_file
    for extra in args:
        yield extra
def process_db_file(dbfile, options):
    """Apply the requested operations to one bulkloader db file.

    Order matters: the rename/retarget happens first, then the interactive
    examiner, then the (simple) inspection printouts.
    """
    db = DbFile(dbfile)
    new_db_name = options.new_db_name_template
    if new_db_name:
        print 'Changing: %r ...' % db.filename
        db.change_progress_db(new_db_name, interpolate = True,
                              rename = options.rename_db_file)
    if options.examine:
        # Interactive inspection shell (project-local helper).
        from my.interactive import examine
        examine(db = db, globals = globals())
    if options.inspect:
        print db.inspection()
        if new_db_name:
            print 'new-db-name-template: %r' % new_db_name
    if options.simple_inspection:
        # One-line summary: filename, table type, and target progress db.
        print '%s: %s (%s)' % (db.filename, db.db_file_type, db.get_progress_db())
from traceback import print_exc as full_traceback
from sys import exc_info
def simple_error_display():
(etype, value, tb) = exc_info()
print '%s: %s' % (str(etype), str(value))
def main(argv = None):
    """Command-line entry point: process each given bulkloader db file."""
    from optparse import OptionParser
    from sys import stderr
    parser = OptionParser()
    parser.add_option('--db-file')
    parser.add_option('--new-db-name-template', '--new-db-name')
    parser.add_option('-M', '--rename-db-file', action = 'store_true')
    parser.add_option('-e', '--examine', action = 'store_true')
    parser.add_option('-i', '--inspect', action = 'store_true')
    parser.add_option('--simple-inspection', action = 'store_true')
    parser.add_option('--full-traceback', action = 'store_true')
    (options, args) = parser.parse_args(argv)
    nr = 0
    for dbfile in get_db_filelist(options, args):
        nr += 1
        try: process_db_file(dbfile, options)
        except:
            if options.full_traceback:
                full_traceback()
            else:
                simple_error_display()
    if not nr:
        # Bug fix: 'stderr' was never imported at module level, so this
        # line raised NameError whenever no db files were supplied.
        parser.print_usage(stderr)
# Determine tables:
#
# CREATE TABLE bulkloader_database_signature (value TEXT not null);
#
# app_id: cbanis
# url: http://localhost:8080/gae/remote_api
# kind: MobileSpecialSetting
# download: False
# map: False
# dump: False
# restore: False
# progress_db: bulkloader-progress-20100628.212108.sql3
# has_header: False
#
# CREATE TABLE progress (id integer primary key autoincrement,
# state integer not null,
# kind text not null,
# key_start INTEGER,
# key_end INTEGER);
#
# CREATE TABLE result (id BLOB primary key,
# value BLOB not null,
# sort_key BLOB);
# Generally the filename will describe the table, but in this case,
# use db introspection somehow to check for progress or result tables.
# Parse signature data to glean the entity kind name for rebuilding the
# workfile in its qualified directory structure.
# options.new_db_name_template
if __name__ == '__main__':
main()
| Python |
from google.appengine.api.datastore_file_stub import DatastoreFileStub as DFS
from google.appengine.datastore import entity_pb
from google.appengine.api import datastore_types
from google.appengine.api import datastore
from pdb import runcall, set_trace as debug
from optparse import OptionParser
from pickletools import dis as dis_pickle
from pickle import load as load_pickle, dump as dump_pickle
from gc import collect as gc
from code import InteractiveConsole as IC
from sys import stdout
import readline
from common.timing import Timing
from my.interactive import examine
class StoredEntity(object):
    """Wraps a datastore EntityProto, keeping only the native Entity in
    memory and rendering the protobuf encodings on demand."""
    def __init__(self, entity):
        self.native = datastore.Entity._FromPb(entity)
    @property
    def protobuf(self):
        # Used in _Dynamic_Get and _Dynamic_GetSchema.
        entity = entity_pb.EntityProto()
        entity.CopyFrom(self.native.ToPb())
        return entity
    @property
    def encoded_protobuf(self):
        # Used only in __WriteDatastore.
        # Bug fix: this read self.entity, which is never assigned anywhere;
        # encode the protobuf rebuilt from the stored native entity instead.
        return self.protobuf.Encode()
    def __repr__(self):
        return '%s: %s' % (self.__class__.__name__,
                           dict.__repr__(self.native))
    def toData(self, key_name = None, no_key = False):
        """Return a plain dict of the entity, optionally including its key
        under *key_name* (default '__key__'), coerced to int when numeric."""
        d = dict(self.native)
        if not no_key:
            id = self.native.key().id_or_name()
            try: id = int(id)
            except ValueError:
                pass
            d[key_name or '__key__'] = id
        return d
def getStoredEntity(entity, options):
    """Wrap *entity*; return its raw data dict when --just-data was requested."""
    stored = StoredEntity(entity)
    if not options.just_data:
        return stored
    return stored.toData(options.key_name, options.no_key)
def getAppKind(key):
    """Return the entity kind from the final element of *key*'s path."""
    # from DatastoreFileStub._AppIdNamespaceKindForKey
    final_element = key.path().element_list()[-1]
    return final_element.type()
def getKeyId(key):
    """Flatten *key*'s path into an identifier.

    Returns an int when the joined path is purely numeric, otherwise the
    joined path string itself.
    """
    def getPathElement(e):
        # Name and id can coexist; concatenate when both are present.
        if e.has_name():
            if e.has_id():
                return '%s%s' % (str(e.name()), str(e.id()))
            return str(e.name())
        elif e.has_id():
            return str(e.id())
        return ''
    keyId = '-'.join(getPathElement(e) for e in key.path().element_list())
    if keyId.isdigit():
        return int(keyId)
    # Bug fix: previously fell through and returned None for every
    # non-numeric key, collapsing all named keys onto a single dict slot.
    return keyId
def clear_line(line):
    """Rewrite the current terminal line in place with *line*."""
    blank = ' ' * len(line)
    stdout.write('\r%s\r%s' % (blank, line))
    stdout.flush()
def load_pickle_stream(fl):
    """Yield back-to-back pickled objects from *fl* until the file runs out."""
    while True:
        try:
            item = load_pickle(fl)
        except EOFError:
            return
        yield item
import simplejson as json
class KeyEncoder(json.JSONEncoder):
    """JSON encoder that renders datastore Key objects via id_or_name()."""
    def default(self, key):
        if not isinstance(key, datastore_types.Key):
            raise TypeError(repr(key) + ' is not JSON serializable')
        # Q: can we infer a general-purpose reference type
        # to mandate .to_path()?
        id = key.id_or_name()
        try:
            return int(id)
        except ValueError:
            return id
def toJson(db, filename):
    """Serialize *db* to *filename* as indented JSON.

    Bug fix: the output file handle was previously never closed; close it
    explicitly so the data is flushed deterministically.
    """
    out = open(filename, 'w')
    try:
        json.dump(db, out, cls = KeyEncoder, indent = 1)
    finally:
        out.close()
def examine_pickle(path, options):
fl = open(path)
if options.count:
entities = load_pickle(fl)
print len(entities), 'pickles'
elif options.store:
db = {}
if options.is_stream:
stream = load_pickle_stream(fl)
else:
stream = load_pickle(fl)
total = len(stream)
nr = 0
timing = Timing()
for encoded in stream:
entity = entity_pb.EntityProto(encoded)
nr += 1
key = entity.key()
app_kind = getAppKind(key)
if app_kind not in db:
db[app_kind] = {}
if not options.is_stream:
clear_line('Read entity %-6d / %-6d (%2.2f%%)' % \
(nr, total, (float(nr) / total) * 100))
else:
clear_line('Read entity %-6d...' % nr)
key_id = getKeyId(key)
db[app_kind][key_id] = getStoredEntity(entity, options)
print
print timing
if options.jsonify:
print 'Writing JSON to %r...' % options.jsonify
toJson(db, options.jsonify)
else:
examine(db = db, globals = globals())
elif options.stream:
stream = open(options.stream, 'w')
nr_records = options.nr_records
assert nr_records
if options.is_stream:
data_stream = load_pickle_stream(fl)
else:
data_stream = load_pickle(fl)
nr = 0
for entity in data_stream:
nr += 1
dump_pickle(entity, stream)
clear_line('Read entity %-6d / %-6d (%2.2f%%)' % \
(nr, nr_records, (float(nr) / nr_records) * 100))
if nr >= nr_records:
break
print
stream.close()
else:
if options.is_stream:
try:
while True:
dis_pickle(fl)
except EOFError:
pass
else:
dis_pickle(fl)
class TrackedDFS(DFS):
    """DatastoreFileStub subclass that logs every Read() call's result."""
    def __init__(self, appid, datapath, options):
        self.options = options
        super(TrackedDFS, self).__init__(appid, datapath)
    def Read(self):
        result = super(TrackedDFS, self).Read()
        # Trace the stub's reads to stdout for debugging.
        print '%s::Read => %r' % (self.__class__.__name__, result)
        return result
from google.appengine.datastore import datastore_sqlite_stub
def examine_sqlite_datastore(datapath, appid, options):
    """Open the sqlite-backed datastore stub and drop into the examiner."""
    stub = datastore_sqlite_stub.DatastoreSqliteStub(appid, datapath)
    examine(datastore = stub,
            datapath = datapath,
            appid = appid,
            options = options)
def main(argv = None):
    """Entry point: dispatch to the requested datastore-examination mode."""
    parser = OptionParser()
    parser.add_option('--debug', action = 'store_true')
    parser.add_option('--appid', default = 'cbanis')
    parser.add_option('--datastore-path', '--datastore',
                      default = 'dev_appserver.datastore')
    parser.add_option('--sqlite', action = 'store_true')
    parser.add_option('--unpickle', action = 'store_true')
    parser.add_option('--count', action = 'store_true')
    parser.add_option('--store', action = 'store_true')
    parser.add_option('--is-stream', action = 'store_true')
    parser.add_option('--stream')
    parser.add_option('--nr-records', type = int)
    parser.add_option('--jsonify')
    parser.add_option('--just-data', action = 'store_true')
    parser.add_option('--key-name')
    parser.add_option('--no-key', action = 'store_true', default = False)
    # Bug fix: options.track is consulted below but the option was never
    # declared, so reaching that branch raised AttributeError.
    parser.add_option('--track', action = 'store_true')
    (options, args) = parser.parse_args(argv)
    appid = options.appid
    datapath = options.datastore_path
    if options.debug:
        # Step through stub construction under pdb.
        dfs = runcall(DFS, appid, datapath)
    elif options.unpickle:
        examine_pickle(datapath, options)
    elif options.sqlite:
        examine_sqlite_datastore(datapath, appid, options)
    elif options.track:
        dfs = TrackedDFS(appid, datapath, options)
if __name__ == '__main__':
main()
| Python |
# Clint Banis Copyright 2010 All rights reserved.
# ---
#
# FB Login auth part based on facebookoauth.py,
# with cookie-processing stolen verbatim:
#
# Facebook:
# http://github.com/facebook/python-sdk/blob/master/examples/oauth/facebookoauth.py
# http://www.apache.org/licenses/LICENSE-2.0
#
from google.appengine.api import memcache
from facebook.models import FacebookUser, FacebookAppSetting
from urllib import urlencode, urlopen, splitquery, quote_plus, unquote_plus, splittype, splithost
from cgi import parse_qsl
from django.utils import simplejson as json
import logging
import base64
import hmac
import Cookie
import time
import hashlib
import email
def getApplicationSetting(name, default = None):
    """Return the stored FacebookAppSetting value for *name*, or *default*."""
    query = FacebookAppSetting.all()
    query.filter('name', name)
    results = query.fetch(1)
    if results:
        return results[0].value
    return default
def setApplicationSetting(name, value):
    """Create or update the FacebookAppSetting row named *name*."""
    query = FacebookAppSetting.all()
    query.filter('name', name)
    existing = query.fetch(1)
    if existing:
        setting = existing[0]
    else:
        setting = FacebookAppSetting(name = name)
    setting.value = value
    setting.put()
FBAppID = 'FBAppID'
FBAppSecret = 'FBAppSecret'
FBAppAccessTokenUrl = 'FBAccessTokenUrl'
FBAppOAuthUrl = 'FBAppOAuthUrl'
FBAppUserProfileUrl = 'FBAppUserProfileUrl'
# FBAppSettingsKeys = [FBAppID, FBAppSecret, FBAppAccessTokenUrl,
# FBAppOAuthUrl, FBAppUserProfileUrl]
FBAppCacheKey = 'fb-app-settings'
ACCESS_TOKEN_URL = 'https://graph.facebook.com/oauth/access_token'
OAUTH_URL = 'https://graph.facebook.com/oauth/authorize'
USER_PROFILE_URL = 'https://graph.facebook.com/me'
_settings_symbols = [('app_id', FBAppID, ''), ('app_secret', FBAppSecret, ''),
('access_token_url', FBAppAccessTokenUrl, ACCESS_TOKEN_URL),
('oauth_url' , FBAppOAuthUrl , OAUTH_URL ),
('user_profile_url', FBAppUserProfileUrl, USER_PROFILE_URL)]
class Settings(dict):
    """Dict of facebook app settings with attribute access and symbolic views."""
    def __getattr__(self, name):
        try: return self[name]
        except KeyError:
            # Bug fix: the old fallback called super(dict, self).__getattr__,
            # which does not exist on object; raise a plain AttributeError.
            raise AttributeError(name)
    def __iter__(self):
        # Bug fix: this previously used 'return' inside the loop (so it was
        # not a generator and broke iteration entirely) and looked up .get
        # via super(dict, self), skipping dict itself.  Yield each known
        # setting's value, falling back to its declared default.
        get = super(Settings, self).get
        for (name, _, default) in _settings_symbols:
            yield get(name, default)
    class Symbol:
        """One named setting: dict key, datastore symbol name, and value."""
        def __init__(self, name, symbol, value, is_default):
            self.name = name
            self.symbol = symbol
            self.value = value
            self.is_default = is_default
    def symbolic(self):
        """Yield a Symbol per known setting, marking defaulted ones."""
        for (name, symbol, default) in _settings_symbols:
            try: yield self.Symbol(name, symbol, self[name], False)
            except KeyError:
                yield self.Symbol(name, symbol, default, True)
def getFBAppSettings(SettingsClass = Settings):
    """Return the app settings as *SettingsClass*, memcaching the raw dict."""
    cached = memcache.get(FBAppCacheKey)
    if cached is None:
        cached = {}
        for (name, symbol, default) in _settings_symbols:
            cached[name] = getApplicationSetting(symbol, default)
        memcache.set(FBAppCacheKey, cached)
    return SettingsClass(cached)
_undefined = object()
def setFBAppSettings(**settings):
    """Persist any known settings passed as keyword arguments.

    Unknown keyword names are ignored; only the names declared in
    _settings_symbols are stored.
    """
    for (name, symbol, _) in _settings_symbols:
        value = settings.get(name, _undefined)
        if value is not _undefined:
            setApplicationSetting(symbol, value)
    # Bug fix: caching the raw (possibly partial) kwargs dict poisoned the
    # cache that getFBAppSettings expects to hold a complete settings dict;
    # invalidate instead and let the next read repopulate it.
    memcache.delete(FBAppCacheKey)
class InvalidSignature(Exception):
pass
def verify_fb_app_req(handler):
    """Validate the facebook request signature; return the parsed sig dict.

    Mirrors the reference PHP from
    http://developers.facebook.com/docs/authentication/ : sort the fb_sig
    parameters, concatenate key=value pairs, append the application secret
    and compare the md5 digest against the transmitted signature.

    Raises InvalidSignature on mismatch.
    """
    # App credentials come from the datastore settings.
    # (The app_id could in principle come from the request instead.)
    app_id = getApplicationSetting('FBAppID') or 0
    application_secret = getApplicationSetting('FBAppSecret') or ''
    # Extract the request payload and transmitted digest.
    (payload, sigdigest, sig) = get_fb_sig_data(handler, app_id)
    # Evaluate verification and return the data when it checks out.
    if get_fb_sig_checksum(payload + application_secret, sigdigest):
        return sig
    raise InvalidSignature
def get_fb_sig_data(handler, app_id):
    """Extract the facebook signature parameters for this request.

    Prefers the fbs_<app_id> cookie; otherwise falls back to the fb_sig_*
    request parameters.  Returns (payload, sigdigest, sig) where payload is
    the sorted key=value concatenation, sigdigest the transmitted digest,
    and sig the parameter dict (digest removed).
    """
    # Obtain signature dictionary object.
    sig = handler.request.cookies.get('fbs_%s' % app_id)
    if sig is not None:
        # Strip backslash-escapes and quotes, then parse as a query string.
        # Bug fix: .replace('"') was missing its replacement argument and
        # raised TypeError whenever the cookie was present.
        sig = sig.replace('\\', '').replace('"', '')
        sig = dict(x.split('=') for x in sig.split('&'))
        sigdigest = sig.pop('sig')
    else:
        # Use the fb_sig_* request parameters when the cookie is not set.
        p = handler.request.params
        ln = len('fb_sig_')
        sig = dict((n[ln:], p.get(n)) for n in p.keys() \
                       if n.startswith('fb_sig_'))
        sigdigest = p.get('fb_sig')
    # Facebook signs the parameters in sorted key order.
    keys = sorted(sig.keys())
    # Presumably, values do not need further encoding...
    payload = ''.join('%s=%s' % (n, sig[n]) for n in keys)
    return (payload, sigdigest, sig)
def get_fb_sig_checksum(data, sig):
    """True when md5(data)'s hex digest matches the transmitted *sig*."""
    import hashlib
    digest = hashlib.md5(data).hexdigest()
    return digest == sig
class FacebookAppRequest:
    """Objectifies the facebook app/user data for other plugins.

    Verification occurs when the passed user signature is used to procure
    permissible user data: if the user didn't really authorize the
    application, FB will deny access to this user (and so shall we).
    """
    def __init__(self, handler):
        self._sig = verify_fb_app_req(handler)   # or, cookie packet
        self._handler = handler
        handler.fb_app_req = self   # memoized for getFBAppReq
        fb_sig_user = self._getUserId()
        if fb_sig_user is not None:
            self.fb_user = self._getUser(fb_sig_user) or \
                           self._createUser(fb_sig_user)
    def _getUserId(self):
        """Return the numeric fb_sig_user parameter as int, or None."""
        fb_sig_user = self._handler.request.get('fb_sig_user')
        # Bug fix: the old check was `type(...) is str`, which let unicode
        # values (webapp's usual string type) fall through to int() and
        # raise ValueError on non-numeric input.  Also guard falsy values
        # (the request default is '').
        if not fb_sig_user:
            return
        if isinstance(fb_sig_user, basestring) and not fb_sig_user.isdigit():
            return
        # Cast for all other types.
        return int(fb_sig_user)
    def _getUser(self, fb_sig_user):
        return getFBUserByID(fb_sig_user)
    def _createUser(self, fb_sig_user):
        """Create and persist a FacebookUser for a previously-unseen id."""
        fb_user = FacebookUser(user_id = fb_sig_user)
        fb_user.put()
        return fb_user
    def _get_sig_parts(self):
        # Return a sequence suitable for urlencode, which expects a
        # primitive sequence (apparently a generator/dict-listiterator
        # isn't sufficient).
        return [(name, value) for (name, value) in \
                    self._handler.request.params.iteritems() \
                    if name.startswith('fb_sig')]
    def query(self):
        """The fb_sig request parameters as a dict."""
        return dict(self._get_sig_parts())
    def signature(self):
        """URL-encoded fb_sig parameter string (memoized on first use)."""
        try: return self._sig_string
        except AttributeError:
            sig = self._sig_string = urlencode(self._get_sig_parts())
            return sig
def getFBAppReq(handler):
    """Return the handler's cached FacebookAppRequest, creating it on demand."""
    if hasattr(handler, 'fb_app_req'):
        return handler.fb_app_req
    return FacebookAppRequest(handler)
def getFBUserByID(user_id, create = True):
    """Fetch the FacebookUser with *user_id*, optionally creating one."""
    q = FacebookUser.all()
    q.filter('user_id = ', int(user_id))
    for fb_user in q.fetch(1):
        return fb_user
    if create:
        # Bug fix: the fallback previously returned FacebookUser() with no
        # user_id set, so the record could never be found again by this
        # query once saved.
        return FacebookUser(user_id = int(user_id))
# Authentication for querying user data.
class LoginWare(Settings):
    """OAuth login helper driven by the stored facebook settings.

    (Utilizes facebook settings so dev_appserver can masquerade as the fb
    auth server.)
    """
    def __init__(self):
        super(LoginWare, self).__init__(getFBAppSettings())
    def _get_access_token(self, redirect_uri, verif_code):
        """Exchange *verif_code* for an access token; None on failure."""
        args = dict(client_id = self.app_id, redirect_uri = redirect_uri)
        if verif_code:
            args["client_secret"] = self.app_secret
            args["code"] = verif_code
        url = rebuild_query(self.access_token_url, args)
        response = simple_parse_qs(urlopen(url).read())
        try: return response['access_token']
        except KeyError:
            return None
    def _get_user_data(self, access_token):
        """Fetch the user's profile JSON using *access_token*."""
        args = dict(access_token = access_token)
        url = '%s?%s' % (self.user_profile_url, urlencode(args))
        return json.load(urlopen(url))
    def _perform_user_login(self, redirect_uri, verif_code):
        """Return (token, profile) on success, or the oauth redirect URL."""
        access_token = self._get_access_token(redirect_uri, verif_code)
        if access_token is None:
            args = dict(client_id = self.app_id, redirect_uri = redirect_uri)
            return rebuild_query(self.oauth_url, args)
        return (access_token, self._get_user_data(access_token))
    def login_handler(self, handler, redirect_uri = None):
        """Drive the login flow; (None, None) means a redirect was issued."""
        if redirect_uri is None:
            redirect_uri = handler.request.path_url
        response = self._perform_user_login(redirect_uri, handler.request.get("code"))
        if isinstance(response, basestring):
            handler.redirect(response)
            return (None, None)
        return response
    def get_current_user(self, handler):
        """Return the FacebookUser for the signed fb_user cookie (memoized)."""
        try: return self._current_user
        except AttributeError:
            # Bug fix: webapp requests expose the cookie mapping as
            # handler.request.cookies, not .cookie.
            user_id = self._parse_cookie(handler.request.cookies.get('fb_user'))
            if user_id:
                user = self._current_user = FacebookUser.get_by_key_name(user_id)
                return user
    # facebookoauth.py:
    def _set_cookie(self, response, name, value, domain=None, path="/", expires=None):
        """Generates and signs a cookie for the give name/value"""
        timestamp = str(int(time.time()))
        value = base64.b64encode(value)
        signature = self._cookie_signature(value, timestamp)
        cookie = Cookie.BaseCookie()
        cookie[name] = "|".join([value, timestamp, signature])
        cookie[name]["path"] = path
        if domain: cookie[name]["domain"] = domain
        if expires:
            cookie[name]["expires"] = email.utils.formatdate(
                expires, localtime=False, usegmt=True)
        # Append the rendered header, skipping BaseCookie's 'Set-Cookie: '
        # prefix (12 characters).
        response.headers._headers.append(("Set-Cookie", cookie.output()[12:]))
    def _parse_cookie(self, value):
        """Parses and verifies a cookie value from set_cookie"""
        if not value: return None
        parts = value.split("|")
        if len(parts) != 3: return None
        if self._cookie_signature(parts[0], parts[1]) != parts[2]:
            logging.warning("Invalid cookie signature %r", value)
            return None
        timestamp = int(parts[1])
        if timestamp < time.time() - 30 * 86400:
            logging.warning("Expired cookie %r", value)
            return None
        try:
            return base64.b64decode(parts[0]).strip()
        except:
            return None
    def _cookie_signature(self, *parts):
        """Generates a cookie signature.
        We use the Facebook app secret since it is different for every app (so
        people using this example don't accidentally all use the same secret).
        """
        hash = hmac.new(str(self.app_secret), digestmod=hashlib.sha1)
        for part in parts: hash.update(part)
        return hash.hexdigest()
from facebook.page import FacebookApplication
class FBAppHandler(FacebookApplication):
    """Facebook app entry page: runs the OAuth login dance, then redirects."""
    def get(self):
        # Prepare transient args: these get encoded into redirect_uri to be
        # passed through the cross-domain apparatus.
        args = dict()
        after_login = self.request.params.get('after_login')
        if after_login is not None:
            args['after_login'] = after_login
        fb_sig_encoded = self.request.params.get('fb_sig_encoded')
        if fb_sig_encoded is not None:
            args['fb_sig_encoded'] = fb_sig_encoded
        # XXX Doesn't necessarily have to validate...
        try: fb_app_req = getFBAppReq(self)
        except InvalidSignature: pass
        else: args['fb_sig_encoded'] = quote_plus(fb_app_req.signature())
        # Bug fix: redirect_uri was only bound inside the 'if args:' branch,
        # raising NameError when no transient args were present.  None lets
        # login_handler fall back to the request's own path_url.
        redirect_uri = None
        if args:
            (proto, host_query) = splittype(self.request.path_url)
            (host, path_query) = splithost(host_query)
            # Rebuild the URL with the transient args folded into the query.
            redirect_uri = '%s://%s%s' % (proto, host, rebuild_query(path_query, args))
        ware = LoginWare()
        (access_token, profile) = ware.login_handler(self, redirect_uri = redirect_uri)
        if access_token is not None:
            # Skip signed query, access user data from profile info directly!
            user = getFBUserByID(profile['id'], create = True)
            user.access_token = access_token
            user.name = profile['name']
            user.profile_url = profile['link']
            user.put()
            # And then..? Cookies on user_id and claimant_id:
            # (well, not claimant_id, that can piggyback other cookies in its proper)
            ware._set_cookie(self.response, "fb_user", str(profile["id"]),
                             expires=time.time() + 30 * 86400)
            if after_login:
                if fb_sig_encoded:
                    fb_sig = simple_parse_qs(unquote_plus(fb_sig_encoded))
                    after_login = rebuild_query(after_login, fb_sig)
                return self.redirect(after_login)
        # This just shows page status:
        return FacebookApplication.get(self, path = '')
def simple_parse_qs(qs):
    """Parse a query string, keeping only the last value per parameter name."""
    return dict(parse_qsl(qs))
def rebuild_query(original, args):
    """Merge *args* into *original*'s query string and rebuild the URL."""
    (path, query) = splitquery(original)
    if query is None:
        merged = args
    else:
        # Reduce the existing query to a dict, then overlay the new args.
        merged = simple_parse_qs(query)
        merged.update(args)
    return '%s?%s' % (path, urlencode(merged))
# Ancillary Functionality to Authentication:
MESSAGE_URL = 'https://graph.facebook.com/%(user_id)d/feed'
def post_message(user, message):
    """Post *message* to the user's feed and return the parsed JSON response."""
    feed_url = MESSAGE_URL % dict(user_id = user.user_id)
    query = urlencode(dict(message = message,
                           access_token = user.access_token))
    return json.load(urlopen('%s?%s' % (feed_url, query)))
| Python |
from django.db import models
class CharacterClass(models.Model):
    # Display name of a chieftain's character class.
    name = models.CharField(max_length = 30)
class Clan(models.Model):
    # Placeholder: clan attributes not yet modeled.
    pass
class Empire(models.Model):
    # Placeholder: empire attributes not yet modeled.
    pass
class Warrior(models.Model):
    # Placeholder: warrior attributes not yet modeled.
    pass
class Item(models.Model):
    # Placeholder: item attributes not yet modeled.
    pass
class Chieftain(models.Model):
    # The player character.
    chclass = models.ForeignKey(CharacterClass)
    name = models.CharField(max_length = 240)
    # Vital stats: current value plus its cap.
    max_health = models.IntegerField()
    health = models.IntegerField()
    max_energy = models.IntegerField()
    energy = models.IntegerField()
    max_stamina = models.IntegerField()
    stamina = models.IntegerField()
    # Progression.
    level = models.IntegerField()
    experience = models.IntegerField()
    # Wealth: coins on hand and banked.
    gold = models.IntegerField()
    bank = models.IntegerField()
    # Holdings.
    clan = models.OneToOneField(Clan)
    empire = models.ForeignKey(Empire)
    land = models.IntegerField()
    # NOTE(review): ForeignKey here means each Chieftain references ONE
    # Warrior/Item row -- presumably the relation was meant to point the
    # other way; confirm before relying on it.
    warriors = models.ForeignKey(Warrior)
    items = models.ForeignKey(Item)
    favor_points = models.IntegerField()
| Python |
from django.conf.urls.defaults import *
from Wartime.wartime.views import SitePages
# Player pages: /<player_id>/<page_name>, where page_name is one of the
# registered SitePages anchors (optional; the view defaults to 'home').
page_pattern = r'^/(?P<player_id>\d+)/(?P<page_name>%s)?$' % '|'.join(SitePages.getAnchors())
urlpatterns = patterns('Wartime.wartime.views',
                       (page_pattern, 'render'),
                       (r'^/create/?$', 'create_chieftain'),
                       (r'^/?(login/)?$', 'login_page'))
| Python |
# Wartime Page Reports.
from django.shortcuts import render_to_response
from django.template.context import RequestContext
from Wartime.wartime.models import Chieftain, CharacterClass, Clan, Empire, Warrior, Item
from pdb import set_trace as debug
class Page:
    """A named site page: the template to render plus its URL anchor (href)."""
    def __init__(self, name = None, template = None, href = None):
        self.name = name
        self.template = template
        self.href = href or name
    def __repr__(self):
        return '<%s:%s:%s>' % (self.name, self.template, self.href)
    class Set(dict):
        """Registry of Pages, keyed under BOTH their name and their href."""
        # WebOb
        class Links(object):
            # Template helper: links['name'] -> that page's URL for a player.
            def __init__(self, pageset, player_id):
                self.__pageset = pageset
                self.__player_id = int(player_id)
            def player_id(self):
                return self.__player_id
            def __getitem__(self, name):
                return self.__pageset.url(self.__player_id, name)
        def __init__(self, *values):
            # Each value is (name, template) or (name, template, href);
            # every page is registered under both keys.
            for v in values:
                if len(v) == 3:
                    (name, template, href) = v
                else:
                    (name, template) = v
                    href = name
                page = Page(name, template, href)
                self[name] = page
                self[href] = page
        base_path = 'wartime'
        def url(self, player_id, name):
            # Absolute URL for a page, e.g. /wartime/3/bank.
            return '/%s/%d/%s' % (self.base_path, player_id, self[name].href)
        def getLinks(self, *args, **kwd):
            return self.Links(self, *args, **kwd)
        def getAnchors(self):
            # Generator of hrefs (single-use; may repeat a page registered
            # under two keys -- harmless in a regex alternation).
            return (page.href for page in self.itervalues())
        def render(self, request, template, **values):
            # Inject a Links helper for the chieftain unless caller did.
            if 'links' not in values:
                values['links'] = self.getLinks(values['chieftain'].id)
            return render_to_response(template,
                                      context_instance = RequestContext(request, values))
# Registry of all site pages as (name, template[, href]) tuples; the hrefs
# feed the urlconf pattern and the templates render via SitePages.render().
SitePages = Page.Set(('home', 'home.html'),
                     ('bank', 'bank.html'),
                     ('quests', 'quests.html'),
                     ('bounties', 'bounty.html'),
                     ('empire', 'empire.html'),
                     ('hospital', 'hospital.html'),
                     ('offers', 'offerings.html'),
                     ('battle_opponents', 'battle_opponents.html', 'battle/opponents'),
                     ('battle', 'battle_plan.html'),
                     ('inventory', 'inventory.html'),
                     ('alliances', 'alliances.html'),
                     ('rivals', 'rivals.html'))
def render(request, player_id = None, page_name = None):
    """Render the named page (default 'home') for the given player id."""
    chieftain = Chieftain.objects.get(id = int(player_id))
    page = SitePages[page_name or 'home']
    return SitePages.render(request, page.template, chieftain = chieftain)
def login_page(request, *args):
    """Show the login page listing every existing chieftain."""
    context = RequestContext(request, dict(chieftains = Chieftain.objects.all()))
    return render_to_response('login.html', context_instance = context)
def create_chieftain(request):
    """Create a new Chieftain (and its one-off related rows) from POST data."""
    import django
    data = request.POST
    try:
        name = data['name']
        chclass_name = data['chclass']
    except django.utils.datastructures.MultiValueDictKeyError:
        # Missing form fields: bounce back to the entry page.
        return django.http.HttpResponseRedirect('/wartime')
    # XXX Todo: verify that chieftain isn't already created!
    chclass = CharacterClass(name = chclass_name)
    chclass.save()
    # This hack just creates and saves new rows for every creation!
    clan = Clan()
    empire = Empire()
    warriors = Warrior()
    items = Item()
    for related in (clan, empire, warriors, items):
        related.save()
    chieftain = Chieftain(name = name,
                          chclass = chclass,
                          max_health = 100,
                          health = 100,
                          max_energy = 10,
                          energy = 10,
                          max_stamina = 3,
                          stamina = 3,
                          level = 1,
                          experience = 9,
                          gold = 2370,
                          bank = 0,
                          clan = clan,
                          empire = empire,
                          land = 0,
                          warriors = warriors,
                          items = items,
                          favor_points = 0)
    chieftain.save()
    # The response page extends the home page.  Should it redirect?
    return SitePages.render(request, 'chieftain_created.html', chieftain = chieftain)
| Python |
# Wartime Module.
'''
Health: 100/100
More in: <mm:ss>
Energy: 10/10
More in: <mm:ss>
Stamina: 3/3
More in: <mm:ss>
Level: 1
Experience: 9/10
Gold: 2,734 coins (Hoard link to bank)
More in <mm:ss>
Clan: 1 warriors
Empire: 1000 hectares
Land: 9860 hectares
Home/MainMenu
Adventures /quests
The Gods /offers
Battle /battle
My Clan /inventory
Chieftains(n) /alliances
Bounties /bounty
Empire /land
Hoard /hoard
Top Clans /topclans
Hoard:
Balance: 0
Withdrawal (free)
Deposit (10% fee for protection of coins)
Adventures quests
Unlock more adventures when you reach level 5
Description / Payout Requirements
Consult the Oracle Energy: 1 Knife x1
200 - 300 coins
20 - 30 hectares
Experience: +1
Battle
Clan Chieftains Tips
Valerio Sigwlf, Level 1 3
Bounties
Clan Paid By Bounty Amount Time
Top 10 Rival Clans
Clan Attacks Against Last Attack Time
Empire land
Income: +200 coins / Upkeep: -0 coins
More gold collected in 31 minutes
Land Details/Requirements Buy
1 coin/hectare
Owned: 9,860 hectares
[ 1000 ] hectares (Buy)
Unlock more Buildings when you reach level 15...
Building Details/Requirements Buy
Income: 100 coins 6000 coins
Built on: 600 hectares Owned: 2
Build / Sell
Clan inventory
You have 1 Warrior. Your 1 Chieftain can lead 9 more Warriors into Battle. Buy 9 more Warriors to Battle at full strength!
Unlock more items when you reach level 5
Warriors Details Buy/Sell
Scout
Attack: +1 100 coins
Defense: +0
You have 2 Weapons. Your 1 Chieftain can lead 3 more Weapons into Battle. Buy 3 more Weapons to Battle at full strength!
Weapons Details: Buy/Sell
Helmet 100 coins
Attack: +0 Owned: 1
Defense: +1
Special Items
(The Gods)
The Gods (n) offers
Favor points: 10
Offering Weapon - 20
2 Chieftains - 10
2000 coins - 10
full health - 4
full energy - 10
full stamina - 10
<Donations>
Chieftains (n) alliances
Friends select
Hospital?
'''
| Python |
#!/usr/bin/env python
from django.core.management import execute_manager
# Standard pre-1.4 Django manage.py bootstrap: import the sibling settings
# module and hand control to django.core.management.
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| Python |
from django.db import models
# Create your models here.
| Python |
from django.conf.urls.defaults import *
# Root shows the wilderness view; media/ serves static assets straight from
# wild/media (development-only static serving).
urlpatterns = patterns('',
    (r'^$', 'Wartime.wild.views.view_wild'),
    (r'media/(?P<path>.*)', 'django.views.static.serve',
                    {'document_root': 'wild/media',
                     'show_indexes' : False}),
    )
| Python |
from django.shortcuts import render_to_response
from wild import loadRegion
# Region map file, relative to the project root.
REGION_FILE = 'wild/regions/island.yaml'
# Developer override with an absolute Windows path.  Bug fix: the original
# line ended with a stray ')' (a SyntaxError) and used un-escaped
# backslashes ('\r' in '\regions' is a carriage return); a raw string keeps
# the path intact.
REGION_FILE = r'H:\My Projects\Wartime\wild\regions\island.yaml'
def view_wild(request):
    """Render the wilderness template for the configured region file."""
    return render_to_response('wild.html', dict(region = loadRegion(REGION_FILE)))
| Python |
# HTML Wilderness.
from pdb import set_trace as debug
class Wild:
    """A tile map rendered row-by-row for HTML, plus a set of sprites.

    Tile names are interned into self.tiles and each map cell stores the
    tile's index; with TILING = chr each row is a compact string of
    chr(index) codes.
    """
    BORDERING = False
    TILING = chr # list
    def __init__(self, (width, height), tile_base, sprite_base, default_tile,
                 default_start_location = None, **paint):
        self.tile_base = tile_base
        self.sprite_base = sprite_base
        self.tiles = []
        self.sprites = set()
        self.default_start_location = default_start_location or (0, 0)
        # (self + tile) interns the tile name and returns its index.
        self.default_tile = (self + default_tile)
        # One row-string per y.  Rows are immutable strings, so the repeated
        # reference is safe: __setitem__ rebinds whole rows.
        self.array = [self.TILING(self.default_tile) * width] * height
        self.setTiles(paint)
    def setTiles(self, tiles):
        # tiles: {(x, y): tile-index}
        for ((x, y), tile) in tiles.iteritems():
            self[(x, y)] = tile
    def loadTiles(self, tiles):
        """Intern tile names: {(x, y): name} -> {(x, y): index}."""
        result = {}
        conversion = {}
        for (coord, tile) in tiles.iteritems():
            if tile in conversion:
                nr = conversion[tile]
            else:
                nr = conversion[tile] = (self + tile)
            result[coord] = nr
        return result
    # tileRows
    def rows(self):
        """Yield one generator of Tile objects per map row."""
        heightU = len(self.array) - 1
        for y in xrange(heightU + 1):
            def columns(row, y):
                widthU = len(row) - 1
                for x in xrange(widthU + 1):
                    borders = dict()
                    if self.BORDERING:
                        # Record the tile codes of whichever of the four
                        # neighbours exist (map edges omit directions).
                        if x > 0:
                            borders['western'] = ord(row[x-1])
                        if x < widthU:
                            borders['eastern'] = ord(row[x+1])
                        if y > 0:
                            borders['northern'] = ord(self.array[y-1][x])
                        if y < heightU:
                            borders['southern'] = ord(self.array[y+1][x])
                    yield self.Tile(self, (x, y), ord(row[x]), borders)
            yield columns(self.array[y], y)
    class Tile:
        """One map cell: its coordinates, tile code and neighbour codes."""
        def __init__(self, wild, coords, code, borders = {}):
            self.wild = wild
            self.coords = coords
            self.code = code
            self.borders = borders
        def image(self):
            """Image name: base tile name plus border-overlap suffixes."""
            name = self.wild.tiles[self.code]
            # For the default tiling, just use its base.
            if self.code == self.wild.default_tile:
                return name
            # For all other tiles, add on the border names, in order.
            for b in ('northern', 'eastern', 'southern', 'western'):
                if b in self.borders:
                    # Determine overlapping factor.
                    name = '%s_%s' % (name, self.wild.tiles[self.borders[b]])
            return name
        def occupants(self):
            # Sprites currently standing on this cell.
            here = self.coords
            for sprite in self.wild.sprites:
                if sprite.location == here:
                    yield sprite
    def __setitem__(self, (x, y), tile):
        # Rebind row y with the tile code spliced in at column x.
        n = self.array[y]
        self.array[y] = '%s%s%s' % (n[:x], chr(tile), n[x+1:])
    def __add__(self, tile):
        # Intern *tile* (a name) and return its new index in self.tiles.
        # (No dedup here -- loadTiles handles that via its conversion map.)
        self.tiles.append(tile)
        return len(self.tiles) - 1
    class Sprite:
        """A movable object placed on the map."""
        def __init__(self, wild, image = None, location = None, selected = False):
            self.wild = wild
            self.image = image
            self.selected = selected
            self.visible = True
            if location is None:
                location = wild.default_start_location
            self.location = location
        # NOTE(review): this method is shadowed by the instance attribute
        # assigned in __init__, so it is never reachable on instances.
        def image(self):
            return self.image
    def put_sprite(self, *args, **kwd):
        """Create a Sprite on this map and register it."""
        sprite = self.Sprite(self, *args, **kwd)
        self.sprites.add(sprite)
        return sprite
def loadRegion(filename):
    """Build a Wild map from a YAML region description.

    The file must provide 'size' (a "(width, height)" string),
    'tile-base', 'sprite-base' and 'default', plus optional 'tiles'
    (a list of {"(x, y)": tile-name} dicts) and 'sprites' (a list of
    put_sprite keyword dicts whose 'location' is an "(x, y)" string).
    """
    import yaml
    from ast import literal_eval
    # safe_load: region files are plain data; yaml.load would let a
    # crafted file instantiate arbitrary Python objects.  The file is
    # closed deterministically by the with-block.
    with open(filename) as stream:
        config = yaml.safe_load(stream)
    # literal_eval parses "(w, h)" tuples without executing code, unlike
    # the eval() calls it replaces.
    (width, height) = literal_eval(config['size'])
    tile_base = config['tile-base']
    sprite_base = config['sprite-base']
    default = config['default']
    tiles = config.get('tiles', [])
    sprites = config.get('sprites', [])
    region = Wild((width, height), tile_base, sprite_base, default)
    # Set coordinate terrain.
    unit = dict()
    for coords in tiles:
        for (xy, name) in coords.items():
            unit[literal_eval(xy)] = name
    tiles = region.loadTiles(unit)
    region.setTiles(tiles)
    # Load sprites.
    for sp in sprites:
        if 'location' in sp:
            sp['location'] = literal_eval(sp['location'])
        region.put_sprite(**sp)
    return region
def main(argv = None):
    """Load every region file named on the command line.

    argv -- argument list for OptionParser (defaults to sys.argv[1:]).
    Returns None.
    """
    from optparse import OptionParser
    parser = OptionParser()
    parser.add_option('-d', '--debug', action = 'store_true')
    (options, args) = parser.parse_args(argv)
    if options.debug:
        # The original called an undefined debug() (NameError at
        # runtime); drop into the standard debugger instead.
        import pdb
        pdb.set_trace()
    for filename in args:
        # Load django templates.
        loadRegion(filename)

if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/env python
# Django project management entry point (pre-1.4 style).
# NOTE(review): execute_manager was deprecated in Django 1.4 and removed
# in 1.6; modern projects use django.core.management.execute_from_command_line.
from django.core.management import execute_manager
try:
    # The project settings module must be importable from this directory.
    import settings # Assumed to be in the same directory.
except ImportError:
    # Fail loudly with guidance rather than letting Django crash later
    # with a less helpful traceback.
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    # Dispatch to the management command named on the command line.
    execute_manager(settings)
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.