code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
# jsb/boot.py
#
#
""" admin related data and functions. """
## jsb imports
from jsb.utils.generic import checkpermissions, isdebian, botuser
from jsb.lib.persist import Persist
from jsb.utils.exception import handle_exception
from jsb.lib.datadir import makedirs, getdatadir
from jsb.lib.config import Config
from jsb.lib.jsbimport import _import
from jsb.utils.lazydict import LazyDict
## basic imports
import logging
import os
import sys
import types
import copy
## paths
sys.path.insert(0, os.getcwd())
sys.path.insert(0, os.getcwd() + os.sep + '..')
#try: sys.path.append(os.path.expanduser("~") + os.sep + '.jsb')
#except: pass
## defines

# ongae is True when running under Google App Engine .. importing waveapi
# only succeeds there and decides which plugin packages are used.
ongae = False
try:
    import waveapi
    plugin_packages = ['myplugs.common', 'myplugs.gae','jsb.plugs.core', 'jsb.plugs.gae', 'jsb.plugs.common', 'jsb.plugs.wave', 'jsb.plugs.myplugs.common', 'jsb.plugs.myplugs.gae']
    ongae = True
except ImportError: plugin_packages = ['myplugs.common', 'myplugs.socket', 'jsb.plugs.core', 'jsb.plugs.common', 'jsb.plugs.socket', 'jsb.plugs.myplugs.common', 'jsb.plugs.myplugs.socket']
# plugins that always get (re)loaded when no plugin state is available
default_plugins = ['jsb.plugs.core.admin', 'jsb.plugs.core.dispatch', 'jsb.plugs.core.plug']
logging.info("boot - default plugins are %s" % str(default_plugins))
# module level state .. all of these get filled in by boot()
loaded = False          # whether the plugin packages have been loaded
cmndtable = None        # Persist: command name -> module name
pluginlist = None       # Persist: list of available plugin names
callbacktable = None    # Persist: event type -> list of module names
cmndperms = None        # Config: command name -> permissions
timestamps = None       # Persist: plugin file -> last seen mtime
cpy = copy.deepcopy     # shorthand used by savepluginlist()
## boot function

def boot(ddir=None, force=False, encoding="utf-8", umask=None, saveperms=True, fast=False):
    """ initialize the bot.

        ddir      .. datadir to boot from (default: the configured datadir)
        force     .. force a reload of all plugin packages
        encoding  .. default encoding to install on the interpreter
        umask     .. permissions to enforce on the datadir
        saveperms .. whether the command permissions config gets written
        fast      .. skip loading of the myplugs packages
    """
    logging.info("booting ..")
    from jsb.lib.datadir import getdatadir, setdatadir
    if ddir: setdatadir(ddir)
    origdir = ddir  # NOTE(review): assigned but never used below
    ddir = ddir or getdatadir()
    if not ddir: logging.error("can't determine datadir to boot from") ; os._exit(1)
    if not ddir in sys.path: sys.path.append(ddir)
    makedirs(ddir)
    # use the system wide rundir when it exists and we run as the jsb user
    if os.path.isdir("/var/run/jsb") and botuser() == "jsb": rundir = "/var/run/jsb"
    else: rundir = ddir + os.sep + "run"
    try:
        # refuse to run as root
        if os.getuid() == 0:
            print "don't run the bot as root"
            os._exit(1)
    except AttributeError: pass  # no os.getuid() on this platform (e.g. windows)
    try:
        # write a pid file .. best effort, failure is not fatal
        k = open(rundir + os.sep + 'jsb.pid','w')
        k.write(str(os.getpid()))
        k.close()
    except IOError: pass
    try:
        if not ongae:
            # python 2 trick: reload(sys) brings setdefaultencoding() back
            reload(sys)
            sys.setdefaultencoding(encoding)
    except (AttributeError, IOError): pass
    try:
        if not umask: checkpermissions(getdatadir(), 0700)
        else: checkpermissions(getdatadir(), umask)
    except: handle_exception()
    global loaded
    global cmndtable
    global pluginlist
    global callbacktable
    global cmndperms
    global plugcommands  # NOTE(review): declared global but never assigned here
    global timestamps
    # create the persisted tables when they are not there yet
    if not cmndtable: cmndtable = Persist(rundir + os.sep + 'cmndtable')
    if not pluginlist: pluginlist = Persist(rundir + os.sep + 'pluginlist')
    if not callbacktable: callbacktable = Persist(rundir + os.sep + 'callbacktable')
    if not timestamps: timestamps = Persist(rundir + os.sep + 'timestamps')
    if not cmndperms: cmndperms = Config('cmndperms', ddir=ddir)
    from jsb.lib.plugins import plugs
    # when a table is empty (or force is given) all plugin packages get
    # loaded once and the tables are regenerated from the live registries
    if not cmndtable.data or force:
        plugs.loadall(plugin_packages, force=True)
        loaded = True
        savecmndtable(saveperms=saveperms)
    if not pluginlist.data or force:
        if not loaded:
            plugs.loadall(plugin_packages, force=True)
            loaded = True
        savepluginlist()
    if not callbacktable.data or force:
        if not loaded:
            plugs.loadall(plugin_packages, force=True)
            loaded = True
        savecallbacktable()
    if not loaded:
        # all tables were present .. only bring up the core plugins
        logging.info("boot - plugins not loaded .. loading defaults")
        for plug in default_plugins:
            plugs.reload(plug, showerror=True, force=True)
        if not fast:
            if ongae: plugs.loadall(["myplugs.common", "myplugs.gae", "jsb.plugs.myplugs.gae", "jsb.plugs.myplugs.common"], force=True)
            else: plugs.loadall(["myplugs.common", "myplugs.socket", "jsb.plugs.myplugs.socket", "jsb.plugs.myplugs.common"], force=True)
        else: logging.error("skipped loading of myplugs")
    # reload plugin files that changed on disk since the last boot
    changed = checktimestamps()
    if changed:
        logging.warn("boot - files changed %s" % str(changed))
        for plugfile in changed: plugs.reloadfile(plugfile, force=True)
    logging.warn("boot - done")
## filestamps stuff

def checktimestamps(d=None):
    """ recursively scan directory d (default: <datadir>/myplugs) for .py
        files whose mtime is newer than the recorded one.

        returns the list of changed file paths and updates the timestamps
        Persist object as a side effect.
    """
    global timestamps
    changed = []
    if not d: d = getdatadir() + os.sep + "myplugs"
    # FIX: a missing directory used to make os.listdir() raise OSError
    if not os.path.isdir(d): return changed
    for f in os.listdir(d):
        path = d + os.sep + f
        if os.path.isdir(path): changed.extend(checktimestamps(path))
        if not f.endswith(".py"): continue
        try:
            t = os.path.getmtime(path)
            if t > timestamps.data[path]: changed.append(path) ; timestamps.data[path] = t
        except KeyError: timestamps.data[path] = os.path.getmtime(path) ; changed.append(path)
    if changed: timestamps.save()
    return changed
## commands related commands

def savecmndtable(modname=None, saveperms=True):
    """ save command -> plugin module mapping to the db backend.

        modname   .. only update entries of this module (None rebuilds all)
        saveperms .. whether to write the cmndperms config as well
    """
    global cmndtable
    if not cmndtable.data: cmndtable.data = {}
    # single-module update starts from the existing table,
    # otherwise the mapping is rebuilt from scratch
    if modname: target = LazyDict(cmndtable.data)
    else: target = LazyDict()
    global cmndperms
    #if not cmndperms.data: cmndperms.data = {}
    from jsb.lib.commands import cmnds
    assert cmnds
    if cmnds.subs:
        # a sub command maps to its module only when it has exactly one parent
        for name, clist in cmnds.subs.iteritems():
            if name:
                if clist and len(clist) == 1: target[name] = clist[0].modname
    for cmndname, c in cmnds.iteritems():
        # NOTE: parses as (modname and c.modname != modname) or cmndname == "subs"
        if modname and c.modname != modname or cmndname == "subs": continue
        if cmndname and c:
            target[cmndname] = c.modname
            cmndperms[cmndname] = c.perms
    logging.warn("saving command table")
    assert cmndtable
    assert target
    cmndtable.data = target
    cmndtable.save()
    if saveperms:
        logging.warn("saving command perms")
        cmndperms.save()
def removecmnds(modname):
    """ remove commands belonging to modname from the cmndtable.

        modname .. module whose commands should be dropped
    """
    global cmndtable
    assert cmndtable
    from jsb.lib.commands import cmnds
    assert cmnds
    for cmndname, c in cmnds.iteritems():
        # FIX: pop with a default so a command that is registered but not in
        # the persisted table can't abort the sweep with a KeyError
        if c.modname == modname: cmndtable.data.pop(cmndname, None)
    cmndtable.save()
def getcmndtable():
    """ return the command -> plugin module mapping, booting the bot first
        when the table has not been created yet. """
    global cmndtable
    if cmndtable: return cmndtable.data
    boot()
    return cmndtable.data
## callbacks related commands

def savecallbacktable(modname=None):
    """ save event type -> callback module list to the db backend.

        modname .. only update entries of this module (None rebuilds all)
    """
    if modname: logging.warn("boot - module name is %s" % modname)
    global callbacktable
    assert callbacktable
    if not callbacktable.data: callbacktable.data = {}
    # single-module update starts from the existing table
    if modname: target = LazyDict(callbacktable.data)
    else: target = LazyDict()
    from jsb.lib.callbacks import first_callbacks, callbacks, last_callbacks, remote_callbacks
    # walk all callback stages and record which module provides which type
    for cb in [first_callbacks, callbacks, last_callbacks, remote_callbacks]:
        for type, cbs in cb.cbs.iteritems():
            for c in cbs:
                if modname and c.modname != modname: continue
                if not target.has_key(type): target[type] = []
                if not c.modname in target[type]: target[type].append(c.modname)
    logging.warn("saving callback table")
    assert callbacktable
    assert target
    callbacktable.data = target
    callbacktable.save()
def removecallbacks(modname):
    """ remove callbacks belonging to modname from the callbacktable.

        modname .. module whose callbacks should be dropped
    """
    global callbacktable
    assert callbacktable
    from jsb.lib.callbacks import first_callbacks, callbacks, last_callbacks, remote_callbacks
    # walk all callback stages and strip this module out of every type list
    for cb in [first_callbacks, callbacks, last_callbacks, remote_callbacks]:
        for type, cbs in cb.cbs.iteritems():
            for c in cbs:
                if not c.modname == modname: continue
                if not callbacktable.data.has_key(type): callbacktable.data[type] = []
                if c.modname in callbacktable.data[type]: callbacktable.data[type].remove(c.modname)
    logging.warn("saving callback table")
    assert callbacktable
    callbacktable.save()
def getcallbacktable():
    """ return the event type -> callback module mapping, booting the bot
        first when the table has not been created yet. """
    global callbacktable
    if callbacktable: return callbacktable.data
    boot()
    return callbacktable.data
## plugin list related commands

def savepluginlist(modname=None):
    """ save a list of available plugins to db backend.

        modname .. only add plugins of this module (None rebuilds the list)
    """
    global pluginlist
    if not pluginlist.data: pluginlist.data = []
    # single-module update extends a copy of the existing list
    if modname: target = cpy(pluginlist.data)
    else: target = []
    from jsb.lib.commands import cmnds
    assert cmnds
    for cmndname, c in cmnds.iteritems():
        if modname and c.modname != modname: continue
        if c and not c.plugname: logging.info("boot - not adding %s to pluginlist" % cmndname) ; continue
        if c and c.plugname not in target: target.append(c.plugname)
    # NOTE(review): this assert fires (AssertionError) when no command matched,
    # e.g. for a module that registers no commands - confirm that is intended
    assert target
    target.sort()
    logging.warn("saving plugin list")
    assert pluginlist
    pluginlist.data = target
    pluginlist.save()
def remove_plugin(modname):
    """ remove a plugin's commands, callbacks and pluginlist entry.

        modname .. dotted module name .. the last component is what gets
                   removed from the plugin list
    """
    removecmnds(modname)
    removecallbacks(modname)
    global pluginlist
    # FIX: the bare except hid every error .. only swallow the expected
    # cases: pluginlist not booted yet (AttributeError) or the plugin not
    # being in the list (ValueError)
    try:
        pluginlist.data.remove(modname.split(".")[-1])
        pluginlist.save()
    except (AttributeError, ValueError): pass
def clear_tables():
    """ reset the persisted command, callback and plugin tables to empty
        and write them out. """
    global cmndtable
    global callbacktable
    global pluginlist
    for table, empty in ((cmndtable, {}), (callbacktable, {}), (pluginlist, [])):
        table.data = empty
        table.save()
def getpluginlist():
    """ return the list of available plugins, booting the bot first when
        the list has not been created yet. """
    global pluginlist
    if pluginlist: return pluginlist.data
    boot()
    return pluginlist.data
## update_mod command

def update_mod(modname):
    """ update the persisted tables for a freshly (re)loaded module. """
    savecallbacktable(modname)
    # don't rewrite the perms config for a single module update
    savecmndtable(modname, saveperms=False)
    savepluginlist(modname)
def whatcommands(plug):
    """ return the names of all commands whose module name contains plug. """
    table = getcmndtable()
    return [cmnd for cmnd, mod in table.iteritems() if mod and plug in mod]
def getcmndperms():
    """ return the command -> permissions Config object (None before boot()). """
    return cmndperms
| Python |
# jsb/persist.py
#
#
"""
allow data to be written to disk or BigTable in JSON format. creating
the persisted object restores data.
"""
## jsb imports
from jsb.utils.trace import whichmodule, calledfrom
from jsb.utils.lazydict import LazyDict
from jsb.utils.exception import handle_exception
from jsb.utils.name import stripname
from jsb.utils.locking import lockdec
from datadir import getdatadir
from cache import get, set, delete
## simplejson imports
from jsb.imports import getjson
json = getjson()
## basic imports
import thread
import logging
import os
import types
import copy
import sys
import time
## global list to keeptrack of what persist objects need to be saved
needsaving = []
## try google first
try:
import waveapi
from google.appengine.ext import db
import google.appengine.api.memcache as mc
from google.appengine.api.datastore_errors import Timeout
logging.debug("persist - using BigTable based Persist")
    ## JSONindb class

    class JSONindb(db.Model):
        """ datastore model to store json files in .. keyed by filename. """
        modtime = db.DateTimeProperty(auto_now=True, indexed=False)        # last modification time
        createtime = db.DateTimeProperty(auto_now_add=True, indexed=False) # creation time
        filename = db.StringProperty()                                     # logical filename (also used as key name)
        content = db.TextProperty(indexed=False)                           # the json payload
    ## Persist class

    class Persist(object):

        """ persist data attribute to database backed JSON file.

            lookup order on init: memory cache (type == "mem"), memcache,
            then the JSONindb datastore entity.
        """

        def __init__(self, filename, default={}, type="cache"):
            """ Persist constructor.

                filename .. datastore key name / cache key
                default  .. initial data when nothing is stored yet
                            (NOTE(review): mutable default argument, shared
                            between calls - confirm no caller mutates it)
                type     .. "mem" uses the in-process cache, anything else
                            goes through memcache/datastore
            """
            self.plugname = calledfrom(sys._getframe())
            if 'lib' in self.plugname: self.plugname = calledfrom(sys._getframe(1))
            # NOTE(review): self.fn can't exist yet on a fresh instance,
            # so this delete is always a no-op
            try: del self.fn
            except: pass
            self.fn = unicode(filename.strip()) # filename to save to
            self.logname = os.sep.join(self.fn.split(os.sep)[-2:])
            self.type = type
            self.counter = mcounter = mc.incr(self.fn, 1, "counters", 0)
            self.key = None
            self.obj = None
            self.init(default)

        def init(self, default={}, filename=None):
            """ load the data .. from memory cache, memcache or datastore.

                NOTE(review): the filename argument is accepted but unused.
            """
            cachetype = ""
            mcounter = mc.incr(self.fn, 1, "counters")
            logging.debug("persist - %s - %s" % (self.counter, mcounter))
            if self.type == "mem":
                tmp = get(self.fn) ; cachetype = "mem"
                if tmp: self.data = tmp ; logging.debug("persist - %s - loaded %s" % (cachetype, self.fn)) ; return
            jsontxt = mc.get(self.fn) ; cachetype = "cache"
            # wrap dict defaults in a LazyDict so attribute access works
            if type(default) == types.DictType:
                default2 = LazyDict()
                default2.update(default)
            else: default2 = copy.deepcopy(default)
            if jsontxt is None:
                # not in memcache .. fall back to the datastore
                logging.debug("persist - %s - loading from db" % self.logname)
                try:
                    # retry once on datastore Timeout
                    try: self.obj = JSONindb.get_by_key_name(self.fn)
                    except Timeout: self.obj = JSONindb.get_by_key_name(self.fn)
                except Exception, ex:
                    # bw compat sucks
                    try: self.obj = JSONindb.get_by_key_name(self.fn)
                    except Exception, ex:
                        handle_exception()
                        self.data = default2
                        return
                if self.obj == None:
                    # nothing stored yet .. start from the default
                    logging.debug("persist - %s - no entry found" % self.logname)
                    self.obj = JSONindb(key_name=self.fn)
                    self.obj.content = unicode(default)
                    self.data = default2
                    return
                jsontxt = self.obj.content
                if jsontxt: mc.set(self.fn, jsontxt)
                logging.debug('persist - jsontxt is %s' % jsontxt)
                cachetype = "file"
            else: cachetype = "cache"
            logging.debug("persist - %s - loaded %s" % (cachetype, self.fn))
            self.data = json.loads(jsontxt)
            if type(self.data) == types.DictType:
                d = LazyDict()
                d.update(self.data)
                self.data = d
            # NOTE(review): this first cfrom computation is dead code - it is
            # unconditionally overwritten by the whichmodule(2) call below
            cfrom = whichmodule()
            if 'jsb' in cfrom:
                cfrom = whichmodule(2)
                if 'jsb' in cfrom: cfrom = whichmodule(3)
            cfrom = whichmodule(2)
            if 'jsb' in cfrom:
                cfrom = whichmodule(3)
                if 'jsb' in cfrom: cfrom = whichmodule(4)
            if not 'run' in self.fn:
                if cachetype: logging.debug("persist - %s - loaded %s (%s) - %s - %s" % (cachetype, self.logname, len(jsontxt), self.data.tojson(), cfrom))
                else: logging.debug("persist - db - loaded %s (%s) - %s - %s" % (self.logname, len(jsontxt), self.data.tojson(), cfrom))
            if self.data:
                set(self.fn, self.data)

        def sync(self):
            """ push the in-memory data to memcache without a datastore write. """
            logging.info("persist - syncing %s" % self.fn)
            data = json.dumps(self.data)
            mc.set(self.fn, data)
            # NOTE(review): delete() comes from jsb cache module - confirm
            # it takes (name, value) and invalidates the local cache entry
            delete(self.fn, self.data)
            return data

        def save(self, filename=None):
            """ save json data to database (transactional put) and memcache. """
            fn = filename or self.fn
            bla = json.dumps(self.data)
            # a new entity is needed when saving under another name or when
            # nothing was loaded from the datastore
            if filename or self.obj == None:
                self.obj = JSONindb(key_name=fn)
                self.obj.content = bla
            else: self.obj.content = bla
            self.obj.filename = fn
            from google.appengine.ext import db
            key = db.run_in_transaction(self.obj.put)
            logging.debug("persist - transaction returned %s" % key)
            mc.set(fn, bla)
            delete(fn, self.data)
            cfrom = whichmodule(0)
            if 'jsb' in cfrom:
                cfrom = whichmodule(2)
                if 'jsb' in cfrom: cfrom = whichmodule(3)
            logging.info('persist - %s - saved %s (%s)' % (cfrom, fn, len(bla)))

        def upgrade(self, filename):
            """ re-init this object's data under a new filename. """
            self.init(self.data, filename=filename)
except ImportError:
## file based persist
logging.debug("using file based Persist")
## defines
persistlock = thread.allocate_lock()
persistlocked = lockdec(persistlock)
## imports for shell bots
from jsb.lib.cache import get, set
import fcntl
## classes
class Persist(object):
""" persist data attribute to JSON file. """
def __init__(self, filename, default=None, init=True):
""" Persist constructor """
self.fn = filename.strip() # filename to save to
self.logname = os.sep.join(self.fn.split(os.sep)[-2:])
self.lock = thread.allocate_lock() # lock used when saving)
self.data = LazyDict() # attribute to hold the data
if init:
if default == None: default = LazyDict()
self.init(default)
self.count = 0
def init(self, default={}, filename=None):
""" initialize the data. """
logging.debug('persist - reading %s' % self.fn)
cfrom = whichmodule(2)
if 'jsb' in cfrom:
cfrom = whichmodule(3)
if 'jsb' in cfrom: cfrom = whichmodule(4)
gotcache = False
cachetype = "cache"
try:
data = get(self.fn)
if not data:
datafile = open(self.fn, 'r')
data = datafile.read()
datafile.close()
cachetype = "file"
else:
if type(data) == types.DictType:
d = LazyDict()
d.update(data)
else: d = data
self.data = d
cachetype = "mem"
logging.debug("persist - %s - loaded %s" % (cachetype, self.fn))
if not 'run' in self.fn:
size = len(d)
logging.debug("persist - mem - loaded %s (%s) - %s - %s" % (self.logname, size, self.data.tojson(), cfrom))
return
except IOError, ex:
if not 'No such file' in str(ex):
logging.error('persist - failed to read %s: %s' % (self.logname, str(ex)))
raise
else:
logging.debug("persist - %s doesn't exist yet" % self.logname)
return
try:
self.data = json.loads(data)
set(self.fn, self.data)
if type(self.data) == types.DictType:
d = LazyDict()
d.update(self.data)
self.data = d
logging.debug("persist - %s - loaded %s" % (cachetype, self.fn))
if not 'run' in self.fn:
size = len(data)
if gotcache: logging.debug("persist - cache - loaded %s (%s) - %s - %s" % (self.logname, size, self.data.tojson(), cfrom))
else: logging.debug("persist - file - loaded %s (%s) - %s - %s" % (self.logname, size, self.data.tojson(), cfrom))
except Exception, ex:
logging.error('persist - ERROR: %s' % self.fn)
raise
def upgrade(self, filename):
self.init(self.data, filename=filename)
self.save(filename)
def get(self):
return json.loads(get(self.fn))
def sync(self):
logging.info("persist - syncing %s" % self.fn)
set(self.fn, self.data)
return self.data
def save(self):
global needsaving
for p in needsaving:
try: p.dosave(); needsaving.remove(p)
except (OSError, IOError): logging.error("persist - failed to save %s" % p)
try: self.dosave()
except IOError:
self.sync()
if self not in needsaving: needsaving.append(self)
time.sleep(0.1)
for p in needsaving:
try: p.dosave(); needsaving.remove(p)
except (OSError, IOError): logging.error("persist - failed to save %s" % p)
def dosave(self):
""" persist data attribute. """
try:
fn = self.fn
d = []
if fn.startswith(os.sep): d = [os.sep,]
for p in fn.split(os.sep)[:-1]:
if not p: continue
d.append(p)
pp = os.sep.join(d)
if not os.path.isdir(pp):
logging.info("persist - creating %s dir" % pp)
os.mkdir(pp)
tmp = fn + '.tmp' # tmp file to save to
datafile = open(tmp, 'w')
fcntl.flock(datafile, fcntl.LOCK_EX | fcntl.LOCK_NB)
json.dump(self.data, datafile, indent=True)
set(fn, self.data)
fcntl.flock(datafile, fcntl.LOCK_UN)
datafile.close()
try: os.rename(tmp, fn)
except OSError:
handle_exception(tmp + ' ' + fn)
os.remove(fn)
os.rename(tmp, fn)
if 'lastpoll' in self.logname: logging.debug('persist - %s saved (%s)' % (self.logname, len(self.data)))
else: logging.info('persist - %s saved (%s)' % (self.logname, len(self.data)))
except IOError, ex: logging.warn("persist - not saving %s: %s" % (self.fn, str(ex))) ; raise
except: handle_exception()
finally: pass
class PlugPersist(Persist):

    """ persist plug related data .. stored in <datadir>/plugs/<plugname>/<filename>. """

    def __init__(self, filename, default=None):
        # determine the calling plugin so its data lands in its own directory
        # NOTE(review): the default argument is accepted but not passed on
        # to Persist - confirm that is intended
        plugname = calledfrom(sys._getframe())
        Persist.__init__(self, getdatadir() + os.sep + 'plugs' + os.sep + stripname(plugname) + os.sep + stripname(filename))
| Python |
# jsb/threads.py
#
#
""" own threading wrapper. """
## jsb imports
from jsb.utils.exception import handle_exception
## basic imports
import threading
import re
import time
import thread
import logging
import uuid
## defines

# REs to extract a readable name from repr(method) / repr(function),
# used by getname() to label threads
methodre = re.compile('method\s+(\S+)', re.I)
funcre = re.compile('function\s+(\S+)', re.I)
## Botcommand class

class Botcommand(threading.Thread):

    """ daemon thread for running bot commands. """

    def __init__(self, group, target, name, args, kwargs):
        threading.Thread.__init__(self, None, target, name, args, kwargs)
        self.name = name
        # args[1] is assumed to be the ievent the command was triggered by
        self.ievent = args[1]
        self.setDaemon(True)

    def run(self):
        """ run the bot command and close the event's queues afterwards. """
        try:
            # Thread.run() returns None .. the command's output travels
            # through the ievent queues closed below
            result = threading.Thread.run(self)
            time.sleep(0.01)
            #self.ievent.ready()
            if self.ievent.closequeue:
                logging.debug('threads- closing queue for %s' % self.ievent.userhost)
                # a None sentinel tells the queue consumers to stop
                if self.ievent.queues:
                    for i in self.ievent.queues: i.put_nowait(None)
                if self.ievent.outqueue: self.ievent.outqueue.put_nowait(None)
                if self.ievent.inqueue: self.ievent.inqueue.put_nowait(None)
                if self.ievent.resqueue: self.ievent.resqueue.put_nowait(None)
        except Exception, ex:
            handle_exception(self.ievent)
            time.sleep(1)
## Thr class

class Thr(threading.Thread):

    """ thread wrapper .. daemonized so it won't block interpreter exit. """

    def __init__(self, group, target, name, args, kwargs):
        threading.Thread.__init__(self, None, target, name, args, kwargs)
        self.setDaemon(True)
        self.name = name

    def run(self):
        """ run the thread, logging any exception instead of letting it escape. """
        try:
            logging.debug('threads - running thread %s' % self.name)
            threading.Thread.run(self)
        except Exception, ex:
            handle_exception()
            time.sleep(1)
## getname function

def getname(func):
    """ get the name of a function or (un)bound method from its repr.

        falls back to str(func) when neither a method nor a function
        pattern matches.
    """
    text = str(func)
    # self-contained: compile the patterns here instead of relying on the
    # module level methodre/funcre globals; try the method form first
    for pattern in (r'method\s+(\S+)', r'function\s+(\S+)'):
        found = re.search(pattern, text, re.I)
        if found: return found.group(1)
    return text
## start_new_thread function
def start_new_thread(func, arglist, kwargs={}):
""" start a new thread .. set name to function/method name."""
if not kwargs: kwargs = {}
if not 'name' in kwargs:
name = getname(func)
if not name: name = str(func)
else: name = kwargs['name']
try:
thread = Thr(None, target=func, name=name, args=arglist, kwargs=kwargs)
thread.start()
return thread
except thread.error, ex:
if "can't start" in str(ex):
logging.error("threads - thread space is exhausted - can't start thread %s" % name)
handle_exception()
time.sleep(3)
except:
handle_exception()
time.sleep(3)
## start_bot_command function

def start_bot_command(func, arglist, kwargs=None):
    """ start a new Botcommand thread .. set name to function/method name.

        returns the started Botcommand thread, or None on failure.
    """
    if not kwargs: kwargs = {}  # FIX: no longer a shared mutable default
    try:
        name = getname(func)
        if not name: name = 'noname'
        # local renamed from 'thread' so it doesn't shadow the thread module
        runner = Botcommand(group=None, target=func, name=name, args=arglist, kwargs=kwargs)
        runner.start()
        return runner
    except:
        handle_exception()
        time.sleep(1)
def threaded(func):
    """ threading decorator .. runs func in its own thread.

        the wrapper returns None: func's return value is discarded.
    """
    from functools import wraps
    @wraps(func)  # FIX: preserve func's name/docstring on the wrapper
    def threadedfunc(*args, **kwargs):
        start_new_thread(func, args, kwargs)
    return threadedfunc
| Python |
# jsb/lib/config.py
#
#
""" config module. config is stored as item = JSON pairs. """
## jsb imports
from jsb.utils.trace import whichmodule, calledfrom
from jsb.utils.lazydict import LazyDict
from jsb.utils.exception import handle_exception
from jsb.utils.name import stripname
from datadir import getdatadir
from errors import CantSaveConfig, NoSuchFile
from jsb.utils.locking import lockdec
## simplejson imports
from jsb.imports import getjson
json = getjson()
## basic imports
import sys
import os
import types
import thread
import logging
import uuid
import thread
import getpass
import copy
## locks

savelock = thread.allocate_lock()
savelocked = lockdec(savelock)  # decorator serialising Config.tofile()

## defines

cpy = copy.deepcopy  # deep copy shorthand
## classes
class Config(LazyDict):
"""
config class is a dict containing json strings. is writable to file
and human editable.
"""
def __init__(self, filename=None, verbose=False, input={}, ddir=None, *args, **kw):
LazyDict.__init__(self, input, *args, **kw)
self.filename = filename or 'mainconfig'
self.datadir = ddir or getdatadir()
self.dir = self.datadir + os.sep + 'config'
self.cfile = self.dir + os.sep + self.filename
logging.debug("config - filename is %s" % self.cfile)
self.jsondb = None
try: import waveapi ; self.isdb = True
except ImportError: self.isdb = False
if not self.comments: self.comments = {}
try:
try: self.fromfile(self.cfile)
except IOError:
logging.warn("can't read config from %s" % self.cfile)
import waveapi
from persist import Persist
self.jsondb = Persist(cfile)
self.update(self.jsondb.data)
self.isdb = True
logging.debug("config - fromdb - %s - %s" % (self.cfile, str(self)))
except ImportError:
handle_exception()
self.isdb = False
self.init()
if not self.owner: self.owner = []
if not self.uuid: self.uuid = str(uuid.uuid4())
def __deepcopy__(self, a):
""" accessor function. """
return Config(input=self)
def __getitem__(self, item):
""" accessor function. """
if not self.has_key(item): return None
else: return dict.__getitem__(self, item)
def merge(self, cfg):
""" merge in another cfg. """
f = self.cfile
self.update(cfg)
self.cfile = f
def set(self, item, value):
""" set item to value. """
dict.__setitem__(self, item, value)
def fromdb(self):
""" read config from database. """
from jsb.lib.persist import Persist
logging.info("config - fromdb - %s" % self.cfile)
tmp = Persist(self.cfile)
self.update(tmp.data)
def todb(self):
""" save config to database. """
cp = dict(self)
del cp['jsondb']
if not self.jsondb:
from jsb.lib.persist import Persist
self.jsondb = Persist(self.cfile)
self.jsondb.data = cp
self.jsondb.save()
def fromfile(self, filename=None):
""" read config object from filename. """
curline = ""
fname = filename or self.cfile
if not fname: raise Exception("config - %s - %s" % (self.cfile, self.dump()))
if not os.path.exists(fname): return False
comment = ""
for line in open(fname, 'r'):
curline = line
curline = curline.strip()
if curline == "": continue
if curline.startswith('#'): comment = curline; continue
if True:
try:
key, value = curline.split('=', 1)
kkey = key.strip()
self[kkey] = json.loads(unicode(value.strip()))
if comment: self.comments[kkey] = comment
comment = ""
except ValueError: logging.warn("config - skipping line - unable to parse: %s" % line)
#self.cfile = fname
return
@savelocked
def tofile(self, filename=None):
""" save config object to file. """
if not filename: filename = self.cfile
try: from os import mkdir
except ImportError:
logging.debug("can't save %s to file .. os.mkdir() not suported" % filename)
return
logging.debug("config - saving %s" % filename)
if filename.startswith(os.sep): d = [os.sep,]
else: d = []
for p in filename.split(os.sep)[:-1]:
if not p: continue
d.append(p)
ddir = os.sep.join(d)
if not os.path.isdir(ddir):
logging.debug("persist - creating %s dir" % ddir)
try: os.mkdir(ddir)
except OSError, ex:
logging.warn("persist - not saving - failed to make %s - %s" % (ddir, str(ex)))
return
written = []
curitem = None
try:
configtmp = open(filename + '.tmp', 'w')
teller = 0
keywords = self.keys()
keywords.sort()
for keyword in keywords:
value = self[keyword]
if keyword in written: continue
#if keyword == 'name': continue
if keyword == 'createdfrom': continue
if keyword == 'cfile': continue
if keyword == 'filename': continue
if keyword == 'dir': continue
if keyword == 'jsondb': continue
if keyword == 'isdb': continue
if keyword == 'optionslist': continue
if keyword == 'gatekeeper': continue
if keyword == "comments": continue
if self.comments and self.comments.has_key(keyword):
configtmp.write(self.comments[keyword] + u"\n")
curitem = keyword
try: configtmp.write('%s = %s\n' % (keyword, json.dumps(value)))
except TypeError: logging.error("config - %s - can't serialize %s" % (filename, keyword)) ; continue
teller += 1
configtmp.write("\n")
configtmp.close()
os.rename(filename + '.tmp', filename)
return teller
except Exception, ex:
handle_exception()
print "ERROR WRITING %s CONFIG FILE: %s .. %s" % (self.cfile, str(ex), curitem)
def save(self):
""" save the config. """
logging.info("config - save called from %s" % calledfrom(sys._getframe(1)))
if self.isdb: self.todb()
else: self.tofile()
def load(self, verbose=False):
""" load the config file. """
if self.isdb: self.fromdb()
else: self.fromfile()
if verbose: logging.debug('config - %s' % self.dump())
def init(self):
""" initialize the config object. """
if self.filename == 'mainconfig':
self.comments["whitelist"] = "# whitelist used to allow ips .. bot maintains this"
self.setdefault("whitelist", [])
self.comments["blacklist"] = "# blacklist used to deny ips .. bot maintains this"
self.setdefault("blacklist", [])
self.setdefault('owner', [])
self.comments["loglist"] = "# loglist .. maintained by the bot."
self.setdefault('loglist', [])
self.comments["loglevel"] = "# loglevel of all bots"
self.setdefault('loglevel', "warn")
self.comments["loadlist"] = "# loadlist .. not used yet."
self.setdefault('loadlist', [])
self.comments["quitmsg"] = "# message to send on quit"
self.setdefault('quitmsg', "http://jsonbot.googlecode.com")
self.comments["dotchars"] = "# characters to used as seperator"
self.setdefault('dotchars', ", ")
self.comments["floodallow"] = "# whether the bot is allowed to flood."
self.setdefault('floodallow', 0)
self.comments["auto_register"] = "# enable automatic registration of new users"
self.setdefault('auto_register', 0)
self.comments["guestasuser"] = "# enable this to give new users the USER permission besides GUEST"
self.setdefault('guestasuser', 0)
self.comments["app_id"] = "# application id used by appengine"
self.setdefault('app_id', "jsonbot")
self.comments["appname"] = "# application name as used by the bot"
self.setdefault('appname', "JSONBOT")
self.comments["domain"] = "# domain .. used for WAVE"
self.setdefault('domain', "")
#self.cfile = self.dir + os.sep + self.filename
self['createdfrom'] = whichmodule()
self.comments['datadir'] = "# directory to store bot data in."
self.comments["owner"] = "# owner of the bot."
self.comments["uuid"] = "# bot generated uuid for this config file."
self.comments["user"] = "# user used to login on xmpp networks."
self.comments["host"] = "# host part of the user, derived from user var."
self.comments["server"] = "# server to connect to (on jabber only when different that host."
self.comments["password"] = "# password to use in authing the bot."
self.comments["port"] = "# port to connect to (IRC)."
self.comments["ssl"] = "# whether to enable ssl (set to 1 to enable)."
self.comments["ipv6"] = "# whether to enable ssl (set to 1 to enable)."
self.comments["name"] = "# the name of the bot."
self.comments["disable"] = "# set this to 0 to enable the bot."
self.comments["followlist"] = "# who to follow on the bot .. bot maintains this list."
self.comments["networkname"] = "# networkname .. not used right now."
self.comments["type"] = "# the bot's type."
self.comments["nick"] = "# the bot's nick."
def reload(self):
""" reload the config file. """
self.load()
def ownercheck(userhost):
    """ check whether userhost is an owner of the main bot config. """
    if not userhost: return False
    # FIX: the original referenced an undefined global 'cfg', raising
    # NameError as soon as a non-empty userhost was passed in .. use the
    # shared main config instead
    if userhost in getmainconfig()['owner']: return True
    return False
mainconfig = None  # shared main Config instance, created lazily

def getmainconfig():
    """ return the shared main Config instance, creating it on first use. """
    global mainconfig
    if mainconfig: return mainconfig
    mainconfig = Config()
    return mainconfig
# default config template written out for new IRC bots by makedefaultconfig()
irctemplate = """# welcome to JSONBOT .. this file can be written to by the bot
# the name of the bot
name = "default-irc"
# channels to join .. not implemented yet .. use /msg bot !join #channel
channels = []
# disable .. set this to 0 to enable the bot
disable = 1
# domain .. not used yet
domain = ""
# who to follow on the bot .. bot maintains this list
followlist = []
# owner of the bot .. is list of userhosts
owner = ["~dev@127.0.0.1"]
# port to connect to
port = 6667
# networkname .. not used right now
networkname = null
# nick .. bot's nick
nick = "jsb"
# whether this is a ipv6 bot
ipv6 = null
# server to connect to
server = ""
# whether this is a ssl bot
ssl = null
# bot type
type = "irc"
"""

# default config template written out for new socket-xmpp bots by makedefaultconfig()
xmpptemplate = """# welcome to JSONBOT .. this file can be written to by the bot
# name of the bot
name = "default-sxmpp"
# channels to join .. not implemented yet .. use /msg bot !join <conference>
channels = []
# disable .. set this to 0 to enable the bot
disable = 1
# domain .. not used yet
domain = ""
# who to follow on the bot .. bot maintains this list
followlist = []
# this is the host part of the user variable .. is generated by the bot
host = "localhost"
# owner of the bot .. list of JIDS
owner = ["dunk@localhost"]
# networkname .. not used right now
networkname = null
# nick .. bot's nick
nick = "jsb"
# password used
password = "passje"
# server part of the user variable .. can be set to connect to different server then host
server = ""
# type of bot .. sxmpp stands for socket xmpp to differentiate from GAE xmpp
type = "sxmpp"
# the user as which the bot should connect to the server
user = "dev@localhost"
"""
def makedefaultconfig(type, ddir=None):
    """ write a default config file for a bot of the given type.

        type .. "irc" or "sxmpp"
        ddir .. datadir to use (default: the configured datadir)

        the file is created as <datadir>/config/fleet/default-<type>/config.
        raises Exception for unknown bot types.
    """
    # FIX: pick the template before opening the file .. an unknown type used
    # to leave an empty config file and an unclosed handle behind
    if type == "irc": template = irctemplate
    elif type == "sxmpp": template = xmpptemplate
    else: raise Exception("no such bot type: %s" % type)
    filename = 'config'
    datadir = ddir or getdatadir()
    dir = datadir + os.sep + 'config'
    ttype = "default-%s" % type
    cfile = dir + os.sep + "fleet" + os.sep + ttype + os.sep + filename
    # create every missing directory leading up to the config file
    splitted = cfile.split(os.sep)
    mdir = ""
    for i in splitted[:-1]:
        mdir += "%s%s" % (i, os.sep)
        if not os.path.isdir(mdir): os.mkdir(mdir)
    logging.debug("config - filename is %s" % cfile)
    f = open(cfile, "w")
    try: f.write(template)
    finally: f.close()
| Python |
# gozerbot/persistconfig.py
#
#
""" plugin related config file with commands added to the bot to config a plugin.
usage:
!plug-cfg -> shows list of all config
!plug-cfg key value -> sets value to key
!plug-cfg key -> shows list of key
!plug-cfg key add value -> adds value to list
!plug-cfg key remove value -> removes value from list
!plug-cfg key clear -> clears entire list
!plug-cfgsave -> force save configuration to disk
"""
__copyright__ = 'this file is in the public domain'
__author__ = 'Bas van Oostveen'
## jsb imports
from jsb.utils.trace import calledfrom, whichplugin
from jsb.lib.examples import examples
from jsb.lib.persist import Persist
from jsb.lib.config import Config
from jsb.imports import getjson
## basic imports
import sys
import os
import types
import time
import logging
## PersistConfigError exception

class PersistConfigError(Exception):
    """ raised on plugin configuration errors. """
    pass
## PersistConfig class
class PersistConfig(Config):
""" persist plugin configuration and create default handlers. """
def __init__(self):
self.hide = []
modname = whichplugin()
logging.debug("persistconfig - module name is %s" % modname)
self.plugname = modname.split('.')[-1]
Config.__init__(self, 'plugs' + os.sep + modname, "config")
self.modname = modname
cmndname = "%s-cfg" % self.plugname
logging.debug('persistconfig - added command %s (%s)' % (cmndname, self.plugname))
from jsb.lib.commands import cmnds, Command
cmnds[cmndname] = Command(self.modname, cmndname, self.cmnd_cfg, ['OPER', ])
examples.add(cmndname, "%s configuration" % self.plugname, cmndname)
cmndnamesave = cmndname + "save"
cmnds[cmndnamesave] = Command(self.modname, cmndname, self.cmnd_cfgsave, ['OPER',])
examples.add(cmndnamesave, "save %s configuration" % self.plugname, cmndnamesave)
## cmnds
def show_cfg(self, bot, ievent):
""" show config options. """
s = []
dumpstr = self.tojson()
logging.warn(dumpstr)
for key, optionvalue in sorted(getjson().loads(dumpstr).iteritems()):
if key in self.hide: continue
v = optionvalue
if type(v) in [str, unicode]: v = '"'+v+'"'
v = str(v)
s.append("%s=%s" % (key, v))
ievent.reply("options: " + ' .. '.join(s))
def cmnd_cfgsave(self, bot, ievent):
""" save config. """
self.save()
ievent.reply("config saved")
def cmnd_cfg_edit(self, bot, ievent, args, key, optionvalue):
""" edit config values. """
if not self.has_key(key):
ievent.reply('option %s is not defined' % key)
return
if key in self.hide: return
if type(optionvalue) == types.ListType:
if args[0].startswith("[") and args[-1].endswith("]"):
values = []
for v in ' '.join(args)[1:-1].replace(", ", ",").split(","):
if v[0]=='"' and v[-1]=='"': v = v.replace('"', '')
elif v[0]=="'" and v[-1]=="'": v = v.replace("'", "")
elif '.' in v:
try: v = float(v)
except ValueError:
ievent.reply("invalid long literal: %s" % v)
return
else:
try: v = int(v)
except ValueError:
ievent.reply("invalid int literal: %s" % v)
return
values.append(v)
self.set(key, values)
self.save()
ievent.reply("%s set %s" % (key, values))
return
command = args[0]
value = ' '.join(args[1:])
if command == "clear":
self.clear(key)
self.save()
ievent.reply("list empty")
elif command == "add":
self.append(key, value)
self.save()
ievent.reply("%s added %s" % (key, value))
elif command == "remove" or command == "del":
try:
self.remove(key, value)
self.save()
ievent.reply("%s removed" % str(value))
except ValueError: ievent.reply("%s is not in list" % str(value))
else: ievent.reply("invalid command")
return
else:
value = ' '.join(args)
try: value = type(optionvalue)(value)
except: pass
if type(value) == type(optionvalue):
self.set(key, value)
self.save()
ievent.reply("%s set" % key)
elif type(value) == types.LongType and type(option.value) == types.IntType:
self.set(key, value)
self.save()
ievent.reply("%s set" % key)
else:
ievent.reply("value %s (%s) is not of the same type as %s (%s)" % (value, type(value), optionvalue, type(optionvalue)))
def cmnd_cfg(self, bot, ievent):
""" the config (cfg) command. """
if not ievent.args:
self.show_cfg(bot, ievent)
return
argc = len(ievent.args)
key = ievent.args[0]
try: optionvalue = self[key]
except KeyError:
ievent.reply("%s option %s not found" % (self.plugname, key))
return
if key in self.hide: return
if argc == 1:
ievent.reply(str(optionvalue))
return
self.cmnd_cfg_edit(bot, ievent, ievent.args[1:], key, optionvalue)
def generic_cmnd(self, key):
""" command for editing config values. """
def func(bot, ievent):
try: optionvalue = self[key]
except KeyError:
ievent.reply("%s not found" % key)
return
if not isinstance(option, Option):
logging.warn('persistconfig - option %s is not a valid option' % key)
return
if ievent.args:
value = ' '.join(ievent.args)
try: value = type(optionvalue)(value)
except: pass
self.cmnd_cfg_edit(bot, ievent, ievent.args, key, optionvalue)
else: ievent.reply(str(optionvalue))
return func
### plugin api
def define(self, key, value=None, desc="plugin option", perm='OPER', example="", name=None, exposed=True):
""" define initial value. """
if name: name = name.lower()
if not exposed and not key in self.hide: self.hide.append(key)
if not self.has_key(key):
if name == None: name = "%s-cfg-%s" % (self.plugname, str(key))
self[key] = value
def undefine(self, key, throw=False):
""" remove a key. """
try:
del self[key]
return True
except KeyError, e:
if throw: raise
self.save()
return False
def set(self, key, value, throw=False):
""" set a key's value. """
self[key] = value
def append(self, key, value):
""" append a value. """
self[key].append(value)
def remove(self, key, value):
""" remove a value. """
self[key].remove(value)
def clear(self, key):
""" clear a value. """
self[key] = []
def get(self, key, default=None):
""" get value of key. """
try: return self[key]
except KeyError: return default
| Python |
# jsb/lib/fleet.py
#
#
""" fleet is a list of bots. """
## jsb imports
from jsb.utils.exception import handle_exception
from jsb.utils.generic import waitforqueue
from config import Config
from users import users
from plugins import plugs
from persist import Persist
from errors import NoSuchBotType, BotNotEnabled
from threads import start_new_thread
from eventhandler import mainhandler
from jsb.utils.name import stripname
from jsb.lib.factory import BotFactory
from jsb.utils.lazydict import LazyDict
## simplejson imports
from jsb.imports import getjson
json = getjson()
## basic imports
import Queue
import os
import types
import time
import glob
import logging
import threading
import thread
## classes
class FleetBotAlreadyExists(Exception):
    # raised when a bot with the same name is already registered in the fleet
    pass
## locks
from jsb.utils.locking import lockdec
lock = thread.allocate_lock()
locked = lockdec(lock)
## Fleet class
class Fleet(Persist):
"""
a fleet contains multiple bots (list of bots).
"""
def __init__(self, datadir):
Persist.__init__(self, datadir + os.sep + 'fleet' + os.sep + 'fleet.main')
if not self.data.has_key('names'): self.data['names'] = []
if not self.data.has_key('types'): self.data['types'] = {}
self.startok = threading.Event()
self.bots = []
def addnametype(self, name, type):
if name not in self.data['names']:
self.data['names'].append(name)
self.data['types'][name] = type
self.save()
return True
def loadall(self, names=[]):
""" load all bots. """
target = names or self.data.names
if not target: logging.error("fleet - no bots in fleet") ; return
else: logging.warning("fleet - loading %s" % ", ".join(target))
threads = []
bots = []
for name in target:
if not name: logging.debug("fleet - name is not set") ; continue
try:
if self.data.types[name] == "console": logging.warn("fleet- skipping console bot %s" % name) ; continue
bot = self.makebot(self.data.types[name], name)
if bot: bots.append(bot) ; self.addbot(bot)
except KeyError: continue
except BotNotEnabled: pass
except KeyError: logging.error("no type know for %s bot" % name)
except Exception, ex: handle_exception()
return bots
def avail(self):
""" return available bots. """
return self.data['names']
def getfirstbot(self, type="irc"):
""" return the first bot in the fleet. """
for bot in self.bots:
if type in bot.type: return bot
def getfirstjabber(self, isgae=False):
""" return the first jabber bot of the fleet. """
return self.getfirstbot("xmpp")
def size(self):
""" return number of bots in fleet. """
return len(self.bots)
def settype(self, name, type):
""" set the type of a bot. """
cfg = Config('fleet' + os.sep + stripname(name) + os.sep + 'config')
cfg['name'] = name
logging.debug("fleet - %s - setting type to %s" % (self.cfile, type))
cfg.type = type
cfg.save()
def makebot(self, type, name, domain="", config={}, showerror=False):
""" create a bot .. use configuration if provided. """
assert type
assert name
if not name: logging.warn("fleet - name is not correct: %s" % name) ; return
if config: logging.warn('fleet - making %s (%s) bot - %s' % (type, name, config.dump()))
bot = None
cfg = Config('fleet' + os.sep + stripname(name) + os.sep + 'config')
if config: cfg.update(config)
if not cfg.name: cfg['name'] = name
cfg['botname'] = cfg['name']
if cfg.disable:
logging.warn("fleet - %s bot is disabled. see %s" % (name, cfg.cfile))
if showerror: raise BotNotEnabled(name)
return
if not cfg.type and type:
logging.debug("fleet - %s - setting type to %s" % (cfg.cfile, type))
cfg.type = type
if not cfg['type']:
try:
self.data['names'].remove(name)
self.save()
except ValueError: pass
raise Exception("no bot type specified")
if not cfg.owner:
logging.error("%s - owner not set .. using global config." % cfg.name)
cfg.owner = Config().owner
if not cfg.domain and domain: cfg.domain = domain
if not cfg: raise Exception("can't make config for %s" % name)
cfg.save()
bot = BotFactory().create(type, cfg)
if bot: self.addbot(bot)
return bot
def save(self):
""" save fleet data and call save on all the bots. """
Persist.save(self)
for i in self.bots:
try: i.save()
except Exception, ex: handle_exception()
def list(self):
""" return list of bot names. """
result = []
for i in self.bots: result.append(i.cfg.name)
return result
def stopall(self):
""" call stop() on all fleet bots. """
for i in self.bots:
try: i.stop()
except: handle_exception()
def byname(self, name):
""" return bot by name. """
for i in self.bots:
if name == i.cfg.name: return i
def replace(self, name, bot):
""" replace bot with a new bot. """
for i in range(len(self.bots)):
if name == self.bots[i].cfg.name:
self.bots[i] = bot
return True
def enable(self, cfg):
""" enable a bot baed of provided config. """
if cfg.botname and cfg.botname not in self.data['names']:
self.data['names'].append(cfg.botname)
self.data['types'][cfg.botname] = cfg.type
self.save()
return True
def addbot(self, bot):
"""
add a bot to the fleet .. remove all existing bots with the
same name.
"""
assert bot
for i in range(len(self.bots)-1, -1, -1):
if self.bots[i].cfg.name == bot.cfg.name:
logging.debug('fleet - removing %s from fleet' % bot.botname)
del self.bots[i]
logging.info('fleet - adding %s' % bot.cfg.name)
self.bots.append(bot)
if bot.cfg.name not in self.data['names']:
self.data['names'].append(bot.cfg.name)
self.data['types'][bot.cfg.name] = bot.type
self.save()
return True
def delete(self, name):
""" delete bot with name from fleet. """
for bot in self.bots:
if bot.cfg.name == name:
bot.exit()
self.remove(i)
bot.cfg['disable'] = 1
bot.cfg.save()
logging.debug('fleet - %s disabled' % bot.cfg.name)
return True
return False
def remove(self, bot):
""" delete bot by object. """
try:
self.bots.remove(bot)
return True
except ValueError:
return False
def exit(self, name=None, jabber=False):
""" call exit on all bots. """
if not name:
threads = []
for bot in self.bots:
if jabber and bot.type != 'sxmpp' and bot.type != 'jabber': continue
threads.append(start_new_thread(bot.exit, ()))
for thread in threads: thread.join()
return
for bot in self.bots:
if bot.cfg.name == name:
if jabber and bot.type != 'sxmpp' and bot.type != 'jabber': continue
try: bot.exit()
except: handle_exception()
self.remove(bot)
return True
return False
def cmnd(self, event, name, cmnd):
""" do command on a bot. """
bot = self.byname(name)
if not bot: return 0
from jsb.lib.eventbase import EventBase
j = plugs.clonedevent(bot, event)
j.onlyqueues = True
j.txt = cmnd
q = Queue.Queue()
j.queues = [q]
j.speed = 3
plugs.trydispatch(bot, j)
result = waitforqueue(q, 3000)
if not result: return
res = ["[%s]" % bot.cfg.name, ]
res += result
event.reply(res)
return res
def cmndall(self, event, cmnd):
""" do a command on all bots. """
for bot in self.bots: self.cmnd(event, bot.cfg.name, cmnd)
def broadcast(self, txt):
""" broadcast txt to all bots. """
for bot in self.bots: bot.broadcast(txt)
def startall(self, bots=None):
target = bots or self.bots
for bot in target: start_new_thread(bot.start, ())
def resume(self, sessionfile):
""" resume bot from session file. """
session = json.load(open(sessionfile))
for name in session['bots'].keys():
cfg = LazyDict(session['bots'][name])
try:
if not cfg.disable:
logging.warn("fleet - resuming %s" % cfg)
start_new_thread(self.resumebot, (cfg,))
except: handle_exception() ; return
time.sleep(10)
self.startok.set()
def resumebot(self, botcfg):
""" resume single bot. """
botname = botcfg.name
logging.warn("fleet - resuming %s bot" % botname)
if botcfg['type'] == "console": logging.warn("not resuming console bot %s" % botname) ; return
oldbot = self.byname(botname)
if oldbot and botcfg['type'] in ["sxmpp", "convore"]: oldbot.exit()
cfg = Config('fleet' + os.sep + stripname(botname) + os.sep + 'config')
if cfg.disable: logging.warn("%s - bot is disabled .. not resuming it" % botname) ; return
bot = self.makebot(botcfg.type, botname)
if oldbot: self.replace(oldbot, bot)
bot._resume(botcfg, botname)
bot.start(False)
## global fleet object
fleet = None

def getfleet(datadir=None, new=False):
    """ return the global fleet object, constructing it on first use
        (or when new is set). """
    global fleet
    if not datadir:
        from jsb.lib.datadir import getdatadir
        datadir = getdatadir()
    if new or not fleet: fleet = Fleet(datadir)
    return fleet
| Python |
# jsb/commands.py
#
#
"""
the commands module provides the infrastructure to dispatch commands.
commands are the first word of a line.
"""
## jsb imports
from threads import start_new_thread, start_bot_command
from jsb.utils.xmpp import stripped
from jsb.utils.trace import calledfrom, whichmodule
from jsb.utils.exception import handle_exception
from jsb.utils.lazydict import LazyDict
from errors import NoSuchCommand, NoSuchUser
from persiststate import UserState
from runner import cmndrunner
from boot import getcmndperms
## basic imports
import logging
import sys
import types
import os
import copy
## defines
cpy = copy.deepcopy
## Command class
class Command(LazyDict):

    """ a command object .. bundles the callable with its metadata
        (owning module/plugin, permissions, threading options). """

    def __init__(self, modname, cmnd, func, perms=[], threaded=False, wait=False, orig=None, how="message"):
        LazyDict.__init__(self)
        if not modname: raise Exception("modname is not set - %s" % cmnd)
        # a single permission string is normalised into a list
        if type(perms) == types.StringType: perms = [perms, ]
        self.modname = cpy(modname)
        self.plugname = self.modname.split('.')[-1]
        self.plugin = self.plugname
        self.cmnd = cpy(cmnd)
        self.orig = cpy(orig)
        self.func = func
        self.perms = cpy(perms)
        self.threaded = cpy(threaded)
        self.wait = cpy(wait)
        self.how = cpy(how)
        self.enable = True
class Commands(LazyDict):
"""
the commands object holds all commands of the bot.
"""
def add(self, cmnd, func, perms, threaded=False, wait=False, orig=None, how=None, *args, **kwargs):
""" add a command. """
modname = calledfrom(sys._getframe())
target = Command(modname, cmnd, func, perms, threaded, wait, orig, how)
self[cmnd] = target
try:
c = cmnd.split('-')[1]
if not self.subs: self.subs = LazyDict()
if self.subs.has_key(c):
if not self.subs[c]: self.subs[c] = []
if target not in self.subs[c]: self.subs[c].append(target)
else: self.subs[c] = [target, ]
except IndexError: pass
try:
p = cmnd.split('-')[0]
if not self.pre: self.pre = LazyDict()
if self.pre.has_key(p):
if not self.pre[p]: self.pre[p] = []
if target not in self.pre[p]: self.pre[p].append(target)
else: self.pre[p] = [target, ]
except IndexError: pass
return self
def woulddispatch(self, bot, event, cmnd=""):
"""
dispatch an event if cmnd exists and user is allowed to exec this
command.
"""
cmnd = cmnd or event.usercmnd.lower()
if not cmnd: return
try:
cmnd = event.chan.data.aliases[cmnd]
except (KeyError, TypeError):
try: cmnd = bot.aliases.data[cmnd]
except (KeyError, TypeError): pass
try:
if cmnd:
event.txt = cmnd + ' ' + ' '.join(event.txt.split()[1:])
event.usercmnd = cmnd.split()[0]
event.prepare()
except (TypeError, KeyError, AttributeError): pass
logging.debug("%s" % cmnd)
bot.plugs.reloadcheck(bot, event)
result = None
cmnd = event.usercmnd
try:
result = self[cmnd]
except KeyError:
if self.subs and self.subs.has_key(cmnd):
cmndlist = self.subs[cmnd]
if len(cmndlist) == 1: result = cmndlist[0]
else: event.reply("try one of: %s" % ", ".join([x.cmnd for x in cmndlist])) ; return
else:
if self.pre and self.pre.has_key(cmnd):
cmndlist = self.pre[cmnd]
if len(cmndlist) == 1: result = cmndlist[0]
else: event.reply("try one of: %s" % ", ".join([x.cmnd for x in cmndlist])) ; return
logging.debug(" woulddispatch result: %s" % result)
return result
def dispatch(self, bot, event, wait=0):
"""
dispatch an event if cmnd exists and user is allowed to exec this
command.
"""
if event.groupchat: id = event.auth = event.userhost
else: id = event.auth
if not event.user: raise NoSuchUser(event.auth)
c = self.woulddispatch(bot, event)
if not c: raise NoSuchCommand()
if bot.cmndperms and bot.cmndperms[c.cmnd]: perms = bot.cmndperms[c.cmnd]
else: perms = c.perms
if bot.allowall: return self.doit(bot, event, c, wait=wait)
elif event.chan and event.chan.data.allowcommands and event.usercmnd in event.chan.data.allowcommands:
if not 'OPER' in perms: return self.doit(bot, event, c, wait=wait)
else: logging.warn("%s is not in allowlist" % c)
elif not bot.users or bot.users.allowed(id, perms, bot=bot): return self.doit(bot, event, c, wait=wait)
elif bot.users.allowed(id, perms, bot=bot): return self.doit(bot, event, c, wait=wait)
return event
def doit(self, bot, event, target, wait=0):
""" do the dispatching. """
if not target.enable: return
if target.modname in event.chan.data.denyplug:
logging.warn("%s is denied in channel %s - %s" % (target.plugname, event.channel, event.userhost))
return
id = event.auth or event.userhost
event.iscommand = True
event.how = target.how
event.thecommand = target
logging.warning('dispatching %s for %s' % (event.usercmnd, id))
try:
if bot.isgae:
if not event.notask and (target.threaded or event.threaded) and not event.nothreads:
logging.warn("LAUNCHING AS TASK")
from jsb.drivers.gae.tasks import start_botevent
event.txt = event.origtxt
start_botevent(bot, event, event.speed)
event.reply("task started for %s" % event.auth)
else: target.func(bot, event) ; event.ready() ; return event
else:
if target.threaded and not event.nothreads:
logging.warning("launching thread for %s" % event.usercmnd)
t = start_bot_command(target.func, (bot, event))
event.threads.append(t)
else: event.dontclose = False; cmndrunner.put(target.modname, target.func, bot, event)
except Exception, ex:
logging.error('%s - error executing %s' % (whichmodule(), str(target.func)))
raise
return event
def unload(self, modname):
""" remove modname registered commands from store. """
delete = []
for name, cmnd in self.iteritems():
if not cmnd: continue
if cmnd.modname == modname: delete.append(cmnd)
for cmnd in delete: cmnd.enable = False
return self
def apropos(self, search):
""" search existing commands for search term. """
result = []
for name, cmnd in self.iteritems():
if search in name: result.append(name)
return result
def perms(self, cmnd):
""" show what permissions are needed to execute cmnd. """
try: return self[cmnd].perms
except KeyError: return []
def whereis(self, cmnd):
""" return plugin name in which command is implemented. """
try: return self[cmnd].plugname
except KeyError: return ""
def gethelp(self, cmnd):
""" get the docstring of a command. used for help. """
try: return self[cmnd].func.__doc__
except KeyError: pass
## global commands
cmnds = Commands()
| Python |
# jsb/container.py
#
#
""" container for bot to bot communication. """
__version__ = "1"
## jsb imports
from jsb.lib.gozerevent import GozerEvent
## xmpp import
from jsb.contrib.xmlstream import NodeBuilder, XMLescape, XMLunescape
## basic imports
import hmac
import uuid
import time
import hashlib
## defines
# attributes (in this order) that feed into a container's id
idattributes = ['createtime', 'origin', 'type', 'idtime', 'payload']

## functions

def getid(container):
    """ derive a stable uuid3 hex id from the container's id attributes,
        skipping attributes that are missing. """
    parts = []
    for attr in idattributes:
        try: parts.append(str(container[attr]))
        except KeyError: pass
    return uuid.uuid3(uuid.NAMESPACE_URL, "".join(parts)).hex
## classes
class Container(GozerEvent):

    """ Container for bot to bot communication. Provides a hmac id that can be checked. """

    def __init__(self, origin=None, payload=None, type="event", key=None):
        GozerEvent.__init__(self)
        self.createtime = time.time()
        self.origin = origin
        self.type = str(type)
        self.payload = payload
        self.makeid()
        # sign with the supplied key, falling back to the container's own id
        self.makehmac(key if key else self.id)

    def makeid(self):
        """ record the id time and derive the uuid based id. """
        self.idtime = time.time()
        self.id = getid(self)

    def makehmac(self, key):
        """ compute a sha512 hmac digest of the payload under key. """
        self.hash = "sha512"
        self.hashkey = key
        self.digest = hmac.new(key, self.payload, hashlib.sha512).hexdigest()
| Python |
# jsb/lib/aliases.py
#
#
""" global aliases. """
## jsb imports
from jsb.lib.datadir import getdatadir
## basic imports
import os
## getaliases function
def getaliases():
    """ return the global aliases Persist object, initialising empty data
        when nothing was stored yet. """
    from jsb.lib.persist import Persist
    aliases = Persist(getdatadir() + os.sep + "aliases")
    if not aliases.data: aliases.data = {}
    return aliases
# jsb/rest/client.py
#
#
""" Rest Client class """
## jsb imports
from jsb.utils.url import geturl4, posturl, deleteurl, useragent
from jsb.utils.generic import toenc
from jsb.utils.exception import handle_exception, exceptionmsg
from jsb.utils.locking import lockdec
from jsb.utils.lazydict import LazyDict
from jsb.imports import getjson
json = getjson()
## basic imports
from urllib2 import HTTPError, URLError
from httplib import InvalidURL
from urlparse import urlparse
import socket
import asynchat
import urllib
import sys
import thread
import re
import asyncore
import time
import logging
## defines
restlock = thread.allocate_lock()
locked = lockdec(restlock)
## RestResult class
class RestResult(LazyDict):

    """ result of a single REST call: decoded data on success, error info otherwise. """

    def __init__(self, url="", name=""):
        LazyDict.__init__(self)
        self.url = url         # request url
        self.name = name       # optional client name
        self.data = None       # decoded json payload on success
        self.error = None      # error message or http status on failure
        self.status = None     # http status code
        self.reason = ""       # http reason phrase
## RestClient class
class RestClient(object):
""" Provide a REST client that works in sync mode. """
def __init__(self, url, keyfile=None, certfile=None, port=None):
if not url.endswith('/'): url += '/'
try:
u = urlparse(url)
splitted = u[1].split(':')
if len(splitted) == 2: host, port = splitted
else:
host = splitted[0]
port = port or 9999
path = u[2]
except Exception, ex: raise
self.host = host
try: self.ip = socket.gethostbyname(self.host)
except Exception, ex: handle_exception()
self.path = path
self.port = port
self.url = url
self.keyfile = keyfile
self.certfile = certfile
self.callbacks = []
def addcb(self, callback):
""" add a callback. """
if not callback: return
self.callbacks.append(callback)
logging.debug('rest.client - added callback %s' % str(callback))
return self
def delcb(self, callback):
""" delete callback. """
try:
del self.callbacks[callback]
logging.debug('rest.client - deleted callback %s' % str(callback))
except ValueError: pass
def do(self, func, url, *args, **kwargs):
""" perform a rest request. """
result = RestResult(url)
try:
logging.info("rest.client - %s - calling %s" % (url, str(func)))
res = func(url, {}, kwargs, self.keyfile, self.certfile, self.port)
result.status = res.status
result.reason = res.reason
if result.status >= 400: result.error = result.status
else: result.error = None
if result.status == 200:
r = res.read()
result.data = json.loads(r)
else: result.data = None
logging.info("rest.client - %s - result: %s" % (url, str(result)))
except Exception, ex:
result.error = str(ex)
result.data = None
for cb in self.callbacks:
try:
cb(self, result)
logging.info('rest.client - %s - called callback %s' % (url, str(cb)))
except Exception, ex:
handle_exception()
return result
def post(self, *args, **kwargs):
""" do a POST request. """
return self.do(posturl, self.url, *args, **kwargs)
def add(self, *args, **kwargs):
""" add an REST item. """
return self.do(posturl, self.url, *args, **kwargs)
def delete(self, nr=None):
""" delete a REST item. """
if nr: return self.do(deleteurl, self.url + '/' + str(nr))
else: return self.do(deleteurl, self.url)
def get(self, nr=None):
""" get a REST item. """
if not nr: return self.do(geturl4, self.url)
else: return self.do(geturl4, self.url + '/' + str(nr))
## RestClientAsync class
class RestClientAsync(RestClient, asynchat.async_chat):
""" Async REST client. """
def __init__(self, url, name=""):
RestClient.__init__(self, url)
asynchat.async_chat.__init__(self)
self.set_terminator("\r\n\r\n")
self.reading_headers = True
self.error = None
self.buffer = ''
self.name = name or self.url
self.headers = {}
self.status = None
def handle_error(self):
""" take care of errors. """
exctype, excvalue, tb = sys.exc_info()
if exctype == socket.error:
try:
errno, errtxt = excvalue
if errno in [11, 35, 9]:
logging.error("res.client - %s - %s %s" % (self.url, errno, errtxt))
return
except ValueError: pass
self.error = str(excvalue)
else:
logging.error("%s - %s" % (self.name, exceptionmsg()))
self.error = exceptionmsg()
self.buffer = ''
result = RestResult(self.url, self.name)
result.error = self.error
result.data = None
for cb in self.callbacks:
try:
cb(self, result)
logging.info('rest.client - %s - called callback %s' % (url, str(cb)))
except Exception, ex: handle_exception()
self.close()
def handle_expt(self):
""" handle an exception. """
handle_exception()
def handle_connect(self):
""" called after succesfull connect. """
logging.info('rest.client - %s - connected %s' % (self.url, str(self)))
def start(self):
""" start the client loop. """
assert(self.host)
assert(int(self.port))
try:
logging.info('rest.client - %s - starting client' % self.url)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.connect((self.ip, int(self.port)))
except socket.error, ex:
self.error = str(ex)
try:
self.connect((self.ip, int(self.port)))
except socket.error, ex: self.error = str(ex)
except Exception, ex: self.error = str(ex)
if self.error: self.warn("rest.client - %s - can't start %s" % (self.url, self.error))
else: return True
@locked
def found_terminator(self):
""" called when terminator is found. """
logging.info('rest.client - %s - found terminator' % self.url)
if self.reading_headers:
self.reading_headers = False
try:
self.headers = self.buffer.split('\r\n')
self.status = int(self.headers[0].split()[1])
except (ValueError, IndexError):
logging.warn("rest.client - %s - can't parse headers %s" % (self.url, self.headers))
return
self.set_terminator(None)
self.buffer = ''
logging.info('rest.client - %s - headers: %s' % (self.url, self.headers))
def collect_incoming_data(self, data):
""" aggregate seperate data chunks. """
self.buffer = self.buffer + data
def handle_close(self):
""" called on connection close. """
self.reading_headers = False
self.handle_incoming()
logging.info('rest.client - %s - closed' % self.url)
self.close()
def handle_incoming(self):
""" handle incoming data. """
logging.info("rest.client - %s - incoming: %s" % (self.url, self.buffer))
if not self.reading_headers:
result = RestResult(self.url, self.name)
if self.status >= 400:
logging.warn('rest.client - %s - error status: %s' % (self.url, self.status))
result.error = self.status
result.data = None
elif self.error:
result.error = self.error
result.data = None
elif self.buffer == "":
result.data = ""
result.error = None
else:
try:
res = json.loads(self.buffer)
if not res:
self.buffer = ''
return
result.data = res
result.error = None
except ValueError, ex:
logging.info("rest.client - %s - can't decode %s" % (self.url, self.buffer))
result.error = str(ex)
except Exception, ex:
logging.error("rest.client - %s - %s" % (self.url, exceptionmsg()))
result.error = exceptionmsg()
result.data = None
for cb in self.callbacks:
try:
cb(self, result)
logging.info('rest.client - %s - called callback %s' % (self.url, str(cb)))
except Exception, ex: handle_exception()
self.buffer = ''
@locked
def dorequest(self, method, path, postdata={}, headers={}):
if postdata: postdata = urllib.urlencode(postdata)
if headers:
if not headers.has_key('Content-Length'): headers['Content-Length'] = len(postdata)
headerstxt = ""
for i,j in headers.iteritems(): headerstxt += "%s: %s\r\n" % (i.lower(), j)
else: headerstxt = ""
if method == 'POST': s = toenc("%s %s HTTP/1.0\r\n%s\r\n%s\r\n\r\n" % (method, path, headerstxt, postdata), 'ascii')
else: s = toenc("%s %s HTTP/1.0\r\n\r\n" % (method, path), 'ascii')
if self.start():
logging.info('rest.client - %s - sending %s' % (self.url, s))
self.push(s)
def sendpost(self, postdata):
headers = {'Content-Type': 'application/x-www-form-urlencoded', \
'Accept': 'text/plain; text/html', 'User-Agent': useragent()}
self.dorequest('POST', self.path, postdata, headers)
def sendget(self):
""" send a GET request. """
self.dorequest('GET', self.path)
def post(self, *args, **kwargs):
""" do a POST request. """
self.sendpost(kwargs)
def get(self):
""" call GET request. """
self.sendget()
| Python |
# jsb/socklib/rest/server.py
#
#
## jsb imports
from jsb.utils.exception import handle_exception, exceptionmsg
from jsb.utils.trace import calledfrom
from jsb.lib.persiststate import ObjectState
from jsb.lib.threads import start_new_thread
from jsb.version import version
## basic imports
from SocketServer import BaseServer, ThreadingMixIn
from BaseHTTPServer import HTTPServer, BaseHTTPRequestHandler
from urllib import unquote_plus
from asyncore import dispatcher
from cgi import escape
import time
import sys
import select
import types
import socket
import logging
## RestServerBase class
class RestServerBase(HTTPServer):
""" REST web server """
allow_reuse_address = True
daemon_thread = True
def start(self):
""" start the REST server. """
self.name = calledfrom(sys._getframe(0))
self.stop = False
self.running = False
self.handlers = {}
self.webmods = {}
self.state = ObjectState()
self.state.define('whitelistenable', 0)
self.state.define('whitelist', [])
self.state.define('blacklist', [])
self.state.define('disable', [])
self.poll = select.poll()
self.poll.register(self)
start_new_thread(self.serve, ())
def shutdown(self):
""" shutdown the REST server. """
try:
self.stop = True
time.sleep(0.2)
self.server_close()
except Exception, ex: handle_exception()
def serve(self):
""" serving loop. """
logging.warn('rest.server - starting')
time.sleep(1)
while not self.stop:
self.running = True
try: got = self.poll.poll(100)
except Exception, ex: handle_exception()
if got and not self.stop:
try: self.handle_request()
except Exception, ex: handle_exception()
time.sleep(0.01)
self.running = False
logging.warn('rest.server - stopping')
def entrypoint(self, request):
""" check lists whether request should be allowed. """
ip = request.ip
if not self.whitelistenable() and ip in self.blacklist():
logging.warn('rest.server - denied %s' % ip)
request.send_error(401)
return False
if self.whitelistenable() and ip not in self.whitelist():
logging.warn('rest.server - denied %s' % ip)
request.send_error(401)
return False
return True
def whitelistenable(self):
""" enable whitelist? """
return self.state['whitelistenable']
def whitelist(self):
""" return the whitelist. """
return self.state['whitelist']
def blacklist(self):
""" return the black list. """
return self.state['blacklist']
def addhandler(self, path, type, handler):
""" add a web handler """
path = unquote_plus(path)
splitted = []
for i in path.split('/'):
if i: splitted.append(i)
else: splitted.append("/")
splitted = tuple(splitted)
if not self.handlers.has_key(splitted): self.handlers[splitted] = {}
self.handlers[splitted][type] = handler
logging.info('rest.server - %s %s handler added' % (splitted[0], type))
def enable(self, what):
""" enable an path. """
try:
self.state['disable'].remove(what)
logging.info('rest.server - enabled %s' % str(what))
except ValueError: pass
def disable(self, what):
""" disable an path. """
self.state['disable'].append(what)
logging.info('rest.server - disabled %s' % str(what))
def do(self, request):
""" do a request """
path = unquote_plus(request.path.strip())
path = path.split('?')[0]
#if path.endswith('/'): path = path[:-1]
splitted = []
for i in path.split('/'):
if i: splitted.append(i)
else: splitted.append("/")
splitted = tuple(splitted)
logging.warn("rest.server - incoming - %s" % str(splitted))
for i in self.state['disable']:
if i in splitted:
logging.warn('rest.server - %s - denied disabled %s' % (request.ip, i))
request.send_error(404)
return
request.splitted = splitted
request.value = None
type = request.command
try: func = self.handlers[splitted][type]
except (KeyError, ValueError):
try:
func = self.handlers[splitted][type]
request.value = splitted[-1]
except (KeyError, ValueError):
logging.error("rest.server - no handler found for %s" % str(splitted))
request.send_error(404)
return
result = func(self, request)
logging.info('rest.server - %s - result: %s' % (request.ip, str(result)))
return result
def handle_error(self, request, addr):
""" log the error """
ip = request.ip
exctype, excvalue, tb = sys.exc_info()
if exctype == socket.timeout:
logging.warn('rest.server - %s - socket timeout' % (ip, ))
return
if exctype == socket.error:
logging.warn('rest.server - %s - socket error: %s' % (ip, excvalue))
return
exceptstr = exceptionmsg()
logging.warn('rest.server - %s - error %s %s => %s' % (ip, exctype, excvalue, exceptstr))
## Mixin classes
class RestServer(ThreadingMixIn, RestServerBase):
    """ REST server that handles each request in a separate thread. """
    pass
class RestServerAsync(RestServerBase, dispatcher):
    """ REST server variant hooked into the asyncore dispatcher loop. """
    pass
## RestRequestHandler class
class RestRequestHandler(BaseHTTPRequestHandler):
""" timeserver request handler class """
def setup(self):
""" called on each incoming request. """
BaseHTTPRequestHandler.setup(self)
self.ip = self.client_address[0]
self.name = self.ip
self.size = 0
def writeheader(self, type='text/plain'):
""" write headers to the client. """
self.send_response(200)
self.send_header('Content-type', '%s; charset=%s ' % (type,sys.getdefaultencoding()))
self.send_header('Server', version)
self.end_headers()
def sendresult(self):
""" dispatch a call. """
try:
result = self.server.do(self)
if not result: return
self.size = len(result)
except Exception, ex:
handle_exception()
self.send_error(501)
return
self.writeheader()
self.wfile.write(result)
self.wfile.close()
def handle_request(self):
""" handle a REST request. """
if not self.server.entrypoint(self): return
self.sendresult()
do_DELETE = do_PUT = do_GET = do_POST = handle_request
def log_request(self, code):
""" log the request """
try: ua = self.headers['user-agent']
except: ua = "-"
try: rf = self.headers['referer']
except: rf = "-"
if hasattr(self, 'path'):
logging.debug('rest.server - %s "%s %s %s" %s %s "%s" "%s"' % (self.address_string(), self.command, self.path, self.request_version, code, self.size, rf, ua))
else:
logging.debug('rest.server - %s "%s %s %s" %s %s "%s" "%s"' % (self.address_string(), self.command, "none", self.request_version, code, self.size, rf, ua))
## secure classes .. not working yet
class SecureRestServer(RestServer):
    """ SSL variant of the REST server (the module comment above marks
        these secure classes as not working yet). """
    def __init__(self, server_address, HandlerClass, keyfile, certfile):
        from OpenSSL import SSL
        BaseServer.__init__(self, server_address, HandlerClass)
        # SSLv23 negotiates the best protocol both sides support ..
        # SSLv2 itself is explicitly disabled below
        ctx = SSL.Context(SSL.SSLv23_METHOD)
        ctx.set_options(SSL.OP_NO_SSLv2)
        logging.warn("rest.server - loading private key from %s" % keyfile)
        ctx.use_privatekey_file (keyfile)
        logging.warn('rest.server - loading certificate from %s' % certfile)
        ctx.use_certificate_file(certfile)
        logging.info('rest.server - creating SSL socket on %s' % str(server_address))
        # wrap a plain socket in an SSL connection and bind/activate it
        # ourselves, mirroring what TCPServer.__init__ normally does
        self.socket = SSL.Connection(ctx, socket.socket(self.address_family,
                                                        self.socket_type))
        self.server_bind()
        self.server_activate()
class SecureAuthRestServer(SecureRestServer):
    """ SSL REST server that additionally requires a client certificate
        verifiable against the given chain. """
    def __init__(self, server_address, HandlerClass, chain, serverkey, servercert):
        from OpenSSL import SSL
        BaseServer.__init__(self, server_address, HandlerClass)
        ctx = SSL.Context(SSL.SSLv23_METHOD)
        logging.warn("rest.server - loading private key from %s" % serverkey)
        ctx.use_privatekey_file (serverkey)
        logging.warn('rest.server - loading certificate from %s' % servercert)
        ctx.use_certificate_file(servercert)
        logging.warn('rest.server - loading chain of certifications from %s' % chain)
        ctx.set_verify_depth(2)
        ctx.load_client_ca(chain)
        #ctx.load_verify_locations(chain)
        logging.info('rest.server - creating SSL socket on %s' % str(server_address))
        # the verify callback simply passes OpenSSL's own verdict through
        callback = lambda conn,cert,errno,depth,retcode: retcode
        # demand a peer certificate and fail the handshake without one
        ctx.set_verify(SSL.VERIFY_FAIL_IF_NO_PEER_CERT | SSL.VERIFY_PEER, callback)
        ctx.set_session_id('jsb')
        self.socket = SSL.Connection(ctx, socket.socket(self.address_family,
            self.socket_type))
        self.server_bind()
        self.server_activate()
class SecureRequestHandler(RestRequestHandler):
    """ request handler that reads/writes through an SSL wrapped socket. """
    def setup(self):
        # self.request is an SSL.Connection here .. grab the underlying
        # socket (private _sock attribute) and force blocking mode
        self.connection = self.request._sock
        self.request._sock.setblocking(1)
        # NOTE(review): wfile is created with rbufsize, not wbufsize ..
        # looks like a copy-paste slip, confirm before changing
        self.rfile = socket._fileobject(self.request, "rb", self.rbufsize)
        self.wfile = socket._fileobject(self.request, "wb", self.rbufsize)
| Python |
# jsb/errors.py
#
#
""" jsb exceptions. """
## jsb imports
from jsb.utils.trace import calledfrom
## basic imports
import sys
## exceptions
class JsonBotError(Exception):
    """ base class for all jsb errors. """
    pass
class NotConnected(JsonBotError):
    """ the bot is not connected. """
    pass
class FeedAlreadyExists(JsonBotError):
    """ a feed with that name already exists. """
    pass
class NoSuchFile(JsonBotError):
    """ file could not be found. """
    pass
class BotNotEnabled(JsonBotError):
    """ the bot is not enabled. """
    pass
class NoProperDigest(JsonBotError):
    """ no proper digest could be made. """
    pass
class NoChannelProvided(JsonBotError):
    """ no channel was provided. """
    pass
class NoInput(JsonBotError):
    """ no input was provided. """
    pass
class PropertyIgnored(JsonBotError):
    """ the property is ignored. """
    pass
class BotNotSetInEvent(JsonBotError):
    """ the event has no bot set on it. """
    pass
class FeedProviderError(JsonBotError):
    """ the feed provider errored. """
    pass
class CantSaveConfig(JsonBotError):
    """ the config file can't be saved. """
    pass
class NoOwnerSet(JsonBotError):
    """ no owner is configured for the bot. """
    pass
class NameNotSet(JsonBotError):
    """ no name is set. """
    pass
class NoSuchUser(JsonBotError):
    """ user could not be found. """
    pass
class NoSuchBotType(JsonBotError):
    """ unknown bot type. """
    pass
class NoChannelSet(JsonBotError):
    """ no channel is set. """
    pass
class NoSuchWave(JsonBotError):
    """ wave could not be found. """
    pass
class NoSuchCommand(JsonBotError):
    """ command could not be found. """
    pass
class NoSuchPlugin(JsonBotError):
    """ plugin could not be found. """
    pass
# BUGFIX: NoOwnerSet was defined a second time here, silently rebinding
# the class defined above .. the duplicate has been removed
class PlugsNotConnected(JsonBotError):
    """ plugins are not connected. """
    pass
class NoEventProvided(JsonBotError):
    """ no event was provided. """
    pass
| Python |
# jsb/tasks.py
#
#
## jsb imports
from jsb.utils.trace import calledfrom
from jsb.lib.plugins import plugs
## basic imports
import logging
import sys
## TaskManager class
class TaskManager(object):
    """ keeps a mapping of task names to handler functions and to the
        plugins that registered them. """

    def __init__(self):
        self.handlers = {}
        self.plugins = {}

    def add(self, taskname, func):
        """ register a task handler .. the owning plugin is derived from
            the calling stack frame. """
        logging.debug("tasks - added task %s - %s" % (taskname, func))
        self.handlers[taskname] = func
        self.plugins[taskname] = calledfrom(sys._getframe())
        return True

    def unload(self, taskname):
        """ unload a task .. returns False when the task is unknown. """
        logging.debug("tasks - unloading task %s" % taskname)
        try:
            del self.handlers[taskname]
            del self.plugins[taskname]
        except KeyError: return False
        return True

    def dispatch(self, taskname, *args, **kwargs):
        """ look up the task's plugin, load it, then run the handler. """
        try: plugin = self.plugins[taskname]
        except KeyError:
            logging.debug('tasks - no plugin for %s found' % taskname)
            return
        logging.debug('loading %s for taskmanager' % plugin)
        plugs.load(plugin)
        try: handler = self.handlers[taskname]
        except KeyError:
            logging.debug('tasks - no handler for %s found' % taskname)
            return
        logging.warn("dispatching task %s - %s" % (taskname, str(handler)))
        return handler(*args, **kwargs)

## global task manager
taskmanager = TaskManager()
| Python |
# jsb/threadloop.py
#
#
""" class to implement start/stoppable threads. """
## lib imports
from jsb.utils.exception import handle_exception
from threads import start_new_thread, getname
## basic imports
import Queue
import time
import logging
## ThreadLoop class
class ThreadLoop(object):
""" implement startable/stoppable threads. """
def __init__(self, name="", queue=None):
self.name = name or 'idle'
self.stopped = False
self.running = False
self.outs = []
self.queue = queue or Queue.Queue()
self.nowrunning = "none"
def _loop(self):
""" the threadloops loop. """
logging.debug('%s - starting threadloop' % self.name)
self.running = True
nrempty = 0
while not self.stopped:
try: data = self.queue.get()
except Queue.Empty:
time.sleep(0.1)
continue
if self.stopped: break
if not data: break
self.handle(*data)
self.running = False
logging.debug('%s - stopping threadloop' % self.name)
def put(self, *data):
""" put data on task queue. """
self.queue.put_nowait(data)
def start(self):
""" start the thread. """
if not self.running and not self.stopped: start_new_thread(self._loop, ())
def stop(self):
""" stop the thread. """
self.stopped = True
self.running = False
self.queue.put_nowait(None)
def handle(self, *args, **kwargs):
""" overload this. """
pass
## RunnerLoop class
class RunnerLoop(ThreadLoop):
""" dedicated threadloop for bot commands/callbacks. """
def put(self, *data):
""" put data on task queue. """
self.queue.put_nowait(data)
def _loop(self):
""" runner loop. """
logging.debug('%s - starting threadloop' % self.name)
self.running = True
while not self.stopped:
try: data = self.queue.get()
except Queue.Empty:
time.sleep(0.1)
continue
if self.stopped: break
if not data: break
self.nowrunning = getname(data[1])
try: self.handle(*data)
except Exception, ex: handle_exception()
self.running = False
logging.debug('%s - stopping threadloop' % self.name)
class TimedLoop(ThreadLoop):
    """ threadloop that sleeps x seconds before executing. """
    def __init__(self, name, sleepsec=300, *args, **kwargs):
        ThreadLoop.__init__(self, name, *args, **kwargs)
        # seconds to sleep between handle() calls
        self.sleepsec = sleepsec
    def _loop(self):
        """ timed loop. sleep a while.

            note: unlike ThreadLoop._loop this does not read the queue,
            and it resets self.stopped so a stopped TimedLoop can be
            restarted.
        """
        logging.debug('%s - starting timedloop (%s seconds)' % (self.name, self.sleepsec))
        self.stopped = False
        self.running = True
        while not self.stopped:
            time.sleep(self.sleepsec)
            if self.stopped:
                logging.debug("%s - loop is stopped" % self.name)
                break
            # handle() is called without arguments here, not with queue data
            try: self.handle()
            except Exception, ex: handle_exception()
        self.running = False
        logging.debug('%s - stopping timedloop' % self.name)
| Python |
# jsb/examples.py
#
#
""" examples is a dict of example objects. """
## basic imports
import re
## Example class
class Example(object):
    """ a single example: a description plus the example text itself. """
    def __init__(self, descr, ex):
        self.descr = descr
        self.example = ex
## Collection of exanples
class Examples(dict):
""" examples holds all the examples. """
def add(self, name, descr, ex):
""" add description and example. """
self[name.lower()] = Example(descr, ex)
def size(self):
""" return size of examples dict. """
return len(self.keys())
def getexamples(self):
""" get all examples in list. """
result = []
for i in self.values():
ex = i.example.lower()
exampleslist = re.split('\d\)', ex)
for example in exampleslist:
if example: result.append(example.strip())
return result
## global examples object
examples = Examples()
| Python |
# jsb/lib/waiter.py
#
#
""" wait for events. """
## jsb imports
from jsb.lib.runner import waitrunner
from jsb.utils.trace import whichmodule
from jsb.utils.exception import handle_exception
## basic imports
import logging
import copy
import types
import time
import uuid
## defines
cpy = copy.deepcopy
## Wait class
class Wait(object):
"""
wait object contains a list of types to match and a list of callbacks to
call, optional list of userhosts to match the event with can be given.
"""
def __init__(self, cbtypes, cbs=None, userhosts=None, modname=None, event=None, queue=None):
self.created = time.time()
if type(cbtypes) != types.ListType: cbtypes = [cbtypes, ]
self.cbtypes = cbtypes
self.userhosts = userhosts
if cbs and type(cbs) != types.ListType: cbs = [cbs, ]
self.cbs = cbs
self.modname = modname
self.origevent = event
self.queue = queue
def check(self, bot, event):
""" check whether event matches this wait object. if so call callbacks. """
target = event.cmnd or event.cbtype
logging.debug("waiter - checking for %s - %s" % (target, self.cbtypes))
if target not in self.cbtypes: return
if event.channel and self.origevent and not event.channel == self.origevent.channel:
logging.warn("waiter - %s and %s dont match" % (event.channel, self.origevent.channel))
return
if self.userhosts and event.userhost and event.userhost not in self.userhosts:
logging.warn("waiter - no userhost matched")
return
if self.queue: self.queue.put_nowait(event)
self.docbs(bot, event)
return event
def docbs(self, bot, event):
""" do the actual callback .. put callback on the waitrunner for execution. """
if not self.cbs: return
logging.warn("%s - found wait match: %s" % (bot.cfg.name, event.dump()))
for cb in self.cbs:
try: waitrunner.put(self.modname, cb, bot, event)
except Exception, ex: handle_exception()
## Waiter class
class Waiter(object):
    """ collection of Wait objects, keyed by a uuid string. """

    def __init__(self):
        self.waiters = {}

    def register(self, cbtypes, cbs=None, userhosts=None, event=None, queue=None):
        """ add a wait object to the waiters dict .. returns its key. """
        logging.warn("waiter - registering wait object: %s - %s" % (str(cbtypes), str(userhosts)))
        key = str(uuid.uuid4())
        self.waiters[key] = Wait(cbtypes, cbs, userhosts, modname=whichmodule(), event=event, queue=queue)
        return key

    def ready(self, key):
        """ drop the wait object stored under key. """
        try: del self.waiters[key]
        except KeyError: logging.warn("wait - %s key is not in waiters" % key)

    def check(self, bot, event):
        """ run the event past all wait objects and remove finished ones. """
        matches = []
        for wait in self.waiters.values():
            result = wait.check(bot, event)
            # NOTE(review): waits are only collected for removal when their
            # cbtypes list is empty, which Wait.check never does .. confirm
            # this is the intended removal condition
            if not wait.cbtypes: matches.append(wait)
        if matches: self.delete(matches)
        return matches

    def delete(self, removed):
        """ delete a list of wait objects from the waiters dict.

            BUGFIX: the waiters dict is keyed by uuid string, not by the
            wait object itself .. deleting with the object as key always
            raised (a silently swallowed) KeyError, so nothing was ever
            removed. delete by looking up the matching keys instead.
        """
        logging.debug("waiter - removing from waiters: %s" % str(removed))
        for key in list(self.waiters.keys()):
            if self.waiters[key] in removed:
                del self.waiters[key]

    def remove(self, modname):
        """ remove all waiters registered by modname. """
        removed = []
        for wait in self.waiters.values():
            if wait.modname == modname: removed.append(wait)
        if removed: self.delete(removed)

## the global waiter object
waiter = Waiter()
| Python |
# jsb/eventhandler.py
#
#
""" event handler. use to dispatch function in main loop. """
## jsb imports
from jsb.utils.exception import handle_exception
from jsb.utils.locking import lockdec
from threads import start_new_thread
## basic imports
import Queue
import thread
import logging
import time
## locks
handlerlock = thread.allocate_lock()
locked = lockdec(handlerlock)
## classes
class EventHandler(object):
"""
events are handled in 11 queues with different priorities:
queue0 is tried first queue10 last.
"""
def __init__(self):
self.sortedlist = []
self.queues = {}
for i in range(11):
self.queues[i] = Queue.Queue()
self.sortedlist.append(i)
self.sortedlist.sort()
self.go = Queue.Queue()
self.stopped = False
self.running = False
self.nooutput = False
def start(self):
""" start the eventhandler thread. """
self.stopped = False
if not self.running:
start_new_thread(self.handleloop, ())
self.running = True
def stop(self):
""" stop the eventhandler thread. """
self.running = False
self.stopped = True
self.go.put('Yihaaa')
def put(self, speed, func, *args, **kwargs):
""" put item on the queue. """
self.queues[10-speed].put_nowait((func, args, kwargs))
self.go.put('go')
def getready(self):
""" check queues from available functions to execute. """
ready = []
for i in self.sortedlist:
if self.queues[i].qsize():
ready.append(i)
break
return ready
def handle_one(self):
""" do 1 loop over ready queues. """
ready = self.getready()
for i in ready: self.dispatch(self.queues[i])
def handleloop(self):
""" thread that polls the queues for items to dispatch. """
logging.debug('eventhandler - starting handle thread')
while not self.stopped:
time.sleep(0.01)
try:
res = self.go.get_nowait()
if res: self.handle_one()
except Queue.Empty: pass
logging.debug('eventhandler - stopping %s' % str(self))
runforever = handleloop
def dispatch(self, queue):
""" dispatch functions from provided queue. """
try: todo = queue.get_nowait()
except Queue.Empty: return
try:
(func, args, kwargs) = todo
func(*args, **kwargs)
except ValueError:
try:
(func, args) = todo
func(*args)
except ValueError:
(func, ) = todo
func()
except: handle_exception()
## handler to use in main prog
mainhandler = EventHandler()
| Python |
# jsb package
#
#
""" jsb core package. """
# package version string
__version__ = "0.8"
import warnings
# silence all warnings for the whole package
warnings.simplefilter('ignore')
| Python |
# jsb/jsbimport.py
#
#
""" use the imp module to import modules. """
## basic imports
import time
import sys
import imp
import os
import thread
import logging
## _import function
def _import(name):
""" do a import (full). """
mods = []
mm = ""
for m in name.split('.'):
mm += m
mods.append(mm)
mm += "."
for mod in mods: imp = __import__(mod)
logging.debug("jsbimport - got module %s" % sys.modules[name])
return sys.modules[name]
## force_import function
def force_import(name):
    """ force a fresh import of module <name> by dropping any cached
        version from sys.modules first. """
    if name in sys.modules: del sys.modules[name]
    return _import(name)
def _import_byfile(modname, filename):
try: return imp.load_source(modname, filename)
except NotImplementedError: return _import(filename[:-3].replace(os.sep, "."))
| Python |
# jsb/callbacks.py
#
#
"""
bot callbacks .. callbacks take place on registered events. a precondition
    function can optionally be provided to see if the callback should fire.
"""
## jsb imports
from threads import getname, start_new_thread
from jsb.utils.locking import lockdec
from jsb.utils.exception import handle_exception
from jsb.utils.trace import calledfrom, whichplugin, callstack
from jsb.utils.dol import Dol
## basic imports
import sys
import copy
import thread
import logging
## locks
lock = thread.allocate_lock()
locked = lockdec(lock)
## Callback class
class Callback(object):
    """ a single registered callback and its dispatch options. """
    def __init__(self, modname, func, prereq, kwargs, threaded=False, speed=5):
        self.modname = modname
        self.plugname = self.modname.split('.')[-1]
        self.func = func                          # the function to fire
        self.prereq = prereq                      # precondition checked before firing
        self.kwargs = kwargs                      # extra keyword arguments for func
        self.threaded = copy.deepcopy(threaded)   # run callback in its own thread
        self.speed = copy.deepcopy(speed)         # dispatch priority
        self.activate = False
        self.enable = True
## Callbacks class (holds multiple callbacks)
class Callbacks(object):
    """
    dict of lists containing callbacks. Callbacks object take care of
    dispatching the callbacks based on incoming events. see Callbacks.check()
    """
    def __init__(self):
        self.cbs = Dol()
    def size(self):
        """ return number of callbacks. """
        return len(self.cbs)
    def add(self, what, func, prereq=None, kwargs=None, threaded=False, nr=False, speed=5):
        """ add a callback for event type what .. nr gives an explicit
            position in the list, prereq/threaded/speed are dispatch
            options (see Callback). """
        what = what.upper()
        # the registering module is derived from the calling stack frame
        modname = calledfrom(sys._getframe())
        if not kwargs: kwargs = {}
        if nr != False: self.cbs.insert(nr, what, Callback(modname, func, prereq, kwargs, threaded, speed))
        else: self.cbs.add(what, Callback(modname, func, prereq, kwargs, threaded, speed))
        logging.debug('added %s (%s)' % (what, modname))
        return self
    def unload(self, modname):
        """ unload all callbacks registered in a plugin. """
        unload = []
        # collect (event type, index) pairs first, then delete in reverse
        # order so the remaining indexes stay valid
        for name, cblist in self.cbs.iteritems():
            index = 0
            for item in cblist:
                if item.modname == modname: unload.append((name, index))
                index += 1
        for callback in unload[::-1]:
            self.cbs.delete(callback[0], callback[1])
            logging.debug(' unloaded %s (%s)' % (callback[0], modname))
    def disable(self, plugname):
        """ disable all callbacks registered in a plugin. """
        unload = []
        for name, cblist in self.cbs.iteritems():
            index = 0
            for item in cblist:
                if item.plugname == plugname: item.activate = False
    def activate(self, plugname):
        """ activate all callbacks registered in a plugin. """
        unload = []
        for name, cblist in self.cbs.iteritems():
            index = 0
            for item in cblist:
                if item.plugname == plugname: item.activate = True
    def whereis(self, cmnd):
        """ show where ircevent.CMND callbacks are registered """
        result = []
        cmnd = cmnd.upper()
        for c, callback in self.cbs.iteritems():
            if c == cmnd:
                for item in callback:
                    if not item.plugname in result: result.append(item.plugname)
        return result
    def list(self):
        """ return the names of all registered callback functions. """
        result = []
        for cmnd, callbacks in self.cbs.iteritems():
            for cb in callbacks:
                result.append(getname(cb.func))
        return result
    def check(self, bot, event):
        """ check for callbacks to be fired .. 'ALL' callbacks fire on
            every event, then the callbacks for the event's own type. """
        type = event.cbtype or event.cmnd
        if self.cbs.has_key('ALL'):
            for cb in self.cbs['ALL']: self.callback(cb, bot, event)
        if self.cbs.has_key(type):
            target = self.cbs[type]
            for cb in target: self.callback(cb, bot, event)
    def callback(self, cb, bot, event):
        """ do the actual callback with provided bot and event as arguments. """
        #if event.stop: logging.info("callbacks - event is stopped.") ; return
        event.calledfrom = cb.modname
        if not event.bonded: event.bind(bot)
        try:
            if event.status == "done":
                logging.debug("callback - event is done .. ignoring")
                return
            # channels can deny specific plugins
            if event.chan and cb.plugname in event.chan.data.denyplug:
                logging.debug("%s denied in %s - %s" % (cb.modname, event.channel, event.auth))
                return
            if cb.prereq:
                logging.debug(' executing in loop %s' % str(cb.prereq))
                if not cb.prereq(bot, event): return
            if not cb.func: return
            if event.isremote(): logging.info('%s - executing REMOTE %s - %s' % (bot.cfg.name, getname(cb.func), event.cbtype))
            elif event.cbtype == "TICK": logging.debug('%s - executing %s - %s' % (bot.cfg.name, getname(cb.func), event.cbtype))
            else: logging.info('%s - executing %s - %s' % (bot.cfg.name, getname(cb.func), event.cbtype))
            event.iscallback = True
            logging.debug("%s - %s - trail - %s" % (bot.cfg.name, getname(cb.func), callstack(sys._getframe())[::-1]))
            #if not event.direct and cb.threaded and not bot.isgae: start_new_thread(cb.func, (bot, event))
            # threaded callbacks get their own thread (not on GAE) .. the
            # rest go through the callbackrunner unless direct execution
            # is requested or we run on GAE
            if cb.threaded and not bot.isgae: start_new_thread(cb.func, (bot, event))
            else:
                if bot.isgae or event.direct: cb.func(bot, event)
                else:
                    from runner import callbackrunner
                    callbackrunner.put(cb.modname, cb.func, bot, event)
            return True
        except Exception, ex:
            handle_exception()
## global callbacks
first_callbacks = Callbacks()
callbacks = Callbacks()
last_callbacks = Callbacks()
remote_callbacks = Callbacks()
| Python |
# jsb/socklib/partyline.py
#
#
""" provide partyline functionality .. manage dcc sockets. """
__copyright__ = 'this file is in the public domain'
__author__ = 'Aim'
## jsb imports
from jsb.lib.fleet import getfleet
from jsb.utils.exception import handle_exception
from jsb.lib.threads import start_new_thread
from jsb.imports import getjson
json = getjson()
## basic imports
import thread
import pickle
import socket
import logging
## classes
class PartyLine(object):
""" partyline can be used to talk through dcc chat connections. """
def __init__(self):
self.socks = [] # partyline sockets list
self.jids = []
self.lock = thread.allocate_lock()
def resume(self, sessionfile):
""" resume bot from session file. """
try:
session = json.load(open(sessionfile, 'r'))
self._resume(session)
except: handle_exception()
def _resume(self, data, reto=None):
""" resume a party line connection after reboot. """
fleet = getfleet()
for i in data['partyline']:
logging.warn("partyline - resuming %s" % i)
bot = fleet.byname(i['botname'])
if not bot: logging.error("partyline - can't find bot") ; continue
sock = socket.fromfd(i['fileno'], socket.AF_INET, socket.SOCK_STREAM)
sock.setblocking(1)
nick = i['nick']
userhost = i['userhost']
channel = i['channel']
if not bot:
logging.error("partyline - can't find %s bot in fleet" % i['botname'])
continue
self.socks.append({'bot': bot, 'sock': sock, 'nick': nick, 'userhost': userhost, 'channel': channel, 'silent': i['silent']})
bot._dccresume(sock, nick, userhost, channel)
if reto: self.say_nick(nick, 'rebooting done')
def _resumedata(self):
""" return data used for resume. """
result = []
for i in self.socks: result.append({'botname': i['bot'].cfg.name, 'fileno': i['sock'].fileno(), 'nick': i['nick'], 'userhost': i['userhost'], 'channel': i['channel'], 'silent': i['silent']})
return result
def stop(self, bot):
""" stop all users on bot. """
for i in self.socks:
if i['bot'] == bot:
try:
i['sock'].shutdown(2)
i['sock'].close()
except: pass
def stop_all(self):
""" stop every user on partyline. """
for i in self.socks:
try:
i['sock'].shutdown(2)
i['sock'].close()
except:
pass
def loud(self, nick):
""" enable broadcasting of txt for nick. """
for i in self.socks:
if i['nick'] == nick: i['silent'] = False
def silent(self, nick):
""" disable broadcasting txt from/to nick. """
for i in self.socks:
if i['nick'] == nick: i['silent'] = True
def add_party(self, bot, sock, nick, userhost, channel):
''' add a socket with nick to the list. '''
for i in self.socks:
if i['sock'] == sock: return
self.socks.append({'bot': bot, 'sock': sock, 'nick': nick, 'userhost': userhost, 'channel': channel, 'silent': False})
logging.debug("partyline - added user %s" % nick)
def del_party(self, nick):
''' remove a socket with nick from the list. '''
nick = nick.lower()
self.lock.acquire()
try:
for socknr in range(len(self.socks)-1, -1, -1):
if self.socks[socknr]['nick'].lower() == nick: del self.socks[socknr]
logging.debug('partyline - removed user %s' % nick)
finally: self.lock.release()
def list_nicks(self):
''' list all connected nicks. '''
result = []
for item in self.socks: result.append(item['nick'])
return result
def say_broadcast(self, txt):
''' broadcast a message to all ppl on partyline. '''
for item in self.socks:
if not item['silent']: item['sock'].send("%s\n" % txt)
def say_broadcast_notself(self, nick, txt):
''' broadcast a message to all ppl on partyline, except the sender. '''
nick = nick.lower()
for item in self.socks:
if item['nick'] == nick: continue
if not item['silent']: item['sock'].send("%s\n" % txt)
def say_nick(self, nickto, msg):
''' say a message on the partyline to an user. '''
nickto = nickto.lower()
for item in self.socks:
if item['nick'].lower() == nickto:
if not '\n' in msg: msg += "\n"
item['sock'].send("%s" % msg)
return
def is_on(self, nick):
''' checks if user an is on the partyline. '''
nick = nick.lower()
for item in self.socks:
if item['nick'].lower() == nick: return True
return False
## global partyline object
partyline = PartyLine()
| Python |
# jsb/lib/factory.py
#
#
""" Factory to produce instances of classes. """
## jsb imports
from jsb.utils.exception import handle_exception
from jsb.lib.errors import NoSuchBotType
## basic imports
import logging
## Factory base class
class Factory(object):
    """ base class for factories. """
    pass
## BotFactory class
class BotFactory(Factory):
    """ factory that creates bot instances from a type string and a config. """
    def create(self, type, cfg):
        """ create a bot .. type selects the driver, cfg is handed to the
            bot constructor. returns None when construction fails. """
        try:
            if 'xmpp' in type:
                # prefer the GAE xmpp driver when waveapi is importable
                try:
                    import waveapi
                    from jsb.drivers.gae.xmpp.bot import XMPPBot
                    bot = XMPPBot(cfg)
                except ImportError:
                    from jsb.drivers.xmpp.bot import SXMPPBot
                    bot = SXMPPBot(cfg)
            elif type == 'web':
                from jsb.drivers.gae.web.bot import WebBot
                bot = WebBot(cfg)
            elif type == 'wave':
                from jsb.drivers.gae.wave.bot import WaveBot
                bot = WaveBot(cfg, domain=cfg.domain)
            elif type == 'irc':
                from jsb.drivers.irc.bot import IRCBot
                bot = IRCBot(cfg)
            elif type == 'console':
                from jsb.drivers.console.bot import ConsoleBot
                bot = ConsoleBot(cfg)
            elif type == 'base':
                from jsb.lib.botbase import BotBase
                bot = BotBase(cfg)
            elif type == 'convore':
                from jsb.drivers.convore.bot import ConvoreBot
                bot = ConvoreBot(cfg)
            else: raise NoSuchBotType('%s bot .. unproper type %s' % (type, cfg.dump()))
            return bot
        except AssertionError, ex: logging.error("%s - assertion error: %s" % (cfg.name, str(ex)))
        except Exception, ex: handle_exception()
# the global bot factory
bot_factory = BotFactory()
| Python |
# jsb/runner.py
#
#
""" threads management to run jobs. """
## jsb imports
from jsb.lib.threads import getname, start_new_thread, start_bot_command
from jsb.utils.exception import handle_exception
from jsb.utils.locking import locked, lockdec
from jsb.utils.lockmanager import rlockmanager, lockmanager
from jsb.utils.generic import waitevents
from jsb.utils.trace import callstack
from jsb.lib.threadloop import RunnerLoop
from jsb.lib.callbacks import callbacks
## basic imports
import Queue
import time
import thread
import random
import logging
import sys
## Runner class
class Runner(RunnerLoop):
    """
    a runner is a thread with a queue on which jobs can be pushed.
    jobs scheduled should not take too long since only one job can
    be executed in a Runner at the same time.
    """
    def __init__(self, name="runner", doready=True):
        RunnerLoop.__init__(self, name)
        # True while a job is being executed
        self.working = False
        self.starttime = time.time()
        self.elapsed = self.starttime
        self.finished = time.time()
        self.doready = doready
    def handle(self, descr, func, *args, **kwargs):
        """ run one scheduled job .. serialised per function name through
            the rlockmanager so the same job never runs concurrently. """
        self.working = True
        name = getname(str(func))
        try:
            rlockmanager.acquire(getname(str(func)))
            name = getname(str(func))
            self.name = name
            logging.debug('running %s: %s' % (descr, name))
            self.starttime = time.time()
            func(*args, **kwargs)
            self.finished = time.time()
            self.elapsed = self.finished - self.starttime
            # warn about jobs that hog this runner
            if self.elapsed > 3:
                logging.debug('ALERT %s %s job taking too long: %s seconds' % (descr, str(func), self.elapsed))
        except Exception, ex: handle_exception()
        finally: rlockmanager.release()
        self.working = False
## BotEventRunner class
class BotEventRunner(Runner):
    """ runner for bot events .. calls func(bot, ievent, ...) and marks
        the event ready afterwards. """
    def handle(self, descr, func, bot, ievent, *args, **kwargs):
        """ schedule a bot command. """
        try:
            self.starttime = time.time()
            #lockmanager.acquire(getname(str(func)))
            name = getname(str(func))
            self.name = name
            self.working = True
            logging.debug("now running %s" % name)
            func(bot, ievent, *args, **kwargs)
            self.finished = time.time()
            self.elapsed = self.finished - self.starttime
            if self.elapsed > 3:
                logging.info('ALERT %s %s job taking too long: %s seconds' % (descr, str(func), self.elapsed))
            #if ievent.iscommand: ievent.ready()
            # OUTPUT events are not marked ready here
            if not ievent.type == "OUTPUT": ievent.ready()
        except Exception, ex:
            handle_exception(ievent)
        #finally: lockmanager.release(getname(str(func)))
        self.working = False
        self.name = "finished"
## Runners class
class Runners(object):
    """ a pool of runner objects .. jobs are handed to the first idle
        runner and new runners are created up to self.max. """

    def __init__(self, max=100, runnertype=Runner, doready=True):
        self.max = max
        self.runners = []
        self.runnertype = runnertype
        self.doready = doready

    def runnersizes(self):
        """ return a list of "<queuesize> - <name>" strings, one per runner. """
        result = []
        for runner in self.runners: result.append("%s - %s" % (runner.queue.qsize(), runner.name))
        return result

    def stop(self):
        """ stop all runners in the pool. """
        for runner in self.runners: runner.stop()

    def start(self):
        """ overload this if needed. """
        pass

    def put(self, *data):
        """ hand a job to an idle runner, creating one when needed. """
        logging.debug("size is %s" % len(self.runners))
        for runner in self.runners:
            if not runner.queue.qsize():
                runner.put(*data)
                return
        runner = self.makenew()
        runner.put(*data)

    def running(self):
        """ return the names of jobs currently being worked on. """
        result = []
        for runner in self.runners:
            if runner.queue.qsize(): result.append(runner.nowrunning)
        return result

    def makenew(self):
        """ return an idle runner .. creates (and starts) a fresh one while
            the pool is not full, otherwise picks a random busy one. """
        runner = None
        for i in self.runners:
            if not i.queue.qsize(): return i
        if len(self.runners) < self.max:
            # BUGFIX: doready used to be passed positionally and landed in
            # the runnertype's "name" parameter .. pass it by keyword
            runner = self.runnertype(doready=self.doready)
            runner.start()
            self.runners.append(runner)
        else: runner = random.choice(self.runners)
        return runner

    def cleanup(self):
        """ stop and drop every idle runner. """
        if not len(self.runners): logging.debug("nothing to clean")
        for index in range(len(self.runners)-1, -1, -1):
            runner = self.runners[index]
            logging.debug("cleanup %s" % runner.name)
            if not runner.queue.qsize(): runner.stop() ; del self.runners[index]
            else: logging.info("now running: %s" % runner.nowrunning)
## show runner status
def runner_status():
    """ print queue sizes of the global runner pools (console helper). """
    print cmndrunner.runnersizes()
    print callbackrunner.runnersizes()
## global runners
# NOTE(review): cmndrunner, defaultrunner and longrunner are three names
# bound to ONE shared Runners pool .. confirm that is intended before
# relying on them being separate pools
cmndrunner = defaultrunner = longrunner = Runners(10, BotEventRunner)
callbackrunner = Runners(10, BotEventRunner, doready=False)
waitrunner = Runners(10, BotEventRunner, doready=False)
## cleanup
def runnercleanup(bot, event):
    """ TICK callback .. drop idle runners from all global pools. """
    cmndrunner.cleanup()
    logging.debug("cmndrunner sizes: %s" % str(cmndrunner.runnersizes()))
    callbackrunner.cleanup()
    # BUGFIX: the next two log lines reported cmndrunner's sizes instead
    # of the pool that was just cleaned
    logging.debug("callbackrunner sizes: %s" % str(callbackrunner.runnersizes()))
    waitrunner.cleanup()
    logging.debug("waitrunner sizes: %s" % str(waitrunner.runnersizes()))

callbacks.add("TICK", runnercleanup)
| Python |
# jsb/persiststate.py
#
#
""" persistent state classes. """
## jsb imports
from jsb.utils.name import stripname
from jsb.utils.trace import calledfrom
from persist import Persist
from jsb.lib.datadir import getdatadir
## basic imports
import types
import os
import sys
import logging
## PersistState classes
class PersistState(Persist):
    """ base persistent state class .. a Persist whose values keep the
        type they were first defined with. """

    def __init__(self, filename):
        Persist.__init__(self, filename)
        # remember the type of every loaded value so define() can detect
        # type changes
        self.types = dict((key, type(value)) for key, value in self.data.iteritems())

    def __getitem__(self, key):
        """ get a state item. """
        return self.data[key]

    def __setitem__(self, key, value):
        """ set a state item. """
        self.data[key] = value

    def define(self, key, value):
        """ set a default .. only applied when the key is missing or the
            stored type differs, normalising str to unicode and int to long. """
        if not self.data.has_key(key) or type(value) != self.types[key]:
            if type(value) == types.StringType: value = unicode(value)
            if type(value) == types.IntType: value = long(value)
            self.data[key] = value
class PlugState(PersistState):
    """ persistent state scoped to the calling plugin. """

    def __init__(self, *args, **kwargs):
        # the plugin name is derived from the caller's stack frame
        self.plugname = calledfrom(sys._getframe())
        logging.debug('persiststate - initialising %s' % self.plugname)
        statedir = getdatadir() + os.sep + 'state' + os.sep + 'plugs' + os.sep + self.plugname
        PersistState.__init__(self, statedir + os.sep + 'state')
class ObjectState(PersistState):
    """ persistent state for usage in constructors; file is named after the caller. """

    def __init__(self, *args, **kwargs):
        caller = calledfrom(sys._getframe(1))
        PersistState.__init__(self, getdatadir() + os.sep + 'state' + os.sep + caller + '.state')
class UserState(PersistState):
    """ persistent state scoped to a single user. """

    def __init__(self, username, filename="state", *args, **kwargs):
        assert username
        safename = stripname(username)
        userdir = getdatadir() + os.sep + 'state' + os.sep + 'users' + os.sep + safename
        PersistState.__init__(self, userdir + os.sep + filename)
| Python |
# jsb/cache.py
#
#
""" jsb cache provding get, set and delete functions. """
## basic imports
import logging
## defines

# module level cache store; maps name -> cached object
cache = {}

## functions

def get(name, namespace=""):
    """ get data from the cache; returns None when missing or empty.
        namespace is accepted for API compatibility but ignored. """
    data = cache.get(name)
    if data:
        logging.debug("cache - returning %s" % name)
        return data

def set(name, item, timeout=0, namespace=""):
    """ set data in the cache. timeout and namespace are accepted for
        API compatibility but ignored. """
    # BUGFIX: len(item) raised TypeError for unsized items (ints, None, ...)
    try:
        size = len(item)
    except TypeError:
        size = 0
    logging.debug("cache - setting %s (%s)" % (name, size))
    cache[name] = item

def delete(name, namespace=""):
    """ delete data from the cache; returns True when something was removed. """
    try:
        del cache[name]
        # logging.warn is a deprecated alias; use warning
        logging.warning("cache - deleted %s" % name)
        return True
    except KeyError:
        return False
| Python |
'''
Created on 21-03-2011
@author: maciek
'''
def formatString(format, **kwargs):
    '''
    Apply str.format substituting only the placeholders named in kwargs;
    any other braces in the template survive as literal text.
    '''
    if not format: return ''
    # shield the known placeholders, then escape every remaining brace
    for key in kwargs:
        format = format.replace("{" + key + "}", "##" + key + "##")
    format = format.replace("{", "{{").replace("}", "}}")
    for key in kwargs:
        format = format.replace("##" + key + "##", "{" + key + "}")
    rendered = format.format(**kwargs)
    # collapse doubled braces left over after formatting
    return rendered.replace("{{", "{").replace("}}", "}")
'''
Created on 21-03-2011
@author: maciek
'''
from IndexGenerator import IndexGenerator
from optparse import OptionParser
import os
import tempfile
import shutil
import logging
# use DEBUG logging for the whole generator script
logging.basicConfig(level = logging.DEBUG)

# command line options; every one of them is mandatory and checked below
parser = OptionParser()
parser.add_option('-n', '--app-name', action='store', dest='appName', help='aplication name')
parser.add_option('-u', '--release-urls', action='store', dest='releaseUrls', help='URLs of download files - as coma separated list of entrires')
parser.add_option('-d', '--destination-directory', action='store', dest='otaAppDir', help='Directory where OTA files are created')
parser.add_option('-v', '--version', action='store', dest='version', help='Version of the application')
parser.add_option('-r', '--releases', action='store', dest='releases', help='Release names of the application')
parser.add_option('-R', '--release-notes', action='store', dest='releaseNotes', help='Release notes of the application (in txt2tags format)')
parser.add_option('-D', '--description', action='store', dest='description', help='Description of the application (in txt2tags format)')
(options, args) = parser.parse_args()

# parser.error() prints usage and exits, so only the first missing
# option is reported per run
if options.appName == None:
    parser.error("Please specify the appName.")
elif options.releaseUrls == None:
    parser.error("Please specify releaseUrls")
elif options.otaAppDir == None:
    parser.error("Please specify destination directory")
elif options.version == None:
    parser.error("Please specify version")
elif options.releases == None:
    parser.error("Please specify releases")
elif options.releaseNotes == None:
    parser.error("Please specify releaseNotes")
elif options.description == None:
    parser.error("Please specify description")

# unpack the validated options into module level names used below
appName = options.appName
releaseUrls = options.releaseUrls
otaAppDir = options.otaAppDir
version = options.version
releases = options.releases
releaseNotes = options.releaseNotes
description = options.description
def findIconFilename():
    """ Return the first existing launcher icon, highest density first;
        falls back to res/drawable/icon.png. """
    candidates = ("res/drawable-hdpi/icon.png",
                  "res/drawable-mdpi/icon.png",
                  "res/drawable-ldpi/icon.png")
    iconPath = "res/drawable/icon.png"
    for candidate in candidates:
        if os.path.exists(candidate):
            iconPath = candidate
            break
    logging.debug("IconPath: "+iconPath)
    return iconPath
def createOTApackage():
    '''
    Build the OTA index.html in a temporary file and return it rewound
    to the beginning, ready to be copied to its destination.
    '''
    # BUGFIX: the notes/description files were opened without being closed
    with open(releaseNotes) as notesFile:
        releaseNotesContent = notesFile.read()
    with open(description) as descriptionFile:
        descriptionContent = descriptionFile.read()
    indexGenerator = IndexGenerator(appName, releaseUrls, releaseNotesContent, descriptionContent, version, releases)
    index = indexGenerator.get()
    tempIndexFile = tempfile.TemporaryFile()
    tempIndexFile.write(index)
    tempIndexFile.flush()
    tempIndexFile.seek(0)
    return tempIndexFile
tempIndexFile = createOTApackage()
# create the destination directory when needed
if not os.path.isdir(otaAppDir):
    logging.debug("creating dir: "+otaAppDir)
    os.mkdir(otaAppDir)
else:
    logging.warning("dir: "+otaAppDir+" exists")
# BUGFIX: index.html was never closed, so buffered output could be lost
indexFile = open(os.path.join(otaAppDir,"index.html"),'w')
try:
    shutil.copyfileobj(tempIndexFile, indexFile)
finally:
    indexFile.close()
# place the launcher icon next to the index page
srcIconFileName = findIconFilename()
disIconFileName = os.path.join(otaAppDir,"Icon.png")
shutil.copy(srcIconFileName,disIconFileName)
| Python |
'''
Created on 21-03-2011
@author: maciek
'''
from formater import formatString
import os
class IndexGenerator(object):
    '''
    Generates Index.html for iOS app OTA distribution
    '''
    basePath = os.path.dirname(__file__)
    templateFile = os.path.join(basePath,"templates/index.tmpl")
    releaseUrls = ""
    appName = ""
    changeLog = ""
    description = ""
    version = ""
    release = ""

    def __init__(self,appName, releaseUrls, changeLog, description, version, releases):
        '''
        Store the release metadata used to render the download page.
        '''
        self.appName = appName
        self.releaseUrls = releaseUrls
        self.changeLog = changeLog
        self.description = description
        self.version = version
        self.releases = releases

    def get(self):
        '''
        Render the index.html source code from the template file.
        '''
        urls = self.releaseUrls.split(",")
        names = self.releases.split(",")
        rows = []
        for pos, release in enumerate(names):
            rows.append(" <li>\n")
            rows.append(" <h3><a href=\"javascript:load('" + urls[pos] + "')\">" + release + "</a></h3>\n")
            rows.append(" </li>\n")
        template = open(self.templateFile).read()
        return formatString(template,
                            downloads="".join(rows),
                            changeLog=self.changeLog,
                            appName=self.appName,
                            description=self.description,
                            version=self.version)
'''
Created on 21-03-2011
@author: maciek
'''
from formater import formatString
import os
class IndexGenerator(object):
    '''
    Generates Index.html for iOS app OTA distribution
    '''
    basePath = os.path.dirname(__file__)
    templateFile = os.path.join(basePath,"templates/index.tmpl")
    releaseUrls = ""
    appName = ""
    changeLog = ""
    description = ""
    version = ""
    release = ""

    def __init__(self,appName, releaseUrls, changeLog, description, version, releases):
        '''
        Store the release metadata used to render the download page.
        '''
        self.appName = appName
        self.releaseUrls = releaseUrls
        self.changeLog = changeLog
        self.description = description
        self.version = version
        self.releases = releases

    def get(self):
        '''
        Render the index.html source code from the template file.
        '''
        urls = self.releaseUrls.split(",")
        names = self.releases.split(",")
        rows = []
        for pos, release in enumerate(names):
            rows.append(" <li>\n")
            rows.append(" <h3><a href=\"javascript:load('" + urls[pos] + "')\">" + release + "</a></h3>\n")
            rows.append(" </li>\n")
        template = open(self.templateFile).read()
        return formatString(template,
                            downloads="".join(rows),
                            changeLog=self.changeLog,
                            appName=self.appName,
                            description=self.description,
                            version=self.version)
'''
Created on 21-03-2011
@author: maciek
'''
def formatString(format, **kwargs):
    '''
    str.format wrapper that substitutes only placeholders named in kwargs
    and leaves every other brace in the template alone.
    '''
    if not format: return ''
    placeholders = list(kwargs.keys())
    # protect real placeholders, escape the remaining braces
    for key in placeholders:
        format = format.replace("{" + key + "}", "##" + key + "##")
    format = format.replace("{", "{{")
    format = format.replace("}", "}}")
    # restore the protected placeholders and format
    for key in placeholders:
        format = format.replace("##" + key + "##", "{" + key + "}")
    out = format.format(**kwargs)
    out = out.replace("{{", "{")
    out = out.replace("}}", "}")
    return out
'''
Created on 21-03-2011
@author: maciek
'''
from IndexGenerator import IndexGenerator
from optparse import OptionParser
import os
import tempfile
import shutil
import logging
# use DEBUG logging for the whole generator script
logging.basicConfig(level = logging.DEBUG)

# command line options; every one of them is mandatory and checked below
parser = OptionParser()
parser.add_option('-n', '--app-name', action='store', dest='appName', help='aplication name')
parser.add_option('-u', '--release-urls', action='store', dest='releaseUrls', help='URLs of download files - as coma separated list of entrires')
parser.add_option('-d', '--destination-directory', action='store', dest='otaAppDir', help='Directory where OTA files are created')
parser.add_option('-v', '--version', action='store', dest='version', help='Version of the application')
parser.add_option('-r', '--releases', action='store', dest='releases', help='Release names of the application')
parser.add_option('-R', '--release-notes', action='store', dest='releaseNotes', help='Release notes of the application (in txt2tags format)')
parser.add_option('-D', '--description', action='store', dest='description', help='Description of the application (in txt2tags format)')
(options, args) = parser.parse_args()

# parser.error() prints usage and exits, so only the first missing
# option is reported per run
if options.appName == None:
    parser.error("Please specify the appName.")
elif options.releaseUrls == None:
    parser.error("Please specify releaseUrls")
elif options.otaAppDir == None:
    parser.error("Please specify destination directory")
elif options.version == None:
    parser.error("Please specify version")
elif options.releases == None:
    parser.error("Please specify releases")
elif options.releaseNotes == None:
    parser.error("Please specify releaseNotes")
elif options.description == None:
    parser.error("Please specify description")

# unpack the validated options into module level names used below
appName = options.appName
releaseUrls = options.releaseUrls
otaAppDir = options.otaAppDir
version = options.version
releases = options.releases
releaseNotes = options.releaseNotes
description = options.description
def findIconFilename():
    """ Return the first existing launcher icon, highest density first;
        falls back to res/drawable/icon.png. """
    iconPath = "res/drawable/icon.png"
    for density in ("hdpi", "mdpi", "ldpi"):
        candidate = "res/drawable-%s/icon.png" % density
        if os.path.exists(candidate):
            iconPath = candidate
            break
    logging.debug("IconPath: "+iconPath)
    return iconPath
def createOTApackage():
    '''
    Build the OTA index.html in a temporary file and return it rewound
    to the beginning, ready to be copied to its destination.
    '''
    # BUGFIX: the notes/description files were opened without being closed
    with open(releaseNotes) as notesFile:
        releaseNotesContent = notesFile.read()
    with open(description) as descriptionFile:
        descriptionContent = descriptionFile.read()
    indexGenerator = IndexGenerator(appName, releaseUrls, releaseNotesContent, descriptionContent, version, releases)
    index = indexGenerator.get()
    tempIndexFile = tempfile.TemporaryFile()
    tempIndexFile.write(index)
    tempIndexFile.flush()
    tempIndexFile.seek(0)
    return tempIndexFile
tempIndexFile = createOTApackage()
# create the destination directory when needed
if not os.path.isdir(otaAppDir):
    logging.debug("creating dir: "+otaAppDir)
    os.mkdir(otaAppDir)
else:
    logging.warning("dir: "+otaAppDir+" exists")
# BUGFIX: index.html was never closed, so buffered output could be lost
indexFile = open(os.path.join(otaAppDir,"index.html"),'w')
try:
    shutil.copyfileobj(tempIndexFile, indexFile)
finally:
    indexFile.close()
# place the launcher icon next to the index page
srcIconFileName = findIconFilename()
disIconFileName = os.path.join(otaAppDir,"Icon.png")
shutil.copy(srcIconFileName,disIconFileName)
| Python |
""" YABEE (Yet another Blender's egg-exporter)
for Blender 2.59
rev 11.1
"""
# -------------- Change this to setup parameters -----------------------
#: file name to write
FILE_PATH = './exp_test/test.egg'
#: { 'animation_name' : (start_frame, end_frame, frame_rate) }
ANIMATIONS = {'anim1':(0,10,5),
              }
#: 'True' to interpret an image in the uv layer as the texture
EXPORT_UV_IMAGE_AS_TEXTURE = False
#: 'True' to copy texture images together with main.egg
COPY_TEX_FILES = True
#: Path for the copied textures. Relative to the main EGG file dir.
#: For example if main file path is '/home/username/test/test.egg',
#: texture path is './tex', then the actual texture path is
#: '/home/username/test/tex'
TEX_PATH = './tex'
#: 'True' to write an animation data into the separate files
SEPARATE_ANIM_FILE = True
#: 'True' to write only animation data
ANIM_ONLY = False
#: number of digits after the decimal point in exported floats
FLOATING_POINT_ACCURACY = 3
#: Enable tangent space calculation. Tangent space is needed for some
# shaders/autoshaders, but increases exporting time.
# 'NO', 'INTERNAL', 'PANDA'
# 'INTERNAL' - use internal TBS calculation
# 'PANDA' - use egg-trans to calculate TBS
# 'NO' - do not calc TBS
CALC_TBS = 'PANDA'
#: Type of texture processing. May be 'SIMPLE' or 'BAKE'.
# 'SIMPLE' - export all texture layers as MODULATE.
# Exceptions:
#   use map normal   == NORMAL
#   use map specular == GLOSS
#   use map emit     == GLOW
# 'BAKE' - bake textures. BAKE_LAYERS sets up what will be baked.
# Also the diffuse color of the material is set to (1,1,1) in
# 'BAKE' mode.
TEXTURE_PROCESSOR = 'BAKE'
#TEXTURE_PROCESSOR = 'SIMPLE'
# per bake layer: (texture size, do_bake flag)
BAKE_LAYERS = {'diffuse':(512, True),
               'normal':(512, True),
               'gloss': (512, True), # specular
               'glow': (512, False) # emission
               }
# ----------------------------------------------------------------------
import bpy, os, sys
if __name__ == '__main__':
    # Dirty hack. I can't get the script dir through the sys.argv[0] or __file__,
    # so derive it from the filepaths of the loaded Blender text blocks.
    try:
        for text in bpy.data.texts:
            script_dir = os.path.dirname(text.filepath)
            if os.name == 'nt':
                script_dir = os.path.abspath(script_dir + '\\..')
            else:
                script_dir = os.path.abspath(script_dir + '/..')
            if script_dir not in sys.path:
                sys.path.append(os.path.abspath(script_dir))
    # BUGFIX: bare 'except:' also swallowed SystemExit/KeyboardInterrupt;
    # the local 'dir' also shadowed the builtin (renamed to script_dir)
    except Exception:
        print('Error while trying to add a paths in the sys.path')
    import io_scene_egg.yabee_libs.egg_writer
    print('RELOADING MODULES')
    # reload so in-editor edits of the writer are picked up on each run
    import imp
    imp.reload(io_scene_egg.yabee_libs.egg_writer)
    egg_writer = io_scene_egg.yabee_libs.egg_writer
    egg_writer.write_out(FILE_PATH,
                         ANIMATIONS,
                         EXPORT_UV_IMAGE_AS_TEXTURE,
                         SEPARATE_ANIM_FILE,
                         ANIM_ONLY,
                         COPY_TEX_FILES,
                         TEX_PATH,
                         FLOATING_POINT_ACCURACY,
                         CALC_TBS,
                         TEXTURE_PROCESSOR,
                         BAKE_LAYERS)
| Python |
""" Part of the YABEE
rev 1.1
"""
# addon metadata consumed by Blender's addon manager
bl_info = {
    "name": "Panda3d EGG format",
    "author": "Andrey (Ninth) Arbuzov",
    "blender": (2, 6, 0),
    "api": 41226,
    "location": "File > Import-Export",
    "description": ("Export to Panda3D EGG: meshes, uvs, materials, textures, "
                    "armatures, animation and curves"),
    "warning": "May contain bugs. Make backup of your file before use.",
    "wiki_url": ("http://www.panda3d.org/forums/viewtopic.php?t=11441"),
    "tracker_url": "yabee.googlecode.com",
    "category": "Import-Export"}
# support Blender's "Reload Scripts": if the addon was already loaded,
# re-import the writer submodule so code changes take effect
if "bpy" in locals():
    import imp
    if 'egg_writer' in locals():
        imp.reload(egg_writer)
import bpy
from bpy_extras.io_utils import ExportHelper
from bpy.props import *
# --------------- Properties --------------------
class EGGBakeProperty(bpy.types.PropertyGroup):
    ''' Texture baker settings: bake resolution plus an on/off flag. '''
    # NOTE: definition order of the properties matters for the UI layout
    res_x = IntProperty(name = "Res. X", default=512)
    res_y = IntProperty(name = "Res. Y", default=512)
    export = BoolProperty(default = False)

    def draw(self, row, name):
        """ draw resolution fields and the export toggle into the given row. """
        row.prop(self, "res_x")
        row.prop(self, "res_y")
        row.prop(self, "export")
        row.label(name)
class EGGAnimationProperty(bpy.types.PropertyGroup):
    ''' One animation record: name plus frame range and playback rate. '''
    name = StringProperty(name="Name", default="Unknown")
    from_frame = IntProperty(name="From", default=1)
    to_frame = IntProperty(name="To", default=2)
    fps = IntProperty(name="FPS", default=24)

    def __get_idx(self):
        # position of this record inside the scene's animation collection
        return list(bpy.context.scene.yabee_settings.opt_anim_list.anim_collection).index(self)

    index = property(__get_idx)
class EGGAnimList(bpy.types.PropertyGroup):
    ''' Animations list settings: collection of records + active index. '''
    active_index = IntProperty()
    anim_collection = CollectionProperty(type=EGGAnimationProperty)

    def get_anim_dict(self):
        """ return {name: (from_frame, to_frame, fps)} for every record. """
        d = {}
        for anim in self.anim_collection:
            d[anim.name] = (anim.from_frame, anim.to_frame, anim.fps)
        return d
class YABEEProperty(bpy.types.PropertyGroup):
    ''' Main YABEE class for storing the export settings (held on the scene). '''

    opt_tex_proc = EnumProperty(
            name="Tex. processing",
            description="Export all textures as MODULATE or bake texture layers",
            items=(('SIMPLE', "Simple", "Export all texture layers."),
                   ('BAKE', "Bake", "Bake textures.")),
            default='SIMPLE',
            )

    # per-layer bake settings (resolution + enable flag)
    opt_bake_diffuse = PointerProperty(type=EGGBakeProperty)
    opt_bake_normal = PointerProperty(type=EGGBakeProperty)
    opt_bake_gloss = PointerProperty(type=EGGBakeProperty)
    opt_bake_glow = PointerProperty(type=EGGBakeProperty)

    # NOTE(review): description below looks copy-pasted from opt_tex_proc --
    # it talks about texture processing, not TBS generation
    opt_tbs_proc = EnumProperty(
            name="TBS generation",
            description="Export all textures as MODULATE or bake texture layers",
            items=(('PANDA', "Panda", "Use egg-trans to calculate TBS (Need installed Panda3D)."),
                   ('INTERNAL', "Internal", "Use internal YABEE TBS generator"),
                   ('NO', "No", "Do not generate TBS.")),
            default='NO',
            )

    opt_export_uv_as_texture = BoolProperty(
            name="UV as texture",
            description="export uv image as texture",
            default=False,
            )

    opt_copy_tex_files = BoolProperty(
            name="Copy texture files",
            description="Copy texture files together with EGG",
            default=True,
            )

    opt_separate_anim_files = BoolProperty(
            name="Separate animation files",
            description="Write an animation data into the separate files",
            default=True,
            )

    opt_anim_only = BoolProperty(
            name="Animation only",
            description="Write only animation data",
            default=False,
            )

    opt_tex_path = StringProperty(
            name="Tex. path",
            description="Path for the copied textures. Relative to the main EGG file dir",
            default='./tex',
            )

    opt_anim_list = PointerProperty(type=EGGAnimList)

    # True until reset_defaults() has run once (used by the export operator)
    first_run = BoolProperty(default = True)

    def draw(self, layout):
        """ draw the full export settings UI into the given layout. """
        row = layout.row()
        row.operator("export.yabee_reset_defaults", icon="FILE_REFRESH", text="Reset to defaults")
        row.operator("export.yabee_help", icon="URL", text="Help")

        layout.row().label('Animation:')
        row = layout.row()
        row.template_list(self.opt_anim_list,
                          "anim_collection",
                          self.opt_anim_list,
                          "active_index",
                          rows=2)
        col = row.column(align=True)
        col.operator("export.egg_anim_add", icon='ZOOMIN', text="")
        col.operator("export.egg_anim_remove", icon='ZOOMOUT', text="")
        sett = self.opt_anim_list
        if len(sett.anim_collection):
            # show the frame range/fps of the currently selected record
            p = sett.anim_collection[sett.active_index]
            layout.row().prop(p, 'name')
            row = layout.row(align = True)
            row.prop(p, 'from_frame')
            row.prop(p, 'to_frame')
            row.prop(p, 'fps')
        layout.separator()

        layout.row().label('Options:')
        layout.row().prop(self, 'opt_anim_only')
        layout.row().prop(self, 'opt_separate_anim_files')
        if not self.opt_anim_only:
            # mesh/texture related options are hidden in animation-only mode
            layout.row().prop(self, 'opt_tbs_proc')
            if self.opt_tex_proc == 'BAKE':
                box = layout.box()
                box.row().prop(self, 'opt_tex_proc')
                self.opt_bake_diffuse.draw(box.row(align = True), "Diffuse")
                self.opt_bake_normal.draw(box.row(align = True), "Normal")
                self.opt_bake_gloss.draw(box.row(align = True), "Gloss")
                self.opt_bake_glow.draw(box.row(align = True), "Glow")
            else:
                layout.row().prop(self, 'opt_tex_proc')
            if self.opt_tex_proc == 'SIMPLE':
                layout.row().prop(self, 'opt_export_uv_as_texture')
            if self.opt_copy_tex_files or self.opt_tex_proc == 'BAKE':
                box = layout.box()
                if self.opt_tex_proc == 'SIMPLE':
                    box.row().prop(self, 'opt_copy_tex_files')
                box.row().prop(self, 'opt_tex_path')
            else:
                layout.row().prop(self, 'opt_copy_tex_files')

    def get_bake_dict(self):
        """ return {layer_name: (res_x, res_y, export_flag)} for the baker. """
        d = {}
        opts = ((self.opt_bake_diffuse, 'diffuse'),
                (self.opt_bake_normal, 'normal'),
                (self.opt_bake_gloss, 'gloss'),
                (self.opt_bake_glow, 'glow')
                )
        for opt, name in opts:
            d[name] = (opt.res_x, opt.res_y, opt.export)
        return d

    def check_warns(self, context):
        """ return a list of warning strings for the current selection. """
        warns = []
        if len(context.selected_objects) == 0:
            warns.append('Nothing to export. Please, select "Mesh", \n' + \
                         '"Armature" or "Curve" objects.')
        return warns

    def reset_defaults(self):
        """ restore every option to its default value and clear animations. """
        self.opt_tex_proc = 'SIMPLE'
        self.opt_tbs_proc = 'NO'
        self.opt_bake_diffuse.export = True
        self.opt_bake_diffuse.res_x, self.opt_bake_diffuse.res_y = 512, 512
        self.opt_bake_normal.export = False
        self.opt_bake_normal.res_x, self.opt_bake_normal.res_y = 512, 512
        self.opt_bake_gloss.export = False
        self.opt_bake_gloss.res_x, self.opt_bake_gloss.res_y = 512, 512
        self.opt_bake_glow.export = False
        self.opt_bake_glow.res_x, self.opt_bake_glow.res_y = 512, 512
        self.opt_export_uv_as_texture = False
        self.opt_copy_tex_files = True
        self.opt_separate_anim_files = True
        self.opt_anim_only = False
        self.opt_tex_path = './tex'
        # the remove operator deletes the active record on each call
        while self.opt_anim_list.anim_collection[:]:
            bpy.ops.export.egg_anim_remove('INVOKE_DEFAULT')
        self.first_run = False
#def write_some_data(context, filepath, use_some_setting):
# print("running write_some_data...")
# f = open(filepath, 'w')
# f.write("Hello World %s" % use_some_setting)
# f.close()
# return {'FINISHED'}
# ------------------ Operators ----------------------------------
class YABEEHelp(bpy.types.Operator):
    bl_idname = "export.yabee_help"
    bl_label = "YABEE Help."

    def execute(self, context):
        # open the YABEE forum thread in the user's browser
        bpy.ops.wm.url_open("INVOKE_DEFAULT", url="http://www.panda3d.org/forums/viewtopic.php?t=11441")
        return {"FINISHED"}
class WarnDialog(bpy.types.Operator):
    ''' Warning messages operator '''
    bl_idname = "export.yabee_warnings"
    bl_label = "YABEE Warnings."

    def draw(self, context):
        # render each warning; only the first line carries the error icon
        warns = context.scene.yabee_settings.check_warns(context)
        for warn in warns:
            for n, line in enumerate(warn.splitlines()):
                if n == 0:
                    self.layout.row().label(line, icon="ERROR")
                else:
                    self.layout.row().label(' ' + line, icon="NONE")

    def execute(self, context):
        # nothing to do; the dialog is informational only
        #print("Dialog Runs")
        return {'FINISHED'}

    def invoke(self, context, event):
        # show the warnings as a popup properties dialog
        wm = context.window_manager
        return wm.invoke_props_dialog(self)
class ResetDefault(bpy.types.Operator):
    ''' Reset YABEE settings to default operator '''
    bl_idname = "export.yabee_reset_defaults"
    bl_label = "YABEE reset default settings"

    def execute(self, context):
        context.scene.yabee_settings.reset_defaults()
        return {'FINISHED'}
class AddAnim(bpy.types.Operator):
    ''' Add animation record operator '''
    bl_idname = "export.egg_anim_add"
    bl_label = "Add EGG animation"

    def execute(self, context):
        prop = context.scene.yabee_settings.opt_anim_list.anim_collection.add()
        # name the new record after its position in the collection
        prop.name = 'Anim'+str(prop.index)
        return {'FINISHED'}
class RemoveAnim(bpy.types.Operator):
    ''' Remove active animation record operator '''
    bl_idname = "export.egg_anim_remove"
    bl_label = "Remove EGG animation"

    def execute(self, context):
        sett = context.scene.yabee_settings.opt_anim_list
        sett.anim_collection.remove(sett.active_index)
        # clamp the active index when the removed record was the last one
        if len(sett.anim_collection):
            if sett.active_index not in [p.index for p in sett.anim_collection]:
                sett.active_index = sett.anim_collection[-1].index
        return {'FINISHED'}
class ExportPanda3DEGG(bpy.types.Operator, ExportHelper):
    ''' Export selected to the Panda3D EGG format '''
    bl_idname = "export.panda3d_egg"
    bl_label = "Export to Panda3D EGG"

    # ExportHelper mixin class uses this
    filename_ext = ".egg"
    filter_glob = StringProperty(
            default="*.egg",
            options={'HIDDEN'},
            )

    def execute(self, context):
        """ run the egg writer with the scene's stored export settings. """
        from .yabee_libs import egg_writer
        import imp
        # reload so edits to the writer are picked up without restarting Blender
        imp.reload(egg_writer)
        sett = context.scene.yabee_settings
        egg_writer.write_out(self.filepath,
                             sett.opt_anim_list.get_anim_dict(),
                             sett.opt_export_uv_as_texture,
                             sett.opt_separate_anim_files,
                             sett.opt_anim_only,
                             sett.opt_copy_tex_files,
                             sett.opt_tex_path,
                             3,  # NOTE(review): floating point accuracy hard-coded to 3 digits
                             sett.opt_tbs_proc,
                             sett.opt_tex_proc,
                             sett.get_bake_dict())
        return {'FINISHED'}

    def invoke(self, context, evt):
        # the first invocation seeds the scene settings with defaults
        if context.scene.yabee_settings.first_run:
            context.scene.yabee_settings.reset_defaults()
        return ExportHelper.invoke(self, context, evt)

    def draw(self, context):
        # surface a warning button when the selection cannot be exported
        warns = context.scene.yabee_settings.check_warns(context)
        if warns:
            self.layout.row().operator('export.yabee_warnings', icon='ERROR', text='Warning!')
        context.scene.yabee_settings.draw(self.layout)
def menu_func_export(self, context):
    """ File > Export menu entry for the EGG exporter. """
    self.layout.operator(ExportPanda3DEGG.bl_idname, text="Panda3D (.egg)")
def register():
    """ register all addon classes and hook the export menu entry. """
    bpy.utils.register_module(__name__)
    # Good or bad, but I'll store settings in the scene
    bpy.types.Scene.yabee_settings = PointerProperty(type=YABEEProperty)
    bpy.types.INFO_MT_file_export.append(menu_func_export)
def unregister():
    """ unregister the addon classes and remove the export menu entry. """
    bpy.utils.unregister_module(__name__)
    bpy.types.INFO_MT_file_export.remove(menu_func_export)
# running this file directly inside Blender's text editor registers the addon
if __name__ == "__main__":
    register()
    # test call
    #bpy.ops.export.panda3d_egg('INVOKE_DEFAULT')
| Python |
"""
Part of the YABEE
rev 1.2
"""
import bpy
# when imported as part of the addon pull the helpers from the package;
# the __main__ fallback at the bottom of this file defines local copies
if __name__ != '__main__':
    from io_scene_egg.yabee_libs.utils import convertFileNameToPanda, save_image

# bake layer name -> (Blender render.bake_type value, EGG envtype keyword)
BAKE_TYPES = {'diffuse': ('TEXTURE', 'MODULATE'),
              'normal': ('NORMALS', 'NORMAL'),
              'gloss': ('SPEC_INTENSITY', 'GLOSS'),
              'glow': ('EMIT', 'GLOW'),
              }
class SimpleTextures():
    """ Collects the textures used by the given objects for 'SIMPLE'
        texture processing (no baking). """

    def __init__(self, obj_list, uv_img_as_texture, copy_tex, file_path, tex_path):
        # work on a copy so later selection changes don't affect us
        self.obj_list = obj_list[:]
        self.uv_img_as_texture = uv_img_as_texture
        self.copy_tex = copy_tex
        self.file_path = file_path
        self.tex_path = tex_path

    def get_used_textures(self):
        """ Collect images from the UV images and Material texture slots
            tex_list structure:
                image_name: { 'scalars': [(name, val), (name, val), ...],
                              'path': 'path/to/texture'
                            }
        """
        tex_list = {}
        for obj in self.obj_list:
            if obj.type == 'MESH':
                # Texture from UV image
                if self.uv_img_as_texture:
                    for num, uv in enumerate(obj.data.uv_textures):
                        for f in uv.data:
                            #if f.use_image:
                            if f.image.source == 'FILE':
                                if not f.image.name in tex_list:
                                    name = uv.name
                                    # the first UV layer is referenced by an empty name in EGG
                                    if num == 0: name = ''
                                    t_path = bpy.path.abspath(f.image.filepath)
                                    if self.copy_tex:
                                        t_path = save_image(f.image, self.file_path, self.tex_path)
                                    tex_list[f.image.name] = {'path': t_path,
                                                              'scalars': [] }
                                    tex_list[f.image.name]['scalars'].append(('envtype', 'MODULATE'))
                                    if name:
                                        tex_list[f.image.name]['scalars'].append(('uv-name', name))
                # General textures
                for f in obj.data.faces:
                    if f.material_index < len(obj.data.materials):
                        for tex in obj.data.materials[f.material_index].texture_slots:
                            if ((tex) and (not tex.texture.use_nodes)):
                                # NOTE(review): uv_name is only assigned when
                                # texture_coords == 'UV'; other coordinate modes
                                # may reuse a stale value from a previous slot
                                if tex.texture_coords == 'UV' and obj.data.uv_textures:
                                    if tex.uv_layer:
                                        uv_name = tex.uv_layer
                                        # index() == 0 means the first layer -> empty name
                                        if not [uv.name for uv in obj.data.uv_textures].index(uv_name):
                                            uv_name = ''
                                    else:
                                        uv_name = '' #obj.data.uv_textures[0].name
                                if tex.texture.image and tex.texture.image.source == 'FILE':
                                    if not tex.texture.name in list(tex_list.keys()):
                                        # map Blender influence flags to EGG envtypes
                                        envtype = 'MODULATE'
                                        if tex.use_map_normal:
                                            envtype = 'NORMAL'
                                        if tex.use_map_emit:
                                            envtype = 'GLOW'
                                        if tex.use_map_specular:
                                            envtype = 'GLOSS'
                                        t_path = bpy.path.abspath(tex.texture.image.filepath)
                                        if self.copy_tex:
                                            t_path = save_image(tex.texture.image, self.file_path, self.tex_path)
                                        tex_list[tex.texture.name] = {'path': t_path,
                                                                      'scalars': [] }
                                        tex_list[tex.texture.name]['scalars'].append(('envtype', envtype))
                                        if uv_name:
                                            tex_list[tex.texture.name]['scalars'].append(('uv-name', uv_name))
        return tex_list
class TextureBaker():
    """ Bakes texture layers (diffuse/normal/gloss/glow) for 'BAKE'
        texture processing and returns EGG texture records. """

    def __init__(self, obj_list, file_path, tex_path):
        # object name -> saved UV image assignments (for restore)
        self.saved_objs = {}
        # object name -> name of the image the bake was rendered into
        self.rendered_images = {}
        self.obj_list = obj_list[:]
        self.file_path = file_path
        self.tex_path = tex_path

    def get_active_uv(self, obj):
        """ return the object's active UV layer or None. """
        auv = [uv for uv in obj.data.uv_textures if uv.active]
        if auv:
            return auv[0]
        else:
            return None

    def _save_obj_props(self, obj):
        """ remember the images assigned to the active UV layer. """
        props = {'uvs':[], 'textures':{}}
        active_uv = self.get_active_uv(obj)
        if active_uv:
            for uvd in active_uv.data:
                props['uvs'].append(uvd.image)
        self.saved_objs[obj.name] = props

    def _restore_obj_props(self, obj):
        """ put the remembered UV images back onto the active layer. """
        if obj.name in self.saved_objs.keys():
            props = self.saved_objs[obj.name]
            active_uv = self.get_active_uv(obj)
            if active_uv:
                for id, uvs in enumerate(props['uvs']):
                    uvd = active_uv.data[id]
                    uvd.image = uvs

    def _prepare_images(self, btype, tsizex, tsizey):
        """ create one bake target image per object and assign it to the
            active UV layer; returns {image_name: (uv, img, uv_idx, envtype)}
            or None on error. """
        assigned_data = {}
        for obj in self.obj_list:
            if obj.type == 'MESH' and self.get_active_uv(obj):
                self._save_obj_props(obj)
                img = bpy.data.images.new(obj.name + '_' + btype, tsizex, tsizey)
                self.rendered_images[obj.name] = img.name
                active_uv = self.get_active_uv(obj)
                active_uv_idx = obj.data.uv_textures[:].index(active_uv)
                if active_uv:
                    for uvd in active_uv.data:
                        uvd.image = img
                    assigned_data[obj.name + '_' + btype] = (active_uv, img, active_uv_idx, BAKE_TYPES[btype][1])
                else:
                    print('ERROR: %s have not active UV layer' % obj.name)
                    return None
        return assigned_data

    def _clear_images(self):
        """ drop all bake target images from the blend file. """
        for iname in self.rendered_images.values():
            img = bpy.data.images[iname]
            img.user_clear()
            bpy.data.images.remove(img)
        # BUGFIX: was 'self.rendred_images = []' -- a typo that left the real
        # dict untouched (and would have replaced it with the wrong type)
        self.rendered_images = {}

    def _save_rendered(self, spath):
        """ save every baked image under the given path. """
        for oname, iname in self.rendered_images.items():
            img = bpy.data.images[iname]
            img.save_render(spath + iname + '.' + bpy.context.scene.render.file_format.lower())

    def _save_images(self):
        """ save baked images next to the EGG file; returns {image_name: rel_path}. """
        paths = {}
        for oname, iname in self.rendered_images.items():
            img = bpy.data.images[iname]
            paths[iname] = save_image(img, self.file_path, self.tex_path)
        return paths

    def _select(self, obj):
        obj.select = True

    def _deselect(self, obj):
        obj.select = False

    def bake(self, bake_layers):
        """ bake the enabled layers for all objects; returns the same
            tex_list structure as SimpleTextures.get_used_textures()
            (or False when there is nothing to bake). """
        tex_list = {}
        for btype, params in bake_layers.items():
            # allow (size, do_bake) as shorthand for (size_x, size_y, do_bake)
            if len(params) == 2:
                params = (params[0], params[0], params[1])
            if params[2]:
                if btype in BAKE_TYPES.keys():
                    paths = None
                    if len(self.obj_list) == 0:
                        return False
                    assigned_data = self._prepare_images(btype, params[0], params[1])
                    if assigned_data:
                        old_selected = bpy.context.selected_objects[:]
                        # BUGFIX: these selection changes used map(), which is
                        # lazy in python 3 and therefore never executed
                        for obj in old_selected:
                            self._deselect(obj)
                        bpy.context.scene.render.bake_type = BAKE_TYPES[btype][0]
                        bpy.context.scene.render.bake_margin = 5
                        bpy.context.scene.render.image_settings.color_mode = 'RGBA'
                        bpy.context.scene.render.bake_normal_space = 'TANGENT'
                        for obj in self.obj_list:
                            self._select(obj)
                        bpy.ops.object.bake_image()
                        for obj in self.obj_list:
                            self._deselect(obj)
                        for obj in old_selected:
                            self._select(obj)
                        paths = self._save_images()
                        for obj in self.obj_list:
                            self._restore_obj_props(obj)
                        self._clear_images()
                        for key, val in assigned_data.items():
                            uv_name = val[0].name
                            if val[2] == 0:
                                uv_name = ''
                            envtype = val[3]
                            if paths:
                                img_path = paths[key]
                            else:
                                img_path = self.tex_path + val[1].name + '.' + bpy.context.scene.render.file_format.lower()
                            # Texture information dict
                            tex_list[key] = {'path': img_path,
                                             'scalars': [] }
                            tex_list[key]['scalars'].append(('envtype', envtype))
                            if uv_name:
                                tex_list[key]['scalars'].append(('uv-name', uv_name))
                            # GLOW/GLOSS layers carry their value in the alpha channel
                            if envtype in ('GLOW', 'GLOSS'):
                                tex_list[key]['scalars'].append(('alpha-file', '"' + img_path + '"'))
                else:
                    print('WARNING: unknown bake layer "%s"' % btype)
        return tex_list
if __name__ == '__main__':
    # BUGFIX: shutil was missing from this import although save_image uses it
    import os, sys, shutil

    def convertFileNameToPanda(filename):
        """ (Get from Chicken) Converts Blender filenames to Panda 3D filenames.
        """
        path = filename.replace('//', './').replace('\\', '/')
        if os.name == 'nt' and path.find(':') != -1:
            path = '/'+ path[0].lower() + path[2:]
        return path

    def save_image(img, file_path, text_path):
        """ save or copy the image next to the exported EGG file;
            returns its path relative to the EGG. """
        oldpath = bpy.path.abspath(img.filepath)
        old_dir, old_f = os.path.split(convertFileNameToPanda(oldpath))
        f_names = [s.lower() for s in old_f.split('.')]
        # dirty images in an unsupported format get re-rendered with a new extension
        if not f_names[-1] in ('jpg', 'png', 'tga', 'tiff', 'dds', 'bmp') and img.is_dirty:
            old_f += ('.' + bpy.context.scene.render.image_settings.file_format.lower())
        rel_path = os.path.join(text_path, old_f)
        if os.name == 'nt':
            rel_path = rel_path.replace('\\','/')
        new_dir, eg_f = os.path.split(file_path)
        new_dir = os.path.abspath(os.path.join(new_dir, text_path))
        if not os.path.exists(new_dir):
            os.makedirs(new_dir)
        if img.is_dirty:
            r_path = os.path.abspath(os.path.join(new_dir, old_f))
            img.save_render(r_path)
            print('RENDER IMAGE to %s; rel path: %s' % (r_path, rel_path))
        else:
            if os.path.exists(oldpath):
                newf = os.path.join(new_dir, old_f)
                if oldpath != newf:
                    shutil.copyfile(oldpath, newf)
                print('COPY IMAGE %s to %s; rel path %s' % (oldpath, newf, rel_path))
            else:
                if img.has_data:
                    img.filepath = os.path.abspath(os.path.join(new_dir, old_f))
                    print('SAVE IMAGE to %s; rel path: %s' % (img.filepath, rel_path))
                    img.save()
                    # BUGFIX: was 'img.filepath == oldpath' (a no-op comparison);
                    # restore the image's original path after saving the copy
                    img.filepath = oldpath
        return rel_path

    # smoke test against the current selection
    tb = TextureBaker(bpy.context.selected_objects,'./exp_test/test.egg', './tex')
    # BUGFIX: bake() requires a bake_layers dict; the bare call raised TypeError
    print(tb.bake({'diffuse': (512, True)}))
    st = SimpleTextures(bpy.context.selected_objects, False, False, './exp_test/test.egg', './tex')
    print(st.get_used_textures())
| Python |
"""
Part of the YABEE
rev 1.1
"""
import bpy, os, sys, shutil
def convertFileNameToPanda(filename):
    """ (Taken from Chicken) Convert a Blender file name into the
    Panda3D path convention: forward slashes everywhere and, on
    Windows, a "C:" drive prefix rewritten as "/c".
    """
    result = filename.replace('//', './')
    result = result.replace('\\', '/')
    if os.name == 'nt' and ':' in result:
        result = '/%s%s' % (result[0].lower(), result[2:])
    return result
def save_image(img, file_path, text_path):
    """ Save or copy the image *img* into the texture directory next to
    the exported egg file.

    @param img: Blender image datablock.
    @param file_path: path of the egg file being written.
    @param text_path: texture subdirectory, relative to the egg file.
    @return: image path relative to the egg file (as referenced in the egg).
    """
    if img.filepath:
        oldpath = bpy.path.abspath(img.filepath)
        old_dir, old_f = os.path.split(convertFileNameToPanda(oldpath))
        f_names = [s.lower() for s in old_f.split('.')]
        if not f_names[-1] in ('jpg', 'png', 'tga', 'tiff', 'dds', 'bmp') and img.is_dirty:
            old_f += ('.' + bpy.context.scene.render.image_settings.file_format.lower())
    else:
        # Packed/generated image without a file path: derive the file name
        # from the datablock name and the scene's output format.
        oldpath = ''
        old_dir = ''
        old_f = img.name + '.' + bpy.context.scene.render.image_settings.file_format.lower()
    rel_path = os.path.join(text_path, old_f)
    if os.name == 'nt':
        rel_path = rel_path.replace('\\','/')
    new_dir, eg_f = os.path.split(file_path)
    new_dir = os.path.abspath(os.path.join(new_dir, text_path))
    if not os.path.exists(new_dir):
        os.makedirs(new_dir)
    if img.is_dirty:
        # Image has unsaved changes: render it out to the texture dir.
        r_path = os.path.abspath(os.path.join(new_dir, old_f))
        img.save_render(r_path)
        print('RENDER IMAGE to %s; rel path: %s' % (r_path, rel_path))
    else:
        if os.path.exists(oldpath):
            # Source file exists on disk: copy it unless already in place.
            newf = os.path.join(new_dir, old_f)
            if oldpath != newf:
                shutil.copyfile(oldpath, newf)
            print('COPY IMAGE %s to %s; rel path %s' % (oldpath, newf, rel_path))
        else:
            if img.has_data:
                img.filepath = os.path.abspath(os.path.join(new_dir, old_f))
                print('SAVE IMAGE to %s; rel path: %s' % (img.filepath, rel_path),img,type(img))
                img.save()
                # BUGFIX: was "img.filepath == oldpath" — a no-op comparison;
                # the intent is to restore the original file path after saving.
                img.filepath = oldpath
    return rel_path
def get_active_uv(obj):
    """ Return the active UV texture layer of *obj*, or None when the
    mesh has no active UV layer.
    """
    for layer in obj.data.uv_textures:
        if layer.active:
            return layer
    return None
def eggSafeName(s):
    """ (Taken from Chicken) Make a name safe for the egg file format:
    bad characters are replaced by an underscore, and names containing
    spaces are wrapped in double quotes.
    """
    safe = str(s).replace('"', '_')  # '"' would break the egg syntax
    return '"' + safe + '"' if ' ' in safe else safe
| Python |
""" Part of the YABEE
rev 11.2
"""
import bpy, os, sys, shutil
from mathutils import *
from math import pi
import io_scene_egg.yabee_libs.tbn_generator
from io_scene_egg.yabee_libs.texture_processor import SimpleTextures, TextureBaker
from io_scene_egg.yabee_libs.utils import *
import imp
imp.reload(io_scene_egg.yabee_libs.texture_processor)
imp.reload(io_scene_egg.yabee_libs.tbn_generator)
imp.reload(io_scene_egg.yabee_libs.utils)
# Export configuration; these module-level globals are assigned by
# write_out() before any egg data is generated.
FILE_PATH = None                   # output egg file path
ANIMATIONS = None                  # animations to export
EXPORT_UV_IMAGE_AS_TEXTURE = None  # export images assigned in the UV editor
COPY_TEX_FILES = None              # copy texture files next to the egg
TEX_PATH = None                    # texture subdirectory
SEPARATE_ANIM_FILE = None          # write animation bundles keyed by object name
ANIM_ONLY = None                   # export only the animation data
CALC_TBS = None                    # tangent/binormal mode ('INTERNAL' uses TBNGenerator)
TEXTURE_PROCESSOR = None           # 'SIMPLE' or 'BAKE'
BAKE_LAYERS = None                 # bake layers setup

def STRF(x):
    """ Format a float with the default 6-digit accuracy.
    (Reassigned by write_out() according to the requested accuracy;
    was previously a lambda — converted to a def per PEP 8.)
    """
    return '%.6f' % x
class Group:
    """
    Representation of the EGG <Group> hierarchy structure as the
    linked list "one to many".
    """
    def __init__(self, obj):
        self.object = obj #: Link to the blender's object (None for the virtual root)
        self.childs = [] #: List of children (Groups)
    def make_hierarchy_from_list(self, obj_list):
        """ Build the <Group> hierarchy from a list of Blender objects.
        Self.object is the top level of the created hierarchy; usually
        in this case self.object == None.
        @param obj_list: tuple or list of blender's objects.
        """
        # An object is attached here when it is a direct child of this
        # group's object, or -- for the virtual None root -- when its
        # parent is not in obj_list and it was not attached already.
        # NOTE(review): membership is tested via str() of the objects,
        # presumably to sidestep object-comparison quirks — confirm.
        for obj in obj_list:
            if ((obj.parent == self.object) or
                ((self.object == None) and
                (str(obj.parent) not in map(str,obj_list)) and
                (str(obj) not in [str(ch.object) for ch in self.childs]))):
                gr = self.__class__(obj)
                self.childs.append(gr)
                gr.make_hierarchy_from_list(obj_list)
    def print_hierarchy(self, level = 0):
        """ Debug function to print out the hierarchy to the console.
        @param level: starting indent level.
        """
        print('-' * level, self.object)
        for ch in self.childs:
            ch.print_hierarchy(level+1)
    def get_tags_egg_str(self, level = 0):
        """ Create and return the <Tag> string from the Blender object's
        Game logic properties.
        @param level: indent level.
        @return: the EGG tags string.
        """
        egg_str = ''
        if self.object:
            for prop in self.object.game.properties:
                egg_str += '%s<Tag> %s { %s }\n' % (' ' * level,
                                eggSafeName(prop.name),
                                eggSafeName(prop.value))
        return egg_str
    def get_full_egg_str(self,level = 0):
        """ Create and return the representation of the EGG <Group>
        with hierarchy, started from self.object. It's the start point
        of the EGG structure generation.
        @param level: starting indent level.
        @return: full EGG string of the group.
        """
        egg_str = ''
        if self.object:
            egg_str += '%s<Group> %s {\n' % (' ' * level, eggSafeName(self.object.name))
            egg_str += self.get_tags_egg_str(level + 1)
            if self.object.type == 'MESH':
                # A mesh with an Armature modifier or with more than one
                # shape key is animated -> export as a <Dart> actor.
                if (('ARMATURE' in [m.type for m in self.object.modifiers]) or
                    (((self.object.data.shape_keys) and
                    (len(self.object.data.shape_keys.key_blocks) > 1)))):
                    egg_str += '%s<Dart> { 1 }\n' % (' ' * (level + 1))
                    egg_mesh = EGGActorObjectData(self.object)
                else:
                    egg_mesh = EGGMeshObjectData(self.object)
                for line in egg_mesh.get_full_egg_str().splitlines():
                    egg_str += '%s%s\n' % (' ' * (level + 1), line)
            elif self.object.type == 'CURVE':
                egg_obj = EGGNurbsCurveObjectData(self.object)
                for line in egg_obj.get_full_egg_str().splitlines():
                    egg_str += '%s%s\n' % (' ' * (level + 1), line)
            elif self.object.type == 'ARMATURE':
                egg_obj = EGGArmature(None)
                egg_obj.make_hierarchy_from_list(self.object.data.bones)
                egg_str += '%s<Dart> { 1 }\n' % (' ' * (level + 1))
                for line in egg_obj.get_full_egg_str({}, self.object, -1).splitlines():
                    egg_str += '%s%s\n' % (' ' * (level + 1), line)
            else:
                # Any other object type only contributes its transform.
                egg_obj = EGGBaseObjectData(self.object)
                for line in egg_obj.get_full_egg_str().splitlines():
                    egg_str += '%s%s\n' % (' ' * (level + 1), line)
            for ch in self.childs:
                egg_str += ch.get_full_egg_str(level + 1)
            egg_str += '%s}\n' % (' ' * level)
        else:
            # Virtual root: emit children only, without a wrapping <Group>.
            for ch in self.childs:
                egg_str += ch.get_full_egg_str(level + 1)
        return egg_str
class EGGArmature(Group):
    """ Representation of the EGG <Joint> hierarchy. Receives Blender's
    bones list as obj_list in the constructor.
    """
    def get_full_egg_str(self, vrefs, arm_owner, level = 0):
        """ Create and return the string representation of the EGG
        <Joint> with its hierarchy.
        @param vrefs: reference of vertices, linked to bones.
        @param arm_owner: Armature object - owner of the bones.
        @param level: indent level.
        @return: the EGG string with the joints hierarchy.
        """
        if not self.object:
            # Virtual root: just concatenate the children.
            return ''.join([ch.get_full_egg_str(vrefs, arm_owner, level + 1)
                            for ch in self.childs])
        indent = ' ' * level
        pieces = ['%s<Joint> %s {\n' % (indent, eggSafeName(self.object.name))]
        # Vertex references are looked up by bone name in the global
        # armature vref table; bones without skinned vertices get {}.
        joint = EGGJointObjectData(self.object, vrefs.get(self.object.name, {}), arm_owner)
        for line in joint.get_full_egg_str().splitlines():
            pieces.append('%s%s\n' % (' ' * (level + 1), line))
        for ch in self.childs:
            pieces.append(ch.get_full_egg_str(vrefs, arm_owner, level + 1))
        pieces.append('%s}\n' % indent)
        return ''.join(pieces)
#-----------------------------------------------------------------------
# BASE OBJECT
#-----------------------------------------------------------------------
class EGGBaseObjectData:
    """ Base representation of the EGG objects data.
    """
    def __init__(self, obj):
        self.obj_ref = obj
        # World transform of the object at construction time.
        self.transform_matrix = obj.matrix_world
    def get_transform_str(self):
        """ Return the EGG string representation of the object's
        transform as a <Matrix4>, built column by column from the
        world matrix.
        """
        parts = ['<Transform> {\n <Matrix4> {\n']
        for column in self.transform_matrix.col:
            parts.append(' ')
            for component in column[:]:
                parts.append(STRF( component ) + ' ')
            parts.append('\n')
        parts.append(' }\n}\n')
        return ''.join(parts)
    def get_full_egg_str(self):
        """ Base objects only contribute their transform. """
        return self.get_transform_str() + '\n'
class EGGNurbsCurveObjectData(EGGBaseObjectData):
    """ Representation of the EGG NURBS Curve
    """
    def collect_vertices(self):
        """ Return a list of EGG <Vertex> strings for all spline control
        points, transformed to world space.
        """
        #str6f = lambda x: '%.6f' % x
        vertices = []
        idx = 0
        for spline in self.obj_ref.data.splines:
            for vtx in spline.points:
                #co = vtx.co * self.obj_ref.matrix_world
                co = self.obj_ref.matrix_world * vtx.co
                vertices.append('<Vertex> %i {\n %s\n}\n' % (idx,
                                    ' '.join(map(STRF, co))))
                idx += 1
        return vertices
    def get_vtx_pool_str(self):
        """ Return the vertex pool string in the EGG syntax.
        """
        vtx_pool = ''
        vertices = self.collect_vertices()
        if vertices:
            vtx_pool = '<VertexPool> %s {\n' % eggSafeName(self.obj_ref.name)
            for vtx_str in vertices:
                for line in vtx_str.splitlines():
                    vtx_pool += ' ' + line + '\n'
            vtx_pool += '}\n'
        return vtx_pool
    def get_curves_str(self):
        """ Return the <NURBSCurve> string. Blender 2.5 does not contain
        Knots information; it seems to be calculated at runtime.
        I got the algorithm for the knots calculation from the OBJ
        exporter and modified it.
        """
        str2f = lambda x: '%.2f' % x
        cur_str = ''
        idx = 0
        for spline in self.obj_ref.data.splines:
            if spline.type == 'NURBS':
                # Build a knot vector normalized to [0, 1]; endpoint
                # clamping repeats 0.0/1.0 order_u - 1 times at each end.
                knots_num = spline.point_count_u + spline.order_u
                knots = [i/(knots_num - 1) for i in range(knots_num)]
                if spline.use_endpoint_u:
                    for i in range(spline.order_u - 1):
                        knots[i] = 0.0
                        knots[-(i + 1)] = 1.0
                    for i in range(knots_num - (spline.order_u * 2) + 2):
                        knots[i + spline.order_u - 1] = i/(knots_num - (spline.order_u * 2) + 1)
                cur_str += '<NURBSCurve> {\n'
                cur_str += ' <Scalar> subdiv { %i }\n' % (spline.resolution_u * \
                                            (spline.point_count_u - 1))
                cur_str += ' <Order> { %i }\n' % spline.order_u
                cur_str += ' <Knots> { %s }\n' % ' '.join(map(str2f, knots))
                cur_str += ' <VertexRef> {\n %s\n <Ref> { %s } \n }\n' % (
                                ' '.join([str(i) for i in range(idx, idx + \
                                spline.point_count_u)]), eggSafeName(self.obj_ref.name))
                cur_str += '}\n'
                idx += spline.point_count_u
        return cur_str
    def get_full_egg_str(self):
        """ Transform, vertex pool and the NURBS curve(s) themselves. """
        return self.get_transform_str() + self.get_vtx_pool_str() + self.get_curves_str()
class EGGJointObjectData(EGGBaseObjectData):
    """ Representation of the EGG <Joint> data
    """
    def __init__(self, obj, vref, arm_owner):
        """ @param obj: Blender bone.
        @param vref: reference of vertices, linked to the bone.
        @param arm_owner: Armature object - owner of the bones.
        """
        self.obj_ref = obj
        self.arm_owner = arm_owner
        # Joint transform is relative to the parent bone; a root bone is
        # placed relative to the armature's world matrix instead.
        if not obj.parent:
            self.transform_matrix = arm_owner.matrix_world * obj.matrix_local
        else:
            self.transform_matrix = obj.parent.matrix_local.inverted() * obj.matrix_local
        self.vref = vref
    def get_vref_str(self):
        """ Convert the vertex references to the EGG string and return it.
        """
        vref_str = ''
        for vpool, data in self.vref.items():
            # Group vertex indices by identical (formatted) weight so each
            # <VertexRef> carries a single membership scalar.
            weightgroups = {}
            for idx, weight in data:
                wstr = '%s' % STRF(weight)
                if wstr not in list(weightgroups.keys()):
                    weightgroups[wstr] = []
                weightgroups[wstr].append(idx)
            for wgrp, idxs in weightgroups.items():
                vref_str += '<VertexRef> {\n'
                vref_str += ' ' + ' '.join(map(str,idxs)) + '\n'
                vref_str += ' <Scalar> membership { %s }' % wgrp
                vref_str += ' <Ref> { %s }\n}\n' % vpool
        return vref_str
    def get_full_egg_str(self):
        """ Transform, vertex references and any selected objects that
        are parented to this bone (exported as nested <Group>s).
        """
        egg_str = ''
        egg_str += self.get_transform_str()
        egg_str += self.get_vref_str()
        for obj in [obj for obj in bpy.context.selected_objects \
                    if self.obj_ref.name == obj.parent_bone and self.arm_owner == obj.parent]:
            gr = Group(None)
            obj_list = []
            hierarchy_to_list(obj, obj_list)
            obj_list = [obj for obj in obj_list if (obj in bpy.context.selected_objects)]
            gr.make_hierarchy_from_list(obj_list)
            for line in gr.get_full_egg_str(-1).splitlines():
                egg_str += line + '\n'
        return egg_str
#-----------------------------------------------------------------------
# MESH OBJECT
#-----------------------------------------------------------------------
class EGGMeshObjectData(EGGBaseObjectData):
    """ EGG data representation of the mesh object
    """
    def __init__(self, obj):
        EGGBaseObjectData.__init__(self, obj)
        # Pre-converted caches: Blender shares vertices between faces,
        # while the EGG format needs one vertex per face corner.
        self.smooth_vtx_list = self.get_smooth_vtx_list()
        self.poly_vtx_ref = self.pre_convert_poly_vtx_ref()
        self.colors_vtx_ref = self.pre_convert_vtx_color()
        self.uvs_list = self.pre_convert_uvs()
        # Tangent/binormal data only when the internal generator is requested.
        if CALC_TBS == 'INTERNAL':
            self.tbs = io_scene_egg.yabee_libs.tbn_generator.TBNGenerator(obj).generate()
        else:
            self.tbs = None
    #-------------------------------------------------------------------
    # AUXILIARY
    def get_smooth_vtx_list(self):
        """ Collect the vertices of the smooth-shaded polygons.
        In the EGG, smooth shading uses per-vertex normals;
        solid shading uses per-polygon normals.
        @return: set of Blender vertex indices belonging to smooth faces.
        """
        vtx_list = []
        for f in self.obj_ref.data.faces:
            if f.use_smooth:
                for v in f.vertices:
                    vtx_list.append(v)
        return set(vtx_list)
    def pre_convert_uvs(self):
        """ Blender uses shared vertices, but for correct UV mapping and
        shading Panda needs them converted to individual vertices for
        each polygon.
        @return: list of (layer name, [(u, v), ...]) per UV layer.
        """
        uv_list = []
        for uv_layer in self.obj_ref.data.uv_textures:
            data = []
            for uv_face in uv_layer.data:
                for u,v in uv_face.uv:
                    data.append((u,v))
            uv_list.append((uv_layer.name, data))
        return uv_list
    def pre_convert_poly_vtx_ref(self):
        """ Blender uses shared vertices, but for correct UV mapping and
        shading Panda needs them converted to individual vertices for
        each polygon.
        @return: per-face lists of the (sequentially numbered) EGG vertex indices.
        """
        poly_vtx_ref = []
        idx = 0
        for face in self.obj_ref.data.faces:
            vtxs = []
            for v in face.vertices:
                vtxs.append(idx)
                idx += 1
            poly_vtx_ref.append(vtxs)
        return poly_vtx_ref
    def pre_convert_vtx_color(self):
        """ Flatten the active vertex-color layer into one color per
        face corner, in face order (matches the EGG vertex numbering).
        """
        color_vtx_ref = []
        if self.obj_ref.data.vertex_colors.active:
            for cols in self.obj_ref.data.vertex_colors.active.data:
                for col in (cols.color1, cols.color2, cols.color3, cols.color4):
                    color_vtx_ref.append(col)
            #for fi, face in enumerate(self.obj_ref.data.faces):
            #    col = self.obj_ref.data.vertex_colors.active.data[fi]
            #    col = col.color1[:], col.color2[:], col.color3[:], col.color4[:]
            #    for vi, v in enumerate(face.vertices):
            #        color_vtx_ref.append(col[vi])
        return color_vtx_ref
    #-------------------------------------------------------------------
    # VERTICES
    def collect_vtx_xyz(self, vidx, attributes):
        """ Add coordinates of the vertex to the vertex attributes list.
        @param vidx: Blender's internal vertex index.
        @param attributes: list of vertex attributes
        @return: list of vertex attributes.
        """
        #co = self.obj_ref.data.vertices[vidx].co * self.obj_ref.matrix_world
        co = self.obj_ref.matrix_world * self.obj_ref.data.vertices[vidx].co
        co = map(STRF, co)
        attributes.append(' '.join(co))
        return attributes
    def collect_vtx_dxyz(self, vidx, attributes):
        """ Add morph target <Dxyz> to the vertex attributes list.
        @param vidx: Blender's internal vertex index.
        @param attributes: list of vertex attributes
        @return: list of vertex attributes.
        """
        if ((self.obj_ref.data.shape_keys) and (len(self.obj_ref.data.shape_keys.key_blocks) > 1)):
            # Key block 0 is the basis; emit a delta for each other key,
            # skipping deltas that are effectively zero.
            # NOTE(review): the multiplication order (co * matrix) differs
            # from collect_vtx_xyz (matrix * co) — confirm intended.
            for i in range(1,len(self.obj_ref.data.shape_keys.key_blocks)):
                key = self.obj_ref.data.shape_keys.key_blocks[i]
                vtx = self.obj_ref.data.vertices[vidx]
                co = key.data[vidx].co * self.obj_ref.matrix_world - \
                     vtx.co * self.obj_ref.matrix_world
                if co.length > 0.000001:
                    attributes.append('<Dxyz> "%s" { %s %s %s }\n' % \
                                      (key.name, STRF(co[0]), STRF(co[1]), STRF(co[2])))
        return attributes
    def collect_vtx_normal(self, vidx, attributes):
        """ Add <Normal> to the vertex attributes list.
        Only vertices of smooth-shaded faces get a per-vertex normal.
        @param vidx: Blender's internal vertex index.
        @param attributes: list of vertex attributes
        @return: list of vertex attributes.
        """
        if vidx in self.smooth_vtx_list:
            #no = self.obj_ref.data.vertices[vidx].normal * self.obj_ref.matrix_world.to_euler().to_matrix()
            no = self.obj_ref.matrix_world.to_euler().to_matrix() * self.obj_ref.data.vertices[vidx].normal
            attributes.append('<Normal> { %s %s %s }' % (STRF(no[0]), STRF(no[1]), STRF(no[2])))
        return attributes
    def collect_vtx_rgba(self, vidx, attributes):
        """ Add <RGBA> (vertex color, alpha fixed at 1.0) when the mesh
        has an active vertex-color layer.
        @param vidx: the EGG (converted) vertex index.
        @param attributes: list of vertex attributes
        @return: list of vertex attributes.
        """
        if self.colors_vtx_ref:
            col = self.colors_vtx_ref[vidx]
            attributes.append('<RGBA> { %s %s %s 1.0 }' % (STRF(col[0]), STRF(col[1]), STRF(col[2])))
        return attributes
    def collect_vtx_uv(self, vidx, ividx, attributes):
        """ Add <UV> to the vertex attributes list.
        @param vidx: Blender's internal vertex index (used for tangent lookup).
        @param ividx: the EGG (converted) vertex index.
        @param attributes: list of vertex attributes
        @return: list of vertex attributes.
        """
        # The first UV layer is written without a name (default layer).
        for i in range(len(self.uvs_list)):
            name, data = self.uvs_list[i]
            if i == 0: name = ''
            if self.tbs:
                t = self.tbs[vidx][0][i]
                b = self.tbs[vidx][1][i]
            try:
                uv_str = '<UV> %s {\n %s %s\n' % (name, STRF(data[ividx][0]), STRF(data[ividx][1]))
                if self.tbs:
                    uv_str += ' <Tangent> { %s %s %s }\n' % (STRF(t[0]), STRF(t[1]), STRF(t[2]))
                    uv_str += ' <Binormal> { %s %s %s }\n' % (STRF(b[0]), STRF(b[1]), STRF(b[2]))
                uv_str += '}\n'
                attributes.append(uv_str)
                #attributes.append('<UV> %s { %.6f %.6f }' % (name, data[ividx][0], data[ividx][1]))
            except:
                # Best-effort: a missing UV entry is reported, not fatal.
                print('ERROR: can\'t get UV information in "collect_vtx_uv"')
        return attributes
    def collect_vertices(self):
        """ Convert and collect vertices info.
        @return: list of EGG <Vertex> strings, one per face corner.
        """
        vertices = []
        idx = 0
        for f in self.obj_ref.data.faces:
            for v in f.vertices:
                # v - Blender inner vertex index
                # idx - Vertex index for the EGG
                vtx = '<Vertex> %s {\n' % idx
                attributes = []
                self.collect_vtx_xyz(v, attributes)
                self.collect_vtx_dxyz(v, attributes)
                self.collect_vtx_normal(v, attributes)
                self.collect_vtx_rgba(idx, attributes)
                self.collect_vtx_uv(v, idx, attributes)
                for attr in attributes:
                    for attr_str in attr.splitlines():
                        vtx += ' ' + attr_str + '\n'
                vtx += '}\n'
                vertices.append(vtx)
                idx += 1
        return vertices
    #-------------------------------------------------------------------
    # POLYGONS
    def collect_poly_tref(self, face, attributes):
        """ Add <TRef> to the polygon's attributes list.
        In SIMPLE mode texture references come from the UV images and/or
        the material's UV-mapped texture slots; in BAKE mode from the
        enabled bake layers.
        @param face: face index.
        @param attributes: list of polygon's attributes.
        @return: list of polygon's attributes.
        """
        if TEXTURE_PROCESSOR == 'SIMPLE':
            if EXPORT_UV_IMAGE_AS_TEXTURE:
                for uv_tex in self.obj_ref.data.uv_textures:
                    #if uv_tex.data[face.index].use_image:
                    if uv_tex.data[face.index].image.source == 'FILE':
                        attributes.append('<TRef> { %s }' % uv_tex.data[face.index].image.name)
            if face.material_index < len(self.obj_ref.data.materials):
                mat = self.obj_ref.data.materials[face.material_index]
                tex_idx = 0
                for tex in [tex for tex in mat.texture_slots if tex]:
                    if ((tex.texture_coords == 'UV')
                         and (not tex.texture.use_nodes)
                         and (mat.use_textures[tex_idx])):
                        if tex.texture.image and tex.texture.image.source == 'FILE':
                            attributes.append('<TRef> { %s }' % tex.texture.name)
                    tex_idx += 1
        elif TEXTURE_PROCESSOR == 'BAKE':
            #if [uv for uv in self.obj_ref.data.uv_textures if uv.active]:
            if self.obj_ref.data.uv_textures:
                for btype, params in BAKE_LAYERS.items():
                    if len(params) == 2:
                        params = (params[0], params[0], params[1])
                    if params[2]:
                        attributes.append('<TRef> { %s }' % (self.obj_ref.name + '_' + btype))
        return attributes
    def collect_poly_mref(self, face, attributes):
        """ Add <MRef> to the polygon's attributes list.
        @param face: face index.
        @param attributes: list of polygon's attributes.
        @return: list of polygon's attributes.
        """
        #if face.material_index < len(bpy.data.materials):
        #    mat = bpy.data.materials[face.material_index]
        if face.material_index < len(self.obj_ref.data.materials):
            mat = self.obj_ref.data.materials[face.material_index]
            attributes.append('<MRef> { %s }' % eggSafeName(mat.name))
        return attributes
    def collect_poly_normal(self, face, attributes):
        """ Add <Normal> to the polygon's attributes list.
        @param face: face index.
        @param attributes: list of polygon's attributes.
        @return: list of polygon's attributes.
        """
        #no = face.normal * self.obj_ref.matrix_world.to_euler().to_matrix()
        no = self.obj_ref.matrix_world.to_euler().to_matrix() * face.normal
        attributes.append('<Normal> {%s %s %s}' % (STRF(no[0]), STRF(no[1]), STRF(no[2])))
        return attributes
    def collect_poly_rgba(self, face, attributes):
        """ Stub: per-polygon color is not exported.
        @return: the attributes list, unchanged.
        """
        return attributes
    def collect_poly_bface(self, face, attributes):
        """ Add <BFace> to the polygon's attributes list when the face's
        material has backface culling disabled.
        @param face: face index.
        @param attributes: list of polygon's attributes.
        @return: list of polygon's attributes.
        """
        #if [uv_face.data[face.index] for uv_face in self.obj_ref.data.uv_textures if uv_face.data[face.index].use_twoside]:
        #    attributes.append('<BFace> { 1 }')
        if face.material_index < len(self.obj_ref.data.materials):
            #print('+++', face.material_index, self.obj_ref.data.materials[face.material_index].game_settings.use_backface_culling)
            if not self.obj_ref.data.materials[face.material_index].game_settings.use_backface_culling:
                attributes.append('<BFace> { 1 }')
        return attributes
    def collect_poly_vertexref(self, face, attributes):
        """ Add <VertexRef> to the polygon's attributes list.
        @param face: face index.
        @param attributes: list of polygon's attributes.
        @return: list of polygon's attributes.
        """
        vr = ' '.join(map(str,self.poly_vtx_ref[face.index]))
        attributes.append('<VertexRef> { %s <Ref> { %s }}' % (vr, self.obj_ref.name))
        return attributes
    def collect_polygons(self):
        """ Convert and collect polygons info.
        @return: list of EGG <Polygon> strings, one per face.
        """
        polygons = []
        for f in self.obj_ref.data.faces:
            poly = '<Polygon> {\n'
            attributes = []
            self.collect_poly_tref(f, attributes)
            self.collect_poly_mref(f, attributes)
            self.collect_poly_normal(f, attributes)
            self.collect_poly_rgba(f, attributes)
            self.collect_poly_bface(f, attributes)
            self.collect_poly_vertexref(f, attributes)
            for attr in attributes:
                for attr_str in attr.splitlines():
                    poly += ' ' + attr_str + '\n'
            poly += '}\n'
            polygons.append(poly)
        return polygons
    def get_vtx_pool_str(self):
        """ Return the vertex pool string in the EGG syntax.
        """
        vtx_pool = '<VertexPool> %s {\n' % self.obj_ref.name
        for vtx_str in self.collect_vertices():
            for line in vtx_str.splitlines():
                vtx_pool += ' ' + line + '\n'
        vtx_pool += '}\n'
        return vtx_pool
    def get_polygons_str(self):
        """ Return polygons string in the EGG syntax
        """
        polygons = '\n'
        for poly_str in self.collect_polygons():
            for line in poly_str.splitlines():
                polygons += line + '\n'
        return polygons
    def get_full_egg_str(self):
        """ Return full mesh data representation in the EGG string syntax
        """
        return   self.get_transform_str() + '\n' \
               + self.get_vtx_pool_str() + '\n' \
               + self.get_polygons_str()
#-----------------------------------------------------------------------
# ACTOR OBJECT
#-----------------------------------------------------------------------
class EGGActorObjectData(EGGMeshObjectData):
    """ Representation of the EGG animated object data.
    """
    def __init__(self, obj):
        EGGMeshObjectData.__init__(self,obj)
        self.joint_vtx_ref = self.pre_convert_joint_vtx_ref()
    def pre_convert_joint_vtx_ref(self):
        """ Collect and convert the vertices assigned to bones.
        @return: {vertex group (bone) name: {object (vertex pool) name:
                  [(EGG vertex index, weight), ...]}}
        """
        joint_vref = {}
        pool_name = self.obj_ref.name
        egg_idx = 0
        for face in self.obj_ref.data.faces:
            for vtx_idx in face.vertices:
                for grp in self.obj_ref.data.vertices[vtx_idx].groups:
                    # Vertex group name == joint (bone) name;
                    # object name == vertex pool name.
                    bone_name = self.obj_ref.vertex_groups[grp.group].name
                    pools = joint_vref.setdefault(bone_name, {})
                    pools.setdefault(pool_name, []).append((egg_idx, grp.weight))
                egg_idx += 1
        return joint_vref
    def get_joints_str(self):
        """ Build an EGGArmature from the bones of every Armature
        modifier, pass the vertex references to it, and return the EGG
        string representation of the joints hierarchy.
        """
        parts = []
        for mod in self.obj_ref.modifiers:
            if mod.type == 'ARMATURE':
                armature = EGGArmature(None)
                armature.make_hierarchy_from_list(mod.object.data.bones)
                parts.append(armature.get_full_egg_str(self.joint_vtx_ref, mod.object, -1))
        return ''.join(parts)
    def get_full_egg_str(self):
        """ Return string representation of the EGG animated object data.
        """
        return (self.get_vtx_pool_str() + '\n'
                + self.get_polygons_str() + '\n'
                + self.get_joints_str() + '\n')
class EGGAnimJoint(Group):
    """ Representation of the <Joint> animation data. Has the same
    hierarchy as the character's skeleton.
    """
    def get_full_egg_str(self, anim_info, framerate, level = 0):
        """ Create and return the string representation of the <Joint>
        animation data, including the whole joints hierarchy.
        @param anim_info: animation data; bone channels are read from
            anim_info['<skeleton>'][bone name].
        @param framerate: fps written into the animation table.
        @param level: indent level.
        """
        egg_str = ''
        if self.object:
            egg_str += '%s<Table> %s {\n' % (' ' * level, eggSafeName(self.object.name))
            bone_data = anim_info['<skeleton>'][self.object.name]
            # One row per frame: scale (i j k) is fixed at 1.0; rotation
            # (p r h) and translation (x y z) come from the sampled data.
            egg_str += '%s  <Xfm$Anim> xform {\n' % (' ' * level)
            egg_str += '%s    <Scalar> order { sprht }\n' % (' ' * level)
            egg_str += '%s    <Scalar> fps { %i }\n' % (' ' * level, framerate)
            egg_str += '%s    <Scalar> contents { ijkprhxyz }\n' % (' ' * level)
            egg_str += '%s    <V> {\n' % (' ' * level)
            for i in range(len(bone_data['r'])):
                egg_str += '%s      %s %s %s %s %s %s %s %s %s\n' % (
                                                    ' ' * level,
                                                    STRF(1.0),
                                                    STRF(1.0),
                                                    STRF(1.0),
                                                    STRF(bone_data['p'][i]),
                                                    STRF(bone_data['r'][i]),
                                                    STRF(bone_data['h'][i]),
                                                    STRF(bone_data['x'][i]),
                                                    STRF(bone_data['y'][i]),
                                                    STRF(bone_data['z'][i]))
            egg_str += '%s    }\n' % (' ' * level)
            egg_str += '%s  }\n' % (' ' * level)
            for ch in self.childs:
                egg_str += ch.get_full_egg_str(anim_info, framerate, level + 1)
            egg_str += '%s}\n' % (' ' * level)
        else:
            # Virtual root: emit children only.
            for ch in self.childs:
                egg_str += ch.get_full_egg_str(anim_info, framerate, level + 1)
        return egg_str
class AnimCollector():
    """ Collect an armature and a shapekeys animation data and
    convert it to the EGG string.
    """
    def __init__(self, obj_list, start_f, stop_f, framerate, name):
        """ @param obj_list: list or tuple of the Blender's objects
        for which animation data should be collected.
        @param start_f: number of the "from" frame.
        @param stop_f: number of the "to" frame.
        @param framerate: framerate for the given animation.
        @param name: name of the animation for access in the Panda.
        """
        self.obj_list = obj_list
        self.start_f = start_f
        self.stop_f = stop_f
        self.framerate = framerate
        self.name = name
        self.bone_groups = {}
        # Switch every armature to POSE position so the bone matrices
        # sampled below reflect the animated pose.
        for arm in bpy.data.armatures:
            arm.pose_position = 'POSE'
        self.obj_anim_ref = {}
        for obj in obj_list:
            if obj.type == 'MESH':
                # A mesh contributes skeleton data through its Armature
                # modifier(s) and morph data through its shape keys.
                for mod in obj.modifiers:
                    if mod:
                        if mod.type == 'ARMATURE':
                            self.bone_groups[obj.name] = EGGAnimJoint(None)
                            self.bone_groups[obj.name].make_hierarchy_from_list(mod.object.data.bones)
                            if obj.name not in list(self.obj_anim_ref.keys()):
                                self.obj_anim_ref[obj.name] = {}
                            self.obj_anim_ref[obj.name]['<skeleton>'] = \
                                    self.collect_arm_anims(mod.object)
                if ((obj.data.shape_keys) and (len(obj.data.shape_keys.key_blocks) > 1)):
                    if obj.name not in list(self.obj_anim_ref.keys()):
                        self.obj_anim_ref[obj.name] = {}
                    self.obj_anim_ref[obj.name]['morph'] = self.collect_morph_anims(obj)
            elif obj.type == 'ARMATURE':
                self.bone_groups[obj.name] = EGGAnimJoint(None)
                self.bone_groups[obj.name].make_hierarchy_from_list(obj.data.bones)
                if obj.name not in list(self.obj_anim_ref.keys()):
                    self.obj_anim_ref[obj.name] = {}
                self.obj_anim_ref[obj.name]['<skeleton>'] = \
                        self.collect_arm_anims(obj)
    def collect_morph_anims(self, obj):
        """ Collect the animation data for the morph targets (shapekeys).
        @param obj: Blender's object for which to collect animation data.
        @return: {shape key name: [value per frame, ...]}
        """
        keys = {}
        if ((obj.data.shape_keys) and (len(obj.data.shape_keys.key_blocks) > 1)):
            # Remember the current frame so it can be restored afterwards.
            current_f = bpy.context.scene.frame_current
            anim_dict = {}
            for f in range(self.start_f, self.stop_f):
                bpy.context.scene.frame_current = f
                bpy.context.scene.frame_set(f)
                # Key block 0 is the basis shape; only the real keys are sampled.
                for i in range(1,len(obj.data.shape_keys.key_blocks)):
                    key = obj.data.shape_keys.key_blocks[i]
                    if key.name not in list(keys.keys()):
                        keys[key.name] = []
                    keys[key.name].append(key.value)
            bpy.context.scene.frame_current = current_f
        return keys
    def collect_arm_anims(self, arm):
        """ Collect the animation data for the skeleton (Armature).
        @param arm: Blender's Armature for which to collect animation data.
        @return: {bone name: {channel: [value per frame, ...]}}
        """
        # Remember the current frame so it can be restored afterwards.
        current_f = bpy.context.scene.frame_current
        anim_dict = {}
        for f in range(self.start_f, self.stop_f):
            bpy.context.scene.frame_current = f
            bpy.context.scene.frame_set(f)
            for bone in arm.pose.bones:
                if bone.name not in list(anim_dict.keys()):
                    anim_dict[bone.name] = {}
                # All channel slots are created, but only p/r/h and x/y/z
                # are actually filled below (i/j/k/a/b/c stay empty).
                for k in 'ijkabcrphxyz':
                    if k not in list(anim_dict[bone.name].keys()):
                        anim_dict[bone.name][k] = []
                # Pose matrix relative to the parent bone; root bones are
                # taken relative to the armature's world matrix.
                if bone.parent:
                    matrix = bone.parent.matrix.inverted() * bone.matrix
                else:
                    matrix = arm.matrix_world * bone.matrix
                # Rotation in degrees (p, r, h) plus translation (x, y, z).
                p, r, h = matrix.to_euler()
                anim_dict[bone.name]['p'].append(p/pi*180)
                anim_dict[bone.name]['r'].append(r/pi*180)
                anim_dict[bone.name]['h'].append(h/pi*180)
                x, y, z = matrix.to_translation()
                anim_dict[bone.name]['x'].append(x)
                anim_dict[bone.name]['y'].append(y)
                anim_dict[bone.name]['z'].append(z)
        bpy.context.scene.frame_current = current_f
        return anim_dict
    def get_morph_anim_str(self, obj_name):
        """ Create and return the EGG string of the morph animation for
        the given object.
        @param obj_name: name of the Blender's object
        """
        morph_str = ''
        data = self.obj_anim_ref[obj_name]
        if 'morph' in list(data.keys()):
            #str4f = lambda x: '%.4f' % x
            morph_str += '<Table> morph {\n'
            for key, anim_vals in data['morph'].items():
                morph_str += '  <S$Anim> %s {\n' % eggSafeName(key)
                morph_str += '    <Scalar> fps { %i }\n' % self.framerate
                morph_str += '    <V> { %s }\n' % (' '.join(map(STRF, anim_vals)))
                morph_str += '  }\n'
            morph_str += '}\n'
        return morph_str
    def get_skeleton_anim_str(self, obj_name):
        """ Create and return the EGG string of the Armature animation for
        the given object.
        @param obj_name: name of the Blender's object
        """
        skel_str = ''
        data = self.obj_anim_ref[obj_name]
        if '<skeleton>' in list(data.keys()):
            skel_str += '<Table> "<skeleton>" {\n'
            for line in self.bone_groups[obj_name].get_full_egg_str(data, self.framerate, -1).splitlines():
                skel_str += '  %s\n' % line
            skel_str += '}\n'
        return skel_str
    def get_full_egg_str(self):
        """ Create and return the full EGG string for the animation which
        has been set up in the object constructor (__init__).
        """
        egg_str = ''
        if self.obj_anim_ref:
            egg_str += '<Table> {\n'
            for obj_name, obj_data in self.obj_anim_ref.items():
                if self.name:
                    anim_name = self.name
                else:
                    anim_name = obj_name
                # With separate animation files the bundle is keyed by the
                # object name instead of the animation name.
                if SEPARATE_ANIM_FILE:
                    egg_str += '  <Bundle> %s {\n' % eggSafeName(obj_name)
                else:
                    egg_str += '  <Bundle> %s {\n' % eggSafeName(anim_name)
                for line in self.get_skeleton_anim_str(obj_name).splitlines():
                    egg_str += '    %s\n' % line
                for line in self.get_morph_anim_str(obj_name).splitlines():
                    egg_str += '    %s\n' % line
                egg_str += '  }\n'
            egg_str += '}\n'
        return egg_str
#-----------------------------------------------------------------------
# SCENE MATERIALS & TEXTURES
#-----------------------------------------------------------------------
def get_used_materials():
    """ Collect the names of the materials used by the selected
    mesh objects.
    @return: set of material names.
    """
    names = set()
    for obj in bpy.context.selected_objects:
        if obj.type != 'MESH':
            continue
        for face in obj.data.faces:
            if face.material_index < len(obj.data.materials):
                names.add(obj.data.materials[face.material_index].name)
    return names
def get_egg_materials_str():
    """ Return the EGG string of used materials and textures for the
    current selection. Reads the module-level export configuration
    (TEXTURE_PROCESSOR, EXPORT_UV_IMAGE_AS_TEXTURE, COPY_TEX_FILES,
    FILE_PATH, TEX_PATH, BAKE_LAYERS).
    """
    if not bpy.context.selected_objects:
        return ''
    mat_str = ''
    # NOTE(review): m_idx is actually a material *name* (get_used_materials
    # returns names); the bpy.data.materials collection is being keyed by
    # name here — confirm.
    for m_idx in get_used_materials():
        mat = bpy.data.materials[m_idx]
        mat_str += '<Material> %s {\n' % eggSafeName(mat.name)
        if TEXTURE_PROCESSOR == 'SIMPLE':
            mat_str += '  <Scalar> diffr { %s }\n' % STRF(mat.diffuse_color[0] * mat.diffuse_intensity)
            mat_str += '  <Scalar> diffg { %s }\n' % STRF(mat.diffuse_color[1] * mat.diffuse_intensity)
            mat_str += '  <Scalar> diffb { %s }\n' % STRF(mat.diffuse_color[2] * mat.diffuse_intensity)
        elif TEXTURE_PROCESSOR == 'BAKE':
            # Baked lighting already carries the diffuse color.
            mat_str += '  <Scalar> diffr { 1.0 }\n'
            mat_str += '  <Scalar> diffg { 1.0 }\n'
            mat_str += '  <Scalar> diffb { 1.0 }\n'
        mat_str += '  <Scalar> specr { %s }\n' % STRF(mat.specular_color[0] * mat.specular_intensity)
        mat_str += '  <Scalar> specg { %s }\n' % STRF(mat.specular_color[1] * mat.specular_intensity)
        mat_str += '  <Scalar> specb { %s }\n' % STRF(mat.specular_color[2] * mat.specular_intensity)
        mat_str += '  <Scalar> shininess { %s }\n' % (mat.specular_hardness / 512 * 128)
        mat_str += '  <Scalar> emitr { %s }\n' % STRF(mat.emit * 0.1)
        mat_str += '  <Scalar> emitg { %s }\n' % STRF(mat.emit * 0.1)
        mat_str += '  <Scalar> emitb { %s }\n' % STRF(mat.emit * 0.1)
        #file.write('  <Scalar> ambr { %s }\n' % STRF(mat.ambient))
        #file.write('  <Scalar> ambg { %s }\n' % STRF(mat.ambient))
        #file.write('  <Scalar> ambb { %s }\n' % STRF(mat.ambient))
        mat_str += '}\n\n'
    # Collect the used textures with the configured processor.
    if TEXTURE_PROCESSOR == 'SIMPLE':
        st = SimpleTextures(bpy.context.selected_objects,
                            EXPORT_UV_IMAGE_AS_TEXTURE,
                            COPY_TEX_FILES,
                            FILE_PATH, TEX_PATH)
        used_textures = st.get_used_textures()
    elif TEXTURE_PROCESSOR == 'BAKE':
        tb = TextureBaker(bpy.context.selected_objects, FILE_PATH, TEX_PATH)
        used_textures = tb.bake(BAKE_LAYERS)
    print(used_textures)
    #for name, path in used_textures.items():
    #    mat_str += '<Texture> %s {\n' % name
    #    mat_str += '  "' + convertFileNameToPanda(path[1]) + '"\n'
    #    if path[0]:
    #        mat_str += '  <Scalar> uv-name { %s }\n' % path[0]
    #    if path[2] != 'MODULATE':
    #        mat_str += '  <Scalar> envtype { %s }\n' % path[2]
    #    mat_str += '}\n\n'
    # Each entry is {'path': ..., 'scalars': [(name, value), ...]}.
    for name, params in used_textures.items():
        mat_str += '<Texture> %s {\n' % name
        mat_str += '  "' + convertFileNameToPanda(params['path']) + '"\n'
        for scalar in params['scalars']:
            mat_str += ('  <Scalar> %s { %s }\n' % scalar)
        mat_str += '}\n\n'
    return mat_str
def hierarchy_to_list(obj, out):
    """Append *obj* and, recursively, all of its children to *out*.

    The accumulator is mutated in place.  Nodes already present are not
    revisited, which also guards against cycles in the child graph.
    (Second parameter renamed from ``list`` to stop shadowing the builtin;
    all callers in this module pass it positionally.)
    """
    out.append(obj)
    for child in obj.children:
        if child not in out:
            hierarchy_to_list(child, out)
#-----------------------------------------------------------------------
# WRITE OUT
#-----------------------------------------------------------------------
def write_out(fname, anims, uv_img_as_tex, sep_anim, a_only, copy_tex,
              t_path, fp_accuracy, tbs, tex_processor, b_layers):
    """Exporter entry point: write the current selection to an EGG file at
    *fname*, plus (optionally) one EGG file per animation.

    The parameters mirror the module-level export settings -- animation
    dict, texture handling flags, float precision, TBS mode, texture
    processor choice and bake layers -- and are copied into the globals
    used by the rest of the module.
    """
    global FILE_PATH, ANIMATIONS, EXPORT_UV_IMAGE_AS_TEXTURE, \
           COPY_TEX_FILES, TEX_PATH, SEPARATE_ANIM_FILE, ANIM_ONLY, \
           STRF, CALC_TBS, TEXTURE_PROCESSOR, BAKE_LAYERS
    # Reload helper modules so script edits are picked up without
    # restarting Blender.
    imp.reload(io_scene_egg.yabee_libs.texture_processor)
    imp.reload(io_scene_egg.yabee_libs.tbn_generator)
    imp.reload(io_scene_egg.yabee_libs.utils)
    # === prepare to write ===
    FILE_PATH = fname
    ANIMATIONS = anims
    EXPORT_UV_IMAGE_AS_TEXTURE = uv_img_as_tex
    SEPARATE_ANIM_FILE = sep_anim
    ANIM_ONLY = a_only
    CALC_TBS = tbs
    COPY_TEX_FILES = copy_tex
    TEX_PATH = t_path
    TEXTURE_PROCESSOR = tex_processor
    BAKE_LAYERS = b_layers
    # Float formatter honoring the requested number of decimal places.
    def str_f(x):
        s = '%.' + str(fp_accuracy) + 'f'
        return s % x
    STRF = str_f
    # Leave edit/pose mode so mesh data is up to date.
    if bpy.ops.object.mode_set.poll():
        bpy.ops.object.mode_set(mode='OBJECT')
    gr = Group(None)
    # exclude objects, parented to the bones and his children
    # objects, parented to the bones will be process later in the Armature
    b_ex_list = [obj for obj in bpy.context.selected_objects if obj.parent_type == 'BONE']
    exclude_list = []
    for b_ex in b_ex_list:
        tmp_list = []
        hierarchy_to_list(b_ex, tmp_list)
        exclude_list += tmp_list
    exclude_list = set(exclude_list)
    obj_list = [obj for obj in bpy.context.selected_objects if (obj not in exclude_list)]
    # include armatures, wich bones has exported children
    # and exclude if Armature in modifiers of any exported objects
    for b_ex in b_ex_list:
        if b_ex.parent not in obj_list:
            print('INCLUDE Armature \'%s\'' % b_ex.parent.name)
            obj_list.append(b_ex.parent)
    for ar_in in [obj for obj in obj_list if obj.type == 'ARMATURE']:
        for b_in in [obj for obj in obj_list]:
            for mod in b_in.modifiers:
                if mod.type == 'ARMATURE' and mod.object == ar_in:
                    append = False  # NOTE(review): assigned but never read
                    if mod.object in obj_list:
                        del obj_list[obj_list.index(mod.object)]
                        print('EXCLUDE Armature \'%s\' since it\'s a modifier' % mod.object.name)
    gr.make_hierarchy_from_list(obj_list)
    #gr.print_hierarchy()
    fdir, fname = os.path.split(os.path.abspath(FILE_PATH))
    if not os.path.exists(fdir):
        print('PATH %s not exist. Trying to make path' % fdir)
        os.makedirs(fdir)
    # === write egg data ===
    print('WRITE main EGG to %s' % os.path.abspath(FILE_PATH))
    # The main file is opened unless we export ONLY animations AND those
    # go to separate files.
    if ((not ANIM_ONLY) or (not SEPARATE_ANIM_FILE)):
        file = open(FILE_PATH, 'w')
    if not ANIM_ONLY:
        file.write('<CoordinateSystem> { Z-up } \n')
        file.write(get_egg_materials_str())
        file.write(gr.get_full_egg_str())
    # One AnimCollector per named animation; frames holds (start, end, fps).
    for a_name, frames in ANIMATIONS.items():
        ac = AnimCollector(obj_list,
                           frames[0],
                           frames[1],
                           frames[2],
                           a_name)
        if not SEPARATE_ANIM_FILE:
            file.write(ac.get_full_egg_str())
        else:
            # Derive "<base>-<anim>.egg" next to the main file.
            a_path = FILE_PATH
            if a_path[-4:].upper() == '.EGG':
                a_path = a_path[:-4] + '-' + a_name + a_path[-4:]
            else:
                a_path = a_path + '-' + a_name + '.egg'
            a_egg_str = ac.get_full_egg_str()
            if len(a_egg_str) > 0:
                a_file = open(a_path, 'w')
                a_file.write('<CoordinateSystem> { Z-up } \n')
                a_file.write(ac.get_full_egg_str())
                a_file.close()
    if ((not ANIM_ONLY) or (not SEPARATE_ANIM_FILE)):
        file.close()
    # Optional post-processing with Panda's egg-trans to add TBS data.
    # NOTE(review): a non-zero exit status of egg-trans is not detected
    # here -- only errors raised by os.popen itself are.
    if CALC_TBS == 'PANDA':
        try:
            fp = os.path.abspath(FILE_PATH)
            for line in os.popen('egg-trans -tbnall -o "%s" "%s"' % (fp, fp)).readlines():
                print(line)
        except:
            print('ERROR: Can\'t calculate TBS through panda\'s egg-trans')
| Python |
"""
Part of the YABEE
rev 1
"""
import bpy
from mathutils import *
class TBNGenerator():
    """Compute per-vertex tangent/binormal pairs (one pair per UV layer)
    for a Blender mesh object, for use in normal mapping."""
    def __init__(self,obj):
        # Mesh object to process; the triangle and UV caches below are
        # filled in by generate().
        self.obj_ref = obj
        self.triangles = None
        self.uv_layers = None
    def generate(self):
        """Triangulate the mesh, then return a list with one
        ([tangents], [binormals]) tuple per vertex, where each inner list
        holds one averaged, orthogonalized vector per UV layer.
        """
        tris = []
        fidxs = 0  # NOTE(review): assigned but never used
        # Triangulate: a quad is split into triangles (0,1,2) and (0,3,2).
        for face in self.obj_ref.data.faces:
            tris.append((face.vertices[0],
                         face.vertices[1],
                         face.vertices[2]))
            if len(face.vertices) > 3:
                tris.append((face.vertices[0],
                             face.vertices[3],
                             face.vertices[2]))
        # Split UV faces the same way so their indices stay parallel to
        # the triangle list.
        uv_layers = []
        for uv_layer in self.obj_ref.data.uv_textures:
            layer = []
            for uv_face in uv_layer.data:
                layer.append((uv_face.uv[0],
                              uv_face.uv[1],
                              uv_face.uv[2]))
                if len(uv_face.uv) > 3:
                    layer.append((uv_face.uv[0],
                                  uv_face.uv[3],
                                  uv_face.uv[2]))
            uv_layers.append(layer)
        self.triangles = tris
        self.uv_layers = uv_layers
        vtx_tb = []
        # For each vertex: sum the tangent/binormal of every triangle that
        # touches it (per UV layer), average, then orthogonalize against
        # the vertex normal.
        for vtx in self.obj_ref.data.vertices:
            t_res = []
            b_res = []
            for tidx in range(len(self.triangles)):
                tr = self.triangles[tidx]
                if ((vtx.index == tr[0]) or
                    (vtx.index == tr[1]) or
                    (vtx.index == tr[2])):
                    tbs = self.get_triangle_basis(tidx)
                    for l in range(len(tbs)):
                        t,b = tbs[l]
                        if len(t_res) < l + 1:
                            t_res.append(t)
                            b_res.append(b)
                        else:
                            t_res[l] += t
                            b_res[l] += b
            for i in range(len(t_res)):
                t_res[i] /= len(t_res)
                t_res[i] = self.ortogonalize(vtx.normal, t_res[i])
                b_res[i] /= len(b_res)
                b_res[i] = self.ortogonalize(vtx.normal, b_res[i])
            vtx_tb.append((t_res,b_res))
        return vtx_tb
    def get_triangle_basis(self, idx):
        """Return one (tangent, binormal) pair per UV layer for triangle
        *idx*, from its world-rotated vertex positions and UV deltas.
        """
        tbs = []
        triangle = self.triangles[idx]
        # Rotation-only part of the object's world matrix.
        c_mat = self.obj_ref.matrix_world.to_euler().to_matrix()
        #vtx0 = self.obj_ref.data.vertices[triangle[0]].co * c_mat
        #vtx1 = self.obj_ref.data.vertices[triangle[1]].co * c_mat
        #vtx2 = self.obj_ref.data.vertices[triangle[2]].co * c_mat
        vtx0 = c_mat * self.obj_ref.data.vertices[triangle[0]].co
        vtx1 = c_mat * self.obj_ref.data.vertices[triangle[1]].co
        vtx2 = c_mat * self.obj_ref.data.vertices[triangle[2]].co
        v1 = vtx1 - vtx0
        v2 = vtx2 - vtx0
        vtxMat = (v1,v2)
        for layer in self.uv_layers:
            uvs = layer[idx]
            #uvs = sorted(uvs, key = lambda uv: Vector(uv).length)
            v1_u = uvs[1][0] - uvs[0][0]
            v1_v = uvs[1][1] - uvs[0][1]
            v2_u = uvs[2][0] - uvs[0][0]
            v2_v = uvs[2][1] - uvs[0][1]
            # Inverse determinant of the 2x2 UV delta matrix.
            # NOTE(review): degenerate UV mapping (zero determinant) would
            # raise ZeroDivisionError here.
            tmp = 1.0 / ((v1_u * v2_v) - (v2_u * v1_v))
            uvMat = ((v2_v * tmp, -v1_v * tmp),
                     (-v2_u * tmp, v1_u * tmp))
            # tbMat = uvMat (2x2) multiplied by vtxMat (2x3), row by row.
            tbMat = []
            tbMat.append([])
            #tbMatrix[0][0] = stMatrix[0][0] * pqMatrix[0][0] + stMatrix[0][1] * pqMatrix[1][0];
            tbMat[0].append(uvMat[0][0] * vtxMat[0][0] + uvMat[0][1] * vtxMat[1][0])
            #tbMatrix[0][1] = stMatrix[0][0] * pqMatrix[0][1] + stMatrix[0][1] * pqMatrix[1][1];
            tbMat[0].append(uvMat[0][0] * vtxMat[0][1] + uvMat[0][1] * vtxMat[1][1])
            #tbMatrix[0][2] = stMatrix[0][0] * pqMatrix[0][2] + stMatrix[0][1] * pqMatrix[1][2];
            tbMat[0].append(uvMat[0][0] * vtxMat[0][2] + uvMat[0][1] * vtxMat[1][2])
            tbMat.append([])
            #tbMatrix[1][0] = stMatrix[1][0] * pqMatrix[0][0] + stMatrix[1][1] * pqMatrix[1][0];
            tbMat[1].append(uvMat[1][0] * vtxMat[0][0] + uvMat[1][1] * vtxMat[1][0])
            #tbMatrix[1][1] = stMatrix[1][0] * pqMatrix[0][1] + stMatrix[1][1] * pqMatrix[1][1];
            tbMat[1].append(uvMat[1][0] * vtxMat[0][1] + uvMat[1][1] * vtxMat[1][1])
            #tbMatrix[1][2] = stMatrix[1][0] * pqMatrix[0][2] + stMatrix[1][1] * pqMatrix[1][2];
            tbMat[1].append(uvMat[1][0] * vtxMat[0][2] + uvMat[1][1] * vtxMat[1][2])
            tangent = Vector((tbMat[0][0],
                              tbMat[0][1],
                              tbMat[0][2]))
            binormal = Vector((tbMat[1][0],
                               tbMat[1][1],
                               tbMat[1][2]))
            tangent.normalize()
            #tangent.negate()
            binormal.normalize()
            #binormal.negate()
            tbs.append((tangent, binormal))
        return tbs
    def get_closest_point(self, a, b, p):
        """Return the point on segment a-b closest to point p."""
        c = p - a
        v = b - a
        d = v.length
        v.normalize()
        t = v.dot(c)
        # Clamp to the segment endpoints.
        if t < 0.0:
            return a
        if t > d:
            return b
        v *= t
        return a + v
    def ortogonalize(self, v1, v2):
        """Return v2 made orthogonal to v1 (Gram-Schmidt step), normalized.

        (Spelling of the method name kept for caller compatibility.)
        """
        v2_proj_v1 = self.get_closest_point( v1, -v1, v2 )
        res = v2 - v2_proj_v1
        res.normalize()
        return res
# Manual smoke test: run inside Blender with at least one object selected.
if __name__ == "__main__":
    tbng = TBNGenerator(bpy.context.selected_objects[0])
    print(tbng.generate())
| Python |
#!/usr/bin/env python
# Standard Django (pre-1.4 style) management entry point; delegates all
# subcommands to execute_manager with the project settings module.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| Python |
from django import forms
from django.contrib.auth.models import User
class ContactForm(forms.Form):
    """Site contact form used by the contact view.

    NOTE: field declaration order determines the rendering order of a
    Django form, so these definitions must stay in this sequence.
    """
    # Optional display name of the sender.
    name = forms.CharField(max_length=100,
        help_text="Full Name", widget=forms.TextInput(attrs={'size':'40'}),required=False)
    subject = forms.CharField(max_length=100,
        help_text="Subject of your message", widget=forms.TextInput(attrs={'size':'40'}))
    # Reply-to address; the contact view also uses it as the mail sender.
    sender = forms.EmailField(
        help_text="Your email address", widget=forms.TextInput(attrs={'size':'40'}),required=True)
    message = forms.CharField(
        help_text="Please enter as much text as you would like",
        widget=forms.Textarea(attrs={'rows':'12','cols':'60'}))
    # When checked, the contact view CCs the sender a copy.
    cc_myself = forms.BooleanField(required=False,
        help_text="Send yourself a copy of this message")
| Python |
from django.conf.urls.defaults import *
from views import *
# BUG fix: the catch-all pattern r'^' matches every path, so the more
# specific /thanks/ route must come first.  In the original ordering the
# thanks page was unreachable and every request hit the contact view.
urlpatterns = patterns('',
    (r'^thanks/$', static, {'template':'contact_thanks.html'}),
    (r'^', contact),
)
| Python |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf import settings
from django.shortcuts import render_to_response
from django.http import HttpResponseRedirect
from forms import *
#@cache_page(60*5)
def static(request, template):
    """Render *template* with a plain RequestContext (no extra variables)."""
    # BUG fix: RequestContext is not imported anywhere in this module, so
    # the original raised NameError on every request; import it locally to
    # keep this edit self-contained.
    from django.template import RequestContext
    return render_to_response(template, RequestContext(request,{}))
def contact(request):
    """Contact-form view: validate, email the site contacts, redirect.

    On invalid POST the page is re-rendered with the bound form (errors
    shown); on GET an unbound form is rendered.
    """
    if request.method == 'POST':
        form = ContactForm(request.POST)
        if form.is_valid():
            subject = form.cleaned_data['subject']
            sender = form.cleaned_data['sender']
            message = 'The following feedback was submitted from %s \n\n' % sender
            message += form.cleaned_data['message']
            cc_myself = form.cleaned_data['cc_myself']
            # BUG fix: copy the settings list.  The original appended the
            # sender directly to settings.CONTACT_EMAILS, permanently
            # growing the shared settings object across requests within
            # the same process.
            recipients = list(settings.CONTACT_EMAILS)
            if cc_myself:
                recipients.append(sender)
            from django.core.mail import send_mail
            send_mail(subject, message, sender, recipients)
            return HttpResponseRedirect('/contact/thanks/') # Redirect after POST
    else:
        form = ContactForm() # An unbound form
    return render_to_response('contact.html', {
        'form': form,
    })
| Python |
from django.conf import settings
from django.conf.urls.defaults import *
from django.contrib import databrowse
from django.contrib import admin
# Register ModelAdmin classes from all installed apps.
admin.autodiscover()
#from shapeft.custom_admin import editor
#from registration.views import register
urlpatterns = patterns('',
    # Admin lives under an obscured prefix instead of the default /admin/.
    (r'^_admin_/', include(admin.site.urls)),
#    (r'^editor/(.*)', editor.root),
    (r'^databrowse/(.*)', databrowse.site.root),
    # Static/media served by Django directly (development setup only).
    (r'^static/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': settings.STATIC_DATA}),
    (r'^media/(?P<path>.*)$', 'django.views.static.serve',
        {'document_root': settings.MEDIA_ROOT}),
    (r'^comments/', include('django.contrib.comments.urls')),
    (r'^contact/', include('contact.urls')),
    (r'^auth/', include('ft_auth.urls')),
    # Catch-all: must stay last so it does not shadow the routes above.
    (r'^', include('shapeft.urls')),
)
| Python |
#Please create a local_settings.py which should include, at least:
# - ADMINS
# - DEFAULT_FROM_EMAIL
# - DATABASES
# - SECRET_KEY
# - FT_DOMAIN_KEY
# - FT_DOMAIN_SECRET
# - EMAIL_HOST
# - EMAIL_HOST_USER
# - EMAIL_HOST_PASSWORD
# - EMAIL_PORT
# - EMAIL_USE_TLS
import os
# Debug flags -- override to False in production via local_settings.py.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# Filesystem locations for static assets and temporary shapefile uploads.
STATIC_DATA = os.path.join(os.path.dirname(__file__), 'static/')
SHP_UPLOAD_DIR = '/tmp/'
ADMINS = (
    ('Admin1', 'your_email_address'),
)
MANAGERS = ADMINS
# NOTE(review): placeholder (the trailing 'm' looks like a typo); the real
# address is expected from local_settings.py per the header comment.
DEFAULT_FROM_EMAIL = 'your_email_addressm'
EMAIL_MANAGERS = False
CACHE_BACKEND = 'file:///tmp/shapeft_cache'
DATABASE_NAME = 'shapeft'
# PostGIS database; credentials are placeholders to be overridden locally.
DATABASES = {
    'default': {
        'NAME': DATABASE_NAME,
        'ENGINE': 'django.contrib.gis.db.backends.postgis',
        'USER': 'postgres',
        'PASSWORD': 'foo'
    }
}
TIME_ZONE = 'America/Vancouver'
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
USE_I18N = False
MEDIA_ROOT = os.path.join(os.path.dirname(__file__), 'media/')
MEDIA_URL = ''
ADMIN_MEDIA_PREFIX = '/admin_media/'
# Placeholder -- the real key must come from local_settings.py.
SECRET_KEY = 'store-in-local-settings'
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
)
# Extend the stock context processors with the request object.
from django.conf.global_settings import TEMPLATE_CONTEXT_PROCESSORS
TEMPLATE_CONTEXT_PROCESSORS += (
    'django.core.context_processors.request',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.csrf.middleware.CsrfViewMiddleware',
    'django.contrib.csrf.middleware.CsrfResponseMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
    os.path.join(os.path.dirname(__file__), 'templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'django.contrib.databrowse',
    'django.contrib.gis',
    'django.contrib.humanize',
    'django.contrib.webdesign',
    'shapeft',
    'shapes',
    'contact',
    'ft_auth',
)
# Fusion Tables OAuth consumer credentials (secret overridden locally).
FT_DOMAIN_KEY = 'shpescape.com'
FT_DOMAIN_SECRET = 'foo'
# Local overrides win; a missing local_settings is tolerated in dev.
try:
    from local_settings import *
except ImportError, exp:
    pass
| Python |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.db import models
from django.contrib.gis.geos import Point
class OAuthRequestToken(models.Model):
    """OAuth Request Token tied to a browser session (see ft_auth.views)."""
    session_key = models.CharField(max_length=250)
    ft_token = models.CharField(max_length=250)
    ft_token_secret = models.CharField(max_length=250)
    created = models.DateTimeField(auto_now_add=True)
    def is_complete(self):
        """Return True when both token parts are present.

        BUG fix: the original tested self.md_token, a field that does not
        exist on this model and raised AttributeError when ft_token was
        truthy; check the secret instead and return an explicit bool.
        """
        return bool(self.ft_token and self.ft_token_secret)
class OAuthAccessToken(models.Model):
    """OAuth Access Token tied to a browser session (see ft_auth.views)."""
    session_key = models.CharField(max_length=250)
    ft_token = models.CharField(max_length=250)
    ft_token_secret = models.CharField(max_length=250)
    created = models.DateTimeField(auto_now_add=True)
    def is_complete(self):
        """Return True when both token parts are present.

        BUG fix: the original tested self.md_token, a field that does not
        exist on this model and raised AttributeError when ft_token was
        truthy; check the secret instead and return an explicit bool.
        """
        return bool(self.ft_token and self.ft_token_secret)
| Python |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls.defaults import *
from views import FTVerify, FTAuthenticate
# OAuth endpoints: FTVerify starts the authorization flow,
# FTAuthenticate is the provider's callback.
urlpatterns = patterns('',
    (r'^FTVerify/$', FTVerify),
    (r'^FTAuthenticate/$', FTAuthenticate),
)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
from datetime import datetime
from django.conf import settings
from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, Http404
from django.template import RequestContext
from django.conf import settings
from ftlibrary.authorization.oauth import OAuth
from ftlibrary.ftclient import OAuthFTClient
from ftlibrary.sql.sqlbuilder import SQL
import ftlibrary.oauth2 as oauth # httplib2 is required for this to work on AppEngine
from models import *
# OAuth consumer settings for the Fusion Tables API.
# BUG fix: settings defines FT_DOMAIN_KEY (see settings.py and the
# required local_settings values listed there), not FT_DOMAIN, so the
# original raised AttributeError at import time.  The consumer key is the
# domain name and is reused for the 'domain' slot.
FT_OAUTH = {
    'key': settings.FT_DOMAIN_KEY,
    'secret': settings.FT_DOMAIN_SECRET,
    'domain': settings.FT_DOMAIN_KEY
}
def get_token(request):
    """
    Return the stored OAuthAccessToken for this browser session, or False.

    Looks up the session key saved by FTVerify/FTAuthenticate; any token
    older than 24 hours is deleted and treated as missing.
    """
    try:
        ft_session = request.session['ft_token']
        token = OAuthAccessToken.objects.get(session_key=ft_session)
        # invalidate any token > 24 hours old
        now = datetime.now()
        diff = now - token.created
        if diff.days:
            token.delete()
            return False
        # TODO check ip address matches
        #oauthorize
        return token
    except KeyError:
        # No 'ft_token' key in the session yet -- user never authorized.
        print 'no session token..'
    except OAuthAccessToken.DoesNotExist:
        # Session key present but no matching token row.
        print 'no access token ...'
    return False
def create_session_key(request):
    """Build a pseudo-unique session key from the client IP plus a random
    float, with every '.' stripped out of the result."""
    raw = '%s%s' % (request.META['REMOTE_ADDR'], random.random())
    return raw.replace('.', '')
def FTVerify(request):
    """Start the OAuth dance: obtain and store a request token, save the
    session key, and redirect the user to the provider's consent page."""
    ft_session = create_session_key(request)
    # The provider redirects back here after the user grants access.
    callback_url = 'http://' + request.META['HTTP_HOST'] + '/auth/FTAuthenticate'
    url,token,secret = OAuth().generateAuthorizationURL(
        consumer_key=FT_OAUTH['key'],
        consumer_secret=FT_OAUTH['secret'],
        domain=FT_OAUTH['domain'],
        callback_url=callback_url)
    #save the new token
    request_token = OAuthRequestToken(
        ft_token=token,
        ft_token_secret=secret,
        session_key=ft_session)
    request_token.save()
    #save session key
    request.session['ft_token'] = ft_session
    return HttpResponseRedirect(url)
def FTAuthenticate(request):
    """OAuth callback: exchange the stored request token for an access
    token, persist it for this session, and send the user to /upload."""
    #get the old token and secret
    try:
        ft_session = request.session['ft_token']
    except KeyError:
        # NOTE(review): the original built an HttpResponseRedirect right
        # after this raise; that statement was unreachable and has been
        # removed.  If redirecting to the start of the flow is the desired
        # behavior, replace the raise with:
        #     return HttpResponseRedirect('/auth/FTVerify')
        raise Exception('should not get here ... no session key')
    request_token = OAuthRequestToken.objects.filter(session_key=ft_session)
    if not request_token:
        # Same dead-code removal as above.
        raise Exception('should not get here ... no token key')
    token = request_token[0].ft_token
    secret = request_token[0].ft_token_secret
    #retrieve the access token and secret, these will be used in future requests
    #so save them in the database for the user
    access_token, access_secret = OAuth().authorize(
        consumer_key=FT_OAUTH['key'],
        consumer_secret=FT_OAUTH['secret'],
        oauth_token=token,
        oauth_token_secret=secret)
    oauth_token = OAuthAccessToken(
        ft_token=access_token,
        ft_token_secret=access_secret,
        session_key=ft_session)
    oauth_token.save()
    return HttpResponseRedirect('/upload')
| Python |
import random
import md5
from django.db import models
from ft_auth.models import OAuthAccessToken
# Human-readable labels for shapeUpload.status values; code 1 contains a
# %s placeholder for the number of queued uploads ahead (see get_status).
# Note there is deliberately no code 5.
STATUS_CODES = {
    1 : 'In Queue (%s ahead of you)',
    2 : 'Initial Processing',
    3 : 'Importing into Fusion Tables',
    4 : 'Complete',
    6 : 'Error'
}
class shapeUpload(models.Model):
    """An upload -- includes location of initial shape, processing status, etc"""
    auth_token = models.ForeignKey(OAuthAccessToken)
    # uid: salted md5 hexdigest of the row id, assigned in save().
    uid = models.CharField(max_length=250)
    shapefile = models.CharField(max_length=250)
    status = models.IntegerField()  # see STATUS_CODES
    status_msg = models.CharField(max_length=250,null=True)
    total_rows = models.IntegerField(null=True)
    rows_processed = models.IntegerField(null=True)
    rows_imported = models.IntegerField(null=True)
    ft_table_id = models.IntegerField(null=True)
    uploaded = models.DateTimeField(auto_now_add=True)
    create_simplify = models.BooleanField(default=True)
    create_centroid = models.BooleanField(default=True)
    create_centroid_poly = models.BooleanField(default=False)
    def get_title(self):
        """Return the bare file name of the uploaded shapefile."""
        return self.shapefile.split('/')[-1]
    def get_status(self):
        """Return the human-readable status; for queued uploads this
        includes the number of uploads ahead in the queue."""
        status = STATUS_CODES[self.status]
        if self.status == 1:
            queue_length = shapeUpload.objects.filter(status=1).count()
            status = status % (queue_length - 1)
        return status
    def save(self, *args, **kwargs):
        """Save the row, assigning a salted-md5 uid on first insert.

        BUG fix: the override now accepts and forwards *args/**kwargs so
        Django calls such as save(using=...) keep working.  (The uid
        requires an id, hence the double save on first insert; the local
        variable formerly named ``hash`` no longer shadows the builtin.)
        """
        salt = 'shapebar'
        if not self.id:
            super(shapeUpload, self).save(*args, **kwargs)
            digest = md5.new(salt + str(self.id))
            self.uid = digest.hexdigest()
        super(shapeUpload, self).save(*args, **kwargs)
| Python |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.conf.urls.defaults import *
from views import *
# upload_ids in the status URL is a list of md5-hexdigest uids joined with
# 'g' (hex digits never contain 'g', so it is a safe separator); the
# character class below matches hex digits plus the 'g' separator.
urlpatterns = patterns('',
    (r'^upload/$', generic_import),
    url(r'^uploads/(?P<upload_ids>[a-gA-F\d]*)/$', upload_detail, name="upload_detail"),
    (r'^$', static, {'template':'index.html'}),
)
| Python |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import random
import time
from django.conf import settings
from django.shortcuts import render_to_response, get_object_or_404
from django.template import RequestContext
from django.http import HttpResponse, HttpResponseRedirect, HttpResponseForbidden, Http404
from django.contrib.gis.geos import fromstr, LineString
from django.contrib.gis.models import SpatialRefSys
from django.contrib.gis.gdal import DataSource, OGRGeometry
from django.utils.datastructures import SortedDict
import simplejson
from shapes.forms import UploadForm
from ft_auth.views import *
from shapeft.models import shapeUpload
#@cache_page(60*5)
def static(request, template):
    """Render *template* with a RequestContext, defaulting to the index
    page when no template name is given."""
    chosen = template or "index.html"
    return render_to_response(chosen, RequestContext(request, {}))
def generic_import(request):
    """
    Accept uploaded shapefile(s) and queue one shapeUpload row per file.

    Requires an FT OAuth token in the session (redirects into the OAuth
    flow when missing); on success redirects to the combined status page
    for the newly queued uploads.
    """
    token = get_token(request)
    if not token:
        return HttpResponseRedirect('/auth/FTVerify')
    if request.method == 'POST':
        form = UploadForm(request.POST, request.FILES)
        if form.is_valid():
            form.handle(request.FILES['file_obj'])
            create_simplify = request.POST.get('create_simplify', False)
            create_centroid = request.POST.get('create_centroid', False)
            create_centroid_poly = request.POST.get('create_centroid_poly', False)
            #save form info in a model; the cron worker picks up status=1 rows
            uids = []
            for shapefile in form.shapefiles:
                upload = shapeUpload()
                upload.auth_token = token
                upload.shapefile = shapefile
                # BUG fix: the create_* flags used to be assigned AFTER
                # save(), so they were never persisted and the worker only
                # ever saw the model defaults.  Assign everything first,
                # then save once (save() also derives the uid).
                upload.create_simplify = bool(create_simplify)
                upload.create_centroid = bool(create_centroid)
                upload.create_centroid_poly = bool(create_centroid_poly)
                upload.status = 1
                upload.save()
                uids.append(upload.uid)
            # uids are hex digests, so 'g' is a safe separator.
            url = '/uploads/%s/' % 'g'.join(uids)
            return HttpResponseRedirect(url)
    else:
        form = UploadForm()
    return render_to_response('upload.html', RequestContext(request,{
        'form': form}))
def upload_detail(request, upload_ids):
    """Show the status page for one or more shapeUploads.

    *upload_ids* is a 'g'-separated list of upload uids (uids are md5
    hexdigests, so 'g' never occurs inside one).
    """
    uid_list = upload_ids.split('g')
    upload_qs = shapeUpload.objects.filter(uid__in=uid_list).order_by('id')
    context = RequestContext(request, {'uploads': upload_qs})
    return render_to_response('upload_detail.html', context)
def import_from_shape(upload,
start_row=0,
max_rows=200000,
create_int_style_cols=True):
"""
a shapeUpload object
max_rows - any more than this is ignored
centroid - if it's a (multi)polygon, should we also create a geometry_centroid field
"""
upload.status = 2 #set this right away so it doesn't get reprocessed
upload.save()
ds = DataSource(upload.shapefile)
layer = ds[0]
fields = layer.fields
num_features = len(layer)
#set max # of _style features
max_distinct_style_vals = max(min(num_features / 100, 50),10)
print 'there are %d features' % num_features
upload.total_rows = num_features
if not num_features:
print 'no rows, returning'
upload.status = 6
upload.save()
return
rows = []
#get field types
field_map = {
'OFTString':'STRING',
'OFTReal':'NUMBER',
'OFTInteger':'NUMBER',
'OFTDate':'DATETIME'
}
field_types = [field_map[f.__name__] for f in layer.field_types]
field_layers = layer.fields
#insert geometry layers first
field_layers.insert(0,'geometry')
field_types.insert(0,'LOCATION')
field_layers.insert(1,'geometry_vertex_count')
field_types.insert(1,'NUMBER')
if upload.create_simplify:
field_layers.insert(0,'geometry_simplified')
field_types.insert(0,'LOCATION')
field_layers.insert(1,'geometry_simplified_vertex_count')
field_types.insert(1,'NUMBER')
#use sorted dict so we can ensure table has geom columns upfront
field_dict = SortedDict(zip(field_layers, field_types))
#set up extra fields if creating int/style cols
if create_int_style_cols:
int_style_dict = {}
for field,field_type in field_dict.items():
if field_type == 'STRING':
field_dict[field + '_ft_style'] = 'NUMBER'
int_style_dict[field] = {}
print field_dict
#add some custom import fields
field_dict['import_notes'] = 'STRING'
print 'FIELD DICT', field_dict
print 'starting to process'
for i, feat in enumerate(layer):
if i > max_rows:
continue
if start_row and i < start_row:
continue
upload.rows_processed = i + 1
if not i % ((num_features / 50) or 5):
print upload.rows_processed,'rp'
upload.save()
upload.save()
rd = {}
#geom = fromstr(feat.geom.wkt,srid=srid)
if layer.srs:
try:
geom = OGRGeometry(feat.geom.wkt, layer.srs.proj4)
geom.transform(4326)
except Exception, e:
print 'FAIL GEOM'
print e,
geom = None
else:
geom = OGRGeometry(feat.geom.wkt)
if geom:
geom = fromstr(geom.wkt)
#create optional centroid for polys
if upload.create_centroid and 'oly' in geom.geom_type:
field_dict['geometry_pos'] = 'LOCATION'
rd['geometry_pos'] = geom.point_on_surface.kml
if upload.create_centroid_poly and 'oly' in geom.geom_type:
field_dict['geometry_pos_poly_2'] = 'LOCATION'
field_dict['geometry_pos_poly_3'] = 'LOCATION'
rd['geometry_pos_poly_2'] = geom.point_on_surface.buffer(.0001,10).kml
rd['geometry_pos_poly_3'] = geom.point_on_surface.buffer(.0005,10).kml
#if it's > 1M characters, we need to simplify it for FT
simplify_tolerance = .0001
while len(geom.kml) > 1000000:
geom = geom.simplify(simplify_tolerance)
print 'simplified to %f' % simplify_tolerance
rd['import_notes'] = 'simplified to %d DD' % simplify_tolerance
simplify_tolerance = simplify_tolerance * 1.5
if not geom.valid:
rd['import_notes'] = '<br>Geometry not valid'
kml = geom.kml
rd['geometry'] = kml
rd['geometry_vertex_count'] = geom.num_coords
if upload.create_simplify and not 'oint' in geom.geom_type:
amt = .002
if 'oly' in geom.geom_type:
buffer_geom = geom.buffer(amt)
buffer_geom = buffer_geom.buffer(amt * -1)
simple_geom = buffer_geom.simplify(amt)
else:
simple_geom = geom.simplify(amt)
rd['geometry_simplified'] = simple_geom.kml
rd['geometry_simplified_vertex_count'] = simple_geom.num_coords
for f in fields:
val = feat.get(f)
#make sure we have proper null type for diff fields
if val == '<Null>':
continue
if not val:
continue
if field_dict[f] == 'DATETIME':
val = val.isoformat().split('T')[0]
if field_dict[f] == 'STRING' \
and create_int_style_cols \
and field_dict.has_key(f + '_ft_style'):
#check to see if we have a number for this yet
try:
rd[f + '_ft_style'] = int_style_dict[f][val]
except:
int_style_dict[f][val] = len(int_style_dict[f])
rd[f + '_ft_style'] = int_style_dict[f][val]
#however if we have too many distinct vals, let's just not do this anymore
if len(int_style_dict[f]) > max_distinct_style_vals:
print 'DELETING FD %s' % f
del field_dict[f + '_ft_style']
del rd[f + '_ft_style']
#sucks, but now we should just remove all these fields from previous rows
for srow in rows:
try:del srow[f + '_ft_style']
except:
pass #probably this was a null value?
rd[f] = val
rows.append(rd)
#let's process 10k rows at a time.. not keep everything in memory
if len(rows) > 10000:
uploadRows(upload, field_dict, rows)
rows = []
uploadRows(upload, field_dict, rows)
def uploadRows(upload, field_dict, rows):
    """Ensure the FT table exists, insert *rows*, and mark the upload done.

    Called in 10k-row batches by import_from_shape; the table is created
    only once (createTable sets ft_table_id on the model).
    """
    if not upload.ft_table_id:
        upload = createTable(upload, field_dict)
    # status 3 = 'Importing into Fusion Tables'
    upload.status = 3
    upload.save()
    print 'inserting %d rows' % len(rows)
    insertData(upload, field_dict, rows)
    # status 4 = 'Complete'
    upload.status = 4
    upload.save()
def insertSql(client, sql, attempt_no=0):
    """Run *sql* against the FT client, retrying 'Unable...' responses.

    Retries up to 3 times with a 1-second pause; returns the response
    string, or an error string after the retries are exhausted.  Transport
    errors get a single blind retry (a second failure propagates).
    """
    try:
        resp = client.query(sql)
    except Exception:
        print('unable to query sql %s' % sql)
        resp = client.query(sql)
    print(resp[:50])
    if 'Unable' in resp:
        if attempt_no > 3:
            return 'Error - failed after 3 attempts' + resp
        #print sql
        print(resp)
        time.sleep(1)
        print('len: %d, attempt: %d' % (len(sql), attempt_no))
        # BUG fix: the original discarded the retry's return value and fell
        # through to return the FAILED response; propagate the retry result.
        return insertSql(client, sql, attempt_no + 1)
    return resp
def getClient(upload):
    """Build an OAuth-authenticated Fusion Tables client using the access
    token stored on this upload's auth_token row."""
    ftClient = OAuthFTClient(
        FT_OAUTH['key'],
        FT_OAUTH['secret'],
        upload.auth_token.ft_token,
        upload.auth_token.ft_token_secret)
    print 'client created'
    return ftClient
def createTable(upload, field_dict):
    """Create the Fusion Table for this upload and store its id on the
    model; returns the (saved) upload."""
    ftClient = getClient(upload)
    # One table, named after the shapefile, with the given column schema.
    table_dictionary = {upload.get_title() : field_dict}
    results = ftClient.query(SQL().createTable(table_dictionary))
    # NOTE(review): assumes the response's second line is the new table
    # id (header on line one) -- verify against the FT client docs.
    table_id = results.split("\n")[1]
    print 'new table: %s' % results
    upload.ft_table_id = table_id
    upload.save()
    return upload
def insertData(upload, field_dict, rows):
    """Insert *rows* into the upload's Fusion Table in batched ';'-joined
    INSERT statements, tracking progress on the model."""
    ftClient = getClient(upload)
    #insert rows
    sql = []
    sql_len = 0
    for i, row in enumerate(rows):
        upload.rows_imported = i + 1
        # Flush the pending batch before it exceeds the request size cap.
        if sql_len > 500000 or len(sql) > 100: # max upload is 1MB?
            insertSql(ftClient, ';'.join(sql))
            sql = []
            sql_len = 0
            upload.save()
        try:
            insert_statement = SQL().insert(upload.ft_table_id, row)
        except Exception, e:
            # Skip rows that cannot be rendered into SQL rather than
            # aborting the whole import.
            print 'FAIL SQL', row
            print e
            continue
        sql.append(insert_statement)
        sql_len += len( insert_statement)
    # Flush the final partial batch.
    insertSql(ftClient, ';'.join(sql))
    upload.save()
| Python |
#!/usr/bin/env python
#
# Copyright 2010 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import math
import sys
import os
import time
import urllib
# Require the settings-module path and name on the command line.
if len(sys.argv) < 3:
    print "Usage: %s <path to settings module> <settings module name>" % sys.argv[0]
    sys.exit()
# NOTE(review): defined but never used anywhere in this script.
class KeyboardException: pass
# Make the project importable and point Django at the given settings
# module before the shapeft imports below run.
sys.path = [sys.argv[1]] + sys.path
os.environ['DJANGO_SETTINGS_MODULE'] = sys.argv[2]
from shapeft.models import *
from shapeft.views import *
def run():
while True:
uploads = shapeUpload.objects.filter(status=1)
for upload in uploads:
print 'working oni %d: %s' % (upload.id, upload.shapefile)
try:
import_from_shape(upload)
print "Finished with %s" % upload.shapefile
except Exception, E:
print "Error occurred (%s)" % E
upload.status = 6
upload.status_msg = str(E)
upload.save()
time.sleep(8)
if __name__ == "__main__":
run()
| Python |
# Pseudo-Python rendition of the code for ``get_next_access_unit()``.
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the MPEG TS, PS and ES tools.
#
# The Initial Developer of the Original Code is Amino Communications Ltd.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Amino Communications Ltd, Swavesey, Cambridge UK
#
# ***** END LICENSE BLOCK *****
def get_next_access_unit(context):
    """Retrieve the next access unit from the file described by `context`.

    NOTE(review): per the file's header comment this is a pseudo-Python
    *rendition* of C code, for documentation only.  TRUE/FALSE/NULL, EOF,
    BrokenNALUnit, WARNING, quiet and show_details are pseudo-identifiers,
    and the WARNING(...) call in the "cannot start a new primary picture"
    branch below is missing its closing parenthesis -- it is not runnable.
    """
    access_unit = build_access_unit()
    if context.pending_nal: # i.e., we already had a NAL to start this unit
        access_unit.append(context.pending_nal,TRUE,context.pending_list)
        context.pending_nal = NULL
        context.pending_list.reset(FALSE)
    while 1:
        try:
            nal = context.find_next_NAL_unit()
        except EOF:
            context.no_more_data = TRUE; # prevent future reads on this stream
            break
        except BrokenNALUnit:
            WARNING("!!! Ignoring broken NAL unit\n")
            access_unit.ignored_broken_NAL_units += 1
            continue
        if nal.is_slice():
            if not access_unit.started_primary_picture:
                # We're in a new access unit, but we haven't had a slice
                # yet, so we can be lazy and assume that this must be the
                # first slice
                nal.start_reason = "First slice of new access unit"
                access_unit.append(nal,TRUE,context.pending_list)
                context.pending_list.reset(FALSE)
                context.remember_earlier_primary_start(nal)
            elif nal.is_first_VCL_NAL(context.earlier_primary_start):
                # Regardless of what we determine next, we need to remember
                # that the NAL started (what may later be the previous) access
                # unit
                context.remember_earlier_primary_start(nal)
                if access_unit.started_primary_picture:
                    # We were already in an access unit with a primary
                    # picture, so this NAL unit must start a new access unit.
                    # Remember it for next time, and return the access unit so
                    # far.
                    context.pending_nal = nal
                    break; # Ready to return the access unit
                else:
                    # This access unit was waiting for its primary picture
                    access_unit.append(nal,TRUE,context.pending_list)
                    context.pending_list.reset(FALSE)
            elif not access_unit.started_primary_picture:
                # But this is not a NAL unit that may start a new
                # access unit. So what should we do? Ignore it?
                if not quiet:
                    WARNING("!!! Ignoring VCL NAL that cannot start a new"
                            " primary picture: "
                    nal.report(stderr)
            elif nal_is_redundant(nal):
                # printf(" ignoring redundant NAL unit\n")
                pass
            else:
                # We're part of the same access unit, but not special
                access_unit.append(nal,FALSE,context.pending_list)
                context.pending_list.reset(FALSE)
        elif nal.nal_unit_type == NAL_ACCESS_UNIT_DELIM:
            # An access unit delimiter always starts a new access unit
            if access_unit.started_primary_picture:
                context.pending_list.append(nal)
                break # Ready to return the "previous" access unit
            else:
                # The current access unit doesn't yet have any VCL NALs
                if context.pending_list.length > 0:
                    WARNING("!!! Ignoring items after last VCL NAL and"
                            " before Access Unit Delimiter\n")
                    context.pending_list.report(stderr," ",NULL,)
                    context.pending_list.reset(TRUE)
                if access_unit.nal_units.length > 0:
                    WARNING("!!! Ignoring incomplete access unit\n")
                    access_unit.nal_units.report(stderr," ",NULL,)
                    access_unit.nal_units.reset(TRUE)
                access_unit.append(nal,FALSE,NULL)
        elif nal.nal_unit_type == NAL_SEI:
            # SEI units always precede the primary coded picture
            # - so they also implicitly end any access unit that has already
            # started its primary picture
            if access_unit.started_primary_picture:
                context.pending_list.append(nal)
                break # Ready to return the "previous" access unit
            else:
                context.pending_list.append(nal)
        elif nal.nal_unit_type in [NAL_SEQ_PARAM_SET, NAL_PIC_PARAM_SET,
                                   13, 14, 15, 16, 17, 18]:
            # These start a new access unit *if* they come after the last VCL
            # NAL of an access unit. But we can only *tell* that they are
            # after the last VCL NAL of an access unit when we start the next
            # access unit - so we need to hold them in hand until we know that
            # we need them. (i.e., they'll get added to an access unit just
            # before the next "more determined" NAL unit we add to an access
            # unit)
            context.pending_list.append(nal)
        elif nal.nal_unit_type == NAL_END_OF_SEQ:
            if context.pending_list.length > 0:
                WARNING("!!! Ignoring items after last VCL NAL and"
                        " before End of Sequence\n")
                context.pending_list.report(stderr," ",NULL,)
                context.pending_list.reset(TRUE)
            # And remember this as the End of Sequence marker
            context.end_of_sequence = nal
            break
        elif nal.nal_unit_type == NAL_END_OF_STREAM:
            # And remember this as the End of Stream marker
            context.end_of_stream = nal
            # Which means there's no point in reading more from this stream
            # (setting no_more_data like this means that *next* time this
            # function is called, it will return EOF)
            context.no_more_data = TRUE
            # And we're done
            break
        else:
            # It's not a slice, or an access unit delimiter, or an
            # end of sequence or stream, or a sequence or picture
            # parameter set, or various other odds and ends, so it
            # looks like we can ignore it.
            pass
    # Check for an immediate "end of file with no data"
    # - i.e., we read EOF or end of stream, and there was nothing
    # between the last access unit and such reading
    if context.no_more_data and access_unit.nal_units.length == 0:
        raise EOF
    # Otherwise, finish off and return the access unit we have in hand
    access_unit.end(context,show_details)
    # Remember to count it
    context.access_unit_index += 1
    return access_unit
| Python |
#! /usr/bin/env python
"""Build HTML from the reStructuredText files in this directory.
This is a script just so I don't have to remember the particular incantation
required. It's not in the Makefile because I'm not yet sure it belongs there...
Requires Python and docutils.
Uses rst2html.py on individual files because that seems to be available
more often than the buildhtml.py script.
"""
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the MPEG TS, PS and ES tools.
#
# The Initial Developer of the Original Code is Amino Communications Ltd.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Amino Communications Ltd, Swavesey, Cambridge UK
#
# ***** END LICENSE BLOCK *****
import os
def main():
filenames = os.listdir('.')
for name in filenames:
base,ext = os.path.splitext(name)
if ext == '.txt':
print 'Processing',name
os.system('rst2html --stylesheet-path=default.css'
' --embed-stylesheet %s > %s'%(name,base+'.html'))
if __name__ == "__main__":
main()
| Python |
#! /usr/bin/env python
"""Run the doctest on a text file
Usage: doctext.py [file]
[file] defaults to ``test.txt``
"""
import sys
import doctest
def main():
    """Parse the command line and run doctest over one text file.

    Flags: -v/-verbose for verbose doctest output; -h and friends print
    usage.  The single positional argument is the file to test (default
    "test.txt").  Prints a summary line plus a GREEN/RED verdict.
    """
    args = sys.argv[1:]
    filename = None
    verbose = False
    for word in args:
        if word in ("-v", "-verbose"):
            verbose = True
        elif word in ("-h", "-help", "/?", "/help", "--help"):
            print __doc__
            return
        else:
            # Any other word is the filename -- only one is allowed.
            if filename:
                print "Filename '%s' already specified"%filename
                return
            else:
                filename = word
    if not filename:
        filename = "test.txt"
    print
    print 'Ignore any output lines starting ### or !!!. These are written by the'
    print 'underlying C library, and are not "seen" (or hidden) by doctest.'
    print
    # I want to be able to use the "with" statement in the doctests.
    # It's not possible to use "from __future__ import with_statement"
    # in doctests as such. Instead, one has to add the resulting globals
    # to the doctest context. Which seems to be done as follows:
    import __future__
    extraglobs={'with_statement':__future__.with_statement}
    (failures,tests) = doctest.testfile(filename,verbose=verbose,
                                        extraglobs=extraglobs)
    # Pluralise the report words as needed.
    testword = "test"
    if tests != 1: testword = "tests"
    failword = "failure"
    if failures != 1: failword = "failures"
    print
    print "File %s: %d %s, %d %s"%(filename,tests,testword,failures,failword)
    print
    if failures == 0:
        print 'The little light is GREEN'
    else:
        print 'The little light is RED'
if __name__ == "__main__":
    main()
| Python |
"""Setup.py -- for building tstools Pyrex modules
"""
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the MPEG TS, PS and ES tools.
#
# The Initial Developer of the Original Code is Amino Communications Ltd.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Tibs (tibs@berlios.de)
#
# ***** END LICENSE BLOCK *****
from distutils.core import setup
from Pyrex.Distutils.extension import Extension
from Pyrex.Distutils import build_ext
# Build the Pyrex extension module; it links against the tstools C
# library one directory up (headers in '..', libtstools in '../lib').
tstools = Extension("tstools/tstools",
                    ['tstools/tstools.pyx'],
                    include_dirs=['..'],
                    library_dirs=['../lib'],
                    libraries=['tstools'],
                    )
# Use Pyrex's build_ext so the .pyx source is translated to C first.
setup(
    name = 'tstools',
    ext_modules=[tstools],
    cmdclass = {'build_ext': build_ext}
    )
| Python |
"""tstools -- a package of Pyrex bindings for the tstools
This is being developed on a Mac, running OS X, and also tested on my Ubuntu
system at work.
I do not expect it to build (as it stands) on Windows, as it is making
assumptions that may not follow thereon.
It is my intent to worry about Windows after it works on the platforms that
I can test most easily!
"""
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the MPEG TS, PS and ES tools.
#
# The Initial Developer of the Original Code is Amino Communications Ltd.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Tibs (tibs@berlios.de)
#
# ***** END LICENSE BLOCK *****
# The following also makes available the "sys" and "array" modules as
# imported to tsools, so is probably not the best way to do it. Ho hum.
from tstools import *
| Python |
#! /usr/bin/env python
"""sockread.py -- a simple client to read from a socket
"""
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the MPEG TS, PS and ES tools.
#
# The Initial Developer of the Original Code is Amino Communications Ltd.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Amino Communications Ltd, Swavesey, Cambridge UK
#
# ***** END LICENSE BLOCK *****
import sys
import socket
class DoneException(Exception):
    """Raised when the peer closes the connection (no more data)."""
    pass
def get_packet(sock,packet_size=188):
    """Read exactly `packet_size` bytes from the socket, coping with
    partial reads.

    Raises DoneException when the peer closes the connection (recv
    returns an empty string) before a full packet has arrived.
    """
    data = ""
    while len(data) < packet_size:
        # recv() may deliver fewer bytes than asked for, so track each
        # chunk separately.  BUGFIX: the original added the length of the
        # *whole* accumulated buffer to its running total on every pass
        # (double counting, so a packet built from partial reads could be
        # returned short) and tested the accumulated buffer -- not the
        # latest chunk -- for EOF, so a mid-stream close looped forever.
        chunk = sock.recv(packet_size - len(data))
        if len(chunk) == 0:
            raise DoneException
        data += chunk
    return data
def read_next_packet(sock,f=None):
    """Read one packet from `sock`, report its health on stdout, and
    optionally append the raw bytes to the open file `f`.

    A '.' is written for each well-formed 188-byte packet beginning with
    the MPEG-TS sync byte (0x47); anything else is reported as
    "[<first byte hex>]/<length>".
    """
    packet = get_packet(sock)
    first = ord(packet[0])
    if first == 0x47 and len(packet) == 188:
        marker = "."
    else:
        marker = "[%x]/%d"%(first,len(packet))
    sys.stdout.write(marker)
    sys.stdout.flush()
    if f:
        f.write(packet)
def main():
    """Listen on localhost:8889, accept one connection, and count the
    packets received until the sender closes the socket.
    """
    total_packets = 0
    sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
    print "Waiting on port 8889"
    sock.bind(("localhost",8889))
    sock.listen(1)
    conn, addr = sock.accept()
    print 'Connected by', addr
    #print "Writing to file temp.ts"
    #stream = file("temp.ts","wb")
    stream = None
    try:
        while 1:
            read_next_packet(conn,stream)
            total_packets += 1
    except DoneException:
        # Peer closed the connection -- normal termination.
        #stream.close()
        pass
    sys.stdout.write("\n")
    sys.stdout.write("Total packets: %d\n"%total_packets)
    sock.close()
if __name__ == "__main__":
#    try:
        main()
#    except KeyboardInterrupt:
#        print
| Python |
#! /usr/bin/env python
"""socktest.py -- a simple client to talk to tsserve
Command line - optionally:
-host <host> defaults to "localhost"
-port <port> defaults to 8889
-output <filename> defaults to None
-file <filename> the same
-nonblock the socket should operate in non-blocking mode
followed by zero or more commands, specified as:
<letter> <count> for n, F, f, r or R
or <letter> for any of the above, and also q, p, > and <, or 0 .. 9
If a <count> is given, it is the number of data packets to try to read before
issuing the next command.
The commands "n", "f", "F", r" and "R", and the "select channel and play" commands
"0" .. "9" may be given a count, in which case the command is given and then that
many data packets are read before the next command is given. If no count is given,
then data packets are read "forever".
The commands "p", ">" and "<" do not take a count, and do not read any data
packets.
The command "q" does not take a count, but reads the rest of the data packets.
If no commands are given, the default is "n" with no count.
"""
# ***** BEGIN LICENSE BLOCK *****
# Version: MPL 1.1
#
# The contents of this file are subject to the Mozilla Public License Version
# 1.1 (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
# http://www.mozilla.org/MPL/
#
# Software distributed under the License is distributed on an "AS IS" basis,
# WITHOUT WARRANTY OF ANY KIND, either express or implied. See the License
# for the specific language governing rights and limitations under the
# License.
#
# The Original Code is the MPEG TS, PS and ES tools.
#
# The Initial Developer of the Original Code is Amino Communications Ltd.
# Portions created by the Initial Developer are Copyright (C) 2008
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Amino Communications Ltd, Swavesey, Cambridge UK
#
# ***** END LICENSE BLOCK *****
import sys
import socket
class DoneException(Exception):
    """Raised when the server closes the connection (no more data)."""
    pass
# NOTE(review): a module-level `global` statement is a no-op (global only
# matters inside a function); kept as documentation of the shared counter.
global total_packets
def get_packet(sock,packet_size=188):
    """Read exactly `packet_size` bytes from the socket, coping with
    partial reads.

    Raises DoneException when the server closes the connection (recv
    returns an empty string) before a full packet has arrived.
    """
    data = ""
    while len(data) < packet_size:
        # recv() may deliver fewer bytes than asked for, so track each
        # chunk separately.  BUGFIX (same defect as sockread.py): the
        # original added the *whole* accumulated buffer length to its
        # running total each pass (double counting, could return short
        # packets) and tested the buffer -- not the chunk -- for EOF,
        # so a mid-stream close looped forever.
        chunk = sock.recv(packet_size - len(data))
        if len(chunk) == 0:
            raise DoneException
        data += chunk
    return data
def read_next_packet(sock,file=None):
    """Read the next packet from the socket, checking and counting it.

    Writes '.' to stdout for each well-formed 188-byte packet beginning
    with the MPEG-TS sync byte (0x47), otherwise '[<byte>]/<len>'.
    Increments the module-wide `total_packets` counter and, if `file`
    (note: the parameter shadows the builtin) is given, appends the raw
    packet bytes to it.
    """
    data = get_packet(sock)
    if ord(data[0]) == 0x47 and len(data) == 188:
        sys.stdout.write(".")
    else:
        sys.stdout.write("[%x]/%d"%(ord(data[0]),len(data)))
    sys.stdout.flush()
    global total_packets
    total_packets += 1
    if file:
        file.write(data)
def give_command(sock,command="n",file=None,howmany=None):
    """Give the command specified, and then read data packets.

    If `howmany` is specified, try to read that many packets (and return
    thereafter), otherwise, just keep trying to read.  Packets are
    optionally appended to `file` (parameter shadows the builtin).
    Raises DoneException if there is no more data to read.
    """
    if howmany is None:
        print "Sending command '%s' and listening"%command
    else:
        print "Sending command '%s' and listening for %d packets"%(command,
                                                                   howmany)
    sock.send(command)
    if howmany is None:
        # No count: read until the server closes (DoneException escapes).
        while 1:
            read_next_packet(sock,file)
    else:
        try:
            for count in range(howmany):
                read_next_packet(sock,file)
        except DoneException:
            # `count` is the number of packets successfully read before
            # the stream ended.
            sys.stdout.write("\n")
            sys.stdout.write("Finished listening after %d packets"%count)
            raise DoneException
        except socket.error, val:
            # e.g. EWOULDBLOCK when the socket is in non-blocking mode.
            print "socket.error:",val
            raise DoneException
    print
def main():
global total_packets
total_packets = 0
host = "localhost"
port = 8889
stream = filename = None
nonblock = 0
argv = sys.argv[1:]
if len(argv) == 0:
print __doc__
return
while len(argv) > 0 and argv[0].startswith("-"):
if argv[0] in ("-h", "-help", "--help"):
print __doc__
return
elif argv[0] == "-host":
host = argv[1]
argv = argv[2:]
elif argv[0] == "-port":
port = int(argv[1])
argv = argv[2:]
elif argv[0] in ("-file", "-output"):
filename = argv[1]
argv = argv[2:]
elif argv[0] in ("-nonblock"):
nonblock = 1
argv = argv[1:]
else:
print "Unexpected switch",argv[0]
return
commands = []
if len(argv) == 0:
print "No commands specified - assuming 'n'ormal play"
commands = [("n",None)]
command = None
count = 0
for word in argv:
if command: # we have a command waiting for a count
try:
count = int(word)
except:
print "'%s' does not work as a count for command '%s'"%(word,command)
return
commands.append((command,count))
command = None
elif word in ("p", ">", "<"): # commands that don't take a count
commands.append((word,0))
command = None
elif word in ("q"): # commands that read the rest of input
commands.append((word,None))
command = None
elif word in ("n","F","f","r","R",
"0", "1", "2", "3", "4", "5", "6", "7", "8", "9"): # commands that do take a count
command = word
else:
print "Unrecognised command '%s'"%word
if command:
commands.append((command,None))
print "Commands:", commands
sock = socket.socket(socket.AF_INET,socket.SOCK_STREAM)
print "Connecting to %s on port %d"%(host,port)
sock.connect((host,port))
if filename:
print "Writing output to file %s"%filename
stream = file(filename,"wb")
if nonblock:
sock.setblocking(0)
try:
for command,count in commands:
give_command(sock,command=command,file=stream,howmany=count)
except (KeyboardInterrupt, DoneException):
if stream:
stream.close()
sys.stdout.write("\n")
sys.stdout.write("Total packets: %d\n"%total_packets)
sock.close()
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
print
| Python |
"""
>>> from gmapi import maps
# Test Map creation.
>>> m = maps.Map()
>>> m
{'arg': ['div'], 'cls': 'Map'}
# Test setting and getting the map center.
>>> m.setCenter(maps.LatLng(38, -97))
>>> m.getCenter()
{'arg': [38, -97], 'cls': 'LatLng'}
# Test setting the map type.
>>> m.setMapTypeId(maps.MapTypeId.ROADMAP)
>>> m.getMapTypeId()
{'val': 'MapTypeId.ROADMAP'}
# Test setting and getting the zoom.
>>> m.setZoom(3)
>>> m.getZoom()
3
# Test LatLngBounds creation.
>>> b = maps.LatLngBounds(maps.LatLng(18, -119), maps.LatLng(53, -74))
>>> b
{'arg': [{'arg': [18, -119], 'cls': 'LatLng'}, {'arg': [53, -74], 'cls': 'LatLng'}], 'cls': 'LatLngBounds'}
# Test setting multiple options at once.
>>> m.setOptions({'center': maps.LatLng(0, 0), 'zoom': 4, 'mapTypeId': maps.MapTypeId.SATELLITE})
>>> m
{'arg': ['div', {'mapTypeId': {'val': 'MapTypeId.SATELLITE'}, 'center': {'arg': [0, 0], 'cls': 'LatLng'}, 'zoom': 4}], 'cls': 'Map'}
# Test creating a marker.
>>> k = maps.Marker()
>>> k.setPosition(maps.LatLng(38, -97))
>>> k.setMap(m)
>>> k
{'arg': [{'position': {'arg': [38, -97], 'cls': 'LatLng'}}], 'cls': 'Marker'}
# Make sure the marker was added to the map.
>>> m
{'arg': ['div', {'mapTypeId': {'val': 'MapTypeId.SATELLITE'}, 'center': {'arg': [0, 0], 'cls': 'LatLng'}, 'zoom': 4}], 'mkr': [{'arg': [{'position': {'arg': [38, -97], 'cls': 'LatLng'}}], 'cls': 'Marker'}], 'cls': 'Map'}
"""
| Python |
from django.utils.http import urlquote_plus
def urlencode(query, doseq=0, safe=''):
    """Custom urlencode that leaves static map delimiters ("|", ",", ":") alone.

    Based on Django's unicode-safe version of urllib.quote_plus.  When
    `doseq` is true, non-string sequence values are expanded into one
    key=value pair per element.
    """
    safe = safe + '|,:'
    if hasattr(query, 'items'):
        query = query.items()
    pairs = []
    for key, raw in query:
        if isinstance(raw, basestring):
            values = [raw]
        elif doseq and hasattr(raw, '__len__') and raw:
            values = raw
        else:
            values = [raw]
        for value in values:
            pairs.append(urlquote_plus(key, safe) + '=' +
                         urlquote_plus(value, safe))
    return '&'.join(pairs)
| Python |
"""URL pattern for serving static media. Use only to DEBUG!
Add something like the following to the bottom of your urls.py:
from django.conf import settings
if settings.DEBUG:
urlpatterns = patterns('',
(r'', include('gmapi.urls.media')),
) + urlpatterns
"""
from os import path
from django.conf import settings
from django.conf.urls.defaults import *
from urlparse import urljoin
# Same rules apply as regular MEDIA_ROOT.
MEDIA_ROOT = getattr(settings, 'GMAPI_MEDIA_ROOT',
path.abspath(path.join(path.dirname(
path.dirname(__file__)), 'media', 'gmapi')))
# Same rules apply as ADMIN_MEDIA_PREFIX.
# Omit leading slash to make relative to MEDIA_URL.
MEDIA_PREFIX = getattr(settings, 'GMAPI_MEDIA_PREFIX', 'gmapi/')
if MEDIA_PREFIX.startswith('http://') or MEDIA_PREFIX.startswith('https://'):
urlpatterns = []
else:
urlpatterns = patterns('',
(r'^%s(?P<path>.*)$' %
urljoin(settings.MEDIA_URL, MEDIA_PREFIX).lstrip('/'),
'django.views.static.serve', {'document_root': MEDIA_ROOT}),
)
| Python |
"""Implements the Google Maps API v3."""
import time
import urllib
from django.conf import settings
from django.core.cache import cache
from django.utils.encoding import force_unicode, smart_str
from django.utils.simplejson import loads
from gmapi.utils.http import urlencode
STATIC_URL = getattr(settings, 'GMAPI_STATIC_URL',
'http://maps.google.com/maps/api/staticmap')
ELEVATION_URL = getattr(settings, 'GMAPI_ELEVATION_URL',
'http://maps.googleapis.com/maps/api/elevation')
GEOCODE_URL = getattr(settings, 'GMAPI_GEOCODE_URL',
'http://maps.google.com/maps/api/geocode')
CHART_URL = getattr(settings, 'GMAPI_CHART_URL',
'http://chart.apis.google.com/chart')
class MapClass(dict):
    """A base class for Google Maps API classes.

    Subclasses declare `_getopts`/`_setopts`, mapping accessor method
    names (e.g. 'getZoom') to option keys (e.g. 'zoom'); __getattr__
    then synthesizes the corresponding getter/setter on demand.
    """
    _getopts = {}  # accessor method name -> option key
    _setopts = {}  # mutator method name -> option key
    def __getattr__(self, item):
        """Handle generic get and set option methods."""
        if 'arg' in self:
            if item in self._getopts:
                key = self._getopts[item]
                def func():
                    return self['arg'].get('opts', {}).get(key)
                return func
            if item in self._setopts:
                key = self._setopts[item]
                def func(value):
                    # Call setOptions so that it can be overridden.
                    self.setOptions({key: value})
                return func
        raise AttributeError, item
    def __str__(self):
        """Handle string conversion."""
        if hasattr(self, '__unicode__'):
            return force_unicode(self).encode('utf-8')
        return '%s object' % self.__class__.__name__
    def setOptions(self, opts):
        # Merge `opts` into the 'opts' entry of the constructor args.
        if 'arg' in self and opts:
            self['arg'].setdefault('opts', {}).update(opts)
class MapConstant(MapClass):
    """Read-only wrapper for a special Google Maps constant.

    When parsed by JSONEncoder and subsequently by our custom jQuery
    plugin, it is converted to the actual client-side constant value.
    """
    def __init__(self, klass, name):
        super(MapConstant, self).__init__(val='%s.%s' % (klass, name))
        self.const = name
    def __setitem__(self, key, value):
        # Constants are immutable: reject any item assignment.
        raise KeyError(key)
    def __unicode__(self):
        return self.const.lower()
class MapConstantClass(object):
    """Factory namespace: one MapConstant attribute per named constant."""
    def __init__(self, name, constants):
        for constant in constants:
            setattr(self, constant, MapConstant(name, constant))
class Map(MapClass):
    """A Google Map.
    Equivalent to google.maps.Map. When parsed by JSONEncoder
    and subsequently by our custom jQuery plugin, it will be
    converted to an actual google.maps.Map instance.
    """
    # Generic accessors synthesized by MapClass.__getattr__.
    _getopts = {
        'getCenter': 'center',
        'getMapTypeId': 'mapTypeId',
        'getZoom': 'zoom',
    }
    _setopts = {
        'setCenter': 'center',
        'setMapTypeId': 'mapTypeId',
        'setZoom': 'zoom',
    }
    def __init__(self, opts=None):
        """mapDiv is not used, so not included in parameters."""
        super(Map, self).__init__(cls='Map')
        self['arg'] = Args(['mapDiv', 'opts'], ['div'])
        self.setOptions(opts)
    def __unicode__(self):
        """Produces a static map image url.
        Don't forget to set the map option 'size' (as an instance
        of maps.Size). Or alternatively you can append it to the
        resulting string (e.g. '&size=400x400').
        """
        opts = self['arg'].get('opts', {})
        params = []
        for p in ['center', 'zoom', 'size', 'format', 'language']:
            if p in opts:
                params.append((p, unicode(opts[p])))
        if 'mapTypeId' in opts:
            params.append(('maptype', unicode(opts['mapTypeId'])))
        if 'visible' in opts:
            params.append(('visible', '|'.join([unicode(v)
                                                for v in opts['visible']])))
        # 'mkr'/'pln'/'pgn' lists are maintained by Marker/Polyline/
        # Polygon setMap(); each overlay encodes its own parameter value.
        if 'mkr' in self:
            params.append(('markers', [unicode(m) for m in self['mkr']]))
        if 'pln' in self:
            params.append(('path', [unicode(p) for p in self['pln']]))
        if 'pgn' in self:
            # A Polygon may encode to several '&path=' segments; re-split
            # so each becomes its own query parameter.
            params.append(('path', [q for p in self['pgn']
                                    for q in unicode(p).split('&path=')]))
        params.append(('sensor', 'true' if opts.get('sensor') else 'false'))
        return '%s?%s' % (STATIC_URL, urlencode(params, doseq=True))
    # Read-only views of the attached overlay lists.
    def _markers(self):
        return self.get('mkr', [])
    markers = property(_markers)
    def _polylines(self):
        return self.get('pln', [])
    polylines = property(_polylines)
    def _polygons(self):
        return self.get('pgn', [])
    polygons = property(_polygons)
# Constant namespaces mirroring the google.maps.* enums.
MapTypeId = MapConstantClass('MapTypeId',
                             ('HYBRID', 'ROADMAP', 'SATELLITE', 'TERRAIN',))
MapTypeControlStyle = MapConstantClass('MapTypeControlStyle',
                                       ('DEFAULT', 'DROPDOWN_MENU',
                                        'HORIZONTAL_BAR',))
NavigationControlStyle = MapConstantClass('NavigationControlStyle',
                                          ('ANDROID', 'DEFAULT', 'SMALL',
                                           'ZOOM_PAN',))
ScaleControlStyle = MapConstantClass('ScaleControlStyle', ('DEFAULT',))
ControlPosition = MapConstantClass('ControlPosition',
                                   ('BOTTOM', 'BOTTOM_LEFT', 'BOTTOM_RIGHT',
                                    'LEFT', 'RIGHT', 'TOP', 'TOP_LEFT',
                                    'TOP_RIGHT',))
class Marker(MapClass):
    """A Google Marker.
    Equivalent to google.maps.Marker. When parsed by JSONEncoder
    and subsequently by our custom jQuery plugin, it will be
    converted to an actual google.maps.Marker instance.

    Extra non-API options 'size', 'color' and 'label' style the marker
    via the Google Charts pin service; setting color/label synthesizes
    the 'icon' and 'shadow' options automatically.
    """
    _getopts = {
        'getClickable': 'clickable',
        'getCursor': 'cursor',
        'getDraggable': 'draggable',
        'getFlat': 'flat',
        'getIcon': 'icon',
        'getPosition': 'position',
        'getShadow': 'shadow',
        'getShape': 'shape',
        'getTitle': 'title',
        'getVisible': 'visible',
        'getZIndex': 'zIndex',
    }
    _setopts = {
        'setClickable': 'clickable',
        'setCursor': 'cursor',
        'setDraggable': 'draggable',
        'setFlat': 'flat',
        'setIcon': 'icon',
        'setMap': 'map',
        'setPosition': 'position',
        'setShadow': 'shadow',
        'setShape': 'shape',
        'setTitle': 'title',
        'setVisible': 'visible',
        'setZIndex': 'zIndex',
    }
    def __init__(self, opts=None):
        super(Marker, self).__init__(cls='Marker')
        self._map = None    # owning Map (kept out of the JSON dict)
        self._size = None   # static-map style shortcuts
        self._color = None
        self._label = None
        self['arg'] = Args(['opts'])
        self.setOptions(opts)
    def __unicode__(self):
        """Encode this marker as a static-maps 'markers' parameter value."""
        opts = self['arg'].get('opts', {})
        params = []
        if self._size:
            params.append('size:%s' % self._size)
        if self._color or self._label:
            if self._color:
                params.append('color:%s' % self._color)
            if self._label:
                params.append('label:%s' % self._label)
        elif 'icon' in opts:
            params.append('icon:%s' % opts['icon'])
            if 'shadow' in opts:
                # BUGFIX: the conditional expression must be parenthesized;
                # previously `'shadow:%s' % 'true' if ... else 'false'`
                # appended a bare 'false' (no 'shadow:' prefix) whenever
                # opts['shadow'] was falsy.
                params.append('shadow:%s' %
                              ('true' if opts['shadow'] else 'false'))
        if 'position' in opts:
            params.append(unicode(opts['position']))
        return '|'.join(params)
    def getMap(self):
        """Return the Map this marker is attached to (or None)."""
        return self._map
    def setOptions(self, options):
        """Apply `options`, keeping the owning map's marker list in sync
        and synthesizing chart-pin icon/shadow from color/label."""
        if options and 'map' in options:
            if self._map:
                # Remove this marker from the map.
                self._map['mkr'].remove(self)
            # Save new map reference.
            self._map = options.pop('map')
            if self._map:
                # Add this marker to the map.
                self._map.setdefault('mkr', []).append(self)
        if options:
            self._size = options.pop('size', self._size)
            self._color = options.pop('color', self._color)
            self._label = options.pop('label', self._label)
            if (self._color or self._label) and 'icon' not in options:
                l = self._label or u'\u2022'
                # BUGFIX: lstrip('0x') strips *any* leading '0'/'x'
                # characters (so '00FF00' became 'FF00'); only a literal
                # '0x' prefix should be removed.
                c = self._color or 'FF776B'
                if c.startswith('0x'):
                    c = c[2:]
                options['icon'] = MarkerImage('%s?chst=d_map_pin_letter'
                                              '&chld=%s|%s' % (CHART_URL, l, c),
                                              anchor=Point(10, 33))
                options['shadow'] = MarkerImage('%s?chst=d_map_pin_shadow' %
                                                CHART_URL, anchor=Point(12, 35))
            elif 'icon' in options:
                # An explicit icon overrides any color/label shortcut.
                self._color = None
                self._label = None
        super(Marker, self).setOptions(options)
class MarkerImage(MapClass):
    """An image to be used as the icon or shadow for a Marker.

    Equivalent to google.maps.MarkerImage; converted to an actual
    google.maps.MarkerImage instance by JSONEncoder plus the custom
    jQuery plugin.
    """
    def __init__(self, url, size=None, origin=None, anchor=None,
                 scaledSize=None):
        super(MarkerImage, self).__init__(cls='MarkerImage')
        self['arg'] = Args(['url', 'size', 'origin', 'anchor', 'scaledSize'],
                           [url])
        # Record only the optional constructor arguments actually given.
        optional = (('size', size), ('origin', origin),
                    ('anchor', anchor), ('scaledSize', scaledSize))
        for key, value in optional:
            if value:
                self['arg'].setdefault(key, value)
    def __unicode__(self):
        return self['arg'].get('url')
class Polyline(MapClass):
    """A Google Polyline.
    Equivalent to google.maps.Polyline. When parsed by JSONEncoder
    and subsequently by our custom jQuery plugin, it will be
    converted to an actual google.maps.Polyline instance.
    """
    _getopts = {
        'getPath': 'path',
    }
    _setopts = {
        'setMap': 'map',
        'setPath': 'path',
    }
    def __init__(self, opts=None):
        super(Polyline, self).__init__(cls='Polyline')
        # Back-reference to the owning Map (set via setMap/setOptions).
        self._map = None
        self['arg'] = Args(['opts'])
        self.setOptions(opts)
    def __unicode__(self):
        """Encode this polyline as a static-maps 'path' parameter value."""
        opts = self['arg'].get('opts', {})
        params = []
        if 'strokeColor' in opts:
            color = 'color:0x%s' % opts['strokeColor'].lstrip('#').lower()
            if 'strokeOpacity' in opts:
                # Opacity becomes a two-digit hex alpha suffix (clamped).
                color += '%02x' % min(max(opts['strokeOpacity'] * 255, 0), 255)
            params.append(color)
        if 'strokeWeight' in opts:
            params.append('weight:%d' % opts['strokeWeight'])
        if 'path' in opts:
            params.append('|'.join([unicode(p) for p in opts['path']]))
        return '|'.join(params)
    def getMap(self):
        """Return the Map this polyline is attached to (or None)."""
        return self._map
    def setOptions(self, options):
        # Intercept the 'map' option to keep the map's 'pln' list in sync.
        if options and 'map' in options:
            if self._map:
                # Remove this polyline from the map.
                self._map['pln'].remove(self)
            # Save new map reference.
            self._map = options.pop('map')
            if self._map:
                # Add this polyline to the map.
                self._map.setdefault('pln', []).append(self)
        super(Polyline, self).setOptions(options)
class Polygon(MapClass):
    """A Google Polygon.
    Equivalent to google.maps.Polygon. When parsed by JSONEncoder
    and subsequently by our custom jQuery plugin, it will be
    converted to an actual google.maps.Polygon instance.
    """
    _getopts = {
        'getPaths': 'paths',
    }
    _setopts = {
        'setMap': 'map',
        'setPaths': 'paths',
    }
    def __init__(self, opts=None):
        super(Polygon, self).__init__(cls='Polygon')
        # Back-reference to the owning Map (set via setMap/setOptions).
        self._map = None
        self['arg'] = Args(['opts'])
        self.setOptions(opts)
    def __unicode__(self):
        """Encode this polygon as one or more static-maps 'path' values."""
        opts = self['arg'].get('opts', {})
        params = []
        paths = []
        if 'fillColor' in opts:
            fillcolor = ('fillcolor:0x%s' %
                         opts['fillColor'].lstrip('#').lower())
            if 'fillOpacity' in opts:
                # Opacity becomes a two-digit hex alpha suffix (clamped).
                fillcolor += ('%02x' %
                              min(max(opts['fillOpacity'] * 255, 0), 255))
            params.append(fillcolor)
        if 'strokeColor' in opts:
            color = 'color:0x%s' % opts['strokeColor'].lstrip('#').lower()
            if 'strokeOpacity' in opts:
                color += '%02x' % min(max(opts['strokeOpacity'] * 255, 0), 255)
            params.append(color)
        if 'strokeWeight' in opts:
            params.append('weight:%d' % opts['strokeWeight'])
        if 'paths' in opts:
            for path in opts['paths']:
                # Close the ring by repeating the first point unless the
                # path already ends where it starts.  NOTE(review): for an
                # already-closed ring this appends an empty element,
                # leaving a trailing '|' in the encoding -- confirm intended.
                loop = ['' if path[-1].equals(path[0]) else unicode(path[0])]
                paths.append('|'.join(params + [unicode(p) for p in path] +
                                      loop))
        return '&path='.join(paths)
    def getMap(self):
        """Return the Map this polygon is attached to (or None)."""
        return self._map
    def getPath(self):
        """Convenience accessor for the first (or only) path."""
        return (self.getPaths() or [None])[0]
    def setOptions(self, options):
        # Intercept the 'map' option to keep the map's 'pgn' list in sync.
        if options and 'map' in options:
            if self._map:
                # Remove this polygon from the map.
                self._map['pgn'].remove(self)
            # Save new map reference.
            self._map = options.pop('map')
            if self._map:
                # Add this polygon to the map.
                self._map.setdefault('pgn', []).append(self)
        super(Polygon, self).setOptions(options)
    def setPath(self, path):
        """Convenience mutator: wrap a single path in a list."""
        self.setPaths([path])
class InfoWindow(MapClass):
    """A Google InfoWindow.

    Equivalent to google.maps.InfoWindow. When parsed by JSONEncoder
    and subsequently by our custom jQuery plugin, it will be
    converted to an actual google.maps.InfoWindow instance.
    """
    # Accessors resolved dynamically (presumably by MapClass) to the
    # option keys they read/write -- TODO confirm.
    _getopts = {
        'getContent': 'content',
        'getPosition': 'position',
        'getZIndex': 'zIndex',
    }
    _setopts = {
        'setContent': 'content',
        'setPosition': 'position',
        'setZIndex': 'zIndex',
    }
    def __init__(self, opts=None):
        super(InfoWindow, self).__init__(cls='InfoWindow')
        self['arg'] = Args(['opts'])
        self.setOptions(opts)
    def open(self, map, anchor=None):
        """Link this InfoWindow to a Marker and/or Map."""
        if anchor:
            # Make sure the marker is assigned to the specified map.
            anchor.setMap(map)
            # Stored under 'nfo'; the JS side opens it on the anchor.
            anchor['nfo'] = self
        else:
            # No anchor: attach the window directly to the map.
            map['nfo'] = self
class Elevation(object):
    """A service to get the elevation of a LatLng.

    This is equivalent to using google.maps.ElevationService except
    that it makes use of the Web Service. You should always use the
    javascript API version in preference to this one as query
    limits are per IP. The javascript API uses the client's IP
    and thus is much less likely to hit any limits.

    http://code.google.com/apis/maps/documentation/elevation/
    """
    # Handle blocking and sleeping at class level.
    # _block: set once the quota looks exhausted so later calls fail
    # fast; _sleep: inter-request delay in seconds, grown by 0.1 per
    # OVER_QUERY_LIMIT response; _last: timestamp of the last request.
    _block = False
    _sleep = 0
    _last = 0
    def elevation(self, request, callback=None):
        """Gets the elevation of a request.

        Unlike the javascript API, this method is blocking. So, even
        though a callback function is supported, the method will also
        return the results and status directly.

        Returns a (results, status) tuple; results is None unless
        status == 'OK'. Raises SystemError once the service appears
        permanently over quota.
        """
        # Add the sensor parameter if needed.
        if 'sensor' in request:
            if request['sensor'] != 'false':
                # Normalize truthy/falsy values to the literal strings
                # 'true'/'false' expected by the web service.
                request['sensor'] = 'true' if request['sensor'] else 'false'
        else:
            request['sensor'] = 'false'
        encoded_request = urlencode(request)
        url = '%s/json?%s' % (ELEVATION_URL, encoded_request)
        # The full request URL doubles as the cache key.
        cache_key = url
        # Try up to 30 times if over query limit.
        for _ in xrange(30):
            # Check if result is already cached.
            data = cache.get(cache_key)
            if data is None:
                if (max(0, time.time() - self.__class__._last) <
                    self.__class__._sleep):
                    # Wait a bit so that we don't make requests too fast.
                    time.sleep(max(0, self.__class__._sleep +
                                   self.__class__._last - time.time()))
                data = urllib.urlopen(url).read()
                self.__class__._last = time.time()
            response = loads(data)
            status = response['status']
            if status == 'OVER_QUERY_LIMIT':
                # Over limit, increase delay a bit.
                if self.__class__._block:
                    # Quota already deemed exhausted; fail fast below.
                    break
                self.__class__._sleep += .1
            else:
                # Save results to cache.
                cache.set(cache_key, data)
                if status == 'OK':
                    # Successful query, clear block if there is one.
                    if self.__class__._block:
                        self.__class__._block = False
                        self.__class__._sleep = 0
                    results = _parseLatLonResult(response['results'])
                    if callback:
                        callback(results, status)
                    return results, status
                else:
                    return None, status
        # 30 straight OVER_QUERY_LIMIT responses (or an existing block):
        # assume the daily quota is exhausted.
        self.__class__._block = True
        raise SystemError('The elevation API has failed too many times. '
                          'You might have exceeded your daily limit.')
class Geocoder(object):
    """A service for converting between an address and a LatLng.

    This is equivalent to using google.maps.Geocoder except that
    it makes use of the Web Service. You should always use the
    javascript API version in preference to this one as query
    limits are per IP. The javascript API uses the client's IP
    and thus is much less likely to hit any limits.

    http://code.google.com/apis/maps/documentation/geocoding/
    """
    # Handle blocking and sleeping at class level.
    # _block: set once the quota looks exhausted so later calls fail
    # fast; _sleep: inter-request delay in seconds, grown by 0.1 per
    # OVER_QUERY_LIMIT response; _last: timestamp of the last request.
    _block = False
    _sleep = 0
    _last = 0
    def geocode(self, request, callback=None):
        """Geocode a request.

        Unlike the javascript API, this method is blocking. So, even
        though a callback function is supported, the method will also
        return the results and status directly.

        Returns a (results, status) tuple; results is None unless
        status == 'OK'. Raises SystemError once the service appears
        permanently over quota.
        """
        # Handle any unicode in the request.
        if 'address' in request:
            request['address'] = smart_str(request['address'],
                                           strings_only=True).lower()
        # Add the sensor parameter if needed.
        if 'sensor' in request:
            if request['sensor'] != 'false':
                # Normalize truthy/falsy values to 'true'/'false'.
                request['sensor'] = 'true' if request['sensor'] else 'false'
        else:
            request['sensor'] = 'false'
        encoded_request = urlencode(request)
        url = '%s/json?%s' % (GEOCODE_URL, encoded_request)
        # The full request URL doubles as the cache key.
        cache_key = url
        # Try up to 30 times if over query limit.
        for _ in xrange(30):
            # Check if result is already cached.
            data = cache.get(cache_key)
            if data is None:
                if (max(0, time.time() - self.__class__._last) <
                    self.__class__._sleep):
                    # Wait a bit so that we don't make requests too fast.
                    time.sleep(max(0, self.__class__._sleep +
                                   self.__class__._last - time.time()))
                data = urllib.urlopen(url).read()
                self.__class__._last = time.time()
            response = loads(data)
            status = response['status']
            if status == 'OVER_QUERY_LIMIT':
                # Over limit, increase delay a bit.
                if self.__class__._block:
                    # Quota already deemed exhausted; fail fast below.
                    break
                self.__class__._sleep += .1
            else:
                # Save results to cache.
                cache.set(cache_key, data)
                if status == 'OK':
                    # Successful query, clear block if there is one.
                    if self.__class__._block:
                        self.__class__._block = False
                        self.__class__._sleep = 0
                    results = _parseLatLonResult(response['results'])
                    if callback:
                        callback(results, status)
                    return results, status
                else:
                    return None, status
        # Persistent OVER_QUERY_LIMIT: assume the daily quota is gone.
        self.__class__._block = True
        raise SystemError('Geocoding has failed too many times. '
                          'You might have exceeded your daily limit.')
def _parseLatLonResult(result):
    """Parse the result of a Google Maps API.

    Traverses the results converting any latitude-longitude pairs
    into instances of LatLng and any SouthWest-NorthEast pairs
    into instances of LatLngBounds.

    Lists and dicts are converted in place (unless they themselves
    become LatLng/LatLngBounds); tuples, being immutable, are rebuilt
    as new tuples -- the previous code attempted item assignment on
    them, which raised TypeError.
    """
    # Check for LatLng objects and convert.
    if (isinstance(result, dict) and 'lat' in result and 'lng' in result):
        result = LatLng(result['lat'], result['lng'])
    # Continue traversing.
    elif isinstance(result, dict):
        for item in result:
            result[item] = _parseLatLonResult(result[item])
        # Check for LatLngBounds objects and convert.
        if ('southwest' in result and 'northeast' in result):
            result = LatLngBounds(result['southwest'], result['northeast'])
    elif isinstance(result, tuple):
        # Tuples cannot be assigned into; build a converted copy.
        result = tuple(_parseLatLonResult(item) for item in result)
    elif isinstance(result, list):
        for index, item in enumerate(result):
            result[index] = _parseLatLonResult(item)
    return result
class MapsEventListener(list):
    """An event registration: [eventName, handlerName(, once)].

    Subclasses list so an ``instance`` attribute can be attached to the
    registration (plain lists cannot carry attributes).
    """
    pass
class _event(object):
    """Listener registry mirroring the google.maps.event namespace.

    Listeners are stored on the instance itself under the 'evt' key;
    the key is removed entirely once the last listener is gone.
    """
    def addListener(self, instance, eventName, handlerName):
        """Register *handlerName* for *eventName* on *instance*."""
        listener = MapsEventListener([eventName, handlerName])
        instance.setdefault('evt', []).append(listener)
        listener.instance = instance
        return listener
    def addListenerOnce(self, instance, eventName, handlerName):
        """Like addListener, but flags the handler to fire only once."""
        listener = MapsEventListener([eventName, handlerName, True])
        instance.setdefault('evt', []).append(listener)
        listener.instance = instance
        return listener
    def clearInstanceListeners(self, instance):
        """Remove every listener registered on *instance*."""
        if 'evt' in instance:
            del instance['evt']
    def clearListeners(self, instance, eventName):
        """Remove all listeners for *eventName* on *instance*.

        Filters the list in place instead of calling remove() while
        iterating, which used to skip the listener that immediately
        followed each removed one.
        """
        if 'evt' in instance:
            instance['evt'][:] = [listener for listener in instance['evt']
                                  if listener[0] != eventName]
            if not instance['evt']:
                del instance['evt']
    def removeListener(self, listener):
        """Deregister a single listener returned by addListener*()."""
        instance = listener.instance
        if 'evt' in instance:
            if listener in instance['evt']:
                instance['evt'].remove(listener)
            if not instance['evt']:
                del instance['evt']
event = _event()
class LatLng(MapClass):
    """A point in geographical coordinates, latitude and longitude.

    Equivalent to google.maps.LatLng. When parsed by JSONEncoder
    and subsequently by our custom jQuery plugin, it will be
    converted to an actual google.maps.LatLng instance.
    """
    def __init__(self, lat, lng, noWrap=None):
        super(LatLng, self).__init__(cls='LatLng')
        # Coordinates are wrapped in Degree so serialization trims to
        # at most 6 decimal places.
        self['arg'] = Args(['lat', 'lng', 'noWrap'], [Degree(lat), Degree(lng)])
        if noWrap is not None:
            self['arg'].setdefault('noWrap', noWrap)
    def __unicode__(self):
        return self.toUrlValue()
    def equals(self, other):
        # Value comparison against another LatLng, not identity.
        return (self.lat() == other.lat() and self.lng() == other.lng())
    def lat(self):
        """Return the latitude in degrees."""
        return self['arg'].get('lat')
    def lng(self):
        """Return the longitude in degrees."""
        return self['arg'].get('lng')
    def toString(self):
        """Return '(lat, lng)' for display."""
        return '(%s, %s)' % (self.lat(), self.lng())
    def toUrlValue(self, precision=6):
        """Return 'lat,lng' trimmed to *precision* decimal places."""
        return '%s,%s' % (Degree(self.lat(), precision),
                          Degree(self.lng(), precision))
class LatLngBounds(MapClass):
    """A rectangle in geographical coordinates.

    Equivalent to google.maps.LatLngBounds. When parsed by
    JSONEncoder and subsequently by our custom jQuery plugin,
    it will be converted to an actual google.maps.LatLngBounds
    instance.
    """
    def __init__(self, sw=None, ne=None):
        """sw/ne: optional LatLng corners (south-west / north-east)."""
        super(LatLngBounds, self).__init__(cls='LatLngBounds')
        self['arg'] = Args(['sw', 'ne'])
        if sw:
            self['arg'].setdefault('sw', sw)
        if ne:
            self['arg'].setdefault('ne', ne)
    def __unicode__(self):
        return self.toUrlValue()
    def equals(self, other):
        # Check if our corners are equal.
        return (self.getSouthWest().equals(other.getSouthWest()) and
                self.getNorthEast().equals(other.getNorthEast()))
    def getNorthEast(self):
        """Return the north-east LatLng corner, or None if unset."""
        return self['arg'].get('ne')
    def getSouthWest(self):
        """Return the south-west LatLng corner, or None if unset."""
        return self['arg'].get('sw')
    def isEmpty(self):
        # Empty when there is no SW corner, or both corners exist but
        # the SW latitude lies above the NE latitude (inverted box).
        return ((not self.getSouthWest()) or
                (self.getNorthEast() and
                 self.getSouthWest().lat() >
                 self.getNorthEast().lat()))
    def toString(self):
        return '(%s, %s)' % (self.getSouthWest().toString(),
                             self.getNorthEast().toString())
    def toUrlValue(self, precision=6):
        """Return 'swlat,swlng,nelat,nelng' with trimmed precision."""
        return '%s,%s' % (self.getSouthWest().toUrlValue(precision),
                          self.getNorthEast().toUrlValue(precision))
class Point(MapClass):
    """A point on a two-dimensional plane.

    Equivalent to google.maps.Point. When parsed by JSONEncoder
    and subsequently by our custom jQuery plugin, it will be
    converted to an actual google.maps.Point instance.
    """
    def __init__(self, x, y):
        super(Point, self).__init__(cls='Point')
        self['arg'] = Args(['x', 'y'], [x, y])
    def __unicode__(self):
        args = self['arg']
        return '%s,%s' % (args.get('x', 0), args.get('y', 0))
    def _getX(self):
        return self['arg'][0]
    def _setX(self, x):
        self['arg'][0] = x
    # Attribute-style access backed by the positional arg list.
    x = property(_getX, _setX)
    def _getY(self):
        return self['arg'][1]
    def _setY(self, y):
        self['arg'][1] = y
    y = property(_getY, _setY)
    def equals(self, other):
        """Value comparison against another Point."""
        return (self.x, self.y) == (other.x, other.y)
    def toString(self):
        """Return '(x, y)' for display."""
        return '(%s, %s)' % (self.x, self.y)
class Size(MapClass):
    """A two-dimensional size.

    Equivalent to google.maps.Size. When parsed by JSONEncoder
    and subsequently by our custom jQuery plugin, it will be
    converted to an actual google.maps.Size instance.
    """
    def __init__(self, width, height, widthUnit=None, heightUnit=None):
        super(Size, self).__init__(cls='Size')
        # Dimensions are coerced to int; the units remain optional.
        self['arg'] = Args(['width', 'height', 'widthUnit', 'heightUnit'],
                           [int(width), int(height)])
        if widthUnit:
            self['arg'].setdefault('widthUnit', widthUnit)
        if heightUnit:
            self['arg'].setdefault('heightUnit', heightUnit)
    def __unicode__(self):
        # 'WIDTHxHEIGHT' form (as used in static-map size parameters).
        return '%sx%s' % (self['arg'].get('width', 0),
                          self['arg'].get('height', 0))
    def _getHeight(self):
        return self['arg'][1]
    def _getWidth(self):
        return self['arg'][0]
    def _setHeight(self, height):
        self['arg'][1] = height
    def _setWidth(self, width):
        self['arg'][0] = width
    def equals(self, other):
        """Value comparison against another Size."""
        return self.width == other.width and self.height == other.height
    def toString(self):
        """Return '(width, height)' for display."""
        return '(%s, %s)' % (self.width, self.height)
    # Attribute-style access backed by the positional arg list.
    height = property(_getHeight, _setHeight)
    width = property(_getWidth, _setWidth)
class Degree(float):
    """A custom float class for degrees of a circle (lat/lng).

    String conversion renders at most ``precision`` decimal places and
    drops trailing fractional zeros ('1.500000' -> '1.5').
    """
    def __new__(cls, value, precision=6):
        # float is immutable, so the numeric value is fixed in __new__.
        return float.__new__(cls, value)
    def __init__(self, value, precision=6):
        # Maximum number of decimal places kept when formatting.
        self.precision = precision
    def __repr__(self):
        text = '%.*f' % (self.precision, self)
        if '.' in text:
            # Trim trailing fractional zeros, then a dangling point.
            # Guarded on '.' so integral output at precision=0 keeps
            # its own zeros (previously '10' was stripped to '1').
            text = text.rstrip('0').rstrip('.')
        return text
    def __str__(self):
        return self.__repr__()
    def __unicode__(self):
        return self.__repr__()
class Args(list):
    """A positional argument list addressable by parameter name.

    ``names`` gives the parameter name for each slot; values may be
    shorter than names, in which case the tail is simply unset.
    """
    def __init__(self, names, values=None):
        super(Args, self).__init__(values or [])
        self.names = names
    def get(self, name, default=None):
        """Return the value stored for *name*, or *default* if unset."""
        index = self.names.index(name)
        if index < len(self):
            return self[index]
        return default
    def setdefault(self, name, default=None):
        """Store *default* for *name* unless a non-None value exists.

        Returns the value at *name*'s slot afterwards.
        """
        index = self.names.index(name)
        if index < len(self) and self[index] is not None:
            return self[index]
        # Pad any gap with None so the value lands at its own slot.
        while len(self) < index:
            self.append(None)
        self.append(default)
        return self[index]
| Python |
"""Custom Map widget."""
from django.conf import settings
from django.forms.forms import Media
from django.forms.util import flatatt
from django.forms.widgets import Widget
from django.utils.html import escape
from django.utils.safestring import mark_safe
from django.utils.simplejson import dumps
from gmapi import maps
from urlparse import urljoin
# Hosted jQuery; the uncompressed build is served while DEBUG is on.
JQUERY_URL = getattr(settings, 'GMAPI_JQUERY_URL',
                     'http://ajax.googleapis.com/ajax/libs/jquery/1.6.2/jquery'
                     '%s.js' % ('' if settings.DEBUG else '.min'))
# Google Maps JavaScript API loader URL.
MAPS_URL = getattr(settings, 'GMAPI_MAPS_URL',
                   'http://maps.google.com/maps/api/js?sensor=false')
# Same rules apply as ADMIN_MEDIA_PREFIX.
# Omit leading slash to make relative to MEDIA_URL.
MEDIA_PREFIX = getattr(settings, 'GMAPI_MEDIA_PREFIX', 'gmapi/')
class GoogleMap(Widget):
    """Form widget rendering a Google Map with a static-image fallback.

    The serialized map state rides in the inner div's class attribute
    (where the jquery.gmapi plugin presumably picks it up -- TODO
    confirm), and a Static-Maps <img> is layered underneath for
    clients without javascript.
    """
    def __init__(self, attrs=None):
        # Widget-only flags, popped so they don't become HTML
        # attributes. NOTE(review): pop() mutates a caller-supplied
        # attrs dict.
        self.nojquery = (attrs or {}).pop('nojquery', False)
        self.nomapsjs = (attrs or {}).pop('nomapsjs', False)
        super(GoogleMap, self).__init__(attrs)
    def render(self, name, gmap, attrs=None):
        """Render *gmap* (a maps.Map; a fresh one when None) as HTML."""
        if gmap is None:
            gmap = maps.Map()
        default_attrs = {'id': name, 'class': 'gmap'}
        if attrs:
            default_attrs.update(attrs)
        final_attrs = self.build_attrs(default_attrs)
        # Pixel dimensions come from attrs, with defaults of 500x400.
        width = final_attrs.pop('width', 500)
        height = final_attrs.pop('height', 400)
        style = (u'position:relative;width:%dpx;height:%dpx;' %
                 (width, height))
        final_attrs['style'] = style + final_attrs.get('style', '')
        # JSON-encoded map state stored in the div's class attribute.
        map_div = (u'<div class="%s" style="position:absolute;'
                   u'width:%dpx;height:%dpx"></div>' %
                   (escape(dumps(gmap, separators=(',', ':'))),
                    width, height))
        # Fallback image; escape(gmap) relies on the map's string form
        # (presumably a Static Maps URL -- TODO confirm).
        map_img = (u'<img style="position:absolute;z-index:1" '
                   u'width="%(x)d" height="%(y)d" alt="Google Map" '
                   u'src="%(map)s&size=%(x)dx%(y)d" />' %
                   {'map': escape(gmap), 'x': width, 'y': height})
        return mark_safe(u'<div%s>%s%s</div>' %
                         (flatatt(final_attrs), map_div, map_img))
    def _media(self):
        # Optional jQuery + Maps JS, then our plugin (minified unless
        # DEBUG is on).
        js = []
        if not self.nojquery:
            js.append(JQUERY_URL)
        if not self.nomapsjs:
            js.append(MAPS_URL)
        js.append(urljoin(MEDIA_PREFIX, 'js/jquery.gmapi%s.js' %
                          ('' if settings.DEBUG else '.min')))
        return Media(js=js)
    media = property(_media)
| Python |
from distutils.core import setup
import os
# Compile the list of packages available, because distutils doesn't have
# an easy way to do this.
packages, data_files = [], []
root_dir = os.path.dirname(__file__)
if root_dir:
    os.chdir(root_dir)
for dirpath, dirnames, filenames in os.walk('gmapi'):
    # Prune hidden directories in place so os.walk skips them.
    # (The previous `del dirnames[i]` inside enumerate() skipped the
    # entry following each deletion and, once offsets shifted, could
    # delete the wrong directory.)
    dirnames[:] = [d for d in dirnames if not d.startswith('.')]
    if '__init__.py' in filenames:
        # A Python package: record its dotted name.
        pkg = dirpath.replace(os.path.sep, '.')
        if os.path.altsep:
            pkg = pkg.replace(os.path.altsep, '.')
        packages.append(pkg)
    elif filenames:
        # A plain data directory: record files relative to the package.
        prefix = dirpath[6:]  # Strip "gmapi/" or "gmapi\"
        for f in filenames:
            data_files.append(os.path.join(prefix, f))
setup(name='django-gmapi',
      version='1.0.1',
      description='A Google Maps API implementation for Django',
      author='David Bennett',
      author_email='ungenio@gmail.com',
      url='http://code.google.com/p/django-gmapi/',
      packages=packages,
      package_data={'gmapi': data_files},
      classifiers=['Development Status :: 4 - Beta',
                   'Environment :: Web Environment',
                   'Framework :: Django',
                   'Intended Audience :: Developers',
                   'License :: OSI Approved :: MIT License',
                   'Operating System :: OS Independent',
                   'Programming Language :: Python',
                   'Topic :: Software Development :: '
                   'Libraries :: Python Modules',
                   'Topic :: Utilities'],
      )
| Python |
# -*- coding: utf-8 -*-
"""Setup script for django-gae2django."""
import os
from distutils.core import setup
def find_packages(base_dir):
    """Yield *base_dir* and every directory beneath it, depth-first.

    Directory names starting with '.' are skipped entirely, including
    everything below them. Parents are yielded before children, in
    os.listdir() order.
    """
    yield base_dir
    for entry in os.listdir(base_dir):
        if entry.startswith('.'):
            continue
        candidate = os.path.join(base_dir, entry)
        if os.path.isdir(candidate):
            for package in find_packages(candidate):
                yield package
# Package metadata; packages are discovered by walking the gae2django
# source tree recursively.
setup(
    name='django-gae2django',
    version='0.1',
    description='Django-based implementation of App Engine APIs',
    author='Andi Albrecht',
    author_email='albrecht.andi@gmail.com',
    url='http://code.google.com/p/django-gae2django/',
    packages=list(find_packages('gae2django')),
    license='Apache',
    classifiers=[
        'Development Status :: 4 - Beta',
        'Environment :: Web Environment',
        'Framework :: Django',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Programming Language :: Python',
        'Programming Language :: Python :: 2.5',
        'Programming Language :: Python :: 2.6',
    ],
)
| Python |
#!/usr/bin/env python
# Django management entry point for a gae2django-based project.
# gae2django.install() must run before django.core.management is used,
# so App Engine imports resolve to the Django-backed implementation.
import gae2django
gae2django.install()
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| Python |
from django.db import models
from gaeapi.appengine.ext import db
class RefTestModel(db.Model):
    """Referenced model; target of RegressionTestModel.ref."""
    # Simple string payload.
    value = db.StringProperty()
class RegressionTestModel(db.Model):
    """Model exercising the emulated App Engine property types."""
    xstring = db.StringProperty()
    xlist = db.ListProperty(str)  # list of str values
    xuser = db.UserProperty(auto_current_user_add=True)  # filled on create
    ref = db.ReferenceProperty(RefTestModel)  # cross-model reference
    blob = db.BlobProperty()  # raw binary payload
| Python |
#!/usr/bin/python2.4
#
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Loads all the test_*.py files into the top level of the package.
This file is a hack around the fact that Django expects the tests "module" to
be a single tests.py file and cannot handle a tests package inside an
application.
All test_*.py files inside this package are imported and any classes derived
from unittest.TestCase are then referenced from this file itself so that they
appear at the top level of the tests "module" that Django will import.
"""
import os
import re
import sys
import types
import unittest
from django.conf import settings
TEST_RE = r"^test_.*.py$"
# Search through every file inside this package.
test_names = []
test_dir = os.path.dirname(__file__)
for filename in os.listdir(test_dir):
if not re.match(TEST_RE, filename):
continue
# Import the test file and find all TestClass clases inside it.
test_module = __import__('gae2django.tests.%s' %
filename[:-3], {}, {},
filename[:-3])
for name in dir(test_module):
item = getattr(test_module, name)
if not (isinstance(item, (type, types.ClassType)) and
issubclass(item, unittest.TestCase)):
continue
# Found a test, bring into the module namespace.
exec "%s = item" % name
test_names.append(name)
# Hide everything other than the test cases from other modules.
__all__ = test_names
def test_runner_with_coverage(test_labels, verbosity=1, interactive=True,
extra_tests=[]):
# This doesn't work with Django 1.4
from django.test.simple import run_tests as django_test_runner
import coverage
coverage.use_cache(0)
coverage.start()
test_results = django_test_runner(test_labels, verbosity, interactive,
extra_tests)
coverage.stop()
coverage_modules = [m.__file__ for k, m in sys.modules.iteritems()
if m and k.split('.')[0] in test_labels
and 'test' not in k]
print
print '='*80
print 'Coverage results for %s' % ', '.join(test_labels)
print '='*80
coverage.report(coverage_modules, show_missing=1)
coverage_html_dir = getattr(settings, 'COVERAGE_HTML_DIR', None)
if coverage_html_dir is not None:
coverage._the_coverage.html_report(coverage_modules,
coverage_html_dir)
return test_results
| Python |
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import threading
from django.conf import settings
from gae2django.utils import CallableString, patch_user
_thread_locals = threading.local()
# get_current_user hack found here:
# http://lukeplant.me.uk/blog.php?id=1107301634
def get_current_user():
    """Return the user bound to the current thread, or None.

    Anonymous users are reported as None, matching what callers of the
    users API expect.
    """
    current = getattr(_thread_locals, 'user', None)
    return None if current and current.is_anonymous() else current
class FixRequestUserMiddleware(object):
    """Stores request.user thread-locally and App-Engine-ifies it.

    Authenticated users are patched (callable email, nickname,
    user_id); anonymous users get empty callable email/nickname so
    App Engine-style code can call them safely.
    """
    def process_request(self, request):
        # Remember the user for get_current_user().
        _thread_locals.user = getattr(request, 'user', None)
        if getattr(request, 'user', None) is None:
            return
        if not request.user.is_anonymous():
            patch_user(request.user)
            try:
                profile = request.user.get_profile()
                if hasattr(profile, 'nickname'):
                    request.user.nickname = CallableString(profile.nickname)
            except Exception:
                # Best effort: the user may simply have no profile.
                # Narrowed from a bare except, which also swallowed
                # KeyboardInterrupt/SystemExit.
                pass
        else:
            request.user.email = CallableString()
            request.user.nickname = CallableString()
| Python |
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import new
class CallableString(unicode):
    """Helper class providing a callable unicode string.

    This helper class is used to simulate a hybrid user.email attribute.
    App Engine requires this attribute to be callable, returning a string.
    Django expects just a string here.
    CallableString aims to solve this problem.
    """
    def __call__(self):
        # App Engine code invokes user.email(); return the plain value.
        return unicode(self)
    def id(self):
        """Return the numeric suffix of a '<name>_<id>' string, or None."""
        try:
            return int(self.split('_')[-1])
        except ValueError:
            # No numeric suffix. Narrowed from a bare except, which
            # also hid unrelated errors (and KeyboardInterrupt).
            return None
def patch_user(user):
    """Patches an instance of Django's builtin User model.

    Makes the user quack like an App Engine users.User: callable
    ``email``, a ``nickname`` attribute and a ``user_id()`` method.
    Each attribute is only added when missing, so repeated calls are
    cheap and idempotent.
    """
    if not isinstance(user.email, CallableString):
        user.email = CallableString(user.email)
    if not hasattr(user, 'nickname'):
        nickname = CallableString()
        # TODO(andi): Commented since it's a performance killer.
        # All tests pass and at least Rietveld seems to run fine.
        # I'll leave it in the sources in case it comes up again...
        # try:
        #     profile = user.get_profile()
        #     if hasattr(profile, 'nickname'):
        #         nickname = CallableString(profile.nickname)
        # except:
        #     pass
        user.nickname = nickname
    if not hasattr(user, 'user_id'):
        # Bind an instance method returning the primary key, mimicking
        # users.User.user_id() (Python 2 'new' module).
        user.user_id = new.instancemethod(lambda u: u.id,
                                          user, user.__class__)
| Python |
from django.http import HttpResponse
from django.template import Context, Template
from gaeapi.appengine.api import users
def test(request):
    """Minimal view used to smoke-test the users API integration."""
    context = Context({'user': request.user,
                       'is_admin': users.is_current_user_admin()})
    template = Template('Test view')
    return HttpResponse(template.render(context))
| Python |
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides a pure Django implementation of Google's App Engine API."""
import logging
import os
import sys
from gae2django.utils import CallableString
def install(server_software='gae2django'):
    """Imports the API and makes it available as 'google.appengine'.

    Aliases the gaeapi package under both the 'google' and 'gaeapi'
    module names so that ``import google.appengine...`` resolves to
    the Django-backed implementation, sets SERVER_SOFTWARE, and
    registers the psycopg2 adapter for CallableString.
    """
    import gaeapi
    # Alias so 'google.appengine.*' imports hit the gaeapi package.
    sys.modules['google'] = gaeapi
    sys.modules['gaeapi'] = gaeapi
    os.environ['SERVER_SOFTWARE'] = server_software
    _install_pg_adapter()
def _install_pg_adapter():
    """Install a psycopg2 adapter to make use of callable strings.

    Registers CallableString to be quoted like an ordinary string.
    Silently does nothing when psycopg2 is not installed.
    """
    # We cannot access settings during install() of gae2django.
    # So let's get proactive and try to register the adapter anyway.
    # See: http://code.djangoproject.com/ticket/5996
    try:
        import psycopg2.extensions
    except ImportError:
        # psycopg2 isn't available; nothing to register. (The unused
        # 'err' binding and py2-only except syntax were dropped.)
        return
    psycopg2.extensions.register_adapter(CallableString,
                                         psycopg2.extensions.QuotedString)
| Python |
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class CapabilityDisabledError(Exception):
    """Indicates that a datastore api call was not performed because
    that particular datastore functionality is not available.

    Kept so code importing the App Engine capabilities error works.
    """
| Python |
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
class DeadlineExceededError(Exception):
    """Not used.

    Defined only so imports of the App Engine runtime's
    DeadlineExceededError continue to work.
    """
| Python |
# Customized version of Google's GQL class (google.appengine.ext.gql.GQL).
import datetime
import heapq
import logging
import re
import time
from gaeapi.appengine.api import datastore
from gaeapi.appengine.api import users
from gaeapi.appengine.ext import db
# Tracing level just below DEBUG, so binder logging stays quiet by default.
LOG_LEVEL = logging.DEBUG - 1
# Hacks: sort-direction constants (normally supplied by the datastore API).
ASCENDING = 1
DESCENDING = 2
class BadQueryError(Exception):
    """Raised when a GQL query string cannot be parsed."""
class BadArgumentError(Exception):
    """Raised when bound arguments are missing, unused or mistyped."""
class GQL(object):
"""A GQL interface to the datastore.
GQL is a SQL-like language which supports more object-like semantics
in a langauge that is familiar to SQL users. The language supported by
GQL will change over time, but will start off with fairly simple
semantics.
- reserved words are case insensitive
- names are case sensitive
The syntax for SELECT is fairly straightforward:
SELECT * FROM <entity>
[WHERE <condition> [AND <condition> ...]]
[ORDER BY <property> [ASC | DESC] [, <property> [ASC | DESC] ...]]
[LIMIT [<offset>,]<count>]
[OFFSET <offset>]
[HINT (ORDER_FIRST | HINT FILTER_FIRST | HINT ANCESTOR_FIRST)]
<condition> := <property> {< | <= | > | >= | = | != | IN} <value>
<condition> := <property> {< | <= | > | >= | = | != | IN} CAST(<value>)
<condition> := <property> IN (<value>, ...)
<condition> := ANCESTOR IS <entity or key>
    Currently the parser is LL(1) because of the simplicity of the grammar
(as it is largely predictive with one token lookahead).
The class is implemented using some basic regular expression tokenization
to pull out reserved tokens and then the recursive descent parser will act
as a builder for the pre-compiled query. This pre-compiled query is then
bound to arguments before executing the query.
Initially, three parameter passing mechanisms are supported when calling
Execute():
- Positional parameters
Execute('SELECT * FROM Story WHERE Author = :1 AND Date > :2')
- Named parameters
Execute('SELECT * FROM Story WHERE Author = :author AND Date > :date')
- Literals (numbers, and strings)
Execute('SELECT * FROM Story WHERE Author = \'James\'')
Users are also given the option of doing type conversions to other datastore
types (e.g. db.Email, db.GeoPt). The language provides a conversion function
which allows the caller to express conversions of both literals and
parameters. The current conversion operators are:
- GEOPT(float, float)
- USER(str)
- KEY(kind, id/name[, kind, id/name...])
- DATETIME(year, month, day, hour, minute, second)
- DATETIME('YYYY-MM-DD HH:MM:SS')
- DATE(year, month, day)
- DATE('YYYY-MM-DD')
- TIME(hour, minute, second)
- TIME('HH:MM:SS')
We will properly serialize and quote all values.
It should also be noted that there are some caveats to the queries that can
be expressed in the syntax. The parser will attempt to make these clear as
much as possible, but some of the caveats include:
- There is no OR operation. In most cases, you should prefer to use IN to
express the idea of wanting data matching one of a set of values.
- You cannot express inequality operators on multiple different properties
- You can only have one != operator per query (related to the previous
rule).
- The IN and != operators must be used carefully because they can
dramatically raise the amount of work done by the datastore. As such,
there is a limit on the number of elements you can use in IN statements.
This limit is set fairly low. Currently, a max of 30 datastore queries is
allowed in a given GQL query. != translates into 2x the number of
datastore queries, and IN multiplies by the number of elements in the
clause (so having two IN clauses, one with 5 elements, the other with 6
will cause 30 queries to occur).
- Literals can take the form of basic types or as type-cast literals. On
the other hand, literals within lists can currently only take the form of
simple types (strings, integers, floats).
SELECT * will return an iterable set of entries, but other operations (schema
queries, updates, inserts or field selections) will return alternative
result types.
"""
TOKENIZE_REGEX = re.compile(r"""
(?:'[^'\n\r]*')+|
<=|>=|!=|=|<|>|
:\w+|
,|
\*|
-?\d+(?:\.\d+)?|
\w+|
\(|\)|
\S+
""", re.VERBOSE | re.IGNORECASE)
MAX_ALLOWABLE_QUERIES = 30
__ANCESTOR = -1
def __init__(self, query_string, _app=None, _auth_domain=None):
"""Ctor.
Parses the input query into the class as a pre-compiled query, allowing
for a later call to Bind() to bind arguments as defined in the
documentation.
Args:
query_string: properly formatted GQL query string.
Raises:
BadQueryError: if the query is not parsable.
"""
self._entity = ''
self.__filters = {}
self.__has_ancestor = False
self.__orderings = []
self.__offset = -1
self.__limit = -1
self.__hint = ''
self.__app = _app
self.__auth_domain = _auth_domain
self.__symbols = self.TOKENIZE_REGEX.findall(query_string)
self.__next_symbol = 0
if not self.__Select():
raise BadQueryError('Unable to parse query')
else:
pass
  def Bind(self, args, keyword_args):
    """Bind the existing query to the argument list.

    Assumes that the input args are first positional, then a dictionary.
    So, if the query contains references to :1, :2 and :name, it is assumed
    that arguments are passed as (:1, :2, dict) where dict contains a mapping
    [name] -> value.

    Args:
      args: the arguments to bind to the object's unbound references.
      keyword_args: dictionary-based arguments (for named parameters).

    Raises:
      BadArgumentError: when arguments are left unbound (missing from the
        input arguments) or when arguments do not match the expected type.

    Returns:
      The bound datastore.Query object. This may take the form of a MultiQuery
      object if the GQL query will require multiple backend queries to satisfy.
    """
    num_args = len(args)
    input_args = frozenset(xrange(num_args))
    used_args = set()
    queries = []
    # Expand IN/!= filters into the full set of simple filter maps first;
    # this also records which positional args those filters consume.
    enumerated_queries = self.EnumerateQueries(used_args, args, keyword_args)
    if enumerated_queries:
      query_count = len(enumerated_queries)
    else:
      query_count = 1
    for i in xrange(query_count):
      queries.append(datastore.Query(self._entity, _app=self.__app))
    logging.log(LOG_LEVEL,
                'Binding with %i positional args %s and %i keywords %s'
                , len(args), args, len(keyword_args), keyword_args)
    # Simple (non-multi) filters apply identically to every backend query;
    # the multi-query filters were already baked into enumerated_queries.
    for ((identifier, condition), value_list) in self.__filters.iteritems():
      for (operator, params) in value_list:
        value = self.__Operate(args, keyword_args, used_args, operator, params)
        if not self.__IsMultiQuery(condition):
          for query in queries:
            self.__AddFilterToQuery(identifier, condition, value, query)
    # Every supplied positional argument must have been referenced.
    unused_args = input_args - used_args
    if unused_args:
      unused_values = [unused_arg + 1 for unused_arg in unused_args]
      raise BadArgumentError('Unused positional arguments %s' % unused_values)
    if enumerated_queries:
      logging.debug('Multiple Queries Bound: %s' % enumerated_queries)
      for (query, enumerated_query) in zip(queries, enumerated_queries):
        query.update(enumerated_query)
    if self.__orderings:
      for query in queries:
        query.Order(*tuple(self.__orderings))
    if query_count > 1:
      return MultiQuery(queries, self.__orderings)
    else:
      return queries[0]
  def EnumerateQueries(self, used_args, args, keyword_args):
    """Create a list of all multi-query filter combinations required.

    To satisfy multi-query requests ("IN" and "!=" filters), multiple queries
    may be required. This code will enumerate the power-set of all multi-query
    filters.

    Args:
      used_args: set of used positional parameters (output-only; records
        which zero-based positional args were consumed).
      args: positional arguments referenced by the proto-query in self.
      keyword_args: dict of keyword arguments referenced by the proto-query.

    Returns:
      A list of maps [(identifier, condition) -> value] of all queries needed
      to satisfy the GQL query with the given input arguments.
    """
    enumerated_queries = []
    # Every filter is fed through __AddMultiQuery; it only expands the
    # enumeration for IN/!= conditions and ignores the rest.
    for ((identifier, condition), value_list) in self.__filters.iteritems():
      for (operator, params) in value_list:
        value = self.__Operate(args, keyword_args, used_args, operator, params)
        self.__AddMultiQuery(identifier, condition, value, enumerated_queries)
    return enumerated_queries
def __CastError(self, operator, values, error_message):
"""Query building error for type cast operations.
Args:
operator: the failed cast operation
values: value list passed to the cast operator
error_message: string to emit as part of the 'Cast Error' string.
Raises:
BadQueryError and passes on an error message from the caller. Will raise
BadQueryError on all calls.
"""
raise BadQueryError('Type Cast Error: unable to cast %r with operation %s (%s)' %
(values, operator.upper(), error_message))
def __CastNop(self, values):
"""Return values[0] if it exists -- default for most where clauses."""
if len(values) != 1:
self.__CastError(values, 'nop', 'requires one and only one value')
else:
return values[0]
def __CastList(self, values):
"""Return the full list of values -- only useful for IN clause."""
if values:
return values
else:
return None
def __CastKey(self, values):
"""Cast input values to Key() class using encoded string or tuple list."""
if not len(values) % 2:
return db.Key.from_path(_app=self.__app, *values)
elif len(values) == 1 and isinstance(values[0], str):
return db.Key(values[0])
else:
self.__CastError('KEY', values,
'requires an even number of operands'
'or a single encoded string')
def __CastGeoPt(self, values):
"""Cast input to GeoPt() class using 2 input parameters."""
if len(values) != 2:
self.__CastError('GEOPT', values, 'requires 2 input parameters')
return datastore_types.GeoPt(*values)
def __CastUser(self, values):
"""Cast to User() class using the email address in values[0]."""
if len(values) != 1:
self.__CastError(values, 'user', 'requires one and only one value')
else:
return users.User(email=values[0], _auth_domain=self.__auth_domain)
  def __CastDate(self, values):
    """Cast date values to a datetime using ISO string or tuple inputs.

    Accepts a single 'YYYY-MM-DD' string or a (year, month, day) tuple;
    the time-of-day component defaults to midnight.
    """
    try:
      if len(values) == 1 and isinstance(values[0], str):
        time_tuple = time.strptime(values[0], '%Y-%m-%d')
        return datetime.datetime(*time_tuple[0:6])
      else:
        return datetime.datetime(values[0], values[1], values[2], 0, 0, 0)
    except ValueError, err:
      self.__CastError('DATE', values, err)
  def __CastTime(self, values):
    """Cast time values to a datetime using ISO string or tuple inputs.

    Accepts a single 'HH:MM:SS' string or an (hour[, minute, ...]) tuple;
    the date component is pinned to the epoch date 1970-01-01.
    """
    try:
      if len(values) == 1 and isinstance(values[0], str):
        time_tuple = time.strptime(values[0], '%H:%M:%S')
        time_tuple = (1970, 1, 1) + time_tuple[3:]
        return datetime.datetime(*time_tuple[0:6])
      else:
        return datetime.datetime(1970, 1, 1, *values)
    except ValueError, err:
      self.__CastError('TIME', values, err)
  def __CastDatetime(self, values):
    """Cast values to a datetime using ISO string or tuple inputs.

    Accepts a single 'YYYY-MM-DD HH:MM:SS' string or a full
    (year, month, day[, hour, ...]) tuple.
    """
    try:
      if len(values) == 1 and isinstance(values[0], str):
        time_tuple = time.strptime(values[0], '%Y-%m-%d %H:%M:%S')
        return datetime.datetime(*time_tuple[0:6])
      else:
        return datetime.datetime(*values)
    except ValueError, err:
      self.__CastError('DATETIME', values, err)
  def __Operate(self, args, keyword_args, used_args, operator, params):
    """Create a single output value from params using the operator string given.

    Args:
      args,keyword_args: arguments passed in for binding purposes (used in
        binding positional and keyword based arguments).
      used_args: set of numeric arguments accessed in this call; values are
        ints representing used zero-based positional arguments. Used as an
        output parameter with new used arguments appended to the list.
      operator: string representing the operator to use; 'nop' just returns
        the first value from params.
      params: parameter list to operate on (positional references, named
        references, or literals).

    Returns:
      A value which can be used as part of a GQL filter description (either a
      list of datastore types -- for use with IN, or a single datastore type --
      for use with other filters).
    """
    if not params:
      return None
    param_values = []
    for param in params:
      if isinstance(param, Literal):
        value = param.Get()
      else:
        value = self.__GetParam(param, args, keyword_args)
        if isinstance(param, int):
          # Positional references are 1-based; record the 0-based index.
          used_args.add(param - 1)
        logging.log(LOG_LEVEL, 'found param for bind: %s value: %s',
                    param, value)
      param_values.append(value)
    logging.log(LOG_LEVEL, '%s Operating on values: %s',
                operator, repr(param_values))
    if operator in self.__cast_operators:
      result = self.__cast_operators[operator](self, param_values)
    else:
      # __Error always raises, so 'result' is guaranteed bound below.
      self.__Error('Operation %s is invalid' % operator)
    return result
def __IsMultiQuery(self, condition):
"""Return whether or not this condition could require multiple queries."""
return condition.lower() in ('in', '!=')
def __GetParam(self, reference, args, keyword_args):
"""Get the specified parameter from the input arguments.
Args:
reference: id for a filter reference in the filter list (string or
number)
args: positional args passed in by the user (tuple of arguments, indexed
numerically by "reference")
keyword_args: dict of keyword based arguments (strings in "reference")
Returns:
The specified param from the input list.
Raises:
BadArgumentError if the referenced argument doesn't exist.
"""
num_args = len(args)
if isinstance(reference, int):
if reference <= num_args:
return args[reference - 1]
else:
raise BadArgumentError(
'Missing argument for bind, requires argument #%i, '
'but only has %i args.' % (reference, num_args))
elif isinstance(reference, str):
if reference in keyword_args:
return keyword_args[reference]
else:
raise BadArgumentError(
'Missing named arguments for bind, requires argument %s' %
reference)
else:
assert False, 'Unknown reference %s' % reference
  def __AddMultiQuery(self, identifier, condition, value, enumerated_queries):
    """Helper function to add a multi-query to previously enumerated queries.

    Args:
      identifier: property being filtered by this condition.
      condition: filter condition (e.g. !=, in).
      value: value being bound.
      enumerated_queries: in/out list of already bound queries -> expanded
        list with the full enumeration required to satisfy the condition.

    Raises:
      BadArgumentError if the filter is invalid (namely non-list with IN).
    """
    def CloneQueries(queries, n):
      """Do a full copy of the queries and append to the end of the queries.

      Does an in-place replication of the input list and sorts the result to
      put copies next to one-another.

      Args:
        queries: list of all filters to clone.
        n: number of copies to make.

      Returns:
        Number of iterations needed to fill the structure.
      """
      if not enumerated_queries:
        # First multi-filter seen: seed with n empty filter maps.
        for i in xrange(n):
          queries.append({})
        return 1
      else:
        # Replicate the existing enumeration n times; sorting groups the
        # copies so the index arithmetic below addresses each copy in turn.
        old_size = len(queries)
        tmp_queries = []
        for i in xrange(n - 1):
          [tmp_queries.append(filter_map.copy()) for filter_map in queries]
        queries.extend(tmp_queries)
        queries.sort()
        return old_size
    if condition == '!=':
      if len(enumerated_queries) * 2 > self.MAX_ALLOWABLE_QUERIES:
        raise BadArgumentError(
            'Cannot satisfy query -- too many IN/!= values.')
      # '!=' expands into two queries: property < value OR property > value.
      num_iterations = CloneQueries(enumerated_queries, 2)
      for i in xrange(num_iterations):
        enumerated_queries[2 * i]['%s <' % identifier] = value
        enumerated_queries[2 * i + 1]['%s >' % identifier] = value
    elif condition.lower() == 'in':
      if not isinstance(value, list):
        raise BadArgumentError('List expected for "IN" filter')
      in_list_size = len(value)
      if len(enumerated_queries) * in_list_size > self.MAX_ALLOWABLE_QUERIES:
        raise BadArgumentError(
            'Cannot satisfy query -- too many IN/!= values.')
      # IN expands into one equality query per list element.
      num_iterations = CloneQueries(enumerated_queries, in_list_size)
      for clone_num in xrange(num_iterations):
        for value_num in xrange(len(value)):
          list_val = value[value_num]
          query_num = in_list_size * clone_num + value_num
          filt = '%s =' % identifier
          enumerated_queries[query_num][filt] = list_val
def __AddFilterToQuery(self, identifier, condition, value, query):
"""Add a filter condition to a query based on the inputs.
Args:
identifier: name of the property (or self.__ANCESTOR for ancestors)
condition: test condition
value: test value passed from the caller
query: query to add the filter to
"""
if identifier != self.__ANCESTOR:
filter_condition = '%s %s' % (identifier, condition)
logging.log(LOG_LEVEL, 'Setting filter on "%s" with value "%s"',
filter_condition, value.__class__)
datastore._AddOrAppend(query, filter_condition, value)
else:
logging.log(LOG_LEVEL, 'Setting ancestor query for ancestor %s', value)
query.Ancestor(value)
  def Run(self, *args, **keyword_args):
    """Runs this query.

    Similar to datastore.Query.Run; assumes that limit == -1 or > 0.

    Args:
      args: arguments used to bind to references in the compiled query object.
      keyword_args: dictionary-based arguments (for named parameters).

    Returns:
      A list of results if a query count limit was passed.
      A result iterator if no limit was given.
    """
    bind_results = self.Bind(args, keyword_args)
    offset = 0
    if self.__offset != -1:
      offset = self.__offset
    if self.__limit == -1:
      # No limit: return the raw iterator after manually discarding the
      # first 'offset' results.
      it = bind_results.Run()
      try:
        for i in xrange(offset):
          it.next()
      except StopIteration:
        pass
      return it
    else:
      # With a limit, Get() handles both offset and limit for us.
      res = bind_results.Get(self.__limit, offset)
      return res
  def filters(self):
    """Return the compiled filter map: (identifier, condition) -> values."""
    return self.__filters
  def hint(self):
    """Return the datastore hint string ('' when no HINT clause was given)."""
    return self.__hint
  def limit(self):
    """Return the numerical result count limit (-1 when unset)."""
    return self.__limit
  def orderings(self):
    """Return the (property, direction) result ordering list."""
    return self.__orderings
__iter__ = Run
__quoted_string_regex = re.compile(r'((?:\'[^\'\n\r]*\')+)')
__ordinal_regex = re.compile(r':(\d+)$')
__named_regex = re.compile(r':(\w+)$')
__identifier_regex = re.compile(r'(\w+)$')
__conditions_regex = re.compile(r'(<=|>=|!=|=|<|>|is|in)$', re.IGNORECASE)
__number_regex = re.compile(r'(\d+)$')
__cast_regex = re.compile(
r'(geopt|user|key|date|time|datetime)$', re.IGNORECASE)
__cast_operators = {
'geopt': __CastGeoPt,
'user': __CastUser,
'key': __CastKey,
'datetime': __CastDatetime,
'date': __CastDate,
'time': __CastTime,
'list': __CastList,
'nop': __CastNop,
}
def __Error(self, error_message):
"""Generic query error.
Args:
error_message: string to emit as part of the 'Parse Error' string.
Raises:
BadQueryError and passes on an error message from the caller. Will raise
BadQueryError on all calls to __Error()
"""
if self.__next_symbol >= len(self.__symbols):
raise BadQueryError(
'Parse Error: %s at end of string' % error_message)
else:
raise BadQueryError(
'Parse Error: %s at symbol %s' %
(error_message, self.__symbols[self.__next_symbol]))
def __Accept(self, symbol_string):
"""Advance the symbol and return true iff the next symbol matches input."""
if self.__next_symbol < len(self.__symbols):
logging.log(LOG_LEVEL, '\t%s', self.__symbols)
logging.log(LOG_LEVEL, '\tExpect: %s Got: %s',
symbol_string, self.__symbols[self.__next_symbol].upper())
if self.__symbols[self.__next_symbol].upper() == symbol_string:
self.__next_symbol += 1
return True
return False
def __Expect(self, symbol_string):
"""Require that the next symbol matches symbol_string, or emit an error.
Args:
symbol_string: next symbol expected by the caller
Raises:
BadQueryError if the next symbol doesn't match the parameter passed in.
"""
if not self.__Accept(symbol_string):
self.__Error('Unexpected Symbol: %s' % symbol_string)
def __AcceptRegex(self, regex):
"""Advance and return the symbol if the next symbol matches the regex.
Args:
regex: the compiled regular expression to attempt acceptance on.
Returns:
The first group in the expression to allow for convenient access
to simple matches. Requires () around some objects in the regex.
None if no match is found.
"""
if self.__next_symbol < len(self.__symbols):
match_symbol = self.__symbols[self.__next_symbol]
logging.log(LOG_LEVEL, '\taccept %s on symbol %s', regex, match_symbol)
match = regex.match(match_symbol)
if match:
self.__next_symbol += 1
if match.groups():
matched_string = match.group(1)
logging.log(LOG_LEVEL, '\taccepted %s', matched_string)
return matched_string
return None
def __AcceptTerminal(self):
"""Only accept an empty string.
Returns:
True
Raises:
BadQueryError if there are unconsumed symbols in the query.
"""
if self.__next_symbol < len(self.__symbols):
self.__Error('Expected no additional symbols')
return True
def __Select(self):
"""Consume the SELECT clause and everything that follows it.
Assumes SELECT * to start.
Transitions to a FROM clause.
Returns:
True if parsing completed okay.
"""
self.__Expect('SELECT')
self.__Expect('*')
return self.__From()
def __From(self):
"""Consume the FROM clause.
Assumes a single well formed entity in the clause.
Assumes FROM <Entity Name>
Transitions to a WHERE clause.
Returns:
True if parsing completed okay.
"""
self.__Expect('FROM')
entity = self.__AcceptRegex(self.__identifier_regex)
if entity:
self._entity = entity
return self.__Where()
else:
self.__Error('Identifier Expected')
return False
def __Where(self):
"""Consume the WHERE cluase.
These can have some recursion because of the AND symbol.
Returns:
True if parsing the WHERE clause completed correctly, as well as all
subsequent clauses
"""
if self.__Accept('WHERE'):
return self.__FilterList()
return self.__OrderBy()
  def __FilterList(self):
    """Consume the filter list (remainder of the WHERE clause).

    Recurses on AND; transitions to ORDER BY when the filters end.
    """
    identifier = self.__AcceptRegex(self.__identifier_regex)
    if not identifier:
      self.__Error('Invalid WHERE Identifier')
      return False
    condition = self.__AcceptRegex(self.__conditions_regex)
    if not condition:
      self.__Error('Invalid WHERE Condition')
      return False
    self.__CheckFilterSyntax(identifier, condition)
    # Try, in order: a bind reference (:1 / :name), a literal, then a
    # type-cast expression. Each attempt only consumes tokens on success,
    # so falling through to the next alternative is safe.
    if not self.__AddSimpleFilter(identifier, condition, self.__Reference()):
      if not self.__AddSimpleFilter(identifier, condition, self.__Literal()):
        type_cast = self.__TypeCast()
        if (not type_cast or
            not self.__AddProcessedParameterFilter(identifier, condition,
                                                   *type_cast)):
          self.__Error('Invalid WHERE condition')
    if self.__Accept('AND'):
      return self.__FilterList()
    return self.__OrderBy()
def __GetValueList(self):
"""Read in a list of parameters from the tokens and return the list.
Reads in a set of tokens, but currently only accepts literals, positional
parameters, or named parameters. Or empty list if nothing was parsed.
Returns:
A list of values parsed from the input, with values taking the form of
strings (unbound, named reference), integers (unbound, positional
reference), or Literal() (bound value usable directly as part of a filter
with no additional information).
"""
params = []
while True:
reference = self.__Reference()
if reference:
params.append(reference)
else:
literal = self.__Literal()
if literal:
params.append(literal)
else:
self.__Error('Parameter list requires literal or reference parameter')
if not self.__Accept(','):
break
return params
def __CheckFilterSyntax(self, identifier, condition):
"""Check that filter conditions are valid and throw errors if not.
Args:
identifier: identifier being used in comparison
condition: string form of the comparison operator used in the filter
"""
if identifier.lower() == 'ancestor':
if condition.lower() == 'is':
if self.__has_ancestor:
self.__Error('Only one ANCESTOR IS" clause allowed')
else:
self.__Error('"IS" expected to follow "ANCESTOR"')
elif condition.lower() == 'is':
self.__Error('"IS" can only be used when comparing against "ANCESTOR"')
  def __AddProcessedParameterFilter(self, identifier, condition,
                                    operator, parameters):
    """Add a filter with post-processing required.

    Args:
      identifier: property being compared.
      condition: comparison operation being used with the property (e.g. !=).
      operator: operation to perform on the parameters before adding the
        filter ('nop' passes the first value through unprocessed).
      parameters: list of bound parameters passed to 'operator' before
        creating the filter.

    Returns:
      True if the filter was okay to add.
    """
    if parameters is None:
      return False
    if parameters[0] is None:
      return False
    logging.log(LOG_LEVEL, 'Adding Filter %s %s %s',
                identifier, condition, repr(parameters))
    filter_rule = (identifier, condition)
    if identifier.lower() == 'ancestor':
      # Ancestor clauses are keyed under the __ANCESTOR sentinel so they
      # never collide with a real property named 'ancestor'.
      self.__has_ancestor = True
      filter_rule = (self.__ANCESTOR, 'is')
      assert condition.lower() == 'is'
    if condition.lower() != 'in' and operator == 'list':
      self.__Error('Only IN can process a list of values')
    self.__filters.setdefault(filter_rule, []).append((operator, parameters))
    return True
def __AddSimpleFilter(self, identifier, condition, parameter):
"""Add a filter to the query being built (no post-processing on parameter).
Args:
identifier: identifier being used in comparison
condition: string form of the comparison operator used in the filter
parameter: ID of the reference being made or a value of type Literal
Returns:
True if the filter could be added.
False otherwise.
"""
return self.__AddProcessedParameterFilter(identifier, condition,
'nop', [parameter])
def __Reference(self):
"""Consume a parameter reference and return it.
Consumes a reference to a positional parameter (:1) or a named parameter
(:email). Only consumes a single reference (not lists).
Returns:
The name of the reference (integer for positional parameters or string
for named parameters) to a bind-time parameter.
"""
logging.log(LOG_LEVEL, 'Try Reference')
reference = self.__AcceptRegex(self.__ordinal_regex)
if reference:
return int(reference)
else:
reference = self.__AcceptRegex(self.__named_regex)
if reference:
return reference
return None
  def __Literal(self):
    """Parse literals from our token list.

    Returns:
      The parsed literal wrapped in Literal (currently a string, integer,
      float, or boolean), or None when the next symbol is not a literal.
    """
    logging.log(LOG_LEVEL, 'Try Literal')
    literal = None
    # Try int before float so '12' stays an int; int('1.5') raises
    # ValueError and falls through to the float attempt below.
    try:
      literal = int(self.__symbols[self.__next_symbol])
    except ValueError:
      pass
    else:
      self.__next_symbol += 1
    if literal is None:
      try:
        literal = float(self.__symbols[self.__next_symbol])
      except ValueError:
        pass
      else:
        self.__next_symbol += 1
    if literal is None:
      # Quoted string; a doubled '' inside is an escaped single quote.
      literal = self.__AcceptRegex(self.__quoted_string_regex)
      if literal:
        literal = literal[1:-1].replace("''", "'")
    if literal is None:
      if self.__Accept('TRUE'):
        literal = True
      elif self.__Accept('FALSE'):
        literal = False
    if literal is not None:
      return Literal(literal)
    else:
      return None
def __TypeCast(self):
"""Check if the next operation is a type-cast and return the cast if so.
Casting operators look like simple function calls on their parameters. This
code returns the cast operator found and the list of parameters provided by
the user to complete the cast operation.
Returns:
A tuple (cast operator, params) which represents the cast operation
requested and the parameters parsed from the cast clause.
None - if there is no TypeCast function.
"""
logging.log(LOG_LEVEL, 'Try Type Cast')
cast_op = self.__AcceptRegex(self.__cast_regex)
if not cast_op:
if self.__Accept('('):
cast_op = 'list'
else:
return None
else:
cast_op = cast_op.lower()
self.__Expect('(')
params = self.__GetValueList()
self.__Expect(')')
logging.log(LOG_LEVEL, 'Got casting operator %s with params %s',
cast_op, repr(params))
return (cast_op, params)
def __OrderBy(self):
"""Consume the ORDER BY clause."""
if self.__Accept('ORDER'):
self.__Expect('BY')
return self.__OrderList()
return self.__Limit()
  def __OrderList(self):
    """Consume variables and sort order for ORDER BY clause."""
    identifier = self.__AcceptRegex(self.__identifier_regex)
    if identifier:
      if self.__Accept('DESC'):
        self.__orderings.append((identifier, DESCENDING))
      elif self.__Accept('ASC'):
        # Looks redundant with the default branch below, but this branch
        # must exist to consume the explicit 'ASC' token.
        self.__orderings.append((identifier, ASCENDING))
      else:
        self.__orderings.append((identifier, ASCENDING))
    else:
      self.__Error('Invalid ORDER BY Property')
    logging.log(LOG_LEVEL, self.__orderings)
    if self.__Accept(','):
      return self.__OrderList()
    return self.__Limit()
  def __Limit(self):
    """Consume the LIMIT clause.

    Supports both 'LIMIT count' and 'LIMIT offset,count' forms.
    """
    if self.__Accept('LIMIT'):
      maybe_limit = self.__AcceptRegex(self.__number_regex)
      if maybe_limit:
        if self.__Accept(','):
          # 'LIMIT offset,count': the first number was really the offset.
          self.__offset = int(maybe_limit)
          if self.__offset < 0:
            self.__Error('Bad offset in LIMIT Value')
          else:
            logging.log(LOG_LEVEL, 'Set offset to %i', self.__offset)
          # NOTE(review): if the count after ',' is missing, maybe_limit is
          # None here and int(None) raises TypeError instead of a
          # BadQueryError -- confirm whether this path needs hardening.
          maybe_limit = self.__AcceptRegex(self.__number_regex)
        self.__limit = int(maybe_limit)
        if self.__limit < 1:
          self.__Error('Bad Limit in LIMIT Value')
        else:
          logging.log(LOG_LEVEL, 'Set limit to %i', self.__limit)
      else:
        self.__Error('Non-number limit in LIMIT clause')
    return self.__Offset()
def __Offset(self):
"""Consume the OFFSET clause."""
if self.__Accept('OFFSET'):
if self.__offset != -1:
self.__Error('Offset already defined in LIMIT clause')
offset = self.__AcceptRegex(self.__number_regex)
if offset:
self.__offset = int(offset)
if self.__offset < 0:
self.__Error('Bad offset in OFFSET clause')
else:
logging.log(LOG_LEVEL, 'Set offset to %i', self.__offset)
else:
self.__Error('Non-number offset in OFFSET clause')
return self.__Hint()
def __Hint(self):
"""Consume the HINT clause.
Requires one of three options (mirroring the rest of the datastore):
HINT ORDER_FIRST
HINT ANCESTOR_FIRST
HINT FILTER_FIRST
Returns:
True if the hint clause and later clauses all parsed okay
"""
if self.__Accept('HINT'):
if self.__Accept('ORDER_FIRST'):
self.__hint = 'ORDER_FIRST'
elif self.__Accept('FILTER_FIRST'):
self.__hint = 'FILTER_FIRST'
elif self.__Accept('ANCESTOR_FIRST'):
self.__hint = 'ANCESTOR_FIRST'
else:
self.__Error('Unknown HINT')
return False
return self.__AcceptTerminal()
class Literal(object):
  """Wrapper marking a value as a bound literal, as opposed to an unbound
  query parameter.

  Thin wrapper around basic and datastore types; Get() retrieves the
  wrapped value.
  """

  def __init__(self, value):
    self.__value = value

  def Get(self):
    """Return the wrapped literal value."""
    return self.__value

  def __repr__(self):
    return 'Literal(%s)' % repr(self.__value)
class MultiQuery(datastore.Query):
  """Class representing a GQL query requiring multiple datastore queries.

  This class is actually a subclass of datastore.Query as it is intended to
  act like a normal Query object (supporting the same interface). Results of
  the bound sub-queries are merge-sorted and de-duplicated lazily.
  """
  def __init__(self, bound_queries, orderings):
    # NOTE(review): datastore.Query.__init__ is intentionally not called;
    # this object only merges results of the already-bound sub-queries.
    self.__bound_queries = bound_queries
    self.__orderings = orderings
  def Get(self, limit, offset=0):
    """Get results of the query with a limit on the number of results.

    Args:
      limit: maximum number of values to return.
      offset: offset requested -- if nonzero, this will override the offset
        in the original query.

    Returns:
      An array of entities with at most "limit" entries (less if the query
      completes before reading limit values).
    """
    count = 1
    result = []
    iterator = self.Run()
    # Discard the first 'offset' merged results.
    try:
      for i in xrange(offset):
        val = iterator.next()
    except StopIteration:
      pass
    # Then collect up to 'limit' results.
    try:
      while count <= limit:
        val = iterator.next()
        result.append(val)
        count += 1
    except StopIteration:
      pass
    return result
  class SortOrderEntity(object):
    # Heap element: wraps one sub-query iterator together with its current
    # entity so instances can be compared and merged in a heap.
    def __init__(self, entity_iterator, orderings):
      self.__entity_iterator = entity_iterator
      self.__entity = None
      self.__min_max_value_cache = {}
      try:
        self.__entity = entity_iterator.next()
      except StopIteration:
        pass
      else:
        # NOTE(review): __orderings is only assigned when the iterator
        # produced an entity; exhausted wrappers never enter the heap, so
        # the missing attribute is never read -- fragile, confirm before
        # reusing this class elsewhere.
        self.__orderings = orderings
    def __str__(self):
      return str(self.__entity)
    def GetEntity(self):
      # Current entity, or None when the underlying iterator was exhausted.
      return self.__entity
    def GetNext(self):
      # Advance: wrap the same iterator again, pulling its next entity.
      return MultiQuery.SortOrderEntity(self.__entity_iterator,
                                        self.__orderings)
    def CmpProperties(self, that):
      """Compare two entities and return their relative order.

      Compares self to that based on the current sort orderings only
      (ignores keys), as if all values were placed in ascending order.

      Args:
        that: SortOrderEntity to compare against.

      Returns:
        Negative if self < that, zero if equal, positive if self > that.
      """
      if not self.__entity:
        return cmp(self.__entity, that.__entity)
      for (identifier, order) in self.__orderings:
        value1 = self.__GetValueForId(self, identifier, order)
        value2 = self.__GetValueForId(that, identifier, order)
        result = cmp(value1, value2)
        if order == DESCENDING:
          result = -result
        if result:
          return result
      return 0
    def __GetValueForId(self, sort_order_entity, identifier, sort_order):
      # For multi-valued (list) properties, compare on the min (DESCENDING)
      # or max (otherwise) element, caching the choice per (key, property).
      value = sort_order_entity.__entity[identifier]
      entity_key = sort_order_entity.__entity.key()
      if self.__min_max_value_cache.has_key((entity_key, identifier)):
        value = self.__min_max_value_cache[(entity_key, identifier)]
      elif isinstance(value, list):
        if sort_order == DESCENDING:
          value = min(value)
        else:
          value = max(value)
        self.__min_max_value_cache[(entity_key, identifier)] = value
      return value
    def __cmp__(self, that):
      """Compare self to that w.r.t. values defined in the sort order.

      Sort-order comparison first, then entity key order to break ties, so
      a heap of these has fast min-value lookup.

      Args:
        that: other entity to compare to.

      Returns:
        Negative, zero, or positive per standard __cmp__ semantics.
      """
      property_compare = self.CmpProperties(that)
      if property_compare:
        return property_compare
      else:
        return cmp(self.__entity.key(), that.__entity.key())
  def Run(self):
    """Return an iterable output with all results in order."""
    results = []
    count = 1
    for bound_query in self.__bound_queries:
      logging.log(LOG_LEVEL, 'Running query #%i' % count)
      results.append(bound_query.Run())
      count += 1
    def IterateResults(results):
      """Iterator function to return all results in sorted order.

      Iterate over the array of results, yielding the next element, in
      sorted order. This function is destructive (results will be empty
      when the operation is complete).

      Args:
        results: list of result iterators to merge and iterate through.

      Yields:
        The next result in sorted order.
      """
      # Seed the heap with the first entity of each non-empty sub-query.
      result_heap = []
      for result in results:
        heap_value = MultiQuery.SortOrderEntity(result, self.__orderings)
        if heap_value.GetEntity():
          heapq.heappush(result_heap, heap_value)
      used_keys = set()
      while result_heap:
        top_result = heapq.heappop(result_heap)
        results_to_push = []
        # De-duplicate: the same entity may satisfy several sub-queries.
        if top_result.GetEntity().key() not in used_keys:
          yield top_result.GetEntity()
        else:
          pass
        used_keys.add(top_result.GetEntity().key())
        results_to_push = []
        while result_heap:
          next = heapq.heappop(result_heap)
          if cmp(top_result, next):
            # First non-equal entry: stop scanning; push it back unchanged.
            results_to_push.append(next)
            break
          else:
            # Equal to top_result: advance that iterator past the duplicate.
            results_to_push.append(next.GetNext())
        results_to_push.append(top_result.GetNext())
        for popped_result in results_to_push:
          if popped_result.GetEntity():
            heapq.heappush(result_heap, popped_result)
    return IterateResults(results)
| Python |
import base64
import binascii
import cPickle
import logging
import os
import random
import re
import time
import types
from django.contrib.auth.models import User
from django.contrib.contenttypes import generic
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.db.models import manager
from django.db.models.fields.related import (
ReverseSingleRelatedObjectDescriptor as RSROD)
from django.db.models.query import QuerySet
from django.db.models.query_utils import Q
from django.db.models.signals import post_init
from django.db import transaction
from django.utils.hashcompat import md5_constructor
from gae2django.middleware import get_current_user
from gae2django.utils import CallableString, patch_user
# Use the system (hardware-based) random number generator if it exists.
# Taken from django.contrib.sessions.backends.base
if hasattr(random, 'SystemRandom'):
    randrange = random.SystemRandom().randrange
else:
    randrange = random.randrange
# Exclusive upper bound for generated session keys: 2**64 (== 2 << 63).
MAX_SESSION_KEY = 18446744073709551616L # 2 << 63
class Query(QuerySet):
    """Django QuerySet subclass emulating GAE's datastore query interface."""
    def __init__(self, *args, **kwds):
        super(Query, self).__init__(*args, **kwds)
        # Optional list of (field, item) pairs applied in iterator() to
        # emulate GAE ListProperty membership filters.
        self._listprop_filter = None
    def filter(self, *args, **kwds):
        # GAE-style filter: filter('prop op', value). Django-style keyword
        # filtering still works when keyword arguments are given.
        if kwds:
            return super(Query, self).filter(*args, **kwds)
        property_operator, value = args
        if isinstance(value, basestring):
            value = u'%s' % value
            # Escape single quotes for the raw-SQL fallback branch below.
            value = value.replace("'", "''")
        elif isinstance(value, Key):
            value = value.obj
        prop, op = property_operator.split(' ', 1)
        # TODO(andi): See GqlQuery. Refactor query building.
        if op.lower() in ('=', 'is'):
            self.query.add_q(Q(**{prop: value}))
        elif op == '>':
            self.query.add_q(Q(**{'%s__gt' % prop: value}))
        elif op == '<':
            self.query.add_q(Q(**{'%s__lt' % prop: value}))
        elif op == '>=':
            self.query.add_q(Q(**{'%s__gte' % prop: value}))
        elif op == '<=':
            self.query.add_q(Q(**{'%s__lte' % prop: value}))
        else:
            # NOTE(review): unknown operators fall back to a raw SQL
            # fragment built with %r; this relies entirely on the quote
            # escaping above -- audit before adding new operator spellings.
            where = '%s %r' % (property_operator, value)
            self.query.add_extra(None, None, [where], None, None, None)
        return self
    def _filter(self, *args, **kwds):
        # Plain Django filter(), kept reachable under a different name
        # because filter() above has GAE semantics.
        return super(Query, self).filter(*args, **kwds)
    def order(self, prop):
        # GAE-style ordering ('-prop' for descending).
        self.query.add_ordering(prop)
        return self
    def get(self, *args, **kwds):
        # GAE-style get(): first result or None (Django's get() raises).
        if kwds:
            return super(Query, self).get(*args, **kwds)
        results = list(self)
        if results:
            return results[0]
        return None
    def ancestor(self, ancestor):
        if isinstance(ancestor, Key):
            # This isn't very efficient because we need to run an
            # extra query to fetch the object. Maybe this could be
            # refactored to work on keys only (instead of objects),
            # but Key.parent() fetches an object too, so it makes no
            # difference ATM.
            ancestor = ancestor.obj
        pattern = '@@'.join(str(x.key()) for x in ancestor.get_ancestry())
        # TODO(andi): __startswith would be better, see issue21
        self.query.add_q(Q(gae_ancestry__endswith='@%s@' % pattern))
        return self
    def fetch(self, limit, offset=0):
        # NOTE(review): GAE's fetch(limit, offset) returns up to 'limit'
        # items after skipping 'offset'; this slice ([offset:limit]) yields
        # limit-offset items instead -- confirm this is intended.
        return list(self)[offset:limit]
    def iterator(self):
        """Handles ListProperty filters."""
        for obj in super(Query, self).iterator():
            if self._listprop_filter is not None:
                # Keep only objects whose list attribute contains every
                # requested item.
                matched = True
                for kwd, item in self._listprop_filter:
                    if item not in getattr(obj, kwd):
                        matched = False
                        break
                if matched:
                    yield obj
            else:
                yield obj
class BaseManager(manager.Manager):
    """Manager whose query sets are gae2django Query objects."""
    def __iter__(self):
        return self.iterator()
    def _filter(self, *args, **kwds):
        """Plain Django filtering, bypassing the GAE filter overload."""
        return self.get_query_set()._filter(*args, **kwds)
    def count(self, limit=None):
        """Number of rows; `limit` is accepted for API parity but ignored."""
        return super(BaseManager, self).count()
    def order(self, *args, **kwds):
        """GAE spelling of Django's order_by()."""
        return super(BaseManager, self).order_by(*args, **kwds)
    def get_query_set(self):
        return Query(self.model)
def _adjust_keywords(kwds):
required = kwds.get('required', False)
kwds['null'] = not required
kwds['blank'] = not required
if 'required' in kwds:
del kwds['required']
if 'choices' in kwds:
kwds['choices'] = [(a, a) for a in kwds['choices']]
return kwds
class StringProperty(models.CharField):
    """GAE StringProperty -> CharField capped at 500 characters."""
    def __init__(self, *args, **kwds):
        adjusted = _adjust_keywords(kwds)
        # GAE limits short strings to 500 characters; enforce the same cap.
        adjusted['max_length'] = 500
        super(StringProperty, self).__init__(*args, **adjusted)
class TextProperty(models.TextField):
    """GAE TextProperty -> unbounded Django TextField."""
    def __init__(self, *args, **kwds):
        adjusted = _adjust_keywords(kwds)
        super(TextProperty, self).__init__(*args, **adjusted)
class BooleanProperty(models.NullBooleanField):
    """GAE BooleanProperty -> NullBooleanField (optional by default)."""
    def __init__(self, *args, **kwds):
        adjusted = _adjust_keywords(kwds)
        super(BooleanProperty, self).__init__(*args, **adjusted)
class UserProperty(models.ForeignKey):
    """GAE UserProperty emulated as a ForeignKey to django.contrib.auth User.

    Supports GAE's `auto_current_user_add` keyword: when set, new
    instances default to the user bound to the current request.
    """
    def __init__(self, *args, **kwds):
        kwds = _adjust_keywords(kwds)
        self._auto_current_user_add = False
        if 'auto_current_user_add' in kwds:
            # Not a Django keyword; remember the flag and strip it.
            self._auto_current_user_add = True
            del kwds['auto_current_user_add']
        super(UserProperty, self).__init__(User, *args, **kwds)
    def get_default(self):
        """Default value for the field: the current request's user.

        Bug fix: a trailing `return super(...).get_default()` after an
        unconditional return was unreachable dead code; it has been
        removed (behavior is unchanged).
        """
        if self._auto_current_user_add:
            user = get_current_user()
            if user is not None:
                # ForeignKey defaults are raw ids, not instances.
                return user.id
            return None
        return get_current_user()
def patch_user_model(sender, **kwds):
    """post_init handler: augment auth.User instances via patch_user()."""
    if sender != User:
        return
    if 'instance' not in kwds: # just to go for sure, shouldn't happen
        return
    patch_user(kwds['instance'])
# Patch every User as soon as Django instantiates it.
post_init.connect(patch_user_model)
class DateTimeProperty(models.DateTimeField):
    """GAE DateTimeProperty -> Django DateTimeField."""
    def __init__(self, *args, **kwds):
        adjusted = _adjust_keywords(kwds)
        super(DateTimeProperty, self).__init__(*args, **adjusted)
class ListProperty(models.TextField):
    """GAE ListProperty stored as a base64-encoded pickle in a text column."""
    __metaclass__ = models.SubfieldBase
    def __init__(self, type_, *args, **kwds):
        # `type_` (the GAE item type) is accepted but not enforced.
        kwds = _adjust_keywords(kwds)
        # NOTE(review): this calls Field.__init__ (skipping TextField) and
        # drops args/kwds entirely -- looks unintended, but changing it
        # would alter column initialization; confirm before fixing.
        super(models.TextField, self).__init__()
    def get_db_prep_value(self, value, connection=None, prepared=False):
        # Serialize the whole list as a pickled, base64-encoded text blob.
        return base64.encodestring(cPickle.dumps(value))
    def to_python(self, value):
        """Decode the stored blob back into a list; None -> []."""
        if type(value) in [types.ListType, types.TupleType]:
            return value
        if value is None:
            return []
        try:
            # NOTE: cPickle.loads on DB contents; safe only if the
            # database is trusted.
            return cPickle.loads(base64.decodestring(value))
        except EOFError:
            return []
# GAE type aliases: plain (unicode) strings suffice for this emulation.
Email = str
Link = str
Text = unicode
class Blob(str):
    """Marker type for binary data (see BlobProperty)."""
    pass
class G2DReverseSingleRelatedObjectDescriptor(RSROD):
    """Descriptor variant that also offers get_value_for_datastore.

    GAE code expects the raw foreign-key value of a reference via
    get_value_for_datastore() (see issue 1).
    """
    def get_value_for_datastore(self, model_instance):
        return getattr(model_instance, self.__id_attr_name())
    def __id_attr_name(self):
        return self._attr_name()
    def _attr_name(self):
        return "_%s" % self.field.name
class ReferenceProperty(models.ForeignKey):
    """GAE ReferenceProperty emulated as a Django ForeignKey."""
    def __init__(self, other, *args, **kwds):
        kwds = _adjust_keywords(kwds)
        if 'collection_name' in kwds:
            # GAE's collection_name maps to Django's related_name.
            kwds['related_name'] = kwds['collection_name']
            del kwds['collection_name']
        super(ReferenceProperty, self).__init__(other, *args, **kwds)
    def contribute_to_class(self, cls, name):
        # This is mainly a copy of the ForeignKey's contribute_to_class.
        # The only difference is that we use our custom
        # ReverseSingleRelatedObjectDescriptor that implements
        # get_value_for_datastore (see issue 1).
        super(ReferenceProperty, self).contribute_to_class(cls, name)
        setattr(cls, self.name, G2DReverseSingleRelatedObjectDescriptor(self))
        if isinstance(self.rel.to, basestring):
            target = self.rel.to
        else:
            target = self.rel.to._meta.db_table
        cls._meta.duplicate_targets[self.column] = (target, "o2m")
# GAE's SelfReferenceProperty behaves identically in this emulation.
SelfReferenceProperty = ReferenceProperty
class BlobProperty(models.TextField):
    """GAE BlobProperty stored base64-encoded in a text column."""
    __metaclass__ = models.SubfieldBase
    def __init__(self, *args, **kwds):
        kwds = _adjust_keywords(kwds)
        super(BlobProperty, self).__init__(*args, **kwds)
    def get_db_prep_value(self, value, connection=None, prepared=False):
        # Store as base64 text; None passes through untouched.
        if value is None:
            return value
        return base64.encodestring(value)
    def to_python(self, value):
        """Decode the stored text back into a Blob."""
        if value is None:
            return value
        if isinstance(value, Blob):
            return value
        elif isinstance(value, unicode):
            # For legacy data
            value = value.encode('utf-8')
        try:
            return Blob(base64.decodestring(value))
        except binascii.Error:
            # value is already decoded, or for legacy data it was
            # never encoded
            return Blob(value)
class LinkProperty(models.URLField):
    """GAE LinkProperty -> Django URLField."""
    def __init__(self, *args, **kwds):
        adjusted = _adjust_keywords(kwds)
        super(LinkProperty, self).__init__(*args, **adjusted)
class EmailProperty(models.EmailField):
    """GAE EmailProperty -> Django EmailField."""
    def __init__(self, *args, **kwds):
        adjusted = _adjust_keywords(kwds)
        super(EmailProperty, self).__init__(*args, **adjusted)
class IntegerProperty(models.IntegerField):
    """GAE IntegerProperty -> Django IntegerField."""
    def __init__(self, *args, **kwds):
        adjusted = _adjust_keywords(kwds)
        super(IntegerProperty, self).__init__(*args, **adjusted)
class BaseModelMeta(models.base.ModelBase):
    """Metaclass that wires the gae2django manager into every Model."""
    def __new__(cls, name, bases, attrs):
        new_cls = super(BaseModelMeta, cls).__new__(cls, name, bases, attrs)
        # Replace the default manager with the GAE-flavored one.
        new_cls.objects = BaseManager()
        new_cls.objects.model = new_cls
        new_cls._default_manager = new_cls.objects
        # Remember which attributes are references so Model.__getattribute__
        # can expose the GAE-style `_<name>` key shortcut.
        new_cls._reference_attrs = set()
        # NOTE: `name` (the class-name argument) is shadowed by the loop
        # variable below; harmless here but confusing.
        for name in set(attrs):
            value = attrs[name]
            if isinstance(value, (ReferenceProperty, SelfReferenceProperty)):
                new_cls._reference_attrs.add(name)
        return new_cls
class Model(models.Model):
    """Abstract base emulating google.appengine.ext.db.Model on Django.

    Adds GAE bookkeeping columns (gae_key, generic parent, ancestry
    trail) plus GAE-style class/instance helpers (get_or_insert, gql,
    key(), put(), get_by_key_name, ...).
    """
    __metaclass__ = BaseModelMeta
    # GAE key name, set when the entity was created with key/key_name.
    gae_key = models.CharField(max_length=64, blank=True, null=True,
                               unique=True)
    # Generic foreign key emulating GAE entity parents.
    gae_parent_ctype = models.ForeignKey(ContentType,
                                         blank=True, null=True)
    gae_parent_id = models.PositiveIntegerField(blank=True, null=True)
    # '@key@' ancestry trail used for ancestor queries.
    gae_ancestry = models.CharField(max_length=500, blank=True, null=True)
    parent = generic.GenericForeignKey('gae_parent_ctype',
                                       'gae_parent_id')
    class Meta:
        abstract = True
    def __init__(self, *args, **kwds):
        """Map GAE keywords (parent, key, key_name) onto the Django
        columns before delegating to models.Model."""
        # keywords for GenericForeignKeys don't work with abstract classes:
        # http://code.djangoproject.com/ticket/8309
        if 'parent' in kwds:
            parent = kwds['parent']
            ctype = ContentType.objects.get_for_model(parent.__class__)
            kwds['gae_parent_ctype'] = ctype
            kwds['gae_parent_id'] = parent.id
            kwds['gae_ancestry'] = ''.join(['@%s@' % prnt.key()
                                            for prnt in parent.get_ancestry()])
            del kwds['parent']
        if 'key' in kwds:
            kwds['gae_key'] = kwds['key']
            del kwds['key']
        if 'key_name' in kwds:
            kwds['gae_key'] = kwds['key_name']
            del kwds['key_name']
        self._key = None
        super(Model, self).__init__(*args, **kwds)
    def __getattribute__(self, name):
        # GAE exposes the raw key of a ReferenceProperty via the mangled
        # attribute `_<name>`; emulate that for registered reference attrs.
        ref_attrs = super(Model, self).__getattribute__('_reference_attrs')
        if name.startswith('_') and name[1:] in ref_attrs:
            referenced = super(Model, self).__getattribute__(name[1:])
            if referenced is not None:
                return referenced.key()
            return None
        return super(Model, self).__getattribute__(name)
    @classmethod
    def get_or_insert(cls, key, **kwds):
        """Fetch the entity with gae_key == key, creating it if missing."""
        try:
            return cls.objects.get(gae_key=key)
        except cls.DoesNotExist:
            kwds['gae_key'] = key
            new = cls(**kwds)
            new.save()
            return new
    @classmethod
    def get_by_key_name(cls, keys, parent=None):
        """Fetch instance(s) by key name; missing keys yield None."""
        single = False
        # if keys isn't a list then a single instance is returned
        if type(keys) not in [types.ListType, types.TupleType]:
            single = True
            keys = [keys]
        result = []
        for key in keys:
            try:
                kwds = {'gae_key': str(key)}
                if parent is not None:
                    kwds['gae_ancestry__icontains'] = str(parent.key())
                result.append(cls.objects.get(**kwds))
            except cls.DoesNotExist:
                result.append(None)
        if single and len(result) != 0:
            return result[0]
        elif single:
            return None
        else:
            return result
    @classmethod
    def get_by_id(cls, id_, parent=None):
        """Fetch instance(s) by numeric id; missing ids yield None."""
        # Ignore parent, we've got an ID
        ret = []
        return_list = True
        if type(id_) not in (types.ListType, types.TupleType):
            id_ = [id_]
            return_list = False
        for i in id_:
            try:
                ret.append(cls.objects.get(id=i))
            except cls.DoesNotExist:
                ret.append(None)
        if len(id_) == 1 and not return_list:
            return ret[0]
        else:
            return ret
    @classmethod
    def kind(cls):
        # Return the table name here. It should be the expected output...
        return cls._meta.db_table
    @classmethod
    def all(cls):
        """Return a query over all entities of this kind."""
        return cls.objects.all()
    @classmethod
    def properties(cls):
        """Map of property name -> field, minus the gae_* bookkeeping."""
        props = {}
        [props.setdefault(field.name, field) for field in cls._meta.fields
         if not field.name.startswith('gae_')]
        return props
    def key(self):
        """Return (and cache) this instance's Key.

        Raises NotSavedError when the instance has no id yet."""
        if self.id is None:
            raise NotSavedError()
        if self._key is None:
            self._key = Key('%s_%s' % (self.__class__.__name__, self.id))
            self._key._obj = self
        return self._key
    def is_saved(self):
        return self.id is not None
    def put(self):
        """GAE alias for save()."""
        return self.save()
    def save(self):
        # NOTE(review): `self.key` is a bound method and therefore always
        # truthy, so this branch can never execute (and would shadow the
        # key() method if it did); it looks like code copied from a
        # session backend -- confirm intent before relying on it.
        if not self.key:
            try:
                pid = os.getpid()
            except AttributeError:
                pid = 1
            self.key = md5_constructor("%s%s%s%s"
                % (randrange(0, MAX_SESSION_KEY),
                   pid, time.time(),
                   self.__name__)).hexdigest()
        super(Model, self).save()
    @classmethod
    def gql(cls, clause, *args, **kwds):
        """Run a GQL query restricted to this model class."""
        from google.appengine.ext import db
        query = db.GqlQuery('SELECT * FROM %s %s' % (cls.__name__,
                                                     clause), *args, **kwds)
        query._real_cls = cls
        return query
    @classmethod
    def get(cls, keys):
        """Fetch instance(s) by key (delegates to get_by_key_name)."""
        if type(keys) not in [types.ListType, types.TupleType]:
            keys = [keys]
        instances = [cls.get_by_key_name(key) for key in keys]
        if len(keys) == 1:
            return instances[0]
        else:
            return instances
    def parent_key(self):
        return self.parent.key()
    def get_ancestry(self):
        """Returns parent objects (self first, then each ancestor)."""
        yield self
        parent = self.parent
        while parent:
            yield parent
            parent = parent.parent
from django import forms as djangoforms
class _QueryIterator(object):
def __init__(self, results):
self._results = results
self._idx = -1
def __iter__(self):
return self
def next(self):
self._idx += 1
if len(self._results) > self._idx:
return self._results[self._idx]
else:
raise StopIteration
class GqlQuery(object):
    """Executes a GQL query against the Django ORM.

    The GQL string is parsed with gaeapi's gql module and translated
    into a Django queryset the first time results are needed.
    """
    def __init__(self, sql, *args, **kwds):
        from gaeapi.appengine.ext import gql
        #print sql, args, kwds
        self._sql = sql
        self._gql = gql.GQL(sql)
        # Set by Model.gql() to pin the model class explicitly.
        self._real_cls = None
        self._args = []
        self._kwds = {}
        if args or kwds:
            self.bind(*args, **kwds)
        self._cursor = None
        self._idx = -1
        self._results = None
    def __iter__(self):
        if self._results is None:
            self._execute()
        return _QueryIterator(self._results)
    def _resolve_arg(self, value):
        """Resolve a GQL argument: keyword ref, positional ref or literal.

        Bug fix: the error branch referenced the undefined name `item`
        (a NameError); it now reports the offending `value`.
        """
        from gaeapi.appengine.ext import gql
        if isinstance(value, basestring):
            return self._kwds[value]
        elif isinstance(value, int):
            return self._args[value-1]
        elif isinstance(value, gql.Literal):
            return value.Get()
        else:
            raise Error('Unhandled args %s' % value)
    def _execute(self):
        """Translate the parsed GQL into a Django queryset (self._results)."""
        from gaeapi.appengine.ext import gql
        if self._cursor:
            raise Error('Already executed.')
        # Make sql local just for traceback
        sql = self._sql
        from django.db import models
        # First, let's see if the class is explicitely given.
        # E.g. Model.gql('xxx') set's _real_cls.
        cls = self._real_cls
        if cls is None:
            for xcls in models.get_models():
                if (xcls.__name__ == self._gql._entity \
                    or xcls._meta.db_table in self._sql) \
                    and not xcls.__module__.startswith('django.'):
                    cls = xcls
                    break
        if not cls:
            raise Error('Class not found.')
        q = cls.objects.all()
        q = q.select_related()
        #print '-'*10
        #print "xx", sql, self._args, self._kwds
        ancestor = None
        listprop_filter = []
        for key, value in self._gql.filters().items():
            #print key, value
            kwd, op = key
            if op == '=':
                if cls._meta.get_field(kwd).rel:
                    rel_cls = cls._meta.get_field(kwd).rel.to
                else:
                    rel_cls = None
                for xop, val in value:
                    # FIXME: Handle lists...
                    item = val[0]
                    if isinstance(item, gql.Literal):
                        #print 'Literal', item
                        item = item.Get()
                        #print '-->', item
                    elif isinstance(item, basestring):
                        #print 'Keyword', item
                        item = self._kwds[item]
                        #print '-->', item
                    elif isinstance(item, int):
                        #print 'Positional', item
                        item = self._args[item-1]
                        #print '-->', item
                    else:
                        raise Error('Unhandled args %s' % item)
                    # if rel_cls:
                    #     # FIXME: Handle lists
                    #     try:
                    #         item = rel_cls.objects.get(id=item)
                    #     except rel_cls.DoesNotExist:
                    #         continue
                    if isinstance(cls._meta.get_field(kwd), ListProperty):
                        # ListProperty filters can't be done in SQL;
                        # they are applied while iterating.
                        listprop_filter.append((kwd, item))
                        continue
                    if isinstance(kwd, unicode):
                        kwd = kwd.encode('ascii')
                    q = q._filter(**{kwd: item})
            elif op == 'is' and kwd == -1: # ANCESTOR
                if ancestor:
                    raise Error('Ancestor already defined: %s' % ancestor)
                item = value[0][1][0]
                if isinstance(item, basestring):
                    ancestor = self._kwds[item]
                elif isinstance(item, int):
                    ancestor = self._args[item-1]
                else:
                    raise Error('Unhandled args %s' % item)
                pattern = '@%s@' % ancestor.key()
                q = q._filter(**{'gae_ancestry__contains': pattern})
            elif op == '>':
                item = self._resolve_arg(value[0][1][0])
                q = q._filter(**{'%s__gt' % kwd: item})
            elif op == '<':
                item = self._resolve_arg(value[0][1][0])
                q = q._filter(**{'%s__lt' % kwd: item})
            elif op == '>=':
                item = self._resolve_arg(value[0][1][0])
                q = q._filter(**{'%s__gte' % kwd: item})
            elif op == '<=':
                item = self._resolve_arg(value[0][1][0])
                q = q._filter(**{'%s__lte' % kwd: item})
            else:
                raise Error('Unhandled operator %s' % op)
        orderings = []
        for field, direction in self._gql.orderings():
            if direction != 1:
                field = '-%s' % field
            orderings.append(field)
        if orderings:
            q = q.order_by(*orderings)
        if listprop_filter:
            q._listprop_filter = listprop_filter
        self._results = q
    def bind(self, *args, **kwds):
        """Bind positional/keyword parameters; clears cached results."""
        self._kwds = kwds
        self._args = args
        self._results = None
    def fetch(self, limit, offset=0):
        """Return at most `limit` results after skipping `offset`."""
        if self._results is None:
            self._execute()
        return list(self._results[offset:offset+limit])
    def count(self, limit=None):
        """Number of results; `limit` is accepted but ignored."""
        if self._results is None:
            self._execute()
        idx = self._idx
        c = len(self._results)
        self._idx = idx
        return c
    def get(self):
        """Return the first result or None."""
        if self._results is None:
            self._execute()
        if self._results:
            return self._results.get()
        return None
class Key(object):
    """Emulation of google.appengine.ext.db.Key.

    A key string has the form '<ModelClassName>_<id>'.  The referenced
    model instance is loaded lazily through the `obj` property.
    """
    def __init__(self, key_str):
        if not isinstance(key_str, basestring):
            raise BadArgumentError(('Key() expects a string; '
                                    'received %s (a %s)'
                                    % (key_str, type(key_str))))
        self._obj = None
        self._key_str = key_str
    def __str__(self):
        return self._key_str
    def __cmp__(self, other):
        # Keys compare by their string form.
        return cmp(str(self), str(other))
    def __hash__(self):
        return hash(self.__str__())
    def _get_obj(self):
        if self._obj is None:
            self._init_obj()
        return self._obj
    def _set_obj(self, obj):
        self._obj = obj
    obj = property(fget=_get_obj, fset=_set_obj)
    @classmethod
    def _find_model_cls(cls, name):
        """Return the (non-Django) model class named `name`."""
        from django.db.models.loading import get_models
        model_cls = None
        for model in get_models():
            if model.__module__.startswith('django'):
                continue
            if model.__name__ == name:
                model_cls = model
                break
        assert model_cls is not None
        return model_cls
    def _init_obj(self):
        """Load the referenced model instance from the key string.

        Bug fix: the fetched instance used to be *returned* instead of
        stored, so the `obj` property stayed None forever; it is now
        assigned to self._obj.  Also, QuerySet.get() takes keyword
        lookups, so the id is passed as id=... rather than positionally.
        """
        clsname, objid = self._key_str.rsplit('_', 1)
        model_cls = self._find_model_cls(clsname)
        self._obj = model_cls.objects.get(id=int(objid))
    @classmethod
    def from_path(cls, *args, **kwds):
        """Build a Key from (kind, id_or_name, kind, id_or_name, ...)."""
        if kwds and tuple(kwds) != ('parent',):
            raise BadArgumentError('Excess keyword arguments %r' % kwds)
        if len(args)%2 != 0 or len(args) == 0:
            raise BadArgumentError(('A non-zero even number of positional '
                                    'arguments is required (kind, id or name, '
                                    'kind, id or name, ...); received %s'
                                    % repr(args)))
        cls_name = args[-2]
        key_name = args[-1]
        if isinstance(key_name, basestring) and not key_name.isdigit():
            # Key names are resolved against the gae_key column.
            model_cls = cls._find_model_cls(cls_name)
            obj = model_cls.objects.get(gae_key=key_name)
            key = obj.key()
        else:
            key = cls('%s_%s' % (cls_name, key_name), **kwds)
        return key
    def app(self):
        return self.obj._meta.app_label
    def kind(self):
        return self.obj.__class__.__name__
    def id(self):
        if self.name():
            return None
        return self.obj.id
    def name(self):
        return self.obj.gae_key
    def id_or_name(self):
        if self.name() is None:
            return self.id()
        return self.name()
    def has_id_or_name(self):
        # Always returns True as we've always have at least an id...
        return True
    def parent(self):
        return self.obj.parent.key()
# Errors -- mirror of the google.appengine.ext.db exception hierarchy.
class Error(Exception):
    """db.Error"""
class BadArgumentError(Error):
    """A bad argument was given to a query method."""
class BadFilterError(Error):
    """A filter string in the query is invalid."""
class BadKeyError(Error):
    """The provided key string is not a valid key."""
class BadPropertyError(Error):
    """The property could not be created because its name is not a string."""
class BadQueryError(Error):
    """The query string is not a valid query."""
class BadRequestError(Error):
    """Request to the datastore service has one or more invalid properties."""
class BadValueError(Error):
    """Invalid value for the property type."""
class ConfigurationError(Error):
    """A property is not configured correctly."""
class DuplicatePropertyError(Error):
    """A model definition has more than one property with the same name."""
class InternalError(Error):
    """There was an error internal to the datastore service."""
class KindError(Error):
    """Model class that does not match the entity."""
class NotSavedError(Error):
    """Object is not saved."""
class PropertyError(Error):
    """The referenced model property does not exist on the data object."""
class ReservedWordError(Error):
    """A model defines a property whose name is disallowed."""
class Rollback(Error):
    """Indicates that a function in a transaction wants to roll back."""
class TransactionFailedError(Error):
    """The transaction or datastore operation could not be committed."""
class CapabilityDisabledError(Error):
    """Datastore functionality is not available."""
# Functions
def get(keys):
    """Module-level db.get() is not supported by this emulation."""
    raise NotImplementedError
def put(models):
    """Save the given model instance(s).

    Returns a single key for a single instance, a list of keys for a
    sequence, or None for an empty sequence.
    """
    if type(models) not in [types.ListType, types.TupleType]:
        models = [models]
    keys = []
    for instance in models:
        instance.save()
        # NOTE(review): appends the `key` attribute without calling it --
        # for gae2django models `key` is a method; confirm callers expect
        # exactly that object.
        keys.append(instance.key)
    if not keys:
        return None
    if len(keys) == 1:
        return keys[0]
    return keys
def delete(models):
    """Delete the given model instance(s) from the database."""
    if type(models) not in [types.ListType, types.TupleType]:
        models = [models]
    for instance in models:
        instance.delete()
@transaction.commit_on_success
def run_in_transaction(func, *args, **kwds):
    """Run func(*args, **kwds) inside a Django transaction.

    commit_on_success commits on normal return and rolls back when the
    callable raises, approximating db.run_in_transaction.
    """
    return func(*args, **kwds)
| Python |
from django.conf import settings
from django.contrib.auth.models import User
def get_current_user():
    """Return the User bound to the current request, or None.

    Delegates to gae2django's thread-local middleware; imported lazily
    to avoid a circular import at module load time.
    """
    from gae2django import middleware
    return middleware.get_current_user()
def is_current_user_admin():
    """True iff the current request's user is a Django superuser."""
    user = get_current_user()
    if not user:
        return False
    return user.is_superuser
def create_login_url(redirect):
    """Login URL with `redirect` as the post-login target."""
    suffix = '?next=' + redirect
    return settings.LOGIN_URL + suffix
def create_logout_url(redirect):
    """Logout URL with `redirect` as the post-logout target."""
    suffix = '?next=' + redirect
    return settings.LOGOUT_URL + suffix
class Error(Exception):
    """Base class for all exceptions in this package."""
class UserNotFoundError(Error):
    """Raised when a requested User does not exist."""
class RedirectTooLongError(Error):
    """Raised when the redirect URL is too long."""
| Python |
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements the URL fetch API.
http://code.google.com/appengine/docs/memcache/
"""
from django.core.cache import cache
class Client(object):
    """memcache.Client emulation backed by Django's cache framework."""
    def set(self, key, value, time=0, min_compress_len=0):
        # `time` and `min_compress_len` are accepted but ignored.
        cache.set(key, value)
        return True
    def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
        [self.set('%s%s' % (key_prefix, key), mapping[key],
                  time, min_compress_len) for key in mapping]
        # memcache returns the list of keys that failed; set() never
        # fails here, so the list is always empty.
        return []
    def get(self, key):
        return cache.get(key)
    def get_multi(self, keys, key_prefix=''):
        # Only keys actually present in the cache end up in the result.
        mapping = {}
        [mapping.setdefault(key, self.get('%s%s' % (key_prefix, key)))
         for key in keys
         if '%s%s' % (key_prefix, key) in cache]
        return mapping
    def delete(self, key, seconds=0):
        # TODO: Implement locking (seconds keyword).
        # Return codes follow GAE memcache: 1 = item missing, 2 = deleted.
        if key not in cache:
            return 1
        cache.delete(key)
        return 2
    def delete_multi(self, keys, seconds=0, key_prefix=''):
        # True only when every key existed and was deleted (code 2).
        succeeded = True
        for key in keys:
            if self.delete('%s%s' % (key_prefix, key), seconds) != 2:
                succeeded = False
        return succeeded
    def add(self, key, value, time=0, min_compress_len=0):
        # Only stores when the key is absent (Django cache.add semantics).
        return cache.add(key, value, time or None)
    def replace(self, key, value, time=0, min_compress_len=0):
        # Only stores when the key already exists.
        if key in cache:
            self.set(key, value, time, min_compress_len)
            return True
        return False
    def incr(self, key, delta=1):
        if key in cache:
            try:
                old = long(cache.get(key))
                new = old+delta
                cache.set(key, new)
                return new
            except ValueError:
                # Stored value isn't numeric.
                return None
        return None
    def decr(self, key, delta=1):
        return self.incr(key, delta*-1)
    def flush_all(self):
        # Django doesn't know all keys in cache. So let's raise an RPC error...
        return False
    def get_stats(self):
        # Again, Django doesn't have this information.
        return {'hits': 0,
                'misses': 0,
                'byte_hits': 0,
                'items': 0,
                'bytes': 0,
                'oldest_item_age': 0}
    def set_servers(self, servers):
        # No server pool to manage in this emulation.
        pass
    def disconnect_all(self):
        pass
    def forget_dead_hosts(self):
        pass
    def debuglog(self):
        pass
_CLIENT = None
def setup_cache(client_obj):
    """Install `client_obj` as the module-level memcache client.

    The GAE memcache API exposes module-level functions; this copies the
    client's bound methods into the module namespace.
    """
    global _CLIENT
    _CLIENT = client_obj
    namespace = globals()
    for method_name in ('set_servers', 'disconnect_all', 'forget_dead_hosts',
                        'debuglog', 'get', 'get_multi', 'set', 'set_multi',
                        'add', 'replace', 'delete', 'delete_multi', 'incr',
                        'decr', 'flush_all', 'get_stats'):
        namespace[method_name] = getattr(_CLIENT, method_name)
setup_cache(Client())
| Python |
class Query(dict):
    """Unimplemented placeholder mirroring google.appengine.ext.db.Query."""
    def __init__(self, model_class):
        # Deliberately skips dict.__init__; only the model class is kept.
        self._model_cls = model_class
    def filter(self, property_operator, value):
        """Not supported by this backend."""
        raise NotImplementedError
    def order(self, property):
        """Not supported by this backend."""
        raise NotImplementedError
    def ancestor(self, ancestor):
        """Not supported by this backend."""
        raise NotImplementedError
    def get(self):
        """Not supported by this backend."""
        raise NotImplementedError
    def fetch(self, limit, offset=0):
        """Not supported by this backend."""
        raise NotImplementedError
    def count(self, limit):
        """Not supported by this backend."""
        raise NotImplementedError
# Copied from google.appengine.api.datastore.
# Used in ext.gql.GQL
def _AddOrAppend(dictionary, key, value):
"""Adds the value to the existing values in the dictionary, if any.
If dictionary[key] doesn't exist, sets dictionary[key] to value.
If dictionary[key] is not a list, sets dictionary[key]
to [old_value, value].
If dictionary[key] is a list, appends value to that list.
Args:
dictionary: a dict
key, value: anything
"""
if key in dictionary:
existing_value = dictionary[key]
if isinstance(existing_value, list):
existing_value.append(value)
else:
dictionary[key] = [existing_value, value]
else:
dictionary[key] = value
| Python |
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements the URL fetch API.
http://code.google.com/appengine/docs/urlfetch/
"""
import httplib
import socket
import urlparse
# Constants
GET = 'GET'
POST = 'POST'
HEAD = 'HEAD'
PUT = 'PUT'
DELETE = 'DELETE'
# Follow at most this many redirects before giving up.
MAX_REDIRECTS = 5
# HTTP statuses that fetch() follows transparently.
REDIRECT_STATUSES = frozenset([
    httplib.MOVED_PERMANENTLY,
    httplib.FOUND,
    httplib.SEE_OTHER,
    httplib.TEMPORARY_REDIRECT,
])
def fetch(url, payload=None, method=GET, headers={}, allow_truncated=False):
if method in [POST, PUT]:
payload = payload or ''
else:
payload = ''
for redirect_number in xrange(MAX_REDIRECTS+1):
scheme, host, path, params, query, fragment = urlparse.urlparse(url)
try:
if scheme == 'http':
connection = httplib.HTTPConnection(host)
elif scheme == 'https':
connection = httplib.HTTPSConnection(host)
else:
raise InvalidURLError('Protocol \'%s\' is not supported.')
if query != '':
full_path = path + '?' + query
else:
full_path = path
adjusted_headers = {
'Content-Length': len(payload),
'Host': host,
'Accept': '*/*',
}
for header in headers:
adjusted_headers[header] = headers[header]
try:
connection.request(method, full_path, payload,
adjusted_headers)
http_response = connection.getresponse()
http_response_data = http_response.read()
finally:
connection.close()
if http_response.status in REDIRECT_STATUSES:
newurl = http_response.getheader('Location', None)
if newurl is None:
raise DownloadError('Redirect is missing Location header.')
else:
url = urlparse.urljoin(url, newurl)
method = 'GET'
else:
response = Response()
response.content = http_response_data
response.status_code = http_response.status
response.headers = {}
for header_key, header_value in http_response.getheaders():
response.headers[header_key] = header_value
return response
except (httplib.error, socket.error, IOError), e:
raise DownloadError('Download of \'%s\' failed: %s' % (url, e))
class Response(object):
    """Result of a fetch() call."""
    content = None                 # response body as returned by the server
    content_was_truncated = False  # always False; truncation is not emulated
    status_code = -1               # HTTP status code of the final response
    headers = None                 # dict of response headers
class Error(Exception):
    """Base class for all URL fetch exceptions."""
class InvalidURLError(Error):
    """The URL was malformed or used an unsupported scheme."""
class DownloadError(Error):
    """The download failed (network error or bad redirect)."""
class ResponseTooLargeError(Error):
    """Unused; present only for API compatibility."""
| Python |
"""Stub XMPP module that does nothing except exposing the API."""
NO_ERROR = 0
INVALID_JID = 1
OTHER_ERROR = 2
MESSAGE_TYPE_CHAT = 'chat'
MESSAGE_TYPE_ERROR = 'error'
MESSAGE_TYPE_GROUPCHAT = 'groupchat'
MESSAGE_TYPE_HEADLINE = 'headline'
MESSAGE_TYPE_NORMAL = 'normal'
class Error(Exception):
    """Base class for XMPP stub errors."""
    pass
class InvalidJidError(Error):
    """The JID was malformed."""
    pass
class InvalidTypeError(Error):
    """The message type was not a recognized MESSAGE_TYPE_* value."""
    pass
class InvalidXmlError(Error):
    """The raw XML payload was invalid."""
    pass
class NoBodyError(Error):
    """The message had no body."""
    pass
class InvalidMessageError(Error):
    """A required message field ('from', 'to' or 'body') was missing."""
    pass
class Message(object):
def __init__(self, args):
try:
self.sender = args['from']
self.to = args['to']
self.body = args['body']
except KeyError, err:
raise InvalidMessageError(err)
self.command = None
self.arg = 'body'
def reply(self, body, message_type=MESSAGE_TYPE_CHAT, raw_xml=False):
return NO_ERROR
def get_presence(jid, from_jid=None):
    """Stub: every JID is reported as offline."""
    return False
def send_invite(jid, from_jid=None):
    """Stub: invitations are silently dropped."""
    return None
def send_message(jids, body, *args, **kwds):
    """Stub: report NO_ERROR for every requested JID."""
    if isinstance(jids, basestring):
        return NO_ERROR
    return [NO_ERROR] * len(jids)
| Python |
#
# Copyright 2008 Andi Albrecht <albrecht.andi@gmail.com>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Implements the mail fetch API.
http://code.google.com/appengine/docs/mail/
"""
from django.conf import settings
from django.core.mail import EmailMessage as _EmailMessage
class Error(Exception):
    """Base class for all exceptions in this module."""
class BadRequestError(Error):
    """The mail request was malformed."""
    pass
class InvalidSenderError(Error):
    """The sender address was not valid."""
    pass
class InvalidEmailError(Error):
    """An email address was not valid."""
    pass
class InvalidAttachmentTypeError(Error):
    """An attachment had a disallowed type."""
    pass
class MissingRecipientsError(Error):
    """No to/cc/bcc recipient was given."""
    pass
class MissingSenderError(Error):
    """No sender address was given."""
    pass
class MissingSubjectError(Error):
    """No subject was given."""
    pass
class MissingBodyError(Error):
    """No message body was given."""
    pass
class EmailMessage(object):
    """GAE-style outbound e-mail message backed by Django's mail API.

    Fields (sender, to, cc, bcc, reply_to, subject, body, html,
    attachments) can be passed as keywords or assigned afterwards.
    """
    def __init__(self, **kw):
        self.sender = None
        self.to = None
        self.cc = []
        self.bcc = []
        self.reply_to = None
        self.subject = None
        self.body = None
        self.html = None
        self.attachments = []
        self.initialize(**kw)
    def initialize(self, **kw):
        """Set any known fields from keywords.

        For the recipient-style fields a bare string is promoted to a
        single-element list.
        """
        list_fields = ('to', 'cc', 'bcc', 'reply_to')
        for field in ('sender', 'to', 'cc', 'bcc', 'reply_to',
                      'subject', 'body', 'html', 'attachments'):
            value = kw.get(field, None)
            if value is not None:
                if field in list_fields and isinstance(value, basestring):
                    value = [value]
                setattr(self, field, value)
    def check_initialized(self):
        """Raise the matching Missing*Error if a mandatory field is unset."""
        if not self.sender:
            raise MissingSenderError()
        if not self.to and not self.cc and not self.bcc:
            raise MissingRecipientsError()
        if not self.subject:
            raise MissingSubjectError()
        if not self.body:
            raise MissingBodyError()
    def is_initialized(self):
        """Return True when the message is ready to send.

        Bug fix: this used to call self.check_intitialized() (typo),
        which raised AttributeError instead of returning a boolean.
        """
        try:
            self.check_initialized()
            return True
        except Error:
            pass
        return False
    def send(self):
        """Deliver via django.core.mail; failures are silent (GAE-like)."""
        headers = {}
        if self.cc:
            headers['Cc'] = ', '.join(self.cc)
        if self.reply_to:
            headers['Reply-To'] = ', '.join(self.reply_to)
        msg = _EmailMessage(self.subject, self.body, self.sender,
                            self.to, self.cc + self.bcc, headers=headers)
        msg.send(fail_silently=True)
def send_mail(sender, to, subject, body, **kw):
    """Send an email.

    To mimic the behavior of Google's App Engine the email is sent with
    fail_silently=True, so this function does not raise on delivery
    problems.

    Args:
      sender: the sender's email address.
      to: a single recipient address or a list of them.
      subject: the email's subject.
      body: the email's body.
      kw: additional EmailMessage keywords (cc, bcc, reply_to, ...).
    """
    recipients = to
    if isinstance(recipients, basestring):
        recipients = [recipients]
    message = EmailMessage(**kw)
    message.sender = sender
    message.to = recipients
    message.subject = subject
    message.body = body
    message.send()
def check_email_valid(email_address, field):
    """No-op stub: address validation is not emulated."""
    pass
def invalid_email_reason(email_address, field):
    """Stub: always None -- no address is ever reported invalid."""
    pass
def is_email_valid(email_address):
    """Stub: every address is considered valid."""
    return True
def send_mail_to_admins(sender, subject, body, **kw):
    """Mail the addresses in settings.ADMINS, failing silently.

    Note that `sender` is ignored; settings.SERVER_EMAIL is used as the
    from-address, matching the original implementation.
    """
    recipients = [admin[1] for admin in settings.ADMINS]
    message = _EmailMessage(settings.EMAIL_SUBJECT_PREFIX + subject, body,
                            settings.SERVER_EMAIL, recipients, headers=kw)
    message.send(fail_silently=True)
| Python |
from django.conf.urls.defaults import *
# Uncomment the next two lines to enable the admin:
# from django.contrib import admin
# admin.autodiscover()
# Route every URL to gae2django's built-in test view.
urlpatterns = patterns('',
    (r'', 'gae2django.views.test'),
)
| Python |
# Django settings for django_gae2django project.
# NOTE: Keep the settings.py in examples directories in sync with this one!
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
# Legacy (pre-1.3) database settings; see DATABASES below for Django >= 1.3.
DATABASE_ENGINE = 'sqlite3' # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'dev.db' # Or path to database file if using sqlite3.
DATABASE_USER = '' # Not used with sqlite3.
DATABASE_PASSWORD = '' # Not used with sqlite3.
DATABASE_HOST = '' # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = '' # Set to empty string for default. Not used with sqlite3.
# DATABASES setting for Django >= 1.3
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': 'dev.db',
        'USER': '',
        'PASSWORD': '',
        'HOST': '',
        'PORT': '',
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
SECRET_KEY = 'el@4s$*(idwm5-87teftxlksckmy8$tyo7(tm!n-5x)zeuheex'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
#     'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'gae2django.middleware.FixRequestUserMiddleware',
    'django.middleware.doc.XViewMiddleware',
)
ROOT_URLCONF = 'urls'
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'gae2django',
)
# Uncomment the following two lines to run unittests with coverage reports.
#TEST_RUNNER = 'gae2django.tests.test_runner_with_coverage'
#COVERAGE_HTML_DIR = 'coverage_report'
| Python |
#!/usr/bin/env python
"""Django management entry point for the gae2django example (manage.py)."""
import gae2django
# Use gae2django.install(server_software='Dev') to enable a link to the
# admin frontend at the top of each page. By default this link is hidden.
gae2django.install(server_software='Django')
from django.core.management import execute_manager
try:
    import settings  # Assumed to be in the same directory.
except ImportError:
    import sys
    # Mirror Django's stock manage.py behavior: explain and exit non-zero.
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)
if __name__ == "__main__":
    execute_manager(settings)
| Python |
# Django settings for django_gae2django project.
# NOTE: Keep the settings.py in examples directories in sync with this one!
import os
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
# Legacy (pre-Django-1.2) single-database settings.
DATABASE_ENGINE = 'sqlite3'  # 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
DATABASE_NAME = 'dev.db'  # Or path to database file if using sqlite3.
DATABASE_USER = ''  # Not used with sqlite3.
DATABASE_PASSWORD = ''  # Not used with sqlite3.
DATABASE_HOST = ''  # Set to empty string for localhost. Not used with sqlite3.
DATABASE_PORT = ''  # Set to empty string for default. Not used with sqlite3.
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/static/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
# Make this unique, and don't share it with anybody.
# NOTE(review): committed example key; regenerate for real deployments.
SECRET_KEY = 'el@4s$*(idwm5-87teftxlksckmy8$tyo7(tm!n-5x)zeuheex'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
#     'django.template.loaders.eggs.load_template_source',
)
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'gae2django.middleware.FixRequestUserMiddleware',
    # Keep in mind, that CSRF protection is DISABLED in this example!
    'rietveld_helper.middleware.DisableCSRFMiddleware',
    'rietveld_helper.middleware.AddUserToRequestMiddleware',
    'django.middleware.doc.XViewMiddleware',
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.auth',  # required by admin panel
    'django.core.context_processors.request',
)
ROOT_URLCONF = 'rietveld_helper.urls'
TEMPLATE_DIRS = (
    # Templates shipped next to this settings module.
    os.path.join(os.path.dirname(__file__), 'templates'),
)
INSTALLED_APPS = (
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.admin',
    'gae2django',
    'rietveld_helper',
    'codereview',
)
# Rietveld stores per-user data on codereview.Account (see AddUserToRequest
# middleware).
AUTH_PROFILE_MODULE = 'codereview.Account'
LOGIN_REDIRECT_URL = '/'
# This won't work with gae2django.
RIETVELD_INCOMING_MAIL_ADDRESS = None
RIETVELD_REVISION = ''
UPLOAD_PY_SOURCE = os.path.join(os.path.dirname(__file__), 'upload.py')
| Python |
from django.db import models
# Create your models here.
| Python |
"""URL configuration for the rietveld_helper project."""
from django.conf.urls.defaults import *
from django.contrib import admin
# NOTE(review): this imported name is immediately shadowed by the assignment
# below; it appears to exist only for import side effects -- confirm before
# removing.
from codereview.urls import urlpatterns
admin.autodiscover()
urlpatterns = patterns('',
    # Static files served straight from ./static (development setup).
    (r'^static/(?P<path>.*)$', 'django.views.static.serve',
     {'document_root': 'static/'}),
    (r'^accounts/login/$', 'django.contrib.auth.views.login'),
    (r'^accounts/logout/$', 'django.contrib.auth.views.logout_then_login'),
    ('^admin/', include(admin.site.urls)),
    # App-Engine-style admin URL redirected to the Django admin.
    ('^_ah/admin', 'rietveld_helper.views.admin_redirect'),
    ('', include('codereview.urls')),
)
| Python |
from django.contrib.messages.api import get_messages
from codereview import models
class DisableCSRFMiddleware(object):
    """This is a BAD middleware. It disables CSRF protection.

    If someone comes up with a smart approach to make upload.py work
    with Django's CSRF protection, please submit a patch!
    """

    def process_request(self, request):
        # Django's CsrfViewMiddleware honors this private flag and skips
        # the token check for the request.
        request._dont_enforce_csrf_checks = True
class AddUserToRequestMiddleware(object):
    """Attach the Rietveld Account of the signed-in user to each request.

    ``process_request`` mirrors ``request.user`` onto
    ``models.Account.current_user_account`` and sets ``request.user_is_admin``;
    ``process_view`` presents anonymous users to codereview views as ``None``,
    restoring the real user object afterwards.
    """

    def process_request(self, request):
        user = request.user
        if user.is_anonymous():
            account, is_admin = None, False
        else:
            account = models.Account.get_account_for_user(user)
            is_admin = user.is_superuser
        models.Account.current_user_account = account
        request.user_is_admin = is_admin

    def process_view(self, request, view_func, view_args, view_kwargs):
        original_user = request.user
        is_rietveld = view_func.__module__.startswith('codereview')
        if is_rietveld and original_user.is_anonymous():
            # Pre-fetch messages before changing request.user so that
            # they're cached (for Django 1.2.5 and above).
            request._messages = get_messages(request)
            request.user = None
        result = view_func(request, *view_args, **view_kwargs)
        request.user = original_user
        return result
| Python |
from django.contrib import admin
from codereview import models
# Patch in some simple lambda's, Django uses them.
#models.Issue.__unicode__ = lambda self: self.subject
#models.PatchSet.__unicode__ = lambda self: self.message or ''
#class PatchSetInlineAdmin(admin.TabularInline):
# model = models.PatchSet
#class PatchSetAdmin(admin.ModelAdmin):
# list_filter = ('issue', 'owner')
# list_display = ('issue', 'message')
# search_fields = ('issue__subject', 'message')
#class IssueAdmin(admin.ModelAdmin):
# list_filter = ('closed', 'owner')
# list_display = ('id', 'subject', 'owner', 'modified', 'n_comments')
# list_display_links = ('id', 'subject')
# inlines = [PatchSetInlineAdmin]
#admin.site.register(models.Issue, IssueAdmin)
#admin.site.register(models.PatchSet, PatchSetAdmin)
| Python |
from django.http import HttpResponseRedirect
def admin_redirect(request):
    """Redirect any request (e.g. /_ah/admin) to the Django admin frontend."""
    target = '/admin/'
    return HttpResponseRedirect(target)
| Python |
from django import template
from django.db.models.signals import post_save
from django.contrib.auth.models import AnonymousUser, User
from codereview import library
from gae2django.utils import CallableString
def nickname(email, arg=None):
    """Template filter wrapping codereview's ``nickname`` for Django users.

    codereview expects GAE-style users whose ``email`` attribute is callable,
    so Django ``User.email`` is wrapped in ``CallableString`` first.
    """
    if isinstance(email, AnonymousUser):
        email = None
    elif isinstance(email, User):
        email.email = CallableString(email.email)
    return library.nickname(email, arg)

def show_user(email, arg=None, autoescape=None, memcache_results=None):
    """Template filter wrapping codereview's ``show_user`` (see nickname)."""
    if isinstance(email, AnonymousUser):
        email = None
    elif isinstance(email, User):
        email.email = CallableString(email.email)
    return library.show_user(email, arg, autoescape, memcache_results)

# Make filters global
template.defaultfilters.register.filter('nickname', nickname)
template.defaultfilters.register.filter('show_user', show_user)

def on_post_save_user(sender, **kwds):
    """post_save handler keeping a codereview Account per Django User."""
    if sender != User:
        return
    user = kwds['instance']
    if not user.email:
        # Django's admin allows to create a user without email!
        return
    if not isinstance(user.email, CallableString):
        user.email = CallableString(user.email)
    # Imported lazily to avoid an import cycle at module load time.
    from codereview import models
    account = models.Account.get_account_for_user(user)
    account.put()

post_save.connect(on_post_save_user)
| Python |
# Copyright 2011 Tobias Rodaebel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Setup script for the gaesynkit package."""
from distutils.cmd import Command
from distutils.core import setup
from setuptools import setup, find_packages
from unittest import TestLoader, TextTestRunner
import os
import sys
class test(Command):
    """Runs the unit tests for gaesynkit."""

    description = "Runs unit tests for gaesynkit."

    # --gae-sdk lets the caller point at a Google App Engine SDK checkout.
    user_options = [
        ('gae-sdk=', None, 'path to the Google App Engine SDK')
    ]

    def initialize_options(self):
        # distutils hook: establish option defaults before parsing.
        self.gae_sdk = None

    def finalize_options(self):
        # distutils hook: nothing to validate here.
        pass

    def run(self):
        """Put the SDK and its bundled libraries on sys.path, then run tests."""
        gae_sdk = self.gae_sdk or '/'
        # The SDK vendors its dependencies under <sdk>/lib; each must be on
        # sys.path individually.
        extra_paths = [
            gae_sdk,
            os.path.join(gae_sdk, 'lib', 'antlr3'),
            os.path.join(gae_sdk, 'lib', 'django'),
            os.path.join(gae_sdk, 'lib', 'fancy_urllib'),
            os.path.join(gae_sdk, 'lib', 'ipaddr'),
            os.path.join(gae_sdk, 'lib', 'webob'),
            os.path.join(gae_sdk, 'lib', 'yaml', 'lib'),
            os.path.join(gae_sdk, 'lib', 'simplejson'),
            os.path.join(gae_sdk, 'lib', 'graphy'),
        ]
        sys.path.extend(extra_paths)
        # Imported only after sys.path is prepared.
        import gaesynkit.tests
        loader = TestLoader()
        t = TextTestRunner()
        t.run(loader.loadTestsFromModule(gaesynkit.tests))

# 'test' is the parameter as it gets added to setup.py
cmdclasses = {'test': test}
def read(*rnames):
    """Return the text of a file addressed relative to this module's directory.

    :param rnames: path components joined onto ``dirname(__file__)``.
    :returns: The file's full contents as a string.
    """
    # Use a context manager so the file handle is closed deterministically;
    # the original left it to the garbage collector.
    with open(os.path.join(os.path.dirname(__file__), *rnames)) as f:
        return f.read()
# Package metadata; long_description is assembled from the files shipped in
# the sdist (README.txt + CHANGES.txt).
setup(
    name='gaesynkit',
    version='1.0.0a2',
    author="Tobias Rodaebel",
    author_email="tobias.rodaebel@googlemail.com",
    description=("Google App Engine Datastore/Local Storage Synchronization "
                 "Framework"),
    long_description=(
        read('README.txt')
        + '\n\n' +
        read('CHANGES.txt')
    ),
    license="Apache License 2.0",
    keywords="google app engine gae javascript datastore",
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Web Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: Apache Software License',
        'Natural Language :: English',
        'Operating System :: OS Independent',
        'Programming Language :: JavaScript',
        'Programming Language :: Python',
        'Topic :: Internet :: WWW/HTTP',
        'Topic :: Internet :: WWW/HTTP :: WSGI :: Server',
    ],
    url='http://code.google.com/p/gaesynkit',
    # Code lives under src/ (see package_dir mapping below).
    packages=find_packages('src'),
    package_dir={'': 'src'},
    include_package_data=True,
    install_requires=[
        'setuptools',
    ],
    zip_safe=False,
    # Registers the custom 'test' command defined above.
    cmdclass=cmdclasses
)
| Python |
##############################################################################
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
##############################################################################
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os, shutil, sys, tempfile, textwrap, urllib, urllib2, subprocess
from optparse import OptionParser
if sys.platform == 'win32':
    def quote(c):
        """Double-quote *c* when it contains a space.

        work around spawn lamosity on windows
        """
        if ' ' in c:
            return '"%s"' % c
        return c
else:
    # POSIX spawn/exec pass argument vectors verbatim; no quoting needed.
    quote = str
# See zc.buildout.easy_install._has_broken_dash_S for motivation and comments.
stdout, stderr = subprocess.Popen(
[sys.executable, '-Sc',
'try:\n'
' import ConfigParser\n'
'except ImportError:\n'
' print 1\n'
'else:\n'
' print 0\n'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
has_broken_dash_S = bool(int(stdout.strip()))
# In order to be more robust in the face of system Pythons, we want to
# run without site-packages loaded. This is somewhat tricky, in
# particular because Python 2.6's distutils imports site, so starting
# with the -S flag is not sufficient. However, we'll start with that:
if not has_broken_dash_S and 'site' in sys.modules:
# We will restart with python -S.
args = sys.argv[:]
args[0:0] = [sys.executable, '-S']
args = map(quote, args)
os.execv(sys.executable, args)
# Now we are running with -S. We'll get the clean sys.path, import site
# because distutils will do it later, and then reset the path and clean
# out any namespace packages from site-packages that might have been
# loaded by .pth files.
clean_path = sys.path[:]
import site
sys.path[:] = clean_path
for k, v in sys.modules.items():
if k in ('setuptools', 'pkg_resources') or (
hasattr(v, '__path__') and
len(v.__path__)==1 and
not os.path.exists(os.path.join(v.__path__[0],'__init__.py'))):
# This is a namespace package. Remove it.
sys.modules.pop(k)
is_jython = sys.platform.startswith('java')
setuptools_source = 'http://peak.telecommunity.com/dist/ez_setup.py'
distribute_source = 'http://python-distribute.org/distribute_setup.py'
# parsing arguments
def normalize_to_url(option, opt_str, value, parser):
    """optparse callback: normalize *value* to a URL and store it.

    Bare filesystem paths become file:// URLs; --download-base always gets a
    trailing slash; a falsy value is stored as None. The destination attribute
    name is derived from the option string ('--download-base' -> download_base).
    """
    dest = opt_str[2:].replace('-', '_')
    if not value:
        setattr(parser.values, dest, None)
        return
    if '://' not in value:  # It doesn't smell like a URL.
        path = os.path.abspath(os.path.expanduser(value))
        value = 'file://%s' % urllib.pathname2url(path)
    if opt_str == '--download-base' and not value.endswith('/'):
        # Download base needs a trailing slash to make the world happy.
        value = value + '/'
    setattr(parser.values, dest, value)
# Command-line interface for the bootstrap script.
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
Bootstraps a buildout-based project.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
Note that by using --setup-source and --download-base to point to
local resources, you can keep this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("-v", "--version", dest="version",
                  help="use a specific zc.buildout version")
parser.add_option("-d", "--distribute",
                  action="store_true", dest="use_distribute", default=False,
                  help="Use Distribute rather than Setuptools.")
parser.add_option("--setup-source", action="callback", dest="setup_source",
                  callback=normalize_to_url, nargs=1, type="string",
                  help=("Specify a URL or file location for the setup file. "
                        "If you use Setuptools, this will default to " +
                        setuptools_source + "; if you use Distribute, this "
                        "will default to " + distribute_source + "."))
parser.add_option("--download-base", action="callback", dest="download_base",
                  callback=normalize_to_url, nargs=1, type="string",
                  help=("Specify a URL or directory for downloading "
                        "zc.buildout and either Setuptools or Distribute. "
                        "Defaults to PyPI."))
parser.add_option("--eggs",
                  help=("Specify a directory for storing eggs. Defaults to "
                        "a temporary directory that is deleted when the "
                        "bootstrap script completes."))
parser.add_option("-t", "--accept-buildout-test-releases",
                  dest='accept_buildout_test_releases',
                  action="store_true", default=False,
                  help=("Normally, if you do not specify a --version, the "
                        "bootstrap script and buildout gets the newest "
                        "*final* versions of zc.buildout and its recipes and "
                        "extensions for you. If you use this flag, "
                        "bootstrap and buildout will get the newest releases "
                        "even if they are alphas or betas."))
parser.add_option("-c", None, action="store", dest="config_file",
                  help=("Specify the path to the buildout configuration "
                        "file to be used."))
options, args = parser.parse_args()
# if -c was provided, we push it back into args for buildout's main function
if options.config_file is not None:
    args += ['-c', options.config_file]
if options.eggs:
    eggs_dir = os.path.abspath(os.path.expanduser(options.eggs))
else:
    # Eggs go to a throwaway directory removed at the end of the script.
    eggs_dir = tempfile.mkdtemp()
if options.setup_source is None:
    if options.use_distribute:
        options.setup_source = distribute_source
    else:
        options.setup_source = setuptools_source
if options.accept_buildout_test_releases:
    args.append('buildout:accept-buildout-test-releases=true')
args.append('bootstrap')
# Ensure pkg_resources (from Distribute when requested) is importable,
# downloading and executing ez_setup/distribute_setup if necessary.
try:
    import pkg_resources
    import setuptools  # A flag. Sometimes pkg_resources is installed alone.
    if not hasattr(pkg_resources, '_distribute'):
        raise ImportError
except ImportError:
    ez_code = urllib2.urlopen(
        options.setup_source).read().replace('\r\n', '\n')
    ez = {}
    # Python 2 exec statement: run the fetched installer in its own namespace.
    exec ez_code in ez
    setup_args = dict(to_dir=eggs_dir, download_delay=0)
    if options.download_base:
        setup_args['download_base'] = options.download_base
    if options.use_distribute:
        setup_args['no_fake'] = True
    ez['use_setuptools'](**setup_args)
    if 'pkg_resources' in sys.modules:
        reload(sys.modules['pkg_resources'])
    import pkg_resources
    # This does not (always?) update the default working set. We will
    # do it.
    for path in sys.path:
        if path not in pkg_resources.working_set.entries:
            pkg_resources.working_set.add_entry(path)
# Assemble the child easy_install command that will fetch zc.buildout into
# eggs_dir (-m: don't add to easy-install.pth, -q: quiet, -N: no deps,
# -x: no scripts, -d: destination).
cmd = [quote(sys.executable),
       '-c',
       quote('from setuptools.command.easy_install import main; main()'),
       '-mqNxd',
       quote(eggs_dir)]
if not has_broken_dash_S:
    cmd.insert(1, '-S')
find_links = options.download_base
if not find_links:
    find_links = os.environ.get('bootstrap-testing-find-links')
if find_links:
    cmd.extend(['-f', quote(find_links)])
if options.use_distribute:
    setup_requirement = 'distribute'
else:
    setup_requirement = 'setuptools'
ws = pkg_resources.working_set
setup_requirement_path = ws.find(
    pkg_resources.Requirement.parse(setup_requirement)).location
# The child only gets setuptools/distribute on its PYTHONPATH.
env = dict(
    os.environ,
    PYTHONPATH=setup_requirement_path)
requirement = 'zc.buildout'
version = options.version
if version is None and not options.accept_buildout_test_releases:
    # Figure out the most recent final version of zc.buildout.
    import setuptools.package_index
    _final_parts = '*final-', '*final'
    def _final_version(parsed_version):
        # Final iff every '*'-tagged part is one of the final markers.
        for part in parsed_version:
            if (part[:1] == '*') and (part not in _final_parts):
                return False
        return True
    # Query the index for the highest final release of zc.buildout.
    index = setuptools.package_index.PackageIndex(
        search_path=[setup_requirement_path])
    if find_links:
        index.add_find_links((find_links,))
    req = pkg_resources.Requirement.parse(requirement)
    if index.obtain(req) is not None:
        best = []
        bestv = None
        for dist in index[req.project_name]:
            distv = dist.parsed_version
            if _final_version(distv):
                if bestv is None or distv > bestv:
                    best = [dist]
                    bestv = distv
                elif distv == bestv:
                    best.append(dist)
        if best:
            best.sort()
            version = best[-1].version
if version:
    requirement = '=='.join((requirement, version))
cmd.append(requirement)
if is_jython:
    import subprocess
    exitcode = subprocess.Popen(cmd, env=env).wait()
else:  # Windows prefers this, apparently; otherwise we would prefer subprocess
    exitcode = os.spawnle(*([os.P_WAIT, sys.executable] + cmd + [env]))
if exitcode != 0:
    sys.stdout.flush()
    sys.stderr.flush()
    print ("An error occurred when trying to install zc.buildout. "
           "Look above this message for any errors that "
           "were output by easy_install.")
    sys.exit(exitcode)
# Make the freshly installed eggs importable, then hand over to buildout.
ws.add_entry(eggs_dir)
ws.require(requirement)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
if not options.eggs:  # clean up temporary egg directory
    shutil.rmtree(eggs_dir)
| Python |
# -*- coding: utf-8 -*-
#
# Copyright 2011 Tobias Rodäbel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Application for running the Javascript Unit Tests."""
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
# Page template for the QUnit test runner; '%(login_or_logout)s' is filled in
# by MainHandler. The original markup never closed <head>; </head> is added
# before <body> so the document is valid HTML.
TEST_HTML = r"""
<!DOCTYPE HTML>
<html>
<head>
<meta http-equiv="Content-Type" content="text/html; charset=UTF-8" />
<title>Unit Tests for gaesynkit</title>
<link rel="stylesheet" href="qunit/qunit.css" type="text/css" />
<script type="text/javascript" src="qunit/jquery-1.4.4.js"></script>
<script type="text/javascript" src="qunit/qunit.js"></script>
<script type="text/javascript" src="gaesynkit/gaesynkit.js"></script>
<script type="text/javascript" src="tests/test_gaesynkit.js"></script>
<style type="text/css">
a {
font-family: 'Helvetica Neue Light', Helvetica, sans-serif;
font-size: 0.8em;
border: none;
color: #009;
text-decoration: none;
}
a:hover {
border: none;
color: #009;
text-decoration: none;
}
a:visited {
border: none;
color: #009;
text-decoration: none;
}
</style>
</head>
<body>
<div style="text-align: right;">
<a href="docs/index.html">Documentation</a> | %(login_or_logout)s
</div>
<h1 id="qunit-header">Unit Tests</h1>
<h2 id="qunit-banner"></h2>
<div id="qunit-testrunner-toolbar"></div>
<h2 id="qunit-userAgent"></h2>
<ol id="qunit-tests"></ol>
</body>
</html>
"""
def get_login_or_logout(user):
    """Return an HTML login link, or a logout link when *user* is signed in."""
    if user:
        url, label = users.create_logout_url('/'), 'Logout'
    else:
        url, label = users.create_login_url('/'), 'Login'
    return '<a href="%(url)s">%(label)s</a>' % dict(url=url, label=label)
class MainHandler(webapp.RequestHandler):
    """Request handler for running our JS unit tests."""

    def get(self):
        """Render the QUnit page with a login/logout link for the caller."""
        login_or_logout = get_login_or_logout(users.get_current_user())
        self.response.out.write(
            TEST_HTML % {'login_or_logout': login_or_logout})
# Every path is handled by MainHandler (the test runner page).
app = webapp.WSGIApplication([('.*', MainHandler),], debug=True)

def main():
    # 'bare' variant: app handles its own middleware.
    util.run_bare_wsgi_app(app)

if __name__ == "__main__":
    main()
| Python |
# -*- coding: utf-8 -*-
#
# Copyright 2011 Tobias Rodäbel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Python implementation of the gaesynkit handlers JSON-RPC endpoint."""
try:
from gaesynkit import json_rpc as rpc
except ImportError: # pragma: no cover
import json_rpc as rpc
try:
from gaesynkit.sync import SyncInfo
except ImportError: # pragma: no cover
from sync import SyncInfo
from datetime import datetime
from google.appengine.api import datastore
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
import base64
import email
import itertools
import mimetypes
import os
import re
import time
# Status codes returned to the JSON-RPC client by SyncHandler.
ENTITY_NOT_CHANGED = 1
ENTITY_UPDATED = 2
ENTITY_STORED = 3
ENTITY_NOT_FOUND = 4
ENTITY_DELETED = 5
# Separators used when composing/decomposing remote key strings
# ("app@namespace!!kind\bname\tkind\bname...").
_APP_ID_SEP = "@"
_NAMESPACE_SEP = "!!"
_DEFAULT_NAMESPACE = "default"
_KIND_ID_SEP = "\n"
_KIND_NAME_SEP = "\b"
_PATH_SEP = "\t"
# JSON type tag -> constructor used when decoding property values
# (Python 2 only: relies on the unicode builtin).
_PROPERTY_TYPES_MAP = {
    "string": unicode,
    "bool": bool,
    "int": int,
    "float": float,
    "key": datastore_types.Key,
    "byte_string": datastore_types.ByteString,
    "gd:when": lambda v: datetime.strptime(v, "%Y/%m/%d %H:%M:%S"),
    "user": users.User,
    "gd:email": datastore_types.Email,
    "georss:point": datastore_types.GeoPt,
    "atom:category": datastore_types.Category,
    "atom:link": datastore_types.Link,
    "gd:im": datastore_types.IM,
    "gd:phonenumber": datastore_types.PhoneNumber,
    "gd:postaladdress": datastore_types.PostalAddress,
    "gd:rating": datastore_types.Rating
}
# Python type -> JSON type tag used when encoding property values
# (Python 2 only: relies on the long builtin).
_PROPERTY_TYPES_STRINGS = {
    unicode: 'string',
    str: 'string',
    bool: 'bool',
    int: 'int',
    long: 'int',
    type(None): 'null',
    float: 'float',
    datastore_types.Key: 'key',
    datastore_types.Blob: 'blob',
    datastore_types.ByteString: 'byte_string',
    datastore_types.Text: 'text',
    users.User: 'user',
    datastore_types.Category: 'atom:category',
    datastore_types.Link: 'atom:link',
    datastore_types.Email: 'gd:email',
    datetime: 'gd:when',
    datastore_types.GeoPt: 'georss:point',
    datastore_types.IM: 'gd:im',
    datastore_types.PhoneNumber: 'gd:phonenumber',
    datastore_types.PostalAddress: 'gd:postaladdress',
    datastore_types.Rating: 'gd:rating',
    datastore_types.BlobKey: 'blobkey',
}
# Captures (app_id, namespace, path) from a base64-decoded remote key.
DECODED_KEY_PATTERN = re.compile(r'([a-z\-0-9]+?)%s([a-zA-Z0-9\-\_]+?)%s(.*)' %
                                 (_APP_ID_SEP, _NAMESPACE_SEP))
class NotAllowedError(Exception):
    """Raised when a client attempts a synchronization it may not perform."""
def parent_from_remote_key(key_string):
    """Extracts parent key from remote key string.

    :param str key_string: The remote key string.
    :returns: A `datastore_types.Key` instance, or None when the entity
        has no parent.
    :raises NotAllowedError: if the key belongs to another application.
    """
    decoded = base64.b64decode(key_string)
    m = re.match(DECODED_KEY_PATTERN, decoded)
    if not m:
        raise Exception("Corrupted key")
    app_id, namespace, path = m.groups()
    # Never dereference keys that belong to a different application.
    if app_id != os.environ['APPLICATION_ID']:
        raise NotAllowedError(
            "Not allowed to access data of another application")
    if namespace == _DEFAULT_NAMESPACE:
        namespace = None
    def split_elem(elem):
        # Split "kind\bname" pairs; a missing separator means the element is
        # id-based and triggers the StopIteration fallback below.
        # NOTE(review): raising StopIteration out of a map() callback works
        # as intended on Python 2 only; on Python 3 lazy map would swallow
        # it as end-of-iteration -- revisit before porting.
        if _KIND_NAME_SEP in elem:
            return elem.split(_KIND_NAME_SEP, 1)
        else:
            raise StopIteration
    try:
        path_elements = list(
            itertools.chain(*map(split_elem, path.split(_PATH_SEP))))
    except StopIteration:
        # Id-based element: resolve the parent through its stored SyncInfo.
        sync_info = SyncInfo.get_by_key_name(
            base64.b64encode((namespace or _DEFAULT_NAMESPACE) +
            _NAMESPACE_SEP+(_PATH_SEP.join(path.split(_PATH_SEP)[:-1]))))
        if sync_info:
            return sync_info.target_key()
        else:
            return None
    if len(path_elements) == 2:
        # A single kind/name pair is a root entity: no parent.
        return None
    kw = dict(namespace=namespace)
    # Drop the leaf pair; the remaining pairs identify the parent.
    return datastore_types.Key.from_path(*path_elements[:-2], **kw)
def entity_from_json_data(entity_dict):
    """Creates a new entity.

    :param dictionary entity_dict: JSON data.
    :returns: A `datastore.Entity` instance.
    """
    # Create new entity
    entity = datastore.Entity(
        entity_dict["kind"],
        name=entity_dict.get("name"),
        parent=parent_from_remote_key(entity_dict["key"]),
        namespace=entity_dict.get("namespace")
    )
    # Generator for converting properties
    def convertProps():
        properties = entity_dict["properties"]
        for prop in properties:
            value = properties[prop]
            if isinstance(value["value"], list):
                # Multi-valued property: keep the raw list.
                prop_t = list
            else:
                # Map the JSON type tag onto a datastore constructor.
                prop_t = _PROPERTY_TYPES_MAP[value["type"]]
            yield (prop, prop_t(value["value"]))
    # Populate entity
    entity.update(dict(convertProps()))
    return entity
def encode_properties(entity):
    """Encode entity properties to JSON serializable dictionary.

    :param datastore.Entity entity: An entity.
    :returns: Dictionary mapping property name to {"type", "value"} pairs.
    """
    def encode(obj):
        # Normalize non-JSON-native values to strings.
        if isinstance(obj, datetime):
            return obj.isoformat().replace('T', ' ').replace('-', '/')
        elif isinstance(obj, datastore_types.Key):
            return str(obj)
        elif isinstance(obj, users.User):
            return str(obj)
        return obj
    def encode_props():
        for key in entity.keys():
            prop = entity[key]
            prop_t = type(prop)
            if prop_t == list:
                # Tag multi-valued properties with their members' type.
                # NOTE(review): an empty list raises IndexError here, and
                # list members are not run through encode() -- confirm both
                # cases are impossible upstream.
                prop_t = type(prop[0])
            type_str = _PROPERTY_TYPES_STRINGS[prop_t]
            yield (key, {"type": type_str, "value": encode(prop)})
    return dict(encode_props())
def json_data_from_entity(entity):
    """Get the JSON encodable entity dictionary.

    :param datastore.Entity entity: The entity.
    :returns: JSON encodable dictionary.
    """
    result_dict = dict(properties=encode_properties(entity))
    result_dict["kind"] = entity.kind()
    id_or_name = entity.key().id_or_name()
    # A string key part means the entity was stored by name, otherwise by id.
    if isinstance(id_or_name, basestring):
        result_dict["name"] = id_or_name
    else:
        result_dict["id"] = id_or_name
    return result_dict
def compare_replace_sync(entity_dict, sync_info, content_hash):
    """Make a compare-replace-sync between the stored and the remote entity.

    :param dictionary entity_dict: The remote entity dictionary.
    :param sync.SyncInfo sync_info: A synchronization info instance.
    :param string content_hash: MD5 checksum of the remote entity.
    :returns: A `datastore.Entity` instance (not yet written to the
        datastore; the caller persists it).
    """
    # The remote entity
    remote_version = entity_dict["version"]
    remote_entity = entity_from_json_data(entity_dict)
    # The stored entity
    version = sync_info.version()
    entity = sync_info.target()
    # NOTE(review): assert is stripped under python -O; raise explicitly if
    # this version guard is load-bearing.
    assert remote_version <= version, "Version conflict"
    if remote_version < version:
        # If the remote version is older, just return the stored entity
        return entity
    # Merge entities: on equal versions the remote property values win.
    for prop in remote_entity.keys():
        entity[prop] = remote_entity[prop]
    sync_info.incr_version()
    sync_info.set_content_hash(content_hash)
    return entity
class SyncHandler(rpc.JsonRpcHandler):
    """Handles JSON-RPC sync requests.

    This request handler is the main JSON-RPC endpoint.
    """

    @rpc.ServiceMethod
    def syncEntity(self, entity_dict, content_hash):
        """Synchronize entity.

        Returns a status dictionary: ENTITY_NOT_CHANGED when the checksum
        matches, ENTITY_UPDATED after a merge, ENTITY_STORED for a first sync.

        :param dictionary entity_dict: Dictionary from decoded JSON entity.
        :param string content_hash: MD5 checksum of the entity.
        :raises NotAllowedError: if the entity was synced by another user.
        """
        # NOTE(review): assert disappears under python -O; raise instead if
        # this validation must survive optimized runs.
        assert "key" in entity_dict, "Remote entity key missing"
        remote_key = entity_dict["key"]
        version = entity_dict["version"]
        user = users.get_current_user()
        sync_info = SyncInfo.get_by_key_name(remote_key)
        if sync_info:
            # Check whether user is allowed to synchronize the requested
            # entity
            if user != sync_info.user():
                raise NotAllowedError("Synchronization not allowed")
            # The entity has been synced before; check whether its contents
            # have been changed
            if sync_info.content_hash() == content_hash:
                # The entity contents haven't changed; nothing to write.
                result = {
                    "status": ENTITY_NOT_CHANGED,
                    "key": remote_key,
                    "version": sync_info.version()
                }
                return result
            # Checksums differ: merge remote into stored and persist both
            # the entity and its bumped SyncInfo.
            entity = compare_replace_sync(entity_dict, sync_info, content_hash)
            json_data = json_data_from_entity(entity)
            json_data["key"] = remote_key
            json_data["version"] = sync_info.version()
            datastore.Put([entity, sync_info.entity()])
            return {"status": ENTITY_UPDATED, "entity": json_data}
        # Create and put new entity
        entity = entity_from_json_data(entity_dict)
        key = datastore.Put(entity)
        # Get a new version number
        version = entity_dict["version"] + 1
        # Create and put synchronization info
        sync_info = SyncInfo.from_params(
            remote_key, version, content_hash, key, user=user)
        sync_info.put()
        return {"status": ENTITY_STORED, "key": remote_key, "version": version}

    @rpc.ServiceMethod
    def syncDeletedEntity(self, key):
        """Delete entity.

        :param string key: The remote key.
        """
        # NOTE(review): get_by_key_name may return None for an unknown key,
        # which would raise AttributeError below -- confirm clients only
        # ever send keys that were synced before.
        sync_info = SyncInfo.get_by_key_name(key)
        datastore.Delete([sync_info.target_key(), sync_info.key()])
        return {"status": ENTITY_DELETED}

    @rpc.ServiceMethod
    def test(self, param):
        """For testing only.

        This method basically *echoes* the given parameter.

        :param object param: Arbitrary parameter.
        """
        return param
class StaticHandler(webapp.RequestHandler):
    """Request handler to serve static files."""

    def get(self):
        """Serve the file addressed by the part of the path after 'gaesynkit/'.

        Responds 404 when the file is missing or its MIME type cannot be
        guessed; rewrites the $APPLICATION_ID placeholder in served content.
        """
        path = self.request.path
        filename = path[path.rfind('gaesynkit/')+10:]
        filename = os.path.join(os.path.dirname(__file__), 'static', filename)
        content_type, encoding = mimetypes.guess_type(filename)
        try:
            assert content_type and '/' in content_type, repr(content_type)
            fp = open(filename, 'rb')
        except (IOError, AssertionError):
            self.response.set_status(404)
            return
        # Cache for one hour. BUG FIX: the original sent the literal string
        # 'max-age=expiry', which is not a valid Cache-Control directive;
        # use 3600 seconds to match the Expires header below.
        expiration = email.Utils.formatdate(time.time()+3600, usegmt=True)
        self.response.headers['Content-type'] = content_type
        self.response.headers['Cache-Control'] = 'public, max-age=3600'
        self.response.headers['Expires'] = expiration
        try:
            data = fp.read().replace("$APPLICATION_ID",
                                     os.environ['APPLICATION_ID'])
            self.response.out.write(data)
        finally:
            fp.close()
# URL routing: the RPC endpoint first; all other gaesynkit paths are static.
app = webapp.WSGIApplication([
    ('.*/gaesynkit/rpc/.*', SyncHandler),
    ('.*/gaesynkit/.*', StaticHandler),
], debug=True)

def main():  # pragma: no cover
    """The main function."""
    util.run_wsgi_app(app)

if __name__ == "__main__":  # pragma: no cover
    main()
| Python |
def webapp_add_wsgi_middleware(app):
    """appengine_config hook: wrap *app* with Appstats recording middleware."""
    # Imported lazily; this module is loaded by the App Engine runtime.
    from google.appengine.ext.appstats import recording
    return recording.appstats_wsgi_middleware(app)
| Python |
# Python package
from test_handlers import *
from test_json_rpc import *
from test_sync import *
| Python |
# -*- coding: utf-8 -*-
#
# Copyright 2010, 2011 Florian Glanzner (fgl), Tobias Rodäbel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""JsonRpcHandler webapp.RequestHandler for TyphoonAE and Google App Engine.
See specs:
- http://groups.google.com/group/json-rpc/web/json-rpc-2-0
- http://groups.google.com/group/json-rpc/web/json-rpc-over-http
This version does not support:
- *args, **kwargs and default-values are not allowed for Service Methods
- handles only HTTP POST
- JSON-RPC Version < 2.0 (same as 1.2) not supported
TODOs:
- more Comments
- Examples (doctest?)
- Factor out handler methods to reuse in other frameworks
"""
from google.appengine.ext import webapp
from inspect import getargspec
import cgi
import logging
import simplejson
import sys
import traceback
JSON_RPC_KEYS = frozenset(['method', 'jsonrpc', 'params', 'id'])
def ServiceMethod(fn):
    """Decorator to mark a method of a JsonRpcHandler as ServiceMethod.

    This exposes methods to the RPC interface by setting the
    ``IsServiceMethod`` attribute, which the handler checks before
    dispatching a call.

    :param function fn: A function.
    :returns: The same function, marked as a service method.
    """
    # Underscore-prefixed names are conventionally private; exposing one
    # over RPC is almost certainly a mistake, so warn about it (this
    # implements the long-standing TODO in the original docstring).
    if fn.__name__.startswith('_'):
        logging.warning(
            "ServiceMethod applied to underscore method %s", fn.__name__)
    fn.IsServiceMethod = True
    return fn
class JsonRpcError(Exception):
    """Baseclass for all JSON-RPC Errors.

    Errors are described in the JSON-RPC 2.0 specs; the related HTTP
    status codes come from the json-rpc-over-http proposal.  Subclasses
    override ``code``, ``message`` and ``status`` as class attributes.
    """

    code = 0
    message = None
    status = 500

    def __init__(self, message=None):
        # Only override the class-level default when a message is given.
        if message is None:
            return
        self.message = message

    def __str__(self):
        return self.message

    def __repr__(self):
        return '{0}("{1}")'.format(type(self).__name__, self.message)

    def getJsonData(self):
        """Return the JSON-RPC error object for this exception."""
        name = type(self).__name__
        return {'code': self.code,
                'message': '%s: %s' % (name, str(self.message))}
class ParseError(JsonRpcError):
    """Invalid JSON was received by the server.

    An error occurred on the server while parsing the JSON text.
    HTTP status 500 is inherited from JsonRpcError.
    """
    # Pre-defined JSON-RPC 2.0 error code for parse errors.
    code = -32700
    message = 'Parse error'
class InvalidRequestError(JsonRpcError):
    """The JSON sent is not a valid Request object."""
    # Pre-defined JSON-RPC 2.0 error code for invalid requests.
    code = -32600
    message = 'Invalid Request'
    # Mapped to HTTP 400 Bad Request per the json-rpc-over-http proposal.
    status = 400
class MethodNotFoundError(JsonRpcError):
    """The method does not exist / is not available."""
    # Pre-defined JSON-RPC 2.0 error code for unknown methods.
    code = -32601
    message = 'Method not found'
    # Mapped to HTTP 404 Not Found per the json-rpc-over-http proposal.
    status = 404
class InvalidParamsError(JsonRpcError):
    """Invalid method parameter(s).

    HTTP status 500 is inherited from JsonRpcError.
    """
    # Pre-defined JSON-RPC 2.0 error code for parameter mismatches.
    code = -32602
    message = 'Invalid params'
class InternalError(JsonRpcError):
    """Internal JSON-RPC error.

    Raised when a service method fails unexpectedly; HTTP status 500 is
    inherited from JsonRpcError.
    """
    # Pre-defined JSON-RPC 2.0 error code for internal errors.
    code = -32603
    message = 'Internal error'
class ServerError(JsonRpcError):
    """Base Class for implementation-defined Server Errors.

    The Error Code must be between -32099..-32000
    """
    # Top of the implementation-defined server error range.
    code = -32000
    message = 'Server Error'
class JsonRpcMessage(object):
"""A single JSON-RPC message.
:param dict json: The JSON-RPC message Python representation.
"""
def __init__(self, json=None):
super(JsonRpcMessage, self).__init__()
self.message_id = None
self.notification = False
self.error = None
self.result = None
if json is not None:
self.from_json(json)
def from_json(self, json):
"""Parses a single JSON-RPC message.
:param dict json: The JSON-RPC message Python representation.
"""
try:
if not isinstance(json, dict):
raise InvalidRequestError(
'Invalid JSON-RPC Message; must be an object')
if not set(json.keys()) <= JSON_RPC_KEYS:
raise InvalidRequestError('Invalid members in request object')
if not ('jsonrpc' in json and json['jsonrpc'] == '2.0'):
raise InvalidRequestError('Server supports JSON-RPC 2.0 only')
if 'method' not in json:
raise InvalidRequestError('No method specified')
if not isinstance(json['method'], basestring):
raise InvalidRequestError('Method must be a string')
self.method_name = json['method']
if 'params' in json:
params = json['params']
if not isinstance(params, (dict, list, tuple)):
raise InvalidRequestError(
"'params' must be an array or object")
self.params = params
if 'id' not in json:
self.notification = True
else:
self.message_id = json['id']
except InvalidRequestError, ex:
self.error = ex
logging.error('Encountered invalid json message')
class JsonRpcHandler(webapp.RequestHandler):
"""Subclass this handler to implement a JSON-RPC handler.
Annotate methods with @ServiceMethod to expose them and make them callable
via JSON-RPC. Currently methods with *args or **kwargs are not supported
as service-methods. All parameters have to be named explicitly.
"""
def __init__(self):
webapp.RequestHandler.__init__(self)
def post(self):
self.handle_request()
def handle_request(self):
"""Handles POST request."""
self.response.headers['Content-Type'] = 'application/json-rpc'
try:
logging.debug("Raw JSON-RPC: %s", self.request.body)
messages, batch_request = self.parse_body(self.request.body)
except (InvalidRequestError, ParseError), ex:
logging.error(ex)
self.error(ex.status)
body = self._build_error(ex)
self.response.out.write(simplejson.dumps(body))
else:
for msg in messages:
self.handle_message(msg)
responses = self.get_responses(messages)
if len(responses) == 0:
# Only notifications were sent
self.error(204)
return
if batch_request:
#TODO Which http_status to set for batches?
self.error(200)
body = [r[1] for r in responses]
self.response.out.write(simplejson.dumps(body))
else:
if len(responses) != 1:
# This should never happen
raise InternalError() # pragma: no cover
status, body = responses[0]
self.error(status)
self.response.out.write(simplejson.dumps(body))
def get_responses(self, messages):
"""Gets a list of responses from all 'messages'.
Responses are a tuple of HTTP-status and body.
A response may be None if the message was a notification and will be
excluded from the returned list.
:param list messages: JSON messages.
:returns: List of responses.
"""
responses = []
for msg in messages:
resp = self.get_response(msg)
if resp is not None:
responses.append(resp)
return responses
def handle_message(self, msg):
"""Executes a message.
The method of the message is executed.
Errors and/or results are written back to the message.
:param dict msg: A JSON-RPC message.
"""
if msg.error != None:
return
else:
try:
method = self.get_service_method(msg.method_name)
params = getattr(msg, 'params', None)
msg.result = self.execute_method(method, params)
except (MethodNotFoundError, InvalidParamsError, ServerError), ex:
logging.error(ex)
msg.error = ex
except Exception, ex:
logging.error(ex)
ex = InternalError("Error executing service method")
ex.data = ''.join(traceback.format_exception(*sys.exc_info()))
msg.error = ex
def parse_body(self, body):
"""Parses the body of POST request.
Validates for correct JSON and returns a tuple with a list of JSON-RPC
messages and wether the request was a batch-request.
Raises ParseError and InvalidRequestError.
:param string body: The HTTP body.
"""
try:
json = simplejson.loads(body)
except ValueError:
raise ParseError()
messages = []
if isinstance(json, (list, tuple)):
if len(json) == 0:
raise InvalidRequestError('Recieved an empty batch message')
batch_request = True
for obj in json:
msg = JsonRpcMessage(obj)
messages.append(msg)
if isinstance(json, (dict)):
batch_request = False
msg = JsonRpcMessage(json)
messages.append(msg)
return messages, batch_request
def get_response(self, msg):
"""Gets the response object for a message.
Returns a tuple of a HTTP-status and a json object or None.
The JSON object may be a JSON-RPC error object or a result object.
None is returned if the message was a notification.
:param dict msg: A JSON-RPC message.
:returns: Tuple with status and result.
"""
if msg.notification:
return None
elif msg.error:
return (msg.error.status,
self._build_error(msg.error, msg.message_id))
elif msg.result:
return (200, self._build_result(msg))
else: # pragma: no cover
# Should never be reached
logging.warn('Message neither contains an error nor a result')
def _build_error(self, err, message_id=None):
return {'jsonrpc':'2.0',
'error':err.getJsonData(),
'id':message_id}
def _build_result(self, msg):
return {'jsonrpc':'2.0',
'result':msg.result,
'id':msg.message_id}
def execute_method(self, method, params):
"""Executes the RPC method.
:param function method: A method object.
:param params: List, tuple or dictionary with JSON-RPC parameters.
"""
args, varargs, varkw, defaults = getargspec(method)
if varargs or varkw:
raise InvalidParamsError(
"Service method definition must not have variable parameters")
args_set = set(args[1:])
if params is None:
if not len(args_set) == 0:
raise InvalidParamsError(
"Wrong number of parameters; "
"expected %i but 'params' was omitted "
"from JSON-RPC message" % (len(args_set)))
return method()
elif isinstance(params, (list, tuple)):
if not len(args_set) == len(params):
raise InvalidParamsError(
"Wrong number of parameters; "
"expected %i got %i" % (len(args_set),len(params)))
return method(*params)
elif isinstance(params, dict):
paramset = set(params)
if not args_set == paramset:
raise InvalidParamsError(
"Named parameters do not "
"match method; expected %s" % (str(args_set)))
params = self.decode_dict_keys(params)
return method(**params)
def get_service_method(self, meth_name):
# TODO use inspect.getmembers()?
f = getattr(self, meth_name, None)
if (f == None or not hasattr(f, 'IsServiceMethod')
or not getattr(f, 'IsServiceMethod') == True):
raise MethodNotFoundError('Method %s not found' % meth_name)
return f
def decode_dict_keys(self, d):
"""Convert all keys in dict d to str.
Python does not allow unicode keys in dictionaries.
:param dict d: A JSON-RPC message.
"""
try:
r = {}
for (k, v) in d.iteritems():
r[str(k)] = v
return r
except UnicodeEncodeError: # pragma: no cover
# Unsure which error is the correct to raise here.
# Actually this code will probably never be reached
# because "wrong" parameters will be filtered out
# and returned as InvalidParamsError() and methods cant
# have non-ascii parameter names.
raise InvalidRequestError("Parameter-names must be ASCII")
| Python |
# -*- coding: utf-8 -*-
#
# Copyright 2011 Tobias Rodäbel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The gaesynkit package."""
# Package release version string.
VERSION = "1.0.0"
| Python |
# -*- coding: utf-8 -*-
#
# Copyright 2011 Tobias Rodäbel
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module for synchronization support.
A SyncInfo is a wrapper class for entities which holds the synchronization
status of a user's entity.
"""
from google.appengine.api import datastore
from google.appengine.api import datastore_types
from google.appengine.api import datastore_errors
# Public API of this module.
__all__ = ['SYNC_INFO_KIND', 'SyncInfo']
# Datastore kind under which synchronization info entities are stored.
SYNC_INFO_KIND = "SyncInfo"
class SyncInfo(object):
    """Wrapper class for synchronization info entities.

    Stores the version and content hash of a synchronized entity together
    with the key of the sync target, all inside a plain datastore entity.

    :param Entity entity: A datastore.Entity instance.
    """

    def __init__(self, entity):
        """Constructor."""
        if not isinstance(entity, datastore.Entity):
            raise TypeError("Expected datastore.Entity instance")
        self.__entity = entity

    @classmethod
    def from_params(cls, remote_key, version, content_hash, target_key=None,
                    user=None):
        """Retrieve or create a SyncInfo entity from the given parameters.

        :param string remote_key: Remote entity key.
        :param int version: Remote entity version.
        :param string content_hash: MD5 hex digest.
        :param datastore_types.Key target_key: Key of the sync target entity.
        :param datastore_types.User user: A user.
        """
        entity = datastore.Entity(SYNC_INFO_KIND, name=remote_key)
        props = {"version": version, "content_hash": content_hash}
        if target_key:
            props["target_key"] = target_key
        if user:
            props["user"] = user
        entity.update(props)
        return cls(entity)

    def entity(self):
        """Return the wrapped raw datastore.Entity instance."""
        return self.__entity

    def user(self):
        """Return the user, or None if none was provided."""
        return self.__entity.get("user")

    def version(self):
        """Return the entity version."""
        return self.__entity["version"]

    def incr_version(self):
        """Increment the entity version and return the new value."""
        new_version = self.__entity["version"] + 1
        self.__entity["version"] = new_version
        return new_version

    def content_hash(self):
        """Return the content hash as MD5 hex digest."""
        return self.__entity["content_hash"]

    def set_content_hash(self, content_hash):
        """Store a new content hash.

        :param str content_hash: MD5 hex digest.
        """
        self.__entity["content_hash"] = content_hash

    def target_key(self):
        """Return the sync target key, or None if not set."""
        return self.__entity.get("target_key")

    def target(self):
        """Fetch and return the sync target entity from the datastore."""
        return datastore.Get(self.__entity.get("target_key"))

    @classmethod
    def get(cls, keys):
        """Get one or more synchronization info entities.

        Missing entities yield None in place of a SyncInfo instance.

        :param key|list keys: One or a list of `datastore_types.Key` instances.
        """
        single = isinstance(keys, datastore_types.Key)
        if single:
            key_list = [keys]
        elif isinstance(keys, list):
            key_list = keys
        else:
            raise TypeError("SyncInfo.get(keys) takes a key or list of keys")
        results = []
        for key in key_list:
            try:
                results.append(cls(datastore.Get(key)))
            except datastore_errors.EntityNotFoundError:
                results.append(None)
        if single:
            return results[0]
        return results

    @classmethod
    def get_by_key_name(cls, key_names, parent=None):
        """Get one or more synchronization info entities by key name.

        :param string|list key_names: A key name, or a list of key names.
        :param Entity|Key parent: The parent.
        """
        def make_key(name):
            # Build the datastore key for a single SyncInfo key name.
            return datastore_types.Key.from_path(
                SYNC_INFO_KIND, name, parent=parent)

        if isinstance(key_names, basestring):
            return cls.get(make_key(key_names))
        if isinstance(key_names, list):
            return cls.get([make_key(name) for name in key_names])
        raise TypeError("SyncInfo.get_by_key_name(key_name, parent) takes "
                        "a key name or a list of key names")

    def key(self):
        """Return the key of this synchronization info entity."""
        return self.__entity.key()

    @staticmethod
    def kind():
        """Return the entity kind used for synchronization info entities."""
        return SYNC_INFO_KIND

    def put(self):
        """Write the synchronization info entity to the datastore."""
        return datastore.Put(self.__entity)
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.