code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
import pygtk
pygtk.require('2.0')
import gtk
import cairo
import math
import string
import threading
import time
import Queue
import os
#from backend.postgis import PostGisBackend as DataBackend
from backend.vtile import QuadTileBackend as DataBackend
from mapcss import MapCSS as Styling
from gtk_widget import KothicWidget
# Psyco is an optional JIT accelerator (Python 2 / i386 only); its absence is
# not fatal, so warn on stderr and continue.  The original except-branch
# called an undefined name `debug`, which raised NameError instead of warning.
try:
    import psyco
    psyco.full()
except ImportError:
    import sys
    sys.stderr.write("Psyco import failed. Program may run slower. "
                     "If you run it on an i386 machine, please install Psyco to get best performance.\n")
class KothicApp:
    """Main application window: a GTK shell around KothicWidget with a
    File menu (reload/exit) and a Style menu listing every .mapcss file
    found in the styles/ directory."""

    def __init__(self):
        self.width, self.height = 800, 480
        # (lon, lat) of the initial view and the initial zoom level.
        self.center_coord = (27.6549791, 53.8698)
        self.zoom = 17.
        self.data_projection = "EPSG:4326"
        self.data = DataBackend()
        self.load_style()
        self.request_d = (0, 0)

        self.window = gtk.Window()
        self.window.set_size_request(self.width, self.height)
        self.window.connect("destroy", gtk.main_quit)
        self.window.set_title("Kothic renderer")

        menu = gtk.MenuBar()
        filemenu = gtk.Menu()
        filem = gtk.MenuItem("File")
        filem.set_submenu(filemenu)

        i = gtk.MenuItem("Reload style")
        i.connect("activate", self.load_style)
        filemenu.append(i)

        # One "Style" menu entry per stylesheet in styles/.
        stylemenu = gtk.Menu()
        stylem = gtk.MenuItem("Style")
        stylem.set_submenu(stylemenu)
        styles = [name for name in os.listdir("styles") if ".mapcss" in name]
        for style in styles:
            i = gtk.MenuItem(style)
            i.StyleName = style  # remember which stylesheet this item loads
            i.connect("activate", self.reload_style)
            stylemenu.append(i)

        i = gtk.MenuItem("Exit")
        i.connect("activate", gtk.main_quit)
        filemenu.append(i)
        menu.append(filem)
        menu.append(stylem)

        vbox = gtk.VBox(False, 2)
        vbox.pack_start(menu, False, False, 0)
        self.KothicWidget = KothicWidget(self.data, self.style)
        self.KothicWidget.set_zoom(self.zoom)
        self.KothicWidget.jump_to(self.center_coord)
        vbox.pack_end(self.KothicWidget)
        self.window.add(vbox)

    def _parse_style(self, filename):
        # Read and parse one stylesheet from styles/, closing the file
        # handle when done (the original leaked the handle).
        parsed = Styling(0, 25)
        with open("styles/%s" % filename, "r") as f:
            parsed.parse(f.read())
        return parsed

    def load_style(self, widget=None):
        """(Re)load the default stylesheet.

        `widget` is the activating gtk.MenuItem when invoked via the
        "activate" signal; the original signature took no extra argument,
        so triggering the "Reload style" menu item raised TypeError.
        """
        self.style = self._parse_style("default.mapcss")

    def reload_style(self, w):
        """Menu callback: switch to the stylesheet named by the item."""
        self.style = self._parse_style(w.StyleName)
        self.KothicWidget.style_backend = self.style
        self.KothicWidget.redraw()

    def main(self):
        self.window.show_all()
        gtk.main()
        exit()
if __name__ == "__main__":
    # GDK thread support must be initialised before any GTK objects exist.
    gtk.gdk.threads_init()
    KothicApp().main()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
from debug import debug, Timer
from mapcss import MapCSS
import sys
import os
import Image
from libkomapnik import *
from optparse import OptionParser
# Optional psyco JIT speedup (Python 2 / i386 only); silently ignored
# when the module is unavailable.
try:
    import psyco
    psyco.full()
except ImportError:
    pass
def relaxedFloat(x):
    """Parse *x* as a number, tolerating a comma decimal separator.

    Returns an int when the value is integral ("5", "5.0", 5.0 -> 5) and a
    float otherwise ("5.5" -> 5.5, "5,5" -> 5.5).  Raises ValueError when
    *x* is not numeric at all.

    The original returned `int(x)` for integral values, which raised
    ValueError on strings like "5.0" and fell through to the comma-replace
    fallback, yielding 5.0 (float) instead of 5 (int).
    """
    try:
        f = float(x)
    except ValueError:
        # MapCSS stylesheets sometimes use "," as the decimal separator.
        f = float(str(x).replace(",", "."))
    i = int(f)
    return i if i == f else f
# Command-line interface: target renderer, MapCSS stylesheet, zoom range,
# label language and output destination.
parser = OptionParser()
parser.add_option("-r", "--renderer", dest="renderer", default="mapnik",
                  help="which renderer stylesheet to generate", metavar="ENGINE")
parser.add_option("-s", "--stylesheet", dest="filename",
                  help="read MapCSS stylesheet from FILE", metavar="FILE")
parser.add_option("-f", "--minzoom", dest="minzoom", default=0, type="int",
                  help="minimal available zoom level", metavar="ZOOM")
parser.add_option("-t", "--maxzoom", dest="maxzoom", default=19, type="int",
                  help="maximal available zoom level", metavar="ZOOM")
parser.add_option("-l", "--locale", dest="locale",
                  help="language that should be used for labels (ru, en, be, uk..)", metavar="LANG")
parser.add_option("-o", "--output-file", dest="outfile", default="-",
                  help="output filename (defaults to stdout)", metavar="FILE")
parser.add_option("-p", "--osm2pgsql-style", dest="osm2pgsqlstyle", default="-",
                  help="osm2pgsql stylesheet filename", metavar="FILE")
(options, args) = parser.parse_args()
#print (options, args)
minzoom = options.minzoom
maxzoom = options.maxzoom+1  # make the configured top zoom inclusive
locale = options.locale

# Output stream: stdout by default, or the file given via -o.
if options.outfile == "-":
    mfile = sys.stdout
else:
    mfile = open(options.outfile,"w")

# Columns available directly in the osm2pgsql tables, keyed by tag name.
# Tags missing from this map must be fetched from the hstore `tags` column
# instead (see escape_sql_column).
osm2pgsql_avail_keys = {} # "column" : ["node", "way"]
if options.osm2pgsqlstyle != "-":
    mf = open(options.osm2pgsqlstyle, "r")
    for line in mf:
        line = line.strip().split()
        if line and line[0][0] != "#":
            # line[0] = object types ("node,way"), line[1] = column name
            osm2pgsql_avail_keys[line[1]] = tuple(line[0].split(","))
def escape_sql_column(name, type="way", asname=False):
    """Render a tag key as a SQL column reference.

    Keys present in the osm2pgsql table schema (or all keys, when no
    schema was loaded) become plain quoted columns; anything else is
    pulled from the hstore `tags` column.  With asname=True an alias is
    appended so the SELECT output keeps the bare key name.
    """
    if name in mapped_cols:
        # Already rewritten by the locale column map - emit verbatim.
        return name
    key = name.strip().strip('"')
    geom = {'line': 'way', 'area': 'way'}.get(type, type)
    if not osm2pgsql_avail_keys or geom in osm2pgsql_avail_keys.get(key, ()):
        return '"' + key + '"'
    if asname:
        return "(tags->'" + key + "') as \"" + key + '"'
    return "(tags->'" + key + "')"
# Parse the stylesheet once for the whole requested zoom range.
style = MapCSS(minzoom, maxzoom) #zoom levels
style.parse(open(options.filename,"r").read())
if options.renderer == "js":
    # JavaScript renderer: emit a single restyle(prop, zoom, type)
    # function that evaluates every MapCSS chooser as inline conditions.
    # Maps MapCSS subjects to the GeoJSON geometry types they match.
    subjs = {"canvas": ("canvas",),"way": ("Polygon","LineString"), "line":("Polygon","LineString"), "area": ("Polygon",), "node": ("Point",), "*":("Point","Polygon","LineString") }
    mfile.write("function restyle (prop, zoom, type){")
    mfile.write("style = new Object;")
    mfile.write('style["default"] = new Object;')
    for chooser in style.choosers:
        condition = ""
        subclass = "default"
        for i in chooser.ruleChains[0]:
            if condition:
                condition += "||"
            rule = " zoom >= %s && zoom <= %s"%(i.minZoom, i.maxZoom)
            for z in i.conditions:
                t = z.type
                params = z.params
                if params[0] == "::class":
                    # Pseudo-condition selecting a style sub-class,
                    # not a tag test - strip the leading "::".
                    subclass = params[1][2:]
                    continue
                if rule:
                    rule += " && "
                if t == 'eq':
                    rule += 'prop["%s"] == "%s"'%(params[0], params[1])
                if t == 'ne':
                    rule += 'prop["%s"] != "%s"'%(params[0], params[1])
                if t == 'regex':
                    rule += 'prop["%s"].match(RegExp("%s"))'%(params[0], params[1])
                if t == 'true':
                    rule += 'prop["%s"] == "yes"'%(params[0])
                if t == 'untrue':
                    rule += 'prop["%s"] != "yes"'%(params[0])
                if t == 'set':
                    rule += '"%s" in prop'%(params[0])
                if t == 'unset':
                    rule += '!("%s"in prop)'%(params[0])
                if t == '<':
                    rule += 'prop["%s"] < %s'%(params[0], params[1])
                if t == '<=':
                    rule += 'prop["%s"] <= %s'%(params[0], params[1])
                if t == '>':
                    rule += 'prop["%s"] > %s'%(params[0], params[1])
                if t == '>=':
                    rule += 'prop["%s"] >= %s'%(params[0], params[1])
            if rule:
                rule = "&&" + rule
            # Each rule chain contributes one (geometry-type && conditions)
            # alternative, OR-ed into the chooser's overall condition.
            condition += "(("+"||".join(['type == "%s"'%z for z in subjs[i.subject]])+") "+ rule + ")"
        #print chooser.styles
        styles = ""
        if subclass != "default":
            # Lazily create the sub-class object on first use.
            styles = 'if(!("%s" in style)){style["%s"] = new Object;}'%(subclass,subclass)
        for k, v in chooser.styles[0].iteritems():
            if type(v) == str:
                try:
                    # Numeric-looking values are emitted unquoted...
                    v = str(float(v))
                    styles += 'style["'+subclass+'"]["'+k+'"] = '+v + ';'
                except:
                    # ...everything else as a quoted JS string literal.
                    styles += 'style["'+subclass+'"]["'+k+'"] = "' + v + '";'
        mfile.write("if(%s) {%s};\n"%(condition,styles))
    mfile.write("return style;}")
if options.renderer == "mapnik":
columnmap = {}
if locale == "en":
columnmap["name"] = ("""COALESCE("name:en","int_name", replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(replace(translate("name",'абвгдезиклмнопрстуфьАБВГДЕЗИКЛМНОПРСТУФЬ','abvgdeziklmnoprstuf’ABVGDEZIKLMNOPRSTUF’'),'х','kh'),'Х','Kh'),'ц','ts'),'Ц','Ts'),'ч','ch'),'Ч','Ch'),'ш','sh'),'Ш','Sh'),'щ','shch'),'Щ','Shch'),'ъ','”'),'Ъ','”'),'ё','yo'),'Ё','Yo'),'ы','y'),'Ы','Y'),'э','·e'),'Э','E'),'ю','yu'),'Ю','Yu'),'й','y'),'Й','Y'),'я','ya'),'Я','Ya'),'ж','zh'),'Ж','Zh')) AS name""",('name:en','int_name',))
elif locale == "be":
columnmap["name"] = ('COALESCE("name:be", "name:ru", "int_name", "name:en", "name") AS name',('name:be', "name:ru", "int_name", "name:en"))
elif locale:
columnmap["name"] = ('COALESCE("name:'+locale+'", "name") AS name',('name:'+locale,))
mapped_cols = [i[0] for i in columnmap.values()]
numerics = set() # set of number-compared things, like "population<10000" needs population as number, not text
mapniksheet = {}
# {zoom: {z-index: [{sql:sql_hint, cond: mapnikfiltercondition, subject: subj, style: {a:b,c:d..}},{r2}...]...}...}
coast = {}
fonts = set()
for zoom in range (minzoom, maxzoom):
mapniksheet[zoom] = {}
zsheet = mapniksheet[zoom]
for chooser in style.choosers:
if chooser.get_sql_hints(chooser.ruleChains[0][0].subject, zoom)[1]:
#sys.stderr.write(str(chooser.get_sql_hints(chooser.ruleChains[0][0].subject, zoom)[1])+"\n")
styles = chooser.styles[0]
zindex = styles.get("z-index",0)
if zindex not in zsheet:
zsheet[zindex] = []
chooser_entry = {}
chooser_entry["type"] = chooser.ruleChains[0][0].subject
sql = "("+ chooser.get_sql_hints(chooser.ruleChains[0][0].subject,zoom)[1] +")"
sql = sql.split('"')
sq = ""
odd = True
for i in sql:
if not odd:
sq += escape_sql_column(i, chooser_entry["type"])
else:
sq += i
odd = not odd
chooser_entry["sql"] = sq
chooser_entry["style"] = styles
fonts.add(styles.get("font-family","DejaVu Sans Book"))
chooser_entry["rule"] = [i.conditions for i in chooser.ruleChains[0] if i.test_zoom(zoom)]
numerics.update(chooser.get_numerics())
#print chooser_entry["rule"]
chooser_entry["rulestring"] = " or ".join([ "("+ " and ".join([i.get_mapnik_filter() for i in rule if i.get_mapnik_filter()]) + ")" for rule in chooser_entry["rule"]])
chooser_entry["chooser"] = chooser
if chooser_entry["type"] == "area" and "[natural] = 'coastline'" in chooser_entry["rulestring"]:
coast[zoom] = chooser_entry["style"]
else:
zsheet[zindex].append(chooser_entry)
#sys.stderr.write(str(numerics)+"\n")
#print mapniksheet
def add_numerics_to_itags(itags, escape = True):
    """Return a new set of SELECT expressions for *itags*.

    Every tag becomes its (possibly hstore-rewritten) column reference;
    tags listed in the global `numerics` set additionally get a
    `<tag>__num` companion column that casts digit-only text to FLOAT
    (NULL otherwise).  With escape=False only plain double-quoting is
    applied - used for subqueries where hstore rewriting is unwanted.
    """
    if escape:
        quote = escape_sql_column
    else:
        def quote(i, asname=False):
            if i in mapped_cols:
                return i  # already escaped by the locale column map
            return '"' + i + '"'
    numeric_exprs = set()
    columns = set()
    for tag in itags:
        if tag in numerics:
            numeric_exprs.add("""(CASE WHEN %s ~ E'^[[:digit:]]+([.][[:digit:]]+)?$' THEN CAST (%s AS FLOAT) ELSE NULL END) as %s__num""" % (quote(tag), quote(tag), tag))
        columns.add(quote(tag, asname = True))
    columns.update(numeric_exprs)
    return columns
bgcolor = style.get_style("canvas", {}, maxzoom)[0].get("fill-color", "")
opacity = style.get_style("canvas", {}, maxzoom)[0].get("opacity", 1)
demhack = style.get_style("canvas", {}, maxzoom)[0].get("-x-mapnik-dem-hack", False)
if (opacity == 1) and bgcolor:
mfile.write(xml_start(bgcolor))
else:
mfile.write(xml_start("transparent"))
conf_full_layering = style.get_style("canvas", {}, maxzoom)[0].get("-x-mapnik-true-layers", "true").lower() == 'true'
for font in fonts:
mfile.write(xml_fontset(font, True))
for zoom, zsheet in mapniksheet.iteritems():
x_scale = xml_scaledenominator(zoom)
ta = zsheet.keys()
ta.sort(key=float)
if demhack and zoom >= 7:
xml="""
<Style name="elevation1z%s">
<Rule>%s
<RasterSymbolizer>
<RasterColorizer default-mode="linear" epsilon="0.001">
<stop value="701" color="#98b7f5"/>
<stop value="1701" color="#9fbcf5"/>
<stop value="2701" color="#a6c1f5"/>
<stop value="3701" color="#abc4f5"/>
<stop value="4701" color="#b0c7f5"/>
<stop value="5701" color="#b5caf5"/>
<stop value="6701" color="#bacef5"/>
<stop value="7701" color="#bfd1f5"/>
<stop value="8701" color="#c4d4f5"/>
<stop value="9701" color="#c6d6f5"/>
<stop value="10201" color="#c9d7f5"/>
<!--stop value="10501" color="#cbd9f5"/-->
<!-- stop value="10701" color="cedbf5"/ -->
<stop value="10502" color="rgba(231, 209, 175, 0.1)"/>
<!--stop value="10701" color="rgba(50, 180, 50, 0.0)"/ -->
<stop value="10901" color="rgba(231, 209, 175, 0.2)"/>
<stop value="11201" color="rgba(226, 203, 170, 0.2)"/>
<stop value="11701" color="rgba(217, 194, 159, 0.3)"/>
<stop value="12701" color="rgba(208, 184, 147, 0.4)"/>
<stop value="13701" color="rgba(197, 172, 136, 0.5)"/>
<stop value="14701" color="rgba(188, 158, 120, 0.55)"/>
<stop value="15701" color="rgba(179, 139, 102, 0.6)"/>
<stop value="16701" color="rgba(157, 121, 87, 0.7)"/>
<stop value="17701" color="rgba(157, 121, 87, 0.8)"/>
<stop value="18701" color="rgba(144, 109, 77, 0.9)"/>
</RasterColorizer>
</RasterSymbolizer>
</Rule>
</Style>
<Layer name="ele-raster1z%s">
<StyleName>elevation1z%s</StyleName>
<Datasource>
<Parameter name="file">/raid/srtm/Full/CleanTOPO2merc.tif</Parameter>
<Parameter name="type">gdal</Parameter>
<Parameter name="band">1</Parameter>
<Parameter name="srid">4326</Parameter>
</Datasource>
</Layer>
"""
xml = xml%(zoom, x_scale, zoom, zoom)
mfile.write(xml)
if zoom in coast:
xml = xml_style_start()
xml += xml_rule_start()
xml += x_scale
if "fill-color" in coast[zoom]:
xml += xml_polygonsymbolizer(coast[zoom].get("fill-color", "#ffffff"), relaxedFloat(coast[zoom].get("fill-opacity", "1")))
if "fill-image" in coast[zoom]:
xml += xml_polygonpatternsymbolizer(coast[zoom].get("fill-image", ""))
xml += xml_rule_end()
xml += xml_style_end()
xml += xml_layer("coast", zoom=zoom)
mfile.write(xml)
if demhack and zoom < 7:
xml="""
<Style name="elevationz%s">
<Rule>%s
<RasterSymbolizer>
<RasterColorizer default-mode="linear" epsilon="0.001">
<stop value="701" color="#98b7f5"/>
<stop value="1701" color="#9fbcf5"/>
<stop value="2701" color="#a6c1f5"/>
<stop value="3701" color="#abc4f5"/>
<stop value="4701" color="#b0c7f5"/>
<stop value="5701" color="#b5caf5"/>
<stop value="6701" color="#bacef5"/>
<stop value="7701" color="#bfd1f5"/>
<stop value="8701" color="#c4d4f5"/>
<stop value="9701" color="#c6d6f5"/>
<stop value="10201" color="#c9d7f5"/>
<!--stop value="10501" color="#cbd9f5"/-->
<!-- stop value="10701" color="cedbf5"/ -->
<stop value="10502" color="rgba(231, 209, 175, 0.1)"/>
<!--stop value="10701" color="rgba(50, 180, 50, 0.0)"/ -->
<stop value="10901" color="rgba(231, 209, 175, 0.2)"/>
<stop value="11201" color="rgba(226, 203, 170, 0.2)"/>
<stop value="11701" color="rgba(217, 194, 159, 0.3)"/>
<stop value="12701" color="rgba(208, 184, 147, 0.4)"/>
<stop value="13701" color="rgba(197, 172, 136, 0.5)"/>
<stop value="14701" color="rgba(188, 158, 120, 0.55)"/>
<stop value="15701" color="rgba(179, 139, 102, 0.6)"/>
<stop value="16701" color="rgba(157, 121, 87, 0.7)"/>
<stop value="17701" color="rgba(157, 121, 87, 0.8)"/>
<stop value="18701" color="rgba(144, 109, 77, 0.9)"/>
</RasterColorizer>
</RasterSymbolizer>
</Rule>
</Style>
<Layer name="ele-rasterz%s">
<StyleName>elevationz%s</StyleName>
<Datasource>
<Parameter name="file">/raid/srtm/Full/CleanTOPO2merc.tif</Parameter>
<Parameter name="type">gdal</Parameter>
<Parameter name="band">1</Parameter>
<Parameter name="srid">4326</Parameter>
</Datasource>
</Layer>
"""
xml = xml%(zoom, x_scale, zoom, zoom)
mfile.write(xml)
if demhack and zoom >= 7:
xml="""
<Style name="elevationz%s">
<Rule>%s
<RasterSymbolizer>
<RasterColorizer default-mode="linear" epsilon="0.001">
<stop value="-100" color="rgba(231, 209, 175, 0.1)"/>
<stop value="200" color="rgba(231, 209, 175, 0.2)"/>
<stop value="500" color="rgba(226, 203, 170, 0.2)"/>
<stop value="1000" color="rgba(217, 194, 159, 0.3)"/>
<stop value="2000" color="rgba(208, 184, 147, 0.4)"/>
<stop value="3000" color="rgba(197, 172, 136, 0.5)"/>
<stop value="4000" color="rgba(188, 158, 120, 0.55)"/>
<stop value="5000" color="rgba(179, 139, 102, 0.6)"/>
<stop value="6000" color="rgba(157, 121, 87, 0.7)"/>
<stop value="7000" color="rgba(157, 121, 87, 0.8)"/>
<stop value="8000" color="rgba(144, 109, 77, 0.9)"/>
</RasterColorizer>
</RasterSymbolizer>
</Rule>
</Style>
<Layer name="ele-rasterz%s">
<StyleName>elevationz%s</StyleName>
<Datasource>
<Parameter name="file">/raid/srtm/srtmm.vrt</Parameter>
<Parameter name="type">gdal</Parameter>
<Parameter name="band">1</Parameter>
<Parameter name="srid">4326</Parameter>
</Datasource>
</Layer>
"""
xml = xml%(zoom, x_scale, zoom, zoom)
mfile.write(xml)
sql_g = set()
there_are_dashed_lines = False
itags_g = set()
xml_g = ""
for zindex in ta:
## background areas pass
sql = set()
itags = set()
xml = xml_style_start()
for entry in zsheet[zindex]:
if entry["type"] in ("way", "area", "polygon"):
if "background-color" in entry["style"] or "background-image" in entry["style"]:
xml += xml_rule_start()
xml += x_scale
xml += xml_filter(entry["rulestring"])
if "background-color" in entry["style"]:
xml += xml_polygonsymbolizer(entry["style"].get("background-color", "black"), entry["style"].get("background-opacity", "1"))
if "background-image" in entry["style"]:
xml += xml_polygonpatternsymbolizer(entry["style"].get("background-image", ""))
sql.add(entry["sql"])
itags.update(entry["chooser"].get_interesting_tags(entry["type"], zoom))
xml += xml_rule_end()
xml += xml_style_end()
sql.discard("()")
if sql:
sql_g.update(sql)
xml_g += xml
itags_g.update(itags)
else:
xml_nosubstyle()
sql = sql_g
itags = itags_g
if sql:
mfile.write(xml_g)
sql = "(" + " OR ".join(sql) + ")"# and way && !bbox!"
itags = add_numerics_to_itags(itags)
mfile.write(xml_layer("postgis", "polygon", itags, sql, zoom=zoom ))
else:
xml_nolayer()
if demhack and zoom<6:
xml = """
<Style name="hillshadez%s">
<Rule>
%s
<RasterSymbolizer opacity="1" scaling="bilinear" mode="multiply">
<RasterColorizer default-mode="linear">
<stop value="0" color="rgba(0,0,0,0.2)" />
<stop value="255" color="rgba(255,255,255,0)" />
</RasterColorizer>
</RasterSymbolizer>
</Rule>
</Style>
<Layer name="datarasterz%s">
<StyleName>hillshadez%s</StyleName>
<Datasource>
<Parameter name="file">/raid/srtm/Full/CleanTOPO2merchs.tif</Parameter>
<Parameter name="type">gdal</Parameter>
<Parameter name="band">1</Parameter>
</Datasource>
</Layer>
"""
xml = xml%(zoom, x_scale, zoom, zoom)
mfile.write(xml)
index_range = range(-6,7)
full_layering = conf_full_layering
if (zoom < 9) or not conf_full_layering :
index_range = (-6,0,6)
full_layering = False
def check_if_roads_table(rulestring):
    """Return True when *rulestring* references one of the tag tests that
    the slimmer planet_osm_roads table covers (major highways, admin
    boundaries, railways); the caller uses this to decide whether the
    roads table can be queried instead of planet_osm_line."""
    road_markers = (
        "[highway] = 'secondary'",
        "[highway] = 'secondary_link'",
        "[highway] = 'primary'",
        "[highway] = 'primary_link'",
        "[highway] = 'trunk'",
        "[highway] = 'trunk_link'",
        "[highway] = 'motorway'",
        "[highway] = 'motorway_link'",
        "[boundary] = 'administrative'",
        "[railway] ",
    )
    return any(marker in rulestring for marker in road_markers)
for zlayer in index_range:
for layer_type, entry_types in [("line",("way", "line")),("polygon",("way","area"))]:
sql_g = set()
there_are_dashed_lines = False
itags_g = set()
xml_g = ""
roads = (layer_type == 'line') and (zoom < 9) # whether to use planet_osm_roads
## casings pass
for zindex in ta:
sql = set()
itags = set()
xml = xml_style_start()
for entry in zsheet[zindex]:
if entry["type"] in entry_types:
if "-x-mapnik-layer" in entry["style"]:
if zlayer != -6 and entry["style"]["-x-mapnik-layer"] == "bottom":
continue
if zlayer != 6 and entry["style"]["-x-mapnik-layer"] == "top":
continue
elif zlayer not in range(-5,6):
continue
if "casing-width" in entry["style"]:
xml += xml_rule_start()
xml += x_scale
xml += xml_filter(entry["rulestring"])
if not check_if_roads_table(entry["rulestring"]):
roads = False
twidth = 2*float(entry["style"].get("casing-width", 1))+float(entry["style"].get("width", 0));
tlinejoin = "round"
if twidth < 3:
tlinejoin = "miter"
xml += xml_linesymbolizer(color=entry["style"].get("casing-color", "black"),
width=twidth,
opacity=relaxedFloat(entry["style"].get("casing-opacity", entry["style"].get("opacity","1"))),
linecap=entry["style"].get("casing-linecap", entry["style"].get("linecap","butt")),
linejoin=entry["style"].get("casing-linejoin", entry["style"].get("linejoin", "round")),
dashes=entry["style"].get("casing-dashes",entry["style"].get("dashes", "")),
zoom=zoom)
sql.add(entry["sql"])
itags.update(entry["chooser"].get_interesting_tags(entry["type"], zoom))
xml += xml_rule_end()
xml += xml_style_end()
sql.discard("()")
if sql:
sql_g.update(sql)
xml_g += xml
itags_g.update(itags)
else:
xml_nosubstyle()
sql = sql_g
itags = itags_g
if sql:
mfile.write(xml_g)
sql = "(" + " OR ".join(sql) + ")"# and way && !bbox!"
if zlayer == 0 and full_layering:
sql = "("+ sql +') and ("layer" not in ('+ ", ".join(['\'%s\''%i for i in range(-5,6) if i != 0])+") or \"layer\" is NULL)"
elif zlayer <=5 and zlayer >= -5 and full_layering:
sql = "("+ sql +') and "layer" = \'%s\''%zlayer
itags = add_numerics_to_itags(itags)
if roads:
layer_type = 'roads'
mfile.write(xml_layer("postgis", layer_type, itags, sql, zoom=zoom ))
else:
xml_nolayer()
for zindex in ta:
for layer_type, entry_types in [("line",("way", "line")),("polygon",("way","area"))]:
## lines and polygons pass
sql_g = set()
there_are_dashed_lines = False
there_are_line_patterns = False
itags_g = set()
roads = (layer_type == 'line') and (zoom < 9) # whether to use planet_osm_roads
xml_g = ""
sql = set()
itags = set()
xml = xml_style_start()
for entry in zsheet[zindex]:
if entry["type"] in entry_types:
if "-x-mapnik-layer" in entry["style"]:
if zlayer != -6 and entry["style"]["-x-mapnik-layer"] == "bottom":
continue
if zlayer != 6 and entry["style"]["-x-mapnik-layer"] == "top":
continue
elif zlayer not in range(-5,6):
continue
if "width" in entry["style"] or "pattern-image" in entry["style"] or (("fill-color" in entry["style"] or "fill-image" in entry["style"]) and layer_type == "polygon"):
xml += xml_rule_start()
xml += x_scale
xml += xml_filter(entry["rulestring"])
if not check_if_roads_table(entry["rulestring"]):
roads = False
if layer_type == "polygon":
if "fill-color" in entry["style"]:
xml += xml_polygonsymbolizer(entry["style"].get("fill-color", "black"), entry["style"].get("fill-opacity", "1"))
if "fill-image" in entry["style"]:
xml += xml_polygonpatternsymbolizer(entry["style"].get("fill-image", ""))
if "width" in entry["style"]:
twidth = relaxedFloat(entry["style"].get("width", "1"))
tlinejoin = "round"
if twidth <= 2:
tlinejoin = "miter"
xml += xml_linesymbolizer(color=entry["style"].get("color", "black"),
width=twidth,
opacity=relaxedFloat(entry["style"].get("opacity", "1")),
linecap=entry["style"].get("linecap", "round"),
linejoin=entry["style"].get("linejoin", "round"),
dashes=entry["style"].get("dashes", ""),
zoom=zoom)
if entry["style"].get("dashes", ""):
there_are_dashed_lines = True
#print "dashes!!!"
if "pattern-image" in entry["style"]:
there_are_line_patterns = True
if entry["style"]["pattern-image"] == "arrows":
xml += xml_hardcoded_arrows()
else:
if "pattern-rotate" in entry["style"] or "pattern-spacing" in entry["style"]:
fname = entry["style"]["pattern-image"]
im = Image.open(icons_path + fname).convert("RGBA")
fname = "f"+fname
if "pattern-rotate" in entry["style"]:
im = im.rotate(relaxedFloat(entry["style"]["pattern-rotate"]))
fname = "r"+str(relaxedFloat(entry["style"]["pattern-rotate"]))+fname
if "pattern-scale" in entry["style"]:
sc = relaxedFloat(entry["style"]["pattern-scale"])*1.
ns = (max(int(round(im.size[0]*sc)),1), max(int(round(im.size[1]*sc)),1))
im = im.resize(ns, Image.BILINEAR)
fname = "z"+str(sc)+fname
if "pattern-spacing" in entry["style"]:
im2 = Image.new("RGBA", (im.size[0]+int(relaxedFloat(entry["style"]["pattern-spacing"])),im.size[1]))
im2.paste(im,(0,0))
im = im2
fname = "s"+str(int(relaxedFloat(entry["style"]["pattern-spacing"])))+fname
try:
if not os.path.exists(icons_path+"komap/"):
os.makedirs(icons_path+"komap/")
if not os.path.exists(icons_path+"komap/"+fname):
im.save(icons_path+"komap/"+fname, "PNG")
xml += xml_linepatternsymbolizer("komap/"+fname)
except OSError, IOError:
print >> sys.stderr, "Error writing to ", icons_path+"komap/"+fname
else:
xml += xml_linepatternsymbolizer(entry["style"]["pattern-image"])
sql.add(entry["sql"])
itags.update(entry["chooser"].get_interesting_tags(entry["type"], zoom))
xml += xml_rule_end()
xml += xml_style_end()
sql.discard("()")
if sql:
sql_g.update(sql)
xml_g += xml
itags_g.update(itags)
else:
xml_nosubstyle()
sql = sql_g
itags = itags_g
if sql:
mfile.write(xml_g)
sql = "(" + " OR ".join(sql) + ")"# and way && !bbox!"
if zlayer == 0 and full_layering:
sql = "("+ sql +') and ("layer" not in ('+ ", ".join(['\'%s\''%i for i in range(-5,6) if i != 0])+") or \"layer\" is NULL)"
elif zlayer <=5 and zlayer >= -5 and full_layering:
sql = "("+ sql +') and "layer" = \'%s\''%zlayer
oitags = itags
itags = add_numerics_to_itags(itags)
if layer_type == "polygon" and there_are_line_patterns:
itags = ", ".join(itags)
oitags = '"'+ "\", \"".join(oitags) +'"'
sqlz = """SELECT %s, ST_ForceRHR(way) as way from planet_osm_polygon where (%s) and way && !bbox! and ST_IsValid(way)"""%(itags,sql)
mfile.write(xml_layer("postgis-process", layer_type, itags, sqlz, zoom=zoom ))
#### FIXME: Performance degrades painfully on large lines ST_Union. Gotta find workaround :(
#if layer_type == "polygon" and there_are_dashed_lines:
#itags = ", ".join(itags)
#oitags = '"'+ "\", \"".join(oitags) +'"'
#sqlz = """select %s, ST_LineMerge(ST_Union(way)) as way from
#(SELECT %s, ST_Boundary(way) as way from planet_osm_polygon where (%s) and way && !bbox! and ST_IsValid(way) ) tex
#group by %s
#"""%(itags,oitags,sql,oitags)
##elif layer_type == "line" and there_are_dashed_lines:
## sqlz = """select %s, ST_Union(way) as way from (SELECT * from planet_osm_line where way && !bbox! #and (%s)) as tex
## group by %s
## """%(itags,sql,oitags)
#mfile.write(xml_layer("postgis-process", layer_type, itags, sqlz, zoom=zoom ))
else:
if roads:
layer_type = 'roads'
mfile.write(xml_layer("postgis", layer_type, itags, sql, zoom=zoom ))
else:
xml_nolayer()
## icons pass
sql_g = set()
itags_g = set()
xml_g = ""
prevtype = ""
for zindex in ta:
for layer_type, entry_types in [("point", ("node", "point")),("line",("way", "line")), ("polygon",("way","area"))]:
sql = set()
itags = set()
style_started = False
for entry in zsheet[zindex]:
if entry["type"] in entry_types:
if "icon-image" in entry["style"] and ("text" not in entry["style"] or ("text" in entry["style"] and entry["style"].get("text-position","center")!='center')):
if not prevtype:
prevtype = layer_type
if prevtype != layer_type:
if sql_g:
mfile.write(xml_g)
sql_g = "(" + " OR ".join(sql_g) + ")"# and way && !bbox!"
itags_g = add_numerics_to_itags(itags_g)
mfile.write(xml_layer("postgis", prevtype, itags_g, sql_g, zoom=zoom ))
sql_g = set()
itags_g = set()
xml_g = ""
sql = set()
itags = set()
else:
xml_nolayer()
prevtype = layer_type
if not style_started:
xml = xml_style_start()
style_started = True
xml += xml_rule_start()
xml += x_scale
xml += xml_filter(entry["rulestring"])
xml += xml_pointsymbolizer(
path=entry["style"].get("icon-image", ""),
width=entry["style"].get("icon-width", ""),
height=entry["style"].get("icon-height", ""),
opacity=relaxedFloat(entry["style"].get("opacity", "1")))
sql.add(entry["sql"])
itags.update(entry["chooser"].get_interesting_tags(entry["type"], zoom))
xml += xml_rule_end()
if style_started:
xml += xml_style_end()
style_started = False
sql.discard("()")
if sql:
sql_g.update(sql)
xml_g += xml
itags_g.update(itags)
else:
xml_nosubstyle()
if sql_g:
mfile.write(xml_g)
sql_g = "(" + " OR ".join(sql_g) + ")"# and way && !bbox!"
itags_g = add_numerics_to_itags(itags_g)
mfile.write(xml_layer("postgis", prevtype, itags_g, sql_g, zoom=zoom ))
else:
xml_nolayer()
ta.reverse()
for zindex in ta:
for layer_type, entry_types in [ ("polygon",("way","area")),("point", ("node", "point")),("line",("way", "line"))]:
for placement in ("center","line"):
## text pass
collhere = set()
for entry in zsheet[zindex]:
if entry["type"] in entry_types:#, "node", "line", "point"):
if "text" in entry["style"] and entry["style"].get("text-position","center")==placement:
csb = entry["style"].get("collision-sort-by",None)
cso = entry["style"].get("collision-sort-order","desc")
collhere.add((csb,cso))
for snap_to_street in ('true', 'false'):
for (csb, cso) in collhere:
sql = set()
itags = set()
texttags = set()
xml = xml_style_start()
for entry in zsheet[zindex]:
if entry["type"] in entry_types and csb == entry["style"].get("collision-sort-by",None) and cso == entry["style"].get("collision-sort-order","desc") and snap_to_street == entry["style"].get("-x-mapnik-snap-to-street","false"):
if "text" in entry["style"] and entry["style"].get("text-position","center")==placement:
ttext = entry["style"]["text"].extract_tags().pop()
texttags.add(ttext)
tface = entry["style"].get("font-family","DejaVu Sans Book")
tsize = entry["style"].get("font-size","10")
tcolor = entry["style"].get("text-color","#000000")
thcolor= entry["style"].get("text-halo-color","#ffffff")
thradius= relaxedFloat(entry["style"].get("text-halo-radius","0"))
tplace= entry["style"].get("text-position","center")
toffset= relaxedFloat(entry["style"].get("text-offset","0"))
toverlap= entry["style"].get("text-allow-overlap",entry["style"].get("allow-overlap","false"))
tdistance= relaxedFloat(entry["style"].get("-x-mapnik-min-distance","20"))
twrap= relaxedFloat(entry["style"].get("max-width",256))
talign= entry["style"].get("text-align","center")
topacity= relaxedFloat(entry["style"].get("text-opacity",entry["style"].get("opacity","1")))
tpos = entry["style"].get("text-placement","X")
ttransform = entry["style"].get("text-transform","none")
xml += xml_rule_start()
xml += x_scale
xml += xml_filter(entry["rulestring"])
if "icon-image" in entry["style"] and entry["style"].get("text-position","center")=='center':
xml += xml_shieldsymbolizer(
entry["style"].get("icon-image", ""),
entry["style"].get("icon-width", ""),
entry["style"].get("icon-height", ""),
ttext,tface,tsize,tcolor, thcolor, thradius, tplace,
toffset,toverlap,tdistance,twrap,talign,topacity, ttransform)
else:
xml += xml_textsymbolizer(ttext,tface,tsize,tcolor, thcolor, thradius, tplace, toffset,toverlap,tdistance,twrap,talign,topacity,tpos,ttransform)
sql.add(entry["sql"])
itags.update(entry["chooser"].get_interesting_tags(entry["type"], zoom))
xml += xml_rule_end()
xml += xml_style_end()
sql.discard("()")
if sql:
order = ""
if csb:
if cso != "desc":
cso = "asc"
order = """ order by (CASE WHEN "%s" ~ E'^[[:digit:]]+([.][[:digit:]]+)?$' THEN to_char(CAST ("%s" AS FLOAT) ,'000000000000000.99999999999') else "%s" end) %s nulls last """%(csb,csb,csb,cso)
mfile.write(xml)
add_tags = set()
for t in itags:
if t in columnmap:
add_tags.update(columnmap[t][1])
texttags.update(columnmap[t][1])
oitags = itags.union(add_tags) # SELECT: (tags->'mooring') as "mooring"
oitags = ", ".join([ escape_sql_column(i, asname=True) for i in oitags])
goitags = itags.union(add_tags) # GROUP BY: (tags->'mooring')
goitags = ", ".join([ escape_sql_column(i) for i in goitags])
fitags = [columnmap.get(i, (i,))[0] for i in itags]
#fitags = add_numerics_to_itags(itags)
itags = add_numerics_to_itags(fitags) # population => {population, population__num}
neitags = add_numerics_to_itags(fitags, escape = False) # for complex polygons, no escapng needed
del fitags
ttext = " OR ".join(['"'+i+ "\" is not NULL " for i in texttags])
if placement == "center" and layer_type == "polygon" and snap_to_street == 'false':
sqlz = " OR ".join(sql)
itags = ", ".join(itags)
neitags = ", ".join(neitags)
if not order:
order = "order by"
else:
order += ", "
if zoom > 13 or zoom < 6:
sqlz = """select %s, way
from planet_osm_%s
where (%s) and (%s) and (way_area > %s) and way && ST_Expand(!bbox!,3000) %s way_area desc
"""%(itags,layer_type,ttext,sqlz,pixel_size_at_zoom(zoom,3)**2, order)
else:
sqlz = """select %s, way
from (
select (ST_Dump(ST_Multi(ST_Buffer(ST_Collect(p.way),%s)))).geom as way, %s
from (
select *
from planet_osm_%s p
where (%s) and way_area > %s and p.way && ST_Expand(!bbox!,%s) and (%s)) p
group by %s) p %s ST_Area(p.way) desc
"""%(neitags,pixel_size_at_zoom(zoom,10),oitags,layer_type,ttext,pixel_size_at_zoom(zoom,5)**2,max(pixel_size_at_zoom(zoom,20),3000),sqlz,goitags,order)
mfile.write(xml_layer("postgis-process", layer_type, itags, sqlz, zoom ))
elif layer_type == "line" and zoom < 16 and snap_to_street == 'false':
sqlz = " OR ".join(sql)
itags = ", ".join(itags)
#itags = "\""+ itags+"\""
sqlz = """select %s, ST_LineMerge(ST_Union(way)) as way from (SELECT * from planet_osm_line where way && ST_Expand(!bbox!,%s) and (%s) and (%s)) as tex
group by %s
%s
"""%(itags,max(pixel_size_at_zoom(zoom,20),3000),ttext,sqlz,goitags,order)
mfile.write(xml_layer("postgis-process", layer_type, itags, sqlz, zoom=zoom ))
elif snap_to_street == 'true':
sqlz = " OR ".join(sql)
itags = ", ".join(itags)
sqlz = """select %s,
coalesce(
(select
ST_Intersection(
ST_Translate(
ST_Rotate(
ST_GeomFromEWKT('SRID=900913;LINESTRING(-50 0, 50 0)'),
-1*ST_Azimuth(ST_PointN(ST_ShortestLine(l.way, ST_PointOnSurface(ST_Buffer(h.way,0.1))),1),
ST_PointN(ST_ShortestLine(l.way, ST_PointOnSurface(ST_Buffer(h.way,0.1))),2)
)
),
ST_X(ST_PointOnSurface(ST_Buffer(h.way,0.1))),
ST_Y(ST_PointOnSurface(ST_Buffer(h.way,0.1)))
),
ST_Buffer(h.way,20)
)
as way
from planet_osm_line l
where
l.way && ST_Expand(h.way, 600) and
ST_IsValid(l.way) and
l."name" = h."addr:street" and
l.highway is not NULL and
l."name" is not NULL
order by ST_Distance(ST_Buffer(h.way,0.1), l.way) asc
limit 1
),
(select
ST_Intersection(
ST_Translate(
ST_Rotate(
ST_GeomFromEWKT('SRID=900913;LINESTRING(-50 0, 50 0)'),
-1*ST_Azimuth(ST_PointN(ST_ShortestLine(ST_Centroid(l.way), ST_PointOnSurface(ST_Buffer(h.way,0.1))),1),
ST_PointN(ST_ShortestLine(ST_Centroid(l.way), ST_PointOnSurface(ST_Buffer(h.way,0.1))),2)
)
),
ST_X(ST_PointOnSurface(ST_Buffer(h.way,0.1))),
ST_Y(ST_PointOnSurface(ST_Buffer(h.way,0.1)))
),
ST_Buffer(h.way,20)
)
as way
from planet_osm_polygon l
where
l.way && ST_Expand(h.way, 600) and
ST_IsValid(l.way) and
l."name" = h."addr:street" and
l.highway is not NULL and
l."name" is not NULL
order by ST_Distance(ST_Buffer(h.way,0.1), l.way) asc
limit 1
),
ST_Intersection(
ST_MakeLine( ST_Translate(ST_PointOnSurface(ST_Buffer(h.way,0.1)),-50,0),
ST_Translate(ST_PointOnSurface(ST_Buffer(h.way,0.1)), 50,0)
),
ST_Buffer(h.way,20)
)
) as way
from planet_osm_%s h
where (%s) and (%s) and way && ST_Expand(!bbox!,3000) %s
"""%(itags,layer_type,ttext,sqlz, order)
mfile.write(xml_layer("postgis-process", layer_type, itags, sqlz, zoom ))
else:
sql = "(" + " OR ".join(sql) + ") %s"%(order)#and way && ST_Expand(!bbox!,%s), max(pixel_size_at_zoom(zoom,20),3000),
mfile.write(xml_layer("postgis", layer_type, itags, sql, zoom=zoom ))
else:
xml_nolayer()
mfile.write(xml_end()) | Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
from debug import debug, Timer
from twms import projections
import cairo
import math
import os as os_module
from copy import deepcopy
import pangocairo
import pango
def line(cr, c):
    """Stroke the polyline *c* (sequence of (x, y) pairs) onto cairo context *cr*.

    Note: the first vertex is deliberately emitted twice (move_to, then the
    loop's first line_to), matching the renderer's established path shape.
    """
    start = c[0]
    cr.move_to(start[0], start[1])
    for point in c:
        cr.line_to(point[0], point[1])
    cr.stroke()
def poly(cr, c):
    """Fill the polygon described by *c* (sequence of (x, y) pairs) on *cr*.

    Mirrors line(): the first vertex is emitted twice, then the path is
    filled instead of stroked.
    """
    first = c[0]
    cr.move_to(first[0], first[1])
    for vertex in c:
        cr.line_to(vertex[0], vertex[1])
    cr.fill()
def offset_line(line, offset):
    """Return *line* shifted perpendicularly to each segment by *offset* pixels.

    For every segment two shifted endpoints are appended, so interior
    vertices appear twice in the result.  Consecutive duplicate points are
    skipped (they define no direction), so a fully-degenerate input yields [].
    """
    shifted = []
    prev = line[0]
    for cur in line:
        if cur == prev:
            continue  # zero-length segment: nothing to offset along
        angle = -math.atan2(cur[1] - prev[1], cur[0] - prev[0])
        dx = offset * math.sin(angle)
        dy = offset * math.cos(angle)
        shifted.append((prev[0] + dx, prev[1] + dy))
        shifted.append((cur[0] + dx, cur[1] + dy))
        prev = cur
    return shifted
class RasterTile:
    """A cairo raster image of a map area, rendered from vector data.

    Holds the pixel surface, the geographic bbox it covers (both in
    EPSG:4326 and in the raster projection) and the backend used to
    fetch vector objects.
    """
    def __init__(self, width, height, zoomlevel, data_backend, raster_proj="EPSG:3857"):
        # Pixel dimensions of the output surface.
        self.w = width
        self.h = height
        self.surface = cairo.ImageSurface(cairo.FORMAT_RGB24, self.w, self.h)
        self.offset_x = 0
        self.offset_y = 0
        self.bbox = (0.,0.,0.,0.)    # covered area in EPSG:4326 (set by update_surface)
        self.bbox_p = (0.,0.,0.,0.)  # covered area in the raster projection
        self.zoomlevel = zoomlevel
        self.zoom = None
        self.data = data_backend     # vector data source (tile or PostGIS backend)
        self.proj = raster_proj      # projection the raster is drawn in
    def __del__(self):
        del self.surface
    def screen2lonlat(self, x, y):
        """Convert pixel (x, y) on this raster to (lon, lat) in EPSG:4326."""
        lo1, la1, lo2, la2 = self.bbox_p
        debug ("%s %s - %s %s"%(x,y,self.w, self.h))
        debug(self.bbox_p)
        # Linear interpolation inside bbox_p, then reproject to 4326.
        return projections.to4326( (1.*x/self.w*(lo2-lo1)+lo1, la2+(1.*y/(self.h)*(la1-la2))),self.proj)
        # return (x - self.w/2)/(math.cos(self.center_coord[1]*math.pi/180)*self.zoom) + self.center_coord[0], -(y - self.h/2)/self.zoom + self.center_coord[1]
    def lonlat2screen(self, (lon, lat), epsg4326=False):
        """Convert a projected point (or EPSG:4326 when epsg4326=True) to pixel coords."""
        if epsg4326:
            lon, lat = projections.from4326((lon,lat),self.proj)
        lo1, la1, lo2, la2 = self.bbox_p
        return ((lon-lo1)*(self.w-1)/abs(lo2-lo1), ((la2-lat)*(self.h-1)/(la2-la1)))
        # return (lon - self.center_coord[0])*self.lcc*self.zoom + self.w/2, -(lat - self.center_coord[1])*self.zoom + self.h/2
    def update_surface_by_center(self, lonlat, zoom, style):
        """Render the area centered at *lonlat* (EPSG:4326) at *zoom*."""
        self.zoom = zoom
        xy = projections.from4326(lonlat, self.proj)
        # 40075016 ~ world width in projected metres (web-mercator extent).
        xy1 = projections.to4326((xy[0]-40075016*0.5**self.zoom/256*self.w, xy[1]-40075016*0.5**self.zoom/256*self.h), self.proj)
        xy2 = projections.to4326((xy[0]+40075016*0.5**self.zoom/256*self.w, xy[1]+40075016*0.5**self.zoom/256*self.h), self.proj)
        bbox = (xy1[0],xy1[1],xy2[0],xy2[1])
        debug (bbox)
        return self.update_surface(bbox, zoom, style)
    def update_surface(self, bbox, zoom, style, callback = lambda x=None: None):
        """Render *bbox* (EPSG:4326) at *zoom* with MapCSS *style* onto self.surface.

        *callback* is invoked after major rendering phases so a GUI can
        repaint intermediate results; it is called with True at the end.
        """
        rendertimer = Timer("Rendering image")
        if "image" not in style.cache:
            style.cache["image"] = ImageLoader()
        timer = Timer("Getting data")
        self.zoom = zoom
        self.bbox = bbox
        self.bbox_p = projections.from4326(bbox,self.proj)
        print self.bbox_p
        # Pixels per projected unit, corrected by cos of the mid-latitude.
        scale = abs(self.w/(self.bbox_p[0] - self.bbox_p[2])/math.cos(math.pi*(self.bbox[1]+self.bbox[3])/2/180))
        zscale = 0.5*scale
        cr = cairo.Context(self.surface)
        # getting and setting canvas properties
        bgs = style.get_style("canvas", {}, self.zoom, scale, zscale)
        if not bgs:
            bgs = [{}]
        bgs = bgs[0]
        cr.rectangle(0, 0, self.w, self.h)
        # canvas color and opcity
        color = bgs.get("fill-color",(0.7, 0.7, 0.7))
        cr.set_source_rgba(color[0], color[1], color[2], bgs.get("fill-opacity", 1))
        cr.fill()
        callback()
        # canvas antialiasing
        antialias = bgs.get("antialias", "full")
        if antialias == "none":
            "no antialiasing enabled"
            cr.set_antialias(1)
            #cr.font_options_set_antialias(1)
        elif antialias == "text":
            "only text antialiased"
            cr.set_antialias(1)
            #cr.font_options_set_antialias(2)
        else:
            "full antialias"
            cr.set_antialias(2)
            #cr.font_options_set_antialias(2)
        datatimer = Timer("Asking backend")
        # Backends/styles optionally expose SQL and tag hints to narrow the query.
        if "get_sql_hints" in dir(style):
            hints = style.get_sql_hints('way', self.zoom)
        else:
            hints = None
        if "get_interesting_tags" in dir(style):
            itags = style.get_interesting_tags(zoom=self.zoom)
        else:
            itags = None
        # enlarge bbox by 20% to each side. results in more vectors, but makes less artifacts.
        span_x, span_y = bbox[2]-bbox[0], bbox[3]-bbox[1]
        bbox_expand = [bbox[0]-0.2*span_x,bbox[1]-0.2*span_y,bbox[2]+0.2*span_x,bbox[3]+0.2*span_y]
        vectors = self.data.get_vectors(bbox_expand,self.zoom,hints,itags).values()
        datatimer.stop()
        datatimer = Timer("Applying styles")
        # ww: list of [way, style-dict] pairs, one per matched MapCSS rule.
        ww = []
        for way in vectors:
            st = style.get_style("way", way.tags, self.zoom, scale, zscale)
            if st:
                for fpt in st:
                    #debug(fpt)
                    ww.append([way.copy(), fpt])
        datatimer.stop()
        debug( "%s objects on screen (%s in dataset)"%(len(ww),len(vectors)) )
        er = Timer("Projecing data")
        # Project every object's coordinates into screen space.
        if self.data.proj != self.proj:
            for w in ww:
                w[0].cs = [self.lonlat2screen(coord) for coord in projections.transform(w[0].coords, self.data.proj, self.proj)]
        else:
            for w in ww:
                w[0].cs = [self.lonlat2screen(coord) for coord in w[0].coords]
        # Apply per-object geometry modifiers: offset, raise, extrude.
        for w in ww:
            if "offset" in w[1]:
                offset = float(w[1]["offset"])
                w[0] = w[0].copy()
                w[0].cs = offset_line(w[0].cs, offset)
            if "raise" in w[1] and not "extrude" in w[1]:
                w[0] = w[0].copy()
                offset = float(w[1]["raise"])
                w[0].cs_real = w[0].cs
                w[0].cs = [(x,y-offset) for x,y in w[0].cs]
            if "extrude" in w[1]:
                if w[1]["extrude"]<2:
                    # Too low to be visible as a 3D box - drop the property.
                    del w[1]["extrude"]
            if "extrude" in w[1] and "fill-color" not in w[1] and "width" in w[1]:
                # Extruded line: turn it into a filled band of its own width.
                w[1]["fill-color"] = w[1].get("color", (0,0,0))
                w[1]["fill-opacity"] = w[1].get("opacity", 1)
                w[0] = w[0].copy()
                #print w[0].cs
                w[0].cs = offset_line(w[0].cs, w[1]["width"]/2)
                #print w[0].cs
                aa = offset_line(w[0].cs, -w[1]["width"])
                del w[1]["width"]
                aa.reverse()
                w[0].cs.extend(aa)
        er.stop()
        # Group objects into render layers of 100 MapCSS layer units each.
        ww.sort(key=lambda x: x[1]["layer"])
        layers = list(set([int(x[1]["layer"]/100.) for x in ww]))
        layers.sort()
        objs_by_layers = {}
        for layer in layers:
            objs_by_layers[layer] = []
        for obj in ww:
            objs_by_layers[int(obj[1]["layer"]/100.)].append(obj)
        del ww
        timer.stop()
        timer = Timer("Rasterizing image")
        linecaps = {"butt":0, "round":1, "square":2}
        linejoin = {"miter":0, "round":1, "bevel":2}
        # Centers where a label was already drawn (seeded with an off-screen point).
        text_rendered_at = set([(-100,-100)])
        for layer in layers:
            data = objs_by_layers[layer]
            #data.sort(lambda x,y:cmp(max([x1[1] for x1 in x[0].cs]), max([x1[1] for x1 in y[0].cs])))
            # - fill polygons
            for obj in data:
                if ("fill-color" in obj[1] or "fill-image" in obj[1]) and not "extrude" in obj[1]: ## TODO: fill-image
                    color = obj[1].get("fill-color", (0,0,0))
                    cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("fill-opacity", 1))
                    if "fill-image" in obj[1]:
                        image = style.cache["image"][obj[1]["fill-image"]]
                        if image:
                            pattern = cairo.SurfacePattern(image)
                            pattern.set_extend(cairo.EXTEND_REPEAT)
                            cr.set_source(pattern)
                    poly(cr, obj[0].cs)
            # - draw casings on layer
            for obj in data:
                ### Extras: casing-linecap, casing-linejoin
                # NOTE(review): "and" binds tighter than "or", so the extrude
                # check only guards the casing-color term - confirm intent.
                if "casing-width" in obj[1] or "casing-color" in obj[1] and "extrude" not in obj[1]:
                    cr.set_dash(obj[1].get("casing-dashes",obj[1].get("dashes", [])))
                    cr.set_line_join(linejoin.get(obj[1].get("casing-linejoin",obj[1].get("linejoin", "round")),1))
                    color = obj[1].get("casing-color", (0,0,0))
                    cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("casing-opacity", 1))
                    ## TODO: good combining of transparent lines and casing
                    ## Probable solution: render casing, render way as mask and put casing with mask chopped out onto image
                    cr.set_line_width (obj[1].get("width",0)+obj[1].get("casing-width", 1 ))
                    cr.set_line_cap(linecaps.get(obj[1].get("casing-linecap", obj[1].get("linecap", "butt")),0))
                    line(cr, obj[0].cs)
            # - draw line centers
            for obj in data:
                if ("width" in obj[1] or "color" in obj[1] or "image" in obj[1]) and "extrude" not in obj[1]:
                    cr.set_dash(obj[1].get("dashes", []))
                    cr.set_line_join(linejoin.get(obj[1].get("linejoin", "round"),1))
                    color = obj[1].get("color", (0,0,0))
                    cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("opacity", 1))
                    ## TODO: better overlapping of transparent lines.
                    ## Probable solution: render them (while they're of the same opacity and layer) on a temporary canvas that's merged into main later
                    cr.set_line_width (obj[1].get("width", 1))
                    cr.set_line_cap(linecaps.get(obj[1].get("linecap", "butt"),0))
                    if "image" in obj[1]:
                        image = style.cache["image"][obj[1]["image"]]
                        if image:
                            pattern = cairo.SurfacePattern(image)
                            pattern.set_extend(cairo.EXTEND_REPEAT)
                            cr.set_source(pattern)
                    line(cr, obj[0].cs)
            callback()
            # - extruding polygons
            #data.sort(lambda x,y:cmp(max([x1[1] for x1 in x[0].cs]), max([x1[1] for x1 in y[0].cs])))
            # Pass 1. Creating list of extruded polygons
            extlist = []
            # format: (coords, ("h"/"v", y,z), real_obj)
            for obj in data:
                if "extrude" in obj[1]:
                    def face_to_poly(face, hgt):
                        """
                        Converts a line into height-up extruded poly
                        """
                        return [face[0], face[1], (face[1][0], face[1][1]-hgt), (face[0][0], face[0][1]-hgt), face[0]]
                    hgt = obj[1]["extrude"]
                    raised = float(obj[1].get("raise",0))
                    # Roof polygon: all points shifted up by height + raise.
                    excoords = [(a[0],a[1]-hgt-raised) for a in obj[0].cs]
                    faces = []
                    coord = obj[0].cs[-1]
                    #p_coord = (coord[0],coord[1]-raised)
                    p_coord = False
                    for coord in obj[0].cs:
                        c = (coord[0],coord[1]-raised)
                        if p_coord:
                            extlist.append( (face_to_poly([c, p_coord],hgt), ("v", min(coord[1],p_coord[1]), hgt), obj ))
                        p_coord = c
                    extlist.append( (excoords, ("h", min(coord[1],p_coord[1]), hgt), obj ))
                    #faces.sort(lambda x,y:cmp(max([x1[1] for x1 in x]), max([x1[1] for x1 in y])))
            # Pass 2. Sorting
            def compare_things(a,b):
                """
                Custom comparator for extlist sorting.
                Sorts back-to-front, bottom-to-top, | > \ > _, horizontal-to-vertical.
                """
                t1,t2 = a[1],b[1] #
                if t1[1] > t2[1]: # back-to-front
                    return 1
                if t1[1] < t2[1]:
                    return -1
                if t1[2] > t2[2]: # bottom-to-top
                    return 1
                if t1[2] < t2[2]:
                    return -1
                if t1[0] < t2[0]: # h-to-v
                    return 1
                if t1[0] > t2[0]:
                    return -1
                return cmp(math.sin(math.atan2(a[0][0][0]-a[0][1][0],a[0][0][0]-a[0][1][0])),math.sin(math.atan2(b[0][0][0]-b[0][1][0],b[0][0][0]-b[0][1][0])))
                # NOTE(review): the two prints below are unreachable (after return).
                print t1
                print t2
            extlist.sort(compare_things)
            # Pass 3. Rendering using painter's algorythm
            cr.set_dash([])
            for ply, prop, obj in extlist:
                if prop[0] == "v":
                    # Vertical wall face: fill, then outline the edges.
                    color = obj[1].get("extrude-face-color", obj[1].get("color", (0,0,0) ))
                    cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("extrude-face-opacity", obj[1].get("opacity", 1)))
                    poly(cr, ply)
                    color = obj[1].get("extrude-edge-color", obj[1].get("color", (0,0,0) ))
                    cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("extrude-edge-opacity", obj[1].get("opacity", 1)))
                    cr.set_line_width (.5)
                    line(cr, ply)
                if prop[0] == "h":
                    # Horizontal roof polygon.
                    if "fill-color" in obj[1]:
                        color = obj[1]["fill-color"]
                        cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("fill-opacity", obj[1].get("opacity", 1)))
                        poly(cr,ply)
                    color = obj[1].get("extrude-edge-color", obj[1].get("color", (0,0,0) ))
                    cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("extrude-edge-opacity", obj[1].get("opacity", 1)))
                    cr.set_line_width (1)
                    line(cr, ply)
            #cr.set_line_width (obj[1].get("width", 1))
            #color = obj[1].get("color", (0,0,0) )
            #cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("extrude-edge-opacity", obj[1].get("opacity", 1)))
            #line(cr,excoords)
            #if "fill-color" in obj[1]:
            #color = obj[1]["fill-color"]
            #cr.set_source_rgba(color[0], color[1], color[2], obj[1].get("fill-opacity", 1))
            #poly(cr,excoords)
            # - draw point icons
            for obj in data:
                if "icon-image" in obj[1]:
                    image = style.cache["image"][obj[1]["icon-image"]]
                    if image:
                        # Center the icon on the object's anchor point.
                        dy = image.get_height()/2
                        dx = image.get_width()/2
                        where = self.lonlat2screen(projections.transform(obj[0].center,self.data.proj,self.proj))
                        cr.set_source_surface(image, where[0]-dx, where[1]-dy)
                        cr.paint()
            callback()
            # - render text labels
            texttimer = Timer("Text rendering")
            cr.set_line_join(1) # setting linejoin to "round" to get less artifacts on halo render
            for obj in data:
                if "text" in obj[1]:
                    text = obj[1]["text"]
                    #cr.set_line_width (obj[1].get("width", 1))
                    #cr.set_font_size(float(obj[1].get("font-size", 9)))
                    ft_desc = pango.FontDescription()
                    ft_desc.set_family(obj[1].get('font-family', 'sans'))
                    ft_desc.set_size(pango.SCALE*int(obj[1].get('font-size',9)))
                    fontstyle = obj[1].get('font-style', 'normal')
                    if fontstyle == 'italic':
                        fontstyle = pango.STYLE_ITALIC
                    else:
                        fontstyle = pango.STYLE_NORMAL
                    ft_desc.set_style(fontstyle)
                    # font-weight may be numeric or the keyword "bold".
                    fontweight = obj[1].get('font-weight', 400)
                    try:
                        fontweight = int(fontweight)
                    except ValueError:
                        if fontweight == 'bold':
                            fontweight = 700
                        else:
                            fontweight = 400
                    ft_desc.set_weight(fontweight)
                    if obj[1].get('text-transform', None) == 'uppercase':
                        text = text.upper()
                    p_ctx = pangocairo.CairoContext(cr)
                    p_layout = p_ctx.create_layout()
                    p_layout.set_font_description(ft_desc)
                    p_layout.set_text(text)
                    p_attrs = pango.AttrList()
                    decoration = obj[1].get('text-decoration', 'none')
                    if decoration == 'underline':
                        p_attrs.insert(pango.AttrUnderline(pango.UNDERLINE_SINGLE,end_index=-1))
                    decoration = obj[1].get('font-variant', 'none')
                    if decoration == 'small-caps':
                        p_attrs.insert(pango.AttrVariant(pango.VARIANT_SMALL_CAPS, start_index=0, end_index=-1))
                    p_layout.set_attributes(p_attrs)
                    if obj[1].get("text-position", "center") == "center":
                        where = self.lonlat2screen(projections.transform(obj[0].center,self.data.proj,self.proj))
                        # Skip this label if another one was drawn within 15 px.
                        for t in text_rendered_at:
                            if ((t[0]-where[0])**2+(t[1]-where[1])**2) < 15*15:
                                break
                        else:
                            text_rendered_at.add(where)
                            #debug ("drawing text: %s at %s"%(text, where))
                            if "text-halo-color" in obj[1] or "text-halo-radius" in obj[1]:
                                # Halo: stroke the text outline first.
                                cr.new_path()
                                cr.move_to(where[0], where[1])
                                cr.set_line_width (obj[1].get("text-halo-radius", 1))
                                color = obj[1].get("text-halo-color", (1.,1.,1.))
                                cr.set_source_rgb(color[0], color[1], color[2])
                                cr.text_path(text)
                                cr.stroke()
                            cr.new_path()
                            cr.move_to(where[0], where[1])
                            cr.set_line_width (obj[1].get("text-halo-radius", 1))
                            color = obj[1].get("text-color", (0.,0.,0.))
                            cr.set_source_rgb(color[0], color[1], color[2])
                            cr.text_path(text)
                            cr.fill()
                    else: ### render text along line
                        c = obj[0].cs
                        text = unicode(text,"utf-8")
                        # - calculate line length
                        length = reduce(lambda x,y: (x[0]+((y[0]-x[1])**2 + (y[1]-x[2])**2 )**0.5, y[0], y[1]), c, (0,c[0][0],c[0][1]))[0]
                        #print length, text, cr.text_extents(text)
                        if length > cr.text_extents(text)[2]:
                            # - function to get (x, y, normale) from (c, length_along_c)
                            def get_xy_from_len(c,length_along_c):
                                x0, y0 = c[0]
                                for x,y in c:
                                    seg_len = ((x-x0)**2+(y-y0)**2)**0.5
                                    if length_along_c < seg_len:
                                        normed = length_along_c /seg_len
                                        return (x-x0)*normed+x0, (y-y0)*normed+y0, math.atan2(y-y0,x-x0)
                                    else:
                                        length_along_c -= seg_len
                                        x0,y0 = x,y
                                else:
                                    return None
                            # NOTE: "os" below is a local direction flag, not the
                            # os module (this file imports os as os_module).
                            da = 0
                            os = 1
                            z = length/2-cr.text_extents(text)[2]/2
                            # print get_xy_from_len(c,z)
                            # Flip the text if the line runs right-to-left.
                            if c[0][0] < c[1][0] and get_xy_from_len(c,z)[2]<math.pi/2 and get_xy_from_len(c,z)[2] > -math.pi/2:
                                da = 0
                                os = 1
                                z = length/2-cr.text_extents(text)[2]/2
                            else:
                                da = math.pi
                                os = -1
                                z = length/2+cr.text_extents(text)[2]/2
                            z1=z
                            if "text-halo-color" in obj[1] or "text-halo-radius" in obj[1]:
                                cr.set_line_width (obj[1].get("text-halo-radius", 1.5)*2)
                                color = obj[1].get("text-halo-color", (1.,1.,1.))
                                cr.set_source_rgb(color[0], color[1], color[2])
                                xy = get_xy_from_len(c,z)
                                cr.save()
                                #cr.move_to(xy[0],xy[1])
                                p_ctx.translate(xy[0],xy[1])
                                cr.rotate(xy[2]+da)
                                #p_ctx.translate(x,y)
                                #p_ctx.show_layout(p_layout)
                                p_ctx.layout_path(p_layout)
                                cr.restore()
                                cr.stroke()
                            #for letter in text:
                                #cr.new_path()
                                #xy = get_xy_from_len(c,z)
                                ##print letter, cr.text_extents(letter)
                                #cr.move_to(xy[0],xy[1])
                                #cr.save()
                                #cr.rotate(xy[2]+da)
                                #cr.text_path(letter)
                                #cr.restore()
                                #cr.stroke()
                                #z += os*cr.text_extents(letter)[4]
                            color = obj[1].get("text-color", (0.,0.,0.))
                            cr.set_source_rgb(color[0], color[1], color[2])
                            z = z1
                            xy = get_xy_from_len(c,z)
                            cr.save()
                            #cr.move_to(xy[0],xy[1])
                            p_ctx.translate(xy[0],xy[1])
                            cr.rotate(xy[2]+da)
                            #p_ctx.translate(x,y)
                            p_ctx.show_layout(p_layout)
                            cr.restore()
                            #for letter in text:
                                #cr.new_path()
                                #xy = get_xy_from_len(c,z)
                                ##print letter, cr.text_extents(letter)
                                #cr.move_to(xy[0],xy[1])
                                #cr.save()
                                #cr.rotate(xy[2]+da)
                                #cr.text_path(letter)
                                #cr.restore()
                                #cr.fill()
                                #z += os*cr.text_extents(letter)[4]
            texttimer.stop()
        # NOTE(review): "data" is unbound here if no layers were rendered.
        del data
        del layers
        timer.stop()
        rendertimer.stop()
        debug(self.bbox)
        callback(True)
class ImageLoader:
    """Memoizing loader for PNG icon/pattern images used by the renderer.

    Indexing returns a cairo.ImageSurface for a filesystem path, or
    False when the file does not exist (callers test truthiness).
    """
    def __init__(self):
        self.cache = {}  # path -> cairo.ImageSurface
    def __getitem__(self, url):
        # Despite the name, "url" is a local filesystem path.
        if url in self.cache:
            return self.cache[url]
        else:
            print url, os_module.path.exists(url)
            if os_module.path.exists(url):
                self.cache[url] = cairo.ImageSurface.create_from_png (url)
                return self.cache[url]
            else:
                # Missing files are NOT cached, so they are re-checked each time.
                return False
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
"""
This is a module to substitute debug.py in production mode.
"""
def debug(*args, **kwargs):
    """No-op stand-in for debug.debug() in production mode.

    The real implementation prints its argument; this one accepts any
    arguments (so every existing call site keeps working) and discards
    them.  Defined with def rather than an assigned lambda (PEP 8 E731).
    """
    return None
class Timer:
    """Do-nothing replacement for debug.Timer: times nothing, reports nothing."""

    def __init__(self, comment):
        # The label is accepted for API compatibility and ignored.
        pass

    def stop(self):
        # Nothing was measured, so there is nothing to report.
        pass
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
import os
import sys
from lxml import etree
from twms import projections
from style import Styling
# Python 2 hack: reload(sys) re-exposes sys.setdefaultencoding (which
# site.py deletes) so the process-wide default encoding can be forced.
reload(sys)
sys.setdefaultencoding("utf-8") # a hack to support UTF-8
# Optional JIT: psyco speeds up pure-Python code on i386; silently
# skipped when not installed.
try:
    import psyco
    psyco.full()
except ImportError:
    pass
MAXZOOM = 16  # deepest zoom level vector tiles are generated for
proj = "EPSG:4326"  # projection used to cut the map into tiles
style = Styling()  # MapCSS styling; currently unused below (filter calls are commented out)
# elsif($k eq 'highway' and $v eq 'footway' or $v eq 'path' or $v eq 'track'){
def tilelist_by_geometry(way, start_zoom = 0, ispoly = False):
    """
    Gives a number of (z,x,y) tile numbers that geometry crosses.

    way        - sequence of (lon, lat) points
    start_zoom - lowest zoom level to include in the result
    ispoly     - accepted but not used in this implementation
    """
    ret = set([])
    tiles_by_zooms = {} # zoom: set(tile,tile,tile...)
    for t in xrange(0,MAXZOOM+1):
        tiles_by_zooms[t] = set([])
    # Mark every tile touched by a vertex at the deepest zoom.
    # NOTE: tiles crossed by a segment between distant vertices are missed.
    for point in way:
        tile = projections.tile_by_coords(point, MAXZOOM, proj)
        tile = (MAXZOOM, int(tile[0]),int(tile[1]))
        tiles_by_zooms[MAXZOOM].add(tile)
    # Propagate upwards: the parent tile's coords are the child's halved.
    for t in xrange(MAXZOOM-1,start_zoom-1,-1):
        for tt in tiles_by_zooms[t+1]:
            tiles_by_zooms[t].add((t, int(tt[1]/2), int(tt[2]/2)))
    # Zooms below start_zoom remain empty, so this flattens only the wanted range.
    for z in tiles_by_zooms.values():
        ret.update(z)
    return ret
def pix_distance(a, b, z):
    """
    Calculates onscreen distance between 2 points on given zoom.

    a, b are (lon, lat) pairs; z is the zoom level.  The world is treated
    as 2**z * 256 px wide, with longitude spanning 360 degrees and
    latitude 180 (a flat approximation, no projection involved).
    """
    dx = (a[0] - b[0]) / 360.
    dy = (a[1] - b[1]) / 180.
    return 2 ** z * 256 * (dx * dx + dy * dy) ** 0.5
def sanitize(string):
    """Escape characters that act as separators in the .vtile line format.

    Spaces separate fields, ";" separates tags and "=" separates keys
    from values, so they become "_", "," and "###" respectively.
    """
    return string.replace(" ", "_").replace(";", ",").replace("=", "###")
# Module-level self-check: prints "_,###" when the script starts.
print sanitize (" ;=")
def main ():
    """Read an OSM XML stream from stdin and write quad-tree vector tiles.

    Nodes are buffered in memory; for each way the coordinates are
    generalized per zoom (vertices closer than 1.5 px on screen are
    dropped) and the way is appended to every tiles/z*/x*/y*.vtile file
    it crosses.  Open tile files are kept in an LRU set of ~400 handles.
    """
    DROPPED_POINTS = 0  # vertices removed by generalization
    WAYS_WRITTEN = 0
    NODES_READ = 0
    WAYS_READ = 0
    tilefiles = {}       # (z, x, y) -> open file object (or None when closed)
    tilefiles_hist = []  # access order of open tiles, oldest first
    #osm_infile = open("minsk.osm", "rb")
    osm_infile = sys.stdin
    nodes = {}  # osm node id -> (lon, lat)
    curway = []
    tags = {}
    context = etree.iterparse(osm_infile)
    for action, elem in context:
        items = dict(elem.items())
        if elem.tag == "node":
            NODES_READ += 1
            if NODES_READ % 10000 == 0:
                print "Nodes read:", NODES_READ
            nodes[int(items["id"])] = (float(items["lon"]), float(items["lat"]))
            tags = {}
        elif elem.tag == "nd":
            # Way member reference; silently skip nodes outside the extract.
            try:
                curway.append(nodes[int(items["ref"])])
            except KeyError:
                pass
        elif elem.tag == "tag":
            tags[sanitize(items["k"])] = sanitize(items["v"])
        elif elem.tag == "way":
            WAYS_READ += 1
            if WAYS_READ % 1000 == 0:
                print "Ways read:", WAYS_READ
            # mzoom: zoom at which the way degenerates to a single point.
            mzoom = 1
            #tags = style.filter_tags(tags)
            if tags:
                if True:#style.get_style("way", tags, True): # if way is stylized
                    towrite = ";".join(["%s=%s"%x for x in tags.iteritems()]) ### TODO: sanitize keys and values
                    #print towrite
                    # Per-zoom simplified geometry, seeded with the full way.
                    way_simplified = {MAXZOOM: curway}
                    for zoom in xrange(MAXZOOM-1,-1,-1): ######## generalize a bit
                        # TODO: Douglas-Peucker
                        prev_point = curway[0]
                        way = [prev_point]
                        for point in way_simplified[zoom+1]:
                            if pix_distance(point, prev_point, zoom) > 1.5:
                                way.append(point)
                                prev_point = point
                            else:
                                DROPPED_POINTS += 1
                        if len(way) == 1:
                            mzoom = zoom
                            #print zoom
                            break
                        if len(way) > 1:
                            way_simplified[zoom] = way
                            #print way
                    for tile in tilelist_by_geometry(curway, mzoom+1):
                        z, x, y = tile
                        # Integer division groups 1024 tiles per directory level.
                        path = "tiles/z%s/%s/x%s/%s/"%(z, x/1024, x, y/1024)
                        if tile not in tilefiles:
                            if not os.path.exists(path):
                                os.makedirs(path)
                            tilefiles[tile] = open(path+"y"+str(y)+".vtile","wb")
                            tilefiles_hist.append(tile)
                        else:
                            # Reopen in append mode if the handle was evicted.
                            if not tilefiles[tile]:
                                tilefiles[tile] = open(path+"y"+str(y)+".vtile","a")
                                tilefiles_hist.append(tile)
                            # Touch: move the tile to the most-recent end.
                            tilefiles_hist.remove(tile)
                            tilefiles_hist.append(tile)
                        # NOTE(review): the list comp below rebinds "x" (py2
                        # comprehension leak) - harmless, x is reset next loop.
                        print >>tilefiles[tile], "%s %s" % (towrite, items["id"]), " ".join([str(x[0])+" "+str(x[1]) for x in way_simplified[tile[0]]])
                        if len(tilefiles_hist) > 400:
                            print "Cleaned up tiles. Wrote by now:", len(tilefiles),"active:",len(tilefiles_hist)
                            # Close everything but the 100 most recently used handles.
                            for tile in tilefiles_hist[0:len(tilefiles_hist)-100]:
                                tilefiles_hist.remove(tile)
                                tilefiles[tile].flush()
                                tilefiles[tile].close()
                                tilefiles[tile] = None
                    #print >>corr, "%s %s %s %s %s %s"% (curway[0][0],curway[0][1],curway[1][0],curway[1][1], user, ts )
                    WAYS_WRITTEN += 1
                    if WAYS_WRITTEN % 10000 == 0:
                        print WAYS_WRITTEN
            curway = []
            tags = {}
            # Free the parsed XML element to keep memory bounded.
            elem.clear()
            #user = default_user
            #ts = ""
    print "Tiles generated:",len(tilefiles)
    print "Nodes dropped when generalizing:", DROPPED_POINTS
    print "Nodes in memory:", len(nodes)
# Run only when executed as a script (the converter reads OSM XML from
# stdin); importing this module no longer triggers a full conversion.
if __name__ == "__main__":
    main()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
from twms import projections
import twms.bbox
class Empty:
    """Bare attribute bag used as the clone target of Way.copy()."""
    def copy(self):
        """Duplicate this object; tag dict and coordinate lists are shallow-copied."""
        dup = Empty()
        dup.tags = self.tags.copy()
        dup.coords = list(self.coords)
        dup.center = self.center
        dup.cs = list(self.cs)
        dup.bbox = self.bbox
        return dup
class Way:
    """One way read from a .vtile file: parsed tag dict plus (x, y) points."""
    def __init__(self, tags, coords):
        """
        tags   - "key=value;key2=value2" string as stored in the tile file
        coords - flat list [x0, y0, x1, y1, ...] (at least one pair)
        """
        # Screen-space coordinates; filled in later by the renderer.
        self.cs = []
        self.tags = dict(item.split("=") for item in tags.split(";"))
        # Fold the flat coordinate list into (x, y) pairs.
        pts = [(coords[0], coords[1])]
        for i in range(2, len(coords), 2):
            pts.append((coords[i], coords[i + 1]))
        self.coords = pts
        # Arithmetic mean of the vertices - used as the label anchor.
        sum_x = 0
        sum_y = 0
        for px, py in pts:
            sum_x += px
            sum_y += py
        self.center = (sum_x / len(pts), sum_y / len(pts))
        # Axis-aligned bounding box, seeded with +/-9999 sentinels.
        min_x = min_y = 9999
        max_x = max_y = -9999
        for px, py in pts:
            min_x = min(min_x, px)
            min_y = min(min_y, py)
            max_x = max(max_x, px)
            max_y = max(max_y, py)
        self.bbox = (min_x, min_y, max_x, max_y)
    def copy(self):
        """Clone into an Empty shell so the renderer can mutate it freely."""
        dup = Empty()
        dup.tags = self.tags.copy()
        dup.coords = list(self.coords)
        dup.center = self.center
        dup.cs = list(self.cs)
        dup.bbox = self.bbox
        return dup
class QuadTileBackend:
    """
    A class that gives out vector data on demand.

    Vector tiles are plain-text files laid out in a quad tree under
    self.path; a small in-memory cache keeps recently used parsed tiles
    (see collect_garbage).
    """
    def __init__(self,max_zoom = 16,proj = "EPSG:4326", path = "tiles", lang = "ru"):
        self.max_zoom = max_zoom # no better tiles available
        self.path = path # path to tile files
        self.lang = lang # map language to use
        self.tiles = {} # loaded vector tiles go here
        self.proj = proj # which projection used to cut map in tiles
        self.keep_tiles = 15 # a number of tiles to cache in memory
        self.tile_load_log = [] # used when selecting which tile to unload
    def filename(self, (z,x,y)):
        """Path of the tile file for (z, x, y); 1024 tiles per directory level."""
        return "%s/z%s/%s/x%s/%s/y%s.vtile"%(self.path, z, x/1024, x, y/1024, y)
    def load_tile(self, k):
        """Parse tile *k* into {osm_id: Way}; returns {} when the file is absent."""
        #debug("loading tile: %s"% (k,))
        try:
            f = open(self.filename(k))
        except IOError:
            #print ( "Failed open: '%s'" % self.filename(k) )
            return {}
        t = {}
        for line in f:
            #debug(line)
            # Line format: "<tags> <osm_id> <x0> <y0> <x1> <y1> ..."
            a = line.split(" ")
            w = Way(a[0], [float(x) for x in a[2:]])
            t[int(a[1])] = w
        f.close()
        return t
    def collect_garbage(self):
        """
        Cleans up some RAM by removing least accessed tiles.
        """
        if len(self.tiles) > self.keep_tiles:
            #debug("Now %s tiles cached, trying to kill %s"%(len(self.tiles),len(self.tiles)-self.keep_tiles))
            for tile in self.tile_load_log[0:len(self.tiles)-self.keep_tiles]:
                try:
                    del self.tiles[tile]
                    self.tile_load_log.remove(tile)
                    #debug ("killed tile: %s" % (tile,))
                # NOTE(review): Python 2 "except A, B" binds B to the caught
                # exception, so a ValueError from remove() is NOT caught here;
                # "except (KeyError, ValueError):" was probably intended.
                except KeyError, ValueError:
                    pass
                    #debug ("tile killed not by us: %s" % (tile,))
    def get_vectors (self, bbox, zoom, sql_hint = None, itags = None):
        """Return {osm_id: Way} for objects in *bbox* whose tags match a sql_hint key."""
        zoom = int(zoom)
        zoom = min(zoom, self.max_zoom) ## If requested zoom is better than the best, take the best
        zoom = max(zoom, 0) ## Negative zooms are nonsense
        a,d,c,b = [int(x) for x in projections.tile_by_bbox(bbox,zoom, self.proj)]
        resp = {}
        # Only the tag keys of the hints are used for filtering here.
        hint = [x[0] for x in sql_hint]
        for tile in set([(zoom,i,j) for i in range(a, c+1) for j in range(b, d+1)]):
            # Loading current vector tile
            try:
                ti = self.tiles[tile]
            except KeyError:
                ti = self.load_tile(tile)
                self.tiles[tile] = ti
            # Touch the tile: move it to the most-recent end of the log.
            try:
                self.tile_load_log.remove(tile)
            except ValueError:
                pass
            self.tile_load_log.append(tile)
            for obj in ti:
                "filling response with interesting-tagged objects"
                need = False
                for tag in ti[obj].tags:
                    if tag in hint:
                        need = True
                        break
                if need:
                    if twms.bbox.bbox_is_in(bbox, ti[obj].bbox, fully=False):
                        resp[obj] = ti[obj]
        self.collect_garbage()
        return resp
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
#from debug import debug
from twms import projections
import psycopg2
import shapely.wkb
class Empty:
    """Bare attribute bag used as the clone target of Way.copy()."""
    def copy(self):
        """Duplicate this object; tag dict and coordinate lists are shallow-copied."""
        dup = Empty()
        dup.tags = self.tags.copy()
        dup.coords = list(self.coords)
        dup.center = self.center
        dup.cs = list(self.cs)
        return dup
class Way:
    """One object fetched from PostGIS: tag dict plus projected coordinates."""
    def __init__(self, tags, geom):
        """
        tags - {key: value} dict as selected from the database
        geom - sequence of (x, y) coordinate pairs
        """
        # Screen-space coordinates; filled in later by the renderer.
        self.cs = []
        self.tags = tags
        self.coords = geom
        # Arithmetic mean of the vertices - used as the label anchor.
        sum_x = 0
        sum_y = 0
        for px, py in self.coords:
            sum_x += px
            sum_y += py
        count = len(self.coords)
        self.center = (sum_x / count, sum_y / count)
    def copy(self):
        """Clone into an Empty shell so the renderer can mutate it freely."""
        dup = Empty()
        dup.tags = self.tags.copy()
        dup.coords = list(self.coords)
        dup.center = self.center
        dup.cs = list(self.cs)
        return dup
class PostGisBackend:
    """
    A class that gives out vector data on demand.

    Queries an osm2pgsql PostGIS database directly; the tile-cache
    attributes set in __init__ are not used by get_vectors.
    """
    def __init__(self,database = "dbname=gis user=mapz host=komzpa.net",max_zoom = 16,proj = "EPSG:3857", path = "tiles", lang = "ru", ):
        # debug("Bakend created")
        self.database=database
        self.max_zoom = max_zoom # no better tiles available
        self.path = path # path to tile files
        self.lang = lang # map language to use
        self.tiles = {} # loaded vector tiles go here
        self.proj = proj # which projection used to cut map in tiles
        self.keep_tiles = 190 # a number of tiles to cache in memory
        self.tile_load_log = [] # used when selecting which tile to unload
    def get_vectors (self, bbox, zoom, sql_hint = None, tags_hint = None):
        """
        Fetches vectors for given bbox.
        sql_hint is a list of sets of (key, sql_for_key)
        """
        a = psycopg2.connect(self.database)
        b = a.cursor()
        bbox = tuple(projections.from4326(bbox,self.proj))
        ### FIXME: hardcoded EPSG:3857 in database
        tables = ("planet_osm_line","planet_osm_polygon") # FIXME: points
        resp = {}
        for table in tables:
            add = ""
            taghint = "*"
            if sql_hint:
                adp = []
                for tp in sql_hint:
                    add = []
                    # Probe the table once to learn which columns exist.
                    b.execute("SELECT * FROM %s LIMIT 1;"%table)
                    names = [q[0] for q in b.description]
                    for j in tp[0]:
                        if j not in names:
                            break
                    else:
                        # All keys of this hint are real columns: use its WHERE clause.
                        add.append(tp[1])
                    if add:
                        add = " OR ".join(add)
                        add = "("+add+")"
                        adp.append(add)
                if tags_hint:
                    taghint = ", ".join(['"'+j+'"' for j in tags_hint if j in names])+ ", way, osm_id"
                adp = " OR ".join(adp)
            # NOTE(review): if sql_hint is empty/None, "adp" is unbound here and
            # this line raises NameError - confirm callers always pass hints.
            req = "SELECT %s FROM %s WHERE (%s) and way && SetSRID('BOX3D(%s %s,%s %s)'::box3d,900913);"%(taghint,table,adp,bbox[0],bbox[1],bbox[2],bbox[3])
            print req
            b.execute(req)
            names = [q[0] for q in b.description]
            for row in b.fetchall():
                # Pair column names with values; drop empty/NULL ones.
                row_dict = dict(map(None,names,row))
                for k,v in row_dict.items():
                    if not v:
                        del row_dict[k]
                geom = shapely.wkb.loads(row_dict["way"].decode('hex'))
                ### FIXME: a dirty hack to basically support polygons, needs lots of rewrite
                try:
                    geom = list(geom.coords)
                except NotImplementedError:
                    "trying polygons"
                    try:
                        # Fall back to the polygon's outline ring.
                        geom = geom.boundary
                        geom = list(geom.coords)
                        row_dict[":area"] = "yes"
                    except NotImplementedError:
                        "multipolygon"
                        continue
                ### FIXME
                #geom = projections.to4326(geom, self.proj)
                del row_dict["way"]
                oid = row_dict["osm_id"]
                del row_dict["osm_id"]
                w = Way(row_dict, geom)
                #print row_dict
                resp[oid] = w
        a.close()
        del a
        return resp
# -*- coding: utf-8 -*-
| Python |
# -*- coding: utf-8 -*-
# This file is part of tWMS.
# tWMS is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# tWMS is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with tWMS. If not, see <http://www.gnu.org/licenses/>.
import StringIO
import Image
import os
import threading, thread
from twms import projections
import config
#from vtiles_backend import QuadTileBackend as DataBackend
from backend.postgis import PostGisBackend as DataBackend
from mapcss import MapCSS
from render import RasterTile
from tempfile import NamedTemporaryFile
# Global MapCSS style shared by all rendering threads; parsed once at import.
style = MapCSS(1,19)
style.parse(open("/home/kom/osm/kothic/src/styles/default.mapcss","r").read())
os.chdir("/home/kom/osm/kothic/src/")
# metatile id (z, x/8, y/8) -> rendering Thread; used to deduplicate work
metatiles_in_progress = {}
renderlock = threading.Lock()
def kothic_fetcher (z, x, y, this_layer):
    """
    tWMS fetcher callback: renders one 256x256 tile with Kothic.

    Returns a PIL RGBA image, or None when z is at or above the layer's
    max_zoom.
    """
    if "max_zoom" in this_layer:
        if z >= this_layer["max_zoom"]:
            return None
    bbox = projections.bbox_by_tile(z,x,y,"EPSG:3857")
    db = DataBackend(path="/home/kom/osm/kothic/src/tiles")
    res = RasterTile(256, 256, 1, db, "EPSG:3857")
    res.update_surface(bbox, z, style)
    # cairo only writes PNG to files, so round-trip through a temp file to PIL
    f = NamedTemporaryFile()
    f.close()
    res.surface.write_to_png(f.name)
    del res
    del db
    im = Image.open(f.name)
    os.unlink(f.name)
    im = im.convert("RGBA")
    return im
def kothic_metatile(z, x, y, this_layer):
    """
    Returns tile (z, x, y), rendering its 8x8 metatile on demand.

    Only one thread renders a given metatile at a time; concurrent callers
    join the in-flight rendering thread.  Returns a PIL image, or implicitly
    None when the zoom is out of range or the tile is still missing from the
    cache after rendering.
    """
    print z, x, y
    global metatiles_in_progress
    if "max_zoom" in this_layer:
        if z >= this_layer["max_zoom"]:
            return None
    if z<5:
        return None
    # metatiles cover 8x8 plain tiles
    metatile_id = (z,int(x/8), int(y/8))
    try:
        # someone else is already rendering this metatile - wait for them
        metatiles_in_progress[metatile_id].join()
    except KeyError:
        # nobody is rendering it yet - start a render thread and wait
        metatiles_in_progress[metatile_id] = threading.Thread(None, gen_metatile, None, (metatile_id, this_layer))
        metatiles_in_progress[metatile_id].start()
        metatiles_in_progress[metatile_id].join()
    except RuntimeError:
        # join() raced with thread setup/teardown - assume it is done
        pass
    local = config.tiles_cache + this_layer["prefix"] + "/z%s/%s/x%s/%s/y%s."%(z, x/1024, x, y/1024,y)
    ext = this_layer["ext"]
    if os.path.exists(local+ext): # First, look for tile in cache
        try:
            im1 = Image.open(local+ext)
            del metatiles_in_progress[metatile_id]
            return im1
        except IOError:
            # partially written file - drop it so it gets re-rendered
            os.remove(local+ext)
def gen_metatile(metatile_id, this_layer):
    """
    Renders an 8x8 metatile in one pass and saves its individual 256px tiles
    into the tile cache.  Runs in its own thread (see kothic_metatile).
    """
    #renderlock.acquire()
    z, x, y = metatile_id
    z -= 3   # at metatile zoom one "tile" covers the whole 8x8 block
    wh = 2560   # 10 x 256px: the 8-tile core plus a one-tile margin each side
    # expand the bbox by 1/8 of a metatile (= one plain tile) on every side
    bb1 = projections.coords_by_tile(z, x-0.125, y-0.125, "EPSG:3857")
    bb2 = projections.coords_by_tile(z, x+1.125, y+1.125, "EPSG:3857")
    bbox = (bb1[0],bb2[1],bb2[0],bb1[1])
    db = DataBackend()
    res = RasterTile(wh, wh, 1, db, "EPSG:3857")
    res.update_surface(bbox, z+3, style)
    # cairo only writes PNG to files, so round-trip through a temp file to PIL
    f = NamedTemporaryFile()
    f.close()
    res.surface.write_to_png(f.name)
    del res
    del db
    im = Image.open(f.name)
    os.unlink(f.name)
    im = im.convert("RGBA")
    # back to plain tile coordinates
    x*=8
    y*=8
    z+=3
    ext = this_layer["ext"]
    # crop out a 9x9 tile block (skipping the leading margin row/column)
    for i in range(x,x+9):
        for j in range(y,y+9):
            local = config.tiles_cache + this_layer["prefix"] + "/z%s/%s/x%s/%s/y%s."%(z, i/1024, i, j/1024,j)
            box = (256*(i-x+1),256*(j-y+1),256*(i-x+2),256*(j-y+2))
            im1 = im.crop(box)
            if not os.path.exists("/".join(local.split("/")[:-1])):
                os.makedirs("/".join(local.split("/")[:-1]))
            im1.save(local+ext)
            del im1
    #renderlock.release()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
from debug import debug, Timer
from mapcss import MapCSS
# Generates an osm2pgsql .style column list from the tags a MapCSS style
# actually uses, so the import database only carries interesting columns.
style = MapCSS(1, 19) #zoom levels
style.parse(open("styles/osmosnimki-maps.mapcss","r").read())
t = ("way", "node")
# tag name -> set of OSM object types ("way"/"node") the style references it on
dct = {}
for a in t:
    for tag in style.get_interesting_tags(type=a):
        if tag not in dct:
            dct[tag] = set()
        dct[tag].add(a)
print """
# OsmType Tag DataType Flags"""
# these pseudo-columns are computed by osm2pgsql / the renderer themselves
for t in ("z_order","way_area",":area"):
    if t in dct:
        del dct[t]
for k,v in dct.iteritems():
    # build the comma-separated OsmType column ("way", "node" or "node,way")
    s = ""
    for i in v:
        s += i
        s += ","
    s = s[:-1]
    print "%-10s %-18s %-13s %s"%(s, k, "text", "polygon")
print """
node,way z_order int4 linear # This is calculated during import
way way_area real # This is calculated during import"""
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
from debug import debug, Timer
from backend.postgis import PostGisBackend as DataBackend
from mapcss import MapCSS
from twms import bbox, projections
from render import RasterTile
import web
import StringIO
# Shared MapCSS style and data backend used by the WMS endpoint below.
style = MapCSS(1, 26) #zoom levels
style.parse(open("styles/landuses.mapcss","r").read())
#bbox = (27.115768874532,53.740327031764,28.028320754378,54.067187302158)
#w,h = 630*4,364*4
#z = 17
db = DataBackend()
#style = Styling()
try:
    import psyco
    psyco.full()
except ImportError:
    pass
# HTTP status codes returned by twms_main()
OK = 200
ERROR = 500
def handler():
    """
    A handler for web.py.

    Delegates the request parameters to twms_main() and streams the rendered
    image back with the content type it reports.
    """
    params = web.input()
    status, content_type, body = twms_main(params)
    web.header('Content-type', content_type)
    return body
# web.py URL dispatch table: route every path to mainhandler
urls = (
    '/(.*)', 'mainhandler'
)
class mainhandler:
    """web.py request class; all parameters come from the query string."""
    def GET(self, crap):
        # the captured path ("crap") is intentionally ignored
        return handler()
if __name__ == "__main__":
    # NOTE(review): this guard executes before twms_main (defined below it),
    # so in standalone mode the first request raises NameError; the guard
    # should live at the end of the module.
    app = web.application(urls, globals())
    app.run() # standalone run
def twms_main(req):
    """
    Renders one WMS GetMap-style request.

    req -- web.input()-like mapping with bbox/srs/width/height parameters
           (both lower- and upper-case variants are accepted)
    Returns (status, content_type, png_bytes).
    """
    resp = ""
    data = req
    srs = data.get("srs", data.get("SRS", "EPSG:4326"))
    content_type = "image/png"
    #layer = data.get("layers",data.get("LAYERS", config.default_layers)).split(",")
    width=0
    height=0
    req_bbox = ()
    if data.get("bbox",data.get("BBOX",None)):
        req_bbox = tuple(map(float,data.get("bbox",data.get("BBOX",req_bbox)).split(",")))
        req_bbox = projections.to4326(req_bbox, srs)
        req_bbox, flip_h = bbox.normalize(req_bbox)   # flip_h is unused here
        box = req_bbox
    height = int(data.get("height",data.get("HEIGHT",height)))
    width = int(data.get("width",data.get("WIDTH",width)))
    # pick a zoom level that fits the bbox into the requested pixel size
    z = bbox.zoom_for_bbox (box, (height, width), {"proj":"EPSG:3857"}, min_zoom = 1, max_zoom = 25,max_size = (10000,10000))
    res = RasterTile(width, height, z, db)
    res.update_surface(box, z, style)
    # cairo can write PNG to any file-like object; capture it in memory
    image_content = StringIO.StringIO()
    res.surface.write_to_png(image_content)
    resp = image_content.getvalue()
    return (OK, content_type, resp)
| Python |
# -*- coding: utf-8 -*-
| Python |
# -*- coding: utf-8 -*-
from twms import projections
from libkomapnik import pixel_size_at_zoom
import json
import psycopg2
from mapcss import MapCSS
import cgi
import os
import sys
# re-expose setdefaultencoding (removed from sys after interpreter startup)
reload(sys)
sys.setdefaultencoding("utf-8") # a hack to support UTF-8
try:
    import psyco
    psyco.full()
except ImportError:
    pass
#print >>sys.stderr, "Psyco import failed. Program may run slower. If you run it on i386 machine, please install Psyco to get best performance."
def get_vectors(bbox, zoom, style, vec = "polygon"):
    """
    Fetches simplified GeoJSON-ready features of one geometry class from PostGIS.

    bbox  -- (lon1, lat1, lon2, lat2) in EPSG:4326
    zoom  -- zoom level; drives the simplification tolerance
    style -- MapCSS style object; its get_sql_hints() (when present) narrows
             the WHERE clause to tags the style can actually render
    vec   -- "polygon", "line", "point" or "coastline"

    Coordinates are shifted/scaled into integer tile space (granularity
    `intscalefactor`).  Returns {"bbox", "granularity", "features"}.
    """
    bbox_p = projections.from4326(bbox, "EPSG:3857")
    geomcolumn = "way"
    database = "dbname=gis user=gis"
    pxtolerance = 1.8          # simplification tolerance, in pixels
    intscalefactor = 10000     # integer units per tile side
    ignore_columns = set(["way_area", "osm_id", geomcolumn, "tags", "z_order"])
    table = {"polygon":"planet_osm_polygon", "line":"planet_osm_line","point":"planet_osm_point", "coastline": "coastlines"}
    a = psycopg2.connect(database)
    b = a.cursor()
    if vec != "coastline":
        # learn the table's column names (minus internals) for the SELECT list
        b.execute("SELECT * FROM %s LIMIT 1;" % table[vec])
        names = [q[0] for q in b.description]
        for i in ignore_columns:
            if i in names:
                names.remove(i)
        names = ",".join(['"'+i+'"' for i in names])
        taghint = "*"
        types = {"line":"line","polygon":"area", "point":"node"}
        adp = ""
        if "get_sql_hints" in dir(style):
            sql_hint = style.get_sql_hints(types[vec], zoom)
            adp = []
            for tp in sql_hint:
                add = []
                # NOTE: substring check against the joined column string;
                # quoted names make false positives unlikely but possible
                for j in tp[0]:
                    if j not in names:
                        break
                else:
                    add.append(tp[1])
                if add:
                    add = " OR ".join(add)
                    add = "("+add+")"
                    adp.append(add)
            adp = " OR ".join(adp)
        if adp:
            # FIX: the hints arrive XML-entity-escaped (they are shared with
            # the mapnik XML writer); unescape the comparison operators.
            # The previous code replaced "<" with itself - a no-op.
            adp = adp.replace("&lt;", "<")
            adp = adp.replace("&gt;", ">")
    if vec == "polygon":
        query = """select ST_AsGeoJSON(ST_TransScale(ST_ForceRHR(ST_Intersection(way,SetSRID('BOX3D(%s %s,%s %s)'::box3d,900913))),%s,%s,%s,%s),0) as %s,
        ST_AsGeoJSON(ST_TransScale(ST_ForceRHR(ST_PointOnSurface(way)),%s,%s,%s,%s),0) as reprpoint, %s from
        (select (ST_Dump(ST_Multi(ST_SimplifyPreserveTopology(ST_Buffer(way,-%s),%s)))).geom as %s, %s from
        (select ST_Union(way) as %s, %s from
        (select ST_Buffer(way, %s) as %s, %s from
        %s
        where (%s)
        and way && SetSRID('BOX3D(%s %s,%s %s)'::box3d,900913)
        and way_area > %s
        ) p
        group by %s
        ) p
        where ST_Area(way) > %s
        order by ST_Area(way)
        ) p
        """%(bbox_p[0],bbox_p[1],bbox_p[2],bbox_p[3],
        -bbox_p[0],-bbox_p[1],intscalefactor/(bbox_p[2]-bbox_p[0]),intscalefactor/(bbox_p[3]-bbox_p[1]),
        geomcolumn,
        -bbox_p[0],-bbox_p[1],intscalefactor/(bbox_p[2]-bbox_p[0]),intscalefactor/(bbox_p[3]-bbox_p[1]),
        names,
        pixel_size_at_zoom(zoom, pxtolerance),pixel_size_at_zoom(zoom, pxtolerance),
        geomcolumn, names,
        geomcolumn, names,
        pixel_size_at_zoom(zoom, pxtolerance),
        geomcolumn, names,
        table[vec],
        adp,
        bbox_p[0],bbox_p[1],bbox_p[2],bbox_p[3],
        (pixel_size_at_zoom(zoom, pxtolerance)**2)/pxtolerance,
        names,
        pixel_size_at_zoom(zoom, pxtolerance)**2
        )
    elif vec == "line":
        query = """select ST_AsGeoJSON(ST_TransScale(ST_Intersection(way,SetSRID('BOX3D(%s %s,%s %s)'::box3d,900913)),%s,%s,%s,%s),0) as %s, %s from
        (select (ST_Dump(ST_Multi(ST_SimplifyPreserveTopology(ST_LineMerge(way),%s)))).geom as %s, %s from
        (select ST_Union(way) as %s, %s from
        %s
        where (%s)
        and way && SetSRID('BOX3D(%s %s,%s %s)'::box3d,900913)
        group by %s
        ) p
        ) p
        """%(bbox_p[0],bbox_p[1],bbox_p[2],bbox_p[3],
        -bbox_p[0],-bbox_p[1],intscalefactor/(bbox_p[2]-bbox_p[0]),intscalefactor/(bbox_p[3]-bbox_p[1]),
        geomcolumn, names,
        pixel_size_at_zoom(zoom, pxtolerance),
        geomcolumn, names,
        geomcolumn, names,
        table[vec],
        adp,
        bbox_p[0],bbox_p[1],bbox_p[2],bbox_p[3],
        names,
        )
    elif vec == "point":
        query = """select ST_AsGeoJSON(ST_TransScale(way,%s,%s,%s,%s),0) as %s, %s
        from %s where
        (%s)
        and way && SetSRID('BOX3D(%s %s,%s %s)'::box3d,900913)
        limit 10000
        """%(
        -bbox_p[0],-bbox_p[1],intscalefactor/(bbox_p[2]-bbox_p[0]),intscalefactor/(bbox_p[3]-bbox_p[1]),
        geomcolumn, names,
        table[vec],
        adp,
        bbox_p[0],bbox_p[1],bbox_p[2],bbox_p[3],
        )
    elif vec == "coastline":
        query = """select ST_AsGeoJSON(ST_TransScale(ST_ForceRHR(ST_Intersection(way,SetSRID('BOX3D(%s %s,%s %s)'::box3d,900913))),%s,%s,%s,%s),0) as %s, 'coastline' as "natural" from
        (select (ST_Dump(ST_Multi(ST_SimplifyPreserveTopology(ST_Buffer(way,-%s),%s)))).geom as %s from
        (select ST_Union(way) as %s from
        (select ST_Buffer(SetSRID(the_geom,900913), %s) as %s from
        %s
        where
        SetSRID(the_geom,900913) && SetSRID('BOX3D(%s %s,%s %s)'::box3d,900913)
        ) p
        ) p
        where ST_Area(way) > %s
        ) p
        """%(bbox_p[0],bbox_p[1],bbox_p[2],bbox_p[3],
        -bbox_p[0],-bbox_p[1],intscalefactor/(bbox_p[2]-bbox_p[0]),intscalefactor/(bbox_p[3]-bbox_p[1]),
        geomcolumn,
        pixel_size_at_zoom(zoom, pxtolerance),pixel_size_at_zoom(zoom, pxtolerance),
        geomcolumn,
        geomcolumn,
        pixel_size_at_zoom(zoom, pxtolerance),
        geomcolumn,
        table[vec],
        bbox_p[0],bbox_p[1],bbox_p[2],bbox_p[3],
        pixel_size_at_zoom(zoom, pxtolerance)**2
        )
    #print query
    # FIX: reuse the connection opened above instead of opening a second one
    # and leaking the first.
    b.execute(query)
    names = [q[0] for q in b.description]
    ROWS_FETCHED = 0
    polygons = []
    for row in b.fetchall():
        ROWS_FETCHED += 1
        geom = dict(map(None,names,row))
        # drop NULL/empty columns
        for t in geom.keys():
            if not geom[t]:
                del geom[t]
        geojson = json.loads(geom[geomcolumn])
        del geom[geomcolumn]
        if geojson["type"] == "GeometryCollection":
            continue
        if "reprpoint" in geom:
            # label anchor point precomputed by ST_PointOnSurface
            geojson["reprpoint"] = json.loads(geom["reprpoint"])["coordinates"]
            del geom["reprpoint"]
        prop = {}
        for k,v in geom.iteritems():
            prop[k] = v
            # store numeric-looking tag values as numbers to shrink the JSON
            try:
                if int(v) == float(v):
                    prop[k] = int(v)
                else:
                    prop[k] = float(v)
                if str(prop[k]) != v: # leading zeros etc.. should be saved
                    prop[k] = v
            except:
                pass
        geojson["properties"] = prop
        polygons.append(geojson)
    return {"bbox": bbox, "granularity":intscalefactor, "features":polygons}
# --- CGI entry point: /?z=..&x=..&y=.. renders one vector tile as JSONP ---
print "Content-Type: text/html"
print
form = cgi.FieldStorage()
if "z" not in form:
    print "need z"
    exit()
if "x" not in form:
    print "need x"
    exit()
if "y" not in form:
    print "need y"
    exit()
z = int(form["z"].value)
x = int(form["x"].value)
y = int(form["y"].value)
if z>22:
    exit()
callback = "onKothicDataResponse"
bbox = projections.bbox_by_tile(z+1,x,y,"EPSG:3857")
style = MapCSS(0,30)
style.parse(open("styles/osmosnimki-maps.mapcss","r").read())
zoom = z+2
# coastlines first, then polygons/lines/points stacked on top
aaaa = get_vectors(bbox,zoom,style,"coastline")
aaaa["features"].extend(get_vectors(bbox,zoom,style,"polygon")["features"])
aaaa["features"].extend(get_vectors(bbox,zoom,style,"line")["features"])
aaaa["features"].extend(get_vectors(bbox,zoom,style,"point")["features"])
# wrap in the JSONP callback the Kothic JS client expects
aaaa = callback+"("+json.dumps(aaaa,True,False,separators=(',', ':'))+",%s,%s,%s);"%(z,x,y)
print aaaa
# persist the tile so the web server can serve it statically next time
dir = "/var/www/vtile/%s/%s/"%(z,x)
file = "%s.js"%y
try:
    if not os.path.exists(dir):
        os.makedirs(dir)
except:
    pass
file = open(dir+file,"w")
file.write(aaaa)
file.flush()
file.close()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
import pygtk
pygtk.require('2.0')
import gtk
import gobject
import cairo
import math
import string
import threading
import datetime
import time
import Queue
import os
from render import RasterTile
from debug import debug, Timer
import twms.bbox
from twms import projections
class KothicWidget(gtk.DrawingArea):
    """
    GTK widget that displays a slippy map rendered by Kothic.

    Keeps the viewport as (center_coord, zoom); tiles are rendered
    asynchronously by a TileSource which calls redraw() as they complete.
    Drag pans the map, scroll wheel zooms in half-level steps.
    """
    def __init__(self, data, style):
        gtk.DrawingArea.__init__(self)
        self.data_backend = data
        self.style_backend = style
        self.request_d = (0,0)
        self.tiles = TileSource(data,style, callback=self.redraw)
        self.dx = 0            # current drag offset in pixels
        self.dy = 0
        self.drag_x = 0        # pointer position where the drag started
        self.drag_y = 0
        self.drag = False
        self.rastertile = None
        self.f = True
        self.width = 0
        self.height = 0
        self.max_zoom = 25
        self.zoom = 0
        self.center_coord = (0.0,0.0)   # lon/lat at the widget center
        self.old_zoom = 1
        self.old_center_coord = (0.0,0.1)
        self.tilebox = [] # bbox of currently seen tiles
        self.bbox = []
        self.add_events(gtk.gdk.BUTTON1_MOTION_MASK)
        self.add_events(gtk.gdk.POINTER_MOTION_MASK)
        self.add_events(gtk.gdk.BUTTON_PRESS_MASK)
        self.add_events(gtk.gdk.BUTTON_RELEASE_MASK)
        # NOTE(review): looks like this should be gtk.gdk.SCROLL_MASK - confirm
        self.add_events(gtk.gdk.SCROLL)
        # self.window.add_events(gtk.gdk.BUTTON1_MOTION_MASK)
        self.connect("expose_event",self.expose_ev)
        self.connect("motion_notify_event",self.motion_ev)
        self.connect("button_press_event",self.press_ev)
        self.connect("button_release_event",self.release_ev)
        self.connect("scroll_event",self.scroll_ev)
        # self.surface = cairo.ImageSurfaceicreate(gtk.RGB24, self.width, self.height)
    def set_zoom(self, zoom):
        """Sets an absolute zoom level and repaints."""
        self.zoom = zoom
        self.queue_draw()
    def jump_to(self, lonlat):
        """Recenters the view on a (lon, lat) pair and repaints."""
        self.center_coord = lonlat
        self.queue_draw()
    def zoom_to(self, bbox):
        """Fits the given 4326 bbox into the widget and repaints."""
        self.zoom = twms.bbox.zoom_for_bbox (bbox, (self.width,self.height), {"proj":"EPSG:3857","max_zoom":self.max_zoom})-1
        print "Zoom:", self.zoom
        self.center_coord = ((bbox[0]+bbox[2])/2,(bbox[1]+bbox[3])/2)
        print self.center_coord
        self.redraw()
    def motion_ev(self, widget, event):
        # While dragging, track the offset; the actual recenter happens on release.
        if self.drag:
            self.dx = event.x - self.drag_x
            self.dy = event.y - self.drag_y
            #if((abs(self.dx) > 3 or abs(self.dy) > 3) and self.f):
            if True:
                # x = event.x
                # y = event.y
                # lo1, la1, lo2, la2 = self.tilebox
                # self.center_coord = projections.coords_by_tile(self.zoom,1.*x/self.width*(lo2-lo1)+lo1, la1+(1.*y/(self.height)*(la2-la1)),"EPSG:3857")
                widget.queue_draw()
    def press_ev(self, widget, event):
        if event.button == 1:
            #debug("Start drag")
            self.drag = True
            self.drag_x = event.x
            self.drag_y = event.y
            self.timer = Timer("Drag")
        #elif event.button == 2:
            #debug("Button2")
        #elif event.button == 3:
            #debug("Button3")
    def release_ev(self, widget, event):
        # End of a drag: translate the accumulated pixel offset into a new
        # map center (computed in projected EPSG:3857 space).
        if event.button == 1:
            #debug("Stop drag")
            self.drag = False
            self.timer.stop()
            #debug("dd: %s,%s "%(self.dx, self.dy))
            x = event.x
            y = event.y
            lo1, la1, lo2, la2 = projections.from4326(self.bbox, "EPSG:3857")
            print lo1, la1, lo2, la2
            #self.center_coord = projections.to4326((0.5*(self.width+self.dx)/self.width*(lo1-lo2)+lo2, la1+(0.5*(self.height+self.dy)/self.height*(la2-la1))),"EPSG:3857")
            self.center_coord = projections.to4326((0.5*(self.width+2*self.dx)/self.width*(lo1-lo2)+lo2, la1+(0.5*(self.height+2*self.dy)/self.height*(la2-la1))),"EPSG:3857")
            #self.rastertile.screen2lonlat(self.rastertile.w/2 - self.dx, self.rastertile.h/2 - self.dy);
            self.dx = 0
            self.dy = 0
            self.redraw()
    def scroll_ev(self, widget, event):
        # Zoom in/out in half-level steps, clamped to [0, max_zoom].
        if event.direction == gtk.gdk.SCROLL_UP:
            if self.zoom+0.5 <= self.max_zoom:
                self.zoom += 0.5
            #debug("Zoom in")
        elif event.direction == gtk.gdk.SCROLL_DOWN:
            if self.zoom >= 0: ## negative zooms are nonsense
                self.zoom -= 0.5
            # debug("Zoom out")
        #self.redraw()
        debug("new zoom: %s"%(self.zoom))
        widget.queue_draw()
    def redraw(self):
        """
        Force screen redraw.
        """
        #res = RasterTile(3*self.width, 3*self.height, self.zoom, self.data_backend)
        #res.update_surface_by_center(self.center_coord, self.zoom, self.style_backend)
        #self.rastertile = res
        self.queue_draw()
    def expose_ev(self, widget, event):
        # Repaint: recompute the visible bbox/tile range if the viewport moved,
        # then blit every onscreen tile surface at its pixel offset.
        if(widget.allocation.width != self.width or widget.allocation.height != self.height ):
            #debug("Rrresize!")
            self.width = widget.allocation.width
            self.height = widget.allocation.height
        cr = widget.window.cairo_create()
        if self.old_center_coord != self.center_coord or self.old_zoom != self.zoom:
            #print "Recentered!"
            xy = projections.from4326(self.center_coord,"EPSG:3857")
            xy1 = projections.to4326((xy[0]-40075016.*(0.5**(self.zoom))/self.tiles.tilewidth*self.width, xy[1]-40075016.*(0.5**(self.zoom))/self.tiles.tileheight*self.height), "EPSG:3857")
            xy2 = projections.to4326((xy[0]+40075016.*(0.5**(self.zoom))/self.tiles.tilewidth*self.width, xy[1]+40075016.*(0.5**(self.zoom))/self.tiles.tileheight*self.height), "EPSG:3857")
            self.bbox = (xy1[0],xy1[1],xy2[0],xy2[1])
            self.tilebox = projections.tile_by_bbox(self.bbox, self.zoom, "EPSG:3857")
            self.old_center_coord = self.center_coord
            self.old_zoom = self.zoom
        from_tile_x, from_tile_y, to_tile_x, to_tile_y = self.tilebox
        # sub-tile pixel offset of the first visible tile
        dx = 1.*(from_tile_x - int(from_tile_x))*self.tiles.tilewidth
        dy = 1.*(from_tile_y - int(from_tile_y))*self.tiles.tileheight
        print dx,dy
        #print self.dx, self.dy
        onscreen_tiles = set()
        for x in range (int(from_tile_x), int(to_tile_x)+1):
            for y in range (int(to_tile_y), int(from_tile_y)+1):
                onscreen_tiles.add((self.zoom,x,y))
        self.tiles.onscreen = onscreen_tiles
        for z,x,y in onscreen_tiles:
            tile = self.tiles[(self.zoom,x,y)]
            #print dx+(x-from_tile_x)*self.tiles.tilewidth-self.width
            #print dy+(y-from_tile_y)*self.tiles.tileheight-self.height
            #cr.set_source_surface(tile, int(self.dx-dx+(x-int(from_tile_x))*self.tiles.tilewidth-self.width), int(self.dy-dy-(int(from_tile_y)-y)*self.tiles.tileheight+self.height))
            cr.set_source_surface(tile, int(self.dx-dx+(x-int(from_tile_x))*self.tiles.tilewidth), int(self.dy-dy-(int(from_tile_y)-y)*self.tiles.tileheight+self.height))
            cr.paint()
        #cr.set_source_surface(self.rastertile.surface, self.dx-self.width + self.rastertile.offset_x, self.dy - self.height + self.rastertile.offset_y)
        #self.comm[3].release()
class TileSource:
def __init__(self,data,style, callback = lambda: None):
self.tiles = {}
self.tilewidth = 2048
self.tileheight = 2048
self.max_tiles = 32
self.data_backend = data
self.style_backend = style
self.callback = callback
self.onscreen = set()
self._singlethread = False
self._prerender = True
def __getitem__(self,(z,x,y),wait=False):
try:
#if "surface" in self.tiles[(z,x,y)] and not wait:
# self._callback((z,x,y), True)
print "Tiles count:", len(self.tiles)
return self.tiles[(z,x,y)]["surface"]
except:
self.tiles[(z,x,y)] = {"tile": RasterTile(self.tilewidth, self.tileheight, z, self.data_backend)}
self.tiles[(z,x,y)]["start_time"] = datetime.datetime.now()
if self._singlethread:
self.tiles[(z,x,y)]["surface"] = self.tiles[(z,x,y)]["tile"].surface
self.tiles[(z,x,y)]["tile"].update_surface(projections.bbox_by_tile(z,x,y,"EPSG:3857"), z, self.style_backend, lambda p=False: self._callback((z,x,y),p))
del self.tiles[(z,x,y)]["tile"]
else:
self.tiles[(z,x,y)]["surface"] = self.tiles[(z,x,y)]["tile"].surface.create_similar(cairo.CONTENT_COLOR_ALPHA, self.tilewidth, self.tileheight)
self.tiles[(z,x,y)]["thread"] = threading.Thread(None, self.tiles[(z,x,y)]["tile"].update_surface,None, (projections.bbox_by_tile(z,x,y,"EPSG:3857"), z, self.style_backend, lambda p=False: self._callback((z,x,y),p)))
self.tiles[(z,x,y)]["thread"].start()
if wait:
self.tiles[(z,x,y)]["thread"].join()
return self.tiles[(z,x,y)]["surface"]
def _callback (self, (z,x,y),last):
#if last:
# print last, "dddddddddddddddddd"
if not self._singlethread:
if ((z,x,y) in self.onscreen or last) and "tile" in self.tiles[(z,x,y)]:
cr = cairo.Context(self.tiles[(z,x,y)]["surface"])
cr.set_source_surface(self.tiles[(z,x,y)]["tile"].surface,0,0)
cr.paint()
if last:
try:
del self.tiles[(z,x,y)]["thread"]
del self.tiles[(z,x,y)]["tile"]
except KeyError:
pass
self.tiles[(z,x,y)]["finish_time"] = datetime.datetime.now() - self.tiles[(z,x,y)]["start_time"]
gobject.idle_add(self.callback)
self.collect_grabage()
if last and self._prerender:
if (z,x,y) in self.onscreen:
a = self.__getitem__((z-1,x/2,y/2),True)
if (z,x,y) in self.onscreen:
a = self.__getitem__((z+1,x*2,y*2),True)
if (z,x,y) in self.onscreen:
a = self.__getitem__((z+1,x*2+1,y*2),True)
if (z,x,y) in self.onscreen:
a = self.__getitem__((z+1,x*2,y*2+1),True)
if (z,x,y) in self.onscreen:
a = self.__getitem__((z+1,x*2+1,y*2+1),True)
if (z,x,y) in self.onscreen:
a = self.__getitem__((z,x+1,y),True)
if (z,x,y) in self.onscreen:
a = self.__getitem__((z,x,y+1),True)
if (z,x,y) in self.onscreen:
a = self.__getitem__((z,x-1,y),True)
if (z,x,y) in self.onscreen:
a = self.__getitem__((z,x,y-1),True)
def collect_grabage (self):
if len(self.tiles)> self.max_tiles:
# let's kick out the fastest rendered tiles - it's easy to rerender those
# don't touch onscreen tiles
cand = set(self.tiles.keys())
cand.difference_update(self.onscreen)
cand = [i for i in cand if "finish_time" in self.tiles[i]]
cand.sort(lambda i,j: self.tiles[i]["finish_time"]<self.tiles[i]["finish_time"])
while cand:
if (len(self.tiles)> self.max_tiles):
c = cand.pop()
try:
print "Killed tile ", c, " - finished in ",str(self.tiles[c]["finish_time"]), ", ago:", str(datetime.datetime.now()-self.tiles[c]["start_time"])
del self.tiles[c]
except KeyError:
pass
else:
break
if __name__ == "__main__":
    # KothicApp is defined elsewhere in this module (near the top of the file)
    gtk.gdk.threads_init()  # let render threads coexist with the GTK main loop
    kap = KothicApp()
    kap.main()
| Python |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# This file is part of kothic, the realtime map renderer.
# kothic is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# kothic is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with kothic. If not, see <http://www.gnu.org/licenses/>.
### TODO: MapCSS loading and parsing
from debug import debug
from mapcss.webcolors.webcolors import whatever_to_cairo as colorparser
class Styling():
    """
    Class used to choose the right way of rendering an object.

    Holds per-object-type lists of StyleSelector rules; get_style() merges
    every matching rule's properties, in declaration order.
    """
    def __init__(self, stylefile = None):
        self.Selectors = {}
        self.Selectors["way"] = []
        self.Selectors["node"] = []
        self.Selectors["relation"] = []
        if not stylefile:
            # self.Selectors["way"].append(StyleSelector( ( [ ( ("building",),(None) ) ] ),{"fill-color": "#00f"} ))
            #if stylefile=="zzzz":
            ### using "builtin" styling
            self.Selectors["way"].append(StyleSelector( ( [ ( ("area",),("yes") ) ] ),{"fill-color": "#ff0000"} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("highway",),(None) ) ] ),{"width":1,"color":"#ff0000","text": "name", "text-position":"line","text-halo-radius":2,} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("barrier",),(None) ) ] ),{"casing-width":1,} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("highway",),("residential", "tertiary", "living_street")) ] ),{"width": 3, "color":"#ffffff", "casing-width": 5, "z-index":10} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("highway",),("service", "unclassified")) ] ),{"width": 2.5, "color":"#ccc", "casing-width": 4, "z-index":9} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("highway",),("primary", "motorway", "trunk")) ] ),{"width": 4, "color":"#ff0", "casing-width": 6, "z-index":11} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("highway",),("primary_link", "motorway_link", "trunk_link")) ] ),{"width": 3.5, "color":"#ff0", "casing-width": 6, "z-index":11} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("highway",),("secondary", )) ] ),{"width": 4, "color":"orange", "casing-width": 6, "z-index":10} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("living_street",),("yes")) ] ),{"width": 2, "casing-width": 3, "z-index": 0} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("landuse","natural"),("forest", "wood") ) ] ),{"fill-color": "#020"} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("landuse",),("industrial",) ) ] ),{"fill-color": "#855"} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("landuse",),("military",) ) ] ),{"fill-color": "pink"} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("waterway","natural"),("riverbank", "water") ) ] ),{"fill-color": "#002"} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("waterway","natural"),("river", "stream") ) ] ),{"color": "#002"} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("landuse","natural"),("grass",) ) ] ),{"fill-color": "#050",} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("highway",),("footway","pedestrian","path" )) ] ),{"width":2.5, "color":"#655", "dashes": [3,1],"z-index":3} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("bridge",),("yes") ) ] ),{"casing-width":10} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("power",),("line",)) ] ),{"width": 1, "color":"#ccc",} ))
            self.Selectors["way"].append(StyleSelector( ( [ ( ("building",),(None) ) ] ),{"fill-color": "#522","text": "addr:housenumber","text-halo-radius":2,"z-index":100} ))#"extrude":10,
        self.stylefile = stylefile
        # collect every tag key any selector can look at (for filter_tags)
        self.useful_keys = set(["layer"])
        for objtype in self.Selectors.values(): # getting useful keys
            for selector in objtype:
                #debug(selector)
                for tag in selector.tags:
                    self.useful_keys.update(set(tag[0]))
                if "text" in selector.style:
                    # the "text" property names a tag whose value gets rendered
                    self.useful_keys.update(set((selector.style["text"],)))
        debug(self.useful_keys)
    def get_style(self, objtype, tags, nodata = False):
        """
        objtype is "node", "way" or "relation"
        tags - object tags
        nodata - we won't render that now, don't need exact styling

        Returns the merged style dict; with nodata=True returns True as soon
        as anything matches (just a "will this render at all" check).
        """
        resp = {}
        for selector in self.Selectors[objtype]:
            resp.update(selector.get_style(tags))
        if nodata:
            if resp:
                return True
        if not nodata and resp:
            #debug((tags, tags.get("layer",0)), )
            try:
                # layer tag shifts drawing order by 100 per level; +1000 keeps it positive
                resp["layer"] = int(tags.get("layer",0))*100+resp.get("z-index",0)+1000
            except ValueError:
                # non-numeric layer tag: push way up
                resp["layer"] = 1000000
            if "text" in resp: # unpacking text
                if resp["text"] in tags:
                    resp["text"] = tags[resp["text"]]
                    #debug("text: %s"%resp["text"])
                else:
                    del resp["text"]
        return resp
    def filter_tags(self, tags):
        """
        Returns only tags that are useful for rendering
        (filtering currently disabled - returns tags unchanged).
        """
        #resp = {}
        #for k,v in tags.iteritems():
        #  if k in self.useful_keys:
        #    resp[k] = v
        return tags
class StyleSelector():
    """
    One styling rule: a tag-match condition plus the MapCSS properties to
    apply when an object satisfies it.
    """
    def __init__(self, tags, style):
        """
        tags  - list of rules [(("key","key"..), ("value", "value"...)), ...];
                a None value tuple means "any value"
        style - MapCSS property dict; *color* values are normalised to cairo
                form via colorparser
        """
        self.tags = tags
        self.style = {}
        for raw_key, raw_value in style.items():
            prop = raw_key.lower()
            if "color" in prop:
                parsed = colorparser(raw_value)
                debug((parsed, raw_value))
                self.style[prop] = parsed
            else:
                self.style[prop] = raw_value
    def get_style(self, tags):
        """
        Get actual styling for object.

        Returns this selector's style dict when any (keys, values) rule
        matches *tags*, otherwise an empty dict.
        """
        for keys, values in self.tags:
            for key in keys:
                if key not in tags:
                    continue
                # a falsy values entry means "match any value of this key"
                if not values or tags[key] in values:
                    return self.style
        return {}
if __name__ == "__main__":
    # smoke test: exercise the builtin style table with assorted tag sets
    c = Styling()
    print c.get_style("way", {"building":"yes"})
    print c.get_style("way", {"highway":"residential"})
    print c.get_style("way", {"highway":"road"})
    print c.get_style("way", {"highway":"residential", "building": "yes"})
    print c.get_style("way", {"highwadfgaay":"resifdgsdential", "builafgding": "yedfgs"})
    print c.get_style("way", {"highwadfgaay":"resifdgsdential", "builafgding": "yedfgs"}, True)
    print c.get_style("way", {"highway":"residential", "building": "yes"}, True)
    print c.filter_tags({"highwadfgaay":"resifdgsdential", "builafgding": "yedfgs", "building": "residential"})
import wmi
# Prints S.M.A.R.T. drive temperatures and ACPI CPU temperature via WMI.
TEMPATTR = 194 # the number of the temperature attribute
c = wmi.WMI(namespace='root/wmi')
for drive in c.MSStorageDriver_ATAPISmartData():
    # strip out parts of the name to make it more readable.
    driveName = drive.InstanceName.split('_')[1]
    # The first byte of the array is the number of 12-byte 'blocks' it contains.
    blockNum = drive.VendorSpecific[0]
    # Split the rest of the array into blocks.
    vs = drive.VendorSpecific[1:]
    blocks = [vs[i*12:i*12+12] for i in xrange(blockNum)]
    # Find the temperature block for each drive and print the value.
    print driveName + ':',
    for block in blocks:
        if block[1] == TEMPATTR:
            # byte 6 of the attribute block holds the raw Celsius value
            print str(block[6]) + 'C'
            break
# ACPI thermal zone reports tenths of a kelvin; convert to Celsius
for cpu in c.MSAcpi_ThermalZoneTemperature():
    print "cputemper :"+str((cpu.CurrentTemperature-2732)/10.0)
| Python |
# -*- coding: utf-8 -*-
# Windows WMI SQL (WQL)
# Network information
#
# http://msdn.microsoft.com/en-us/library/aa394084(VS.85).aspx
# http://python.net/crew/mhammond/win32/
#
# raspi 2008
import sys
try:
import win32com.client
except ImportError:
sys.exit("you can has epic fail")
wmi = win32com.client.GetObject("winmgmts:")
def getNetworkAdapters():
    """Query WMI for physical network adapters and their configuration.

    Returns a list of dicts with keys IFName, Name, Status (a pair
    [NetConnectionStatus, Availability] of numeric codes), IP (list of
    {IP, Mask, Gateway}), DNS (list of server addresses), MAC and DHCP.
    Returns None if a WMI query raises.
    """
    adapters = []
    try:
        # Physical adapters only: AdapterTypeID 0 (Ethernet) or 9
        # (wireless), with a connection name, excluding Microsoft's
        # virtual devices.
        # http://msdn.microsoft.com/en-us/library/aa394216(VS.85).aspx
        wql = wmi.ExecQuery("SELECT * FROM Win32_NetworkAdapter WHERE (AdapterTypeID=0 OR AdapterTypeID=9) AND NetConnectionID IS NOT NULL AND Manufacturer <> 'Microsoft'")
        for i in wql:
            id = int(i.Properties_["Index"].Value)
            # Get more adapter information
            # http://msdn.microsoft.com/en-us/library/aa394217(VS.85).aspx
            wql2 = wmi.ExecQuery("SELECT * FROM Win32_NetworkAdapterConfiguration WHERE Index='%d'" % id)
            ip = []
            dns = []
            for j in wql2:
                # Adapter uses DHCP?
                dhcp = j.Properties_["DHCPEnabled"].Value
                # Get DNS servers
                if j.Properties_["DNSServerSearchOrder"].Value != None:
                    for q in j.Properties_["DNSServerSearchOrder"].Value:
                        dns.append(q.encode(sys.getfilesystemencoding()))
                # Get IP + Netmask + Gateway
                if (j.Properties_["IPAddress"].Value != None) and (j.Properties_["IPSubnet"].Value != None):
                    ipcount = len(j.Properties_["IPAddress"].Value)
                    for idx in range(ipcount):
                        try:
                            # A gateway may be missing for an address.
                            gw = j.Properties_["DefaultIPGateway"].Value[idx].encode(sys.getfilesystemencoding())
                        except:
                            gw = "-"
                        ipaddr = j.Properties_["IPAddress"].Value[idx].encode(sys.getfilesystemencoding())
                        mask = j.Properties_["IPSubnet"].Value[idx].encode(sys.getfilesystemencoding())
                        ip.append({"IP": ipaddr, "Mask": mask, "Gateway": gw})
            # Get rest
            name = i.Properties_["Name"].Value.encode(sys.getfilesystemencoding())
            try:
                connid = i.Properties_["NetConnectionID"].Value.encode(sys.getfilesystemencoding())
            except:
                connid = "?"
            try:
                cstatus = int(i.Properties_["NetConnectionStatus"].Value)
                avail = int(i.Properties_["Availability"].Value)
            except:
                # Status fields can be NULL; index 0 maps to the
                # 'Disconnected' / '-' labels in the tables below.
                cstatus = 0
                avail = 0
            try:
                mac = i.Properties_["MACAddress"].Value.encode(sys.getfilesystemencoding())
            except:
                mac = ""
            status = [cstatus, avail]
            adapters.append({"IFName": connid, "Name": name, "Status": status, "IP": ip, "DNS": dns, "MAC": mac, "DHCP": dhcp})
    except:
        # NOTE(review): broad except hides which query failed, and this
        # path returns None while success returns a list -- callers must
        # handle both shapes.
        print "Unexpected error:", sys.exc_info()[0]
        return
    return adapters
availability = ['-', 'Other', 'Unknown', 'Running or Full Power', 'Warning', 'In Test', 'Not Applicable', 'Power Off', 'Off Line', 'Off Duty', 'Degraded', 'Not Installed', 'Install Error', 'Power Save - Unknown', 'Power Save - Low Power Mode', 'Power Save - Standby', 'Power Cycle', 'Power Save - Warning']
connstatus = ['Disconnected', 'Connecting', 'Connected', 'Disconnecting', 'Hardware not present', 'Hardware disabled', 'Hardware malfunction', 'Media disconnected', 'Authenticating', 'Authentication succeeded', 'Authentication failed', 'Invalid address', 'Credentials required']
adapters = getNetworkAdapters()
#print adapters
print "Network adapters:"
for i in adapters:
if i['IFName'] != "?":
print "%s (%s):" % (i['IFName'], i['Name'])
print " Status: %s (%s)" % (connstatus[i['Status'][0]], availability[i['Status'][1]])
ipstr = ""
for ip in i['IP']:
ipstr = "%s/%s (GW: %s) " % (ip['IP'], ip['Mask'], ip['Gateway'])
print " IP(s) with mask: " + ipstr
print " DNS: " + ", ".join(i['DNS'])
print " MAC: " + i['MAC']
print " Use DHCP: " + (i['DHCP'] and "Yes" or "No")
print "" | Python |
from subprocess import *
# Pipe demo: spawn netstat with both stdin and stdout connected to us.
p = Popen(["netstat", "-an"], bufsize=1024,stdin=PIPE, stdout=PIPE, close_fds=True)
(fin, fout) = (p.stdin, p.stdout)
# Write ten lines to the child and echo one line of its output per write.
# NOTE(review): netstat ignores its stdin, so the writes are presumably
# just demonstrating the pipe API; interleaved write/read like this can
# deadlock once buffers fill -- prefer p.communicate() for real use.
for i in range(10):
    fin.write("line" + str(i))
    fin.write('\n')
    fin.flush()
    print fout.readline(),
#====================================================================
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
# ====================================================================
#
# This software consists of voluntary contributions made by many
# individuals on behalf of the Apache Software Foundation. For more
# information on the Apache Software Foundation, please see
# <http://www.apache.org/>.
#
import os
import re
import tempfile
import shutil
# Directory names to prune while walking (VCS metadata and build output).
ignore_pattern = re.compile('^(.svn|target|bin|classes)')
# Files eligible for rewriting: anything with a .java extension.
java_pattern = re.compile('^.*\.java')
# Import lines to retarget from org.apache.http to net.jcip.
annot_pattern = re.compile('import org\.apache\.http\.annotation\.')
def process_dir(dir):
    """Recursively walk *dir*, handing every .java file to process_source().

    Subdirectories matching ignore_pattern (VCS/build output) are pruned.
    """
    for entry in os.listdir(dir):
        path = os.path.join(dir, entry)
        if os.path.isdir(path):
            if not ignore_pattern.match(entry):
                process_dir(path)
        elif java_pattern.match(entry):
            process_source(path)
def process_source(filename):
    """Rewrite org.apache.http.annotation imports in *filename*.

    Writes the transformed content to a temp file; the temp file only
    replaces the original when at least one import line changed,
    otherwise it is discarded.
    """
    tmpfd, tmpfile = tempfile.mkstemp()
    try:
        changed = False
        dst = os.fdopen(tmpfd, 'w')
        try:
            src = open(filename)
            try:
                for line in src:
                    if annot_pattern.match(line):
                        changed = True
                        line = line.replace('import org.apache.http.annotation.', 'import net.jcip.annotations.')
                    dst.write(line)
            finally:
                src.close()
        finally:
            dst.close()
        if changed:
            shutil.move(tmpfile, filename)
        else:
            os.remove(tmpfile)
    except:
        # Bug fix: the original bare except removed the temp file and then
        # silently swallowed the error; clean up but re-raise so failures
        # are visible to the caller.
        os.remove(tmpfile)
        raise
process_dir('.')
| Python |
from Vector import *
from Ray import *
from math import sqrt
class Sphere:
    """A sphere (center c, radius r) supporting ray intersection.

    Intersection distances are appended to self.hit; Intersect() does not
    clear the list, so callers interested in one query should look at the
    most recent one/two entries.
    """
    def __init__(self, _c=[0,0,0], r=1):
        # Copy the coordinate list so neither the caller's list nor the
        # shared mutable default can alias the sphere's own state.
        self.c = Vector(list(_c))
        self.r = r
        # Bug fix: the original bound a local 'hit' here, leaving every
        # instance sharing one class-level list that grew forever.
        self.hit = []
    def __str__(self): return "%s, r=%f" % (str(self.c), self.r)
    def Intersect(self, r):
        """Intersect ray *r* with the sphere.

        Solves a*t^2 + p*t + q = 0 derived from |o + t*d - c| = r.
        Returns True and appends the enter (and, unless grazing, exit)
        distances to self.hit, or False on a miss.
        """
        o_minus_c = r.o - self.c                              # (o - c)
        a = r.d.ScalarProduct(r.d)                            # a = d.d
        p = r.d.ScalarProduct(o_minus_c) * 2                  # p = 2d(o - c)
        r_q = self.r ** 2                                     # r^2
        q = o_minus_c.ScalarProduct(o_minus_c) - r_q          # q = (o - c)^2 - r^2
        discriminant = p*p - 4*a*q                            # p^2 - 4aq
        if (discriminant < 0): return False
        # ... and save the actual hit distances.
        discriminant = sqrt(discriminant)
        # Bug fix: the roots are (-p +/- sqrt(disc)) / (2a); the original
        # '/ 2*a' computed ((-p - d) / 2) * a by operator precedence (it
        # only worked because the default ray direction gives a == 1).
        self.hit.append((-p - discriminant) / (2 * a))        # enter/graze distance
        if (discriminant != 0):
            self.hit.append((-p + discriminant) / (2 * a))    # exit distance
            self.hit.sort()                                   # sort hit-listing
        else:
            self.hit.append(0)  # sphere grazed: original records 0, not a distance
        return True
| Python |
from math import sqrt
class Vector:
    """A 3-component vector with overloaded arithmetic operators.

    Components are stored as the list self.c == [x, y, z].
    """
    def __init__(self, _c=None):
        # Bug fix: the original stored the caller's list (and the shared
        # mutable default [0,0,0]) by reference, so mutating one vector's
        # components could corrupt other vectors. Copy instead.
        self.c = list(_c) if _c is not None else [0, 0, 0]
    def __str__(self): return str(self.c)
    def Clear(self):
        """Reset to the zero vector."""
        self.__init__()
    # Equality compares component lists.
    def __eq__(self, v): return self.c == v.c
    def __ne__(self, v): return self.c != v.c
    # Unary minus.
    def __neg__(self): return Vector([-(a) for a in self.c])
    # Component-wise addition.
    def __add__(self, v): return Vector([a+b for a,b in zip(self.c, v.c)])
    def __iadd__(self, v):
        self.c = [a+b for a,b in zip(self.c, v.c)]
        return self
    # Component-wise subtraction.
    def __sub__(self, v): return Vector([a-b for a,b in zip(self.c, v.c)])
    def __isub__(self, v):
        self.c = [a-b for a,b in zip(self.c, v.c)]
        return self
    # Scalar multiplication (both v*k and k*v).
    def __mul__(self, k): return Vector([a*k for a in self.c])
    def __rmul__(self, k): return (self * k)
    def __imul__(self, k):
        self.c = [a*k for a in self.c]
        return self
    # Scalar division; dividing by zero deliberately yields the zero
    # vector (or a no-op for /=) instead of raising.
    def __div__(self, k):
        try:
            return Vector([a/k for a in self.c])
        except ZeroDivisionError:
            return Vector()
    # NOTE(review): k / v is evaluated as v / k here -- mathematically
    # questionable, but preserved for backward compatibility.
    def __rdiv__(self, k): return (self / k)
    def __idiv__(self, k):
        try:
            self.c = [a/k for a in self.c]
        except ZeroDivisionError: pass
        return self
    def Magnitude(self):
        """Return the Euclidean length."""
        return sqrt(sum([a ** 2 for a in self.c]))
    def Unitize(self):
        """Scale to unit length in place; the zero vector is unchanged."""
        magnitude = self.Magnitude()
        try:
            self.c = [a/magnitude for a in self.c]
        except ZeroDivisionError: pass
    def ScalarProduct(self, v):
        """Dot product with *v*."""
        x,y,z = self.c
        vx,vy,vz = v.c
        return (x*vx + y*vy + z*vz)
    def VectorProduct(self, v):
        """Cross product self x v."""
        x,y,z = self.c
        vx,vy,vz = v.c
        return Vector([ y * vz - z * vy,
                        -(x * vz - z * vx),
                        x * vy - y * vx ])
| Python |
from array import array
from random import *
class Framebuffer:
stack = []
def __init__(self):
stack = []
def Create(self, w = 800, h = 600):
try:
buffer = array('B')
for i in range(0, w*h*3): buffer.append(0)
self.stack.append([buffer,w,h,None])
return len(self.stack) - 1
except MemoryError:
print "ERROR: Create() failed!"
return -1
def Register(self, slot, cb):
if (callable(cb)): self.stack[slot][3] = cb
def Process(self, slot):
#try:
buffer,w,h,cb = self.stack[slot]
if (cb == None): return
for line in range(0, h):
for pixel in range(0, w):
p = line*w*3 + pixel*3
ret = cb(pixel,line,p,p+1,p+2,buffer)
#except:
# print "ERROR: Slot was not Process()ed."
def Write(self, slot, filename = "undefined"):
try:
buffer,w,h,cb = self.stack[slot]
f = open("%s.ppm" % filename, "wb")
f.write("P6\n# this is a comment.\n%i %i\n255\n" % (w,h))
buffer.tofile(f)
f.close()
except:
print "ERROR: WritePPM() failed!"
def Get(self, slot):
try: return tuple(stack[slot][1], stack[slot][2]) # (w,h)
except: return None
# ------------------------------------------------------------------------------
if __name__ == "__main__":
    # Demo: render one 1280x720 buffer of random-colored horizontal lines
    # and one default-sized buffer of per-pixel noise, then dump both.
    lastY=-1
    _c=None
    colors = []
    def CreateSomeRandomColors():
        # Build a palette of 21 random RGB tuples.
        for i in range(0, 21):
            colors[len(colors):] = [(randint(0,255),randint(0,255),randint(0,255))]
    def Lines(x,y,r,g,b,pixel):
        # Pixel callback: pick one random palette color per scanline.
        global lastY,_c
        if (lastY != y):
            _c = colors[randint(0,20)]
            lastY = y
        pixel[r],pixel[g],pixel[b] = _c[0],_c[1],_c[2]
    def Noise(x,y,r,g,b,pixel):
        # Pixel callback: a fresh random palette color for every pixel.
        global _c
        _c = colors[randint(0,20)]
        pixel[r],pixel[g],pixel[b] = _c[0],_c[1],_c[2]
    CreateSomeRandomColors()
    f = Framebuffer()
    slot1 = f.Create(1280,720)
    f.Register(slot1, Lines)
    f.Process(slot1)
    slot2 = f.Create()
    f.Register(slot2, Noise)
    f.Process(slot2)
    f.Write(slot1, "lines")
    f.Write(slot2, "noise")
| Python |
from Framebuffer import *
from Ray import *
from Sphere import *
# Render a silhouette of a sphere of radius 100 centered at (400, 300, -10)
# by firing one ray per framebuffer pixel.
s = Sphere([400,300,-10], 100)
ray = Ray()
def trace(x,y,r,g,b,px):
    # Pixel callback: reuse the single global ray, moving its origin to
    # (x, y); the direction stays at Ray's default. Paint white on a hit.
    # NOTE(review): s.hit accumulates distances across every call and is
    # never read -- a potential memory hog for large buffers.
    global s
    ray.o.c[0] = x
    ray.o.c[1] = y
    if (s.Intersect(ray) == True):
        px[r],px[g],px[b] = [255,255,255]
f = Framebuffer()
slot = f.Create(1280, 720)
f.Register(slot, trace)
f.Process(slot)
f.Write(slot, "sphere")
| Python |
from Vector import *
class Ray:
    """A ray with origin o and direction d, both Vectors.

    The default direction points down the -z axis.
    """
    def __init__(self, _o=[0,0,0], _d=[0,0,-1]):
        # Bug fix: copy the coordinate lists. The originals were handed to
        # Vector by reference, so every default-constructed Ray shared
        # (and could corrupt) the same mutable default lists.
        self.o = Vector(list(_o))
        self.d = Vector(list(_d))
    def __str__(self): return "Ray %s %s" % (str(self.o), str(self.d))
    def P(self,t):
        """Return the point o + t*d along the ray."""
        return self.o + t * self.d
| Python |
from Vector import *
from Ray import *
class Triangle:
    """A triangle with vertices A, B, C; intersection is a stub."""
    def __init__(self, _A=[0,0,0], _B=[1,0,0], _C=[0,1,0]):
        # Copy the vertex lists to avoid aliasing the mutable defaults.
        self.A = Vector(list(_A))
        self.B = Vector(list(_B))
        self.C = Vector(list(_C))
        # Bug fix: the original bound a local 'hit', leaving all
        # instances sharing the class-level list.
        self.hit = [0]
    def Intersect(self, r):
        """Stub: always reports a miss. TODO: implement."""
        return False
| Python |
#!/usr/bin/python
# Copyright 2011 Google, Inc. All Rights Reserved.
# simple script to walk source tree looking for third-party licenses
# dumps resulting html page to stdout
import os, re, mimetypes, sys
# read source directories to scan from command line
# read source directories to scan from command line
SOURCE = sys.argv[1:]
# regex to find /* */ style comment blocks
COMMENT_BLOCK = re.compile(r"(/\*.+?\*/)", re.MULTILINE | re.DOTALL)
# regex used to detect if comment block is a license
COMMENT_LICENSE = re.compile(r"(license)", re.IGNORECASE)
COMMENT_COPYRIGHT = re.compile(r"(copyright)", re.IGNORECASE)
# MIME types whose files should never be scanned for comment blocks.
EXCLUDE_TYPES = [
    "application/xml",
    "image/png",
    ]
# list of known licenses; keys are derived by stripping all whitespace and
# forcing to lowercase to help combine multiple files that have same license.
KNOWN_LICENSES = {}
class License:
  """One distinct license text plus every file it applies to."""

  def __init__(self, license_text):
    self.license_text = license_text
    self.filenames = []

  def add_file(self, filename):
    """Record *filename* as covered by this license, skipping duplicates."""
    already_listed = filename in self.filenames
    if not already_listed:
      self.filenames.append(filename)
# Canonicalization pattern: strips anything that is not a word character.
LICENSE_KEY = re.compile(r"[^\w]")
def find_license(license_text):
  """Return the canonical License for *license_text*, creating it if new.

  The lookup key drops all non-word characters and lowercases, so texts
  differing only in whitespace/punctuation share a single License entry.
  """
  # TODO(alice): a lot these licenses are almost identical Apache licenses.
  # Most of them differ in origin/modifications. Consider combining similar
  # licenses.
  key = LICENSE_KEY.sub("", license_text).lower()
  known = KNOWN_LICENSES.get(key)
  if known is None:
    known = License(license_text)
    KNOWN_LICENSES[key] = known
  return known
def discover_license(exact_path, filename):
  """Harvest license information for one file into KNOWN_LICENSES.

  Two sources are recognized: a '*LICENSE' file, whose whole content
  applies to the filename it prefixes, and /* ... */ comment blocks in
  the file itself that mention both 'license' and 'copyright'.
  """
  # when filename ends with LICENSE, assume applies to filename prefixed
  if filename.endswith("LICENSE"):
    with open(exact_path) as file:
      license_text = file.read()
    target_filename = filename[:-len("LICENSE")]
    if target_filename.endswith("."): target_filename = target_filename[:-1]
    find_license(license_text).add_file(target_filename)
    return None
  # try searching for license blocks in raw file
  # Bug fix: mimetypes.guess_type() returns a (type, encoding) tuple, so
  # the original 'mimetype in EXCLUDE_TYPES' never matched and the
  # exclusion list was dead code; compare the type string itself.
  mimetype, _encoding = mimetypes.guess_type(filename)
  if mimetype in EXCLUDE_TYPES: return None
  with open(exact_path) as file:
    raw_file = file.read()
  # include comments that have both "license" and "copyright" in the text
  for comment in COMMENT_BLOCK.finditer(raw_file):
    comment = comment.group(1)
    if COMMENT_LICENSE.search(comment) is None: continue
    if COMMENT_COPYRIGHT.search(comment) is None: continue
    find_license(comment).add_file(filename)
# Walk every requested tree, harvesting license info from each file.
for source in SOURCE:
  for root, dirs, files in os.walk(source):
    for name in files:
      discover_license(os.path.join(root, name), name)
# Emit a single HTML report to stdout: one section per distinct license
# text, listing the files it covers followed by the text itself.
print "<html><head><style> body { font-family: sans-serif; } pre { background-color: #eeeeee; padding: 1em; white-space: pre-wrap; } </style></head><body>"
for license in KNOWN_LICENSES.values():
  print "<h3>Notices for files:</h3><ul>"
  filenames = license.filenames
  filenames.sort()
  for filename in filenames:
    print "<li>%s</li>" % (filename)
  print "</ul>"
  print "<pre>%s</pre>" % license.license_text
print "</body></html>"
| Python |
# Helper to set a breakpoint in App Engine. Requires Python >= 2.5.
import os
import pdb
import sys
class MyPdb(pdb.Pdb):
  """Pdb subclass that routes evaluated statements' I/O through the
  debugger's own streams, and adds a 'vars'/'v' locals dump command."""
  def default(self, line):
    # Save/set + restore stdin/stdout around self.default() call.
    # (This is only needed for Python 2.5.)
    save_stdout = sys.stdout
    save_stdin = sys.stdin
    try:
      # Point the process-wide streams at the debugger's streams (e.g.
      # the /dev/tty handles opened by BREAKPOINT below) so that
      # statements typed at the prompt print to the debugger terminal.
      sys.stdin = self.stdin
      sys.stdout = self.stdout
      return pdb.Pdb.default(self, line)
    finally:
      sys.stdout = save_stdout
      sys.stdin = save_stdin
  def do_vars(self, arg):
    # 'vars' command: print each local of the current frame, sorted by name.
    for name, value in sorted(self.curframe.f_locals.iteritems()):
      print >>self.stdout, name, '=', repr(value)
  do_v = do_vars  # 'v' is shorthand for 'vars'
def BREAKPOINT():
  """Drop into an interactive MyPdb session attached to /dev/tty.

  Debugging starts in the *caller's* frame, not here.
  """
  # Recover the real 'os' module through os.open's defining globals --
  # presumably to bypass App Engine's sandbox wrappers around the os
  # API (TODO confirm; see the module header comment).
  os_mod = os.open.func_globals['os']
  os_open = os_mod.open
  os_fdopen = os_mod.fdopen
  tty = '/dev/tty'
  stdin_fd = os_open(tty, 0)   # flags 0 == O_RDONLY
  stdout_fd = os_open(tty, 1)  # flags 1 == O_WRONLY
  stdin = os_fdopen(stdin_fd, 'r')
  stdout = os_fdopen(stdout_fd, 'w')
  p = MyPdb(None, stdin, stdout)
  # sys._getframe(1) == the frame that called BREAKPOINT().
  p.set_trace(sys._getframe(1))
| Python |
def webapp_add_wsgi_middleware(app):
  """Wrap *app* with the appstats recording middleware when available.

  Returns *app* unchanged (after logging the failure) if the appstats
  package cannot be imported.
  """
  import logging  # Bug fix: 'logging' was used below but never imported,
                  # so the ImportError branch raised NameError instead of
                  # logging the failure.
  try:
    from google.appengine.ext.appstats import recording
  except ImportError as err:
    logging.info('Failed to import recording: %s', err)
  else:
    app = recording.appstats_wsgi_middleware(app)
  return app
# appstats tuning knobs, read by the appstats recording module.
appstats_KEY_DISTANCE = 10
appstats_MAX_REPR = 1000
appstats_MAX_STACK = 20
# NOTE(review): the leading '!' presumably negates the PATH_INFO regex
# (record everything except favicon requests) -- confirm against the
# appstats documentation.
appstats_FILTER_LIST = [
  {'PATH_INFO': '!^/favicon\.ico$'},
]
| Python |
# Startup file for interactive prompt, used by "make python".
from ndb import utils
utils.tweak_logging()
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_file_stub
from google.appengine.api import memcache
from google.appengine.api.memcache import memcache_stub
from google.appengine.api import taskqueue
from google.appengine.api.taskqueue import taskqueue_stub
from ndb.model import *
from ndb.query import *
# Wire up an in-memory API proxy with datastore, memcache and taskqueue
# stubs so the interactive session runs without a dev_appserver.
apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
ds_stub = datastore_file_stub.DatastoreFileStub('_', None)
apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', ds_stub)
mc_stub = memcache_stub.MemcacheServiceStub()
apiproxy_stub_map.apiproxy.RegisterStub('memcache', mc_stub)
tq_stub = taskqueue_stub.TaskQueueServiceStub()
apiproxy_stub_map.apiproxy.RegisterStub('taskqueue', tq_stub)
# The stubs key everything off the application id; '_' matches the
# DatastoreFileStub registered above.
os.environ['APPLICATION_ID'] = '_'
class Employee(Model):
  # Sample model for interactive experimentation.
  name = StringProperty()
  age = IntegerProperty()
  rank = IntegerProperty()
  @classmethod
  def demographic(cls, min_age, max_age):
    # Employees whose age lies in [min_age, max_age].
    return cls.query().filter(AND(cls.age >= min_age, cls.age <= max_age))
  @classmethod
  def ranked(cls, rank):
    # Employees of the given rank, youngest first.
    return cls.query(cls.rank == rank).order(cls.age)
class Manager(Employee):
  # A Manager embeds its direct reports as repeated structured values.
  report = StructuredProperty(Employee, repeated=True)
# Seed a few entities: three employees plus a boss who reports them.
reports = []
for (name, age, rank) in [('Joe', 21, 1), ('Jim', 30, 2), ('Jane', 23, 1)]:
  emp = Employee(name=name, age=age, rank=rank)
  reports.append(emp)
# Kick off the employee puts asynchronously, save the boss, then wait
# for all futures to complete.
f1 = put_multi_async(reports)
boss = Manager(name='Fred', age=42, rank=4, report=reports)
f2 = boss.put_async()
f2.get_result()
for f in f1:
  f.get_result()
class BlobTest(Model):
  # Exercises indexed blob properties (including non-ASCII bytes below).
  data = BlobProperty(indexed=True)
b1 = BlobTest(data='a')
b1.put()
b2 = BlobTest(data='\xff\x00')
b2.put()
# Handy short aliases for poking at ndb internals interactively.
from ndb import tasklets
ctx = tasklets.get_context()
conn = ctx._conn
E = Employee
M = Manager
B = BlobTest
| Python |
"""Tests for eventloop.py."""
import os
import time
import unittest
from google.appengine.datastore import datastore_rpc
from ndb import eventloop, test_utils
class EventLoopTests(test_utils.DatastoreTest):
  """Tests for the ndb event loop: queue ordering, run(), RPC handling."""
  def setUp(self):
    super(EventLoopTests, self).setUp()
    # Drop any cached loop so each test gets a fresh one;
    # get_event_loop() memoizes its instance via os.environ.
    if eventloop._EVENT_LOOP_KEY in os.environ:
      del os.environ[eventloop._EVENT_LOOP_KEY]
    self.ev = eventloop.get_event_loop()
  def testQueueTasklet(self):
    """Queued calls are stored sorted by absolute deadline, args intact."""
    def f(): return 1
    def g(): return 2
    def h(): return 3
    t_before = time.time()
    # Enqueue out of order (delays 1, 3, 2) to verify deadline sorting.
    eventloop.queue_call(1, f, 42, 'hello', a=1, b=2)
    eventloop.queue_call(3, h, c=3, d=4)
    eventloop.queue_call(2, g, 100, 'abc')
    t_after = time.time()
    self.assertEqual(len(self.ev.queue), 3)
    [(t1, f1, a1, k1), (t2, f2, a2, k2), (t3, f3, a3, k3)] = self.ev.queue
    self.assertTrue(t1 < t2)
    self.assertTrue(t2 < t3)
    # Each deadline is (enqueue time + delay) up to scheduling jitter,
    # which is bounded by the time the three queue_call()s took.
    self.assertTrue(abs(t1 - (t_before + 1)) < t_after - t_before)
    self.assertTrue(abs(t2 - (t_before + 2)) < t_after - t_before)
    self.assertTrue(abs(t3 - (t_before + 3)) < t_after - t_before)
    self.assertEqual(f1, f)
    self.assertEqual(f2, g)
    self.assertEqual(f3, h)
    self.assertEqual(a1, (42, 'hello'))
    self.assertEqual(a2, (100, 'abc'))
    self.assertEqual(a3, ())
    self.assertEqual(k1, {'a': 1, 'b': 2})
    self.assertEqual(k2, {})
    self.assertEqual(k3, {'c': 3, 'd': 4})
    # Delete queued events (they would fail or take a long time).
    ev = eventloop.get_event_loop()
    ev.queue = []
    ev.rpcs = {}
  def testRun(self):
    """run() executes queued callbacks in deadline order, then returns."""
    record = []
    def foo(arg):
      record.append(arg)
    eventloop.queue_call(0.2, foo, 42)
    eventloop.queue_call(0.1, foo, arg='hello')
    eventloop.run()
    self.assertEqual(record, ['hello', 42])
  def testRunWithRpcs(self):
    """run() also waits on queued RPCs and fires completion callbacks."""
    record = []
    def foo(arg):
      record.append(arg)
    eventloop.queue_call(0.1, foo, 42)
    config = datastore_rpc.Configuration(on_completion=foo)
    rpc = self.conn.async_get(config, [])
    self.assertEqual(len(rpc.rpcs), 1)
    eventloop.queue_rpc(rpc)
    eventloop.run()
    # The RPC completion callback (passed the low-level rpc) fires before
    # the timed call.
    self.assertEqual(record, [rpc.rpcs[0], 42])
    self.assertEqual(rpc.state, 2) # TODO: Use apiproxy_rpc.RPC.FINISHING.
def main():
  """Run this module's test suite."""
  unittest.main()
if __name__ == '__main__':
  main()
| Python |
"""Higher-level Query wrapper.
There are perhaps too many query APIs in the world.
The fundamental API here overloads the 6 comparisons operators to
represent filters on property values, and supports AND and OR
operations (implemented as functions -- Python's 'and' and 'or'
operators cannot be overloaded, and the '&' and '|' operators have a
priority that conflicts with the priority of comparison operators).
For example:
class Employee(Model):
name = StringProperty()
age = IntegerProperty()
rank = IntegerProperty()
@classmethod
def demographic(cls, min_age, max_age):
return cls.query().filter(AND(cls.age >= min_age, cls.age <= max_age))
@classmethod
def ranked(cls, rank):
return cls.query(cls.rank == rank).order(cls.age)
  for emp in Employee.demographic(42, 65):
    print emp.name, emp.age, emp.rank
The 'in' operator cannot be overloaded, but is supported through the
IN() method. For example:
Employee.query().filter(Employee.rank.IN([4, 5, 6]))
Sort orders are supported through the order() method; unary minus is
overloaded on the Property class to represent a descending order:
Employee.query().order(Employee.name, -Employee.age)
Besides using AND() and OR(), filters can also be combined by
repeatedly calling .filter():
q1 = Employee.query() # A query that returns all employees
q2 = q1.filter(Employee.age >= 30) # Only those over 30
q3 = q2.filter(Employee.age < 40) # Only those in their 30s
A further shortcut is calling .filter() with multiple arguments; this
implies AND():
q1 = Employee.query() # A query that returns all employees
q3 = q1.filter(Employee.age >= 30,
Employee.age < 40) # Only those in their 30s
And finally you can also pass one or more filter expressions directly
to the .query() method:
q3 = Employee.query(Employee.age >= 30,
Employee.age < 40) # Only those in their 30s
Query objects are immutable, so these methods always return a new
Query object; the above calls to filter() do not affect q1. (On the
other hand, operations that are effectively no-ops may return the
original Query object.)
Sort orders can also be combined this way, and .filter() and .order()
calls may be intermixed:
q4 = q3.order(-Employee.age)
q5 = q4.order(Employee.name)
q6 = q5.filter(Employee.rank == 5)
Again, multiple .order() calls can be combined:
q5 = q3.order(-Employee.age, Employee.name)
The simplest way to retrieve Query results is a for-loop:
for emp in q3:
print emp.name, emp.age
Some other methods to run a query and access its results:
q.iter() # Return an iterator; same as iter(q) but more flexible
q.map(callback) # Call the callback function for each query result
q.fetch(N) # Return a list of the first N results
q.get() # Return the first result
q.count(N) # Return the number of results, with a maximum of N
q.fetch_page(N, start_cursor=cursor) # Return (results, cursor, has_more)
All of the above methods take a standard set of additional query
options, either in the form of keyword arguments such as
keys_only=True, or as QueryOptions object passed with
options=QueryOptions(...). The most important query options are:
keys_only: bool, if set the results are keys instead of entities
limit: int, limits the number of results returned
offset: int, skips this many results first
start_cursor: Cursor, start returning results after this position
end_cursor: Cursor, stop returning results after this position
batch_size: int, hint for the number of results returned per RPC
prefetch_size: int, hint for the number of results in the first RPC
produce_cursors: bool, return Cursor objects with the results
For additional (obscure) query options and more details on them,
including an explanation of Cursors, see datastore_query.py.
All of the above methods except for iter() have asynchronous variants
as well, which return a Future; to get the operation's ultimate
result, yield the Future (when inside a tasklet) or call the Future's
get_result() method (outside a tasklet):
q.map_async(callback) # Callback may be a task or a plain function
q.fetch_async(N)
q.get_async()
q.count_async(N)
q.fetch_page_async(N, start_cursor=cursor)
Finally, there's an idiom to efficiently loop over the Query results
in a tasklet, properly yielding when appropriate:
it = q.iter()
while (yield it.has_next_async()):
emp = it.next()
print emp.name, emp.age
"""
from __future__ import with_statement
__author__ = 'guido@google.com (Guido van Rossum)'
import heapq
import itertools
import sys
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.datastore import datastore_pb
from google.appengine.datastore import datastore_query
from google.appengine.datastore import datastore_rpc
from google.appengine.ext import gql
from ndb import context
from ndb import model
from ndb import tasklets
__all__ = ['Binding', 'AND', 'OR', 'parse_gql', 'Query',
'QueryOptions', 'Cursor']
# Re-export some useful classes from the lower-level module.
QueryOptions = datastore_query.QueryOptions
Cursor = datastore_query.Cursor
# Some local renamings.
_ASC = datastore_query.PropertyOrder.ASCENDING
_DESC = datastore_query.PropertyOrder.DESCENDING
_AND = datastore_query.CompositeFilter.AND
# Pseudo-property name used to filter/sort on the entity key itself.
_KEY = datastore_types._KEY_SPECIAL_PROPERTY
# Table of supported comparison operators.
_OPS = frozenset(['=', '!=', '<', '<=', '>', '>=', 'in'])
# Default limit value. (Yes, the datastore uses int32!)
_MAX_LIMIT = 2**31 - 1
# TODO: Once CL/21689469 is submitted, get rid of this and its callers.
def _make_unsorted_key_value_map(pb, property_names):
  """Like _make_key_value_map() but doesn't sort the values."""
  value_map = {}
  for name in property_names:
    value_map[name] = []
  # Collect comparable values from the pb's properties.
  # NOTE: Unindexed properties are skipped.
  for prop in pb.property_list():
    values = value_map.get(prop.name())
    if values is not None:
      values.append(
          datastore_types.PropertyValueToKeyValue(prop.value()))
  # The special key pseudo-property (if requested) comes from the entity key.
  if _KEY in value_map:
    value_map[_KEY] = [datastore_types.ReferenceToKeyValue(pb.key())]
  return value_map
class RepeatedStructuredPropertyPredicate(datastore_query.FilterPredicate):
  """In-memory post-filter matching one aligned 'row' of a repeated
  structured property; see _apply() for the semantics."""
  def __init__(self, match_keys, pb, key_prefix):
    super(RepeatedStructuredPropertyPredicate, self).__init__()
    self.match_keys = match_keys
    # The wanted values come from pb, keyed *without* the structured
    # property's name prefix.
    stripped_keys = []
    for key in match_keys:
      assert key.startswith(key_prefix), key
      stripped_keys.append(key[len(key_prefix):])
    value_map = _make_unsorted_key_value_map(pb, stripped_keys)
    self.match_values = tuple(value_map[key][0] for key in stripped_keys)
  def _get_prop_names(self):
    return frozenset(self.match_keys)
  def __call__(self, pb):
    return self._apply(_make_unsorted_key_value_map(pb, self.match_keys))
  def _apply(self, key_value_map):
    """Apply the filter to values extracted from an entity.
    Think of self.match_keys and self.match_values as representing a
    table with one row. For example:
      match_keys = ('name', 'age', 'rank')
      match_values = ('Joe', 24, 5)
    (Except that in reality, the values are represented by tuples
    produced by datastore_types.PropertyValueToKeyValue().)
    represents this table:
      | name    | age   | rank   |
      +---------+-------+--------+
      | 'Joe'   |    24 |      5 |
    Think of key_value_map as a table with the same structure but
    (potentially) many rows. This represents a repeated structured
    property of a single entity. For example:
      {'name': ['Joe', 'Jane', 'Dick'],
       'age': [24, 21, 23],
       'rank': [5, 1, 2]}
    represents this table:
      | name    | age   | rank   |
      +---------+-------+--------+
      | 'Joe'   |    24 |      5 |
      | 'Jane'  |    21 |      1 |
      | 'Dick'  |    23 |      2 |
    We must determine whether at least one row of the second table
    exactly matches the first table. We need this class because the
    datastore, when asked to find an entity with name 'Joe', age 24
    and rank 5, will include entities that have 'Joe' somewhere in the
    name column, 24 somewhere in the age column, and 5 somewhere in
    the rank column, but not all aligned on a single row. Such an
    entity should not be considered a match.
    """
    columns = []
    for key in self.match_keys:
      column = key_value_map.get(key)
      if not column: # None, or an empty list.
        return False # If any column is empty there can be no match.
      columns.append(column)
    # Use izip to transpose the columns into rows.
    return self.match_values in itertools.izip(*columns)
  # Don't implement _prune()! It would mess up the row correspondence
  # within columns.
class CompositePostFilter(datastore_query.CompositeFilter):
  """CompositeFilter variant that can be applied in memory to a pb."""
  def __call__(self, pb):
    prop_names = self._get_prop_names()
    return self._apply(_make_unsorted_key_value_map(pb, prop_names))
class Binding(object):
  """A named placeholder value; used with GQL, for now unsupported."""
  def __init__(self, value=None, key=None):
    """Constructor. The value may be changed later."""
    self.value = value
    self.key = key
  def __repr__(self):
    return '%s(%r, %r)' % (self.__class__.__name__, self.value, self.key)
  def __eq__(self, other):
    # TODO: When comparing tree nodes containing Bindings, Bindings
    # should be compared by object identity?
    if isinstance(other, Binding):
      return (self.value, self.key) == (other.value, other.key)
    return NotImplemented
  def resolve(self):
    """Return the value currently associated with this Binding."""
    result = self.value
    assert not isinstance(result, Binding), 'Recursive Binding'
    return result
class Node(object):
  """Base class for filter expression tree nodes.
  Tree nodes are considered immutable, even though they can contain
  Binding instances, which are not. In particular, two identical
  trees may be represented by the same Node object in different
  contexts.
  """
  def __new__(cls):
    # Node itself is abstract; only subclasses may be instantiated.
    assert cls is not Node, 'Cannot instantiate Node, only a subclass'
    return super(Node, cls).__new__(cls)
  def __eq__(self, other):
    raise NotImplementedError
  def __ne__(self, other):
    result = self.__eq__(other)
    if result is NotImplemented:
      return result
    return not result
  def __unordered(self, other):
    raise TypeError('Nodes cannot be ordered')
  __le__ = __lt__ = __ge__ = __gt__ = __unordered
  def _to_filter(self, bindings, post=False):
    """Helper to convert to datastore_query.Filter, or None."""
    raise NotImplementedError
  def _post_filters(self):
    """Helper to extract post-filter Nodes, if any."""
    return None
  def resolve(self):
    """Extract the Binding's value if necessary."""
    raise NotImplementedError
class FalseNode(Node):
  """Tree node for an always-failing filter."""
  def __new__(cls):
    # Bypasses Node.__new__'s signature; all FalseNodes are equivalent.
    return super(Node, cls).__new__(cls)
  def __eq__(self, other):
    if isinstance(other, FalseNode):
      return True
    return NotImplemented
  def _to_filter(self, bindings, post=False):
    if post:
      return None
    # A filter that matches nothing has no datastore representation;
    # there's no point submitting a query that will never return anything.
    raise datastore_errors.BadQueryError(
      'Cannot convert FalseNode to predicate')
  def resolve(self):
    # Contains no Bindings, hence nothing to resolve.
    return self
class FilterNode(Node):
  """Tree node for a single filter expression."""
  def __new__(cls, name, opsymbol, value):
    # '!=' and 'in' are not native datastore operators; they are
    # rewritten into disjunctions of native filters here, so __new__
    # may return a node of a different class.
    if opsymbol == '!=':
      # x != v  ==>  (x < v) OR (x > v)
      n1 = FilterNode(name, '<', value)
      n2 = FilterNode(name, '>', value)
      return DisjunctionNode(n1, n2)
    if opsymbol == 'in' and not isinstance(value, Binding):
      # x in (a, b)  ==>  (x = a) OR (x = b); an empty sequence matches
      # nothing at all.
      assert isinstance(value, (list, tuple, set, frozenset)), repr(value)
      nodes = [FilterNode(name, '=', v) for v in value]
      if not nodes:
        return FalseNode()
      if len(nodes) == 1:
        return nodes[0]
      return DisjunctionNode(*nodes)
    self = super(FilterNode, cls).__new__(cls)
    self.__name = name
    self.__opsymbol = opsymbol
    self.__value = value
    return self
  def _sort_key(self):
    return (self.__name, self.__opsymbol, self.__value)
  def __repr__(self):
    return '%s(%r, %r, %r)' % (self.__class__.__name__,
                               self.__name, self.__opsymbol, self.__value)
  def __eq__(self, other):
    if not isinstance(other, FilterNode):
      return NotImplemented
    # TODO: Should nodes with values that compare equal but have
    # different types really be considered equal? IIUC the datastore
    # doesn't consider 1 equal to 1.0 when it compares property values.
    return (self.__name == other.__name and
            self.__opsymbol == other.__opsymbol and
            self.__value == other.__value)
  def _to_filter(self, bindings, post=False):
    # FilterNodes are always native (pre-)filters, never post-filters.
    if post:
      return None
    # '!=' and 'in' were expanded away in __new__ (except Binding 'in',
    # which must be resolve()d before conversion).
    assert self.__opsymbol not in ('!=', 'in'), repr(self.__opsymbol)
    value = self.__value
    if isinstance(value, Binding):
      # Record the Binding so callers can re-bind later, then use its
      # current value for this conversion.
      bindings[value.key] = value
      value = value.resolve()
    # TODO: validate the resolved value.
    return datastore_query.make_filter(self.__name.decode('utf-8'),
                                       self.__opsymbol, value)
  def resolve(self):
    if self.__opsymbol == 'in':
      # Expand a Binding-valued IN now that its value is known.
      assert isinstance(self.__value, Binding), 'Unexpanded non-Binding IN'
      return FilterNode(self.__name, self.__opsymbol, self.__value.resolve())
    else:
      return self
class PostFilterNode(Node):
  """Tree node representing an in-memory filtering operation.
  This is used to represent filters that cannot be executed by the
  datastore, for example a query for a structured value.
  """
  def __new__(cls, predicate):
    instance = super(PostFilterNode, cls).__new__(cls)
    instance.predicate = predicate
    return instance
  def __repr__(self):
    return '%s(%s)' % (self.__class__.__name__, self.predicate)
  def __eq__(self, other):
    if not isinstance(other, PostFilterNode):
      return NotImplemented
    # Identity comparison: predicates are arbitrary callables.
    return self is other
  def _to_filter(self, bindings, post=False):
    # Only meaningful in the post-filtering phase.
    return self.predicate if post else None
  def resolve(self):
    # Contains no Bindings, hence nothing to resolve.
    return self
class ConjunctionNode(Node):
    """Tree node representing a Boolean AND operator on two or more nodes."""

    def __new__(cls, *nodes):
        """Create a conjunction, normalizing along the way.

        Disjunction subnodes are distributed out, so the result is either
        a single ConjunctionNode or a DisjunctionNode of ConjunctionNodes;
        a single-node conjunction collapses to that node.
        """
        assert nodes, 'ConjunctionNode requires at least one node'
        if len(nodes) == 1:
            return nodes[0]
        clauses = [[]]  # Outer: Disjunction; inner: Conjunction.
        # TODO: Remove duplicates?
        for node in nodes:
            assert isinstance(node, Node), repr(node)
            if isinstance(node, DisjunctionNode):
                # Apply the distributive law: (X or Y) and (A or B) becomes
                # (X and A) or (X and B) or (Y and A) or (Y and B).
                new_clauses = []
                for clause in clauses:
                    for subnode in node:
                        new_clause = clause + [subnode]
                        new_clauses.append(new_clause)
                clauses = new_clauses
            elif isinstance(node, ConjunctionNode):
                # Apply half of the distributive law: (X or Y) and A becomes
                # (X and A) or (Y and A).
                for clause in clauses:
                    clause.extend(node.__nodes)
            else:
                # Ditto.
                for clause in clauses:
                    clause.append(node)
        if not clauses:
            return FalseNode()
        if len(clauses) > 1:
            return DisjunctionNode(*[ConjunctionNode(*clause)
                                     for clause in clauses])
        self = super(ConjunctionNode, cls).__new__(cls)
        self.__nodes = clauses[0]
        return self

    def __iter__(self):
        return iter(self.__nodes)

    def __repr__(self):
        # BUG FIX: this previously printed 'OR(...)' (copy-pasted from
        # DisjunctionNode); a conjunction must render as AND(...).
        return 'AND(%s)' % (', '.join(map(str, self.__nodes)))

    def __eq__(self, other):
        if not isinstance(other, ConjunctionNode):
            return NotImplemented
        return self.__nodes == other.__nodes

    def _to_filter(self, bindings, post=False):
        """Combine the children matching this pass into one composite filter."""
        filters = filter(None,
                         (node._to_filter(bindings, post=post)
                          for node in self.__nodes
                          if isinstance(node, PostFilterNode) == post))
        if not filters:
            return None
        if len(filters) == 1:
            return filters[0]
        if post:
            return CompositePostFilter(_AND, filters)
        return datastore_query.CompositeFilter(_AND, filters)

    def _post_filters(self):
        """Return the subtree of in-memory filters, or None if there are none."""
        post_filters = [node for node in self.__nodes
                        if isinstance(node, PostFilterNode)]
        if not post_filters:
            return None
        if len(post_filters) == 1:
            return post_filters[0]
        if post_filters == self.__nodes:
            return self
        return ConjunctionNode(*post_filters)

    def resolve(self):
        """Resolve children; return self when nothing changed."""
        nodes = [node.resolve() for node in self.__nodes]
        if nodes == self.__nodes:
            return self
        return ConjunctionNode(*nodes)
class DisjunctionNode(Node):
    """Tree node representing a Boolean OR operator on two or more nodes."""

    def __new__(cls, *nodes):
        """Create a disjunction, flattening nested disjunctions.

        A single-node disjunction collapses to that node.
        """
        assert nodes, 'DisjunctionNode requires at least one node'
        if len(nodes) == 1:
            return nodes[0]
        inst = super(DisjunctionNode, cls).__new__(cls)
        inst.__nodes = []
        # TODO: Remove duplicates?
        for node in nodes:
            assert isinstance(node, Node), repr(node)
            if isinstance(node, DisjunctionNode):
                # Splice a nested OR's children in directly.
                inst.__nodes.extend(node.__nodes)
            else:
                inst.__nodes.append(node)
        return inst

    def __iter__(self):
        return iter(self.__nodes)

    def __repr__(self):
        return 'OR(%s)' % (', '.join(map(str, self.__nodes)))

    def __eq__(self, other):
        if not isinstance(other, DisjunctionNode):
            return NotImplemented
        return self.__nodes == other.__nodes

    def resolve(self):
        """Resolve children; return self when nothing changed."""
        resolved = [node.resolve() for node in self.__nodes]
        if resolved == self.__nodes:
            return self
        return DisjunctionNode(*resolved)
# AND and OR are the preferred public aliases for the node classes above.
AND = ConjunctionNode
OR = DisjunctionNode
def _args_to_val(func, args, bindings):
    """Helper for GQL parsing: turn a parsed (func, args) pair into a value.

    Positional/named argument references become shared Binding objects
    (*bindings* is updated in place); literals are unwrapped.
    """
    vals = []
    for arg in args:
        if isinstance(arg, (int, long, basestring)):
            # An argument reference; reuse a Binding we already created.
            val = bindings.get(arg)
            if val is None:
                val = Binding(None, arg)
                bindings[arg] = val
        elif isinstance(arg, gql.Literal):
            val = arg.Get()
        else:
            assert False, 'Unexpected arg (%r)' % arg
        vals.append(val)
    if func == 'nop':
        assert len(vals) == 1, '"nop" requires exactly one value'
        return vals[0]
    if func == 'list':
        return vals
    if func == 'key':
        if len(vals) == 1 and isinstance(vals[0], basestring):
            return model.Key(urlsafe=vals[0])
        assert False, 'Unexpected key args (%r)' % (vals,)
    assert False, 'Unexpected func (%r)' % func
# TODO: Not everybody likes GQL.
# TODO: GQL doesn't support querying for structured property values.
def parse_gql(query_string):
    """Parse a GQL query string.

    Args:
        query_string: Full GQL query, e.g. 'SELECT * FROM Kind WHERE prop = 1'.

    Returns:
        A tuple (query, options, bindings) where query is a Query instance,
        options a datastore_query.QueryOptions instance, and bindings a dict
        mapping integers and strings to Binding instances.
    """
    gql_qry = gql.GQL(query_string)
    ancestor = None
    flt = gql_qry.filters()
    bindings = {}
    filters = []
    for ((name, op), values) in flt.iteritems():
        op = op.lower()
        if op == 'is' and name == gql.GQL._GQL__ANCESTOR:
            # 'ANCESTOR IS ...' becomes the query's ancestor, not a filter.
            # (__ANCESTOR is name-mangled inside gql.GQL, hence the access
            # via its private name.)
            assert len(values) == 1, '"is" requires exactly one value'
            [(func, args)] = values
            ancestor = _args_to_val(func, args, bindings)
            continue
        assert op in _OPS, repr(op)
        for (func, args) in values:
            val = _args_to_val(func, args, bindings)
            filters.append(FilterNode(name, op, val))
    if filters:
        filters.sort(key=lambda x: x._sort_key())  # For predictable tests.
        filters = ConjunctionNode(*filters)
    else:
        filters = None
    orders = _orderings_to_orders(gql_qry.orderings())
    qry = Query(kind=gql_qry._entity,
                ancestor=ancestor,
                filters=filters,
                orders=orders)
    offset = gql_qry.offset()
    # NOTE(review): a negative offset/limit from gql presumably means
    # "not specified" — verify against the gql module's contract.
    if offset < 0:
        offset = None
    limit = gql_qry.limit()
    if limit < 0:
        limit = None
    options = QueryOptions(offset=offset, limit=limit)
    return qry, options, bindings
class Query(object):
    """Query object.

    Usually constructed by calling Model.query().

    See module docstring for examples.

    Note that not all operations on Queries are supported by _MultiQuery
    instances; the latter are generated as necessary when any of the
    operators !=, IN or OR is used.
    """

    @datastore_rpc._positional(1)
    def __init__(self, kind=None, ancestor=None, filters=None, orders=None):
        """Constructor.

        Args:
            kind: Optional kind string.
            ancestor: Optional ancestor Key (or a Binding resolving to one).
            filters: Optional Node representing a filter expression tree.
            orders: Optional datastore_query.Order object.
        """
        if ancestor is not None and not isinstance(ancestor, Binding):
            # The last (kind, id) pair must have an id; an incomplete key
            # cannot be anyone's ancestor.
            lastid = ancestor.pairs()[-1][1]
            assert lastid, 'ancestor cannot be an incomplete key'
        if filters is not None:
            assert isinstance(filters, Node), repr(filters)
        if orders is not None:
            assert isinstance(orders, datastore_query.Order), repr(orders)
        self.__kind = kind  # String
        self.__ancestor = ancestor  # Key
        self.__filters = filters  # None or Node subclass
        self.__orders = orders  # None or datastore_query.Order instance

    def __repr__(self):
        args = []
        if self.__kind is not None:
            args.append('kind=%r' % self.__kind)
        if self.__ancestor is not None:
            args.append('ancestor=%r' % self.__ancestor)
        if self.__filters is not None:
            args.append('filters=%r' % self.__filters)
        if self.__orders is not None:
            args.append('orders=...')  # PropertyOrder doesn't have a good repr().
        return '%s(%s)' % (self.__class__.__name__, ', '.join(args))

    def _get_query(self, connection):
        """Build the low-level datastore_query.Query for this Query.

        Resolves a Binding ancestor (recording it in bindings) and splits
        the filter tree into native filters and in-memory post-filters.
        """
        kind = self.__kind
        ancestor = self.__ancestor
        bindings = {}
        if isinstance(ancestor, Binding):
            bindings[ancestor.key] = ancestor
            ancestor = ancestor.resolve()
        if ancestor is not None:
            ancestor = connection.adapter.key_to_pb(ancestor)
        filters = self.__filters
        post_filters = None
        if filters is not None:
            post_filters = filters._post_filters()
            filters = filters._to_filter(bindings)
        # NOTE(review): assumes kind is a non-None byte string here; a
        # kindless query would raise on .decode() — confirm that case is
        # excluded upstream.
        dsquery = datastore_query.Query(kind=kind.decode('utf-8'),
                                        ancestor=ancestor,
                                        filter_predicate=filters,
                                        order=self.__orders)
        if post_filters is not None:
            dsquery = datastore_query._AugmentedQuery(
                dsquery,
                in_memory_filter=post_filters._to_filter(bindings, post=True))
        return dsquery

    @tasklets.tasklet
    def run_to_queue(self, queue, conn, options=None, dsquery=None):
        """Run this query, putting entities into the given queue."""
        try:
            multiquery = self._maybe_multi_query()
            if multiquery is not None:
                # IN/OR/!= filters: delegate to a _MultiQuery.
                yield multiquery.run_to_queue(queue, conn, options=options)
                return
            if dsquery is None:
                dsquery = self._get_query(conn)
            orig_options = options
            rpc = dsquery.run_async(conn, options)
            skipped = 0
            count = 0
            # NOTE(review): orig_options, skipped and count are never used
            # below — presumably leftovers; confirm before removing.
            while rpc is not None:
                batch = yield rpc
                rpc = batch.next_batch_async(options)
                for i, result in enumerate(batch.results):
                    queue.putq((batch, i, result))
            queue.complete()
        except Exception:
            # Propagate the failure to queue consumers, then re-raise.
            if not queue.done():
                t, e, tb = sys.exc_info()
                queue.set_exception(e, tb)
            raise

    def _maybe_multi_query(self):
        """Return a _MultiQuery if the filters require one, else None."""
        filters = self.__filters
        if filters is not None:
            filters = filters.resolve()
            if isinstance(filters, DisjunctionNode):
                # Switch to a _MultiQuery: one subquery per disjunct.
                subqueries = []
                for subfilter in filters:
                    subquery = Query(kind=self.__kind, ancestor=self.__ancestor,
                                     filters=subfilter, orders=self.__orders)
                    subqueries.append(subquery)
                return _MultiQuery(subqueries)
        return None

    @property
    def kind(self):
        """Accessor for the kind (a string or None)."""
        return self.__kind

    @property
    def ancestor(self):
        """Accessor for the ancestor (a Key or None)."""
        return self.__ancestor

    @property
    def filters(self):
        """Accessor for the filters (a Node or None)."""
        return self.__filters

    @property
    def orders(self):
        """Accessor for the orders (a datastore_query.Order or None)."""
        return self.__orders

    def filter(self, *args):
        """Return a new Query with additional filter(s) applied."""
        if not args:
            return self
        preds = []
        f = self.filters
        if f:
            preds.append(f)
        for arg in args:
            assert isinstance(arg, Node), repr(arg)
            preds.append(arg)
        if not preds:
            pred = None
        elif len(preds) == 1:
            pred = preds[0]
        else:
            pred = ConjunctionNode(*preds)
        return self.__class__(kind=self.kind, ancestor=self.ancestor,
                              orders=self.orders, filters=pred)

    def order(self, *args):
        """Return a new Query with additional sort order(s) applied."""
        # q.order(Employee.name, -Employee.age)
        if not args:
            return self
        orders = []
        o = self.__orders
        if o:
            orders.append(o)
        for arg in args:
            if isinstance(arg, model.Property):
                orders.append(datastore_query.PropertyOrder(arg._name, _ASC))
            elif isinstance(arg, datastore_query.Order):
                orders.append(arg)
            else:
                assert False, arg
        if not orders:
            orders = None
        elif len(orders) == 1:
            orders = orders[0]
        else:
            orders = datastore_query.CompositeOrder(orders)
        return self.__class__(kind=self.kind, ancestor=self.ancestor,
                              filters=self.filters, orders=orders)

    # Datastore API using the default context.

    def iter(self, **q_options):
        """Construct an iterator over the query.

        Args:
            **q_options: All query options keyword arguments are supported.

        Returns:
            A QueryIterator object.
        """
        return QueryIterator(self, **q_options)

    __iter__ = iter

    @datastore_rpc._positional(2)
    def map(self, callback, merge_future=None, **q_options):
        """Map a callback function or tasklet over the query results.

        Args:
            callback: A function or tasklet to be applied to each result; see below.
            merge_future: Optional Future subclass; see below.
            **q_options: All query options keyword arguments are supported.

        Callback signature: The callback is normally called with an entity
        as argument. However if keys_only=True is given, it is called
        with a Key. Also, when produce_cursors=True is given, it is
        called with three arguments: the current batch, the index within
        the batch, and the entity or Key at that index. The callback can
        return whatever it wants.

        Optional merge future: The merge_future is an advanced argument
        that can be used to override how the callback results are combined
        into the overall map() return value. By default a list of
        callback return values is produced. By substituting one of a
        small number of specialized alternatives you can arrange
        otherwise. See tasklets.MultiFuture for the default
        implementation and a description of the protocol the merge_future
        object must implement the default. Alternatives from the same
        module include QueueFuture, SerialQueueFuture and ReducingFuture.

        Returns:
            When the query has run to completion and all callbacks have
            returned, map() returns a list of the results of all callbacks.
            (But see 'optional merge future' above.)
        """
        return self.map_async(callback, merge_future=merge_future,
                              **q_options).get_result()

    @datastore_rpc._positional(2)
    def map_async(self, callback, merge_future=None, **q_options):
        """Map a callback function or tasklet over the query results.

        This is the asynchronous version of Query.map().
        """
        return tasklets.get_context().map_query(self, callback,
                                                options=_make_options(q_options),
                                                merge_future=merge_future)

    @datastore_rpc._positional(2)
    def fetch(self, limit=None, **q_options):
        """Fetch a list of query results, up to a limit.

        Args:
            limit: How many results to retrieve at most.
            **q_options: All query options keyword arguments are supported.

        Returns:
            A list of results.
        """
        return self.fetch_async(limit, **q_options).get_result()

    @tasklets.tasklet
    @datastore_rpc._positional(2)
    def fetch_async(self, limit=None, **q_options):
        """Fetch a list of query results, up to a limit.

        This is the asynchronous version of Query.fetch().
        """
        assert 'limit' not in q_options, q_options
        if limit is None:
            limit = _MAX_LIMIT
        q_options['limit'] = limit
        # Size the batches to the limit so one roundtrip usually suffices.
        q_options.setdefault('prefetch_size', limit)
        q_options.setdefault('batch_size', limit)
        res = []
        it = self.iter(**q_options)
        while (yield it.has_next_async()):
            res.append(it.next())
            if len(res) >= limit:
                break
        raise tasklets.Return(res)

    def get(self, **q_options):
        """Get the first query result, if any.

        This is similar to calling q.fetch(1) and returning the first item
        of the list of results, if any, otherwise None.

        Args:
            **q_options: All query options keyword arguments are supported.

        Returns:
            A single result, or None if there are no results.
        """
        return self.get_async(**q_options).get_result()

    @tasklets.tasklet
    def get_async(self, **q_options):
        """Get the first query result, if any.

        This is the asynchronous version of Query.get().
        """
        res = yield self.fetch_async(1, **q_options)
        if not res:
            raise tasklets.Return(None)
        raise tasklets.Return(res[0])

    @datastore_rpc._positional(2)
    def count(self, limit=None, **q_options):
        """Count the number of query results, up to a limit.

        This returns the same result as len(q.fetch(limit)) but more
        efficiently.

        Note that you must pass a maximum value to limit the amount of
        work done by the query.

        Args:
            limit: How many results to count at most.
            **q_options: All query options keyword arguments are supported.

        Returns:
            The count of results, up to the given limit.
        """
        return self.count_async(limit, **q_options).get_result()

    @tasklets.tasklet
    @datastore_rpc._positional(2)
    def count_async(self, limit=None, **q_options):
        """Count the number of query results, up to a limit.

        This is the asynchronous version of Query.count().
        """
        # TODO: Support offset by incorporating it to the limit.
        assert 'offset' not in q_options, q_options
        assert 'limit' not in q_options, q_options
        if limit is None:
            limit = _MAX_LIMIT
        if (self.__filters is not None and
            isinstance(self.__filters, DisjunctionNode)):
            # _MultiQuery does not support iterating over result batches,
            # so just fetch results and count them.
            # TODO: Use QueryIterator to avoid materializing the results list.
            q_options.setdefault('prefetch_size', limit)
            q_options.setdefault('batch_size', limit)
            q_options.setdefault('keys_only', True)
            results = yield self.fetch_async(limit, **q_options)
            raise tasklets.Return(len(results))
        # Issue a special query requesting 0 results at a given offset.
        # The skipped_results count will tell us how many hits there were
        # before that offset without fetching the items.
        q_options['offset'] = limit
        q_options['limit'] = 0
        options = _make_options(q_options)
        conn = tasklets.get_context()._conn
        dsquery = self._get_query(conn)
        rpc = dsquery.run_async(conn, options)
        total = 0
        while rpc is not None:
            batch = yield rpc
            rpc = batch.next_batch_async(options)
            total += batch.skipped_results
        raise tasklets.Return(total)

    @datastore_rpc._positional(2)
    def fetch_page(self, page_size, **q_options):
        """Fetch a page of results.

        This is a specialized method for use by paging user interfaces.

        Args:
            page_size: The requested page size. At most this many results
                will be returned.

        In addition, any keyword argument supported by the QueryOptions
        class is supported. In particular, to fetch the next page, you
        pass the cursor returned by one call to the next call using
        start_cursor=<cursor>. A common idiom is to pass the cursor to
        the client using <cursor>.to_websafe_string() and to reconstruct
        that cursor on a subsequent request using
        Cursor.from_websafe_string(<string>).

        Returns:
            A tuple (results, cursor, more) where results is a list of query
            results, cursor is a cursor pointing just after the last result
            returned, and more is a bool indicating whether there are
            (likely) more results after that.
        """
        # NOTE: page_size can't be passed as a keyword.
        return self.fetch_page_async(page_size, **q_options).get_result()

    @tasklets.tasklet
    @datastore_rpc._positional(2)
    def fetch_page_async(self, page_size, **q_options):
        """Fetch a page of results.

        This is the asynchronous version of Query.fetch_page().
        """
        q_options.setdefault('batch_size', page_size)
        q_options.setdefault('produce_cursors', True)
        # Ask for one extra result so probably_has_next() is meaningful.
        it = self.iter(limit=page_size+1, **q_options)
        results = []
        while (yield it.has_next_async()):
            results.append(it.next())
            if len(results) >= page_size:
                break
        try:
            cursor = it.cursor_after()
        except datastore_errors.BadArgumentError:
            cursor = None  # No results, hence no cursor.
        raise tasklets.Return(results, cursor, it.probably_has_next())
def _make_options(q_options):
"""Helper to construct a QueryOptions object from keyword arguents.
Args:
q_options: a dict of keyword arguments.
Note that either 'options' or 'config' can be used to pass another
QueryOptions object, but not both. If another QueryOptions object is
given it provides default values.
Returns:
A QueryOptions object, or None if q_options is empty.
"""
if not q_options:
return None
if 'options' in q_options:
# Move 'options' to 'config' since that is what QueryOptions() uses.
assert 'config' not in q_options, q_options
q_options['config'] = q_options.pop('options')
return QueryOptions(**q_options)
class QueryIterator(object):
    """This iterator works both for synchronous and async callers!

    For synchronous callers, just use:

        for entity in Account.query():
            <use entity>

    Async callers use this idiom:

        it = iter(Account.query())
        while (yield it.has_next_async()):
            entity = it.next()
            <use entity>

    You can also use q.iter([options]) instead of iter(q); this allows
    passing query options such as keys_only or produce_cursors.

    When keys_only is set, it.next() returns a key instead of an entity.

    When produce_cursors is set, the methods it.cursor_before() and
    it.cursor_after() return Cursor objects corresponding to the query
    position just before and after the item returned by it.next().
    Before it.next() is called for the first time, both raise an
    exception. Once the loop is exhausted, both return the cursor after
    the last item returned. Calling it.has_next() does not affect the
    cursors; you must call it.next() before the cursors move. Note that
    sometimes requesting a cursor requires a datastore roundtrip (but
    not if you happen to request a cursor corresponding to a batch
    boundary). If produce_cursors is not set, both methods always raise
    an exception.

    Note that queries requiring in-memory merging of multiple queries
    (i.e. queries using the IN, != or OR operators) do not support query
    options.
    """

    # When produce_cursors is set, _lookahead collects (batch, index)
    # pairs passed to _extended_callback(), and (_batch, _index)
    # contain the info pertaining to the current item.
    _lookahead = None
    _batch = None
    _index = None

    # Indicate the loop is exhausted.
    _exhausted = False

    @datastore_rpc._positional(2)
    def __init__(self, query, **q_options):
        """Constructor.  Takes a Query and query options.

        This is normally called by Query.iter() or Query.__iter__().
        """
        ctx = tasklets.get_context()
        callback = None
        options = _make_options(q_options)
        if options is not None and options.produce_cursors:
            # Hook in _extended_callback so cursor positions are tracked.
            callback = self._extended_callback
        self._iter = ctx.iter_query(query, callback=callback, options=options)
        self._fut = None

    def _extended_callback(self, batch, index, ent):
        """Record (batch, index) for cursor bookkeeping; pass the entity on."""
        assert not self._exhausted, 'QueryIterator is already exhausted'
        # TODO: Make _lookup a deque.
        if self._lookahead is None:
            self._lookahead = []
        self._lookahead.append((batch, index))
        return ent

    def _consume_item(self):
        # Shift the next recorded (batch, index) pair into the "current
        # item" slots; clear them when cursors are not being produced.
        if self._lookahead:
            self._batch, self._index = self._lookahead.pop(0)
        else:
            self._batch = self._index = None

    def cursor_before(self):
        """Return the cursor before the current item.

        You must pass a QueryOptions object with produce_cursors=True
        for this to work.

        If there is no cursor or no current item, raise BadArgumentError.
        Before next() has returned there is no cursor.  Once the loop is
        exhausted, this returns the cursor after the last item.
        """
        if self._batch is None:
            raise datastore_errors.BadArgumentError('There is no cursor currently')
        # TODO: if cursor_after() was called for the previous item
        # reuse that result instead of computing it from scratch.
        # (Some cursor() calls make a datastore roundtrip.)
        # _exhausted is a bool used as 0/1: after exhaustion this points
        # just past the last item, matching cursor_after().
        return self._batch.cursor(self._index + self._exhausted)

    def cursor_after(self):
        """Return the cursor after the current item.

        You must pass a QueryOptions object with produce_cursors=True
        for this to work.

        If there is no cursor or no current item, raise BadArgumentError.
        Before next() has returned there is no cursor.  Once the loop is
        exhausted, this returns the cursor after the last item.
        """
        if self._batch is None:
            raise datastore_errors.BadArgumentError('There is no cursor currently')
        return self._batch.cursor(self._index + 1)

    def __iter__(self):
        """Iterator protocol: get the iterator for this iterator, i.e. self."""
        return self

    def probably_has_next(self):
        """Return whether a next item is (probably) available.

        This is not quite the same as has_next(), because when
        produce_cursors is set, some shortcuts are possible.  However, in
        some cases (e.g. when the query has a post_filter) we can get a
        false positive (returns True but next() will raise StopIteration).
        There are no false negatives, if Batch.more_results doesn't lie.
        """
        if self._lookahead:
            return True
        if self._batch is not None:
            return self._batch.more_results
        return self.has_next()

    def has_next(self):
        """Return whether a next item is available.

        See the module docstring for the usage pattern.
        """
        return self.has_next_async().get_result()

    @tasklets.tasklet
    def has_next_async(self):
        """Return a Future whose result will say whether a next item is available.

        See the module docstring for the usage pattern.
        """
        if self._fut is None:
            self._fut = self._iter.getq()
        flag = True
        try:
            # Wait for the next item; EOFError signals end of results.
            yield self._fut
        except EOFError:
            flag = False
        raise tasklets.Return(flag)

    def next(self):
        """Iterator protocol: get next item or raise StopIteration."""
        if self._fut is None:
            self._fut = self._iter.getq()
        try:
            try:
                ent = self._fut.get_result()
                self._consume_item()
                return ent
            except EOFError:
                self._exhausted = True
                raise StopIteration
        finally:
            # The future is consumed either way; the next call gets a new one.
            self._fut = None
class _SubQueryIteratorState(object):
    """Helper class for _MultiQuery: one subquery's current head item."""

    def __init__(self, batch_i_entity, iterator, dsquery, orders):
        self.batch, self.index, self.entity = batch_i_entity
        self.iterator = iterator
        self.dsquery = dsquery
        self.orders = orders

    def __cmp__(self, other):
        """Order states by their head entities under the shared sort order."""
        assert isinstance(other, _SubQueryIteratorState), repr(other)
        assert self.orders == other.orders, (self.orders, other.orders)
        pb_a = self.entity._orig_pb
        pb_b = other.entity._orig_pb
        filt_a = self.dsquery._filter_predicate
        filt_b = other.dsquery._filter_predicate
        # Collect all property names the comparison must examine.
        names = self.orders._get_prop_names()
        # TODO: In some future version, there won't be a need to add the
        # filters' names.
        if filt_a is not None:
            names |= filt_a._get_prop_names()
        if filt_b is not None:
            names |= filt_b._get_prop_names()
        map_a = datastore_query._make_key_value_map(pb_a, names)
        map_b = datastore_query._make_key_value_map(pb_b, names)
        if filt_a is not None:
            filt_a._prune(map_a)
        if filt_b is not None:
            filt_b._prune(map_b)
        return self.orders._cmp(map_a, map_b)
class _MultiQuery(object):
    """Helper class to run queries involving !=, IN or OR operators."""

    # This is not instantiated by the user directly, but implicitly when
    # iterating over a query with at least one filter using an IN, OR or
    # != operator. Note that some options must be interpreted by
    # _MultiQuery instead of passed to the underlying Queries' methods,
    # e.g. offset (though not necessarily limit, and I'm not sure about
    # cursors).

    # TODO: Need a way to specify the unification of two queries that
    # are identical except one has an ancestor and the other doesn't.
    # The HR datastore makes that a useful special case.

    def __init__(self, subqueries):
        """Constructor.

        Args:
            subqueries: Non-empty list of Query objects, all sharing the
                same kind and the same orders; their results are merged.
        """
        assert isinstance(subqueries, list), subqueries
        assert all(isinstance(subq, Query) for subq in subqueries), subqueries
        kind = subqueries[0].kind
        assert kind, 'Subquery kind cannot be missing'
        assert all(subq.kind == kind for subq in subqueries), subqueries
        # TODO: Assert app and namespace match, when we support them.
        orders = subqueries[0].orders
        assert all(subq.orders == orders for subq in subqueries), subqueries
        self.__subqueries = subqueries
        self.__orders = orders
        self.ancestor = None  # Hack for map_query().

    @property
    def orders(self):
        """The shared sort order of all subqueries (or None)."""
        return self.__orders

    @tasklets.tasklet
    def run_to_queue(self, queue, conn, options=None):
        """Run this query, putting entities into the given queue.

        offset, limit and (with a sort order) keys_only are emulated here,
        because the duplicate-key filtering must see every subquery result.
        """
        if options is None:
            # Default options.
            offset = None
            limit = None
            keys_only = None
        else:
            # Capture options we need to simulate.
            offset = options.offset
            limit = options.limit
            keys_only = options.keys_only
            # Cursors are supported for certain orders only.
            if (options.start_cursor or options.end_cursor or
                options.produce_cursors):
                names = set()
                if self.__orders is not None:
                    names = self.__orders._get_prop_names()
                if '__key__' not in names:
                    raise datastore_errors.BadArgumentError(
                        '_MultiQuery with cursors requires __key__ order')
        # Decide if we need to modify the options passed to subqueries.
        # NOTE: It would seem we can sometimes let the datastore handle
        # the offset natively, but this would thwart the duplicate key
        # detection, so we always have to emulate the offset here.
        # We can set the limit we pass along to offset + limit though,
        # since that is the maximum number of results from a single
        # subquery we will ever have to consider.
        modifiers = {}
        if offset:
            modifiers['offset'] = None
        if limit is not None:
            # BUG FIX: offset may still be None here (it is only normalized
            # to 0 further down), which made `offset + limit` raise a
            # TypeError whenever a limit was given without an offset.
            modifiers['limit'] = min(_MAX_LIMIT, (offset or 0) + limit)
        if keys_only and self.__orders is not None:
            # The merge sort below needs full entities to compare; strip
            # results down to keys only when emitting them.
            modifiers['keys_only'] = None
        if modifiers:
            options = QueryOptions(config=options, **modifiers)
        if offset is None:
            offset = 0
        if limit is None:
            limit = _MAX_LIMIT

        if self.__orders is None:
            # Run the subqueries sequentially; there is no order to keep.
            keys_seen = set()
            for subq in self.__subqueries:
                if limit <= 0:
                    break
                subit = tasklets.SerialQueueFuture('_MultiQuery.run_to_queue[ser]')
                subq.run_to_queue(subit, conn, options=options)
                while limit > 0:
                    try:
                        batch, index, result = yield subit.getq()
                    except EOFError:
                        break
                    if keys_only:
                        key = result
                    else:
                        key = result._key
                    if key not in keys_seen:
                        keys_seen.add(key)
                        if offset > 0:
                            # Emulate the offset by skipping unique results.
                            offset -= 1
                        else:
                            limit -= 1
                            # No batch info: cursors are unsupported here.
                            queue.putq((None, None, result))
            queue.complete()
            return

        # This with-statement causes the adapter to set _orig_pb on all
        # entities it converts from protobuf.
        # TODO: Does this interact properly with the cache?
        with conn.adapter:
            # Create a list of (first-entity, subquery-iterator) tuples.
            state = []
            for subq in self.__subqueries:
                dsquery = subq._get_query(conn)
                subit = tasklets.SerialQueueFuture('_MultiQuery.run_to_queue[par]')
                subq.run_to_queue(subit, conn, options=options, dsquery=dsquery)
                try:
                    thing = yield subit.getq()
                except EOFError:
                    # This subquery produced no results at all.
                    continue
                else:
                    state.append(_SubQueryIteratorState(thing, subit, dsquery,
                                                        self.__orders))

            # Now turn it into a sorted heap.  The heapq module claims that
            # calling heapify() is more efficient than calling heappush() for
            # each item.
            heapq.heapify(state)

            # Repeatedly yield the lowest entity from the state vector,
            # filtering duplicates.  This is essentially a multi-way merge
            # sort.  One would think it should be possible to filter
            # duplicates simply by dropping other entities already in the
            # state vector that are equal to the lowest entity, but because of
            # the weird sorting of repeated properties, we have to explicitly
            # keep a set of all keys, so we can remove later occurrences.
            # Note that entities will still be sorted correctly, within the
            # constraints given by the sort order.
            keys_seen = set()
            while state and limit > 0:
                item = heapq.heappop(state)
                batch = item.batch
                index = item.index
                entity = item.entity
                key = entity._key
                if key not in keys_seen:
                    keys_seen.add(key)
                    if offset > 0:
                        offset -= 1
                    else:
                        limit -= 1
                        if keys_only:
                            queue.putq((batch, index, key))
                        else:
                            queue.putq((batch, index, entity))
                # Advance the subquery that supplied the emitted item.
                subit = item.iterator
                try:
                    batch, index, entity = yield subit.getq()
                except EOFError:
                    pass
                else:
                    item.batch = batch
                    item.index = index
                    item.entity = entity
                    heapq.heappush(state, item)
        queue.complete()

    # Datastore API using the default context.

    def iter(self, **q_options):
        """Construct a QueryIterator over the merged results."""
        return QueryIterator(self, **q_options)

    __iter__ = iter

    # TODO: Add fetch() etc.?
# Helper functions to convert between orders and orderings. An order
# is a datastore_query.Order instance. An ordering is a
# (property_name, direction) tuple.
def _order_to_ordering(order):
    """Convert a datastore_query.Order to a (property, direction) tuple."""
    proto = order._to_pb()
    return (proto.property(), proto.direction())  # TODO: What about UTF-8?
def _orders_to_orderings(orders):
if orders is None:
return []
if isinstance(orders, datastore_query.PropertyOrder):
return [_order_to_ordering(orders)]
if isinstance(orders, datastore_query.CompositeOrder):
# TODO: What about UTF-8?
return [(pb.property(), pb.direction()) for pb in orders._to_pbs()]
assert False, 'Bad order: %r' % (orders,)
def _ordering_to_order(ordering):
    """Convert a (property_name, direction) tuple to a PropertyOrder."""
    prop_name, direction = ordering
    return datastore_query.PropertyOrder(prop_name, direction)
def _orderings_to_orders(orderings):
orders = [_ordering_to_order(o) for o in orderings]
if not orders:
return None
if len(orders) == 1:
return orders[0]
return datastore_query.CompositeOrder(orders)
# ---- (extraction artifact: a second module, the NDB event loop, follows) ----
"""An event loop.
This event loop should handle both asynchronous App Engine RPC objects
(specifically urlfetch and datastore RPC objects) and arbitrary
callback functions with an optional time delay.
Normally, event loops are singleton objects, though there is no
enforcement of this requirement.
The API here is inspired by Monocle.
"""
import bisect
import logging
import os
import time
from google.appengine.api.apiproxy_rpc import RPC
from google.appengine.datastore import datastore_rpc
import utils
# Shorthand for the project's cheap debug logger.
logging_debug = utils.logging_debug

# Re-export the RPC state constants the event loop checks against.
IDLE = RPC.IDLE
RUNNING = RPC.RUNNING
FINISHING = RPC.FINISHING
class EventLoop(object):
    """An event loop."""

    # TODO: Use a separate queue for tasklets with delay=None.

    def __init__(self):
        """Constructor."""
        self.queue = []  # Sorted list of (when, callable, args, kwds) tuples.
        self.rpcs = {}  # Map of in-flight rpc -> (callable, args, kwds).

    # TODO: Rename to queue_callback?
    def queue_call(self, delay, callable, *args, **kwds):
        """Schedule a function call at a specific time in the future."""
        if delay is None:
            when = 0  # Run as soon as possible.
        elif delay < 1e9:
            when = delay + time.time()
        else:
            # Times over a billion seconds are assumed to be absolute.
            when = delay
        # Keep the queue sorted by scheduled time.
        bisect.insort(self.queue, (when, callable, args, kwds))

    def queue_rpc(self, rpc, callable=None, *args, **kwds):
        """Schedule an RPC with an optional callback.

        The caller must have previously sent the call to the service.
        The optional callback is called with the remaining arguments.

        NOTE: If the rpc is a MultiRpc, the callback will be called once
        for each sub-RPC.  TODO: Is this a good idea?
        """
        if rpc is None:
            return
        assert rpc.state in (RUNNING, FINISHING), rpc.state
        if isinstance(rpc, datastore_rpc.MultiRpc):
            rpcs = rpc.rpcs
            if len(rpcs) > 1:
                # Don't call the callback until all sub-rpcs have completed.
                def help_multi_rpc_along(r=rpc, c=callable, a=args, k=kwds):
                    if r.state == FINISHING:
                        c(*a, **k)
                    # TODO: And again, what about exceptions?
                callable = help_multi_rpc_along
                args = ()
                kwds = {}
        else:
            rpcs = [rpc]
        for rpc in rpcs:
            self.rpcs[rpc] = (callable, args, kwds)

    # TODO: A way to add a datastore Connection

    def run0(self):
        """Run one item (a callback or an RPC wait_any).

        Returns:
            A time to sleep if something happened (may be 0);
            None if all queues are empty.
        """
        delay = None
        if self.queue:
            delay = self.queue[0][0] - time.time()
            # NOTE(review): delay cannot be None once assigned above; the
            # `delay is None` test looks like a defensive leftover — confirm.
            if delay is None or delay <= 0:
                when, callable, args, kwds = self.queue.pop(0)
                logging_debug('event: %s', callable.__name__)
                callable(*args, **kwds)
                # TODO: What if it raises an exception?
                return 0
        if self.rpcs:
            rpc = datastore_rpc.MultiRpc.wait_any(self.rpcs)
            if rpc is not None:
                logging.info('rpc: %s', rpc.method)  # XXX Should be debug
                # Yes, wait_any() may return None even for a non-empty argument.
                # But no, it won't ever return an RPC not in its argument.
                assert rpc in self.rpcs, (rpc, self.rpcs)
                callable, args, kwds = self.rpcs[rpc]
                del self.rpcs[rpc]
                if callable is not None:
                    callable(*args, **kwds)
                # TODO: Again, what about exceptions?
            return 0
        # Nothing runnable now: positive delay if a call is pending,
        # None if both queues are empty.
        return delay

    def run1(self):
        """Run one item (a callback or an RPC wait_any) or sleep.

        Returns:
            True if something happened; False if all queues are empty.
        """
        delay = self.run0()
        if delay is None:
            return False
        if delay > 0:
            time.sleep(delay)
        return True

    def run(self):
        """Run until there's nothing left to do."""
        # TODO: A way to stop running before the queue is empty.
        while True:
            if not self.run1():
                break
# os.environ marker used to detect a fresh request (see get_event_loop()).
_EVENT_LOOP_KEY = '__EVENT_LOOP__'

# The current request's singleton EventLoop, managed by get_event_loop().
_event_loop = None
def get_event_loop():
  """Return a singleton EventLoop instance.

  A new singleton is created for each new HTTP request.  We determine
  that we're in a new request by inspecting os.environ, which is reset
  at the start of each request.
  """
  # TODO: Use thread-local storage?
  global _event_loop
  if os.getenv(_EVENT_LOOP_KEY) and _event_loop is not None:
    return _event_loop
  _event_loop = EventLoop()
  os.environ[_EVENT_LOOP_KEY] = '1'
  return _event_loop
def queue_call(*args, **kwds):
  """Module-level convenience: queue_call() on the singleton loop."""
  get_event_loop().queue_call(*args, **kwds)
def queue_rpc(rpc, callable=None, *args, **kwds):
  """Module-level convenience: queue_rpc() on the singleton loop."""
  get_event_loop().queue_rpc(rpc, callable, *args, **kwds)
def run():
  """Module-level convenience: run the singleton loop to exhaustion."""
  get_event_loop().run()
def run1():
  """Module-level convenience: one run1() step on the singleton loop."""
  return get_event_loop().run1()
def run0():
  """Module-level convenience: one run0() step on the singleton loop."""
  return get_event_loop().run0()
| Python |
"""A tasklet decorator.
Tasklets are a way to write concurrently running functions without
threads; tasklets are executed by an event loop and can suspend
themselves blocking for I/O or some other operation using a yield
statement. The notion of a blocking operation is abstracted into the
Future class, but a tasklet may also yield an RPC in order to wait for
that RPC to complete.
The @tasklet decorator wraps generator function so that when it is
called, a Future is returned while the generator is executed by the
event loop. For example:
@tasklet
def foo():
a = yield <some Future>
b = yield <another Future>
raise Return(a + b)
def main():
f = foo()
x = f.get_result()
print x
Note that blocking until the Future's result is available using
get_result() is somewhat inefficient (though not vastly -- it is not
busy-waiting). In most cases such code should be rewritten as a tasklet
instead:
@tasklet
def main_tasklet():
f = foo()
x = yield f
print x
Calling a tasklet automatically schedules it with the event loop:
def main():
f = main_tasklet()
eventloop.run() # Run until no tasklets left to do
assert f.done()
As a special feature, if the wrapped function is not a generator
function, its return value is returned via the Future. This makes the
following two equivalent:
@tasklet
def foo():
return 42
@tasklet
def foo():
if False: yield # The presence of 'yield' makes foo a generator
raise Return(42) # Or, after PEP 380, return 42
This feature (inspired by Monocle) is handy in case you are
implementing an interface that expects tasklets but you have no need to
suspend -- there's no need to insert a dummy yield in order to make
the tasklet into a generator.
"""
import collections
import logging
import os
import sys
import types
from google.appengine.api.apiproxy_stub_map import UserRPC
from google.appengine.api.apiproxy_rpc import RPC
from google.appengine.datastore import datastore_rpc
from ndb import eventloop, utils
logging_debug = utils.logging_debug
def is_generator(obj):
  """Return True iff *obj* is a generator iterator.

  NOTE: This tests for the (iterable) object returned by calling a
  generator function, not for a generator function.
  """
  # GeneratorType cannot be subclassed, so an exact type check is
  # equivalent to the isinstance() test.
  return type(obj) is types.GeneratorType
class Future(object):
  """A Future has 0 or more callbacks.

  The callbacks will be called when the result is ready.

  NOTE: This is somewhat inspired but not conformant to the Future interface
  defined by PEP 3148.  It is also inspired (and tries to be somewhat
  compatible with) the App Engine specific UserRPC and MultiRpc classes.
  """
  # TODO: Trim the API; there are too many ways to do the same thing.
  # TODO: Compare to Monocle's much simpler Callback class.

  # Constants for state property.
  IDLE = RPC.IDLE  # Not yet running (unused)
  RUNNING = RPC.RUNNING  # Not yet completed.
  FINISHING = RPC.FINISHING  # Completed.

  _all_pending = set()  # Set of all pending Future instances.

  # XXX Add docstrings to all methods.  Separate PEP 3148 API from RPC API.

  _geninfo = None  # Extra info about suspended generator.

  def __init__(self, info=None):
    # TODO: Make done a method, to match PEP 3148?
    __ndb_debug__ = 'SKIP'  # Hide this frame from self._where
    self._info = info  # Info from the caller about this Future's purpose.
    self._where = utils.get_stack()
    self._context = None  # Context carried for the tasklet, if any.
    self._reset()

  def _reset(self):
    """(Re)initialize completion state and register as pending."""
    self._done = False
    self._result = None
    self._exception = None
    self._traceback = None
    self._callbacks = []
    logging_debug('_all_pending: add %s', self)
    self._all_pending.add(self)
    self._next = None  # Links suspended Futures together in a stack.

  # TODO: Add a __del__ that complains if neither get_exception() nor
  # check_success() was ever called?  What if it's not even done?

  def __repr__(self):
    """Show state plus the first creation-stack line outside this module."""
    if self._done:
      if self._exception is not None:
        state = 'exception %s: %s' % (self._exception.__class__.__name__,
                                      self._exception)
      else:
        state = 'result %r' % (self._result,)
    else:
      state = 'pending'
    line = '?'
    for line in self._where:
      if 'ndb/tasklets.py' not in line:
        break
    if self._info:
      line += ' for %s;' % self._info
    if self._geninfo:
      line += ' %s;' % self._geninfo
    return '<%s %x created by %s %s>' % (
        self.__class__.__name__, id(self), line, state)

  def dump(self):
    """Return the wait chain plus the full creation stack."""
    return '%s\nCreated by %s' % (self.dump_stack(),
                                  '\n called by '.join(self._where))

  def dump_stack(self):
    """Return the chain of Futures this one is (transitively) waiting for."""
    lines = []
    fut = self
    while fut is not None:
      lines.append(str(fut))
      fut = fut._next
    return '\n waiting for '.join(lines)

  @classmethod
  def clear_all_pending(cls):
    """Forget all pending Futures (e.g. between requests)."""
    if cls._all_pending:
      logging.info('_all_pending: clear %s', cls._all_pending)
    else:
      logging_debug('_all_pending: clear no-op')
    cls._all_pending.clear()

  @classmethod
  def dump_all_pending(cls, verbose=False):
    """Return a newline-joined dump of every pending Future."""
    all = []
    for fut in cls._all_pending:
      if verbose:
        line = fut.dump() + ('\n' + '-'*40)
      else:
        line = fut.dump_stack()
      all.append(line)
    return '\n'.join(all)

  def add_callback(self, callback, *args, **kwds):
    """Register a callback; it runs via the event loop once done."""
    if self._done:
      # Already completed: schedule immediately rather than storing.
      eventloop.queue_call(None, callback, *args, **kwds)
    else:
      self._callbacks.append((callback, args, kwds))

  def set_result(self, result):
    """Mark as successfully completed and schedule all callbacks."""
    assert not self._done
    self._result = result
    self._done = True
    logging_debug('_all_pending: remove successful %s', self)
    self._all_pending.remove(self)
    for callback, args, kwds in self._callbacks:
      eventloop.queue_call(None, callback, *args, **kwds)

  def set_exception(self, exc, tb=None):
    """Mark as failed with *exc* (and traceback) and schedule callbacks."""
    assert isinstance(exc, BaseException)
    assert not self._done
    self._exception = exc
    self._traceback = tb
    self._done = True
    # Unlike set_result(), tolerate not being in _all_pending (a failing
    # Future may have been cleared already).
    if self in self._all_pending:
      logging_debug('_all_pending: remove failing %s', self)
      self._all_pending.remove(self)
    else:
      logging_debug('_all_pending: not found %s', self)
    for callback, args, kwds in self._callbacks:
      eventloop.queue_call(None, callback, *args, **kwds)

  def done(self):
    """Return True once a result or exception has been set."""
    return self._done

  @property
  def state(self):
    # This is just for compatibility with UserRPC and MultiRpc.
    # A Future is considered running as soon as it is created.
    if self._done:
      return self.FINISHING
    else:
      return self.RUNNING

  def wait(self):
    """Run the event loop until this Future is done; detects deadlock."""
    if self._done:
      return
    ev = eventloop.get_event_loop()
    while not self._done:
      if not ev.run1():
        # Event loop is empty yet we're not done: nothing can ever
        # complete this Future.  Fail it instead of spinning forever.
        logging.info('Deadlock in %s', self)
        logging.info('All pending Futures:\n%s', self.dump_all_pending())
        logging_debug('All pending Futures (verbose):\n%s',
                      self.dump_all_pending(verbose=True))
        self.set_exception(RuntimeError('Deadlock waiting for %s' % self))

  def get_exception(self):
    """Wait, then return the exception (None on success)."""
    self.wait()
    return self._exception

  def get_traceback(self):
    """Wait, then return the traceback (None if no exception)."""
    self.wait()
    return self._traceback

  def check_success(self):
    """Wait; re-raise the stored exception with its traceback, if any."""
    self.wait()
    if self._exception is not None:
      raise self._exception.__class__, self._exception, self._traceback

  def get_result(self):
    """Wait; return the result, or re-raise the failure."""
    self.check_success()
    return self._result

  @classmethod
  def wait_any(cls, futures):
    """Run the event loop until any of *futures* completes; return it."""
    # TODO: Flatten MultiRpcs.
    all = set(futures)
    ev = eventloop.get_event_loop()
    while all:
      for f in all:
        if f.state == cls.FINISHING:
          return f
      ev.run1()
    return None

  @classmethod
  def wait_all(cls, futures):
    """Run the event loop until all of *futures* complete."""
    # TODO: Flatten MultiRpcs.
    all = set(futures)
    ev = eventloop.get_event_loop()
    while all:
      all = set(f for f in all if f.state == cls.RUNNING)
      ev.run1()

  def _help_tasklet_along(self, gen, val=None, exc=None, tb=None):
    """Advance the tasklet generator *gen* by one step.

    Sends *val* (or throws *exc*) into the generator, then dispatches on
    what it yields (RPC, Future, or tuple/list of Futures); completes
    this Future when the generator returns or raises.
    """
    info = utils.gen_info(gen)
    __ndb_debug__ = info
    try:
      # Run the generator step under this tasklet's Context, restoring
      # the caller's Context afterwards.
      save_context = get_context()
      try:
        set_context(self._context)
        if exc is not None:
          logging_debug('Throwing %s(%s) into %s',
                        exc.__class__.__name__, exc, info)
          value = gen.throw(exc.__class__, exc, tb)
        else:
          logging_debug('Sending %r to %s', val, info)
          value = gen.send(val)
        self._context = get_context()
      finally:
        set_context(save_context)
    except StopIteration, err:
      # The generator returned; its return value is this Future's result.
      result = get_return_value(err)
      logging_debug('%s returned %r', info, result)
      self.set_result(result)
      return
    except Exception, err:
      _, _, tb = sys.exc_info()
      logging.warning('%s raised %s(%s)',
                      info, err.__class__.__name__, err,
                      exc_info=(logging.getLogger().level <= logging.INFO))
      self.set_exception(err, tb)
      return
    else:
      logging_debug('%s yielded %r', info, value)
      if isinstance(value, (UserRPC, datastore_rpc.MultiRpc)):
        # TODO: Tail recursion if the RPC is already complete.
        eventloop.queue_rpc(value, self._on_rpc_completion, value, gen)
        return
      if isinstance(value, Future):
        # TODO: Tail recursion if the Future is already done.
        assert not self._next, self._next
        self._next = value
        self._geninfo = utils.gen_info(gen)
        logging_debug('%s is now blocked waiting for %s', self, value)
        value.add_callback(self._on_future_completion, value, gen)
        return
      if isinstance(value, (tuple, list)):
        # Arrange for yield to return a list of results (not Futures).
        info = 'multi-yield from ' + utils.gen_info(gen)
        mfut = MultiFuture(info)
        try:
          for subfuture in value:
            mfut.add_dependent(subfuture)
          mfut.complete()
        except Exception, err:
          _, _, tb = sys.exc_info()
          mfut.set_exception(err, tb)
        mfut.add_callback(self._on_future_completion, mfut, gen)
        return
      if is_generator(value):
        assert False  # TODO: emulate PEP 380 here?
      assert False  # A tasklet shouldn't yield plain values.

  def _on_rpc_completion(self, rpc, gen):
    """Resume *gen* with the RPC's result (or exception)."""
    try:
      result = rpc.get_result()
    except Exception, err:
      _, _, tb = sys.exc_info()
      self._help_tasklet_along(gen, exc=err, tb=tb)
    else:
      self._help_tasklet_along(gen, result)

  def _on_future_completion(self, future, gen):
    """Resume *gen* with the completed Future's result (or exception)."""
    if self._next is future:
      self._next = None
      self._geninfo = None
      logging_debug('%s is no longer blocked waiting for %s', self, future)
    exc = future.get_exception()
    if exc is not None:
      self._help_tasklet_along(gen, exc=exc, tb=future.get_traceback())
    else:
      val = future.get_result()  # This won't raise an exception.
      self._help_tasklet_along(gen, val)
def sleep(dt):
  """Public function to sleep some time.

  Example:
    yield tasklets.sleep(0.5)  # Sleep for half a sec.
  """
  future = Future('sleep(%.3f)' % dt)
  eventloop.queue_call(dt, future.set_result, None)
  return future
class MultiFuture(Future):
  """A Future that depends on multiple other Futures.

  This is used internally by 'v1, v2, ... = yield f1, f2, ...'; the
  semantics (e.g. error handling) are constrained by that use case.

  The protocol from the caller's POV is:

    mf = MultiFuture()
    mf.add_dependent(<some other Future>)  -OR-  mf.putq(<some value>)
    mf.add_dependent(<some other Future>)  -OR-  mf.putq(<some value>)
      .
      . (More mf.add_dependent() and/or mf.putq() calls)
      .
    mf.complete()  # No more dependents will be added.
      .
      . (Time passes)
      .
    results = mf.get_result()

  Now, results is a list of results from all dependent Futures in
  the order in which they were added.

  It is legal to add the same dependent multiple times.

  Callbacks can be added at any point.

  From a dependent Future POV, there's nothing to be done: a callback
  is automatically added to each dependent Future which will signal
  its completion to the MultiFuture.

  Error handling: if any dependent future raises an error, it is
  propagated to mf.  To force an early error, you can call
  mf.set_exception() instead of mf.complete().  After this you can't
  call mf.add_dependent() or mf.putq() any more.
  """

  def __init__(self, info=None):
    __ndb_debug__ = 'SKIP'  # Hide this frame from self._where
    self._full = False  # True once complete()/set_exception() was called.
    self._dependents = set()  # Dependent Futures not yet done.
    self._results = []  # All dependents, in insertion order.
    super(MultiFuture, self).__init__(info=info)

  def __repr__(self):
    # TODO: This may be invoked before __init__() returns,
    # from Future.__init__().  Beware.
    line = super(MultiFuture, self).__repr__()
    lines = [line]
    for fut in self._results:
      lines.append(fut.dump_stack().replace('\n', '\n '))
    return '\n waiting for '.join(lines)

  # TODO: Maybe rename this method, since completion of a Future/RPC
  # already means something else.  But to what?
  def complete(self):
    """Signal that no more dependents will be added."""
    assert not self._full
    self._full = True
    if not self._dependents:
      self._finish()

  # TODO: Maybe don't overload set_exception() with this?
  def set_exception(self, exc, tb=None):
    """Fail early; also stops accepting new dependents."""
    self._full = True
    super(MultiFuture, self).set_exception(exc, tb)

  def _finish(self):
    """Collect dependents' results, in order, into this Future's result."""
    assert self._full
    assert not self._dependents
    assert not self._done
    try:
      result = [r.get_result() for r in self._results]
    except Exception, err:
      _, _, tb = sys.exc_info()
      self.set_exception(err, tb)
    else:
      self.set_result(result)

  def putq(self, value):
    """Add a value or Future as a dependent."""
    if isinstance(value, Future):
      fut = value
    else:
      # Wrap a plain value in an already-completed Future.
      fut = Future()
      fut.set_result(value)
    self.add_dependent(fut)

  def add_dependent(self, fut):
    """Track *fut*; its completion is signalled back to this MultiFuture."""
    assert isinstance(fut, Future)
    assert not self._full
    self._results.append(fut)
    if fut not in self._dependents:
      self._dependents.add(fut)
      fut.add_callback(self._signal_dependent_done, fut)

  def _signal_dependent_done(self, fut):
    """Callback run when a dependent completes; may finish this Future."""
    self._dependents.remove(fut)
    if self._full and not self._dependents and not self._done:
      self._finish()
class QueueFuture(Future):
  """A Queue following the same protocol as MultiFuture.

  However, instead of returning results as a list, it lets you
  retrieve results as soon as they are ready, one at a time, using
  getq().  The Future itself finishes with a result of None when the
  last result is ready (regardless of whether it was retrieved).

  The getq() method returns a Future which blocks until the next
  result is ready, and then returns that result.  Each getq() call
  retrieves one unique result.  Extra getq() calls after the last
  result is already returned return EOFError as their Future's
  exception.  (I.e., q.getq() returns a Future as always, but yielding
  that Future raises EOFError.)

  NOTE: If .getq() is given a default argument, it will be returned as
  the result instead of raising EOFError.  However, other exceptions
  are still passed through.
  (NOTE(review): getq() below takes no default argument -- this
  paragraph looks aspirational; confirm against later versions.)

  NOTE: Values can also be pushed directly via .putq(value).  However
  there is no flow control -- if the producer is faster than the
  consumer, the queue will grow unbounded.
  """
  # TODO: Refactor to share code with MultiFuture.

  def __init__(self, info=None):
    self._full = False  # True once complete()/set_exception() was called.
    self._dependents = set()  # Dependent Futures not yet done.
    self._completed = collections.deque()  # Ready (exc, tb, val) triples.
    self._waiting = collections.deque()  # getq() Futures awaiting a result.
    # Invariant: at least one of _completed and _waiting is empty.
    # Also: _full and not _dependents <==> _done.
    super(QueueFuture, self).__init__(info=info)

  # TODO: __repr__

  def complete(self):
    """Signal that no more dependents will be added."""
    assert not self._full
    self._full = True
    if not self._dependents:
      self.set_result(None)
      self._mark_finished()

  def set_exception(self, exc, tb=None):
    """Fail early; also stops accepting new dependents."""
    self._full = True
    super(QueueFuture, self).set_exception(exc, tb)
    if not self._dependents:
      self._mark_finished()

  def putq(self, value):
    """Add a value or Future whose outcome becomes retrievable via getq()."""
    if isinstance(value, Future):
      fut = value
    else:
      # Wrap a plain value in an already-completed Future.
      fut = Future()
      fut.set_result(value)
    self.add_dependent(fut)

  def add_dependent(self, fut):
    """Track *fut*; its completion feeds the queue."""
    assert isinstance(fut, Future)
    assert not self._full
    if fut not in self._dependents:
      self._dependents.add(fut)
      fut.add_callback(self._signal_dependent_done, fut)

  def _signal_dependent_done(self, fut):
    """Hand a completed dependent's outcome to a waiter or buffer it."""
    assert fut.done()
    self._dependents.remove(fut)
    exc = fut.get_exception()
    tb = fut.get_traceback()
    val = None
    if exc is None:
      val = fut.get_result()
    if self._waiting:
      waiter = self._waiting.popleft()
      self._pass_result(waiter, exc, tb, val)
    else:
      self._completed.append((exc, tb, val))
    if self._full and not self._dependents and not self._done:
      self.set_result(None)
      self._mark_finished()

  def _mark_finished(self):
    """Resolve any outstanding getq() waiters with EOF."""
    assert self._done
    while self._waiting:
      waiter = self._waiting.popleft()
      self._pass_eof(waiter)

  def getq(self):
    """Return a Future for the next result (EOFError when exhausted)."""
    fut = Future()
    if self._completed:
      exc, tb, val = self._completed.popleft()
      self._pass_result(fut, exc, tb, val)
    elif self._full and not self._dependents:
      self._pass_eof(fut)
    else:
      self._waiting.append(fut)
    return fut

  def _pass_eof(self, fut):
    """Fail *fut* with this queue's own exception, or EOFError if none."""
    assert self._done
    exc = self.get_exception()
    if exc is not None:
      tb = self.get_traceback()
    else:
      exc = EOFError('Queue is empty')
      tb = None
    self._pass_result(fut, exc, tb, None)

  def _pass_result(self, fut, exc, tb, val):
    """Copy an outcome triple into *fut*."""
    if exc is not None:
      fut.set_exception(exc, tb)
    else:
      fut.set_result(val)
class SerialQueueFuture(Future):
  """Like QueueFuture but maintains the order of insertion.

  This class is used by Query operations.

  Invariants:

  - At least one of _queue and _waiting is empty.
  - The Futures in _waiting are always pending.

  (The Futures in _queue may be pending or completed.)

  In the discussion below, add_dependent() is treated the same way as
  putq().

  If putq() is ahead of getq(), the situation is like this:

                         putq()
                           v
    _queue: [f1, f2, ...]; _waiting: []
        ^
      getq()

  Here, putq() appends a Future to the right of _queue, and getq()
  removes one from the left.

  If getq() is ahead of putq(), it's like this:

          putq()
            v
    _queue: []; _waiting: [f1, f2, ...]
                                     ^
                                   getq()

  Here, putq() removes a Future from the left of _waiting, and getq()
  appends one to the right.

  When both are empty, putq() appends a Future to the right of _queue,
  while getq() appends one to the right of _waiting.

  The _full flag means that no more calls to putq() will be made; it
  is set by calling either complete() or set_exception().

  Calling complete() signals that no more putq() calls will be made.
  If getq() is behind, subsequent getq() calls will eat up _queue
  until it is empty, and after that will return a Future that passes
  EOFError (note that getq() itself never raises EOFError).  If getq()
  is ahead when complete() is called, the Futures in _waiting are all
  passed an EOFError exception (thereby eating up _waiting).

  If, instead of complete(), set_exception() is called, the exception
  and traceback set there will be used instead of EOFError.
  """

  def __init__(self, info=None):
    self._full = False  # True once complete()/set_exception() was called.
    self._queue = collections.deque()  # Futures not yet claimed by getq().
    self._waiting = collections.deque()  # getq() Futures awaiting a value.
    super(SerialQueueFuture, self).__init__(info=info)

  # TODO: __repr__

  def complete(self):
    """Signal that no more putq()/add_dependent() calls will be made."""
    assert not self._full
    self._full = True
    # Anyone already waiting can never receive a value: pass them EOF.
    while self._waiting:
      waiter = self._waiting.popleft()
      waiter.set_exception(EOFError('Queue is empty'))
    if not self._queue:
      self.set_result(None)

  def set_exception(self, exc, tb=None):
    """Fail early; outstanding waiters receive the same exception."""
    self._full = True
    super(SerialQueueFuture, self).set_exception(exc, tb)
    while self._waiting:
      waiter = self._waiting.popleft()
      waiter.set_exception(exc, tb)

  def putq(self, value):
    """Append a value or Future; may satisfy a waiting getq() directly."""
    if isinstance(value, Future):
      fut = value
    else:
      if self._waiting:
        # Fast path: hand the plain value straight to the oldest waiter.
        waiter = self._waiting.popleft()
        waiter.set_result(value)
        return
      fut = Future()
      fut.set_result(value)
    self.add_dependent(fut)

  def add_dependent(self, fut):
    """Append *fut*, or wire its outcome to the oldest waiter if any."""
    assert isinstance(fut, Future)
    assert not self._full
    if self._waiting:
      waiter = self._waiting.popleft()
      fut.add_callback(_transfer_result, fut, waiter)
    else:
      self._queue.append(fut)

  def getq(self):
    """Return a Future for the next queued value, in insertion order."""
    if self._queue:
      fut = self._queue.popleft()
      # TODO: Isn't it better to call self.set_result(None) in complete()?
      if not self._queue and self._full and not self._done:
        self.set_result(None)
    else:
      fut = Future()
      if self._full:
        assert self._done  # Else, self._queue should be non-empty.
        # Bug fix: removed a dead 'err = None' assignment that was
        # immediately overwritten by the next line.
        err = self.get_exception()
        if err is not None:
          tb = self.get_traceback()
        else:
          err = EOFError('Queue is empty')
          tb = None
        fut.set_exception(err, tb)
      else:
        self._waiting.append(fut)
    return fut
def _transfer_result(fut1, fut2):
"""Helper to transfer result or errors from one Future to another."""
exc = fut1.get_exception()
if exc is not None:
tb = fut1.get_traceback()
fut2.set_exception(exc, tb)
else:
val = fut1.get_result()
fut2.set_result(val)
class ReducingFuture(Future):
  """A Queue following the same protocol as MultiFuture.

  However the result, instead of being a list of results of dependent
  Futures, is computed by calling a 'reducer' tasklet.  The reducer tasklet
  takes a list of values and returns a single value.  It may be called
  multiple times on sublists of values and should behave like
  e.g. sum().

  NOTE: The reducer input values may be reordered compared to the
  order in which they were added to the queue.
  """
  # TODO: Refactor to reuse some code with MultiFuture.

  def __init__(self, reducer, info=None, batch_size=20):
    self._reducer = reducer  # Callable reducing a list to one value/Future.
    self._batch_size = batch_size  # Reduce eagerly once this many queued.
    self._full = False  # True once complete()/set_exception() was called.
    self._dependents = set()  # Dependent Futures not yet done.
    self._completed = collections.deque()  # NOTE(review): unused below.
    self._queue = collections.deque()  # Values awaiting reduction.
    super(ReducingFuture, self).__init__(info=info)

  # TODO: __repr__

  def complete(self):
    """Signal that no more dependents will be added."""
    assert not self._full
    self._full = True
    if not self._dependents:
      self._mark_finished()

  def set_exception(self, exc, tb=None):
    """Fail early; pending unreduced values are discarded."""
    self._full = True
    self._queue.clear()
    super(ReducingFuture, self).set_exception(exc, tb)

  def putq(self, value):
    """Add a value or Future as a dependent."""
    if isinstance(value, Future):
      fut = value
    else:
      fut = Future()
      fut.set_result(value)
    self.add_dependent(fut)

  def add_dependent(self, fut):
    """Track *fut*; only allowed before complete()/set_exception()."""
    assert not self._full
    self._internal_add_dependent(fut)

  def _internal_add_dependent(self, fut):
    # Also used for Futures returned by the reducer after _full is set,
    # which is why it skips the 'not self._full' assertion.
    assert isinstance(fut, Future)
    if fut not in self._dependents:
      self._dependents.add(fut)
      fut.add_callback(self._signal_dependent_done, fut)

  def _signal_dependent_done(self, fut):
    """Queue a completed dependent's value; reduce when a batch fills."""
    assert fut.done()
    self._dependents.remove(fut)
    if self._done:
      return  # Already done.
    try:
      val = fut.get_result()
    except Exception, err:
      _, _, tb = sys.exc_info()
      self.set_exception(err, tb)
      return
    self._queue.append(val)
    if len(self._queue) >= self._batch_size:
      todo = list(self._queue)
      self._queue.clear()
      try:
        nval = self._reducer(todo)
      except Exception, err:
        _, _, tb = sys.exc_info()
        self.set_exception(err, tb)
        return
      # The reducer may itself return a Future; treat it as a dependent.
      if isinstance(nval, Future):
        self._internal_add_dependent(nval)
      else:
        self._queue.append(nval)
    if self._full and not self._dependents:
      self._mark_finished()

  def _mark_finished(self):
    """Reduce whatever remains queued and set the final result."""
    if not self._queue:
      self.set_result(None)
    elif len(self._queue) == 1:
      self.set_result(self._queue.pop())
    else:
      todo = list(self._queue)
      self._queue.clear()
      try:
        nval = self._reducer(todo)
      except Exception, err:
        _, _, tb = sys.exc_info()
        self.set_exception(err, tb)
        return
      if isinstance(nval, Future):
        self._internal_add_dependent(nval)
      else:
        self.set_result(nval)
# Alias for StopIteration used to mark return values.
# To use this, raise Return(<your return value>).  The semantics
# are exactly the same as raise StopIteration(<your return value>)
# but using Return clarifies that you are intending this to be the
# return value of a tasklet.
# TODO: According to Monocle authors Steve and Greg Hazel, Twisted
# used an exception to signal a return value from a generator early
# on, and they found out it was error-prone.  Should I worry?
Return = StopIteration
def get_return_value(err):
  """Extract the value carried by a StopIteration/Return exception.

  No args means None, a single arg is the value itself, and multiple
  args are returned as the args tuple.
  """
  if not err.args:
    return None
  if len(err.args) == 1:
    return err.args[0]
  return err.args
def tasklet(func):
  """Decorator turning *func* into a tasklet that returns a Future.

  Calling the wrapped function runs *func*; if it produces a generator,
  the generator is scheduled on the event loop and driven by
  Future._help_tasklet_along().  Otherwise the plain return value (or a
  'raise Return(...)' value) is placed directly in the Future.
  """
  @utils.wrapping(func)
  def tasklet_wrapper(*args, **kwds):
    """Call *func* and wrap the outcome (value or generator) in a Future."""
    # TODO: make most of this a public function so you can take a bare
    # generator and turn it into a tasklet dynamically.  (Monocle has
    # this I believe.)
    # __ndb_debug__ = utils.func_info(func)
    fut = Future('tasklet %s' % utils.func_info(func))
    fut._context = get_context()
    try:
      result = func(*args, **kwds)
    except StopIteration, err:
      # Just in case the function is not a generator but still uses
      # the "raise Return(...)" idiom, we'll extract the return value.
      result = get_return_value(err)
    if is_generator(result):
      eventloop.queue_call(None, fut._help_tasklet_along, result)
    else:
      fut.set_result(result)
    return fut
  return tasklet_wrapper
def synctasklet(func):
  """Decorator to run a function as a tasklet when called.

  Use this to wrap a request handler function that will be called by
  some web application framework (e.g. a Django view function or a
  webapp.RequestHandler.get method).
  """
  # Bug fix: wrap once at decoration time instead of re-invoking
  # tasklet(func) on every call of the wrapper.
  taskletfunc = tasklet(func)
  @utils.wrapping(func)
  def synctasklet_wrapper(*args, **kwds):
    __ndb_debug__ = utils.func_info(func)
    return taskletfunc(*args, **kwds).get_result()
  return synctasklet_wrapper
# os.environ marker used to detect a fresh request (see get_context()).
_CONTEXT_KEY = '__CONTEXT__'

# TODO: Use thread-local for this.
# The current request's Context, managed by get_context()/set_context().
_context = None
def get_context():
  """Return the current Context, creating a default one if needed."""
  global _context
  ctx = _context if os.getenv(_CONTEXT_KEY) else None
  if ctx is None:
    ctx = make_default_context()
    set_context(ctx)
  return ctx
def make_default_context():
  """Build a plain Context; module imported lazily to break a cycle."""
  import context  # Late import to deal with circular imports.
  return context.Context()
def set_context(new_context):
  """Install *new_context* as the current Context for this request."""
  global _context
  _context = new_context
  os.environ[_CONTEXT_KEY] = '1'
# TODO: Rework the following into documentation.
# A tasklet/coroutine/generator can yield the following things:
# - Another tasklet/coroutine/generator; this is entirely equivalent to
# "for x in g: yield x"; this is handled entirely by the @tasklet wrapper.
# (Actually, not. @tasklet returns a function that when called returns
# a Future. You can use the pep380 module's @gwrap decorator to support
# yielding bare generators though.)
# - An RPC (or MultiRpc); the tasklet will be resumed when this completes.
# This does not use the RPC's callback mechanism.
# - A Future; the tasklet will be resumed when the Future is done.
# This uses the Future's callback mechanism.
# A Future can be used in several ways:
# - Yield it from a tasklet; see above.
# - Check (poll) its status via f.done.
# - Call its wait() method, perhaps indirectly via check_success()
# or get_result(). This invokes the event loop.
# - Call the Future.wait_any() or Future.wait_all() method.
# This waits for any or all Futures and RPCs in the argument list.
# XXX HIRO XXX
# - A tasklet is a (generator) function decorated with @tasklet.
# - Calling a tasklet schedules the function for execution and returns a Future.
# - A function implementing a tasklet may:
# = yield a Future; this waits for the Future which returns f.get_result();
# = yield an RPC; this waits for the RPC and then returns rpc.get_result();
# = raise Return(result); this sets the outer Future's result;
# = raise StopIteration or return; this sets the outer Future's result;
# = raise another exception: this sets the outer Future's exception.
# - If a function implementing a tasklet is not a generator it will be
# immediately executed to completion and the tasklet wrapper will
# return a Future that is already done. (XXX Alternative behavior:
# it schedules the call to be run by the event loop.)
# - Code not running in a tasklet can call f.get_result() or f.wait() on
# a future. This is implemented by a simple loop like the following:
# while not self._done:
# eventloop.run1()
# - Here eventloop.run1() runs one "atomic" part of the event loop:
# = either it calls one immediately ready callback;
# = or it waits for the first RPC to complete;
# = or it sleeps until the first callback should be ready;
# = or it raises an exception indicating all queues are empty.
# - It is possible but suboptimal to call rpc.get_result() or
# rpc.wait() directly on an RPC object since this will not allow
# other callbacks to run as they become ready. Wrapping an RPC in a
# Future will take care of this issue.
# - The important insight is that when a generator function
# implementing a tasklet yields, raises or returns, there is always a
# wrapper that catches this event and either turns it into a
# callback sent to the event loop, or sets the result or exception
# for the tasklet's Future.
| Python |
"""Tests for query.py."""
import os
import re
import sys
import time
import unittest
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_file_stub
from google.appengine.api.memcache import memcache_stub
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import datastore_query
from ndb import context
from ndb import model
from ndb import query
from ndb import tasklets
from ndb import test_utils
class QueryTests(test_utils.DatastoreTest):
def setUp(self):
  """Define the Foo model and seed test entities for each test."""
  super(QueryTests, self).setUp()
  # Create class inside tests because kinds are cleared every test.
  # The class is published as a module global so test methods can use it.
  global Foo
  class Foo(model.Model):
    name = model.StringProperty()
    rate = model.IntegerProperty()
    tags = model.StringProperty(repeated=True)
  self.create_entities()
def create_entities(self):
  """Store the three Foo entities shared by the query tests."""
  self.joe = Foo(name='joe', tags=['joe', 'jill', 'hello'], rate=1)
  self.jill = Foo(name='jill', tags=['jack', 'jill'], rate=2)
  self.moe = Foo(name='moe', rate=1)
  for entity in (self.joe, self.jill, self.moe):
    entity.put()
def testBasicQuery(self):
  """Range filters on name select joe and moe; empty filter() is a no-op."""
  qry = query.Query(kind='Foo')
  qry = qry.filter(Foo.name >= 'joe').filter(Foo.name <= 'moe').filter()
  self.assertEqual(list(qry), [self.joe, self.moe])
def testOrderedQuery(self):
  """Sort by rate ascending then name descending; empty order() is a no-op."""
  qry = query.Query(kind='Foo')
  qry = qry.order(Foo.rate).order().order(-Foo.name)
  self.assertEqual(list(qry), [self.moe, self.joe, self.jill])
def testQueryAttributes(self):
  """kind/ancestor/filters/orders reflect how the Query was built."""
  qry = query.Query(kind='Foo')
  self.assertEqual(qry.kind, 'Foo')
  self.assertEqual(qry.ancestor, None)
  self.assertEqual(qry.filters, None)
  self.assertEqual(qry.orders, None)

  key = model.Key('Barba', 'papa')
  qry = query.Query(kind='Foo', ancestor=key)
  self.assertEqual(qry.kind, 'Foo')
  self.assertEqual(qry.ancestor, key)
  self.assertEqual(qry.filters, None)
  self.assertEqual(qry.orders, None)

  qry = qry.filter(Foo.rate == 1)
  self.assertEqual(qry.kind, 'Foo')
  self.assertEqual(qry.ancestor, key)
  self.assertEqual(qry.filters, query.FilterNode('rate', '=', 1))
  self.assertEqual(qry.orders, None)

  qry = qry.order(-Foo.name)
  self.assertEqual(qry.kind, 'Foo')
  self.assertEqual(qry.ancestor, key)
  self.assertEqual(qry.filters, query.FilterNode('rate', '=', 1))
  self.assertEqual(query._orders_to_orderings(qry.orders),
                   [('name', query._DESC)])
def testQueryRepr(self):
  """repr() shows kind and ancestor; filters/orders must not crash it."""
  q = Foo.query()
  self.assertEqual(repr(q), "Query(kind='Foo')")
  q = Foo.query(ancestor=model.Key('Bar', 1))
  self.assertEqual(repr(q), "Query(kind='Foo', ancestor=Key('Bar', 1))")
  # Let's not specify what it should show for filters and orders,
  # just test that it doesn't blow up.
  q1 = q.filter(Foo.rate == 1, Foo.name == 'x')
  q2 = q1.order(-Foo.rate)
  # Bug fix: repr() was never actually invoked on q1/q2 before, so the
  # "doesn't blow up" intent was untested.
  repr(q1)
  repr(q2)
def testRunToQueue(self):
  """run_to_queue() pushes results (entity at index 2) in query order."""
  qry = Foo.query()
  result_queue = tasklets.MultiFuture()
  qry.run_to_queue(result_queue, self.conn).check_success()
  results = result_queue.get_result()
  self.assertEqual(len(results), 3)
  for item, entity in zip(results, [self.joe, self.jill, self.moe]):
    self.assertEqual(item[2], entity)
def testRunToQueueError(self):
  """An invalid query fails both the driver Future and the queue."""
  qry = Foo.query(Foo.name > '', Foo.rate > 0)
  result_queue = tasklets.MultiFuture()
  fut = qry.run_to_queue(result_queue, self.conn)
  self.assertRaises(datastore_errors.BadRequestError, fut.check_success)
  self.assertRaises(datastore_errors.BadRequestError,
                    result_queue.check_success)
def testModernQuerySyntax(self):
  """filter()/order() compose; 'Age' is the stored name for the age prop."""
  class Employee(model.Model):
    name = model.StringProperty()
    age = model.IntegerProperty('Age')
    rank = model.IntegerProperty()

    @classmethod
    def seniors(cls, min_age, min_rank):
      qry = cls.query().filter(cls.age >= min_age, cls.rank <= min_rank)
      return qry.order(cls.name, -cls.age)

  qry = Employee.seniors(42, 5)
  self.assertEqual(qry.filters,
                   query.ConjunctionNode(
                       query.FilterNode('Age', '>=', 42),
                       query.FilterNode('rank', '<=', 5)))
  self.assertEqual(query._orders_to_orderings(qry.orders),
                   [('name', query._ASC), ('Age', query._DESC)])
def testAndQuery(self):
class Employee(model.Model):
name = model.StringProperty()
age = model.IntegerProperty('Age')
rank = model.IntegerProperty()
q = Employee.query().filter(query.AND(Employee.age >= 42))
self.assertEqual(q.filters, query.FilterNode('Age', '>=', 42))
q = Employee.query(query.AND(Employee.age >= 42, Employee.rank <= 5))
self.assertEqual(q.filters,
query.ConjunctionNode(
query.FilterNode('Age', '>=', 42),
query.FilterNode('rank', '<=', 5)))
def testOrQuery(self):
class Employee(model.Model):
name = model.StringProperty()
age = model.IntegerProperty('Age')
rank = model.IntegerProperty()
q = Employee.query().filter(query.OR(Employee.age >= 42))
self.assertEqual(q.filters, query.FilterNode('Age', '>=', 42))
q = Employee.query(query.OR(Employee.age < 42, Employee.rank > 5))
self.assertEqual(q.filters,
query.DisjunctionNode(
query.FilterNode('Age', '<', 42),
query.FilterNode('rank', '>', 5)))
def testEmptyInFilter(self):
class Employee(model.Model):
name = model.StringProperty()
q = Employee.query(Employee.name.IN([]))
self.assertEqual(q.filters, query.FalseNode())
self.assertNotEqual(q.filters, 42)
f = iter(q).has_next_async()
self.assertRaises(datastore_errors.BadQueryError, f.check_success)
def testSingletonInFilter(self):
class Employee(model.Model):
name = model.StringProperty()
q = Employee.query(Employee.name.IN(['xyzzy']))
self.assertEqual(q.filters, query.FilterNode('name', '=', 'xyzzy'))
self.assertNotEqual(q.filters, 42)
e = Employee(name='xyzzy')
e.put()
self.assertEqual(q.get(), e)
def testInFilter(self):
class Employee(model.Model):
name = model.StringProperty()
q = Employee.query(Employee.name.IN(['a', 'b']))
self.assertEqual(q.filters,
query.DisjunctionNode(
query.FilterNode('name', '=', 'a'),
query.FilterNode('name', '=', 'b')))
a = Employee(name='a')
a.put()
b = Employee(name='b')
b.put()
self.assertEqual(list(q), [a, b])
def testQueryExceptions(self):
q = Foo.query(Foo.name > '', Foo.rate > 0)
f = q.fetch_async()
self.assertRaises(datastore_errors.BadRequestError, f.check_success)
def testFilterRepr(self):
class Employee(model.Model):
name = model.StringProperty()
f = (Employee.name == 'xyzzy')
self.assertEqual(repr(f), "FilterNode('name', '=', 'xyzzy')")
def testNodeComparisons(self):
a = query.FilterNode('foo', '=', 1)
b = query.FilterNode('foo', '=', 1)
c = query.FilterNode('foo', '=', 2)
d = query.FilterNode('foo', '<', 1)
# Don't use assertEqual/assertNotEqual; we want to be sure that
# __eq__ or __ne__ is really called here!
self.assertTrue(a == b)
self.assertTrue(a != c)
self.assertTrue(b != d)
self.assertRaises(TypeError, lambda: a < b)
self.assertRaises(TypeError, lambda: a <= b)
self.assertRaises(TypeError, lambda: a > b)
self.assertRaises(TypeError, lambda: a >= b)
x = query.AND(a, b, c)
y = query.AND(a, b, c)
z = query.AND(a, d)
self.assertTrue(x == y)
self.assertTrue(x != z)
def testQueryForStructuredProperty(self):
  """Filtering and ordering on sub-properties of a StructuredProperty."""
  class Bar(model.Model):
    name = model.StringProperty()
    foo = model.StructuredProperty(Foo)
  b1 = Bar(name='b1', foo=Foo(name='nest', rate=1, tags=['tag1', 'tag2']))
  b1.put()
  b2 = Bar(name='b2', foo=Foo(name='best', rate=2, tags=['tag2', 'tag3']))
  b2.put()
  b3 = Bar(name='b3', foo=Foo(name='rest', rate=2, tags=['tag2']))
  b3.put()
  q1 = Bar.query().order(Bar.name)
  self.assertEqual(q1.fetch(10), [b1, b2, b3])
  q2 = Bar.query().filter(Bar.foo.rate >= 2)
  self.assertEqual(q2.fetch(10), [b2, b3])
  q3 = q2.order(Bar.foo.rate, -Bar.foo.name, +Bar.foo.rate)
  self.assertEqual(q3.fetch(10), [b3, b2])

def testQueryForNestedStructuredProperty(self):
  """Doubly-nested sub-property paths (a.b.c) are queryable too."""
  class Bar(model.Model):
    name = model.StringProperty()
    foo = model.StructuredProperty(Foo)
  class Bak(model.Model):
    bar = model.StructuredProperty(Bar)
  class Baz(model.Model):
    bar = model.StructuredProperty(Bar)
    bak = model.StructuredProperty(Bak)
    rank = model.IntegerProperty()
  b1 = Baz(bar=Bar(foo=Foo(name='a')))
  b1.put()
  b2 = Baz(bar=Bar(foo=Foo(name='b')), bak=Bak(bar=Bar(foo=Foo(name='c'))))
  b2.put()
  q1 = Baz.query().filter(Baz.bar.foo.name >= 'a')
  self.assertEqual(q1.fetch(10), [b1, b2])
  q2 = Baz.query().filter(Baz.bak.bar.foo.name >= 'a')
  self.assertEqual(q2.fetch(10), [b2])

def testQueryForWholeStructure(self):
  """Equality against a whole sub-entity matches on exactly the set fields."""
  class Employee(model.Model):
    name = model.StringProperty()
    rank = model.IntegerProperty()
  class Manager(Employee):
    report = model.StructuredProperty(Employee, repeated=True)
  reports_a = []
  for i in range(3):
    e = Employee(name=str(i), rank=i)
    e.put()
    e.key = None  # sub-entities must not carry keys of their own
    reports_a.append(e)
  reports_b = []
  for i in range(3, 6):
    e = Employee(name=str(i), rank=0)
    e.put()
    e.key = None
    reports_b.append(e)
  mgr_a = Manager(name='a', report=reports_a)
  mgr_a.put()
  mgr_b = Manager(name='b', report=reports_b)
  mgr_b.put()
  mgr_c = Manager(name='c', report=reports_a + reports_b)
  mgr_c.put()
  res = list(Manager.query(Manager.report == Employee(name='1', rank=1)))
  self.assertEqual(res, [mgr_a, mgr_c])
  res = list(Manager.query(Manager.report == Employee(rank=0)))
  self.assertEqual(res, [mgr_a, mgr_b, mgr_c])
  res = list(Manager.query(Manager.report == Employee(rank=0, name='3')))
  self.assertEqual(res, [mgr_b, mgr_c])
  # Both fields must match within the SAME sub-entity.
  res = list(Manager.query(Manager.report == Employee(rank=0, name='1')))
  self.assertEqual(res, [])
  res = list(Manager.query(Manager.report == Employee(rank=0, name='0'),
                           Manager.report == Employee(rank=1, name='1')))
  self.assertEqual(res, [mgr_a, mgr_c])
  q = Manager.query(Manager.report == Employee(rank=2, name='2'))
  res = list(q)
  self.assertEqual(res, [mgr_a, mgr_c])
  res = list(q.iter(offset=1))
  self.assertEqual(res, [mgr_c])
  res = list(q.iter(limit=1))
  self.assertEqual(res, [mgr_a])

def testMultiQuery(self):
  """_MultiQuery merges several ordered sub-queries into one stream."""
  q1 = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  q2 = query.Query(kind='Foo').filter(Foo.tags == 'joe').order(Foo.name)
  qq = query._MultiQuery([q1, q2])
  res = list(qq)
  self.assertEqual(res, [self.jill, self.joe])

def testIterAsync(self):
  """Asynchronous iteration via has_next_async()/next()."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  @tasklets.synctasklet
  def foo():
    it = iter(q)
    res = []
    while (yield it.has_next_async()):
      val = it.next()
      res.append(val)
    self.assertEqual(res, [self.jill, self.joe])
  foo()

def testMap(self):
  """map() accepts both a plain callback and a tasklet callback."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  callback = lambda e: e.name
  @tasklets.tasklet
  def callback_async(e):
    yield tasklets.sleep(0.01)
    raise tasklets.Return(e.name)
  self.assertEqual(q.map(callback), ['jill', 'joe'])
  self.assertEqual(q.map(callback_async), ['jill', 'joe'])
  # TODO: Test map() with esoteric argument combinations
  # e.g. keys_only, produce_cursors, and merge_future.

def testMapAsync(self):
  """map_async() returns a future yielding the mapped list."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  callback = lambda e: e.name
  @tasklets.tasklet
  def callback_async(e):
    yield tasklets.sleep(0.01)
    raise tasklets.Return(e.name)
  @tasklets.synctasklet
  def foo():
    fut = q.map_async(callback)
    res = yield fut
    self.assertEqual(res, ['jill', 'joe'])
    fut = q.map_async(callback_async)
    res = yield fut
    self.assertEqual(res, ['jill', 'joe'])
  foo()

def testFetch(self):
  """fetch(n) returns at most n results in query order."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  self.assertEqual(q.fetch(10), [self.jill, self.joe])
  self.assertEqual(q.fetch(2), [self.jill, self.joe])
  self.assertEqual(q.fetch(1), [self.jill])

def testFetchAsync(self):
  """fetch_async(n) mirrors fetch(n) through a future."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  @tasklets.synctasklet
  def foo():
    res = yield q.fetch_async(10)
    self.assertEqual(res, [self.jill, self.joe])
    res = yield q.fetch_async(2)
    self.assertEqual(res, [self.jill, self.joe])
    res = yield q.fetch_async(1)
    self.assertEqual(res, [self.jill])
  foo()

def testFetchEmpty(self):
  """fetch() on a query with no matches returns []."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jillian')
  self.assertEqual(q.fetch(1), [])

def testFetchKeysOnly(self):
  """keys_only=True yields Key objects instead of entities."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  self.assertEqual(q.fetch(10, keys_only=True),
                   [self.jill.key, self.joe.key])

def testGet(self):
  """get() returns the first result."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  self.assertEqual(q.get(), self.jill)

def testGetEmpty(self):
  """get() on a query with no matches returns None."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jillian')
  self.assertEqual(q.get(), None)

def testGetKeysOnly(self):
  """get(keys_only=True) returns the first result's key."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  self.assertEqual(q.get(keys_only=True), self.jill.key)
def testCursors(self):
  """cursor_before()/cursor_after() bracket each yielded entity."""
  q = query.Query(kind='Foo')
  it = q.iter(produce_cursors=True)
  expected = [self.joe, self.jill, self.moe]
  # Before the first result there is no cursor yet.
  self.assertRaises(datastore_errors.BadArgumentError, it.cursor_before)
  self.assertRaises(datastore_errors.BadArgumentError, it.cursor_after)
  before = []
  after = []
  for i, ent in enumerate(it):
    self.assertEqual(ent, expected[i])
    before.append(it.cursor_before())
    after.append(it.cursor_after())
  before.append(it.cursor_before())
  after.append(it.cursor_after())
  # Each item's before-cursor equals the previous item's after-cursor.
  self.assertEqual(before[1], after[0])
  self.assertEqual(before[2], after[1])
  self.assertEqual(before[3], after[2])
  self.assertEqual(before[3], after[3]) # !!!

def testCursorsKeysOnly(self):
  """The same cursor bracketing holds for keys_only iteration."""
  q = query.Query(kind='Foo')
  it = q.iter(produce_cursors=True, keys_only=True)
  expected = [self.joe.key, self.jill.key, self.moe.key]
  self.assertRaises(datastore_errors.BadArgumentError, it.cursor_before)
  self.assertRaises(datastore_errors.BadArgumentError, it.cursor_after)
  before = []
  after = []
  for i, ent in enumerate(it):
    self.assertEqual(ent, expected[i])
    before.append(it.cursor_before())
    after.append(it.cursor_after())
  before.append(it.cursor_before())
  after.append(it.cursor_after())
  self.assertEqual(before[1], after[0])
  self.assertEqual(before[2], after[1])
  self.assertEqual(before[3], after[2])
  self.assertEqual(before[3], after[3]) # !!!

def testCursorsEfficientPaging(self):
  """Single-RPC paging via limit=pagesize+1, batch_size=pagesize."""
  # We want to read a 'page' of data, get the cursor just past the
  # page, and know whether there is another page, all with a single
  # RPC. To do this, set limit=pagesize+1, batch_size=pagesize.
  q = query.Query(kind='Foo')
  cursors = {}
  mores = {}
  for pagesize in [1, 2, 3, 4]:
    it = q.iter(produce_cursors=True, limit=pagesize+1, batch_size=pagesize)
    todo = pagesize
    for ent in it:
      todo -= 1
      if todo <= 0:
        break
    cursors[pagesize] = it.cursor_after()
    mores[pagesize] = it.probably_has_next()
  self.assertEqual(mores, {1: True, 2: True, 3: False, 4: False})
  self.assertEqual(cursors[3], cursors[4])
  # TODO: Assert that only one RPC call was made.

def testCount(self):
  """count(n) counts up to n matching entities."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  self.assertEqual(q.count(10), 2)
  self.assertEqual(q.count(1), 1)

def testCountAsync(self):
  """count_async(n) mirrors count(n) through a future."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jill').order(Foo.name)
  @tasklets.synctasklet
  def foo():
    res = yield q.count_async(10)
    self.assertEqual(res, 2)
    res = yield q.count_async(1)
    self.assertEqual(res, 1)
  foo()

def testCountEmpty(self):
  """count() of a query with no matches is 0."""
  q = query.Query(kind='Foo').filter(Foo.tags == 'jillian')
  self.assertEqual(q.count(1), 0)

def testCountPostFilter(self):
  """count() honors in-memory (post-)filters on whole sub-entities."""
  class Froo(model.Model):
    name = model.StringProperty()
    rate = model.IntegerProperty()
    age = model.IntegerProperty()
  class Bar(model.Model):
    name = model.StringProperty()
    froo = model.StructuredProperty(Froo, repeated=True)
  b1 = Bar(name='b1', froo=[Froo(name='a', rate=1)])
  b1.put()
  b2 = Bar(name='b2', froo=[Froo(name='a', rate=1)])
  b2.put()
  q = Bar.query(Bar.froo == Froo(name='a', rate=1))
  self.assertEqual(q.count(3), 2)
  self.assertEqual(q.count(2), 2)
  self.assertEqual(q.count(1), 1)

def testCountDisjunction(self):
  """count() works across an IN() disjunction."""
  q = Foo.query(Foo.name.IN(['joe', 'jill']))
  self.assertEqual(q.count(3), 2)
  self.assertEqual(q.count(2), 2)
  self.assertEqual(q.count(1), 1)

def testFetchPage(self):
  """fetch_page() returns (results, cursor, more) for successive pages."""
  # This test implicitly also tests fetch_page_async().
  q = query.Query(kind='Foo')
  page_size = 1
  res, curs, more = q.fetch_page(page_size)
  self.assertEqual(res, [self.joe])
  self.assertTrue(more)
  res, curs, more = q.fetch_page(page_size, start_cursor=curs)
  self.assertEqual(res, [self.jill])
  self.assertTrue(more)
  res, curs, more = q.fetch_page(page_size, start_cursor=curs)
  self.assertEqual(res, [self.moe])
  self.assertFalse(more)
  res, curs, more = q.fetch_page(page_size, start_cursor=curs)
  self.assertEqual(res, [])
  self.assertFalse(more)
  page_size = 2
  res, curs, more = q.fetch_page(page_size)
  self.assertEqual(res, [self.joe, self.jill])
  self.assertTrue(more)
  res, curs, more = q.fetch_page(page_size, start_cursor=curs)
  self.assertEqual(res, [self.moe])
  self.assertFalse(more)
  res, curs, more = q.fetch_page(page_size, start_cursor=curs)
  self.assertEqual(res, [])
  self.assertFalse(more)
  page_size = 3
  res, curs, more = q.fetch_page(page_size)
  self.assertEqual(res, [self.joe, self.jill, self.moe])
  self.assertFalse(more)
  res, curs, more = q.fetch_page(page_size, start_cursor=curs)
  self.assertEqual(res, [])
  self.assertFalse(more)
  page_size = 4
  res, curs, more = q.fetch_page(page_size)
  self.assertEqual(res, [self.joe, self.jill, self.moe])
  self.assertFalse(more)
  res, curs, more = q.fetch_page(page_size, start_cursor=curs)
  self.assertEqual(res, [])
  self.assertFalse(more)
def testMultiQueryIterator(self):
  """IN() queries iterate asynchronously in the requested order."""
  q = query.Query(kind='Foo').filter(Foo.tags.IN(['joe', 'jill']))
  q = q.order(Foo.name)
  @tasklets.synctasklet
  def foo():
    it = iter(q)
    res = []
    while (yield it.has_next_async()):
      val = it.next()
      res.append(val)
    self.assertEqual(res, [self.jill, self.joe])
  foo()

def testMultiQueryIteratorUnordered(self):
  """Without an order, only set-equality of the merged results is promised."""
  q = query.Query(kind='Foo').filter(Foo.tags.IN(['joe', 'jill']))
  @tasklets.synctasklet
  def foo():
    it = iter(q)
    res = []
    while (yield it.has_next_async()):
      val = it.next()
      res.append(val)
    self.assertEqual(set(r._key for r in res),
                     set([self.jill._key, self.joe._key]))
  foo()

def testMultiQueryFetch(self):
  """fetch() variants (limit/offset/keys_only) on an ordered IN() query."""
  q = Foo.query(Foo.tags.IN(['joe', 'jill'])).order(-Foo.name)
  expected = [self.joe, self.jill]
  self.assertEqual(q.fetch(10), expected)
  self.assertEqual(q.fetch(None), expected)
  self.assertEqual(q.fetch(), expected)
  self.assertEqual(q.fetch(2), expected)
  self.assertEqual(q.fetch(1), expected[:1])
  self.assertEqual(q.fetch(10, offset=1), expected[1:])
  self.assertEqual(q.fetch(1, offset=1), expected[1:])
  self.assertEqual(q.fetch(10, keys_only=True), [e._key for e in expected])

def testMultiQueryFetchUnordered(self):
  """Same fetch() variants without an explicit order."""
  q = Foo.query(Foo.tags.IN(['joe', 'jill']))
  expected = [self.joe, self.jill]
  self.assertEqual(q.fetch(10), expected)
  self.assertEqual(q.fetch(None), expected)
  self.assertEqual(q.fetch(), expected)
  self.assertEqual(q.fetch(2), expected)
  self.assertEqual(q.fetch(1), expected[:1])
  self.assertEqual(q.fetch(10, offset=1), expected[1:])
  self.assertEqual(q.fetch(1, offset=1), expected[1:])
  self.assertEqual(q.fetch(10, keys_only=True), [e._key for e in expected])

def testMultiQueryCount(self):
  """count() on an ordered IN() query."""
  q = Foo.query(Foo.tags.IN(['joe', 'jill'])).order(Foo.name)
  self.assertEqual(q.count(10), 2)
  self.assertEqual(q.count(None), 2)
  self.assertEqual(q.count(), 2)
  self.assertEqual(q.count(2), 2)
  self.assertEqual(q.count(1), 1)
  self.assertEqual(q.count(10, keys_only=True), 2)
  self.assertEqual(q.count(keys_only=True), 2)

def testMultiQueryCountUnordered(self):
  """count() on an unordered IN() query."""
  q = Foo.query(Foo.tags.IN(['joe', 'jill']))
  self.assertEqual(q.count(10), 2)
  self.assertEqual(q.count(None), 2)
  self.assertEqual(q.count(), 2)
  self.assertEqual(q.count(10, keys_only=True), 2)
  self.assertEqual(q.count(keys_only=True), 2)

def testMultiQueryCursors(self):
  """Multi-queries support cursors only when fully ordered ending on key."""
  # NOTE: This test will fail with SDK 1.5.0. Please upgrade to 1.5.1.
  q = Foo.query(Foo.tags.IN(['joe', 'jill']))
  self.assertRaises(datastore_errors.BadArgumentError, q.fetch_page, 1)
  q = q.order(Foo.tags)
  self.assertRaises(datastore_errors.BadArgumentError, q.fetch_page, 1)
  q = q.order(Foo.key)
  expected = q.fetch()
  self.assertEqual(len(expected), 2)
  res, curs, more = q.fetch_page(1, keys_only=True)
  self.assertEqual(res, [expected[0].key])
  self.assertTrue(curs is not None)
  self.assertTrue(more)
  res, curs, more = q.fetch_page(1, keys_only=False, start_cursor=curs)
  self.assertEqual(res, [expected[1]])
  self.assertTrue(curs is not None)
  self.assertFalse(more)
  res, curs, more = q.fetch_page(1, start_cursor=curs)
  self.assertEqual(res, [])
  self.assertTrue(curs is None)
  self.assertFalse(more)

def testMultiQueryWithAndWithoutAncestor(self):
  """_MultiQuery may mix ancestor and non-ancestor sub-queries."""
  class Benjamin(model.Model):
    name = model.StringProperty()
  ben = Benjamin(name='ben', parent=self.moe.key)
  ben.put()
  benji = Benjamin(name='benji')
  benji.put()
  bq = Benjamin.query()
  baq = Benjamin.query(ancestor=self.moe.key)
  mq = query._MultiQuery([bq, baq])
  res = list(mq)
  self.assertEqual(res, [benji, ben])

def testNotEqualOperator(self):
  """!= matches everything except the excluded value."""
  q = query.Query(kind='Foo').filter(Foo.rate != 2)
  res = list(q)
  self.assertEqual(res, [self.joe, self.moe])

def testInOperator(self):
  """IN() accepts any iterable of candidate values, e.g. a tuple."""
  q = query.Query(kind='Foo').filter(Foo.tags.IN(('jill', 'hello')))
  res = list(q)
  self.assertEqual(res, [self.joe, self.jill])

def testFullDistributiveLaw(self):
  """AND over two IN() filters distributes into a 4-way disjunction."""
  q = query.Query(kind='Foo').filter(Foo.tags.IN(['jill', 'hello']))
  q = q.filter(Foo.rate.IN([1, 2]))
  DisjunctionNode = query.DisjunctionNode
  ConjunctionNode = query.ConjunctionNode
  FilterNode = query.FilterNode
  expected = DisjunctionNode(
      ConjunctionNode(FilterNode('tags', '=', 'jill'),
                      FilterNode('rate', '=', 1)),
      ConjunctionNode(FilterNode('tags', '=', 'jill'),
                      FilterNode('rate', '=', 2)),
      ConjunctionNode(FilterNode('tags', '=', 'hello'),
                      FilterNode('rate', '=', 1)),
      ConjunctionNode(FilterNode('tags', '=', 'hello'),
                      FilterNode('rate', '=', 2)))
  self.assertEqual(q.filters, expected)

def testHalfDistributiveLaw(self):
  """An 'in' filter inside AND distributes over the whole conjunction."""
  DisjunctionNode = query.DisjunctionNode
  ConjunctionNode = query.ConjunctionNode
  FilterNode = query.FilterNode
  filters = ConjunctionNode(
      FilterNode('tags', 'in', ['jill', 'hello']),
      ConjunctionNode(FilterNode('rate', '=', 1),
                      FilterNode('name', '=', 'moe')))
  expected = DisjunctionNode(
      ConjunctionNode(FilterNode('tags', '=', 'jill'),
                      FilterNode('rate', '=', 1),
                      FilterNode('name', '=', 'moe')),
      ConjunctionNode(FilterNode('tags', '=', 'hello'),
                      FilterNode('rate', '=', 1),
                      FilterNode('name', '=', 'moe')))
  self.assertEqual(filters, expected)

def testGqlMinimal(self):
  """parse_gql() of a bare SELECT yields an unconstrained Query."""
  qry, options, bindings = query.parse_gql('SELECT * FROM Kind')
  self.assertEqual(qry.kind, 'Kind')
  self.assertEqual(qry.ancestor, None)
  self.assertEqual(qry.filters, None)
  self.assertEqual(qry.orders, None)
  self.assertEqual(bindings, {})
def testGqlAncestorBinding(self):
  """ANCESTOR IS :1 produces a positional Binding for the ancestor.

  NOTE(review): this method was previously also named testGqlAncestor and
  was silently shadowed by the next definition (same name later in the
  class body wins), so it never ran under unittest. Renamed so that both
  tests are discovered; the KEY-literal variant keeps the old name so any
  external reference to testGqlAncestor still resolves to a running test.
  """
  qry, options, bindings = query.parse_gql(
      'SELECT * FROM Kind WHERE ANCESTOR IS :1')
  self.assertEqual(qry.kind, 'Kind')
  self.assertEqual(qry.ancestor, query.Binding(None, 1))
  self.assertEqual(qry.filters, None)
  self.assertEqual(qry.orders, None)
  self.assertEqual(bindings, {1: query.Binding(None, 1)})

def testGqlAncestor(self):
  """ANCESTOR IS KEY('...') resolves directly to a Key, with no bindings."""
  key = model.Key('Foo', 42)
  qry, options, bindings = query.parse_gql(
      "SELECT * FROM Kind WHERE ANCESTOR IS KEY('%s')" % key.urlsafe())
  self.assertEqual(qry.kind, 'Kind')
  self.assertEqual(qry.ancestor, key)
  self.assertEqual(qry.filters, None)
  self.assertEqual(qry.orders, None)
  self.assertEqual(bindings, {})
def testGqlFilter(self):
  """GQL WHERE ... AND ... becomes a ConjunctionNode of FilterNodes."""
  qry, options, bindings = query.parse_gql(
      "SELECT * FROM Kind WHERE prop1 = 1 AND prop2 = 'a'")
  self.assertEqual(qry.kind, 'Kind')
  self.assertEqual(qry.ancestor, None)
  self.assertEqual(qry.filters,
                   query.ConjunctionNode(
                     query.FilterNode('prop1', '=', 1),
                     query.FilterNode('prop2', '=', 'a')))
  self.assertEqual(qry.orders, None)
  self.assertEqual(bindings, {})

def testGqlOrder(self):
  """GQL ORDER BY maps onto the query's orderings."""
  qry, options, bindings = query.parse_gql(
      'SELECT * FROM Kind ORDER BY prop1')
  self.assertEqual(query._orders_to_orderings(qry.orders),
                   [('prop1', query._ASC)])

def testGqlOffset(self):
  """GQL OFFSET maps onto QueryOptions.offset."""
  qry, options, bindings = query.parse_gql(
      'SELECT * FROM Kind OFFSET 2')
  self.assertEqual(options.offset, 2)

def testGqlLimit(self):
  """GQL LIMIT maps onto QueryOptions.limit."""
  qry, options, bindings = query.parse_gql(
      'SELECT * FROM Kind LIMIT 2')
  self.assertEqual(options.limit, 2)

def testGqlBindings(self):
  """Positional (:1) and named (:foo) parameters yield Binding objects."""
  qry, options, bindings = query.parse_gql(
      'SELECT * FROM Kind WHERE prop1 = :1 AND prop2 = :foo')
  self.assertEqual(qry.kind, 'Kind')
  self.assertEqual(qry.ancestor, None)
  self.assertEqual(qry.filters,
                   query.ConjunctionNode(
                     query.FilterNode('prop1', '=',
                                      query.Binding(None, 1)),
                     query.FilterNode('prop2', '=',
                                      query.Binding(None, 'foo'))))
  self.assertEqual(qry.orders, None)
  self.assertEqual(bindings, {1: query.Binding(None, 1),
                              'foo': query.Binding(None, 'foo')})

def testResolveBindings(self):
  """Mutating Binding.value before running re-parameterizes the query."""
  qry, options, bindings = query.parse_gql(
      'SELECT * FROM Foo WHERE name = :1')
  bindings[1].value = 'joe'
  self.assertEqual(list(qry), [self.joe])
  bindings[1].value = 'jill'
  self.assertEqual(list(qry), [self.jill])

def testKeyFilter(self):
  """Filters on the special key property compare entity keys."""
  class MyModel(model.Model):
    number = model.IntegerProperty()
  k1 = model.Key('MyModel', 'foo-1')
  m1 = MyModel(key=k1)
  m1.put()
  k2 = model.Key('MyModel', 'foo-2')
  m2 = MyModel(key=k2)
  m2.put()
  q = MyModel.query(MyModel.key == k1)
  res = q.get()
  self.assertEqual(res, m1)
  q = MyModel.query(MyModel.key > k1)
  res = q.get()
  self.assertEqual(res, m2)
  q = MyModel.query(MyModel.key < k2)
  res = q.get()
  self.assertEqual(res, m1)

def testUnicode(self):
  """Non-ASCII kind and property names round-trip and are queryable."""
  class MyModel(model.Model):
    n = model.IntegerProperty(u'\u4321')
    @classmethod
    def _get_kind(cls):
      return u'\u1234'.encode('utf-8')
  a = MyModel(n=42)
  k = a.put()
  b = k.get()
  self.assertEqual(a, b)
  self.assertFalse(a is b)
  # So far so good, now try queries
  res = MyModel.query(MyModel.n == 42).fetch()
  self.assertEqual(res, [a])

def testBlobQuery(self):
  """Indexed BlobProperty values can be filtered on byte-for-byte."""
  class MyModel(model.Model):
    b = model.BlobProperty(indexed=True)
  a = MyModel(b='\xff\x00')
  a.put()
  q = MyModel.query(MyModel.b == '\xff\x00')
  it = iter(q)
  b = it.next()
  self.assertEqual(a, b)
def main():
  """Command-line entry point: delegate to unittest's test runner."""
  unittest.main()


if __name__ == '__main__':
  main()
| Python |
"""Tests for key.py."""
import base64
import pickle
import unittest
from google.appengine.api import datastore_errors
from google.appengine.datastore import entity_pb
from ndb import key
class KeyTests(unittest.TestCase):
  """Unit tests for ndb Key construction, accessors and serialization."""

  def testShort(self):
    """Positional (kind, id) pairs flatten in order; None id = incomplete."""
    k0 = key.Key('Kind', None)
    self.assertEqual(k0.flat(), ['Kind', None])
    k1 = key.Key('Kind', 1)
    self.assertEqual(k1.flat(), ['Kind', 1])
    k2 = key.Key('Parent', 42, 'Kind', 1)
    self.assertEqual(k2.flat(), ['Parent', 42, 'Kind', 1])

  def testFlat(self):
    """flat= keyword round-trips through pairs()/flat()/kind()."""
    flat = ['Kind', 1]
    pairs = [(flat[i], flat[i+1]) for i in xrange(0, len(flat), 2)]
    k = key.Key(flat=flat)
    self.assertEqual(k.pairs(), pairs)
    self.assertEqual(k.flat(), flat)
    self.assertEqual(k.kind(), 'Kind')

  def testFlatLong(self):
    """kind() reports the LAST kind in a multi-level path."""
    flat = ['Kind', 1, 'Subkind', 'foobar']
    pairs = [(flat[i], flat[i+1]) for i in xrange(0, len(flat), 2)]
    k = key.Key(flat=flat)
    self.assertEqual(k.pairs(), pairs)
    self.assertEqual(k.flat(), flat)
    self.assertEqual(k.kind(), 'Subkind')

  def testSerialized(self):
    """serialized()/urlsafe()/reference() agree across all constructors."""
    flat = ['Kind', 1, 'Subkind', 'foobar']
    # Build the expected protobuf Reference by hand.
    r = entity_pb.Reference()
    r.set_app('_')
    e = r.mutable_path().add_element()
    e.set_type(flat[0])
    e.set_id(flat[1])
    e = r.mutable_path().add_element()
    e.set_type(flat[2])
    e.set_name(flat[3])
    serialized = r.Encode()
    urlsafe = base64.urlsafe_b64encode(r.Encode()).rstrip('=')
    k = key.Key(flat=flat)
    self.assertEqual(k.serialized(), serialized)
    self.assertEqual(k.urlsafe(), urlsafe)
    self.assertEqual(k.reference(), r)
    k = key.Key(urlsafe=urlsafe)
    self.assertEqual(k.serialized(), serialized)
    self.assertEqual(k.urlsafe(), urlsafe)
    self.assertEqual(k.reference(), r)
    k = key.Key(serialized=serialized)
    self.assertEqual(k.serialized(), serialized)
    self.assertEqual(k.urlsafe(), urlsafe)
    self.assertEqual(k.reference(), r)
    k = key.Key(reference=r)
    # The Key must take a private copy of the passed-in Reference.
    self.assertTrue(k._reference() is not r)
    self.assertEqual(k.serialized(), serialized)
    self.assertEqual(k.urlsafe(), urlsafe)
    self.assertEqual(k.reference(), r)
    k = key.Key(reference=r, app=r.app(), namespace='')
    self.assertTrue(k._reference() is not r)
    self.assertEqual(k.serialized(), serialized)
    self.assertEqual(k.urlsafe(), urlsafe)
    self.assertEqual(k.reference(), r)
    k1 = key.Key('A', 1)
    self.assertEqual(k1.urlsafe(), 'agFfcgcLEgFBGAEM')
    k2 = key.Key(urlsafe=k1.urlsafe())
    self.assertEqual(k1, k2)

  def testId(self):
    """id() returns the last pair's string or integer id (None if absent)."""
    k1 = key.Key('Kind', 'foo', app='app1', namespace='ns1')
    self.assertEqual(k1.id(), 'foo')
    k2 = key.Key('Subkind', 42, parent=k1)
    self.assertEqual(k2.id(), 42)
    k3 = key.Key('Subkind', 'bar', parent=k2)
    self.assertEqual(k3.id(), 'bar')
    # incomplete key
    k4 = key.Key('Subkind', None, parent=k3)
    self.assertEqual(k4.id(), None)

  def testStringId(self):
    """string_id() returns only string ids; None for ints or incomplete."""
    k1 = key.Key('Kind', 'foo', app='app1', namespace='ns1')
    self.assertEqual(k1.string_id(), 'foo')
    k2 = key.Key('Subkind', 'bar', parent=k1)
    self.assertEqual(k2.string_id(), 'bar')
    k3 = key.Key('Subkind', 42, parent=k2)
    self.assertEqual(k3.string_id(), None)
    # incomplete key
    k4 = key.Key('Subkind', None, parent=k3)
    self.assertEqual(k4.string_id(), None)

  def testIntegerId(self):
    """integer_id() returns only int ids; None for strings or incomplete."""
    k1 = key.Key('Kind', 42, app='app1', namespace='ns1')
    self.assertEqual(k1.integer_id(), 42)
    k2 = key.Key('Subkind', 43, parent=k1)
    self.assertEqual(k2.integer_id(), 43)
    k3 = key.Key('Subkind', 'foobar', parent=k2)
    self.assertEqual(k3.integer_id(), None)
    # incomplete key
    k4 = key.Key('Subkind', None, parent=k3)
    self.assertEqual(k4.integer_id(), None)

  def testParent(self):
    """parent= prepends the parent's path; parent() recovers it."""
    p = key.Key('Kind', 1, app='app1', namespace='ns1')
    self.assertEqual(p.parent(), None)
    k = key.Key('Subkind', 'foobar', parent=p)
    self.assertEqual(k.flat(), ['Kind', 1, 'Subkind', 'foobar'])
    self.assertEqual(k.parent(), p)
    # Explicit app/namespace matching the parent's is also accepted.
    k = key.Key('Subkind', 'foobar', parent=p,
                app=p.app(), namespace=p.namespace())
    self.assertEqual(k.flat(), ['Kind', 1, 'Subkind', 'foobar'])
    self.assertEqual(k.parent(), p)

  def testRoot(self):
    """root() returns the top-most ancestor (itself for a root key)."""
    p = key.Key('Kind', 1, app='app1', namespace='ns1')
    self.assertEqual(p.root(), p)
    k = key.Key('Subkind', 'foobar', parent=p)
    self.assertEqual(k.flat(), ['Kind', 1, 'Subkind', 'foobar'])
    self.assertEqual(k.root(), p)
    k2 = key.Key('Subsubkind', 42, parent=k,
                 app=p.app(), namespace=p.namespace())
    self.assertEqual(k2.flat(), ['Kind', 1,
                                 'Subkind', 'foobar',
                                 'Subsubkind', 42])
    self.assertEqual(k2.root(), p)

  def testRepr_Inferior(self):
    """A long id (1L) reprs as a plain int; repr() == str()."""
    k = key.Key('Kind', 1L, 'Subkind', 'foobar')
    self.assertEqual(repr(k),
                     "Key('Kind', 1, 'Subkind', 'foobar')")
    self.assertEqual(repr(k), str(k))

  def testRepr_Toplevel(self):
    """repr() of a single-pair key."""
    k = key.Key('Kind', 1)
    self.assertEqual(repr(k), "Key('Kind', 1)")

  def testRepr_Incomplete(self):
    """repr() of an incomplete key shows the None id."""
    k = key.Key('Kind', None)
    self.assertEqual(repr(k), "Key('Kind', None)")

  def testRepr_UnicodeKind(self):
    """Unicode kinds repr as their UTF-8 byte escapes."""
    k = key.Key(u'\u1234', 1)
    self.assertEqual(repr(k), "Key('\\xe1\\x88\\xb4', 1)")

  def testRepr_UnicodeId(self):
    """Unicode ids repr as their UTF-8 byte escapes."""
    k = key.Key('Kind', u'\u1234')
    self.assertEqual(repr(k), "Key('Kind', '\\xe1\\x88\\xb4')")

  def testRepr_App(self):
    """A non-default app is included in repr()."""
    k = key.Key('Kind', 1, app='foo')
    self.assertEqual(repr(k), "Key('Kind', 1, app='foo')")

  def testRepr_Namespace(self):
    """A non-default namespace is included in repr()."""
    k = key.Key('Kind', 1, namespace='foo')
    self.assertEqual(repr(k), "Key('Kind', 1, namespace='foo')")

  def testUnicode(self):
    """Unicode kinds/names are stored UTF-8 encoded in pairs()/flat()."""
    flat_input = [u'Kind\u1234', 1, 'Subkind', u'foobar\u4321']
    flat = [flat_input[0].encode('utf8'), flat_input[1],
            flat_input[2], flat_input[3].encode('utf8')]
    pairs = [(flat[i], flat[i+1]) for i in xrange(0, len(flat), 2)]
    k = key.Key(flat=flat_input)
    self.assertEqual(k.pairs(), pairs)
    self.assertEqual(k.flat(), flat)
    # TODO: test these more thoroughly
    r = k.reference()
    serialized = k.serialized()
    urlsafe = k.urlsafe()
    key.Key(urlsafe=urlsafe.decode('utf8'))
    key.Key(serialized=serialized.decode('utf8'))
    key.Key(reference=r)
    # TODO: this may not make sense -- the protobuf utf8-encodes values
    r = entity_pb.Reference()
    r.set_app('_')
    e = r.mutable_path().add_element()
    e.set_type(flat_input[0])
    e.set_name(flat_input[3])
    k = key.Key(reference=r)
    self.assertEqual(k.reference(), r)

  def testHash(self):
    """hash() is derived from the tuple of (kind, id) pairs."""
    flat = ['Kind', 1, 'Subkind', 'foobar']
    pairs = [(flat[i], flat[i+1]) for i in xrange(0, len(flat), 2)]
    k = key.Key(flat=flat)
    self.assertEqual(hash(k), hash(tuple(pairs)))

  def testPickling(self):
    """Keys round-trip through every pickle protocol."""
    flat = ['Kind', 1, 'Subkind', 'foobar']
    pairs = [(flat[i], flat[i+1]) for i in xrange(0, len(flat), 2)]
    k = key.Key(flat=flat)
    for proto in range(pickle.HIGHEST_PROTOCOL + 1):
      s = pickle.dumps(k, protocol=proto)
      kk = pickle.loads(s)
      self.assertEqual(k, kk)

  def testIncomplete(self):
    """Only the LAST id may be None; other bad ids are rejected."""
    k = key.Key(flat=['Kind', None])
    self.assertRaises(datastore_errors.BadArgumentError,
                      key.Key, flat=['Kind', None, 'Subkind', 1])
    self.assertRaises(AssertionError, key.Key, flat=['Kind', ()])

  def testKindFromModel(self):
    """A Model class may be passed wherever a kind string is expected."""
    from ndb import model
    class M(model.Model):
      pass
    class N(model.Model):
      @classmethod
      def _get_kind(cls):
        return 'NN'
    k = key.Key(M, 1)
    self.assertEqual(k, key.Key('M', 1))
    k = key.Key('X', 1, N, 2, 'Y', 3)
    self.assertEqual(k, key.Key('X', 1, 'NN', 2, 'Y', 3))

  def testKindFromBadValue(self):
    """Non-string, non-Model kinds are rejected."""
    # TODO: BadArgumentError
    self.assertRaises(Exception, key.Key, 42, 42)
def main():
  """Command-line entry point: hand control to the unittest runner."""
  unittest.main()


if __name__ == '__main__':
  main()
| Python |
"""Model and Property classes and associated stuff.
A model class represents the structure of entities stored in the
datastore. Applications define model classes to indicate the
structure of their entities, then instantiate those model classes
to create entities.
All model classes must inherit (directly or indirectly) from Model.
Through the magic of metaclasses, straightforward assignments in the
model class definition can be used to declare the model's structure:
class Person(Model):
name = StringProperty()
age = IntegerProperty()
We can now create a Person entity and write it to the datastore:
p = Person(name='Arthur Dent', age=42)
k = p.put()
The return value from put() is a Key (see the documentation for
ndb/key.py), which can be used to retrieve the same entity later:
p2 = k.get()
assert p2 == p
To update an entity, simply change its attributes and write it back
(note that this doesn't change the key):
p2.name = 'Arthur Philip Dent'
p2.put()
We can also delete an entity (by using the key):
k.delete()
The property definitions in the class body tell the system the names
and the types of the fields to be stored in the datastore, whether
they must be indexed, their default value, and more.
Many different Property types exist. Most are indexed by default, the
exceptions indicated in the list below:
- StringProperty: a short text string, limited to 500 bytes
- TextProperty: an unlimited text string; unindexed
- BlobProperty: an unlimited byte string; unindexed
- IntegerProperty: a 64-bit signed integer
- FloatProperty: a double precision floating point number
- BooleanProperty: a bool value
- DateTimeProperty: a datetime object. Note: App Engine always uses
UTC as the timezone
- DateProperty: a date object
- TimeProperty: a time object
- GeoPtProperty: a geographical location, i.e. (latitude, longitude)
- KeyProperty: a datastore Key value
- UserProperty: a User object (for backwards compatibility only)
- StructuredProperty: a field that is itself structured like an
entity; see below for more details
- LocalStructuredProperty: like StructuredProperty but the on-disk
representation is an opaque blob; unindexed
- ComputedProperty: a property whose value is computed from other
properties by a user-defined function. The property value is
written to the datastore so that it can be used in queries, but the
value from the datastore is not used when the entity is read back
- GenericProperty: a property whose type is not constrained; mostly
used by the Expando class (see below) but also usable explicitly
Most Property classes have similar constructor signatures. They
accept several optional keyword arguments:
- name=<string>: the name used to store the property value in the
datastore. Unlike the following options, this may also be given as
a positional argument
- indexed=<bool>: indicates whether the property should be indexed
(allowing queries on this property's value)
- repeated=<bool>: indicates that this property can have multiple
values in the same entity.
- required=<bool>: indicates that this property must be given a value
- default=<value>: a default value if no explicit value is given
- choices=<list of values>: a list or tuple of allowable values
- validator=<function>: a general-purpose validation function. It
will be called with two arguments (prop, value) and should either
return the validated value or raise an exception. It is also
allowed for the function to modify the value, but calling it again
on the modified value should not modify the value further. (For
example: a validator that returns value.strip() or value.lower() is
fine, but one that returns value + '$' is not.)
The repeated, required and default options are mutually exclusive: a
repeated property cannot be required nor can it specify a default
value (the default is always an empty list and an empty list is always
an allowed value), and a required property cannot have a default.
Some property types have additional arguments. Some property types
do not support all options.
Repeated properties are always represented as Python lists; if there
is only one value, the list has only one element. When a new list is
assigned to a repeated property, all elements of the list are
validated. Since it is also possible to mutate lists in place,
repeated properties are re-validated before they are written to the
datastore.
No validation happens when an entity is read from the datastore;
however property values read that have the wrong type (e.g. a string
value for an IntegerProperty) are ignored.
For non-repeated properties, None is always a possible value, and no
validation is called when the value is set to None. However for
required properties, writing the entity to the datastore requires
the value to be something other than None (and valid).
The StructuredProperty is different from most other properties; it
lets you define a sub-structure for your entities. The substructure
itself is defined using a model class, and the attribute value is an
instance of that model class. However it is not stored in the
datastore as a separate entity; instead, its attribute values are
included in the parent entity using a naming convention (the name of
the structured attribute followed by a dot followed by the name of the
subattribute). For example:
class Address(Model):
street = StringProperty()
city = StringProperty()
class Person(Model):
name = StringProperty()
address = StructuredProperty(Address)
p = Person(name='Harry Potter',
address=Address(street='4 Privet Drive',
city='Little Whinging'))
p.put()
This would write a single 'Person' entity with three attributes (as
you could verify using the Datastore Viewer in the Admin Console):
name = 'Harry Potter'
address.street = '4 Privet Drive'
address.city = 'Little Whinging'
Structured property types can be nested arbitrarily deep, but in a
hierarchy of nested structured property types, only one level can have
the repeated flag set. It is fine to have multiple structured
properties referencing the same model class.
It is also fine to use the same model class both as a top-level entity
class and as for a structured property; however queries for the model
class will only return the top-level entities.
The LocalStructuredProperty works similar to StructuredProperty on the
Python side. For example:
class Address(Model):
street = StringProperty()
city = StringProperty()
class Person(Model):
name = StringProperty()
address = LocalStructuredProperty(Address)
p = Person(name='Harry Potter',
address=Address(street='4 Privet Drive',
city='Little Whinging'))
p.put()
However the data written to the datastore is different; it writes a
'Person' entity with a 'name' attribute as before and a single
'address' attribute whose value is a blob which encodes the Address
value (using the standard "protocol buffer" encoding).
Sometimes the set of properties is not known ahead of time. In such
cases you can use the Expando class. This is a Model subclass that
creates properties on the fly, both upon assignment and when loading
an entity from the datastore. For example:
class SuperPerson(Expando):
name = StringProperty()
superpower = StringProperty()
razorgirl = SuperPerson(name='Molly Millions',
superpower='bionic eyes, razorblade hands',
rasta_name='Steppin\' Razor',
alt_name='Sally Shears')
elastigirl = SuperPerson(name='Helen Parr',
superpower='stretchable body')
elastigirl.max_stretch = 30 # Meters
You can inspect the properties of an expando instance using the
_properties attribute:
>>> print razorgirl._properties.keys()
['rasta_name', 'name', 'superpower', 'alt_name']
>>> print elastigirl._properties
{'max_stretch': GenericProperty('max_stretch'),
'name': StringProperty('name'),
'superpower': StringProperty('superpower')}
Note: this property exists for plain Model instances too; it is just
not as interesting for those.
The Model class offers basic query support. You can create a Query
object by calling the query() class method. Iterating over a Query
object returns the entities matching the query one at a time.
Query objects are fully described in the docstring for query.py, but
there is one handy shortcut that is only available through
Model.query(): positional arguments are interpreted as filter
expressions which are combined through an AND operator. For example:
Person.query(Person.name == 'Harry Potter', Person.age >= 11)
is equivalent to:
Person.query().filter(Person.name == 'Harry Potter', Person.age >= 11)
Keyword arguments passed to .query() are passed along to the Query()
constructor.
It is possible to query for field values of structured properties. For
example:
qry = Person.query(Person.address.city == 'London')
A number of top-level functions also live in this module:
- transaction() runs a function inside a transaction
- get_multi() reads multiple entities at once
- put_multi() writes multiple entities at once
- delete_multi() deletes multiple entities at once
All these have a corresponding *_async() variant as well.
The *_multi_async() functions return a list of Futures.
And finally these (without async variants):
- in_transaction() tests whether you are currently running in a transaction
- @transactional decorates functions that should be run in a transaction
"""
__author__ = 'guido@google.com (Guido van Rossum)'
# TODO: Change asserts to better exceptions.
# TODO: Add PolyModel.
import copy
import datetime
import logging
import zlib
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.datastore import datastore_query
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import entity_pb
from ndb import utils
# NOTE: Don't use "from ndb import key"; key is a common local variable name.
import ndb.key
Key = ndb.key.Key  # For export.
# NOTE: Property and Error classes are added later.
# Public API of this module; Property subclasses and Error classes are
# appended to this list further down in the file (per the NOTE above).
__all__ = ['Key', 'ModelAdapter', 'ModelKey', 'MetaModel', 'Model', 'Expando',
           'BlobKey', 'GeoPt',
           'transaction', 'transaction_async',
           'in_transaction', 'transactional',
           'get_multi', 'get_multi_async',
           'put_multi', 'put_multi_async',
           'delete_multi', 'delete_multi_async',
           ]
# Re-exported datastore value types, so callers can use model.BlobKey
# and model.GeoPt without importing datastore_types directly.
BlobKey = datastore_types.BlobKey
GeoPt = datastore_types.GeoPt
class KindError(datastore_errors.BadValueError):
  """No model class could be located for a kind.

  Also raised when the kind itself is not an 8-bit string.
  """
class ComputedPropertyError(datastore_errors.Error):
  """Signals an attempt to assign a value to a computed property."""
class ModelAdapter(datastore_rpc.AbstractAdapter):
  """Conversions between 'our' Key and Model classes and protobufs.

  This is needed to construct a Connection object, which in turn is
  needed to construct a Context object.

  See the base class docstring for more info about the signatures.
  """

  def __init__(self, default_model=None):
    """Constructor.

    Args:
      default_model: Model class used as a fallback when no implementation
        for a kind can be found; when omitted, an exception is raised
        instead (default).
    """
    self.default_model = default_model
    # Reference count of active 'with' blocks; when positive, decoded
    # entities get their original protobuf attached as _orig_pb.
    self.want_pbs = 0

  # Make this a context manager to request setting _orig_pb.
  def __enter__(self):
    self.want_pbs += 1

  def __exit__(self, *args):
    self.want_pbs -= 1

  def pb_to_key(self, pb):
    """Turn a Reference protobuf into a Key."""
    return Key(reference=pb)

  def key_to_pb(self, key):
    """Turn a Key into a Reference protobuf."""
    return key.reference()

  def pb_to_entity(self, pb):
    """Turn an EntityProto into a Model (subclass) instance."""
    kind = None
    if pb.has_key():
      # TODO: Fix the inefficiency here: we extract the key just so we
      # can get the kind just so we can find the intended model class,
      # but the key is extracted again and stored in the entity by _from_pb().
      kind = Key(reference=pb.key()).kind()
    modelclass = Model._kind_map.get(kind, self.default_model)
    if modelclass is None:
      raise KindError("No implementation found for kind '%s'" % kind)
    ent = modelclass._from_pb(pb)
    if self.want_pbs:
      ent._orig_pb = pb
    return ent

  def entity_to_pb(self, ent):
    """Turn a Model (subclass) instance into an EntityProto."""
    return ent._to_pb()
def make_connection(config=None, default_model=None):
  """Create a new Connection object with the right adapter.

  Optionally you can pass in a datastore_rpc.Configuration object.
  """
  adapter = ModelAdapter(default_model)
  return datastore_rpc.Connection(adapter=adapter, config=config)
class Property(object):
  """A class describing a typed, persisted attribute of a datastore entity.

  Not to be confused with Python's 'property' built-in.

  This is just a base class; there are specific subclasses that
  describe Properties of various types (and GenericProperty which
  describes a dynamically typed Property).

  All special Property attributes, even those considered 'public',
  have names starting with an underscore, because StructuredProperty
  uses the non-underscore attribute namespace to refer to nested
  Property names; this is essential for specifying queries on
  subproperties (see the module docstring).
  """
  # TODO: Separate 'simple' properties from base Property class

  _code_name = None
  _name = None
  _indexed = True
  _repeated = False
  _required = False
  _default = None
  _choices = None
  _validator = None

  _attributes = ['_name', '_indexed', '_repeated', '_required', '_default',
                 '_choices', '_validator']
  _positional = 1

  @datastore_rpc._positional(1 + _positional)
  def __init__(self, name=None, indexed=None, repeated=None,
               required=None, default=None, choices=None, validator=None):
    """Constructor.  For arguments see the module docstring."""
    if name is not None:
      if isinstance(name, unicode):
        name = name.encode('utf-8')
      assert '.' not in name  # The '.' is used elsewhere.
      self._name = name
    if indexed is not None:
      self._indexed = indexed
    if repeated is not None:
      self._repeated = repeated
    if required is not None:
      self._required = required
    if default is not None:
      self._default = default
    assert (bool(self._repeated) +
            bool(self._required) +
            (self._default is not None)) <= 1  # Allow at most one of these
    if choices is not None:
      assert isinstance(choices, (tuple, list))
      self._choices = tuple(choices)
    if validator is not None:
      # The validator is called as follows:
      #   value = validator(prop, value)
      # It should return the value to be used, or raise an exception.
      # It should be idempotent, i.e. calling it a second time should
      # not further modify the value.  So a validator that returns e.g.
      # value.lower() or value.strip() is fine, but one that returns
      # value + '$' is not.
      assert callable(validator)
      self._validator = validator

  def __repr__(self):
    """Return a compact unambiguous string representation."""
    args = []
    cls = self.__class__
    for i, attr in enumerate(self._attributes):
      val = getattr(self, attr)
      if val is not getattr(cls, attr):
        if isinstance(val, type):
          s = val.__name__
        else:
          s = repr(val)
        if i >= cls._positional:
          if attr.startswith('_'):
            attr = attr[1:]
          s = '%s=%s' % (attr, s)
        args.append(s)
    s = '%s(%s)' % (self.__class__.__name__, ', '.join(args))
    return s

  def _datastore_type(self, value):
    """Internal hook used by property filters.

    Sometimes the low-level query interface needs a specific data type
    in order for the right filter to be constructed.  See _comparison().
    """
    return value

  def _comparison(self, op, value):
    """Internal helper for comparison operators.

    Args:
      op: The operator ('=', '<' etc.).

    Returns:
      A FilterNode instance representing the requested comparison.
    """
    from ndb.query import FilterNode  # Import late to avoid circular imports.
    if value is not None:
      # TODO: Allow query.Binding instances?
      value = self._validate(value)
    return FilterNode(self._name, op, self._datastore_type(value))

  # Comparison operators on Property instances don't compare the
  # properties; instead they return FilterNode instances that can be
  # used in queries.  See the module docstrings above and in query.py
  # for details on how these can be used.

  def __eq__(self, value):
    """Return a FilterNode instance representing the '=' comparison."""
    return self._comparison('=', value)

  def __ne__(self, value):
    """Return a FilterNode instance representing the '!=' comparison."""
    return self._comparison('!=', value)

  def __lt__(self, value):
    """Return a FilterNode instance representing the '<' comparison."""
    return self._comparison('<', value)

  def __le__(self, value):
    """Return a FilterNode instance representing the '<=' comparison."""
    return self._comparison('<=', value)

  def __gt__(self, value):
    """Return a FilterNode instance representing the '>' comparison."""
    return self._comparison('>', value)

  def __ge__(self, value):
    """Return a FilterNode instance representing the '>=' comparison."""
    return self._comparison('>=', value)

  def _IN(self, value):
    """Comparison operator for the 'in' comparison operator.

    The Python 'in' operator cannot be overloaded in the way we want
    to, so we define a method.  For example:
      Employee.query(Employee.rank.IN([4, 5, 6]))
    Note that the method is called ._IN() but may normally be invoked
    as .IN(); ._IN() is provided for the case you have a
    StructuredProperty with a model that has a Property named IN.
    """
    from ndb.query import FilterNode  # Import late to avoid circular imports.
    if not isinstance(value, (list, tuple)):
      raise datastore_errors.BadArgumentError('Expected list or tuple, got %r' %
                                              (value,))
    values = []
    for val in value:
      if val is not None:
        # Bug fix: this read 'val is self._validate(val)', an identity
        # comparison whose result was discarded, so candidate values were
        # never actually replaced by their validated form.
        val = self._validate(val)
      values.append(val)
    return FilterNode(self._name, 'in', values)
  IN = _IN

  def __neg__(self):
    """Return a descending sort order on this Property.

    For example:
      Employee.query().order(-Employee.rank)
    """
    return datastore_query.PropertyOrder(
        self._name, datastore_query.PropertyOrder.DESCENDING)

  def __pos__(self):
    """Return an ascending sort order on this Property.

    Note that this is redundant but provided for consistency with
    __neg__.  For example, the following two are equivalent:
      Employee.query().order(+Employee.rank)
      Employee.query().order(Employee.rank)
    """
    return datastore_query.PropertyOrder(self._name)

  # TODO: Explain somewhere that None is never validated.
  # TODO: What if a custom validator returns None?
  # TODO: What if a custom validator wants to coerce a type that the
  # built-in validator for a given class does not allow?

  def _validate(self, value):
    """Template method to validate and possibly modify the value.

    This is intended to be overridden by Property subclasses.  It
    should return the value either unchanged or modified in an
    idempotent way, or raise an exception to indicate that the value
    is invalid.  By convention the exception raised is BadValueError.

    Note that for a repeated Property this function should be called
    for each item in the list, not for the list as a whole.
    """
    return value

  def _do_validate(self, value):
    """Call all validations on the value.

    This first calls self._validate(), then the custom validator
    function, and finally checks the choices.  It returns the value,
    possibly modified in an idempotent way, or raises an exception.

    Note that for a repeated Property this function should be called
    for each item in the list, not for the list as a whole.
    """
    value = self._validate(value)
    if self._validator is not None:
      value = self._validator(self, value)
    if self._choices is not None:
      if value not in self._choices:
        raise datastore_errors.BadValueError(
            'Value %r for property %s is not an allowed choice' %
            (value, self._name))
    return value

  def _fix_up(self, code_name):
    """Internal helper called to tell the property its name.

    This is called by _fix_up_properties() which is called by
    MetaModel when finishing the construction of a Model subclass.
    The name passed in is the name of the class attribute to which the
    Property is assigned (a.k.a. the code name).  Note that this means
    that each Property instance must be assigned to (at most) one
    class attribute.  E.g. to declare three strings, you must call
    StringProperty() three times, you cannot write
      foo = bar = baz = StringProperty()
    """
    self._code_name = code_name
    if self._name is None:
      self._name = code_name

  def _store_value(self, entity, value):
    """Internal helper to store a value in an entity for a Property.

    This assumes validation has already taken place.  For a repeated
    Property the value should be a list.
    """
    entity._values[self._name] = value

  def _set_value(self, entity, value):
    """Internal helper to set a value in an entity for a Property.

    This performs validation first.  For a repeated Property the value
    should be a list.
    """
    if self._repeated:
      if not isinstance(value, (list, tuple)):
        raise datastore_errors.BadValueError('Expected list or tuple, got %r' %
                                             (value,))
      # Bug fix: the validated items were previously collected into a
      # local list that was then discarded, so the raw (unvalidated)
      # input list got stored.  Store the validated copy, as promised by
      # the module docstring ("all elements of the list are validated").
      value = [self._do_validate(val) for val in value]
    else:
      if value is not None:
        value = self._do_validate(value)
    self._store_value(entity, value)

  def _has_value(self, entity):
    """Internal helper to ask if the entity has a value for this Property."""
    return self._name in entity._values

  def _retrieve_value(self, entity):
    """Internal helper to retrieve the value for this Property from an entity.

    This returns None if no value is set.  For a repeated Property
    this returns a list if a value is set, otherwise None.
    """
    return entity._values.get(self._name, self._default)

  def _get_value(self, entity):
    """Internal helper to get the value for this Property from an entity.

    For a repeated Property this initializes the value to an empty
    list if it is not set.
    """
    value = self._retrieve_value(entity)
    if value is None and self._repeated:
      value = []
      self._store_value(entity, value)
    return value

  def _delete_value(self, entity):
    """Internal helper to delete the value for this Property from an entity.

    Note that if no value exists this is a no-op; deleted values will
    not be serialized but requesting their value will return None (or
    an empty list in the case of a repeated Property).
    """
    if self._name in entity._values:
      del entity._values[self._name]

  def _is_initialized(self, entity):
    """Internal helper to ask if the entity has a value for this Property.

    This returns False if a value is stored but it is None.
    """
    return not self._required or (self._has_value(entity) and
                                  self._get_value(entity) is not None)

  def __get__(self, entity, cls=None):
    """Descriptor protocol: get the value from the entity."""
    if entity is None:
      return self  # __get__ called on class
    return self._get_value(entity)

  def __set__(self, entity, value):
    """Descriptor protocol: set the value on the entity."""
    self._set_value(entity, value)

  def __delete__(self, entity):
    """Descriptor protocol: delete the value from the entity."""
    self._delete_value(entity)

  def _serialize(self, entity, pb, prefix='', parent_repeated=False):
    """Internal helper to serialize this property to a protocol buffer.

    Subclasses may override this method.

    Args:
      entity: The entity, a Model (subclass) instance.
      pb: The protocol buffer, an EntityProto instance.
      prefix: Optional name prefix used for StructuredProperty
        (if present, must end in '.').
      parent_repeated: True if the parent (or an earlier ancestor)
        is a repeated Property.
    """
    value = self._retrieve_value(entity)
    if not self._repeated:
      value = [value]
    elif value is None:
      value = []
    assert isinstance(value, list)
    for val in value:
      if self._repeated:
        # Re-validate repeated values, since the user could have
        # appended values to the list, bypassing validation.
        val = self._do_validate(val)
      if self._indexed:
        p = pb.add_property()
      else:
        p = pb.add_raw_property()
      p.set_name(prefix + self._name)
      p.set_multiple(self._repeated or parent_repeated)
      v = p.mutable_value()
      if val is not None:
        self._db_set_value(v, p, val)

  def _deserialize(self, entity, p, depth=1):
    """Internal helper to deserialize this property from a protocol buffer.

    Subclasses may override this method.

    Args:
      entity: The entity, a Model (subclass) instance.
      p: A Property Message object (a protocol buffer).
      depth: Optional nesting depth, default 1 (unused here, but used
        by some subclasses that override this method).
    """
    v = p.value()
    val = self._db_get_value(v, p)
    if self._repeated:
      if self._has_value(entity):
        value = self._retrieve_value(entity)
        if not isinstance(value, list):
          value = [value]
        value.append(val)
      else:
        value = [val]
    else:
      if not self._has_value(entity):
        value = val
      else:
        oldval = self._retrieve_value(entity)
        # Maybe upgrade to a list property.  Or ignore null.
        if val is None:
          value = oldval
        elif oldval is None:
          value = val
        elif isinstance(oldval, list):
          oldval.append(val)
          value = oldval
        else:
          value = [oldval, val]
    self._store_value(entity, value)
def _validate_key(value, entity=None):
  """Check that value is a Key, and (optionally) matches entity's kind."""
  if not isinstance(value, Key):
    # TODO: BadKeyError.
    raise datastore_errors.BadValueError('Expected Key, got %r' % value)
  if entity and entity.__class__ not in (Model, Expando):
    if value.kind() != entity._get_kind():
      raise KindError('Expected Key kind to be %s; received %s' %
                      (entity._get_kind(), value.kind()))
  return value
class ModelKey(Property):
  """Special property to store the Model key."""

  def __init__(self):
    # Deliberately bypasses Property.__init__; the name is fixed.
    self._name = '__key__'

  def _datastore_type(self, value):
    return datastore_types.Key(value.urlsafe())

  def _comparison(self, op, value):
    # A key filter against None is meaningless; reject it up front.
    if value is None:
      raise datastore_errors.BadValueError(
          "__key__ filter query can't be compared to None")
    return super(ModelKey, self)._comparison(op, value)

  # TODO: Support IN().

  def _validate(self, value):
    return _validate_key(value)

  def _set_value(self, entity, value):
    """Setter for key attribute."""
    if value is not None:
      value = _validate_key(value, entity=entity)
    entity._key = value

  def _get_value(self, entity):
    """Getter for key attribute."""
    return entity._key

  def _delete_value(self, entity):
    """Deleter for key attribute."""
    entity._key = None
class BooleanProperty(Property):
  """A Property whose value is a Python bool."""
  # TODO: Allow int/long values equal to 0 or 1?

  def _validate(self, value):
    if isinstance(value, bool):
      return value
    raise datastore_errors.BadValueError('Expected bool, got %r' %
                                         (value,))

  def _db_set_value(self, v, p, value):
    assert isinstance(value, bool), (self._name)
    v.set_booleanvalue(value)

  def _db_get_value(self, v, p):
    if not v.has_booleanvalue():
      return None
    # The booleanvalue field is an int32, so booleanvalue() returns an
    # int, hence the conversion.
    return bool(v.booleanvalue())
class IntegerProperty(Property):
  """A Property whose value is a Python int or long (or bool)."""

  def _validate(self, value):
    # bool is a subclass of int, so True/False are accepted and
    # normalized to 1/0 by the int() call below.
    if isinstance(value, (int, long)):
      return int(value)
    raise datastore_errors.BadValueError('Expected integer, got %r' %
                                         (value,))

  def _db_set_value(self, v, p, value):
    assert isinstance(value, (bool, int, long)), (self._name)
    v.set_int64value(value)

  def _db_get_value(self, v, p):
    if not v.has_int64value():
      return None
    return int(v.int64value())
class FloatProperty(Property):
  """A Property whose value is a Python float.

  Note: int, long and bool are also allowed.
  """

  def _validate(self, value):
    if isinstance(value, (int, long, float)):
      return float(value)
    raise datastore_errors.BadValueError('Expected float, got %r' %
                                         (value,))

  def _db_set_value(self, v, p, value):
    assert isinstance(value, (bool, int, long, float)), (self._name)
    v.set_doublevalue(float(value))

  def _db_get_value(self, v, p):
    if not v.has_doublevalue():
      return None
    return v.doublevalue()
class StringProperty(Property):
  """A Property whose value is a text string."""
  # TODO: Enforce size limit when indexed.

  def _validate(self, value):
    if not isinstance(value, basestring):
      raise datastore_errors.BadValueError('Expected string, got %r' %
                                           (value,))
    # TODO: Always convert to Unicode?  But what if it's unconvertible?
    return value

  def _db_set_value(self, v, p, value):
    assert isinstance(value, basestring)
    if isinstance(value, unicode):
      value = value.encode('utf-8')
    v.set_stringvalue(value)
    if not self._indexed:
      p.set_meaning(entity_pb.Property.TEXT)

  def _db_get_value(self, v, p):
    if not v.has_stringvalue():
      return None
    raw = v.stringvalue()
    try:
      # Prefer returning unicode; fall back to the raw bytes when the
      # stored value is not valid UTF-8.
      return raw.decode('utf-8')
    except UnicodeDecodeError:
      return raw
class TextProperty(StringProperty):
  """An unindexed Property whose value is a text string of unlimited length."""
  # TODO: Maybe just use StringProperty(indexed=False)?

  _indexed = False

  def __init__(self, *args, **kwds):
    """Constructor; enforces that a TextProperty is never indexed."""
    super(TextProperty, self).__init__(*args, **kwds)
    assert not self._indexed
class BlobProperty(Property):
  """A Property whose value is a byte string."""
  # TODO: Enforce size limit when indexed.

  _indexed = False

  def _validate(self, value):
    if isinstance(value, str):
      return value
    raise datastore_errors.BadValueError('Expected 8-bit string, got %r' %
                                         (value,))

  def _datastore_type(self, value):
    return datastore_types.Blob(value)

  def _db_set_value(self, v, p, value):
    assert isinstance(value, str)
    v.set_stringvalue(value)
    if self._indexed:
      meaning = entity_pb.Property.BYTESTRING
    else:
      meaning = entity_pb.Property.BLOB
    p.set_meaning(meaning)

  def _db_get_value(self, v, p):
    if v.has_stringvalue():
      return v.stringvalue()
    return None
class GeoPtProperty(Property):
  """A Property whose value is a GeoPt."""

  def _validate(self, value):
    if isinstance(value, GeoPt):
      return value
    raise datastore_errors.BadValueError('Expected GeoPt, got %r' %
                                         (value,))

  def _db_set_value(self, v, p, value):
    assert isinstance(value, GeoPt), (self._name)
    point = v.mutable_pointvalue()
    point.set_x(value.lat)
    point.set_y(value.lon)

  def _db_get_value(self, v, p):
    if not v.has_pointvalue():
      return None
    point = v.pointvalue()
    return GeoPt(point.x(), point.y())
def _unpack_user(v):
  """Internal helper to unpack a User value from a protocol buffer."""
  uv = v.uservalue()
  email = unicode(uv.email().decode('utf-8'))
  auth_domain = unicode(uv.auth_domain().decode('utf-8'))
  obfuscated_gaiaid = unicode(uv.obfuscated_gaiaid().decode('utf-8'))
  federated_identity = None
  if uv.has_federated_identity():
    federated_identity = unicode(uv.federated_identity().decode('utf-8'))
  return users.User(email=email,
                    _auth_domain=auth_domain,
                    _user_id=obfuscated_gaiaid,
                    federated_identity=federated_identity)
class UserProperty(Property):
  """A Property whose value is a User object.

  Note: this exists for backwards compatibility with existing
  datastore schemas only; we do not recommend storing User objects
  directly in the datastore, but instead recommend storing the
  user.user_id() value.
  """

  def _validate(self, value):
    if isinstance(value, users.User):
      return value
    raise datastore_errors.BadValueError('Expected User, got %r' %
                                         (value,))

  def _db_set_value(self, v, p, value):
    datastore_types.PackUser(p.name(), value, v)

  def _db_get_value(self, v, p):
    return _unpack_user(v)
class KeyProperty(Property):
  """A Property whose value is a Key object."""
  # TODO: optionally check the kind (or maybe require this?)

  def _datastore_type(self, value):
    return datastore_types.Key(value.urlsafe())

  def _validate(self, value):
    if not isinstance(value, Key):
      raise datastore_errors.BadValueError('Expected Key, got %r' % (value,))
    # Reject incomplete keys.
    if not value.id():
      raise datastore_errors.BadValueError('Expected complete Key, got %r' %
                                           (value,))
    return value

  def _db_set_value(self, v, p, value):
    assert isinstance(value, Key)
    # See datastore_types.PackKey
    src = value._reference()  # Don't copy
    dst = v.mutable_referencevalue()  # A Reference
    dst.set_app(src.app())
    if src.has_name_space():
      dst.set_name_space(src.name_space())
    for elem in src.path().element_list():
      dst.add_pathelement().CopyFrom(elem)

  def _db_get_value(self, v, p):
    if not v.has_referencevalue():
      return None
    rv = v.referencevalue()
    ref = entity_pb.Reference()
    if rv.has_app():
      ref.set_app(rv.app())
    if rv.has_name_space():
      ref.set_name_space(rv.name_space())
    path = ref.mutable_path()
    for elem in rv.pathelement_list():
      path.add_element().CopyFrom(elem)
    return Key(reference=ref)
class BlobKeyProperty(Property):
  """A Property whose value is a BlobKey object."""

  def _validate(self, value):
    if isinstance(value, datastore_types.BlobKey):
      return value
    raise datastore_errors.BadValueError('Expected BlobKey, got %r' %
                                         (value,))

  def _db_set_value(self, v, p, value):
    assert isinstance(value, datastore_types.BlobKey)
    p.set_meaning(entity_pb.Property.BLOBKEY)
    v.set_stringvalue(str(value))

  def _db_get_value(self, v, p):
    if v.has_stringvalue():
      return datastore_types.BlobKey(v.stringvalue())
    return None
# The Epoch (a zero POSIX timestamp).
# Used by DateTimeProperty to convert datetimes to/from microsecond offsets.
_EPOCH = datetime.datetime.utcfromtimestamp(0)
class DateTimeProperty(Property):
  """A Property whose value is a datetime object.

  Note: Unlike Django, auto_now_add can be overridden by setting the
  value before writing the entity.  And unlike classic db, auto_now
  does not supply a default value.  Also unlike classic db, when the
  entity is written, the property values are updated to match what
  was written.  Finally, beware that this also updates the value in
  the in-process cache, *and* that auto_now_add may interact weirdly
  with transaction retries (a retry of a property with auto_now_add
  set will reuse the value that was set on the first try).
  """

  _attributes = Property._attributes + ['_auto_now', '_auto_now_add']

  @datastore_rpc._positional(1 + Property._positional)
  def __init__(self, name=None, auto_now=False, auto_now_add=False, **kwds):
    super(DateTimeProperty, self).__init__(name=name, **kwds)
    if self._repeated:
      # Automatic timestamps make no sense on a repeated property.
      assert not auto_now
      assert not auto_now_add
    self._auto_now = auto_now
    self._auto_now_add = auto_now_add

  def _validate(self, value):
    if isinstance(value, datetime.datetime):
      return value
    raise datastore_errors.BadValueError('Expected datetime, got %r' %
                                         (value,))

  def _now(self):
    return datetime.datetime.now()

  def _serialize(self, entity, *rest):
    # Stamp the current time before serializing, when requested.
    update = self._auto_now
    if not update:
      update = self._auto_now_add and self._retrieve_value(entity) is None
    if update:
      self._store_value(entity, self._now())
    super(DateTimeProperty, self)._serialize(entity, *rest)

  def _db_set_value(self, v, p, value):
    assert isinstance(value, datetime.datetime)
    assert value.tzinfo is None
    delta = value - _EPOCH
    micros = delta.microseconds + 1000000 * (delta.seconds + 24*3600 * delta.days)
    v.set_int64value(micros)
    p.set_meaning(entity_pb.Property.GD_WHEN)

  def _db_get_value(self, v, p):
    if not v.has_int64value():
      return None
    return _EPOCH + datetime.timedelta(microseconds=v.int64value())
def _date_to_datetime(value):
"""Convert a date to a datetime for datastore storage.
Args:
value: A datetime.date object.
Returns:
A datetime object with time set to 0:00.
"""
assert isinstance(value, datetime.date)
return datetime.datetime(value.year, value.month, value.day)
def _time_to_datetime(value):
"""Convert a time to a datetime for datastore storage.
Args:
value: A datetime.time object.
Returns:
A datetime object with date set to 1970-01-01.
"""
assert isinstance(value, datetime.time)
return datetime.datetime(1970, 1, 1,
value.hour, value.minute, value.second,
value.microsecond)
class DateProperty(DateTimeProperty):
  """A Property whose value is a date object."""

  def _datastore_type(self, value):
    return _date_to_datetime(value)

  def _validate(self, value):
    # datetime is a subclass of date, so it must be excluded explicitly.
    if (not isinstance(value, datetime.date) or
        isinstance(value, datetime.datetime)):
      raise datastore_errors.BadValueError('Expected date, got %r' %
                                           (value,))
    return value

  def _now(self):
    return datetime.date.today()

  def _db_set_value(self, v, p, value):
    value = _date_to_datetime(value)
    super(DateProperty, self)._db_set_value(v, p, value)

  def _db_get_value(self, v, p):
    value = super(DateProperty, self)._db_get_value(v, p)
    if value is None:
      # Bug fix: the base class returns None when no int64value is
      # present; the original unconditionally called value.date() and
      # raised AttributeError in that case.
      return None
    return value.date()
class TimeProperty(DateTimeProperty):
  """A Property whose value is a time object."""

  def _datastore_type(self, value):
    return _time_to_datetime(value)

  def _validate(self, value):
    if not isinstance(value, datetime.time):
      raise datastore_errors.BadValueError('Expected time, got %r' %
                                           (value,))
    return value

  def _now(self):
    return datetime.datetime.now().time()

  def _db_set_value(self, v, p, value):
    value = _time_to_datetime(value)
    super(TimeProperty, self)._db_set_value(v, p, value)

  def _db_get_value(self, v, p):
    value = super(TimeProperty, self)._db_get_value(v, p)
    if value is None:
      # Bug fix: the base class returns None when no int64value is
      # present; the original unconditionally called value.time() and
      # raised AttributeError in that case.
      return None
    return value.time()
class StructuredProperty(Property):
  """A Property whose value is itself an entity.

  The values of the sub-entity are indexed and can be queried.

  See the module docstring for details.
  """

  _modelclass = None

  _attributes = ['_modelclass'] + Property._attributes
  _positional = 2

  @datastore_rpc._positional(1 + _positional)
  def __init__(self, modelclass, name=None, **kwds):
    """Constructor.

    Args:
      modelclass: Model subclass describing the structure of the value.
      name: Optional datastore name for the property.
      **kwds: Passed on to the Property constructor.
    """
    super(StructuredProperty, self).__init__(name=name, **kwds)
    if self._repeated:
      # A repeated StructuredProperty may not wrap a model class that
      # itself has a repeated property -- presumably because the
      # flattened 'a.b' encoding would be ambiguous.
      assert not modelclass._has_repeated
    self._modelclass = modelclass

  def _fix_up(self, code_name):
    super(StructuredProperty, self)._fix_up(code_name)
    self._fix_up_nested_properties()

  def _fix_up_nested_properties(self):
    # Expose copies of the sub-model's properties on this property
    # under dotted names ('outer.inner') so nested filters can be
    # expressed as e.g. Outer.inner.field == value.
    for name, prop in self._modelclass._properties.iteritems():
      prop_copy = copy.copy(prop)
      prop_copy._name = self._name + '.' + prop._name
      if isinstance(prop_copy, StructuredProperty):
        # Guard against simple recursive model definitions.
        # See model_test: testRecursiveStructuredProperty().
        # TODO: Guard against indirect recursion.
        if prop_copy._modelclass is not self._modelclass:
          prop_copy._fix_up_nested_properties()
      setattr(self, prop._code_name, prop_copy)

  def _comparison(self, op, value):
    """Expand an equality filter on the sub-entity into sub-filters.

    Each sub-property with a value contributes a 'name.subname == val'
    FilterNode; the nodes are combined with AND.

    Raises:
      BadFilterError: for any operator other than '=' or when the value
        has no sub-properties set.
    """
    if op != '=':
      # TODO: 'in' might actually work. But maybe it's been expanded
      # already before we get here?
      raise datastore_errors.BadFilterError(
          'StructuredProperty filter can only use ==')
    # Import late to avoid circular imports.
    from ndb.query import FilterNode, ConjunctionNode, PostFilterNode
    from ndb.query import RepeatedStructuredPropertyPredicate
    value = self._validate(value)  # None is not allowed!
    filters = []
    match_keys = []
    # TODO: Why not just iterate over value._values?
    for name, prop in value._properties.iteritems():
      val = prop._retrieve_value(value)
      if val is not None:
        name = self._name + '.' + name
        filters.append(FilterNode(name, op, val))
        match_keys.append(name)
    if not filters:
      raise datastore_errors.BadFilterError(
          'StructuredProperty filter without any values')
    if len(filters) == 1:
      return filters[0]
    if self._repeated:
      # For repeated values, matching every sub-filter against *some*
      # value is not enough -- they must all match the *same*
      # sub-entity.  Add a post-filter that re-checks candidates in
      # memory against the partial pb.
      pb = value._to_pb(allow_partial=True)
      pred = RepeatedStructuredPropertyPredicate(match_keys, pb,
                                                 self._name + '.')
      filters.append(PostFilterNode(pred))
    return ConjunctionNode(*filters)

  def _validate(self, value):
    """Check that value is an instance of the wrapped model class."""
    if not isinstance(value, self._modelclass):
      raise datastore_errors.BadValueError('Expected %s instance, got %r' %
                                           (self._modelclass.__name__, value))
    return value

  def _serialize(self, entity, pb, prefix='', parent_repeated=False):
    # entity -> pb; pb is an EntityProto message
    # Each sub-property is serialized under the dotted name
    # prefix + self._name + '.' + subname.
    value = self._retrieve_value(entity)
    if value is None:
      # TODO: Is this the right thing for queries?
      # Skip structured values that are None.
      return
    cls = self._modelclass
    if self._repeated:
      assert isinstance(value, list)
      values = value
    else:
      assert isinstance(value, cls)
      values = [value]
    for value in values:
      # TODO: Avoid re-sorting for repeated values.
      for name, prop in sorted(value._properties.iteritems()):
        prop._serialize(value, pb, prefix + self._name + '.',
                        self._repeated or parent_repeated)

  def _deserialize(self, entity, p, depth=1):
    """Merge one protobuf property into the (possibly repeated) sub-entity.

    depth indexes into the dotted name ('a.b.c') to select the component
    this nesting level is responsible for.
    """
    if not self._repeated:
      subentity = self._retrieve_value(entity)
      if subentity is None:
        subentity = self._modelclass()
        self._store_value(entity, subentity)
      assert isinstance(subentity, self._modelclass)
      prop = subentity._get_property_for(p, depth=depth)
      prop._deserialize(subentity, p, depth + 1)
      return
    # The repeated case is more complicated.
    # TODO: Prove we won't get here for orphans.
    name = p.name()
    parts = name.split('.')
    assert len(parts) > depth, (depth, name, parts)
    next = parts[depth]
    prop = self._modelclass._properties.get(next)
    assert prop is not None  # QED
    values = self._retrieve_value(entity)
    if values is None:
      values = []
    elif not isinstance(values, list):
      values = [values]
    self._store_value(entity, values)
    # Find the first subentity that doesn't have a value for this
    # property yet.
    for sub in values:
      assert isinstance(sub, self._modelclass)
      if not prop._has_value(sub):
        subentity = sub
        break
    else:
      subentity = self._modelclass()
      values.append(subentity)
    prop._deserialize(subentity, p, depth + 1)
# A custom 'meaning' for compressed blobs.
# Stored via Property.meaning_uri so readers can tell the blob value is
# zlib-compressed (see LocalStructuredProperty._db_set_value/_db_get_value).
_MEANING_URI_COMPRESSED = 'ZLIB'
class LocalStructuredProperty(Property):
  """Substructure that is serialized to an opaque blob.

  This looks like StructuredProperty on the Python side, but is
  written to the datastore as a single opaque blob. It is not indexed
  and you cannot query for subproperties. On the other hand, the
  on-disk representation is more efficient and can be made even more
  efficient by passing compressed=True, which compresses the blob
  data using zlib.
  """

  _indexed = False
  _compressed = False
  _modelclass = None

  _attributes = ['_modelclass'] + Property._attributes + ['_compressed']
  _positional = 2

  @datastore_rpc._positional(1 + _positional)
  def __init__(self, modelclass, name=None, compressed=False, **kwds):
    """Constructor.

    Args:
      modelclass: Model subclass describing the structure of the value.
      name: Optional datastore name for the property.
      compressed: If True, zlib-compress the serialized blob on write.
      **kwds: Passed on to the Property constructor.
    """
    super(LocalStructuredProperty, self).__init__(name=name, **kwds)
    assert not self._indexed
    self._modelclass = modelclass
    self._compressed = compressed

  def _validate(self, value):
    # This is kind of a hack. Allow tuples because if the property comes from
    # datastore *and* is unchanged *and* the property has repeated=True,
    # _serialize() will call _do_validate() while the value is still a tuple.
    if not isinstance(value, (self._modelclass, tuple)):
      raise datastore_errors.BadValueError('Expected %s instance, got %r' %
                                           (self._modelclass.__name__, value))
    return value

  def _db_set_value(self, v, p, value):
    """Serializes the value to an entity_pb.

    The value stored in entity._values[self._name] can be either:
    - A tuple (serialized: bytes, compressed: bool), when the value comes
      from datastore. This is the serialized model and a flag indicating if it
      is compressed, used to lazily decompress and deserialize the property
      when it is first accessed.
    - An instance of self._modelclass, when the property value is set, or
      after it is lazily decompressed and deserialized on first access.
    """
    if isinstance(value, tuple):
      # Value didn't change and is still serialized, so we store it as it is.
      serialized, compressed = value
      assert compressed == self._compressed
    else:
      pb = value._to_pb()
      serialized = pb.Encode()
      compressed = self._compressed
      if compressed:
        p.set_meaning_uri(_MEANING_URI_COMPRESSED)
        serialized = zlib.compress(serialized)
    if compressed:
      # Use meaning_uri because setting meaning to something else that is not
      # BLOB or BYTESTRING will cause the value to be decoded from utf-8
      # in datastore_types.FromPropertyPb. This breaks the compressed string.
      p.set_meaning_uri(_MEANING_URI_COMPRESSED)
      p.set_meaning(entity_pb.Property.BLOB)
    v.set_stringvalue(serialized)

  def _db_get_value(self, v, p):
    if not v.has_stringvalue():
      return None
    # Return a tuple (serialized, bool) to be lazily processed later.
    return v.stringvalue(), p.meaning_uri() == _MEANING_URI_COMPRESSED

  def _decompress_unserialize_value(self, value):
    """Turn a lazy (serialized, compressed) tuple into a model instance."""
    serialized, compressed = value
    if compressed:
      serialized = zlib.decompress(serialized)
    pb = entity_pb.EntityProto(serialized)
    return self._modelclass._from_pb(pb, set_key=False)

  def _get_value(self, entity):
    # Lazily decompress/deserialize values that still carry their wire
    # form, caching the deserialized result back on the entity.
    value = super(LocalStructuredProperty, self)._get_value(entity)
    if self._repeated:
      if value and isinstance(value[0], tuple):
        # Decompresses and deserializes each list item.
        # Reuse the original list, cleaning it first.
        values = list(value)
        del value[:]
        for v in values:
          value.append(self._decompress_unserialize_value(v))
    elif isinstance(value, tuple):
      # Decompresses and deserializes a single item.
      value = self._decompress_unserialize_value(value)
      self._store_value(entity, value)
    return value
class GenericProperty(Property):
  """A Property whose value can be (almost) any basic type.

  This is mainly used for Expando and for orphans (values present in
  the datastore but not represented in the Model subclass) but can
  also be used explicitly for properties with dynamically-typed
  values.
  """

  def _db_get_value(self, v, p):
    """Decode a PropertyValue protobuf into a Python value.

    The union member that is set determines the Python type returned.
    """
    # This is awkward but there seems to be no faster way to inspect
    # what union member is present. datastore_types.FromPropertyPb(),
    # the undisputed authority, has the same series of if-elif blocks.
    # (We don't even want to think about multiple members... :-)
    if v.has_stringvalue():
      sval = v.stringvalue()
      if p.meaning() not in (entity_pb.Property.BLOB,
                             entity_pb.Property.BYTESTRING):
        try:
          sval.decode('ascii')
          # If this passes, don't return unicode.
        except UnicodeDecodeError:
          try:
            sval = unicode(sval.decode('utf-8'))
          except UnicodeDecodeError:
            # Neither ASCII nor UTF-8: return the raw bytes.
            pass
      return sval
    elif v.has_int64value():
      ival = v.int64value()
      if p.meaning() == entity_pb.Property.GD_WHEN:
        # An int64 with GD_WHEN meaning encodes a datetime as
        # microseconds since the epoch.
        return _EPOCH + datetime.timedelta(microseconds=ival)
      return ival
    elif v.has_booleanvalue():
      # The booleanvalue field is an int32, so booleanvalue() returns
      # an int, hence the conversion.
      return bool(v.booleanvalue())
    elif v.has_doublevalue():
      return v.doublevalue()
    elif v.has_referencevalue():
      rv = v.referencevalue()
      app = rv.app()
      namespace = rv.name_space()
      pairs = [(elem.type(), elem.id() or elem.name())
               for elem in rv.pathelement_list()]
      return Key(pairs=pairs, app=app, namespace=namespace)
    elif v.has_pointvalue():
      pv = v.pointvalue()
      return GeoPt(pv.x(), pv.y())
    elif v.has_uservalue():
      return _unpack_user(v)
    else:
      # A missing value implies null.
      return None

  def _db_set_value(self, v, p, value):
    """Encode a Python value into a PropertyValue protobuf.

    Dispatches on the Python type of value; note bool must be tested
    before int because bool is a subclass of int.
    """
    # TODO: use a dict mapping types to functions
    if isinstance(value, str):
      v.set_stringvalue(value)
      # TODO: Set meaning to BLOB or BYTESTRING if it's not UTF-8?
      # (Or TEXT if unindexed.)
    elif isinstance(value, unicode):
      v.set_stringvalue(value.encode('utf8'))
      if not self._indexed:
        p.set_meaning(entity_pb.Property.TEXT)
    elif isinstance(value, bool):  # Must test before int!
      v.set_booleanvalue(value)
    elif isinstance(value, (int, long)):
      # The datastore stores 64-bit signed integers.
      assert -2**63 <= value < 2**63
      v.set_int64value(value)
    elif isinstance(value, float):
      v.set_doublevalue(value)
    elif isinstance(value, Key):
      # See datastore_types.PackKey
      ref = value._reference()  # Don't copy
      rv = v.mutable_referencevalue()  # A Reference
      rv.set_app(ref.app())
      if ref.has_name_space():
        rv.set_name_space(ref.name_space())
      for elem in ref.path().element_list():
        rv.add_pathelement().CopyFrom(elem)
    elif isinstance(value, datetime.datetime):
      # Only naive datetimes are supported; stored as microseconds
      # since the epoch under the GD_WHEN meaning.
      assert value.tzinfo is None
      dt = value - _EPOCH
      ival = dt.microseconds + 1000000 * (dt.seconds + 24*3600 * dt.days)
      v.set_int64value(ival)
      p.set_meaning(entity_pb.Property.GD_WHEN)
    elif isinstance(value, GeoPt):
      pv = v.mutable_pointvalue()
      pv.set_x(value.lat)
      pv.set_y(value.lon)
    elif isinstance(value, users.User):
      datastore_types.PackUser(p.name(), value, v)
    else:
      # TODO: BlobKey.
      assert False, type(value)
class ComputedProperty(GenericProperty):
  """A Property whose value is determined by a user-supplied function.

  Computed properties cannot be set directly, but are instead generated by a
  function when required. They are useful to provide fields in the datastore
  that can be used for filtering or sorting without having to manually set the
  value in code - for example, sorting on the length of a BlobProperty, or
  using an equality filter to check if another field is not empty.

  ComputedProperty can be declared as a regular property, passing a function as
  the first argument, or it can be used as a decorator for the function that
  does the calculation.

  Example:

  >>> class DatastoreFile(Model):
  ...   name = StringProperty()
  ...   name_lower = ComputedProperty(lambda self: self.name.lower())
  ...
  ...   data = BlobProperty()
  ...
  ...   @ComputedProperty
  ...   def size(self):
  ...     return len(self.data)
  ...
  ...   def _compute_hash(self):
  ...     return hashlib.sha1(self.data).hexdigest()
  ...   hash = ComputedProperty(_compute_hash, name='sha1')
  """

  def __init__(self, func, *args, **kwargs):
    """Constructor.

    Args:
      func: A function that takes one argument, the model instance, and returns
        a calculated value.
    """
    super(ComputedProperty, self).__init__(*args, **kwargs)
    # required/repeated/default make no sense for a derived value.
    assert not self._required, 'ComputedProperty cannot be required'
    assert not self._repeated, 'ComputedProperty cannot be repeated'
    assert self._default is None, 'ComputedProperty cannot have a default'
    self._func = func

  def _has_value(self, entity):
    # A computed value can always be (re)derived from the entity.
    return True

  def _store_value(self, entity, value):
    raise ComputedPropertyError("Cannot assign to a ComputedProperty")

  def _delete_value(self, entity):
    raise ComputedPropertyError("Cannot delete a ComputedProperty")

  def _retrieve_value(self, entity):
    # Recompute the value on every access.
    return self._func(entity)

  def _deserialize(self, entity, p, depth=1):
    # Values loaded from the datastore are ignored; the property is
    # always recomputed from the entity's other fields.
    pass
class MetaModel(type):
  """Metaclass for Model.

  This exists to fix up the properties -- they need to know their name.
  This is accomplished by calling the class's _fix_properties() method.
  """

  def __init__(cls, name, bases, classdict):
    super(MetaModel, cls).__init__(name, bases, classdict)
    # Runs once per Model subclass definition, binding each Property
    # to its attribute name.
    cls._fix_up_properties()
class Model(object):
  """A class describing datastore entities.

  Model instances are usually called entities. All model classes
  inheriting from Model automatically have MetaModel as their
  metaclass, so that the properties are fixed up properly once the
  class is defined.

  Because of this, you cannot use the same Property object to describe
  multiple properties -- you must create separate Property objects for
  each property. E.g. this does not work:

    wrong_prop = StringProperty()
    class Wrong(Model):
      wrong1 = wrong_prop
      wrong2 = wrong_prop

  The kind is normally equal to the class name (exclusive of the
  module name or any other parent scope). To override the kind,
  define a class method named _get_kind(), as follows:

    class MyModel(Model):
      @classmethod
      def _get_kind(cls):
        return 'AnotherKind'
  """

  __metaclass__ = MetaModel

  # Class variables updated by _fix_up_properties()
  _properties = None
  _has_repeated = False
  _kind_map = {}  # Dict mapping {kind: Model subclass}

  # Defaults for instance variables.
  _key = None
  _values = None

  # Hardcoded pseudo-property for the key.
  key = ModelKey()

  @datastore_rpc._positional(1)
  def __init__(self, key=None, id=None, parent=None, **kwds):
    """Creates a new instance of this model (a.k.a. as an entity).

    The new entity must be written to the datastore using an explicit
    call to .put().

    Args:
      key: Key instance for this model. If key is used, id and parent must
        be None.
      id: Key id for this model. If id is used, key must be None.
      parent: Key instance for the parent model or None for a top-level one.
        If parent is used, key must be None.
      **kwds: Keyword arguments mapping to properties of this model.

    Note: you cannot define a property named key; the .key attribute
    always refers to the entity's key. But you can define properties
    named id or parent. Values for the latter cannot be passed
    through the constructor, but can be assigned to entity attributes
    after the entity has been created.
    """
    if key is not None:
      # key is mutually exclusive with id and parent.
      if id is not None:
        raise datastore_errors.BadArgumentError(
            'Model constructor accepts key or id, not both.')
      if parent is not None:
        raise datastore_errors.BadArgumentError(
            'Model constructor accepts key or parent, not both.')
      self._key = _validate_key(key, entity=self)
    elif id is not None or parent is not None:
      # When parent is set but id is not, we have an incomplete key.
      # Key construction will fail with invalid ids or parents, so no check
      # is needed.
      # TODO: should this be restricted to string ids?
      self._key = Key(self._get_kind(), id, parent=parent)
    self._values = {}
    self._set_attributes(kwds)

  def _populate(self, **kwds):
    """Populate an instance from keyword arguments.

    Each keyword argument will be used to set a corresponding
    property. Keywords must refer to valid property name. This is
    similar to passing keyword arguments to the Model constructor,
    except that no provisions for key, id or parent are made.
    """
    self._set_attributes(kwds)
  populate = _populate

  def _set_attributes(self, kwds):
    """Internal helper to set attributes from keyword arguments.

    Expando overrides this.
    """
    cls = self.__class__
    for name, value in kwds.iteritems():
      prop = getattr(cls, name)  # Raises AttributeError for unknown properties.
      assert isinstance(prop, Property)
      prop._set_value(self, value)

  def _find_uninitialized(self):
    """Internal helper to find uninitialized properties.

    Returns:
      A set of property names.
    """
    return set(name
               for name, prop in self._properties.iteritems()
               if not prop._is_initialized(self))

  def _check_initialized(self):
    """Internal helper to check for uninitialized properties.

    Raises:
      BadValueError if it finds any.
    """
    baddies = self._find_uninitialized()
    if baddies:
      raise datastore_errors.BadValueError(
          'Entity has uninitialized properties: %s' % ', '.join(baddies))

  def __repr__(self):
    """Return an unambiguous string representation of an entity."""
    args = []
    done = set()
    # Only show properties that actually have a value set.
    for prop in self._properties.itervalues():
      if prop._has_value(self):
        args.append('%s=%r' % (prop._code_name, prop._retrieve_value(self)))
        done.add(prop._name)
    args.sort()
    if self._key is not None:
      args.insert(0, 'key=%r' % self._key)
    s = '%s(%s)' % (self.__class__.__name__, ', '.join(args))
    return s

  @classmethod
  def _get_kind(cls):
    """Return the kind name for this class.

    This defaults to cls.__name__; users may override this to give a
    class a different on-disk name than its class name.
    """
    return cls.__name__

  @classmethod
  def _get_kind_map(cls):
    """Internal helper to return the kind map."""
    return cls._kind_map

  @classmethod
  def _reset_kind_map(cls):
    """Clear the kind map. Useful for testing."""
    cls._kind_map.clear()

  def _has_complete_key(self):
    """Return whether this entity has a complete key."""
    return self._key is not None and self._key.id() is not None

  def __hash__(self):
    """Dummy hash function.

    Raises:
      Always TypeError to emphasize that entities are mutable.
    """
    raise TypeError('Model is not immutable')

  def __eq__(self, other):
    """Compare two entities of the same class for equality."""
    if other.__class__ is not self.__class__:
      return NotImplemented
    # It's okay to use private names -- we're the same class
    if self._key != other._key:
      # TODO: If one key is None and the other is an explicit
      # incomplete key of the simplest form, this should be OK.
      return False
    return self._equivalent(other)

  def _equivalent(self, other):
    """Compare two entities of the same class, excluding keys."""
    assert other.__class__ is self.__class__  # TODO: What about subclasses?
    # It's all about determining inequality early.
    if len(self._properties) != len(other._properties):
      return False  # Can only happen for Expandos.
    my_prop_names = set(self._properties.iterkeys())
    their_prop_names = set(other._properties.iterkeys())
    if my_prop_names != their_prop_names:
      return False  # Again, only possible for Expandos.
    for name in my_prop_names:
      my_value = self._properties[name]._get_value(self)
      their_value = other._properties[name]._get_value(other)
      if my_value != their_value:
        return False
    return True

  def __ne__(self, other):
    """Implement self != other as not(self == other)."""
    eq = self.__eq__(other)
    if eq is NotImplemented:
      return NotImplemented
    return not eq

  def _to_pb(self, pb=None, allow_partial=False):
    """Internal helper to turn an entity into an EntityProto protobuf."""
    if not allow_partial:
      self._check_initialized()
    if pb is None:
      pb = entity_pb.EntityProto()
    # TODO: Move the key stuff into ModelAdapter.entity_to_pb()?
    key = self._key
    if key is None:
      # No key yet: serialize a reference carrying only the kind.
      pairs = [(self._get_kind(), None)]
      ref = ndb.key._ReferenceFromPairs(pairs, reference=pb.mutable_key())
    else:
      ref = key._reference()  # Don't copy
      pb.mutable_key().CopyFrom(ref)
    group = pb.mutable_entity_group()  # Must initialize this.
    # To work around an SDK issue, only set the entity group if the
    # full key is complete. TODO: Remove the top test once fixed.
    if key is not None and key.id():
      elem = ref.path().element(0)
      if elem.id() or elem.name():
        group.add_element().CopyFrom(elem)
    for name, prop in sorted(self._properties.iteritems()):
      prop._serialize(self, pb)
    return pb

  @classmethod
  def _from_pb(cls, pb, set_key=True):
    """Internal helper to create an entity from an EntityProto protobuf."""
    assert isinstance(pb, entity_pb.EntityProto)
    ent = cls()
    # TODO: Move the key stuff into ModelAdapter.pb_to_entity()?
    if set_key and pb.has_key():
      ent._key = Key(reference=pb.key())
    indexed_properties = pb.property_list()
    unindexed_properties = pb.raw_property_list()
    for plist in [indexed_properties, unindexed_properties]:
      for p in plist:
        prop = ent._get_property_for(p, plist is indexed_properties)
        prop._deserialize(ent, p)
    return ent

  def _get_property_for(self, p, indexed=True, depth=0):
    """Internal helper to get the Property for a protobuf-level property."""
    name = p.name()
    parts = name.split('.')
    assert len(parts) > depth, (p.name(), parts, depth)
    next = parts[depth]
    prop = self._properties.get(next)
    if prop is None:
      # An "orphan": present in the datastore but not in the class.
      prop = self._fake_property(p, next, indexed)
    return prop

  def _clone_properties(self):
    """Internal helper to clone self._properties if necessary.

    Gives the instance its own copy so dynamically-added properties
    don't leak onto the class.
    """
    cls = self.__class__
    if self._properties is cls._properties:
      self._properties = dict(cls._properties)

  def _fake_property(self, p, next, indexed=True):
    """Internal helper to create a fake Property for an orphan value."""
    self._clone_properties()
    if p.name() != next and not p.name().endswith('.' + next):
      # A dotted name: the orphan is nested inside a structured value;
      # represent the container as an Expando.
      prop = StructuredProperty(Expando, next)
      self._values[prop._name] = Expando()
    else:
      prop = GenericProperty(next,
                             repeated=p.multiple(),
                             indexed=indexed)
    self._properties[prop._name] = prop
    return prop

  @classmethod
  def _fix_up_properties(cls):
    """Fix up the properties by calling their _fix_up() method.

    Note: This is called by MetaModel, but may also be called manually
    after dynamically updating a model class.
    """
    # Verify that _get_kind() returns an 8-bit string.
    kind = cls._get_kind()
    if not isinstance(kind, basestring):
      raise KindError('Class %s defines a _get_kind() method that returns '
                      'a non-string (%r)' % (cls.__name__, kind))
    if not isinstance(kind, str):
      try:
        kind = kind.encode('ascii')  # ASCII contents is okay.
      except UnicodeEncodeError:
        raise KindError('Class %s defines a _get_kind() method that returns '
                        'a Unicode string (%r); please encode using utf-8' %
                        (cls.__name__, kind))
    cls._properties = {}  # Map of {name: Property}
    if cls.__module__ == __name__:  # Skip the classes in *this* file.
      return
    for name in set(dir(cls)):
      prop = getattr(cls, name, None)
      if isinstance(prop, ModelKey):
        # The 'key' pseudo-property is handled separately.
        continue
      if isinstance(prop, Property):
        assert not name.startswith('_')
        # TODO: Tell prop the class, for error message.
        prop._fix_up(name)
        if prop._repeated:
          cls._has_repeated = True
        cls._properties[prop._name] = prop
    cls._kind_map[cls._get_kind()] = cls

  # Datastore API using the default context.
  # These use local import since otherwise they'd be recursive imports.

  @classmethod
  def _query(cls, *args, **kwds):
    """Create a Query object for this class.

    Keyword arguments are passed to the Query() constructor. If
    positional arguments are given they are used to apply an initial
    filter.

    Returns:
      A Query object.
    """
    from ndb.query import Query  # Import late to avoid circular imports.
    qry = Query(kind=cls._get_kind(), **kwds)
    if args:
      qry = qry.filter(*args)
    return qry
  query = _query

  def _put(self):
    """Write this entity to the datastore.

    If the operation creates or completes a key, the entity's key
    attribute is set to the new, complete key.

    Returns:
      The key for the entity. This is always a complete key.
    """
    return self._put_async().get_result()
  put = _put

  def _put_async(self):
    """Write this entity to the datastore.

    This is the asynchronous version of Model._put().
    """
    from ndb import tasklets
    return tasklets.get_context().put(self)
  put_async = _put_async

  @classmethod
  def _get_or_insert(cls, name, parent=None, **kwds):
    """Transactionally retrieves an existing entity or creates a new one.

    Args:
      name: Key name to retrieve or create.
      parent: Parent entity key, if any.
      **kwds: Keyword arguments to pass to the constructor of the model class
        if an instance for the specified key name does not already exist. If
        an instance with the supplied key_name and parent already exists,
        these arguments will be discarded.

    Returns:
      Existing instance of Model class with the specified key name and parent
      or a new one that has just been created.
    """
    return cls._get_or_insert_async(name=name, parent=parent,
                                    **kwds).get_result()
  get_or_insert = _get_or_insert

  @classmethod
  def _get_or_insert_async(cls, name, parent=None, **kwds):
    """Transactionally retrieves an existing entity or creates a new one.

    This is the asynchronous version of Model._get_or_insert().
    """
    from ndb import tasklets
    ctx = tasklets.get_context()
    return ctx.get_or_insert(cls, name=name, parent=parent, **kwds)
  get_or_insert_async = _get_or_insert_async

  @classmethod
  def _allocate_ids(cls, size=None, max=None, parent=None):
    """Allocates a range of key IDs for this model class.

    Args:
      size: Number of IDs to allocate. Either size or max can be specified,
        not both.
      max: Maximum ID to allocate. Either size or max can be specified,
        not both.
      parent: Parent key for which the IDs will be allocated.

    Returns:
      A tuple with (start, end) for the allocated range, inclusive.
    """
    return cls._allocate_ids_async(size=size, max=max,
                                   parent=parent).get_result()
  allocate_ids = _allocate_ids

  @classmethod
  def _allocate_ids_async(cls, size=None, max=None, parent=None):
    """Allocates a range of key IDs for this model class.

    This is the asynchronous version of Model._allocate_ids().
    """
    from ndb import tasklets
    key = Key(cls._get_kind(), None, parent=parent)
    return tasklets.get_context().allocate_ids(key, size=size, max=max)
  allocate_ids_async = _allocate_ids_async

  @classmethod
  def _get_by_id(cls, id, parent=None):
    """Returns an instance of Model class by ID.

    Args:
      id: A string or integer key ID.
      parent: Parent key of the model to get.

    Returns:
      A model instance or None if not found.
    """
    return cls._get_by_id_async(id, parent=parent).get_result()
  get_by_id = _get_by_id

  @classmethod
  def _get_by_id_async(cls, id, parent=None):
    """Returns an instance of Model class by ID.

    This is the asynchronous version of Model._get_by_id().
    """
    from ndb import tasklets
    key = Key(cls._get_kind(), id, parent=parent)
    return tasklets.get_context().get(key)
  get_by_id_async = _get_by_id_async
class Expando(Model):
  """Model subclass to support dynamic Property names and types.

  See the module docstring for details.
  """

  # Set this to False (in an Expando subclass or entity) to make
  # properties default to unindexed.
  _default_indexed = True

  def _set_attributes(self, kwds):
    # Go through setattr so __setattr__ below creates properties on
    # the fly for unknown names.
    for name, value in kwds.iteritems():
      setattr(self, name, value)

  def __getattr__(self, name):
    if name.startswith('_'):
      return super(Expando, self).__getattr__(name)
    prop = self._properties.get(name)
    if prop is None:
      return super(Expando, self).__getattribute__(name)
    return prop._get_value(self)

  def __setattr__(self, name, value):
    if (name.startswith('_') or
        isinstance(getattr(self.__class__, name, None), (Property, property))):
      # Internal attributes and declared properties take the normal path.
      return super(Expando, self).__setattr__(name, value)
    # Dynamically create a per-instance property for this attribute.
    self._clone_properties()
    if isinstance(value, Model):
      prop = StructuredProperty(Model, name)
    else:
      repeated = isinstance(value, list)
      indexed = self._default_indexed
      prop = GenericProperty(name, repeated=repeated, indexed=indexed)
    prop._code_name = name
    self._properties[name] = prop
    prop._set_value(self, value)

  def __delattr__(self, name):
    if (name.startswith('_') or
        isinstance(getattr(self.__class__, name, None), (Property, property))):
      return super(Expando, self).__delattr__(name)
    prop = self._properties.get(name)
    assert prop is not None
    prop._delete_value(self)
    # NOTE(review): _properties is keyed by property *name*, so testing a
    # Property object for membership always succeeds here -- possibly
    # intended to be `prop._name not in self.__class__._properties`; confirm.
    assert prop not in self.__class__._properties
    del self._properties[name]
@datastore_rpc._positional(1)
def transaction(callback, retry=None, entity_group=None):
  """Run a callback in a transaction.

  Args:
    callback: A function or tasklet to be called.
    retry: Optional retry count (keyword only; default set by
      ndb.context.Context.transaction()).
    entity_group: Optional root key to use as transaction entity group
      (keyword only; defaults to the root part of the first key used
      in the transaction).

  Returns:
    Whatever callback() returns.

  Raises:
    Whatever callback() raises; datastore_errors.TransactionFailedError
    if the transaction failed.

  Note:
    To pass arguments to a callback function, use a lambda, e.g.
      def my_callback(key, inc):
        ...
      transaction(lambda: my_callback(Key(...), 1))
  """
  # Delegate to the async version and block on the result.
  fut = transaction_async(callback, retry=retry, entity_group=entity_group)
  return fut.get_result()
@datastore_rpc._positional(1)
def transaction_async(callback, retry=None, entity_group=None):
  """Run a callback in a transaction.

  This is the asynchronous version of transaction(); it returns a
  future instead of blocking for the callback's result.
  """
  from ndb import tasklets  # Late import to avoid circular imports.
  # Forward only the options the caller actually supplied, so the
  # context's own defaults apply for the rest.
  options = [('retry', retry), ('entity_group', entity_group)]
  kwds = dict((name, val) for name, val in options if val is not None)
  ctx = tasklets.get_context()
  return ctx.transaction(callback, **kwds)
def in_transaction():
  """Return True iff a datastore transaction is currently active."""
  from ndb import tasklets  # Late import to avoid circular imports.
  ctx = tasklets.get_context()
  return ctx.in_transaction()
@datastore_rpc._positional(1)
def transactional(func):
  """Decorator to make a function automatically run in a transaction.

  If we're already in a transaction this is a no-op.

  Note: If you need to override the retry count or the entity group,
  or if you want some kind of async behavior, use the transaction()
  function above.
  """
  @utils.wrapping(func)
  def transactional_wrapper(*args, **kwds):
    # Only open a new transaction when not already inside one;
    # otherwise call through directly (joins the current transaction).
    if in_transaction():
      return func(*args, **kwds)
    else:
      return transaction(lambda: func(*args, **kwds))
  return transactional_wrapper
def get_multi_async(keys):
  """Start an asynchronous get for every key in a sequence.

  Args:
    keys: A sequence of keys.

  Returns:
    A list of futures, one per key, in the same order as keys.
  """
  futures = []
  for key in keys:
    futures.append(key.get_async())
  return futures
def get_multi(keys):
  """Fetch a sequence of keys, waiting for all the results.

  Args:
    keys: A sequence of keys.

  Returns:
    A list whose items are either a Model instance or None if the key wasn't
    found.
  """
  futures = get_multi_async(keys)
  return [fut.get_result() for fut in futures]
def put_multi_async(entities):
  """Start an asynchronous put for every entity in a sequence.

  Args:
    entities: A sequence of Model instances.

  Returns:
    A list of futures, one per entity, in the same order.
  """
  futures = []
  for entity in entities:
    futures.append(entity.put_async())
  return futures
def put_multi(entities):
  """Store a sequence of Model instances, waiting for all the writes.

  Args:
    entities: A sequence of Model instances.

  Returns:
    A list with the stored keys.
  """
  futures = put_multi_async(entities)
  return [fut.get_result() for fut in futures]
def delete_multi_async(keys):
  """Start an asynchronous delete for every key in a sequence.

  Returns:
    A list of futures.
  """
  futures = []
  for key in keys:
    futures.append(key.delete_async())
  return futures
def delete_multi(keys):
  """Delete a sequence of keys, waiting for all deletions to finish.

  Args:
    keys: A sequence of keys.
  """
  futures = delete_multi_async(keys)
  # Each future's result is None; the list is returned only for
  # symmetry with get_multi()/put_multi().
  return [fut.get_result() for fut in futures]
# Update __all__ to contain all Property and Exception subclasses.
# This relies on the module's naming convention: the endswith() checks
# short-circuit first, so issubclass() is only reached for globals whose
# names end in 'Property' or 'Error' (which are expected to be classes).
for _name, _object in globals().items():
  if ((_name.endswith('Property') and issubclass(_object, Property)) or
      (_name.endswith('Error') and issubclass(_object, Exception))):
    __all__.append(_name)
| Python |
"""Tests for context.py."""
import logging
import os
import re
import sys
import time
import unittest
from google.appengine.api import datastore_errors
from google.appengine.api import memcache
from google.appengine.api import taskqueue
from google.appengine.datastore import datastore_rpc
from ndb import context
from ndb import eventloop
from ndb import model
from ndb import query
from ndb import tasklets
from ndb import test_utils
class MyAutoBatcher(context.AutoBatcher):
  """AutoBatcher subclass that records the arguments of every batch call.

  Tests inspect _log to verify how many batched RPCs were issued.
  """

  _log = []

  @classmethod
  def reset_log(cls):
    """Forget all previously recorded batch calls."""
    cls._log = []

  def __init__(self, todo_tasklet):
    # Wrap the real tasklet so every invocation is logged before it runs.
    def logging_todo(*args):
      self.__class__._log.append(args)
      return todo_tasklet(*args)
    super(MyAutoBatcher, self).__init__(logging_todo)
class ContextTests(test_utils.DatastoreTest):
  def setUp(self):
    """Create a fresh event loop, batcher log and Context for each test."""
    super(ContextTests, self).setUp()
    self.set_up_eventloop()
    MyAutoBatcher.reset_log()
    # Use MyAutoBatcher so each test can inspect how calls were batched.
    self.ctx = context.Context(
        conn=model.make_connection(default_model=model.Expando),
        auto_batcher_class=MyAutoBatcher)
def set_up_eventloop(self):
if eventloop._EVENT_LOOP_KEY in os.environ:
del os.environ[eventloop._EVENT_LOOP_KEY]
self.ev = eventloop.get_event_loop()
self.log = []
  def testContext_AutoBatcher_Get(self):
    """Three concurrent gets should be collapsed into a single batch."""
    @tasklets.tasklet
    def foo():
      key1 = model.Key(flat=['Foo', 1])
      key2 = model.Key(flat=['Foo', 2])
      key3 = model.Key(flat=['Foo', 3])
      fut1 = self.ctx.get(key1)
      fut2 = self.ctx.get(key2)
      fut3 = self.ctx.get(key3)
      ent1 = yield fut1
      ent2 = yield fut2
      ent3 = yield fut3
      raise tasklets.Return([ent1, ent2, ent3])
    ents = foo().get_result()
    # Nothing was written, so all gets come back empty.
    self.assertEqual(ents, [None, None, None])
    # All three gets should have gone out as one batched call.
    self.assertEqual(len(MyAutoBatcher._log), 1)
  @tasklets.tasklet
  def create_entities(self):
    """Helper tasklet: put three entities with incomplete keys.

    Returns (via tasklets.Return) the list of three completed keys.
    """
    key0 = model.Key(flat=['Foo', None])  # Incomplete: id assigned on put.
    ent1 = model.Model(key=key0)
    ent2 = model.Model(key=key0)
    ent3 = model.Model(key=key0)
    fut1 = self.ctx.put(ent1)
    fut2 = self.ctx.put(ent2)
    fut3 = self.ctx.put(ent3)
    key1 = yield fut1
    key2 = yield fut2
    key3 = yield fut3
    raise tasklets.Return([key1, key2, key3])
  def testContext_AutoBatcher_Put(self):
    """Three concurrent puts should be collapsed into a single batch."""
    keys = self.create_entities().get_result()
    self.assertEqual(len(keys), 3)
    # Every put completed its key.
    self.assertTrue(None not in keys)
    self.assertEqual(len(MyAutoBatcher._log), 1)
  def testContext_AutoBatcher_Delete(self):
    """Three concurrent deletes should be collapsed into a single batch."""
    @tasklets.tasklet
    def foo():
      key1 = model.Key(flat=['Foo', 1])
      key2 = model.Key(flat=['Foo', 2])
      key3 = model.Key(flat=['Foo', 3])
      fut1 = self.ctx.delete(key1)
      fut2 = self.ctx.delete(key2)
      fut3 = self.ctx.delete(key3)
      yield fut1
      yield fut2
      yield fut3
    foo().check_success()
    self.assertEqual(len(MyAutoBatcher._log), 1)
def testContext_MultiRpc(self):
  """10 puts/gets with max batch size 3 force MultiRpc usage.

  This test really tests the proper handling of MultiRpc by
  queue_rpc() in eventloop.py.  It's easier to test from here, and
  gives more assurance that it works.
  """
  # Caps of 3 keys/entities per RPC make 10 operations span several RPCs.
  config = datastore_rpc.Configuration(max_get_keys=3, max_put_entities=3)
  self.ctx._conn = model.make_connection(config, default_model=model.Expando)
  @tasklets.tasklet
  def foo():
    ents = [model.Expando() for i in range(10)]
    futs = [self.ctx.put(ent) for ent in ents]
    keys = yield futs
    futs = [self.ctx.get(key) for key in keys]
    ents2 = yield futs
    # Entities read back must equal those written.
    self.assertEqual(ents2, ents)
    raise tasklets.Return(keys)
  keys = foo().get_result()
  # NOTE: removed stray debug statement 'print keys' left in the test.
  self.assertEqual(len(keys), 10)
def testContext_Cache(self):
  """put/get/delete maintain the context's in-memory entity cache."""
  @tasklets.tasklet
  def foo():
    key1 = model.Key(flat=('Foo', 1))
    ent1 = model.Expando(key=key1, foo=42, bar='hello')
    key = yield self.ctx.put(ent1)
    self.assertTrue(key1 in self.ctx._cache)  # Whitebox.
    a = yield self.ctx.get(key1)
    b = yield self.ctx.get(key1)
    # Both gets are served from the cache: the very same object.
    self.assertTrue(a is b)
    yield self.ctx.delete(key1)
    # Delete leaves a negative cache entry (None), not a removal.
    self.assertTrue(self.ctx._cache[key] is None)  # Whitebox.
    a = yield self.ctx.get(key1)
    self.assertTrue(a is None)
  foo().check_success()
def testContext_CachePolicy(self):
  """With a never-cache policy, nothing is stored in the context cache."""
  def should_cache(key):
    # Policy callback: cache nothing.
    return False
  @tasklets.tasklet
  def foo():
    key1 = model.Key(flat=('Foo', 1))
    ent1 = model.Expando(key=key1, foo=42, bar='hello')
    key = yield self.ctx.put(ent1)
    self.assertTrue(key1 not in self.ctx._cache)  # Whitebox.
    a = yield self.ctx.get(key1)
    b = yield self.ctx.get(key1)
    # Each get hits the datastore: distinct objects.
    self.assertTrue(a is not b)
    yield self.ctx.delete(key1)
    self.assertTrue(key not in self.ctx._cache)  # Whitebox.
    a = yield self.ctx.get(key1)
    self.assertTrue(a is None)
  # Install the policy before running the tasklet.
  self.ctx.set_cache_policy(should_cache)
  foo().check_success()
def testContext_CachePolicyDisabledLater(self):
  """Disabling the cache must bypass entries cached earlier."""
  # If the cache is disabled after an entity is stored in the cache,
  # further get() attempts *must not* return the result stored in cache.
  self.ctx.set_cache_policy(lambda key: True)
  key1 = model.Key(flat=('Foo', 1))
  ent1 = model.Expando(key=key1)
  self.ctx.put(ent1).get_result()
  # get() uses cache
  self.assertTrue(key1 in self.ctx._cache)  # Whitebox.
  self.assertEqual(self.ctx.get(key1).get_result(), ent1)
  # get() uses cache (poison the cache entry to prove it).
  self.ctx._cache[key1] = None  # Whitebox.
  self.assertEqual(self.ctx.get(key1).get_result(), None)
  # get() doesn't use cache: the datastore copy must win again.
  self.ctx.set_cache_policy(lambda key: False)
  self.assertEqual(self.ctx.get(key1).get_result(), ent1)
def testContext_Memcache(self):
  """put() writes entities through to memcache under the 'NDB:' prefix."""
  @tasklets.tasklet
  def foo():
    key1 = model.Key(flat=('Foo', 1))
    key2 = model.Key(flat=('Foo', 2))
    ent1 = model.Expando(key=key1, foo=42, bar='hello')
    ent2 = model.Expando(key=key2, foo=1, bar='world')
    k1, k2 = yield self.ctx.put(ent1), self.ctx.put(ent2)
    self.assertEqual(k1, key1)
    self.assertEqual(k2, key2)
    yield tasklets.sleep(0.01)  # Let other tasklet complete.
    # Memcache keys are the urlsafe-encoded datastore keys.
    keys = [k1.urlsafe(), k2.urlsafe()]
    results = memcache.get_multi(keys, key_prefix='NDB:')
    # Stored values are the serialized entity protobufs.
    self.assertEqual(
        results,
        {key1.urlsafe(): self.ctx._conn.adapter.entity_to_pb(ent1),
         key2.urlsafe(): self.ctx._conn.adapter.entity_to_pb(ent2)})
  foo().check_success()
def testContext_MemcachePolicy(self):
  """memcache and timeout policies control which keys get set_multi'd.

  memcache.set_multi is monkeypatched with a tracking wrapper; each run
  of foo() records the (args, kwds, result, error) of every call so the
  assertions can inspect exactly what was written and with what timeout.
  """
  badkeys = []
  def tracking_set_multi(*args, **kwds):
    # Wrapper around the real set_multi that logs every call; when
    # badkeys is non-empty it simulates a partial failure result.
    try:
      res = save_set_multi(*args, **kwds)
      if badkeys and not res:
        res = badkeys
      track.append((args, kwds, res, None))
      return res
    except Exception, err:
      track.append((args, kwds, None, err))
      raise
  @tasklets.tasklet
  def foo():
    # NOTE: ent1/ent2/key1/key2 are closed over; they are defined below
    # before foo() is first run.
    k1, k2 = yield self.ctx.put(ent1), self.ctx.put(ent2)
    self.assertEqual(k1, key1)
    self.assertEqual(k2, key2)
    yield tasklets.sleep(0.01)  # Let other tasklet complete.
  key1 = model.Key('Foo', 1)
  key2 = model.Key('Foo', 2)
  ent1 = model.Expando(key=key1, foo=42, bar='hello')
  ent2 = model.Expando(key=key2, foo=1, bar='world')
  save_set_multi = memcache.set_multi
  try:
    memcache.set_multi = tracking_set_multi
    # Default policy: both entities written in one set_multi call.
    memcache.flush_all()
    track = []
    foo().check_success()
    self.assertEqual(len(track), 1)
    self.assertEqual(track[0][0],
                     ({key1.urlsafe(): ent1._to_pb(),
                       key2.urlsafe(): ent2._to_pb()},))
    self.assertEqual(track[0][1], {'key_prefix': 'NDB:', 'time': 0})
    # Policy "never": no set_multi call at all.
    memcache.flush_all()
    track = []
    self.ctx.set_memcache_policy(lambda key: False)
    foo().check_success()
    self.assertEqual(len(track), 0)
    # Policy "only key1": a single call with just key1.
    memcache.flush_all()
    track = []
    self.ctx.set_memcache_policy(lambda key: key == key1)
    foo().check_success()
    self.assertEqual(len(track), 1)
    self.assertEqual(track[0][0],
                     ({key1.urlsafe(): ent1._to_pb()},))
    self.assertEqual(track[0][1], {'key_prefix': 'NDB:', 'time': 0})
    # Per-key timeouts (key.id() => 1 and 2) force two separate calls,
    # one per distinct timeout value.
    memcache.flush_all()
    track = []
    self.ctx.set_memcache_policy(lambda key: True)
    self.ctx.set_memcache_timeout_policy(lambda key: key.id())
    foo().check_success()
    self.assertEqual(len(track), 2)
    self.assertEqual(track[0][0],
                     ({key1.urlsafe(): ent1._to_pb()},))
    self.assertEqual(track[0][1], {'key_prefix': 'NDB:', 'time': 1})
    self.assertEqual(track[1][0],
                     ({key2.urlsafe(): ent2._to_pb()},))
    self.assertEqual(track[1][1], {'key_prefix': 'NDB:', 'time': 2})
    # Simulated partial failure: the failed keys are surfaced in the
    # recorded result.
    memcache.flush_all()
    track = []
    badkeys = [key2.urlsafe()]
    self.ctx.set_memcache_timeout_policy(lambda key: 0)
    foo().check_success()
    self.assertEqual(len(track), 1)
    self.assertEqual(track[0][2], badkeys)
    memcache.flush_all()
  finally:
    # Always restore the real set_multi.
    memcache.set_multi = save_set_multi
def testContext_CacheQuery(self):
  """Query results are resolved through the cache: identical objects."""
  @tasklets.tasklet
  def foo():
    key1 = model.Key(flat=('Foo', 1))
    key2 = model.Key(flat=('Foo', 2))
    ent1 = model.Expando(key=key1, foo=42, bar='hello')
    ent2 = model.Expando(key=key2, foo=1, bar='world')
    key1a, key2a = yield self.ctx.put(ent1), self.ctx.put(ent2)
    self.assertTrue(key1 in self.ctx._cache)  # Whitebox.
    self.assertTrue(key2 in self.ctx._cache)  # Whitebox.
    self.assertEqual(key1, key1a)
    self.assertEqual(key2, key2a)
    @tasklets.tasklet
    def callback(ent):
      return ent
    qry = query.Query(kind='Foo')
    results = yield self.ctx.map_query(qry, callback)
    self.assertEqual(results, [ent1, ent2])
    # Not just equal: the *same* cached objects come back.
    self.assertTrue(results[0] is ent1)
    self.assertTrue(results[1] is ent2)
  foo().check_success()
def testContext_AllocateIds(self):
  """allocate_ids() supports both size= and max= forms."""
  @tasklets.tasklet
  def foo():
    key = model.Key(flat=('Foo', 1))
    # size=10 allocates the next 10 ids: (1, 10).
    lo_hi = yield self.ctx.allocate_ids(key, size=10)
    self.assertEqual(lo_hi, (1, 10))
    # max=20 allocates up to id 20, continuing from 11.
    lo_hi = yield self.ctx.allocate_ids(key, max=20)
    self.assertEqual(lo_hi, (11, 20))
  foo().check_success()
def testContext_MapQuery(self):
  """map_query() applies a tasklet callback to every query result."""
  @tasklets.tasklet
  def callback(ent):
    # Map each entity to its numeric id (last flat() element).
    return ent.key.flat()[-1]
  @tasklets.tasklet
  def foo():
    yield self.create_entities()
    qry = query.Query(kind='Foo')
    res = yield self.ctx.map_query(qry, callback)
    raise tasklets.Return(res)
  res = foo().get_result()
  # Order not asserted; compare as sets.
  self.assertEqual(set(res), set([1, 2, 3]))
def testContext_MapQuery_NoCallback(self):
  """map_query() with callback=None returns the entities themselves."""
  @tasklets.tasklet
  def foo():
    yield self.create_entities()
    qry = query.Query(kind='Foo')
    res = yield self.ctx.map_query(qry, None)
    raise tasklets.Return(res)
  res = foo().get_result()
  self.assertEqual(len(res), 3)
  for i, ent in enumerate(res):
    self.assertTrue(isinstance(ent, model.Model))
    # Results arrive in key order: ids 1, 2, 3.
    self.assertEqual(ent.key.flat(), ['Foo', i+1])
def testContext_MapQuery_NonTaskletCallback(self):
  """map_query() also accepts a plain (non-tasklet) callback."""
  def callback(ent):
    return ent.key.flat()[-1]
  @tasklets.tasklet
  def foo():
    yield self.create_entities()
    qry = query.Query(kind='Foo')
    res = yield self.ctx.map_query(qry, callback)
    raise tasklets.Return(res)
  res = foo().get_result()
  self.assertEqual(res, [1, 2, 3])
def testContext_MapQuery_CustomFuture(self):
  """With merge_future=, results stream through the supplied QueueFuture."""
  mfut = tasklets.QueueFuture()
  @tasklets.tasklet
  def callback(ent):
    return ent.key.flat()[-1]
  @tasklets.tasklet
  def foo():
    yield self.create_entities()
    qry = query.Query(kind='Foo')
    res = yield self.ctx.map_query(qry, callback, merge_future=mfut)
    # With a custom merge future, map_query itself returns None.
    self.assertEqual(res, None)
    vals = set()
    for i in range(3):
      val = yield mfut.getq()
      vals.add(val)
    # After the three results, the queue is exhausted: getq raises EOFError.
    fail = mfut.getq()
    self.assertRaises(EOFError, fail.get_result)
    raise tasklets.Return(vals)
  res = foo().get_result()
  self.assertEqual(res, set([1, 2, 3]))
def testContext_MapQuery_KeysOnly(self):
  """With keys_only=True the callback receives Keys, not entities."""
  qo = query.QueryOptions(keys_only=True)
  @tasklets.tasklet
  def callback(key):
    return key.pairs()[-1]
  @tasklets.tasklet
  def foo():
    yield self.create_entities()
    qry = query.Query(kind='Foo')
    res = yield self.ctx.map_query(qry, callback, options=qo)
    raise tasklets.Return(res)
  res = foo().get_result()
  self.assertEqual(set(res), set([('Foo', 1), ('Foo', 2), ('Foo', 3)]))
def testContext_MapQuery_Cursors(self):
  """With produce_cursors=True the callback gets (batch, index, entity)."""
  qo = query.QueryOptions(produce_cursors=True)
  @tasklets.tasklet
  def callback(batch, i, ent):
    return ent.key.pairs()[-1]
  @tasklets.tasklet
  def foo():
    yield self.create_entities()
    qry = query.Query(kind='Foo')
    res = yield self.ctx.map_query(qry, callback, options=qo)
    raise tasklets.Return(res)
  res = foo().get_result()
  self.assertEqual(set(res), set([('Foo', 1), ('Foo', 2), ('Foo', 3)]))
def testContext_IterQuery(self):
  """iter_query() yields entities via getq() until EOFError."""
  @tasklets.tasklet
  def foo():
    yield self.create_entities()
    qry = query.Query(kind='Foo')
    it = self.ctx.iter_query(qry)
    res = []
    while True:
      try:
        ent = yield it.getq()
      except EOFError:
        # End of the result stream.
        break
      res.append(ent)
    raise tasklets.Return(res)
  res = foo().get_result()
  self.assertEqual(len(res), 3)
  for i, ent in enumerate(res):
    self.assertTrue(isinstance(ent, model.Model))
    self.assertEqual(ent.key.flat(), ['Foo', i+1])
def testContext_TransactionFailed(self):
  """A transaction callback runs in its own context with its own cache."""
  @tasklets.tasklet
  def foo():
    key = model.Key(flat=('Foo', 1))
    ent = model.Expando(key=key, bar=1)
    yield self.ctx.put(ent)
    @tasklets.tasklet
    def callback():
      # Inside the transaction: a fresh context whose cache starts empty.
      ctx = tasklets.get_context()
      self.assertTrue(key not in ctx._cache)  # Whitebox.
      e = yield key.get_async()
      self.assertTrue(key in ctx._cache)  # Whitebox.
      e.bar = 2
      yield e.put_async()
    yield self.ctx.transaction(callback)
    # After commit, the outer context's cache reflects the update.
    self.assertEqual(self.ctx._cache[key].bar, 2)
  foo().check_success()
def testContext_TransactionException(self):
  """A transaction whose callback raises must be rolled back."""
  key = model.Key('Foo', 1)
  @tasklets.tasklet
  def foo():
    ent = model.Expando(key=key, bar=1)
    @tasklets.tasklet
    def callback():
      # Put inside the transaction, then fail it deliberately.
      # (Result of put_async deliberately unbound: the original bound it
      # to a local 'key' that shadowed the outer key and was never used.)
      yield ent.put_async()
      raise Exception('foo')
    yield self.ctx.transaction(callback)
  self.assertRaises(Exception, foo().check_success)
  # The exception rolled back the put: no entity exists.
  self.assertEqual(key.get(), None)
def testContext_TransactionRollback(self):
  """Raising datastore_errors.Rollback aborts the transaction cleanly."""
  key = model.Key('Foo', 1)
  @tasklets.tasklet
  def foo():
    ent = model.Expando(key=key, bar=1)
    @tasklets.tasklet
    def callback():
      # Put inside the transaction, then request a rollback.
      # (Removed an unused local 'ctx' and a shadowing 'key' binding.)
      yield ent.put_async()
      raise datastore_errors.Rollback()
    yield self.ctx.transaction(callback)
  # Rollback is swallowed by transaction(): foo succeeds...
  foo().check_success()
  # ...but the put was undone.
  self.assertEqual(key.get(), None)
def testContext_TransactionAddTask(self):
  """A transactional taskqueue.add() works inside ctx.transaction()."""
  key = model.Key('Foo', 1)
  @tasklets.tasklet
  def foo():
    ent = model.Expando(key=key, bar=1)
    @tasklets.tasklet
    def callback():
      ctx = tasklets.get_context()
      key = yield ctx.put(ent)
      # Enqueue a task bound to this transaction.
      taskqueue.add(url='/', transactional=True)
    yield self.ctx.transaction(callback)
  foo().check_success()
def testContext_GetOrInsert(self):
  """get_or_insert() creates once, then returns the same entity.

  This also tests Context.transaction().
  """
  class Mod(model.Model):
    data = model.StringProperty()
  @tasklets.tasklet
  def foo():
    # Bare 'assert' statements replaced with unittest assertions so the
    # checks survive running under python -O.
    ent = yield self.ctx.get_or_insert(Mod, 'a', data='hello')
    self.assertTrue(isinstance(ent, Mod))
    ent2 = yield self.ctx.get_or_insert(Mod, 'a', data='hello')
    self.assertEqual(ent2, ent)
  foo().check_success()
def testContext_GetOrInsertWithParent(self):
  """get_or_insert() honors parent=; repeated calls return the same entity.

  This also tests Context.transaction().
  """
  class Mod(model.Model):
    data = model.StringProperty()
  @tasklets.tasklet
  def foo():
    parent = model.Key(flat=('Foo', 1))
    # Bare 'assert' statements replaced with unittest assertions so the
    # checks survive running under python -O.
    ent = yield self.ctx.get_or_insert(Mod, 'a', parent=parent, data='hello')
    self.assertTrue(isinstance(ent, Mod))
    ent2 = yield self.ctx.get_or_insert(Mod, 'a', parent=parent, data='hello')
    self.assertEqual(ent2, ent)
  foo().check_success()
def testAddContextDecorator(self):
  """@context.toplevel gives each call a fresh Context."""
  class Demo(object):
    @context.toplevel
    def method(self, arg):
      return (tasklets.get_context(), arg)
    @context.toplevel
    def method2(self, **kwds):
      return (tasklets.get_context(), kwds)
  a = Demo()
  old_ctx = tasklets.get_context()
  ctx, arg = a.method(42)
  self.assertTrue(isinstance(ctx, context.Context))
  self.assertEqual(arg, 42)
  # The decorated call ran under a new context, not the ambient one.
  self.assertTrue(ctx is not old_ctx)
  old_ctx = tasklets.get_context()
  ctx, kwds = a.method2(foo='bar', baz='ding')
  self.assertTrue(isinstance(ctx, context.Context))
  self.assertEqual(kwds, dict(foo='bar', baz='ding'))
  self.assertTrue(ctx is not old_ctx)
def testDefaultContextTransaction(self):
  """transaction() swaps in a transactional context, then restores it."""
  @tasklets.synctasklet
  def outer():
    ctx1 = tasklets.get_context()
    @tasklets.tasklet
    def inner():
      # Inside the transaction: a different context with a
      # TransactionalConnection.
      ctx2 = tasklets.get_context()
      self.assertTrue(ctx1 is not ctx2)
      self.assertTrue(isinstance(ctx2._conn,
                                 datastore_rpc.TransactionalConnection))
      return 42
    a = yield tasklets.get_context().transaction(inner)
    # After the transaction, the original context is current again.
    ctx1a = tasklets.get_context()
    self.assertTrue(ctx1 is ctx1a)
    raise tasklets.Return(a)
  b = outer()
  self.assertEqual(b, 42)
def testExplicitTransactionClearsDefaultContext(self):
  """An explicit ctx.transaction() must not leak its context outward."""
  old_ctx = tasklets.get_context()
  @tasklets.synctasklet
  def outer():
    ctx1 = tasklets.get_context()
    @tasklets.tasklet
    def inner():
      # The transactional context is current for the callback only.
      ctx = tasklets.get_context()
      self.assertTrue(ctx is not ctx1)
      key = model.Key('Account', 1)
      ent = yield key.get_async()
      # Still the transactional context after the yield.
      self.assertTrue(tasklets.get_context() is ctx)
      self.assertTrue(ent is None)
      raise tasklets.Return(42)
    fut = ctx1.transaction(inner)
    # Outside the callback, the outer context stays current...
    self.assertEqual(tasklets.get_context(), ctx1)
    val = yield fut
    # ...both before and after waiting for the transaction.
    self.assertEqual(tasklets.get_context(), ctx1)
    raise tasklets.Return(val)
  val = outer()
  self.assertEqual(val, 42)
  self.assertTrue(tasklets.get_context() is old_ctx)
def testKindError(self):
  """Getting an entity whose kind has no model class raises KindError."""
  ctx = context.Context()
  # If the cache is enabled, attempts to retrieve the object we just put will
  # be satisfied from the cache, so the adapter we're testing will never get
  # called.
  ctx.set_cache_policy(lambda key: False)
  @tasklets.tasklet
  def foo():
    # Foo class is declared in query_test, so let's get a unusual class name.
    key1 = model.Key(flat=('ThisModelClassDoesntExist', 1))
    ent1 = model.Expando(key=key1, foo=42, bar='hello')
    key = yield ctx.put(ent1)
    # This get must hit the datastore and fail in the adapter.
    a = yield ctx.get(key1)
  self.assertRaises(model.KindError, foo().check_success)
def main():
  """Entry point: delegate to unittest's test discovery and runner."""
  unittest.main()


if __name__ == '__main__':
  main()
| Python |
"""Some tests for datastore_rpc.py."""
import unittest
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_file_stub
from google.appengine.datastore import entity_pb
from google.appengine.datastore import datastore_rpc
from ndb import key, model, test_utils
class PendingTests(test_utils.DatastoreTest):
  """Tests for the 'pending RPC' management."""

  def testBasicSetup1(self):
    """An async put returns an RPC whose result is the assigned key."""
    ent = model.Expando()
    ent.foo = 'bar'
    rpc = self.conn.async_put(None, [ent])
    [key] = rpc.get_result()
    self.assertEqual(key, model.Key(flat=['Expando', 1]))

  def testBasicSetup2(self):
    """An async get of a nonexistent key yields None."""
    key = model.Key(flat=['Expando', 1])
    rpc = self.conn.async_get(None, [key])
    [ent] = rpc.get_result()
    self.assertTrue(ent is None)

  def SetUpCallHooks(self):
    # Install pre-/post-call hooks that record their arguments so the
    # tests can observe when RPCs are issued and completed.
    self.pre_args = []
    self.post_args = []
    apiproxy_stub_map.apiproxy.GetPreCallHooks().Append('test1',
                                                        self.PreCallHook)
    apiproxy_stub_map.apiproxy.GetPostCallHooks().Append('test1',
                                                         self.PostCallHook)

  def PreCallHook(self, service, call, request, response, rpc=None):
    # Recorded at RPC issue time.
    self.pre_args.append((service, call, request, response, rpc))

  def PostCallHook(self, service, call, request, response,
                   rpc=None, error=None):
    # Recorded at RPC completion time.
    self.post_args.append((service, call, request, response, rpc, error))

  def testCallHooks(self):
    """Pre-hook fires when the RPC is issued; post-hook after completion."""
    self.SetUpCallHooks()
    key = model.Key(flat=['Expando', 1])
    rpc = self.conn.async_get(None, [key])
    self.assertEqual(len(self.pre_args), 1)
    self.assertEqual(self.post_args, [])
    [ent] = rpc.get_result()
    self.assertEqual(len(self.pre_args), 1)
    self.assertEqual(len(self.post_args), 1)
    self.assertEqual(self.pre_args[0][:2], ('datastore_v3', 'Get'))
    self.assertEqual(self.post_args[0][:2], ('datastore_v3', 'Get'))

  def testCallHooks_Pending(self):
    """wait_for_all_pending_rpcs() completes RPCs so post-hooks run."""
    self.SetUpCallHooks()
    key = model.Key(flat=['Expando', 1])
    rpc = self.conn.async_get(None, [key])
    self.conn.wait_for_all_pending_rpcs()
    self.assertEqual(rpc.state, 2)  # FINISHING
    self.assertEqual(len(self.pre_args), 1)
    self.assertEqual(len(self.post_args), 1)  # NAILED IT!
    self.assertEqual(self.conn.get_pending_rpcs(), set())

  def NastyCallback(self, rpc):
    # Completion callback that immediately issues another RPC, to check
    # that wait_for_all_pending_rpcs() copes with RPCs added mid-wait.
    [ent] = rpc.get_result()
    key = model.Key(flat=['Expando', 1])
    newrpc = self.conn.async_get(None, [key])

  def testCallHooks_Pending_CallbackAddsMore(self):
    """Waiting drains pending RPCs even when a callback adds more."""
    self.SetUpCallHooks()
    conf = datastore_rpc.Configuration(on_completion=self.NastyCallback)
    key = model.Key(flat=['Expando', 1])
    rpc = self.conn.async_get(conf, [key])
    self.conn.wait_for_all_pending_rpcs()
    self.assertEqual(self.conn.get_pending_rpcs(), set())
def main():
  """Entry point: run every test in this module."""
  unittest.main()


if __name__ == '__main__':
  main()
| Python |
"""The Key class, and associated utilities.
A Key encapsulates the following pieces of information, which together
uniquely designate a (possible) entity in the App Engine datastore:
- an application id (a string)
- a namespace (a string)
- a list of one or more (kind, id) pairs where kind is a string and id
is either a string or an integer.
The application id must always be part of the key, but since most
applications can only access their own entities, it defaults to the
current application id and you rarely need to worry about it. It must
not be empty.
The namespace designates a top-level partition of the key space for a
particular application. If you've never heard of namespaces, you can
safely ignore this feature.
Most of the action is in the (kind, id) pairs. A key must have at
least one (kind, id) pair. The last (kind, id) pair gives the kind
and the id of the entity that the key refers to, the others merely
specify a 'parent key'.
The kind is a string giving the name of the model class used to
represent the entity. (In more traditional databases this would be
the table name.) A model class is a Python class derived from
ndb.Model; see the documentation for ndb/model.py. Only the class
name itself is used as the kind. This means all your model classes
must be uniquely named within one application. You can override this
on a per-class basis.
The id is either a string or an integer. When the id is a string, the
application is in control of how it assigns ids: For example, you
could use an email address as the id for Account entities.
To use integer ids, you must let the datastore choose a unique id for
an entity when it is first inserted into the datastore. You can set
the id to None to represent the key for an entity that hasn't yet been
inserted into the datastore. The final key (including the assigned
id) will be returned after the entity is successfully inserted into
the datastore.
A key for which the id of the last (kind, id) pair is set to None is
called an incomplete key. Such keys can only be used to insert
entities into the datastore.
A key with exactly one (kind, id) pair is called a toplevel key or a
root key. Toplevel keys are also used as entity groups, which play a
role in transaction management.
If there is more than one (kind, id) pair, all but the last pair
represent the 'ancestor path', also known as the key of the 'parent
entity'.
Other constraints:
- Kinds and string ids must not be empty and must be at most 500 bytes
long (after UTF-8 encoding, if given as Python unicode objects).
- Integer ids must be at least 1 and less than 2**63.
For more info about namespaces, see
http://code.google.com/appengine/docs/python/multitenancy/overview.html.
The namespace defaults to the 'default namespace' selected by the
namespace manager. To explicitly select the empty namespace pass
namespace=''.
"""
__author__ = 'guido@google.com (Guido van Rossum)'
# TODO: Change asserts to better exceptions.
import base64
import os
from google.appengine.api import datastore_errors
from google.appengine.api import namespace_manager
from google.appengine.datastore import datastore_rpc
from google.appengine.datastore import entity_pb
__all__ = ['Key']
class Key(object):
  """An immutable datastore key.

  For flexibility and convenience, multiple constructor signatures are
  supported.

  The primary way to construct a key is using positional arguments:
  - Key(kind1, id1, kind2, id2, ...).

  This is shorthand for either of the following two longer forms:
  - Key(pairs=[(kind1, id1), (kind2, id2), ...])
  - Key(flat=[kind1, id1, kind2, id2, ...])

  Either of the above constructor forms can additionally pass in another
  key using parent=<key>.  The (kind, id) pairs of the parent key are
  inserted before the (kind, id) pairs passed explicitly.

  You can also construct a Key from a 'url-safe' encoded string:
  - Key(urlsafe=<string>)

  For esoteric purposes the following constructors exist:
  - Key(reference=<reference>) -- passing in a low-level Reference object
  - Key(serialized=<string>) -- passing in a serialized low-level Reference
  - Key(<dict>) -- for unpickling, the same as Key(**<dict>)

  The 'url-safe' string is really a websafe-base64-encoded serialized
  Reference, but it's best to think of it as just an opaque unique
  string.

  Additional constructor keyword arguments:
  - app=<string> -- specify the application id
  - namespace=<string> -- specify the namespace

  If a Reference is passed (using one of reference, serialized or
  urlsafe), the args and namespace keywords must match what is already
  present in the Reference (after decoding if necessary).  The parent
  keyword cannot be combined with a Reference in any form.

  Keys are immutable, which means that a Key object cannot be modified
  once it has been created.  This is enforced by the implementation as
  well as Python allows.

  For access to the contents of a key, the following methods and
  operations are supported:
  - repr(key), str(key) -- return a string representation resembling
    the shortest constructor form, omitting the app and namespace
    unless they differ from the default value.
  - key1 == key2, key1 != key2 -- comparison for equality between Keys.
  - hash(key) -- a hash value sufficient for storing Keys in a dict.
  - key.pairs() -- a list of (kind, id) pairs.
  - key.flat() -- a list of flattened kind and id values, i.e.
    [kind1, id1, kind2, id2, ...].
  - key.app() -- the application id.
  - key.id() -- the string or integer id in the last (kind, id) pair,
    or None if the key is incomplete.
  - key.string_id() -- the string id in the last (kind, id) pair,
    or None if the key has an integer id or is incomplete.
  - key.integer_id() -- the integer id in the last (kind, id) pair,
    or None if the key has a string id or is incomplete.
  - key.namespace() -- the namespace.
  - key.kind() -- a shortcut for key.pairs()[-1][0].
  - key.parent() -- a Key constructed from all but the last (kind, id)
    pairs.
  - key.urlsafe() -- a websafe-base64-encoded serialized Reference.
  - key.serialized() -- a serialized Reference.
  - key.reference() -- a Reference object.  Since Reference objects are
    mutable, this returns a brand new Reference object.
  - key._reference() -- the Reference object contained in the Key.
    The caller promises not to mutate it.
  - key._pairs() -- an iterator, equivalent to iter(key.pairs()).
  - key._flat() -- an iterator, equivalent to iter(key.flat()).

  Keys also support interaction with the datastore; these methods are
  the only ones that engage in any kind of I/O activity.  For Future
  objects, see the document for ndb/tasklets.py.
  - key.get() -- return the entity for the Key.
  - key.get_async() -- return a Future whose eventual result is
    the entity for the Key.
  - key.delete() -- delete the entity for the Key.
  - key.delete_async() -- asynchronously delete the entity for the Key.

  Keys may be pickled.

  Subclassing Key is best avoided; it would be hard to get right.
  """

  # The only instance state: the wrapped entity_pb.Reference (name-mangled
  # to _Key__reference).  __slots__ keeps instances small and prevents
  # arbitrary attribute assignment, which supports immutability.
  __slots__ = ['__reference']

  def __new__(cls, *_args, **kwargs):
    """Constructor.  See the class docstring for arguments."""
    if _args:
      if len(_args) == 1 and isinstance(_args[0], dict):
        # For pickling only: one positional argument is allowed,
        # giving a dict specifying the keyword arguments.
        assert not kwargs
        kwargs = _args[0]
      else:
        # Positional args are shorthand for flat=[kind1, id1, ...].
        assert 'flat' not in kwargs
        kwargs['flat'] = _args
    self = super(Key, cls).__new__(cls)
    self.__reference = _ConstructReference(cls, **kwargs)
    return self

  def __repr__(self):
    """String representation, used by str() and repr().

    We produce a short string that conveys all relevant information,
    suppressing app and namespace when they are equal to the default.
    """
    # TODO: Instead of "Key('Foo', 1)" perhaps return "Key(Foo, 1)" ?
    args = []
    for item in self._flat():
      if not item:
        args.append('None')
      elif isinstance(item, basestring):
        assert isinstance(item, str)  # No unicode should make it here.
        args.append(repr(item))
      else:
        args.append(str(item))
    if self.app() != _DefaultAppId():
      args.append('app=%r' % self.app())
    if self.namespace() != _DefaultNamespace():
      args.append('namespace=%r' % self.namespace())
    return 'Key(%s)' % ', '.join(args)

  __str__ = __repr__

  def __hash__(self):
    """Hash value, for use in dict lookups."""
    # This ignores app and namespace, which is fine since hash()
    # doesn't need to return a unique value -- it only needs to ensure
    # that the hashes of equal keys are equal, not the other way
    # around.
    return hash(tuple(self._pairs()))

  def __eq__(self, other):
    """Equality comparison operation."""
    # Unlike __hash__, equality does consider app and namespace.
    if not isinstance(other, Key):
      return NotImplemented
    return (tuple(self._pairs()) == tuple(other._pairs()) and
            self.app() == other.app() and
            self.namespace() == other.namespace())

  def __ne__(self, other):
    """The opposite of __eq__."""
    if not isinstance(other, Key):
      return NotImplemented
    return not self.__eq__(other)

  def __getstate__(self):
    """Private API used for pickling."""
    return ({'pairs': tuple(self._pairs()),
             'app': self.app(),
             'namespace': self.namespace()},)

  def __setstate__(self, state):
    """Private API used for pickling."""
    assert len(state) == 1
    kwargs = state[0]
    assert isinstance(kwargs, dict)
    self.__reference = _ConstructReference(self.__class__, **kwargs)

  def __getnewargs__(self):
    """Private API used for pickling."""
    return ({'pairs': tuple(self._pairs()),
             'app': self.app(),
             'namespace': self.namespace()},)

  def parent(self):
    """Return a Key constructed from all but the last (kind, id) pairs.

    If there is only one (kind, id) pair, return None.
    """
    pairs = self.pairs()
    if len(pairs) <= 1:
      return None
    return Key(pairs=pairs[:-1], app=self.app(), namespace=self.namespace())

  def root(self):
    """Return the root key.  This is either self or the highest parent."""
    pairs = self.pairs()
    if len(pairs) <= 1:
      return self
    return Key(pairs=pairs[:1], app=self.app(), namespace=self.namespace())

  def namespace(self):
    """Return the namespace."""
    return self.__reference.name_space()

  def app(self):
    """Return the application id."""
    return self.__reference.app()

  def id(self):
    """Return the string or integer id in the last (kind, id) pair, if any.

    Returns:
      A string or integer id, or None if the key is incomplete.
    """
    # A path element holds either a name (string) or an id (int);
    # an unset/zero value falls through to None.
    elem = self.__reference.path().element(-1)
    return elem.name() or elem.id() or None

  def string_id(self):
    """Return the string id in the last (kind, id) pair, if any.

    Returns:
      A string id, or None if the key has an integer id or is incomplete.
    """
    elem = self.__reference.path().element(-1)
    return elem.name() or None

  def integer_id(self):
    """Return the integer id in the last (kind, id) pair, if any.

    Returns:
      An integer id, or None if the key has a string id or is incomplete.
    """
    elem = self.__reference.path().element(-1)
    return elem.id() or None

  def pairs(self):
    """Return a list of (kind, id) pairs."""
    return list(self._pairs())

  def _pairs(self):
    """Iterator yielding (kind, id) pairs."""
    for elem in self.__reference.path().element_list():
      kind = elem.type()
      if elem.has_id():
        idorname = elem.id()
      else:
        idorname = elem.name()
      if not idorname:
        # Incomplete key: id 0 / empty name is reported as None.
        idorname = None
      yield (kind, idorname)

  def flat(self):
    """Return a list of alternating kind and id values."""
    return list(self._flat())

  def _flat(self):
    """Iterator yielding alternating kind and id values."""
    for kind, idorname in self._pairs():
      yield kind
      yield idorname

  def kind(self):
    """Return the kind of the entity referenced.

    This is the kind from the last (kind, id) pair.
    """
    return self.__reference.path().element(-1).type()

  def reference(self):
    """Return a copy of the Reference object for this Key.

    This is a entity_pb.Reference instance -- a protocol buffer class
    used by the lower-level API to the datastore.
    """
    # Copy, because Reference objects are mutable and the Key must stay
    # immutable.
    return _ReferenceFromReference(self.__reference)

  def _reference(self):
    """Return the Reference object for this Key.

    This is a backdoor API for internal use only.  The caller should
    not mutate the return value.
    """
    return self.__reference

  def serialized(self):
    """Return a serialized Reference object for this Key."""
    return self.__reference.Encode()

  def urlsafe(self):
    """Return a url-safe string encoding this Key's Reference.

    This string is compatible with other APIs and languages and with
    the strings used to represent Keys in GQL and in the App Engine
    Admin Console.
    """
    # This is 3-4x faster than urlsafe_b64encode().
    # (Original comment said urlsafe_b64decode -- this is the encode path.)
    urlsafe = base64.b64encode(self.__reference.Encode())
    return urlsafe.rstrip('=').replace('+', '-').replace('/', '_')

  # Datastore API using the default context.
  # These use local import since otherwise they'd be recursive imports.

  def get(self):
    """Synchronously get the entity for this Key.

    Return None if there is no such entity.
    """
    return self.get_async().get_result()

  def get_async(self):
    """Return a Future whose result is the entity for this Key.

    If no such entity exists, a Future is still returned, and the
    Future's eventual result will be None.
    """
    from ndb import tasklets
    return tasklets.get_context().get(self)

  def delete(self):
    """Synchronously delete the entity for this Key.

    This is a no-op if no such entity exists.
    """
    return self.delete_async().get_result()

  def delete_async(self):
    """Schedule deletion of the entity for this Key.

    This returns a Future, whose result becomes available once the
    deletion is complete.  If no such entity exists, a Future is still
    returned.  In all cases the Future's result is None (i.e. there is
    no way to tell whether the entity existed or not).
    """
    from ndb import tasklets
    return tasklets.get_context().delete(self)
# The remaining functions in this module are private.
@datastore_rpc._positional(1)
def _ConstructReference(cls, pairs=None, flat=None,
                        reference=None, serialized=None, urlsafe=None,
                        app=None, namespace=None, parent=None):
  """Construct a Reference; the signature is the same as for Key."""
  assert cls is Key
  # Exactly one of pairs/flat/reference/serialized/urlsafe may be given.
  howmany = (bool(pairs) + bool(flat) +
             bool(reference) + bool(serialized) + bool(urlsafe))
  assert howmany == 1
  if flat or pairs:
    if flat:
      # flat=[k1, i1, k2, i2, ...] is converted into pairs.
      assert len(flat) % 2 == 0
      pairs = [(flat[i], flat[i+1]) for i in xrange(0, len(flat), 2)]
    assert pairs
    if parent is not None:
      # Prepend the parent's (kind, id) pairs and inherit its app and
      # namespace (which must match if also given explicitly).
      if not isinstance(parent, Key):
        raise datastore_errors.BadValueError(
            'Expected Key instance, got %r' % parent)
      pairs[:0] = parent.pairs()
      if app:
        assert app == parent.app(), (app, parent.app())
      else:
        app = parent.app()
      if namespace is not None:
        assert namespace == parent.namespace(), (namespace,
                                                 parent.namespace())
      else:
        namespace = parent.namespace()
    reference = _ReferenceFromPairs(pairs, app=app, namespace=namespace)
  else:
    # You can't combine parent= with reference=, serialized= or urlsafe=.
    assert parent is None
    if urlsafe:
      serialized = _DecodeUrlSafe(urlsafe)
    if serialized:
      reference = _ReferenceFromSerialized(serialized)
    assert reference.path().element_size()
    # TODO: assert that each element has a type and either an id or a name
    if not serialized:
      # A caller-supplied Reference is copied so the Key owns its own.
      reference = _ReferenceFromReference(reference)
    # You needn't specify app= or namespace= together with reference=,
    # serialized= or urlsafe=, but if you do, their values must match
    # what is already in the reference.
    if app is not None:
      assert app == reference.app(), (app, reference.app())
    if namespace is not None:
      assert namespace == reference.name_space(), (namespace,
                                                   reference.name_space())
  return reference
def _ReferenceFromPairs(pairs, reference=None, app=None, namespace=None):
  """Construct a Reference from a list of pairs.

  If a Reference is passed in as the second argument, it is modified
  in place.  The app and namespace are set from the corresponding
  keyword arguments, with the customary defaults.
  """
  if reference is None:
    reference = entity_pb.Reference()
  path = reference.mutable_path()
  last = False  # Set once an incomplete (id=None) pair has been seen.
  for kind, idorname in pairs:
    if last:
      # Only the final pair may be incomplete.
      raise datastore_errors.BadArgumentError(
          'Incomplete Key entry must be last')
    if not isinstance(kind, basestring):
      if isinstance(kind, type):
        # A Model class may be passed instead of a kind string.
        # Late import to avoid cycles.
        from ndb.model import Model
        modelclass = kind
        assert issubclass(modelclass, Model), repr(modelclass)
        kind = modelclass._get_kind()
      assert isinstance(kind, basestring), repr(kind)
    if isinstance(kind, unicode):
      kind = kind.encode('utf8')
    assert 1 <= len(kind) <= 500
    elem = path.add_element()
    elem.set_type(kind)
    if isinstance(idorname, (int, long)):
      assert 1 <= idorname < 2**63
      elem.set_id(idorname)
    elif isinstance(idorname, basestring):
      if isinstance(idorname, unicode):
        idorname = idorname.encode('utf8')
      assert 1 <= len(idorname) <= 500
      elem.set_name(idorname)
    elif idorname is None:
      # Incomplete key: id 0 means "let the datastore assign one".
      elem.set_id(0)
      last = True
    else:
      assert False, 'bad idorname (%r)' % (idorname,)
  # An empty app id means to use the default app id.
  if not app:
    app = _DefaultAppId()
  # Always set the app id, since it is mandatory.
  reference.set_app(app)
  # An empty namespace overrides the default namespace.
  if namespace is None:
    namespace = _DefaultNamespace()
  # Only set the namespace if it is not empty.
  if namespace:
    reference.set_name_space(namespace)
  return reference
def _ReferenceFromReference(reference):
  """Return a fresh Reference carrying the same data as *reference*."""
  clone = entity_pb.Reference()
  clone.CopyFrom(reference)
  return clone
def _ReferenceFromSerialized(serialized):
  """Construct a Reference from its serialized (binary protobuf) form."""
  assert isinstance(serialized, basestring)
  data = serialized
  if isinstance(data, unicode):
    # The protobuf constructor needs a byte string.
    data = data.encode('utf8')
  return entity_pb.Reference(data)
def _DecodeUrlSafe(urlsafe):
  """Decode a url-safe base64-encoded string.

  This returns the decoded byte string.
  """
  assert isinstance(urlsafe, basestring)
  if isinstance(urlsafe, unicode):
    urlsafe = urlsafe.encode('utf8')
  # Restore the '=' padding that url-safe encoders commonly strip.
  remainder = len(urlsafe) % 4
  if remainder:
    urlsafe += '=' * (4 - remainder)
  # Translating to the standard alphabet and calling b64decode() is
  # 3-4x faster than urlsafe_b64decode().
  standard = urlsafe.replace('-', '+').replace('_', '/')
  return base64.b64decode(standard)
def _DefaultAppId():
  """Return the default application id.

  Taken from the APPLICATION_ID environment variable; '_' when unset.
  """
  return os.environ.get('APPLICATION_ID', '_')
def _DefaultNamespace():
  """Return the default namespace, as reported by the namespace manager."""
  return namespace_manager.get_namespace()
| Python |
import logging
import os
import sys
def wrapping(wrapped):
  """Decorator-decorator: make a wrapper impersonate *wrapped*.

  Following the lead of Twisted and Monocle, copying the name, docstring
  and attribute dict of the wrapped function onto its wrapper is supposed
  to make debugging heavily decorated code easier.
  TODO: Evaluate; so far it hasn't helped (nor hurt).
  """
  def wrapping_wrapper(wrapper):
    for attr in ('__name__', '__doc__'):
      setattr(wrapper, attr, getattr(wrapped, attr))
    wrapper.__dict__.update(wrapped.__dict__)
    return wrapper
  return wrapping_wrapper
def get_stack(limit=10):
  """Return up to *limit* strings showing where the current frame was called.

  Frames whose locals set __ndb_debug__ to 'SKIP' are omitted; any other
  non-None __ndb_debug__ value is appended to that frame's line.
  """
  frame = sys._getframe(1)  # Always skip get_stack() itself.
  lines = []
  while frame is not None and len(lines) < limit:
    marker = frame.f_locals.get('__ndb_debug__')
    if marker != 'SKIP':
      entry = frame_info(frame)
      if marker is not None:
        entry += ' # ' + str(marker)
      lines.append(entry)
    frame = frame.f_back
  return lines
def func_info(func, lineno=None):
  """Describe *func* as 'funcname(filename:lineno)'.

  Bug fix: use __code__ (available since Python 2.6) instead of the
  legacy Python-2-only func_code alias, so this also works on Python 3.
  """
  code = func.__code__
  return code_info(code, lineno)
def gen_info(gen):
  """Describe the state and location of generator *gen* as a string."""
  frame = gen.gi_frame
  # Classify the generator's lifecycle state.
  if gen.gi_running:
    prefix = 'running generator '
  elif frame is None:
    prefix = 'terminated generator '
  elif frame.f_lasti < 0:
    prefix = 'initial generator '
  else:
    prefix = 'suspended generator '
  # Prefer the live frame, then the code object, then the bare id.
  if frame is not None:
    return prefix + frame_info(frame)
  code = getattr(gen, 'gi_code', None)
  if code is not None:
    return prefix + code_info(code)
  return prefix + hex(id(gen))
def frame_info(frame):
  """Describe *frame* as 'funcname(filename:lineno)'."""
  return code_info(frame.f_code, lineno=frame.f_lineno)
def code_info(code, lineno=None):
  """Format a code object as 'funcname(filename:lineno)'.

  When *lineno* is None, the code object's first line number is used.
  TODO: Be cleverer about stripping filename, e.g. strip based on sys.path.
  """
  if lineno is None:
    lineno = code.co_firstlineno
  filename = os.path.basename(code.co_filename)
  return '%s(%s:%s)' % (code.co_name, filename, lineno)
def logging_debug(*args):
  """Log a debug message, but only in super-verbose mode.

  NOTE: If you want to see debug messages, set the logging level
  manually to logging.DEBUG - 1; or for tests use -v -v -v (see below).
  The comparison is deliberately strict ('<'): the standard DEBUG level
  alone does not enable these messages.
  """
  if logging.getLogger().level < logging.DEBUG:
    logging.debug(*args)
def tweak_logging():
  """Adjust the root logging level based on -v flags in sys.argv.

  Hack for running tests with verbose logging.  If there are two or
  more -v flags, turn on INFO logging; if there are 3 or more, DEBUG
  (actually DEBUG - 1, which also enables logging_debug() above).
  (A single -v just tells unittest.main() to print the name of each
  test; we don't want to interfere with that.)
  """
  v = 0
  for arg in sys.argv[1:]:
    if arg.startswith('-v'):
      v += arg.count('v')
  # Bug fix: 'level' used to be passed to basicConfig() unconditionally,
  # raising NameError whenever fewer than two -v flags were given.
  if v >= 3:
    level = logging.DEBUG - 1
  elif v >= 2:
    level = logging.INFO
  else:
    return  # Not verbose enough; leave logging configuration alone.
  logging.basicConfig(level=level)
# Auto-enable verbose logging when this module is imported by a test
# runner script (i.e. argv[0] ends with '_test.py').
if sys.argv[0].endswith('_test.py'):
  tweak_logging()
| Python |
"""Tests for model.py."""
import base64
import datetime
import difflib
import pickle
import re
import unittest
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import namespace_manager
from google.appengine.api import users
from google.appengine.datastore import entity_pb
from ndb import model, query, tasklets, test_utils
# Shared fixtures: a canonical User value and a GeoPt (Amsterdam).
TESTUSER = users.User('test@example.com', 'example.com', '123')
AMSTERDAM = model.GeoPt(52.35, 4.9166667)
GOLDEN_PB = """\
key <
app: "_"
path <
Element {
type: "Model"
id: 42
}
>
>
entity_group <
Element {
type: "Model"
id: 42
}
>
property <
name: "b"
value <
booleanValue: true
>
multiple: false
>
property <
name: "d"
value <
doubleValue: 2.5
>
multiple: false
>
property <
name: "k"
value <
ReferenceValue {
app: "_"
PathElement {
type: "Model"
id: 42
}
}
>
multiple: false
>
property <
name: "p"
value <
int64Value: 42
>
multiple: false
>
property <
name: "q"
value <
stringValue: "hello"
>
multiple: false
>
property <
name: "u"
value <
UserValue {
email: "test@example.com"
auth_domain: "example.com"
gaiaid: 0
obfuscated_gaiaid: "123"
}
>
multiple: false
>
property <
name: "xy"
value <
PointValue {
x: 52.35
y: 4.9166667
}
>
multiple: false
>
"""
INDEXED_PB = re.sub('Model', 'MyModel', GOLDEN_PB)
UNINDEXED_PB = """\
key <
app: "_"
path <
Element {
type: "MyModel"
id: 0
}
>
>
entity_group <
>
raw_property <
meaning: 14
name: "b"
value <
stringValue: "\\000\\377"
>
multiple: false
>
raw_property <
meaning: 15
name: "t"
value <
stringValue: "Hello world\\341\\210\\264"
>
multiple: false
>
"""
PERSON_PB = """\
key <
app: "_"
path <
Element {
type: "Person"
id: 0
}
>
>
entity_group <
>
property <
name: "address.city"
value <
stringValue: "Mountain View"
>
multiple: false
>
property <
name: "address.street"
value <
stringValue: "1600 Amphitheatre"
>
multiple: false
>
property <
name: "name"
value <
stringValue: "Google"
>
multiple: false
>
"""
NESTED_PB = """\
key <
app: "_"
path <
Element {
type: "Person"
id: 0
}
>
>
entity_group <
>
property <
name: "address.home.city"
value <
stringValue: "Mountain View"
>
multiple: false
>
property <
name: "address.home.street"
value <
stringValue: "1600 Amphitheatre"
>
multiple: false
>
property <
name: "address.work.city"
value <
stringValue: "San Francisco"
>
multiple: false
>
property <
name: "address.work.street"
value <
stringValue: "345 Spear"
>
multiple: false
>
property <
name: "name"
value <
stringValue: "Google"
>
multiple: false
>
"""
RECURSIVE_PB = """\
key <
app: "_"
path <
Element {
type: "Tree"
id: 0
}
>
>
entity_group <
>
raw_property <
meaning: 15
name: "root.left.left.name"
value <
stringValue: "a1a"
>
multiple: false
>
raw_property <
meaning: 15
name: "root.left.name"
value <
stringValue: "a1"
>
multiple: false
>
raw_property <
meaning: 15
name: "root.left.rite.name"
value <
stringValue: "a1b"
>
multiple: false
>
raw_property <
meaning: 15
name: "root.name"
value <
stringValue: "a"
>
multiple: false
>
raw_property <
meaning: 15
name: "root.rite.name"
value <
stringValue: "a2"
>
multiple: false
>
raw_property <
meaning: 15
name: "root.rite.rite.name"
value <
stringValue: "a2b"
>
multiple: false
>
"""
MULTI_PB = """\
key <
app: "_"
path <
Element {
type: "Person"
id: 0
}
>
>
entity_group <
>
property <
name: "address"
value <
stringValue: "345 Spear"
>
multiple: true
>
property <
name: "address"
value <
stringValue: "San Francisco"
>
multiple: true
>
property <
name: "name"
value <
stringValue: "Google"
>
multiple: false
>
"""
MULTIINSTRUCT_PB = """\
key <
app: "_"
path <
Element {
type: "Person"
id: 0
}
>
>
entity_group <
>
property <
name: "address.label"
value <
stringValue: "work"
>
multiple: false
>
property <
name: "address.line"
value <
stringValue: "345 Spear"
>
multiple: true
>
property <
name: "address.line"
value <
stringValue: "San Francisco"
>
multiple: true
>
property <
name: "name"
value <
stringValue: "Google"
>
multiple: false
>
"""
MULTISTRUCT_PB = """\
key <
app: "_"
path <
Element {
type: "Person"
id: 0
}
>
>
entity_group <
>
property <
name: "address.label"
value <
stringValue: "work"
>
multiple: true
>
property <
name: "address.text"
value <
stringValue: "San Francisco"
>
multiple: true
>
property <
name: "address.label"
value <
stringValue: "home"
>
multiple: true
>
property <
name: "address.text"
value <
stringValue: "Mountain View"
>
multiple: true
>
property <
name: "name"
value <
stringValue: "Google"
>
multiple: false
>
"""
class ModelTests(test_utils.DatastoreTest):
  def tearDown(self):
    """Verify no test leaked properties onto the base Model/Expando classes."""
    self.assertTrue(model.Model._properties == {})
    self.assertTrue(model.Expando._properties == {})
    super(ModelTests, self).tearDown()
  def testKey(self):
    """Model.key can be assigned, read back, and deleted."""
    m = model.Model()
    self.assertEqual(m.key, None)
    k = model.Key(flat=['ParentModel', 42, 'Model', 'foobar'])
    m.key = k
    self.assertEqual(m.key, k)
    del m.key
    self.assertEqual(m.key, None)
    # incomplete key (id None) is also accepted
    k2 = model.Key(flat=['ParentModel', 42, 'Model', None])
    m.key = k2
    self.assertEqual(m.key, k2)
def testIncompleteKey(self):
m = model.Model()
k = model.Key(flat=['Model', None])
m.key = k
pb = m._to_pb()
m2 = model.Model._from_pb(pb)
self.assertEqual(m2, m)
  def testIdAndParent(self):
    """Constructor id=/parent= produce the expected keys; conflicts raise."""
    p = model.Key('ParentModel', 'foo')
    # key name
    m = model.Model(id='bar')
    m2 = model.Model._from_pb(m._to_pb())
    self.assertEqual(m2.key, model.Key('Model', 'bar'))
    # key name + parent
    m = model.Model(id='bar', parent=p)
    m2 = model.Model._from_pb(m._to_pb())
    self.assertEqual(m2.key, model.Key('ParentModel', 'foo', 'Model', 'bar'))
    # key id
    m = model.Model(id=42)
    m2 = model.Model._from_pb(m._to_pb())
    self.assertEqual(m2.key, model.Key('Model', 42))
    # key id + parent
    m = model.Model(id=42, parent=p)
    m2 = model.Model._from_pb(m._to_pb())
    self.assertEqual(m2.key, model.Key('ParentModel', 'foo', 'Model', 42))
    # parent only -> incomplete child key
    m = model.Model(parent=p)
    m2 = model.Model._from_pb(m._to_pb())
    self.assertEqual(m2.key, model.Key('ParentModel', 'foo', 'Model', None))
    # not key -- invalid
    self.assertRaises(datastore_errors.BadValueError, model.Model, key='foo')
    # wrong key kind -- invalid
    k = model.Key('OtherModel', 'bar')
    class MyModel(model.Model):
      pass
    self.assertRaises(model.KindError, MyModel, key=k)
    # incomplete parent -- invalid
    p2 = model.Key('ParentModel', None)
    self.assertRaises(datastore_errors.BadArgumentError, model.Model,
                      parent=p2)
    self.assertRaises(datastore_errors.BadArgumentError, model.Model,
                      id='bar', parent=p2)
    # key + id -- invalid
    k = model.Key('Model', 'bar')
    self.assertRaises(datastore_errors.BadArgumentError, model.Model, key=k,
                      id='bar')
    # key + parent -- invalid
    k = model.Key('Model', 'bar', parent=p)
    self.assertRaises(datastore_errors.BadArgumentError, model.Model, key=k,
                      parent=p)
    # key + id + parent -- invalid
    self.assertRaises(datastore_errors.BadArgumentError, model.Model, key=k,
                      id='bar', parent=p)
  def testQuery(self):
    """Model.query() yields a Query with the right kind and ancestor."""
    class MyModel(model.Model):
      p = model.IntegerProperty()
    q = MyModel.query()
    self.assertTrue(isinstance(q, query.Query))
    self.assertEqual(q.kind, 'MyModel')
    self.assertEqual(q.ancestor, None)
    k = model.Key(flat=['Model', 1])
    q = MyModel.query(ancestor=k)
    self.assertEqual(q.kind, 'MyModel')
    self.assertEqual(q.ancestor, k)
    # An incomplete key cannot serve as an ancestor.
    k0 = model.Key(flat=['Model', None])
    self.assertRaises(Exception, MyModel.query, ancestor=k0)
  def testQueryWithFilter(self):
    """query(filter) and query().filter(...) produce equivalent filters."""
    class MyModel(model.Model):
      p = model.IntegerProperty()
    q = MyModel.query(MyModel.p >= 0)
    self.assertTrue(isinstance(q, query.Query))
    self.assertEqual(q.kind, 'MyModel')
    self.assertEqual(q.ancestor, None)
    self.assertTrue(q.filters is not None)
    q2 = MyModel.query().filter(MyModel.p >= 0)
    self.assertEqual(q.filters, q2.filters)
  def testProperty(self):
    """Each simple property type round-trips through the golden pb text."""
    class MyModel(model.Model):
      b = model.BooleanProperty()
      p = model.IntegerProperty()
      q = model.StringProperty()
      d = model.FloatProperty()
      k = model.KeyProperty()
      u = model.UserProperty()
      xy = model.GeoPtProperty()
    ent = MyModel()
    k = model.Key(flat=['MyModel', 42])
    ent.key = k
    # Set values through the property descriptors directly.
    MyModel.b._set_value(ent, True)
    MyModel.p._set_value(ent, 42)
    MyModel.q._set_value(ent, 'hello')
    MyModel.d._set_value(ent, 2.5)
    MyModel.k._set_value(ent, k)
    MyModel.u._set_value(ent, TESTUSER)
    MyModel.xy._set_value(ent, AMSTERDAM)
    self.assertEqual(MyModel.b._get_value(ent), True)
    self.assertEqual(MyModel.p._get_value(ent), 42)
    self.assertEqual(MyModel.q._get_value(ent), 'hello')
    self.assertEqual(MyModel.d._get_value(ent), 2.5)
    self.assertEqual(MyModel.k._get_value(ent), k)
    self.assertEqual(MyModel.u._get_value(ent), TESTUSER)
    self.assertEqual(MyModel.xy._get_value(ent), AMSTERDAM)
    # Serialization matches the golden text exactly.
    pb = self.conn.adapter.entity_to_pb(ent)
    self.assertEqual(str(pb), INDEXED_PB)
    # And deserialization restores the same values.
    ent = MyModel._from_pb(pb)
    self.assertEqual(ent._get_kind(), 'MyModel')
    k = model.Key(flat=['MyModel', 42])
    self.assertEqual(ent.key, k)
    self.assertEqual(MyModel.p._get_value(ent), 42)
    self.assertEqual(MyModel.q._get_value(ent), 'hello')
    self.assertEqual(MyModel.d._get_value(ent), 2.5)
    self.assertEqual(MyModel.k._get_value(ent), k)
  def testDeletingPropertyValue(self):
    """del distinguishes 'unset' from an explicitly assigned None."""
    class MyModel(model.Model):
      a = model.StringProperty()
    m = MyModel()
    # Initially it isn't there (but the value defaults to None).
    self.assertEqual(m.a, None)
    self.assertFalse(MyModel.a._has_value(m))
    # Explicit None assignment makes it present.
    m.a = None
    self.assertEqual(m.a, None)
    self.assertTrue(MyModel.a._has_value(m))
    # Deletion restores the initial state.
    del m.a
    self.assertEqual(m.a, None)
    self.assertFalse(MyModel.a._has_value(m))
    # Redundant deletions are okay.
    del m.a
    self.assertEqual(m.a, None)
    self.assertFalse(MyModel.a._has_value(m))
    # Deleted/missing values are serialized and considered present
    # when deserialized.
    pb = m._to_pb()
    m = MyModel._from_pb(pb)
    self.assertEqual(m.a, None)
    self.assertTrue(MyModel.a._has_value(m))
  def testDefaultPropertyValue(self):
    """Defaults apply when unset, are overridden by assignment, and are
    made explicit by serialization."""
    class MyModel(model.Model):
      a = model.StringProperty(default='a')
      b = model.StringProperty(default='')
    m = MyModel()
    # Initial values equal the defaults.
    self.assertEqual(m.a, 'a')
    self.assertEqual(m.b, '')
    self.assertFalse(MyModel.a._has_value(m))
    self.assertFalse(MyModel.b._has_value(m))
    # Setting values erases the defaults.
    m.a = ''
    m.b = 'b'
    self.assertEqual(m.a, '')
    self.assertEqual(m.b, 'b')
    self.assertTrue(MyModel.a._has_value(m))
    self.assertTrue(MyModel.b._has_value(m))
    # Deleting values restores the defaults.
    del m.a
    del m.b
    self.assertEqual(m.a, 'a')
    self.assertEqual(m.b, '')
    self.assertFalse(MyModel.a._has_value(m))
    self.assertFalse(MyModel.b._has_value(m))
    # Serialization makes the default values explicit.
    pb = m._to_pb()
    m = MyModel._from_pb(pb)
    self.assertEqual(m.a, 'a')
    self.assertEqual(m.b, '')
    self.assertTrue(MyModel.a._has_value(m))
    self.assertTrue(MyModel.b._has_value(m))
def testComparingExplicitAndImplicitValue(self):
class MyModel(model.Model):
a = model.StringProperty(default='a')
b = model.StringProperty()
m1 = MyModel(b=None)
m2 = MyModel()
self.assertEqual(m1, m2)
m1.a = 'a'
self.assertEqual(m1, m2)
  def testRequiredProperty(self):
    """required=True rejects unset and None, but accepts the empty string."""
    class MyModel(model.Model):
      a = model.StringProperty(required=True)
      b = model.StringProperty()  # Never counts as uninitialized
    self.assertEqual(repr(MyModel.a), "StringProperty('a', required=True)")
    m = MyModel()
    # Never-assigned values are considered uninitialized.
    self.assertEqual(m._find_uninitialized(), set(['a']))
    self.assertRaises(datastore_errors.BadValueError, m._check_initialized)
    self.assertRaises(datastore_errors.BadValueError, m._to_pb)
    # Empty string is fine.
    m.a = ''
    self.assertFalse(m._find_uninitialized())
    m._check_initialized()
    m._to_pb()
    # Non-empty string is fine (of course).
    m.a = 'foo'
    self.assertFalse(m._find_uninitialized())
    m._check_initialized()
    m._to_pb()
    # Deleted value is not fine.
    del m.a
    self.assertEqual(m._find_uninitialized(), set(['a']))
    self.assertRaises(datastore_errors.BadValueError, m._check_initialized)
    self.assertRaises(datastore_errors.BadValueError, m._to_pb)
    # Explicitly assigned None is *not* fine.
    m.a = None
    self.assertEqual(m._find_uninitialized(), set(['a']))
    self.assertRaises(datastore_errors.BadValueError, m._check_initialized)
    self.assertRaises(datastore_errors.BadValueError, m._to_pb)
    # Check that b is still unset.
    self.assertFalse(MyModel.b._has_value(m))
  def testRepeatedRequiredDefaultConflict(self):
    """At most one of repeated/required/default may be given per property."""
    # Allow at most one of repeated=True, required=True, default=<non-None>.
    # NOTE(review): the assertions live inside the class body, which runs
    # them at class-creation time; 'self' resolves there because class
    # bodies can read the enclosing function's scope.  Unusual, but works.
    class MyModel(model.Model):
      self.assertRaises(Exception,
                        model.StringProperty, repeated=True, default='')
      self.assertRaises(Exception,
                        model.StringProperty, repeated=True, required=True)
      self.assertRaises(Exception,
                        model.StringProperty, required=True, default='')
      self.assertRaises(Exception,
                        model.StringProperty,
                        repeated=True, required=True, default='')
  def testBlobKeyProperty(self):
    """A BlobKey value survives a put()/get() round-trip."""
    class MyModel(model.Model):
      image = model.BlobKeyProperty()
    test_blobkey = datastore_types.BlobKey('testkey123')
    m = MyModel()
    m.image = test_blobkey
    m.put()
    m = m.key.get()
    self.assertTrue(isinstance(m.image, datastore_types.BlobKey))
    self.assertEqual(str(m.image), str(test_blobkey))
  def testChoicesProperty(self):
    """choices= accepts listed values (and None/empty) and rejects others."""
    class MyModel(model.Model):
      a = model.StringProperty(choices=['a', 'b', 'c'])
      b = model.IntegerProperty(choices=[1, 2, 3], repeated=True)
    m = MyModel(a='a', b=[1, 2])
    m.a = 'b'
    m.a = None
    m.b = [1, 1, 3]
    m.b = []
    self.assertRaises(datastore_errors.BadValueError,
                      setattr, m, 'a', 'A')
    self.assertRaises(datastore_errors.BadValueError,
                      setattr, m, 'b', [42])
  def testValidatorProperty(self):
    """A validator may transform the value (here: lowercase) or raise."""
    def my_validator(prop, value):
      value = value.lower()
      if not value.startswith('a'):
        raise datastore_errors.BadValueError('%s does not start with "a"' %
                                             prop._name)
      return value
    class MyModel(model.Model):
      a = model.StringProperty(validator=my_validator)
    m = MyModel()
    m.a = 'ABC'
    # The stored value is what the validator returned, not what was set.
    self.assertEqual(m.a, 'abc')
    self.assertRaises(datastore_errors.BadValueError,
                      setattr, m, 'a', 'def')
  def testUnindexedProperty(self):
    """Text/Blob properties serialize as raw (unindexed) pb entries."""
    class MyModel(model.Model):
      t = model.TextProperty()
      b = model.BlobProperty()
    ent = MyModel()
    MyModel.t._set_value(ent, u'Hello world\u1234')
    MyModel.b._set_value(ent, '\x00\xff')
    self.assertEqual(MyModel.t._get_value(ent), u'Hello world\u1234')
    self.assertEqual(MyModel.b._get_value(ent), '\x00\xff')
    pb = ent._to_pb()
    self.assertEqual(str(pb), UNINDEXED_PB)
    ent = MyModel._from_pb(pb)
    self.assertEqual(ent._get_kind(), 'MyModel')
    k = model.Key(flat=['MyModel', None])
    self.assertEqual(ent.key, k)
    self.assertEqual(MyModel.t._get_value(ent), u'Hello world\u1234')
    self.assertEqual(MyModel.b._get_value(ent), '\x00\xff')
  def DateAndOrTimePropertyTest(self, propclass, t1, t2):
    """Shared driver for DateTime/Date/Time property tests.

    Checks auto_now_add/auto_now population at serialization time, plus
    round-tripping of plain and repeated values t1 and t2.
    """
    class Person(model.Model):
      name = model.StringProperty()
      ctime = propclass(auto_now_add=True)
      mtime = propclass(auto_now=True)
      atime = propclass()
      times = propclass(repeated=True)
    p = Person()
    p.atime = t1
    p.times = [t1, t2]
    # Auto timestamps are not filled in until serialization.
    self.assertEqual(p.ctime, None)
    self.assertEqual(p.mtime, None)
    pb = p._to_pb()
    self.assertNotEqual(p.ctime, None)
    self.assertNotEqual(p.mtime, None)
    q = Person._from_pb(pb)
    self.assertEqual(q.ctime, p.ctime)
    self.assertEqual(q.mtime, p.mtime)
    self.assertEqual(q.atime, t1)
    self.assertEqual(q.times, [t1, t2])
  def testDateTimeProperty(self):
    """Run the shared date/time driver with DateTimeProperty."""
    self.DateAndOrTimePropertyTest(model.DateTimeProperty,
                                   datetime.datetime(1982, 12, 1, 9, 0, 0),
                                   datetime.datetime(1995, 4, 15, 5, 0, 0))
  def testDateProperty(self):
    """Run the shared date/time driver with DateProperty."""
    self.DateAndOrTimePropertyTest(model.DateProperty,
                                   datetime.date(1982, 12, 1),
                                   datetime.date(1995, 4, 15))
  def testTimeProperty(self):
    """Run the shared date/time driver with TimeProperty."""
    self.DateAndOrTimePropertyTest(model.TimeProperty,
                                   datetime.time(9, 0, 0),
                                   datetime.time(5, 0, 0, 500))
  def testStructuredProperty(self):
    """A StructuredProperty round-trips through the PERSON_PB golden text."""
    class Address(model.Model):
      street = model.StringProperty()
      city = model.StringProperty()
    class Person(model.Model):
      name = model.StringProperty()
      address = model.StructuredProperty(Address)
    p = Person()
    p.name = 'Google'
    a = Address(street='1600 Amphitheatre')
    p.address = a
    # Mutating through the outer entity is visible on the sub-entity.
    p.address.city = 'Mountain View'
    self.assertEqual(Person.name._get_value(p), 'Google')
    self.assertEqual(p.name, 'Google')
    self.assertEqual(Person.address._get_value(p), a)
    self.assertEqual(Address.street._get_value(a), '1600 Amphitheatre')
    self.assertEqual(Address.city._get_value(a), 'Mountain View')
    pb = p._to_pb()
    self.assertEqual(str(pb), PERSON_PB)
    p = Person._from_pb(pb)
    self.assertEqual(p.name, 'Google')
    self.assertEqual(p.address.street, '1600 Amphitheatre')
    self.assertEqual(p.address.city, 'Mountain View')
    self.assertEqual(p.address, a)
  def testNestedStructuredProperty(self):
    """Two levels of StructuredProperty flatten to doubly dotted names."""
    class Address(model.Model):
      street = model.StringProperty()
      city = model.StringProperty()
    class AddressPair(model.Model):
      home = model.StructuredProperty(Address)
      work = model.StructuredProperty(Address)
    class Person(model.Model):
      name = model.StringProperty()
      address = model.StructuredProperty(AddressPair)
    p = Person()
    p.name = 'Google'
    p.address = AddressPair(home=Address(), work=Address())
    p.address.home.city = 'Mountain View'
    p.address.home.street = '1600 Amphitheatre'
    p.address.work.city = 'San Francisco'
    p.address.work.street = '345 Spear'
    pb = p._to_pb()
    self.assertEqual(str(pb), NESTED_PB)
    p = Person._from_pb(pb)
    self.assertEqual(p.name, 'Google')
    self.assertEqual(p.address.home.street, '1600 Amphitheatre')
    self.assertEqual(p.address.home.city, 'Mountain View')
    self.assertEqual(p.address.work.street, '345 Spear')
    self.assertEqual(p.address.work.city, 'San Francisco')
  def testRecursiveStructuredProperty(self):
    """Self-referential StructuredProperty works when attached after the
    class statement (requires a manual _fix_up_properties() call)."""
    class Node(model.Model):
      name = model.StringProperty(indexed=False)
    # The recursive sub-properties can only be added once Node exists.
    Node.left = model.StructuredProperty(Node)
    Node.rite = model.StructuredProperty(Node)
    Node._fix_up_properties()
    class Tree(model.Model):
      root = model.StructuredProperty(Node)
    k = model.Key(flat=['Tree', None])
    tree = Tree()
    tree.key = k
    tree.root = Node(name='a',
                     left=Node(name='a1',
                               left=Node(name='a1a'),
                               rite=Node(name='a1b')),
                     rite=Node(name='a2',
                               rite=Node(name='a2b')))
    pb = tree._to_pb()
    self.assertEqual(str(pb), RECURSIVE_PB)
    tree2 = Tree._from_pb(pb)
    self.assertEqual(tree2, tree)
  def testRenamedProperty(self):
    """Datastore names (first Property arg) drive the wire format, so the
    serialization matches INDEXED_PB despite different Python names."""
    class MyModel(model.Model):
      bb = model.BooleanProperty('b')
      pp = model.IntegerProperty('p')
      qq = model.StringProperty('q')
      dd = model.FloatProperty('d')
      kk = model.KeyProperty('k')
      uu = model.UserProperty('u')
      xxyy = model.GeoPtProperty('xy')
    ent = MyModel()
    k = model.Key(flat=['MyModel', 42])
    ent.key = k
    MyModel.bb._set_value(ent, True)
    MyModel.pp._set_value(ent, 42)
    MyModel.qq._set_value(ent, 'hello')
    MyModel.dd._set_value(ent, 2.5)
    MyModel.kk._set_value(ent, k)
    MyModel.uu._set_value(ent, TESTUSER)
    MyModel.xxyy._set_value(ent, AMSTERDAM)
    self.assertEqual(MyModel.pp._get_value(ent), 42)
    self.assertEqual(MyModel.qq._get_value(ent), 'hello')
    self.assertEqual(MyModel.dd._get_value(ent), 2.5)
    self.assertEqual(MyModel.kk._get_value(ent), k)
    self.assertEqual(MyModel.uu._get_value(ent), TESTUSER)
    self.assertEqual(MyModel.xxyy._get_value(ent), AMSTERDAM)
    pb = self.conn.adapter.entity_to_pb(ent)
    self.assertEqual(str(pb), INDEXED_PB)
    ent = MyModel._from_pb(pb)
    self.assertEqual(ent._get_kind(), 'MyModel')
    k = model.Key(flat=['MyModel', 42])
    self.assertEqual(ent.key, k)
    self.assertEqual(MyModel.pp._get_value(ent), 42)
    self.assertEqual(MyModel.qq._get_value(ent), 'hello')
    self.assertEqual(MyModel.dd._get_value(ent), 2.5)
    self.assertEqual(MyModel.kk._get_value(ent), k)
  def testUnicodeRenamedProperty(self):
    """A non-ASCII datastore property name survives a put()/get() round-trip."""
    class UModel(model.Model):
      val = model.StringProperty(u'\u00fc')
      @classmethod
      def _get_kind(cls):
        return u'UModel'  # Pure ASCII Unicode kind string is fine.
    u = UModel(val='abc')
    u.put()
    v = u.key.get()
    self.assertFalse(u is v)
    self.assertEqual(u.val, v.val)
def testUnicodeKind(self):
def helper():
class UModel(model.Model):
val = model.StringProperty()
@classmethod
def _get_kind(cls):
return u'\u00fcModel'
self.assertRaises(model.KindError, helper)
  def testRenamedStructuredProperty(self):
    """Renamed (incl. non-ASCII) names compose into dotted wire names."""
    uhome = u'hom\u00e9'
    uhome_enc_repr = r'hom\303\251'  # UTF-8 octal escapes as pb text prints them
    class Address(model.Model):
      st = model.StringProperty('street')
      ci = model.StringProperty('city')
    class AddressPair(model.Model):
      ho = model.StructuredProperty(Address, uhome)
      wo = model.StructuredProperty(Address, 'work')
    class Person(model.Model):
      na = model.StringProperty('name')
      ad = model.StructuredProperty(AddressPair, 'address')
    p = Person()
    p.na = 'Google'
    p.ad = AddressPair(ho=Address(), wo=Address())
    p.ad.ho.ci = 'Mountain View'
    p.ad.ho.st = '1600 Amphitheatre'
    p.ad.wo.ci = 'San Francisco'
    p.ad.wo.st = '345 Spear'
    pb = p._to_pb()
    # Same golden text as the unrenamed case, modulo the encoded name.
    expected = NESTED_PB.replace('home', uhome_enc_repr)
    self.assertEqual(str(pb), expected)
    p = Person._from_pb(pb)
    self.assertEqual(p.na, 'Google')
    self.assertEqual(p.ad.ho.st, '1600 Amphitheatre')
    self.assertEqual(p.ad.ho.ci, 'Mountain View')
    self.assertEqual(p.ad.wo.st, '345 Spear')
    self.assertEqual(p.ad.wo.ci, 'San Francisco')
def testKindMap(self):
model.Model._reset_kind_map()
class A1(model.Model):
pass
self.assertEqual(model.Model._get_kind_map(), {'A1': A1})
class A2(model.Model):
pass
self.assertEqual(model.Model._get_kind_map(), {'A1': A1, 'A2': A2})
  def testMultipleProperty(self):
    """A repeated property round-trips through the MULTI_PB golden text."""
    class Person(model.Model):
      name = model.StringProperty()
      address = model.StringProperty(repeated=True)
    m = Person(name='Google', address=['345 Spear', 'San Francisco'])
    m.key = model.Key(flat=['Person', None])
    self.assertEqual(m.address, ['345 Spear', 'San Francisco'])
    pb = m._to_pb()
    self.assertEqual(str(pb), MULTI_PB)
    m2 = Person._from_pb(pb)
    self.assertEqual(m2, m)
  def testMultipleInStructuredProperty(self):
    """A repeated property nested inside a StructuredProperty round-trips."""
    class Address(model.Model):
      label = model.StringProperty()
      line = model.StringProperty(repeated=True)
    class Person(model.Model):
      name = model.StringProperty()
      address = model.StructuredProperty(Address)
    m = Person(name='Google',
               address=Address(label='work',
                               line=['345 Spear', 'San Francisco']))
    m.key = model.Key(flat=['Person', None])
    self.assertEqual(m.address.line, ['345 Spear', 'San Francisco'])
    pb = m._to_pb()
    self.assertEqual(str(pb), MULTIINSTRUCT_PB)
    m2 = Person._from_pb(pb)
    self.assertEqual(m2, m)
  def testMultipleStructuredProperty(self):
    """A repeated StructuredProperty round-trips via MULTISTRUCT_PB."""
    class Address(model.Model):
      label = model.StringProperty()
      text = model.StringProperty()
    class Person(model.Model):
      name = model.StringProperty()
      address = model.StructuredProperty(Address, repeated=True)
    m = Person(name='Google',
               address=[Address(label='work', text='San Francisco'),
                        Address(label='home', text='Mountain View')])
    m.key = model.Key(flat=['Person', None])
    self.assertEqual(m.address[0].label, 'work')
    self.assertEqual(m.address[0].text, 'San Francisco')
    self.assertEqual(m.address[1].label, 'home')
    self.assertEqual(m.address[1].text, 'Mountain View')
    pb = m._to_pb()
    self.assertEqual(str(pb), MULTISTRUCT_PB)
    m2 = Person._from_pb(pb)
    self.assertEqual(m2, m)
  def testCannotMultipleInMultiple(self):
    """A repeated StructuredProperty may not contain a repeated property."""
    class Inner(model.Model):
      innerval = model.StringProperty(repeated=True)
    self.assertRaises(AssertionError,
                      model.StructuredProperty, Inner, repeated=True)
  def testNullProperties(self):
    """All-None property values survive a pb round-trip unchanged."""
    class Address(model.Model):
      street = model.StringProperty()
      city = model.StringProperty()
      zip = model.IntegerProperty()
    class Person(model.Model):
      address = model.StructuredProperty(Address)
      age = model.IntegerProperty()
      name = model.StringProperty()
      k = model.KeyProperty()
    k = model.Key(flat=['Person', 42])
    p = Person()
    p.key = k
    self.assertEqual(p.address, None)
    self.assertEqual(p.age, None)
    self.assertEqual(p.name, None)
    self.assertEqual(p.k, None)
    pb = p._to_pb()
    q = Person._from_pb(pb)
    self.assertEqual(q.address, None)
    self.assertEqual(q.age, None)
    self.assertEqual(q.name, None)
    self.assertEqual(q.k, None)
    self.assertEqual(q, p)
  def testOrphanProperties(self):
    """Deserializing into the generic Model keeps unknown ('orphan')
    properties, so re-serializing yields an identical protobuf."""
    class Tag(model.Model):
      names = model.StringProperty(repeated=True)
      ratings = model.IntegerProperty(repeated=True)
    class Address(model.Model):
      line = model.StringProperty(repeated=True)
      city = model.StringProperty()
      zip = model.IntegerProperty()
      tags = model.StructuredProperty(Tag)
    class Person(model.Model):
      address = model.StructuredProperty(Address)
      age = model.IntegerProperty(repeated=True)
      name = model.StringProperty()
      k = model.KeyProperty()
    k = model.Key(flat=['Person', 42])
    p = Person(name='White House', k=k, age=[210, 211],
               address=Address(line=['1600 Pennsylvania', 'Washington, DC'],
                               tags=Tag(names=['a', 'b'], ratings=[1, 2]),
                               zip=20500))
    p.key = k
    pb = p._to_pb()
    # Read back via the base Model class, which knows none of the props.
    q = model.Model._from_pb(pb)
    qb = q._to_pb()
    # Diff the two text serializations for a readable failure message.
    linesp = str(pb).splitlines(True)
    linesq = str(qb).splitlines(True)
    lines = difflib.unified_diff(linesp, linesq, 'Expected', 'Actual')
    self.assertEqual(pb, qb, ''.join(lines))
  def testModelRepr(self):
    """repr() lists properties alphabetically and includes the key if set."""
    class Address(model.Model):
      street = model.StringProperty()
      city = model.StringProperty()
    class Person(model.Model):
      name = model.StringProperty()
      address = model.StructuredProperty(Address)
    p = Person(name='Google', address=Address(street='345 Spear', city='SF'))
    self.assertEqual(
      repr(p),
      "Person(address=Address(city='SF', street='345 Spear'), name='Google')")
    p.key = model.Key(pairs=[('Person', 42)])
    self.assertEqual(
      repr(p),
      "Person(key=Key('Person', 42), "
      "address=Address(city='SF', street='345 Spear'), name='Google')")
def testModelRepr_RenamedProperty(self):
class Address(model.Model):
street = model.StringProperty('Street')
city = model.StringProperty('City')
a = Address(street='345 Spear', city='SF')
self.assertEqual(repr(a), "Address(city='SF', street='345 Spear')")
  def testModel_RenameAlias(self):
    """On plain Model, only the Python name works; the datastore name
    is not an attribute alias."""
    class Person(model.Model):
      name = model.StringProperty('Name')
    p = Person(name='Fred')
    self.assertRaises(AttributeError, getattr, p, 'Name')
    self.assertRaises(AttributeError, Person, Name='Fred')
    # Unfortunately, p.Name = 'boo' just sets p.__dict__['Name'] = 'boo'.
    self.assertRaises(AttributeError, getattr, p, 'foo')
  def testExpando_RenameAlias(self):
    """On Expando, a renamed property is reachable under both names."""
    # NOTE(review): the assertTrue(a, b) calls below treat the second
    # argument as a failure *message*, so they pass for any truthy a —
    # presumably assertEqual was intended.  Left as-is to avoid changing
    # what this test actually exercises; verify before tightening.
    class Person(model.Expando):
      name = model.StringProperty('Name')
    p = Person(name='Fred')
    self.assertEqual(p.name, 'Fred')
    self.assertEqual(p.Name, 'Fred')
    self.assertEqual(p._values, {'Name': 'Fred'})
    self.assertTrue(p._properties, Person._properties)
    p = Person(Name='Fred')
    self.assertEqual(p.name, 'Fred')
    self.assertEqual(p.Name, 'Fred')
    self.assertEqual(p._values, {'Name': 'Fred'})
    self.assertTrue(p._properties, Person._properties)
    p = Person()
    p.Name = 'Fred'
    self.assertEqual(p.name, 'Fred')
    self.assertEqual(p.Name, 'Fred')
    self.assertEqual(p._values, {'Name': 'Fred'})
    self.assertTrue(p._properties, Person._properties)
    self.assertRaises(AttributeError, getattr, p, 'foo')
  def testModel_RenameSwap(self):
    """Python and datastore names may be swapped; _values is keyed by the
    datastore names."""
    class Person(model.Model):
      foo = model.StringProperty('bar')
      bar = model.StringProperty('foo')
    p = Person(foo='foo', bar='bar')
    self.assertEqual(p._values,
                     {'foo': 'bar', 'bar': 'foo'})
  def testExpando_RenameSwap(self):
    """Swapped names on Expando, plus a dynamic property, all land in
    _values under their datastore names."""
    class Person(model.Expando):
      foo = model.StringProperty('bar')
      bar = model.StringProperty('foo')
    p = Person(foo='foo', bar='bar', baz='baz')
    self.assertEqual(p._values,
                     {'foo': 'bar', 'bar': 'foo', 'baz': 'baz'})
    p = Person()
    p.foo = 'foo'
    p.bar = 'bar'
    p.baz = 'baz'
    self.assertEqual(p._values,
                     {'foo': 'bar', 'bar': 'foo', 'baz': 'baz'})
  def testPropertyRepr(self):
    """Property repr() shows only non-default constructor arguments."""
    p = model.Property()
    self.assertEqual(repr(p), 'Property()')
    p = model.IntegerProperty('foo', indexed=False, repeated=True)
    self.assertEqual(repr(p),
                     "IntegerProperty('foo', indexed=False, repeated=True)")
    class Address(model.Model):
      street = model.StringProperty()
      city = model.StringProperty()
    p = model.StructuredProperty(Address, 'foo')
    self.assertEqual(repr(p), "StructuredProperty(Address, 'foo')")
def testValidation(self):
class All(model.Model):
s = model.StringProperty()
i = model.IntegerProperty()
f = model.FloatProperty()
t = model.TextProperty()
b = model.BlobProperty()
k = model.KeyProperty()
BVE = datastore_errors.BadValueError
a = All()
a.s = None
a.s = 'abc'
a.s = u'def'
a.s = '\xff' # Not UTF-8.
self.assertRaises(BVE, setattr, a, 's', 0)
a.i = None
a.i = 42
a.i = 123L
self.assertRaises(BVE, setattr, a, 'i', '')
a.f = None
a.f = 42
a.f = 3.14
self.assertRaises(BVE, setattr, a, 'f', '')
a.t = None
a.t = 'abc'
a.t = u'def'
a.t = '\xff' # Not UTF-8.
self.assertRaises(BVE, setattr, a, 't', 0)
a.b = None
a.b = 'abc'
a.b = '\xff'
self.assertRaises(BVE, setattr, a, 'b', u'')
self.assertRaises(BVE, setattr, a, 'b', u'')
a.k = None
a.k = model.Key('Foo', 42)
self.assertRaises(BVE, setattr, a, 'k', '')
  def testLocalStructuredProperty(self):
    """LocalStructuredProperty round-trips; compression can be toggled.

    Serialized data written uncompressed must still be readable after the
    property is switched to compressed mode, and vice versa.
    """
    class Address(model.Model):
      street = model.StringProperty()
      city = model.StringProperty()
    class Person(model.Model):
      name = model.StringProperty()
      address = model.LocalStructuredProperty(Address)
    p = Person()
    p.name = 'Google'
    a = Address(street='1600 Amphitheatre')
    p.address = a
    # Mutating the sub-entity through the parent is visible on 'a' too,
    # since the same Address instance is stored.
    p.address.city = 'Mountain View'
    self.assertEqual(Person.name._get_value(p), 'Google')
    self.assertEqual(p.name, 'Google')
    self.assertEqual(Person.address._get_value(p), a)
    self.assertEqual(Address.street._get_value(a), '1600 Amphitheatre')
    self.assertEqual(Address.city._get_value(a), 'Mountain View')
    pb = p._to_pb()
    # TODO: Validate pb
    # Check we can enable and disable compression and have old data still
    # be understood.
    Person.address._compressed = True
    p = Person._from_pb(pb)
    self.assertEqual(p.name, 'Google')
    self.assertEqual(p.address.street, '1600 Amphitheatre')
    self.assertEqual(p.address.city, 'Mountain View')
    self.assertEqual(p.address, a)
    self.assertEqual(repr(Person.address),
                     "LocalStructuredProperty(Address, 'address', "
                     "compressed=True)")
    pb = p._to_pb()
    Person.address._compressed = False
    p = Person._from_pb(pb)
    # Now try with an empty address
    p = Person()
    p.name = 'Google'
    self.assertTrue(p.address is None)
    pb = p._to_pb()
    p = Person._from_pb(pb)
    self.assertTrue(p.address is None)
    self.assertEqual(p.name, 'Google')
def testLocalStructuredPropertyCompressed(self):
class Address(model.Model):
street = model.StringProperty()
city = model.StringProperty()
class Person(model.Model):
name = model.StringProperty()
address = model.LocalStructuredProperty(Address, compressed=True)
k = model.Key('Person', 'google')
p = Person(key=k)
p.name = 'Google'
p.address = Address(street='1600 Amphitheatre', city='Mountain View')
p.put()
# Putting and getting to test compression and deserialization.
p = k.get()
p.put()
p = k.get()
self.assertEqual(p.name, 'Google')
self.assertEqual(p.address.street, '1600 Amphitheatre')
self.assertEqual(p.address.city, 'Mountain View')
  def testLocalStructuredPropertyRepeated(self):
    """A repeated LocalStructuredProperty preserves order across round-trips."""
    class Address(model.Model):
      street = model.StringProperty()
      city = model.StringProperty()
    class Person(model.Model):
      name = model.StringProperty()
      address = model.LocalStructuredProperty(Address, repeated=True)
    k = model.Key('Person', 'google')
    p = Person(key=k)
    p.name = 'Google'
    p.address.append(Address(street='1600 Amphitheatre', city='Mountain View'))
    p.address.append(Address(street='Webb crater', city='Moon'))
    p.put()
    # Putting and getting to test compression and deserialization.
    p = k.get()
    p.put()
    p = k.get()
    self.assertEqual(p.name, 'Google')
    self.assertEqual(p.address[0].street, '1600 Amphitheatre')
    self.assertEqual(p.address[0].city, 'Mountain View')
    self.assertEqual(p.address[1].street, 'Webb crater')
    self.assertEqual(p.address[1].city, 'Moon')
  def testLocalStructuredPropertyRepeatedCompressed(self):
    """Same as testLocalStructuredPropertyRepeated but with compression on."""
    class Address(model.Model):
      street = model.StringProperty()
      city = model.StringProperty()
    class Person(model.Model):
      name = model.StringProperty()
      address = model.LocalStructuredProperty(Address, repeated=True,
                                              compressed=True)
    k = model.Key('Person', 'google')
    p = Person(key=k)
    p.name = 'Google'
    p.address.append(Address(street='1600 Amphitheatre', city='Mountain View'))
    p.address.append(Address(street='Webb crater', city='Moon'))
    p.put()
    # Putting and getting to test compression and deserialization.
    p = k.get()
    p.put()
    p = k.get()
    self.assertEqual(p.name, 'Google')
    self.assertEqual(p.address[0].street, '1600 Amphitheatre')
    self.assertEqual(p.address[0].city, 'Mountain View')
    self.assertEqual(p.address[1].street, 'Webb crater')
    self.assertEqual(p.address[1].city, 'Moon')
  def testLocalStructuredPropertyRepeatedRepeated(self):
    """A repeated LocalStructuredProperty may hold models that themselves
    contain repeated properties (unlike StructuredProperty)."""
    class Inner(model.Model):
      a = model.IntegerProperty(repeated=True)
    self.assertTrue(Inner._has_repeated)
    class Outer(model.Model):
      b = model.LocalStructuredProperty(Inner, repeated=True)
    self.assertTrue(Inner._has_repeated)
    x = Outer(b=[Inner(a=[1, 2]), Inner(a=[3, 4, 5])])
    k = x.put()
    y = k.get()
    # The fetched entity is a distinct object that compares equal.
    self.assertTrue(x is not y)
    self.assertEqual(x, y)
def testEmptyList(self):
class Person(model.Model):
name = model.StringProperty(repeated=True)
p = Person()
self.assertEqual(p.name, [])
pb = p._to_pb()
q = Person._from_pb(pb)
self.assertEqual(q.name, [], str(pb))
def testEmptyListSerialized(self):
class Person(model.Model):
name = model.StringProperty(repeated=True)
p = Person()
pb = p._to_pb()
q = Person._from_pb(pb)
self.assertEqual(q.name, [], str(pb))
  def testDatetimeSerializing(self):
    """A GenericProperty holding a datetime survives a PB round-trip."""
    class Person(model.Model):
      t = model.GenericProperty()
    p = Person(t=datetime.datetime.utcnow())
    pb = p._to_pb()
    q = Person._from_pb(pb)
    self.assertEqual(p.t, q.t)
  def testExpandoKey(self):
    """The 'key' attribute on an Expando is the entity key, not a dynamic
    property, and supports get/set/delete."""
    class Ex(model.Expando):
      pass
    e = Ex()
    self.assertEqual(e.key, None)
    k = model.Key('Ex', 'abc')
    e.key = k
    self.assertEqual(e.key, k)
    k2 = model.Key('Ex', 'def')
    e2 = Ex(key=k2)
    self.assertEqual(e2.key, k2)
    e2.key = k
    self.assertEqual(e2.key, k)
    # Equal keys and (empty) values make the entities compare equal.
    self.assertEqual(e, e2)
    # Deleting the key resets it to None rather than raising.
    del e.key
    self.assertEqual(e.key, None)
def testExpandoRead(self):
class Person(model.Model):
name = model.StringProperty()
city = model.StringProperty()
p = Person(name='Guido', city='SF')
pb = p._to_pb()
q = model.Expando._from_pb(pb)
self.assertEqual(q.name, 'Guido')
self.assertEqual(q.city, 'SF')
  def testExpandoWrite(self):
    """Serializing an Expando with mixed value types matches the golden PB."""
    k = model.Key(flat=['Model', 42])
    p = model.Expando(key=k)
    # One dynamic attribute per supported value type: Key, int, str, user,
    # float, bool, GeoPt.
    p.k = k
    p.p = 42
    p.q = 'hello'
    p.u = TESTUSER
    p.d = 2.5
    p.b = True
    p.xy = AMSTERDAM
    pb = p._to_pb()
    # GOLDEN_PB is a module-level fixture with the expected text serialization.
    self.assertEqual(str(pb), GOLDEN_PB)
  def testExpandoDelAttr(self):
    """Deleting attributes: static properties reset to None, dynamic ones
    disappear entirely."""
    class Ex(model.Expando):
      static = model.StringProperty()
    e = Ex()
    self.assertEqual(e.static, None)
    self.assertRaises(AttributeError, getattr, e, 'dynamic')
    # Underscore-prefixed names are never treated as dynamic properties.
    self.assertRaises(AttributeError, getattr, e, '_absent')
    e.static = 'a'
    e.dynamic = 'b'
    self.assertEqual(e.static, 'a')
    self.assertEqual(e.dynamic, 'b')
    e = Ex(static='a', dynamic='b')
    self.assertEqual(e.static, 'a')
    self.assertEqual(e.dynamic, 'b')
    del e.static
    del e.dynamic
    self.assertEqual(e.static, None)
    self.assertRaises(AttributeError, getattr, e, 'dynamic')
  def testExpandoRepr(self):
    """repr() of an Expando uses Python attribute names, sorted; _values
    uses datastore names."""
    class Person(model.Expando):
      name = model.StringProperty('Name')
      city = model.StringProperty('City')
    p = Person(name='Guido', zip='00000')
    p.city = 'SF'
    self.assertEqual(repr(p),
                     "Person(city='SF', name='Guido', zip='00000')")
    # White box confirmation.
    self.assertEqual(p._values,
                     {'City': 'SF', 'Name': 'Guido', 'zip': '00000'})
def testExpandoNested(self):
p = model.Expando()
nest = model.Expando()
nest.foo = 42
nest.bar = 'hello'
p.nest = nest
self.assertEqual(p.nest.foo, 42)
self.assertEqual(p.nest.bar, 'hello')
pb = p._to_pb()
q = model.Expando._from_pb(pb)
self.assertEqual(q.nest.foo, 42)
self.assertEqual(q.nest.bar, 'hello')
  def testExpandoSubclass(self):
    """An Expando subclass mixes declared and dynamic attributes."""
    class Person(model.Expando):
      name = model.StringProperty()
    p = Person()
    p.name = 'Joe'
    p.age = 7  # Dynamic attribute, created on the fly.
    self.assertEqual(p.name, 'Joe')
    self.assertEqual(p.age, 7)
def testExpandoConstructor(self):
p = model.Expando(foo=42, bar='hello')
self.assertEqual(p.foo, 42)
self.assertEqual(p.bar, 'hello')
pb = p._to_pb()
q = model.Expando._from_pb(pb)
self.assertEqual(q.foo, 42)
self.assertEqual(q.bar, 'hello')
  def testExpandoNestedConstructor(self):
    """A nested Expando passed as a constructor kwarg round-trips intact."""
    p = model.Expando(foo=42, bar=model.Expando(hello='hello'))
    self.assertEqual(p.foo, 42)
    self.assertEqual(p.bar.hello, 'hello')
    pb = p._to_pb()
    q = model.Expando._from_pb(pb)
    self.assertEqual(q.foo, 42)
    self.assertEqual(q.bar.hello, 'hello')
  def testExpandoRepeatedProperties(self):
    """Dynamic properties become repeated iff assigned a list; reassigning
    a scalar switches them back."""
    p = model.Expando(foo=1, bar=[1, 2])
    p.baz = [3]
    self.assertFalse(p._properties['foo']._repeated)
    self.assertTrue(p._properties['bar']._repeated)
    self.assertTrue(p._properties['baz']._repeated)
    p.bar = 'abc'
    self.assertFalse(p._properties['bar']._repeated)
    pb = p._to_pb()
    q = model.Expando._from_pb(pb)
    # Clear the key so q compares equal to a freshly built Expando below.
    q.key = None
    self.assertFalse(p._properties['foo']._repeated)
    self.assertFalse(p._properties['bar']._repeated)
    self.assertTrue(p._properties['baz']._repeated)
    self.assertEqual(q, model.Expando(foo=1, bar='abc', baz=[3]))
  def testExpandoUnindexedProperties(self):
    """_default_indexed controls indexing of dynamic properties; it can be
    set per instance or on the class."""
    class Mine(model.Expando):
      pass
    a = Mine(foo=1, bar=['a', 'b'])
    self.assertTrue(a._properties['foo']._indexed)
    self.assertTrue(a._properties['bar']._indexed)
    # Instance-level override affects only subsequently created properties.
    a._default_indexed = False
    a.baz = 'baz'
    self.assertFalse(a._properties['baz']._indexed)
    # Class-level override affects new instances.
    Mine._default_indexed = False
    b = Mine(foo=1)
    b.bar = ['a', 'b']
    self.assertFalse(b._properties['foo']._indexed)
    self.assertFalse(b._properties['bar']._indexed)
  def testComputedProperty(self):
    """ComputedProperty works via lambda, decorator, and explicit function
    with a renamed datastore field; values are serialized and restored."""
    class ComputedTest(model.Model):
      name = model.StringProperty()
      name_lower = model.ComputedProperty(lambda self: self.name.lower())
      @model.ComputedProperty
      def length(self):
        return len(self.name)
      def _compute_hash(self):
        return hash(self.name)
      # Stored under datastore name 'hashcode'.
      hash = model.ComputedProperty(_compute_hash, name='hashcode')
    m = ComputedTest(name='Foobar')
    pb = m._to_pb()
    # The computed value must actually appear in the serialized PB.
    for p in pb.property_list():
      if p.name() == 'name_lower':
        self.assertEqual(p.value().stringvalue(), 'foobar')
        break
    else:
      self.assert_(False, "name_lower not found in PB")
    m = ComputedTest._from_pb(pb)
    self.assertEqual(m.name, 'Foobar')
    self.assertEqual(m.name_lower, 'foobar')
    self.assertEqual(m.length, 6)
    self.assertEqual(m.hash, hash('Foobar'))
  def testLargeValues(self):
    """Large blob/text values round-trip and keep their Python types."""
    class Demo(model.Model):
      bytes = model.BlobProperty()
      text = model.TextProperty()
    x = Demo(bytes='x'*1000, text=u'a'*1000)
    key = x.put()
    y = key.get()
    self.assertEqual(x, y)
    # Blob stays a byte string, text stays unicode after the round-trip.
    self.assertTrue(isinstance(y.bytes, str))
    self.assertTrue(isinstance(y.text, unicode))
  def testMultipleStructuredProperty(self):
    """A repeated StructuredProperty stores and restores multiple values
    through the raw connection API."""
    class Address(model.Model):
      label = model.StringProperty()
      text = model.StringProperty()
    class Person(model.Model):
      name = model.StringProperty()
      address = model.StructuredProperty(Address, repeated=True)
    m = Person(name='Google',
               address=[Address(label='work', text='San Francisco'),
                        Address(label='home', text='Mountain View')])
    # Incomplete key (id=None) so the datastore assigns one on put.
    m.key = model.Key(flat=['Person', None])
    self.assertEqual(m.address[0].label, 'work')
    self.assertEqual(m.address[0].text, 'San Francisco')
    self.assertEqual(m.address[1].label, 'home')
    self.assertEqual(m.address[1].text, 'Mountain View')
    [k] = self.conn.put([m])
    m.key = k  # Connection.put() doesn't do this!
    [m2] = self.conn.get([k])
    self.assertEqual(m2, m)
  def testIdAndParentPut(self):
    """Model constructor accepts id and parent shortcuts for building keys."""
    # id
    m = model.Model(id='bar')
    self.assertEqual(m.put(), model.Key('Model', 'bar'))
    # id + parent
    p = model.Key('ParentModel', 'foo')
    m = model.Model(id='bar', parent=p)
    self.assertEqual(m.put(), model.Key('ParentModel', 'foo', 'Model', 'bar'))
    # parent without id: the datastore assigns a numeric id on put.
    p = model.Key('ParentModel', 'foo')
    m = model.Model(parent=p)
    m.put()
    self.assertTrue(m.key.id())
  def testAllocateIds(self):
    """allocate_ids() returns an inclusive (start, end) id range, with
    separate sequences per parent."""
    class MyModel(model.Model):
      pass
    res = MyModel.allocate_ids(size=100)
    self.assertEqual(res, (1, 100))
    # with parent
    key = model.Key(flat=(MyModel._get_kind(), 1))
    res = MyModel.allocate_ids(size=200, parent=key)
    self.assertEqual(res, (101, 300))
  def testGetOrInsert(self):
    """get_or_insert() creates the entity when it does not yet exist."""
    class MyModel(model.Model):
      text = model.StringProperty()
    key = model.Key(flat=(MyModel._get_kind(), 'baz'))
    self.assertEqual(key.get(), None)
    MyModel.get_or_insert('baz', text='baz')
    self.assertNotEqual(key.get(), None)
    self.assertEqual(key.get().text, 'baz')
  def testGetById(self):
    """get_by_id() resolves numeric ids, string names, and parent keys;
    returns None for missing entities and rejects invalid parents."""
    class MyModel(model.Model):
      pass
    kind = MyModel._get_kind()
    # key id
    ent1 = MyModel(key=model.Key(pairs=[(kind, 1)]))
    key = ent1.put()
    res = MyModel.get_by_id(1)
    self.assertEqual(res, ent1)
    # key name
    ent2 = MyModel(key=model.Key(pairs=[(kind, 'foo')]))
    key = ent2.put()
    res = MyModel.get_by_id('foo')
    self.assertEqual(res, ent2)
    # key id + parent
    ent3 = MyModel(key=model.Key(pairs=[(kind, 1), (kind, 2)]))
    key = ent3.put()
    res = MyModel.get_by_id(2, parent=model.Key(pairs=[(kind, 1)]))
    self.assertEqual(res, ent3)
    # key name + parent
    ent4 = MyModel(key=model.Key(pairs=[(kind, 1), (kind, 'bar')]))
    key = ent4.put()
    res = MyModel.get_by_id('bar', parent=ent1.key)
    self.assertEqual(res, ent4)
    # None
    res = MyModel.get_by_id('idontexist')
    self.assertEqual(res, None)
    # Invalid parent
    self.assertRaises(datastore_errors.BadValueError, MyModel.get_by_id,
                      'bar', parent=1)
def testDelete(self):
class MyModel(model.Model):
pass
ent1 = MyModel()
key1 = ent1.put()
ent2 = key1.get()
self.assertEqual(ent1, ent2)
key1.delete()
ent3 = key1.get()
self.assertEqual(ent3, None)
  def testPopulate(self):
    """populate() sets declared properties and rejects unknown ones."""
    class MyModel(model.Model):
      name = model.StringProperty()
    m = MyModel()
    m.populate(name='abc')
    self.assertEqual(m.name, 'abc')
    m.populate(name='def')
    self.assertEqual(m.name, 'def')
    # Unknown attribute names raise rather than being silently stored.
    self.assertRaises(AttributeError, m.populate, foo=42)
  def testPopulate_Expando(self):
    """On an Expando, populate() accepts unknown names as dynamic properties."""
    class Ex(model.Expando):
      name = model.StringProperty()
    m = Ex()
    m.populate(name='abc')
    self.assertEqual(m.name, 'abc')
    m.populate(foo=42)
    self.assertEqual(m.foo, 42)
  def testTransaction(self):
    """model.transaction() runs a callback transactionally; retry and
    entity_group arguments are accepted."""
    class MyModel(model.Model):
      text = model.StringProperty()
    key = model.Key(MyModel, 'babaz')
    self.assertEqual(key.get(), None)
    def callback():
      # Emulate get_or_insert()
      a = key.get()
      if a is None:
        a = MyModel(text='baz', key=key)
        a.put()
      return a
    b = model.transaction(callback)
    self.assertNotEqual(b, None)
    self.assertEqual(b.text, 'baz')
    self.assertEqual(key.get(), b)
    # The callback closes over 'key', so rebinding it targets a new entity.
    key = model.Key(MyModel, 'bababaz')
    self.assertEqual(key.get(), None)
    c = model.transaction(callback, retry=0, entity_group=key)
    self.assertNotEqual(c, None)
    self.assertEqual(c.text, 'baz')
    self.assertEqual(key.get(), c)
  def testGetMultiAsync(self):
    """get_multi_async() yields all entities, in key order, via a tasklet."""
    model.Model._kind_map['Model'] = model.Model
    ent1 = model.Model(key=model.Key('Model', 1))
    ent2 = model.Model(key=model.Key('Model', 2))
    ent3 = model.Model(key=model.Key('Model', 3))
    key1 = ent1.put()
    key2 = ent2.put()
    key3 = ent3.put()
    @tasklets.tasklet
    def foo():
      ents = yield model.get_multi_async([key1, key2, key3])
      raise tasklets.Return(ents)
    res = foo().get_result()
    self.assertEqual(res, [ent1, ent2, ent3])
def testGetMulti(self):
model.Model._kind_map['Model'] = model.Model
ent1 = model.Model(key=model.Key('Model', 1))
ent2 = model.Model(key=model.Key('Model', 2))
ent3 = model.Model(key=model.Key('Model', 3))
key1 = ent1.put()
key2 = ent2.put()
key3 = ent3.put()
res = model.get_multi((key1, key2, key3))
self.assertEqual(res, [ent1, ent2, ent3])
  def testPutMultiAsync(self):
    """put_multi_async() stores all entities and yields their keys in order."""
    ent1 = model.Model(key=model.Key('Model', 1))
    ent2 = model.Model(key=model.Key('Model', 2))
    ent3 = model.Model(key=model.Key('Model', 3))
    @tasklets.tasklet
    def foo():
      ents = yield model.put_multi_async([ent1, ent2, ent3])
      raise tasklets.Return(ents)
    res = foo().get_result()
    self.assertEqual(res, [ent1.key, ent2.key, ent3.key])
def testPutMulti(self):
ent1 = model.Model(key=model.Key('Model', 1))
ent2 = model.Model(key=model.Key('Model', 2))
ent3 = model.Model(key=model.Key('Model', 3))
res = model.put_multi((ent1, ent2, ent3))
self.assertEqual(res, [ent1.key, ent2.key, ent3.key])
  def testDeleteMultiAsync(self):
    """delete_multi_async() removes all entities for the given keys."""
    model.Model._kind_map['Model'] = model.Model
    ent1 = model.Model(key=model.Key('Model', 1))
    ent2 = model.Model(key=model.Key('Model', 2))
    ent3 = model.Model(key=model.Key('Model', 3))
    key1 = ent1.put()
    key2 = ent2.put()
    key3 = ent3.put()
    self.assertEqual(key1.get(), ent1)
    self.assertEqual(key2.get(), ent2)
    self.assertEqual(key3.get(), ent3)
    @tasklets.tasklet
    def foo():
      # Delete results are None values; they are not asserted on below.
      ents = yield model.delete_multi_async([key1, key2, key3])
      raise tasklets.Return(ents)
    res = foo().get_result()
    self.assertEqual(key1.get(), None)
    self.assertEqual(key2.get(), None)
    self.assertEqual(key3.get(), None)
  def testDeleteMulti(self):
    """delete_multi() removes all entities for the given keys."""
    model.Model._kind_map['Model'] = model.Model
    ent1 = model.Model(key=model.Key('Model', 1))
    ent2 = model.Model(key=model.Key('Model', 2))
    ent3 = model.Model(key=model.Key('Model', 3))
    key1 = ent1.put()
    key2 = ent2.put()
    key3 = ent3.put()
    self.assertEqual(key1.get(), ent1)
    self.assertEqual(key2.get(), ent2)
    self.assertEqual(key3.get(), ent3)
    res = model.delete_multi((key1, key2, key3))
    self.assertEqual(key1.get(), None)
    self.assertEqual(key2.get(), None)
    self.assertEqual(key3.get(), None)
  def testNamespaces(self):
    """Keys capture the namespace at creation time, inherit it from parents,
    and preserve it through serialization; absent means empty namespace."""
    save_namespace = namespace_manager.get_namespace()
    try:
      namespace_manager.set_namespace('ns1')
      k1 = model.Key('A', 1)
      self.assertEqual(k1.namespace(), 'ns1')
      k2 = model.Key('B', 2, namespace='ns2')
      self.assertEqual(k2.namespace(), 'ns2')
      # Changing the current namespace must not affect existing keys.
      namespace_manager.set_namespace('ns3')
      self.assertEqual(k1.namespace(), 'ns1')
      # A child key inherits its parent's namespace.
      k3 = model.Key('C', 3, parent=k1)
      self.assertEqual(k3.namespace(), 'ns1')
      # Test that namespaces survive serialization
      namespace_manager.set_namespace('ns2')
      km = model.Key('M', 1, namespace='ns4')
      class M(model.Model):
        keys = model.KeyProperty(repeated=True)
      m1 = M(keys=[k1, k2, k3], key=km)
      pb = m1._to_pb()
      namespace_manager.set_namespace('ns3')
      m2 = M._from_pb(pb)
      self.assertEqual(m1, m2)
      self.assertEqual(m2.keys[0].namespace(), 'ns1')
      self.assertEqual(m2.keys[1].namespace(), 'ns2')
      self.assertEqual(m2.keys[2].namespace(), 'ns1')
      # Now test the same thing for Expando
      namespace_manager.set_namespace('ns2')
      ke = model.Key('E', 1)
      class E(model.Expando):
        pass
      e1 = E(keys=[k1, k2, k3], key=ke)
      pb = e1._to_pb()
      namespace_manager.set_namespace('ns3')
      e2 = E._from_pb(pb)
      self.assertEqual(e1, e2)
      # Test that an absent namespace always means the empty namespace
      namespace_manager.set_namespace('')
      k3 = model.Key('E', 2)
      e3 = E(key=k3, k=k3)
      pb = e3._to_pb()
      namespace_manager.set_namespace('ns4')
      e4 = E._from_pb(pb)
      self.assertEqual(e4.key.namespace(), '')
      self.assertEqual(e4.k.namespace(), '')
    finally:
      # Always restore the original namespace for other tests.
      namespace_manager.set_namespace(save_namespace)
def testOverrideModelKey(self):
class MyModel(model.Model):
# key, overriden
key = model.StringProperty()
# aha, here it is!
real_key = model.ModelKey()
class MyExpando(model.Expando):
# key, overriden
key = model.StringProperty()
# aha, here it is!
real_key = model.ModelKey()
m = MyModel()
k = model.Key('MyModel', 'foo')
m.key = 'bar'
m.real_key = k
m.put()
res = k.get()
self.assertEqual(res, m)
self.assertEqual(res.key, 'bar')
self.assertEqual(res.real_key, k)
q = MyModel.query(MyModel.real_key == k)
res = q.get()
self.assertEqual(res, m)
self.assertEqual(res.key, 'bar')
self.assertEqual(res.real_key, k)
m = MyExpando()
k = model.Key('MyExpando', 'foo')
m.key = 'bar'
m.real_key = k
m.put()
res = k.get()
self.assertEqual(res, m)
self.assertEqual(res.key, 'bar')
self.assertEqual(res.real_key, k)
q = MyExpando.query(MyModel.real_key == k)
res = q.get()
self.assertEqual(res, m)
self.assertEqual(res.key, 'bar')
self.assertEqual(res.real_key, k)
  def testTransactionalDecorator(self):
    """@model.transactional runs the function in a transaction; nested
    decorated calls share the same transactional connection."""
    # This tests @model.transactional and model.in_transaction(), and
    # indirectly context.Context.in_transaction().
    logs = []
    @model.transactional
    def foo(a, b):
      self.assertTrue(model.in_transaction())
      logs.append(tasklets.get_context()._conn)  # White box
      return a + b
    @model.transactional
    def bar(a):
      self.assertTrue(model.in_transaction())
      logs.append(tasklets.get_context()._conn)  # White box
      return foo(a, 42)
    before = tasklets.get_context()._conn
    self.assertFalse(model.in_transaction())
    x = bar(100)
    self.assertFalse(model.in_transaction())
    # The original (non-transactional) connection is restored afterwards.
    after = tasklets.get_context()._conn
    self.assertEqual(before, after)
    self.assertEqual(x, 142)
    # Both decorated calls saw the same transactional connection, which is
    # distinct from the outer connection.
    self.assertEqual(len(logs), 2)
    self.assertEqual(logs[0], logs[1])
    self.assertNotEqual(before, logs[0])
def testPropertyFilters(self):
class M(model.Model):
dt = model.DateTimeProperty()
d = model.DateProperty()
t = model.TimeProperty()
f = model.FloatProperty()
s = model.StringProperty()
k = model.KeyProperty()
b = model.BooleanProperty()
i = model.IntegerProperty()
g = model.GeoPtProperty()
@model.ComputedProperty
def c(self):
return self.i + 1
u = model.UserProperty()
values = {
'dt': datetime.datetime.now(),
'd': datetime.date.today(),
't': datetime.datetime.now().time(),
'f': 4.2,
's': 'foo',
'k': model.Key('Foo', 'bar'),
'b': False,
'i': 42,
'g': AMSTERDAM,
'u': TESTUSER,
}
m = M(**values)
m.put()
q = M.query(M.dt == values['dt'])
self.assertEqual(q.get(), m)
q = M.query(M.d == values['d'])
self.assertEqual(q.get(), m)
q = M.query(M.t == values['t'])
self.assertEqual(q.get(), m)
q = M.query(M.f == values['f'])
self.assertEqual(q.get(), m)
q = M.query(M.s == values['s'])
self.assertEqual(q.get(), m)
q = M.query(M.k == values['k'])
self.assertEqual(q.get(), m)
q = M.query(M.b == values['b'])
self.assertEqual(q.get(), m)
q = M.query(M.i == values['i'])
self.assertEqual(q.get(), m)
q = M.query(M.g == values['g'])
self.assertEqual(q.get(), m)
q = M.query(M.c == values['i'] + 1)
self.assertEqual(q.get(), m)
q = M.query(M.u == values['u'])
self.assertEqual(q.get(), m)
class CacheTests(model.Model.__class__.__mro__ and test_utils.DatastoreTest):
  """Tests exercising the context cache (see ndb context)."""
  def SetupContextCache(self):
    """Set up the context cache.
    We only need cache active when testing the cache, so the default behavior
    is to disable it to avoid misleading test results. Override this when
    needed.
    """
    from ndb import tasklets
    ctx = tasklets.get_context()
    # Cache everything, in both the context cache and memcache.
    ctx.set_cache_policy(lambda key: True)
    ctx.set_memcache_policy(lambda key: True)
  def test_issue_13(self):
    """Reassigning an entity's key must not leave a stale context-cache hit."""
    class Employee(model.Model):
      pass
    e = Employee(key=model.Key(Employee, 'joe'))
    e.put()
    e.key = model.Key(Employee, 'fred')
    f = model.Key(Employee, 'joe').get()
    # Now f is e;
    # With bug this is True.
    # self.assertEqual(f.key, model.Key(Employee, 'fred'))
    # Removing key from context cache when it is set to a different one
    # makes the test correct.
    self.assertEqual(f.key, model.Key(Employee, 'joe'))
def main():
  """Run all tests in this module."""
  unittest.main()
# Allow running this test module directly.
if __name__ == '__main__':
  main()
| Python |
# This file intentionally left blank.
| Python |
"""Context class."""
# TODO: Handle things like request size limits. E.g. what if we've
# batched up 1000 entities to put and now the memcache call fails?
import logging
import sys
from google.appengine.api import datastore # For taskqueue coordination
from google.appengine.api import datastore_errors
from google.appengine.api import memcache
from google.appengine.datastore import datastore_rpc
import ndb.key
from ndb import model, tasklets, eventloop, utils
class AutoBatcher(object):
  """Batches individual operations into a single tasklet invocation.

  Callers add() one argument at a time and receive a Future for that
  argument's individual result.  All arguments queued while the event
  loop was busy are handed to todo_tasklet as one batch.
  """
  def __init__(self, todo_tasklet):
    # todo_tasklet is a tasklet to be called with list of (future, arg) pairs
    self._todo_tasklet = todo_tasklet
    self._todo = []  # List of (future, arg) pairs
    self._running = None  # Currently running tasklet, if any
  def __repr__(self):
    return '%s(%s)' % (self.__class__.__name__, self._todo_tasklet.__name__)
  def add(self, arg):
    """Queue one argument; return a Future for its individual result."""
    fut = tasklets.Future('%s.add(%s)' % (self, arg))
    if not self._todo:  # Schedule the callback
      # We use the fact that regular tasklets are queued at time None,
      # which puts them at absolute time 0 (i.e. ASAP -- still on a
      # FIFO basis). Callbacks explicitly scheduled with a delay of 0
      # are only run after all immediately runnable tasklets have run.
      eventloop.queue_call(0, self._autobatcher_callback)
    self._todo.append((fut, arg))
    return fut
  def _autobatcher_callback(self):
    """Event-loop callback: fire todo_tasklet with the accumulated batch."""
    if not self._todo:
      return
    if self._running is not None:
      # Another callback may still be running.
      if not self._running.done():
        # Wait for it to complete first, then try again.
        self._running.add_callback(self._autobatcher_callback)
        return
      self._running = None
    # We cannot postpone the inevitable any longer.
    todo = self._todo
    self._todo = []  # Get ready for the next batch
    # TODO: Use logging_debug(), at least if len(todo) == 1.
    logging.info('AutoBatcher(%s): %d items',
                 self._todo_tasklet.__name__, len(todo))
    self._running = self._todo_tasklet(todo)
    # Add a callback to the Future to propagate exceptions,
    # since this Future is not normally checked otherwise.
    self._running.add_callback(self._running.check_success)
  @tasklets.tasklet
  def flush(self):
    """Tasklet: block until both the running batch and the queue are drained."""
    while self._running or self._todo:
      if self._running:
        if self._running.done():
          self._running.check_success()
          self._running = None
        else:
          yield self._running
      else:
        self._autobatcher_callback()
class Context(object):
def __init__(self, conn=None, auto_batcher_class=AutoBatcher):
if conn is None:
conn = model.make_connection()
self._conn = conn
self._auto_batcher_class = auto_batcher_class
self._get_batcher = auto_batcher_class(self._get_tasklet)
self._put_batcher = auto_batcher_class(self._put_tasklet)
self._delete_batcher = auto_batcher_class(self._delete_tasklet)
self._cache = {}
self._cache_policy = lambda key: True
self._memcache_policy = lambda key: True
self._memcache_timeout_policy = lambda key: 0
self._memcache_prefix = 'NDB:' # TODO: make this configurable.
# TODO: Also add a way to compute the memcache expiration time.
@tasklets.tasklet
def flush(self):
yield (self._get_batcher.flush(),
self._put_batcher.flush(),
self._delete_batcher.flush())
@tasklets.tasklet
def _get_tasklet(self, todo):
assert todo
# First check memcache.
keys = set(key for _, key in todo)
memkeymap = dict((key, key.urlsafe())
for key in keys if self.should_memcache(key))
if memkeymap:
results = memcache.get_multi(memkeymap.values(),
key_prefix=self._memcache_prefix)
leftover = []
## del todo[1:] # Uncommenting this creates an interesting bug.
for fut, key in todo:
mkey = memkeymap[key]
if mkey in results:
pb = results[mkey]
ent = self._conn.adapter.pb_to_entity(pb)
fut.set_result(ent)
else:
leftover.append((fut, key))
todo = leftover
if todo:
keys = [key for (_, key) in todo]
# TODO: What if async_get() created a non-trivial MultiRpc?
results = yield self._conn.async_get(None, keys)
for ent, (fut, _) in zip(results, todo):
fut.set_result(ent)
@tasklets.tasklet
def _put_tasklet(self, todo):
assert todo
# TODO: What if the same entity is being put twice?
# TODO: What if two entities with the same key are being put?
# TODO: Clear entities from memcache before starting the write?
# TODO: Attempt to prevent dogpile effect while keeping cache consistent?
ents = [ent for (_, ent) in todo]
results = yield self._conn.async_put(None, ents)
for key, (fut, ent) in zip(results, todo):
if key != ent._key:
if ent._has_complete_key():
raise datastore_errors.BadKeyError(
'Entity key differs from the one returned by the datastore. '
'Expected %r, got %r' % (key, ent._key))
ent._key = key
fut.set_result(key)
# Now update memcache.
# TODO: Could we update memcache *before* calling async_put()?
# (Hm, not for new entities but possibly for updated ones.)
mappings = {} # Maps timeout value to {urlsafe_key: pb} mapping.
for _, ent in todo:
if self.should_memcache(ent._key):
pb = self._conn.adapter.entity_to_pb(ent)
timeout = self._memcache_timeout_policy(ent._key)
mapping = mappings.get(timeout)
if mapping is None:
mapping = mappings[timeout] = {}
mapping[ent._key.urlsafe()] = pb
if mappings:
# If the timeouts are not uniform, make a separate call for each
# distinct timeout value.
for timeout, mapping in mappings.iteritems():
failures = memcache.set_multi(mapping, time=timeout,
key_prefix=self._memcache_prefix)
if failures:
badkeys = []
for failure in failures:
badkeys.append(mapping[failure].key)
logging.info('memcache failed to set %d out of %d keys: %s',
len(failures), len(mapping), badkeys)
@tasklets.tasklet
def _delete_tasklet(self, todo):
assert todo
keys = set(key for (_, key) in todo)
yield self._conn.async_delete(None, keys)
for fut, _ in todo:
fut.set_result(None)
# Now update memcache.
memkeys = [key.urlsafe() for key in keys if self.should_memcache(key)]
if memkeys:
memcache.delete_multi(memkeys, key_prefix=self._memcache_prefix)
# The value returned by delete_multi() is pretty much useless, it
# could be the keys were never cached in the first place.
def get_cache_policy(self):
"""Returns the current context cache policy function.
Returns:
A function that accepts a Key instance as argument and returns
a boolean indicating if it should be cached.
"""
return self._cache_policy
def set_cache_policy(self, func):
"""Sets the context cache policy function.
Args:
func: A function that accepts a Key instance as argument and returns
a boolean indicating if it should be cached.
"""
self._cache_policy = func
def should_cache(self, key):
"""Return whether to use the context cache for this key.
Args:
key: Key instance.
Returns:
True if the key should be cached, False otherwise.
"""
return self._cache_policy(key)
def get_memcache_policy(self):
"""Returns the current memcache policy function.
Returns:
A function that accepts a Key instance as argument and returns
a boolean indicating if it should be cached.
"""
return self._memcache_policy
def set_memcache_policy(self, func):
"""Sets the memcache policy function.
Args:
func: A function that accepts a Key instance as argument and returns
a boolean indicating if it should be cached.
"""
self._memcache_policy = func
def set_memcache_timeout_policy(self, func):
"""Sets the policy function for memcache timeout (expiration).
Args:
func: A function that accepts a key instance as argument and returns
an integer indicating the desired memcache timeout.
If the function returns 0 it implies the default timeout.
"""
self._memcache_timeout_policy = func
def get_memcache_timeout_policy(self):
"""Returns the current policy function for memcache timeout (expiration)."""
return self._memcache_timeout_policy
def should_memcache(self, key):
"""Return whether to use memcache for this key.
Args:
key: Key instance.
Returns:
True if the key should be cached, False otherwise.
"""
return self._memcache_policy(key)
# TODO: What about conflicting requests to different autobatchers,
# e.g. tasklet A calls get() on a given key while tasklet B calls
# delete()? The outcome is nondeterministic, depending on which
# autobatcher gets run first. Maybe we should just flag such
# conflicts as errors, with an overridable policy to resolve them
# differently?
@tasklets.tasklet
def get(self, key):
"""Returns a Model instance given the entity key.
It will use the context cache if the cache policy for the given
key is enabled.
Args:
key: Key instance.
Returns:
A Model instance it the key exists in the datastore; None otherwise.
"""
should_cache = self.should_cache(key)
if should_cache and key in self._cache:
entity = self._cache[key] # May be None, meaning "doesn't exist".
if entity is None or entity._key == key:
# If entity's key didn't change later, it is ok. See issue #13.
raise tasklets.Return(entity)
entity = yield self._get_batcher.add(key)
if should_cache:
self._cache[key] = entity
raise tasklets.Return(entity)
@tasklets.tasklet
def put(self, entity):
key = yield self._put_batcher.add(entity)
if entity._key != key:
logging.info('replacing key %s with %s', entity._key, key)
entity._key = key
# TODO: For updated entities, could we update the cache first?
if self.should_cache(key):
# TODO: What if by now the entity is already in the cache?
self._cache[key] = entity
raise tasklets.Return(key)
@tasklets.tasklet
def delete(self, key):
yield self._delete_batcher.add(key)
if key in self._cache:
self._cache[key] = None
@tasklets.tasklet
def allocate_ids(self, key, size=None, max=None):
lo_hi = yield self._conn.async_allocate_ids(None, key, size, max)
raise tasklets.Return(lo_hi)
@datastore_rpc._positional(3)
def map_query(self, query, callback, options=None, merge_future=None):
    """Runs a query and applies callback to each result.

    Results are delivered through a merge future (a MultiFuture by
    default).  The context cache is consulted for every returned entity
    so that mutations made in this context win over possibly stale
    query results.

    Args:
      query: Query instance to run.
      callback: Function applied to each entity; called as
        callback(batch, i, ent) when options.produce_cursors is set,
        else callback(ent).  If None, the entity itself is produced.
      options: Optional query options.
      merge_future: Optional future collecting the produced values;
        defaults to a fresh MultiFuture.

    Returns:
      The merge future; its result is the collection of produced values.
    """
    mfut = merge_future
    if mfut is None:
        mfut = tasklets.MultiFuture('map_query')

    @tasklets.tasklet
    def helper():
        try:
            inq = tasklets.SerialQueueFuture()
            query.run_to_queue(inq, self._conn, options)
            is_ancestor_query = query.ancestor is not None
            while True:
                try:
                    batch, i, ent = yield inq.getq()
                except EOFError:
                    # Queue exhausted: all query results consumed.
                    break
                if isinstance(ent, model.Key):
                    pass  # It was a keys-only query and ent is really a Key.
                else:
                    key = ent._key
                    if key in self._cache:
                        hit = self._cache[key]
                        if hit is not None and hit.key != key:
                            # The cached entry has been mutated to have a
                            # different key.  That's a false hit.  Get rid
                            # of it.  See issue #13.
                            del self._cache[key]
                    if key in self._cache:
                        # Assume the cache is more up to date.
                        if self._cache[key] is None:
                            # This is a weird case.  Apparently this entity was
                            # deleted concurrently with the query.  Let's just
                            # pretend the delete happened first.
                            logging.info('Conflict: entity %s was deleted', key)
                            continue
                        # Replace the entity the callback will see with the one
                        # from the cache.
                        if ent != self._cache[key]:
                            logging.info('Conflict: entity %s was modified', key)
                        ent = self._cache[key]
                    else:
                        # Cache the entity only if this is an ancestor query;
                        # non-ancestor queries may return stale results, since in
                        # the HRD these queries are "eventually consistent".
                        # TODO: Shouldn't we check this before considering cache hits?
                        if is_ancestor_query and self.should_cache(key):
                            self._cache[key] = ent
                if callback is None:
                    val = ent
                else:
                    # TODO: If the callback raises, log and ignore.
                    if options is not None and options.produce_cursors:
                        val = callback(batch, i, ent)
                    else:
                        val = callback(ent)
                mfut.putq(val)
        except Exception, err:
            # Propagate the failure (with traceback) to consumers of mfut.
            _, _, tb = sys.exc_info()
            mfut.set_exception(err, tb)
            raise
        else:
            mfut.complete()

    helper()
    return mfut
@datastore_rpc._positional(2)
def iter_query(self, query, callback=None, options=None):
    """Like map_query(), but collects results in a SerialQueueFuture.

    This lets the caller consume results one at a time, in query order.
    """
    serial_future = tasklets.SerialQueueFuture()
    return self.map_query(query, callback=callback, options=options,
                          merge_future=serial_future)
@tasklets.tasklet
def transaction(self, callback, retry=3, entity_group=None):
    """Runs callback() inside a datastore transaction, with retries.

    Will invoke callback() one or more times with the default context set
    to a new, transactional Context.  Returns a Future.  Callback may be
    a tasklet.

    Args:
      callback: Function (or tasklet) executed inside the transaction.
      retry: Number of additional commit attempts after the first.
      entity_group: Optional root key of the entity group to lock.

    Returns:
      The callback's result once the transaction commits.

    Raises:
      datastore_errors.TransactionFailedError: when all attempts fail.
    """
    if entity_group is not None:
        app = entity_group.app()
    else:
        app = ndb.key._DefaultAppId()
    # Flush pending batched work first so it isn't pulled into the txn.
    yield self.flush()
    for i in range(1 + max(0, retry)):
        transaction = yield self._conn.async_begin_transaction(None, app)
        tconn = datastore_rpc.TransactionalConnection(
            adapter=self._conn.adapter,
            config=self._conn.config,
            transaction=transaction,
            entity_group=entity_group)
        # Fresh transactional Context; memcache is disabled for it since
        # memcache writes are not transactional.
        tctx = self.__class__(conn=tconn,
                              auto_batcher_class=self._auto_batcher_class)
        tctx.set_memcache_policy(lambda key: False)
        tasklets.set_context(tctx)
        old_ds_conn = datastore._GetConnection()
        try:
            datastore._SetConnection(tconn)  # For taskqueue coordination
            try:
                try:
                    result = callback()
                    if isinstance(result, tasklets.Future):
                        result = yield result
                finally:
                    # Always flush the transactional context's batches.
                    yield tctx.flush()
            except Exception, err:
                t, e, tb = sys.exc_info()
                yield tconn.async_rollback(None)  # TODO: Don't block???
                if issubclass(t, datastore_errors.Rollback):
                    # Rollback means "abort quietly" -- not an error.
                    return
                else:
                    raise t, e, tb
            else:
                ok = yield tconn.async_commit(None)
                if ok:
                    # Commit succeeded: propagate the txn's cache updates.
                    # TODO: This is questionable when self is transactional.
                    self._cache.update(tctx._cache)
                    self._flush_memcache(tctx._cache)
                    raise tasklets.Return(result)
        finally:
            datastore._SetConnection(old_ds_conn)
    # Out of retries
    raise datastore_errors.TransactionFailedError(
        'The transaction could not be committed. Please try again.')
def in_transaction(self):
    """Return whether a transaction is currently active."""
    connection = self._conn
    return isinstance(connection, datastore_rpc.TransactionalConnection)
def flush_cache(self):
    """Clears the in-memory cache.

    NOTE: This does not affect memcache.
    """
    # Clear in place (rather than rebinding) so other holders of the
    # cache dict keep seeing the same object.
    self._cache.clear()
def _flush_memcache(self, keys):
    """Deletes memcache entries for keys whose memcache policy allows it.

    Args:
      keys: Iterable of Key instances (e.g. a cache dict -- only its keys
        are used).
    """
    keys = set(key for key in keys if self.should_memcache(key))
    if keys:
        memkeys = [key.urlsafe() for key in keys]
        memcache.delete_multi(memkeys, key_prefix=self._memcache_prefix)
@tasklets.tasklet
def get_or_insert(self, model_class, name,
                  app=None, namespace=None, parent=None,
                  **kwds):
    """Gets the named entity, creating it transactionally if absent.

    Args:
      model_class: Model subclass to instantiate when the entity is absent.
      name: Non-empty string name (key id) for the entity.
      app, namespace, parent: Optional key attributes.
      **kwds: Constructor keywords, used only when creating a new entity.

    Returns:
      The existing or newly created Model instance.
    """
    # TODO: Test the heck out of this, in all sorts of evil scenarios.
    assert isinstance(name, basestring) and name
    key = model.Key(model_class, name,
                    app=app, namespace=namespace, parent=parent)
    # TODO: Can (and should) the cache be trusted here?
    ent = yield self.get(key)
    if ent is None:
        # Not found: re-check and create inside a transaction to avoid
        # racing with a concurrent creator.
        @tasklets.tasklet
        def txn():
            ent = yield key.get_async()
            if ent is None:
                ent = model_class(**kwds)  # TODO: Check for forbidden keys
                ent._key = key
                yield ent.put_async()
            raise tasklets.Return(ent)
        ent = yield self.transaction(txn)
    raise tasklets.Return(ent)
def toplevel(func):
    """A sync tasklet that sets a fresh default Context.

    Use this for toplevel view functions such as
    webapp.RequestHandler.get() or Django view functions.
    """
    @utils.wrapping(func)
    def add_context_wrapper(*args, **kwds):
        __ndb_debug__ = utils.func_info(func)
        tasklets.Future.clear_all_pending()
        # Reset context; a new one will be created on the first call to
        # get_context().
        tasklets.set_context(None)
        # NOTE(review): ctx is unused, but the call presumably forces
        # eager creation of the new context before func runs -- confirm
        # before removing.
        ctx = tasklets.get_context()
        try:
            # Run func as a synchronous tasklet: blocks for its result.
            return tasklets.synctasklet(func)(*args, **kwds)
        finally:
            eventloop.run()  # Ensure writes are flushed, etc.
    return add_context_wrapper
| Python |
"""Tests for tasklets.py."""
import os
import re
import random
import sys
import time
import unittest
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_file_stub
from google.appengine.datastore import datastore_rpc
from ndb import eventloop
from ndb import model
from ndb import test_utils
from ndb import tasklets
from ndb.tasklets import Future, tasklet
class TaskletTests(test_utils.DatastoreTest):
    """Tests for Future, MultiFuture, QueueFuture variants, and tasklets."""

    def setUp(self):
        super(TaskletTests, self).setUp()
        # Start every test with a fresh event loop and a fresh context.
        if eventloop._EVENT_LOOP_KEY in os.environ:
            del os.environ[eventloop._EVENT_LOOP_KEY]
        if tasklets._CONTEXT_KEY in os.environ:
            del os.environ[tasklets._CONTEXT_KEY]
        self.ev = eventloop.get_event_loop()
        self.log = []

    def universal_callback(self, *args):
        # Generic callback recording its args for later assertions.
        self.log.append(args)

    def testFuture_Constructor(self):
        f = tasklets.Future()
        self.assertEqual(f._result, None)
        self.assertEqual(f._exception, None)
        self.assertEqual(f._callbacks, [])

    def testFuture_Repr(self):
        f = tasklets.Future()
        prefix = (r'<Future [\da-f]+ created by '
                  r'testFuture_Repr\(tasklets_test.py:\d+\) ')
        self.assertTrue(re.match(prefix + r'pending>$', repr(f)), repr(f))
        f.set_result('abc')
        self.assertTrue(re.match(prefix + r'result \'abc\'>$', repr(f)), repr(f))
        f = tasklets.Future()
        f.set_exception(RuntimeError('abc'))
        self.assertTrue(re.match(prefix + r'exception RuntimeError: abc>$',
                                 repr(f)),
                        repr(f))

    def testFuture_Done_State(self):
        f = tasklets.Future()
        self.assertFalse(f.done())
        self.assertEqual(f.state, f.RUNNING)
        f.set_result(42)
        self.assertTrue(f.done())
        self.assertEqual(f.state, f.FINISHING)

    def testFuture_SetResult(self):
        f = tasklets.Future()
        f.set_result(42)
        self.assertEqual(f._result, 42)
        self.assertEqual(f._exception, None)
        self.assertEqual(f.get_result(), 42)

    def testFuture_SetException(self):
        f = tasklets.Future()
        err = RuntimeError(42)
        f.set_exception(err)
        self.assertEqual(f.done(), True)
        self.assertEqual(f._exception, err)
        self.assertEqual(f._result, None)
        self.assertEqual(f.get_exception(), err)
        self.assertRaises(RuntimeError, f.get_result)

    def testFuture_AddDoneCallback_SetResult(self):
        f = tasklets.Future()
        f.add_callback(self.universal_callback, f)
        self.assertEqual(self.log, [])  # Nothing happened yet.
        f.set_result(42)
        eventloop.run()
        self.assertEqual(self.log, [(f,)])

    def testFuture_SetResult_AddDoneCallback(self):
        f = tasklets.Future()
        f.set_result(42)
        self.assertEqual(f.get_result(), 42)
        f.add_callback(self.universal_callback, f)
        eventloop.run()
        self.assertEqual(self.log, [(f,)])

    def testFuture_AddDoneCallback_SetException(self):
        f = tasklets.Future()
        f.add_callback(self.universal_callback, f)
        f.set_exception(RuntimeError(42))
        eventloop.run()
        self.assertEqual(self.log, [(f,)])
        self.assertEqual(f.done(), True)

    def create_futures(self):
        # Helper: five futures completed by staggered timers.
        self.futs = []
        for i in range(5):
            f = tasklets.Future()
            f.add_callback(self.universal_callback, f)
            def wake(fut, result):
                fut.set_result(result)
            self.ev.queue_call(i*0.01, wake, f, i)
            self.futs.append(f)
        return set(self.futs)

    def testFuture_WaitAny(self):
        self.assertEqual(tasklets.Future.wait_any([]), None)
        todo = self.create_futures()
        while todo:
            f = tasklets.Future.wait_any(todo)
            todo.remove(f)
        eventloop.run()
        self.assertEqual(self.log, [(f,) for f in self.futs])

    def testFuture_WaitAll(self):
        todo = self.create_futures()
        tasklets.Future.wait_all(todo)
        self.assertEqual(self.log, [(f,) for f in self.futs])

    def testSleep(self):
        log = []
        @tasklets.tasklet
        def foo():
            log.append(time.time())
            yield tasklets.sleep(0.1)
            log.append(time.time())
        foo()
        eventloop.run()
        t0, t1 = log
        dt = t1-t0
        self.assertAlmostEqual(dt, 0.1, places=2)

    def testMultiFuture(self):
        @tasklets.tasklet
        def foo(dt):
            yield tasklets.sleep(dt)
            raise tasklets.Return('foo-%s' % dt)
        @tasklets.tasklet
        def bar(n):
            for i in range(n):
                yield tasklets.sleep(0.01)
            raise tasklets.Return('bar-%d' % n)
        bar5 = bar(5)
        futs = [foo(0.05), foo(0.01), foo(0.03), bar(3), bar5, bar5]
        mfut = tasklets.MultiFuture()
        for fut in futs:
            mfut.add_dependent(fut)
        mfut.complete()
        results = mfut.get_result()
        self.assertEqual(set(results),
                         set(['foo-0.01', 'foo-0.03', 'foo-0.05',
                              'bar-3', 'bar-5']))

    def testMultiFuture_PreCompleted(self):
        @tasklets.tasklet
        def foo():
            yield tasklets.sleep(0.01)
            raise tasklets.Return(42)
        mfut = tasklets.MultiFuture()
        dep = foo()
        dep.wait()
        mfut.add_dependent(dep)
        mfut.complete()
        eventloop.run()
        self.assertTrue(mfut.done())
        self.assertEqual(mfut.get_result(), [42])

    def testMultiFuture_SetException(self):
        mf = tasklets.MultiFuture()
        f1 = Future()
        f2 = Future()
        f3 = Future()
        f2.set_result(2)
        mf.putq(f1)
        f1.set_result(1)
        mf.putq(f2)
        mf.putq(f3)
        mf.putq(4)
        self.ev.run()
        mf.set_exception(ZeroDivisionError())
        f3.set_result(3)
        self.ev.run()
        self.assertRaises(ZeroDivisionError, mf.get_result)

    def testMultiFuture_ItemException(self):
        mf = tasklets.MultiFuture()
        f1 = Future()
        f2 = Future()
        f3 = Future()
        f2.set_result(2)
        mf.putq(f1)
        f1.set_exception(ZeroDivisionError())
        mf.putq(f2)
        mf.putq(f3)
        f3.set_result(3)
        self.ev.run()
        mf.complete()
        self.assertRaises(ZeroDivisionError, mf.get_result)

    def testMultiFuture_Repr(self):
        # Snapshot repr() at each stage of the future's life cycle.
        mf = tasklets.MultiFuture('info')
        r1 = repr(mf)
        mf.putq(1)
        r2 = repr(mf)
        f2 = Future()
        f2.set_result(2)
        mf.putq(2)
        r3 = repr(mf)
        self.ev.run()
        r4 = repr(mf)
        f3 = Future()
        mf.putq(f3)
        r5 = repr(mf)
        mf.complete()
        r6 = repr(mf)
        f3.set_result(3)
        self.ev.run()
        r7 = repr(mf)
        for r in r1, r2, r3, r4, r5, r6, r7:
            self.assertTrue(
                re.match(
                    r'<MultiFuture [\da-f]+ created by '
                    r'testMultiFuture_Repr\(tasklets_test.py:\d+\) for info; ',
                    r))
            if r is r7:
                self.assertTrue('result' in r)
            else:
                self.assertTrue('pending' in r)

    def testQueueFuture(self):
        q = tasklets.QueueFuture()
        @tasklets.tasklet
        def produce_one(i):
            yield tasklets.sleep(i * 0.01)
            raise tasklets.Return(i)
        @tasklets.tasklet
        def producer():
            q.putq(0)
            for i in range(1, 10):
                q.add_dependent(produce_one(i))
            q.complete()
        @tasklets.tasklet
        def consumer():
            for i in range(10):
                val = yield q.getq()
                self.assertEqual(val, i)
            yield q
            self.assertRaises(EOFError, q.getq().get_result)
        @tasklets.tasklet
        def foo():
            yield producer(), consumer()
        foo().get_result()

    def testQueueFuture_Complete(self):
        qf = tasklets.QueueFuture()
        qf.putq(1)
        f2 = Future()
        qf.putq(f2)
        self.ev.run()
        g1 = qf.getq()
        g2 = qf.getq()
        g3 = qf.getq()
        f2.set_result(2)
        self.ev.run()
        qf.complete()
        self.ev.run()
        self.assertEqual(g1.get_result(), 1)
        self.assertEqual(g2.get_result(), 2)
        self.assertRaises(EOFError, g3.get_result)
        self.assertRaises(EOFError, qf.getq().get_result)

    def testQueueFuture_SetException(self):
        qf = tasklets.QueueFuture()
        f1 = Future()
        f1.set_result(1)
        qf.putq(f1)
        qf.putq(f1)
        self.ev.run()
        qf.putq(2)
        self.ev.run()
        f3 = Future()
        f3.set_exception(ZeroDivisionError())
        qf.putq(f3)
        self.ev.run()
        f4 = Future()
        qf.putq(f4)
        self.ev.run()
        qf.set_exception(KeyError())
        f4.set_result(4)
        self.ev.run()
        self.assertRaises(KeyError, qf.get_result)
        # Futures are returned in the order of completion, which should be
        # f1, f2, f3, f4. These produce 1, 2, ZeroDivisionError, 4,
        # respectively. After that KeyError (the exception set on qf
        # itself) is raised.
        self.assertEqual(qf.getq().get_result(), 1)
        self.assertEqual(qf.getq().get_result(), 2)
        self.assertRaises(ZeroDivisionError, qf.getq().get_result)
        self.assertEqual(qf.getq().get_result(), 4)
        self.assertRaises(KeyError, qf.getq().get_result)
        self.assertRaises(KeyError, qf.getq().get_result)

    def testQueueFuture_SetExceptionAlternative(self):
        qf = tasklets.QueueFuture()
        g1 = qf.getq()
        qf.set_exception(KeyError())
        self.ev.run()
        self.assertRaises(KeyError, g1.get_result)

    def testQueueFuture_ItemException(self):
        qf = tasklets.QueueFuture()
        qf.putq(1)
        f2 = Future()
        qf.putq(f2)
        f3 = Future()
        f3.set_result(3)
        self.ev.run()
        qf.putq(f3)
        self.ev.run()
        f4 = Future()
        f4.set_exception(ZeroDivisionError())
        self.ev.run()
        qf.putq(f4)
        f5 = Future()
        qf.putq(f5)
        self.ev.run()
        qf.complete()
        self.ev.run()
        f2.set_result(2)
        self.ev.run()
        f5.set_exception(KeyError())
        self.ev.run()
        # Futures are returned in the order of completion, which should be
        # f1, f3, f4, f2, f5. These produce 1, 3, ZeroDivisionError, 2,
        # KeyError, respectively. After that EOFError is raised.
        self.assertEqual(qf.getq().get_result(), 1)
        self.assertEqual(qf.getq().get_result(), 3)
        self.assertRaises(ZeroDivisionError, qf.getq().get_result)
        self.assertEqual(qf.getq().get_result(), 2)
        self.assertRaises(KeyError, qf.getq().get_result)
        self.assertRaises(EOFError, qf.getq().get_result)
        self.assertRaises(EOFError, qf.getq().get_result)

    def testSerialQueueFuture(self):
        q = tasklets.SerialQueueFuture()
        @tasklets.tasklet
        def produce_one(i):
            yield tasklets.sleep(random.randrange(10) * 0.01)
            raise tasklets.Return(i)
        @tasklets.tasklet
        def producer():
            for i in range(10):
                q.add_dependent(produce_one(i))
            q.complete()
        @tasklets.tasklet
        def consumer():
            for i in range(10):
                val = yield q.getq()
                self.assertEqual(val, i)
            yield q
            self.assertRaises(EOFError, q.getq().get_result)
            yield q
        @tasklets.synctasklet
        def foo():
            yield producer(), consumer()
        foo()

    def testSerialQueueFuture_Complete(self):
        sqf = tasklets.SerialQueueFuture()
        g1 = sqf.getq()
        sqf.complete()
        self.assertRaises(EOFError, g1.get_result)

    def testSerialQueueFuture_SetException(self):
        sqf = tasklets.SerialQueueFuture()
        g1 = sqf.getq()
        sqf.set_exception(KeyError())
        self.assertRaises(KeyError, g1.get_result)

    def testSerialQueueFuture_ItemException(self):
        sqf = tasklets.SerialQueueFuture()
        g1 = sqf.getq()
        f1 = Future()
        sqf.putq(f1)
        sqf.complete()
        f1.set_exception(ZeroDivisionError())
        self.assertRaises(ZeroDivisionError, g1.get_result)

    def testSerialQueueFuture_PutQ_1(self):
        sqf = tasklets.SerialQueueFuture()
        f1 = Future()
        sqf.putq(f1)
        sqf.complete()
        f1.set_result(1)
        self.assertEqual(sqf.getq().get_result(), 1)

    def testSerialQueueFuture_PutQ_2(self):
        sqf = tasklets.SerialQueueFuture()
        sqf.putq(1)
        sqf.complete()
        self.assertEqual(sqf.getq().get_result(), 1)

    def testSerialQueueFuture_PutQ_3(self):
        sqf = tasklets.SerialQueueFuture()
        g1 = sqf.getq()
        sqf.putq(1)
        sqf.complete()
        self.assertEqual(g1.get_result(), 1)

    def testSerialQueueFuture_PutQ_4(self):
        sqf = tasklets.SerialQueueFuture()
        g1 = sqf.getq()
        f1 = Future()
        sqf.putq(f1)
        sqf.complete()
        f1.set_result(1)
        self.assertEqual(g1.get_result(), 1)

    def testSerialQueueFuture_GetQ(self):
        sqf = tasklets.SerialQueueFuture()
        sqf.set_exception(KeyError())
        self.assertRaises(KeyError, sqf.getq().get_result)

    def testReducingFuture(self):
        def reducer(arg):
            return sum(arg)
        rf = tasklets.ReducingFuture(reducer, batch_size=10)
        for i in range(10):
            rf.putq(i)
        for i in range(10, 20):
            f = Future()
            rf.putq(f)
            f.set_result(i)
        rf.complete()
        self.assertEqual(rf.get_result(), sum(range(20)))

    def testReducingFuture_Empty(self):
        def reducer(arg):
            self.fail()
        rf = tasklets.ReducingFuture(reducer)
        rf.complete()
        self.assertEqual(rf.get_result(), None)

    def testReducingFuture_OneItem(self):
        def reducer(arg):
            self.fail()
        rf = tasklets.ReducingFuture(reducer)
        rf.putq(1)
        rf.complete()
        self.assertEqual(rf.get_result(), 1)

    def testReducingFuture_ItemException(self):
        def reducer(arg):
            return sum(arg)
        rf = tasklets.ReducingFuture(reducer)
        f1 = Future()
        f1.set_exception(ZeroDivisionError())
        rf.putq(f1)
        rf.complete()
        self.assertRaises(ZeroDivisionError, rf.get_result)

    def testReducingFuture_ReducerException_1(self):
        def reducer(arg):
            raise ZeroDivisionError
        rf = tasklets.ReducingFuture(reducer)
        rf.putq(1)
        rf.putq(1)
        rf.complete()
        self.assertRaises(ZeroDivisionError, rf.get_result)

    def testReducingFuture_ReducerException_2(self):
        def reducer(arg):
            raise ZeroDivisionError
        rf = tasklets.ReducingFuture(reducer, batch_size=2)
        rf.putq(1)
        rf.putq(1)
        rf.putq(1)
        rf.complete()
        self.assertRaises(ZeroDivisionError, rf.get_result)

    def testReducingFuture_ReducerFuture_1(self):
        def reducer(arg):
            f = Future()
            f.set_result(sum(arg))
            return f
        rf = tasklets.ReducingFuture(reducer, batch_size=2)
        rf.putq(1)
        rf.putq(1)
        rf.complete()
        self.assertEqual(rf.get_result(), 2)

    def testReducingFuture_ReducerFuture_2(self):
        # Weird hack to reach _internal_add_dependent() call in _mark_finished().
        def reducer(arg):
            res = sum(arg)
            if len(arg) < 3:
                f = Future()
                f.set_result(res)
                res = f
            return res
        rf = tasklets.ReducingFuture(reducer, batch_size=3)
        rf.putq(1)
        rf.putq(1)
        rf.putq(1)
        rf.putq(1)
        rf.complete()
        self.assertEqual(rf.get_result(), 4)

    def testGetReturnValue(self):
        r0 = tasklets.Return()
        r1 = tasklets.Return(42)
        r2 = tasklets.Return(42, 'hello')
        r3 = tasklets.Return((1, 2, 3))
        self.assertEqual(tasklets.get_return_value(r0), None)
        self.assertEqual(tasklets.get_return_value(r1), 42)
        self.assertEqual(tasklets.get_return_value(r2), (42, 'hello'))
        self.assertEqual(tasklets.get_return_value(r3), (1, 2, 3))

    def testTasklets_Basic(self):
        @tasklets.tasklet
        def t1():
            a = yield t2(3)
            b = yield t3(2)
            raise tasklets.Return(a + b)
        @tasklets.tasklet
        def t2(n):
            raise tasklets.Return(n)
        @tasklets.tasklet
        def t3(n):
            return n
        x = t1()
        self.assertTrue(isinstance(x, tasklets.Future))
        y = x.get_result()
        self.assertEqual(y, 5)

    def testTasklets_Raising(self):
        @tasklets.tasklet
        def t1():
            f = t2(True)
            try:
                a = yield f
            except RuntimeError, err:
                self.assertEqual(f.get_exception(), err)
                raise tasklets.Return(str(err))
        @tasklets.tasklet
        def t2(error):
            if error:
                raise RuntimeError('hello')
            else:
                yield tasklets.Future()
        x = t1()
        y = x.get_result()
        self.assertEqual(y, 'hello')

    def testTasklets_YieldRpcs(self):
        @tasklets.tasklet
        def main_tasklet():
            rpc1 = self.conn.async_get(None, [])
            rpc2 = self.conn.async_put(None, [])
            res1 = yield rpc1
            res2 = yield rpc2
            raise tasklets.Return(res1, res2)
        f = main_tasklet()
        result = f.get_result()
        self.assertEqual(result, ([], []))

    def testTasklet_YieldTuple(self):
        @tasklets.tasklet
        def fib(n):
            if n <= 1:
                raise tasklets.Return(n)
            a, b = yield fib(n - 1), fib(n - 2)
            # print 'fib(%r) = %r + %r = %r' % (n, a, b, a + b)
            self.assertTrue(a >= b, (a, b))
            raise tasklets.Return(a + b)
        fut = fib(10)
        val = fut.get_result()
        self.assertEqual(val, 55)

    def testTasklet_YieldTupleError(self):
        @tasklets.tasklet
        def good():
            yield tasklets.sleep(0)
        @tasklets.tasklet
        def bad():
            1/0
            yield tasklets.sleep(0)
        @tasklets.tasklet
        def foo():
            try:
                yield good(), bad(), good()
                self.assertFalse('Should have raised ZeroDivisionError')
            except ZeroDivisionError:
                pass
        foo().check_success()

    def testTasklet_YieldTupleTypeError(self):
        @tasklets.tasklet
        def good():
            yield tasklets.sleep(0)
        @tasklets.tasklet
        def bad():
            1/0
            yield tasklets.sleep(0)
        @tasklets.tasklet
        def foo():
            try:
                yield good(), bad(), 42
            except AssertionError:  # TODO: Maybe TypeError?
                pass
            else:
                self.assertFalse('Should have raised AssertionError')
        foo().check_success()
class TracebackTests(unittest.TestCase):
    """Checks that errors result in reasonable tracebacks."""

    def testBasicError(self):
        # Record the frame at each level so we can compare against the
        # traceback of an exception raised four tasklets deep.
        frames = [sys._getframe()]
        @tasklets.tasklet
        def level3():
            frames.append(sys._getframe())
            raise RuntimeError('hello')
            yield  # Unreachable, but makes this function a generator.
        @tasklets.tasklet
        def level2():
            frames.append(sys._getframe())
            yield level3()
        @tasklets.tasklet
        def level1():
            frames.append(sys._getframe())
            yield level2()
        @tasklets.tasklet
        def level0():
            frames.append(sys._getframe())
            yield level1()
        fut = level0()
        try:
            fut.check_success()
        except RuntimeError, err:
            _, _, tb = sys.exc_info()
            self.assertEqual(str(err), 'hello')
            tbframes = []
            while tb is not None:
                # It's okay if some _help_tasklet_along frames are present.
                if tb.tb_frame.f_code.co_name != '_help_tasklet_along':
                    tbframes.append(tb.tb_frame)
                tb = tb.tb_next
            self.assertEqual(frames, tbframes)
        else:
            self.fail('Expected RuntimeError not raised')
def main():
    # Discover and run all test cases defined above.
    unittest.main()


if __name__ == '__main__':
    main()
| Python |
"""A simple guestbook app to test parts of NDB end-to-end."""
import cgi
import logging
import re
import sys
import time
from google.appengine.api import urlfetch
from google.appengine.api import users
from google.appengine.datastore import entity_pb
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from google.appengine.datastore import datastore_query
from google.appengine.datastore import datastore_rpc
from ndb import context
from ndb import eventloop
from ndb import model
from ndb import tasklets
# HTML template for the front page; filled in with %-formatting
# (nickname, login, logout).
HOME_PAGE = """
<script>
function focus() {
textarea = document.getElementById('body');
textarea.focus();
}
</script>
<body onload=focus()>
Nickname: <a href="/account">%(nickname)s</a> |
<a href="%(login)s">login</a> |
<a href="%(logout)s">logout</a>
<form method=POST action=/>
<!-- TODO: XSRF protection -->
<input type=text id=body name=body size=60>
<input type=submit>
</form>
</body>
"""

# HTML template for the account page; filled in with %-formatting
# (nickname, logout, email, proposed_nickname, action).
ACCOUNT_PAGE = """
<body>
Nickname: <a href="/account">%(nickname)s</a> |
<a href="%(logout)s">logout</a>
<form method=POST action=/account>
<!-- TODO: XSRF protection -->
Email: %(email)s<br>
New nickname:
<input type=text name=nickname size=20 value=%(proposed_nickname)s><br>
<input type=submit name=%(action)s value="%(action)s Account">
<input type=submit name=delete value="Delete Account">
<a href=/>back to home page</a>
</form>
</body>
"""
class Account(model.Model):
    """User account."""
    email = model.StringProperty()     # Account email address.
    userid = model.StringProperty()    # Users API user_id(); also the key id.
    nickname = model.StringProperty()  # Optional display name.
class Message(model.Model):
    """Guestbook message."""
    body = model.StringProperty()    # Message text.
    when = model.FloatProperty()     # Posting time (time.time() timestamp).
    userid = model.StringProperty()  # Author's user_id; None when anonymous.
class UrlSummary(model.Model):
    """Metadata about a URL."""
    MAX_AGE = 60  # Seconds before a cached summary is considered stale.
    url = model.StringProperty()    # The URL itself (also used as key id).
    title = model.StringProperty()  # Extracted page <title>; may be empty.
    when = model.FloatProperty()    # Fetch time (time.time() timestamp).
def account_key(userid):
    """Build the datastore Key of the Account entity for *userid*."""
    flat_path = ['Account', userid]
    return model.Key(flat=flat_path)
def get_account(userid):
    """Return a Future for an Account."""
    key = account_key(userid)
    return key.get_async()
@tasklets.tasklet
def get_nickname(userid):
    """Return a Future for a nickname from an account.

    Falls back to 'Unregistered' when no Account entity exists.
    """
    account = yield get_account(userid)
    if account:
        nickname = account.nickname or account.email
    else:
        nickname = 'Unregistered'
    raise tasklets.Return(nickname)
class HomePage(webapp.RequestHandler):
    """Front page: login links, post form, and the message list."""

    @context.toplevel
    def get(self):
        nickname = 'Anonymous'
        user = users.get_current_user()
        if user is not None:
            nickname = yield get_nickname(user.user_id())
        values = {'nickname': nickname,
                  'login': users.create_login_url('/'),
                  'logout': users.create_logout_url('/'),
                  }
        self.response.out.write(HOME_PAGE % values)
        # Render messages; map_async applies _hp_callback to each result.
        qry, options = self._make_query()
        pairs = yield qry.map_async(self._hp_callback, options=options)
        for key, text in pairs:
            self.response.out.write(text)

    def _make_query(self):
        # Newest messages first.
        qry = Message.query().order(-Message.when)
        options = datastore_query.QueryOptions(batch_size=13, limit=43)
        return qry, options

    @tasklets.tasklet
    def _hp_callback(self, message):
        # Format one message as HTML, resolving the author's nickname
        # and linkifying the first URL found in the body.
        nickname = 'Anonymous'
        if message.userid:
            nickname = yield get_nickname(message.userid)
        # Check if there's an URL.
        body = message.body
        m = re.search(r'(?i)\bhttps?://\S+[^\s.,;\]\}\)]', body)
        if not m:
            escbody = cgi.escape(body)
        else:
            url = m.group()
            pre = body[:m.start()]
            post = body[m.end():]
            title = ''
            # Look up (or refresh) the cached UrlSummary for this URL.
            key = model.Key(flat=[UrlSummary.GetKind(), url])
            summary = yield key.get_async()
            if not summary or summary.when < time.time() - UrlSummary.MAX_AGE:
                # Missing or stale: fetch the page and extract its <title>.
                rpc = urlfetch.create_rpc(deadline=0.5)
                urlfetch.make_fetch_call(rpc, url, allow_truncated=True)
                t0 = time.time()
                result = yield rpc
                t1 = time.time()
                logging.warning('url=%r, status=%r, dt=%.3f',
                                url, result.status_code, t1-t0)
                if result.status_code == 200:
                    bodytext = result.content
                    m = re.search(r'(?i)<title>([^<]+)</title>', bodytext)
                    if m:
                        title = m.group(1).strip()
                summary = UrlSummary(key=key, url=url, title=title,
                                     when=time.time())
                yield summary.put_async()
            hover = ''
            if summary.title:
                hover = ' title="%s"' % summary.title
            escbody = (cgi.escape(pre) +
                       '<a%s href="%s">' % (hover, cgi.escape(url)) +
                       cgi.escape(url) + '</a>' + cgi.escape(post))
        text = '%s - %s - %s<br>' % (cgi.escape(nickname),
                                     time.ctime(message.when),
                                     escbody)
        # Key the result by negated timestamp so newest sorts first.
        raise tasklets.Return((-message.when, text))

    @context.toplevel
    def post(self):
        # TODO: XSRF protection.
        body = self.request.get('body', '').strip()
        if body:
            userid = None
            user = users.get_current_user()
            if user:
                userid = user.user_id()
            message = Message(body=body, when=time.time(), userid=userid)
            yield message.put_async()
        self.redirect('/')
class AccountPage(webapp.RequestHandler):
    """Create/update/delete the Account record of the signed-in user."""

    @context.toplevel
    def get(self):
        user = users.get_current_user()
        if not user:
            self.redirect(users.create_login_url('/account'))
            return
        email = user.email()
        action = 'Create'
        # Fetch the account and the nickname concurrently.
        account, nickname = yield (get_account(user.user_id()),
                                   get_nickname(user.user_id()))
        if account is not None:
            action = 'Update'
        if account:
            proposed_nickname = account.nickname or account.email
        else:
            proposed_nickname = email
        values = {'email': email,
                  'nickname': nickname,
                  'proposed_nickname': proposed_nickname,
                  'login': users.create_login_url('/'),
                  'logout': users.create_logout_url('/'),
                  'action': action,
                  }
        self.response.out.write(ACCOUNT_PAGE % values)

    @context.toplevel
    def post(self):
        # TODO: XSRF protection.
        @tasklets.tasklet
        def helper():
            user = users.get_current_user()
            if not user:
                self.redirect(users.create_login_url('/account'))
                return
            account = yield get_account(user.user_id())
            if self.request.get('delete'):
                if account:
                    yield account.key.delete_async()
                self.redirect('/account')
                return
            if not account:
                account = Account(key=account_key(user.user_id()),
                                  email=user.email(), userid=user.user_id())
            nickname = self.request.get('nickname')
            if nickname:
                account.nickname = nickname
            yield account.put_async()
            self.redirect('/account')
        # Run the whole read-modify-write inside a transaction.
        yield model.transaction_async(helper)
# URL routing table for the guestbook WSGI application.
urls = [
    ('/', HomePage),
    ('/account', AccountPage),
]

app = webapp.WSGIApplication(urls)
def main():
    # CGI entry point: serve the WSGI app defined above.
    util.run_wsgi_app(app)


if __name__ == '__main__':
    main()
| Python |
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
from ndb import model
class Greeting(model.Model):
    """Minimal demo entity."""
    message = model.StringProperty()
    userid = model.IntegerProperty()  # Not used here, but later
class HomePage(webapp.RequestHandler):
    """Serves the stored greeting, creating it on first request."""
    def get(self):
        # get_or_insert makes the first request create the entity.
        msg = Greeting.get_or_insert('hello', message='Hello world')
        self.response.out.write(msg.message)
# Route every path to the single handler.
urls = [('/.*', HomePage)]

app = webapp.WSGIApplication(urls)


def main():
    # CGI entry point.
    util.run_wsgi_app(app)


if __name__ == '__main__':
    main()
| Python |
"""Quick hack to (a) demo the synchronous APIs and (b) dump all records."""
import time
from demo.main import model, context, tasklets, Message, Account, account_key
class LogRecord(model.Model):
    """Records one run of this dump script."""
    timestamp = model.FloatProperty()  # time.time() of the run.
@context.toplevel
def main():
    # CGI entry point: dump all messages and their authors as plain text,
    # deliberately using the synchronous APIs.
    print 'Content-type: text/plain'
    print
    qry = Message.query().order(-Message.when)
    for msg in qry:
        print time.ctime(msg.when), repr(msg.body)
        if msg.userid is None:
            print ' * Anonymous'
        else:
            act = account_key(msg.userid).get()
            if act is None:
                print ' * Bad account', msg.userid
            else:
                print ' * Account', act.nickname, act.email, msg.userid
    log = LogRecord(timestamp=time.time())
    # NOTE(review): the returned future is never yielded; presumably the
    # write is flushed by @context.toplevel's final eventloop.run() --
    # confirm.
    log.put_async()


if __name__ == '__main__':
    main()
| Python |
# This file intentionally left blank.
| Python |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
# Root URLconf: delegate everything under /xnmemo/ to the xnmemo app.
rootpatterns = patterns('',
    (r'^xnmemo/', include('apps.xnmemo.urls')),
)
| Python |
# -*- coding: utf-8 -*-
from django.db.models import permalink, signals
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode,smart_str
from google.appengine.ext import db
from django.contrib.auth.models import User
import re
| Python |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
# URL routes for the xnmemo app; all view names resolve inside
# apps.xnmemo.views.
urlpatterns = patterns('apps.xnmemo.views',
    #~ (r'^$', 'xnmemo_index'),
    (r'^has_scheduled_items/$', 'has_scheduled_items'),
    (r'^get_items/$', 'get_items'),
    (r'^update_item/$', 'update_item'),
    (r'^skip_item/$', 'skip_item'),
    (r'^mark_items/$', 'mark_items'),
    (r'^mark_items_worker/$', 'mark_items_worker'),
    (r'^get_stats/$', 'get_stats'),
    (r'^get_learning_progress/$', 'get_learning_progress'),
    (r'^update_learning_progress/$', 'update_learning_progress'),
    (r'^change_deck/$', 'change_deck'),
    (r'^fix_learning_progress/$', 'fix_learning_progress'),
    (r'^fix_learning_progress_worker/$', 'fix_learning_progress_worker'),
    (r'^fix_learning_record/$', 'fix_learning_record'),
    (r'^convert_learning_progress/$', 'convert_learning_progress'),
    (r'^flush_cache/$', 'flush_cache'),
    #~ (r'^(?P<lesson_id>\d+)/$', 'lesson_detail'),
)
| Python |
from django import template
register = template.Library()
@register.filter
def get_deck_id(deck):
    """Template filter exposing deck._id (workaround: templates cannot
    access leading-underscore attributes directly)."""
    return deck._id
| Python |
#!/usr/bin/python
# -*- coding: utf-8 -*-
import unittest
class RangeList():
    """Encode/decode sorted integer lists as (start, -1, end) triplets.

    Each maximal run of consecutive integers start..end is stored as the
    three flat list elements [start, -1, end]; the -1 acts as a
    sanity-check delimiter when decoding.
    """

    @staticmethod
    def encode(_list):
        """Encode a list of ints into a flat rangelist.

        The input list is NOT mutated (the previous implementation sorted
        it in place as a side effect).

        Args:
          _list: List of integers, in any order.

        Returns:
          Flat list of (start, -1, end) triplets covering the input.
        """
        if len(_list) == 0:
            return []
        ordered = sorted(_list)  # Work on a copy; don't mutate the caller's list.
        rangelist = []
        prev = ordered[0]
        start = end = prev
        for i in ordered[1:]:
            if i == prev + 1:
                # Still consecutive: extend the current run.
                prev = end = i
            else:
                # Gap (or duplicate): close the current run, start a new one.
                rangelist += [start, -1, end]
                start = end = prev = i
        rangelist += [start, -1, end]
        return rangelist

    @staticmethod
    def decode(rangelist):
        """Decode a flat rangelist back into the full list of ints.

        Args:
          rangelist: Flat list of (start, -1, end) triplets.

        Returns:
          The expanded list of integers.

        Raises:
          ValueError: if the length is not a multiple of 3, or a triplet
            has a wrong delimiter or end < start.  (Truncated input used
            to surface as IndexError.)
        """
        if len(rangelist) % 3 != 0:
            raise ValueError('Rangelist length %d is not a multiple of 3'
                             % len(rangelist))
        _list = []
        for i in range(0, len(rangelist), 3):
            start = rangelist[i]
            delimiter = rangelist[i + 1]
            end = rangelist[i + 2]
            if delimiter != -1 or end < start:
                # Report the triplet in its stored order (start, delim, end);
                # the old message printed the values in a misleading order.
                raise ValueError('Invalid triplet in rangelist: (%d,%d,%d)'
                                 % (start, delimiter, end))
            _list += range(start, end + 1)
        return _list
class TestRangeList(unittest.TestCase):
    """Smoke and round-trip tests for RangeList.encode/decode."""

    def setUp(self):
        # Fixtures: encode-only inputs, decode-only inputs, and sequences
        # used for the full encode->decode round trip.
        self.encode_seqs = [[1,2,3,4,5,8,10,12,13,14,15]+range(100,10000),[]]
        self.decode_seqs = [[1,-1,5,6,-1,6,7,-1,77],[]]
        self.seqs = [[1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 39, 40, 41, 42, 43, 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, 117, 118, 5519, 5520, 5521, 5522, 5523, 5524, 5525, 5526, 5527, 5528, 5529, 5530, 5531, 5532, 5533, 5534, 5535, 5536, 5537, 5538, 5539, 5540, 5541, 5542, 5543, 5544, 5545, 5546, 5547, 5548, 5549, 5550, 5551, 5552, 5553, 5554, 5555, 5556, 5557, 5558, 5559, 5560, 5561, 5562, 5563, 5564, 5565, 5566, 5567, 5568, 5569, 5570, 5571, 5572, 5573, 5574, 5575, 5576, 5577, 5578, 5579, 5580, 5581, 5582, 5583, 5584, 5585, 5586, 5587, 5588, 5589, 5590, 5591, 5592, 5593, 5594, 5595, 5596, 5597, 5598, 5599, 5600, 5601, 5602, 5603, 5604, 5605, 5606, 5607, 5608, 5609, 5610, 5611, 5612, 5613, 5614, 5615, 5616, 5617, 5618, 5619, 5620, 5621, 5622, 5623, 5624, 5625, 5626, 5627, 5628, 5629, 5630, 5631, 5632, 5633, 5634, 5635, 5636, 5637, 5638, 5639, 5640, 5641, 5642, 5643, 5644, 5645, 5646, 5647, 5648, 5649, 5650, 5651, 5652, 5653, 5654, 5655, 5656, 5657, 5658, 5659, 6480, 6481, 6482, 6483, 6484, 6485, 6486, 6487, 6488, 6489, 6490, 6491, 6492, 6493, 6494, 6495, 6496, 6497, 6498, 6499, 6500, 6501, 6502, 6503, 6504, 6505, 6506, 6507, 6508, 6509, 6510, 6511, 6512, 6513, 6514, 6515, 6516, 6517, 6518, 6519, 6520, 6521, 6522, 6523, 6524, 6525, 6526, 6527, 6528, 6529, 6530, 6531, 6532, 6533, 6534, 6535, 6536, 6537, 6538, 6539, 6540, 6541, 6542, 6543, 6544, 6545, 6546, 6547, 6548, 6549, 6550, 6551, 6552, 6553, 6554, 6895, 6896, 6897, 6898, 6899, 6900, 6901, 6902, 6903, 6904]]

    def test_encode(self):
        # Smoke test: encode must not raise; output is printed, not checked.
        for seq in self.encode_seqs:
            rl = RangeList.encode(seq)
            print rl
            #~ self.assertTrue(rl)

    def test_decode(self):
        # Smoke test: decode must not raise; output is printed, not checked.
        for seq in self.decode_seqs:
            lst = RangeList.decode(seq)
            print lst
            #~ self.assertTrue(lst)

    def test_encode_decode(self):
        # Round trip: decode(encode(seq)) must reproduce seq exactly.
        for seq in self.seqs:
            rl = RangeList.encode(seq)
            print rl
            lst = RangeList.decode(rl)
            self.assertEqual(seq, lst)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| Python |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Bill Chen <pro711@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import datetime, logging
from functools import wraps
from django.core.urlresolvers import reverse
from django.contrib.auth.models import User
from django.contrib.auth.decorators import login_required
from django.http import HttpResponseRedirect,Http404, HttpResponseForbidden,HttpResponse,HttpResponseNotFound, HttpResponseServerError
from django.utils.translation import ugettext_lazy as _
from django.utils.encoding import force_unicode,smart_str
from django.utils import simplejson
from django.views.decorators.http import require_GET, require_POST
from google.appengine.ext import db
from google.appengine.api import urlfetch, quota, memcache
from google.appengine.api.labs import taskqueue
from ragendja.template import render_to_response
from apps.core.models import Card, Deck, LearningRecord, LearningProgress, CST
from rangelist import RangeList
def range_segment(lst, segment_range=20):
    """Split a list of integers into sorted contiguous-ish segments.

    Yields consecutive slices of the sorted values in which the spread
    (last - first) stays below `segment_range`.  Used to batch card ids
    into narrow id windows for datastore range queries and task payloads.

    Fixes over the previous version:
    - an empty input no longer yields a single empty segment (which made
      downstream '_'.join/int() consumers crash);
    - the caller's list is no longer sorted in place.
    """
    ordered = sorted(lst)  # work on a copy; do not mutate the caller's list
    size = len(ordered)
    if size == 0:
        return
    x, y = 0, 0
    while y < size:
        if ordered[y] - ordered[x] < segment_range:
            y += 1
        else:
            yield ordered[x:y]
            x = y
    # Final segment is always non-empty here (x < y for non-empty input).
    yield ordered[x:y]
def http_get(f):
    """Decorator restricting a view to HTTP GET.

    Non-GET requests receive a 403 response instead of reaching the view.
    Fix: removed the dead inner function `forbidden`, which was defined
    but never referenced.
    """
    @wraps(f)
    def wrapper(request, *args, **kwds):
        if request.method == 'GET':
            return f(request, *args, **kwds)
        else:
            return HttpResponseForbidden('ONLY GET IS ALLOWED')
    return wrapper
def require_login(f):
    """Decorator for AJAX views: reject anonymous or inactive users.

    Rejected requests get a JSON body {'status': 'failed', 'message': ...}
    rather than a redirect, since callers consume the response as JSON.
    """
    @wraps(f)
    def wrapper(request, *args, **kwds):
        def _reject(reason):
            # Shared failure payload for both rejection branches.
            return HttpResponse(simplejson.dumps({'status': 'failed',
                                                  'message': reason}))
        user = request.user
        if not user.is_authenticated():
            return _reject('user not authenticated')
        if not user.is_active:
            return _reject('user not active')
        return f(request, *args, **kwds)
    return wrapper
@require_GET
@require_login
def has_scheduled_items(request):
    """AJAX view: report how many items are due for review.

    GET params: limit (optional cap, default 100).
    Returns JSON with scheduled_items, scheduled_items_count (both the
    same count, kept for client compatibility) and the due card ids.

    Fix: the memcache branches were inverted -- the old code re-added the
    progress only on a cache *hit* and never cached it after a datastore
    load (compare the correct pattern in get_items).
    """
    limit = int(request.GET.get('limit', 0))
    size = limit if limit else 100
    result = {}
    # get learning progress: memcache first, fall back to datastore
    learning_progress = memcache.get('learning_progress'+request.user.username)
    if learning_progress is None:
        learning_progress = LearningProgress.gql('WHERE _user = :1 AND active = TRUE', request.user).get()
        if not learning_progress:
            # No progress at all: nothing can be scheduled.
            result['scheduled_items'] = 0
            result['scheduled_items_count'] = 0
            return HttpResponse(simplejson.dumps(result))
        # cache the freshly loaded progress for an hour
        memcache.add('learning_progress'+request.user.username,learning_progress,3600)
    s_items = LearningRecord.get_scheduled_items(request.user,learning_progress._deck,0,size,0,'')
    result['scheduled_items_count'] = len(s_items)
    result['scheduled_items'] = len(s_items)
    result['scheduled_items_details'] = [item.card_id for item in s_items]
    return HttpResponse(simplejson.dumps(result))
@require_GET
@require_login
def get_items(request):
result = {}
result['records'] = []
rr = result['records'] # shortcut
today = datetime.datetime.now(tz=CST).date()
size = int(request.GET.get('size', 20))
# quota
start = quota.get_request_cpu_usage()
# get learning progress
learning_progress = memcache.get('learning_progress'+request.user.username)
if learning_progress is not None:
pass
else:
learning_progress = LearningProgress.gql('WHERE _user = :1 AND active = TRUE', request.user).get()
if not learning_progress:
# get deck first
deck = Deck.gql('WHERE _id = 1').get() #FIXME: GRE
if not deck:
logging.error('deck not found')
raise Exception, 'deck not found'
learning_progress = LearningProgress.create(request.user, deck)
learning_progress.active = True
learning_progress.put()
# put into memcache
memcache.add('learning_progress'+request.user.username,learning_progress,3600)
deck = learning_progress._deck
end = quota.get_request_cpu_usage()
logging.info("get learning_progress cost %d megacycles." % (end - start))
# quota
start = quota.get_request_cpu_usage()
# get scheduled items first
records = LearningRecord.get_scheduled_items(request.user,deck,0,size,0,'')
if len(records) < size:
# get some new items
new_items_size = size - len(records)
records += LearningRecord.get_new_items(request.user,deck,0,new_items_size,0,'')
end = quota.get_request_cpu_usage()
logging.info("fetch items cost %d megacycles." % (end - start))
# quota
start = quota.get_request_cpu_usage()
# prepare response
record_ids = [i.card_id for i in records]
logging.debug('All: '+str(record_ids))
# check memcache first
cached = []
for i in record_ids:
card = memcache.get(key='mememo_card'+str(i))
if card is not None:
rr.append({'_id':card._id,
'question':card.question,
'answer':card.answer,
'note':card.note,
'deck_id':card.deck_id,
'category':card.category
})
cached.append(i)
record_ids = list(set(record_ids) - set(cached))
logging.debug('fetching cards from datestore: '+str(record_ids))
# otherwise we have to fetch them from datastore
for i in range_segment(record_ids):
if len(i) > 1:
cards = Card.gql('WHERE _id >= :1 and _id <= :2', i[0], i[-1]).fetch(i[-1]-i[0]+1)
# filter out unrelated cards
cards = filter(lambda x:x._id in i, cards)
elif len(i) == 1:
cards = [Card.gql('WHERE _id = :1', i[0]).get()]
else:
continue
# add to memcache
for card in cards:
if not memcache.set(key='mememo_card'+str(card._id), value=card, time=7200):
logging.error('memcache set item failed: '+ str(card._id))
if card:
rr.append({'_id':card._id,
'question':card.question,
'answer':card.answer,
'note':card.note,
'deck_id':card.deck_id,
'category':card.category
})
end = quota.get_request_cpu_usage()
logging.info("prepare response cost %d megacycles." % (end - start))
return HttpResponse(simplejson.dumps(result,sort_keys=False))
def update_item(request):
    '''Update the status of a record.

    GET params: _id (card id), new_grade (0-5 answer grade).  Applies the
    grade via LearningRecord.update_item and returns the recomputed
    scheduling fields as JSON.
    NOTE(review): non-GET requests fall through and return None, which
    Django rejects as a view response.
    '''
    if request.method == 'GET':
        # check whether logged in
        if not request.user.is_authenticated():
            result = { 'status': 'failed',
                       'message': 'user not authenticated' }
            return HttpResponse(simplejson.dumps(result))
        elif not request.user.is_active:
            result = { 'status': 'failed',
                       'message': 'user not active' }
            return HttpResponse(simplejson.dumps(result))
        # -1 doubles as the "missing parameter" sentinel.
        _id = int(request.GET.get('_id', -1))
        new_grade = int(request.GET.get('new_grade', -1))
        if _id == -1 or new_grade == -1:
            result = { 'status': 'failed',
                       'message': 'error: _id or new_grade undefined' }
            return HttpResponse(simplejson.dumps(result))
        record = LearningRecord.gql('WHERE _user = :1 AND card_id = :2', request.user, _id).get()
        if not record:
            result = { 'status': 'failed',
                       'message': 'error: _id not found' }
            return HttpResponse(simplejson.dumps(result))
        if record.update_item(request.user,_id,new_grade):
            # Echo the updated scheduling state back to the client.
            result = { 'status': 'succeed',
                       'message': 'update_item id=%d succeeded.' % (_id,),
                       'record': {
                           'interval': record.interval,
                           'grade' : record.grade,
                           'easiness': record.easiness,
                           'acq_reps': record.acq_reps,
                           'ret_reps': record.ret_reps,
                           'lapses': record.lapses,
                           'acq_reps_since_lapse': record.acq_reps_since_lapse,
                           'ret_reps_since_lapse': record.ret_reps_since_lapse,
                           }
                       }
            return HttpResponse(simplejson.dumps(result))
        else:
            result = { 'status': 'failed',
                       'message': 'error: update_item failed.',
                       }
            return HttpResponse(simplejson.dumps(result))
def skip_item(request):
    '''Skip an item forever (GET only).

    Pushes the record's next repetition ~27 years into the future via
    LearningRecord.skip() and answers with a JSON status blob.  Non-GET
    requests return None, matching the other views in this module.
    '''
    if request.method != 'GET':
        return
    # Reject anonymous / disabled accounts with a JSON failure message.
    if not request.user.is_authenticated():
        return HttpResponse(simplejson.dumps(
            { 'status': 'failed', 'message': 'user not authenticated' }))
    if not request.user.is_active:
        return HttpResponse(simplejson.dumps(
            { 'status': 'failed', 'message': 'user not active' }))
    # -1 doubles as the "missing parameter" sentinel.
    _id = int(request.GET.get('_id', -1))
    if _id == -1:
        return HttpResponse(simplejson.dumps(
            { 'status': 'failed', 'message': 'error: _id undefined' }))
    record = LearningRecord.gql('WHERE _user = :1 AND card_id = :2', request.user, _id).get()
    if not record:
        return HttpResponse(simplejson.dumps(
            { 'status': 'failed', 'message': 'error: _id not found' }))
    if record.skip():
        outcome = { 'status': 'succeed',
                    'message': 'skipping item id=%d succeeded.' % (_id,),
                    }
    else:
        outcome = { 'status': 'failed',
                    'message': 'skipping item id=%d failed.' % (_id,),
                    }
    return HttpResponse(simplejson.dumps(outcome))
def mark_items(request):
    '''Mark items as new.

    Queues creation of LearningRecords for every card of the active deck
    whose question lies lexicographically in (from, to] -- both GET
    params -- capped at MAX_SIZE cards.  Work is sharded into task-queue
    tasks by contiguous id windows (range_segment).
    NOTE(review): non-GET requests fall through and return None.
    '''
    if request.method == 'GET':
        # check whether logged in
        if not request.user.is_authenticated():
            result = { 'status': 'failed',
                       'message': 'user not authenticated' }
            return HttpResponse(simplejson.dumps(result))
        elif not request.user.is_active:
            result = { 'status': 'failed',
                       'message': 'user not active' }
            return HttpResponse(simplejson.dumps(result))
        MAX_SIZE = 500
        w_from = request.GET.get('from', '')
        w_to = request.GET.get('to', '')
        if not w_from or not w_to:
            result = { 'status': 'failed',
                       'message': 'error: from or to undefined' }
            return HttpResponse(simplejson.dumps(result))
        # get learning progress (datastore; creates one on the default deck)
        learning_progress = LearningProgress.gql('WHERE _user = :1 AND active = TRUE', request.user).get()
        if not learning_progress:
            # get deck first
            deck = Deck.gql('WHERE _id = 1').get() #FIXME: GRE
            if not deck:
                logging.error('deck not found')
                raise Exception, 'deck not found'
            learning_progress = LearningProgress.create(request.user, deck)
            learning_progress.active = True
            learning_progress.put()
        # Cards of the active deck inside the (from, to] question window.
        q_card = Card.all()
        q_card.filter('deck_id =', learning_progress._deck._id).filter('question >',w_from).filter('question <=',w_to)
        new_cards = q_card.fetch(MAX_SIZE)
        count = 0
        new_cards_ids = [c._id for c in new_cards]
        for g in range_segment(new_cards_ids):
            # Add the task to the mark-items-queue queue.
            taskqueue.add(queue_name='mark-items-queue',
                          url='/xnmemo/mark_items_worker/',
                          params={'username': request.user.username,
                                  'card_ids': '_'.join([str(c) for c in g])})
            count += len(g)
        result = { 'status': 'succeed',
                   'message': '%d records queued to be created.' % count }
        return HttpResponse(simplejson.dumps(result))
def mark_items_worker(request):
    '''Worker for mark_items.

    Task-queue endpoint (POST): creates a fresh LearningRecord for each
    card id in the '_'-joined 'card_ids' param and appends the ids to the
    user's active LearningProgress.learned_items.
    '''
    if request.method == 'POST':
        username = request.POST.get('username', '')
        card_ids_joined = request.POST.get('card_ids', '')
        card_ids = [int(c) for c in card_ids_joined.split('_')]
        # get user
        user = User.gql('WHERE username = :1', username).get()
        if not user:
            result = { 'status': 'failed',
                       'message': 'user does not exist' }
            return HttpResponse(simplejson.dumps(result))
        # create learning record
        today = datetime.datetime.now(tz=CST).date()
        # get learning progress
        # NOTE(review): assumes an active LearningProgress exists -- a None
        # here would crash on .learned_items below.
        learning_progress = LearningProgress.gql('WHERE _user = :1 AND active = TRUE', user).get()
        learned_items_list = RangeList.decode(learning_progress.learned_items)
        for card_id in card_ids:
            # One existence query per card id, as a duplicate guard.
            if LearningRecord.gql('WHERE _user = :1 AND card_id = :2', user, card_id).get():
                # record already exists
                continue
            r = LearningRecord(_user = user,
                               card_id = card_id,
                               deck_id = learning_progress._deck._id,
                               date_learn = today,
                               interval = 0,
                               next_rep = None,
                               grade = 0,
                               easiness = 2.5,
                               acq_reps = 0,
                               ret_reps = 0,
                               lapses = 0,
                               acq_reps_since_lapse = 0,
                               ret_reps_since_lapse = 0)
            r.put()
            # append to learning progress
            learned_items_list.append(card_id)
        # update learning record
        learning_progress.learned_items = RangeList.encode(learned_items_list)
        learning_progress.put()
        # prepare response
        result = { 'status': 'succeed',
                   'message': 'learning record for card %s created.' % card_ids_joined }
        return HttpResponse(simplejson.dumps(result))
def get_stats(request):
    '''Learning statistics.'''
    # NOTE(review): unimplemented stub -- it returns None, which Django
    # rejects as a view response; implement it or remove its URL mapping.
@require_GET
@require_login
def get_learning_progress(request):
    """Return the caller's learned card ids as JSON (GET, login required).

    Reads the user's first LearningProgress (regardless of the 'active'
    flag) and decodes its RangeList-encoded learned_items.
    """
    progress = LearningProgress.gql('WHERE _user = :1', request.user).get()
    if progress:
        items = RangeList.decode(progress.learned_items)
        payload = { 'status': 'succeed',
                    'learned_items': items,
                    'learned_items_count': len(items) }
    else:
        payload = { 'status': 'failed',
                    'message': 'learning_progress not found' }
    return HttpResponse(simplejson.dumps(payload))
def update_learning_progress(request):
    '''Maintenance view: rebuild a user's learned_items from their
    LearningRecords (first 1000).

    GET params: user (username).  Creates a LearningProgress on the
    default deck (_id=1) when the user has none.
    NOTE(review): non-GET requests fall through and return None.
    '''
    if request.method == 'GET':
        username = request.GET.get('user', '')
        if not username:
            result = { 'status': 'failed',
                       'message': 'user not specified' }
            return HttpResponse(simplejson.dumps(result))
        user = User.gql('WHERE username = :1', username).get()
        if not user:
            result = { 'status': 'failed',
                       'message': 'user %s not found' % (username,) }
            return HttpResponse(simplejson.dumps(result))
        records = LearningRecord.gql('WHERE _user = :1', user).fetch(1000)
        record_ids = [i.card_id for i in records]
        # get learning progress
        learning_progress = LearningProgress.gql('WHERE _user = :1', user).get()
        if not learning_progress:
            # get deck first
            deck = Deck.gql('WHERE _id = 1').get() #FIXME: GRE
            if not deck:
                logging.error('deck not found')
                raise Exception, 'deck not found'
            learning_progress = LearningProgress.create(user, deck)
        # Replace (not merge) the stored list with the freshly derived one.
        learning_progress.learned_items = RangeList.encode(sorted(record_ids))
        learning_progress.put()
        result = { 'status': 'succeed',
                   'message': 'learning_progress updated' }
        return HttpResponse(simplejson.dumps(result))
@require_login
def change_deck(request):
    """Switch the user's active deck.

    GET renders the deck-selection form; POST activates the chosen deck,
    creating a LearningProgress for it if the user has none yet, then
    drops the cached progress.
    NOTE(review): the two POST branches (with / without an existing active
    progress) duplicate most of their logic, and a POST with deck_id == 0
    leaves `current_deck` and `message` unassigned -> NameError below.
    """
    if request.method == 'GET':
        decks = Deck.all().fetch(1000)
        active_learning_progress = LearningProgress.gql('WHERE _user = :1 AND active = TRUE', request.user).get()
        if active_learning_progress:
            current_deck = active_learning_progress._deck
        else:
            current_deck = None
        template_vals = {'current_deck': current_deck,
                         'decks': decks,
                         'message': None}
        return render_to_response(request, 'xnmemo/change_deck.html', template_vals)
    elif request.method == 'POST':
        decks = Deck.all().fetch(1000)
        deck_id = int(request.POST.get('deck', 0))
        if deck_id:
            active_learning_progress = LearningProgress.gql('WHERE _user = :1 AND active = TRUE', request.user).get()
            if active_learning_progress:
                current_deck = active_learning_progress._deck
                if current_deck._id == deck_id:
                    # no need to change
                    message = 'Deck is not changed.'
                else:
                    new_deck = Deck.gql('WHERE _id = :1', deck_id).get()
                    # ReferenceProperty cannot be compared in GQL, so filter
                    # the fetched progresses in Python.
                    learning_progresses = LearningProgress.gql('WHERE _user = :1', request.user).fetch(1000)
                    learning_progresses = filter(lambda x:x._deck==new_deck, learning_progresses)
                    if learning_progresses:
                        new_learning_progress = learning_progresses[0]
                    else:
                        new_learning_progress = None
                    if not new_learning_progress:
                        new_learning_progress = LearningProgress.create(request.user, new_deck)
                    # Deactivate the old progress, activate the new one.
                    active_learning_progress.active = False
                    active_learning_progress.put()
                    new_learning_progress.active = True
                    new_learning_progress.put()
                    current_deck = new_deck
                    message = 'Deck changed to %s.' % (new_deck,)
            else:
                # No active progress yet: just activate (or create) one.
                new_deck = Deck.gql('WHERE _id = :1', deck_id).get()
                learning_progresses = LearningProgress.gql('WHERE _user = :1', request.user).fetch(1000)
                learning_progresses = filter(lambda x:x._deck==new_deck, learning_progresses)
                if learning_progresses:
                    new_learning_progress = learning_progresses[0]
                else:
                    new_learning_progress = None
                if not new_learning_progress:
                    new_learning_progress = LearningProgress.create(request.user, new_deck)
                new_learning_progress.active = True
                new_learning_progress.put()
                current_deck = new_deck
                message = 'Deck changed to %s.' % (new_deck,)
        # delete learning_progress from memcache
        memcache.delete('learning_progress'+request.user.username)
        # prepare response
        template_vals = {'current_deck': current_deck,
                         'decks': decks,
                         'message': message}
        return render_to_response(request, 'xnmemo/change_deck.html', template_vals)
def fix_learning_progress(request):
    """Queue a repair task for every LearningProgress (first 1000).

    The actual repair happens asynchronously in
    fix_learning_progress_worker; this view only enqueues one task per
    entity and echoes the queued keys.
    """
    progresses = LearningProgress.all().fetch(1000)
    queued_keys = []
    for progress in progresses:
        key_str = str(progress.key())
        taskqueue.add(queue_name='fix-learning-progress',
                      url='/xnmemo/fix_learning_progress_worker/',
                      params={'key': key_str})
        queued_keys.append(key_str)
    return HttpResponse('%d LearningProgresses fixed.\n%s' % (len(progresses), queued_keys))
@require_POST
def fix_learning_progress_worker(request):
    '''Task-queue worker: merge a user's LearningRecord card ids into
    LearningProgress.learned_items, 500 records per task.

    POST params: key (LearningProgress entity key), start_id (optional
    card id to resume from).  Re-enqueues itself while a full batch of
    500 records keeps coming back.
    '''
    key = request.POST.get('key', '')
    start_id = request.POST.get('start_id', '')
    if not key:
        return HttpResponse('Entity key not defined.')
    lp = LearningProgress.get(key)
    if not lp:
        logging.error('Entity with key %s not found.' % key)
        return HttpResponse('Entity with key %s not found.' % key)
    if not start_id:
        # First batch starts from the deck's first card.
        start_id = lp._deck.first_card_id
    else:
        start_id = int(start_id)
    records = LearningRecord.gql('WHERE _user = :1 AND deck_id = :2 AND card_id >= :3 AND card_id <= :4 ORDER BY card_id', lp._user, lp._deck._id, start_id, lp._deck.last_card_id).fetch(500)
    card_ids = [r.card_id for r in records]
    # Union with the ids already recorded, then re-encode.
    # NOTE(review): the union is not sorted before encode() -- confirm
    # RangeList.encode tolerates unsorted input.
    learned_items = list(set(card_ids) | set(RangeList.decode(lp.learned_items)))
    lp.learned_items = RangeList.encode(learned_items)
    lp.put()
    if len(card_ids) == 500:
        # we have more to fix
        taskqueue.add(queue_name='fix-learning-progress',
                      url='/xnmemo/fix_learning_progress_worker/',
                      params={'key': str(lp.key()),
                              'start_id': max(card_ids)+1})
    return HttpResponse('LearningProgresses fixed.')
@require_GET
def fix_learning_record(request):
    '''Maintenance view: re-assign deck_id on LearningRecords whose
    card_id falls inside [from, to] (GET params), matching each record
    against the decks' [first_card_id, last_card_id] ranges.

    NOTE(review): if deck id ranges overlap, a record is re-put and
    counted once per matching deck.
    '''
    start_id = int(request.GET.get('from', 0))
    end_id = int(request.GET.get('to', 0))
    if not start_id or not end_id:
        result = { 'status': 'failed',
                   'message': 'error: from or to undefined' }
        return HttpResponse(simplejson.dumps(result))
    decks = Deck.all().fetch(1000)
    records = LearningRecord.gql('WHERE card_id >= :1 AND card_id <= :2', start_id, end_id).fetch(1000)
    count = 0
    for r in records:
        for d in decks:
            if r.card_id >= d.first_card_id and r.card_id <= d.last_card_id:
                r.deck_id = d._id
                r.put()
                count += 1
    return HttpResponse('%d of %d records fixed, from id: %d to id: %d' % (count, len(records),start_id,end_id))
def convert_learning_progress(request):
    """One-off migration: RangeList-encode learned_items on every
    LearningProgress (first 1000) and save the result.

    Presumably run once after the encoding scheme was introduced; running
    it twice would encode an already-encoded list -- verify before reuse.
    """
    progresses = LearningProgress.all().fetch(1000)
    for progress in progresses:
        progress.learned_items = RangeList.encode(progress.learned_items)
        progress.put()
    return HttpResponse('%d LearningProgresses fixed.' % (len(progresses),))
def flush_cache(request):
    '''Flush memcache.'''
    # memcache.flush_all() returns True on success, False otherwise.
    ok = memcache.flush_all()
    return HttpResponse('Flush cache success.' if ok else 'Flush cache failed.')
| Python |
from django.contrib import admin
| Python |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
# Project-level URL table: mounts the dbbuilder app under /dbbuilder/.
rootpatterns = patterns('',
    (r'^dbbuilder/', include('apps.dbbuilder.urls')),
)
| Python |
# -*- coding: utf-8 -*-
from django.utils.translation import ugettext_lazy as _
from google.appengine.ext import db
| Python |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
# URL table for the dbbuilder app; view names resolve in apps.dbbuilder.views.
urlpatterns = patterns('apps.dbbuilder.views',
    #~ (r'^$', 'xnmemo_index'),
    (r'^import/$', 'db_import'),
    (r'^fix_deck_info/$', 'fix_deck_info'),
    (r'^remove_duplicates/$', 'remove_duplicates'),
    #~ (r'^(?P<lesson_id>\d+)/$', 'lesson_detail'),
    #~ (r'^all/$', 'lesson_all'),
)
| Python |
# -*- coding: utf-8 -*-
import os,logging,csv,codecs,cStringIO
from django.http import HttpResponseRedirect,Http404,HttpResponseForbidden,HttpResponse,HttpResponseNotFound
from django.views.decorators.http import require_GET, require_POST
from django.utils.translation import ugettext as _
from ragendja.template import render_to_response
from apps.core.models import Card, Deck
#~ def sqlite_import(request, dbfn, deckname, deckdesc):
#~ '''import from sqlite database (AnyMemo format)'''
#~ response = ''
#~ # open database file
#~ _localDir=os.path.dirname(__file__)
#~ _curpath=os.path.normpath(os.path.join(os.getcwd(),_localDir))
#~ curpath=_curpath
#~ try:
#~ conn = sqlite3.connect(s.path.join(curpath,dbfile))
#~ except:
#~ logging.error('Error opening sqlite file!')
#~ return HttpResponse('Error opening sqlite file!')
#~ q = Deck.all()
#~ q.filter("name =", deckname)
#~ deck = q.get()
#~ if not deck:
#~ # determine deck id
#~ q = Deck.all()
#~ q.order("-_id")
#~ results = q.fetch(1)
#~ if len(results) == 0:
#~ last_id = 0
#~ else:
#~ last_id = results[0].book_id
#~ new_id = last_id + 1
#~ # create a new deck
#~ volume = 0
#~ deck = Deck(_id=new_id, name=deckname,description=deckdesc,volume=0)
#~ else:
#~ volume = deck.volume
#~ # import cards from sqlite database, 100 cards per session
#~ start_card = volume + 1
#~ c = conn.cursor()
#~ t = (start_card,)
#~ c.execute('select * from dict_tbl where _id>=? order by _id limit 100')
#~ for row in c:
#~ response += (str(row) + '\n')
#~ return HttpResponse(response)
class UTF8Recoder:
    """
    Iterator that reads an encoded stream and reencodes the input to UTF-8

    (Recipe from the Python 2 csv module documentation: the py2 csv module
    cannot read unicode, so each line is decoded from the source encoding
    and re-encoded as UTF-8 bytes.)
    """
    def __init__(self, f, encoding):
        # f: byte stream; encoding: its character encoding.
        self.reader = codecs.getreader(encoding)(f)
    def __iter__(self):
        return self
    def next(self):
        # Python 2 iterator protocol.
        return self.reader.next().encode("utf-8")
class UnicodeReader:
    """
    A CSV reader which will iterate over lines in the CSV file "f",
    which is encoded in the given encoding.

    (Recipe from the Python 2 csv module documentation.)  Rows come back
    as lists of unicode strings.
    """
    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Feed csv.reader UTF-8 bytes regardless of the file's encoding.
        f = UTF8Recoder(f, encoding)
        self.reader = csv.reader(f, dialect=dialect, **kwds)
    def next(self):
        # Python 2 iterator protocol: decode each UTF-8 cell to unicode.
        row = self.reader.next()
        return [unicode(s, "utf-8") for s in row]
    def __iter__(self):
        return self
class UnicodeWriter:
    """
    A CSV writer which will write rows to CSV file "f",
    which is encoded in the given encoding.

    (Recipe from the Python 2 csv module documentation.)
    """
    def __init__(self, f, dialect=csv.excel, encoding="utf-8", **kwds):
        # Redirect output to a queue
        self.queue = cStringIO.StringIO()
        self.writer = csv.writer(self.queue, dialect=dialect, **kwds)
        self.stream = f
        self.encoder = codecs.getincrementalencoder(encoding)()
    def writerow(self, row):
        # py2 csv handles only byte strings, so encode cells to UTF-8 first.
        self.writer.writerow([s.encode("utf-8") for s in row])
        # Fetch UTF-8 output from the queue ...
        data = self.queue.getvalue()
        data = data.decode("utf-8")
        # ... and reencode it into the target encoding
        data = self.encoder.encode(data)
        # write to the target stream
        self.stream.write(data)
        # empty queue
        self.queue.truncate(0)
    def writerows(self, rows):
        for row in rows:
            self.writerow(row)
def csv_import(request, dbfile, deckname, deckdesc):
    '''import from csv file

    Imports up to 200 cards per request from a tab-separated file living
    next to this module, creating the Deck on first use and resuming
    where the previous request stopped (based on the deck's last card).
    '''
    # NOTE(review): `response` is assigned but never used.
    response = ''
    # open csv file (relative to this module's directory)
    _localDir=os.path.dirname(__file__)
    _curpath=os.path.normpath(os.path.join(os.getcwd(),_localDir))
    curpath=_curpath
    csvf = open(os.path.join(curpath,dbfile),'r')
    csv_reader = UnicodeReader(csvf, delimiter='\t', quotechar='|', quoting=csv.QUOTE_MINIMAL)
    deck = Deck.all().filter("name =", deckname).get()
    if not deck:
        # determine deck id: one past the current highest _id
        q = Deck.all()
        q.order("-_id")
        results = q.fetch(1)
        if len(results) == 0:
            last_id = 0
        else:
            last_id = results[0]._id
        new_deck_id = last_id + 1
        # create a new deck
        volume = 0
        deck = Deck(_id=new_deck_id, name=deckname,description=deckdesc,volume=0)
        deck.put()
    else:
        volume = deck.volume
    # get last card of deck
    last_card_from_deck = Card.gql('WHERE deck_id = :1 ORDER BY _id DESC', deck._id).get()
    # calculate first_card_id, which is the first card id of the current deck
    # and start_card_id, which import process in this request starts from
    if not last_card_from_deck:
        # deck is currently empty: new ids continue after the globally
        # highest existing card id
        last_card = Card.gql('ORDER BY _id DESC').get()
        if last_card:
            first_card_id = last_card._id + 1
        else:
            first_card_id = 1
        start_card_id = first_card_id
    else:
        first_card_id = Card.gql('WHERE deck_id = :1 ORDER BY _id', deck._id).get()._id
        start_card_id = last_card_from_deck._id + 1
    # import cards from the csv file, at most 200 cards per request
    count = 0
    for row in csv_reader:
        # row[0] is the 1-based position within the deck; skip rows already
        # imported by a previous request.
        if int(row[0]) > start_card_id - first_card_id:
            card = Card(_id=int(row[0])+first_card_id-1, question=row[1], answer = row[2], note = row[3],
                        deck_id=deck._id, category=row[4])
            card.put()
            count += 1
            if count >= 200:
                break
    # update volume, first_card_id, last_card_id
    deck.volume += count
    deck.first_card_id = first_card_id
    deck.last_card_id = first_card_id + deck.volume - 1
    deck.put()
    return HttpResponse('%d cards imported.' % (count,))
def db_import(request):
    '''Dispatch an import request (GET only).

    Without a 'dbfile' GET param, renders the upload form; with a *.csv
    dbfile, delegates to csv_import.
    NOTE(review): non-GET requests and non-.csv filenames fall through
    and return None.
    '''
    if request.method == 'GET':
        dbfile = request.GET.get('dbfile',None)
        deckname = request.GET.get('deckname','')
        deckdesc = request.GET.get('deckdesc','')
        if not dbfile:
            return render_to_response(request, "dbbuilder/db_import.html")
        else:
            if dbfile.endswith('.csv'):
                # import from csv file
                return csv_import(request,dbfile,deckname,deckdesc)
def remove_duplicates(request):
    """Delete duplicate Card entities in an inclusive _id range (GET only).

    For each id in [from, to] the first fetched Card is kept and the rest
    (up to 100 per id) are deleted.  Responds with the deletion count.
    Non-GET requests return None, matching the other views here.
    """
    if request.method != 'GET':
        return
    start = int(request.GET.get('from',0))
    end = int(request.GET.get('to',0))
    removed = 0
    for card_id in range(start, end + 1):
        # Everything after the first fetched entity is a duplicate.
        duplicates = Card.all().filter('_id =',card_id).fetch(100)[1:]
        for dup in duplicates:
            dup.delete()
            removed += 1
    return HttpResponse('%d duplicates removed.' % (removed,))
@require_GET
def fix_deck_info(request):
    '''Maintenance view: recompute a deck's first_card_id / last_card_id
    from its actual Card entities.

    GET params: deck_id.  An empty deck gets both fields set to None.
    '''
    deck_id = int(request.GET.get('deck_id',0))
    if deck_id == 0:
        return HttpResponse('deck_id not specified.')
    deck = Deck.all().filter("_id =", deck_id).get()
    if not deck:
        return HttpResponse('deck %d not found.' % (deck_id,))
    # get last card of deck
    last_card_from_deck = Card.gql('WHERE deck_id = :1 ORDER BY _id DESC', deck._id).get()
    # calculate first_card_id, which is the first card id of the current deck
    if not last_card_from_deck:
        # deck is currently empty
        deck.first_card_id = deck.last_card_id = None
        deck.put()
    else:
        deck.first_card_id = Card.gql('WHERE deck_id = :1 ORDER BY _id', deck._id).get()._id
        deck.last_card_id = last_card_from_deck._id
        deck.put()
    return HttpResponse('Deck %d fixed. first_card_id: %s, last_card_id: %s' % (deck_id,deck.first_card_id,deck.last_card_id))
| Python |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Bill Chen <pro711@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
from django.utils.translation import ugettext_lazy as _
from google.appengine.ext import db
from django.contrib.auth.models import User
import random, datetime, logging
from apps.core.item import Item
from apps.xnmemo.rangelist import RangeList
class FixedOffset(datetime.tzinfo):
    """A tzinfo with a constant offset (minutes east of UTC) and no DST."""

    _ZERO = datetime.timedelta(0)

    def __init__(self, offset, name):
        # Convert once; utcoffset() just returns the cached timedelta.
        self._offset_delta = datetime.timedelta(minutes=offset)
        self._label = name

    def utcoffset(self, dt):
        return self._offset_delta

    def dst(self, dt):
        # Fixed-offset zones never observe daylight saving time.
        return self._ZERO

    def tzname(self, dt):
        return self._label
CST = FixedOffset(480,"China Standard Time")
class Card(db.Model):
    """A card in a deck: question / answer / note plus deck and category.

    Schema mirrors the AnyMemo sqlite table it was imported from.
    """
    # CREATE TABLE dict_tbl(_id INTEGER PRIMARY KEY ASC AUTOINCREMENT, question TEXT, answer TEXT, note TEXT, category TEXT)
    # App-assigned global id (distinct from the datastore key).
    _id = db.IntegerProperty(required=True, default=0)
    question = db.StringProperty(required=True)
    answer = db.StringProperty()
    note = db.TextProperty()
    # References Deck._id (the app-level id, not a datastore reference).
    deck_id = db.IntegerProperty(required=True, default=0)
    category = db.StringProperty()
    def __unicode__(self):
        return '%s %s' % (self._id, self.question)
class Deck(db.Model):
    """A collection of cards."""
    # App-assigned id; Card.deck_id and LearningRecord.deck_id refer to it.
    _id = db.IntegerProperty(required=True, default=0)
    name = db.StringProperty(required=True)
    description = db.StringProperty(multiline=True)
    # Number of cards in the deck.
    volume = db.IntegerProperty(required=True, default=0)
    # Inclusive [first_card_id, last_card_id] range of this deck's Card._id
    # values (cards of a deck occupy a contiguous id block).
    first_card_id = db.IntegerProperty(required=True, default=0)
    last_card_id = db.IntegerProperty(required=True, default=0)
    def __unicode__(self):
        return '%s' % (self.name,)
class LearningRecord(db.Model):
'''Learing record for a card.'''
# CREATE TABLE learn_tbl(_id INTEGER PRIMARY KEY ASC AUTOINCREMENT, date_learn, interval INTEGER, grade INTEGER, easiness REAL, acq_reps INTEGER, ret_reps INTEGER, lapses INTEGER, acq_reps_since_lapse INTEGER, ret_reps_since_lapse INTEGER)
#~ _id = db.IntegerProperty(required=True, default=0)
_user = db.ReferenceProperty(User)
card_id = db.IntegerProperty(required=True)
deck_id = db.IntegerProperty()
date_learn = db.DateProperty(auto_now=True)
interval = db.IntegerProperty(required=True, default=0)
next_rep = db.DateProperty()
grade = db.IntegerProperty(required=True, default=0)
easiness = db.FloatProperty(required=True, default=2.5)
acq_reps = db.IntegerProperty(required=True, default=0)
ret_reps = db.IntegerProperty(required=True, default=0)
lapses = db.IntegerProperty(required=True, default=0)
acq_reps_since_lapse = db.IntegerProperty(required=True, default=0)
ret_reps_since_lapse = db.IntegerProperty(required=True, default=0)
def __unicode__(self):
return '%s: %s' % (self._user, self.card_id)
@classmethod
def get_scheduled_items(self, user, deck, id, size, flag, flt):
'''
Return a list of items.
id: from which ID
flag = 0 means no condition
flag = 1 means new items, the items user have never seen (acq=0)
flag = 2 means item due, they need to be reviewed. (ret)
flag = 3 means items that is ahead of time (cram)
flag = 4 means both ret and acq items, but ret comes first
flag = 5: shuffle items no other condition
'''
# FIXME: flag
# limit number of items to MAX_SIZE
MAX_SIZE = 500
size = min(size,MAX_SIZE)
today = datetime.datetime.now(tz=CST).date()
q = LearningRecord.gql('WHERE _user = :1 AND deck_id = :2 AND next_rep <= :3 AND next_rep > NULL ORDER BY next_rep', user, deck._id, today)
results = q.fetch(size)
results = filter(lambda x:x.acq_reps > 0, results)
if len(results) > size:
results = results[0:size]
#~ raise Exception
# sort results by card_id
results.sort(key=lambda x:x.card_id)
logging.debug('Scheduled: ' + str([i.card_id for i in results]))
return results
@classmethod
def get_new_items(self, user, deck, id, size, flag, flt):
'''
Return a list of items.
id: from which ID
flag = 0 means no condition
flag = 1 means new items, the items user have never seen (acq=0)
flag = 2 means item due, they need to be reviewed. (ret)
flag = 3 means items that is ahead of time (cram)
flag = 4 means both ret and acq items, but ret comes first
flag = 5: shuffle items no other condition
'''
# limit number of items to MAX_SIZE
MAX_SIZE = 100
size = min(size,MAX_SIZE)
q = LearningRecord.gql('WHERE _user = :1 AND deck_id = :2 AND acq_reps = 0', user, deck._id)
results = q.fetch(size)
if len(results) >= size:
# we have fetched enough records
logging.debug('New: ' + str([i.card_id for i in results]))
return results
else:
# create some records
new_items_size = size - len(results)
count = 0
#~ while count < new_items_size:
# get learning progress
lp = LearningProgress.gql('WHERE _user = :1 AND active = TRUE', user).get()
if not lp:
logging.error('LearningProgress not found.')
return results
new_cards = []
count = 0
learned_items_list = RangeList.decode(lp.learned_items)
#~ logging.debug(str(learned_items_list))
for i in range(lp._deck.first_card_id, lp._deck.last_card_id):
if i not in learned_items_list:
new_cards.append(i)
count += 1
if count == new_items_size:
learned_items_list += new_cards
lp.learned_items = RangeList.encode(learned_items_list)
lp.put()
break
#~ last_card = LearningRecord.gql('WHERE _user = :1 ORDER BY card_id DESC', user).get()
#~ if not last_card:
#~ # we do not have any records now
#~ last_card_id = 0
#~ else:
#~ last_card_id = last_card.card_id
#~ # get some cards
#~ # LIMIT do not support bound parameters, use query instead
#~ q_card = Card.all()
#~ q_card.filter('_id >',last_card_id).order('_id')
#~ new_cards = q_card.fetch(new_items_size)
#~ raise Exception
# create learning records for these cards
today = datetime.datetime.now(tz=CST).date()
for c in new_cards:
# check if record already exists
# this should not happen, but build here as a safeguard
r = LearningRecord.gql('WHERE _user = :1 AND card_id = :2', user, c).get()
if r:
continue
r = LearningRecord(_user = user,
#~ card_id = c._id,
card_id = c,
deck_id = lp._deck._id,
date_learn = today,
interval = 0,
next_rep = None,
grade = 0,
easiness = 2.5,
acq_reps = 0,
ret_reps = 0,
lapses = 0,
acq_reps_since_lapse = 0,
ret_reps_since_lapse = 0)
r.put()
count += 1
# add to results
results.append(r)
logging.debug('New & Created: ' + str([i.card_id for i in results]))
return results
def update_item(self, user, id, new_grade):
    '''Re-grade the card referenced by this learning record.

    Builds a scheduling ``Item`` from the record's stored state, runs the
    grading algorithm with ``new_grade``, copies the recomputed scheduling
    fields back onto this record and persists it.

    Returns True on success, False if the referenced card cannot be found.

    NOTE(review): the ``user`` and ``id`` parameters are never used -- the
    card lookup goes through ``self.card_id``.  Confirm whether callers
    expect ``id`` to select the card instead; ``id`` also shadows the
    builtin of the same name.
    '''
    card = Card.gql('WHERE _id = :1', self.card_id).get()
    if not card:
        return False
    # process answer: Item implements the spaced-repetition scheduling.
    item = Item(card, self)
    item.process_answer(new_grade)
    # Copy the recomputed scheduling state back onto this record.
    self.date_learn = item.date_learn
    self.interval = item.interval
    self.next_rep = item.next_rep
    self.grade = item.grade
    self.easiness = item.easiness
    self.acq_reps = item.acq_reps
    self.ret_reps = item.ret_reps
    self.lapses = item.lapses
    self.acq_reps_since_lapse = item.acq_reps_since_lapse
    self.ret_reps_since_lapse = item.ret_reps_since_lapse
    self.put()
    return True
def skip(self):
    '''Permanently skip this item.

    Pushes the item ~27 years into the future (10000-day interval, top
    grade, huge easiness) so the scheduler never presents it again, and
    persists the change.

    Returns True on success, False if saving fails.
    '''
    try:
        self.interval = 10000
        self.next_rep = datetime.datetime.now(tz=CST).date() + datetime.timedelta(10000)
        self.grade = 5
        self.easiness = 10.0
        self.acq_reps = 1
        self.put()
        return True
    except Exception:
        # Narrowed from a bare ``except:`` which would also swallow
        # KeyboardInterrupt/SystemExit; log so failures are visible.
        logging.exception('skip() failed to save learning record')
        return False
class LearningProgress(db.Model):
    '''Learning progress of one user for one deck.

    Tracks when learning started, the most recent learning day, and the
    encoded set of card ids already introduced to the user.
    '''
    _user = db.ReferenceProperty(User, required=True)  # owner of this progress
    _deck = db.ReferenceProperty(Deck)                 # deck being learned
    date_start = db.DateProperty(auto_now_add=True)    # set once on creation
    date_learn = db.DateProperty(auto_now=True)        # refreshed on every save
    learned_items = db.ListProperty(long)              # RangeList-encoded card ids
    active = db.BooleanProperty()  # presumably marks the user's current progress -- confirm

    def __unicode__(self):
        return '%s' % (self._user)

    @classmethod
    def create(cls, user, deck):
        '''Create, persist and return a new LearningProgress for user/deck.'''
        # First parameter renamed from ``self`` to the conventional ``cls``;
        # constructing via ``cls`` also lets subclasses create themselves.
        lp = cls(_user=user, _deck=deck, learned_items=[])
        lp.put()
        return lp
| Python |
# -*- coding: utf-8 -*-
#
# Copyright 2010 Bill Chen <pro711@gmail.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
import random, time, os, string, sys, logging, re
import datetime, copy
#~ from apps.core.models import Card, Deck, LearningRecord
class FixedOffset(datetime.tzinfo):
    """Timezone at a fixed offset east of UTC, with no DST adjustment."""

    def __init__(self, offset, name):
        # ``offset`` is given in minutes east of UTC.
        self._delta = datetime.timedelta(minutes=offset)
        self._label = name

    def utcoffset(self, dt):
        return self._delta

    def tzname(self, dt):
        return self._label

    def dst(self, dt):
        # A fixed-offset zone never observes daylight saving time.
        return datetime.timedelta(0)

CST = FixedOffset(480, "China Standard Time")
class Item(object):
    '''Scheduling state plus the SM-2 style grading algorithm for one card.

    Mirrors the mutable scheduling fields of a LearningRecord.  Grades run
    0-5: 0-1 mean "not yet acquired", 2-5 mean "retained".
    '''

    def __init__(self, card, record=None):
        '''Copy scheduling state from ``record``; start fresh if it is None.

        NOTE(review): for a fresh item (record is None) ``date_learn`` and
        ``next_rep`` are left unset (as in the original code), so
        process_answer()/is_scheduled() will fail on such an item --
        confirm callers always pass a record before grading.
        '''
        if not record:
            # Brand new item: never graded yet.
            self.interval = 0
            self.grade = 0
            self.easiness = 2.5
            self.acq_reps = 0
            self.ret_reps = 0
            self.lapses = 0
            self.acq_reps_since_lapse = 0
            self.ret_reps_since_lapse = 0
        else:
            self.date_learn = record.date_learn
            self.interval = record.interval
            self.next_rep = record.next_rep
            self.grade = record.grade
            self.easiness = record.easiness
            self.acq_reps = record.acq_reps
            self.ret_reps = record.ret_reps
            self.lapses = record.lapses
            self.acq_reps_since_lapse = record.acq_reps_since_lapse
            self.ret_reps_since_lapse = record.ret_reps_since_lapse

    def calculate_initial_interval(self, grade):
        '''Initial interval (days) for the first grading of an item.

        If this is the first time we grade this item, allow for slightly
        longer scheduled intervals, as we might know this item from before.
        '''
        return (0, 0, 1, 3, 4, 5)[grade]

    def calculate_interval_noise(self, interval):
        '''Random jitter (days) added to an interval to spread reviews out.'''
        if interval == 0:
            return 0
        if interval == 1:
            return random.randint(0, 1)
        if interval <= 10:
            return random.randint(-1, 1)
        if interval <= 60:
            return random.randint(-3, 3)
        # +/- 5% for long intervals.
        a = .05 * interval
        return int(round(random.uniform(-a, a)))

    def diff_date(self, date1, date2):
        '''Number of days from date1 to date2.'''
        return (date2 - date1).days

    def increment_date(self, date1, days):
        '''Return date1 advanced by ``days`` days.'''
        return date1 + datetime.timedelta(days)

    def process_answer(self, new_grade, dry_run=False):
        '''Grade the item with ``new_grade`` (0-5) and reschedule it.

        When ``dry_run`` is True the computation runs on a copy, leaving
        this object untouched, and the new interval (days, before noise)
        is returned.  Otherwise scheduling state is updated in place and
        1 is returned for a passing grade (>= 2), 0 for a failing one.
        '''
        # When doing a dry run, make a copy to operate on so the original
        # item (and any GUI reference to it) stays intact.
        # Bug fix: the original read an unbound local (``copy.copy(item)``)
        # which raised UnboundLocalError on every dry run; copy ``self``.
        if dry_run:
            self = copy.copy(self)
        # Calculate scheduled and actual interval, taking care of the
        # corner case when learning ahead on the same day.
        scheduled_interval = self.interval
        actual_interval = self.diff_date(self.date_learn,
                                         datetime.datetime.now(tz=CST).date())
        new_interval = 0
        retval = False
        if actual_interval == 0:
            actual_interval = 1  # Otherwise new interval can become zero.
        if self.acq_reps == 0:
            # The item is not graded yet, e.g. because it is imported.
            self.acq_reps = 1
            self.acq_reps_since_lapse = 1
            self.easiness = 2.5
            new_interval = self.calculate_initial_interval(new_grade)
            if new_grade >= 2:
                retval = True
        elif self.grade in [0, 1] and new_grade in [0, 1]:
            # In the acquisition phase and staying there.
            self.acq_reps += 1
            self.acq_reps_since_lapse += 1
            new_interval = 0
        elif self.grade in [0, 1] and new_grade in [2, 3, 4, 5]:
            # In the acquisition phase and moving to the retention phase.
            self.acq_reps += 1
            self.acq_reps_since_lapse += 1
            new_interval = 1
            retval = True
        elif self.grade in [2, 3, 4, 5] and new_grade in [0, 1]:
            # In the retention phase and dropping back to the acquisition
            # phase (a lapse).  retval stays False.
            # (Bug fix: the original assigned a dead ``returnValue``
            # variable here instead of ``retval``.)
            self.ret_reps += 1
            self.lapses += 1
            self.acq_reps_since_lapse = 0
            self.ret_reps_since_lapse = 0
            new_interval = 0
        elif self.grade in [2, 3, 4, 5] and new_grade in [2, 3, 4, 5]:
            # In the retention phase and staying there.
            self.ret_reps += 1
            self.ret_reps_since_lapse += 1
            retval = True
            logging.debug('scheduled_interval: %d, actual_interval: %d'
                          % (scheduled_interval, actual_interval))
            # Only adjust easiness when the review was not done early.
            if actual_interval >= scheduled_interval:
                if new_grade == 2:
                    self.easiness -= 0.16
                if new_grade == 3:
                    self.easiness -= 0.14
                if new_grade == 5:
                    self.easiness += 0.10
                if self.easiness < 1.3:
                    self.easiness = 1.3
            new_interval = 0
            if self.ret_reps_since_lapse == 1:
                new_interval = 6
            else:
                if new_grade == 2 or new_grade == 3:
                    if actual_interval <= scheduled_interval:
                        new_interval = actual_interval * self.easiness
                    else:
                        new_interval = scheduled_interval
                if new_grade == 4:
                    new_interval = actual_interval * self.easiness
                if new_grade == 5:
                    if actual_interval < scheduled_interval:
                        new_interval = scheduled_interval  # Avoid spacing.
                    else:
                        new_interval = actual_interval * self.easiness
            # Shouldn't happen, but build in a safeguard.
            if new_interval == 0:
                # Bug fix: the original called undefined ``logger`` here.
                logging.info("Internal error: new interval was zero.")
                new_interval = scheduled_interval
            new_interval = int(new_interval)
        # When doing a dry run, stop here and return the scheduled interval.
        if dry_run:
            return new_interval
        # Add some randomness to interval.
        noise = self.calculate_interval_noise(new_interval)
        # Update grade and interval.
        self.date_learn = datetime.datetime.now(tz=CST).date()
        self.interval = new_interval + noise
        self.next_rep = self.date_learn + datetime.timedelta(self.interval)
        self.grade = new_grade
        return 1 if retval else 0  # 1 for success, 0 for fail

    def is_scheduled(self):
        '''True if the item was ever graded and is due for review today.'''
        scheduled_interval = self.interval
        actual_interval = self.diff_date(self.date_learn,
                                         datetime.datetime.now(tz=CST).date())
        if scheduled_interval <= actual_interval and self.acq_reps > 0:
            return True
        else:
            return False

    def reset_learning_data(self):
        '''Reset all scheduling state to the never-learned defaults.'''
        self.interval = 0
        self.grade = 0
        self.easiness = 2.5
        self.acq_reps = 0
        self.ret_reps = 0
        self.lapses = 0
        self.acq_reps_since_lapse = 0
        self.ret_reps_since_lapse = 0
        self.last_rep = 0  # In days since beginning.
        self.next_rep = 0  #
| Python |
# -*- coding: utf-8 -*-
from django import forms
from django.contrib.auth.models import User
from django.core.files.uploadedfile import UploadedFile
from django.utils.translation import ugettext_lazy as _, ugettext as __
from ragendja.auth.models import UserTraits
from ragendja.forms import FormWithSets, FormSetField
from registration.forms import RegistrationForm, RegistrationFormUniqueEmail
from registration.models import RegistrationProfile
class UserRegistrationForm(forms.ModelForm):
    '''Registration form backed by the ``User`` model.

    Collects a username, email address and password (entered twice) and,
    on save, creates an inactive account pending email activation.
    '''
    username = forms.RegexField(regex=r'^\w+$', max_length=30,
                                label=_(u'Username'))
    email = forms.EmailField(widget=forms.TextInput(attrs=dict(maxlength=75)),
                             label=_(u'Email address'))
    password1 = forms.CharField(widget=forms.PasswordInput(render_value=False),
                                label=_(u'Password'))
    password2 = forms.CharField(widget=forms.PasswordInput(render_value=False),
                                label=_(u'Password (again)'))

    def clean_username(self):
        """
        Validate that the username is alphanumeric and is not already
        in use.
        """
        # Users are stored under key name "key_<lowercased username>", so
        # this is a direct by-key lookup rather than a query.
        user = User.get_by_key_name("key_"+self.cleaned_data['username'].lower())
        # Only an *active* user blocks the name: a never-activated
        # account may be registered over.
        if user and user.is_active:
            raise forms.ValidationError(__(u'This username is already taken. Please choose another.'))
        return self.cleaned_data['username']

    def clean(self):
        """
        Verify that the values entered into the two password fields
        match. Note that an error here will end up in
        ``non_field_errors()`` because it doesn't apply to a single
        field.
        """
        if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
            if self.cleaned_data['password1'] != self.cleaned_data['password2']:
                raise forms.ValidationError(__(u'You must type the same password each time'))
        return self.cleaned_data

    def save(self, domain_override=""):
        """
        Create the new ``User`` and ``RegistrationProfile``, and
        returns the ``User``.
        This is essentially a light wrapper around
        ``RegistrationProfile.objects.create_inactive_user()``,
        feeding it the form data and a profile callback (see the
        documentation on ``create_inactive_user()`` for details) if
        supplied.
        """
        new_user = RegistrationProfile.objects.create_inactive_user(
            username=self.cleaned_data['username'],
            password=self.cleaned_data['password1'],
            email=self.cleaned_data['email'],
            domain_override=domain_override)
        # Attach the created user as the form instance so the ModelForm
        # save below operates on it instead of creating a second user.
        self.instance = new_user
        return super(UserRegistrationForm, self).save()

    def clean_email(self):
        """
        Validate that the supplied email address is unique for the
        site.
        """
        email = self.cleaned_data['email'].lower()
        # count(1) stops after the first match -- only existence matters.
        if User.all().filter('email =', email).filter(
                'is_active =', True).count(1):
            raise forms.ValidationError(__(u'This email address is already in use. Please supply a different email address.'))
        return email

    class Meta:
        model = User
        # Exclude ragendja's internal UserTraits properties from the form.
        exclude = UserTraits.properties().keys()
| Python |
# -*- coding: utf-8 -*-
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext as _
from ragendja.template import render_to_response
| Python |
# -*- coding: utf-8 -*-
from ragendja.settings_pre import *
# NOTE(review): DEBUG is enabled -- confirm it is turned off in production.
DEBUG = True
# Increase this when you update your media on the production site, so users
# don't have to refresh their cache. By setting this your MEDIA_URL
# automatically becomes /media/MEDIA_VERSION/
MEDIA_VERSION = 1
# By hosting media on a different domain we can get a speedup (more parallel
# browser connections).
#if on_production_server or not have_appserver:
#    MEDIA_URL = 'http://media.mydomain.com/media/%d/'
# Change your email settings
if on_production_server:
    DEFAULT_FROM_EMAIL = 'admin@me-memo.appspotmail.com'
    SERVER_EMAIL = DEFAULT_FROM_EMAIL
# Make this unique, and don't share it with anybody.
# NOTE(review): this key is committed to source control; rotate it and load
# it from an untracked file or the environment instead.
SECRET_KEY = '31415926535897932384626'
#~ ENABLE_PROFILER = True
#~ #ONLY_FORCED_PROFILE = True
#~ #PROFILE_PERCENTAGE = 25
#~ SORT_PROFILE_RESULTS_BY = 'cumulative' # default is 'time'
#~ # Profile only datastore calls
#~ PROFILE_PATTERN = 'ext.db..+\((?:get|get_by_key_name|fetch|count|put)\)'
# Enable I18N and set default language to 'en'
# NOTE(review): the comment above says 'en' but the value below is 'zh_CN'.
USE_I18N = True
LANGUAGE_CODE = 'zh_CN'
# Restrict supported languages (and JS media generation)
LANGUAGES = (
    ('en', 'English'),
    ('zh_CN', 'Chinese (Simplified)'),
)
TEMPLATE_CONTEXT_PROCESSORS = (
    'django.core.context_processors.auth',
    'django.core.context_processors.media',
    'django.core.context_processors.request',
    'django.core.context_processors.i18n',
)
MIDDLEWARE_CLASSES = (
    'ragendja.middleware.ErrorMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    # Django authentication
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    # Google authentication
    #'ragendja.auth.middleware.GoogleAuthenticationMiddleware',
    # Hybrid Django/Google authentication
    #'ragendja.auth.middleware.HybridAuthenticationMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.locale.LocaleMiddleware',
    'ragendja.sites.dynamicsite.DynamicSiteIDMiddleware',
    'django.contrib.flatpages.middleware.FlatpageFallbackMiddleware',
    'django.contrib.redirects.middleware.RedirectFallbackMiddleware',
)
# Google authentication
#AUTH_USER_MODULE = 'ragendja.auth.google_models'
#AUTH_ADMIN_MODULE = 'ragendja.auth.google_admin'
# Hybrid Django/Google authentication
#AUTH_USER_MODULE = 'ragendja.auth.hybrid_models'
LOGIN_URL = '/account/login/'
LOGOUT_URL = '/account/logout/'
LOGIN_REDIRECT_URL = '/'
INSTALLED_APPS = (
    # Add jquery support (app is in "common" folder). This automatically
    # adds jquery to your COMBINE_MEDIA['combined-%(LANGUAGE_CODE)s.js']
    # Note: the order of your INSTALLED_APPS specifies the order in which
    # your app-specific media files get combined, so jquery should normally
    # come first.
    'jquery',
    'django.contrib.auth',
    'django.contrib.sessions',
    'django.contrib.admin',
    'django.contrib.webdesign',
    'django.contrib.flatpages',
    'django.contrib.redirects',
    'django.contrib.sites',
    'appenginepatcher',
    'ragendja',
    'apps.core',
    'apps.dbbuilder',
    'apps.xnmemo',
    'registration',
)
# List apps which should be left out from app settings and urlsauto loading
IGNORE_APP_SETTINGS = IGNORE_APP_URLSAUTO = (
    # Example:
    # 'django.contrib.admin',
    # 'django.contrib.auth',
    # 'yetanotherapp',
)
# Remote access to production server (e.g., via manage.py shell --remote)
DATABASE_OPTIONS = {
    # Override remoteapi handler's path (default: '/remote_api').
    # This is a good idea, so you make it not too easy for hackers. ;)
    # Don't forget to also update your app.yaml!
    #'remote_url': '/remote-secret-url',
    # !!!Normally, the following settings should not be used!!!
    # Always use remoteapi (no need to add manage.py --remote option)
    #'use_remote': True,
    # Change appid for remote connection (by default it's the same as in
    # your app.yaml)
    #'remote_id': 'otherappid',
    # Change domain (default: <remoteid>.appspot.com)
    #'remote_host': 'bla.com',
}
from ragendja.settings_post import *
| Python |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from ragendja.urlsauto import urlpatterns
from ragendja.auth.urls import urlpatterns as auth_patterns
from apps.core.forms import UserRegistrationForm
from apps.core.item import Item
from django.contrib import admin
# Register all app admin classes with the admin site.
admin.autodiscover()
# Custom 500 handler provided by ragendja.
handler500 = 'ragendja.views.server_error'
urlpatterns = auth_patterns + patterns('',
    ('^admin/(.*)', admin.site.root),
    # Landing page.
    (r'^$', 'django.views.generic.simple.direct_to_template',
     {'template': 'main.html'}),
    # Override the default registration form
    url(r'^account/register/$', 'registration.views.register',
        kwargs={'form_class': UserRegistrationForm},
        name='registration_register'),
) + urlpatterns
| Python |
from ragendja.settings_post import settings
# Default activation window (days) when the project settings omit it.
if not hasattr(settings, 'ACCOUNT_ACTIVATION_DAYS'):
    settings.ACCOUNT_ACTIVATION_DAYS = 30
| Python |
from django.conf.urls.defaults import *
# Mount django-registration's URLs under /account/.
rootpatterns = patterns('',
    (r'^account/', include('registration.urls')),
)
| Python |
import datetime
import random
import re
import sha
from google.appengine.ext import db
from django.conf import settings
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.db import models
from django.template.loader import render_to_string
from django.utils.translation import ugettext_lazy as _
# A valid activation key is a 40-character lowercase hex SHA-1 digest.
SHA1_RE = re.compile('^[a-f0-9]{40}$')
class RegistrationManager(models.Manager):
    """
    Custom manager for the ``RegistrationProfile`` model.
    The methods defined here provide shortcuts for account creation
    and activation (including generation and emailing of activation
    keys), and for cleaning out expired inactive accounts.
    """
    def activate_user(self, activation_key):
        """
        Validate an activation key and activate the corresponding
        ``User`` if valid.
        If the key is valid and has not expired, return the ``User``
        after activating.
        If the key is not valid or has expired, return ``False``.
        If the key is valid but the ``User`` is already active,
        return ``False``.
        To prevent reactivation of an account which has been
        deactivated by site administrators, the activation key is
        reset to the string constant ``RegistrationProfile.ACTIVATED``
        after successful activation.
        To execute customized logic when a ``User`` is activated,
        connect a function to the signal
        ``registration.signals.user_activated``; this signal will be
        sent (with the ``User`` as the value of the keyword argument
        ``user``) after a successful activation.
        """
        from registration.signals import user_activated
        # Make sure the key we're trying conforms to the pattern of a
        # SHA1 hash; if it doesn't, no point trying to look it up in
        # the database.
        if SHA1_RE.search(activation_key):
            # Profiles are keyed by "key_" + activation key (datastore key
            # names must not start with a digit), so this is a by-key get.
            profile = RegistrationProfile.get_by_key_name("key_"+activation_key)
            if not profile:
                return False
            if not profile.activation_key_expired():
                user = profile.user
                user.is_active = True
                user.put()
                # Mark the profile as used so the key cannot re-activate
                # an account later deactivated by administrators.
                profile.activation_key = RegistrationProfile.ACTIVATED
                profile.put()
                user_activated.send(sender=self.model, user=user)
                return user
        return False
    def create_inactive_user(self, username, password, email, domain_override="",
                             send_email=True):
        """
        Create a new, inactive ``User``, generate a
        ``RegistrationProfile`` and email its activation key to the
        ``User``, returning the new ``User``.
        To disable the email, call with ``send_email=False``.
        The activation email will make use of two templates:
        ``registration/activation_email_subject.txt``
            This template will be used for the subject line of the
            email. It receives one context variable, ``site``, which
            is the currently-active
            ``django.contrib.sites.models.Site`` instance. Because it
            is used as the subject line of an email, this template's
            output **must** be only a single line of text; output
            longer than one line will be forcibly joined into only a
            single line.
        ``registration/activation_email.txt``
            This template will be used for the body of the email. It
            will receive three context variables: ``activation_key``
            will be the user's activation key (for use in constructing
            a URL to activate the account), ``expiration_days`` will
            be the number of days for which the key will be valid and
            ``site`` will be the currently-active
            ``django.contrib.sites.models.Site`` instance.
        To execute customized logic once the new ``User`` has been
        created, connect a function to the signal
        ``registration.signals.user_registered``; this signal will be
        sent (with the new ``User`` as the value of the keyword
        argument ``user``) after the ``User`` and
        ``RegistrationProfile`` have been created, and the email (if
        any) has been sent.
        """
        from registration.signals import user_registered
        # prepend "key_" to the key_name, because key_names can't start with numbers
        new_user = User(username=username, key_name="key_"+username.lower(),
                        email=email, is_active=False)
        new_user.set_password(password)
        new_user.put()
        registration_profile = self.create_profile(new_user)
        if send_email:
            from django.core.mail import send_mail
            # NOTE(review): the caller-supplied domain string is used as
            # ``site`` instead of Site.objects.get_current() -- confirm the
            # email templates only expect a plain domain here.
            current_site = domain_override
            # current_site = Site.objects.get_current()
            subject = render_to_string('registration/activation_email_subject.txt',
                                       { 'site': current_site })
            # Email subject *must not* contain newlines
            subject = ''.join(subject.splitlines())
            message = render_to_string('registration/activation_email.txt',
                                       { 'activation_key': registration_profile.activation_key,
                                         'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS,
                                         'site': current_site })
            send_mail(subject, message, settings.DEFAULT_FROM_EMAIL, [new_user.email])
        user_registered.send(sender=self.model, user=new_user)
        return new_user
    def create_profile(self, user):
        """
        Create a ``RegistrationProfile`` for a given
        ``User``, and return the ``RegistrationProfile``.
        The activation key for the ``RegistrationProfile`` will be a
        SHA1 hash, generated from a combination of the ``User``'s
        username and a random salt.
        """
        salt = sha.new(str(random.random())).hexdigest()[:5]
        activation_key = sha.new(salt+user.username).hexdigest()
        # prepend "key_" to the key_name, because key_names can't start with numbers
        registrationprofile = RegistrationProfile(user=user, activation_key=activation_key, key_name="key_"+activation_key)
        registrationprofile.put()
        return registrationprofile
    def delete_expired_users(self):
        """
        Remove expired instances of ``RegistrationProfile`` and their
        associated ``User``s.
        Accounts to be deleted are identified by searching for
        instances of ``RegistrationProfile`` with expired activation
        keys, and then checking to see if their associated ``User``
        instances have the field ``is_active`` set to ``False``; any
        ``User`` who is both inactive and has an expired activation
        key will be deleted.
        It is recommended that this method be executed regularly as
        part of your routine site maintenance; this application
        provides a custom management command which will call this
        method, accessible as ``manage.py cleanupregistration``.
        Regularly clearing out accounts which have never been
        activated serves two useful purposes:
        1. It alleviates the occasional need to reset a
           ``RegistrationProfile`` and/or re-send an activation email
           when a user does not receive or does not act upon the
           initial activation email; since the account will be
           deleted, the user will be able to simply re-register and
           receive a new activation key.
        2. It prevents the possibility of a malicious user registering
           one or more accounts and never activating them (thus
           denying the use of those usernames to anyone else); since
           those accounts will be deleted, the usernames will become
           available for use again.
        If you have a troublesome ``User`` and wish to disable their
        account while keeping it in the database, simply delete the
        associated ``RegistrationProfile``; an inactive ``User`` which
        does not have an associated ``RegistrationProfile`` will not
        be deleted.
        """
        for profile in RegistrationProfile.all():
            if profile.activation_key_expired():
                user = profile.user
                # Never delete an account that was activated (or manually
                # re-activated): only inactive users go.
                if not user.is_active:
                    user.delete()
                    profile.delete()
class RegistrationProfile(db.Model):
    """
    A simple profile which stores an activation key for use during
    user account registration.
    Generally, you will not want to interact directly with instances
    of this model; the provided manager includes methods
    for creating and activating new accounts, as well as for cleaning
    out accounts which have never been activated.
    While it is possible to use this model as the value of the
    ``AUTH_PROFILE_MODULE`` setting, it's not recommended that you do
    so. This model's sole purpose is to store data temporarily during
    account registration and activation.
    """
    # Sentinel stored in activation_key after successful activation,
    # preventing the key from being used a second time.
    ACTIVATED = u"ALREADY_ACTIVATED"
    user = db.ReferenceProperty(User, verbose_name=_('user'))
    activation_key = db.StringProperty(_('activation key'))
    # Django-style manager kept for API compatibility with
    # django-registration callers.
    objects = RegistrationManager()
    class Meta:
        verbose_name = _('registration profile')
        verbose_name_plural = _('registration profiles')
    def __unicode__(self):
        return u"Registration information for %s" % self.user
    def activation_key_expired(self):
        """
        Determine whether this ``RegistrationProfile``'s activation
        key has expired, returning a boolean -- ``True`` if the key
        has expired.
        Key expiration is determined by a two-step process:
        1. If the user has already activated, the key will have been
           reset to the string constant ``ACTIVATED``. Re-activating
           is not permitted, and so this method returns ``True`` in
           this case.
        2. Otherwise, the date the user signed up is incremented by
           the number of days specified in the setting
           ``ACCOUNT_ACTIVATION_DAYS`` (which should be the number of
           days after signup during which a user is allowed to
           activate their account); if the result is less than or
           equal to the current date, the key has expired and this
           method returns ``True``.
        """
        expiration_date = datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS)
        return self.activation_key == RegistrationProfile.ACTIVATED or \
               (self.user.date_joined + expiration_date <= datetime.datetime.now())
    # Render as a boolean icon in the Django admin changelist.
    activation_key_expired.boolean = True
| Python |
"""
Forms and validation code for user registration.
"""
from django.contrib.auth.models import User
from django import forms
from django.utils.translation import ugettext_lazy as _
from registration.models import RegistrationProfile
# I put this on all required fields, because it's easier to pick up
# on them with CSS or JavaScript if they have a class of "required"
# in the HTML. Your mileage may vary. If/when Django ticket #3515
# lands in trunk, this will no longer be necessary.
attrs_dict = { 'class': 'required' }
class RegistrationForm(forms.Form):
    """
    Form for registering a new user account.
    Validates that the requested username is not already in use, and
    requires the password to be entered twice to catch typos.
    Subclasses should feel free to add any additional validation they
    need, but should either preserve the base ``save()`` or implement
    a ``save()`` method which returns a ``User``.
    """
    username = forms.RegexField(regex=r'^\w+$',
                                max_length=30,
                                widget=forms.TextInput(attrs=attrs_dict),
                                label=_(u'username'))
    email = forms.EmailField(widget=forms.TextInput(attrs=dict(attrs_dict,
                                                               maxlength=75)),
                             label=_(u'email address'))
    password1 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
                                label=_(u'password'))
    password2 = forms.CharField(widget=forms.PasswordInput(attrs=attrs_dict, render_value=False),
                                label=_(u'password (again)'))
    def clean_username(self):
        """
        Validate that the username is alphanumeric and is not already
        in use.
        """
        # By-key lookup: users are stored under "key_<lowercased username>".
        # NOTE: unlike UserRegistrationForm, any existing user (active or
        # not) blocks the name here.
        user = User.get_by_key_name("key_"+self.cleaned_data['username'].lower())
        if user:
            raise forms.ValidationError(_(u'This username is already taken. Please choose another.'))
        return self.cleaned_data['username']
    def clean(self):
        """
        Verify that the values entered into the two password fields
        match. Note that an error here will end up in
        ``non_field_errors()`` because it doesn't apply to a single
        field.
        """
        if 'password1' in self.cleaned_data and 'password2' in self.cleaned_data:
            if self.cleaned_data['password1'] != self.cleaned_data['password2']:
                raise forms.ValidationError(_(u'You must type the same password each time'))
        return self.cleaned_data
    def save(self, domain_override=""):
        """
        Create the new ``User`` and ``RegistrationProfile``, and
        returns the ``User`` (by calling
        ``RegistrationProfile.objects.create_inactive_user()``).
        """
        new_user = RegistrationProfile.objects.create_inactive_user(username=self.cleaned_data['username'],
                                                                    password=self.cleaned_data['password1'],
                                                                    email=self.cleaned_data['email'],
                                                                    domain_override=domain_override,
                                                                    )
        return new_user
class RegistrationFormTermsOfService(RegistrationForm):
    """
    Subclass of ``RegistrationForm`` which adds a required checkbox
    for agreeing to a site's Terms of Service.
    """
    # BooleanField with required=True (the default) forces the box to be
    # checked; unchecked submits raise the custom 'required' message.
    tos = forms.BooleanField(widget=forms.CheckboxInput(attrs=attrs_dict),
                             label=_(u'I have read and agree to the Terms of Service'),
                             error_messages={ 'required': u"You must agree to the terms to register" })
class RegistrationFormUniqueEmail(RegistrationForm):
    """
    Subclass of ``RegistrationForm`` which enforces uniqueness of
    email addresses.
    """
    def clean_email(self):
        """
        Validate that the supplied email address is unique for the
        site.
        """
        # Lowercase for a case-insensitive comparison against stored
        # addresses; count(1) stops at the first match.
        email = self.cleaned_data['email'].lower()
        if User.all().filter('email =', email).count(1):
            raise forms.ValidationError(_(u'This email address is already in use. Please supply a different email address.'))
        return email
class RegistrationFormNoFreeEmail(RegistrationForm):
    """
    ``RegistrationForm`` variant that rejects addresses hosted on
    popular free webmail providers -- a moderately useful deterrent
    against automated spam registrations.

    Subclass this form and override ``bad_domains`` to change the
    banned list.
    """
    bad_domains = ['aim.com', 'aol.com', 'email.com', 'gmail.com',
                   'googlemail.com', 'hotmail.com', 'hushmail.com',
                   'msn.com', 'mail.ru', 'mailinator.com', 'live.com']
    def clean_email(self):
        """Reject the address when its domain is on the banned list."""
        address = self.cleaned_data['email']
        domain = address.split('@')[1]
        if domain not in self.bad_domains:
            return address
        raise forms.ValidationError(_(u'Registration using free email addresses is prohibited. Please supply a different email address.'))
| Python |
"""
Unit tests for django-registration.
These tests assume that you've completed all the prerequisites for
getting django-registration running in the default setup, to wit:
1. You have ``registration`` in your ``INSTALLED_APPS`` setting.
2. You have created all of the templates mentioned in this
application's documentation.
3. You have added the setting ``ACCOUNT_ACTIVATION_DAYS`` to your
settings file.
4. You have URL patterns pointing to the registration and activation
views, with the names ``registration_register`` and
``registration_activate``, respectively, and a URL pattern named
'registration_complete'.
"""
import datetime
import sha
from django.conf import settings
from django.contrib.auth.models import User
from django.core import mail
from django.core import management
from django.core.urlresolvers import reverse
from django.test import TestCase
from google.appengine.ext import db
from registration import forms
from registration.models import RegistrationProfile
from registration import signals
class RegistrationTestCase(TestCase):
    """
    Base class for the test cases; this sets up two users -- one
    expired, one not -- which are used to exercise various parts of
    the application.
    """
    def setUp(self):
        # A freshly registered, still-unexpired inactive user.
        self.sample_user = RegistrationProfile.objects.create_inactive_user(username='alice',
                                                                            password='secret',
                                                                            email='alice@example.com')
        # A user whose join date is pushed back past the activation
        # window, so its activation key counts as expired.
        self.expired_user = RegistrationProfile.objects.create_inactive_user(username='bob',
                                                                             password='swordfish',
                                                                             email='bob@example.com')
        self.expired_user.date_joined -= datetime.timedelta(days=settings.ACCOUNT_ACTIVATION_DAYS + 1)
        self.expired_user.save()
class RegistrationModelTests(RegistrationTestCase):
    """
    Tests for the model-oriented functionality of django-registration,
    including ``RegistrationProfile`` and its custom manager.
    """
    def test_new_user_is_inactive(self):
        """
        Test that a newly-created user is inactive.
        """
        self.failIf(self.sample_user.is_active)
    def test_registration_profile_created(self):
        """
        Test that a ``RegistrationProfile`` is created for a new user.
        """
        # setUp() created exactly two users, one profile each.
        self.assertEqual(RegistrationProfile.all().count(), 2)
    def test_activation_email(self):
        """
        Test that user signup sends an activation email.
        """
        # One activation message per user created in setUp().
        self.assertEqual(len(mail.outbox), 2)
    def test_activation_email_disable(self):
        """
        Test that activation email can be disabled.
        """
        RegistrationProfile.objects.create_inactive_user(username='noemail',
                                                         password='foo',
                                                         email='nobody@example.com',
                                                         send_email=False)
        # Outbox still holds only the two messages from setUp().
        self.assertEqual(len(mail.outbox), 2)
    def test_activation(self):
        """
        Test that user activation actually activates the user and
        properly resets the activation key, and fails for an
        already-active or expired user, or an invalid key.
        """
        # Activating a valid user returns the user.
        self.failUnlessEqual(RegistrationProfile.objects.activate_user(RegistrationProfile.all().filter('user =', self.sample_user).get().activation_key).key(),
                             self.sample_user.key())
        # The activated user must now be active.
        self.failUnless(User.get(self.sample_user.key()).is_active)
        # The activation key must now be reset to the "already activated" constant.
        self.failUnlessEqual(RegistrationProfile.all().filter('user =', self.sample_user).get().activation_key,
                             RegistrationProfile.ACTIVATED)
        # Activating an expired user returns False.
        self.failIf(RegistrationProfile.objects.activate_user(RegistrationProfile.all().filter('user =', self.expired_user).get().activation_key))
        # Activating from a key that isn't a SHA1 hash returns False.
        self.failIf(RegistrationProfile.objects.activate_user('foo'))
        # Activating from a key that doesn't exist returns False.
        self.failIf(RegistrationProfile.objects.activate_user(sha.new('foo').hexdigest()))
    def test_account_expiration_condition(self):
        """
        Test that ``RegistrationProfile.activation_key_expired()``
        returns ``True`` for expired users and for active users, and
        ``False`` otherwise.
        """
        # Unexpired user returns False.
        self.failIf(RegistrationProfile.all().filter('user =', self.sample_user).get().activation_key_expired())
        # Expired user returns True.
        self.failUnless(RegistrationProfile.all().filter('user =', self.expired_user).get().activation_key_expired())
        # Activated user returns True.
        RegistrationProfile.objects.activate_user(RegistrationProfile.all().filter('user =', self.sample_user).get().activation_key)
        self.failUnless(RegistrationProfile.all().filter('user =', self.sample_user).get().activation_key_expired())
    def test_expired_user_deletion(self):
        """
        Test that
        ``RegistrationProfile.objects.delete_expired_users()`` deletes
        only inactive users whose activation window has expired.
        """
        RegistrationProfile.objects.delete_expired_users()
        # Only the expired user's profile is removed; sample_user stays.
        self.assertEqual(RegistrationProfile.all().count(), 1)
    def test_management_command(self):
        """
        Test that ``manage.py cleanupregistration`` functions
        correctly.
        """
        management.call_command('cleanupregistration')
        self.assertEqual(RegistrationProfile.all().count(), 1)
    def test_signals(self):
        """
        Test that the ``user_registered`` and ``user_activated``
        signals are sent, and that they send the ``User`` as an
        argument.
        """
        def receiver(sender, **kwargs):
            # Each signal must carry the User in the ``user`` kwarg.
            self.assert_('user' in kwargs)
            self.assertEqual(kwargs['user'].username, u'signal_test')
            received_signals.append(kwargs.get('signal'))
        received_signals = []
        expected_signals = [signals.user_registered, signals.user_activated]
        for signal in expected_signals:
            signal.connect(receiver)
        # Registering then activating should fire both signals, in order.
        RegistrationProfile.objects.create_inactive_user(username='signal_test',
                                                         password='foo',
                                                         email='nobody@example.com',
                                                         send_email=False)
        RegistrationProfile.objects.activate_user(RegistrationProfile.all().filter('user =', db.Key.from_path(User.kind(), 'key_signal_test')).get().activation_key)
        self.assertEqual(received_signals, expected_signals)
class RegistrationFormTests(RegistrationTestCase):
    """
    Tests for the forms and custom validation logic included in
    django-registration.
    """
    def test_registration_form(self):
        """
        Test that ``RegistrationForm`` enforces username constraints
        and matching passwords.
        """
        # Each dict pairs invalid POST data with the exact (field, errors)
        # tuple the form is expected to produce.
        invalid_data_dicts = [
            # Non-alphanumeric username.
            {
            'data':
            { 'username': 'foo/bar',
              'email': 'foo@example.com',
              'password1': 'foo',
              'password2': 'foo' },
            'error':
            ('username', [u"Enter a valid value."])
            },
            # Already-existing username.
            {
            'data':
            { 'username': 'alice',
              'email': 'alice@example.com',
              'password1': 'secret',
              'password2': 'secret' },
            'error':
            ('username', [u"This username is already taken. Please choose another."])
            },
            # Mismatched passwords.
            {
            'data':
            { 'username': 'foo',
              'email': 'foo@example.com',
              'password1': 'foo',
              'password2': 'bar' },
            'error':
            ('__all__', [u"You must type the same password each time"])
            },
            ]
        for invalid_dict in invalid_data_dicts:
            form = forms.RegistrationForm(data=invalid_dict['data'])
            self.failIf(form.is_valid())
            self.assertEqual(form.errors[invalid_dict['error'][0]], invalid_dict['error'][1])
        # A fully valid submission passes validation.
        form = forms.RegistrationForm(data={ 'username': 'foo',
                                             'email': 'foo@example.com',
                                             'password1': 'foo',
                                             'password2': 'foo' })
        self.failUnless(form.is_valid())
    def test_registration_form_tos(self):
        """
        Test that ``RegistrationFormTermsOfService`` requires
        agreement to the terms of service.
        """
        form = forms.RegistrationFormTermsOfService(data={ 'username': 'foo',
                                                           'email': 'foo@example.com',
                                                           'password1': 'foo',
                                                           'password2': 'foo' })
        self.failIf(form.is_valid())
        self.assertEqual(form.errors['tos'], [u"You must agree to the terms to register"])
        # Checking the ToS box makes the same data valid.
        form = forms.RegistrationFormTermsOfService(data={ 'username': 'foo',
                                                           'email': 'foo@example.com',
                                                           'password1': 'foo',
                                                           'password2': 'foo',
                                                           'tos': 'on' })
        self.failUnless(form.is_valid())
    def test_registration_form_unique_email(self):
        """
        Test that ``RegistrationFormUniqueEmail`` validates uniqueness
        of email addresses.
        """
        # alice@example.com is already taken by setUp()'s sample_user.
        form = forms.RegistrationFormUniqueEmail(data={ 'username': 'foo',
                                                        'email': 'alice@example.com',
                                                        'password1': 'foo',
                                                        'password2': 'foo' })
        self.failIf(form.is_valid())
        self.assertEqual(form.errors['email'], [u"This email address is already in use. Please supply a different email address."])
        form = forms.RegistrationFormUniqueEmail(data={ 'username': 'foo',
                                                        'email': 'foo@example.com',
                                                        'password1': 'foo',
                                                        'password2': 'foo' })
        self.failUnless(form.is_valid())
    def test_registration_form_no_free_email(self):
        """
        Test that ``RegistrationFormNoFreeEmail`` disallows
        registration with free email addresses.
        """
        base_data = { 'username': 'foo',
                      'password1': 'foo',
                      'password2': 'foo' }
        # Every domain on the form's blocklist must be rejected.
        for domain in ('aim.com', 'aol.com', 'email.com', 'gmail.com',
                       'googlemail.com', 'hotmail.com', 'hushmail.com',
                       'msn.com', 'mail.ru', 'mailinator.com', 'live.com'):
            invalid_data = base_data.copy()
            invalid_data['email'] = u"foo@%s" % domain
            form = forms.RegistrationFormNoFreeEmail(data=invalid_data)
            self.failIf(form.is_valid())
            self.assertEqual(form.errors['email'], [u"Registration using free email addresses is prohibited. Please supply a different email address."])
        base_data['email'] = 'foo@example.com'
        form = forms.RegistrationFormNoFreeEmail(data=base_data)
        self.failUnless(form.is_valid())
class RegistrationViewTests(RegistrationTestCase):
    """
    Tests for the views included in django-registration.
    """
    def test_registration_view(self):
        """
        Test that the registration view rejects invalid submissions,
        and creates a new user and redirects after a valid submission.
        """
        # Invalid data fails.
        # Activate alice so her username collides with the POST below.
        alice = User.all().filter('username =', 'alice').get()
        alice.is_active = True
        alice.put()
        response = self.client.post(reverse('registration_register'),
                                    data={ 'username': 'alice', # Will fail on username uniqueness.
                                           'email': 'foo@example.com',
                                           'password1': 'foo',
                                           'password2': 'foo' })
        # The form is re-rendered (HTTP 200) with errors attached.
        self.assertEqual(response.status_code, 200)
        self.failUnless(response.context[0]['form'])
        self.failUnless(response.context[0]['form'].errors)
        response = self.client.post(reverse('registration_register'),
                                    data={ 'username': 'foo',
                                           'email': 'foo@example.com',
                                           'password1': 'foo',
                                           'password2': 'foo' })
        # A valid submission redirects to the completion page and adds a
        # third profile to the two created in setUp().
        self.assertEqual(response.status_code, 302)
        self.assertEqual(response['Location'], 'http://testserver%s' % reverse('registration_complete'))
        self.assertEqual(RegistrationProfile.all().count(), 3)
    def test_activation_view(self):
        """
        Test that the activation view activates the user from a valid
        key and fails if the key is invalid or has expired.
        """
        # Valid user puts the user account into the context.
        response = self.client.get(reverse('registration_activate',
                                           kwargs={ 'activation_key': RegistrationProfile.all().filter('user =', self.sample_user).get().activation_key }))
        self.assertEqual(response.status_code, 200)
        self.assertEqual(response.context[0]['account'].key(), self.sample_user.key())
        # Expired user sets the account to False.
        response = self.client.get(reverse('registration_activate',
                                           kwargs={ 'activation_key': RegistrationProfile.all().filter('user =', self.expired_user).get().activation_key }))
        self.failIf(response.context[0]['account'])
        # Invalid key gets to the view, but sets account to False.
        response = self.client.get(reverse('registration_activate',
                                           kwargs={ 'activation_key': 'foo' }))
        self.failIf(response.context[0]['account'])
        # Nonexistent key sets the account to False.
        response = self.client.get(reverse('registration_activate',
                                           kwargs={ 'activation_key': sha.new('foo').hexdigest() }))
        self.failIf(response.context[0]['account'])
| Python |
"""
URLConf for Django user registration and authentication.
If the default behavior of the registration views is acceptable to
you, simply use a line like this in your root URLConf to set up the
default URLs for registration::
(r'^accounts/', include('registration.urls')),
This will also automatically set up the views in
``django.contrib.auth`` at sensible default locations.
But if you'd like to customize the behavior (e.g., by passing extra
arguments to the various views) or split up the URLs, feel free to set
up your own URL patterns for these views instead. If you do, it's a
good idea to use the names ``registration_activate``,
``registration_complete`` and ``registration_register`` for the
various steps of the user-signup process.
"""
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template
from django.contrib.auth import views as auth_views
from registration.views import activate
from registration.views import register
urlpatterns = patterns('',
                       # Activation keys get matched by \w+ instead of the more specific
                       # [a-fA-F0-9]{40} because a bad activation key should still get to the view;
                       # that way it can return a sensible "invalid key" message instead of a
                       # confusing 404.
                       url(r'^activate/(?P<activation_key>\w+)/$',
                           activate,
                           name='registration_activate'),
                       # Stock django.contrib.auth views mounted at default locations.
                       url(r'^login/$',
                           auth_views.login,
                           {'template_name': 'registration/login.html'},
                           name='auth_login'),
                       url(r'^logout/$',
                           auth_views.logout,
                           name='auth_logout'),
                       url(r'^password/change/$',
                           auth_views.password_change,
                           name='auth_password_change'),
                       url(r'^password/change/done/$',
                           auth_views.password_change_done,
                           name='auth_password_change_done'),
                       url(r'^password/reset/$',
                           auth_views.password_reset,
                           name='auth_password_reset'),
                       url(r'^password/reset/confirm/(?P<uidb36>.+)/(?P<token>.+)/$',
                           auth_views.password_reset_confirm,
                           name='auth_password_reset_confirm'),
                       url(r'^password/reset/complete/$',
                           auth_views.password_reset_complete,
                           name='auth_password_reset_complete'),
                       url(r'^password/reset/done/$',
                           auth_views.password_reset_done,
                           name='auth_password_reset_done'),
                       # The registration flow proper: signup form, then static
                       # "check your email" completion page.
                       url(r'^register/$',
                           register,
                           name='registration_register'),
                       url(r'^register/complete/$',
                           direct_to_template,
                           {'template': 'registration/registration_complete.html'},
                           name='registration_complete'),
                       )
| Python |
"""
A management command which deletes expired accounts (e.g.,
accounts which signed up but never activated) from the database.
Calls ``RegistrationProfile.objects.delete_expired_users()``, which
contains the actual logic for determining which accounts are deleted.
"""
from django.core.management.base import NoArgsCommand
from registration.models import RegistrationProfile
class Command(NoArgsCommand):
    # Shown by ``manage.py help cleanupregistration``.
    help = "Delete expired user registrations from the database"
    def handle_noargs(self, **options):
        """Delegate to the manager, which holds the actual deletion logic."""
        RegistrationProfile.objects.delete_expired_users()
| Python |
"""
Views which allow users to create and activate accounts.
"""
from django.conf import settings
from django.core.urlresolvers import reverse
from django.http import HttpResponseRedirect
from django.shortcuts import render_to_response
from django.template import RequestContext
from registration.forms import RegistrationForm
from registration.models import RegistrationProfile
def activate(request, activation_key,
             template_name='registration/activate.html',
             extra_context=None):
    """
    Activate a ``User``'s account from an activation key, if their key
    is valid and hasn't expired.
    By default, use the template ``registration/activate.html``; to
    change this, pass the name of a template as the keyword argument
    ``template_name``.
    **Required arguments**
    ``activation_key``
        The activation key to validate and use for activating the
        ``User``.
    **Optional arguments**
    ``extra_context``
        A dictionary of variables to add to the template context. Any
        callable object in this dictionary will be called to produce
        the end result which appears in the context.
    ``template_name``
        A custom template to use.
    **Context:**
    ``account``
        The ``User`` object corresponding to the account, if the
        activation was successful. ``False`` if the activation was not
        successful.
    ``expiration_days``
        The number of days for which activation keys stay valid after
        registration.
    Any extra variables supplied in the ``extra_context`` argument
    (see above).
    **Template:**
    registration/activate.html or ``template_name`` keyword argument.
    """
    activation_key = activation_key.lower() # Normalize before trying anything with it.
    account = RegistrationProfile.objects.activate_user(activation_key)
    if extra_context is None:
        extra_context = {}
    context = RequestContext(request)
    for key, value in extra_context.items():
        # Resolve callables to their result. A conditional expression is
        # used instead of the old ``callable(value) and value() or value``
        # idiom, which wrongly put the callable itself into the context
        # whenever the call returned a falsy value (e.g. '' or []).
        context[key] = value() if callable(value) else value
    return render_to_response(template_name,
                              { 'account': account,
                                'expiration_days': settings.ACCOUNT_ACTIVATION_DAYS },
                              context_instance=context)
def register(request, success_url=None,
             form_class=RegistrationForm,
             template_name='registration/registration_form.html',
             extra_context=None):
    """
    Allow a new user to register an account.
    Following successful registration, issue a redirect; by default,
    this will be whatever URL corresponds to the named URL pattern
    ``registration_complete``, which will be
    ``/accounts/register/complete/`` if using the included URLConf. To
    change this, point that named pattern at another URL, or pass your
    preferred URL as the keyword argument ``success_url``.
    By default, ``registration.forms.RegistrationForm`` will be used
    as the registration form; to change this, pass a different form
    class as the ``form_class`` keyword argument. The form class you
    specify must have a method ``save`` which will create and return
    the new ``User``.
    By default, use the template
    ``registration/registration_form.html``; to change this, pass the
    name of a template as the keyword argument ``template_name``.
    **Required arguments**
    None.
    **Optional arguments**
    ``form_class``
        The form class to use for registration.
    ``extra_context``
        A dictionary of variables to add to the template context. Any
        callable object in this dictionary will be called to produce
        the end result which appears in the context.
    ``success_url``
        The URL to redirect to on successful registration.
    ``template_name``
        A custom template to use.
    **Context:**
    ``form``
        The registration form.
    Any extra variables supplied in the ``extra_context`` argument
    (see above).
    **Template:**
    registration/registration_form.html or ``template_name`` keyword
    argument.
    """
    if request.method == 'POST':
        form = form_class(data=request.POST, files=request.FILES)
        domain_override = request.get_host()
        if form.is_valid():
            form.save(domain_override)
            # success_url needs to be dynamically generated here; setting a
            # default value using reverse() will cause circular-import
            # problems with the default URLConf for this application, which
            # imports this file.
            return HttpResponseRedirect(success_url or reverse('registration_complete'))
    else:
        form = form_class()
    if extra_context is None:
        extra_context = {}
    context = RequestContext(request)
    for key, value in extra_context.items():
        # Resolve callables to their result. A conditional expression is
        # used instead of the old ``callable(value) and value() or value``
        # idiom, which wrongly put the callable itself into the context
        # whenever the call returned a falsy value (e.g. '' or []).
        context[key] = value() if callable(value) else value
    return render_to_response(template_name,
                              { 'form': form },
                              context_instance=context)
| Python |
from django.contrib import admin
from registration.models import RegistrationProfile
class RegistrationAdmin(admin.ModelAdmin):
    # List each profile alongside whether its activation key has expired.
    list_display = ('__unicode__', 'activation_key_expired')
    # Allow admin search on the related user's username and first name.
    search_fields = ('user__username', 'user__first_name')
admin.site.register(RegistrationProfile, RegistrationAdmin)
| Python |
from django.dispatch import Signal
# A new user has registered. Receivers get the new ``User`` in the
# ``user`` keyword argument.
user_registered = Signal(providing_args=["user"])
# A user has activated his or her account. Receivers get the activated
# ``User`` in the ``user`` keyword argument.
user_activated = Signal(providing_args=["user"])
| Python |
#!/usr/bin/env python
if __name__ == '__main__':
    # Bootstrap the App Engine patch environment before anything Django.
    from common.appenginepatch.aecmd import setup_env
    setup_env(manage_py_env=True)
    # Recompile translation files
    from mediautils.compilemessages import updatemessages
    updatemessages()
    # Generate compressed media files for manage.py update
    import sys
    from mediautils.generatemedia import updatemedia
    # Media is regenerated only for ``manage.py update`` invocations.
    if len(sys.argv) >= 2 and sys.argv[1] == 'update':
        updatemedia(True)
    # Hand the remaining command-line handling to Django.
    import settings
    from django.core.management import execute_manager
    execute_manager(settings)
| Python |
#! /usr/bin/python
import sys
import os
# executable base_feature_dir refactor_feature_dir subdir_to_be_refactored files_to_be_refactored
def foo():
flagsvn = 0
flagcp = 0
if len(sys.argv) < 5:
print "Usage: " + sys.argv[0] + " [-cp] <fromdir> <todir> <hierarchy> <files>"
sys.exit()
cmd=""
if sys.argv[1] == "-svn":
cmd = "svn "
sys.argv.pop(1)
flagsvn=1
if sys.argv[1] == "-cp":
cmd += "cp "
sys.argv.pop(1)
flagcp=1
else:
cmd += "mv "
base = sys.argv[1]
basesubdir = sys.argv[1] + "/" + sys.argv[3] + "/"
feature = sys.argv[2]
featuresubdir = sys.argv[2] + "/" + sys.argv[3] + "/"
print ("flags ", flagcp, flagsvn)
if not os.access( featuresubdir, 000 ):
os.makedirs( featuresubdir )
if flagsvn == 1 and flagcp == 1:
svnaddcmd = "svn add "
svnaddcmd += feature
svnaddcmd += "/*"
print "adding new dir to svn:"
print svnaddcmd
os.system(svnaddcmd)
for x in sys.argv[4:]:
command = cmd + basesubdir + x + " " + featuresubdir
print ( command )
os.system( command )
foo()
| Python |
#! /usr/bin/python
import sys
import os
# executable base_feature_dir refactor_feature_dir subdir_to_be_refactored files_to_be_refactored
def foo():
    # Move (or copy) the named files from <fromdir>/<hierarchy>/ to
    # <todir>/<hierarchy>/. Flags: -svn prefixes commands with "svn ",
    # -cp copies instead of moving. NOTE(review): -svn is accepted below
    # but not mentioned in the usage line.
    flagsvn = 0
    flagcp = 0
    if len(sys.argv) < 5:
        print "Usage: " + sys.argv[0] + " [-cp] <fromdir> <todir> <hierarchy> <files>"
        sys.exit()
    cmd=""
    if sys.argv[1] == "-svn":
        cmd = "svn "
        sys.argv.pop(1)
        flagsvn=1
    if sys.argv[1] == "-cp":
        cmd += "cp "
        sys.argv.pop(1)
        flagcp=1
    else:
        cmd += "mv "
    base = sys.argv[1]
    basesubdir = sys.argv[1] + "/" + sys.argv[3] + "/"
    feature = sys.argv[2]
    featuresubdir = sys.argv[2] + "/" + sys.argv[3] + "/"
    print ("flags ", flagcp, flagsvn)
    # os.access(path, 000) is an obscure existence test (mode 0 == F_OK):
    # create the destination hierarchy when it does not yet exist.
    if not os.access( featuresubdir, 000 ):
        os.makedirs( featuresubdir )
    if flagsvn == 1 and flagcp == 1:
        # NOTE(review): this "svn add" runs before the files are copied in
        # below, so freshly copied files are not added -- confirm intended.
        svnaddcmd = "svn add "
        svnaddcmd += feature
        svnaddcmd += "/*"
        print "adding new dir to svn:"
        print svnaddcmd
        os.system(svnaddcmd)
    for x in sys.argv[4:]:
        command = cmd + basesubdir + x + " " + featuresubdir
        print ( command )
        os.system( command )
foo()
| Python |
#!/usr/bin/env python
#
#
# Installation prefix for the bot's data files.
target = "jsb" # BHJTW change this to /var/cache/jsb on debian
import os
# Prefer setuptools; fall back to plain distutils when it is unavailable.
try:
    from setuptools import setup
except ImportError:
    from distutils.core import setup
print "TARGET IS %s" % target
# NOTE(review): ``upload`` is initialized here but not used in this
# portion of the file -- confirm before removing.
upload = []
def uploadfiles(dir):
    # Return the plain files directly inside *dir* (no recursion into
    # subdirectories), skipping hidden entries and compiled .pyc files.
    # Exits the whole process if *dir* is missing, since packaging
    # cannot proceed without it.
    upl = []
    if not os.path.isdir(dir): print "%s does not exist" % dir ; os._exit(1)
    for file in os.listdir(dir):
        if not file or file.startswith('.'):
            continue
        d = dir + os.sep + file
        if not os.path.isdir(d):
            if file.endswith(".pyc"):
                continue
            upl.append(d)
    return upl
def uploadlist(dir):
    """Recursively collect every uploadable file under *dir*.

    Walks the tree rooted at *dir* and returns a flat list of file
    paths (joined with ``os.sep``), skipping hidden entries (names
    starting with '.') and compiled ``.pyc`` files.

    :param dir: directory to scan. The parameter name is kept for
        backward compatibility even though it shadows the builtin.
    :return: list of path strings.
    """
    upl = []
    # Local names renamed from ``file``/``d``: ``file`` shadowed the
    # Python builtin of the same name.
    for entry in os.listdir(dir):
        if not entry or entry.startswith('.'):
            continue
        path = dir + os.sep + entry
        if os.path.isdir(path):
            # Descend into subdirectories and splice their files in.
            upl.extend(uploadlist(path))
        else:
            if entry.endswith(".pyc"):
                continue
            upl.append(path)
    return upl
# Package metadata and file manifest for the jsb distribution. Data files
# are installed under ``target`` (see top of file).
setup(
    name='jsb',
    version='0.7',
    url='http://jsonbot.googlecode.com/',
    download_url="http://code.google.com/p/jsonbot/downloads",
    author='Bart Thate',
    author_email='bthate@gmail.com',
    description='The bot for you!',
    license='MIT',
    scripts=['bin/jsb',
             'bin/jsb-init',
             'bin/jsb-irc',
             'bin/jsb-fleet',
             'bin/jsb-xmpp',
             'bin/jsb-release',
             'bin/jsb-rollback',
             'bin/jsb-run',
             'bin/jsb-stop',
             'bin/jsb-udp',
             'bin/jsb-upgrade',
             'bin/jsb-upload',
             'bin/jsb-uploadall'],
    packages=['jsb',
              'jsb.lib',
              'jsb.lib.rest',
              'jsb.lib.gae',
              'jsb.drivers',
              'jsb.drivers.console',
              'jsb.drivers.irc',
              'jsb.drivers.xmpp',
              'jsb.drivers.convore',
              'jsb.drivers.gae',
              'jsb.drivers.gae.web',
              'jsb.drivers.gae.wave',
              'jsb.drivers.gae.xmpp',
              'jsb.utils',
              'jsb.utils.gae',
              'jsb.plugs',
              'jsb.plugs.core',
              'jsb.plugs.wave',
              'jsb.plugs.common',
              'jsb.plugs.socket',
              'jsb.plugs.gae',
              'jsb.plugs.myplugs',
              'jsb.plugs.myplugs.socket',
              'jsb.plugs.myplugs.gae',
              'jsb.plugs.myplugs.common',
              'jsb.contrib',
              'jsb.contrib.simplejson',
              'jsb.contrib.tweepy',
              'jsb.contrib.requests',
              'jsb.contrib.requests.packages',
              'jsb.contrib.requests.packages.poster'],
    long_description = """ JSONBOT is a remote event-driven framework for building bots that talk JSON to each other over XMPP. IRC/Console/XMPP (shell) Wave/Web/XMPP (GAE) implementations provided. """,
    classifiers=[
        'Development Status :: 3 - Alpha',
        'Environment :: Console',
        'Environment :: Other Environment',
        'Intended Audience :: Developers',
        'License :: OSI Approved :: MIT License',
        'Operating System :: Unix',
        'Operating System :: Other OS',
        'Programming Language :: Python',
        'Topic :: Communications :: Chat',
        'Topic :: Software Development :: Libraries :: Python Modules'],
    # Non-code payload: data, example plugins and the GAE upload tree,
    # collected with the helpers defined above.
    data_files=[(target + os.sep + 'data', uploadlist('jsb' + os.sep + 'data')),
                (target + os.sep + 'data' + os.sep + 'examples', uploadfiles('jsb' + os.sep + 'data' + os.sep + 'examples')),
                (target + os.sep + 'upload', uploadfiles('jsb' + os.sep + 'upload')),
                (target + os.sep + 'upload' + os.sep + 'webapp2', uploadlist('jsb' + os.sep + 'upload' + os.sep + 'webapp2')),
                (target + os.sep + 'upload' + os.sep + 'assets', uploadlist('jsb' + os.sep + 'upload' + os.sep + 'assets')),
                (target + os.sep + 'upload' + os.sep + 'templates', uploadlist('jsb' + os.sep + 'upload' + os.sep +'templates')),
                (target + os.sep + 'upload' + os.sep + 'waveapi', uploadlist('jsb' + os.sep + 'upload' + os.sep + 'waveapi')),
                (target + os.sep + 'upload' + os.sep + 'waveapi' + os.sep + 'oauth', uploadlist('jsb' + os.sep + 'upload' + os.sep + 'waveapi' + os.sep + 'oauth')),
                (target + os.sep + 'upload' + os.sep + 'waveapi' + os.sep + 'simplejson', uploadlist('jsb' + os.sep + 'upload' + os.sep + 'waveapi' + os.sep + 'simplejson')),
                (target + os.sep + 'upload' + os.sep + 'gadgets', uploadlist('jsb' + os.sep + 'upload' + os.sep + 'gadgets'))],
    )
| Python |
# handler_dispatch.py
#
#
""" jsb dispatch handler. dispatches remote commands. """
## boot
from jsb.lib.boot import boot
boot()
## jsb imports
from jsb.utils.generic import fromenc, toenc
from jsb.version import getversion
from jsb.utils.xmpp import stripped
from jsb.utils.url import getpostdata, useragent
from jsb.lib.plugins import plugs
from jsb.lib.persist import Persist
from jsb.utils.exception import handle_exception, exceptionmsg
from jsb.lib.fleet import fleet
from jsb.lib.errors import NoSuchCommand
## gaelib imports
from jsb.lib.botbase import BotBase
from jsb.drivers.gae.web.bot import WebBot
from jsb.drivers.gae.web.event import WebEvent
from jsb.utils.gae.auth import checkuser
## google imports
from webapp2 import RequestHandler, Route, WSGIApplication
from google.appengine.api import users as gusers
## basic imports
import sys
import time
import types
import os
import logging
import google
# Announce startup in the logs so deploys are visible.
logging.warn(getversion('GAE DISPATCH'))
# Single module-level bot instance shared by all requests.
bot = WebBot(botname="gae-web")
class Dispatch_Handler(RequestHandler):
""" the bots remote command dispatcher. """
def options(self):
self.response.headers.add_header('Content-Type', 'application/x-www-form-urlencoded')
#self.response.headers.add_header("Cache-Control", "private")
self.response.headers.add_header("Server", getversion())
self.response.headers.add_header("Public", "*")
self.response.headers.add_header('Accept', '*')
self.response.headers.add_header('Access-Control-Allow-Origin', self.request.headers['Origin'])
self.response.out.write("Allow: *")
self.response.out.write('Access-Control-Allow-Origin: *')
logging.warn("dispatch - options response send to %s - %s" % (self.request.remote_addr, str(self.request.headers)))
def post(self):
""" this is where the command get disaptched. """
starttime = time.time()
try:
logging.warn("DISPATCH incoming: %s - %s" % (self.request.get('content'), self.request.remote_addr))
if not gusers.get_current_user():
logging.warn("denied access for %s - %s" % (self.request.remote_addr, self.request.get('content')))
self.response.out.write("acess denied .. plz login")
self.response.set_status(400)
return
event = WebEvent(bot=bot).parse(self.response, self.request)
event.cbtype = "DISPATCH"
event.type = "DISPATCH"
(userhost, user, u, nick) = checkuser(self.response, self.request, event)
bot.gatekeeper.allow(userhost)
event.bind(bot)
bot.doevent(event)
except NoSuchCommand:
self.response.out.write("no such command: %s" % event.usercmnd)
except google.appengine.runtime.DeadlineExceededError, ex:
self.response.out.write("the command took too long to finish: %s" % str(time.time()-starttime))
except Exception, ex:
self.response.out.write("the command had an eror: %s" % exceptionmsg())
handle_exception()
get = post
# the application: a single route mapping /dispatch/ to the handler above.
application = WSGIApplication([Route('/dispatch/', Dispatch_Handler) ], debug=True)
def main():
    """Run the WSGI application, logging failures instead of crashing."""
    global bot
    global application
    try: application.run()
    except google.appengine.runtime.DeadlineExceededError:
        # Request ran out of time; nothing useful left to do here.
        pass
    except Exception, ex:
        logging.error("dispatch - %s" % str(ex))
if __name__ == "__main__":
    main()
| Python |
# -*- coding: utf-8 -*-
"""
webapp2
=======
Taking Google App Engine's webapp to the next level!
:copyright: 2010 by tipfy.org.
:license: Apache Sotware License, see LICENSE for details.
"""
import logging
import re
import urllib
import urlparse
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_bare_wsgi_app, run_wsgi_app
import webob
import webob.exc
#: Base HTTP exception, set here as public interface.
HTTPException = webob.exc.HTTPException
#: Allowed request methods.
ALLOWED_METHODS = frozenset(['GET', 'POST', 'HEAD', 'OPTIONS', 'PUT',
'DELETE', 'TRACE'])
# Value used for missing default values.
DEFAULT_VALUE = object()
# Value used for required values.
REQUIRED_VALUE = object()
#: Regex for URL definitions.
_ROUTE_REGEX = re.compile(r'''
\< # The exact character "<"
(\w*) # The optional variable name (restricted to a-z, 0-9, _)
(?::([^>]*))? # The optional :regex part
\> # The exact character ">"
''', re.VERBOSE)
class Request(webapp.Request):
    """``webapp.Request`` extended with per-request storage.

    ``registry`` holds arbitrary objects scoped to the request
    lifetime; ``context`` holds variables used in rendering.
    """
    def __init__(self, *args, **kwargs):
        super(Request, self).__init__(*args, **kwargs)
        # Fresh, empty stores for every request.
        self.context = {}
        self.registry = {}
class Response(webob.Response):
    """Abstraction for an HTTP response.
    Implements all of ``webapp.Response`` interface, except ``wsgi_write()``
    as the response itself is returned by the WSGI application.
    """
    def __init__(self, *args, **kwargs):
        super(Response, self).__init__(*args, **kwargs)
        # webapp uses response.out.write(), so we point `.out` to `self`
        # and it will use `Response.write()`.
        self.out = self
    def write(self, text):
        """Appends a text to the response body."""
        # webapp uses StringIO as Response.out, so we need to convert anything
        # that is not str or unicode to string to keep same behavior.
        if not isinstance(text, basestring):
            text = unicode(text)
        if isinstance(text, unicode) and not self.charset:
            # A charset must be set before webob can encode a unicode body.
            self.charset = self.default_charset
        super(Response, self).write(text)
    def set_status(self, code, message=None):
        """Sets the HTTP status code of this response.
        :param code:
            The HTTP status code to set (e.g., 404).
        :param message:
            A status string. If none is given, uses the default from the
            HTTP/1.1 specification.
        """
        if message:
            self.status = '%d %s' % (code, message)
        else:
            self.status = code
    def clear(self):
        """Clears all data written to the output stream so that it is empty."""
        self.body = ''
    @staticmethod
    def http_status_message(code):
        """Returns the default HTTP status message for the given code.
        :param code:
            The HTTP code for which we want a message.
        """
        message = webob.statusreasons.status_reasons.get(code)
        if not message:
            raise KeyError('Invalid HTTP status code: %d' % code)
        return message
class RequestHandler(object):
"""Base HTTP request handler. Clients should subclass this class.
Subclasses should override get(), post(), head(), options(), etc to handle
different HTTP methods.
Implements most of ``webapp.RequestHandler`` interface.
"""
    def __init__(self, app=None, request=None, response=None):
        """Initializes this request handler with the given WSGI application,
        Request and Response.
        .. note::
            Parameters are optional only to support webapp's constructor which
            doesn't take any arguments. Consider them as required.
        :param app:
            A :class:`WSGIApplication` instance.
        :param request:
            A ``webapp.Request`` instance.
        :param response:
            A :class:`Response` instance.
        """
        # Store collaborators; these may be None when constructed
        # webapp-style (see initialize() below).
        self.app = app
        self.request = request
        self.response = response
    def initialize(self, request, response):
        """Initializes this request handler with the given WSGI application,
        Request and Response.
        .. warning::
            This is deprecated. It is here for compatibility with webapp only.
            Use __init__() instead.
        :param request:
            A ``webapp.Request`` instance.
        :param response:
            A :class:`Response` instance.
        """
        logging.warning('RequestHandler.initialize() is deprecated. '
            'Use __init__() instead.')
        # webapp does not pass the app in, so fall back to the active one
        # stored on the WSGIApplication class.
        self.app = WSGIApplication.app
        self.request = request
        self.response = response
    def __call__(self, _method, *args, **kwargs):
        """Dispatches the requested method.
        :param _method:
            The method to be dispatched: the request method in lower case
            (e.g., 'get', 'post', 'head', 'put' etc).
        :param args:
            Positional arguments to be passed to the method, coming from the
            matched :class:`Route`.
        :param kwargs:
            Keyword arguments to be passed to the method, coming from the
            matched :class:`Route`.
        :returns:
            None.
        """
        method = getattr(self, _method, None)
        if method is None:
            # 405 Method Not Allowed.
            # The response MUST include an Allow header containing a
            # list of valid methods for the requested resource.
            # http://www.w3.org/Protocols/rfc2616/rfc2616-sec10.html#sec10.4.6
            valid = ', '.join(get_valid_methods(self))
            self.abort(405, headers=[('Allow', valid)])
        # Execute the method.
        method(*args, **kwargs)
def error(self, code):
"""Clears the response output stream and sets the given HTTP error
code. This doesn't stop code execution; the response is still
available to be filled.
:param code:
HTTP status error code (e.g., 501).
"""
self.response.set_status(code)
self.response.clear()
    def abort(self, code, *args, **kwargs):
        """Raises an :class:`HTTPException`. This stops code execution,
        leaving the HTTP exception to be handled by an exception handler.
        :param code:
            HTTP status error code (e.g., 404).
        :param args:
            Positional arguments to be passed to the exception class.
        :param kwargs:
            Keyword arguments to be passed to the exception class.
        """
        # Delegates to the module-level abort() helper.
        abort(code, *args, **kwargs)
    def redirect(self, uri, permanent=False, abort=False):
        """Issues an HTTP redirect to the given relative URL. This won't stop
        code execution unless **abort** is True. A common practice is to
        return when calling the function::
            return self.redirect('/some-path')
        :param uri:
            A relative or absolute URI (e.g., '../flowers.html').
        :param permanent:
            If True, uses a 301 redirect instead of a 302 redirect.
        :param abort:
            If True, raises an exception to perform the redirect.
        .. seealso:: :meth:`redirect_to`.
        """
        # Resolve relative URIs against the current request URI so the
        # Location header is always absolute.
        absolute_url = str(urlparse.urljoin(self.request.uri, uri))
        if permanent:
            code = 301
        else:
            code = 302
        if abort:
            # Raises; nothing below runs in this case.
            self.abort(code, headers=[('Location', absolute_url)])
        self.response.headers['Location'] = absolute_url
        self.response.set_status(code)
        self.response.clear()
def redirect_to(self, _name, _permanent=False, _abort=False, *args,
                **kwargs):
    """Convenience wrapper combining :meth:`url_for` and :meth:`redirect`:
    issues an HTTP redirect to a named URL built using :meth:`url_for`.

    :param _name:
        The route name to redirect to.
    :param _permanent:
        If True, uses a 301 redirect instead of a 302 redirect.
    :param _abort:
        If True, raises an exception to perform the redirect.
    :param args:
        Positional arguments to build the URL.
    :param kwargs:
        Keyword arguments to build the URL.

    .. seealso:: :meth:`redirect` and :meth:`url_for`.
    """
    target = self.url_for(_name, *args, **kwargs)
    self.redirect(target, permanent=_permanent, abort=_abort)
def url_for(self, _name, *args, **kwargs):
    """Builds and returns a URL for a named :class:`Route`.

    Example usage inside a handler, given routes named ``home``, ``wiki``
    and ``wiki-page`` (the last one with a ``<page>`` variable)::

        url = self.url_for('home')                          # /
        url = self.url_for('home', _full=True)              # absolute URL
        url = self.url_for('wiki', _full=True, _anchor='my-heading')
        url = self.url_for('wiki-page', page='my-first-page')
        url = self.url_for('wiki-page', page='my-first-page', format='atom')

    :param _name:
        The route name.
    :param args:
        Positional arguments to build the URL. All positional variables
        defined in the route must be passed and must conform to the
        format set in the route. Extra arguments are ignored.
    :param kwargs:
        Keyword arguments to build the URL. All variables not set in the
        route default values must be passed and must conform to the format
        set in the route. Extra keywords are appended as URL arguments.

        A few keywords have special meaning:

        - **_full**: If True, builds an absolute URL.
        - **_scheme**: URL scheme, e.g., `http` or `https`. If defined,
          an absolute URL is always returned.
        - **_netloc**: Network location, e.g., `www.google.com`. If
          defined, an absolute URL is always returned.
        - **_anchor**: If set, appends an anchor to generated URL.
    :returns:
        An absolute or relative URL.

    .. note::
        Like :meth:`WSGIApplication.url_for`, this needs the request
        attribute to be set to build absolute URLs, because some routes
        may need to retrieve information from the request to set the URL
        host. The request object is passed explicitly instead of relying
        on ``os.environ`` for better testability and middleware support.

    .. seealso:: :meth:`Router.build`.
    """
    router = self.app.router
    return router.build(_name, self.request, args, kwargs)
def get_config(self, module, key=None, default=REQUIRED_VALUE):
    """Returns a configuration value for a module.

    .. seealso:: :meth:`Config.get_config`.
    """
    config = self.app.config
    return config.get_config(module, key=key, default=default)
def handle_exception(self, exception, debug_mode):
    """Called when this handler raises an exception during dispatch.

    The default behavior simply re-raises the active exception so that
    :meth:`WSGIApplication.handle_exception` can deal with it. Subclasses
    override this to render custom error responses.

    :param exception:
        The exception that was thrown.
    :param debug_mode:
        True if the web application is running in debug mode.
    """
    # Re-raise the exception currently being handled, unchanged.
    raise
class RedirectHandler(RequestHandler):
    """Redirects to the given URL for all GET requests. This is meant to be
    used when defining URL routes. You must provide at least the keyword
    argument *url* in the route default values. Example::

        def get_redirect_url(handler, *args, **kwargs):
            return handler.url_for('new-route-name')

        app = WSGIApplication([
            Route(r'/old-url', RedirectHandler, defaults={'url': '/new-url'}),
            Route(r'/other-old-url', RedirectHandler, defaults={'url': get_redirect_url}),
        ])

    Based on idea from `Tornado`_.
    """
    def get(self, *args, **kwargs):
        """Performs the redirect. Two keyword arguments can be passed through
        the URL route:

        - **url**: A URL string or a callable that returns a URL. The callable
          is called passing ``(handler, *args, **kwargs)`` as arguments.
        - **permanent**: If True, uses a 301 redirect instead of a 302
          redirect. Default is True.
        """
        # Fixed docstring: it previously said "If False, uses a 301",
        # which contradicted the code (permanent=True yields a 301).
        url = kwargs.pop('url', '/')
        permanent = kwargs.pop('permanent', True)
        if callable(url):
            url = url(self, *args, **kwargs)

        self.redirect(url, permanent=permanent)
class Config(dict):
    """A simple configuration dictionary keyed by module name. This is a
    dictionary of dictionaries. It requires all values to be dictionaries
    and applies updates and default values to the inner dictionaries instead
    of the first level one.

    The configuration object is available as a ``config`` attribute of
    :class:`WSGIApplication`. It is instantiated and populated when the app
    is built::

        config = {}
        config['my.module'] = {
            'foo': 'bar',
        }
        app = WSGIApplication(rules=[Rule('/', name='home', handler=MyHandler)], config=config)

    Then to read configuration values, use :meth:`RequestHandler.get_config`::

        class MyHandler(RequestHandler):
            def get(self):
                foo = self.get_config('my.module', 'foo')
                # ...
    """
    #: Loaded module configurations (list of module names whose
    #: ``default_config`` has already been merged in).
    loaded = None

    def __init__(self, values=None, defaults=None):
        """Initializes the configuration object.

        :param values:
            A dictionary of configuration dictionaries for modules.
        :param defaults:
            A dictionary of configuration dictionaries for initial default
            values. These modules are marked as loaded.
        """
        self.loaded = []
        if values is not None:
            assert isinstance(values, dict)
            for module, config in values.iteritems():
                self.update(module, config)

        if defaults is not None:
            assert isinstance(defaults, dict)
            for module, config in defaults.iteritems():
                self.setdefault(module, config)
                # Defaults count as loaded: __getitem__ won't try to
                # import a default_config for them later.
                self.loaded.append(module)

    def __getitem__(self, module):
        """Returns the configuration for a module. If it is not already
        set, loads a ``default_config`` variable from the given module and
        updates the configuration with those default values

        Every module that allows some kind of configuration sets a
        ``default_config`` global variable that is loaded by this function,
        cached and used in case the requested configuration was not defined
        by the user.

        :param module:
            The module name.
        :returns:
            A configuration value.
        """
        if module not in self.loaded:
            # Load default configuration and update config.
            values = import_string(module + '.default_config', silent=True)
            if values:
                self.setdefault(module, values)

            self.loaded.append(module)

        try:
            return dict.__getitem__(self, module)
        except KeyError:
            raise KeyError('Module %r is not configured.' % module)

    def __setitem__(self, module, values):
        """Sets a configuration for a module, requiring it to be a dictionary.

        :param module:
            A module name for the configuration, e.g.: `webapp2.ext.i18n`.
        :param values:
            A dictionary of configurations for the module.
        """
        assert isinstance(values, dict), 'Module configuration must be a dict.'
        # Wrap in SubConfig so per-key errors mention the module name.
        dict.__setitem__(self, module, SubConfig(module, values))

    def get(self, module, default=DEFAULT_VALUE):
        """Returns a configuration for a module. If default is not provided,
        returns an empty dict if the module is not configured.

        :param module:
            The module name.
        :params default:
            Default value to return if the module is not configured. If not
            set, returns an empty dict.
        :returns:
            A module configuration.
        """
        if default is DEFAULT_VALUE:
            default = {}

        return dict.get(self, module, default)

    def setdefault(self, module, values):
        """Sets a default configuration dictionary for a module.

        :param module:
            The module to set default configuration, e.g.: `webapp2.ext.i18n`.
        :param values:
            A dictionary of configurations for the module.
        :returns:
            The module configuration dictionary.
        """
        assert isinstance(values, dict), 'Module configuration must be a dict.'
        if module not in self:
            dict.__setitem__(self, module, SubConfig(module))

        module_dict = dict.__getitem__(self, module)

        # Only fills in keys the user has not already set.
        for key, value in values.iteritems():
            module_dict.setdefault(key, value)

        return module_dict

    def update(self, module, values):
        """Updates the configuration dictionary for a module.

        :param module:
            The module to update the configuration, e.g.: `webapp2.ext.i18n`.
        :param values:
            A dictionary of configurations for the module.
        """
        assert isinstance(values, dict), 'Module configuration must be a dict.'
        if module not in self:
            dict.__setitem__(self, module, SubConfig(module))

        dict.__getitem__(self, module).update(values)

    def get_config(self, module, key=None, default=REQUIRED_VALUE):
        """Returns a configuration value for a module and optionally a key.
        Will raise a KeyError if the module is not configured or the key
        doesn't exist and a default is not provided.

        :param module:
            The module name.
        :params key:
            The configuration key.
        :param default:
            Default value to return if the key doesn't exist.
        :returns:
            A module configuration.
        """
        # __getitem__ triggers lazy loading of the module's defaults.
        module_dict = self.__getitem__(module)
        if key is None:
            return module_dict

        return module_dict.get(key, default)
class SubConfig(dict):
    """Configuration dictionary for a single module.

    Raises descriptive ``KeyError``s that name the owning module when a
    key is missing or is still set to the ``REQUIRED_VALUE`` sentinel.
    """
    def __init__(self, module, values=None):
        """Stores the owning module name and the initial values."""
        dict.__init__(self, values or ())
        self.module = module

    def __getitem__(self, key):
        """Returns the value for *key*, rejecting unset required keys."""
        if key not in self:
            raise KeyError('Module %r does not have the config key %r' %
                (self.module, key))

        value = dict.__getitem__(self, key)
        if value is REQUIRED_VALUE:
            raise KeyError('Module %r requires the config key %r to be '
                'set.' % (self.module, key))

        return value

    def get(self, key, default=None):
        """Like ``dict.get``, but still rejects unset required keys."""
        value = dict.__getitem__(self, key) if key in self else default

        if value is REQUIRED_VALUE:
            raise KeyError('Module %r requires the config key %r to be '
                'set.' % (self.module, key))

        return value
class BaseRoute(object):
    """Interface for URL routes. Custom routes must implement some or all
    methods and attributes from this class.
    """
    #: Route name, used to build URLs.
    name = None
    #: True if this route is only used for URL generation and never matches.
    build_only = False

    def match(self, request):
        """Matches this route against the current request.

        :param request:
            A ``webapp.Request`` instance.
        :returns:
            A tuple ``(handler, args, kwargs)`` if the route matches, or None.
        """
        raise NotImplementedError()

    def build(self, request, args, kwargs):
        """Builds and returns a URL for this route.

        :param request:
            The current ``Request`` object.
        :param args:
            Tuple of positional arguments to build the URL.
        :param kwargs:
            Dictionary of keyword arguments to build the URL.
        :returns:
            An absolute or relative URL.
        """
        raise NotImplementedError()

    def get_routes(self):
        """Generator to get all routes from a route.

        :yields:
            This route or all nested routes that it contains.
        """
        yield self

    def get_match_routes(self):
        """Generator to get all routes that can be matched from a route.

        :yields:
            This route or all nested routes that can be matched.
        """
        if self.build_only:
            # A build-only route is useless without a name to build it by.
            if not self.name:
                raise ValueError("Route %r is build_only but doesn't have a "
                    "name" % self)
        else:
            yield self

    def get_build_routes(self):
        """Generator to get all routes that can be built from a route.

        :yields:
            This route or all nested routes that can be built.
        """
        if self.name is not None:
            yield self
class SimpleRoute(BaseRoute):
    """A route compatible with webapp's routing. URL building is not
    implemented because webapp's own support for it is rudimentary and
    rarely used.
    """
    def __init__(self, template, handler):
        """Initializes a URL route.

        :param template:
            A regex to be matched.
        :param handler:
            A :class:`RequestHandler` class or dotted name for a class to be
            lazily imported, e.g., ``my.module.MyHandler``.
        """
        self.template = template
        self.handler = handler
        # Compiled lazily on first match.
        self.regex = None

    def _regex(self):
        # Anchor the pattern on both ends before compiling.
        template = self.template
        if not template.startswith('^'):
            template = '^' + template

        if not template.endswith('$'):
            template += '$'

        self.template = template
        self.regex = re.compile(template)
        return self.regex

    def match(self, request):
        """Matches this route against the current request.

        .. seealso:: :meth:`BaseRoute.match`.
        """
        regex = self.regex or self._regex()
        matched = regex.match(request.path)
        if matched:
            # Unnamed groups become positional handler arguments.
            return self.handler, matched.groups(), {}

    def __repr__(self):
        return '<SimpleRoute(%r, %r)>' % (self.template, self.handler)

    __str__ = __repr__
class Route(BaseRoute):
    """A URL route definition. A route template contains parts enclosed by
    ``<>`` and is used to match requested URLs. Here are some examples::

        route = Route(r'/article/<id:[\d]+>', ArticleHandler)
        route = Route(r'/wiki/<page_name:\w+>', WikiPageHandler)
        route = Route(r'/blog/<year:\d{4}>/<month:\d{2}>/<day:\d{2}>/<slug:\w+>', BlogItemHandler)

    Based on `Another Do-It-Yourself Framework`_, by Ian Bicking. We added
    URL building, non-keyword variables and other improvements.
    """
    def __init__(self, template, handler=None, name=None, defaults=None,
        build_only=False):
        """Initializes a URL route.

        :param template:
            A route template to be matched, containing parts enclosed by ``<>``
            that can have only a name, only a regular expression or both:

              ============================= ==================================
              Format                        Example
              ============================= ==================================
              ``<name>``                    ``r'/<year>/<month>'``
              ``<:regular expression>``     ``r'/<:\d{4}>/<:\d{2}>'``
              ``<name:regular expression>`` ``r'/<year:\d{4}>/<month:\d{2}>'``
              ============================= ==================================

            If the name is set, the value of the matched regular expression
            is passed as keyword argument to the :class:`RequestHandler`.
            Otherwise it is passed as positional argument.

            The same template can mix parts with name, regular expression or
            both.
        :param handler:
            A :class:`RequestHandler` class or dotted name for a class to be
            lazily imported, e.g., ``my.module.MyHandler``.
        :param name:
            The name of this route, used to build URLs based on it.
        :param defaults:
            Default or extra keywords to be returned by this route. Values
            also present in the route variables are used to build the URL
            when they are missing.
        :param build_only:
            If True, this route never matches and is used only to build URLs.
        """
        self.template = template
        self.handler = handler
        self.name = name
        self.defaults = defaults or {}
        self.build_only = build_only
        # Lazy properties, filled by _parse_template() on first use.
        self.regex = None
        self.variables = None
        self.reverse_template = None

    def _parse_template(self):
        # Parses the route template once, producing: the match regex, a
        # per-variable validation regex dict, and a reverse template used
        # for URL building. Unnamed parts get synthetic '__N__' names so
        # they can be recovered as positional arguments later.
        self.variables = {}
        last = count = 0
        regex = reverse_template = ''
        for match in _ROUTE_REGEX.finditer(self.template):
            part = self.template[last:match.start()]
            name = match.group(1)
            expr = match.group(2) or '[^/]+'
            last = match.end()

            if not name:
                name = '__%d__' % count
                count += 1

            reverse_template += '%s%%(%s)s' % (part, name)
            regex += '%s(?P<%s>%s)' % (re.escape(part), name, expr)
            self.variables[name] = re.compile('^%s$' % expr)

        regex = '^%s%s$' % (regex, re.escape(self.template[last:]))
        self.regex = re.compile(regex)
        self.reverse_template = reverse_template + self.template[last:]
        self.has_positional_variables = count > 0

    def _regex(self):
        self._parse_template()
        return self.regex

    def _variables(self):
        self._parse_template()
        return self.variables

    def _reverse_template(self):
        self._parse_template()
        return self.reverse_template

    def match(self, request):
        """Matches this route against the current request.

        .. seealso:: :meth:`BaseRoute.match`.
        """
        regex = self.regex or self._regex()
        match = regex.match(request.path)
        if match:
            kwargs = self.defaults.copy()
            kwargs.update(match.groupdict())
            if kwargs and self.has_positional_variables:
                # Pop synthetic '__N__' keys out of kwargs and order them
                # numerically to rebuild the positional argument tuple.
                args = tuple(value[1] for value in sorted((int(key[2:-2]), \
                    kwargs.pop(key)) for key in \
                    kwargs.keys() if key.startswith('__')))
            else:
                args = ()

            return self.handler, args, kwargs

    def build(self, request, args, kwargs):
        """Builds a URL for this route.

        .. seealso:: :meth:`Router.build`.
        """
        full = kwargs.pop('_full', False)
        scheme = kwargs.pop('_scheme', None)
        netloc = kwargs.pop('_netloc', None)
        anchor = kwargs.pop('_anchor', None)

        # Any of _full/_scheme/_netloc forces an absolute URL.
        if full or scheme or netloc:
            if not netloc:
                netloc = request.host

            if not scheme:
                scheme = 'http'

        path, query = self._build(args, kwargs)
        return urlunsplit(scheme, netloc, path, query, anchor)

    def _build(self, args, kwargs):
        """Builds the path for this route.

        :returns:
            A tuple ``(path, kwargs)`` with the built URL path and extra
            keywords to be used as URL query arguments.
        """
        variables = self.variables or self._variables()
        if self.has_positional_variables:
            # Map positional args back onto their synthetic '__N__' names.
            for index, value in enumerate(args):
                key = '__%d__' % index
                if key in variables:
                    kwargs[key] = value

        values = {}
        for name, regex in variables.iteritems():
            value = kwargs.pop(name, self.defaults.get(name))
            if not value:
                raise KeyError('Missing argument "%s" to build URL.' % \
                    name.strip('_'))

            if not isinstance(value, basestring):
                value = str(value)

            # Each value must still satisfy its part's regex.
            if not regex.match(value):
                # NOTE(review): "buiding" typo in this runtime message is
                # preserved deliberately; changing it alters output.
                raise ValueError('URL buiding error: Value "%s" is not '
                    'supported for argument "%s".' % (value, name.strip('_')))

            values[name] = value

        # Leftover kwargs become URL query arguments.
        return (self.reverse_template % values, kwargs)

    def __repr__(self):
        return '<Route(%r, %r, name=%r, defaults=%r, build_only=%r)>' % \
            (self.template, self.handler, self.name, self.defaults,
            self.build_only)

    __str__ = __repr__
class Router(object):
    """A simple URL router used to match the current URL, dispatch the handler
    and build URLs for other resources.
    """
    #: Class used when the route is a tuple. Default is compatible with webapp.
    route_class = SimpleRoute

    def __init__(self, app, routes=None):
        """Initializes the router.

        :param app:
            The :class:`WSGIApplication` instance.
        :param routes:
            A list of :class:`Route` instances to initialize the router.
        """
        self.app = app
        # Handler classes imported lazily.
        self._handlers = {}
        # All routes that can be matched.
        self.match_routes = []
        # All routes that can be built.
        self.build_routes = {}
        if routes:
            for route in routes:
                self.add(route)

    def add(self, route):
        """Adds a route to this router.

        :param route:
            A :class:`Route` instance.
        """
        if isinstance(route, tuple):
            # Simple route, compatible with webapp.
            route = self.route_class(*route)

        for r in route.get_match_routes():
            self.match_routes.append(r)

        for r in route.get_build_routes():
            self.build_routes[r.name] = r

    def match(self, request):
        """Matches all routes against the current request. The first one that
        matches is returned.

        :param request:
            A ``webapp.Request`` instance.
        :returns:
            A tuple ``(route, args, kwargs)`` if a route matched, or None.
        """
        for route in self.match_routes:
            match = route.match(request)
            if match:
                # Remember which route matched and with what arguments.
                request.route = route
                request.route_args, request.route_kwargs = match[1], match[2]
                return match

    def dispatch(self, app, request, response, match, method=None):
        """Dispatches a request. This calls the :class:`RequestHandler` from
        the matched :class:`Route`.

        :param app:
            A :class:`WSGIApplication` instance.
        :param request:
            A ``webapp.Request`` instance.
        :param response:
            A :class:`Response` instance.
        :param match:
            A tuple ``(handler, args, kwargs)``, resulted from the matched
            route.
        :param method:
            Handler method to be called. In cases like exception handling, a
            method can be forced instead of using the request method.
        """
        handler_class, args, kwargs = match
        # HTTP method name -> handler attribute name (e.g. 'GET' -> 'get').
        method = method or request.method.lower().replace('-', '_')

        if isinstance(handler_class, basestring):
            # Dotted-name route: import the handler class once and cache it.
            if handler_class not in self._handlers:
                self._handlers[handler_class] = import_string(handler_class)

            handler_class = self._handlers[handler_class]

        new_style_handler = True
        try:
            handler = handler_class(app, request, response)
        except TypeError, e:
            # Support webapp's initialize().
            new_style_handler = False
            handler = handler_class()
            handler.initialize(request, response)

        try:
            if new_style_handler:
                handler(method, *args, **kwargs)
            else:
                # Support webapp handlers which don't implement __call__().
                getattr(handler, method)(*args)
        except Exception, e:
            if method == 'handle_exception':
                # We are already handling an exception.
                raise

            # If the handler implements exception handling, let it handle it.
            handler.handle_exception(e, app.debug)

    def build(self, name, request, args, kwargs):
        """Builds and returns a URL for a named :class:`Route`.

        :param name:
            The route name.
        :param request:
            The current ``Request`` object.
        :param args:
            Tuple of positional arguments to build the URL.
        :param kwargs:
            Dictionary of keyword arguments to build the URL.
        :returns:
            An absolute or relative URL.

        .. seealso:: :meth:`RequestHandler.url_for`.
        """
        route = self.build_routes.get(name)
        if not route:
            raise KeyError('Route "%s" is not defined.' % name)

        return route.build(request, args, kwargs)

    def __repr__(self):
        routes = self.match_routes + [v for k, v in \
            self.build_routes.iteritems() if v not in self.match_routes]

        return '<Router(%r)>' % routes

    __str__ = __repr__
class WSGIApplication(object):
    """Wraps a set of webapp RequestHandlers in a WSGI-compatible application.

    To use this class, pass a list of tuples ``(regex, RequestHandler class)``
    or :class:`Route` instances to the constructor, and pass the class instance
    to a WSGI handler. Example::

        from webapp2 import RequestHandler, WSGIApplication

        class HelloWorldHandler(RequestHandler):
            def get(self):
                self.response.out.write('Hello, World!')

        app = WSGIApplication([
            (r'/', HelloWorldHandler),
        ])

        def main():
            app.run()

        if __name__ == '__main__':
            main()

    The URL mapping is first-match based on the list ordering. Items in the
    list can also be an object that implements the method ``match(request)``.
    The provided class :class:`Route` is a route implementation that allows
    reversible URLs and keyword arguments passed to the handler. Example::

        app = WSGIApplication([
            Route(r'/articles', ArticlesHandler, 'articles'),
            Route(r'/articles/<id:[\d]+>', ArticleHandler, 'article'),
        ])

    .. seealso:: :class:`Route`.
    """
    #: Default class used for the request object.
    request_class = Request
    #: Default class used for the response object.
    response_class = Response
    #: Default class used for the router object.
    router_class = Router
    #: Default class used for the config object.
    config_class = Config

    #: Request variables.
    active_instance = app = request = None

    def __init__(self, routes=None, debug=False, config=None):
        """Initializes the WSGI application.

        :param routes:
            List of URL definitions as tuples ``(route, RequestHandler class)``.
        :param debug:
            True if this is debug mode, False otherwise.
        :param config:
            A configuration dictionary for the application.
        """
        self.debug = debug
        self.config = self.config_class(config)
        self.router = self.router_class(self, routes)
        # A dictionary mapping HTTP error codes to :class:`RequestHandler`
        # classes used to handle them.
        self.error_handlers = {}
        # A registry for objects used during the app lifetime.
        self.registry = {}
        # The active app.
        WSGIApplication.active_instance = WSGIApplication.app = self
        # Current request did not start yet, so we set a fallback.
        self.request = None

    def __call__(self, environ, start_response):
        """Called by WSGI when a request comes in. Calls :meth:`wsgi_app`."""
        return self.wsgi_app(environ, start_response)

    def wsgi_app(self, environ, start_response):
        """This is the actual WSGI application. This is not implemented in
        :meth:`__call__` so that middlewares can be applied without losing a
        reference to the class. So instead of doing this::

            app = MyMiddleware(app)

        It's a better idea to do this instead::

            app.wsgi_app = MyMiddleware(app.wsgi_app)

        Then you still have the original application object around and
        can continue to call methods on it.

        This idea comes from `Flask`_.

        :param environ:
            A WSGI environment.
        :param start_response:
            A callable accepting a status code, a list of headers and an
            optional exception context to start the response.
        """
        try:
            # The active app.
            WSGIApplication.active_instance = WSGIApplication.app = self
            # The active request.
            WSGIApplication.request = request = self.request_class(environ)
            response = self.response_class()

            if request.method not in ALLOWED_METHODS:
                # 501 Not Implemented.
                raise webob.exc.HTTPNotImplemented()

            # Matched values are (handler, args, kwargs).
            match = self.router.match(request)
            if match:
                self.router.dispatch(self, request, response, match)
            else:
                # 404 Not Found.
                raise webob.exc.HTTPNotFound()
        except Exception, e:
            try:
                self.handle_exception(request, response, e)
            except webob.exc.WSGIHTTPException, e:
                # Use the exception as response.
                response = e
            except Exception, e:
                # Error wasn't handled so we have nothing else to do.
                logging.exception(e)
                if self.debug:
                    raise

                # 500 Internal Server Error.
                response = webob.exc.HTTPInternalServerError()
        finally:
            # Clear the per-request class-level state in all cases.
            WSGIApplication.active_instance = WSGIApplication.app = \
                WSGIApplication.request = None

        return response(environ, start_response)

    def handle_exception(self, request, response, e):
        """Handles an exception. To set app-wide error handlers, define them
        using the corresponding HTTP status code in the ``error_handlers``
        dictionary of :class:`WSGIApplication`. For example, to set a custom
        `Not Found` page::

            class Handle404(RequestHandler):
                def handle_exception(self, exception, debug_mode):
                    self.response.out.write('Oops! I could swear this page was here!')
                    self.response.set_status(404)

            app = WSGIApplication([
                (r'/', MyHandler),
            ])
            app.error_handlers[404] = Handle404

        When an ``HTTPException`` is raised using :func:`abort` or because the
        app could not fulfill the request, the error handler defined for the
        current HTTP status code will be called. If it is not set, the
        exception is re-raised.

        .. note::
           Although being a :class:`RequestHandler`, the error handler will
           execute the ``handle_exception`` method after instantiation, instead
           of the method corresponding to the current request.

           Also, the error handler is responsible for setting the response
           status code, as shown in the example above.

        :param request:
            A ``webapp.Request`` instance.
        :param response:
            A :class:`Response` instance.
        :param e:
            The raised exception.
        """
        if isinstance(e, HTTPException):
            code = e.code
        else:
            code = 500

        handler = self.error_handlers.get(code)
        if handler:
            # Handle the exception using a custom handler.
            match = (handler, (e, self.debug), {})
            self.router.dispatch(self, request, response, match,
                method='handle_exception')
        else:
            # No exception handler. Catch it in the WSGI app.
            raise

    def url_for(self, _name, *args, **kwargs):
        """Builds and returns a URL for a named :class:`Route`.

        .. seealso:: :meth:`RequestHandler.url_for` and :meth:`Router.build`.
        """
        return self.router.build(_name, self.request, args, kwargs)

    def get_config(self, module, key=None, default=REQUIRED_VALUE):
        """Returns a configuration value for a module.

        .. seealso:: :meth:`Config.get_config`.
        """
        return self.config.get_config(module, key=key, default=default)

    def run(self, bare=False):
        """Runs the app using ``google.appengine.ext.webapp.util.run_wsgi_app``.
        This is generally called inside a ``main()`` function of the file
        mapped in *app.yaml* to run the application::

            # ...

            app = WSGIApplication([
                Route(r'/', HelloWorldHandler),
            ])

            def main():
                app.run()

            if __name__ == '__main__':
                main()

        :param bare:
            If True, uses ``run_bare_wsgi_app`` instead of ``run_wsgi_app``,
            which doesn't add WSGI middleware.
        """
        # Fix issue #772.
        if self.debug:
            fix_sys_path()

        if bare:
            run_bare_wsgi_app(self)
        else:
            run_wsgi_app(self)
def abort(code, *args, **kwargs):
    """Raises an ``HTTPException`` for the given status code. The exception
    is instantiated passing *args* and *kwargs*.

    :param code:
        A valid HTTP error code from ``webob.exc.status_map``, a dictionary
        mapping status codes to subclasses of ``HTTPException``.
    :param args:
        Arguments to be used to instantiate the exception.
    :param kwargs:
        Keyword arguments to be used to instantiate the exception.
    """
    exception_class = webob.exc.status_map.get(code)
    if exception_class is None:
        raise KeyError('No exception is defined for code %r.' % code)

    raise exception_class(*args, **kwargs)
def get_valid_methods(handler):
    """Returns a list of HTTP methods supported by a handler.

    :param handler:
        A :class:`RequestHandler` instance.
    :returns:
        A list of HTTP methods supported by the handler.
    """
    valid = []
    for http_method in ALLOWED_METHODS:
        # 'GET' -> 'get', 'MAKE-FOO' -> 'make_foo'.
        attr = http_method.lower().replace('-', '_')
        if getattr(handler, attr, None):
            valid.append(http_method)

    return valid
def import_string(import_name, silent=False):
    """Imports an object based on a string. If *silent* is True the return
    value will be None if the import fails.

    Simplified version of the function with same name from `Werkzeug`_.

    :param import_name:
        The dotted name for the object to import.
    :param silent:
        If True, import errors are ignored and None is returned instead.
    :returns:
        The imported object.
    """
    import_name = to_utf8(import_name)
    try:
        if '.' not in import_name:
            # A bare module name: import it directly.
            return __import__(import_name)

        # 'pkg.mod.attr' -> import 'pkg.mod', then fetch 'attr' from it.
        module, obj = import_name.rsplit('.', 1)
        return getattr(__import__(module, None, None, [obj]), obj)
    except (ImportError, AttributeError):
        if not silent:
            raise
def to_utf8(value):
    """Returns *value* as a UTF-8 encoded byte string.

    This function comes from `Tornado`_.

    :param value:
        A unicode or string to be encoded.
    :returns:
        The encoded string.
    """
    if not isinstance(value, unicode):
        # Already a byte string; anything else is a programming error.
        assert isinstance(value, str)
        return value

    return value.encode('utf-8')
def to_unicode(value):
    """Returns *value* as a unicode string, decoding UTF-8 if needed.

    This function comes from `Tornado`_.

    :param value:
        A unicode or string to be decoded.
    :returns:
        The decoded string.
    """
    if not isinstance(value, str):
        # Already unicode; anything else is a programming error.
        assert isinstance(value, unicode)
        return value

    return value.decode('utf-8')
def urlunsplit(scheme=None, netloc=None, path=None, query=None, fragment=None):
    """Similar to ``urlparse.urlunsplit``, but will escape values and
    urlencode and sort query arguments.

    :param scheme:
        URL scheme, e.g., `http` or `https`.
    :param netloc:
        Network location, e.g., `localhost:8080` or `www.google.com`.
    :param path:
        URL path.
    :param query:
        URL query as an escaped string, or a dictionary or list of key-values
        tuples to build a query.
    :param fragment:
        Fragment identifier, also known as "anchor".
    :returns:
        An assembled absolute or relative URL.
    """
    # A scheme without a netloc (or vice versa) can't form a valid absolute
    # URL, so drop both and produce a relative URL instead.
    if not (scheme and netloc):
        scheme = None
        netloc = None

    if path:
        path = urllib.quote(to_utf8(path))

    if query and not isinstance(query, basestring):
        if isinstance(query, dict):
            query = query.items()

        # Flatten multi-valued keys into individual (key, value) pairs.
        pairs = []
        for key, values in query:
            if isinstance(values, basestring):
                values = (values,)

            for value in values:
                pairs.append((to_utf8(key), to_utf8(value)))

        # Sorting should be optional? Sorted args are commonly needed to
        # build URL signatures for services.
        pairs.sort()
        query = urllib.urlencode(pairs)

    if fragment:
        fragment = urllib.quote(to_utf8(fragment))

    return urlparse.urlunsplit((scheme, netloc, path, query, fragment))
#: Snapshot of the pristine sys.path, taken on the first call below.
_ULTIMATE_SYS_PATH = None


def fix_sys_path():
    """A fix for issue 772. We must keep this here until it is fixed in the dev
    server. I know, I don't like it either.

    See: http://code.google.com/p/googleappengine/issues/detail?id=772
    """
    global _ULTIMATE_SYS_PATH
    import sys
    if _ULTIMATE_SYS_PATH is None:
        # First call: remember the current path as the canonical one.
        _ULTIMATE_SYS_PATH = list(sys.path)
        return

    if sys.path != _ULTIMATE_SYS_PATH:
        # Restore in place so existing references to sys.path stay valid.
        sys.path[:] = _ULTIMATE_SYS_PATH
| Python |
# handler_docs.py
#
#
""" xmpp request handler. """
## jsb imports
from jsb.utils.exception import handle_exception
from jsb.version import getversion
## google imports
import webapp2
## basic imports
import sys
import time
import types
import logging
## greet
logging.warn(getversion('REDIRECT'))
## classes
class DocsHandler(webapp2.RequestHandler):
    """Redirects documentation requests to their generated HTML location."""

    def get(self, url=None):
        """Handles GET by rewriting *url* into the rendered-HTML tree and
        issuing a redirect to it.

        :param url:
            The requested documentation path, captured by the route.
            NOTE(review): if this is ever None the .endswith call raises
            AttributeError, which is swallowed by the except below with no
            response written -- confirm that is intended.
        """
        try:
            # Bare (non-.html) requests get an index.html appended.
            if not url.endswith(".html"):
                if not url.endswith('/'):
                    url += u"/index.html"
                else:
                    url += u"index.html"
            # Inject an 'html' path component and keep only the last three
            # segments as the redirect target.
            splitted = url.split('/')
            splitted.insert(2, 'html')
            goto = '/'.join(splitted[-3:])
            logging.warn("docs - redirecting %s" % goto)
            self.redirect(goto)
        except Exception, ex:
            handle_exception()
            #self.send_error(500)
# Single catch-all route: every request path is captured into 'url'.
application = webapp2.WSGIApplication([webapp2.Route(r'<url:.*>', DocsHandler)],
    debug=True)


def main():
    """CGI entry point: runs the module-level WSGI application."""
    global application
    application.run()


if __name__ == "__main__":
    main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines event types that are sent from the wave server.
This module defines all of the event types currently supported by the wave
server. Each event type is sub classed from Event and has its own
properties depending on the type.
"""
class Context(object):
  """Constants naming the blip context a robot may request with an event."""
  ROOT = 'ROOT'          #: the root blip of the wavelet
  PARENT = 'PARENT'      #: the parent blip of the event blip
  SIBLINGS = 'SIBLINGS'  #: the sibling blips of the event blip
  CHILDREN = 'CHILDREN'  #: the child blips of the event blip
  SELF = 'SELF'          #: the event blip itself
  ALL = 'ALL'            #: every blip of the event wavelet
class Event(object):
  """A single event received from the wave server.

  Attributes:
    modified_by: Participant id that caused this event.
    timestamp: Timestamp that this event occurred on the server.
    type: Type string of this event.
    properties: Dictionary of all extra properties. Typically the derived
        event type exposes these explicitly as attributes, but experimental
        features might appear in properties before that.
    blip_id: The blip_id of the blip for blip related events or the root
        blip for wavelet related events.
    blip: If available, the blip with id equal to the event's blip_id.
    proxying_for: If available, the proxyingFor id of the robot that caused
        the event.
    raw_data: The raw JSON dictionary this event was constructed from.
  """

  def __init__(self, json, wavelet):
    """Initializes the event from server-supplied JSON data.

    Args:
      json: JSON data from Wave server.
      wavelet: Wavelet the event occurred on; its blips mapping is used to
          resolve blip_id into an actual blip object (or None).
    """
    self.raw_data = json
    self.modified_by = json.get('modifiedBy')
    self.timestamp = json.get('timestamp', 0)
    self.type = json.get('type')
    self.proxying_for = json.get('proxyingFor')
    props = json.get('properties', {})
    self.properties = props
    self.blip_id = props.get('blipId')
    self.blip = wavelet.blips.get(self.blip_id)
class WaveletBlipCreated(Event):
  """Fired when a new blip is created on the wavelet.

  Attributes:
    new_blip_id: The id of the newly created blip.
    new_blip: If in context, the actual new blip.
  """
  type = 'WAVELET_BLIP_CREATED'

  def __init__(self, json, wavelet):
    super(WaveletBlipCreated, self).__init__(json, wavelet)
    created_id = self.properties['newBlipId']
    self.new_blip_id = created_id
    self.new_blip = wavelet.blips.get(created_id)
class WaveletBlipRemoved(Event):
  """Fired when a blip is removed from the wavelet.

  Attributes:
    removed_blip_id: the id of the removed blip
    removed_blip: if in context, the removed blip
  """
  type = 'WAVELET_BLIP_REMOVED'

  def __init__(self, json, wavelet):
    super(WaveletBlipRemoved, self).__init__(json, wavelet)
    gone_id = self.properties['removedBlipId']
    self.removed_blip_id = gone_id
    self.removed_blip = wavelet.blips.get(gone_id)
class WaveletParticipantsChanged(Event):
  """Fired when the participant list of the wave changes.

  Attributes:
    participants_added: List of participants added.
    participants_removed: List of participants removed.
  """
  type = 'WAVELET_PARTICIPANTS_CHANGED'

  def __init__(self, json, wavelet):
    super(WaveletParticipantsChanged, self).__init__(json, wavelet)
    props = self.properties
    self.participants_added = props['participantsAdded']
    self.participants_removed = props['participantsRemoved']
class WaveletSelfAdded(Event):
  """Fired when this robot is added as a participant of the wavelet."""
  type = 'WAVELET_SELF_ADDED'
class WaveletSelfRemoved(Event):
  """Fired when this robot is removed from the wavelet's participants."""
  type = 'WAVELET_SELF_REMOVED'
class WaveletTitleChanged(Event):
  """Fired when the wavelet's title is changed.

  Attributes:
    title: The new title.
  """
  type = 'WAVELET_TITLE_CHANGED'

  def __init__(self, json, wavelet):
    super(WaveletTitleChanged, self).__init__(json, wavelet)
    # The server delivers the new title as a plain property.
    self.title = self.properties['title']
class BlipContributorsChanged(Event):
  """Event triggered when the contributors to this blip change.

  Attributes:
    contributors_added: List of contributors that were added.
    contributors_removed: List of contributors that were removed.
  """
  type = 'BLIP_CONTRIBUTORS_CHANGED'

  def __init__(self, json, wavelet):
    super(BlipContributorsChanged, self).__init__(json, wavelet)
    # Fix: the attributes were historically misspelled ("contibutors_*"),
    # contradicting the documented names above. Expose the documented names
    # and keep the misspelled aliases for backward compatibility with any
    # robot code already relying on them.
    self.contributors_added = self.properties['contributorsAdded']
    self.contributors_removed = self.properties['contributorsRemoved']
    self.contibutors_added = self.contributors_added
    self.contibutors_removed = self.contributors_removed
class BlipSubmitted(Event):
  """Fired when a blip is submitted."""
  type = 'BLIP_SUBMITTED'
class DocumentChanged(Event):
  """Fired when a document is changed.

  This fires after any document change, so use it carefully (ideally with
  filters) to keep the traffic to the robot reasonable.
  """
  type = 'DOCUMENT_CHANGED'
class FormButtonClicked(Event):
  """Fired when a form button is clicked.

  Attributes:
    button_name: The name of the button that was clicked.
  """
  type = 'FORM_BUTTON_CLICKED'

  def __init__(self, json, wavelet):
    super(FormButtonClicked, self).__init__(json, wavelet)
    self.button_name = self.properties['buttonName']
class GadgetStateChanged(Event):
  """Fired when a gadget's state changes.

  Attributes:
    index: The index of the gadget that changed in the document.
    old_state: The old state of the gadget.
  """
  type = 'GADGET_STATE_CHANGED'

  def __init__(self, json, wavelet):
    super(GadgetStateChanged, self).__init__(json, wavelet)
    props = self.properties
    self.index = props['index']
    self.old_state = props['oldState']
class AnnotatedTextChanged(Event):
  """Fired when text carrying an annotation has changed.

  Mainly useful in combination with a filter on the annotation name.

  Attributes:
    name: The name of the annotation.
    value: The value of the annotation that changed.
  """
  type = 'ANNOTATED_TEXT_CHANGED'

  def __init__(self, json, wavelet):
    super(AnnotatedTextChanged, self).__init__(json, wavelet)
    self.name = self.properties['name']
    # 'value' may be absent, so fall back to None.
    self.value = self.properties.get('value')
class OperationError(Event):
  """Fired when a previously submitted operation failed on the server.

  Attributes:
    operation_id: The operation id of the failing operation.
    error_message: More information as to what went wrong.
  """
  type = 'OPERATION_ERROR'

  def __init__(self, json, wavelet):
    super(OperationError, self).__init__(json, wavelet)
    props = self.properties
    self.operation_id = props['operationId']
    self.error_message = props['message']
class WaveletCreated(Event):
  """Fired when a new wavelet is created.

  Only triggered for wavelets created by the robot itself, and can be used
  to initialize the newly created wave. Wavelets created by other
  participants stay invisible to the robot until the robot is added to the
  wave, in which case WaveletSelfAdded is triggered.

  Attributes:
    message: Whatever string was passed into the new_wave call as
        message (if any).
  """
  type = 'WAVELET_CREATED'

  def __init__(self, json, wavelet):
    super(WaveletCreated, self).__init__(json, wavelet)
    self.message = self.properties['message']
class WaveletFetched(Event):
  """Fired when a wavelet is fetched.

  Triggered after the robot requests to see another wavelet; the robot must
  already be a participant on that wavelet.

  Attributes:
    message: Whatever string was passed into the new_wave call as
        message (if any).
  """
  type = 'WAVELET_FETCHED'

  def __init__(self, json, wavelet):
    super(WaveletFetched, self).__init__(json, wavelet)
    self.message = self.properties['message']
class WaveletTagsChanged(Event):
  """Event triggered when the tags on a wavelet change."""
  # NOTE: the redundant __init__ that merely delegated to Event.__init__
  # with the same arguments has been removed; the inherited constructor
  # is identical in behavior.
  type = 'WAVELET_TAGS_CHANGED'
def is_event(cls):
  """Returns whether the passed class is a concrete event type.

  A class counts as an event when it subclasses Event and declares a
  `type` string; non-class values yield False.
  """
  try:
    return issubclass(cls, Event) and hasattr(cls, 'type')
  except TypeError:
    # issubclass raises TypeError for non-class arguments.
    return False
# Registry of every event class defined in this module.
ALL = [cls for cls in list(globals().values()) if is_event(cls)]
| Python |
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""A module to run wave robots on app engine."""
import logging
import sys
import events
from google.appengine.api import urlfetch
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
class CapabilitiesHandler(webapp.RequestHandler):
  """Serves a robot callback's result as the body of a GET response."""

  def __init__(self, method, contenttype):
    """Stores the callback producing the body and its content type."""
    self._method = method
    self._contenttype = contenttype

  def get(self):
    """Handles HTTP GET request."""
    self.response.headers['Content-Type'] = self._contenttype
    body = self._method()
    self.response.out.write(body)
class ProfileHandler(webapp.RequestHandler):
  """Serves a robot's profile, optionally proxied for a named participant."""

  def __init__(self, method, contenttype):
    """Stores the profile callback and its content type."""
    self._method = method
    self._contenttype = contenttype

  def get(self):
    """Handles HTTP GET request."""
    self.response.headers['Content-Type'] = self._contenttype
    # When a 'name' parameter is present, return the proxied profile.
    name = self.request.get('name')
    if name:
      body = self._method(name)
    else:
      body = self._method()
    self.response.out.write(body)
class RobotEventHandler(webapp.RequestHandler):
  """Handler for the dispatching of events to various handlers to a robot.

  This handler only responds to post events with a JSON post body. Its
  primary task is to separate out the context data from the events in the
  post body and dispatch all events in order. Once all events have been
  dispatched it serializes the context data and its associated operations
  as a response.
  """

  def __init__(self, robot):
    """Initializes self with a specific robot."""
    self._robot = robot

  def get(self):
    """Handles the get event for debugging.

    This is useful for debugging but since event bundles tend to be
    rather big it often won't fit for more complex requests.
    """
    ops = self.request.get('events')
    if ops:
      # Bug fix: this previously assigned the imported `events` *module*
      # to the request body instead of the query-parameter payload.
      self.request.body = ops
      self.post()

  def post(self):
    """Handles HTTP POST requests."""
    json_body = self.request.body
    if not json_body:
      # TODO(davidbyttow): Log error?
      return
    # Redirect stdout to stderr while executing handlers. This way, any
    # stray "print" statements in bot code go to the error logs instead of
    # breaking the JSON response sent to the HTTP channel.
    saved_stdout, sys.stdout = sys.stdout, sys.stderr
    try:
      json_body = unicode(json_body, 'utf8')
      logging.info('Incoming: %s', json_body)
      json_response = self._robot.process_events(json_body)
      logging.info('Outgoing: %s', json_response)
    finally:
      # Robustness fix: always restore stdout, even if a handler raises.
      sys.stdout = saved_stdout
    # Build the response.
    self.response.headers['Content-Type'] = 'application/json; charset=utf-8'
    self.response.out.write(json_response.encode('utf-8'))
def operation_error_handler(event, wavelet):
  """Default operation error handler, logging what went wrong."""
  if not isinstance(event, events.OperationError):
    return
  logging.error('Previously operation failed: id=%s, message: %s',
                event.operation_id, event.error_message)
def appengine_post(url, data, headers):
  """POSTs `data` to `url` via App Engine urlfetch.

  Returns:
    (status_code, response_body) tuple.
  """
  response = urlfetch.fetch(url=url,
                            payload=data,
                            method='POST',
                            headers=headers,
                            deadline=10)
  return response.status_code, response.content
class RobotVerifyTokenHandler(webapp.RequestHandler):
  """Handler for the token_verify request."""

  def __init__(self, robot):
    """Initializes self with a specific robot."""
    self._robot = robot

  def get(self):
    """Handles the get event for debugging. Ops usually too long."""
    token, st = self._robot.get_verification_token_info()
    if token is None:
      self.error(404)
      self.response.out.write('No token set')
      return
    # Bug fix: logging moved after the None check; the old
    # "'token=' + token" concatenation raised TypeError when no token
    # was configured, instead of returning the intended 404.
    logging.info('token=%s', token)
    if st is not None:
      # When an ST value was registered, the request must echo it back.
      if self.request.get('st') != st:
        self.response.out.write('Invalid st value passed')
        return
    self.response.out.write(token)
def create_robot_webapp(robot, debug=False, extra_handlers=None):
  """Returns an instance of webapp.WSGIApplication with robot handlers."""
  # webapp constructs handler classes with no arguments, so each route maps
  # to a zero-argument factory closing over `robot`.
  routes = [
      ('.*/_wave/capabilities.xml',
       lambda: CapabilitiesHandler(robot.capabilities_xml,
                                   'application/xml')),
      ('.*/_wave/robot/profile',
       lambda: ProfileHandler(robot.profile_json,
                              'application/json')),
      ('.*/_wave/robot/jsonrpc',
       lambda: RobotEventHandler(robot)),
      ('.*/_wave/verify_token',
       lambda: RobotVerifyTokenHandler(robot)),
  ]
  routes.extend(extra_handlers or [])
  return webapp.WSGIApplication(routes, debug=debug)
def run(robot, debug=False, log_errors=True, extra_handlers=None):
  """Sets up the webapp handlers for this robot and starts listening.

  A robot is typically setup in the following steps:
    1. Instantiate and define robot.
    2. Register various handlers that it is interested in.
    3. Call Run, which will setup the handlers for the app.

  For example:
    robot = Robot('Terminator',
                  image_url='http://www.sky.net/models/t800.png',
                  profile_url='http://www.sky.net/models/t800.html')
    robot.register_handler(WAVELET_PARTICIPANTS_CHANGED, KillParticipant)
    run(robot)

  Args:
    robot: the robot to run. This robot is modified to use app engines
        urlfetch for posting http.
    debug: Optional variable that defaults to False and is passed through
        to the webapp application to determine if it should show debug info.
    log_errors: Optional flag that defaults to True and determines whether
        a default handlers to catch errors should be setup that uses the
        app engine logging to log errors.
    extra_handlers: Optional list of tuples that are passed to the webapp
        to install more handlers. For example, passing
        [('/about', AboutHandler),] would install an extra about handler
        for the robot.
  """
  if log_errors:
    robot.register_handler(events.OperationError, operation_error_handler)
  # Use App Engine's urlfetch for outgoing posts instead of urllib.
  robot.http_post = appengine_post
  webapp_instance = create_robot_webapp(robot, debug, extra_handlers)
  run_wsgi_app(webapp_instance)
| Python |
#!/usr/bin/python2.4
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the blip module."""
import unittest
import blip
import element
import ops
import simplejson
# Canonical blip payload used as the base for every test blip; individual
# tests override fields through new_blip(**overrides).
TEST_BLIP_DATA = {
    'childBlipIds': [],
    'content': '\nhello world!\nanother line',
    'contributors': ['robot@test.com', 'user@test.com'],
    'creator': 'user@test.com',
    'lastModifiedTime': 1000,
    'parentBlipId': None,
    'annotations': [{'range': {'start': 2, 'end': 3},
                     'name': 'key', 'value': 'val'}],
    'waveId': 'test.com!w+g3h3im',
    'waveletId': 'test.com!root+conv',
    # A gadget element placed at document index 14.
    'elements':{'14':{'type':'GADGET','properties':{'url':'http://a/b.xml'}}},
}

# Blip ids shared across the tests below.
CHILD_BLIP_ID = 'b+42'
ROOT_BLIP_ID = 'b+43'
class TestBlip(unittest.TestCase):
  """Tests the primary data structures for the wave model."""

  def assertBlipStartswith(self, expected, totest):
    # Custom assertion: compare only the leading len(expected) characters
    # of the blip's text.
    actual = totest.text[:len(expected)]
    self.assertEquals(expected, actual)

  def new_blip(self, **args):
    """Create a blip for testing."""
    # Start from the canonical TEST_BLIP_DATA, apply per-test overrides,
    # then register the blip so parent/child lookups can resolve it.
    data = TEST_BLIP_DATA.copy()
    data.update(args)
    res = blip.Blip(data, self.all_blips, self.operation_queue)
    self.all_blips[res.blip_id] = res
    return res

  def setUp(self):
    # Fresh blip registry and operation queue for every test case.
    self.all_blips = {}
    self.operation_queue = ops.OperationQueue()

  def testBlipProperties(self):
    root = self.new_blip(blipId=ROOT_BLIP_ID,
                         childBlipIds=[CHILD_BLIP_ID])
    child = self.new_blip(blipId=CHILD_BLIP_ID,
                          parentBlipId=ROOT_BLIP_ID)
    self.assertEquals(ROOT_BLIP_ID, root.blip_id)
    self.assertEquals(set([CHILD_BLIP_ID]), root.child_blip_ids)
    self.assertEquals(set(TEST_BLIP_DATA['contributors']), root.contributors)
    self.assertEquals(TEST_BLIP_DATA['creator'], root.creator)
    self.assertEquals(TEST_BLIP_DATA['content'], root.text)
    self.assertEquals(TEST_BLIP_DATA['lastModifiedTime'],
                      root.last_modified_time)
    self.assertEquals(TEST_BLIP_DATA['parentBlipId'], root.parent_blip_id)
    self.assertEquals(TEST_BLIP_DATA['waveId'], root.wave_id)
    self.assertEquals(TEST_BLIP_DATA['waveletId'], root.wavelet_id)
    self.assertEquals(TEST_BLIP_DATA['content'][3], root[3])
    # Index 14 holds the gadget element declared in TEST_BLIP_DATA.
    self.assertEquals(element.Gadget.class_type, root[14].type)
    self.assertEquals('http://a/b.xml', root[14].url)
    self.assertEquals('a', root.text[14])
    self.assertEquals(len(TEST_BLIP_DATA['content']), len(root))
    self.assertTrue(root.is_root())
    self.assertFalse(child.is_root())
    self.assertEquals(root, child.parent_blip)

  def testBlipSerialize(self):
    # Round-trip a blip through serialize() and verify all fields survive.
    root = self.new_blip(blipId=ROOT_BLIP_ID,
                         childBlipIds=[CHILD_BLIP_ID])
    serialized = root.serialize()
    unserialized = blip.Blip(serialized, self.all_blips, self.operation_queue)
    self.assertEquals(root.blip_id, unserialized.blip_id)
    self.assertEquals(root.child_blip_ids, unserialized.child_blip_ids)
    self.assertEquals(root.contributors, unserialized.contributors)
    self.assertEquals(root.creator, unserialized.creator)
    self.assertEquals(root.text, unserialized.text)
    self.assertEquals(root.last_modified_time, unserialized.last_modified_time)
    self.assertEquals(root.parent_blip_id, unserialized.parent_blip_id)
    self.assertEquals(root.wave_id, unserialized.wave_id)
    self.assertEquals(root.wavelet_id, unserialized.wavelet_id)
    self.assertTrue(unserialized.is_root())

  def testDocumentOperations(self):
    # NOTE: the local name `blip` shadows the imported module in this and
    # the following tests.
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    newlines = [x for x in blip.find('\n')]
    self.assertEquals(2, len(newlines))
    blip.first('world').replace('jupiter')
    bits = blip.text.split('\n')
    self.assertEquals(3, len(bits))
    self.assertEquals('hello jupiter!', bits[1])
    blip.range(2, 5).delete()
    self.assertBlipStartswith('\nho jupiter', blip)
    blip.first('ho').insert_after('la')
    self.assertBlipStartswith('\nhola jupiter', blip)
    blip.at(3).insert(' ')
    self.assertBlipStartswith('\nho la jupiter', blip)

  def testElementHandling(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    url = 'http://www.test.com/image.png'
    org_len = len(blip)
    blip.append(element.Image(url=url))
    elems = [elem for elem in blip.find(element.Image, url=url)]
    self.assertEquals(1, len(elems))
    elem = elems[0]
    self.assertTrue(isinstance(elem, element.Image))
    # Inserting text must shift the element position accordingly.
    blip.at(1).insert('twelve chars')
    self.assertTrue(blip.text.startswith('\ntwelve charshello'))
    elem = blip[org_len + 12].value()
    self.assertTrue(isinstance(elem, element.Image))
    blip.first('twelve ').delete()
    self.assertTrue(blip.text.startswith('\nchars'))
    elem = blip[org_len + 12 - len('twelve ')].value()
    self.assertTrue(isinstance(elem, element.Image))
    blip.first('chars').replace(element.Image(url=url))
    elems = [elem for elem in blip.find(element.Image, url=url)]
    self.assertEquals(2, len(elems))
    self.assertTrue(blip.text.startswith('\n hello'))
    elem = blip[1].value()
    self.assertTrue(isinstance(elem, element.Image))

  def testAnnotationHandling(self):
    key = 'style/fontWeight'
    def get_bold():
      # Helper: find the 'bold' annotation under `key`, if any.
      for an in blip.annotations[key]:
        if an.value == 'bold':
          return an
      return None
    json = ('[{"range":{"start":3,"end":6},"name":"%s","value":"bold"}]'
            % key)
    blip = self.new_blip(blipId=ROOT_BLIP_ID,
                         annotations=simplejson.loads(json))
    self.assertEquals(1, len(blip.annotations))
    self.assertNotEqual(None, get_bold().value)
    self.assertTrue(key in blip.annotations)
    # extend the bold annotation by adding:
    blip.range(5, 8).annotate(key, 'bold')
    self.assertEquals(1, len(blip.annotations))
    self.assertEquals(8, get_bold().end)
    # clip by adding a same keyed:
    blip[4:12].annotate(key, 'italic')
    self.assertEquals(2, len(blip.annotations[key]))
    self.assertEquals(4, get_bold().end)
    # now split the italic one:
    blip.range(6, 7).clear_annotation(key)
    self.assertEquals(3, len(blip.annotations[key]))
    # test names and iteration
    self.assertEquals(1, len(blip.annotations.names()))
    self.assertEquals(3, len([x for x in blip.annotations]))
    blip[3: 5].annotate('foo', 'bar')
    self.assertEquals(2, len(blip.annotations.names()))
    self.assertEquals(4, len([x for x in blip.annotations]))
    blip[3: 5].clear_annotation('foo')
    # clear the whole thing
    blip.all().clear_annotation(key)
    # getting to the key should now throw an exception
    self.assertRaises(KeyError, blip.annotations.__getitem__, key)

  def testBlipOperations(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    self.assertEquals(1, len(self.all_blips))
    otherblip = blip.reply()
    otherblip.append('hello world')
    self.assertEquals('hello world', otherblip.text)
    self.assertEquals(blip.blip_id, otherblip.parent_blip_id)
    self.assertEquals(2, len(self.all_blips))
    inline = blip.insert_inline_blip(3)
    self.assertEquals(blip.blip_id, inline.parent_blip_id)
    self.assertEquals(3, len(self.all_blips))

  def testInsertInlineBlipCantInsertAtTheBeginning(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    self.assertEquals(1, len(self.all_blips))
    self.assertRaises(IndexError, blip.insert_inline_blip, 0)
    self.assertEquals(1, len(self.all_blips))

  def testDocumentModify(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    blip.all().replace('a text with text and then some text')
    blip[7].insert('text ')
    blip.all('text').replace('thing')
    self.assertEquals('a thing thing with thing and then some thing',
                      blip.text)

  def testIteration(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    blip.all().replace('aaa 012 aaa 345 aaa 322')
    count = 0
    prev = -1
    # Matches must come back in strictly increasing document order.
    for start, end in blip.all('aaa'):
      count += 1
      self.assertTrue(prev < start)
      prev = start
    self.assertEquals(3, count)

  def testBlipRefValue(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    # Mirror every blip mutation on a plain string and keep them in sync.
    content = blip.text
    content = content[:4] + content[5:]
    del blip[4]
    self.assertEquals(content, blip.text)
    content = content[:2] + content[3:]
    del blip[2:3]
    self.assertEquals(content, blip.text)
    blip[2:3] = 'bike'
    content = content[:2] + 'bike' + content[3:]
    self.assertEquals(content, blip.text)
    url = 'http://www.test.com/image.png'
    blip.append(element.Image(url=url))
    self.assertEqual(url, blip.first(element.Image).url)
    url2 = 'http://www.test.com/another.png'
    blip[-1].update_element({'url': url2})
    self.assertEqual(url2, blip.first(element.Image).url)
    self.assertTrue(blip[3:5] == blip.text[3:5])
    blip.append('geheim')
    self.assertTrue(blip.first('geheim'))
    self.assertFalse(blip.first(element.Button))
    blip.append(element.Button(name='test1', value='Click'))
    button = blip.first(element.Button)
    button.update_element({'name': 'test2'})
    self.assertEqual('test2', button.name)

  def testReplace(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    blip.all().replace('\nxxxx')
    # Replacing a pattern that does not occur must be a no-op.
    blip.all('yyy').replace('zzz')
    self.assertEqual('\nxxxx', blip.text)

  def testDeleteRangeThatSpansAcrossAnnotationEndPoint(self):
    json = ('[{"range":{"start":1,"end":3},"name":"style","value":"bold"}]')
    blip = self.new_blip(blipId=ROOT_BLIP_ID,
                         annotations=simplejson.loads(json),
                         content='\nFoo bar.')
    blip.range(2, 4).delete()
    self.assertEqual('\nF bar.', blip.text)
    self.assertEqual(1, blip.annotations['style'][0].start)
    self.assertEqual(2, blip.annotations['style'][0].end)

  def testInsertBeforeAnnotationStartPoint(self):
    json = ('[{"range":{"start":4,"end":9},"name":"style","value":"bold"}]')
    blip = self.new_blip(blipId=ROOT_BLIP_ID,
                         annotations=simplejson.loads(json),
                         content='\nFoo bar.')
    blip.at(4).insert('d and')
    self.assertEqual('\nFood and bar.', blip.text)
    self.assertEqual(9, blip.annotations['style'][0].start)
    self.assertEqual(14, blip.annotations['style'][0].end)

  def testDeleteRangeInsideAnnotation(self):
    json = ('[{"range":{"start":1,"end":5},"name":"style","value":"bold"}]')
    blip = self.new_blip(blipId=ROOT_BLIP_ID,
                         annotations=simplejson.loads(json),
                         content='\nFoo bar.')
    blip.range(2, 4).delete()
    self.assertEqual('\nF bar.', blip.text)
    self.assertEqual(1, blip.annotations['style'][0].start)
    self.assertEqual(3, blip.annotations['style'][0].end)

  def testReplaceInsideAnnotation(self):
    json = ('[{"range":{"start":1,"end":5},"name":"style","value":"bold"}]')
    blip = self.new_blip(blipId=ROOT_BLIP_ID,
                         annotations=simplejson.loads(json),
                         content='\nFoo bar.')
    blip.range(2, 4).replace('ooo')
    self.assertEqual('\nFooo bar.', blip.text)
    self.assertEqual(1, blip.annotations['style'][0].start)
    self.assertEqual(6, blip.annotations['style'][0].end)
    blip.range(2, 5).replace('o')
    self.assertEqual('\nFo bar.', blip.text)
    self.assertEqual(1, blip.annotations['style'][0].start)
    self.assertEqual(4, blip.annotations['style'][0].end)

  def testReplaceSpanAnnotation(self):
    json = ('[{"range":{"start":1,"end":4},"name":"style","value":"bold"}]')
    blip = self.new_blip(blipId=ROOT_BLIP_ID,
                         annotations=simplejson.loads(json),
                         content='\nFoo bar.')
    blip.range(2, 9).replace('')
    self.assertEqual('\nF', blip.text)
    self.assertEqual(1, blip.annotations['style'][0].start)
    self.assertEqual(2, blip.annotations['style'][0].end)

  def testSearchWithNoMatchShouldNotGenerateOperation(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID)
    self.assertEqual(-1, blip.text.find(':('))
    self.assertEqual(0, len(self.operation_queue))
    blip.all(':(').replace(':)')
    self.assertEqual(0, len(self.operation_queue))

  def testBlipsRemoveWithId(self):
    blip_dict = {
        ROOT_BLIP_ID: self.new_blip(blipId=ROOT_BLIP_ID,
                                    childBlipIds=[CHILD_BLIP_ID]),
        CHILD_BLIP_ID: self.new_blip(blipId=CHILD_BLIP_ID,
                                     parentBlipId=ROOT_BLIP_ID)
    }
    blips = blip.Blips(blip_dict)
    # Removing a child must also detach it from its parent's child list.
    blips._remove_with_id(CHILD_BLIP_ID)
    self.assertEqual(1, len(blips))
    self.assertEqual(0, len(blips[ROOT_BLIP_ID].child_blip_ids))

  def testAppendMarkup(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID, content='\nFoo bar.')
    markup = '<p><span>markup<span> content</p>'
    blip.append_markup(markup)
    self.assertEqual(1, len(self.operation_queue))
    self.assertEqual('\nFoo bar.\nmarkup content', blip.text)

  def testBundledAnnotations(self):
    blip = self.new_blip(blipId=ROOT_BLIP_ID, content='\nFoo bar.')
    blip.append('not bold')
    blip.append('bold', bundled_annotations=[('style/fontWeight', 'bold')])
    self.assertEqual(2, len(blip.annotations))
    self.assertEqual('bold', blip.annotations['style/fontWeight'][0].value)

  def testInlineBlipOffset(self):
    offset = 14
    self.new_blip(blipId=ROOT_BLIP_ID,
                  childBlipIds=[CHILD_BLIP_ID],
                  elements={str(offset):
                            {'type': element.Element.INLINE_BLIP_TYPE,
                             'properties': {'id': CHILD_BLIP_ID}}})
    child = self.new_blip(blipId=CHILD_BLIP_ID,
                          parentBlipId=ROOT_BLIP_ID)
    self.assertEqual(offset, child.inline_blip_offset)
# Allow running this module's tests directly from the command line.
if __name__ == '__main__':
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Script to run all unit tests in this package."""
import blip_test
import element_test
import module_test_runner
import ops_test
import robot_test
import util_test
import wavelet_test
def RunUnitTests():
  """Runs all registered unit tests."""
  runner = module_test_runner.ModuleTestRunner()
  # Every test module to execute; each must follow the unittest convention.
  runner.modules = [
      blip_test,
      element_test,
      ops_test,
      robot_test,
      util_test,
      wavelet_test,
  ]
  runner.RunAllTests()
# Run the full suite when this script is executed directly.
if __name__ == "__main__":
  RunUnitTests()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the generic robot classes.
This module provides the Robot class and RobotListener interface,
as well as some helper functions for web requests and responses.
"""
import base64
import logging
import sys
try:
__import__('google3') # setup internal test environment
except ImportError:
pass
import simplejson
import blip
import events
import ops
import util
import wavelet
import errors
# We only import oauth when we need it (see Robot.setup_oauth).
oauth = None

# Fallback profile page advertised for robots that don't supply their own.
DEFAULT_PROFILE_URL = (
    'http://code.google.com/apis/wave/extensions/robots/python-tutorial.html')
class Robot(object):
"""Robot metadata class.
This class holds on to basic robot information like the name and profile.
It also maintains the list of event handlers and cron jobs and
dispatches events to the appropriate handlers.
"""
def __init__(self, name, image_url='', profile_url=DEFAULT_PROFILE_URL):
"""Initializes self with robot information.
Args:
name: The name of the robot
image_url: (optional) url of an image that should be used as the avatar
for this robot.
profile_url: (optional) url of a webpage with more information about
this robot.
"""
self._handlers = {}
self._name = name
self._verification_token = None
self._st = None
self._consumer_key = None
self._consumer_secret = None
self._server_rpc_base = None
self._profile_handler = None
self._image_url = image_url
self._profile_url = profile_url
self._capability_hash = 0
@property
def name(self):
"""Returns the name of the robot."""
return self._name
@property
def image_url(self):
"""Returns the URL of the avatar image."""
return self._image_url
@property
def profile_url(self):
"""Returns the URL of an info page for the robot."""
return self._profile_url
def http_post(self, url, data, headers):
"""Execute an http post.
Monkey patch this method to use something other than
the default urllib.
Args:
url: to post to
body: post body
headers: extra headers to pass along
Returns:
response_code, returned_page
"""
import urllib2
req = urllib2.Request(url,
data=data,
headers=headers)
try:
f = urllib2.urlopen(req)
return f.code, f.read()
except urllib2.URLError, e:
return e.code, e.read()
def get_verification_token_info(self):
"""Returns the verification token and ST parameter."""
return self._verification_token, self._st
def capabilities_hash(self):
"""Return the capabilities hash as a hex string."""
return hex(self._capability_hash)
def register_handler(self, event_class, handler, context=None, filter=None):
"""Registers a handler on a specific event type.
Multiple handlers may be registered on a single event type and are
guaranteed to be called in order of registration.
The handler takes two arguments, the event object and the corresponding
wavelet.
Args:
event_class: An event to listen for from the classes defined in the
events module.
handler: A function handler which takes two arguments, the wavelet for
the event and the event object.
context: The context to provide for this handler.
filter: Depending on the event, a filter can be specified that restricts
for which values the event handler will be called from the server.
Valuable to restrict the amount of traffic send to the robot.
"""
payload = (handler, event_class, context, filter)
self._handlers.setdefault(event_class.type, []).append(payload)
if type(context) == list:
context = ','.join(context)
self._capability_hash = (self._capability_hash * 13 +
hash(event_class.type) +
hash(context) +
hash(filter)) & 0xfffffff
def set_verification_token_info(self, token, st=None):
"""Set the verification token used in the ownership verification.
/wave/robot/register starts this process up and will produce this token.
Args:
token: the token provided by /wave/robot/register.
st: optional parameter to verify the request for the token came from
the wave server.
"""
self._verification_token = token
self._st = st
  def setup_oauth(self, consumer_key, consumer_secret,
                  server_rpc_base='http://gmodules.com/api/rpc'):
    """Configure this robot to use the oauth'd json rpc.

    Args:
      consumer_key: consumer key received from the verification process.
      consumer_secret: secret received from the verification process.
      server_rpc_base: url of the rpc gateway to use. Specify None for default.
          For wave preview, http://gmodules.com/api/rpc should be used.
          For wave sandbox, http://sandbox.gmodules.com/api/rpc should be used.
    """
    # Import oauth inline and using __import__ for pyexe compatibility
    # when oauth is not installed.
    # NOTE(review): this rebinds the module-level 'oauth' name so other
    # methods (e.g. make_rpc) can use it after setup.
    global oauth
    # __import__ returns the top-level 'waveapi' package, so the submodule
    # is fetched from sys.modules instead of the return value.
    __import__('waveapi.oauth')
    oauth = sys.modules['waveapi.oauth']
    self._server_rpc_base = server_rpc_base
    self._consumer_key = consumer_key
    self._consumer_secret = consumer_secret
    # Two-legged OAuth: HMAC-SHA1 signatures with a consumer but no token.
    self._oauth_signature_method = oauth.OAuthSignatureMethod_HMAC_SHA1()
    self._oauth_consumer = oauth.OAuthConsumer(self._consumer_key,
                                               self._consumer_secret)
def register_profile_handler(self, handler):
"""Sets the profile handler for this robot.
The profile handler will be called when a profile is needed. The handler
gets passed the name for which a profile is needed or None for the
robot itself. A dictionary with keys for name, imageUrl and
profileUrl should be returned.
"""
self._profile_handler = handler
def _hash(self, value):
"""return b64encoded sha1 hash of value."""
try:
hashlib = __import__('hashlib') # 2.5
hashed = hashlib.sha1(value)
except ImportError:
import sha # deprecated
hashed = sha.sha(value)
return base64.b64encode(hashed.digest())
  def make_rpc(self, operations):
    """Make an rpc call, submitting the specified operations.

    Args:
      operations: a single operation, a list of operations, or an
          ops.OperationQueue to serialize and send.

    Returns:
      The decoded json response from the server.

    Raises:
      errors.Error: if setup_oauth() has not been called first.
      IOError: if the server responds with a non-200 status code.
    """
    # Refuse to run unless setup_oauth() has configured the oauth module
    # and consumer credentials.
    if not oauth or not self._oauth_consumer.key:
      raise errors.Error('OAuth has not been configured')
    # Normalize a single operation into a list; OperationQueue is already
    # iterable over its operations.
    if (not type(operations) == list and
        not isinstance(operations, ops.OperationQueue)):
      operations = [operations]
    rpcs = [op.serialize(method_prefix='wave') for op in operations]
    post_body = simplejson.dumps(rpcs)
    # oauth_body_hash ties the signature to the POST body (OAuth
    # Request Body Hash extension).
    body_hash = self._hash(post_body)
    params = {
        'oauth_consumer_key': 'google.com:' + self._oauth_consumer.key,
        'oauth_timestamp': oauth.generate_timestamp(),
        'oauth_nonce': oauth.generate_nonce(),
        'oauth_version': oauth.OAuthRequest.version,
        'oauth_body_hash': body_hash,
    }
    oauth_request = oauth.OAuthRequest.from_request('POST',
                                                    self._server_rpc_base,
                                                    parameters=params)
    # Two-legged OAuth: sign with the consumer only, no access token.
    oauth_request.sign_request(self._oauth_signature_method,
                               self._oauth_consumer,
                               None)
    code, content = self.http_post(
        url=oauth_request.to_url(),
        data=post_body,
        headers={'Content-Type': 'application/json'})
    logging.info('Active URL: %s' % oauth_request.to_url())
    logging.info('Active Outgoing: %s' % post_body)
    if code != 200:
      logging.info(oauth_request.to_url())
      logging.info(content)
      raise IOError('HttpError ' + str(code))
    return simplejson.loads(content)
def _first_rpc_result(self, result):
"""result is returned from make_rpc. Get the first data record
or throw an exception if it was an error."""
if type(result) == list:
result = result[0]
error = result.get('error')
if error:
raise errors.Error('RPC Error' + str(error['code'])
+ ': ' + error['message'])
data = result.get('data')
if data:
return data
raise errors.Error('RPC Error: No data record.')
  def capabilities_xml(self):
    """Return this robot's capabilities as an XML string.

    The <w:version> element carries the capabilities hash so the server can
    tell when the robot's registered handlers have changed.
    """
    lines = []
    for capability, payloads in self._handlers.items():
      for payload in payloads:
        handler, event_class, context, filter = payload
        line = ' <w:capability name="%s"' % capability
        if context:
          # Context may be stored as a list; render it comma-separated.
          if type(context) == list:
            context = ','.join(context)
          line += ' context="%s"' % context
        if filter:
          line += ' filter="%s"' % filter
        line += '/>\n'
        lines.append(line)
    if self._consumer_key:
      oauth_tag = '<w:consumer_key>%s</w:consumer_key>\n' % self._consumer_key
    else:
      oauth_tag = ''
    return ('<?xml version="1.0"?>\n'
            '<w:robot xmlns:w="http://wave.google.com/extensions/robots/1.0">\n'
            '<w:version>%s</w:version>\n'
            '%s'
            '<w:protocolversion>%s</w:protocolversion>\n'
            '<w:capabilities>\n'
            '%s'
            '</w:capabilities>\n'
            '</w:robot>\n') % (self.capabilities_hash(),
                               oauth_tag,
                               ops.PROTOCOL_VERSION,
                               '\n'.join(lines))
def profile_json(self, name=None):
"""Returns a JSON representation of the profile.
This method is called both for the basic profile of the robot and to
get a proxying for profile, in which case name is set. By default
the information supplied at registration is returned.
Use register_profile_handler to override this default behavior.
"""
if self._profile_handler:
data = self._profile_handler(name)
else:
data = {'name': self.name,
'imageUrl': self.image_url,
'profileUrl': self.profile_url}
return simplejson.dumps(data)
  def _wavelet_from_json(self, json, pending_ops):
    """Construct a wavelet from the passed json.

    The json should either contain a wavelet and a blips record that
    define those respective object. The returned wavelet
    will be constructed using the passed pending_ops
    OperationQueue.

    Alternatively the json can be the result of a previous
    wavelet.serialize() call. In that case the blips will
    be contained in the wavelet record.
    """
    if isinstance(json, basestring):
      json = simplejson.loads(json)
    blips = {}
    for blip_id, raw_blip_data in json['blips'].items():
      blips[blip_id] = blip.Blip(raw_blip_data, blips, pending_ops)
    # The wavelet record may live under different keys depending on whether
    # this json came from the server ('wavelet'/'waveletData') or from a
    # previous serialize() call (top-level).
    if 'wavelet' in json:
      raw_wavelet_data = json['wavelet']
    elif 'waveletData' in json:
      raw_wavelet_data = json['waveletData']
    else:
      raw_wavelet_data = json
    # Only blips that belong to this exact wave/wavelet go into the view.
    wavelet_blips = {}
    wavelet_id = raw_wavelet_data['waveletId']
    wave_id = raw_wavelet_data['waveId']
    for blip_id, instance in blips.items():
      if instance.wavelet_id == wavelet_id and instance.wave_id == wave_id:
        wavelet_blips[blip_id] = instance
    result = wavelet.Wavelet(raw_wavelet_data, wavelet_blips, self, pending_ops)
    robot_address = json.get('robotAddress')
    if robot_address:
      result.robot_address = robot_address
    return result
def process_events(self, json):
"""Process an incoming set of events encoded as json."""
parsed = simplejson.loads(json)
pending_ops = ops.OperationQueue()
event_wavelet = self._wavelet_from_json(parsed, pending_ops)
for event_data in parsed['events']:
for payload in self._handlers.get(event_data['type'], []):
handler, event_class, context, filter = payload
event = event_class(event_data, event_wavelet)
handler(event, event_wavelet)
pending_ops.set_capability_hash(self.capabilities_hash())
return simplejson.dumps(pending_ops.serialize())
  def new_wave(self, domain, participants=None, message='', proxy_for_id=None,
               submit=False):
    """Create a new wave with the initial participants on it.

    A new wave is returned with its own operation queue. It is the
    responsibility of the caller to make sure this wave gets
    submitted to the server, either by calling robot.submit() or
    by calling .submit_with() on the returned wave.

    Args:
      domain: the domain to create the wavelet on. This should
          in general correspond to the domain of the incoming
          wavelet. (wavelet.domain). Exceptions are situations
          where the robot is calling new_wave outside of an
          event or when the server is handling multiple domains.
      participants: initial participants on the wave. The robot
          as the creator of the wave is always added.
      message: a string that will be passed back to the robot
          when the WAVELET_CREATOR event is fired. This is a
          lightweight way to pass around state.
      proxy_for_id: the proxying information that will be set on the
          wave's operation queue.
      submit: if true, use the active gateway to make a round
          trip to the server. This will return immediately an
          actual waveid/waveletid and blipId for the root blip.
    """
    operation_queue = ops.OperationQueue(proxy_for_id)
    if not isinstance(message, basestring):
      message = simplejson.dumps(message)
    blip_data, wavelet_data = operation_queue.robot_create_wavelet(
        domain=domain,
        participants=participants,
        message=message)
    blips = {}
    root_blip = blip.Blip(blip_data, blips, operation_queue)
    blips[root_blip.blip_id] = root_blip
    created = wavelet.Wavelet(wavelet_data,
                              blips=blips,
                              robot=self,
                              operation_queue=operation_queue)
    if submit:
      result = self._first_rpc_result(self.submit(created))
      if type(result) == list:
        result = result[0]
      # Currently, data is sometimes wrapped in an outer 'data'
      # Remove these 2 lines when that is no longer an issue.
      if 'data' in result and len(result) == 2:
        result = result['data']
      # Patch the placeholder ids with the real ids assigned by the server,
      # then rebuild the blip and wavelet objects around the updated records.
      if 'blipId' in result:
        blip_data['blipId'] = result['blipId']
        wavelet_data['rootBlipId'] = result['blipId']
      for field in 'waveId', 'waveletId':
        if field in result:
          wavelet_data[field] = result[field]
          blip_data[field] = result[field]
      blips = {}
      root_blip = blip.Blip(blip_data, blips, operation_queue)
      blips[root_blip.blip_id] = root_blip
      created = wavelet.Wavelet(wavelet_data,
                                blips=blips,
                                robot=self,
                                operation_queue=operation_queue)
    return created
def fetch_wavelet(self, wave_id, wavelet_id, proxy_for_id=None):
"""Use the REST interface to fetch a wave and return it.
The returned wavelet contains a snapshot of the state of the
wavelet at that point. It can be used to modify the wavelet,
but the wavelet might change in between, so treat carefully.
Also note that the wavelet returned has its own operation
queue. It the responsibility of the caller to make sure this
wavelet gets submited to the server, either by calling
robot.submit() or by calling .submit_with() on the returned
wavelet.
"""
operation_queue = ops.OperationQueue(proxy_for_id)
operation_queue.robot_fetch_wave(wave_id, wavelet_id)
result = self._first_rpc_result(self.make_rpc(operation_queue))
return self._wavelet_from_json(result, ops.OperationQueue(proxy_for_id))
def blind_wavelet(self, json, proxy_for_id=None):
"""Construct a blind wave from a json string.
Call this method if you have a snapshot of a wave that you
want to operate on outside of an event. Since the wave might
have changed since you last saw it, you should take care to
submit operations that are as safe as possible.
Args:
json: a json object or string containing at least a key
wavelet defining the wavelet and a key blips defining the
blips in the view.
proxy_for_id: the proxying information that will be set on the wavelet's
operation queue.
Returns:
A new wavelet with its own operation queue. It the
responsibility of the caller to make sure this wavelet gets
submited to the server, either by calling robot.submit() or
by calling .submit_with() on the returned wavelet.
"""
return self._wavelet_from_json(json, ops.OperationQueue(proxy_for_id))
def submit(self, wavelet_to_submit):
"""Submit the pending operations associated with wavelet_to_submit.
Typically the wavelet will be the result of fetch_wavelet, blind_wavelet
or new_wave.
"""
pending = wavelet_to_submit.get_operation_queue()
res = self.make_rpc(pending)
pending.clear()
logging.info('submit returned:%s', res)
return res
| Python |
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Support for operations that can be applied to the server.
Contains classes and utilities for creating operations that are to be
applied on the server.
"""
import errors
import random
import util
import sys
# Version of the robot wire protocol these operations speak.
PROTOCOL_VERSION = '0.21'

# Operation Types
# Wavelet-level operations.
WAVELET_APPEND_BLIP = 'wavelet.appendBlip'
WAVELET_SET_TITLE = 'wavelet.setTitle'
WAVELET_ADD_PARTICIPANT = 'wavelet.participant.add'
WAVELET_DATADOC_SET = 'wavelet.datadoc.set'
WAVELET_MODIFY_TAG = 'wavelet.modifyTag'
WAVELET_MODIFY_PARTICIPANT_ROLE = 'wavelet.modifyParticipantRole'
# Blip-level operations.
BLIP_CREATE_CHILD = 'blip.createChild'
BLIP_DELETE = 'blip.delete'
# Document (blip content) operations.
DOCUMENT_APPEND_MARKUP = 'document.appendMarkup'
DOCUMENT_INLINE_BLIP_INSERT = 'document.inlineBlip.insert'
DOCUMENT_MODIFY = 'document.modify'
# Robot-level operations.
ROBOT_CREATE_WAVELET = 'robot.createWavelet'
ROBOT_FETCH_WAVE = 'robot.fetchWave'
ROBOT_NOTIFY_CAPABILITIES_HASH = 'robot.notifyCapabilitiesHash'
class Operation(object):
  """Represents a generic operation applied on the server.

  An Operation is a method name, an operation id and a params dictionary
  whose contents depend on the operation type. It can be used directly,
  but doing so will not result in local, transient reflection of state on
  the blips (for example, queuing a 'delete blip' operation does not remove
  the blip from the local context for this session); prefer the OpBased
  model classes for that.
  """

  def __init__(self, method, opid, params):
    """Initializes this operation with contextual data.

    Args:
      method: Method to call or type of operation.
      opid: The id of the operation. Any callbacks will refer to these.
      params: An operation type dependent dictionary.
    """
    self.method = method
    self.id = opid
    self.params = params

  def __str__(self):
    rendered_params = str(self.params)
    return '%s[%s]%s' % (self.method, self.id, rendered_params)

  def set_param(self, param, value):
    # Returns self so parameter-setting calls can be chained.
    self.params[param] = value
    return self

  def serialize(self, method_prefix=''):
    """Serialize the operation.

    Args:
      method_prefix: prefix for each method name to allow for specifying
          a namespace.

    Returns:
      a dict representation of the operation.
    """
    if method_prefix and not method_prefix.endswith('.'):
      method_prefix += '.'
    return {'method': method_prefix + self.method,
            'id': self.id,
            'params': util.serialize(self.params)}

  def set_optional(self, param, value):
    """Set a parameter unless the value is None or the empty string."""
    if value is None or value == '':
      return self
    return self.set_param(param, value)
class OperationQueue(object):
  """Wraps the queuing of operations using easily callable functions.

  The operation queue wraps single operations as functions and queues the
  resulting operations in-order. Typically there shouldn't be a need to
  call this directly unless operations are needed on entities outside
  of the scope of the robot. For example, to modify a blip that
  does not exist in the current context, you might specify the wave, wavelet
  and blip id to generate an operation.

  Any calls to this will not be reflected in the robot in any way.
  For example, calling wavelet_append_blip will not result in a new blip
  being added to the robot, only an operation to be applied on the
  server.
  """

  # Some class global counters:
  # Shared across all queues so every operation gets a unique id.
  _next_operation_id = 1

  def __init__(self, proxy_for_id=None):
    # Ordered list of Operation objects not yet submitted.
    self.__pending = []
    self._capability_hash = 0
    self._proxy_for_id = proxy_for_id

  def _new_blipdata(self, wave_id, wavelet_id, initial_content='',
                    parent_blip_id=None):
    """Creates JSON of the blip used for this session."""
    # 'TBD_' ids are placeholders; the server assigns the real id later.
    temp_blip_id = 'TBD_%s_%s' % (wavelet_id,
                                  hex(random.randint(0, sys.maxint)))
    return {'waveId': wave_id,
            'waveletId': wavelet_id,
            'blipId': temp_blip_id,
            'content': initial_content,
            'parentBlipId': parent_blip_id}

  def _new_waveletdata(self, domain, participants):
    """Creates an ephemeral WaveletData instance used for this session.

    Args:
      domain: the domain to create the data for.
      participants: initial participants on the wavelet.

    Returns:
      Blipdata (for the rootblip), WaveletData.
    """
    wave_id = domain + '!TBD_%s' % hex(random.randint(0, sys.maxint))
    wavelet_id = domain + '!conv+root'
    root_blip_data = self._new_blipdata(wave_id, wavelet_id)
    participants = set(participants)
    wavelet_data = {'waveId': wave_id,
                    'waveletId': wavelet_id,
                    'rootBlipId': root_blip_data['blipId'],
                    'participants': participants}
    return root_blip_data, wavelet_data

  def __len__(self):
    return len(self.__pending)

  def __iter__(self):
    return self.__pending.__iter__()

  def clear(self):
    # Drop all pending operations (e.g. after a successful submit).
    self.__pending = []

  def proxy_for(self, proxy):
    """Return a view of this operation queue with the proxying for set to proxy.

    This method returns a new instance of an operation queue that shares the
    operation list, but has a different proxying_for_id set so the robot using
    this new queue will send out operations with the proxying_for field set.
    """
    # Note: the pending list is shared (aliased), not copied.
    res = OperationQueue()
    res.__pending = self.__pending
    res._capability_hash = self._capability_hash
    res._proxy_for_id = proxy
    return res

  def set_capability_hash(self, capability_hash):
    # Hash reported in the notify operation prepended by serialize().
    self._capability_hash = capability_hash

  def serialize(self):
    """Serialize the queue, prepending a notifyCapabilitiesHash operation."""
    first = Operation(ROBOT_NOTIFY_CAPABILITIES_HASH,
                      '0',
                      {'capabilitiesHash': self._capability_hash,
                       'protocolVersion': PROTOCOL_VERSION})
    operations = [first] + self.__pending
    res = util.serialize(operations)
    return res

  def copy_operations(self, other_queue):
    """Copy the pending operations from other_queue into this one."""
    for op in other_queue:
      self.__pending.append(op)

  def new_operation(self, method, wave_id, wavelet_id, props=None, **kwprops):
    """Creates and adds a new operation to the operation list."""
    if props is None:
      props = {}
    props.update(kwprops)
    props['waveId'] = wave_id
    props['waveletId'] = wavelet_id
    if self._proxy_for_id:
      props['proxyingFor'] = self._proxy_for_id
    operation = Operation(method,
                          'op%s' % OperationQueue._next_operation_id,
                          props)
    self.__pending.append(operation)
    OperationQueue._next_operation_id += 1
    return operation

  def wavelet_append_blip(self, wave_id, wavelet_id, initial_content=''):
    """Appends a blip to a wavelet.

    Args:
      wave_id: The wave id owning the containing wavelet.
      wavelet_id: The wavelet id that this blip should be appended to.
      initial_content: optionally the content to start with.

    Returns:
      JSON representing the information of the new blip.
    """
    blip_data = self._new_blipdata(wave_id, wavelet_id, initial_content)
    self.new_operation(WAVELET_APPEND_BLIP, wave_id,
                       wavelet_id, blipData=blip_data)
    return blip_data

  def wavelet_add_participant(self, wave_id, wavelet_id, participant_id):
    """Adds a participant to a wavelet.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      participant_id: Id of the participant to add.

    Returns:
      The operation created.
    """
    return self.new_operation(WAVELET_ADD_PARTICIPANT, wave_id, wavelet_id,
                              participantId=participant_id)

  def wavelet_datadoc_set(self, wave_id, wavelet_id, name, data):
    """Sets a key/value pair on the data document of a wavelet.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      name: The key name for this data.
      data: The value of the data to set.

    Returns:
      The operation created.
    """
    return self.new_operation(WAVELET_DATADOC_SET, wave_id, wavelet_id,
                              datadocName=name, datadocValue=data)

  def robot_create_wavelet(self, domain, participants=None, message=''):
    """Creates a new wavelet.

    Args:
      domain: the domain to create the wave in.
      participants: initial participants on this wavelet or None if none.
      message: an optional payload that is returned with the corresponding
          event.

    Returns:
      data for the root_blip, wavelet
    """
    if participants is None:
      participants = []
    blip_data, wavelet_data = self._new_waveletdata(domain, participants)
    op = self.new_operation(ROBOT_CREATE_WAVELET,
                            wave_id=wavelet_data['waveId'],
                            wavelet_id=wavelet_data['waveletId'],
                            waveletData=wavelet_data)
    op.set_optional('message', message)
    return blip_data, wavelet_data

  def robot_fetch_wave(self, wave_id, wavelet_id):
    """Requests a snapshot of the specified wave.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.

    Returns:
      The operation created.
    """
    return self.new_operation(ROBOT_FETCH_WAVE, wave_id, wavelet_id)

  def wavelet_set_title(self, wave_id, wavelet_id, title):
    """Sets the title of a wavelet.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      title: The title to set.

    Returns:
      The operation created.
    """
    return self.new_operation(WAVELET_SET_TITLE, wave_id, wavelet_id,
                              waveletTitle=title)

  def wavelet_modify_participant_role(
      self, wave_id, wavelet_id, participant_id, role):
    """Modify the role of a participant on a wavelet.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      participant_id: Id of the participant to add.
      role: the new roles.

    Returns:
      The operation created.
    """
    return self.new_operation(WAVELET_MODIFY_PARTICIPANT_ROLE, wave_id,
                              wavelet_id, participantId=participant_id,
                              participantRole=role)

  def wavelet_modify_tag(self, wave_id, wavelet_id, tag, modify_how=None):
    """Modifies a tag in a wavelet.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      tag: The tag (a string).
      modify_how: (optional) how to apply the tag. The default is to add
          the tag. Specify 'remove' to remove. Specify None or 'add' to
          add.

    Returns:
      The operation created.
    """
    return self.new_operation(WAVELET_MODIFY_TAG, wave_id, wavelet_id,
                              name=tag).set_optional("modify_how", modify_how)

  def blip_create_child(self, wave_id, wavelet_id, blip_id):
    """Creates a child blip of another blip.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      blip_id: The blip id that this operation is applied to.

    Returns:
      JSON of blip for which further operations can be applied.
    """
    blip_data = self._new_blipdata(wave_id, wavelet_id, parent_blip_id=blip_id)
    self.new_operation(BLIP_CREATE_CHILD, wave_id, wavelet_id,
                       blipId=blip_id,
                       blipData=blip_data)
    return blip_data

  def blip_delete(self, wave_id, wavelet_id, blip_id):
    """Deletes the specified blip.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      blip_id: The blip id that this operation is applied to.

    Returns:
      The operation created.
    """
    return self.new_operation(BLIP_DELETE, wave_id, wavelet_id, blipId=blip_id)

  def document_append_markup(self, wave_id, wavelet_id, blip_id, content):
    """Appends content with markup to a document.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      blip_id: The blip id that this operation is applied to.
      content: The markup content to append.

    Returns:
      The operation created.
    """
    return self.new_operation(DOCUMENT_APPEND_MARKUP, wave_id, wavelet_id,
                              blipId=blip_id, content=content)

  def document_modify(self, wave_id, wavelet_id, blip_id):
    """Creates and queues a document modify operation

    The returned operation still needs to be filled with details before
    it makes sense.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      blip_id: The blip id that this operation is applied to.

    Returns:
      The operation created.
    """
    return self.new_operation(DOCUMENT_MODIFY,
                              wave_id,
                              wavelet_id,
                              blipId=blip_id)

  def document_inline_blip_insert(self, wave_id, wavelet_id, blip_id, position):
    """Inserts an inline blip at a specific location.

    Args:
      wave_id: The wave id owning that this operation is applied to.
      wavelet_id: The wavelet id that this operation is applied to.
      blip_id: The blip id that this operation is applied to.
      position: The position in the document to insert the blip.

    Returns:
      JSON data for the blip that was created for further operations.
    """
    inline_blip_data = self._new_blipdata(wave_id, wavelet_id)
    inline_blip_data['parentBlipId'] = blip_id
    self.new_operation(DOCUMENT_INLINE_BLIP_INSERT, wave_id, wavelet_id,
                       blipId=blip_id,
                       index=position,
                       blipData=inline_blip_data)
    return inline_blip_data
| Python |
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the ops module."""
import unittest
import ops
class TestOperation(unittest.TestCase):
  """Test case for Operation class."""

  def testFields(self):
    operation = ops.Operation(ops.WAVELET_SET_TITLE, 'opid02',
                              {'waveId': 'wavelet-id',
                               'title': 'a title'})
    self.assertEqual(ops.WAVELET_SET_TITLE, operation.method)
    self.assertEqual('opid02', operation.id)
    self.assertEqual(2, len(operation.params))

  def testConstructModifyTag(self):
    queue = ops.OperationQueue()
    operation = queue.wavelet_modify_tag('waveid', 'waveletid', 'tag')
    self.assertEqual(3, len(operation.params))
    operation = queue.wavelet_modify_tag(
        'waveid', 'waveletid', 'tag', modify_how='remove')
    self.assertEqual(4, len(operation.params))

  def testConstructRobotFetchWave(self):
    queue = ops.OperationQueue('proxyid')
    operation = queue.robot_fetch_wave('wave1', 'wavelet1')
    self.assertEqual(3, len(operation.params))
    self.assertEqual('proxyid', operation.params['proxyingFor'])
    self.assertEqual('wave1', operation.params['waveId'])
    self.assertEqual('wavelet1', operation.params['waveletId'])
class TestOperationQueue(unittest.TestCase):
  """Test case for OperationQueue class."""

  def testSerialize(self):
    queue = ops.OperationQueue()
    queue.set_capability_hash('hash')
    queue.wavelet_modify_tag('waveid', 'waveletid', 'tag')
    serialized = queue.serialize()
    self.assertEqual(2, len(serialized))
    notify = serialized[0]
    self.assertEqual('robot.notifyCapabilitiesHash', notify['method'])
    self.assertEqual('hash', notify['params']['capabilitiesHash'])
    self.assertEqual(ops.PROTOCOL_VERSION, notify['params']['protocolVersion'])
    self.assertEqual('wavelet.modifyTag', serialized[1]['method'])
if __name__ == '__main__':
  # Run the tests in this module when executed directly.
  unittest.main()
| Python |
#!/usr/bin/python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module defines the ModuleTestRunnerClass."""
import unittest
class ModuleTestRunner(object):
  """Responsible for executing all test cases in a list of modules."""

  def __init__(self, module_list=None, module_test_settings=None):
    # module_list: modules whose TestCases should be executed.
    # module_test_settings: mapping of attribute name -> value to inject
    # into each module before its tests run (test configuration).
    self.modules = module_list or []
    self.settings = module_test_settings or {}

  def RunAllTests(self):
    """Executes all tests present in the list of modules."""
    runner = unittest.TextTestRunner()
    for module in self.modules:
      # Push each configured setting into the module under test first.
      for setting, value in self.settings.iteritems():
        try:
          setattr(module, setting, value)
        except AttributeError:
          # Report the failed setting but keep going; remaining settings
          # and modules may still be runnable.
          print '\nError running ' + str(setting)
      print '\nRunning all tests in module', module.__name__
      runner.run(unittest.defaultTestLoader.loadTestsFromModule(module))
| Python |
#!/usr/bin/python2.4
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the wavelet module."""
import unittest
import blip
import element
import ops
import wavelet
import simplejson
# Address used as both the robot identity and the sole initial participant.
ROBOT_NAME = 'robot@appspot.com'

# Raw wavelet record as the server would deliver it.
TEST_WAVELET_DATA = {
    'creator': ROBOT_NAME,
    'creationTime': 100,
    'lastModifiedTime': 101,
    'participants': [ROBOT_NAME],
    'participantsRoles': {ROBOT_NAME: wavelet.Participants.ROLE_FULL},
    'rootBlipId': 'blip-1',
    'title': 'Title',
    'waveId': 'test.com!w+g3h3im',
    'waveletId': 'test.com!root+conv',
    'tags': ['tag1', 'tag2'],
}

# Raw root-blip record, consistent with TEST_WAVELET_DATA above.
TEST_BLIP_DATA = {
    'blipId': TEST_WAVELET_DATA['rootBlipId'],
    'childBlipIds': [],
    'content': '\ntesting',
    'contributors': [TEST_WAVELET_DATA['creator'], 'robot@google.com'],
    'creator': TEST_WAVELET_DATA['creator'],
    'lastModifiedTime': TEST_WAVELET_DATA['lastModifiedTime'],
    'parentBlipId': None,
    'waveId': TEST_WAVELET_DATA['waveId'],
    'elements': {},
    'waveletId': TEST_WAVELET_DATA['waveletId'],
}
class TestWavelet(unittest.TestCase):
"""Tests the wavelet class."""
def setUp(self):
self.operation_queue = ops.OperationQueue()
self.all_blips = {}
self.blip = blip.Blip(TEST_BLIP_DATA,
self.all_blips,
self.operation_queue)
self.all_blips[self.blip.blip_id] = self.blip
self.wavelet = wavelet.Wavelet(TEST_WAVELET_DATA,
self.all_blips,
None,
self.operation_queue)
self.wavelet.robot_address = ROBOT_NAME
def testWaveletProperties(self):
w = self.wavelet
self.assertEquals(TEST_WAVELET_DATA['creator'], w.creator)
self.assertEquals(TEST_WAVELET_DATA['creationTime'], w.creation_time)
self.assertEquals(TEST_WAVELET_DATA['lastModifiedTime'],
w.last_modified_time)
self.assertEquals(len(TEST_WAVELET_DATA['participants']),
len(w.participants))
self.assertTrue(TEST_WAVELET_DATA['participants'][0] in w.participants)
self.assertEquals(TEST_WAVELET_DATA['rootBlipId'], w.root_blip.blip_id)
self.assertEquals(TEST_WAVELET_DATA['title'], w.title)
self.assertEquals(TEST_WAVELET_DATA['waveId'], w.wave_id)
self.assertEquals(TEST_WAVELET_DATA['waveletId'], w.wavelet_id)
self.assertEquals('test.com', w.domain)
def testWaveletMethods(self):
w = self.wavelet
reply = w.reply()
self.assertEquals(2, len(w.blips))
w.delete(reply)
self.assertEquals(1, len(w.blips))
self.assertEquals(0, len(w.data_documents))
self.wavelet.data_documents['key'] = 'value'
self.assert_('key' in w.data_documents)
self.assertEquals(1, len(w.data_documents))
for key in w.data_documents:
self.assertEquals(key, 'key')
self.assertEquals(1, len(w.data_documents.keys()))
self.wavelet.data_documents['key'] = None
self.assertEquals(0, len(w.data_documents))
num_participants = len(w.participants)
w.proxy_for('proxy').reply()
self.assertEquals(2, len(w.blips))
# check that the new proxy for participant was added
self.assertEquals(num_participants + 1, len(w.participants))
w._robot_address = ROBOT_NAME.replace('@', '+proxy@')
w.proxy_for('proxy').reply()
self.assertEquals(num_participants + 1, len(w.participants))
self.assertEquals(3, len(w.blips))
def testSetTitle(self):
self.blip._content = '\nOld title\n\nContent'
self.wavelet.title = 'New title \xd0\xb0\xd0\xb1\xd0\xb2'
self.assertEquals(1, len(self.operation_queue))
self.assertEquals('wavelet.setTitle',
self.operation_queue.serialize()[1]['method'])
self.assertEquals(u'\nNew title \u0430\u0431\u0432\n\nContent',
self.blip._content)
def testSetTitleAdjustRootBlipWithOneLineProperly(self):
self.blip._content = '\nOld title'
self.wavelet.title = 'New title'
self.assertEquals(1, len(self.operation_queue))
self.assertEquals('wavelet.setTitle',
self.operation_queue.serialize()[1]['method'])
self.assertEquals('\nNew title\n', self.blip._content)
def testSetTitleAdjustEmptyRootBlipProperly(self):
self.blip._content = '\n'
self.wavelet.title = 'New title'
self.assertEquals(1, len(self.operation_queue))
self.assertEquals('wavelet.setTitle',
self.operation_queue.serialize()[1]['method'])
self.assertEquals('\nNew title\n', self.blip._content)
def testTags(self):
w = self.wavelet
self.assertEquals(2, len(w.tags))
w.tags.append('tag3')
self.assertEquals(3, len(w.tags))
w.tags.append('tag3')
self.assertEquals(3, len(w.tags))
w.tags.remove('tag1')
self.assertEquals(2, len(w.tags))
self.assertEquals('tag2', w.tags[0])
def testParticipantRoles(self):
w = self.wavelet
self.assertEquals(wavelet.Participants.ROLE_FULL,
w.participants.get_role(ROBOT_NAME))
w.participants.set_role(ROBOT_NAME, wavelet.Participants.ROLE_READ_ONLY)
self.assertEquals(wavelet.Participants.ROLE_READ_ONLY,
w.participants.get_role(ROBOT_NAME))
def testSerialize(self):
    """A wavelet carrying varied elements serializes to JSON-able data."""
    blip = self.blip
    blip.append(element.Gadget('http://test.com', {'a': 3}))
    self.wavelet.title = 'A wavelet title'
    blip.append(element.Image(url='http://www.google.com/logos/clickortreat1.gif',
                              width=320, height=118))
    blip.append(element.Attachment(caption='fake', data='fake data'))
    blip.append(element.Line(line_type='li', indent='2'))
    blip.append('bulleted!')
    blip.append(element.Installer(
        'http://wave-skynet.appspot.com/public/extensions/areyouin/manifest.xml'))
    self.wavelet.proxy_for('proxy').reply().append('hi from douwe')
    inline = blip.insert_inline_blip(5)
    inline.append('hello again!')
    # The serialized form must survive a JSON dump and contain the gadget URL.
    encoded = simplejson.dumps(self.wavelet.serialize())
    self.assertTrue(encoded.find('test.com') > 0)
# Allow running this test module directly from the command line.
if __name__ == '__main__':
    unittest.main()
| Python |
"""Implementation of JSONEncoder
"""
import re
try:
from _speedups import encode_basestring_ascii as \
c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from _speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
from decoder import PosInf
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Map each character that must be escaped inside a JSON string to its
# escape sequence.  The short two-character escapes are listed here; the
# loop below fills in \uXXXX escapes for the remaining control characters.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    # setdefault preserves the short escapes (\b, \n, ...) defined above.
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# repr() yields a representation that round-trips the float exactly.
FLOAT_REPR = repr
def encode_basestring(s):
    """Return a quoted, escaped JSON string literal for *s*.

    Byte strings containing non-ASCII data are decoded as UTF-8 first,
    so the result is always a unicode object.
    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    return u'"' + ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s) + u'"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only, quoted JSON string literal for *s*.

    Non-ASCII characters are emitted as \\uXXXX escapes (or surrogate
    pairs for characters outside the Basic Multilingual Plane).
    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        c = match.group(0)
        escaped = ESCAPE_DCT.get(c)
        if escaped is not None:
            # Short escape (\n, \", \\ ...) or precomputed \uXXXX.
            return escaped
        n = ord(c)
        if n < 0x10000:
            #return '\\u{0:04x}'.format(n)
            return '\\u%04x' % (n,)
        # Characters above the BMP become a UTF-16 surrogate pair.
        n -= 0x10000
        s1 = 0xd800 | ((n >> 10) & 0x3ff)
        s2 = 0xdc00 | (n & 0x3ff)
        #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
        return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C implementation from _speedups when it imported successfully.
encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.
    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; instance attributes shadow these when a
    # ``separators`` tuple is passed to the constructor.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.
        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.
        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.
        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.
        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.
        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.
        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most compact
        representation without any newlines. For backwards compatibility with
        versions of simplejson earlier than 2.1.0, an integer is also accepted
        and is converted to a string with that many spaces.
        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.
        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.
        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        # Backwards compatibility: an integer indent means that many spaces.
        if isinstance(indent, (int, long)):
            indent = ' ' * indent
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            # Overrides the method below on this instance only.
            self.default = default
        self.encoding = encoding
    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).
        For example, to support arbitrary iterators, you could
        implement default like this::
            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.
        >>> from simplejson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                # Only decode when the encoding differs from UTF-8; the
                # encoders below handle UTF-8 byte strings themselves.
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.
        For example::
            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        # markers tracks container ids for circular-reference detection;
        # None disables the check entirely.
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are decoded with the
            # configured encoding before escaping.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan,
                _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials.  Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.
            # o != o is the portable NaN test.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text
        # The C encoder handles neither pretty-printing nor key sorting,
        # so fall back to the Python implementation for those.
        if (_one_shot and c_make_encoder is not None
                and not self.indent and not self.sort_keys):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    # Build and return the pure-Python iterencode closure, used when the
    # _speedups C encoder is unavailable or indent/sort_keys is requested.
    def _iterencode_list(lst, _current_indent_level):
        if not lst:
            yield '['
            return
        if markers is not None:
            # Remember this container by id for circular-reference detection.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item, the pending prefix becomes the
                # separator; each scalar item is yielded as prefix + text.
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                # Containers: yield the prefix first, then recurse.
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            # items() copies so an in-place sort is safe; iteritems() is
            # used otherwise to avoid the copy.
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                # True/False must be tested before the int check below,
                # since bool is a subclass of int.
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch over all supported types.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            # Ask the user-supplied default() for a serializable stand-in,
            # then encode that.
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
| Python |
import simplejson
import cgi
class JSONFilter(object):
    """WSGI middleware that JSON-encodes the wrapped app's response.

    For POST requests whose ``Content-Type`` matches *mime_type*, the
    request body is decoded from JSON and exposed to the wrapped app as
    ``environ['jsonfilter.json']``.  The app's return value is serialized
    with simplejson.  A ``jsonp`` query parameter wraps the result in a
    JSONP callback and switches the content type to text/javascript.
    """
    def __init__(self, app, mime_type='text/x-json'):
        self.app = app
        self.mime_type = mime_type
    def __call__(self, environ, start_response):
        # Capture the status/headers reported by the wrapped app so they
        # can be merged with our own headers below.
        response = {'status': '200 OK', 'headers': []}
        def json_start_response(status, headers):
            response['status'] = status
            response['headers'].extend(headers)
        environ['jsonfilter.mime_type'] = self.mime_type
        if environ.get('REQUEST_METHOD', '') == 'POST':
            if environ.get('CONTENT_TYPE', '') == self.mime_type:
                # Pass CONTENT_LENGTH to read() only when present and
                # non-empty; otherwise read the whole input stream.
                args = [_ for _ in [environ.get('CONTENT_LENGTH')] if _]
                data = environ['wsgi.input'].read(*map(int, args))
                environ['jsonfilter.json'] = simplejson.loads(data)
        res = simplejson.dumps(self.app(environ, json_start_response))
        jsonp = cgi.parse_qs(environ.get('QUERY_STRING', '')).get('jsonp')
        if jsonp:
            content_type = 'text/javascript'
            res = ''.join(jsonp + ['(', res, ')'])
        elif 'Opera' in environ.get('HTTP_USER_AGENT', ''):
            # Opera has bunk XMLHttpRequest support for most mime types
            content_type = 'text/plain'
        else:
            content_type = self.mime_type
        headers = [
            ('Content-type', content_type),
            # Bug fix: WSGI requires header values to be strings; len()
            # returns an int, so convert it explicitly.
            ('Content-length', str(len(res))),
        ]
        headers.extend(response['headers'])
        start_response(response['status'], headers)
        return [res]
def factory(app, global_conf, **kw):
    # Paste/PasteDeploy-style filter factory entry point.  global_conf is
    # accepted for the factory signature but not used.
    return JSONFilter(app, **kw)
| Python |
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from scanner import make_scanner
try:
from _speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
# Regex flags shared by every pattern compiled in this module.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
    """Return (nan, inf, -inf) built from raw IEEE-754 doubles."""
    # Big-endian byte images of NaN followed by +Infinity.
    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
    # The struct module in Python 2.4 would get frexp() out of range here
    # when an endian is specified in the format string. Fixed in Python 2.5+
    if sys.byteorder != 'big':
        # Byte-swap each 8-byte double for little-endian platforms.
        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
    nan, inf = struct.unpack('dd', _BYTES)
    return nan, inf, -inf
# Float constants used by the parser for the non-standard
# NaN / Infinity / -Infinity literals.
NaN, PosInf, NegInf = _floatconstants()
class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:
    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)
    """
    def __init__(self, msg, doc, pos, end=None):
        # The formatted message (with line/column info) becomes the
        # ValueError text; the raw pieces are kept as attributes.
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is not None:
            # Bug fix: the end line/column must be computed from ``end``,
            # not ``pos`` (which merely duplicated lineno/colno).
            self.endlineno, self.endcolno = linecol(doc, end)
        else:
            self.endlineno, self.endcolno = None, None
def linecol(doc, pos):
    """Map character index *pos* in *doc* to a (lineno, colno) pair.

    Line numbers are 1-based.  On the first line the column equals the
    raw index; on later lines it counts characters from the preceding
    newline (so the first character after '\\n' is column 1).
    """
    lineno = doc.count('\n', 0, pos) + 1
    if lineno == 1:
        return lineno, pos
    return lineno, pos - doc.rindex('\n', 0, pos)
def errmsg(msg, doc, pos, end=None):
    """Format a parse-error message annotated with line/column info."""
    # Note that this function is called from _speedups
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, lineno, colno, endlineno, endcolno, pos, end))
# Non-standard literals accepted by the parser (outside the JSON spec).
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}
# Matches a run of plain characters followed by the terminator that
# stopped it: a closing quote, a backslash, or a control character.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Lookup table for the single-character escape sequences after '\'.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.
    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise JSONDecodeError(msg, s, end)
            else:
                # Lenient mode: keep the raw control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise JSONDecodeError(msg, s, end)
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise JSONDecodeError(msg, s, end)
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise JSONDecodeError(msg, s, end)
                uni2 = int(esc2, 16)
                # Combine the high and low surrogates into one code point.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
# JSON insignificant whitespace, both as a pattern and as a membership
# string for the single-character fast paths below.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
object_pairs_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
pairs = []
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = {}
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end + 1
elif nextchar != '"':
raise JSONDecodeError("Expecting property name", s, end)
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise JSONDecodeError("Expecting : delimiter", s, end)
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
try:
value, end = scan_once(s, end)
except StopIteration:
raise JSONDecodeError("Expecting object", s, end)
pairs.append((key, value))
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting , delimiter", s, end - 1)
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise JSONDecodeError("Expecting property name", s, end - 1)
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = dict(pairs)
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array starting just after its '['.

    Returns (values, end) where end is the index just past the ']'.
    """
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        _append(value)
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end)
        # Skip whitespace after the comma; the two-step check fast-paths
        # the common single-space case before falling back to the regex.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder
    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """
    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """
        *encoding* determines the encoding used to interpret any
        :class:`str` objects decoded by this instance (``'utf-8'`` by
        default).  It has no effect when decoding :class:`unicode` objects.
        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as :class:`unicode`.
        *object_hook*, if specified, will be called with the result of every
        JSON object decoded and its return value will be used in place of the
        given :class:`dict`.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).
        *object_pairs_hook* is an optional function that will be called with
        the result of any object literal decode with an ordered list of pairs.
        The return value of *object_pairs_hook* will be used instead of the
        :class:`dict`.  This feature can be used to implement custom decoders
        that rely on the order that the key and value pairs are decoded (for
        example, :func:`collections.OrderedDict` will remember the order of
        insertion).  If *object_hook* is also defined, the *object_pairs_hook*
        takes priority.
        *parse_float*, if specified, will be called with the string of every
        JSON float to be decoded.  By default, this is equivalent to
        ``float(num_str)``.  This can be used to use another datatype or parser
        for JSON floats (e.g. :class:`decimal.Decimal`).
        *parse_int*, if specified, will be called with the string of every
        JSON int to be decoded.  By default, this is equivalent to
        ``int(num_str)``.  This can be used to use another datatype or parser
        for JSON integers (e.g. :class:`float`).
        *parse_constant*, if specified, will be called with one of the
        following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``.  This
        can be used to raise an exception if invalid JSON numbers are
        encountered.
        *strict* controls the parser's behavior when it encounters an
        invalid control character in a string.  The default setting of
        ``True`` means that unescaped control characters are parse errors, if
        ``False`` then control characters will be allowed in strings.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # These attributes are read by make_scanner (and the C scanner);
        # subclasses may override them to customize parsing.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        self.scan_once = make_scanner(self)
    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        # Anything besides trailing whitespace is an error.
        end = _w(s, end).end()
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end, len(s))
        return obj
    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
        beginning with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.
        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            # The scanner signals "nothing parseable here" by StopIteration.
            raise JSONDecodeError("No JSON object could be decoded", s, idx)
        return obj, end
| Python |
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError(repr(o) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.1.0'
# Names re-exported as the package's public API.
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
    'OrderedDict',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decoder import JSONDecoder, JSONDecodeError
from encoder import JSONEncoder
try:
from collections import OrderedDict
except ImportError:
from ordered_dict import OrderedDict
# Shared encoder reused by dump()/dumps() whenever the caller passes all
# default arguments (the common case), avoiding per-call construction.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    ``skipkeys``: when true, dict keys that are not basic types (``str``,
        ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) are
        skipped instead of raising ``TypeError``.
    ``ensure_ascii``: when false, some chunks written to ``fp`` may be
        ``unicode`` instances, subject to normal Python ``str`` to
        ``unicode`` coercion rules; ``fp.write()`` must cope with that
        (as in ``codecs.getwriter()``).
    ``check_circular``: when false, the circular reference check for
        container types is skipped and a circular reference results in an
        ``OverflowError`` (or worse).
    ``allow_nan``: when false, serializing out-of-range floats (``nan``,
        ``inf``, ``-inf``) raises ``ValueError`` in strict JSON compliance
        instead of emitting the JavaScript equivalents.
    ``indent``: a string used per nesting level for pretty-printing;
        ``None`` (the default) selects the most compact representation.
        For backwards compatibility with simplejson earlier than 2.1.0 an
        integer is also accepted and converted to that many spaces.
    ``separators``: an ``(item_separator, dict_separator)`` tuple used
        instead of the default ``(', ', ': ')``; ``(',', ':')`` gives the
        most compact JSON.
    ``encoding``: character encoding for ``str`` instances, UTF-8 default.
    ``default``: a function returning a serializable version of otherwise
        unserializable objects; the default simply raises ``TypeError``.
    ``cls``: a custom ``JSONEncoder`` subclass (e.g. one overriding
        ``.default()``) used instead of ``JSONEncoder``.
    """
    # Fast path: every option at its default lets us reuse the shared
    # module-level encoder instead of constructing a new one.
    all_defaults = (not skipkeys and ensure_ascii and
        check_circular and allow_nan and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw)
    if all_defaults:
        iterable = _default_encoder.iterencode(obj)
    else:
        encoder_cls = cls or JSONEncoder
        iterable = encoder_cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, encoding=encoding,
            default=default, **kw).iterencode(obj)
    # could accelerate with writelines in some versions of Python, at
    # a debuggability cost
    for chunk in iterable:
        fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    Keyword arguments mirror :class:`JSONEncoder`:

    - ``skipkeys``: if true, dict keys that are not basic types
      (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``,
      ``None``) are skipped instead of raising ``TypeError``.
    - ``ensure_ascii``: if false, return a ``unicode`` instance instead
      of an ASCII-escaped ``str``.
    - ``check_circular``: if false, skip the circular-reference check for
      container types (a cycle then causes ``OverflowError`` or worse).
    - ``allow_nan``: if false, out-of-range floats (``nan``, ``inf``,
      ``-inf``) raise ``ValueError`` in strict JSON compliance instead of
      emitting the JavaScript equivalents (``NaN``, ``Infinity``,
      ``-Infinity``).
    - ``indent``: a string used to pretty-print nested members, one copy
      per nesting level; ``None`` (default) is the most compact form.  For
      backwards compatibility with simplejson < 2.1.0 an integer count of
      spaces is also accepted.
    - ``separators``: an ``(item_separator, dict_separator)`` tuple
      overriding the default ``(', ', ': ')``; ``(',', ':')`` is the most
      compact representation.
    - ``encoding``: character encoding for ``str`` instances, UTF-8 by
      default.
    - ``default``: a function returning a serializable version of an
      otherwise unserializable object; by default ``TypeError`` is raised.
    - ``cls``: a custom ``JSONEncoder`` subclass (e.g. one overriding
      ``.default()``).
    """
    # All-defaults fast path: reuse the shared module-level encoder rather
    # than constructing a fresh JSONEncoder on every call.
    all_defaults = (not skipkeys and ensure_ascii and check_circular
                    and allow_nan and cls is None and indent is None
                    and separators is None and encoding == 'utf-8'
                    and default is None and not kw)
    if all_defaults:
        return _default_encoder.encode(obj)
    encoder_cls = cls
    if encoder_cls is None:
        encoder_cls = JSONEncoder
    encoder = encoder_cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
                          check_circular=check_circular, allow_nan=allow_nan,
                          indent=indent, separators=separators,
                          encoding=encoding, default=default, **kw)
    return encoder.encode(obj)
# Shared decoder used by loads()/load() on the all-defaults fast path, so a
# JSONDecoder does not have to be rebuilt for every call.
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
                               object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    This is a thin convenience wrapper: the whole stream is read into
    memory and handed to :func:`loads`, which accepts the same keywords:

    - ``encoding``: encoding used to interpret decoded ``str`` objects
      (UTF-8 by default); no effect on ``unicode``.  Only supersets of
      ASCII work — pass other encodings in as ``unicode``.
    - ``object_hook``: called with each decoded JSON object; its return
      value replaces the ``dict`` (e.g. JSON-RPC class hinting).
    - ``object_pairs_hook``: called with an ordered list of pairs for
      each object literal; takes priority over ``object_hook`` (useful
      for :func:`collections.OrderedDict`).
    - ``parse_float`` / ``parse_int``: alternate parsers for JSON
      numbers (e.g. :class:`decimal.Decimal`, :class:`float`).
    - ``parse_constant``: called for ``'-Infinity'``, ``'Infinity'``,
      ``'NaN'``; may raise to reject invalid JSON numbers.
    - ``cls``: a custom ``JSONDecoder`` subclass.
    """
    document = fp.read()
    return loads(document,
                 encoding=encoding, cls=cls, object_hook=object_hook,
                 parse_float=parse_float, parse_int=parse_int,
                 parse_constant=parse_constant,
                 object_pairs_hook=object_pairs_hook, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a
    JSON document) to a Python object.

    - ``encoding``: encoding used to interpret ``str`` input (UTF-8 by
      default); has no effect on ``unicode`` input.  Only supersets of
      ASCII work — pass other encodings in as ``unicode``.
    - ``object_hook``: called with every decoded JSON object; its return
      value replaces the ``dict`` (e.g. JSON-RPC class hinting).
    - ``object_pairs_hook``: called with an ordered list of (key, value)
      pairs per object literal; wins over ``object_hook`` when both are
      given (useful for :func:`collections.OrderedDict`).
    - ``parse_float`` / ``parse_int``: alternate parsers for JSON
      numbers (e.g. :class:`decimal.Decimal`, :class:`float`).
    - ``parse_constant``: called for ``'-Infinity'``, ``'Infinity'``,
      ``'NaN'``; may raise to reject invalid JSON numbers.
    - ``cls``: a custom ``JSONDecoder`` subclass.
    """
    # All-defaults fast path: reuse the shared module-level decoder.
    if (cls is None and encoding is None and object_hook is None and
            parse_int is None and parse_float is None and
            parse_constant is None and object_pairs_hook is None and not kw):
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied, so the decoder
    # class keeps its own defaults for the rest.
    for name, hook in (('object_hook', object_hook),
                       ('object_pairs_hook', object_pairs_hook),
                       ('parse_float', parse_float),
                       ('parse_int', parse_int),
                       ('parse_constant', parse_constant)):
        if hook is not None:
            kw[name] = hook
    return cls(encoding=encoding, **kw).decode(s)
def _toggle_speedups(enabled):
    """Swap the package between its C speedups and pure-Python code paths.

    With ``enabled`` true, every hook (``scanstring``, the encoder maker,
    the ASCII basestring escaper and the scanner factory) is pointed at
    the C implementation when available, falling back to pure Python;
    with ``enabled`` false the pure-Python versions are forced.  The
    module-level default encoder/decoder are rebuilt afterwards so they
    pick up the newly selected implementations.
    """
    import simplejson.decoder as decoder_mod
    import simplejson.encoder as encoder_mod
    import simplejson.scanner as scanner_mod
    try:
        from simplejson._speedups import make_encoder as c_make_encoder
    except ImportError:
        c_make_encoder = None
    if enabled:
        decoder_mod.scanstring = (decoder_mod.c_scanstring or
                                  decoder_mod.py_scanstring)
        encoder_mod.c_make_encoder = c_make_encoder
        encoder_mod.encode_basestring_ascii = (
            encoder_mod.c_encode_basestring_ascii or
            encoder_mod.py_encode_basestring_ascii)
        scanner_mod.make_scanner = (scanner_mod.c_make_scanner or
                                    scanner_mod.py_make_scanner)
    else:
        decoder_mod.scanstring = decoder_mod.py_scanstring
        encoder_mod.c_make_encoder = None
        encoder_mod.encode_basestring_ascii = (
            encoder_mod.py_encode_basestring_ascii)
        scanner_mod.make_scanner = scanner_mod.py_make_scanner
    decoder_mod.make_scanner = scanner_mod.make_scanner
    # Rebuild the cached all-defaults codecs so the fast paths in
    # dumps()/loads() use the implementations selected above.
    global _default_decoder
    _default_decoder = JSONDecoder(
        encoding=None,
        object_hook=None,
        object_pairs_hook=None,
    )
    global _default_encoder
    _default_encoder = JSONEncoder(
        skipkeys=False,
        ensure_ascii=True,
        check_circular=True,
        allow_nan=True,
        indent=None,
        separators=None,
        encoding='utf-8',
        default=None,
    )
r"""Command-line tool to validate and pretty-print JSON
Usage::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
import sys
import simplejson as json
def main():
if len(sys.argv) == 1:
infile = sys.stdin
outfile = sys.stdout
elif len(sys.argv) == 2:
infile = open(sys.argv[1], 'rb')
outfile = sys.stdout
elif len(sys.argv) == 3:
infile = open(sys.argv[1], 'rb')
outfile = open(sys.argv[2], 'wb')
else:
raise SystemExit(sys.argv[0] + " [infile [outfile]]")
try:
obj = json.load(infile, object_pairs_hook=json.OrderedDict)
except ValueError, e:
raise SystemExit(e)
json.dump(obj, outfile, sort_keys=True, indent=' ')
outfile.write('\n')
# Allow running as a script: python -m simplejson.tool
if __name__ == '__main__':
    main()
# ---- | Python | (dataset file separator; commented out so the file parses) ----
"""JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
# Public names exported by this module.
__all__ = ['make_scanner']
# Matches one JSON number at the current position.  Groups: (integer part,
# fraction including the leading dot, exponent including e/E) — consumed by
# the scanner below to pick int vs. float parsing.
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))


def py_make_scanner(context):
    """Build a pure-Python scan function for one decode *context*.

    The returned callable ``scan_once(string, idx)`` parses a single JSON
    value starting at ``idx`` and returns ``(value, end_index)``; it
    raises ``StopIteration`` when ``idx`` is past the end of the input or
    no JSON value starts there.  Every context attribute is captured in a
    local once, so the closure avoids repeated attribute lookups in the
    hot parsing loop.
    """
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    object_pairs_hook = context.object_pairs_hook

    def _scan_once(string, idx):
        try:
            ch = string[idx]
        except IndexError:
            raise StopIteration
        if ch == '"':
            return parse_string(string, idx + 1, encoding, strict)
        if ch == '{':
            return parse_object((string, idx + 1), encoding, strict,
                                _scan_once, object_hook, object_pairs_hook)
        if ch == '[':
            return parse_array((string, idx + 1), _scan_once)
        if ch == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        if ch == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        if ch == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5
        # Numbers are tried before the non-standard constants, matching
        # the original check order exactly.
        number = match_number(string, idx)
        if number is not None:
            integer, frac, exp = number.groups()
            if frac or exp:
                value = parse_float(integer + (frac or '') + (exp or ''))
            else:
                value = parse_int(integer)
            return value, number.end()
        if ch == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        if ch == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        if ch == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        raise StopIteration

    return _scan_once
# Prefer the C speedup when it imported successfully; otherwise pure Python.
make_scanner = c_make_scanner or py_make_scanner
# ---- | Python | (dataset file separator; commented out so the file parses) ----
"""Drop-in replacement for collections.OrderedDict by Raymond Hettinger
http://code.activestate.com/recipes/576693/
"""
from UserDict import DictMixin
# Modified from original to support Python 2.4, see
# http://code.google.com/p/simplejson/issues/detail?id=53
try:
    all
except NameError:
    # Python < 2.5 has no all() builtin; provide a short-circuiting
    # fallback with the same semantics.
    def all(iterable):
        for element in iterable:
            if not element:
                return False
        return True
class OrderedDict(dict, DictMixin):
    """dict subclass that remembers the order keys were first inserted.

    Ordering is tracked in a circular doubly linked list of
    ``[key, prev, next]`` cells rooted at a sentinel node (``self.__end``);
    ``self.__map`` maps each key to its cell so deletion can unlink it in
    O(1).  The key/value storage itself is the inherited ``dict``.
    """

    def __init__(self, *args, **kwds):
        # Same contract as dict(): at most one positional mapping/iterable.
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # If the linked list already exists (re-running __init__),
            # keep the current ordering instead of resetting it.
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)

    def clear(self):
        self.__end = end = []
        end += [None, end, end]  # sentinel node for doubly linked list
        self.__map = {}          # key --> [key, prev, next]
        dict.clear(self)

    def __setitem__(self, key, value):
        # New keys are appended at the tail of the list; assigning to an
        # existing key keeps its original position.
        if key not in self:
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)

    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Unlink the key's cell from the ordering list.
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev

    def __iter__(self):
        # Walk forward (insertion order) until we are back at the sentinel.
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]

    def __reversed__(self):
        # Walk backward from the sentinel (most recent key first).
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]

    def popitem(self, last=True):
        """Remove and return a (key, value) pair; last-inserted first when
        *last* is true, else first-inserted."""
        if not self:
            raise KeyError('dictionary is empty')
        # Modified from original to support Python 2.4, see
        # http://code.google.com/p/simplejson/issues/detail?id=53
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value

    def __reduce__(self):
        """Pickle support: persist items (preserving order) and any extra
        instance attributes, but not the linked-list internals."""
        items = [[k, self[k]] for k in self]
        # Temporarily strip __map/__end so vars() sees only user attributes.
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)

    def keys(self):
        # Insertion-ordered key list (delegates to __iter__).
        return list(self)

    # Derive the rest of the mapping API from DictMixin so every method
    # funnels through the ordered primitives defined above.
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems

    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())

    def copy(self):
        return self.__class__(self)

    @classmethod
    def fromkeys(cls, iterable, value=None):
        """Create an OrderedDict with keys from *iterable*, all mapped to
        *value* (insertion order follows *iterable*)."""
        d = cls()
        for key in iterable:
            d[key] = value
        return d

    def __eq__(self, other):
        # Order-sensitive equality against another OrderedDict; plain
        # (order-insensitive) dict equality otherwise.
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and \
                all(p==q for p, q in zip(self.items(), other.items()))
        return dict.__eq__(self, other)

    def __ne__(self, other):
        return not self == other
# ---- | Python | (dataset file separator; commented out so the file parses) ----
#!/usr/bin/python2.4
#
# Copyright 2009 Google Inc. All Rights Reserved.
"""Run robot from the commandline for testing.
This robot_runner let's you define event handlers using flags and takes the
json input from the std in and writes out the json output to stdout.
for example
cat events | commandline_robot_runner.py \
--eventdef-blip_submitted="wavelet.title='title'"
"""
__author__ = 'douwe@google.com (Douwe Osinga)'
import sys
import urllib
from google3.pyglib import app
from google3.pyglib import flags
from google3.walkabout.externalagents import api
from google3.walkabout.externalagents.api import blip
from google3.walkabout.externalagents.api import element
from google3.walkabout.externalagents.api import errors
from google3.walkabout.externalagents.api import events
from google3.walkabout.externalagents.api import ops
from google3.walkabout.externalagents.api import robot
from google3.walkabout.externalagents.api import util
# Shorthand for the global flag registry.
FLAGS = flags.FLAGS

# Define one --eventdef_<event_type> string flag per known event type so a
# handler body can be supplied as source code on the command line.
for event in events.ALL:
    flags.DEFINE_string('eventdef_' + event.type.lower(),
                        '',
                        'Event definition for the %s event' % event.type)
def handle_event(src, bot, e, w):
    """Handle an event by executing the source code src.

    The snippet runs with the event (``e``), wavelet (``w``), bot and the
    api modules pre-bound in its global namespace.
    """
    globs = {'e': e, 'w': w, 'api': api, 'bot': bot,
             'blip': blip, 'element': element, 'errors': errors,
             'events': events, 'ops': ops, 'robot': robot,
             'util': util}
    # SECURITY: this executes arbitrary code supplied on the command line.
    # Acceptable only because this is a local testing tool; never feed it
    # untrusted input.
    exec src in globs
def run_bot(input_file, output_file):
    """Run a robot defined on the command line.

    Registers a handler for every event type whose --eventdef_* flag is
    non-empty, feeds the JSON read from *input_file* through the bot, and
    writes the JSON response to *output_file*.
    """
    cmdbot = robot.Robot('Commandline bot')
    for event in events.ALL:
        # Flag values may be URL-quoted to survive shell escaping.
        src = getattr(FLAGS, 'eventdef_' + event.type.lower())
        src = urllib.unquote_plus(src)
        if src:
            # Default arguments (src=src, bot=cmdbot) capture the current
            # loop values, avoiding the late-binding closure pitfall.
            cmdbot.register_handler(event,
                lambda event, wavelet, src=src, bot=cmdbot:
                    handle_event(src, bot, event, wavelet))
    # Events arrive as UTF-8 encoded JSON on the input stream.
    json_body = unicode(input_file.read(), 'utf8')
    json_response = cmdbot.process_events(json_body)
    output_file.write(json_response)
def main(argv):
    """Entry point for app.run(): wire the bot to stdin/stdout."""
    run_bot(sys.stdin, sys.stdout)
# app.run() parses the command-line flags and then dispatches to main().
if __name__ == '__main__':
    app.run()
# ---- | Python | (dataset file separator; commented out so the file parses) ----
#!/usr/bin/python2.4
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Unit tests for the element module."""
import base64
import unittest
import element
import util
class TestElement(unittest.TestCase):
    """Tests for the element.Element class and its subclasses.

    Fixes over the original: the dead ``k = s.keys(); k.sort()`` locals
    (computed but never used) are removed from the serialization tests,
    the deprecated ``assertEquals`` alias is replaced by ``assertEqual``,
    and stale count comments are corrected.
    """

    def testProperties(self):
        """Arbitrary keyword properties become attributes on the element."""
        el = element.Element(element.Gadget.class_type,
                             key='value')
        self.assertEqual('value', el.key)

    def testFormElement(self):
        """A form element keeps its name and defaults to an empty value."""
        el = element.Input('input')
        self.assertEqual(element.Input.class_type, el.type)
        self.assertEqual(el.value, '')
        self.assertEqual(el.name, 'input')

    def testImage(self):
        """Image stores its url and dimensions."""
        image = element.Image('http://test.com/image.png', width=100, height=100)
        self.assertEqual(element.Image.class_type, image.type)
        self.assertEqual(image.url, 'http://test.com/image.png')
        self.assertEqual(image.width, 100)
        self.assertEqual(image.height, 100)

    def testAttachment(self):
        """Attachment keeps its caption and raw (unencoded) data."""
        attachment = element.Attachment(caption='My Favorite', data='SomefakeData')
        self.assertEqual(element.Attachment.class_type, attachment.type)
        self.assertEqual(attachment.caption, 'My Favorite')
        self.assertEqual(attachment.data, 'SomefakeData')

    def testGadget(self):
        """Gadget stores its manifest url."""
        gadget = element.Gadget('http://test.com/gadget.xml')
        self.assertEqual(element.Gadget.class_type, gadget.type)
        self.assertEqual(gadget.url, 'http://test.com/gadget.xml')

    def testInstaller(self):
        """Installer stores its manifest url."""
        installer = element.Installer('http://test.com/installer.xml')
        self.assertEqual(element.Installer.class_type, installer.type)
        self.assertEqual(installer.manifest, 'http://test.com/installer.xml')

    def testSerialize(self):
        """Serializing an Image yields exactly url, width and height."""
        image = element.Image('http://test.com/image.png', width=100, height=100)
        s = util.serialize(image)
        props = s['properties']
        self.assertEqual(len(props), 3)
        self.assertEqual(props['url'], 'http://test.com/image.png')
        self.assertEqual(props['width'], 100)
        self.assertEqual(props['height'], 100)

    def testSerializeAttachment(self):
        """Serializing an Attachment base64-encodes the data property."""
        attachment = element.Attachment(caption='My Favorite', data='SomefakeData')
        s = util.serialize(attachment)
        props = s['properties']
        self.assertEqual(len(props), 2)
        self.assertEqual(props['caption'], 'My Favorite')
        self.assertEqual(props['data'], base64.encodestring('SomefakeData'))
        # The element itself must still hold the raw, unencoded data.
        self.assertEqual(attachment.data, 'SomefakeData')

    def testSerializeLine(self):
        """Serializing a Line yields exactly lineType and alignment."""
        line = element.Line(element.Line.TYPE_H1, alignment=element.Line.ALIGN_LEFT)
        s = util.serialize(line)
        props = s['properties']
        self.assertEqual(len(props), 2)
        self.assertEqual(props['alignment'], 'l')
        self.assertEqual(props['lineType'], 'h1')

    def testSerializeGadget(self):
        """Serializing a Gadget yields the url plus each gadget property."""
        gadget = element.Gadget('http://test.com', {'prop1': 'a', 'prop_cap': None})
        s = util.serialize(gadget)
        props = s['properties']
        self.assertEqual(len(props), 3)
        self.assertEqual(props['url'], 'http://test.com')
        self.assertEqual(props['prop1'], 'a')
        self.assertEqual(props['prop_cap'], None)

    def testGadgetElementFromJson(self):
        """Element.from_json reconstructs a Gadget from its wire form."""
        url = 'http://www.foo.com/gadget.xml'
        json = {
            'type': element.Gadget.class_type,
            'properties': {
                'url': url,
            }
        }
        gadget = element.Element.from_json(json)
        self.assertEqual(element.Gadget.class_type, gadget.type)
        self.assertEqual(url, gadget.url)

    def testImageElementFromJson(self):
        """Element.from_json reconstructs an Image with all properties."""
        url = 'http://www.foo.com/image.png'
        width = '32'
        height = '32'
        attachment_id = '2'
        caption = 'Test Image'
        json = {
            'type': element.Image.class_type,
            'properties': {
                'url': url,
                'width': width,
                'height': height,
                'attachmentId': attachment_id,
                'caption': caption,
            }
        }
        image = element.Element.from_json(json)
        self.assertEqual(element.Image.class_type, image.type)
        self.assertEqual(url, image.url)
        self.assertEqual(width, image.width)
        self.assertEqual(height, image.height)
        self.assertEqual(attachment_id, image.attachmentId)
        self.assertEqual(caption, image.caption)

    def testAttachmentElementFromJson(self):
        """Element.from_json reconstructs an Attachment with all properties."""
        caption = 'fake caption'
        data = 'fake data'
        mime_type = 'fake mime'
        attachment_id = 'fake id'
        attachment_url = 'fake URL'
        json = {
            'type': element.Attachment.class_type,
            'properties': {
                'caption': caption,
                'data': data,
                'mimeType': mime_type,
                'attachmentId': attachment_id,
                'attachmentUrl': attachment_url,
            }
        }
        attachment = element.Element.from_json(json)
        self.assertEqual(element.Attachment.class_type, attachment.type)
        self.assertEqual(caption, attachment.caption)
        self.assertEqual(data, attachment.data)
        self.assertEqual(mime_type, attachment.mimeType)
        self.assertEqual(attachment_id, attachment.attachmentId)
        self.assertEqual(attachment_url, attachment.attachmentUrl)

    def testFormElementFromJson(self):
        """Element.from_json reconstructs a form element (Label)."""
        name = 'button'
        value = 'value'
        default_value = 'foo'
        json = {
            'type': element.Label.class_type,
            'properties': {
                'name': name,
                'value': value,
                'defaultValue': default_value,
            }
        }
        el = element.Element.from_json(json)
        self.assertEqual(element.Label.class_type, el.type)
        self.assertEqual(name, el.name)
        self.assertEqual(value, el.value)

    def testCanInstantiate(self):
        """Every element type listed in element.ALL can be constructed."""
        bag = [element.Check(name='check', value='value'),
               element.Button(name='button', value='caption'),
               element.Input(name='input', value='caption'),
               element.Label(label_for='button', caption='caption'),
               element.RadioButton(name='name', group='group'),
               element.RadioButtonGroup(name='name', value='value'),
               element.Password(name='name', value='geheim'),
               element.TextArea(name='name', value='\n\n\n'),
               element.Installer(manifest='test.com/installer.xml'),
               element.Line(line_type='type',
                            indent='3',
                            alignment='r',
                            direction='d'),
               element.Gadget(url='test.com/gadget.xml',
                              props={'key1': 'val1', 'key2': 'val2'}),
               element.Image(url='test.com/image.png', width=100, height=200),
               element.Attachment(caption='fake caption', data='fake data')]
        # The set of constructed types must match element.ALL exactly in
        # both directions (nothing extra, nothing missing).
        types_constructed = set([type(x) for x in bag])
        types_required = set(element.ALL.values())
        missing_required = types_constructed.difference(types_required)
        self.assertEqual(missing_required, set())
        missing_constructed = types_required.difference(types_constructed)
        self.assertEqual(missing_constructed, set())
# Run the test suite when executed directly.
if __name__ == '__main__':
    unittest.main()
# ---- | Python | (dataset file separator; commented out so the file parses) ----
# (Dataset-viewer artifact, not part of the original sources — commented out:)
# Subsets and Splits
# SQL Console for ajibawa-2023/Python-Code-Large
# Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.