code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
import pygame, os
import newtest
class cursor:
    """Mouse hit-box: a 1x1 rect kept glued to the pointer position.

    Menu items collide against ``imagerect`` to detect hover/click.
    """

    def __init__(self):
        # A custom cursor sprite used to exist; only the rect survives.
        self.imagerect = pygame.Rect(0, 0, 1, 1)

    def update(self):
        # Re-anchor the rect to wherever the mouse currently is.
        x, y = pygame.mouse.get_pos()
        self.imagerect.topleft = (x, y)
class menuitem:
    """One clickable line of menu text.

    Renders *text* at *pos*, highlights itself while the shared mouse
    cursor rect overlaps it, and reports left-clicks on it.
    """

    def __init__(self, text, pos, cursor, command = ""):
        self.text = text
        self.font = pygame.font.Font(pygame.font.get_default_font(), 25)
        self.txtimage = self.font.render(self.text, True, (255, 255, 255))
        self.txtrect = self.txtimage.get_rect()
        self.txtrect.topleft = pos
        self.command = command
        self.cursor = cursor
        self.heightlighted = False  # (sic) spelling kept -- read by show()

    def update(self):
        # Highlighted exactly while the cursor rect touches the label.
        self.heightlighted = bool(self.txtrect.colliderect(self.cursor.imagerect))

    def detctselect(self, event):
        # (sic) name kept for callers.  Returns 1 on a left mouse-button
        # press over this label, otherwise None.
        if event.type != pygame.MOUSEBUTTONDOWN:
            return
        if event.button != 1:
            return
        if self.cursor.imagerect.colliderect(self.txtrect):
            return 1

    def show(self, screen):
        # Hovered items are re-rendered on the fly in the highlight colour;
        # otherwise the cached white surface is blitted.
        if self.heightlighted:
            screen.blit(self.font.render(self.text, True, (255, 255, 128)), self.txtrect)
        else:
            screen.blit(self.txtimage, self.txtrect)
class Menu:
    """Main menu screen: builds the option list and runs its own loop.

    The constructor blocks until the user quits (window close, ESC, or
    the Quit entry); selecting a play option starts a game session via
    ``newtest.rungame`` and returns to the menu afterwards.
    """

    def __init__(self, screen):
        self.cursor = cursor()
        self.clock = pygame.time.Clock()
        xoffset = 210
        yoffset = 150
        # Three entries stacked 35px apart, all sharing one cursor.
        self.options = []
        self.options.append(menuitem("1 Player Game", (xoffset, yoffset + (0 * 35)), self.cursor, "1P"))
        self.options.append(menuitem("2 Player Game", (xoffset, yoffset + (1 * 35)), self.cursor, "2P"))
        self.options.append(menuitem("Quit The Game", (xoffset, yoffset + (2 * 35)), self.cursor, "QUIT"))
        while True:
            self.clock.tick(50)  # cap the menu at ~50 fps
            for e in pygame.event.get():
                if e.type == pygame.QUIT:
                    pygame.quit()
                    return
                if e.type == pygame.KEYDOWN and e.key == pygame.K_ESCAPE:
                    pygame.quit()
                    return
                # Fix: Python 2 ``print`` statements converted to print()
                # calls so the module also runs under Python 3.
                if self.options[0].detctselect(e):
                    print("1 Player Game")
                    newtest.rungame(screen, "1P")
                if self.options[1].detctselect(e):
                    print("2 Player Game")
                    newtest.rungame(screen, "2P")
                if self.options[2].detctselect(e):
                    print("Quit The Game")
                    pygame.quit()
                    return
            screen.fill((0, 0, 0))
            for op in self.options:
                op.update()
                op.show(screen)
            self.cursor.update()
            pygame.display.flip()
| Python |
import pygame
from pygame.locals import *
import menu
if __name__ == "__main__":
    # Boot pygame, open the window, then hand control to the menu loop
    # (which blocks until the user quits).
    pygame.init()
    pygame.display.set_caption("--SHIT WAR--")
    screen = pygame.display.set_mode((600, 400))
    menu.Menu(screen)
| Python |
import pygame
import msg
import random
# Object-pool layout (indices into the shared objlist):
#   [0, STARNUM)            background stars
#   [STARNUM, MAXOBJ]       enemy slots (see obj.update / mkenemy)
#   [MAXOBJ, GUNFIRETOP)    player gun-fire slots (fireinthehole below)
#   [GUNFIRETOP, TOTALFIRETOP)  enemy-fire slots
# NOTE(review): index MAXOBJ is used both as an enemy slot (inclusive
# randint/range elsewhere) and as the first gun-fire slot -- confirm.
STARNUM = 80
MAXOBJ = 90
GUNFIRETOP = 130
TOTALFIRETOP = 140
class pcounter:
    """Mutable score holder, shared between a player and its gun shots
    so a shot can credit its owner when it kills an enemy."""

    def __init__(self):
        self.score = 0  # running score, incremented in obj.update()
class cplayer:
    """A player ship: sprite, movement flags, firing, and score display.

    playernum selects the sprite and score position; ``mid`` picks the
    centred (1P) vs quarter-screen (2P) start position.  ``objlist`` is
    the shared object pool into which gun shots are spawned.
    """

    def __init__(self, playernum, screen, screen_width, screen_height, objlist, mid = True):
        self.screen = screen
        self.screen_width = screen_width
        self.screen_height = screen_height
        self.objlist = objlist
        self.pcnt = pcounter()       # score, shared with fired shots
        self.playernum = playernum
        self.active = True           # False = player not participating (1P mode)
        if (playernum == 1):
            self.image = pygame.image.load("player1.png")
            self.imagerect = self.image.get_rect()
            # NOTE(review): '/' is true division under Python 3, yielding
            # floats; pygame truncates them -- confirm intended.
            if (mid):
                # single player: start centred at the bottom edge
                self.imagerect = self.imagerect.move(self.screen_width / 2 - self.imagerect.width / 2, self.screen_height - self.imagerect.height)
            else:
                # two players: player 1 starts at the left quarter
                self.imagerect = self.imagerect.move(self.screen_width / 4, self.screen_height - self.imagerect.height)
            self.score = msg.msgimage("SCORE: " + str(self.pcnt.score),(0,0))#player score label, top-left
        elif (playernum == 2):
            self.image = pygame.image.load("player2.png")
            self.imagerect = self.image.get_rect()
            if (mid):
                self.imagerect = self.imagerect.move(self.screen_width / 2 - self.imagerect.width / 2, self.screen_height - self.imagerect.height)
            else:
                # two players: player 2 starts at the right quarter
                self.imagerect = self.imagerect.move(3 * self.screen_width / 4, self.screen_height - self.imagerect.height)
            self.score = msg.msgimage("SCORE: " + str(self.pcnt.score),(self.screen_width - 180,0))#player score label, top-right
        self.up = 0 #going up
        self.down = 0 #going down
        self.left = 0 #going left
        self.right = 0 #going right
        self.fire = 0 #going to fire
        self.v = 8 #speed, pixels per frame
        self.alive = True

    # Keyboard handlers toggle these flags; motion() applies them each frame.
    def set_move_left(self):
        self.left = True

    def set_move_right(self):
        self.right = True

    def set_move_up(self):
        self.up = True

    def set_move_down(self):
        self.down = True

    def set_move_fire(self):
        self.fire = True

    def clr_move_left(self):
        self.left = False

    def clr_move_right(self):
        self.right = False

    def clr_move_up(self):
        self.up = False

    def clr_move_down(self):
        self.down = False

    def clr_move_fire(self):
        self.fire = False

    def fireinthehole(self, target, belongto = None):
        # Spawn one shot into the first free gun-fire slot of the pool
        # (indices MAXOBJ..GUNFIRETOP-1); silently does nothing when full.
        for i in range(MAXOBJ, GUNFIRETOP):
            if (self.objlist[i].type == self.objlist[i].NON_TYPE):
                self.objlist[i].set_type(self.objlist[i].GUN_FIRE, target, belongto)
                return

    def motion(self):
        # Apply the movement flags, clamping the ship to the screen, and
        # fire one shot per frame while the fire flag is held.
        if (self.alive and self.active):
            if (self.up):
                if (self.imagerect.top - self.v >= 0):
                    self.imagerect = self.imagerect.move(0,-self.v)
                else:
                    # would overshoot: snap to the top edge instead
                    self.imagerect = self.imagerect.move(0,0 - self.imagerect.top)
            if (self.down):
                if (self.imagerect.bottom + self.v < self.screen_height):
                    self.imagerect = self.imagerect.move(0,self.v)
                else:
                    self.imagerect = self.imagerect.move(0,self.screen_height - self.imagerect.bottom)
            if (self.left):
                if (self.imagerect.left - self.v >= 0):
                    self.imagerect = self.imagerect.move(-self.v,0)
                else:
                    self.imagerect = self.imagerect.move(0 - self.imagerect.left,0)
            if (self.right):
                if (self.imagerect.right + self.v < self.screen_width):
                    self.imagerect = self.imagerect.move(self.v,0)
                else:
                    self.imagerect = self.imagerect.move(self.screen_width - self.imagerect.right,0)
            if (self.fire):
                self.fireinthehole(self.imagerect,self)

    def show(self):
        # Only draw participating, living ships.
        if (self.alive and self.active):
            self.screen.blit(self.image,self.imagerect)

    def updatescore(self):
        # Show the running score while alive, a death message otherwise;
        # inactive players draw nothing at all.
        if (self.alive):
            self.score.update("SCORE: " + str(self.pcnt.score))
        else:
            self.score.update("OH!! SHIT!!!")
        if (self.active):
            self.score.show(self.screen)
| Python |
import pygame
class msgimage:
    """A cached, pre-rendered text label.

    Renders *msg* once at construction; ``update()`` re-renders only when
    handed a genuinely different string, so ``show()`` can blit a cached
    surface every frame.
    """

    def __init__(self, msg, pos, size = 16):
        self.msg = msg
        self.font = pygame.font.Font(pygame.font.get_default_font(), size)
        self.msgimage = self.font.render(msg, False, (255, 255, 255))
        self.pos = pos
        self.msgimagerect = self.msgimage.get_rect().move(self.pos)

    def update(self, msg = None):
        # Bug fix: the original never refreshed self.msg after rendering,
        # so every new string was compared against the *constructor* text:
        # identical repeated updates re-rendered every call, and updating
        # back to the original text was silently ignored.
        if msg is not None and msg != self.msg:
            self.msg = msg
            self.msgimage = self.font.render(msg, False, (255, 255, 255))
            self.msgimagerect = self.msgimage.get_rect().move(self.pos)

    def show(self, screen):
        screen.blit(self.msgimage, self.msgimagerect)
| Python |
import pygame
import random,sys,math
#virtical shotting game...
# Object-pool layout; must match the copies of these constants in the
# player and game-loop modules (duplicated rather than shared -- keep in
# sync).  See the ranges used in obj.update/fireoutthehole below.
STARNUM = 80
MAXOBJ = 90
GUNFIRETOP = 130
TOTALFIRETOP = 140
class obj:
    """One slot of the pre-allocated object pool.

    A slot is recycled between roles -- star, enemy, player shot, enemy
    shot, or one of three explosion frames -- selected by ``type``;
    NON_TYPE marks a free slot.  ``length``/``height`` are the screen
    width/height in pixels.
    """
    # Pixel offsets for spawning a gun shot relative to the firing ship.
    GUNFROFFSETV = 10
    GUNFROFFSETH = 14
    # Role tags.
    NON_TYPE = 0
    GUN_FIRE = 1
    ENEMY = 2
    STAR = 3
    FLAME1 = 4   # explosion animation, frames 1..3
    FLAME2 = 5
    FLAME3 = 6
    ENEMYFR = 7  # enemy bullet
    TRACELEN = 20  # lateral tracking steps an enemy is allowed

    def __init__(self, length, height, objlist):
        self.type = 0
        self.belongto = None   # firing player (score credit) or source craft
        self.tracelen = 0      # remaining tracking steps / explosion frames
        self.objlist = objlist # shared pool, used to spawn enemy fire
        self.smart = 0         # enemy AI: 0 dumb, 1 tracks, 2 tracks + fires
        self.v = 0 #virtical speed
        self.h = 0 #horizontal speed
        self.length = length
        self.height = height
        self.image = None
        self.imagerect = None
        self.target = None     # player object an enemy is chasing

    def set_type(self, t, target = None, belongto = None):
        """Re-initialise this slot as role *t*.

        *target* is a rect for shots/flames, or a player object for
        enemies/enemy fire; *belongto* is the owning player or craft.
        """
        self.type = t
        if (self.type == self.GUN_FIRE and target != None):
            self.image = pygame.image.load("gun_fire.png")
            self.belongto = belongto
            self.imagerect = self.image.get_rect().move(target.left + self.GUNFROFFSETV,target.top + self.GUNFROFFSETH)
            self.v = 25
        elif (self.type == self.ENEMY):
            self.image = pygame.image.load("enemy.png")
            self.target = target
            self.tracelen = self.TRACELEN
            # Spawn above the visible screen at a random x.
            self.imagerect = self.image.get_rect().move(random.randint(20,self.length - 20),-random.randint(10,400))
            self.v = 7
        elif (self.type == self.STAR):
            self.image = pygame.image.load("star.png")
            self.imagerect = self.image.get_rect().move(random.randint(0,self.length),random.randint(0,self.height))
            self.v = random.randint(3,6)
        elif (self.type == self.FLAME1):
            self.image = pygame.image.load("flame1.png")
            # NOTE(review): self.imagerect here is still the rect of the
            # *previous* role's sprite (and would be None on a fresh slot),
            # so the flame is centred using the old sprite's size -- confirm.
            self.imagerect = self.image.get_rect().move(target.left - self.imagerect.width / 2, target.top - self.imagerect.height / 2)
            self.tracelen = 3#last time counter
        elif (self.type == self.FLAME2):
            self.image = pygame.image.load("flame2.png")
            self.imagerect = self.image.get_rect().move(target.left, target.top)
            self.tracelen = 3#last time counter
        elif (self.type == self.FLAME3):
            self.image = pygame.image.load("flame3.png")
            self.imagerect = self.image.get_rect().move(target.left, target.top)
            self.tracelen = 3#last time counter
        elif (self.type == self.ENEMYFR):
            self.image = pygame.image.load("enemy_fire.png")
            if (belongto != None and target.alive):
                self.imagerect = self.image.get_rect().move(belongto.imagerect.center) # the belong here is the location of the enemy craft
                # Aim at the player: split a speed of 10 into (h, v)
                # components along the craft->player direction.
                tcen = list(target.imagerect.center)
                bcen = list(belongto.imagerect.center)
                dv = abs(tcen[1] - bcen[1])
                dh = abs(tcen[0] - bcen[0])
                if (dv != 0):
                    r = float(dh) / float(dv)
                    self.v = 10.0 / float(math.sqrt(r ** 2 + 1))
                    self.h = r * self.v
                else:
                    self.v = 10
                    self.h = 0
            else:
                self.v = 10
                self.h = 0
            # NOTE(review): tcen/bcen are unbound when the else branch above
            # was taken; in practice callers pass belongto and check
            # target.alive first, but this would raise NameError otherwise.
            if (tcen[0] < bcen[0]):
                self.h = -self.h
            if (tcen[1] < bcen[1]):
                self.v = -self.v
        else:
            pass

    def fireoutthehole(self, target, belongto = None):
        # Spawn an enemy bullet into the first free enemy-fire slot
        # (indices GUNFIRETOP..TOTALFIRETOP-1); does nothing when full.
        for i in range(GUNFIRETOP, TOTALFIRETOP):
            if (self.objlist[i].type == self.objlist[i].NON_TYPE):
                self.objlist[i].set_type(self.objlist[i].ENEMYFR, target, belongto)
                return

    def update(self, playerlist = None):
        """Advance this slot one frame; *playerlist* is needed for the
        enemy/enemy-fire roles (collision with players)."""
        if (self.type == self.GUN_FIRE): # GUN FIRE PROCCESS
            # Free the slot once the shot leaves the top of the screen.
            if (self.imagerect.bottom - self.v <= 0):
                self.type = self.NON_TYPE
                return
            self.imagerect = self.imagerect.move(0,-self.v)
            kill = False #if kill somebody
            mostfront = 0 #most front death
            killindex = -1 #where is the unlucky guy
            # Manual AABB overlap test against every enemy slot; of all hit
            # enemies, the one closest to the bottom edge is destroyed.
            for i in range(STARNUM, MAXOBJ + 1):
                if (self.objlist[i].type == self.ENEMY
                    and ( # vertical overlap in any of the four containment cases
                    (self.objlist[i].imagerect.bottom >= self.imagerect.top
                    and self.objlist[i].imagerect.bottom <= self.imagerect.bottom)
                    or
                    (self.objlist[i].imagerect.top >= self.imagerect.top
                    and self.objlist[i].imagerect.top <= self.imagerect.bottom)
                    or
                    (self.objlist[i].imagerect.top >= self.imagerect.top
                    and self.objlist[i].imagerect.bottom <= self.imagerect.bottom)
                    or
                    (self.objlist[i].imagerect.top <= self.imagerect.top
                    and self.objlist[i].imagerect.bottom >= self.imagerect.bottom)
                    )
                    and ( # horizontal overlap, same four cases
                    (self.imagerect.left >= self.objlist[i].imagerect.left
                    and self.imagerect.left <= self.objlist[i].imagerect.right)
                    or
                    (self.imagerect.right >= self.objlist[i].imagerect.left
                    and self.imagerect.right <= self.objlist[i].imagerect.right)
                    or
                    (self.imagerect.left >= self.objlist[i].imagerect.left
                    and self.imagerect.right <= self.objlist[i].imagerect.right)
                    or
                    (self.imagerect.left <= self.objlist[i].imagerect.left
                    and self.imagerect.right >= self.objlist[i].imagerect.right)
                    )
                    ):#Kill the enemy
                    if (self.objlist[i].imagerect.bottom > mostfront):
                        mostfront = self.objlist[i].imagerect.bottom
                        killindex = i
                    kill = True
            if (kill):
                # Credit the owning player, free this shot, and turn the
                # victim into an explosion in place.
                if (self.belongto != None):
                    self.belongto.pcnt.score += 10
                    self.belongto = None
                self.type = self.NON_TYPE
                self.objlist[killindex].set_type(self.FLAME1,self.objlist[killindex].imagerect)
                self.objlist[killindex].smart = 0 #make it stupid!
        elif (self.type == self.ENEMY): # ENEMY PROCCESS
            if (self.imagerect.top + self.v > self.height):
                # Fell off the bottom: free the slot.
                self.type = self.NON_TYPE
                self.smart = 0 #make it stupid!
            else:
                # smart==1: steer laterally toward the target while budget
                # (tracelen) remains and the enemy is still above it.
                if (self.smart == 1 and self.imagerect.bottom >= 0 and self.target != None and self.target.alive):
                    if (self.tracelen and self.imagerect.bottom <= self.target.imagerect.top):
                        if (list(self.imagerect.center)[0] < list(self.target.imagerect.center)[0] - self.v):
                            self.tracelen -= 1
                            self.imagerect = self.imagerect.move(self.v,0)
                        elif (list(self.imagerect.center)[0] > list(self.target.imagerect.center)[0] + self.v):
                            self.tracelen -= 1
                            self.imagerect = self.imagerect.move(-self.v,0)
                self.imagerect = self.imagerect.move(0,self.v)
                # smart==2: same tracking plus a 1-in-20 chance per frame
                # of firing at the target.
                if (self.smart == 2 and self.imagerect.bottom >= 0 and self.target != None and self.target.alive):
                    if (random.randint(1,20) == 1):
                        self.fireoutthehole(self.target,self) #fire to the player
                    if (self.tracelen and self.imagerect.bottom <= self.target.imagerect.top):
                        if (list(self.imagerect.center)[0] < list(self.target.imagerect.center)[0] - self.v):
                            self.tracelen -= 1
                            self.imagerect = self.imagerect.move(self.v,0)
                        elif (list(self.imagerect.center)[0] > list(self.target.imagerect.center)[0] + self.v):
                            self.tracelen -= 1
                            self.imagerect = self.imagerect.move(-self.v,0)
                # NOTE(review): second unconditional descent -- enemies fall
                # 2*v per frame in total; confirm this doubling is intended.
                self.imagerect = self.imagerect.move(0,self.v)
                # Ram check against every living player.
                for item in playerlist:
                    if (item.alive):
                        if (self.imagerect.colliderect(item.imagerect)):# Crash with player
                            self.set_type(self.FLAME1,self.imagerect)
                            self.smart = 0 #make it stupid!
                            item.alive = False# The player is dead
                            return
        elif (self.type == self.STAR): # STAR PROCCESS
            # Stars scroll down forever and respawn above the screen.
            if (self.imagerect.top + self.v > self.height):
                self.imagerect = self.image.get_rect().move(random.randint(0,self.length),-random.randint(10,150))
            else:
                self.imagerect = self.imagerect.move(0,self.v)
        elif (self.type == self.FLAME1): # FLAME PROCCESS
            # Each flame frame lingers tracelen ticks (drifting down),
            # then advances to the next frame; FLAME3 frees the slot.
            if (self.tracelen):
                self.tracelen -= 1
                if (self.imagerect.top + self.v > self.height):
                    self.type = self.NON_TYPE
                    self.smart = 0 #make it stupid!
                else:
                    self.imagerect = self.imagerect.move(0,self.v)
            else:
                self.set_type(self.FLAME2,self.imagerect)
        elif (self.type == self.FLAME2): # FLAME PROCCESS
            if (self.tracelen):
                self.tracelen -= 1
                if (self.imagerect.top + self.v > self.height):
                    self.type = self.NON_TYPE
                    self.smart = 0 #make it stupid!
                else:
                    self.imagerect = self.imagerect.move(0,self.v)
            else:
                self.set_type(self.FLAME3,self.imagerect)
        elif (self.type == self.FLAME3): # FLAME PROCCESS
            if (self.tracelen):
                self.tracelen -= 1
                if (self.imagerect.top + self.v > self.height):
                    self.type = self.NON_TYPE
                    self.smart = 0 #make it stupid!
                else:
                    self.imagerect = self.imagerect.move(0,self.v)
            else:
                self.set_type(self.NON_TYPE)
                self.tracelen = 0
                self.smart = 0
        elif (self.type == self.ENEMYFR): # ENEMY FIRE PROCCESS
            # Free the bullet once it fully leaves any screen edge.
            if ((self.imagerect.top + self.v >= self.height and self.imagerect.bottom + self.v > self.height)
                or
                (self.imagerect.top + self.v < 0 and self.imagerect.bottom + self.v <= 0)
                or
                (self.imagerect.left + self.h < 0 and self.imagerect.right + self.h <= 0)
                or
                (self.imagerect.right + self.h > self.length and self.imagerect.left + self.h >= self.length)):
                self.type = self.NON_TYPE
                return
            self.imagerect = self.imagerect.move(self.h, self.v)
            for item in playerlist:
                if (item.alive):
                    if (self.imagerect.colliderect(item.imagerect)):# Crash with player
                        self.set_type(self.FLAME1,self.imagerect)
                        self.smart = 0 #make it stupid!
                        item.alive = False# The player is dead
                        return
        else:
            pass

    def show(self, screen):
        # Draw every occupied role; free (NON_TYPE) slots draw nothing.
        if (self.type == self.STAR
            or self.type == self.ENEMY
            or self.type == self.GUN_FIRE
            or self.type == self.FLAME1
            or self.type == self.FLAME2
            or self.type == self.FLAME3
            or self.type == self.ENEMYFR):
            screen.blit(self.image, self.imagerect)
| Python |
import pygame
import mynewobject,msg,player
import sys,random
# Module-level side effect: importing this module initialises pygame.
pygame.init()
# Object-pool layout constants -- duplicated in the player/object modules;
# keep the copies in sync.
STARNUM = 80
MAXOBJ = 90
GUNFIRETOP = 130
TOTALFIRETOP = 140
def mkenemy(objlist, playerlist):
    """Try to spawn one enemy into a randomly chosen enemy slot.

    Does nothing if the chosen slot is already occupied.  The enemy
    targets one of the two players at random; two further coin flips
    leave it dumb (~25%), tracking (~25%) or tracking + firing (~50%).
    """
    slot = objlist[random.randint(STARNUM, MAXOBJ)]
    if slot.type != slot.NON_TYPE:
        return
    slot.set_type(slot.ENEMY, playerlist[random.randint(0, 1)])
    if random.randint(1, 2) == 1:
        slot.smart = 1
    if random.randint(1, 2) == 1:
        slot.smart = 2
def rungame(screen, command):
    """Run one game session on *screen* until every active player is dead.

    command: "1P" (player 2 created but inactive) or "2P".
    NOTE(review): any other command leaves player1/player2 unbound and
    raises NameError below -- confirm callers only pass "1P"/"2P".
    """
    screensize = screen.get_size()
    screen_width = screensize[0]
    screen_height = screensize[1]
    # Pre-allocate the whole object pool; the first STARNUM slots become
    # background stars immediately, the rest start free.
    objectlist = []
    for i in range(TOTALFIRETOP):
        objectlist.append(mynewobject.obj(screen_width, screen_height, objectlist))
        if (i < STARNUM):
            objectlist[i].set_type(objectlist[i].STAR)
    # screen = pygame.display.set_mode((screen_width,screen_height),pygame.DOUBLEBUF)
    if (command == "1P"):
        player1 = player.cplayer(1,screen,screen_width,screen_height,objectlist) #new player1 obj
        player2 = player.cplayer(2,screen,screen_width,screen_height,objectlist) #new player2 obj
        player2.active = False
        # player2.alive = False
    elif (command == "2P"):
        player1 = player.cplayer(1,screen,screen_width,screen_height,objectlist,False) #new player1 obj
        player2 = player.cplayer(2,screen,screen_width,screen_height,objectlist,False) #new player2 obj
    players = [player1,player2]
    clock = pygame.time.Clock()
    # Main loop: runs while at least one participating player lives.
    while((player1.alive and player1.active) or (player2.alive and player2.active)):
        clock.tick(15.7)  # frame cap; game speed is tied to this rate
        for event in pygame.event.get():
            if event.type == pygame.QUIT:
                sys.exit()
            # Key-down sets a movement/fire flag; key-up clears it.
            # Player 1: WASD + left Ctrl, player 2: arrows + right Ctrl.
            if event.type == pygame.KEYDOWN:
                if (event.key == pygame.K_s): #player1
                    player1.set_move_down()
                elif (event.key == pygame.K_w): #player1
                    player1.set_move_up()
                elif (event.key == pygame.K_a): #player1
                    player1.set_move_left()
                elif (event.key == pygame.K_d): #player1
                    player1.set_move_right()
                elif (event.key == pygame.K_LCTRL): #player1
                    player1.set_move_fire()
                elif (event.key == pygame.K_DOWN): #player2
                    player2.set_move_down()
                elif (event.key == pygame.K_UP): #player2
                    player2.set_move_up()
                elif (event.key == pygame.K_LEFT): #player2
                    player2.set_move_left()
                elif (event.key == pygame.K_RIGHT): #player2
                    player2.set_move_right()
                elif (event.key == pygame.K_RCTRL): #player2
                    player2.set_move_fire()
                elif (event.key == pygame.K_ESCAPE): #quit game
                    sys.exit();
                else:
                    pass
            if event.type == pygame.KEYUP:
                if (event.key == pygame.K_s): #player1
                    player1.clr_move_down()
                elif (event.key == pygame.K_w): #player1
                    player1.clr_move_up()
                elif (event.key == pygame.K_a): #player1
                    player1.clr_move_left()
                elif (event.key == pygame.K_d): #player1
                    player1.clr_move_right()
                elif (event.key == pygame.K_LCTRL): #player1
                    player1.clr_move_fire()
                elif (event.key == pygame.K_DOWN): #player2
                    player2.clr_move_down()
                elif (event.key == pygame.K_UP): #player2
                    player2.clr_move_up()
                elif (event.key == pygame.K_LEFT): #player2
                    player2.clr_move_left()
                elif (event.key == pygame.K_RIGHT): #player2
                    player2.clr_move_right()
                elif (event.key == pygame.K_RCTRL): #player2
                    player2.clr_move_fire()
                else:
                    pass
        # Frame: clear, move players, advance + draw pool, draw players,
        # present, then try to spawn one new enemy.
        screen.fill((0,0,0))
        for item in players:
            # if (item.alive):
            item.motion()
        for o in objectlist:
            o.update(players) #switch to playerlist
            o.show(screen)
        for item in players:
            item.show()
            item.updatescore()
        pygame.display.flip()
        mkenemy(objectlist,players)
        # pygame.time.delay(60)
        # print "score: ", player1cnt.score
| Python |
# Python script to get both the data and resource fork from a BinHex encoded
# file.
# Author: Taro Muraoka
# Last Change: 2003 Oct 25
import sys
import binhex
# NOTE(review): the ``binhex`` module was deprecated in Python 3.9 and
# removed in 3.11; this script needs Python <= 3.10 (or a vendored copy).
in_name = sys.argv[1]  # renamed from ``input`` to stop shadowing the builtin
conv = binhex.HexBin(in_name)
info = conv.FInfo
out = conv.FName
out_data = out
out_rsrc = out + '.rsrcfork'
#print 'out_rsrc=' + out_rsrc
# Fix: Python 2 ``print`` statements converted to print() calls.
print('In file: ' + in_name)
# Data fork: stream out in 128 KB chunks.
print(' Out data fork: ' + out_data)
with open(out_data, 'wb') as outfile:
    while 1:
        d = conv.read(128000)
        if not d:
            break
        outfile.write(d)
conv.close_data()
# Resource fork: written only when non-empty.
d = conv.read_rsrc(128000)
if d:
    print(' Out rsrc fork: ' + out_rsrc)
    with open(out_rsrc, 'wb') as outfile:
        outfile.write(d)
        while 1:
            d = conv.read_rsrc(128000)
            if not d:
                break
            outfile.write(d)
conv.close()
# vim:set ts=8 sts=4 sw=4 et:
| Python |
from .resolver import resolver
from django.utils.importlib import import_module
def __repr__(self):
return '<%s, %s, %s, %s>' % (self.alias, self.col, self.field.name,
self.field.model.__name__)
from django.db.models.sql.where import Constraint
# Monkey-patch the repr defined above onto Django's Constraint so query
# where-trees print readably while debugging.
Constraint.__repr__ = __repr__
# TODO: manipulate a copy of the query instead of the query itself. This has to
# be done because the query can be reused afterwards by the user so that a
# manipulated query can result in strange behavior for these cases!
# TODO: Add watching layer which gives suggestions for indexes via query inspection
# at runtime
class BaseCompiler(object):
    """Mixin for Django SQL compilers: routes queries through dbindexer."""

    def convert_filters(self):
        # Delegate to the module-level resolver, which rewrites indexed
        # lookups in self.query to their backing index fields.
        resolver.convert_filters(self.query)
class SQLCompiler(BaseCompiler):
    """Select compiler: converts filters before every execution path."""

    def execute_sql(self, *args, **kwargs):
        self.convert_filters()
        return super(SQLCompiler, self).execute_sql(*args, **kwargs)

    def results_iter(self):
        self.convert_filters()
        return super(SQLCompiler, self).results_iter()

    def has_results(self):
        self.convert_filters()
        return super(SQLCompiler, self).has_results()
class SQLInsertCompiler(BaseCompiler):
    """Insert compiler: fills computed index columns before saving."""

    def execute_sql(self, return_id=False):
        # Populate index values in query.values, then run the real insert.
        resolver.convert_insert_query(self.query)
        return super(SQLInsertCompiler, self).execute_sql(return_id=return_id)
class SQLUpdateCompiler(BaseCompiler):
    """Update compiler: no extra conversion beyond BaseCompiler."""
    pass
class SQLDeleteCompiler(BaseCompiler):
    """Delete compiler: no extra conversion beyond BaseCompiler."""
    pass
| Python |
from django.db import models
from django.db.models.fields import FieldDoesNotExist
from django.db.models.sql.constants import JOIN_TYPE, LHS_ALIAS, LHS_JOIN_COL, \
TABLE_NAME, RHS_JOIN_COL
from django.utils.tree import Node
from djangotoolbox.fields import ListField
from .lookups import StandardLookup
OR = 'OR'
# TODO: optimize code
class BaseResolver(object):
    """Maps registered lookups to generated index fields and rewrites
    queries (filters and inserts) to use those fields."""

    def __init__(self):
        # mapping from lookups to indexes
        self.index_map = {}
        # mapping from column names to field names
        self.column_to_name = {}

    ''' API called by resolver'''

    def create_index(self, lookup):
        """Install the index field for *lookup* on its model (idempotent)."""
        field_to_index = self.get_field_to_index(lookup.model, lookup.field_name)
        # backend doesn't know how to handle this index definition
        if not field_to_index:
            return
        index_field = lookup.get_field_to_add(field_to_index)
        # For ListField indexes the per-item field carries the config.
        config_field = index_field.item_field if \
            isinstance(index_field, ListField) else index_field
        if hasattr(field_to_index, 'max_length') and \
                isinstance(config_field, models.CharField):
            # Inherit max_length from the indexed source field.
            config_field.max_length = field_to_index.max_length
        # don't install a field if it already exists
        # NOTE(review): bare except relied upon to mean FieldDoesNotExist.
        try:
            lookup.model._meta.get_field(self.index_name(lookup))
        except:
            lookup.model.add_to_class(self.index_name(lookup), index_field)
            self.index_map[lookup] = index_field
            self.add_column_to_name(lookup.model, lookup.field_name)
        else:
            # makes dbindexer unit test compatible
            if lookup not in self.index_map:
                self.index_map[lookup] = lookup.model._meta.get_field(
                    self.index_name(lookup))
                self.add_column_to_name(lookup.model, lookup.field_name)

    def convert_insert_query(self, query):
        '''Converts a database saving query.'''
        for lookup in self.index_map.keys():
            self._convert_insert_query(query, lookup)

    def _convert_insert_query(self, query, lookup):
        # Replace the (field, value) pair in query.values with the index
        # field and the lookup-converted value.
        if not lookup.model == query.model:
            return
        position = self.get_query_position(query, lookup)
        if position is None:
            return
        value = self.get_value(lookup.model, lookup.field_name, query)
        value = lookup.convert_value(value)
        query.values[position] = (self.get_index(lookup), value)

    def convert_filters(self, query):
        self._convert_filters(query, query.where)

    ''' helper methods '''

    def _convert_filters(self, query, filters):
        # Walk the where-tree; leaves are constraint tuples, inner nodes
        # are Node instances that are recursed into.
        for index, child in enumerate(filters.children[:]):
            if isinstance(child, Node):
                self._convert_filters(query, child)
                continue
            self.convert_filter(query, filters, child, index)

    def convert_filter(self, query, filters, child, index):
        """Rewrite one leaf filter to target the index field, if a
        registered lookup matches it."""
        constraint, lookup_type, annotation, value = child
        if constraint.field is None:
            return
        field_name = self.column_to_name.get(constraint.field.column)
        # Only rewrite filters on the query's base table alias.
        if field_name and constraint.alias == \
                query.table_map[query.model._meta.db_table][0]:
            for lookup in self.index_map.keys():
                if lookup.matches_filter(query.model, field_name, lookup_type,
                                         value):
                    new_lookup_type, new_value = lookup.convert_lookup(value,
                                                                       lookup_type)
                    index_name = self.index_name(lookup)
                    self._convert_filter(query, filters, child, index,
                                         new_lookup_type, new_value, index_name)

    def _convert_filter(self, query, filters, child, index, new_lookup_type,
                        new_value, index_name):
        # Swap the constraint in place for one on the index field and
        # splice the rebuilt tuple back into the where-tree.
        constraint, lookup_type, annotation, value = child
        lookup_type, value = new_lookup_type, new_value
        constraint.field = query.get_meta().get_field(index_name)
        constraint.col = constraint.field.column
        child = constraint, lookup_type, annotation, value
        filters.children[index] = child

    def index_name(self, lookup):
        return lookup.index_name

    def get_field_to_index(self, model, field_name):
        # NOTE(review): broad except returns None for any failure.
        try:
            return model._meta.get_field(field_name)
        except:
            return None

    def get_value(self, model, field_name, query):
        """Return the value being saved for *field_name* in an insert query."""
        field_to_index = self.get_field_to_index(model, field_name)
        for query_field, value in query.values[:]:
            if field_to_index == query_field:
                return value
        raise FieldDoesNotExist('Cannot find field in query.')

    def add_column_to_name(self, model, field_name):
        column_name = model._meta.get_field(field_name).column
        self.column_to_name[column_name] = field_name

    def get_index(self, lookup):
        return self.index_map[lookup]

    def get_query_position(self, query, lookup):
        # Index into query.values where this lookup's index field sits.
        for index, (field, query_value) in enumerate(query.values[:]):
            if field is self.get_index(lookup):
                return index
        return None
def unref_alias(query, alias):
    """Drop one reference to *alias* on *query*; when the refcount hits
    zero, scrub every join bookkeeping structure that mentions it."""
    table_name = query.alias_map[alias][TABLE_NAME]
    query.alias_refcount[alias] -= 1
    if query.alias_refcount[alias] < 1:
        # Remove all information about the join
        del query.alias_refcount[alias]
        del query.join_map[query.rev_join_map[alias]]
        del query.rev_join_map[alias]
        del query.alias_map[alias]
        query.table_map[table_name].remove(alias)
        if len(query.table_map[table_name]) == 0:
            del query.table_map[table_name]
        query.used_aliases.discard(alias)
class FKNullFix(BaseResolver):
    '''
    Django doesn't generate correct code for ForeignKey__isnull.
    It becomes a JOIN with pk__isnull which won't work on nonrel DBs,
    so we rewrite the JOIN here.
    '''

    def create_index(self, lookup):
        # This resolver creates no index fields.
        pass

    def convert_insert_query(self, query):
        # Nothing to rewrite on saves.
        pass

    def convert_filter(self, query, filters, child, index):
        # Only FK isnull filters need the join rewrite.
        constraint, lookup_type, annotation, value = child
        if constraint.field is not None and lookup_type == 'isnull' and \
                isinstance(constraint.field, models.ForeignKey):
            self.fix_fk_null_filter(query, constraint)

    def unref_alias(self, query, alias):
        unref_alias(query, alias)

    def fix_fk_null_filter(self, query, constraint):
        """Collapse the FK->pk join behind an isnull filter: point the
        constraint back at the FK column on the parent alias."""
        alias = constraint.alias
        table_name = query.alias_map[alias][TABLE_NAME]
        lhs_join_col = query.alias_map[alias][LHS_JOIN_COL]
        rhs_join_col = query.alias_map[alias][RHS_JOIN_COL]
        # Bail unless this alias is exactly the FK's target-table pk join.
        if table_name != constraint.field.rel.to._meta.db_table or \
                rhs_join_col != constraint.field.rel.to._meta.pk.column or \
                lhs_join_col != constraint.field.column:
            return
        next_alias = query.alias_map[alias][LHS_ALIAS]
        if not next_alias:
            return
        # Drop the join and re-anchor the constraint on the parent alias.
        self.unref_alias(query, alias)
        alias = next_alias
        constraint.col = constraint.field.column
        constraint.alias = alias
class ConstantFieldJOINResolver(BaseResolver):
    """Handles lookups that span relations ('a__b__c'): denormalizes the
    joined value into an index field at save time and rewrites join
    filters to hit that field instead of performing the JOIN."""

    def create_index(self, lookup):
        # Only relation-spanning lookups belong to this resolver.
        if '__' in lookup.field_name:
            super(ConstantFieldJOINResolver, self).create_index(lookup)

    def convert_insert_query(self, query):
        '''Converts a database saving query.'''
        for lookup in self.index_map.keys():
            if '__' in lookup.field_name:
                self._convert_insert_query(query, lookup)

    def convert_filter(self, query, filters, child, index):
        # Resolve the column chain across joins, then rewrite the filter
        # onto the denormalized index field and drop the join aliases.
        constraint, lookup_type, annotation, value = child
        field_chain = self.get_field_chain(query, constraint)
        if field_chain is None:
            return
        for lookup in self.index_map.keys():
            if lookup.matches_filter(query.model, field_chain, lookup_type,
                                     value):
                self.resolve_join(query, child)
                new_lookup_type, new_value = lookup.convert_lookup(value,
                                                                   lookup_type)
                index_name = self.index_name(lookup)
                self._convert_filter(query, filters, child, index,
                                     new_lookup_type, new_value, index_name)

    def get_field_to_index(self, model, field_name):
        # Follow the relation chain; index the final segment's field.
        model = self.get_model_chain(model, field_name)[-1]
        field_name = field_name.split('__')[-1]
        return super(ConstantFieldJOINResolver, self).get_field_to_index(model,
                                                                         field_name)

    def get_value(self, model, field_name, query):
        # The insert query only holds the first-hop FK value (a pk);
        # dereference the rest of the chain with queries.
        value = super(ConstantFieldJOINResolver, self).get_value(model,
                                                                 field_name.split('__')[0],
                                                                 query)
        if value is not None:
            value = self.get_target_value(model, field_name, value)
        return value

    def get_field_chain(self, query, constraint):
        """Map a constraint's joined column chain back to the registered
        'a__b__c' field name, or None if unknown."""
        if constraint.field is None:
            return
        column_index = self.get_column_index(query, constraint)
        return self.column_to_name.get(column_index)

    def get_model_chain(self, model, field_chain):
        # [model, rel1.to, rel2.to, ...] for each hop in field_chain.
        model_chain = [model, ]
        for value in field_chain.split('__')[:-1]:
            model = model._meta.get_field(value).rel.to
            model_chain.append(model)
        return model_chain

    def get_target_value(self, start_model, field_chain, pk):
        """Follow the FK chain starting at *pk* and return the final
        attribute value (issues one query for the first hop)."""
        fields = field_chain.split('__')
        foreign_key = start_model._meta.get_field(fields[0])
        if not foreign_key.rel:
            # field isn't a related one, so return the value itself
            return pk
        target_model = foreign_key.rel.to
        foreignkey = target_model.objects.all().get(pk=pk)
        for value in fields[1:-1]:
            foreignkey = getattr(foreignkey, value)
        if isinstance(foreignkey._meta.get_field(fields[-1]), models.ForeignKey):
            # For a terminal FK, store its raw id rather than the instance.
            return getattr(foreignkey, '%s_id' % fields[-1])
        else:
            return getattr(foreignkey, fields[-1])

    def add_column_to_name(self, model, field_name):
        # Key the mapping by the reversed column chain built in
        # get_column_index, e.g. 'col_a__col_b'.
        model_chain = self.get_model_chain(model, field_name)
        column_chain = ''
        field_names = field_name.split('__')
        for model, name in zip(model_chain, field_names):
            column_chain += model._meta.get_field(name).column + '__'
        self.column_to_name[column_chain[:-2]] = field_name

    def unref_alias(self, query, alias):
        unref_alias(query, alias)

    def get_column_index(self, query, constraint):
        """Rebuild the column chain by walking INNER JOINs from the
        constraint's alias back to the base table."""
        # NOTE(review): if constraint.field is falsy, column_chain is
        # never bound and the return below raises NameError -- confirm
        # callers always pass a constraint with a field.
        if constraint.field:
            column_chain = constraint.field.column
            alias = constraint.alias
            while alias:
                join = query.alias_map.get(alias)
                if join and join[JOIN_TYPE] == 'INNER JOIN':
                    column_chain += '__' + join[LHS_JOIN_COL]
                    alias = query.alias_map[alias][LHS_ALIAS]
                else:
                    alias = None
        return '__'.join(reversed(column_chain.split('__')))

    def resolve_join(self, query, child):
        """Drop all join aliases behind *child* and re-anchor its
        constraint on the base table alias."""
        constraint, lookup_type, annotation, value = child
        if not constraint.field:
            return
        alias = constraint.alias
        while True:
            next_alias = query.alias_map[alias][LHS_ALIAS]
            if not next_alias:
                break
            self.unref_alias(query, alias)
            alias = next_alias
        constraint.alias = alias
# TODO: distinguish in memory joins from standard joins somehow
class InMemoryJOINResolver(ConstantFieldJOINResolver):
def __init__(self):
self.field_chains = []
super(InMemoryJOINResolver, self).__init__()
def create_index(self, lookup):
if '__' in lookup.field_name:
field_to_index = self.get_field_to_index(lookup.model, lookup.field_name)
if not field_to_index:
return
# save old column_to_name so we can make in memory queries later on
self.add_column_to_name(lookup.model, lookup.field_name)
# don't add an extra field for standard lookups!
if isinstance(lookup, StandardLookup):
return
# install lookup on target model
model = self.get_model_chain(lookup.model, lookup.field_name)[-1]
lookup.model = model
lookup.field_name = lookup.field_name.split('__')[-1]
super(ConstantFieldJOINResolver, self).create_index(lookup)
def convert_insert_query(self, query):
super(ConstantFieldJOINResolver, self).convert_insert_query(query)
def _convert_filters(self, query, filters):
# or queries are not supported for in-memory-JOINs
if self.contains_OR(query.where, OR):
return
# start with the deepest JOIN level filter!
all_filters = self.get_all_filters(filters)
all_filters.sort(key=lambda item: self.get_field_chain(query, item[1][0]) and \
-len(self.get_field_chain(query, item[1][0])) or 0)
for filters, child, index in all_filters:
# check if convert_filter removed a given child from the where-tree
if not self.contains_child(query.where, child):
continue
self.convert_filter(query, filters, child, index)
def convert_filter(self, query, filters, child, index):
constraint, lookup_type, annotation, value = child
field_chain = self.get_field_chain(query, constraint)
if field_chain is None:
return
if '__' not in field_chain:
return super(ConstantFieldJOINResolver, self).convert_filter(query,
filters, child, index)
pks = self.get_pks(query, field_chain, lookup_type, value)
self.resolve_join(query, child)
self._convert_filter(query, filters, child, index, 'in',
(pk for pk in pks), field_chain.split('__')[0])
def tree_contains(self, filters, to_find, func):
result = False
for child in filters.children[:]:
if func(child, to_find):
result = True
break
if isinstance(child, Node):
result = self.tree_contains(child, to_find, func)
if result:
break
return result
def contains_OR(self, filters, or_):
return self.tree_contains(filters, or_,
lambda c, f: isinstance(c, Node) and c.connector == f)
def contains_child(self, filters, to_find):
return self.tree_contains(filters, to_find, lambda c, f: c is f)
def get_all_filters(self, filters):
all_filters = []
for index, child in enumerate(filters.children[:]):
if isinstance(child, Node):
all_filters.extend(self.get_all_filters(child))
continue
all_filters.append((filters, child, index))
return all_filters
def index_name(self, lookup):
# use another index_name to avoid conflicts with lookups defined on the
# target model which are handled by the BaseBackend
return lookup.index_name + '_in_memory_join'
def get_pks(self, query, field_chain, lookup_type, value):
    """Resolve a multi-level JOIN chain in memory.

    Starting at the deepest model of the chain, filter it with the original
    lookup, then walk back towards the root model converting each level
    into an ``__in`` filter on the previous level's pks.  Returns the pks
    valid for the first (shallowest) JOIN level.
    """
    model_chain = self.get_model_chain(query.model, field_chain)
    # Filter the deepest model with the user's original lookup.
    first_lookup = {'%s__%s' %(field_chain.rsplit('__', 1)[-1],
                               lookup_type): value}
    self.combine_with_same_level_filter(first_lookup, query, field_chain)
    pks = model_chain[-1].objects.all().filter(**first_lookup).values_list(
        'id', flat=True)
    # chains[i] is field_chain with the last i+1 segments cut off.
    chains = [field_chain.rsplit('__', i+1)[0]
              for i in range(field_chain.count('__'))]
    lookup = {}
    # NOTE(review): relies on Python 2 zip() returning a list (reversed()
    # needs a sequence) -- would need list(zip(...)) on Python 3.
    for model, chain in reversed(zip(model_chain[1:-1], chains[:-1])):
        lookup.update({'%s__%s' %(chain.rsplit('__', 1)[-1], 'in'):
                       (pk for pk in pks)})
        self.combine_with_same_level_filter(lookup, query, chain)
        pks = model.objects.all().filter(**lookup).values_list('id', flat=True)
    return pks
def combine_with_same_level_filter(self, lookup, query, field_chain):
    """Merge other filters targeting the same JOIN level into ``lookup``.

    Any filter whose chain shares the prefix of ``field_chain`` is folded
    into the in-memory query and removed from the where-tree so it is not
    applied twice.
    """
    lookup_updates = {}
    field_chains = self.get_all_field_chains(query, query.where)
    for chain, child in field_chains.items():
        if chain == field_chain:
            continue
        # Same prefix means the filter lives on the same JOIN level.
        if field_chain.rsplit('__', 1)[0] == chain.rsplit('__', 1)[0]:
            # child is (constraint, lookup_type, annotation, value).
            lookup_updates['%s__%s' %(chain.rsplit('__', 1)[1], child[1])] \
                = child[3]
            self.remove_child(query.where, child)
            self.resolve_join(query, child)
            # TODO: update query.alias_refcount correctly!
    lookup.update(lookup_updates)
def remove_child(self, filters, to_remove):
    '''Removes a child object from filters. If filters doesn't contain
    children afterwards, filters will be removed from its parent.'''
    for child in filters.children[:]:
        if child is to_remove:
            self._remove_child(filters, to_remove)
            return
        elif isinstance(child, Node):
            # Recurse into subtree, then prune the subtree node itself if
            # the removal left it empty.
            self.remove_child(child, to_remove)
            if hasattr(child, 'children') and not child.children:
                self.remove_child(filters, child)
def _remove_child(self, filters, to_remove):
result = []
for child in filters.children[:]:
if child is to_remove:
continue
result.append(child)
filters.children = result
def get_all_field_chains(self, query, filters):
    """Return a dict mapping field_chains to the corresponding child."""
    chains = {}
    for _parent, child, _index in self.get_all_filters(filters):
        chain = self.get_field_chain(query, child[0])
        # chain can be None if the user didn't specify an index for it
        if chain:
            chains[chain] = child
    return chains
| Python |
from django.db import models
from django.test import TestCase
from .api import register_index
from .lookups import StandardLookup
from .resolver import resolver
from djangotoolbox.fields import ListField
from datetime import datetime
import re
class ForeignIndexed2(models.Model):
    # Second-level JOIN target, reached via ForeignIndexed.fk.
    name_fi2 = models.CharField(max_length=500)
    age = models.IntegerField()
class ForeignIndexed(models.Model):
    # First-level JOIN target; fk extends the chain one level deeper.
    title = models.CharField(max_length=500)
    name_fi = models.CharField(max_length=500)
    fk = models.ForeignKey(ForeignIndexed2, null=True)
class Indexed(models.Model):
    # Root model for the lookup/JOIN tests below.
    name = models.CharField(max_length=500)
    published = models.DateTimeField(auto_now_add=True)
    foreignkey = models.ForeignKey(ForeignIndexed, null=True)
    foreignkey2 = models.ForeignKey(ForeignIndexed2, related_name='idx_set', null=True)
    tags = ListField(models.CharField(max_length=500, null=True))
class NullableCharField(models.Model):
    # Regression model for indexing None values (see test_null_strings).
    name = models.CharField(max_length=500, null=True)
# TODO: add test for foreign key with multiple filters via different and equal paths
# to do so we have to create some entities matching equal paths but not matching
# different paths
class TestIndexed(TestCase):
    """Exercises dbindexer lookups and the constant-field JOIN resolver."""

    def setUp(self):
        # Install a known backend stack; the original is restored in tearDown.
        self.backends = list(resolver.backends)
        resolver.backends = []
        resolver.load_backends(('dbindexer.backends.BaseResolver',
                                'dbindexer.backends.FKNullFix',
#                                'dbindexer.backends.InMemoryJOINResolver',
                                'dbindexer.backends.ConstantFieldJOINResolver',
                                ))
        self.register_indexex()
        # Fixture graph: Indexed -> ForeignIndexed -> ForeignIndexed2.
        juubi = ForeignIndexed2(name_fi2='Juubi', age=2)
        juubi.save()
        rikudo = ForeignIndexed2(name_fi2='Rikudo', age=200)
        rikudo.save()
        kyuubi = ForeignIndexed(name_fi='Kyuubi', title='Bijuu', fk=juubi)
        hachibi = ForeignIndexed(name_fi='Hachibi', title='Bijuu', fk=rikudo)
        kyuubi.save()
        hachibi.save()
        Indexed(name='ItAchi', tags=('Sasuke', 'Madara'), foreignkey=kyuubi,
                foreignkey2=juubi).save()
        Indexed(name='YondAimE', tags=('Naruto', 'Jiraya'), foreignkey=kyuubi,
                foreignkey2=juubi).save()
        # BUG FIX: ('Hinata') and ('Sharingan') were parenthesized strings,
        # not 1-tuples (missing trailing comma), so the ListField received a
        # bare string instead of a single-tag list.
        Indexed(name='Neji', tags=('Hinata',), foreignkey=hachibi,
                foreignkey2=juubi).save()
        Indexed(name='I1038593i', tags=('Sharingan',), foreignkey=hachibi,
                foreignkey2=rikudo).save()

    def tearDown(self):
        # Restore the backend stack replaced in setUp.
        resolver.backends = self.backends

    def register_indexex(self):
        # NOTE: method name typo kept for backward compatibility.
        register_index(Indexed, {
            'name': ('iexact', 'endswith', 'istartswith', 'iendswith', 'contains',
                     'icontains', re.compile('^i+', re.I), re.compile('^I+'),
                     re.compile(r'^i\d*i$', re.I)),
            'published': ('month', 'day', 'year', 'week_day'),
            'tags': ('iexact', 'icontains', StandardLookup()),
            'foreignkey__fk': (StandardLookup()),
            'foreignkey__title': 'iexact',
            'foreignkey__name_fi': 'iexact',
            'foreignkey__fk__name_fi2': ('iexact', 'endswith'),
            'foreignkey2__name_fi2': (StandardLookup(), 'iexact'),
            'foreignkey2__age': (StandardLookup()),
        })
        register_index(ForeignIndexed, {
            'title': 'iexact',
            'name_fi': ('iexact', 'icontains'),
            'fk__name_fi2': ('iexact', 'endswith'),
            'fk__age': (StandardLookup()),
        })
        register_index(NullableCharField, {
            'name': ('iexact', 'istartswith', 'endswith', 'iendswith',)
        })

    # TODO: add tests for created indexes for all backends!
#    def test_model_fields(self):
#        field_list = [(item[0], item[0].column)
#                      for item in Indexed._meta.get_fields_with_model()]
#        print field_list
#        x()

    # in-memory JOIN backend shouldn't create multiple indexes on the foreignkey side
    # for different paths or not even for index definition on different models. Test this!
    # standard JOIN backend should always add extra fields to registered model. Test this!
    def test_joins(self):
        self.assertEqual(2, len(Indexed.objects.all().filter(
            foreignkey__fk__name_fi2__iexact='juuBi',
            foreignkey__title__iexact='biJuu')))
        self.assertEqual(0, len(Indexed.objects.all().filter(
            foreignkey__fk__name_fi2__iexact='juuBi',
            foreignkey2__name_fi2__iexact='Rikudo')))
        self.assertEqual(1, len(Indexed.objects.all().filter(
            foreignkey__fk__name_fi2__endswith='udo',
            foreignkey2__name_fi2__iexact='Rikudo')))
        self.assertEqual(2, len(Indexed.objects.all().filter(
            foreignkey__title__iexact='biJuu',
            foreignkey__name_fi__iexact='kyuuBi')))
        self.assertEqual(2, len(Indexed.objects.all().filter(
            foreignkey__title__iexact='biJuu',
            foreignkey__name_fi__iexact='Hachibi')))
        self.assertEqual(1, len(Indexed.objects.all().filter(
            foreignkey__title__iexact='biJuu', name__iendswith='iMe')))
        # JOINs on one field only
        self.assertEqual(4, len(Indexed.objects.all().filter(
            foreignkey__title__iexact='biJuu')))
        self.assertEqual(3, len(Indexed.objects.all().filter(
            foreignkey2__name_fi2='Juubi')))
        # test endswith instead of iexact
        self.assertEqual(2, len(Indexed.objects.all().filter(
            foreignkey__fk__name_fi2__endswith='bi')))
        # test JOINs via different paths targeting the same field
        self.assertEqual(2, len(Indexed.objects.all().filter(
            foreignkey__fk__name_fi2__iexact='juuBi')))
        self.assertEqual(3, len(Indexed.objects.all().filter(
            foreignkey2__name_fi2__iexact='Juubi')))
        # test standard lookups for foreign_keys
        self.assertEqual(3, len(Indexed.objects.all().filter(
            foreignkey2__age=2)))
        self.assertEqual(4, len(Indexed.objects.all().filter(
            foreignkey2__age__lt=201)))
        # test JOINs on different model
        # standard lookups JOINs
        self.assertEqual(1, len(ForeignIndexed.objects.all().filter(
            fk__age=2)))
        self.assertEqual(2, len(ForeignIndexed.objects.all().filter(
            fk__age__lt=210)))
        # other JOINs
        self.assertEqual(1, len(ForeignIndexed.objects.all().filter(
            fk__name_fi2__iexact='juUBI')))
        self.assertEqual(1, len(ForeignIndexed.objects.all().filter(
            fk__name_fi2__endswith='bi')))

    def test_fix_fk_isnull(self):
        self.assertEqual(0, len(Indexed.objects.filter(foreignkey=None)))
        self.assertEqual(4, len(Indexed.objects.exclude(foreignkey=None)))

    def test_iexact(self):
        self.assertEqual(1, len(Indexed.objects.filter(name__iexact='itaChi')))
        self.assertEqual(1, Indexed.objects.filter(name__iexact='itaChi').count())
        self.assertEqual(2, ForeignIndexed.objects.filter(title__iexact='BIJUU').count())
        self.assertEqual(1, ForeignIndexed.objects.filter(name_fi__iexact='KYuubi').count())
        # test on list field
        self.assertEqual(1, Indexed.objects.filter(tags__iexact='SasuKE').count())

    def test_standard_lookups(self):
        self.assertEqual(1, Indexed.objects.filter(tags__exact='Naruto').count())
        # test standard lookup on foreign_key
        juubi = ForeignIndexed2.objects.all().get(name_fi2='Juubi', age=2)
        self.assertEqual(2, Indexed.objects.filter(foreignkey__fk=juubi).count())

    def test_delete(self):
        Indexed.objects.get(name__iexact='itaChi').delete()
        self.assertEqual(0, Indexed.objects.all().filter(name__iexact='itaChi').count())

    def test_delete_query(self):
        Indexed.objects.all().delete()
        self.assertEqual(0, Indexed.objects.all().filter(name__iexact='itaChi').count())

    def test_exists_query(self):
        self.assertTrue(Indexed.objects.filter(name__iexact='itaChi').exists())

    def test_istartswith(self):
        self.assertEqual(1, len(Indexed.objects.all().filter(name__istartswith='iTa')))

    def test_endswith(self):
        self.assertEqual(1, len(Indexed.objects.all().filter(name__endswith='imE')))
        self.assertEqual(1, len(Indexed.objects.all().filter(name__iendswith='iMe')))

    def test_regex(self):
        self.assertEqual(2, len(Indexed.objects.all().filter(name__iregex='^i+')))
        self.assertEqual(2, len(Indexed.objects.all().filter(name__regex='^I+')))
        self.assertEqual(1, len(Indexed.objects.all().filter(name__iregex=r'^i\d*i$')))

    def test_date_filters(self):
        now = datetime.now()
        self.assertEqual(4, len(Indexed.objects.all().filter(published__month=now.month)))
        self.assertEqual(4, len(Indexed.objects.all().filter(published__day=now.day)))
        self.assertEqual(4, len(Indexed.objects.all().filter(published__year=now.year)))
        self.assertEqual(4, len(Indexed.objects.all().filter(
            published__week_day=now.isoweekday())))

    def test_null_strings(self):
        """Test indexing with nullable CharFields, see: https://github.com/django-nonrel/django-dbindexer/issues/3."""
        NullableCharField.objects.create()

#    def test_contains(self):
#        # passes on production but not on gae-sdk (development)
#        self.assertEqual(1, len(Indexed.objects.all().filter(name__contains='Aim')))
#        self.assertEqual(1, len(Indexed.objects.all().filter(name__icontains='aim')))
#
#        self.assertEqual(1, ForeignIndexed.objects.filter(name_fi__icontains='Yu').count())
#
#        # test icontains on a list
#        self.assertEqual(2, len(Indexed.objects.all().filter(tags__icontains='RA')))
| Python |
from django.db import models
from djangotoolbox.fields import ListField
from copy import deepcopy
import re
regex = type(re.compile(''))
class LookupDoesNotExist(Exception):
    """Raised when no ExtraFieldLookup subclass matches a lookup definition."""
class LookupBase(type):
    """Metaclass normalizing ``lookup_types`` to a tuple on class creation."""

    def __new__(mcs, name, bases, attrs):
        new_cls = super(LookupBase, mcs).__new__(mcs, name, bases, attrs)
        if not isinstance(new_cls.lookup_types, (list, tuple)):
            new_cls.lookup_types = (new_cls.lookup_types, )
        return new_cls
class ExtraFieldLookup(object):
    '''Default is to behave like an exact filter on an ExtraField.

    A lookup describes how a filter value is transformed into the value
    stored on an automatically added index field, and which (simpler)
    lookup type replaces the original one at query time.
    '''
    __metaclass__ = LookupBase  # Python 2 metaclass; normalizes lookup_types
    lookup_types = 'exact'

    def __init__(self, model=None, field_name=None, lookup_def=None,
                 new_lookup='exact', field_to_add=models.CharField(
                     max_length=500, editable=False, null=True)):
        # NOTE(review): the field_to_add default is a shared mutable Field
        # instance; get_field_to_add deepcopies it before use.
        self.field_to_add = field_to_add
        self.new_lookup = new_lookup
        self.contribute(model, field_name, lookup_def)

    def contribute(self, model, field_name, lookup_def):
        # Bind this lookup to the model/field it indexes.
        self.model = model
        self.field_name = field_name
        self.lookup_def = lookup_def

    @property
    def index_name(self):
        # Column name of the automatically added index field.
        return 'idxf_%s_l_%s' % (self.field_name, self.lookup_types[0])

    def convert_lookup(self, value, lookup_type):
        # TODO: can value be a list or tuple? (in case of in yes)
        if isinstance(value, (tuple, list)):
            value = [self._convert_lookup(val, lookup_type)[1] for val in value]
        else:
            _, value = self._convert_lookup(value, lookup_type)
        return self.new_lookup, value

    def _convert_lookup(self, value, lookup_type):
        # Hook: transform the query-side value; default is the identity.
        return lookup_type, value

    def convert_value(self, value):
        # Transform the value stored on the index field (at save time).
        if value is not None:
            if isinstance(value, (tuple, list)):
                value = [self._convert_value(val) for val in value]
            else:
                value = self._convert_value(value)
        return value

    def _convert_value(self, value):
        # Hook: transform a single save-side value; default is the identity.
        return value

    def matches_filter(self, model, field_name, lookup_type, value):
        # True if this lookup instance handles the given filter.
        return self.model == model and lookup_type in self.lookup_types \
            and field_name == self.field_name

    @classmethod
    def matches_lookup_def(cls, lookup_def):
        # True if this class can be instantiated for the given definition.
        if lookup_def in cls.lookup_types:
            return True
        return False

    def get_field_to_add(self, field_to_index):
        # Fresh Field instance for the index; wrap it in a ListField when
        # the indexed field itself is a ListField.
        field_to_add = deepcopy(self.field_to_add)
        if isinstance(field_to_index, ListField):
            field_to_add = ListField(field_to_add, editable=False, null=True)
        return field_to_add
class DateLookup(ExtraFieldLookup):
    """Base for date-component lookups stored as integers."""

    def __init__(self, *args, **kwargs):
        defaults = {'new_lookup': 'exact',
                    'field_to_add': models.IntegerField(editable=False, null=True)}
        defaults.update(kwargs)
        ExtraFieldLookup.__init__(self, *args, **defaults)

    def _convert_lookup(self, value, lookup_type):
        # Query side: the component value is compared for exact equality.
        return self.new_lookup, value
class Day(DateLookup):
    # Indexes the day-of-month component of a date/datetime value.
    lookup_types = 'day'

    def _convert_value(self, value):
        return value.day
class Month(DateLookup):
    # Indexes the month component of a date/datetime value.
    lookup_types = 'month'

    def _convert_value(self, value):
        return value.month
class Year(DateLookup):
    # Indexes the year component of a date/datetime value.
    lookup_types = 'year'

    def _convert_value(self, value):
        return value.year
class Weekday(DateLookup):
    # Indexes ISO weekday (1=Monday .. 7=Sunday) of a date/datetime value.
    lookup_types = 'week_day'

    def _convert_value(self, value):
        return value.isoweekday()
class Contains(ExtraFieldLookup):
    """Substring filter: stores every suffix of the value so that a
    'contains' query can be answered with a 'startswith' on the index."""
    lookup_types = 'contains'

    def __init__(self, *args, **kwargs):
        defaults = {'new_lookup': 'startswith',
                    # BUG FIX: max_length must be passed as a keyword; the
                    # first positional CharField argument is verbose_name,
                    # so CharField(500) left max_length unset.
                    'field_to_add': ListField(models.CharField(max_length=500),
                                              editable=False, null=True)
                    }
        defaults.update(kwargs)
        ExtraFieldLookup.__init__(self, *args, **defaults)

    def get_field_to_add(self, field_to_index):
        # always return a ListField of CharFields even in the case of
        # field_to_index being a ListField itself!
        return deepcopy(self.field_to_add)

    def convert_value(self, value):
        # Save side: index all suffixes of each value.
        new_value = []
        if isinstance(value, (tuple, list)):
            for val in value:
                new_value.extend(self.contains_indexer(val))
        else:
            new_value = self.contains_indexer(value)
        return new_value

    def _convert_lookup(self, value, lookup_type):
        # Query side: the substring itself is matched via 'startswith'.
        return self.new_lookup, value

    def contains_indexer(self, value):
        # In indexing mode we add all postfixes ('o', 'lo', ..., 'hello')
        result = []
        if value:
            result.extend([value[count:] for count in range(len(value))])
        return result
class Icontains(Contains):
    # Case-insensitive variant: lower-cases both index entries and the
    # query value.
    lookup_types = 'icontains'

    def convert_value(self, value):
        return [val.lower() for val in Contains.convert_value(self, value)]

    def _convert_lookup(self, value, lookup_type):
        return self.new_lookup, value.lower()
class Iexact(ExtraFieldLookup):
    # Case-insensitive exact match: store and query the lower-cased value.
    lookup_types = 'iexact'

    def _convert_lookup(self, value, lookup_type):
        return self.new_lookup, value.lower()

    def _convert_value(self, value):
        return value.lower()
class Istartswith(ExtraFieldLookup):
    # Case-insensitive prefix match: lower-case both sides, then use the
    # backend's plain 'startswith'.
    lookup_types = 'istartswith'

    def __init__(self, *args, **kwargs):
        defaults = {'new_lookup': 'startswith'}
        defaults.update(kwargs)
        ExtraFieldLookup.__init__(self, *args, **defaults)

    def _convert_lookup(self, value, lookup_type):
        return self.new_lookup, value.lower()

    def _convert_value(self, value):
        return value.lower()
class Endswith(ExtraFieldLookup):
    # Suffix match: store the reversed value so 'endswith' becomes a
    # 'startswith' on the index field.
    lookup_types = 'endswith'

    def __init__(self, *args, **kwargs):
        defaults = {'new_lookup': 'startswith'}
        defaults.update(kwargs)
        ExtraFieldLookup.__init__(self, *args, **defaults)

    def _convert_lookup(self, value, lookup_type):
        return self.new_lookup, value[::-1]

    def _convert_value(self, value):
        return value[::-1]
class Iendswith(Endswith):
    # Case-insensitive suffix match: reversed AND lower-cased.
    lookup_types = 'iendswith'

    def _convert_lookup(self, value, lookup_type):
        return self.new_lookup, value[::-1].lower()

    def _convert_value(self, value):
        return value[::-1].lower()
class RegexLookup(ExtraFieldLookup):
    """Pre-bound regex filter: stores a boolean per value indicating
    whether it matches the pattern given at index-definition time."""
    lookup_types = ('regex', 'iregex')

    def __init__(self, *args, **kwargs):
        defaults = {'field_to_add': models.NullBooleanField(editable=False,
                                                            null=True)
                    }
        defaults.update(kwargs)
        ExtraFieldLookup.__init__(self, *args, **defaults)

    def contribute(self, model, field_name, lookup_def):
        ExtraFieldLookup.contribute(self, model, field_name, lookup_def)
        if isinstance(lookup_def, regex):
            # Recompile with DOTALL | UNICODE, keeping only the original
            # pattern's case-insensitivity flag.
            self.lookup_def = re.compile(lookup_def.pattern, re.S | re.U |
                                         (lookup_def.flags & re.I))

    @property
    def index_name(self):
        # NOTE(review): str.encode('hex') is Python 2 only.
        return 'idxf_%s_l_%s' % (self.field_name,
                                 self.lookup_def.pattern.encode('hex'))

    def is_icase(self):
        # Truthy when the bound pattern is case-insensitive.
        return self.lookup_def.flags & re.I

    def _convert_lookup(self, value, lookup_type):
        # Query side: filter for index-field == True.
        return self.new_lookup, True

    def _convert_value(self, value):
        # Save side: True iff the value matches the bound pattern.
        if self.lookup_def.match(value):
            return True
        return False

    def matches_filter(self, model, field_name, lookup_type, value):
        # Matches only the exact pattern string this lookup was bound to.
        return self.model == model and lookup_type == \
            '%sregex' % ('i' if self.is_icase() else '') and \
            value == self.lookup_def.pattern and field_name == self.field_name

    @classmethod
    def matches_lookup_def(cls, lookup_def):
        if isinstance(lookup_def, regex):
            return True
        return False
class StandardLookup(ExtraFieldLookup):
    ''' Creates a copy of the field_to_index in order to allow querying for
    standard lookup_types on a JOINed property. '''
    # TODO: database backend can specify standardLookups
    lookup_types = ('exact', 'gt', 'gte', 'lt', 'lte', 'in', 'range', 'isnull')

    @property
    def index_name(self):
        return 'idxf_%s_l_%s' % (self.field_name, 'standard')

    def convert_lookup(self, value, lookup_type):
        # Values pass through unchanged; the original lookup type is kept.
        return lookup_type, value

    def get_field_to_add(self, field_to_index):
        field_to_add = deepcopy(field_to_index)
        # auto_now/auto_now_add would overwrite the copied value on save.
        if isinstance(field_to_add, (models.DateTimeField,
                                     models.DateField, models.TimeField)):
            field_to_add.auto_now_add = field_to_add.auto_now = False
        return field_to_add
| Python |
from django.conf import settings
from django.utils.importlib import import_module
def merge_dicts(d1, d2):
    '''Update dictionary recursively. If values for a given key exist in
    both dictionaries and are dict-like they are merged (in place, into d1).'''
    for k, v in d2.iteritems():  # Python 2 dict API
        # Try to merge the values as if they were dicts (EAFP).
        try:
            merge_dicts(d1[k], v)
        # Otherwise just overwrite the original value (if any):
        # KeyError -> k is new in d1; AttributeError -> not dict-like.
        except (AttributeError, KeyError):
            d1[k] = v
class DatabaseOperations(object):
    '''Mixin merging the dbindexer compiler classes into the target
    backend's SQL compilers (combined classes are cached per name).'''
    dbindexer_compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'

    def __init__(self):
        self._dbindexer_cache = {}

    def compiler(self, compiler_name):
        if compiler_name not in self._dbindexer_cache:
            # Resolve the target backend's compiler and ours, then build a
            # subclass combining both (dbindexer's comes first in the MRO).
            target = super(DatabaseOperations, self).compiler(compiler_name)
            base = getattr(
                import_module(self.dbindexer_compiler_module), compiler_name)

            class Compiler(base, target):
                pass

            self._dbindexer_cache[compiler_name] = Compiler
        return self._dbindexer_cache[compiler_name]
class BaseDatabaseWrapper(object):
    '''Mixin that rebinds the connection's ops object to a class that also
    inherits DatabaseOperations, then re-runs its __init__.'''

    def __init__(self, *args, **kwargs):
        super(BaseDatabaseWrapper, self).__init__(*args, **kwargs)

        class Operations(DatabaseOperations, self.ops.__class__):
            pass

        self.ops.__class__ = Operations
        self.ops.__init__()
def DatabaseWrapper(settings_dict, *args, **kwargs):
    '''Factory returning a connection wrapper around the backend named by
    settings_dict['TARGET'] with dbindexer's mixins applied.'''
    target_settings = settings.DATABASES[settings_dict['TARGET']]
    engine = target_settings['ENGINE'] + '.base'
    target = import_module(engine).DatabaseWrapper

    class Wrapper(BaseDatabaseWrapper, target):
        pass

    # Update settings with target database settings (which can contain nested dicts).
    merge_dicts(settings_dict, target_settings)
    return Wrapper(settings_dict, *args, **kwargs)
| Python |
from django.conf import settings
from django.utils.importlib import import_module
from django.core.exceptions import ImproperlyConfigured
class Resolver(object):
    '''Loads the configured index backends and fans query/index operations
    out to each of them in order.'''

    def __init__(self):
        self.backends = []
        self.load_backends(getattr(settings, 'DBINDEXER_BACKENDS',
                           ('dbindexer.backends.BaseResolver',
                            'dbindexer.backends.FKNullFix')))

    def load_backends(self, backend_paths):
        # Instantiate one backend per dotted path, preserving order.
        for backend in backend_paths:
            self.backends.append(self.load_backend(backend))

    def load_backend(self, path):
        # Import 'module.ClassName' and return an instance of the class.
        module_name, attr_name = path.rsplit('.', 1)
        try:
            mod = import_module(module_name)
        except (ImportError, ValueError), e:  # Python 2 except syntax
            raise ImproperlyConfigured('Error importing backend module %s: "%s"'
                                       % (module_name, e))
        try:
            return getattr(mod, attr_name)()
        except AttributeError:
            raise ImproperlyConfigured('Module "%s" does not define a "%s" backend'
                                       % (module_name, attr_name))

    def convert_filters(self, query):
        # Each backend gets a chance to rewrite the query's filters.
        for backend in self.backends:
            backend.convert_filters(query)

    def create_index(self, lookup):
        for backend in self.backends:
            backend.create_index(lookup)

    def convert_insert_query(self, query):
        for backend in self.backends:
            backend.convert_insert_query(query)
resolver = Resolver()
| Python |
def autodiscover():
    """Discover per-app index definitions via the autoload helper.

    Delegates to autoload's autodiscover with the module name 'dbindexes'.
    """
    from autoload import autodiscover as auto_discover
    auto_discover('dbindexes')
def load_indexes():
    """Import every module listed in settings.DB_INDEX_MODULES (if any);
    importing a module is expected to register its indexes as a side effect."""
    from django.conf import settings
    from django.utils.importlib import import_module
    for name in getattr(settings, 'DB_INDEX_MODULES', ()):
        import_module(name)
| Python |
from .lookups import LookupDoesNotExist, ExtraFieldLookup
from . import lookups as lookups_module
from .resolver import resolver
import inspect
# TODO: add possibility to add lookup modules
def create_lookup(lookup_def):
    """Instantiate the first ExtraFieldLookup subclass in the lookups
    module whose matches_lookup_def accepts lookup_def."""
    for _name, candidate in inspect.getmembers(lookups_module):
        if not inspect.isclass(candidate):
            continue
        if not issubclass(candidate, ExtraFieldLookup):
            continue
        if candidate.matches_lookup_def(lookup_def):
            return candidate()
    raise LookupDoesNotExist('No Lookup found for %s .' % lookup_def)
def register_index(model, mapping):
    """Register index definitions for a model.

    ``mapping`` maps a field name to either a single lookup definition or a
    list/tuple of them; each definition may be an ExtraFieldLookup instance
    or a raw definition (string/regex) to wrap on the fly.
    """
    for field_name, lookups in mapping.items():
        if not isinstance(lookups, (list, tuple)):
            lookups = (lookups, )
        # create indexes and add model and field_name to lookups
        # create ExtraFieldLookup instances on the fly if needed
        for lookup in lookups:
            lookup_def = None
            if not isinstance(lookup, ExtraFieldLookup):
                lookup_def = lookup
                lookup = create_lookup(lookup_def)
            lookup.contribute(model, field_name, lookup_def)
            resolver.create_index(lookup)
| Python |
from django import forms
class CommForm(forms.Form):
    """
    Message-composition form for a communication thread.

    TODO: evaluate message
    """
    message = forms.CharField(widget=forms.Textarea, max_length=400)
class DealForm(forms.Form):
    # Single confirmation checkbox; BooleanField is required by default,
    # so the form only validates when the box is ticked.
    deal = forms.BooleanField(label='Seal The Deal?')
class CancelForm(forms.Form):
    # Confirmation checkbox for cancelling a sealed deal.
    cancel = forms.BooleanField(label='Cancel The Deal?')
from django.db import models
from userTools.models import user_profile
from itemTools.models import items
import datetime
import logging
# Create your models here.
class Comm(models.Model):
    """
    One buyer <-> item communication/negotiation thread.

    status codes:
    2 = Done!
    1 = Waiting!
    0 = Ongoing
    -1 = Failed (Not urgent)
    """
    buyer = models.ForeignKey(user_profile, editable=False)
    item = models.ForeignKey(items, null=True)
    status = models.IntegerField(default=0)
class Messages(models.Model):
    """A single message inside a Comm thread."""
    time = models.DateTimeField(auto_now_add=True, editable=False)
    # msg_type semantics are not visible here -- TODO confirm meaning of values.
    msg_type = models.IntegerField(default=0)
    content = models.TextField(max_length=400)
    comm = models.ForeignKey(Comm)
    user = models.ForeignKey(user_profile)

    def save(self, *args, **kwargs):
        """Save the message, then log sender/recipient for auditing."""
        super(Messages, self).save(*args, **kwargs)
        sender = self.user
        # The recipient is whoever is NOT the sender within the thread:
        # the item's seller if the buyer sent it, the buyer otherwise.
        if sender == self.comm.buyer:
            recipient = self.comm.item.user
        else:
            recipient = self.comm.buyer
        logging.info('Messages: User id %d sent User id %d at %s for Item %s' %(sender.id, recipient.id, str(self.time), self.comm.item.title))
| Python |
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    # Default scaffold test generated by Django's startapp.
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        self.assertEqual(1 + 1, 2)
| Python |
from userTools.main import handle_login_register, user, handle_optional_login, myprofile_links
from userTools.models import user_profile
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from itemTools.models import items
from forms import CommForm, DealForm, CancelForm
from models import Messages, Comm
@handle_login_register
def seller_seal(request, item_id, buyer_nick, curr_user):
    """Seller confirms a deal with the buyer identified by buyer_nick.

    Only valid while the Comm status is 1 (Waiting): marks the item sold,
    records the buyer and moves the Comm to status 2 (Done).
    """
    try:
        item = items.objects.get(id=item_id, is_expired=False, is_sold=False)
    except items.DoesNotExist:
        return render(request, 'error.html', {'error': 'Item Not Found'})
    try:
        tar_user = user_profile.objects.get(nick=buyer_nick)
    except user_profile.DoesNotExist:
        return render(request, 'error.html', {'error': 'User Not Found'})
    if request.method == 'POST':
        form = DealForm(request.POST)
        if form.is_valid() and form.cleaned_data.get('deal', False):
            # If sealed by buyer
            if item.user != curr_user.user_obj:
                return render(request, 'error.html', {'error': 'Not your item!'})
            else:
                # Current user is the seller
                try:
                    comm = Comm.objects.get(buyer=tar_user, item=item)
                except Comm.DoesNotExist:
                    return render(request, 'error.html', {'error': 'Invalid Action!!'})
                if comm.status == 1:
                    item.is_sold = True
                    item.buyer = tar_user
                    item.save()
                    comm.status = 2
                    comm.save()
                else:
                    return render(request, 'error.html', {'error': 'Invalid Action!!'})
    return HttpResponseRedirect(item.get_comm_url())
@handle_login_register
def cancel(request, item_id, curr_user):
    """
    When Buyer Cancels the deal
    status = 1 -> 0
    """
    try:
        item = items.objects.get(id=item_id, is_expired=False, is_sold=False)
    except items.DoesNotExist:
        return render(request, 'error.html', {'error': 'Item Not Found'})
    if request.method == 'POST':
        form = CancelForm(request.POST)
        if form.is_valid() and form.cleaned_data.get('cancel', False):
            # Only the buyer (i.e. not the item's owner) may cancel.
            if item.user != curr_user.user_obj:
                try:
                    comm = Comm.objects.get(buyer=curr_user.user_obj, item=item)
                except Comm.DoesNotExist:
                    return render(request, 'error.html', {'error': 'Invalid Action!!'})
                if comm.status == 1:
                    comm.status = 0
                    comm.save()
                else:
                    return render(request, 'error.html', {'error': 'You can\'t cancel the deal now!'})
                return HttpResponseRedirect(item.get_comm_url())
            else:
                return render(request, 'error.html', {'error': 'Invalid Action!!'})
    return HttpResponseRedirect(item.get_comm_url())
@handle_login_register
def seal(request, item_id, curr_user):
    """
    When buyer seals the deal
    status = 0 -> 1
    """
    try:
        item = items.objects.get(id=item_id, is_expired=False, is_sold=False)
    except items.DoesNotExist:
        return render(request, 'error.html', {'error': 'Item Not Found'})
    if request.method == 'POST':
        form = DealForm(request.POST)
        if form.is_valid() and form.cleaned_data.get('deal', False):
            # If sealed by buyer
            if item.user != curr_user.user_obj:
                try:
                    comm = Comm.objects.get(buyer=curr_user.user_obj, item=item)
                except Comm.DoesNotExist:
                    # First contact: create the thread on the fly.
                    comm = Comm(buyer=curr_user.user_obj, item=item)
                    comm.save()
                if comm.status == 0:
                    comm.status = 1
                    comm.save()
                else:
                    return render(request, 'error.html', {'error': 'You can\'t seal the deal now!'})
                return HttpResponseRedirect(item.get_comm_url())
            else:
                # Current user is the seller
                return render(request, 'error.html', {'error': 'You can\'t purchase your own item!!'})
    return HttpResponseRedirect(item.get_comm_url())
@handle_login_register
def comm_seller_buyer(request, item_id, user_nick, curr_user):
    """
    Communication view for seller with a buyer
    """
    try:
        item = items.objects.get(id=item_id, is_expired=False, is_sold=False)
    except items.DoesNotExist:
        # Sold items remain visible to the seller and the actual buyer only.
        try:
            item = items.objects.get(id=item_id, is_expired=False, is_sold=True)
            if item.user != curr_user.user_obj and item.buyer != curr_user.user_obj:
                return render(request, 'error.html', {'error': 'Item Not Found'})
        except items.DoesNotExist:
            return render(request, 'error.html', {'error': 'Item Not Found'})
    try:
        tar_user = user_profile.objects.get(nick=user_nick)
    except user_profile.DoesNotExist:
        return render(request, 'error.html', {'error': 'User Not Found'})
    # if the curr_user is not the seller
    if item.user != curr_user.user_obj:
        return render(request, 'error.html', {'error': 'You are not allowed to be here!'})
    try:
        comm = Comm.objects.get(buyer=tar_user, item=item)
    except Comm.DoesNotExist:
        return render(request, 'error.html', {'error': 'No communications exist!'})
    if request.method == 'POST':
        form = CommForm(request.POST)
        if form.is_valid():
            msg = form.cleaned_data['message']
            message = Messages(comm=comm, content=msg, user=curr_user.user_obj)
            message.save()
            form = CommForm()
    else:
        form = CommForm()
    # View Part
    try:
        comm = Comm.objects.get(buyer=tar_user, item=item)
        try:
            ret = Messages.objects.filter(comm=comm).order_by('-time')
        except Messages.DoesNotExist:
            ret = []
    except Comm.DoesNotExist:
        return render(request, 'error.html', {'error': 'No communications exist!'})
    return render(request, 'comm.html',{'msgs': ret, 'form': form, 'role': 'seller', 'DealForm': DealForm(), 'item': item, 'comm': comm})
def comm_item_seller(request, item, curr_user):
    """
    The curr_user is the seller, show him the list of interested Buyers
    """
    ret = Comm.objects.filter(item=item)
    return render(request, 'comm_list.html', {'comms': ret, 'item': item})
def comm_item_buyer(request, item, curr_user):
    """
    The curr_user is the buyer, show him the communication interface
    """
    if request.method == 'POST':
        form = CommForm(request.POST)
        if form.is_valid():
            msg = form.cleaned_data['message']
            # Create the thread lazily on the first message.
            try:
                comm = Comm.objects.get(buyer=curr_user.user_obj, item=item)
            except Comm.DoesNotExist:
                comm = Comm(buyer=curr_user.user_obj, item=item)
                comm.save()
            message = Messages(comm=comm, content=msg, user=curr_user.user_obj)
            message.save()
            form = CommForm()
    else:
        form = CommForm()
    # View Part
    try:
        comm = Comm.objects.get(buyer=curr_user.user_obj, item=item)
        try:
            ret = Messages.objects.filter(comm=comm).order_by('-time')
        except Messages.DoesNotExist:
            ret = []
    except Comm.DoesNotExist:
        # No thread yet: render with an unsaved placeholder Comm.
        ret = []
        comm = Comm(buyer=curr_user.user_obj, item=item)
    return render(request, 'comm.html',{'msgs': ret, 'form': form, 'DealForm': DealForm(), 'comm': comm, 'role': 'buyer', 'item': item, 'CancelForm': CancelForm()})
@handle_login_register
def comm_item(request, item_id, curr_user):
    """
    Returns appropriate Communication view according to seller/buyer
    """
    try:
        item = items.objects.get(id=item_id, is_expired=False, is_sold=False)
    except items.DoesNotExist:
        # Sold items remain accessible to the seller and the actual buyer.
        try:
            item = items.objects.get(id=item_id, is_expired=False, is_sold=True)
            if item.user == curr_user.user_obj or item.buyer == curr_user.user_obj:
                pass
            else:
                return render(request, 'error.html', {'error': 'Item Not Found'})
        except items.DoesNotExist:
            return render(request, 'error.html', {'error': 'Item Not Found'})
    if curr_user.user_obj == item.user:
        return comm_item_seller(request, item, curr_user)
    else:
        return comm_item_buyer(request, item, curr_user)
#!/usr/bin/env python
# Legacy Django (< 1.4) manage.py entry point using execute_manager.
from django.core.management import execute_manager
import imp
try:
    imp.find_module('settings') # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n" % __file__)
    sys.exit(1)
import settings
if __name__ == "__main__":
    execute_manager(settings)
| Python |
import mimetypes
import os
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.conf import settings
from django.core.files.base import File
from django.core.files.storage import Storage
from django.core.files.uploadedfile import UploadedFile
from django.core.files.uploadhandler import FileUploadHandler, \
StopFutureHandlers
from django.core.exceptions import ImproperlyConfigured
from django.http import HttpResponse
from django.utils.encoding import smart_str, force_unicode
from google.appengine.api import files
from google.appengine.api.images import get_serving_url, NotImageError
from google.appengine.ext.blobstore import BlobInfo, BlobKey, delete, \
create_upload_url, BLOB_KEY_HEADER, BLOB_RANGE_HEADER, BlobReader
def prepare_upload(request, url, **kwargs):
    """Return a Blobstore upload URL targeting ``url`` plus empty extras."""
    return create_upload_url(url), {}
def serve_file(request, file, save_as, content_type, **kwargs):
    """Build a response that instructs App Engine to serve a Blobstore file.

    The actual bytes are streamed by the Blobstore infrastructure based on
    the BLOB_KEY_HEADER; raises ValueError if the file has no blob info.
    """
    if hasattr(file, 'file') and hasattr(file.file, 'blobstore_info'):
        blobkey = file.file.blobstore_info.key()
    elif hasattr(file, 'blobstore_info'):
        blobkey = file.blobstore_info.key()
    else:
        raise ValueError("The provided file can't be served via the "
                         "Google App Engine Blobstore.")
    response = HttpResponse(content_type=content_type)
    response[BLOB_KEY_HEADER] = str(blobkey)
    response['Accept-Ranges'] = 'bytes'
    # Forward a client Range header so Blobstore serves partial content.
    http_range = request.META.get('HTTP_RANGE')
    if http_range is not None:
        response[BLOB_RANGE_HEADER] = http_range
    if save_as:
        response['Content-Disposition'] = smart_str(
            u'attachment; filename=%s' % save_as)
    if file.size is not None:
        response['Content-Length'] = file.size
    return response
class BlobstoreStorage(Storage):
    """Google App Engine Blobstore storage backend."""

    def _open(self, name, mode='rb'):
        # Wraps the blob lazily; no data is read at this point.
        return BlobstoreFile(name, mode, self)

    def _save(self, name, content):
        """Store *content* and return the name used to retrieve it later.

        The returned name has the form '<blob key>/<cleaned name>', so
        the BlobKey can be recovered by _get_key().

        Raises ValueError if *content* is neither Blobstore-backed nor a
        plain Django File instance.
        """
        name = name.replace('\\', '/')
        if hasattr(content, 'file') and \
           hasattr(content.file, 'blobstore_info'):
            data = content.file.blobstore_info
        elif hasattr(content, 'blobstore_info'):
            data = content.blobstore_info
        elif isinstance(content, File):
            # Plain files are written chunk-wise through the Files API
            # and then mapped back to a BlobKey.
            guessed_type = mimetypes.guess_type(name)[0]
            file_name = files.blobstore.create(mime_type=guessed_type or 'application/octet-stream',
                                               _blobinfo_uploaded_filename=name)
            with files.open(file_name, 'a') as f:
                for chunk in content.chunks():
                    f.write(chunk)
            files.finalize(file_name)
            data = files.blobstore.get_blob_key(file_name)
        else:
            raise ValueError("The App Engine storage backend only supports "
                             "BlobstoreFile instances or File instances.")
        if isinstance(data, (BlobInfo, BlobKey)):
            # We change the file name to the BlobKey's str() value.
            if isinstance(data, BlobInfo):
                data = data.key()
            return '%s/%s' % (data, name.lstrip('/'))
        else:
            raise ValueError("The App Engine Blobstore only supports "
                             "BlobInfo values. Data can't be uploaded "
                             "directly. You have to use the file upload "
                             "handler.")

    def delete(self, name):
        delete(self._get_key(name))

    def exists(self, name):
        return self._get_blobinfo(name) is not None

    def size(self, name):
        return self._get_blobinfo(name).size

    def url(self, name):
        # get_serving_url() only works for images; other blobs get None.
        try:
            return get_serving_url(self._get_blobinfo(name))
        except NotImageError:
            return None

    def created_time(self, name):
        return self._get_blobinfo(name).creation

    def get_valid_name(self, name):
        return force_unicode(name).strip().replace('\\', '/')

    def get_available_name(self, name):
        # Blob keys make stored names unique, so no collision handling
        # is needed beyond backslash normalization.
        return name.replace('\\', '/')

    def _get_key(self, name):
        # The BlobKey is the portion of the name before the first '/'.
        return BlobKey(name.split('/', 1)[0])

    def _get_blobinfo(self, name):
        return BlobInfo.get(self._get_key(name))
class BlobstoreFile(File):
    """Read-only Django File wrapper around a Blobstore blob."""

    def __init__(self, name, mode, storage):
        self.name = name
        self._storage = storage
        # The mode is recorded but never consulted; writes are rejected
        # unconditionally below.
        self._mode = mode
        self.blobstore_info = storage._get_blobinfo(name)

    @property
    def size(self):
        return self.blobstore_info.size

    def write(self, content):
        # Blobstore blobs cannot be modified through this wrapper.
        raise NotImplementedError()

    @property
    def file(self):
        # Lazily create (and cache) a reader over the blob's data.
        if not hasattr(self, '_file'):
            self._file = BlobReader(self.blobstore_info.key())
        return self._file
class BlobstoreFileUploadHandler(FileUploadHandler):
    """
    File upload handler for the Google App Engine Blobstore.
    """

    def new_file(self, *args, **kwargs):
        """
        Activate only when the upload carries a 'blob-key'.

        A 'blob-key' entry in content_type_extra presumably means the
        POST went through a Blobstore upload URL (see prepare_upload)
        and the data is already stored -- TODO confirm. In that case
        later handlers are stopped.
        """
        super(BlobstoreFileUploadHandler, self).new_file(*args, **kwargs)
        blobkey = self.content_type_extra.get('blob-key')
        self.active = blobkey is not None
        if self.active:
            self.blobkey = BlobKey(blobkey)
            raise StopFutureHandlers()

    def receive_data_chunk(self, raw_data, start):
        """
        Pass the chunk on to later handlers unless we're active; when
        active nothing is buffered, since the data already lives in the
        Blobstore.
        """
        if not self.active:
            return raw_data

    def file_complete(self, file_size):
        """
        Return a file object if we're activated.
        """
        if not self.active:
            return
        return BlobstoreUploadedFile(
            blobinfo=BlobInfo(self.blobkey),
            charset=self.charset)
class BlobstoreUploadedFile(UploadedFile):
    """
    An uploaded file whose payload already resides in the Blobstore;
    reads go through a BlobReader rather than an in-memory buffer.
    """

    def __init__(self, blobinfo, charset):
        super(BlobstoreUploadedFile, self).__init__(
            BlobReader(blobinfo.key()), blobinfo.filename,
            blobinfo.content_type, blobinfo.size, charset)
        self.blobstore_info = blobinfo

    def open(self, mode=None):
        # The underlying BlobReader needs no reopening; mode is ignored.
        pass

    def chunks(self, chunk_size=1024 * 128):
        # Rewind first so repeated iteration always starts at offset 0.
        self.file.seek(0)
        while True:
            content = self.read(chunk_size)
            if not content:
                break
            yield content

    def multiple_chunks(self, chunk_size=1024 * 128):
        # Always report chunked access so consumers stream via chunks().
        return True
| Python |
# Initialize Django.
from djangoappengine import main
from django.utils.importlib import import_module
from django.conf import settings

# Load all models.py to ensure signal handling installation or index
# loading of some apps.
for app in settings.INSTALLED_APPS:
    try:
        import_module('%s.models' % app)
    except ImportError:
        # Apps without a models module are simply skipped.
        pass

# Prefer a standalone 'mapreduce' package; fall back to the copy bundled
# with the App Engine SDK.
try:
    from mapreduce.main import APP as application, main
except ImportError:
    from google.appengine.ext.mapreduce.main import APP as application, main

if __name__ == '__main__':
    main()
| Python |
# Initialize Django (importing djangoappengine.main performs the setup;
# the imported name itself is deliberately shadowed below).
from djangoappengine import main
from google.appengine.ext.appstats.ui import app as application, main

if __name__ == '__main__':
    main()
| Python |
# Initialize Django (importing djangoappengine.main performs the setup),
# then expose the appstats UI as the WSGI application.
from djangoappengine import main
from google.appengine.ext.appstats.ui import app as application
| Python |
from django.test import TestCase
from .testmodels import OrderedModel
class OrderTest(TestCase):
    """Verify OrderedModel's default Meta ordering and the ways it can
    be overridden or removed, including on pk-filtered querysets.
    """

    def create_ordered_model_items(self):
        """Save four OrderedModel rows (pks 1..4); return (pks, priorities)."""
        priorities = [5, 2, 9, 1]
        pks = []
        for offset, priority in enumerate(priorities):
            instance = OrderedModel(pk=offset + 1, priority=priority)
            instance.save()
            pks.append(instance.pk)
        return pks, priorities

    def test_default_order(self):
        pks, priorities = self.create_ordered_model_items()
        fetched = [obj.priority for obj in OrderedModel.objects.all()]
        # The default ordering returns highest priority first.
        self.assertEquals(fetched, sorted(priorities, reverse=True))

    def test_override_default_order(self):
        pks, priorities = self.create_ordered_model_items()
        fetched = [obj.priority for obj in
                   OrderedModel.objects.all().order_by('priority')]
        self.assertEquals(fetched, sorted(priorities))

    def test_remove_default_order(self):
        pks, priorities = self.create_ordered_model_items()
        fetched = [obj.pk for obj in
                   OrderedModel.objects.all().order_by()]
        self.assertEquals(fetched, sorted(pks))

    def test_order_with_pk_filter(self):
        pks, priorities = self.create_ordered_model_items()
        descending = sorted(priorities, reverse=True)
        self.assertEquals(
            [obj.priority for obj in
             OrderedModel.objects.filter(pk__in=pks)],
            descending)
        # The same filter expressed via id__in.
        self.assertEquals(
            [obj.priority for obj in
             OrderedModel.objects.filter(id__in=pks)],
            descending)
        # reverse() flips the default (descending) order to ascending.
        self.assertEquals(
            [obj.priority for obj in
             OrderedModel.objects.filter(pk__in=pks).reverse()],
            sorted(priorities))

    def test_remove_default_order_with_pk_filter(self):
        pks, priorities = self.create_ordered_model_items()
        fetched = [obj.priority for obj in
                   OrderedModel.objects.filter(pk__in=pks).order_by()]
        # With ordering removed, rows come back in insertion (pk) order.
        self.assertEquals(fetched, priorities)
    # TODO: Test multiple orders.
| Python |
import datetime
import time
from django.db import models
from django.db.models import Q
from django.db.utils import DatabaseError
from django.test import TestCase
from django.utils import unittest
from google.appengine.api.datastore import Get, Key
from ..db.utils import get_cursor, set_cursor
from .testmodels import FieldsWithOptionsModel, EmailModel, DateTimeModel, \
OrderedModel, BlobModel
class FilterTest(TestCase):
    """
    Exercises the datastore backend's filter support: comparison lookups
    (gt/lt/gte/lte), pk and ForeignKey filters, in/range lookups,
    isnull, exclude, Q objects, values()/values_list(), slicing, query
    cursors, date lookups, auto_now(_add) and blob round-trips.
    """
    floats = [5.3, 2.6, 9.1, 1.58]
    emails = ['app-engine@scholardocs.com', 'sharingan@uchias.com',
              'rinnengan@sage.de', 'rasengan@naruto.com']
    datetimes = [datetime.datetime(2010, 1, 1, 0, 0, 0, 0),
                 datetime.datetime(2010, 12, 31, 23, 59, 59, 999999),
                 datetime.datetime(2011, 1, 1, 0, 0, 0, 0),
                 datetime.datetime(2013, 7, 28, 22, 30, 20, 50)]

    def setUp(self):
        # One FieldsWithOptionsModel / EmailModel / DateTimeModel row is
        # created per (float, email, datetime) triple, plus an
        # OrderedModel row (pk = index + 1) as the ForeignKey target.
        # NOTE(review): the loop variable 'float' shadows the builtin.
        for index, (float, email, datetime_value) in enumerate(zip(
                FilterTest.floats, FilterTest.emails, FilterTest.datetimes)):
            # Ensure distinct times when saving entities.
            time.sleep(0.01)
            self.last_save_datetime = datetime.datetime.now()
            self.last_save_time = self.last_save_datetime.time()
            ordered_instance = OrderedModel(priority=index, pk=index + 1)
            ordered_instance.save()
            FieldsWithOptionsModel(floating_point=float,
                                   integer=int(float), email=email,
                                   time=self.last_save_time,
                                   foreign_key=ordered_instance).save()
            EmailModel(email=email).save()
            DateTimeModel(datetime=datetime_value).save()

    def test_startswith(self):
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__startswith='r').order_by('email')],
            ['rasengan@naruto.com', 'rinnengan@sage.de'])
        self.assertEquals(
            [entity.email for entity in EmailModel.objects
                .filter(email__startswith='r').order_by('email')],
            ['rasengan@naruto.com', 'rinnengan@sage.de'])

    def test_pk_and_startswith(self):
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(text__startswith='Ha', pk='rinnengan@sage.de').order_by('text')],
            ['rinnengan@sage.de'])

    def test_gt(self):
        # Test gt on float.
        self.assertEquals(
            [entity.floating_point
             for entity in FieldsWithOptionsModel.objects
                .filter(floating_point__gt=3.1).order_by('floating_point')],
            [5.3, 9.1])
        # Test gt on integer.
        self.assertEquals(
            [entity.integer for entity in FieldsWithOptionsModel.objects
                .filter(integer__gt=3).order_by('integer')],
            [5, 9])
        # Test filter on primary_key field.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__gt='as').order_by('email')],
            ['rasengan@naruto.com', 'rinnengan@sage.de',
             'sharingan@uchias.com', ])
        # Test ForeignKeys with id.
        self.assertEquals(
            sorted([entity.email for entity in FieldsWithOptionsModel.objects
                .filter(foreign_key__gt=2)]),
            ['rasengan@naruto.com', 'rinnengan@sage.de'])
        # And with instance.
        ordered_instance = OrderedModel.objects.get(priority=1)
        self.assertEquals(
            sorted([entity.email for entity in FieldsWithOptionsModel.objects
                .filter(foreign_key__gt=ordered_instance)]),
            ['rasengan@naruto.com', 'rinnengan@sage.de'])

    def test_lt(self):
        # Test lt on float.
        self.assertEquals(
            [entity.floating_point
             for entity in FieldsWithOptionsModel.objects
                .filter(floating_point__lt=3.1).order_by('floating_point')],
            [1.58, 2.6])
        # Test lt on integer.
        self.assertEquals(
            [entity.integer for entity in FieldsWithOptionsModel.objects
                .filter(integer__lt=3).order_by('integer')],
            [1, 2])
        # Test filter on primary_key field.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__lt='as').order_by('email')],
            ['app-engine@scholardocs.com', ])
        # Filter on datetime.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(time__lt=self.last_save_time).order_by('time')],
            ['app-engine@scholardocs.com', 'sharingan@uchias.com',
             'rinnengan@sage.de'])
        # Test ForeignKeys with id.
        self.assertEquals(
            sorted([entity.email for entity in FieldsWithOptionsModel.objects
                .filter(foreign_key__lt=3)]),
            ['app-engine@scholardocs.com', 'sharingan@uchias.com'])
        # And with instance.
        ordered_instance = OrderedModel.objects.get(priority=2)
        self.assertEquals(
            sorted([entity.email for entity in FieldsWithOptionsModel.objects
                .filter(foreign_key__lt=ordered_instance)]),
            ['app-engine@scholardocs.com', 'sharingan@uchias.com'])

    def test_gte(self):
        # Test gte on float.
        self.assertEquals(
            [entity.floating_point
             for entity in FieldsWithOptionsModel.objects
                .filter(floating_point__gte=2.6).order_by('floating_point')],
            [2.6, 5.3, 9.1])
        # Test gte on integer.
        self.assertEquals(
            [entity.integer for entity in FieldsWithOptionsModel.objects
                .filter(integer__gte=2).order_by('integer')],
            [2, 5, 9])
        # Test filter on primary_key field.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__gte='rinnengan@sage.de').order_by('email')],
            ['rinnengan@sage.de', 'sharingan@uchias.com', ])

    def test_lte(self):
        # Test lte on float.
        self.assertEquals(
            [entity.floating_point
             for entity in FieldsWithOptionsModel.objects
                .filter(floating_point__lte=5.3).order_by('floating_point')],
            [1.58, 2.6, 5.3])
        # Test lte on integer.
        self.assertEquals(
            [entity.integer for entity in FieldsWithOptionsModel.objects
                .filter(integer__lte=5).order_by('integer')],
            [1, 2, 5])
        # Test filter on primary_key field.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__lte='rinnengan@sage.de').order_by('email')],
            ['app-engine@scholardocs.com', 'rasengan@naruto.com',
             'rinnengan@sage.de'])

    def test_equals(self):
        # Test equality filter on primary_key field.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email='rinnengan@sage.de').order_by('email')],
            ['rinnengan@sage.de'])

    def test_is_null(self):
        self.assertEquals(FieldsWithOptionsModel.objects.filter(
            floating_point__isnull=True).count(), 0)
        FieldsWithOptionsModel(
            integer=5.4, email='shinra.tensai@sixpaths.com',
            time=datetime.datetime.now().time()).save()
        self.assertEquals(FieldsWithOptionsModel.objects.filter(
            floating_point__isnull=True).count(), 1)
        # XXX: These filters will not work because of a Django bug.
        # self.assertEquals(FieldsWithOptionsModel.objects.filter(
        #     foreign_key=None).count(), 1)
        # (it uses left outer joins if checked against isnull)
        # self.assertEquals(FieldsWithOptionsModel.objects.filter(
        #     foreign_key__isnull=True).count(), 1)

    def test_exclude(self):
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .all().exclude(floating_point__lt=9.1)
                .order_by('floating_point')],
            ['rinnengan@sage.de', ])
        # Test exclude with ForeignKey.
        ordered_instance = OrderedModel.objects.get(priority=1)
        self.assertEquals(
            sorted([entity.email for entity in FieldsWithOptionsModel.objects
                .all().exclude(foreign_key__gt=ordered_instance)]),
            ['app-engine@scholardocs.com', 'sharingan@uchias.com'])

    def test_exclude_pk(self):
        self.assertEquals(
            [entity.pk for entity in OrderedModel.objects
                .exclude(pk__in=[2, 3]).order_by('pk')],
            [1, 4])

    def test_chained_filter(self):
        # Additionally tests count :)
        self.assertEquals(FieldsWithOptionsModel.objects.filter(
            floating_point__lt=5.3, floating_point__gt=2.6).count(), 0)
        # Test across multiple columns. On App Engine only one filter
        # is allowed to be an inequality filter.
        self.assertEquals(
            [(entity.floating_point, entity.integer)
             for entity in FieldsWithOptionsModel.objects
                .filter(floating_point__lte=5.3, integer=2)
                .order_by('floating_point')],
            [(2.6, 2), ])
        # Test multiple filters including the primary_key field.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__gte='rinnengan@sage.de', integer=2)
                .order_by('email')],
            ['sharingan@uchias.com', ])
        # Test in filter on primary key with another arbitrary filter.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__in=['rinnengan@sage.de',
                                   'sharingan@uchias.com'],
                        integer__gt=2)
                .order_by('integer')],
            ['rinnengan@sage.de', ])
        # Test exceptions.
        # Test multiple filters exception when filtered and not ordered
        # against the first filter.
        self.assertRaises(
            DatabaseError,
            lambda: FieldsWithOptionsModel.objects
                .filter(email__gte='rinnengan@sage.de', floating_point=5.3)
                .order_by('floating_point')[0])
        # Test exception if filtered across multiple columns with
        # inequality filter.
        self.assertRaises(
            DatabaseError,
            FieldsWithOptionsModel.objects
                .filter(floating_point__lte=5.3, integer__gte=2)
                .order_by('floating_point').get)
        # Test exception if filtered across multiple columns with
        # inequality filter with exclude.
        self.assertRaises(
            DatabaseError,
            FieldsWithOptionsModel.objects
                .filter(email__lte='rinnengan@sage.de')
                .exclude(floating_point__lt=9.1).order_by('email').get)
        self.assertRaises(
            DatabaseError,
            lambda: FieldsWithOptionsModel.objects
                .all().exclude(floating_point__lt=9.1).order_by('email')[0])
        # TODO: Maybe check all possible exceptions.

    def test_slicing(self):
        # Test slicing on filter with primary_key.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__lte='rinnengan@sage.de')
                .order_by('email')[:2]],
            ['app-engine@scholardocs.com', 'rasengan@naruto.com', ])
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__lte='rinnengan@sage.de')
                .order_by('email')[1:2]],
            ['rasengan@naruto.com', ])
        # Test on non pk field.
        self.assertEquals(
            [entity.integer for entity in FieldsWithOptionsModel.objects
                .all().order_by('integer')[:2]],
            [1, 2, ])
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .all().order_by('email')[::2]],
            ['app-engine@scholardocs.com', 'rinnengan@sage.de'])

    def test_cursor(self):
        # Walk the whole result set one entity at a time, resuming each
        # step from the previous query's cursor.
        results = list(FieldsWithOptionsModel.objects.all())
        cursor = None
        for item in results:
            query = FieldsWithOptionsModel.objects.all()[:1]
            if cursor is not None:
                query = set_cursor(query, cursor)
            next = query[0]
            self.assertEqual(next.pk, item.pk)
            cursor = get_cursor(query)
        query = set_cursor(FieldsWithOptionsModel.objects.all(), cursor)
        self.assertEqual(list(query[:1]), [])

    def test_Q_objects(self):
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(Q(email__lte='rinnengan@sage.de'))
                .order_by('email')][:2],
            ['app-engine@scholardocs.com', 'rasengan@naruto.com', ])
        self.assertEquals(
            [entity.integer for entity in FieldsWithOptionsModel.objects
                .exclude(Q(integer__lt=5) | Q(integer__gte=9))
                .order_by('integer')],
            [5, ])
        self.assertRaises(
            TypeError,
            FieldsWithOptionsModel.objects
                .filter(Q(floating_point=9.1), Q(integer=9) | Q(integer=2)))

    def test_pk_in(self):
        # Test pk__in with field name email.
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(email__in=['app-engine@scholardocs.com',
                                   'rasengan@naruto.com'])],
            ['app-engine@scholardocs.com', 'rasengan@naruto.com'])

    def test_in(self):
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(floating_point__in=[5.3, 2.6, 1.58])
                .filter(integer__in=[1, 5, 9])],
            ['app-engine@scholardocs.com', 'rasengan@naruto.com'])

    def test_in_with_pk_in(self):
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(floating_point__in=[5.3, 2.6, 1.58])
                .filter(email__in=['app-engine@scholardocs.com',
                                   'rasengan@naruto.com'])],
            ['app-engine@scholardocs.com', 'rasengan@naruto.com'])

    def test_in_with_order_by(self):
        class Post(models.Model):
            writer = models.IntegerField()
            order = models.IntegerField()
        Post(writer=1, order=1).save()
        Post(writer=1, order=2).save()
        Post(writer=1, order=3).save()
        Post(writer=2, order=4).save()
        Post(writer=2, order=5).save()
        posts = Post.objects.filter(writer__in=[1, 2]).order_by('order')
        orders = [post.order for post in posts]
        self.assertEqual(orders, range(1, 6))
        posts = Post.objects.filter(writer__in=[1, 2]).order_by('-order')
        orders = [post.order for post in posts]
        self.assertEqual(orders, range(5, 0, -1))

    def test_inequality(self):
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .exclude(floating_point=5.3).filter(integer__in=[1, 5, 9])],
            ['rasengan@naruto.com', 'rinnengan@sage.de'])

    def test_values(self):
        # Test values().
        self.assertEquals(
            [entity['pk'] for entity in FieldsWithOptionsModel.objects
                .filter(integer__gt=3).order_by('integer').values('pk')],
            ['app-engine@scholardocs.com', 'rinnengan@sage.de'])
        self.assertEquals(FieldsWithOptionsModel.objects
            .filter(integer__gt=3).order_by('integer').values('pk').count(), 2)
        # These queries first fetch the whole entity and then only
        # return the desired fields selected in .values.
        self.assertEquals(
            [entity['integer'] for entity in FieldsWithOptionsModel.objects
                .filter(email__startswith='r')
                .order_by('email').values('integer')],
            [1, 9])
        self.assertEquals(
            [entity['floating_point']
             for entity in FieldsWithOptionsModel.objects
                .filter(integer__gt=3)
                .order_by('integer').values('floating_point')],
            [5.3, 9.1])
        # Test values_list.
        self.assertEquals(
            [entity[0] for entity in FieldsWithOptionsModel.objects
                .filter(integer__gt=3).order_by('integer').values_list('pk')],
            ['app-engine@scholardocs.com', 'rinnengan@sage.de'])

    def test_range(self):
        # Test range on float.
        self.assertEquals(
            [entity.floating_point
             for entity in FieldsWithOptionsModel.objects
                .filter(floating_point__range=(2.6, 9.1))
                .order_by('floating_point')],
            [2.6, 5.3, 9.1])
        # Test range on pk.
        self.assertEquals(
            [entity.pk for entity in FieldsWithOptionsModel.objects
                .filter(pk__range=('app-engine@scholardocs.com',
                                   'rinnengan@sage.de'))
                .order_by('pk')],
            ['app-engine@scholardocs.com', 'rasengan@naruto.com',
             'rinnengan@sage.de'])
        # Test range on date/datetime objects.
        start_time = self.last_save_datetime - datetime.timedelta(minutes=1)
        self.assertEquals(
            [entity.email for entity in FieldsWithOptionsModel.objects
                .filter(time__range=(start_time, self.last_save_time))
                .order_by('time')],
            ['app-engine@scholardocs.com', 'sharingan@uchias.com',
             'rinnengan@sage.de', 'rasengan@naruto.com'])

    def test_date(self):
        # Test year on date range boundaries.
        self.assertEquals(
            [entity.datetime for entity in DateTimeModel.objects
                .filter(datetime__year=2010).order_by('datetime')],
            [datetime.datetime(2010, 1, 1, 0, 0, 0, 0),
             datetime.datetime(2010, 12, 31, 23, 59, 59, 999999)])
        # Test year on non boundary date.
        self.assertEquals(
            [entity.datetime for entity in DateTimeModel.objects
                .filter(datetime__year=2013).order_by('datetime')],
            [datetime.datetime(2013, 7, 28, 22, 30, 20, 50)])

    def test_auto_now(self):
        # auto_now fields must be refreshed on every save.
        time.sleep(0.1)
        entity = DateTimeModel.objects.all()[0]
        auto_now = entity.datetime_auto_now
        entity.save()
        entity = DateTimeModel.objects.get(pk=entity.pk)
        self.assertNotEqual(auto_now, entity.datetime_auto_now)

    def test_auto_now_add(self):
        # auto_now_add fields must keep their original value on re-save.
        time.sleep(0.1)
        entity = DateTimeModel.objects.all()[0]
        auto_now_add = entity.datetime_auto_now_add
        entity.save()
        entity = DateTimeModel.objects.get(pk=entity.pk)
        self.assertEqual(auto_now_add, entity.datetime_auto_now_add)

    def test_latest(self):
        self.assertEquals(FieldsWithOptionsModel.objects
            .latest('time').floating_point, 1.58)

    def test_blob(self):
        # Round-trip a blob through the model layer and verify the raw
        # datastore entity holds the same bytes.
        x = BlobModel(data='lalala')
        x.full_clean()
        x.save()
        e = Get(Key.from_path(BlobModel._meta.db_table, x.pk))
        self.assertEqual(e['data'], x.data)
        x = BlobModel.objects.all()[0]
        self.assertEqual(e['data'], x.data)
| Python |
from django.db import models
from django.db.utils import DatabaseError
from django.test import TestCase
class A(models.Model):
    # Base model used by BackendTest; B inherits from it (multi-table).
    value = models.IntegerField()
class B(A):
    # Multi-table-inheritance child of A; querying it is expected to
    # fail on this backend (see BackendTest.test_multi_table_inheritance).
    other = models.IntegerField()
class BackendTest(TestCase):
    def test_model_forms(self):
        """A ModelForm bound to posted data can save an instance."""
        from django import forms
        class F(forms.ModelForm):
            class Meta:
                model = A
        F({'value': '3'}).save()
    def test_multi_table_inheritance(self):
        """Saving a child writes the parent row, but reading the child
        raises DatabaseError -- presumably because it would require a
        join, which the datastore backend cannot do (confirm)."""
        B(value=3, other=5).save()
        self.assertEqual(A.objects.count(), 1)
        self.assertEqual(A.objects.all()[0].value, 3)
        self.assertRaises(DatabaseError, B.objects.count)
        self.assertRaises(DatabaseError, lambda: B.objects.all()[0])
| Python |
from django.db.models import F
from django.test import TestCase
from .testmodels import EmailModel
class TransactionTest(TestCase):
    """
    Tests QuerySet.update(), both with literal values and with F()
    expressions (including arithmetic on F() objects).
    """
    emails = ['app-engine@scholardocs.com', 'sharingan@uchias.com',
              'rinnengan@sage.de', 'rasengan@naruto.com']

    def setUp(self):
        # Two rows share emails[0]; one row uses emails[1].
        EmailModel(email=self.emails[0], number=1).save()
        EmailModel(email=self.emails[0], number=2).save()
        EmailModel(email=self.emails[1], number=3).save()

    def test_update(self):
        self.assertEqual(2, len(EmailModel.objects.all().filter(
            email=self.emails[0])))
        self.assertEqual(1, len(EmailModel.objects.all().filter(
            email=self.emails[1])))
        # Bulk-update moves both emails[0] rows over to emails[1].
        EmailModel.objects.all().filter(email=self.emails[0]).update(
            email=self.emails[1])
        self.assertEqual(0, len(EmailModel.objects.all().filter(
            email=self.emails[0])))
        self.assertEqual(3, len(EmailModel.objects.all().filter(
            email=self.emails[1])))

    def test_f_object_updates(self):
        self.assertEqual(1, len(EmailModel.objects.all().filter(
            number=1)))
        self.assertEqual(1, len(EmailModel.objects.all().filter(
            number=2)))
        # Test add.
        EmailModel.objects.all().filter(email=self.emails[0]).update(
            number=F('number') + F('number'))
        self.assertEqual(1, len(EmailModel.objects.all().filter(
            number=2)))
        self.assertEqual(1, len(EmailModel.objects.all().filter(
            number=4)))
        EmailModel.objects.all().filter(email=self.emails[1]).update(
            number=F('number') + 10, email=self.emails[0])
        self.assertEqual(1, len(EmailModel.objects.all().filter(number=13)))
        self.assertEqual(self.emails[0],
                         EmailModel.objects.all().get(number=13).email)
        # Complex expression test.
        EmailModel.objects.all().filter(number=13).update(
            number=F('number') * (F('number') + 10) - 5, email=self.emails[0])
        self.assertEqual(1, len(EmailModel.objects.all().filter(number=294)))
    # TODO: Tests for: sub, muld, div, mod, ....
| Python |
import datetime
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
from django.test import TestCase
from .testmodels import FieldsWithOptionsModel, OrderedModel, \
SelfReferenceModel
class NonReturnSetsTest(TestCase):
    """
    Tests QuerySet operations that do not return result sets: get(),
    count(), in_bulk(), latest(), exists(), deletion, and ForeignKey
    forward/backward traversal.
    """
    floats = [5.3, 2.6, 9.1, 1.58, 2.4]
    emails = ['app-engine@scholardocs.com', 'sharingan@uchias.com',
              'rinnengan@sage.de', 'rasengan@naruto.com', 'itachi@uchia.com']

    def setUp(self):
        # NOTE(review): 'float' shadows the builtin in this loop.
        for index, (float, email) in enumerate(zip(NonReturnSetsTest.floats,
                                                   NonReturnSetsTest.emails)):
            self.last_save_time = datetime.datetime.now().time()
            ordered_instance = OrderedModel(priority=index, pk=index + 1)
            ordered_instance.save()
            model = FieldsWithOptionsModel(floating_point=float,
                                           integer=int(float), email=email,
                                           time=self.last_save_time,
                                           foreign_key=ordered_instance)
            model.save()

    def test_get(self):
        self.assertEquals(
            FieldsWithOptionsModel.objects.get(
                email='itachi@uchia.com').email,
            'itachi@uchia.com')
        # Test exception when matching multiple entities.
        self.assertRaises(MultipleObjectsReturned,
                          FieldsWithOptionsModel.objects.get,
                          integer=2)
        # Test exception when entity does not exist.
        self.assertRaises(ObjectDoesNotExist,
                          FieldsWithOptionsModel.objects.get,
                          floating_point=5.2)
        # TODO: Test create when djangos model.save_base is refactored.
        # TODO: Test get_or_create when refactored.

    def test_count(self):
        self.assertEquals(
            FieldsWithOptionsModel.objects.filter(integer=2).count(), 2)

    def test_in_bulk(self):
        self.assertEquals(
            [key in ['sharingan@uchias.com', 'itachi@uchia.com']
             for key in FieldsWithOptionsModel.objects.in_bulk(
                ['sharingan@uchias.com', 'itachi@uchia.com']).keys()],
            [True, ] * 2)

    def test_latest(self):
        self.assertEquals(
            FieldsWithOptionsModel.objects.latest('time').email,
            'itachi@uchia.com')

    def test_exists(self):
        self.assertEquals(FieldsWithOptionsModel.objects.exists(), True)

    def test_deletion(self):
        # TODO: ForeignKeys will not be deleted! This has to be done
        # via background tasks.
        self.assertEquals(FieldsWithOptionsModel.objects.count(), 5)
        FieldsWithOptionsModel.objects.get(email='itachi@uchia.com').delete()
        self.assertEquals(FieldsWithOptionsModel.objects.count(), 4)
        FieldsWithOptionsModel.objects.filter(email__in=[
            'sharingan@uchias.com', 'itachi@uchia.com',
            'rasengan@naruto.com', ]).delete()
        self.assertEquals(FieldsWithOptionsModel.objects.count(), 2)

    def test_selfref_deletion(self):
        entity = SelfReferenceModel()
        entity.save()
        entity.delete()

    def test_foreign_key_fetch(self):
        # Test fetching the ForeignKey.
        ordered_instance = OrderedModel.objects.get(priority=2)
        self.assertEquals(
            FieldsWithOptionsModel.objects.get(integer=9).foreign_key,
            ordered_instance)

    def test_foreign_key_backward(self):
        entity = OrderedModel.objects.all()[0]
        self.assertEquals(entity.keys.count(), 1)
        # TODO: Add should save the added instance transactional via for
        # example force_insert.
        new_foreign_key = FieldsWithOptionsModel(
            floating_point=5.6, integer=3,
            email='temp@temp.com', time=datetime.datetime.now())
        entity.keys.add(new_foreign_key)
        self.assertEquals(entity.keys.count(), 2)
        # TODO: Add test for create.
        entity.keys.remove(new_foreign_key)
        self.assertEquals(entity.keys.count(), 1)
        entity.keys.clear()
        self.assertTrue(not entity.keys.exists())
        # Assigning the same instance twice stores it only once.
        entity.keys = [new_foreign_key, new_foreign_key]
        self.assertEquals(entity.keys.count(), 1)
        self.assertEquals(entity.keys.all()[0].integer, 3)
| Python |
import datetime
from django.test import TestCase
from django.db.utils import DatabaseError
from django.db.models.fields import NOT_PROVIDED
from google.appengine.api import users
from google.appengine.api.datastore import Get
from google.appengine.api.datastore_types import Text, Category, Email, Link, \
PhoneNumber, PostalAddress, Text, Blob, ByteString, GeoPt, IM, Key, \
Rating, BlobKey
from google.appengine.ext.db import Key
from .testmodels import FieldsWithOptionsModel, NullableTextModel
class FieldOptionsTest(TestCase):
    """
    Tests field options (null, default, primary_key) at both the
    datastore-entity level (via raw Get()) and the model-instance level.
    """

    def test_options(self):
        entity = FieldsWithOptionsModel()
        # Try to save the entity with non-nullable field time set to
        # None, should raise an exception.
        self.assertRaises(DatabaseError, entity.save)
        time = datetime.datetime.now().time()
        entity.time = time
        entity.save()
        # Check if primary_key=True is set correctly for the saved entity.
        self.assertEquals(entity.pk, u'app-engine@scholardocs.com')
        gae_entity = Get(
            Key.from_path(FieldsWithOptionsModel._meta.db_table, entity.pk))
        self.assertTrue(gae_entity is not None)
        self.assertEquals(gae_entity.key().name(),
                          u'app-engine@scholardocs.com')
        # Check if default values are set correctly on the db level,
        # primary_key field is not stored at the db level.
        for field in FieldsWithOptionsModel._meta.local_fields:
            if field.default and field.default != NOT_PROVIDED and \
                    not field.primary_key:
                self.assertEquals(gae_entity[field.column], field.default)
            elif field.column == 'time':
                # Times are stored as datetimes anchored at 1970-01-01.
                self.assertEquals(
                    gae_entity[field.column],
                    datetime.datetime(1970, 1, 1,
                                      time.hour, time.minute, time.second,
                                      time.microsecond))
            elif field.null and field.editable:
                self.assertEquals(gae_entity[field.column], None)
        # Check if default values are set correct on the model instance
        # level.
        entity = FieldsWithOptionsModel.objects.get()
        for field in FieldsWithOptionsModel._meta.local_fields:
            if field.default and field.default != NOT_PROVIDED:
                self.assertEquals(getattr(entity, field.column), field.default)
            elif field.column == 'time':
                self.assertEquals(getattr(entity, field.column), time)
            elif field.null and field.editable:
                self.assertEquals(getattr(entity, field.column), None)
        # Check if nullable field with default values can be set to
        # None.
        entity.slug = None
        entity.positive_small_integer = None
        try:
            entity.save()
        except:
            self.fail()
        # Check if slug and positive_small_integer will be retrieved
        # with values set to None (on db level and model instance
        # level).
        gae_entity = Get(Key.from_path(
            FieldsWithOptionsModel._meta.db_table, entity.pk))
        opts = FieldsWithOptionsModel._meta
        self.assertEquals(
            gae_entity[opts.get_field_by_name('slug')[0].column],
            None)
        self.assertEquals(
            gae_entity[opts.get_field_by_name(
                'positive_small_integer')[0].column],
            None)
        # On the model instance level.
        entity = FieldsWithOptionsModel.objects.get()
        self.assertEquals(
            getattr(entity, opts.get_field_by_name('slug')[0].column),
            None)
        self.assertEquals(
            getattr(entity, opts.get_field_by_name(
                'positive_small_integer')[0].column),
            None)
        # TODO: Check db_column option.
        # TODO: Change the primary key and check if a new instance with
        # the changed primary key will be saved (not in this test
        # class).

    def test_nullable_text(self):
        """
        Regression test for #48 (in old BitBucket repository).
        """
        entity = NullableTextModel(text=None)
        entity.save()
        db_entity = NullableTextModel.objects.get()
        self.assertEquals(db_entity.text, None)
| Python |
from __future__ import with_statement
import warnings
from django.db import connection, models
from django.db.utils import DatabaseError
from django.test import TestCase
from django.utils import unittest
from djangotoolbox.fields import ListField
class AutoKey(models.Model):
    # Implicit AutoField pk; exercised by KeysTest.test_auto_field.
    pass
class CharKey(models.Model):
    # String primary key (maps to a GAE string key name).
    id = models.CharField(primary_key=True, max_length=10)
class IntegerKey(models.Model):
    # Explicit integer primary key (maps to a GAE numeric key id).
    id = models.IntegerField(primary_key=True)
class Parent(models.Model):
    # Auto-keyed target for the Child foreign key below.
    pass
class Child(models.Model):
    # Nullable reference to an auto-keyed Parent.
    parent = models.ForeignKey(Parent, null=True)
class CharParent(models.Model):
    # String-keyed foreign-key target.
    id = models.CharField(primary_key=True, max_length=10)
class CharChild(models.Model):
    # References a parent whose pk is a string.
    parent = models.ForeignKey(CharParent)
class IntegerParent(models.Model):
    # Integer-keyed foreign-key target.
    id = models.IntegerField(primary_key=True)
class IntegerChild(models.Model):
    # References a parent whose pk is an explicit integer.
    parent = models.ForeignKey(IntegerParent)
class ParentKind(models.Model):
    # Auto-keyed target for ChildKind's single and list references.
    pass
class ChildKind(models.Model):
    # Both a single ForeignKey and a ListField of ForeignKeys to the
    # same kind.
    parent = models.ForeignKey(ParentKind)
    parents = ListField(models.ForeignKey(ParentKind))
class KeysTest(TestCase):
    """
    GAE requires that keys are strings or positive integers,
    keys also play a role in defining entity groups.
    Note: len() is a way of forcing evaluation of a QuerySet -- we
    depend on the back-end to do some checks, so sometimes there is no
    way to raise an exception earlier.
    """
    # test_primary_key installs a warnings filter; snapshot and restore
    # the filter state so tests stay independent of each other.
    def setUp(self):
        self.save_warnings_state()
    def tearDown(self):
        self.restore_warnings_state()
    def test_auto_field(self):
        """
        GAE keys may hold either strings or positive integers, however
        Django uses integers as well as their string representations
        for lookups, expecting both to be considered equivalent, so we
        limit AutoFields to just ints and check that int or string(int)
        may be used interchangably.
        Nonpositive keys are not allowed, and trying to use them to
        create or look up objects should raise a database exception.
        See: http://code.google.com/appengine/docs/python/datastore/keyclass.html.
        """
        AutoKey.objects.create()
        o1 = AutoKey.objects.create(pk=1)
        o2 = AutoKey.objects.create(pk='1')
        # self.assertEqual(o1, o2) TODO: Not same for Django, same for the database.
        # Non-numeric strings cannot be coerced to an integer key.
        with self.assertRaises(ValueError):
            AutoKey.objects.create(pk='a')
        self.assertEqual(AutoKey.objects.get(pk=1), o1)
        self.assertEqual(AutoKey.objects.get(pk='1'), o1)
        with self.assertRaises(ValueError):
            AutoKey.objects.get(pk='a')
        # Zero and negative ids must be rejected by the back-end, both
        # for writes and for reads/filters.
        with self.assertRaises(DatabaseError):
            AutoKey.objects.create(id=-1)
        with self.assertRaises(DatabaseError):
            AutoKey.objects.create(id=0)
        with self.assertRaises(DatabaseError):
            AutoKey.objects.get(id=-1)
        with self.assertRaises(DatabaseError):
            AutoKey.objects.get(id__gt=-1)
        with self.assertRaises(DatabaseError):
            AutoKey.objects.get(id=0)
        with self.assertRaises(DatabaseError):
            AutoKey.objects.get(id__gt=0)
        with self.assertRaises(DatabaseError):
            len(AutoKey.objects.filter(id__gt=-1))
        with self.assertRaises(DatabaseError):
            len(AutoKey.objects.filter(id__gt=0))
    def test_primary_key(self):
        """
        Specifying a field as primary_key should work as long as the
        field values (after get_db_prep_*/value_to_db_* layer) can be
        represented by the back-end key type. In case a value can be
        represented, but lossy conversions, unexpected sorting, range
        limitation or potential future ramifications are possible it
        should warn the user (as early as possible).
        TODO: It may be even better to raise exceptions / issue
        warnings during model validation. And make use of the new
        supports_primary_key_on to prevent validation of models
        using unsupported primary keys.
        """
        # TODO: Move to djangotoolbox or django.db.utils?
        class Warning(StandardError):
            """Database warning (name following PEP 249)."""
            pass
        # Turn Warning into an error so assertRaises can catch it.
        warnings.simplefilter('error', Warning)
        # This should just work.
        class AutoFieldKey(models.Model):
            key = models.AutoField(primary_key=True)
        AutoFieldKey.objects.create()
        # This one can be exactly represented.
        class CharKey(models.Model):
            id = models.CharField(primary_key=True, max_length=10)
        CharKey.objects.create(id='a')
        # Some rely on unstable assumptions or have other quirks and
        # should warn.
        # # TODO: Warning with a range limitation.
        # with self.assertRaises(Warning):
        #
        #     class IntegerKey(models.Model):
        #         id = models.IntegerField(primary_key=True)
        #     IntegerKey.objects.create(id=1)
        # # TODO: date/times could be resonably encoded / decoded as
        # #       strings (in a reversible manner) for key usage, but
        # #       would need special handling and continue to raise an
        # #       exception for now
        # with self.assertRaises(Warning):
        #
        #     class DateKey(models.Model):
        #         id = models.DateField(primary_key=True, auto_now=True)
        #     DateKey.objects.create()
        # # TODO: There is a db.Email field that would be better to
        # #       store emails, but that may prevent them from being
        # #       used as keys.
        # with self.assertRaises(Warning):
        #
        #     class EmailKey(models.Model):
        #         id = models.EmailField(primary_key=True)
        #     EmailKey.objects.create(id='aaa@example.com')
        # # TODO: Warn that changing field parameters breaks sorting.
        # #       This applies to any DecimalField, so should belong to
        # #       the docs.
        # with self.assertRaises(Warning):
        #
        #     class DecimalKey(models.Model):
        #         id = models.DecimalField(primary_key=True, decimal_places=2,
        #                                  max_digits=5)
        #     DecimalKey.objects.create(id=1)
        # Some cannot be reasonably represented (e.g. binary or string
        # encoding would prevent comparisons to work as expected).
        with self.assertRaises(DatabaseError):
            class FloatKey(models.Model):
                id = models.FloatField(primary_key=True)
            FloatKey.objects.create(id=1.0)
        # TODO: Better fail during validation or creation than
        # sometimes when filtering (False = 0 is a wrong key value).
        with self.assertRaises(DatabaseError):
            class BooleanKey(models.Model):
                id = models.BooleanField(primary_key=True)
            BooleanKey.objects.create(id=True)
            len(BooleanKey.objects.filter(id=False))
    def test_primary_key_coercing(self):
        """
        Creation and lookups should use the same type casting as
        vanilla Django does, so CharField used as a key should cast
        everything to a string, while IntegerField should cast to int.
        """
        CharKey.objects.create(id=1)
        CharKey.objects.create(id='a')
        CharKey.objects.create(id=1.1)
        CharKey.objects.get(id='1')
        CharKey.objects.get(id='a')
        CharKey.objects.get(id='1.1')
        IntegerKey.objects.create(id=1)
        with self.assertRaises(ValueError):
            IntegerKey.objects.create(id='a')
        # Floats are truncated to int, matching vanilla Django.
        IntegerKey.objects.create(id=1.1)
        IntegerKey.objects.get(id='1')
        with self.assertRaises(ValueError):
            IntegerKey.objects.get(id='a')
        IntegerKey.objects.get(id=1.1)
    def test_foreign_key(self):
        """
        Foreign key lookups may use parent instance or parent key value.
        Using null foreign keys needs some special attention.
        TODO: In 1.4 one may also add _id suffix and use the key value.
        """
        parent1 = Parent.objects.create(pk=1)
        child1 = Child.objects.create(parent=parent1)
        child2 = Child.objects.create(parent=None)
        self.assertEqual(child1.parent, parent1)
        self.assertEqual(child2.parent, None)
        self.assertEqual(Child.objects.get(parent=parent1), child1)
        self.assertEqual(Child.objects.get(parent=1), child1)
        self.assertEqual(Child.objects.get(parent='1'), child1)
        with self.assertRaises(ValueError):
            Child.objects.get(parent='a')
        self.assertEqual(Child.objects.get(parent=None), child2)
    def test_foreign_key_backwards(self):
        """
        Following relationships backwards (_set syntax) with typed
        parent key causes a unique problem for the legacy key storage.
        """
        parent = CharParent.objects.create(id=1)
        child = CharChild.objects.create(parent=parent)
        self.assertEqual(list(parent.charchild_set.all()), [child])
        parent = IntegerParent.objects.create(id=1)
        child = IntegerChild.objects.create(parent=parent)
        self.assertEqual(list(parent.integerchild_set.all()), [child])
    @unittest.skipIf(
        not connection.settings_dict.get('STORE_RELATIONS_AS_DB_KEYS'),
        "No key kinds to check with the string/int foreign key storage.")
    def test_key_kind(self):
        """
        Checks that db.Keys stored in the database use proper kinds.
        Key kind should be the name of the table (db_table) of a model
        for primary keys of entities, but for foreign keys, references
        in general, it should be the db_table of the model the field
        refers to.
        Note that Django hides the underlying db.Key objects well, and
        it does work even with wrong kinds, but keeping the data
        consistent may be significant for external tools.
        TODO: Add DictField / EmbeddedModelField and nesting checks.
        """
        parent = ParentKind.objects.create(pk=1)
        child = ChildKind.objects.create(
            pk=2, parent=parent, parents=[parent.pk])
        self.assertEqual(child.parent.pk, parent.pk)
        self.assertEqual(child.parents[0], parent.pk)
        # Inspect the raw datastore entities to verify the stored kinds.
        from google.appengine.api.datastore import Get
        from google.appengine.api.datastore_types import Key
        parent_key = Key.from_path(parent._meta.db_table, 1)
        child_key = Key.from_path(child._meta.db_table, 2)
        parent_entity = Get(parent_key)
        child_entity = Get(child_key)
        parent_column = child._meta.get_field('parent').column
        parents_column = child._meta.get_field('parents').column
        self.assertEqual(child_entity[parent_column], parent_key)
        self.assertEqual(child_entity[parents_column][0], parent_key)
| Python |
from .backend import BackendTest
from .field_db_conversion import FieldDBConversionTest
from .field_options import FieldOptionsTest
from .filter import FilterTest
from .keys import KeysTest
from .not_return_sets import NonReturnSetsTest
from .order import OrderTest
from .transactions import TransactionTest
| Python |
from django.db import models
from djangotoolbox.fields import BlobField
from ..db.db_settings import get_indexes
class EmailModel(models.Model):
    # Minimal model used by filter tests on EmailField lookups.
    email = models.EmailField()
    number = models.IntegerField(null=True)
class DateTimeModel(models.Model):
    # Covers explicit, auto_now and auto_now_add datetime handling.
    datetime = models.DateTimeField()
    datetime_auto_now = models.DateTimeField(auto_now=True)
    datetime_auto_now_add = models.DateTimeField(auto_now_add=True)
class FieldsWithoutOptionsModel(models.Model):
    # One field per supported Django field type, all with default
    # options, used by the DB-conversion tests.
    datetime = models.DateTimeField()
    date = models.DateField()
    time = models.TimeField()
    floating_point = models.FloatField()
    boolean = models.BooleanField()
    null_boolean = models.NullBooleanField()
    text = models.CharField(max_length=32)
    email = models.EmailField()
    comma_seperated_integer = models.CommaSeparatedIntegerField(max_length=10)
    ip_address = models.IPAddressField()
    slug = models.SlugField()
    url = models.URLField()
#    file = models.FileField()
#    file_path = models.FilePathField()
    long_text = models.TextField()
    indexed_text = models.TextField()
    xml = models.XMLField()
    integer = models.IntegerField()
    small_integer = models.SmallIntegerField()
    positive_integer = models.PositiveIntegerField()
    positive_small_integer = models.PositiveSmallIntegerField()
#    foreign_key = models.ForeignKey('FieldsWithOptionsModel')
#    foreign_key = models.ForeignKey('OrderedModel')
#    one_to_one = models.OneToOneField()
#    decimal = models.DecimalField() # can be None
#    image = models.ImageField()
# Register a datastore index for the otherwise-unindexed TextField so
# 'indexed_text' can be filtered on.
get_indexes()[FieldsWithoutOptionsModel] = {'indexed': ('indexed_text',)}
class FieldsWithOptionsModel(models.Model):
    # Any type of unique (unique_data, ...) is not supported on GAE,
    # instead you can use primary_key=True for some special cases. But
    # be carefull: changing the primary_key of an entity will not
    # result in an updated entity, instead a new entity will be putted
    # into the datastore. The old one will not be deleted and all
    # references pointing to the old entitiy will not point to the new
    # one either.
    datetime = models.DateTimeField(auto_now=True, db_column='birthday')
    date = models.DateField(auto_now_add=True)
    time = models.TimeField()
    floating_point = models.FloatField(null=True)
    boolean = models.BooleanField()
    null_boolean = models.NullBooleanField(default=True)
    text = models.CharField(default='Hallo', max_length=10)
    # The email doubles as this model's primary key (see note above).
    email = models.EmailField(default='app-engine@scholardocs.com',
                              primary_key=True)
    comma_seperated_integer = models.CommaSeparatedIntegerField(max_length=10)
    ip_address = models.IPAddressField(default='192.168.0.2')
    slug = models.SlugField(default='GAGAA', null=True)
    url = models.URLField(default='http://www.scholardocs.com')
#    file = FileField()
#    file_path = FilePathField()
    long_text = models.TextField(default=1000 * 'A')
    xml = models.XMLField(default=2000 * 'B')
    integer = models.IntegerField(default=100)
    small_integer = models.SmallIntegerField(default=-5)
    positive_integer = models.PositiveIntegerField(default=80)
    positive_small_integer = models.PositiveSmallIntegerField(default=3,
                                                              null=True)
    foreign_key = models.ForeignKey('OrderedModel', null=True,
                                    related_name='keys')
#    one_to_one = OneToOneField()
#    decimal = DecimalField()
#    image = ImageField()
class OrderedModel(models.Model):
    # Default ordering by descending priority; used by ordering tests.
    id = models.IntegerField(primary_key=True)
    priority = models.IntegerField()
    class Meta:
        ordering = ('-priority',)
class BlobModel(models.Model):
    # Raw binary payload stored via djangotoolbox's BlobField.
    data = BlobField()
class SelfReferenceModel(models.Model):
    # Nullable self-referencing FK (tests recursive relations).
    ref = models.ForeignKey('self', null=True)
class NullableTextModel(models.Model):
    # TextField that must round-trip None (regression test #48).
    text = models.TextField(null=True)
| Python |
import datetime
from django.test import TestCase
from google.appengine.api.datastore import Get
from google.appengine.api.datastore_types import Text, Category, Email, \
Link, PhoneNumber, PostalAddress, Text, Blob, ByteString, GeoPt, IM, \
Key, Rating, BlobKey
from .testmodels import FieldsWithoutOptionsModel
# TODO: Add field conversions for ForeignKeys?
class FieldDBConversionTest(TestCase):
    """
    Checks that Django field values are converted to the proper native
    GAE datastore types on save, and back to Python types on load.
    """
    def test_db_conversion(self):
        actual_datetime = datetime.datetime.now()
        entity = FieldsWithoutOptionsModel(
            datetime=actual_datetime, date=actual_datetime.date(),
            time=actual_datetime.time(), floating_point=5.97, boolean=True,
            null_boolean=False, text='Hallo', email='hallo@hallo.com',
            comma_seperated_integer='5,4,3,2',
            ip_address='194.167.1.1', slug='you slugy slut :)',
            url='http://www.scholardocs.com', long_text=1000 * 'A',
            indexed_text='hello', xml=2000 * 'B',
            integer=-400, small_integer=-4, positive_integer=400,
            positive_small_integer=4)
        entity.save()
        # Get the gae entity (not the django model instance) and test
        # if the fields have been converted right to the corresponding
        # GAE database types.
        gae_entity = Get(
            Key.from_path(FieldsWithoutOptionsModel._meta.db_table,
                          entity.pk))
        opts = FieldsWithoutOptionsModel._meta
        # Note: date and time are both stored as datetime.datetime in
        # the raw entity (the datastore has no separate date/time type).
        for name, types in [('long_text', Text),
                ('indexed_text', unicode), ('xml', Text),
                ('text', unicode), ('ip_address', unicode), ('slug', unicode),
                ('email', unicode), ('comma_seperated_integer', unicode),
                ('url', unicode), ('time', datetime.datetime),
                ('datetime', datetime.datetime), ('date', datetime.datetime),
                ('floating_point', float), ('boolean', bool),
                ('null_boolean', bool), ('integer', (int, long)),
                ('small_integer', (int, long)),
                ('positive_integer', (int, long)),
                ('positive_small_integer', (int, long))]:
            column = opts.get_field_by_name(name)[0].column
            if not isinstance(types, (list, tuple)):
                types = (types, )
            self.assertTrue(type(gae_entity[column]) in types)
        # Get the model instance and check if the fields convert back
        # to the right types.
        model = FieldsWithoutOptionsModel.objects.get()
        for name, types in [
                ('long_text', unicode),
                ('indexed_text', unicode), ('xml', unicode),
                ('text', unicode), ('ip_address', unicode),
                ('slug', unicode),
                ('email', unicode), ('comma_seperated_integer', unicode),
                ('url', unicode), ('datetime', datetime.datetime),
                ('date', datetime.date), ('time', datetime.time),
                ('floating_point', float), ('boolean', bool),
                ('null_boolean', bool), ('integer', (int, long)),
                ('small_integer', (int, long)),
                ('positive_integer', (int, long)),
                ('positive_small_integer', (int, long))]:
            if not isinstance(types, (list, tuple)):
                types = (types, )
            self.assertTrue(type(getattr(model, name)) in types)
| Python |
import logging
import os
import sys
def find_project_dir():
    """
    Locate the project root by scanning sys.path for a manage.py file.

    Returns the directory containing the first manage.py found;
    raises RuntimeError if none of the path entries has one.
    """
    for entry in sys.path:
        manage_py = os.path.join(os.path.abspath(entry), "manage.py")
        if not os.path.exists(manage_py):
            continue
        return os.path.dirname(manage_py)
    raise RuntimeError("Unable to locate manage.py on sys.path")
# Resolved once at import time; everything below is relative to it.
PROJECT_DIR = find_project_dir()
# Local (dev_appserver) datastore files live here.
DATA_ROOT = os.path.join(PROJECT_DIR, '.gaedata')
# Overrides for os.environ.
env_ext = {}
if 'DJANGO_SETTINGS_MODULE' not in os.environ:
    env_ext['DJANGO_SETTINGS_MODULE'] = 'settings'
def setup_env():
    """Configures GAE environment for command-line apps.

    Finds the App Engine SDK (searching PATH and well-known install
    locations if it is not already importable), puts it on sys.path,
    then runs project setup, threading/logging fixes and patches
    Django's management-command discovery for zipped apps.
    """
    # Try to import the appengine code from the system path.
    try:
        from google.appengine.api import apiproxy_stub_map
    except ImportError:
        # Purge half-imported 'google*' modules so a retry after
        # extending sys.path starts clean.
        for k in [k for k in sys.modules if k.startswith('google')]:
            del sys.modules[k]
        # Not on the system path. Build a list of alternative paths
        # where it may be. First look within the project for a local
        # copy, then look for where the Mac OS SDK installs it.
        paths = [os.path.join(PROJECT_DIR, 'google_appengine'),
                 os.environ.get('APP_ENGINE_SDK'),
                 '/usr/local/google_appengine',
                 '/Applications/GoogleAppEngineLauncher.app/Contents/Resources/GoogleAppEngine-default.bundle/Contents/Resources/google_appengine']
        # Also accept any PATH entry that itself is the SDK directory.
        for path in os.environ.get('PATH', '').split(os.pathsep):
            path = path.rstrip(os.sep)
            if path.endswith('google_appengine'):
                paths.append(path)
        if os.name in ('nt', 'dos'):
            path = r'%(PROGRAMFILES)s\Google\google_appengine' % os.environ
            paths.append(path)
        # Loop through all possible paths and look for the SDK dir.
        sdk_path = None
        for path in paths:
            if not path:
                continue
            path = os.path.expanduser(path)
            path = os.path.realpath(path)
            if os.path.exists(path):
                sdk_path = path
                break
        # The SDK could not be found in any known location.
        if sdk_path is None:
            sys.stderr.write("The Google App Engine SDK could not be found!\n"
                             "Make sure it's accessible via your PATH "
                             "environment and called google_appengine.\n")
            sys.exit(1)
        # First add the found SDK to the path
        sys.path = [ sdk_path ] + sys.path
        # Then call fix_sys_path from the SDK
        from dev_appserver import fix_sys_path
        fix_sys_path()
    setup_project()
    from .utils import have_appserver
    if have_appserver:
        # App Engine's threading.local is broken.
        setup_threading()
    elif not os.path.exists(DATA_ROOT):
        os.mkdir(DATA_ROOT)
    setup_logging()
    if not have_appserver:
        # Patch Django to support loading management commands from zip
        # files.
        from django.core import management
        management.find_commands = find_commands
def find_commands(management_dir):
    """
    Return the names of all management commands available under
    ``management_dir/commands``.

    Unlike Django's stock implementation this uses pkgutil, so it works
    both for file-based deployments and for apps shipped inside a ZIP
    archive that is on sys.path. Returns an empty list when the commands
    package does not exist or defines nothing.
    """
    import pkgutil
    commands_dir = os.path.join(management_dir, 'commands')
    names = []
    for _importer, module_name, is_package in pkgutil.iter_modules([commands_dir]):
        if not is_package:
            names.append(module_name)
    return names
def setup_threading():
    """
    Swap in Django's pure-Python threading.local on Python < 2.7.

    No-op on 2.7+.
    XXX: On Python 2.5 GAE's threading.local doesn't work correctly
    with subclassing.
    """
    if sys.version_info >= (2, 7):
        return
    try:
        import threading
        from django.utils._threading_local import local
    except ImportError:
        pass
    else:
        threading.local = local
def setup_logging():
    """Set the root logger level: DEBUG by default, INFO on a deployed
    appserver whose settings have DEBUG off."""
    # Fix Python 2.6 logging module.
    logging.logMultiprocessing = 0
    # Enable logging.
    level = logging.DEBUG
    from .utils import have_appserver
    if have_appserver:
        # We can't import settings at this point when running a normal
        # manage.py command because this module gets imported from
        # settings.py.
        from django.conf import settings
        if not settings.DEBUG:
            level = logging.INFO
    logging.getLogger().setLevel(level)
def setup_project():
    """Prepare os.environ and sys.path for running the project.

    Depending on where we run (dev_appserver sandbox, plain command
    line, or production) this patches the sandbox restrictions and then
    prepends the project's extra paths to sys.path exactly once.
    """
    from .utils import have_appserver, on_production_server
    if have_appserver:
        # This fixes a pwd import bug for os.path.expanduser().
        env_ext['HOME'] = PROJECT_DIR
    # The dev_appserver creates a sandbox which restricts access to
    # certain modules and builtins in order to emulate the production
    # environment. Here we get the subprocess module back into the
    # dev_appserver sandbox.This module is just too important for
    # development. Also we add the compiler/parser module back and
    # enable https connections (seem to be broken on Windows because
    # the _ssl module is disallowed).
    if not have_appserver:
        from google.appengine.tools import dev_appserver
        try:
            # Backup os.environ. It gets overwritten by the
            # dev_appserver, but it's needed by the subprocess module.
            env = dev_appserver.DEFAULT_ENV
            dev_appserver.DEFAULT_ENV = os.environ.copy()
            dev_appserver.DEFAULT_ENV.update(env)
            # Backup the buffer() builtin. The subprocess in Python 2.5
            # on Linux and OS X uses needs it, but the dev_appserver
            # removes it.
            dev_appserver.buffer = buffer
        except AttributeError:
            logging.warn("Could not patch the default environment. "
                         "The subprocess module will not work correctly.")
        try:
            # Allow importing compiler/parser, _ssl (for https),
            # _io for Python 2.7 io support on OS X
            dev_appserver.HardenedModulesHook._WHITE_LIST_C_MODULES.extend(
                ('parser', '_ssl', '_io'))
        except AttributeError:
            logging.warn("Could not patch modules whitelist. the compiler "
                         "and parser modules will not work and SSL support "
                         "is disabled.")
    elif not on_production_server:
        # Running inside the dev_appserver sandbox: restore subprocess.
        try:
            # Restore the real subprocess module.
            from google.appengine.api.mail_stub import subprocess
            sys.modules['subprocess'] = subprocess
            # Re-inject the buffer() builtin into the subprocess module.
            from google.appengine.tools import dev_appserver
            subprocess.buffer = dev_appserver.buffer
        except Exception, e:
            logging.warn("Could not add the subprocess module to the "
                         "sandbox: %s" % e)
    os.environ.update(env_ext)
    extra_paths = [PROJECT_DIR, os.path.join(os.path.dirname(__file__), 'lib')]
    zip_packages_dir = os.path.join(PROJECT_DIR, 'zip-packages')
    # We support zipped packages in the common and project folders.
    if os.path.isdir(zip_packages_dir):
        for zip_package in os.listdir(zip_packages_dir):
            extra_paths.append(os.path.join(zip_packages_dir, zip_package))
    # App Engine causes main.py to be reloaded if an exception gets
    # raised on the first request of a main.py instance, so don't call
    # setup_project() multiple times. We ensure this indirectly by
    # checking if we've already modified sys.path, already.
    if len(sys.path) < len(extra_paths) or \
            sys.path[:len(extra_paths)] != extra_paths:
        for path in extra_paths:
            while path in sys.path:
                sys.path.remove(path)
        sys.path = extra_paths + sys.path
| Python |
# Base settings for djangoappengine projects; a project's settings.py
# is expected to star-import or mirror these.
# Initialize App Engine SDK if necessary.
try:
    from google.appengine.api import apiproxy_stub_map
except ImportError:
    from .boot import setup_env
    setup_env()
from djangoappengine.utils import on_production_server, have_appserver
# Debugging is on everywhere except the deployed production app.
DEBUG = not on_production_server
TEMPLATE_DEBUG = DEBUG
ROOT_URLCONF = 'urls'
DATABASES = {
    'default': {
        'ENGINE': 'djangoappengine.db',
        # Other settings which you might want to override in your
        # settings.py.
        # Activates high-replication support for remote_api.
        # 'HIGH_REPLICATION': True,
        # Switch to the App Engine for Business domain.
        # 'DOMAIN': 'googleplex.com',
        # Store db.Keys as values of ForeignKey or other related
        # fields. Warning: dump your data before, and reload it after
        # changing! Defaults to False if not set.
        # 'STORE_RELATIONS_AS_DB_KEYS': True,
        'DEV_APPSERVER_OPTIONS': {
            # Optional parameters for development environment.
            # Emulate the high-replication datastore locally.
            # TODO: Likely to break loaddata (some records missing).
            # 'high_replication' : True,
            # Use the SQLite backend for local storage (instead of
            # default in-memory datastore). Useful for testing with
            # larger datasets or when debugging concurrency/async
            # issues (separate processes will share a common db state,
            # rather than syncing on startup).
            # 'use_sqlite': True,
        },
    },
}
# Queue-based async mail in production, synchronous mail locally.
if on_production_server:
    EMAIL_BACKEND = 'djangoappengine.mail.AsyncEmailBackend'
else:
    EMAIL_BACKEND = 'djangoappengine.mail.EmailBackend'
# Specify a queue name for the async. email backend.
EMAIL_QUEUE_NAME = 'default'
# File uploads/serving go through the Blobstore.
PREPARE_UPLOAD_BACKEND = 'djangoappengine.storage.prepare_upload'
SERVE_FILE_BACKEND = 'djangoappengine.storage.serve_file'
DEFAULT_FILE_STORAGE = 'djangoappengine.storage.BlobstoreStorage'
FILE_UPLOAD_MAX_MEMORY_SIZE = 1024 * 1024
FILE_UPLOAD_HANDLERS = (
    'djangoappengine.storage.BlobstoreFileUploadHandler',
    'django.core.files.uploadhandler.MemoryFileUploadHandler',
)
# GAE memcache service via Django's memcached backend; TIMEOUT 0 means
# "use the backend's default expiry".
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.memcached.MemcachedCache',
        'TIMEOUT': 0,
    }
}
SESSION_ENGINE = 'django.contrib.sessions.backends.cached_db'
if not on_production_server:
    INTERNAL_IPS = ('127.0.0.1',)
| Python |
# Python 2.5 CGI handler.
import os
import sys
from djangoappengine.main import application
from google.appengine.ext.webapp.util import run_wsgi_app
from djangoappengine.boot import setup_logging, env_ext
from django.conf import settings
path_backup = None
def real_main():
    """Reset sys.path/os.environ, then serve one request via CGI.

    App Engine reuses the interpreter between requests: the first call
    snapshots sys.path, and every later call restores that snapshot so
    path mutations from a previous request can't leak into this one.
    """
    # Reset path and environment variables.
    global path_backup
    # Explicit None check instead of the old bare `except:` around
    # `path_backup[:]` -- the bare except also swallowed unrelated
    # errors (including KeyboardInterrupt) just to detect first run.
    if path_backup is None:
        path_backup = sys.path[:]
    else:
        sys.path = path_backup[:]
    os.environ.update(env_ext)
    setup_logging()
    # Run the WSGI CGI handler with that application.
    run_wsgi_app(application)
def profile_main(func):
    """Run ``func`` under cProfile and log the formatted stats.

    Settings consulted: ONLY_FORCED_PROFILE, PROFILE_PERCENTAGE,
    SORT_PROFILE_RESULTS_BY, PROFILE_PATTERN, MAX_PROFILE_RESULTS,
    EXTRA_PROFILE_OUTPUT. When profiling is skipped (not forced /
    not sampled) the wrapped function's result is returned directly.
    """
    from cStringIO import StringIO
    import cProfile
    import logging
    import pstats
    import random
    only_forced_profile = getattr(settings, 'ONLY_FORCED_PROFILE', False)
    profile_percentage = getattr(settings, 'PROFILE_PERCENTAGE', None)
    # QUERY_STRING can be absent entirely (e.g. non-CGI invocation);
    # default to '' so the substring test below can't raise TypeError.
    query_string = os.environ.get('QUERY_STRING', '')
    # Skip profiling when it wasn't forced (forced-only mode) or when
    # this request didn't fall into the sampled percentage.
    if (only_forced_profile and
            'profile=forced' not in query_string) or \
            (not only_forced_profile and profile_percentage and
             float(profile_percentage) / 100.0 <= random.random()):
        return func()
    prof = cProfile.Profile()
    prof = prof.runctx('func()', globals(), locals())
    stream = StringIO()
    stats = pstats.Stats(prof, stream=stream)
    sort_by = getattr(settings, 'SORT_PROFILE_RESULTS_BY', 'time')
    if not isinstance(sort_by, (list, tuple)):
        sort_by = (sort_by,)
    stats.sort_stats(*sort_by)
    # print_stats accepts mixed restrictions: a regex pattern and/or a
    # maximum line count.
    restrictions = []
    profile_pattern = getattr(settings, 'PROFILE_PATTERN', None)
    if profile_pattern:
        restrictions.append(profile_pattern)
    max_results = getattr(settings, 'MAX_PROFILE_RESULTS', 80)
    if max_results and max_results != 'all':
        restrictions.append(max_results)
    stats.print_stats(*restrictions)
    extra_output = getattr(settings, 'EXTRA_PROFILE_OUTPUT', None) or ()
    # Bug fix: this previously tested isinstance(sort_by, ...) -- which
    # is always a tuple at this point -- so a plain-string
    # EXTRA_PROFILE_OUTPUT was never normalized into a tuple.
    if not isinstance(extra_output, (list, tuple)):
        extra_output = (extra_output,)
    if 'callees' in extra_output:
        stats.print_callees()
    if 'callers' in extra_output:
        stats.print_callers()
    logging.info("Profile data:\n%s.", stream.getvalue())
def make_profileable(func):
    """Return ``func`` wrapped in the profiler when
    settings.ENABLE_PROFILER is set; otherwise return it unchanged."""
    if not getattr(settings, 'ENABLE_PROFILER', False):
        return func
    return lambda: profile_main(func)
# Wrap the handler once at import time; App Engine's CGI runtime calls
# main() for every request.
main = make_profileable(real_main)
if __name__ == '__main__':
    main()
| Python |
import os
import sys
# Add parent folder to sys.path, so we can import boot.
# App Engine causes main.py to be reloaded if an exception gets raised
# on the first request of a main.py instance, so don't add project_dir
# multiple times.
project_dir = os.path.abspath(
    os.path.dirname(os.path.dirname(os.path.dirname(__file__))))
if project_dir not in sys.path or sys.path.index(project_dir) > 0:
    while project_dir in sys.path:
        sys.path.remove(project_dir)
    sys.path.insert(0, project_dir)
# Drop the first other sys.path entry that carries a bundled Django, so
# the project's own copy (under project_dir) wins.
for path in sys.path[:]:
    if path != project_dir and os.path.isdir(os.path.join(path, 'django')):
        sys.path.remove(path)
        break
# Remove the standard version of Django.
if 'django' in sys.modules and sys.modules['django'].VERSION < (1, 2):
    for k in [k for k in sys.modules
              if k.startswith('django.') or k == 'django']:
        del sys.modules[k]
from djangoappengine.boot import setup_env
setup_env()
def validate_models():
    """
    Run Django's model validation and log the outcome.

    BaseRunserverCommand only validates once at startup, so this is
    invoked on every import to re-check after each code change.
    """
    import logging
    from django.core.management.validation import get_validation_errors
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    logging.info("Validating models...")
    report = StringIO()
    error_count = get_validation_errors(report, None)
    if not error_count:
        logging.info("All models validated.")
        return
    report.seek(0)
    logging.critical("One or more models did not validate:\n%s" %
                     report.read())
from djangoappengine.utils import on_production_server
# Re-validate on every (re)import during development only.
if not on_production_server:
    validate_models()
from django.core.handlers.wsgi import WSGIHandler
from google.appengine.ext.webapp.util import run_wsgi_app
from django.conf import settings
def log_traceback(*args, **kwargs):
    """Signal receiver: log the active exception with its traceback.

    Accepts arbitrary signal arguments and ignores them.
    """
    import logging
    logging.exception("Exception in request:")
from django.core import signals
# Make request exceptions visible in the App Engine logs.
signals.got_request_exception.connect(log_traceback)
# Create a Django application for WSGI.
application = WSGIHandler()
# Add the staticfiles handler if necessary.
if settings.DEBUG and 'django.contrib.staticfiles' in settings.INSTALLED_APPS:
    from django.contrib.staticfiles.handlers import StaticFilesHandler
    application = StaticFilesHandler(application)
# Optionally wrap the app in Google's Appstats RPC instrumentation.
if getattr(settings, 'ENABLE_APPSTATS', False):
    from google.appengine.ext.appstats.recording import \
        appstats_wsgi_middleware
    application = appstats_wsgi_middleware(application)
| Python |
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.app_identity import get_application_id
# True when running inside an appserver (datastore stub registered).
have_appserver = bool(apiproxy_stub_map.apiproxy.GetStub('datastore_v3'))
if have_appserver:
    appid = get_application_id()
else:
    # Command-line context: read the application id from app.yaml.
    try:
        from google.appengine.tools import dev_appserver
        from .boot import PROJECT_DIR
        appconfig = dev_appserver.LoadAppConfig(PROJECT_DIR, {},
                                                default_partition='dev')[0]
        # Strip a 'partition~' prefix (e.g. 's~') if present.
        appid = appconfig.application.split('~', 1)[-1]
    except ImportError, e:
        raise Exception("Could not get appid. Is your app.yaml file missing? "
                        "Error was: %s" % e)
# An appserver whose SERVER_SOFTWARE is not "Devel..." is production.
on_production_server = have_appserver and \
    not os.environ.get('SERVER_SOFTWARE', '').lower().startswith('devel')
| Python |
from django.core.management import execute_from_command_line
from django.core.management.base import BaseCommand
class Command(BaseCommand):
    help = "Runs a command with access to the remote App Engine production " \
           "server (e.g. manage.py remote shell)."
    args = "remotecommand"
    def run_from_argv(self, argv):
        """Point the first GAE database connection at the remote
        production datastore, then re-dispatch the wrapped command."""
        from django.db import connections
        from ...db.base import DatabaseWrapper
        from ...db.stubs import stub_manager
        for connection in connections.all():
            if isinstance(connection, DatabaseWrapper):
                stub_manager.setup_remote_stubs(connection)
                break
        # Drop 'remote' from argv so the wrapped command parses cleanly:
        # ['manage.py', 'remote', 'shell', ...] -> ['manage.py', 'shell', ...].
        argv = argv[:1] + argv[2:]
        execute_from_command_line(argv)
| Python |
import logging
import time
import sys
from django.conf import settings
from django.core.management import call_command
from django.core.management.base import BaseCommand
from ...boot import PROJECT_DIR
from ...utils import appconfig
# Commands run before/after appcfg.py update; both lists can be
# overridden from settings.
PRE_DEPLOY_COMMANDS = ()
if 'mediagenerator' in settings.INSTALLED_APPS:
    PRE_DEPLOY_COMMANDS += ('generatemedia',)
PRE_DEPLOY_COMMANDS = getattr(settings, 'PRE_DEPLOY_COMMANDS',
                              PRE_DEPLOY_COMMANDS)
POST_DEPLOY_COMMANDS = getattr(settings, 'POST_DEPLOY_COMMANDS', ())
def run_appcfg(argv):
    """Invoke appcfg.py update for this project, then (unless
    --nosyncdb was given) run syncdb against the remote datastore."""
    # We don't really want to use that one though, it just executes
    # this one.
    from google.appengine.tools import appcfg
    # Reset the logging level to WARN as appcfg will spew tons of logs
    # on INFO.
    logging.getLogger().setLevel(logging.WARN)
    new_args = argv[:]
    # Replace our 'deploy' subcommand with appcfg's 'update'.
    new_args[1] = 'update'
    if appconfig.runtime != 'python':
        # Non-python runtime declared in app.yaml: skip precompilation.
        new_args.insert(1, '-R')
    new_args.append(PROJECT_DIR)
    syncdb = True
    if '--nosyncdb' in new_args:
        syncdb = False
        new_args.remove('--nosyncdb')
    appcfg.main(new_args)
    if syncdb:
        print "Running syncdb."
        # Wait a little bit for deployment to finish.
        for countdown in range(9, 0, -1):
            sys.stdout.write('%s\r' % countdown)
            time.sleep(1)
        from django.db import connections
        for connection in connections.all():
            if hasattr(connection, 'setup_remote'):
                connection.setup_remote()
        call_command('syncdb', remote=True, interactive=True)
    if getattr(settings, 'ENABLE_PROFILER', False):
        print "--------------------------\n" \
              "WARNING: PROFILER ENABLED!\n" \
              "--------------------------"
class Command(BaseCommand):
    """
    Deploys the website to the production server.
    Any additional arguments are passed directly to appcfg.py update.
    """
    help = "Calls appcfg.py update for the current project."
    args = "[any appcfg.py options]"
    def run_from_argv(self, argv):
        # Pre-deploy hooks run first; post-deploy hooks always run,
        # even when the upload itself fails.
        for command in PRE_DEPLOY_COMMANDS:
            call_command(command)
        try:
            run_appcfg(argv)
        finally:
            for command in POST_DEPLOY_COMMANDS:
                call_command(command)
| Python |
from django.core.management.base import BaseCommand
from google.appengine.api import apiproxy_stub_map
from google.appengine.datastore import datastore_stub_util
from optparse import make_option
class Command(BaseCommand):
    option_list = BaseCommand.option_list + (
        make_option('--noinput', action='store_false', dest='interactive', default=True,
            help='Tells Django to NOT prompt the user for input of any kind.'),
        make_option('--addrport', action='store', dest='addrport',
            type='string', default='',
            help='port number or ipaddr:port to run the server on'),
        make_option('--ipv6', '-6', action='store_true', dest='use_ipv6', default=False,
            help='Tells Django to use a IPv6 address.'),
    )
    help = 'Runs a development server with data from the given fixture(s).'
    args = '[fixture ...]'
    requires_model_validation = False
    def handle(self, *fixture_labels, **options):
        """Flush a dedicated '-testdb' datastore, load the fixtures into
        it, then start the dev server against it (reloader disabled)."""
        from django.core.management import call_command
        from django import db
        from ...db.base import get_datastore_paths
        from ...db.stubs import stub_manager
        verbosity = int(options.get('verbosity'))
        interactive = options.get('interactive')
        addrport = options.get('addrport')
        db_name = None
        # switch default database to test
        for name, settings in db.connections.databases.items():
            if settings['ENGINE'] == 'djangoappengine.db':
                db_name = name
                # Point every datastore file at a '-testdb' variant so
                # the real development data is left untouched.
                for key, path in get_datastore_paths(settings).items():
                    settings[key] = "%s-testdb" % path
                break
        # reset connections list so its repopulated
        db.connections._connections = {}
        db.connection = db.connections[db.DEFAULT_DB_ALIAS]
        # also reset stub manager
        stub_manager.active_stubs = None
        # run flush on that db
        conn = db.connections[db_name]
        conn.flush()
        # Temporarily change consistency policy to force apply loaded data
        datastore = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
        orig_consistency_policy = datastore._consistency_policy
        datastore.SetConsistencyPolicy(datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1))
        # Import the fixture data into the test database.
        call_command('loaddata', *fixture_labels, **{'verbosity': verbosity})
        datastore.SetConsistencyPolicy(orig_consistency_policy)
        # Run the development server. Turn off auto-reloading because it causes
        # a strange error -- it causes this handle() method to be called
        # multiple times.
        shutdown_message = '\nServer stopped.\nNote that the test database, %r, has not been deleted. You can explore it on your own.' % db_name
        call_command('runserver', addrport=addrport, shutdown_message=shutdown_message, use_reloader=False, use_ipv6=options['use_ipv6'])
| Python |
import logging
from optparse import make_option
import sys
from django.db import connections
from django.core.management.base import BaseCommand
from django.core.management.commands.runserver import BaseRunserverCommand
from django.core.exceptions import ImproperlyConfigured
from google.appengine.tools import dev_appserver_main
from ...boot import PROJECT_DIR
from ...db.base import DatabaseWrapper, get_datastore_paths
class Command(BaseRunserverCommand):
    """
    Overrides the default Django runserver command.

    Instead of starting the default Django development server this
    command fires up a copy of the full fledged App Engine
    dev_appserver that emulates the live environment your application
    will be deployed to.
    """
    option_list = BaseCommand.option_list + (
        make_option(
            '--debug', action='store_true', default=False,
            help="Prints verbose debugging messages to the console while " \
                 "running."),
        make_option(
            '--debug_imports', action='store_true', default=False,
            help="Prints debugging messages related to importing modules, " \
                 "including search paths and errors."),
        make_option(
            '-c', '--clear_datastore', action='store_true', default=False,
            help="Clears the datastore data and history files before " \
                 "starting the web server."),
        make_option(
            '--high_replication', action='store_true', default=False,
            help='Use the high replication datastore consistency model.'),
        make_option(
            '--require_indexes', action='store_true', default=False,
            help="Disables automatic generation of entries in the " \
                 "index.yaml file. Instead, when the application makes a " \
                 "query that requires that its index be defined in the file " \
                 "and the index definition is not found, an exception will " \
                 "be raised, similar to what would happen when running on " \
                 "App Engine."),
        make_option(
            '--enable_sendmail', action='store_true', default=False,
            help="Uses the local computer's Sendmail installation for " \
                 "sending email messages."),
        make_option(
            '--datastore_path',
            help="The path to use for the local datastore data file. " \
                 "The server creates this file if it does not exist."),
        make_option(
            '--blobstore_path',
            help="The path to use for the local blob data directory. " \
                 "The server creates this directory if it does not exist."),
        make_option(
            '--history_path',
            help="The path to use for the local datastore history file. " \
                 "The server uses the query history file to generate " \
                 "entries for index.yaml."),
        make_option(
            '--login_url',
            help="The relative URL to use for the Users sign-in page. " \
                 "Default is /_ah/login."),
        make_option(
            '--smtp_host',
            help="The hostname of the SMTP server to use for sending email " \
                 "messages."),
        make_option(
            '--smtp_port',
            help="The port number of the SMTP server to use for sending " \
                 "email messages."),
        make_option(
            '--smtp_user',
            help="The username to use with the SMTP server for sending " \
                 "email messages."),
        make_option(
            '--smtp_password',
            help="The password to use with the SMTP server for sending " \
                 "email messages."),
        make_option(
            '--use_sqlite', action='store_true', default=False,
            help="Use the new, SQLite datastore stub."),
        make_option(
            '--allow_skipped_files', action='store_true', default=False,
            help="Allow access to files listed in skip_files."),
        make_option(
            '--disable_task_running', action='store_true', default=False,
            help="When supplied, tasks will not be automatically run after " \
                 "submission and must be run manually in the local admin " \
                 "console."),
    )
    help = "Runs a copy of the App Engine development server."
    args = "[optional port number, or ipaddr:port]"

    def create_parser(self, prog_name, subcommand):
        """
        Creates and returns the ``OptionParser`` which will be used to
        parse the arguments to this command.
        """
        # Hack __main__ so --help in dev_appserver_main works OK.
        sys.modules['__main__'] = dev_appserver_main
        return super(Command, self).create_parser(prog_name, subcommand)

    def run_from_argv(self, argv):
        """
        Captures the program name, usually "manage.py".
        """
        self.progname = argv[0]
        super(Command, self).run_from_argv(argv)

    def run(self, *args, **options):
        """
        Starts the App Engine dev_appserver program for the Django
        project. The appserver is run with default parameters. If you
        need to pass any special parameters to the dev_appserver you
        will have to invoke it manually.

        Unlike the normal devserver, does not use the autoreloader as
        App Engine dev_appserver needs to be run from the main thread.
        """
        args = []
        # Set bind ip/port if specified.
        if self.addr:
            args.extend(['--address', self.addr])
        if self.port:
            args.extend(['--port', self.port])
        # If runserver is called using handle(), progname will not be
        # set.
        if not hasattr(self, 'progname'):
            self.progname = 'manage.py'
        # Add email settings unless the user supplied an SMTP host or
        # asked for local sendmail explicitly.
        from django.conf import settings
        if not options.get('smtp_host', None) and \
                not options.get('enable_sendmail', None):
            args.extend(['--smtp_host', settings.EMAIL_HOST,
                         '--smtp_port', str(settings.EMAIL_PORT),
                         '--smtp_user', settings.EMAIL_HOST_USER,
                         '--smtp_password', settings.EMAIL_HOST_PASSWORD])
        # Pass the application specific datastore location to the
        # server.
        preset_options = {}
        for name in connections:
            connection = connections[name]
            if isinstance(connection, DatabaseWrapper):
                for key, path in get_datastore_paths(
                        connection.settings_dict).items():
                    # XXX/TODO: Remove this when SDK 1.4.3 is released.
                    if key == 'prospective_search_path':
                        continue
                    arg = '--' + key
                    if arg not in args:
                        args.extend([arg, path])
                # Get dev_appserver option presets, to be applied below.
                preset_options = connection.settings_dict.get(
                    'DEV_APPSERVER_OPTIONS', {})
                break
        # Process the rest of the options here.
        bool_options = [
            'debug', 'debug_imports', 'clear_datastore', 'require_indexes',
            'high_replication', 'enable_sendmail', 'use_sqlite',
            'allow_skipped_files', 'disable_task_running', ]
        for opt in bool_options:
            # Idiomatic truthiness check (was "!= False"); all of these
            # are store_true flags defaulting to False.
            if options[opt]:
                args.append('--%s' % opt)
        str_options = [
            'datastore_path', 'blobstore_path', 'history_path', 'login_url',
            'smtp_host', 'smtp_port', 'smtp_user', 'smtp_password', ]
        for opt in str_options:
            # Identity check against None (was "!= None").
            if options.get(opt) is not None:
                args.extend(['--%s' % opt, options[opt]])
        # Fill any non-overridden options with presets from settings.
        for opt, value in preset_options.items():
            arg = '--%s' % opt
            if arg not in args:
                if value and opt in bool_options:
                    args.append(arg)
                elif opt in str_options:
                    args.extend([arg, value])
                # TODO: Issue warning about bogus option key(s)?
        # Reset logging level to INFO as dev_appserver will spew tons
        # of debug logs.
        logging.getLogger().setLevel(logging.INFO)
        # Append the current working directory to the arguments.
        dev_appserver_main.main([self.progname] + args + [PROJECT_DIR])
| Python |
from django.conf import settings
from django.http import HttpResponse
from django.utils.importlib import import_module
def warmup(request):
    """
    Provides default procedure for handling warmup requests on App
    Engine. Just add this view to your main urls.py.
    """
    # Pre-import the modules typically touched per request so the first
    # real request after this warmup hits warm module caches.
    for app in settings.INSTALLED_APPS:
        for suffix in ('urls', 'views', 'models'):
            dotted = '%s.%s' % (app, suffix)
            try:
                import_module(dotted)
            except ImportError:
                # Not every app ships all three submodules; best effort.
                pass
    charset = settings.DEFAULT_CHARSET
    return HttpResponse("Warmup done.",
                        content_type='text/plain; charset=%s' % charset)
| Python |
from functools import wraps
import sys
from django.db.models.fields import AutoField
from django.db.models.sql import aggregates as sqlaggregates
from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
from django.db.models.sql.where import AND, OR
from django.db.utils import DatabaseError, IntegrityError
from django.utils.tree import Node
from google.appengine.api.datastore import Entity, Query, MultiQuery, \
Put, Get, Delete
from google.appengine.api.datastore_errors import Error as GAEError
from google.appengine.api.datastore_types import Key, Text
from djangotoolbox.db.basecompiler import (
NonrelQuery,
NonrelCompiler,
NonrelInsertCompiler,
NonrelUpdateCompiler,
NonrelDeleteCompiler)
from .db_settings import get_model_indexes
from .expressions import ExpressionEvaluator
from .utils import commit_locked
# Valid query types (a dictionary is used for speedy lookups).
# Maps Django lookup type -> GAE filter operator.
OPERATORS_MAP = {
    'exact': '=',
    'gt': '>',
    'gte': '>=',
    'lt': '<',
    'lte': '<=',

    # The following operators are supported with special code below
    # (they map to None because they have no single GAE operator).
    'isnull': None,
    'in': None,
    'startswith': None,
    'range': None,
    'year': None,
}

# GAE filters used for negated Django lookups.
NEGATION_MAP = {
    'gt': '<=',
    'gte': '<',
    'lt': '>=',
    'lte': '>',
    # TODO: Support: "'exact': '!='" (it might actually become
    # individual '<' and '>' queries).
}

# In some places None is an allowed value, and we need to distinguish
# it from the lack of value.  Unique sentinel object.
NOT_PROVIDED = object()
def safe_call(func):
    """
    Causes the decorated function to reraise GAE datastore errors as
    Django DatabaseErrors.
    """
    @wraps(func)
    def _func(*args, **kwargs):
        try:
            return func(*args, **kwargs)
        except GAEError, e:
            # Python 2 three-argument raise: substitute Django's
            # DatabaseError while preserving the original traceback.
            raise DatabaseError, DatabaseError(str(e)), sys.exc_info()[2]
    return _func
class GAEQuery(NonrelQuery):
    """
    A simple App Engine query: no joins, no distinct, etc.
    """

    # ----------------------------------------------
    # Public API
    # ----------------------------------------------

    def __init__(self, compiler, fields):
        super(GAEQuery, self).__init__(compiler, fields)
        # GAE permits inequality filters on only one property per query.
        self.inequality_field = None
        # Keys collected from pk exact/in filters (batch-Get fast path);
        # None means "no pk filter applied".
        self.included_pks = None
        self.excluded_pks = ()
        self.has_negated_exact_filter = False
        self.ordering = []
        self.db_table = self.query.get_meta().db_table
        # Keys-only queries avoid fetching full entities.
        self.pks_only = (len(fields) == 1 and fields[0].primary_key)
        start_cursor = getattr(self.query, '_gae_start_cursor', None)
        end_cursor = getattr(self.query, '_gae_end_cursor', None)
        # A list of subqueries; __in filters fan this out and the
        # results are combined via MultiQuery in _build_query().
        self.gae_query = [Query(self.db_table, keys_only=self.pks_only,
                                cursor=start_cursor, end_cursor=end_cursor)]

    # This is needed for debugging.
    def __repr__(self):
        return '<GAEQuery: %r ORDER %r>' % (self.gae_query, self.ordering)

    @safe_call
    def fetch(self, low_mark, high_mark):
        """
        Generator yielding entity dicts for the [low_mark, high_mark)
        slice, skipping any excluded primary keys.
        """
        query = self._build_query()
        executed = False
        if self.excluded_pks and high_mark is not None:
            # Over-fetch to compensate for entities dropped below.
            high_mark += len(self.excluded_pks)
        if self.included_pks is not None:
            results = self.get_matching_pk(low_mark, high_mark)
        else:
            if high_mark is None:
                # Unbounded fetch.
                kw = {}
                if low_mark:
                    kw['offset'] = low_mark
                results = query.Run(**kw)
                executed = True
            elif high_mark > low_mark:
                results = query.Get(high_mark - low_mark, low_mark)
                executed = True
            else:
                results = ()
        for entity in results:
            # Keys-only queries yield Key objects directly.
            if isinstance(entity, Key):
                key = entity
            else:
                key = entity.key()
            if key in self.excluded_pks:
                continue
            yield self._make_entity(entity)
        if executed and not isinstance(query, MultiQuery):
            try:
                # Remember the compiled cursor so callers can resume.
                self.query._gae_cursor = query.GetCompiledCursor()
            except:
                # Best effort; not every query yields a cursor.
                pass

    @safe_call
    def count(self, limit=NOT_PROVIDED):
        if self.included_pks is not None:
            # NOTE(review): without an explicit limit this passes
            # NOT_PROVIDED as high_mark into get_matching_pk(), relying
            # on Python 2's arbitrary cross-type comparison -- confirm.
            return len(self.get_matching_pk(0, limit))
        if self.excluded_pks:
            return len(list(self.fetch(0, 2000)))
        # The datastore's Count() method has a 'limit' kwarg, which has
        # a default value (obviously). This value can be overridden to
        # anything you like, and importantly can be overridden to
        # unlimited by passing a value of None. Hence *this* method
        # has a default value of NOT_PROVIDED, rather than a default
        # value of None
        kw = {}
        if limit is not NOT_PROVIDED:
            kw['limit'] = limit
        return self._build_query().Count(**kw)

    @safe_call
    def delete(self):
        if self.included_pks is not None:
            keys = [key for key in self.included_pks if key is not None]
        else:
            # NOTE(review): fetch() requires (low_mark, high_mark)
            # arguments and yields entity dicts rather than Keys, so
            # this branch looks broken -- confirm before relying on it.
            keys = self.fetch()
        if keys:
            Delete(keys)

    @safe_call
    def order_by(self, ordering):
        # GAE doesn't have any kind of natural ordering?
        if not isinstance(ordering, bool):
            for field, ascending in ordering:
                # GAE uses the special '__key__' property for pk order.
                column = '__key__' if field.primary_key else field.column
                direction = Query.ASCENDING if ascending else Query.DESCENDING
                self.ordering.append((column, direction))

    @safe_call
    def add_filter(self, field, lookup_type, negated, value):
        """
        This function is used by the default add_filters()
        implementation.
        """
        if lookup_type not in OPERATORS_MAP:
            raise DatabaseError("Lookup type %r isn't supported." %
                                lookup_type)
        # GAE does not let you store empty lists, so we can tell
        # upfront that queriying for one will return nothing.
        if value in ([], ()):
            self.included_pks = []
            return
        # Optimization: batch-get by key; this is only suitable for
        # primary keys, not for anything that uses the key type.
        if field.primary_key and lookup_type in ('exact', 'in'):
            if self.included_pks is not None:
                raise DatabaseError("You can't apply multiple AND "
                                    "filters on the primary key. "
                                    "Did you mean __in=[...]?")
            if not isinstance(value, (tuple, list)):
                value = [value]
            pks = [pk for pk in value if pk is not None]
            if negated:
                self.excluded_pks = pks
            else:
                self.included_pks = pks
            return
        # We check for negation after lookup_type isnull because it
        # simplifies the code. All following lookup_type checks assume
        # that they're not negated.
        if lookup_type == 'isnull':
            if (negated and value) or not value:
                # TODO/XXX: Is everything greater than None?
                op = '>'
            else:
                op = '='
            value = None
        elif negated and lookup_type == 'exact':
            # Emulate != with a (<, >) pair of subqueries; GAE allows
            # only one such exclusion per query.
            if self.has_negated_exact_filter:
                raise DatabaseError("You can't exclude more than one __exact "
                                    "filter.")
            self.has_negated_exact_filter = True
            self._combine_filters(field, (('<', value), ('>', value)))
            return
        elif negated:
            try:
                op = NEGATION_MAP[lookup_type]
            except KeyError:
                raise DatabaseError("Lookup type %r can't be negated." %
                                    lookup_type)
            if self.inequality_field and field != self.inequality_field:
                raise DatabaseError("Can't have inequality filters on "
                                    "multiple fields (here: %r and %r)." %
                                    (field, self.inequality_field))
            self.inequality_field = field
        elif lookup_type == 'in':
            # Create sub-query combinations, one for each value.
            if len(self.gae_query) * len(value) > 30:
                raise DatabaseError("You can't query against more than "
                                    "30 __in filter value combinations.")
            op_values = [('=', v) for v in value]
            self._combine_filters(field, op_values)
            return
        elif lookup_type == 'startswith':
            # Lookup argument was converted to [arg, arg + u'\ufffd'].
            self._add_filter(field, '>=', value[0])
            self._add_filter(field, '<=', value[1])
            return
        elif lookup_type in ('range', 'year'):
            self._add_filter(field, '>=', value[0])
            op = '<=' if lookup_type == 'range' else '<'
            self._add_filter(field, op, value[1])
            return
        else:
            op = OPERATORS_MAP[lookup_type]
        self._add_filter(field, op, value)

    # ----------------------------------------------
    # Internal API
    # ----------------------------------------------

    def _add_filter(self, field, op, value):
        # Adds "column op value" to every subquery (GAE Query behaves
        # like a dict keyed by "column op" strings).
        for query in self.gae_query:
            # GAE uses a special property name for primary key filters.
            if field.primary_key:
                column = '__key__'
            else:
                column = field.column
            key = '%s %s' % (column, op)
            if isinstance(value, Text):
                raise DatabaseError("TextField is not indexed, by default, "
                                    "so you can't filter on it. Please add "
                                    "an index definition for the field %s "
                                    "on the model %s.%s as described here:\n"
                                    "http://www.allbuttonspressed.com/blog/django/2010/07/Managing-per-field-indexes-on-App-Engine" %
                                    (column, self.query.model.__module__,
                                     self.query.model.__name__))
            if key in query:
                # Multiple filters on the same column/op become a list.
                existing_value = query[key]
                if isinstance(existing_value, list):
                    existing_value.append(value)
                else:
                    query[key] = [existing_value, value]
            else:
                query[key] = value

    def _combine_filters(self, field, op_values):
        # Fans out the current subqueries: one copy per (op, value)
        # pair, each copy getting that single extra filter.
        gae_query = self.gae_query
        combined = []
        for query in gae_query:
            for op, value in op_values:
                self.gae_query = [Query(self.db_table,
                                        keys_only=self.pks_only)]
                self.gae_query[0].update(query)
                self._add_filter(field, op, value)
                combined.append(self.gae_query[0])
        self.gae_query = combined

    def _make_entity(self, entity):
        # Normalizes a Key or Entity into a plain dict keyed by column
        # names, with the pk column carrying the Key.
        if isinstance(entity, Key):
            key = entity
            entity = {}
        else:
            key = entity.key()
        entity[self.query.get_meta().pk.column] = key
        return entity

    @safe_call
    def _build_query(self):
        # Applies ordering and collapses the subqueries into a single
        # Query or a MultiQuery.
        for query in self.gae_query:
            query.Order(*self.ordering)
        if len(self.gae_query) > 1:
            return MultiQuery(self.gae_query, self.ordering)
        return self.gae_query[0]

    def get_matching_pk(self, low_mark=0, high_mark=None):
        # Batch-Get path: fetch the filtered pks directly and apply the
        # remaining filters/ordering/slicing in memory.
        if not self.included_pks:
            return []
        results = [result for result in Get(self.included_pks)
                   if result is not None and
                       self.matches_filters(result)]
        if self.ordering:
            results.sort(cmp=self.order_pk_filtered)
        # NOTE(review): the "- 1" makes the trim skip when
        # high_mark == len(results) - 1; looks like an off-by-one --
        # confirm against the expected slice semantics.
        if high_mark is not None and high_mark < len(results) - 1:
            results = results[:high_mark]
        if low_mark:
            results = results[low_mark:]
        return results

    def order_pk_filtered(self, lhs, rhs):
        # Python 2 cmp-style comparator used by get_matching_pk().
        left = dict(lhs)
        left[self.query.get_meta().pk.column] = lhs.key().to_path()
        right = dict(rhs)
        right[self.query.get_meta().pk.column] = rhs.key().to_path()
        return self._order_in_memory(left, right)

    def matches_filters(self, entity):
        """
        Checks if the GAE entity fetched from the database satisfies
        the current query's constraints.
        """
        item = dict(entity)
        item[self.query.get_meta().pk.column] = entity.key()
        return self._matches_filters(item, self.query.where)
class SQLCompiler(NonrelCompiler):
    """
    Base class for all GAE compilers.
    """
    # All nonrel query construction is delegated to GAEQuery.
    query_class = GAEQuery
class SQLInsertCompiler(NonrelInsertCompiler, SQLCompiler):
    """
    Builds a GAE Entity from the prepared field/value mapping and
    writes it with a single Put().
    """

    @safe_call
    def insert(self, data, return_id=False):
        """
        Stores one row; returns the Key produced by Put().
        """
        meta = self.query.get_meta()
        unindexed = get_model_indexes(self.query.model)['unindexed']
        entity_kwargs = {'unindexed_properties': []}
        props = {}
        for field, value in data.iteritems():
            if field.primary_key:
                # The value will already be a db.Key, but the Entity
                # constructor takes a name or id of the key, and will
                # automatically create a new key if neither is given.
                if value is not None:
                    entity_kwargs['id'] = value.id()
                    entity_kwargs['name'] = value.name()
                continue
            # GAE does not store empty lists (and even does not allow
            # passing empty lists to Entity.update) so skip them.
            if isinstance(value, (tuple, list)) and len(value) == 0:
                continue
            # Use column names as property names.
            props[field.column] = value
            if field in unindexed:
                entity_kwargs['unindexed_properties'].append(field.column)
        entity = Entity(meta.db_table, **entity_kwargs)
        entity.update(props)
        return Put(entity)
class SQLUpdateCompiler(NonrelUpdateCompiler, SQLCompiler):
    """
    Executes updates by first fetching the matching primary keys and
    then rewriting each entity individually inside a transaction.
    """

    def execute_sql(self, result_type=MULTI):
        # Modify query to fetch pks only and then execute the query
        # to get all pks.
        pk_field = self.query.model._meta.pk
        self.query.add_immediate_loading([pk_field.name])
        pks = [row for row in self.results_iter()]
        self.update_entities(pks, pk_field)
        # Return the number of rows touched, as Django expects.
        return len(pks)

    def update_entities(self, pks, pk_field):
        # Each row from results_iter() is a sequence whose first item
        # is the pk value.
        for pk in pks:
            self.update_entity(pk[0], pk_field)

    @commit_locked
    def update_entity(self, pk, pk_field):
        # Runs inside a datastore transaction (commit_locked): re-check
        # the query filters against the freshly-read entity so a
        # concurrent change doesn't get clobbered.
        gae_query = self.build_query()
        entity = Get(self.ops.value_for_db(pk, pk_field))
        if not gae_query.matches_filters(entity):
            return
        for field, _, value in self.query.values:
            if hasattr(value, 'prepare_database_save'):
                value = value.prepare_database_save(field)
            else:
                value = field.get_db_prep_save(value,
                                               connection=self.connection)
            if hasattr(value, 'evaluate'):
                # F-expression: evaluate it against the current entity.
                assert not value.negated
                assert not value.subtree_parents
                value = ExpressionEvaluator(value, self.query, entity,
                                            allow_joins=False)
            if hasattr(value, 'as_sql'):
                value = value.as_sql(lambda n: n, self.connection)
            entity[field.column] = self.ops.value_for_db(value, field)
        Put(entity)
class SQLDeleteCompiler(NonrelDeleteCompiler, SQLCompiler):
    """
    Deletion needs no GAE-specific behavior beyond the shared nonrel
    implementation.
    """
    pass
| Python |
from django.db.models.sql.expressions import SQLEvaluator
from django.db.models.expressions import ExpressionNode
# Maps Django expression-tree connectors to the Python operations used
# to evaluate them in memory (see ExpressionEvaluator below).
OPERATION_MAP = {
    ExpressionNode.ADD: lambda x, y: x + y,
    ExpressionNode.SUB: lambda x, y: x - y,
    ExpressionNode.MUL: lambda x, y: x * y,
    ExpressionNode.DIV: lambda x, y: x / y,
    ExpressionNode.MOD: lambda x, y: x % y,
    ExpressionNode.AND: lambda x, y: x & y,
    ExpressionNode.OR: lambda x, y: x | y,
}
class ExpressionEvaluator(SQLEvaluator):
    """
    Evaluates Django F-expressions in memory against a fetched GAE
    entity instead of translating them to SQL.
    """

    def __init__(self, expression, query, entity, allow_joins=True):
        super(ExpressionEvaluator, self).__init__(expression, query,
                                                  allow_joins)
        self.entity = entity

    ##################################################
    # Visitor methods for final expression evaluation #
    ##################################################

    def evaluate_node(self, node, qn, connection):
        operands = []
        for child in node.children:
            if hasattr(child, 'evaluate'):
                operand = child.evaluate(self, qn, connection)
            else:
                operand = child
            # None operands are silently dropped before applying the
            # connector's operation.
            if operand is not None:
                operands.append(operand)
        return OPERATION_MAP[node.connector](*operands)

    def evaluate_leaf(self, node, qn, connection):
        column = self.cols[node][1]
        return self.entity[qn(column)]
| Python |
import logging
import os
import time
from urllib2 import HTTPError, URLError
from google.appengine.ext.testbed import Testbed
from ..boot import PROJECT_DIR
from ..utils import appid, have_appserver
# Handler script values that mark a deployed remote_api endpoint in
# app.yaml (old-style CGI path and the WSGI application path).
REMOTE_API_SCRIPTS = (
    '$PYTHON_LIB/google/appengine/ext/remote_api/handler.py',
    'google.appengine.ext.remote_api.handler.application',
)


def auth_func():
    # Interactively prompts for Google Account credentials (Python 2
    # raw_input); passed to the remote_api stub as its auth callback.
    import getpass
    return raw_input("Login via Google Account (see note above if login fails): "), getpass.getpass("Password: ")


def rpc_server_factory(*args, ** kwargs):
    # RPC server with cookie persistence so the user is not re-prompted
    # for credentials on every run.
    from google.appengine.tools import appengine_rpc
    kwargs['save_cookies'] = True
    return appengine_rpc.HttpRpcServer(*args, ** kwargs)
class StubManager(object):
    """
    Tracks which set of App Engine API stubs is currently active
    ('local', 'test', 'remote' or None) and switches between them.
    """

    def __init__(self):
        self.testbed = Testbed()
        # One of None, 'local', 'test' or 'remote'.
        self.active_stubs = None
        # Remembers what was active before activate_test_stubs() so
        # deactivate_test_stubs() can restore it.
        self.pre_test_stubs = None

    def setup_stubs(self, connection):
        # Default setup: local stubs, unless already configured or
        # running inside a real appserver (which provides its own).
        if self.active_stubs is not None:
            return
        if not have_appserver:
            self.setup_local_stubs(connection)

    def activate_test_stubs(self, connection):
        # Activates in-memory testbed stubs for running the test suite.
        if self.active_stubs == 'test':
            return
        os.environ['HTTP_HOST'] = "%s.appspot.com" % appid
        appserver_opts = connection.settings_dict.get('DEV_APPSERVER_OPTIONS', {})
        high_replication = appserver_opts.get('high_replication', False)
        datastore_opts = {}
        if high_replication:
            # Emulate the HR datastore's eventual consistency in tests.
            from google.appengine.datastore import datastore_stub_util
            datastore_opts['consistency_policy'] = datastore_stub_util.PseudoRandomHRConsistencyPolicy(probability=1)
        self.testbed.activate()
        self.pre_test_stubs = self.active_stubs
        self.active_stubs = 'test'
        self.testbed.init_datastore_v3_stub(**datastore_opts)
        self.testbed.init_memcache_stub()
        self.testbed.init_taskqueue_stub(auto_task_running=True, root_path=PROJECT_DIR)
        self.testbed.init_urlfetch_stub()
        self.testbed.init_user_stub()
        self.testbed.init_xmpp_stub()
        self.testbed.init_channel_stub()

    def deactivate_test_stubs(self):
        # Restores whatever stub set was active before the test run.
        if self.active_stubs == 'test':
            self.testbed.deactivate()
            self.active_stubs = self.pre_test_stubs

    def setup_local_stubs(self, connection):
        # Configures the SDK's dev_appserver stubs against the local
        # on-disk datastore files.
        if self.active_stubs == 'local':
            return
        from .base import get_datastore_paths
        from google.appengine.tools import dev_appserver_main
        args = dev_appserver_main.DEFAULT_ARGS.copy()
        args.update(get_datastore_paths(connection.settings_dict))
        args.update(connection.settings_dict.get('DEV_APPSERVER_OPTIONS', {}))
        # Temporarily raise the log level: SetupStubs is very chatty.
        log_level = logging.getLogger().getEffectiveLevel()
        logging.getLogger().setLevel(logging.WARNING)
        from google.appengine.tools import dev_appserver
        dev_appserver.SetupStubs('dev~' + appid, **args)
        logging.getLogger().setLevel(log_level)
        self.active_stubs = 'local'

    def setup_remote_stubs(self, connection):
        # Points the API stubs at a deployed application via remote_api.
        if self.active_stubs == 'remote':
            return
        if not connection.remote_api_path:
            # Discover the remote_api handler URL from app.yaml.
            from ..utils import appconfig
            for handler in appconfig.handlers:
                if handler.script in REMOTE_API_SCRIPTS:
                    connection.remote_api_path = handler.url.split('(', 1)[0]
                    break
        server = '%s.%s' % (connection.remote_app_id, connection.domain)
        remote_url = 'https://%s%s' % (server, connection.remote_api_path)
        logging.info("Setting up remote_api for '%s' at %s." %
                     (connection.remote_app_id, remote_url))
        if not have_appserver:
            logging.info(
                "Connecting to remote_api handler.\n\n"
                "IMPORTANT: Check your login method settings in the "
                "App Engine Dashboard if you have problems logging in. "
                "Login is only supported for Google Accounts.")
        from google.appengine.ext.remote_api import remote_api_stub
        remote_api_stub.ConfigureRemoteApi(None,
            connection.remote_api_path, auth_func, servername=server,
            secure=connection.secure_remote_api,
            rpc_server_factory=rpc_server_factory)
        # Retry authentication with exponential backoff (1..16 s);
        # the try/else break exits as soon as a call succeeds.
        retry_delay = 1
        while retry_delay <= 16:
            try:
                remote_api_stub.MaybeInvokeAuthentication()
            except HTTPError, e:
                if not have_appserver:
                    logging.info("Retrying in %d seconds..." % retry_delay)
                    time.sleep(retry_delay)
                    retry_delay *= 2
            else:
                break
        else:
            # while/else: all retries exhausted -- try once more and
            # surface a descriptive error if it still fails.
            try:
                remote_api_stub.MaybeInvokeAuthentication()
            except HTTPError, e:
                raise URLError("%s\n"
                               "Couldn't reach remote_api handler at %s.\n"
                               "Make sure you've deployed your project and "
                               "installed a remote_api handler in app.yaml. "
                               "Note that login is only supported for "
                               "Google Accounts. Make sure you've configured "
                               "the correct authentication method in the "
                               "App Engine Dashboard." % (e, remote_url))
        logging.info("Now using the remote datastore for '%s' at %s." %
                     (connection.remote_app_id, remote_url))
        self.active_stubs = 'remote'
stub_manager = StubManager()
| Python |
from djangotoolbox.db.creation import NonrelDatabaseCreation
from .db_settings import get_model_indexes
from .stubs import stub_manager
class DatabaseCreation(NonrelDatabaseCreation):
    """
    Maps Django field types to GAE datastore storage types and manages
    the test-database stub lifecycle.
    """

    # For TextFields and XMLFields we'll default to the unindexable,
    # but not length-limited, db.Text (db_type of "string" fields is
    # overriden indexed / unindexed fields).

    # GAE datastore cannot process sets directly, so we'll store them
    # as lists, it also can't handle dicts so we'll store DictField and
    # EmbeddedModelFields pickled as Blobs (pickled using the binary
    # protocol 2, even though they used to be serialized with the ascii
    # protocol 0 -- the deconversion is the same for both).
    data_types = dict(NonrelDatabaseCreation.data_types, **{
        'TextField': 'text',
        'XMLField': 'text',
        'SetField': 'list',
        'DictField': 'bytes',
        'EmbeddedModelField': 'bytes',
    })

    def db_type(self, field):
        """
        Provides a choice to continue using db.Key just for primary key
        storage or to use it for all references (ForeignKeys and other
        relations).

        We also force the "string" db_type (plain string storage) if a
        field is to be indexed, and the "text" db_type (db.Text) if
        it's registered as unindexed.
        """
        if self.connection.settings_dict.get('STORE_RELATIONS_AS_DB_KEYS'):
            if field.primary_key or field.rel is not None:
                return 'key'
        # Primary keys were processed as db.Keys; for related fields
        # the db_type of primary key of the referenced model was used,
        # but RelatedAutoField type was not defined and resulted in
        # "integer" being used for relations to models with AutoFields.
        # TODO: Check with Positive/SmallIntegerField primary keys.
        else:
            if field.primary_key:
                return 'key'
            if field.rel is not None:
                related_field = field.rel.get_related_field()
                if related_field.get_internal_type() == 'AutoField':
                    return 'integer'
                else:
                    return related_field.db_type(connection=self.connection)
        db_type = field.db_type(connection=self.connection)
        # Override db_type of "string" fields according to indexing.
        if db_type in ('string', 'text'):
            indexes = get_model_indexes(field.model)
            if field.attname in indexes['indexed']:
                return 'string'
            elif field.attname in indexes['unindexed']:
                return 'text'
        return db_type

    def _create_test_db(self, *args, **kw):
        # Remember whether we switched stubs so _destroy_test_db() can
        # tell if it needs to restore anything.
        self._had_test_stubs = stub_manager.active_stubs != 'test'
        if self._had_test_stubs:
            stub_manager.activate_test_stubs(self.connection)

    def _destroy_test_db(self, *args, **kw):
        if self._had_test_stubs:
            stub_manager.deactivate_test_stubs()
            stub_manager.setup_stubs(self.connection)
        del self._had_test_stubs
| Python |
from google.appengine.datastore.datastore_query import Cursor
from django.db import models, DEFAULT_DB_ALIAS
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.3, 2.4 fallback.
class CursorQueryMixin(object):
    """
    Query mixin that carries the GAE cursor attributes across
    Query.clone() calls, which would otherwise drop them.
    """

    def clone(self, *args, **kwargs):
        for attr in ('_gae_cursor', '_gae_start_cursor', '_gae_end_cursor'):
            kwargs[attr] = getattr(self, attr, None)
        return super(CursorQueryMixin, self).clone(*args, **kwargs)
def get_cursor(queryset):
    """
    Returns a websafe string for the datastore cursor recorded after
    evaluating the given queryset.
    """
    # Force the queryset to execute so _gae_cursor gets populated.
    len(queryset)
    return Cursor.to_websafe_string(
        getattr(queryset.query, '_gae_cursor', None))
def set_cursor(queryset, start=None, end=None):
    """
    Returns a copy of the queryset bounded by the given websafe
    start/end cursor strings.
    """
    queryset = queryset.all()
    # Mix CursorQueryMixin into the query class dynamically (only once)
    # so the cursor attributes survive query cloning.
    if CursorQueryMixin not in queryset.query.__class__.__bases__:
        class CursorQuery(CursorQueryMixin, queryset.query.__class__):
            pass
        queryset.query = queryset.query.clone(klass=CursorQuery)
    if start is not None:
        start = Cursor.from_websafe_string(start)
        queryset.query._gae_start_cursor = start
    if end is not None:
        end = Cursor.from_websafe_string(end)
        queryset.query._gae_end_cursor = end
    return queryset
def commit_locked(func_or_using=None, retries=None, xg=False):
    """
    Decorator that locks rows on DB reads.

    Usable bare (@commit_locked), with a database alias
    (@commit_locked('other')) or with transaction options
    (@commit_locked(retries=3, xg=True)); the wrapped function runs
    inside a datastore transaction.
    """
    def inner_commit_locked(func, using=None):
        def _commit_locked(*args, **kw):
            from google.appengine.api.datastore import RunInTransactionOptions
            from google.appengine.datastore.datastore_rpc import TransactionOptions
            option_dict = {}
            if retries:
                option_dict['retries'] = retries
            if xg:
                # Cross-group transaction.
                option_dict['xg'] = True
            options = TransactionOptions(**option_dict)
            return RunInTransactionOptions(options, func, *args, **kw)
        return wraps(func)(_commit_locked)
    if func_or_using is None:
        func_or_using = DEFAULT_DB_ALIAS
    if callable(func_or_using):
        # Used directly as @commit_locked on a function.
        return inner_commit_locked(func_or_using, DEFAULT_DB_ALIAS)
    # Used as a decorator factory with a database alias argument.
    return lambda func: inner_commit_locked(func, func_or_using)
| Python |
from django.conf import settings
from django.utils.importlib import import_module
# TODO: Add autodiscover() and make API more like dbindexer's
# register_index.
# TODO: Add support for eventual consistency setting on specific
# models.
# Dotted names of settings modules (settings.GAE_SETTINGS_MODULES) that
# declare per-field index configuration via a FIELD_INDEXES mapping.
_MODULE_NAMES = getattr(settings, 'GAE_SETTINGS_MODULES', ())

# Merged index configuration; populated lazily by get_indexes().
FIELD_INDEXES = None
def get_model_indexes(model):
    """
    Returns {'indexed': [...], 'unindexed': [...]} for the given model,
    merging the configuration of all its base classes (base-most
    classes first, so subclasses extend their parents' settings).
    """
    indexes = get_indexes()
    combined = {'indexed': [], 'unindexed': []}
    for klass in reversed(model.mro()):
        spec = indexes.get(klass, {})
        for kind in ('indexed', 'unindexed'):
            combined[kind].extend(spec.get(kind, ()))
    return combined
def get_indexes():
    """
    Returns the merged FIELD_INDEXES mapping from all configured
    settings modules, building and caching it on first use.
    """
    global FIELD_INDEXES
    if FIELD_INDEXES is None:
        merged = {}
        for module_name in _MODULE_NAMES:
            merged.update(import_module(module_name).FIELD_INDEXES)
        FIELD_INDEXES = merged
    return FIELD_INDEXES
| Python |
import datetime
import decimal
import logging
import os
import shutil
from django.db.utils import DatabaseError
from google.appengine.api.datastore import Delete, Query
from google.appengine.api.datastore_errors import BadArgumentError, \
BadValueError
from google.appengine.api.datastore_types import Blob, Key, Text, \
ValidateInteger
from google.appengine.api.namespace_manager import set_namespace
from google.appengine.ext.db.metadata import get_kinds, get_namespaces
from djangotoolbox.db.base import (
NonrelDatabaseClient,
NonrelDatabaseFeatures,
NonrelDatabaseIntrospection,
NonrelDatabaseOperations,
NonrelDatabaseValidation,
NonrelDatabaseWrapper)
from djangotoolbox.db.utils import decimal_to_string
from ..boot import DATA_ROOT
from ..utils import appid, on_production_server
from .creation import DatabaseCreation
from .stubs import stub_manager
# Default on-disk locations of the local development datastore files,
# keyed by the dev_appserver option names that override them.
DATASTORE_PATHS = {
    'datastore_path': os.path.join(DATA_ROOT, 'datastore'),
    'blobstore_path': os.path.join(DATA_ROOT, 'blobstore'),
    #'rdbms_sqlite_path': os.path.join(DATA_ROOT, 'rdbms'),
    'prospective_search_path': os.path.join(DATA_ROOT, 'prospective-search'),
}
def key_from_path(db_table, value):
    """
    Workaround for GAE choosing not to validate integer ids when
    creating keys.

    TODO: Should be removed if it gets fixed.
    """
    if isinstance(value, (int, long)):
        # Rejects out-of-range ids up front instead of building an
        # invalid Key silently (Python 2 'long').
        ValidateInteger(value, 'id')
    return Key.from_path(db_table, value)
def get_datastore_paths(options):
    """
    Returns the datastore path settings, taking each value from
    *options* when present and falling back to the defaults in
    DATASTORE_PATHS otherwise.
    """
    return dict((name, options.get(name, default))
                for name, default in DATASTORE_PATHS.items())
def destroy_datastore(paths):
    """
    Destroys the appengine datastore at the specified paths.

    *paths* is a mapping whose values are filesystem paths; falsy
    values are skipped, directories are removed recursively, plain
    files are unlinked, and already-missing paths are ignored.
    """
    for path in paths.values():
        if not path:
            continue
        try:
            if os.path.isdir(path):
                shutil.rmtree(path)
            else:
                os.remove(path)
        # "except X as e" instead of the Python-2-only "except X, e"
        # form -- valid on Python 2.6+ and Python 3.
        except OSError as error:
            # errno 2 (ENOENT): path already gone, nothing to clear.
            if error.errno != 2:
                logging.error("Failed to clear datastore: %s" % error)
class DatabaseFeatures(NonrelDatabaseFeatures):
    """Feature flags describing what the GAE datastore supports as keys."""
    # GAE only allow strictly positive integers (and strings) to be
    # used as key values.
    allows_primary_key_0 = False
    # Anything that results in a something different than a positive
    # integer or a string cannot be directly used as a key on GAE.
    # Note that DecimalField values are encoded as strings, so can be
    # used as keys.
    # With some encoding, we could allow most fields to be used as a
    # primary key, but for now only mark what can and what cannot be
    # safely used.
    supports_primary_key_on = \
        NonrelDatabaseFeatures.supports_primary_key_on - set((
        'FloatField', 'DateField', 'DateTimeField', 'TimeField',
        'BooleanField', 'NullBooleanField', 'TextField', 'XMLField'))
class DatabaseOperations(NonrelDatabaseOperations):
    """Converts values between Django field types and the datastore's
    native types (db.Key, Text, Blob, datetime)."""
    compiler_module = __name__.rsplit('.', 1)[0] + '.compiler'
    # Date used to store times as datetimes.
    # TODO: Use just date()?
    DEFAULT_DATE = datetime.date(1970, 1, 1)
    # Time used to store dates as datetimes.
    DEFAULT_TIME = datetime.time()
    def sql_flush(self, style, tables, sequences):
        """No SQL on GAE: flush the connection directly and return no
        statements for Django to execute."""
        self.connection.flush()
        return []
    def value_to_db_auto(self, value):
        """
        New keys generated by the GAE datastore hold longs.
        """
        if value is None:
            return None
        return long(value)
    def value_for_db(self, value, field, lookup=None):
        """
        We'll simulate `startswith` lookups with two inequalities:
        property >= value and property <= value + u'\ufffd',
        and need to "double" the value before passing it through the
        actual datastore conversions.
        """
        super_value_for_db = super(DatabaseOperations, self).value_for_db
        if lookup == 'startswith':
            # Return both range endpoints; the compiler turns them into
            # the >= / <= pair described above.
            return [super_value_for_db(value, field, lookup),
                    super_value_for_db(value + u'\ufffd', field, lookup)]
        return super_value_for_db(value, field, lookup)
    def _value_for_db(self, value, field, field_kind, db_type, lookup):
        """
        GAE database may store a restricted set of Python types, for
        some cases it has its own types like Key, Text or Blob.
        TODO: Consider moving empty list handling here (from insert).
        """
        # Store Nones as Nones to handle nullable fields, even keys.
        if value is None:
            return None
        # Parent can handle iterable fields and Django wrappers.
        value = super(DatabaseOperations, self)._value_for_db(
            value, field, field_kind, db_type, lookup)
        # Convert decimals to strings preserving order.
        if field_kind == 'DecimalField':
            value = decimal_to_string(
                value, field.max_digits, field.decimal_places)
        # Create GAE db.Keys from Django keys.
        # We use model's table name as key kind (the table of the model
        # of the instance that the key identifies, for ForeignKeys and
        # other relations).
        if db_type == 'key':
            # value = self._value_for_db_key(value, field_kind)
            try:
                value = key_from_path(field.model._meta.db_table, value)
            except (BadArgumentError, BadValueError,):
                raise DatabaseError("Only strings and positive integers "
                                    "may be used as keys on GAE.")
        # Store all strings as unicode, use db.Text for longer content.
        elif db_type == 'string' or db_type == 'text':
            if isinstance(value, str):
                value = value.decode('utf-8')
            if db_type == 'text':
                value = Text(value)
        # Store all date / time values as datetimes, by using some
        # default time or date.
        elif db_type == 'date':
            value = datetime.datetime.combine(value, self.DEFAULT_TIME)
        elif db_type == 'time':
            value = datetime.datetime.combine(self.DEFAULT_DATE, value)
        # Store BlobField, DictField and EmbeddedModelField values as Blobs.
        elif db_type == 'bytes':
            value = Blob(value)
        return value
    def _value_from_db(self, value, field, field_kind, db_type):
        """
        Undoes conversions done in value_for_db.
        """
        # We could have stored None for a null field.
        if value is None:
            return None
        # All keys were converted to the Key class.
        if db_type == 'key':
            assert isinstance(value, Key), \
                "GAE db.Key expected! Try changing to old storage, " \
                "dumping data, changing to new storage and reloading."
            assert value.parent() is None, "Parents are not yet supported!"
            value = value.id_or_name()
            # value = self._value_from_db_key(value, field_kind)
        # Always retrieve strings as unicode (old datasets may
        # contain non-unicode strings).
        elif db_type == 'string' or db_type == 'text':
            if isinstance(value, str):
                value = value.decode('utf-8')
            else:
                value = unicode(value)
        # Dates and times are stored as datetimes, drop the added part.
        elif db_type == 'date':
            value = value.date()
        elif db_type == 'time':
            value = value.time()
        # Convert GAE Blobs to plain strings for Django.
        elif db_type == 'bytes':
            value = str(value)
        # Revert the decimal-to-string encoding.
        if field_kind == 'DecimalField':
            value = decimal.Decimal(value)
        return super(DatabaseOperations, self)._value_from_db(
            value, field, field_kind, db_type)
    # Commented-out experimental key encoding kept for reference:
    # def _value_for_db_key(self, value, field_kind):
    #     """
    #     Converts values to be used as entity keys to strings,
    #     trying (but not fully succeeding) to preserve comparisons.
    #     """
    #     # Bools as positive integers.
    #     if field_kind == 'BooleanField':
    #         value = int(value) + 1
    #     # Encode floats as strings.
    #     elif field_kind == 'FloatField':
    #         value = self.value_to_db_decimal(
    #             decimal.Decimal(value), None, None)
    #     # Integers as strings (string keys sort after int keys, so
    #     # all need to be encoded to preserve comparisons).
    #     elif field_kind in ('IntegerField', 'BigIntegerField',
    #             'PositiveIntegerField', 'PositiveSmallIntegerField',
    #             'SmallIntegerField'):
    #         value = self.value_to_db_decimal(
    #             decimal.Decimal(value), None, 0)
    #     return value
    # def value_from_db_key(self, value, field_kind):
    #     """
    #     Decodes value previously encoded in a key.
    #     """
    #     if field_kind == 'BooleanField':
    #         value = bool(value - 1)
    #     elif field_kind == 'FloatField':
    #         value = float(value)
    #     elif field_kind in ('IntegerField', 'BigIntegerField',
    #             'PositiveIntegerField', 'PositiveSmallIntegerField',
    #             'SmallIntegerField'):
    #         value = int(value)
    #     return value
class DatabaseClient(NonrelDatabaseClient):
    # No GAE-specific shell client; inherit the nonrel default behavior.
    pass
class DatabaseValidation(NonrelDatabaseValidation):
    # No GAE-specific field validation; inherit the nonrel defaults.
    pass
class DatabaseIntrospection(NonrelDatabaseIntrospection):
    def table_names(self):
        """
        Returns a list of names of all tables that exist in the
        database.
        """
        # '__kind__' is the datastore's metadata kind listing entity kinds.
        return [kind.key().name() for kind in Query(kind='__kind__').Run()]
class DatabaseWrapper(NonrelDatabaseWrapper):
    """Connection object wiring up all GAE backend components and
    activating either local or remote API stubs."""
    def __init__(self, *args, **kwds):
        super(DatabaseWrapper, self).__init__(*args, **kwds)
        self.features = DatabaseFeatures(self)
        self.ops = DatabaseOperations(self)
        self.client = DatabaseClient(self)
        self.creation = DatabaseCreation(self)
        self.validation = DatabaseValidation(self)
        self.introspection = DatabaseIntrospection(self)
        options = self.settings_dict
        # Remote-API configuration; only used when REMOTE is enabled.
        self.remote_app_id = options.get('REMOTE_APP_ID', appid)
        self.domain = options.get('DOMAIN', 'appspot.com')
        self.remote_api_path = options.get('REMOTE_API_PATH', None)
        self.secure_remote_api = options.get('SECURE_REMOTE_API', True)
        remote = options.get('REMOTE', False)
        # Never route through the remote API from production itself.
        if on_production_server:
            remote = False
        if remote:
            stub_manager.setup_remote_stubs(self)
        else:
            stub_manager.setup_stubs(self)
    def flush(self):
        """
        Helper function to remove the current datastore and re-open the
        stubs.
        """
        if stub_manager.active_stubs == 'remote':
            # Remote flush destroys *production* data, so require the
            # operator to retype a random 4-letter confirmation code.
            import random
            import string
            code = ''.join([random.choice(string.ascii_letters)
                            for x in range(4)])
            print "\n\n!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
            print "Warning! You're about to delete the *production* datastore!"
            print "Only models defined in your INSTALLED_APPS can be removed!"
            print "If you want to clear the whole datastore you have to use " \
                  "the datastore viewer in the dashboard. Also, in order to " \
                  "delete all unneeded indexes you have to run appcfg.py " \
                  "vacuum_indexes."
            print "In order to proceed you have to enter the following code:"
            print code
            response = raw_input("Repeat: ")
            if code == response:
                print "Deleting..."
                delete_all_entities()
                print "Datastore flushed! Please check your dashboard's " \
                      "datastore viewer for any remaining entities and " \
                      "remove all unneeded indexes with appcfg.py " \
                      "vacuum_indexes."
            else:
                print "Aborting."
                exit()
        elif stub_manager.active_stubs == 'test':
            # Test stubs hold data in memory; just recreate them.
            stub_manager.deactivate_test_stubs()
            stub_manager.activate_test_stubs(self)
        else:
            # Local development: wipe the on-disk stub data and restart.
            destroy_datastore(get_datastore_paths(self.settings_dict))
            stub_manager.setup_local_stubs(self)
def delete_all_entities():
    """Delete every entity in every namespace, in key batches of 200.

    Kinds starting with '__' are datastore-internal metadata and are
    skipped.
    """
    for namespace in get_namespaces():
        set_namespace(namespace)
        for kind in get_kinds():
            if kind.startswith('__'):
                continue
            while True:
                # keys_only keeps the batched fetch cheap.
                data = Query(kind=kind, keys_only=True).Get(200)
                if not data:
                    break
                Delete(data)
| Python |
from google.appengine.api.memcache import *
| Python |
from email.MIMEBase import MIMEBase
from django.core.mail.backends.base import BaseEmailBackend
from django.core.mail import EmailMultiAlternatives
from django.core.exceptions import ImproperlyConfigured
from google.appengine.api import mail as aeemail
from google.appengine.runtime import apiproxy_errors
def _send_deferred(message, fail_silently=False):
    """Task-queue entry point that performs the actual message send."""
    try:
        message.send()
    except (aeemail.Error, apiproxy_errors.Error):
        if fail_silently:
            return
        raise
class EmailBackend(BaseEmailBackend):
    """Django e-mail backend sending through the App Engine Mail API."""
    # Subclasses set this to True to queue sends on the task queue.
    can_defer = False
    def send_messages(self, email_messages):
        """Send each message, returning the number successfully handled."""
        num_sent = 0
        for message in email_messages:
            if self._send(message):
                num_sent += 1
        return num_sent
    def _copy_message(self, message):
        """
        Creates and returns App Engine EmailMessage class from message.
        """
        gmsg = aeemail.EmailMessage(sender=message.from_email,
                                    to=message.to,
                                    subject=message.subject,
                                    body=message.body)
        if message.extra_headers.get('Reply-To', None):
            gmsg.reply_to = message.extra_headers['Reply-To']
        if message.cc:
            gmsg.cc = list(message.cc)
        if message.bcc:
            gmsg.bcc = list(message.bcc)
        if message.attachments:
            # Must be populated with (filename, filecontents) tuples.
            attachments = []
            for attachment in message.attachments:
                if isinstance(attachment, MIMEBase):
                    attachments.append((attachment.get_filename(),
                                        attachment.get_payload(decode=True)))
                else:
                    attachments.append((attachment[0], attachment[1]))
            gmsg.attachments = attachments
        # Look for HTML alternative content.
        if isinstance(message, EmailMultiAlternatives):
            for content, mimetype in message.alternatives:
                if mimetype == 'text/html':
                    gmsg.html = content
                    break
        return gmsg
    def _send(self, message):
        """Convert and send (or defer) one message.

        Returns True on success, False on failure when fail_silently.
        """
        try:
            message = self._copy_message(message)
        except (ValueError, aeemail.InvalidEmailError), err:
            # Malformed message (e.g. bad address): log, then report or raise.
            import logging
            logging.warn(err)
            if not self.fail_silently:
                raise
            return False
        if self.can_defer:
            self._defer_message(message)
            return True
        try:
            message.send()
        except (aeemail.Error, apiproxy_errors.Error):
            if not self.fail_silently:
                raise
            return False
        return True
    def _defer_message(self, message):
        """Queue the converted message on the EMAIL_QUEUE_NAME task queue."""
        from google.appengine.ext import deferred
        from django.conf import settings
        queue_name = getattr(settings, 'EMAIL_QUEUE_NAME', 'default')
        deferred.defer(_send_deferred,
                       message,
                       fail_silently=self.fail_silently,
                       _queue=queue_name)
class AsyncEmailBackend(EmailBackend):
    # Same backend, but sends happen asynchronously via deferred tasks.
    can_defer = True
| Python |
# Initialize Django.
from djangoappengine import main
from django.utils.importlib import import_module
from django.conf import settings
# Load all models.py to ensure signal handling installation or index
# loading of some apps
for app in settings.INSTALLED_APPS:
    try:
        import_module('%s.models' % (app))
    except ImportError:
        # Apps without a models module are simply skipped.
        pass
# NOTE(review): this deliberately shadows the 'main' imported above --
# that first import appears to be needed only for its Django-initializing
# side effects.  Confirm before renaming either import.
from google.appengine.ext.deferred.handler import main
from google.appengine.ext.deferred.deferred import application
if __name__ == '__main__':
    main()
| Python |
from django.conf import settings
# Register dbindexer lookups so case-insensitive filters work on the
# datastore (which has no native iexact support).
if 'django.contrib.auth' in settings.INSTALLED_APPS:
    from dbindexer.api import register_index
    from django.contrib.auth.models import User
    # auth looks up users by username/email case-insensitively.
    register_index(User, {
        'username': 'iexact',
        'email': 'iexact',
    })
if 'django.contrib.admin' in settings.INSTALLED_APPS:
    from dbindexer.api import register_index
    from django.contrib.admin.models import LogEntry
    # The admin filters log entries by object_id.
    register_index(LogEntry, {
        'object_id': 'exact',
    })
| Python |
from django.forms import ModelForm, Form
from django import forms
from models import user_profile, admins
class ProfileForm(ModelForm):
    """Self-service profile editing; only user-editable fields exposed."""
    class Meta:
        model = user_profile
        fields = ('f_name', 'l_name', 'email_visibility', 'about_me')
class AdminProfileForm(ModelForm):
    """Profile form without the field restriction, for admin use."""
    # NOTE(review): Meta declares neither 'fields' nor 'exclude'; older
    # Django exposes all editable fields here -- confirm that is intended.
    class Meta:
        model = user_profile
class NickForm(forms.Form):
    """Registration form asking a new user to pick a unique nick."""
    nick = forms.CharField(max_length=20)

    def clean_nick(self):
        """Validate the nick's format and that it is not already taken."""
        import re
        # Nick Validation: 3-20 lowercase alphanumerics/underscores,
        # starting with a letter.
        regex = re.compile(r'^[a-z][a-z0-9_]+$')
        nick = self.cleaned_data['nick']
        if not (3 <= len(nick) <= 20) or regex.match(nick) is None:
            raise forms.ValidationError("Nick should have 3-20 lowercase alphanumeric characters, underscores and should start with a letter")
        # Check if nick is available
        try:
            user_profile.objects.get(nick=nick)
        except user_profile.DoesNotExist:
            return nick
        raise forms.ValidationError("Nick %s is already taken!" % nick)
class DelForm(forms.Form):
    """Single-checkbox confirmation for account deletion."""
    confirm = forms.BooleanField(label='Confirm Account Deletion?')
class ConfirmForm(forms.Form):
    """Generic single-checkbox confirmation form (required checkbox)."""
    confirm = forms.BooleanField(label='Confirm?')
class AddAdminForm(forms.Form):
    """Grant admin rights to an existing, active, non-admin user."""
    email = forms.EmailField(label='Email of the existing user to give admins rights to')

    def clean_email(self):
        """Reject unknown, inactive, or already-admin users."""
        email = self.cleaned_data['email']
        # error if user does not exist
        try:
            target_user = user_profile.objects.get(email=email)
        except user_profile.DoesNotExist:
            raise forms.ValidationError("User with such email doesn't exist")
        # error if user is inactive
        if not target_user.is_active:
            raise forms.ValidationError("Inactive User cannot be given admin rights")
        # error if user is already an admin
        try:
            admins.objects.get(email=email)
        except admins.DoesNotExist:
            # No problems
            return email
        raise forms.ValidationError("User already an admin")
| Python |
from django.db import models
import logging
class user_profile(models.Model):
    """Site profile linked to a Google account via google_user_id."""
    # Immutable identity fields (set once at registration).
    google_user_id = models.CharField(max_length=200, editable=False, unique=True)
    f_name = models.CharField(max_length=30, null=True, blank=True, verbose_name='First Name')
    l_name = models.CharField(max_length=30, null=True, blank=True, verbose_name='Last Name')
    # Unique public handle; validated by NickForm, never editable afterwards.
    nick = models.CharField(max_length=20, editable=False, unique=True)
    email_visibility = models.BooleanField(default=False, verbose_name='E-mail Visible in Profile')
    about_me = models.TextField(max_length=400, null=True, blank=True, verbose_name='About Me')
    email = models.EmailField(editable=False, unique=True)
    is_active = models.BooleanField(default=True, verbose_name='Is Active')
    def __unicode__(self):
        return self.nick
    def get_url(self):
        # Canonical profile URL (matches the /user/<nick>/ url pattern).
        return '/user/%s/' % self.nick
    def save2(self, *args, **kwargs):
        # Plain save that bypasses the created/changed logging in save().
        super(user_profile, self).save(*args, **kwargs)
    def save(self, *args, **kwargs):
        # A profile with this nick already stored means this is an update
        # rather than a creation (nick is unique and immutable).
        # NOTE(review): costs an extra query per save and is racy under
        # concurrent registration -- affects only the log message.
        if user_profile.objects.filter(nick=self.nick).exists():
            new = False
        else:
            new = True
        super(user_profile, self).save(*args, **kwargs)
        if new:
            logging.info('user_profile: %s created' % self.nick)
        else:
            logging.info('user_profile: %s changed' % self.nick)
    def delete(self, *args, **kwargs):
        super(user_profile, self).delete(*args, **kwargs)
        logging.info('user_profile: deleted %s' % self.nick)
class admins(models.Model):
    """Whitelist of admin users, keyed by the profile's e-mail address."""
    email = models.EmailField(editable=False, unique=True)
    def save(self, *args, **kwargs):
        super(admins, self).save(*args, **kwargs)
        logging.info('admins: added %s' % self.email)
    def delete(self, *args, **kwargs):
        super(admins, self).delete(*args, **kwargs)
        logging.info('admins: deleted %s' % self.email)
from models import user_profile, admins
from google.appengine.api import users
from django.shortcuts import render
from forms import NickForm
# (url, label) pairs for the signed-in user's profile navigation bar.
myprofile_links = [('/myprofile/', 'My Profile'), ('/myprofile/edit/', 'Edit Profile'), ('/my_items/', 'My Items'), ('/my_buys/', 'My Purchases'), ('/my_sells/', 'My Sold Items'), ('/ongoing_deals/', 'Ongoing Purchases')]
# Links shown when viewing someone else's profile (currently none).
user_links = []
class user:
    """Abstraction to handle user_profile models.

    Wraps an optional user_profile instance plus three cached flags:
    registered, is_active and is_admin.
    """

    def __init__(self, nick=None, user_id=None, email=None):
        "Initialises with any one of the argument"
        self.registered = False
        self.user_obj = None
        self.is_active = False
        self.is_admin = False
        # The three previous copy-pasted lookup branches now share _load().
        if nick:
            self._load(nick=nick)
        elif user_id:
            self._load(google_user_id=user_id)
        elif email:
            self._load(email=email)

    def _load(self, **lookup):
        "Fetch the user_profile matching *lookup* and cache its flags."
        try:
            self.user_obj = user_profile.objects.get(**lookup)
        except user_profile.DoesNotExist:
            self.user_obj = None
            return
        self.registered = True
        self.is_active = self.user_obj.is_active
        self.is_admin = self._check_admin()

    def _check_admin(self):
        "Return True if the cached profile's email is in the admins table."
        try:
            admins.objects.get(email=self.user_obj.email)
            return True
        except admins.DoesNotExist:
            return False

    def add_user(self, obj):
        "Add the current user instance to the database"
        self.registered = True
        self.is_active = True
        self.user_obj = obj
        self.user_obj.save()
        self.is_admin = self._check_admin()

    def get_info(self):
        "Get list of info for the current user"
        ret = []
        ret.append(('Nick', self.user_obj.nick))
        if self.user_obj.f_name or self.user_obj.l_name:
            ret.append(('Name', '%s %s' % (self.user_obj.f_name, self.user_obj.l_name)))
        if self.user_obj.about_me:
            ret.append(('About Me', self.user_obj.about_me))
        if self.user_obj.email_visibility:
            ret.append(('E-mail', self.user_obj.email))
        return ret
def handle_login_register(func):
    """
    Passes the function the current users instance.
    If the user is new, asks him to register when accessing any restricted page.
    If no user logged in, shows an error
    """
    def handle(*args):
        # args[0] is the Django request object.
        user_g = users.get_current_user()
        if user_g:
            curr_user = user(email=user_g.email())
            if not curr_user.registered:
                # New Google account: show/process the registration form
                # before letting the user reach any restricted view.
                if args[0].method == 'POST':
                    form = NickForm(args[0].POST)
                    if form.is_bound and form.is_valid():
                        curr_user.add_user(user_profile(
                            google_user_id=user_g.user_id(),
                            nick=form.cleaned_data['nick'],
                            email=user_g.email()))
                    else:
                        return render(args[0], 'register.html', {'form': form})
                else:
                    return render(args[0], 'register.html', {'form': NickForm()})
        else:
            curr_user = None
        if curr_user is None:
            return render(args[0], 'error.html', {'error': 'Auth Failed!'})
        if curr_user and not curr_user.user_obj.is_active:
            return render(args[0], 'error.html', {'error': 'You have been deactivated, contact the admin!'})
        return func(*args, curr_user=curr_user)
    return handle
def handle_optional_login(func):
    """
    Doesn't show error if no user logged in
    """
    def handle(*args):
        account = users.get_current_user()
        current = user(email=account.email()) if account else None
        return func(*args, curr_user=current)
    return handle
import main
from main import handle_login_register, user
from django.http import HttpResponse, HttpResponseRedirect
from google.appengine.api import users
from django.shortcuts import render
from forms import ProfileForm, DelForm, AddAdminForm, ConfirmForm
from models import user_profile, admins
from itemTools.models import items
from commTools.models import Comm, Messages
import logging
@handle_login_register
def user_info(request, curr_user):
    "My Profile Page"
    # curr_user is injected by the decorator; render the profile summary
    # together with the standard "my profile" navigation links.
    return render(request, 'myprofile.html', {'user_info': curr_user.get_info(), 'links': main.myprofile_links})
@handle_login_register
def edit_profile(request, curr_user):
    "Edit Profile Page"
    context = {'links': main.myprofile_links}
    if request.method == 'POST':
        form = ProfileForm(request.POST, instance=curr_user.user_obj)
        if form.is_valid():
            form.save()
            # Flag the template so it can show a "saved" notice.
            context['saved'] = True
    else:
        form = ProfileForm(instance=curr_user.user_obj)
    context['form'] = form
    return render(request, 'edit_profile.html', context)
def view_profile(request, nick):
    """Profile page of another user (nick).

    Renamed the local from the misleading 'curr_user' (it is the viewed
    user, not the visitor) and replaced '!= None' with 'is not None'.
    """
    target = user(nick=nick)
    if target.user_obj is not None:
        return render(request, 'view_profile.html', {'target_user': target, 'user_info': target.get_info(), 'links': main.user_links})
    else:
        return render(request, 'error.html', {'error': 'User %s not found!' % nick})
@handle_login_register
def del_profile(request, curr_user):
    """Delete Profile of currently logged in user.

    Removes the user's comm threads/messages, listed items (and their
    comm threads), any admin entry, then the profile itself.
    """
    if request.method == 'POST':
        form = DelForm(request.POST)
        if form.is_valid():
            if form.cleaned_data['confirm']:
                # remove all comm and messages where the user is the buyer
                comms = Comm.objects.filter(buyer=curr_user.user_obj)
                for comm in comms:
                    Messages.objects.filter(comm=comm).delete()
                comms.delete()
                # remove all items, together with comm threads attached
                # to each item
                _items = items.objects.filter(user=curr_user.user_obj)
                for item in _items:
                    comms = Comm.objects.filter(item=item)
                    for comm in comms:
                        Messages.objects.filter(comm=comm).delete()
                    comms.delete()
                # BUG FIX: the items themselves were never removed,
                # leaving orphaned listings pointing at a deleted profile.
                _items.delete()
                # remove admin entries
                try:
                    admins.objects.get(email=curr_user.user_obj.email).delete()
                except admins.DoesNotExist:
                    pass
                # remove user
                curr_user.user_obj.delete()
                return HttpResponseRedirect(users.create_logout_url('/'))
            else:
                return HttpResponseRedirect('/')
        else:
            return HttpResponseRedirect('/')
    else:
        return render(request, 'user_delete.html', {'form': DelForm()})
@handle_login_register
def deact_profile(request, nick, curr_user):
    "Admin Action: Deactivate profile nick"
    # Only admins may deactivate accounts.
    if not curr_user.is_admin:
        return render(request, 'error.html', {'error': 'You are not allowed to be here!'})
    target_user = user(nick=nick)
    # The target must exist, must not be an admin, and must be active.
    if target_user.user_obj is None:
        return render(request, 'error.html', {'error': 'User %s does not exist!' % nick})
    if target_user.is_admin:
        return render(request, 'error.html', {'error': 'Cannot Deactivate an Admin!'})
    if not target_user.is_active:
        return render(request, 'error.html', {'error': 'User %s already deactivated!' % nick})
    if request.method != 'POST':
        # First visit: ask for confirmation.
        return render(request, 'confirm.html', {'form': ConfirmForm(), 'title': 'Deactivate user %s' % nick})
    form = ConfirmForm(request.POST)
    if not form.is_valid() or not form.cleaned_data.get('confirm', False):
        # Unconfirmed or malformed submission: back to the profile page.
        return HttpResponseRedirect(target_user.user_obj.get_url())
    target_user.user_obj.is_active = False
    target_user.user_obj.save()
    logging.info('Admin Action: Deactivated account %s' % target_user.user_obj.nick)
    return HttpResponseRedirect(target_user.user_obj.get_url())
@handle_login_register
def act_profile(request, nick, curr_user):
    "Admin Action: Activate Profile nick"
    # Only admins may reactivate accounts.
    if not curr_user.is_admin:
        return render(request, 'error.html', {'error': 'You are not allowed to be here!'})
    target_user = user(nick=nick)
    # The target must exist, must not be an admin, and must be inactive.
    if target_user.user_obj is None:
        return render(request, 'error.html', {'error': 'User %s does not exist!' % nick})
    if target_user.is_admin:
        return render(request, 'error.html', {'error': 'Cannot Perform this operation on an Admin!'})
    if target_user.is_active:
        return render(request, 'error.html', {'error': 'User %s already activated!' % nick})
    if request.method != 'POST':
        # First visit: ask for confirmation.
        return render(request, 'confirm.html', {'form': ConfirmForm(), 'title': 'Activate user %s' % nick})
    form = ConfirmForm(request.POST)
    if not form.is_valid() or not form.cleaned_data.get('confirm', False):
        # Unconfirmed or malformed submission: back to the profile page.
        return HttpResponseRedirect(target_user.user_obj.get_url())
    target_user.user_obj.is_active = True
    target_user.user_obj.save()
    logging.info('Admin Action: Activated account %s' % target_user.user_obj.nick)
    return HttpResponseRedirect(target_user.user_obj.get_url())
@handle_login_register
def admin_add(request, curr_user):
    "Admin Action: Add Admin"
    if not curr_user.is_admin:
        return render(request, 'error.html', {'error': 'You are not allowed to be here!'})
    if request.method != 'POST':
        # Show the empty grant form.
        return render(request, 'admin_panel_add.html', {'form': AddAdminForm()})
    form = AddAdminForm(request.POST)
    if not (form.is_bound and form.is_valid()):
        return render(request, 'admin_panel_add.html', {'form': form})
    admins(email=form.cleaned_data['email']).save()
    return render(request, 'admin_panel_add.html', {'msg': 'User with email %s given Admin rights' % form.cleaned_data['email'], 'form': AddAdminForm()})
@handle_login_register
def admin_panel_home(request, curr_user):
    "Admin View: Admin Panel"
    # Admins only; everyone else gets the standard error page.
    if not curr_user.is_admin:
        return render(request, 'error.html', {'error': 'You are not allowed to be here!'})
    return render(request, 'admin_panel_base.html')
@handle_login_register
def deact_users(request, curr_user):
    "Admin View: Shows deactivated users"
    if not curr_user.is_admin:
        return render(request, 'error.html', {'error': 'You are not allowed to be here!'})
    # len() evaluates the whole queryset; the template needs the list anyway.
    ret = user_profile.objects.filter(is_active=False)
    return render(request, 'admin_panel_deact.html', {'list': ret, 'size': len(ret)})
from django.conf.urls.defaults import patterns, include, url
import views
import userTools.views
import itemTools.views
import searchTools.views
import commTools.views
import expiry
urlpatterns = patterns('',
    # Landing page.
    url(r'^$', views.home),
    # Profile management for the signed-in user.
    url(r'^myprofile/$', userTools.views.user_info),
    url(r'^myprofile/edit/$', userTools.views.edit_profile),
    url(r'^myprofile/delete/$', userTools.views.del_profile),
    # Public profile pages; nicks are 3-20 chars, letter first.
    url(r'^user/([a-z]{1}[a-z0-9_]{2,19})/$', userTools.views.view_profile),
    # Admin panel.
    url(r'^admin/$', userTools.views.admin_panel_home),
    url(r'^admin/user/deact/([a-z]{1}[a-z0-9_]{2,19})/$', userTools.views.deact_profile),
    url(r'^admin/user/act/([a-z]{1}[a-z0-9_]{2,19})/$', userTools.views.act_profile),
    url(r'^admin/add/$', userTools.views.admin_add),
    url(r'^admin/deact_users/$', userTools.views.deact_users),
    # Item listing and management.
    url(r'^sell/$', itemTools.views.sell),
    url(r'^buy/$', itemTools.views.buy),
    url(r'^item/$', itemTools.views.item_view),
    url(r'^item/(\d+)/delete/$', itemTools.views.item_delete),
    url(r'^item/(\d+)/edit/$', itemTools.views.item_edit),
    url(r'^my_items/$', itemTools.views.my_items),
    url(r'^my_buys/$', itemTools.views.my_items_buys),
    url(r'^my_sells/$', itemTools.views.my_items_sold),
    url(r'^ongoing_deals/$', itemTools.views.ongoing_deals),
    # Search.
    url(r'^search/$', searchTools.views.search_items),
    url(r'^search_handle/$', searchTools.views.search_handle),
    # Buyer/seller communication threads and deal state changes.
    url(r'^item/(\d+)/comm/$', commTools.views.comm_item),
    url(r'^item/(\d+)/comm/([a-z]{1}[a-z0-9_]{2,19})/$', commTools.views.comm_seller_buyer),
    url(r'^item/(\d+)/seal/$', commTools.views.seal),
    url(r'^item/(\d+)/cancel/$', commTools.views.cancel),
    url(r'^item/(\d+)/comm/([a-z]{1}[a-z0-9_]{2,19})/seal/$', commTools.views.seller_seal),
    # Cron/task-queue endpoint that expires old items.
    url(r'^tasks/expiry$', expiry.chk_exp),
    # Testing Purpose
    url(r'^reset/$', views.reset),
)
| Python |
from __future__ import with_statement
from decimal import Decimal, InvalidOperation
import time
from django.core import serializers
from django.db import models
from django.db.models import Q
from django.db.models.signals import post_save
from django.db.utils import DatabaseError
from django.dispatch.dispatcher import receiver
from django.test import TestCase
from django.utils.unittest import expectedFailure, skip
from .fields import ListField, SetField, DictField, EmbeddedModelField
def count_calls(func):
    """Wrap *func* so the number of invocations is tracked on the
    wrapper's `calls` attribute (used by ordering tests)."""
    def counted(*args, **kwargs):
        counted.calls += 1
        return func(*args, **kwargs)
    counted.calls = 0
    return counted
class Target(models.Model):
    # Simple FK target used by relation tests.
    index = models.IntegerField()
class Source(models.Model):
    target = models.ForeignKey(Target)
    index = models.IntegerField()
class DecimalModel(models.Model):
    # Exercises the decimal-to-string storage encoding.
    decimal = models.DecimalField(max_digits=9, decimal_places=2)
class DecimalKey(models.Model):
    # Decimal used as a primary key (stored as an encoded string).
    decimal = models.DecimalField(max_digits=9, decimal_places=2, primary_key=True)
class DecimalParent(models.Model):
    child = models.ForeignKey(DecimalKey)
class DecimalsList(models.Model):
    decimals = ListField(models.ForeignKey(DecimalKey))
class ListModel(models.Model):
    integer = models.IntegerField(primary_key=True)
    floating_point = models.FloatField()
    names = ListField(models.CharField)
    # default=[] is exercised by test_default_value below, which checks
    # that the default list is copied per instance.
    names_with_default = ListField(models.CharField(max_length=500),
                                   default=[])
    names_nullable = ListField(models.CharField(max_length=500), null=True)
class OrderedListModel(models.Model):
    # count_calls lets test_ordering assert how often the key function runs.
    ordered_ints = ListField(models.IntegerField(max_length=500), default=[],
                             ordering=count_calls(lambda x: x), null=True)
    ordered_nullable = ListField(ordering=lambda x: x, null=True)
class SetModel(models.Model):
    setfield = SetField(models.IntegerField())
class DictModel(models.Model):
    dictfield = DictField(models.IntegerField)
    dictfield_nullable = DictField(null=True)
    # Dict values run through the subfield, including auto_now timestamps.
    auto_now = DictField(models.DateTimeField(auto_now=True))
class EmbeddedModelFieldModel(models.Model):
    # Typed and untyped embedded models, alone and inside collections.
    simple = EmbeddedModelField('EmbeddedModel', null=True)
    simple_untyped = EmbeddedModelField(null=True)
    decimal_parent = EmbeddedModelField(DecimalParent, null=True)
    typed_list = ListField(EmbeddedModelField('SetModel'))
    typed_list2 = ListField(EmbeddedModelField('EmbeddedModel'))
    untyped_list = ListField(EmbeddedModelField())
    untyped_dict = DictField(EmbeddedModelField())
    ordered_list = ListField(EmbeddedModelField(),
                             ordering=lambda obj: obj.index)
class EmbeddedModel(models.Model):
    some_relation = models.ForeignKey(DictModel, null=True)
    someint = models.IntegerField(db_column='custom')
    auto_now = models.DateTimeField(auto_now=True)
    auto_now_add = models.DateTimeField(auto_now_add=True)
class IterableFieldsTest(TestCase):
    # Shared fixture data; ints are deliberately unsorted so the
    # ordering tests can observe sorting happening on save.
    floats = [5.3, 2.6, 9.1, 1.58]
    names = [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura']
    unordered_ints = [4, 2, 6, 1]
def setUp(self):
for i, float in zip(range(1, 5), IterableFieldsTest.floats):
ListModel(integer=i, floating_point=float,
names=IterableFieldsTest.names[:i]).save()
def test_startswith(self):
self.assertEquals(
dict([(entity.pk, entity.names) for entity in
ListModel.objects.filter(names__startswith='Sa')]),
dict([(3, ['Kakashi', 'Naruto', 'Sasuke']),
(4, ['Kakashi', 'Naruto', 'Sasuke', 'Sakura']), ]))
    def test_options(self):
        # Fixtures never set these fields, so the declared default ([])
        # and null (None) must come back from the database.
        self.assertEqual([entity.names_with_default for entity in
                          ListModel.objects.filter(names__startswith='Sa')],
                         [[], []])
        self.assertEqual([entity.names_nullable for entity in
                          ListModel.objects.filter(names__startswith='Sa')],
                         [None, None])
    def test_default_value(self):
        # Make sure default value is copied.
        # Mutating one instance's default list must not leak into others.
        ListModel().names_with_default.append(2)
        self.assertEqual(ListModel().names_with_default, [])
    def test_ordering(self):
        # fields[1] is ordered_ints; reset its call counter for this test.
        f = OrderedListModel._meta.fields[1]
        f.ordering.calls = 0
        # Ensure no ordering happens on assignment.
        obj = OrderedListModel()
        obj.ordered_ints = self.unordered_ints
        self.assertEqual(f.ordering.calls, 0)
        obj.save()
        self.assertEqual(OrderedListModel.objects.get().ordered_ints,
                         sorted(self.unordered_ints))
        # Ordering should happen only once, i.e. the order function may
        # be called N times at most (N being the number of items in the
        # list).
        self.assertLessEqual(f.ordering.calls, len(self.unordered_ints))
def test_gt(self):
self.assertEquals(
dict([(entity.pk, entity.names) for entity in
ListModel.objects.filter(names__gt='Kakashi')]),
dict([(2, [u'Kakashi', u'Naruto']),
(3, [u'Kakashi', u'Naruto', u'Sasuke']),
(4, [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura']), ]))
def test_lt(self):
self.assertEquals(
dict([(entity.pk, entity.names) for entity in
ListModel.objects.filter(names__lt='Naruto')]),
dict([(1, [u'Kakashi']),
(2, [u'Kakashi', u'Naruto']),
(3, [u'Kakashi', u'Naruto', u'Sasuke']),
(4, [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura']), ]))
def test_gte(self):
self.assertEquals(
dict([(entity.pk, entity.names) for entity in
ListModel.objects.filter(names__gte='Sakura')]),
dict([(3, [u'Kakashi', u'Naruto', u'Sasuke']),
(4, [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura']), ]))
def test_lte(self):
self.assertEquals(
dict([(entity.pk, entity.names) for entity in
ListModel.objects.filter(names__lte='Kakashi')]),
dict([(1, [u'Kakashi']),
(2, [u'Kakashi', u'Naruto']),
(3, [u'Kakashi', u'Naruto', u'Sasuke']),
(4, [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura']), ]))
def test_equals(self):
self.assertEquals([entity.names for entity in
ListModel.objects.filter(names='Sakura')],
[[u'Kakashi', u'Naruto', u'Sasuke', u'Sakura']])
# Test with additonal pk filter (for DBs that have special pk
# queries).
query = ListModel.objects.filter(names='Sakura')
self.assertEquals(query.get(pk=query[0].pk).names,
[u'Kakashi', u'Naruto', u'Sasuke', u'Sakura'])
def test_is_null(self):
self.assertEquals(ListModel.objects.filter(
names__isnull=True).count(), 0)
def test_exclude(self):
self.assertEquals(
dict([(entity.pk, entity.names) for entity in
ListModel.objects.all().exclude(names__lt='Sakura')]),
dict([(3, [u'Kakashi', u'Naruto', u'Sasuke']),
(4, [u'Kakashi', u'Naruto', u'Sasuke', u'Sakura']), ]))
def test_chained_filter(self):
self.assertEquals(
[entity.names for entity in ListModel.objects
.filter(names='Sasuke').filter(names='Sakura')],
[['Kakashi', 'Naruto', 'Sasuke', 'Sakura'], ])
self.assertEquals(
[entity.names for entity in ListModel.objects
.filter(names__startswith='Sa').filter(names='Sakura')],
[['Kakashi', 'Naruto', 'Sasuke', 'Sakura']])
# Test across multiple columns. On app engine only one filter
# is allowed to be an inequality filter.
self.assertEquals(
[entity.names for entity in ListModel.objects
.filter(floating_point=9.1).filter(names__startswith='Sa')],
[['Kakashi', 'Naruto', 'Sasuke'], ])
def test_setfield(self):
setdata = [1, 2, 3, 2, 1]
# At the same time test value conversion.
SetModel(setfield=map(str, setdata)).save()
item = SetModel.objects.filter(setfield=3)[0]
self.assertEqual(item.setfield, set(setdata))
# This shouldn't raise an error because the default value is
# an empty list.
SetModel().save()
    def test_dictfield(self):
        """DictField coerces values and applies auto_now on every save."""
        DictModel(dictfield=dict(a=1, b='55', foo=3.14),
                  auto_now={'a': None}).save()
        item = DictModel.objects.get()
        # Values were coerced by the item field ('55' -> 55, 3.14 -> 3).
        self.assertEqual(item.dictfield, {u'a': 1, u'b': 55, u'foo': 3})
        dt = item.auto_now['a']
        self.assertNotEqual(dt, None)
        item.save()
        time.sleep(0.5) # Sleep to avoid false positive failure on the assertion below
        # The second save must have bumped the auto_now timestamp.
        self.assertGreater(DictModel.objects.get().auto_now['a'], dt)
        item.delete()
        # Saving empty dicts shouldn't throw errors.
        DictModel().save()
        # Regression tests for djangoappengine issue #39.
        DictModel.add_to_class('new_dict_field', DictField())
        DictModel.objects.get()
@skip("GAE specific?")
def test_Q_objects(self):
self.assertEquals(
[entity.names for entity in ListModel.objects
.exclude(Q(names__lt='Sakura') | Q(names__gte='Sasuke'))],
[['Kakashi', 'Naruto', 'Sasuke', 'Sakura']])
    def test_list_with_foreignkeys(self):
        """ForeignKey pks can be stored in, and filtered through, lists."""
        # Models are declared inline; the FK target is given as a string
        # so lazy relation resolution is exercised.
        class ReferenceList(models.Model):
            keys = ListField(models.ForeignKey('Model'))
        class Model(models.Model):
            pass
        model1 = Model.objects.create()
        model2 = Model.objects.create()
        ReferenceList.objects.create(keys=[model1.pk, model2.pk])
        self.assertEqual(ReferenceList.objects.get().keys[0], model1.pk)
        self.assertEqual(ReferenceList.objects.filter(keys=model1.pk).count(), 1)
def test_list_with_foreign_conversion(self):
decimal = DecimalKey.objects.create(decimal=Decimal('1.5'))
DecimalsList.objects.create(decimals=[decimal.pk])
    @expectedFailure
    def test_nested_list(self):
        """
        Some back-ends expect lists to be strongly typed or not contain
        other lists (e.g. GAE), this limits how the ListField can be
        used (unless the back-end were to serialize all lists).
        """
        class UntypedListModel(models.Model):
            untyped_list = ListField()
        # A list nested inside a ListField: expected to fail on such
        # back-ends, hence the decorator above.
        UntypedListModel.objects.create(untyped_list=[1, [2, 3]])
class Child(models.Model):
    # Empty model embedded by Parent's collection fields below.
    pass
class Parent(models.Model):
    # Explicit integer pk so tests can address the row via pk=1.
    id = models.IntegerField(primary_key=True)
    integer_list = ListField(models.IntegerField)
    integer_dict = DictField(models.IntegerField)
    embedded_list = ListField(EmbeddedModelField(Child))
    embedded_dict = DictField(EmbeddedModelField(Child))
class EmbeddedModelFieldTest(TestCase):
    """Behavior of EmbeddedModelField alone and inside collections."""
    def assertEqualDatetime(self, d1, d2):
        """Compares d1 and d2, ignoring microseconds."""
        self.assertEqual(d1.replace(microsecond=0),
                         d2.replace(microsecond=0))
    def assertNotEqualDatetime(self, d1, d2):
        """Asserts d1 and d2 differ even when ignoring microseconds."""
        self.assertNotEqual(d1.replace(microsecond=0),
                            d2.replace(microsecond=0))
    def _simple_instance(self):
        # Save and reload so both conversion directions are exercised.
        EmbeddedModelFieldModel.objects.create(
            simple=EmbeddedModel(someint='5'))
        return EmbeddedModelFieldModel.objects.get()
    def test_simple(self):
        instance = self._simple_instance()
        self.assertIsInstance(instance.simple, EmbeddedModel)
        # Make sure get_prep_value is called.
        self.assertEqual(instance.simple.someint, 5)
        # Primary keys should not be populated...
        self.assertEqual(instance.simple.id, None)
        # ... unless set explicitly.
        instance.simple.id = instance.id
        instance.save()
        instance = EmbeddedModelFieldModel.objects.get()
        self.assertEqual(instance.simple.id, instance.id)
    def _test_pre_save(self, instance, get_field):
        # Make sure field.pre_save is called for embedded objects.
        from time import sleep
        instance.save()
        auto_now = get_field(instance).auto_now
        auto_now_add = get_field(instance).auto_now_add
        self.assertNotEqual(auto_now, None)
        self.assertNotEqual(auto_now_add, None)
        sleep(1) # FIXME
        instance.save()
        self.assertNotEqualDatetime(get_field(instance).auto_now,
                                    get_field(instance).auto_now_add)
        instance = EmbeddedModelFieldModel.objects.get()
        instance.save()
        # auto_now_add shouldn't have changed now, but auto_now should.
        self.assertEqualDatetime(get_field(instance).auto_now_add,
                                 auto_now_add)
        self.assertGreater(get_field(instance).auto_now, auto_now)
    def test_pre_save(self):
        obj = EmbeddedModelFieldModel(simple=EmbeddedModel())
        self._test_pre_save(obj, lambda instance: instance.simple)
    def test_pre_save_untyped(self):
        obj = EmbeddedModelFieldModel(simple_untyped=EmbeddedModel())
        self._test_pre_save(obj, lambda instance: instance.simple_untyped)
    def test_pre_save_in_list(self):
        obj = EmbeddedModelFieldModel(untyped_list=[EmbeddedModel()])
        self._test_pre_save(obj, lambda instance: instance.untyped_list[0])
    def test_pre_save_in_dict(self):
        obj = EmbeddedModelFieldModel(untyped_dict={'a': EmbeddedModel()})
        self._test_pre_save(obj, lambda instance: instance.untyped_dict['a'])
    def test_pre_save_list(self):
        # Also make sure auto_now{,add} works for embedded object *lists*.
        EmbeddedModelFieldModel.objects.create(typed_list2=[EmbeddedModel()])
        instance = EmbeddedModelFieldModel.objects.get()
        auto_now = instance.typed_list2[0].auto_now
        auto_now_add = instance.typed_list2[0].auto_now_add
        self.assertNotEqual(auto_now, None)
        self.assertNotEqual(auto_now_add, None)
        instance.typed_list2.append(EmbeddedModel())
        instance.save()
        instance = EmbeddedModelFieldModel.objects.get()
        self.assertEqualDatetime(instance.typed_list2[0].auto_now_add,
                                 auto_now_add)
        self.assertGreater(instance.typed_list2[0].auto_now, auto_now)
        # The freshly appended item got its own timestamps.
        self.assertNotEqual(instance.typed_list2[1].auto_now, None)
        self.assertNotEqual(instance.typed_list2[1].auto_now_add, None)
    def test_error_messages(self):
        # Assigning a non-model value must raise a descriptive TypeError.
        for kwargs, expected in (
                ({'simple': 42}, EmbeddedModel),
                ({'simple_untyped': 42}, models.Model),
                ({'typed_list': [EmbeddedModel()]}, SetModel)):
            self.assertRaisesRegexp(
                TypeError, "Expected instance of type %r." % expected,
                EmbeddedModelFieldModel(**kwargs).save)
    def test_typed_listfield(self):
        EmbeddedModelFieldModel.objects.create(
            typed_list=[SetModel(setfield=range(3)),
                        SetModel(setfield=range(9))],
            ordered_list=[Target(index=i) for i in xrange(5, 0, -1)])
        obj = EmbeddedModelFieldModel.objects.get()
        self.assertIn(5, obj.typed_list[1].setfield)
        # ordered_list sorts its items before they hit the database.
        self.assertEqual([target.index for target in obj.ordered_list],
                         range(1, 6))
    def test_untyped_listfield(self):
        EmbeddedModelFieldModel.objects.create(untyped_list=[
            EmbeddedModel(someint=7),
            OrderedListModel(ordered_ints=range(5, 0, -1)),
            SetModel(setfield=[1, 2, 2, 3])])
        instances = EmbeddedModelFieldModel.objects.get().untyped_list
        for instance, cls in zip(instances,
                                 [EmbeddedModel, OrderedListModel, SetModel]):
            self.assertIsInstance(instance, cls)
        self.assertNotEqual(instances[0].auto_now, None)
        self.assertEqual(instances[1].ordered_ints, range(1, 6))
    def test_untyped_dict(self):
        EmbeddedModelFieldModel.objects.create(untyped_dict={
            'a': SetModel(setfield=range(3)),
            'b': DictModel(dictfield={'a': 1, 'b': 2}),
            'c': DictModel(dictfield={}, auto_now={'y': 1})})
        data = EmbeddedModelFieldModel.objects.get().untyped_dict
        self.assertIsInstance(data['a'], SetModel)
        self.assertNotEqual(data['c'].auto_now['y'], None)
    def test_foreignkey_in_embedded_object(self):
        simple = EmbeddedModel(some_relation=DictModel.objects.create())
        obj = EmbeddedModelFieldModel.objects.create(simple=simple)
        simple = EmbeddedModelFieldModel.objects.get().simple
        # The relation is stored as a raw id, not a cached instance...
        self.assertNotIn('some_relation', simple.__dict__)
        self.assertIsInstance(simple.__dict__['some_relation_id'],
                              type(obj.id))
        # ... and resolved lazily on attribute access.
        self.assertIsInstance(simple.some_relation, DictModel)
    def test_embedded_field_with_foreign_conversion(self):
        decimal = DecimalKey.objects.create(decimal=Decimal('1.5'))
        decimal_parent = DecimalParent.objects.create(child=decimal)
        EmbeddedModelFieldModel.objects.create(decimal_parent=decimal_parent)
    def test_update(self):
        """
        Test that update can be used on an a subset of objects
        containing collections of embedded instances; see issue #13.
        Also ensure that updated values are coerced according to
        collection field.
        """
        child1 = Child.objects.create()
        child2 = Child.objects.create()
        parent = Parent.objects.create(pk=1,
            integer_list=[1], integer_dict={'a': 2},
            embedded_list=[child1], embedded_dict={'a': child2})
        Parent.objects.filter(pk=1).update(
            integer_list=['3'], integer_dict={'b': '3'},
            embedded_list=[child2], embedded_dict={'b': child1})
        parent = Parent.objects.get()
        self.assertEqual(parent.integer_list, [3])
        self.assertEqual(parent.integer_dict, {'b': 3})
        self.assertEqual(parent.embedded_list, [child2])
        self.assertEqual(parent.embedded_dict, {'b': child1})
class BaseModel(models.Model):
    # Concrete base used by the proxy-model tests below.
    pass
class ExtendedModel(BaseModel):
    # Multi-table child of BaseModel; ProxyTest expects queries through
    # its proxy to fail on nonrel back-ends.
    name = models.CharField(max_length=20)
class BaseModelProxy(BaseModel):
    # Proxy of a plain concrete model.
    class Meta:
        proxy = True
class ExtendedModelProxy(ExtendedModel):
    # Proxy of a model that itself uses multi-table inheritance.
    class Meta:
        proxy = True
class ProxyTest(TestCase):
    """Proxy models work unless multi-table inheritance is involved."""

    def test_proxy(self):
        # Iterating the proxy queryset must simply not raise.
        list(BaseModelProxy.objects.all())

    def test_proxy_with_inheritance(self):
        # Multi-table inheritance is unsupported, even through a proxy.
        with self.assertRaises(DatabaseError):
            list(ExtendedModelProxy.objects.all())
class SignalTest(TestCase):
    def test_post_save(self):
        """post_save reports created=True only for the initial save."""
        created = []
        @receiver(post_save, sender=SetModel)
        def handle(**kwargs):
            created.append(kwargs['created'])
        SetModel().save()
        self.assertEqual(created, [True])
        SetModel.objects.get().save()
        self.assertEqual(created, [True, False])
        qs = SetModel.objects.all()
        list(qs)[0].save()
        self.assertEqual(created, [True, False, False])
        list(qs)[0].save()
        self.assertEqual(created, [True, False, False, False])
        # Instances loaded via select_related also count as existing.
        list(qs.select_related())[0].save()
        self.assertEqual(created, [True, False, False, False, False])
class SelectRelatedTest(TestCase):
    """select_related must still resolve foreign keys correctly."""

    def test_select_related(self):
        target = Target(index=5)
        target.save()
        Source(target=target, index=8).save()
        # Both the implicit and the field-named select_related form
        # have to fetch the related target.
        for queryset in (Source.objects.all().select_related(),
                         Source.objects.all().select_related('target')):
            fetched = queryset[0]
            self.assertEqual(fetched.target.pk, target.pk)
            self.assertEqual(fetched.target.index, target.index)
class DBColumn(models.Model):
    # Field name deliberately differs from its db_column to exercise
    # column-name mapping in ordering tests.
    a = models.IntegerField(db_column='b')
class OrderByTest(TestCase):
    """Ordering by foreign keys, mapped columns, reverse() and chains."""

    def test_foreign_keys(self):
        first_target = Target.objects.create(index=1)
        second_target = Target.objects.create(index=2)
        first_source = Source.objects.create(target=first_target, index=3)
        second_source = Source.objects.create(target=second_target, index=4)
        self.assertEqual(list(Source.objects.all().order_by('target')),
                         [first_source, second_source])
        self.assertEqual(list(Source.objects.all().order_by('-target')),
                         [second_source, first_source])

    def test_db_column(self):
        # Ordering uses the field name even though the column differs.
        lower = DBColumn.objects.create(a=1)
        higher = DBColumn.objects.create(a=2)
        self.assertEqual(list(DBColumn.objects.all().order_by('a')),
                         [lower, higher])
        self.assertEqual(list(DBColumn.objects.all().order_by('-a')),
                         [higher, lower])

    def test_reverse(self):
        lower = DBColumn.objects.create(a=1)
        higher = DBColumn.objects.create(a=2)
        self.assertEqual(list(DBColumn.objects.all().order_by('a').reverse()),
                         [higher, lower])
        self.assertEqual(
            list(DBColumn.objects.all().order_by('-a').reverse()),
            [lower, higher])

    def test_chain(self):
        lower = Target.objects.create(index=1)
        higher = Target.objects.create(index=2)
        # The last order_by() in a chain wins.
        self.assertEqual(
            list(Target.objects.all().order_by('index').order_by('-index')),
            [higher, lower])
class SerializableSetModel(models.Model):
    # Sets must be converted to lists for JSON output; see issue #12.
    setfield = SetField(models.IntegerField())
    setcharfield = SetField(models.CharField(), null=True)
class SerializationTest(TestCase):
    """
    JSON doesn't support sets, so they need to be converted to lists
    for serialization; see issue #12.
    TODO: Check if the fix works with embedded models / nested sets.
    """
    names = ['foo', 'bar', 'baz', 'monkey']

    def test_json_listfield(self):
        for i in range(1, 5):
            ListModel(integer=i, floating_point=0,
                      names=SerializationTest.names[:i]).save()
        payload = serializers.serialize('json', ListModel.objects.all())
        # Round-trip: every deserialized row keeps its name prefix.
        for wrapper in serializers.deserialize('json', payload):
            self.assertEqual(
                wrapper.object.names,
                SerializationTest.names[:wrapper.object.integer])

    def test_json_setfield(self):
        for i in range(1, 5):
            SerializableSetModel(
                setfield=set([i - 1]),
                setcharfield=set(SerializationTest.names[:i])).save()
        payload = serializers.serialize(
            'json', SerializableSetModel.objects.all())
        for wrapper in serializers.deserialize('json', payload):
            index = wrapper.object.setfield.pop()
            self.assertEqual(
                wrapper.object.setcharfield,
                set(SerializationTest.names[:index + 1]))
class String(models.Model):
    # Minimal model for the lazy/marked-string handling tests below.
    s = models.CharField(max_length=20)
class LazyObjectsTest(TestCase):
    """Lazy and marked strings must behave exactly like plain strings."""

    def test_translation(self):
        """
        Using a lazy translation call should work just the same as
        a non-lazy one (or a plain string).
        """
        from django.utils.translation import ugettext_lazy
        a = String.objects.create(s='a')
        b = String.objects.create(s=ugettext_lazy('b'))
        # 'a' sits at the low end of the data, 'b' at the high end, so
        # use the matching range lookup for each.
        for letter, stored, range_lookup in (('a', a, 's__lte'),
                                             ('b', b, 's__gte')):
            self.assertEqual(String.objects.get(s=letter), stored)
            self.assertEqual(list(String.objects.filter(s=letter)),
                             [stored])
            self.assertEqual(
                list(String.objects.filter(**{range_lookup: letter})),
                [stored])
            self.assertEqual(String.objects.get(s=ugettext_lazy(letter)),
                             stored)
            self.assertEqual(
                list(String.objects.filter(
                    **{range_lookup: ugettext_lazy(letter)})),
                [stored])

    def test_marked_strings(self):
        """
        Check that strings marked as safe or needing escaping do not
        confuse the back-end.
        """
        from django.utils.safestring import mark_safe, mark_for_escaping
        a = String.objects.create(s='a')
        b = String.objects.create(s=mark_safe('b'))
        c = String.objects.create(s=mark_for_escaping('c'))
        # Each stored row must be found via a plain, safe-marked and
        # escape-marked version of its letter.
        for letter, stored in (('a', a), ('b', b), ('c', c)):
            for mark in (lambda s: s, mark_safe, mark_for_escaping):
                self.assertEqual(String.objects.get(s=mark(letter)),
                                 stored)
                self.assertEqual(
                    list(String.objects.filter(
                        s__startswith=mark(letter))),
                    [stored])
class FeaturesTest(TestCase):
    """
    Some things are unlikely to cause problems for SQL back-ends, but
    require special handling in nonrel.
    """

    def test_subqueries(self):
        """
        Django includes SQL statements as WHERE tree values when
        filtering using a QuerySet -- this won't "just work" with
        nonrel back-ends.
        TODO: Subqueries handling may require a bit of Django
        changing, but should be easy to support.
        """
        target = Target.objects.create(index=1)
        source = Source.objects.create(index=2, target=target)
        targets = Target.objects.all()
        # Passing an unevaluated queryset (a subquery) must fail...
        self.assertRaises(DatabaseError,
                          Source.objects.get, target__in=targets)
        # ... while a materialized list of results works fine.
        self.assertEqual(Source.objects.get(target__in=list(targets)),
                         source)
class DecimalFieldTest(TestCase):
    """
    Some NoSQL databases can't handle Decimals, so respective back-ends
    convert them to strings or floats. This can cause some precision
    and sorting problems.
    """

    def setUp(self):
        for d in (Decimal('12345.6789'), Decimal('5'), Decimal('345.67'),
                  Decimal('45.6'), Decimal('2345.678'),):
            DecimalModel(decimal=d).save()

    def test_filter(self):
        """Stored decimals are quantized to the field's decimal places."""
        d = DecimalModel.objects.get(decimal=Decimal('5.0'))
        # assertIsInstance / assertEqual replace the weaker
        # assertTrue(isinstance(...)) and the deprecated assertEquals.
        self.assertIsInstance(d.decimal, Decimal)
        self.assertEqual(str(d.decimal), '5.00')
        d = DecimalModel.objects.get(decimal=Decimal('45.60'))
        self.assertEqual(str(d.decimal), '45.60')
        # Filter argument should be converted to Decimal with 2 decimal
        # places.
        d = DecimalModel.objects.get(decimal='0000345.67333333333333333')
        self.assertEqual(str(d.decimal), '345.67')

    def test_order(self):
        """
        Standard Django decimal-to-string conversion isn't monotonic
        (see `django.db.backends.util.format_number`).
        """
        rows = DecimalModel.objects.all().order_by('decimal')
        values = list(d.decimal for d in rows)
        self.assertEqual(values, sorted(values))

    def test_sign_extend(self):
        DecimalModel(decimal=Decimal('-0.0')).save()
        try:
            # If we've written a valid string we should be able to
            # retrieve the DecimalModel object without error.
            DecimalModel.objects.filter(decimal__lt=1)[0]
        except InvalidOperation:
            # self.fail gives a meaningful message, unlike the
            # original assertTrue(False).
            self.fail("Negative-zero decimal did not round-trip.")
| Python |
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.http import HttpResponse
from django.utils import simplejson
from django.utils.encoding import force_unicode
from django.utils.functional import Promise
class LazyEncoder(DjangoJSONEncoder):
    """JSON encoder that also handles lazy translation objects."""

    def default(self, obj):
        # Anything that isn't a lazy string is delegated to the base
        # encoder; Promises are forced to unicode.
        if not isinstance(obj, Promise):
            return super(LazyEncoder, self).default(obj)
        return force_unicode(obj)
class JSONResponse(HttpResponse):
    """HttpResponse that serializes the given Python object as JSON."""

    def __init__(self, pyobj, **kwargs):
        body = simplejson.dumps(pyobj, cls=LazyEncoder)
        mime = ('application/json; charset=%s' %
                settings.DEFAULT_CHARSET)
        super(JSONResponse, self).__init__(body, content_type=mime,
                                           **kwargs)
class TextResponse(HttpResponse):
    """HttpResponse with a plain-text content type."""

    def __init__(self, string='', **kwargs):
        mime = 'text/plain; charset=%s' % settings.DEFAULT_CHARSET
        super(TextResponse, self).__init__(string, content_type=mime,
                                           **kwargs)
| Python |
# All fields except for BlobField written by Jonas Haag <jonas@lophus.org>
from django.core.exceptions import ValidationError
from django.utils.importlib import import_module
from django.db import models
from django.db.models.fields.subclassing import Creator
from django.db.utils import IntegrityError
from django.db.models.fields.related import add_lazy_relation
__all__ = ('RawField', 'ListField', 'SetField', 'DictField',
'EmbeddedModelField', 'BlobField')
EMPTY_ITER = ()
class _FakeModel(object):
    """
    An object of this class can pass itself off as a model instance
    when used as an arguments to Field.pre_save method (item_fields
    of iterable fields are not actually fields of any model).
    """
    def __init__(self, field, value):
        # Mimic a model instance: expose the value under the attname
        # the field expects to read via getattr.
        setattr(self, field.attname, value)
class RawField(models.Field):
    """
    Generic field to store anything your database backend allows you
    to. No validation or conversions are done for this field.
    """
    def get_internal_type(self):
        """
        Returns this field's kind. Nonrel fields are meant to extend
        the set of standard fields, so fields subclassing them should
        get the same internal type, rather than their own class name.
        """
        return 'RawField'
class AbstractIterableField(models.Field):
    """
    Abstract field for fields for storing iterable data type like
    ``list``, ``set`` and ``dict``.
    You can pass an instance of a field as the first argument.
    If you do, the iterable items will be piped through the passed
    field's validation and conversion routines, converting the items
    to the appropriate data type.
    """
    def __init__(self, item_field=None, *args, **kwargs):
        # Nullable fields default to None; otherwise to an empty
        # iterable of the concrete subclass type.
        default = kwargs.get(
            'default', None if kwargs.get('null') else EMPTY_ITER)
        # Ensure a new object is created every time the default is
        # accessed.
        if default is not None and not callable(default):
            kwargs['default'] = lambda: self._type(default)
        super(AbstractIterableField, self).__init__(*args, **kwargs)
        # Either use the provided item_field or a RawField.
        if item_field is None:
            item_field = RawField()
        elif callable(item_field):
            item_field = item_field()
        self.item_field = item_field
        # We'll be pretending that item_field is a field of a model
        # with just one "value" field.
        assert not hasattr(self.item_field, 'attname')
        self.item_field.set_attributes_from_name('value')
    def contribute_to_class(self, cls, name):
        self.item_field.model = cls
        self.item_field.name = name
        super(AbstractIterableField, self).contribute_to_class(cls, name)
        # If items' field uses SubfieldBase we also need to.
        item_metaclass = getattr(self.item_field, '__metaclass__', None)
        if issubclass(item_metaclass, models.SubfieldBase):
            setattr(cls, self.name, Creator(self))
        if isinstance(self.item_field, models.ForeignKey) and isinstance(self.item_field.rel.to, basestring):
            """
            If rel.to is a string because the actual class is not yet defined, look up the
            actual class later. Refer to django.models.fields.related.RelatedField.contribute_to_class.
            """
            def _resolve_lookup(_, resolved_model, __):
                self.item_field.rel.to = resolved_model
                self.item_field.do_related_class(self, cls)
            add_lazy_relation(cls, self, self.item_field.rel.to, _resolve_lookup)
    def _map(self, function, iterable, *args, **kwargs):
        """
        Applies the function to items of the iterable and returns
        an iterable of the proper type for the field.
        Overriden by DictField to only apply the function to values.
        """
        return self._type(function(element, *args, **kwargs)
                          for element in iterable)
    def to_python(self, value):
        """
        Passes value items through item_field's to_python.
        """
        if value is None:
            return None
        return self._map(self.item_field.to_python, value)
    def pre_save(self, model_instance, add):
        """
        Gets our value from the model_instance and passes its items
        through item_field's pre_save (using a fake model instance).
        """
        value = getattr(model_instance, self.attname)
        if value is None:
            return None
        return self._map(
            lambda item: self.item_field.pre_save(
                _FakeModel(self.item_field, item), add),
            value)
    def get_db_prep_save(self, value, connection):
        """
        Applies get_db_prep_save of item_field on value items.
        """
        if value is None:
            return None
        return self._map(self.item_field.get_db_prep_save, value,
                         connection=connection)
    def get_db_prep_lookup(self, lookup_type, value, connection,
                           prepared=False):
        """
        Passes the value through get_db_prep_lookup of item_field.
        """
        # TODO/XXX: Remove as_lookup_value() once we have a cleaner
        # solution for dot-notation queries.
        # See: https://groups.google.com/group/django-non-relational/browse_thread/thread/6056f8384c9caf04/89eeb9fb22ad16f3).
        if hasattr(value, 'as_lookup_value'):
            value = value.as_lookup_value(self, lookup_type, connection)
        return self.item_field.get_db_prep_lookup(
            lookup_type, value, connection=connection, prepared=prepared)
    def validate(self, values, model_instance):
        # Only iterability is checked here; item validation is left to
        # item_field's own conversions.
        try:
            iter(values)
        except TypeError:
            raise ValidationError("Value of type %r is not iterable." %
                                  type(values))
    def formfield(self, **kwargs):
        raise NotImplementedError("No form field implemented for %r." %
                                  type(self))
class ListField(AbstractIterableField):
    """
    Field representing a Python ``list``.

    If the optional keyword argument `ordering` is given, it must be a
    callable that is passed to :meth:`list.sort` as `key` argument. If
    `ordering` is given, the items in the list will be sorted before
    sending them to the database.
    """
    _type = list

    def __init__(self, *args, **kwargs):
        ordering = kwargs.pop('ordering', None)
        if not (ordering is None or callable(ordering)):
            raise TypeError("'ordering' has to be a callable or None, "
                            "not of type %r." % type(ordering))
        self.ordering = ordering
        super(ListField, self).__init__(*args, **kwargs)

    def get_internal_type(self):
        return 'ListField'

    def pre_save(self, model_instance, add):
        # Sort in place just before saving, so the ordering callable
        # runs at most once per save and never on plain assignment.
        value = getattr(model_instance, self.attname)
        if value is None:
            return None
        if self.ordering and value:
            value.sort(key=self.ordering)
        return super(ListField, self).pre_save(model_instance, add)
class SetField(AbstractIterableField):
    """
    Field representing a Python ``set``.
    """
    _type = set
    def get_internal_type(self):
        return 'SetField'
    def value_to_string(self, obj):
        """
        Custom method for serialization, as JSON doesn't support
        serializing sets.
        """
        # Deliberately returns a list (not a string) so the JSON
        # serializer can encode it; see SerializationTest above.
        return list(self._get_val_from_obj(obj))
class DictField(AbstractIterableField):
    """
    Field representing a Python ``dict``.

    Type conversions described in :class:`AbstractIterableField` only
    affect values of the dictionary, not keys. Depending on the
    back-end, keys that aren't strings might not be allowed.
    """
    _type = dict

    def get_internal_type(self):
        return 'DictField'

    def _map(self, function, iterable, *args, **kwargs):
        # Unlike the base class, only values are mapped; keys pass
        # through untouched.
        mapped = {}
        for key, value in iterable.iteritems():
            mapped[key] = function(value, *args, **kwargs)
        return self._type(mapped)

    def validate(self, values, model_instance):
        if isinstance(values, dict):
            return
        raise ValidationError("Value is of type %r. Should be a dict." %
                              type(values))
class EmbeddedModelField(models.Field):
    """
    Field that allows you to embed a model instance.
    :param embedded_model: (optional) The model class of instances we
                           will be embedding; may also be passed as a
                           string, similar to relation fields
    TODO: Make sure to delegate all signals and other field methods to
    the embedded instance (not just pre_save, get_db_prep_* and
    to_python).
    """
    __metaclass__ = models.SubfieldBase
    def __init__(self, embedded_model=None, *args, **kwargs):
        # None means "untyped": the model class is serialized alongside
        # the values instead (see stored_model / get_db_prep_save).
        self.embedded_model = embedded_model
        kwargs.setdefault('default', None)
        super(EmbeddedModelField, self).__init__(*args, **kwargs)
    def get_internal_type(self):
        return 'EmbeddedModelField'
    def _set_model(self, model):
        """
        Resolves embedded model class once the field knows the model it
        belongs to.
        If the model argument passed to __init__ was a string, we need
        to make sure to resolve that string to the corresponding model
        class, similar to relation fields.
        However, we need to know our own model to generate a valid key
        for the embedded model class lookup and EmbeddedModelFields are
        not contributed_to_class if used in iterable fields. Thus we
        rely on the collection field telling us its model (by setting
        our "model" attribute in its contribute_to_class method).
        """
        self._model = model
        if model is not None and isinstance(self.embedded_model, basestring):
            def _resolve_lookup(self_, resolved_model, model):
                self.embedded_model = resolved_model
            add_lazy_relation(model, self, self.embedded_model, _resolve_lookup)
    model = property(lambda self: self._model, _set_model)
    def stored_model(self, column_values):
        """
        Returns the fixed embedded_model this field was initialized
        with (typed embedding) or tries to determine the model from
        _module / _model keys stored together with column_values
        (untyped embedding).
        We give precedence to the field's definition model, as silently
        using a differing serialized one could hide some data integrity
        problems.
        Note that a single untyped EmbeddedModelField may process
        instances of different models (especially when used as a type
        of a collection field).
        """
        # Note: pop() removes the bookkeeping keys from column_values
        # even when the typed branch is taken.
        module = column_values.pop('_module', None)
        model = column_values.pop('_model', None)
        if self.embedded_model is not None:
            return self.embedded_model
        elif module is not None:
            return getattr(import_module(module), model)
        else:
            raise IntegrityError("Untyped EmbeddedModelField trying to load "
                                 "data without serialized model class info.")
    def to_python(self, value):
        """
        Passes embedded model fields' values through embedded fields
        to_python methods and reinstiatates the embedded instance.
        We expect to receive a field.attname => value dict together
        with a model class from back-end database deconversion (which
        needs to know fields of the model beforehand).
        """
        # Either the model class has already been determined during
        # deconverting values from the database or we've got a dict
        # from a deserializer that may contain model class info.
        if isinstance(value, tuple):
            embedded_model, attribute_values = value
        elif isinstance(value, dict):
            embedded_model = self.stored_model(value)
            attribute_values = value
        else:
            return value
        # Pass values through respective fields' to_python, leaving
        # fields for which no value is specified uninitialized.
        attribute_values = dict(
            (field.attname, field.to_python(attribute_values[field.attname]))
            for field in embedded_model._meta.fields
            if field.attname in attribute_values)
        # Create the model instance.
        # Note: the double underline is not a typo -- this lets the
        # model know that the object already exists in the database.
        return embedded_model(__entity_exists=True, **attribute_values)
    def get_db_prep_save(self, embedded_instance, connection):
        """
        Applies pre_save and get_db_prep_save of embedded instance
        fields and passes a field => value mapping down to database
        type conversions.
        The embedded instance will be saved as a column => value dict
        in the end (possibly augmented with info about instance's model
        for untyped embedding), but because we need to apply database
        type conversions on embedded instance fields' values and for
        these we need to know fields those values come from, we need to
        entrust the database layer with creating the dict.
        """
        if embedded_instance is None:
            return None
        # The field's value should be an instance of the model given in
        # its declaration or at least of some model.
        embedded_model = self.embedded_model or models.Model
        if not isinstance(embedded_instance, embedded_model):
            raise TypeError("Expected instance of type %r, not %r." %
                            (embedded_model, type(embedded_instance)))
        # Apply pre_save and get_db_prep_save of embedded instance
        # fields, create the field => value mapping to be passed to
        # storage preprocessing.
        field_values = {}
        add = not embedded_instance._entity_exists
        for field in embedded_instance._meta.fields:
            value = field.get_db_prep_save(
                field.pre_save(embedded_instance, add), connection=connection)
            # Exclude unset primary keys (e.g. {'id': None}).
            if field.primary_key and value is None:
                continue
            field_values[field] = value
        # Let untyped fields store model info alongside values.
        # We use fake RawFields for additional values to avoid passing
        # embedded_instance to database conversions and to give
        # back-ends a chance to apply generic conversions.
        if self.embedded_model is None:
            module_field = RawField()
            module_field.set_attributes_from_name('_module')
            model_field = RawField()
            model_field.set_attributes_from_name('_model')
            field_values.update(
                ((module_field, embedded_instance.__class__.__module__),
                 (model_field, embedded_instance.__class__.__name__)))
        # This instance will exist in the database soon.
        # TODO.XXX: Ensure that this doesn't cause race conditions.
        embedded_instance._entity_exists = True
        return field_values
    # TODO/XXX: Remove this once we have a cleaner solution.
    def get_db_prep_lookup(self, lookup_type, value, connection,
                           prepared=False):
        if hasattr(value, 'as_lookup_value'):
            value = value.as_lookup_value(self, lookup_type, connection)
        return value
class BlobField(models.Field):
    """
    A field for storing blobs of binary data.

    The value might either be a string (or something that can be
    converted to a string), or a file-like object.
    In the latter case, the object has to provide a ``read`` method
    from which the blob is read.
    """

    def get_internal_type(self):
        return 'BlobField'

    def formfield(self, **kwargs):
        """
        A file widget is provided, but use model FileField or
        ImageField for storing specific files most of the time.
        """
        from .widgets import BlobWidget
        from django.forms import FileField
        options = {'form_class': FileField, 'widget': BlobWidget}
        options.update(kwargs)
        return super(BlobField, self).formfield(**options)

    def get_db_prep_save(self, value, connection):
        # File-like objects are read out; everything else is coerced
        # to a byte string.
        if not hasattr(value, 'read'):
            return str(value)
        return value.read()

    def get_db_prep_lookup(self, lookup_type, value, connection,
                           prepared=False):
        raise TypeError("BlobFields do not support lookups.")

    def value_to_string(self, obj):
        return str(self._get_val_from_obj(obj))
| Python |
from django.conf import settings
from django.http import HttpResponseRedirect
from django.utils.cache import patch_cache_control
# Path prefixes that require an authenticated user (see
# LoginRequiredMiddleware below).
LOGIN_REQUIRED_PREFIXES = getattr(settings, 'LOGIN_REQUIRED_PREFIXES', ())
# Prefixes exempt from the login requirement; these take precedence.
NO_LOGIN_REQUIRED_PREFIXES = getattr(settings,
    'NO_LOGIN_REQUIRED_PREFIXES', ())
# Domains the site may be served from; None/empty disables redirects
# (see RedirectMiddleware below).
ALLOWED_DOMAINS = getattr(settings, 'ALLOWED_DOMAINS', None)
# Exact paths and path prefixes excluded from the domain redirect.
NON_REDIRECTED_PATHS = getattr(settings, 'NON_REDIRECTED_PATHS', ())
NON_REDIRECTED_BASE_PATHS = tuple(path.rstrip('/') + '/'
                                  for path in NON_REDIRECTED_PATHS)
class LoginRequiredMiddleware(object):
    """
    Redirects to the login page if the request path begins with a
    LOGIN_REQUIRED_PREFIXES prefix. You can also specify
    NO_LOGIN_REQUIRED_PREFIXES which take precedence.
    """

    def process_request(self, request):
        path = request.path
        # Exemptions win over login requirements.
        if any(path.startswith(prefix)
               for prefix in NO_LOGIN_REQUIRED_PREFIXES):
            return None
        needs_login = any(path.startswith(prefix)
                          for prefix in LOGIN_REQUIRED_PREFIXES)
        if needs_login and not request.user.is_authenticated():
            from django.contrib.auth.views import redirect_to_login
            return redirect_to_login(request.get_full_path())
        return None
class RedirectMiddleware(object):
    """
    A static redirect middleware. Mostly useful for hosting providers
    that automatically setup an alternative domain for your website.
    You might not want anyone to access the site via those possibly
    well-known URLs.
    """

    def process_request(self, request):
        """
        Redirects requests for hosts outside ALLOWED_DOMAINS to the
        first allowed domain, except in debug/test/cron contexts and
        for explicitly excluded paths.
        """
        host = request.get_host().split(':')[0]
        # Turn off redirects when in debug mode, running unit tests, or
        # when handling an App Engine cron job.
        if (settings.DEBUG or host == 'testserver' or
                not ALLOWED_DOMAINS or
                request.META.get('HTTP_X_APPENGINE_CRON') == 'true' or
                request.path.startswith('/_ah/') or
                request.path in NON_REDIRECTED_PATHS or
                request.path.startswith(NON_REDIRECTED_BASE_PATHS)):
            return
        # Consistency fix: use the module-level ALLOWED_DOMAINS here as
        # well (the guard above already ensures it is non-empty),
        # instead of reaching back into settings, which would raise
        # AttributeError if the setting were absent.
        if host not in ALLOWED_DOMAINS:
            return HttpResponseRedirect(
                'http://' + ALLOWED_DOMAINS[0] + request.path)
class NoHistoryCacheMiddleware(object):
    """
    If user is authenticated we disable browser caching of pages in
    history.
    """

    def process_response(self, request, response):
        # Only act when the view didn't set its own caching headers.
        has_cache_headers = ('Expires' in response or
                             'Cache-Control' in response)
        if (not has_cache_headers and hasattr(request, 'session') and
                request.user.is_authenticated()):
            patch_cache_control(response, no_store=True, no_cache=True,
                                must_revalidate=True, max_age=0)
        return response
| Python |
from django.conf import settings
from django.core.cache import cache
from django.contrib.sites.models import Site
from djangotoolbox.utils import make_tls_property
# Fallback SITE_ID used when no matching Site can be determined.
_default_site_id = getattr(settings, 'SITE_ID', None)
# Replace settings.SITE_ID with a thread-local property so each
# request (thread) can see a different current site.
SITE_ID = settings.__class__.SITE_ID = make_tls_property()
class DynamicSiteIDMiddleware(object):
    """Sets settings.SITE_ID based on request's domain."""

    def process_request(self, request):
        # Ignore port if it's 80 or 443
        if ':' in request.get_host():
            domain, port = request.get_host().split(':')
            if int(port) not in (80, 443):
                # Non-standard port: keep host:port as the lookup key,
                # so e.g. a development server maps to its own Site.
                domain = request.get_host()
        else:
            domain = request.get_host().split(':')[0]
        # Domains are case insensitive
        domain = domain.lower()
        # We cache the SITE_ID
        cache_key = 'Site:domain:%s' % domain
        site = cache.get(cache_key)
        if site:
            # Cache hit: the cached value is the Site primary key
            # (see cache.set at the bottom).
            SITE_ID.value = site
        else:
            try:
                site = Site.objects.get(domain=domain)
            except Site.DoesNotExist:
                site = None
            if not site:
                # Fall back to with/without 'www.'
                if domain.startswith('www.'):
                    fallback_domain = domain[4:]
                else:
                    fallback_domain = 'www.' + domain
                try:
                    site = Site.objects.get(domain=fallback_domain)
                except Site.DoesNotExist:
                    site = None
            # Add site if it doesn't exist
            if not site and getattr(settings, 'CREATE_SITES_AUTOMATICALLY',
                                    True):
                site = Site(domain=domain, name=domain)
                site.save()
            # Set SITE_ID for this thread/request
            if site:
                SITE_ID.value = site.pk
            else:
                SITE_ID.value = _default_site_id
            # Remember the resolved pk for 5 minutes.
            cache.set(cache_key, SITE_ID.value, 5 * 60)
| Python |
from django.test import TestCase
from django.test.simple import DjangoTestSuiteRunner
from django.utils.unittest import TextTestRunner

from .utils import equal_lists, object_list_to_table
class ModelTestCase(TestCase):
    """
    A test case for models that provides an easy way to validate the DB
    contents against a given list of row-values.

    You have to specify the model to validate using the 'model'
    attribute:

    class MyTestCase(ModelTestCase):
        model = MyModel
    """

    def validate_state(self, columns, *state_table):
        """
        Validates that the DB contains exactly the values given in the
        state table. The list of columns is given in the columns tuple.

        Example:
        self.validate_state(
            ('a', 'b', 'c'),
            (1, 2, 3),
            (11, 12, 13),
        )
        validates that the table contains exactly two rows and that
        their 'a', 'b', and 'c' attributes are 1, 2, 3 for one row and
        11, 12, 13 for the other row. The order of the rows doesn't
        matter.
        """
        # The [1:] drops the heading row that object_list_to_table
        # prepends to the data rows.
        current_state = object_list_to_table(
            columns, self.model.all())[1:]
        # NOTE(review): equal_lists lives in .utils but only
        # object_list_to_table is imported at the top of this module;
        # equal_lists must be imported as well or this raises NameError.
        if not equal_lists(current_state, state_table):
            print "DB state not valid:"
            print "Current state:"
            print columns
            for state in current_state:
                print state
            print "Should be:"
            for state in state_table:
                print state
            self.fail("DB state not valid.")
class CapturingTestSuiteRunner(DjangoTestSuiteRunner):
    """
    Captures stdout/stderr during test and shows them next to
    tracebacks.
    """

    def run_suite(self, suite, **kwargs):
        # buffer=True makes unittest capture output and replay it only
        # for failing or erroring tests.
        runner = TextTestRunner(verbosity=self.verbosity,
                                failfast=self.failfast,
                                buffer=True)
        return runner.run(suite)
| Python |
def make_tls_property(default=None):
    """
    Creates a class-wide instance property with a thread-specific
    value.
    """
    class TLSProperty(object):
        def __init__(self):
            from threading import local
            self._storage = local()

        def __get__(self, instance, cls):
            # Accessing the attribute on the class itself returns the
            # descriptor so .value stays reachable.
            if not instance:
                return self
            return self.value

        def __set__(self, instance, value):
            self.value = value

        def _get(self):
            return getattr(self._storage, 'value', default)

        def _set(self, value):
            self._storage.value = value

        value = property(_get, _set)

    return TLSProperty()
def getattr_by_path(obj, attr, *default):
    """
    Like getattr(), but can go down a hierarchy like "attr.subattr";
    callables found along the path are invoked (without arguments) and
    replaced by their result.
    """
    current = obj
    for name in attr.split('.'):
        if len(default) and not hasattr(current, name):
            return default[0]
        current = getattr(current, name)
        if callable(current):
            current = current()
    return current
def subdict(data, *attrs):
    """Returns a new dict containing only the given keys of `data`."""
    return dict((key, data[key]) for key in attrs)
def equal_lists(left, right):
    """
    Compares two lists and returns True if they contain the same
    elements, but doesn't require that they have the same order
    (a multiset comparison that also works with unhashable elements).
    """
    remaining = list(right)
    if len(left) != len(remaining):
        return False
    for element in left:
        try:
            # Removes the first equal element, so duplicates count.
            remaining.remove(element)
        except ValueError:
            return False
    return True
def object_list_to_table(headings, dict_list):
    """
    Converts objects to table-style list of rows with heading:

    Example:
    x.a = 1
    x.b = 2
    x.c = 3
    y.a = 11
    y.b = 12
    y.c = 13
    object_list_to_table(('a', 'b', 'c'), [x, y])
    results in the following (dict keys reordered for better readability):
    [
        ('a', 'b', 'c'),
        (1, 2, 3),
        (11, 12, 13),
    ]
    """
    rows = [headings]
    for obj in dict_list:
        # Missing attributes become None (getattr_by_path default).
        rows.append(tuple(getattr_by_path(obj, heading, None)
                          for heading in headings))
    return rows
def dict_list_to_table(headings, dict_list):
    """
    Converts dicts to table-style list of rows with heading:

    Example:
    dict_list_to_table(('a', 'b', 'c'),
        [{'a': 1, 'b': 2, 'c': 3}, {'a': 11, 'b': 12, 'c': 13}])
    results in the following (dict keys reordered for better readability):
    [
        ('a', 'b', 'c'),
        (1, 2, 3),
        (11, 12, 13),
    ]
    """
    rows = [headings]
    for mapping in dict_list:
        rows.append(tuple(mapping[heading] for heading in headings))
    return rows
| Python |
from django import forms
from django.contrib import admin
from django.contrib.auth.admin import UserAdmin
from django.contrib.auth.models import User, Group
class UserForm(forms.ModelForm):
    # Restricts the admin change form to this explicit field list --
    # notably omitting 'groups' and 'user_permissions' (presumably
    # because those m2m fields don't work here; verify against the
    # target back-end).
    class Meta:
        model = User
        fields = ('username', 'email', 'first_name', 'last_name', 'is_active',
                  'is_staff', 'is_superuser')
class CustomUserAdmin(UserAdmin):
    # Drop the stock UserAdmin fieldsets so the form below alone
    # defines the visible fields.
    fieldsets = None
    form = UserForm
    # '=username' means exact-match search on the username column.
    search_fields = ('=username',)
# Swap the stock User admin for the simplified one above and hide
# Group administration entirely.
admin.site.unregister(User)
admin.site.unregister(Group)
admin.site.register(User, CustomUserAdmin)
| Python |
from django.forms import widgets
from django.template.defaultfilters import filesizeformat
from django.utils.safestring import mark_safe
class BlobWidget(widgets.FileInput):
    """
    A file upload widget that also displays the size of the currently
    stored blob.
    """

    def render(self, name, value, attrs=None):
        try:
            blob_size = len(value)
        except (TypeError, AttributeError):
            # value may be None or something without a length (e.g. a
            # file-like wrapper) -- fall back to showing zero instead
            # of swallowing every possible error with a bare except.
            blob_size = 0
        blob_size = filesizeformat(blob_size)
        # Bug fix: forward the caller's attrs to the parent widget;
        # the original passed attrs=None, silently discarding any
        # HTML attributes (id, class, ...) supplied by the form.
        original = super(BlobWidget, self).render(name, value, attrs=attrs)
        return mark_safe("%s<p>Current size: %s</p>" % (original, blob_size))
| Python |
from django.db.backends.creation import BaseDatabaseCreation
class NonrelDatabaseCreation(BaseDatabaseCreation):

    # "Types" used by database conversion methods to decide how to
    # convert data for or from the database. Type is understood here
    # a bit differently than in vanilla Django -- it should be read
    # as an identifier of an encoding / decoding procedure rather than
    # just a database column type.
    data_types = {

        # NoSQL databases often have specific concepts of entity keys.
        # For example, GAE has the db.Key class, MongoDB likes to use
        # ObjectIds, Redis uses strings, while Cassandra supports
        # different types (including binary data).
        'AutoField': 'key',
        'RelatedAutoField': 'key',
        'ForeignKey': 'key',
        'OneToOneField': 'key',
        'ManyToManyField': 'key',

        # Standard field types, more or less suitable for a database
        # (or its client / driver) being able to directly store or
        # process Python objects.
        'BigIntegerField': 'long',
        'BooleanField': 'bool',
        'CharField': 'string',
        'CommaSeparatedIntegerField': 'string',
        'DateField': 'date',
        'DateTimeField': 'datetime',
        'DecimalField': 'decimal',
        'EmailField': 'string',
        'FileField': 'string',
        'FilePathField': 'string',
        'FloatField': 'float',
        'ImageField': 'string',
        'IntegerField': 'integer',
        'IPAddressField': 'string',
        'NullBooleanField': 'bool',
        'PositiveIntegerField': 'integer',
        'PositiveSmallIntegerField': 'integer',
        'SlugField': 'string',
        'SmallIntegerField': 'integer',
        'TextField': 'string',
        'TimeField': 'time',
        'URLField': 'string',
        'XMLField': 'string',

        # You may use "list" for SetField, or even DictField and
        # EmbeddedModelField (if your database supports nested lists).
        # All following fields also support "string" and "bytes" as
        # their storage types -- which work by serializing using pickle
        # protocol 0 or 2 respectively.
        # Please note that if you can't support the "natural" storage
        # type then the order of field values will be undetermined, and
        # lookups or filters may not work as specified (e.g. the same
        # set or dict may be represented by different lists, with
        # elements in different order, so the same two instances may
        # compare one way or the other).
        'AbstractIterableField': 'list',
        'ListField': 'list',
        'SetField': 'set',
        'DictField': 'dict',
        'EmbeddedModelField': 'dict',

        # RawFields ("raw" db_type) are used when type is not known
        # (untyped collections) or for values that do not come from
        # a field at all (model info serialization), only do generic
        # processing for them (if any). On the other hand, anything
        # using the "bytes" db_type should be converted to a database
        # blob type or stored as binary data.
        'RawField': 'raw',
        'BlobField': 'bytes',
    }

    def db_type(self, field):
        """
        Allows back-ends to override db_type determined by the field.

        This has to be called instead of the Field.db_type, because we
        may need to override a db_type a custom field returns directly,
        and need more freedom in handling types of primary keys and
        related fields.

        :param field: A field we want to know the storage type of

        TODO: Field.db_type (as of 1.3.1) is used mostly for generating
              SQL statements (through a couple of methods in
              DatabaseCreation and DatabaseOperations.field_cast_sql)
              or within back-end implementations -- nonrel is not
              dependend on any of these; but there are two cases that
              might need to be fixed, namely:
              -- management/createcachetable (calls field.db_type),
              -- and contrib/gis (defines its own geo_db_type method).
        """
        return field.db_type(connection=self.connection)

    def sql_create_model(self, model, style, known_models=set()):
        """
        Most NoSQL databases are mostly schema-less, no data
        definitions are needed.
        """
        return [], {}

    def sql_indexes_for_model(self, model, style):
        """
        Creates all indexes needed for local (not inherited) fields of
        a model.
        """
        return []
| Python |
import datetime
import random
from django.conf import settings
from django.db.models.fields import NOT_PROVIDED
from django.db.models.query import QuerySet
from django.db.models.sql import aggregates as sqlaggregates
from django.db.models.sql.compiler import SQLCompiler
from django.db.models.sql.constants import LOOKUP_SEP, MULTI, SINGLE
from django.db.models.sql.where import AND, OR
from django.db.utils import DatabaseError, IntegrityError
from django.utils.tree import Node
# In-memory implementations of Django's field lookups, used by
# NonrelQuery._matches_filters to emulate database-side filtering.
# Each lambda receives (entity_value, lookup_value) and returns bool.
EMULATED_OPS = {
    # "exact" against a list/tuple column means membership (e.g. for
    # list-valued fields), plain equality otherwise.
    'exact': lambda x, y: y in x if isinstance(x, (list, tuple)) else x == y,
    'iexact': lambda x, y: x.lower() == y.lower(),
    # Bug fix: compare against the whole prefix string, not just its
    # first character -- by this point _normalize_lookup_value has
    # already unwrapped the one-element list and stripped the
    # trailing "%", so the lookup value is a plain string.
    'startswith': lambda x, y: x.startswith(y),
    'istartswith': lambda x, y: x.lower().startswith(y.lower()),
    'isnull': lambda x, y: x is None if y else x is not None,
    'in': lambda x, y: x in y,
    'lt': lambda x, y: x < y,
    'lte': lambda x, y: x <= y,
    'gt': lambda x, y: x > y,
    'gte': lambda x, y: x >= y,
}
class NonrelQuery(object):
    """
    Base class for nonrel queries.

    Compilers build a nonrel query when they want to fetch some data.
    They work by first allowing sql.compiler.SQLCompiler to partly build
    a sql.Query, constructing a NonrelQuery query on top of it, and then
    iterating over its results.

    This class provides in-memory filtering and ordering and a
    framework for converting SQL constraint tree built by Django to a
    "representation" more suitable for most NoSQL databases.

    TODO: Replace with FetchCompiler, there are too many query concepts
          around, and it isn't a good abstraction for NoSQL databases.
    TODO: Nonrel currently uses constraint's tree built by Django for
          its SQL back-ends to handle filtering. However, Django
          intermingles translating its lookup / filtering abstraction
          to a logical formula with some preprocessing for joins and
          this results in hacks in nonrel. It would be a better to pull
          out SQL-specific parts from the constraints preprocessing.
    """

    # ----------------------------------------------
    # Public API
    # ----------------------------------------------

    def __init__(self, compiler, fields):
        self.compiler = compiler
        self.connection = compiler.connection
        self.ops = compiler.connection.ops
        self.query = compiler.query  # sql.Query
        self.fields = fields
        # Toggled by add_filters while entering / leaving negated
        # subtrees of the WHERE tree.
        self._negated = False

    def fetch(self, low_mark=0, high_mark=None):
        """
        Returns an iterator over some part of query results.
        """
        raise NotImplementedError

    def count(self, limit=None):
        """
        Returns the number of objects that would be returned, if
        this query was executed, up to `limit`.
        """
        raise NotImplementedError

    def delete(self):
        """
        Called by NonrelDeleteCompiler after it builds a delete query.
        """
        raise NotImplementedError

    def order_by(self, ordering):
        """
        Reorders query results or execution order. Called by
        NonrelCompilers during query building.

        :param ordering: A list with (field, ascending) tuples or a
                         boolean -- use natural ordering, if any, when
                         the argument is True and its reverse otherwise
        """
        raise NotImplementedError

    def add_filter(self, field, lookup_type, negated, value):
        """
        Adds a single constraint to the query. Called by add_filters for
        each constraint leaf in the WHERE tree built by Django.

        :param field: Lookup field (instance of Field); field.column
                      should be used for database keys
        :param lookup_type: Lookup name (e.g. "startswith")
        :param negated: Is the leaf negated
        :param value: Lookup argument, such as a value to compare with;
                      already prepared for the database
        """
        raise NotImplementedError

    def add_filters(self, filters):
        """
        Converts a constraint tree (sql.where.WhereNode) created by
        Django's SQL query machinery to nonrel style filters, calling
        add_filter for each constraint.

        This assumes the database doesn't support alternatives of
        constraints, you should override this method if it does.

        TODO: Simulate both conjunctions and alternatives in general
              let GAE override conjunctions not to split them into
              multiple queries.
        """
        if filters.negated:
            self._negated = not self._negated
        if not self._negated and filters.connector != AND:
            raise DatabaseError("Only AND filters are supported.")
        # Remove unneeded children from the tree.
        children = self._get_children(filters.children)
        if self._negated and filters.connector != OR and len(children) > 1:
            raise DatabaseError("When negating a whole filter subgroup "
                                "(e.g. a Q object) the subgroup filters must "
                                "be connected via OR, so the non-relational "
                                "backend can convert them like this: "
                                "'not (a OR b) => (not a) AND (not b)'.")
        # Recursively call the method for internal tree nodes, add a
        # filter for each leaf.
        for child in children:
            if isinstance(child, Node):
                self.add_filters(child)
                continue
            field, lookup_type, value = self._decode_child(child)
            self.add_filter(field, lookup_type, self._negated, value)
        # Restore the negation state on the way out of this subtree.
        if filters.negated:
            self._negated = not self._negated

    # ----------------------------------------------
    # Internal API for reuse by subclasses
    # ----------------------------------------------

    def _decode_child(self, child):
        """
        Produces arguments suitable for add_filter from a WHERE tree
        leaf (a tuple).
        """
        # TODO: Call get_db_prep_lookup directly, constrain.process
        #       doesn't do much more.
        constraint, lookup_type, annotation, value = child
        packed, value = constraint.process(lookup_type, value, self.connection)
        alias, column, db_type = packed
        field = constraint.field
        opts = self.query.model._meta
        if alias and alias != opts.db_table:
            raise DatabaseError("This database doesn't support JOINs "
                                "and multi-table inheritance.")
        # For parent.child_set queries the field held by the constraint
        # is the parent's primary key, while the field the filter
        # should consider is the child's foreign key field.
        if column != field.column:
            assert field.primary_key
            field = (f for f in opts.fields if f.column == column).next()
            assert field.rel is not None
        value = self._normalize_lookup_value(
            lookup_type, value, field, annotation)
        return field, lookup_type, value

    def _normalize_lookup_value(self, lookup_type, value, field, annotation):
        """
        Undoes preparations done by `Field.get_db_prep_lookup` not
        suitable for nonrel back-ends and passes the lookup argument
        through nonrel's `value_for_db`.

        TODO: Blank `Field.get_db_prep_lookup` and remove this method.
        """
        # Undo Field.get_db_prep_lookup putting most values in a list
        # (a subclass may override this, so check if it's a list) and
        # losing the (True / False) argument to the "isnull" lookup.
        if lookup_type not in ('in', 'range', 'year') and \
                isinstance(value, (tuple, list)):
            if len(value) > 1:
                raise DatabaseError("Filter lookup type was %s; expected the "
                                    "filter argument not to be a list. Only "
                                    "'in'-filters can be used with lists." %
                                    lookup_type)
            elif lookup_type == 'isnull':
                value = annotation
            else:
                value = value[0]
        # Remove percents added by Field.get_db_prep_lookup (useful
        # if one were to use the value in a LIKE expression).
        if lookup_type in ('startswith', 'istartswith'):
            value = value[:-1]
        elif lookup_type in ('endswith', 'iendswith'):
            value = value[1:]
        elif lookup_type in ('contains', 'icontains'):
            value = value[1:-1]
        # Prepare the value for a database using the nonrel framework.
        return self.ops.value_for_db(value, field, lookup_type)

    def _get_children(self, children):
        """
        Filters out nodes of the given contraint tree not needed for
        nonrel queries; checks that given constraints are supported.
        """
        result = []
        for child in children:
            if isinstance(child, tuple):
                constraint, lookup_type, _, value = child
                # When doing a lookup using a QuerySet Django would use
                # a subquery, but this won't work for nonrel.
                # TODO: Add a supports_subqueries feature and let
                #       Django evaluate subqueries instead of passing
                #       them as SQL strings (QueryWrappers) to
                #       filtering.
                if isinstance(value, QuerySet):
                    raise DatabaseError("Subqueries are not supported (yet).")
                # Remove leafs that were automatically added by
                # sql.Query.add_filter to handle negations of outer
                # joins.
                if lookup_type == 'isnull' and constraint.field is None:
                    continue
            result.append(child)
        return result

    def _matches_filters(self, entity, filters):
        """
        Checks if an entity returned by the database satisfies
        constraints in a WHERE tree (in-memory filtering).
        """
        # Filters without rules match everything.
        if not filters.children:
            return True
        result = filters.connector == AND
        for child in filters.children:
            # Recursively check a subtree,
            if isinstance(child, Node):
                submatch = self._matches_filters(entity, child)
            # Check constraint leaf, emulating a database condition.
            else:
                field, lookup_type, lookup_value = self._decode_child(child)
                entity_value = entity[field.column]
                if entity_value is None:
                    # Missing values sort before any date/time and
                    # never match string lookups.
                    if isinstance(lookup_value,
                                  (datetime.datetime, datetime.date,
                                   datetime.time)):
                        submatch = lookup_type in ('lt', 'lte')
                    elif lookup_type in (
                            'startswith', 'contains', 'endswith', 'iexact',
                            'istartswith', 'icontains', 'iendswith'):
                        submatch = False
                    else:
                        submatch = EMULATED_OPS[lookup_type](
                            entity_value, lookup_value)
                else:
                    submatch = EMULATED_OPS[lookup_type](
                        entity_value, lookup_value)
            # Short-circuit evaluation of the connector.
            if filters.connector == OR and submatch:
                result = True
                break
            elif filters.connector == AND and not submatch:
                result = False
                break
        if filters.negated:
            return not result
        return result

    def _order_in_memory(self, lhs, rhs):
        # cmp-style comparator for in-memory sorting of fetched
        # entities (dicts keyed by column name).
        for field, ascending in self.compiler._get_ordering():
            column = field.column
            result = cmp(lhs.get(column), rhs.get(column))
            if result != 0:
                return result if ascending else -result
        return 0
class NonrelCompiler(SQLCompiler):
    """
    Base class for data fetching back-end compilers.

    Note that nonrel compilers derive from sql.compiler.SQLCompiler and
    thus hold a reference to a sql.Query, not a NonrelQuery.

    TODO: Separate FetchCompiler from the abstract NonrelCompiler.
    """

    def __init__(self, query, connection, using):
        """
        Initializes the underlying SQLCompiler.
        """
        super(NonrelCompiler, self).__init__(query, connection, using)
        self.ops = self.connection.ops

    # ----------------------------------------------
    # Public API
    # ----------------------------------------------

    def results_iter(self):
        """
        Returns an iterator over the results from executing query given
        to this compiler. Called by QuerySet methods.
        """
        fields = self.get_fields()
        results = self.build_query(fields).fetch(
            self.query.low_mark, self.query.high_mark)
        for entity in results:
            yield self._make_result(entity, fields)

    def has_results(self):
        # Existence check: count with a high mark of 1 (see get_count).
        return self.get_count(check_exists=True)

    def execute_sql(self, result_type=MULTI):
        """
        Handles SQL-like aggregate queries. This class only emulates COUNT
        by using abstract NonrelQuery.count method.
        """
        aggregates = self.query.aggregate_select.values()
        # Simulate a count().
        if aggregates:
            assert len(aggregates) == 1
            aggregate = aggregates[0]
            assert isinstance(aggregate, sqlaggregates.Count)
            opts = self.query.get_meta()
            assert aggregate.col == '*' or \
                   aggregate.col == (opts.db_table, opts.pk.column)
            count = self.get_count()
            if result_type is SINGLE:
                return [count]
            elif result_type is MULTI:
                return [[count]]
        # Any other aggregate is unsupported.
        raise NotImplementedError("The database backend only supports "
                                  "count() queries.")

    # ----------------------------------------------
    # Additional NonrelCompiler API
    # ----------------------------------------------

    def _make_result(self, entity, fields):
        """
        Decodes values for the given fields from the database entity.

        The entity is assumed to be a dict using field database column
        names as keys. Decodes values using `value_from_db` as well as
        the standard `convert_values`.
        """
        result = []
        for field in fields:
            value = entity.get(field.column, NOT_PROVIDED)
            if value is NOT_PROVIDED:
                value = field.get_default()
            else:
                value = self.ops.value_from_db(value, field)
                value = self.query.convert_values(value, field,
                                                  self.connection)
            if value is None and not field.null:
                raise IntegrityError("Non-nullable field %s can't be None!" %
                                     field.name)
            result.append(value)
        return result

    def check_query(self):
        """
        Checks if the current query is supported by the database.

        In general, we expect queries requiring JOINs (many-to-many
        relations, abstract model bases, or model spanning filtering),
        using DISTINCT (through `QuerySet.distinct()`, which is not
        required in most situations) or using the SQL-specific
        `QuerySet.extra()` to not work with nonrel back-ends.
        """
        if (len([a for a in self.query.alias_map if
                 self.query.alias_refcount[a]]) > 1 or
                self.query.distinct or self.query.extra or self.query.having):
            raise DatabaseError("This query is not supported by the database.")

    def get_count(self, check_exists=False):
        """
        Counts objects matching the current filters / constraints.

        :param check_exists: Only check if any object matches
        """
        if check_exists:
            high_mark = 1
        else:
            high_mark = self.query.high_mark
        return self.build_query().count(high_mark)

    def build_query(self, fields=None):
        """
        Checks if the underlying SQL query is supported and prepares
        a NonrelQuery to be executed on the database.
        """
        self.check_query()
        if fields is None:
            fields = self.get_fields()
        query = self.query_class(self, fields)
        query.add_filters(self.query.where)
        query.order_by(self._get_ordering())
        # This at least satisfies the most basic unit tests.
        if settings.DEBUG:
            self.connection.queries.append({'sql': repr(query)})
        return query

    def get_fields(self):
        """
        Returns fields which should get loaded from the back-end by the
        current query.
        """
        # We only set this up here because related_select_fields isn't
        # populated until execute_sql() has been called.
        if self.query.select_fields:
            fields = (self.query.select_fields +
                      self.query.related_select_fields)
        else:
            fields = self.query.model._meta.fields
        # If the field was deferred, exclude it from being passed
        # into `resolve_columns` because it wasn't selected.
        only_load = self.deferred_to_columns()
        if only_load:
            db_table = self.query.model._meta.db_table
            only_load = dict((k, v) for k, v in only_load.items()
                             if v or k == db_table)
            if len(only_load.keys()) > 1:
                raise DatabaseError("Multi-table inheritance is not "
                                    "supported by non-relational DBs %s." %
                                    repr(only_load))
            fields = [f for f in fields if db_table in only_load and
                      f.column in only_load[db_table]]
        query_model = self.query.model
        if query_model._meta.proxy:
            # Proxy models share the concrete model's fields.
            query_model = query_model._meta.proxy_for_model
        for field in fields:
            if field.model._meta != query_model._meta:
                raise DatabaseError("Multi-table inheritance is not "
                                    "supported by non-relational DBs.")
        return fields

    def _get_ordering(self):
        """
        Returns a list of (field, ascending) tuples that the query
        results should be ordered by. If there is no field ordering
        defined returns just the standard_ordering (a boolean, needed
        for MongoDB "$natural" ordering).
        """
        opts = self.query.get_meta()
        if not self.query.default_ordering:
            ordering = self.query.order_by
        else:
            ordering = self.query.order_by or opts.ordering
        if not ordering:
            return self.query.standard_ordering
        field_ordering = []
        for order in ordering:
            if LOOKUP_SEP in order:
                raise DatabaseError("Ordering can't span tables on "
                                    "non-relational backends (%s)." % order)
            if order == '?':
                raise DatabaseError("Randomized ordering isn't supported by "
                                    "the backend.")
            ascending = not order.startswith('-')
            # reverse() flips standard_ordering, which flips every
            # per-field direction here.
            if not self.query.standard_ordering:
                ascending = not ascending
            name = order.lstrip('+-')
            if name == 'pk':
                name = opts.pk.name
            field_ordering.append((opts.get_field(name), ascending))
        return field_ordering
class NonrelInsertCompiler(NonrelCompiler):
    """
    Base class for all compilers that create new entities or objects
    in the database. It has to define execute_sql method due to being
    used in place of a SQLInsertCompiler.

    TODO: Analyze if it's always true that when field is None we should
          use the PK from self.query (check if the column assertion
          below ever fails).
    """

    def execute_sql(self, return_id=False):
        field_values = {}
        pk = self.query.get_meta().pk
        for (field, value), column in zip(self.query.values,
                                          self.query.columns):
            # Raise an exception for non-nullable fields without a value.
            if field is not None:
                if not field.null and value is None:
                    raise IntegrityError("You can't set %s (a non-nullable "
                                         "field) to None!" % field.name)
            # Use the primary key field when our sql.Query provides a
            # value without a field.
            if field is None:
                field = pk
            assert field.column == column
            assert field not in field_values
            # Prepare value for database, note that query.values have
            # already passed through get_db_prep_save.
            value = self.ops.value_for_db(value, field)
            field_values[field] = value
        key = self.insert(field_values, return_id=return_id)
        # Pass the key value through normal database deconversion.
        return self.ops.convert_values(self.ops.value_from_db(key, pk), pk)

    def insert(self, values, return_id):
        """
        Creates a new entity to represent a model.

        Note that the returned key will go through the same database
        deconversions that every value coming from the database does
        (`convert_values` and `value_from_db`).

        :param values: The model object as a list of (field, value)
                       pairs; each value is already prepared for the
                       database
        :param return_id: Whether to return the id or key of the newly
                          created entity
        """
        raise NotImplementedError
class NonrelUpdateCompiler(NonrelCompiler):
    """
    Base class for compilers changing existing database entities;
    prepares the new values and delegates the actual write to the
    abstract `update` method.
    """

    def execute_sql(self, result_type):
        values = []
        for field, _, value in self.query.values:
            # Objects defining prepare_database_save (e.g. model
            # instances) convert themselves; others go through the
            # field's standard save preparation.
            if hasattr(value, 'prepare_database_save'):
                value = value.prepare_database_save(field)
            else:
                value = field.get_db_prep_save(value,
                                               connection=self.connection)
            value = self.ops.value_for_db(value, field)
            values.append((field, value))
        return self.update(values)

    def update(self, values):
        """
        Changes an entity that already exists in the database.

        :param values: A list of (field, new-value) pairs
        """
        raise NotImplementedError
class NonrelDeleteCompiler(NonrelCompiler):
    """
    Base class for compilers removing entities matched by the current
    query; only the primary key needs to be fetched for that.
    """

    def execute_sql(self, result_type=MULTI):
        # Build a query selecting just the primary key and delete all
        # matching entities through NonrelQuery.delete.
        self.build_query([self.query.get_meta().pk]).delete()
| Python |
from django.db.backends.util import format_number
def decimal_to_string(value, max_digits=16, decimal_places=0):
    """
    Converts a decimal to a unicode string for storage / lookup by
    nonrel databases that don't support decimals natively.

    This is an extension to `django.db.backends.util.format_number`
    that preserves order -- if one decimal is less than another, their
    string representations should compare the same (as strings).

    TODO: Can't this be done using string.format()?
          Not in Python 2.5, str.format is backported to 2.6 only.
    """
    # Handle the sign separately and re-attach it at the end.
    if value.is_signed():
        sign, value = u'-', abs(value)
    else:
        sign = u''
    # Let Django quantize and cast to a string.
    text = format_number(value, max_digits, decimal_places)
    # Pad the integer part with zeroes to a constant width so that
    # string comparison matches numeric comparison.
    dot = text.find('.')
    if dot < 0:
        dot = len(text)
    pad = max_digits - decimal_places - dot
    if pad > 0:
        text = u'0' * pad + text
    return sign + text
| Python |
import cPickle as pickle
import datetime
from django.db.backends import (
BaseDatabaseFeatures,
BaseDatabaseOperations,
BaseDatabaseWrapper,
BaseDatabaseClient,
BaseDatabaseValidation,
BaseDatabaseIntrospection)
from django.db.utils import DatabaseError
from django.utils.functional import Promise
from django.utils.safestring import EscapeString, EscapeUnicode, SafeString, \
SafeUnicode
from .creation import NonrelDatabaseCreation
class NonrelDatabaseFeatures(BaseDatabaseFeatures):
    # NoSQL databases usually return a key after saving a new object.
    can_return_id_from_insert = True

    # TODO: Doesn't seem necessary in general, move to back-ends.
    #       Mongo: see PyMongo's FAQ; GAE: see: http://timezones.appspot.com/.
    supports_date_lookup_using_string = False
    supports_timezones = False

    # Features that are commonly not available on nonrel databases.
    supports_joins = False
    supports_select_related = False
    supports_deleting_related_objects = False

    # Having to decide whether to use an INSERT or an UPDATE query is
    # specific to SQL-based databases.
    distinguishes_insert_from_update = False

    # Can primary_key be used on any field? Without encoding usually
    # only a limited set of types is acceptable for keys. This is a set
    # of all field kinds (internal_types) for which the primary_key
    # argument may be used.
    # TODO: Use during model validation.
    # TODO: Move to core and use to skip unsuitable Django tests.
    supports_primary_key_on = set(NonrelDatabaseCreation.data_types.keys()) - \
        set(('ForeignKey', 'OneToOneField', 'ManyToManyField', 'RawField',
             'AbstractIterableField', 'ListField', 'SetField', 'DictField',
             'EmbeddedModelField', 'BlobField'))

    def _supports_transactions(self):
        # Transaction support, if any, is back-end specific; report none here.
        return False
class NonrelDatabaseOperations(BaseDatabaseOperations):
    """
    Override all database conversions normally done by fields (through
    `get_db_prep_value/save/lookup`) to make it possible to pass Python
    values directly to the database layer. On the other hand, provide a
    framework for making type-based conversions -- drivers of NoSQL
    databases either can work with Python objects directly, sometimes
    representing one type using another or expect everything encoded
    in some specific manner.

    Django normally handles conversions for the database by providing
    `BaseDatabaseOperations.value_to_db_*` / `convert_values` methods,
    but there are some problems with them:
    -- some preparations need to be done for all values or for values
       of a particular "kind" (e.g. lazy objects evaluation or casting
       strings wrappers to standard types);
    -- some conversions need more info about the field or model the
       value comes from (e.g. key conversions, embedded deconversion);
    -- there are no value_to_db_* methods for some value types (bools);
    -- we need to handle collection fields (list, set, dict): they
       need to differentiate between deconverting from database and
       deserializing (so single to_python is inconvenient) and need to
       do some recursion, so a single `value_for_db` is better than one
       method for each field kind.

    Don't use these standard methods in nonrel, `value_for/from_db` are
    more elastic and keeping all conversions in one place makes the
    code easier to analyse.

    Please note, that after changes to type conversions, data saved
    using preexisting methods needs to be handled; and also that Django
    does not expect any special database driver exceptions, so any such
    exceptions should be reraised as django.db.utils.DatabaseError.

    TODO: Consider replacing all `value_to_db_*` and `convert_values`
          with just `BaseDatabaseOperations.value_for/from_db` and also
          moving there code from `Field.get_db_prep_lookup` (and maybe
          `RelatedField.get_db_prep_lookup`).
    """

    def __init__(self, connection):
        self.connection = connection
        super(NonrelDatabaseOperations, self).__init__()

    def pk_default_value(self):
        """
        Returns None, to be interpreted by back-ends as a request to
        generate a new key for an "inserted" object.
        """
        return None

    def quote_name(self, name):
        """
        Does not do any quoting, as it is not needed for most NoSQL
        databases.
        """
        return name

    def prep_for_like_query(self, value):
        """
        Does no conversion, parent string-cast is SQL specific.
        """
        return value

    def prep_for_iexact_query(self, value):
        """
        Does no conversion, parent string-cast is SQL specific.
        """
        return value

    def value_to_db_auto(self, value):
        """
        Assuming that the database has its own key type, leaves any
        conversions to the back-end.

        This method is added by nonrel to allow various types to be
        used for automatic primary keys. `AutoField.get_db_prep_value`
        calls it to prepare field's value for the database.

        Note that Django can pass a string representation of the value
        instead of the value itself (after receiving it as a query
        parameter for example), so you'll likely need to limit
        your `AutoFields` in a way that makes `str(value)` reversible.

        TODO: This could become a part of `value_for_db` if it makes
              it to Django (with a `field_kind` condition).
        """
        return value

    def value_to_db_date(self, value):
        """
        Unlike with SQL database clients, it's better to assume that
        a date can be stored directly.
        """
        return value

    def value_to_db_datetime(self, value):
        """
        We may pass a datetime object to a database driver without
        casting it to a string.
        """
        return value

    def value_to_db_time(self, value):
        """
        Unlike with SQL database clients, we may assume that a time can
        be stored directly.
        """
        return value

    def value_to_db_decimal(self, value, max_digits, decimal_places):
        """
        We may assume that a decimal can be passed to a NoSQL database
        driver directly.
        """
        return value

    def year_lookup_bounds(self, value):
        """
        Converts year bounds to datetime bounds as these can likely be
        used directly, also adds one to the upper bound as it should be
        natural to use one strict inequality for BETWEEN-like filters
        for most nonrel back-ends.
        """
        return [datetime.datetime(value, 1, 1, 0, 0, 0, 0),
                datetime.datetime(value + 1, 1, 1, 0, 0, 0, 0)]

    def convert_values(self, value, field):
        """
        We may assume that values returned by the database are standard
        Python types suitable to be passed to fields.
        """
        return value

    def check_aggregate_support(self, aggregate):
        """
        Nonrel back-ends are only expected to implement COUNT in
        general.
        """
        from django.db.models.sql.aggregates import Count
        if not isinstance(aggregate, Count):
            raise NotImplementedError("This database does not support %r "
                                      "aggregates." % type(aggregate))

    def value_for_db(self, value, field, lookup=None):
        """
        Does type-conversions needed before storing a value in the
        the database or using it as a filter parameter.

        This is a convenience wrapper that only precomputes field's kind
        and a db_type for the field (or the primary key of the related
        model for ForeignKeys etc.) and knows that arguments to the
        `isnull` lookup (`True` or `False`) should not be converted,
        while some other lookups take a list of arguments.
        In the end, it calls `_value_for_db` to do the real work; you
        should typically extend that method, but only call this one.

        :param value: A value to be passed to the database driver
        :param field: A field the value comes from
        :param lookup: None if the value is being prepared for storage;
                       lookup type name, when it's going to be used as a
                       filter argument
        """
        field, field_kind, db_type = self._convert_as(field, lookup)

        # Argument to the "isnull" lookup is just a boolean, while some
        # other lookups take a list of values.
        if lookup == 'isnull':
            return value
        elif lookup in ('in', 'range', 'year'):
            return [self._value_for_db(subvalue, field,
                                       field_kind, db_type, lookup)
                    for subvalue in value]
        else:
            return self._value_for_db(value, field,
                                      field_kind, db_type, lookup)

    def value_from_db(self, value, field):
        """
        Performs deconversions defined by `_value_from_db`.

        :param value: A value received from the database client
        :param field: A field the value is meant for
        """
        return self._value_from_db(value, *self._convert_as(field))

    def _convert_as(self, field, lookup=None):
        """
        Computes parameters that should be used for preparing the field
        for the database or deconverting a database value for it.
        """
        # We need to compute db_type using the original field to allow
        # GAE to use different storage for primary and foreign keys.
        db_type = self.connection.creation.db_type(field)

        if field.rel is not None:
            field = field.rel.get_related_field()
        field_kind = field.get_internal_type()

        # Values for standard month / day queries are integers.
        if (field_kind in ('DateField', 'DateTimeField') and
                lookup in ('month', 'day')):
            db_type = 'integer'

        return field, field_kind, db_type

    def _value_for_db(self, value, field, field_kind, db_type, lookup):
        """
        Converts a standard Python value to a type that can be stored
        or processed by the database driver.

        This implementation only converts elements of iterables passed
        by collection fields, evaluates Django's lazy objects and
        marked strings and handles embedded models.
        Currently, we assume that dict keys and column, model, module
        names (strings) of embedded models require no conversion.

        We need to know the field for two reasons:
        -- to allow back-ends having separate key spaces for different
           tables to create keys referring to the right table (which can
           be the field model's table or the table of the model of the
           instance a ForeignKey or other relation field points to).
        -- to know the field of values passed by typed collection
           fields and to use the proper fields when deconverting values
           stored for typed embedding field.
        Avoid using the field in any other way than by inspecting its
        properties, it may not hold any value or hold a value other
        than the one you're asked to convert.

        You may want to call this method before doing other back-end
        specific conversions.

        :param value: A value to be passed to the database driver
        :param field: A field having the same properties as the field
                      the value comes from; instead of related fields
                      you'll get the related model primary key, as the
                      value usually needs to be converted using its
                      properties
        :param field_kind: Equal to field.get_internal_type()
        :param db_type: Same as creation.db_type(field)
        :param lookup: None if the value is being prepared for storage;
                       lookup type name, when it's going to be used as a
                       filter argument
        """
        # Back-ends may want to store empty lists or dicts as None.
        if value is None:
            return None

        # Force evaluation of lazy objects (e.g. lazy translation
        # strings).
        # Some back-ends pass values directly to the database driver,
        # which may fail if it relies on type inspection and gets a
        # functional proxy.
        # This code relies on unicode cast in django.utils.functional
        # just evaluating the wrapped function and doing nothing more.
        # TODO: This has been partially fixed in vanilla with:
        #       https://code.djangoproject.com/changeset/17698, however
        #       still fails for proxies in lookups; reconsider in 1.4.
        #       Also research cases of database operations not done
        #       through the sql.Query.
        if isinstance(value, Promise):
            value = unicode(value)

        # Django wraps strings marked as safe or needed escaping,
        # convert them to just strings for type-inspecting back-ends.
        if isinstance(value, (SafeString, EscapeString)):
            value = str(value)
        elif isinstance(value, (SafeUnicode, EscapeUnicode)):
            value = unicode(value)

        # Convert elements of collection fields.
        if field_kind in ('ListField', 'SetField', 'DictField',):
            value = self._value_for_db_collection(value, field,
                                                  field_kind, db_type, lookup)

        # Store model instance fields' values.
        elif field_kind == 'EmbeddedModelField':
            value = self._value_for_db_model(value, field,
                                             field_kind, db_type, lookup)

        return value

    def _value_from_db(self, value, field, field_kind, db_type):
        """
        Converts a database type to a type acceptable by the field.

        If you encoded a value for storage in the database, reverse the
        encoding here. This implementation only recursively deconverts
        elements of collection fields and handles embedded models.

        You may want to call this method after any back-end specific
        deconversions.

        :param value: A value to be passed to the database driver
        :param field: A field having the same properties as the field
                      the value comes from
        :param field_kind: Equal to field.get_internal_type()
        :param db_type: Same as creation.db_type(field)

        Note: lookup values never get deconverted.
        """
        # We did not convert Nones.
        if value is None:
            return None

        # Deconvert items or values of a collection field.
        if field_kind in ('ListField', 'SetField', 'DictField',):
            value = self._value_from_db_collection(value, field,
                                                   field_kind, db_type)

        # Reinstantiate a serialized model.
        elif field_kind == 'EmbeddedModelField':
            value = self._value_from_db_model(value, field,
                                              field_kind, db_type)

        return value

    def _value_for_db_collection(self, value, field, field_kind, db_type,
                                 lookup):
        """
        Recursively converts values from AbstractIterableFields.

        Note that collection lookup values are plain values rather than
        lists, sets or dicts, but they still should be converted as a
        collection item (assuming all items or values are converted in
        the same way).

        We base the conversion on field class / kind and assume some
        knowledge about field internals (e.g. that the field has an
        "item_field" property that gives the right subfield for any of
        its values), to avoid adding a framework for determination of
        parameters for items' conversions; we do the conversion here
        rather than inside get_db_prep_save/lookup for symmetry with
        deconversion (which can't be in to_python because the method is
        also used for data not coming from the database).

        Returns a list, set, dict, string or bytes according to the
        db_type given.
        If the "list" db_type is used for DictField, a list with keys
        and values interleaved will be returned (list of pairs is not
        good, because lists / tuples may need conversion themselves;
        the list may still be nested for dicts containing collections).
        The "string" and "bytes" db_types use serialization with pickle
        protocol 0 or 2 respectively.
        If an unknown db_type is specified, returns a generator
        yielding converted elements / pairs with converted values.
        """
        subfield, subkind, db_subtype = self._convert_as(field.item_field,
                                                         lookup)

        # Do convert filter parameters.
        if lookup:
            # Special case where we are looking for an empty list
            if lookup == 'exact' and db_type == 'list' and value == u'[]':
                return []
            value = self._value_for_db(value, subfield,
                                       subkind, db_subtype, lookup)

        # Convert list/set items or dict values.
        else:
            if field_kind == 'DictField':

                # Generator yielding pairs with converted values.
                value = (
                    (key, self._value_for_db(subvalue, subfield,
                                             subkind, db_subtype, lookup))
                    for key, subvalue in value.iteritems())

                # Return just a dict, a once-flattened list;
                if db_type == 'dict':
                    return dict(value)
                elif db_type == 'list':
                    return list(item for pair in value for item in pair)

            else:

                # Generator producing converted items.
                value = (
                    self._value_for_db(subvalue, subfield,
                                       subkind, db_subtype, lookup)
                    for subvalue in value)

                # "list" may be used for SetField.
                # BUG FIX: this used to read `db_type in 'list'`, a
                # substring test that also matched e.g. 'is' or '' --
                # an equality check is what's intended (cf. the dict
                # branch above).
                if db_type == 'list':
                    return list(value)
                elif db_type == 'set':
                    # assert field_kind != 'ListField'
                    return set(value)

            # Pickled formats may be used for all collection fields,
            # the fields "natural" type is serialized (something
            # concrete is needed, pickle can't handle generators :-)
            if db_type == 'bytes':
                return pickle.dumps(field._type(value), protocol=2)
            elif db_type == 'string':
                return pickle.dumps(field._type(value))

        # If nothing matched, pass the generator to the back-end.
        return value

    def _value_from_db_collection(self, value, field, field_kind, db_type):
        """
        Recursively deconverts values for AbstractIterableFields.

        Assumes that all values in a collection can be deconverted
        using a single field (Field.item_field, possibly a RawField).

        Returns a value in a format proper for the field kind (the
        value will normally not go through to_python).
        """
        subfield, subkind, db_subtype = self._convert_as(field.item_field)

        # Unpickle (a dict) if a serialized storage is used.
        if db_type == 'bytes' or db_type == 'string':
            value = pickle.loads(value)

        if field_kind == 'DictField':

            # Generator yielding pairs with deconverted values, the
            # "list" db_type stores keys and values interleaved.
            if db_type == 'list':
                value = zip(value[::2], value[1::2])
            else:
                value = value.iteritems()

            # DictField needs to hold a dict.
            return dict(
                (key, self._value_from_db(subvalue, subfield,
                                          subkind, db_subtype))
                for key, subvalue in value)
        else:

            # Generator yielding deconverted items.
            value = (
                self._value_from_db(subvalue, subfield,
                                    subkind, db_subtype)
                for subvalue in value)

            # The value will be available from the field without any
            # further processing and it has to have the right type.
            if field_kind == 'ListField':
                return list(value)
            elif field_kind == 'SetField':
                return set(value)

            # A new field kind? Maybe it can take a generator.
            return value

    def _value_for_db_model(self, value, field, field_kind, db_type, lookup):
        """
        Converts a field => value mapping received from an
        EmbeddedModelField the format chosen for the field storage.

        The embedded instance fields' values are also converted /
        deconverted using value_for/from_db, so any back-end
        conversions will be applied.

        Returns (field.column, value) pairs, possibly augmented with
        model info (to be able to deconvert the embedded instance for
        untyped fields) encoded according to the db_type chosen.
        If "dict" db_type is given a Python dict is returned.
        If the "list" db_type is chosen a list with columns and values
        interleaved will be returned. Note that just a single level of
        the list is flattened, so it still may be nested -- when the
        embedded instance holds other embedded models or collections).
        Using "bytes" or "string" pickles the mapping using pickle
        protocol 0 or 2 respectively.
        If an unknown db_type is used a generator yielding (column,
        value) pairs with values converted will be returned.

        TODO: How should EmbeddedModelField lookups work?
        """
        if lookup:
            # raise NotImplementedError("Needs specification.")
            return value

        # Convert using proper instance field's info, change keys from
        # fields to columns.
        # TODO/XXX: Arguments order due to Python 2.5 compatibility.
        value = (
            (subfield.column, self._value_for_db(
                subvalue, lookup=lookup, *self._convert_as(subfield, lookup)))
            for subfield, subvalue in value.iteritems())

        # Cast to a dict, interleave columns with values on a list,
        # serialize, or return a generator.
        if db_type == 'dict':
            value = dict(value)
        elif db_type == 'list':
            value = list(item for pair in value for item in pair)
        elif db_type == 'bytes':
            value = pickle.dumps(dict(value), protocol=2)
        elif db_type == 'string':
            value = pickle.dumps(dict(value))

        return value

    def _value_from_db_model(self, value, field, field_kind, db_type):
        """
        Deconverts values stored for EmbeddedModelFields.

        Embedded instances are stored as a (column, value) pairs in a
        dict, a single-flattened list or a serialized dict.

        Returns a tuple with model class and field.attname => value
        mapping.
        """
        # Separate keys from values and create a dict or unpickle one.
        if db_type == 'list':
            value = dict(zip(value[::2], value[1::2]))
        elif db_type == 'bytes' or db_type == 'string':
            value = pickle.loads(value)

        # Let untyped fields determine the embedded instance's model.
        embedded_model = field.stored_model(value)

        # Deconvert fields' values and prepare a dict that can be used
        # to initialize a model (by changing keys from columns to
        # attribute names).
        return embedded_model, dict(
            (subfield.attname, self._value_from_db(
                value[subfield.column], *self._convert_as(subfield)))
            for subfield in embedded_model._meta.fields
            if subfield.column in value)

    def _value_for_db_key(self, value, field_kind):
        """
        Converts value to be used as a key to an acceptable type.
        On default we do no encoding, only allowing key values directly
        acceptable by the database for its key type (if any).

        The conversion has to be reversible given the field type,
        encoding should preserve comparisons.

        Use this to expand the set of fields that can be used as
        primary keys, return value suitable for a key rather than
        a key itself.
        """
        raise DatabaseError(
            "%s may not be used as primary key field." % field_kind)

    def _value_from_db_key(self, value, field_kind):
        """
        Decodes a value previously encoded for a key.
        """
        return value
class NonrelDatabaseClient(BaseDatabaseClient):
    # No command-line shell client is provided for nonrel back-ends.
    pass
class NonrelDatabaseValidation(BaseDatabaseValidation):
    # No extra model validation beyond what Django's base class does.
    pass
class NonrelDatabaseIntrospection(BaseDatabaseIntrospection):

    def table_names(self):
        """
        Returns a list of names of all tables that exist in the
        database.

        Nonrel databases have no fixed schema to inspect, so only the
        tables corresponding to installed Django models are reported.
        """
        return self.django_table_names()
class FakeCursor(object):
    """
    Stand-in cursor object that refuses all use.

    Nonrel back-ends have no SQL cursors, so reading or writing any
    attribute on this object raises immediately.
    """

    def __setattr__(self, name, value):
        raise NotImplementedError("Cursors are not supported.")

    def __getattribute__(self, name):
        raise NotImplementedError("Cursors are not supported.")
class NonrelDatabaseWrapper(BaseDatabaseWrapper):
    """Connection wrapper shared by nonrel back-ends; provides no real cursor."""

    # These fake operators are required for SQLQuery.as_sql() support.
    operators = {
        'exact': '= %s',
        'iexact': '= UPPER(%s)',
        'contains': 'LIKE %s',
        'icontains': 'LIKE UPPER(%s)',
        'regex': '~ %s',
        'iregex': '~* %s',
        'gt': '> %s',
        'gte': '>= %s',
        'lt': '< %s',
        'lte': '<= %s',
        'startswith': 'LIKE %s',
        'endswith': 'LIKE %s',
        'istartswith': 'LIKE UPPER(%s)',
        'iendswith': 'LIKE UPPER(%s)',
    }

    def _cursor(self):
        # Nonrel back-ends don't speak SQL; any attempt to use the
        # cursor raises NotImplementedError (see FakeCursor).
        return FakeCursor()
| Python |
from django import http
from django.template import RequestContext, loader
def server_error(request, template_name='500.html'):
    """
    500 error handler.

    Templates: `500.html`
    Context:
        request_path -- the path of the requested URL
                        (e.g., '/app/pages/bad_page/')
    """
    # You need to create a 500.html template.
    template = loader.get_template(template_name)
    context = RequestContext(request, {'request_path': request.path})
    return http.HttpResponseServerError(template.render(context))
| Python |
from djangoappengine.settings_base import *
import os

# Production settings -- keep DEBUG off when deployed.
DEBUG = False
TEMPLATE_DEBUG = DEBUG

# NOTE(review): committing a SECRET_KEY to version control is unsafe;
# consider loading it from the environment / a non-versioned file.
SECRET_KEY = '=r-$bi9LA73jc58&9003mmk5ch1k-3d3vfc4(wk0rn3wa1dhvi'

INSTALLED_APPS = (
    'djangoappengine',
    'djangotoolbox',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'userTools',
    'itemTools',
    'searchTools',
    'commTools',
)

MIDDLEWARE_CLASSES = (
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
)

TEMPLATE_CONTEXT_PROCESSORS = (
    'views.common_proc',
    'django.contrib.auth.context_processors.auth',
    'django.core.context_processors.request',
)

# Each app keeps its templates in its own "templates" directory.
TEMPLATE_DIRS = (os.path.join(os.path.dirname(__file__), 'templates'),
                 os.path.join(os.path.dirname(__file__), 'userTools/templates'),
                 os.path.join(os.path.dirname(__file__), 'itemTools/templates'),
                 os.path.join(os.path.dirname(__file__), 'searchTools/templates'),
                 os.path.join(os.path.dirname(__file__), 'commTools/templates'),
)

# Route queries through dbindexer when it is installed; it wraps the
# default database so filters unsupported by the nonrel back-end work.
try:
    import dbindexer
    DATABASES['native'] = DATABASES['default']
    DATABASES['default'] = {'ENGINE': 'dbindexer', 'TARGET': 'native'}
    INSTALLED_APPS += ('autoload', 'dbindexer',)
    AUTOLOAD_SITECONF = 'dbindexes'
    MIDDLEWARE_CLASSES = ('autoload.middleware.AutoloadMiddleware',) + \
        MIDDLEWARE_CLASSES
except ImportError:
    pass

DBINDEXER_BACKENDS = ('dbindexer.backends.BaseResolver',
                      'dbindexer.backends.FKNullFix',
                      'dbindexer.backends.InMemoryJOINResolver',
                      # 'dbindexer.backends.ConstantFieldJOINResolver',
                      )

ROOT_URLCONF = 'urls'
from django import forms
from itemTools.forms import max_price
class SearchItemForm(forms.Form):
    """Form backing the item search page; all fields arrive via GET."""
    # Free-text search term; minimum 3 characters to keep result sets sane.
    term = forms.CharField(min_length=3, max_length=20, label='Search Term')
    # When checked the search also matches against item descriptions.
    search_descrip = forms.BooleanField(label='Search in Item Description', required=False)
    # Price bounds; -1 is the sentinel meaning "no bound" on that side.
    price_from = forms.IntegerField(label='Min Price (-1: Skip)', min_value=-1, max_value=max_price, initial=-1)
    price_to = forms.IntegerField(label='Max Price (-1: Skip)', min_value=-1, max_value=max_price, initial=-1)
| Python |
from django.db import models
# Create your models here.
| Python |
"""
This file demonstrates writing tests using the unittest module. These will pass
when you run "manage.py test".
Replace this with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):

    def test_basic_addition(self):
        """Sanity check: 1 + 1 always equals 2."""
        result = 1 + 1
        self.assertEqual(result, 2)
| Python |
from userTools.main import handle_login_register, user, handle_optional_login, myprofile_links
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from itemTools.models import items
from userTools.models import user_profile
from forms import SearchItemForm, max_price
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from itemTools.views import item_per_page
import datetime
def search_items(request):
    """
    Item search view.

    GET with no parameters renders an empty search form; GET with
    parameters validates them, filters unsold / unexpired items by term
    (and optionally description) and price range, and renders a
    paginated result page. Any non-GET request shows the empty form.
    """
    if request.method != 'GET':
        return render(request, 'search_form.html', {'form': SearchItemForm()})
    if len(request.GET) == 0:
        return render(request, 'search_form.html', {'form': SearchItemForm()})

    form = SearchItemForm(request.GET)
    if not form.is_valid():
        # Redisplay the form with validation errors.
        return render(request, 'search_form.html', {'form': form})

    term = form.cleaned_data['term']
    # order_by() with no arguments clears the model's default ordering.
    if form.cleaned_data['search_descrip']:
        ret = items.objects.filter(
            is_sold=False, is_expired=False,
            title_join_descrip__icontains=term).order_by()
    else:
        ret = items.objects.filter(
            is_sold=False, is_expired=False,
            title__icontains=term).order_by()

    # -1 is the sentinel for "no bound" on either side of the range.
    a1 = form.cleaned_data['price_from']
    a2 = form.cleaned_data['price_to']
    if a1 == -1 and a2 == -1:
        ret2 = ret
    else:
        if a2 == -1:
            a2 = max_price
        ret2 = [i in ret and i for i in ret if a1 <= i.price <= a2]
        ret2 = [i for i in ret if a1 <= i.price <= a2]
    # NOTE(review): when no price bound is given ret2 is still a
    # QuerySet here; [::-1] on a QuerySet may not be supported by all
    # back-ends -- confirm against the nonrel backend in use.
    ret2 = ret2[::-1]

    page_no = request.GET.get('page_no', 1)
    paginator = Paginator(ret2, item_per_page)
    try:
        ret = paginator.page(page_no)
    except (PageNotAnInteger, EmptyPage):
        # Non-numeric or out-of-range page numbers fall back to page 1.
        ret = paginator.page(1)

    # Rebuild the query string (minus page_no) for pagination links.
    url = '?'
    for key, val in request.GET.items():
        if key != 'page_no':
            url += '%s=%s&' % (key, val)

    return render(request, 'search_result.html',
                  {'url': url, 'items': ret,
                   'title': 'Search Result for \'%s\'' % term,
                   'num_pages': paginator.num_pages})
def search_users(request, nick):
    """Redirects to the profile page of the user with the given nick."""
    try:
        profile = user_profile.objects.get(nick=nick)
    except user_profile.DoesNotExist:
        return render(request, 'error.html',
                      {'error': 'User \'%s\' Not Found!' % nick})
    return HttpResponseRedirect(profile.get_url())
def search_handle(request):
    """
    Dispatches the top search box: 'user' searches redirect to the
    user's profile, 'item' searches redirect to the item search view.
    Anything else goes back to the front page.
    """
    if request.method == 'GET':
        field = request.GET.get('field', False)
        term = request.GET.get('term', '')
        if field == 'user':
            return search_users(request, term)
        elif field == 'item':
            # URL-encode the term so characters like '&', '=' and
            # spaces survive the redirect instead of corrupting the
            # query string.
            from urllib import quote_plus
            return HttpResponseRedirect(
                '/search/?term=%s&price_from=-1&price_to=-1'
                % quote_plus(term))
    return HttpResponseRedirect('/')
from itemTools.models import items
class foo:
    """
    Experimental matcher wrapping a substring regex.

    Comparing an instance with == returns False when the other value
    contains the wrapped string and True otherwise.
    NOTE(review): this inverted polarity (match => False) is preserved
    from the original -- confirm it is intended.
    """

    def __init__(self, boo):
        import re
        self.str = boo
        # BUG FIX: the regex used to be bound to a local variable in
        # __init__, so __eq__ crashed with a NameError on first use;
        # store it on the instance instead.
        self.regex = re.compile('.*' + str(boo) + '.*')

    def __eq__(self, tmp):
        # (debug print removed -- equality checks should be side-effect
        # free)
        if self.regex.match(str(tmp)) is not None:
            return False
        return True
from django import forms
from models import items
# Upper bound (inclusive) for item prices, shared by forms and views.
max_price = 1000000
class SellForm(forms.ModelForm):
    """ModelForm for creating and editing items offered for sale."""

    class Meta:
        model = items
        # Internal / lifecycle fields are never edited directly.
        exclude = ['title_join_descrip', 'is_expired', 'is_active', 'is_sold', 'buyer']

    def clean_price(self):
        """Rejects negative prices and prices above max_price."""
        price = self.cleaned_data['price']
        if price < 0:
            # BUG FIX: the message used to read "Non-Negative prices
            # not allowed", which said the opposite of what this check
            # enforces.
            raise forms.ValidationError('Negative prices not allowed')
        elif price > max_price:
            # Use the shared constant instead of a duplicated literal.
            raise forms.ValidationError('Max Price allowed is %d' % max_price)
        return price

    def clean_descrip(self):
        """Caps the description length."""
        descrip = self.cleaned_data['descrip']
        # NOTE(review): the model declares descrip with max_length=400,
        # so this 500 limit can never be the binding one -- confirm the
        # intended bound.
        if len(descrip) > 500:
            raise forms.ValidationError('Maximum Length allowed is 500')
        return descrip
class DelForm(forms.Form):
    # Single checkbox; deletion only proceeds when it is ticked.
    confirm = forms.BooleanField(label='Confirm item Deletion?')
from django.db import models
from userTools.models import user_profile
import logging
class items(models.Model):
    """
    An item offered for sale by a user.

    is_active yet to be implemented into the App.
    """
    user = models.ForeignKey(user_profile, editable=False)
    title = models.CharField(max_length=30)
    descrip = models.TextField(max_length=400, verbose_name='Description')
    price = models.IntegerField()
    time_create = models.DateTimeField(auto_now_add=True, editable=False)
    is_active = models.BooleanField(default=True, verbose_name='Active? (Inactive items are hidden from others)')
    is_expired = models.BooleanField(default=False)
    is_sold = models.BooleanField(default=False)
    buyer = models.ForeignKey(user_profile, related_name='items_buyer', blank=True, null=True)
    # Denormalized title + descrip blob used for combined text search.
    title_join_descrip = models.TextField(max_length=500)

    class Meta:
        # Newest items first by default.
        ordering = ['-time_create']

    def __unicode__(self):
        return self.title

    def get_url(self):
        """URL of this item's detail page."""
        return '/item/?id=%d' % self.id

    def get_comm_url(self):
        """URL of this item's comments page."""
        return '/item/%d/comm/' % self.id

    def get_date(self):
        """Creation timestamp as a plain string."""
        return str(self.time_create)

    def save2(self, *args, **kwargs):
        # Saves without refreshing title_join_descrip or logging.
        super(items, self).save(*args, **kwargs)

    def save(self, *args, **kwargs):
        # Keep the denormalized search blob in sync with the fields.
        self.title_join_descrip = self.title + self.descrip
        # No primary key yet means this save creates the row.
        new = self.id is None
        super(items, self).save(*args, **kwargs)
        if new:
            logging.info('items: %s created' % self.title)
        else:
            logging.info('item: %s changed' % self.title)

    def delete(self, *args, **kwargs):
        super(items, self).delete(*args, **kwargs)
        logging.info('items: deleted %s' % self.title)
| Python |
from userTools.main import handle_login_register, user, handle_optional_login, myprofile_links
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import render
from models import items
from forms import SellForm, DelForm
from django.core.paginator import Paginator, EmptyPage, PageNotAnInteger
from commTools.models import Comm
import datetime
# Items are considered expired this long after creation.
# NOTE(review): expiry is not referenced by the views visible here --
# confirm where it is consumed.
expiry = datetime.timedelta(days=30)
# Page size used by every paginated item listing below.
item_per_page = 6
@handle_login_register
def sell(request, curr_user):
    """Sell page: shows the SellForm and creates a new item on valid POST."""
    if request.method != 'POST':
        # Display the empty form.
        return render(request, 'sell.html', {'form': SellForm()})
    form = SellForm(request.POST, instance=items(user=curr_user.user_obj))
    if not form.is_valid():
        return render(request, 'sell.html', {'form': form})
    new_item = form.save()
    # Redirect to the new item page.
    return HttpResponseRedirect('/item/?id=%d' % new_item.id)
def buy(request):
    """Browse page: paginated listing of all unsold, unexpired items."""
    # expiry_date = datetime.date.today()-expiry
    page_no = request.GET.get('page_no', 1)
    paginator = Paginator(items.objects.filter(is_expired=False, is_sold=False),
                          item_per_page)
    try:
        ret = paginator.page(page_no)
    except (PageNotAnInteger, EmptyPage):
        # Non-numeric or out-of-range page numbers fall back to page 1
        # (both handlers were identical, so they are merged).
        ret = paginator.page(1)
    return render(request, 'buy.html', {'items': ret, 'title': 'Buy',
                                        'num_pages': paginator.num_pages})
@handle_optional_login
def item_view(request, curr_user):
    """
    Individual item view.

    curr_user may be None (optional login). Expired or sold items are
    only shown to the parties involved (owner / buyer / admin).
    """
    if request.method == 'GET':
        try:
            item_no = int(request.GET.get('id', -1))
        except (TypeError, ValueError):
            # Narrowed from a bare except: only int() conversion
            # failures are expected here.
            item_no = -1
    else:
        item_no = -1
    if item_no <= 0:
        return render(request, 'error.html', {'error': 'Item Not Found'})
    # expiry_date = datetime.date.today()-expiry
    try:
        curr_item = items.objects.get(id=item_no)
        # If item is not expired, anyone may view it.
        if not curr_item.is_expired and not curr_item.is_sold:
            return render(request, 'item.html', {'item': curr_item})
        # BUG FIX: guard curr_user before dereferencing .user_obj --
        # an anonymous visitor opening a sold item used to raise
        # AttributeError here (the None check only existed in the
        # expired branch below).
        if (curr_item.is_sold and curr_user is not None and
                (curr_item.user == curr_user.user_obj or
                 curr_item.buyer == curr_user.user_obj)):
            return render(request, 'item.html', {'item': curr_item})
        # If the curr_user has rights to see this expired item.
        if curr_user is not None and (curr_item.user == curr_user.user_obj or
                                      curr_user.is_admin):
            return render(request, 'item.html', {'item': curr_item,
                                                 'expired': True})
        else:
            return render(request, 'error.html', {'error': 'Item Not Found'})
    except items.DoesNotExist:
        return render(request, 'error.html', {'error': 'Item Not Found'})
@handle_login_register
def item_delete(request, item_id, curr_user):
    """Confirm-and-delete view for an unsold item (owner or admin only)."""
    item_id = int(item_id)
    denied = {'error': 'Item Does Not Exist or You don\'t have permission to be here!'}
    try:
        item = items.objects.get(id=item_id, is_sold=False)
    except items.DoesNotExist:
        return render(request, 'error.html', denied)
    # Only the owner or an admin may delete.
    if curr_user.user_obj != item.user and not curr_user.is_admin:
        return render(request, 'error.html', denied)
    # User has rights; show the confirmation form on GET.
    if request.method != 'POST':
        return render(request, 'item_delete.html', {'form': DelForm(), 'item': item})
    form = DelForm(request.POST)
    if form.is_valid() and form.cleaned_data['confirm']:
        item.delete()
        return render(request, 'msg.html', {'msg': 'Item Deleted!'})
    return HttpResponseRedirect('/item/?id=%d' % item.id)
@handle_login_register
def item_edit(request, item_id, curr_user):
    """Edit view for an unsold item; only its owner may edit it."""
    item_id = int(item_id)
    denied = {'error': 'Item Does Not Exist or You don\'t have permission to be here!'}
    try:
        item = items.objects.get(id=item_id, is_sold=False)
    except items.DoesNotExist:
        return render(request, 'error.html', denied)
    if curr_user.user_obj != item.user:
        return render(request, 'error.html', denied)
    # User has rights; show the pre-filled form on GET.
    if request.method != 'POST':
        return render(request, 'edit_item.html',
                      {'form': SellForm(instance=item), 'item': item})
    form = SellForm(request.POST, instance=item)
    if not form.is_valid():
        return render(request, 'edit_item.html', {'form': form, 'item': item})
    form.save()
    return HttpResponseRedirect('/item/?id=%d' % item.id)
@handle_login_register
def my_items(request, curr_user):
    """Paginated list of the current user's unsold items."""
    unsold = items.objects.filter(user=curr_user.user_obj, is_sold=False)
    paginator = Paginator(unsold, item_per_page)
    try:
        page = paginator.page(request.GET.get('page_no', 1))
    except (PageNotAnInteger, EmptyPage):
        # Bad or out-of-range page number: fall back to the first page.
        page = paginator.page(1)
    return render(request, 'my_items.html',
                  {'items': page, 'title': 'My Items', 'num_pages': paginator.num_pages})
@handle_login_register
def my_items_buys(request, curr_user):
    """Paginated list of items the current user has bought."""
    bought = items.objects.filter(is_sold=True, buyer=curr_user.user_obj)
    paginator = Paginator(bought, item_per_page)
    try:
        page = paginator.page(request.GET.get('page_no', 1))
    except (PageNotAnInteger, EmptyPage):
        # Bad or out-of-range page number: fall back to the first page.
        page = paginator.page(1)
    return render(request, 'my_items.html',
                  {'items': page, 'title': 'My Purchases', 'num_pages': paginator.num_pages})
@handle_login_register
def my_items_sold(request, curr_user):
    """Paginated list of items the current user has sold."""
    sold = items.objects.filter(is_sold=True, user=curr_user.user_obj)
    paginator = Paginator(sold, item_per_page)
    try:
        page = paginator.page(request.GET.get('page_no', 1))
    except (PageNotAnInteger, EmptyPage):
        # Bad or out-of-range page number: fall back to the first page.
        page = paginator.page(1)
    return render(request, 'my_items.html',
                  {'items': page, 'title': 'My Sells', 'num_pages': paginator.num_pages})
@handle_login_register
def ongoing_deals(request, curr_user):
    """Paginated list of the current user's deals with status 0 or 1."""
    from itertools import chain
    # Two separate filters chained instead of a single status__in query --
    # presumably for the non-relational backend; TODO confirm. The exact
    # meaning of status 0 vs 1 is not visible here.
    in_state_0 = Comm.objects.filter(status=0, buyer=curr_user.user_obj)
    in_state_1 = Comm.objects.filter(status=1, buyer=curr_user.user_obj)
    paginator = Paginator(list(chain(in_state_0, in_state_1)), item_per_page)
    try:
        page = paginator.page(request.GET.get('page_no', 1))
    except (PageNotAnInteger, EmptyPage):
        # Bad or out-of-range page number: fall back to the first page.
        page = paginator.page(1)
    return render(request, 'ongoing.html',
                  {'items': page, 'title': 'Ongoing Deals', 'num_pages': paginator.num_pages})
from models import items
from dbindexer.api import register_index
# Register case-insensitive-contains lookups for search on the
# non-relational backend.
# Fixed: the original wrote ('icontains'), which is NOT a one-element tuple
# but merely a parenthesized string; plain strings make the intent explicit
# (the value passed to dbindexer is identical).
register_index(items, {'title': 'icontains', 'title_join_descrip': 'icontains'})
from django.http import HttpResponse
from django.template import Template, Context, loader, RequestContext
from django.shortcuts import render
from google.appengine.api import users
from userTools.models import user_profile, admins
from userTools.main import user
def common_proc(request):
    """
    Provides common details for templates (presumably registered as a
    template context processor -- TODO confirm where it is wired in).

    TODO: Handle redirection to critical pages
    """
    user_g = users.get_current_user()
    user_reg = None
    if user_g:
        # Look up a registered site profile for the logged-in Google account.
        profile = user(email=user_g.email())
        # Fixed: identity comparison instead of '== None'; the redundant
        # 'user_reg = None' branch is gone (it is already the default).
        if profile.user_obj is not None:
            user_reg = profile
    # Prefer the registered nick, then the Google nickname; False means
    # nobody is logged in.
    if user_reg:
        user_nick = user_reg.user_obj.nick
    elif user_g:
        user_nick = user_g.nickname()
    else:
        user_nick = False
    return {
        'app_name': 'Foo Barketplace',
        'app_tagline': 'An open market for everyone',
        'user_nick': user_nick,
        'user_reg': user_reg,
        'user_nonreg': user_g,
        'app_login_URL': users.create_login_url(request.path),
        'app_logout_URL': users.create_logout_url(request.path),
    }
def home(request):
    """Render the site's landing page."""
    return render(request, 'home.html')
def reset(request):
    """Dev/test helper: wipe all stored data and repopulate dummy records.

    Creates 25 dummy user profiles, 10 items each (250 items), and grants
    admin rights to two hard-coded accounts.

    NOTE(review): this destructive endpoint has no permission check --
    anyone who can reach its URL can erase the datastore. Confirm it is
    removed or protected outside development.
    """
    from itemTools.models import items
    from userTools.models import user_profile, admins
    from commTools.models import Comm, Messages
    from random import randint
    ret = ''
    # Per-object deletes (not a bulk queryset delete) kept as in the
    # original -- presumably for the non-relational backend; TODO confirm.
    for model in (Messages, Comm, items, user_profile, admins):
        for obj in model.objects.all():
            obj.delete()
    ret += "Previous Records Deleted!<br />"
    for i in range(1, 26):
        user_profile(
            google_user_id=-1,
            nick="test%d" % i,
            f_name="Bob%d" % i,
            l_name="Dylan%d" % i,
            email="foo%d@bar.com" % i,
            email_visibility=True,
            about_me="abcd efgh ijkl mnop").save2()
    ret += "25 Dummy Accounts Created<br />"
    cnt = 1
    # Loop variable renamed from 'user', which shadowed the imported
    # userTools.main.user class at module level.
    for profile in user_profile.objects.all():
        for i in range(10):
            items(
                user=profile,
                title="item%d" % cnt,
                descrip="Awesome Item with %d blessings" % cnt,
                price=randint(1, 1000000),
            ).save2()
            cnt += 1
    ret += "250 Dummy Items Created<br />"
    admins(email="hardcodetest1@gmail.com").save()
    admins(email="hardcodetest2@gmail.com").save()
    # Fixed: the message previously listed "hardcodetest2" twice.
    ret += "\"hardcodetest1@gmail.com\" and \"hardcodetest2@gmail.com\" Given Admin Rights<br />"
    return HttpResponse(ret)
# Load the siteconf module
from django.conf import settings
from django.utils.importlib import import_module
# AUTOLOAD_SITECONF names the module to import at startup (importing it
# triggers its module-level registration side effects); falls back to the
# root URLconf module when the setting is absent.
SITECONF_MODULE = getattr(settings, 'AUTOLOAD_SITECONF', settings.ROOT_URLCONF)
import_module(SITECONF_MODULE)
| Python |
from django.utils.importlib import import_module
from django.conf import settings
# load all models.py to ensure signal handling installation or index loading
# of some apps
for app in settings.INSTALLED_APPS:
    try:
        import_module('%s.models' % (app))
    except ImportError:
        # NOTE(review): this also silences genuine ImportErrors raised
        # *inside* an app's models.py, not only "app has no models module".
        pass
class AutoloadMiddleware(object):
    """Empty because the import above already does everything for us"""
    pass
| Python |
def autodiscover(module_name):
    """
    Automatically loads modules specified by module_name for each app in
    installed apps.
    """
    from django.conf import settings
    from django.utils.importlib import import_module
    from django.utils.module_loading import module_has_submodule
    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        # Attempt to import the app's submodule.
        try:
            import_module('%s.%s' % (app, module_name))
        except Exception:
            # Narrowed from a bare 'except:' (which also swallowed
            # SystemExit/KeyboardInterrupt). Decide whether to bubble up
            # this error: if the app simply doesn't have the submodule,
            # ignore the failure; if the submodule exists but failed to
            # import, re-raise so the real error is visible.
            if module_has_submodule(mod, module_name):
                raise
| Python |
import datetime
import os
import re
import sys
import types
from django.conf import settings
from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotFound
from django.template import (Template, Context, TemplateDoesNotExist,
TemplateSyntaxError)
from django.template.defaultfilters import force_escape, pprint
from django.utils.html import escape
from django.utils.importlib import import_module
from django.utils.encoding import smart_unicode, smart_str
HIDDEN_SETTINGS = re.compile('SECRET|PASSWORD|PROFANITIES_LIST|SIGNATURE')
def linebreak_iter(template_source):
    """Yield the start offset of every line in template_source.

    Yields 0 first, then the offset just past each newline, and finally
    len(template_source) + 1 as an end sentinel.
    """
    yield 0
    pos = 0
    while True:
        newline = template_source.find('\n', pos)
        if newline < 0:
            break
        pos = newline + 1
        yield pos
    yield len(template_source) + 1
def cleanse_setting(key, value):
    """Cleanse an individual setting key/value of sensitive content.

    If the value is a dictionary, recursively cleanse the keys in
    that dictionary.
    """
    try:
        is_sensitive = HIDDEN_SETTINGS.search(key)
    except TypeError:
        # Key isn't regex-able (e.g. not a string): return value untouched.
        return value
    if is_sensitive:
        return '********************'
    if isinstance(value, dict):
        return dict((k, cleanse_setting(k, v)) for k, v in value.items())
    return value
def get_safe_settings():
    """Return a dict of the settings module with sensitive values blurred out.

    Only upper-case attributes are included, per Django's settings
    convention.
    """
    return dict(
        (k, cleanse_setting(k, getattr(settings, k)))
        for k in dir(settings) if k.isupper()
    )
def technical_500_response(request, exc_type, exc_value, tb):
    """
    Create a technical server error response. The last three arguments are
    the values returned from sys.exc_info() and friends.
    """
    html = ExceptionReporter(request, exc_type, exc_value, tb).get_traceback_html()
    return HttpResponseServerError(html, mimetype='text/html')
class ExceptionReporter(object):
    """
    A class to organize and coordinate reporting on exceptions.
    """
    def __init__(self, request, exc_type, exc_value, tb, is_email=False):
        # exc_type/exc_value/tb are as returned by sys.exc_info();
        # is_email switches the rendered report to an email-friendly form.
        self.request = request
        self.exc_type = exc_type
        self.exc_value = exc_value
        self.tb = tb
        self.is_email = is_email
        self.template_info = None
        self.template_does_not_exist = False
        self.loader_debug_info = None
        # Handle deprecated string exceptions
        if isinstance(self.exc_type, basestring):
            self.exc_value = Exception('Deprecated String Exception: %r' % self.exc_type)
            self.exc_type = type(self.exc_value)
    def get_traceback_html(self):
        "Return HTML code for traceback."
        # For TemplateDoesNotExist, record which loaders were consulted and
        # which candidate template paths exist, for the postmortem section.
        if self.exc_type and issubclass(self.exc_type, TemplateDoesNotExist):
            from django.template.loader import template_source_loaders
            self.template_does_not_exist = True
            self.loader_debug_info = []
            for loader in template_source_loaders:
                try:
                    module = import_module(loader.__module__)
                    if hasattr(loader, '__class__'):
                        source_list_func = loader.get_template_sources
                    else: # NOTE: Remember to remove this branch when we deprecate old template loaders in 1.4
                        source_list_func = module.get_template_sources
                    # NOTE: This assumes exc_value is the name of the template that
                    # the loader attempted to load.
                    template_list = [{'name': t, 'exists': os.path.exists(t)} \
                        for t in source_list_func(str(self.exc_value))]
                except (ImportError, AttributeError):
                    template_list = []
                if hasattr(loader, '__class__'):
                    loader_name = loader.__module__ + '.' + loader.__class__.__name__
                else: # NOTE: Remember to remove this branch when we deprecate old template loaders in 1.4
                    loader_name = loader.__module__ + '.' + loader.__name__
                self.loader_debug_info.append({
                    'loader': loader_name,
                    'templates': template_list,
                })
        if (settings.TEMPLATE_DEBUG and hasattr(self.exc_value, 'source') and
            isinstance(self.exc_value, TemplateSyntaxError)):
            self.get_template_exception_info()
        frames = self.get_traceback_frames()
        # HTML-escape all local-variable reprs before they reach the template.
        for i, frame in enumerate(frames):
            if 'vars' in frame:
                frame['vars'] = [(k, force_escape(pprint(v))) for k, v in frame['vars']]
            frames[i] = frame
        unicode_hint = ''
        # For UnicodeErrors, show a few characters around the failing range.
        if self.exc_type and issubclass(self.exc_type, UnicodeError):
            start = getattr(self.exc_value, 'start', None)
            end = getattr(self.exc_value, 'end', None)
            if start is not None and end is not None:
                unicode_str = self.exc_value.args[1]
                unicode_hint = smart_unicode(unicode_str[max(start-5, 0):min(end+5, len(unicode_str))], 'ascii', errors='replace')
        from django import get_version
        t = Template(TECHNICAL_500_TEMPLATE, name='Technical 500 template')
        c = Context({
            'is_email': self.is_email,
            'unicode_hint': unicode_hint,
            'frames': frames,
            'request': self.request,
            'settings': get_safe_settings(),
            'sys_executable': sys.executable,
            'sys_version_info': '%d.%d.%d' % sys.version_info[0:3],
            'server_time': datetime.datetime.now(),
            'django_version_info': get_version(),
            'sys_path' : sys.path,
            'template_info': self.template_info,
            'template_does_not_exist': self.template_does_not_exist,
            'loader_debug_info': self.loader_debug_info,
        })
        # Check whether exception info is available
        if self.exc_type:
            c['exception_type'] = self.exc_type.__name__
        if self.exc_value:
            c['exception_value'] = smart_unicode(self.exc_value, errors='replace')
        if frames:
            c['lastframe'] = frames[-1]
        return t.render(c)
    def get_template_exception_info(self):
        # Populate self.template_info with source context around a template
        # syntax error; self.exc_value.source provides the origin and the
        # [start, end) character range of the offending token.
        origin, (start, end) = self.exc_value.source
        template_source = origin.reload()
        context_lines = 10
        line = 0
        upto = 0
        source_lines = []
        before = during = after = ""
        # Walk line-start offsets; the line containing [start, end) is the
        # error line and is split into before/during/after fragments.
        for num, next in enumerate(linebreak_iter(template_source)):
            if start >= upto and end <= next:
                line = num
                before = escape(template_source[upto:start])
                during = escape(template_source[start:end])
                after = escape(template_source[end:next])
            source_lines.append( (num, escape(template_source[upto:next])) )
            upto = next
        total = len(source_lines)
        top = max(1, line - context_lines)
        bottom = min(total, line + 1 + context_lines)
        self.template_info = {
            'message': self.exc_value.args[0],
            'source_lines': source_lines[top:bottom],
            'before': before,
            'during': during,
            'after': after,
            'top': top,
            'bottom': bottom,
            'total': total,
            'line': line,
            'name': origin.name,
        }
    def _get_lines_from_file(self, filename, lineno, context_lines, loader=None, module_name=None):
        """
        Returns context_lines before and after lineno from file.
        Returns (pre_context_lineno, pre_context, context_line, post_context).
        """
        source = None
        # Prefer the PEP 302 loader (covers zipped/frozen modules), then
        # fall back to reading the file from disk.
        if loader is not None and hasattr(loader, "get_source"):
            source = loader.get_source(module_name)
            if source is not None:
                source = source.splitlines()
        if source is None:
            try:
                f = open(filename)
                try:
                    source = f.readlines()
                finally:
                    f.close()
            except (OSError, IOError):
                pass
        if source is None:
            return None, [], None, []
        encoding = 'ascii'
        for line in source[:2]:
            # File coding may be specified. Match pattern from PEP-263
            # (http://www.python.org/dev/peps/pep-0263/)
            match = re.search(r'coding[:=]\s*([-\w.]+)', line)
            if match:
                encoding = match.group(1)
                break
        source = [unicode(sline, encoding, 'replace') for sline in source]
        lower_bound = max(0, lineno - context_lines)
        upper_bound = lineno + context_lines
        pre_context = [line.strip('\n') for line in source[lower_bound:lineno]]
        context_line = source[lineno].strip('\n')
        post_context = [line.strip('\n') for line in source[lineno+1:upper_bound]]
        return lower_bound, pre_context, context_line, post_context
    def get_traceback_frames(self):
        # Walk the traceback chain, returning one dict per visible frame
        # with file, line, surrounding source and locals.
        frames = []
        tb = self.tb
        while tb is not None:
            # support for __traceback_hide__ which is used by a few libraries
            # to hide internal frames.
            if tb.tb_frame.f_locals.get('__traceback_hide__'):
                tb = tb.tb_next
                continue
            filename = tb.tb_frame.f_code.co_filename
            function = tb.tb_frame.f_code.co_name
            lineno = tb.tb_lineno - 1
            loader = tb.tb_frame.f_globals.get('__loader__')
            module_name = tb.tb_frame.f_globals.get('__name__')
            pre_context_lineno, pre_context, context_line, post_context = self._get_lines_from_file(filename, lineno, 7, loader, module_name)
            # Frames whose source can't be located are skipped entirely.
            if pre_context_lineno is not None:
                frames.append({
                    'tb': tb,
                    'filename': filename,
                    'function': function,
                    'lineno': lineno + 1,
                    'vars': tb.tb_frame.f_locals.items(),
                    'id': id(tb),
                    'pre_context': pre_context,
                    'context_line': context_line,
                    'post_context': post_context,
                    'pre_context_lineno': pre_context_lineno + 1,
                })
            tb = tb.tb_next
        return frames
    def format_exception(self):
        """
        Return the same data as from traceback.format_exception.
        """
        import traceback
        frames = self.get_traceback_frames()
        tb = [ (f['filename'], f['lineno'], f['function'], f['context_line']) for f in frames ]
        # NOTE(review): the local name 'list' shadows the builtin below;
        # left unchanged here (documentation-only pass).
        list = ['Traceback (most recent call last):\n']
        list += traceback.format_list(tb)
        list += traceback.format_exception_only(self.exc_type, self.exc_value)
        return list
def technical_404_response(request, exception):
    """Create a technical 404 error response. The exception should be the Http404."""
    try:
        tried = exception.args[0]['tried']
    except (IndexError, TypeError, KeyError):
        tried = []
    else:
        if not tried:
            # 'tried' exists but is an empty list: the URLconf had no
            # patterns at all.
            return empty_urlconf(request)
    urlconf = getattr(request, 'urlconf', settings.ROOT_URLCONF)
    if isinstance(urlconf, types.ModuleType):
        urlconf = urlconf.__name__
    context = Context({
        'urlconf': urlconf,
        'root_urlconf': settings.ROOT_URLCONF,
        'request_path': request.path_info[1:], # Trim leading slash
        'urlpatterns': tried,
        'reason': smart_str(exception, errors='replace'),
        'request': request,
        'settings': get_safe_settings(),
    })
    template = Template(TECHNICAL_404_TEMPLATE, name='Technical 404 template')
    return HttpResponseNotFound(template.render(context), mimetype='text/html')
def empty_urlconf(request):
    """Create an empty URLconf 404 error response ("It worked!" page)."""
    template = Template(EMPTY_URLCONF_TEMPLATE, name='Empty URLConf template')
    context = Context({
        'project_name': settings.SETTINGS_MODULE.split('.')[0]
    })
    return HttpResponse(template.render(context), mimetype='text/html')
#
# Templates are embedded in the file so that we know the error handler will
# always work even if the template loader is broken.
#
TECHNICAL_500_TEMPLATE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}{% if request %} at {{ request.path_info|escape }}{% endif %}</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; }
h2 { margin-bottom:.8em; }
h2 span { font-size:80%; color:#666; font-weight:normal; }
h3 { margin:1em 0 .5em 0; }
h4 { margin:0 0 .5em 0; font-weight: normal; }
code, pre { font-size: 100%; }
table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; }
tbody td, tbody th { vertical-align:top; padding:2px 3px; }
thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; }
tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; }
table.vars { margin:5px 0 2px 40px; }
table.vars td, table.req td { font-family:monospace; }
table td.code { width:100%; }
table td.code pre { overflow:hidden; }
table.source th { color:#666; }
table.source td { font-family:monospace; white-space:pre; border-bottom:1px solid #eee; }
ul.traceback { list-style-type:none; }
ul.traceback li.frame { padding-bottom:1em; }
div.context { padding:10px 0; overflow:hidden; }
div.context ol { padding-left:30px; margin:0 10px; list-style-position: inside; }
div.context ol li { font-family:monospace; white-space:pre; color:#666; cursor:pointer; }
div.context ol li pre { display:inline; }
div.context ol.context-line li { color:black; background-color:#ccc; }
div.context ol.context-line li span { position:absolute; right:32px; }
div.commands { margin-left: 40px; }
div.commands a { color:black; text-decoration:none; }
#summary { background: #ffc; }
#summary h2 { font-weight: normal; color: #666; }
#explanation { background:#eee; }
#template, #template-not-exist { background:#f6f6f6; }
#template-not-exist ul { margin: 0 0 0 20px; }
#unicode-hint { background:#eee; }
#traceback { background:#eee; }
#requestinfo { background:#f6f6f6; padding-left:120px; }
#summary table { border:none; background:transparent; }
#requestinfo h2, #requestinfo h3 { position:relative; margin-left:-100px; }
#requestinfo h3 { margin-bottom:-1em; }
.error { background: #ffc; }
.specific { color:#cc3300; font-weight:bold; }
h2 span.commands { font-size:.7em;}
span.commands a:link {color:#5E5694;}
pre.exception_value { font-family: sans-serif; color: #666; font-size: 1.5em; margin: 10px 0 10px 0; }
</style>
{% if not is_email %}
<script type="text/javascript">
//<!--
function getElementsByClassName(oElm, strTagName, strClassName){
// Written by Jonathan Snook, http://www.snook.ca/jon; Add-ons by Robert Nyman, http://www.robertnyman.com
var arrElements = (strTagName == "*" && document.all)? document.all :
oElm.getElementsByTagName(strTagName);
var arrReturnElements = new Array();
strClassName = strClassName.replace(/\-/g, "\\-");
var oRegExp = new RegExp("(^|\\s)" + strClassName + "(\\s|$)");
var oElement;
for(var i=0; i<arrElements.length; i++){
oElement = arrElements[i];
if(oRegExp.test(oElement.className)){
arrReturnElements.push(oElement);
}
}
return (arrReturnElements)
}
function hideAll(elems) {
for (var e = 0; e < elems.length; e++) {
elems[e].style.display = 'none';
}
}
window.onload = function() {
hideAll(getElementsByClassName(document, 'table', 'vars'));
hideAll(getElementsByClassName(document, 'ol', 'pre-context'));
hideAll(getElementsByClassName(document, 'ol', 'post-context'));
hideAll(getElementsByClassName(document, 'div', 'pastebin'));
}
function toggle() {
for (var i = 0; i < arguments.length; i++) {
var e = document.getElementById(arguments[i]);
if (e) {
e.style.display = e.style.display == 'none' ? 'block' : 'none';
}
}
return false;
}
function varToggle(link, id) {
toggle('v' + id);
var s = link.getElementsByTagName('span')[0];
var uarr = String.fromCharCode(0x25b6);
var darr = String.fromCharCode(0x25bc);
s.innerHTML = s.innerHTML == uarr ? darr : uarr;
return false;
}
function switchPastebinFriendly(link) {
s1 = "Switch to copy-and-paste view";
s2 = "Switch back to interactive view";
link.innerHTML = link.innerHTML == s1 ? s2 : s1;
toggle('browserTraceback', 'pastebinTraceback');
return false;
}
//-->
</script>
{% endif %}
</head>
<body>
<div id="summary">
<h1>{% if exception_type %}{{ exception_type }}{% else %}Report{% endif %}{% if request %} at {{ request.path_info|escape }}{% endif %}</h1>
<pre class="exception_value">{% if exception_value %}{{ exception_value|force_escape }}{% else %}No exception supplied{% endif %}</pre>
<table class="meta">
{% if request %}
<tr>
<th>Request Method:</th>
<td>{{ request.META.REQUEST_METHOD }}</td>
</tr>
<tr>
<th>Request URL:</th>
<td>{{ request.build_absolute_uri|escape }}</td>
</tr>
{% endif %}
<tr>
<th>Django Version:</th>
<td>{{ django_version_info }}</td>
</tr>
{% if exception_type %}
<tr>
<th>Exception Type:</th>
<td>{{ exception_type }}</td>
</tr>
{% endif %}
{% if exception_type and exception_value %}
<tr>
<th>Exception Value:</th>
<td><pre>{{ exception_value|force_escape }}</pre></td>
</tr>
{% endif %}
{% if lastframe %}
<tr>
<th>Exception Location:</th>
<td>{{ lastframe.filename|escape }} in {{ lastframe.function|escape }}, line {{ lastframe.lineno }}</td>
</tr>
{% endif %}
<tr>
<th>Python Executable:</th>
<td>{{ sys_executable|escape }}</td>
</tr>
<tr>
<th>Python Version:</th>
<td>{{ sys_version_info }}</td>
</tr>
<tr>
<th>Python Path:</th>
<td><pre>{{ sys_path|pprint }}</pre></td>
</tr>
<tr>
<th>Server time:</th>
<td>{{server_time|date:"r"}}</td>
</tr>
</table>
</div>
{% if unicode_hint %}
<div id="unicode-hint">
<h2>Unicode error hint</h2>
<p>The string that could not be encoded/decoded was: <strong>{{ unicode_hint|force_escape }}</strong></p>
</div>
{% endif %}
{% if template_does_not_exist %}
<div id="template-not-exist">
<h2>Template-loader postmortem</h2>
{% if loader_debug_info %}
<p>Django tried loading these templates, in this order:</p>
<ul>
{% for loader in loader_debug_info %}
<li>Using loader <code>{{ loader.loader }}</code>:
<ul>{% for t in loader.templates %}<li><code>{{ t.name }}</code> (File {% if t.exists %}exists{% else %}does not exist{% endif %})</li>{% endfor %}</ul>
</li>
{% endfor %}
</ul>
{% else %}
<p>Django couldn't find any templates because your <code>TEMPLATE_LOADERS</code> setting is empty!</p>
{% endif %}
</div>
{% endif %}
{% if template_info %}
<div id="template">
<h2>Template error</h2>
<p>In template <code>{{ template_info.name }}</code>, error at line <strong>{{ template_info.line }}</strong></p>
<h3>{{ template_info.message }}</h3>
<table class="source{% if template_info.top %} cut-top{% endif %}{% ifnotequal template_info.bottom template_info.total %} cut-bottom{% endifnotequal %}">
{% for source_line in template_info.source_lines %}
{% ifequal source_line.0 template_info.line %}
<tr class="error"><th>{{ source_line.0 }}</th>
<td>{{ template_info.before }}<span class="specific">{{ template_info.during }}</span>{{ template_info.after }}</td></tr>
{% else %}
<tr><th>{{ source_line.0 }}</th>
<td>{{ source_line.1 }}</td></tr>
{% endifequal %}
{% endfor %}
</table>
</div>
{% endif %}
{% if frames %}
<div id="traceback">
<h2>Traceback <span class="commands">{% if not is_email %}<a href="#" onclick="return switchPastebinFriendly(this);">Switch to copy-and-paste view</a></span>{% endif %}</h2>
{% autoescape off %}
<div id="browserTraceback">
<ul class="traceback">
{% for frame in frames %}
<li class="frame">
<code>{{ frame.filename|escape }}</code> in <code>{{ frame.function|escape }}</code>
{% if frame.context_line %}
<div class="context" id="c{{ frame.id }}">
{% if frame.pre_context and not is_email %}
<ol start="{{ frame.pre_context_lineno }}" class="pre-context" id="pre{{ frame.id }}">{% for line in frame.pre_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li>{% endfor %}</ol>
{% endif %}
<ol start="{{ frame.lineno }}" class="context-line"><li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ frame.context_line|escape }}</pre>{% if not is_email %} <span>...</span>{% endif %}</li></ol>
{% if frame.post_context and not is_email %}
<ol start='{{ frame.lineno|add:"1" }}' class="post-context" id="post{{ frame.id }}">{% for line in frame.post_context %}<li onclick="toggle('pre{{ frame.id }}', 'post{{ frame.id }}')"><pre>{{ line|escape }}</pre></li>{% endfor %}</ol>
{% endif %}
</div>
{% endif %}
{% if frame.vars %}
<div class="commands">
{% if is_email %}
<h2>Local Vars</h2>
{% else %}
<a href="#" onclick="return varToggle(this, '{{ frame.id }}')"><span>▶</span> Local vars</a>
{% endif %}
</div>
<table class="vars" id="v{{ frame.id }}">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in frame.vars|dictsort:"0" %}
<tr>
<td>{{ var.0|force_escape }}</td>
<td class="code"><pre>{{ var.1 }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% endif %}
</li>
{% endfor %}
</ul>
</div>
{% endautoescape %}
<form action="http://dpaste.com/" name="pasteform" id="pasteform" method="post">
{% if not is_email %}
<div id="pastebinTraceback" class="pastebin">
<input type="hidden" name="language" value="PythonConsole">
<input type="hidden" name="title" value="{{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}">
<input type="hidden" name="source" value="Django Dpaste Agent">
<input type="hidden" name="poster" value="Django">
<textarea name="content" id="traceback_area" cols="140" rows="25">
Environment:
{% if request %}
Request Method: {{ request.META.REQUEST_METHOD }}
Request URL: {{ request.build_absolute_uri|escape }}
{% endif %}
Django Version: {{ django_version_info }}
Python Version: {{ sys_version_info }}
Installed Applications:
{{ settings.INSTALLED_APPS|pprint }}
Installed Middleware:
{{ settings.MIDDLEWARE_CLASSES|pprint }}
{% if template_does_not_exist %}Template Loader Error:
{% if loader_debug_info %}Django tried loading these templates, in this order:
{% for loader in loader_debug_info %}Using loader {{ loader.loader }}:
{% for t in loader.templates %}{{ t.name }} (File {% if t.exists %}exists{% else %}does not exist{% endif %})
{% endfor %}{% endfor %}
{% else %}Django couldn't find any templates because your TEMPLATE_LOADERS setting is empty!
{% endif %}
{% endif %}{% if template_info %}
Template error:
In template {{ template_info.name }}, error at line {{ template_info.line }}
{{ template_info.message }}{% for source_line in template_info.source_lines %}{% ifequal source_line.0 template_info.line %}
{{ source_line.0 }} : {{ template_info.before }} {{ template_info.during }} {{ template_info.after }}
{% else %}
{{ source_line.0 }} : {{ source_line.1 }}
{% endifequal %}{% endfor %}{% endif %}
Traceback:
{% for frame in frames %}File "{{ frame.filename|escape }}" in {{ frame.function|escape }}
{% if frame.context_line %} {{ frame.lineno }}. {{ frame.context_line|escape }}{% endif %}
{% endfor %}
Exception Type: {{ exception_type|escape }}{% if request %} at {{ request.path_info|escape }}{% endif %}
Exception Value: {{ exception_value|force_escape }}
</textarea>
<br><br>
<input type="submit" value="Share this traceback on a public Web site">
</div>
</form>
</div>
{% endif %}
{% endif %}
<div id="requestinfo">
<h2>Request information</h2>
{% if request %}
<h3 id="get-info">GET</h3>
{% if request.GET %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.GET.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No GET data</p>
{% endif %}
<h3 id="post-info">POST</h3>
{% if request.POST %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.POST.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No POST data</p>
{% endif %}
<h3 id="files-info">FILES</h3>
{% if request.FILES %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.FILES.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No FILES data</p>
{% endif %}
<h3 id="cookie-info">COOKIES</h3>
{% if request.COOKIES %}
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.COOKIES.items %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>No cookie data</p>
{% endif %}
<h3 id="meta-info">META</h3>
<table class="req">
<thead>
<tr>
<th>Variable</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in request.META.items|dictsort:"0" %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
{% else %}
<p>Request data not supplied</p>
{% endif %}
<h3 id="settings-info">Settings</h3>
<h4>Using settings module <code>{{ settings.SETTINGS_MODULE }}</code></h4>
<table class="req">
<thead>
<tr>
<th>Setting</th>
<th>Value</th>
</tr>
</thead>
<tbody>
{% for var in settings.items|dictsort:"0" %}
<tr>
<td>{{ var.0 }}</td>
<td class="code"><pre>{{ var.1|pprint }}</pre></td>
</tr>
{% endfor %}
</tbody>
</table>
</div>
{% if not is_email %}
<div id="explanation">
<p>
You're seeing this error because you have <code>DEBUG = True</code> in your
Django settings file. Change that to <code>False</code>, and Django will
display a standard 500 page.
</p>
</div>
{% endif %}
</body>
</html>
"""
TECHNICAL_404_TEMPLATE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<title>Page not found at {{ request.path_info|escape }}</title>
<meta name="robots" content="NONE,NOARCHIVE">
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
table { border:none; border-collapse: collapse; width:100%; }
td, th { vertical-align:top; padding:2px 3px; }
th { width:12em; text-align:right; color:#666; padding-right:.5em; }
#info { background:#f6f6f6; }
#info ol { margin: 0.5em 4em; }
#info ol li { font-family: monospace; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>Page not found <span>(404)</span></h1>
<table class="meta">
<tr>
<th>Request Method:</th>
<td>{{ request.META.REQUEST_METHOD }}</td>
</tr>
<tr>
<th>Request URL:</th>
<td>{{ request.build_absolute_uri|escape }}</td>
</tr>
</table>
</div>
<div id="info">
{% if urlpatterns %}
<p>
Using the URLconf defined in <code>{{ urlconf }}</code>,
Django tried these URL patterns, in this order:
</p>
<ol>
{% for pattern in urlpatterns %}
<li>
{% for pat in pattern %}
{{ pat.regex.pattern }}
{% if forloop.last and pat.name %}[name='{{ pat.name }}']{% endif %}
{% endfor %}
</li>
{% endfor %}
</ol>
<p>The current URL, <code>{{ request_path|escape }}</code>, didn't match any of these.</p>
{% else %}
<p>{{ reason }}</p>
{% endif %}
</div>
<div id="explanation">
<p>
You're seeing this error because you have <code>DEBUG = True</code> in
your Django settings file. Change that to <code>False</code>, and Django
will display a standard 404 page.
</p>
</div>
</body>
</html>
"""
# Inline HTML shown when DEBUG is True and the project has an empty URLconf:
# the default "It worked!" welcome page, rendered with {{ project_name }}.
EMPTY_URLCONF_TEMPLATE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html lang="en"><head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE"><title>Welcome to Django</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; }
h2 { margin-bottom:.8em; }
h2 span { font-size:80%; color:#666; font-weight:normal; }
h3 { margin:1em 0 .5em 0; }
h4 { margin:0 0 .5em 0; font-weight: normal; }
table { border:1px solid #ccc; border-collapse: collapse; width:100%; background:white; }
tbody td, tbody th { vertical-align:top; padding:2px 3px; }
thead th { padding:1px 6px 1px 3px; background:#fefefe; text-align:left; font-weight:normal; font-size:11px; border:1px solid #ddd; }
tbody th { width:12em; text-align:right; color:#666; padding-right:.5em; }
ul { margin-left: 2em; margin-top: 1em; }
#summary { background: #e0ebff; }
#summary h2 { font-weight: normal; color: #666; }
#explanation { background:#eee; }
#instructions { background:#f6f6f6; }
#summary table { border:none; background:transparent; }
</style>
</head>
<body>
<div id="summary">
<h1>It worked!</h1>
<h2>Congratulations on your first Django-powered page.</h2>
</div>
<div id="instructions">
<p>Of course, you haven't actually done any work yet. Here's what to do next:</p>
<ul>
<li>If you plan to use a database, edit the <code>DATABASES</code> setting in <code>{{ project_name }}/settings.py</code>.</li>
<li>Start your first app by running <code>python {{ project_name }}/manage.py startapp [appname]</code>.</li>
</ul>
</div>
<div id="explanation">
<p>
You're seeing this message because you have <code>DEBUG = True</code> in your
Django settings file and you haven't configured any URLs. Get to work!
</p>
</div>
</body></html>
"""
| Python |
"""
Decorators for views based on HTTP headers.
"""
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from calendar import timegm
from datetime import timedelta
from django.utils.decorators import decorator_from_middleware, available_attrs
from django.utils.http import http_date, parse_http_date_safe, parse_etags, quote_etag
from django.utils.log import getLogger
from django.middleware.http import ConditionalGetMiddleware
from django.http import HttpResponseNotAllowed, HttpResponseNotModified, HttpResponse
# Decorator form of ConditionalGetMiddleware (ETag / Last-Modified handling).
conditional_page = decorator_from_middleware(ConditionalGetMiddleware)
# Module-level logger shared by the decorators below.
logger = getLogger('django.request')
def require_http_methods(request_method_list):
    """
    Decorator to make a view only accept particular request methods. Usage::

        @require_http_methods(["GET", "POST"])
        def my_view(request):
            # I can assume now that only GET or POST requests make it this far
            # ...

    Note that request methods should be in uppercase.
    """
    def decorator(func):
        def inner(request, *args, **kwargs):
            # Allowed method: hand straight off to the view.
            if request.method in request_method_list:
                return func(request, *args, **kwargs)
            # Anything else gets logged and answered with a 405.
            logger.warning(
                'Method Not Allowed (%s): %s' % (request.method, request.path),
                extra={
                    'status_code': 405,
                    'request': request
                }
            )
            return HttpResponseNotAllowed(request_method_list)
        return wraps(func, assigned=available_attrs(func))(inner)
    return decorator
# Ready-made single-method guards built from require_http_methods.
require_GET = require_http_methods(["GET"])
require_GET.__doc__ = "Decorator to require that a view only accept the GET method."
require_POST = require_http_methods(["POST"])
require_POST.__doc__ = "Decorator to require that a view only accept the POST method."
def condition(etag_func=None, last_modified_func=None):
    """
    Decorator to support conditional retrieval (or change) for a view
    function.

    The parameters are callables to compute the ETag and last modified time for
    the requested resource, respectively. The callables are passed the same
    parameters as the view itself. The Etag function should return a string (or
    None if the resource doesn't exist), whilst the last_modified function
    should return a datetime object (or None if the resource doesn't exist).

    If both parameters are provided, all the preconditions must be met before
    the view is processed.

    This decorator will either pass control to the wrapped view function or
    return an HTTP 304 response (unmodified) or 412 response (preconditions
    failed), depending upon the request method.

    Any behavior marked as "undefined" in the HTTP spec (e.g. If-none-match
    plus If-modified-since headers) will result in the view function being
    called.
    """
    def decorator(func):
        def inner(request, *args, **kwargs):
            # Get HTTP request headers
            if_modified_since = request.META.get("HTTP_IF_MODIFIED_SINCE")
            if if_modified_since:
                if_modified_since = parse_http_date_safe(if_modified_since)
            if_none_match = request.META.get("HTTP_IF_NONE_MATCH")
            if_match = request.META.get("HTTP_IF_MATCH")
            if if_none_match or if_match:
                # There can be more than one ETag in the request, so we
                # consider the list of values.
                try:
                    etags = parse_etags(if_none_match or if_match)
                except ValueError:
                    # In case of invalid etag ignore all ETag headers.
                    # Apparently Opera sends invalidly quoted headers at times
                    # (we should be returning a 400 response, but that's a
                    # little extreme) -- this is Django bug #10681.
                    if_none_match = None
                    if_match = None

            # Compute values (if any) for the requested resource.
            if etag_func:
                res_etag = etag_func(request, *args, **kwargs)
            else:
                res_etag = None
            if last_modified_func:
                dt = last_modified_func(request, *args, **kwargs)
                if dt:
                    res_last_modified = timegm(dt.utctimetuple())
                else:
                    res_last_modified = None
            else:
                res_last_modified = None

            response = None
            if not ((if_match and (if_modified_since or if_none_match)) or
                    (if_match and if_none_match)):
                # We only get here if no undefined combinations of headers are
                # specified.
                if ((if_none_match and (res_etag in etags or
                        "*" in etags and res_etag)) and
                        (not if_modified_since or
                            (res_last_modified and if_modified_since and
                            res_last_modified <= if_modified_since))):
                    if request.method in ("GET", "HEAD"):
                        response = HttpResponseNotModified()
                    else:
                        logger.warning('Precondition Failed: %s' % request.path,
                            extra={
                                'status_code': 412,
                                'request': request
                            }
                        )
                        response = HttpResponse(status=412)
                elif if_match and ((not res_etag and "*" in etags) or
                        (res_etag and res_etag not in etags)):
                    logger.warning('Precondition Failed: %s' % request.path,
                        extra={
                            'status_code': 412,
                            'request': request
                        }
                    )
                    response = HttpResponse(status=412)
                elif (not if_none_match and request.method == "GET" and
                        res_last_modified and if_modified_since and
                        res_last_modified <= if_modified_since):
                    response = HttpResponseNotModified()

            if response is None:
                response = func(request, *args, **kwargs)

            # Set relevant headers on the response if they don't already exist.
            if res_last_modified and not response.has_header('Last-Modified'):
                response['Last-Modified'] = http_date(res_last_modified)
            if res_etag and not response.has_header('ETag'):
                response['ETag'] = quote_etag(res_etag)

            return response

        # Fix: preserve the wrapped view's metadata (name, docstring, attrs),
        # consistent with require_http_methods above. Previously the bare
        # closure was returned, which broke introspection of decorated views.
        return wraps(func, assigned=available_attrs(func))(inner)
    return decorator
# Shortcut decorators for common cases based on ETag or Last-Modified only
def etag(etag_func):
    """Shortcut for condition() when only an ETag function is available."""
    return condition(etag_func=etag_func)
def last_modified(last_modified_func):
    """Shortcut for condition() when only a last-modified function is available."""
    return condition(last_modified_func=last_modified_func)
| Python |
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from django.utils.cache import patch_vary_headers
from django.utils.decorators import available_attrs
def vary_on_headers(*headers):
    """
    A view decorator that adds the specified headers to the Vary header of the
    response. Usage:

        @vary_on_headers('Cookie', 'Accept-language')
        def index(request):
            ...

    Note that the header names are not case-sensitive.
    """
    def decorator(func):
        def inner_func(*args, **kwargs):
            # Run the view first, then record the extra Vary headers on
            # whatever response it produced.
            resp = func(*args, **kwargs)
            patch_vary_headers(resp, headers)
            return resp
        return wraps(func, assigned=available_attrs(func))(inner_func)
    return decorator
def vary_on_cookie(func):
    """
    A view decorator that adds "Cookie" to the Vary header of a response. This
    indicates that a page's contents depends on cookies. Usage:

        @vary_on_cookie
        def index(request):
            ...
    """
    def inner_func(*args, **kwargs):
        # Equivalent to vary_on_headers('Cookie'), inlined.
        resp = func(*args, **kwargs)
        patch_vary_headers(resp, ('Cookie',))
        return resp
    return wraps(func, assigned=available_attrs(func))(inner_func)
| Python |
from django.utils.decorators import decorator_from_middleware
from django.middleware.gzip import GZipMiddleware
# Decorator form of GZipMiddleware.
gzip_page = decorator_from_middleware(GZipMiddleware)
gzip_page.__doc__ = "Decorator for views that gzips pages if the client supports it."
| Python |
from django.middleware.csrf import CsrfViewMiddleware
from django.utils.decorators import decorator_from_middleware, available_attrs
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
# Decorator form of CsrfViewMiddleware; __name__ is set explicitly because
# decorator_from_middleware produces a generically named wrapper.
csrf_protect = decorator_from_middleware(CsrfViewMiddleware)
csrf_protect.__name__ = "csrf_protect"
csrf_protect.__doc__ = """
This decorator adds CSRF protection in exactly the same way as
CsrfViewMiddleware, but it can be used on a per view basis. Using both, or
using the decorator multiple times, is harmless and efficient.
"""
class _EnsureCsrfToken(CsrfViewMiddleware):
    # We need this to behave just like the CsrfViewMiddleware, but not reject
    # requests.
    def _reject(self, request, reason):
        # Overridden to swallow rejections: the CSRF token machinery still
        # runs, but failing requests are never blocked.
        return None
# Decorator built from the non-rejecting middleware variant above.
requires_csrf_token = decorator_from_middleware(_EnsureCsrfToken)
requires_csrf_token.__name__ = 'requires_csrf_token'
requires_csrf_token.__doc__ = """
Use this decorator on views that need a correct csrf_token available to
RequestContext, but without the CSRF protection that csrf_protect
enforces.
"""
def csrf_response_exempt(view_func):
    """
    Modifies a view function so that its response is exempt
    from the post-processing of the CSRF middleware.
    """
    def wrapper(*args, **kwargs):
        # Flag the produced response so the CSRF middleware skips it.
        response = view_func(*args, **kwargs)
        response.csrf_exempt = True
        return response
    return wraps(view_func, assigned=available_attrs(view_func))(wrapper)
def csrf_view_exempt(view_func):
    """
    Marks a view function as being exempt from CSRF view protection.
    """
    # Wrap rather than mutate view_func itself: decorators are nicer when
    # they have no side-effects on the function they receive.
    def passthrough(*args, **kwargs):
        return view_func(*args, **kwargs)
    passthrough.csrf_exempt = True
    return wraps(view_func, assigned=available_attrs(view_func))(passthrough)
def csrf_exempt(view_func):
    """
    Marks a view function as being exempt from the CSRF checks
    and post processing.

    This is the same as using both the csrf_view_exempt and
    csrf_response_exempt decorators.
    """
    # Apply the view-level exemption first, then the response-level one.
    exempt_view = csrf_view_exempt(view_func)
    return csrf_response_exempt(exempt_view)
| Python |
try:
from functools import wraps
except ImportError:
from django.utils.functional import wraps # Python 2.4 fallback.
from django.utils.decorators import decorator_from_middleware_with_args, available_attrs
from django.utils.cache import patch_cache_control, add_never_cache_headers
from django.middleware.cache import CacheMiddleware
def cache_page(*args, **kwargs):
    """
    Decorator for views that tries getting the page from the cache and
    populates the cache if the page isn't in the cache yet.

    The cache is keyed by the URL and some data from the headers.
    Additionally there is the key prefix that is used to distinguish different
    cache areas in a multi-site setup. You could use the
    sites.get_current().domain, for example, as that is unique across a Django
    project.

    Additionally, all headers from the response's Vary header will be taken
    into account on caching -- just like the middleware does.
    """
    # We need backwards compatibility with code which spells it this way:
    #   def my_view(): pass
    #   my_view = cache_page(my_view, 123)
    # and this way:
    #   my_view = cache_page(123)(my_view)
    # and this:
    #   my_view = cache_page(my_view, 123, key_prefix="foo")
    # and this:
    #   my_view = cache_page(123, key_prefix="foo")(my_view)
    # and possibly this way (?):
    #   my_view = cache_page(123, my_view)
    # and also this way:
    #   my_view = cache_page(my_view)
    # and also this way:
    #   my_view = cache_page()(my_view)
    # We also add some asserts to give better error messages in case people are
    # using other ways to call cache_page that no longer work.
    cache_alias = kwargs.pop('cache', None)
    key_prefix = kwargs.pop('key_prefix', None)
    assert not kwargs, "The only keyword arguments are cache and key_prefix"
    if len(args) > 1:
        # Two positional arguments: either (view, timeout) or (timeout, view).
        assert len(args) == 2, "cache_page accepts at most 2 arguments"
        if callable(args[0]):
            return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[1], cache_alias=cache_alias, key_prefix=key_prefix)(args[0])
        elif callable(args[1]):
            return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[0], cache_alias=cache_alias, key_prefix=key_prefix)(args[1])
        else:
            assert False, "cache_page must be passed a view function if called with two arguments"
    elif len(args) == 1:
        # One positional argument: either the view itself or a timeout.
        if callable(args[0]):
            return decorator_from_middleware_with_args(CacheMiddleware)(cache_alias=cache_alias, key_prefix=key_prefix)(args[0])
        else:
            return decorator_from_middleware_with_args(CacheMiddleware)(cache_timeout=args[0], cache_alias=cache_alias, key_prefix=key_prefix)
    else:
        # No positional arguments: return the decorator itself.
        return decorator_from_middleware_with_args(CacheMiddleware)(cache_alias=cache_alias, key_prefix=key_prefix)
def cache_control(**kwargs):
    """
    Decorator that patches the view's response Cache-Control header with the
    given directives (e.g. max_age=3600, public=True).
    """
    def decorator(viewfunc):
        def inner(request, *args, **kw):
            # Run the view, then merge our directives into Cache-Control.
            response = viewfunc(request, *args, **kw)
            patch_cache_control(response, **kwargs)
            return response
        return wraps(viewfunc, assigned=available_attrs(viewfunc))(inner)
    return decorator
def never_cache(view_func):
    """
    Decorator that adds headers to a response so that it will
    never be cached.
    """
    def inner(request, *args, **kwargs):
        # Stamp the response with headers that disable client/proxy caching.
        response = view_func(request, *args, **kwargs)
        add_never_cache_headers(response)
        return response
    return wraps(view_func, assigned=available_attrs(view_func))(inner)
| Python |
"""
Views and functions for serving static files. These are only to be used
during development, and SHOULD NOT be used in a production setting.
"""
import mimetypes
import os
import posixpath
import re
import urllib
from django.http import Http404, HttpResponse, HttpResponseRedirect, HttpResponseNotModified
from django.template import loader, Template, Context, TemplateDoesNotExist
from django.utils.http import http_date, parse_http_date
def serve(request, path, document_root=None, show_indexes=False):
    """
    Serve static files below a given point in the directory structure.

    To use, put a URL pattern such as::

        (r'^(?P<path>.*)$', 'django.views.static.serve', {'document_root' : '/path/to/my/files/'})

    in your URLconf. You must provide the ``document_root`` param. You may
    also set ``show_indexes`` to ``True`` if you'd like to serve a basic index
    of the directory. This index view will use the template hardcoded below,
    but if you'd like to override it, you can create a template called
    ``static/directory_index.html``.
    """
    # Normalize the URL path and rebuild it piece by piece, dropping empty
    # components, drive specifiers and '.'/'..' entries so requests cannot
    # escape document_root.
    path = posixpath.normpath(urllib.unquote(path))
    path = path.lstrip('/')
    newpath = ''
    for part in path.split('/'):
        if not part:
            # Strip empty path components.
            continue
        drive, part = os.path.splitdrive(part)
        head, part = os.path.split(part)
        if part in (os.curdir, os.pardir):
            # Strip '.' and '..' in path.
            continue
        newpath = os.path.join(newpath, part).replace('\\', '/')
    if newpath and path != newpath:
        # Redirect the client to the sanitized path.
        return HttpResponseRedirect(newpath)
    fullpath = os.path.join(document_root, newpath)
    if os.path.isdir(fullpath):
        if show_indexes:
            return directory_index(newpath, fullpath)
        raise Http404("Directory indexes are not allowed here.")
    if not os.path.exists(fullpath):
        raise Http404('"%s" does not exist' % fullpath)
    # Respect the If-Modified-Since header.
    statobj = os.stat(fullpath)
    mimetype, encoding = mimetypes.guess_type(fullpath)
    mimetype = mimetype or 'application/octet-stream'
    if not was_modified_since(request.META.get('HTTP_IF_MODIFIED_SINCE'),
                              statobj.st_mtime, statobj.st_size):
        return HttpResponseNotModified(mimetype=mimetype)
    # Fix: close the file handle explicitly instead of relying on garbage
    # collection -- the previous open(...).read() pattern leaked descriptors
    # until the file object happened to be collected. try/finally is used
    # (rather than a `with` block) for Python 2.4 compatibility.
    fileobj = open(fullpath, 'rb')
    try:
        contents = fileobj.read()
    finally:
        fileobj.close()
    response = HttpResponse(contents, mimetype=mimetype)
    response["Last-Modified"] = http_date(statobj.st_mtime)
    response["Content-Length"] = statobj.st_size
    if encoding:
        response["Content-Encoding"] = encoding
    return response
# Fallback template used by directory_index() when the project does not
# provide a static/directory_index.html template of its own.
DEFAULT_DIRECTORY_INDEX_TEMPLATE = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Transitional//EN" "http://www.w3.org/TR/xhtml1/DTD/xhtml1-transitional.dtd">
<html xmlns="http://www.w3.org/1999/xhtml" xml:lang="en" lang="en">
<head>
<meta http-equiv="Content-type" content="text/html; charset=utf-8" />
<meta http-equiv="Content-Language" content="en-us" />
<meta name="robots" content="NONE,NOARCHIVE" />
<title>Index of {{ directory }}</title>
</head>
<body>
<h1>Index of {{ directory }}</h1>
<ul>
{% ifnotequal directory "/" %}
<li><a href="../">../</a></li>
{% endifnotequal %}
{% for f in file_list %}
<li><a href="{{ f|urlencode }}">{{ f }}</a></li>
{% endfor %}
</ul>
</body>
</html>
"""
def directory_index(path, fullpath):
    """Render a simple listing of the files contained in ``fullpath``."""
    try:
        t = loader.select_template(['static/directory_index.html',
                'static/directory_index'])
    except TemplateDoesNotExist:
        t = Template(DEFAULT_DIRECTORY_INDEX_TEMPLATE, name='Default directory index template')
    # Hidden (dot-prefixed) entries are skipped; directories get a trailing
    # slash so they are distinguishable in the listing.
    entries = []
    for name in os.listdir(fullpath):
        if name.startswith('.'):
            continue
        if os.path.isdir(os.path.join(fullpath, name)):
            name += '/'
        entries.append(name)
    context = Context({
        'directory' : path + '/',
        'file_list' : entries,
    })
    return HttpResponse(t.render(context))
def was_modified_since(header=None, mtime=0, size=0):
"""
Was something modified since the user last downloaded it?
header
This is the value of the If-Modified-Since header. If this is None,
I'll just return True.
mtime
This is the modification time of the item we're talking about.
size
This is the size of the item we're talking about.
"""
try:
if header is None:
raise ValueError
matches = re.match(r"^([^;]+)(; length=([0-9]+))?$", header,
re.IGNORECASE)
header_mtime = parse_http_date(matches.group(1))
header_len = matches.group(3)
if header_len and int(header_len) != size:
raise ValueError
if mtime > header_mtime:
raise ValueError
except (AttributeError, ValueError, OverflowError):
return True
return False
| Python |
from django.http import HttpResponseForbidden
from django.template import Context, Template
from django.conf import settings
# We include the template inline since we need to be able to reliably display
# this error message, especially for the sake of developers, and there isn't any
# other way of making it available independent of what is in the settings file.
CSRF_FAILRE_TEMPLATE = """
<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01 Transitional//EN" "http://www.w3.org/TR/html4/loose.dtd">
<html lang="en">
<head>
<meta http-equiv="content-type" content="text/html; charset=utf-8">
<meta name="robots" content="NONE,NOARCHIVE">
<title>403 Forbidden</title>
<style type="text/css">
html * { padding:0; margin:0; }
body * { padding:10px 20px; }
body * * { padding:0; }
body { font:small sans-serif; background:#eee; }
body>div { border-bottom:1px solid #ddd; }
h1 { font-weight:normal; margin-bottom:.4em; }
h1 span { font-size:60%; color:#666; font-weight:normal; }
#info { background:#f6f6f6; }
#info ul { margin: 0.5em 4em; }
#info p, #summary p { padding-top:10px; }
#summary { background: #ffc; }
#explanation { background:#eee; border-bottom: 0px none; }
</style>
</head>
<body>
<div id="summary">
<h1>Forbidden <span>(403)</span></h1>
<p>CSRF verification failed. Request aborted.</p>
{% if no_referer %}
<p>You are seeing this message because this HTTPS site requires a 'Referer
header' to be sent by your Web browser, but none was sent. This header is
required for security reasons, to ensure that your browser is not being
hijacked by third parties.</p>
<p>If you have configured your browser to disable 'Referer' headers, please
re-enable them, at least for this site, or for HTTPS connections, or for
'same-origin' requests.</p>
{% endif %}
</div>
{% if DEBUG %}
<div id="info">
<h2>Help</h2>
{% if reason %}
<p>Reason given for failure:</p>
<pre>
{{ reason }}
</pre>
{% endif %}
<p>In general, this can occur when there is a genuine Cross Site Request Forgery, or when
<a
href='http://docs.djangoproject.com/en/dev/ref/contrib/csrf/#ref-contrib-csrf'>Django's
CSRF mechanism</a> has not been used correctly. For POST forms, you need to
ensure:</p>
<ul>
<li>The view function uses <a
href='http://docs.djangoproject.com/en/dev/ref/templates/api/#subclassing-context-requestcontext'><code>RequestContext</code></a>
for the template, instead of <code>Context</code>.</li>
<li>In the template, there is a <code>{% templatetag openblock %} csrf_token
{% templatetag closeblock %}</code> template tag inside each POST form that
targets an internal URL.</li>
<li>If you are not using <code>CsrfViewMiddleware</code>, then you must use
<code>csrf_protect</code> on any views that use the <code>csrf_token</code>
template tag, as well as those that accept the POST data.</li>
</ul>
<p>You're seeing the help section of this page because you have <code>DEBUG =
True</code> in your Django settings file. Change that to <code>False</code>,
and only the initial error message will be displayed. </p>
<p>You can customize this page using the CSRF_FAILURE_VIEW setting.</p>
</div>
{% else %}
<div id="explanation">
<p><small>More information is available with DEBUG=True.</small></p>
</div>
{% endif %}
</body>
</html>
"""
def csrf_failure(request, reason=""):
    """
    Default view used when request fails CSRF protection
    """
    # Imported here (not at module level) to avoid a circular import.
    from django.middleware.csrf import REASON_NO_REFERER
    template = Template(CSRF_FAILRE_TEMPLATE)
    context = Context({
        'DEBUG': settings.DEBUG,
        'reason': reason,
        'no_referer': reason == REASON_NO_REFERER,
    })
    return HttpResponseForbidden(template.render(context), mimetype='text/html')
| Python |
import time
import datetime
from django.db import models
from django.core.exceptions import ImproperlyConfigured
from django.http import Http404
from django.utils.encoding import force_unicode
from django.utils.translation import ugettext as _
from django.views.generic.base import View
from django.views.generic.detail import BaseDetailView, SingleObjectTemplateResponseMixin
from django.views.generic.list import MultipleObjectMixin, MultipleObjectTemplateResponseMixin
class YearMixin(object):
    # Mixin for views addressed by year.
    year_format = '%Y'  # strptime format used to interpret the year
    year = None         # optional hard-coded year (takes precedence over the URL)

    def get_year_format(self):
        """
        Get a year format string in strptime syntax to be used to parse the
        year from url variables.
        """
        return self.year_format

    def get_year(self):
        "Return the year for which this view should display data"
        if self.year is not None:
            return self.year
        try:
            return self.kwargs['year']
        except KeyError:
            try:
                return self.request.GET['year']
            except KeyError:
                raise Http404(_(u"No year specified"))
class MonthMixin(object):
    # Mixin for views addressed by month.
    month_format = '%b'  # strptime format (abbreviated month name by default)
    month = None         # optional hard-coded month

    def get_month_format(self):
        """
        Get a month format string in strptime syntax to be used to parse the
        month from url variables.
        """
        return self.month_format

    def get_month(self):
        "Return the month for which this view should display data"
        if self.month is not None:
            return self.month
        try:
            return self.kwargs['month']
        except KeyError:
            try:
                return self.request.GET['month']
            except KeyError:
                raise Http404(_(u"No month specified"))

    def get_next_month(self, date):
        """
        Get the next valid month.
        """
        # First day of the month after `date`.
        last_day = _month_bounds(date)[1]
        following = (last_day + datetime.timedelta(days=1)).replace(day=1)
        return _get_next_prev_month(self, following, is_previous=False, use_first_day=True)

    def get_previous_month(self, date):
        """
        Get the previous valid month.
        """
        # Day before the first of the month containing `date`.
        first_day = _month_bounds(date)[0]
        preceding = first_day - datetime.timedelta(days=1)
        return _get_next_prev_month(self, preceding, is_previous=True, use_first_day=True)
class DayMixin(object):
    # Mixin for views addressed by day.
    day_format = '%d'  # strptime format used to interpret the day
    day = None         # optional hard-coded day

    def get_day_format(self):
        """
        Get a day format string in strptime syntax to be used to parse the day
        from url variables.
        """
        return self.day_format

    def get_day(self):
        "Return the day for which this view should display data"
        if self.day is not None:
            return self.day
        try:
            return self.kwargs['day']
        except KeyError:
            try:
                return self.request.GET['day']
            except KeyError:
                raise Http404(_(u"No day specified"))

    def get_next_day(self, date):
        """
        Get the next valid day.
        """
        candidate = date + datetime.timedelta(days=1)
        return _get_next_prev_month(self, candidate, is_previous=False, use_first_day=False)

    def get_previous_day(self, date):
        """
        Get the previous valid day.
        """
        candidate = date - datetime.timedelta(days=1)
        return _get_next_prev_month(self, candidate, is_previous=True, use_first_day=False)
class WeekMixin(object):
    # Mixin for views addressed by week number.
    week_format = '%U'  # '%U': weeks start on Sunday; '%W' would start on Monday
    week = None         # optional hard-coded week

    def get_week_format(self):
        """
        Get a week format string in strptime syntax to be used to parse the
        week from url variables.
        """
        return self.week_format

    def get_week(self):
        "Return the week for which this view should display data"
        if self.week is not None:
            return self.week
        try:
            return self.kwargs['week']
        except KeyError:
            try:
                return self.request.GET['week']
            except KeyError:
                raise Http404(_(u"No week specified"))
class DateMixin(object):
    """
    Mixin class for views manipulating date-based data.
    """
    date_field = None      # name of the model's date field to filter on
    allow_future = False   # whether objects dated in the future may be shown

    def get_date_field(self):
        """
        Get the name of the date field to be used to filter by.
        """
        field_name = self.date_field
        if field_name is None:
            raise ImproperlyConfigured(u"%s.date_field is required." % self.__class__.__name__)
        return field_name

    def get_allow_future(self):
        """
        Returns `True` if the view should be allowed to display objects from
        the future.
        """
        return self.allow_future
class BaseDateListView(MultipleObjectMixin, DateMixin, View):
    """
    Abstract base class for date-based views displaying a list of objects.
    """
    # Raise 404 (rather than rendering an empty page) when nothing matches.
    allow_empty = False

    def get(self, request, *args, **kwargs):
        # Delegate to get_dated_items(), then merge its extra context in.
        self.date_list, self.object_list, extra_context = self.get_dated_items()
        context = self.get_context_data(object_list=self.object_list,
                                        date_list=self.date_list)
        context.update(extra_context)
        return self.render_to_response(context)

    def get_dated_items(self):
        """
        Obtain the list of dates and items.
        """
        raise NotImplementedError('A DateView must provide an implementation of get_dated_items()')

    def get_dated_queryset(self, **lookup):
        """
        Get a queryset properly filtered according to `allow_future` and any
        extra lookup kwargs.
        """
        qs = self.get_queryset().filter(**lookup)
        date_field = self.get_date_field()
        allow_future = self.get_allow_future()
        allow_empty = self.get_allow_empty()

        if not allow_future:
            # Hide objects whose date is after "now".
            qs = qs.filter(**{'%s__lte' % date_field: datetime.datetime.now()})

        if not allow_empty and not qs:
            raise Http404(_(u"No %(verbose_name_plural)s available") % {
                'verbose_name_plural': force_unicode(qs.model._meta.verbose_name_plural)
            })

        return qs

    def get_date_list(self, queryset, date_type):
        """
        Get a date list by calling `queryset.dates()`, checking along the way
        for empty lists that aren't allowed.
        """
        date_field = self.get_date_field()
        allow_empty = self.get_allow_empty()
        # Reversed so the newest dates come first.
        date_list = queryset.dates(date_field, date_type)[::-1]
        if date_list is not None and not date_list and not allow_empty:
            name = force_unicode(queryset.model._meta.verbose_name_plural)
            raise Http404(_(u"No %(verbose_name_plural)s available") %
                          {'verbose_name_plural': name})

        return date_list

    def get_context_data(self, **kwargs):
        """
        Get the context. Must return a Context (or subclass) instance.
        """
        items = kwargs.pop('object_list')
        context = super(BaseDateListView, self).get_context_data(object_list=items)
        context.update(kwargs)
        return context
class BaseArchiveIndexView(BaseDateListView):
    """
    Base class for archives of date-based items.

    Requires a response mixin.
    """
    context_object_name = 'latest'

    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        qs = self.get_dated_queryset()
        date_list = self.get_date_list(qs, 'year')
        if not date_list:
            # Nothing to show; hand back an empty queryset (not a list) so
            # model introspection by parent classes still works.
            object_list = qs.none()
        else:
            object_list = qs.order_by('-' + self.get_date_field())
        return (date_list, object_list, {})
class ArchiveIndexView(MultipleObjectTemplateResponseMixin, BaseArchiveIndexView):
    """
    Top-level archive of date-based items.
    """
    # Selects the <model>_archive.html template by default.
    template_name_suffix = '_archive'
class BaseYearArchiveView(YearMixin, BaseDateListView):
    """
    List of objects published in a given year.
    """
    make_object_list = False  # by default only the month list is populated

    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        # Yes, no error checking: the URLpattern ought to validate this; it's
        # an error if it doesn't.
        year = self.get_year()
        date_field = self.get_date_field()
        qs = self.get_dated_queryset(**{date_field + '__year': year})
        date_list = self.get_date_list(qs, 'month')

        if not self.get_make_object_list():
            # We need this to be a queryset since parent classes introspect it
            # to find information about the model.
            object_list = qs.none()
        else:
            object_list = qs.order_by('-' + date_field)

        return (date_list, object_list, {'year': year})

    def get_make_object_list(self):
        """
        Return `True` if this view should contain the full list of objects in
        the given year.
        """
        return self.make_object_list
class YearArchiveView(MultipleObjectTemplateResponseMixin, BaseYearArchiveView):
    """
    List of objects published in a given year.
    """
    # Selects the <model>_archive_year.html template by default.
    template_name_suffix = '_archive_year'
class BaseMonthArchiveView(YearMixin, MonthMixin, BaseDateListView):
    """
    List of objects published in a given month.
    """
    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        year = self.get_year()
        month = self.get_month()

        date_field = self.get_date_field()
        date = _date_from_string(year, self.get_year_format(),
                                 month, self.get_month_format())

        # Construct a date-range lookup spanning the whole month.
        first_day, last_day = _month_bounds(date)
        lookup_kwargs = {
            '%s__gte' % date_field: first_day,
            '%s__lt' % date_field: last_day,
        }

        qs = self.get_dated_queryset(**lookup_kwargs)
        date_list = self.get_date_list(qs, 'day')

        return (date_list, qs, {
            'month': date,
            'next_month': self.get_next_month(date),
            'previous_month': self.get_previous_month(date),
        })
class MonthArchiveView(MultipleObjectTemplateResponseMixin, BaseMonthArchiveView):
    """
    List of objects published in a given month.
    """
    # Selects the <model>_archive_month.html template by default.
    template_name_suffix = '_archive_month'
class BaseWeekArchiveView(YearMixin, WeekMixin, BaseDateListView):
    """
    List of objects published in a given week.
    """
    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        year = self.get_year()
        week = self.get_week()

        date_field = self.get_date_field()
        week_format = self.get_week_format()
        # '%W' numbers weeks starting on Monday, '%U' starting on Sunday; the
        # matching %w weekday digit anchors strptime at the week's first day.
        week_start = {
            '%W': '1',
            '%U': '0',
        }[week_format]
        date = _date_from_string(year, self.get_year_format(),
                                 week_start, '%w',
                                 week, week_format)

        # Construct a date-range lookup covering the week's seven days.
        first_day = date
        last_day = date + datetime.timedelta(days=7)
        lookup_kwargs = {
            '%s__gte' % date_field: first_day,
            '%s__lt' % date_field: last_day,
        }

        qs = self.get_dated_queryset(**lookup_kwargs)

        return (None, qs, {'week': date})
class WeekArchiveView(MultipleObjectTemplateResponseMixin, BaseWeekArchiveView):
    """
    List of objects published in a given week.
    """
    # Selects the <model>_archive_week.html template by default.
    template_name_suffix = '_archive_week'
class BaseDayArchiveView(YearMixin, MonthMixin, DayMixin, BaseDateListView):
    """
    List of objects published on a given day.
    """
    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        # Resolve year, month and day from the URL (or query string) and
        # parse them into a single date object.
        date = _date_from_string(self.get_year(), self.get_year_format(),
                                 self.get_month(), self.get_month_format(),
                                 self.get_day(), self.get_day_format())
        return self._get_dated_items(date)

    def _get_dated_items(self, date):
        """
        Do the actual heavy lifting of getting the dated items; this accepts a
        date object so that TodayArchiveView can be trivial.
        """
        date_field = self.get_date_field()
        model_field = self.get_queryset().model._meta.get_field(date_field)
        lookup = _date_lookup_for_field(model_field, date)
        qs = self.get_dated_queryset(**lookup)
        extra = {
            'day': date,
            'previous_day': self.get_previous_day(date),
            'next_day': self.get_next_day(date),
            'previous_month': self.get_previous_month(date),
            'next_month': self.get_next_month(date)
        }
        return (None, qs, extra)
class DayArchiveView(MultipleObjectTemplateResponseMixin, BaseDayArchiveView):
    """
    List of objects published on a given day.
    """
    # Selects the <model>_archive_day.html template by default.
    template_name_suffix = "_archive_day"
class BaseTodayArchiveView(BaseDayArchiveView):
    """
    List of objects published today.
    """
    def get_dated_items(self):
        """
        Return (date_list, items, extra_context) for this request.
        """
        # Same as the day archive, pinned to the current date.
        today = datetime.date.today()
        return self._get_dated_items(today)
class TodayArchiveView(MultipleObjectTemplateResponseMixin, BaseTodayArchiveView):
    """
    List of objects published today.
    """
    # Reuses the day-archive template.
    template_name_suffix = "_archive_day"
class BaseDateDetailView(YearMixin, MonthMixin, DayMixin, DateMixin, BaseDetailView):
    """
    Detail view of a single object on a single date; this differs from the
    standard DetailView by accepting a year/month/day in the URL.
    """
    def get_object(self, queryset=None):
        """
        Get the object this request displays.
        """
        year = self.get_year()
        month = self.get_month()
        day = self.get_day()
        date = _date_from_string(year, self.get_year_format(),
                                 month, self.get_month_format(),
                                 day, self.get_day_format())

        qs = self.get_queryset()

        if not self.get_allow_future() and date > datetime.date.today():
            raise Http404(_(u"Future %(verbose_name_plural)s not available because %(class_name)s.allow_future is False.") % {
                'verbose_name_plural': qs.model._meta.verbose_name_plural,
                'class_name': self.__class__.__name__,
            })

        # Filter down a queryset from self.queryset using the date from the
        # URL. This'll get passed as the queryset to DetailView.get_object,
        # which'll handle the 404
        date_field = self.get_date_field()
        field = qs.model._meta.get_field(date_field)
        lookup = _date_lookup_for_field(field, date)
        qs = qs.filter(**lookup)

        # NOTE(review): super() is anchored at BaseDetailView (the parent
        # itself), so any get_object override between BaseDetailView and this
        # class in the MRO is skipped -- presumably intentional; confirm
        # before changing.
        return super(BaseDetailView, self).get_object(queryset=qs)
class DateDetailView(SingleObjectTemplateResponseMixin, BaseDateDetailView):
    """
    Date-based detail page: like DetailView, except the URL also carries
    a year/month/day that must match the object's date field.
    """
    template_name_suffix = '_detail'
def _date_from_string(year, year_format, month, month_format, day='', day_format='', delim='__'):
    """
    Helper: parse year/month/day strings with the matching format
    strings into a datetime.date, raising Http404 when invalid.
    """
    fmt = delim.join((year_format, month_format, day_format))
    value = delim.join((year, month, day))
    try:
        return datetime.date(*time.strptime(value, fmt)[:3])
    except ValueError:
        raise Http404(_(u"Invalid date string '%(datestr)s' given format '%(format)s'") % {
            'datestr': value,
            'format': fmt,
        })
def _month_bounds(date):
    """
    Helper: return (first_of_month, first_of_next_month) for the month
    containing *date*.
    """
    first_day = date.replace(day=1)
    # Rolling December forward has to bump the year as well.
    if first_day.month == 12:
        next_month = first_day.replace(year=first_day.year + 1, month=1)
    else:
        next_month = first_day.replace(month=first_day.month + 1)
    return first_day, next_month
def _get_next_prev_month(generic_view, naive_result, is_previous, use_first_day):
    """
    Helper: compute the next or previous date for month/day navigation
    links so the links a view renders never lead to a 404.

    Behaviour depends on the view's allow_empty/allow_future flags:

    * both true: the naive next/previous date is always acceptable,
      regardless of object existence.
    * allow_empty only: the naive date, unless it lies in the future.
    * allow_future only: the nearest date actually containing an
      object, even a future one; None when no such object exists.
    * neither: the nearest date containing an object, but None when
      that date is in the future or there is no such object.
    """
    date_field = generic_view.get_date_field()
    allow_empty = generic_view.get_allow_empty()
    allow_future = generic_view.get_allow_future()
    if allow_empty:
        # Empty archives are fine, so the naive candidate needs no
        # database confirmation.
        result = naive_result
    else:
        # Ask the database for the closest object on the relevant side
        # of the naive candidate.
        if is_previous:
            op, ordering = 'lte', '-%s' % date_field
        else:
            op, ordering = 'gte', date_field
        lookup = {'%s__%s' % (date_field, op): naive_result}
        qs = generic_view.get_queryset().filter(**lookup).order_by(ordering)
        try:
            result = getattr(qs[0], date_field)
        except IndexError:
            # No object on that side: no link to offer.
            result = None
    # Normalise datetimes down to plain dates.
    if hasattr(result, 'date'):
        result = result.date()
    # Month views always link to the first of the month for consistency.
    if result and use_first_day:
        result = result.replace(day=1)
    # Finally, suppress future dates unless explicitly allowed.
    if result and (allow_future or result < datetime.date.today()):
        return result
    return None
def _date_lookup_for_field(field, date):
    """
    Helper: build the filter() kwargs matching *date* against *field*.

    A plain ``filter(field=date)`` is wrong for DateTimeFields because
    it would only match midnight, so those get a whole-day range lookup
    instead.
    """
    if not isinstance(field, models.DateTimeField):
        return {field.name: date}
    day_start = datetime.datetime.combine(date, datetime.time.min)
    day_end = datetime.datetime.combine(date, datetime.time.max)
    return {'%s__range' % field.name: (day_start, day_end)}
| Python |
from django.template import loader, RequestContext
from django.http import HttpResponse, HttpResponseRedirect, HttpResponsePermanentRedirect, HttpResponseGone
from django.utils.log import getLogger
import warnings
warnings.warn(
'Function-based generic views have been deprecated; use class-based views instead.',
PendingDeprecationWarning
)
logger = getLogger('django.request')
def direct_to_template(request, template, extra_context=None, mimetype=None, **kwargs):
    """
    Render *template*, exposing any extra URL parameters in the context
    as ``{{ params }}``.

    Callable values in ``extra_context`` are evaluated before being
    placed in the context.
    """
    dictionary = {'params': kwargs}
    for key, value in (extra_context or {}).items():
        if callable(value):
            dictionary[key] = value()
        else:
            dictionary[key] = value
    context = RequestContext(request, dictionary)
    template_obj = loader.get_template(template)
    return HttpResponse(template_obj.render(context), mimetype=mimetype)
def redirect_to(request, url, permanent=True, query_string=False, **kwargs):
    """
    Redirect to a given URL.

    The target may contain dict-style string formatting, interpolated
    against the named parameters captured from the URL.  For example, to
    redirect from ``/foo/<id>/`` to ``/bar/<id>/``::

        urlpatterns = patterns('',
            ('^foo/(?P<id>\d+)/$', 'django.views.generic.simple.redirect_to', {'url' : '/bar/%(id)s/'}),
        )

    A ``url`` of ``None`` produces an HttpResponseGone (410).  A False
    ``permanent`` argument yields a 302 response instead of a 301, and
    ``query_string=True`` appends the request's GET query string to the
    target URL.
    """
    query = request.META["QUERY_STRING"]
    if url is None:
        # Nothing to redirect to: log and answer 410 Gone.
        logger.warning('Gone: %s' % request.path,
                       extra={
                           'status_code': 410,
                           'request': request
                       })
        return HttpResponseGone()
    if query and query_string:
        url = "%s?%s" % (url, query)
    if permanent:
        response_class = HttpResponsePermanentRedirect
    else:
        response_class = HttpResponseRedirect
    return response_class(url % kwargs)
| Python |
from django.forms.models import ModelFormMetaclass, ModelForm
from django.template import RequestContext, loader
from django.http import Http404, HttpResponse, HttpResponseRedirect
from django.core.xheaders import populate_xheaders
from django.core.exceptions import ObjectDoesNotExist, ImproperlyConfigured
from django.utils.translation import ugettext
from django.contrib.auth.views import redirect_to_login
from django.views.generic import GenericViewError
from django.contrib import messages
import warnings
warnings.warn(
'Function-based generic views have been deprecated; use class-based views instead.',
PendingDeprecationWarning
)
def apply_extra_context(extra_context, context):
    """
    Add the items of the ``extra_context`` dict to ``context``.

    Callable values are called and their return value stored instead,
    so views can supply lazily-evaluated context entries.
    """
    # .items() behaves identically on Python 2 and also exists on
    # Python 3, where dict.iteritems was removed.
    for key, value in extra_context.items():
        if callable(value):
            context[key] = value()
        else:
            context[key] = value
def get_model_and_form_class(model, form_class):
    """
    Return a (model, form_class) pair for a generic view.

    When ``form_class`` is supplied, its associated model is returned
    along with it.  Otherwise a ModelForm subclass is generated on the
    fly for ``model``.  Raises GenericViewError when neither argument
    was given.
    """
    if form_class:
        return form_class._meta.model, form_class
    if model:
        # An inner "model = model" assignment trips over Python's
        # scoping rules, so alias the argument first.
        tmp_model = model
        # TODO: we should be able to construct a ModelForm without creating
        # and passing in a temporary inner class.
        class Meta:
            model = tmp_model
        generated = ModelFormMetaclass(model.__name__ + 'Form', (ModelForm,), {'Meta': Meta})
        return model, generated
    raise GenericViewError("Generic view must be called with either a model or"
                           " form_class argument.")
def redirect(post_save_redirect, obj):
    """
    Build the HttpResponseRedirect issued after a successful save in
    the ``create_object`` and ``update_object`` views.

    ``post_save_redirect`` may contain named string-substitution
    placeholders filled from ``obj``'s field values; when it is None,
    ``obj.get_absolute_url()`` supplies the target instead.  Raises
    ImproperlyConfigured when neither source of a URL is available.
    """
    if post_save_redirect:
        return HttpResponseRedirect(post_save_redirect % obj.__dict__)
    if hasattr(obj, 'get_absolute_url'):
        return HttpResponseRedirect(obj.get_absolute_url())
    raise ImproperlyConfigured(
        "No URL to redirect to. Either pass a post_save_redirect"
        " parameter to the generic view or define a get_absolute_url"
        " method on the Model.")
def lookup_object(model, object_id, slug, slug_field):
    """
    Fetch a single ``model`` instance: by primary key when ``object_id``
    is given, otherwise by ``slug_field`` equalling ``slug``.

    Raises GenericViewError when neither lookup was provided, and
    Http404 when no matching object exists.
    """
    if object_id:
        lookup_kwargs = {'%s__exact' % model._meta.pk.name: object_id}
    elif slug and slug_field:
        lookup_kwargs = {'%s__exact' % slug_field: slug}
    else:
        raise GenericViewError(
            "Generic view must be called with either an object_id or a"
            " slug/slug_field.")
    try:
        return model.objects.get(**lookup_kwargs)
    except ObjectDoesNotExist:
        raise Http404("No %s found for %s"
                      % (model._meta.verbose_name, lookup_kwargs))
def create_object(request, model=None, template_name=None,
                  template_loader=loader, extra_context=None, post_save_redirect=None,
                  login_required=False, context_processors=None, form_class=None):
    """
    Generic object-creation view.

    Templates: ``<app_label>/<model_name>_form.html``
    Context:
        form
            the form for the object
    """
    if extra_context is None:
        extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)
    model, form_class = get_model_and_form_class(model, form_class)
    if request.method == 'POST':
        form = form_class(request.POST, request.FILES)
        if form.is_valid():
            new_object = form.save()
            msg = ugettext("The %(verbose_name)s was created successfully.") % {
                "verbose_name": model._meta.verbose_name}
            messages.success(request, msg, fail_silently=True)
            return redirect(post_save_redirect, new_object)
    else:
        form = form_class()
    # Render the (possibly invalid) form.
    if not template_name:
        template_name = "%s/%s_form.html" % (model._meta.app_label, model._meta.object_name.lower())
    context = RequestContext(request, {'form': form}, context_processors)
    apply_extra_context(extra_context, context)
    return HttpResponse(template_loader.get_template(template_name).render(context))
def update_object(request, model=None, object_id=None, slug=None,
                  slug_field='slug', template_name=None, template_loader=loader,
                  extra_context=None, post_save_redirect=None, login_required=False,
                  context_processors=None, template_object_name='object',
                  form_class=None):
    """
    Generic object-update view.

    Templates: ``<app_label>/<model_name>_form.html``
    Context:
        form
            the form for the object
        object
            the original object being edited
    """
    if extra_context is None:
        extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)
    model, form_class = get_model_and_form_class(model, form_class)
    obj = lookup_object(model, object_id, slug, slug_field)
    if request.method == 'POST':
        form = form_class(request.POST, request.FILES, instance=obj)
        if form.is_valid():
            obj = form.save()
            msg = ugettext("The %(verbose_name)s was updated successfully.") % {
                "verbose_name": model._meta.verbose_name}
            messages.success(request, msg, fail_silently=True)
            return redirect(post_save_redirect, obj)
    else:
        form = form_class(instance=obj)
    # Render the (possibly invalid) form alongside the original object.
    if not template_name:
        template_name = "%s/%s_form.html" % (model._meta.app_label, model._meta.object_name.lower())
    context = RequestContext(request, {
        'form': form,
        template_object_name: obj,
    }, context_processors)
    apply_extra_context(extra_context, context)
    response = HttpResponse(template_loader.get_template(template_name).render(context))
    populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.attname))
    return response
def delete_object(request, model, post_delete_redirect, object_id=None,
                  slug=None, slug_field='slug', template_name=None,
                  template_loader=loader, extra_context=None, login_required=False,
                  context_processors=None, template_object_name='object'):
    """
    Generic object-deletion view.

    On GET the given template is rendered so the user can confirm the
    deletion; for safety the object is only actually deleted on POST.

    Templates: ``<app_label>/<model_name>_confirm_delete.html``
    Context:
        object
            the original object being deleted
    """
    if extra_context is None:
        extra_context = {}
    if login_required and not request.user.is_authenticated():
        return redirect_to_login(request.path)
    obj = lookup_object(model, object_id, slug, slug_field)
    if request.method == 'POST':
        obj.delete()
        msg = ugettext("The %(verbose_name)s was deleted.") % {
            "verbose_name": model._meta.verbose_name}
        messages.success(request, msg, fail_silently=True)
        return HttpResponseRedirect(post_delete_redirect)
    # GET: show the confirmation page instead of deleting.
    if not template_name:
        template_name = "%s/%s_confirm_delete.html" % (model._meta.app_label, model._meta.object_name.lower())
    context = RequestContext(request, {template_object_name: obj}, context_processors)
    apply_extra_context(extra_context, context)
    response = HttpResponse(template_loader.get_template(template_name).render(context))
    populate_xheaders(request, response, model, getattr(obj, obj._meta.pk.attname))
    return response
| Python |
import re
from django.core.paginator import Paginator, InvalidPage
from django.core.exceptions import ImproperlyConfigured
from django.http import Http404
from django.utils.encoding import smart_str
from django.utils.translation import ugettext as _
from django.views.generic.base import TemplateResponseMixin, View
class MultipleObjectMixin(object):
    """
    Mixin providing the queryset resolution, pagination, and context
    construction shared by the list-style generic views.
    """
    # Whether an empty object list is acceptable (404 is raised otherwise).
    allow_empty = True
    # Explicit queryset; takes precedence over ``model``.
    queryset = None
    # Model whose default manager supplies the queryset when ``queryset``
    # is not set.
    model = None
    # Page size; None disables pagination.
    paginate_by = None
    # Extra context variable name for the object list.
    context_object_name = None
    # Paginator implementation; override to customise pagination.
    paginator_class = Paginator
    def get_queryset(self):
        """
        Get the list of items for this view. This must be an iterable, and may
        be a queryset (in which case qs-specific behavior will be enabled).
        """
        if self.queryset is not None:
            queryset = self.queryset
            # _clone() gives each request a fresh queryset so cached
            # results don't leak between requests.
            if hasattr(queryset, '_clone'):
                queryset = queryset._clone()
        elif self.model is not None:
            queryset = self.model._default_manager.all()
        else:
            raise ImproperlyConfigured(u"'%s' must define 'queryset' or 'model'"
                % self.__class__.__name__)
        return queryset
    def paginate_queryset(self, queryset, page_size):
        """
        Paginate the queryset, if needed.

        Returns a (paginator, page, object_list, has_other_pages) tuple.
        Raises Http404 when the requested page is unparseable or out of
        range.
        """
        paginator = self.get_paginator(queryset, page_size, allow_empty_first_page=self.get_allow_empty())
        # The page may come from the URLconf kwargs or the query string,
        # defaulting to the first page.
        page = self.kwargs.get('page') or self.request.GET.get('page') or 1
        try:
            page_number = int(page)
        except ValueError:
            # 'last' is the only supported non-numeric page value.
            if page == 'last':
                page_number = paginator.num_pages
            else:
                raise Http404(_(u"Page is not 'last', nor can it be converted to an int."))
        try:
            page = paginator.page(page_number)
            return (paginator, page, page.object_list, page.has_other_pages())
        except InvalidPage:
            raise Http404(_(u'Invalid page (%(page_number)s)') % {
                'page_number': page_number
            })
    def get_paginate_by(self, queryset):
        """
        Get the number of items to paginate by, or ``None`` for no pagination.
        """
        return self.paginate_by
    def get_paginator(self, queryset, per_page, orphans=0, allow_empty_first_page=True):
        """
        Return an instance of the paginator for this view.
        """
        return self.paginator_class(queryset, per_page, orphans=orphans, allow_empty_first_page=allow_empty_first_page)
    def get_allow_empty(self):
        """
        Returns ``True`` if the view should display empty lists, and ``False``
        if a 404 should be raised instead.
        """
        return self.allow_empty
    def get_context_object_name(self, object_list):
        """
        Get the name of the item to be used in the context.

        Uses ``context_object_name`` when set, otherwise derives
        ``<model>_list`` from a queryset's model; returns None for a
        plain iterable.
        """
        if self.context_object_name:
            return self.context_object_name
        elif hasattr(object_list, 'model'):
            return smart_str('%s_list' % object_list.model._meta.object_name.lower())
        else:
            return None
    def get_context_data(self, **kwargs):
        """
        Get the context for this view.
        """
        queryset = kwargs.pop('object_list')
        page_size = self.get_paginate_by(queryset)
        context_object_name = self.get_context_object_name(queryset)
        if page_size:
            # Pagination replaces the full queryset with the page's slice.
            paginator, page, queryset, is_paginated = self.paginate_queryset(queryset, page_size)
            context = {
                'paginator': paginator,
                'page_obj': page,
                'is_paginated': is_paginated,
                'object_list': queryset
            }
        else:
            context = {
                'paginator': None,
                'page_obj': None,
                'is_paginated': False,
                'object_list': queryset
            }
        # Caller-supplied values override the pagination defaults above...
        context.update(kwargs)
        # ...but the friendly name always mirrors the final object list.
        if context_object_name is not None:
            context[context_object_name] = queryset
        return context
class BaseListView(MultipleObjectMixin, View):
    """
    Base list view: fetches the queryset, enforces allow_empty, and
    renders the resulting context.
    """
    def get(self, request, *args, **kwargs):
        self.object_list = self.get_queryset()
        if not self.get_allow_empty() and len(self.object_list) == 0:
            raise Http404(_(u"Empty list and '%(class_name)s.allow_empty' is False.")
                          % {'class_name': self.__class__.__name__})
        return self.render_to_response(
            self.get_context_data(object_list=self.object_list))
class MultipleObjectTemplateResponseMixin(TemplateResponseMixin):
    """
    Template-name resolution for list views, with a fallback to the
    ``<app_label>/<model><suffix>.html`` naming convention.
    """
    template_name_suffix = '_list'
    def get_template_names(self):
        """
        Return a list of template names to be used for the request. Must return
        a list. May not be called if get_template is overridden.
        """
        try:
            names = super(MultipleObjectTemplateResponseMixin, self).get_template_names()
        except ImproperlyConfigured:
            # No explicit template_name was configured; that's fine --
            # rely entirely on the generated name below.
            names = []
        # Append a name derived from the queryset's model. It goes last
        # so that any user-supplied names take precedence over it.
        if hasattr(self.object_list, 'model'):
            opts = self.object_list.model._meta
            names.append("%s/%s%s.html" % (opts.app_label, opts.object_name.lower(), self.template_name_suffix))
        return names
class ListView(MultipleObjectTemplateResponseMixin, BaseListView):
    """
    Render a list of objects taken from `self.model` or `self.queryset`;
    `self.queryset` may be any iterable of items, not only a queryset.
    """
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.