# (dataset-extraction artifact, commented out so the file parses)
# code stringlengths 1 1.72M | language stringclasses 1 value  |---|---|
#!/usr/bin/python
import time, os, stat, random, sys, logging, socket, shutil, tempfile
from binascii import crc32
from StringIO import StringIO
from twisted.python import failure
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
from flud.FludNode import FludNode
from flud.protocol.FludClient import FludClient
from flud.FludCrypto import generateRandom, hashfile
from flud.protocol.FludCommUtil import *
from flud.fencode import fencode
"""
Test code for primitive operations. These ops include all of the descendents
of ROOT and REQUEST in FludProtocol.
"""
# Module-level test fixtures, populated by generateTestData() (via `global`
# assignments) before the test chain runs.
smallfilekey = ""       # CAS key (fencoded hash) of the small test file
smallfilename = ""      # path of the small test file (named by its key)
smallfilenamebad = ""   # copy of the small file under a corrupted key name
largefilekey = ""
largefilename = ""
largefilenamebad = ""
metadata = 'aaaa'       # dummy metadata blob sent with STORE requests
def testerror(failure, message, node):
"""
error handler for test errbacks
"""
print "testerror message: %s" % message
print "testerror: %s" % str(failure)
print "At least 1 test FAILED"
raise failure
def testUnexpectedSuccess(res, message, node):
print "unexpected success message: %s" % message
print "At least 1 test succeeded when it should have failed"
raise "bad"
def testDELETEBadKeyFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testDELETEBadKey: NotFoundException is the expected
    outcome; anything else is re-raised.  This is the last test in the
    chain, so the success path just reports.
    """
    if failure.check('flud.protocol.FludCommUtil.NotFoundException'):
        print "%s" % msg
        # the end -- final test of the primitive-op chain
    else:
        print "\nDELETEBadKey expected NotFoundException," \
                " but got a different failure:"
        raise failure
def testDELETEBadKey(nKu, node, host, port):
    """
    Sends a DELETE for a key containing a path separator
    ("somedir/<key>"); the server should refuse/fail to find it.
    """
    print "starting testDELETEBadKey"
    path = os.path.join("somedir", largefilekey)
    deferred = node.client.sendDelete(path, crc32(path), host, port, nKu)
    deferred.addCallback(testUnexpectedSuccess, "DELETE with bad key succeeded",
            node)
    deferred.addErrback(testDELETEBadKeyFailed,
            "DELETE with bad key failed as expected", node, nKu, host, port)
    return deferred
def testVERIFYBadKeyFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testVERIFYBadKey: on the expected NotFoundException,
    chain into testDELETEBadKey; otherwise re-raise.
    """
    if failure.check('flud.protocol.FludCommUtil.NotFoundException'):
        print "%s" % msg
        return testDELETEBadKey(nKu, node, host, port)
    else:
        print "\nVERIFYBadKey expected NotFoundException," \
                " but got a different failure:"
        raise failure
def testVERIFYBadKey(nKu, node, host, port):
    """
    Sends a VERIFY with a valid offset/length but a filekey that was
    never stored (the corrupted-name copy); expects NotFoundException.
    """
    print "starting testVERIFYBadKey"
    fsize = os.stat(smallfilename)[stat.ST_SIZE]
    offset = fsize-20  # well inside the file -- only the key is bad
    deferred = node.client.sendVerify(smallfilenamebad, offset, 5, host,
            port, nKu)
    deferred.addCallback(testUnexpectedSuccess,
            "verified file with bad key succeeded", node)
    deferred.addErrback(testVERIFYBadKeyFailed,
            "VERIFY of bad filekey failed as expected", node, nKu, host, port)
    return deferred
def testVERIFYBadLengthFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testVERIFYBadLength: on the expected BadRequestException,
    chain into testVERIFYBadKey; otherwise re-raise.
    """
    if failure.check('flud.protocol.FludCommUtil.BadRequestException'):
        print "%s" % msg
        return testVERIFYBadKey(nKu, node, host, port)
    else:
        print "\nVERIFYBadLength expected BadRequestException," \
                " but got a different failure:"
        raise failure
def testVERIFYBadLength(nKu, node, host, port):
    """
    Sends a VERIFY whose offset+length runs past end-of-file
    (offset = size-10, length = 20); expects BadRequestException.
    """
    # bugfix: this banner previously said "starting testVERIFYBadOffset"
    # (copy-paste from the function below), mislabeling the test output.
    print("starting testVERIFYBadLength")
    fsize = os.stat(smallfilename)[stat.ST_SIZE]
    offset = fsize-10
    deferred = node.client.sendVerify(smallfilekey, offset, 20, host, port, nKu)
    deferred.addCallback(testUnexpectedSuccess, "verified file with bad length",
            node)
    deferred.addErrback(testVERIFYBadLengthFailed,
            "VERIFY of bad length failed as expected", node, nKu, host, port)
    return deferred
def testVERIFYBadOffsetFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testVERIFYBadOffset: on the expected BadRequestException,
    chain into testVERIFYBadLength; otherwise re-raise.
    """
    if failure.check('flud.protocol.FludCommUtil.BadRequestException'):
        print "%s" % msg
        return testVERIFYBadLength(nKu, node, host, port)
    else:
        print "\nVERIFYBadOffset expected BadRequestException," \
                " but got a different failure:"
        raise failure
def testVERIFYBadOffset(nKu, node, host, port):
    """
    Sends a VERIFY whose offset is past end-of-file (size+2); expects
    BadRequestException.
    """
    print "starting testVERIFYBadOffset"
    fsize = os.stat(smallfilename)[stat.ST_SIZE]
    offset = fsize+2
    deferred = node.client.sendVerify(smallfilekey, offset, 20, host, port, nKu)
    deferred.addCallback(testUnexpectedSuccess, "verified file with bad offset",
            node)
    deferred.addErrback(testVERIFYBadOffsetFailed,
            "VERIFY of bad offset failed as expected", node, nKu, host, port)
    return deferred
def testVERIFYNotFoundFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testVERIFYNotFound: on the expected NotFoundException,
    chain into testVERIFYBadOffset; otherwise re-raise.
    """
    if failure.check('flud.protocol.FludCommUtil.NotFoundException'):
        print "%s" % msg
        return testVERIFYBadOffset(nKu, node, host, port)
    else:
        print "\nVERIFYNotFound expected NotFoundException," \
                " but got a different failure:"
        raise failure
def testVERIFYNotFound(nKu, node, host, port):
    """
    Sends a VERIFY for the large file's key, which was never stored
    (only the small file was); expects NotFoundException.
    """
    print "starting testVERIFYNotFound"
    deferred = node.client.sendVerify(largefilekey, 10, 10, host, port, nKu)
    deferred.addCallback(testUnexpectedSuccess, "verified non-existent file",
            node)
    deferred.addErrback(testVERIFYNotFoundFailed,
            "VERIFY of non-existent file failed as expected", node, nKu,
            host, port)
    return deferred
def testRETRIEVEIllegalPathFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testRETRIEVEIllegalPath: on the expected
    NotFoundException, chain into testVERIFYNotFound; otherwise re-raise.
    """
    if failure.check('flud.protocol.FludCommUtil.NotFoundException'):
        print "%s" % msg
        return testVERIFYNotFound(nKu, node, host, port)
    else:
        print "\nRETRIEVEIllegalPath expected NotFoundException," \
                " but got a different failure:"
        raise failure
def testRETRIEVEIllegalPath(nKu, node, host, port):
    """
    Sends a RETRIEVE whose key contains a path separator
    ("somedir/<key>"); the server should refuse it.
    """
    print "starting testRETRIEVEIllegalPath"
    deferred = node.client.sendRetrieve(os.path.join("somedir",smallfilekey),
            host, port, nKu)
    deferred.addCallback(testUnexpectedSuccess,
            "retrieved file with illegal path", node)
    deferred.addErrback(testRETRIEVEIllegalPathFailed,
            "RETRIEVE using illegal path failed as expected", node, nKu,
            host, port)
    return deferred
def testRETRIEVENotFoundFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testRETRIEVENotFound: on the expected NotFoundException,
    chain into testRETRIEVEIllegalPath; otherwise re-raise.
    """
    if failure.check('flud.protocol.FludCommUtil.NotFoundException'):
        print "%s" % msg
        return testRETRIEVEIllegalPath(nKu, node, host, port)
    else:
        print "\nRETRIEVENotFound expected NotFoundException," \
                " but got a different failure:"
        raise failure
def testRETRIEVENotFound(nKu, node, host, port):
    """
    Sends a RETRIEVE for the large file's key, which was never stored;
    expects NotFoundException.
    """
    print "starting testRETRIEVENotFound"
    deferred = node.client.sendRetrieve(largefilekey, host, port, nKu)
    deferred.addCallback(testUnexpectedSuccess,
            "retrieved file that shouldn't exist", node)
    deferred.addErrback(testRETRIEVENotFoundFailed,
            "RETRIEVE of non-existent file failed as expected", node, nKu,
            host, port)
    return deferred
def testSTORELargeFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testSTOREBadKeyLarge: on the expected BadCASKeyException,
    chain into testRETRIEVENotFound; otherwise re-raise.
    """
    if failure.check('flud.protocol.FludCommUtil.BadCASKeyException'):
        print "%s" % msg
        return testRETRIEVENotFound(nKu, node, host, port)
    else:
        print "\nSTORELarge expected BadCASKeyException," \
                " but got a different failure:"
        raise failure
def testSTOREBadKeyLarge(nKu, node, host, port):
    """
    STOREs the large file under a name that doesn't match its content
    hash (the "bad" copy); the server should reject the CAS key.
    """
    print "starting testSTOREBadKeyLarge"
    deferred = node.client.sendStore(largefilenamebad,
            (crc32(largefilenamebad), StringIO(metadata)), host, port, nKu)
    deferred.addCallback(testUnexpectedSuccess, "large file, bad key succeeded",
            node)
    deferred.addErrback(testSTORELargeFailed,
            "large STORE with bad key failed as expected", node, nKu,
            host, port)
    return deferred
def testSTORESmallFailed(failure, msg, node, nKu, host, port):
    """
    Errback for testSTOREBadKeySmall: on the expected BadCASKeyException,
    chain into testSTOREBadKeyLarge; otherwise re-raise.
    """
    if failure.check('flud.protocol.FludCommUtil.BadCASKeyException'):
        print "%s" % msg
        return testSTOREBadKeyLarge(nKu, node, host, port)
    else:
        print "\nSTORESmall expected BadCASKeyException," \
                " but got a different failure:"
        raise failure
def testSTOREBadKeySmall(nKu, node, host, port):
    """
    STOREs the small file under a name that doesn't match its content
    hash (the "bad" copy); the server should reject the CAS key.
    """
    print "starting testSTOREBadKeySmall"
    deferred = node.client.sendStore(smallfilenamebad,
            (crc32(smallfilenamebad), StringIO(metadata)), host, port, nKu)
    deferred.addCallback(testUnexpectedSuccess, "small file, bad key succeeded",
            node)
    deferred.addErrback(testSTORESmallFailed,
            "small STORE with bad key failed as expected", node, nKu,
            host, port)
    return deferred
def testSTORESuccess(res, nKu, node, host, port):
    """Callback for the initial STORE; hands off to the bad-key STORE tests."""
    print("testSTORE succeeded: %s" % res)
    next_stage = testSTOREBadKeySmall(nKu, node, host, port)
    return next_stage
def testSTORE(nKu, node, host, port):
    # store a file successfully for later failure tests (VERIFY, etc)
    """
    STOREs the small file (correct key) so later VERIFY/RETRIEVE/DELETE
    tests have real data to hit; on success, starts the failure chain.
    """
    print "starting testSTORE"
    deferred = node.client.sendStore(smallfilename,
            (crc32(smallfilename), StringIO(metadata)), host, port, nKu)
    deferred.addCallback(testSTORESuccess, nKu, node, host, port)
    deferred.addErrback(testerror, "failed at testSTORE", node)
    return deferred
# XXX: need to test bogus headers for all commands (BAD_REQUEST)
# XXX: need to test failures for authentication
def testID(node, host, port):
    """ Tests sendGetID(), and invokes testSTORE on success """
    print "starting testID"
    deferred = node.client.sendGetID(host, port)
    # sendGetID's result (the remote node's key, nKu) becomes testSTORE's
    # first argument, kicking off the whole STORE/RETRIEVE/VERIFY/DELETE chain.
    deferred.addCallback(testSTORE, node, host, port)
    #deferred.addCallback(testSTOREBadKeySmall, node, host, port)
    deferred.addErrback(testerror, "failed at testID", node)
    return deferred
def cleanup(err, node):
    """
    Final addBoth() handler: report outcome, remove the generated test
    files, and schedule node shutdown.  `err` is the last chain result
    (a value on success, a Failure on error).
    """
    if err:
        print "cleaning up: %s" % err
    else:
        print "cleaning up"
    os.remove(smallfilename)
    os.remove(smallfilenamebad)
    os.remove(largefilename)
    os.remove(largefilenamebad)
    # NOTE(review): `reactor` is not imported by name in this file --
    # presumably provided by `from flud.protocol.FludCommUtil import *`;
    # confirm.
    reactor.callLater(1, node.stop)
def generateTestData():
    """
    Creates the small and large test files in /tmp, each named by its
    own CAS key (fencoded content hash), plus a copy of each under a
    deliberately corrupted key name ("bad"+key[3:]) for the bad-key
    tests.  Fills in the module globals used throughout the chain.
    """
    def generateFiles(minsize):
        # minsize zero bytes plus 1-256 random bytes, so sizes vary per
        # run.  NOTE(review): tempfile.mktemp() is race-prone, but
        # acceptable in a test harness.
        fname = tempfile.mktemp()
        f = open(fname, 'w')
        f.write('\0'*minsize)
        f.write(generateRandom(random.randrange(256)+1))
        f.close()
        filekey = hashfile(fname)
        filekey = fencode(int(filekey, 16))
        filename = os.path.join("/tmp",filekey)
        os.rename(fname,filename)
        # same content, wrong name: the CAS key no longer matches
        filenamebad = os.path.join("/tmp/","bad"+filekey[3:])
        shutil.copy(filename, filenamebad)
        return (filekey, filename, filenamebad)
    global smallfilekey
    global smallfilename
    global smallfilenamebad
    (smallfilekey, smallfilename, smallfilenamebad) = generateFiles(1024)
    global largefilekey
    global largefilename
    global largefilenamebad
    (largefilekey, largefilename, largefilenamebad) = generateFiles(512000)
def runTests(host, port=None, listenport=None):
    """
    Generates test data, starts a FludNode (listening on listenport, or
    its configured default), runs the primitive-op chain against
    host:port, and blocks until the reactor exits.
    """
    generateTestData()
    node = FludNode(port=listenport)
    if port == None:
        port = node.config.port
    node.run()
    d = testID(node, host, port)
    d.addBoth(cleanup, node)
    node.join()
"""
Main currently invokes test code
"""
if __name__ == '__main__':
    localhost = socket.getfqdn()
    # NOTE(review): eval() on argv is tolerable only because this is a
    # local test harness; int() would be the safe choice for ports.
    if len(sys.argv) == 1:
        runTests(localhost) # test by talking to self
    elif len(sys.argv) == 2:
        runTests(localhost, eval(sys.argv[1])) # talk to self on port [1]
    elif len(sys.argv) == 3:
        runTests(sys.argv[1], eval(sys.argv[2])) # talk to [1] on port [2]
    elif len(sys.argv) == 4:
        # talk to [1] on port [2], listen on port [3]
        runTests(sys.argv[1], eval(sys.argv[2]), eval(sys.argv[3]))
# | Python |  (file-separator artifact; commented out so the module parses)
#!/usr/bin/python
"""
FludFileOpTest.py, (c) 2003-2006 Alen Peacock. This program is distributed
under the terms of the GNU General Public License (the GPL), version 3.
System tests for FludFileOperations
"""
import sys, os, time, logging, tempfile, shutil
from twisted.internet import reactor
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
from flud.FludConfig import FludConfig
from flud.FludNode import FludNode
from flud.fencode import fencode, fdecode
from flud.FludCrypto import generateRandom
from flud.FludFileOperations import *
import flud.FludDefer as FludDefer
from flud.protocol.LocalClient import listMeta
logger = logging.getLogger('flud')
def testError(failure, message, node):
print "testError message: %s" % message
print "testError: %s" % str(failure)
print "At least 1 test FAILED"
return failure
def verifySuccess(r, desc):
    """Callback: announce that the named verify stage succeeded."""
    print("%s succeeded" % desc)
def checkRetrieveFile(res, node, fname):
    """Callback for RetrieveFilename: log success and pass `res` through.

    Returning res unchanged is *VITAL* for concurrent dup ops to succeed.
    """
    print("retrieve of %s succeeded" % fname)
    return res
def testRetrieveFile(node, fname):
    """
    Kicks off a RetrieveFilename op for fname and returns its deferred,
    with logging callbacks attached.
    """
    d = RetrieveFilename(node, fname).deferred
    d.addCallback(checkRetrieveFile, node, fname)
    d.addErrback(testError, fname, node)
    return d
def retrieveSequential(r, node, filenamelist, desc):
    """
    Retrieves the files in filenamelist one at a time -- each retrieve
    starts only after the previous one completes, by chaining deferreds.
    Consumes (pops from) filenamelist.
    """
    def loop(r, node, filenamelist, desc):
        if filenamelist:
            fname = filenamelist.pop()
            print("testing retrieve (%s) %s" % (desc, fname))
            d = testRetrieveFile(node, fname)
            d.addCallback(loop, node, filenamelist, desc)
            # bugfix: testError requires (message, node) extra args;
            # calling it with only the failure raised TypeError inside
            # the errback instead of reporting the real error.
            d.addErrback(testError, fname, node)
            return d
        else:
            print("retrieve sequential (%s) done" % desc)
    print("test retrieveSequential %s" % desc)
    return loop(None, node, filenamelist, desc)
def storeSuccess(r, desc):
    """Callback: announce that the named store stage succeeded."""
    print("%s succeeded" % desc)
def storeConcurrent(r, node, files, desc):
    """
    Kicks off a StoreFile for every file in `files` concurrently and
    returns an ErrDeferredList that fires when all have completed.
    """
    print("test storeConcurrent %s" % desc)
    dlist = []
    for fname in files:  # renamed from `file`, which shadowed the builtin
        dlist.append(testStoreFile(node, fname))
    dl = FludDefer.ErrDeferredList(dlist)
    dl.addCallback(storeSuccess, desc)
    # bugfix: testError requires (message, node) extra args; calling it
    # with only the failure raised TypeError inside the errback.
    dl.addErrback(testError, desc, node)
    return dl
def checkStoreFile(res, node, fname):
    """
    Callback for StoreFile: confirm the file now appears in the node's
    master metadata listing; fail the chain otherwise.
    """
    master = listMeta(node.config)
    if fname not in master:
        # NOTE(review): `defer` and `failure` are not imported by name in
        # this file -- presumably provided by
        # `from flud.FludFileOperations import *`; confirm.
        return defer.fail(failure.DefaultException("file not stored"))
    else:
        print "store of %s appeared successful" % fname
        return res # <- *VITAL* for concurrent dup ops to succeed.
def testStoreFile(node, fname):
    """
    Kicks off a StoreFile op for fname and returns its deferred, with a
    metadata-presence check and error logging attached.
    """
    d = StoreFile(node, fname).deferred
    d.addCallback(checkStoreFile, node, fname)
    d.addErrback(testError, fname, node)
    return d
def doTests(node, smallfnames, largefnames, dupsmall, duplarge):
    """
    Full store/retrieve suite: one initial store, then concurrent stores
    of the small, large, and duplicate file groups, then sequential
    retrieves of each group.  Returns the deferred for the whole chain.
    """
    d = testStoreFile(node, smallfnames[0])
    d.addCallback(storeConcurrent, node, smallfnames, "small")
    d.addCallback(storeConcurrent, node, largefnames, "large")
    d.addCallback(storeConcurrent, node, dupsmall, "small duplicates")
    d.addCallback(storeConcurrent, node, duplarge, "large duplicates")
    #d = storeConcurrent(None, node, dupsmall, "small duplicates")
    #d = storeConcurrent(None, node, duplarge, "large duplicates")
    d.addCallback(retrieveSequential, node, smallfnames, "small")
    d.addCallback(retrieveSequential, node, largefnames, "large")
    d.addCallback(retrieveSequential, node, dupsmall, "small duplicates")
    d.addCallback(retrieveSequential, node, duplarge, "large duplicates")
    return d
def cleanup(_, node, filenamelist):
    """
    Final handler: best-effort removal of the generated test files, then
    schedule node shutdown.
    """
    for f in filenamelist:
        try:
            print("deleting %s" % f)
            os.remove(f)
        except OSError:
            # narrowed from a bare `except:` -- only removal errors are
            # expected here, and a bare except would also hide real bugs
            print("couldn't remove %s" % f)
    reactor.callLater(1, node.stop)
def generateTestFile(minSize):
    """
    Writes a temp file of at least ~minSize bytes (the same random chunk
    repeated 51-100 times) and returns its path.
    """
    # bugfix: this module never imports `random` at the top of the file
    # (and relying on a wildcard import to provide it is fragile), so
    # bring it into scope locally.
    import random
    fname = tempfile.mktemp()
    f = open(fname, 'w')
    data = generateRandom(minSize/50)
    for i in range(0, 51+random.randrange(50)):
        f.write(data)
    f.close()
    # mktemp() already returns an absolute /tmp path, so this join/rename
    # is effectively a no-op; kept for parity with the original behavior.
    filename = os.path.join("/tmp",fname)
    os.rename(fname,filename)
    return filename
def runTests(host, port, listenport=None):
    """
    Builds six test files (two small, two large, plus a duplicate copy
    of one of each size), starts a local FludNode, joins the network via
    the gateway at host:port, and runs the store/retrieve suite.
    """
    f1 = generateTestFile(5120)
    f2 = generateTestFile(5120)
    f3 = f2+".dup"   # duplicate content under a different name
    shutil.copy(f2, f3)
    f4 = generateTestFile(513000)
    f5 = generateTestFile(513000)
    f6 = f5+".dup"
    shutil.copy(f5, f6)
    node = FludNode(port=listenport)
    if port == None:
        port = node.config.port
    node.run()
    node.connectViaGateway(host, port)
    d = doTests(node, [f1, f2], [f4, f5], [f2, f3], [f5, f6])
    d.addBoth(cleanup, node, [f1, f2, f3, f4, f5, f6])
    node.join()
if __name__ == '__main__':
    # bugfix: `socket` is used below but never imported in this file.
    import socket
    localhost = socket.getfqdn()
    if len(sys.argv) == 3:
        runTests(sys.argv[1], eval(sys.argv[2])) # talk to [1] on port [2]
    elif len(sys.argv) == 4:
        # talk to [1] on port [2], listen on port [3]
        runTests(sys.argv[1], eval(sys.argv[2]), eval(sys.argv[3]))
    else:
        print("must run this test against a flud network (no single node op)")
        print("usage: %s [<othernodehost othernodeport> |"
                " <othernodehost othernodeport listenport>]" % sys.argv[0])
# | Python |  (file-separator artifact; commented out so the module parses)
#!/usr/bin/python
import time, os, stat, random, sys, logging, socket
from twisted.python import failure
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
from flud.FludNode import FludNode
from flud.protocol.FludClient import FludClient
from flud.protocol.FludCommUtil import *
from flud.fencode import fencode, fdecode
"""
Test code for primitive DHT operations.
"""
# Seconds to keep the node alive after tests finish, letting in-flight
# network traffic drain before shutdown.
stay_alive = 1
filename = "/tmp/tempstoredata"
filekey = os.path.basename(filename)
# Fixed 256-bit DHT key the store/find tests operate on.
key = 87328673569979667228965797330646992089697345905484734072690869757741450870337L
# format of block metadata is
# {(i, datakey): storingNodeID, ..., 'n': n, 'm': m}
# where i<n+m, datakey is a sha256 of the data stored, and storingNodeID is
# either a nodeID or a list of nodeIDs.
testval = {(0, 802484L): 465705L, (1, 780638L): 465705L, (2, 169688L): 465705L,
        (3, 267175L): 465705L, (4, 648636L): 465705L, (5, 838315L): 465705L,
        (6, 477619L): 465705L, (7, 329906L): 465705L, (8, 610565L): 465705L,
        (9, 217811L): 465705L, (10, 374124L): 465705L, (11, 357214L): 465705L,
        (12, 147307L): 465705L, (13, 427751L): 465705L, (14, 927853L): 465705L,
        (15, 760369L): 465705L, (16, 707029L): 465705L, (17, 479234L): 465705L,
        (18, 190455L): 465705L, (19, 647489L): 465705L, (20, 620470L): 465705L,
        (21, 777532L): 465705L, (22, 622383L): 465705L, (23, 573283L): 465705L,
        (24, 613082L): 465705L, (25, 433593L): 465705L, (26, 584543L): 465705L,
        (27, 337485L): 465705L, (28, 911014L): 465705L, (29, 594065L): 465705L,
        (30, 375876L): 465705L, (31, 726818L): 465705L, (32, 835759L): 465705L,
        (33, 814060L): 465705L, (34, 237176L): 465705L, (35, 538268L): 465705L,
        (36, 272650L): 465705L, (37, 314058L): 465705L, (38, 257714L): 465705L,
        (39, 439931L): 465705L, 'k': 20, 'n': 20}
# Console logging for the test run.
logger = logging.getLogger('test')
screenhandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s:'
        ' %(message)s', datefmt='%H:%M:%S')
screenhandler.setFormatter(formatter)
logger.addHandler(screenhandler)
#logger.setLevel(logging.DEBUG)
logger.setLevel(logging.INFO)
def cleanup(_, node):
    """Schedule node shutdown after the stay_alive grace period."""
    logger.info("waiting %ds to shutdown..." % stay_alive)
    reactor.callLater(stay_alive, node.stop)
def testerror(failure, message, node):
    """
    Error handler for test errbacks: log the failure.  Nothing is
    returned or re-raised, so the chain ends here as handled.
    """
    for line in ("testerror message: %s" % message,
            "testerror: '%s'" % str(failure),
            "At least 1 test FAILED"):
        logger.warn(line)
def endtests(res, nKu, node, host, port):
    """ executes after all tests """
    # kFindValue may hand back a raw fencoded blob; decode if so.
    try:
        res = fdecode(res)
    except ValueError:
        pass
    if res != testval:
        return testerror(None, "retrieved value does not match stored value:"
                " '%s' != '%s'" % (res, testval), node)
    logger.info("testkFindVal PASSED")
    logger.debug("testkFindVal result: %s" % str(res))
    logger.info("all tests PASSED")
    return res
def testkFindVal(res, nKu, node, host, port):
    """
    Runs after testSendkFindVal: exercises the iterative kFindValue
    (full Kademlia lookup, not a single peer), then ends the suite.
    """
    logger.info("testSendkFindVal PASSED")
    logger.debug("testSendkFindVal result: %s" % str(res))
    logger.info("attempting testkFindValue")
    deferred = node.client.kFindValue(key)
    deferred.addCallback(endtests, nKu, node, host, port)
    deferred.addErrback(testerror, "failed at testkFindValue", node)
    return deferred
def testSendkFindVal(res, nKu, node, host, port):
    """
    Runs after testkStore: exercises sendkFindValue directly against
    host:port, then chains into the iterative kFindValue test.
    """
    logger.info("testkStore PASSED")
    logger.debug("testkStore result: %s" % str(res))
    logger.info("attempting testSendkFindValue")
    deferred = node.client.sendkFindValue(host, port, key)
    deferred.addCallback(testkFindVal, nKu, node, host, port)
    deferred.addErrback(testerror, "failed at testSendkFindValue", node)
    return deferred
def testkStore(res, nKu, node, host, port):
    """
    Runs after testSendkStore: exercises the iterative kStore (stores to
    the k closest nodes), then chains into the find-value tests.
    """
    logger.info("testSendkStore PASSED")
    logger.debug("testSendkStore result: %s" % str(res))
    logger.info("attempting testkStore")
    deferred = node.client.kStore(key, testval)
    deferred.addCallback(testSendkFindVal, nKu, node, host, port)
    deferred.addErrback(testerror, "failed at testkStore", node)
    return deferred
def testSendkStore(res, nKu, node, host, port):
    """
    Runs after testkFindNode: exercises sendkStore directly against
    host:port, then chains into the iterative kStore test.
    """
    logger.info("testkFindNode PASSED")
    logger.debug("testkFindNode result: %s" % str(res))
    logger.info("attempting testSendkStore")
    deferred = node.client.sendkStore(host, port, key, testval)
    deferred.addCallback(testkStore, nKu, node, host, port)
    # bugfix: the error message previously said "failed at testkStore"
    # (copy-paste from the function above), misattributing failures.
    deferred.addErrback(testerror, "failed at testSendkStore", node)
    return deferred
def testkFindNode(res, nKu, node, host, port):
    """ executes after testSendkFindNode """
    # Exercises the iterative kFindNode lookup, then chains into the
    # store tests.
    logger.info("testSendkFindNode PASSED")
    logger.debug("testSendkFindNode result: %s" % str(res))
    logger.info("attempting kFindNode")
    deferred = node.client.kFindNode(key)
    deferred.addCallback(testSendkStore, nKu, node, host, port)
    deferred.addErrback(testerror, "failed at kFindNode", node)
    return deferred
def testSendkFindNode(nKu, node, host, port):
    """ executes after testGetID """
    # nKu is the remote node's public key from sendGetID; exercises
    # sendkFindNode directly against host:port.
    logger.info("testkGetID PASSED")
    logger.info("attempting sendkFindNode")
    deferred = node.client.sendkFindNode(host, port, key)
    deferred.addCallback(testkFindNode, nKu, node, host, port)
    deferred.addErrback(testerror, "failed at sendkFindNode", node)
    return deferred
def testGetID(node, host, port):
    """ Tests sendGetID(), and invokes testSendkFindNode on success """
    deferred = node.client.sendGetID(host, port)
    deferred.addCallback(testSendkFindNode, node, host, port)
    deferred.addErrback(testerror, "failed at testGetID", node)
    return deferred
def runTests(host, port=None, listenport=None):
    """
    Starts a FludNode (listening on listenport, or its configured
    default) and runs the DHT test chain against host:port.  Blocks
    until the reactor exits.
    """
    host = getCanonicalIP(host)
    node = FludNode(port=listenport)
    if port == None:
        port = node.config.port
    logger.info("testing against %s:%s, localport=%s" % (host,
            port, listenport))
    node.run()
    d = testGetID(node, host, port)
    d.addBoth(cleanup, node)
    #testkFindVal("blah", node.config.Ku, node, host, port)
    node.join()
"""
Main currently invokes test code
"""
if __name__ == '__main__':
    localhost = socket.getfqdn()
    # NOTE(review): eval() on argv is tolerable only because this is a
    # local test harness; int() would be the safe choice for ports.
    if len(sys.argv) == 1:
        runTests(localhost) # test by talking to self
    elif len(sys.argv) == 2:
        runTests(localhost, eval(sys.argv[1]))
    elif len(sys.argv) == 3:
        runTests(sys.argv[1], eval(sys.argv[2]))
    elif len(sys.argv) == 4:
        runTests(sys.argv[1], eval(sys.argv[2]), eval(sys.argv[3]))
# | Python |  (file-separator artifact; commented out so the module parses)
#!/usr/bin/python
import tarfile, tempfile, random, os, sys
import gzip
from Crypto.Hash import SHA256
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
from flud.fencode import fencode
import flud.TarfileUtils as TarfileUtils
def maketarball(numfiles, avgsize, hashnames=False, addmetas=False):
    """
    Builds a tarball of numfiles temp files (each ~avgsize bytes of 'a's,
    0.5x-1.5x) and returns (tarballname, member-name list).

    hashnames -- name each member by the fencoded sha256 of its content
                 (CAS-style), instead of its temp path.
    addmetas  -- also add a small "<name>.343434.meta" member per file.
    """
    tarballname = tempfile.mktemp()+".tar"
    tarball = tarfile.open(tarballname, 'w')
    if addmetas:
        # one shared 48-byte metadata file, re-added under each member's name
        metafname = tempfile.mktemp()
        metaf = file(metafname, 'w')
        metaf.write('m'*48)
        metaf.close()
    for i in xrange(numfiles):
        fname = tempfile.mktemp()
        f = file(fname, 'wb')
        size = int(avgsize * (random.random()+0.5))
        blocksize = 65*1024
        if hashnames:
            sha256 = SHA256.new()
        # write in blocks so large files don't need one big string
        for j in range(0, size, blocksize):
            if j+blocksize > size:
                block = 'a'*(size-j)
            else:
                block = 'a'*blocksize
            if hashnames:
                sha256.update(block)
            f.write(block)
        f.close()
        arcname = fname
        if hashnames:
            arcname = fencode(int(sha256.hexdigest(),16))
        tarball.add(fname, arcname)
        if addmetas:
            tarball.add(metafname, arcname+".343434.meta")
        os.remove(fname)
    if addmetas:
        os.remove(metafname)
    contents = tarball.getnames()  # includes the .meta members, if any
    tarball.close()
    return tarballname, contents
def gzipTarball(tarball):
    """
    Compress `tarball` into `tarball + ".gz"`, delete the uncompressed
    original, and return the new path.
    """
    gzname = tarball + ".gz"
    src = open(tarball, 'rb')
    dst = gzip.GzipFile(gzname, 'wb')
    dst.write(src.read())
    dst.close()
    src.close()
    os.remove(tarball)
    return gzname
def main():
    """
    Exercises TarfileUtils.delete/concatenate/verifyHashes over every
    combination of plain and gzipped tarballs.  Raises AssertionError on
    the first mismatch.
    """
    # test plain TarfileUtils.delete()
    (tballname, contents) = maketarball(5, 4096)
    TarfileUtils.delete(tballname, contents[2:4])
    tarball = tarfile.open(tballname, 'r')
    os.remove(tballname)
    assert tarball.getnames() == contents[:2]+contents[4:]
    tarball.close()
    # test gzip TarfileUtils.delete()
    (tballname, contents) = maketarball(5, 4096)
    tballname = gzipTarball(tballname)
    TarfileUtils.delete(tballname, contents[2:4])
    tarball = tarfile.open(tballname, 'r')
    os.remove(tballname)
    assert tarball.getnames() == contents[:2]+contents[4:]
    tarball.close()
    # test plain TarfileUtils.concatenate()
    (tballname1, contents1) = maketarball(5, 4096)
    (tballname2, contents2) = maketarball(5, 4096)
    TarfileUtils.concatenate(tballname1, tballname2)
    assert not os.path.exists(tballname2)
    tarball = tarfile.open(tballname1, 'r')
    os.remove(tballname1)
    assert tarball.getnames() == contents1+contents2
    # test TarfileUtils.concatenate(gz, plain)
    (tballname1, contents1) = maketarball(5, 4096)
    (tballname2, contents2) = maketarball(5, 4096)
    tballname1 = gzipTarball(tballname1)
    TarfileUtils.concatenate(tballname1, tballname2)
    assert not os.path.exists(tballname2)
    tarball = tarfile.open(tballname1, 'r')
    os.remove(tballname1)
    assert tarball.getnames() == contents1+contents2
    # test TarfileUtils.concatenate(plain, gz)
    (tballname1, contents1) = maketarball(5, 4096)
    (tballname2, contents2) = maketarball(5, 4096)
    tballname2 = gzipTarball(tballname2)
    TarfileUtils.concatenate(tballname1, tballname2)
    assert not os.path.exists(tballname2)
    tarball = tarfile.open(tballname1, 'r')
    os.remove(tballname1)
    assert tarball.getnames() == contents1+contents2
    # test TarfileUtils.concatenate(gz, gz)
    (tballname1, contents1) = maketarball(5, 4096)
    (tballname2, contents2) = maketarball(5, 4096)
    tballname1 = gzipTarball(tballname1)
    tballname2 = gzipTarball(tballname2)
    TarfileUtils.concatenate(tballname1, tballname2)
    assert not os.path.exists(tballname2)
    tarball = tarfile.open(tballname1, 'r')
    os.remove(tballname1)
    assert tarball.getnames() == contents1+contents2
    # test TarfileUtils.verifyHashes(plain no meta)
    (tballname, contents) = maketarball(5, 4096, True)
    assert TarfileUtils.verifyHashes(tballname, contents[2:4])
    os.remove(tballname)
    # test TarfileUtils.verifyHashes(plain with meta)
    (tballname, contents) = maketarball(5, 4096, True, True)
    # bugfix: was `assert(<call>, ".meta")` -- asserting a two-element
    # tuple, which is always truthy, so the check never actually ran.
    # The stray ".meta" looked like an intended metadata-suffix argument
    # to verifyHashes; TODO confirm its signature and pass it if supported.
    assert TarfileUtils.verifyHashes(tballname, contents[2:4])
    os.remove(tballname)
    # test TarfileUtils.verifyHashes(gzipped no meta)
    (tballname, contents) = maketarball(5, 4096, True)
    tballname = gzipTarball(tballname)
    assert TarfileUtils.verifyHashes(tballname, contents[2:4])
    os.remove(tballname)
    # test TarfileUtils.verifyHashes(gzipped with meta)
    (tballname, contents) = maketarball(5, 4096, True, True)
    tballname = gzipTarball(tballname)
    # bugfix: same always-true tuple assert as above
    assert TarfileUtils.verifyHashes(tballname, contents[2:4])
    os.remove(tballname)
    print("all tests passed")
# Script entry point.
if __name__ == "__main__":
    main()
# | Python |  (file-separator artifact; commented out so the module parses)
#!/usr/bin/python
import time, os, stat, random, sys, logging, socket
from twisted.python import failure
from twisted.internet import defer
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
import flud.FludCrypto
from flud.FludNode import FludNode
from flud.protocol.FludClient import FludClient
from flud.protocol.FludCommUtil import *
from flud.fencode import fencode, fdecode
from flud.FludDefer import ErrDeferredList
"""
Test code for primitive operations. These ops include all of the descendents
of ROOT and REQUEST in FludProtocol.
"""
# How many concurrent operations to fire per stage, and how often
# itersuccess() actually logs (every CONCREPORT-th op).
CONCURRENT=300
CONCREPORT=50
# Populated by runTests(): the local FludNode and the fake data files.
node = None
files = None
logger = logging.getLogger('test')
screenhandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s:'
        ' %(message)s', datefmt='%H:%M:%S')
screenhandler.setFormatter(formatter)
logger.addHandler(screenhandler)
logger.setLevel(logging.DEBUG)
def suitesuccess(results):
    """Final callback: the entire suite completed; pass results through."""
    logger.info("all tests in suite passed")
    #print results
    return results
def suiteerror(failure):
    """Final errback: the suite aborted; log and propagate the failure."""
    logger.info("suite did not complete")
    logger.info("DEBUG: %s" % failure)
    return failure
def stagesuccess(result, message):
    """Per-stage callback: log the stage's success, pass result through."""
    logger.info("stage %s succeeded" % message)
    return result
def stageerror(failure, message):
    """Per-stage errback: log the stage's failure and propagate it."""
    logger.info("stage %s failed" % message)
    #logger.info("DEBUG: %s" % failure)
    return failure
def itersuccess(res, i, message):
    """Per-op callback: log only every CONCREPORT-th op to limit output."""
    if i % CONCREPORT == 0:
        logger.info("itersuccess: %s" % message)
    return res
def itererror(failure, message):
    """Per-op errback: log, print the traceback, and propagate."""
    logger.info("itererror message: %s" % message)
    #logger.info("DEBUG: %s" % failure)
    #logger.info("DEBUG: %s" % dir(failure)
    failure.printTraceback()
    return failure
def checkVERIFY(results, nKu, host, port, hashes, num=CONCURRENT):
    """
    Compares each VERIFY response (a hex hash) against the locally
    computed hash of the same byte range; raises on any mismatch.
    """
    logger.info(" checking VERIFY results...")
    for i in range(num):
        hash = hashes[i]
        res = results[i][1]  # ErrDeferredList entries are (success, value)
        # compare numerically to ignore case/leading-zero differences
        if long(hash, 16) != long(res, 16):
            raise failure.DefaultException("verify didn't match: %s != %s"
                    % (hash, res))
    logger.info(" ...VERIFY results good.")
    return results #True
def testVERIFY(res, nKu, host, port, num=CONCURRENT):
    """
    Fires `num` concurrent VERIFY ops, each over a random 20-byte slice
    of a stored file, recording the expected hash locally; results are
    compared in checkVERIFY.
    """
    logger.info("testVERIFY started...")
    dlist = []
    hashes = []
    for i in range(num):
        #if i == 4:
        #   port = 21
        fd = os.open(files[i], os.O_RDONLY)
        fsize = os.fstat(fd)[stat.ST_SIZE]
        length = 20
        offset = random.randrange(fsize-length)
        os.lseek(fd, offset, 0)
        data = os.read(fd, length)
        os.close(fd)
        hashes.append(flud.FludCrypto.hashstring(data))
        filekey = os.path.basename(files[i])  # files are named by their key
        deferred = node.client.sendVerify(filekey, offset, length, host,
                port, nKu)
        deferred.addCallback(itersuccess, i, "succeeded at testVERIFY %d" % i)
        deferred.addErrback(itererror, "failed at testVERIFY %d: %s"
                % (i, filekey))
        dlist.append(deferred)
    d = ErrDeferredList(dlist)
    d.addCallback(stagesuccess, "testVERIFY")
    d.addErrback(stageerror, 'failed at testVERIFY')
    d.addCallback(checkVERIFY, nKu, host, port, hashes, num)
    return d
def checkRETRIEVE(res, nKu, host, port, num=CONCURRENT):
    """
    Diffs each retrieved file (in the node's clientdir) against the
    original upload; on full success, chains into testVERIFY.
    """
    logger.info(" checking RETRIEVE results...")
    for i in range(num):
        f1 = open(files[i])
        filekey = os.path.basename(files[i])
        f2 = open(node.config.clientdir+"/"+filekey)
        if (f1.read() != f2.read()):
            f1.close()
            f2.close()
            raise failure.DefaultException("upload/download files don't match")
        f2.close()
        f1.close()
    logger.info(" ...RETRIEVE results good.")
    return testVERIFY(res, nKu, host, port, num)
def testRETRIEVE(res, nKu, host, port, num=CONCURRENT):
    """
    Fires `num` concurrent RETRIEVE ops for the stored files; results
    are validated in checkRETRIEVE.
    """
    logger.info("testRETRIEVE started...")
    dlist = []
    for i in range(num):
        #if i == 4:
        #   port = 21
        filekey = os.path.basename(files[i])
        deferred = node.client.sendRetrieve(filekey, host, port, nKu)
        deferred.addCallback(itersuccess, i, "succeeded at testRETRIEVE %d" % i)
        deferred.addErrback(itererror, "failed at testRETRIEVE %d: %s"
                % (i, filekey))
        dlist.append(deferred)
    d = ErrDeferredList(dlist)
    d.addCallback(stagesuccess, "testRETRIEVE")
    d.addErrback(stageerror, 'failed at testRETRIEVE')
    d.addCallback(checkRETRIEVE, nKu, host, port, num)
    return d
def testSTORE(nKu, host, port, num=CONCURRENT):
    """
    Fires `num` concurrent STORE ops (no metadata); on completion,
    chains into testRETRIEVE.
    """
    logger.info("testSTORE started...")
    dlist = []
    for i in range(num):
        #if i == 4:
        #   port = 21
        deferred = node.client.sendStore(files[i], None, host, port, nKu)
        deferred.addCallback(itersuccess, i, "succeeded at testSTORE %d" % i)
        deferred.addErrback(itererror, "failed at testSTORE %d" % i)
        dlist.append(deferred)
    d = ErrDeferredList(dlist)
    d.addCallback(stagesuccess, "testSTORE")
    d.addErrback(stageerror, 'failed at testSTORE')
    d.addCallback(testRETRIEVE, nKu, host, port, num)
    #d.addCallback(testVERIFY, nKu, host, port, num)
    return d
def testID(host, port, num=CONCURRENT):
    """
    Fires `num` concurrent sendGetID ops; the single returned result
    (returnOne=True) is the remote key passed on to testSTORE.
    """
    logger.info("testID started...")
    dlist = []
    for i in range(num):
        #if i == 4:
        #   port = 21
        deferred = node.client.sendGetID(host, port)
        deferred.debug = True
        deferred.addErrback(itererror, "failed at testID %d" % i)
        dlist.append(deferred)
    d = ErrDeferredList(dlist, returnOne=True)
    d.addCallback(stagesuccess, "testID")
    d.addErrback(stageerror, 'testID')
    d.addCallback(testSTORE, host, port, num)
    return d
def runTests(host, port=None, listenport=None):
    """
    Creates CONCURRENT fake data files, starts a local FludNode, and
    runs the concurrent ID/STORE/RETRIEVE/VERIFY suite against
    host:port.  Blocks until the reactor exits.
    """
    num = CONCURRENT
    #num = 5
    global files, node
    files = createFakeData()
    node = FludNode(port=listenport)
    if port == None:
        port = node.config.port
    node.run()
    if num > len(files):
        num = len(files)
    d1 = testID(host, port, num)
    d1.addCallback(suitesuccess)
    d1.addErrback(suiteerror)
    d1.addBoth(cleanup)
    #nku = FludRSA.importPublicKey({'e': 65537L, 'n': 138646504113696863667807411690225283099791076530135000331764542300161152585426296356409290228001197773401729468267448145387041995053893737880473447042984919037843163552727823101445272608470814297563395471329917904393936481407769396601027233955938405001434483474847834031774504827822809611707032477570548179411L})
    #d2 = testSTORE(nku, node, host, port, files, num)
    #d2.addErrback(suiteerror, 'failed at %s' % d2.testname)
    node.join()
    #node.start() # doesn't work, because reactor may not have started
    # listening by time requests start flying
def createFakeData(dir="/tmp", num=CONCURRENT):
    """
    Writes `num` 256-byte random files into `dir`, each named by its own
    CAS key (fencoded content hash); returns the list of paths.
    """
    randsrc = open("/dev/urandom", 'rb')
    files = []
    for i in range(num):
        randdata = randsrc.read(256)
        filekey = fencode(int(flud.FludCrypto.hashstring(randdata), 16))
        filename = dir+'/'+filekey
        f = open(filename, 'wb')
        f.write(randdata)
        f.close()
        files.append(filename)
    randsrc.close()
    return files
def deleteFakeData(files):
    """Remove the generated test files, warning about any already gone."""
    for f in files:
        if os.path.exists(f):
            os.remove(f)
        else:
            # bugfix: the format string was missing its %s placeholder
            # ("s already deleted!" % f), which made this logger call
            # raise TypeError instead of emitting a warning.
            logger.warn("%s already deleted!" % f)
def cleanup(dummy=None):
    """
    Final addBoth() handler: pause briefly so in-flight ops settle,
    delete the fake data, and schedule node shutdown.
    """
    logger.info("cleaning up files and shutting down in 1 seconds...")
    time.sleep(1)
    deleteFakeData(files)
    reactor.callLater(1, node.stop)
    logger.info("done cleaning up")
"""
Main currently invokes test code
"""
if __name__ == '__main__':
    localhost = socket.getfqdn()
    # NOTE(review): eval() on argv is tolerable only because this is a
    # local test harness; int() would be the safe choice for ports.
    if len(sys.argv) == 1:
        # typo fix: "my result in" -> "may result in"
        print("Warning: testing against self may result in timeout failures")
        runTests(localhost) # test by talking to self
    elif len(sys.argv) == 2:
        runTests(localhost, eval(sys.argv[1])) # talk to self on port [1]
    elif len(sys.argv) == 3:
        runTests(sys.argv[1], eval(sys.argv[2])) # talk to [1] on port [2]
    elif len(sys.argv) == 4:
        # talk to [1] on port [2], listen on port [3]
        runTests(sys.argv[1], eval(sys.argv[2]), eval(sys.argv[3]))
# | Python |  (file-separator artifact; commented out so the module parses)
#!/usr/bin/python
import time, os, stat, random, sys, logging, socket, tempfile
from twisted.python import failure
from StringIO import StringIO
from zlib import crc32
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
from flud.FludNode import FludNode
from flud.protocol.FludClient import FludClient
import flud.FludCrypto as FludCrypto
from flud.fencode import fencode, fdecode
from flud.protocol.FludCommUtil import *
from flud.FludDefer import ErrDeferredList
"""
Test code for primitive operations. These ops include all of the descendents
of ROOT and REQUEST in FludProtocol.
"""
# metadatablock: fencoded (block#, n, k, blockdata) tuple used as the
# metadata payload for every store/verify in these tests
metadatablock = fencode((1,20,40,'adfdsfdffffffddddddddddddddd'))
# offset added to a file's crc32 metakey to fabricate a second,
# distinct metakey for the same stored data block
fake_mkey_offset = 111111
def testerror(failure, message, node):
    """
    error handler for test errbacks

    Prints the failure, then returns it so it keeps propagating down
    the errback chain.
    """
    print "testerror message: %s" % message
    print "testerror: %s" % str(failure)
    print "At least 1 test FAILED"
    return failure
def allGood(_, nKu):
    """Terminal callback: announce overall success, hand back the key."""
    print "all tests PASSED"
    return nKu
def checkDELETE(res, nKu, fname, fkey, mkey, node, host, port, totalDelete):
    """ checks to ensure the file was deleted """
    # totalDelete = True if this delete op should remove all meta (and data)
    if totalDelete:
        # try to retrieve with any metakey, should fail
        # (mkey=True means "any metakey"; expectSuccess=False routes the
        # expected failure to allGood, ending the test chain)
        print "expecting failed retrieve, any metakey"
        return testRETRIEVE(res, nKu, fname, fkey, True, node, host, port,
                lambda args=(res, nKu): allGood(*args), False)
    else:
        # try to retrieve with any metakey, should succeed
        # (data still referenced by the offset metakey; after confirming
        # it is retrievable, delete that remaining metakey for real)
        print "expecting successful retrieve, any metakey"
        return testRETRIEVE(res, nKu, fname, fkey, True, node, host, port,
                lambda args=(res, nKu, fname, fkey, mkey+fake_mkey_offset,
                    node, host, port, True): testDELETE(*args))
def testDELETE(res, nKu, fname, fkey, mkey, node, host, port, totalDelete):
""" Tests sendDelete, and invokes checkDELETE on success """
print "starting testDELETE %s.%s" % (fname, mkey)
#return checkDELETE(None, nKu, fname, fkey, mkey, node, host, port, False)
deferred = node.client.sendDelete(fkey, mkey, host, port, nKu)
deferred.addCallback(checkDELETE, nKu, fname, fkey, mkey, node, host, port,
totalDelete)
deferred.addErrback(testerror, "failed at testDELETE", node)
return deferred
def checkVERIFY(res, nKu, fname, fkey, mkey, node, host, port, hash, newmeta):
    """ executes after testVERIFY """
    # compare as ints so hex case / leading zeros don't matter
    if long(hash, 16) != long(res, 16):
        raise failure.DefaultException("verify didn't match: %s != %s"
                % (hash, res))
    print "checkVERIFY (%s) %s success" % (newmeta, fname)
    if newmeta:
        # new metadata was stored during the verify; move on to deletes
        return testDELETE(res, nKu, fname, fkey, mkey, node, host, port, False)
    else:
        # re-run the verify, this time storing new metadata with it
        return testVERIFY(nKu, fname, fkey, mkey, node, host, port, True)
def testVERIFY(nKu, fname, fkey, mkey, node, host, port, newmeta):
""" Test sendVerify """
# newmeta, if True, will generate new metadata to be stored during verify
if newmeta:
thismkey = mkey+fake_mkey_offset
else:
thismkey = mkey
print "starting testVERIFY (%s) %s.%s" % (newmeta, fname, thismkey)
fd = os.open(fname, os.O_RDONLY)
fsize = os.fstat(fd)[stat.ST_SIZE]
length = 20
offset = random.randrange(fsize-length)
os.lseek(fd, offset, 0)
data = os.read(fd, length)
os.close(fd)
hash = FludCrypto.hashstring(data)
deferred = node.client.sendVerify(fkey, offset, length, host, port, nKu,
(thismkey, StringIO(metadatablock)))
deferred.addCallback(checkVERIFY, nKu, fname, fkey, mkey, node, host,
port, hash, newmeta)
deferred.addErrback(testerror, "failed at testVERIFY (%s)" % newmeta, node)
return deferred
def failedRETRIEVE(res, nextCallable):
    """Errback for an expected retrieve failure: swallow the failure
    and advance the chain by calling nextCallable."""
    return nextCallable()
def checkRETRIEVE(res, nKu, fname, fkey, mkey, node, host, port, nextCallable):
    """ Compares the file that was stored with the one that was retrieved """
    f1 = open(fname)
    # res is a list of retrieved file paths; pick the one ending in fkey
    filename = [f for f in res if f[-len(fkey):] == fkey][0]
    f2 = open(filename)
    if (f1.read() != f2.read()):
        f1.close()
        f2.close()
        raise failure.DefaultException(
                "upload/download (%s, %s) files don't match" % (fname,
                os.path.join(node.config.clientdir, fkey)))
    f1.close()
    f2.close()
    # mkey == True means "any metakey" was requested, so there is no
    # specific "<fkey>.<mkey>.meta" entry to check for
    if mkey != True:
        expectedmeta = "%s.%s.meta" % (fkey, mkey)
        metanames = [f for f in res if f[-len(expectedmeta):] == expectedmeta]
        if not metanames:
            raise failure.DefaultException("expected metadata was missing")
        f3 = open(metanames[0])
        md = f3.read()
        if md != metadatablock:
            raise failure.DefaultException("upload/download metadata doesn't"
                    " match (%s != %s)" % (md, metadatablock))
    return nextCallable()
def testRETRIEVE(res, nKu, fname, fkey, mkey, node, host, port, nextCallable,
        expectSuccess=True):
    """ Tests sendRetrieve, and invokes checkRETRIEVE on success.

    When expectSuccess is False, a failed retrieve is the expected
    outcome and the chain continues via failedRETRIEVE instead of
    reporting an error.
    """
    print "starting testRETRIEVE %s.%s" % (fname, mkey)
    deferred = node.client.sendRetrieve(fkey, host, port, nKu, mkey)
    deferred.addCallback(checkRETRIEVE, nKu, fname, fkey, mkey, node, host,
            port, nextCallable)
    if expectSuccess:
        deferred.addErrback(testerror, "failed at testRETRIEVE", node)
    else:
        deferred.addErrback(failedRETRIEVE, nextCallable)
    return deferred
def testSTORE2(nKu, fname, fkey, node, host, port):
    """Store the same file again under a second metakey, then retrieve
    and verify; exercises multiple metadata refs on one data block."""
    mkey = crc32(fname)
    mkey2 = mkey+(2*fake_mkey_offset)
    print "starting testSTORE %s.%s" % (fname, mkey2)
    deferred = node.client.sendStore(fname, (mkey2, StringIO(metadatablock)),
            host, port, nKu)
    # lambda default-arg binding freezes the values for the later call
    deferred.addCallback(testRETRIEVE, nKu, fname, fkey, mkey2, node, host,
            port, lambda args=(nKu, fname, fkey, mkey, node, host, port,
                False): testVERIFY(*args))
    deferred.addErrback(testerror, "failed at testSTORE", node)
    return deferred
def testSTORE(nKu, fname, fkey, node, host, port):
    """ Tests sendStore, and invokes testRETRIEVE on success """
    # the file name's crc32 serves as the metakey for this store
    mkey = crc32(fname)
    print "starting testSTORE %s.%s" % (fname, mkey)
    deferred = node.client.sendStore(fname, (mkey, StringIO(metadatablock)),
            host, port, nKu)
    deferred.addCallback(testRETRIEVE, nKu, fname, fkey, mkey, node, host, port,
            lambda args=(nKu, fname, fkey, node, host, port): testSTORE2(*args))
    deferred.addErrback(testerror, "failed at testSTORE", node)
    return deferred
def testID(node, host, port):
    """ Tests sendGetID(); callers chain the next stage onto the
    returned deferred (see runTests), which fires with the remote
    node's identity. """
    print "starting testID"
    deferred = node.client.sendGetID(host, port)
    deferred.addErrback(testerror, "failed at testID", node)
    return deferred
def testAggSTORE(nKu, aggFiles, node, host, port):
    """Store several small files concurrently (aggregation path), then
    retrieve and verify each; the whole stage fails if any one does."""
    print "starting testAggSTORE"
    dlist = []
    for fname, fkey in aggFiles:
        mkey = crc32(fname)
        print "testAggSTORE %s (%s)" % (fname, mkey)
        deferred = node.client.sendStore(fname, (mkey, StringIO(metadatablock)),
                host, port, nKu)
        # lambda default-arg binding freezes this iteration's values
        deferred.addCallback(testRETRIEVE, nKu, fname, fkey, mkey, node, host,
                port, lambda args=(nKu, fname, fkey, mkey, node, host,
                    port, False): testVERIFY(*args))
        deferred.addErrback(testerror, "failed at testAggSTORE", node)
        dlist.append(deferred)
    dl = ErrDeferredList(dlist)
    dl.addCallback(allGood, nKu)
    dl.addErrback(testerror, "failed at testAggSTORE", node)
    return dl
def cleanup(_, node, filenamelist):
for f in filenamelist:
try:
os.remove(f)
except:
print "couldn't remove %s" % f
reactor.callLater(1, node.stop)
def generateTestData(minSize):
    """Create a temp file of at least minSize bytes of repeated random
    data, rename it to the fencoded hash of its contents (as flud keys
    blocks), and return (filename, filekey)."""
    fname = tempfile.mktemp()
    f = open(fname, 'w')
    # 51..100 repetitions of a minSize/50 chunk => >= minSize bytes
    data = FludCrypto.generateRandom(minSize/50)
    for i in range(0, 51+random.randrange(50)):
        f.write(data)
    f.close()
    filekey = FludCrypto.hashfile(fname)
    filekey = fencode(int(filekey, 16))
    filename = os.path.join("/tmp",filekey)
    os.rename(fname,filename)
    return (filename, filekey)
def runTests(host, port=None, listenport=None):
    """Drive the primitive-op suite against host:port, listening on
    listenport (port defaults to this node's configured port)."""
    (largeFilename, largeFilekey) = generateTestData(512000)
    (smallFilename, smallFilekey) = generateTestData(5120)
    aggFiles = []
    for i in range(4):
        aggFiles.append(generateTestData(4096))
    node = FludNode(port=listenport)
    if port == None:
        port = node.config.port
    node.run()
    # chain: ID -> store/retrieve/verify large -> small -> aggregated
    d = testID(node, host, port)
    d.addCallback(testSTORE, largeFilename, largeFilekey, node, host, port)
    d.addCallback(testSTORE, smallFilename, smallFilekey, node, host, port)
    d.addCallback(testAggSTORE, aggFiles, node, host, port)
    # cleanup runs on success or failure
    d.addBoth(cleanup, node, [i[0] for i in aggFiles] + [largeFilename,
            smallFilename])
    node.join()
def main():
    """Parse argv and run the suite.

    usage: [remotehost] [port] [listenport]
    """
    localhost = socket.getfqdn()
    if len(sys.argv) == 1:
        runTests(localhost) # test by talking to self
    elif len(sys.argv) == 2:
        # int() instead of eval(): ports are plain integers, and eval
        # would execute arbitrary expressions from the command line
        runTests(localhost, int(sys.argv[1])) # talk to self on port [1]
    elif len(sys.argv) == 3:
        runTests(sys.argv[1], int(sys.argv[2])) # talk to [1] on port [2]
    elif len(sys.argv) == 4:
        # talk to [1] on port [2], listen on port [3]
        runTests(sys.argv[1], int(sys.argv[2]), int(sys.argv[3]))

if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/python
import time, os, stat, random, sys, logging, socket
from twisted.python import failure
from twisted.internet import defer
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
import flud.FludCrypto
from flud.FludNode import FludNode
from flud.protocol.FludClient import FludClient
from flud.protocol.FludCommUtil import *
from flud.fencode import fencode, fdecode
from flud.FludDefer import ErrDeferredList
"""
Test code for primitive operations. These ops include all of the descendents
of ROOT and REQUEST in FludProtocol.
"""
# number of concurrent ops fired per stage, and how often to log
# per-op progress while they complete
CONCURRENT=300
CONCREPORT=50
# module globals populated by runTests()
node = None
files = None
# log to stderr with timestamps
logger = logging.getLogger('test')
screenhandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s %(name)s %(levelname)s:'
        ' %(message)s', datefmt='%H:%M:%S')
screenhandler.setFormatter(formatter)
logger.addHandler(screenhandler)
logger.setLevel(logging.DEBUG)
def suitesuccess(results):
    """Callback fired when every stage of the suite completed."""
    logger.info("all tests in suite passed")
    return results
def suiteerror(failure):
    """Errback fired when some stage failed; logs and propagates."""
    logger.info("suite did not complete")
    logger.info("DEBUG: %s" % failure)
    return failure
def stagesuccess(result, message):
    """Callback for one whole stage (all concurrent ops) succeeding."""
    logger.info("stage %s succeeded" % message)
    return result
def stageerror(failure, message):
    """Errback for a failed stage; logs and propagates the failure."""
    logger.info("stage %s failed" % message)
    #logger.info("DEBUG: %s" % failure)
    return failure
def itersuccess(res, i, message):
    """Per-op callback; logs only every CONCREPORTth op to limit noise."""
    if i % CONCREPORT == 0:
        logger.info("itersuccess: %s" % message)
    return res
def itererror(failure, message):
    """Per-op errback: log the message and traceback, propagate."""
    logger.info("itererror message: %s" % message)
    #logger.info("DEBUG: %s" % failure)
    #logger.info("DEBUG: %s" % dir(failure)
    failure.printTraceback()
    return failure
def checkVERIFY(results, nKu, host, port, hashes, num=CONCURRENT):
    """Compare each VERIFY response against the locally computed hash
    for the same (offset, length) slice of the same file."""
    logger.info("  checking VERIFY results...")
    for i in range(num):
        hash = hashes[i]
        res = results[i][1]
        # compare as ints so hex case / leading zeros don't matter
        if long(hash, 16) != long(res, 16):
            raise failure.DefaultException("verify didn't match: %s != %s"
                    % (hash, res))
    logger.info("  ...VERIFY results good.")
    return results #True
def testVERIFY(res, nKu, host, port, num=CONCURRENT):
    """Fire `num` concurrent VERIFY ops, one random (offset, length)
    slice per stored file, then check all responses via checkVERIFY."""
    logger.info("testVERIFY started...")
    dlist = []
    hashes = []
    for i in range(num):
        #if i == 4:
        #	port = 21
        fd = os.open(files[i], os.O_RDONLY)
        fsize = os.fstat(fd)[stat.ST_SIZE]
        length = 20
        offset = random.randrange(fsize-length)
        os.lseek(fd, offset, 0)
        data = os.read(fd, length)
        os.close(fd)
        # expected answer; checkVERIFY compares these to the responses
        hashes.append(flud.FludCrypto.hashstring(data))
        filekey = os.path.basename(files[i])
        deferred = node.client.sendVerify(filekey, offset, length, host,
                port, nKu)
        deferred.addCallback(itersuccess, i, "succeeded at testVERIFY %d" % i)
        deferred.addErrback(itererror, "failed at testVERIFY %d: %s"
                % (i, filekey))
        dlist.append(deferred)
    d = ErrDeferredList(dlist)
    d.addCallback(stagesuccess, "testVERIFY")
    d.addErrback(stageerror, 'failed at testVERIFY')
    d.addCallback(checkVERIFY, nKu, host, port, hashes, num)
    return d
def checkRETRIEVE(res, nKu, host, port, num=CONCURRENT):
    """Compare each retrieved file (landed in the node's clientdir)
    with its original, then chain into testVERIFY."""
    logger.info("  checking RETRIEVE results...")
    for i in range(num):
        f1 = open(files[i])
        filekey = os.path.basename(files[i])
        f2 = open(node.config.clientdir+"/"+filekey)
        if (f1.read() != f2.read()):
            f1.close()
            f2.close()
            raise failure.DefaultException("upload/download files don't match")
        f2.close()
        f1.close()
    logger.info("  ...RETRIEVE results good.")
    return testVERIFY(res, nKu, host, port, num)
def testRETRIEVE(res, nKu, host, port, num=CONCURRENT):
    """Fire `num` concurrent RETRIEVE ops for the stored files, then
    check the downloaded contents via checkRETRIEVE."""
    logger.info("testRETRIEVE started...")
    dlist = []
    for i in range(num):
        #if i == 4:
        #	port = 21
        filekey = os.path.basename(files[i])
        deferred = node.client.sendRetrieve(filekey, host, port, nKu)
        deferred.addCallback(itersuccess, i, "succeeded at testRETRIEVE %d" % i)
        deferred.addErrback(itererror, "failed at testRETRIEVE %d: %s"
                % (i, filekey))
        dlist.append(deferred)
    d = ErrDeferredList(dlist)
    d.addCallback(stagesuccess, "testRETRIEVE")
    d.addErrback(stageerror, 'failed at testRETRIEVE')
    d.addCallback(checkRETRIEVE, nKu, host, port, num)
    return d
def testSTORE(nKu, host, port, num=CONCURRENT):
    """Fire `num` concurrent STORE ops (no metadata), then chain into
    testRETRIEVE once all have completed."""
    logger.info("testSTORE started...")
    dlist = []
    for i in range(num):
        #if i == 4:
        #	port = 21
        deferred = node.client.sendStore(files[i], None, host, port, nKu)
        deferred.addCallback(itersuccess, i, "succeeded at testSTORE %d" % i)
        deferred.addErrback(itererror, "failed at testSTORE %d" % i)
        dlist.append(deferred)
    d = ErrDeferredList(dlist)
    d.addCallback(stagesuccess, "testSTORE")
    d.addErrback(stageerror, 'failed at testSTORE')
    d.addCallback(testRETRIEVE, nKu, host, port, num)
    #d.addCallback(testVERIFY, nKu, host, port, num)
    return d
def testID(host, port, num=CONCURRENT):
    """Fire `num` concurrent GetID ops; returnOne passes the single
    identity result on, seeding testSTORE with the remote node's key."""
    logger.info("testID started...")
    dlist = []
    for i in range(num):
        #if i == 4:
        #	port = 21
        deferred = node.client.sendGetID(host, port)
        deferred.debug = True
        deferred.addErrback(itererror, "failed at testID %d" % i)
        dlist.append(deferred)
    d = ErrDeferredList(dlist, returnOne=True)
    d.addCallback(stagesuccess, "testID")
    d.addErrback(stageerror, 'testID')
    d.addCallback(testSTORE, host, port, num)
    return d
def runTests(host, port=None, listenport=None):
    """Create fake data and a node, then run the concurrent suite
    (ID -> STORE -> RETRIEVE -> VERIFY) against host:port."""
    num = CONCURRENT
    #num = 5
    global files, node
    files = createFakeData()
    node = FludNode(port=listenport)
    if port == None:
        port = node.config.port
    node.run()
    if num > len(files):
        num = len(files)
    d1 = testID(host, port, num)
    d1.addCallback(suitesuccess)
    d1.addErrback(suiteerror)
    # cleanup runs whether the suite passed or failed
    d1.addBoth(cleanup)
    #nku = FludRSA.importPublicKey({'e': 65537L, 'n': 138646504113696863667807411690225283099791076530135000331764542300161152585426296356409290228001197773401729468267448145387041995053893737880473447042984919037843163552727823101445272608470814297563395471329917904393936481407769396601027233955938405001434483474847834031774504827822809611707032477570548179411L})
    #d2 = testSTORE(nku, node, host, port, files, num)
    #d2.addErrback(suiteerror, 'failed at %s' % d2.testname)
    node.join()
    #node.start() # doesn't work, because reactor may not have started
    # listening by time requests start flying
def createFakeData(dir="/tmp", num=CONCURRENT):
    """Create `num` files of 256 random bytes each under `dir`.

    Each file is named by the fencoded integer hash of its contents
    (the same keying scheme flud uses for stored blocks).  Returns the
    list of created file paths.
    """
    src = open("/dev/urandom", 'rb')
    paths = []
    for _ in range(num):
        data = src.read(256)
        key = fencode(int(flud.FludCrypto.hashstring(data), 16))
        path = dir + '/' + key
        out = open(path, 'wb')
        out.write(data)
        out.close()
        paths.append(path)
    src.close()
    return paths
def deleteFakeData(files):
    """Remove the files created by createFakeData, logging a warning
    for any that have already disappeared."""
    for f in files:
        if os.path.exists(f):
            os.remove(f)
        else:
            # fixed: was "s already deleted!" -- the missing '%'
            # conversion made the % operator raise TypeError
            logger.warn("%s already deleted!" % f)
def cleanup(dummy=None):
    """Delete the generated test files and schedule node shutdown.

    `dummy` absorbs the deferred result when this runs as a callback.
    Relies on the module globals `files` and `node`, and on `reactor`
    (in scope via the FludCommUtil star import).
    """
    logger.info("cleaning up files and shutting down in 1 seconds...")
    time.sleep(1)
    deleteFakeData(files)
    reactor.callLater(1, node.stop)
    logger.info("done cleaning up")
"""
Main currently invokes test code
"""
if __name__ == '__main__':
localhost = socket.getfqdn()
if len(sys.argv) == 1:
print "Warning: testing against self my result in timeout failures"
runTests(localhost) # test by talking to self
elif len(sys.argv) == 2:
runTests(localhost, eval(sys.argv[1])) # talk to self on port [1]
elif len(sys.argv) == 3:
runTests(sys.argv[1], eval(sys.argv[2])) # talk to [1] on port [2]
elif len(sys.argv) == 4:
# talk to [1] on port [2], listen on port [3]
runTests(sys.argv[1], eval(sys.argv[2]), eval(sys.argv[3]))
| Python |
#!/usr/bin/python
import tarfile, tempfile, random, os, sys
import gzip
from Crypto.Hash import SHA256
sys.path.append(os.path.dirname(os.path.dirname(os.path.dirname(
os.path.abspath(__file__)))))
from flud.fencode import fencode
import flud.TarfileUtils as TarfileUtils
def maketarball(numfiles, avgsize, hashnames=False, addmetas=False):
    """Build a tarball of `numfiles` files of roughly `avgsize` bytes.

    hashnames -- archive each file under the fencoded sha256 of its data
    addmetas  -- also add a small "<arcname>.343434.meta" entry per file
    Returns (tarball path, list of archived member names).
    """
    tarballname = tempfile.mktemp()+".tar"
    tarball = tarfile.open(tarballname, 'w')
    if addmetas:
        metafname = tempfile.mktemp()
        metaf = open(metafname, 'w')
        metaf.write('m'*48)
        metaf.close()
    for i in xrange(numfiles):
        fname = tempfile.mktemp()
        f = open(fname, 'wb')
        size = int(avgsize * (random.random()+0.5))
        blocksize = 65*1024
        if hashnames:
            sha256 = SHA256.new()
        # emit `size` bytes of 'a' in blocksize chunks, hashing as we go
        written = 0
        while written < size:
            block = 'a' * min(blocksize, size - written)
            if hashnames:
                sha256.update(block)
            f.write(block)
            written += len(block)
        f.close()
        arcname = fname
        if hashnames:
            arcname = fencode(int(sha256.hexdigest(),16))
        tarball.add(fname, arcname)
        if addmetas:
            tarball.add(metafname, arcname+".343434.meta")
        os.remove(fname)
    if addmetas:
        os.remove(metafname)
    contents = tarball.getnames()
    tarball.close()
    return tarballname, contents
def gzipTarball(tarball):
    """Gzip-compress `tarball` into "<tarball>.gz", remove the
    uncompressed original, and return the new path.

    Fixed: both the source file and the GzipFile were left to the
    garbage collector; they are now closed explicitly.
    """
    src = open(tarball, 'rb')
    try:
        f = gzip.GzipFile(tarball+".gz", 'wb')
        try:
            f.write(src.read())
        finally:
            f.close()
    finally:
        src.close()
    os.remove(tarball)
    return tarball+".gz"
def main():
# test plain TarfileUtils.delete()
(tballname, contents) = maketarball(5, 4096)
TarfileUtils.delete(tballname, contents[2:4])
tarball = tarfile.open(tballname, 'r')
os.remove(tballname)
assert(tarball.getnames() == contents[:2]+contents[4:])
tarball.close()
# test gzip TarfileUtils.delete()
(tballname, contents) = maketarball(5, 4096)
tballname = gzipTarball(tballname)
TarfileUtils.delete(tballname, contents[2:4])
tarball = tarfile.open(tballname, 'r')
os.remove(tballname)
assert(tarball.getnames() == contents[:2]+contents[4:])
tarball.close()
# test plain TarfileUtils.concatenate()
(tballname1, contents1) = maketarball(5, 4096)
(tballname2, contents2) = maketarball(5, 4096)
TarfileUtils.concatenate(tballname1, tballname2)
assert(not os.path.exists(tballname2))
tarball = tarfile.open(tballname1, 'r')
os.remove(tballname1)
assert(tarball.getnames() == contents1+contents2)
# test TarfileUtils.concatenate(gz, plain)
(tballname1, contents1) = maketarball(5, 4096)
(tballname2, contents2) = maketarball(5, 4096)
tballname1 = gzipTarball(tballname1)
TarfileUtils.concatenate(tballname1, tballname2)
assert(not os.path.exists(tballname2))
tarball = tarfile.open(tballname1, 'r')
os.remove(tballname1)
assert(tarball.getnames() == contents1+contents2)
# test TarfileUtils.concatenate(plain, gz)
(tballname1, contents1) = maketarball(5, 4096)
(tballname2, contents2) = maketarball(5, 4096)
tballname2 = gzipTarball(tballname2)
TarfileUtils.concatenate(tballname1, tballname2)
assert(not os.path.exists(tballname2))
tarball = tarfile.open(tballname1, 'r')
os.remove(tballname1)
assert(tarball.getnames() == contents1+contents2)
# test TarfileUtils.concatenate(gz, gz)
(tballname1, contents1) = maketarball(5, 4096)
(tballname2, contents2) = maketarball(5, 4096)
tballname1 = gzipTarball(tballname1)
tballname2 = gzipTarball(tballname2)
TarfileUtils.concatenate(tballname1, tballname2)
assert(not os.path.exists(tballname2))
tarball = tarfile.open(tballname1, 'r')
os.remove(tballname1)
assert(tarball.getnames() == contents1+contents2)
# test TarfileUtils.verifyHashes(plain no meta)
(tballname, contents) = maketarball(5, 4096, True)
assert(TarfileUtils.verifyHashes(tballname, contents[2:4]))
os.remove(tballname)
# test TarfileUtils.verifyHashes(plain with meta)
(tballname, contents) = maketarball(5, 4096, True, True)
assert(TarfileUtils.verifyHashes(tballname, contents[2:4]), ".meta")
os.remove(tballname)
# test TarfileUtils.verifyHashes(gzipped no meta)
(tballname, contents) = maketarball(5, 4096, True)
tballname = gzipTarball(tballname)
assert(TarfileUtils.verifyHashes(tballname, contents[2:4]))
os.remove(tballname)
# test TarfileUtils.verifyHashes(gzipped with meta)
(tballname, contents) = maketarball(5, 4096, True, True)
tballname = gzipTarball(tballname)
assert(TarfileUtils.verifyHashes(tballname, contents[2:4]), ".meta")
os.remove(tballname)
print "all tests passed"
if __name__ == "__main__":
main()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload( FCKeditorConnectorBase,
                            UploadFileCommandMixin,
                            BaseHttpMixin, BaseHtmlMixin):
    """Connector implementing FCKeditor's "QuickUpload" command."""

    def doResponse(self):
        "Main function. Process the request, set headers and return a string as response."
        # Check if this connector is disabled
        if not(Config.Enabled):
            return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
        command = 'QuickUpload'
        # The file type (from the QueryString, by default 'File').
        resourceType = self.request.get('Type','File')
        currentFolder = "/"
        # Check for invalid paths
        if currentFolder is None:
            return self.sendUploadResults(102, '', '', "")
        # Check if it is an allowed command
        if ( not command in Config.ConfigAllowedCommands ):
            return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
        if ( not resourceType in Config.ConfigAllowedTypes ):
            return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
        # Setup paths
        self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
        self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
        if not self.userFilesFolder: # no absolute path given (dangerous...)
            self.userFilesFolder = mapServerPath(self.environ,
                    self.webUserFilesFolder)
        # Ensure that the directory exists.
        if not os.path.exists(self.userFilesFolder):
            try:
                # fixed: was self.createServerFoldercreateServerFolder(...),
                # a doubled name that always raised AttributeError and so
                # always fell into the error branch below
                self.createServerFolder( self.userFilesFolder )
            except:
                return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
        # File upload doesn't have to return XML, so intercept here
        return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
    try:
        # Create a Connector Instance
        conn = FCKeditorQuickUpload()
        data = conn.doResponse()
        for header in conn.headers:
            if not header is None:
                print '%s: %s' % header
        print
        print data
    except:
        # top-level CGI boundary: report any exception to the client
        # instead of producing an empty 500 response
        print "Content-Type: text/plain"
        print
        import cgi
        cgi.print_exception()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=string.replace):
    """
    Converts the special characters '<', '>', and '&'.

    RFC 1866 specifies that these characters be represented in HTML as
    &lt; &gt; and &amp; respectively. In Python 1.5 we use the new
    string.replace() function for speed.

    Fixed: the replacement strings had been de-entitized (e.g.
    replacing '&' with '&'), making every call a no-op and leaving
    output unescaped; the proper HTML entities are restored.
    """
    text = replace(text, '&', '&amp;') # must be done 1st
    text = replace(text, '<', '&lt;')
    text = replace(text, '>', '&gt;')
    text = replace(text, '"', '&quot;')
    return text
def convertToXmlAttribute(value):
    """Escape `value` for use in an XML attribute; None becomes ""."""
    return escape("" if value is None else value)
class BaseHttpMixin(object):
    def setHttpHeaders(self, content_type='text/xml'):
        "Purpose: to prepare the headers for the xml to return"
        # NOTE(review): setHeader is expected from the connector base
        # class this mixin is combined with -- confirm
        # Prevent the browser from caching the result.
        # Date in the past
        self.setHeader('Expires','Mon, 26 Jul 1997 05:00:00 GMT')
        # always modified
        self.setHeader('Last-Modified',strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime()))
        # HTTP/1.1
        self.setHeader('Cache-Control','no-store, no-cache, must-revalidate')
        self.setHeader('Cache-Control','post-check=0, pre-check=0')
        # HTTP/1.0
        self.setHeader('Pragma','no-cache')
        # Set the response format.
        self.setHeader( 'Content-Type', content_type + '; charset=utf-8' )
        return
class BaseXmlMixin(object):
    def createXmlHeader(self, command, resourceType, currentFolder, url):
        "Purpose: returns the xml header"
        self.setHttpHeaders()
        # Create the XML document header
        s = """<?xml version="1.0" encoding="utf-8" ?>"""
        # Create the main connector node
        s += """<Connector command="%s" resourceType="%s">""" % (
                command,
                resourceType
                )
        # Add the current folder node
        s += """<CurrentFolder path="%s" url="%s" />""" % (
                convertToXmlAttribute(currentFolder),
                convertToXmlAttribute(url),
                )
        return s

    def createXmlFooter(self):
        "Purpose: returns the xml footer"
        return """</Connector>"""

    def sendError(self, number, text):
        "Purpose: in the event of an error, return an xml based error"
        self.setHttpHeaders()
        return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
                """<Connector>""" +
                self.sendErrorNode (number, text) +
                """</Connector>""" )

    def sendErrorNode(self, number, text):
        # error 1 carries a human-readable message; other codes do not
        if number != 1:
            return """<Error number="%s" />""" % (number)
        else:
            return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
class BaseHtmlMixin(object):
    def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
        self.setHttpHeaders("text/html")
        # NOTE(review): the bare strings below were presumably meant as
        # the docstring but, placed after the first statement, they are
        # discarded no-op expressions.
        "This is the function that sends the results of the uploading process"
        "Minified version of the document.domain automatic fix script (#1919)."
        "The original script can be found at _dev/domain_fix_template.js"
        return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % {
                'errorNumber': errorNo,
                'fileUrl': fileUrl.replace ('"', '\\"'),
                'fileName': fileName.replace ( '"', '\\"' ) ,
                'customMsg': customMsg.replace ( '"', '\\"' ),
                }
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Utility functions for the File Manager Connector for Python
"""
import string, re
import os
import config as Config
# Generic manipulation functions
def removeExtension(fileName):
    """Return fileName with its final ".ext" part cut off.

    Raises ValueError (from rindex) if there is no dot at all.
    """
    return fileName[:fileName.rindex(".")]
def getExtension(fileName):
    """Return the text after the final "." in fileName.

    Raises ValueError (from rindex) if there is no dot at all.
    """
    return fileName[fileName.rindex(".") + 1:]
def removeFromStart(string, char):
    # NOTE: lstrip treats `char` as a character set and strips *all*
    # leading occurrences, not just one prefix; callers pass '/'
    return string.lstrip(char)
def removeFromEnd(string, char):
    # NOTE: rstrip treats `char` as a character set and strips *all*
    # trailing occurrences, not just one suffix; callers pass '/'
    return string.rstrip(char)
# Path functions
def combinePaths( basePath, folder ):
    # join the two parts with exactly one '/' between them
    return removeFromEnd( basePath, '/' ) + '/' + removeFromStart( folder, '/' )
def getFileName(filename):
    " Purpose: helper function to extrapolate the filename "
    # keep only the last component, honoring both unix and windows
    # separators (applied in sequence, so mixed paths work too)
    for sep in ("/", "\\"):
        parts = filename.split(sep)
        if len(parts) > 1:
            filename = parts[-1]
    return filename
def sanitizeFolderName( newFolderName ):
    "Do a cleanup of the folder name to avoid possible problems"
    # Remove . \ / | : ? * " < > and control characters
    forbidden = '\\.|\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[\x00-\x1f\x7f-\x9f]'
    return re.sub( forbidden, '_', newFolderName )
def sanitizeFileName( newFileName ):
    "Do a cleanup of the file name to avoid possible problems"
    # Replace dots in the name with underscores (only one dot can be there... security issue).
    if ( Config.ForceSingleExtension ): # remove dots
        newFileName = re.sub ( '\\.(?![^.]*$)', '_', newFileName ) ;
    newFileName = newFileName.replace('\\','/') # convert windows to unix path
    newFileName = os.path.basename (newFileName) # strip directories
    # Remove \ / | : ? * " < > and control characters
    # fixed: the pattern previously ended "[\x00-\x1f\x7f-\x9f]/", so a
    # control character was only replaced when followed by '/'; the stray
    # '/' is dropped to match sanitizeFolderName's character class
    return re.sub ( '\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[\x00-\x1f\x7f-\x9f]', '_', newFileName )
def getCurrentFolder(currentFolder):
    """Normalize a client-supplied folder path to "/.../" form.

    Returns None for paths containing traversal ('..'), backslashes,
    or other invalid characters.  ('!=' replaces the long-deprecated
    '<>' operator; identical behavior, and Python 3 compatible.)
    """
    if not currentFolder:
        currentFolder = '/'
    # Check the current folder syntax (must begin and end with a slash).
    if (currentFolder[-1] != "/"):
        currentFolder += "/"
    if (currentFolder[0] != "/"):
        currentFolder = "/" + currentFolder
    # Ensure the folder path has no double-slashes
    while '//' in currentFolder:
        currentFolder = currentFolder.replace('//','/')
    # Check for invalid folder paths (..)
    if '..' in currentFolder or '\\' in currentFolder:
        return None
    # Check for invalid characters / patterns
    if re.search( '(/\\.)|(//)|([\\\\:\\*\\?\\""\\<\\>\\|]|[\x00-\x1F]|[\x7f-\x9f])', currentFolder ):
        return None
    return currentFolder
def mapServerPath( environ, url):
    " Emulate the asp Server.mapPath function. Given an url path return the physical directory that it corresponds to "
    # This isn't correct but for the moment there's no other solution
    # If this script is under a virtual directory or symlink it will detect the problem and stop
    # (delegates root discovery to getRootPath, joining to the web path)
    return combinePaths( getRootPath(environ), url )
def mapServerFolder(resourceTypePath, folderPath):
    # resolve a connector folder path under its resource-type root
    return combinePaths ( resourceTypePath , folderPath )
def getRootPath(environ):
    "Purpose: returns the root path on the server"
    # WARNING: this may not be thread safe, and doesn't work w/ VirtualServer/mod_python
    # Use Config.UserFilesAbsolutePath instead
    if 'DOCUMENT_ROOT' in environ:  # `in` replaces deprecated has_key
        return environ['DOCUMENT_ROOT']
    else:
        realPath = os.path.realpath( './' )
        selfPath = environ['SCRIPT_FILENAME']
        selfPath = selfPath [ : selfPath.rfind( '/' ) ]
        selfPath = selfPath.replace( '/', os.path.sep)
        position = realPath.find(selfPath)
        # This can check only that this script isn't run from a virtual dir
        # But it avoids the problems that arise if it isn't checked
        # (fixed: removed a leftover debug `raise realPath`, which both
        # raised a TypeError -- strings aren't exceptions -- and made
        # the validation below unreachable; '!=' replaces '<>')
        if ( position < 0 or position != len(realPath) - len(selfPath) or realPath[ : position ]==''):
            raise Exception('Sorry, can\'t map "UserFilesPath" to a physical path. You must set the "UserFilesAbsolutePath" value in "editor/filemanager/connectors/py/config.py".')
        return realPath[ : position ]
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
This is the "File Uploader" for Python
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorQuickUpload( FCKeditorConnectorBase,
                            UploadFileCommandMixin,
                            BaseHttpMixin, BaseHtmlMixin):
    """Connector implementing FCKeditor's "QuickUpload" command."""

    def doResponse(self):
        "Main function. Process the request, set headers and return a string as response."
        # Check if this connector is disabled
        if not(Config.Enabled):
            return self.sendUploadResults(1, "This file uploader is disabled. Please check the \"editor/filemanager/connectors/py/config.py\"")
        command = 'QuickUpload'
        # The file type (from the QueryString, by default 'File').
        resourceType = self.request.get('Type','File')
        currentFolder = "/"
        # Check for invalid paths
        if currentFolder is None:
            return self.sendUploadResults(102, '', '', "")
        # Check if it is an allowed command
        if ( not command in Config.ConfigAllowedCommands ):
            return self.sendUploadResults( 1, '', '', 'The %s command isn\'t allowed' % command )
        if ( not resourceType in Config.ConfigAllowedTypes ):
            return self.sendUploadResults( 1, '', '', 'Invalid type specified' )
        # Setup paths
        self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
        self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
        if not self.userFilesFolder: # no absolute path given (dangerous...)
            self.userFilesFolder = mapServerPath(self.environ,
                    self.webUserFilesFolder)
        # Ensure that the directory exists.
        if not os.path.exists(self.userFilesFolder):
            try:
                # fixed: was self.createServerFoldercreateServerFolder(...),
                # a doubled name that always raised AttributeError and so
                # always fell into the error branch below
                self.createServerFolder( self.userFilesFolder )
            except:
                return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
        # File upload doesn't have to return XML, so intercept here
        return self.uploadFile(resourceType, currentFolder)
# Running from command line (plain old CGI)
if __name__ == '__main__':
 try:
  # Create a Connector Instance
  conn = FCKeditorQuickUpload()
  data = conn.doResponse()
  # Emit the accumulated HTTP headers (skipping empty entries),
  # then a blank separator line, then the response body.
  for header in conn.headers:
   if not header is None:
    print '%s: %s' % header
   print
   print data
 except:
  # Any failure is reported back as a plain-text traceback (CGI debugging aid)
  print "Content-Type: text/plain"
  print
  import cgi
  cgi.print_exception()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
 "The base connector class. Subclass it to extend functionality (see Zope example)"
 def __init__(self, environ=None):
  "Parse the incoming request and start with an empty response-header list."
  self.request = FCKeditorRequest(environ) # Parse request
  self.headers = [] # Clean Headers
  # Fall back to the process environment when no WSGI environ dict is given (CGI mode)
  self.environ = environ if environ else os.environ
 # local functions
 def setHeader(self, key, value):
  "Queue one (key, value) response header for later emission."
  self.headers.append((key, value))
  return
class FCKeditorRequest(object):
 "A wrapper around the request object"
 def __init__(self, environ):
  """
  Parse the request fields. Under WSGI the FieldStorage is fed from
  environ['wsgi.input']; under plain CGI it reads stdin/os.environ.
  """
  if environ: # WSGI
   self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
      environ=environ,
      keep_blank_values=1)
   self.environ = environ
  else: # plain old cgi
   self.environ = os.environ
   self.request = cgi.FieldStorage()
  # BUGFIX: always give get_request a default. Previously it was left
  # undefined when REQUEST_METHOD/QUERY_STRING were missing from the
  # environment, so has_key()/get() raised AttributeError.
  self.get_request = {}
  if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
   if self.environ['REQUEST_METHOD'].upper()=='POST':
    # we are in a POST, but GET query_string exists
    # cgi parses by default POST data, so parse GET QUERY_STRING too
    self.get_request = cgi.FieldStorage(fp=None,
      environ={
      'REQUEST_METHOD':'GET',
      'QUERY_STRING':self.environ['QUERY_STRING'],
      },
      )
 def has_key(self, key):
  "True when the field is present in either the POST body or the query string."
  return self.request.has_key(key) or self.get_request.has_key(key)
 def get(self, key, default=None):
  """
  Return a field's value, looking at the POST body first and the GET
  query string second. File-upload fields are returned as-is so callers
  can reach .filename/.file; other fields are reduced to .value.
  """
  if key in self.request.keys():
   field = self.request[key]
  elif key in self.get_request.keys():
   field = self.get_request[key]
  else:
   return default
  if hasattr(field,"filename") and field.filename: #file upload, do not convert return value
   return field
  else:
   return field.value
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recommended)
def App(environ, start_response):
 "WSGI entry point. Dispatch to the matching connector and stream its response."
 script = environ['SCRIPT_NAME']
 if script.endswith("connector.py"):
  handler = FCKeditorConnector(environ)
 elif script.endswith("upload.py"):
  handler = FCKeditorQuickUpload(environ)
 else:
  start_response("200 Ok", [('Content-Type', 'text/html')])
  yield "Unknown page requested: "
  yield script
  return
 try:
  # run the connector, then start the WSGI response and send the body
  body = handler.doResponse()
  start_response("200 Ok", handler.headers)
  yield body
 except:
  # top-level boundary: answer 500 with a cgitb-formatted traceback
  start_response("500 Internal Server Error", [("Content-type", "text/html")])
  buf = StringIO()
  cgitb.Hook(file=buf).handle()
  yield buf.getvalue()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Utility functions for the File Manager Connector for Python
"""
import string, re
import os
import config as Config
# Generic manipulation functions
def removeExtension(fileName):
 "Return the file name with its last extension (text after the final dot) removed."
 return fileName[:fileName.rindex(".")]
def getExtension(fileName):
 "Return the text after the final dot of the file name (without the dot)."
 return fileName[fileName.rindex(".") + 1:]
def removeFromStart(string, char):
 "Strip every leading occurrence of *char* from the string."
 return string.lstrip(char)
def removeFromEnd(string, char):
 "Strip every trailing occurrence of *char* from the string."
 return string.rstrip(char)
# Path functions
def combinePaths( basePath, folder ):
 "Join two path fragments with exactly one '/' between them."
 left = removeFromEnd( basePath, '/' )
 right = removeFromStart( folder, '/' )
 return left + '/' + right
def getFileName(filename):
 "Purpose: helper function to extrapolate the trailing file-name component, handling both '/' and '\\' separators."
 for sep in ("/", "\\"):
  parts = filename.split(sep)
  if len(parts) > 1:
   filename = parts[-1]
 return filename
def sanitizeFolderName( newFolderName ):
 "Do a cleanup of the folder name to avoid possible problems"
 # Remove . \ / | : ? * " < > and control characters (single character
 # class equivalent to the original alternation)
 return re.sub(r'[.\\/|:?*"<>\x00-\x1f\x7f-\x9f]', '_', newFolderName)
def sanitizeFileName( newFileName ):
 "Do a cleanup of the file name to avoid possible problems"
 # Replace dots in the name with underscores (only one dot can be there... security issue).
 if ( Config.ForceSingleExtension ): # remove dots
  newFileName = re.sub ( '\\.(?![^.]*$)', '_', newFileName )
 newFileName = newFileName.replace('\\','/') # convert windows to unix path
 newFileName = os.path.basename (newFileName) # strip directories
 # Remove \ / | : ? * " < > and control characters
 # BUGFIX: the pattern used to end with a stray '/', so control characters
 # were only replaced when followed by a slash (compare sanitizeFolderName).
 return re.sub ( '\\\\|\\/|\\||\\:|\\?|\\*|"|<|>|[\x00-\x1f\x7f-\x9f]', '_', newFileName )
def getCurrentFolder(currentFolder):
 """
 Normalize and validate the requested folder path.

 Ensures a leading and trailing slash, collapses duplicate slashes, and
 returns None when the path contains '..', '\\' or other characters/
 sequences that could escape the user-files tree.
 """
 if not currentFolder:
  currentFolder = '/'
 # Check the current folder syntax (must begin and end with a slash).
 # MODERNIZED: '<>' is Python-2-only syntax; '!=' behaves identically
 # and keeps the module importable on Python 3.
 if (currentFolder[-1] != "/"):
  currentFolder += "/"
 if (currentFolder[0] != "/"):
  currentFolder = "/" + currentFolder
 # Ensure the folder path has no double-slashes
 while '//' in currentFolder:
  currentFolder = currentFolder.replace('//','/')
 # Check for invalid folder paths (..)
 if '..' in currentFolder or '\\' in currentFolder:
  return None
 # Check for other invalid characters / sequences
 if re.search( '(/\\.)|(//)|([\\\\:\\*\\?\\""\\<\\>\\|]|[\x00-\x1F]|[\x7f-\x9f])', currentFolder ):
  return None
 return currentFolder
def mapServerPath( environ, url):
 """
 Emulate the asp Server.mapPath function: given an url path, return the
 physical directory it corresponds to.
 """
 # This isn't correct but for the moment there's no other solution
 # If this script is under a virtual directory or symlink it will detect the problem and stop
 root = getRootPath(environ)
 return combinePaths(root, url)
def mapServerFolder(resourceTypePath, folderPath):
 "Join the resource-type base directory and a folder path with a single '/'."
 return combinePaths(resourceTypePath, folderPath)
def getRootPath(environ):
 """
 Purpose: returns the root path on the server.

 Prefers DOCUMENT_ROOT from the request environment; otherwise derives the
 root by stripping the script's URL path from its real filesystem path, and
 raises an Exception when the two cannot be reconciled (virtual directory,
 symlink or alias).
 """
 # WARNING: this may not be thread safe, and doesn't work w/ VirtualServer/mod_python
 # Use Config.UserFilesAbsolutePath instead
 if 'DOCUMENT_ROOT' in environ: # works for plain dicts and os.environ alike
  return environ['DOCUMENT_ROOT']
 else:
  realPath = os.path.realpath( './' )
  selfPath = environ['SCRIPT_FILENAME']
  selfPath = selfPath [ : selfPath.rfind( '/' ) ]
  selfPath = selfPath.replace( '/', os.path.sep)
  position = realPath.find(selfPath)
  # This can check only that this script isn't run from a virtual dir
  # But it avoids the problems that arise if it isn't checked
  # BUGFIX: removed a stray "raise realPath" statement that unconditionally
  # aborted before the check below could ever run.
  if ( position < 0 or position != len(realPath) - len(selfPath) or realPath[ : position ]==''):
   raise Exception('Sorry, can\'t map "UserFilesPath" to a physical path. You must set the "UserFilesAbsolutePath" value in "editor/filemanager/connectors/py/config.py".')
  return realPath[ : position ]
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
  GetFoldersCommandMixin,
  GetFoldersAndFilesCommandMixin,
  CreateFolderCommandMixin,
  UploadFileCommandMixin,
  BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
 """
 The Standard connector class.

 Dispatches on the Command request field (GetFolders, GetFoldersAndFiles,
 CreateFolder, FileUpload/QuickUpload) after validating the command, the
 resource type and the target folder against the config.py settings.
 """
 def doResponse(self):
  "Main function. Process the request, set headers and return a string as response."
  s = ""
  # Check if this connector is disabled
  if not(Config.Enabled):
   return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
  # Make sure we have valid inputs
  for key in ("Command","Type","CurrentFolder"):
   if not self.request.has_key (key):
    # NOTE(review): replies with an empty body when a required field is
    # missing -- presumably intentional; confirm against the JS client.
    return
  # Get command, resource type and current folder
  command = self.request.get("Command")
  resourceType = self.request.get("Type")
  # getCurrentFolder() returns None for paths that try to escape the tree
  currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
  # Check for invalid paths
  if currentFolder is None:
   if (command == "FileUpload"):
    return self.sendUploadResults( errorNo = 102, customMsg = "" )
   else:
    return self.sendError(102, "")
  # Check if it is an allowed command
  if ( not command in Config.ConfigAllowedCommands ):
   return self.sendError( 1, 'The %s command isn\'t allowed' % command )
  if ( not resourceType in Config.ConfigAllowedTypes ):
   return self.sendError( 1, 'Invalid type specified' )
  # Setup paths: QuickUpload has its own target folders, every other
  # command uses the per-resource-type folders.
  if command == "QuickUpload":
   self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
   self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
  else:
   self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
   self.webUserFilesFolder = Config.FileTypesPath[resourceType]
  if not self.userFilesFolder: # no absolute path given (dangerous...)
   self.userFilesFolder = mapServerPath(self.environ,
     self.webUserFilesFolder)
  # Ensure that the directory exists.
  if not os.path.exists(self.userFilesFolder):
   try:
    self.createServerFolder( self.userFilesFolder )
   except:
    return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
  # File upload doesn't have to return XML, so intercept here
  if (command == "FileUpload"):
   return self.uploadFile(resourceType, currentFolder)
  # Create Url
  url = combinePaths( self.webUserFilesFolder, currentFolder )
  # Begin XML
  s += self.createXmlHeader(command, resourceType, currentFolder, url)
  # Execute the command through the per-command dispatch table
  selector = {"GetFolders": self.getFolders,
    "GetFoldersAndFiles": self.getFoldersAndFiles,
    "CreateFolder": self.createFolder,
    }
  s += selector[command](resourceType, currentFolder)
  s += self.createXmlFooter()
  return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
 try:
  # Create a Connector Instance
  conn = FCKeditorConnector()
  data = conn.doResponse()
  # Emit the accumulated HTTP headers, a blank separator line, then the body
  for header in conn.headers:
   print '%s: %s' % header
  print
  print data
 except:
  # Any failure is reported back as a plain-text traceback (CGI debugging aid)
  print "Content-Type: text/plain"
  print
  import cgi
  cgi.print_exception()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python and Zope.
This code was not tested at all.
It just was ported from pre 2.5 release, so for further reference see
\editor\filemanager\browser\default\connectors\py\connector.py in previous
releases.
"""
from fckutil import *
from connector import *
import config as Config
class FCKeditorConnectorZope(FCKeditorConnector):
 """
 Zope version of FCKeditorConnector.

 Overrides the filesystem-based commands with implementations that store
 folders and files as Zope objects. Per the module docstring this was
 ported untested from a pre-2.5 release; several stale-looking references
 are flagged inline with NOTE(review).
 """
 # Allow access (Zope)
 __allow_access_to_unprotected_subobjects__ = 1
 def __init__(self, context=None):
  """
  Constructor. *context* is the Zope acquisition context; it supplies the
  REQUEST object wrapped by the FCKeditorRequest class below.
  """
  FCKeditorConnector.__init__(self, environ=None) # call superclass constructor
  # Instance Attributes
  self.context = context
  self.request = FCKeditorRequest(context)
 def getZopeRootContext(self):
  # Lazily resolve and cache the Zope physical root.
  # NOTE(review): self.zopeRootContext is read before ever being assigned
  # anywhere visible in this class -- confirm it is initialized elsewhere.
  if self.zopeRootContext is None:
   self.zopeRootContext = self.context.getPhysicalRoot()
  return self.zopeRootContext
 def getZopeUploadContext(self):
  # Walk from the Zope root down the configured upload folder path,
  # caching the resulting context for reuse.
  if self.zopeUploadContext is None:
   folderNames = self.userFilesFolder.split("/")
   c = self.getZopeRootContext()
   for folderName in folderNames:
    if (folderName <> ""):
     c = c[folderName]
   self.zopeUploadContext = c
  return self.zopeUploadContext
 def setHeader(self, key, value):
  # Zope writes headers straight onto the RESPONSE object instead of the
  # self.headers list used by the CGI/WSGI base class.
  self.context.REQUEST.RESPONSE.setHeader(key, value)
 def getFolders(self, resourceType, currentFolder):
  'Build the <Folders> XML node from the Zope "Folder" children.'
  # Open the folders node
  s = ""
  s += """<Folders>"""
  zopeFolder = self.findZopeFolder(resourceType, currentFolder)
  for (name, o) in zopeFolder.objectItems(["Folder"]):
   s += """<Folder name="%s" />""" % (
     convertToXmlAttribute(name)
     )
  # Close the folders node
  s += """</Folders>"""
  return s
 def getZopeFoldersAndFiles(self, resourceType, currentFolder):
  "Concatenate the folder and file listings."
  # NOTE(review): the standard connector dispatches to getFoldersAndFiles,
  # and getZopeFolders is not defined on this class -- verify these names
  # still line up after the port.
  folders = self.getZopeFolders(resourceType, currentFolder)
  files = self.getZopeFiles(resourceType, currentFolder)
  s = folders + files
  return s
 def getZopeFiles(self, resourceType, currentFolder):
  'Build the <Files> XML node from the Zope "File"/"Image" children.'
  # Open the files node
  s = ""
  s += """<Files>"""
  zopeFolder = self.findZopeFolder(resourceType, currentFolder)
  for (name, o) in zopeFolder.objectItems(["File","Image"]):
   s += """<File name="%s" size="%s" />""" % (
     convertToXmlAttribute(name),
     ((o.get_size() / 1024) + 1) # size reported in KB, rounded up
     )
  # Close the files node
  s += """</Files>"""
  return s
 def findZopeFolder(self, resourceType, folderName):
  # returns the context of the resource / folder
  # The resource-type folder is created on demand; intermediate folders of
  # folderName are NOT auto-created (KeyError if missing).
  # NOTE(review): removeFromStart/removeFromEnd are module-level functions
  # in fckutil, not methods -- the self. lookups below will fail; confirm.
  zopeFolder = self.getZopeUploadContext()
  folderName = self.removeFromStart(folderName, "/")
  folderName = self.removeFromEnd(folderName, "/")
  if (resourceType <> ""):
   try:
    zopeFolder = zopeFolder[resourceType]
   except:
    zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=resourceType, title=resourceType)
    zopeFolder = zopeFolder[resourceType]
  if (folderName <> ""):
   folderNames = folderName.split("/")
   for folderName in folderNames:
    zopeFolder = zopeFolder[folderName]
  return zopeFolder
 def createFolder(self, resourceType, currentFolder):
  "Create a new Zope folder named by the request's NewFolderName field."
  # Find out where we are
  zopeFolder = self.findZopeFolder(resourceType, currentFolder)
  errorNo = 0
  errorMsg = ""
  if self.request.has_key("NewFolderName"):
   newFolder = self.request.get("NewFolderName", None)
   zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=newFolder, title=newFolder)
  else:
   errorNo = 102
  return self.sendErrorNode ( errorNo, errorMsg )
 def uploadFile(self, resourceType, currentFolder, count=None):
  """
  Store the uploaded NewFile as a Zope File object. On an id collision the
  upload is retried as "name.<count>.ext" with an increasing count.
  """
  zopeFolder = self.findZopeFolder(resourceType, currentFolder)
  file = self.request.get("NewFile", None)
  # NOTE(review): getFileName/removeExtension/getExtension are module-level
  # functions in fckutil, not methods of this class -- confirm.
  fileName = self.getFileName(file.filename)
  fileNameOnly = self.removeExtension(fileName)
  fileExtension = self.getExtension(fileName).lower()
  if (count):
   nid = "%s.%s.%s" % (fileNameOnly, count, fileExtension)
  else:
   nid = fileName
  title = nid
  try:
   zopeFolder.manage_addProduct['OFSP'].manage_addFile(
    id=nid,
    title=title,
    file=file.read()
    )
  except:
   if (count):
    count += 1
   else:
    count = 1
   # NOTE(review): zopeFileUpload is not defined on this class; this
   # recursion presumably should call self.uploadFile -- confirm.
   return self.zopeFileUpload(resourceType, currentFolder, count)
  return self.sendUploadResults( 0 )
class FCKeditorRequest(object):
 "A thin wrapper exposing the Zope REQUEST object through the connector request API"
 def __init__(self, context=None):
  # Keep a direct reference to the Zope request held by the context
  self.request = context.REQUEST
 def has_key(self, key):
  "True when the field is present in the Zope request."
  return self.request.has_key(key)
 def get(self, key, default=None):
  "Return a request field's value, or *default* when it is missing."
  return self.request.get(key, default)
"""
Running from zope, you will need to modify this connector.
If you have uploaded the FCKeditor into Zope (like me), you need to
move this connector out of Zope, and replace the "connector" with an
alias as below. The key to it is to pass the Zope context in, as
we then have a link to the Zope context.
## Script (Python) "connector.py"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=*args, **kws
##title=ALIAS
##
import Products.zope as connector
return connector.FCKeditorConnectorZope(context=context).doResponse()
"""
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
import os
# On non-Windows platforms msvcrt does not exist and the ImportError is
# deliberately swallowed: text/binary mode only differs on Windows.
try: # Windows needs stdio set for binary mode for file upload to work.
 import msvcrt
 msvcrt.setmode (0, os.O_BINARY) # stdin = 0
 msvcrt.setmode (1, os.O_BINARY) # stdout = 1
except ImportError:
 pass
from fckutil import *
from fckoutput import *
import config as Config
class GetFoldersCommandMixin (object):
 def getFolders(self, resourceType, currentFolder):
  """
  Purpose: command to receive a list of folders

  Builds the <Folders> XML node listing every subdirectory of the
  current folder.
  """
  # Map the virtual path to our local server
  serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
  folderXml = """<Folders>""" # Open the folders node
  for entry in os.listdir(serverPath):
   if os.path.isdir(mapServerFolder(serverPath, entry)):
    folderXml += """<Folder name="%s" />""" % (
      convertToXmlAttribute(entry)
      )
  folderXml += """</Folders>""" # Close the folders node
  return folderXml
class GetFoldersAndFilesCommandMixin (object):
 def getFoldersAndFiles(self, resourceType, currentFolder):
  """
  Purpose: command to receive a list of folders and files

  Builds the <Folders> and <Files> XML nodes for the current folder in a
  single directory scan.
  """
  # Map the virtual path to our local server
  serverPath = mapServerFolder(self.userFilesFolder,currentFolder)
  # Open the folders / files node
  folders = """<Folders>"""
  files = """<Files>"""
  for someObject in os.listdir(serverPath):
   someObjectPath = mapServerFolder(serverPath, someObject)
   if os.path.isdir(someObjectPath):
    folders += """<Folder name="%s" />""" % (
      convertToXmlAttribute(someObject)
      )
   elif os.path.isfile(someObjectPath):
    # Size is reported in KB with a floor of 1 for non-empty files;
    # empty files report 0.
    size = os.path.getsize(someObjectPath)
    if size > 0:
     size = round(size/1024)
     if size < 1:
      size = 1
    files += """<File name="%s" size="%d" />""" % (
      convertToXmlAttribute(someObject),
      size
      )
  # Close the folders / files node
  folders += """</Folders>"""
  files += """</Files>"""
  return folders + files
class CreateFolderCommandMixin (object):
def createFolder(self, resourceType, currentFolder):
"""
Purpose: command to create a new folder
"""
errorNo = 0; errorMsg ='';
if self.request.has_key("NewFolderName"):
newFolder = self.request.get("NewFolderName", None)
newFolder = sanitizeFolderName (newFolder)
try:
newFolderPath = mapServerFolder(self.userFilesFolder, combinePaths(currentFolder, newFolder))
self.createServerFolder(newFolderPath)
except Exception, e:
errorMsg = str(e).decode('iso-8859-1').encode('utf-8') # warning with encodigns!!!
if hasattr(e,'errno'):
if e.errno==17: #file already exists
errorNo=0
elif e.errno==13: # permission denied
errorNo = 103
elif e.errno==36 or e.errno==2 or e.errno==22: # filename too long / no such file / invalid name
errorNo = 102
else:
errorNo = 110
else:
errorNo = 102
return self.sendErrorNode ( errorNo, errorMsg )
def createServerFolder(self, folderPath):
"Purpose: physically creates a folder on the server"
# No need to check if the parent exists, just create all hierachy
try:
permissions = Config.ChmodOnFolderCreate
if not permissions:
os.makedirs(folderPath)
except AttributeError: #ChmodOnFolderCreate undefined
permissions = 0755
if permissions:
oldumask = os.umask(0)
os.makedirs(folderPath,mode=0755)
os.umask( oldumask )
class UploadFileCommandMixin (object):
 def uploadFile(self, resourceType, currentFolder):
  """
  Purpose: command to upload files to server (same as FileUpload)

  Validates the extension against the per-resource-type allow/deny lists,
  writes the file under the current folder (renaming to "name(1).ext",
  "name(2).ext", ... on collisions) and replies through sendUploadResults
  (0 = ok, 201 = renamed, 202 = rejected/failed).
  """
  errorNo = 0
  if self.request.has_key("NewFile"):
   # newFile has all the contents we need
   newFile = self.request.get("NewFile", "")
   # Get the file name
   newFileName = newFile.filename
   newFileName = sanitizeFileName( newFileName )
   newFileNameOnly = removeExtension(newFileName)
   newFileExtension = getExtension(newFileName).lower()
   allowedExtensions = Config.AllowedExtensions[resourceType]
   deniedExtensions = Config.DeniedExtensions[resourceType]
   if (allowedExtensions):
    # Check for allowed: an allow-list takes precedence over the deny-list
    isAllowed = False
    if (newFileExtension in allowedExtensions):
     isAllowed = True
   elif (deniedExtensions):
    # Check for denied
    isAllowed = True
    if (newFileExtension in deniedExtensions):
     isAllowed = False
   else:
    # No extension limitations
    isAllowed = True
   if (isAllowed):
    # Upload to operating system
    # Map the virtual path to the local server path
    currentFolderPath = mapServerFolder(self.userFilesFolder, currentFolder)
    i = 0
    while (True):
     newFilePath = os.path.join (currentFolderPath,newFileName)
     if os.path.exists(newFilePath):
      # Name collision: try the next "name(i).ext" candidate
      i += 1
      newFileName = "%s(%d).%s" % (
        newFileNameOnly, i, newFileExtension
        )
      errorNo= 201 # file renamed
     else:
      # Read file contents and write to the desired path (similar to php's move_uploaded_file)
      fout = file(newFilePath, 'wb')
      while (True):
       chunk = newFile.file.read(100000) # copy in ~100 kB chunks
       if not chunk: break
       fout.write (chunk)
      fout.close()
      if os.path.exists ( newFilePath ):
       # Optionally chmod the new file (ChmodOnUpload config setting)
       doChmod = False
       try:
        doChmod = Config.ChmodOnUpload
        permissions = Config.ChmodOnUpload
       except AttributeError: #ChmodOnUpload undefined
        doChmod = True
        permissions = 0755
       if ( doChmod ):
        oldumask = os.umask(0)
        os.chmod( newFilePath, permissions )
        os.umask( oldumask )
       newFileUrl = combinePaths(self.webUserFilesFolder, currentFolder) + newFileName
       return self.sendUploadResults( errorNo , newFileUrl, newFileName )
      else:
       # Write apparently failed: report a generic upload error
       return self.sendUploadResults( errorNo = 202, customMsg = "" )
  else:
   return self.sendUploadResults( errorNo = 202, customMsg = "No File" )
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
from time import gmtime, strftime
import string
def escape(text, replace=getattr(string, 'replace', None)):
 """
 Convert the special characters '&', '<', '>' and '"' to their HTML
 entities (&amp; &lt; &gt; &quot;) as specified by RFC 1866, so the value
 can be embedded safely in the XML/HTML output.
 """
 if replace is None: # Python 3: string.replace was removed, use the str method
  replace = lambda s, old, new: s.replace(old, new)
 # BUGFIX: the replacement strings had been un-escaped to the literal
 # characters themselves, turning every call into a no-op.
 text = replace(text, '&', '&amp;') # must be done 1st
 text = replace(text, '<', '&lt;')
 text = replace(text, '>', '&gt;')
 text = replace(text, '"', '&quot;')
 return text
def convertToXmlAttribute(value):
 "Return *value* escaped for use inside an XML attribute; None becomes ''."
 return escape("" if value is None else value)
class BaseHttpMixin(object):
 "Mixin that emits the standard no-cache HTTP headers for connector replies."
 def setHttpHeaders(self, content_type='text/xml'):
  "Purpose: to prepare the headers for the xml to return"
  # Anti-caching headers: an Expires date in the past, an always-fresh
  # Last-Modified, the HTTP/1.1 Cache-Control directives and the
  # HTTP/1.0 Pragma, followed by the response content type.
  for name, value in (
    ('Expires', 'Mon, 26 Jul 1997 05:00:00 GMT'),
    ('Last-Modified', strftime("%a, %d %b %Y %H:%M:%S GMT", gmtime())),
    ('Cache-Control', 'no-store, no-cache, must-revalidate'),
    ('Cache-Control', 'post-check=0, pre-check=0'),
    ('Pragma', 'no-cache'),
    ('Content-Type', content_type + '; charset=utf-8'),
    ):
   self.setHeader(name, value)
  return
class BaseXmlMixin(object):
 "Mixin producing the XML responses used by the connector."
 def createXmlHeader(self, command, resourceType, currentFolder, url):
  "Return the XML prolog, the <Connector> opening tag and the <CurrentFolder> node."
  self.setHttpHeaders()
  pieces = [
   """<?xml version="1.0" encoding="utf-8" ?>""",
   """<Connector command="%s" resourceType="%s">""" % (command, resourceType),
   """<CurrentFolder path="%s" url="%s" />""" % (
    convertToXmlAttribute(currentFolder),
    convertToXmlAttribute(url),
    ),
   ]
  return "".join(pieces)
 def createXmlFooter(self):
  "Return the closing </Connector> tag."
  return """</Connector>"""
 def sendError(self, number, text):
  "Return a complete XML document holding a single error node."
  self.setHttpHeaders()
  return ("""<?xml version="1.0" encoding="utf-8" ?>""" +
    """<Connector>""" +
    self.sendErrorNode (number, text) +
    """</Connector>""" )
 def sendErrorNode(self, number, text):
  "Return one <Error> node; the message text is only included for error 1."
  if number == 1:
   return """<Error number="%s" text="%s" />""" % (number, convertToXmlAttribute(text))
  return """<Error number="%s" />""" % (number)
class BaseHtmlMixin(object):
 "Mixin producing the HTML/JS response returned after a file upload."
 def sendUploadResults( self, errorNo = 0, fileUrl = '', fileName = '', customMsg = '' ):
  """
  Send the upload outcome back to the opener page by calling its
  OnUploadCompleted JavaScript callback. The leading script is a minified
  version of the document.domain automatic fix (#1919); the original can
  be found at _dev/domain_fix_template.js
  """
  self.setHttpHeaders("text/html")
  # Double quotes in the interpolated values must be escaped so the
  # generated JavaScript string literals stay well-formed.
  values = {
   'errorNumber': errorNo,
   'fileUrl': fileUrl.replace ('"', '\\"'),
   'fileName': fileName.replace ( '"', '\\"' ) ,
   'customMsg': customMsg.replace ( '"', '\\"' ),
   }
  return """<script type="text/javascript">
(function(){var d=document.domain;while (true){try{var A=window.parent.document.domain;break;}catch(e) {};d=d.replace(/.*?(?:\.|$)/,'');if (d.length==0) break;try{document.domain=d;}catch (e){break;}}})();
window.parent.OnUploadCompleted(%(errorNumber)s,"%(fileUrl)s","%(fileName)s","%(customMsg)s");
</script>""" % values
| Python |
#!/usr/bin/env python
"""
* FCKeditor - The text editor for Internet - http://www.fckeditor.net
* Copyright (C) 2003-2010 Frederico Caldeira Knabben
*
* == BEGIN LICENSE ==
*
* Licensed under the terms of any of the following licenses at your
* choice:
*
* - GNU General Public License Version 2 or later (the "GPL")
* http://www.gnu.org/licenses/gpl.html
*
* - GNU Lesser General Public License Version 2.1 or later (the "LGPL")
* http://www.gnu.org/licenses/lgpl.html
*
* - Mozilla Public License Version 1.1 or later (the "MPL")
* http://www.mozilla.org/MPL/MPL-1.1.html
*
* == END LICENSE ==
*
* Configuration file for the File Manager Connector for Python
"""
# INSTALLATION NOTE: You must set up your server environment accordingly to run
# python scripts. This connector requires Python 2.4 or greater.
#
# Supported operation modes:
# * WSGI (recommended): You'll need apache + mod_python + modpython_gateway
# or any web server capable of the WSGI python standard
# * Plain Old CGI: Any server capable of running standard python scripts
# (although mod_python is recommended for performance)
# This was the previous connector version operation mode
#
# If you're using Apache web server, rename the htaccess.txt file to .htaccess,
# and set the proper options and paths.
# For WSGI and mod_python, you may need to download modpython_gateway from:
# http://projects.amor.org/misc/svn/modpython_gateway.py and copy it in this
# directory.
# SECURITY: You must explicitly enable this "connector". (Set it to "True").
# WARNING: don't just set "ConfigIsEnabled = True", you must be sure that only
# authenticated users can access this file or use some kind of session checking.
Enabled = False
# Path to user files relative to the document root.
UserFilesPath = '/userfiles/'
# Fill the following value if you prefer to specify the absolute path for the
# user files directory. Useful if you are using a virtual directory, symbolic
# link or alias. Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'UserFilesPath' must point to the same directory.
# WARNING: GetRootPath may not work in virtual or mod_python configurations, and
# may not be thread safe. Use this configuration parameter instead.
UserFilesAbsolutePath = ''
# Due to security issues with Apache modules, it is recommended to leave the
# following setting enabled.
ForceSingleExtension = True
# What the user can do with this connector
ConfigAllowedCommands = [ 'QuickUpload', 'FileUpload', 'GetFolders', 'GetFoldersAndFiles', 'CreateFolder' ]
# Allowed Resource Types
ConfigAllowedTypes = ['File', 'Image', 'Flash', 'Media']
# After file is uploaded, sometimes it is required to change its permissions
# so that it was possible to access it at the later time.
# If possible, it is recommended to set more restrictive permissions, like 0755.
# Set to 0 to disable this feature.
# Note: not needed on Windows-based servers.
ChmodOnUpload = 0755
# See comments above.
# Used when creating folders that do not exist.
ChmodOnFolderCreate = 0755
# Do not touch this 3 lines, see "Configuration settings for each Resource Type"
AllowedExtensions = {}; DeniedExtensions = {};
FileTypesPath = {}; FileTypesAbsolutePath = {};
QuickUploadPath = {}; QuickUploadAbsolutePath = {};
# Configuration settings for each Resource Type
#
# - AllowedExtensions: the possible extensions that can be allowed.
# If it is empty then any file type can be uploaded.
# - DeniedExtensions: The extensions that won't be allowed.
# If it is empty then no restrictions are done here.
#
# For a file to be uploaded it has to fulfill both the AllowedExtensions
# and DeniedExtensions (that's it: not being denied) conditions.
#
# - FileTypesPath: the virtual folder relative to the document root where
# these resources will be located.
# Attention: It must start and end with a slash: '/'
#
# - FileTypesAbsolutePath: the physical path to the above folder. It must be
# an absolute path.
# If it's an empty string then it will be autocalculated.
# Useful if you are using a virtual directory, symbolic link or alias.
# Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'FileTypesPath' must point to the same directory.
# Attention: It must end with a slash: '/'
#
#
# - QuickUploadPath: the virtual folder relative to the document root where
# these resources will be uploaded using the Upload tab in the resources
# dialogs.
# Attention: It must start and end with a slash: '/'
#
# - QuickUploadAbsolutePath: the physical path to the above folder. It must be
# an absolute path.
# If it's an empty string then it will be autocalculated.
# Useful if you are using a virtual directory, symbolic link or alias.
# Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'QuickUploadPath' must point to the same directory.
# Attention: It must end with a slash: '/'
AllowedExtensions['File'] = ['7z','aiff','asf','avi','bmp','csv','doc','fla','flv','gif','gz','gzip','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','ods','odt','pdf','png','ppt','pxd','qt','ram','rar','rm','rmi','rmvb','rtf','sdc','sitd','swf','sxc','sxw','tar','tgz','tif','tiff','txt','vsd','wav','wma','wmv','xls','xml','zip']
DeniedExtensions['File'] = []
FileTypesPath['File'] = UserFilesPath + 'file/'
FileTypesAbsolutePath['File'] = (not UserFilesAbsolutePath == '') and (UserFilesAbsolutePath + 'file/') or ''
QuickUploadPath['File'] = FileTypesPath['File']
QuickUploadAbsolutePath['File'] = FileTypesAbsolutePath['File']
AllowedExtensions['Image'] = ['bmp','gif','jpeg','jpg','png']
DeniedExtensions['Image'] = []
FileTypesPath['Image'] = UserFilesPath + 'image/'
FileTypesAbsolutePath['Image'] = (not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'image/' or ''
QuickUploadPath['Image'] = FileTypesPath['Image']
QuickUploadAbsolutePath['Image']= FileTypesAbsolutePath['Image']
AllowedExtensions['Flash'] = ['swf','flv']
DeniedExtensions['Flash'] = []
FileTypesPath['Flash'] = UserFilesPath + 'flash/'
FileTypesAbsolutePath['Flash'] = ( not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'flash/' or ''
QuickUploadPath['Flash'] = FileTypesPath['Flash']
QuickUploadAbsolutePath['Flash']= FileTypesAbsolutePath['Flash']
AllowedExtensions['Media'] = ['aiff','asf','avi','bmp','fla', 'flv','gif','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','png','qt','ram','rm','rmi','rmvb','swf','tif','tiff','wav','wma','wmv']
DeniedExtensions['Media'] = []
FileTypesPath['Media'] = UserFilesPath + 'media/'
FileTypesAbsolutePath['Media'] = ( not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'media/' or ''
QuickUploadPath['Media'] = FileTypesPath['Media']
QuickUploadAbsolutePath['Media']= FileTypesAbsolutePath['Media']
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python and Zope.
This code was not tested at all.
It just was ported from pre 2.5 release, so for further reference see
\editor\filemanager\browser\default\connectors\py\connector.py in previous
releases.
"""
from fckutil import *
from connector import *
import config as Config
class FCKeditorConnectorZope(FCKeditorConnector):
    """
    Zope version of FCKeditorConnector.

    Folders and files are stored as Zope OFS objects (Folders/Files/Images)
    rather than on the filesystem, so all path handling is overridden here.
    """
    # Allow access (Zope): permit publishing of unprotected sub-objects.
    __allow_access_to_unprotected_subobjects__ = 1

    def __init__(self, context=None):
        """
        Constructor.

        context -- the Zope context object; its REQUEST is wrapped so form
        variables can be read through the common FCKeditorRequest API.
        """
        FCKeditorConnector.__init__(self, environ=None) # call superclass constructor
        # Instance Attributes
        self.context = context
        self.request = FCKeditorRequest(context)

    def getZopeRootContext(self):
        # Lazily resolve and cache the Zope physical root.
        # NOTE(review): assumes self.zopeRootContext is pre-initialized to
        # None elsewhere (presumably by the base connector) -- confirm.
        if self.zopeRootContext is None:
            self.zopeRootContext = self.context.getPhysicalRoot()
        return self.zopeRootContext

    def getZopeUploadContext(self):
        # Lazily walk from the Zope root down to the user-files folder and
        # cache the resulting context.
        if self.zopeUploadContext is None:
            folderNames = self.userFilesFolder.split("/")
            c = self.getZopeRootContext()
            for folderName in folderNames:
                if (folderName != ""):
                    c = c[folderName]
            self.zopeUploadContext = c
        return self.zopeUploadContext

    def setHeader(self, key, value):
        # Response headers go straight onto the Zope RESPONSE object.
        self.context.REQUEST.RESPONSE.setHeader(key, value)

    def getFolders(self, resourceType, currentFolder):
        """Return the <Folders> XML fragment listing sub-folders of currentFolder."""
        # Open the folders node
        s = ""
        s += """<Folders>"""
        zopeFolder = self.findZopeFolder(resourceType, currentFolder)
        for (name, o) in zopeFolder.objectItems(["Folder"]):
            s += """<Folder name="%s" />""" % (
                    convertToXmlAttribute(name)
                    )
        # Close the folders node
        s += """</Folders>"""
        return s

    def getZopeFoldersAndFiles(self, resourceType, currentFolder):
        """Return the concatenated <Folders> and <Files> XML fragments."""
        # BUG FIX: the original called self.getZopeFolders(), which does not
        # exist on this class; the folder listing is produced by getFolders().
        folders = self.getFolders(resourceType, currentFolder)
        files = self.getZopeFiles(resourceType, currentFolder)
        s = folders + files
        return s

    def getZopeFiles(self, resourceType, currentFolder):
        """Return the <Files> XML fragment listing File/Image objects."""
        # Open the files node
        s = ""
        s += """<Files>"""
        zopeFolder = self.findZopeFolder(resourceType, currentFolder)
        for (name, o) in zopeFolder.objectItems(["File", "Image"]):
            s += """<File name="%s" size="%s" />""" % (
                    convertToXmlAttribute(name),
                    ((o.get_size() / 1024) + 1)  # size reported in KB, rounded up
                    )
        # Close the files node
        s += """</Files>"""
        return s

    def findZopeFolder(self, resourceType, folderName):
        """Return the Zope context for resourceType/folderName, creating the
        resource-type folder on demand."""
        zopeFolder = self.getZopeUploadContext()
        folderName = self.removeFromStart(folderName, "/")
        folderName = self.removeFromEnd(folderName, "/")
        if (resourceType != ""):
            try:
                zopeFolder = zopeFolder[resourceType]
            except:
                # Resource-type folder missing: create it, then descend.
                zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=resourceType, title=resourceType)
                zopeFolder = zopeFolder[resourceType]
        if (folderName != ""):
            folderNames = folderName.split("/")
            for folderName in folderNames:
                zopeFolder = zopeFolder[folderName]
        return zopeFolder

    def createFolder(self, resourceType, currentFolder):
        """Create the folder named by the "NewFolderName" request field."""
        # Find out where we are
        zopeFolder = self.findZopeFolder(resourceType, currentFolder)
        errorNo = 0
        errorMsg = ""
        if self.request.has_key("NewFolderName"):
            newFolder = self.request.get("NewFolderName", None)
            zopeFolder.manage_addProduct["OFSP"].manage_addFolder(id=newFolder, title=newFolder)
        else:
            errorNo = 102
        return self.sendErrorNode ( errorNo, errorMsg )

    def uploadFile(self, resourceType, currentFolder, count=None):
        """Store the uploaded "NewFile" field as a Zope File object.

        count -- retry suffix appended to the id when a previous attempt
        collided with an existing object (None on the first attempt).
        """
        zopeFolder = self.findZopeFolder(resourceType, currentFolder)
        file = self.request.get("NewFile", None)
        fileName = self.getFileName(file.filename)
        fileNameOnly = self.removeExtension(fileName)
        fileExtension = self.getExtension(fileName).lower()
        if (count):
            nid = "%s.%s.%s" % (fileNameOnly, count, fileExtension)
        else:
            nid = fileName
        title = nid
        try:
            zopeFolder.manage_addProduct['OFSP'].manage_addFile(
                id=nid,
                title=title,
                file=file.read()
                )
        except:
            # Add failed (typically an id collision): retry with the next
            # numeric suffix.
            if (count):
                count += 1
            else:
                count = 1
            # BUG FIX: the original called self.zopeFileUpload(), which does
            # not exist; the retry must recurse into uploadFile() itself.
            return self.uploadFile(resourceType, currentFolder, count)
        return self.sendUploadResults( 0 )
class FCKeditorRequest(object):
    """Thin adapter exposing a Zope REQUEST through a minimal dict-like API."""
    def __init__(self, context=None):
        # Hold a direct reference to the underlying Zope request object.
        self.request = context.REQUEST
    def has_key(self, key):
        # Membership testing is delegated to the wrapped request.
        return self.request.has_key(key)
    def get(self, key, default=None):
        # Lookup is delegated too, with the caller-supplied fallback.
        return self.request.get(key, default)
"""
Running from zope, you will need to modify this connector.
If you have uploaded the FCKeditor into Zope (like me), you need to
move this connector out of Zope, and replace the "connector" with an
alias as below. The key to it is to pass the Zope context in, as
we then have a link to the Zope context.
## Script (Python) "connector.py"
##bind container=container
##bind context=context
##bind namespace=
##bind script=script
##bind subpath=traverse_subpath
##parameters=*args, **kws
##title=ALIAS
##
import Products.zope as connector
return connector.FCKeditorConnectorZope(context=context).doResponse()
"""
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Base Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import cgi, os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
import config as Config
class FCKeditorConnectorBase( object ):
    "The base connector class. Subclass it to extend functionality (see Zope example)"

    def __init__(self, environ=None):
        "Constructor: parse the request fields and initialize instance state."
        self.request = FCKeditorRequest(environ)  # parsed request wrapper
        self.headers = []                         # response headers accumulate here
        # Fall back to the process environment when no WSGI environ is given.
        self.environ = environ or os.environ

    # local functions

    def setHeader(self, key, value):
        # Queue a response header; it is emitted later by the response writer.
        self.headers.append((key, value))
        return
class FCKeditorRequest(object):
    "A wrapper around the request object (cgi.FieldStorage for POST + GET data)"
    def __init__(self, environ):
        """
        Parse the incoming request.

        environ -- WSGI environ dict, or a falsy value to fall back to plain
        old CGI (os.environ + stdin).
        """
        # BUG FIX: always define get_request up front.  The original only
        # assigned it inside the conditional below, so has_key()/get() raised
        # AttributeError whenever REQUEST_METHOD or QUERY_STRING was absent
        # from the environment.
        self.get_request = {}
        if environ: # WSGI
            self.request = cgi.FieldStorage(fp=environ['wsgi.input'],
                            environ=environ,
                            keep_blank_values=1)
            self.environ = environ
        else: # plain old cgi
            self.environ = os.environ
            self.request = cgi.FieldStorage()
        if 'REQUEST_METHOD' in self.environ and 'QUERY_STRING' in self.environ:
            if self.environ['REQUEST_METHOD'].upper() == 'POST':
                # We are in a POST, but a GET query string may exist too.
                # cgi parses only the POST body by default, so parse the GET
                # QUERY_STRING separately.
                self.get_request = cgi.FieldStorage(fp=None,
                            environ={
                            'REQUEST_METHOD': 'GET',
                            'QUERY_STRING': self.environ['QUERY_STRING'],
                            },
                            )
    def has_key(self, key):
        # 'in' works for both dicts and cgi.FieldStorage, and is the
        # forward-compatible spelling of has_key().
        return key in self.request or key in self.get_request
    def get(self, key, default=None):
        """Return the value for key (POST data first, then GET), else default.

        File-upload fields are returned unconverted so .file/.filename stay
        usable; plain fields are reduced to their string value.
        """
        if key in self.request.keys():
            field = self.request[key]
        elif key in self.get_request.keys():
            field = self.get_request[key]
        else:
            return default
        if hasattr(field, "filename") and field.filename: # file upload, do not convert return value
            return field
        else:
            return field.value
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
"""
import os
try: # Windows needs stdio set for binary mode for file upload to work.
import msvcrt
msvcrt.setmode (0, os.O_BINARY) # stdin = 0
msvcrt.setmode (1, os.O_BINARY) # stdout = 1
except ImportError:
pass
from fckutil import *
from fckoutput import *
import config as Config
class GetFoldersCommandMixin (object):
    def getFolders(self, resourceType, currentFolder):
        """
        Purpose: command to receive a list of folders.
        Returns the <Folders> XML fragment listing each sub-directory of the
        current (virtual) folder.
        """
        # Map the virtual path to our local server
        localPath = mapServerFolder(self.userFilesFolder, currentFolder)
        entries = []
        for entry in os.listdir(localPath):
            if os.path.isdir(mapServerFolder(localPath, entry)):
                entries.append("""<Folder name="%s" />""" % (
                        convertToXmlAttribute(entry)
                        ))
        # Wrap the accumulated entries in the <Folders> node.
        return """<Folders>""" + "".join(entries) + """</Folders>"""
class GetFoldersAndFilesCommandMixin (object):
    def getFoldersAndFiles(self, resourceType, currentFolder):
        """
        Purpose: command to receive a list of folders and files.
        Returns the <Folders> fragment followed by the <Files> fragment for
        the current (virtual) folder.
        """
        # Map the virtual path to our local server
        localPath = mapServerFolder(self.userFilesFolder, currentFolder)
        folderItems = []
        fileItems = []
        for entry in os.listdir(localPath):
            entryPath = mapServerFolder(localPath, entry)
            if os.path.isdir(entryPath):
                folderItems.append("""<Folder name="%s" />""" % (
                        convertToXmlAttribute(entry)
                        ))
            elif os.path.isfile(entryPath):
                # Report size in KB, with a minimum of 1 for non-empty files.
                size = os.path.getsize(entryPath)
                if size > 0:
                    size = round(size/1024)
                    if size < 1:
                        size = 1
                fileItems.append("""<File name="%s" size="%d" />""" % (
                        convertToXmlAttribute(entry),
                        size
                        ))
        folders = """<Folders>""" + "".join(folderItems) + """</Folders>"""
        files = """<Files>""" + "".join(fileItems) + """</Files>"""
        return folders + files
class CreateFolderCommandMixin (object):
def createFolder(self, resourceType, currentFolder):
"""
Purpose: command to create a new folder
"""
errorNo = 0; errorMsg ='';
if self.request.has_key("NewFolderName"):
newFolder = self.request.get("NewFolderName", None)
newFolder = sanitizeFolderName (newFolder)
try:
newFolderPath = mapServerFolder(self.userFilesFolder, combinePaths(currentFolder, newFolder))
self.createServerFolder(newFolderPath)
except Exception, e:
errorMsg = str(e).decode('iso-8859-1').encode('utf-8') # warning with encodigns!!!
if hasattr(e,'errno'):
if e.errno==17: #file already exists
errorNo=0
elif e.errno==13: # permission denied
errorNo = 103
elif e.errno==36 or e.errno==2 or e.errno==22: # filename too long / no such file / invalid name
errorNo = 102
else:
errorNo = 110
else:
errorNo = 102
return self.sendErrorNode ( errorNo, errorMsg )
def createServerFolder(self, folderPath):
"Purpose: physically creates a folder on the server"
# No need to check if the parent exists, just create all hierachy
try:
permissions = Config.ChmodOnFolderCreate
if not permissions:
os.makedirs(folderPath)
except AttributeError: #ChmodOnFolderCreate undefined
permissions = 0755
if permissions:
oldumask = os.umask(0)
os.makedirs(folderPath,mode=0755)
os.umask( oldumask )
class UploadFileCommandMixin (object):
    def uploadFile(self, resourceType, currentFolder):
        """
        Purpose: command to upload files to server (same as FileUpload).

        Streams the uploaded "NewFile" form field into the mapped server
        folder.  If a file with the same name already exists the new file is
        written as "name(1).ext", "name(2).ext", ... and errorNo 201 signals
        the rename to the client.  Returns the sendUploadResults() payload.
        """
        errorNo = 0
        if self.request.has_key("NewFile"):
            # newFile has all the contents we need
            newFile = self.request.get("NewFile", "")
            # Get the file name
            newFileName = newFile.filename
            newFileName = sanitizeFileName( newFileName )
            newFileNameOnly = removeExtension(newFileName)
            newFileExtension = getExtension(newFileName).lower()
            allowedExtensions = Config.AllowedExtensions[resourceType]
            deniedExtensions = Config.DeniedExtensions[resourceType]
            if (allowedExtensions):
                # Check for allowed: a non-empty whitelist takes precedence
                # over the blacklist.
                isAllowed = False
                if (newFileExtension in allowedExtensions):
                    isAllowed = True
            elif (deniedExtensions):
                # Check for denied
                isAllowed = True
                if (newFileExtension in deniedExtensions):
                    isAllowed = False
            else:
                # No extension limitations
                isAllowed = True
            if (isAllowed):
                # Upload to operating system
                # Map the virtual path to the local server path
                currentFolderPath = mapServerFolder(self.userFilesFolder, currentFolder)
                i = 0
                while (True):
                    newFilePath = os.path.join (currentFolderPath,newFileName)
                    if os.path.exists(newFilePath):
                        # Name clash: try "name(i).ext" on the next pass.
                        i += 1
                        newFileName = "%s(%d).%s" % (
                                newFileNameOnly, i, newFileExtension
                                )
                        errorNo= 201 # file renamed
                    else:
                        # Read file contents and write to the desired path (similar to php's move_uploaded_file)
                        fout = file(newFilePath, 'wb')  # Python 2 builtin file()
                        while (True):
                            # Copy in ~100 KB chunks to bound memory usage.
                            chunk = newFile.file.read(100000)
                            if not chunk: break
                            fout.write (chunk)
                        fout.close()
                        if os.path.exists ( newFilePath ):
                            doChmod = False
                            try:
                                doChmod = Config.ChmodOnUpload
                                permissions = Config.ChmodOnUpload
                            except AttributeError: #ChmodOnUpload undefined
                                doChmod = True
                                permissions = 0755
                            if ( doChmod ):
                                # Temporarily clear the umask so chmod applies
                                # the configured bits exactly.
                                oldumask = os.umask(0)
                                os.chmod( newFilePath, permissions )
                                os.umask( oldumask )
                        newFileUrl = combinePaths(self.webUserFilesFolder, currentFolder) + newFileName
                        return self.sendUploadResults( errorNo , newFileUrl, newFileName )
            else:
                # Extension rejected by the allow/deny rules.
                return self.sendUploadResults( errorNo = 202, customMsg = "" )
        else:
            # No "NewFile" field present in the request.
            return self.sendUploadResults( errorNo = 202, customMsg = "No File" )
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector/QuickUpload for Python (WSGI wrapper).
See config.py for configuration settings
"""
from connector import FCKeditorConnector
from upload import FCKeditorQuickUpload
import cgitb
from cStringIO import StringIO
# Running from WSGI capable server (recomended)
def App(environ, start_response):
    "WSGI entry point. Run the connector"
    script = environ['SCRIPT_NAME']
    # Dispatch on the script that was requested.
    if script.endswith("connector.py"):
        conn = FCKeditorConnector(environ)
    elif script.endswith("upload.py"):
        conn = FCKeditorQuickUpload(environ)
    else:
        # Not a page we serve: answer 200 with a short diagnostic body.
        start_response ("200 Ok", [('Content-Type','text/html')])
        yield "Unknown page requested: "
        yield script
        return
    try:
        # Run the connector and emit its response.
        data = conn.doResponse()
        start_response ("200 Ok", conn.headers)
        yield data
    except:
        # Render the traceback as an HTML error page via cgitb.
        start_response("500 Internal Server Error",[("Content-type","text/html")])
        buf = StringIO()
        cgitb.Hook(file = buf).handle()
        yield buf.getvalue()
| Python |
#!/usr/bin/env python
"""
FCKeditor - The text editor for Internet - http://www.fckeditor.net
Copyright (C) 2003-2010 Frederico Caldeira Knabben
== BEGIN LICENSE ==
Licensed under the terms of any of the following licenses at your
choice:
- GNU General Public License Version 2 or later (the "GPL")
http://www.gnu.org/licenses/gpl.html
- GNU Lesser General Public License Version 2.1 or later (the "LGPL")
http://www.gnu.org/licenses/lgpl.html
- Mozilla Public License Version 1.1 or later (the "MPL")
http://www.mozilla.org/MPL/MPL-1.1.html
== END LICENSE ==
Connector for Python (CGI and WSGI).
See config.py for configuration settings
"""
import os
from fckutil import *
from fckcommands import * # default command's implementation
from fckoutput import * # base http, xml and html output mixins
from fckconnector import FCKeditorConnectorBase # import base connector
import config as Config
class FCKeditorConnector( FCKeditorConnectorBase,
        GetFoldersCommandMixin,
        GetFoldersAndFilesCommandMixin,
        CreateFolderCommandMixin,
        UploadFileCommandMixin,
        BaseHttpMixin, BaseXmlMixin, BaseHtmlMixin ):
    "The Standard connector class: base connector + command mixins + output mixins."
    def doResponse(self):
        "Main function. Process the request, set headers and return a string as response."
        s = ""
        # Check if this connector is disabled
        if not(Config.Enabled):
            return self.sendError(1, "This connector is disabled. Please check the connector configurations in \"editor/filemanager/connectors/py/config.py\" and try again.")
        # Make sure we have valid inputs; silently produce no output otherwise.
        for key in ("Command","Type","CurrentFolder"):
            if not self.request.has_key (key):
                return
        # Get command, resource type and current folder
        command = self.request.get("Command")
        resourceType = self.request.get("Type")
        currentFolder = getCurrentFolder(self.request.get("CurrentFolder"))
        # Check for invalid paths (getCurrentFolder signals them with None)
        if currentFolder is None:
            if (command == "FileUpload"):
                return self.sendUploadResults( errorNo = 102, customMsg = "" )
            else:
                return self.sendError(102, "")
        # Check if it is an allowed command
        if ( not command in Config.ConfigAllowedCommands ):
            return self.sendError( 1, 'The %s command isn\'t allowed' % command )
        if ( not resourceType in Config.ConfigAllowedTypes ):
            return self.sendError( 1, 'Invalid type specified' )
        # Setup paths: QuickUpload has its own target folders per resource type.
        if command == "QuickUpload":
            self.userFilesFolder = Config.QuickUploadAbsolutePath[resourceType]
            self.webUserFilesFolder = Config.QuickUploadPath[resourceType]
        else:
            self.userFilesFolder = Config.FileTypesAbsolutePath[resourceType]
            self.webUserFilesFolder = Config.FileTypesPath[resourceType]
        if not self.userFilesFolder: # no absolute path given (dangerous...)
            # Derive the physical path from the web path and the server environ.
            self.userFilesFolder = mapServerPath(self.environ,
                    self.webUserFilesFolder)
        # Ensure that the directory exists.
        if not os.path.exists(self.userFilesFolder):
            try:
                self.createServerFolder( self.userFilesFolder )
            except:
                return self.sendError(1, "This connector couldn\'t access to local user\'s files directories. Please check the UserFilesAbsolutePath in \"editor/filemanager/connectors/py/config.py\" and try again. ")
        # File upload doesn't have to return XML, so intercept here
        if (command == "FileUpload"):
            return self.uploadFile(resourceType, currentFolder)
        # Create Url
        url = combinePaths( self.webUserFilesFolder, currentFolder )
        # Begin XML
        s += self.createXmlHeader(command, resourceType, currentFolder, url)
        # Execute the command via the mixin method that implements it.
        selector = {"GetFolders": self.getFolders,
                "GetFoldersAndFiles": self.getFoldersAndFiles,
                "CreateFolder": self.createFolder,
                }
        s += selector[command](resourceType, currentFolder)
        s += self.createXmlFooter()
        return s
# Running from command line (plain old CGI)
if __name__ == '__main__':
    try:
        # Create a Connector Instance and emit headers + body to stdout
        # per the classic CGI protocol (Python 2 print statements).
        conn = FCKeditorConnector()
        data = conn.doResponse()
        for header in conn.headers:
            print '%s: %s' % header
        print
        print data
    except:
        # On any failure fall back to a plain-text traceback so the browser
        # shows something useful instead of an empty 500.
        print "Content-Type: text/plain"
        print
        import cgi
        cgi.print_exception()
| Python |
#!/usr/bin/env python
"""
* FCKeditor - The text editor for Internet - http://www.fckeditor.net
* Copyright (C) 2003-2010 Frederico Caldeira Knabben
*
* == BEGIN LICENSE ==
*
* Licensed under the terms of any of the following licenses at your
* choice:
*
* - GNU General Public License Version 2 or later (the "GPL")
* http://www.gnu.org/licenses/gpl.html
*
* - GNU Lesser General Public License Version 2.1 or later (the "LGPL")
* http://www.gnu.org/licenses/lgpl.html
*
* - Mozilla Public License Version 1.1 or later (the "MPL")
* http://www.mozilla.org/MPL/MPL-1.1.html
*
* == END LICENSE ==
*
* Configuration file for the File Manager Connector for Python
"""
# INSTALLATION NOTE: You must set up your server environment accordingly to run
# python scripts. This connector requires Python 2.4 or greater.
#
# Supported operation modes:
#  * WSGI (recommended): You'll need apache + mod_python + modpython_gateway
#                        or any web server capable of the WSGI python standard
#  * Plain Old CGI: Any server capable of running standard python scripts
#                   (although mod_python is recommended for performance)
#                   This was the previous connector version operation mode
#
# If you're using Apache web server, rename the htaccess.txt to .htaccess,
# and set the proper options and paths.
# For WSGI and mod_python, you may need to download modpython_gateway from:
# http://projects.amor.org/misc/svn/modpython_gateway.py and copy it in this
# directory.
# SECURITY: You must explicitly enable this "connector". (Set it to "True").
# WARNING: don't just set "ConfigIsEnabled = True", you must be sure that only
# authenticated users can access this file or use some kind of session checking.
Enabled = False
# Path to user files relative to the document root.
UserFilesPath = '/userfiles/'
# Fill the following value if you prefer to specify the absolute path for the
# user files directory. Useful if you are using a virtual directory, symbolic
# link or alias. Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
# Attention: The above 'UserFilesPath' must point to the same directory.
# WARNING: GetRootPath may not work in virtual or mod_python configurations, and
# may not be thread safe. Use this configuration parameter instead.
UserFilesAbsolutePath = ''
# Due to security issues with Apache modules, it is recommended to leave the
# following setting enabled.
ForceSingleExtension = True
# What the user can do with this connector
ConfigAllowedCommands = [ 'QuickUpload', 'FileUpload', 'GetFolders', 'GetFoldersAndFiles', 'CreateFolder' ]
# Allowed Resource Types
ConfigAllowedTypes = ['File', 'Image', 'Flash', 'Media']
# After a file is uploaded, sometimes it is required to change its permissions
# so that it is possible to access it at a later time.
# If possible, it is recommended to set more restrictive permissions, like 0755.
# Set to 0 to disable this feature.
# Note: not needed on Windows-based servers.
# NOTE: 0755 is a Python 2 octal literal (spelled 0o755 in Python 2.6+/3).
ChmodOnUpload = 0755
# See comments above.
# Used when creating folders that do not exist.
ChmodOnFolderCreate = 0755
# Do not touch these 3 lines, see "Configuration settings for each Resource Type"
AllowedExtensions = {}; DeniedExtensions = {};
FileTypesPath = {}; FileTypesAbsolutePath = {};
QuickUploadPath = {}; QuickUploadAbsolutePath = {};
# Configuration settings for each Resource Type
#
# - AllowedExtensions: the possible extensions that can be allowed.
#     If it is empty then any file type can be uploaded.
# - DeniedExtensions: The extensions that won't be allowed.
#     If it is empty then no restrictions are done here.
#
# For a file to be uploaded it has to fulfill both the AllowedExtensions
# and DeniedExtensions (that is: not being denied) conditions.
#
# - FileTypesPath: the virtual folder relative to the document root where
#     these resources will be located.
#     Attention: It must start and end with a slash: '/'
#
# - FileTypesAbsolutePath: the physical path to the above folder. It must be
#     an absolute path.
#     If it's an empty string then it will be autocalculated.
#     Useful if you are using a virtual directory, symbolic link or alias.
#     Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
#     Attention: The above 'FileTypesPath' must point to the same directory.
#     Attention: It must end with a slash: '/'
#
#
# - QuickUploadPath: the virtual folder relative to the document root where
#     these resources will be uploaded using the Upload tab in the resources
#     dialogs.
#     Attention: It must start and end with a slash: '/'
#
# - QuickUploadAbsolutePath: the physical path to the above folder. It must be
#     an absolute path.
#     If it's an empty string then it will be autocalculated.
#     Useful if you are using a virtual directory, symbolic link or alias.
#     Examples: 'C:\\MySite\\userfiles\\' or '/root/mysite/userfiles/'.
#     Attention: The above 'QuickUploadPath' must point to the same directory.
#     Attention: It must end with a slash: '/'
AllowedExtensions['File'] = ['7z','aiff','asf','avi','bmp','csv','doc','fla','flv','gif','gz','gzip','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','ods','odt','pdf','png','ppt','pxd','qt','ram','rar','rm','rmi','rmvb','rtf','sdc','sitd','swf','sxc','sxw','tar','tgz','tif','tiff','txt','vsd','wav','wma','wmv','xls','xml','zip']
DeniedExtensions['File'] = []
FileTypesPath['File'] = UserFilesPath + 'file/'
FileTypesAbsolutePath['File'] = (not UserFilesAbsolutePath == '') and (UserFilesAbsolutePath + 'file/') or ''
QuickUploadPath['File'] = FileTypesPath['File']
QuickUploadAbsolutePath['File'] = FileTypesAbsolutePath['File']
AllowedExtensions['Image'] = ['bmp','gif','jpeg','jpg','png']
DeniedExtensions['Image'] = []
FileTypesPath['Image'] = UserFilesPath + 'image/'
FileTypesAbsolutePath['Image'] = (not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'image/' or ''
QuickUploadPath['Image'] = FileTypesPath['Image']
QuickUploadAbsolutePath['Image']= FileTypesAbsolutePath['Image']
AllowedExtensions['Flash'] = ['swf','flv']
DeniedExtensions['Flash'] = []
FileTypesPath['Flash'] = UserFilesPath + 'flash/'
FileTypesAbsolutePath['Flash'] = ( not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'flash/' or ''
QuickUploadPath['Flash'] = FileTypesPath['Flash']
QuickUploadAbsolutePath['Flash']= FileTypesAbsolutePath['Flash']
AllowedExtensions['Media'] = ['aiff','asf','avi','bmp','fla', 'flv','gif','jpeg','jpg','mid','mov','mp3','mp4','mpc','mpeg','mpg','png','qt','ram','rm','rmi','rmvb','swf','tif','tiff','wav','wma','wmv']
DeniedExtensions['Media'] = []
FileTypesPath['Media'] = UserFilesPath + 'media/'
FileTypesAbsolutePath['Media'] = ( not UserFilesAbsolutePath == '') and UserFilesAbsolutePath + 'media/' or ''
QuickUploadPath['Media'] = FileTypesPath['Media']
QuickUploadAbsolutePath['Media']= FileTypesAbsolutePath['Media']
| Python |
#!/usr/bin/python2.6
#
# Simple http server to emulate api.playfoursquare.com
import logging
import shutil
import sys
import urlparse
import SimpleHTTPServer
import BaseHTTPServer
class RequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
  """Handle playfoursquare.com requests, for testing."""

  # Canned capture files served as responses, keyed by request path.
  _RESPONSE_PATHS = {
      '/v1/venue': '../captures/api/v1/venue.xml',
      '/v1/addvenue': '../captures/api/v1/venue.xml',
      '/v1/venues': '../captures/api/v1/venues.xml',
      '/v1/user': '../captures/api/v1/user.xml',
      '/v1/checkcity': '../captures/api/v1/checkcity.xml',
      '/v1/checkins': '../captures/api/v1/checkins.xml',
      '/v1/cities': '../captures/api/v1/cities.xml',
      '/v1/switchcity': '../captures/api/v1/switchcity.xml',
      '/v1/tips': '../captures/api/v1/tips.xml',
      '/v1/checkin': '../captures/api/v1/checkin.xml',
      '/history/12345.rss': '../captures/api/v1/feed.xml',
  }

  def do_GET(self):
    """Serve the canned XML capture matching the requested API path."""
    logging.warn('do_GET: %s, %s', self.command, self.path)
    url = urlparse.urlparse(self.path)
    logging.warn('do_GET: %s', url)
    # BUG FIX: the original computed
    #   query_keys = [pair[0] for pair in urlparse.parse_qs(url.query)]
    # which iterates the dict's *keys* and therefore collected only the first
    # character of each parameter name; the value was never used, so the
    # broken (and dead) code is dropped entirely.
    response = self.handle_url(url)
    if response != None:
      self.send_200()
      shutil.copyfileobj(response, self.wfile)
      self.wfile.close()

  # POSTs are handled exactly like GETs by this test double.
  do_POST = do_GET

  def handle_url(self, url):
    """Return an open file with the canned response, or send a 404."""
    path = self._RESPONSE_PATHS.get(url.path)
    if path is None:
      self.send_error(404)
    else:
      logging.warn('Using: %s' % path)
      return open(path)

  def send_200(self):
    # Minimal success header block; every capture is served as XML.
    self.send_response(200)
    self.send_header('Content-type', 'text/xml')
    self.end_headers()
def main():
  # Port may be given as the first CLI argument; default to 8080.
  if len(sys.argv) > 1:
    port = int(sys.argv[1])
  else:
    port = 8080
  server_address = ('0.0.0.0', port)  # bind on all interfaces
  httpd = BaseHTTPServer.HTTPServer(server_address, RequestHandler)
  sa = httpd.socket.getsockname()
  # Python 2 print statement announcing the bound address.
  print "Serving HTTP on", sa[0], "port", sa[1], "..."
  httpd.serve_forever()  # blocks until the process is interrupted
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/python
import os
import subprocess
import sys
# Output location for generated Java sources and input location for captures.
BASEDIR = '../main/src/com/joelapenna/foursquare'
TYPESDIR = '../captures/types/v1'
# Capture files may be listed on the command line; default to all of them.
captures = sys.argv[1:]
if not captures:
  captures = os.listdir(TYPESDIR)
for f in captures:
  # e.g. "venue_details.xml" -> basename "venue_details" -> class "VenueDetails"
  basename = f.split('.')[0]
  javaname = ''.join([c.capitalize() for c in basename.split('_')])
  fullpath = os.path.join(TYPESDIR, f)
  typepath = os.path.join(BASEDIR, 'types', javaname + '.java')
  parserpath = os.path.join(BASEDIR, 'parsers', javaname + 'Parser.java')
  # NOTE(review): shell=True with interpolated paths is shell-injection prone
  # if capture filenames contain metacharacters; acceptable for a trusted
  # developer script, but do not reuse this pattern on untrusted input.
  cmd = 'python gen_class.py %s > %s' % (fullpath, typepath)
  print cmd
  subprocess.call(cmd, stdout=sys.stdout, shell=True)
  cmd = 'python gen_parser.py %s > %s' % (fullpath, parserpath)
  print cmd
  subprocess.call(cmd, stdout=sys.stdout, shell=True)
| Python |
#!/usr/bin/python
"""
Pull a oAuth protected page from foursquare.
Expects ~/.oget to contain (one on each line):
CONSUMER_KEY
CONSUMER_KEY_SECRET
USERNAME
PASSWORD
Don't forget to chmod 600 the file!
"""
import httplib
import os
import re
import sys
import urllib
import urllib2
import urlparse
import user
from xml.dom import pulldom
from xml.dom import minidom
import oauth
"""From: http://groups.google.com/group/foursquare-api/web/oauth
@consumer = OAuth::Consumer.new("consumer_token","consumer_secret", {
:site => "http://foursquare.com",
:scheme => :header,
:http_method => :post,
:request_token_path => "/oauth/request_token",
:access_token_path => "/oauth/access_token",
:authorize_path => "/oauth/authorize"
})
"""
# Foursquare API endpoint host (plain HTTP on port 80).
SERVER = 'api.foursquare.com:80'
# Header required for OAuth form-encoded POST bodies.
CONTENT_TYPE_HEADER = {'Content-Type' :'application/x-www-form-urlencoded'}
# HMAC-SHA1 is the signature scheme foursquare's OAuth endpoint expects.
SIGNATURE_METHOD = oauth.OAuthSignatureMethod_HMAC_SHA1()
# Endpoint that exchanges a username/password for an OAuth access token.
AUTHEXCHANGE_URL = 'http://api.foursquare.com/v1/authexchange'
def parse_auth_response(auth_response):
    """Extract (token, secret) from an authexchange XML response body.

    Raises AttributeError if either tag is missing (re.search -> None),
    matching the original behavior.
    """
    token_match = re.search('<oauth_token>(.*)</oauth_token>', auth_response)
    secret_match = re.search('<oauth_token_secret>(.*)</oauth_token_secret>',
                             auth_response)
    return (token_match.group(1), secret_match.group(1))
def create_signed_oauth_request(username, password, consumer):
    """Build and HMAC-SHA1-sign an authexchange POST request.

    The foursquare credentials travel as form parameters; no request
    token is involved (token=None), per foursquare's authexchange flow.
    """
    oauth_request = oauth.OAuthRequest.from_consumer_and_token(
        consumer, http_method='POST', http_url=AUTHEXCHANGE_URL,
        parameters=dict(fs_username=username, fs_password=password))
    oauth_request.sign_request(SIGNATURE_METHOD, consumer, None)
    return oauth_request
def main():
    """Fetch the OAuth-protected URL given in argv[1] and print the body.

    Credentials come from ~/.oget: 4 lines (consumer key, consumer
    secret, username, password) on first run; after a successful token
    exchange the file is rewritten with 6 lines, caching the access
    token and its secret.
    """
    url = urlparse.urlparse(sys.argv[1])
    # Nevermind that the query can have repeated keys.
    parameters = dict(urlparse.parse_qsl(url.query))
    password_file = open(os.path.join(user.home, '.oget'))
    lines = [line.strip() for line in password_file.readlines()]
    if len(lines) == 4:
        # No cached token yet; the authexchange below will obtain one.
        cons_key, cons_key_secret, username, password = lines
        access_token = None
    else:
        # 6-line form: token/secret were cached by a previous run.
        cons_key, cons_key_secret, username, password, token, secret = lines
        access_token = oauth.OAuthToken(token, secret)
    consumer = oauth.OAuthConsumer(cons_key, cons_key_secret)
    if not access_token:
        # Exchange username/password for an access token, then cache it
        # back into ~/.oget alongside the original four lines.
        oauth_request = create_signed_oauth_request(username, password, consumer)
        connection = httplib.HTTPConnection(SERVER)
        headers = {'Content-Type' :'application/x-www-form-urlencoded'}
        connection.request(oauth_request.http_method, AUTHEXCHANGE_URL,
            body=oauth_request.to_postdata(), headers=headers)
        auth_response = connection.getresponse().read()
        # token is a (oauth_token, oauth_token_secret) tuple.
        token = parse_auth_response(auth_response)
        access_token = oauth.OAuthToken(*token)
        open(os.path.join(user.home, '.oget'), 'w').write('\n'.join((
            cons_key, cons_key_secret, username, password, token[0], token[1])))
    # Sign and issue the actual request (always POST) with the access token.
    oauth_request = oauth.OAuthRequest.from_consumer_and_token(consumer,
        access_token, http_method='POST', http_url=url.geturl(),
        parameters=parameters)
    oauth_request.sign_request(SIGNATURE_METHOD, consumer, access_token)
    connection = httplib.HTTPConnection(SERVER)
    connection.request(oauth_request.http_method, oauth_request.to_url(),
        body=oauth_request.to_postdata(), headers=CONTENT_TYPE_HEADER)
    print connection.getresponse().read()
    #print minidom.parse(connection.getresponse()).toprettyxml(indent=' ')


if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/python
import datetime
import sys
import textwrap
import common
from xml.dom import pulldom
# Template for one generated XxxParser.java file. %(stanzas)s is filled
# with the per-field "} else if" chain produced in GenerateClass; the
# other keys come from Replacements().
PARSER = """\
/**
* Copyright 2009 Joe LaPenna
*/
package com.joelapenna.foursquare.parsers;
import com.joelapenna.foursquare.Foursquare;
import com.joelapenna.foursquare.error.FoursquareError;
import com.joelapenna.foursquare.error.FoursquareParseException;
import com.joelapenna.foursquare.types.%(type_name)s;
import org.xmlpull.v1.XmlPullParser;
import org.xmlpull.v1.XmlPullParserException;
import java.io.IOException;
import java.util.logging.Level;
import java.util.logging.Logger;
/**
* Auto-generated: %(timestamp)s
*
* @author Joe LaPenna (joe@joelapenna.com)
* @param <T>
*/
public class %(type_name)sParser extends AbstractParser<%(type_name)s> {
private static final Logger LOG = Logger.getLogger(%(type_name)sParser.class.getCanonicalName());
private static final boolean DEBUG = Foursquare.PARSER_DEBUG;
@Override
public %(type_name)s parseInner(XmlPullParser parser) throws XmlPullParserException, IOException,
FoursquareError, FoursquareParseException {
parser.require(XmlPullParser.START_TAG, null, null);
%(type_name)s %(top_node_name)s = new %(type_name)s();
while (parser.nextTag() == XmlPullParser.START_TAG) {
String name = parser.getName();
%(stanzas)s
} else {
// Consume something we don't understand.
if (DEBUG) LOG.log(Level.FINE, "Found tag that we don't recognize: " + name);
skipSubTree(parser);
}
}
return %(top_node_name)s;
}
}"""
# Stanza for boolean-typed fields: parse the text into a Boolean.
BOOLEAN_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(Boolean.valueOf(parser.nextText()));
"""
# Stanza for <group> fields: delegate to GroupParser with a sub-parser.
GROUP_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new GroupParser(new %(sub_parser_camel_case)s()).parse(parser));
"""
# Stanza for nested complex types that have their own parser class.
COMPLEX_STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(new %(parser_name)s().parse(parser));
"""
# Default stanza: plain string field.
STANZA = """\
} else if ("%(name)s".equals(name)) {
%(top_node_name)s.set%(camel_name)s(parser.nextText());
"""
def main():
    """Generate a parser class for the capture XML file named in argv[1]."""
    capture_path = sys.argv[1]
    type_name, top_node_name, attributes = common.WalkNodesForAttributes(
        capture_path)
    GenerateClass(type_name, top_node_name, attributes)
def GenerateClass(type_name, top_node_name, attributes):
"""generate it.
type_name: the type of object the parser returns
top_node_name: the name of the object the parser returns.
per common.WalkNodsForAttributes
"""
stanzas = []
for name in sorted(attributes):
typ, children = attributes[name]
replacements = Replacements(top_node_name, name, typ, children)
if typ == common.BOOLEAN:
stanzas.append(BOOLEAN_STANZA % replacements)
elif typ == common.GROUP:
stanzas.append(GROUP_STANZA % replacements)
elif typ in common.COMPLEX:
stanzas.append(COMPLEX_STANZA % replacements)
else:
stanzas.append(STANZA % replacements)
if stanzas:
# pop off the extranious } else for the first conditional stanza.
stanzas[0] = stanzas[0].replace('} else ', '', 1)
replacements = Replacements(top_node_name, name, typ, [None])
replacements['stanzas'] = '\n'.join(stanzas).strip()
print PARSER % replacements
def Replacements(top_node_name, name, typ, children):
    """Build the template-substitution dict for one XML attribute."""
    def to_camel(snake):
        # some_name -> SomeName
        return ''.join(word.capitalize() for word in snake.split('_'))

    type_name = to_camel(top_node_name)          # class name of the type
    camel_name = to_camel(name)                  # CamelCase field name
    attribute_name = camel_name.lower().capitalize()
    field_name = 'm' + camel_name                # mFieldName
    if children[0]:
        # Explicit child type wins.
        sub_parser_camel_case = children[0] + 'Parser'
    else:
        # Heuristic: chop the trailing character (plural 's') off the name.
        sub_parser_camel_case = camel_name[:-1] + 'Parser'
    return {
        'type_name': type_name,
        'name': name,
        'top_node_name': top_node_name,
        'camel_name': camel_name,
        'parser_name': typ + 'Parser',
        'attribute_name': attribute_name,
        'field_name': field_name,
        'typ': typ,
        'timestamp': datetime.datetime.now(),
        'sub_parser_camel_case': sub_parser_camel_case,
        'sub_type': children[0]
    }
if __name__ == '__main__':
    # Script entry point: gen_parser.py <capture.xml> > XxxParser.java
    main()
| Python |
#!/usr/bin/python
import logging
from xml.dom import minidom
from xml.dom import pulldom
# Type markers assigned to attributes by WalkNodesForAttributes.
BOOLEAN = "boolean"
STRING = "String"
GROUP = "Group"
# Interfaces that all FoursquareTypes implement.
DEFAULT_INTERFACES = ['FoursquareType']
# Interfaces that specific FoursqureTypes implement.
INTERFACES = {
}
DEFAULT_CLASS_IMPORTS = [
]
# Extra imports for specific generated classes (all currently disabled).
CLASS_IMPORTS = {
    # 'Checkin': DEFAULT_CLASS_IMPORTS + [
    # 'import com.joelapenna.foursquare.filters.VenueFilterable'
    # ],
    # 'Venue': DEFAULT_CLASS_IMPORTS + [
    # 'import com.joelapenna.foursquare.filters.VenueFilterable'
    # ],
    # 'Tip': DEFAULT_CLASS_IMPORTS + [
    # 'import com.joelapenna.foursquare.filters.VenueFilterable'
    # ],
}
# Java types that have their own dedicated parser classes; their XML
# subtrees are skipped (not walked attribute-by-attribute).
COMPLEX = [
    'Group',
    'Badge',
    'Beenhere',
    'Checkin',
    'CheckinResponse',
    'City',
    'Credentials',
    'Data',
    'Mayor',
    'Rank',
    'Score',
    'Scoring',
    'Settings',
    'Stats',
    'Tags',
    'Tip',
    'User',
    'Venue',
]
# All recognized 'type' attribute values; anything else becomes STRING.
TYPES = COMPLEX + ['boolean']
def WalkNodesForAttributes(path):
    """Parse the xml file getting all attributes.
    <venue>
    <attribute>value</attribute>
    </venue>
    Returns:
    type_name - The java-style name the top node will have. "Venue"
    top_node_name - unadultured name of the xml stanza, probably the type of
    java class we're creating. "venue"
    attributes - {'attribute': 'value'}
    """
    doc = pulldom.parse(path)
    type_name = None
    top_node_name = None
    attributes = {}
    # Subtree-skip depth: while > 0 we are inside a COMPLEX element whose
    # children must not be recorded as attributes.
    level = 0
    for event, node in doc:
        # For skipping parts of a tree.
        if level > 0:
            if event == pulldom.END_ELEMENT:
                level-=1
                logging.warn('(%s) Skip end: %s' % (str(level), node))
                continue
            elif event == pulldom.START_ELEMENT:
                logging.warn('(%s) Skipping: %s' % (str(level), node))
                level+=1
                continue
        if event == pulldom.START_ELEMENT:
            logging.warn('Parsing: ' + node.tagName)
            # Get the type name to use.
            if type_name is None:
                # First element seen is the document root; derive the Java
                # class name from its snake_case tag name.
                type_name = ''.join([word.capitalize()
                                     for word in node.tagName.split('_')])
                top_node_name = node.tagName
                logging.warn('Found Top Node Name: ' + top_node_name)
                continue
            typ = node.getAttribute('type')
            child = node.getAttribute('child')
            # We don't want to walk complex types.
            if typ in COMPLEX:
                logging.warn('Found Complex: ' + node.tagName)
                # Record the attribute below, then skip its whole subtree
                # starting with this element's own END_ELEMENT.
                level = 1
            elif typ not in TYPES:
                logging.warn('Found String: ' + typ)
                typ = STRING
            else:
                logging.warn('Found Type: ' + typ)
            logging.warn('Adding: ' + str((node, typ)))
            # setdefault: only the first occurrence of a tag name wins.
            attributes.setdefault(node.tagName, (typ, [child]))
    logging.warn('Attr: ' + str((type_name, top_node_name, attributes)))
    return type_name, top_node_name, attributes
| Python |
#!/usr/bin/python
# Standard Django management entry point (pre-1.4 execute_manager style).
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
| Python |
#!/usr/bin/python
# Standard Django management entry point (pre-1.4 execute_manager style).
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
| Python |
#!/usr/bin/env python
import os

# Convenience wrapper: launch the Django development server on
# localhost:8000 via the shell.
os.system("python manage.py runserver 127.0.0.1:8000")
| Python |
from django.conf.urls.defaults import *

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # REST API endpoints (django-piston handlers).
    url(r'^api/', include('fluffycentral.api.urls')),
    # Uncomment the admin/doc line below to enable admin documentation:
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    (r'^admin/', include(admin.site.urls)),
)

from django.conf import settings

# Serve uploaded media straight from Django in development only.
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^site_media/(?P<path>.*)$', 'django.views.static.serve', {
            'document_root': settings.MEDIA_ROOT,
            'show_indexes': True
        }))
| Python |
#!/usr/bin/env python
import os

# Convenience wrapper: launch the Django development server on
# localhost:8000 via the shell.
os.system("python manage.py runserver 127.0.0.1:8000")
| Python |
# Django settings for fluffycentral project.
import os

# Absolute path of the directory holding this settings file; used to
# anchor every filesystem path below so the project is relocatable.
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': os.path.join(PROJECT_ROOT, 'baza.db'), # Or path to database file if using sqlite3.
        'USER': '', # Not used with sqlite3.
        'PASSWORD': '', # Not used with sqlite3.
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'site_media')

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'

# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'

# Make this unique, and don't share it with anybody.
# NOTE(review): this key is committed to source control; rotate it and
# load from the environment before any production deployment.
SECRET_KEY = '_q=4)d=wb%s&_^bwyd%rc1rn2103gsdi!74ip-@lggce(5xtk3'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'fluffycentral.urls'
# BUGFIX: the trailing comma is required -- without it the parentheses are
# mere grouping and TEMPLATE_DIRS was a single *string*, which Django then
# iterated character by character instead of as a tuple of paths.
TEMPLATE_DIRS = (
    os.path.join(PROJECT_ROOT, 'templates'),
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
# Make bundled third-party apps under lib/ importable.
EXTERNAL_APPS_PATH = os.path.join(PROJECT_ROOT, "lib")
import sys
sys.path.append(EXTERNAL_APPS_PATH)

INSTALLED_APPS = (
    'repository',
    'piston',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.admin',
    'django.contrib.admindocs',
)

# Security
import security
# Server key pair generated at settings-import time; the public half is
# exposed via CRPubKey. NOTE(review): this regenerates the keys on every
# process start -- confirm that is intended.
CRKey = security.generateKeys()
CRPubKey = security.getPublicKey(CRKey)
| Python |
from django.db import models
class ServiceProvider(models.Model):
    """A registered service-provider node that hosts files."""
    name = models.CharField(max_length=200, unique=True)
    # Network address of the provider, as supplied at registration time.
    address = models.CharField(max_length=20)
    # String form of the provider's public key (set by the make-cert API).
    pubkey = models.CharField(max_length=2000)

    def __unicode__(self):
        return self.name
class File(models.Model):
    """A file published by a service provider."""
    name = models.CharField(max_length=100, unique=True)
    author = models.CharField(max_length=100)
    description = models.CharField(max_length=200)
    # The provider that hosts this file.
    sp = models.ForeignKey(ServiceProvider)

    def __unicode__(self):
        return self.name
| Python |
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    """Boilerplate sanity test generated by startapp."""

    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # assertEqual replaces failUnlessEqual, which is a deprecated
        # alias (emits DeprecationWarning since Python 2.7).
        self.assertEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
# Expose the repository models in the Django admin with default options.
from repository.models import ServiceProvider, File
from django.contrib import admin

admin.site.register(ServiceProvider)
admin.site.register(File)
| Python |
# Create your views here.
| Python |
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from piston.handler import AnonymousBaseHandler, BaseHandler
from piston.utils import rc
import security
from fluffycentral.repository.models import ServiceProvider, File
from fluffycentral.settings import CRKey, CRPubKey
try:
import simplejson as json
except ImportError:
import json
class CatchAndReturn(object):
    """Decorator factory mapping an exception type to a canned result.

    CatchAndReturn(Err, resp)(fn) yields a callable that returns `resp`
    whenever `fn` raises `Err`, and fn's result otherwise.
    """

    def __init__(self, err, response):
        self.err = err
        self.response = response

    def __call__(self, fn):
        catch, fallback = self.err, self.response

        def wrapper(*args, **kwargs):
            try:
                result = fn(*args, **kwargs)
            except catch:
                result = fallback
            return result

        return wrapper
# http://domain/api/sp-list
class SPListHandler(AnonymousBaseHandler):
allowed_methods = ('PUT',)
def update(self, request):
postdata = json.loads(request.raw_post_data)
name = postdata['name']
address = postdata['address']
status = 'EXISTS'
sp = None
try:
sp = ServiceProvider.objects.get(name = name)
except:
sp = ServiceProvider(name = name, address = address)
status = 'OK'
sp.address = address
sp.save()
response = {'status': status,
'name': name,
'address': address}
return response
# http://domain/api/make-cert
class MakeCertHandler(AnonymousBaseHandler):
allowed_methods = ('PUT',)
def update(self, request):
postdata = json.loads(request.raw_post_data)
name = postdata['name']
address = postdata['address']
status = 'EXISTS'
sp = None
try:
sp = ServiceProvider.objects.get(name = name)
cert = security.generateKeys()
sp.pubkey = security.getPublicKey(cert)
sp.save()
spKey = security.keyToStr(cert)
response = {'status': "OK", 'keys': spKey}
except:
response = {'status': "FAIL", 'keys': ""}
return response
# http://domain/api/file-list
class FileListHandler(AnonymousBaseHandler):
allowed_methods = ('GET','PUT')
def read(self, request):
response = {}
responselist = []
filelist = File.objects.all()
for filedata in filelist:
filedict = {}
filedict['name'] = filedata.name
filedict['author'] = filedata.author
filedict['description'] = filedata.description
filedict['sp'] = filedata.sp.name
filedict['sp-address'] = filedata.sp.address
responselist.append(filedict)
response['file-list'] = responselist
return response
def update(self, request):
postdata = json.loads(request.raw_post_data)
files = postdata['file-list']
spname = postdata['service-provider']
sp = ServiceProvider.objects.get(name = spname)
responselist = []
for filedata in files:
status = 'EXISTS'
name = filedata['name']
author = filedata['author']
description = filedata['description']
newfile = None
try:
newfile = File.objects.get(name = name)
except:
newfile = File(name = name)
status = 'OK'
newfile.author = author
newfile.description = description
newfile.sp = sp
newfile.save()
responselist.append({'name': name, 'sp': sp.name, 'status': status})
response = {'file-list': responselist}
return response
| Python |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from piston.authentication import HttpBasicAuthentication
from piston.resource import Resource

from fluffycentral.api.handlers import *


def _csrf_exempt_resource(handler_class):
    """Wrap a piston handler in a Resource exempt from CSRF checks.

    The JSON PUT endpoints carry no CSRF token, so each resource opts out
    of Django's CsrfViewMiddleware; a handler may override this via a
    `csrf_exempt` attribute. Factored out of three copy-pasted stanzas.
    """
    resource = Resource(handler_class)
    resource.csrf_exempt = getattr(resource.handler, 'csrf_exempt', True)
    return resource


sp_list_handler = _csrf_exempt_resource(SPListHandler)
file_list_handler = _csrf_exempt_resource(FileListHandler)
make_cert_handler = _csrf_exempt_resource(MakeCertHandler)

urlpatterns = patterns('',
    url(r'sp_list/$', sp_list_handler, name='sp_list'),
    url(r'file_list/$', file_list_handler, name='file_list'),
    url(r'make_cert/$', make_cert_handler, name='make_cert'),
    #url(r'get_pub_key/$', get_pub_key_handler, name='get_pub_key'),
    #url(r'get_sp_pub_key/$', get_sp_pub_key_handler, name='get_sp_pub_key'),
)
| Python |
import hmac, base64
from django import forms
from django.conf import settings
class Form(forms.Form):
    """Thin alias over django.forms.Form (no behavior added)."""
    pass
class ModelForm(forms.ModelForm):
    """
    Subclass of `forms.ModelForm` which makes sure
    that the initial values are present in the form
    data, so you don't have to send all old values
    for the form to actually validate. Django does not
    do this on its own, which is really annoying.
    """
    def merge_from_initial(self):
        """Backfill missing submitted fields from the form's initial data."""
        # QueryDicts are immutable by default; unlock before writing.
        self.data._mutable = True
        for field in getattr(self.Meta, 'fields', ()):
            if field not in self.data.keys():
                self.data[field] = self.initial.get(field, None)
class OAuthAuthenticationForm(forms.Form):
    """Confirmation form for the OAuth authorize page.

    Carries the request token and callback in hidden fields and protects
    the POST with an HMAC-SHA1 signature derived from SECRET_KEY and the
    token.
    """
    oauth_token = forms.CharField(widget=forms.HiddenInput)
    oauth_callback = forms.CharField(widget=forms.HiddenInput)
    authorize_access = forms.BooleanField(required=True)
    csrf_signature = forms.CharField(widget=forms.HiddenInput)

    def __init__(self, *args, **kwargs):
        forms.Form.__init__(self, *args, **kwargs)
        # Binds the bound method itself (not its result); Django resolves
        # callable initial values when the form is rendered.
        self.fields['csrf_signature'].initial = self.initial_csrf_signature

    def clean_csrf_signature(self):
        # Recompute the signature server-side and compare to the submission.
        sig = self.cleaned_data['csrf_signature']
        token = self.cleaned_data['oauth_token']
        sig1 = OAuthAuthenticationForm.get_csrf_signature(settings.SECRET_KEY, token)
        if sig != sig1:
            raise forms.ValidationError("CSRF signature is not valid")
        return sig

    def initial_csrf_signature(self):
        token = self.initial['oauth_token']
        return OAuthAuthenticationForm.get_csrf_signature(settings.SECRET_KEY, token)

    @staticmethod
    def get_csrf_signature(key, token):
        """Return the base64 HMAC-SHA1 digest of `token` keyed by `key`."""
        # Check signature...
        try:
            import hashlib # 2.5
            hashed = hmac.new(key, token, hashlib.sha1)
        except:
            import sha # deprecated
            hashed = hmac.new(key, token, sha)
        # calculate the digest base 64
        return base64.b64encode(hashed.digest())
| Python |
"""
Decorator module, see
http://www.phyast.pitt.edu/~micheles/python/documentation.html
for the documentation and below for the licence.
"""
## The basic trick is to generate the source code for the decorated function
## with the right signature and to evaluate it.
## Uncomment the statement 'print >> sys.stderr, func_src' in _decorator
## to understand what is going on.
__all__ = ["decorator", "new_wrapper", "getinfo"]
import inspect, sys
try:
set
except NameError:
from sets import Set as set
def getinfo(func):
    """
    Returns an info dictionary containing:
    - name (the name of the function : str)
    - argnames (the names of the arguments : list)
    - defaults (the values of the default arguments : tuple)
    - signature (the signature : str)
    - doc (the docstring : str)
    - module (the module name : str)
    - dict (the function __dict__ : str)

    >>> def f(self, x=1, y=2, *args, **kw): pass

    >>> info = getinfo(f)

    >>> info["name"]
    'f'
    >>> info["argnames"]
    ['self', 'x', 'y', 'args', 'kw']

    >>> info["defaults"]
    (1, 2)

    >>> info["signature"]
    'self, x, y, *args, **kw'
    """
    assert inspect.ismethod(func) or inspect.isfunction(func)
    regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
    argnames = list(regargs)
    if varargs:
        argnames.append(varargs)
    if varkwargs:
        argnames.append(varkwargs)
    # Rebuild the signature string with default values stripped
    # ("x, y=1" -> "x, y") so it can be reused verbatim on both sides of
    # a generated lambda.
    signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults,
                                      formatvalue=lambda value: "")[1:-1]
    # func_defaults/func_globals/func_closure are the Python 2 spellings
    # of __defaults__/__globals__/__closure__.
    return dict(name=func.__name__, argnames=argnames, signature=signature,
                defaults = func.func_defaults, doc=func.__doc__,
                module=func.__module__, dict=func.__dict__,
                globals=func.func_globals, closure=func.func_closure)
# akin to functools.update_wrapper
def update_wrapper(wrapper, model, infodict=None):
    """Copy name/doc/module/dict/defaults metadata onto `wrapper`.

    `model` is the wrapped function (or any object); `infodict` is a
    getinfo()-style dict, computed from `model` when omitted. The
    original is recorded as `wrapper.undecorated`. Returns `wrapper`.
    """
    infodict = infodict or getinfo(model)
    try:
        wrapper.__name__ = infodict['name']
    except (AttributeError, TypeError):
        # Python < 2.4 made function __name__ read-only; narrowed from a
        # bare except so unrelated errors are no longer swallowed.
        pass
    wrapper.__doc__ = infodict['doc']
    wrapper.__module__ = infodict['module']
    wrapper.__dict__.update(infodict['dict'])
    wrapper.func_defaults = infodict['defaults']
    wrapper.undecorated = model
    return wrapper
def new_wrapper(wrapper, model):
    """
    An improvement over functools.update_wrapper. The wrapper is a generic
    callable object. It works by generating a copy of the wrapper with the
    right signature and by updating the copy, not the original.
    Moreover, 'model' can be a dictionary with keys 'name', 'doc', 'module',
    'dict', 'defaults'.
    """
    if isinstance(model, dict):
        infodict = model # already a getinfo()-style dict
    else: # assume model is a function
        infodict = getinfo(model)
    # '_wrapper_' is the free variable of the generated lambda, so the
    # model's own arguments must not shadow it.
    assert not '_wrapper_' in infodict["argnames"], (
        '"_wrapper_" is a reserved argument name!')
    src = "lambda %(signature)s: _wrapper_(%(signature)s)" % infodict
    # eval builds a pass-through whose signature matches the model exactly.
    funcopy = eval(src, dict(_wrapper_=wrapper))
    return update_wrapper(funcopy, model, infodict)
# helper used in decorator_factory
def __call__(self, func):
    # Grafted onto decorator classes as their __call__: decorating `func`
    # produces a signature-preserving copy that routes every invocation
    # through self.call(func, ...).
    infodict = getinfo(func)
    for name in ('_func_', '_self_'):
        # Both names are free variables of the generated lambda below.
        assert not name in infodict["argnames"], (
            '%s is a reserved argument name!' % name)
    src = "lambda %(signature)s: _self_.call(_func_, %(signature)s)"
    new = eval(src % infodict, dict(_func_=func, _self_=self))
    return update_wrapper(new, func, infodict)
def decorator_factory(cls):
    """
    Take a class with a ``.caller`` method and return a callable decorator
    object. It works by adding a suitable __call__ method to the class;
    it raises a TypeError if the class already has a nontrivial __call__
    method.
    """
    # dir() (not hasattr) deliberately: hasattr on a class would find the
    # metaclass's __call__ and always trigger.
    members = dir(cls)
    if '__call__' in members:
        raise TypeError('You cannot decorate a class with a nontrivial '
                        '__call__ method')
    if 'call' not in members:
        raise TypeError('You cannot decorate a class without a '
                        '.call method')
    cls.__call__ = __call__
    return cls
def decorator(caller):
    """
    General purpose decorator factory: takes a caller function as
    input and returns a decorator with the same attributes.
    A caller function is any function like this::

        def caller(func, *args, **kw):
            # do something
            return func(*args, **kw)

    Here is an example of usage:

    >>> @decorator
    ... def chatty(f, *args, **kw):
    ...     print "Calling %r" % f.__name__
    ...     return f(*args, **kw)

    >>> chatty.__name__
    'chatty'

    >>> @chatty
    ... def f(): pass
    ...
    >>> f()
    Calling 'f'

    decorator can also take in input a class with a .caller method; in this
    case it converts the class into a factory of callable decorator objects.
    See the documentation for an example.
    """
    if inspect.isclass(caller):
        return decorator_factory(caller)
    def _decorator(func): # the real meat is here
        infodict = getinfo(func)
        argnames = infodict['argnames']
        # Both names are free variables of the generated lambda below.
        assert not ('_call_' in argnames or '_func_' in argnames), (
            'You cannot use _call_ or _func_ as argument names!')
        src = "lambda %(signature)s: _call_(_func_, %(signature)s)" % infodict
        # import sys; print >> sys.stderr, src # for debugging purposes
        dec_func = eval(src, dict(_func_=func, _call_=caller))
        return update_wrapper(dec_func, func, infodict)
    # The returned decorator itself carries the caller's metadata.
    return update_wrapper(_decorator, caller)
if __name__ == "__main__":
import doctest; doctest.testmod()
########################## LEGALESE ###############################
## Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## Redistributions in bytecode form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
## INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
## OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
## TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
## USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
## DAMAGE.
| Python |
from utils import rc
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
typemapper = { }
handler_tracker = [ ]
class HandlerMetaClass(type):
    """
    Metaclass that keeps a registry of class -> handler
    mappings.
    """
    def __new__(cls, name, bases, attrs):
        new_cls = type.__new__(cls, name, bases, attrs)
        # Register (model-or-None, anonymous flag) for every handler class
        # so the emitter layer can map model instances to handlers.
        if hasattr(new_cls, 'model'):
            typemapper[new_cls] = (new_cls.model, new_cls.is_anonymous)
        else:
            typemapper[new_cls] = (None, new_cls.is_anonymous)
        # Track concrete subclasses only, not the two abstract bases.
        if name not in ('BaseHandler', 'AnonymousBaseHandler'):
            handler_tracker.append(new_cls)
        return new_cls
class BaseHandler(object):
    """
    Basehandler that gives you CRUD for free.
    You are supposed to subclass this for specific
    functionality.

    All CRUD methods (`read`/`update`/`create`/`delete`)
    receive a request as the first argument from the
    resource. Use this for checking `request.user`, etc.
    """
    __metaclass__ = HandlerMetaClass

    # Default REST verbs; subclasses narrow this (see AnonymousBaseHandler).
    allowed_methods = ('GET', 'POST', 'PUT', 'DELETE')
    anonymous = is_anonymous = False
    # Field selection consumed by the emitter layer.
    exclude = ( 'id', )
    fields = ( )

    def flatten_dict(self, dct):
        """Coerce a QueryDict/dict into a plain dict with string keys."""
        return dict([ (str(k), dct.get(k)) for k in dct.keys() ])

    def has_model(self):
        """True when the handler is bound to a model (directly or via queryset)."""
        return hasattr(self, 'model') or hasattr(self, 'queryset')

    def queryset(self, request):
        """Base queryset; override to scope rows per request."""
        return self.model.objects.all()

    @staticmethod
    def value_from_tuple(tu, name):
        """Return the value paired with `name` in a choices-style
        ((value, name), ...) tuple, or None when absent.

        BUGFIX: this was defined without `self`, so an instance call
        (`handler.value_from_tuple(tu, name)`) silently passed the
        handler itself as `tu`. It uses no instance state, so it is now
        a staticmethod and works identically via class or instance.
        """
        for int_, n in tu:
            if n == name:
                return int_
        return None

    def exists(self, **kwargs):
        """True if a model row matching kwargs exists."""
        if not self.has_model():
            raise NotImplementedError
        try:
            self.model.objects.get(**kwargs)
            return True
        except self.model.DoesNotExist:
            return False

    def read(self, request, *args, **kwargs):
        """GET: a single row when the pk is among kwargs, else a filtered list."""
        if not self.has_model():
            return rc.NOT_IMPLEMENTED
        pkfield = self.model._meta.pk.name
        if pkfield in kwargs:
            try:
                return self.queryset(request).get(pk=kwargs.get(pkfield))
            except ObjectDoesNotExist:
                return rc.NOT_FOUND
            except MultipleObjectsReturned: # should never happen, since we're using a PK
                return rc.BAD_REQUEST
        else:
            return self.queryset(request).filter(*args, **kwargs)

    def create(self, request, *args, **kwargs):
        """POST: create a row from the POSTed attributes; duplicates rejected."""
        if not self.has_model():
            return rc.NOT_IMPLEMENTED
        attrs = self.flatten_dict(request.POST)
        try:
            inst = self.queryset(request).get(**attrs)
            return rc.DUPLICATE_ENTRY
        except self.model.DoesNotExist:
            inst = self.model(**attrs)
            inst.save()
            return inst
        except self.model.MultipleObjectsReturned:
            return rc.DUPLICATE_ENTRY

    def update(self, request, *args, **kwargs):
        """PUT: update the row named by pk with the POSTed attributes."""
        if not self.has_model():
            return rc.NOT_IMPLEMENTED
        pkfield = self.model._meta.pk.name
        if pkfield not in kwargs:
            # No pk was specified
            return rc.BAD_REQUEST
        try:
            inst = self.queryset(request).get(pk=kwargs.get(pkfield))
        except ObjectDoesNotExist:
            return rc.NOT_FOUND
        except MultipleObjectsReturned: # should never happen, since we're using a PK
            return rc.BAD_REQUEST
        attrs = self.flatten_dict(request.POST)
        for k,v in attrs.iteritems():
            setattr( inst, k, v )
        inst.save()
        return rc.ALL_OK

    def delete(self, request, *args, **kwargs):
        """DELETE: remove the row matching args/kwargs."""
        if not self.has_model():
            raise NotImplementedError
        try:
            inst = self.queryset(request).get(*args, **kwargs)
            inst.delete()
            return rc.DELETED
        except self.model.MultipleObjectsReturned:
            return rc.DUPLICATE_ENTRY
        except self.model.DoesNotExist:
            return rc.NOT_HERE
class AnonymousBaseHandler(BaseHandler):
    """
    Anonymous handler.
    """
    is_anonymous = True
    # Unauthenticated access is read-only by default.
    allowed_methods = ('GET',)
| Python |
import sys, inspect
from django.http import (HttpResponse, Http404, HttpResponseNotAllowed,
HttpResponseForbidden, HttpResponseServerError)
from django.views.debug import ExceptionReporter
from django.views.decorators.vary import vary_on_headers
from django.conf import settings
from django.core.mail import send_mail, EmailMessage
from django.db.models.query import QuerySet
from emitters import Emitter
from handler import typemapper
from doc import HandlerMethod
from authentication import NoAuthentication
from utils import coerce_put_post, FormValidationError, HttpStatusCode
from utils import rc, format_error, translate_mime, MimerDataException
class Resource(object):
"""
Resource. Create one for your URL mappings, just
like you would with Django. Takes one argument,
the handler. The second argument is optional, and
is an authentication handler. If not specified,
`NoAuthentication` will be used by default.
"""
callmap = { 'GET': 'read', 'POST': 'create',
'PUT': 'update', 'DELETE': 'delete' }
def __init__(self, handler, authentication=None):
    """Bind a handler class (instantiated here) and an auth backend.

    Without an explicit `authentication`, NoAuthentication is used and
    every request is allowed through.
    """
    if not callable(handler):
        raise AttributeError, "Handler not callable."
    self.handler = handler()
    if not authentication:
        self.authentication = NoAuthentication()
    else:
        self.authentication = authentication
    # Erroring: behavior toggles pulled from settings with safe defaults.
    self.email_errors = getattr(settings, 'PISTON_EMAIL_ERRORS', True)
    self.display_errors = getattr(settings, 'PISTON_DISPLAY_ERRORS', True)
    self.stream = getattr(settings, 'PISTON_STREAM_OUTPUT', False)
def determine_emitter(self, request, *args, **kwargs):
"""
Function for determening which emitter to use
for output. It lives here so you can easily subclass
`Resource` in order to change how emission is detected.
You could also check for the `Accept` HTTP header here,
since that pretty much makes sense. Refer to `Mimer` for
that as well.
"""
em = kwargs.pop('emitter_format', None)
if not em:
em = request.GET.get('format', 'json')
return em
@property
def anonymous(self):
"""
Gets the anonymous handler. Also tries to grab a class
if the `anonymous` value is a string, so that we can define
anonymous handlers that aren't defined yet (like, when
you're subclassing your basehandler into an anonymous one.)
"""
if hasattr(self.handler, 'anonymous'):
anon = self.handler.anonymous
if callable(anon):
return anon
for klass in typemapper.keys():
if anon == klass.__name__:
return klass
return None
@vary_on_headers('Authorization')
def __call__(self, request, *args, **kwargs):
"""
NB: Sends a `Vary` header so we don't cache requests
that are different (OAuth stuff in `Authorization` header.)
"""
rm = request.method.upper()
# Django's internal mechanism doesn't pick up
# PUT request, so we trick it a little here.
if rm == "PUT":
coerce_put_post(request)
if not self.authentication.is_authenticated(request):
if self.anonymous and \
rm in self.anonymous.allowed_methods:
handler = self.anonymous()
anonymous = True
else:
return self.authentication.challenge()
else:
handler = self.handler
anonymous = handler.is_anonymous
# Translate nested datastructs into `request.data` here.
if rm in ('POST', 'PUT'):
try:
translate_mime(request)
except MimerDataException:
return rc.BAD_REQUEST
if not rm in handler.allowed_methods:
return HttpResponseNotAllowed(handler.allowed_methods)
meth = getattr(handler, self.callmap.get(rm), None)
if not meth:
raise Http404
# Support emitter both through (?P<emitter_format>) and ?format=emitter.
em_format = self.determine_emitter(request, *args, **kwargs)
kwargs.pop('emitter_format', None)
# Clean up the request object a bit, since we might
# very well have `oauth_`-headers in there, and we
# don't want to pass these along to the handler.
request = self.cleanup_request(request)
try:
result = meth(request, *args, **kwargs)
except FormValidationError, e:
# TODO: Use rc.BAD_REQUEST here
return HttpResponse("Bad Request: %s" % e.form.errors, status=400)
except TypeError, e:
result = rc.BAD_REQUEST
hm = HandlerMethod(meth)
sig = hm.get_signature()
msg = 'Method signature does not match.\n\n'
if sig:
msg += 'Signature should be: %s' % sig
else:
msg += 'Resource does not expect any parameters.'
if self.display_errors:
msg += '\n\nException was: %s' % str(e)
result.content = format_error(msg)
except HttpStatusCode, e:
#result = e ## why is this being passed on and not just dealt with now?
return e.response
except Exception, e:
"""
On errors (like code errors), we'd like to be able to
give crash reports to both admins and also the calling
user. There's two setting parameters for this:
Parameters::
- `PISTON_EMAIL_ERRORS`: Will send a Django formatted
error email to people in `settings.ADMINS`.
- `PISTON_DISPLAY_ERRORS`: Will return a simple traceback
to the caller, so he can tell you what error they got.
If `PISTON_DISPLAY_ERRORS` is not enabled, the caller will
receive a basic "500 Internal Server Error" message.
"""
exc_type, exc_value, tb = sys.exc_info()
rep = ExceptionReporter(request, exc_type, exc_value, tb.tb_next)
if self.email_errors:
self.email_exception(rep)
if self.display_errors:
return HttpResponseServerError(
format_error('\n'.join(rep.format_exception())))
else:
raise
emitter, ct = Emitter.get(em_format)
fields = handler.fields
if hasattr(handler, 'list_fields') and (
isinstance(result, list) or isinstance(result, QuerySet)):
fields = handler.list_fields
srl = emitter(result, typemapper, handler, fields, anonymous)
try:
"""
Decide whether or not we want a generator here,
or we just want to buffer up the entire result
before sending it to the client. Won't matter for
smaller datasets, but larger will have an impact.
"""
if self.stream: stream = srl.stream_render(request)
else: stream = srl.render(request)
resp = HttpResponse(stream, mimetype=ct)
resp.streaming = self.stream
return resp
except HttpStatusCode, e:
return e.response
@staticmethod
def cleanup_request(request):
"""
Removes `oauth_` keys from various dicts on the
request object, and returns the sanitized version.
"""
for method_type in ('GET', 'PUT', 'POST', 'DELETE'):
block = getattr(request, method_type, { })
if True in [ k.startswith("oauth_") for k in block.keys() ]:
sanitized = block.copy()
for k in sanitized.keys():
if k.startswith("oauth_"):
sanitized.pop(k)
setattr(request, method_type, sanitized)
return request
# --
def email_exception(self, reporter):
subject = "Piston crash report"
html = reporter.get_traceback_html()
message = EmailMessage(settings.EMAIL_SUBJECT_PREFIX+subject,
html, settings.SERVER_EMAIL,
[ admin[1] for admin in settings.ADMINS ])
message.content_subtype = 'html'
message.send(fail_silently=True)
# ===== file boundary (dataset concatenation separator) =====
from __future__ import generators
import decimal, re, inspect
import copy
try:
# yaml isn't standard with python. It shouldn't be required if it
# isn't used.
import yaml
except ImportError:
yaml = None
# Fallback since `any` isn't in Python <2.5
try:
    any
except NameError:
    def any(iterable):
        """Backport of the `any` builtin: True if any element is truthy."""
        for element in iterable:
            if element:
                return True
        return False
from django.db.models.query import QuerySet
from django.db.models import Model, permalink
from django.utils import simplejson
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.encoding import smart_unicode
from django.core.urlresolvers import reverse, NoReverseMatch
from django.core.serializers.json import DateTimeAwareJSONEncoder
from django.http import HttpResponse
from django.core import serializers
from utils import HttpStatusCode, Mimer
try:
import cStringIO as StringIO
except ImportError:
import StringIO
try:
import cPickle as pickle
except ImportError:
import pickle
# Allow people to change the reverser (default `permalink`).
# Used below when building `resource_uri` values for models.
reverser = permalink
class Emitter(object):
    """
    Super emitter. All other emitters should subclass
    this one. It has the `construct` method which
    conveniently returns a serialized `dict`. This is
    usually the only method you want to use in your
    emitter. See below for examples.
    """
    # Registry of name -> (emitter class, content type); see register().
    EMITTERS = { }

    def __init__(self, payload, typemapper, handler, fields=(), anonymous=True):
        self.typemapper = typemapper
        self.data = payload
        self.handler = handler
        self.fields = fields
        self.anonymous = anonymous
        if isinstance(self.data, Exception):
            # NOTE(review): a bare `raise` outside an except block re-raises
            # the most recently handled exception (or fails if there is
            # none) -- presumably `raise self.data` was intended; verify.
            raise

    def method_fields(self, data, fields):
        # Return {field name: bound attribute} for requested `fields` that
        # exist on `data` (the handler). Returns {} when data is falsy.
        if not data:
            return { }
        has = dir(data)
        ret = dict()
        for field in fields:
            if field in has and callable(field):
                ret[field] = getattr(data, field)
        return ret

    def construct(self):
        """
        Recursively serialize a lot of types, and
        in cases where it doesn't recognize the type,
        it will fall back to Django's `smart_unicode`.
        Returns `dict`.
        """
        def _any(thing, fields=()):
            """
            Dispatch, all types are routed through here.
            """
            ret = None
            if isinstance(thing, QuerySet):
                ret = _qs(thing, fields=fields)
            elif isinstance(thing, (tuple, list)):
                ret = _list(thing)
            elif isinstance(thing, dict):
                ret = _dict(thing)
            elif isinstance(thing, decimal.Decimal):
                # Decimals are emitted as strings to avoid precision loss.
                ret = str(thing)
            elif isinstance(thing, Model):
                ret = _model(thing, fields=fields)
            elif isinstance(thing, HttpResponse):
                # HttpResponses short-circuit serialization entirely.
                raise HttpStatusCode(thing)
            elif inspect.isfunction(thing):
                # Zero-argument functions are called and their result emitted.
                if not inspect.getargspec(thing)[0]:
                    ret = _any(thing())
            elif hasattr(thing, '__emittable__'):
                f = thing.__emittable__
                if inspect.ismethod(f) and len(inspect.getargspec(f)[0]) == 1:
                    ret = _any(f())
            else:
                ret = smart_unicode(thing, strings_only=True)
            return ret

        def _fk(data, field):
            """
            Foreign keys.
            """
            return _any(getattr(data, field.name))

        def _related(data, fields=()):
            """
            Foreign keys.
            """
            return [ _model(m, fields) for m in data.iterator() ]

        def _m2m(data, field, fields=()):
            """
            Many to many (re-route to `_model`.)
            """
            return [ _model(m, fields) for m in getattr(data, field.name).iterator() ]

        def _model(data, fields=()):
            """
            Models. Will respect the `fields` and/or
            `exclude` on the handler (see `typemapper`.)
            """
            ret = { }
            handler = self.in_typemapper(type(data), self.anonymous)
            get_absolute_uri = False
            if handler or fields:
                v = lambda f: getattr(data, f.attname)
                if not fields:
                    """
                    Fields was not specified, try to find teh correct
                    version in the typemapper we were sent.
                    """
                    mapped = self.in_typemapper(type(data), self.anonymous)
                    get_fields = set(mapped.fields)
                    exclude_fields = set(mapped.exclude).difference(get_fields)
                    if 'absolute_uri' in get_fields:
                        get_absolute_uri = True
                    if not get_fields:
                        # No explicit fields: default to all model fields,
                        # with FK "_id" suffixes stripped.
                        get_fields = set([ f.attname.replace("_id", "", 1)
                            for f in data._meta.fields ])
                    # sets can be negated.
                    for exclude in exclude_fields:
                        if isinstance(exclude, basestring):
                            get_fields.discard(exclude)
                        elif isinstance(exclude, re._pattern_type):
                            # NOTE(review): `re._pattern_type` is a private
                            # attribute of the `re` module.
                            for field in get_fields.copy():
                                if exclude.match(field):
                                    get_fields.discard(field)
                else:
                    get_fields = set(fields)
                met_fields = self.method_fields(handler, get_fields)
                # Plain local fields and foreign keys.
                for f in data._meta.local_fields:
                    if f.serialize and not any([ p in met_fields for p in [ f.attname, f.name ]]):
                        if not f.rel:
                            if f.attname in get_fields:
                                ret[f.attname] = _any(v(f))
                                get_fields.remove(f.attname)
                        else:
                            if f.attname[:-3] in get_fields:
                                ret[f.name] = _fk(data, f)
                                get_fields.remove(f.name)
                # Many-to-many fields.
                for mf in data._meta.many_to_many:
                    if mf.serialize and mf.attname not in met_fields:
                        if mf.attname in get_fields:
                            ret[mf.name] = _m2m(data, mf)
                            get_fields.remove(mf.name)
                # try to get the remainder of fields
                for maybe_field in get_fields:
                    if isinstance(maybe_field, (list, tuple)):
                        # (related name, nested field spec) pairs.
                        model, fields = maybe_field
                        inst = getattr(data, model, None)
                        if inst:
                            if hasattr(inst, 'all'):
                                ret[model] = _related(inst, fields)
                            elif callable(inst):
                                if len(inspect.getargspec(inst)[0]) == 1:
                                    ret[model] = _any(inst(), fields)
                            else:
                                ret[model] = _model(inst, fields)
                    elif maybe_field in met_fields:
                        # Overriding normal field which has a "resource method"
                        # so you can alter the contents of certain fields without
                        # using different names.
                        ret[maybe_field] = _any(met_fields[maybe_field](data))
                    else:
                        maybe = getattr(data, maybe_field, None)
                        if maybe:
                            if callable(maybe):
                                if len(inspect.getargspec(maybe)[0]) == 1:
                                    ret[maybe_field] = _any(maybe())
                            else:
                                ret[maybe_field] = _any(maybe)
                        else:
                            # Fall back to a handler attribute of that name.
                            handler_f = getattr(handler or self.handler, maybe_field, None)
                            if handler_f:
                                ret[maybe_field] = _any(handler_f(data))
            else:
                # No handler and no field spec: emit every model field plus
                # any instance-only attributes.
                for f in data._meta.fields:
                    ret[f.attname] = _any(getattr(data, f.attname))
                fields = dir(data.__class__) + ret.keys()
                add_ons = [k for k in dir(data) if k not in fields]
                for k in add_ons:
                    ret[k] = _any(getattr(data, k))
            # resouce uri
            if self.in_typemapper(type(data), self.anonymous):
                handler = self.in_typemapper(type(data), self.anonymous)
                if hasattr(handler, 'resource_uri'):
                    url_id, fields = handler.resource_uri(data)
                    try:
                        ret['resource_uri'] = reverser( lambda: (url_id, fields) )()
                    except NoReverseMatch, e:
                        pass
            if hasattr(data, 'get_api_url') and 'resource_uri' not in ret:
                try: ret['resource_uri'] = data.get_api_url()
                except: pass
            # absolute uri
            if hasattr(data, 'get_absolute_url') and get_absolute_uri:
                try: ret['absolute_uri'] = data.get_absolute_url()
                except: pass
            return ret

        def _qs(data, fields=()):
            """
            Querysets.
            """
            return [ _any(v, fields) for v in data ]

        def _list(data):
            """
            Lists.
            """
            return [ _any(v) for v in data ]

        def _dict(data):
            """
            Dictionaries.
            """
            return dict([ (k, _any(v)) for k, v in data.iteritems() ])

        # Kickstart the seralizin'.
        return _any(self.data, self.fields)

    def in_typemapper(self, model, anonymous):
        # Find the handler class registered for `model` with a matching
        # anonymous flag; returns None implicitly when not found.
        for klass, (km, is_anon) in self.typemapper.iteritems():
            if model is km and is_anon is anonymous:
                return klass

    def render(self):
        """
        This super emitter does not implement `render`,
        this is a job for the specific emitter below.
        """
        raise NotImplementedError("Please implement render.")

    def stream_render(self, request, stream=True):
        """
        Tells our patched middleware not to look
        at the contents, and returns a generator
        rather than the buffered string. Should be
        more memory friendly for large datasets.
        """
        yield self.render(request)

    @classmethod
    def get(cls, format):
        """
        Gets an emitter, returns the class and a content-type.
        """
        if cls.EMITTERS.has_key(format):
            return cls.EMITTERS.get(format)
        raise ValueError("No emitters found for type %s" % format)

    @classmethod
    def register(cls, name, klass, content_type='text/plain'):
        """
        Register an emitter.
        Parameters::
         - `name`: The name of the emitter ('json', 'xml', 'yaml', ...)
         - `klass`: The emitter class.
         - `content_type`: The content type to serve response as.
        """
        cls.EMITTERS[name] = (klass, content_type)

    @classmethod
    def unregister(cls, name):
        """
        Remove an emitter from the registry. Useful if you don't
        want to provide output in one of the built-in emitters.
        """
        return cls.EMITTERS.pop(name, None)
class XMLEmitter(Emitter):
    """Emitter that renders the construct as a simple XML document."""

    def _to_xml(self, xml, data):
        # Dictionaries become nested elements named after their keys.
        if isinstance(data, dict):
            for tag, payload in data.iteritems():
                xml.startElement(tag, {})
                self._to_xml(xml, payload)
                xml.endElement(tag)
        # Sequences become repeated <resource> elements.
        elif isinstance(data, (list, tuple)):
            for entry in data:
                xml.startElement("resource", {})
                self._to_xml(xml, entry)
                xml.endElement("resource")
        # Anything else is written out as character data.
        else:
            xml.characters(smart_unicode(data))

    def render(self, request):
        """Serialize `self.construct()` into a <response> document."""
        buf = StringIO.StringIO()
        generator = SimplerXMLGenerator(buf, "utf-8")
        generator.startDocument()
        generator.startElement("response", {})
        self._to_xml(generator, self.construct())
        generator.endElement("response")
        generator.endDocument()
        return buf.getvalue()
Emitter.register('xml', XMLEmitter, 'text/xml; charset=utf-8')
Mimer.register(lambda *a: None, ('text/xml',))
class JSONEmitter(Emitter):
    """
    JSON emitter, understands timestamps.
    """
    def render(self, request):
        """Render the construct as JSON, optionally JSONP-wrapped."""
        callback = request.GET.get('callback')
        serialized = simplejson.dumps(self.construct(), cls=DateTimeAwareJSONEncoder, ensure_ascii=False, indent=4)
        # Wrap in the requested JSONP callback, if any.
        if callback:
            return '%s(%s)' % (callback, serialized)
        return serialized
Emitter.register('json', JSONEmitter, 'application/json; charset=utf-8')
Mimer.register(simplejson.loads, ('application/json',))
class YAMLEmitter(Emitter):
    """
    YAML emitter, uses `safe_dump` to omit the
    specific types when outputting to non-Python.
    """
    def render(self, request):
        return yaml.safe_dump(self.construct())
if yaml: # Only register yaml if it was import successfully.
    Emitter.register('yaml', YAMLEmitter, 'application/x-yaml; charset=utf-8')
    # SECURITY(review): yaml.load can instantiate arbitrary Python objects
    # from an untrusted request body; yaml.safe_load would be safer here.
    Mimer.register(yaml.load, ('application/x-yaml',))
class PickleEmitter(Emitter):
    """
    Emitter that returns Python pickled.
    """
    def render(self, request):
        return pickle.dumps(self.construct())
Emitter.register('pickle', PickleEmitter, 'application/python-pickle')
# SECURITY(review): pickle.loads on an incoming request body permits
# arbitrary code execution if this API is reachable by untrusted clients.
Mimer.register(pickle.loads, ('application/python-pickle',))
class DjangoEmitter(Emitter):
    """
    Emitter for the Django serialized format.
    """
    def render(self, request, format='xml'):
        """Delegate serialization to Django's serializer framework."""
        data = self.data
        # HttpResponses pass straight through; bare ints/strings
        # (status codes, messages) are returned untouched.
        if isinstance(data, HttpResponse):
            return data
        if isinstance(data, (int, str)):
            return data
        return serializers.serialize(format, data, indent=True)
Emitter.register('django', DjangoEmitter, 'text/xml; charset=utf-8')
# ===== file boundary (dataset concatenation separator) =====
import urllib
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from django.conf import settings
from django.core.mail import send_mail, mail_admins
from django.template import loader
from managers import TokenManager, ConsumerManager, ResourceManager, KEY_SIZE, SECRET_SIZE
# Lifecycle states for a Consumer application (see Consumer.status).
CONSUMER_STATES = (
    ('pending', 'Pending approval'),
    ('accepted', 'Accepted'),
    ('canceled', 'Canceled'),
)
class Nonce(models.Model):
    """A nonce value recorded per token/consumer pair.

    NOTE(review): presumably stored for replay protection; only the keys
    are kept here -- verify against the manager's lookup_nonce usage.
    """
    token_key = models.CharField(max_length=KEY_SIZE)
    consumer_key = models.CharField(max_length=KEY_SIZE)
    key = models.CharField(max_length=255)

    def __unicode__(self):
        return u"Nonce %s for %s" % (self.key, self.consumer_key)
admin.site.register(Nonce)
class Resource(models.Model):
    """A named url that access can be granted to."""
    name = models.CharField(max_length=255)
    url = models.TextField(max_length=2047)
    # When True, consumers may only read (never modify) this resource.
    is_readonly = models.BooleanField(default=True)
    objects = ResourceManager()

    def __unicode__(self):
        return u"Resource %s with url %s" % (self.name, self.url)
admin.site.register(Resource)
class Consumer(models.Model):
name = models.CharField(max_length=255)
description = models.TextField()
key = models.CharField(max_length=KEY_SIZE)
secret = models.CharField(max_length=SECRET_SIZE)
status = models.CharField(max_length=16, choices=CONSUMER_STATES, default='pending')
user = models.ForeignKey(User, null=True, blank=True, related_name='consumers')
objects = ConsumerManager()
def __unicode__(self):
return u"Consumer %s with key %s" % (self.name, self.key)
def save(self, **kwargs):
super(Consumer, self).save(**kwargs)
if self.id and self.user:
subject = "API Consumer"
rcpt = [ self.user.email, ]
if self.status == "accepted":
template = "api/mails/consumer_accepted.txt"
subject += " was accepted!"
elif self.status == "canceled":
template = "api/mails/consumer_canceled.txt"
subject += " has been canceled"
else:
template = "api/mails/consumer_pending.txt"
subject += " application received"
for admin in settings.ADMINS:
bcc.append(admin[1])
body = loader.render_to_string(template,
{ 'consumer': self, 'user': self.user })
send_mail(subject, body, settings.DEFAULT_FROM_EMAIL,
rcpt, fail_silently=True)
if self.status == 'pending':
mail_admins(subject, body, fail_silently=True)
if settings.DEBUG:
print "Mail being sent, to=%s" % rcpt
print "Subject: %s" % subject
print body
admin.site.register(Consumer)
class Token(models.Model):
    """An OAuth token (request or access) issued to a consumer."""
    # Token type discriminators.
    REQUEST = 1
    ACCESS = 2
    TOKEN_TYPES = ((REQUEST, u'Request'), (ACCESS, u'Access'))
    key = models.CharField(max_length=KEY_SIZE)
    secret = models.CharField(max_length=SECRET_SIZE)
    token_type = models.IntegerField(choices=TOKEN_TYPES)
    timestamp = models.IntegerField()
    is_approved = models.BooleanField(default=False)
    user = models.ForeignKey(User, null=True, blank=True, related_name='tokens')
    consumer = models.ForeignKey(Consumer)
    objects = TokenManager()

    def __unicode__(self):
        return u"%s Token %s for %s" % (self.get_token_type_display(), self.key, self.consumer)

    def to_string(self, only_key=False):
        """Serialize as an urlencoded oauth_token(/secret) string."""
        token_dict = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret
        }
        if only_key:
            del token_dict['oauth_token_secret']
        return urllib.urlencode(token_dict)
admin.site.register(Token)
# ===== file boundary (dataset concatenation separator) =====
import cgi
import urllib
import time
import random
import urlparse
import hmac
import base64
# Protocol defaults used throughout this module.
VERSION = '1.0' # Hi Blaine!
HTTP_METHOD = 'GET'  # default HTTP method for new OAuthRequests
SIGNATURE_METHOD = 'PLAINTEXT'  # default when no oauth_signature_method given
# Generic exception class
class OAuthError(RuntimeError):
    """Generic OAuth failure, carrying a `message` property."""

    def __init__(self, message='OAuth error occured.'):
        self.message = message

    def get_message(self):
        return self._message

    def set_message(self, message):
        self._message = message

    message = property(get_message, set_message)
# optional WWW-Authenticate header (401 error)
def build_authenticate_header(realm=''):
    """Build the optional WWW-Authenticate header for a 401 response."""
    header_value = 'OAuth realm="%s"' % realm
    return { 'WWW-Authenticate': header_value }
# url escape
def escape(s):
# escape '/' too
return urllib.quote(s, safe='~')
# util function: current timestamp
# seconds since epoch (UTC)
def generate_timestamp():
    """Return the current time as integer seconds since the epoch (UTC)."""
    now = time.time()
    return int(now)
# util function: nonce
# pseudorandom number
def generate_nonce(length=8):
    """Return a pseudorandom nonce of `length` decimal digits."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
# OAuthConsumer is a data type that represents the identity of the Consumer
# via its shared secret with the Service Provider.
class OAuthConsumer(object):
    """Consumer identity: its key and the secret shared with the provider."""
    key = None
    secret = None

    def __init__(self, key, secret):
        self.key, self.secret = key, secret
# OAuthToken is a data type that represents an End User via either an access
# or request token.
class OAuthToken(object):
    """
    Value object for a token/secret pair.

    key = the token
    secret = the token secret
    """
    key = None
    secret = None

    def __init__(self, key, secret):
        self.key = key
        self.secret = secret

    def to_string(self):
        """Serialize as a url-encoded query string."""
        pair = {'oauth_token': self.key, 'oauth_token_secret': self.secret}
        return urllib.urlencode(pair)

    @staticmethod
    def from_string(s):
        """
        Parse a token from a string such as:
        oauth_token_secret=digg&oauth_token=digg
        """
        params = cgi.parse_qs(s, keep_blank_values=False)
        return OAuthToken(params['oauth_token'][0],
                          params['oauth_token_secret'][0])

    def __str__(self):
        return self.to_string()
# OAuthRequest represents the request and can be serialized
class OAuthRequest(object):
    """
    Represents an OAuth request and handles its (de)serialization.

    OAuth parameters:
        - oauth_consumer_key
        - oauth_token
        - oauth_signature_method
        - oauth_signature
        - oauth_timestamp
        - oauth_nonce
        - oauth_version
    ... any additional parameters, as defined by the Service Provider.
    """
    parameters = None # oauth parameters
    http_method = HTTP_METHOD
    http_url = None
    version = VERSION

    def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None):
        self.http_method = http_method
        self.http_url = http_url
        self.parameters = parameters or {}

    def set_parameter(self, parameter, value):
        self.parameters[parameter] = value

    def get_parameter(self, parameter):
        """Return a parameter value; raises OAuthError when absent."""
        try:
            return self.parameters[parameter]
        except:
            raise OAuthError('Parameter not found: %s' % parameter)

    def _get_timestamp_nonce(self):
        return self.get_parameter('oauth_timestamp'), self.get_parameter('oauth_nonce')

    def get_nonoauth_parameters(self):
        """Return any non-oauth parameters."""
        parameters = {}
        for k, v in self.parameters.iteritems():
            # ignore oauth parameters
            if k.find('oauth_') < 0:
                parameters[k] = v
        return parameters

    def to_header(self, realm=''):
        """Serialize as an Authorization header for an HTTPAuth request."""
        auth_header = 'OAuth realm="%s"' % realm
        # add the oauth parameters
        if self.parameters:
            for k, v in self.parameters.iteritems():
                auth_header += ', %s="%s"' % (k, escape(str(v)))
        return {'Authorization': auth_header}

    def to_postdata(self):
        """Serialize as post data for a POST request."""
        return '&'.join('%s=%s' % (escape(str(k)), escape(str(v))) for k, v in self.parameters.iteritems())

    def to_url(self):
        """Serialize as a url for a GET request."""
        return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())

    def get_normalized_parameters(self):
        """
        Return a string of all the parameters that need to be signed.
        NOTE: removes 'oauth_signature' from self.parameters as a side
        effect; sign_request() re-sets it afterwards.
        """
        params = self.parameters
        try:
            # exclude the signature if it exists
            del params['oauth_signature']
        except:
            pass
        key_values = params.items()
        # sort lexicographically, first after key, then after value
        key_values.sort()
        # combine key value pairs in string and escape
        return '&'.join('%s=%s' % (escape(str(k)), escape(str(v))) for k, v in key_values)

    def get_normalized_http_method(self):
        """Just uppercase the http method."""
        return self.http_method.upper()

    def get_normalized_http_url(self):
        """Parse the url and rebuild it as scheme://host/path."""
        parts = urlparse.urlparse(self.http_url)
        url_string = '%s://%s%s' % (parts[0], parts[1], parts[2]) # scheme, netloc, path
        return url_string

    def sign_request(self, signature_method, consumer, token):
        """Set the signature parameter to the result of build_signature."""
        # set the signature method
        self.set_parameter('oauth_signature_method', signature_method.get_name())
        # set the signature
        self.set_parameter('oauth_signature', self.build_signature(signature_method, consumer, token))

    def build_signature(self, signature_method, consumer, token):
        # call the build signature method within the signature method
        return signature_method.build_signature(self, consumer, token)

    @staticmethod
    def from_request(http_method, http_url, headers=None, parameters=None, query_string=None):
        """Combine multiple parameter sources into one OAuthRequest."""
        if parameters is None:
            parameters = {}
        # headers
        if headers and 'HTTP_AUTHORIZATION' in headers:
            auth_header = headers['HTTP_AUTHORIZATION']
            # BUG FIX: the original used str.index, which raises ValueError
            # for non-OAuth Authorization headers (e.g. "Basic ...");
            # str.find returns -1 instead, so such headers are now skipped.
            if auth_header.find('OAuth') > -1:
                try:
                    # get the parameters from the header
                    header_params = OAuthRequest._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    raise OAuthError('Unable to parse OAuth parameters from Authorization header.')
        # GET or POST query string
        if query_string:
            query_params = OAuthRequest._split_url_string(query_string)
            parameters.update(query_params)
        # URL parameters
        param_str = urlparse.urlparse(http_url)[4] # query
        url_params = OAuthRequest._split_url_string(param_str)
        parameters.update(url_params)
        if parameters:
            return OAuthRequest(http_method, http_url, parameters)
        return None

    @staticmethod
    def from_consumer_and_token(oauth_consumer, token=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
        """Build a request for `oauth_consumer`, filling in the defaults."""
        if not parameters:
            parameters = {}
        defaults = {
            'oauth_consumer_key': oauth_consumer.key,
            'oauth_timestamp': generate_timestamp(),
            'oauth_nonce': generate_nonce(),
            'oauth_version': OAuthRequest.version,
        }
        defaults.update(parameters)
        parameters = defaults
        if token:
            parameters['oauth_token'] = token.key
        return OAuthRequest(http_method, http_url, parameters)

    @staticmethod
    def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
        """Build a request carrying `token` (and an optional callback)."""
        if not parameters:
            parameters = {}
        parameters['oauth_token'] = token.key
        if callback:
            parameters['oauth_callback'] = escape(callback)
        return OAuthRequest(http_method, http_url, parameters)

    @staticmethod
    def _split_header(header):
        """Turn an Authorization: header into parameters (with unescaping)."""
        params = {}
        header = header.replace('OAuth ', '', 1)
        parts = header.split(',')
        for param in parts:
            # ignore realm parameter
            if param.find('realm') > -1:
                continue
            # remove whitespace
            param = param.strip()
            # split key-value
            param_parts = param.split('=', 1)
            # remove quotes and unescape the value
            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
        return params

    @staticmethod
    def _split_url_string(param_str):
        """Turn a url query string into parameters (with unescaping)."""
        parameters = cgi.parse_qs(param_str, keep_blank_values=False)
        for k, v in parameters.iteritems():
            parameters[k] = urllib.unquote(v[0])
        return parameters
# OAuthServer is a worker to check a requests validity against a data store
class OAuthServer(object):
    """
    Validates OAuth requests against a data store: issues request and
    access tokens, and verifies signed API calls.
    """
    timestamp_threshold = 300 # in seconds, five minutes
    version = VERSION
    signature_methods = None
    data_store = None

    def __init__(self, data_store=None, signature_methods=None):
        self.data_store = data_store
        self.signature_methods = signature_methods or {}

    def set_data_store(self, oauth_data_store):
        # BUG FIX: the original assigned the undefined name `data_store`,
        # raising NameError whenever this setter was called.
        self.data_store = oauth_data_store

    def get_data_store(self):
        return self.data_store

    def add_signature_method(self, signature_method):
        """Register a signature method under its advertised name."""
        self.signature_methods[signature_method.get_name()] = signature_method
        return self.signature_methods

    def fetch_request_token(self, oauth_request):
        """
        Process a request_token request.
        Returns the request token on success.
        """
        try:
            # get the request token for authorization
            token = self._get_token(oauth_request, 'request')
        except OAuthError:
            # no token required for the initial token request
            version = self._get_version(oauth_request)
            consumer = self._get_consumer(oauth_request)
            self._check_signature(oauth_request, consumer, None)
            # fetch a new token
            token = self.data_store.fetch_request_token(consumer)
        return token

    def fetch_access_token(self, oauth_request):
        """
        Process an access_token request.
        Returns the access token on success.
        """
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        # get the request token
        token = self._get_token(oauth_request, 'request')
        self._check_signature(oauth_request, consumer, token)
        new_token = self.data_store.fetch_access_token(consumer, token)
        return new_token

    def verify_request(self, oauth_request):
        """
        Verify an api call; checks all the parameters.
        Returns (consumer, token, non-oauth parameters).
        """
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        # get the access token
        token = self._get_token(oauth_request, 'access')
        self._check_signature(oauth_request, consumer, token)
        parameters = oauth_request.get_nonoauth_parameters()
        return consumer, token, parameters

    def authorize_token(self, token, user):
        """Authorize a request token on behalf of `user`."""
        return self.data_store.authorize_request_token(token, user)

    def get_callback(self, oauth_request):
        """Return the callback url from the request parameters."""
        return oauth_request.get_parameter('oauth_callback')

    def build_authenticate_header(self, realm=''):
        """Optional support for the WWW-Authenticate (401) header."""
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}

    def _get_version(self, oauth_request):
        """Verify the request's oauth_version matches this server's."""
        try:
            version = oauth_request.get_parameter('oauth_version')
        except:
            version = VERSION
        if version and version != self.version:
            raise OAuthError('OAuth version %s not supported.' % str(version))
        return version

    def _get_signature_method(self, oauth_request):
        """Resolve the request's signature method, with a default fallback."""
        try:
            signature_method = oauth_request.get_parameter('oauth_signature_method')
        except:
            signature_method = SIGNATURE_METHOD
        try:
            # get the signature method object
            signature_method = self.signature_methods[signature_method]
        except:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise OAuthError('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
        return signature_method

    def _get_consumer(self, oauth_request):
        """Look up and validate the consumer named in the request."""
        consumer_key = oauth_request.get_parameter('oauth_consumer_key')
        if not consumer_key:
            raise OAuthError('Invalid consumer key.')
        consumer = self.data_store.lookup_consumer(consumer_key)
        if not consumer:
            raise OAuthError('Invalid consumer.')
        return consumer

    def _get_token(self, oauth_request, token_type='access'):
        """Try to find the token for the provided request token key."""
        token_field = oauth_request.get_parameter('oauth_token')
        token = self.data_store.lookup_token(token_type, token_field)
        if not token:
            raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
        return token

    def _check_signature(self, oauth_request, consumer, token):
        """Validate timestamp, nonce and signature; raise OAuthError on failure."""
        timestamp, nonce = oauth_request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        self._check_nonce(consumer, token, nonce)
        signature_method = self._get_signature_method(oauth_request)
        try:
            signature = oauth_request.get_parameter('oauth_signature')
        except:
            raise OAuthError('Missing signature.')
        # validate the signature
        valid_sig = signature_method.check_signature(oauth_request, consumer, token, signature)
        if not valid_sig:
            key, base = signature_method.build_signature_base_string(oauth_request, consumer, token)
            raise OAuthError('Invalid signature. Expected signature base string: %s' % base)
        # CLEANUP: the original rebuilt the signature into an unused local
        # (`built`) here -- a wasted recomputation; removed.

    def _check_timestamp(self, timestamp):
        """Verify that the timestamp is recentish."""
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise OAuthError('Expired timestamp: given %d and now %s has a greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold))

    def _check_nonce(self, consumer, token, nonce):
        """Verify that the nonce is uniqueish."""
        nonce = self.data_store.lookup_nonce(consumer, token, nonce)
        if nonce:
            raise OAuthError('Nonce already used: %s' % str(nonce))
# OAuthClient is a worker to attempt to execute a request
class OAuthClient(object):
    """Base class for workers that execute OAuth requests."""
    consumer = None
    token = None

    def __init__(self, oauth_consumer, oauth_token):
        self.consumer = oauth_consumer
        self.token = oauth_token

    def get_consumer(self):
        return self.consumer

    def get_token(self):
        return self.token

    def fetch_request_token(self, oauth_request):
        """-> OAuthToken; subclasses must implement."""
        raise NotImplementedError

    def fetch_access_token(self, oauth_request):
        """-> OAuthToken; subclasses must implement."""
        raise NotImplementedError

    def access_resource(self, oauth_request):
        """-> some protected resource; subclasses must implement."""
        raise NotImplementedError
# OAuthDataStore is a database abstraction used to lookup consumers and tokens
class OAuthDataStore(object):
    """Abstract persistence layer for consumers, tokens and nonces."""

    def lookup_consumer(self, key):
        """-> OAuthConsumer; subclasses must implement."""
        raise NotImplementedError

    def lookup_token(self, oauth_consumer, token_type, token_token):
        """-> OAuthToken; subclasses must implement."""
        raise NotImplementedError

    def lookup_nonce(self, oauth_consumer, oauth_token, nonce, timestamp):
        """-> OAuthToken; subclasses must implement."""
        raise NotImplementedError

    def fetch_request_token(self, oauth_consumer):
        """-> OAuthToken; subclasses must implement."""
        raise NotImplementedError

    def fetch_access_token(self, oauth_consumer, oauth_token):
        """-> OAuthToken; subclasses must implement."""
        raise NotImplementedError

    def authorize_request_token(self, oauth_token, user):
        """-> OAuthToken; subclasses must implement."""
        raise NotImplementedError
# OAuthSignatureMethod is a strategy class that implements a signature method
class OAuthSignatureMethod(object):
    """Strategy interface for OAuth request-signing algorithms."""

    def get_name(self):
        """Return the signature method name (str)."""
        raise NotImplementedError

    def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token):
        """Return (key, raw): the signing key and the signature base string."""
        raise NotImplementedError

    def build_signature(self, oauth_request, oauth_consumer, oauth_token):
        """Return the signature string for the request."""
        raise NotImplementedError

    def check_signature(self, oauth_request, consumer, token, signature):
        """Rebuild the signature and compare it to the supplied one."""
        expected = self.build_signature(oauth_request, consumer, token)
        return expected == signature
class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod):
    """HMAC-SHA1 signing as specified by OAuth Core 1.0, section 9.2."""

    def get_name(self):
        return 'HMAC-SHA1'

    def build_signature_base_string(self, oauth_request, consumer, token):
        """Return (key, raw).

        key is "consumer_secret&token_secret" (token part empty when there
        is no token); raw is the percent-escaped HTTP method, URL and
        normalized parameters joined with '&'.
        """
        sig = (
            escape(oauth_request.get_normalized_http_method()),
            escape(oauth_request.get_normalized_http_url()),
            escape(oauth_request.get_normalized_parameters()),
        )
        key = '%s&' % escape(consumer.secret)
        if token:
            key += escape(token.secret)
        raw = '&'.join(sig)
        return key, raw

    def build_signature(self, oauth_request, consumer, token):
        """Return the base64-encoded HMAC-SHA1 digest of the base string."""
        key, raw = self.build_signature_base_string(oauth_request, consumer,
                                                    token)
        # Prefer hashlib (Python 2.5+), falling back to the deprecated sha
        # module on older interpreters.  The original used a bare `except:`
        # here, which would also have swallowed real errors raised by
        # hmac.new; only ImportError indicates a missing hashlib.
        try:
            import hashlib  # 2.5
            hashed = hmac.new(key, raw, hashlib.sha1)
        except ImportError:
            import sha  # deprecated
            hashed = hmac.new(key, raw, sha)
        # calculate the digest base 64
        return base64.b64encode(hashed.digest())
class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod):
    """PLAINTEXT signing: the signature is just the escaped secrets."""

    def get_name(self):
        return 'PLAINTEXT'

    def build_signature_base_string(self, oauth_request, consumer, token):
        """Return "consumer_secret&token_secret" (token part may be empty)."""
        base = escape(consumer.secret) + '&'
        if token:
            base = base + escape(token.secret)
        return base

    def build_signature(self, oauth_request, consumer, token):
        """For PLAINTEXT the signature is the base string itself."""
        return self.build_signature_base_string(oauth_request, consumer, token)
| Python |
from django.db import models
from django.contrib.auth.models import User
KEY_SIZE = 18
SECRET_SIZE = 32
class KeyManager(models.Manager):
    '''Add support for random key/secret generation
    '''
    def generate_random_codes(self):
        """Return a (key, secret) pair not already stored in this table.

        NOTE(review): uniqueness is tested on the key+secret combination
        and only the secret is regenerated on a collision -- presumably
        keys alone need not be unique; confirm against the model.
        """
        new_key = User.objects.make_random_password(length=KEY_SIZE)
        new_secret = User.objects.make_random_password(length=SECRET_SIZE)
        while self.filter(key__exact=new_key, secret__exact=new_secret).count():
            new_secret = User.objects.make_random_password(length=SECRET_SIZE)
        return new_key, new_secret
class ConsumerManager(KeyManager):
    def create_consumer(self, name, description=None, user=None):
        """
        Shortcut to create a consumer with random key/secret.

        Existing consumers are updated in place; credentials are only
        generated on first creation.
        """
        consumer, created = self.get_or_create(name=name)
        if user:
            consumer.user = user
        if description:
            consumer.description = description
        if created:
            consumer.key, consumer.secret = self.generate_random_codes()
        consumer.save()
        return consumer

    # Cache slot for a default consumer, if one is ever looked up.
    _default_consumer = None
class ResourceManager(models.Manager):
    _default_resource = None

    def get_default_resource(self, name):
        """
        Return (and memoize) the resource named *name*.

        The cache is per-manager, not per-name: after the first hit every
        call returns the cached resource regardless of *name*.
        """
        if not self._default_resource:
            self._default_resource = self.get(name=name)
        return self._default_resource
class TokenManager(KeyManager):
    def create_token(self, consumer, token_type, timestamp, user=None):
        """
        Shortcut to create a token with random key/secret.

        Credentials are generated and persisted only when the token is
        newly created.
        """
        token, created = self.get_or_create(
            consumer=consumer,
            token_type=token_type,
            timestamp=timestamp,
            user=user)
        if created:
            token.key, token.secret = self.generate_random_codes()
            token.save()
        return token
| Python |
import binascii
import oauth
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.template import loader
from django.contrib.auth import authenticate
from django.conf import settings
from django.core.urlresolvers import get_callable
from django.core.exceptions import ImproperlyConfigured
from django.shortcuts import render_to_response
from django.template import RequestContext
from piston import forms
class NoAuthentication(object):
    """
    Pass-through authentication handler.

    `is_authenticated` always succeeds and no `challenge` method is
    defined, so authentication is never initiated.
    """
    def is_authenticated(self, request):
        return True
class HttpBasicAuthentication(object):
    """
    Basic HTTP authenticater. Synopsis:

    Authentication handlers must implement two methods:
     - `is_authenticated`: Will be called when checking for
        authentication. Receives a `request` object, please
        set your `User` object on `request.user`, otherwise
        return False (or something that evaluates to False.)
     - `challenge`: In cases where `is_authenticated` returns
        False, the result of this method will be returned.
        This will usually be a `HttpResponse` object with
        some kind of challenge headers and 401 code on it.
    """
    def __init__(self, auth_func=authenticate, realm='API'):
        self.auth_func = auth_func
        self.realm = realm

    def is_authenticated(self, request):
        """Decode the Basic credentials and run them through auth_func."""
        header = request.META.get('HTTP_AUTHORIZATION', None)
        if not header:
            return False
        try:
            scheme, blob = header.split(" ", 1)
            if scheme.lower() != 'basic':
                return False
            credentials = blob.strip().decode('base64')
            username, password = credentials.split(':', 1)
        except (ValueError, binascii.Error):
            return False
        user = self.auth_func(username=username, password=password)
        request.user = user or AnonymousUser()
        return request.user not in (False, None, AnonymousUser())

    def challenge(self):
        """Return a 401 response carrying the Basic auth challenge."""
        response = HttpResponse("Authorization Required")
        response['WWW-Authenticate'] = 'Basic realm="%s"' % self.realm
        response.status_code = 401
        return response
def load_data_store():
    '''Load data store for OAuth Consumers, Tokens, Nonces and Resources

    The dotted path comes from settings.OAUTH_DATA_STORE and defaults to
    'piston.store.DataStore'.  Raises ImproperlyConfigured when either the
    module or the class cannot be imported.
    '''
    path = getattr(settings, 'OAUTH_DATA_STORE', 'piston.store.DataStore')
    # stolen from django.contrib.auth.load_backend
    i = path.rfind('.')
    module, attr = path[:i], path[i+1:]
    try:
        mod = __import__(module, {}, {}, attr)
    except ImportError as e:
        raise ImproperlyConfigured('Error importing OAuth data store %s: "%s"' % (module, e))
    try:
        cls = getattr(mod, attr)
    except AttributeError:
        raise ImproperlyConfigured('Module %s does not define a "%s" OAuth data store' % (module, attr))
    return cls
# Set the datastore here.
# Resolved once at import time; the class is instantiated per-request in
# initialize_server_request.
oauth_datastore = load_data_store()
def initialize_server_request(request):
    """
    Shortcut for initialization.

    Returns an (oauth_server, oauth_request) pair; oauth_server is None
    when the Django request could not be parsed as an OAuth request.
    """
    oauth_request = oauth.OAuthRequest.from_request(
        request.method, request.build_absolute_uri(),
        headers=request.META, parameters=dict(request.REQUEST.items()),
        query_string=request.environ.get('QUERY_STRING', ''))
    if not oauth_request:
        return None, oauth_request
    oauth_server = oauth.OAuthServer(oauth_datastore(oauth_request))
    oauth_server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
    oauth_server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
    return oauth_server, oauth_request
def send_oauth_error(err=None):
    """
    Shortcut for sending an error.

    Returns a 401 HttpResponse whose body is the error message and whose
    WWW-Authenticate header comes from oauth.build_authenticate_header.
    """
    # Guard against the default err=None: the original dereferenced
    # err.message unconditionally and crashed with AttributeError when
    # called without an argument.
    if err is not None:
        message = err.message
    else:
        message = u'Invalid OAuth request.'
    response = HttpResponse(message.encode('utf-8'))
    response.status_code = 401
    realm = 'OAuth'
    header = oauth.build_authenticate_header(realm=realm)
    for k, v in header.iteritems():
        response[k] = v
    return response
def oauth_request_token(request):
    """OAuth endpoint: issue an unauthorized request token."""
    server, oauth_request = initialize_server_request(request)
    if server is None:
        return INVALID_PARAMS_RESPONSE
    try:
        token = server.fetch_request_token(oauth_request)
        return HttpResponse(token.to_string())
    except oauth.OAuthError as err:
        return send_oauth_error(err)
def oauth_auth_view(request, token, callback, params):
    """Default token-authorization view: render the confirmation form.

    *params* is accepted for interface compatibility but unused here.
    """
    form = forms.OAuthAuthenticationForm(
        initial={'oauth_token': token.key, 'oauth_callback': callback})
    return render_to_response('piston/authorize_token.html',
        { 'form': form }, RequestContext(request))
@login_required
def oauth_user_auth(request):
    # Ask the logged-in user to authorize an OAuth request token.
    # GET renders an authorization form (settings.OAUTH_AUTH_VIEW or the
    # default oauth_auth_view); POST approves or rejects the token and
    # redirects to the consumer's callback.
    oauth_server, oauth_request = initialize_server_request(request)
    if oauth_request is None:
        return INVALID_PARAMS_RESPONSE
    try:
        token = oauth_server.fetch_request_token(oauth_request)
    except oauth.OAuthError, err:
        return send_oauth_error(err)
    try:
        callback = oauth_server.get_callback(oauth_request)
    # NOTE(review): bare except -- presumably meant to catch only a missing
    # oauth_callback parameter, but it hides every other error too.
    except:
        callback = None
    if request.method == "GET":
        params = oauth_request.get_normalized_parameters()
        oauth_view = getattr(settings, 'OAUTH_AUTH_VIEW', None)
        if oauth_view is None:
            return oauth_auth_view(request, token, callback, params)
        else:
            return get_callable(oauth_view)(request, token, callback, params)
    elif request.method == "POST":
        try:
            form = forms.OAuthAuthenticationForm(request.POST)
            if form.is_valid():
                token = oauth_server.authorize_token(token, request.user)
                args = '?'+token.to_string(only_key=True)
            else:
                args = '?error=%s' % 'Access not granted by user.'
            if not callback:
                # No client callback: hand off to the configured server-side
                # view instead of redirecting (the redirect below is skipped).
                callback = getattr(settings, 'OAUTH_CALLBACK_VIEW')
                return get_callable(callback)(request, token)
            response = HttpResponseRedirect(callback+args)
        except oauth.OAuthError, err:
            response = send_oauth_error(err)
    else:
        response = HttpResponse('Action not allowed.')
    return response
def oauth_access_token(request):
    """OAuth endpoint: trade an authorized request token for an access token."""
    server, oauth_request = initialize_server_request(request)
    if oauth_request is None:
        return INVALID_PARAMS_RESPONSE
    try:
        token = server.fetch_access_token(oauth_request)
        return HttpResponse(token.to_string())
    except oauth.OAuthError as err:
        return send_oauth_error(err)
INVALID_PARAMS_RESPONSE = send_oauth_error(oauth.OAuthError('Invalid request parameters.'))
class OAuthAuthentication(object):
    """
    OAuth authentication. Based on work by Leah Culver.
    """
    def __init__(self, realm='API'):
        self.realm = realm
        self.builder = oauth.build_authenticate_header

    def is_authenticated(self, request):
        """
        Checks whether a means of specifying authentication
        is provided, and if so, if it is a valid token.

        Read the documentation on `HttpBasicAuthentication`
        for more information about what goes on here.
        """
        if not self.is_valid_request(request):
            return False
        try:
            consumer, token, parameters = self.validate_token(request)
        except oauth.OAuthError as err:
            print(send_oauth_error(err))
            return False
        if consumer and token:
            request.user = token.user
            request.throttle_extra = token.consumer.id
            return True
        return False

    def challenge(self):
        """
        Returns a 401 response with a small bit on
        what OAuth is, and where to learn more about it.

        When this was written, browsers did not understand
        OAuth authentication on the browser side, and hence
        the helpful template we render. Maybe some day in the
        future, browsers will take care of this stuff for us
        and understand the 401 with the realm we give it.
        """
        response = HttpResponse()
        response.status_code = 401
        realm = 'API'
        for header, value in self.builder(realm=realm).iteritems():
            response[header] = value
        response.content = loader.render_to_string('oauth/challenge.html',
            { 'MEDIA_URL': settings.MEDIA_URL })
        return response

    @staticmethod
    def is_valid_request(request):
        """
        Checks whether the required parameters are either in
        the http-authorization header sent by some clients,
        which is by the way the preferred method according to
        OAuth spec, but otherwise fall back to `GET` and `POST`.
        """
        required = [ 'oauth_'+param for param in (
            'consumer_key', 'token', 'signature',
            'signature_method', 'timestamp', 'nonce') ]
        def contains_all(container):
            return all(param in container for param in required)
        return contains_all(request.META.get("HTTP_AUTHORIZATION", "")) \
            or contains_all(request.REQUEST)

    @staticmethod
    def validate_token(request, check_timestamp=True, check_nonce=True):
        """Verify the OAuth signature; returns (consumer, token, parameters).

        The check_timestamp/check_nonce flags are currently unused.
        """
        oauth_server, oauth_request = initialize_server_request(request)
        return oauth_server.verify_request(oauth_request)
| Python |
from django.middleware.http import ConditionalGetMiddleware
from django.middleware.common import CommonMiddleware
def compat_middleware_factory(klass):
    """
    Wrap the middleware class *klass* so that `process_response` is
    skipped for responses flagged with `streaming`.  Django's stock
    middleware has a bad habit of looking at `response.content`, which
    would prematurely exhaust generator- or buffer-backed responses.
    """
    class compatwrapper(klass):
        def process_response(self, req, resp):
            if hasattr(resp, 'streaming'):
                return resp
            return klass.process_response(self, req, resp)
    return compatwrapper
# Streaming-aware drop-in replacements for Django's stock middleware.
ConditionalMiddlewareCompatProxy = compat_middleware_factory(ConditionalGetMiddleware)
CommonMiddlewareCompatProxy = compat_middleware_factory(CommonMiddleware)
| Python |
from django.http import HttpResponseNotAllowed, HttpResponseForbidden, HttpResponse, HttpResponseBadRequest
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django import get_version as django_version
from decorator import decorator
from datetime import datetime, timedelta
__version__ = '0.2.2'

def get_version():
    """Return the piston version string."""
    return __version__
def format_error(error):
    """Wrap *error* (usually a traceback string) in a crash-report banner."""
    banner = u"Piston/%s (Django %s) crash report:\n\n%s"
    return banner % (get_version(), django_version(), error)
class rc_factory(object):
    """
    Status codes.

    Attribute access produces a fresh plain-text HttpResponse with the
    matching reason phrase and status, e.g. ``rc.NOT_FOUND``.
    """
    CODES = dict(ALL_OK = ('OK', 200),
                 CREATED = ('Created', 201),
                 DELETED = ('', 204), # 204 says "Don't send a body!"
                 BAD_REQUEST = ('Bad Request', 400),
                 FORBIDDEN = ('Forbidden', 401),
                 NOT_FOUND = ('Not Found', 404),
                 DUPLICATE_ENTRY = ('Conflict/Duplicate', 409),
                 NOT_HERE = ('Gone', 410),
                 NOT_IMPLEMENTED = ('Not Implemented', 501),
                 THROTTLED = ('Throttled', 503))

    def __getattr__(self, attr):
        """
        Returns a fresh `HttpResponse` when getting
        an "attribute". This is backwards compatible
        with 0.2, which is important.

        Unknown names raise AttributeError: CODES.get yields None and
        unpacking it raises the TypeError caught below.
        """
        try:
            (reason, code) = self.CODES.get(attr)
        except TypeError:
            raise AttributeError(attr)
        return HttpResponse(reason, content_type='text/plain', status=code)
rc = rc_factory()
class FormValidationError(Exception):
    """Raised by @validate when a form fails validation; carries the bound
    form so callers can inspect its errors."""
    def __init__(self, form):
        self.form = form
class HttpStatusCode(Exception):
    """Raised by handlers to short-circuit with a prebuilt HttpResponse."""
    def __init__(self, response):
        self.response = response
def validate(v_form, operation='POST'):
    """Decorator factory: validate request.<operation> data with *v_form*
    before invoking the handler; raises FormValidationError otherwise."""
    @decorator
    def wrap(f, self, request, *a, **kwa):
        form = v_form(getattr(request, operation))
        if not form.is_valid():
            raise FormValidationError(form)
        return f(self, request, *a, **kwa)
    return wrap
def throttle(max_requests, timeout=60*60, extra=''):
    """
    Simple throttling decorator, caches
    the amount of requests made in cache.

    If used on a view where users are required to
    log in, the username is used, otherwise the
    IP address of the originating request is used.

    Parameters::
     - `max_requests`: The maximum number of requests
     - `timeout`: The timeout for the cache entry (default: 1 hour)
    """
    @decorator
    def wrap(f, self, request, *args, **kwargs):
        # Identify the caller: username when authenticated, otherwise the
        # remote IP.  NOTE(review): ident can be None when REMOTE_ADDR is
        # absent; the `ident += ...` below would then raise TypeError if
        # throttle_extra is also set -- presumably never happens in
        # practice, but worth confirming.
        if request.user.is_authenticated():
            ident = request.user.username
        else:
            ident = request.META.get('REMOTE_ADDR', None)
        if hasattr(request, 'throttle_extra'):
            """
            Since we want to be able to throttle on a per-
            application basis, it's important that we realize
            that `throttle_extra` might be set on the request
            object. If so, append the identifier name with it.
            """
            ident += ':%s' % str(request.throttle_extra)
        if ident:
            """
            Preferrably we'd use incr/decr here, since they're
            atomic in memcached, but it's in django-trunk so we
            can't use it yet. If someone sees this after it's in
            stable, you can change it here.
            """
            ident += ':%s' % extra
            now = datetime.now()
            ts_key = 'throttle:ts:%s' % ident
            timestamp = cache.get(ts_key)
            offset = now + timedelta(seconds=timeout)
            # NOTE(review): `offset` is always `timeout` seconds in the
            # future, so `timestamp < offset` holds for as long as ts_key
            # lives in the cache -- the effective unblock time is the cache
            # expiry set below, and `wait` is the remaining seconds of it.
            if timestamp and timestamp < offset:
                t = rc.THROTTLED
                wait = timeout - (offset-timestamp).seconds
                t.content = 'Throttled, wait %d seconds.' % wait
                return t
            # Non-atomic read-modify-write (see docstring above re incr/decr):
            # concurrent requests may under-count.
            count = cache.get(ident, 1)
            cache.set(ident, count+1)
            # When the limit is reached, record the block marker and reset
            # the counter.
            if count >= max_requests:
                cache.set(ts_key, offset, timeout)
                cache.set(ident, 1)
        return f(self, request, *args, **kwargs)
    return wrap
def coerce_put_post(request):
    """
    Force Django to parse the body of a PUT request.

    Django doesn't particularly understand REST: it only loads form data
    for POST, so we briefly disguise the request as POST, load the
    payload, restore the method, and mirror POST into PUT.  The
    AttributeError branch works around a mod_python bug where assigning
    request.method is not possible.
    """
    if request.method != "PUT":
        return
    try:
        request.method = "POST"
        request._load_post_and_files()
        request.method = "PUT"
    except AttributeError:
        request.META['REQUEST_METHOD'] = 'POST'
        request._load_post_and_files()
        request.META['REQUEST_METHOD'] = 'PUT'
    request.PUT = request.POST
class MimerDataException(Exception):
    """
    Raised if the content_type and data don't match
    """
class Mimer(object):
    """Maps a request's Content-Type to a deserializer and applies it."""

    TYPES = dict()  # deserializer callable -> iterable of mimetype prefixes

    def __init__(self, request):
        self.request = request

    def is_multipart(self):
        """True when the request carries a multipart (file upload) body."""
        ctype = self.content_type()
        return ctype is not None and ctype.lstrip().startswith('multipart')

    def loader_for_type(self, ctype):
        """
        Gets a function ref to deserialize content
        for a certain mimetype.
        """
        for loadee, mimes in Mimer.TYPES.items():
            if any(ctype.startswith(mime) for mime in mimes):
                return loadee

    def content_type(self):
        """
        Returns the content type of the request in all cases where it is
        different than a submitted form - application/x-www-form-urlencoded
        """
        type_formencoded = "application/x-www-form-urlencoded"
        ctype = self.request.META.get('CONTENT_TYPE', type_formencoded)
        if ctype.startswith(type_formencoded):
            return None
        return ctype

    def translate(self):
        """
        Will look at the `Content-type` sent by the client, and maybe
        deserialize the contents into the format they sent. This will
        work for JSON, YAML, XML and Pickle. Since the data is not just
        key-value (and maybe just a list), the data will be placed on
        `request.data` instead, and the handler will have to read from
        there.

        It will also set `request.content_type` so the handler has an easy
        way to tell what's going on. `request.content_type` will always be
        None for form-encoded and/or multipart form data (what your browser
        sends.)
        """
        ctype = self.content_type()
        self.request.content_type = ctype
        if not self.is_multipart() and ctype:
            loadee = self.loader_for_type(ctype)
            try:
                self.request.data = loadee(self.request.raw_post_data)
                # POST/PUT are misleading once request.data is set; blank them.
                self.request.POST = self.request.PUT = dict()
            except (TypeError, ValueError):
                raise MimerDataException
        return self.request

    @classmethod
    def register(cls, loadee, types):
        """Associate deserializer *loadee* with the mimetype prefixes *types*."""
        cls.TYPES[loadee] = types

    @classmethod
    def unregister(cls, loadee):
        """Drop *loadee*; returns the mimetypes it was registered for."""
        return cls.TYPES.pop(loadee)
def translate_mime(request):
    """Deserialize the request body in place (see Mimer.translate).

    The request object is mutated; it is now also returned so callers can
    chain.  The original assigned the result to an unused local and
    implicitly returned None, which made the assignment pointless.
    """
    return Mimer(request).translate()
def require_mime(*mimes):
    """
    Decorator requiring a certain mimetype. There's a nifty
    helper called `require_extended` below which requires everything
    we support except for post-data via form.

    Shorthand names ('json', 'yaml', 'xml', 'pickle') are expanded to
    their full mimetypes; anything else is matched verbatim.
    """
    @decorator
    def wrap(f, self, request, *args, **kwargs):
        m = Mimer(request)
        shorthand = { 'json': 'application/json',
                      'yaml': 'application/x-yaml',
                      'xml': 'text/xml',
                      'pickle': 'application/python-pickle' }
        # The original looped with enumerate() but never used the index;
        # a set comprehension expresses the expansion directly.
        realmimes = set(shorthand.get(mime, mime) for mime in mimes)
        if not m.content_type() in realmimes:
            return rc.BAD_REQUEST
        return f(self, request, *args, **kwargs)
    return wrap
require_extended = require_mime('json', 'yaml', 'xml', 'pickle')
| Python |
import inspect, handler
from piston.handler import typemapper
from piston.handler import handler_tracker
from django.core.urlresolvers import get_resolver, get_callable, get_script_prefix
from django.shortcuts import render_to_response
from django.template import RequestContext
def generate_doc(handler_cls):
    """
    Returns a `HandlerDocumentation` object
    for the given handler. Use this to generate
    documentation for your API.
    """
    if type(handler_cls) is not handler.HandlerMetaClass:
        raise ValueError("Give me handler, not %s" % type(handler_cls))
    return HandlerDocumentation(handler_cls)
class HandlerMethod(object):
    """Introspection wrapper for one handler method (read/create/update/delete)."""

    def __init__(self, method, stale=False):
        self.method = method
        self.stale = stale  # True when inherited unchanged from the base handler

    def iter_args(self):
        """Yield (name, default-as-string-or-None) per documented argument,
        skipping self/request/form."""
        # inspect.getargspec was removed in Python 3.11; prefer
        # getfullargspec when available (identical results for the plain
        # functions inspected here).
        try:
            spec = inspect.getfullargspec(self.method)
        except AttributeError:
            spec = inspect.getargspec(self.method)
        args, defaults = spec[0], spec[3]
        for idx, arg in enumerate(args):
            if arg in ('self', 'request', 'form'):
                continue
            didx = len(args) - idx
            if defaults and len(defaults) >= didx:
                yield (arg, str(defaults[-didx]))
            else:
                yield (arg, None)

    @property
    def signature(self):
        """Human-readable argument list, e.g. ``a, b=<optional>``.

        The original declared a `parse_optional` parameter, but as a
        property it could never be passed, so it has been removed; the
        former default (True) behavior is kept.
        """
        parts = []
        for argn, argdef in self.iter_args():
            if argdef:
                parts.append('%s=%s' % (argn, argdef))
            else:
                parts.append(argn)
        return ', '.join(parts).replace("=None", "=<optional>")

    @property
    def doc(self):
        return inspect.getdoc(self.method)

    @property
    def name(self):
        return self.method.__name__

    @property
    def http_name(self):
        """HTTP verb for the CRUD method name; None for unrecognized names."""
        if self.name == 'read':
            return 'GET'
        elif self.name == 'create':
            return 'POST'
        elif self.name == 'delete':
            return 'DELETE'
        elif self.name == 'update':
            return 'PUT'

    def __repr__(self):
        return "<Method: %s>" % self.name
class HandlerDocumentation(object):
    """Documentation wrapper around a piston handler class."""

    def __init__(self, handler):
        self.handler = handler

    def get_methods(self, include_default=False):
        """Yield HandlerMethod objects for read/create/update/delete.

        Methods inherited unchanged from the default handler module are
        'stale' and skipped unless include_default is True (anonymous
        handlers additionally always expose a GET-able `read`).
        """
        for method in "read create update delete".split():
            met = getattr(self.handler, method, None)
            if not met:
                continue
            stale = inspect.getmodule(met) is handler
            if not self.handler.is_anonymous:
                if met and (not stale or include_default):
                    yield HandlerMethod(met, stale)
            else:
                if not stale or met.__name__ == "read" \
                    and 'GET' in self.allowed_methods:
                    yield HandlerMethod(met, stale)

    def get_all_methods(self):
        return self.get_methods(include_default=True)

    @property
    def is_anonymous(self):
        # BUG FIX: the original returned `handler.is_anonymous`, i.e. an
        # attribute of the imported *module*, which raised AttributeError.
        # The flag lives on the wrapped handler class.
        return self.handler.is_anonymous

    def get_model(self):
        # BUG FIX: the original looked `model` up on `self` (this wrapper)
        # instead of the wrapped handler, so it always returned None.
        return getattr(self.handler, 'model', None)

    @property
    def has_anonymous(self):
        return self.handler.anonymous

    @property
    def anonymous(self):
        if self.has_anonymous:
            return HandlerDocumentation(self.handler.anonymous)

    @property
    def doc(self):
        return self.handler.__doc__

    @property
    def name(self):
        return self.handler.__name__

    @property
    def allowed_methods(self):
        return self.handler.allowed_methods

    def get_resource_uri_template(self):
        """
        URI template processor.

        See http://bitworking.org/projects/URI-Templates/
        Returns None when the handler defines no resolvable resource_uri.
        """
        def _convert(template, params=[]):
            """URI template converter"""
            paths = template % dict([p, "{%s}" % p] for p in params)
            return u'%s%s' % (get_script_prefix(), paths)
        try:
            resource_uri = self.handler.resource_uri()
            components = [None, [], {}]
            for i, value in enumerate(resource_uri):
                components[i] = value
            lookup_view, args, kwargs = components
            lookup_view = get_callable(lookup_view, True)
            possibilities = get_resolver(None).reverse_dict.getlist(lookup_view)
            for possibility, pattern in possibilities:
                for result, params in possibility:
                    if args:
                        if len(args) != len(params):
                            continue
                        return _convert(result, params)
                    else:
                        if set(kwargs.keys()) != set(params):
                            continue
                        return _convert(result, params)
        # Narrowed from a bare `except:` -- still broad, but no longer
        # swallows KeyboardInterrupt/SystemExit.
        except Exception:
            return None

    resource_uri_template = property(get_resource_uri_template)

    def __repr__(self):
        return u'<Documentation for "%s">' % self.name
def documentation_view(request):
    """
    Generic documentation view. Generates documentation
    from the handlers you've defined.
    """
    docs = [generate_doc(handler_cls) for handler_cls in handler_tracker]
    # Sort so handlers and their anonymous counterparts sit next to each
    # other (key-based equivalent of the old cmp on the stripped name).
    docs.sort(key=lambda doc: doc.name.replace("Anonymous", ""))
    return render_to_response('documentation.html',
        { 'docs': docs }, RequestContext(request))
| Python |
import oauth
from models import Nonce, Token, Consumer
class DataStore(oauth.OAuthDataStore):
    """Layer between Python OAuth and Django database."""

    def __init__(self, oauth_request):
        # Raw parameters pulled from the incoming OAuth request; timestamp
        # is stored verbatim and later written onto created tokens.
        self.signature = oauth_request.parameters.get('oauth_signature', None)
        self.timestamp = oauth_request.parameters.get('oauth_timestamp', None)
        self.scope = oauth_request.parameters.get('scope', 'all')

    def lookup_consumer(self, key):
        # Caches the consumer on self: fetch_request_token/fetch_access_token
        # below rely on this having been called first in the same
        # verification pass.
        try:
            self.consumer = Consumer.objects.get(key=key)
            return self.consumer
        except Consumer.DoesNotExist:
            return None

    def lookup_token(self, token_type, token):
        # NOTE(review): the OAuthDataStore base declares
        # lookup_token(self, oauth_consumer, token_type, token_token);
        # this override drops the consumer argument -- confirm the oauth
        # library in use calls it with this arity.
        if token_type == 'request':
            token_type = Token.REQUEST
        elif token_type == 'access':
            token_type = Token.ACCESS
        try:
            self.request_token = Token.objects.get(key=token,
                token_type=token_type)
            return self.request_token
        except Token.DoesNotExist:
            return None

    def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
        # Returns None for a fresh nonce (recording it as a side effect),
        # or the nonce key when it was already used.  Tokenless requests
        # are not nonce-checked.
        if oauth_token is None:
            return None
        nonce, created = Nonce.objects.get_or_create(consumer_key=oauth_consumer.key,
            token_key=oauth_token.key,
            key=nonce)
        if created:
            return None
        else:
            return nonce.key

    def fetch_request_token(self, oauth_consumer):
        # Assumes lookup_consumer already populated self.consumer.
        if oauth_consumer.key == self.consumer.key:
            self.request_token = Token.objects.create_token(consumer=self.consumer,
                token_type=Token.REQUEST,
                timestamp=self.timestamp)
            return self.request_token
        return None

    def fetch_access_token(self, oauth_consumer, oauth_token):
        # Upgrade an approved request token to an access token; assumes
        # lookup_consumer and lookup_token populated self.consumer and
        # self.request_token earlier in the same request.
        if oauth_consumer.key == self.consumer.key \
            and oauth_token.key == self.request_token.key \
            and self.request_token.is_approved:
            self.access_token = Token.objects.create_token(consumer=self.consumer,
                token_type=Token.ACCESS,
                timestamp=self.timestamp,
                user=self.request_token.user)
            return self.access_token
        return None

    def authorize_request_token(self, oauth_token, user):
        # Assumes lookup_token populated self.request_token.
        if oauth_token.key == self.request_token.key:
            # authorize the request token in the store
            self.request_token.is_approved = True
            self.request_token.user = user
            self.request_token.save()
            return self.request_token
        return None
from M2Crypto import RSA
def generateKeys():
    """Generate a new 2048-bit RSA key pair (public exponent 65537)."""
    keys = RSA.gen_key(2048, 65537)
    return keys

def keyToStr(keys):
    """Serialize the full key pair as unencrypted PEM."""
    return keys.as_pem(cipher=None)

def getPublicKey(keys):
    """Extract the public half of *keys* and return it as PEM."""
    pubKey = RSA.new_pub_key(keys.pub())
    return pubKey.as_pem(cipher=None)

def encrypt(plaintext, keys):
    # SECURITY NOTE(review): this "encrypts" with the PRIVATE key
    # (RSA.private_encrypt), which is a signing primitive -- anyone holding
    # the public key can recover the plaintext, so this provides
    # authenticity, NOT confidentiality.  Padding mode 1 is PKCS#1 v1.5.
    encText = keys.private_encrypt(plaintext, 1)
    return encText

def decrypt(encText, pubKey):
    # Counterpart of encrypt(): recovery/verification with the public key.
    dec = pubKey.public_decrypt(encText, 1)
    return dec
| Python |
#!/usr/bin/python
# Standard Django (pre-1.4) management entry point: delegates to
# django.core.management using this project's settings module.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
| Python |
#!/usr/bin/python
# Standard Django (pre-1.4) management entry point: delegates to
# django.core.management using this project's settings module.
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
| Python |
#!/usr/bin/env python
# Convenience launcher: starts the Django development server on the
# address/port this provider advertises (see settings.PROVIDER_ADDRESS).
import os
os.system("python manage.py runserver 127.0.0.1:8002")
| Python |
from django.conf.urls.defaults import *
# NOTE(review): direct_to_template, redirect_to and reverse are imported
# but unused below.
from django.views.generic.simple import direct_to_template, redirect_to
from django.core.urlresolvers import reverse

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    url(r'^api/', include('fluffyshare.api.urls')),
    url(r'^register$', 'fluffyshare.provider.views.register', name='register_provider'),
    url(r'^send_file_list$', 'fluffyshare.provider.views.send_file_list'),
    url(r'^file_list$', 'fluffyshare.provider.views.get_file_list'),
    # Proxy view: fetch <filename> from the remote provider at <address>.
    url(r'^remote/(?P<address>.*?)/(?P<filename>.*)/$', 'fluffyshare.provider.views.get_remote_file'),
    url(r'^login$', 'fluffyshare.provider.views.login'),
    url(r'^logout$', 'fluffyshare.provider.views.logout'),

    # Uncomment the admin/doc line below to enable admin documentation:
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),

    # Uncomment the next line to enable the admin:
    (r'^admin/', include(admin.site.urls)),

    (r'^$', 'fluffyshare.provider.views.index'),
)

from django.conf import settings

# Serve uploaded media through Django itself in development only.
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^site_media/(?P<path>.*)$', 'django.views.static.serve', {
            'document_root': settings.MEDIA_ROOT,
            'show_indexes': True
        }))
| Python |
#!/usr/bin/env python
# Convenience launcher: starts the Django development server on the
# address/port this provider advertises (see settings.PROVIDER_ADDRESS).
import os
os.system("python manage.py runserver 127.0.0.1:8002")
| Python |
# Django settings for fluffyprovider project.
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))
CENTRAL_REGISTER = '127.0.0.1:8000'
PROVIDER_NAME = 'fluffyshare'
PROVIDER_ADDRESS = '127.0.0.1:8002'
DEBUG = True
TEMPLATE_DEBUG = DEBUG
ADMINS = (
# ('Your Name', 'your_email@domain.com'),
)
MANAGERS = ADMINS
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
'NAME': os.path.join(PROJECT_ROOT, 'baza.db'), # Or path to database file if using sqlite3.
'USER': '', # Not used with sqlite3.
'PASSWORD': '', # Not used with sqlite3.
'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
'PORT': '', # Set to empty string for default. Not used with sqlite3.
}
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'site_media')
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'
LOGIN_URL = "/"
# Make this unique, and don't share it with anybody.
SECRET_KEY = '3u$_7_ltwa1_1c0+$%7=fa&-g!i9tz*aboh7_0x-#g4-5aa%_o'
# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
# 'django.template.loaders.eggs.Loader',
)
MIDDLEWARE_CLASSES = (
'django.middleware.common.CommonMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'fluffyshare.urls'
# BUG FIX: the original read `( os.path.join(...) )` with no trailing
# comma -- parentheses alone do not make a tuple, so TEMPLATE_DIRS was a
# plain string and Django would iterate it character by character,
# finding no template directory.
TEMPLATE_DIRS = (
    os.path.join(PROJECT_ROOT, 'templates'),
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)
EXTERNAL_APPS_PATH = os.path.join(PROJECT_ROOT, "lib")
import sys
sys.path.append(EXTERNAL_APPS_PATH)
INSTALLED_APPS = (
'provider',
'piston',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.messages',
'django.contrib.admin',
'django.contrib.admindocs',
)
# Security
myKeys = None
| Python |
from django.db import models
class File(models.Model):
    """A file shared by this provider."""

    # Path of the shared file; unique, so it doubles as a natural key.
    path = models.CharField(max_length=100, unique=True)
    author = models.CharField(max_length=100)
    description = models.CharField(max_length=500)

    def __unicode__(self):
        # Display representation (Python 2 Django convention).
        return self.path
| Python |
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # assertEqual replaces the long-deprecated failUnlessEqual alias.
        self.assertEqual(1 + 1, 2)
# Doctest suite picked up by "manage.py test" alongside the unittest above.
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
from provider.models import File
from django.contrib import admin

# Expose the File model in the Django admin with the default ModelAdmin.
admin.site.register(File)
| Python |
# Create your views here.
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib import auth
from django.contrib.auth.decorators import login_required
from provider.models import File
from fluffyshare import settings
import security
from fluffyshare.settings import CENTRAL_REGISTER, PROVIDER_ADDRESS ,PROVIDER_NAME, MEDIA_ROOT
import urllib2
import datetime
import base64
import mimetypes
try:
import simplejson as json
except ImportError:
import json
# Base URL of the central register's REST API (host/port come from settings).
REGISTER_API = "http://" + CENTRAL_REGISTER + "/api"
@login_required()
def register(request):
    """Register this service provider with the central register.

    PUTs our name/address to the register's ``sp_list`` endpoint, then
    fetches our certificate via get_cert(), and finally renders the
    registration status page.
    """
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    url = "%s/%s/" % (REGISTER_API, 'sp_list')
    name = PROVIDER_NAME
    address = PROVIDER_ADDRESS
    datadict = {'name': name, 'address':address}
    data = json.dumps(datadict)
    print url
    apirequest = urllib2.Request(url, data=data)
    apirequest.add_header('Content-Type', 'application/json')
    # urllib2 only knows GET/POST natively; force the method to PUT.
    apirequest.get_method = lambda: 'PUT'
    result = opener.open(apirequest).read()
    # The result looks like this:
    # {
    #   "status": "EXISTS",          ## or "OK"
    #   "name": "provider1",
    #   "address": "127.0.0.1:8001"
    # }
    resultobjs = json.loads(result)
    # Also obtain (or refresh) our keys from the register.
    get_cert()
    return render_to_response('register.html',
        {'status': resultobjs['status'],'name':resultobjs['name']})
def get_cert():
    """Ask the central register to issue this provider's certificate/keys.

    On an "OK" response the keys are loaded through security.loadKeys and
    stashed on ``settings.myKeys`` for other views to use.
    """
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    url = "%s/%s/" % (REGISTER_API, 'make_cert')
    name = PROVIDER_NAME
    address = PROVIDER_ADDRESS
    datadict = {'name': name, 'address':address}
    data = json.dumps(datadict)
    apirequest = urllib2.Request(url, data=data)
    apirequest.add_header('Content-Type', 'application/json')
    # urllib2 only knows GET/POST natively; force the method to PUT.
    apirequest.get_method = lambda: 'PUT'
    result = opener.open(apirequest).read()
    resultobjs = json.loads(result)
    if resultobjs['status'] == "OK":
        keys = security.loadKeys(resultobjs['keys'])
        # NOTE(review): module-level mutable state on the settings module --
        # assumes a single-process deployment; confirm before running with
        # multiple workers.
        settings.myKeys = keys
@login_required()
def send_file_list(request):
opener = urllib2.build_opener()
urllib2.install_opener(opener)
url = "%s/%s/" % (REGISTER_API, 'file_list')
filelist = []
files = File.objects.all()
for f in files:
filelist.append({'name': f.path,
'author': f.author,
'description': f.description})
datadict = {'service-provider': PROVIDER_NAME, 'file-list': filelist}
data = json.dumps(datadict)
print url
apirequest = urllib2.Request(url, data=data)
apirequest.add_header('Content-Type', 'application/json')
apirequest.get_method = lambda: 'PUT'
result = opener.open(apirequest).read()
resultobjs = json.loads(result)
# rezultat izgleda ovako:
#{
#"file-list": [
# {
# "status": "EXISTS",
# "sp": "provider1",
# "name": "/home/stef/nesto.txt"
# },
# {
# "status": "EXISTS",
# "sp": "provider1",
# "name": "/home/stef/nesto2.txt"
# }
#]
#}
return render_to_response('send_file_list.html',
{"file_list":resultobjs["file-list"]})
@login_required()
def get_file_list(request):
opener = urllib2.build_opener()
urllib2.install_opener(opener)
url = "%s/%s/" % (REGISTER_API, 'file_list')
json_src = urllib2.urlopen(url).read()
file_list = json.loads(json_src)["file-list"]
print file_list
# json_src, tj. file_list izgleda ovako
#{
#"file-list": [
# {
# "author": "nesto",
# "sp": "provider1",
# "sp-address": "127.0.0.1:8001",
# "description": "Neka bolesna knjiga",
# "name": "/home/stef/nesto.txt"
# },
# {
# "author": "nesto 2",
# "sp": "provider1",
# "sp-address": "127.0.0.1:8001",
# "description": "Ova je jos bolja",
# "name": "/home/stef/nesto2.txt"
# }
#]
#}
local = []
remote = []
for file in file_list:
if file['sp'] == PROVIDER_NAME:
local.append(file)
else:
file['address']=file['sp-address']
remote.append(file)
return render_to_response('file_list.html',
{'local':local,'remote':remote})
def get_remote_file(request, address, filename):
opener = urllib2.build_opener()
urllib2.install_opener(opener)
url = "http://" + address + "/api/get_file/" # url apija od drugog sp-a
datadict = {'service-provider': PROVIDER_NAME,
'username': request.user.username,
'filename': filename}
data = json.dumps(datadict)
apirequest = urllib2.Request(url, data=data)
apirequest.add_header('Content-Type', 'application/json')
apirequest.get_method = lambda: 'PUT'
print url
json_src = opener.open(apirequest).read()
result = json.loads(json_src)
if result["status"] == "NOT FOUND":
return Http404
filecontent = base64.b64decode(result["data"]) #dekodiraj podatke
mimetype = mimetypes.guess_type(filename)[0] #odredi mimetype (uzimamo index 0 jer je 1 encoding)
return HttpResponse(content = filecontent, mimetype = mimetype) #salji fajl s ispravnim mimetypeom
def index(request):
    """Render the landing page.

    The template pulls the current user from RequestContext, so the old
    manual "name"/"user" computation here was dead code (never passed to
    the template) and has been removed.
    """
    return render_to_response('index.html', {},
                              context_instance=RequestContext(request))
def login(request):
username = request.POST.get('username', '')
password = request.POST.get('password', '')
user = auth.authenticate(username=username, password=password)
if user is not None and user.is_active:
# Correct password, and the user is marked "active"
auth.login(request, user)
# Redirect to a success page.
print "welcome"
return render_to_response('index.html', {}, context_instance=RequestContext(request))
else:
# Show an erroro page
return render_to_response("index.html",{"error":"error"}, context_instance=RequestContext(request))
def logout(request):
    """Log the current user out and redirect back to the landing page."""
    auth.logout(request)
    return HttpResponseRedirect("/")
| Python |
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from piston.handler import AnonymousBaseHandler, BaseHandler
from piston.utils import rc
from fluffyshare.settings import MEDIA_ROOT, PROVIDER_NAME
from fluffyshare.provider.models import File
import base64
import os
try:
import simplejson as json
except ImportError:
import json
# http://domain/api/get-file
class RemoteFileHandler(AnonymousBaseHandler):
    """Piston handler other service providers PUT to when one of their
    users wants a file that is hosted here.

    Request body (JSON): service-provider, username, filename.
    Response dict: status ('OK' / 'NOT FOUND') plus the file content,
    base64-encoded, under 'data'.
    """
    allowed_methods = ('PUT',)

    def update(self, request):
        postdata = json.loads(request.raw_post_data)
        sp_from = postdata['service-provider']  # requesting SP; kept for auditing
        user = postdata['username']
        filename = postdata['filename']
        filecontent = None
        status = 'OK'
        fullpath = os.path.join(MEDIA_ROOT, filename)
        try:
            # Only serve files that are actually registered for sharing.
            File.objects.get(path=filename)
            # BUGFIX: close the file handle instead of leaking it, and only
            # treat "row missing / unreadable file" as NOT FOUND instead of
            # swallowing every exception with a bare except.
            f = open(fullpath, 'rb')
            try:
                filecontent = f.read()
            finally:
                f.close()
        except (File.DoesNotExist, IOError, OSError):
            status = 'NOT FOUND'
        if filecontent:
            filecontent = base64.b64encode(filecontent)
        return {'service-provider': PROVIDER_NAME,
                'username': user,
                'status': status,
                'filename': filename,
                'full path': fullpath,
                'data': filecontent }
| Python |
# -*- coding: utf-8 -*-
from django.conf.urls.defaults import *
from piston.authentication import HttpBasicAuthentication
from piston.resource import Resource
from fluffyshare.api.handlers import *
# Resource wrapping the anonymous file-exchange handler.
remote_file_handler = Resource(RemoteFileHandler)
# Honour the handler's csrf_exempt flag (default True) so other service
# providers can PUT to us without a Django CSRF token.
remote_file_handler.csrf_exempt = getattr(remote_file_handler.handler, 'csrf_exempt', True)

urlpatterns = patterns('',
    url(r'get_file/$', remote_file_handler, name='get_file'),
)
| Python |
import hmac, base64
from django import forms
from django.conf import settings
class Form(forms.Form):
    """Piston alias for django.forms.Form (adds no behaviour)."""
    pass
class ModelForm(forms.ModelForm):
    """
    Subclass of `forms.ModelForm` which makes sure
    that the initial values are present in the form
    data, so you don't have to send all old values
    for the form to actually validate. Django does not
    do this on its own, which is really annoying.
    """
    def merge_from_initial(self):
        # QueryDicts are immutable by default; unlock before writing.
        self.data._mutable = True
        for name in getattr(self.Meta, 'fields', ()):
            if name not in self.data.keys():
                # Fall back to the initial value (or None) for absent fields.
                self.data[name] = self.initial.get(name, None)
class OAuthAuthenticationForm(forms.Form):
    """Form shown on the OAuth authorize page.

    Carries the token/callback plus a CSRF signature derived from
    settings.SECRET_KEY so the authorize POST cannot be forged.
    """
    oauth_token = forms.CharField(widget=forms.HiddenInput)
    oauth_callback = forms.CharField(widget=forms.HiddenInput)
    authorize_access = forms.BooleanField(required=True)
    csrf_signature = forms.CharField(widget=forms.HiddenInput)

    def __init__(self, *args, **kwargs):
        forms.Form.__init__(self, *args, **kwargs)
        # Django resolves callable initials at render time, so binding the
        # bound method here yields a fresh signature per form instance.
        self.fields['csrf_signature'].initial = self.initial_csrf_signature

    def clean_csrf_signature(self):
        """Reject the POST when the submitted signature doesn't match ours."""
        sig = self.cleaned_data['csrf_signature']
        token = self.cleaned_data['oauth_token']

        # NOTE(review): not a constant-time comparison; hmac.compare_digest
        # is unavailable before Python 2.7.7.
        sig1 = OAuthAuthenticationForm.get_csrf_signature(settings.SECRET_KEY, token)

        if sig != sig1:
            raise forms.ValidationError("CSRF signature is not valid")

        return sig

    def initial_csrf_signature(self):
        token = self.initial['oauth_token']
        return OAuthAuthenticationForm.get_csrf_signature(settings.SECRET_KEY, token)

    @staticmethod
    def get_csrf_signature(key, token):
        """Return base64(HMAC-SHA1(key, token))."""
        try:
            import hashlib # 2.5
            hashed = hmac.new(key, token, hashlib.sha1)
        except ImportError:
            # BUGFIX: only fall back to the deprecated sha module when
            # hashlib is genuinely missing -- the old bare `except:` also
            # swallowed real HMAC errors (e.g. a non-string key).
            import sha # deprecated
            hashed = hmac.new(key, token, sha)

        # calculate the digest base 64
        return base64.b64encode(hashed.digest())
| Python |
"""
Decorator module, see
http://www.phyast.pitt.edu/~micheles/python/documentation.html
for the documentation and below for the licence.
"""
## The basic trick is to generate the source code for the decorated function
## with the right signature and to evaluate it.
## Uncomment the statement 'print >> sys.stderr, func_src' in _decorator
## to understand what is going on.
__all__ = ["decorator", "new_wrapper", "getinfo"]

import inspect, sys

try:
    # 'set' is a builtin from Python 2.4 on; fall back to the sets module
    # for older interpreters.
    set
except NameError:
    from sets import Set as set
def getinfo(func):
    """
    Returns an info dictionary containing:
    - name (the name of the function : str)
    - argnames (the names of the arguments : list)
    - defaults (the values of the default arguments : tuple)
    - signature (the signature : str)
    - doc (the docstring : str)
    - module (the module name : str)
    - dict (the function __dict__ : str)

    >>> def f(self, x=1, y=2, *args, **kw): pass

    >>> info = getinfo(f)

    >>> info["name"]
    'f'
    >>> info["argnames"]
    ['self', 'x', 'y', 'args', 'kw']

    >>> info["defaults"]
    (1, 2)

    >>> info["signature"]
    'self, x, y, *args, **kw'
    """
    assert inspect.ismethod(func) or inspect.isfunction(func)
    regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
    # Append the *args / **kwargs names so 'argnames' lists every binding.
    argnames = list(regargs)
    if varargs:
        argnames.append(varargs)
    if varkwargs:
        argnames.append(varkwargs)
    # formatargspec with an empty formatvalue drops the default values;
    # [1:-1] strips the surrounding parentheses.
    signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults,
                                      formatvalue=lambda value: "")[1:-1]
    # func_defaults / func_globals / func_closure are the Python 2 names of
    # the function attributes (py3 renamed them to __defaults__ etc.).
    return dict(name=func.__name__, argnames=argnames, signature=signature,
                defaults = func.func_defaults, doc=func.__doc__,
                module=func.__module__, dict=func.__dict__,
                globals=func.func_globals, closure=func.func_closure)
# akin to functools.update_wrapper
def update_wrapper(wrapper, model, infodict=None):
    """Copy name/doc/module/dict/defaults from *model* (or a precomputed
    *infodict*) onto *wrapper*, and remember the undecorated function
    on ``wrapper.undecorated``. Returns *wrapper*."""
    info = infodict if infodict else getinfo(model)
    try:
        wrapper.__name__ = info['name']
    except: # Python version < 2.4
        pass
    wrapper.__doc__ = info['doc']
    wrapper.__module__ = info['module']
    wrapper.__dict__.update(info['dict'])
    wrapper.func_defaults = info['defaults']
    wrapper.undecorated = model
    return wrapper
def new_wrapper(wrapper, model):
    """
    An improvement over functools.update_wrapper. The wrapper is a generic
    callable object. It works by generating a copy of the wrapper with the
    right signature and by updating the copy, not the original.
    Moreover, 'model' can be a dictionary with keys 'name', 'doc', 'module',
    'dict', 'defaults'.
    """
    if isinstance(model, dict):
        infodict = model  # already an info dictionary
    else: # assume model is a function
        infodict = getinfo(model)
    assert not '_wrapper_' in infodict["argnames"], (
        '"_wrapper_" is a reserved argument name!')
    # Build a lambda with the model's exact signature that forwards every
    # argument to the real wrapper, then copy the metadata onto it.
    src = "lambda %(signature)s: _wrapper_(%(signature)s)" % infodict
    funcopy = eval(src, dict(_wrapper_=wrapper))
    return update_wrapper(funcopy, model, infodict)
# helper used in decorator_factory
def __call__(self, func):
    """Decorate *func* by delegating to ``self.call``, while preserving
    func's signature and metadata (grafted onto classes by
    decorator_factory)."""
    infodict = getinfo(func)
    for name in ('_func_', '_self_'):
        assert not name in infodict["argnames"], (
            '%s is a reserved argument name!' % name)
    # Forwarding lambda with func's exact signature, closing over the
    # decorator instance and the original function.
    src = "lambda %(signature)s: _self_.call(_func_, %(signature)s)"
    new = eval(src % infodict, dict(_func_=func, _self_=self))
    return update_wrapper(new, func, infodict)
def decorator_factory(cls):
    """
    Take a class with a ``.caller`` method and return a callable decorator
    object. It works by adding a suitable __call__ method to the class;
    it raises a TypeError if the class already has a nontrivial __call__
    method.
    """
    # Refuse classes that already define their own __call__ ...
    if '__call__' in dir(cls):
        raise TypeError('You cannot decorate a class with a nontrivial '
                        '__call__ method')
    # ... and classes that lack the required .call hook.
    if 'call' not in dir(cls):
        raise TypeError('You cannot decorate a class without a '
                        '.call method')
    # Graft the module-level __call__ helper onto the class.
    cls.__call__ = __call__
    return cls
def decorator(caller):
    """
    General purpose decorator factory: takes a caller function as
    input and returns a decorator with the same attributes.
    A caller function is any function like this::

     def caller(func, *args, **kw):
         # do something
         return func(*args, **kw)

    Here is an example of usage:

    >>> @decorator
    ... def chatty(f, *args, **kw):
    ...     print "Calling %r" % f.__name__
    ...     return f(*args, **kw)

    >>> chatty.__name__
    'chatty'

    >>> @chatty
    ... def f(): pass
    ...
    >>> f()
    Calling 'f'

    decorator can also take in input a class with a .caller method; in this
    case it converts the class into a factory of callable decorator objects.
    See the documentation for an example.
    """
    if inspect.isclass(caller):
        return decorator_factory(caller)
    def _decorator(func): # the real meat is here
        infodict = getinfo(func)
        argnames = infodict['argnames']
        assert not ('_call_' in argnames or '_func_' in argnames), (
            'You cannot use _call_ or _func_ as argument names!')
        # Forwarding lambda with func's exact signature, closing over the
        # original function and the caller.
        src = "lambda %(signature)s: _call_(_func_, %(signature)s)" % infodict
        # import sys; print >> sys.stderr, src # for debugging purposes
        dec_func = eval(src, dict(_func_=func, _call_=caller))
        return update_wrapper(dec_func, func, infodict)
    # The decorator itself advertises the caller's name/doc/signature.
    return update_wrapper(_decorator, caller)
if __name__ == "__main__":
    # Run the doctests embedded in the docstrings above.
    import doctest; doctest.testmod()
########################## LEGALESE ###############################
## Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## Redistributions in bytecode form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
## INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
## OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
## TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
## USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
## DAMAGE.
| Python |
from utils import rc
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
# Registry of handler class -> (model, is_anonymous); consulted by emitters.
typemapper = { }
# Every concrete handler class, in definition order (used by the doc tools).
handler_tracker = [ ]

class HandlerMetaClass(type):
    """
    Metaclass that keeps a registry of class -> handler
    mappings.
    """
    def __new__(cls, name, bases, attrs):
        new_cls = type.__new__(cls, name, bases, attrs)

        # Handlers without a bound model register as (None, is_anonymous).
        typemapper[new_cls] = (getattr(new_cls, 'model', None),
                               new_cls.is_anonymous)

        # Track every concrete handler, skipping the two abstract bases.
        if name not in ('BaseHandler', 'AnonymousBaseHandler'):
            handler_tracker.append(new_cls)

        return new_cls
class BaseHandler(object):
    """
    Basehandler that gives you CRUD for free.
    You are supposed to subclass this for specific
    functionality.

    All CRUD methods (`read`/`update`/`create`/`delete`)
    receive a request as the first argument from the
    resource. Use this for checking `request.user`, etc.
    """
    __metaclass__ = HandlerMetaClass

    # HTTP verbs this handler responds to; Resource.callmap maps them onto
    # read/create/update/delete.
    allowed_methods = ('GET', 'POST', 'PUT', 'DELETE')
    anonymous = is_anonymous = False
    # Default serialization hints used by the emitters.
    exclude = ( 'id', )
    fields = ( )

    def flatten_dict(self, dct):
        """Return a plain dict with string keys from a (Query)dict."""
        return dict([ (str(k), dct.get(k)) for k in dct.keys() ])

    def has_model(self):
        """True when the subclass bound a model (or a custom queryset)."""
        return hasattr(self, 'model') or hasattr(self, 'queryset')

    def queryset(self, request):
        # Default queryset: all rows of the bound model. Subclasses can
        # override to scope the queryset per request.
        return self.model.objects.all()

    def value_from_tuple(tu, name):
        # NOTE(review): defined without `self` -- as written it only works
        # when called with an explicit (sequence-of-pairs, name) pair;
        # confirm whether this is dead code before touching the signature.
        for int_, n in tu:
            if n == name:
                return int_
        return None

    def exists(self, **kwargs):
        """Return True iff a model row matches **kwargs (requires a model)."""
        if not self.has_model():
            raise NotImplementedError

        try:
            self.model.objects.get(**kwargs)
            return True
        except self.model.DoesNotExist:
            return False

    def read(self, request, *args, **kwargs):
        """GET: a single object when the model's PK is in the URL kwargs,
        otherwise the (filtered) queryset."""
        if not self.has_model():
            return rc.NOT_IMPLEMENTED

        pkfield = self.model._meta.pk.name

        if pkfield in kwargs:
            try:
                return self.queryset(request).get(pk=kwargs.get(pkfield))
            except ObjectDoesNotExist:
                return rc.NOT_FOUND
            except MultipleObjectsReturned: # should never happen, since we're using a PK
                return rc.BAD_REQUEST
        else:
            return self.queryset(request).filter(*args, **kwargs)

    def create(self, request, *args, **kwargs):
        """POST: create a row from the POSTed fields.

        An exact match on every POSTed attribute counts as a duplicate.
        """
        if not self.has_model():
            return rc.NOT_IMPLEMENTED

        attrs = self.flatten_dict(request.POST)

        try:
            inst = self.queryset(request).get(**attrs)
            return rc.DUPLICATE_ENTRY
        except self.model.DoesNotExist:
            inst = self.model(**attrs)
            inst.save()
            return inst
        except self.model.MultipleObjectsReturned:
            return rc.DUPLICATE_ENTRY

    def update(self, request, *args, **kwargs):
        """PUT: overwrite the fields of the object addressed by its PK."""
        if not self.has_model():
            return rc.NOT_IMPLEMENTED

        pkfield = self.model._meta.pk.name

        if pkfield not in kwargs:
            # No pk was specified
            return rc.BAD_REQUEST

        try:
            inst = self.queryset(request).get(pk=kwargs.get(pkfield))
        except ObjectDoesNotExist:
            return rc.NOT_FOUND
        except MultipleObjectsReturned: # should never happen, since we're using a PK
            return rc.BAD_REQUEST

        attrs = self.flatten_dict(request.POST)
        for k,v in attrs.iteritems():
            setattr( inst, k, v )

        inst.save()
        return rc.ALL_OK

    def delete(self, request, *args, **kwargs):
        """DELETE: remove the matching object; a missing row yields NOT_HERE,
        an ambiguous match DUPLICATE_ENTRY."""
        if not self.has_model():
            raise NotImplementedError

        try:
            inst = self.queryset(request).get(*args, **kwargs)

            inst.delete()

            return rc.DELETED
        except self.model.MultipleObjectsReturned:
            return rc.DUPLICATE_ENTRY
        except self.model.DoesNotExist:
            return rc.NOT_HERE
class AnonymousBaseHandler(BaseHandler):
    """
    Anonymous handler: serves unauthenticated callers, and by default
    only allows read-only (GET) access.
    """
    is_anonymous = True
    allowed_methods = ('GET',)
| Python |
import sys, inspect
from django.http import (HttpResponse, Http404, HttpResponseNotAllowed,
HttpResponseForbidden, HttpResponseServerError)
from django.views.debug import ExceptionReporter
from django.views.decorators.vary import vary_on_headers
from django.conf import settings
from django.core.mail import send_mail, EmailMessage
from django.db.models.query import QuerySet
from emitters import Emitter
from handler import typemapper
from doc import HandlerMethod
from authentication import NoAuthentication
from utils import coerce_put_post, FormValidationError, HttpStatusCode
from utils import rc, format_error, translate_mime, MimerDataException
class Resource(object):
    """
    Resource. Create one for your URL mappings, just
    like you would with Django. Takes one argument,
    the handler. The second argument is optional, and
    is an authentication handler. If not specified,
    `NoAuthentication` will be used by default.
    """
    # HTTP verb -> handler method name.
    callmap = { 'GET': 'read', 'POST': 'create',
                'PUT': 'update', 'DELETE': 'delete' }

    def __init__(self, handler, authentication=None):
        if not callable(handler):
            raise AttributeError, "Handler not callable."

        # Instantiate the handler class once; it is shared across requests.
        self.handler = handler()

        if not authentication:
            self.authentication = NoAuthentication()
        else:
            self.authentication = authentication

        # Erroring
        # Error-reporting switches, overridable via Django settings.
        self.email_errors = getattr(settings, 'PISTON_EMAIL_ERRORS', True)
        self.display_errors = getattr(settings, 'PISTON_DISPLAY_ERRORS', True)
        self.stream = getattr(settings, 'PISTON_STREAM_OUTPUT', False)

    def determine_emitter(self, request, *args, **kwargs):
        """
        Function for determening which emitter to use
        for output. It lives here so you can easily subclass
        `Resource` in order to change how emission is detected.

        You could also check for the `Accept` HTTP header here,
        since that pretty much makes sense. Refer to `Mimer` for
        that as well.
        """
        # URL kwarg (?P<emitter_format>) wins; ?format= is the fallback,
        # defaulting to JSON.
        em = kwargs.pop('emitter_format', None)

        if not em:
            em = request.GET.get('format', 'json')

        return em

    @property
    def anonymous(self):
        """
        Gets the anonymous handler. Also tries to grab a class
        if the `anonymous` value is a string, so that we can define
        anonymous handlers that aren't defined yet (like, when
        you're subclassing your basehandler into an anonymous one.)
        """
        if hasattr(self.handler, 'anonymous'):
            anon = self.handler.anonymous

            if callable(anon):
                return anon

            # String value: resolve it against the registered handlers.
            for klass in typemapper.keys():
                if anon == klass.__name__:
                    return klass

        return None

    @vary_on_headers('Authorization')
    def __call__(self, request, *args, **kwargs):
        """
        NB: Sends a `Vary` header so we don't cache requests
        that are different (OAuth stuff in `Authorization` header.)
        """
        rm = request.method.upper()

        # Django's internal mechanism doesn't pick up
        # PUT request, so we trick it a little here.
        if rm == "PUT":
            coerce_put_post(request)

        # Pick the authenticated handler or, failing that, the anonymous
        # fallback (if it allows this verb); otherwise challenge.
        if not self.authentication.is_authenticated(request):
            if self.anonymous and \
                rm in self.anonymous.allowed_methods:

                handler = self.anonymous()
                anonymous = True
            else:
                return self.authentication.challenge()
        else:
            handler = self.handler
            anonymous = handler.is_anonymous

        # Translate nested datastructs into `request.data` here.
        if rm in ('POST', 'PUT'):
            try:
                translate_mime(request)
            except MimerDataException:
                return rc.BAD_REQUEST

        if not rm in handler.allowed_methods:
            return HttpResponseNotAllowed(handler.allowed_methods)

        meth = getattr(handler, self.callmap.get(rm), None)

        if not meth:
            raise Http404

        # Support emitter both through (?P<emitter_format>) and ?format=emitter.
        em_format = self.determine_emitter(request, *args, **kwargs)
        kwargs.pop('emitter_format', None)

        # Clean up the request object a bit, since we might
        # very well have `oauth_`-headers in there, and we
        # don't want to pass these along to the handler.
        request = self.cleanup_request(request)

        try:
            result = meth(request, *args, **kwargs)
        except FormValidationError, e:
            # TODO: Use rc.BAD_REQUEST here
            return HttpResponse("Bad Request: %s" % e.form.errors, status=400)
        except TypeError, e:
            # A TypeError from the handler is assumed to mean the URLconf
            # passed arguments the handler method doesn't accept.
            result = rc.BAD_REQUEST
            hm = HandlerMethod(meth)
            sig = hm.get_signature()

            msg = 'Method signature does not match.\n\n'

            if sig:
                msg += 'Signature should be: %s' % sig
            else:
                msg += 'Resource does not expect any parameters.'

            if self.display_errors:
                msg += '\n\nException was: %s' % str(e)

            result.content = format_error(msg)
        except HttpStatusCode, e:
            #result = e ## why is this being passed on and not just dealt with now?
            return e.response
        except Exception, e:
            """
            On errors (like code errors), we'd like to be able to
            give crash reports to both admins and also the calling
            user. There's two setting parameters for this:

            Parameters::
             - `PISTON_EMAIL_ERRORS`: Will send a Django formatted
               error email to people in `settings.ADMINS`.
             - `PISTON_DISPLAY_ERRORS`: Will return a simple traceback
               to the caller, so he can tell you what error they got.

            If `PISTON_DISPLAY_ERRORS` is not enabled, the caller will
            receive a basic "500 Internal Server Error" message.
            """
            exc_type, exc_value, tb = sys.exc_info()
            rep = ExceptionReporter(request, exc_type, exc_value, tb.tb_next)
            if self.email_errors:
                self.email_exception(rep)
            if self.display_errors:
                return HttpResponseServerError(
                    format_error('\n'.join(rep.format_exception())))
            else:
                raise

        # Serialize the handler's result with the chosen emitter.
        emitter, ct = Emitter.get(em_format)
        fields = handler.fields
        if hasattr(handler, 'list_fields') and (
                isinstance(result, list) or isinstance(result, QuerySet)):
            fields = handler.list_fields

        srl = emitter(result, typemapper, handler, fields, anonymous)

        try:
            """
            Decide whether or not we want a generator here,
            or we just want to buffer up the entire result
            before sending it to the client. Won't matter for
            smaller datasets, but larger will have an impact.
            """
            if self.stream: stream = srl.stream_render(request)
            else: stream = srl.render(request)

            resp = HttpResponse(stream, mimetype=ct)
            resp.streaming = self.stream

            return resp
        except HttpStatusCode, e:
            return e.response

    @staticmethod
    def cleanup_request(request):
        """
        Removes `oauth_` keys from various dicts on the
        request object, and returns the sanitized version.
        """
        for method_type in ('GET', 'PUT', 'POST', 'DELETE'):
            block = getattr(request, method_type, { })

            if True in [ k.startswith("oauth_") for k in block.keys() ]:
                sanitized = block.copy()

                for k in sanitized.keys():
                    if k.startswith("oauth_"):
                        sanitized.pop(k)

                setattr(request, method_type, sanitized)

        return request

    # --

    def email_exception(self, reporter):
        """Mail a Django-style HTML crash report to settings.ADMINS."""
        subject = "Piston crash report"
        html = reporter.get_traceback_html()

        message = EmailMessage(settings.EMAIL_SUBJECT_PREFIX+subject,
                               html, settings.SERVER_EMAIL,
                               [ admin[1] for admin in settings.ADMINS ])

        message.content_subtype = 'html'
        message.send(fail_silently=True)
| Python |
from __future__ import generators
import decimal, re, inspect
import copy
try:
# yaml isn't standard with python. It shouldn't be required if it
# isn't used.
import yaml
except ImportError:
yaml = None
# Fallback since `any` isn't in Python <2.5
try:
    any
except NameError:
    def any(iterable):
        """Backport of the builtin any() for Python < 2.5."""
        for element in iterable:
            if element:
                return True
        return False
from django.db.models.query import QuerySet
from django.db.models import Model, permalink
from django.utils import simplejson
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.encoding import smart_unicode
from django.core.urlresolvers import reverse, NoReverseMatch
from django.core.serializers.json import DateTimeAwareJSONEncoder
from django.http import HttpResponse
from django.core import serializers
from utils import HttpStatusCode, Mimer
try:
import cStringIO as StringIO
except ImportError:
import StringIO
try:
import cPickle as pickle
except ImportError:
import pickle
# Allow people to change the reverser (default `permalink`).
# Swap this for a custom callable to control how 'resource_uri' entries
# are reversed into URLs.
reverser = permalink
class Emitter(object):
"""
Super emitter. All other emitters should subclass
this one. It has the `construct` method which
conveniently returns a serialized `dict`. This is
usually the only method you want to use in your
emitter. See below for examples.
"""
EMITTERS = { }
def __init__(self, payload, typemapper, handler, fields=(), anonymous=True):
self.typemapper = typemapper
self.data = payload
self.handler = handler
self.fields = fields
self.anonymous = anonymous
if isinstance(self.data, Exception):
raise
def method_fields(self, data, fields):
if not data:
return { }
has = dir(data)
ret = dict()
for field in fields:
if field in has and callable(field):
ret[field] = getattr(data, field)
return ret
def construct(self):
"""
Recursively serialize a lot of types, and
in cases where it doesn't recognize the type,
it will fall back to Django's `smart_unicode`.
Returns `dict`.
"""
def _any(thing, fields=()):
"""
Dispatch, all types are routed through here.
"""
ret = None
if isinstance(thing, QuerySet):
ret = _qs(thing, fields=fields)
elif isinstance(thing, (tuple, list)):
ret = _list(thing)
elif isinstance(thing, dict):
ret = _dict(thing)
elif isinstance(thing, decimal.Decimal):
ret = str(thing)
elif isinstance(thing, Model):
ret = _model(thing, fields=fields)
elif isinstance(thing, HttpResponse):
raise HttpStatusCode(thing)
elif inspect.isfunction(thing):
if not inspect.getargspec(thing)[0]:
ret = _any(thing())
elif hasattr(thing, '__emittable__'):
f = thing.__emittable__
if inspect.ismethod(f) and len(inspect.getargspec(f)[0]) == 1:
ret = _any(f())
else:
ret = smart_unicode(thing, strings_only=True)
return ret
def _fk(data, field):
"""
Foreign keys.
"""
return _any(getattr(data, field.name))
def _related(data, fields=()):
"""
Foreign keys.
"""
return [ _model(m, fields) for m in data.iterator() ]
def _m2m(data, field, fields=()):
"""
Many to many (re-route to `_model`.)
"""
return [ _model(m, fields) for m in getattr(data, field.name).iterator() ]
def _model(data, fields=()):
"""
Models. Will respect the `fields` and/or
`exclude` on the handler (see `typemapper`.)
"""
ret = { }
handler = self.in_typemapper(type(data), self.anonymous)
get_absolute_uri = False
if handler or fields:
v = lambda f: getattr(data, f.attname)
if not fields:
"""
Fields was not specified, try to find teh correct
version in the typemapper we were sent.
"""
mapped = self.in_typemapper(type(data), self.anonymous)
get_fields = set(mapped.fields)
exclude_fields = set(mapped.exclude).difference(get_fields)
if 'absolute_uri' in get_fields:
get_absolute_uri = True
if not get_fields:
get_fields = set([ f.attname.replace("_id", "", 1)
for f in data._meta.fields ])
# sets can be negated.
for exclude in exclude_fields:
if isinstance(exclude, basestring):
get_fields.discard(exclude)
elif isinstance(exclude, re._pattern_type):
for field in get_fields.copy():
if exclude.match(field):
get_fields.discard(field)
else:
get_fields = set(fields)
met_fields = self.method_fields(handler, get_fields)
for f in data._meta.local_fields:
if f.serialize and not any([ p in met_fields for p in [ f.attname, f.name ]]):
if not f.rel:
if f.attname in get_fields:
ret[f.attname] = _any(v(f))
get_fields.remove(f.attname)
else:
if f.attname[:-3] in get_fields:
ret[f.name] = _fk(data, f)
get_fields.remove(f.name)
for mf in data._meta.many_to_many:
if mf.serialize and mf.attname not in met_fields:
if mf.attname in get_fields:
ret[mf.name] = _m2m(data, mf)
get_fields.remove(mf.name)
# try to get the remainder of fields
for maybe_field in get_fields:
if isinstance(maybe_field, (list, tuple)):
model, fields = maybe_field
inst = getattr(data, model, None)
if inst:
if hasattr(inst, 'all'):
ret[model] = _related(inst, fields)
elif callable(inst):
if len(inspect.getargspec(inst)[0]) == 1:
ret[model] = _any(inst(), fields)
else:
ret[model] = _model(inst, fields)
elif maybe_field in met_fields:
# Overriding normal field which has a "resource method"
# so you can alter the contents of certain fields without
# using different names.
ret[maybe_field] = _any(met_fields[maybe_field](data))
else:
maybe = getattr(data, maybe_field, None)
if maybe:
if callable(maybe):
if len(inspect.getargspec(maybe)[0]) == 1:
ret[maybe_field] = _any(maybe())
else:
ret[maybe_field] = _any(maybe)
else:
handler_f = getattr(handler or self.handler, maybe_field, None)
if handler_f:
ret[maybe_field] = _any(handler_f(data))
else:
for f in data._meta.fields:
ret[f.attname] = _any(getattr(data, f.attname))
fields = dir(data.__class__) + ret.keys()
add_ons = [k for k in dir(data) if k not in fields]
for k in add_ons:
ret[k] = _any(getattr(data, k))
# resouce uri
if self.in_typemapper(type(data), self.anonymous):
handler = self.in_typemapper(type(data), self.anonymous)
if hasattr(handler, 'resource_uri'):
url_id, fields = handler.resource_uri(data)
try:
ret['resource_uri'] = reverser( lambda: (url_id, fields) )()
except NoReverseMatch, e:
pass
if hasattr(data, 'get_api_url') and 'resource_uri' not in ret:
try: ret['resource_uri'] = data.get_api_url()
except: pass
# absolute uri
if hasattr(data, 'get_absolute_url') and get_absolute_uri:
try: ret['absolute_uri'] = data.get_absolute_url()
except: pass
return ret
def _qs(data, fields=()):
"""
Querysets.
"""
return [ _any(v, fields) for v in data ]
def _list(data):
"""
Lists.
"""
return [ _any(v) for v in data ]
def _dict(data):
"""
Dictionaries.
"""
return dict([ (k, _any(v)) for k, v in data.iteritems() ])
# Kickstart the seralizin'.
return _any(self.data, self.fields)
def in_typemapper(self, model, anonymous):
for klass, (km, is_anon) in self.typemapper.iteritems():
if model is km and is_anon is anonymous:
return klass
def render(self):
"""
This super emitter does not implement `render`,
this is a job for the specific emitter below.
"""
raise NotImplementedError("Please implement render.")
def stream_render(self, request, stream=True):
"""
Tells our patched middleware not to look
at the contents, and returns a generator
rather than the buffered string. Should be
more memory friendly for large datasets.
"""
yield self.render(request)
@classmethod
def get(cls, format):
    """
    Gets an emitter, returns the class and a content-type.

    Raises ValueError if nothing is registered under `format`.
    """
    try:
        # EAFP single lookup: dict.has_key is deprecated, and the old
        # has_key()+get() pair looked the key up twice.
        return cls.EMITTERS[format]
    except KeyError:
        raise ValueError("No emitters found for type %s" % format)
@classmethod
def register(cls, name, klass, content_type='text/plain'):
    """
    Register an emitter.

    Parameters::
     - `name`: The name of the emitter ('json', 'xml', 'yaml', ...)
     - `klass`: The emitter class.
     - `content_type`: The content type to serve response as.
    """
    entry = (klass, content_type)
    cls.EMITTERS[name] = entry
@classmethod
def unregister(cls, name):
    """
    Remove (and return) the emitter registered under `name`, or None.
    Useful if you don't want to provide output in one of the
    built-in emitters.
    """
    return cls.EMITTERS.pop(name, None)
class XMLEmitter(Emitter):
    """Emitter producing an XML document rooted at <response>."""
    def _to_xml(self, xml, data):
        # Mappings become nested elements named after their keys,
        # sequences become repeated <resource> elements, anything else
        # is emitted as character data.
        if isinstance(data, dict):
            for key, value in data.iteritems():
                xml.startElement(key, {})
                self._to_xml(xml, value)
                xml.endElement(key)
        elif isinstance(data, (list, tuple)):
            for entry in data:
                xml.startElement("resource", {})
                self._to_xml(xml, entry)
                xml.endElement("resource")
        else:
            xml.characters(smart_unicode(data))

    def render(self, request):
        buf = StringIO.StringIO()
        generator = SimplerXMLGenerator(buf, "utf-8")

        generator.startDocument()
        generator.startElement("response", {})
        self._to_xml(generator, self.construct())
        generator.endElement("response")
        generator.endDocument()

        return buf.getvalue()
# Register the XML emitter; incoming text/xml bodies are accepted but
# not deserialized (the no-op lambda discards them).
Emitter.register('xml', XMLEmitter, 'text/xml; charset=utf-8')
Mimer.register(lambda *a: None, ('text/xml',))
class JSONEmitter(Emitter):
    """
    JSON emitter, understands timestamps.
    Also supports JSONP via a `callback` GET parameter.
    """
    def render(self, request):
        callback = request.GET.get('callback')
        serialized = simplejson.dumps(self.construct(),
                cls=DateTimeAwareJSONEncoder, ensure_ascii=False, indent=4)

        if callback:
            # JSONP: wrap the payload in the caller-supplied function name.
            return '%s(%s)' % (callback, serialized)

        return serialized
# Register the JSON emitter; simplejson deserializes incoming JSON bodies.
Emitter.register('json', JSONEmitter, 'application/json; charset=utf-8')
Mimer.register(simplejson.loads, ('application/json',))
class YAMLEmitter(Emitter):
    """
    YAML emitter. `safe_dump` keeps Python-specific type tags out of
    the output so non-Python consumers can read it.
    """
    def render(self, request):
        structure = self.construct()
        return yaml.safe_dump(structure)
if yaml: # Only register yaml if it was imported successfully.
    Emitter.register('yaml', YAMLEmitter, 'application/x-yaml; charset=utf-8')
    # NOTE(review): yaml.load without a SafeLoader can instantiate
    # arbitrary Python objects from an untrusted request body —
    # yaml.safe_load would be the safer deserializer here.
    Mimer.register(yaml.load, ('application/x-yaml',))
class PickleEmitter(Emitter):
    """
    Emitter that returns the payload Python-pickled.
    """
    def render(self, request):
        payload = self.construct()
        return pickle.dumps(payload)
# Register pickle support.
# NOTE(review): registering pickle.loads as a Mimer deserializes
# untrusted request bodies with pickle, which permits arbitrary code
# execution (this is CVE-2011-4103 in django-piston) — confirm this
# endpoint is only reachable by trusted clients.
Emitter.register('pickle', PickleEmitter, 'application/python-pickle')
Mimer.register(pickle.loads, ('application/python-pickle',))
class DjangoEmitter(Emitter):
    """
    Emitter for the Django serialized format.
    """
    def render(self, request, format='xml'):
        # Pass HttpResponse objects straight through, echo scalar
        # payloads, and let django.core.serializers handle the rest.
        data = self.data
        if isinstance(data, HttpResponse):
            return data
        if isinstance(data, (int, str)):
            return data
        return serializers.serialize(format, data, indent=True)
Emitter.register('django', DjangoEmitter, 'text/xml; charset=utf-8')
# ---------------------------------------------------------------- module boundary
import urllib
from django.db import models
from django.contrib.auth.models import User
from django.contrib import admin
from django.conf import settings
from django.core.mail import send_mail, mail_admins
from django.template import loader
from managers import TokenManager, ConsumerManager, ResourceManager, KEY_SIZE, SECRET_SIZE
# Lifecycle states for a Consumer application; 'pending' is the model
# default until the registration is accepted or canceled.
CONSUMER_STATES = (
    ('pending', 'Pending approval'),
    ('accepted', 'Accepted'),
    ('canceled', 'Canceled'),
)
class Nonce(models.Model):
    """
    A nonce value seen for a given (token, consumer) pair, stored so
    replayed OAuth requests can be detected and rejected.
    """
    token_key = models.CharField(max_length=KEY_SIZE)
    consumer_key = models.CharField(max_length=KEY_SIZE)
    key = models.CharField(max_length=255)

    def __unicode__(self):
        return u"Nonce %s for %s" % (self.key, self.consumer_key)

admin.site.register(Nonce)
class Resource(models.Model):
    """
    A named API resource identified by its url.

    `is_readonly` presumably marks resources not writable through the
    API — enforcement happens elsewhere; TODO confirm against callers.
    """
    name = models.CharField(max_length=255)
    url = models.TextField(max_length=2047)
    is_readonly = models.BooleanField(default=True)

    objects = ResourceManager()

    def __unicode__(self):
        return u"Resource %s with url %s" % (self.name, self.url)

admin.site.register(Resource)
class Consumer(models.Model):
name = models.CharField(max_length=255)
description = models.TextField()
key = models.CharField(max_length=KEY_SIZE)
secret = models.CharField(max_length=SECRET_SIZE)
status = models.CharField(max_length=16, choices=CONSUMER_STATES, default='pending')
user = models.ForeignKey(User, null=True, blank=True, related_name='consumers')
objects = ConsumerManager()
def __unicode__(self):
return u"Consumer %s with key %s" % (self.name, self.key)
def save(self, **kwargs):
super(Consumer, self).save(**kwargs)
if self.id and self.user:
subject = "API Consumer"
rcpt = [ self.user.email, ]
if self.status == "accepted":
template = "api/mails/consumer_accepted.txt"
subject += " was accepted!"
elif self.status == "canceled":
template = "api/mails/consumer_canceled.txt"
subject += " has been canceled"
else:
template = "api/mails/consumer_pending.txt"
subject += " application received"
for admin in settings.ADMINS:
bcc.append(admin[1])
body = loader.render_to_string(template,
{ 'consumer': self, 'user': self.user })
send_mail(subject, body, settings.DEFAULT_FROM_EMAIL,
rcpt, fail_silently=True)
if self.status == 'pending':
mail_admins(subject, body, fail_silently=True)
if settings.DEBUG:
print "Mail being sent, to=%s" % rcpt
print "Subject: %s" % subject
print body
admin.site.register(Consumer)
class Token(models.Model):
    """
    An OAuth token (request or access) issued to a consumer, optionally
    bound to the user who approved it.
    """
    REQUEST = 1
    ACCESS = 2
    TOKEN_TYPES = ((REQUEST, u'Request'), (ACCESS, u'Access'))

    key = models.CharField(max_length=KEY_SIZE)
    secret = models.CharField(max_length=SECRET_SIZE)
    token_type = models.IntegerField(choices=TOKEN_TYPES)
    timestamp = models.IntegerField()  # presumably seconds since epoch — TODO confirm against TokenManager callers
    is_approved = models.BooleanField(default=False)

    user = models.ForeignKey(User, null=True, blank=True, related_name='tokens')
    consumer = models.ForeignKey(Consumer)

    objects = TokenManager()

    def __unicode__(self):
        return u"%s Token %s for %s" % (self.get_token_type_display(), self.key, self.consumer)

    def to_string(self, only_key=False):
        # Serialize as the form-encoded oauth_token (and, unless
        # only_key, oauth_token_secret) pair that OAuth endpoints return.
        token_dict = {
            'oauth_token': self.key,
            'oauth_token_secret': self.secret
        }

        if only_key:
            del token_dict['oauth_token_secret']

        return urllib.urlencode(token_dict)

admin.site.register(Token)
# ---------------------------------------------------------------- module boundary
import cgi
import urllib
import time
import random
import urlparse
import hmac
import base64
VERSION = '1.0' # Hi Blaine!  (OAuth protocol version announced/enforced)
HTTP_METHOD = 'GET'  # default HTTP method for constructed requests
SIGNATURE_METHOD = 'PLAINTEXT'  # default signature method name
# Generic exception class
class OAuthError(RuntimeError):
    """
    Base exception for all OAuth failures; the human-readable reason is
    exposed through the `message` property.
    """
    def get_message(self):
        return self._message

    def set_message(self, message):
        self._message = message

    message = property(get_message, set_message)

    def __init__(self, message='OAuth error occurred.'):
        # BUGFIX: default message previously misspelled "occured".
        self.message = message
# optional WWW-Authenticate header (401 error)
def build_authenticate_header(realm=''):
    """Build the WWW-Authenticate header dict for a 401 challenge."""
    header_value = 'OAuth realm="%s"' % realm
    return {'WWW-Authenticate': header_value}
# url escape
def escape(s):
    """Percent-encode `s` for OAuth: everything except '~' is escaped,
    including '/'."""
    return urllib.quote(s, '~')
# util function: current timestamp
# seconds since epoch (UTC)
def generate_timestamp():
    """Return the current time as integer seconds since the epoch."""
    now = time.time()
    return int(now)
# util function: nonce
# pseudorandom number
def generate_nonce(length=8):
    """Return a pseudo-random string of `length` decimal digits."""
    digits = [str(random.randint(0, 9)) for _ in range(length)]
    return ''.join(digits)
# OAuthConsumer is a data type that represents the identity of the Consumer
# via its shared secret with the Service Provider.
class OAuthConsumer(object):
    """Consumer identity: its key plus the shared secret."""
    key = None
    secret = None

    def __init__(self, key, secret):
        self.key, self.secret = key, secret
# OAuthToken is a data type that represents an End User via either an access
# or request token.
class OAuthToken(object):
    """
    An End User credential (request token or access token).

    key    -- the token itself
    secret -- the token secret
    """
    key = None
    secret = None

    def __init__(self, key, secret):
        self.key, self.secret = key, secret

    def to_string(self):
        """Serialize as a form-encoded key/secret pair."""
        pair = {'oauth_token': self.key, 'oauth_token_secret': self.secret}
        return urllib.urlencode(pair)

    @staticmethod
    def from_string(s):
        """Parse a token from e.g. 'oauth_token_secret=digg&oauth_token=digg'."""
        params = cgi.parse_qs(s, keep_blank_values=False)
        return OAuthToken(params['oauth_token'][0],
                params['oauth_token_secret'][0])

    def __str__(self):
        return self.to_string()
# OAuthRequest represents the request and can be serialized
class OAuthRequest(object):
    """
    One OAuth HTTP request: method, url and its oauth_*/extra
    parameters, with helpers to serialize and sign it.

    OAuth parameters:
        - oauth_consumer_key
        - oauth_token
        - oauth_signature_method
        - oauth_signature
        - oauth_timestamp
        - oauth_nonce
        - oauth_version
        ... any additional parameters, as defined by the Service Provider.
    """
    parameters = None # oauth parameters
    http_method = HTTP_METHOD
    http_url = None
    version = VERSION

    def __init__(self, http_method=HTTP_METHOD, http_url=None, parameters=None):
        self.http_method = http_method
        self.http_url = http_url
        self.parameters = parameters or {}

    def set_parameter(self, parameter, value):
        self.parameters[parameter] = value

    def get_parameter(self, parameter):
        """Return a parameter's value, raising OAuthError when absent."""
        try:
            return self.parameters[parameter]
        except KeyError:
            raise OAuthError('Parameter not found: %s' % parameter)

    def _get_timestamp_nonce(self):
        return (self.get_parameter('oauth_timestamp'),
                self.get_parameter('oauth_nonce'))

    # get any non-oauth parameters
    def get_nonoauth_parameters(self):
        """Return the parameters whose keys are not oauth_* ones."""
        parameters = {}
        for k, v in self.parameters.iteritems():
            # ignore oauth parameters
            if k.find('oauth_') < 0:
                parameters[k] = v
        return parameters

    # serialize as a header for an HTTPAuth request
    def to_header(self, realm=''):
        """Serialize as an Authorization header value."""
        auth_header = 'OAuth realm="%s"' % realm
        # add the oauth parameters
        if self.parameters:
            for k, v in self.parameters.iteritems():
                auth_header += ', %s="%s"' % (k, escape(str(v)))
        return {'Authorization': auth_header}

    # serialize as post data for a POST request
    def to_postdata(self):
        return '&'.join('%s=%s' % (escape(str(k)), escape(str(v)))
                for k, v in self.parameters.iteritems())

    # serialize as a url for a GET request
    def to_url(self):
        return '%s?%s' % (self.get_normalized_http_url(), self.to_postdata())

    def get_normalized_parameters(self):
        """
        Return the sorted, escaped key=value string that gets signed.

        BUGFIX: work on a copy of self.parameters — the old code aliased
        the dict and `del`eted any pre-existing oauth_signature from the
        request itself as a side effect.
        """
        params = dict(self.parameters)
        # the signature, if present, is never part of the signed string
        params.pop('oauth_signature', None)
        key_values = params.items()
        # sort lexicographically, first after key, then after value
        key_values.sort()
        # combine key value pairs in string and escape
        return '&'.join('%s=%s' % (escape(str(k)), escape(str(v)))
                for k, v in key_values)

    # just uppercases the http method
    def get_normalized_http_method(self):
        return self.http_method.upper()

    # parses the url and rebuilds it to be scheme://host/path
    def get_normalized_http_url(self):
        parts = urlparse.urlparse(self.http_url)
        return '%s://%s%s' % (parts[0], parts[1], parts[2]) # scheme, netloc, path

    # set the signature parameter to the result of build_signature
    def sign_request(self, signature_method, consumer, token):
        # set the signature method
        self.set_parameter('oauth_signature_method', signature_method.get_name())
        # set the signature
        self.set_parameter('oauth_signature',
                self.build_signature(signature_method, consumer, token))

    def build_signature(self, signature_method, consumer, token):
        # delegate to the strategy object
        return signature_method.build_signature(self, consumer, token)

    @staticmethod
    def from_request(http_method, http_url, headers=None, parameters=None, query_string=None):
        """Combine header, query-string and url parameters into a request;
        returns None when no parameters were found at all."""
        if parameters is None:
            parameters = {}

        # headers
        if headers and 'HTTP_AUTHORIZATION' in headers:
            auth_header = headers['HTTP_AUTHORIZATION']
            # BUGFIX: use find() — str.index() raises ValueError for a
            # non-OAuth Authorization header instead of skipping it.
            if auth_header.find('OAuth') > -1:
                try:
                    # get the parameters from the header
                    header_params = OAuthRequest._split_header(auth_header)
                    parameters.update(header_params)
                except:
                    raise OAuthError('Unable to parse OAuth parameters from Authorization header.')

        # GET or POST query string
        if query_string:
            query_params = OAuthRequest._split_url_string(query_string)
            parameters.update(query_params)

        # URL parameters
        param_str = urlparse.urlparse(http_url)[4] # query
        url_params = OAuthRequest._split_url_string(param_str)
        parameters.update(url_params)

        if parameters:
            return OAuthRequest(http_method, http_url, parameters)
        return None

    @staticmethod
    def from_consumer_and_token(oauth_consumer, token=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
        """Build an outgoing request with the standard oauth_* defaults."""
        if not parameters:
            parameters = {}

        defaults = {
            'oauth_consumer_key': oauth_consumer.key,
            'oauth_timestamp': generate_timestamp(),
            'oauth_nonce': generate_nonce(),
            'oauth_version': OAuthRequest.version,
        }
        # caller-supplied parameters win over the defaults
        defaults.update(parameters)
        parameters = defaults

        if token:
            parameters['oauth_token'] = token.key

        return OAuthRequest(http_method, http_url, parameters)

    @staticmethod
    def from_token_and_callback(token, callback=None, http_method=HTTP_METHOD, http_url=None, parameters=None):
        if not parameters:
            parameters = {}

        parameters['oauth_token'] = token.key
        if callback:
            parameters['oauth_callback'] = escape(callback)

        return OAuthRequest(http_method, http_url, parameters)

    # util function: turn Authorization: header into parameters, has to do some unescaping
    @staticmethod
    def _split_header(header):
        params = {}
        header = header.replace('OAuth ', '', 1)
        parts = header.split(',')
        for param in parts:
            # ignore realm parameter
            if param.find('realm') > -1:
                continue
            # remove whitespace
            param = param.strip()
            # split key-value
            param_parts = param.split('=', 1)
            # remove quotes and unescape the value
            params[param_parts[0]] = urllib.unquote(param_parts[1].strip('\"'))
        return params

    # util function: turn url string into parameters, has to do some unescaping
    @staticmethod
    def _split_url_string(param_str):
        parameters = cgi.parse_qs(param_str, keep_blank_values=False)
        for k, v in parameters.iteritems():
            parameters[k] = urllib.unquote(v[0])
        return parameters
# OAuthServer is a worker to check a requests validity against a data store
class OAuthServer(object):
    """
    Validates incoming OAuth requests against a data store and hands out
    request/access tokens.
    """
    timestamp_threshold = 300 # in seconds, five minutes
    version = VERSION
    signature_methods = None
    data_store = None

    def __init__(self, data_store=None, signature_methods=None):
        self.data_store = data_store
        self.signature_methods = signature_methods or {}

    def set_data_store(self, oauth_data_store):
        # BUGFIX: previously assigned the undefined name `data_store`,
        # raising NameError whenever this setter was called.
        self.data_store = oauth_data_store

    def get_data_store(self):
        return self.data_store

    def add_signature_method(self, signature_method):
        self.signature_methods[signature_method.get_name()] = signature_method
        return self.signature_methods

    # process a request_token request
    # returns the request token on success
    def fetch_request_token(self, oauth_request):
        try:
            # get the request token for authorization
            token = self._get_token(oauth_request, 'request')
        except OAuthError:
            # no token required for the initial token request
            version = self._get_version(oauth_request)
            consumer = self._get_consumer(oauth_request)
            self._check_signature(oauth_request, consumer, None)
            # fetch a new token
            token = self.data_store.fetch_request_token(consumer)
        return token

    # process an access_token request
    # returns the access token on success
    def fetch_access_token(self, oauth_request):
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        # exchange the (verified) request token for an access token
        token = self._get_token(oauth_request, 'request')
        self._check_signature(oauth_request, consumer, token)
        new_token = self.data_store.fetch_access_token(consumer, token)
        return new_token

    # verify an api call, checks all the parameters
    def verify_request(self, oauth_request):
        """Returns (consumer, token, non-oauth parameters) on success."""
        version = self._get_version(oauth_request)
        consumer = self._get_consumer(oauth_request)
        # get the access token
        token = self._get_token(oauth_request, 'access')
        self._check_signature(oauth_request, consumer, token)
        parameters = oauth_request.get_nonoauth_parameters()
        return consumer, token, parameters

    # authorize a request token
    def authorize_token(self, token, user):
        return self.data_store.authorize_request_token(token, user)

    # get the callback url
    def get_callback(self, oauth_request):
        return oauth_request.get_parameter('oauth_callback')

    # optional support for the authenticate header
    def build_authenticate_header(self, realm=''):
        return {'WWW-Authenticate': 'OAuth realm="%s"' % realm}

    # verify the correct version request for this server
    def _get_version(self, oauth_request):
        try:
            version = oauth_request.get_parameter('oauth_version')
        except:
            version = VERSION
        if version and version != self.version:
            raise OAuthError('OAuth version %s not supported.' % str(version))
        return version

    # figure out the signature with some defaults
    def _get_signature_method(self, oauth_request):
        try:
            signature_method = oauth_request.get_parameter('oauth_signature_method')
        except:
            signature_method = SIGNATURE_METHOD
        try:
            # get the signature method object
            signature_method = self.signature_methods[signature_method]
        except:
            signature_method_names = ', '.join(self.signature_methods.keys())
            raise OAuthError('Signature method %s not supported try one of the following: %s' % (signature_method, signature_method_names))
        return signature_method

    def _get_consumer(self, oauth_request):
        consumer_key = oauth_request.get_parameter('oauth_consumer_key')
        if not consumer_key:
            raise OAuthError('Invalid consumer key.')
        consumer = self.data_store.lookup_consumer(consumer_key)
        if not consumer:
            raise OAuthError('Invalid consumer.')
        return consumer

    # try to find the token for the provided request token key
    def _get_token(self, oauth_request, token_type='access'):
        token_field = oauth_request.get_parameter('oauth_token')
        token = self.data_store.lookup_token(token_type, token_field)
        if not token:
            raise OAuthError('Invalid %s token: %s' % (token_type, token_field))
        return token

    def _check_signature(self, oauth_request, consumer, token):
        timestamp, nonce = oauth_request._get_timestamp_nonce()
        self._check_timestamp(timestamp)
        self._check_nonce(consumer, token, nonce)
        signature_method = self._get_signature_method(oauth_request)
        try:
            signature = oauth_request.get_parameter('oauth_signature')
        except:
            raise OAuthError('Missing signature.')
        # validate the signature
        valid_sig = signature_method.check_signature(oauth_request, consumer, token, signature)
        if not valid_sig:
            key, base = signature_method.build_signature_base_string(oauth_request, consumer, token)
            raise OAuthError('Invalid signature. Expected signature base string: %s' % base)
        # BUGFIX: removed a dead `build_signature` call that sat after the
        # raise and could never have any effect.

    def _check_timestamp(self, timestamp):
        # verify that timestamp is recentish
        # NOTE(review): only past drift is rejected; timestamps from the
        # future pass unchecked.
        timestamp = int(timestamp)
        now = int(time.time())
        lapsed = now - timestamp
        if lapsed > self.timestamp_threshold:
            raise OAuthError('Expired timestamp: given %d and now %s has a greater difference than threshold %d' % (timestamp, now, self.timestamp_threshold))

    def _check_nonce(self, consumer, token, nonce):
        # verify that the nonce is uniqueish
        nonce = self.data_store.lookup_nonce(consumer, token, nonce)
        if nonce:
            raise OAuthError('Nonce already used: %s' % str(nonce))
# OAuthClient is a worker to attempt to execute a request
class OAuthClient(object):
    """Abstract client bound to one consumer and one token."""
    consumer = None
    token = None

    def __init__(self, oauth_consumer, oauth_token):
        self.consumer = oauth_consumer
        self.token = oauth_token

    def get_consumer(self):
        return self.consumer

    def get_token(self):
        return self.token

    def fetch_request_token(self, oauth_request):
        """-> OAuthToken"""
        raise NotImplementedError

    def fetch_access_token(self, oauth_request):
        """-> OAuthToken"""
        raise NotImplementedError

    def access_resource(self, oauth_request):
        """-> some protected resource"""
        raise NotImplementedError
# OAuthDataStore is a database abstraction used to lookup consumers and tokens
class OAuthDataStore(object):
    """Storage interface: every method must be overridden by a backend."""
    def lookup_consumer(self, key):
        """-> OAuthConsumer"""
        raise NotImplementedError

    def lookup_token(self, oauth_consumer, token_type, token_token):
        """-> OAuthToken"""
        raise NotImplementedError

    def lookup_nonce(self, oauth_consumer, oauth_token, nonce, timestamp):
        """-> OAuthToken"""
        raise NotImplementedError

    def fetch_request_token(self, oauth_consumer):
        """-> OAuthToken"""
        raise NotImplementedError

    def fetch_access_token(self, oauth_consumer, oauth_token):
        """-> OAuthToken"""
        raise NotImplementedError

    def authorize_request_token(self, oauth_token, user):
        """-> OAuthToken"""
        raise NotImplementedError
# OAuthSignatureMethod is a strategy class that implements a signature method
class OAuthSignatureMethod(object):
    """Strategy interface for building and checking request signatures."""
    def get_name(self):
        """-> str"""
        raise NotImplementedError

    def build_signature_base_string(self, oauth_request, oauth_consumer, oauth_token):
        """-> (str key, str raw)"""
        raise NotImplementedError

    def build_signature(self, oauth_request, oauth_consumer, oauth_token):
        """-> str"""
        raise NotImplementedError

    def check_signature(self, oauth_request, consumer, token, signature):
        """True when a freshly built signature equals the supplied one."""
        expected = self.build_signature(oauth_request, consumer, token)
        return expected == signature
class OAuthSignatureMethod_HMAC_SHA1(OAuthSignatureMethod):
def get_name(self):
return 'HMAC-SHA1'
def build_signature_base_string(self, oauth_request, consumer, token):
sig = (
escape(oauth_request.get_normalized_http_method()),
escape(oauth_request.get_normalized_http_url()),
escape(oauth_request.get_normalized_parameters()),
)
key = '%s&' % escape(consumer.secret)
if token:
key += escape(token.secret)
raw = '&'.join(sig)
return key, raw
def build_signature(self, oauth_request, consumer, token):
# build the base signature string
key, raw = self.build_signature_base_string(oauth_request, consumer, token)
# hmac object
try:
import hashlib # 2.5
hashed = hmac.new(key, raw, hashlib.sha1)
except:
import sha # deprecated
hashed = hmac.new(key, raw, sha)
# calculate the digest base 64
return base64.b64encode(hashed.digest())
class OAuthSignatureMethod_PLAINTEXT(OAuthSignatureMethod):
def get_name(self):
return 'PLAINTEXT'
def build_signature_base_string(self, oauth_request, consumer, token):
# concatenate the consumer key and secret
sig = escape(consumer.secret) + '&'
if token:
sig = sig + escape(token.secret)
return sig
def build_signature(self, oauth_request, consumer, token):
return self.build_signature_base_string(oauth_request, consumer, token)
# ---------------------------------------------------------------- module boundary
from django.db import models
from django.contrib.auth.models import User
KEY_SIZE = 18     # length of generated consumer/token keys
SECRET_SIZE = 32  # length of generated consumer/token secrets
class KeyManager(models.Manager):
    '''Add support for random key/secret generation
    '''
    def generate_random_codes(self):
        """Return a (key, secret) pair whose combination is not yet used."""
        key = User.objects.make_random_password(length=KEY_SIZE)
        while True:
            secret = User.objects.make_random_password(length=SECRET_SIZE)
            # re-roll the secret until the (key, secret) pair is unused
            if not self.filter(key__exact=key, secret__exact=secret).count():
                return key, secret
class ConsumerManager(KeyManager):
    """Manager for Consumer models."""
    def create_consumer(self, name, description=None, user=None):
        """
        Shortcut to create a consumer with random key/secret.
        """
        consumer, created = self.get_or_create(name=name)

        if user:
            consumer.user = user
        if description:
            consumer.description = description

        # only freshly-created consumers get new credentials
        if created:
            consumer.key, consumer.secret = self.generate_random_codes()

        consumer.save()
        return consumer

    _default_consumer = None
class ResourceManager(models.Manager):
    _default_resource = None

    def get_default_resource(self, name):
        """
        Return the resource named `name`, caching it after the first
        lookup (add cache if you use a default resource).
        """
        if not self._default_resource:
            self._default_resource = self.get(name=name)
        return self._default_resource
class TokenManager(KeyManager):
    """Manager for Token models."""
    def create_token(self, consumer, token_type, timestamp, user=None):
        """
        Shortcut to create a token with random key/secret.
        """
        token, created = self.get_or_create(consumer=consumer,
                token_type=token_type,
                timestamp=timestamp,
                user=user)

        # only freshly-created tokens get new credentials
        if created:
            token.key, token.secret = self.generate_random_codes()
            token.save()

        return token
# ---------------------------------------------------------------- module boundary
import binascii
import oauth
from django.http import HttpResponse, HttpResponseRedirect
from django.contrib.auth.models import User, AnonymousUser
from django.contrib.auth.decorators import login_required
from django.template import loader
from django.contrib.auth import authenticate
from django.conf import settings
from django.core.urlresolvers import get_callable
from django.core.exceptions import ImproperlyConfigured
from django.shortcuts import render_to_response
from django.template import RequestContext
from piston import forms
class NoAuthentication(object):
    """
    Authentication handler that always returns
    True, so no authentication is needed, nor
    initiated (`challenge` is missing.)
    """
    def is_authenticated(self, request):
        return True
class HttpBasicAuthentication(object):
    """
    Basic HTTP authenticater. Synopsis:

    Authentication handlers must implement two methods:
     - `is_authenticated`: called when checking for authentication;
       receives a `request` object — set your `User` object on
       `request.user`, otherwise return something falsy.
     - `challenge`: when `is_authenticated` returns False, the result
       of this method is returned (usually an `HttpResponse` with
       challenge headers and a 401 status).
    """
    def __init__(self, auth_func=authenticate, realm='API'):
        self.auth_func = auth_func
        self.realm = realm

    def is_authenticated(self, request):
        auth_string = request.META.get('HTTP_AUTHORIZATION', None)

        if not auth_string:
            return False

        try:
            (authmeth, auth) = auth_string.split(" ", 1)
            if authmeth.lower() != 'basic':
                return False
            # "user:pass" is base64-encoded on the wire
            auth = auth.strip().decode('base64')
            (username, password) = auth.split(':', 1)
        except (ValueError, binascii.Error):
            return False

        request.user = self.auth_func(username=username, password=password) \
            or AnonymousUser()

        return request.user not in (False, None, AnonymousUser())

    def challenge(self):
        resp = HttpResponse("Authorization Required")
        resp['WWW-Authenticate'] = 'Basic realm="%s"' % self.realm
        resp.status_code = 401
        return resp
def load_data_store():
'''Load data store for OAuth Consumers, Tokens, Nonces and Resources
'''
path = getattr(settings, 'OAUTH_DATA_STORE', 'piston.store.DataStore')
# stolen from django.contrib.auth.load_backend
i = path.rfind('.')
module, attr = path[:i], path[i+1:]
try:
mod = __import__(module, {}, {}, attr)
except ImportError, e:
raise ImproperlyConfigured, 'Error importing OAuth data store %s: "%s"' % (module, e)
try:
cls = getattr(mod, attr)
except AttributeError:
raise ImproperlyConfigured, 'Module %s does not define a "%s" OAuth data store' % (module, attr)
return cls
# Set the datastore here.
# Resolves the configured data store *class* once at import time; it is
# instantiated per request in initialize_server_request().
oauth_datastore = load_data_store()
def initialize_server_request(request):
    """
    Shortcut for initialization: build (oauth_server, oauth_request)
    from a Django request. The server is None when no OAuth request
    could be parsed.
    """
    oauth_request = oauth.OAuthRequest.from_request(
            request.method, request.build_absolute_uri(),
            headers=request.META, parameters=dict(request.REQUEST.items()),
            query_string=request.environ.get('QUERY_STRING', ''))

    if not oauth_request:
        return None, oauth_request

    server = oauth.OAuthServer(oauth_datastore(oauth_request))
    server.add_signature_method(oauth.OAuthSignatureMethod_PLAINTEXT())
    server.add_signature_method(oauth.OAuthSignatureMethod_HMAC_SHA1())
    return server, oauth_request
def send_oauth_error(err=None):
    """
    Shortcut for sending an error: build a 401 response whose body is
    the error's message and whose headers carry the OAuth challenge.
    """
    # BUGFIX: the parameter defaults to None but err.message was
    # dereferenced unconditionally, crashing with AttributeError.
    if err is None:
        err = oauth.OAuthError()

    response = HttpResponse(err.message.encode('utf-8'))
    response.status_code = 401

    realm = 'OAuth'
    header = oauth.build_authenticate_header(realm=realm)

    for k, v in header.iteritems():
        response[k] = v

    return response
def oauth_request_token(request):
oauth_server, oauth_request = initialize_server_request(request)
if oauth_server is None:
return INVALID_PARAMS_RESPONSE
try:
token = oauth_server.fetch_request_token(oauth_request)
response = HttpResponse(token.to_string())
except oauth.OAuthError, err:
response = send_oauth_error(err)
return response
def oauth_auth_view(request, token, callback, params):
    """Default token-authorization page: render the approval form."""
    initial_data = {
        'oauth_token': token.key,
        'oauth_callback': callback,
    }
    form = forms.OAuthAuthenticationForm(initial=initial_data)

    return render_to_response('piston/authorize_token.html',
            { 'form': form }, RequestContext(request))
@login_required
def oauth_user_auth(request):
    # Token-authorization endpoint: GET shows the approval form (or the
    # view named by settings.OAUTH_AUTH_VIEW), POST records the user's
    # decision and redirects to the callback.
    oauth_server, oauth_request = initialize_server_request(request)

    if oauth_request is None:
        return INVALID_PARAMS_RESPONSE

    try:
        # the request token being authorized
        token = oauth_server.fetch_request_token(oauth_request)
    except oauth.OAuthError, err:
        return send_oauth_error(err)

    try:
        callback = oauth_server.get_callback(oauth_request)
    except:
        # no oauth_callback supplied; on POST we fall back to
        # settings.OAUTH_CALLBACK_VIEW below
        callback = None

    if request.method == "GET":
        params = oauth_request.get_normalized_parameters()

        oauth_view = getattr(settings, 'OAUTH_AUTH_VIEW', None)
        if oauth_view is None:
            return oauth_auth_view(request, token, callback, params)
        else:
            return get_callable(oauth_view)(request, token, callback, params)
    elif request.method == "POST":
        try:
            form = forms.OAuthAuthenticationForm(request.POST)
            if form.is_valid():
                # mark the token as approved for this user
                token = oauth_server.authorize_token(token, request.user)
                args = '?'+token.to_string(only_key=True)
            else:
                args = '?error=%s' % 'Access not granted by user.'

            if not callback:
                # no callback url: hand the token to the configured
                # callback view and return early (no redirect is built)
                callback = getattr(settings, 'OAUTH_CALLBACK_VIEW')
                return get_callable(callback)(request, token)

            response = HttpResponseRedirect(callback+args)
        except oauth.OAuthError, err:
            response = send_oauth_error(err)
    else:
        response = HttpResponse('Action not allowed.')

    return response
def oauth_access_token(request):
oauth_server, oauth_request = initialize_server_request(request)
if oauth_request is None:
return INVALID_PARAMS_RESPONSE
try:
token = oauth_server.fetch_access_token(oauth_request)
return HttpResponse(token.to_string())
except oauth.OAuthError, err:
return send_oauth_error(err)
INVALID_PARAMS_RESPONSE = send_oauth_error(oauth.OAuthError('Invalid request parameters.'))
class OAuthAuthentication(object):
    """
    OAuth authentication. Based on work by Leah Culver.
    """
    def __init__(self, realm='API'):
        self.realm = realm
        self.builder = oauth.build_authenticate_header

    def is_authenticated(self, request):
        """
        Checks whether a means of specifying authentication
        is provided, and if so, if it is a valid token.

        Read the documentation on `HttpBasicAuthentication`
        for more information about what goes on here.
        """
        if self.is_valid_request(request):
            try:
                consumer, token, parameters = self.validate_token(request)
            except oauth.OAuthError, err:
                # NOTE(review): debug print left in — the error response
                # is written to stdout, not returned to the client.
                print send_oauth_error(err)
                return False

            if consumer and token:
                request.user = token.user
                # consumer id is made available for throttling
                request.throttle_extra = token.consumer.id
                return True

        return False

    def challenge(self):
        """
        Returns a 401 response with a small bit on
        what OAuth is, and where to learn more about it.

        When this was written, browsers did not understand
        OAuth authentication on the browser side, and hence
        the helpful template we render. Maybe some day in the
        future, browsers will take care of this stuff for us
        and understand the 401 with the realm we give it.
        """
        response = HttpResponse()
        response.status_code = 401
        # NOTE(review): hard-coded 'API' ignores the realm passed to
        # __init__ — confirm whether self.realm was intended here.
        realm = 'API'

        for k, v in self.builder(realm=realm).iteritems():
            response[k] = v

        tmpl = loader.render_to_string('oauth/challenge.html',
            { 'MEDIA_URL': settings.MEDIA_URL })

        response.content = tmpl

        return response

    @staticmethod
    def is_valid_request(request):
        """
        Checks whether the required parameters are either in
        the http-authorization header sent by some clients,
        which is by the way the preferred method according to
        OAuth spec, but otherwise fall back to `GET` and `POST`.
        """
        must_have = [ 'oauth_'+s for s in [
            'consumer_key', 'token', 'signature',
            'signature_method', 'timestamp', 'nonce' ] ]

        # substring test against the raw header string, membership test
        # against the request parameter dict
        is_in = lambda l: all([ (p in l) for p in must_have ])

        auth_params = request.META.get("HTTP_AUTHORIZATION", "")
        req_params = request.REQUEST

        return is_in(auth_params) or is_in(req_params)

    @staticmethod
    def validate_token(request, check_timestamp=True, check_nonce=True):
        # NOTE(review): check_timestamp / check_nonce are accepted but
        # never forwarded — verify_request always checks both.
        oauth_server, oauth_request = initialize_server_request(request)
        return oauth_server.verify_request(oauth_request)
# ---------------------------------------------------------------- module boundary
from django.middleware.http import ConditionalGetMiddleware
from django.middleware.common import CommonMiddleware
def compat_middleware_factory(klass):
    """
    Class wrapper that skips `process_response` for responses flagged
    with a `streaming` attribute. Django has a bad habit of looking at
    the content, which would prematurely exhaust the data source when
    generators or buffers are used.
    """
    class compatwrapper(klass):
        def process_response(self, req, resp):
            if hasattr(resp, 'streaming'):
                return resp
            return klass.process_response(self, req, resp)
    return compatwrapper
# Streaming-safe drop-in replacements for Django's stock middleware.
ConditionalMiddlewareCompatProxy = compat_middleware_factory(ConditionalGetMiddleware)
CommonMiddlewareCompatProxy = compat_middleware_factory(CommonMiddleware)
# ---------------------------------------------------------------- module boundary
from django.http import HttpResponseNotAllowed, HttpResponseForbidden, HttpResponse, HttpResponseBadRequest
from django.core.urlresolvers import reverse
from django.core.cache import cache
from django import get_version as django_version
from decorator import decorator
from datetime import datetime, timedelta
__version__ = '0.2.2'


def get_version():
    """Return the piston release string (e.g. '0.2.2')."""
    return __version__
def format_error(error):
    """Prefix `error` with a piston/Django version banner for crash reports."""
    header = u"Piston/%s (Django %s) crash report:\n\n%s"
    return header % (get_version(), django_version(), error)
class rc_factory(object):
    """
    Factory of canned HTTP status responses: each attribute access
    (`rc.ALL_OK`, `rc.NOT_FOUND`, ...) produces a *fresh* HttpResponse,
    which keeps 0.2 backwards compatibility.
    """
    CODES = dict(ALL_OK = ('OK', 200),
                 CREATED = ('Created', 201),
                 DELETED = ('', 204), # 204 says "Don't send a body!"
                 BAD_REQUEST = ('Bad Request', 400),
                 FORBIDDEN = ('Forbidden', 401),
                 NOT_FOUND = ('Not Found', 404),
                 DUPLICATE_ENTRY = ('Conflict/Duplicate', 409),
                 NOT_HERE = ('Gone', 410),
                 NOT_IMPLEMENTED = ('Not Implemented', 501),
                 THROTTLED = ('Throttled', 503))

    def __getattr__(self, attr):
        """Build a new plain-text HttpResponse for the named status."""
        entry = self.CODES.get(attr)
        try:
            (reason, status) = entry
        except TypeError:
            # unknown code -> behave like a normal missing attribute
            raise AttributeError(attr)
        return HttpResponse(reason, content_type='text/plain', status=status)
rc = rc_factory()
class FormValidationError(Exception):
    """Raised when a submitted form fails validation; carries the bound form."""

    def __init__(self, form):
        self.form = form
class HttpStatusCode(Exception):
    """Raised to short-circuit a handler with a prepared HttpResponse."""

    def __init__(self, response):
        self.response = response
def validate(v_form, operation='POST'):
    """
    Decorator factory: bind `v_form` to the request's GET/POST data
    (chosen by `operation`) and only invoke the view when it validates;
    otherwise raise FormValidationError carrying the bound form.
    """
    @decorator
    def wrap(f, self, request, *a, **kwa):
        form = v_form(getattr(request, operation))
        if not form.is_valid():
            raise FormValidationError(form)
        return f(self, request, *a, **kwa)
    return wrap
def throttle(max_requests, timeout=60*60, extra=''):
    """
    Simple throttling decorator, caches
    the amount of requests made in cache.
    If used on a view where users are required to
    log in, the username is used, otherwise the
    IP address of the originating request is used.
    Parameters::
     - `max_requests`: The maximum number of requests
     - `timeout`: The timeout for the cache entry (default: 1 hour)
    """
    @decorator
    def wrap(f, self, request, *args, **kwargs):
        # Throttle key: username when authenticated, client IP otherwise.
        if request.user.is_authenticated():
            ident = request.user.username
        else:
            ident = request.META.get('REMOTE_ADDR', None)
            # NOTE(review): if REMOTE_ADDR is absent, ident is None and the
            # `ident += ...` below raises TypeError when throttle_extra is
            # set — confirm the deployment always provides REMOTE_ADDR.
        if hasattr(request, 'throttle_extra'):
            """
            Since we want to be able to throttle on a per-
            application basis, it's important that we realize
            that `throttle_extra` might be set on the request
            object. If so, append the identifier name with it.
            """
            ident += ':%s' % str(request.throttle_extra)
        if ident:
            """
            Preferrably we'd use incr/decr here, since they're
            atomic in memcached, but it's in django-trunk so we
            can't use it yet. If someone sees this after it's in
            stable, you can change it here.
            """
            ident += ':%s' % extra
            # ts_key holds the moment throttling was armed; while present
            # (and earlier than now+timeout) all requests get a 503 with a
            # wait estimate.
            now = datetime.now()
            ts_key = 'throttle:ts:%s' % ident
            timestamp = cache.get(ts_key)
            offset = now + timedelta(seconds=timeout)
            if timestamp and timestamp < offset:
                t = rc.THROTTLED
                wait = timeout - (offset-timestamp).seconds
                t.content = 'Throttled, wait %d seconds.' % wait
                return t
            # Count the request; once the count passes max_requests, arm the
            # throttle timestamp and reset the counter.
            count = cache.get(ident, 1)
            cache.set(ident, count+1)
            if count >= max_requests:
                cache.set(ts_key, offset, timeout)
                cache.set(ident, 1)
        return f(self, request, *args, **kwargs)
    return wrap
def coerce_put_post(request):
    """
    Django doesn't particularly understand REST.
    In case we send data over PUT, Django won't
    actually look at the data and load it. We need
    to twist its arm here.
    The try/except abomination here is due to a bug
    in mod_python. This should fix it.
    """
    if request.method == "PUT":
        try:
            # Temporarily masquerade as POST so Django parses the body,
            # then restore the real method.
            request.method = "POST"
            request._load_post_and_files()
            request.method = "PUT"
        except AttributeError:
            # mod_python makes `method` read-only; flip the underlying
            # REQUEST_METHOD instead.
            request.META['REQUEST_METHOD'] = 'POST'
            request._load_post_and_files()
            request.META['REQUEST_METHOD'] = 'PUT'
        # Expose the parsed body under the name callers expect for PUT.
        request.PUT = request.POST
class MimerDataException(Exception):
    """
    Raised when the request body cannot be deserialized as the content
    type the client declared.
    """
    pass
class Mimer(object):
    """Deserializes request bodies according to their declared mimetype."""

    # maps loader callable -> list of mimetype prefixes it handles
    TYPES = dict()

    def __init__(self, request):
        self.request = request

    def is_multipart(self):
        """True when the request declares a multipart content type."""
        ctype = self.content_type()
        if ctype is None:
            return False
        return ctype.lstrip().startswith('multipart')

    def loader_for_type(self, ctype):
        """
        Find the registered deserializer whose mimetype prefix matches
        `ctype`; None when nothing matches.
        """
        for loadee, mimes in Mimer.TYPES.iteritems():
            if any(ctype.startswith(mime) for mime in mimes):
                return loadee

    def content_type(self):
        """
        The request's content type, or None when it is (or defaults to)
        an ordinary form submission (application/x-www-form-urlencoded).
        """
        formencoded = "application/x-www-form-urlencoded"
        ctype = self.request.META.get('CONTENT_TYPE', formencoded)
        if ctype.startswith(formencoded):
            return None
        return ctype

    def translate(self):
        """
        Deserialize the body per its Content-type (JSON, YAML, XML,
        Pickle). The decoded payload lands on `request.data`, and
        `request.content_type` is set (None for form/multipart data).
        """
        ctype = self.content_type()
        self.request.content_type = ctype
        if not self.is_multipart() and ctype:
            loadee = self.loader_for_type(ctype)
            try:
                self.request.data = loadee(self.request.raw_post_data)
                # POST/PUT would be misleading once the body is on `data`.
                self.request.POST = self.request.PUT = dict()
            except (TypeError, ValueError):
                raise MimerDataException
        return self.request

    @classmethod
    def register(cls, loadee, types):
        cls.TYPES[loadee] = types

    @classmethod
    def unregister(cls, loadee):
        return cls.TYPES.pop(loadee)
def translate_mime(request):
    """
    Deserialize `request`'s body in place via Mimer.

    BUG FIX: the original assigned Mimer's result to a local name and
    returned None; the rebinding was dead code. The request is mutated
    either way, but returning it makes the helper usable in expressions
    and is backward compatible (existing callers ignore the return).
    """
    return Mimer(request).translate()
def require_mime(*mimes):
    """
    Decorator requiring a certain mimetype. There's a nifty
    helper called `require_extended` below which requires everything
    we support except for post-data via form.

    Accepts shorthand names ('json', 'yaml', 'xml', 'pickle') or full
    mimetypes; requests with any other content type get rc.BAD_REQUEST.
    """
    # Shorthand -> full mimetype. Hoisted out of the per-request wrapper
    # (the original rebuilt this table and the set on every request) and
    # the unused enumerate() index dropped.
    rewrite = { 'json':   'application/json',
                'yaml':   'application/x-yaml',
                'xml':    'text/xml',
                'pickle': 'application/python-pickle' }
    realmimes = set(rewrite.get(mime, mime) for mime in mimes)

    @decorator
    def wrap(f, self, request, *args, **kwargs):
        if Mimer(request).content_type() not in realmimes:
            return rc.BAD_REQUEST
        return f(self, request, *args, **kwargs)
    return wrap
require_extended = require_mime('json', 'yaml', 'xml', 'pickle')
| Python |
import inspect, handler
from piston.handler import typemapper
from piston.handler import handler_tracker
from django.core.urlresolvers import get_resolver, get_callable, get_script_prefix
from django.shortcuts import render_to_response
from django.template import RequestContext
def generate_doc(handler_cls):
    """
    Returns a `HandlerDocumentation` object
    for the given handler. Use this to generate
    documentation for your API.
    """
    # only piston handlers (built by HandlerMetaClass) are documentable
    if type(handler_cls) is not handler.HandlerMetaClass:
        raise ValueError("Give me handler, not %s" % type(handler_cls))
    return HandlerDocumentation(handler_cls)
class HandlerMethod(object):
    """Documentation wrapper around one CRUD method of a handler."""

    def __init__(self, method, stale=False):
        # `stale` flags methods inherited unchanged from the base handler.
        self.method = method
        self.stale = stale

    def iter_args(self):
        """Yield (argname, default-as-str-or-None), skipping framework args."""
        args, _, _, defaults = inspect.getargspec(self.method)
        for position, arg in enumerate(args):
            if arg in ('self', 'request', 'form'):
                continue
            # defaults align with the tail of args; index from the end
            from_end = len(args) - position
            if defaults and len(defaults) >= from_end:
                yield (arg, str(defaults[-from_end]))
            else:
                yield (arg, None)

    @property
    def signature(self, parse_optional=True):
        """Human-readable signature, e.g. 'a, b=<optional>'."""
        sig = ""
        for argn, argdef in self.iter_args():
            sig += argn
            if argdef:
                sig += '=%s' % argdef
            sig += ', '
        sig = sig.rstrip(", ")
        if parse_optional:
            return sig.replace("=None", "=<optional>")
        return sig

    @property
    def doc(self):
        return inspect.getdoc(self.method)

    @property
    def name(self):
        return self.method.__name__

    @property
    def http_name(self):
        # CRUD name -> HTTP verb (None for anything else)
        return {'read':   'GET',
                'create': 'POST',
                'delete': 'DELETE',
                'update': 'PUT'}.get(self.name)

    def __repr__(self):
        return "<Method: %s>" % self.name
class HandlerDocumentation(object):
    """Introspection helper that documents a single piston handler class."""

    def __init__(self, handler):
        self.handler = handler

    def get_methods(self, include_default=False):
        """
        Yield a HandlerMethod for each CRUD method the handler exposes.
        Methods inherited unchanged from the base `handler` module are
        "stale" and skipped unless `include_default` is set.
        """
        for method in "read create update delete".split():
            met = getattr(self.handler, method, None)
            if not met:
                continue
            stale = inspect.getmodule(met) is handler
            if not self.handler.is_anonymous:
                if met and (not stale or include_default):
                    yield HandlerMethod(met, stale)
            else:
                if not stale or met.__name__ == "read" \
                    and 'GET' in self.allowed_methods:
                    yield HandlerMethod(met, stale)

    def get_all_methods(self):
        return self.get_methods(include_default=True)

    @property
    def is_anonymous(self):
        # BUG FIX: the original returned `handler.is_anonymous`, where
        # `handler` is the imported *module* (which has no such attribute);
        # the flag lives on the wrapped handler class.
        return self.handler.is_anonymous

    def get_model(self):
        # BUG FIX: the original read `getattr(self, 'model', None)` — this
        # wrapper never has a `model`, so it always returned None; the model
        # is an attribute of the handler class.
        return getattr(self.handler, 'model', None)

    @property
    def has_anonymous(self):
        return self.handler.anonymous

    @property
    def anonymous(self):
        if self.has_anonymous:
            return HandlerDocumentation(self.handler.anonymous)

    @property
    def doc(self):
        return self.handler.__doc__

    @property
    def name(self):
        return self.handler.__name__

    @property
    def allowed_methods(self):
        return self.handler.allowed_methods

    def get_resource_uri_template(self):
        """
        URI template processor.
        See http://bitworking.org/projects/URI-Templates/
        """
        def _convert(template, params=()):
            """URI template converter"""
            paths = template % dict([p, "{%s}" % p] for p in params)
            return u'%s%s' % (get_script_prefix(), paths)
        try:
            resource_uri = self.handler.resource_uri()
            components = [None, [], {}]
            for i, value in enumerate(resource_uri):
                components[i] = value
            lookup_view, args, kwargs = components
            lookup_view = get_callable(lookup_view, True)
            possibilities = get_resolver(None).reverse_dict.getlist(lookup_view)
            for possibility, pattern in possibilities:
                for result, params in possibility:
                    if args:
                        if len(args) != len(params):
                            continue
                        return _convert(result, params)
                    else:
                        if set(kwargs.keys()) != set(params):
                            continue
                        return _convert(result, params)
        except Exception:
            # narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
            # are not swallowed; any resolution failure -> no template
            return None
    resource_uri_template = property(get_resource_uri_template)

    def __repr__(self):
        return u'<Documentation for "%s">' % self.name
def documentation_view(request):
    """
    Generic documentation view. Generates documentation
    from the handlers you've defined.
    """
    docs = [generate_doc(h) for h in handler_tracker]

    def _compare(doc1, doc2):
        # sort so handlers and their anonymous counterparts sit together
        return cmp(doc1.name.replace("Anonymous", ""),
                   doc2.name.replace("Anonymous", ""))

    docs.sort(_compare)
    return render_to_response('documentation.html',
        { 'docs': docs }, RequestContext(request))
| Python |
import oauth
from models import Nonce, Token, Consumer
class DataStore(oauth.OAuthDataStore):
    """Layer between Python OAuth and Django database."""

    def __init__(self, oauth_request):
        # Raw parameters of the incoming OAuth request; `scope` defaults to
        # 'all' when the client does not restrict it.
        self.signature = oauth_request.parameters.get('oauth_signature', None)
        self.timestamp = oauth_request.parameters.get('oauth_timestamp', None)
        self.scope = oauth_request.parameters.get('scope', 'all')

    def lookup_consumer(self, key):
        # Caches the consumer on self: fetch_request_token/fetch_access_token
        # assume this ran first (call order driven by the oauth library).
        try:
            self.consumer = Consumer.objects.get(key=key)
            return self.consumer
        except Consumer.DoesNotExist:
            return None

    def lookup_token(self, token_type, token):
        # Map the oauth library's string token types onto model constants.
        if token_type == 'request':
            token_type = Token.REQUEST
        elif token_type == 'access':
            token_type = Token.ACCESS
        try:
            # NOTE(review): the found token is stored as `request_token`
            # even when it is an access token — confirm this is intended.
            self.request_token = Token.objects.get(key=token,
                token_type=token_type)
            return self.request_token
        except Token.DoesNotExist:
            return None

    def lookup_nonce(self, oauth_consumer, oauth_token, nonce):
        # A nonce is fresh (-> None) the first time it is seen for this
        # consumer/token pair; a replay returns the stored key.
        if oauth_token is None:
            return None
        nonce, created = Nonce.objects.get_or_create(consumer_key=oauth_consumer.key,
            token_key=oauth_token.key,
            key=nonce)
        if created:
            return None
        else:
            return nonce.key

    def fetch_request_token(self, oauth_consumer):
        # Issue a new request token, but only for the consumer previously
        # resolved by lookup_consumer.
        if oauth_consumer.key == self.consumer.key:
            self.request_token = Token.objects.create_token(consumer=self.consumer,
                token_type=Token.REQUEST,
                timestamp=self.timestamp)
            return self.request_token
        return None

    def fetch_access_token(self, oauth_consumer, oauth_token):
        # Exchange an *approved* request token for an access token bound to
        # the same user.
        if oauth_consumer.key == self.consumer.key \
        and oauth_token.key == self.request_token.key \
        and self.request_token.is_approved:
            self.access_token = Token.objects.create_token(consumer=self.consumer,
                token_type=Token.ACCESS,
                timestamp=self.timestamp,
                user=self.request_token.user)
            return self.access_token
        return None

    def authorize_request_token(self, oauth_token, user):
        # Mark the pending request token as approved by `user`.
        if oauth_token.key == self.request_token.key:
            # authorize the request token in the store
            self.request_token.is_approved = True
            self.request_token.user = user
            self.request_token.save()
            return self.request_token
        return None
from M2Crypto import RSA
def getPublicKey(keys):
    """Return the PEM encoding of the public half of the RSA key pair."""
    return RSA.new_pub_key(keys.pub()).as_pem(cipher=None)
def encrypt(plaintext, keys):
    """Sign/encrypt `plaintext` with the private key (padding mode 1)."""
    return keys.private_encrypt(plaintext, 1)
def decrypt(encText, pubKey):
    """
    Decrypt/verify `encText` with the public key (padding mode 1).

    BUG FIX: the original ended with `return decl`, a typo for the local
    `dec`, so every call raised NameError instead of returning the
    decrypted text.
    """
    dec = pubKey.public_decrypt(encText, 1)
    return dec
def loadKeys(pem):
    # Thin wrapper: parse a PEM string into an M2Crypto RSA key object.
    return RSA.load_key_string(pem)
| Python |
#!/usr/bin/python
# Standard Django 1.x management entry point (django-admin wrapper).
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
| Python |
#!/usr/bin/python
# Standard Django 1.x management entry point (django-admin wrapper).
from django.core.management import execute_manager
try:
    import settings # Assumed to be in the same directory.
except ImportError:
    import sys
    sys.stderr.write("Error: Can't find the file 'settings.py' in the directory containing %r. It appears you've customized things.\nYou'll have to run django-admin.py, passing it your settings module.\n(If the file settings.py does indeed exist, it's causing an ImportError somehow.)\n" % __file__)
    sys.exit(1)

if __name__ == "__main__":
    execute_manager(settings)
| Python |
#!/usr/bin/env python
# Convenience launcher: run the dev server on port 8001 so it does not
# clash with the central register, which uses 8000.
import os
os.system("python manage.py runserver 127.0.0.1:8001")
| Python |
# URL configuration for the fluffyprovider project.
from django.conf.urls.defaults import *
from django.views.generic.simple import direct_to_template, redirect_to
from django.core.urlresolvers import reverse

# Uncomment the next two lines to enable the admin:
from django.contrib import admin
admin.autodiscover()

urlpatterns = patterns('',
    # piston API (file exchange between providers)
    url(r'^api/', include('fluffyprovider.api.urls')),
    url(r'^register$', 'fluffyprovider.provider.views.register', name='register_provider'),
    url(r'^send_file_list$', 'fluffyprovider.provider.views.send_file_list'),
    url(r'^file_list$', 'fluffyprovider.provider.views.get_file_list'),
    # proxy a file hosted on another provider (address + filename in the URL)
    url(r'^remote/(?P<address>.*?)/(?P<filename>.*)/$', 'fluffyprovider.provider.views.get_remote_file'),
    url(r'^login$', 'fluffyprovider.provider.views.login'),
    url(r'^logout$', 'fluffyprovider.provider.views.logout'),
    # Uncomment the admin/doc line below to enable admin documentation:
    (r'^admin/doc/', include('django.contrib.admindocs.urls')),
    # Uncomment the next line to enable the admin:
    (r'^admin/', include(admin.site.urls)),
    (r'^$', 'fluffyprovider.provider.views.index'),
)

from django.conf import settings

# Serve uploaded media straight from Django in development only.
if settings.DEBUG:
    urlpatterns += patterns('',
        (r'^site_media/(?P<path>.*)$', 'django.views.static.serve', {
            'document_root': settings.MEDIA_ROOT,
            'show_indexes': True
        }))
| Python |
#!/usr/bin/env python
# Convenience launcher: run the dev server on port 8001 so it does not
# clash with the central register, which uses 8000.
import os
os.system("python manage.py runserver 127.0.0.1:8001")
| Python |
# Django settings for fluffyprovider project.
import os
PROJECT_ROOT = os.path.abspath(os.path.dirname(__file__))

# Identity of this provider and the address of the central register.
CENTRAL_REGISTER = '127.0.0.1:8000'
PROVIDER_NAME = 'provider1'
PROVIDER_ADDRESS = '127.0.0.1:8001'

DEBUG = True
TEMPLATE_DEBUG = DEBUG

ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
)

MANAGERS = ADMINS

DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': os.path.join(PROJECT_ROOT, 'baza.db'), # Or path to database file if using sqlite3.
        'USER': '', # Not used with sqlite3.
        'PASSWORD': '', # Not used with sqlite3.
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}

# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Chicago'

# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'

SITE_ID = 1

# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True

# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True

# Absolute filesystem path to the directory that will hold user-uploaded files.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = os.path.join(PROJECT_ROOT, 'site_media')

# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = '/site_media/'

# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/media/'

LOGIN_URL = "/"

# Make this unique, and don't share it with anybody.
SECRET_KEY = '3u$_7_ltwa1_1c0+$%7=fa&-g!i9tz*aboh7_0x-#g4-5aa%_o'

# List of callables that know how to import templates from various sources.
TEMPLATE_LOADERS = (
    'django.template.loaders.filesystem.Loader',
    'django.template.loaders.app_directories.Loader',
    # 'django.template.loaders.eggs.Loader',
)

MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
)

ROOT_URLCONF = 'fluffyprovider.urls'

# BUG FIX: the trailing comma below was missing, so TEMPLATE_DIRS was a
# plain *string* (parenthesized expression), not the tuple Django expects.
TEMPLATE_DIRS = ( os.path.join(PROJECT_ROOT, 'templates' ),
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
)

# Bundled third-party apps (e.g. piston) live under lib/.
EXTERNAL_APPS_PATH = os.path.join(PROJECT_ROOT, "lib")
import sys
sys.path.append(EXTERNAL_APPS_PATH)

INSTALLED_APPS = (
    'provider',
    'piston',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.sites',
    'django.contrib.messages',
    'django.contrib.admin',
    'django.contrib.admindocs',
)

# Security: RSA key pair obtained from the central register at runtime
# (populated by provider.views.get_cert).
myKeys = None
| Python |
from django.db import models
class File(models.Model):
    # A file this provider advertises to the central register.
    path = models.CharField(max_length=100, unique=True)   # relative to MEDIA_ROOT; unique per provider
    author = models.CharField(max_length=100)
    description = models.CharField(max_length=500)

    def __unicode__(self):
        return self.path
| Python |
"""
This file demonstrates two different styles of tests (one doctest and one
unittest). These will both pass when you run "manage.py test".
Replace these with more appropriate tests for your application.
"""
from django.test import TestCase
class SimpleTest(TestCase):
    def test_basic_addition(self):
        """
        Tests that 1 + 1 always equals 2.
        """
        # `failUnlessEqual` is a deprecated alias; `assertEqual` is the
        # supported spelling (same semantics).
        self.assertEqual(1 + 1, 2)
__test__ = {"doctest": """
Another way to test that 1 + 1 is equal to 2.
>>> 1 + 1 == 2
True
"""}
| Python |
# Expose the File model in Django's admin.
from provider.models import File
from django.contrib import admin

admin.site.register(File)
| Python |
# Create your views here.
from django.http import HttpResponseRedirect, Http404, HttpResponse
from django.shortcuts import render_to_response
from django.template import RequestContext
from django.contrib import auth
from django.contrib.auth.decorators import login_required
from provider.models import File
from fluffyprovider import settings
import security
from fluffyprovider.settings import CENTRAL_REGISTER, PROVIDER_ADDRESS ,PROVIDER_NAME, MEDIA_ROOT
import urllib2
import datetime
import base64
import mimetypes
try:
import simplejson as json
except ImportError:
import json
REGISTER_API = "http://" + CENTRAL_REGISTER + "/api"
@login_required()
def register(request):
    # Announce this provider (name + address) to the central register with
    # an HTTP PUT, fetch our certificate/keys, and render the outcome.
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    url = "%s/%s/" % (REGISTER_API, 'sp_list')
    name = PROVIDER_NAME
    address = PROVIDER_ADDRESS
    datadict = {'name': name, 'address':address}
    data = json.dumps(datadict)
    print url
    apirequest = urllib2.Request(url, data=data)
    apirequest.add_header('Content-Type', 'application/json')
    # urllib2 only PUTs via this get_method override
    apirequest.get_method = lambda: 'PUT'
    result = opener.open(apirequest).read()
    # the result looks like this:
    #{
    #"status": "EXISTS", ## or "OK"
    #"name": "provider1",
    #"address": "127.0.0.1:8001"
    #}
    #
    resultobjs = json.loads(result)
    get_cert()
    return render_to_response('register.html',
        {'status': resultobjs['status'],'name':resultobjs['name']})
def get_cert():
    # Ask the central register to mint our key pair; on success the parsed
    # keys are stashed on settings.myKeys for later signing/encryption.
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    url = "%s/%s/" % (REGISTER_API, 'make_cert')
    name = PROVIDER_NAME
    address = PROVIDER_ADDRESS
    datadict = {'name': name, 'address':address}
    data = json.dumps(datadict)
    apirequest = urllib2.Request(url, data=data)
    apirequest.add_header('Content-Type', 'application/json')
    apirequest.get_method = lambda: 'PUT'
    result = opener.open(apirequest).read()
    resultobjs = json.loads(result)
    if resultobjs['status'] == "OK":
        keys = security.loadKeys(resultobjs['keys'])
        settings.myKeys = keys
@login_required()
def send_file_list(request):
    # PUT our full local File catalogue to the central register and render
    # the per-file statuses it reports back.
    opener = urllib2.build_opener()
    urllib2.install_opener(opener)
    url = "%s/%s/" % (REGISTER_API, 'file_list')
    filelist = []
    files = File.objects.all()
    for f in files:
        filelist.append({'name': f.path,
            'author': f.author,
            'description': f.description})
    datadict = {'service-provider': PROVIDER_NAME, 'file-list': filelist}
    data = json.dumps(datadict)
    print url
    apirequest = urllib2.Request(url, data=data)
    apirequest.add_header('Content-Type', 'application/json')
    apirequest.get_method = lambda: 'PUT'
    result = opener.open(apirequest).read()
    resultobjs = json.loads(result)
    # the result looks like this:
    #{
    #"file-list": [
    # {
    # "status": "EXISTS",
    # "sp": "provider1",
    # "name": "/home/stef/nesto.txt"
    # },
    # {
    # "status": "EXISTS",
    # "sp": "provider1",
    # "name": "/home/stef/nesto2.txt"
    # }
    #]
    #}
    return render_to_response('send_file_list.html',
        {"file_list":resultobjs["file-list"]})
@login_required()
def get_file_list(request):
opener = urllib2.build_opener()
urllib2.install_opener(opener)
url = "%s/%s/" % (REGISTER_API, 'file_list')
json_src = urllib2.urlopen(url).read()
file_list = json.loads(json_src)["file-list"]
print file_list
# json_src, tj. file_list izgleda ovako
#{
#"file-list": [
# {
# "author": "nesto",
# "sp": "provider1",
# "sp-address": "127.0.0.1:8001",
# "description": "Neka bolesna knjiga",
# "name": "/home/stef/nesto.txt"
# },
# {
# "author": "nesto 2",
# "sp": "provider1",
# "sp-address": "127.0.0.1:8001",
# "description": "Ova je jos bolja",
# "name": "/home/stef/nesto2.txt"
# }
#]
#}
local = []
remote = []
for file in file_list:
if file['sp'] == PROVIDER_NAME:
local.append(file)
else:
file['address']=file['sp-address']
remote.append(file)
return render_to_response('file_list.html',
{'local':local,'remote':remote})
def get_remote_file(request, address, filename):
opener = urllib2.build_opener()
urllib2.install_opener(opener)
url = "http://" + address + "/api/get_file/" # url apija od drugog sp-a
datadict = {'service-provider': PROVIDER_NAME,
'username': request.user.username,
'filename': filename}
data = json.dumps(datadict)
apirequest = urllib2.Request(url, data=data)
apirequest.add_header('Content-Type', 'application/json')
apirequest.get_method = lambda: 'PUT'
print url
json_src = opener.open(apirequest).read()
result = json.loads(json_src)
if result["status"] == "NOT FOUND":
return Http404
filecontent = base64.b64decode(result["data"]) #dekodiraj podatke
mimetype = mimetypes.guess_type(filename)[0] #odredi mimetype (uzimamo index 0 jer je 1 encoding)
return HttpResponse(content = filecontent, mimetype = mimetype) #salji fajl s ispravnim mimetypeom
def index(request):
    """Render the landing page."""
    # The original computed the current user's name here but never passed
    # it to the template; those dead locals are removed. The rendered
    # context is unchanged (RequestContext still exposes the user).
    return render_to_response('index.html', {}, context_instance=RequestContext(request))
def login(request):
    # Authenticate with POSTed credentials; on success log the user in and
    # render the index page, otherwise re-render it with an error flag.
    username = request.POST.get('username', '')
    password = request.POST.get('password', '')
    user = auth.authenticate(username=username, password=password)
    if user is not None and user.is_active:
        # Correct password, and the user is marked "active"
        auth.login(request, user)
        # Redirect to a success page.
        print "welcome"
        return render_to_response('index.html', {}, context_instance=RequestContext(request))
    else:
        # Show an error page
        return render_to_response("index.html",{"error":"error"}, context_instance=RequestContext(request))
def logout(request):
    # End the session and bounce back to the landing page.
    auth.logout(request)
    return HttpResponseRedirect("/")
| Python |
# -*- coding: utf-8 -*-
from django.contrib.auth.models import User
from django.core.exceptions import ObjectDoesNotExist
from piston.handler import AnonymousBaseHandler, BaseHandler
from piston.utils import rc
from fluffyprovider.settings import MEDIA_ROOT, PROVIDER_NAME
from fluffyprovider.provider.models import File
import base64
import os
try:
import simplejson as json
except ImportError:
import json
# http://domain/api/get-file
class RemoteFileHandler(AnonymousBaseHandler):
    """
    Anonymous piston handler: serves one of our local files,
    base64-encoded, to a peer service provider over PUT.
    """
    allowed_methods = ('PUT',)

    def update(self, request):
        """Return a JSON-serializable dict with the file (or NOT FOUND)."""
        postdata = json.loads(request.raw_post_data)
        sp_from = postdata['service-provider']
        user = postdata['username']
        filename = postdata['filename']
        found = True
        filecontent = None
        status = 'OK'
        # NOTE(review): `filename` comes from the remote peer and is joined
        # unchecked — '../' traversal can escape MEDIA_ROOT; consider
        # validating before serving.
        fullpath = os.path.join(MEDIA_ROOT, filename)
        try:
            File.objects.get(path = filename)
            # BUG FIX: the original `open(fullpath).read()` leaked the file
            # handle; a context manager guarantees it is closed.
            with open(fullpath) as fh:
                filecontent = fh.read()
        except Exception:
            # Narrowed from a bare `except:`; any miss (no DB row, file
            # unreadable) is reported as NOT FOUND rather than crashing.
            found = False
        if not found:
            status = 'NOT FOUND'
        if filecontent:
            filecontent = base64.b64encode(filecontent)
        return {'service-provider': PROVIDER_NAME,
            'username': user,
            'status': status,
            'filename': filename,
            'full path': fullpath,
            'data': filecontent }
| Python |
# -*- coding: utf-8 -*-
# API URL configuration: exposes the file-exchange endpoint via piston.
from django.conf.urls.defaults import *
from piston.authentication import HttpBasicAuthentication
from piston.resource import Resource

from fluffyprovider.api.handlers import *

remote_file_handler = Resource(RemoteFileHandler)
# peers PUT JSON here, so Django's CSRF protection must not interfere
remote_file_handler.csrf_exempt = getattr(remote_file_handler.handler, 'csrf_exempt', True)

urlpatterns = patterns('',
    url(r'get_file/$', remote_file_handler, name='get_file'),
)
| Python |
import hmac, base64
from django import forms
from django.conf import settings
class Form(forms.Form):
    # Plain alias of django.forms.Form so piston users can import both
    # form flavours from one module.
    pass
class ModelForm(forms.ModelForm):
    """
    Subclass of `forms.ModelForm` which makes sure
    that the initial values are present in the form
    data, so you don't have to send all old values
    for the form to actually validate. Django does not
    do this on its own, which is really annoying.
    """
    def merge_from_initial(self):
        # QueryDicts are immutable by default; unlock before injecting.
        self.data._mutable = True
        # Back-fill every Meta.fields entry missing from the submitted data
        # with the form's initial value (None when there is none).
        filt = lambda v: v not in self.data.keys()
        for field in filter(filt, getattr(self.Meta, 'fields', ())):
            self.data[field] = self.initial.get(field, None)
class OAuthAuthenticationForm(forms.Form):
    """
    Form shown on the OAuth authorize page. Carries the request token and
    callback plus a CSRF signature derived from settings.SECRET_KEY.
    """
    oauth_token = forms.CharField(widget=forms.HiddenInput)
    oauth_callback = forms.CharField(widget=forms.HiddenInput)
    authorize_access = forms.BooleanField(required=True)
    csrf_signature = forms.CharField(widget=forms.HiddenInput)

    def __init__(self, *args, **kwargs):
        forms.Form.__init__(self, *args, **kwargs)
        # Bound method assigned as initial: Django calls callable initials
        # when rendering, producing the signature lazily.
        self.fields['csrf_signature'].initial = self.initial_csrf_signature

    def clean_csrf_signature(self):
        # Recompute the signature from the token and reject tampered forms.
        sig = self.cleaned_data['csrf_signature']
        token = self.cleaned_data['oauth_token']
        sig1 = OAuthAuthenticationForm.get_csrf_signature(settings.SECRET_KEY, token)
        if sig != sig1:
            raise forms.ValidationError("CSRF signature is not valid")
        return sig

    def initial_csrf_signature(self):
        token = self.initial['oauth_token']
        return OAuthAuthenticationForm.get_csrf_signature(settings.SECRET_KEY, token)

    @staticmethod
    def get_csrf_signature(key, token):
        """HMAC-SHA1 of `token` under `key`, base64-encoded."""
        # BUG FIX: both fallbacks used bare `except:`, which would also
        # swallow KeyboardInterrupt and real errors inside hmac.new();
        # narrowed to ImportError, the only failure the fallback handles.
        try:
            import hashlib # 2.5
            hashed = hmac.new(key, token, hashlib.sha1)
        except ImportError:
            import sha # deprecated
            hashed = hmac.new(key, token, sha)
        # calculate the digest base 64
        return base64.b64encode(hashed.digest())
| Python |
"""
Decorator module, see
http://www.phyast.pitt.edu/~micheles/python/documentation.html
for the documentation and below for the licence.
"""
## The basic trick is to generate the source code for the decorated function
## with the right signature and to evaluate it.
## Uncomment the statement 'print >> sys.stderr, func_src' in _decorator
## to understand what is going on.
__all__ = ["decorator", "new_wrapper", "getinfo"]
import inspect, sys
try:
set
except NameError:
from sets import Set as set
def getinfo(func):
    """
    Returns an info dictionary containing:
    - name (the name of the function : str)
    - argnames (the names of the arguments : list)
    - defaults (the values of the default arguments : tuple)
    - signature (the signature : str)
    - doc (the docstring : str)
    - module (the module name : str)
    - dict (the function __dict__ : str)
    >>> def f(self, x=1, y=2, *args, **kw): pass
    >>> info = getinfo(f)
    >>> info["name"]
    'f'
    >>> info["argnames"]
    ['self', 'x', 'y', 'args', 'kw']
    >>> info["defaults"]
    (1, 2)
    >>> info["signature"]
    'self, x, y, *args, **kw'
    """
    assert inspect.ismethod(func) or inspect.isfunction(func)
    regargs, varargs, varkwargs, defaults = inspect.getargspec(func)
    # argnames lists positional args followed by *varargs / **varkwargs names
    argnames = list(regargs)
    if varargs:
        argnames.append(varargs)
    if varkwargs:
        argnames.append(varkwargs)
    # empty formatvalue drops default values, leaving a bare arg list;
    # [1:-1] strips the surrounding parentheses
    signature = inspect.formatargspec(regargs, varargs, varkwargs, defaults,
                                      formatvalue=lambda value: "")[1:-1]
    return dict(name=func.__name__, argnames=argnames, signature=signature,
                defaults = func.func_defaults, doc=func.__doc__,
                module=func.__module__, dict=func.__dict__,
                globals=func.func_globals, closure=func.func_closure)
# akin to functools.update_wrapper
# akin to functools.update_wrapper
def update_wrapper(wrapper, model, infodict=None):
    """Copy name/doc/module/dict/defaults from `model` onto `wrapper`."""
    infodict = infodict or getinfo(model)
    try:
        wrapper.__name__ = infodict['name']
    except: # Python version < 2.4
        pass
    wrapper.__doc__ = infodict['doc']
    wrapper.__module__ = infodict['module']
    wrapper.__dict__.update(infodict['dict'])
    wrapper.func_defaults = infodict['defaults']
    # keep a handle to the wrapped callable for introspection
    wrapper.undecorated = model
    return wrapper
def new_wrapper(wrapper, model):
    """
    An improvement over functools.update_wrapper. The wrapper is a generic
    callable object. It works by generating a copy of the wrapper with the
    right signature and by updating the copy, not the original.
    Moreovoer, 'model' can be a dictionary with keys 'name', 'doc', 'module',
    'dict', 'defaults'.
    """
    if isinstance(model, dict):
        infodict = model
    else: # assume model is a function
        infodict = getinfo(model)
    assert not '_wrapper_' in infodict["argnames"], (
        '"_wrapper_" is a reserved argument name!')
    # build a lambda with the model's exact signature that forwards every
    # argument to the generic wrapper
    src = "lambda %(signature)s: _wrapper_(%(signature)s)" % infodict
    funcopy = eval(src, dict(_wrapper_=wrapper))
    return update_wrapper(funcopy, model, infodict)
# helper used in decorator_factory
def __call__(self, func):
infodict = getinfo(func)
for name in ('_func_', '_self_'):
assert not name in infodict["argnames"], (
'%s is a reserved argument name!' % name)
src = "lambda %(signature)s: _self_.call(_func_, %(signature)s)"
new = eval(src % infodict, dict(_func_=func, _self_=self))
return update_wrapper(new, func, infodict)
def decorator_factory(cls):
    """
    Take a class with a ``.caller`` method and return a callable decorator
    object. It works by adding a suitable __call__ method to the class;
    it raises a TypeError if the class already has a nontrivial __call__
    method.
    """
    # NOTE(review): dir() on new-style classes always includes '__call__'
    # inherited from type, so this check assumes py2 old-style classes —
    # confirm before reusing on new-style/py3 classes.
    attrs = set(dir(cls))
    if '__call__' in attrs:
        raise TypeError('You cannot decorate a class with a nontrivial '
                        '__call__ method')
    if 'call' not in attrs:
        raise TypeError('You cannot decorate a class without a '
                        '.call method')
    cls.__call__ = __call__
    return cls
def decorator(caller):
    """
    General purpose decorator factory: takes a caller function as
    input and returns a decorator with the same attributes.
    A caller function is any function like this::

        def caller(func, *args, **kw):
            # do something
            return func(*args, **kw)

    Here is an example of usage:

    >>> @decorator
    ... def chatty(f, *args, **kw):
    ...     print "Calling %r" % f.__name__
    ...     return f(*args, **kw)

    >>> chatty.__name__
    'chatty'

    >>> @chatty
    ... def f(): pass
    ...
    >>> f()
    Calling 'f'

    decorator can also take in input a class with a .caller method; in this
    case it converts the class into a factory of callable decorator objects.
    See the documentation for an example.
    """
    if inspect.isclass(caller):
        return decorator_factory(caller)
    def _decorator(func): # the real meat is here
        info = getinfo(func)
        names = info['argnames']
        assert '_call_' not in names and '_func_' not in names, (
            'You cannot use _call_ or _func_ as argument names!')
        # Build a signature-preserving lambda that routes through `caller`.
        src = "lambda %(signature)s: _call_(_func_, %(signature)s)" % info
        # import sys; print >> sys.stderr, src # for debugging purposes
        dec_func = eval(src, {'_func_': func, '_call_': caller})
        return update_wrapper(dec_func, func, info)
    return update_wrapper(_decorator, caller)
if __name__ == "__main__":
    # Exercise the doctests embedded in this module's docstrings.
    import doctest
    doctest.testmod()
########################## LEGALESE ###############################
## Redistributions of source code must retain the above copyright
## notice, this list of conditions and the following disclaimer.
## Redistributions in bytecode form must reproduce the above copyright
## notice, this list of conditions and the following disclaimer in
## the documentation and/or other materials provided with the
## distribution.
## THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
## "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
## LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
## A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
## HOLDERS OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
## INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING,
## BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS
## OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
## ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR
## TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE
## USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH
## DAMAGE.
| Python |
from utils import rc
from django.core.exceptions import ObjectDoesNotExist, MultipleObjectsReturned
# Registry of handler class -> (model, is_anonymous); filled in by
# HandlerMetaClass as each handler class is defined.
typemapper = { }
# Every concrete handler class, in definition order (the two abstract
# base classes are excluded by HandlerMetaClass).
handler_tracker = [ ]
class HandlerMetaClass(type):
    """
    Metaclass that keeps a registry of class -> handler
    mappings.
    """
    def __new__(cls, name, bases, attrs):
        handler_cls = type.__new__(cls, name, bases, attrs)
        # Record (model, anonymity); handlers without a model map to None.
        typemapper[handler_cls] = (getattr(handler_cls, 'model', None),
                                   handler_cls.is_anonymous)
        # Track every concrete handler, skipping the two abstract bases.
        if name not in ('BaseHandler', 'AnonymousBaseHandler'):
            handler_tracker.append(handler_cls)
        return handler_cls
class BaseHandler(object):
    """
    Basehandler that gives you CRUD for free.
    You are supposed to subclass this for specific
    functionality.
    All CRUD methods (`read`/`update`/`create`/`delete`)
    receive a request as the first argument from the
    resource. Use this for checking `request.user`, etc.
    """
    __metaclass__ = HandlerMetaClass
    # HTTP verbs this handler accepts; checked by Resource.__call__.
    allowed_methods = ('GET', 'POST', 'PUT', 'DELETE')
    anonymous = is_anonymous = False
    # Serialization hints consumed by the emitters (see Emitter._model).
    exclude = ( 'id', )
    fields = ( )
    def flatten_dict(self, dct):
        """Collapse a (Query)dict into a plain dict with str keys and a
        single value per key (QueryDicts map keys to lists)."""
        return dict([ (str(k), dct.get(k)) for k in dct.keys() ])
    def has_model(self):
        """True when the handler is bound to a model or defines a queryset."""
        return hasattr(self, 'model') or hasattr(self, 'queryset')
    def queryset(self, request):
        """Default queryset: all instances of the handler's model."""
        return self.model.objects.all()
    def value_from_tuple(tu, name):
        # NOTE(review): no `self` parameter -- calling this on an instance
        # binds the instance to `tu`. It looks like it was meant to be a
        # static helper; confirm call sites before changing.
        """Return the first element of the pair in `tu` whose second
        element equals `name`, or None when absent."""
        for int_, n in tu:
            if n == name:
                return int_
        return None
    def exists(self, **kwargs):
        """True when a model instance matching **kwargs exists."""
        if not self.has_model():
            raise NotImplementedError
        try:
            self.model.objects.get(**kwargs)
            return True
        except self.model.DoesNotExist:
            return False
    def read(self, request, *args, **kwargs):
        """GET: a single instance when the pk appears in the URL kwargs,
        otherwise the queryset filtered by *args/**kwargs."""
        if not self.has_model():
            return rc.NOT_IMPLEMENTED
        pkfield = self.model._meta.pk.name
        if pkfield in kwargs:
            try:
                return self.queryset(request).get(pk=kwargs.get(pkfield))
            except ObjectDoesNotExist:
                return rc.NOT_FOUND
            except MultipleObjectsReturned: # should never happen, since we're using a PK
                return rc.BAD_REQUEST
        else:
            return self.queryset(request).filter(*args, **kwargs)
    def create(self, request, *args, **kwargs):
        """POST: create an instance from the POST data, unless an
        identical instance already exists (-> duplicate entry)."""
        if not self.has_model():
            return rc.NOT_IMPLEMENTED
        attrs = self.flatten_dict(request.POST)
        try:
            inst = self.queryset(request).get(**attrs)
            return rc.DUPLICATE_ENTRY
        except self.model.DoesNotExist:
            inst = self.model(**attrs)
            inst.save()
            return inst
        except self.model.MultipleObjectsReturned:
            return rc.DUPLICATE_ENTRY
    def update(self, request, *args, **kwargs):
        """PUT: overwrite the fields of the instance named by the pk
        kwarg with the submitted data."""
        if not self.has_model():
            return rc.NOT_IMPLEMENTED
        pkfield = self.model._meta.pk.name
        if pkfield not in kwargs:
            # No pk was specified
            return rc.BAD_REQUEST
        try:
            inst = self.queryset(request).get(pk=kwargs.get(pkfield))
        except ObjectDoesNotExist:
            return rc.NOT_FOUND
        except MultipleObjectsReturned: # should never happen, since we're using a PK
            return rc.BAD_REQUEST
        # request.POST is used even for PUT -- relies on coerce_put_post
        # (called in Resource.__call__) having copied PUT data into POST.
        attrs = self.flatten_dict(request.POST)
        for k,v in attrs.iteritems():
            setattr( inst, k, v )
        inst.save()
        return rc.ALL_OK
    def delete(self, request, *args, **kwargs):
        """DELETE: remove the matching instance."""
        if not self.has_model():
            raise NotImplementedError
        try:
            inst = self.queryset(request).get(*args, **kwargs)
            inst.delete()
            return rc.DELETED
        except self.model.MultipleObjectsReturned:
            return rc.DUPLICATE_ENTRY
        except self.model.DoesNotExist:
            return rc.NOT_HERE
class AnonymousBaseHandler(BaseHandler):
    """
    Base class for handlers serving unauthenticated callers:
    read-only, and flagged as anonymous.
    """
    allowed_methods = ('GET',)
    is_anonymous = True
| Python |
import sys, inspect
from django.http import (HttpResponse, Http404, HttpResponseNotAllowed,
HttpResponseForbidden, HttpResponseServerError)
from django.views.debug import ExceptionReporter
from django.views.decorators.vary import vary_on_headers
from django.conf import settings
from django.core.mail import send_mail, EmailMessage
from django.db.models.query import QuerySet
from emitters import Emitter
from handler import typemapper
from doc import HandlerMethod
from authentication import NoAuthentication
from utils import coerce_put_post, FormValidationError, HttpStatusCode
from utils import rc, format_error, translate_mime, MimerDataException
class Resource(object):
    """
    Resource. Create one for your URL mappings, just
    like you would with Django. Takes one argument,
    the handler. The second argument is optional, and
    is an authentication handler. If not specified,
    `NoAuthentication` will be used by default.
    """
    # HTTP verb -> handler method name.
    callmap = { 'GET': 'read', 'POST': 'create',
                'PUT': 'update', 'DELETE': 'delete' }
    def __init__(self, handler, authentication=None):
        """Instantiate the handler class and wire up authentication and
        the error-reporting settings (all overridable in settings.py)."""
        if not callable(handler):
            raise AttributeError, "Handler not callable."
        self.handler = handler()
        if not authentication:
            self.authentication = NoAuthentication()
        else:
            self.authentication = authentication
        # Erroring
        self.email_errors = getattr(settings, 'PISTON_EMAIL_ERRORS', True)
        self.display_errors = getattr(settings, 'PISTON_DISPLAY_ERRORS', True)
        self.stream = getattr(settings, 'PISTON_STREAM_OUTPUT', False)
    def determine_emitter(self, request, *args, **kwargs):
        """
        Function for determening which emitter to use
        for output. It lives here so you can easily subclass
        `Resource` in order to change how emission is detected.
        You could also check for the `Accept` HTTP header here,
        since that pretty much makes sense. Refer to `Mimer` for
        that as well.
        """
        # URL-captured (?P<emitter_format>...) wins over ?format=...;
        # JSON is the fallback.
        em = kwargs.pop('emitter_format', None)
        if not em:
            em = request.GET.get('format', 'json')
        return em
    @property
    def anonymous(self):
        """
        Gets the anonymous handler. Also tries to grab a class
        if the `anonymous` value is a string, so that we can define
        anonymous handlers that aren't defined yet (like, when
        you're subclassing your basehandler into an anonymous one.)
        """
        if hasattr(self.handler, 'anonymous'):
            anon = self.handler.anonymous
            if callable(anon):
                return anon
            # `anonymous` was given as a class *name*: resolve it against
            # the handler classes registered in typemapper.
            for klass in typemapper.keys():
                if anon == klass.__name__:
                    return klass
        return None
    @vary_on_headers('Authorization')
    def __call__(self, request, *args, **kwargs):
        """
        NB: Sends a `Vary` header so we don't cache requests
        that are different (OAuth stuff in `Authorization` header.)
        """
        rm = request.method.upper()
        # Django's internal mechanism doesn't pick up
        # PUT request, so we trick it a little here.
        if rm == "PUT":
            coerce_put_post(request)
        # Pick the concrete handler: authenticated, anonymous fallback,
        # or challenge the caller.
        if not self.authentication.is_authenticated(request):
            if self.anonymous and \
                rm in self.anonymous.allowed_methods:
                handler = self.anonymous()
                anonymous = True
            else:
                return self.authentication.challenge()
        else:
            handler = self.handler
            anonymous = handler.is_anonymous
        # Translate nested datastructs into `request.data` here.
        if rm in ('POST', 'PUT'):
            try:
                translate_mime(request)
            except MimerDataException:
                return rc.BAD_REQUEST
        if not rm in handler.allowed_methods:
            return HttpResponseNotAllowed(handler.allowed_methods)
        meth = getattr(handler, self.callmap.get(rm), None)
        if not meth:
            raise Http404
        # Support emitter both through (?P<emitter_format>) and ?format=emitter.
        em_format = self.determine_emitter(request, *args, **kwargs)
        kwargs.pop('emitter_format', None)
        # Clean up the request object a bit, since we might
        # very well have `oauth_`-headers in there, and we
        # don't want to pass these along to the handler.
        request = self.cleanup_request(request)
        try:
            result = meth(request, *args, **kwargs)
        except FormValidationError, e:
            # TODO: Use rc.BAD_REQUEST here
            return HttpResponse("Bad Request: %s" % e.form.errors, status=400)
        except TypeError, e:
            # A TypeError from the handler call is taken to mean the URL
            # args didn't match the handler signature; report the expected
            # signature back to the caller.
            result = rc.BAD_REQUEST
            hm = HandlerMethod(meth)
            sig = hm.get_signature()
            msg = 'Method signature does not match.\n\n'
            if sig:
                msg += 'Signature should be: %s' % sig
            else:
                msg += 'Resource does not expect any parameters.'
            if self.display_errors:
                msg += '\n\nException was: %s' % str(e)
            result.content = format_error(msg)
        except HttpStatusCode, e:
            #result = e ## why is this being passed on and not just dealt with now?
            return e.response
        except Exception, e:
            """
            On errors (like code errors), we'd like to be able to
            give crash reports to both admins and also the calling
            user. There's two setting parameters for this:
            Parameters::
             - `PISTON_EMAIL_ERRORS`: Will send a Django formatted
               error email to people in `settings.ADMINS`.
             - `PISTON_DISPLAY_ERRORS`: Will return a simple traceback
               to the caller, so he can tell you what error they got.
            If `PISTON_DISPLAY_ERRORS` is not enabled, the caller will
            receive a basic "500 Internal Server Error" message.
            """
            exc_type, exc_value, tb = sys.exc_info()
            rep = ExceptionReporter(request, exc_type, exc_value, tb.tb_next)
            if self.email_errors:
                self.email_exception(rep)
            if self.display_errors:
                return HttpResponseServerError(
                    format_error('\n'.join(rep.format_exception())))
            else:
                raise
        # Serialize the handler's result with the chosen emitter.
        emitter, ct = Emitter.get(em_format)
        fields = handler.fields
        # List results may use a dedicated field spec.
        if hasattr(handler, 'list_fields') and (
                isinstance(result, list) or isinstance(result, QuerySet)):
            fields = handler.list_fields
        srl = emitter(result, typemapper, handler, fields, anonymous)
        try:
            """
            Decide whether or not we want a generator here,
            or we just want to buffer up the entire result
            before sending it to the client. Won't matter for
            smaller datasets, but larger will have an impact.
            """
            if self.stream: stream = srl.stream_render(request)
            else: stream = srl.render(request)
            resp = HttpResponse(stream, mimetype=ct)
            resp.streaming = self.stream
            return resp
        except HttpStatusCode, e:
            return e.response
    @staticmethod
    def cleanup_request(request):
        """
        Removes `oauth_` keys from various dicts on the
        request object, and returns the sanitized version.
        """
        for method_type in ('GET', 'PUT', 'POST', 'DELETE'):
            block = getattr(request, method_type, { })
            if True in [ k.startswith("oauth_") for k in block.keys() ]:
                # Work on a copy: the request QueryDicts are immutable.
                sanitized = block.copy()
                for k in sanitized.keys():
                    if k.startswith("oauth_"):
                        sanitized.pop(k)
                setattr(request, method_type, sanitized)
        return request
    # --
    def email_exception(self, reporter):
        """Mail an HTML crash report (built by `reporter`) to settings.ADMINS."""
        subject = "Piston crash report"
        html = reporter.get_traceback_html()
        message = EmailMessage(settings.EMAIL_SUBJECT_PREFIX+subject,
            html, settings.SERVER_EMAIL,
            [ admin[1] for admin in settings.ADMINS ])
        message.content_subtype = 'html'
        message.send(fail_silently=True)
| Python |
from __future__ import generators
import decimal, re, inspect
import copy
try:
# yaml isn't standard with python. It shouldn't be required if it
# isn't used.
import yaml
except ImportError:
yaml = None
# Fallback since `any` isn't in Python <2.5
try:
    any
except NameError:
    def any(iterable):
        """True as soon as one element of *iterable* is truthy."""
        for item in iterable:
            if item:
                return True
        return False
from django.db.models.query import QuerySet
from django.db.models import Model, permalink
from django.utils import simplejson
from django.utils.xmlutils import SimplerXMLGenerator
from django.utils.encoding import smart_unicode
from django.core.urlresolvers import reverse, NoReverseMatch
from django.core.serializers.json import DateTimeAwareJSONEncoder
from django.http import HttpResponse
from django.core import serializers
from utils import HttpStatusCode, Mimer
try:
import cStringIO as StringIO
except ImportError:
import StringIO
try:
import cPickle as pickle
except ImportError:
import pickle
# Allow people to change the reverser (default `permalink`).
# Module-level hook used when building `resource_uri` entries in
# Emitter.construct; swap it to customize URL reversing.
reverser = permalink
class Emitter(object):
    """
    Super emitter. All other emitters should subclass
    this one. It has the `construct` method which
    conveniently returns a serialized `dict`. This is
    usually the only method you want to use in your
    emitter. See below for examples.
    """
    # Registry of format name -> (emitter class, content type).
    EMITTERS = { }
    def __init__(self, payload, typemapper, handler, fields=(), anonymous=True):
        self.typemapper = typemapper
        self.data = payload
        self.handler = handler
        self.fields = fields
        self.anonymous = anonymous
        if isinstance(self.data, Exception):
            # Propagate an exception payload instead of serializing it.
            # (Was a bare `raise`, which is only valid while an exception
            # is being handled; raising the payload is the evident intent.)
            raise self.data
    def method_fields(self, data, fields):
        """
        Return the subset of `fields` that name *callable* attributes on
        `data` (the handler), mapped to those callables. These "resource
        methods" let a handler override how a field is emitted.
        """
        if not data:
            return { }
        has = dir(data)
        ret = dict()
        for field in fields:
            # Bug fix: the callability check must apply to the attribute,
            # not the field *name* -- `callable(field)` on a string is
            # always False, which disabled this feature entirely.
            if field in has:
                attr = getattr(data, field)
                if callable(attr):
                    ret[field] = attr
        return ret
    def construct(self):
        """
        Recursively serialize a lot of types, and
        in cases where it doesn't recognize the type,
        it will fall back to Django's `smart_unicode`.
        Returns `dict`.
        """
        def _any(thing, fields=()):
            """
            Dispatch, all types are routed through here.
            """
            ret = None
            if isinstance(thing, QuerySet):
                ret = _qs(thing, fields=fields)
            elif isinstance(thing, (tuple, list)):
                ret = _list(thing)
            elif isinstance(thing, dict):
                ret = _dict(thing)
            elif isinstance(thing, decimal.Decimal):
                ret = str(thing)
            elif isinstance(thing, Model):
                ret = _model(thing, fields=fields)
            elif isinstance(thing, HttpResponse):
                # An HttpResponse in the payload short-circuits emission.
                raise HttpStatusCode(thing)
            elif inspect.isfunction(thing):
                if not inspect.getargspec(thing)[0]:
                    # Zero-argument function: emit its return value.
                    ret = _any(thing())
            elif hasattr(thing, '__emittable__'):
                f = thing.__emittable__
                if inspect.ismethod(f) and len(inspect.getargspec(f)[0]) == 1:
                    ret = _any(f())
            else:
                ret = smart_unicode(thing, strings_only=True)
            return ret
        def _fk(data, field):
            """
            Foreign keys.
            """
            return _any(getattr(data, field.name))
        def _related(data, fields=()):
            """
            Related managers (reverse foreign key sets).
            """
            return [ _model(m, fields) for m in data.iterator() ]
        def _m2m(data, field, fields=()):
            """
            Many to many (re-route to `_model`.)
            """
            return [ _model(m, fields) for m in getattr(data, field.name).iterator() ]
        def _model(data, fields=()):
            """
            Models. Will respect the `fields` and/or
            `exclude` on the handler (see `typemapper`.)
            """
            ret = { }
            handler = self.in_typemapper(type(data), self.anonymous)
            get_absolute_uri = False
            if handler or fields:
                v = lambda f: getattr(data, f.attname)
                if not fields:
                    """
                    Fields was not specified, try to find the correct
                    version in the typemapper we were sent.
                    """
                    mapped = self.in_typemapper(type(data), self.anonymous)
                    get_fields = set(mapped.fields)
                    exclude_fields = set(mapped.exclude).difference(get_fields)
                    if 'absolute_uri' in get_fields:
                        get_absolute_uri = True
                    if not get_fields:
                        # No explicit field list: emit every model field
                        # (FK columns without their "_id" suffix).
                        get_fields = set([ f.attname.replace("_id", "", 1)
                            for f in data._meta.fields ])
                    # sets can be negated.
                    for exclude in exclude_fields:
                        if isinstance(exclude, basestring):
                            get_fields.discard(exclude)
                        elif isinstance(exclude, re._pattern_type):
                            # Regex excludes drop every matching field.
                            for field in get_fields.copy():
                                if exclude.match(field):
                                    get_fields.discard(field)
                else:
                    get_fields = set(fields)
                met_fields = self.method_fields(handler, get_fields)
                for f in data._meta.local_fields:
                    if f.serialize and not any([ p in met_fields for p in [ f.attname, f.name ]]):
                        if not f.rel:
                            if f.attname in get_fields:
                                ret[f.attname] = _any(v(f))
                                get_fields.remove(f.attname)
                        else:
                            # FK column "foo_id" -> emit under "foo".
                            if f.attname[:-3] in get_fields:
                                ret[f.name] = _fk(data, f)
                                get_fields.remove(f.name)
                for mf in data._meta.many_to_many:
                    if mf.serialize and mf.attname not in met_fields:
                        if mf.attname in get_fields:
                            ret[mf.name] = _m2m(data, mf)
                            get_fields.remove(mf.name)
                # try to get the remainder of fields
                for maybe_field in get_fields:
                    if isinstance(maybe_field, (list, tuple)):
                        # (related_name, sub_fields) spec for a relation.
                        model, fields = maybe_field
                        inst = getattr(data, model, None)
                        if inst:
                            if hasattr(inst, 'all'):
                                ret[model] = _related(inst, fields)
                            elif callable(inst):
                                if len(inspect.getargspec(inst)[0]) == 1:
                                    ret[model] = _any(inst(), fields)
                            else:
                                ret[model] = _model(inst, fields)
                    elif maybe_field in met_fields:
                        # Overriding normal field which has a "resource method"
                        # so you can alter the contents of certain fields without
                        # using different names.
                        ret[maybe_field] = _any(met_fields[maybe_field](data))
                    else:
                        maybe = getattr(data, maybe_field, None)
                        if maybe:
                            if callable(maybe):
                                if len(inspect.getargspec(maybe)[0]) == 1:
                                    ret[maybe_field] = _any(maybe())
                            else:
                                ret[maybe_field] = _any(maybe)
                        else:
                            # Fall back to a method on the handler itself.
                            handler_f = getattr(handler or self.handler, maybe_field, None)
                            if handler_f:
                                ret[maybe_field] = _any(handler_f(data))
            else:
                # No handler and no field spec: emit every model field plus
                # any instance attributes not defined on the class.
                for f in data._meta.fields:
                    ret[f.attname] = _any(getattr(data, f.attname))
                fields = dir(data.__class__) + list(ret.keys())
                add_ons = [k for k in dir(data) if k not in fields]
                for k in add_ons:
                    ret[k] = _any(getattr(data, k))
            # resource uri
            if self.in_typemapper(type(data), self.anonymous):
                handler = self.in_typemapper(type(data), self.anonymous)
                if hasattr(handler, 'resource_uri'):
                    url_id, fields = handler.resource_uri(data)
                    try:
                        ret['resource_uri'] = reverser( lambda: (url_id, fields) )()
                    except NoReverseMatch:
                        pass
            if hasattr(data, 'get_api_url') and 'resource_uri' not in ret:
                try: ret['resource_uri'] = data.get_api_url()
                except: pass
            # absolute uri
            if hasattr(data, 'get_absolute_url') and get_absolute_uri:
                try: ret['absolute_uri'] = data.get_absolute_url()
                except: pass
            return ret
        def _qs(data, fields=()):
            """
            Querysets.
            """
            return [ _any(v, fields) for v in data ]
        def _list(data):
            """
            Lists.
            """
            return [ _any(v) for v in data ]
        def _dict(data):
            """
            Dictionaries.
            """
            return dict([ (k, _any(v)) for k, v in data.items() ])
        # Kickstart the seralizin'.
        return _any(self.data, self.fields)
    def in_typemapper(self, model, anonymous):
        """Return the handler class registered for `model` with matching
        anonymity, or None when there is no match."""
        for klass, (km, is_anon) in self.typemapper.items():
            if model is km and is_anon is anonymous:
                return klass
    def render(self, request):
        """
        This super emitter does not implement `render`,
        this is a job for the specific emitter below.
        (Signature takes `request`, matching the subclasses and the
        `self.render(request)` call in `stream_render`.)
        """
        raise NotImplementedError("Please implement render.")
    def stream_render(self, request, stream=True):
        """
        Tells our patched middleware not to look
        at the contents, and returns a generator
        rather than the buffered string. Should be
        more memory friendly for large datasets.
        """
        yield self.render(request)
    @classmethod
    def get(cls, format):
        """
        Gets an emitter, returns the class and a content-type.
        """
        if format in cls.EMITTERS:
            return cls.EMITTERS.get(format)
        raise ValueError("No emitters found for type %s" % format)
    @classmethod
    def register(cls, name, klass, content_type='text/plain'):
        """
        Register an emitter.
        Parameters::
         - `name`: The name of the emitter ('json', 'xml', 'yaml', ...)
         - `klass`: The emitter class.
         - `content_type`: The content type to serve response as.
        """
        cls.EMITTERS[name] = (klass, content_type)
    @classmethod
    def unregister(cls, name):
        """
        Remove an emitter from the registry. Useful if you don't
        want to provide output in one of the built-in emitters.
        """
        return cls.EMITTERS.pop(name, None)
class XMLEmitter(Emitter):
    """Emits the constructed payload as a simple XML document."""
    def _to_xml(self, xml, data):
        """Recursively feed `data` into the SAX-style `xml` generator."""
        if isinstance(data, (list, tuple)):
            for entry in data:
                xml.startElement("resource", {})
                self._to_xml(xml, entry)
                xml.endElement("resource")
        elif isinstance(data, dict):
            for key, value in data.iteritems():
                xml.startElement(key, {})
                self._to_xml(xml, value)
                xml.endElement(key)
        else:
            xml.characters(smart_unicode(data))
    def render(self, request):
        buf = StringIO.StringIO()
        generator = SimplerXMLGenerator(buf, "utf-8")
        generator.startDocument()
        generator.startElement("response", {})
        self._to_xml(generator, self.construct())
        generator.endElement("response")
        generator.endDocument()
        return buf.getvalue()
# Serve "xml" output; accept text/xml uploads with a no-op mimer
# (the body is not deserialized -- the lambda returns None).
Emitter.register('xml', XMLEmitter, 'text/xml; charset=utf-8')
Mimer.register(lambda *a: None, ('text/xml',))
class JSONEmitter(Emitter):
    """
    JSON emitter, understands timestamps.
    """
    def render(self, request):
        callback = request.GET.get('callback')
        serialized = simplejson.dumps(self.construct(),
            cls=DateTimeAwareJSONEncoder, ensure_ascii=False, indent=4)
        # JSONP: wrap the payload in the requested callback function.
        if callback:
            return '%s(%s)' % (callback, serialized)
        return serialized
# JSON is also the default output format (see Resource.determine_emitter).
Emitter.register('json', JSONEmitter, 'application/json; charset=utf-8')
Mimer.register(simplejson.loads, ('application/json',))
class YAMLEmitter(Emitter):
    """
    YAML emitter, uses `safe_dump` to omit the
    specific types when outputting to non-Python.
    """
    def render(self, request):
        constructed = self.construct()
        return yaml.safe_dump(constructed)
if yaml: # Only register yaml if it was import successfully.
    Emitter.register('yaml', YAMLEmitter, 'application/x-yaml; charset=utf-8')
    # NOTE(review): yaml.load without a SafeLoader deserializes arbitrary
    # Python objects from client-supplied request bodies -- unsafe for
    # untrusted input; consider yaml.safe_load.
    Mimer.register(yaml.load, ('application/x-yaml',))
class PickleEmitter(Emitter):
    """
    Emitter that returns Python pickled.
    """
    def render(self, request):
        payload = self.construct()
        return pickle.dumps(payload)
Emitter.register('pickle', PickleEmitter, 'application/python-pickle')
# NOTE(review): pickle.loads on client-supplied request bodies allows
# arbitrary code execution; this mimer should not be exposed to
# untrusted callers.
Mimer.register(pickle.loads, ('application/python-pickle',))
class DjangoEmitter(Emitter):
    """
    Emitter for the Django serialized format.
    """
    def render(self, request, format='xml'):
        data = self.data
        # Pass HttpResponses and scalar payloads through untouched.
        if isinstance(data, HttpResponse):
            return data
        if isinstance(data, (int, str)):
            return data
        return serializers.serialize(format, data, indent=True)
# Serve Django-native serialization under the "django" format.
Emitter.register('django', DjangoEmitter, 'text/xml; charset=utf-8')
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.