code stringlengths 1 1.72M | language stringclasses 1 value |
|---|---|
import struct
# Chunk types
UNDEFINED = -1
DATA_CHUNK = 0
END_CHUNK = 1
# Data structure: (type, length of id, seq#) + id
class Chunk:
    """Base wire chunk: a (type, message id, sequence number) triple.

    The 12-byte header produced by raw() is the common prefix of every
    chunk on the wire; from_raw() peeks at the type tag and dispatches
    to the concrete subclass via the module-level ``types`` table.
    """
    def __init__(self, mid, seq):
        self.mid = mid
        self.seq = seq
        self.type = -1
    def raw(self):
        """Serialize the fixed (type, mid, seq) header as 3 native ints."""
        return struct.pack('iii', self.type, self.mid, self.seq)
    @staticmethod
    def from_raw(raw):
        """Read the leading type tag and let the matching class deserialize."""
        (ctype,) = struct.unpack('i', raw[:4])
        return types[ctype].from_raw(raw)
class DataChunk(Chunk):
    """Chunk carrying one payload slice of a message body."""
    def __init__(self, mid, seq, data):
        Chunk.__init__(self, mid, seq)
        self.type = 0
        self.data = data
    def raw(self):
        """Common header followed by the raw payload bytes."""
        return Chunk.raw(self) + self.data
    @staticmethod
    def from_raw(raw):
        """Rebuild a DataChunk from its wire form: 12-byte header + payload."""
        _ctype, mid, seq = struct.unpack('iii', raw[:12])
        return DataChunk(mid, seq, raw[12:])
class EndChunk(Chunk):
    """Payload-less terminator chunk marking the end of a message."""
    def __init__(self, mid, seq):
        Chunk.__init__(self, mid, seq)
        self.type = 1
    def raw(self):
        """An EndChunk is just the common header; there is no payload."""
        return Chunk.raw(self)
    @staticmethod
    def from_raw(raw):
        """Rebuild an EndChunk from the 12-byte header (type tag ignored)."""
        _ctype, mid, seq = struct.unpack('iii', raw[:12])
        return EndChunk(mid, seq)
# Dispatch table from the serialized type tag to the chunk class.
# Fix: use the named constants declared at the top of this module instead
# of repeating the magic numbers, so the table cannot drift out of sync
# with the tags each class writes in __init__.
types = {
    UNDEFINED: Chunk,
    DATA_CHUNK: DataChunk,
    END_CHUNK: EndChunk,
}
| Python |
import threading
import gobject
import os
import time
from net import network, bt, queues
from presence import ps
from util.decorators import *
from util import config
from db import db
import message
import control
import receiver
import strategy
import timesync
from util import config
import logging
logger = logging.getLogger('async')
class MessageTable(db.Table):
    """sqlite-backed log of message lifecycle events.

    One row per message id: when it was received, and (NULL until the
    event happens) when it expired and when it was delivered.

    TODO:
    - Need a way of clearing this table out every so often
      Or at least only returning the last n expired/delivered objects
    """
    def __init__(self):
        db.Table.__init__(self)
    def _init(self):
        # Called by the db layer the first time the table is created.
        self._conn.execute('''
            CREATE TABLE messages
            (mid INTEGER PRIMARY KEY NOT NULL, received INTEGER NOT NULL, expired INTEGER, delivered INTEGER)
        ''')
    def _drop(self):
        self._conn.execute('''
            DROP TABLE messages
        ''')
    def log_received(self, mid):
        # expired/delivered start as NULL until those events are logged.
        c = self._conn
        c.execute(''' INSERT INTO messages VALUES (?,?,?,?) ''', (mid, time.time(), None, None))
    def log_expired(self, mid):
        c = self._conn
        c.execute(''' UPDATE messages SET expired=? WHERE mid=? ''', (time.time(), mid))
    def log_delivered(self, mid):
        c = self._conn
        c.execute(''' UPDATE messages SET delivered=? WHERE mid=? ''', (time.time(), mid))
    def get_expired(self):
        """Return the mids of every message logged as expired.

        Bug fix: the old query used ``expired > ?`` with the current time,
        but the column stores the (past) moment expiration was logged, so
        the comparison never matched and this always returned nothing.
        """
        c = self._conn
        return map(self.__map_mid, c.execute(''' SELECT mid FROM messages WHERE expired IS NOT NULL '''))
    def get_delivered(self):
        """Return the mids of every message logged as delivered (same fix)."""
        c = self._conn
        return map(self.__map_mid, c.execute(''' SELECT mid FROM messages WHERE delivered IS NOT NULL '''))
    def __map_mid(self, dic):
        # Rows come back as mappings; project out the mid column.
        return dic['mid']
class AsyncManager(gobject.GObject):
    """Top-level coordinator for store-and-forward ("async") messaging.

    Wires together the presence service, the control channel, per-network
    send queues, the chunk receiver and the persistence layer, and runs a
    Strategy on a timer to decide which neighbors to forward messages to.

    Emits 'message-received' (carrying the message Header) when a complete
    message addressed to this node arrives.
    """
    __gsignals__ = {
        # Need to document that this is a header object
        'message-received' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_PYOBJECT,)),
    }
    # NOTE(review): class attribute -- this lock is shared by ALL
    # AsyncManager instances, not held per instance.
    lock = threading.Lock()
    def __init__(self, iface, _presence, config_files=[]):
        # NOTE(review): mutable default argument; harmless here because the
        # list is only read, but a None sentinel would be safer.
        gobject.GObject.__init__(self)
        config.add_file(os.path.join(os.path.dirname(__file__), 'async.config'))
        if config_files:
            config.add_files(config_files)
        # TODO:
        # -- Determine whether bluetooth is possible
        # -- If so, add the proper meta key, etc
        # -- For now, a hack
        bt_mac = '00:15:83:0A:19:AC'
        bt_unicast_port = config.get_base16_int('async.ports', 'bluetooth-unicast')
        bt_broadcast_port = config.get_base16_int('async.ports', 'bluetooth-broadcast')
        # Advertise our control/data ports (plus the bluetooth hack above)
        # through the presence service so neighbors know how to reach us.
        self.__presence = _presence
        self.__presence.add_meta(config.get_int('async.metas', 'control-port-key'), config.get_int('async.ports', 'control-in'))
        self.__presence.add_meta(config.get_int('async.metas', 'bluetooth-mac-key'), bt_mac)
        self.__presence.add_meta(config.get_int('async.metas', 'bluetooth-unicast-port-key'), bt_unicast_port)
        self.__presence.add_meta(config.get_int('async.metas', 'bluetooth-broadcast-port-key'), bt_broadcast_port)
        self.__presence.add_meta(config.get_int('async.metas', 'unicast-port-key'), config.get_int('async.ports', 'unicast'))
        self.__time_sync = timesync.TimeSync(self.__presence)
        self.__complete = {}    # mid -> fully received/created Message
        self.__incoming = {}    # mid -> Header of a partially received message
        self.__busy = False
        self.__ip = network.get_ip_addr(iface)
        self.__receiver = receiver.Receiver()
        self.__control = control.Control(self, iface, config.get_int('async.ports', 'control-in'))
        self.__receiver.connect('message-complete', self.__on_message_complete)
        self.__receiver.connect('need-retransmission', self.__on_need_retransmission)
        self.__receiver.connect('busy', self.__on_busy)
        # One outbound MessageQueue per logical network id.
        self.__queues = {
            config.get_int('async.networks', 'unicast') : queues.MessageQueue(),
            config.get_int('async.networks', 'broadcast') : queues.MessageQueue(),
            config.get_int('async.networks', 'bluetooth-unicast') : queues.MessageQueue(),
            config.get_int('async.networks', 'bluetooth-broadcast') : queues.MessageQueue(),
        }
        self.retransmission_queue = self.__queues[config.get_int('async.networks', 'unicast')]
        for queue in self.__queues.values():
            queue.connect('busy', self.__on_busy)
        network.Unicast(iface, config.get_int('async.ports', 'unicast'), self.__queues[config.get_int('async.networks', 'unicast')],
            self.__receiver.receive)
        network.Broadcast(iface, config.get_int('async.ports', 'broadcast'), self.__queues[config.get_int('async.networks', 'broadcast')],
            self.__receiver.receive)
        #bt.Unicast(config.get_base16_int('async.ports', 'bluetooth-unicast'),
        #    self.__queues[config.get_int('async.networks', 'bluetooth-unicast')], self.__receiver.receive)
        #bt.Broadcast(config.get_base16_int('async.ports', 'bluetooth-broadcast'),
        #    self.__queues[config.get_int('async.networks', 'bluetooth-broadcast')], self.__receiver.receive)
        # Map message-type ids to the presence classes named in config.
        # NOTE(review): exec/eval of config-file values executes arbitrary
        # code -- acceptable only for trusted local configuration files.
        self.__presence_types = {}
        for (name, value) in config.items('async.presence-types'):
            (module, seperator, cls) = value.rpartition('.')
            exec('import ' + module )
            self.__presence_types[int(name)] = eval(value)
        self.__strategy = strategy.Strategy()
        self.__rt_strategy = strategy.RetransmissionStrategy()
        self.__strategy_timer = gobject.timeout_add(config.get_int('async', 'strategy-interval-ms'), self.__run_strategy)
        self.__table = db.get_database(config.get('async','db-dir'), config.get('async', 'db-file')).get_or_create(MessageTable)
        for (name, value) in config.items('async.folders'):
            if not os.path.exists(value):
                os.makedirs(value)
        # Load persisted messages
        # NOTE(review): paths below are built with '+', so 'active-root'
        # must end with a path separator -- TODO confirm in async.config.
        logger.debug('Async active-root: %s', config.get('async.folders', 'active-root'))
        for fname in os.listdir(config.get('async.folders', 'active-root')):
            logger.debug('Loading %s',fname)
            if not os.path.isdir(config.get('async.folders', 'active-root') + fname):
                msg = message.depersist(config.get('async.folders', 'active-root') + fname)
                logger.debug('msg %i is complete? %s',msg.header.mid,msg.is_complete())
                if msg.is_complete():
                    self.__complete[msg.header.mid] = msg
                    self.__presence.add_object(self.__create_presence(msg))
                    if msg.is_expired():
                        self.__handle_expired(msg)
                else:
                    # Partially received message: resume assembling it.
                    self.__incoming[msg.header.mid] = msg.header
                    self.__receiver.resume_incomplete(msg)
                    self.__presence.add_object(ps.PartialPresence(msg.header.mid))
        # Sanity check that we've killed all expired messages
        for msg in self.__complete.values():
            assert not msg.is_expired()
        # Advertise expired and delivered messages
        expired, delivered = self.__table.get_expired(), self.__table.get_delivered()
        for mid in expired:
            self.__presence.add_object(ps.ExpiredPresence(mid))
        for mid in delivered:
            self.__presence.add_object(ps.DeliveredPresence(mid))
        # Handlers for presence-specific things
        self.__presence.connect('object-appeared', self.__on_object_appeared)
        self.__presence.connect('neighbor-update', self.__on_neighbor)
        self.__presence.connect('neighbor-online', self.__on_neighbor)
        self.__object_handlers = {
            ps.ExpiredPresence : self.__handle_expired_presence,
            ps.DeliveredPresence : self.__handle_delivered_presence,
        }
        # Not sure that we need a per-usertype handler setup
        #self.__neighbor_handlers = {
        #    ps.UserPresence : self.__handle_user_presence,
        #    ps.ServicePresence : self.__handle_service_presence,
        #    }
        # NOTE(review): the parentheses below do NOT form a tuple (missing
        # trailing comma); this is just the bound method. The attribute is
        # currently unused (see __on_neighbor), so there is no runtime effect.
        self.__neighbor_handlers = (self.__handle_neighbor_presence)
    @synchronized(lock)
    def shutdown(self):
        # Persist partially received messages so they can be resumed on the
        # next startup (see the depersist loop in __init__).
        incoming = self.__receiver.shutdown()
        for msg in incoming.values():
            msg.persist()
    @synchronized(lock)
    def __remove(self, msg):
        # Placeholder -- removal currently happens in __handle_expired.
        pass
    @synchronized(lock)
    def send_message(self, url, msg_type, dests, meta={}, csize=None, expiration_delta=60*60*24*7, explicit_mid=None): # 7 day expiration
        """
        Sends a message to a set of destinations.
        @param url: absolute path of the file backing the message
        @param msg_type: application-defined message type id
        @param dests: list of destination node ids; empty means broadcast
        @param meta: arbitrary application metadata. NOTE(review): mutable
            default argument -- safe only while callees never mutate it.
        @param csize: chunk size in bytes; defaults to async.max-chunk-size
        @param expiration_delta: seconds until the message expires
        @param explicit_mid: force a specific message id instead of hashing
        @return: The header of the message. This is of type L(message.Header)
        """
        if csize is None:
            csize=config.get_int('async', 'max-chunk-size')
        src = self.__presence.me.pid
        msg = message.create_new(url, msg_type, src, dests, meta, csize, expiration_delta, explicit_mid)
        self.__complete[msg.header.mid] = msg
        self.__presence.add_object(self.__create_presence(msg))
        msg.persist()
        self.__table.log_received(msg.header.mid)
        return msg.header
    def has(self, mid):
        # True if we know this message at all, complete or still arriving.
        return mid in self.__complete or mid in self.__incoming
    # Called by the control channel when wanting to retransmit
    # ---
    def get_message(self, mid):
        return self.__complete[mid]
    def get_retransmission_queue(self):
        # NOTE(review): reads config.RETRANSMISSION_NETWORK (an attribute)
        # while everything else uses config.get_int('async.networks', ...);
        # verify the two stay in agreement.
        return self.__queues[config.RETRANSMISSION_NETWORK]
    @synchronized(lock)
    def register_incoming(self, header):
        # Sometimes we will get headers from multiple people simultaneously.
        # We don't want to kill the presence service by registering multiple of the same
        # object ids as present, so we will just fail silently.
        if header.mid in self.__incoming or header.mid in self.__complete:
            return
        else:
            header.hops.append(self.__presence.me.pid)
            self.__incoming[header.mid] = header
            self.__presence.add_object(ps.PartialPresence(header.mid))
            self.__receiver.register_incoming(header)
    @synchronized(lock)
    def set_busy(self, busy):
        logger.info('setting busy: %s',busy)
        self.__busy = busy
    def is_busy(self):
        return self.__busy
    def __create_presence(self, msg):
        # Instantiate the presence class configured for this message type.
        return self.__presence_types[msg.header.msg_type](msg.header.mid, msg.header.fname)
    def __run_strategy(self):
        # Periodic gobject timeout callback; returning True keeps it firing.
        # Need to create a copy of self.__complete because we are going to remove
        # complete messages that have expired.
        if self.__busy:
            return True
        logger.debug('Running strategy')
        for (guid, msg) in self.__complete.copy().iteritems():
            if msg.is_expired():
                logger.debug('message %i expired', msg.header.mid)
                self.__handle_expired(msg)
                continue
            neighbors = self.__presence.get_neighbors()
            (network_name, targets) = self.__strategy.evaluate(msg, neighbors)
            for target in targets:
                if not self.is_busy():
                    logger.debug('Sending control channel send_message mid %s to target %s', msg.header.mid, target)
                    self.__control.send_message(msg, target, self.__queues[network_name])
                else:
                    logger.debug('Wanted send header mid %s to %s but was busy.', msg.header.mid, target)
        return True
    def __handle_expired(self, msg):
        # Drop an expired message and advertise its expiration.
        # NOTE(review): replace_with_new is invoked both here and again at
        # the end of this method -- the two calls look redundant.
        self.__presence.replace_with_new(ps.ExpiredPresence(msg.header.mid))
        if msg.header.mid in self.__complete:
            del self.__complete[msg.header.mid]
        elif msg.header.mid in self.__incoming:
            del self.__incoming[msg.header.mid]
        #msg.destroy_persisted()
        # Log in the database that this has expired
        self.__table.log_expired(msg.header.mid)
        self.__presence.replace_with_new(ps.ExpiredPresence(msg.header.mid))
    # Event handlers
    def __on_message_complete(self, gobject, msg):
        # Receiver finished assembling a message: decode it, advertise it,
        # and deliver it locally if we are among the destinations.
        msg.decode()
        self.__complete[msg.header.mid] = msg
        self.__presence.replace_with_new(self.__create_presence(msg))
        self.__table.log_received(msg.header.mid)
        logger.info('Received message id %i from %i to %s',msg.header.mid, msg.header.src, msg.header.dests)
        if msg.is_for(self.__presence.me.pid):
            logger.info('Message id %i was for me', msg.header.mid)
            self.emit('message-received', msg.header)
    def __on_need_retransmission(self, gobject, header, seqs):
        # Pick one neighbor that holds the complete message and ask it to
        # resend the missing chunk sequence numbers.
        logger.debug('Detected that I need retransmission for message id %i. Seqs: %s', header.mid, seqs)
        neighbors = self.__presence.get_neighbors()
        (network, target) = self.__rt_strategy.evaluate(header.mid, seqs, neighbors)
        if target is None:
            return
        if not self.is_busy():
            logger.debug('Sending control message request_retransmission mid %s seqs %s to target %s', header.mid, seqs, target)
            self.__control.request_retransmission(header, seqs, target, self.__presence.me)
    def __on_busy(self, source, busy):
        self.set_busy(busy)
    def __on_appeared(self, presence, obj):
        # Called when an object appears on the presence service
        # NOTE(review): nothing connects this handler ('object-appeared' is
        # wired to __on_object_appeared in __init__) -- looks like dead code.
        if isinstance(obj, ps.DeliveredPresence):
            logger.info('Neighbor has DeliveredPresence for mid %i. Setting this message to delivered', obj.pid)
            # Remove from active,
            # Remove from inactive,
            # Log delivered in the db
            raise NotImplementedError
        pass
    def __on_neighbor(self, presence, neighbor, new=False):
        #handler = self.__neighbor_handlers.get(neighbor.__class__, None)
        #if handler:
        #    handler(neighbor)
        pass
    def __on_object_appeared(self, presence, object):
        # Dispatch on the presence object's class (see __object_handlers).
        handler = self.__object_handlers.get(object.__class__, None)
        if handler:
            handler(object)
    def __handle_expired_presence(self, expired_presence):
        pass
    def __handle_delivered_presence(self, delivered_presence):
        pass
    def __handle_neighbor_presence(self, neighbor):
        pass
gobject.type_register(AsyncManager)
| Python |
import cPickle as pickle
import gobject
import random
from net import network, queues
import bits, message
from util import config
SEPERATOR = ':::'
import logging
logger = logging.getLogger('async.control')
class Control:
    """Out-of-band control channel used to negotiate message transfers.

    Speaks a small text protocol on a dedicated unicast port; fields are
    joined with SEPERATOR. Requests: 'busy?', 'header', 'retransmit'.
    Replies: '__busy__', '__go__', '__stop__', each echoing the task id so
    the matching pending Task can be advanced or cancelled.
    """
    def __init__(self, manager, iface, control_port):
        self.__manager = manager
        self.__queue = queues.Queue()
        self.__socket = network.Unicast(iface, control_port, self.__queue, self.__receive)
        # tid -> Task currently awaiting a reply from a peer.
        self.__tasks = {}
    def __receive(self, data, addr):
        # Entry point for every datagram arriving on the control port.
        ip, port = addr
        elts = data.split(SEPERATOR)
        command = elts[0]
        logger.debug('Received command: %s',command)
        if command == 'busy?':
            # Peer asks whether we can accept a transfer right now.
            if self.__manager.is_busy():
                response = '__busy__' + SEPERATOR + elts[1]
            else:
                response = '__go__' + SEPERATOR + elts[1]
            self.__queue.put(response, addr)
        elif command == 'header':
            self.__handle_header(addr, *elts[1:])
        elif command == 'retransmit':
            self.__handle_retransmit(addr, *elts[1:])
        elif command == '__busy__':
            # Peer declined: cancel our pending task.
            tid = int(elts[1])
            task = self.__tasks.get(tid, None)
            if task:
                task.stop()
        elif command == '__go__':
            # Peer accepted: advance the pending task one step.
            tid = int(elts[1])
            task = self.__tasks.get(tid, None)
            if task:
                task.go(addr)
        elif command == '__stop__':
            tid = int(elts[1])
            task = self.__tasks.get(tid, None)
            if task:
                task.stop()
    # handlers
    def __handle_header(self, addr, raw_header, tid):
        # A peer offers a message; accept ('__go__') unless we have it.
        ip, port = addr
        header = message.Header.from_raw(raw_header)
        logger.debug('Handling incoming header: mid %i', header.mid)
        if not self.__manager.has(header.mid):
            self.__manager.register_incoming(header)
            self.__queue.put('__go__' + SEPERATOR + tid, addr)
        else:
            self.__queue.put('__stop__' + SEPERATOR + tid, addr)
    def __handle_retransmit(self, addr, mid, pickled_target, packed_seqs, tid):
        # A peer asks us to resend specific chunks of a message we hold.
        # NOTE(review): pickle.loads on data straight off the network is
        # unsafe against malicious peers -- trusted-network assumption.
        mid = int(mid)
        msg = self.__manager.get_message(mid)
        presence = pickle.loads(pickled_target)
        seqs = bits.unpack_seqs(packed_seqs)
        logger.debug('Retransmitting %s for mid %i', seqs, mid)
        # TODO: major hack)
        self.__manager.retransmission_queue.put(msg, (presence.ip, presence.meta[config.get_int('async.metas', 'unicast-port-key')]), seqs)
    def send_message(self, msg, target, net_queue):
        # Start the busy?/header/send handshake with one neighbor.
        task = Control.SendMessage(self.__queue, msg, target, net_queue)
        self.__tasks[task.tid] = task
        task.connect('stopped', self.__on_task_stopped)
        task.run()
    def request_retransmission(self, header, seqs, target, me):
        # Start the busy?/retransmit handshake with one neighbor.
        task = Control.RequestRetransmission(self.__queue, header, seqs, target, me)
        self.__tasks[task.tid] = task
        task.connect('stopped', self.__on_task_stopped)
        task.run()
    def __on_task_stopped(self, gobject, tid):
        del self.__tasks[tid]
    class Task(gobject.GObject):
        """One in-flight control handshake; emits 'stopped' when finished.

        NOTE(review): tid is a random int in [0, 1000000] with no collision
        check against Control.__tasks -- a duplicate would clobber a task.
        """
        __gsignals__ = {
            'stopped' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                (gobject.TYPE_INT,))
        }
        def __init__(self, control_queue, target):
            gobject.GObject.__init__(self)
            self.tid = random.randint(0,1000000)
            self._queue = control_queue
            self._target = target
            #self.__max_retries = 2
            #self.__retry_count = 0
            # _step advances every time the peer answers '__go__'.
            self._step = -1
            self._previous_step = -1
        def run(self):
            # Open the handshake; give up after 1s if no reply arrives.
            self._queue.put('busy?' + SEPERATOR + str(self.tid), self._target.control_addr)
            #self.__retry_timer = gobject.timeout_add(500, self.__retry)
            self.__stop_timer = gobject.timeout_add(1000, self.stop)
        def __retry(self):
            # NOTE(review): dead code -- never scheduled (the retry_timer
            # above is commented out) and it references __retry_count /
            # __max_retries, which are also commented out in __init__.
            if self._step == self._previous_step:
                self.__retry_count += 1
                if self.__retry_count > self.__max_retries:
                    self.stop()
                    return False
            else:
                self.run()
            return True
        def stop(self):
            #gobject.source_remove(self.__retry_timer)
            gobject.source_remove(self.__stop_timer)
            self.emit('stopped', self.tid)
        def go(self):
            # NOTE(review): subclasses implement go(self, addr); this
            # abstract signature is missing the addr parameter.
            raise NotImplementedError
    class SendMessage(Task):
        # Handshake: busy? -> offer header -> stream the chunks.
        def __init__(self, control_queue, msg, target, net_queue):
            Control.Task.__init__(self, control_queue, target)
            self.__msg = msg
            self.__net_queue = net_queue
        def go(self, addr):
            self._previous_step = self._step
            self._step += 1
            if self._step == 0:
                # Step 0: send the serialized header.
                self._queue.put('header' + SEPERATOR + self.__msg.header.raw() + SEPERATOR + str(self.tid), addr)
            elif self._step == 1:
                # Step 1: peer accepted the header; queue the chunk stream.
                self.__net_queue.put(self.__msg, self._target.send_addr) # , range(10,self.__msg.header.lsn)) # Use this is you want to force dropped packets
            else:
                raise Exception
    class RequestRetransmission(Task):
        # Handshake: busy? -> retransmit request with a packed seq bitmap.
        def __init__(self, control_queue, header, seqs, target, me):
            Control.Task.__init__(self, control_queue, target)
            self.__header = header
            self.__seqs = seqs
            self.__me = me
        def go(self, addr):
            self._previous_step = self._step
            self._step += 1
            # Our own presence object is pickled in so the peer knows where
            # to direct the retransmitted chunks.
            self._queue.put('retransmit' + SEPERATOR + str(self.__header.mid) + SEPERATOR + pickle.dumps(self.__me, pickle.HIGHEST_PROTOCOL) + SEPERATOR + bits.pack_seqs(self.__seqs, self.__header.lsn) + SEPERATOR + str(self.tid), addr)
gobject.type_register(Control.Task)
import gobject
import asyncmanager
from net import network
from presence import ps
from util import config
class Engine:
    """Minimal driver: presence service + AsyncManager + gobject main loop."""
    def __init__(self, iface, my_id, my_name, sender):
        # Presence runs in-process (standalone=False) on the configured ports.
        p = ps.PresenceService(iface, config.PORTS['presence-out'], config.PORTS['presence-in'], my_id, my_name, standalone=False)
        am = asyncmanager.AsyncManager(iface, p)
        self.am = am
        am.connect('message-received', self.__on_message_received)
        if sender:
            # Demo payload: queue one audio file for the 'receiver' node.
            url = config.APP_DIRS[config.AUDIO] + '/hello.wav'
            msg_type = config.AUDIO
            dests = ['receiver']
            am.send_message(url, msg_type, dests)
        network.start()
    def run(self):
        # start main application loop
        # NOTE(review): network.start() is also called in __init__ --
        # confirm starting it twice is safe/idempotent.
        gobject.threads_init()
        network.start()
        gobject.MainLoop().run()
    def shutdown(self):
        self.am.shutdown()
    def __on_message_received(self, gobject, msg):
        print 'Received ',msg.header.fname
if __name__ == '__main__':
import sys
if len(sys.argv) < 6:
print "Usage: python engine.py <iface> <my_id> <my_name> <sender?> <crosswire?>"
iface, my_id, my_name, sender,crosswire = sys.argv[1:]
if sender == 'True':
sender = True
else:
sender = False
if crosswire == 'True':
crosswire = True
else:
crosswire = False
if crosswire:
if sender:
config.PORTS = {
'presence-out' : 8000,
'presence-in' : 8001,
'unicast-out' : 8002,
'unicast-in' : 8003,
'broadcast-out' : 8004,
'broadcast-in' : 8005,
# ---
'control-in' : 8006,
}
#config.MEDIA_ROOT = '/home/camick/camick/memento/src/sender/'
else:
config.PORTS = {
'presence-out' : 8001,
'presence-in' : 8000,
'unicast-out' : 8003,
'unicast-in' : 8002,
'broadcast-out' : 8005,
'broadcast-in' : 8004,
# ---
'control-in' : 8007,
}
# config.MEDIA_ROOT = '/home/camick/camick/memento/src/sender/'
config.CONTROL_PORT = config.PORTS['control-in']
e = Engine(iface, my_id, my_name, sender)
try:
e.run()
except KeyboardInterrupt:
e.shutdown()
| Python |
import struct
import logging
logger = logging.getLogger('async')
def pack_seqs(seqs, lsn):
    """Encode which sequence numbers 0..lsn are PRESENT as a packed bitmap.

    A '0' bit marks a sequence number contained in *seqs*, a '1' bit marks
    a missing one (unpack_seqs inverts this). The bit string is then packed
    into bytes by pack_bits.
    """
    logger.debug('bits.py -- pack_seqs with args seqs=%s, lsn=%s', seqs, lsn)
    # Input: list of sequence numbers; one bit per seq from 0 through lsn.
    bitstring = ''.join('0' if i in seqs else '1' for i in range(lsn + 1))
    logger.debug('bits.py -- pack_seqs bitstring: %s', bitstring)
    return pack_bits(bitstring)
def unpack_seqs(packed):
    """Inverse of pack_seqs: return the positions of every '0' bit."""
    bit_string = unpack_bits(packed)
    return [index for index, ch in enumerate(bit_string) if ch == '0']
def pack_bits(bitstring):
    """Pack a '0'/'1' string into bytes.

    Wire layout: two shorts (padding_bits, byte_count) followed by
    byte_count unsigned bytes. A final partial byte is left-padded with
    zeros and the pad width recorded so unpack_bits can strip it again.
    """
    padding_bits = 0
    byte_values = []
    remaining = bitstring
    while remaining:
        if len(remaining) < 8:
            # Final partial chunk: left-pad to a full byte and remember
            # how many filler bits were added.
            padding_bits = 8 - len(remaining)
            remaining = '0' * padding_bits + remaining
        byte_values.append(int(remaining[:8], 2))
        remaining = remaining[8:]
    count = len(byte_values)
    return struct.pack('hh' + str(count) + 'B', padding_bits, count, *byte_values)
def unpack_bits(packed):
    """Inverse of pack_bits: recover the original '0'/'1' bit string.

    @param packed: bytes produced by pack_bits -- an 'hh' header of
        (padding_bits, byte_count) followed by byte_count unsigned bytes
    @return: the bit string originally given to pack_bits
    """
    padding, count = struct.unpack('hh', packed[:4])
    byte_values = struct.unpack(str(count) + 'B', packed[4:])
    # Render each byte as exactly 8 bits, most significant bit first.
    # Bug fix: the old hand-rolled conversion used 'byte /= 2', which
    # silently breaks (float division, garbage digits) under
    # 'from __future__ import division' or Python 3; bin()/zfill is exact.
    strings = [bin(b)[2:].zfill(8) for b in byte_values]
    if padding:
        # pack_bits left-padded the final partial byte; strip those bits.
        last_index = len(strings) - 1
        strings[last_index] = strings[last_index][padding:]
    return ''.join(strings)
if __name__ == '__main__':
    # Interactive scratch REPL for poking at the functions above.
    # NOTE(review): exec of raw user input -- debugging convenience only;
    # never expose this beyond a local terminal.
    while True:
        cmd = raw_input('>>')
        try:
            exec(cmd)
        except Exception, e:
            print e
| Python |
import time
import cPickle as pickle
import os
import math
import gobject
from util import device, config
import logging
logger = logging.getLogger('async.timesync')
last_reliable = None
class TimeSync:
    """Gossip-style clock synchronization over the presence service.

    Each node advertises its current time and the timestamp of its last
    *reliable* fix (GPS/NTP/etc). A node adopts a neighbor's clock when
    the neighbor's reliable fix is more recent than its own.
    """
    # Presence meta keys used to advertise our clock state.
    TIME = 460
    LAST_RELIABLE = 461
    def __init__(self, presence):
        if not config.get_bool('async', 'timesync-enabled'):
            # Disable this functionality
            # NOTE(review): returning here leaves the instance without any
            # attributes; callers must not touch a disabled TimeSync.
            return
        self.__presence = presence
        # The last reliable fix is pickled next to this module.
        self.__time_file = os.path.join(os.path.dirname(__file__), 'time')
        logger.debug('Time file: %s. Exists? %s', self.__time_file, os.path.exists(self.__time_file))
        if os.path.exists(self.__time_file):
            f = open(self.__time_file, mode='rb')
            self.__last_reliable = pickle.load(f)
            f.close()
            logger.info('Loaded reliable time: %s', self.__last_reliable)
        else:
            self.__last_reliable = None
        self.__presence.connect('neighbor-online', self.__on_neighbor)
        self.__presence.connect('neighbor-update', self.__on_neighbor)
        # Minutes between updates
        # TODO: Replace with config
        minutes = 1
        gobject.timeout_add(1000*60*minutes, self.__update)
        self.__update()
    def __update(self):
        # Re-advertise our clock state; True keeps the gobject timer firing.
        logger.info('Updating. Time: %s. Last reliable: %s', self.my_time(), self.__last_reliable)
        self.__presence.add_meta(TimeSync.TIME, self.my_time())
        self.__presence.add_meta(TimeSync.LAST_RELIABLE, self.__last_reliable)
        return True
    def __on_neighbor(self, presence, neighbor, new=False):
        # Pull the neighbor's advertised clock state from its meta dict.
        other_time = neighbor.meta.get(TimeSync.TIME, None)
        other_last_reliable = neighbor.meta.get(TimeSync.LAST_RELIABLE, None)
        logger.info('Got neighbor input. Their time: %s. Their reliable time: %s.', other_time, other_last_reliable)
        # NOTE(review): truthiness also skips a legitimate value of 0;
        # 'is not None' would be stricter.
        if other_time and other_last_reliable:
            self.input_unreliable(other_time, other_last_reliable)
    def input_reliable(self, reliable_time):
        """
        Input the time from a reliable source.
        This source could be GPS, NTP, etc.
        @param reliable_time: A reliable time, in ticks
        @type reliable_time: float
        """
        logger.info('Inputting reliable time: %s', reliable_time)
        self.__last_reliable = reliable_time
        self.__set_time(reliable_time)
        # Persist the fix so it survives restarts (see __init__).
        f = open(self.__time_file, mode='wb')
        pickle.dump(self.__last_reliable, f)
        f.close()
    def input_unreliable(self, other_time, other_last_reliable):
        """Consider adopting a neighbor's clock (gossip input)."""
        logger.info('Inputting neighbor time %s with last reliable %s',
            other_time, other_last_reliable)
        # NOTE(review): when our __last_reliable is None this comparison
        # relies on Python 2 ordering None below everything, so any
        # neighbor fix wins -- apparently intended, but a TypeError on Py3.
        logger.debug('Other_last_reliable>self.__last_reliable? %s', other_last_reliable > self.__last_reliable)
        if other_last_reliable > self.__last_reliable:
            # If more than 1/10th of a second apart, set my time to
            # the time my neighbor is advertising.
            logger.debug('Other_time - self.my_time(): %s', other_time - self.my_time())
            if math.fabs(other_time - self.my_time()) > 0.1:
                logger.debug('My time: %s. Other time: %s. Setting to other.', self.my_time(), other_time)
                self.__set_time(other_time)
    def __set_time(self, new_time):
        # Set the system clock (device-specific), then re-advertise.
        logger.info('Setting time to %s', new_time)
        if device.current() in (device.NOKIA_TABLET, device.LINUX):
            time_string = time.strftime("%Y-%m-%d/%H:%M:%S",time.gmtime(new_time))
            # NOTE(review): retutime is Nokia-tablet specific -- confirm it
            # exists on plain Linux, which takes this same branch.
            exit_code = os.system('/mnt/initfs/usr/bin/retutime --set-time "%s"' % time_string)
            logger.info('Set time with time string %s. Exit code: %i', time_string, exit_code)
        else:
            raise NotImplementedError('No support for time setting on Windows yet')
        self.__update()
    def my_time(self):
        """Current local clock, in seconds since the epoch."""
        return time.time()
    def last_reliable(self):
        """Timestamp of the last reliable fix, or None if never set."""
        return self.__last_reliable
from util import config
import random
import logging
logger = logging.getLogger('async.strategy')
# Can implement other translations here
def to_bluetooth_target(presence):
    """Build a Target whose data path is Bluetooth, control path is IP."""
    metas = presence.meta
    control_addr = presence.ip
    control_port = metas[config.get_int('async.metas', 'control-port-key')]
    send_addr = metas[config.get_int('async.metas', 'bluetooth-mac-key')]
    send_port = metas[config.get_int('async.metas', 'bluetooth-unicast-port-key')]
    logger.debug('Bluetooth target: control addr: %s:%s ; send addr: %s:%s',
        control_addr, control_port, send_addr, send_port)
    return Target(presence, control_addr, control_port, send_addr, send_port)
def to_wifi_target(presence):
    """Build a Target using the neighbor's IP for both control and data."""
    metas = presence.meta
    control_addr = presence.ip
    control_port = metas[config.get_int('async.metas', 'control-port-key')]
    send_addr = presence.ip
    send_port = metas[config.get_int('async.metas', 'unicast-port-key')]
    logger.debug('Wifi target: control addr: %s:%s ; send addr: %s:%s',
        control_addr, control_port, send_addr, send_port)
    return Target(presence, control_addr, control_port, send_addr, send_port)
class Target:
    """Addressing bundle for one neighbor: control and data endpoints."""
    def __init__(self, presence, control_addr, control_port, send_addr, send_port):
        self.presence = presence
        # Both endpoints are (host, port) pairs ready for the socket layer.
        self.control_addr = (control_addr, control_port)
        self.send_addr = (send_addr, send_port)
class Strategy:
    """Decides which neighbors a complete message should be offered to."""
    def __init__(self):
        pass
    def evaluate(self, msg, neighbors):
        """Return (network id, [Target, ...]) for forwarding *msg*.

        Candidates are the neighbors that do not already know this mid.
        """
        candidates = [n for n in neighbors if not n.has_at_all(msg.header.mid)]
        logger.debug('Strategy for msg %s: Possible targets are %s', msg.header.mid, candidates)
        # Sort according to some criteria
        # ...
        targets = [to_wifi_target(n) for n in candidates]
        logger.debug('Strategy msg %s, ordered targets are %s', msg.header.mid, targets)
        network = config.get_int('async.networks', 'unicast')
        #network = config.get_int('async.networks', 'bluetooth-unicast')
        return (network, targets)
class RetransmissionStrategy:
    """Picks a single neighbor to ask for missing chunks of a message."""
    def __init__(self):
        pass
    def evaluate(self, mid, seqs, neighbors):
        """Return (network id, Target or None) for re-requesting *seqs* of *mid*."""
        candidates = [n for n in neighbors if n.has_complete(mid)]
        # Randomize it a bit so that we don't
        # continually target the same user
        random.shuffle(candidates)
        #target = to_bluetooth_target(candidates[0])
        target = to_wifi_target(candidates[0]) if candidates else None
        network = config.get_int('async.networks', 'unicast')
        #network = config.get_int('async.networks', 'bluetooth-unicast')
        return (network, target)
| Python |
import os
import time
import datetime
import cPickle as pickle
import struct
import cache, chunk
from util import config
# Constsants used
LSN_NOT_CALCULATED = -1
NO_GOOD = -1
import threading
from util.decorators import *
def add_days(time, days):
    """Return *time* (a seconds-based timestamp) advanced by *days* days."""
    seconds_per_day = 24 * 60 * 60
    return time + days * seconds_per_day
class Header:
    """Metadata describing one message; pickled onto the wire by raw()."""
    def __init__(self, url, msg_type, src, dests, meta, csize, expiration_delta, explicit_mid):
        """
        Creates a header for a message.
        This method generates a message id which is an integer.
        @param url: The absolute path of the file backing the message on the local device
        @type url: string
        @param msg_type: The type of the message to send
        @type msg_type: integer
        @param src: The ID of the node that sent this message
        @type src: integer
        @param dests: The destinations of this message. A list of node IDs (integers). Empty if this is broadcast.
        @type dests: List(integer)
        @param meta: A dictionary of arbitrary metadata associated with this message.
        This tends to be set by the application creating messages.
        @type meta: dictionary
        @param csize: The chunk size, in bytes, to use. The default is set in the async.config file. (Something like 1000)
        @type csize: integer
        @param expiration_delta: The time before expiration
        @type expiration_delta: integer
        @param explicit_mid: optional explicit message id; falsy means
        "derive the id by hashing identifying fields"
        """
        self.url = url
        self.src = src
        self.dests = dests
        self.meta = meta
        self.csize = csize
        self.created = time.time()
        # 1 minute, to test expiration
        self.expires = time.time() + expiration_delta
        self.received = -1  # set by from_raw() on the receiving side
        self.msg_type = msg_type
        self.lsn = LSN_NOT_CALCULATED  # last seq number; set by Message._create_chunks
        self.delivered = False
        self.hops = []  # node ids this header has passed through
        # Filename relative to this message type's folder root.
        self.fname = url.replace(config.get('async.folders', self.msg_type), '')
        # NOTE(review): a falsy explicit_mid (e.g. 0) is treated as absent.
        if explicit_mid:
            self.mid = int(explicit_mid)
        else:
            # NOTE(review): hash() of a str is not guaranteed stable across
            # Python versions/builds; confirm peers never need to
            # independently re-derive this id.
            self.mid = int(hash(str(self.fname) + str(self.src) + str(self.dests) + str(self.csize) + str(self.created)))
        # ttl?
        # self.ttl = #
        # self.mid = str(1)
    def get_file_path(self):
        # Gets a path, given the message type and the filename in the header
        return config.get('async.folders', self.msg_type) + '/' + self.fname
    def raw(self):
        """Serialize as a 4-byte length prefix followed by the pickled header."""
        pickled = pickle.dumps(self, pickle.HIGHEST_PROTOCOL)
        length = len(pickled)
        return struct.pack('i' + str(length) + 's', length, pickled)
    def am_sender(self, my_id):
        """True when this node originated the message."""
        return self.src == my_id
    def is_expired(self):
        return time.time() > self.expires
    @staticmethod
    def from_raw(raw):
        """Inverse of raw().

        NOTE(review): unpickles data received from the network, which is
        unsafe against malicious peers -- trusted-network assumption.
        """
        length = struct.unpack('i', raw[:4])[0]
        pickled = struct.unpack(str(length) + 's', raw[4:])[0]
        msg = pickle.loads(pickled)
        # Set the received time here, as we must have just received this
        msg.received = time.time()
        return msg
def create_new(url, msg_type, src, dests, meta, csize, expiration_delta, explicit_mid):
    """
    Used when sending a new message.
    Creates a new header, message, and initializes the chunks for this message.
    """
    header = Header(url, msg_type, src, dests, meta, csize, expiration_delta, explicit_mid)
    new_msg = Message(header)
    # The backing file already exists locally, so chunk it up front.
    new_msg._create_chunks()
    return new_msg
def create_from_header(header):
    """
    Used when assembling an incoming message.
    Creates a message from a header, but does not initialize the chunks.
    """
    return Message(header)
def depersist(url):
    """Load a previously persisted Message (see Message.persist) from disk.

    @param url: path to the pickled message file
    @return: the unpickled object
    """
    # Fix: 'with' guarantees the handle is closed even when unpickling
    # raises (the old open/close pair leaked the handle on error).
    with open(url, 'rb') as f:
        return pickle.load(f)
def get_persist_path(header):
    """Filesystem location where the message with this header is persisted."""
    active_root = config.get('async.folders', 'active-root')
    return active_root + str(header.mid) + '.message'
class Message:
    """A chunked message: a Header plus bookkeeping of which chunks we hold.

    Chunk payloads live in the cache module; self.__chunks only records
    which sequence numbers are present. Iterating a Message yields its
    chunks in order (Python 2 iterator protocol).
    """
    # NOTE(review): class-level lock shared by ALL Message instances --
    # @synchronized(lock) below serializes across every message, not
    # per instance.
    lock = threading.Lock()
    def __init__(self, header):
        self.header = header
        self.__last_good = NO_GOOD
        self.__chunks = {}  # seq -> True for every chunk we hold
        self.__next = -1    # iterator cursor (single-use; see __iter__)
    def _create_chunks(self):
        # Sending side: the backing file already exists, so mark every
        # chunk present and compute the last sequence number from its size.
        assert self.header is not None
        assert os.path.exists(self.header.url)
        fsize = os.stat(self.header.url).st_size
        seq = 0
        while fsize > 0:
            self.__chunks[seq] = True
            fsize -= self.header.csize
            seq +=1
        lsn = seq-1
        self.header.lsn = lsn
        self.__last_good = self.header.lsn
    @synchronized(lock)
    def get_chunk(self, seq):
        # Payload bytes come from the outgoing cache, not this object.
        return cache.get_outgoing().get_chunk(self.header, seq)
    @synchronized(lock)
    def put_chunk(self, chunk):
        # Store an incoming chunk's payload and record that we have it.
        cache.get_incoming().put_chunk(self.header, chunk)
        self.__chunks[chunk.seq] = True
    @synchronized(lock)
    def get_incomplete_seqs(self):
        """Return the sequence numbers still missing, in ascending order."""
        incomplete = []
        # Range(n) goes 0->n-1.
        # We need range(0...n), so add 1.
        for i in xrange(self.header.lsn + 1):
            if not i in self.__chunks:
                incomplete.append(i)
        return incomplete
    def is_complete(self):
        return len(self.get_incomplete_seqs()) == 0
    @synchronized(lock)
    def decode(self):
        # This just renames the cached file to the target file
        cache.get_incoming().msg_complete(self.header)
        # Also persists the file to disk
        self.__persist()
        assert os.path.exists(self.header.get_file_path())
    # FOR INTERNAL USE ONLY. Need to avoid some
    # locking headaches.
    def __persist(self):
        path = get_persist_path(self.header)
        f = open(path, mode='wb')
        pickle.dump(self, f, pickle.HIGHEST_PROTOCOL)
        f.close()
    @synchronized(lock)
    def persist(self):
        """Pickle this Message to its persist path (see get_persist_path)."""
        self.__persist()
    @synchronized(lock)
    def destroy_persisted(self):
        path = get_persist_path(self.header)
        if os.path.exists(path):
            os.remove(path)
        assert not os.path.exists(path)
    def is_expired(self):
        return self.header.is_expired()
    def is_for(self, dest):
        # An empty dests list means broadcast: the message is for everyone.
        return len(self.header.dests) == 0 or dest in self.header.dests
    def is_for_any(self, dests):
        # True when at least one of *dests* is a destination (or broadcast).
        if len(self.header.dests) == 0:
            return True
        else:
            for dest in dests:
                if dest in self.header.dests:
                    return True
            return False
    def is_for_all(self, dests):
        # True when *dests* is exactly the destination set (or broadcast).
        if len(self.header.dests) == 0:
            return True
        else:
            if len(dests) == len(self.header.dests):
                for dest in dests:
                    if dest not in self.header.dests:
                        return False
                return True
            return False
    def __iter__(self):
        # NOTE(review): returns self with a persistent cursor, so a Message
        # instance can only be iterated once.
        return self
    def next(self):
        # Python 2 iterator protocol (would be __next__ on Python 3).
        self.__next += 1
        if self.__next > self.header.lsn:
            raise StopIteration
        elif self.__next == self.header.lsn:
            # The final position on the wire is the typed end marker.
            c = chunk.EndChunk(self.header.mid, self.__next)
        else:
            c = self.get_chunk(self.__next)
        return c
| Python |
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
    """Compute (NaN, +Infinity, -Infinity) from raw IEEE-754 doubles.

    Built from hex bytes rather than float('nan')/float('inf'), which
    were not reliable across the Python 2.x versions this supports.
    """
    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
    # The struct module in Python 2.4 would get frexp() out of range here
    # when an endian is specified in the format string. Fixed in Python 2.5+
    if sys.byteorder != 'big':
        # Byte-swap each 8-byte double for little-endian platforms.
        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
    nan, inf = struct.unpack('dd', _BYTES)
    return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
class JSONDecodeError(ValueError):
    """Subclass of ValueError with the following additional properties:

    msg: The unformatted error message
    doc: The JSON document being parsed
    pos: The start index of doc where parsing failed
    end: The end index of doc where parsing failed (may be None)
    lineno: The line corresponding to pos
    colno: The column corresponding to pos
    endlineno: The line corresponding to end (may be None)
    endcolno: The column corresponding to end (may be None)
    """
    def __init__(self, msg, doc, pos, end=None):
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is not None:
            # Fix: compute from ``end``, not ``pos`` -- previously
            # endlineno/endcolno just duplicated lineno/colno.
            self.endlineno, self.endcolno = linecol(doc, end)
        else:
            self.endlineno, self.endcolno = None, None
def linecol(doc, pos):
    """Return the (line, column) of index *pos* within *doc*.

    Lines are numbered from 1.  Columns count characters since the most
    recent newline; on the first line the column is 0-based, on later
    lines effectively 1-based (historical behavior, preserved).
    """
    lineno = doc.count('\n', 0, pos) + 1
    colno = pos if lineno == 1 else pos - doc.rindex('\n', 0, pos)
    return lineno, colno
def errmsg(msg, doc, pos, end=None):
    """Format a decoding-error message with line/column detail.

    Note that this function is also called from the _speedups C
    extension, so its signature and behavior must stay stable.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
        msg, lineno, colno, endlineno, endcolno, pos, end)
# Literal constants accepted outside the strict JSON spec.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}
# A run of plain characters followed by the next '"', backslash, or
# control character -- the only characters needing special handling.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character escape sequences (everything except \uXXXX).
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
# Encoding assumed for byte strings when the caller gives none.
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string (Python 2 ``unicode``) and the
    index of the character in s after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise JSONDecodeError(msg, s, end)
            else:
                # Non-strict mode: keep the literal control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise JSONDecodeError(msg, s, end)
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise JSONDecodeError(msg, s, end)
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise JSONDecodeError(msg, s, end)
                uni2 = int(esc2, 16)
                # Combine high and low surrogates into one code point.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
# Insignificant whitespace per the JSON spec.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
object_pairs_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
pairs = []
# Use a slice to prevent IndexError from being raised, the following
# check will raise a more specific ValueError if the string is empty
nextchar = s[end:end + 1]
# Normally we expect nextchar == '"'
if nextchar != '"':
if nextchar in _ws:
end = _w(s, end).end()
nextchar = s[end:end + 1]
# Trivial empty object
if nextchar == '}':
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = {}
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end + 1
elif nextchar != '"':
raise JSONDecodeError("Expecting property name", s, end)
end += 1
while True:
key, end = scanstring(s, end, encoding, strict)
# To skip some function call overhead we optimize the fast paths where
# the JSON key separator is ": " or just ":".
if s[end:end + 1] != ':':
end = _w(s, end).end()
if s[end:end + 1] != ':':
raise JSONDecodeError("Expecting : delimiter", s, end)
end += 1
try:
if s[end] in _ws:
end += 1
if s[end] in _ws:
end = _w(s, end + 1).end()
except IndexError:
pass
try:
value, end = scan_once(s, end)
except StopIteration:
raise JSONDecodeError("Expecting object", s, end)
pairs.append((key, value))
try:
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar == '}':
break
elif nextchar != ',':
raise JSONDecodeError("Expecting , delimiter", s, end - 1)
try:
nextchar = s[end]
if nextchar in _ws:
end += 1
nextchar = s[end]
if nextchar in _ws:
end = _w(s, end + 1).end()
nextchar = s[end]
except IndexError:
nextchar = ''
end += 1
if nextchar != '"':
raise JSONDecodeError("Expecting property name", s, end - 1)
if object_pairs_hook is not None:
result = object_pairs_hook(pairs)
return result, end
pairs = dict(pairs)
if object_hook is not None:
pairs = object_hook(pairs)
return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array from *s* starting just after its '['.

    (Python 2 tuple-parameter syntax.)  Returns ``(values, end)`` where
    *end* is the index just past the closing ']'.
    """
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        _append(value)
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end)
        # Skip whitespace after the comma before the next value.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """
    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """
        *encoding* determines the encoding used to interpret any
        :class:`str` objects decoded by this instance (``'utf-8'`` by
        default). It has no effect when decoding :class:`unicode` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as :class:`unicode`.

        *object_hook*, if specified, will be called with the result of every
        JSON object decoded and its return value will be used in place of the
        given :class:`dict`. This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        *object_pairs_hook* is an optional function that will be called with
        the result of any object literal decode with an ordered list of pairs.
        The return value of *object_pairs_hook* will be used instead of the
        :class:`dict`. This feature can be used to implement custom decoders
        that rely on the order that the key and value pairs are decoded (for
        example, :func:`collections.OrderedDict` will remember the order of
        insertion). If *object_hook* is also defined, the *object_pairs_hook*
        takes priority.

        *parse_float*, if specified, will be called with the string of every
        JSON float to be decoded. By default, this is equivalent to
        ``float(num_str)``. This can be used to use another datatype or parser
        for JSON floats (e.g. :class:`decimal.Decimal`).

        *parse_int*, if specified, will be called with the string of every
        JSON int to be decoded. By default, this is equivalent to
        ``int(num_str)``. This can be used to use another datatype or parser
        for JSON integers (e.g. :class:`float`).

        *parse_constant*, if specified, will be called with one of the
        following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
        can be used to raise an exception if invalid JSON numbers are
        encountered.

        *strict* controls the parser's behavior when it encounters an
        invalid control character in a string. The default setting of
        ``True`` means that unescaped control characters are parse errors, if
        ``False`` then control characters will be allowed in strings.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # scan_once dispatches on a value's first character; the C
        # scanner is used automatically when the speedups import.
        self.scan_once = make_scanner(self)
    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        # Only trailing whitespace may follow the document.
        end = _w(s, end).end()
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end, len(s))
        return obj
    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
        beginning with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            # The scanner signals "no value starts here" this way.
            raise JSONDecodeError("No JSON object could be decoded", s, idx)
        return obj, end
| Python |
"""Drop-in replacement for collections.OrderedDict by Raymond Hettinger
http://code.activestate.com/recipes/576693/
"""
from UserDict import DictMixin
# Modified from original to support Python 2.4, see
# http://code.google.com/p/simplejson/issues/detail?id=53
try:
    all
except NameError:
    # Python 2.4 has no all() builtin; provide a minimal equivalent.
    def all(seq):
        for elem in seq:
            if not elem:
                return False
        return True
class OrderedDict(dict, DictMixin):
    """Dictionary that remembers insertion order (pure-Python recipe).

    Order is tracked with a circular doubly linked list of keys:
    ``self.__end`` is the sentinel node and ``self.__map`` maps each key
    to its list node ``[key, prev, next]``.
    """
    def __init__(self, *args, **kwds):
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Only initialize the linked list once (re-__init__ keeps order).
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)
    def clear(self):
        self.__end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.__map = {}                 # key --> [key, prev, next]
        dict.clear(self)
    def __setitem__(self, key, value):
        if key not in self:
            # Splice a new node in just before the sentinel (i.e. at the end).
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Unlink the node from the list.
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev
    def __iter__(self):
        # Walk the linked list forward from the sentinel.
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]
    def __reversed__(self):
        # Walk the linked list backward from the sentinel.
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]
    def popitem(self, last=True):
        if not self:
            raise KeyError('dictionary is empty')
        # Modified from original to support Python 2.4, see
        # http://code.google.com/p/simplejson/issues/detail?id=53
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value
    def __reduce__(self):
        # Exclude the linked-list machinery from the pickled state; it is
        # rebuilt from the items on unpickling.
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)
    def keys(self):
        return list(self)
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems
    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())
    def copy(self):
        return self.__class__(self)
    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d
    def __eq__(self, other):
        # Comparison with another OrderedDict is order-sensitive;
        # with a plain dict it is order-insensitive.
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and \
                   all(p==q for p, q in  zip(self.items(), other.items()))
        return dict.__eq__(self, other)
    def __ne__(self, other):
        return not self == other
| Python |
"""JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
__all__ = ['make_scanner']
# JSON number grammar: integer part, optional fraction, optional exponent.
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
    """Build a ``scan_once(string, idx)`` closure from a decoder *context*.

    The closure returns ``(value, end_index)`` for the JSON value that
    starts at *idx*, or raises StopIteration when nothing parseable
    starts there.  Context attributes are bound to locals up front for
    speed.
    """
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    object_pairs_hook = context.object_pairs_hook
    def _scan_once(string, idx):
        try:
            nextchar = string[idx]
        except IndexError:
            raise StopIteration
        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), encoding, strict,
                _scan_once, object_hook, object_pairs_hook)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5
        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        # The non-spec constants are only tried after a number match fails.
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration
    return _scan_once
# Prefer the C scanner when the speedups extension is available.
make_scanner = c_make_scanner or py_make_scanner
| Python |
import unittest
import doctest
class OptionalExtensionTestSuite(unittest.TestSuite):
    """Suite that runs its tests twice: once as-is and once with the C
    speedups disabled, then re-enables the speedups."""
    def run(self, result):
        import simplejson
        run = unittest.TestSuite.run
        run(self, result)
        simplejson._toggle_speedups(False)
        run(self, result)
        simplejson._toggle_speedups(True)
        return result
def additional_tests(suite=None):
    """Add the doctest-based tests (module docstrings and index.rst) to
    *suite*, creating a new TestSuite when none is given."""
    import simplejson
    import simplejson.encoder
    import simplejson.decoder
    if suite is None:
        suite = unittest.TestSuite()
    for mod in (simplejson, simplejson.encoder, simplejson.decoder):
        suite.addTest(doctest.DocTestSuite(mod))
    suite.addTest(doctest.DocFileSuite('../../index.rst'))
    return suite
def all_tests_suite():
    """Load every named test module plus the doctests, wrapped in an
    OptionalExtensionTestSuite so everything runs with and without the
    C speedups."""
    suite = unittest.TestLoader().loadTestsFromNames([
        'simplejson.tests.test_check_circular',
        'simplejson.tests.test_decode',
        'simplejson.tests.test_default',
        'simplejson.tests.test_dump',
        'simplejson.tests.test_encode_basestring_ascii',
        'simplejson.tests.test_fail',
        'simplejson.tests.test_float',
        'simplejson.tests.test_indent',
        'simplejson.tests.test_pass1',
        'simplejson.tests.test_pass2',
        'simplejson.tests.test_pass3',
        'simplejson.tests.test_recursion',
        'simplejson.tests.test_scanstring',
        'simplejson.tests.test_separators',
        'simplejson.tests.test_unicode',
    ])
    suite = additional_tests(suite)
    return OptionalExtensionTestSuite([suite])
def main():
    """Run the full simplejson test suite with a text runner."""
    suite = all_tests_suite()
    unittest.TextTestRunner().run(suite)
if __name__ == '__main__':
    import os
    import sys
    # Make the in-tree simplejson package importable when run from source.
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
    main()
| Python |
"""Implementation of JSONEncoder
"""
import re
try:
from simplejson._speedups import encode_basestring_ascii as \
c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
from simplejson.decoder import PosInf
# Characters that must always be escaped inside a JSON string.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# Additionally escape everything non-printable-ASCII (ensure_ascii mode).
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects non-ASCII bytes in a (Python 2) str.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
# Remaining control characters get the generic \uXXXX form.
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Floats are rendered with repr().
FLOAT_REPR = repr
def encode_basestring(s):
    """Return a JSON representation of a Python string.

    Byte strings containing non-ASCII bytes are decoded as UTF-8 first;
    the result is a ``unicode`` object.
    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        return ESCAPE_DCT[match.group(0)]
    return u'"' + ESCAPE.sub(replace, s) + u'"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string.

    Every non-printable or non-ASCII character is emitted as \\uXXXX
    (or a surrogate pair for code points above the BMP).
    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                #return '\\u{0:04x}'.format(n)
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C implementation when the speedups extension is available.
encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; __init__ may override via *separators*.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most compact
        representation without any newlines. For backwards compatibility with
        versions of simplejson earlier than 2.1.0, an integer is also accepted
        and is converted to a string with that many spaces.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        # Backwards compatibility: integer indent means that many spaces.
        if isinstance(indent, (int, long)):
            indent = ' ' * indent
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            self.default = default
        self.encoding = encoding
    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)

        """
        raise TypeError(repr(o) + " is not JSON serializable")
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> from simplejson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'

        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)

        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are decoded with the
            # configured encoding first.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan,
                _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text
        # The C encoder only handles the compact, unsorted one-shot case.
        if (_one_shot and c_make_encoder is not None
                and not self.indent and not self.sort_keys):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
        _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    # Builds the pure-Python iterencode closure.  *markers* (id -> obj)
    # detects circular references when not None; _indent/_floatstr etc.
    # come from JSONEncoder.iterencode.
    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON encoding of a list/tuple, piece by piece.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item, the pending prefix is just the
                # separator (buf is reassigned, not appended to).
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON encoding of a dict, piece by piece.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch on the object's type.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            # Unknown type: let default() convert it, guarding against a
            # default() that returns (a structure containing) its input.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
| Python |
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError(repr(o) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.1.0'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
'OrderedDict',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decoder import JSONDecoder, JSONDecodeError
from encoder import JSONEncoder
try:
from collections import OrderedDict
except ImportError:
from ordered_dict import OrderedDict
# Shared encoder used by dump()/dumps() when called with all-default
# options -- avoids constructing a new JSONEncoder on every call.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    If ``skipkeys`` is true, ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``,
    ``None``) are skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, some chunks written to ``fp`` may be
    ``unicode`` instances, subject to normal Python ``str`` to ``unicode``
    coercion rules. Unless ``fp.write()`` explicitly understands
    ``unicode`` (as in ``codecs.getwriter()``) this is likely to cause
    an error.

    If ``check_circular`` is false, the circular reference check for
    container types is skipped; a circular reference then results in an
    ``OverflowError`` (or worse).

    If ``allow_nan`` is false, serializing out-of-range ``float`` values
    (``nan``, ``inf``, ``-inf``) raises ``ValueError`` in strict
    compliance with the JSON specification, instead of emitting the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a string, array elements and object members are
    pretty-printed with a newline followed by that string repeated for
    each nesting level. ``None`` (the default) selects the most compact
    representation. For backwards compatibility with simplejson earlier
    than 2.1.0, an integer is also accepted and converted to that many
    spaces.

    ``separators``, if given, is an ``(item_separator, dict_separator)``
    tuple used instead of the default ``(', ', ': ')``; ``(',', ':')``
    is the most compact JSON representation.

    ``encoding`` is the character encoding for ``str`` instances
    (UTF-8 by default).

    ``default(obj)`` is a function that should return a serializable
    version of ``obj`` or raise ``TypeError``; the default simply raises
    ``TypeError``.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides
    the ``.default()`` method to serialize additional types), pass it as
    the ``cls`` kwarg.
    """
    # Fast path: with every option at its default we can reuse the shared
    # module-level encoder instead of building a new one.
    use_cached = (not skipkeys and ensure_ascii and
        check_circular and allow_nan and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw)
    if use_cached:
        chunks = _default_encoder.iterencode(obj)
    else:
        encoder_cls = cls
        if encoder_cls is None:
            encoder_cls = JSONEncoder
        encoder = encoder_cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, encoding=encoding,
            default=default, **kw)
        chunks = encoder.iterencode(obj)
    # could accelerate with writelines in some versions of Python, at
    # a debuggability cost
    for chunk in chunks:
        fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    If ``skipkeys`` is true then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, then the return value will be a
    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
    coercion rules instead of being escaped to an ASCII ``str``.

    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).

    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
    strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a string, then JSON array elements and object members
    will be pretty-printed with a newline followed by that string repeated
    for each level of nesting. ``None`` (the default) selects the most compact
    representation. For backwards compatibility with
    versions of simplejson earlier than 2.1.0, an integer is also accepted
    and is converted to a string with that many spaces.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.

    ``encoding`` is the character encoding for str instances, default is UTF-8.

    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # cached encoder -- reuse the shared module-level encoder when every
    # option is at its default value.
    if (not skipkeys and ensure_ascii and
        check_circular and allow_nan and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw):
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    return cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default,
        **kw).encode(obj)
# Shared decoder used by load()/loads() when called with all-default
# options (counterpart of _default_encoder).
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
    object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    *encoding* determines the encoding used to interpret any :class:`str`
    objects decoded by this instance (``'utf-8'`` by default). It has no
    effect when decoding :class:`unicode` objects. Note that currently only
    encodings that are a superset of ASCII work; strings of other encodings
    should be passed in as :class:`unicode`.

    *object_hook*, if specified, is called with the result of every JSON
    object decoded, and its return value is used in place of the given
    :class:`dict` (e.g. to support JSON-RPC class hinting).

    *object_pairs_hook*, if specified, is called with an ordered list of
    pairs for every object literal decoded, and its return value is used
    instead of the :class:`dict`; useful for order-sensitive decoders such
    as :func:`collections.OrderedDict`. When both hooks are given,
    *object_pairs_hook* takes priority.

    *parse_float* / *parse_int*, if specified, are called with the string
    of every JSON float / int to be decoded (default behavior is
    ``float(num_str)`` / ``int(num_str)``), allowing alternate datatypes
    such as :class:`decimal.Decimal` or :class:`float`.

    *parse_constant*, if specified, is called with one of ``'-Infinity'``,
    ``'Infinity'``, ``'NaN'``; it can be used to raise on invalid JSON
    numbers.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    # ``load`` is simply ``loads`` applied to the file's full contents.
    document = fp.read()
    return loads(document,
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
        **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a
    JSON document) to a Python object.

    *encoding* determines the encoding used to interpret any :class:`str`
    objects decoded by this instance (``'utf-8'`` by default). It has no
    effect when decoding :class:`unicode` objects. Note that currently only
    encodings that are a superset of ASCII work; strings of other encodings
    should be passed in as :class:`unicode`.

    *object_hook*, if specified, is called with the result of every JSON
    object decoded, and its return value is used in place of the given
    :class:`dict` (e.g. to support JSON-RPC class hinting).

    *object_pairs_hook*, if specified, is called with an ordered list of
    pairs for every object literal decoded, and its return value is used
    instead of the :class:`dict`; useful for order-sensitive decoders such
    as :func:`collections.OrderedDict`. When both hooks are given,
    *object_pairs_hook* takes priority.

    *parse_float* / *parse_int*, if specified, are called with the string
    of every JSON float / int to be decoded (default behavior is
    ``float(num_str)`` / ``int(num_str)``), allowing alternate datatypes
    such as :class:`decimal.Decimal` or :class:`float`.

    *parse_constant*, if specified, is called with one of ``'-Infinity'``,
    ``'Infinity'``, ``'NaN'``; it can be used to raise on invalid JSON
    numbers.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    # Fast path: with every option at its default, use the shared decoder.
    all_defaults = (cls is None and encoding is None and object_hook is None
        and parse_int is None and parse_float is None
        and parse_constant is None and object_pairs_hook is None
        and not kw)
    if all_defaults:
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied, so the decoder
    # keeps its own defaults for the rest.
    optional_hooks = (
        ('object_hook', object_hook),
        ('object_pairs_hook', object_pairs_hook),
        ('parse_float', parse_float),
        ('parse_int', parse_int),
        ('parse_constant', parse_constant),
    )
    for hook_name, hook in optional_hooks:
        if hook is not None:
            kw[hook_name] = hook
    return cls(encoding=encoding, **kw).decode(s)
def _toggle_speedups(enabled):
    """Switch the package between its C accelerators and pure-Python code.

    Internal/testing helper: rebinds the scanner/encoder entry points in
    the decoder, encoder and scanner submodules, then rebuilds the cached
    module-level default encoder and decoder so they pick up the new
    bindings.

    @param enabled: when true, prefer the C speedups (falling back to the
        pure-Python implementations if the extension is unavailable).
    """
    import simplejson.decoder as dec
    import simplejson.encoder as enc
    import simplejson.scanner as scan
    try:
        from simplejson._speedups import make_encoder as c_make_encoder
    except ImportError:
        c_make_encoder = None
    if enabled:
        # Use the C implementations where the extension provides them.
        dec.scanstring = dec.c_scanstring or dec.py_scanstring
        enc.c_make_encoder = c_make_encoder
        enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
            enc.py_encode_basestring_ascii)
        scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
    else:
        # Force the pure-Python implementations.
        dec.scanstring = dec.py_scanstring
        enc.c_make_encoder = None
        enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
        scan.make_scanner = scan.py_make_scanner
    dec.make_scanner = scan.make_scanner
    # Rebuild the cached module-level defaults so they bind to the newly
    # selected implementations.
    global _default_decoder
    _default_decoder = JSONDecoder(
        encoding=None,
        object_hook=None,
        object_pairs_hook=None,
    )
    global _default_encoder
    _default_encoder = JSONEncoder(
        skipkeys=False,
        ensure_ascii=True,
        check_circular=True,
        allow_nan=True,
        indent=None,
        separators=None,
        encoding='utf-8',
        default=None,
    )
r"""Command-line tool to validate and pretty-print JSON
Usage::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
import sys
import simplejson as json
def main():
    """Validate JSON read from stdin (or *infile*) and pretty-print it to
    stdout (or *outfile*).

    Usage: ``python -m simplejson.tool [infile [outfile]]``

    Raises SystemExit with a usage string on bad arguments, or with the
    decode error message on invalid JSON.
    """
    if len(sys.argv) == 1:
        infile = sys.stdin
        outfile = sys.stdout
    elif len(sys.argv) == 2:
        infile = open(sys.argv[1], 'rb')
        outfile = sys.stdout
    elif len(sys.argv) == 3:
        infile = open(sys.argv[1], 'rb')
        outfile = open(sys.argv[2], 'wb')
    else:
        raise SystemExit(sys.argv[0] + " [infile [outfile]]")
    try:
        try:
            # BUG FIX: ``except ValueError, e`` is Python-2-only syntax;
            # the ``as`` form works on Python 2.6+ and Python 3.
            obj = json.load(infile, object_pairs_hook=json.OrderedDict)
        except ValueError as e:
            raise SystemExit(e)
        json.dump(obj, outfile, sort_keys=True, indent=' ')
        outfile.write('\n')
    finally:
        # BUG FIX: files opened above were never closed; close anything we
        # opened ourselves (never stdin/stdout).
        if infile is not sys.stdin:
            infile.close()
        if outfile is not sys.stdout:
            outfile.close()

if __name__ == '__main__':
    main()
| Python |
macs = {}
f = open('wireless-kwan.csv', mode='ra')
for line in f:
# name,ip address,cname,mac - wired,wireless - a,wireless - b,notes
name, ip, cname, wired_mac, wireless_a, wireless_b, notes = line.split(',')
wireless_a = wireless_a.strip()
wireless_b = wireless_b.strip()
cname = cname.strip()
if wireless_a not in ['n/a', '']:
macs[wireless_a] = cname
if wireless_b not in ['n/a', '']:
macs[wireless_b] = cname
f.close()
import re
r = re.compile('.*-(\d+)')
import sqlite3
conn = sqlite3.connect('aps.db')
c = conn.cursor()
c.execute(''' create table aps (mac string, room string) ''')
conn.commit()
for mac, name in macs.iteritems():
room = str(r.match(name).groups(0)[0])
c = conn.cursor()
c.execute(''' insert into aps values (?,?) ''', (mac, room))
conn.commit()
# Sanity check
c = conn.cursor()
c.execute (''' select * from aps ''')
for row in c:
print row
| Python |
import os
import re
import device
def scan(ifname):
    """Scan for 802.11 access points on interface *ifname*.

    Parses the output of ``iwlist <ifname> scanning`` and returns a list
    of dicts, one per access point, with keys (each present only if the
    corresponding line appeared in the output):
      'mac'        -- BSSID string
      'essid'      -- network name
      'level'      -- signal level in dBm, as a string
      'encryption' -- True/False
    """
    # Some debugging stuff: set live = False to parse canned output from
    # 'n800-aps.txt' instead of running iwlist.
    live = True
    if live:
        a = os.popen("iwlist " + ifname + " scanning")
        lines = a.read().strip().split('\n')
        a.close()
    else:
        a = open('n800-aps.txt')
        lines = a.read().strip().split('\n')
        a.close()
    current = -1
    aps = []
    while lines:
        # ``ap`` tracks the access point currently being filled in.
        # NOTE(review): if a property line precedes the first Cell line,
        # ``ap`` is unbound -- iwlist output starts with a Cell line, so
        # this should not happen in practice; confirm.
        if current > -1:
            ap = aps[current]
        line = lines[0]
        # A "Cell NN - Address:" line starts a new access point.
        mac = re.compile("[ ]+Cell [0-9]+ - Address: (?P<mac>[\w:]+)")
        macmatch = mac.match(line)
        if macmatch:
            ap = {}
            # BUG FIX: was ``macmatch.groups('mac')[0]`` -- groups() takes
            # a *default*, not a group name; group('mac') is what's meant
            # (equivalent here, but only by accident).
            ap['mac'] = macmatch.group('mac')
            aps.append(ap)
            current += 1
            lines = lines[1:]
            continue
        essid = re.compile("[ ]+ESSID:\"(?P<essid>[\w\s\.]*)\"")
        essidmatch = essid.match(line)
        if essidmatch:
            ap['essid'] = essidmatch.group('essid')
            lines = lines[1:]
            continue
        # The signal-level line format differs between wireless-tools
        # builds: ':' on the Nokia tablet, '=' elsewhere.
        if device.current_device() == device.NOKIA_TABLET:
            signal = re.compile(".*Signal level:(?P<level>-\d+) dBm")
        else:
            signal = re.compile(".*Signal level=(?P<level>-\d+) dBm")
        signalmatch = signal.match(line)
        if signalmatch:
            ap['level'] = signalmatch.group('level')
            lines = lines[1:]
            continue
        enc = re.compile("[ ]+Encryption key:(?P<encryption>[\w]*)")
        encmatch = enc.match(line)
        if encmatch:
            # BUG FIX: Python strings have no .trim() -- the original
            # raised AttributeError whenever an Encryption line matched;
            # .strip() is the correct method.
            encrypted = encmatch.group('encryption').strip()
            if encrypted == 'off':
                encrypted = False
            else:
                encrypted = True
            ap['encryption'] = encrypted
            lines = lines[1:]
            continue
        lines = lines[1:]
    return aps
if __name__ == '__main__':
    # Self-test: scan on eth1, sort the results by signal level, then keep
    # only Media Lab networks.
    aps = scan('eth1')
    from operator import itemgetter
    # NOTE(review): 'level' may be absent from an AP dict (only set when a
    # signal line was parsed), and it is a string -- confirm the sort is
    # behaving as intended.
    aps = sorted(aps, key= itemgetter('level'))
    # Filter ML only
    ml_re = re.compile("media lab 802\.11")
    def ml_filter(x):
        # Keep only APs whose ESSID matches the Media Lab pattern; APs with
        # no parsed ESSID are dropped.
        if 'essid' in x:
            return ml_re.match(x['essid']) != None
        else:
            return False
    print filter(ml_filter, aps)
| Python |
import sqlite3
DB_URL = 'apps.db'
def where_am_i(self, macs):
    """Map wireless AP MAC addresses to room identifiers via the aps table.

    @param macs: iterable of AP MAC address strings
    @return: list of rooms, one entry per MAC found in the database
    @raise ValueError: if the database maps one MAC to several rooms

    NOTE(review): ``self`` is unused -- this reads like a method that lost
    its enclosing class (the self-test below instantiates an ``APLookup``
    that is not defined here). Signature kept for compatibility.
    """
    locations = []
    for mac in macs:
        c = sqlite3.connect(DB_URL).cursor()
        c.execute(''' select room from aps where mac=? ''', (mac,))
        location = None
        for row in c:
            if location:
                # BUG FIX: a bare ``raise`` with no active exception is
                # itself an error (TypeError in Python 2, RuntimeError in
                # Python 3); raise something meaningful instead.
                raise ValueError('multiple rooms found for mac %r' % (mac,))
            location = row[0]
        if location:
            locations.append(location)
    return locations
if __name__ == '__main__':
    # Some test code
    # NOTE(review): ``APLookup`` is not defined anywhere in this module, so
    # this self-test raises NameError as written -- presumably
    # ``where_am_i`` above was meant to be a method of that class; confirm.
    a = APLookup()
    locations = a.where_am_i(['00-30-F1-79-2C-57'])
    # Should be 023
    # NOTE(review): rooms are stored as strings (e.g. '023'), so comparing
    # against the integer 23 looks wrong -- verify the expected type.
    assert len(locations) == 1
    assert locations[0] == 23
    def are_comprable(list1, list2):
        # True when both lists contain the same items, ignoring order
        # (assumes no meaningful duplicates).
        if len(list1) != len(list2):
            return False
        for item in list1:
            if item not in list2:
                return False
        for item in list2:
            if item not in list1:
                return False
        return True
    locations = a.where_am_i(['00-02-2D-2E-57-10', '00-20-A6-4F-2A-F6'])
    assert are_comprable([441, 3], locations)
    print 'yes!'
| Python |
class TheMixerException(Exception): pass | Python |
# For audio recording and messaging
import time
import random
import gobject
import os
from messaging2 import ahconstants as ahcon
import ui.uiconstants as uicon
from setup import common
import os, datetime
from util.persist import *
SENT_FILE = "_sent_tvm.data"
RECV_FILE = "_recv_tvm.data"
"""
TivoManager manages the recorded conferences that has happened.
It is a little more than audio message since it contains information about
the users who participated in the conference.
It will appear as a message and when you click it, you will see users
that participated in the conference.
"""
class ConferenceManager(gobject.GObject):
    """Sends, receives and persists recorded audio ("tivo") messages.

    NOTE(review): the conference-related callbacks below are still empty
    stubs; only plain audio-message send/receive is implemented here.
    """
    __gsignals__ = {
        # (guid, message type) -- guid is -1 when sending failed
        'sent_audio' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_INT, gobject.TYPE_INT,)),
        # (details dict) for a message received over the network
        'received_audio' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_PYOBJECT,)),
    }
    def __init__(self, am, mixer):
        """
        @param am: async manager used to send/receive messages
        @param mixer: audio mixer used for playback of received messages
        """
        # stores audio messages indexed by guid
        gobject.GObject.__init__( self )
        self.am = am
        self.mixer = mixer
        #self.__createMessagingPipelines()
        self.path = ahcon.MESSAGING_PATH+'apps/tivo/'
        if not os.path.exists(self.path):
            print "Creating path for tivo"
            os.makedirs(self.path)
        if os.path.exists(self.path + SENT_FILE):
            # Load persisted texts from disk
            print "Loading persisted sent tivo messages"
            self.sent_msgs = depersist(self.path+SENT_FILE)
        else:
            # No persisted texts to load
            self.sent_msgs = {}
        if os.path.exists(self.path + RECV_FILE):
            # Load persisted texts from disk
            print "Loading persisted received tivo messages"
            self.recv_msgs = depersist(self.path+RECV_FILE)
        else:
            # No persisted texts to load
            self.recv_msgs = {}
        # Scratch file the recorder writes to before a message is sent.
        self.temp_rec_file_name = self.path+"temp_tivo.wav"
        self._type = uicon.TIVOMSG
    def onStartConference(self, obj, ids, filename ):
        """
        Called when the conference is initiated and a user drops out
        and set the destination
        """
        # store list of guids
        pass
    def onStopConference(self, obj, ids, filename):
        """
        Send message to the id that is offline
        """
        pass
    def onOffline(self, obj, guid):
        """
        @param obj: L{PresenceService}
        @param guid: id of the user or service
        """
        #Need to start tivo if the guid is in current conference
        pass
    def onAddToConference(self, obj, guid, filename):
        """
        When new user is added to the conference
        """
        # NOTE(review): stub -- the docstring is the entire body, so this
        # is a silent no-op.
    def sendAudio(self, senddata):
        """
        Called when the UI sends a request to send a message
        @param senddata: dictionary with fields
            1. 'id': temporary id
            2. 'tags': tags of information
            3. 'priority': priority of data
            4. 'recipients': the recipients of data
            5. 'strength': the strength of spread
            6. 'reply': the guid of the message this message is replying to
        @return the official guid to the UI of the sent message, -1 if it failed sending
        """
        # NOTE(review): despite the docstring, the guid is only delivered
        # via the 'sent_audio' signal; the method itself returns None --
        # confirm callers rely on the signal, not the return value.
        tags = senddata['tags']
        id = senddata['id']
        recipients = senddata['recipients']
        # metadata to send with audio
        meta = {}
        meta[ahcon.PRIORITY_KEY] = senddata['priority']
        meta[ahcon.EXPIRES_KEY] = time.time()+3000 # datetime.datetime.now() + datetime.timedelta(days=1)
        meta[ahcon.STRENGTH_KEY] = senddata['strength']
        try:
            reply = senddata['reply']
            meta[ahcon.REPLY_KEY] = reply
        except KeyError:
            print "Not a reply audio message"
        # Right now, let's rename to the current time
        newid = random.randint(1,100000)
        newpath = self.path + "a_" + str(newid) + ".audio"
        if not os.path.exists(self.temp_rec_file_name):
            # Nothing was recorded; report failure via the signal.
            print "Requested audio file to send didn't exist"
            self.emit('sent_audio', -1, uicon.AUDIOMSG)
            return
        length = common.GetAudioDuration(self.temp_rec_file_name)
        meta[ahcon.LENGTH_KEY] = length
        #newid = time.time()
        #os.chdir(self.path)
        #print os.getcwd()
        #oldpath = str(id) + ".audio"
        #newpath = str(newid) + ".audio"
        # Move the scratch recording to its permanent per-message path.
        print "Renaming from",self.temp_rec_file_name,"to",newpath
        os.rename(self.temp_rec_file_name, newpath)
        guid, src = self.am.sendMessage(uicon.AUDIOMSG, recipients, newpath, tags, meta)
        self.sent_msgs[guid] = {'id':guid, 'src':src, 'recipients':recipients, 'tags':tags, 'url':newpath, 'meta':meta}
        print "Sent audio message: guid:",guid,"sub:",tags,"recip:",recipients
        persist(self.sent_msgs, self.path+SENT_FILE)
        self.emit('sent_audio', guid, uicon.AUDIOMSG)
    def onMessageReceived(self, guid, src, recipients, tags, url, meta):
        """
        Callback from AsyncManager when audio message is received over the network.
        @param guid: the guid of message received
        @type guid: number
        @param src: The guid of the message sender
        @type src: number
        @param recipients: The target recipients of this message
        @type recipients: List[number]
        @param tags: String of comma seperated tags
        @type tags: String
        @param url: The url to the received message
        @type url: String
        @param meta: Metadata attached to this message
        @type meta: dictionary
        """
        print "Received message: ",guid,src,recipients,meta
        print "url: ",url
        # Stamp the arrival time, persist, then notify the UI.
        meta[ahcon.RECV_TIME_KEY] = time.time()
        self.recv_msgs[guid] = {'id':guid, 'src':src, 'recipients':recipients, 'tags':tags, 'url':url,'meta':meta}
        name = "Voice message from " + str(src) + " tag: " + str(tags)
        persist(self.recv_msgs, self.path+RECV_FILE)
        # url is needed for mixer to play back audio
        self.emit('received_audio', { 'subcommand': uicon.ADD,
            'id':guid,
            'type':self._type,
            'url':url,
            'label':tags,
            'status': uicon.EXPIRES_SOON,
            'priority': meta.get(ahcon.PRIORITY_KEY, uicon.PRIORITY_LOW),
            'date': meta[ahcon.RECV_TIME_KEY]
            })
    def getAudioDetails(self, guid):
        """
        Called from L{ObjectBroker} when user requests OPEN_OBJECT by double clicking on audio message
        """
        detail = self.recv_msgs[guid]
        detail['type'] = self._type
        return detail
    def getAllAudio(self):
        """
        Used during initialization to return all items user has
        @return details for every received audio messages

        Side effect: each message is also registered with the mixer via
        onAddInput.
        """
        audio_list = []
        for msg in self.recv_msgs.values():
            detail = {'subcommand':uicon.ADD, 'id': msg['id'], 'type': self._type, 'label': msg['tags'],
                'url': msg['url'], 'status': -1,
                'priority': msg['meta'][ahcon.PRIORITY_KEY],
                'date' : msg['meta'][ahcon.RECV_TIME_KEY] }
            audio_list.append( detail )
            self.mixer.onAddInput( self, detail )
        return audio_list
gobject.type_register( AudioManager ) | Python |
import pygst
pygst.require("0.10")
import gst
import gobject
from util import device
class MicController( gobject.GObject ):
    """Controls the microphone: records to a WAV file, or streams
    mu-law RTP audio to a broadcast address over UDP.

    Emits 'my_ssrc' once the RTP SSRC of the outgoing stream is known.
    """
    __gsignals__ = {
        # (ssrc) -- the RTP synchronization source id of our own stream
        'my_ssrc' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_UINT,))
    }
    def __init__(self, bcast_host, port):
        """
        @param bcast_host: broadcast address to stream live audio to
        @param port: UDP port for the RTP stream (RTCP would use port+1)
        """
        gobject.GObject.__init__( self )
        self._bcast_host = bcast_host
        self._port = port
        # record to file pipeline: mic -> caps -> wavenc -> filesink
        self._file_pipeline = gst.Pipeline( "FVFileRecord" )
        if device.current() == device.NOKIA_TABLET:
            # Nokia DSP
            self._file_mic_src = gst.element_factory_make("dsppcmsrc", "fv_out_audiosrc")
        else:
            self._file_mic_src = gst.element_factory_make("alsasrc", "fv_out_audiosrc")
        self._file_pipeline.add(self._file_mic_src)
        caps = gst.element_factory_make("capsfilter", "fv_in_capsfilter")
        #caps.set_property("caps", gst.caps_from_string("audio/x-mulaw"))
        # 16-bit signed little-endian mono PCM at 8 kHz.
        caps.set_property( "caps",
            gst.caps_from_string("audio/x-raw-int, endianness=(int)1234,width=(int)16," \
                "depth=(int)16,signed=(boolean)true,channels=(int)1," \
                "rate=(int)8000"))
        self._file_pipeline.add(caps)
        self._wavenc = gst.element_factory_make("wavenc", "wavencoder")
        self._file_pipeline.add( self._wavenc )
        self._file_out = gst.element_factory_make("filesink", "sink")
        self._file_pipeline.add(self._file_out)
        gst.element_link_many(self._file_mic_src, caps, self._wavenc, self._file_out)
        self._file_bus = self._file_pipeline.get_bus()
        self._file_bus.add_signal_watch()
        self._file_bus.connect( 'message', self._onRecordBusMessage )
        # The network pipeline is built lazily (and rebuilt on each unmute)
        # in _newNetPipeline().
        self._net_pipeline = None
    def _newNetPipeline(self):
        # record to network pipeline: mic -> mulaw RTP payloader -> udpsink
        self._net_pipeline = gst.Pipeline( "FVNetworkOut")
        if device.current() == device.NOKIA_TABLET:
            # Nokia DSP
            self._net_mic_src = gst.element_factory_make("dsppcmsrc", "fv_out_audiosrc")
        else:
            self._net_mic_src = gst.element_factory_make("alsasrc", "fv_out_audiosrc")
        print self._net_mic_src
        self._net_pipeline.add(self._net_mic_src)
        self._net_enc = gst.element_factory_make("mulawenc", "fv_out_mulawenc")
        self._net_pipeline.add(self._net_enc)
        # NOTE(review): _net_enc is added to the pipeline but never linked
        # into the chain below -- confirm whether the encoder is needed or
        # dead weight.
        #netcaps = gst.element_factory_make("capsfilter", "fv_in_capsfilter")
        #netcaps.set_property("caps", gst.caps_from_string("audio/x-mulaw"))
        #gst.caps_from_string("audio/x-raw-int, endianness=(int)1234,width=(int)16," \
        #    "depth=(int)16,signed=(boolean)true,channels=(int)1," \
        #    "rate=(int)8000"))
        #mulaw_cap = gst.caps_from_string("audio/x-mulaw")
        #self._net_pipeline.add(netcaps)
        self._rtp_pay = gst.element_factory_make("rtppcmupay", "payloader")
        self._net_pipeline.add(self._rtp_pay)
        self._rtp_bin = gst.element_factory_make("gstrtpbin", "rtpbin")
        #self._net_pipeline.add(self._rtp_bin)
        self._udp_sink = gst.element_factory_make("udpsink", "udpsink")
        self._udp_sink.set_property("clients", self._bcast_host+":"+str(self._port))
        self._net_pipeline.add(self._udp_sink)
        # RTCP sink is created but left out of the pipeline (see commented
        # add/link calls below).
        self._rtcp_sink = gst.element_factory_make("udpsink", "rtcpsink")
        self._rtcp_sink.set_property("clients", self._bcast_host+":"+str(self._port+1))
        #self._net_pipeline.add(self._rtcp_sink)
        #self._net_mic_src.link_pads_filtered( 'src', self._rtp_pay, 'sink', mulaw_cap )
        #self._net_mic_src.link_filtered( self._rtp_pay, netcaps )
        #self._net_mic_src.link( self._rtp_pay )
        #self._rtp_pay.link_pads( 'src', self._rtp_bin, 'send_rtp_sink_0' )
        #self._rtp_bin.link_pads( 'send_rtp_src_0', self._udp_sink, 'sink')
        #self._rtp_bin.link_pads( 'send_rtcp_src_0', self._rtcp_sink, 'sink' )
        #self._rtp_pay.link( self._udp_sink )
        gst.element_link_many( self._net_mic_src, self._rtp_pay, self._udp_sink )
        self._net_bus = self._net_pipeline.get_bus()
        self._net_bus.add_signal_watch()
        self._net_bus.connect( 'message', self._onSendNetBusMessage )
    def _onRecordBusMessage(self, bus, message):
        # Bus watcher for the file pipeline: tear it down on EOS or error.
        t = message.type
        if t == gst.MESSAGE_EOS:
            self._file_pipeline.set_state(gst.STATE_NULL)
            print "eos"
            # TODO: fix this
            #self.emit('stopped_play', self.currentPlayingID)
        elif t == gst.MESSAGE_ERROR:
            self._file_pipeline.set_state(gst.STATE_NULL)
            print "error:",message
    def _onSendNetBusMessage(self, bus, message):
        # Bus watcher for the network pipeline: tear it down on EOS/error.
        t = message.type
        if t == gst.MESSAGE_EOS:
            self._net_pipeline.set_state(gst.STATE_NULL)
            print "eos"
            # TODO: fix this
            #self.emit('stopped_play', self.currentPlayingID)
        elif t == gst.MESSAGE_ERROR:
            self._net_pipeline.set_state(gst.STATE_NULL)
            print "error",message
    def startRecord( self, filename ):
        # Record the mic to *filename*; streaming is stopped first since
        # only one pipeline may own the mic at a time.
        self._file_out.set_property('location', filename)
        #self._net_mic_src.set_state(gst.STATE_NULL)
        if self._net_pipeline is not None:
            self._net_pipeline.set_state(gst.STATE_NULL)
        self._file_pipeline.set_state(gst.STATE_PLAYING)
    def stopRecord( self ):
        # Stop recording to file.
        self._file_pipeline.set_state(gst.STATE_NULL)
        #self._net_pipeline.set_state(gst.STATE_PLAYING)
    def micMute( self ):
        # Stop streaming the mic to the network.
        # NOTE(review): _net_pipeline is None until micUnmute() has run at
        # least once, so calling micMute() first raises AttributeError --
        # confirm callers always unmute before muting.
        #self._net_mic_src.set_state(gst.STATE_NULL)
        self._net_pipeline.set_state(gst.STATE_NULL)
    def micUnmute( self ):
        # Start (or restart) streaming the mic to the network. The file
        # pipeline is stopped first, and the network pipeline is rebuilt
        # from scratch each time.
        self._file_pipeline.set_state(gst.STATE_NULL)
        if self._net_pipeline is not None:
            # NOTE(review): ``is not`` identity comparison against a gst
            # state constant -- confirm this behaves like ``!=`` here.
            if self._net_pipeline.get_state()[1] is not gst.STATE_NULL:
                self._net_pipeline.set_state(gst.STATE_NULL)
        self._newNetPipeline()
        #self._net_mic_src.set_state(gst.STATE_PLAYING)
        self._net_pipeline.set_state(gst.STATE_PLAYING)
        #print "Net pipeline state:",self._net_pipeline.get_state()[0], self._net_pipeline.get_state()[1]
        #if self._net_pipeline.get_state()[1] is not gst.STATE_PLAYING:
        #    print "Net pipepline is starting to play again."
        #    self._net_pipeline.set_state(gst.STATE_PLAYING)
        # Install a one-shot buffer probe to discover our outgoing SSRC.
        #rtppad = self._rtp_bin.get_pad( 'send_rtp_src_0' )
        rtppad = self._rtp_pay.get_pad( 'src' )
        self._probe_id = rtppad.add_buffer_probe( self._probe, 'ssrc')
        #rtpcaps = rtppad.proxy_getcaps()
        #print rtpcaps
    def _probe(self, pad, data, id):
        """
        Method to find the ssrc generated by this source
        @param data: the audio buffer
        @param id: the id to search for 'ssrc'
        """
        # Read the ssrc from the buffer caps, announce it, then remove the
        # probe so this only fires once.
        self.emit( 'my_ssrc', int(data.get_caps()[0][id]) )
        pad.remove_buffer_probe( self._probe_id )
        return True
gobject.type_register( MicController )
if __name__ == "__main__":
import time
ain = MicController("18.85.19.255", 30033)
i = 'g'
recording = False
streaming = False
while i != 'q':
# n to stream to network or stop stream to network, f to start recording to file or stop recording
# q to stop
i = raw_input("n, f or q: ")
if i is 'f':
if recording:
print "Stop recording"
ain.stopRecord()
recording = False
else:
print "Start recording"
ain.startRecord('fv'+str(time.time())+".wav")
recording = True
if i is 'n':
if streaming:
print "Stop streaming"
ain.micMute()
streaming = False
else:
print "Start streaming"
ain.micUnmute()
streaming = True
| Python |
from util import device

# Pick the platform audio sink element: the Nokia tablet uses its DSP
# sink, everything else goes through ALSA.
if device.current() == device.NOKIA_TABLET:
    AUDIO_SINK = "dsppcmsink"
else:
    AUDIO_SINK = "alsasink"

# RTP payload type -> gstreamer depayloader element name.
# 8 = PCMA (a-law), 0 = PCMU (mu-law), 96 = dynamic (used here for mu-law).
depays = {8:"rtppcmadepay",
    0:"rtppcmudepay",
    96:"rtppcmudepay"}

# RTP payload type -> gstreamer decoder element name (parallel to depays).
decoders = {8:"alawdec",
    0:"mulawdec",
    96:"mulawdec"}
| Python |
import gobject
import gstcomponents
import recordsrc
import gst
import os
from util import config
from fvutil import fvcon
class MixerInterface(gobject.GObject):
__gsignals__ = {
# create signals that will be emitted from mixer
# emitted when the message finished playing back
'end_message' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
# user_idle is when no incoming packets from network (no packets for 5 seconds)
'user_idle' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
# user_active is when packets start to come in from network (new packets for 5 secs)
'user_active' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'ack_push_to_talk' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'started_record' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'stopped_record' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'started_play' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'stopped_play' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'my_ssrc' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_UINT,)),
}
def __init__( self, live_audio_dest):
    """
    @param live_audio_dest: the broadcast address that the live audio needs to be sent to
    @type live_audio_dest: String
    """
    gobject.GObject.__init__(self)
    LIVE_AUDIO_PORT = config.get_int('fv', 'live-audio-port')
    # Plays audio files into the outgoing stream; 'eos' tells us a clip is done.
    self._file_sender = gstcomponents.FileInputSender(LIVE_AUDIO_PORT)
    self._file_sender.connect('eos', self._fileFinished)
    # Receives all incoming RTP streams and mixes them for playback.
    self._rtp_receiver = gstcomponents.RtpStreamsReceiver(LIVE_AUDIO_PORT,
        enableLevelMeasurement=True,
        enableTimeShifting=True,
        play=True)
    self._rtp_receiver.connect('new-rtp-stream', self._newRTPStream)
    self._rtp_receiver.connect('rtp-stream-timeout', self._timeoutRTPStream)
    #: map ssrc to pipelines
    self._mixers = {}
    self._file_players = {}
    #: map id to file finished True/False
    self._file_finished = {}
    #: map guid to their type and properties
    self._inputs = {}
    #: maps guid to (volume, pan)
    self._mixer_params = {}
    #: time machine units
    self._tmus = {}
    # Our own RTP SSRC; filled in by _startTalk once the mic reports it.
    self._my_ssrc = 0
    print 'Live audio port:',LIVE_AUDIO_PORT,';live audio_dest',live_audio_dest
    self._mic_controller = recordsrc.MicController(live_audio_dest,
        LIVE_AUDIO_PORT)
    self._mic_controller.connect( 'my_ssrc', self._startTalk )
    self._path = config.get('async.folders', fvcon.AUDIO)
    if not os.path.exists(self._path):
        print "MixerInterface: Creating path for audio"
        os.makedirs(self._path)
    # Scratch file used by the recorder before a message is sent.
    self.temp_rec_file_name = self._path + "temp_audio.wav"
    # user_id -> tivo_id
    self.__tivos_awaiting_ssrcs = {}
def _startTalk(self, obj, ssrc):
    """Record our own RTP SSRC and zero its mixer volume so we do not
    hear our own stream looped back; then re-emit 'my_ssrc' for listeners.

    @param obj: the MicController that discovered the ssrc
    @param ssrc: our RTP synchronization source id
    """
    print "My SSRC:",ssrc
    # silence my volume
    self._my_ssrc = ssrc
    self._mixer_params[self._my_ssrc] = (0,0)
    self.emit('my_ssrc', ssrc)
def _newRTPStream(self, obj, session, ssrc, pt, id):
    """
    Callback when new RTP stream appears in the mixer
    @param obj: L{RtpStreamsReceiver}
    """
    print "New RTP stream:",session,ssrc,pt,id
    self._mixers[ssrc] = self._rtp_receiver.getMixer( id )
    # Restore any volume/pan previously recorded for this ssrc; default
    # is full volume, centered pan.
    volume, pan = self._mixer_params.get(ssrc, (1,0) )
    self._mixers[ssrc].setVolume(volume)
    self._mixers[ssrc].setPan(pan)
def _timeoutRTPStream( self, obj, session, ssrc, pt, id):
    # A stream stopped sending packets: tear down its mixer channel and
    # forget its saved parameters (pop with default tolerates repeats).
    print "Timeout RTP stream:",session,ssrc,pt,id
    if ssrc in self._mixers:
        self._mixers[ssrc].remove()
    self._mixers.pop(ssrc, None)
    self._mixer_params.pop(ssrc, None)
def _fileFinished(self, obj, id):
    # EOS callback from the file sender: mark the clip as done playing.
    self._file_finished[id] = True
    print "Finished playing",id
def onAddInput(self, obj, properties ):
"""
Add an input to the audio mixer.
The input can be an audio message or network stream.
@param guid:
@type guid: Number
@param properties: type; IP address, port for network and filename for message
@type properties: Dictionary
"""
print 'properties.get(status)',properties['status']
print properties['status']==fvcon.OFFLINE
if properties.get('status', fvcon.OFFLINE) == fvcon.OFFLINE:
return
print 'wasnt offline'
guid = properties['id']
print properties
print "Adding INPUT", guid
self._inputs[guid] = properties
#if guid in self.__tivos_awaiting_ssrcs:
# tivo = self.__tivos_awaiting_ssrcs[guid].addTee
def onChange(self, obj, ui_data):
"""
When state changes from either background or muted or inactive
@param ui_data: id, volume, pan
volume 0: mute
volume 1: soft
volume 2: loud
volume 3: conference (TODO: need to start tivo)
@type ui_data: Dictionary
"""
print 'on_change',ui_data
guid = ui_data['id']
volume = ui_data['volume']
pan = ui_data['pan']
type = ui_data['type']
if pan > 1:
pan = 1
elif pan < -1:
pan = -1
# if object type is modifiable
if ui_data['type'] not in [fvcon.FV, fvcon.SERVICE, fvcon.PHONE, fvcon.AUDIO, fvcon.TIVO]:
print "MIXER: Ignored",ui_data['type']
return
# if it was moved out of the playback area, stop it
if volume < 0:
if guid in self._file_players:
player = self._file_players[guid]
ssrc = player.getSsrc()
self._file_finished.pop(player.getID())
player.stop()
player.remove()
self._file_players.pop(guid)
self._mixers[ssrc].remove()
self._mixers.pop(ssrc)
self._mixer_params.pop(ssrc)
return
# if the node is offline, then ignore
properties = self._inputs.get(guid, None)
print self._inputs
print 'guid:',guid,properties
if properties is None or properties.get('status', fvcon.OFFLINE) == fvcon.OFFLINE:
print 'Offline. Ignoring.'
return
# get the ssrc
ssrc = properties.get('ssrc', 0)
print 'Got ssrc',ssrc
print self._mixers
if ssrc in self._mixers:
# Existing
print "MIXER: Changing VOLUME"
self._mixers[ssrc].setVolume( volume )
self._mixers[ssrc].setPan( pan )
# check if file finished playing back and replay
if guid in self._file_players and volume > 0:
if self._file_finished[self._file_players[guid].getID()]:
self._file_players[guid].play()
self._file_finished[self._file_players[guid].getID()] = False
print "Replaying"
else:
# New
print "New or Inactive Input",guid
# create new pipeline for this guid
if ui_data['type'] is fvcon.AUDIO:
print "MIXER: Playing",properties['local_path']
ssrc = self._startMessage( guid, properties['local_path'], volume, pan )
else:
print "No input streams available for ssrc:",ssrc
return
self._mixer_params[ssrc] = (volume, pan)
def _startMessage(self, guid, filename, volume, pan):
"""
Starts the file stream
@param guid: the source globally unique identifier
@type guid: Number
@param filename: name of the file to play
@type filename: String
"""
self._file_players[guid] = self._file_sender.addFileInput( filename )
playerid = self._file_players[guid].getID()
self._file_finished[playerid] = False
ssrc = self._file_players[guid].getSsrc()
self._inputs[guid]['ssrc'] = ssrc
return ssrc
def onOffline( self, obj, params ):
"""
Deactivate the pipeline when an object is moved out of the audio play layer
or goes offline
@param params: various parameters of user/service that has gone offline
@type params: Dictionary
"""
guid = params['id']
if guid in self._inputs:
ssrc = self._inputs[guid].get('ssrc', None)
if ssrc and ssrc in self._mixers:
# if it's an active pipeline stop and return True
self._mixers[ssrc].remove()
def onMIC(self, obj, action):
"""
Called when push to talk is deactivated or when push to talk was inactive
and message recording stops
Stops recording from MIC
@param action: 1 when push to talk on, 2 when attention pressed, 0 when push to talk off
@type action: Number
"""
if action in [1,2]:
print "MIC starting"
self._mic_controller.micUnmute()
else:
print "MIC stopping"
self._mic_controller.micMute()
self.emit( 'ack_push_to_talk', action)
def onStartRecord(self, obj, recdata):
"""
Called when the UI sends a request to start recording.
@param recdata: dictionary with fields
1. 'id': temporary id
2. 'filename': used for recording to specific file
"""
print "OnStartRecord",recdata
id = recdata['id']
print "id:",id
filename = recdata.get('filename', None)
if filename is None:
self._mic_controller.startRecord( self.temp_rec_file_name )
else:
self._mic_controller.startRecord( filename )
print "Exit StartRecord"
self.emit('started_record', id)
def onStopRecord(self, obj, recdata):
"""
Called when the UI sends a request to stop recording.
@param recdata: dictionary with fields
1. 'id': temporary id
2. 'filename': file name to record to
"""
print "OnStopRecord"
id = recdata['id']
self._mic_controller.stopRecord()
filename = recdata.get('filename', None)
if filename is None:
detail = {'subcommand':fvcon.ADD, 'id':id, 'type':fvcon.AUDIO, 'label':'Temp Record Audio',
'local_path':self.temp_rec_file_name, 'status':-1,
'priority':fvcon.PRIORITY_LOW }
else:
detail = {'subcommand':fvcon.ADD, 'id':id, 'type':fvcon.AUDIO, 'label':'Temp Record Audio',
'local_path':filename, 'status':-1,
'priority':fvcon.PRIORITY_LOW }
# add to the mixer so you can playback for review
self.onAddInput( None, detail )
self.emit('stopped_record', id)
def onStartPlay(self, obj, playdata):
"""
Called when the UI sends a request to start playing a message
"""
print "OnStartPlay",playdata
print "id:", playdata['id']
id = playdata['id']
playinfo = {'id':id, 'volume':1, 'pan':0, 'type':fvcon.AUDIO }
self.onChange( None, playinfo )
self.emit('started_play', id)
def onStopPlay(self, obj, playdata):
"""
Called when the UI sends a request to stop playing a message
"""
print 'OnStopPlay'
print "id", playdata['id']
id = playdata['id']
playinfo = {'id':id, 'volume':0, 'pan':0, 'type':fvcon.AUDIO }
self.onChange( None, playinfo )
self.emit('stopped_play', id)
    def onStartTivo(self, obj, user_ids, tivo_id, filename):
        """
        Record inputs of the people in ids to the filename
        @param user_ids: The ids of the users to record
        @param tivo_id: The ID of the tivo object (generated externally)
        @param filename: The filename to record to
        """
        # Collect the mixer ids of the requested users' active RTP streams.
        mixers = []
        # Map to the SSRC
        for user_id in user_ids:
            ssrc = self._inputs[user_id].get('ssrc', None)
            if ssrc:
                print 'GOT ssrc %s for TIVO' %(ssrc,)
                print self._mixers
                mixer_id = self._mixers[ssrc].id
                mixers.append(mixer_id)
            else:
                # no stream yet for this user; remember the tivo so it could be
                # attached once the user's ssrc appears (see onTivoAddUser TODO)
                self.__tivos_awaiting_ssrcs[user_id] = tivo_id
        # NOTE(review): appended outside the loop -- our own microphone stream
        # is always included in the recording; confirm that is the intent.
        mixers.append(self._mixers[self._my_ssrc].id)
        print 'About to tivo with mixers %s to filename %s' % (mixers, filename)
        tmu = self._rtp_receiver.createTivo(mixers, filename)
        self._tmus[tivo_id] = tmu
    def onTivoAddUser(self, obj, id):
        """
        TODO:
        Find an existing tmu and add this new id
        @param id: the mixer id
        """
        # Not implemented yet; onStartTivo records pending users in
        # self.__tivos_awaiting_ssrcs for this purpose.
        pass
def onStopTivo(self, obj, tivo_id):
"""
Stop a tivo session from recording.
@param tivo_id: The id of the tmu to stop recording.
"""
self._tmus[tivo_id].stop()
del self._tmus[tivo_id]
gobject.type_register( MixerInterface )
| Python |
import pygst
pygst.require("0.10")
import gst
#import pygtk
#pygtk.require("2.0")
#import gtk
import gobject
import constants
import themixerexceptions
import random
import struct
#gtk.gdk.threads_init() #TODO - should it be here? shouldn't be gobject threads?
# Raised when the pipeline receives input it cannot handle.
class UnexpectedInput(themixerexceptions.TheMixerException): pass
# Raised when a caller passes an invalid argument (e.g. an unknown stream id).
class IllegalArgument(themixerexceptions.TheMixerException): pass
def getPadNameRtpInfo(pad):
    """Parse (session, ssrc, pt) out of an rtpbin pad name.

    Pad names look like ``recv_rtp_src_%d_%d_%d`` where the numeric fields
    are the RTP session, the stream's ssrc and its payload type.
    """
    tokens = pad.get_property("name").split("_")
    return int(tokens[3]), int(tokens[4]), int(tokens[5])
class FileInputSender(gobject.GObject):
"""
Class for playing back file (archived audio).
It sends the file via RTP to the localhost.
"""
__gsignals__ = {
'eos' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'error' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT, gobject.TYPE_STRING, gobject.TYPE_STRING)),
}
def __init__(self, port):
gobject.GObject.__init__( self )
self._senders = {}
self._idCount = 0
self._port = port
def _getAvailableID(self):
"""
Called from L{addFileInput}
@return id for identifying new audio that's played back
"""
id = self._idCount
self._idCount += 1
return id
def _messageHandler(self, bus, message, id):
t = message.type
if t == gst.MESSAGE_EOS:
#bus.remove_signal_watch()
self._senders[id].stop()
self.emit('eos', id)
elif t == gst.MESSAGE_ERROR:
#bus.remove_signal_watch()
print message
self._senders[id].stop()
err, debug = message.parse_error()
self.emit('error', id, err, debug)
def _decodebinPadAdded(self, dec, pad, last_pad, next_pad):
pad.link(next_pad)
def addFileInput(self, path):
"""
Adds a new file input to the sender, it is automatically set to playing.
@param path: path to the file to be played
@return: a file input control
"""
id = self._getAvailableID()
pipe = gst.Pipeline("pipeline" + str(id))
src = gst.element_factory_make("filesrc", "src")
src.set_property("location", path)
dec = gst.element_factory_make("decodebin", "dec")
aconv = gst.element_factory_make("audioconvert", "aconv")
ares = gst.element_factory_make("audioresample", "ares")
enc = gst.element_factory_make("mulawenc", "enc")
rtppay = gst.element_factory_make("rtppcmupay", "rtppay")
sink = gst.element_factory_make("udpsink", "sink")
sink.set_property("clients", "localhost:"+str(self._port))
pipe.add(src)
pipe.add(dec)
pipe.add(aconv)
pipe.add(ares)
pipe.add(enc)
pipe.add(rtppay)
pipe.add(sink)
src.link(dec)
dec.connect('new-decoded-pad', self._decodebinPadAdded, aconv.get_pad("sink"))
aconv.link(ares)
ares.link(enc)
enc.link(rtppay)
rtppay.link(sink)
# sets a new random ssrc to identify from the mixer
ssrc = random.randint(1000000,9999999)
rtppay.set_property('ssrc', ssrc)
bus = pipe.get_bus()
bus.add_signal_watch()
bus.connect("message", self._messageHandler, id)
pipe.set_state(gst.STATE_PLAYING)
self._senders[id] = FileInputControl(self, pipe, id, ssrc)
return self._senders[id]
def getFileInputControl(self, id):
"""
@param id: id of the file player
@return control: FileInputControl of the stream with id id
"""
return self._senders[id]
def hasStream(self, id):
"""
Checks if a stream with an id exists
@return: True if it exists
"""
return id in self._pipes.keys()
def remove(self, id):
"""
Removes stream of an ID. If no stream has that id, nothing is done.
@param id: the id of the stream to be removed
"""
if (id not in self._senders.keys()):
return
self._senders[id].stop()
self._senders[id].getPipeline().get_bus().remove_signal_watch()
del self._senders[id]
def enqueueRemoval(self, id):
"""
Enqueue a removal action later.
Enqueue a removal of a stream, useful for calling inside a signal
handler call (some deadlock issues might prevent from calling remove
directly).
@param id: id of the stream to be removed.
"""
gobject.idle_add(self.remove, id)
class FileInputControl:
    """
    Class for controlling the file (archived audio) playback.
    Instances are created by L{FileInputSender.addFileInput} and fetched via
    L{FileInputSender.getFileInputControl}.
    """
    def __init__(self, parentSender, pipeline, id, ssrc):
        # keep a backreference so remove() can deregister us from the sender
        self.fileInputSender = parentSender
        self.pipe = pipeline
        self.id = id
        self.ssrc = ssrc
    def getID(self):
        """Numeric id of this player within its sender."""
        return self.id
    def getSsrc(self):
        """RTP SSRC carried by this stream's packets."""
        return self.ssrc
    def getPipeline(self):
        """The underlying gstreamer pipeline."""
        return self.pipe
    def play(self):
        """(Re)start playback."""
        self.pipe.set_state(gst.STATE_PLAYING)
    def stop(self):
        """Tear the pipeline down to NULL, stopping the stream."""
        self.pipe.set_state(gst.STATE_NULL)
    def remove(self):
        """Schedule removal of this stream from the parent sender."""
        self.fileInputSender.enqueueRemoval(self.id)
class RtpStreamsReceiver(gobject.GObject):
    """
    Responsible for receiving rtp input streams and playing them.
    It receives both live network streams and archived streams
    """
    __gsignals__ = {
        #notifies about eos (dummy int)
        'eos' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_INT,)),
        #notifies about occurred errors (error, message)
        'error' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_STRING, gobject.TYPE_STRING)),
        #new-rtp-stream(session, ssrc, pt, id)
        'new-rtp-stream' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_UINT , gobject.TYPE_UINT , gobject.TYPE_UINT, gobject.TYPE_UINT )),
        #rtp-stream-timeout(session, ssrc, pt, id)
        'rtp-stream-timeout' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_UINT , gobject.TYPE_UINT , gobject.TYPE_UINT, gobject.TYPE_UINT ))
    }
    def __init__(self, port, enableLevelMeasurement=True, enableTimeShifting=True,
                 bufferStatsFilePrefix=None, play=True):
        """Creates a RtpStreamsReceiver. Responsible for listening on a UDP port for incoming
        RTP streams.
        @param port: port to listen to
        @param enableLevelMeasurement: If streams audio level should be inspected periodically
        @param enableTimeShifting: If the operations to do "timeshifting" should be enabled
        @param bufferStatsFilePrefix: default file path prefix to save rtp buffer statistics.
        Use None for disabling. (i.e. if it is 'stats/data' is provided, files for each stream
        are going to be saved inside stats directory (it should exist already), using file names that start
        with 'data' followed by specific stream parameters, like ssrc and session.
        @param play: If the pipeline should be set to playing at startup
        """
        gobject.GObject.__init__(self)
        self.pipe = None
        self.rtpbin = None
        self.inputIDCounter = 0
        self.port = port
        self._enableLevelMeasurement = enableLevelMeasurement
        self._enableTimeShift = enableTimeShifting
        self._bufferStatsFilePrefix = bufferStatsFilePrefix
#        if (self._bufferStatsFilePrefix):
#            #contains a map of id to tuples(file, filename)
        # map of stream id -> (file object, filename) for buffer statistics
        self._statsFileMap = {}
        # map of stream id -> RtpInput
        self.inputs = {}
        self._initRtpPipeline()
        self._tivo = TivoManager(self.pipe)
        if(play):
            self.play()
    def _initRtpPipeline(self):
        """Initiates the basic elements of the pipeline"""
        # udpsrc -> gstrtpbin; per-stream branches are created dynamically in
        # _rtpbinNewPad and feed a shared liveadder -> audio sink.
        self.pipe = gst.Pipeline("rtp-receiver")
        self.src = gst.element_factory_make("udpsrc", "src")
        self.src.set_property("port", self.port)
        self.src.set_property("caps", gst.Caps("application/x-rtp, clock-rate = (int) 8000"))
        self.rtpbin = gst.element_factory_make("gstrtpbin", "rtpbin")
        self.rtpbin.set_property("latency", 50)
        self.adder = gst.element_factory_make("liveadder", "adder")
        self.sink = gst.element_factory_make(constants.AUDIO_SINK, "audiosink")
        self.pipe.add(self.src)
        self.pipe.add(self.rtpbin)
        self.pipe.add(self.adder)
        self.pipe.add(self.sink)
        self.src.link(self.rtpbin)
        self.adder.link(self.sink)
        bus = self.pipe.get_bus()
        bus.add_signal_watch() #remove signal watch
        bus.connect('message', self._messageHandler)
        self.rtpbin.connect("pad-added", self._rtpbinNewPad)
        self.rtpbin.connect("on-timeout", self._rtpbinStreamTimeout)
    def _messageHandler(self, bus, message):
        """Bus watch: dispatch level messages and map EOS/ERROR to signals."""
        t = message.type
        if t == gst.MESSAGE_ELEMENT:
            # NOTE: local name shadows the module-level ``struct`` import
            struct = message.structure
            name = struct.get_name()
            if ("level" in name):
                # level elements are named "level-<id>"; recover the id suffix
                name = message.src.get_name()
                index = name.find('-')
                value = name[index+1:]
                self._measureLevel(value, struct)
        elif t == gst.MESSAGE_EOS:
            self.null()
            self.emit('eos', 0)
        elif t == gst.MESSAGE_ERROR:
            self.null()
            err, debug = message.parse_error()
            self.emit('error', err, debug)
    def _measureLevel(self, value, struct):
        """
        Hook for periodic audio-level measurements; currently a no-op.
        @param value: id suffix of the level element that reported
        @param struct: the gst message structure with the level data
        """
        pass
        #print "got level:", struct.to_string(), "from id:", str(id)
    def _getInput(self, session, ssrc):
        """Find the RtpInput matching (session, ssrc), or None."""
        for x in self.inputs.values():
            if x.getSsrc() == ssrc and x.getSession() == session:
                return x
        return None
    def _rtpbinStreamTimeout(self, rtpbin, session, ssrc):
        """gstrtpbin on-timeout callback: re-emit as 'rtp-stream-timeout'."""
        input = self._getInput(session, ssrc)
        if (input == None):
            return
        self.emit('rtp-stream-timeout', input.getSession(), input.getSsrc(),
                  input.getPT(), input.getID())
    def _newDecodedPad(self, dec, pad, last, connectpad):
        """decodebin callback: link the decoded pad into the mixer branch."""
        #TODO protection against non-audio pads
        pad.link(connectpad)
    def _rtpbinNewPad(self, rtpbin, pad):
        """Build the branch for a new stream:
        rtp pad -> depay -> queue -> decodebin -> [tee] -> MixerBin -> adder.
        The tee is only inserted when time shifting is enabled, so tivo
        recordings can tap the decoded audio.
        """
        session, ssrc, pt = getPadNameRtpInfo(pad)
        id = self._getNewID()
        if (self._enableTimeShift):
            tee = gst.element_factory_make("tee", "tee-" + str(id))
        else:
            tee = None
        queue = gst.element_factory_make("queue", "queue-" + str(id))
        rtpdepay = gst.element_factory_make(constants.depays[pt], "rtpdepay-" + str(id))
        #check if we should save buffer statistics
        if (self._bufferStatsFilePrefix != None):
            probed_pad = rtpdepay.get_pad("sink")
            self._initBufferStatsFile(id, str(id))
            probed_pad.add_buffer_probe(self._bufferStatsProbe, id)
        dec = gst.element_factory_make("decodebin", "dec-"+ str(id))
        mix = MixerBin("mixer-" + str(id), id, self._enableLevelMeasurement)
        if (self._enableTimeShift):
            self.pipe.add(tee)
            # register the tee so TivoManager can record this stream later
            self._tivo.addTee(id, tee)
        self.pipe.add(queue)
        self.pipe.add(rtpdepay)
        self.pipe.add(dec)
        self.pipe.add(mix)
        pad.link(rtpdepay.get_pad("sink"))
        rtpdepay.link(queue)
        queue.link(dec)
        if (self._enableTimeShift):
            tee.link(mix)
            connectpad = tee.get_pad("sink")
        else:
            connectpad = mix.get_pad("sink")
        pad = self.adder.get_request_pad("sink%d")
        mix.get_pad("src").link(pad)
        dec.connect("new-decoded-pad", self._newDecodedPad, connectpad)
        # sync states so elements start running inside the live pipeline
        mix.sync_state_with_parent()
        if (self._enableTimeShift):
            tee.sync_state_with_parent()
        dec.sync_state_with_parent()
        queue.sync_state_with_parent()
        rtpdepay.sync_state_with_parent()
        netstream = RtpInput(session, ssrc, pt, id, Mixer(mix, None, id), tee)
        self.inputs[id] = netstream
        self.emit('new-rtp-stream', session, ssrc, pt, id)
    def _getNewID(self):
        """Hand out the next receiver-local stream id."""
        id = self.inputIDCounter
        self.inputIDCounter += 1
        return id
    def play(self):
        """Set the receiving pipeline to PLAYING."""
        self.pipe.set_state(gst.STATE_PLAYING)
    def null(self):
        """Stop the pipeline and close any open statistics files."""
        self.pipe.set_state(gst.STATE_NULL)
        self._closeStatsFile()
    def getMixer(self, id):
        """Mixer control of input *id*; raises IllegalArgument if unknown."""
        if (id in self.inputs):
            return self.inputs[id].getMixer()
        else:
            raise IllegalArgument, "Invalid ID: " + str(id)
    def createTivo(self, idList, destFile):
        """Start recording the streams in idList to destFile; returns the TivoUnit."""
        if (not self._enableTimeShift):
            # NOTE(review): FeatureDisabled is not defined or imported in this
            # module, so this raise would itself fail with NameError -- confirm.
            raise FeatureDisabled, "Timemachine feature is disabled"
        tmu = self._tivo.startTivo(idList, destFile)
        return tmu
    def _bufferStatsProbe(self, pad, buffer, id):
        """Buffer probe: log (seq, timestamp) of every RTP buffer for *id*."""
        file = self._statsFileMap[id][0]
        self._writeBufferStats(file, buffer)
        return True
    def _getBufferUsefulInfo(self, buffer):
        """Extract (sequence number, timestamp) from a raw RTP buffer.
        The fields are big-endian on the wire; bytes are reversed before the
        little-endian unpack."""
        seqn = struct.unpack('H', buffer.data[2:4][::-1])[0]
        timestamp = struct.unpack('I', buffer.data[4:8][::-1])[0]
        return seqn, timestamp
    def _writeBufferStats(self, file, buffer):
        #write useful info
        seqn, timestamp = self._getBufferUsefulInfo(buffer)
        msg = [str(seqn), str(timestamp), "\n"]
        file.write(' '.join(msg))
    def _initBufferStatsFile(self, id, suffix):
        """Open (truncating) the statistics file for stream *id*."""
        filename = self._bufferStatsFilePrefix + suffix
        file = open(filename, "w")
        self._statsFileMap[id] = (file, filename)
        #no header :)
        #file.write("BUFFER STATISTICS FILE - " + str(id) + "\n")
        #file.write(20 * "#" + "\n")
    def _closeStatsFile(self):
        """Close and forget every open statistics file."""
        for x in self._statsFileMap.values():
            x[0].close()
        self._statsFileMap = {}
class RtpInput:
    """Bookkeeping record for one incoming RTP stream."""
    def __init__(self, session, ssrc, pt, id, mixer, elementlist=None):
        self.session, self.ssrc, self.pt = session, ssrc, pt
        self.id = id
        self.mixer = mixer
        # elements involved in this part of the stream, ordered from
        # upstream to downstream
        self.elementList = elementlist
    def getSession(self):
        """RTP session number the stream belongs to."""
        return self.session
    def getSsrc(self):
        """Synchronization source identifier of the stream."""
        return self.ssrc
    def getPT(self):
        """RTP payload type."""
        return self.pt
    def getID(self):
        """Receiver-local id of the stream."""
        return self.id
    def getMixer(self):
        """The Mixer control attached to this stream."""
        return self.mixer
class TheMixerPipelineController(gobject.GObject):
    """
    Responsible for holding the mixer pipeline, and provides a methods to control it.
    Holds the mixer pipeline and provide methods to manage its state, flow and parameters.
    Also provides ways to get information about it.
    TODO - put a graph representing the pipeline here
    """
    __gsignals__ = {
        'eos' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_INT,)),
        'error' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_STRING, gobject.TYPE_STRING)),
        'file-stream-ended' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_INT,)),
        #new-rtp-stream(session, ssrc, pt, id)
        'new-rtp-stream' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_UINT , gobject.TYPE_UINT , gobject.TYPE_UINT, gobject.TYPE_UINT )),
        #rtp-stream-timeout(session, ssrc)
        'rtp-stream-timeout' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_UINT , gobject.TYPE_UINT , gobject.TYPE_UINT, gobject.TYPE_UINT ))
    }
    def __init__(self, name, port=0):
        """
        @param name: name for the gst pipeline
        @param port: RTP listen port; 0 disables the network source entirely
        """
        gobject.GObject.__init__( self )
        #store all elements in a tuple (from downstream to upstream)
        #related to some input stream
        self.fileInputStreams = {}
        self.networkInputStreams = {}
        self.mixerIDCounter = 0
        self.messageListener = None
        self.gtkloop = False
        self.port = port
        self.udpsrc = None
        self.rtpbin = None
        self._initPipeline(name)
        # number of removals queued via enqueueRemoval but not executed yet
        self.toRemove = 0
    def _initRtpSrc(self):
        """Create udpsrc + rtcpsrc feeding a gstrtpbin and hook its signals."""
        self.rtpbin = gst.element_factory_make("gstrtpbin", "rtpbin")
        self.udpsrc = gst.element_factory_make("udpsrc", "udpsrc")
        self.udpsrc.set_property("port", self.port)
        self.udpsrc.set_property("caps", gst.Caps("application/x-rtp, clock-rate = (int) 8000"))
        self.pipeline.add(self.rtpbin)
        self.pipeline.add(self.udpsrc)
        pad = self.rtpbin.get_request_pad("recv_rtp_sink_%d")
        udppad = self.udpsrc.get_pad("src")
        udppad.link(pad)
        # RTCP arrives on the next port up
        self.rtcpsrc = gst.element_factory_make("udpsrc", "rtcpsrc")
        self.rtcpsrc.set_property("port", self.port+1)
        self.pipeline.add(self.rtcpsrc)
        pad = self.rtpbin.get_request_pad( "recv_rtcp_sink_%d" )
        rtcppad = self.rtcpsrc.get_pad("src")
        rtcppad.link( pad )
        self.rtpbin.connect("pad-added", self._newPad)
        self.rtpbin.connect("on-ssrc-sdes", self._handleSdes)
        self.rtpbin.connect("on-new-ssrc", self._newStream)
        self.rtpbin.connect("on-timeout", self._streamTimeout)
    def _initPipeline(self, name):
        """
        Inits the controller pipeline.
        Creates a pipeline, then instantiates an output sink and two adders,
        one for each channel, adding them to the pipeline and linking them.
        """
        self.pipeline = gst.Pipeline(name)
        self.sink = gst.element_factory_make(constants.AUDIO_SINK, "audiosink")
        self.pipeline.add(self.sink)
        self.adder = gst.element_factory_make("adder", "adder")
        self.pipeline.add(self.adder)
        if (self.port > 0):
            self._initRtpSrc()
        self.adder.link(self.sink)
        bus = self.pipeline.get_bus()
        bus.add_signal_watch() #remove signal watch
        bus.connect('message', self._messageHandler)
    def _messageHandler(self, bus, message):
        """Bus watch: map gst EOS/ERROR to our signals, then forward to the listener."""
        #TODO - add some kind of message listeners
        t = message.type
        if t == gst.MESSAGE_EOS:
            if (self.gtkloop):
                pass
                #gtk.main_quit()
            self.pipeline.set_state(gst.STATE_NULL)
            self.emit('eos', 0)
        elif t == gst.MESSAGE_ERROR:
            if (self.gtkloop):
                pass
                #gtk.main_quit()
            self.pipeline.set_state(gst.STATE_NULL)
            err, debug = message.parse_error()
            self.emit('error', err, debug)
        if (self.messageListener != None):
            self.messageListener(message)
    def _getPadNameInfo(self, pad):
        """Parse (session, ssrc, pt) from an rtpbin src pad name."""
        name = pad.get_property("name")
        #recv_rtp_src_%d_%d_%d (session, ssrc, pt)
        tokens = name.split("_")
        session = int(tokens[3])
        ssrc = int(tokens[4])
        pt = int(tokens[5])
        return session, ssrc, pt
    def _removedPad(self, rtpbin, pad):
        # debugging hook; not connected anywhere in this module
        print pad
    def _newPad(self, element, pad):
        """Build depayloader + MixerBin branch for a newly announced RTP stream."""
        session, ssrc, pt = self._getPadNameInfo(pad)
        print "New PAD being created", session, pt, ssrc
        id = self.mixerIDCounter
        self.mixerIDCounter += 1
        unique = str(session) + "-" + str(ssrc) + "-" + str(pt) + "-" + str(id)
        rtpdepay = gst.element_factory_make(constants.depays[pt], "rtp-" + unique)
        self.pipeline.add(rtpdepay)
        pad.link(rtpdepay.get_pad("sink"))
        # NOTE(review): MixerBin.__init__ takes (name, id, enableLevel) but is
        # called here with only a name -- confirm against MixerBin's signature.
        mixer = MixerBin("rtp-mixer-" + unique)
        self.pipeline.add(mixer)
        rtpdepay.link(mixer)
        adder_pad = self.adder.get_request_pad("sink%d")
        mixer.get_pad("src").link(adder_pad)
        mixercontrol = Mixer(mixer, self, id)
        netManager = NetworkInputManager([rtpdepay], mixer, mixercontrol, session, ssrc, pt)
        self.networkInputStreams[id] = netManager
        mixer.sync_state_with_parent()
        rtpdepay.sync_state_with_parent()
        self.emit('new-rtp-stream', session, ssrc, pt, id)
    def _newStream(self, a1, a2, ud ):
        # gstrtpbin on-new-ssrc callback; diagnostic only
        print "New Stream appeared:", a1, a2, ud
    def _handleSdes(self, a1, a2, ud ):
        # gstrtpbin on-ssrc-sdes callback; diagnostic only
        print "New SDES:", a1.get_property('sdes-cname'), a2, ud
    def _streamTimeout(self, rtpbin, session, ssrc):
        """Mark the timed-out stream inactive and notify listeners."""
        manager = self._getNetworkManagerByRTPInfo(session, ssrc)
        if (manager != None):
            manager.setInactive()
            self.emit('rtp-stream-timeout', session, ssrc, manager.getPt(), manager.getID())
    # DEPRECATED
    # def addMessageListener(self, messageListener):
    #     self.messageListener = messageListener
    #
    def _getNetworkManagerByRTPInfo(self, session, ssrc):
        """Find the NetworkInputManager matching (session, ssrc), or None."""
        for x in self.networkInputStreams.values():
            if x.getSession() == session and x.getSsrc() == ssrc:
                return x
        return None
    def getInputCount(self):
        """Number of active inputs, minus removals already queued."""
        count = 0
        for x in self.fileInputStreams.values():
            if (x.isActive()):
                count += 1
        for x in self.networkInputStreams.values():
            if (x.isActive()):
                count += 1
        return count - self.toRemove
    def _padEventProbeCB(self, pad, event, id):
        """Pad probe: flag the file input inactive when its stream hits EOS."""
        if event.type == gst.EVENT_EOS:
            self.getMixer(id).setInactive()
            self.emit('file-stream-ended', id)
        return True
    def addInput(self, inputElement, inputPad=None):
        """
        Wire an arbitrary source element into the mix.
        @param inputElement: element with one source pad producing audio
        @param inputPad: explicit source pad; defaults to the element's "src"
        @return: a Mixer control for the new input
        """
        if (inputPad == None):
            inputPad = inputElement.get_pad("src")
        id = self.mixerIDCounter
        inputElement.set_property("name", "input-" + str(id))
        inputBin = MixerBin("input-bin-" + str(self.mixerIDCounter))
        self.pipeline.add(inputElement)
        self.pipeline.add(inputBin)
        inputElement.link(inputBin)
        inputBin.link(self.adder)
        pad = inputBin.get_pad("src")
        probeId = pad.add_event_probe(self._padEventProbeCB, id)
        mixer = Mixer(inputBin, self, id)
        fileInputManager = FileInputManager([inputElement], inputBin, mixer)
        if (self.fileInputStreams.has_key(id)):
            print "Unallowed input" #TODO - raise exception
            pass
        self.fileInputStreams[id] = fileInputManager
        self.mixerIDCounter += 1
        inputBin.sync_state_with_parent()
        inputElement.sync_state_with_parent()
        return mixer
    def addFileInput(self, filepath):
        """
        Adds a new file input to the pipeline.
        @param filepath:path to the file to be read
        @return: True if succeeded, False otherwise
        """
        input = gst.element_factory_make("filesrc", "filesrc")
        input.set_property("location", filepath)
        return self.addInput(input, None)
    def play(self):
        """Set the pipeline to PLAYING."""
        if (self.getInputCount() == 0):
            pass #TODO
        self.pipeline.set_state(gst.STATE_PLAYING)
    def getPipelineStatus(self):
        """Return the result of gst's get_state() for the pipeline."""
        return self.pipeline.get_state()
    def stop(self):
        """Drop the pipeline to READY."""
        self.pipeline.set_state(gst.STATE_READY)
    def null(self):
        """Drop the pipeline to NULL."""
        self.pipeline.set_state(gst.STATE_NULL)
    def pause(self):
        self.pipeline.set_state(gst.STATE_PAUSED) # TODO - check if it is blocked what should be done
    def playAndBlock(self):
        """Start playing; historically also entered the gtk main loop."""
        self.play()
        self.gtkloop = True
        #gtk.main()
    def remove(self, index):
        """
        Tear down and delete the input with the given id.
        @param index: id of a file or network input
        """
        self.toRemove -= 1
        inputManager = None
        error = False
        try:
            inputManager = self.fileInputStreams[index]
        except:
            try:
                inputManager = self.networkInputStreams[index]
            except:
                error = True
        if (error):
            raise IllegalArgument, "input with ID " + str(index) + " not found"
        if (self.getInputCount() == 1):
            #this is the last guy, so we set everything to null
            self.null()
            self.emit('eos', 0)
        inputManager.setInactive()
        inputbin = inputManager.getMixerBin()
        pad = (inputbin.get_pad("src")).get_peer()
        if (pad == None):
            return
        self.adder.release_request_pad(pad)
        inputbin.set_state(gst.STATE_NULL)
        for x in inputManager.getInputElements():
            x.set_state(gst.STATE_NULL)
        self.pipeline.remove(inputbin)
        for x in inputManager.getInputElements():
            self.pipeline.remove(x)
        # NOTE(review): unconditionally deletes from fileInputStreams -- for a
        # network input this would raise KeyError; confirm intended cleanup.
        del self.fileInputStreams[index]
    def getMixer(self, id):
        """Mixer control for input *id*, or None if unknown."""
        if (id in self.fileInputStreams):
            return self.fileInputStreams[id].getMixer()
        if (id in self.networkInputStreams):
            return self.networkInputStreams[id].getMixer()
        return None
    def enqueueRemoval(self, id):
        """Defer remove(id) to the gobject main loop (safe from signal handlers)."""
        self.toRemove += 1
        gobject.idle_add(self.remove, id)
gobject.type_register( TheMixerPipelineController )
class MixerBin(gst.Bin):
    """
    Per-stream processing bin: [level] -> audiopanorama -> volume.

    Ghost pads "sink" and "src" let it be linked like a single element;
    volume/pan/mute accessors control the contained elements.
    """
    def __init__(self, name, id, enableLevel=True):
        """
        Creates a new input bin.
        @param name: gst element name for the bin
        @param id: numeric id used to name the child elements
        @param enableLevel: prepend a ``level`` element for loudness messages
        """
        super(MixerBin, self).__init__(name)
        self.id = id
        self._enableLevel = enableLevel
        self._initBaseElements()
        self._initGhostPads()
    def _initBaseElements(self):
        """Create and link the volume/panorama (and optional level) elements."""
        suffix = str(self.id)
        self.volume = gst.element_factory_make("volume", "volume-" + suffix)
        self.panorama = gst.element_factory_make("audiopanorama", "panorama-" + suffix)
        self.add(self.volume)
        self.add(self.panorama)
        self.panorama.link(self.volume)
        if self._enableLevel:
            self.level = gst.element_factory_make("level", "level-" + suffix)
            self.add(self.level)
            self.level.link(self.panorama)
    def setPan(self, pan):
        """Set stereo position, -1 (left) .. 1 (right)."""
        self.panorama.set_property("panorama", pan)
    def getPan(self):
        return self.panorama.get_property("panorama")
    def setVolume(self, vol):
        """Set linear gain, 0 .. 1."""
        self.volume.set_property("volume", vol)
    def getVolume(self):
        return self.volume.get_property("volume")
    def mute(self):
        self.volume.set_property("mute", True)
    def unmute(self):
        self.volume.set_property("mute", False)
    def isMuted(self):
        return self.volume.get_property("mute")
    def _initGhostPads(self):
        """Expose the chain ends as ghost pads named "src" and "sink"."""
        # the src ghost pad always proxies the volume element's output
        self.ghostpad = gst.GhostPad("src", self.volume.get_pad("src"))
        # the sink ghost pad proxies whichever element sits first in the chain
        if self._enableLevel:
            first = self.level.get_pad("sink")
        else:
            first = self.panorama.get_pad("sink")
        self.sinkpad = gst.GhostPad("sink", first)
        self.add_pad(self.ghostpad)
        self.add_pad(self.sinkpad)
gobject.type_register( MixerBin )
class Mixer(gobject.GObject):
    """
    Control handle for one input's MixerBin: volume, pan, mute, activity
    flag and removal.
    """
    def __init__(self, mixerbin, controller, id, active=True):
        """
        @param mixerbin: the MixerBin this control drives
        @param controller: owner providing enqueueRemoval(); may be None
        @param id: numeric id of the input
        @param active: initial activity state
        """
        # BUG FIX: GObject subclasses must chain up, otherwise the underlying
        # GObject stays uninitialized (the class is type_registered below).
        gobject.GObject.__init__(self)
        self.mixerbin = mixerbin
        self.controller = controller
        self.id = id
        self.active = active
    def getPanRange(self):
        """Legal (min, max) values for setPan."""
        return -1, 1
    def getVolumeRange(self):
        """Legal (min, max) values for setVolume."""
        return 0, 1
    def getID(self):
        return self.id
    def mute(self):
        self.mixerbin.mute()
    def unmute(self):
        self.mixerbin.unmute()
    def isMuted(self):
        return self.mixerbin.isMuted()
    def setPan(self, pan):
        self.mixerbin.setPan(pan)
    def getPan(self):
        return self.mixerbin.getPan()
    def setVolume(self, vol):
        self.mixerbin.setVolume(vol)
    def getVolume(self):
        return self.mixerbin.getVolume()
    def remove(self):
        """Ask the owning controller (if any) to remove this input."""
        if (self.controller != None):
            self.controller.enqueueRemoval(self.id)
    def setInactive(self):
        self.active = False
    def setActive(self):
        self.active = True
    def isActive(self):
        return self.active
gobject.type_register( Mixer )
class InputManager:
    """Base record tying together an input's elements, bin and mixer control."""
    def __init__(self, inputElements, mixerBin, mixer):
        self.inputElements = inputElements
        self.mixerBin = mixerBin
        self.mixerController = mixer
    def getMixer(self):
        """The Mixer control for this input."""
        return self.mixerController
    def getMixerBin(self):
        """The MixerBin element of this input."""
        return self.mixerBin
    def getInputElements(self):
        """Upstream elements feeding the bin (upstream to downstream order)."""
        return self.inputElements
    def getID(self):
        """Delegates to the mixer control's id."""
        return self.mixerController.getID()
    def isActive(self):
        """Whether the input is currently active (per its mixer control)."""
        return self.mixerController.isActive()
    def setInactive(self):
        """Mark the input inactive via its mixer control."""
        self.mixerController.setInactive()
class FileInputManager(InputManager):
    """InputManager specialization for file-based inputs (filesrc elements)."""
    def __init__(self, inputElement, mixerBin, mixer):
        InputManager.__init__(self, inputElement, mixerBin, mixer)
    def getFilePath(self):
        """Path of the file being played (the filesrc "location" property)."""
        # BUG FIX: the attribute set by InputManager is ``inputElements``;
        # ``self.inputElement`` does not exist and raised AttributeError.
        return self.inputElements[0].get_property("location")
class NetworkInputManager(InputManager):
    """InputManager specialization for RTP network inputs."""
    def __init__(self, inputElement, mixerBin, mixer, session, ssrc, pt):
        InputManager.__init__(self, inputElement, mixerBin, mixer)
        # RTP identity of the stream
        self.session, self.ssrc, self.pt = session, ssrc, pt
    def getSession(self):
        """RTP session number."""
        return self.session
    def getSsrc(self):
        """RTP synchronization source identifier."""
        return self.ssrc
    def getPt(self):
        """RTP payload type."""
        return self.pt
class TivoManager:
    """
    Owns the tee elements of all receivable streams and builds "tivo"
    (record-to-file) branches from selected tees on demand.
    """
    def __init__(self, pipeline):
        # stream id -> tee element available for recording
        self._tees = {}
        # destination file name -> TivoUnit
        self._tivos = {}
        self._pipeline= pipeline
    def addTee(self, id, tee):
        """Register the tee of stream *id* so it can be recorded later."""
        self._tees[id] = tee
    def removeTee(self, id):
        """Detach stream *id* from every running tivo and forget its tee."""
        for tivo in self._tivos.values():
            tivo.removeTee(id)
        del self._tees[id]
    def startTivo(self, ids, destFile):
        """
        Start recording the streams in *ids* into *destFile*.
        Builds queue-per-source -> liveadder -> queue -> mulawenc -> filesink.
        @return: the created L{TivoUnit}
        """
        if (ids == None or len(ids) == 0):
            raise IllegalArgument("id list must be non-empty")
        if (destFile == None):
            raise IllegalArgument("destination file can't be None")
        filesink = gst.element_factory_make('filesink', 'tivo-sink-' + str(destFile))
        filesink.set_property('location', destFile)
        encoder = gst.element_factory_make('mulawenc', 'tivo-encoder-' + str(destFile))
        encqueue = gst.element_factory_make('queue', 'tivo-queue-' + str(destFile))
        liveadder = gst.element_factory_make('liveadder', 'tivo-adder-' + str(destFile))
        self._pipeline.add(filesink)
        self._pipeline.add(encoder)
        self._pipeline.add(encqueue)
        self._pipeline.add(liveadder)
        encoder.link(filesink)
        encqueue.link(encoder)
        liveadder.link(encqueue)
        filesink.sync_state_with_parent()
        encoder.sync_state_with_parent()
        encqueue.sync_state_with_parent()
        liveadder.sync_state_with_parent()
        queuelist = {}
        padlist = {}
        for x in ids:
            # one queue per recorded source, tapped off that source's tee
            tee = self._tees[x]
            queue = gst.element_factory_make("queue", 'tivo-queue-' +
                                             str(destFile) + '-' + str(x))
            self._pipeline.add(queue)
            queue.link(liveadder)
            queue.sync_state_with_parent()
            queuelist[x] = queue
            pad = tee.get_request_pad("src%d")
            padlist[x] = (tee,pad)
            pad.link(queue.get_pad("sink"))
        tmu = TivoUnit(self._pipeline, filesink, encoder, encqueue, liveadder, queuelist, padlist)
        self._tivos[destFile] = tmu
        return tmu
    def _sendNewSegment(self, pad):
        """Push a fresh new-segment event downstream of *pad*."""
        event = gst.event_new_new_segment(False, 1, gst.FORMAT_TIME, 0, -1,0)
        pad.push_event(event)
    def stop(self):
        """Stop every running tivo recording and forget them all."""
        # BUG FIX: the old loop iterated over the dict *keys* (file name
        # strings), called .stop() on the string, and mutated the dict while
        # iterating; iterate the units instead and clear afterwards.
        for tmu in self._tivos.values():
            tmu.stop()
        self._tivos = {}
# Lifecycle states of a TivoUnit recording.
TIVO_STATE_RUNNING = "running"
TIVO_STATE_STOPPING = "stopping"
TIVO_STATE_STOPPED = "stopped"
class TivoUnit:
def __init__(self, pipe, sink, enc, encqueue, adder, queueMap, padMap):
self._pipeline = pipe
self._sink = sink
self._enc = enc
self._encQueue = encqueue
self._adder = adder
self._queues = queueMap
self._pads = padMap
self._state = TIVO_STATE_RUNNING
def getIDList(self):
return self._pads.keys()
def stop(self):
for x in self._pads:
self.unlinkPad(x)
handler_id = 0
self._state = TIVO_STATE_STOPPING
if (self._encQueue.get_property("current-level-buffers") == 0):
self._stopTivo()
else:
bus = self._pipeline.get_bus()
handler_id = bus.add_signal_watch()
def removeTee(self, id):
print "received remove tee " + str(id)
self.unlinkPad(id)
q = self._queues[id]
peer = q.get_pad("src").get_peer()
self._adder.release_request_pad(peer)
self._pipeline.remove(q)
def _messageHandler(self, message):
t = message.type
if t == gst.MESSAGE_EOS:
if (message.src == self._sink):
bus = self._pipeline.get_bus()
bus.remove_signal_watch()
self._stopTivo()
return False
else:
print "eos not from our sink"
return True
def _stopTivo(self):
print "stopping tivo"
for x in self._queues.keys():
q = self._queues[x]
q.set_state(gst.STATE_NULL)
self._pipeline.remove(q)
self._queues[id] = None
self._pads[id] = None
self._adder.set_state(gst.STATE_NULL)
self._enc.set_state(gst.STATE_NULL)
self._encQueue.set_state(gst.STATE_NULL)
self._sink.set_state(gst.STATE_NULL)
self._pipeline.remove(self._adder)
self._pipeline.remove(self._enc)
self._pipeline.remove(self._encQueue)
self._pipeline.remove(self._sink)
self._state = TIVO_STATE_STOPPED
def unlinkPad(self, id):
tee,pad = self._pads[id]
tee.release_request_pad(pad)
q = self._queues[id]
pad = q.get_pad('sink')
pad.send_event(gst.event_new_eos())
| Python |
import gobject
from ui import uiconstants as uicon
from messaging2 import ahconstants as ahcon
from setup.fvconstants import LIVE_AUDIO_PORT
import gstcomponents
import recordsrc
import gst
import os
class MixerInterface(gobject.GObject):
__gsignals__ = {
# create signals that will be emitted from mixer
# emitted when the message finished playing back
'end_message' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
# user_idle is when no incoming packets from network (no packets for 5 seconds)
'user_idle' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
# user_active is when packets start to come in from network (new packets for 5 secs)
'user_active' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'ack_push_to_talk' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'started_record' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'stopped_record' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'started_play' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'stopped_play' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'my_ssrc' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_UINT,)),
}
def __init__( self, live_audio_dest):
"""
@param live_audio_dest: the broadcast address that the live audio needs to be sent to
@type live_audio_dest: String
"""
gobject.GObject.__init__(self)
self._file_sender = gstcomponents.FileInputSender(LIVE_AUDIO_PORT)
self._file_sender.connect('eos', self._fileFinished)
self._rtp_receiver = gstcomponents.RtpStreamsReceiver(LIVE_AUDIO_PORT, True)
self._rtp_receiver.connect('new-rtp-stream', self._newRTPStream)
self._rtp_receiver.connect('rtp-stream-timeout', self._timeoutRTPStream)
self._mixers = {} # map guid to pipelines
self._file_players = {}
self._file_control = {}
self._file_finished = {} # map id to file finished True/False
self._inputs = {} # map guid to their type and properties # to notify
self._mixer_params = {} # maps guid to (volume, pan)
self._my_ssrc = 0
self._mic_controller = recordsrc.MicController(live_audio_dest,
LIVE_AUDIO_PORT)
self._mic_controller.connect( 'my_ssrc', self._startTalk )
self._path = ahcon.MESSAGING_PATH+'apps/audio/'
if not os.path.exists(self._path):
print "MixerInterface: Creating path for audio"
os.makedirs(self._path)
self.temp_rec_file_name = self._path + "temp_audio.wav"
def _startTalk(self, obj, ssrc):
print "My SSRC:",ssrc
self._my_ssrc = ssrc
# silence my volume
self._mixer_params[self._my_ssrc] = (0,0)
self.emit('my_ssrc', ssrc)
def _newRTPStream(self, obj, session, ssrc, pt, id):
"""
Callback when new RTP stream appears in the mixer
@param obj: L{RtpStreamsReceiver}
"""
print "New RTP stream:",session,ssrc,pt,id
self._mixers[ssrc] = self._rtp_receiver.getMixer( id )
volume, pan = self._mixer_params.get(ssrc, (1,0) )
self._mixers[ssrc].setVolume(volume)
self._mixers[ssrc].setPan(pan)
def _timeoutRTPStream( self, obj, session, ssrc, pt, id):
print "Timeout RTP stream:",session,ssrc,pt,id
if ssrc in self._mixers:
self._mixers[ssrc].remove()
self._mixers.pop(ssrc)
self._mixer_params.pop(ssrc, None)
def _fileFinished(self, obj, id):
self._file_finished[id] = True
print "Finished playing",id
def onAddInput(self, obj, properties ):
"""
Add an input to the audio mixer.
The input can be an audio message or network stream.
@param guid:
@type guid: Number
@param properties: type; IP address, port for network and filename for message
@type properties: Dictionary
"""
if properties.get('status', uicon.OFFLINE) == uicon.OFFLINE:
return
guid = properties['id']
print "Adding INPUT", guid
self._inputs[guid] = properties
def onChange(self, obj, ui_data):
"""
When state changes from either background or muted or inactive
@param ui_data: id, volume, pan
@type ui_data: Dictionary
"""
guid = ui_data['id']
volume = ui_data['volume']
pan = ui_data['pan']
if pan > 1:
pan = 1
elif pan < -1:
pan = -1
# if object type is modifiable
if ui_data['type'] not in [uicon.FV, uicon.SERVICE, uicon.PHONE, uicon.AUDIOMSG]:
print "MIXER: Ignored",type
return
# if it was moved out of the playback area, stop it
if volume < 0:
if guid in self._file_players:
player = self._file_players[guid]
ssrc = player.getSsrc()
self._file_finished.pop(player.getID())
player.stop()
player.remove()
self._file_players.pop(guid)
self._mixers[ssrc].remove()
self._mixers.pop(ssrc)
self._mixer_params.pop(ssrc)
return
# if the node is offline, then ignore
properties = self._inputs.get(guid, None)
if properties == None or properties.get('status', uicon.OFFLINE) == uicon.OFFLINE:
return
# handle files
if ui_data['type'] is uicon.AUDIOMSG:
if guid in self._file_players:
self._file_players[guid].setVolume( volume )
self._file_players[guid].setPan( pan )
# check if file finished playing back and replay
if self._file_finished[self._file_players[guid].getID()]:
self._file_control[guid].play()
self._file_finished[self._file_players[guid].getID()] = False
print "Replaying"
return
else:
# create new pipeline for this guid
print "New or Inactive Input",guid
print "MIXER: Playing",properties['url']
self._startMessage( guid, properties['url'], volume, pan )
# handle network
ssrc = properties.get('ssrc', 0)
if ssrc in self._mixers:
# Existing
print "MIXER: Changing VOLUME"
self._mixers[ssrc].setVolume( volume )
self._mixers[ssrc].setPan( pan )
else:
print "No input streams available for ssrc:",ssrc
return
self._mixer_params[ssrc] = (volume, pan)
def _startMessage(self, guid, filename, volume, pan):
"""
Starts the file stream
@param guid: the source globally unique identifier
@type guid: Number
@param filename: name of the file to play
@type filename: String
"""
self._file_players[guid] = self._file_sender.addFileInput( filename )
playerid = self._file_players[guid].getID()
self._file_finished[playerid] = False
ssrc = self._file_players[guid].getSsrc()
self._inputs[guid]['ssrc'] = ssrc
return ssrc
def _startMessage(self, guid, filename, volume, pan):
self._file_control[guid] = self._file_sender.addFileInput( filename )
self._file_players[guid] = self._file_control[guid].getMixer()
self._file_finished[self._file_players[guid].getID()] = False
self._file_players[guid].setVolume( volume )
self._file_players[guid].setPan( pan )
self._file_control[guid].play()
def onOffline( self, obj, params ):
"""
Deactivate the pipeline when an object is moved out of the audio play layer
or goes offline
@param params: various parameters of user/service that has gone offline
@type params: Dictionary
"""
guid = params['id']
ssrc = self._inputs[guid]['ssrc']
if ssrc in self._mixers:
# if it's an active pipeline stop and return True
self._mixers[ssrc].remove()
def onMIC(self, obj, action):
"""
Called when push to talk is deactivated or when push to talk was inactive
and message recording stops
Stops recording from MIC
@param action: 1 when push to talk on, 2 when attention pressed, 0 when push to talk off
@type action: Number
"""
if action in [1,2]:
print "MIC starting"
self._mic_controller.micUnmute()
else:
print "MIC stopping"
self._mic_controller.micMute()
self.emit( 'ack_push_to_talk', action)
def onStartRecord(self, obj, recdata):
"""
Called when the UI sends a request to start recording.
@param recdata: dictionary with fields
1. 'id': temporary id
"""
print "OnStartRecord"
id = recdata['id']
print "id:",id
self._mic_controller.startRecord( self.temp_rec_file_name )
print "Exit StartRecord"
self.emit('started_record', id)
def onStopRecord(self, obj, recdata):
"""
Called when the UI sends a request to stop recording.
@param recdata: dictionary with fields
1. 'id': temporary id
"""
print "OnStopRecord"
id = recdata['id']
self._mic_controller.stopRecord()
detail = {'subcommand':uicon.ADD, 'id':id, 'type':uicon.AUDIOMSG, 'label':'Temp Record Audio',
'url':self.temp_rec_file_name, 'status':-1,
'priority':uicon.PRIORITY_LOW }
self.onAddInput( None, detail )
self.emit('stopped_record', id)
def onStartPlay(self, obj, playdata):
"""
Called when the UI sends a request to start playing a message
"""
print "OnStartPlay"
print "id:", playdata['id']
id = playdata['id']
playinfo = {'id':id, 'volume':1, 'pan':0, 'type':uicon.AUDIOMSG }
self.onChange( None, playinfo )
self.emit('started_play', id)
def onStopPlay(self, obj, playdata):
"""
Called when the UI sends a request to stop playing a message
"""
print 'OnStopPlay'
print "id", playdata['id']
id = playdata['id']
playinfo = {'id':id, 'volume':0, 'pan':0, 'type':uicon.AUDIOMSG }
self.onChange( None, playinfo )
self.emit('stopped_play', id)
gobject.type_register( MixerInterface )
class NewMsgController:
    """Stub controller for a message-playback pipeline; every operation
    just logs what a real implementation would do."""
    def __init__(self, guid, filename):
        print("Start pipeline")
        self.id = guid
        self.filename = filename
    def pause(self):
        """Would pause playback of the message."""
        print("Pausing msg pipeline")
    def change(self, volume, pan):
        """Would apply a new volume/pan to the playback."""
        print("%s and %s volume change" % (volume, pan))
    def stop(self):
        """Would tear the pipeline down."""
        print("Stop pipeline")
    def play(self):
        """Would (re)start playback."""
        print("Start play")
class NewLiveController:
    """Stub controller for a live network-audio pipeline; operations only
    log what a real implementation would do."""
    def __init__(self, guid, address, port):
        print("Start pipeline")
        self.id = guid
        self.address = address
        self.port = port
    def pause(self):
        """Would pause the live stream."""
        print("Pausing live pipeline")
    def setVolume(self, volume):
        """Would change the stream volume."""
        print("changed volume %s" % volume)
    def setPan(self, pan):
        """Would change the stream pan."""
        print("changed pan %s" % pan)
    def stop(self):
        """Would tear the pipeline down."""
        print("Stop pipeline")
    def play(self):
        """Would (re)start the stream."""
        print("Start play")
    def remove(self):
        """Would remove the stream from the mix."""
        print("Removing live")
| Python |
# For audio recording and messaging
import time
import random
import os, datetime
from util.persist import *
# Filename suffixes for persisted sent/received audio-message metadata.
SENT_FILE = "_sent_am.data"
RECV_FILE = "_recv_am.data"
import gobject
class AudioManager(gobject.GObject):
"""
Manages audio messages
"""
__gsignals__ = {
'sent_audio' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT, gobject.TYPE_INT,)),
'received_audio' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
}
def __init__(self, am, mixer, db):
# stores audio messages indexed by guid
gobject.GObject.__init__( self )
self.am = am
self.mixer = mixer
#self.__createMessagingPipelines()
self.path = ahcon.MESSAGING_PATH+'apps/audio/'
if not os.path.exists(self.path):
print "Creating path for audio"
os.makedirs(self.path)
self.temp_rec_file_name = self.path+"temp_audio.wav"
self._type = uicon.AUDIOMSG
self.__db = db
def sendAudio(self, senddata):
"""
Called when the UI sends a request to send a message
@param senddata: dictionary with fields
1. 'id': temporary id
2. 'tags': tags of information
3. 'priority': priority of data
4. 'recipients': the recipients of data
5. 'strength': the strength of spread
6. 'reply': the guid of the message this message is replying to
@return the official guid to the UI of the sent message, -1 if it failed sending
"""
tags = senddata['tags']
id = senddata['id']
recipients = senddata['recipients']
# metadata to send with audio
meta = {}
meta[ahcon.PRIORITY_KEY] = senddata['priority']
meta[ahcon.EXPIRES_KEY] = time.time()+3000 # datetime.datetime.now() + datetime.timedelta(days=1)
meta[ahcon.STRENGTH_KEY] = senddata['strength']
try:
reply = senddata['reply']
meta[ahcon.REPLY_KEY] = reply
except KeyError:
print "Not a reply audio message"
# Right now, let's rename to the current time
newid = random.randint(1,100000)
newpath = self.path + "a_" + str(newid) + ".audio"
if not os.path.exists(self.temp_rec_file_name):
print "Requested audio file to send didn't exist"
self.emit('sent_audio', -1, uicon.AUDIOMSG)
return
length = common.GetAudioDuration(self.temp_rec_file_name)
meta[ahcon.LENGTH_KEY] = length
#newid = time.time()
#os.chdir(self.path)
#print os.getcwd()
#oldpath = str(id) + ".audio"
#newpath = str(newid) + ".audio"
print "Renaming from",self.temp_rec_file_name,"to",newpath
os.rename(self.temp_rec_file_name, newpath)
guid, src = self.am.sendMessage(uicon.AUDIOMSG, recipients, newpath, tags, meta)
msg = {'guid':guid, 'type':self._type, 'src':src, 'destinations':recipients, 'tags':tags, 'url':newpath,'meta':meta, 'created':time.time()}
self.__db.putSentMsgMeta(msg)
print "Sent audio message: guid:",guid,"sub:",tags,"recip:",recipients
#persist(self.sent_msgs, self.path+SENT_FILE)
self.emit('sent_audio', guid, uicon.AUDIOMSG)
def onMessageReceived(self, guid, src, recipients, tags, url, meta):
"""
Callback from AsyncManager when audio message is received over the network.
@param guid: the guid of message received
@type guid: number
@param src: The guid of the message sender
@type src: number
@param recipients: The target recipients of this message
@type recipients: List[number]
@param tags: String of comma seperated tags
@type tags: String
@param url: The url to the received message
@type url: String
@param meta: Metadata attached to this message
@type meta: dictionary
"""
print "Received message: ",guid,src,recipients,meta
print "url: ",url
meta[ahcon.RECV_TIME_KEY] = time.time()
msg = {'guid':guid, 'type':self._type, 'src':src, 'destinations':recipients, 'tags':tags, 'url':url,'meta':meta, 'created':meta[ahcon.CREATED_KEY]}
self.__db.putRcvdMsgMeta(msg)
name = "Voice message from " + str(src) + " tag: " + str(tags)
# url is needed for mixer to play back audio
self.emit('received_audio', { 'subcommand': uicon.ADD,
'id':guid,
'type':self._type,
'url':url,
'label':tags,
'status': uicon.EXPIRES_SOON,
'priority': meta.get(ahcon.PRIORITY_KEY, uicon.PRIORITY_LOW),
'date': meta[ahcon.CREATED_KEY]
})
def getAudioDetails(self, guid):
"""
Called from L{ObjectBroker} when user requests OPEN_OBJECT by double clicking on audio message
"""
#detail = self.recv_msgs[guid]
#detail['type'] = self._type
detail = self.__db.getMsgMeta(guid)
return detail
def getAllAudio(self):
"""
Used during initialization to return all items user has
@return details for every received audio messages
"""
audio_list = []
for msg in self.__db.getAllOfType(self._type):
detail = {'subcommand':uicon.ADD, 'id': msg['id'], 'type': self._type, 'label': msg['tags'],
'url': msg['url'], 'status': -1,
'priority': msg['meta'][ahcon.PRIORITY_KEY],
'date' : msg['meta'][ahcon.RECV_TIME_KEY] }
audio_list.append( detail )
self.mixer.onAddInput( self, detail )
return audio_list
gobject.type_register( AudioManager ) | Python |
import thread
import socket
import struct
import asyncore
import gobject
import time
from util import device
MAX_BUF_SIZE = 4096
import logging
logger = logging.getLogger('network')
def get_broadcast_addr(iface):
    """Return the IPv4 broadcast address configured on interface `iface`."""
    if device.current() == device.WINDOWS:
        # TODO: Implement this properly for windows.
        return '192.168.2.255'
    import fcntl
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # SIOCGIFBRDADDR ioctl; the address sits at bytes 20..24 of the reply.
    packed = fcntl.ioctl(sock.fileno(),
                         0x8919,  # SIOCGIFBRDADDR
                         struct.pack('256s', iface[:15]))
    return socket.inet_ntoa(packed[20:24])
def get_ip_addr(iface):
    """Return the IPv4 address configured on interface `iface`."""
    if device.current() == device.WINDOWS:
        # TODO: Implement this properly for windows.
        return '192.168.2.2'
    import fcntl
    sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
    # SIOCGIFADDR ioctl; the address sits at bytes 20..24 of the reply.
    packed = fcntl.ioctl(sock.fileno(),
                         0x8915,  # SIOCGIFADDR
                         struct.pack('256s', iface[:15]))
    return socket.inet_ntoa(packed[20:24])
def start():
    # Run the asyncore event loop on a background thread (Python 2
    # `thread` module): 0.001s select timeout, use_poll=True.
    thread.start_new_thread(asyncore.loop,(0.001, True))
class NetTune:
    """Rate-limiting bookkeeping mixed into the network dispatchers:
    tracks when the last send happened and how long to wait between sends."""
    def __init__(self):
        # Minimum seconds between consecutive sends.
        self._interval = 0.01
        # Start from "just sent" so the first write honours the interval.
        self._last_sent = time.time()
    def set_interval(self, interval):
        """Set the minimum delay (seconds) between consecutive sends."""
        self._interval = interval
class Broadcast(asyncore.dispatcher, NetTune):
    """UDP broadcast endpoint: receives datagrams on recv_port and drains
    an outgoing queue to the interface's broadcast address, rate-limited
    by the NetTune interval."""
    def __init__(self, iface, recv_port, queue, rec_func):
        asyncore.dispatcher.__init__(self)
        NetTune.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.setsockopt(socket.SOL_SOCKET, socket.SO_BROADCAST, 1)
        # NOTE(review): a blocking socket under asyncore is unusual --
        # confirm this is intentional.
        self.setblocking(1)
        self.__my_ip = get_ip_addr(iface)
        self.__broadcast_ip = get_broadcast_addr(iface)
        self.__recv_port = recv_port
        self.__queue = queue        # outgoing (data, (ip, port)) source
        self.__rec_func = rec_func  # callback(data, addr) for inbound data
        self.bind(('', self.__recv_port))
        logger.debug('Broadcast: listening on %s:%s. Broadcasting to %s',self.__my_ip, self.__recv_port, self.__broadcast_ip)
    def handle_connect(self):
        pass
    def handle_read(self):
        data, addr = self.recvfrom(MAX_BUF_SIZE)
        # Ignore our own broadcasts echoed back to us.
        if addr == (self.__my_ip, self.__recv_port):
            return
        else:
            self.__rec_func(data, addr)
    def handle_write (self):
        data, addr = self.__queue.next()
        if data is None:
            logger.info('Broadcast: data was none')
            return
        ip, port = addr
        # Default to our own receive port; always send to the broadcast IP.
        if not port:
            port = self.__recv_port
        addr = (self.__broadcast_ip, port)
        sent = 0
        # NOTE(review): on a partial send this re-sends the whole payload
        # from the start; for UDP sendto is all-or-nothing so in practice
        # the loop runs once.
        while sent < len(data):
            sent = self.sendto(data, addr)
            logger.debug('Broadcast: Sent %i bytes to %s', sent, addr)
        self._last_sent = time.time()
    def writable (self):
        # Only write when there is queued data and the pacing interval passed.
        return len(self.__queue) > 0 and time.time() - self._last_sent > self._interval
class Unicast(asyncore.dispatcher, gobject.GObject, NetTune):
    """UDP unicast endpoint: receives on recv_port and drains an outgoing
    queue to per-message addresses, rate-limited by the NetTune interval."""
    # NOTE(review): inherits gobject.GObject but never calls its __init__
    # and declares no signals -- confirm the base class is needed.
    def __init__(self, iface, recv_port, queue, rec_func):
        asyncore.dispatcher.__init__(self)
        NetTune.__init__(self)
        self.create_socket(socket.AF_INET, socket.SOCK_DGRAM)
        self.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        self.setblocking(1)
        self.__my_ip = get_ip_addr(iface)
        self.__recv_port = recv_port
        self.__queue = queue        # outgoing (data, (ip, port)) source
        self.__rec_func = rec_func  # callback(data, addr) for inbound data
        self.bind(('', self.__recv_port))
        logger.debug('Unicast: listening on %s:%s',self.__my_ip, self.__recv_port)
    def handle_connect(self):
        pass
    def handle_read(self):
        data, addr = self.recvfrom(MAX_BUF_SIZE)
        logger.debug('Unicast: read %s bytes from %s', len(data), addr)
        self.__rec_func(data, addr)
    def handle_write (self):
        data, addr = self.__queue.next()
        if data is None:
            logger.warn('Unicast: data was none')
            return
        ip, port = addr
        # Default to our own receive port when the queue gave none.
        if not port:
            port = self.__recv_port
        addr = (ip, port)
        logger.debug('Unicast: writing %s bytes to %s', len(data),addr)
        sent = 0
        # NOTE(review): re-sends the whole payload on partial send; for UDP
        # sendto is all-or-nothing, so the loop effectively runs once.
        while sent < len(data):
            sent = self.sendto(data, addr)
            logger.debug('Unicast: wrote %s bytes to %s',len(data), addr)
        self._last_sent = time.time()
    def writable (self):
        # Only write when there is queued data and the pacing interval passed.
        return len(self.__queue) > 0 and time.time() - self._last_sent > self._interval
import threading
from util.decorators import *
import gobject
# TODO: Note the Queue interface.
try:
from async import chunk
except:
print 'Couldnt load chunk'
pass
import logging
logger = logging.getLogger('network.queues')
class Queue:
    """FIFO of (data, addr) pairs shared between producer threads and the
    network writer thread."""
    lock = threading.Lock()
    def __init__(self):
        self.__queue = []
    @synchronized(lock)
    def put(self, data, addr=(None, None)):
        """Append a payload with its destination address."""
        self.__queue.append((data, addr))
    @synchronized(lock)
    def next(self):
        """Pop and return the oldest item, or (None, (None, None)) if empty."""
        # CONSISTENCY FIX: next() pops the same list that put() appends to,
        # so it must hold the same lock; the original guarded only put(),
        # leaving the len-check/pop pair racy across consumer threads.
        if len(self.__queue) == 0:
            return (None, (None, None))
        else:
            return self.__queue.pop(0)
    def __len__(self):
        return len(self.__queue)
class MessageQueue(gobject.GObject):
    """Queue of pending outbound messages, drained one chunk per call by
    the network writer; emits 'busy' around each message transmission."""
    __gsignals__ = {
        'busy' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                  (gobject.TYPE_BOOLEAN,)),
    }
    lock = threading.Lock()
    def __init__(self):
        gobject.GObject.__init__(self)
        self.__queue = []    # FIFO of PendingSend objects
        self.__busy = False  # True while a message is mid-transmission
    @synchronized(lock)
    def put(self, msg, addr, seqs = []):
        # NOTE(review): mutable default `seqs=[]` is shared between calls;
        # harmless here only because it is never mutated.
        p = MessageQueue.PendingSend(msg, addr, seqs)
        # Duplicate sends (same mid/addr/seqs) are dropped.
        if p not in self.__queue:
            logger.debug('Putting message %s in queue', msg.header.mid)
            self.__queue.append(p)
    @synchronized(lock)
    def next(self):
        # Return (raw_chunk_bytes, addr), or (None, (None, None)) when idle.
        if len(self.__queue) == 0:
            next = (None, (None, None))
        else:
            if not self.__busy:
                self.__busy = True
                # NOTE(review): emitted from the caller's (network) thread.
                self.emit('busy', True)
            c = self.__queue[0].next()
            next = (c.raw(), self.__queue[0].addr)
            # An EndChunk terminates the message: retire it from the queue.
            if isinstance(c, chunk.EndChunk):
                self.__queue.pop(0)
                self.__busy = False
                self.emit('busy', False)
        return next
    def __len__(self):
        return len(self.__queue)
    class PendingSend:
        """Iterator over the chunks of one message bound for one address."""
        def __init__(self, msg, addr, seqs):
            self.msg = msg
            self.addr = addr
            self.seqs = seqs  # explicit seq numbers to resend; empty = whole message
            self.__i = -1     # index into seqs
            logger.debug('Putting a PendingSend: msg_id %s, target addr %s, seqs, %s', msg.header.mid, addr, seqs)
        def next(self):
            # With explicit seqs: yield each requested chunk, then one
            # EndChunk, then StopIteration.  Otherwise defer to the
            # message's own iterator, converting its StopIteration into
            # a trailing EndChunk.
            if self.seqs:
                self.__i += 1
                if self.__i > len(self.seqs):
                    raise StopIteration
                elif self.__i == len(self.seqs):
                    return chunk.EndChunk(self.msg.header.mid, -1)
                else:
                    return self.msg.get_chunk(self.seqs[self.__i])
            else:
                try:
                    return self.msg.next()
                except StopIteration:
                    return chunk.EndChunk(self.msg.header.mid, -1)
        def __eq__(self, other):
            # NOTE(review): assumes `other` is a PendingSend; comparing with
            # any other type raises AttributeError instead of returning False.
            return (self.msg.header.mid == other.msg.header.mid
                    and self.addr == other.addr
                    and self.seqs == other.seqs)
        def __hash__(self):
            return hash(self.msg.header.mid)
gobject.type_register(MessageQueue)
| Python |
import bluetooth
import threading
import time
from util.decorators import *
# TODO: Work out what this has to be
MAX_BUF_SIZE = 1024
import logging
logger = logging.getLogger('async.bt')
class _L2CAPReceiveChannel(threading.Thread):
def __init__(self, conn, addr, receive_cb, close_cb):
threading.Thread.__init__(self)
self.__conn = conn
self.__addr = addr
self.__receive_cb = receive_cb
self.__close_cb = close_cb
self.__closed = False
self.start()
def run(self):
while not self.__closed:
data = self.__conn.recv(MAX_BUF_SIZE)
print 'L2CAP Received',len(data),'bytes'
if data in ['x00', '']:
break
self.__receive_cb(data, self.__addr)
self.__close_cb(self.__addr)
def close(self):
print 'L2CAP: Closing receiver'
self.__closed = True
class _L2CAPSendChannel(threading.Thread):
def __init__(self, addr):
threading.Thread.__init__(self)
self.__sock = bluetooth.BluetoothSocket(bluetooth.L2CAP)
print 'L2CAP: Connecting to',addr
self.__sock.connect(addr)
print 'L2CAP: Connected.'
self.__closed = False
self.__queue = []
self.start()
def run(self):
while not self.__closed:
print 'L2CAP: In loop.'
if len(self.__queue) > 0:
data = self.__queue.pop(0)
data = data + '\n'
print 'L2CAP: Sending',len(data),'bytes'
logger.debug('Sending %s bytes: %s', len(data), data)
self.__sock.send(data)
if data == '\x00':
self.close()
else:
pass
time.sleep(0.005)
print 'L2CAP: Send channel closed.'
self.__sock.close()
def send(self, data):
print 'L2CAP: Appending data to queue'
self.__queue.append(data)
def close(self):
print 'L2CAP: Set __closed = True'
self.__closed = True
class _L2CAPListener(threading.Thread):
    """Accept loop for inbound L2CAP connections; spawns one
    _L2CAPReceiveChannel per connecting MAC address."""
    def __init__(self, port, received):
        threading.Thread.__init__(self)
        self.__listener = bluetooth.BluetoothSocket(bluetooth.L2CAP)
        self.__listener.bind(('', port))
        self.__listener.setblocking(True)
        self.__listener.listen(1)
        self.__closed = False
        self.__received = received   # callback(data, addr) for inbound payloads
        self.__receivers = {}        # mac -> active _L2CAPReceiveChannel
        self.__lock = threading.Lock()
        self.start()
    def run(self):
        while not self.__closed:
            print '~*~*~ Listener: Trying accept'
            conn, addr = self.__listener.accept()
            self.__lock.acquire()
            (mac, port) = addr
            if mac in self.__receivers:
                # NOTE(review): a second connection from a MAC whose previous
                # channel has not yet closed hard-crashes the listener thread
                # -- TODO handle reconnects gracefully.
                assert False
            else:
                print 'L2CAP: Accepted connection from',conn,time.time()
                self.__receivers[mac] = _L2CAPReceiveChannel(conn, addr, self.__received, self.__on_finished)
            self.__lock.release()
    def close(self):
        # NOTE(review): only flips the flag; a blocked accept() will keep the
        # thread alive until one more connection arrives.
        self.__closed = True
    def __on_finished(self, addr):
        # Called by a receive channel when its connection ends.
        self.__lock.acquire()
        print 'L2CAP: Connection closed',addr
        (mac, port) = addr
        print 'L2CAP: receivers :',self.__receivers
        self.__receivers[mac].close()
        del self.__receivers[mac]
        print 'L2CAP: Active receivers:',self.__receivers
        self.__lock.release()
class Unicast(threading.Thread):
    """Bluetooth unicast transport: listens for inbound L2CAP connections
    and drains an outgoing queue, one send channel per destination MAC."""
    def __init__(self, port, queue, recv_func):
        threading.Thread.__init__(self)
        self.__port = port
        self.__queue = queue   # source of (data, (mac, port)) items
        self.__listener = _L2CAPListener(self.__port, recv_func)
        self.__closed = False
        self.__senders = {}    # mac -> active _L2CAPSendChannel
        self.start()
    def run(self):
        while not self.__closed:
            if len(self.__queue) > 0:
                (data, (mac, port)) = self.__queue.next()
                print 'L2CAP: Got item from queue',len(data),mac,port,time.time()
                if data == None:
                    # None payload = close the channel to this MAC; the NUL
                    # sentinel tells the peer's receiver to stop.
                    # NOTE(review): raises KeyError if no channel exists for
                    # this mac -- verify producers never close twice.
                    print 'L2CAP: Closing send channel'
                    self.__senders[mac].send('\x00')
                    self.__senders[mac].close()
                    del self.__senders[mac]
                else:
                    if mac in self.__senders:
                        print 'L2CAP: Sending data to existing channel'
                        self.__senders[mac].send(data)
                    else:
                        print 'L2CAP: Creating channel and sending data'
                        self.__senders[mac] = _L2CAPSendChannel((mac, port))
                        self.__senders[mac].send(data)
            time.sleep(0.005)
        print 'L2CAP: Done'
    def close(self):
        # Ask the drain loop to finish after its current iteration.
        self.__closed = True
class Broadcast(threading.Thread):
    """Bluetooth broadcast transport -- unimplemented stub.
    TODO: mirror the net.network.Broadcast interface over bluetooth."""
    def __init__(self, port, queue, recv_func):
        pass
from db import db
from fvutil import fvcon
from util import config
from presence import ps
# NOTE: Same API
class PresenceWrapper:
def __init__(self, presence):
self.__presence = presence
#OFFLINE = config.get_int('presence.status', 'offline')
#ONLINE = config.get_int('presence.status', 'online')
self.__fv_to_presence_status= {
fvcon.OFFLINE: config.get_int('presence.status', 'offline'),
fvcon.ONLINE : config.get_int('presence.status', 'online'),
}
def get_all(self):
print '!@#!@#!@# GETTING PRESENCE!'
users = self.__presence.get_all_known_users()
return map(self.to_flash, users)
def get(self, uid):
user = self.__presence.get_known_user(uid)
return self.to_flash(user)
def __to_fv_status(self, header_dict):
if header_dict['status'] == fvcon.ONLINE:
del header_dict['status']
header_dict['status'] = header_dict['meta'][fvcon.STATUS]
def to_flash(self, header_dict):
self.__to_fv_status(header_dict)
return {'id':header_dict['uid'],
'label':header_dict['name'],
'address':'unknown..', # TODO: Do we need address?
# TODO: Get this right
'subcommand':fvcon.ADD,
'type':fvcon.FV,
'status':header_dict['status'],
}
| Python |
import gobject
from fvutil.managers import objectmanager
from fvutil import fvcon
class Timeline(objectmanager.ObjectManager):
def __init__(self, my_id):
objectmanager.ObjectManager.__init__(self, fvcon.ALL, my_id)
def __get_messages(self, begin, end):
c = self._table._conn
created_times = c.execute(''' SELECT created FROM objects''')
for ct in created_times:
created = ct['created']
print 'created',created
print 'created > begin?',(created>begin)
print 'created < end?',(created<end)
msgs = c.execute(''' SELECT * FROM objects WHERE created > ? AND created < ? ''', (begin, end))
return msgs
def get_messages(self, flash, begin, end):
msgs = self.__get_messages(begin, end)
result = {}
result['command'] = fvcon.ACK_GET_MESSAGES
result['messages'] = msgs
print 'Getting messages',msgs
flash.sendResult(self, result)
"""
result = {}
result['command'] = uicon.ACK_GET_MESSAGES
result['messages'] = []
# TODO: Message need to be sent in chronological order
result['messages'].append({'id':34, 'src_id':57, 'src_name':"Aidan", 'reply_id':-1, 'tags':"Business, Urgent",
'text':"Visitor from BoA", 'msg_type': uicon.PICMSG})
result['messages'].append({'id':45, 'src_id':55, 'src_name':"Bob", 'reply_id':34, 'tags':"Business, Letter",
'text':"Demo is ready", 'msg_type': uicon.TXTMSG})
result['messages'].append({'id':46, 'src_id':56, 'src_name':"Teresa", 'reply_id':34, 'tags':"Picnic, Letter",
'text':"Debuggin in the lake", 'msg_type': uicon.AUDIOMSG})
"""
def get_message_count(self, flash, begin, end):
msgs = self.__get_messages(begin, end)
result = {}
result['command'] = fvcon.ACK_GET_MESSAGE_COUNT
result['begin'] = begin
result['end'] = end
msg_by_type = {}
for msg in msgs:
type = msg['type']
if type not in msg_by_type:
msg_by_type[type] = 0
msg_by_type[type] += 1
ui_ordered = []
for (type, count) in msg_by_type.iteritems():
ui_ordered.append({'msg_type' : type, 'count':count})
result['messages'] = ui_ordered
flash.sendResult(self, result)
| Python |
import gobject
from util import config
from db import db
from fvutil import fvcon
import time
import cPickle as pickle
class ObjectManager(gobject.GObject):
    """Base manager for one type of stored object; wraps an ObjectTable
    and maps stored headers to the flash UI shape via self.to_flash
    (provided by subclasses)."""
    def __init__(self, o_type, my_id, table=None):
        gobject.GObject.__init__(self)
        if not table:
            # No table supplied: open (or create) the shared object table.
            _db = db.get_database(config.get('fv', 'db-path'), config.get('fv', 'object-db'))
            table = _db.get_or_create(ObjectTable)
        self._table = table
        self._type = o_type
        self._table._set_id(my_id)
    def get(self, oid, do_map=True):
        """Fetch one object header; mapped to flash form when do_map."""
        obj = self._table.get(oid)
        return self.to_flash(obj) if do_map else obj
    def get_all_of_type(self, type=None, do_map=True):
        """Fetch all headers of the given type (default: this manager's)."""
        rows = self._table.get_all_of_type(type or self._type)
        return [self.to_flash(row) for row in rows] if do_map else rows
    def get_all(self, do_map=True):
        """Fetch every header this manager can see."""
        # TODO: This is a little bit weird; should think about whether this can be made clearer
        if self._type != fvcon.ALL:
            return self.get_all_of_type(do_map=do_map)
        rows = self._table.get_all()
        return [self.to_flash(row) for row in rows] if do_map else rows
    def add(self, obj_details):
        """Persist an object header."""
        self._table.add(obj_details)
    def get_active(self, o_type=None, do_map=True):
        """Fetch headers of the given type that have not yet expired."""
        rows = self._table.get_active(o_type or self._type)
        return [self.to_flash(row) for row in rows] if do_map else rows
gobject.type_register(ObjectManager)
class ObjectTable(db.Table):
    """SQL table of message/object headers; full header objects are stored
    pickled in the `header` column alongside queryable scalar columns."""
    def __init__(self):
        db.Table.__init__(self)
    def _set_id(self, my_id):
        # Our own node id, used to mark rows we originally sent.
        self.__my_id = my_id
    def _init(self):
        self._conn.execute('''
        create table objects
        (oid INTEGER, type INTEGER, src INTEGER, dests STRING,
        tags STRING, local_path STRING, created INTEGER, expires INTEGER, received INTEGER,
        meta STRING, sender BOOLEAN, header STRING, timestamp INTEGER)
        ''')
    def _drop(self):
        self._conn.execute('''
        drop table objects
        ''')
    def get(self, oid):
        """Return the unpickled header for `oid`, or None when absent."""
        c = self._conn
        # NOTE(review): assumes the db wrapper's execute() returns an
        # indexable row collection, not a bare sqlite3 cursor -- verify.
        rows = c.execute(''' SELECT header FROM objects WHERE oid=? ''', (oid,))
        result = None
        if rows:
            result = self.__db_to_py(rows[0]['header'])
        return result
    def get_all(self):
        """Return every stored header, unpickled."""
        c = self._conn
        rows = c.execute(''' SELECT header FROM objects ''')
        results = []
        for r in rows:
            r = self.__db_to_py(r['header'])
            results.append(r)
        return results
    def get_all_of_type(self, o_type):
        """Return every stored header of the given object type."""
        c = self._conn
        rows = c.execute(''' SELECT header FROM objects WHERE type=? ''', (o_type,))
        results = []
        for r in rows:
            r = self.__db_to_py(r['header'])
            results.append(r)
        return results
    def get_active(self,o_type):
        """Return headers of the given type whose expiry is in the future."""
        c = self._conn
        rows = c.execute(''' SELECT * FROM objects WHERE expires>? AND type=?''', (time.time(),o_type))
        results = []
        for r in rows:
            r = self.__db_to_py(r['header'])
            results.append(r)
        return results
    def __db_to_py(self, header):
        # Stored header comes back as text; re-encode before unpickling.
        return pickle.loads(header.encode('ascii'))
    def __py_to_db(self, header):
        return pickle.dumps(header)
    def add(self, header):
        """Insert one header: scalar columns for querying plus the pickled
        header object itself."""
        c = self._conn
        # NOTE(review): `data` is computed but the pickled header is
        # re-serialized inline below -- confirm this duplication is intended.
        data = self.__py_to_db(header)
        c.execute(''' INSERT INTO objects VALUES(?,?,?,?,?,?,?,?,?,?,?,?, ?) ''',
                  (header.mid,
                   header.msg_type,
                   header.src,
                   pickle.dumps(header.dests),
                   header.meta[fvcon.TAGS],
                   header.get_file_path(),
                   header.created,
                   header.expires,
                   header.received,
                   pickle.dumps(header.meta),
                   header.am_sender(self.__my_id),
                   pickle.dumps(header),
                   time.time()))
| Python |
from db import db
from fvutil.managers import objectmanager
from fvutil import fvcon
from util import config
from presence import ps
import gobject
import random
import os
def get_audio_duration(path):
    """Return the duration in milliseconds of the raw audio file at `path`.

    Assumes mono, 16-bit samples at 8000 Hz, i.e. 16000 bytes per second.
    """
    # BUGFIX: the original computed size / 16000 * 1000, whose integer
    # division truncated to whole seconds before scaling to milliseconds.
    return os.path.getsize(path) * 1000 // 16000
class AudioPresence(ps.ObjectPresence):
    """Presence-service advertisement for a stored audio object."""
    def __init__(self, oid, name):
        ps.ObjectPresence.__init__(self, oid, name)
class AudioManager(objectmanager.ObjectManager):
    """Stores, sends and receives voice recordings (fvcon.AUDIO objects).

    Recordings are captured to a temp .wav file, renamed into the audio
    folder on send, and shipped via the async messaging layer.
    """
    __gsignals__ = {
        'sent_audio' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                        (gobject.TYPE_INT, gobject.TYPE_INT,)),
        'received_audio' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                            (gobject.TYPE_PYOBJECT,)),
    }
    # Custom meta key: recording length in milliseconds (see get_audio_duration).
    LENGTH = 14001
    def __init__(self, async, my_id, mixer):
        objectmanager.ObjectManager.__init__(self, fvcon.AUDIO, my_id)
        self.__async = async
        self.__path = config.get('async.folders', fvcon.AUDIO)
        print 'AudioManager: Path - ',self.__path
        # NOTE(review): assumes __path ends with a separator -- confirm config.
        self.__temp_rec_file_name = self.__path+"temp_audio.wav"
        # Re-register every already-stored recording as a mixer input.
        for detail in self.get_all():
            print 'Adding input for',detail
            mixer.onAddInput(self, detail)
    def send(self, data):
        """Move the temp recording into place and send it.

        @param data: dict with 'tags' and 'recipients' keys.
        Emits 'sent_audio' with the new message id.
        """
        tags = data['tags']
        dests = data['recipients']
        meta = {
            AudioManager.LENGTH: get_audio_duration(self.__temp_rec_file_name),
            fvcon.TAGS: tags
        }
        id = random.randint(1,100000)
        newpath = self.__path + '/' + str(id) + '.audio'
        # NOTE(review): os.rename fails across filesystems -- confirm temp and
        # audio folders always share a device.
        os.rename(self.__temp_rec_file_name, newpath)
        header = self.__async.send_message(newpath, fvcon.AUDIO, dests, meta)
        self.add(header)
        print 'Added audio:',header.mid
        self.emit('sent_audio', header.mid, fvcon.AUDIO)
    def receive(self, header):
        """Store an incoming recording and notify the UI via 'received_audio'."""
        self.add(header)
        flash = self.to_flash(header)
        self.emit('received_audio', flash)
    def to_flash(self, header):
        """Translate a stored header into the dict shape the Flash UI expects."""
        return {'type':fvcon.AUDIO,
                'subcommand':fvcon.ADD,
                'id':header.mid,
                'label':header.meta[fvcon.TAGS],
                'tags':header.meta[fvcon.TAGS],
                'date':header.created,
                'authorID':header.src,
                'src':header.src,
                'local_path':header.get_file_path(),
                'status': fvcon.ONLINE,
                }
import gobject
from db import db
from fvutil.managers import objectmanager
from fvutil import fvcon
from util import config
import random
from presence import ps
class TextPresence(ps.ObjectPresence):
    """Presence-service advertisement for a stored text object."""
    def __init__(self, oid, name):
        ps.ObjectPresence.__init__(self, oid, name)
class TextManager(objectmanager.ObjectManager):
__gsignals__ = {
'created_text' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT, gobject.TYPE_INT,)),
'received_text' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
}
def __init__(self, async, my_id):
objectmanager.ObjectManager.__init__(self, fvcon.TEXT, my_id)
self.__async = async
self.__path = config.get('async.folders', str(fvcon.TEXT))
def send(self, data):
"""
Sends a text message.
@param data
A dictionary containing
"""
tags = data['tags']
text = data['text']
dests = data['recipients']
id = random.randint(1,100000)
path = self.__path + '/' + str(id) + '.text'
f = open(path, mode='wa')
f.write(text)
f.close()
meta = {fvcon.TAGS: tags}
header = self.__async.send_message(path, fvcon.TEXT, dests, meta)
self.add(header)
print header.mid, fvcon.TEXT
self.emit('created_text', header.mid, fvcon.TEXT)
def receive(self, header):
self.add(header)
flash = self.to_flash(header)
self.emit('received_text', flash )
def to_flash(self, header):
path = header.get_file_path()
f = open(path, mode='ra')
text = f.read()
f.close()
return {'type':fvcon.TEXT, 'subcommand':fvcon.ADD, 'id':header.mid, 'label':header.meta[fvcon.TAGS], 'time':header.created, 'authorID':header.src, 'tags':header.meta[fvcon.TAGS], 'text':text, 'src':header.src}
gobject.type_register(TextManager) | Python |
import gobject
import random
import os
import shutil
import cPickle as pickle
from fvutil.managers import objectmanager
from fvutil import fvcon
from util import config
from presence import ps
# For RubyVote
import simplejson
import sys
import logging
logger = logging.getLogger('fluidvoice.poll')
class PollQPresence(ps.ObjectPresence):
    """Presence advertisement for a poll question object (fvcon.POLL_Q)."""
    def __init__(self, oid, name):
        ps.ObjectPresence.__init__(self, oid, name)
class PollVPresence(ps.ObjectPresence):
    """Presence advertisement for a poll vote object (fvcon.POLL_V)."""
    def __init__(self, oid, name):
        ps.ObjectPresence.__init__(self, oid, name)
class PollRPresence(ps.ObjectPresence):
    """Presence advertisement for a poll result object (fvcon.POLL_R)."""
    def __init__(self, oid, name):
        ps.ObjectPresence.__init__(self, oid, name)
class PollQManager(objectmanager.ObjectManager):
    """Stores poll *question* objects (fvcon.POLL_Q)."""
    def __init__(self, my_id):
        objectmanager.ObjectManager.__init__(self, fvcon.POLL_Q, my_id)
    def get_active(self, do_map):
        # NOTE(review): here the positional argument is the do_map flag, not a
        # type -- confirm this matches the ObjectManager.get_active signature.
        return objectmanager.ObjectManager.get_active(self, do_map=do_map)
    def to_flash(self, header):
        """Build the Flash-UI dict for a question from its header metadata."""
        return {'subcommand': fvcon.ADD,
                'type':fvcon.POLL_Q,
                'id': header.mid,
                'label' : header.meta[fvcon.TAGS],
                'date' : header.created,
                'status' : fvcon.ONLINE,
                'priority' : None,
                'subject' : header.meta[fvcon.TAGS],
                'tags' : header.meta[fvcon.TAGS],
                'question' : header.meta[PollManager.QUESTION],
                'questionID' : header.meta[PollManager.QUESTION_ID],
                'category' : header.meta[PollManager.TYPE],
                # One plain dict per Choice instance, for serialisation.
                'choices' : [c.__dict__ for c in header.meta[PollManager.CHOICES].values()],
                'senderName' : 'to do..',
                }
class PollVManager(objectmanager.ObjectManager):
    """Stores poll *vote* objects (fvcon.POLL_V)."""
    def __init__(self, my_id):
        objectmanager.ObjectManager.__init__(self, fvcon.POLL_V, my_id)
    def get_votes_for(self, poll_id):
        """Return the vote headers cast for the poll with id *poll_id*.

        Scans every stored vote; a dedicated table/index would avoid the
        linear pass if this ever becomes hot.
        """
        return [vote for vote in self.get_all(do_map=False)
                if vote.meta[PollManager.VOTE_FOR] == poll_id]
class PollRManager(objectmanager.ObjectManager):
    """Stores poll *result* objects (fvcon.POLL_R)."""
    def __init__(self, my_id):
        objectmanager.ObjectManager.__init__(self, fvcon.POLL_R, my_id)
    def to_flash(self, header):
        """Build the Flash-UI dict for a result, loading the pickled results
        list from the header's file.
        """
        f = open(header.get_file_path(), mode='rb')
        try:
            # try/finally so the handle is closed even if unpickling fails.
            results = pickle.load(f)
        finally:
            f.close()
        return {
            'id':header.mid,
            'subcommand':fvcon.ADD,
            'label':header.meta[fvcon.TAGS],
            'type':fvcon.POLL_R,
            'tags':header.meta[fvcon.TAGS],
            'question':header.meta[PollManager.QUESTION],
            'results': results,
            'poll_type':header.meta[PollManager.TYPE],
            'src':header.src,
            }
class Poll:
    """Enumeration of supported voting systems; values are passed to rubyvote."""
    PLURALITY = 0
    APPROVAL = 1
    CONDORCET = 2
class Choice:
    """One selectable poll answer: an id plus text and/or a recorded clip."""
    def __init__(self, id, text='', relative_audio_path=''):
        # A choice must be presentable somehow -- text, audio, or both.
        if not (text or relative_audio_path):
            raise ValueError # Can't have a choice with no text and no audio_path
        self.id = id
        self.text = text
        self.relative_audio_path = relative_audio_path
# TODO:
# - Bundling, unbundling
# - Add creator metadata
# - Resolve relative/absolute paths
# - Continue writing send/receive code
class PollManager(gobject.GObject):
    """Coordinates the full poll lifecycle: question creation and bundling,
    vote collection, tallying via the external rubyvote script, and result
    distribution. Delegates storage to PollQ/PollV/PollRManager.
    """
    # Custom meta-dictionary keys (arbitrary ints; must not collide with fvcon).
    CREATOR = 1870
    QUESTION = 1871
    CHOICES = 1872
    TYPE = 1873
    VOTE_FOR = 1874
    RESULT_FOR = 1875
    QUESTION_ID = 1876 # The audio id of the question
    POLL_CREATOR = 1878
    __gsignals__ = {
        'created_poll' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                          (gobject.TYPE_INT,gobject.TYPE_INT, )),
        'received_poll' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                           (gobject.TYPE_PYOBJECT,)),
        'submitted_pollvote' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                                (gobject.TYPE_INT, gobject.TYPE_INT,)),
        'received_pollresults' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                                  (gobject.TYPE_PYOBJECT,)),
    }
def __init__(self, async, my_id, mixer):
    """Set up storage managers, temp folders, mixer inputs for already-active
    polls, the rubyvote paths, and the periodic expiry check.
    """
    gobject.GObject.__init__(self)
    self.__q_manager = PollQManager(my_id)
    self.__v_manager = PollVManager(my_id)
    self.__r_manager = PollRManager(my_id)
    self.__async = async
    self.__my_id = my_id
    self.__mixer = mixer
    self.__root = config.get('async.folders', fvcon.POLL_Q)
    self.__temp_path = self.__root + '/temp/'
    if os.path.exists(self.__temp_path):
        # Remove the contents of the temp folder because there could
        # be a lot of old recordings that we really don't care about at all.
        for file in os.listdir(self.__temp_path):
            os.remove(os.path.join(self.__temp_path, file))
    else:
        os.makedirs(self.__temp_path)
    self.__active = self.__q_manager.get_active(False)
    logger.debug('Active: %s', str(self.__active))
    # Add all the active poll audio choices to the mixer
    for active in self.__active:
        poll_id = active.mid
        question_id = active.meta[PollManager.QUESTION_ID]
        if question_id is not None:
            props = {}
            props['status'] = fvcon.ONLINE
            props['id'] = question_id
            props['local_path'] = os.path.join(
                self.__bundle_root(poll_id),
                'question.audio')
            logger.debug('Adding input for question %i',question_id)
            self.__mixer.onAddInput(self, props)
        for (id, choice) in active.meta[PollManager.CHOICES].iteritems():
            props = {}
            props['status'] = fvcon.ONLINE
            props['id'] = id
            props['local_path'] = os.path.join(
                self.__bundle_root(poll_id),
                choice.relative_audio_path)
            logger.debug('Adding input for poll choice %i',id)
            self.__mixer.onAddInput(self, props)
    # External tallying script; must be run from its own directory (see
    # __check_expired).
    self.__rubyvote_dir = os.path.join(os.path.dirname(__file__), 'rubyvote')
    assert os.path.exists(self.__rubyvote_dir)
    self.__rubyvote_path = 'rubyvote.rb'
    assert os.path.exists(os.path.join(self.__rubyvote_dir, self.__rubyvote_path))
    # Poll expiry is polled every 10 seconds.
    self.__poll_expiration_check = gobject.timeout_add(10000, self.__check_expired)
def get_poll(self, pid):
    """Return the Flash dict for the poll question with id *pid*."""
    return self.__q_manager.get(pid)
def get_result(self, rid):
    """Return the Flash dict for the poll result with id *rid*."""
    return self.__r_manager.get(rid)
def get_all(self):
    """Return all unexpired questions plus all results, as Flash dicts."""
    return self.__q_manager.get_active(True) + self.__r_manager.get_all()
def __temp_audio_choice_path(self, choice_id):
    """
    Path of the temporary recording for *choice_id*, before bundling.
    """
    return '%s/%s.audio' % (self.__temp_path, choice_id)
def __bundle_root(self, poll_id):
    """
    Root folder (with trailing slash) of the on-disk bundle for *poll_id*.
    """
    return '%s/%s/' % (self.__root, poll_id)
def __bundled_audio_path(self, poll_id, choice_counter):
    """
    Path inside the poll bundle of the choice_counter-th recorded choice.
    """
    return '%s/choice%s.audio' % (self.__bundle_root(poll_id), choice_counter)
def __pack_audio(self, question_path, choices, poll_id):
    """Bundle the question and choice recordings into one pickled dict file.

    The dict maps choice id -> raw audio bytes ('' for text-only choices),
    plus the literal key 'question' when a question recording exists.
    Returns the path of the written .poll file.
    """
    audio = {}
    if os.path.exists(question_path):
        f = open(question_path, mode='rb')
        try:
            audio['question'] = f.read()
        finally:
            f.close()
    bundle_root = self.__bundle_root(poll_id)
    for (id, choice) in choices.iteritems():
        if choice.relative_audio_path:
            f = open(os.path.join(bundle_root,
                                  choice.relative_audio_path), mode='rb')
            try:
                audio[id] = f.read()
            finally:
                f.close()
        else:
            audio[id] = ''
    poll_path = os.path.join(bundle_root, 'poll' + str(poll_id) + '.poll')
    f = open(poll_path, mode='wb')
    try:
        # (pickle.dump returns None; a previous revision kept it in an
        # unused variable.)
        pickle.dump(audio, f, pickle.HIGHEST_PROTOCOL)
    finally:
        f.close()
    return poll_path
def __unpack_audio(self, audio, choices, poll_id, questionID):
    """Write the bundled audio blobs back into the poll's bundle folder.

    Consumes the 'question' entry of *audio* (if present) and returns the
    list of choice ids that carried an audio blob.
    """
    bundle_root = self.__bundle_root(poll_id)
    if 'question' in audio:
        question_path = os.path.join(bundle_root, str(questionID) + '.audio')
        f = open(question_path, mode='wb')
        f.write(audio['question'])
        f.close()
        del audio['question']
    audio_ids = []
    # Loop variable renamed: the original rebound the name 'audio' inside
    # the loop over audio.iteritems(), shadowing the dict being iterated.
    for (id, clip) in audio.iteritems():
        f = open(os.path.join(bundle_root, choices[id].relative_audio_path), mode='wb')
        f.write(clip)
        f.close()
        audio_ids.append(id)
    return audio_ids
def send_question(self, data):
    """Create and send a new poll question.

    @param data: dict from the UI with 'questionID', 'choices', 'question',
        'poll_type', 'tags' and 'recipients' keys.
    Moves the temp recordings into the poll's bundle folder, packs them into
    one file, sends it, and emits 'created_poll'.
    """
    logger.debug('send_question: %s',str(data))
    poll_id = random.randint(0,10000000)
    # Make the directory structure for this poll
    os.makedirs(self.__bundle_root(poll_id))
    # Get the question into question.audio
    question_id = data['questionID']
    temp_question_path = self.__temp_audio_choice_path(question_id)
    question_path = os.path.join(self.__bundle_root(poll_id), 'question.audio')
    if os.path.exists(temp_question_path):
        shutil.move(temp_question_path, question_path)
    else:
        # No question
        question_id = None
    # Construct the choices
    flash_choices = data['choices']
    choices = {}
    choice_counter = -1
    for choice in flash_choices:
        # Move the audio around so it's in the right place
        temp_audio_path = self.__temp_audio_choice_path(choice['id'])
        if os.path.exists(temp_audio_path):
            choice_counter += 1
            audio_path = self.__bundled_audio_path(poll_id, choice_counter)
            shutil.move(temp_audio_path, audio_path)
        else:
            # Text-only choice.
            audio_path = ''
        choice = Choice(choice['id'], choice['text'], audio_path)
        choices[choice.id] = choice
    poll_path = self.__pack_audio(question_path, choices, poll_id)
    dests = data['recipients']
    meta = {
        PollManager.CREATOR: self.__my_id,
        PollManager.QUESTION: data['question'],
        PollManager.CHOICES: choices,
        PollManager.TYPE: data['poll_type'],
        PollManager.QUESTION_ID: question_id,
        fvcon.TAGS: data['tags'],
    }
    # TODO: Get rid of expiration delta here.
    # Just put in so polls persist for testing
    header = self.__async.send_message(poll_path, fvcon.POLL_Q, dests, meta, expiration_delta=180, explicit_mid=poll_id)
    logger.debug('Sent poll question mid: %s', poll_id)
    self.__q_manager.add(header)
    # Track locally so __check_expired can tally it when it expires.
    self.__active.append(header)
    self.emit('created_poll', header.mid, fvcon.POLL_Q)
def send_vote(self, data):
    """Pickle the user's choices to disk and send them to the poll creator.

    @param data: dict with 'id' (poll id) and 'votes' (list of choice ids).
    Note the vote goes only to the poll's source, not to all recipients.
    """
    logger.debug('send_vote %s',str(data))
    vote_for_poll_id = data['id']
    votes = data['votes']
    vote_path = os.path.join(self.__bundle_root(vote_for_poll_id),
                             'vote-' + str(vote_for_poll_id) + '-' + str(self.__my_id) + '.vote')
    f = open(vote_path, mode='wb')
    pickle.dump(votes, f, pickle.HIGHEST_PROTOCOL)
    f.close()
    poll = self.__q_manager.get(vote_for_poll_id, do_map=False)
    dests = [poll.src]
    meta = {
        PollManager.VOTE_FOR : vote_for_poll_id,
        fvcon.TAGS: '',
        PollManager.POLL_CREATOR : poll.src,
    }
    header = self.__async.send_message(vote_path, fvcon.POLL_V, dests, meta)
    self.__v_manager.add(header)
    self.emit('submitted_pollvote', header.mid, fvcon.POLL_V)
def send_result(self, data):
    """Distribute tallied results to everyone the poll was sent to.

    @param data: dict with 'poll' (the question header) and 'results'
        (list of dicts, each with a 'candidate' choice-id).
    Also emits 'received_pollresults' locally so the sender's UI updates.
    """
    logger.debug('send_result: %s',str(data))
    poll = data['poll']
    dests = poll.dests
    meta = {
        fvcon.TAGS : 'Results: ' + poll.meta[fvcon.TAGS],
        PollManager.QUESTION: poll.meta[PollManager.QUESTION],
        PollManager.QUESTION_ID: poll.meta[PollManager.QUESTION_ID],
        PollManager.TYPE : poll.meta[PollManager.TYPE],
        PollManager.RESULT_FOR: poll.mid,
    }
    results = data['results']
    # Map candidate choice-ids to their display text, in place.
    def to_text(result):
        result['candidate'] = poll.meta[PollManager.CHOICES][int(result['candidate'])].text
        return result
    # NOTE(review): to_text mutates the result dicts in place, so the
    # data['results'] pickled below carries the text-mapped candidates too.
    results = map(to_text, results)
    result_path = os.path.join(self.__bundle_root(poll.mid),
                               'results')
    f = open(result_path, mode='wb')
    pickle.dump(data['results'], f, pickle.HIGHEST_PROTOCOL)
    f.close()
    header = self.__async.send_message(result_path, fvcon.POLL_R, dests, meta)
    self.__r_manager.add(header)
    self.emit('received_pollresults', self.__r_manager.to_flash(header))
    logger.debug('Sent results')
# Receivers
def receive_question(self, header):
    """Handle an incoming poll question: store it, unpack its bundled audio,
    register each clip with the mixer, and emit 'received_poll'.
    """
    logger.debug('received question %i', header.mid)
    self.__q_manager.add(header)
    f = open(header.get_file_path(), mode='rb')
    audio = pickle.load(f)
    f.close()
    choices = header.meta[PollManager.CHOICES]
    poll_id = header.mid
    question_id = header.meta[PollManager.QUESTION_ID]
    audio_ids = self.__unpack_audio(audio, choices, poll_id, question_id)
    for id in audio_ids:
        props = {}
        props['status'] = fvcon.ONLINE
        props['id'] = id
        props['local_path'] = os.path.join(
            self.__bundle_root(poll_id),
            choices[id].relative_audio_path)
        logger.debug('Adding input for poll choice %i',id)
        self.__mixer.onAddInput(self, props)
    if question_id:
        props = {}
        props['status'] = fvcon.ONLINE
        props['id'] = question_id
        props['local_path'] = os.path.join(
            self.__bundle_root(poll_id),
            str(question_id) + '.audio')
        logger.debug('Adding input for poll question %i',question_id)
        self.__mixer.onAddInput(self, props)
    self.emit('received_poll', self.__q_manager.to_flash(header))
def receive_vote(self, header):
    """Handle an incoming vote; stored only if this node created the poll."""
    print 'received vote for poll id',header.meta[PollManager.VOTE_FOR]
    logger.debug('received vote for poll id %i', header.meta[PollManager.VOTE_FOR])
    if header.meta[PollManager.POLL_CREATOR] == self.__my_id:
        print 'added'
        self.__v_manager.add(header)
def receive_result(self, header):
    """Handle incoming poll results: store them and notify the UI."""
    logger.debug('received result for poll id %i', header.meta[PollManager.RESULT_FOR])
    self.__r_manager.add(header)
    self.emit('received_pollresults', self.__r_manager.to_flash(header))
def on_start_record(self, obj, data):
    """UI handler: start recording a choice/question clip into the temp folder."""
    logger.debug('on_start_record %s',str(data))
    rec_data = {'id':data['id'],
                'filename':self.__temp_audio_choice_path(data['id']),
                }
    # NOTE(review): passes None as the source object while on_stop_record
    # passes self -- confirm which the mixer expects.
    self.__mixer.onStartRecord(None, rec_data)
def on_stop_record(self, obj, data):
    """UI handler: stop the in-progress recording for this clip id."""
    logger.debug('on_stop_record %s',str(data))
    clip_id = data['id']
    self.__mixer.onStopRecord(self, {'id': clip_id,
                                     'filename': self.__temp_audio_choice_path(clip_id)})
def on_start_play(self, obj, data):
    """UI handler: start playback of the clip identified by data['id']."""
    logger.debug('on_start_play %s',str(data))
    self.__mixer.onStartPlay(self, {'id': data['id']})
def on_stop_play(self, obj, data):
    """UI handler: stop playback of the clip identified by data['id']."""
    logger.debug('on_stop_play %s',str(data))
    self.__mixer.onStopPlay(self, {'id': data['id']})
def __check_expired(self):
    """
    Called via gobject timeout. Gets rid of expired polls.

    For each expired poll this node is tracking, collects the stored votes,
    shells out to rubyvote to tally them, and broadcasts the results.
    Returns True so gobject keeps the timeout alive.
    """
    #print 'check-expired'
    new_active = []
    for poll in self.__active:
        import time
        print poll.expires - time.time(),'remaining on ',poll.mid
        if poll.is_expired():
            logger.info('poll expired')
            vote_headers = self.__v_manager.get_votes_for(poll.mid)
            if vote_headers:
                votes = []
                for vote_header in vote_headers:
                    f = open(vote_header.get_file_path(), mode='rb')
                    choice_ids = pickle.load(f)
                    f.close()
                    #print 'loaded',choice_ids
                    votes.append(','.join(map(str, choice_ids)))
                # rubyvote's input format: ballots joined by ';', choices by ','.
                vote_string = ';'.join(map(str,votes))
                logger.debug('Vote string: %s',vote_string)
                prev_dir = os.getcwd()
                # Need to change working directory to
                # run rubyvote.. stupid
                os.chdir(self.__rubyvote_dir)
                # NOTE(review): the command string is built from unpickled
                # vote data via os.popen (a shell); choice ids should be
                # ints, but this is worth hardening against injection.
                command = 'ruby %s %i %s' % (self.__rubyvote_path,
                                             poll.meta[PollManager.TYPE], vote_string)
                rubyvote = os.popen(command, 'r')
                sys.stdout.flush()
                result = rubyvote.readline()
                rubyvote.close()
                # Change dir back
                os.chdir(prev_dir)
                print 'Result: ', result
                results = simplejson.loads(result)
                logger.debug('results: %s', results)
                data = {
                    'results':results,
                    'poll':poll,
                }
                self.send_result(data)
        else:
            new_active.append(poll)
    self.__active = new_active
    return True
gobject.type_register(PollManager) | Python |
from db import db
from fvutil.managers import objectmanager
from fvutil import fvcon
from util import config
from presence import ps
import gobject
import random
import os
import logging.config
logging.config.fileConfig('logging.config')
logger = logging.getLogger('fluidvoice.tivo')
from controlmsgs import StartTivo, ExcludeFromTivo
class TivoPresence(ps.ObjectPresence):
    """Presence-service advertisement for a stored tivo recording."""
    def __init__(self, oid, name):
        ps.ObjectPresence.__init__(self, oid, name)
class TivoManager(objectmanager.ObjectManager):
    """Records group conversations ("tivos"): negotiates participation with
    online neighbours via control messages, drives the mixer's recording,
    and distributes finished recordings as fvcon.TIVO objects.
    """
    __gsignals__ = {
        'sent_tivo' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                       (gobject.TYPE_INT, gobject.TYPE_INT,)),
        'received_tivo' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                           (gobject.TYPE_PYOBJECT,)),
        'warning_tivo' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                          (gobject.TYPE_INT,)),
        'started_tivo' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                          (gobject.TYPE_INT,)),
    }
    # Custom meta keys: participating and offline user-id lists.
    ACTIVES = 17652
    OFFLINES = 17653
    def __init__(self, async, my_id, mixer, control):
        objectmanager.ObjectManager.__init__(self, fvcon.TIVO, my_id)
        self.__async = async
        self.__control = control
        self.__mixer = mixer
        self.__control.register_type_receiver(StartTivo, self.__on_recv_start_tivo)
        self.__control.register_type_receiver(ExcludeFromTivo, self.__on_recv_exclude_from_tivo)
        self.__path = config.get('async.folders', fvcon.TIVO)
        self.__my_id = my_id
        # tivo_id -> (user_ids, offline_user_ids, callback)   [pending start]
        self.__pending_tivos = {}
        # tivo_id -> (user_ids, offline_user_ids)              [recording now]
        self.__current_tivos = {}
        print 'TivoManager: Path - ',self.__path
        # Re-register existing recordings as mixer inputs.
        for detail in self.get_all():
            detail['status'] = fvcon.ONLINE
            print 'Adding input for',detail
            self.__mixer.onAddInput(self, detail)
def send(self, data):
    """Ship a finished recording to all participants (online and offline).

    @param data: dict with 'tivo_id', 'actives' and 'offlines' user-id lists.
    Emits 'sent_tivo' with the new message id.
    """
    logger.debug('Sending tivo with data %s', data)
    tivo_id = data['tivo_id']
    actives = data['actives']
    offlines = data['offlines']
    # TODO: tag with the usernames, not the user ids
    dests = actives + offlines
    actives = map(str, actives)
    offlines = map(str, offlines)
    tags = ','.join(actives + offlines)
    tivo_path = os.path.join(self.__path, str(tivo_id) + '.tivo')
    meta = {
        fvcon.TAGS: tags,
        TivoManager.ACTIVES : actives,
        TivoManager.OFFLINES : offlines,
    }
    header = self.__async.send_message(tivo_path, fvcon.TIVO, dests, meta)
    self.add(header)
    print 'Added tivo:',header.mid
    self.emit('sent_tivo', header.mid, fvcon.TIVO)
def receive(self, header):
    """Store an incoming recording and notify the UI via 'received_tivo'."""
    logger.debug('Received tivo id %s', header.mid)
    self.add(header)
    self.emit('received_tivo', self.to_flash(header))
def __on_recv_exclude_from_tivo(self, eft):
    """Control-message handler: a peer opted out of one of our pending tivos.

    Removes the peer from the pending participant list; a no-op if the tivo
    already started (or was never pending here).
    """
    user_id = eft.user_id
    tivo_id = eft.tivo_id
    logger.info('Received exclude_from_tv. user_id: %s, tivo_id: %s', user_id, tivo_id)
    if tivo_id in self.__pending_tivos:
        (user_ids, offline_user_ids, callback) = self.__pending_tivos[tivo_id]
        # NOTE(review): list.remove raises ValueError if the peer is absent;
        # confirm a duplicate exclude cannot arrive.
        user_ids.remove(user_id)
        logger.debug('Removed user_id %s from pending tivo_id %s', user_id, tivo_id)
    else:
        logger.debug('Pending_tivos didnt have a tivo_id %s', tivo_id)
# TODO: Abstract this. I'm doing the same thing twice.
def __on_recv_start_tivo(self, st):
    """Control-message handler: a peer started a tivo that includes us.

    Joins the tivo locally without re-broadcasting StartTivo (from_me=False).
    """
    user_ids = st.user_ids
    tivo_id = st.tivo_id
    logger.info('Received start_tivo. user_ids: %s, tivo_id: %s', user_ids, tivo_id)
    # NOTE(review): raises ValueError if we are not in the list -- confirm
    # the sender always includes the recipient.
    user_ids.remove(self.__my_id)
    self.on_start_tivo(None, tivo_id, user_ids , from_me=False)
def on_start_tivo(self, obj, tivo_id, user_ids, from_me=True):
    """Begin the tivo start handshake.

    Splits *user_ids* into online/offline, notifies the online peers (only
    when this node initiated), then schedules the actual recording start
    after a grace period so peers can opt out. Emits 'warning_tivo'.
    """
    logger.debug('on_start_tivo: %s', user_ids)
    offline_user_ids = []
    for user_id in user_ids:
        if not self.__control.is_neighbor(user_id):
            offline_user_ids.append(user_id)
        else:
            if from_me:
                self.__control.send(StartTivo(tivo_id, user_ids + [self.__my_id]), user_id)
    # TODO: There must be sugar around this
    for ou in offline_user_ids:
        user_ids.remove(ou)
    tivo_path = os.path.join(self.__path, str(tivo_id) + '.tivo')
    # Delay gives invited peers a chance to send ExcludeFromTivo first.
    callback = gobject.timeout_add(config.get_int('fv', 'tivo-start-timeout-ms'), self.__on_do_start_tivo, tivo_id, tivo_path)
    self.__pending_tivos[tivo_id] = (user_ids, offline_user_ids, callback)
    print self.__pending_tivos[tivo_id]
    self.emit('warning_tivo', tivo_id)
def on_stop_tivo(self, obj, tivo_id):
    """Stop an in-progress recording and distribute the result via send()."""
    logger.debug('on_stop_tivo: %s', tivo_id)
    self.__mixer.onStopTivo(self, tivo_id)
    # KeyError here means the tivo was never started (or already stopped).
    (user_ids, offline_user_ids) = self.__current_tivos[tivo_id]
    del self.__current_tivos[tivo_id]
    data = {'tivo_id': tivo_id,
            'actives': user_ids,
            'offlines': offline_user_ids,
            }
    self.send(data)
def on_exclude_me_from_tivo(self, obj, tivo_id):
    """UI handler: opt this node out of a pending tivo.

    Cancels the scheduled local start and tells every invited peer to drop
    us from their participant lists.
    """
    logger.debug('on_exclude_from_tivo: %s', tivo_id)
    # BUGFIX: pending entries are (user_ids, offline_user_ids, callback)
    # 3-tuples (see on_start_tivo); the old 2-name unpack raised ValueError
    # and had the fields in the wrong order anyway.
    (user_ids, offline_user_ids, callback) = self.__pending_tivos[tivo_id]
    gobject.source_remove(callback)
    for user_id in user_ids:
        # Exclude me from all these people's TIVOs
        self.__control.send(ExcludeFromTivo(tivo_id, self.__my_id), user_id)
    # TODO(review): should the pending entry be deleted here as well, so a
    # late __on_do_start_tivo cannot fire for it?
def __on_do_start_tivo(self, tivo_id, tivo_path):
    """Timeout callback: the opt-out grace period ended -- actually start
    recording with whoever is still in the pending participant list.
    Emits 'started_tivo'.
    """
    logger.debug('__on_do_start_tivo: %s', tivo_id)
    (user_ids, offline_user_ids, callback) = self.__pending_tivos[tivo_id]
    logger.debug('About to start tivo with participating user_ids %s', user_ids)
    self.__mixer.onStartTivo(self, user_ids, tivo_id, tivo_path)
    del self.__pending_tivos[tivo_id]
    self.__current_tivos[tivo_id] = (user_ids, offline_user_ids)
    self.emit('started_tivo', tivo_id)
def to_flash(self, header):
    """Translate a stored tivo header into the dict the Flash UI expects."""
    tags = header.meta[fvcon.TAGS]
    return {'type': fvcon.TIVO,
            'subcommand': fvcon.ADD,
            'id': header.mid,
            'label': tags,
            'tags': tags,
            'date': header.created,
            'authorID': header.src,
            'src': header.src,
            'local_path': header.get_file_path(),
            }
import gobject
import time
import random
import os
from fvutil.managers import objectmanager
from fvutil import fvcon
from util import config
from presence import ps
class ShoppingPresence(ps.ObjectPresence):
    """Presence-service advertisement for a stored shopping list."""
    def __init__(self, oid, name):
        ps.ObjectPresence.__init__(self, oid, name)
class ShoppingManager(objectmanager.ObjectManager):
__gsignals__ = {
'created_list' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT, gobject.TYPE_INT,)),
'received_list' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
}
# Custom meta keys
STORE = 13001
DURATION = 13002
ITEMS = 13003
def __init__(self, async, my_id):
objectmanager.ObjectManager.__init__(self, fvcon.SHOPPING, my_id)
self.__async = async
self.__path = config.get('async.folders', str(fvcon.SHOPPING))
def send(self, shopping_list):
tags = shopping_list['tags']
items = shopping_list['items']
dests = shopping_list['recipients']
expires = time.time() + shopping_list['duration']
meta = {
fvcon.TAGS: tags,
ShoppingManager.STORE: shopping_list['store'],
ShoppingManager.DURATION: shopping_list['duration'],
ShoppingManager.ITEMS: shopping_list['items']
}
id = random.randint(1,100000)
path = os.path.join(self.__path, str(id) + '.shop')
f = open(path, mode='wa')
f.write(str(items))
f.close()
assert os.path.exists(path)
header = self.__async.send_message(path, fvcon.SHOPPING, dests, meta)
self.add(header)
def receive(self, header):
self.add(header)
flash = self.to_flash(header)
self.emit('received_list', flash )
def to_flash(self, header):
return {'type':fvcon.SHOPPING,
'subcommand':fvcon.ADD,
'id':header.mid,
'label':header.meta[fvcon.TAGS],
'time':header.created,
'authorID':header.src,
'tags':header.meta[fvcon.TAGS],
'src':header.src,
'items':header.meta[ShoppingManager.ITEMS],
'store':header.meta[ShoppingManager.STORE],
'duration':header.meta[ShoppingManager.DURATION] } | Python |
import gobject
from db import db
from fvutil import fvcon
from util import config
import random
import time
class GroupManager(gobject.GObject):
__gsignals__ = {
'return_groups' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'group_appeared' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
}
def __init__(self, async, my_id):
gobject.GObject.__init__(self)
_db = db.get_database(config.get('fv', 'db-path'), config.get('fv', 'object-db'))
self._table = _db.get_or_create(GroupTable)
self._type = fvcon.GROUP
# ObjectManager API
def get(self, oid):
return self.to_flash(self._table.get(oid))
def get_all(self):
return map(self.to_flash, self._table.get_all())
def on_new_group(self, obj, data):
name = 'New group'
gid = random.randint(1,1000000)
self._table.add_group(gid, name)
obj.updateObject(self, {'subcommand': fvcon.ADD,
'type': fvcon.GROUP,
'id':gid,
'label':name,
'status':fvcon.AVAILABLE
})
def on_add_user_to_group(self, obj, gid, uid):
self._table.add_user_to_group(gid, uid)
def on_del_user_from_group(self, obj, gid, uid):
self._table.del_user_from_group(gid, uid)
def on_show_group(self, obj, data):
raise NotImplementedError
def on_del_group(self, obj, data):
raise NotImplementedError
def on_update_group_name(self, obj, gid, name):
self._table.update_group_name(gid, name)
def to_flash(self, header_dict):
return {'type':fvcon.GROUP,
'subcommand': fvcon.ADD,
'label':header_dict['name'],
'members':header_dict['members']}
header_dict['type'] = fvcon.GROUP
return header_dict
gobject.type_register(GroupManager)
# Provides the same interface as the ObjectTable
class GroupTable(db.Table):
def __init__(self):
db.Table.__init__(self)
def _init(self):
# This is kind of bad. We are reproducing users here; we have users
# in the presence service as well. Oh well.
self._conn.execute('''
CREATE TABLE users
(uid INTEGER PRIMARY KEY, name STRING, timestamp INTEGER)''')
self._conn.execute('''
CREATE TABLE groups
(gid INTEGER PRIMARY KEY, name STRING, created INTEGER, deleted INTEGER)''')
self._conn.execute('''
CREATE TABLE groupmembers
(gid INTEGER, uid INTEGER, joined INTEGER, left INTEGER, PRIMARY KEY(gid, uid))''')
def _drop(self):
self._conn.execute('''
drop table users
''')
self._conn.execute('''
drop table groups
''')
self._conn.execute('''
drop table groupmembers
''')
def get(self, gid):
c = self._conn
groups = c.execute(''' SELECT * FROM groups WHERE gid=? ''', (gid,))
group = None
if groups:
group = groups[0]
if group is None:
raise ValueError
rows = c.execute(''' SELECT * FROM groupmembers INNER JOIN users ON groupmembers.uid = users.uid WHERE gid=? ''', (gid,))
print gid, rows
result = {'gid':group['gid'], 'name':group['name'], 'created':group['created'], 'deleted':group['deleted'], active:group['deleted'] is not False, 'members':[]}
for r in rows:
if r['left'] == -1:
result['members'].append(r)
return result
def get_all(self):
c = self._conn
rows = c.execute(''' SELECT * FROM groups ''')
groups = {}
for group in rows:
groups[group['gid']] = {'gid':group['gid'], 'name':group['name'], 'created':group['created'], 'deleted':group['deleted'], 'active':group['deleted'] is not False, 'members':[]}
print groups
for (gid, group) in groups.iteritems():
rows = c.execute(''' SELECT * FROM groupmembers INNER JOIN users ON groupmembers.uid = users.uid WHERE gid=? ''', (gid,))
for r in rows:
if r['left'] == -1:
group['members'].append(r)
print 'RETURNING GROUPS!!_!__!_!_!',groups.values()
return groups.values()
def __get_user_dets(self, uid):
c = self._conn
[user] = c.execute(''' SELECT * FROM users WHERE uid=?''', (uid,))
return user
def __get_group_dets(self, gid):
c = self._conn
[group] = c.execute(''' SELECT * FROM groups WHERE gid=?''', (gid,))
return group
def add_group(self, gid, name):
c = self._conn
print gid,name,int(time.time()), None
c.execute(''' INSERT INTO groups VALUES (?,?,?,?) ''', (gid, name, int(time.time()), None))
def add_user_to_group(self, uid, gid):
try:
# Migh be added multiple times, can't let that happen
c = self._conn
c.execute(''' INSERT INTO groupmembers VALUES (?,?,?,?) ''', (gid, uid, int(time.time()), None))
except:
c = self._conn
c.execute(''' UPDATE groupmembers SET left=? WHERE uid=? AND gid=? ''', (-1, uid,gid))
def del_user_from_group(self, uid, gid):
c = self._conn
c.execute(''' UPDATE groupmembers SET left=? WHERE uid=? AND gid=? ''', (int(time.time()), uid, gid))
def update_group_name(self, gid, name):
c = self._conn
c.execute(''' UPDATE groups SET name=? WHERE gid=? ''', (name, gid))
def __db_to_py(self, row):
row['meta'] = pickle.loads(row['meta'].encode('ascii'))
row['dests'] = pickle.loads(row['dests'].encode('ascii'))
return row
def __py_to_db(self, data):
data['meta'] = pickle.dumps(data['meta'])
data['dests'] = pickle.dumps(data['dests'])
return data
| Python |
from datetime import datetime
from guid import *
import cPickle
import time
import gobject
import os
from ui.uiconstants import GROUP, AVAILABLE, ADD, DELETE, PRIORITY_MEDIUM
from messaging2.ahconstants import MESSAGING_PATH
from data import db
from util.persist import *
class GroupService(gobject.GObject):
    """
    GroupService manages storage and retrieval of groups of users.
    """
    __gsignals__ = {
        # create signals that will be emitted when message received from the Flash UI
        'return_groups' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                           (gobject.TYPE_PYOBJECT,)),
        # taken care of by OPEN_OBJECT
        #'return_show_group' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
        # (gobject.TYPE_PYOBJECT,)),
        'group_appeared' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
                            (gobject.TYPE_PYOBJECT,)),
    }
    def __init__(self, db):
        """
        Initializes the GroupService
        @param db: persistence backend exposing getGroups/addGroup/addMember/
            remMember/remGroup/updateGroup
        """
        gobject.GObject.__init__( self )
        self._counter = 1 # used for creating new groups with distinct name
        self._db = db
def addLookupNameCallback( self, lookupName ):
    """
    Method used to get the string name of the user
    @param lookupName: callable mapping a user id to a display name;
        stored as self.cbLookupName.
    """
    self.cbLookupName = lookupName
"""
Methods that respond to UI requests
"""
def onGetGroups(self, obj):
    """
    Fetch every group from the backend and emit it to the UI.
    @return list of list of groups and members
    [[(gid,gname),(uid,uname),(uid,uname)]]
    """
    self.emit( 'return_groups', self._db.getGroups() )
def onNewGroup( self, obj, dummy ):
    """
    Create an empty group with a fresh GUID and a generated name, persist
    it, and push it back to the UI.
    @param obj: L{FlashInterface}
    @param dummy: It's just a dummy int required for gobject
    """
    group_name = "New"+str(self._counter)
    guid = GUID(datetime.now())
    self._counter += 1
    self._db.addGroup( guid, group_name )
    obj.updateObject( self, {'subcommand':ADD, 'type':GROUP, 'id':guid, 'label':group_name, 'status':AVAILABLE} )
def onShowGroup(self, obj, group_id):
    """
    Return the members of a group
    @param obj: L{FlashInterface}
    @param group_id: id of the group I am interested
    """
    # NOTE(review): self.getGroup is not defined in the visible part of this
    # class (only getGroups is) -- confirm it exists further down the file.
    group = self.getGroup( group_id )
    # group is a list of tuples. First tuple is group information
    obj.showGroup( {'id':group_id, 'members':group[1:]} )
def onAddUsersToGroup(self, obj, group_id, user_ids):
    """
    Add several users to one group in a single UI action.
    Unknown groups are a no-op in the backend.
    @param obj: L{FlashInterface} that triggered the event
    @param group_id: guid of the group to add to
    @param user_ids: list of user GUIDs to add
    """
    for uid in user_ids:
        self._db.addMember(group_id, uid)
def onRemUsersFromGroup(self, obj, group_id, user_ids):
    """
    Remove several users from one group in a single UI action.
    Unknown groups are a no-op in the backend.
    @param obj: L{FlashInterface} that triggered the event
    @param group_id: guid of the group to remove from
    @param user_ids: list of user GUIDs to remove
    """
    for uid in user_ids:
        self._db.remMember(group_id, uid)
def onAddUserToGroup( self, obj, group_id, user_id ):
    """
    Add a single user to a group; unknown groups have no effect.

    @param obj: the object that triggered the event (L{FlashInterface})
    @param group_id: GUID of the group to add to
    @param user_id: GUID of the user to add
    """
    self._db.addMember( group_id, user_id )
def onDelUserFromGroup( self, obj, group_id, user_id ):
    """
    Remove a single user from a group; unknown groups have no effect.

    @param obj: the object that triggered the event (L{FlashInterface})
    @param group_id: GUID of the group to remove from
    @param user_id: GUID of the user to remove
    """
    self._db.remMember( group_id, user_id )
def onDelGroup( self, obj, group_id ):
    """
    Delete the group identified by *group_id* from the service.

    @param obj: the L{FlashInterface} that triggered the event
    @param group_id: GUID of the group to remove
    """
    self._db.remGroup( group_id )
def onUpdateGroupName( self, obj, group_id, name ):
    """
    Rename a group.

    @param group_id: GUID of the group
    @param name: new display name for the group
    """
    self._db.updateGroup( group_id, name )
"""
Methods to manage groups
"""
def generateGroup( self, name, members ):
    """
    Announce a group inferred from members having been near each other
    for a long time.

    @param name: name of the group
    @param members: list of (GUID, name) member tuples
    @return: GUID of the created group
    """
    # BUGFIX: 'guid' was referenced without ever being assigned (NameError);
    # mint one the same way onNewGroup does, and honour the documented
    # return value.
    guid = GUID(datetime.now())
    # TODO(review): should this also persist via self._db.addGroup, as
    # onNewGroup does?  Left un-persisted to match the original intent.
    self.emit( 'group_appeared', [ (guid, name) ] + members )
    return guid
def getGroups(self):
    """
    Return a metadata dictionary for every group owned by this service.
    Used to initialise the UI.
    """
    db_list = self._db.getGroups()
    print(db_list)
    return [ {'subcommand': ADD,
              'id': group[0][0],
              'type': GROUP,
              'label': group[0][1],
              'status': AVAILABLE,
              'priority': PRIORITY_MEDIUM,
              'date': time.time()}
             for group in db_list ]
def getGroupDetails( self, group_id ):
    """
    Build the detail dictionary shown when the user opens a group
    (called by L{ObjectBroker} on a UI double-click).
    """
    rows = self._db.getGroup( group_id )
    # rows[0] is the group's own (gid, gname) tuple; the rest are members.
    return {'type':GROUP, 'id':group_id, 'name':rows[0][1],
            'volume':1, 'balance':0, 'quality':1, 'members':rows[1:]}
# Register GroupService with the GObject type system so its signals can be emitted.
gobject.type_register( GroupService )
if __name__ == "__main__":
    # Ad-hoc smoke tests, run only when this module is executed directly.
    def testGroup():
        # NOTE(review): 'Group' is not defined in this module's visible
        # scope -- presumably defined/imported elsewhere; confirm it exists.
        g = Group('Temp', [(1,'hello'),(2,'why')])
        print g.getMembers()
        time.sleep(1)
        g.removeMember(1)
        print g.getMembers()
        time.sleep(1)
        g.addMember(3, 'johnny')
        print g.getMembers()
        time.sleep(1)
        # Adding an id that is already a member -- exercises duplicate handling.
        g.addMember(3, 'holy')
        print g.getMembers()
        time.sleep(1)
        # Re-adding a previously removed id under a new name.
        g.addMember(1, 'kate')
        print g.getMembers()
        time.sleep(1)
        print g.getHistoryString()
        print g.getHistoryOf(1)
        print g.getHistoryOfString(1)
    def testGroupService():
        def flattenGroups(groups):
            # One group per line, using each group's str() form.
            rs = ""
            for g in groups:
                rs += str(g) + "\n"
            return rs
        gs = GroupService()
        # NOTE(review): GroupService.createGroup is not visible in this file;
        # verify it exists before running this harness.
        guid = gs.createGroup([(1,'hello'),(2,'why')])
        groups = gs.getGroups()
        #print flattenGroups(groups_
    testGroup()
    testGroupService()
| Python |
from util import config
class Constants():
    """
    Namespace object whose attributes mirror the 'fv.types', 'fv.ui' and
    'fv.metas' sections of fv.config: keys are upper-cased, values are
    evaluated as Python expressions (integers in practice).
    """
    def __init__(self):
        config.add_file('fv.config')
        # One loop instead of three copy-pasted ones; setattr instead of
        # building 'self.X = ...' statement strings for exec().
        for section in ('fv.types', 'fv.ui', 'fv.metas'):
            for (name, value) in config.items(section):
                # NOTE: eval() keeps the original exec() semantics -- config
                # values are trusted expressions.  Do not point this at an
                # untrusted config file.
                setattr(self, name.upper(), eval(value))

# Shared singleton instance used throughout the fv package.
fvcon = Constants()
from db import db
from util import config
import sha
import random
class Accounts(db.Table):
    """
    SQLite-backed account store mapping usernames to salted SHA-1 password
    hashes, stored as 'algo$salt$hexdigest'.
    """
    def _init(self):
        # Schema-creation hook invoked by db.Table.
        self._conn.execute('''
            create table accounts
            (uid INTEGER PRIMARY KEY, username STRING NOT NULL, password STRING NOT NULL)
            ''')
    def _drop(self):
        self._conn.execute('''
            drop table accounts
            ''')
    def authenticate(self, username, raw_password):
        """
        Return True iff *raw_password* matches the stored hash for *username*.

        BUGFIX: unknown usernames now return False; previously the
        single-row unpacking raised ValueError on an empty result set.
        """
        rows = list(self._conn.execute('''
            SELECT * FROM accounts WHERE username=?''', (username,)))
        if len(rows) != 1:
            return False
        enc_password = rows[0]['password']
        algo, salt, hash = enc_password.split('$')
        return (hash == sha.new(salt+raw_password).hexdigest())
    def create_user(self, username, password):
        """
        Insert a new account and return its randomly chosen uid.

        NOTE(review): uid collisions are possible (1 in a million per pair)
        and would violate the PRIMARY KEY -- consider AUTOINCREMENT.
        """
        algo = 'sha1'
        salt = sha.new(str(random.random())).hexdigest()[:5]
        hash = sha.new(salt+password).hexdigest()
        password = '%s$%s$%s' % (algo, salt, hash)
        id = random.randint(0,1000000) #1 in a million!
        self._conn.execute('''
            INSERT INTO accounts VALUES (?,?,?) ''', (id, username, password))
        return id
    def user_exists(self, username):
        """Return True iff exactly one account row exists for *username*."""
        [result] = self._conn.execute("""
            SELECT COUNT(*) as count FROM accounts WHERE username=?""", (username,))
        return result['count'] == 1
    @staticmethod
    def instance():
        """Open (or create) the accounts table in the configured database."""
        database = db.get_database(
            config.get('fv.accounts', 'db-dir'),
            config.get('fv.accounts', 'db-file'))
        return database.get_or_create(Accounts)
| Python |
from fvutil import fvcon
class ObjectBroker:
def __init__(self, presence_wrapper, group, audio, text, poll, shopping, tivo):
self.__on_open_handlers = {
fvcon.AUDIO : audio.get,
fvcon.TEXT : text.get,
fvcon.POLL_Q : poll.get_poll,
fvcon.POLL_R : poll.get_result,
fvcon.SHOPPING : shopping.get,
fvcon.FV : presence_wrapper.get,
fvcon.PHONE : presence_wrapper.get,
fvcon.SERVICE : presence_wrapper.get,
fvcon.TIVO : tivo.get,
}
"""
self.__on_edit_handlers = {
fvcon.TEXT : text.get_text_details,
fvcon.POLL_Q : poll.get_poll_details,
fvcon.POLL_R : poll.get_poll_result_details,
}
"""
self.__on_edit_handlers = {}
self.__on_send_handlers = {
fvcon.AUDIO : audio.send,
fvcon.TEXT : text.send,
fvcon.POLL_Q : poll.send_question,
fvcon.POLL_V : poll.send_vote,
fvcon.SHOPPING : shopping.send,
}
self.__on_receive_handlers = {
fvcon.AUDIO : audio.receive,
fvcon.TEXT : text.receive,
fvcon.POLL_Q : poll.receive_question,
fvcon.POLL_V : poll.receive_vote,
fvcon.POLL_R : poll.receive_result,
fvcon.SHOPPING : shopping.receive,
fvcon.TIVO : tivo.receive,
}
self.__get_all_handlers = (text.get_all,
audio.get_all,
poll.get_all,
group.get_all,
shopping.get_all,
presence_wrapper.get_all,
tivo.get_all)
def on_open_object(self, obj, oid, o_type):
print 'Broker: On_open_object',oid,o_type
handler = self.__on_open_handlers.get(o_type, None)
if handler:
#return handler(oid)
return obj.returnObject(handler(oid))
else:
raise ArgumentError('No open handler defined for object type ' + o_type)
def on_edit_object(self, obj, params):
# Not clear that this is used.
pass
def on_send_object(self, obj, params):
handler = self.__on_send_handlers.get(params['type'], None)
if handler:
handler(params)
else:
raise KeyError('No send handler defined for object type ' + str(params['type']))
def on_receive_object(self, obj, header):
print 'received message type',header.msg_type
handler = self.__on_receive_handlers.get(header.msg_type, None)
if handler:
handler(header)
else:
raise KeyError('No receive handler defined for object type ' + str(header.msg_type))
def get_all_objects(self, obj=None, data=None):
# obj, data exist if this is signal callback
objs = []
for handler in self.__get_all_handlers:
objs += handler()
print objs
return objs | Python |
class StartTivo():
    """Request message: start the given Tivo session for a set of users."""
    def __init__(self, tivo_id, user_ids):
        # id of the Tivo session/device to start
        self.tivo_id = tivo_id
        # users taking part in the session
        self.user_ids = user_ids
class ExcludeFromTivo():
    """
    Sent by a user requesting to be excluded from Tivo.
    """
    def __init__(self, tivo_id, user_id):
        # the Tivo session to opt out of
        self.tivo_id = tivo_id
        # the user opting out
        self.user_id = user_id
import sys, time
import socket, asyncore
import Queue
import simplejson
import gobject
from util import config
from net import network
from fvutil import fvcon
# When True, every packet exchanged with the Flash UI is echoed to stdout.
FLASH_DEBUG = True
# Flash socket-policy response: allows ANY domain to connect on ANY port.
# NOTE(review): wide open -- acceptable on a trusted LAN; confirm before
# deploying anywhere else.
POLICY = """
<cross-domain-policy>
<allow-access-from domain="*" to-ports="*" />
</cross-domain-policy>
"""
class FlashInterface(gobject.GObject):
"""
Messages from the UI are received through this interface and are also sent to the UI
In order to receive, add the message type that is received in L{processRecv} and
add a signal that would be emitted for the application to respond.
In order to send a message, create a method in this class and call L{put} method to
put it in the queue for delivery to the user interface.
"""
__gsignals__ = {
# create signals that will be emitted when message received from the Flash UI
'login': (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_STRING, gobject.TYPE_STRING,)),
'send_object' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'start_record' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'stop_record' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'start_play' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'stop_play' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'poll_start_record' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'poll_stop_record' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'poll_start_play' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'poll_stop_play' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'get_groups' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'new_group' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'show_group' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'add_user_to_group' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,gobject.TYPE_PYOBJECT,)),
'del_user_from_group' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT, gobject.TYPE_PYOBJECT,)),
'del_group' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'update_group_name' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT, gobject.TYPE_STRING,)),
'push_to_talk' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'open_object' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,gobject.TYPE_INT,)),
'edit_object' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT,)),
'edit_quality' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'get_messages' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT,)),
'get_message_count' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_PYOBJECT, gobject.TYPE_PYOBJECT,)),
'start_tivo' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT, gobject.TYPE_PYOBJECT,)),
'stop_tivo' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
'exclude_me_from_tivo' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
(gobject.TYPE_INT,)),
}
class Listener(asyncore.dispatcher):
def __init__(self, host, port, data_cb, init_cb):
asyncore.dispatcher.__init__(self)
self.create_socket(socket.AF_INET, socket.SOCK_STREAM)
self.socket.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.setblocking(0)
self.bind((host, port))
self.channel = None
self.__host = host
self.__port = port
self.__data_cb = data_cb
self.__init_cb = init_cb
self.listen(1)
print 'FlashInterface.Listener bound to ',host,port
def handle_accept(self):
channel, address = self.accept()
self.channel = FlashChannel(channel, self.__host, self.__port)
self.channel.add_receive_listener(self.__data_cb)
self.__init_cb()
def __init__(self, iface, local):
gobject.GObject.__init__(self)
if local:
host = 'localhost'
else:
host = network.get_ip_addr(iface)
port = config.get_int('fv', 'flash-port')
self.__commands = {}
for (name, value) in config.items('fv.ui'):
self.__commands[name.upper()] = int(value)
types = {}
for (name, value) in config.items('fv.types'):
types[name.upper()] = int(value)
self.__commands['types'] = types
self.__listener = FlashInterface.Listener(host, port, self.__process_recv, self.__initialize)
def __put(self, msg):
if self.__listener.channel is not None:
self.__listener.channel.toflash.put(msg)
def __process_recv( self, data ):
"""
Callback for responding to Flash user actions
Pass to L{FlashChannel.addReceiveListener}
@type data: JSON data
@param data: dictionary of command encoded in JSON
"""
local = False
if FLASH_DEBUG:
print "flash interface received: ", data
data_list = data.split('\x00')
if data_list[0] == '<policy-file-request/>':
# Flash asking what socket policy is
print 'policy'
self.__put(POLICY)
return
# Format : JSON-encoded data
for data in data_list[:-1]:
if len(data) == 0:
# This is probably a disconnect
return
# decode
data = simplejson.loads(data)
if FLASH_DEBUG:
print "flash interface decoded into ", data
# data is a dictionary
header = data['command']
if header == fvcon.LOGIN:
print "LOGIN"
self.emit("login", data['username'], data['password'])
elif header == fvcon.EDIT_OBJECT:
print 'edit-object'
self.emit('edit_object', data )
elif header == fvcon.EDIT_QUALITY:
print "EDIT_QUALITY", data['quality']
self.emit('edit_quality', data['quality'])
elif header == fvcon.REQUEST_START_RECORD_VOICE:
print "Start record"
self.emit("start_record", data )
elif header == fvcon.REQUEST_STOP_RECORD_VOICE:
print "Stop recording"
self.emit("stop_record", data )
elif header == fvcon.REQUEST_START_PLAY_VOICE:
print "Start play voice"
self.emit("start_play", data )
elif header == fvcon.REQUEST_STOP_PLAY_VOICE:
print "Stop play voice"
self.emit("stop_play", data )
elif header == fvcon.POLL_START_RECORD:
print "Start record poll"
self.emit("poll_start_record", data )
elif header == fvcon.POLL_STOP_RECORD:
print "Stop record poll"
self.emit("poll_stop_record", data )
elif header == fvcon.POLL_START_PLAY:
print "Start play poll"
self.emit("poll_start_play", data )
elif header == fvcon.POLL_STOP_PLAY:
print "Stop play poll"
self.emit("poll_stop_play", data )
elif header == fvcon.SEND_OBJECT:
print "Send voice or text"
self.emit("send_object", data )
elif header == fvcon.OPEN_OBJECT:
print "Get msgs"
self.emit("open_object", data["id"], data["type"])
elif header == fvcon.QUALITY_BAD:
print "Quality bad"
elif header == fvcon.GET_GROUPS:
print "Get the groups"
self.emit("get_groups", 1)
elif header == fvcon.NEW_GROUP:
print "Create new group"
self.emit("new_group", 1 )
elif header == fvcon.SHOW_GROUP:
self.emit("show_group", data["id"])
elif header == fvcon.ADD_USER_TO_GROUP:
print "Add user to group"
self.emit("add_user_to_group", data['id'], data['user_id'])
elif header == fvcon.DEL_USER_FROM_GROUP:
print "Remove user from group"
self.emit("del_user_from_group", data['id'], data['user_id'])
elif header == fvcon.DEL_GROUP:
print "Delete a group"
self.emit("del_group", data['id'])
elif header == fvcon.UPDATE_GROUP_NAME:
self.emit( "update_group_name", data['id'], data['label'] )
elif header == fvcon.PUSH_TO_TALK_RELEASE:
self.emit( "push_to_talk", 0 )
elif header == fvcon.PUSH_TO_TALK_PRESS:
self.emit( "push_to_talk", 1 )
elif header == fvcon.REQUEST_ATTENTION:
print "Attention pressed"
self.emit( "push_to_talk", 2 )
# methods for Timeline
elif header == fvcon.GET_MESSAGES:
self.emit( "get_messages", data['begin'], data['end'])
elif header == fvcon.GET_MESSAGE_COUNT:
self.emit( "get_message_count", data['begin'], data['end'])
elif header == fvcon.START_TIVO:
print "Start tivo"
self.emit("start_tivo", data['tivo_id'], data['user_ids'])
elif header == fvcon.STOP_TIVO:
print "Stop tivo"
self.emit("stop_tivo", data['tivo_id'])
elif header == fvcon.EXCLUDE_ME_FROM_TIVO:
print "Exclude from tivo"
self.emit("exclude_from_tivo", data['tivo_id'])
"""
Create methods to talk to flash
Compose message and put in the queue
"""
def loginAck(self, username, init_objs):
if username is None:
self.__put( {'command':fvcon.ACK_LOGIN, 'objs': {}, 'username': 0 } )
else:
self.__put( {'command':fvcon.ACK_LOGIN, 'objs': init_objs, 'username':username} )
def __initialize(self):
"""
When Flash UI connects, this method is called to transfer all
command value pair mapping
"""
self.__put( {'command':fvcon.INITIALIZE, 'commands': self.__commands} )
def updateObject(self, obj, changed_obj):
"""
@param obj: L{PresenceService}
@param changed_obj: object that has changed, new message arrived, offline users or services
"""
self.__put( {'command': fvcon.UPDATE_OBJECTS, 'objs':[changed_obj] } )
def onAckPushToTalk(self, obj, ack):
"""
@param ack: acknowledgement code, 0 for release ack, 1 for press ack
@type ack: Number
@param obj: L{MixerInterface}
"""
if ack == 1:
self.__put( {'command':fvcon.ACK_PUSH_TO_TALK_PRESS} )
elif ack == 0:
self.__put( {'command':fvcon.ACK_PUSH_TO_TALK_RELEASE} )
def pollsent( self ):
self.__put( {'command':fvcon.ACKCREATEPOLL} )
def startedRecord( self, obj, id ):
self.__put( {'command':fvcon.START_RECORD_VOICE, 'id':id} )
def stoppedRecord( self, obj, id):
self.__put( {'command':fvcon.STOP_RECORD_VOICE, 'id':id} )
def startedPlay( self, obj, id ):
self.__put( {'command':fvcon.START_PLAY_VOICE, 'id':id} )
def stoppedPlay( self, obj, id ):
self.__put( {'command':fvcon.STOP_PLAY_VOICE, 'id':id} )
def onStartedTivo(self, obj, id):
self.__put( {'command':fvcon.STARTED_TIVO, 'tivo_id':id })
def onWarningTivo(self, obj, id):
self.__put( {'command':fvcon.WARNING_TIVO, 'tivo_id':id })
def sentObject(self, obj, guid, type):
"""
@param obj: The specific object managers that confirm that this object was sent
@param guid: guid of the object
@param type: Type of the object that was sent out
"""
self.__put( {'command':fvcon.ACK_SEND_OBJECT, 'id':guid, 'type':type } )
def resultsReady(self, obj, obj_list):
self.__put({'command':fvcon.UPDATE_OBJECTS, 'objs':obj_list})
"""
Group related replies
"""
def sendGroups( self, obj, groupdata ):
"""
@param groupdata: Group ids and user ids of the members
@type groupdata: list of [(group id, group name), (id, name), (id, name), (id, name)]
"""
self.__put( {'command':fvcon.ACK_GET_GROUPS, 'groupdata':groupdata} )
def showGroup( self, group_info ):
"""
@param group_info: {'id': id of group, 'members':[(id,name), (id, name)..]}
"""
group_info['command'] = fvcon.ACK_SHOW_GROUP
self.__put( group_info )
def groupAppeared( self, obj, groupobjs ):
"""
@param groupdata: Group ids and user ids of the members
@type groupdata: [(group id, group name), (id, name), (id, name), (id, name)]
"""
#self.__put( {'command':GROUP_APPEARED, 'groupdata':groupdata } )
self.__put( {'command':fvcon.UPDATE_OBJECTS, 'objs':[{'subcommand': uicon.ADD, 'id':227, 'name':'Poll->John', 'status':uicon.AVAILABLE, 'type':uicon.GROUP, 'priority':uicon.PRIORITY_HIGH}]} )
def returnObject(self, metadata):
"""
@param metadata: contains information about a requested object along with its guid
@type metadata: Dictionary
"""
metadata['command'] = fvcon.ACK_OPEN_OBJECT
self.__put( metadata )
def attentionRequest(self, obj, guid ):
"""
@param guid: id of the node asking for attention
@param obj: L{PresenceService}
"""
self.__put( {'command':fvcon.ATTENTION, 'id':guid })
def sendResult(self, obj, result ):
"""
When the caller has all the result formatted
L{Timeline} uses it to update the timeline
"""
self.__put( result )
# Register FlashInterface with the GObject type system so its signals can be emitted.
gobject.type_register(FlashInterface)
class FlashChannel( asyncore.dispatcher ):
    """
    Asyncore channel for one accepted connection from the Flash UI.

    Outgoing messages are queued on self.toflash and flushed by
    handle_write; incoming bytes are forwarded to the callback installed
    via add_receive_listener.
    """
    def __init__(self, channel, host, port ):
        asyncore.dispatcher.__init__(self, channel)
        # Queue of Python objects waiting to be JSON-encoded and sent to Flash.
        self.toflash = Queue.Queue()
        self._HOST = host
        self._PORT = port
    def add_receive_listener( self, callback ):
        """
        @type callback: method
        @param callback: method that receives raw packets from the Flash UI
        and generates the appropriate signals
        """
        self.recvdMsg = callback
    def handle_connect( self ):
        pass
    def handle_read( self ):
        """
        Receive up to 512 bytes from the Flash UI and hand them to the listener.
        """
        pkt, address = self.recvfrom( 512 )
        if FLASH_DEBUG:
            print "Flash UI Received:",len( pkt )
        if len(pkt) == 0:
            # Peer disconnected: close the channel.
            # NOTE(review): recvdMsg is still invoked below with the empty
            # packet; FlashInterface.__process_recv tolerates this, but it
            # looks accidental -- confirm.
            self.close()
        self.recvdMsg( pkt )
    def handle_write( self ):
        """
        Send one queued command to the Flash UI.
        Messages are queued into self.toflash by FlashInterface.__put.
        """
        msg = self.toflash.get()
        packet = simplejson.dumps( msg ) + "\x00" # Need to end with an EOF
        if FLASH_DEBUG:
            print "Sent " + str( packet )
        # NOTE(review): sendto() with an address on a connected TCP socket --
        # send() would be the conventional call here; confirm before changing.
        self.sendto( packet, (self._HOST, self._PORT) )
    def writable( self ):
        # Only request write events from asyncore while something is queued.
        return not self.toflash.empty()
if __name__ == "__main__":
    # BUGFIX: FlashInterface.__init__ requires (iface, local); the old call
    # passed only the interface name and crashed with a TypeError.
    f = FlashInterface( "eth1", False )
    asyncore.loop(0.01)
import string,cgi,time
from os import curdir, sep
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
import gobject
import thread
import datetime
class WebHandler(BaseHTTPRequestHandler):
def do_GET(self):
if self.path == "/":
self.show_index()
return
if self.path.endswith(".html"):
self.show_page(self.path)
return
if self.path.endswith(".wav"):
print "path ends with .wav"
WebServer.instance().emit('send-audio', self.path)
self.show_index("Broadcasted " + self.path + " at " + str(datetime.datetime.now()) + "<br/>")
return
def show_page(self, path, response_text=""):
try:
f = open(curdir + sep + "web/" + path) #self.path has /test.html
#note that this potentially makes every file on your computer readable by the internet
self.send_response(200)
self.send_header('Content-type', 'text/html')
self.end_headers()
if response_text:
self.wfile.write(response_text)
self.wfile.write(f.read())
f.close()
except IOError:
self.send_error(404,'File Not Found: %s' % self.path)
def show_index(self, response_text=""):
self.show_page("index.html", response_text)
def do_POST(self):
clen = self.headers.getheader('content-length')
if clen:
clen = int(clen)
else:
print 'POST ERROR: missing content-length'
return
input_body = self.rfile.read(clen)
data = cgi.parse_qs(input_body)
if data['door']:
# Someone at door
WebServer.instance().emit('door',1)
self.show_page("index.html")
class WebServer(gobject.GObject):
    """
    Singleton HTTP server (port 8000) that re-publishes web events as
    gobject signals; requests are served on a background thread.
    """
    __gsignals__ = {
        # Someone is at the door
        'door' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_LONG,)),
        # Broadcast the named .wav file
        'send-audio' : (gobject.SIGNAL_RUN_LAST, gobject.TYPE_NONE,
            (gobject.TYPE_STRING,)),
    }
    __instance = None
    @classmethod
    def instance(cls):
        """
        Return the singleton created by __init__.

        BUGFIX: when no instance existed this used to attempt WebServer()
        with no arguments, which crashed with a TypeError (ip is required);
        fail loudly with a clear message instead.
        """
        if WebServer.__instance is None:
            raise RuntimeError('WebServer.instance() called before a WebServer was constructed')
        return WebServer.__instance
    def __init__(self, ip):
        gobject.GObject.__init__(self)
        self.server = HTTPServer((ip, 8000), WebHandler)
        WebServer.__instance = self
        # Serve on a daemon-ish worker so the gobject main loop keeps running.
        thread.start_new_thread(self.server.serve_forever, ())
# Register WebServer with the GObject type system so its signals can be emitted.
gobject.type_register(WebServer)
| Python |
#!/usr/bin/env python
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
import serial
import os
import threading
# GLUT keyboard codes (octal escapes): ESC plus the a-f keys.
ESCAPE = '\033'
KEY_a = '\141'
KEY_b = '\142'
KEY_c = '\143'
KEY_d = '\144'
KEY_e = '\145'
KEY_f = '\146'
#Initiate the glut window
window = 0
# Rotation angles for the cube (degrees, one per axis).  Stepped each frame
# by DrawGLScene and overwritten by the serial reader thread when data arrives.
X_AXIS = 0.0
Y_AXIS = 0.0
Z_AXIS = 0.0
#AXIS direction (sign of the per-frame rotation step)
DIRECTION = 1
# A general OpenGL initialization function. Sets all of the initial parameters.
# We call this right after our OpenGL window is created.
def InitGL(Width, Height):
    """
    One-time OpenGL state setup; call right after the window is created.

    @param Width: initial window width, used for the aspect ratio
    @param Height: initial window height, used for the aspect ratio
    """
    # This Will Clear The Background Color To Black
    glClearColor(0.0, 0.0, 0.0, 0.0)
    # Enables Clearing Of The Depth Buffer
    glClearDepth(1.0)
    # The Type Of Depth Test To Do
    glDepthFunc(GL_LESS)
    # Enables Depth Testing
    glEnable(GL_DEPTH_TEST)
    # Enables Smooth Color Shading
    glShadeModel(GL_SMOOTH)
    glMatrixMode(GL_PROJECTION)
    # Reset The Projection Matrix
    glLoadIdentity()
    # Calculate The Aspect Ratio Of The Window
    gluPerspective(45.0, float(Width)/float(Height), 0.1, 100.0)
    glMatrixMode(GL_MODELVIEW)
# The function called whenever a key is pressed.
# Note the use of Python tuples to pass in: (key, x, y)
def keyPressed(*args):
    """
    GLUT keyboard callback; GLUT passes (key, x, y).

    Only ESC is handled (exits the program).  The old per-key axis nudges
    for a-f were commented-out dead code and have been removed, along with
    the global declarations they required.
    """
    # If escape is pressed, kill everything.
    if args[0] == ESCAPE:
        sys.exit()
def DrawGLScene():
print "Draw scene"
global X_AXIS,Y_AXIS,Z_AXIS
print X_AXIS, Y_AXIS
global DIRECTION
#clear the screen and the depth buffer
glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
# Reset The View
glLoadIdentity()
# Move Right And Into The Screen
glTranslatef(0.0,0.0,-6.0)
# Rotate The Cube On X
glRotatef(X_AXIS,1.0,0.0,0.0)
# Rotate The Cube On Y
glRotatef(Y_AXIS,0.0,1.0,0.0)
# Rotate The Cube On Z
glRotatef(Z_AXIS,0.0,0.0,1.0)
# Start Drawing The Cube
glBegin(GL_QUADS)
# Set The Color To Blue
glColor3f(0.0,1.0,0.0)
# Top Right Of The Quad (Top)
glVertex3f( 1.0, 1.0,-1.0)
# Top Left Of The Quad (Top)
glVertex3f(-1.0, 1.0,-1.0)
# Bottom Left Of The Quad (Top)
glVertex3f(-1.0, 1.0, 1.0)
# Bottom Right Of The Quad (Top)
glVertex3f( 1.0, 1.0, 1.0)
# Set The Color To Orange
glColor3f(1.0,0.5,0.0)
# Top Right Of The Quad (Bottom)
glVertex3f( 1.0,-1.0, 1.0)
# Top Left Of The Quad (Bottom)
glVertex3f(-1.0,-1.0, 1.0)
# Bottom Left Of The Quad (Bottom)
glVertex3f(-1.0,-1.0,-1.0)
# Bottom Right Of The Quad (Bottom)
glVertex3f( 1.0,-1.0,-1.0)
# Set The Color To Red
glColor3f(1.0,0.0,0.0)
# Top Right Of The Quad (Front)
glVertex3f( 1.0, 1.0, 1.0)
# Top Left Of The Quad (Front)
glVertex3f(-1.0, 1.0, 1.0)
# Bottom Left Of The Quad (Front)
glVertex3f(-1.0,-1.0, 1.0)
# Bottom Right Of The Quad (Front)
glVertex3f( 1.0,-1.0, 1.0)
# Set The Color To Yellow
glColor3f(1.0,1.0,0.0)
# Bottom Left Of The Quad (Back)
glVertex3f( 1.0,-1.0,-1.0)
# Bottom Right Of The Quad (Back)
glVertex3f(-1.0,-1.0,-1.0)
# Top Right Of The Quad (Back)
glVertex3f(-1.0, 1.0,-1.0)
# Top Left Of The Quad (Back)
glVertex3f( 1.0, 1.0,-1.0)
# Set The Color To Blue
glColor3f(0.0,0.0,1.0)
# Top Right Of The Quad (Left)
glVertex3f(-1.0, 1.0, 1.0)
# Top Left Of The Quad (Left)
glVertex3f(-1.0, 1.0,-1.0)
# Bottom Left Of The Quad (Left)
glVertex3f(-1.0,-1.0,-1.0)
# Bottom Right Of The Quad (Left)
glVertex3f(-1.0,-1.0, 1.0)
# Set The Color To Violet
glColor3f(1.0,0.0,1.0)
# Top Right Of The Quad (Right)
glVertex3f( 1.0, 1.0,-1.0)
# Top Left Of The Quad (Right)
glVertex3f( 1.0, 1.0, 1.0)
# Bottom Left Of The Quad (Right)
glVertex3f( 1.0,-1.0, 1.0)
# Bottom Right Of The Quad (Right)
glVertex3f( 1.0,-1.0,-1.0)
# Done Drawing The Quad
glEnd()
if X_AXIS > 360 or X_AXIS == 0 or X_AXIS < -360:
X_AXIS = 0.0
else:
X_AXIS = X_AXIS + 0.3*(DIRECTION)
if Y_AXIS > 360 or Y_AXIS == 0 or Y_AXIS < -360:
Y_AXIS = 0.0
else:
Y_AXIS = Y_AXIS + 0.3*(DIRECTION)
if Z_AXIS > 360 or Z_AXIS == 0 or Z_AXIS < -360:
Z_AXIS = 0.0
else:
Z_AXIS = Z_AXIS + 0.3*(DIRECTION)
# since this is double buffered, swap the buffers to display what just got drawn.
glutSwapBuffers()
def main():
    """Create the GLUT window, register callbacks, and enter the GLUT loop (never returns)."""
    global window
    #initialize the GLUT library
    glutInit(sys.argv)
    # Bit mask to select an RGBA mode window
    # GLUT_DOUBLE specifies we want a double buffer.
    # Double buffering enables us to finish drawing before our
    # image is sent to the screen, preventing flicker.
    # GLUT_DEPTH specifies we want a depth buffer.
    # The depth buffer ensures that objects near the camera will
    # always be on top of those further away. The output tends to be
    # a bit messy otherwise.
    glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH)
    #get a window size
    glutInitWindowSize(640,480)
    #the window starts at the upper left corner of the screen
    glutInitWindowPosition(200,200)
    #create a window and set its title
    window = glutCreateWindow('Accelerometer Cube')
    # Register the drawing function with glut, BUT in Python land,
    # at least using PyOpenGL, we need to set the function pointer and
    # invoke a function to actually register the callback,
    # otherwise it would be very much like the C version of the code.
    glutDisplayFunc(DrawGLScene)
    # When we are doing nothing, redraw the scene.
    glutIdleFunc(DrawGLScene)
    # Register the function called when the keyboard is pressed.
    glutKeyboardFunc(keyPressed)
    # Initialize our window.
    # I don't think we need to call this function. -haley
    InitGL(640, 480)
    #start event processing engine
    glutMainLoop()
class SerialThread:
def __init__(self,ser):
print "Serial thread"
self.ser = ser
self.serthread = threading.Thread(target=self.run)
self.serthread.daemon = True
def start(self):
self.serthread.start()
def run(self):
global X_AXIS, Y_AXIS, Z_AXIS
while True:
line = ser.readline()
try:
# the line should look like
# yaw pitch roll
acc = [float(x) for x in line.split()]
except Exception, e:
print e
continue
if len(acc) > 2:
acc = array(acc)
# for smoothing signals
acc = acc * .013563368 - 4.6389;
yaw = acc[0]#arctan(sqrt(acc[0] * acc[0] + acc[1] * acc[1]) / acc[2]) * 180 / pi
pitch = acc[1]#arctan(acc[0]/sqrt(acc[1] * acc[1] + acc[2] * acc[2])) * 180 / pi
roll = acc[2]#arctan(acc[1]/sqrt(acc[0] * acc[0] + acc[2] * acc[2])) * 180 / pi
X_AXIS = -.2 * pitch + .8 * X_AXIS # -pitch
Y_AXIS = -.2 * yaw + .8 * Y_AXIS#0.0 #yaw #roll # 0.0
Z_AXIS = .2 * roll + .8 * Z_AXIS # roll
print yaw, pitch, roll
import serial
from numpy import *
import threading
import sys
if __name__ == "__main__":
glthread = threading.Thread(target=main)
glthread.daemon = True
try:
ser = serial.Serial('/dev/cu.usbserial-11FP0084', 115200)
except serial.SerialException, e:
print e
print "Serial exception"
sys.exit(0)
s = SerialThread(ser)
s.start()
glthread.start()
while 1:
x = 1 + 1 # what's this line for?
pass # ???
| Python |
#!/usr/bin/env python
from OpenGL.GL import *
from OpenGL.GLUT import *
from OpenGL.GLU import *
import serial
import os
import threading
# GLUT keyboard codes (octal escapes): ESC plus the a-f keys.
ESCAPE = '\033'
KEY_a = '\141'
KEY_b = '\142'
KEY_c = '\143'
KEY_d = '\144'
KEY_e = '\145'
KEY_f = '\146'
#Initiate the glut window
window = 0
# Rotation angles for the cube (degrees, one per axis); stepped each frame
# by DrawGLScene.
X_AXIS = 0.0
Y_AXIS = 0.0
Z_AXIS = 0.0
#AXIS direction (sign of the per-frame rotation step)
DIRECTION = 1
# A general OpenGL initialization function. Sets all of the initial parameters.
# We call this right after our OpenGL window is created.
def InitGL(Width, Height):
    """
    One-time OpenGL state setup; call right after the window is created.

    @param Width: initial window width, used for the aspect ratio
    @param Height: initial window height, used for the aspect ratio
    """
    # This Will Clear The Background Color To Black
    glClearColor(0.0, 0.0, 0.0, 0.0)
    # Enables Clearing Of The Depth Buffer
    glClearDepth(1.0)
    # The Type Of Depth Test To Do
    glDepthFunc(GL_LESS)
    # Enables Depth Testing
    glEnable(GL_DEPTH_TEST)
    # Enables Smooth Color Shading
    glShadeModel(GL_SMOOTH)
    glMatrixMode(GL_PROJECTION)
    # Reset The Projection Matrix
    glLoadIdentity()
    # Calculate The Aspect Ratio Of The Window
    gluPerspective(45.0, float(Width)/float(Height), 0.1, 100.0)
    glMatrixMode(GL_MODELVIEW)
# The function called whenever a key is pressed.
# Note the use of Python tuples to pass in: (key, x, y)
def keyPressed(*args):
    """
    GLUT keyboard callback; GLUT passes (key, x, y).

    Only ESC is handled (exits the program).  The old per-key axis nudges
    for a-f were commented-out dead code and have been removed, along with
    the global declarations they required.
    """
    # If escape is pressed, kill everything.
    if args[0] == ESCAPE:
        sys.exit()
def _step_angle(angle):
    """Advance one auto-rotation step; reset to 0.0 once past +/-360 degrees.

    Note: an angle of exactly 0 maps back to 0, so auto-rotation only runs
    while the serial thread keeps the angle non-zero.
    """
    if angle > 360 or angle == 0 or angle < -360:
        return 0.0
    return angle + 0.3 * DIRECTION

def DrawGLScene():
    """Render the cube once, rotated by the current axis angles.

    Registered as both the GLUT display and idle callback, so this runs
    continuously. The per-frame debug prints that used to live here were
    removed: they flooded stdout and slowed every frame.
    """
    global X_AXIS, Y_AXIS, Z_AXIS
    # Clear the screen and the depth buffer, then reset the view.
    glClear(GL_COLOR_BUFFER_BIT | GL_DEPTH_BUFFER_BIT)
    glLoadIdentity()
    glTranslatef(0.0, 0.0, -6.0)       # move the cube into the screen
    glRotatef(X_AXIS, 1.0, 0.0, 0.0)
    glRotatef(Y_AXIS, 0.0, 1.0, 0.0)
    glRotatef(Z_AXIS, 0.0, 0.0, 1.0)
    # Draw the cube: one color and four vertices per face.
    glBegin(GL_QUADS)
    glColor3f(0.0, 1.0, 0.0)           # top (green)
    glVertex3f( 1.0, 1.0, -1.0)
    glVertex3f(-1.0, 1.0, -1.0)
    glVertex3f(-1.0, 1.0,  1.0)
    glVertex3f( 1.0, 1.0,  1.0)
    glColor3f(1.0, 0.5, 0.0)           # bottom (orange)
    glVertex3f( 1.0, -1.0,  1.0)
    glVertex3f(-1.0, -1.0,  1.0)
    glVertex3f(-1.0, -1.0, -1.0)
    glVertex3f( 1.0, -1.0, -1.0)
    glColor3f(1.0, 0.0, 0.0)           # front (red)
    glVertex3f( 1.0,  1.0, 1.0)
    glVertex3f(-1.0,  1.0, 1.0)
    glVertex3f(-1.0, -1.0, 1.0)
    glVertex3f( 1.0, -1.0, 1.0)
    glColor3f(1.0, 1.0, 0.0)           # back (yellow)
    glVertex3f( 1.0, -1.0, -1.0)
    glVertex3f(-1.0, -1.0, -1.0)
    glVertex3f(-1.0,  1.0, -1.0)
    glVertex3f( 1.0,  1.0, -1.0)
    glColor3f(0.0, 0.0, 1.0)           # left (blue)
    glVertex3f(-1.0,  1.0,  1.0)
    glVertex3f(-1.0,  1.0, -1.0)
    glVertex3f(-1.0, -1.0, -1.0)
    glVertex3f(-1.0, -1.0,  1.0)
    glColor3f(1.0, 0.0, 1.0)           # right (violet)
    glVertex3f( 1.0,  1.0, -1.0)
    glVertex3f( 1.0,  1.0,  1.0)
    glVertex3f( 1.0, -1.0,  1.0)
    glVertex3f( 1.0, -1.0, -1.0)
    glEnd()
    # Step the auto-rotation for the next frame (one shared helper instead
    # of three copy-pasted wrap checks).
    X_AXIS = _step_angle(X_AXIS)
    Y_AXIS = _step_angle(Y_AXIS)
    Z_AXIS = _step_angle(Z_AXIS)
    # Double buffered: swap to display what was just drawn.
    glutSwapBuffers()
def main():
    """Create the GLUT window, register callbacks, and enter the main loop."""
    global window
    glutInit(sys.argv)
    # RGBA, double-buffered (finish drawing off-screen, then swap, to avoid
    # flicker), with a depth buffer so near faces occlude far ones.
    glutInitDisplayMode(GLUT_RGBA | GLUT_DOUBLE | GLUT_DEPTH)
    glutInitWindowSize(640,480)
    # The window starts at the upper left corner of the screen.
    glutInitWindowPosition(200,200)
    window = glutCreateWindow('Accelerometer Cube')
    # PyOpenGL registers callbacks through these setter functions.
    glutDisplayFunc(DrawGLScene)
    # When we are doing nothing, redraw the scene.
    glutIdleFunc(DrawGLScene)
    glutKeyboardFunc(keyPressed)
    # Configure projection/depth state for our window.
    InitGL(640, 480)
    # Start the event processing engine; never returns.
    glutMainLoop()
class SerialThread:
    """Reads "yaw pitch roll" lines from a serial port on a daemon thread
    and low-pass filters them into the global cube rotation angles."""

    def __init__(self, ser):
        self.ser = ser
        self.serthread = threading.Thread(target=self.run)
        self.serthread.daemon = True  # don't block interpreter exit

    def start(self):
        """Start the background reader thread."""
        self.serthread.start()

    def run(self):
        global X_AXIS, Y_AXIS, Z_AXIS
        while True:
            # BUG FIX: read from self.ser, not the module-global 'ser', so
            # the thread uses whatever port it was constructed with.
            line = self.ser.readline()
            try:
                # Expected format: "yaw pitch roll", whitespace separated.
                acc = [float(x) for x in line.split()]
            except Exception as e:
                print(e)
                continue
            if len(acc) > 2:
                acc = array(acc)
                # Scale raw ADC counts to degrees (empirical calibration
                # constants -- confirm against the firmware).
                acc = acc * .013563368 - 4.6389
                yaw = acc[0]
                pitch = acc[1]
                roll = acc[2]
                # Exponential smoothing: 20% new sample, 80% history.
                X_AXIS = -.2 * pitch + .8 * X_AXIS
                Y_AXIS = -.2 * yaw + .8 * Y_AXIS
                Z_AXIS = .2 * roll + .8 * Z_AXIS
import serial
from numpy import *
import threading
import sys
if __name__ == "__main__":
glthread = threading.Thread(target=main)
glthread.daemon = True
try:
ser = serial.Serial('/dev/cu.usbserial-11FP0084', 115200)
except serial.SerialException, e:
print e
print "Serial exception"
sys.exit(0)
s = SerialThread(ser)
s.start()
glthread.start()
while 1:
x = 1 + 1 # what's this line for?
pass # ???
| Python |
import json
import re
import sys
import urllib
import urllib2
from urlparse import urlparse, parse_qs

import StorageServer
from BeautifulSoup import BeautifulSoup
import xbmc
import xbmcaddon
import xbmcgui
import xbmcplugin
# Kodi helpers shared by every function below.
# "flwoutdoors" is the cache table name; 6 is presumably the cache lifetime
# in hours per StorageServer convention -- TODO confirm.
cache = StorageServer.StorageServer("flwoutdoors", 6)
addon = xbmcaddon.Addon()
addon_version = addon.getAddonInfo('version')
addon_id = addon.getAddonInfo('id')
icon = addon.getAddonInfo('icon')
def addon_log(string):
    """Write a message to the Kodi log, tagged with the addon id/version.

    Encoding failures are replaced by a placeholder so logging can never
    crash the addon (the old bare `except:` is narrowed to Exception).
    """
    try:
        log_message = string.encode('utf-8', 'ignore')
    except Exception:
        log_message = 'addonException: addon_log'
    xbmc.log("[%s-%s]: %s" %(addon_id, addon_version, log_message),level=xbmc.LOGNOTICE)
def make_request(url, post_data=None):
    """Fetch *url* (GET, or POST when post_data is given) and return the body.

    Returns None on failure; errors are only logged, which matches how
    callers treat a missing response. The unused response_url variable was
    removed, and the response is now closed even if read() raises.
    """
    addon_log('Request URL: %s' %url)
    headers = {
        'User-agent': 'Mozilla/5.0 (Windows NT 6.1; WOW64; rv:24.0) Gecko/20100101 Firefox/24.0',
        'Referer': 'http://www.flwoutdoors.com'
    }
    try:
        req = urllib2.Request(url, post_data, headers)
        response = urllib2.urlopen(req)
        try:
            return response.read()
        finally:
            response.close()
    except urllib2.URLError as e:
        addon_log('We failed to open "%s".' % url)
        if hasattr(e, 'reason'):
            addon_log('We failed to reach a server.')
            addon_log('Reason: %s' %e.reason)
        if hasattr(e, 'code'):
            addon_log('We failed with error code - %s.' % e.code)
def cache_categories():
    """Scrape the on-demand page and return the channel list.

    The result is a repr() of a list of {'callsign', 'title'} dicts because
    StorageServer caches plain strings; display_categories() eval()s it.
    """
    url = 'http://www.flwoutdoors.com/flwondemand.cfm'
    soup = BeautifulSoup(make_request(url))
    items = soup.find('ul', attrs={'class': 'menu'})('a')
    prefix = 'divTab'
    cats = []
    for i in items:
        callsign = i['id']
        # BUG FIX: str.lstrip strips a *character set*, not a prefix, so
        # lstrip('divTab') could also eat leading letters of the callsign
        # itself (any leading a/b/d/i/t/v). Strip the literal prefix.
        if callsign.startswith(prefix):
            callsign = callsign[len(prefix):]
        cats.append({'callsign': callsign,
                     'title': i.string.encode('utf-8')})
    return repr(cats)
def display_categories():
    """List the top-level channel directories from the cached category list."""
    categories = eval(cache.cacheFunction(cache_categories))
    for category in categories:
        add_dir(category['title'], category['callsign'], 'category', icon)
def display_category(callsign):
    """List every video in a channel as a playable directory entry.

    Resolution order per item: embedded YouTube id (played through the
    YouTube addon), then the first populated media path in quality order.
    Items with no usable source are now skipped instead of crashing.
    """
    url = 'http://www.flwoutdoors.com/flwMedia/ajax.cfm'
    post_data = {'method': 'getVideosInChannel',
                 'callsign': callsign}
    data = json.loads(make_request(url, urllib.urlencode(post_data)))
    items = data['CHANNEL']['AFILE']
    for i in items:
        path = None
        # Prefer an embedded YouTube id.
        if i.has_key('YOUTUBEEMBED') and len(i['YOUTUBEEMBED']) > 0:
            youtube_embed = re.findall('src="(.+?)"', i['YOUTUBEEMBED'])
            if youtube_embed:
                try:
                    youtube_id = youtube_embed[0].split('/embed/')[1]
                    path = 'plugin://plugin.video.youtube/?action=play_video&videoid=%s' %youtube_id
                except IndexError:
                    # src URL without an /embed/ segment; fall through.
                    pass
        if not path:
            # Fall back to the first populated media path, in quality order.
            keys = ['PATH_ORIGINAL', 'STREAMING_PATH', 'PODCAST_PATH', 'MOBILE_PATH']
            for x in keys:
                if i.has_key(x) and len(i[x]) > 0:
                    path = i[x]
                    break
        if not path:
            # BUG FIX: items with no source used to crash on
            # path.startswith() below; skip them instead.
            continue
        if path.startswith('mp4:') and i.has_key('FILENAME_HD') and len(i['FILENAME_HD']) > 0:
            # Swap in the HD filename when one is available.
            path = path.replace(i['FILENAME'], i['FILENAME_HD'])
        duration = get_duration(i['DURATION'].split('.')[0])
        meta = {'Duration': duration}
        if i.has_key('DESCRIPTION') and len(i['DESCRIPTION']) > 0:
            meta['Plot'] = i['DESCRIPTION']
        add_dir(i['TITLE'].encode('utf-8'), path, 'resolve', i['THUMBNAIL'], meta, False)
def add_dir(name, url, mode, iconimage, meta=None, isfolder=True):
    """Append one directory/playable item to the plugin listing.

    meta now defaults to None rather than {}: the old shared default dict
    was mutated (meta["Title"] = name) on every default call -- the classic
    mutable-default-argument bug. Passing meta explicitly is unchanged.
    """
    if meta is None:
        meta = {}
    params = {'name': name, 'url': url, 'mode': mode}
    url = '%s?%s' %(sys.argv[0], urllib.urlencode(params))
    listitem = xbmcgui.ListItem(name, iconImage="DefaultFolder.png", thumbnailImage=iconimage)
    meta["Title"] = name
    if not isfolder:
        # Mark non-folders playable so Kodi routes them through 'resolve'.
        listitem.setProperty('IsPlayable', 'true')
    listitem.setInfo(type="Video", infoLabels=meta)
    xbmcplugin.addDirectoryItem(int(sys.argv[1]), url, listitem, isfolder)
def get_rtmp_url(path):
    """Build the librtmp URL string for an 'mp4:' playpath."""
    parts = [
        'rtmp://flwoutdoorsfs.fplive.net/flwoutdoors',
        'swfUrl=http://www.flwoutdoors.com/FLWMedia/FLWVideoPlayer.swf',
        'playpath=' + path,
        'app=flwoutdoors',
    ]
    return ' '.join(parts)
def get_duration(duration):
    """Convert a 'HH:MM:SS' (or 'HH:MM:SS:FF') string to whole minutes.

    Seconds >= 30 round the result up; the result is clamped to at least 1.
    Returns 1 for None and (new) for malformed strings without at least a
    minutes and a seconds field, which used to raise IndexError.
    """
    if duration is None:
        return 1
    d_split = duration.split(':')
    if len(d_split) == 4:
        # Trailing frame count -- drop it.
        del d_split[-1]
    if len(d_split) < 2:
        # Malformed value: report the 1-minute minimum rather than crash.
        return 1
    minutes = int(d_split[-2])
    if int(d_split[-1]) >= 30:
        minutes += 1
    if len(d_split) >= 3:
        minutes += (int(d_split[-3]) * 60)
    if minutes < 1:
        minutes = 1
    return minutes
def get_params():
    """Parse the plugin query string (sys.argv[2]) into a flat str->str dict.

    parse_qs maps each key to a list; only the first value is kept.
    """
    parsed = parse_qs(sys.argv[2][1:])
    return dict((key, values[0]) for key, values in parsed.items())
# ---- Plugin entry point: dispatch on the 'mode' query parameter. ----
params = get_params()
# dict.get replaces the old try/bare-except; mode is None on first launch.
mode = params.get('mode')
addon_log(repr(params))
if not mode:
    # Root listing: the channel categories.
    display_categories()
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode == 'category':
    # One channel's videos, shown with the episode view.
    display_category(params['url'])
    xbmcplugin.setContent(int(sys.argv[1]), 'episodes')
    xbmc.executebuiltin('Container.SetViewMode(503)')
    xbmcplugin.endOfDirectory(int(sys.argv[1]))
elif mode == 'resolve':
    # Resolve a playable path; 'mp4:' playpaths become rtmp URLs.
    path = params['url']
    if path.startswith('mp4:'):
        path = get_rtmp_url(path)
    item = xbmcgui.ListItem(path=path)
    xbmcplugin.setResolvedUrl(int(sys.argv[1]), True, item)
# Copyright 2009 Daniel Schubert
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for Flowered user events.
Contains several RequestHandler subclasses used to handle put and get
operations, along with any helper functions. This script is designed to be
run directly as a WSGI application, and within Flowered handles all URLs
under /event.
UpdateHandler: Handles user requests for updated lists of events.
ChatHandler: Handles user chat input events.
MoveHandler: Handles user movement events.
RefreshCache(): Checks the age of the cache, and updates if necessary.
"""
import datetime
import logging
import os
import time
import datamodel
import json
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.runtime.apiproxy_errors import CapabilityDisabledError
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
# The time interval between syncs as a timedelta.
sync_interval = datetime.timedelta(0, 10)
# A datetime indicating the last time the chat cache was synced from the DB.
last_sync = datetime.datetime.now() - sync_interval
# A list storing the add cache.
add_cache = []
# A list storing the move cache.
move_cache = []
# A list storing the delete cache.
remove_cache = []
class UpdateHandler(webapp.RequestHandler):
    """Handles user requests for updated lists of events.

    UpdateHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    "max_longitude", "zoom", and "since" fields.
    """

    def get(self):
        """Return adds/moves/removes newer than 'since' inside the box."""
        global sync_interval
        global last_sync
        global add_cache
        global move_cache
        global remove_cache
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        # zoom = self.request.get('zoom')
        if self.request.get('since') == '':
            since = 0
        else:
            since = float(self.request.get('since'))
        since_datetime = datetime.datetime.fromtimestamp(since)
        # Restrict latitude/longitude to restrict bulk downloads.
        #if (max_latitude - min_latitude) > 1:
        #    max_latitude = min_latitude + 1
        #if (max_longitude - min_longitude) > 1:
        #    max_longitude = min_longitude + 1

        def wanted(entry):
            # Newer than the client's last sync and strictly inside the box.
            return (entry.timestamp > since_datetime and
                    min_latitude < entry.geopt.lat < max_latitude and
                    min_longitude < entry.geopt.lon < max_longitude)

        add_events = []
        move_events = []
        remove_events = []
        if since > 0:
            RefreshCache()
            # One shared predicate replaces three copy-pasted filter loops.
            add_events = [entry for entry in add_cache if wanted(entry)]
            move_events = [entry for entry in move_cache if wanted(entry)]
            remove_events = [entry for entry in remove_cache if wanted(entry)]
        output = {
            'timestamp': time.time(),
            'adds': add_events,
            'moves': move_events,
            'removes': remove_events,
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output))
class InitialHandler(webapp.RequestHandler):
    """Serves the initial set of marks for a bounding box.

    InitialHandler only accepts "get" events, sent via web forms, carrying
    "min_latitude", "min_longitude", "max_latitude" and "max_longitude".
    """

    def get(self):
        request = self.request
        min_latitude = float(request.get('min_latitude'))
        min_longitude = float(request.get('min_longitude'))
        max_latitude = float(request.get('max_latitude'))
        max_longitude = float(request.get('max_longitude'))
        # Restrict latitude/longitude to restrict bulk downloads.
        #if (max_latitude - min_latitude) > 1:
        #    max_latitude = min_latitude + 1
        #if (max_longitude - min_longitude) > 1:
        #    max_longitude = min_longitude + 1
        # Query every mark inside the box (GeoPt comparison).
        min_geopt = db.GeoPt(min_latitude, min_longitude)
        max_geopt = db.GeoPt(max_latitude, max_longitude)
        query = datamodel.Mark.gql(
            'WHERE geopt > :min_geopt AND geopt < :max_geopt ',
            min_geopt = min_geopt, max_geopt = max_geopt)
        output = {
            'timestamp': time.time(),
            'adds': query.fetch(500)
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output));
class MoveHandler(webapp.RequestHandler):
    """Handles user movement events.

    MoveHandler only provides a post method for receiving new user
    co-ordinates. It updates the stored Mark and appends it to the local
    move cache so pollers see the move before the next refresh.
    """

    def post(self):
        global move_cache
        # Get the mark to modify and return if not exists.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark == None:
            return
        # Update current mark's position and timestamp.
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
                              float(self.request.get('longitude')))
        try:
            mark.put()
        except CapabilityDisabledError:
            # Datastore is read-only (maintenance); drop the write rather
            # than erroring out -- the cached position is still served.
            pass
        #logging.info('#### move=' + str(mark.geopt))
        # Append to the move cache, so we don't need to wait for a refresh.
        #add_cache.remove(mark)
        move_cache.append(mark)
class AddHandler(webapp.RequestHandler):
    """Creates a new Mark from posted form fields and caches the add event."""

    def post(self):
        global add_cache
        request = self.request
        # Build the new mark keyed by the client-supplied id.
        mark = datamodel.Mark(key_name = request.get('id'))
        mark.timestamp = datetime.datetime.now()
        latitude = float(request.get('latitude'))
        longitude = float(request.get('longitude'))
        mark.geopt = db.GeoPt(latitude, longitude)
        mark.type = str(request.get('type'))
        mark.project = str(request.get('project'))
        try:
            # Persist the mark; tolerate a read-only datastore.
            mark.put()
        except CapabilityDisabledError:
            pass
        # Cache immediately so pollers see it before the next refresh.
        add_cache.append(mark)
class DeleteHandler(webapp.RequestHandler):
    """Deletes a Mark and records the removal in the local remove cache."""

    def post(self):
        global remove_cache
        # Get the mark to delete and return if not exists.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark == None:
            return
        # Delete mark from datastore.
        try:
            db.delete(mark)
        except CapabilityDisabledError:
            # Datastore is read-only (maintenance); fail gracefully here.
            pass
        # Stamp the removal time so pollers can filter by 'since', then
        # append to the delete cache, so we don't need to wait for a refresh.
        mark.timestamp = datetime.datetime.now()
        #add_cache.remove(mark)
        remove_cache.append(mark)
def RefreshCache():
    """Trim the event caches if at least sync_interval has passed.

    Relies on the globals "sync_interval" and "last_sync" to decide whether
    the caches are stale. Each of "add_cache", "move_cache" and
    "remove_cache" is truncated to its 100 most recent entries (handlers
    append, so the tail is the newest data). The old docstring referenced a
    nonexistent "chat_cache"; the unused sync_frame variable and dead
    commented-out code were removed.
    """
    global sync_interval
    global last_sync
    global add_cache
    global move_cache
    global remove_cache
    now = datetime.datetime.now()
    if last_sync < now - sync_interval:
        last_sync = datetime.datetime.now()
        # Keep only the most recent 100 events per cache.
        add_cache = add_cache[-100:]
        move_cache = move_cache[-100:]
        remove_cache = remove_cache[-100:]
def main():
    """Main method called when the script is executed directly.

    This method is called each time the script is launched, and also has
    the effect of enabling caching for global variables (App Engine keeps
    the module warm between requests).
    """
    # logging.getLogger().setLevel(logging.DEBUG)
    application = webapp.WSGIApplication([
        ('/event/initial', InitialHandler),
        ('/event/add', AddHandler),
        ('/event/move', MoveHandler),
        ('/event/delete', DeleteHandler),
        ('/event/update', UpdateHandler),
    ], debug = True)
    run_wsgi_app(application)

if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main Flowered application.
Contains the MainHandler, which handles root requests to the server, along
with several other template-driven pages that don't have any significant DB
interaction.
SchwerinMainHandler: Handles requests to /schwerin
SchwerinStandaloneHandler: Handles requests to /schwerin/standalone
WorldMainHandler: Handles requests to /world
WorldStandaloneHandler: Handles requests to /world/standalone
"""
import datetime
import logging
import os
import time
import string
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import datamodel
import json
logging.info('Loading %s, app version = %s',
__name__, os.getenv('CURRENT_VERSION_ID'))
# Set to true if we want to have our webapp print stack traces, etc
_DEBUG = True
class BaseRequestHandler(webapp.RequestHandler):
    """Shared rendering logic for all Flowered page handlers.

    render_to_response() looks up per-project defaults (start location,
    zoom and template), lets the query string override the location ("ll")
    and zoom ("z"), and renders the chosen template. Most of the heavy
    lifting happens client-side in the JavaScript linked from the template.
    """

    # Per-project defaults: ["lat, lon" start location, zoom, template file].
    _PROJECT_TYPES = {
        'world': ['52.523405, 13.4114', '15', 'flowered.html'],
        'standalone_world': ['52.523405, 13.4114', '15', 'standalone.html'],
        'schwerin': ['53.625706, 11.416855', '15', 'flowered.html'],
        'standalone_schwerin': ['53.625706, 11.416855', '15', 'standalone.html'],}

    def render_to_response(self, project_name):
        """Render the template for *project_name*, falling back to 'world'."""
        if project_name not in BaseRequestHandler._PROJECT_TYPES:
            project_name = 'world'
        # Unpack the project defaults in one step.
        location, zoom, template_file = \
            BaseRequestHandler._PROJECT_TYPES[project_name]
        # Start location: "ll=lat,lon" query parameter, else project default.
        if self.request.get('ll') == '':
            initial_location = location
        else:
            initial_location = self.request.get('ll').lower()
        initial_latitude, _, initial_longitude = initial_location.partition(",")
        # Zoom level: "z" query parameter, else project default.
        if self.request.get('z') == '':
            initial_zoom = zoom
        else:
            initial_zoom = self.request.get('z').lower()
        # javascript:void(prompt('',gApplication.getMap().getCenter()))
        template_data = {
            'project_id': project_name,
            'initial_latitude': initial_latitude,
            'initial_longitude': initial_longitude,
            'initial_zoom': initial_zoom,
            'current_version_id' : self.version(),
        }
        template_path = os.path.join(os.path.dirname(__file__), 'templates', template_file)
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))

    def version(self):
        """Return the lower-cased deployed version name, or 'n/a' if absent.

        Uses str methods instead of the deprecated string-module functions.
        """
        current_version = os.getenv('CURRENT_VERSION_ID')
        version = current_version.split('.')
        if len(version) >= 2:
            return version[0].lower()
        else:
            return 'n/a'
class SchwerinHandler(BaseRequestHandler):
    """Handles requests to /schwerin.

    Renders the 'schwerin' project page; the heavy lifting happens
    client-side through the JavaScript linked from the template.
    """
    def get(self):
        self.render_to_response('schwerin')
class StandaloneSchwerinHandler(BaseRequestHandler):
    """Handles requests to /schwerin/standalone.

    Renders the standalone variant of the 'schwerin' project page.
    """
    def get(self):
        self.render_to_response('standalone_schwerin')
class WorldHandler(BaseRequestHandler):
    """Handles requests to /world.

    Renders the default 'world' project page.
    """
    def get(self):
        self.render_to_response('world')
class StandaloneWorldHandler(BaseRequestHandler):
    """Handles requests to /world/standalone.

    Renders the standalone variant of the 'world' project page.
    """
    def get(self):
        self.render_to_response('standalone_world')
class RedirectHandler(webapp.RequestHandler):
    """Handles requests to /.

    Redirects the user from the server root to the appropriate default
    sub-project (/world).
    """
    def get(self):
        self.redirect('/world')
def main():
    """Wire the URL routes and run the WSGI application.

    Route order matters: the more specific '/xxx/standalone.*' patterns
    must precede their '/xxx.*' project patterns, and '/.*' is the
    catch-all redirect to /world.
    """
    # logging.getLogger().setLevel(logging.DEBUG)
    application = webapp.WSGIApplication([
        ('/schwerin/standalone.*', StandaloneSchwerinHandler),
        ('/schwerin.*', SchwerinHandler),
        ('/world/standalone.*', StandaloneWorldHandler),
        ('/world.*', WorldHandler),
        ('/.*', RedirectHandler)
    ], debug = _DEBUG)
    run_wsgi_app(application)

if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes and methods for use with simplejson and appengine.
Provides both a specialized simplejson encoder, GqlEncoder, designed to simplify
encoding directly from GQL results to JSON. A helper function, encode, is also
provided to further simplify usage.
GqlEncoder: Adds support for GQL results and properties to simplejson.
encode(input): Direct method to encode GQL objects as JSON.
"""
import datetime
import time
import logging
from django.utils import simplejson
from google.appengine.api import users
from google.appengine.ext import db
class GqlEncoder(simplejson.JSONEncoder):
    """Extends JSONEncoder to add support for GQL results and properties.

    Adds support to simplejson JSONEncoders for GQL results and properties
    by overriding JSONEncoder's default method.
    """

    # TODO Improve coverage for all of App Engine's Property types.

    def default(self, obj):
        """Tests the input object, obj, to encode as JSON."""
        # Objects may supply their own JSON form via a __json__ hook.
        if hasattr(obj, '__json__'):
            return obj.__json__()
        if isinstance(obj, db.GqlQuery):
            # A query serializes as the list of its results.
            return list(obj)
        if isinstance(obj, db.GeoPt):
            return {'lat': obj.lat, 'lon': obj.lon}
        if isinstance(obj, db.Model):
            output = dict((field, getattr(obj, field))
                          for field, value in obj.properties().items())
            # Expose the datastore key id/name as 'id'.
            key = obj.key()
            if key.has_id_or_name():
                output['id'] = key.id_or_name()
            return output
        if isinstance(obj, datetime.datetime):
            # Datetimes become POSIX timestamps (seconds since the epoch).
            return time.mktime(obj.timetuple())
        if isinstance(obj, time.struct_time):
            return list(obj)
        # Anything else: defer to the base encoder (raises TypeError).
        return simplejson.JSONEncoder.default(self, obj)
def encode(input):
    """Encode an input GQL object as JSON.

    Args:
        input: A GQL object or DB property.

    Returns:
        A JSON string based on the input object.

    Raises:
        TypeError: Typically occurs when an input object contains an
            unsupported type.
    """
    # NOTE: 'input' shadows the builtin, but renaming it would change the
    # public keyword interface, so it is kept.
    return GqlEncoder().encode(input)
| Python |
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import db
"""Database models used in the Flowered application.
"""
class Mark(db.Model):
    """A geolocated marker; the key_name is the client-supplied mark id."""
    # auto_now_add stamps creation time; handlers overwrite it on moves
    # and deletes so pollers can filter by 'since'.
    timestamp = db.DateTimeProperty(auto_now_add = True)
    geopt = db.GeoPtProperty()     # marker position (lat/lon)
    type = db.StringProperty()     # client-defined marker type
    project = db.StringProperty()  # owning project name (e.g. 'world')
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for Geochat user events.
Contains several RequestHandler subclasses used to handle put and get
operations, along with any helper functions. This script is designed to be
run directly as a WSGI application, and within Geochat handles all URLs
under /event.
UpdateHandler: Handles user requests for updated lists of events.
ChatHandler: Handles user chat input events.
MoveHandler: Handles user movement events.
RefreshCache(): Checks the age of the cache, and updates if necessary.
"""
# TODO Cache sync problems.
# TODO Problem with duplicate messages.
# TODO Spam controls.
import datetime
import logging
import os
import time
import datamodel
import json
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
# The time interval between syncs as a timedelta.
sync_interval = datetime.timedelta(0, 10)
# A datetime indicating the last time the chat cache was synced from the DB.
last_sync = datetime.datetime.now() - sync_interval
# A list storing the move cache.
move_cache = []
# A list storing the add cache.
add_cache = []
# A list storing the delete cache.
remove_cache = []
class UpdateHandler(webapp.RequestHandler):
    """Handles user requests for updated lists of events.

    UpdateHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    "max_longitude", "zoom", and "since" fields.
    """

    def get(self):
        """Return adds/moves/removes newer than 'since' inside the box."""
        global sync_interval
        global last_sync
        global add_cache
        global move_cache
        global remove_cache
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        zoom = self.request.get('zoom')
        if self.request.get('since') == '':
            since = 0
        else:
            since = float(self.request.get('since'))
        since_datetime = datetime.datetime.fromtimestamp(since)
        # Clamp the box to 1x1 degree to restrict bulk downloads.
        if (max_latitude - min_latitude) > 1:
            max_latitude = min_latitude + 1
        if (max_longitude - min_longitude) > 1:
            max_longitude = min_longitude + 1

        def wanted(entry):
            # Newer than the client's last sync and strictly inside the box.
            return (entry.timestamp > since_datetime and
                    min_latitude < entry.geopt.lat < max_latitude and
                    min_longitude < entry.geopt.lon < max_longitude)

        add_events = []
        move_events = []
        remove_events = []
        if since > 0:
            RefreshCache()
            # One shared predicate replaces three copy-pasted filter loops.
            add_events = [entry for entry in add_cache if wanted(entry)]
            move_events = [entry for entry in move_cache if wanted(entry)]
            remove_events = [entry for entry in remove_cache if wanted(entry)]
        output = {
            'timestamp': time.time(),
            'adds': add_events,
            'moves': move_events,
            'removes': remove_events,
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output))
class InitialHandler(webapp.RequestHandler):
    """Serves the initial set of marks inside a bounding box.

    InitialHandler only accepts "get" events, sent via web forms, carrying
    "min_latitude", "min_longitude", "max_latitude" and "max_longitude"
    fields. (The old docstring was a copy-paste of UpdateHandler's.)
    """

    def get(self):
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        # Restrict latitude/longitude to restrict bulk downloads.
        if (max_latitude - min_latitude) > 1:
            max_latitude = min_latitude + 1
        if (max_longitude - min_longitude) > 1:
            max_longitude = min_longitude + 1
        # Query every mark inside the box (GeoPt comparison).
        query = datamodel.Mark.gql('WHERE geopt > :min_geopt AND geopt < :max_geopt ',
            min_geopt = db.GeoPt(min_latitude, min_longitude),
            max_geopt = db.GeoPt(max_latitude, max_longitude))
        # fetch() already returns a list; the old element-by-element copy
        # loop was redundant.
        add_events = query.fetch(100)
        output = {
            'timestamp': time.time(),
            'adds': add_events
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output))
class MoveHandler(webapp.RequestHandler):
    """Handles user movement events.

    MoveHandler only provides a post method for receiving new user
    co-ordinates; the updated mark is persisted and also appended straight
    to the local move cache so clients see it before the next refresh.
    """

    def post(self):
        global move_cache
        # Get the mark to modify and return if it does not exist.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:
            return
        # Update the current mark's position and timestamp.
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
                              float(self.request.get('longitude')))
        mark.put()
        # Append to the move cache, so we don't need to wait for a refresh.
        move_cache.append(mark)
class AddHandler(webapp.RequestHandler):
    """Handles creation of new marks posted by users."""

    def post(self):
        global add_cache
        # Build and persist a new mark entity keyed by the client-supplied id.
        mark = datamodel.Mark(key_name=self.request.get('id'))
        mark.timestamp = datetime.datetime.now()
        latitude = float(self.request.get('latitude'))
        longitude = float(self.request.get('longitude'))
        mark.geopt = db.GeoPt(latitude, longitude)
        mark.type = str(self.request.get('type'))
        mark.put()
        # Also push onto the local add cache so clients see the new mark
        # before the next refresh.
        add_cache.append(mark)
class DeleteHandler(webapp.RequestHandler):
    """Handles deletion of existing marks."""

    def post(self):
        global remove_cache
        # Get the mark to delete and return if it does not exist.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:
            return
        # Delete mark from datastore.
        db.delete(mark)
        # Append to the delete cache (stamped with the deletion time so
        # clients can filter by "since"), so we don't wait for a refresh.
        mark.timestamp = datetime.datetime.now()
        remove_cache.append(mark)
def RefreshCache():
    """Trim the event caches if the sync interval has elapsed.

    Relies on the globals "sync_interval" and "last_sync" to determine the
    age of the existing caches. When stale, each of "add_cache",
    "move_cache" and "remove_cache" is trimmed to its most recent 100
    entries and "last_sync" is reset.
    """
    global last_sync
    global add_cache
    global move_cache
    global remove_cache
    now = datetime.datetime.now()
    if last_sync < now - sync_interval:
        last_sync = now
        # Keep only the most recent 100 events in each cache.
        add_cache = add_cache[-100:]
        move_cache = move_cache[-100:]
        remove_cache = remove_cache[-100:]
def main():
    """Entry point when the script is executed directly.

    Called each time the script is launched; as a side effect, module-level
    globals retain their values between requests, enabling the caches.
    """
    routes = [
        ('/event/initial', InitialHandler),
        ('/event/add', AddHandler),
        ('/event/move', MoveHandler),
        ('/event/delete', DeleteHandler),
        ('/event/update', UpdateHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug=True))
if __name__ == '__main__':
    # CGI entry point: invoked on every request under App Engine.
    main()
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main Geochat application.
Contains the MainHandler, which handles root requests to the server, along
with several other template-driven pages that don't have any significant DB
interaction.
MainHandler: Handles requests to /
HelpHandler: Handles requests to /help
"""
import datetime
import logging
import os
import time
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import datamodel
import json
class MainHandler(webapp.RequestHandler):
    """Handles requests to /

    MainHandler handles requests for the server root, presenting the main
    user interface. It relies on the flowered.html template, with most of
    the heavy lifting occurring client-side through JavaScript linked there.
    """

    def get(self):
        # Initial map centre.  NOTE(review): the hard-coded coordinates look
        # like Mountain View, CA — confirm before changing.
        template_data = {
            'initial_latitude': 37.4221,
            'initial_longitude': -122.0837,
        }
        template_path = os.path.join(os.path.dirname(__file__), 'flowered.html')
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))
if __name__ == '__main__':
    # Build and serve the single-route WSGI application.
    application = webapp.WSGIApplication([('/', MainHandler)], debug = True)
    run_wsgi_app(application)
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes and methods for use with simplejson and appengine.
Provides both a specialized simplejson encoder, GqlEncoder, designed to simplify
encoding directly from GQL results to JSON. A helper function, encode, is also
provided to further simplify usage.
GqlEncoder: Adds support for GQL results and properties to simplejson.
encode(input): Direct method to encode GQL objects as JSON.
"""
import datetime
import simplejson
import time
import logging
from google.appengine.api import users
from google.appengine.ext import db
class GqlEncoder(simplejson.JSONEncoder):
    """JSONEncoder subclass that understands GQL results and properties.

    Overrides JSONEncoder's default() hook so GqlQuery results, GeoPt
    values, Model entities, datetimes and struct_times can be serialized
    directly to JSON.
    """

    # TODO Improve coverage for all of App Engine's Property types.

    def default(self, obj):
        """Return a JSON-serializable representation of obj."""
        if hasattr(obj, '__json__'):
            return getattr(obj, '__json__')()
        if isinstance(obj, db.GqlQuery):
            return list(obj)
        if isinstance(obj, db.GeoPt):
            return {'lat': obj.lat, 'lon': obj.lon}
        if isinstance(obj, db.Model):
            entity = dict((name, getattr(obj, name))
                          for name in obj.properties())
            # Expose the entity key's numeric id or name as an "id" field.
            key = obj.key()
            if key.has_id_or_name():
                entity['id'] = key.id_or_name()
            return entity
        if isinstance(obj, datetime.datetime):
            # Seconds since the epoch, matching the client's expectations.
            return time.mktime(obj.timetuple())
        if isinstance(obj, time.struct_time):
            return list(obj)
        return simplejson.JSONEncoder.default(self, obj)
def encode(input):
    """Serialize a GQL object or DB property to a JSON string.

    Args:
        input: A GQL object or DB property.

    Returns:
        A JSON string based on the input object.

    Raises:
        TypeError: Typically occurs when an input object contains an
            unsupported type.
    """
    encoder = GqlEncoder()
    return encoder.encode(input)
| Python |
"""Implementation of JSONEncoder
"""
import re
try:
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
# Characters that must be escaped inside any JSON string.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# Characters escaped when producing ASCII-only output (everything non-printable).
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects raw non-ASCII bytes in a str that need UTF-8 decoding first.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Two-character escape sequences for the common control characters.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    # Map every remaining control character to its generic \uXXXX escape.
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
def encode_basestring(s):
    """Return a JSON representation of a Python string
    """
    # Substitute each special character with its escape from ESCAPE_DCT.
    return '"' + ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s) + '"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string
    """
    # Python 2: a byte string containing UTF-8 must be decoded to unicode
    # before individual characters can be escaped.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            # Fast path: the common two-character escapes.
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                # Basic Multilingual Plane: single \uXXXX escape.
                #return '\\u{0:04x}'.format(n)
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C speedup implementation when the extension module is available.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; __init__ installs instance-level overrides when
    # a ``separators`` tuple is supplied.
    item_separator = ', '
    key_separator = ': '

    def __init__(self, skipkeys=False, ensure_ascii=True,
                 check_circular=True, allow_nan=True, sort_keys=False,
                 indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level.  An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            # The instance attribute shadows the default() method below.
            self.default = default
        self.encoding = encoding

    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")

    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)

    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the chosen encoder so byte strings are decoded with the
            # configured (non-UTF-8) encoding first.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials.  Note that this type of test is processor- and/or
            # platform-specific, so do tests which don't depend on the internals.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text
        # Use the C encoder only for the one-shot, compact, unsorted case;
        # otherwise fall back to the pure-Python generator factory.
        if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    # Factory returning the pure-Python ``_iterencode`` generator used when
    # the C speedup encoder is unavailable or pretty-printing/sorting is
    # requested.  The three mutually recursive generators below stream JSON
    # text chunk by chunk.

    def _iterencode_list(lst, _current_indent_level):
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Circular-reference detection keyed on object identity.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first item the pending buffer is just the separator.
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            del markers[markerid]

    def _iterencode_dict(dct, _current_indent_level):
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]

    def _iterencode(o, _current_indent_level):
        # Top-level dispatch on the object's type.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            # Last resort: let the user-supplied default() convert it.
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]

    return _iterencode
| Python |
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']

# Flags shared by every regular expression in this module.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
    """Return (nan, inf, -inf) built from raw IEEE-754 bit patterns."""
    # Python 2: hex-decode the big-endian byte patterns for NaN and +Infinity.
    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
    if sys.byteorder != 'big':
        # Reverse each 8-byte double on little-endian machines.
        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
    nan, inf = struct.unpack('dd', _BYTES)
    return nan, inf, -inf

NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
    """Return the 1-based line number and column of offset *pos* in *doc*."""
    lineno = doc.count('\n', 0, pos) + 1
    if lineno == 1:
        return lineno, pos
    # Column is measured from the newline that starts the current line.
    return lineno, pos - doc.rindex('\n', 0, pos)


def errmsg(msg, doc, pos, end=None):
    """Format a parse-error message with line/column position info."""
    # Note that this function is called from _speedups
    lineno, colno = linecol(doc, pos)
    if end is None:
        #fmt = '{0}: line {1} column {2} (char {3})'
        #return fmt.format(msg, lineno, colno, pos)
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    #fmt = '{0}: line {1} column {2} - line {3} column {4} (char {5} - {6})'
    #return fmt.format(msg, lineno, colno, endlineno, endcolno, pos, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, lineno, colno, endlineno, endcolno, pos, end))
# Non-standard literal tokens mapped to their float values.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}

# Matches a run of plain characters followed by a quote, backslash, or
# control character that ends the run.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Lookup table for the single-character backslash escapes.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content is contains zero or more unescaped string characters
        if content:
            # Python 2: promote byte strings to unicode with the given encoding.
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise ValueError(errmsg(msg, s, end))
            else:
                # Non-strict mode: keep the literal control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise ValueError(errmsg(msg, s, end))
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise ValueError(errmsg(msg, s, end))
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise ValueError(errmsg(msg, s, end))
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise ValueError(errmsg(msg, s, end))
                uni2 = int(esc2, 16)
                # Combine the high and low surrogates into one code point.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring

# Matches a (possibly empty) run of JSON whitespace.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON object starting just after its '{'; returns (dict, end).
    # (Python 2 tuple-unpacked parameter: called as JSONObject((s, end), ...).)
    pairs = {}
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            return pairs, end + 1
        elif nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise ValueError(errmsg("Expecting : delimiter", s, end))
        end += 1
        try:
            # Skip one or two whitespace characters cheaply before falling
            # back to the regex for longer runs.
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))
    if object_hook is not None:
        # Let the caller substitute a custom object for the plain dict.
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse a JSON array starting just after its '['; returns (list, end).
    # (Python 2 tuple-unpacked parameter: called as JSONArray((s, end), ...).)
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        _append(value)
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))
        try:
            # Skip one or two whitespace characters cheaply before falling
            # back to the regex for longer runs.
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """

    def __init__(self, encoding=None, object_hook=None, parse_float=None,
                 parse_int=None, parse_constant=None, strict=True):
        """``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default).  It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded.  By default this is equivalent to
        float(num_str).  This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded.  By default this is equivalent to
        int(num_str).  This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # make_scanner reads the parse_* attributes set above, so it must be
        # created last.
        self.scan_once = make_scanner(self)

    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        # Reject any trailing non-whitespace content.
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj

    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            raise ValueError("No JSON object could be decoded")
        return obj, end
| Python |
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> import decimal
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError(repr(o) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.0.9'
# Public API of the simplejson package.
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decoder import JSONDecoder
from encoder import JSONEncoder
# Shared encoder instance reused by dump()/dumps() whenever the caller leaves
# every option at its default (the common case), avoiding per-call construction.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    If ``skipkeys`` is true, ``dict`` keys that are not basic types (``str``,
    ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``) are skipped
    instead of raising ``TypeError``.

    If ``ensure_ascii`` is false, some chunks written to ``fp`` may be
    ``unicode`` instances, subject to normal Python ``str`` to ``unicode``
    coercion rules; unless ``fp.write()`` explicitly understands ``unicode``
    (as in ``codecs.getwriter()``) this is likely to cause an error.

    If ``check_circular`` is false, the circular reference check for container
    types is skipped and a circular reference will result in an
    ``OverflowError`` (or worse).

    If ``allow_nan`` is false, serializing out of range ``float`` values
    (``nan``, ``inf``, ``-inf``) raises ``ValueError`` in strict compliance
    with the JSON specification, instead of using the JavaScript equivalents
    (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, array elements and object members
    are pretty-printed with that indent level; 0 only inserts newlines and
    ``None`` (the default) is the most compact representation.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple it is
    used instead of the default ``(', ', ': ')``; ``(',', ':')`` is the most
    compact JSON representation.

    ``encoding`` is the character encoding for ``str`` instances (UTF-8 by
    default). ``default(obj)`` should return a serializable version of *obj*
    or raise ``TypeError``. To use a custom ``JSONEncoder`` subclass, pass it
    as ``cls``.
    """
    all_defaults = (not skipkeys and ensure_ascii and check_circular
                    and allow_nan and cls is None and indent is None
                    and separators is None and encoding == 'utf-8'
                    and default is None and not kw)
    if all_defaults:
        # Fast path: reuse the shared, pre-built module-level encoder.
        chunks = _default_encoder.iterencode(obj)
    else:
        if cls is None:
            cls = JSONEncoder
        chunks = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan, indent=indent,
            separators=separators, encoding=encoding,
            default=default, **kw).iterencode(obj)
    # Writing chunk-by-chunk keeps failures easy to debug; writelines() could
    # be faster on some Pythons at a debuggability cost.
    for chunk in chunks:
        fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    If ``skipkeys`` is true then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, then the return value will be a
    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
    coercion rules instead of being escaped to an ASCII ``str``.

    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).

    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
    strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, then JSON array elements and
    object members will be pretty-printed with that indent level. An indent
    level of 0 will only insert newlines. ``None`` is the most compact
    representation.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.

    ``encoding`` is the character encoding for str instances, default is UTF-8.

    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # cached encoder: with every option at its default, reuse the shared
    # module-level encoder instead of building a fresh one per call.
    if (not skipkeys and ensure_ascii and
        check_circular and allow_nan and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not kw):
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    return cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default,
        **kw).encode(obj)
# Shared decoder instance reused by load()/loads() whenever the caller leaves
# every option at its default (the common case).
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object
    containing a JSON document) to a Python object.

    If the contents of ``fp`` use an ASCII based encoding other than UTF-8
    (e.g. latin-1), pass the appropriate ``encoding`` name. Encodings that
    are not ASCII based (such as UCS-2) are not allowed and should be wrapped
    with ``codecs.getreader(fp)(encoding)``, or simply decoded to a
    ``unicode`` object and passed to ``loads()``.

    ``object_hook`` is an optional function called with the result of every
    object literal decode (a ``dict``); its return value is used in place of
    the ``dict``. This can implement custom decoders (e.g. JSON-RPC class
    hinting).

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    # ``load`` is simply ``loads`` applied to the file's entire contents;
    # all keyword options are forwarded untouched.
    document = fp.read()
    return loads(document,
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
    document) to a Python object.

    If ``s`` is a ``str`` encoded with an ASCII based encoding other than
    UTF-8 (e.g. latin-1), pass the appropriate ``encoding`` name. Encodings
    that are not ASCII based (such as UCS-2) are not allowed and should be
    decoded to ``unicode`` first.

    ``object_hook`` is an optional function called with the result of every
    object literal decode (a ``dict``); its return value is used in place of
    the ``dict``. This can implement custom decoders (e.g. JSON-RPC class
    hinting).

    ``parse_float``, if specified, is called with the string of every JSON
    float to be decoded (default is equivalent to ``float(num_str)``).
    ``parse_int`` likewise for every JSON int (default ``int(num_str)``).
    ``parse_constant``, if specified, is called with one of the strings
    -Infinity, Infinity, NaN, null, true, false, and may be used to raise on
    invalid JSON numbers.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    use_cached = (cls is None and encoding is None and object_hook is None
                  and parse_int is None and parse_float is None
                  and parse_constant is None and not kw)
    if use_cached:
        # Fast path: everything at defaults, reuse the shared decoder.
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied, so the decoder's
    # own defaults remain in effect for the rest.
    optional_hooks = (('object_hook', object_hook),
                      ('parse_float', parse_float),
                      ('parse_int', parse_int),
                      ('parse_constant', parse_constant))
    for name, hook in optional_hooks:
        if hook is not None:
            kw[name] = hook
    return cls(encoding=encoding, **kw).decode(s)
| Python |
r"""Command-line tool to validate and pretty-print JSON
Usage::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
import sys
import simplejson
def main():
if len(sys.argv) == 1:
infile = sys.stdin
outfile = sys.stdout
elif len(sys.argv) == 2:
infile = open(sys.argv[1], 'rb')
outfile = sys.stdout
elif len(sys.argv) == 3:
infile = open(sys.argv[1], 'rb')
outfile = open(sys.argv[2], 'wb')
else:
raise SystemExit(sys.argv[0] + " [infile [outfile]]")
try:
obj = simplejson.load(infile)
except ValueError, e:
raise SystemExit(e)
simplejson.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
if __name__ == '__main__':
main()
| Python |
"""JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
__all__ = ['make_scanner']
# Matches a JSON number: optional minus, integer part with no leading zeros,
# then optional fraction and optional exponent -- captured as three groups
# (integer, frac, exp) so the scanner can pick int vs. float parsing.
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
    """Build a pure-Python ``scan_once(string, idx)`` scanner for *context*.

    *context* is a decoder-like object exposing the parse callbacks and
    options used below. They are bound to local names up front so the hot
    ``_scan_once`` closure avoids repeated attribute lookups.
    """
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    def _scan_once(string, idx):
        # Returns a (value, end_index) pair. Raises StopIteration when idx
        # is past the end of input or no JSON token starts at idx.
        try:
            nextchar = string[idx]
        except IndexError:
            raise StopIteration
        # Dispatch on the first character of the token; containers recurse
        # via _scan_once itself.
        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5
        # Numbers are tried before the non-standard NaN/Infinity constants;
        # frac/exp groups decide float vs. int parsing.
        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration
    return _scan_once

# Prefer the C implementation when the _speedups extension is available.
make_scanner = c_make_scanner or py_make_scanner
| Python |
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import db
"""Database models used in the Flowered application.
"""
class Mark(db.Model):
    """A single map marker placed by a user.

    Instances are created/updated by the event handlers (AddHandler,
    MoveHandler) and keyed by the client-supplied id (``key_name``).
    """
    # Set automatically on first put(); updated manually on moves.
    timestamp = db.DateTimeProperty(auto_now_add = True)
    # Marker position (latitude/longitude).
    geopt = db.GeoPtProperty()
    # Marker type string, taken verbatim from the client request.
    type = db.StringProperty()
| Python |
# Copyright 2009 Daniel Schubert
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for Flowered user events.
Contains several RequestHandler subclasses used to handle put and get
operations, along with any helper functions. This script is designed to be
run directly as a WSGI application, and within Flowered handles all URLs
under /event.
UpdateHandler: Handles user requests for updated lists of events.
ChatHandler: Handles user chat input events.
MoveHandler: Handles user movement events.
RefreshCache(): Checks the age of the cache, and updates if necessary.
"""
import datetime
import logging
import os
import time
import datamodel
import json
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
# The time interval between syncs as a timedelta.
sync_interval = datetime.timedelta(0, 10)
# A datetime indicating the last time the caches were synced from the DB.
# Initialized one interval in the past so the very first update triggers
# a refresh.
last_sync = datetime.datetime.now() - sync_interval
# A list storing the add cache (marks created since the last trim).
add_cache = []
# A list storing the move cache (marks moved since the last trim).
move_cache = []
# A list storing the delete cache (marks removed since the last trim).
remove_cache = []
class UpdateHandler(webapp.RequestHandler):
    """Handles user requests for updated lists of events.

    UpdateHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    "max_longitude", "zoom", and "since" fields.
    """

    def get(self):
        """Return the cached add/move/remove events that are newer than
        'since' and fall inside the requested bounding box, JSON encoded."""
        # The caches are only *read* here; rebinding happens in
        # RefreshCache(), so no ``global`` declarations are needed.
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        # zoom = self.request.get('zoom')
        if self.request.get('since') == '':
            since = 0
        else:
            since = float(self.request.get('since'))
        since_datetime = datetime.datetime.fromtimestamp(since)
        # Restrict latitude/longitude to restrict bulk downloads.
        #if (max_latitude - min_latitude) > 1:
        #    max_latitude = min_latitude + 1
        #if (max_longitude - min_longitude) > 1:
        #    max_longitude = min_longitude + 1

        def _in_window(entry):
            # An event is reported when it is newer than the client's last
            # poll and lies strictly inside the requested bounding box.
            return (entry.timestamp > since_datetime and
                    min_latitude < entry.geopt.lat < max_latitude and
                    min_longitude < entry.geopt.lon < max_longitude)

        add_events = []
        move_events = []
        remove_events = []
        if since > 0:
            RefreshCache()
            # One shared predicate instead of three copy-pasted loops.
            add_events = [e for e in add_cache if _in_window(e)]
            move_events = [e for e in move_cache if _in_window(e)]
            remove_events = [e for e in remove_cache if _in_window(e)]
        output = {
            'timestamp': time.time(),
            'adds': add_events,
            'moves': move_events,
            'removes': remove_events,
        }
        self.response.headers['Content-Type'] = 'text/plain'
        # ``json`` here is the local GqlEncoder-based module, not stdlib.
        self.response.out.write(json.encode(output))
class InitialHandler(webapp.RequestHandler):
    """Handles user requests for updated lists of events.

    InitialHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    and "max_longitude" fields.
    """

    def get(self):
        """Fetch up to 1000 marks inside the requested bounding box and
        write them out JSON encoded."""
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        # Restrict latitude/longitude to restrict bulk downloads.
        #if (max_latitude - min_latitude) > 1:
        #    max_latitude = min_latitude + 1
        #if (max_longitude - min_longitude) > 1:
        #    max_longitude = min_longitude + 1
        # Query the datastore for every mark between the two corner points.
        min_geopt = db.GeoPt(min_latitude, min_longitude)
        max_geopt = db.GeoPt(max_latitude, max_longitude)
        query = datamodel.Mark.gql(
            'WHERE geopt > :min_geopt AND geopt < :max_geopt ',
            min_geopt=min_geopt, max_geopt=max_geopt)
        output = {
            'timestamp': time.time(),
            'adds': query.fetch(1000)
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output))
class MoveHandler(webapp.RequestHandler):
    """Handles user movement events.

    MoveHandler only provides a post method for receiving new user
    co-ordinates, and doesn't store any data to the datastore as ChatHandler
    does with ChatEvents, instead just adding straight to the local cache.
    """

    def post(self):
        """Update an existing mark's position from form fields and record
        the move in the local cache."""
        # ``move_cache.append`` mutates the list in place (no rebinding),
        # so a ``global`` declaration is unnecessary.
        # Get the mark to modify; silently ignore unknown ids.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:  # identity check instead of ``== None``
            return
        # Update current mark's position and timestamp.
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
                              float(self.request.get('longitude')))
        mark.put()
        # Append to the move cache, so we don't need to wait for a refresh.
        move_cache.append(mark)
class AddHandler(webapp.RequestHandler):
    """Creates a new Mark from a client "post" request and caches it."""

    def post(self):
        """Create a Mark keyed by the client-supplied id and append it to
        the add cache."""
        global add_cache
        # Create and insert the a new mark event.
        event = datamodel.Mark(key_name = self.request.get('id'))
        event.timestamp = datetime.datetime.now()
        event.geopt = db.GeoPt(float(self.request.get('latitude')),
            float(self.request.get('longitude')))
        event.type = str(self.request.get('type'))
        # NOTE(review): ``project`` is not a declared property on
        # datamodel.Mark (only timestamp/geopt/type are), so this value is
        # presumably not persisted by put() -- confirm intent.
        event.project = str(self.request.get('project'))
        event.put()
        # Append to the add cache, so we don't need to wait on a refresh.
        add_cache.append(event)
class DeleteHandler(webapp.RequestHandler):
    """Deletes an existing Mark and records the removal in the local cache."""

    def post(self):
        """Delete the mark with the client-supplied id from the datastore
        and append it to the remove cache."""
        # ``remove_cache.append`` mutates in place, so ``global`` is not
        # required.
        # Get the mark to delete; silently ignore unknown ids.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:  # identity check instead of ``== None``
            return
        # Delete mark from datastore.
        db.delete(mark)
        # Stamp the removal time so clients polling with 'since' see it.
        mark.timestamp = datetime.datetime.now()
        # Append to the delete cache, so we don't need to wait for a refresh.
        remove_cache.append(mark)
def RefreshCache():
    """Check the freshness of the event caches, and refresh if necessary.

    RefreshCache relies on the globals "sync_interval" and "last_sync" to
    determine the age of the existing caches and whether or not they should
    be trimmed. All output goes to the "add_cache", "move_cache" and
    "remove_cache" globals.
    """
    # ``sync_interval`` is only read, so it needs no ``global`` declaration;
    # the others are rebound below.
    global last_sync
    global add_cache
    global move_cache
    global remove_cache
    now = datetime.datetime.now()
    if last_sync < now - sync_interval:
        last_sync = datetime.datetime.now()
        # Trim each cache to at most 500 entries.
        # NOTE(review): [:500] keeps the *oldest* 500 entries; the
        # commented-out variants below kept the newest ([-100:]). If recent
        # events should survive trimming, [-500:] is probably intended --
        # confirm.
        #add_cache = add_cache[-100:]
        #move_cache = move_cache[-100:]
        #remove_cache = remove_cache[-100:]
        add_cache = add_cache[:500]
        move_cache = move_cache[:500]
        remove_cache = remove_cache[:500]
def main():
    """Main method called when the script is executed directly.

    This method is called each time the script is launched, and also has the
    effect of enabling caching for global variables.
    """
    # logging.getLogger().setLevel(logging.DEBUG)
    routes = [
        ('/event/initial', InitialHandler),
        ('/event/add', AddHandler),
        ('/event/move', MoveHandler),
        ('/event/delete', DeleteHandler),
        ('/event/update', UpdateHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug=True))


if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main Flowered application.
Contains the MainHandler, which handles root requests to the server, along
with several other template-driven pages that don't have any significant DB
interaction.
MainHandler: Handles requests to /
StandaloneHandler: Handles requests to /standalone
"""
import datetime
import logging
import os
import time
import string
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import datamodel
import json
# Log each module load together with the deployed app version, which helps
# confirm which version is actually serving.
logging.info('Loading %s, app version = %s',
    __name__, os.getenv('CURRENT_VERSION_ID'))
class MainHandler(webapp.RequestHandler):
    """Handles requests to /

    MainHandler handles requests for the server root, presenting the main
    user interface for Flowered. It relies on the flowered.html template,
    with most of the heavy lifting occurring client-side through JavaScript
    linked there.
    """

    def get(self):
        """Render the flowered.html template with the initial map settings."""
        if self.request.get('searchbox') == '':
            show_searchbox = 'false'
        else:
            show_searchbox = self.request.get('searchbox').lower()
        # Single initialization (the previous dead ``template_data = {}``
        # assignment was removed).
        template_data = {
            'project_id': 'schwerin',
            'initial_latitude': 53.625706,
            'initial_longitude': 11.416855,
            'initial_zoom': 15,
            'show_searchbox': show_searchbox,
            'current_version_id': self.version()
        }
        template_path = os.path.join(os.path.dirname(__file__), 'flowered.html')
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))

    def version(self):
        """Return the lowercased major app version, or 'n/a' if it cannot
        be determined from CURRENT_VERSION_ID (format: "major.minor")."""
        current_version = os.getenv('CURRENT_VERSION_ID')
        # str methods instead of the deprecated ``string`` module functions.
        version = current_version.split('.')
        if len(version) >= 2:
            return version[0].lower()
        else:
            return 'n/a'
class StandaloneHandler(webapp.RequestHandler):
    """Handles requests to /standalone

    StandaloneHandler presents the standalone variant of the Flowered user
    interface. It relies on the standalone.html template, with most of the
    heavy lifting occurring client-side through JavaScript linked there.
    (The docstring previously described MainHandler by copy-paste mistake.)
    """

    def get(self):
        """Render the standalone.html template with the initial map
        settings."""
        # Single initialization (the previous dead ``template_data = {}``
        # assignment was removed).
        template_data = {
            'project_id': 'schwerin',
            'initial_latitude': 53.625706,
            'initial_longitude': 11.416855,
            'initial_zoom': 15,
        }
        template_path = os.path.join(os.path.dirname(__file__), 'standalone.html')
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))
class RedirectHandler(webapp.RequestHandler):
    """Handles requests to /

    RedirectHandler handles requests for the server root and redirects the
    user to the appropriate sub project.
    """

    def get(self):
        # Currently the only project is /schwerin.
        self.redirect('/schwerin')
def main():
    """Build the WSGI application and hand it to the App Engine runtime.

    Route order matters: the first matching pattern wins, so the more
    specific /schwerin paths precede the catch-all redirect.
    """
    routes = [
        ('/schwerin/standalone.*', StandaloneHandler),
        ('/schwerin.*', MainHandler),
        ('/.*', RedirectHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug=True))


if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes and methods for use with simplejson and appengine.
Provides both a specialized simplejson encoder, GqlEncoder, designed to simplify
encoding directly from GQL results to JSON. A helper function, encode, is also
provided to further simplify usage.
GqlEncoder: Adds support for GQL results and properties to simplejson.
encode(input): Direct method to encode GQL objects as JSON.
"""
import datetime
import simplejson
import time
import logging
from google.appengine.api import users
from google.appengine.ext import db
class GqlEncoder(simplejson.JSONEncoder):
    """Extends JSONEncoder to add support for GQL results and properties.

    Adds support to simplejson JSONEncoders for GQL results and properties by
    overriding JSONEncoder's default method.
    """

    # TODO Improve coverage for all of App Engine's Property types.

    def default(self, obj):
        """Tests the input object, obj, to encode as JSON.

        Checked in order: a ``__json__`` hook on the object, GqlQuery,
        GeoPt, Model, datetime, struct_time; anything else falls through to
        the base class (which raises TypeError).
        """
        # Objects may opt in to custom serialization via a __json__ method.
        if hasattr(obj, '__json__'):
            return getattr(obj, '__json__')()

        if isinstance(obj, db.GqlQuery):
            # Encode a query as the list of its results.
            return list(obj)

        elif isinstance(obj, db.GeoPt):
            # Encode a point as {"lat": ..., "lon": ...}.
            output = {}
            fields = ['lat', 'lon']
            for field in fields:
                output[field] = getattr(obj, field)
            return output

        elif isinstance(obj, db.Model):
            # Encode a model as a dict of its declared properties.
            properties = obj.properties().items()
            output = {}
            for field, value in properties:
                output[field] = getattr(obj, field)
            # map key name to ID field
            key = obj.key()
            if key.has_id_or_name():
                output['id'] = key.id_or_name()
            return output

        elif isinstance(obj, datetime.datetime):
            # Seconds since the epoch; mktime interprets the timetuple in
            # the server's local timezone and drops sub-second precision.
            output = time.mktime(obj.timetuple())
            return output

        elif isinstance(obj, time.struct_time):
            return list(obj)

        return simplejson.JSONEncoder.default(self, obj)
def encode(input):
    """Encode an input GQL object as JSON.

    Args:
      input: A GQL object or DB property.

    Returns:
      A JSON string based on the input object.

    Raises:
      TypeError: Typically occurs when an input object contains an
        unsupported type.
    """
    encoder = GqlEncoder()
    return encoder.encode(input)
| Python |
import unittest
import doctest
class OptionalExtensionTestSuite(unittest.TestSuite):
def run(self, result):
import simplejson
run = unittest.TestSuite.run
run(self, result)
simplejson._toggle_speedups(False)
run(self, result)
simplejson._toggle_speedups(True)
return result
def additional_tests(suite=None):
    """Return *suite* (or a fresh TestSuite) extended with the doctest
    suites for the simplejson package and the index.rst document."""
    import simplejson
    import simplejson.encoder
    import simplejson.decoder
    if suite is None:
        suite = unittest.TestSuite()
    doctest_modules = (simplejson, simplejson.encoder, simplejson.decoder)
    for module in doctest_modules:
        suite.addTest(doctest.DocTestSuite(module))
    suite.addTest(doctest.DocFileSuite('../../index.rst'))
    return suite
def all_tests_suite():
    """Build the complete simplejson suite (unit tests plus doctests),
    wrapped so it runs both with and without the C speedups."""
    test_module_names = [
        'simplejson.tests.test_check_circular',
        'simplejson.tests.test_decode',
        'simplejson.tests.test_default',
        'simplejson.tests.test_dump',
        'simplejson.tests.test_encode_basestring_ascii',
        'simplejson.tests.test_encode_for_html',
        'simplejson.tests.test_errors',
        'simplejson.tests.test_fail',
        'simplejson.tests.test_float',
        'simplejson.tests.test_indent',
        'simplejson.tests.test_pass1',
        'simplejson.tests.test_pass2',
        'simplejson.tests.test_pass3',
        'simplejson.tests.test_recursion',
        'simplejson.tests.test_scanstring',
        'simplejson.tests.test_separators',
        'simplejson.tests.test_speedups',
        'simplejson.tests.test_unicode',
        'simplejson.tests.test_decimal',
    ]
    loader = unittest.TestLoader()
    suite = additional_tests(loader.loadTestsFromNames(test_module_names))
    return OptionalExtensionTestSuite([suite])
def main():
    """Run the full simplejson test suite with a plain text runner."""
    runner = unittest.TextTestRunner()
    suite = all_tests_suite()
    runner.run(suite)

if __name__ == '__main__':
    import os
    import sys
    # Make the package root (three directories up) importable before the
    # test modules are loaded by name.
    sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.dirname(os.path.abspath(__file__)))))
    main()
| Python |
"""Implementation of JSONEncoder
"""
import re
from decimal import Decimal
def _import_speedups():
    """Return the C-accelerated (encode_basestring_ascii, make_encoder)
    pair, or (None, None) when the _speedups extension is unavailable."""
    try:
        from simplejson import _speedups
        return _speedups.encode_basestring_ascii, _speedups.make_encoder
    except ImportError:
        return None, None
# Bound once at import time; pure-Python fallbacks are used when None.
c_encode_basestring_ascii, c_make_encoder = _import_speedups()
from simplejson.decoder import PosInf
# Characters that must be escaped inside a JSON string: control characters,
# backslash and double quote.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# For ASCII-only output: additionally escape everything outside printable
# ASCII (space through '~').
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects non-ASCII bytes in a str, i.e. candidates for UTF-8 decoding.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Single-character escape table; the loop below fills in \uXXXX escapes for
# any remaining control characters without a short form.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
for i in range(0x20):
    # ``%`` formatting is used instead of str.format for old-Python compat.
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# How floats are rendered; repr gives round-trippable output.
FLOAT_REPR = repr
def encode_basestring(s):
    """Return a JSON string literal (quoted, escaped) for the string *s*.

    Byte strings containing non-ASCII data are decoded as UTF-8 first;
    output may therefore be unicode.
    """
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    escaped = ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s)
    return u'"' + escaped + u'"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string.

    Every non-ASCII character is emitted as a \\uXXXX escape (or a
    surrogate pair for characters above the BMP), so the result is a
    plain ``str``.
    """
    # Decode UTF-8 byte strings so escaping operates on code points.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            # Short escapes (\n, \", ...) when available.
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                #return '\\u{0:04x}'.format(n)
                return '\\u%04x' % (n,)
            else:
                # surrogate pair: split the supplementary-plane code point
                # into high (0xd800-based) and low (0xdc00-based) halves.
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C implementation when the _speedups extension is available.
encode_basestring_ascii = (
    c_encode_basestring_ascii or py_encode_basestring_ascii)
class JSONEncoder(object):
"""Extensible JSON <http://json.org> encoder for Python data structures.
Supports the following objects and types by default:
+-------------------+---------------+
| Python | JSON |
+===================+===============+
| dict | object |
+-------------------+---------------+
| list, tuple | array |
+-------------------+---------------+
| str, unicode | string |
+-------------------+---------------+
| int, long, float | number |
+-------------------+---------------+
| True | true |
+-------------------+---------------+
| False | false |
+-------------------+---------------+
| None | null |
+-------------------+---------------+
To extend this to recognize other objects, subclass and implement a
``.default()`` method with another method that returns a serializable
object for ``o`` if possible, otherwise it should call the superclass
implementation (to raise ``TypeError``).
"""
item_separator = ', '
key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None,
            use_decimal=False):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None. If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped. If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such. This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a string, then JSON array elements and object members
        will be pretty-printed with a newline followed by that string repeated
        for each level of nesting. ``None`` (the default) selects the most compact
        representation without any newlines. For backwards compatibility with
        versions of simplejson earlier than 2.1.0, an integer is also accepted
        and is converted to a string with that many spaces.

        If specified, separators should be a (item_separator, key_separator)
        tuple. The default is (', ', ': '). To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized. It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.

        If use_decimal is true (not the default), ``decimal.Decimal`` will
        be supported directly by the encoder. For the inverse, decode JSON
        with ``parse_float=decimal.Decimal``.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.use_decimal = use_decimal
        # Backwards compatibility: an integer indent means "that many spaces".
        if isinstance(indent, (int, long)):
            indent = ' ' * indent
        self.indent = indent
        # Overrides the class-level item_separator/key_separator defaults.
        if separators is not None:
            self.item_separator, self.key_separator = separators
        # An instance attribute shadows the ``default`` method when given.
        if default is not None:
            self.default = default
        self.encoding = encoding
def default(self, o):
"""Implement this method in a subclass such that it returns
a serializable object for ``o``, or calls the base implementation
(to raise a ``TypeError``).
For example, to support arbitrary iterators, you could
implement default like this::
def default(self, o):
try:
iterable = iter(o)
except TypeError:
pass
else:
return list(iterable)
return JSONEncoder.default(self, o)
"""
raise TypeError(repr(o) + " is not JSON serializable")
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.
        >>> from simplejson import JSONEncoder
        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        # Fast path: a bare string can be escaped directly, skipping the
        # generator machinery below entirely.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                # Decode byte strings in a non-UTF-8 encoding to unicode
                # first; UTF-8 str is handled natively by the escapers.
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        # Join with '' or u'' to match the ensure_ascii contract on the
        # result type.
        if self.ensure_ascii:
            return ''.join(chunks)
        else:
            return u''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.
        For example::
            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        # Track container identity only when circular-reference checking
        # is enabled; None disables the bookkeeping entirely.
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the escaper so byte strings are decoded with the
            # configured encoding before being escaped.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan,
                _repr=FLOAT_REPR, _inf=PosInf, _neginf=-PosInf):
            # Check for specials. Note that this type of test is processor
            # and/or platform-specific, so do tests which don't depend on
            # the internals.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text
        key_memo = {}
        # Use the C accelerator only for one-shot, non-indented encoding;
        # otherwise fall back to the pure-Python generator factory.
        if (_one_shot and c_make_encoder is not None
                and self.indent is None):
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan, key_memo, self.use_decimal)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot, self.use_decimal)
        try:
            return _iterencode(o, 0)
        finally:
            # Drop memoized key strings so they don't outlive this call.
            key_memo.clear()
class JSONEncoderForHTML(JSONEncoder):
    """A ``JSONEncoder`` whose output is safe to embed in HTML.

    The characters ``&``, ``<`` and ``>`` are replaced with their
    ``\\uXXXX`` escapes so the result can be dropped into a
    ``<script>`` tag, where HTML entities are not expanded.
    """
    def encode(self, o):
        # Bypass JSONEncoder.encode: its bare-string fast path would
        # skip the chunk post-processing done in iterencode below.
        pieces = self.iterencode(o, True)
        joiner = '' if self.ensure_ascii else u''
        return joiner.join(pieces)
    def iterencode(self, o, _one_shot=False):
        # Escape the HTML-significant characters in every emitted chunk.
        for piece in super(JSONEncoderForHTML, self).iterencode(o, _one_shot):
            piece = piece.replace('&', '\\u0026')
            piece = piece.replace('<', '\\u003c')
            piece = piece.replace('>', '\\u003e')
            yield piece
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr,
    _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
    _use_decimal,
    ## HACK: hand-optimized bytecode; turn globals into locals
    False=False,
    True=True,
    ValueError=ValueError,
    basestring=basestring,
    Decimal=Decimal,
    dict=dict,
    float=float,
    id=id,
    int=int,
    isinstance=isinstance,
    list=list,
    long=long,
    str=str,
    tuple=tuple,
    ):
    """Build and return the pure-Python ``_iterencode(o, indent_level)``
    generator used when the C accelerator is unavailable or unsuitable.

    The trailing keyword arguments rebind builtins as function locals;
    this is purely a CPython name-lookup speed optimization (see the
    HACK note in the signature) and must not be changed.
    """
    def _iterencode_list(lst, _current_indent_level):
        # Yield the JSON representation of a list/tuple, chunk by chunk.
        if not lst:
            yield '[]'
            return
        if markers is not None:
            # Circular-reference detection keyed on object identity.
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                # After the first element the pending prefix is the item
                # separator rather than the opening bracket/indent.
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield buf + str(value)
            else:
                # Containers and custom objects: emit the prefix, then
                # delegate to the appropriate sub-generator.
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield ']'
        if markers is not None:
            # Leaving this container: allow it to appear again elsewhere.
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        # Yield the JSON representation of a dict, chunk by chunk.
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (_indent * _current_indent_level)
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them. Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                # Caller asked for unsupported key types to be dropped.
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            elif _use_decimal and isinstance(value, Decimal):
                yield str(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (_indent * _current_indent_level)
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch on the type of ``o``.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        elif _use_decimal and isinstance(o, Decimal):
            yield str(o)
        else:
            # Unknown type: let ``_default`` convert it, guarding against
            # a conversion cycle via the markers table.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
| Python |
"""Implementation of JSONDecoder
"""
import binascii
import re
import struct
import sys
from simplejson.scanner import make_scanner
def _import_c_scanstring():
try:
from simplejson._speedups import scanstring
return scanstring
except ImportError:
return None
# Prefer the C implementation when the extension module is available.
c_scanstring = _import_c_scanstring()
__all__ = ['JSONDecoder']
# Flags shared by every regular expression compiled in this module.
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
_BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
# The struct module in Python 2.4 would get frexp() out of range here
# when an endian is specified in the format string. Fixed in Python 2.5+
if sys.byteorder != 'big':
_BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
nan, inf = struct.unpack('dd', _BYTES)
return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
class JSONDecodeError(ValueError):
    """``ValueError`` raised when a JSON document cannot be parsed.

    Additional properties:
        msg: The unformatted error message
        doc: The JSON document being parsed
        pos: The start index of doc where parsing failed
        end: The end index of doc where parsing failed (may be None)
        lineno: The line corresponding to pos
        colno: The column corresponding to pos
        endlineno: The line corresponding to end (may be None)
        endcolno: The column corresponding to end (may be None)
    """
    def __init__(self, msg, doc, pos, end=None):
        ValueError.__init__(self, errmsg(msg, doc, pos, end=end))
        self.msg = msg
        self.doc = doc
        self.pos = pos
        self.end = end
        self.lineno, self.colno = linecol(doc, pos)
        if end is None:
            self.endlineno = self.endcolno = None
        else:
            self.endlineno, self.endcolno = linecol(doc, end)
def linecol(doc, pos):
    """Map character offset *pos* in *doc* to a ``(lineno, colno)`` pair.

    Lines are 1-based.  On the first line the column is the offset
    itself; on later lines it is the offset past the preceding newline.
    """
    newlines = doc.count('\n', 0, pos)
    if newlines:
        return newlines + 1, pos - doc.rindex('\n', 0, pos)
    return 1, pos
def errmsg(msg, doc, pos, end=None):
    """Build the human-readable message for a ``JSONDecodeError``.

    Note that this function is also called from _speedups.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    fmt = '%s: line %d column %d - line %d column %d (char %d - %d)'
    return fmt % (msg, lineno, colno, endlineno, endcolno, pos, end)
# Literal tokens that map straight to non-finite float constants.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}
# Matches a run of plain characters followed by a terminator: a closing
# quote, a backslash, or a literal control character.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character backslash escapes (\uXXXX escapes are handled separately).
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
# Encoding assumed for str input when the caller does not specify one.
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True,
        _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.
    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise JSONDecodeError(msg, s, end)
            else:
                # Non-strict mode: keep the literal control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise JSONDecodeError(
                "Unterminated string starting at", s, begin)
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise JSONDecodeError(msg, s, end)
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise JSONDecodeError(msg, s, end)
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise JSONDecodeError(msg, s, end)
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise JSONDecodeError(msg, s, end)
                uni2 = int(esc2, 16)
                # Combine the high and low surrogates into one code point.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
# Insignificant whitespace permitted between JSON tokens.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook,
        object_pairs_hook, memo=None,
        _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON object from ``s`` starting just after its opening
    ``{`` (``end`` is the index one past it).

    Returns ``(obj, end)`` where ``end`` is one past the closing ``}``.
    Keys are deduplicated through ``memo`` so repeated key strings share
    a single object.
    """
    # Backwards compatibility
    if memo is None:
        memo = {}
    memo_get = memo.setdefault
    pairs = []
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            if object_pairs_hook is not None:
                result = object_pairs_hook(pairs)
                return result, end + 1
            pairs = {}
            if object_hook is not None:
                pairs = object_hook(pairs)
            return pairs, end + 1
        elif nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end)
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        key = memo_get(key, key)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise JSONDecodeError("Expecting : delimiter", s, end)
        end += 1
        # Skip whitespace after ':' cheaply (one char) before falling back
        # to the regex for longer runs.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        pairs.append((key, value))
        # Look for ',' (more members) or '}' (end of object).
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end - 1)
        # After the comma, skip whitespace up to the next property name.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise JSONDecodeError("Expecting property name", s, end - 1)
    if object_pairs_hook is not None:
        result = object_pairs_hook(pairs)
        return result, end
    pairs = dict(pairs)
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array from ``s`` starting just after its opening
    ``[``.  Returns ``(values, end)`` with ``end`` one past the
    closing ``]``.
    """
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise JSONDecodeError("Expecting object", s, end)
        _append(value)
        # Look for ',' (more elements) or ']' (end of array).
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise JSONDecodeError("Expecting , delimiter", s, end)
        # Skip whitespace after the comma cheaply before using the regex.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder
    Performs the following translations in decoding by default:
    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+
    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """
    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True,
            object_pairs_hook=None):
        """
        *encoding* determines the encoding used to interpret any
        :class:`str` objects decoded by this instance (``'utf-8'`` by
        default). It has no effect when decoding :class:`unicode` objects.
        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as :class:`unicode`.
        *object_hook*, if specified, will be called with the result of every
        JSON object decoded and its return value will be used in place of the
        given :class:`dict`. This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).
        *object_pairs_hook* is an optional function that will be called with
        the result of any object literal decode with an ordered list of pairs.
        The return value of *object_pairs_hook* will be used instead of the
        :class:`dict`. This feature can be used to implement custom decoders
        that rely on the order that the key and value pairs are decoded (for
        example, :func:`collections.OrderedDict` will remember the order of
        insertion). If *object_hook* is also defined, the *object_pairs_hook*
        takes priority.
        *parse_float*, if specified, will be called with the string of every
        JSON float to be decoded. By default, this is equivalent to
        ``float(num_str)``. This can be used to use another datatype or parser
        for JSON floats (e.g. :class:`decimal.Decimal`).
        *parse_int*, if specified, will be called with the string of every
        JSON int to be decoded. By default, this is equivalent to
        ``int(num_str)``. This can be used to use another datatype or parser
        for JSON integers (e.g. :class:`float`).
        *parse_constant*, if specified, will be called with one of the
        following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
        can be used to raise an exception if invalid JSON numbers are
        encountered.
        *strict* controls the parser's behavior when it encounters an
        invalid control character in a string. The default setting of
        ``True`` means that unescaped control characters are parse errors, if
        ``False`` then control characters will be allowed in strings.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.object_pairs_hook = object_pairs_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # Parser entry points consulted by the scanner.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # Shared by JSONObject so repeated key strings are deduplicated.
        self.memo = {}
        self.scan_once = make_scanner(self)
    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        # Reject trailing non-whitespace content after the document.
        if end != len(s):
            raise JSONDecodeError("Extra data", s, end, len(s))
        return obj
    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode``
        beginning with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.
        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            raise JSONDecodeError("No JSON object could be decoded", s, idx)
        return obj, end
| Python |
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=' ')
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> from decimal import Decimal
>>> json.loads('1.1', parse_float=Decimal) == Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
    ...     raise TypeError(repr(obj) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.1.3'
# Public API re-exported by this package.
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONDecodeError', 'JSONEncoder',
    'OrderedDict',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decimal import Decimal
from decoder import JSONDecoder, JSONDecodeError
from encoder import JSONEncoder
def _import_OrderedDict():
import collections
try:
return collections.OrderedDict
except AttributeError:
import ordered_dict
return ordered_dict.OrderedDict
OrderedDict = _import_OrderedDict()
def _import_c_make_encoder():
try:
from simplejson._speedups import make_encoder
return make_encoder
except ImportError:
return None
# Module-level encoder instance shared by dump()/dumps() whenever the
# caller passes only default arguments (the "cached encoder" fast path).
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
    use_decimal=False,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, use_decimal=False, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).
    If ``skipkeys`` is true then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.
    If ``ensure_ascii`` is false, then the some chunks written to ``fp``
    may be ``unicode`` instances, subject to normal Python ``str`` to
    ``unicode`` coercion rules. Unless ``fp.write()`` explicitly
    understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
    to cause an error.
    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).
    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
    in strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
    If *indent* is a string, then JSON array elements and object members
    will be pretty-printed with a newline followed by that string repeated
    for each level of nesting. ``None`` (the default) selects the most compact
    representation without any newlines. For backwards compatibility with
    versions of simplejson earlier than 2.1.0, an integer is also accepted
    and is converted to a string with that many spaces.
    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.
    ``encoding`` is the character encoding for str instances, default is UTF-8.
    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.
    If *use_decimal* is true (default: ``False``) then decimal.Decimal
    will be natively serialized to JSON with full precision.
    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # cached encoder: with all-default arguments, reuse the shared
    # module-level encoder instead of constructing a new one.
    if (not skipkeys and ensure_ascii and
        check_circular and allow_nan and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not use_decimal
            and not kw):
        iterable = _default_encoder.iterencode(obj)
    else:
        if cls is None:
            cls = JSONEncoder
        iterable = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan, indent=indent,
            separators=separators, encoding=encoding,
            default=default, use_decimal=use_decimal, **kw).iterencode(obj)
    # could accelerate with writelines in some versions of Python, at
    # a debuggability cost
    for chunk in iterable:
        fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, use_decimal=False, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.
    If ``skipkeys`` is true then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.
    If ``ensure_ascii`` is false, then the return value will be a
    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
    coercion rules instead of being escaped to an ASCII ``str``.
    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).
    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
    strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).
    If ``indent`` is a string, then JSON array elements and object members
    will be pretty-printed with a newline followed by that string repeated
    for each level of nesting. ``None`` (the default) selects the most compact
    representation without any newlines. For backwards compatibility with
    versions of simplejson earlier than 2.1.0, an integer is also accepted
    and is converted to a string with that many spaces.
    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.
    ``encoding`` is the character encoding for str instances, default is UTF-8.
    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError. The default simply raises TypeError.
    If *use_decimal* is true (default: ``False``) then decimal.Decimal
    will be natively serialized to JSON with full precision.
    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # cached encoder: with all-default arguments, reuse the shared
    # module-level encoder instead of constructing a new one.
    if (not skipkeys and ensure_ascii and
        check_circular and allow_nan and
        cls is None and indent is None and separators is None and
        encoding == 'utf-8' and default is None and not use_decimal
            and not kw):
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    return cls(
        skipkeys=skipkeys, ensure_ascii=ensure_ascii,
        check_circular=check_circular, allow_nan=allow_nan, indent=indent,
        separators=separators, encoding=encoding, default=default,
        use_decimal=use_decimal, **kw).encode(obj)
# Module-level decoder instance shared by load()/loads() whenever the
# caller passes only default arguments.
_default_decoder = JSONDecoder(encoding=None, object_hook=None,
    object_pairs_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None,
        use_decimal=False, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
    a JSON document) to a Python object.
    *encoding* determines the encoding used to interpret any
    :class:`str` objects decoded by this instance (``'utf-8'`` by
    default). It has no effect when decoding :class:`unicode` objects.
    Note that currently only encodings that are a superset of ASCII work,
    strings of other encodings should be passed in as :class:`unicode`.
    *object_hook*, if specified, will be called with the result of every
    JSON object decoded and its return value will be used in place of the
    given :class:`dict`. This can be used to provide custom
    deserializations (e.g. to support JSON-RPC class hinting).
    *object_pairs_hook* is an optional function that will be called with
    the result of any object literal decode with an ordered list of pairs.
    The return value of *object_pairs_hook* will be used instead of the
    :class:`dict`. This feature can be used to implement custom decoders
    that rely on the order that the key and value pairs are decoded (for
    example, :func:`collections.OrderedDict` will remember the order of
    insertion). If *object_hook* is also defined, the *object_pairs_hook*
    takes priority.
    *parse_float*, if specified, will be called with the string of every
    JSON float to be decoded. By default, this is equivalent to
    ``float(num_str)``. This can be used to use another datatype or parser
    for JSON floats (e.g. :class:`decimal.Decimal`).
    *parse_int*, if specified, will be called with the string of every
    JSON int to be decoded. By default, this is equivalent to
    ``int(num_str)``. This can be used to use another datatype or parser
    for JSON integers (e.g. :class:`float`).
    *parse_constant*, if specified, will be called with one of the
    following strings: ``'-Infinity'``, ``'Infinity'``, ``'NaN'``. This
    can be used to raise an exception if invalid JSON numbers are
    encountered.
    If *use_decimal* is true (default: ``False``) then it implies
    parse_float=decimal.Decimal for parity with ``dump``.
    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    # Not streaming: the whole document is read into memory and handed
    # to loads(), since the decoder operates on a complete string.
    return loads(fp.read(),
        encoding=encoding, cls=cls, object_hook=object_hook,
        parse_float=parse_float, parse_int=parse_int,
        parse_constant=parse_constant, object_pairs_hook=object_pairs_hook,
        use_decimal=use_decimal, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, object_pairs_hook=None,
        use_decimal=False, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
    document) to a Python object.

    *encoding* determines how :class:`str` input is interpreted (``'utf-8'``
    by default) and has no effect on :class:`unicode` input; only encodings
    that are a superset of ASCII work. *object_hook* is called with every
    decoded JSON object and its return value replaces the :class:`dict`;
    *object_pairs_hook* is called with an ordered list of pairs instead and
    takes priority over *object_hook*. *parse_float*, *parse_int* and
    *parse_constant* override the handling of JSON floats, JSON ints, and
    the constants ``'-Infinity'``, ``'Infinity'``, ``'NaN'`` respectively.
    If *use_decimal* is true (default: ``False``) then it implies
    parse_float=decimal.Decimal for parity with ``dump``. To use a custom
    ``JSONDecoder`` subclass, specify it with the ``cls`` kwarg.
    """
    # Fast path: with no customization at all the shared module-level
    # decoder can be reused directly.
    if (cls is None and encoding is None and object_hook is None and
            parse_int is None and parse_float is None and
            parse_constant is None and object_pairs_hook is None and
            not use_decimal and not kw):
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Fold the explicitly supplied hooks into the decoder keyword args;
    # hooks left at None defer to the decoder's own defaults.
    for name, value in (('object_hook', object_hook),
                        ('object_pairs_hook', object_pairs_hook),
                        ('parse_float', parse_float),
                        ('parse_int', parse_int),
                        ('parse_constant', parse_constant)):
        if value is not None:
            kw[name] = value
    if use_decimal:
        if parse_float is not None:
            raise TypeError("use_decimal=True implies parse_float=Decimal")
        kw['parse_float'] = Decimal
    return cls(encoding=encoding, **kw).decode(s)
def _toggle_speedups(enabled):
    """Switch simplejson between its C speedups and pure-Python code paths.

    Rebinds the decoder/encoder/scanner module globals in place and then
    rebuilds the module-level default decoder and encoder so they pick up
    the newly selected implementations. Intended for testing/benchmarking.
    """
    import simplejson.decoder as dec
    import simplejson.encoder as enc
    import simplejson.scanner as scan
    c_make_encoder = _import_c_make_encoder()
    if enabled:
        # Prefer each C implementation, falling back to the pure-Python
        # version when the _speedups extension lacks it (attr is None).
        dec.scanstring = dec.c_scanstring or dec.py_scanstring
        enc.c_make_encoder = c_make_encoder
        enc.encode_basestring_ascii = (enc.c_encode_basestring_ascii or
            enc.py_encode_basestring_ascii)
        scan.make_scanner = scan.c_make_scanner or scan.py_make_scanner
    else:
        dec.scanstring = dec.py_scanstring
        enc.c_make_encoder = None
        enc.encode_basestring_ascii = enc.py_encode_basestring_ascii
        scan.make_scanner = scan.py_make_scanner
    # The decoder module re-exports the scanner factory; this must happen
    # after scan.make_scanner has been rebound above.
    dec.make_scanner = scan.make_scanner
    # Rebuild the cached default decoder/encoder so module-level load(s)/
    # dump(s) fast paths use the newly selected implementations.
    global _default_decoder
    _default_decoder = JSONDecoder(
        encoding=None,
        object_hook=None,
        object_pairs_hook=None,
    )
    global _default_encoder
    _default_encoder = JSONEncoder(
        skipkeys=False,
        ensure_ascii=True,
        check_circular=True,
        allow_nan=True,
        indent=None,
        separators=None,
        encoding='utf-8',
        default=None,
    )
| Python |
r"""Command-line tool to validate and pretty-print JSON
Usage::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
import sys
import simplejson as json
def main():
    """Read JSON from stdin or a file, pretty-print it to stdout or a file.

    Usage: tool.py [infile [outfile]]; with no arguments it filters
    stdin to stdout. Exits via SystemExit on bad usage or invalid JSON.
    """
    # Pick input/output streams based on argument count.
    if len(sys.argv) == 1:
        infile = sys.stdin
        outfile = sys.stdout
    elif len(sys.argv) == 2:
        infile = open(sys.argv[1], 'rb')
        outfile = sys.stdout
    elif len(sys.argv) == 3:
        infile = open(sys.argv[1], 'rb')
        outfile = open(sys.argv[2], 'wb')
    else:
        raise SystemExit(sys.argv[0] + " [infile [outfile]]")
    try:
        # OrderedDict + Decimal preserve key order and numeric precision
        # so the round-trip is as faithful as possible.
        obj = json.load(infile,
                        object_pairs_hook=json.OrderedDict,
                        use_decimal=True)
    except ValueError, e:
        # Invalid JSON: report the parse error and exit non-zero.
        raise SystemExit(e)
    json.dump(obj, outfile, sort_keys=True, indent='    ', use_decimal=True)
    outfile.write('\n')
if __name__ == '__main__':
    main()
| Python |
"""JSON token scanner
"""
import re
def _import_c_make_scanner():
try:
from simplejson._speedups import make_scanner
return make_scanner
except ImportError:
return None
c_make_scanner = _import_c_make_scanner()
__all__ = ['make_scanner']
# Matches a JSON number: integer part, optional fraction, optional exponent.
# Groups are (integer, fraction-with-dot, exponent) as consumed in
# py_make_scanner's number branch.
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
    """Build a pure-Python one-token scanner bound to *context*.

    *context* is a JSONDecoder-like object; its parse callbacks and options
    are hoisted into locals here so the hot inner function avoids repeated
    attribute lookups. The returned callable has the signature
    ``scan_once(string, idx) -> (value, end_idx)`` and signals "no value
    at idx" (end of input or unexpected character) by raising
    StopIteration, which the decoder converts into a ValueError.
    """
    # Bind everything the inner loop needs to local names (speed).
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    object_pairs_hook = context.object_pairs_hook
    memo = context.memo
    def _scan_once(string, idx):
        # Dispatch on the first character of the next token.
        try:
            nextchar = string[idx]
        except IndexError:
            # Past end of input: no token here.
            raise StopIteration
        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            # _scan_once is passed in so containers can recurse; memo is a
            # shared key-interning cache cleared by scan_once below.
            return parse_object((string, idx + 1), encoding, strict,
                _scan_once, object_hook, object_pairs_hook, memo)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5
        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            # A fraction or exponent makes it a float; otherwise an int.
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            # Unrecognized token start.
            raise StopIteration
    def scan_once(string, idx):
        # Public wrapper: ensure the key memo never outlives one top-level
        # scan, even when parsing raises.
        try:
            return _scan_once(string, idx)
        finally:
            memo.clear()
    return scan_once
# Prefer the C-accelerated scanner when the _speedups extension is present.
make_scanner = c_make_scanner or py_make_scanner
| Python |
"""Drop-in replacement for collections.OrderedDict by Raymond Hettinger
http://code.activestate.com/recipes/576693/
"""
from UserDict import DictMixin
# Modified from original to support Python 2.4, see
# http://code.google.com/p/simplejson/issues/detail?id=53
# Python 2.4 has no builtin all(); install an equivalent fallback.
try:
    all
except NameError:
    def all(seq):
        """Return True if every element of *seq* is truthy (2.4 fallback)."""
        result = True
        for elem in seq:
            if not elem:
                result = False
                break
        return result
class OrderedDict(dict, DictMixin):
    """Dictionary that remembers insertion order (pre-2.7 backport).

    Order is tracked with a circular doubly linked list of
    [key, prev, next] cells: self.__map maps each key to its cell and
    self.__end is the list's sentinel node.
    """
    def __init__(self, *args, **kwds):
        if len(args) > 1:
            raise TypeError('expected at most 1 arguments, got %d' % len(args))
        try:
            # Re-initialising an existing instance must not reset its order.
            self.__end
        except AttributeError:
            self.clear()
        self.update(*args, **kwds)
    def clear(self):
        self.__end = end = []
        end += [None, end, end]         # sentinel node for doubly linked list
        self.__map = {}                 # key --> [key, prev, next]
        dict.clear(self)
    def __setitem__(self, key, value):
        if key not in self:
            # New key: link a fresh cell just before the sentinel (the end).
            end = self.__end
            curr = end[1]
            curr[2] = end[1] = self.__map[key] = [key, curr, end]
        dict.__setitem__(self, key, value)
    def __delitem__(self, key):
        dict.__delitem__(self, key)
        # Unlink the key's cell from the doubly linked list.
        key, prev, next = self.__map.pop(key)
        prev[2] = next
        next[1] = prev
    def __iter__(self):
        # Walk the linked list forwards: keys in insertion order.
        end = self.__end
        curr = end[2]
        while curr is not end:
            yield curr[0]
            curr = curr[2]
    def __reversed__(self):
        # Walk the linked list backwards.
        end = self.__end
        curr = end[1]
        while curr is not end:
            yield curr[0]
            curr = curr[1]
    def popitem(self, last=True):
        """Remove and return a (key, value) pair; LIFO if *last* is true."""
        if not self:
            raise KeyError('dictionary is empty')
        # Modified from original to support Python 2.4, see
        # http://code.google.com/p/simplejson/issues/detail?id=53
        if last:
            key = reversed(self).next()
        else:
            key = iter(self).next()
        value = self.pop(key)
        return key, value
    def __reduce__(self):
        # Pickle as (class, (items,), extra instance dict); the linked-list
        # attributes are stripped temporarily so they are not pickled and
        # are rebuilt from items on unpickling.
        items = [[k, self[k]] for k in self]
        tmp = self.__map, self.__end
        del self.__map, self.__end
        inst_dict = vars(self).copy()
        self.__map, self.__end = tmp
        if inst_dict:
            return (self.__class__, (items,), inst_dict)
        return self.__class__, (items,)
    def keys(self):
        return list(self)
    # The bulk-operation API comes from DictMixin, which builds on the
    # order-aware __iter__/__setitem__/__delitem__ defined above.
    setdefault = DictMixin.setdefault
    update = DictMixin.update
    pop = DictMixin.pop
    values = DictMixin.values
    items = DictMixin.items
    iterkeys = DictMixin.iterkeys
    itervalues = DictMixin.itervalues
    iteritems = DictMixin.iteritems
    def __repr__(self):
        if not self:
            return '%s()' % (self.__class__.__name__,)
        return '%s(%r)' % (self.__class__.__name__, self.items())
    def copy(self):
        return self.__class__(self)
    @classmethod
    def fromkeys(cls, iterable, value=None):
        d = cls()
        for key in iterable:
            d[key] = value
        return d
    def __eq__(self, other):
        # Comparison with another OrderedDict is order-sensitive; with a
        # plain dict it falls back to order-insensitive dict equality.
        if isinstance(other, OrderedDict):
            return len(self)==len(other) and \
                   all(p==q for p, q in zip(self.items(), other.items()))
        return dict.__eq__(self, other)
    def __ne__(self, other):
        return not self == other
| Python |
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import db
"""Database models used in the Flowered application.
"""
class Mark(db.Model):
    """A single point marked on the map, keyed by a client-supplied id."""
    # Creation time, set automatically on first put.
    timestamp = db.DateTimeProperty(auto_now_add = True)
    # Geographic position of the mark.
    geopt = db.GeoPtProperty()
    # Client-supplied category string — presumably the marker type shown
    # in the UI; confirm against the JavaScript client.
    type = db.StringProperty()
    # Client-supplied sub-project name this mark belongs to.
    project = db.StringProperty()
| Python |
# Copyright 2009 Daniel Schubert
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for Flowered user events.
Contains several RequestHandler subclasses used to handle put and get
operations, along with any helper functions. This script is designed to be
run directly as a WSGI application, and within Flowered handles all URLs
under /event.
UpdateHandler: Handles user requests for updated lists of events.
ChatHandler: Handles user chat input events.
MoveHandler: Handles user movement events.
RefreshCache(): Checks the age of the cache, and updates if necessary.
"""
import datetime
import logging
import os
import time
import datamodel
import json
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.runtime.apiproxy_errors import CapabilityDisabledError
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
# The minimum time interval between cache trims, as a timedelta.
sync_interval = datetime.timedelta(0, 10)
# When the caches were last refreshed; initialised one interval in the
# past so the first incremental request triggers a refresh immediately.
last_sync = datetime.datetime.now() - sync_interval
# Recently created marks, appended by AddHandler.
add_cache = []
# Recently moved marks, appended by MoveHandler.
move_cache = []
# Recently deleted marks, appended by DeleteHandler.
remove_cache = []
class UpdateHandler(webapp.RequestHandler):
    """Handles user requests for updated lists of events.

    UpdateHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    "max_longitude", "zoom", and "since" fields ("zoom" is currently
    accepted but unused).
    """

    def get(self):
        """Write JSON add/move/remove events for the client's viewport.

        Reports cache entries that changed after the client's "since"
        timestamp and lie strictly inside the requested bounding box.
        """
        # Parse the requested bounding box.
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        since_param = self.request.get('since')
        if since_param == '':
            since = 0
        else:
            since = float(since_param)
        since_datetime = datetime.datetime.fromtimestamp(since)

        def _in_window(entry):
            # Shared filter for all three caches: newer than the client's
            # last poll and strictly inside the bounding box.
            return (entry.timestamp > since_datetime and
                    min_latitude < entry.geopt.lat < max_latitude and
                    min_longitude < entry.geopt.lon < max_longitude)

        # Only incremental polls (since > 0) trigger a cache refresh; an
        # initial request uses the caches as-is.
        if since > 0:
            RefreshCache()
        # Module globals are only read here, so no `global` declarations
        # are needed.
        add_events = [entry for entry in add_cache if _in_window(entry)]
        move_events = [entry for entry in move_cache if _in_window(entry)]
        remove_events = [entry for entry in remove_cache if _in_window(entry)]
        output = {
            'timestamp': time.time(),
            'adds': add_events,
            'moves': move_events,
            'removes': remove_events,
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output))
class InitialHandler(webapp.RequestHandler):
    """Handles user requests for the initial list of marks.
    InitialHandler only accepts "get" events, sent via web forms. It expects each
    request to include "min_latitude", "min_longitude", "max_latitude",
    and "max_longitude" fields.
    """
    def get(self):
        # Parse the requested bounding box.
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        # Restrict latitude/longitude to restrict bulk downloads.
        #if (max_latitude - min_latitude) > 1:
        #    max_latitude = min_latitude + 1
        #if (max_longitude - min_longitude) > 1:
        #    max_longitude = min_longitude + 1
        # Sync the add cache.
        min_geopt = db.GeoPt(min_latitude, min_longitude)
        max_geopt = db.GeoPt(max_latitude, max_longitude)
        # NOTE(review): this relies on the datastore's ordering of GeoPt
        # values; verify the inequality actually constrains both latitude
        # and longitude rather than only the first component.
        query = datamodel.Mark.gql('WHERE geopt > :min_geopt AND geopt < :max_geopt ',
            min_geopt = min_geopt, max_geopt = max_geopt)
        add_events = query.fetch(1000)
        output = {
            'timestamp': time.time(),
            'adds': add_events
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output));
class MoveHandler(webapp.RequestHandler):
    """Handles user movement events.

    MoveHandler only provides a post method for receiving new mark
    co-ordinates; the updated mark is written to the datastore and
    appended to the module-level move cache.
    """

    def post(self):
        """Update an existing mark's position from the request parameters."""
        global move_cache
        # Get the mark to modify and bail out if it does not exist.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:
            return
        # Update the mark's position and timestamp.
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
            float(self.request.get('longitude')))
        try:
            mark.put()
        except CapabilityDisabledError:
            # Datastore writes are disabled (e.g. maintenance); the cached
            # entry below still lets clients see the move best-effort.
            pass
        # Append to the move cache, so clients see the move before the
        # next cache refresh.
        move_cache.append(mark)
class AddHandler(webapp.RequestHandler):
    """Handles user requests to create a new mark.
    AddHandler only provides a post method; the new mark is stored in the
    datastore and appended to the module-level add cache.
    """
    def post(self):
        global add_cache
        # Create new mark, keyed by the client-supplied id.
        mark = datamodel.Mark(key_name = self.request.get('id'))
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
            float(self.request.get('longitude')))
        mark.type = str(self.request.get('type'))
        mark.project = str(self.request.get('project'))
        # Add mark to datastore.
        try:
            mark.put()
        except CapabilityDisabledError:
            # Datastore writes disabled (e.g. maintenance); the cache
            # append below still serves the mark best-effort.
            pass
        # Append to the add cache, so we don't need to wait on a refresh.
        add_cache.append(mark)
class DeleteHandler(webapp.RequestHandler):
    """Handles user requests to delete an existing mark.

    DeleteHandler only provides a post method; the mark is removed from
    the datastore and appended to the module-level remove cache so
    polling clients learn about the deletion.
    """

    def post(self):
        """Delete the mark named by the request's 'id' parameter."""
        global remove_cache
        # Get the mark to delete and bail out if it does not exist.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:
            return
        # Delete mark from datastore.
        try:
            db.delete(mark)
        except CapabilityDisabledError:
            # Datastore writes are disabled (e.g. maintenance); still
            # report the deletion to clients via the cache below.
            pass
        # Stamp the deletion time so UpdateHandler's since-filter reports
        # it, then append to the remove cache so clients don't have to
        # wait for a refresh.
        mark.timestamp = datetime.datetime.now()
        remove_cache.append(mark)
def RefreshCache():
    """Trim the event caches if they have not been refreshed recently.

    Relies on the globals "sync_interval" and "last_sync" to determine the
    age of the existing caches; when they are stale, each of the
    "add_cache", "move_cache" and "remove_cache" globals is trimmed to its
    100 most recent entries.
    """
    global last_sync
    global add_cache
    global move_cache
    global remove_cache
    now = datetime.datetime.now()
    if last_sync < now - sync_interval:
        last_sync = now
        # Keep only the 100 most recent entries in each cache.
        add_cache = add_cache[-100:]
        move_cache = move_cache[-100:]
        remove_cache = remove_cache[-100:]
def main():
    """Main method called when the script is executed directly.
    This method is called each time the script is launched, and also has the
    effect of enabling caching for global variables.
    """
    # logging.getLogger().setLevel(logging.DEBUG)
    # Route each /event/* URL to its handler class.
    application = webapp.WSGIApplication([
        ('/event/initial', InitialHandler),
        ('/event/add', AddHandler),
        ('/event/move', MoveHandler),
        ('/event/delete', DeleteHandler),
        ('/event/update', UpdateHandler),
    ], debug = True)
    run_wsgi_app(application)
if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main Flowered application.
Contains the MainHandler, which handles root requests to the server, along
with several other template-driven pages that don't have any significant DB
interaction.
MainHandler: Handles requests to /
StandaloneHandler: Handles requests to /standalone
"""
import datetime
import logging
import os
import time
import string
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import datamodel
import json
logging.info('Loading %s, app version = %s',
__name__, os.getenv('CURRENT_VERSION_ID'))
class MainHandler(webapp.RequestHandler):
    """Handles requests to /

    MainHandler handles requests for the server root, presenting the main
    user interface for Flowered. It relies on the flowered.html template,
    with most of the heavy lifting occuring client-side through JavaScript
    linked there.
    """

    def get(self):
        """Render the main map UI from the flowered.html template."""
        template_data = {
            'project_id': 'schwerin',
            'initial_latitude': 53.625706,
            'initial_longitude': 11.416855,
            'initial_zoom': 15,
            'current_version_id' : self.version()
        }
        template_path = os.path.join(os.path.dirname(__file__), 'templates', 'flowered.html')
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))

    def version(self):
        """Return the lower-cased major deployment version, or 'n/a'.

        CURRENT_VERSION_ID has the form "<major>.<minor>"; only the major
        part is exposed to the template.
        """
        current_version = os.getenv('CURRENT_VERSION_ID')
        # str methods replace the deprecated string.split/string.lower.
        version = current_version.split('.')
        if len(version) >= 2:
            return version[0].lower()
        else:
            return 'n/a'
class StandaloneHandler(webapp.RequestHandler):
    """Handles requests to /standalone

    StandaloneHandler presents the Flowered map on its own. It relies on
    the standalone.html template, with most of the heavy lifting occuring
    client-side through JavaScript linked there.
    """

    def get(self):
        """Render the standalone map UI from the standalone.html template."""
        template_data = {
            'project_id': 'schwerin',
            'initial_latitude': 53.625706,
            'initial_longitude': 11.416855,
            'initial_zoom': 15,
        }
        template_path = os.path.join(os.path.dirname(__file__), 'templates', 'standalone.html')
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))
class RedirectHandler(webapp.RequestHandler):
    """Handles requests to /
    RedirectHandler handles requests for the server root and redirects the
    user to the appropriate sub project.
    """
    def get(self):
        # Currently the only sub project is /schwerin.
        self.redirect('/schwerin')
def main():
    """Build the WSGI application and run it."""
    # logging.getLogger().setLevel(logging.DEBUG)
    # Order matters: more specific routes first, catch-all redirect last.
    application = webapp.WSGIApplication([
        ('/schwerin/standalone.*', StandaloneHandler),
        ('/schwerin.*', MainHandler),
        ('/.*', RedirectHandler)
    ], debug = True)
    run_wsgi_app(application)
if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes and methods for use with simplejson and appengine.
Provides both a specialized simplejson encoder, GqlEncoder, designed to simplify
encoding directly from GQL results to JSON. A helper function, encode, is also
provided to further simplify usage.
GqlEncoder: Adds support for GQL results and properties to simplejson.
encode(input): Direct method to encode GQL objects as JSON.
"""
import datetime
import time
import logging
from django.utils import simplejson
from google.appengine.api import users
from google.appengine.ext import db
class GqlEncoder(simplejson.JSONEncoder):
    """Extends JSONEncoder to add support for GQL results and properties.
    Adds support to simplejson JSONEncoders for GQL results and properties by
    overriding JSONEncoder's default method.
    """
    # TODO Improve coverage for all of App Engine's Property types.
    def default(self, obj):
        """Tests the input object, obj, to encode as JSON."""
        # Objects may opt in to custom serialization via a __json__ method.
        if hasattr(obj, '__json__'):
            return getattr(obj, '__json__')()
        if isinstance(obj, db.GqlQuery):
            # Materialize query results; each entity is encoded recursively.
            return list(obj)
        elif isinstance(obj, db.GeoPt):
            # Encode a GeoPt as {'lat': ..., 'lon': ...}.
            output = {}
            fields = ['lat', 'lon']
            for field in fields:
                output[field] = getattr(obj, field)
            return output
        elif isinstance(obj, db.Model):
            # Encode an entity as a dict of its declared properties.
            properties = obj.properties().items()
            output = {}
            for field, value in properties:
                output[field] = getattr(obj, field)
            # map key name to ID field
            key = obj.key()
            if key.has_id_or_name():
                output['id'] = key.id_or_name()
            return output
        elif isinstance(obj, datetime.datetime):
            # output = {}
            # NOTE(review): mktime interprets the datetime in local time
            # and drops microseconds — confirm stored timestamps are meant
            # to be local rather than UTC.
            output = time.mktime(obj.timetuple())
            return output
        elif isinstance(obj, time.struct_time):
            return list(obj)
        # Fall back to the base class, which raises TypeError.
        return simplejson.JSONEncoder.default(self, obj)
def encode(input):
    """Serialize a GQL object or DB property to a JSON string.
    Args:
        input: A GQL object or DB property.
    Returns:
        A JSON string based on the input object.
    Raises:
        TypeError: Typically occurs when an input object contains an
            unsupported type.
    """
    encoder = GqlEncoder()
    return encoder.encode(input)
| Python |
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import db
"""Database models used in the Flowered application.
"""
class Mark(db.Model):
    """A single point marked on the map, keyed by a client-supplied id."""
    # Creation time, set automatically on first put.
    timestamp = db.DateTimeProperty(auto_now_add = True)
    # Geographic position of the mark.
    geopt = db.GeoPtProperty()
    # Client-supplied category string — presumably the marker type shown
    # in the UI; confirm against the JavaScript client.
    type = db.StringProperty()
    # Client-supplied sub-project name this mark belongs to.
    project = db.StringProperty()
| Python |
# Copyright 2009 Daniel Schubert
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for Flowered user events.
Contains several RequestHandler subclasses used to handle put and get
operations, along with any helper functions. This script is designed to be
run directly as a WSGI application, and within Flowered handles all URLs
under /event.
UpdateHandler: Handles user requests for updated lists of events.
ChatHandler: Handles user chat input events.
MoveHandler: Handles user movement events.
RefreshCache(): Checks the age of the cache, and updates if necessary.
"""
import datetime
import logging
import os
import time
import datamodel
import json
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.runtime.apiproxy_errors import CapabilityDisabledError
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
# The minimum time interval between cache trims, as a timedelta.
sync_interval = datetime.timedelta(0, 10)
# When the caches were last refreshed; initialised one interval in the
# past so the first incremental request triggers a refresh immediately.
last_sync = datetime.datetime.now() - sync_interval
# Recently created marks, appended by AddHandler.
add_cache = []
# Recently moved marks, appended by MoveHandler.
move_cache = []
# Recently deleted marks, appended by DeleteHandler.
remove_cache = []
class UpdateHandler(webapp.RequestHandler):
    """Handles user requests for updated lists of events.

    UpdateHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    "max_longitude", "zoom", and "since" fields ("zoom" is currently
    accepted but unused).
    """

    def get(self):
        """Write JSON add/move/remove events for the client's viewport.

        Reports cache entries that changed after the client's "since"
        timestamp and lie strictly inside the requested bounding box.
        """
        # Parse the requested bounding box.
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        since_param = self.request.get('since')
        if since_param == '':
            since = 0
        else:
            since = float(since_param)
        since_datetime = datetime.datetime.fromtimestamp(since)

        def _in_window(entry):
            # Shared filter for all three caches: newer than the client's
            # last poll and strictly inside the bounding box.
            return (entry.timestamp > since_datetime and
                    min_latitude < entry.geopt.lat < max_latitude and
                    min_longitude < entry.geopt.lon < max_longitude)

        # Only incremental polls (since > 0) trigger a cache refresh; an
        # initial request uses the caches as-is.
        if since > 0:
            RefreshCache()
        # Module globals are only read here, so no `global` declarations
        # are needed.
        add_events = [entry for entry in add_cache if _in_window(entry)]
        move_events = [entry for entry in move_cache if _in_window(entry)]
        remove_events = [entry for entry in remove_cache if _in_window(entry)]
        output = {
            'timestamp': time.time(),
            'adds': add_events,
            'moves': move_events,
            'removes': remove_events,
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output))
class InitialHandler(webapp.RequestHandler):
    """Handles user requests for the initial list of marks.
    InitialHandler only accepts "get" events, sent via web forms. It expects each
    request to include "min_latitude", "min_longitude", "max_latitude",
    and "max_longitude" fields.
    """
    def get(self):
        # Parse the requested bounding box.
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        # Restrict latitude/longitude to restrict bulk downloads.
        #if (max_latitude - min_latitude) > 1:
        #    max_latitude = min_latitude + 1
        #if (max_longitude - min_longitude) > 1:
        #    max_longitude = min_longitude + 1
        # Sync the add cache.
        min_geopt = db.GeoPt(min_latitude, min_longitude)
        max_geopt = db.GeoPt(max_latitude, max_longitude)
        # NOTE(review): this relies on the datastore's ordering of GeoPt
        # values; verify the inequality actually constrains both latitude
        # and longitude rather than only the first component.
        query = datamodel.Mark.gql('WHERE geopt > :min_geopt AND geopt < :max_geopt ',
            min_geopt = min_geopt, max_geopt = max_geopt)
        add_events = query.fetch(500)
        output = {
            'timestamp': time.time(),
            'adds': add_events
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output));
class MoveHandler(webapp.RequestHandler):
    """Handles user movement events.

    MoveHandler only provides a post method for receiving new mark
    co-ordinates; the updated mark is written to the datastore and
    appended to the module-level move cache.
    """

    def post(self):
        """Update an existing mark's position from the request parameters."""
        global move_cache
        # Get the mark to modify and bail out if it does not exist.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:
            return
        # Update the mark's position and timestamp.
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
            float(self.request.get('longitude')))
        try:
            mark.put()
        except CapabilityDisabledError:
            # Datastore writes are disabled (e.g. maintenance); the cached
            # entry below still lets clients see the move best-effort.
            pass
        # Append to the move cache, so clients see the move before the
        # next cache refresh.
        move_cache.append(mark)
class AddHandler(webapp.RequestHandler):
    """Handles user requests to create a new mark.
    AddHandler only provides a post method; the new mark is stored in the
    datastore and appended to the module-level add cache.
    """
    def post(self):
        global add_cache
        # Create new mark, keyed by the client-supplied id.
        mark = datamodel.Mark(key_name = self.request.get('id'))
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
            float(self.request.get('longitude')))
        mark.type = str(self.request.get('type'))
        mark.project = str(self.request.get('project'))
        # Add mark to datastore.
        try:
            mark.put()
        except CapabilityDisabledError:
            # Datastore writes disabled (e.g. maintenance); the cache
            # append below still serves the mark best-effort.
            pass
        # Append to the add cache, so we don't need to wait on a refresh.
        add_cache.append(mark)
class DeleteHandler(webapp.RequestHandler):
    """Handles user requests to delete an existing mark.

    DeleteHandler only provides a post method; the mark is removed from
    the datastore and appended to the module-level remove cache so
    polling clients learn about the deletion.
    """

    def post(self):
        """Delete the mark named by the request's 'id' parameter."""
        global remove_cache
        # Get the mark to delete and bail out if it does not exist.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:
            return
        # Delete mark from datastore.
        try:
            db.delete(mark)
        except CapabilityDisabledError:
            # Datastore writes are disabled (e.g. maintenance); still
            # report the deletion to clients via the cache below.
            pass
        # Stamp the deletion time so UpdateHandler's since-filter reports
        # it, then append to the remove cache so clients don't have to
        # wait for a refresh.
        mark.timestamp = datetime.datetime.now()
        remove_cache.append(mark)
def RefreshCache():
    """Trim the event caches if the sync interval has elapsed.

    Relies on the module globals "sync_interval" and "last_sync" to decide
    whether the caches are stale. When they are, the "add_cache",
    "move_cache" and "remove_cache" globals are each trimmed to their 100
    most recent entries.

    NOTE(review): despite the name, nothing is re-read from the datastore
    here -- confirm that is intentional.
    """
    global last_sync
    global add_cache
    global move_cache
    global remove_cache
    now = datetime.datetime.now()
    if last_sync < now - sync_interval:
        last_sync = datetime.datetime.now()
        # Keep only the 100 most recent entries of each cache.
        add_cache = add_cache[-100:]
        move_cache = move_cache[-100:]
        remove_cache = remove_cache[-100:]
def main():
    """Main method called when the script is executed directly.

    Runs on every launch of the script; module-level globals stay cached
    between requests while the runtime keeps this module loaded.
    """
    # logging.getLogger().setLevel(logging.DEBUG)
    routes = [
        ('/event/initial', InitialHandler),
        ('/event/add', AddHandler),
        ('/event/move', MoveHandler),
        ('/event/delete', DeleteHandler),
        ('/event/update', UpdateHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug = True))
# Execute main() when the script is run directly (CGI-style entry point).
if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main Flowered application.
Contains the MainHandler, which handles root requests to the server, along
with several other template-driven pages that don't have any significant DB
interaction.
SchwerinMainHandler: Handles requests to /schwerin
SchwerinStandaloneHandler: Handles requests to /schwerin/standalone
WorldMainHandler: Handles requests to /world
WorldStandaloneHandler: Handles requests to /world/standalone
"""
import datetime
import logging
import os
import time
import string
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import datamodel
import json
logging.info('Loading %s, app version = %s',
__name__, os.getenv('CURRENT_VERSION_ID'))
# Set to true if we want to have our webapp print stack traces, etc
_DEBUG = True
class BaseRequestHandler(webapp.RequestHandler):
    """Base handler providing template rendering for the page handlers.

    render_to_response looks up per-project defaults (map centre, zoom
    level, template file), lets the 'll' and 'z' query parameters override
    them, and renders the chosen template. Most of the heavy lifting occurs
    client-side through JavaScript linked from the templates.
    """
    # The different project output types we support: default map centre
    # as "lat, lon", zoom level and template file name.
    _PROJECT_TYPES = {
        'world': ['52.523405, 13.4114', '15', 'flowered.html'],
        'standalone_world': ['52.523405, 13.4114', '15', 'standalone.html'],
        'schwerin': ['53.625706, 11.416855', '15', 'flowered.html'],
        'standalone_schwerin': ['53.625706, 11.416855', '15', 'standalone.html'],}

    def render_to_response(self, project_name):
        """Render the template for project_name to the HTTP response."""
        # Fall back to the default project for unknown names.
        if project_name not in BaseRequestHandler._PROJECT_TYPES:
            project_name = 'world'
        # Decode project data.
        location, zoom, template_file = \
            BaseRequestHandler._PROJECT_TYPES[project_name]
        # Read location from the 'll' parameter, or use the project default.
        # javascript:void(prompt('',gApplication.getMap().getCenter()))
        initial_location = self.request.get('ll').lower() or location
        initial_latitude, _, initial_longitude = initial_location.partition(",")
        # Read zoom level from the 'z' parameter, or use the project default.
        initial_zoom = self.request.get('z').lower() or zoom
        # Assemble template data.
        template_data = {
            'project_id': project_name,
            'initial_latitude': initial_latitude,
            'initial_longitude': initial_longitude,
            'initial_zoom': initial_zoom,
            'current_version_id' : self.version(),
        }
        # Apply data to site templates.
        template_path = os.path.join(os.path.dirname(__file__), 'templates',
                                     template_file)
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))

    def version(self):
        """Return the lower-cased major app version, or 'n/a'.

        CURRENT_VERSION_ID has the form "major.minor"; assumes the variable
        is set, as it is under App Engine.
        """
        current_version = os.getenv('CURRENT_VERSION_ID')
        version = current_version.split('.')
        if len(version) >= 2:
            return version[0].lower()
        else:
            return 'n/a'
class SchwerinHandler(BaseRequestHandler):
    """Handles requests to /schwerin.

    Renders the 'schwerin' project page via BaseRequestHandler's
    render_to_response (flowered.html template); the heavy lifting occurs
    client-side through the JavaScript linked there.
    """
    def get(self):
        self.render_to_response('schwerin')
class StandaloneSchwerinHandler(BaseRequestHandler):
    """Handles requests to /schwerin/standalone.

    Renders the 'standalone_schwerin' project page via BaseRequestHandler's
    render_to_response (standalone.html template); the heavy lifting occurs
    client-side through the JavaScript linked there.
    """
    def get(self):
        self.render_to_response('standalone_schwerin')
class WorldHandler(BaseRequestHandler):
    """Handles requests to /world.

    Renders the 'world' project page via BaseRequestHandler's
    render_to_response (flowered.html template); the heavy lifting occurs
    client-side through the JavaScript linked there.
    """
    def get(self):
        self.render_to_response('world')
class StandaloneWorldHandler(BaseRequestHandler):
    """Handles requests to /world/standalone.

    Renders the 'standalone_world' project page via BaseRequestHandler's
    render_to_response (standalone.html template); the heavy lifting occurs
    client-side through the JavaScript linked there.
    """
    def get(self):
        self.render_to_response('standalone_world')
class RedirectHandler(webapp.RequestHandler):
    """Handles requests to /.

    Redirects requests for the server root to the default sub-project,
    /world.
    """
    def get(self):
        self.redirect('/world')
def main():
    """Build the page-handler WSGI application and run it."""
    # logging.getLogger().setLevel(logging.DEBUG)
    # More specific patterns must precede the prefix patterns they overlap.
    routes = [
        ('/schwerin/standalone.*', StandaloneSchwerinHandler),
        ('/schwerin.*', SchwerinHandler),
        ('/world/standalone.*', StandaloneWorldHandler),
        ('/world.*', WorldHandler),
        ('/.*', RedirectHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug = _DEBUG))
# Execute main() when the script is run directly (CGI-style entry point).
if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes and methods for use with simplejson and appengine.
Provides both a specialized simplejson encoder, GqlEncoder, designed to simplify
encoding directly from GQL results to JSON. A helper function, encode, is also
provided to further simplify usage.
GqlEncoder: Adds support for GQL results and properties to simplejson.
encode(input): Direct method to encode GQL objects as JSON.
"""
import datetime
import time
import logging
from django.utils import simplejson
from google.appengine.api import users
from google.appengine.ext import db
class GqlEncoder(simplejson.JSONEncoder):
    """JSONEncoder subclass that understands GQL results and properties.

    Overrides JSONEncoder's default() hook so query results, GeoPt values,
    model instances and timestamps can be serialized directly.
    """

    # TODO Improve coverage for all of App Engine's Property types.

    def default(self, obj):
        """Convert obj into a JSON-serializable structure."""
        # An object may supply its own serialization hook.
        if hasattr(obj, '__json__'):
            return getattr(obj, '__json__')()
        if isinstance(obj, db.GqlQuery):
            return list(obj)
        elif isinstance(obj, db.GeoPt):
            return dict((field, getattr(obj, field))
                        for field in ('lat', 'lon'))
        elif isinstance(obj, db.Model):
            result = dict((field, getattr(obj, field))
                          for field in obj.properties())
            # Expose the datastore key's id or name under 'id'.
            key = obj.key()
            if key.has_id_or_name():
                result['id'] = key.id_or_name()
            return result
        elif isinstance(obj, datetime.datetime):
            # Seconds since the epoch.
            return time.mktime(obj.timetuple())
        elif isinstance(obj, time.struct_time):
            return list(obj)
        return simplejson.JSONEncoder.default(self, obj)
def encode(input):
    """Encode an input GQL object as JSON.

    Args:
        input: A GQL object or DB property.

    Returns:
        A JSON string based on the input object.

    Raises:
        TypeError: Typically occurs when an input object contains an
            unsupported type.
    """
    encoder = GqlEncoder()
    return encoder.encode(input)
| Python |
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import db
"""Database models used in the Flowered application.
"""
class Mark(db.Model):
    # Creation time, auto-populated on first put(); event handlers overwrite
    # it on move/delete so cache-freshness filtering sees the change.
    timestamp = db.DateTimeProperty(auto_now_add = True)
    # Geographic position of the mark.
    geopt = db.GeoPtProperty()
    # Client-supplied mark type identifier.
    type = db.StringProperty()
    # Project (map) this mark belongs to, e.g. 'world' or 'schwerin'.
    project = db.StringProperty()
| Python |
# Copyright 2009 Daniel Schubert
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for Flowered user events.
Contains several RequestHandler subclasses used to handle put and get
operations, along with any helper functions. This script is designed to be
run directly as a WSGI application, and within Flowered handles all URLs
under /event.
UpdateHandler: Handles user requests for updated lists of events.
ChatHandler: Handles user chat input events.
MoveHandler: Handles user movement events.
RefreshCache(): Checks the age of the cache, and updates if necessary.
"""
import datetime
import logging
import os
import time
import datamodel
import json
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.runtime.apiproxy_errors import CapabilityDisabledError
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
# The time interval between cache syncs as a timedelta (10 seconds).
sync_interval = datetime.timedelta(0, 10)
# The last time the caches were trimmed; initialised one interval in the
# past so the first request triggers a refresh.
last_sync = datetime.datetime.now() - sync_interval
# A list storing the add cache (recently created marks).
add_cache = []
# A list storing the move cache (recently moved marks).
move_cache = []
# A list storing the delete cache (recently deleted marks).
remove_cache = []
class UpdateHandler(webapp.RequestHandler):
    """Handles user requests for updated lists of events.

    UpdateHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    "max_longitude", "zoom", and "since" fields.
    """

    def _select(self, cache, since_datetime,
                min_latitude, max_latitude, min_longitude, max_longitude):
        """Return cache entries newer than since_datetime and strictly
        inside the given bounding box."""
        return [entry for entry in cache
                if (entry.timestamp > since_datetime and
                    min_latitude < entry.geopt.lat < max_latitude and
                    min_longitude < entry.geopt.lon < max_longitude)]

    def get(self):
        """Write a JSON payload of add/move/remove events since 'since'."""
        global sync_interval
        global last_sync
        global add_cache
        global move_cache
        global remove_cache
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        # zoom = self.request.get('zoom')
        # 'since' is a Unix timestamp; 0 (or missing) means "no updates".
        if self.request.get('since') == '':
            since = 0
        else:
            since = float(self.request.get('since'))
        since_datetime = datetime.datetime.fromtimestamp(since)
        add_events = []
        move_events = []
        remove_events = []
        if since > 0:
            RefreshCache()
            # One shared filter instead of three copy-pasted loops.
            add_events = self._select(
                add_cache, since_datetime,
                min_latitude, max_latitude, min_longitude, max_longitude)
            move_events = self._select(
                move_cache, since_datetime,
                min_latitude, max_latitude, min_longitude, max_longitude)
            remove_events = self._select(
                remove_cache, since_datetime,
                min_latitude, max_latitude, min_longitude, max_longitude)
        output = {
            'timestamp': time.time(),
            'adds': add_events,
            'moves': move_events,
            'removes': remove_events,
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output))
class InitialHandler(webapp.RequestHandler):
    """Handles user requests for the initial set of marks.

    InitialHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    and "max_longitude" fields.
    """

    def get(self):
        bounds = {}
        for name in ('min_latitude', 'min_longitude',
                     'max_latitude', 'max_longitude'):
            bounds[name] = float(self.request.get(name))
        # Query marks whose position lies between the two corner points.
        min_geopt = db.GeoPt(bounds['min_latitude'], bounds['min_longitude'])
        max_geopt = db.GeoPt(bounds['max_latitude'], bounds['max_longitude'])
        query = datamodel.Mark.gql(
            'WHERE geopt > :min_geopt AND geopt < :max_geopt ',
            min_geopt = min_geopt, max_geopt = max_geopt)
        payload = {
            'timestamp': time.time(),
            'adds': query.fetch(500),
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(payload))
class MoveHandler(webapp.RequestHandler):
    """Handles user movement events.

    MoveHandler only provides a post method for receiving new user
    co-ordinates. The updated mark is written back to the datastore and
    also appended straight to the local move cache so clients see the move
    before the next cache refresh.
    """

    def post(self):
        """Update the position/timestamp of the mark named by 'id'."""
        global move_cache
        # Get the mark to modify and return if not exists.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:  # identity test is the idiomatic None check
            return
        # Update current mark's position and timestamp.
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
                              float(self.request.get('longitude')))
        try:
            mark.put()
        except CapabilityDisabledError:
            # Datastore writes are disabled (maintenance); the cached update
            # below still propagates to clients.
            pass
        # Append to the move cache, so we don't need to wait for a refresh.
        move_cache.append(mark)
class AddHandler(webapp.RequestHandler):
    """Creates a new Mark from POSTed id/latitude/longitude/type/project."""

    def post(self):
        global add_cache
        # Assemble the new mark from the request parameters.
        latitude = float(self.request.get('latitude'))
        longitude = float(self.request.get('longitude'))
        mark = datamodel.Mark(key_name = self.request.get('id'))
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(latitude, longitude)
        mark.type = str(self.request.get('type'))
        mark.project = str(self.request.get('project'))
        # Persist the mark, tolerating disabled datastore writes.
        try:
            mark.put()
        except CapabilityDisabledError:
            # fail gracefully here
            pass
        # Publish to the add cache so clients see it before the next refresh.
        add_cache.append(mark)
class DeleteHandler(webapp.RequestHandler):
    """Deletes the Mark named by the POSTed 'id' and records the removal."""

    def post(self):
        """Delete the mark from the datastore and publish the removal."""
        global remove_cache
        # Get the mark to delete and return if not exists.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:  # identity test is the idiomatic None check
            return
        # Delete mark from datastore.
        try:
            db.delete(mark)
        except CapabilityDisabledError:
            # Datastore writes are disabled; the cached removal still
            # propagates to clients.
            pass
        # Stamp the removal time so UpdateHandler's 'since' filter picks it
        # up, then publish to the delete cache.
        mark.timestamp = datetime.datetime.now()
        remove_cache.append(mark)
def RefreshCache():
    """Trim the event caches if the sync interval has elapsed.

    Relies on the module globals "sync_interval" and "last_sync" to decide
    whether the caches are stale. When they are, the "add_cache",
    "move_cache" and "remove_cache" globals are each trimmed to their 100
    most recent entries.

    NOTE(review): despite the name, nothing is re-read from the datastore
    here -- confirm that is intentional.
    """
    global last_sync
    global add_cache
    global move_cache
    global remove_cache
    now = datetime.datetime.now()
    if last_sync < now - sync_interval:
        last_sync = datetime.datetime.now()
        # Keep only the 100 most recent entries of each cache.
        add_cache = add_cache[-100:]
        move_cache = move_cache[-100:]
        remove_cache = remove_cache[-100:]
def main():
    """Main method called when the script is executed directly.

    Runs on every launch of the script; module-level globals stay cached
    between requests while the runtime keeps this module loaded.
    """
    # logging.getLogger().setLevel(logging.DEBUG)
    routes = [
        ('/event/initial', InitialHandler),
        ('/event/add', AddHandler),
        ('/event/move', MoveHandler),
        ('/event/delete', DeleteHandler),
        ('/event/update', UpdateHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug = True))
# Execute main() when the script is run directly (CGI-style entry point).
if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main Flowered application.
Contains the MainHandler, which handles root requests to the server, along
with several other template-driven pages that don't have any significant DB
interaction.
SchwerinMainHandler: Handles requests to /schwerin
SchwerinStandaloneHandler: Handles requests to /schwerin/standalone
WorldMainHandler: Handles requests to /world
WorldStandaloneHandler: Handles requests to /world/standalone
"""
import datetime
import logging
import os
import time
import string
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import datamodel
import json
logging.info('Loading %s, app version = %s',
__name__, os.getenv('CURRENT_VERSION_ID'))
# Set to true if we want to have our webapp print stack traces, etc
_DEBUG = True
class BaseRequestHandler(webapp.RequestHandler):
    """Base handler providing template rendering for the page handlers.

    render_to_response looks up per-project defaults (map centre, zoom
    level, template file), lets the 'll' and 'z' query parameters override
    them, and renders the chosen template. Most of the heavy lifting occurs
    client-side through JavaScript linked from the templates.
    """
    # The different project output types we support: default map centre
    # as "lat, lon", zoom level and template file name.
    _PROJECT_TYPES = {
        'world': ['52.523405, 13.4114', '15', 'flowered.html'],
        'standalone_world': ['52.523405, 13.4114', '15', 'standalone.html'],
        'schwerin': ['53.625706, 11.416855', '15', 'flowered.html'],
        'standalone_schwerin': ['53.625706, 11.416855', '15', 'standalone.html'],}

    def render_to_response(self, project_name):
        """Render the template for project_name to the HTTP response."""
        # Fall back to the default project for unknown names.
        if project_name not in BaseRequestHandler._PROJECT_TYPES:
            project_name = 'world'
        # Decode project data.
        location, zoom, template_file = \
            BaseRequestHandler._PROJECT_TYPES[project_name]
        # Read location from the 'll' parameter, or use the project default.
        # javascript:void(prompt('',gApplication.getMap().getCenter()))
        initial_location = self.request.get('ll').lower() or location
        initial_latitude, _, initial_longitude = initial_location.partition(",")
        # Read zoom level from the 'z' parameter, or use the project default.
        initial_zoom = self.request.get('z').lower() or zoom
        # Assemble template data.
        template_data = {
            'project_id': project_name,
            'initial_latitude': initial_latitude,
            'initial_longitude': initial_longitude,
            'initial_zoom': initial_zoom,
            'current_version_id' : self.version(),
        }
        # Apply data to site templates.
        template_path = os.path.join(os.path.dirname(__file__), 'templates',
                                     template_file)
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))

    def version(self):
        """Return the lower-cased major app version, or 'n/a'.

        CURRENT_VERSION_ID has the form "major.minor"; assumes the variable
        is set, as it is under App Engine.
        """
        current_version = os.getenv('CURRENT_VERSION_ID')
        version = current_version.split('.')
        if len(version) >= 2:
            return version[0].lower()
        else:
            return 'n/a'
class SchwerinHandler(BaseRequestHandler):
    """Handles requests to /schwerin.

    Renders the 'schwerin' project page via BaseRequestHandler's
    render_to_response (flowered.html template); the heavy lifting occurs
    client-side through the JavaScript linked there.
    """
    def get(self):
        self.render_to_response('schwerin')
class StandaloneSchwerinHandler(BaseRequestHandler):
    """Handles requests to /schwerin/standalone.

    Renders the 'standalone_schwerin' project page via BaseRequestHandler's
    render_to_response (standalone.html template); the heavy lifting occurs
    client-side through the JavaScript linked there.
    """
    def get(self):
        self.render_to_response('standalone_schwerin')
class WorldHandler(BaseRequestHandler):
    """Handles requests to /world.

    Renders the 'world' project page via BaseRequestHandler's
    render_to_response (flowered.html template); the heavy lifting occurs
    client-side through the JavaScript linked there.
    """
    def get(self):
        self.render_to_response('world')
class StandaloneWorldHandler(BaseRequestHandler):
    """Handles requests to /world/standalone.

    Renders the 'standalone_world' project page via BaseRequestHandler's
    render_to_response (standalone.html template); the heavy lifting occurs
    client-side through the JavaScript linked there.
    """
    def get(self):
        self.render_to_response('standalone_world')
class RedirectHandler(webapp.RequestHandler):
    """Handles requests to /.

    Redirects requests for the server root to the default sub-project,
    /world.
    """
    def get(self):
        self.redirect('/world')
def main():
    """Build the page-handler WSGI application and run it."""
    # logging.getLogger().setLevel(logging.DEBUG)
    # More specific patterns must precede the prefix patterns they overlap.
    routes = [
        ('/schwerin/standalone.*', StandaloneSchwerinHandler),
        ('/schwerin.*', SchwerinHandler),
        ('/world/standalone.*', StandaloneWorldHandler),
        ('/world.*', WorldHandler),
        ('/.*', RedirectHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug = _DEBUG))
# Execute main() when the script is run directly (CGI-style entry point).
if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes and methods for use with simplejson and appengine.
Provides both a specialized simplejson encoder, GqlEncoder, designed to simplify
encoding directly from GQL results to JSON. A helper function, encode, is also
provided to further simplify usage.
GqlEncoder: Adds support for GQL results and properties to simplejson.
encode(input): Direct method to encode GQL objects as JSON.
"""
import datetime
import time
import logging
from django.utils import simplejson
from google.appengine.api import users
from google.appengine.ext import db
class GqlEncoder(simplejson.JSONEncoder):
    """JSONEncoder subclass that understands GQL results and properties.

    Overrides JSONEncoder's default() hook so query results, GeoPt values,
    model instances and timestamps can be serialized directly.
    """

    # TODO Improve coverage for all of App Engine's Property types.

    def default(self, obj):
        """Convert obj into a JSON-serializable structure."""
        # An object may supply its own serialization hook.
        if hasattr(obj, '__json__'):
            return getattr(obj, '__json__')()
        if isinstance(obj, db.GqlQuery):
            return list(obj)
        elif isinstance(obj, db.GeoPt):
            return dict((field, getattr(obj, field))
                        for field in ('lat', 'lon'))
        elif isinstance(obj, db.Model):
            result = dict((field, getattr(obj, field))
                          for field in obj.properties())
            # Expose the datastore key's id or name under 'id'.
            key = obj.key()
            if key.has_id_or_name():
                result['id'] = key.id_or_name()
            return result
        elif isinstance(obj, datetime.datetime):
            # Seconds since the epoch.
            return time.mktime(obj.timetuple())
        elif isinstance(obj, time.struct_time):
            return list(obj)
        return simplejson.JSONEncoder.default(self, obj)
def encode(input):
    """Encode an input GQL object as JSON.

    Args:
        input: A GQL object or DB property.

    Returns:
        A JSON string based on the input object.

    Raises:
        TypeError: Typically occurs when an input object contains an
            unsupported type.
    """
    encoder = GqlEncoder()
    return encoder.encode(input)
| Python |
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import db
"""Database models used in the Flowered application.
"""
class Mark(db.Model):
    # Creation time, auto-populated on first put(); event handlers overwrite
    # it on move/delete so cache-freshness filtering sees the change.
    timestamp = db.DateTimeProperty(auto_now_add = True)
    # Geographic position of the mark.
    geopt = db.GeoPtProperty()
    # Client-supplied mark type identifier.
    type = db.StringProperty()
    # Project (map) this mark belongs to, e.g. 'world' or 'schwerin'.
    project = db.StringProperty()
| Python |
# Copyright 2009 Daniel Schubert
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handlers for Flowered user events.
Contains several RequestHandler subclasses used to handle put and get
operations, along with any helper functions. This script is designed to be
run directly as a WSGI application, and within Flowered handles all URLs
under /event.
UpdateHandler: Handles user requests for updated lists of events.
ChatHandler: Handles user chat input events.
MoveHandler: Handles user movement events.
RefreshCache(): Checks the age of the cache, and updates if necessary.
"""
import datetime
import logging
import os
import time
import datamodel
import json
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
# The time interval between cache syncs as a timedelta (10 seconds).
sync_interval = datetime.timedelta(0, 10)
# The last time the caches were trimmed; initialised one interval in the
# past so the first request triggers a refresh.
last_sync = datetime.datetime.now() - sync_interval
# A list storing the add cache (recently created marks).
add_cache = []
# A list storing the move cache (recently moved marks).
move_cache = []
# A list storing the delete cache (recently deleted marks).
remove_cache = []
class UpdateHandler(webapp.RequestHandler):
    """Handles user requests for updated lists of events.

    UpdateHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    "max_longitude", "zoom", and "since" fields.
    """

    def _select(self, cache, since_datetime,
                min_latitude, max_latitude, min_longitude, max_longitude):
        """Return cache entries newer than since_datetime and strictly
        inside the given bounding box."""
        return [entry for entry in cache
                if (entry.timestamp > since_datetime and
                    min_latitude < entry.geopt.lat < max_latitude and
                    min_longitude < entry.geopt.lon < max_longitude)]

    def get(self):
        """Write a JSON payload of add/move/remove events since 'since'."""
        global sync_interval
        global last_sync
        global add_cache
        global move_cache
        global remove_cache
        min_latitude = float(self.request.get('min_latitude'))
        min_longitude = float(self.request.get('min_longitude'))
        max_latitude = float(self.request.get('max_latitude'))
        max_longitude = float(self.request.get('max_longitude'))
        # zoom = self.request.get('zoom')
        # 'since' is a Unix timestamp; 0 (or missing) means "no updates".
        if self.request.get('since') == '':
            since = 0
        else:
            since = float(self.request.get('since'))
        since_datetime = datetime.datetime.fromtimestamp(since)
        add_events = []
        move_events = []
        remove_events = []
        if since > 0:
            RefreshCache()
            # One shared filter instead of three copy-pasted loops.
            add_events = self._select(
                add_cache, since_datetime,
                min_latitude, max_latitude, min_longitude, max_longitude)
            move_events = self._select(
                move_cache, since_datetime,
                min_latitude, max_latitude, min_longitude, max_longitude)
            remove_events = self._select(
                remove_cache, since_datetime,
                min_latitude, max_latitude, min_longitude, max_longitude)
        output = {
            'timestamp': time.time(),
            'adds': add_events,
            'moves': move_events,
            'removes': remove_events,
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(output))
class InitialHandler(webapp.RequestHandler):
    """Handles user requests for the initial set of marks.

    InitialHandler only accepts "get" events, sent via web forms. It expects
    each request to include "min_latitude", "min_longitude", "max_latitude",
    and "max_longitude" fields.
    """

    def get(self):
        bounds = {}
        for name in ('min_latitude', 'min_longitude',
                     'max_latitude', 'max_longitude'):
            bounds[name] = float(self.request.get(name))
        # Query marks whose position lies between the two corner points.
        min_geopt = db.GeoPt(bounds['min_latitude'], bounds['min_longitude'])
        max_geopt = db.GeoPt(bounds['max_latitude'], bounds['max_longitude'])
        query = datamodel.Mark.gql(
            'WHERE geopt > :min_geopt AND geopt < :max_geopt ',
            min_geopt = min_geopt, max_geopt = max_geopt)
        payload = {
            'timestamp': time.time(),
            'adds': query.fetch(1000),
        }
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write(json.encode(payload))
class MoveHandler(webapp.RequestHandler):
    """Handles user movement events.

    MoveHandler only provides a post method for receiving new user
    co-ordinates; the updated mark is persisted and appended straight to
    the module-level move cache so pollers see it without waiting for a
    cache refresh.
    """

    def post(self):
        """Update an existing mark's position.

        Expects 'id', 'latitude' and 'longitude' form fields.  Requests
        for unknown mark ids are silently ignored.
        """
        global move_cache
        # Get the mark to modify and return if not exists.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:  # identity check, not == comparison
            return
        # Update current mark's position and timestamp, then persist.
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(self.request.get('latitude')),
                              float(self.request.get('longitude')))
        mark.put()
        # Append to the move cache, so we don't need to wait for a refresh.
        move_cache.append(mark)
class AddHandler(webapp.RequestHandler):
    """Creates a new mark from a posted add event."""

    def post(self):
        """Persist a new mark and publish it to the add cache.

        Expects 'id', 'latitude', 'longitude', 'type' and 'project'
        form fields.
        """
        global add_cache
        request = self.request
        mark = datamodel.Mark(key_name=request.get('id'))
        mark.timestamp = datetime.datetime.now()
        mark.geopt = db.GeoPt(float(request.get('latitude')),
                              float(request.get('longitude')))
        mark.type = str(request.get('type'))
        mark.project = str(request.get('project'))
        mark.put()
        # Publish immediately so pollers see the event before the next
        # cache refresh.
        add_cache.append(mark)
class DeleteHandler(webapp.RequestHandler):
    """Deletes a mark and records the removal for pollers."""

    def post(self):
        """Delete the mark named by the 'id' form field.

        Unknown ids are ignored.  The deleted mark (with a fresh
        timestamp) is appended to the remove cache so pollers learn of
        the removal without waiting for a refresh.
        """
        global remove_cache
        # Get the mark to delete and return if not exists.
        mark = datamodel.Mark.get_by_key_name(self.request.get('id'))
        if mark is None:  # identity check, not == comparison
            return
        # Delete from the datastore first; the in-memory entity remains
        # usable afterwards for the cache entry.
        db.delete(mark)
        mark.timestamp = datetime.datetime.now()
        remove_cache.append(mark)
def RefreshCache():
    """Trim the event caches when the last sync is older than sync_interval.

    Relies on the module globals 'sync_interval' (presumably a timedelta —
    TODO confirm) and 'last_sync' to determine the age of the existing
    caches.  All output goes to the 'add_cache', 'move_cache' and
    'remove_cache' globals.
    """
    global last_sync
    global add_cache
    global move_cache
    global remove_cache
    now = datetime.datetime.now()
    if last_sync < now - sync_interval:
        last_sync = datetime.datetime.now()
        # Keep only the 500 most recent entries of each cache.  Events are
        # appended at the end, so trimming must keep the tail; the previous
        # [:500] slice kept the *oldest* entries, eventually starving
        # pollers of new events (the earlier commented-out code used
        # [-100:], confirming the intent).
        add_cache = add_cache[-500:]
        move_cache = move_cache[-500:]
        remove_cache = remove_cache[-500:]
def main():
    """Entry point: build and run the WSGI application for event requests.

    Called on every request under CGI; module-level globals persist
    between requests on a warm instance, which is what makes the event
    caches effective.
    """
    routes = [
        ('/event/initial', InitialHandler),
        ('/event/add', AddHandler),
        ('/event/move', MoveHandler),
        ('/event/delete', DeleteHandler),
        ('/event/update', UpdateHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug=True))


if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The main Flowered application.
Contains the MainHandler, which handles root requests to the server, along
with several other template-driven pages that don't have any significant DB
interaction.
MainHandler: Handles requests to /
StandaloneHandler: Handles requests to /standalone
"""
import datetime
import logging
import os
import time
import string
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.ext.webapp.util import run_wsgi_app
import datamodel
import json
# Log each module (re)load so instance startups and the deployed version
# are visible in the App Engine logs.
logging.info('Loading %s, app version = %s',
             __name__, os.getenv('CURRENT_VERSION_ID'))
class MainHandler(webapp.RequestHandler):
    """Handles requests to /

    MainHandler handles requests for the server root, presenting the main
    user interface for Flowered.  It relies on the flowered.html template,
    with most of the heavy lifting occurring client-side through
    JavaScript linked there.
    """

    def get(self):
        """Render the map UI, honoring the optional 'searchbox' parameter."""
        # 'searchbox' is passed through to the template as a lowercased
        # string; default to 'false' when absent.
        if self.request.get('searchbox') == '':
            show_searchbox = 'false'
        else:
            show_searchbox = self.request.get('searchbox').lower()
        template_data = {
            'project_id': 'schwerin',
            'initial_latitude': 53.625706,
            'initial_longitude': 11.416855,
            'initial_zoom': 15,
            'show_searchbox': show_searchbox,
            'current_version_id': self.version(),
        }
        template_path = os.path.join(os.path.dirname(__file__), 'flowered.html')
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))

    def version(self):
        """Return the deployed major version, lowercased, or 'n/a'.

        CURRENT_VERSION_ID has the form '<major>.<minor>'; only the major
        part is useful here.
        """
        current_version = os.getenv('CURRENT_VERSION_ID')
        # str methods replace the deprecated string-module functions
        # (string.split / string.lower), which Python 3 removed.
        version = current_version.split('.')
        if len(version) >= 2:
            return version[0].lower()
        else:
            return 'n/a'
class StandaloneHandler(webapp.RequestHandler):
    """Handles requests to /standalone

    StandaloneHandler presents the chrome-less map interface for Flowered.
    It relies on the standalone.html template, with most of the heavy
    lifting occurring client-side through JavaScript linked there.
    """

    def get(self):
        """Render the standalone map page for the fixed Schwerin project."""
        template_data = {
            'project_id': 'schwerin',
            'initial_latitude': 53.625706,
            'initial_longitude': 11.416855,
            'initial_zoom': 15,
        }
        template_path = os.path.join(os.path.dirname(__file__), 'standalone.html')
        self.response.headers['Content-Type'] = 'text/html'
        self.response.out.write(template.render(template_path, template_data))
class RedirectHandler(webapp.RequestHandler):
    """Handles requests to any path the other routes do not claim.

    RedirectHandler does not render anything itself; it simply redirects
    the user to the appropriate sub-project root ('/schwerin').
    """

    def get(self):
        self.redirect('/schwerin')
def main():
    """Entry point: build and run the WSGI application for page requests.

    Route order matters: the more specific standalone path must precede
    the project root, which must precede the catch-all redirect.
    """
    routes = [
        ('/schwerin/standalone.*', StandaloneHandler),
        ('/schwerin.*', MainHandler),
        ('/.*', RedirectHandler),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug=True))


if __name__ == '__main__':
    main()
| Python |
# Copyright 2008 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utility classes and methods for use with simplejson and appengine.
Provides both a specialized simplejson encoder, GqlEncoder, designed to simplify
encoding directly from GQL results to JSON. A helper function, encode, is also
provided to further simplify usage.
GqlEncoder: Adds support for GQL results and properties to simplejson.
encode(input): Direct method to encode GQL objects as JSON.
"""
import datetime
import simplejson
import time
import logging
from google.appengine.api import users
from google.appengine.ext import db
class GqlEncoder(simplejson.JSONEncoder):
    """JSONEncoder that understands GQL results and App Engine properties.

    Overrides JSONEncoder's ``default`` hook so GqlQuery results, GeoPt
    values, Model instances, datetimes and struct_times can be encoded
    directly.
    """

    # TODO Improve coverage for all of App Engine's Property types.

    def default(self, obj):
        """Convert *obj* to a JSON-serializable value."""
        # Objects may opt in to custom serialization via a __json__ hook.
        if hasattr(obj, '__json__'):
            return obj.__json__()
        if isinstance(obj, db.GqlQuery):
            return list(obj)
        if isinstance(obj, db.GeoPt):
            return dict((field, getattr(obj, field)) for field in ('lat', 'lon'))
        if isinstance(obj, db.Model):
            result = dict((name, getattr(obj, name))
                          for name in obj.properties())
            # Map the entity's key name/id to an 'id' field when present.
            key = obj.key()
            if key.has_id_or_name():
                result['id'] = key.id_or_name()
            return result
        if isinstance(obj, datetime.datetime):
            # NOTE(review): mktime interprets naive datetimes as *local*
            # time and drops sub-second precision — confirm that is the
            # intended wire format.
            return time.mktime(obj.timetuple())
        if isinstance(obj, time.struct_time):
            return list(obj)
        return simplejson.JSONEncoder.default(self, obj)
def encode(input):
    """Encode an input GQL object as JSON.

    Args:
      input: A GQL object or DB property.  (The parameter name shadows the
        builtin but is kept for backward compatibility.)

    Returns:
      A JSON string based on the input object.

    Raises:
      TypeError: Typically occurs when an input object contains an
        unsupported type.
    """
    encoder = GqlEncoder()
    return encoder.encode(input)
| Python |
#!/usr/bin/python2.4
#
# Copyright 2007 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''A library that provides a python interface to the Twitter API'''
# Library metadata: maintainer contact and in-development version string.
__author__ = 'dewitt@google.com'
__version__ = '0.6-devel'
import base64
import calendar
import os
import rfc822
import simplejson
import sys
import tempfile
import textwrap
import time
import urllib
import urllib2
import urlparse
try:
from hashlib import md5
except ImportError:
from md5 import md5
# Maximum tweet length enforced by the Twitter service at the time.
CHARACTER_LIMIT = 140
class TwitterError(Exception):
    '''Base class for all Twitter API errors.'''

    @property
    def message(self):
        '''The first positional argument used to construct this error.'''
        return self.args[0]
class Status(object):
    '''A class representing the Status structure used by the twitter API.

    The Status structure exposes the following properties:

      status.created_at
      status.created_at_in_seconds  # read only
      status.favorited
      status.in_reply_to_screen_name
      status.in_reply_to_user_id
      status.in_reply_to_status_id
      status.truncated
      status.source
      status.id
      status.text
      status.relative_created_at  # read only
      status.user
    '''

    def __init__(self,
                 created_at=None,
                 favorited=None,
                 id=None,
                 text=None,
                 user=None,
                 in_reply_to_screen_name=None,
                 in_reply_to_user_id=None,
                 in_reply_to_status_id=None,
                 truncated=None,
                 source=None,
                 now=None):
        '''An object to hold a Twitter status message.

        This class is normally instantiated by the twitter.Api class and
        returned in a sequence.

        Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"

        Args:
          created_at: The time this status message was posted
          favorited: Whether this is a favorite of the authenticated user
          id: The unique id of this status message
          text: The text of this status message
          user: A twitter.User instance representing the person posting
            the message
          in_reply_to_screen_name: Screen name of the user this status
            replies to, if any
          in_reply_to_user_id: Id of the user this status replies to, if any
          in_reply_to_status_id: Id of the status this status replies to,
            if any
          truncated: Whether the service truncated the message text
          source: The client used to post this status
          now: The current time, if the client chooses to set it.
            Defaults to the wall clock time.
        '''
        self.created_at = created_at
        self.favorited = favorited
        self.id = id
        self.text = text
        self.user = user
        self.now = now
        self.in_reply_to_screen_name = in_reply_to_screen_name
        self.in_reply_to_user_id = in_reply_to_user_id
        self.in_reply_to_status_id = in_reply_to_status_id
        self.truncated = truncated
        self.source = source

    def GetCreatedAt(self):
        '''Get the time this status message was posted.'''
        return self._created_at

    def SetCreatedAt(self, created_at):
        '''Set the time this status message was posted.'''
        self._created_at = created_at

    created_at = property(GetCreatedAt, SetCreatedAt,
                          doc='The time this status message was posted.')

    def GetCreatedAtInSeconds(self):
        '''Get the posting time in seconds since the epoch.

        Parses the RFC 2822 created_at string as UTC.
        '''
        return calendar.timegm(rfc822.parsedate(self.created_at))

    created_at_in_seconds = property(GetCreatedAtInSeconds,
                                     doc="The time this status message was "
                                         "posted, in seconds since the epoch")

    def GetFavorited(self):
        '''Get the favorited setting of this status message.'''
        return self._favorited

    def SetFavorited(self, favorited):
        '''Set the favorited state of this status message.'''
        self._favorited = favorited

    favorited = property(GetFavorited, SetFavorited,
                         doc='The favorited state of this status message.')

    def GetId(self):
        '''Get the unique id of this status message.'''
        return self._id

    def SetId(self, id):
        '''Set the unique id of this status message.'''
        self._id = id

    id = property(GetId, SetId,
                  doc='The unique id of this status message.')

    def GetInReplyToScreenName(self):
        '''Get the screen name of the user this status replies to.'''
        return self._in_reply_to_screen_name

    def SetInReplyToScreenName(self, in_reply_to_screen_name):
        '''Set the screen name of the user this status replies to.'''
        self._in_reply_to_screen_name = in_reply_to_screen_name

    in_reply_to_screen_name = property(GetInReplyToScreenName, SetInReplyToScreenName,
                                       doc='')

    def GetInReplyToUserId(self):
        '''Get the id of the user this status replies to.'''
        return self._in_reply_to_user_id

    def SetInReplyToUserId(self, in_reply_to_user_id):
        '''Set the id of the user this status replies to.'''
        self._in_reply_to_user_id = in_reply_to_user_id

    in_reply_to_user_id = property(GetInReplyToUserId, SetInReplyToUserId,
                                   doc='')

    def GetInReplyToStatusId(self):
        '''Get the id of the status this status replies to.'''
        return self._in_reply_to_status_id

    def SetInReplyToStatusId(self, in_reply_to_status_id):
        '''Set the id of the status this status replies to.'''
        self._in_reply_to_status_id = in_reply_to_status_id

    in_reply_to_status_id = property(GetInReplyToStatusId, SetInReplyToStatusId,
                                     doc='')

    def GetTruncated(self):
        '''Get whether the service truncated the message text.'''
        return self._truncated

    def SetTruncated(self, truncated):
        '''Set whether the service truncated the message text.'''
        self._truncated = truncated

    truncated = property(GetTruncated, SetTruncated,
                         doc='')

    def GetSource(self):
        '''Get the client used to post this status.'''
        return self._source

    def SetSource(self, source):
        '''Set the client used to post this status.'''
        self._source = source

    source = property(GetSource, SetSource,
                      doc='')

    def GetText(self):
        '''Get the text of this status message.'''
        return self._text

    def SetText(self, text):
        '''Set the text of this status message.'''
        self._text = text

    text = property(GetText, SetText,
                    doc='The text of this status message')

    def GetRelativeCreatedAt(self):
        '''Get a human readable string representing the posting time.'''
        # The fudge factor widens each bucket slightly so boundary values
        # read naturally ("about a minute ago" just before/after 60s).
        fudge = 1.25
        delta = int(self.now) - int(self.created_at_in_seconds)
        if delta < (1 * fudge):
            return 'about a second ago'
        elif delta < (60 * (1 / fudge)):
            return 'about %d seconds ago' % (delta)
        elif delta < (60 * fudge):
            return 'about a minute ago'
        elif delta < (60 * 60 * (1 / fudge)):
            return 'about %d minutes ago' % (delta / 60)
        elif delta < (60 * 60 * fudge):
            return 'about an hour ago'
        elif delta < (60 * 60 * 24 * (1 / fudge)):
            return 'about %d hours ago' % (delta / (60 * 60))
        elif delta < (60 * 60 * 24 * fudge):
            return 'about a day ago'
        else:
            return 'about %d days ago' % (delta / (60 * 60 * 24))

    relative_created_at = property(GetRelativeCreatedAt,
                                   doc='Get a human readable string representing'
                                       'the posting time')

    def GetUser(self):
        '''Get the twitter.User representing the entity posting this status.'''
        return self._user

    def SetUser(self, user):
        '''Set the twitter.User representing the entity posting this status.'''
        self._user = user

    user = property(GetUser, SetUser,
                    doc='A twitter.User reprenting the entity posting this '
                        'status message')

    def GetNow(self):
        '''Get the wallclock time for this status message.

        Used to calculate relative_created_at.  Lazily defaults to the
        current time on first access when unset.
        '''
        if self._now is None:
            self._now = time.time()
        return self._now

    def SetNow(self, now):
        '''Set the wallclock time for this status message.'''
        self._now = now

    now = property(GetNow, SetNow,
                   doc='The wallclock time for this status instance.')

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        try:
            return other and \
                self.created_at == other.created_at and \
                self.id == other.id and \
                self.text == other.text and \
                self.user == other.user and \
                self.in_reply_to_screen_name == other.in_reply_to_screen_name and \
                self.in_reply_to_user_id == other.in_reply_to_user_id and \
                self.in_reply_to_status_id == other.in_reply_to_status_id and \
                self.truncated == other.truncated and \
                self.favorited == other.favorited and \
                self.source == other.source
        except AttributeError:
            return False

    def __str__(self):
        '''A string representation of this twitter.Status instance.

        The return value is the same as the JSON string representation.
        '''
        return self.AsJsonString()

    def AsJsonString(self):
        '''A JSON string representation of this twitter.Status instance.'''
        return simplejson.dumps(self.AsDict(), sort_keys=True)

    def AsDict(self):
        '''A dict representation of this twitter.Status instance.

        The return value uses the same key names as the JSON representation.

        Return:
          A dict representing this twitter.Status instance
        '''
        data = {}
        if self.created_at:
            data['created_at'] = self.created_at
        # 'favorited' was previously tested twice (truthiness, then
        # "is not None"); a single "is not None" check is equivalent and
        # keeps explicit False values.
        if self.favorited is not None:
            data['favorited'] = self.favorited
        if self.id:
            data['id'] = self.id
        if self.text:
            data['text'] = self.text
        if self.user:
            data['user'] = self.user.AsDict()
        if self.in_reply_to_screen_name:
            data['in_reply_to_screen_name'] = self.in_reply_to_screen_name
        if self.in_reply_to_user_id:
            data['in_reply_to_user_id'] = self.in_reply_to_user_id
        if self.in_reply_to_status_id:
            data['in_reply_to_status_id'] = self.in_reply_to_status_id
        if self.truncated is not None:
            data['truncated'] = self.truncated
        if self.source:
            data['source'] = self.source
        return data

    @staticmethod
    def NewFromJsonDict(data):
        '''Create a new instance based on a JSON dict.

        Args:
          data: A JSON dict, as converted from the JSON in the twitter API

        Returns:
          A twitter.Status instance
        '''
        if 'user' in data:
            user = User.NewFromJsonDict(data['user'])
        else:
            user = None
        return Status(created_at=data.get('created_at', None),
                      favorited=data.get('favorited', None),
                      id=data.get('id', None),
                      text=data.get('text', None),
                      in_reply_to_screen_name=data.get('in_reply_to_screen_name', None),
                      in_reply_to_user_id=data.get('in_reply_to_user_id', None),
                      in_reply_to_status_id=data.get('in_reply_to_status_id', None),
                      truncated=data.get('truncated', None),
                      source=data.get('source', None),
                      user=user)
class User(object):
    '''A class representing the User structure used by the twitter API.

    The User structure exposes the following properties:

      user.id
      user.name
      user.screen_name
      user.location
      user.description
      user.profile_image_url
      user.profile_background_tile
      user.profile_background_image_url
      user.profile_sidebar_fill_color
      user.profile_background_color
      user.profile_link_color
      user.profile_text_color
      user.protected
      user.utc_offset
      user.time_zone
      user.url
      user.status
      user.statuses_count
      user.followers_count
      user.friends_count
      user.favourites_count
    '''

    def __init__(self,
                 id=None,
                 name=None,
                 screen_name=None,
                 location=None,
                 description=None,
                 profile_image_url=None,
                 profile_background_tile=None,
                 profile_background_image_url=None,
                 profile_sidebar_fill_color=None,
                 profile_background_color=None,
                 profile_link_color=None,
                 profile_text_color=None,
                 protected=None,
                 utc_offset=None,
                 time_zone=None,
                 followers_count=None,
                 friends_count=None,
                 statuses_count=None,
                 favourites_count=None,
                 url=None,
                 status=None):
        '''An object to hold a Twitter user.

        This class is normally instantiated by the twitter.Api class and
        returned in a sequence.  All arguments map 1:1 onto the properties
        listed in the class docstring.
        '''
        self.id = id
        self.name = name
        self.screen_name = screen_name
        self.location = location
        self.description = description
        self.profile_image_url = profile_image_url
        self.profile_background_tile = profile_background_tile
        self.profile_background_image_url = profile_background_image_url
        self.profile_sidebar_fill_color = profile_sidebar_fill_color
        self.profile_background_color = profile_background_color
        self.profile_link_color = profile_link_color
        self.profile_text_color = profile_text_color
        self.protected = protected
        self.utc_offset = utc_offset
        self.time_zone = time_zone
        self.followers_count = followers_count
        self.friends_count = friends_count
        self.statuses_count = statuses_count
        self.favourites_count = favourites_count
        self.url = url
        self.status = status

    def GetId(self):
        '''Get the unique id of this user.'''
        return self._id

    def SetId(self, id):
        '''Set the unique id of this user.'''
        self._id = id

    id = property(GetId, SetId,
                  doc='The unique id of this user.')

    def GetName(self):
        '''Get the real name of this user.'''
        return self._name

    def SetName(self, name):
        '''Set the real name of this user.'''
        self._name = name

    name = property(GetName, SetName,
                    doc='The real name of this user.')

    def GetScreenName(self):
        '''Get the short username of this user.'''
        return self._screen_name

    def SetScreenName(self, screen_name):
        '''Set the short username of this user.'''
        self._screen_name = screen_name

    screen_name = property(GetScreenName, SetScreenName,
                           doc='The short username of this user.')

    def GetLocation(self):
        '''Get the geographic location of this user.'''
        return self._location

    def SetLocation(self, location):
        '''Set the geographic location of this user.'''
        self._location = location

    location = property(GetLocation, SetLocation,
                        doc='The geographic location of this user.')

    def GetDescription(self):
        '''Get the short text description of this user.'''
        return self._description

    def SetDescription(self, description):
        '''Set the short text description of this user.'''
        self._description = description

    description = property(GetDescription, SetDescription,
                           doc='The short text description of this user.')

    def GetUrl(self):
        '''Get the homepage url of this user.'''
        return self._url

    def SetUrl(self, url):
        '''Set the homepage url of this user.'''
        self._url = url

    url = property(GetUrl, SetUrl,
                   doc='The homepage url of this user.')

    def GetProfileImageUrl(self):
        '''Get the url of the thumbnail of this user.'''
        return self._profile_image_url

    def SetProfileImageUrl(self, profile_image_url):
        '''Set the url of the thumbnail of this user.'''
        self._profile_image_url = profile_image_url

    profile_image_url = property(GetProfileImageUrl, SetProfileImageUrl,
                                 doc='The url of the thumbnail of this user.')

    def GetProfileBackgroundTile(self):
        '''Boolean for whether to tile the profile background image.

        Returns True if the background is to be tiled, False if not,
        None if unset.
        '''
        return self._profile_background_tile

    def SetProfileBackgroundTile(self, profile_background_tile):
        '''Set the boolean flag for whether to tile the background image.'''
        self._profile_background_tile = profile_background_tile

    profile_background_tile = property(GetProfileBackgroundTile, SetProfileBackgroundTile,
                                       doc='Boolean for whether to tile the background image.')

    def GetProfileBackgroundImageUrl(self):
        '''Get the url of the profile background of this user.'''
        return self._profile_background_image_url

    def SetProfileBackgroundImageUrl(self, profile_background_image_url):
        '''Set the url of the profile background of this user.'''
        self._profile_background_image_url = profile_background_image_url

    profile_background_image_url = property(GetProfileBackgroundImageUrl, SetProfileBackgroundImageUrl,
                                            doc='The url of the profile background of this user.')

    def GetProfileSidebarFillColor(self):
        '''Get the sidebar fill color of this user's profile.'''
        return self._profile_sidebar_fill_color

    def SetProfileSidebarFillColor(self, profile_sidebar_fill_color):
        '''Set the sidebar fill color of this user's profile.'''
        self._profile_sidebar_fill_color = profile_sidebar_fill_color

    profile_sidebar_fill_color = property(GetProfileSidebarFillColor, SetProfileSidebarFillColor)

    def GetProfileBackgroundColor(self):
        '''Get the background color of this user's profile.'''
        return self._profile_background_color

    def SetProfileBackgroundColor(self, profile_background_color):
        '''Set the background color of this user's profile.'''
        self._profile_background_color = profile_background_color

    profile_background_color = property(GetProfileBackgroundColor, SetProfileBackgroundColor)

    def GetProfileLinkColor(self):
        '''Get the link color of this user's profile.'''
        return self._profile_link_color

    def SetProfileLinkColor(self, profile_link_color):
        '''Set the link color of this user's profile.'''
        self._profile_link_color = profile_link_color

    profile_link_color = property(GetProfileLinkColor, SetProfileLinkColor)

    def GetProfileTextColor(self):
        '''Get the text color of this user's profile.'''
        return self._profile_text_color

    def SetProfileTextColor(self, profile_text_color):
        '''Set the text color of this user's profile.'''
        self._profile_text_color = profile_text_color

    profile_text_color = property(GetProfileTextColor, SetProfileTextColor)

    def GetProtected(self):
        '''Get whether this user's updates are protected.'''
        return self._protected

    def SetProtected(self, protected):
        '''Set whether this user's updates are protected.'''
        self._protected = protected

    protected = property(GetProtected, SetProtected)

    def GetUtcOffset(self):
        '''Get this user's UTC offset in seconds.'''
        return self._utc_offset

    def SetUtcOffset(self, utc_offset):
        '''Set this user's UTC offset in seconds.'''
        self._utc_offset = utc_offset

    utc_offset = property(GetUtcOffset, SetUtcOffset)

    def GetTimeZone(self):
        '''Returns the current time zone string for the user.'''
        return self._time_zone

    def SetTimeZone(self, time_zone):
        '''Sets the user's time zone string.'''
        self._time_zone = time_zone

    time_zone = property(GetTimeZone, SetTimeZone)

    def GetStatus(self):
        '''Get the latest twitter.Status of this user.'''
        return self._status

    def SetStatus(self, status):
        '''Set the latest twitter.Status of this user.'''
        self._status = status

    status = property(GetStatus, SetStatus,
                      doc='The latest twitter.Status of this user.')

    def GetFriendsCount(self):
        '''Get the number of users this user has befriended.'''
        return self._friends_count

    def SetFriendsCount(self, count):
        '''Set the number of users this user has befriended.'''
        self._friends_count = count

    friends_count = property(GetFriendsCount, SetFriendsCount,
                             doc='The number of friends for this user.')

    def GetFollowersCount(self):
        '''Get the number of users following this user.'''
        return self._followers_count

    def SetFollowersCount(self, count):
        '''Set the number of users following this user.'''
        self._followers_count = count

    followers_count = property(GetFollowersCount, SetFollowersCount,
                               doc='The number of users following this user.')

    def GetStatusesCount(self):
        '''Get the number of status updates for this user.'''
        return self._statuses_count

    def SetStatusesCount(self, count):
        '''Set the number of status updates for this user.'''
        self._statuses_count = count

    statuses_count = property(GetStatusesCount, SetStatusesCount,
                              doc='The number of updates for this user.')

    def GetFavouritesCount(self):
        '''Get the number of favourites for this user.'''
        return self._favourites_count

    def SetFavouritesCount(self, count):
        '''Set the number of favourites for this user.'''
        self._favourites_count = count

    favourites_count = property(GetFavouritesCount, SetFavouritesCount,
                                doc='The number of favourites for this user.')

    def __ne__(self, other):
        return not self.__eq__(other)

    def __eq__(self, other):
        try:
            return other and \
                self.id == other.id and \
                self.name == other.name and \
                self.screen_name == other.screen_name and \
                self.location == other.location and \
                self.description == other.description and \
                self.profile_image_url == other.profile_image_url and \
                self.profile_background_tile == other.profile_background_tile and \
                self.profile_background_image_url == other.profile_background_image_url and \
                self.profile_sidebar_fill_color == other.profile_sidebar_fill_color and \
                self.profile_background_color == other.profile_background_color and \
                self.profile_link_color == other.profile_link_color and \
                self.profile_text_color == other.profile_text_color and \
                self.protected == other.protected and \
                self.utc_offset == other.utc_offset and \
                self.time_zone == other.time_zone and \
                self.url == other.url and \
                self.statuses_count == other.statuses_count and \
                self.followers_count == other.followers_count and \
                self.favourites_count == other.favourites_count and \
                self.friends_count == other.friends_count and \
                self.status == other.status
        except AttributeError:
            return False

    def __str__(self):
        '''A string representation of this twitter.User instance.

        The return value is the same as the JSON string representation.
        '''
        return self.AsJsonString()

    def AsJsonString(self):
        '''A JSON string representation of this twitter.User instance.'''
        return simplejson.dumps(self.AsDict(), sort_keys=True)

    def AsDict(self):
        '''A dict representation of this twitter.User instance.

        The return value uses the same key names as the JSON representation.

        Return:
          A dict representing this twitter.User instance
        '''
        data = {}
        if self.id:
            data['id'] = self.id
        if self.name:
            data['name'] = self.name
        if self.screen_name:
            data['screen_name'] = self.screen_name
        if self.location:
            data['location'] = self.location
        if self.description:
            data['description'] = self.description
        if self.profile_image_url:
            data['profile_image_url'] = self.profile_image_url
        if self.profile_background_tile is not None:
            data['profile_background_tile'] = self.profile_background_tile
        # Fixed copy-paste bug: the background image url was previously
        # emitted under the 'profile_sidebar_fill_color' key, and the real
        # sidebar fill color was never emitted at all.  Both now round-trip
        # with NewFromJsonDict.
        if self.profile_background_image_url:
            data['profile_background_image_url'] = self.profile_background_image_url
        if self.profile_sidebar_fill_color:
            data['profile_sidebar_fill_color'] = self.profile_sidebar_fill_color
        if self.profile_background_color:
            data['profile_background_color'] = self.profile_background_color
        if self.profile_link_color:
            data['profile_link_color'] = self.profile_link_color
        if self.profile_text_color:
            data['profile_text_color'] = self.profile_text_color
        if self.protected is not None:
            data['protected'] = self.protected
        # NOTE(review): a zero utc_offset (UTC itself) is dropped by this
        # truthiness test — confirm whether that is intended.
        if self.utc_offset:
            data['utc_offset'] = self.utc_offset
        if self.time_zone:
            data['time_zone'] = self.time_zone
        if self.url:
            data['url'] = self.url
        if self.status:
            data['status'] = self.status.AsDict()
        if self.friends_count:
            data['friends_count'] = self.friends_count
        if self.followers_count:
            data['followers_count'] = self.followers_count
        if self.statuses_count:
            data['statuses_count'] = self.statuses_count
        if self.favourites_count:
            data['favourites_count'] = self.favourites_count
        return data

    @staticmethod
    def NewFromJsonDict(data):
        '''Create a new instance based on a JSON dict.

        Args:
          data: A JSON dict, as converted from the JSON in the twitter API

        Returns:
          A twitter.User instance
        '''
        if 'status' in data:
            status = Status.NewFromJsonDict(data['status'])
        else:
            status = None
        return User(id=data.get('id', None),
                    name=data.get('name', None),
                    screen_name=data.get('screen_name', None),
                    location=data.get('location', None),
                    description=data.get('description', None),
                    statuses_count=data.get('statuses_count', None),
                    followers_count=data.get('followers_count', None),
                    favourites_count=data.get('favourites_count', None),
                    friends_count=data.get('friends_count', None),
                    profile_image_url=data.get('profile_image_url', None),
                    profile_background_tile=data.get('profile_background_tile', None),
                    profile_background_image_url=data.get('profile_background_image_url', None),
                    profile_sidebar_fill_color=data.get('profile_sidebar_fill_color', None),
                    profile_background_color=data.get('profile_background_color', None),
                    profile_link_color=data.get('profile_link_color', None),
                    profile_text_color=data.get('profile_text_color', None),
                    protected=data.get('protected', None),
                    utc_offset=data.get('utc_offset', None),
                    time_zone=data.get('time_zone', None),
                    url=data.get('url', None),
                    status=status)
class DirectMessage(object):
  '''A class representing the DirectMessage structure used by the twitter API.
  The DirectMessage structure exposes the following properties:
    direct_message.id
    direct_message.created_at
    direct_message.created_at_in_seconds # read only
    direct_message.sender_id
    direct_message.sender_screen_name
    direct_message.recipient_id
    direct_message.recipient_screen_name
    direct_message.text
  '''
  # Attributes compared by __eq__ and exported by AsDict.
  _FIELDS = ('id', 'created_at', 'sender_id', 'sender_screen_name',
             'recipient_id', 'recipient_screen_name', 'text')
  def __init__(self,
               id=None,
               created_at=None,
               sender_id=None,
               sender_screen_name=None,
               recipient_id=None,
               recipient_screen_name=None,
               text=None):
    '''An object to hold a Twitter direct message.
    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.
    Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"
    Args:
      id: The unique id of this direct message
      created_at: The time this direct message was posted
      sender_id: The id of the twitter user that sent this message
      sender_screen_name: The name of the twitter user that sent this message
      recipient_id: The id of the twitter user that received this message
      recipient_screen_name: The name of the twitter user that received this message
      text: The text of this direct message
    '''
    # Assignments route through the property setters below.
    self.id = id
    self.created_at = created_at
    self.sender_id = sender_id
    self.sender_screen_name = sender_screen_name
    self.recipient_id = recipient_id
    self.recipient_screen_name = recipient_screen_name
    self.text = text
  def GetId(self):
    '''Return the unique id of this direct message.'''
    return self._id
  def SetId(self, id):
    '''Store the unique id of this direct message.'''
    self._id = id
  id = property(GetId, SetId,
                doc='The unique id of this direct message.')
  def GetCreatedAt(self):
    '''Return the time this direct message was posted.'''
    return self._created_at
  def SetCreatedAt(self, created_at):
    '''Store the time this direct message was posted.'''
    self._created_at = created_at
  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The time this direct message was posted.')
  def GetCreatedAtInSeconds(self):
    '''Return the posting time as seconds since the epoch.'''
    return calendar.timegm(rfc822.parsedate(self.created_at))
  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc="The time this direct message was "
                                       "posted, in seconds since the epoch")
  def GetSenderId(self):
    '''Return the unique sender id of this direct message.'''
    return self._sender_id
  def SetSenderId(self, sender_id):
    '''Store the unique sender id of this direct message.'''
    self._sender_id = sender_id
  sender_id = property(GetSenderId, SetSenderId,
                       doc='The unique sender id of this direct message.')
  def GetSenderScreenName(self):
    '''Return the sender screen name of this direct message.'''
    return self._sender_screen_name
  def SetSenderScreenName(self, sender_screen_name):
    '''Store the sender screen name of this direct message.'''
    self._sender_screen_name = sender_screen_name
  sender_screen_name = property(GetSenderScreenName, SetSenderScreenName,
                                doc='The unique sender screen name of this direct message.')
  def GetRecipientId(self):
    '''Return the unique recipient id of this direct message.'''
    return self._recipient_id
  def SetRecipientId(self, recipient_id):
    '''Store the unique recipient id of this direct message.'''
    self._recipient_id = recipient_id
  recipient_id = property(GetRecipientId, SetRecipientId,
                          doc='The unique recipient id of this direct message.')
  def GetRecipientScreenName(self):
    '''Return the recipient screen name of this direct message.'''
    return self._recipient_screen_name
  def SetRecipientScreenName(self, recipient_screen_name):
    '''Store the recipient screen name of this direct message.'''
    self._recipient_screen_name = recipient_screen_name
  recipient_screen_name = property(GetRecipientScreenName, SetRecipientScreenName,
                                   doc='The unique recipient screen name of this direct message.')
  def GetText(self):
    '''Return the text of this direct message.'''
    return self._text
  def SetText(self, text):
    '''Store the text of this direct message.'''
    self._text = text
  text = property(GetText, SetText,
                  doc='The text of this direct message')
  def __ne__(self, other):
    return not self.__eq__(other)
  def __eq__(self, other):
    try:
      # Preserve the historical behavior: a falsy `other` is returned
      # unchanged rather than coerced to a bool.
      if not other:
        return other
      for attr in DirectMessage._FIELDS:
        if getattr(self, attr) != getattr(other, attr):
          return False
      return True
    except AttributeError:
      # `other` is missing one of the compared attributes.
      return False
  def __str__(self):
    '''Return the JSON string representation (same as AsJsonString).'''
    return self.AsJsonString()
  def AsJsonString(self):
    '''Return this twitter.DirectMessage serialized as a JSON string.'''
    return simplejson.dumps(self.AsDict(), sort_keys=True)
  def AsDict(self):
    '''A dict representation of this twitter.DirectMessage instance.
    The return value uses the same key names as the JSON representation.
    Only attributes with truthy values are included, mirroring the wire
    format.
    Return:
      A dict representing this twitter.DirectMessage instance
    '''
    data = {}
    for attr in DirectMessage._FIELDS:
      value = getattr(self, attr)
      if value:
        data[attr] = value
    return data
  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.
    Args:
      data: A JSON dict, as converted from the JSON in the twitter API
    Returns:
      A twitter.DirectMessage instance
    '''
    return DirectMessage(id=data.get('id', None),
                         created_at=data.get('created_at', None),
                         sender_id=data.get('sender_id', None),
                         sender_screen_name=data.get('sender_screen_name', None),
                         recipient_id=data.get('recipient_id', None),
                         recipient_screen_name=data.get('recipient_screen_name', None),
                         text=data.get('text', None))
class Api(object):
  '''A python interface into the Twitter API
  By default, the Api caches results for 1 minute.
  Example usage:
    To create an instance of the twitter.Api class, with no authentication:
      >>> import twitter
      >>> api = twitter.Api()
    To fetch the most recently posted public twitter status messages:
      >>> statuses = api.GetPublicTimeline()
      >>> print [s.user.name for s in statuses]
      [u'DeWitt', u'Kesuke Miyagi', u'ev', u'Buzz Andersen', u'Biz Stone'] #...
    To fetch a single user's public status messages, where "user" is either
    a Twitter "short name" or their user id.
      >>> statuses = api.GetUserTimeline(user)
      >>> print [s.text for s in statuses]
    To use authentication, instantiate the twitter.Api class with a
    username and password:
      >>> api = twitter.Api(username='twitter user', password='twitter pass')
    To fetch your friends (after being authenticated):
      >>> users = api.GetFriends()
      >>> print [u.name for u in users]
    To post a twitter status message (after being authenticated):
      >>> status = api.PostUpdate('I love python-twitter!')
      >>> print status.text
      I love python-twitter!
    There are many other methods, including:
      >>> api.PostUpdates(status)
      >>> api.PostDirectMessage(user, text)
      >>> api.GetUser(user)
      >>> api.GetReplies()
      >>> api.GetUserTimeline(user)
      >>> api.GetStatus(id)
      >>> api.DestroyStatus(id)
      >>> api.GetFriendsTimeline(user)
      >>> api.GetFriends(user)
      >>> api.GetFollowers()
      >>> api.GetFeatured()
      >>> api.GetDirectMessages()
      >>> api.PostDirectMessage(user, text)
      >>> api.DestroyDirectMessage(id)
      >>> api.DestroyFriendship(user)
      >>> api.CreateFriendship(user)
      >>> api.GetUserByEmail(email)
  '''
  DEFAULT_CACHE_TIMEOUT = 60 # cache for 1 minute
  _API_REALM = 'Twitter API'
  def __init__(self,
               username=None,
               password=None,
               input_encoding=None,
               request_headers=None):
    '''Instantiate a new twitter.Api object.
    Args:
      username: The username of the twitter account. [optional]
      password: The password for the twitter account. [optional]
      input_encoding: The encoding used to encode input strings. [optional]
      request_header: A dictionary of additional HTTP request headers. [optional]
    '''
    # On App Engine the filesystem is read-only, so disable the file
    # cache there; everywhere else cache responses on disk.
    try:
      import google.appengine.api
      self._cache = None
    except ImportError:
      self._cache = _FileCache()
    self._urllib = urllib2
    self._cache_timeout = Api.DEFAULT_CACHE_TIMEOUT
    self._InitializeRequestHeaders(request_headers)
    self._InitializeUserAgent()
    self._InitializeDefaultParameters()
    self._input_encoding = input_encoding
    self.SetCredentials(username, password)
  def GetPublicTimeline(self, since_id=None):
    '''Fetch the sequence of public twitter.Status message for all users.
    Args:
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]
    Returns:
      An sequence of twitter.Status instances, one for each message
    '''
    parameters = {}
    if since_id:
      parameters['since_id'] = since_id
    url = 'http://twitter.com/statuses/public_timeline.json'
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [Status.NewFromJsonDict(x) for x in data]
  def GetFriendsTimeline(self,
                         user=None,
                         count=None,
                         since=None,
                         since_id=None):
    '''Fetch the sequence of twitter.Status messages for a user's friends
    The twitter.Api instance must be authenticated if the user is private.
    Args:
      user:
        Specifies the ID or screen name of the user for whom to return
        the friends_timeline.  If unspecified, the username and password
        must be set in the twitter.Api instance.  [Optional]
      count:
        Specifies the number of statuses to retrieve. May not be
        greater than 200. [Optional]
      since:
        Narrows the returned results to just those statuses created
        after the specified HTTP-formatted date. [Optional]
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]
    Returns:
      A sequence of twitter.Status instances, one for each message
    '''
    if user:
      url = 'http://twitter.com/statuses/friends_timeline/%s.json' % user
    elif not user and not self._username:
      raise TwitterError("User must be specified if API is not authenticated.")
    else:
      url = 'http://twitter.com/statuses/friends_timeline.json'
    parameters = {}
    if count is not None:
      try:
        if int(count) > 200:
          raise TwitterError("'count' may not be greater than 200")
      # Also catch TypeError so non-numeric types (e.g. a list) raise a
      # TwitterError instead of leaking a TypeError to the caller.
      except (ValueError, TypeError):
        raise TwitterError("'count' must be an integer")
      parameters['count'] = count
    if since:
      parameters['since'] = since
    if since_id:
      parameters['since_id'] = since_id
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [Status.NewFromJsonDict(x) for x in data]
  def GetUserTimeline(self, user=None, count=None, since=None, since_id=None):
    '''Fetch the sequence of public twitter.Status messages for a single user.
    The twitter.Api instance must be authenticated if the user is private.
    Args:
      user:
        either the username (short_name) or id of the user to retrieve.  If
        not specified, then the current authenticated user is used. [optional]
      count: the number of status messages to retrieve [optional]
      since:
        Narrows the returned results to just those statuses created
        after the specified HTTP-formatted date. [optional]
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]
    Returns:
      A sequence of twitter.Status instances, one for each message up to count
    '''
    try:
      if count:
        int(count)
    # Narrowed from a bare `except:`, which also swallowed SystemExit
    # and KeyboardInterrupt.
    except (ValueError, TypeError):
      raise TwitterError("Count must be an integer")
    parameters = {}
    if count:
      parameters['count'] = count
    if since:
      parameters['since'] = since
    if since_id:
      parameters['since_id'] = since_id
    if user:
      url = 'http://twitter.com/statuses/user_timeline/%s.json' % user
    elif not user and not self._username:
      raise TwitterError("User must be specified if API is not authenticated.")
    else:
      url = 'http://twitter.com/statuses/user_timeline.json'
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [Status.NewFromJsonDict(x) for x in data]
  def GetStatus(self, id):
    '''Returns a single status message.
    The twitter.Api instance must be authenticated if the status message is private.
    Args:
      id: The numerical ID of the status you're trying to retrieve.
    Returns:
      A twitter.Status instance representing that status message
    '''
    try:
      if id:
        int(id)
    # Narrowed from a bare `except:` (see GetUserTimeline).
    except (ValueError, TypeError):
      raise TwitterError("id must be an integer")
    url = 'http://twitter.com/statuses/show/%s.json' % id
    json = self._FetchUrl(url)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return Status.NewFromJsonDict(data)
  def DestroyStatus(self, id):
    '''Destroys the status specified by the required ID parameter.
    The twitter.Api instance must be authenticated and the
    authenticating user must be the author of the specified status.
    Args:
      id: The numerical ID of the status you're trying to destroy.
    Returns:
      A twitter.Status instance representing the destroyed status message
    '''
    try:
      if id:
        int(id)
    # Narrowed from a bare `except:` (see GetUserTimeline).
    except (ValueError, TypeError):
      raise TwitterError("id must be an integer")
    url = 'http://twitter.com/statuses/destroy/%s.json' % id
    # An empty post_data dict forces a POST request.
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return Status.NewFromJsonDict(data)
  def PostUpdate(self, status, in_reply_to_status_id=None):
    '''Post a twitter status message from the authenticated user.
    The twitter.Api instance must be authenticated.
    Args:
      status:
        The message text to be posted.  Must be less than or equal to
        140 characters.
      in_reply_to_status_id:
        The ID of an existing status that the status to be posted is
        in reply to.  This implicitly sets the in_reply_to_user_id
        attribute of the resulting status to the user ID of the
        message being replied to.  Invalid/missing status IDs will be
        ignored. [Optional]
    Returns:
      A twitter.Status instance representing the message posted.
    '''
    if not self._username:
      raise TwitterError("The twitter.Api instance must be authenticated.")
    url = 'http://twitter.com/statuses/update.json'
    if len(status) > CHARACTER_LIMIT:
      raise TwitterError("Text must be less than or equal to %d characters. "
                         "Consider using PostUpdates." % CHARACTER_LIMIT)
    data = {'status': status}
    if in_reply_to_status_id:
      data['in_reply_to_status_id'] = in_reply_to_status_id
    json = self._FetchUrl(url, post_data=data)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return Status.NewFromJsonDict(data)
  def PostUpdates(self, status, continuation=None, **kwargs):
    '''Post one or more twitter status messages from the authenticated user.
    Unlike api.PostUpdate, this method will post multiple status updates
    if the message is longer than 140 characters.
    The twitter.Api instance must be authenticated.
    Args:
      status:
        The message text to be posted.  May be longer than 140 characters.
      continuation:
        The character string, if any, to be appended to all but the
        last message.  Note that Twitter strips trailing '...' strings
        from messages.  Consider using the unicode \u2026 character
        (horizontal ellipsis) instead. [Defaults to None]
      **kwargs:
        See api.PostUpdate for a list of accepted parameters.
    Returns:
      A of list twitter.Status instance representing the messages posted.
    '''
    results = list()
    if continuation is None:
      continuation = ''
    # Reserve room for the continuation marker on every chunk.
    line_length = CHARACTER_LIMIT - len(continuation)
    lines = textwrap.wrap(status, line_length)
    for line in lines[0:-1]:
      results.append(self.PostUpdate(line + continuation, **kwargs))
    # The final chunk carries no continuation marker.
    results.append(self.PostUpdate(lines[-1], **kwargs))
    return results
  def GetReplies(self, since=None, since_id=None, page=None):
    '''Get a sequence of status messages representing the 20 most recent
    replies (status updates prefixed with @username) to the authenticating
    user.
    Args:
      page:
        Specifies the page of results to retrieve. [Optional]
      since:
        Narrows the returned results to just those statuses created
        after the specified HTTP-formatted date. [optional]
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]
    Returns:
      A sequence of twitter.Status instances, one for each reply to the user.
    '''
    url = 'http://twitter.com/statuses/replies.json'
    if not self._username:
      raise TwitterError("The twitter.Api instance must be authenticated.")
    parameters = {}
    if since:
      parameters['since'] = since
    if since_id:
      parameters['since_id'] = since_id
    if page:
      parameters['page'] = page
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [Status.NewFromJsonDict(x) for x in data]
  def GetFriends(self, user=None, page=None):
    '''Fetch the sequence of twitter.User instances, one for each friend.
    Args:
      user: the username or id of the user whose friends you are fetching.  If
      not specified, defaults to the authenticated user. [optional]
    The twitter.Api instance must be authenticated.
    Returns:
      A sequence of twitter.User instances, one for each friend
    '''
    if not self._username:
      raise TwitterError("twitter.Api instance must be authenticated")
    if user:
      url = 'http://twitter.com/statuses/friends/%s.json' % user
    else:
      url = 'http://twitter.com/statuses/friends.json'
    parameters = {}
    if page:
      parameters['page'] = page
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [User.NewFromJsonDict(x) for x in data]
  def GetFollowers(self, page=None):
    '''Fetch the sequence of twitter.User instances, one for each follower
    The twitter.Api instance must be authenticated.
    Returns:
      A sequence of twitter.User instances, one for each follower
    '''
    if not self._username:
      raise TwitterError("twitter.Api instance must be authenticated")
    url = 'http://twitter.com/statuses/followers.json'
    parameters = {}
    if page:
      parameters['page'] = page
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [User.NewFromJsonDict(x) for x in data]
  def GetFeatured(self):
    '''Fetch the sequence of twitter.User instances featured on twitter.com
    The twitter.Api instance must be authenticated.
    Returns:
      A sequence of twitter.User instances
    '''
    url = 'http://twitter.com/statuses/featured.json'
    json = self._FetchUrl(url)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [User.NewFromJsonDict(x) for x in data]
  def GetUser(self, user):
    '''Returns a single user.
    The twitter.Api instance must be authenticated.
    Args:
      user: The username or id of the user to retrieve.
    Returns:
      A twitter.User instance representing that user
    '''
    url = 'http://twitter.com/users/show/%s.json' % user
    json = self._FetchUrl(url)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return User.NewFromJsonDict(data)
  def GetDirectMessages(self, since=None, since_id=None, page=None):
    '''Returns a list of the direct messages sent to the authenticating user.
    The twitter.Api instance must be authenticated.
    Args:
      since:
        Narrows the returned results to just those statuses created
        after the specified HTTP-formatted date. [optional]
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]
    Returns:
      A sequence of twitter.DirectMessage instances
    '''
    url = 'http://twitter.com/direct_messages.json'
    if not self._username:
      raise TwitterError("The twitter.Api instance must be authenticated.")
    parameters = {}
    if since:
      parameters['since'] = since
    if since_id:
      parameters['since_id'] = since_id
    if page:
      parameters['page'] = page
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [DirectMessage.NewFromJsonDict(x) for x in data]
  def PostDirectMessage(self, user, text):
    '''Post a twitter direct message from the authenticated user
    The twitter.Api instance must be authenticated.
    Args:
      user: The ID or screen name of the recipient user.
      text: The message text to be posted.  Must be less than 140 characters.
    Returns:
      A twitter.DirectMessage instance representing the message posted
    '''
    if not self._username:
      raise TwitterError("The twitter.Api instance must be authenticated.")
    url = 'http://twitter.com/direct_messages/new.json'
    data = {'text': text, 'user': user}
    json = self._FetchUrl(url, post_data=data)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return DirectMessage.NewFromJsonDict(data)
  def DestroyDirectMessage(self, id):
    '''Destroys the direct message specified in the required ID parameter.
    The twitter.Api instance must be authenticated, and the
    authenticating user must be the recipient of the specified direct
    message.
    Args:
      id: The id of the direct message to be destroyed
    Returns:
      A twitter.DirectMessage instance representing the message destroyed
    '''
    url = 'http://twitter.com/direct_messages/destroy/%s.json' % id
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return DirectMessage.NewFromJsonDict(data)
  def CreateFriendship(self, user):
    '''Befriends the user specified in the user parameter as the authenticating user.
    The twitter.Api instance must be authenticated.
    Args:
      The ID or screen name of the user to befriend.
    Returns:
      A twitter.User instance representing the befriended user.
    '''
    url = 'http://twitter.com/friendships/create/%s.json' % user
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return User.NewFromJsonDict(data)
  def DestroyFriendship(self, user):
    '''Discontinues friendship with the user specified in the user parameter.
    The twitter.Api instance must be authenticated.
    Args:
      The ID or screen name of the user with whom to discontinue friendship.
    Returns:
      A twitter.User instance representing the discontinued friend.
    '''
    url = 'http://twitter.com/friendships/destroy/%s.json' % user
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return User.NewFromJsonDict(data)
  def CreateFavorite(self, status):
    '''Favorites the status specified in the status parameter as the authenticating user.
    Returns the favorite status when successful.
    The twitter.Api instance must be authenticated.
    Args:
      The twitter.Status instance to mark as a favorite.
    Returns:
      A twitter.Status instance representing the newly-marked favorite.
    '''
    url = 'http://twitter.com/favorites/create/%s.json' % status.id
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return Status.NewFromJsonDict(data)
  def DestroyFavorite(self, status):
    '''Un-favorites the status specified in the ID parameter as the authenticating user.
    Returns the un-favorited status in the requested format when successful.
    The twitter.Api instance must be authenticated.
    Args:
      The twitter.Status to unmark as a favorite.
    Returns:
      A twitter.Status instance representing the newly-unmarked favorite.
    '''
    url = 'http://twitter.com/favorites/destroy/%s.json' % status.id
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return Status.NewFromJsonDict(data)
  def GetUserByEmail(self, email):
    '''Returns a single user by email address.
    Args:
      email: The email of the user to retrieve.
    Returns:
      A twitter.User instance representing that user
    '''
    url = 'http://twitter.com/users/show.json?email=%s' % email
    json = self._FetchUrl(url)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return User.NewFromJsonDict(data)
  def SetCredentials(self, username, password):
    '''Set the username and password for this instance
    Args:
      username: The twitter username.
      password: The twitter password.
    '''
    self._username = username
    self._password = password
  def ClearCredentials(self):
    '''Clear the username and password for this instance
    '''
    self._username = None
    self._password = None
  def SetCache(self, cache):
    '''Override the default cache.  Set to None to prevent caching.
    Args:
      cache: an instance that supports the same API as the twitter._FileCache
    '''
    self._cache = cache
  def SetUrllib(self, urllib):
    '''Override the default urllib implementation.
    Args:
      urllib: an instance that supports the same API as the urllib2 module
    '''
    self._urllib = urllib
  def SetCacheTimeout(self, cache_timeout):
    '''Override the default cache timeout.
    Args:
      cache_timeout: time, in seconds, that responses should be reused.
    '''
    self._cache_timeout = cache_timeout
  def SetUserAgent(self, user_agent):
    '''Override the default user agent
    Args:
      user_agent: a string that should be send to the server as the User-agent
    '''
    self._request_headers['User-Agent'] = user_agent
  def SetXTwitterHeaders(self, client, url, version):
    '''Set the X-Twitter HTTP headers that will be sent to the server.
    Args:
      client:
         The client name as a string.  Will be sent to the server as
         the 'X-Twitter-Client' header.
      url:
         The URL of the meta.xml as a string.  Will be sent to the server
         as the 'X-Twitter-Client-URL' header.
      version:
         The client version as a string.  Will be sent to the server
         as the 'X-Twitter-Client-Version' header.
    '''
    self._request_headers['X-Twitter-Client'] = client
    self._request_headers['X-Twitter-Client-URL'] = url
    self._request_headers['X-Twitter-Client-Version'] = version
  def SetSource(self, source):
    '''Suggest the "from source" value to be displayed on the Twitter web site.
    The value of the 'source' parameter must be first recognized by
    the Twitter server.  New source values are authorized on a case by
    case basis by the Twitter development team.
    Args:
      source:
        The source name as a string.  Will be sent to the server as
        the 'source' parameter.
    '''
    self._default_params['source'] = source
  def _BuildUrl(self, url, path_elements=None, extra_params=None):
    # Break url into constituent parts
    (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
    # Add any additional path elements to the path
    if path_elements:
      # Filter out the path elements that have a value of None
      p = [i for i in path_elements if i]
      if not path.endswith('/'):
        path += '/'
      path += '/'.join(p)
    # Add any additional query parameters to the query string
    if extra_params and len(extra_params) > 0:
      extra_query = self._EncodeParameters(extra_params)
      # Add it to the existing query
      if query:
        query += '&' + extra_query
      else:
        query = extra_query
    # Return the rebuilt URL
    return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
  def _InitializeRequestHeaders(self, request_headers):
    if request_headers:
      self._request_headers = request_headers
    else:
      self._request_headers = {}
  def _InitializeUserAgent(self):
    user_agent = 'Python-urllib/%s (python-twitter/%s)' % \
                 (self._urllib.__version__, __version__)
    self.SetUserAgent(user_agent)
  def _InitializeDefaultParameters(self):
    self._default_params = {}
  def _AddAuthorizationHeader(self, username, password):
    if username and password:
      # encodestring appends a newline; strip it for the header value.
      basic_auth = base64.encodestring('%s:%s' % (username, password))[:-1]
      self._request_headers['Authorization'] = 'Basic %s' % basic_auth
  def _RemoveAuthorizationHeader(self):
    if self._request_headers and 'Authorization' in self._request_headers:
      del self._request_headers['Authorization']
  def _GetOpener(self, url, username=None, password=None):
    if username and password:
      self._AddAuthorizationHeader(username, password)
      handler = self._urllib.HTTPBasicAuthHandler()
      (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
      handler.add_password(Api._API_REALM, netloc, username, password)
      opener = self._urllib.build_opener(handler)
    else:
      opener = self._urllib.build_opener()
    opener.addheaders = self._request_headers.items()
    return opener
  def _Encode(self, s):
    if self._input_encoding:
      return unicode(s, self._input_encoding).encode('utf-8')
    else:
      return unicode(s).encode('utf-8')
  def _EncodeParameters(self, parameters):
    '''Return a string in key=value&key=value form
    Values of None are not included in the output string.
    Args:
      parameters:
        A dict of (key, value) tuples, where value is encoded as
        specified by self._encoding
    Returns:
      A URL-encoded string in "key=value&key=value" form
    '''
    if parameters is None:
      return None
    else:
      return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in parameters.items() if v is not None]))
  def _EncodePostData(self, post_data):
    '''Return a string in key=value&key=value form
    Values are assumed to be encoded in the format specified by self._encoding,
    and are subsequently URL encoded.
    Args:
      post_data:
        A dict of (key, value) tuples, where value is encoded as
        specified by self._encoding
    Returns:
      A URL-encoded string in "key=value&key=value" form
    '''
    if post_data is None:
      return None
    else:
      return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in post_data.items()]))
  def _CheckForTwitterError(self, data):
    """Raises a TwitterError if twitter returns an error message.
    Args:
      data: A python dict created from the Twitter json response
    Raises:
      TwitterError wrapping the twitter error message if one exists.
    """
    # Twitter errors are relatively unlikely, so it is faster
    # to check first, rather than try and catch the exception
    if 'error' in data:
      raise TwitterError(data['error'])
  def _FetchUrl(self,
                url,
                post_data=None,
                parameters=None,
                no_cache=None):
    '''Fetch a URL, optionally caching for a specified time.
    Args:
      url: The URL to retrieve
      post_data:
        A dict of (str, unicode) key/value pairs.  If set, POST will be used.
      parameters:
        A dict whose key/value pairs should encoded and added
        to the query string. [OPTIONAL]
      no_cache: If true, overrides the cache on the current request
    Returns:
      A string containing the body of the response.
    '''
    # Build the extra parameters dict
    extra_params = {}
    if self._default_params:
      extra_params.update(self._default_params)
    if parameters:
      extra_params.update(parameters)
    # Add key/value parameters to the query string of the url
    url = self._BuildUrl(url, extra_params=extra_params)
    # Get a url opener that can handle basic auth
    opener = self._GetOpener(url, username=self._username, password=self._password)
    encoded_post_data = self._EncodePostData(post_data)
    # Open and return the URL immediately if we're not going to cache.
    # POST requests are never cached.
    if encoded_post_data or no_cache or not self._cache or not self._cache_timeout:
      url_data = opener.open(url, encoded_post_data).read()
      opener.close()
    else:
      # Unique keys are a combination of the url and the username
      if self._username:
        key = self._username + ':' + url
      else:
        key = url
      # See if it has been cached before
      last_cached = self._cache.GetCachedTime(key)
      # If the cached version is outdated then fetch another and store it
      if not last_cached or time.time() >= last_cached + self._cache_timeout:
        url_data = opener.open(url, encoded_post_data).read()
        opener.close()
        self._cache.Set(key, url_data)
      else:
        url_data = self._cache.Get(key)
    # Always return the latest version
    return url_data
class _FileCacheError(Exception):
'''Base exception class for FileCache related errors'''
class _FileCache(object):
DEPTH = 3
def __init__(self,root_directory=None):
self._InitializeRootDirectory(root_directory)
def Get(self,key):
path = self._GetPath(key)
if os.path.exists(path):
return open(path).read()
else:
return None
def Set(self,key,data):
path = self._GetPath(key)
directory = os.path.dirname(path)
if not os.path.exists(directory):
os.makedirs(directory)
if not os.path.isdir(directory):
raise _FileCacheError('%s exists but is not a directory' % directory)
temp_fd, temp_path = tempfile.mkstemp()
temp_fp = os.fdopen(temp_fd, 'w')
temp_fp.write(data)
temp_fp.close()
if not path.startswith(self._root_directory):
raise _FileCacheError('%s does not appear to live under %s' %
(path, self._root_directory))
if os.path.exists(path):
os.remove(path)
os.rename(temp_path, path)
def Remove(self,key):
path = self._GetPath(key)
if not path.startswith(self._root_directory):
raise _FileCacheError('%s does not appear to live under %s' %
(path, self._root_directory ))
if os.path.exists(path):
os.remove(path)
def GetCachedTime(self,key):
path = self._GetPath(key)
if os.path.exists(path):
return os.path.getmtime(path)
else:
return None
def _GetUsername(self):
'''Attempt to find the username in a cross-platform fashion.'''
try:
return os.getenv('USER') or \
os.getenv('LOGNAME') or \
os.getenv('USERNAME') or \
os.getlogin() or \
'nobody'
except (IOError, OSError), e:
return 'nobody'
def _GetTmpCachePath(self):
username = self._GetUsername()
cache_directory = 'python.cache_' + username
return os.path.join(tempfile.gettempdir(), cache_directory)
def _InitializeRootDirectory(self, root_directory):
if not root_directory:
root_directory = self._GetTmpCachePath()
root_directory = os.path.abspath(root_directory)
if not os.path.exists(root_directory):
os.mkdir(root_directory)
if not os.path.isdir(root_directory):
raise _FileCacheError('%s exists but is not a directory' %
root_directory)
self._root_directory = root_directory
def _GetPath(self,key):
try:
hashed_key = md5(key).hexdigest()
except TypeError:
hashed_key = md5.new(key).hexdigest()
return os.path.join(self._root_directory,
self._GetPrefix(hashed_key),
hashed_key)
def _GetPrefix(self,hashed_key):
return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
| Python |
"""Implementation of JSONEncoder
"""
import re
try:
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
# Characters that must be escaped inside a JSON string: control characters,
# backslash and the double quote.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# For ensure_ascii output: additionally escape everything outside
# printable ASCII (space through '~').
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
# Detects bytes that cannot be plain ASCII, i.e. UTF-8 multi-byte sequences.
HAS_UTF8 = re.compile(r'[\x80-\xff]')
# Short escape sequences for the characters JSON names explicitly.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
# Every remaining control character falls back to a \uXXXX escape.
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
def encode_basestring(s):
    """Return the JSON string literal (including the surrounding quotes)
    for the Python string *s*, escaping characters via ESCAPE_DCT."""
    escaped = ESCAPE.sub(lambda m: ESCAPE_DCT[m.group(0)], s)
    return '"' + escaped + '"'
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string
    """
    # Byte strings containing non-ASCII bytes are assumed to be UTF-8.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            # Named escapes (\n, \t, ...) first.
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                #return '\\u{0:04x}'.format(n)
                return '\\u%04x' % (n,)
            else:
                # surrogate pair: split code points above the BMP into
                # high/low UTF-16 surrogates and escape each.
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'

# Prefer the C implementation when the _speedups extension is available.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Class-level defaults; overridden per-instance via the ``separators``
    # constructor argument.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level.  An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            # Shadow the default() method on this instance only.
            self.default = default
        self.encoding = encoding
    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        # markers tracks container ids currently being encoded, for
        # circular-reference detection; None disables the check.
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder so byte strings are decoded with the
            # configured encoding before being escaped.
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials.  Note that this type of test is processor- and/or
            # platform-specific, so do tests which don't depend on the internals.
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text
        # The C-accelerated encoder only supports the common one-shot,
        # compact, unsorted case; otherwise fall back to the Python one.
        if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    # Builds and returns the recursive generator _iterencode, closing over
    # all encoder options.  markers (dict or None) holds the ids of
    # containers currently being encoded, for circular-reference detection.
    def _iterencode_list(lst, _current_indent_level):
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            # buf carries '[' (plus indent) for the first item and the
            # separator for every subsequent one.
            if first:
                first = False
            else:
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                # Containers and unknown types: yield the prefix, then
                # delegate to the appropriate sub-generator.
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch: scalars are yielded directly; containers are
        # delegated; anything else goes through _default() and is re-encoded.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
| Python |
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL

def _floatconstants():
    """Build (nan, inf, -inf) from their IEEE-754 big-endian bit patterns,
    byte-swapping each 8-byte half on little-endian machines."""
    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
    if sys.byteorder != 'big':
        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
    nan, inf = struct.unpack('dd', _BYTES)
    return nan, inf, -inf

NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
    """Translate character offset *pos* within *doc* into a (line, column)
    pair.  Lines are 1-based; the column is the distance from the most
    recent newline, or the raw offset when still on the first line."""
    newlines = doc.count('\n', 0, pos)
    if newlines:
        column = pos - doc.rindex('\n', 0, pos)
    else:
        column = pos
    return newlines + 1, column
def errmsg(msg, doc, pos, end=None):
    """Format a decoder error message with line/column context for *pos*
    (and for *end*, when a range is given).

    Note that this function is called from _speedups, so its signature
    must stay stable.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    return '%s: line %d column %d - line %d column %d (char %d - %d)' % (
        msg, lineno, colno, endlineno, endcolno, pos, end)
# Literal tokens decoded straight to float constants (outside the JSON spec).
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}

# Matches a run of plain characters plus the single character that ends it:
# a closing quote, a backslash, or a literal control character.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Single-character escape sequences; \uXXXX escapes are handled separately.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}

DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    # _b and _m are bound as default arguments purely as a CPython
    # local-lookup micro-optimization.
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise ValueError(errmsg(msg, s, end))
            else:
                # Non-strict mode: keep the literal control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise ValueError(errmsg(msg, s, end))
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise ValueError(errmsg(msg, s, end))
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise ValueError(errmsg(msg, s, end))
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise ValueError(errmsg(msg, s, end))
                uni2 = int(esc2, 16)
                # Combine the high/low surrogates into one code point.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring

# Insignificant whitespace per the JSON spec.
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON object starting just after its '{'.  Returns
    (dict-or-object_hook-result, index just past the closing '}')."""
    pairs = {}
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            return pairs, end + 1
        elif nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise ValueError(errmsg("Expecting : delimiter", s, end))
        end += 1
        try:
            # Fast path: at most one whitespace char before the value.
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))
    # object_hook (if given) replaces the plain dict with a custom object.
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    """Parse a JSON array starting just after its '['.  Returns
    (list, index just past the closing ']')."""
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        _append(value)
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))
        try:
            # Fast path: at most one whitespace char after the comma.
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """
    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True):
        """``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default).  It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded.  By default this is equivalent to
        float(num_str).  This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded.  By default this is equivalent to
        int(num_str).  This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        # make_scanner reads the parse_* attributes above, so it must be
        # constructed last.
        self.scan_once = make_scanner(self)
    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        end = _w(s, end).end()
        # Unlike raw_decode, trailing non-whitespace data is an error here.
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj
    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            raise ValueError("No JSON object could be decoded")
        return obj, end
| Python |
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.

:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.

Encoding basic Python object hierarchies::

    >>> import simplejson as json
    >>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
    '["foo", {"bar": ["baz", null, 1.0, 2]}]'
    >>> print json.dumps("\"foo\bar")
    "\"foo\bar"
    >>> print json.dumps(u'\u1234')
    "\u1234"
    >>> print json.dumps('\\')
    "\\"
    >>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
    {"a": 0, "b": 0, "c": 0}
    >>> from StringIO import StringIO
    >>> io = StringIO()
    >>> json.dump(['streaming API'], io)
    >>> io.getvalue()
    '["streaming API"]'

Compact encoding::

    >>> import simplejson as json
    >>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
    '[1,2,3,{"4":5,"6":7}]'

Pretty printing::

    >>> import simplejson as json
    >>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
    >>> print '\n'.join([l.rstrip() for l in s.splitlines()])
    {
        "4": 5,
        "6": 7
    }

Decoding JSON::

    >>> import simplejson as json
    >>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
    >>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
    True
    >>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
    True
    >>> from StringIO import StringIO
    >>> io = StringIO('["streaming API"]')
    >>> json.load(io)[0] == 'streaming API'
    True

Specializing JSON object decoding::

    >>> import simplejson as json
    >>> def as_complex(dct):
    ...     if '__complex__' in dct:
    ...         return complex(dct['real'], dct['imag'])
    ...     return dct
    ...
    >>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
    ...     object_hook=as_complex)
    (1+2j)
    >>> import decimal
    >>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
    True

Specializing JSON object encoding::

    >>> import simplejson as json
    >>> def encode_complex(obj):
    ...     if isinstance(obj, complex):
    ...         return [obj.real, obj.imag]
    ...     raise TypeError(repr(obj) + " is not JSON serializable")
    ...
    >>> json.dumps(2 + 1j, default=encode_complex)
    '[2.0, 1.0]'
    >>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
    '[2.0, 1.0]'
    >>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
    '[2.0, 1.0]'

Using simplejson.tool from the shell to validate and pretty-print::

    $ echo '{"json":"obj"}' | python -m simplejson.tool
    {
        "json": "obj"
    }
    $ echo '{ 1.2:3.4}' | python -m simplejson.tool
    Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.0.9'
__all__ = [
    'dump', 'dumps', 'load', 'loads',
    'JSONDecoder', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decoder import JSONDecoder
from encoder import JSONEncoder
# Module-level encoder reused by dump()/dumps() whenever they are called
# with all-default arguments, avoiding a JSONEncoder construction per call.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    If ``skipkeys`` is true then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, then some chunks written to ``fp``
    may be ``unicode`` instances, subject to normal Python ``str`` to
    ``unicode`` coercion rules.  Unless ``fp.write()`` explicitly
    understands ``unicode`` (as in ``codecs.getwriter()``) this is likely
    to cause an error.

    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).

    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``)
    in strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, then JSON array elements and
    object members will be pretty-printed with that indent level.  An indent
    level of 0 will only insert newlines.  ``None`` is the most compact
    representation.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.

    ``encoding`` is the character encoding for str instances, default is
    UTF-8.

    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError.  The default simply raises TypeError.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # Reuse the shared module-level encoder when every option has its
    # default value; otherwise build a one-off encoder for this call.
    all_defaults = (not skipkeys and ensure_ascii and check_circular
                    and allow_nan and cls is None and indent is None
                    and separators is None and encoding == 'utf-8'
                    and default is None and not kw)
    if all_defaults:
        chunks = _default_encoder.iterencode(obj)
    else:
        if cls is None:
            encoder_cls = JSONEncoder
        else:
            encoder_cls = cls
        encoder = encoder_cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
                              check_circular=check_circular,
                              allow_nan=allow_nan, indent=indent,
                              separators=separators, encoding=encoding,
                              default=default, **kw)
        chunks = encoder.iterencode(obj)
    # Write chunk-by-chunk rather than via writelines(); the incremental
    # writes keep failures attributable to the offending chunk.
    for piece in chunks:
        fp.write(piece)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.

    If ``skipkeys`` is false then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, then the return value will be a
    ``unicode`` instance subject to normal Python ``str`` to ``unicode``
    coercion rules instead of being escaped to an ASCII ``str``.

    If ``check_circular`` is false, then the circular reference check
    for container types will be skipped and a circular reference will
    result in an ``OverflowError`` (or worse).

    If ``allow_nan`` is false, then it will be a ``ValueError`` to
    serialize out of range ``float`` values (``nan``, ``inf``, ``-inf``) in
    strict compliance of the JSON specification, instead of using the
    JavaScript equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, then JSON array elements and
    object members will be pretty-printed with that indent level.  An indent
    level of 0 will only insert newlines.  ``None`` is the most compact
    representation.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple
    then it will be used instead of the default ``(', ', ': ')`` separators.
    ``(',', ':')`` is the most compact JSON representation.

    ``encoding`` is the character encoding for str instances, default is
    UTF-8.

    ``default(obj)`` is a function that should return a serializable version
    of obj or raise TypeError.  The default simply raises TypeError.

    To use a custom ``JSONEncoder`` subclass (e.g. one that overrides the
    ``.default()`` method to serialize additional types), specify it with
    the ``cls`` kwarg.
    """
    # Fast path: with all-default options, reuse the shared encoder.
    all_defaults = (not skipkeys and ensure_ascii and check_circular
                    and allow_nan and cls is None and indent is None
                    and separators is None and encoding == 'utf-8'
                    and default is None and not kw)
    if all_defaults:
        return _default_encoder.encode(obj)
    if cls is None:
        cls = JSONEncoder
    encoder = cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
                  check_circular=check_circular, allow_nan=allow_nan,
                  indent=indent, separators=separators, encoding=encoding,
                  default=default, **kw)
    return encoder.encode(obj)
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``fp`` (a ``.read()``-supporting file-like object containing
    a JSON document) to a Python object.

    If the contents of ``fp`` is encoded with an ASCII based encoding other
    than utf-8 (e.g. latin-1), then an appropriate ``encoding`` name must
    be specified. Encodings that are not ASCII based (such as UCS-2) are
    not allowed, and should be wrapped with
    ``codecs.getreader(fp)(encoding)``, or simply decoded to a ``unicode``
    object and passed to ``loads()``

    ``object_hook`` is an optional function that will be called with the
    result of any object literal decode (a ``dict``). The return value of
    ``object_hook`` will be used instead of the ``dict``. This feature
    can be used to implement custom decoders (e.g. JSON-RPC class hinting).

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    # Slurp the whole document and delegate all option handling to loads().
    document = fp.read()
    return loads(document, encoding=encoding, cls=cls,
        object_hook=object_hook, parse_float=parse_float,
        parse_int=parse_int, parse_constant=parse_constant, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` instance containing a JSON
    document) to a Python object.

    If ``s`` is a ``str`` instance and is encoded with an ASCII based encoding
    other than utf-8 (e.g. latin-1) then an appropriate ``encoding`` name
    must be specified. Encodings that are not ASCII based (such as UCS-2)
    are not allowed and should be decoded to ``unicode`` first.

    ``object_hook`` is an optional function that will be called with the
    result of any object literal decode (a ``dict``). The return value of
    ``object_hook`` will be used instead of the ``dict``. This feature
    can be used to implement custom decoders (e.g. JSON-RPC class hinting).

    ``parse_float``, if specified, will be called with the string
    of every JSON float to be decoded. By default this is equivalent to
    float(num_str). This can be used to use another datatype or parser
    for JSON floats (e.g. decimal.Decimal).

    ``parse_int``, if specified, will be called with the string
    of every JSON int to be decoded. By default this is equivalent to
    int(num_str). This can be used to use another datatype or parser
    for JSON integers (e.g. float).

    ``parse_constant``, if specified, will be called with one of the
    following strings: -Infinity, Infinity, NaN, null, true, false.
    This can be used to raise an exception if invalid JSON numbers
    are encountered.

    To use a custom ``JSONDecoder`` subclass, specify it with the ``cls``
    kwarg.
    """
    # Fast path: with no customization at all, reuse the cached decoder.
    use_cached = (cls is None and encoding is None and object_hook is None
        and parse_int is None and parse_float is None
        and parse_constant is None and not kw)
    if use_cached:
        return _default_decoder.decode(s)
    if cls is None:
        cls = JSONDecoder
    # Forward only the hooks the caller actually supplied.
    optional_hooks = (
        ('object_hook', object_hook),
        ('parse_float', parse_float),
        ('parse_int', parse_int),
        ('parse_constant', parse_constant),
    )
    for hook_name, hook in optional_hooks:
        if hook is not None:
            kw[hook_name] = hook
    return cls(encoding=encoding, **kw).decode(s)
| Python |
r"""Command-line tool to validate and pretty-print JSON
Usage::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
import sys
import simplejson
def main():
if len(sys.argv) == 1:
infile = sys.stdin
outfile = sys.stdout
elif len(sys.argv) == 2:
infile = open(sys.argv[1], 'rb')
outfile = sys.stdout
elif len(sys.argv) == 3:
infile = open(sys.argv[1], 'rb')
outfile = open(sys.argv[2], 'wb')
else:
raise SystemExit(sys.argv[0] + " [infile [outfile]]")
try:
obj = simplejson.load(infile)
except ValueError, e:
raise SystemExit(e)
simplejson.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
if __name__ == '__main__':
main()
| Python |
"""JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
__all__ = ['make_scanner']
# Matches a JSON number; groups are (integer part, optional fraction,
# optional exponent) as consumed by _scan_once below.
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
    """Build a pure-Python scan function from decoder *context*.

    Returns ``_scan_once(string, idx)`` which scans one JSON value starting
    at index ``idx`` and returns a ``(value, end_index)`` pair, raising
    ``StopIteration`` at end of input or on an unrecognized token.
    """
    # Bind context attributes to locals once, so the closure avoids repeated
    # attribute lookups on every scanned token.
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    def _scan_once(string, idx):
        """Scan one JSON value at *idx*; return (value, end_index)."""
        try:
            nextchar = string[idx]
        except IndexError:
            # Ran off the end of the input.
            raise StopIteration
        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5
        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            # Any fraction or exponent makes it a float; otherwise an int.
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        # Non-standard constants accepted by simplejson beyond strict JSON.
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            raise StopIteration
    return _scan_once
# Prefer the C-accelerated scanner when the _speedups extension imported above.
make_scanner = c_make_scanner or py_make_scanner
| Python |
#!/usr/bin/python2.4
#
# Copyright 2007 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
'''A library that provides a python interface to the Twitter API'''
__author__ = 'dewitt@google.com'
__version__ = '0.6-devel'
import base64
import calendar
import os
import rfc822
import simplejson
import sys
import tempfile
import textwrap
import time
import urllib
import urllib2
import urlparse
try:
from hashlib import md5
except ImportError:
from md5 import md5
# Maximum length of a status update, as enforced by the Twitter service.
CHARACTER_LIMIT = 140
class TwitterError(Exception):
  '''Base class for Twitter errors'''
  @property
  def message(self):
    '''Returns the first argument used to construct this error.'''
    # Exception stores its constructor arguments in self.args; the first
    # one is, by convention, the human-readable message.
    return self.args[0]
class Status(object):
  '''A class representing the Status structure used by the twitter API.

  The Status structure exposes the following properties:

    status.created_at
    status.created_at_in_seconds # read only
    status.favorited
    status.in_reply_to_screen_name
    status.in_reply_to_user_id
    status.in_reply_to_status_id
    status.truncated
    status.source
    status.id
    status.text
    status.relative_created_at # read only
    status.user
  '''
  def __init__(self,
               created_at=None,
               favorited=None,
               id=None,
               text=None,
               user=None,
               in_reply_to_screen_name=None,
               in_reply_to_user_id=None,
               in_reply_to_status_id=None,
               truncated=None,
               source=None,
               now=None):
    '''An object to hold a Twitter status message.

    This class is normally instantiated by the twitter.Api class and
    returned in a sequence.

    Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"

    Args:
      created_at: The time this status message was posted
      favorited: Whether this is a favorite of the authenticated user
      id: The unique id of this status message
      text: The text of this status message
      user:
        A twitter.User instance representing the person posting the message
      in_reply_to_screen_name:
        The screen name of the user this status replies to, if any
      in_reply_to_user_id:
        The id of the user this status replies to, if any
      in_reply_to_status_id:
        The id of the status this status replies to, if any
      truncated: Whether the text of this status was truncated
      source: The client application used to post this status
      now:
        The current time, if the client choses to set it. Defaults to the
        wall clock time.
    '''
    self.created_at = created_at
    self.favorited = favorited
    self.id = id
    self.text = text
    self.user = user
    self.now = now
    self.in_reply_to_screen_name = in_reply_to_screen_name
    self.in_reply_to_user_id = in_reply_to_user_id
    self.in_reply_to_status_id = in_reply_to_status_id
    self.truncated = truncated
    self.source = source

  def GetCreatedAt(self):
    '''Get the time this status message was posted.

    Returns:
      The time this status message was posted
    '''
    return self._created_at

  def SetCreatedAt(self, created_at):
    '''Set the time this status message was posted.

    Args:
      created_at: The time this status message was created
    '''
    self._created_at = created_at

  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The time this status message was posted.')

  def GetCreatedAtInSeconds(self):
    '''Get the time this status message was posted, in seconds since the epoch.

    Returns:
      The time this status message was posted, in seconds since the epoch.
    '''
    return calendar.timegm(rfc822.parsedate(self.created_at))

  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc="The time this status message was "
                                       "posted, in seconds since the epoch")

  def GetFavorited(self):
    '''Get the favorited setting of this status message.

    Returns:
      True if this status message is favorited; False otherwise
    '''
    return self._favorited

  def SetFavorited(self, favorited):
    '''Set the favorited state of this status message.

    Args:
      favorited: boolean True/False favorited state of this status message
    '''
    self._favorited = favorited

  favorited = property(GetFavorited, SetFavorited,
                       doc='The favorited state of this status message.')

  def GetId(self):
    '''Get the unique id of this status message.

    Returns:
      The unique id of this status message
    '''
    return self._id

  def SetId(self, id):
    '''Set the unique id of this status message.

    Args:
      id: The unique id of this status message
    '''
    self._id = id

  id = property(GetId, SetId,
                doc='The unique id of this status message.')

  def GetInReplyToScreenName(self):
    return self._in_reply_to_screen_name

  def SetInReplyToScreenName(self, in_reply_to_screen_name):
    self._in_reply_to_screen_name = in_reply_to_screen_name

  in_reply_to_screen_name = property(GetInReplyToScreenName, SetInReplyToScreenName,
                doc='')

  def GetInReplyToUserId(self):
    return self._in_reply_to_user_id

  def SetInReplyToUserId(self, in_reply_to_user_id):
    self._in_reply_to_user_id = in_reply_to_user_id

  in_reply_to_user_id = property(GetInReplyToUserId, SetInReplyToUserId,
                doc='')

  def GetInReplyToStatusId(self):
    return self._in_reply_to_status_id

  def SetInReplyToStatusId(self, in_reply_to_status_id):
    self._in_reply_to_status_id = in_reply_to_status_id

  in_reply_to_status_id = property(GetInReplyToStatusId, SetInReplyToStatusId,
                doc='')

  def GetTruncated(self):
    return self._truncated

  def SetTruncated(self, truncated):
    self._truncated = truncated

  truncated = property(GetTruncated, SetTruncated,
                doc='')

  def GetSource(self):
    return self._source

  def SetSource(self, source):
    self._source = source

  source = property(GetSource, SetSource,
                doc='')

  def GetText(self):
    '''Get the text of this status message.

    Returns:
      The text of this status message.
    '''
    return self._text

  def SetText(self, text):
    '''Set the text of this status message.

    Args:
      text: The text of this status message
    '''
    self._text = text

  text = property(GetText, SetText,
                  doc='The text of this status message')

  def GetRelativeCreatedAt(self):
    '''Get a human readable string representing the posting time

    Returns:
      A human readable string representing the posting time
    '''
    # The fudge factor widens each bucket slightly so, e.g., 58 seconds
    # still reads as "about a minute ago" rather than "58 seconds ago".
    fudge = 1.25
    delta = int(self.now) - int(self.created_at_in_seconds)

    if delta < (1 * fudge):
      return 'about a second ago'
    elif delta < (60 * (1/fudge)):
      return 'about %d seconds ago' % (delta)
    elif delta < (60 * fudge):
      return 'about a minute ago'
    elif delta < (60 * 60 * (1/fudge)):
      return 'about %d minutes ago' % (delta / 60)
    elif delta < (60 * 60 * fudge):
      return 'about an hour ago'
    elif delta < (60 * 60 * 24 * (1/fudge)):
      return 'about %d hours ago' % (delta / (60 * 60))
    elif delta < (60 * 60 * 24 * fudge):
      return 'about a day ago'
    else:
      return 'about %d days ago' % (delta / (60 * 60 * 24))

  relative_created_at = property(GetRelativeCreatedAt,
                                 doc='Get a human readable string representing'
                                     'the posting time')

  def GetUser(self):
    '''Get a twitter.User representing the entity posting this status message.

    Returns:
      A twitter.User representing the entity posting this status message
    '''
    return self._user

  def SetUser(self, user):
    '''Set a twitter.User representing the entity posting this status message.

    Args:
      user: A twitter.User representing the entity posting this status message
    '''
    self._user = user

  user = property(GetUser, SetUser,
                  doc='A twitter.User representing the entity posting this '
                      'status message')

  def GetNow(self):
    '''Get the wallclock time for this status message.

    Used to calculate relative_created_at.  Defaults to the time
    the object was instantiated.

    Returns:
      Whatever the status instance believes the current time to be,
      in seconds since the epoch.
    '''
    # Lazily capture the wall clock the first time it is needed.
    if self._now is None:
      self._now = time.time()
    return self._now

  def SetNow(self, now):
    '''Set the wallclock time for this status message.

    Used to calculate relative_created_at.  Defaults to the time
    the object was instantiated.

    Args:
      now: The wallclock time for this instance.
    '''
    self._now = now

  now = property(GetNow, SetNow,
                 doc='The wallclock time for this status instance.')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    try:
      return other and \
             self.created_at == other.created_at and \
             self.id == other.id and \
             self.text == other.text and \
             self.user == other.user and \
             self.in_reply_to_screen_name == other.in_reply_to_screen_name and \
             self.in_reply_to_user_id == other.in_reply_to_user_id and \
             self.in_reply_to_status_id == other.in_reply_to_status_id and \
             self.truncated == other.truncated and \
             self.favorited == other.favorited and \
             self.source == other.source
    except AttributeError:
      return False

  def __str__(self):
    '''A string representation of this twitter.Status instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.Status instance.
    '''
    return self.AsJsonString()

  def AsJsonString(self):
    '''A JSON string representation of this twitter.Status instance.

    Returns:
      A JSON string representation of this twitter.Status instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''A dict representation of this twitter.Status instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.Status instance
    '''
    data = {}
    if self.created_at:
      data['created_at'] = self.created_at
    # 'is not None' so an explicit False is still serialized.  (This single
    # check replaces an earlier redundant truthiness test for 'favorited'.)
    if self.favorited is not None:
      data['favorited'] = self.favorited
    if self.id:
      data['id'] = self.id
    if self.text:
      data['text'] = self.text
    if self.user:
      data['user'] = self.user.AsDict()
    if self.in_reply_to_screen_name:
      data['in_reply_to_screen_name'] = self.in_reply_to_screen_name
    if self.in_reply_to_user_id:
      data['in_reply_to_user_id'] = self.in_reply_to_user_id
    if self.in_reply_to_status_id:
      data['in_reply_to_status_id'] = self.in_reply_to_status_id
    if self.truncated is not None:
      data['truncated'] = self.truncated
    if self.source:
      data['source'] = self.source
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data: A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.Status instance
    '''
    if 'user' in data:
      user = User.NewFromJsonDict(data['user'])
    else:
      user = None
    return Status(created_at=data.get('created_at', None),
                  favorited=data.get('favorited', None),
                  id=data.get('id', None),
                  text=data.get('text', None),
                  in_reply_to_screen_name=data.get('in_reply_to_screen_name', None),
                  in_reply_to_user_id=data.get('in_reply_to_user_id', None),
                  in_reply_to_status_id=data.get('in_reply_to_status_id', None),
                  truncated=data.get('truncated', None),
                  source=data.get('source', None),
                  user=user)
class User(object):
  '''A class representing the User structure used by the twitter API.

  The User structure exposes the following properties:

    user.id
    user.name
    user.screen_name
    user.location
    user.description
    user.profile_image_url
    user.profile_background_tile
    user.profile_background_image_url
    user.profile_sidebar_fill_color
    user.profile_background_color
    user.profile_link_color
    user.profile_text_color
    user.protected
    user.utc_offset
    user.time_zone
    user.url
    user.status
    user.statuses_count
    user.followers_count
    user.friends_count
    user.favourites_count
  '''
  def __init__(self,
               id=None,
               name=None,
               screen_name=None,
               location=None,
               description=None,
               profile_image_url=None,
               profile_background_tile=None,
               profile_background_image_url=None,
               profile_sidebar_fill_color=None,
               profile_background_color=None,
               profile_link_color=None,
               profile_text_color=None,
               protected=None,
               utc_offset=None,
               time_zone=None,
               followers_count=None,
               friends_count=None,
               statuses_count=None,
               favourites_count=None,
               url=None,
               status=None):
    self.id = id
    self.name = name
    self.screen_name = screen_name
    self.location = location
    self.description = description
    self.profile_image_url = profile_image_url
    self.profile_background_tile = profile_background_tile
    self.profile_background_image_url = profile_background_image_url
    self.profile_sidebar_fill_color = profile_sidebar_fill_color
    self.profile_background_color = profile_background_color
    self.profile_link_color = profile_link_color
    self.profile_text_color = profile_text_color
    self.protected = protected
    self.utc_offset = utc_offset
    self.time_zone = time_zone
    self.followers_count = followers_count
    self.friends_count = friends_count
    self.statuses_count = statuses_count
    self.favourites_count = favourites_count
    self.url = url
    self.status = status

  def GetId(self):
    '''Get the unique id of this user.

    Returns:
      The unique id of this user
    '''
    return self._id

  def SetId(self, id):
    '''Set the unique id of this user.

    Args:
      id: The unique id of this user.
    '''
    self._id = id

  id = property(GetId, SetId,
                doc='The unique id of this user.')

  def GetName(self):
    '''Get the real name of this user.

    Returns:
      The real name of this user
    '''
    return self._name

  def SetName(self, name):
    '''Set the real name of this user.

    Args:
      name: The real name of this user
    '''
    self._name = name

  name = property(GetName, SetName,
                  doc='The real name of this user.')

  def GetScreenName(self):
    '''Get the short username of this user.

    Returns:
      The short username of this user
    '''
    return self._screen_name

  def SetScreenName(self, screen_name):
    '''Set the short username of this user.

    Args:
      screen_name: the short username of this user
    '''
    self._screen_name = screen_name

  screen_name = property(GetScreenName, SetScreenName,
                         doc='The short username of this user.')

  def GetLocation(self):
    '''Get the geographic location of this user.

    Returns:
      The geographic location of this user
    '''
    return self._location

  def SetLocation(self, location):
    '''Set the geographic location of this user.

    Args:
      location: The geographic location of this user
    '''
    self._location = location

  location = property(GetLocation, SetLocation,
                      doc='The geographic location of this user.')

  def GetDescription(self):
    '''Get the short text description of this user.

    Returns:
      The short text description of this user
    '''
    return self._description

  def SetDescription(self, description):
    '''Set the short text description of this user.

    Args:
      description: The short text description of this user
    '''
    self._description = description

  description = property(GetDescription, SetDescription,
                         doc='The short text description of this user.')

  def GetUrl(self):
    '''Get the homepage url of this user.

    Returns:
      The homepage url of this user
    '''
    return self._url

  def SetUrl(self, url):
    '''Set the homepage url of this user.

    Args:
      url: The homepage url of this user
    '''
    self._url = url

  url = property(GetUrl, SetUrl,
                 doc='The homepage url of this user.')

  def GetProfileImageUrl(self):
    '''Get the url of the thumbnail of this user.

    Returns:
      The url of the thumbnail of this user
    '''
    return self._profile_image_url

  def SetProfileImageUrl(self, profile_image_url):
    '''Set the url of the thumbnail of this user.

    Args:
      profile_image_url: The url of the thumbnail of this user
    '''
    self._profile_image_url = profile_image_url

  profile_image_url = property(GetProfileImageUrl, SetProfileImageUrl,
                               doc='The url of the thumbnail of this user.')

  def GetProfileBackgroundTile(self):
    '''Boolean for whether to tile the profile background image.

    Returns:
      True if the background is to be tiled, False if not, None if unset.
    '''
    return self._profile_background_tile

  def SetProfileBackgroundTile(self, profile_background_tile):
    '''Set the boolean flag for whether to tile the profile background image.

    Args:
      profile_background_tile: Boolean flag for whether to tile or not.
    '''
    self._profile_background_tile = profile_background_tile

  profile_background_tile = property(GetProfileBackgroundTile, SetProfileBackgroundTile,
                                     doc='Boolean for whether to tile the background image.')

  def GetProfileBackgroundImageUrl(self):
    return self._profile_background_image_url

  def SetProfileBackgroundImageUrl(self, profile_background_image_url):
    self._profile_background_image_url = profile_background_image_url

  profile_background_image_url = property(GetProfileBackgroundImageUrl, SetProfileBackgroundImageUrl,
                                          doc='The url of the profile background of this user.')

  def GetProfileSidebarFillColor(self):
    return self._profile_sidebar_fill_color

  def SetProfileSidebarFillColor(self, profile_sidebar_fill_color):
    self._profile_sidebar_fill_color = profile_sidebar_fill_color

  profile_sidebar_fill_color = property(GetProfileSidebarFillColor, SetProfileSidebarFillColor)

  def GetProfileBackgroundColor(self):
    return self._profile_background_color

  def SetProfileBackgroundColor(self, profile_background_color):
    self._profile_background_color = profile_background_color

  profile_background_color = property(GetProfileBackgroundColor, SetProfileBackgroundColor)

  def GetProfileLinkColor(self):
    return self._profile_link_color

  def SetProfileLinkColor(self, profile_link_color):
    self._profile_link_color = profile_link_color

  profile_link_color = property(GetProfileLinkColor, SetProfileLinkColor)

  def GetProfileTextColor(self):
    return self._profile_text_color

  def SetProfileTextColor(self, profile_text_color):
    self._profile_text_color = profile_text_color

  profile_text_color = property(GetProfileTextColor, SetProfileTextColor)

  def GetProtected(self):
    return self._protected

  def SetProtected(self, protected):
    self._protected = protected

  protected = property(GetProtected, SetProtected)

  def GetUtcOffset(self):
    return self._utc_offset

  def SetUtcOffset(self, utc_offset):
    self._utc_offset = utc_offset

  utc_offset = property(GetUtcOffset, SetUtcOffset)

  def GetTimeZone(self):
    '''Returns the current time zone string for the user.

    Returns:
      The descriptive time zone string for the user.
    '''
    return self._time_zone

  def SetTimeZone(self, time_zone):
    '''Sets the user's time zone string.

    Args:
      time_zone: The descriptive time zone to assign for the user.
    '''
    self._time_zone = time_zone

  time_zone = property(GetTimeZone, SetTimeZone)

  def GetStatus(self):
    '''Get the latest twitter.Status of this user.

    Returns:
      The latest twitter.Status of this user
    '''
    return self._status

  def SetStatus(self, status):
    '''Set the latest twitter.Status of this user.

    Args:
      status: The latest twitter.Status of this user
    '''
    self._status = status

  status = property(GetStatus, SetStatus,
                    doc='The latest twitter.Status of this user.')

  def GetFriendsCount(self):
    '''Get the friend count for this user.

    Returns:
      The number of users this user has befriended.
    '''
    return self._friends_count

  def SetFriendsCount(self, count):
    '''Set the friend count for this user.

    Args:
      count: The number of users this user has befriended.
    '''
    self._friends_count = count

  friends_count = property(GetFriendsCount, SetFriendsCount,
                           doc='The number of friends for this user.')

  def GetFollowersCount(self):
    '''Get the follower count for this user.

    Returns:
      The number of users following this user.
    '''
    return self._followers_count

  def SetFollowersCount(self, count):
    '''Set the follower count for this user.

    Args:
      count: The number of users following this user.
    '''
    self._followers_count = count

  followers_count = property(GetFollowersCount, SetFollowersCount,
                             doc='The number of users following this user.')

  def GetStatusesCount(self):
    '''Get the number of status updates for this user.

    Returns:
      The number of status updates for this user.
    '''
    return self._statuses_count

  def SetStatusesCount(self, count):
    '''Set the status update count for this user.

    Args:
      count: The number of updates for this user.
    '''
    self._statuses_count = count

  statuses_count = property(GetStatusesCount, SetStatusesCount,
                            doc='The number of updates for this user.')

  def GetFavouritesCount(self):
    '''Get the number of favourites for this user.

    Returns:
      The number of favourites for this user.
    '''
    return self._favourites_count

  def SetFavouritesCount(self, count):
    '''Set the favourite count for this user.

    Args:
      count: The number of favourites for this user.
    '''
    self._favourites_count = count

  favourites_count = property(GetFavouritesCount, SetFavouritesCount,
                              doc='The number of favourites for this user.')

  def __ne__(self, other):
    return not self.__eq__(other)

  def __eq__(self, other):
    try:
      return other and \
             self.id == other.id and \
             self.name == other.name and \
             self.screen_name == other.screen_name and \
             self.location == other.location and \
             self.description == other.description and \
             self.profile_image_url == other.profile_image_url and \
             self.profile_background_tile == other.profile_background_tile and \
             self.profile_background_image_url == other.profile_background_image_url and \
             self.profile_sidebar_fill_color == other.profile_sidebar_fill_color and \
             self.profile_background_color == other.profile_background_color and \
             self.profile_link_color == other.profile_link_color and \
             self.profile_text_color == other.profile_text_color and \
             self.protected == other.protected and \
             self.utc_offset == other.utc_offset and \
             self.time_zone == other.time_zone and \
             self.url == other.url and \
             self.statuses_count == other.statuses_count and \
             self.followers_count == other.followers_count and \
             self.favourites_count == other.favourites_count and \
             self.friends_count == other.friends_count and \
             self.status == other.status
    except AttributeError:
      return False

  def __str__(self):
    '''A string representation of this twitter.User instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.User instance.
    '''
    return self.AsJsonString()

  def AsJsonString(self):
    '''A JSON string representation of this twitter.User instance.

    Returns:
      A JSON string representation of this twitter.User instance
    '''
    return simplejson.dumps(self.AsDict(), sort_keys=True)

  def AsDict(self):
    '''A dict representation of this twitter.User instance.

    The return value uses the same key names as the JSON representation.

    Return:
      A dict representing this twitter.User instance
    '''
    data = {}
    if self.id:
      data['id'] = self.id
    if self.name:
      data['name'] = self.name
    if self.screen_name:
      data['screen_name'] = self.screen_name
    if self.location:
      data['location'] = self.location
    if self.description:
      data['description'] = self.description
    if self.profile_image_url:
      data['profile_image_url'] = self.profile_image_url
    if self.profile_background_tile is not None:
      data['profile_background_tile'] = self.profile_background_tile
    # Fixed: the background image url was previously stored under the
    # 'profile_sidebar_fill_color' key, and the sidebar fill color itself
    # was never serialized.
    if self.profile_background_image_url:
      data['profile_background_image_url'] = self.profile_background_image_url
    if self.profile_sidebar_fill_color:
      data['profile_sidebar_fill_color'] = self.profile_sidebar_fill_color
    if self.profile_background_color:
      data['profile_background_color'] = self.profile_background_color
    if self.profile_link_color:
      data['profile_link_color'] = self.profile_link_color
    if self.profile_text_color:
      data['profile_text_color'] = self.profile_text_color
    if self.protected is not None:
      data['protected'] = self.protected
    # NOTE(review): a truthiness test drops an utc_offset of 0 (GMT);
    # preserved as-is for backward compatibility.
    if self.utc_offset:
      data['utc_offset'] = self.utc_offset
    if self.time_zone:
      data['time_zone'] = self.time_zone
    if self.url:
      data['url'] = self.url
    if self.status:
      data['status'] = self.status.AsDict()
    if self.friends_count:
      data['friends_count'] = self.friends_count
    if self.followers_count:
      data['followers_count'] = self.followers_count
    if self.statuses_count:
      data['statuses_count'] = self.statuses_count
    if self.favourites_count:
      data['favourites_count'] = self.favourites_count
    return data

  @staticmethod
  def NewFromJsonDict(data):
    '''Create a new instance based on a JSON dict.

    Args:
      data: A JSON dict, as converted from the JSON in the twitter API

    Returns:
      A twitter.User instance
    '''
    if 'status' in data:
      status = Status.NewFromJsonDict(data['status'])
    else:
      status = None
    return User(id=data.get('id', None),
                name=data.get('name', None),
                screen_name=data.get('screen_name', None),
                location=data.get('location', None),
                description=data.get('description', None),
                statuses_count=data.get('statuses_count', None),
                followers_count=data.get('followers_count', None),
                favourites_count=data.get('favourites_count', None),
                friends_count=data.get('friends_count', None),
                profile_image_url=data.get('profile_image_url', None),
                profile_background_tile=data.get('profile_background_tile', None),
                profile_background_image_url=data.get('profile_background_image_url', None),
                profile_sidebar_fill_color=data.get('profile_sidebar_fill_color', None),
                profile_background_color=data.get('profile_background_color', None),
                profile_link_color=data.get('profile_link_color', None),
                profile_text_color=data.get('profile_text_color', None),
                protected=data.get('protected', None),
                utc_offset=data.get('utc_offset', None),
                time_zone=data.get('time_zone', None),
                url=data.get('url', None),
                status=status)
class DirectMessage(object):
'''A class representing the DirectMessage structure used by the twitter API.
The DirectMessage structure exposes the following properties:
direct_message.id
direct_message.created_at
direct_message.created_at_in_seconds # read only
direct_message.sender_id
direct_message.sender_screen_name
direct_message.recipient_id
direct_message.recipient_screen_name
direct_message.text
'''
def __init__(self,
id=None,
created_at=None,
sender_id=None,
sender_screen_name=None,
recipient_id=None,
recipient_screen_name=None,
text=None):
'''An object to hold a Twitter direct message.
This class is normally instantiated by the twitter.Api class and
returned in a sequence.
Note: Dates are posted in the form "Sat Jan 27 04:17:38 +0000 2007"
Args:
id: The unique id of this direct message
created_at: The time this direct message was posted
sender_id: The id of the twitter user that sent this message
sender_screen_name: The name of the twitter user that sent this message
recipient_id: The id of the twitter that received this message
recipient_screen_name: The name of the twitter that received this message
text: The text of this direct message
'''
self.id = id
self.created_at = created_at
self.sender_id = sender_id
self.sender_screen_name = sender_screen_name
self.recipient_id = recipient_id
self.recipient_screen_name = recipient_screen_name
self.text = text
  # Java-style Get/Set accessor pairs are retained (rather than plain
  # attributes) because the public API exposes both the methods and the
  # equivalent properties declared after each pair.
  def GetId(self):
    '''Get the unique id of this direct message.

    Returns:
      The unique id of this direct message
    '''
    return self._id

  def SetId(self, id):
    '''Set the unique id of this direct message.

    Args:
      id: The unique id of this direct message
    '''
    self._id = id

  id = property(GetId, SetId,
                doc='The unique id of this direct message.')

  def GetCreatedAt(self):
    '''Get the time this direct message was posted.

    Returns:
      The time this direct message was posted
    '''
    return self._created_at

  def SetCreatedAt(self, created_at):
    '''Set the time this direct message was posted.

    Args:
      created_at: The time this direct message was created
    '''
    self._created_at = created_at

  created_at = property(GetCreatedAt, SetCreatedAt,
                        doc='The time this direct message was posted.')

  def GetCreatedAtInSeconds(self):
    '''Get the time this direct message was posted, in seconds since the epoch.

    Returns:
      The time this direct message was posted, in seconds since the epoch.
    '''
    # created_at holds an RFC 2822 date string (see class docstring);
    # parse it and convert to a UTC epoch timestamp.
    return calendar.timegm(rfc822.parsedate(self.created_at))

  created_at_in_seconds = property(GetCreatedAtInSeconds,
                                   doc="The time this direct message was "
                                       "posted, in seconds since the epoch")

  def GetSenderId(self):
    '''Get the unique sender id of this direct message.

    Returns:
      The unique sender id of this direct message
    '''
    return self._sender_id

  def SetSenderId(self, sender_id):
    '''Set the unique sender id of this direct message.

    Args:
      sender_id: The unique sender id of this direct message
    '''
    self._sender_id = sender_id

  sender_id = property(GetSenderId, SetSenderId,
                       doc='The unique sender id of this direct message.')

  def GetSenderScreenName(self):
    '''Get the unique sender screen name of this direct message.

    Returns:
      The unique sender screen name of this direct message
    '''
    return self._sender_screen_name

  def SetSenderScreenName(self, sender_screen_name):
    '''Set the unique sender screen name of this direct message.

    Args:
      sender_screen_name: The unique sender screen name of this direct message
    '''
    self._sender_screen_name = sender_screen_name

  sender_screen_name = property(GetSenderScreenName, SetSenderScreenName,
                                doc='The unique sender screen name of this direct message.')

  def GetRecipientId(self):
    '''Get the unique recipient id of this direct message.

    Returns:
      The unique recipient id of this direct message
    '''
    return self._recipient_id

  def SetRecipientId(self, recipient_id):
    '''Set the unique recipient id of this direct message.

    Args:
      recipient_id: The unique recipient id of this direct message
    '''
    self._recipient_id = recipient_id

  recipient_id = property(GetRecipientId, SetRecipientId,
                          doc='The unique recipient id of this direct message.')

  def GetRecipientScreenName(self):
    '''Get the unique recipient screen name of this direct message.

    Returns:
      The unique recipient screen name of this direct message
    '''
    return self._recipient_screen_name

  def SetRecipientScreenName(self, recipient_screen_name):
    '''Set the unique recipient screen name of this direct message.

    Args:
      recipient_screen_name: The unique recipient screen name of this direct message
    '''
    self._recipient_screen_name = recipient_screen_name

  recipient_screen_name = property(GetRecipientScreenName, SetRecipientScreenName,
                                   doc='The unique recipient screen name of this direct message.')

  def GetText(self):
    '''Get the text of this direct message.

    Returns:
      The text of this direct message.
    '''
    return self._text

  def SetText(self, text):
    '''Set the text of this direct message.

    Args:
      text: The text of this direct message
    '''
    self._text = text

  text = property(GetText, SetText,
                  doc='The text of this direct message')
def __ne__(self, other):
return not self.__eq__(other)
def __eq__(self, other):
try:
return other and \
self.id == other.id and \
self.created_at == other.created_at and \
self.sender_id == other.sender_id and \
self.sender_screen_name == other.sender_screen_name and \
self.recipient_id == other.recipient_id and \
self.recipient_screen_name == other.recipient_screen_name and \
self.text == other.text
except AttributeError:
return False
  def __str__(self):
    '''A string representation of this twitter.DirectMessage instance.

    The return value is the same as the JSON string representation.

    Returns:
      A string representation of this twitter.DirectMessage instance.
    '''
    # Delegate to the JSON serializer so str() and the wire format agree.
    return self.AsJsonString()
  def AsJsonString(self):
    '''A JSON string representation of this twitter.DirectMessage instance.

    Returns:
      A JSON string representation of this twitter.DirectMessage instance
    '''
    # sort_keys gives a deterministic serialization, useful for tests
    # and comparisons.
    return simplejson.dumps(self.AsDict(), sort_keys=True)
def AsDict(self):
'''A dict representation of this twitter.DirectMessage instance.
The return value uses the same key names as the JSON representation.
Return:
A dict representing this twitter.DirectMessage instance
'''
data = {}
if self.id:
data['id'] = self.id
if self.created_at:
data['created_at'] = self.created_at
if self.sender_id:
data['sender_id'] = self.sender_id
if self.sender_screen_name:
data['sender_screen_name'] = self.sender_screen_name
if self.recipient_id:
data['recipient_id'] = self.recipient_id
if self.recipient_screen_name:
data['recipient_screen_name'] = self.recipient_screen_name
if self.text:
data['text'] = self.text
return data
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data: A JSON dict, as converted from the JSON in the twitter API
Returns:
A twitter.DirectMessage instance
'''
return DirectMessage(created_at=data.get('created_at', None),
recipient_id=data.get('recipient_id', None),
sender_id=data.get('sender_id', None),
text=data.get('text', None),
sender_screen_name=data.get('sender_screen_name', None),
id=data.get('id', None),
recipient_screen_name=data.get('recipient_screen_name', None))
class Api(object):
  '''A python interface into the Twitter API

  By default, the Api caches results for 1 minute.

  Example usage:
    To create an instance of the twitter.Api class, with no authentication:
      >>> import twitter
      >>> api = twitter.Api()
    To fetch the most recently posted public twitter status messages:
      >>> statuses = api.GetPublicTimeline()
      >>> print [s.user.name for s in statuses]
      [u'DeWitt', u'Kesuke Miyagi', u'ev', u'Buzz Andersen', u'Biz Stone'] #...
    To fetch a single user's public status messages, where "user" is either
    a Twitter "short name" or their user id.
      >>> statuses = api.GetUserTimeline(user)
      >>> print [s.text for s in statuses]
    To use authentication, instantiate the twitter.Api class with a
    username and password:
      >>> api = twitter.Api(username='twitter user', password='twitter pass')
    To fetch your friends (after being authenticated):
      >>> users = api.GetFriends()
      >>> print [u.name for u in users]
    To post a twitter status message (after being authenticated):
      >>> status = api.PostUpdate('I love python-twitter!')
      >>> print status.text
      I love python-twitter!
    There are many other methods, including:
      >>> api.PostUpdates(status)
      >>> api.PostDirectMessage(user, text)
      >>> api.GetUser(user)
      >>> api.GetReplies()
      >>> api.GetUserTimeline(user)
      >>> api.GetStatus(id)
      >>> api.DestroyStatus(id)
      >>> api.GetFriendsTimeline(user)
      >>> api.GetFriends(user)
      >>> api.GetFollowers()
      >>> api.GetFeatured()
      >>> api.GetDirectMessages()
      >>> api.PostDirectMessage(user, text)
      >>> api.DestroyDirectMessage(id)
      >>> api.DestroyFriendship(user)
      >>> api.CreateFriendship(user)
      >>> api.GetUserByEmail(email)
  '''

  # How long (seconds) a cached HTTP response may be reused.
  DEFAULT_CACHE_TIMEOUT = 60  # cache for 1 minute

  # Realm name used when registering Basic auth credentials.
  _API_REALM = 'Twitter API'

  def __init__(self,
               username=None,
               password=None,
               input_encoding=None,
               request_headers=None):
    '''Instantiate a new twitter.Api object.

    Args:
      username: The username of the twitter account. [optional]
      password: The password for the twitter account. [optional]
      input_encoding: The encoding used to encode input strings. [optional]
      request_headers: A dictionary of additional HTTP request headers. [optional]
    '''
    # When running on Google App Engine there is no writable local
    # filesystem, so the file-based response cache must be disabled.
    try:
      import google.appengine.api
      self._cache = None
    except ImportError:
      self._cache = _FileCache()
    self._urllib = urllib2
    self._cache_timeout = Api.DEFAULT_CACHE_TIMEOUT
    self._InitializeRequestHeaders(request_headers)
    self._InitializeUserAgent()
    self._InitializeDefaultParameters()
    self._input_encoding = input_encoding
    self.SetCredentials(username, password)
  def GetPublicTimeline(self, since_id=None):
    '''Fetch the sequence of public twitter.Status message for all users.

    Args:
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]

    Returns:
      An sequence of twitter.Status instances, one for each message
    '''
    parameters = {}
    if since_id:
      parameters['since_id'] = since_id
    url = 'http://twitter.com/statuses/public_timeline.json'
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [Status.NewFromJsonDict(x) for x in data]

  def GetFriendsTimeline(self,
                         user=None,
                         count=None,
                         since=None,
                         since_id=None):
    '''Fetch the sequence of twitter.Status messages for a user's friends

    The twitter.Api instance must be authenticated if the user is private.

    Args:
      user:
        Specifies the ID or screen name of the user for whom to return
        the friends_timeline. If unspecified, the username and password
        must be set in the twitter.Api instance. [Optional]
      count:
        Specifies the number of statuses to retrieve. May not be
        greater than 200. [Optional]
      since:
        Narrows the returned results to just those statuses created
        after the specified HTTP-formatted date. [Optional]
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]

    Returns:
      A sequence of twitter.Status instances, one for each message
    '''
    if user:
      url = 'http://twitter.com/statuses/friends_timeline/%s.json' % user
    elif not user and not self._username:
      raise TwitterError("User must be specified if API is not authenticated.")
    else:
      url = 'http://twitter.com/statuses/friends_timeline.json'
    parameters = {}
    if count is not None:
      # Twitter caps this endpoint at 200 statuses per request.
      try:
        if int(count) > 200:
          raise TwitterError("'count' may not be greater than 200")
      except ValueError:
        raise TwitterError("'count' must be an integer")
      parameters['count'] = count
    if since:
      parameters['since'] = since
    if since_id:
      parameters['since_id'] = since_id
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [Status.NewFromJsonDict(x) for x in data]
def GetUserTimeline(self, user=None, count=None, since=None, since_id=None):
'''Fetch the sequence of public twitter.Status messages for a single user.
The twitter.Api instance must be authenticated if the user is private.
Args:
user:
either the username (short_name) or id of the user to retrieve. If
not specified, then the current authenticated user is used. [optional]
count: the number of status messages to retrieve [optional]
since:
Narrows the returned results to just those statuses created
after the specified HTTP-formatted date. [optional]
since_id:
Returns only public statuses with an ID greater than (that is,
more recent than) the specified ID. [Optional]
Returns:
A sequence of twitter.Status instances, one for each message up to count
'''
try:
if count:
int(count)
except:
raise TwitterError("Count must be an integer")
parameters = {}
if count:
parameters['count'] = count
if since:
parameters['since'] = since
if since_id:
parameters['since_id'] = since_id
if user:
url = 'http://twitter.com/statuses/user_timeline/%s.json' % user
elif not user and not self._username:
raise TwitterError("User must be specified if API is not authenticated.")
else:
url = 'http://twitter.com/statuses/user_timeline.json'
json = self._FetchUrl(url, parameters=parameters)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return [Status.NewFromJsonDict(x) for x in data]
def GetStatus(self, id):
'''Returns a single status message.
The twitter.Api instance must be authenticated if the status message is private.
Args:
id: The numerical ID of the status you're trying to retrieve.
Returns:
A twitter.Status instance representing that status message
'''
try:
if id:
int(id)
except:
raise TwitterError("id must be an integer")
url = 'http://twitter.com/statuses/show/%s.json' % id
json = self._FetchUrl(url)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return Status.NewFromJsonDict(data)
def DestroyStatus(self, id):
'''Destroys the status specified by the required ID parameter.
The twitter.Api instance must be authenticated and thee
authenticating user must be the author of the specified status.
Args:
id: The numerical ID of the status you're trying to destroy.
Returns:
A twitter.Status instance representing the destroyed status message
'''
try:
if id:
int(id)
except:
raise TwitterError("id must be an integer")
url = 'http://twitter.com/statuses/destroy/%s.json' % id
json = self._FetchUrl(url, post_data={})
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return Status.NewFromJsonDict(data)
  def PostUpdate(self, status, in_reply_to_status_id=None):
    '''Post a twitter status message from the authenticated user.

    The twitter.Api instance must be authenticated.

    Args:
      status:
        The message text to be posted. Must be less than or equal to
        140 characters.
      in_reply_to_status_id:
        The ID of an existing status that the status to be posted is
        in reply to. This implicitly sets the in_reply_to_user_id
        attribute of the resulting status to the user ID of the
        message being replied to. Invalid/missing status IDs will be
        ignored. [Optional]

    Returns:
      A twitter.Status instance representing the message posted.

    Raises:
      TwitterError: if unauthenticated or the status exceeds the limit.
    '''
    if not self._username:
      raise TwitterError("The twitter.Api instance must be authenticated.")
    url = 'http://twitter.com/statuses/update.json'
    if len(status) > CHARACTER_LIMIT:
      raise TwitterError("Text must be less than or equal to %d characters. "
                         "Consider using PostUpdates." % CHARACTER_LIMIT)
    data = {'status': status}
    if in_reply_to_status_id:
      data['in_reply_to_status_id'] = in_reply_to_status_id
    json = self._FetchUrl(url, post_data=data)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return Status.NewFromJsonDict(data)
def PostUpdates(self, status, continuation=None, **kwargs):
'''Post one or more twitter status messages from the authenticated user.
Unlike api.PostUpdate, this method will post multiple status updates
if the message is longer than 140 characters.
The twitter.Api instance must be authenticated.
Args:
status:
The message text to be posted. May be longer than 140 characters.
continuation:
The character string, if any, to be appended to all but the
last message. Note that Twitter strips trailing '...' strings
from messages. Consider using the unicode \u2026 character
(horizontal ellipsis) instead. [Defaults to None]
**kwargs:
See api.PostUpdate for a list of accepted parameters.
Returns:
A of list twitter.Status instance representing the messages posted.
'''
results = list()
if continuation is None:
continuation = ''
line_length = CHARACTER_LIMIT - len(continuation)
lines = textwrap.wrap(status, line_length)
for line in lines[0:-1]:
results.append(self.PostUpdate(line + continuation, **kwargs))
results.append(self.PostUpdate(lines[-1], **kwargs))
return results
  def GetReplies(self, since=None, since_id=None, page=None):
    '''Get a sequence of status messages representing the 20 most recent
    replies (status updates prefixed with @username) to the authenticating
    user.

    Args:
      page: Retrieves the next page of 20 replies. [Optional]
      since:
        Narrows the returned results to just those statuses created
        after the specified HTTP-formatted date. [optional]
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]

    Returns:
      A sequence of twitter.Status instances, one for each reply to the user.
    '''
    url = 'http://twitter.com/statuses/replies.json'
    if not self._username:
      raise TwitterError("The twitter.Api instance must be authenticated.")
    parameters = {}
    if since:
      parameters['since'] = since
    if since_id:
      parameters['since_id'] = since_id
    if page:
      parameters['page'] = page
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [Status.NewFromJsonDict(x) for x in data]

  def GetFriends(self, user=None, page=None):
    '''Fetch the sequence of twitter.User instances, one for each friend.

    The twitter.Api instance must be authenticated.

    Args:
      user: the username or id of the user whose friends you are fetching. If
        not specified, defaults to the authenticated user. [optional]
      page: Retrieves the next page of friends. [Optional]

    Returns:
      A sequence of twitter.User instances, one for each friend
    '''
    if not self._username:
      raise TwitterError("twitter.Api instance must be authenticated")
    if user:
      url = 'http://twitter.com/statuses/friends/%s.json' % user
    else:
      url = 'http://twitter.com/statuses/friends.json'
    parameters = {}
    if page:
      parameters['page'] = page
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [User.NewFromJsonDict(x) for x in data]

  def GetFollowers(self, page=None):
    '''Fetch the sequence of twitter.User instances, one for each follower

    The twitter.Api instance must be authenticated.

    Args:
      page: Retrieves the next page of followers. [Optional]

    Returns:
      A sequence of twitter.User instances, one for each follower
    '''
    if not self._username:
      raise TwitterError("twitter.Api instance must be authenticated")
    url = 'http://twitter.com/statuses/followers.json'
    parameters = {}
    if page:
      parameters['page'] = page
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [User.NewFromJsonDict(x) for x in data]

  def GetFeatured(self):
    '''Fetch the sequence of twitter.User instances featured on twitter.com

    The twitter.Api instance must be authenticated.

    Returns:
      A sequence of twitter.User instances
    '''
    url = 'http://twitter.com/statuses/featured.json'
    json = self._FetchUrl(url)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [User.NewFromJsonDict(x) for x in data]

  def GetUser(self, user):
    '''Returns a single user.

    The twitter.Api instance must be authenticated.

    Args:
      user: The username or id of the user to retrieve.

    Returns:
      A twitter.User instance representing that user
    '''
    url = 'http://twitter.com/users/show/%s.json' % user
    json = self._FetchUrl(url)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return User.NewFromJsonDict(data)

  def GetDirectMessages(self, since=None, since_id=None, page=None):
    '''Returns a list of the direct messages sent to the authenticating user.

    The twitter.Api instance must be authenticated.

    Args:
      since:
        Narrows the returned results to just those statuses created
        after the specified HTTP-formatted date. [optional]
      since_id:
        Returns only public statuses with an ID greater than (that is,
        more recent than) the specified ID. [Optional]
      page: Retrieves the next page of direct messages. [Optional]

    Returns:
      A sequence of twitter.DirectMessage instances
    '''
    url = 'http://twitter.com/direct_messages.json'
    if not self._username:
      raise TwitterError("The twitter.Api instance must be authenticated.")
    parameters = {}
    if since:
      parameters['since'] = since
    if since_id:
      parameters['since_id'] = since_id
    if page:
      parameters['page'] = page
    json = self._FetchUrl(url, parameters=parameters)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return [DirectMessage.NewFromJsonDict(x) for x in data]

  def PostDirectMessage(self, user, text):
    '''Post a twitter direct message from the authenticated user

    The twitter.Api instance must be authenticated.

    Args:
      user: The ID or screen name of the recipient user.
      text: The message text to be posted. Must be less than 140 characters.

    Returns:
      A twitter.DirectMessage instance representing the message posted
    '''
    if not self._username:
      raise TwitterError("The twitter.Api instance must be authenticated.")
    url = 'http://twitter.com/direct_messages/new.json'
    data = {'text': text, 'user': user}
    json = self._FetchUrl(url, post_data=data)
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return DirectMessage.NewFromJsonDict(data)

  def DestroyDirectMessage(self, id):
    '''Destroys the direct message specified in the required ID parameter.

    The twitter.Api instance must be authenticated, and the
    authenticating user must be the recipient of the specified direct
    message.

    Args:
      id: The id of the direct message to be destroyed

    Returns:
      A twitter.DirectMessage instance representing the message destroyed
    '''
    url = 'http://twitter.com/direct_messages/destroy/%s.json' % id
    # An empty post_data dict forces an HTTP POST, as the API requires.
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return DirectMessage.NewFromJsonDict(data)

  def CreateFriendship(self, user):
    '''Befriends the user specified in the user parameter as the authenticating user.

    The twitter.Api instance must be authenticated.

    Args:
      user: The ID or screen name of the user to befriend.

    Returns:
      A twitter.User instance representing the befriended user.
    '''
    url = 'http://twitter.com/friendships/create/%s.json' % user
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return User.NewFromJsonDict(data)

  def DestroyFriendship(self, user):
    '''Discontinues friendship with the user specified in the user parameter.

    The twitter.Api instance must be authenticated.

    Args:
      user: The ID or screen name of the user with whom to discontinue friendship.

    Returns:
      A twitter.User instance representing the discontinued friend.
    '''
    url = 'http://twitter.com/friendships/destroy/%s.json' % user
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return User.NewFromJsonDict(data)

  def CreateFavorite(self, status):
    '''Favorites the status specified in the status parameter as the authenticating user.

    Returns the favorite status when successful.

    The twitter.Api instance must be authenticated.

    Args:
      status: The twitter.Status instance to mark as a favorite.

    Returns:
      A twitter.Status instance representing the newly-marked favorite.
    '''
    url = 'http://twitter.com/favorites/create/%s.json' % status.id
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return Status.NewFromJsonDict(data)

  def DestroyFavorite(self, status):
    '''Un-favorites the status specified in the ID parameter as the authenticating user.

    Returns the un-favorited status in the requested format when successful.

    The twitter.Api instance must be authenticated.

    Args:
      status: The twitter.Status to unmark as a favorite.

    Returns:
      A twitter.Status instance representing the newly-unmarked favorite.
    '''
    url = 'http://twitter.com/favorites/destroy/%s.json' % status.id
    json = self._FetchUrl(url, post_data={})
    data = simplejson.loads(json)
    self._CheckForTwitterError(data)
    return Status.NewFromJsonDict(data)
def GetUserByEmail(self, email):
'''Returns a single user by email address.
Args:
email: The email of the user to retrieve.
Returns:
A twitter.User instance representing that user
'''
url = 'http://twitter.com/users/show.json?email=%s' % email
json = self._FetchUrl(url)
data = simplejson.loads(json)
self._CheckForTwitterError(data)
return User.NewFromJsonDict(data)
  def SetCredentials(self, username, password):
    '''Set the username and password for this instance

    Args:
      username: The twitter username.
      password: The twitter password.
    '''
    self._username = username
    self._password = password

  def ClearCredentials(self):
    '''Clear the username and password for this instance
    '''
    self._username = None
    self._password = None

  def SetCache(self, cache):
    '''Override the default cache.  Set to None to prevent caching.

    Args:
      cache: an instance that supports the same API as the twitter._FileCache
    '''
    self._cache = cache

  def SetUrllib(self, urllib):
    '''Override the default urllib implementation.

    Args:
      urllib: an instance that supports the same API as the urllib2 module
    '''
    self._urllib = urllib

  def SetCacheTimeout(self, cache_timeout):
    '''Override the default cache timeout.

    Args:
      cache_timeout: time, in seconds, that responses should be reused.
    '''
    self._cache_timeout = cache_timeout

  def SetUserAgent(self, user_agent):
    '''Override the default user agent

    Args:
      user_agent: a string that should be send to the server as the User-agent
    '''
    self._request_headers['User-Agent'] = user_agent

  def SetXTwitterHeaders(self, client, url, version):
    '''Set the X-Twitter HTTP headers that will be sent to the server.

    Args:
      client:
         The client name as a string.  Will be sent to the server as
         the 'X-Twitter-Client' header.
      url:
         The URL of the meta.xml as a string.  Will be sent to the server
         as the 'X-Twitter-Client-URL' header.
      version:
         The client version as a string.  Will be sent to the server
         as the 'X-Twitter-Client-Version' header.
    '''
    self._request_headers['X-Twitter-Client'] = client
    self._request_headers['X-Twitter-Client-URL'] = url
    self._request_headers['X-Twitter-Client-Version'] = version

  def SetSource(self, source):
    '''Suggest the "from source" value to be displayed on the Twitter web site.

    The value of the 'source' parameter must be first recognized by
    the Twitter server.  New source values are authorized on a case by
    case basis by the Twitter development team.

    Args:
      source:
        The source name as a string.  Will be sent to the server as
        the 'source' parameter.
    '''
    self._default_params['source'] = source
def _BuildUrl(self, url, path_elements=None, extra_params=None):
# Break url into consituent parts
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
# Add any additional path elements to the path
if path_elements:
# Filter out the path elements that have a value of None
p = [i for i in path_elements if i]
if not path.endswith('/'):
path += '/'
path += '/'.join(p)
# Add any additional query parameters to the query string
if extra_params and len(extra_params) > 0:
extra_query = self._EncodeParameters(extra_params)
# Add it to the existing query
if query:
query += '&' + extra_query
else:
query = extra_query
# Return the rebuilt URL
return urlparse.urlunparse((scheme, netloc, path, params, query, fragment))
def _InitializeRequestHeaders(self, request_headers):
if request_headers:
self._request_headers = request_headers
else:
self._request_headers = {}
  def _InitializeUserAgent(self):
    '''Build and install the default User-Agent header value.'''
    # Advertise both the underlying urllib version and this library's
    # version (module-level __version__).
    user_agent = 'Python-urllib/%s (python-twitter/%s)' % \
                 (self._urllib.__version__, __version__)
    self.SetUserAgent(user_agent)

  def _InitializeDefaultParameters(self):
    '''Reset the query parameters sent with every request (e.g. source).'''
    self._default_params = {}
  def _AddAuthorizationHeader(self, username, password):
    '''Install an HTTP Basic auth header for the given credentials.

    Does nothing when either credential is missing.
    '''
    if username and password:
      # [:-1] strips the trailing newline that encodestring appends.
      basic_auth = base64.encodestring('%s:%s' % (username, password))[:-1]
      self._request_headers['Authorization'] = 'Basic %s' % basic_auth
def _RemoveAuthorizationHeader(self):
if self._request_headers and 'Authorization' in self._request_headers:
del self._request_headers['Authorization']
  def _GetOpener(self, url, username=None, password=None):
    '''Build a URL opener, with HTTP Basic auth when credentials are given.

    Args:
      url: The URL the opener will be used for (its host registers the
        auth credentials).
      username: Optional twitter username.
      password: Optional twitter password.

    Returns:
      An opener object from self._urllib with this instance's request
      headers attached.
    '''
    if username and password:
      # Set the header eagerly as well as registering a handler, so the
      # credentials are sent without waiting for a 401 challenge.
      self._AddAuthorizationHeader(username, password)
      handler = self._urllib.HTTPBasicAuthHandler()
      (scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
      handler.add_password(Api._API_REALM, netloc, username, password)
      opener = self._urllib.build_opener(handler)
    else:
      opener = self._urllib.build_opener()
    opener.addheaders = self._request_headers.items()
    return opener

  def _Encode(self, s):
    '''Coerce *s* to a UTF-8 byte string.

    When the instance was created with input_encoding, *s* is decoded
    from that encoding first; otherwise default unicode coercion applies.
    '''
    if self._input_encoding:
      return unicode(s, self._input_encoding).encode('utf-8')
    else:
      return unicode(s).encode('utf-8')
def _EncodeParameters(self, parameters):
'''Return a string in key=value&key=value form
Values of None are not included in the output string.
Args:
parameters:
A dict of (key, value) tuples, where value is encoded as
specified by self._encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if parameters is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in parameters.items() if v is not None]))
def _EncodePostData(self, post_data):
'''Return a string in key=value&key=value form
Values are assumed to be encoded in the format specified by self._encoding,
and are subsequently URL encoded.
Args:
post_data:
A dict of (key, value) tuples, where value is encoded as
specified by self._encoding
Returns:
A URL-encoded string in "key=value&key=value" form
'''
if post_data is None:
return None
else:
return urllib.urlencode(dict([(k, self._Encode(v)) for k, v in post_data.items()]))
  def _CheckForTwitterError(self, data):
    """Raises a TwitterError if twitter returns an error message.

    Args:
      data: A python dict created from the Twitter json response

    Raises:
      TwitterError wrapping the twitter error message if one exists.
    """
    # Twitter errors are relatively unlikely, so it is faster
    # to check first, rather than try and catch the exception
    if 'error' in data:
      raise TwitterError(data['error'])
  def _FetchUrl(self,
                url,
                post_data=None,
                parameters=None,
                no_cache=None):
    '''Fetch a URL, optionally caching for a specified time.

    Args:
      url: The URL to retrieve
      post_data:
        A dict of (str, unicode) key/value pairs. If set, POST will be used.
      parameters:
        A dict whose key/value pairs should encoded and added
        to the query string. [OPTIONAL]
      no_cache: If true, overrides the cache on the current request

    Returns:
      A string containing the body of the response.
    '''
    # Build the extra parameters dict
    extra_params = {}
    if self._default_params:
      extra_params.update(self._default_params)
    if parameters:
      extra_params.update(parameters)

    # Add key/value parameters to the query string of the url
    url = self._BuildUrl(url, extra_params=extra_params)

    # Get a url opener that can handle basic auth
    opener = self._GetOpener(url, username=self._username, password=self._password)

    encoded_post_data = self._EncodePostData(post_data)

    # Open and return the URL immediately if we're not going to cache.
    # POSTs are never cached -- they have side effects on the server.
    if encoded_post_data or no_cache or not self._cache or not self._cache_timeout:
      url_data = opener.open(url, encoded_post_data).read()
      opener.close()
    else:
      # Unique keys are a combination of the url and the username
      if self._username:
        key = self._username + ':' + url
      else:
        key = url

      # See if it has been cached before
      last_cached = self._cache.GetCachedTime(key)

      # If the cached version is outdated then fetch another and store it
      if not last_cached or time.time() >= last_cached + self._cache_timeout:
        url_data = opener.open(url, encoded_post_data).read()
        opener.close()
        self._cache.Set(key, url_data)
      else:
        url_data = self._cache.Get(key)

    # Always return the latest version
    return url_data
class _FileCacheError(Exception):
  '''Base exception class for FileCache related errors'''
class _FileCache(object):
  '''A simple on-disk cache keyed by hashed strings.

  Entries live under a per-user directory in the system temp dir,
  sharded into subdirectories by the first DEPTH hex characters of the
  key's MD5 digest.
  '''

  # Number of single-character directory levels used to shard entries.
  DEPTH = 3

  def __init__(self, root_directory=None):
    self._InitializeRootDirectory(root_directory)

  def Get(self, key):
    '''Return the cached data for *key*, or None when absent.'''
    path = self._GetPath(key)
    if os.path.exists(path):
      return open(path).read()
    else:
      return None

  def Set(self, key, data):
    '''Store *data* under *key*, replacing any previous entry.'''
    path = self._GetPath(key)
    directory = os.path.dirname(path)
    if not os.path.exists(directory):
      os.makedirs(directory)
    if not os.path.isdir(directory):
      raise _FileCacheError('%s exists but is not a directory' % directory)
    # Write to a temp file first, then rename into place, so readers
    # never observe a partially written entry.
    temp_fd, temp_path = tempfile.mkstemp()
    temp_fp = os.fdopen(temp_fd, 'w')
    temp_fp.write(data)
    temp_fp.close()
    if not path.startswith(self._root_directory):
      raise _FileCacheError('%s does not appear to live under %s' %
                            (path, self._root_directory))
    if os.path.exists(path):
      os.remove(path)
    os.rename(temp_path, path)

  def Remove(self, key):
    '''Delete the cache entry for *key* if it exists.'''
    path = self._GetPath(key)
    if not path.startswith(self._root_directory):
      raise _FileCacheError('%s does not appear to live under %s' %
                            (path, self._root_directory ))
    if os.path.exists(path):
      os.remove(path)

  def GetCachedTime(self, key):
    '''Return the mtime of the entry for *key*, or None when absent.'''
    path = self._GetPath(key)
    if os.path.exists(path):
      return os.path.getmtime(path)
    else:
      return None

  def _GetUsername(self):
    '''Attempt to find the username in a cross-platform fashion.'''
    try:
      return os.getenv('USER') or \
             os.getenv('LOGNAME') or \
             os.getenv('USERNAME') or \
             os.getlogin() or \
             'nobody'
    except (IOError, OSError), e:
      return 'nobody'

  def _GetTmpCachePath(self):
    '''Return the default per-user cache directory in the temp dir.'''
    username = self._GetUsername()
    cache_directory = 'python.cache_' + username
    return os.path.join(tempfile.gettempdir(), cache_directory)

  def _InitializeRootDirectory(self, root_directory):
    '''Resolve and create the cache root directory if necessary.'''
    if not root_directory:
      root_directory = self._GetTmpCachePath()
    root_directory = os.path.abspath(root_directory)
    if not os.path.exists(root_directory):
      os.mkdir(root_directory)
    if not os.path.isdir(root_directory):
      raise _FileCacheError('%s exists but is not a directory' %
                            root_directory)
    self._root_directory = root_directory

  def _GetPath(self, key):
    '''Map *key* to its on-disk file path via an MD5 digest.'''
    # The fallback handles the old `md5` module API where md5.new()
    # constructs the hash object.
    try:
      hashed_key = md5(key).hexdigest()
    except TypeError:
      hashed_key = md5.new(key).hexdigest()
    return os.path.join(self._root_directory,
                        self._GetPrefix(hashed_key),
                        hashed_key)

  def _GetPrefix(self, hashed_key):
    '''Return the sharding subpath (first DEPTH digest chars).'''
    return os.path.sep.join(hashed_key[0:_FileCache.DEPTH])
| Python |
# Copyright 2009 Daniel Schubert
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from google.appengine.ext import db
"""Database models used in the Flowered application.
"""
class Mark(db.Model):
    # Datastore entity for a single map marker.
    timestamp = db.DateTimeProperty(auto_now_add = True)  # set automatically on first save
    geopt = db.GeoPtProperty()  # geographic position (lat/lon) of the mark
    type = db.StringProperty()  # presumably a category label -- confirm with callers; shadows builtin type()
    project = db.StringProperty()  # presumably the project this mark belongs to -- confirm with callers
class Service(db.Model):
    # Datastore entity describing credentials for an external service.
    name = db.StringProperty()  # service name
    user = db.StringProperty()  # login for the service
    password = db.StringProperty()  # NOTE(review): stored in plain text -- consider hashing/encrypting
    active = db.BooleanProperty()  # whether the service is currently enabled
| Python |
#!/usr/bin/env python
# -*- coding:Utf-8 -*-
#
# Modules
#
import ConfigParser
import os
import sys
import threading
from base.decorators import synchronized
from base.Patterns import Singleton
import core.Constantes as Constantes
import logging
logger = logging.getLogger( "TVDownloader.Configuration" )
#
# Classe
#
# Create mutex
mutex = threading.Lock()
class Configuration( object ):
    """Thread-safe singleton giving access to TVDownloader's configuration.

    Two ConfigParser instances are kept: ``configParser`` holds the merged
    view (defaults overridden by the user file) and ``configParserDefaut``
    holds only the shipped defaults, used as a fallback by get().
    """
    __metaclass__ = Singleton

    # ( section, option ) keys of the known settings
    TVD_REPERTOIRE_TELECHARGEMENT = ( "Telechargements", "dossier" )
    NAVIGATEUR_TIMEOUT = ( "Navigateur", "timeout" )
    NAVIGATEUR_THREADS = ( "Navigateur", "threads" )

    def __init__( self, configFileName = Constantes.FICHIER_CONFIGURATION_TVD, configFileNameDefaut = Constantes.FICHIER_CONFIGURATION_DEFAUT_TVD ):
        # Paths of the user and default configuration files
        self.configFileName = configFileName
        self.configFileNameDefaut = configFileNameDefaut
        # Create ConfigParser instances
        self.configParser = ConfigParser.ConfigParser()
        self.configParserDefaut = ConfigParser.ConfigParser()
        # Load both configuration files
        self.open()

    @synchronized( mutex )
    def open( self ):
        """Load the default configuration, then the user configuration."""
        if( os.path.exists( self.configFileNameDefaut ) ):
            # BUG FIX: the defaults were previously read only into
            # self.configParser, leaving self.configParserDefaut empty and
            # making the fallback branch of get() dead code. Load the default
            # file into both parsers: configParser keeps the merged view
            # (user values override defaults below), configParserDefaut keeps
            # the pristine defaults for the fallback.
            self.configParserDefaut.read( self.configFileNameDefaut )
            self.configParser.read( self.configFileNameDefaut )
        else:
            logger.warning( "Pas de fichier de configuration par defaut" )
        if( os.path.exists( self.configFileName ) ):
            self.configParser.read( self.configFileName )
        else:
            # BUG FIX: the previous message wrongly said the *default* file
            # was missing; it is the user file, created on save().
            logger.info( "Pas de fichier de configuration utilisateur ; il sera cree a la sauvegarde" )

    @synchronized( mutex )
    def save( self ):
        """Write the current (merged) configuration to the user file.

        N.B.: only one thread must call this function.
        """
        with open( self.configFileName, "w" ) as configFile:
            self.configParser.write( configFile )

    @synchronized( mutex )
    def get( self, elmt ):
        """Return the value of elmt = ( section, option ).

        Falls back to the shipped defaults; returns None (after logging)
        when the option is unknown everywhere.
        """
        ( section, option ) = elmt
        if( self.configParser.has_option( section, option ) ):
            return self.configParser.get( section, option )
        if( self.configParserDefaut.has_option( section, option ) ):
            logger.warning( "Utilisation de l'option par defaut pour %s [%s]" %( option, section ) )
            return self.configParserDefaut.get( section, option )
        logger.critical( "Impossible de trouver l'option %s [%s]" %( option, section ) )
        return None

    @synchronized( mutex )
    def set( self, elmt, value ):
        """Set option elmt = ( section, option ) in memory; call save() to persist."""
        ( section, option ) = elmt
        if( not self.configParser.has_section( section ) ):
            self.configParser.add_section( section )
        self.configParser.set( section, option, value )
| Python |
#!/usr/bin/env python
# -*- coding:Utf-8 -*-
import ftplib,socket,re,sys,urlparse,os
import subprocess,shlex,re,ctypes
import fcntl,select
libmms = ctypes.cdll.LoadLibrary("libmms.so.0")
librtmp = ctypes.cdll.LoadLibrary("librtmp.so")
import urllib,httplib
## Interface des classes effectuant le téléchargement de fichiers distant.
#
#
class DownloaderInterface :
    ## Constructor: nothing to initialise at the interface level.
    def __init__(self):
        pass
    ## Start the download.
    # Must be called exactly once, before any use of the read or stop methods.
    # @return True on success, False on failure
    def start(self):
        pass
    ## Stop the download.
    def stop(self):
        pass
    ## Read and return bytes from the downloaded stream.
    #
    # The start method must have been called beforehand so that the download is running.
    # @param n the number of bytes to read
    # @return a string of at most n bytes, of length 0 at end of stream, or None on failure
    def read (self, n) :
        # returns byte[]
        pass
    ## Return the size of the file being downloaded.
    # The returned value may change with the state of the download; only call it after start() or while downloading.
    # @return the size in bytes of the current download, None if unknown
    def getSize(self):
        pass
class FtpDownloader (DownloaderInterface) :
    ## Downloader for ftp:// URLs, built on ftplib.
    def __init__(self, url) :
        DownloaderInterface.__init__(self)
        self.url = url
        self.ftpconn = None  # ftplib.FTP control connection, set by start()
        self.size = None     # size announced by the server, if any
        self.stream = None   # data socket returned by ntransfercmd
    ## Connect, log in anonymously and start retrieving the file.
    # @return True on success, False on failure
    def start(self):
        try:
            parts = urlparse.urlparse(self.url)
            connection = ftplib.FTP(parts.netloc)
            connection.login()
            self.stream, self.size = connection.ntransfercmd("RETR " + parts.path)
            self.ftpconn = connection
        except BaseException as error:
            import traceback
            traceback.print_exc(error)
            if self.stream is not None:
                self.stream.close()
            return False
        return True
    ## @return the size announced by the server, None if unknown
    def getSize(self):
        return self.size
    ## Read up to n bytes from the data socket.
    def read (self, n) :
        return self.stream.recv(n)
    ## Close the data socket and the control connection.
    def stop(self):
        self.stream.close()
        self.ftpconn.close()
    ## @return whether this downloader handles the given URL
    @staticmethod
    def canDownload (url) :
        return url.startswith("ftp:")
class MsdlDownloader (DownloaderInterface) :
    ## Downloader for mms:// URLs, built on libmms through ctypes.
    def __init__(self, url) :
        DownloaderInterface.__init__(self)
        self.url = url
        self.size = None
        self.stream = None
    ## Connect to the MMS server and query the stream length.
    # @return True on success, False on failure
    def start(self):
        try:
            # 5000 is the bandwidth hint handed to libmms.
            self.mmscon = libmms.mmsx_connect(None, None, self.url, int(5000))
            if self.mmscon == 0:
                return False
            self.size = libmms.mmsx_get_length(self.mmscon)
        except Exception as e:
            import traceback
            traceback.print_exc(e)
            return False
        return True
    ## Read up to n bytes from the stream.
    # @return a byte string, empty at end of stream
    def read (self, n):
        buffer = ctypes.create_string_buffer(n)
        # BUG FIX: the byte count returned by mmsx_read was ignored and
        # buffer.value was returned; .value truncates at the first NUL byte,
        # which corrupts binary media data. Slice the raw buffer to the
        # number of bytes actually read instead.
        count = libmms.mmsx_read(0, self.mmscon, buffer, n)
        if count <= 0:
            return ""
        return buffer.raw[:count]
    ## Close the MMS connection.
    def stop(self):
        libmms.mmsx_close(self.mmscon)
    ## @return the stream length reported by libmms, None before start()
    def getSize(self):
        return self.size
    ## @return whether this downloader handles the given URL
    @staticmethod
    def canDownload (url) :
        return url[:4] == "mms:"
class HttpDownloader (DownloaderInterface) :
    ## Downloader for http:// URLs, following at most one redirection.
    def __init__(self, url) :
        DownloaderInterface.__init__(self)
        self.url = url
        self.size = None
    ## Open the HTTP connection and issue the GET request.
    # @return True on success, False on failure
    def start(self):
        alreadyRedirected = False
        while True:
            parts = urlparse.urlparse(self.url)
            connection = httplib.HTTPConnection(parts.netloc)
            target = parts.path if parts.query == "" else parts.path + "?" + parts.query
            try:
                connection.request("GET", target)
            except:
                return False
            reply = connection.getresponse()
            if reply.status in (301, 302):
                location = reply.getheader("Location")
                connection.close()
                # A single redirection is followed, to avoid going in circles.
                if alreadyRedirected or location is None:
                    return False
                self.url = location
                alreadyRedirected = True
                continue
            if reply.status != 200:
                connection.close()
                return False
            self.stream = reply
            contentLength = reply.getheader("Content-Length")
            if isinstance(contentLength, str):
                self.size = int(contentLength)
            return True
    ## @return the size announced by the server, None if unknown
    def getSize(self):
        return self.size
    ## Read up to n bytes from the response body.
    def read (self, n) :
        return self.stream.read(n)
    ## Close the response stream.
    def stop(self):
        self.stream.close()
    ## @return whether this downloader handles the given URL
    @staticmethod
    def canDownload (url) :
        return url.startswith("http:")
class RtmpDownloader(DownloaderInterface):
    ## Downloader for rtmp-family URLs; wraps the external rtmpdump tool and
    #  reads the stream from its stdout while parsing progress on stderr.
    # Splits an rtmp URL into scheme / host / port / app / playpath parts.
    RTMP_URL_PATTERN = re.compile("(?P<scheme>[^:]*)://(?P<host>[^/^:]*):{0,1}(?P<port>[^/]*)/(?P<app>.*?)/(?P<playpath>\w*?\:.*)", re.DOTALL)
    # Matches the stream duration in rtmpdump's stderr output (currently only
    # used by the disabled probing code below).
    LENGHT_PATTERN = re.compile("length[^0-9]*([0-9]*.[0-9]*)")
    # Matches the "(x.y%)" progress figure in rtmpdump's stderr output.
    PROGRESS_PATTERN = re.compile("\(([0-9]+[.]?[0-9]*)%\)")
    def __init__(self, url) :
        DownloaderInterface.__init__(self)
        self.command = RtmpDownloader.urlToRtmpdump(url)
        self.size = 0       # estimated total size in bytes, refined from progress output
        self.dled = 0       # number of bytes read from rtmpdump so far
        self.process = None # rtmpdump subprocess, created by start()
    ## Launch rtmpdump and wait briefly for the connection to be established.
    #  @return True on success, False on failure
    def start(self):
        arguments = shlex.split(self.command)
        self.process = subprocess.Popen( arguments, stdout = subprocess.PIPE, stderr = subprocess.PIPE)
        if self.process != None:
            # Switch stderr to non-blocking mode so draining it cannot stall us.
            fno = self.process.stderr.fileno()
            fcntl.fcntl( fno, fcntl.F_SETFL, fcntl.fcntl( fno, fcntl.F_GETFL ) | os.O_NDELAY )
            select.select([fno],[],[], 0.2)# wait a little while rtmpdump connects to the server
            try:
#            descriptionFound = False
#            while len(select.select([fno],[],[], 0.5)[0]) > 0:# look for the file description
#                line = self.process.stderr.read(100)
#                if "sampledescription:" in line:
#                    descriptionFound = True
#                    break
#            if descriptionFound:
#                while len(select.select([fno],[],[], 0.5)[0]) > 0:# look for the file size
#                    line = self.process.stderr.readline()
#                    match = re.search(RtmpDownloader.LENGHT_PATTERN, line)
#                    if match != None:
#                        self.size = float(match.group(1))
#                        break
                while len(select.select([fno],[],[], 0.5)[0]) > 0:# drain the pipe
                    line = self.process.stderr.read(100)
            except Exception, e:
                self.stop()
                return False
            return True
        else:
            return False
    ## @return the current size estimate in bytes (grows as data arrives)
    def getSize(self):
        return self.size
    ## Read up to n bytes of stream data from rtmpdump's stdout.
    #  stderr is drained as well, and the progress percentage printed there
    #  is used to refine the total-size estimate.
    def read (self, n) :
        try:
            line = ""
            while len(select.select([self.process.stderr.fileno()],[],[], 0.05)[0]) > 0:# drain the pipe, keeping the last chunk
                newLine = self.process.stderr.read(100)
                if len(newLine) == 0:
                    break;
                else:
                    line = newLine
            # Recover the progress percentage to estimate the total size.
            match = re.search(RtmpDownloader.PROGRESS_PATTERN, line)
            if match != None:
                perc = float(match.group(1))
                self.size = (100.0/perc)*self.dled
        except:
            pass
        if len(select.select([self.process.stdout.fileno()],[],[], 0.2)[0]) > 0:
            data = self.process.stdout.read(n)
            self.dled = self.dled+len(data)
            if self.dled > self.size:
                # Never report a size smaller than what was already read.
                self.size = self.dled
            return data
        else:
            # No data available: consider the download finished.
            self.size = self.dled
            return ""
    ## Terminate the rtmpdump process (best effort).
    def stop(self):
        try:
            self.process.terminate()
        except:
            pass
    ## @return whether this downloader handles the given URL
    @staticmethod
    def canDownload (url) :
        return url[:4] == "rtmp"
    ## Translate an rtmp URL into the matching rtmpdump command line.
    #  The default port depends on the exact scheme (rtmpte/rtmpe/rtmps/rtmpt).
    #  @return the rtmpdump command as a single string
    @staticmethod
    def urlToRtmpdump( url ):
        match = re.match( RtmpDownloader.RTMP_URL_PATTERN, url )
        comand = ""
        if match != None:
            comand = "rtmpdump --host %host% --port %port% --protocol %scheme% --app %app% --playpath %playpath% -o -"
            comand = comand.replace( "%scheme%", match.group( "scheme" ) ).replace( "%host%", match.group( "host" ) ).replace( "%app%", match.group( "app" ) ).replace( "%playpath%", match.group( "playpath" ) )
            if( match.group( "port" ) != "" ):
                comand = comand.replace( "%port%", match.group( "port" ) )
            elif( url[ :6 ] == "rtmpte" ):
                comand = comand.replace( "%port%", "80" )
            elif( url[ :5 ] == "rtmpe" ):
                comand = comand.replace( "%port%", "1935" )
            elif( url[ :5 ] == "rtmps" ):
                comand = comand.replace( "%port%", "443" )
            elif( url[ :5 ] == "rtmpt" ):
                comand = comand.replace( "%port%", "80" )
            else:
                comand = comand.replace( "%port%", "1935" )
        else:
            # URL not matching the pattern: let rtmpdump parse it itself.
            comand = "rtmpdump -r " + url + " -o -"
        return comand
| Python |
#!/usr/bin/env python
# -*- coding:Utf-8 -*-
###########
# Modules #
###########
import cPickle as pickle
import os
import Constantes
from Fichier import Fichier
import logging
logger = logging.getLogger( "TVDownloader" )
import threading
from tvdcore.util import SynchronizedMethod
from TVDContext import TVDContext
##########
# Classe #
##########
## Classe qui gere l'historique des telechargements
class Historique( object ):
    """Manages the download history (thread-safe singleton).

    The history maps a date to the list of Fichier downloaded at that date:
    { date1: [ files at date1 ], date2: [ files at date2 ], ... } -- hashing
    by date keeps lookups fast.
    """
    # Class instance (singleton)
    __instance = None
    ## Overridden construction to implement the singleton.
    #  Returns None (so no object is built) when the TVDContext is not
    #  initialised yet.
    def __new__(typ, *args, **kwargs):
        # Refuse to instantiate before the application context exists.
        context = TVDContext()
        if not(context.isInitialized()):
            logger.error("Le context n'est pas initialisé, impossible d'instancier")
            return None
        if Historique.__instance == None:
            return super(Historique, typ).__new__(typ, *args, **kwargs)
        else:
            return Historique.__instance
    ## Constructor: loads the saved history.
    #  Runs its body only for the very first instance; later calls return
    #  immediately because __instance is already set.
    def __init__( self ):
        if Historique.__instance != None:
            return
        Historique.__instance = self
        self.RLOCK = threading.RLock()
        self.chargerHistorique()
    ## Destructor: persists the history.
    #  NOTE(review): relying on __del__ for persistence is fragile (it may
    #  not run at interpreter shutdown); an explicit save would be safer.
    def __del__( self ):
        self.sauverHistorique()
    ## Load the existing history from disk.
    @SynchronizedMethod
    def chargerHistorique( self ):
        if os.path.exists( Constantes.FICHIER_HISTORIQUE_TVD ): # load the file if it exists
            logger.info( "chargerHistorique : chargement de l'historique" )
            with open( Constantes.FICHIER_HISTORIQUE_TVD, "r" ) as fichier:
                self.historique = pickle.load( fichier )
        else: # otherwise, start with an empty history
            logger.info( "chargerHistorique : fichier historique non trouve ; creation" )
            self.historique = {}
    ## Add a file to the history.
    # @param nouveauFichier Fichier to add to the history (ignored otherwise)
    @SynchronizedMethod
    def ajouterHistorique( self, nouveauFichier ):
        if( isinstance( nouveauFichier, Fichier ) ):
            date = nouveauFichier.date
            if( self.historique.has_key( date ) ):
                self.historique[ date ].append( nouveauFichier )
            else:
                self.historique[ date ] = [ nouveauFichier ]
    ## Save the history to disk (pickles the whole date -> files mapping).
    @SynchronizedMethod
    def sauverHistorique( self ):
        # NOTE(review): the pickle file is opened in text mode ("r"/"w"),
        # which only works on Python 2 -- confirm before porting.
        with open( Constantes.FICHIER_HISTORIQUE_TVD, "w" ) as fichier:
            logger.info( "sauverHistorique : sauvegarde de l'historique" )
            pickle.dump( self.historique, fichier )
    ## Check whether a file is already in the history.
    # @param fichier Fichier to look up in the history
    # @return whether the file is present in the history
    @SynchronizedMethod
    def comparerHistorique( self, fichier ):
        if( isinstance( fichier, Fichier ) ):
            date = fichier.date
            if( self.historique.has_key( date ) ):
                return fichier in self.historique[ date ]
            else:
                return False
        else:
            return False
    ## Clean the history: remove the oldest entries.
    #  NOTE(review): currently only logs -- the pruning itself is not
    #  implemented.
    @SynchronizedMethod
    def nettoieHistorique( self ):
        logger.info( "nettoieHistorique : suppression des vieilles reference de l'historique" )
| Python |
#!/usr/bin/env python
# -*- coding:Utf-8 -*-
#
# Modules
#
import os
import sys
#
# Variables
#
# Program name and version
TVD_NOM = "TVDownloader"
TVD_VERSION = "1.0 alpha"
# Operating-system flags: exactly one of the two is set to True below.
OS_UNIX = False
OS_WINDOWS = False
if( sys.platform.lower()[ : 3 ] == "win" ):
    OS_WINDOWS = True
else:
    OS_UNIX = True
# Base directories: an explicit TVDOWNLOADER_HOME wins, then the Windows
# APPDATA location, then XDG-style directories under the user's home.
if( "TVDOWNLOADER_HOME" in os.environ ):
    REPERTOIRE_HOME = os.path.join( os.environ[ "TVDOWNLOADER_HOME" ] )
    REPERTOIRE_CACHE = os.path.join( REPERTOIRE_HOME, "cache" )
    REPERTOIRE_CONFIGURATION = os.path.join( REPERTOIRE_HOME, "config" )
elif( "APPDATA" in os.environ ):
    REPERTOIRE_HOME = os.path.join( os.environ[ "APPDATA" ], "tvdownloader" )
    REPERTOIRE_CACHE = os.path.join( REPERTOIRE_HOME, "cache" )
    REPERTOIRE_CONFIGURATION = os.path.join( REPERTOIRE_HOME, "config" )
else:
    REPERTOIRE_HOME = os.path.expanduser( "~" )
    REPERTOIRE_CACHE = os.path.join( REPERTOIRE_HOME, ".cache", "tvdownloader" )
    REPERTOIRE_CONFIGURATION = os.path.join( REPERTOIRE_HOME, ".config", "tvdownloader" )
REPERTOIRE_LOGS = os.path.join( REPERTOIRE_CONFIGURATION, "logs" )
REPERTOIRE_PLUGIN_PERSO = os.path.join( REPERTOIRE_CONFIGURATION, "plugins" )
REPERTOIRE_TELECHARGEMENT_DEFAUT = os.path.join( os.path.expanduser( "~" ), "Videos_TVDownloader" )
# Directories searched for plugins (user directory first)
REPERTOIRES_PLUGINS = [ REPERTOIRE_PLUGIN_PERSO,
                        "plugins" ]
# Configuration and history file locations
FICHIER_CONFIGURATION_TVD = os.path.join( REPERTOIRE_CONFIGURATION, "tvdownloader.cfg" )
FICHIER_CONFIGURATION_DEFAUT_TVD = os.path.join( os.path.dirname( os.path.abspath( __file__ ) ), "tvdownloader_defaut.cfg" )
FICHIER_HISTORIQUE_TVD = os.path.join( REPERTOIRE_CONFIGURATION, "historique" )
| Python |
#!/usr/bin/env python
# -*- coding:Utf-8 -*-
###########
# Modules #
###########
import datetime
import os.path
import time
from DownloaderFactory import DownloaderFactory
import util as lib
import util.html
import util.fichierDossier
import logging
logger = logging.getLogger( "TVDownloader" )
##########
# Classe #
##########
## Classe qui contient les informations d'un fichier
## Holds the metadata of a single downloadable file.
class Fichier(object):
    # Shared factory used to build the right downloader for a file's URL.
    DOWNLOADER_FACTORY = DownloaderFactory()

    ## Constructor
    # @param nom              display name of the file (HTML tags are stripped)
    # @param date             date of the file (defaults to "now", computed per call)
    # @param lien             URL where the file can be fetched
    # @param nomFichierSortie output file name (derived from the URL when empty)
    # @param urlImage         URL of the thumbnail image to display
    # @param descriptif       descriptive text to display (HTML tags are stripped)
    def __init__( self, nom, date = None, lien = "", nomFichierSortie = "", urlImage = "", descriptif = "" ):
        # BUG FIX: the default used to be "date = datetime.datetime.now()",
        # which is evaluated once at import time, so every Fichier built
        # without an explicit date silently shared the module-load timestamp.
        # A None sentinel gives a fresh timestamp per call.
        if( date is None ):
            date = datetime.datetime.now()
        self.nom = util.html.supprimeBalisesHTML( nom )
        self.date = date
        self.lien = lien
        if( nomFichierSortie == "" ):
            self.nomFichierSortie = util.fichierDossier.chaineToNomFichier( os.path.basename( self.lien ) )
        else:
            self.nomFichierSortie = util.fichierDossier.chaineToNomFichier( nomFichierSortie )
        self.urlImage = urlImage
        self.descriptif = util.html.supprimeBalisesHTML( descriptif )

    ## Equality: two Fichier are equal when name, date and URL all match.
    # @param autre the other Fichier to compare with
    # @return True when both objects describe the same file
    def __eq__( self, autre ):
        if not isinstance( autre, Fichier ):
            return False
        return ( self.nom == autre.nom and self.date == autre.date and self.lien == autre.lien )

    ## Inequality, defined as the negation of __eq__.
    # @param autre the other Fichier to compare with
    # @return True when the two files differ
    def __ne__( self, autre ):
        return not self.__eq__( autre )

    ## Human-readable representation (name and URL).
    def __str__( self ):
        return "--> Fichier :\nNom : %s\nLien : %s" %( self.nom, self.lien )

    ## Build the downloader able to fetch this file's URL.
    def getDownloader(self):
        return Fichier.DOWNLOADER_FACTORY.create(self.lien)
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.