code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
# Django settings for project project.
DEBUG = True
TEMPLATE_DEBUG = DEBUG
# People who get code-error notification emails when DEBUG=False.
ADMINS = (
    # ('Your Name', 'your_email@domain.com'),
    ('Kundan Singh', 'kundan10@gmail.com'),
)
MANAGERS = ADMINS
# NOTE(review): no database engine is configured below; the apps in this
# project appear to use the App Engine datastore directly -- confirm before
# enabling any Django ORM feature.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.', # Add 'postgresql_psycopg2', 'postgresql', 'mysql', 'sqlite3' or 'oracle'.
        'NAME': '', # Or path to database file if using sqlite3.
        'USER': '', # Not used with sqlite3.
        'PASSWORD': '', # Not used with sqlite3.
        'HOST': '', # Set to empty string for localhost. Not used with sqlite3.
        'PORT': '', # Set to empty string for default. Not used with sqlite3.
    }
}
# Local time zone for this installation. Choices can be found here:
# http://en.wikipedia.org/wiki/List_of_tz_zones_by_name
# although not all choices may be available on all operating systems.
# On Unix systems, a value of None will cause Django to use the same
# timezone as the operating system.
# If running in a Windows environment this must be set to the same as your
# system time zone.
TIME_ZONE = 'America/Los_Angeles'
# Language code for this installation. All choices can be found here:
# http://www.i18nguy.com/unicode/language-identifiers.html
LANGUAGE_CODE = 'en-us'
SITE_ID = 1
# If you set this to False, Django will make some optimizations so as not
# to load the internationalization machinery.
USE_I18N = True
# If you set this to False, Django will not format dates, numbers and
# calendars according to the current locale
USE_L10N = True
# Absolute path to the directory that holds media.
# Example: "/home/media/media.lawrence.com/"
MEDIA_ROOT = ''
# URL that handles the media served from MEDIA_ROOT. Make sure to use a
# trailing slash if there is a path component (optional in other cases).
# Examples: "http://media.lawrence.com", "http://example.com/media/"
MEDIA_URL = ''
# URL prefix for admin media -- CSS, JavaScript and images. Make sure to use a
# trailing slash.
# Examples: "http://foo.com/media/", "/media/".
ADMIN_MEDIA_PREFIX = '/static/media/'
# Make this unique, and don't share it with anybody.
# NOTE(review): this key is committed to source control; it should be rotated
# and loaded from the environment before any production deployment.
SECRET_KEY = 'lve5_4^@l3v&4we6&o)*y2kgrd))iame%x(bw+*e)$h7tw8(d7'
# List of callables that know how to import templates from various sources.
# NOTE(review): the active entries use the old function-based loader names
# (load_template_source); newer Django versions use the class-based Loader
# form shown commented out -- confirm the Django version in use before
# switching.
TEMPLATE_LOADERS = (
    #'django.template.loaders.filesystem.Loader',
    'django.template.loaders.filesystem.load_template_source',
    'django.template.loaders.app_directories.load_template_source',
    # 'django.template.loaders.eggs.Loader',
)
# Sessions, CSRF, auth and messages middleware are all disabled here; only
# the common middleware is active.
MIDDLEWARE_CLASSES = (
    'django.middleware.common.CommonMiddleware',
    # 'django.contrib.sessions.middleware.SessionMiddleware',
    # 'django.middleware.csrf.CsrfViewMiddleware',
    # 'django.contrib.auth.middleware.AuthenticationMiddleware',
    # 'django.contrib.messages.middleware.MessageMiddleware',
)
ROOT_URLCONF = 'project.urls'
# Templates live in a "templates" directory next to this settings file.
import os
ROOT_PATH = os.path.dirname(__file__)
TEMPLATE_DIRS = (
    # Put strings here, like "/home/html/django_templates" or "C:/www/django/templates".
    # Always use forward slashes, even on Windows.
    # Don't forget to use absolute paths, not relative paths.
    ROOT_PATH + '/templates',
)
INSTALLED_APPS = (
    # 'django.contrib.auth',
    'django.contrib.contenttypes',
    # 'django.contrib.sessions',
    'django.contrib.sites',
    # 'django.contrib.messages',
    # Uncomment the next line to enable the admin:
    'django.contrib.admin',
    'project.experts',
    'project.office',
)
| Python |
import logging, sys
from google.appengine.api import xmpp
from project.experts.views.talk import xmpp_handler as handler1
from project.office.views import xmpp_handler as handler2
from django.http import HttpResponse, HttpResponseServerError, HttpResponseNotAllowed
def xmpp_handler(request):
    """Dispatch an incoming XMPP message to the experts/office handlers.

    Expects a POST from App Engine's XMPP service. The message body must
    start with the receiver's email address followed by the text to relay.
    Replies to the XMPP sender with a delivery summary and returns an empty
    200 response (or 405/500 on error).
    """
    try:
        # logging.info('received xmpp_request %r'%(request,))
        if request.method == 'POST':
            message = xmpp.Message(request.POST)
            receiver, ignore, body = message.body.partition(' ')
            # logging.info(' receiver=%r body=%r'%(receiver, body))
            if '@' not in receiver:
                message.reply('Please start your message with email address of the receiver.')
            else:
                # Strip the XMPP resource: "user@host/resource" -> "user@host".
                sender = message.sender.partition('/')[0]
                sent_count = 0
                for handler in [handler1, handler2]:
                    sent_count += handler(sender, receiver, body)
                if not sent_count:
                    message.reply('Could not send message to ' + receiver)
                elif sent_count == 1:
                    message.reply('Sent your message')
                else:
                    message.reply('Sent your message to %d session(s)'%(sent_count,))
            return HttpResponse()
        else:
            # Bug fix: HttpResponseNotAllowed requires the list of permitted
            # methods; calling it with no arguments raises a TypeError.
            return HttpResponseNotAllowed(['POST'])
    except Exception:
        # Catch Exception rather than a bare except so SystemExit and
        # KeyboardInterrupt still propagate; log and report a 500.
        logging.info('xmpp_handler exception %r'%(sys.exc_info(),))
        return HttpResponseServerError()
| Python |
#!/usr/bin/env python
import os
import optparse
import subprocess
import sys
here = os.path.dirname(__file__)
def main():
    """Compress admin JavaScript files with the Google Closure Compiler.

    With no file arguments, defaults to the admin scripts next to this
    script. Each input foo.js is compiled to foo.min.js alongside it.
    Exits with an error message if the compiler jar cannot be found.
    """
    usage = "usage: %prog [file1..fileN]"
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = optparse.OptionParser(usage, description=description)
    parser.add_option("-c", dest="compiler", default="~/bin/compiler.jar",
                      help="path to Closure Compiler jar file")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose")
    parser.add_option("-q", "--quiet",
                      action="store_false", dest="verbose")
    (options, args) = parser.parse_args()
    compiler = os.path.expanduser(options.compiler)
    if not os.path.exists(compiler):
        sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler)
    if not args:
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        args = [os.path.join(here, f) for f in [
            "actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]
    for arg in args:
        if not arg.endswith(".js"):
            arg = arg + ".js"
        to_compress = os.path.expanduser(arg)
        if os.path.exists(to_compress):
            # Bug fix: derive the output name from the expanded path by
            # replacing only the trailing ".js". The old code used the
            # unexpanded arg and removed *every* ".js" substring
            # (rsplit+join), mangling names like "x.js.js" or "~/x.js".
            to_compress_min = to_compress[:-3] + ".min.js"
            # Pass the command as a list so paths with spaces survive.
            cmd = ["java", "-jar", compiler,
                   "--js", to_compress, "--js_output_file", to_compress_min]
            if options.verbose:
                sys.stdout.write("Running: %s\n" % " ".join(cmd))
            subprocess.call(cmd)
        else:
            sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)

if __name__ == '__main__':
    main()
| Python |
#!/usr/bin/env python
import os
import optparse
import subprocess
import sys
here = os.path.dirname(__file__)
def main():
    """Compress admin JavaScript files with the Google Closure Compiler.

    With no file arguments, defaults to the admin scripts next to this
    script. Each input foo.js is compiled to foo.min.js alongside it.
    """
    usage = "usage: %prog [file1..fileN]"
    description = """With no file paths given this script will automatically
compress all jQuery-based files of the admin app. Requires the Google Closure
Compiler library and Java version 6 or later."""
    parser = optparse.OptionParser(usage, description=description)
    parser.add_option("-c", dest="compiler", default="~/bin/compiler.jar",
                      help="path to Closure Compiler jar file")
    parser.add_option("-v", "--verbose",
                      action="store_true", dest="verbose")
    parser.add_option("-q", "--quiet",
                      action="store_false", dest="verbose")
    (options, args) = parser.parse_args()
    compiler = os.path.expanduser(options.compiler)
    if not os.path.exists(compiler):
        sys.exit("Google Closure compiler jar file %s not found. Please use the -c option to specify the path." % compiler)
    if not args:
        if options.verbose:
            sys.stdout.write("No filenames given; defaulting to admin scripts\n")
        args = [os.path.join(here, f) for f in [
            "actions.js", "collapse.js", "inlines.js", "prepopulate.js"]]
    for arg in args:
        if not arg.endswith(".js"):
            arg = arg + ".js"
        to_compress = os.path.expanduser(arg)
        if os.path.exists(to_compress):
            # Bug fix: strip only the trailing ".js" of the *expanded* path.
            # The original stripped every ".js" substring of the raw arg,
            # which broke "x.js.js" inputs and "~"-prefixed output paths.
            to_compress_min = to_compress[:-3] + ".min.js"
            # List form avoids shell-style splitting on spaces in paths.
            cmd = ["java", "-jar", compiler,
                   "--js", to_compress, "--js_output_file", to_compress_min]
            if options.verbose:
                sys.stdout.write("Running: %s\n" % " ".join(cmd))
            subprocess.call(cmd)
        else:
            sys.stdout.write("File %s not found. Sure it exists?\n" % to_compress)

if __name__ == '__main__':
    main()
| Python |
# Copyright (c) 2010, Kundan Singh, all rights reserved.
#
# This is a chatroulette-type application built using the Flash VideoIO component on Adobe
# Stratus service and Google App Engine. This site is just a demonstration of how such
# services can be built using the generic Flash-VideoIO component, and not meant for
# production use.
#
# When you land on this page, it prompts you for some nickname, and starts publishing your
# audio and video stream, after you approve the device access. It tries to connect you with
# another person who is also on the page publishing his or her video. The status of the
# connection is displayed in the chat history area. You can also type a message to send
# to the person you are talking with.
#
# It uses Google App Engine for all session initiation and discovery of other users, and
# Adobe Stratus to do media negotiation for peer-to-peer media streams. The project contains
# one HTML file with some javascript and one Python file, with about 400 lines total. There
# is no authentication, but is easy to add using Google App Engine. You can right-click on
# this page to view the HTML and javascript source code which contributes to all front-end
# interactions and shows how to use Flash-VideoIO for chatroulette type applications.
#
# This version of the project uses the Channel API available in Google App Engine for
# asynchronous notifications of connections, disconnections and chat messages.
#
# Visit http://code.google.com/p/flash-videoio for more.
import os, random, datetime, time, logging
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from google.appengine.api import channel
from django.utils import simplejson as json
# The individual user. This is created on /login/ and destroyed on /logout/ or
# when expired -- not refreshed for 90 seconds.
# stream -- the user's stream (unique/random) used as clientId
# publish -- VideoIO URL to play this user's stream.
# play -- VideoIO URL that user is connected to.
# lastmodified -- when was this user login refreshed.
class User(db.Model):
    # stream -- unique/random id of this user's stream; also used as the
    #           Channel API client id.
    stream = db.StringProperty()
    # publish -- VideoIO URL others use to play this user's stream.
    publish = db.StringProperty()
    # play -- VideoIO URL this user is currently connected to (None if idle).
    play = db.StringProperty()
    # lastmodified -- refreshed on every put(); logins not refreshed within
    #                 90 seconds are expired by Login.post().
    lastmodified = db.DateTimeProperty(auto_now=True)
# The main page just returns index.html.
class MainPage(webapp.RequestHandler):
    """Serve the landing page (index.html) with an empty template context."""
    def get(self):
        index_path = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(index_path, {}))
# Clean up the given user by removing the connection he has and sending the
# updated connect message to his previous connection. If delete is set then
# it also deletes the user.
def cleanup_user(user, delete=True):
    """Disconnect `user` from its peer, notify the peer, optionally delete.

    If the user is paired, the peer's `play` is cleared and the peer is told
    over its channel that it is no longer connected. If the peer's channel
    client id is invalid, the peer entity itself is removed. When `delete`
    is True the user entity is deleted as well.
    """
    #logging.debug('clean_user ' + user.stream)
    if user.play:
        other = db.GqlQuery('SELECT * FROM User WHERE publish = :1', user.play).get()
        if other:
            other.play = None
            other.put()
            data = {'method': 'connect', 'play': None}
            try:
                channel.send_message(other.stream, json.dumps(data))
            except channel.InvalidChannelClientIdError:
                # NOTE(review): the same id is concatenated twice in the log
                # message; the second operand was probably meant to differ.
                logging.warn('InvalidChannelClientIdError: ' + other.stream + ' ' + other.stream)
                other.delete()
        user.play = None
    if delete:
        user.delete()
# POST /logout/?stream={stream}
class Logout(webapp.RequestHandler):
    """POST /logout/?stream={stream}: disconnect and remove the user."""
    def post(self):
        stream_id = self.request.get('stream')
        user = db.GqlQuery('SELECT * FROM User WHERE stream = :1', stream_id).get()
        if user:
            cleanup_user(user)
        else:
            self.response.set_status(404, 'Stream Not Found')
# POST /create/?stream={stream}
# Response: {"token": "new-channel-token-for-clientId-as-stream"}
class Create(webapp.RequestHandler):
    """POST /create/?stream={stream}: mint a Channel API token for the stream id."""
    def post(self):
        client_id = self.request.get('stream')
        token = channel.create_channel(client_id)
        self.response.out.write(json.dumps({'token': token}))
# POST /login/?stream={stream}[&change=true]
# Request: {"publish": "VideoIO-url-that-can-be-used-to-play-user's-local-video"}
# Response: {"play": "VideoIO-url-that-this-user-should-play"}
# If change is set, then it disconnects previous connection and attempts a new
# connection, to random person. It also cleans up expired User objects before handling.
class Login(webapp.RequestHandler):
    """Register/refresh the caller and try to pair it with a random peer."""
    def post(self):
        # Expire logins that were not refreshed within the last 90 seconds.
        users = db.GqlQuery('SELECT * FROM User WHERE lastmodified < :1', datetime.datetime.fromtimestamp(time.time()-90))
        if users:
            for user in users:
                cleanup_user(user)
        change = bool(self.request.get('change') == 'true')
        data = json.loads(self.request.body)
        user = db.GqlQuery('SELECT * FROM User WHERE stream = :1', self.request.get('stream')).get()
        if not user:
            user = User()
            user.stream = self.request.get('stream')
        user.publish = data['publish']
        user.put()
        to_send = False
        if change:
            # Drop the current pairing but keep the user; the new connection
            # state must then be pushed to the user below.
            cleanup_user(user, delete=False)
            to_send = True
        if not user.play:
            # Pick a random unconnected user other than ourselves.
            count = db.GqlQuery('SELECT * FROM User WHERE publish != :1 AND play = :2', user.publish, None).count()
            if count > 0:
                r = random.randint(0, count-1)
                others = db.GqlQuery('SELECT * FROM User WHERE publish != :1 AND play = :2', user.publish, None).fetch(1, r)
                if others:
                    other = others[0]
                    # Cross-connect: each side plays the other's publish URL.
                    other.play, user.play = user.publish, other.publish
                    #logging.debug('connecting ' + user.stream + ' ' + other.stream)
                    data = {'method': 'connect', 'play': user.play}
                    try:
                        channel.send_message(user.stream, json.dumps(data))
                    except channel.InvalidChannelClientIdError:
                        # NOTE(review): the same id is logged twice here and
                        # below -- the second operand was probably meant to
                        # identify the peer.
                        logging.warn('InvalidChannelClientIdError: ' + user.stream + ' ' + user.stream)
                        user.play = other.play = None
                    data = {'method': 'connect', 'play': other.play}
                    try:
                        channel.send_message(other.stream, json.dumps(data))
                    except channel.InvalidChannelClientIdError:
                        logging.warn('InvalidChannelClientIdError: ' + other.stream + ' ' + other.stream)
                        user.play = other.play = None
                        other.delete()
                    other.put()
                    user.put()
                    # Both sides were already notified over their channels.
                    to_send = False
                #else:
                #logging.debug('no other stream found')
        if to_send:
            user.put()
            data = {'method': 'connect', 'play': user.play}
            try:
                channel.send_message(user.stream, json.dumps(data))
            except channel.InvalidChannelClientIdError:
                logging.warn('InvalidChannelClientIdError: ' + user.stream + ' ' + user.stream)
                cleanup_user(user)
        # Report the (possibly null) play URL to the HTTP caller.
        data = {'play': user.play}
        self.response.out.write(json.dumps(data))
# POST /send/?stream={stream}
# Request: {"dest": "VideoIO-url-that-this-user-is-connected-to", "body": "text-message"}
# Send the text message to the target user identified by his VideoIO URL.
class Send(webapp.RequestHandler):
    """POST /send/?stream={stream}: relay a chat message to the connected peer.

    Request body: {"dest": VideoIO-url-the-sender-is-connected-to,
                   "body": text-message}.
    Responds 404 when the sender, its connection, or the peer cannot be found.
    """
    def post(self):
        data = json.loads(self.request.body)
        user = db.GqlQuery('SELECT * FROM User WHERE stream = :1', self.request.get('stream')).get()
        if not user:
            self.response.set_status(404, 'Stream Not Found')
        elif not user.play:
            self.response.set_status(404, 'Stream Not Connected')
        elif user.play != data['dest']:
            # The client's view of its connection is stale.
            self.response.set_status(404, 'Destination Not Found')
        else:
            other = db.GqlQuery('SELECT * FROM User WHERE publish = :1', user.play).get()
            if not other:
                self.response.set_status(404, 'Other Person Left')
            else:
                data = {'method': 'send', 'body': data['body']}
                try:
                    channel.send_message(other.stream, json.dumps(data))
                except channel.InvalidChannelClientIdError:
                    # Peer's channel is gone: log and tear down the peer.
                    logging.warn('InvalidChannelClientIdError: ' + other.stream + ' ' + other.stream)
                    cleanup_user(other)
def main():
    """Configure verbose logging and serve the WSGI application."""
    logging.getLogger().setLevel(logging.DEBUG)
    routes = [
        ('/', MainPage), ('/login/', Login), ('/logout/', Logout),
        ('/create/', Create), ('/send/', Send),
    ]
    run_wsgi_app(webapp.WSGIApplication(routes, debug=True))

if __name__ == "__main__":
    main()
| Python |
import os, random, datetime, time
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.webapp import template
class Stream(db.Model):
    # publish -- VideoIO URL of the owner's published stream (acts as id).
    publish = db.StringProperty()
    # play -- publish URL of the connected peer, or None when unpaired.
    play = db.StringProperty()
    # lastmodified -- auto-refreshed; entries older than 60s are purged.
    lastmodified = db.DateTimeProperty(auto_now=True)
class Message(db.Model):
    # receiver -- publish URL of the stream the message is addressed to.
    receiver = db.StringProperty()
    # content -- message text; may contain newlines.
    content = db.StringProperty(multiline=True)
class Call(db.Model):
    # A call pairs two Stream entities. Bug fix: `db.Key()` assignments are
    # plain class attributes, not datastore properties -- they are shared by
    # every instance and never persisted. ReferenceProperty is the
    # datastore-backed way to store a reference to another entity.
    first = db.ReferenceProperty(Stream, collection_name='calls_as_first')
    second = db.ReferenceProperty(Stream, collection_name='calls_as_second')
class MainPage(webapp.RequestHandler):
    """Render index.html from the directory containing this module."""
    def get(self):
        page = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(page, {}))
class Logout(webapp.RequestHandler):
    """POST /logout: remove the stream named in the body, unpairing its peer."""
    def post(self):
        # The raw request body is the publish URL of the leaving stream.
        publish = self.request.body
        streams = db.GqlQuery("SELECT * FROM Stream WHERE publish = :1", publish)
        if streams:
            for stream in streams:
                if stream.play:
                    # Tell the peer (by clearing its play) that we left.
                    other = db.GqlQuery("SELECT * FROM Stream WHERE publish = :1", stream.play).get()
                    if other:
                        other.play = None
                        other.put()
                stream.delete()
class Publish(webapp.RequestHandler):
    """POST /publish: register/refresh a stream, pair it, and drain messages.

    The request body is the caller's publish URL. Responds with the peer's
    publish URL (if paired) followed by any queued text messages, newline
    separated.
    """
    def post(self):
        # first delete any expired datetime
        streams = db.GqlQuery("SELECT * FROM Stream WHERE lastmodified < :1", datetime.datetime.fromtimestamp(time.time()-60))
        if streams:
            for stream in streams:
                if stream.play:
                    # Unpair the expired stream's peer before deleting.
                    other = db.GqlQuery("SELECT * FROM Stream WHERE publish = :1", stream.play).get()
                    if other:
                        other.play = None
                        other.put()
                stream.delete()
        stream = db.GqlQuery("SELECT * FROM Stream WHERE publish = :1", self.request.body).get()
        if not stream:
            stream = Stream()
            stream.publish = self.request.body
            stream.put()
        if self.request.get("change"):
            # Caller asked for a new partner: break the current pairing.
            if stream.play:
                other = db.GqlQuery("SELECT * FROM Stream WHERE publish = :1", stream.play).get()
                if other:
                    other.play = None
                    other.put()
                stream.play = None
                stream.put()
        if not stream.play:
            # Pick a random unconnected stream other than ourselves.
            count = db.GqlQuery("SELECT * FROM Stream WHERE publish != :1 AND play = :2", stream.publish, None).count()
            if count > 0:
                r = random.randint(0, count-1)
                other = db.GqlQuery("SELECT * FROM Stream WHERE publish != :1 AND play = :2", stream.publish, None).fetch(1, r)
                if other:
                    # Cross-connect: each side plays the other's publish URL.
                    other[0].play, stream.play = stream.publish, other[0].publish
                    other[0].put()
                    stream.put()
        if stream.play:
            self.response.headers['Content-Type'] = 'text/plain'
            self.response.out.write(stream.play)
        # Deliver and delete any messages queued for this stream.
        msgs = db.GqlQuery("SELECT * FROM Message WHERE receiver = :1", stream.publish)
        if msgs:
            for msg in msgs:
                self.response.out.write("\n" + msg.content)
            db.delete(msgs)
class Send(webapp.RequestHandler):
    """Queue an offline text message; request body is "<receiver>\\n<content>"."""
    def post(self):
        # Everything before the first newline addresses the message;
        # the remainder is the payload.
        receiver, _, content = self.request.body.partition("\n")
        Message(receiver=receiver, content=content).put()
# URL routing. Note: unlike the other apps in this project, these routes
# have no trailing slash.
application = webapp.WSGIApplication([
    ('/', MainPage), ('/logout', Logout),
    ('/publish', Publish), ('/send', Send),
], debug=True)
# Standard App Engine CGI entry point.
def main():
    run_wsgi_app(application)
if __name__ == "__main__":
    main()
| Python |
# Copyright (c) 2010, Kundan Singh, all rights reserved.
#
# This is a public-chat application built using the Flash VideoIO component on Adobe
# Stratus service and Google App Engine. This site is just a demonstration of how such
# services can be built using the generic Flash-VideoIO component.
#
# This version of the project uses the Channel API available in Google App Engine for
# asynchronous notifications of user list and chat history.
#
# Visit http://code.google.com/p/flash-videoio for more.
import os, datetime, time, logging
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from google.appengine.api import channel
from django.utils import simplejson as json
# The main page at / just returns index.html.
class MainPage(webapp.RequestHandler):
    # GET / -> index.html rendered with an empty template context.
    def get(self):
        here = os.path.dirname(__file__)
        self.response.out.write(template.render(os.path.join(here, 'index.html'), {}))
# POST /create/
# Request: {"senderId": "client-id-of-sender"}
# Response: {"token": "new-channel-token-for-clientId-as-stream"}
class Create(webapp.RequestHandler):
    """POST /create/ with {"senderId": id} -> {"token": channel-token}."""
    def post(self):
        request_data = json.loads(self.request.body)
        token = channel.create_channel(request_data['senderId'])
        self.response.out.write(json.dumps({'token': token}))
# Data model to store connected clients and their locations.
class User(db.Model):
    location = db.StringProperty()   # room/page the client is on
    name = db.StringProperty()       # display name (may be None)
    clientId = db.StringProperty()   # Channel API client id
    extra = db.BlobProperty()        # opaque app payload (utf-8 encoded)
    lastmodified = db.DateTimeProperty(auto_now=True)  # drives 90s expiry
    def get_object(self):
        """Return the JSON-serializable public view of this user."""
        return {'name': self.name, 'clientId': self.clientId, 'extra': self.extra}
    def __repr__(self):
        return '<User location=%r clientId=%r name=%r lastmodified=%r len(extra)=%d />'%(self.location, self.clientId, self.name, self.lastmodified, len(self.extra) if self.extra else 0)
# Data model to store chat message in a location.
class Chat(db.Model):
    location = db.StringProperty()   # room the message belongs to
    senderId = db.StringProperty()   # Channel client id of the sender
    sender = db.StringProperty()     # sender display name
    targetId = db.StringProperty()   # private recipient; None means broadcast
    timestamp = db.DateTimeProperty(auto_now_add=True)
    text = db.TextProperty()
    extra = db.BlobProperty()        # opaque app payload
    def get_object(self):
        """Return the JSON view; timestamp is converted to a unix epoch value."""
        return {'senderId': self.senderId, 'sender': self.sender, 'targetId': self.targetId,
                'timestamp': time.mktime(self.timestamp.timetuple()),
                'text': self.text, 'extra': self.extra}
    def __repr__(self):
        return '<Chat location=%r sender=%r targetId=%r timestamp=%r len(text)=%d len(extra)=%d />'%(self.location, self.sender, self.targetId, self.timestamp, len(self.text), len(self.extra) if self.extra else 0)
# GET /userlist/?location={location}
# Response: {"userlist": [... list of {"name":..., "clientId":..., "extra":...}]
# POST /userlist/?location={location}
# Request: {"clientId":..., "name":..., "extra":...}
class UserList(webapp.RequestHandler):
    """Maintain and broadcast the per-location user list over channels."""
    def get(self):
        """GET /userlist/?location=.. -> {"userlist": [user objects]}."""
        location = self.request.get('location')
        userlist = [u.get_object() for u in db.GqlQuery('SELECT * FROM User WHERE location = :1', location)]
        # logging.debug("userlist returns: " + json.dumps({'userlist': userlist}))
        self.response.out.write(json.dumps({'userlist': userlist}))
    def post(self):
        """POST /userlist/ (add/update) or /userlist/delete/ (remove) a user.

        Any change (including expiry of users idle >90s) is pushed to every
        client in the location as a {"method": "userlist", ...} message.
        """
        # Expire users not refreshed within 90 seconds; they count as removed.
        expired_users = [u for u in db.GqlQuery('SELECT * FROM User WHERE lastmodified < :1', datetime.datetime.fromtimestamp(time.time()-90))]
        added_users, removed_users = [], [u.get_object() for u in expired_users]
        [u.delete() for u in expired_users]
        location, body = self.request.get('location'), json.loads(self.request.body)
        clientId, name, extra = body['clientId'], body['name'], body.get('extra', None)
        # The javascript client sends the literal string "null" for absent values.
        if extra == 'null': extra = None
        if name == 'null': name = None
        if extra and isinstance(extra, unicode):
            extra = extra.encode('utf-8')  # BlobProperty stores bytes
        user = db.GqlQuery('SELECT * FROM User WHERE clientId = :1', clientId).get()
        if self.request.path.endswith('/delete/'):
            if user:
                removed_users.append(user.get_object())
                user.delete()
        else:
            if not user:
                user = User(location=location, name=name, clientId=clientId, extra=extra)
                added_users.append(user.get_object())
            else:
                # Only announce when something visible actually changed.
                changed = (user.location != location or user.name != name or user.extra != extra)
                if changed:
                    user.location, user.name, user.extra = location, name, extra
                    added_users.append(user.get_object())
            user.put()
        if added_users or removed_users:
            data = json.dumps({'method': 'userlist', 'added': added_users, 'removed': removed_users})
            for u in db.GqlQuery('SELECT * FROM User WHERE location = :1', location):
                try:
                    channel.send_message(u.clientId, data)
                except channel.InvalidChannelClientIdError:
                    pass # ignore the exception
# GET /chathistory/?location={location}&targetId={targetId}
# Response: {"chathistory": [... list of {"senderId":...,"sender":...,"targetId":...,"timestamp":...,"text":...,"extra":...}]}
# POST /chathistory/?location={location}[&targetId={targetId}]
# Request: {"senderId":..., "sender":..., "text":..., "extra":...}
class ChatHistory(webapp.RequestHandler):
    """Store chat messages and fan them out over channels; serve history."""
    def get(self):
        """GET /chathistory/?location=..&targetId=.. -> last 30 visible messages.

        Returns broadcast messages plus private ones addressed to targetId,
        oldest first.
        """
        location, targetId = self.request.get('location'), self.request.get('targetId')
        chats = [u for u in db.GqlQuery('SELECT * FROM Chat WHERE location = :1 ORDER BY timestamp DESC LIMIT 30', location)]
        # logging.debug('result=' + str(chats))
        chathistory = [u.get_object() for u in chats if not u.targetId or u.targetId == targetId]
        # Query returned newest-first; clients expect oldest-first.
        result = json.dumps({'chathistory': [r for r in reversed(chathistory)]})
        self.response.out.write(result)
    def post(self):
        """POST /chathistory/?location=..[&targetId=..]: store and push a message."""
        location, targetId, body = self.request.get('location'), self.request.get('targetId'), json.loads(self.request.body)
        senderId, sender, text, extra = body['senderId'], body['sender'], body['text'], body['extra'] if 'extra' in body else None
        # The javascript client sends the literal string "null" for absent values.
        if sender == "null": sender = "User " + str(senderId)
        if text == "null": text = None
        if extra == "null": extra = None
        if targetId == "null" or targetId == "": targetId = None
        if extra and isinstance(extra, unicode):
            extra = extra.encode('utf-8')  # BlobProperty stores bytes
        if text:
            chat = Chat(location=location, senderId=senderId, sender=sender, targetId=targetId, text=text, extra=extra)
            chat.put()
            data = json.dumps({'method': 'chathistory', 'added': [chat.get_object()]})
            if not targetId:
                # Broadcast to everybody in the location.
                for u in db.GqlQuery('SELECT * FROM User WHERE location = :1', location):
                    try:
                        channel.send_message(u.clientId, data)
                    except channel.InvalidChannelClientIdError:
                        pass # ignore the error
            else:
                # Private message: deliver only to the addressed client.
                target = db.GqlQuery('SELECT * FROM User WHERE clientId = :1', targetId).get()
                if target:
                    try:
                        channel.send_message(target.clientId, data)
                    except channel.InvalidChannelClientIdError:
                        pass # ignore the error
                else:
                    self.response.set_status(404)
def main():
    """Enable debug logging and start the webapp WSGI application."""
    logging.getLogger().setLevel(logging.DEBUG)
    run_wsgi_app(webapp.WSGIApplication([
        ('/', MainPage), ('/create/', Create),
        ('/userlist/delete/', UserList), ('/userlist/', UserList),
        ('/chathistory/', ChatHistory),
    ], debug=True))

if __name__ == "__main__":
    main()
| Python |
import os, datetime, time, logging
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
from google.appengine.ext.webapp import template
from google.appengine.api import xmpp
from django.utils import simplejson as json
class MainPage(webapp.RequestHandler):
    # GET /: render index.html. The commented lines show optional Google
    # Accounts login that is currently disabled.
    def get(self):
        # user = users.get_current_user()
        # if not user:
        #     self.redirect(users.create_login_url(self.request.uri))
        #     return
        template_values = {}
        path = os.path.join(os.path.dirname(__file__), 'index.html')
        self.response.out.write(template.render(path, template_values))
    # POST /: debugging endpoint -- forwards the "input" form field to the
    # app's XMPP bot JID and reports the send status and the bot's presence.
    def post(self):
        msg = self.request.get("input")
        status_code = xmpp.send_message("internetvideocity@appspot.com/bot", msg)
        self.response.out.write("status=" + str(status_code) + \
            " presence=" + str(xmpp.get_presence("internetvideocity@appspot.com")))
class XMPPHandler(webapp.RequestHandler):
    """Receive inbound XMPP chat messages at /_ah/xmpp/message/chat/."""
    def post(self):
        message = xmpp.Message(self.request.POST)
        # Currently only logs the message body; auto-reply is disabled.
        logging.info(message.body)
        #if message.body[0:5].lower() == 'hello':
        #    message.reply("Greetings!")
class User(db.Model):
    location = db.StringProperty()   # room/page the client is on
    name = db.StringProperty()       # display name (may be None)
    clientId = db.StringProperty()   # client id used for polling identity
    extra = db.BlobProperty()        # opaque app payload (utf-8 encoded)
    lastmodified = db.DateTimeProperty(auto_now=True)  # drives 60s expiry
    def __repr__(self):
        return '<User location=%r clientId=%r name=%r lastmodified=%r len(extra)=%d />'%(self.location, self.clientId, self.name, self.lastmodified, len(self.extra) if self.extra else 0)
class Chat(db.Model):
    """A chat message posted in a location (room)."""
    location = db.StringProperty()   # room the message belongs to
    senderId = db.StringProperty()   # client id of the sender
    sender = db.StringProperty()     # sender display name
    targetId = db.StringProperty()   # private recipient; None means broadcast
    timestamp = db.DateTimeProperty(auto_now_add=True)
    text = db.TextProperty()
    extra = db.BlobProperty()        # opaque app payload
    def __repr__(self):
        # Bug fix: the model field is `targetId`; the old code read
        # `self.target`, which raised AttributeError whenever a Chat
        # instance was repr()'d (e.g. by a logging call).
        return '<Chat location=%r sender=%r target=%r timestamp=%r len(text)=%d len(extra)=%d />'%(self.location, self.sender, self.targetId, self.timestamp, len(self.text), len(self.extra) if self.extra else 0)
class Version(db.Model):
    # Per-location change counters used for client polling: a client sends
    # its last-seen counter ("since") and receives 304 when nothing changed.
    location = db.StringProperty()
    chathistory = db.IntegerProperty()  # bumped whenever a message is stored
    userlist = db.IntegerProperty()     # bumped whenever the user list changes
    def __repr__(self):
        return '<Version location=%r chathistory=%r userlist=%r />'%(self.location, self.chathistory, self.userlist)
class UserList(webapp.RequestHandler):
    """Polling (no Channel API) variant of the per-location user list."""
    def get(self):
        """GET /userlist?location=..&since=.. -> 304 or {"version", "userlist"}."""
        location, since = self.request.get('location'), self.request.get('since')
        changeVersion = False
        # Purge users idle for more than 60 seconds; any deletion bumps the
        # location's userlist version so pollers notice the change.
        for u in db.GqlQuery("SELECT * FROM User WHERE lastmodified < :1", datetime.datetime.fromtimestamp(time.time()-60)):
            logging.info('deleting expired %r'%(u,))
            changeVersion = True
            u.delete()
        if changeVersion:
            ver = db.GqlQuery("SELECT * FROM Version WHERE location = :1", location).get()
            if not ver:
                ver = Version(location=location, chathistory=0, userlist=0)
            ver.userlist = ver.userlist + 1
            logging.info('change to %r'%(ver,))
            ver.put()
        else:
            ver = db.GqlQuery("SELECT * FROM Version WHERE location = :1", location).get()
            logging.info('found %r'%(ver,))
        if not ver:
            found, ver = 0, Version(location=location, chathistory=0, userlist=0)
            ver.put()
        else:
            found = ver.userlist
        # A zero version is never treated as "unchanged".
        if found != 0 and str(found) == since:
            self.response.set_status(304)
        else:
            users = db.GqlQuery("SELECT * FROM User WHERE location = :1 ORDER BY lastmodified DESC", location)
            userlist = [{"clientId": u.clientId,
                         "name": u.name,
                         "extra": u.extra} for u in users]
            # Present users alphabetically by case-folded name.
            userlist = sorted(userlist, key=lambda u: str(u['name']).lower())
            result = json.dumps({"version": found, "userlist": userlist})
            logging.info('response ' + result)
            self.response.out.write(result)
    def post(self):
        """POST /userlist (add/refresh) or /userlist/delete (remove) a user."""
        location = self.request.get('location')
        changeVersion = False
        # Same 60-second expiry sweep as in get().
        for u in db.GqlQuery("SELECT * FROM User WHERE lastmodified < :1", datetime.datetime.fromtimestamp(time.time()-60)):
            logging.info('deleting expired %r'%(u,))
            changeVersion = True
            u.delete()
        body = json.loads(self.request.body)
        clientId, name, extra = body['clientId'], body['name'], body['extra'] if 'extra' in body else None
        # The javascript client sends the literal string "null" for absent values.
        if extra == "null": extra = None
        if name == "null": name = None
        if extra and isinstance(extra, unicode):
            extra = extra.encode('utf-8')  # BlobProperty stores bytes
        logging.info('body clientId='+ str(clientId) + ' name=' + str(name) + ' extra=' + str(extra))
        user = db.GqlQuery("SELECT * FROM User WHERE clientId = :1", clientId).get()
        if self.request.path.endswith('/delete'):
            logging.info('deleting %r'%(user,))
            if user:
                changeVersion = True
                user.delete()
        else:
            logging.info('found %r'%(user,))
            if not user:
                changeVersion = True
                user = User(location=location, name=name, clientId=clientId, extra=extra)
            else:
                if not changeVersion:
                    # Only bump the version when something visible changed.
                    changeVersion = (user.location != location or user.name != name or user.extra != extra)
                user.location, user.name, user.extra = location, name, extra
            user.put()
        if changeVersion:
            ver = db.GqlQuery("SELECT * FROM Version WHERE location = :1", location).get()
            if not ver:
                ver = Version(location=location, chathistory=0, userlist=0)
            ver.userlist = ver.userlist + 1
            logging.info('change to %r'%(ver,))
            ver.put()
class ChatHistory(webapp.RequestHandler):
    """Polling variant of the chat history, guarded by Version.chathistory."""
    def get(self):
        """GET /chathistory?location=..&since=.. -> 304 or last 30 messages."""
        location, targetId, since = self.request.get('location'), self.request.get('targetId'), self.request.get('since')
        ver = db.GqlQuery("SELECT * FROM Version WHERE location = :1", location).get()
        logging.info('found %r'%(ver,))
        if not ver:
            found, ver = 0, Version(location=location, chathistory=0, userlist=0)
            ver.put()
        else:
            found = ver.chathistory
        # A zero version is never treated as "unchanged".
        if found != 0 and str(found) == since:
            self.response.set_status(304)
        else:
            # Most recent 30 messages; returned to the client oldest-first.
            chats = db.GqlQuery("SELECT * FROM Chat WHERE location = :1 ORDER BY timestamp DESC LIMIT 30", location)
            #for u in chats:
            #    logging.info("chat item " + u.senderId + " " + u.targetId + " " + u.text)
            chathistory = [{"senderId": u.senderId,
                            "sender": u.sender,
                            "targetId": u.targetId,
                            "timestamp": time.mktime(u.timestamp.timetuple()),
                            "text": u.text,
                            "extra": u.extra} for u in chats]
            result = json.dumps({"version": found, "chathistory": [r for r in reversed(chathistory)]})
            logging.info('response ' + result)
            self.response.out.write(result)
    def post(self):
        """POST /chathistory?location=..[&targetId=..]: store a message, bump version."""
        location, targetId = self.request.get('location'), self.request.get('targetId')
        body = json.loads(self.request.body)
        senderId, sender, text, extra = body['senderId'], body['sender'], body['text'], body['extra'] if 'extra' in body else None
        # The javascript client sends the literal string "null" for absent values.
        if sender == "null": sender = "User " + str(senderId)
        if text == "null": text = None
        if extra == "null": extra = None
        if extra and isinstance(extra, unicode):
            extra = extra.encode('utf-8')  # BlobProperty stores bytes
        logging.info('body senderId='+ str(senderId) + ' sender=' + str(sender) + ' text=' + str(text) + ' extra=' + str(extra))
        if text:
            chat = Chat(location=location, senderId=senderId, sender=sender, targetId=targetId, text=text, extra=extra)
            chat.put()
            ver = db.GqlQuery("SELECT * FROM Version WHERE location = :1", location).get()
            if not ver:
                ver = Version(location=location, chathistory=0, userlist=0)
            ver.chathistory = ver.chathistory + 1
            logging.info('change to %r'%(ver,))
            ver.put()
# URL routing. The XMPP inbound-message webhook must be mounted at the fixed
# path /_ah/xmpp/message/chat/. UserList serves both the update and delete
# endpoints (it inspects request.path to distinguish them).
application = webapp.WSGIApplication([
    ('/_ah/xmpp/message/chat/', XMPPHandler), ('/', MainPage),
    ('/userlist', UserList), ('/userlist/delete', UserList),
    ('/chathistory', ChatHistory),
], debug=True)
# Standard App Engine CGI entry point.
def main():
    run_wsgi_app(application)
if __name__ == "__main__":
    main()
| Python |
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from google.appengine.ext import db
import simplejson as json
import geohash
import datetime
def lod_2_table(lod):
    """Render a list of dicts as an HTML table; return 'blank' when empty.

    The header row comes from the keys of the first dict.  Bug fix: data
    rows previously iterated each dict's own key order, so rows whose dicts
    were built in a different order produced misaligned columns.  All rows
    now emit cells in the header's key order (a missing key still raises
    KeyError, as before).
    """
    if not lod:
        return 'blank'
    headers = list(lod[0])
    trs = ['<tr>%s</tr>' % '\n'.join('<th>%s</th>' % k for k in headers)]
    for row in lod:
        trs.append('<tr>%s</tr>' % '\n'.join('<td>%s</td>' % row[k] for k in headers))
    return '<table>%s</table>' % '\n'.join(trs)
class Unit(db.Model):
    """A game unit owned by a player, positioned on the map by geohash.

    ep  -- energy points, spent on actions and regenerated over time
    sp  -- structure (hit) points
    loc -- geohash of (x, y); lexicographic order enables box range queries
    """
    ep = db.IntegerProperty(default=10)
    sp = db.IntegerProperty(default=10)
    owner = db.StringProperty(required=True)
    loc = db.StringProperty(default='ezs42e44yx96')
    x = db.FloatProperty(default=42.6)
    y = db.FloatProperty(default=-5.6)
    updated = db.DateTimeProperty(auto_now=True)  # refreshed on every put()
    type = db.StringProperty(default='base')

    def to_dict(self):
        """Return a plain-dict view of the unit for table rendering."""
        return {'ep': self.ep,
                'sp': self.sp,
                'x': self.x,
                'y': self.y,
                'loc': self.loc,
                'type': self.type,
                'owner': self.owner,
                'updated': self.updated.strftime("%Y-%m-%d %H:%M:%S"),
                'key': str(self.key())}

    def regen(self):
        """Regenerate one EP per elapsed second since last update, capped at 20.

        Bug fix: ``timedelta.seconds`` only covers the sub-day remainder and
        silently discarded whole days of elapsed time; count days too.
        """
        elapsed = datetime.datetime.utcnow() - self.updated
        self.ep += elapsed.days * 86400 + elapsed.seconds
        if self.ep > 20:
            self.ep = 20
class attack(webapp.RequestHandler):
    """Handle /attack/<attacker_key>/<target_key>: spend EP to damage a target."""
    def get(self, atk, tgt):
        attacker = get_item(atk)
        attacker.regen()
        attacker.ep -= 10  # attacking costs 10 energy points
        target = get_item(tgt)
        target.regen()
        target.sp -= 1     # target loses one structure point
        db.put([attacker, target])
        table = lod_2_table([attacker.to_dict(), target.to_dict()])
        self.response.out.write(table)
class scan(webapp.RequestHandler):
    # Handle /scan/<key>/<x1>/<y1>/<x2>/<y2>: charge the scanning unit 15 EP
    # and report all units inside the given bounding box.
    def get(self,key,x1,y1,x2,y2):
        # Geohash strings sort lexicographically, so a geographic box query
        # becomes a plain string-range filter on the 'loc' property.
        ll = str(geohash.Geohash((float(x1), float(y1))))
        ur = str(geohash.Geohash((float(x2), float(y2))))
        unit = get_item(key)
        unit.regen()
        unit.ep -= 15  # scanning costs 15 energy points
        unit.put()
        q = Unit.all()
        q.filter('loc <', ur)
        q.filter('loc >', ll)
        units = q.fetch(100)  # cap results at 100 units
        res = lod_2_table([u.to_dict() for u in units])
        self.response.out.write(res)
class create(webapp.RequestHandler):
    """Handle /create/<owner>/<type>/<x>/<y>: spawn a new unit.

    Bug fix: the captured ``type`` path segment was parsed but never passed
    to ``Unit(...)``, so every unit was created with the default type 'base'.
    """
    def get(self, owner, type, x, y):
        x = float(x)
        y = float(y)
        loc = str(geohash.Geohash((x, y)))
        unit = Unit(owner=owner, type=type, x=x, y=y, loc=loc)
        unit.put()
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write('added')
class move(webapp.RequestHandler):
    """Handle /move/<key>/<x>/<y>: relocate a unit and charge it 20 EP."""
    def get(self, key, x, y):
        new_x, new_y = float(x), float(y)
        new_loc = str(geohash.Geohash((new_x, new_y)))
        unit = get_item(key)
        unit.regen()
        unit.x = new_x
        unit.y = new_y
        unit.loc = new_loc
        unit.ep -= 20  # moving costs 20 energy points
        unit.put()
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write('moved')
class dump(webapp.RequestHandler):
    # Destructive dev/reset endpoint: deletes every Unit entity.
    def get(self):
        db.delete(Unit.all())
class unit_handler(webapp.RequestHandler):
    """GET /unit/<owner>: list a player's units as an HTML table.
    POST /unit: create a unit with the given owner and starting EP."""
    def get(self, owner):
        owned = db.GqlQuery("SELECT * FROM Unit WHERE owner = :1", owner)
        html = lod_2_table([u.to_dict() for u in owned])
        self.response.out.write(html)
    def post(self):
        ep = int(self.request.get("ep"))
        owner = self.request.get("owner")
        Unit(owner=owner, ep=ep).put()
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write('added')
class main_page(webapp.RequestHandler):
    # Plain-text landing page; doubles as a trivial health check.
    def get(self):
        self.response.headers['Content-Type'] = 'text/plain'
        self.response.out.write('Hello, webapp World!')
# URL routing table: each regex capture group is passed positionally to the
# matching handler's get() method.
app = webapp.WSGIApplication([('/', main_page),
    (r'/unit/(.*)', unit_handler),
    (r'/unit', unit_handler),
    (r'/attack/(.*)/(.*)', attack),
    (r'/move/(.*)/(.*)/(.*)', move),
    (r'/create/(.*)/(.*)/(.*)/(.*)', create),
    (r'/scan/(.*)/(.*)/(.*)/(.*)/(.*)', scan),
    (r'/db/dump', dump)
    ], debug=True)
def get_item(key):
    # Fetch a datastore entity from its string-encoded key.
    return db.get(db.Key(key))
def main():
    # CGI-style entry point used by the old App Engine Python runtime.
    run_wsgi_app(app)
if __name__ == "__main__":
    main()
| Python |
"""Implementation of JSONEncoder
"""
import re
try:
from simplejson._speedups import encode_basestring_ascii as c_encode_basestring_ascii
except ImportError:
c_encode_basestring_ascii = None
try:
from simplejson._speedups import make_encoder as c_make_encoder
except ImportError:
c_make_encoder = None
# Characters that must be escaped inside a JSON string: control characters,
# backslash and double quote.
ESCAPE = re.compile(r'[\x00-\x1f\\"\b\f\n\r\t]')
# For ensure_ascii output: additionally escape everything outside printable ASCII.
ESCAPE_ASCII = re.compile(r'([\\"]|[^\ -~])')
HAS_UTF8 = re.compile(r'[\x80-\xff]')  # quick test for non-ASCII bytes
# Short escapes for the characters JSON names explicitly.
ESCAPE_DCT = {
    '\\': '\\\\',
    '"': '\\"',
    '\b': '\\b',
    '\f': '\\f',
    '\n': '\\n',
    '\r': '\\r',
    '\t': '\\t',
}
# Remaining control characters fall back to generic \uXXXX escapes.
for i in range(0x20):
    #ESCAPE_DCT.setdefault(chr(i), '\\u{0:04x}'.format(i))
    ESCAPE_DCT.setdefault(chr(i), '\\u%04x' % (i,))
# Assume this produces an infinity on all machines (probably not guaranteed)
INFINITY = float('1e66666')
FLOAT_REPR = repr
def encode_basestring(s):
    """Quote *s* as a JSON string, escaping the characters JSON requires."""
    escaped = ESCAPE.sub(lambda match: ESCAPE_DCT[match.group(0)], s)
    return '"%s"' % (escaped,)
def py_encode_basestring_ascii(s):
    """Return an ASCII-only JSON representation of a Python string
    """
    # Python 2: a byte string containing non-ASCII bytes is assumed to be
    # UTF-8 and decoded so escaping operates on code points, not raw bytes.
    if isinstance(s, str) and HAS_UTF8.search(s) is not None:
        s = s.decode('utf-8')
    def replace(match):
        s = match.group(0)
        try:
            return ESCAPE_DCT[s]
        except KeyError:
            n = ord(s)
            if n < 0x10000:
                #return '\\u{0:04x}'.format(n)
                return '\\u%04x' % (n,)
            else:
                # surrogate pair
                n -= 0x10000
                s1 = 0xd800 | ((n >> 10) & 0x3ff)
                s2 = 0xdc00 | (n & 0x3ff)
                #return '\\u{0:04x}\\u{1:04x}'.format(s1, s2)
                return '\\u%04x\\u%04x' % (s1, s2)
    return '"' + str(ESCAPE_ASCII.sub(replace, s)) + '"'
# Prefer the C implementation when the _speedups extension imported above.
encode_basestring_ascii = c_encode_basestring_ascii or py_encode_basestring_ascii
class JSONEncoder(object):
    """Extensible JSON <http://json.org> encoder for Python data structures.

    Supports the following objects and types by default:

    +-------------------+---------------+
    | Python            | JSON          |
    +===================+===============+
    | dict              | object        |
    +-------------------+---------------+
    | list, tuple       | array         |
    +-------------------+---------------+
    | str, unicode      | string        |
    +-------------------+---------------+
    | int, long, float  | number        |
    +-------------------+---------------+
    | True              | true          |
    +-------------------+---------------+
    | False             | false         |
    +-------------------+---------------+
    | None              | null          |
    +-------------------+---------------+

    To extend this to recognize other objects, subclass and implement a
    ``.default()`` method with another method that returns a serializable
    object for ``o`` if possible, otherwise it should call the superclass
    implementation (to raise ``TypeError``).
    """
    # Defaults; overridden per-instance when ``separators`` is passed.
    item_separator = ', '
    key_separator = ': '
    def __init__(self, skipkeys=False, ensure_ascii=True,
            check_circular=True, allow_nan=True, sort_keys=False,
            indent=None, separators=None, encoding='utf-8', default=None):
        """Constructor for JSONEncoder, with sensible defaults.

        If skipkeys is false, then it is a TypeError to attempt
        encoding of keys that are not str, int, long, float or None.  If
        skipkeys is True, such items are simply skipped.

        If ensure_ascii is true, the output is guaranteed to be str
        objects with all incoming unicode characters escaped.  If
        ensure_ascii is false, the output will be unicode object.

        If check_circular is true, then lists, dicts, and custom encoded
        objects will be checked for circular references during encoding to
        prevent an infinite recursion (which would cause an OverflowError).
        Otherwise, no such check takes place.

        If allow_nan is true, then NaN, Infinity, and -Infinity will be
        encoded as such.  This behavior is not JSON specification compliant,
        but is consistent with most JavaScript based encoders and decoders.
        Otherwise, it will be a ValueError to encode such floats.

        If sort_keys is true, then the output of dictionaries will be
        sorted by key; this is useful for regression tests to ensure
        that JSON serializations can be compared on a day-to-day basis.

        If indent is a non-negative integer, then JSON array
        elements and object members will be pretty-printed with that
        indent level.  An indent level of 0 will only insert newlines.
        None is the most compact representation.

        If specified, separators should be a (item_separator, key_separator)
        tuple.  The default is (', ', ': ').  To get the most compact JSON
        representation you should specify (',', ':') to eliminate whitespace.

        If specified, default is a function that gets called for objects
        that can't otherwise be serialized.  It should return a JSON encodable
        version of the object or raise a ``TypeError``.

        If encoding is not None, then all input strings will be
        transformed into unicode using that encoding prior to JSON-encoding.
        The default is UTF-8.
        """
        self.skipkeys = skipkeys
        self.ensure_ascii = ensure_ascii
        self.check_circular = check_circular
        self.allow_nan = allow_nan
        self.sort_keys = sort_keys
        self.indent = indent
        if separators is not None:
            self.item_separator, self.key_separator = separators
        if default is not None:
            # Instance attribute shadows the default() method below.
            self.default = default
        self.encoding = encoding
    def default(self, o):
        """Implement this method in a subclass such that it returns
        a serializable object for ``o``, or calls the base implementation
        (to raise a ``TypeError``).

        For example, to support arbitrary iterators, you could
        implement default like this::

            def default(self, o):
                try:
                    iterable = iter(o)
                except TypeError:
                    pass
                else:
                    return list(iterable)
                return JSONEncoder.default(self, o)
        """
        raise TypeError(repr(o) + " is not JSON serializable")
    def encode(self, o):
        """Return a JSON string representation of a Python data structure.

        >>> JSONEncoder().encode({"foo": ["bar", "baz"]})
        '{"foo": ["bar", "baz"]}'
        """
        # This is for extremely simple cases and benchmarks.
        if isinstance(o, basestring):
            if isinstance(o, str):
                _encoding = self.encoding
                if (_encoding is not None
                        and not (_encoding == 'utf-8')):
                    o = o.decode(_encoding)
            if self.ensure_ascii:
                return encode_basestring_ascii(o)
            else:
                return encode_basestring(o)
        # This doesn't pass the iterator directly to ''.join() because the
        # exceptions aren't as detailed.  The list call should be roughly
        # equivalent to the PySequence_Fast that ''.join() would do.
        chunks = self.iterencode(o, _one_shot=True)
        if not isinstance(chunks, (list, tuple)):
            chunks = list(chunks)
        return ''.join(chunks)
    def iterencode(self, o, _one_shot=False):
        """Encode the given object and yield each string
        representation as available.

        For example::

            for chunk in JSONEncoder().iterencode(bigobject):
                mysocket.write(chunk)
        """
        # markers maps id(container) -> container for every container on the
        # current encoding path, to detect circular references.
        if self.check_circular:
            markers = {}
        else:
            markers = None
        if self.ensure_ascii:
            _encoder = encode_basestring_ascii
        else:
            _encoder = encode_basestring
        if self.encoding != 'utf-8':
            # Wrap the encoder to decode byte strings with the configured
            # codec first (Python 2 str/unicode split).
            def _encoder(o, _orig_encoder=_encoder, _encoding=self.encoding):
                if isinstance(o, str):
                    o = o.decode(_encoding)
                return _orig_encoder(o)
        def floatstr(o, allow_nan=self.allow_nan, _repr=FLOAT_REPR, _inf=INFINITY, _neginf=-INFINITY):
            # Check for specials.  Note that this type of test is processor- and/or
            # platform-specific, so do tests which don't depend on the internals.
            # (o != o is the NaN test.)
            if o != o:
                text = 'NaN'
            elif o == _inf:
                text = 'Infinity'
            elif o == _neginf:
                text = '-Infinity'
            else:
                return _repr(o)
            if not allow_nan:
                raise ValueError(
                    "Out of range float values are not JSON compliant: " +
                    repr(o))
            return text
        # Use the C-accelerated one-shot encoder when available and no
        # feature it doesn't support (indent, sort_keys) is requested.
        if _one_shot and c_make_encoder is not None and not self.indent and not self.sort_keys:
            _iterencode = c_make_encoder(
                markers, self.default, _encoder, self.indent,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, self.allow_nan)
        else:
            _iterencode = _make_iterencode(
                markers, self.default, _encoder, self.indent, floatstr,
                self.key_separator, self.item_separator, self.sort_keys,
                self.skipkeys, _one_shot)
        return _iterencode(o, 0)
def _make_iterencode(markers, _default, _encoder, _indent, _floatstr, _key_separator, _item_separator, _sort_keys, _skipkeys, _one_shot,
        ## HACK: hand-optimized bytecode; turn globals into locals
        ## (Python 2 only: True/False are ordinary names, not keywords)
        False=False,
        True=True,
        ValueError=ValueError,
        basestring=basestring,
        dict=dict,
        float=float,
        id=id,
        int=int,
        isinstance=isinstance,
        list=list,
        long=long,
        str=str,
        tuple=tuple,
    ):
    # Build and return the pure-Python _iterencode generator, closing over
    # all encoder configuration so the inner loops avoid attribute lookups.
    def _iterencode_list(lst, _current_indent_level):
        if not lst:
            yield '[]'
            return
        if markers is not None:
            markerid = id(lst)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = lst
        # buf holds the text emitted *before* each element: '[' for the
        # first, the separator for the rest.
        buf = '['
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            separator = _item_separator + newline_indent
            buf += newline_indent
        else:
            newline_indent = None
            separator = _item_separator
        first = True
        for value in lst:
            if first:
                first = False
            else:
                buf = separator
            if isinstance(value, basestring):
                yield buf + _encoder(value)
            elif value is None:
                yield buf + 'null'
            elif value is True:
                yield buf + 'true'
            elif value is False:
                yield buf + 'false'
            elif isinstance(value, (int, long)):
                yield buf + str(value)
            elif isinstance(value, float):
                yield buf + _floatstr(value)
            else:
                yield buf
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield ']'
        if markers is not None:
            del markers[markerid]
    def _iterencode_dict(dct, _current_indent_level):
        if not dct:
            yield '{}'
            return
        if markers is not None:
            markerid = id(dct)
            if markerid in markers:
                raise ValueError("Circular reference detected")
            markers[markerid] = dct
        yield '{'
        if _indent is not None:
            _current_indent_level += 1
            newline_indent = '\n' + (' ' * (_indent * _current_indent_level))
            item_separator = _item_separator + newline_indent
            yield newline_indent
        else:
            newline_indent = None
            item_separator = _item_separator
        first = True
        if _sort_keys:
            items = dct.items()
            items.sort(key=lambda kv: kv[0])
        else:
            items = dct.iteritems()
        for key, value in items:
            if isinstance(key, basestring):
                pass
            # JavaScript is weakly typed for these, so it makes sense to
            # also allow them.  Many encoders seem to do something like this.
            elif isinstance(key, float):
                key = _floatstr(key)
            elif key is True:
                key = 'true'
            elif key is False:
                key = 'false'
            elif key is None:
                key = 'null'
            elif isinstance(key, (int, long)):
                key = str(key)
            elif _skipkeys:
                continue
            else:
                raise TypeError("key " + repr(key) + " is not a string")
            if first:
                first = False
            else:
                yield item_separator
            yield _encoder(key)
            yield _key_separator
            if isinstance(value, basestring):
                yield _encoder(value)
            elif value is None:
                yield 'null'
            elif value is True:
                yield 'true'
            elif value is False:
                yield 'false'
            elif isinstance(value, (int, long)):
                yield str(value)
            elif isinstance(value, float):
                yield _floatstr(value)
            else:
                if isinstance(value, (list, tuple)):
                    chunks = _iterencode_list(value, _current_indent_level)
                elif isinstance(value, dict):
                    chunks = _iterencode_dict(value, _current_indent_level)
                else:
                    chunks = _iterencode(value, _current_indent_level)
                for chunk in chunks:
                    yield chunk
        if newline_indent is not None:
            _current_indent_level -= 1
            yield '\n' + (' ' * (_indent * _current_indent_level))
        yield '}'
        if markers is not None:
            del markers[markerid]
    def _iterencode(o, _current_indent_level):
        # Top-level dispatch on the object's type.
        if isinstance(o, basestring):
            yield _encoder(o)
        elif o is None:
            yield 'null'
        elif o is True:
            yield 'true'
        elif o is False:
            yield 'false'
        elif isinstance(o, (int, long)):
            yield str(o)
        elif isinstance(o, float):
            yield _floatstr(o)
        elif isinstance(o, (list, tuple)):
            for chunk in _iterencode_list(o, _current_indent_level):
                yield chunk
        elif isinstance(o, dict):
            for chunk in _iterencode_dict(o, _current_indent_level):
                yield chunk
        else:
            # Unknown type: let default() convert it, guarding against a
            # default() that returns (a structure containing) o itself.
            if markers is not None:
                markerid = id(o)
                if markerid in markers:
                    raise ValueError("Circular reference detected")
                markers[markerid] = o
            o = _default(o)
            for chunk in _iterencode(o, _current_indent_level):
                yield chunk
            if markers is not None:
                del markers[markerid]
    return _iterencode
| Python |
"""Implementation of JSONDecoder
"""
import re
import sys
import struct
from simplejson.scanner import make_scanner
try:
from simplejson._speedups import scanstring as c_scanstring
except ImportError:
c_scanstring = None
__all__ = ['JSONDecoder']
FLAGS = re.VERBOSE | re.MULTILINE | re.DOTALL
def _floatconstants():
    # Build NaN and Infinity from their IEEE-754 big-endian byte patterns
    # (Python 2 ``str.decode('hex')``); byte-swap each 8-byte double on
    # little-endian machines before unpacking.
    _BYTES = '7FF80000000000007FF0000000000000'.decode('hex')
    if sys.byteorder != 'big':
        _BYTES = _BYTES[:8][::-1] + _BYTES[8:][::-1]
    nan, inf = struct.unpack('dd', _BYTES)
    return nan, inf, -inf
NaN, PosInf, NegInf = _floatconstants()
def linecol(doc, pos):
    """Map character offset *pos* in *doc* to a (line, column) pair.

    Lines are 1-based; the column is the offset from the preceding newline
    (or from the start of the document on line 1).
    """
    lineno = doc.count('\n', 0, pos) + 1
    colno = pos if lineno == 1 else pos - doc.rindex('\n', 0, pos)
    return lineno, colno
def errmsg(msg, doc, pos, end=None):
    """Format a decoder error message with line/column context.

    Note: this function is also called from the _speedups C extension.
    """
    lineno, colno = linecol(doc, pos)
    if end is None:
        return '%s: line %d column %d (char %d)' % (msg, lineno, colno, pos)
    endlineno, endcolno = linecol(doc, end)
    return ('%s: line %d column %d - line %d column %d (char %d - %d)'
            % (msg, lineno, colno, endlineno, endcolno, pos, end))
# Non-standard literals the decoder accepts, mapped to their float values.
_CONSTANTS = {
    '-Infinity': NegInf,
    'Infinity': PosInf,
    'NaN': NaN,
}
# Matches a run of plain characters followed by the next quote, backslash
# or control character -- the only bytes needing special handling.
STRINGCHUNK = re.compile(r'(.*?)(["\\\x00-\x1f])', FLAGS)
# Lookup table for single-character backslash escapes.
BACKSLASH = {
    '"': u'"', '\\': u'\\', '/': u'/',
    'b': u'\b', 'f': u'\f', 'n': u'\n', 'r': u'\r', 't': u'\t',
}
DEFAULT_ENCODING = "utf-8"
def py_scanstring(s, end, encoding=None, strict=True, _b=BACKSLASH, _m=STRINGCHUNK.match):
    """Scan the string s for a JSON string. End is the index of the
    character in s after the quote that started the JSON string.
    Unescapes all valid JSON string escape sequences and raises ValueError
    on attempt to decode an invalid string. If strict is False then literal
    control characters are allowed in the string.

    Returns a tuple of the decoded string and the index of the character in s
    after the end quote."""
    if encoding is None:
        encoding = DEFAULT_ENCODING
    chunks = []
    _append = chunks.append
    begin = end - 1  # position of the opening quote, for error messages
    while 1:
        chunk = _m(s, end)
        if chunk is None:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        end = chunk.end()
        content, terminator = chunk.groups()
        # Content contains zero or more unescaped string characters
        if content:
            if not isinstance(content, unicode):
                content = unicode(content, encoding)
            _append(content)
        # Terminator is the end of string, a literal control character,
        # or a backslash denoting that an escape sequence follows
        if terminator == '"':
            break
        elif terminator != '\\':
            if strict:
                msg = "Invalid control character %r at" % (terminator,)
                #msg = "Invalid control character {0!r} at".format(terminator)
                raise ValueError(errmsg(msg, s, end))
            else:
                # Non-strict mode: keep the raw control character.
                _append(terminator)
                continue
        try:
            esc = s[end]
        except IndexError:
            raise ValueError(
                errmsg("Unterminated string starting at", s, begin))
        # If not a unicode escape sequence, must be in the lookup table
        if esc != 'u':
            try:
                char = _b[esc]
            except KeyError:
                msg = "Invalid \\escape: " + repr(esc)
                raise ValueError(errmsg(msg, s, end))
            end += 1
        else:
            # Unicode escape sequence
            esc = s[end + 1:end + 5]
            next_end = end + 5
            if len(esc) != 4:
                msg = "Invalid \\uXXXX escape"
                raise ValueError(errmsg(msg, s, end))
            uni = int(esc, 16)
            # Check for surrogate pair on UCS-4 systems
            if 0xd800 <= uni <= 0xdbff and sys.maxunicode > 65535:
                msg = "Invalid \\uXXXX\\uXXXX surrogate pair"
                if not s[end + 5:end + 7] == '\\u':
                    raise ValueError(errmsg(msg, s, end))
                esc2 = s[end + 7:end + 11]
                if len(esc2) != 4:
                    raise ValueError(errmsg(msg, s, end))
                uni2 = int(esc2, 16)
                # Combine the pair into a single code point above U+FFFF.
                uni = 0x10000 + (((uni - 0xd800) << 10) | (uni2 - 0xdc00))
                next_end += 6
            char = unichr(uni)
            end = next_end
        # Append the unescaped character
        _append(char)
    return u''.join(chunks), end
# Use speedup if available
scanstring = c_scanstring or py_scanstring
WHITESPACE = re.compile(r'[ \t\n\r]*', FLAGS)  # JSON insignificant whitespace
WHITESPACE_STR = ' \t\n\r'
def JSONObject((s, end), encoding, strict, scan_once, object_hook, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse an object body; ``end`` indexes the character just after '{'.
    # Returns (dict-or-object_hook-result, index after the closing '}').
    pairs = {}
    # Use a slice to prevent IndexError from being raised, the following
    # check will raise a more specific ValueError if the string is empty
    nextchar = s[end:end + 1]
    # Normally we expect nextchar == '"'
    if nextchar != '"':
        if nextchar in _ws:
            end = _w(s, end).end()
            nextchar = s[end:end + 1]
        # Trivial empty object
        if nextchar == '}':
            return pairs, end + 1
        elif nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end))
    end += 1
    while True:
        key, end = scanstring(s, end, encoding, strict)
        # To skip some function call overhead we optimize the fast paths where
        # the JSON key separator is ": " or just ":".
        if s[end:end + 1] != ':':
            end = _w(s, end).end()
            if s[end:end + 1] != ':':
                raise ValueError(errmsg("Expecting : delimiter", s, end))
        end += 1
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        pairs[key] = value
        # Skip whitespace before the ',' or '}' that follows the value.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end = _w(s, end + 1).end()
                nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar == '}':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end - 1))
        # After a comma the next property name must follow.
        try:
            nextchar = s[end]
            if nextchar in _ws:
                end += 1
                nextchar = s[end]
                if nextchar in _ws:
                    end = _w(s, end + 1).end()
                    nextchar = s[end]
        except IndexError:
            nextchar = ''
        end += 1
        if nextchar != '"':
            raise ValueError(errmsg("Expecting property name", s, end - 1))
    if object_hook is not None:
        pairs = object_hook(pairs)
    return pairs, end
def JSONArray((s, end), scan_once, _w=WHITESPACE.match, _ws=WHITESPACE_STR):
    # Parse an array body; ``end`` indexes the character just after '['.
    # Returns (list, index after the closing ']').
    values = []
    nextchar = s[end:end + 1]
    if nextchar in _ws:
        end = _w(s, end + 1).end()
        nextchar = s[end:end + 1]
    # Look-ahead for trivial empty array
    if nextchar == ']':
        return values, end + 1
    _append = values.append
    while True:
        try:
            value, end = scan_once(s, end)
        except StopIteration:
            raise ValueError(errmsg("Expecting object", s, end))
        _append(value)
        # Skip whitespace before the ',' or ']' that follows the value.
        nextchar = s[end:end + 1]
        if nextchar in _ws:
            end = _w(s, end + 1).end()
            nextchar = s[end:end + 1]
        end += 1
        if nextchar == ']':
            break
        elif nextchar != ',':
            raise ValueError(errmsg("Expecting , delimiter", s, end))
        # Skip whitespace after the comma, before the next value.
        try:
            if s[end] in _ws:
                end += 1
                if s[end] in _ws:
                    end = _w(s, end + 1).end()
        except IndexError:
            pass
    return values, end
class JSONDecoder(object):
    """Simple JSON <http://json.org> decoder

    Performs the following translations in decoding by default:

    +---------------+-------------------+
    | JSON          | Python            |
    +===============+===================+
    | object        | dict              |
    +---------------+-------------------+
    | array         | list              |
    +---------------+-------------------+
    | string        | unicode           |
    +---------------+-------------------+
    | number (int)  | int, long         |
    +---------------+-------------------+
    | number (real) | float             |
    +---------------+-------------------+
    | true          | True              |
    +---------------+-------------------+
    | false         | False             |
    +---------------+-------------------+
    | null          | None              |
    +---------------+-------------------+

    It also understands ``NaN``, ``Infinity``, and ``-Infinity`` as
    their corresponding ``float`` values, which is outside the JSON spec.
    """
    def __init__(self, encoding=None, object_hook=None, parse_float=None,
            parse_int=None, parse_constant=None, strict=True):
        """``encoding`` determines the encoding used to interpret any ``str``
        objects decoded by this instance (utf-8 by default).  It has no
        effect when decoding ``unicode`` objects.

        Note that currently only encodings that are a superset of ASCII work,
        strings of other encodings should be passed in as ``unicode``.

        ``object_hook``, if specified, will be called with the result
        of every JSON object decoded and its return value will be used in
        place of the given ``dict``.  This can be used to provide custom
        deserializations (e.g. to support JSON-RPC class hinting).

        ``parse_float``, if specified, will be called with the string
        of every JSON float to be decoded.  By default this is equivalent to
        float(num_str).  This can be used to use another datatype or parser
        for JSON floats (e.g. decimal.Decimal).

        ``parse_int``, if specified, will be called with the string
        of every JSON int to be decoded.  By default this is equivalent to
        int(num_str).  This can be used to use another datatype or parser
        for JSON integers (e.g. float).

        ``parse_constant``, if specified, will be called with one of the
        following strings: -Infinity, Infinity, NaN.
        This can be used to raise an exception if invalid JSON numbers
        are encountered.
        """
        self.encoding = encoding
        self.object_hook = object_hook
        # Fall back to the builtin conversions when no hook is supplied.
        self.parse_float = parse_float or float
        self.parse_int = parse_int or int
        self.parse_constant = parse_constant or _CONSTANTS.__getitem__
        self.strict = strict
        # make_scanner reads the parse_* attributes above, so set them first.
        self.parse_object = JSONObject
        self.parse_array = JSONArray
        self.parse_string = scanstring
        self.scan_once = make_scanner(self)
    def decode(self, s, _w=WHITESPACE.match):
        """Return the Python representation of ``s`` (a ``str`` or ``unicode``
        instance containing a JSON document)
        """
        obj, end = self.raw_decode(s, idx=_w(s, 0).end())
        # Trailing whitespace is allowed; any other trailing text is an error.
        end = _w(s, end).end()
        if end != len(s):
            raise ValueError(errmsg("Extra data", s, end, len(s)))
        return obj
    def raw_decode(self, s, idx=0):
        """Decode a JSON document from ``s`` (a ``str`` or ``unicode`` beginning
        with a JSON document) and return a 2-tuple of the Python
        representation and the index in ``s`` where the document ended.

        This can be used to decode a JSON document from a string that may
        have extraneous data at the end.
        """
        try:
            obj, end = self.scan_once(s, idx)
        except StopIteration:
            raise ValueError("No JSON object could be decoded")
        return obj, end
| Python |
r"""JSON (JavaScript Object Notation) <http://json.org> is a subset of
JavaScript syntax (ECMA-262 3rd edition) used as a lightweight data
interchange format.
:mod:`simplejson` exposes an API familiar to users of the standard library
:mod:`marshal` and :mod:`pickle` modules. It is the externally maintained
version of the :mod:`json` library contained in Python 2.6, but maintains
compatibility with Python 2.4 and Python 2.5 and (currently) has
significant performance advantages, even without using the optional C
extension for speedups.
Encoding basic Python object hierarchies::
>>> import simplejson as json
>>> json.dumps(['foo', {'bar': ('baz', None, 1.0, 2)}])
'["foo", {"bar": ["baz", null, 1.0, 2]}]'
>>> print json.dumps("\"foo\bar")
"\"foo\bar"
>>> print json.dumps(u'\u1234')
"\u1234"
>>> print json.dumps('\\')
"\\"
>>> print json.dumps({"c": 0, "b": 0, "a": 0}, sort_keys=True)
{"a": 0, "b": 0, "c": 0}
>>> from StringIO import StringIO
>>> io = StringIO()
>>> json.dump(['streaming API'], io)
>>> io.getvalue()
'["streaming API"]'
Compact encoding::
>>> import simplejson as json
>>> json.dumps([1,2,3,{'4': 5, '6': 7}], separators=(',',':'))
'[1,2,3,{"4":5,"6":7}]'
Pretty printing::
>>> import simplejson as json
>>> s = json.dumps({'4': 5, '6': 7}, sort_keys=True, indent=4)
>>> print '\n'.join([l.rstrip() for l in s.splitlines()])
{
"4": 5,
"6": 7
}
Decoding JSON::
>>> import simplejson as json
>>> obj = [u'foo', {u'bar': [u'baz', None, 1.0, 2]}]
>>> json.loads('["foo", {"bar":["baz", null, 1.0, 2]}]') == obj
True
>>> json.loads('"\\"foo\\bar"') == u'"foo\x08ar'
True
>>> from StringIO import StringIO
>>> io = StringIO('["streaming API"]')
>>> json.load(io)[0] == 'streaming API'
True
Specializing JSON object decoding::
>>> import simplejson as json
>>> def as_complex(dct):
... if '__complex__' in dct:
... return complex(dct['real'], dct['imag'])
... return dct
...
>>> json.loads('{"__complex__": true, "real": 1, "imag": 2}',
... object_hook=as_complex)
(1+2j)
>>> import decimal
>>> json.loads('1.1', parse_float=decimal.Decimal) == decimal.Decimal('1.1')
True
Specializing JSON object encoding::
>>> import simplejson as json
>>> def encode_complex(obj):
... if isinstance(obj, complex):
... return [obj.real, obj.imag]
... raise TypeError(repr(o) + " is not JSON serializable")
...
>>> json.dumps(2 + 1j, default=encode_complex)
'[2.0, 1.0]'
>>> json.JSONEncoder(default=encode_complex).encode(2 + 1j)
'[2.0, 1.0]'
>>> ''.join(json.JSONEncoder(default=encode_complex).iterencode(2 + 1j))
'[2.0, 1.0]'
Using simplejson.tool from the shell to validate and pretty-print::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
__version__ = '2.0.9'
__all__ = [
'dump', 'dumps', 'load', 'loads',
'JSONDecoder', 'JSONEncoder',
]
__author__ = 'Bob Ippolito <bob@redivi.com>'
from decoder import JSONDecoder
from encoder import JSONEncoder
# Shared encoder reused by dump()/dumps() when they are called with
# all-default arguments, so the common case skips constructing a new
# JSONEncoder on every call.
_default_encoder = JSONEncoder(
    skipkeys=False,
    ensure_ascii=True,
    check_circular=True,
    allow_nan=True,
    indent=None,
    separators=None,
    encoding='utf-8',
    default=None,
)
def dump(obj, fp, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` as a JSON formatted stream to ``fp`` (a
    ``.write()``-supporting file-like object).

    If ``skipkeys`` is true then ``dict`` keys that are not basic types
    (``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    will be skipped instead of raising a ``TypeError``.

    If ``ensure_ascii`` is false, some chunks written to ``fp`` may be
    ``unicode`` instances, subject to normal Python ``str`` to ``unicode``
    coercion rules; unless ``fp.write()`` explicitly understands ``unicode``
    (as in ``codecs.getwriter()``) this is likely to cause an error.

    If ``check_circular`` is false, the circular reference check for
    container types is skipped and a circular reference will result in an
    ``OverflowError`` (or worse).

    If ``allow_nan`` is false, it is a ``ValueError`` to serialize
    out-of-range ``float`` values (``nan``, ``inf``, ``-inf``) in strict
    compliance with the JSON specification, instead of using the JavaScript
    equivalents (``NaN``, ``Infinity``, ``-Infinity``).

    If ``indent`` is a non-negative integer, array elements and object
    members are pretty-printed with that indent level; 0 inserts only
    newlines, and ``None`` (the default) is the most compact form.

    If ``separators`` is an ``(item_separator, dict_separator)`` tuple it
    replaces the default ``(', ', ': ')``; ``(',', ':')`` is the most
    compact JSON representation.

    ``encoding`` is the character encoding for str instances (UTF-8 by
    default).  ``default(obj)`` should return a serializable version of
    ``obj`` or raise ``TypeError``.  To use a custom ``JSONEncoder``
    subclass, pass it as the ``cls`` kwarg.
    """
    # All-default calls reuse the shared module-level encoder.
    use_cached = (not skipkeys and ensure_ascii and check_circular
                  and allow_nan and cls is None and indent is None
                  and separators is None and encoding == 'utf-8'
                  and default is None and not kw)
    if use_cached:
        encoder = _default_encoder
    else:
        encoder = (cls or JSONEncoder)(
            skipkeys=skipkeys, ensure_ascii=ensure_ascii,
            check_circular=check_circular, allow_nan=allow_nan,
            indent=indent, separators=separators, encoding=encoding,
            default=default, **kw)
    # Stream chunk by chunk; could use writelines on some Pythons, at a
    # debuggability cost.
    for chunk in encoder.iterencode(obj):
        fp.write(chunk)
def dumps(obj, skipkeys=False, ensure_ascii=True, check_circular=True,
        allow_nan=True, cls=None, indent=None, separators=None,
        encoding='utf-8', default=None, **kw):
    """Serialize ``obj`` to a JSON formatted ``str``.
    With ``skipkeys`` false, non-basic ``dict`` keys (anything other than
    ``str``, ``unicode``, ``int``, ``long``, ``float``, ``bool``, ``None``)
    raise ``TypeError``; with ``skipkeys`` true they are silently dropped.
    With ``ensure_ascii`` false the result is a ``unicode`` instance rather
    than an ASCII-escaped ``str``.
    With ``check_circular`` false the circular reference check is skipped and
    a self-referencing container produces an ``OverflowError`` (or worse).
    With ``allow_nan`` false, out-of-range floats (``nan``, ``inf``,
    ``-inf``) raise ``ValueError`` instead of emitting the JavaScript
    constants ``NaN``/``Infinity``/``-Infinity``.
    ``indent``, when a non-negative integer, pretty-prints arrays and
    objects at that indent level; 0 inserts only newlines and ``None`` is
    the most compact form.
    ``separators`` may be an ``(item_separator, dict_separator)`` tuple to
    override the default ``(', ', ': ')``; ``(',', ':')`` is most compact.
    ``encoding`` names the character encoding for ``str`` instances
    (default UTF-8).
    ``default(obj)`` should return a serializable version of ``obj`` or
    raise ``TypeError``; a custom ``JSONEncoder`` subclass may be supplied
    via ``cls``.
    """
    # Fast path: all keyword options at their defaults, so the shared
    # module-level encoder can be reused instead of building a new one.
    all_defaults = (not skipkeys and ensure_ascii and check_circular and
                    allow_nan and cls is None and indent is None and
                    separators is None and encoding == 'utf-8' and
                    default is None and not kw)
    if all_defaults:
        return _default_encoder.encode(obj)
    encoder_cls = cls
    if encoder_cls is None:
        encoder_cls = JSONEncoder
    encoder = encoder_cls(skipkeys=skipkeys, ensure_ascii=ensure_ascii,
                          check_circular=check_circular, allow_nan=allow_nan,
                          indent=indent, separators=separators,
                          encoding=encoding, default=default, **kw)
    return encoder.encode(obj)
_default_decoder = JSONDecoder(encoding=None, object_hook=None)
def load(fp, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize a JSON document read from ``fp`` (any object with a
    ``.read()`` method) to a Python object.
    If the stream uses an ASCII-based encoding other than utf-8 (e.g.
    latin-1), pass its name as ``encoding``.  Non-ASCII-based encodings
    (such as UCS-2) are not allowed; wrap the stream with
    ``codecs.getreader(fp)(encoding)`` or decode to ``unicode`` and call
    ``loads()`` directly.
    ``object_hook``, if given, is called with every decoded JSON object
    (a ``dict``) and its return value is used in place of that ``dict`` —
    useful for custom decoders such as JSON-RPC class hinting.
    A custom ``JSONDecoder`` subclass may be supplied via ``cls``.
    """
    # Slurp the whole document and delegate to loads(), which owns all of
    # the option handling.
    document = fp.read()
    return loads(document, encoding=encoding, cls=cls,
                 object_hook=object_hook, parse_float=parse_float,
                 parse_int=parse_int, parse_constant=parse_constant, **kw)
def loads(s, encoding=None, cls=None, object_hook=None, parse_float=None,
        parse_int=None, parse_constant=None, **kw):
    """Deserialize ``s`` (a ``str`` or ``unicode`` JSON document) to a
    Python object.
    If ``s`` is a ``str`` in an ASCII-based encoding other than utf-8
    (e.g. latin-1), pass its name as ``encoding``.  Non-ASCII-based
    encodings (such as UCS-2) are not allowed; decode to ``unicode`` first.
    ``object_hook``, if given, is called with every decoded JSON object
    (a ``dict``) and its return value replaces that ``dict`` — useful for
    custom decoders such as JSON-RPC class hinting.
    ``parse_float`` / ``parse_int``, if given, are called with the string
    of every JSON float / int (defaults are equivalent to ``float(s)`` and
    ``int(s)``), allowing alternate numeric types such as
    ``decimal.Decimal``.
    ``parse_constant``, if given, is called with one of: ``-Infinity``,
    ``Infinity``, ``NaN``, ``null``, ``true``, ``false`` — e.g. to reject
    invalid JSON numbers.
    A custom ``JSONDecoder`` subclass may be supplied via ``cls``.
    """
    # Fast path: every option at its default lets us reuse the shared
    # module-level decoder.
    all_defaults = (cls is None and encoding is None and
                    object_hook is None and parse_int is None and
                    parse_float is None and parse_constant is None and
                    not kw)
    if all_defaults:
        return _default_decoder.decode(s)
    decoder_cls = cls
    if decoder_cls is None:
        decoder_cls = JSONDecoder
    # Forward only the hooks the caller actually supplied.
    for name, value in (('object_hook', object_hook),
                        ('parse_float', parse_float),
                        ('parse_int', parse_int),
                        ('parse_constant', parse_constant)):
        if value is not None:
            kw[name] = value
    return decoder_cls(encoding=encoding, **kw).decode(s)
| Python |
r"""Command-line tool to validate and pretty-print JSON
Usage::
$ echo '{"json":"obj"}' | python -m simplejson.tool
{
"json": "obj"
}
$ echo '{ 1.2:3.4}' | python -m simplejson.tool
Expecting property name: line 1 column 2 (char 2)
"""
import sys
import simplejson
def main():
if len(sys.argv) == 1:
infile = sys.stdin
outfile = sys.stdout
elif len(sys.argv) == 2:
infile = open(sys.argv[1], 'rb')
outfile = sys.stdout
elif len(sys.argv) == 3:
infile = open(sys.argv[1], 'rb')
outfile = open(sys.argv[2], 'wb')
else:
raise SystemExit(sys.argv[0] + " [infile [outfile]]")
try:
obj = simplejson.load(infile)
except ValueError, e:
raise SystemExit(e)
simplejson.dump(obj, outfile, sort_keys=True, indent=4)
outfile.write('\n')
# Allow use as a script: ``python -m simplejson.tool``.
if __name__ == '__main__':
    main()
| Python |
"""JSON token scanner
"""
import re
try:
from simplejson._speedups import make_scanner as c_make_scanner
except ImportError:
c_make_scanner = None
__all__ = ['make_scanner']
# Matches a JSON number: integer part, optional fraction, optional exponent.
NUMBER_RE = re.compile(
    r'(-?(?:0|[1-9]\d*))(\.\d+)?([eE][-+]?\d+)?',
    (re.VERBOSE | re.MULTILINE | re.DOTALL))
def py_make_scanner(context):
    """Build the pure-Python scan function for a decoder *context*.
    Returns ``_scan_once(string, idx)``, which parses one JSON value
    starting at ``idx`` and returns ``(value, end_index)``.  It raises
    ``StopIteration`` when ``idx`` is past the end of ``string`` or no
    JSON value starts there.
    """
    # Bind context attributes to locals once so the closure uses fast
    # local lookups on the hot parsing path.
    parse_object = context.parse_object
    parse_array = context.parse_array
    parse_string = context.parse_string
    match_number = NUMBER_RE.match
    encoding = context.encoding
    strict = context.strict
    parse_float = context.parse_float
    parse_int = context.parse_int
    parse_constant = context.parse_constant
    object_hook = context.object_hook
    def _scan_once(string, idx):
        try:
            nextchar = string[idx]
        except IndexError:
            # End of input: nothing left to scan.
            raise StopIteration
        if nextchar == '"':
            return parse_string(string, idx + 1, encoding, strict)
        elif nextchar == '{':
            # parse_object/parse_array take a (string, idx) pair — this
            # matches the _speedups C calling convention.
            return parse_object((string, idx + 1), encoding, strict, _scan_once, object_hook)
        elif nextchar == '[':
            return parse_array((string, idx + 1), _scan_once)
        elif nextchar == 'n' and string[idx:idx + 4] == 'null':
            return None, idx + 4
        elif nextchar == 't' and string[idx:idx + 4] == 'true':
            return True, idx + 4
        elif nextchar == 'f' and string[idx:idx + 5] == 'false':
            return False, idx + 5
        m = match_number(string, idx)
        if m is not None:
            integer, frac, exp = m.groups()
            # A fraction or exponent makes the token a float; otherwise int.
            if frac or exp:
                res = parse_float(integer + (frac or '') + (exp or ''))
            else:
                res = parse_int(integer)
            return res, m.end()
        elif nextchar == 'N' and string[idx:idx + 3] == 'NaN':
            return parse_constant('NaN'), idx + 3
        elif nextchar == 'I' and string[idx:idx + 8] == 'Infinity':
            return parse_constant('Infinity'), idx + 8
        elif nextchar == '-' and string[idx:idx + 9] == '-Infinity':
            return parse_constant('-Infinity'), idx + 9
        else:
            # Not a recognizable JSON token.
            raise StopIteration
    return _scan_once
make_scanner = c_make_scanner or py_make_scanner
| Python |
class Geostring (object):
    """Binary geohash: encodes an (x, y) point as a "0"/"1" string of
    interleaved x/y bits relative to a bounding box.
    Longer hash prefixes describe smaller (more precise) cells.
    """
    def _to_bits (cls,f,depth=32):
        # Convert fraction f (expected in [0, 1]) into `depth` bits,
        # most significant first.
        f *= (1L << depth)
        return [(long(f) >> (depth-i)) & 1 for i in range(1,depth+1)]
    _to_bits = classmethod(_to_bits)
    def bitstring (cls,(x,y),bound=(-180,-90,180,90),depth=32):
        # Normalize x and y into `bound`, then interleave their bit lists
        # (x bit first, then y bit) into a single "0"/"1" string.
        x = cls._to_bits((x-bound[0])/float(bound[2]-bound[0]),depth)
        y = cls._to_bits((y-bound[1])/float(bound[3]-bound[1]),depth)
        bits = reduce(lambda x,y:x+list(y), zip(x,y), [])
        return "".join(map(str,bits))
    bitstring = classmethod(bitstring)
    def __init__ (self, data, bound=(-180,-90,180,90), depth=32):
        # `data` is either a coordinate pair (encoded here) or an
        # already-encoded hash string (stored verbatim).
        self.bound = bound
        self.depth = depth
        self.origin = bound[0:2]
        self.size = (bound[2]-bound[0], bound[3]-bound[1])
        if isinstance(data,tuple) or isinstance(data,list):
            self.hash = self.bitstring(data,bound,depth)
        else:
            self.hash = data
    def __str__ (self):
        return self.hash
    def _to_bbox (self, bits):
        # Decode an interleaved bit sequence into a bounding box
        # (minx, miny, maxx, maxy) rounded to 6 decimal places.
        depth = len(bits)/2
        minx = miny = 0.0
        maxx = maxy = 1.0
        for i in range(depth+1):
            try:
                minx += float(bits[i*2])/(2L<<i)
                miny += float(bits[i*2+1])/(2L<<i)
            except IndexError:
                # Odd-length bit strings simply run out of trailing bits.
                pass
        if depth:
            maxx = minx + 1.0/(2L<<(depth-1))
            maxy = miny + 1.0/(2L<<(depth-1))
        elif len(bits) == 1:
            # degenerate case: a single bit only halves the x extent
            maxx = min(minx + .5, 1.0)
        # Rescale unit-square fractions back into the original bound.
        minx, maxx = [self.origin[0]+x*self.size[0] for x in (minx,maxx)]
        miny, maxy = [self.origin[1]+y*self.size[1] for y in (miny,maxy)]
        return tuple([round(x,6) for x in minx, miny, maxx, maxy])
    def bbox (self, prefix=None):
        # Bounding box of the cell named by the first `prefix` hash
        # characters (defaults to the full hash).
        if not prefix: prefix=len(self.hash)
        return self._to_bbox(self.hash[:prefix])
    def point (self,prefix=None):
        # Center point of the cell's bounding box.
        minx, miny, maxx, maxy = self.bbox(prefix)
        return (minx+maxx)/2.0, (miny+maxy)/2.0
    def union (self,other):
        # Smallest common cell: the longest shared hash prefix with `other`.
        other = str(other)
        hash = self.hash
        for i in range(min(len(self.hash),len(other))):
            if self.hash[i] != other[i]:
                hash = self.hash[:i]
                break
        return type(self)(hash,self.bound,self.depth)
    __add__ = union
class Geoindex (Geostring):
    """Geostring variant using a "1"/"2" alphabet padded with "1"s,
    presumably so hashes sort usefully as index keys — TODO confirm.
    """
    def bitstring (cls,coord,bound=(-180,-90,180,90),depth=32):
        # Re-encode the plain bitstring: "1" -> "2", then right-pad with
        # "1"s to the full depth*2 length.
        bits = Geostring.bitstring(coord,bound,depth)
        bits = bits.replace("1","2")
        bits += "1" * (depth*2 - len(bits))
        return bits
    bitstring = classmethod(bitstring)
    def bbox (self, prefix=None):
        # Undo the index encoding (drop pad "1"s, map "2" back to "1")
        # before decoding as an ordinary bitstring.
        bits = self.hash.replace("1","").replace("2","1")
        if not prefix: prefix=len(bits)
        return self._to_bbox(bits[:prefix])
    def union (self,other):
        # Longest shared prefix, re-padded with "1"s to the full length.
        other = str(other)
        hash = self.hash
        for i in range(min(len(self.hash),len(other))):
            if self.hash[i] != other[i]:
                hash = self.hash[:i] + ("1" * (self.depth*2-i))
                break
        return type(self)(hash,self.bound,self.depth)
    __add__ = union
class Geohash (Geostring):
    """Geostring packed 5 bits per character into the base-32 geohash
    alphabet (the conventional geohash string form).
    """
    # Standard geohash alphabet (digits plus letters, excluding a/i/l/o).
    BASE_32 = "0123456789bcdefghjkmnpqrstuvwxyz"
    def bitstring (cls,coord,bound=(-180,-90,180,90),depth=32):
        # Group the interleaved bits 5 at a time and map each group to a
        # base-32 character.
        bits = Geostring.bitstring(coord,bound,depth)
        hash = ""
        for i in range(0,len(bits),5):
            m = sum([int(n)<<(4-j) for j,n in enumerate(bits[i:i+5])])
            hash += cls.BASE_32[m]
        return hash
    bitstring = classmethod(bitstring)
    def bbox (self,prefix=None):
        # Expand each base-32 character back into its 5 bits, then decode
        # the flattened bit list as an ordinary bitstring.
        if not prefix: prefix=len(self.hash)
        bits = [[n>>(4-i)&1 for i in range(5)]
            for n in map(self.BASE_32.find, self.hash[:prefix])]
        bits = reduce(lambda x,y:x+y, bits, [])
        return self._to_bbox(bits)
if __name__ == "__main__":
    import sys
    if len(sys.argv) == 1:
        # No arguments: run the module doctests.
        import doctest
        doctest.testmod(verbose=True)
    elif len(sys.argv) == 2:
        # One argument: treat it as a geohash and print its bounding box.
        print Geohash(sys.argv[1]).bbox()
    else:
        # More arguments: treat the first two as x/y and print their hash.
        print Geohash(map(float, sys.argv[1:3]))
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Define the kinds of data returned from the bugs tool."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
from common.util import class_attr
class Kind(object):
    """Keyword constants naming the kinds of data the bugs tool returns."""
    # Each value is a 'bugs#<name>' tag embedded in tool responses.
    BUG = 'bugs#bug'
    ID = 'bugs#id'
    URL_BUG_MAP = 'bugs#url-bug-map'
    URLS = 'bugs#urls'
# List of valid kinds, collected once at import time from the plain-data
# attributes of Kind above.
_ALL_KINDS = class_attr.GetPODAttrsValue(Kind)
def IsValid(value):
    """Report whether *value* names one of the known kinds.
    Args:
      value: The value to test. (string)
    Returns:
      Whether or not the value is a kind. (boolean)
    """
    # Membership test against the module-level list built at import time.
    return any(value == known for known in _ALL_KINDS)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Datastore crawler object; does nothing."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
from bugs.providers import crawler_base
class Error(crawler_base.Error):
    """Error raised by the Datastore crawler."""
    pass
class Crawler(crawler_base.CrawlerBase):
    """No-op crawler for the Datastore provider.
    Crawlers normally copy bug data from a provider database into the
    Datastore, translating provider bug data into BITE bug data along the
    way; with the Datastore as the provider there is nothing to move.
    """
    def __init__(self, max_retries=3):
        super(Crawler, self).__init__(max_retries)
    def Crawl(self):
        """Crawl the provider database; a no-op for the Datastore.
        Raises:
            Error: Raised if there was an error creating an index.
        """
        pass
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Datastore indexer object."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
from google.appengine.api import users
from bugs import kind
from bugs.models.bugs import bug
from bugs.providers import pusher_base
class Error(pusher_base.Error):
    """Error raised by the Datastore pusher."""
    pass
class Pusher(pusher_base.PusherBase):
    """Datastore pusher.
    Pushers fill the provider's database with bug details; for the
    Datastore provider this writes provider-derived metadata (ids, author,
    timestamps, status) back onto the bug model itself.
    """
    def __init__(self, bug, max_retries=3):
        pusher_base.PusherBase.__init__(self, bug, max_retries)
    def Push(self):
        """Updates the bug model with values set by providers such as bug_id.
        Raises:
            Error: Raised if updating the bug model fails.
        """
        try:
            # Attribute the update to the signed-in user, if any.
            current_user = users.get_current_user()
            user_email = None
            if current_user:
                user_email = current_user.email()
            # The Datastore acts as its own provider, so provider ids come
            # straight from the bug entity's key.
            provider_data = {
                'kind': kind.Kind.BUG,
                'id': self.bug.key().id(),
                'bug_id': str(self.bug.key().id()), # bug_id
                'author': user_email,
                'author_id': user_email,  # NOTE(review): email used as id — confirm
                'reported_on': str(self.bug.added),
                'last_update': str(self.bug.modified),
                'last_updater': user_email,
                'status': self.bug.state,
                'project': 'none',
                'priority': 'none',
                'details_link': ''
            }
            bug.Update(self.bug, provider_data)
        except (bug.InvalidIdError, bug.UpdateError), e:
            # Re-raise model-layer failures as this provider's Error type.
            raise Error(e)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Datastore indexer object."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
from bugs.models.url_bug_map import url_bug_map
from bugs.providers import indexer_base
class Error(indexer_base.Error):
    """Error raised by the Datastore indexer."""
    pass
class Indexer(indexer_base.IndexerBase):
    """Datastore indexer.
    Creates the URL-to-bug search indices for bugs stored in the Datastore.
    """
    def __init__(self):
        pass
    def Index(self, bug):
        """Creates search indices for the bug specified by the given bug.
        Args:
            bug: The bug. (bugs.models.bug.Bug)
        Returns:
            The id for the newly created UrlBugMap. (integer)
        Raises:
            Error: Raised if there was an error creating an index.
        """
        try:
            return url_bug_map.Create(bug)
        except url_bug_map.CreateError, e:
            # Re-raise model-layer failures as this provider's Error type.
            raise Error(e)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for Indexer objects.
The choice to add the bug to the function rather than to the object was that
the indexer may be run on many bugs/items/etc so I didn't want the object to
become dependent on the bug it was manipulating.
"""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
class Error(Exception):
    """Base error for indexer implementations."""
    pass
class IndexerBase(object):
    """Abstract base for provider-specific indexers.
    An indexer builds the search indices for a bug supplied by a
    particular provider; subclasses must override Index().
    """
    def __init__(self):
        pass
    def Index(self, bug):
        """Create search indices for *bug*; must be overridden."""
        raise NotImplementedError
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the available providers."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
class Provider(object):
    """Names of the bug providers this package knows how to talk to."""
    DATASTORE = 'datastore'
    ISSUETRACKER = 'issuetracker'
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for Crawler objects."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
class Error(Exception):
    """Base error for crawler implementations."""
    pass
class CrawlerBase(object):
    """Abstract base for provider-specific crawlers.
    A crawler retrieves the relevant bug data from one provider's
    database; subclasses must override Crawl().
    Attributes:
        max_retries: Maximum number of attempts for a single crawl.
    """
    def __init__(self, max_retries=3):
        self.max_retries = max_retries
    def Crawl(self):
        """Crawl the provider database; must be overridden."""
        raise NotImplementedError
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Issue Tracker crawler object."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
from bugs.providers import crawler_base
class Error(crawler_base.Error):
    """Error raised by the Issue Tracker crawler."""
    pass
class Crawler(crawler_base.CrawlerBase):
    """Issue Tracker crawler (currently a no-op).
    Crawlers move bug data from a provider database to the Datastore,
    translating provider bug data into BITE bug data along the way.
    """
    def __init__(self, max_retries=3):
        super(Crawler, self).__init__(max_retries)
    def Crawl(self):
        """Crawl the provider database; not implemented yet.
        Raises:
            Error: Raised if there was an error creating an index.
        """
        pass
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Issue Tracker indexer object."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
from bugs.providers import pusher_base
class Error(pusher_base.Error):
    """Error raised by the Issue Tracker pusher."""
    pass
class Pusher(pusher_base.PusherBase):
    """Issue Tracker pusher (currently a no-op).
    Pushers fill the provider's database with bug details, typically
    transforming BITE bug metadata into provider-specific data.
    """
    def __init__(self, key, max_retries=3):
        super(Pusher, self).__init__(key, max_retries)
    def Push(self):
        """Update the bug model with provider-set values; not implemented.
        Raises:
            Error: Raised if there was an error creating an index.
        """
        pass
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Issue Tracker indexer object."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
from bugs.providers import indexer_base
class Error(indexer_base.Error):
    """Error raised by the Issue Tracker indexer."""
    pass
class Indexer(indexer_base.IndexerBase):
    """Issue Tracker indexer (currently a no-op).
    Indexers create the search indices for bugs that come from a specific
    provider.
    """
    def __init__(self):
        pass
    def Index(self, bug):
        """Create search indices for the given bug; not implemented.
        Args:
            bug: The bug. (bugs.models.bug.Bug)
        Returns:
            The id for the newly created UrlBugMap. (integer)
        Raises:
            Error: Raised if there was an error creating an index.
        """
        pass
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Defines the available services."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
class Service(object):
    """Names of the services every provider must supply."""
    CRAWL = 'crawl'
    INDEX = 'index'
    PUSH = 'push'
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Base class for Pusher objects."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
class Error(Exception):
    """Base error for pusher implementations."""
    pass
class PusherBase(object):
    """Abstract base for provider-specific pushers.
    A pusher writes a bug out to one provider's database; subclasses must
    override Push().
    Attributes:
        bug: The bug object to push. (bugs.models.bug.Bug)
        max_retries: Maximum number of attempts to push a bug. (integer)
    """
    def __init__(self, bug, max_retries=3):
        self.bug = bug
        self.max_retries = max_retries
    def Push(self):
        """Push self.bug to the provider; must be overridden."""
        raise NotImplementedError
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides access to services for the various providers.
TODO (jason.stredwick): Return to address the potential for bugs to have
multiple providers when addressing changes to the bug model.
Three main functions:
Crawl(provider)
Index(id)
Push(id)
"""
__author__ = ('alexto@google.com (Alexis O. Torres)',
'jason.stredwick@gmail.com (Jason Stredwick)')
from bugs.models.bugs import bug
from bugs.providers import config
from bugs.providers import crawler_base
from bugs.providers import indexer_base
from bugs.providers import pusher_base
from bugs.providers.provider import Provider
from bugs.providers.service import Service
class Error(Exception):
    """Base error for the providers package."""
    pass
class CrawlError(crawler_base.Error):
    """Raised when crawling a provider fails."""
    pass
class IndexError(indexer_base.Error):
    """Raised when building a search index fails."""
    # NOTE(review): shadows the builtin IndexError inside this module;
    # renaming would change the public API, so it is only flagged here.
    pass
class InvalidIdError(Error):
    """Raised when an id does not map to a known bug."""
    pass
class ProviderNotSupportedError(Error):
    """Raised when a bug names a provider this package does not support."""
    pass
class PushError(pusher_base.Error):
    """Raised when pushing a bug to a provider fails."""
    pass
def Crawl(provider):
    """Crawl the given provider.
    Args:
        provider: The provider to crawl. (string)
    Raises:
        ProviderNotSupportedError: The given provider is not supported.
    """
    if not provider or provider not in config.PROVIDER_MAP:
        # Bug fix: this previously raised the undefined name
        # ProviderNotSupported, which surfaced as a NameError instead of
        # the intended exception.
        raise ProviderNotSupportedError('Invalid provider; %s' % provider)
    crawler = config.PROVIDER_MAP[provider][Service.CRAWL]()
    crawler.Crawl()
def Index(id):
"""Build index for the given provider.
Args:
id: The id for the bug the service will act on. (integer)
Raises:
InvalidIdError: Raised if the given id does not map to a bug.
ProviderNotSupported: The given provider is not supported.
"""
try:
bug_model = bug.Get(id)
except bug.InvalidIdError, e:
raise InvalidIdError(e)
provider = bug_model.provider
if not provider or provider not in config.PROVIDER_MAP:
raise ProviderNotSupported('Invalid provider; %s' % provider)
indexer = config.PROVIDER_MAP[provider][Service.INDEX]()
try:
indexer.Index(bug_model)
except indexer_base.Error, e:
raise InvalidIdError(e)
def Push(id):
"""Pushes the bug specified by the given id per the provided pusher.
Args:
id: The id for the bug the service will act on. (integer)
Raises:
InvalidIdError: Raised if the given id does not map to a bug.
ProviderNotSupported: The given provider is not supported.
"""
try:
bug_model = bug.Get(id)
except bug.InvalidIdError, e:
raise InvalidIdError(e)
provider = bug_model.provider
if not provider or provider not in config.PROVIDER_MAP:
raise ProviderNotSupported('Invalid provider; %s' % provider)
pusher = config.PROVIDER_MAP[provider][Service.PUSH](bug_model)
pusher.Push()
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Controls the available providers."""
__author__ = ('alexto@google.com (Alexis O. Torres)',
'jason.stredwick@gmail.com (Jason Stredwick)')
from bugs.providers.service import Service
from bugs.providers.provider import Provider
from bugs.providers.datastore import crawler as datastore_crawler
from bugs.providers.datastore import indexer as datastore_indexer
from bugs.providers.datastore import pusher as datastore_pusher
from bugs.providers.issuetracker import crawler as issuetracker_crawler
from bugs.providers.issuetracker import indexer as issuetracker_indexer
from bugs.providers.issuetracker import pusher as issuetracker_pusher
# Maps each provider name to its service implementations; consumed by
# bugs.providers.Crawl/Index/Push via the Service keys.
PROVIDER_MAP = {
    Provider.DATASTORE: {
        Service.CRAWL: datastore_crawler.Crawler,
        Service.INDEX: datastore_indexer.Indexer,
        Service.PUSH: datastore_pusher.Pusher
    },
    Provider.ISSUETRACKER: {
        Service.CRAWL: issuetracker_crawler.Crawler,
        Service.INDEX: issuetracker_indexer.Indexer,
        Service.PUSH: issuetracker_pusher.Pusher
    }
}
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Gets and stores templates from the BITE server.
Called by clients to retrieve a list of bug templates that have been stored on
the BITE server, or to add a template to the BITE server.
"""
__author__ = 'ralphj@google.com (Julie Ralph)'
import sys
from google.appengine.api import users
from google.appengine.ext import webapp
from google.appengine.ext.webapp.util import run_wsgi_app
from common.handlers import base
from models import bug_template
MAX_RESULTS_CAP = 1000
class GetTemplatesHandler(base.BaseHandler):
    """Handles GET requests to the '/get_templates' URI."""
    # Disable 'Invalid method name' lint error.
    # pylint: disable-msg=C6409
    def get(self):
        """Retrieves all bug templates.
        TODO(ralphj): Add a function that retrieves templates for only a
        specific url or project.
        Returns:
            A list of JSON-encoded templates.
        """
        # Fetch up to MAX_RESULTS_CAP templates; no login check here —
        # presumably templates are world-readable. TODO confirm.
        query = bug_template.BugTemplate.all()
        templates_list = query.fetch(MAX_RESULTS_CAP)
        result = bug_template.JsonEncode(templates_list)
        self.response.headers['Content-Type'] = 'application/json'
        self.response.out.write(result)
class NewTemplateHandler(base.BaseHandler):
  """Handles requests to the '/new_template' URI.

  GET requests will load a form allowing the user to write a new Bug Template.
  Submitting the form will result in a POST request, which adds the
  Bug Template to the AppEngine Datastore.
  """

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def get(self):
    """Displays a form for adding a new template."""
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      # redirect() does not abort the handler; without this return the form
      # would still be rendered for anonymous users.
      return
    self.RenderTemplate('templates.html', {})

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def post(self):
    """Adds a new template, or replaces a current template with the same id."""
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      # redirect() does not abort the handler; without this return an
      # anonymous POST would still create the template.
      return
    template_id = self.GetRequiredParameter('id')
    name = self.GetRequiredParameter('name')
    urls = self.GetRequiredParameter('urls')
    project = self.GetRequiredParameter('project')
    backend_project = self.GetRequiredParameter('backend_project')
    backend_provider = self.GetRequiredParameter('backend_provider')
    selector_text = self.GetRequiredParameter('selector_text')
    note_text = self.GetRequiredParameter('note_text')
    display_order = self.GetOptionalIntParameter('display_order', 0)
    # The form submits urls as a single comma-separated string.
    url_list = urls.split(',')
    bug_template.StoreBugTemplate(template_id=template_id,
                                  name=name,
                                  urls=url_list,
                                  project=project,
                                  backend_project=backend_project,
                                  backend_provider=backend_provider,
                                  selector_text=selector_text,
                                  note_text=note_text,
                                  display_order=display_order)
    # Fixed user-visible typo: 'succesfully' -> 'successfully'.
    self.RenderTemplate('templates.html',
                        {'alert': 'Bug Template successfully created.'})
# WSGI application mapping the template URIs to their handlers.
app = webapp.WSGIApplication(
    [('/get_templates', GetTemplatesHandler),
     ('/new_template', NewTemplateHandler)],
    debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Create a new bug entry."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
import logging
import webapp2
from bugs import kind
from bugs.models.bugs import bug
from bugs.providers import services
from common.handlers import base
class Error(base.Error):
  """Raised for failures while creating a new bug; carries an HTTP code."""
  pass
class CreateHandler(base.BaseHandler):
"""Create a new bug entry."""
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
def post(self):
"""Create a new bug entry with the given data.
Raises:
Error: Raised if the data fails to be JSON parsed/stringified or is not
present.
"""
logging.info('New bug handler; bugs.handlers.bugs.create.CreateHandler')
# TODO (jason.stredwick): Figure out the correct failure strategy if a new
# bug is created but either the url/bug mapping or pusher fails.
try:
data = self.GetData(kind.Kind.BUG)
bug_model = bug.Create(data)
id = bug_model.key().id()
services.Index(id)
services.Push(id)
self.WriteResponse({'kind': kind.Kind.ID, 'id': id})
except bug.CreateError, e:
raise Error('Failed to create a new bug.\n%s\n' % e, code=400)
except services.PushError:
raise Error('Failed to push new bug [id=%s].\n' % id, code=400)
except services.IndexError:
raise Error('Failed to create index for new bug [id=%s].\n' % id,
code=400)
except base.Error, e:
raise Error(e)
# Route table: POST /bugs creates a new bug entry.
routes = [
    webapp2.Route(r'/bugs', handler=CreateHandler, name='bugs_create',
                  methods=['POST'])
]
# WSGI entry point for this handler module.
app = webapp2.WSGIApplication(routes, debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Access a bug entry; get/update."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
import logging
import webapp2
from bugs import kind
from bugs.models.bugs import bug
from common.handlers import base
from util import model_to_dict
class Error(base.Error):
  """Raised for failures while accessing a bug; carries an HTTP code."""
  pass
class AccessHandler(base.BaseHandler):
"""Access a bug entry."""
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
def put(self, id):
"""Update a bug entry with the given data using the given id.
Args:
id: The id for the bug to retrieve. (integer)
Raises:
Error: Something went wrong processing the request/response or performing
the update.
"""
logging.info('Update bug handler; bugs.handlers.access.AccessHandler')
id = int(id)
try:
data = self.GetData(kind.Kind.BUG)
bug_model = bug.Get(id)
bug.Update(bug_model, data)
# TODO (jason.stredwick): Add in deletion of UrlBugMaps and add in new
# ones.
except bug.InvalidIdError:
raise Error('Failed to find bug [id=%s].' % id, code=400)
except bug.UpdateError, e:
raise Error('Update bug [id=%s] failed. Exception: %s' % (id, e),
code=400)
except base.Error, e:
raise Error(e)
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
def get(self, id):
"""Get a bug entry using the given id.
Args:
id: The id for the bug to retrieve. (integer)
Raises:
Error: The id did not match a stored bug.
"""
logging.info('Update bug handler; bugs.handlers.access.AccessHandler')
id = int(id)
try:
bug_model = bug.Get(id)
response = model_to_dict.ModelToDict(bug_model)
response['kind'] = kind.Kind.BUG
self.WriteResponse(bug_model)
except bug.InvalidIdError:
raise Error('Failed to find bug [id=%s].' % id, code=400)
except base.Error, e:
raise Error(e)
# Route table: GET/PUT /bugs/<id> retrieves or updates a single bug.
routes = [
    webapp2.Route(r'/bugs/<id:\d+>', handler=AccessHandler, name='bugs_access',
                  methods=['GET', 'PUT'])
]
# WSGI entry point for this handler module.
app = webapp2.WSGIApplication(routes, debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Get a set of bugs based on url."""
__author__ = 'jason.stredwick@gmail.com (Jason Stredwick)'
import logging
import webapp2
from bugs import kind
from bugs.models.url_bug_map import get_bugs
from common.handlers import base
class Error(base.Error):
  """Raised for failures while looking up bugs by url; carries an HTTP code."""
  pass
class UrlsHandler(base.BaseHandler):
"""Get bug entries based on url."""
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
def post(self):
"""Get bugs for the given urls.
Raises:
Error: Raised upon failure.
"""
logging.info('UrlBugMap handler; bugs.handlers.bugs.urls.UrlsHandler')
try:
data = self.GetData(kind.Kind.URLS)
mappings = get_bugs.GetBugs(data['urls'])
self.WriteResponse({'kind': kind.Kind.URL_BUG_MAP, 'mappings': mappings})
except get_bugs.Error, e:
raise Error('Failed to retrieve bugs for Url to Bug map: %s\n' % e,
code=400)
except base.Error, e:
raise Error(e)
# Route table: POST /bugs/urls fetches bugs for a batch of urls.
routes = [
    webapp2.Route(r'/bugs/urls', handler=UrlsHandler, name='bugs_urls',
                  methods=['POST'])
]
# WSGI entry point for this handler module.
app = webapp2.WSGIApplication(routes, debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for bug templates.
Bug Templates provides a model for a template for a type of bug. A project
owner can define templates for their project, which pre-populate the
backend project that the bug should be filed to and provide a starting place for
the bug report writer to write their notes. Bug Templates are stored in
AppEngine's Datastore.
"""
__author__ = 'ralphj@google.com (Julie Ralph)'
import simplejson
from google.appengine.ext import db
class BugTemplate(db.Model):
  """Models a Bug Template stored in AppEngine's Datastore.

  Instances are keyed by template_id (see StoreBugTemplate), which is also
  duplicated into the template_id property for querying/serialization.

  TODO(ralphj): urls should be more flexible and should be able to
  handle patterns.

  Attributes:
    template_id: A unique string identifier for this template.
    name: A human-readable name for this template.
    urls: A list of urls that this template should be used for.
    project: The human-readable project that this template is associated with.
    backend_project: An identifier for the project that is compatible with the
        backend provider.
    backend_provider: The issue tracking system that this template is
        associated with.
    selector_text: Text that should appear when the user is asked to pick a
        template, under 'What kind of problem are you reporting?'
    note_text: Text that should appear in the notes field.
    display_order: An integer declaring the relative position where this
        template should be displayed in lists. Higher numbers are displayed
        after lower numbers.
  """
  template_id = db.StringProperty(required=True)
  name = db.StringProperty(required=True)
  urls = db.StringListProperty(required=True)
  project = db.StringProperty(required=True)
  backend_project = db.StringProperty(required=True)
  backend_provider = db.StringProperty(required=True)
  selector_text = db.StringProperty(required=True)
  # TextProperty: note text may exceed the 500-char StringProperty limit.
  note_text = db.TextProperty(required=True)
  display_order = db.IntegerProperty(required=True, default=0)
class BugTemplateEncoder(simplejson.JSONEncoder):
  """JSON encoder that knows how to serialize BugTemplate objects."""

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def default(self, obj):
    """Overrides the default JSONEncoder.

    Args:
      obj: Object to serialize.

    Returns:
      A serializable representation of the Bug Template object.
    """
    # Anything that is not a BugTemplate falls back to the stock encoder,
    # which raises TypeError for unserializable objects.
    if not isinstance(obj, BugTemplate):
      return simplejson.JSONEncoder.default(self, obj)
    # Property names are converted to camelCase for the JSON consumer.
    return {'id': obj.template_id,
            'name': obj.name,
            'urls': obj.urls,
            'project': obj.project,
            'backendProject': obj.backend_project,
            'backendProvider': obj.backend_provider,
            'selectorText': obj.selector_text,
            'noteText': obj.note_text,
            'displayOrder': obj.display_order}
def JsonEncode(template):
  """Encodes a bug template model as JSON.

  Also accepts a list of templates; BugTemplateEncoder handles each element.

  Args:
    template: A bug template (or list of them) to encode.

  Returns:
    A JSON-encoded string representation of the bug template list.
  """
  return simplejson.dumps(template, cls=BugTemplateEncoder)
def StoreBugTemplate(template_id, name, urls, project, backend_project,
                     backend_provider, selector_text, note_text, display_order):
  """Stores a new bug template in the App Engine Datastore.

  If there is already a Bug Template with the same template_id, overwrites
  the old template.

  Args:
    template_id: A unique string identifier for this template.
    name: A human-readable name for this template.
    urls: A list of urls that this template should be used for.
    project: The project that this template is associated with.
    backend_project: An identifier for the project that is compatible with the
        backend provider.
    backend_provider: The issue tracking system that this template is
        associated with.
    selector_text: Text that should appear when the user is asked to pick a
        template, under 'What kind of problem are you reporting?'
    note_text: Text that should appear in the notes field.
    display_order: An integer declaring the relative position where this
        template should be displayed in lists. Higher numbers are displayed
        after lower numbers.

  Returns:
    The newly created (or updated) bug template.
  """
  # All mutable fields, shared by both the create and the update path.
  fields = {'name': name,
            'urls': urls,
            'project': project,
            'backend_project': backend_project,
            'backend_provider': backend_provider,
            'selector_text': selector_text,
            'note_text': note_text,
            'display_order': display_order}
  template = BugTemplate.get_by_key_name(template_id)
  if template is None:
    # template_id doubles as the entity key_name so lookups stay cheap.
    template = BugTemplate(key_name=template_id,
                           template_id=template_id,
                           **fields)
  else:
    for attr, value in fields.items():
      setattr(template, attr, value)
  template.put()
  return template
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for BITE bug data."""
__author__ = ('alexto@google.com (Alexis O. Torres)'
'jason.stredwick@gmail.com (Jason Stredwick)')
import logging
from google.appengine.ext import db
from bugs import kind
from bugs.providers.provider import Provider
from common.util import class_attr
# Allowed bug states.
class State(object):
  """Enumerates the lifecycle states a Bug may be in."""
  ACTIVE = 'active'
  CLOSED = 'closed'
  RESOLVED = 'resolved'
  UNKNOWN = 'unknown'
class CreateError(Exception):
  """Raised when a new Bug entity cannot be created/stored."""
  pass


class InvalidIdError(Exception):
  """Raised when an id does not correspond to a stored Bug."""
  pass


class UpdateError(Exception):
  """Raised when patching/saving an existing Bug fails."""
  pass
class Bug(db.Model):
  """Models a Bug stored in AppEngine's Datastore.

  This data may be a reduced form of the bug's details as stored in the
  provider's database.

  Attributes:
    title: The bug's title.
    state: The current state of the bug; i.e. resolved, closed, or active.
    url: The url of the page the bug was filed against.
    summary: The bug's summary.
    added: When this instance of the bug was added to the BITE datastore.
    modified: When this instance of the bug was last modified in the BITE
        datastore.
    provider: Source provider of the bug information.
    bug_id: The ID of the bug within the provider's bug database.
    status: Status of the bug (eg. active, fixed, closed) when it
        was crawled.
    author: The user who first reported this bug; from provider.
    author_id: Identifies the user in the provider backend.
    reported_on: The date the bug was first opened; from provider.
    last_update: Date the bug was last updated; from provider.
    last_updater: The last user to update the bug; from provider.
    project: Name of the project this bug is associated with.
    priority: The bug's priority.
    details_link: A url/link to the bug in the provider's database.
    has_target_element: Whether or not a target element is attached.
    target_element: When specified, describes an element on the page the bug
        is associated with.
    has_screenshot: Whether a screenshot is attached.
    screenshot: Url to an associated screenshot.
    has_recording: True, if the bug has recorded script attached.
    recording_link: Link to recorded script.
  """
  # Bug Details
  title = db.StringProperty(required=False)
  state = db.StringProperty(required=False, default=State.UNKNOWN,
                            choices=(State.ACTIVE, State.RESOLVED,
                                     State.CLOSED, State.UNKNOWN))
  url = db.StringProperty(required=False, multiline=True, default='')
  summary = db.TextProperty(required=False)
  # Timestamps are maintained automatically by the datastore.
  added = db.DateTimeProperty(required=False, auto_now_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
  # Provider Related Details
  provider = db.StringProperty(required=False,
                               default=Provider.DATASTORE,
                               choices=(Provider.DATASTORE,
                                        Provider.ISSUETRACKER))
  bug_id = db.StringProperty(required=False)
  status = db.StringProperty(required=False)
  author = db.StringProperty(required=False)
  author_id = db.StringProperty(required=False)
  reported_on = db.StringProperty(required=False)
  last_update = db.StringProperty(required=False)
  last_updater = db.StringProperty(required=False)
  project = db.StringProperty(required=False)
  priority = db.StringProperty(required=False)
  details_link = db.StringProperty(required=False)
  # Attachments; each has_* flag mirrors whether its companion value is set.
  has_target_element = db.BooleanProperty(required=False, default=False)
  target_element = db.TextProperty(required=False, default='')
  has_screenshot = db.BooleanProperty(required=False, default=False)
  screenshot = db.TextProperty(required=False, default='')
  has_recording = db.BooleanProperty(required=False, default=False)
  recording_link = db.TextProperty(required=False, default='')

  def Patch(self, obj):
    """Patch/update the model with data from the given object.

    For each property in the model, check if that property exists in the given
    data. If it exists then update the value for that property for the value
    in the given data. All properties in the given data will be ignored if
    that property does not exist in the model.

    Properties ignored by the patcher (that also exist in the model):
      added, modified, has_target_element, has_screenshot, has_recording

    Args:
      obj: The data to use to patch the model. (dict)

    Raise:
      db.Error: Raised if there is an error assigning the value from the given
          data to model.
      TypeError: Raised if the given object is not an object.
    """
    # TODO (jason.stredwick): Change to
    # auto_update_attr = class_attr.GetPODAttrs(Bug)
    # once the special cases have been resolved.
    special_props = ['target_element', 'has_target_element',
                     'screenshot', 'has_screenshot',
                     'recording_link', 'has_recording',
                     'added', 'modified']
    props = self.properties().keys()
    # Bulk-copy every plain property present in both obj and the model.
    for key, value in obj.iteritems():
      if key in props and key not in special_props:
        setattr(self, key, value)
    # Handle special case properties.
    # Attachments: each has_* flag is derived from the truthiness of the
    # attachment value rather than taken from obj.
    if 'target_element' in obj:
      self.target_element = obj['target_element']
      if obj['target_element']:
        self.has_target_element = True
      else:
        self.has_target_element = False
    if 'screenshot' in obj:
      self.screenshot = obj['screenshot']
      if obj['screenshot']:
        self.has_screenshot = True
      else:
        self.has_screenshot = False
    if 'recording_link' in obj:
      self.recording_link = obj['recording_link']
      if obj['recording_link']:
        self.has_recording = True
      else:
        self.has_recording = False
    # Validate the patched model before the caller persists it.
    self.__Verify()

  def __Verify(self):
    """Determines if the bug is valid.

    Raises:
      db.Error: Raised if any bug property is invalid.
    """
    # title is the only property Patch requires to be non-empty.
    if not self.title:
      raise db.Error('Missing title; required.')
def Create(data):
"""Create a new bug entry.
Args:
data: An object used to create a new model. (dict)
Returns:
Return the newly created bug.
Raises:
CreateError: Raised if something goes wrong while creating a new bug.
"""
try:
bug = Bug()
bug.Patch(data)
bug.put()
except (TypeError, db.Error, AssertionError), e:
logging.error('bug.Create: Exception while creating bug: %s', e)
raise CreateError('Failed to create a new bug.\n%s\n' % e)
return bug
def Get(id):
"""Returns the bug model for the given id.
Args:
id: The id of the bug to retrieve. (integer)
Returns:
Returns the bug model. (Bug)
Raises:
InvalidIdError: Raised if the id does not match a stored bug.
"""
try:
bug = Bug.get_by_id(id)
if not bug:
raise InvalidIdError
except (db.Error, InvalidIdError), e:
logging.error('bug.Get: Exception while retrieving bug (%s): %s', id, e)
raise InvalidIdError('Bug not found [id=%s].%s' % (id, e))
return bug
def Update(bug, data):
"""Update the bug specified by the given id with the given data.
Args:
bug: The bug to update. (Bug)
data: An object used to update the model details. (dict)
Raises:
UpdateError: Raised if there was an error updating the bug.
"""
try:
bug.Patch(data)
bug.put()
except (TypeError, db.Error), e:
# tempdate is used to output the data into the log, but strip out the
# screenshot information due to size.
# TODO (jason.stredwick): Resolve how to store and access screenshots and
# remove tempdate once the screenshot data is no longer directly stored.
tempdata = data
if 'screenshot' in tempdata:
del tempdata['screenshot']
logging.error('bug.Update: Exception while updating bug (%s): %s. Given '
'data of %s', id, e, tempdata)
raise UpdateError('bug [id=%s] failed to update.\n%s\n' % (id, e))
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Remove all mappings for a bug."""
__author__ = ('alexto@google.com (Alexis O. Torres)',
'jason.stredwick@gmail.com (Jason Stredwick)')
from bugs.models.url_bug_map import url_bug_map
class Error(Exception):
  """Base error for this module."""
  pass
def DeleteAllMappingsForBug(key_name):
  """Deletes all mappings for the specified bug.

  NOTE(review): as written this function cannot run: 'bugs', 'UrlBugMap',
  '_MAX_RESULTS_CAP', 'db', and 'logging' are not defined or imported in this
  module (only url_bug_map is imported).  Presumably 'UrlBugMap' should be
  'url_bug_map.UrlBugMap' and the missing imports/constant should come from
  the mappings model module -- confirm against the intended helpers before use.

  Args:
    key_name: The key name of the bug.

  Returns:
    The total amount of mappings deleted.
  """
  total_deleted = 0
  # NOTE(review): 'bugs' is undefined here -- TODO confirm intended lookup.
  bug = bugs.GetBugByKey(key_name)
  # Keys-only query: entities are deleted by key without being loaded.
  query = UrlBugMap.all(keys_only=True).filter('bug = ', bug)
  mappings = query.fetch(_MAX_RESULTS_CAP)
  # Delete in batches until the query yields no more keys.
  while mappings:
    total_deleted += len(mappings)
    db.delete(mappings)
    mappings = query.fetch(_MAX_RESULTS_CAP)
  logging.info(
      'DeleteAllMappingsForBug: total mappings deleted for bug %s: %d.',
      key_name, total_deleted)
  return total_deleted
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model to store URL to Bug associations.
Each bug is associated with one or more URLs. Each association is stored
as a separate entry in the UrlBugMap table.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import logging
import re
from google.appengine.ext import db
from bugs.models.bugs import bug
from utils import encoding_util
from utils import url_util
class UrlPosition(object):
  """Used to prioritize bugs returned from the UrlBugMap.

  Used to determine the origin of the url from within a bug retrieved by the
  crawler. The origin/position determines the priority of the bug relative to
  the other positions.
  """
  TITLE = 1
  MAIN = 2
  COMMENTS = 3
  OTHER = 0
class CreateError(Exception):
  """Raised when a url-to-bug mapping cannot be created/stored."""
  pass


class InvalidIdError(Exception):
  """Raised when an id does not correspond to a stored mapping."""
  pass


class UpdateError(Exception):
  """Raised when updating a stored mapping fails."""
  pass
class UrlBugMap(db.Model):
  """Represents a relationship between a URL and a Bug.

  There are 3 fields a typical query will try to search on:
  url, hostname, path, and status. These properties are stored as
  indexed properties to speed up searches.

  Attributes:
    url: The url associated with the bug, truncated for a string property for
        indexing purposes.
    hostname: The hostname of the url, truncated for a string property for
        indexing purposes.
    path: The path of the url, truncated for a string property for indexing
        purposes.
    state: The bug state, replicated here to enable query sorting.
    bug: Reference to the bug associated with the url.
    position: The position within the provider's bug's details and provides
        relevance information for querying.
    added: When the model was first created.
  """
  # Indices:
  url = db.StringProperty(required=True)
  hostname = db.StringProperty(required=False, default='')
  path = db.StringProperty(required=False, default='')
  # Replicated from the referenced bug so queries can sort/filter without a
  # join (the datastore has no joins).
  state = db.StringProperty(required=True,
                            choices=(bug.State.ACTIVE,
                                     bug.State.RESOLVED,
                                     bug.State.CLOSED,
                                     bug.State.UNKNOWN))
  last_update = db.StringProperty(required=False)
  # Non-indexed information.
  bug = db.ReferenceProperty(required=True, reference_class=bug.Bug,
                             collection_name='bug_urls')
  position = db.IntegerProperty(required=True,
                                choices=(UrlPosition.TITLE,
                                         UrlPosition.MAIN,
                                         UrlPosition.COMMENTS,
                                         UrlPosition.OTHER))
  # Tracks when an entry is added and modified.
  added = db.DateTimeProperty(required=False, auto_now_add=True)
def Create(bug, position=UrlPosition.OTHER):
"""Stores a new URL to bug mapping into the Datastore.
Args:
bug: The bug to index. (bug.Bug)
position: The position the url was found with the bug details from the
provider. (UrlPosition)
Returns:
The newly created mapping or None if no mapping exists. (UrlBugMap or None)
Raises:
CreateError: Raised if something goes wrong while creating a new bug.
"""
bug_id = 'no-id'
try:
bug_id = bug.key().id()
url = bug.url
if not url:
return None
url_components = PrepareUrl(url)
mapping = UrlBugMap(bug=bug,
url=url_components['url'],
hostname=url_components['hostname'],
path=url_components['path'],
position=position,
state=bug.state)
if bug.last_update:
mapping.last_update = bug.last_update
logging.info('Adding mapping for bug: %s', bug_id)
logging.info('URL: %s', url_components['url'])
logging.info('Hostname: %s', url_components['hostname'])
logging.info('Path: %s', url_components['path'])
mapping.put()
except Exception, e:
logging.error('url_bug_map.Create: Exception while creating mapping for '
'bug [id=%s]: %s' % (bug_id, e))
raise CreateError('Failed to create mapping for bug [id=%s].\n' % bug_id)
return mapping
def Delete(id):
  """Deletes the mapping specified by the given id.

  A non-existent id is silently ignored.

  Args:
    id: The id of the mapping to retrieve. (integer)
  """
  target = UrlBugMap.get_by_id(id)
  if target is not None:
    target.delete()
def Get(id):
"""Returns the mapping specified by the given id.
Args:
id: The id of the mapping to retrieve. (integer)
Returns:
Returns the UrlBugMap model object. (UrlBugMap)
Raises:
InvalidIdError: Raised if the id does not match a stored mapping.
"""
try:
mapping = UrlBugMap.get_by_id(id)
if not mapping:
raise InvalidIdError
except (db.Error, InvalidIdError), e:
logging.error('url_bug_map.Get: Exception while retrieving mapping [id=%s]'
': %s' % (id, e))
raise InvalidIdError
return mapping
def PrepareUrl(url):
  """Return the processed full url and its hostname and path.

  The url is processed to extract its hostname and path. Then all three are
  ASCII encoded and truncated to 500 characters to fit within a
  db.StringProperty; for indexing purposes TextProperty is not indexable. The
  function returns a dictionary of three processed values.

  Args:
    url: The url to process. (string)

  Returns:
    A dictionary containing values for processed url, hostname, and path. If
    the url doesn't contain one of the components it will be assigned the
    empty string. ({url: string, hostname: string, path: string})
  """
  # Successful NormalizeUrl already encodes each entry to ascii.
  # TODO (jason.stredwick): Determine the necessity of EncodeToAscii and
  # the potential of defaulting to not encoding and encoding upon
  # exception as was done prior.
  urlnorm = url_util.NormalizeUrl(url)
  # 500 character restriction; StringProperty limit.
  if urlnorm:
    # Guard before slicing: previously 'urlnorm.hostname[:500] or '''
    # raised TypeError when the component was None.
    return {'url': urlnorm.url[:500],
            'hostname': (urlnorm.hostname or '')[:500],
            'path': (urlnorm.path or '')[:500]}
  # logging.error, not logging.exception: there is no active exception in
  # this branch, so logging.exception would emit a bogus 'NoneType: None'
  # traceback.
  logging.error('URL normalization failed, converting to ASCII: %s', url)
  return {'url': encoding_util.EncodeToAscii(url)[:500],
          'hostname': '',
          'path': ''}
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Retrieve bugs based on url from mapping functionality.
The retrieval returns the following data
[[url, [bug*]]+]
url associated with a set of bugs. The url can be a full url, domain, domain +
path, etc.
bug is the entire details of a given bug.
The data returned will be a list containing all the given urls and
componentized versions of those urls. Each url will be broken into the
following:
full url, url_domain + url_path, url_domain
Each component will contain all the bugs that contain those components.
Attributes:
_MAX_RESULTS_CAP: Private static constant used used to cap the amount of
results a clients can request.
"""
__author__ = ('alexto@google.com (Alexis O. Torres)',
'jason.stredwick@gmail.com (Jason Stredwick)')
import logging
import re
import json
from google.appengine.ext import db
from bugs import kind
from bugs.models.url_bug_map import url_bug_map
from bugs.models.bugs import bug
from util import model_to_dict
_MAX_RESULTS_CAP = 500
class Error(Exception):
  """Raised when bug references cannot be resolved while mapping urls."""
  pass
def GetBugs(urls, limit=_MAX_RESULTS_CAP):
"""Returns a list of objects containing the mapping of url to bugs.
TODO (jason.stredwick): Change the URL_BUG_MAP kind to isolate the break
down of the url into components into a single result for a given url.
Args:
urls: A list or urls used to retrieve bugs. ([string])
limit: The max number of results to fetch. (integer)
Returns:
An object. ([{url: string, [kind.Kind.BUG]}])
Raises:
Error: Raised if an error occurs accessing bug references.
"""
if limit > _MAX_RESULTS_CAP:
limit = _MAX_RESULTS_CAP
results = []
# For each url create a relevance mapping to related bugs.
for url in urls:
url_components = url_bug_map.PrepareUrl(url)
results_dict = {} # Track which bugs have already been added.
queries = GetQueriesForUrl(url_components)
for (key, query) in queries:
if not query:
results.append({'url': key, 'bugs': []})
continue
mappings = query.fetch(limit)
if not mappings:
results.append({'url': key, 'bugs': []})
continue
result = []
keys = []
for mapping in mappings:
try:
bug_key = mapping.bug.key()
id = bug_key.id()
except Exception, e:
raise Error(e)
if id in results_dict:
continue
results_dict[id] = True
keys.append(bug_key)
if keys:
try:
result = db.get(keys)
except Exception, e:
raise Error(e)
result = [model_to_dict.ModelToDict(r) for r in result if r]
for r in result:
r['kind'] = kind.Kind.BUG
results.append({'url': key, 'bugs': result})
return results
def GetQueriesForUrl(url_components):
  """Retrieves a list of queries to try for a given URL.

  Each query represents a possible way to find matches, ordered from most to
  least relevant:
    1. Full URL match (most relevant).
    2. Hostname + path match.
    3. Hostname match (least relevant).

  Args:
    url_components: NormalizUrlResult object.

  Returns:
    A list containing (key, Query) pairs; entries whose query is None are
    filtered out before returning, matching the original behavior.
  """
  url = url_components['url']
  hostname = url_components['hostname']
  path = url_components['path']
  # TODO (jasonstredwick): The original code contained a disabled special
  # case for urls that are only a schemeless hostname + path; figure out its
  # purpose and reinstate if necessary.
  candidates = [(url, url_bug_map.UrlBugMap.all().filter('url = ', url))]
  if path:
    candidates.append((hostname + path,
                       url_bug_map.UrlBugMap.all()
                       .filter('hostname = ', hostname)
                       .filter('path = ', path)))
  else:
    # No path: a placeholder with a None query, dropped by the filter below.
    candidates.append((hostname + path, None))
  candidates.append((hostname,
                     url_bug_map.UrlBugMap.all()
                     .filter('hostname = ', hostname)))
  # TODO (jason.stredwick): Add back in state filtering later. It requires
  # the passing of filter data with the request.
  return [(k, q.order('-last_update')) for (k, q) in candidates if q]
| Python |
#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests testing.chronos.bite.server.crawlers.issuetracker_crawler
TODO(alexto): Aadd more unit tests to exercise issuetracker_crawler handler.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from .pyglib import app
from .testing.pybase import googletest
from crawlers import issuetracker_crawler
class IssueTrackerCrawlerTest(googletest.TestCase):
  """Sanity checks for the issuetracker_crawler module."""

  def testImports(self):
    """Verifies the module under test and its dependencies import cleanly.

    Reaching this body at all means every import above succeeded, so
    there is nothing further to assert.
    """
    pass
def main(unused_):
  """App entry point; hands control to the googletest runner."""
  googletest.main()

if __name__ == '__main__':
  app.run()
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities used by crawlers."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import logging
from os import environ
import gdata
import gdata.client
import gdata.projecthosting
import gdata.projecthosting.client
from google.appengine.ext import deferred
from google.appengine.runtime import DeadlineExceededError
from models import bugs
from models import bugs_util
from models import screenshots
from models import test_cycle
from models import test_cycle_user
from models import url_bug_map
from utils import target_element_util
from utils import screenshots_util
from utils import url_util
# Maximum number of times a task should be retried before giving up.
_MAX_RETRIES = 5

# Maximum length of an issue summary to store.
SUMMARY_LIMIT = 150
class BugCrawlerError(Exception):
  """Generic error thrown when something goes wrong while crawling bugs."""
  pass
def ExtractIssueTrackerBugId(issue):
  """Returns the bug id from a GData bug object.

  The entry's id is an Atom URL whose final path segment is the bug id.

  Args:
    issue: GData issue entry exposing an ``id.text`` attribute.

  Returns:
    The last '/'-separated segment of the issue's id, as a string.
  """
  return issue.id.text.rsplit('/', 1)[-1]
def SpawnDetailsCrawlersIssueTracker(recent_issues, project_name,
                                     skip_recent_check=False):
  """Queues the tasks to do the actual crawling for recent updates.

  Args:
    recent_issues: List of dicts with 'id' and 'updated' keys describing
        recently changed issues.
    project_name: Name of the issue tracker project being crawled.
    skip_recent_check: If True, queue a crawl even when the stored bug's
        last_update matches the feed's updated timestamp.
  """
  count = 0
  try:
    for issue in recent_issues:
      bug_id = issue['id']
      logging.info('Adding crawler to the queue for issue_id %s, project: %s.',
                   bug_id, project_name)
      # Some feed ids carry a trailing '/suffix'; keep only the leading id.
      end = bug_id.find('/')
      if end > 0:
        bug_id = bug_id[0:end]
      bug = bugs.GetBug(bug_id=bug_id, project=project_name,
                        provider=bugs_util.Provider.ISSUETRACKER)
      if bug:
        if not skip_recent_check and bug.last_update == issue['updated']:
          logging.info('Bug %s is up-to-date.', bug.key().id_or_name())
          count += 1
          continue
        else:
          logging.info('Bug %s needs to be updated.', bug.key().id_or_name())
      else:
        logging.info('Bug %s seems to be a new issue.', bug_id)
      deferred.defer(ExtractDetailsCrawlerIssueTracker, project_name, bug_id,
                     _queue='find-bugs-queue')
      count += 1
  except DeadlineExceededError:
    # Out of time: hand the unprocessed tail off to a fresh task.
    remaining = recent_issues[count:]
    deferred.defer(SpawnDetailsCrawlersIssueTracker, remaining, project_name)
    # Bug fix: the original constructed a deferred.PermanentTaskFailure here
    # without raising or logging it, so the message was silently discarded.
    # The remaining work has already been re-deferred, so log and let this
    # task finish normally rather than be retried.
    logging.info(
        'Deadline exceeded, started a new SpawnDetailsCrawler'
        ' for the remaining %d urls.', len(remaining))
    return
def ExtractDetailsCrawlerIssueTracker(project_name, bug_id):
  """Extract useful information for a given bug.

  Fetches the issue entry and its comments, mines both for URLs and a
  target element, then queues a task to store the aggregated data.

  Args:
    project_name: Name of the issue tracker project.
    bug_id: Id of the bug to scrape.

  Raises:
    BugCrawlerError: On a non-403 request error while retries remain.
    deferred.PermanentTaskFailure: When retries are exhausted or the
        issue feed comes back empty.
  """
  logging.debug('Scraping details for bug %s in project %s.',
                bug_id, project_name)
  phclient = gdata.projecthosting.client.ProjectHostingClient()
  try:
    query = gdata.projecthosting.client.Query(issue_id=bug_id)
    feed = phclient.get_issues(project_name, query=query)
  except gdata.client.RequestError as e:
    if ('HTTP_X_APPENGINE_TASKRETRYCOUNT' in environ and
        int(environ['HTTP_X_APPENGINE_TASKRETRYCOUNT']) < _MAX_RETRIES):
      if e.status == 403:  # Skip 403 (Unautorized)errors.
        logging.info('Unautorized to access this issue, skipping: %s, %s',
                     bug_id, project_name)
        # Nuke cache data for private bugs.
        url_bug_map.DeleteBugAndMappings(
            bug_id, project_name, bugs_util.Provider.ISSUETRACKER)
        return
      else:
        raise BugCrawlerError(
            'Error while trying to get details for %s. Error %s' %
            (str(bug_id), str(e)))
    else:
      raise deferred.PermanentTaskFailure(
          'Error hit too many times, aborting '
          'extracting details for bug %s on project %s. Error: %s' %
          (str(bug_id), str(project_name), str(e)))
  if not feed or not feed.entry:
    # Bug fix: the original passed bug_id as a second constructor argument
    # ('...bug %s', bug_id), which is not interpolated into the exception
    # message; format it explicitly instead.
    raise deferred.PermanentTaskFailure(
        'Failed to fetch full details for bug %s' % bug_id)
  entry = feed.entry[0]
  # Collect (url, position) pairs from title, body, and comments.
  urls = []
  if entry.title.text:
    urls = [(u, url_bug_map.UrlPosition.TITLE)
            for u in url_util.ExtractUrls(entry.title.text)]
  if entry.content.text:
    urls.extend([(u, url_bug_map.UrlPosition.MAIN)
                 for u in url_util.ExtractUrls(entry.content.text)])
  comments = GetComments(project_name, bug_id, phclient)
  comments_text = GetTextInComments(comments)
  if comments_text:
    urls.extend([(u, url_bug_map.UrlPosition.COMMENTS)
                 for u in url_util.ExtractUrls(comments_text)])
  last_updater = GetLastUpdater(comments, FindAuthor(entry))
  if not urls:
    logging.info('Nothing to do, no URLs found for bug %s in project %s.',
                 bug_id, project_name)
    return
  logging.debug('URLs found: %s', str(urls))
  # Comments take precedence over the bug body for the target element.
  target = (target_element_util.ExtractTargetElement(comments_text) or
            target_element_util.ExtractTargetElement(entry.content.text))
  logging.debug('Target information extracted for bug: %s, '
                'target_element: %s', bug_id, target)
  if entry.status and entry.status.text:  # Status is None sometimes.
    status = entry.status.text
  else:
    logging.warning('Status was not found, setting to unknown.')
    status = 'unknown'
  QueueStoreBug(bug_id=bug_id,
                title=entry.title.text,
                summary=entry.content.text[:SUMMARY_LIMIT],
                priority=FindPriority(entry),
                project_name=project_name,
                provider=bugs_util.Provider.ISSUETRACKER,
                # Special case status since it can be None.
                status=status,
                author=FindAuthor(entry),
                details_link=entry.GetAlternateLink().href,
                reported_on=entry.published.text,
                last_update=entry.updated.text,
                last_updater=last_updater,
                target_element=target,
                urls=urls)
def GetComments(project_name, bug_id, phclient=None):
  """Fetches the comments for a specified issue.

  Args:
    project_name: The name of the project (ie chromium)
    bug_id: The ID of the bug to fetch comments for.
    phclient: Project Hosting client to use; a fresh one is created
        when omitted.

  Returns:
    A list of CommentEntry instances; empty when the fetch fails.
  """
  # Comments live in a separate feed and must be fetched on their own.
  if not phclient:
    phclient = gdata.projecthosting.client.ProjectHostingClient()
  try:
    feed = phclient.get_comments(project_name, bug_id)
    return feed.entry
  except gdata.client.RequestError as e:
    logging.exception('Error while getting the comments for %s. Error %s',
                      bug_id, e)
    return []
def GetTextInComments(comments):
  """Joins all non-empty comment bodies into one space-separated string.

  Args:
    comments: A list of CommentEntry instances.

  Returns:
    A single string containing every non-empty comment body.
  """
  pieces = []
  for comment in comments:
    body = comment.content.text
    if body:
      pieces.append(body)
  return ' '.join(pieces)
def GetLastUpdater(comments, author):
  """Get the last author to update this bug.

  Args:
    comments: A list of CommentEntry instances.
    author: The default last_updater if one isn't found.

  Returns:
    A string containing the alias of the last updater for this bug.
  """
  # Walk from the newest comment backwards; the first one carrying an
  # author is, by definition, the most recent updater.
  for comment in reversed(comments):
    if comment.author:
      return comment.author[0].name.text
  return author
def FindPriority(bug_entry):
  """Finds and returns the priority of a provided bug entry.

  Scans the entry's labels for one of the form 'Pri-N'
  (case-insensitive) and returns the text after the dash.

  Args:
    bug_entry: The provided bug, a IssueEntry instance.

  Returns:
    A string containing the priority of the bug ('1', '2', ...), or ''
    when no priority label is present.
  """
  # Scan backwards so the last matching label wins, as in a forward
  # overwrite loop.
  for label in reversed(bug_entry.label):
    if label.text.lower().startswith('pri-'):
      return label.text[4:]
  return ''
def FindAuthor(bug_entry):
  """Returns the alias of the bug entry's first author, or '' if none."""
  if not bug_entry.author:
    return ''
  return bug_entry.author[0].name.text
def QueueStoreBug(bug_id, title, summary, priority,
                  project_name, provider, status, author,
                  details_link, reported_on, last_update,
                  last_updater, target_element, urls, recording_link='',
                  cycle_id=None, expected=None, result=None, author_id='',
                  screenshot=None):
  """Adds a task to updates or create a Bug.

  Thin wrapper around StoreBug: forwards every argument unchanged and
  runs the store on the 'store-bug-queue' task queue. See StoreBug for
  parameter semantics.
  """
  deferred.defer(StoreBug,
                 bug_id=bug_id,
                 title=title,
                 summary=summary,
                 priority=priority,
                 project_name=project_name,
                 provider=provider,
                 status=status,
                 author=author,
                 author_id=author_id,
                 details_link=details_link,
                 reported_on=reported_on,
                 last_update=last_update,
                 last_updater=last_updater,
                 target_element=target_element,
                 urls=urls,
                 recording_link=recording_link,
                 cycle_id=cycle_id,
                 expected=expected,
                 result=result,
                 screenshot=screenshot,
                 _queue='store-bug-queue')
def StoreBug(bug_id, title, summary, priority, project_name, provider,
             status, author, details_link, reported_on, last_update,
             last_updater, target_element='', screenshot=None, urls=None,
             recording_link='', cycle_id=None, expected=None, result=None,
             author_id='', base_url=''):
  """Updates or create a Bug and rebuilds its URL mappings.

  Args:
    bug_id: Provider-local id of the bug.
    title: Bug title; mined for URLs when urls is empty.
    summary: Bug summary text.
    priority: Priority string, e.g. '1'.
    project_name: Name of the project the bug belongs to.
    provider: bugs_util.Provider value identifying the source system.
    status: Current bug status string.
    author: Alias of the reporter.
    details_link: URL to the bug's details page.
    reported_on: Creation timestamp string.
    last_update: Last-modified timestamp string.
    last_updater: Alias of the last person to update the bug.
    target_element: Serialized target element, if any.
    screenshot: Optional base64-encoded PNG screenshot data.
    urls: Optional list of (url, UrlPosition) tuples; derived from the
        title/summary/expected/result text when empty.
    recording_link: Optional link to a recording of the bug.
    cycle_id: Optional test cycle id to associate the bug with.
    expected: Expected behavior text.
    result: Actual behavior text.
    author_id: Provider-local id of the reporter.
    base_url: Base request URL used to build the screenshot retrieval
        link; defaults to '' for queue-driven callers with no request.
  """
  screenshot_link = ''
  if screenshot:
    # Store the screenshot data and get the link.
    new_screenshot = screenshots.Add(
        data=screenshots_util.DecodeBase64PNG(screenshot),
        # Bug fix: the original passed the undefined name 'project' here,
        # raising NameError whenever a screenshot was supplied.
        source=provider, project=project_name)
    # Bug fix: the original referenced self.request.url, which does not
    # exist in this module-level function; callers may now supply the
    # request URL via the backward-compatible base_url parameter.
    screenshot_link = screenshots_util.RetrievalUrl(
        base_url, new_screenshot.key().id())
  # Bug fix: 'cycle' was only assigned inside the branch below, causing
  # UnboundLocalError later whenever cycle_id was not provided.
  cycle = None
  if cycle_id:
    cycle = test_cycle.AddTestCycle(provider, project_name, cycle_id)
  if not urls:
    urls = [(u, url_bug_map.UrlPosition.TITLE)
            for u in url_util.ExtractUrls(title)]
    expected = expected or ''
    result = result or ''
    text = summary + ' ' + expected + ' ' + result
    urls.extend([(u, url_bug_map.UrlPosition.TITLE)
                 for u in url_util.ExtractUrls(text)])
    logging.info(urls)
  bug = bugs.Store(
      bug_id=str(bug_id),
      title=title,
      summary=summary,
      priority=priority,
      project=project_name,
      provider=provider,
      status=status,
      author=author,
      author_id=author_id,
      details_link=details_link,
      reported_on=reported_on,
      last_update=last_update,
      last_updater=last_updater,
      target_element=target_element,
      screenshot=screenshot_link,
      recording_link=recording_link,
      cycle=cycle,
      expected=expected,
      result=result)
  if cycle:
    test_cycle_user.AddTestCycleUser(author, cycle)
  # TODO(alexto): Do the deletion first in a separate queue, then
  # add the bug-URL mappings to avoid timeouts. For now, this works
  # since the timeout causes the task to re-execute.
  logging.debug('Deleting all existing bug mappings')
  deleted = url_bug_map.DeleteAllMappingsForBug(bug)
  logging.debug('Mappings deleted: %d', deleted)
  # Idiom fix: a plain loop replaces the original list comprehension that
  # was executed only for its deferred.defer side effects.
  for (url, position) in urls:
    deferred.defer(UpdateUrlBugMappings,
                   bug_key=bug.key().id(),
                   url=url,
                   position=position,
                   _queue='urls-map-queue')
def UpdateUrlBugMappings(bug_key, url, position):
  """Updates or creates a Bug-URL mapping for the bug with the given key."""
  bug = bugs.GetBugByKey(bug_key)
  url_bug_map.StoreUrlBugMapping(target_url=url,
                                 bug=bug,
                                 position=position)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Crawls bugs on a project.
Called periodically to fetch new bugs on a project or do a full
bug DB re-scan.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
# Disable 'Import not at top of file' lint error.
# pylint: disable-msg=C6204
try:
import auto_import_fixer
except ImportError:
pass # This will fail on unittest, OK to pass.
import logging
import re
import sys
import webapp2
import gdata
import gdata.client
import gdata.projecthosting
import gdata.projecthosting.client
from google.appengine.api import memcache
from google.appengine.api.labs import taskqueue
from google.appengine.ext import deferred
from crawlers import crawler_util
from common.handlers import base
from models import bugs_util
from models import crawl_state
# Regex to extract issue Ids from a bulk update operation.
_ISSUES_FROM_BULK_UPDATE_REGEX = re.compile(
'issue (\d+):')
class RecrawlProjectWorker(base.BaseHandler):
  """Worker handler to crawl all bugs in a project.

  Walks the project's issue feed page by page (25 at a time), queuing a
  details crawler for each page and re-enqueuing itself until the feed
  is exhausted. Progress is persisted via crawl_state.
  """

  def get(self):
    """Redirect get() request to post() to facilitate testing."""
    self.post()

  def post(self):
    """Starts crawling."""
    project_name = self.GetRequiredParameter('project_name')
    start_index = self.GetOptionalParameter('start_index', None)
    if not start_index:
      # Resume from wherever the previous crawl of this project stopped.
      last = crawl_state.GetLastCrawlResults(bugs_util.Provider.ISSUETRACKER,
                                             project_name)
      start_index = last.end_index
    else:
      start_index = int(start_index)
    max_results = 25
    query = gdata.projecthosting.client.Query(
        start_index=start_index, max_results=max_results)
    phclient = gdata.projecthosting.client.ProjectHostingClient()
    try:
      issues = phclient.get_issues(project_name, query=query)
    except gdata.client.Error, e:
      retries = int(self.request.headers.get('X-AppEngine-TaskRetryCount', 0))
      # NOTE(review): the guard is retries < 4 while the log message says
      # "less than 5" -- confirm which bound is intended.
      if retries < 4:
        logging.warning('Retry crawling, retries is less than 5, '
                        'current retries: %s, start_index: %d',
                        retries, start_index)
        raise  # Re-raise, so that the task is re-tried.
      else:
        # Skip current, try at start_index + 1.
        logging.warning('Skipping current index, start_index: %d', start_index)
        taskqueue.add(url='/tasks/crawl/issuetracker/recrawl_project',
                      params={'project_name': project_name,
                              'start_index': start_index + 1})
        return
    (new_updates, total, unused_seen) = GetNewUpdates(issues, True)
    if not new_updates:
      # Record a zero-width crawl window so the next run resumes here.
      crawl_state.StoreCrawlResults(
          bugs_util.Provider.ISSUETRACKER, project_name,
          start_index, start_index, 0)
      self.response.out.write('Done.')
      return  # Reached the end of the crawl.
    deferred.defer(crawler_util.SpawnDetailsCrawlersIssueTracker,
                   new_updates, project_name, True)
    crawl_state.StoreCrawlResults(
        bugs_util.Provider.ISSUETRACKER, project_name, start_index,
        start_index + total, len(new_updates))
    # Don't overwhelm the provider, throttle to once per second.
    taskqueue.add(url='/tasks/crawl/issuetracker/recrawl_project',
                  params={'project_name': project_name},
                  countdown=1)
    self.response.out.write('start_index: %d, end_index: %d, total: %d'
                            % (start_index,
                               start_index + total,
                               total))
class CrawlRecentUpdatesWorker(base.BaseHandler):
  """Worker handler retrieve recent bug changes.

  Fetches the project's issue-updates feed, halving the requested size
  on each parse failure, then queues detail crawlers for the new items.
  """

  def get(self):
    """Redirect get() request to post() to facilitate testing."""
    self.post()

  def post(self):
    """Starts crawling for recent updates."""
    project_name = self.GetRequiredParameter('project_name')
    max_results = self.GetOptionalParameter('max_results', 1000)
    gdclient = gdata.client.GDClient()
    issues = None
    count = 1
    # Retry with progressively smaller feeds until one parses or the
    # requested size bottoms out at zero.
    while not issues:
      try:
        curr_max_results = int(int(max_results)/count)
        logging.debug('Fetching %s issues.', curr_max_results)
        issues = gdclient.get_feed(
            GetUpdatesUrl(project_name, curr_max_results))
      # NOTE(review): presumably oversized feeds surface as SyntaxError
      # from the atom parser -- confirm no other error types need the
      # same halving treatment.
      except SyntaxError, e:
        count *= 2
        new_max_results = int(int(max_results)/count)
        logging.error(
            'Failed to fetch issues feed. Try smaller mount: %d. Error: %s',
            new_max_results, str(e))
        if new_max_results == 0:
          logging.error('Max results reached 0, terminating.')
          return
    (new_updates, total, seen) = GetNewUpdates(issues)
    deferred.defer(crawler_util.SpawnDetailsCrawlersIssueTracker,
                   new_updates, project_name)
    end_msg = ('Done crawling for updates.'
               'Total updates: %d, already seen: %d'
               %(total, seen))
    logging.debug(end_msg)
    self.response.out.write(end_msg)
def GetNewUpdates(issues, skip_recent_check=False):
  """Extract new issue from the issues feed.

  Bulk-edit entries are expanded into their individual issue ids.

  Args:
    issues: Parsed issue-updates feed with ``id`` and ``entry`` fields.
    skip_recent_check: When True, include entries even if they were
        already seen recently.

  Returns:
    A (results, total, seen) tuple: results is a list of dicts with 'id'
    and 'updated' keys, total counts all updates found, and seen counts
    those skipped as recently processed.
  """
  namespace = '%s/' % issues.id.text
  results = []
  total = 0
  seen = 0
  for entry in issues.entry:
    entry_id = entry.id.text
    if entry_id.find('bulk') > 0:
      # One feed entry can describe a bulk edit touching many issues.
      content_text = entry.content.text
      updated = entry.updated.text
      logging.debug('Found a bulk operation, updated: %s content: %s.',
                    updated, content_text)
      issue_ids = _ISSUES_FROM_BULK_UPDATE_REGEX.findall(content_text)
      logging.debug('Issue Ids found: %s', issue_ids)
      total += len(issue_ids)
      fresh = []
      for curr_id in issue_ids:
        if skip_recent_check or not SeenRecently(
            '%s_%s_%s' % (namespace, curr_id, updated)):
          fresh.append({'id': curr_id, 'updated': updated})
      seen += len(issue_ids) - len(fresh)
      results.extend(fresh)
    else:
      total += 1
      if skip_recent_check or not SeenRecently(entry_id):
        results.append({'id': entry_id.replace(namespace, ''),
                        'updated': entry.updated.text})
      else:
        seen += 1
  return (results, total, seen)
def SeenRecently(text_id):
  """Keeps track if a bug has already been seen, if so, returns True.

  Each processed update is marked with a memcache entry that expires
  after five days; until then, repeat sightings report True.
  """
  key_name = 'IssuesSeenRecently_%s' % text_id
  if memcache.get(key_name):
    logging.debug('Issue has been seen recently. ID: %s', text_id)
    return True
  logging.debug('Recent issue. ID: %s', text_id)
  memcache.set(key_name, True, 432000)  # Expires in 5 days (in seconds).
  return False
def GetUpdatesUrl(project_name, max_results=1000):
  """Construct the URL to the issues updates for the given project."""
  base = 'http://code.google.com/feeds/p/%s/issueupdates/basic' % project_name
  return '%s?max-results=%d' % (base, max_results)
# WSGI application mapping the crawl task URLs to their worker handlers.
app = webapp2.WSGIApplication(
    [('/tasks/crawl/issuetracker/crawl_recent_updates',
      CrawlRecentUpdatesWorker),
     ('/tasks/crawl/issuetracker/recrawl_project',
      RecrawlProjectWorker)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite home handler."""
__author__ = 'phu@google.com (Po Hu)'
import os
import sys
try:
from google.appengine.api import users
from common.handlers import base
import webapp2
except ImportError:
file_dir = os.path.dirname(__file__)
project_dir = os.path.join(file_dir, '..')
sys.path.append(project_dir)
from google.appengine.api import users
from common.handlers import base
import webapp2
class Error(Exception):
  """Base error type for this module's handlers."""
  pass
class TestSuiteHandler(base.BaseHandler):
  """Test Suite Handler: renders the suite test page for signed-in users."""

  def get(self):
    """Serves suite_test.html; anonymous users are sent to login."""
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      # Bug fix: redirect() does not halt execution; without this return
      # the template was still rendered for anonymous users.
      return
    self.RenderTemplate('suite_test.html', {})
class TestAddTestsHandler(base.BaseHandler):
  """Test add tests Handler: renders the add-tests page for signed-in users."""

  def get(self):
    """Serves add_tests.html; anonymous users are sent to login."""
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      # Bug fix: redirect() does not halt execution; without this return
      # the template was still rendered for anonymous users.
      return
    self.RenderTemplate('add_tests.html', {})
# WSGI application mapping the manual-test pages to their handlers.
app = webapp2.WSGIApplication(
    [('/testing/suite_test', TestSuiteHandler),
     ('/testing/add_tests', TestAddTestsHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bug reporting handlers."""
__author__ = 'alexis.torres@gmail.com (Alexis O. Torres)'
import logging
import sys
import urllib
import webapp2
from crawlers import crawler_util
from common.handlers import base
from models import bugs
from models import bugs_util
from models import screenshots
from utils import screenshots_util
class ImportBug(base.BaseHandler):
  """Handles adding bugs by an admin."""

  def post(self):
    """Queues storage of a bug built from the posted form fields.

    Every parameter below is required; GetRequiredParameter rejects the
    request when one is missing.
    """
    # Cleanup: the original fetched 'title', 'project', and 'provider'
    # twice each; the duplicates are removed.
    bug_id = self.GetRequiredParameter('id')
    title = self.GetRequiredParameter('title')
    cycle_id = self.GetRequiredParameter('test_cycle')
    project = self.GetRequiredParameter('project')
    provider = self.GetRequiredParameter('provider')
    # 'version' is validated as a required field but not currently stored.
    version = self.GetRequiredParameter('version')
    summary = self.GetRequiredParameter('summary')
    priority = self.GetRequiredParameter('priority')
    author = self.GetRequiredParameter('author')
    author_id = self.GetRequiredParameter('author_id')
    status = self.GetRequiredParameter('status')
    reported_on = self.GetRequiredParameter('reported_on')
    last_update = self.GetRequiredParameter('last_update')
    last_updater = self.GetRequiredParameter('last_updater')
    expected = self.GetRequiredParameter('expected')
    result = self.GetRequiredParameter('result')
    crawler_util.QueueStoreBug(
        bug_id=bug_id,
        title=title,
        summary=summary,
        priority=priority,
        project_name=project,
        provider=provider,
        status=status,
        author=author,
        # Bug fix: author_id was fetched as required but never forwarded.
        author_id=author_id,
        details_link='http://www.utest.com/bugs/%s' % bug_id,
        reported_on=reported_on,
        last_update=last_update,
        # Bug fix: last_updater was fetched as required but the original
        # hard-coded author here.
        last_updater=last_updater,
        target_element=None,
        urls=None,
        cycle_id=cycle_id,
        expected=expected,
        result=result)
# WSGI application exposing the admin bug-import endpoint.
app = webapp2.WSGIApplication([('/admin/bugs', ImportBug)],
                              debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tester management."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import logging
import webapp2
from common.handlers import base
from models import test_cycle_user
class TestersHandler(base.BaseHandler):
  """Handles requests to get known testers."""

  def get(self):
    """Writes the full tester list as a JSON response."""
    testers = test_cycle_user.GetTesters()
    payload = test_cycle_user.JsonEncode(testers)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(payload)
class TestersCycleHandler(base.BaseHandler):
  """Handles requests to get testers in a given cycle."""

  def get(self, cycle_key):
    """Writes the testers of the given test cycle as a JSON response."""
    testers = test_cycle_user.FetchTestersForCycle(cycle_key)
    payload = test_cycle_user.JsonEncode(testers)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(payload)
# WSGI routes for tester listings; \w+ captures the test-cycle key.
# Idiom fix: raw strings for the route regexes avoid the
# invalid-escape-sequence warning for '\w'.
app = webapp2.WSGIApplication(
    [('/testers', TestersHandler),
     ('/testers/all', TestersHandler),
     (r'/cycle/(\w+)/testers', TestersCycleHandler),
     (r'/cycle/(\w+)/testers/all', TestersCycleHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite suite handler."""
__author__ = 'phu@google.com (Po Hu)'
import datetime
import logging
import webapp2
from google.appengine.api import users
from common.handlers import base
from handlers import deferred_util
from models import bite_suite
from models import suite_test_map
from utils import basic_util
from utils import bite_constants
class Error(Exception):
  """Base error type for the suite handlers in this module."""
  pass
class LabelInfoFormatError(Error):
  """Raised when posted label info is not in the expected JSON format."""
class ShowSuiteDetail(base.BaseHandler):
  """Shows the suite details page."""

  def get(self):
    """Delegates to post() so both verbs render the page."""
    self.post()

  def post(self):
    """Shows the Bite suites info."""
    suite_name = self.GetOptionalParameter('suiteName', '')
    project_name = self.GetOptionalParameter('projectName', '')
    page_data = {'nav': bite_constants.NAV_DEFAULT_DATA}
    if suite_name and project_name:
      # Instruct the client to load the requested suite on startup.
      page_data['commands'] = [{
          'command': 'loadProject',
          'suiteName': suite_name,
          'projectName': project_name,
      }]
    self.RenderTemplate('set.html', {'common': page_data})
class ShowSuitesHandler(base.BaseHandler):
  """The handler for showing the Bite suites."""

  def get(self):
    """Delegates to post() to facilitate testing."""
    self.post()

  def _GetSuitesData(self, project_name):
    """Builds the list of suite descriptor dicts for the UI.

    Args:
      project_name: Project to restrict to; '' loads suites across all
          projects.

    Returns:
      A list of dicts describing each suite (id, title, labels, etc.).
    """
    suites_data = []
    projects = None
    # Idiom fix: pass lazy arguments to logging instead of eager string
    # concatenation, which formats even when the level is disabled.
    logging.info('The project name is : %s', project_name)
    if project_name:
      projects = [project_name]
    suites = bite_suite.LoadAllSuitesOfProjects(projects)
    for suite in suites:
      labels = [suite.parent().name]
      labels.extend(suite.labels)
      suite_data = {
          'id': str(suite.key()),
          'type': 'suite',
          'title': suite.name,
          'labels': labels,
          'icon': '/images/artifacts/testautomated.png',
          'actions': [
              {'title': 'View details',
               'operation': 'details'}],
          'props': [{'label': '# of tests', 'value': suite.tests_number}]
      }
      suites_data.append(suite_data)
    return suites_data

  def post(self):
    """Shows the Bite suites info."""
    project_name = self.GetOptionalParameter('projectName', '')
    data = self._GetSuitesData(project_name)
    self.response.out.write(
        basic_util.DumpJsonStr({'details': data}))
class LoadAllSuitesInProjectHandler(base.BaseHandler):
  """The handler for loading all Bite suites of a project."""

  def get(self):
    """Delegates to post() to facilitate testing."""
    self.post()

  def post(self):
    """Writes the name and key of every suite in the given project."""
    project_name = self.GetRequiredParameter('projectName')
    suite_info = []
    for suite in bite_suite.LoadAllSuitesOfProject(project_name):
      suite_info.append({'name': suite.name, 'key': str(suite.key())})
    self.response.out.write(
        basic_util.DumpJsonStr({'suites': suite_info}))
class DeleteTestsFromSuiteHandler(base.BaseHandler):
  """The handler for deleting tests from a suite."""

  def get(self):
    """Delegates to post() to facilitate testing."""
    self.post()

  def post(self):
    """Deletes tests associated with a suite."""
    suite_name = self.GetRequiredParameter('suiteName')
    project_name = self.GetRequiredParameter('projectName')
    suite = bite_suite.LoadSuite(suite_name, project_name)
    if not suite:
      # Guard clause: report the failure and bail out.
      error_log = 'No specified suite is available.'
      logging.error(error_log)
      self.response.out.write(error_log)
      return
    suite_test_map.DeleteTestsFromSuite(str(suite.key()))
class LoadSuiteHandler(base.BaseHandler):
  """The handler for loading a specified Bite suite."""

  def get(self):
    """Delegates to post() to facilitate testing."""
    self.post()

  def _dumpLabels(self, labels):
    """Dumps the label string.

    The label string in Json format should be:
    {'labels': ['label1', 'label2', ...]}

    Args:
      labels: A list of labels of a suite; any non-list value is treated
          as empty.

    Returns:
      A Json string of suite labels list.
    """
    if not labels or not isinstance(labels, list):
      labels = []
    return basic_util.DumpJsonStr({'labels': labels})

  def post(self):
    """Loads a specified suite and writes its attributes as JSON."""
    suite_name = self.GetOptionalParameter('suiteName', '')
    project_name = self.GetOptionalParameter('projectName', '')
    suite_key_str = self.GetOptionalParameter('suiteKey', '')
    suite = bite_suite.LoadSuite(suite_name, project_name, suite_key_str)
    if suite:
      # NOTE(review): _dumpLabels returns a JSON string that is then
      # embedded as a value inside another DumpJsonStr call below, so
      # 'labels' ends up double-encoded -- confirm clients expect that.
      labels = self._dumpLabels(suite.labels)
      version_url = suite.latest_version_url
      if version_url is None:
        version_url = ''
      self.response.out.write(
          basic_util.DumpJsonStr(
              {'suiteName': suite.name,
               'suiteKey': str(suite.key()),
               'description': suite.description,
               'projectName': suite.parent().name,
               'labels': labels,
               'token': bite_suite.GetSuiteTokens(suite),
               'startUrl': bite_suite.GetStartUrl(suite),
               'interval': bite_suite.ParseWatchdogSetting(
                   suite.watchdog_setting),
               'versionUrl': version_url,
               'emailFrom': bite_suite.GetSuiteAttribute(
                   suite, 'report_setting', 'email_from'),
               'emailTo': bite_suite.GetSuiteAttribute(
                   suite, 'report_setting', 'email_to'),
               'failureThresh': bite_suite.GetSuiteAttribute(
                   suite, 'report_setting', 'failure_thresh'),
               'testSource': suite.test_source,
               'retryTimes': suite.retry_times,
               'defaultTimeout': suite.default_timeout,
               'deleteDeadline': suite.auto_delete_deadline,
               'reminder': suite.reminder_setting}))
    else:
      error_log = 'No specified suite is available.'
      logging.error(error_log)
      self.response.out.write(error_log)
class AddSuiteHandler(base.BaseHandler):
"""The handler for adding a Bite suite."""
  def get(self):
    """Delegates to post() so GET requests also add a suite."""
    self.post()
def _parseLabelStr(self, label_str):
"""Parses the label string.
The label string in Json format should be:
{'labels': ['label1', 'label2', ...]}
Args:
label_str: A Json format string represents the labels.
Returns:
A list of labels.
Raises:
LabelInfoFormatError: An error occurred if the label info is incorrect.
"""
if not label_str:
return []
label_obj = basic_util.ParseJsonStr(label_str)
if label_obj.has_key('labels') and isinstance(label_obj['labels'], list):
return label_obj['labels']
else:
raise LabelInfoFormatError()
def _parseTestInfoStr(self, test_info_str):
"""Parses the test info string.
The test info string in Json format should be:
{'testInfoList': [{'id': xx, 'name': yy,
'automated': true}, ...]}
Args:
test_info_str: A Json format test info string.
Returns:
A list of tests.
"""
if not test_info_str:
return []
test_info_obj = basic_util.ParseJsonStr(test_info_str)
return test_info_obj['testInfoList']
def post(self):
"""Adds a specified suite."""
user = users.get_current_user()
if not user:
self.redirect(users.create_login_url(self.request.uri))
suite_name = self.GetRequiredParameter('suiteName')
project_name = self.GetRequiredParameter('projectName')
description = self.GetOptionalParameter('description', '')
tokens = self.GetOptionalParameter('tokens', '')
interval = self.GetOptionalParameter('interval', '')
label_str = self.GetOptionalParameter('labels', '')
start_url = self.GetOptionalParameter('startUrl', '')
test_source = self.GetOptionalParameter('testSource', '')
project_id = self.GetOptionalParameter('projectId', '')
public_labels_str = self.GetOptionalParameter('publicLabels', '')
configs_str = self.GetOptionalParameter('configs', '')
watchdog_setting_str = self.GetOptionalParameter('watchdogSetting', '')
latest_version_url = self.GetOptionalParameter('versionUrl', '')
report_setting_str = self.GetOptionalParameter('report', '')
retry_times = self.GetOptionalIntParameter('retryTimes', 0)
default_timeout = self.GetOptionalIntParameter(
'defaultTimeout', bite_suite.DEFAULT_SUITE_TIMEOUT)
auto_delete_deadline = self.GetOptionalIntParameter(
'deleteDeadline', bite_suite.DEFAULT_AUTO_DELETE_DEADLINE)
reminder_setting_str = self.GetOptionalParameter('reminder', '')
email_from = self.GetOptionalParameter('emailFrom', [])
email_to = self.GetOptionalParameter('emailTo', [])
failure_thresh = self.GetOptionalParameter('failureThresh', 100)
test_info_list_str = self.GetOptionalParameter('testInfo', '')
tests_num = 0
test_info_list = []
public_labels = []
if public_labels_str:
public_labels = basic_util.ParseJsonStr(public_labels_str)
if test_info_list_str:
test_info_list = self._parseTestInfoStr(test_info_list_str)
tests_num = len(test_info_list)
if interval:
watchdog_setting_str = bite_suite.GetSuiteWatchdogStr(
{}, int(interval))
if not latest_version_url:
latest_version_url = None
configs_str = bite_suite.GetSuiteConfigStr({}, tokens, start_url)
report_setting_str = bite_suite.GetSuiteReportStr(
{}, basic_util.ParseJsonStr(email_from),
basic_util.ParseJsonStr(email_to), failure_thresh)
if not bite_suite.CheckSuiteExists(suite_name, project_name):
suite = bite_suite.AddSuite(
suite_name, project_name, description,
self._parseLabelStr(label_str),
configs_str, watchdog_setting_str, latest_version_url,
report_setting_str, retry_times, default_timeout,
auto_delete_deadline, reminder_setting_str, tests_num,
test_source)
else:
suite = bite_suite.UpdateSuite(
suite_name, project_name, description,
self._parseLabelStr(label_str),
configs_str, watchdog_setting_str, latest_version_url,
report_setting_str, retry_times, default_timeout,
auto_delete_deadline, reminder_setting_str, tests_num,
test_source)
if test_info_list_str:
suite_test_map.AddTestsToSuite(
str(suite.key()), test_info_list)
# WSGI routing table for the suite management endpoints.
app = webapp2.WSGIApplication(
    [('/suite/add', AddSuiteHandler),
     ('/suite/load', LoadSuiteHandler),
     ('/suite/load_project', LoadAllSuitesInProjectHandler),
     ('/suite/delete_tests', DeleteTestsFromSuiteHandler),
     ('/suite/show_all', ShowSuitesHandler),
     ('/suite/detail', ShowSuiteDetail)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Site Compatibility handlers."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import json
import re
import sys
import webapp2
from google.appengine.api import memcache
from google.appengine.api import users
from common.handlers import base
from models.compat import admins
from models.compat import assignment
from models.compat import browser as compat_browser
from models.compat import result as compat_result
from models.compat import run as compat_run
from models.compat import run_site_map
from models.compat import run_tester_map
from models.compat import site as compat_site
from models.compat import tester
from models.compat import verification
JSON_CONTENT_TYPE = 'application/json'
# Extracts the OS, OS version, webkit version, and chrome version from the
# user agent string. User agent strings looks like this:
# Windows: Mozilla/5.0 (Windows; U; Windows NT 5.1; en-US) AppleWebKit/532.5
# (KHTML, like Gecko) Chrome/4.1.249.1064 Safari/532.5
# Linux: Mozilla/5.0 (X11; U; Linux x86_64; en-US) AppleWebKit/533.4
# (KHTML, like Gecko) Chrome/5.0.375.29 Safari/533.4
# Mac: Mozilla/5.0 (Macintosh; U; Intel Mac OS X 10_5_8; en-US)
# AppleWebKit/533.4 (KHTML, like Gecko)
# Chrome/5.0.375.38 Safari/533.4
# Raw strings so regex escapes like \( and \d are not interpreted as
# (invalid) string escape sequences.
_EXTRACT_VERSION_RE = re.compile(
    r'\((?P<platform>.*); .; (?P<platform_version>.*); (?P<locale>..-..)\) .+'
    r'webkit/(?P<webkit_version>[\d\.]+) .+ chrome/(?P<chrome_version>[\d\.]+) ')


def GetBrowserInfo(user_agent):
  """Extracts browser version information from the supplied string.

  Args:
    user_agent: User agent string.

  Returns:
    A dict with the relevant browser version information, including
    platform, platform_version, locale, webkit_version, and chrome_version,
    or None when the user agent does not match the expected Chrome format.
  """
  # The pattern is written in lowercase, so normalize the input first.
  result = _EXTRACT_VERSION_RE.search(user_agent.lower())
  if not result:
    return None
  return result.groupdict()
class SiteCompatHandler(base.BaseHandler):
  """Base handler for the Site Compatibility handlers.

  Provides shared helpers for permission checks, template user info, and
  browser-version lookup from the request's user-agent header.
  """

  def IsCurrentUserSuperAdmin(self):
    """Whether the current user is a site-wide admin."""
    return users.is_current_user_admin()

  def IsCurrentUserAnAdmin(self):
    """Whether the current user is an admin (site-wide or compat-area)."""
    return (self.IsCurrentUserSuperAdmin() or
            admins.IsAdmin(users.get_current_user().email()))

  def IsCurrentUserAnActiveTester(self):
    """Whether the current user is an active tester (admins qualify too)."""
    return (self.IsCurrentUserAnAdmin() or
            tester.IsActive(users.get_current_user().email()))

  def GetUserInfo(self):
    """Gets user information used by the django template."""
    # NOTE(review): assumes a signed-in user; get_current_user() returning
    # None would raise AttributeError here -- confirm callers require login.
    user = users.get_current_user()
    return {'email': user.email(),
            'signOut': users.create_logout_url(self.request.url),
            'isAdmin': self.IsCurrentUserAnAdmin(),
            'isTester': self.IsCurrentUserAnActiveTester()}

  def GetBrowserVersion(self):
    """Gets the BrowserVersion that matches the user-agent information.

    Returns:
      The matching BrowserVersion, or None when the user agent cannot be
      parsed by GetBrowserInfo.
    """
    browser_info = GetBrowserInfo(self.request.headers['user-agent'])
    if not browser_info:
      return None
    return assignment.GetBrowserVersion(
        platform=browser_info['platform'],
        platform_version=browser_info['platform_version'],
        webkit_version=browser_info['webkit_version'],
        chrome_version=browser_info['chrome_version'],
        locale=browser_info['locale'])
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class RunsHandler(SiteCompatHandler):
  """Handler used to manage the Site Compatibility runs."""

  def get(self):
    """Renders the runs management view."""
    context = {'view': 'runs', 'user': self.GetUserInfo()}
    self.RenderTemplate('site_compat.html', context)

  def post(self):
    """Creates a run, or updates the run named by 'id', then re-renders."""
    run_name = self.GetRequiredParameter('name')
    run_description = self.GetRequiredParameter('description')
    existing_key = self.GetOptionalParameter('id', None)
    if existing_key:
      compat_run.Update(key_name=existing_key, name=run_name,
                        description=run_description)
    else:
      assignment.AddRun(name=run_name, description=run_description)
    self.get()
class RunsVisibilityHandler(SiteCompatHandler):
  """Handler used to manage the visibility of a run."""

  def get(self):
    """Delegates GET requests to post()."""
    self.post()

  def post(self):
    """Hides or shows the run named by 'id' based on the 'hide' flag."""
    run_key = self.GetRequiredParameter('id')
    should_hide = self.GetRequiredParameter('hide').lower() == 'true'
    compat_run.SetVisibility(key_name=run_key, hidden=should_hide)
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class VerificationsHandler(SiteCompatHandler):
  """Handler used to manage the Site Compatibility verification steps."""

  def get(self):
    """Renders the verifications view."""
    context = {'view': 'verifications', 'user': self.GetUserInfo()}
    self.RenderTemplate('site_compat.html', context)

  def post(self):
    """Creates a verification, or updates the one named by 'id'."""
    fields = {'name': self.GetRequiredParameter('name'),
              'description': self.GetRequiredParameter('description'),
              'steps': self.GetRequiredParameter('steps')}
    existing_key = self.GetOptionalParameter('id')
    if existing_key:
      verification.Update(key_name=existing_key, **fields)
    else:
      verification.AddVerificationSteps(**fields)
    self.get()
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class VerificationsVisibilityHandler(SiteCompatHandler):
  """Handler used to manage the visibility of verification steps."""

  def get(self):
    """Delegates GET requests to post()."""
    self.post()

  def post(self):
    """Hides or shows the verification named by 'id' per the 'hide' flag."""
    verification_key = self.GetRequiredParameter('id')
    should_hide = self.GetRequiredParameter('hide').lower() == 'true'
    verification.SetVisibility(key_name=verification_key, hidden=should_hide)
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class SitesHandler(SiteCompatHandler):
  """Handler used to manage the Site Compatibility sites."""

  def get(self):
    """Renders the sites management view."""
    context = {'view': 'sites', 'user': self.GetUserInfo()}
    self.RenderTemplate('site_compat.html', context)

  def post(self):
    """Adds a site, or updates the URL of the site named by 'id'."""
    site_url = self.GetRequiredParameter('url')
    existing_key = self.GetOptionalParameter('id')
    if existing_key:
      compat_site.Update(key_name=existing_key, url=site_url)
    else:
      compat_site.AddSite(url=site_url)
    self.get()
class SitesVisibilityHandler(SiteCompatHandler):
  """Handler used to manage the visibility of a site."""

  def get(self):
    """Delegates GET requests to post()."""
    self.post()

  def post(self):
    """Hides or shows the site named by 'id' based on the 'hide' flag."""
    site_key = self.GetRequiredParameter('id')
    should_hide = self.GetRequiredParameter('hide').lower() == 'true'
    compat_site.SetVisibility(key_name=site_key, hidden=should_hide)
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class MappingsHandler(SiteCompatHandler):
  """Handler used to manage the Site Compatibility mappings."""

  def get(self):
    """Gets the mappings view."""
    runs = compat_run.GetRuns(source=compat_run.Sources.LOCAL)
    sites = compat_site.GetSites()
    verifications = verification.GetVerificationSteps()
    last_added = {'run': self.GetOptionalParameter('run'),
                  'site': self.GetOptionalParameter('site'),
                  'verification': self.GetOptionalParameter('verification')}
    selected_browsers = self.request.get_all('version')

    def _GetBrowserDetails(browser):
      """Builds one template row describing a BrowserVersion."""
      name = ('Chrome: %s, Webkit: %s, Locale: %s, Platform: %s' %
              (browser.chrome_version, browser.webkit_version,
               browser.user_locale, browser.platform_version))
      key = browser.key().id_or_name()
      # NOTE(review): selected_browsers holds request strings; assumes
      # id_or_name() yields comparable (string) keys -- confirm.
      return {'id': key,
              'name': name,
              'selected': key in selected_browsers}

    browsers_list = compat_browser.GetBrowserVersions(reverse=True)
    browsers = [_GetBrowserDetails(browser) for browser in browsers_list]
    self.RenderTemplate('site_compat.html',
                        {'view': 'mappings',
                         'runs': runs,
                         'sites': sites,
                         'verifications': verifications,
                         'user': self.GetUserInfo(),
                         'last_added': last_added,
                         'browsers': browsers})

  def post(self):
    """Adds a new mapping."""
    run_key_name = self.GetRequiredParameter('run')
    site_key_name = self.GetRequiredParameter('site')
    verification_key_name = self.GetRequiredParameter('verification')
    apply_to = self.GetRequiredParameter('apply')
    selected_run = compat_run.GetRunByKeyName(run_key_name)
    selected_site = compat_site.GetSiteByKeyName(site_key_name)
    selected_verification = verification.GetVerificationStepsByKeyName(
        verification_key_name)
    # Renamed from 'all', which shadowed the builtin of the same name.
    apply_to_all = False
    browsers = None
    # TODO(alexto): Change these to constants.
    if apply_to == 'futureAndPast':
      apply_to_all = True
    elif apply_to == 'futureAndSelected':
      browser_keys = self.request.get_all('version')
      if browser_keys:
        browsers = compat_browser.GetBrowserVersionsByKeyName(browser_keys)
    assignment.AddMapping(
        run=selected_run,
        site=selected_site,
        verification=selected_verification,
        apply_to_all_versions=apply_to_all,
        browser_versions=browsers)
    self.get()
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class ResultsHandler(SiteCompatHandler):
  """Handler used to view the Results submitted by a given user."""

  def get(self):
    """Renders the personal results view."""
    context = {'view': 'my_results', 'user': self.GetUserInfo()}
    self.RenderTemplate('site_compat.html', context)
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class TesterMapHandler(SiteCompatHandler):
  """Handler used to manage the user subscriptions to runs."""

  def get(self):
    """Renders the tester-run subscription view."""
    context = {'view': 'tester_map', 'user': self.GetUserInfo()}
    self.RenderTemplate('site_compat.html', context)

  def post(self):
    """Subscribes ('joinRun') or unsubscribes ('leaveRun') the current user."""
    current_user = users.get_current_user()
    join_key = self.GetOptionalParameter('joinRun', None)
    if join_key:
      run_tester_map.AddMapping(
          run=compat_run.GetRunByKeyName(join_key), user=current_user)
    else:
      leave_key = self.GetRequiredParameter('leaveRun')
      run_tester_map.RemoveMapping(
          run=compat_run.GetRunByKeyName(leave_key), user=current_user)
    self.get()
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class TesterTestsHandler(SiteCompatHandler):
  """Handler used to manage sites assigned (tests sites) to the current user."""

  def get(self):
    """Renders the current user's test assignment, if one exists."""
    current_user = users.get_current_user()
    browser = self.GetBrowserVersion()
    assigned = assignment.GetOrAssignTest(
        user=current_user, browser_version=browser)
    test_data = None
    if assigned:
      assigned_test = assigned.test
      test_data = {'key': assigned.key().name(),
                   'run_name': assigned_test.run.name,
                   'url': assigned_test.start_url,
                   'steps': assigned_test.steps}
    self.RenderTemplate('site_compat.html',
                        {'view': 'tester_test',
                         'test': test_data,
                         'user': self.GetUserInfo()})

  def post(self):
    """Records a skip, or a pass/fail result, for an assignment."""
    skipped_key = self.GetOptionalParameter('skip')
    if skipped_key:
      assignment.SkipAssignment(
          user=users.get_current_user(),
          browser_version=self.GetBrowserVersion(),
          assignment=assignment.GetAssignmentByKeyName(skipped_key))
    else:
      # 'passResult' carries the key on success; 'failResult' on failure.
      succeed = True
      key_name = self.GetOptionalParameter('passResult')
      if not key_name:
        succeed = False
        key_name = self.GetRequiredParameter('failResult')
      # Per-result fields win over the generic 'comment'/'bugs' fields.
      comment = (self.GetOptionalParameter('comment_' + key_name, None) or
                 self.GetOptionalParameter('comment', ''))
      bugs = (self.GetOptionalParameter('bugs_' + key_name, None) or
              self.GetOptionalParameter('bugs', ''))
      assignment.LogResult(
          user=users.get_current_user(),
          assignment=assignment.GetAssignmentByKeyName(key_name),
          browser_version=self.GetBrowserVersion(),
          succeed=succeed,
          comment=comment,
          bugs=bugs)
    self.get()
_NO_USER_RESPONSE = json.dumps({'user': None, 'test': None})
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class MyTestsHandler(SiteCompatHandler):
  """Handler called by the client to get the test site assigned to the user."""

  def get(self):
    """Entry point for the client (extension) to fetch test data."""
    current_user = users.get_current_user()
    if not current_user:
      payload = _NO_USER_RESPONSE
    else:
      test_data = None
      assigned = assignment.GetOrAssignTest(
          user=current_user, browser_version=self.GetBrowserVersion())
      if assigned:
        assigned_test = assigned.test
        test_data = {'test_id': assigned.key().name(),
                     'test_url': assigned_test.start_url,
                     'verification_steps': assigned_test.steps}
      payload = json.dumps({'user': current_user.email(), 'test': test_data})
    self.response.headers['Content-Type'] = JSON_CONTENT_TYPE
    self.response.out.write(payload)
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class AllResultsHandler(SiteCompatHandler):
  """Handler used to view a summary of all results submitted."""

  def get(self):
    """Renders the all-results summary page."""
    context = {'view': 'all_results', 'user': self.GetUserInfo()}
    self.RenderTemplate('site_compat.html', context)
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class StatsHandler(SiteCompatHandler):
  """Handler used to fetch stats data by the client.

  When called with scope set, the returned JSON string is the stats for the
  current user. When scope is not set, the JSON data represent the status of
  the most recent BrowserVersions.
  """

  def get(self):
    """Writes the requested stats as JSON, memcaching the top-level view."""
    if self.GetOptionalParameter('scope', None):
      result = self.GetPersonalStats()
    else:
      cache_key = 'SiteCompat_TopLevelStats'
      result = memcache.get(cache_key)
      if not result:
        result = self.GetTopLevelStats()
        memcache.set(cache_key, result, 1800)  # Cache for 30 mins.
    self.response.headers['Content-Type'] = JSON_CONTENT_TYPE
    self.response.out.write(json.dumps(result))

  def GetPersonalStats(self):
    """Gets a list of results submitted by the current user.

    Returns:
      A list of tuples (or None when the user has no results). A tuple in
      the returned list follows this format:
      (run_name, site_url, chrome_version, platform, platform_version,
       webkit_version, succeed, bugs, comment, created)
    """
    results = compat_result.GetResultsForUser(users.get_current_user())
    if not results:
      return None
    details = []
    for result in results:
      submitted_test = result.test.get()
      browser = result.browser_version
      details.append((submitted_test.run.name, submitted_test.start_url,
                      browser.chrome_version, browser.platform,
                      browser.platform_version, browser.webkit_version,
                      result.succeed, result.bugs, result.comment,
                      result.created.ctime()))
    return details

  def GetTopLevelStats(self):
    """Gets a list of results stats submitted for recent chrome versions.

    Returns:
      A list of tuples. A tuple in the returned list follows this format:
      (chrome_version, (total_results, passed, failed, remaining))
    """
    # Accumulate counts per chrome version string across BrowserVersions.
    totals = {}
    for browser_version in compat_browser.GetBrowserVersions():
      results = compat_result.GetResultsForBrowserVersion(browser_version)
      total = len(results)
      passed = len([result for result in results if result.succeed])
      failed = total - passed
      remaining = len(
          assignment.GetTestsRemainingForBrowser(browser_version))
      chrome_version = browser_version.chrome_version
      prior = totals.get(chrome_version)
      if prior:
        total += prior[0]
        passed += prior[1]
        failed += prior[2]
        remaining += prior[3]
      totals[chrome_version] = (total, passed, failed, remaining)
    return [(chrome_version, totals[chrome_version])
            for chrome_version in sorted(totals)]
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class RedirectHandler(SiteCompatHandler):
  """Handler used to capture stats about user navigation to tests."""

  def get(self):
    """Logs a navigation hit, then redirects to the assigned site."""
    assigned = assignment.GetAssignmentByKeyName(
        self.GetRequiredParameter('test_id'))
    if not assigned:
      self.response.out.write('Invalid test_id')
      return
    assignment.IncrementNavigationData(assigned)
    self.redirect(assigned.run_site_map.site.url)
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class ListHandler(SiteCompatHandler):
  """Handler to list JSON data to the site compat management pages."""

  def get(self):
    """Gets data for the list specified by the "type" parameter."""
    list_type = self.GetRequiredParameter('type')
    # Unknown types fall through and serialize as JSON null.
    payload = None
    if list_type == 'suites':
      payload = [{'id': run.key().id_or_name(),
                  'name': run.name,
                  'description': run.description}
                 for run in compat_run.GetRuns()]
    elif list_type == 'sites':
      payload = [{'id': site.key().id_or_name(),
                  'url': site.url}
                 for site in compat_site.GetSites()]
    elif list_type == 'verifications':
      payload = [{'id': steps.key().id_or_name(),
                  'name': steps.name,
                  'description': steps.description,
                  'steps': steps.steps}
                 for steps in verification.GetVerificationSteps()]
    elif list_type == 'run_mappings':
      payload = [{'id': mapping.key().id_or_name(),
                  'run': mapping.run.name,
                  'verification': mapping.verification.name,
                  'url': mapping.site.url}
                 for mapping in run_site_map.GetMappings()]
    elif list_type == 'testers':
      payload = [{'email': curr_tester.email,
                  'active': curr_tester.active}
                 for curr_tester in tester.GetTesters()]
    elif list_type == 'subscriptions':
      current_user = users.get_current_user()
      subscribed_keys = set(
          mapping.run.key().name()
          for mapping in run_tester_map.GetMappingsForTester(current_user))
      payload = [{'id': run.key().name(),
                  'name': run.name,
                  'description': run.description,
                  'isSubscribed': run.key().name() in subscribed_keys}
                 for run in compat_run.GetRuns()]
    self.response.headers['Content-Type'] = JSON_CONTENT_TYPE
    self.response.out.write(json.dumps(payload))
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class AdminsHandler(SiteCompatHandler):
  """Handler to add or remove admins to the site compat area."""

  def get(self):
    """Redirects to the post method."""
    self.post()

  def post(self):
    """Adds, removes, or activates an admin account."""
    action = self.GetRequiredParameter('action')
    email = self.GetRequiredParameter('email')
    # Unknown actions are silently ignored, matching the original chain.
    dispatch = {'add': lambda: admins.AddAdmin(email),
                'remove': lambda: admins.DeleteAdmin(email),
                'activate': lambda: admins.SetActive(email, True)}
    handler = dispatch.get(action)
    if handler:
      handler()
# Disable 'Invalid method name' lint error.
# pylint: disable-msg=C6409
class TestersHandler(SiteCompatHandler):
  """Handler to add or remove testers to the site compat area."""

  def get(self):
    """Renders the tester management view."""
    self.RenderTemplate('site_compat.html',
                        {'view': 'tester',
                         'user': self.GetUserInfo(),
                         'testers': tester.GetTesters()})

  def post(self):
    """Creates or updates a tester with the requested active flag."""
    email = self.GetRequiredParameter('email')
    is_active = self.GetRequiredParameter('active').lower() == 'true'
    tester.AddOrUpdate(email, is_active)
# WSGI routing table for the site compatibility endpoints.
app = webapp2.WSGIApplication(
    [('/get_my_compat_test', MyTestsHandler),
     ('/compat/test', TesterTestsHandler),
     ('/compat/subscriptions', TesterMapHandler),
     ('/compat/my_results', ResultsHandler),
     ('/compat/all_results', AllResultsHandler),
     ('/compat/stats', StatsHandler),
     ('/compat/runs', RunsHandler),
     ('/compat/runs_visibility', RunsVisibilityHandler),
     ('/compat/verifications', VerificationsHandler),
     ('/compat/verifications_visibility', VerificationsVisibilityHandler),
     ('/compat/sites', SitesHandler),
     ('/compat/sites_visibility', SitesVisibilityHandler),
     ('/compat/mappings', MappingsHandler),
     ('/compat/redirect', RedirectHandler),
     ('/compat/list', ListHandler),
     ('/compat/admins', AdminsHandler),
     ('/compat/tester', TestersHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Displays the home page for the HUD project.
Primarily used to provide content to users navigating directly to the HUD's
homepage (eg. via the browser).
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import os
import sys
import webapp2
from google.appengine.api import users
from common.handlers import base
from models import counter
class HomeRequestHandler(base.BaseHandler):
  """Home page request handler.

  Main page accessed by users navigating to the project's home page.
  The Home request handler is in charge of serving the HUD's homepage. This
  page is used by human users to navigate the list of crawled bugs, trigger a
  manual crawl, or add/remove bug databases from the crawling queue.
  """

  def GetUserInfo(self):
    """Returns sign-in state, email, and sign-in/out URLs for the template."""
    user = users.get_current_user()
    if user:
      return {'isSigned': True,
              'email': user.email(),
              'signOut': ''}
    else:
      return {'isSigned': False,
              'signIn': ''}

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def get(self):
    """Handles the GET request for the home page."""
    # Dropped the unused local 'env = os.environ'.
    self.RenderTemplate('landing.html',
                        {'user': self.GetUserInfo()})
# WSGI routing table: the landing page is the only route here.
app = webapp2.WSGIApplication(
    [('/', HomeRequestHandler)],
    debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The common lib utils."""
__author__ = 'phu@google.com (Po Hu)'
from models import bite_event
from utils import bite_constants
def GetEventData(event):
  """Builds the UI dict describing a single Bite event.

  Args:
    event: A bite_event.BiteEvent entity.

  Returns:
    A dict with id, type, title, labels, icon, actions, and props fields
    consumed by the events view.
  """
  event_id = str(bite_event.BiteEvent.host.get_value_for_datastore(event))
  event_type = event.event_type
  name = event.name or ''
  labels = event.labels or []
  # Bug fix: default the icon so an unrecognized event_type no longer
  # raises UnboundLocalError when building the result dict.
  icon = ''
  if event_type == 'project':
    icon = '/images/spec/performance.png'
  elif event_type == 'suite' or event_type == 'set':
    event_type = 'set'
    icon = '/images/artifacts/testautomated.png'
  elif event_type == 'run':
    icon = '/images/sample/run01-pie.png'
  elif event_type == 'schedule':
    icon = '/images/spec/security.png'
  elif event_type == 'run_template':
    icon = '/images/sample/run01-pie.png'
    event_type = 'runTemplate'
  action = ''
  if event.action:
    # NOTE(review): assumes event.action is always a key of
    # EVENT_ACTION_TO_READABLE; a missing key raises KeyError -- confirm.
    action = bite_constants.EVENT_ACTION_TO_READABLE[event.action]
  action = ' '.join([event_type, action])
  email = ''
  if event.created_by:
    email = event.created_by.email()
  return {'id': event_id,
          'extraId': str(event.key()),
          'type': event_type,
          'title': name,
          'labels': labels,
          'icon': icon,
          'actions': [
              {'title': 'View details',
               'operation': 'viewDetails'}],
          'props': [{'label': 'action', 'value': action},
                    {'label': 'by', 'value': email},
                    {'label': 'around', 'value': str(event.created_time)}]}
| Python |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Gets and stores templates from the BITE server.
Called by clients to retrieve a list of bug templates that have been stored on
the BITE server, or to add a template to the BITE server.
"""
__author__ = 'ralphj@google.com (Julie Ralph)'
import sys
import webapp2
from google.appengine.api import users
from common.handlers import base
from models import bug_template
MAX_RESULTS_CAP = 1000
class GetTemplatesHandler(base.BaseHandler):
  """Handles GET requests to the '/get_templates' URI."""

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def get(self):
    """Retrieves all bug templates.

    TODO(ralphj): Add a function that retrieves templates for only a
    specific url or project.

    Returns:
      A list of JSON-encoded templates.
    """
    templates = bug_template.BugTemplate.all().fetch(MAX_RESULTS_CAP)
    encoded = bug_template.JsonEncode(templates)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(encoded)
class NewTemplateHandler(base.BaseHandler):
  """Handles requests to the '/new_template' URI.

  GET requests will load a form allowing the user to write a new Bug Template.
  Submitting the form will result in a POST request, which adds the
  Bug Template to the AppEngine Datastore.
  """

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def get(self):
    """Displays a form for adding a new template."""
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      # Bug fix: redirect() does not abort the handler; without this
      # return the form was still rendered for anonymous users.
      return
    self.RenderTemplate('templates.html', {})

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def post(self):
    """Adds a new template, or replaces a current template with the same id."""
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      # Bug fix: without this return, the template was stored even when
      # the request came from an anonymous user.
      return
    template_id = self.GetRequiredParameter('id')
    name = self.GetRequiredParameter('name')
    urls = self.GetRequiredParameter('urls')
    project = self.GetRequiredParameter('project')
    backend_project = self.GetRequiredParameter('backend_project')
    backend_provider = self.GetRequiredParameter('backend_provider')
    selector_text = self.GetRequiredParameter('selector_text')
    note_text = self.GetRequiredParameter('note_text')
    display_order = self.GetOptionalIntParameter('display_order', 0)
    url_list = urls.split(',')
    bug_template.StoreBugTemplate(template_id=template_id,
                                  name=name,
                                  urls=url_list,
                                  project=project,
                                  backend_project=backend_project,
                                  backend_provider=backend_provider,
                                  selector_text=selector_text,
                                  note_text=note_text,
                                  display_order=display_order)
    # Typo fix in the user-visible alert ('succesfully').
    self.RenderTemplate('templates.html',
                        {'alert': 'Bug Template successfully created.'})
# WSGI routing table for the bug template endpoints.
app = webapp2.WSGIApplication(
    [('/get_templates', GetTemplatesHandler),
     ('/new_template', NewTemplateHandler)],
    debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles storing and serving screenshots."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import json
import webapp2
from common.handlers import base
from models import screenshots
from utils import screenshots_util
class UploadHandler(base.BaseHandler):
  """Class for handling uploads."""

  def post(self):
    """Handles uploading a new screenshot."""
    # Required params.
    data = self.GetRequiredParameter('screenshot_data')
    source = self.GetRequiredParameter('source')
    project = self.GetRequiredParameter('project')
    # Optional params; 'labels', when present, is a JSON-encoded list of
    # strings.
    source_id = self.GetOptionalParameter('source_id', '')
    caption = self.GetOptionalParameter('caption', None)
    details = self.GetOptionalParameter('details', None)
    raw_labels = self.GetOptionalParameter('labels', None)
    labels = json.loads(raw_labels) if raw_labels else raw_labels
    stored = screenshots.Add(data=data, source=source, source_id=source_id,
                             project=project, caption=caption,
                             details=details, labels=labels)
    new_id = stored.key().id()
    new_url = screenshots_util.RetrievalUrl(self.request.url, new_id)
    self.response.out.write(json.dumps({'id': new_id, 'url': new_url}))
class GetHandler(base.BaseHandler):
  """Class for handling fetching a screenshot."""

  def get(self):
    """Handles retrieving an existing screenshot."""
    screenshot = screenshots.GetById(
        screenshot_id=self.GetRequiredParameter('id'))
    if not screenshot:
      # NOTE(review): 400 kept for compatibility; 404 may be more apt for
      # a missing resource -- confirm client expectations before changing.
      self.error(400)
      return
    self.response.headers['Content-Type'] = 'image/png'
    self.response.out.write(screenshot.data)
class SearchHandler(base.BaseHandler):
  """Class for handling searching for a screenshot."""

  def get(self):
    """Handles retrieving the URLs of matching screenshots as JSON."""
    # Required params.
    source = self.GetRequiredParameter('source')
    # Optional params.
    source_id = self.GetOptionalParameter('source_id', None)
    project = self.GetOptionalParameter('project', None)
    max_results = int(
        self.GetOptionalParameter('max', screenshots.DEFAULT_LIMIT))
    matches = screenshots.GetScreenshots(source=source,
                                         source_id=source_id,
                                         project=project,
                                         limit=max_results)
    request_url = self.request.url
    urls = [screenshots_util.RetrievalUrl(request_url, match.key().id())
            for match in matches]
    self.response.out.write(json.dumps(urls))
# URL routes for the screenshot service.
app = webapp2.WSGIApplication(
    [('/screenshots/upload', UploadHandler),
     ('/screenshots/fetch', GetHandler),
     ('/screenshots/search', SearchHandler)
    ], debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite event handler."""
__author__ = 'phu@google.com (Po Hu)'
import webapp2
from common.handlers import base
from handlers import common_util
from models import bite_event
from utils import basic_util
class Error(Exception):
  """Base exception for errors raised by this module."""
  pass
class ShowEventsHandler(base.BaseHandler):
  """The handler for showing the Bite events."""

  def get(self):
    self.post()

  def post(self):
    """Writes the event details for the (optional) project as JSON."""
    project_name = self.GetOptionalParameter('projectName', '')
    event_data = bite_event.GetEventsData(common_util.GetEventData,
                                          project_name)
    self.response.out.write(
        basic_util.DumpJsonStr({'details': event_data}))
# URL routes for the event service.
app = webapp2.WSGIApplication(
    [('/event/show_all', ShowEventsHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Testing handlers for the HUD project.
Used to expose some of the server internal contructs to facilitate manual
testing and debugging.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import sys
import webapp2
from google.appengine.api import memcache
from common.handlers import base
class FlushCacheHandler(base.BaseHandler):
  """Flushes memcache."""

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def get(self):
    """Flushes all memcache entries and reports whether it succeeded."""
    flushed = memcache.flush_all()
    self.response.out.write('Flush success status: <b>%s</b>' % flushed)
# URL routes for the manual-testing helpers.
app = webapp2.WSGIApplication(
    [('/testing/flush_cache', FlushCacheHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite home handler."""
__author__ = 'phu@google.com (Po Hu)'
import logging
import os
import sys
import webapp2
from google.appengine.api import users
from common.handlers import base
class Error(Exception):
  """Base exception for errors raised by this module."""
  pass
class BiteHomeHandler(base.BaseHandler):
  """The default home handler for BITE server."""

  def get(self):
    self.post()

  def post(self):
    """Renders the home page, sending anonymous users to login first."""
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      # webapp2's redirect() does not abort the handler, so without this
      # return the template would also be rendered for anonymous users.
      return
    self.RenderTemplate('base.html', {})
class AutomateHandler(base.BaseHandler):
  """The handler for automating rpf."""

  def get(self):
    self.post()

  def post(self):
    """Renders the base page template."""
    self.RenderTemplate('base.html', {})
# URL routes for the BITE home pages.
app = webapp2.WSGIApplication(
    [('/home', BiteHomeHandler),
     ('/automateRpf', AutomateHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite project handler."""
__author__ = ('phu@google.com (Po Hu)'
'jasonstredwick@google.com (Jason Stredwick)')
import json
import urllib
import webapp2
from common.handlers import base
from models import bite_project
from models import model_helper
class Error(Exception):
  """General exception raised by the project handlers."""
  pass
class AddProjectHandler(base.BaseHandler):
  """The handler for adding a Bite project."""

  def get(self):
    self.post()

  def post(self):
    """Creates a project from the posted name/data and echoes it as JSON."""
    project_name = self.GetRequiredParameter('name')
    # The data payload arrives URL-encoded.
    raw_data = urllib.unquote(self.GetOptionalParameter('data', ''))
    created = bite_project.AddProject(project_name, raw_data)
    self.response.out.write(model_helper.ToJson(created))
class UpdateProjectHandler(base.BaseHandler):
  """The handler for updating a Bite project."""

  def get(self):
    self.post()

  def post(self):
    """Updates the named project from the posted data and echoes it."""
    project_name = self.GetRequiredParameter('name')
    # The data payload arrives URL-encoded.
    raw_data = urllib.unquote(self.GetOptionalParameter('data', ''))
    updated = bite_project.UpdateProject(project_name, raw_data)
    self.response.out.write(model_helper.ToJson(updated))
class DeleteProjectHandler(base.BaseHandler):
  """The handler for removing a Bite project."""

  def get(self):
    self.post()

  def post(self):
    """Deletes the project with the given name."""
    project_name = self.GetRequiredParameter('name')
    bite_project.DeleteProject(project_name)
class GetDefaultProjectHandler(base.BaseHandler):
  """The handler for fetching the default Bite project."""

  def get(self):
    self.post()

  def post(self):
    """Writes the default project as JSON."""
    default_project = bite_project.GetDefaultProject()
    self.response.out.write(model_helper.ToJson(default_project))
class GetProjectHandler(base.BaseHandler):
  """The handler for getting a Bite project."""

  def get(self):
    self.post()

  def post(self):
    """Writes the named project as JSON."""
    project_name = self.GetRequiredParameter('name')
    found = bite_project.GetProject(project_name)
    self.response.out.write(model_helper.ToJson(found))
class ListProjectHandler(base.BaseHandler):
  """The handler for getting a list of Bite projects."""

  def get(self):
    self.post()

  def post(self):
    """Writes the serialized list of known projects."""
    self.response.out.write(bite_project.ListProjects())
# URL routes for the project service.
app = webapp2.WSGIApplication(
    [('/project/add', AddProjectHandler),
     ('/project/update', UpdateProjectHandler),
     ('/project/delete', DeleteProjectHandler),
     ('/project/get', GetProjectHandler),
     ('/project/get_default', GetDefaultProjectHandler),
     ('/project/list', ListProjectHandler)],
    debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles login for users."""
__author__ = 'michaelwill@google.com (Michael Williamson)'
# For these gdata imports to work on AppEngine, each has to be imported
# individually...e.g. below you need both atom and atom.url.
# Be careful when changing.
import atom
import atom.url
import gdata
import gdata.client
import gdata.gauth
import json
import webapp2
from google.appengine.api import users
from config import settings
from common.handlers import base
# Gdata scopes this app requests access to (Issue Tracker feeds).
SCOPES = [
    'http://code.google.com/feeds/issues'
]
def GetLoginUrl():
  """Returns a login url that routes through the gdata access checks."""
  post_login_path = '/check_gdata_login_status'
  return users.create_login_url(post_login_path)
def GetLogoutUrl():
  """Returns a logout url that lands on the site root."""
  post_logout_path = '/'
  return users.create_logout_url(post_logout_path)
class SessionTokenSaver(base.BaseHandler):
  """Upgrades a single-use gdata token and saves the session token."""

  def get(self):
    # 'user' is expected to have been attached as a parameter earlier in
    # the gdata access-check flow.
    user_email = self.request.get('user')
    if not user_email:
      self.error(400)
      return
    single_use_token = gdata.gauth.AuthSubToken.from_url(self.request.url)
    gdata_client = gdata.client.GDClient()
    long_lived_token = gdata_client.UpgradeToken(single_use_token)
    gdata.gauth.AeSave(long_lived_token, user_email)
    self.redirect('/')
class CheckGdataLoginStatus(base.BaseHandler):
  """Checks that the user has granted gdata access to this app."""

  def GetAuthSubUrl(self, user_email):
    """Generates an auth sub url to use to redirect the user.

    This url will send the user to the docs page to grant the app access,
    and it contains an embedded url that docs can use to re-redirect the
    user once access has been granted.

    Args:
      user_email: Email address embedded into the return url so the token
          saver knows which user the token belongs to.

    Returns:
      A url string.
    """
    params = {
        'user': user_email
    }
    # After the grant, the user is bounced back to the token-saver handler.
    next_url = atom.url.Url(protocol='http', host=settings.HOST_NAME_AND_PORT,
                            path='/gdata_session_token_saver', params=params)
    scopes = SCOPES
    secure = False  # Set secure=True to request a secure AuthSub token.
    session = True
    return str(gdata.gauth.generate_auth_sub_url(
        next_url.to_string(), scopes, secure=secure,
        session=session))

  def get(self):
    # This is an optional parameter. Sometimes it's convenient to
    # be able to manually specify the user to check the gdata login
    # status for.
    user_email = self.request.get('user')
    if not user_email:
      user_email = users.get_current_user().email()
    # A saved token means access was already granted; otherwise start the
    # AuthSub grant flow.
    gdata_token = gdata.gauth.AeLoad(user_email)
    if not gdata_token:
      url = self.GetAuthSubUrl(user_email)
      self.redirect(url)
      return
    else:
      self.redirect('/')
class CheckLoginStatus(base.BaseHandler):
  """Checks the login status of a user."""

  def get(self):
    """Writes a JSON payload describing the current login state."""
    user = users.get_current_user()
    if user:
      payload = {
          'loggedIn': True,
          'user': user.email(),
          'url': GetLogoutUrl()
      }
    else:
      payload = {
          'loggedIn': False,
          'url': GetLoginUrl()
      }
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(json.dumps(payload))
# URL routes for the login/gdata-access flow.
app = webapp2.WSGIApplication(
    [('/check_login_status', CheckLoginStatus),
     ('/check_gdata_login_status', CheckGdataLoginStatus),
     ('/gdata_session_token_saver', SessionTokenSaver),
     ])
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bug reporting handlers."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import datetime
import json
import logging
import sys
import urllib
import webapp2
from google.appengine.api import users
from crawlers import crawler_util
from common.handlers import base
from models import bugs
from models import bugs_util
from models import comments
from models import url_bug_map
from utils import url_util
class Error(Exception):
  """Base exception for bug-handler errors."""
  pass
class UnrecognizedProvider(Error):
  """Raised when a bug provider is not recognized."""
  pass
class UnrecognizedBindingAction(Error):
  """Raised when a binding action is not recognized."""
  pass
class CreateBugHandler(base.BaseHandler):
  """Handles the creation of a new bug."""

  def post(self):
    """Queues storage of a new bug built from the request parameters."""
    current_user = users.get_current_user()
    user_name = current_user.nickname()
    # Required params.
    project = self.GetRequiredParameter('project')
    provider = self.GetRequiredParameter('provider')
    # Optional params.
    screenshot = self.GetOptionalParameter('screenshot', None)
    title = self.GetRequiredParameter('title')
    summary = self.GetOptionalParameter('repro', '')
    expected = self.GetOptionalParameter('expected', '')
    result = self.GetOptionalParameter('result', '')
    # NOTE: 'url' used to be read twice (optional, then required); the
    # required read always won, so only it is kept.
    url = self.GetRequiredParameter('url')
    cycle_id = self.GetRequiredParameter('test_cycle')
    # A freshly reported bug has no provider-assigned id, priority, or
    # details link yet.
    bug_id = ''
    priority = ''
    details_link = ''
    report_date = str(datetime.datetime.now())
    target_element = self.GetOptionalParameter('target_element', '')
    recording_link = self.GetOptionalParameter('recording_link', '')
    urls = [(url, url_bug_map.UrlPosition.MAIN)]
    status = 'unconfirmed'
    crawler_util.QueueStoreBug(bug_id=bug_id,
                               title=title,
                               summary=summary,
                               priority=priority,
                               project_name=project,
                               provider=provider,
                               status=status,
                               author=user_name,
                               details_link=details_link,
                               reported_on=report_date,
                               last_update=report_date,
                               last_updater=user_name,
                               target_element=target_element,
                               urls=urls,
                               recording_link=recording_link,
                               cycle_id=cycle_id,
                               screenshot=screenshot,
                               expected=expected,
                               result=result)
class UpdateStatusHandler(base.BaseHandler):
  """Handles updating the status of bugs in Issue Tracker."""

  def post(self):
    """Applies an optional status change and a comment to a bug."""
    new_comment = self.GetRequiredParameter('comment')
    new_status = self.GetOptionalParameter('status')
    bug_key = self.GetRequiredParameter('key')
    if new_status:
      bugs.UpdateStatus(bug_key, new_status)
    if new_comment:
      comments.AddComment(bug_key, new_comment)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(json.dumps({'success': True}))
class BindingActions(object):
  """Holds the valid actions during binding operations."""
  # Bind the bug to a new target element.
  UPDATE = 'update'
  # Remove the bug's binding.
  CLEAR = 'clear'
class UpdateBindingHandler(base.BaseHandler):
  """Handles updating the binding of bugs to controls."""

  def post(self):
    """Updates or clears the target element a bug is bound to."""
    bug_key = self.GetRequiredParameter('key')
    action = self.GetRequiredParameter('action')
    # CLEAR leaves the target empty; any other action must be UPDATE.
    target_element = ''
    if action == BindingActions.UPDATE:
      target_element = self.GetRequiredParameter('target_element')
    elif action != BindingActions.CLEAR:
      raise UnrecognizedBindingAction('Action: ' + action)
    logging.info('Updating target_element of bug %s, target_element: %s',
                 bug_key, target_element)
    bugs.UpdateTargetElement(bug_key, target_element)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(json.dumps({'success': True}))
class UpdateRecordingHandler(base.BaseHandler):
  """Handles adding recording to the bug."""

  def post(self):
    """Attaches a recording link to the given bug."""
    issue_key = self.GetRequiredParameter('key')
    # NOTE(review): 'action' is never used below; presumably it is read only
    # so a missing parameter still rejects the request -- confirm intent.
    action = self.GetRequiredParameter('action')
    recording_link = self.GetRequiredParameter('recording_link')
    logging.info('Update recording link of bug %s, recording_link: %s',
                 issue_key, recording_link)
    bugs.UpdateRecording(issue_key, recording_link)
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(json.dumps({'success': True}))
# URL routes for the bug service.
app = webapp2.WSGIApplication(
    [('/bugs', CreateBugHandler),
     ('/bugs/new', CreateBugHandler),
     ('/bugs/update_binding', UpdateBindingHandler),
     ('/bugs/update_recording', UpdateRecordingHandler),
     ('/bugs/update_status', UpdateStatusHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The examples handler."""
__author__ = 'phu@google.com (Po Hu)'
import os
import sys
import webapp2
from google.appengine.api import users
from common.handlers import base
class Error(Exception):
  """Base exception for errors raised by this module."""
  pass
class ExamplesHandler(base.BaseHandler):
  """The examples handler."""

  def get(self):
    """Renders the examples page, sending anonymous users to login first."""
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      # webapp2's redirect() does not abort the handler, so without this
      # return the template would also be rendered for anonymous users.
      return
    self.RenderTemplate('examples.html', {})
# URL routes for the examples page.
app = webapp2.WSGIApplication(
    [('/examples', ExamplesHandler)],
    debug=True)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles the JS methods store."""
__author__ = 'phu@google.com (Po Hu)'
import logging
import json
import webapp2
from google.appengine.api import users
from common.handlers import base
from models import store
class Error(Exception):
  """Base exception for errors raised by the store handlers."""
  pass
class DuplicateMethodNameError(Error):
  """An error encountered if the method name already exists in the store."""
class EditJsMethodHandler(base.BaseHandler):
  """Edit page request handler."""

  def GetUserInfo(self):
    """Returns a dict describing the current user's sign-in state."""
    user = users.get_current_user()
    if user:
      return {'isSigned': True,
              'email': user.email(),
              'signOut': ''}
    else:
      return {'isSigned': False,
              'signIn': ''}

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def get(self):
    """Handles the GET request for the home page."""
    method_name = self.GetOptionalParameter('methodName')
    if method_name:
      # Editing an existing method: look up its details for the template.
      details = store.GetMethodDetails(method_name)
      method_details = ''
      if details:
        method_details = json.dumps(details)
      # NOTE(review): unlike the branch below, this one does not pass
      # 'user' to the template -- confirm whether that is intentional.
      self.RenderTemplate('store_edit.html',
                          {'method': method_details})
    else:
      # Creating a new method: render an empty editor.
      self.RenderTemplate('store_edit.html',
                          {'user': self.GetUserInfo(),
                           'method': ''})
class SaveNewMethodHandler(base.BaseHandler):
  """Submit method handler."""

  def get(self):
    self.post()

  def post(self):
    """Validates uniqueness, inserts the method, and replies with its key."""
    method_code = self.GetRequiredParameter('methodCode')
    method_name = self.GetRequiredParameter('methodName')
    description = self.GetRequiredParameter('description')
    primary_label = self.GetRequiredParameter('primaryLabel')
    addl_labels = self.GetOptionalParameter('addlLabels')
    deps_name = self.GetOptionalParameter('depsName')
    deps_code = self.GetOptionalParameter('depsCode')
    if store.GetMethodByName(method_name):
      raise DuplicateMethodNameError()
    # Additional labels arrive as a JSON-encoded list of strings.
    labels = json.loads(addl_labels) if addl_labels else []
    user = users.get_current_user()
    author = user.email() if user else 'unknown'
    deps_reference = None
    if deps_name:
      deps_reference = store.UpdateDependency(deps_name, deps_code)
    method_instance = store.InsertMethod(
        method_code, method_name, description,
        primary_label, labels, deps_reference, author)
    self.response.out.write(json.dumps({'key': str(method_instance.key())}))
class UpdateMethodHandler(base.BaseHandler):
  """Submit method handler."""

  def get(self):
    self.post()

  def post(self):
    """Updates an existing method (and optionally its dependency)."""
    method_code = self.GetRequiredParameter('methodCode')
    method_name = self.GetRequiredParameter('methodName')
    description = self.GetRequiredParameter('description')
    primary_label = self.GetRequiredParameter('primaryLabel')
    addl_labels = self.GetOptionalParameter('addlLabels')
    deps_name = self.GetOptionalParameter('depsName')
    deps_code = self.GetOptionalParameter('depsCode')
    # Additional labels arrive as a JSON-encoded list of strings.
    labels = json.loads(addl_labels) if addl_labels else []
    if deps_name:
      store.UpdateDependency(deps_name, deps_code)
    store.UpdateMethod(
        method_code, method_name, description,
        primary_label, labels)
class ViewMethodsHandler(base.BaseHandler):
  """View methods handler."""

  def get(self):
    """Renders the methods page filtered by an optional label."""
    label = self.GetOptionalParameter('label', '')
    # 'all' (any casing) means no label filter.
    if label.lower() == 'all':
      label = ''
    methods = store.GetMethodsByPrimaryLabel(label)
    methods_string = json.dumps(methods) if methods else ''
    self.RenderTemplate('store_view.html',
                        {'methods': methods_string,
                         'label': label})
class GetMethodHandler(base.BaseHandler):
  """Submit method handler."""

  def get(self):
    self.post()

  def post(self):
    """Writes the requested method's details as JSON."""
    requested_name = self.GetRequiredParameter('methodName')
    details = store.GetMethodDetails(requested_name)
    self.response.out.write(json.dumps(details))
class DeleteMethodHandler(base.BaseHandler):
  """Delete method handler."""

  def get(self):
    self.post()

  def post(self):
    """Deletes the method identified by the given datastore key."""
    method_key = self.GetRequiredParameter('key')
    store.DeleteMethod(method_key)
class CheckMethodNameHandler(base.BaseHandler):
  """Checks the method name handler."""

  def get(self):
    self.post()

  def post(self):
    """Raises DuplicateMethodNameError if the name is already taken."""
    candidate = self.GetRequiredParameter('name')
    if store.GetMethodByName(candidate):
      raise DuplicateMethodNameError()
# URL routes for the JS method store.
app = webapp2.WSGIApplication([
    ('/store/edit', EditJsMethodHandler),
    ('/store/save_new_method', SaveNewMethodHandler),
    ('/store/update_method', UpdateMethodHandler),
    ('/store/get_method', GetMethodHandler),
    ('/store/view', ViewMethodsHandler),
    ('/store/delete', DeleteMethodHandler),
    ('/store/check_method_name', CheckMethodNameHandler)
])
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite fetch job handler."""
__author__ = 'phu@google.com (Po Hu)'
import logging
import webapp2
from common.handlers import base
from models import bite_result
from models import bite_run
from utils import basic_util
class Error(Exception):
  """Base exception for errors raised by this module."""
  pass
class FetchResultHandler(base.BaseHandler):
  """The handler for fetching a queued result."""

  def get(self):
    self.post()

  def post(self):
    """Fetches a random queued result matching the given tokens."""
    tokens = self.GetOptionalParameter('tokens', '')
    run = bite_run.GetSpecifiedRun(tokens)
    # An empty payload is written when no run or no queued job matches.
    payload = {}
    if run:
      job = bite_result.GetRandomQueuedJob(run)
      if job:
        payload = {'result': {'id': job.key().id(),
                              'testId': job.test_id,
                              'parent': str(job.parent().key())}}
    self.response.out.write(
        basic_util.DumpJsonStr(payload))
class UpdateResultHandler(base.BaseHandler):
  """The handler for updating a result."""

  def get(self):
    self.post()

  def post(self):
    """Updates a result entity with the posted status/screenshot/log."""
    result = self.GetRequiredParameter('result')
    status = self.GetOptionalParameter('status', 'undefined')
    screenshot = self.GetOptionalParameter('screenshot', '')
    log = self.GetOptionalParameter('log', '')
    ip = self.request.remote_addr
    logging.info(str(result))
    result_obj = basic_util.ParseJsonStr(str(result))['result']
    # 'in' replaces the deprecated dict.has_key(). When a runKey is given,
    # resolve the result's id/parent from the datastore instead of trusting
    # the client-supplied values.
    if 'runKey' in result_obj:
      temp = bite_result.GetResult(
          result_obj['runKey'], result_obj['testId'], result_obj['testName'])
      # If temp has multiple values, uses the first one.
      result = temp.get()
      if result:
        result_obj['id'] = result.key().id()
        result_obj['parent'] = str(result.parent().key())
    result = bite_result.UpdateResult(
        result_obj['id'], result_obj['parent'],
        status, screenshot, log, '', ip)
    self.response.out.write(
        'Result has been successfully updated.' + result.test_id)
class ViewResultHandler(base.BaseHandler):
  """The handler for viewing a result."""

  def get(self):
    self.post()

  def post(self):
    """Writes the screenshot of the requested result as JSON."""
    result_key = self.GetRequiredParameter('resultKey')
    loaded = bite_result.LoadResultByKeyStr(result_key)
    self.response.out.write(
        basic_util.DumpJsonStr({'screenshot': str(loaded.screenshot)}))
# URL routes for the result service.
app = webapp2.WSGIApplication(
    [('/result/fetch', FetchResultHandler),
     ('/result/update', UpdateResultHandler),
     ('/result/view', ViewResultHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Gets the bugs associated with a given URL.
Called by the clients to retrieve the list of bugs known for a given URL.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import sys
import webapp2
from google.appengine.api import users
from common.handlers import base
from models import bugs
from models import url_bug_map
from utils import url_util
class BugsForUrlHandler(base.BaseHandler):
  """Handles GET requests to the '/get_bugs_for_url' URI.

  Attributes:
    DEFAULT_MAX_RESULTS: Static int used to limit the amount of bugs retrieved
        for a given URL if no max_results is specified in the request.
  """
  DEFAULT_MAX_RESULTS = 1000

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def get(self):
    """Retrieves up to max_results bugs for the given target_url.

    Request parameters:
      target_url: Required URL for which we want to retrieve bugs.
      max_results: Optional int specifying the maximum results to retrieve.
      state: Optional state of the bugs to retrieve.
      status: Optional status of the bugs to retrieve.
      user: Optional email override, honored for admins only.
    """
    current_user = users.get_current_user()
    user_email = current_user.email() if current_user else None
    if users.is_current_user_admin():
      # Admins may override the email used for the lookup.
      user_email = self.GetOptionalParameter('user', user_email)
    target_url = self.GetRequiredParameter('target_url')
    state = self.GetOptionalParameter('state', None)
    status = self.GetOptionalParameter('status', None)
    max_results = self.GetOptionalIntParameter(
        'max_results', BugsForUrlHandler.DEFAULT_MAX_RESULTS)
    bugs_list = url_bug_map.GetBugsForUrl(
        target_url, user_email, max_results, state, status)
    # JSON-encode the response and send it to the client.
    self.response.headers['Content-Type'] = 'application/json'
    self.response.out.write(bugs.JsonEncode(bugs_list))
# URL route for the bugs-for-url lookup.
app = webapp2.WSGIApplication(
    [('/get_bugs_for_url', BugsForUrlHandler)],
    debug=True)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The deferred lib utils."""
__author__ = 'phu@google.com (Po Hu)'
import datetime
import logging
import math
import random
from google.appengine.ext import db
from google.appengine.ext import deferred
from models import bite_result
from models import bite_run
from models import bite_suite
from models import suite_test_map
from utils import basic_util
# Max number of entities per datastore put/delete batch (and per run slice).
DEFAULT_PUT_DELETE_MAX = 500
# Number of result entities handled by a single deferred task.
DEFAULT_RESULTS_NUM_PER_TASK = 10000
# Below this many items, work is done inline instead of via the task queue.
DEFAULT_NO_DEFERRED_NUM = 2000
def DeleteResultsOfRun(run_key_str, max_num):
  """Deletes all the results of a given run.

  Args:
    run_key_str: String form of the run's db.Key.
    max_num: Used to size each batch of run slices fetched per iteration.
  """
  offset = 0
  run_key = db.Key(run_key_str)
  run_slice_list = bite_run.BiteRunSlice.all(keys_only=True).filter(
      'run = ', run_key)
  slice_num = run_slice_list.count(DEFAULT_NO_DEFERRED_NUM)
  logging.info('There are %d slices to be deleted.', slice_num)
  # Small runs are deleted inline; larger ones go through the task queue.
  # Previously is_deferred was only assigned in the small-run branch, which
  # raised UnboundLocalError for runs with DEFAULT_NO_DEFERRED_NUM or more
  # slices.
  is_deferred = True
  if slice_num < DEFAULT_NO_DEFERRED_NUM:
    is_deferred = False
  slices_per_task = max_num / DEFAULT_PUT_DELETE_MAX
  while True:
    slices = run_slice_list.fetch(slices_per_task, offset=offset)
    if not slices:
      break
    else:
      offset += len(slices)
    if is_deferred:
      deferred.defer(DeleteResults, slices, _queue='delete-results')
    else:
      DeleteResults(slices)
def DeleteResults(run_slices):
  """Deletes every result under each run slice, then the slice itself."""
  if not run_slices:
    return
  for current_slice in run_slices:
    # Results of a slice are removed transactionally before the slice.
    db.run_in_transaction(RetryBatchOperation,
                          bite_result.GetResultsOfRunSlice(current_slice),
                          False)
    db.delete(current_slice)
def RetryBatchOperation(temp_list, is_put):
  """Retries the batch operation until it succeeds or fails.

  Args:
    temp_list: Entities (or keys) to put or delete in one batch.
    is_put: True to db.put the batch; False to db.delete it.

  Raises:
    Exception: Whatever datastore error persisted after 7 attempts.
  """
  counter = 0
  while True:
    try:
      if is_put:
        db.put(temp_list)
      else:
        db.delete(temp_list)
      break
    # 'as' syntax replaces the legacy comma form; lazy %-args for logging.
    except Exception as err:
      logging.warning('Batch operation failed because: %s', err)
      # Give up after 7 attempts in total.
      if counter > 6:
        raise
      else:
        counter += 1
def KickOffTests(test_info_list, run_slice_index, run_key):
  """Adds a list of result entities based on the given test list.

  Builds queued BiteResult entities in batches of DEFAULT_PUT_DELETE_MAX,
  one run slice per full batch, consuming test_info_list in place.

  Args:
    run_slice_index: Index to use for the first run slice created here.
    run_key: Key of the run the results belong to.
  """
  temp_list = []
  i = 0
  logging.info('Adding a run slice number: ' + str(run_slice_index))
  run_slice = bite_run.AddRunSlice(run_key, run_slice_index)
  while test_info_list:
    # Everything left is already buffered: flush the final (partial) batch.
    if len(temp_list) == len(test_info_list):
      db.run_in_transaction(RetryBatchOperation, temp_list, True)
      del test_info_list[:len(temp_list)]
      run_slice.queued_number = len(temp_list)
      run_slice.tests_number = len(temp_list)
      run_slice.put()
      break
    test_info = test_info_list[i]
    result = bite_result.BiteResult(parent=run_slice,
                                    run=run_key,
                                    test_id=test_info['id'],
                                    status='queued',
                                    # random is used for random sampling later.
                                    random=random.random(),
                                    automated=test_info['automated'],
                                    test_name=test_info['name'])
    temp_list.append(result)
    # A full batch: persist it, account for it on the slice, and start a
    # fresh slice if more tests remain.
    if len(temp_list) == DEFAULT_PUT_DELETE_MAX:
      db.run_in_transaction(RetryBatchOperation, temp_list, True)
      temp_list = []
      del test_info_list[:DEFAULT_PUT_DELETE_MAX]
      run_slice.queued_number = DEFAULT_PUT_DELETE_MAX
      run_slice.tests_number = DEFAULT_PUT_DELETE_MAX
      run_slice.put()
      run_slice_index += 1
      if test_info_list:
        logging.info('Add a new run slice. Num of the list is: %d',
                     len(test_info_list))
        run_slice = bite_run.AddRunSlice(run_key, run_slice_index)
      i = 0
    else:
      i += 1
def StartRun(suite_key, run_name,
             test_info_list, tokens, labels,
             dimensions, start_url,
             run_template_key, user):
  """Creates a run entity with the given info and kicks off its tests.

  Returns:
    The new run's key, or None when neither a suite nor a template is given.
  """
  if not suite_key and not run_template_key:
    return
  start_time = datetime.datetime.now()
  # Fall back to the suite's own tests and tokens when none were supplied.
  if not test_info_list:
    test_info_list = GetAllTestInfo(str(suite_key), user)
  if not tokens:
    tokens = bite_suite.GetSuiteTokens(suite_key)
  run = bite_run.AddRun(run_name, suite_key, start_time,
                        test_info_list, tokens, labels,
                        dimensions, start_url,
                        run_template_key)
  # Small runs are kicked off inline; big ones through the task queue.
  is_deferred = len(test_info_list) >= DEFAULT_NO_DEFERRED_NUM
  StartTests(test_info_list, run.key(), is_deferred)
  return run.key()
def GetSlicesNum(total_num):
  """Returns how many slices are needed for holding all the results."""
  per_slice = float(DEFAULT_PUT_DELETE_MAX)
  return int(math.ceil(total_num / per_slice))
def StartTests(test_info_list, run_key, is_deferred=True):
  """Kicks off the tests of the given suite.

  Splits test_info_list into chunks of DEFAULT_RESULTS_NUM_PER_TASK and
  hands each chunk to KickOffTests, either inline or as a deferred task.
  Consumes test_info_list in place.
  """
  temp_list = []
  run_index = 0
  logging.info('Starts kicking off tests in tasks.')
  while True:
    # Full-sized chunk: dispatch it and advance the slice index past the
    # slices that chunk will create.
    if len(test_info_list) >= DEFAULT_RESULTS_NUM_PER_TASK:
      temp_list = test_info_list[:DEFAULT_RESULTS_NUM_PER_TASK]
      if is_deferred:
        deferred.defer(KickOffTests, temp_list,
                       run_index, run_key, _queue='add-results')
      else:
        KickOffTests(temp_list, run_index, run_key)
      del test_info_list[:DEFAULT_RESULTS_NUM_PER_TASK]
      temp_list = []
      run_index += GetSlicesNum(DEFAULT_RESULTS_NUM_PER_TASK)
    else:
      # Remaining partial chunk (if any), then done.
      if test_info_list:
        if is_deferred:
          deferred.defer(KickOffTests, test_info_list,
                         run_index, run_key, _queue='add-results')
        else:
          KickOffTests(test_info_list, run_index, run_key)
      break
def GetAllTestInfo(suite_key_str, user=None):
  """Gets all the tests for the suite.

  Args:
    suite_key_str: String form of the suite's datastore key.
    user: Unused here -- kept for caller compatibility.

  Returns:
    Whatever suite_test_map.GetAllTestInfoOfSuite returns for the suite.
  """
  # NOTE(review): the fetched entity is never used; the call may only serve
  # to validate the key (a bad key would raise here) -- confirm before
  # removing it.
  suite = bite_suite.BiteSuite.get(db.Key(suite_key_str))
  # Assume the saved query overrides the stored tests.
  return suite_test_map.GetAllTestInfoOfSuite(suite_key_str)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles storing and retrieving test cases.
Note, an oddity in this implementation is that we don't store
pure test code. Instead, we store json blobs which are defined
by the client. This is left over from the old WTF API, but it
would require a complete client rewrite to make it more
explicit.
"""
__author__ = 'michaelwill@google.com (Michael Williamson)'
import logging
import json
import webapp2
from common.handlers import base
from models import storage
from models import storage_project
from utils import zip_util
class Error(Exception):
  # Base exception for this module; subclass for specific failures.
  pass
# Raised when a test name collides with an existing one.
class DuplicatedNameError(Error):
  """Duplicated name."""
class AddPreexistingDocsMetadata(base.BaseHandler):
  """Legacy handler for adding metadata for tests already in docs.

  This handler should only be used when a test object is already stored
  in Google Docs and the metadata on the bite server needs to be
  synchronized.
  """

  def post(self):
    doc_resource_id = self.GetRequiredParameter('resource_id')
    doc_resource_url = self.GetRequiredParameter('resource_url')
    name = self.GetRequiredParameter('test_name')
    project_name = self.GetRequiredParameter('project')
    legacy_wtf_id = self.GetOptionalParameter('wtf_id', '')
    # Idempotence: if metadata already exists for this doc, leave it alone.
    if storage.FetchByDocsResourceId(doc_resource_id):
      logging.info('Found a preexisting test with resource_id %s.'
                   ' Not doing anything.',
                   doc_resource_id)
      return
    storage.AddPreexisting(
        project_name, name, doc_resource_url, doc_resource_id, legacy_wtf_id)
class GetTestAsJson(base.BaseHandler):
  """Returns one test object in a json bundle."""

  def get(self):
    test_id = self.GetRequiredParameter('id')
    test_metadata = storage.FetchById(test_id)
    if not test_metadata:
      logging.info('Test with id %s not found.', test_id)
      self.error(400)
      return
    # Bug fix: bump the load counter only for tests that actually exist
    # (it used to be incremented before the not-found check), and drop the
    # unused return value.
    storage.IncreaseAndGetLoadedTimes(test_id)
    file_text = test_metadata.GetText()
    json_obj = json.loads(file_text)
    json_obj['projectname'] = test_metadata.project
    # TODO(michaelwill): This should really only be returning one item.
    # It returns a list because the WTF legacy API returned a list.
    response = [{'id': test_id, 'json': json.dumps(json_obj)}]
    self.response.out.write(json.dumps(response))
class DeleteTest(base.BaseHandler):
  """Deletes a test object."""

  def get(self):
    self.post()

  def post(self):
    ids_json = self.GetRequiredParameter('ids')
    script_ids = json.loads(ids_json)
    # Remove the metadata entities first (if any exist), then every
    # screenshot step attached to the scripts.
    metadata_instances = storage.FetchByIds(script_ids)
    if metadata_instances:
      storage.DeleteMetadata(metadata_instances)
    storage.DeleteAllStepsByScriptIds(script_ids)
    self.response.out.write('delete successfully.')
class UpdateTest(base.BaseHandler):
  """Updates a test object."""

  def post(self):
    test_id = self.GetRequiredParameter('id')
    test_json = self.GetRequiredParameter('json')
    project_name = self.GetRequiredParameter('project')
    js_files_param = self.GetOptionalParameter('jsFiles')
    methods_param = self.GetOptionalParameter('commonMethodsString')
    # Optional params arrive as json strings; parse them when present.
    js_files = json.loads(js_files_param) if js_files_param else js_files_param
    common_methods = json.loads(methods_param) if methods_param else []
    test_metadata = storage.FetchById(test_id)
    if not test_metadata:
      self.error(400)
      return
    new_test_name = json.loads(test_json)['name']
    storage_project.UpdateProject(project_name,
                                  {'js_files': js_files,
                                   'common_methods': common_methods})
    test_metadata.Update(project_name, new_test_name, test_json)
class SaveTest(base.BaseHandler):
  """Saves a new test object to the storage backend."""

  def post(self):
    json_str = self.GetRequiredParameter('json')
    project_name = self.GetRequiredParameter('project')
    js_files_param = self.GetOptionalParameter('jsFiles')
    methods_param = self.GetOptionalParameter('commonMethodsString')
    # Optional params arrive as json strings; parse them when present.
    js_files = json.loads(js_files_param) if js_files_param else js_files_param
    common_methods = json.loads(methods_param) if methods_param else []
    new_test_name = json.loads(json_str)['name']
    # A test with the same name in this project is replaced outright.
    duplicate = storage.FetchByProjectAndTestName(project_name, new_test_name)
    if duplicate:
      storage.DeleteMetadata([duplicate])
      storage.DeleteAllStepsByScriptIds([duplicate.id])
    storage_project.UpdateProject(project_name,
                                  {'js_files': js_files,
                                   'common_methods': common_methods})
    storage_instance = storage.Save(project_name, new_test_name, json_str)
    # TODO(michaelwill): This weird id string is left over from the
    # legacy WTF system. Change to a proper json response.
    self.response.out.write('id=' + storage_instance.id)
class SaveZipFile(base.BaseHandler):
  """Convert an RPF project's exported tests into a zip file."""

  def post(self):
    """Given a set of files as a json string and saves to db.

    Raises:
      TypeError: Unsupported key type; json.
      OverflowError: Circular reference; json.
      ValueError: Invalid value, out of range; json.
      zip_util.BadInput: Raised for bad inputs supplied to zip_util functions.
    """
    payload = self.GetRequiredParameter('json')
    logging.info(payload)
    # The datastore key of the saved blob is the whole response body.
    self.response.out.write(storage.SaveZipData(payload))
class GetZipFile(base.BaseHandler):
  """Serves an RPF project's exported tests as a downloadable zip file."""

  def get(self):
    self.post()

  def post(self):
    """Returns a zip file based on the given key string."""
    key_string = self.GetRequiredParameter('key')
    logging.info(key_string)
    # Renamed from 'zip' so the builtin zip() is no longer shadowed.
    zip_entity = storage.LoadZipByKeyStr(key_string)
    title, contents = zip_util.JsonStringToZip(zip_entity.json_str)
    # The stored blob is one-shot: it is removed once downloaded.
    zip_entity.delete()
    disposition = 'attachment; filename="' + title + '"'
    self.response.headers['Content-Type'] = 'application/zip'
    self.response.headers['Content-Disposition'] = str(disposition)
    self.response.out.write(contents)
class GetProject(base.BaseHandler):
  """Gets project data and returns it to the requester."""

  def post(self):
    """Looks up a project by name and writes it out as json."""
    name = self.GetRequiredParameter('name')
    metadatas = storage.FetchByProject(name)
    tests = [{'test_name': metadata.test_name,
              'test': metadata.test,
              'id': metadata.id}
             for metadata in metadatas]
    project = storage_project.GetProjectObject(name)
    if project is None:
      # No project entry exists for the given name, but there may still be
      # tests associated with it; if so, create the missing project entry.
      if tests:
        project = storage_project.GetOrInsertProject(name)
      if project is None:
        self.error(400)
        return
    data = {
        'project_details': project,
        'tests': tests
    }
    try:
      self.response.out.write(json.dumps(data))
    except (TypeError, OverflowError, ValueError):
      self.error(400)
class SaveProject(base.BaseHandler):
  """Saves project data."""

  def post(self):
    """Given project data, save it to db."""
    name = self.GetRequiredParameter('name')
    data_string = self.GetRequiredParameter('data')
    try:
      parsed = json.loads(data_string)
    except (TypeError, OverflowError, ValueError):
      # TODO(jasonstredwick): Change from error codes to an error response.
      self.error(400)
      return
    # UpdateProject returns None on failure.
    if storage_project.UpdateProject(name, parsed) is None:
      self.error(400)
    else:
      self.response.out.write('success')
class GetProjectNames(base.BaseHandler):
  """Serves the list of known project names as json."""

  def get(self):
    self.post()

  def post(self):
    """Returns the project names."""
    self.response.out.write(json.dumps(storage_project.GetProjectNames()))
class AddScreenshots(base.BaseHandler):
  """Adds the screenshots."""

  def get(self):
    self.post()

  def post(self):
    """Replaces a script's stored steps with the posted screenshots."""
    script_id = self.GetRequiredParameter('id')
    steps_json = self.GetRequiredParameter('steps')
    # Old steps are wiped first so the script keeps only the new set.
    storage.DeleteAllSteps(script_id)
    parsed_steps = json.loads(steps_json)
    for key in parsed_steps:
      step = parsed_steps[key]
      storage.AddNewScriptStep(script_id, step['index'], step['data'])
class GetScreenshots(base.BaseHandler):
  """Gets the screenshots of a given script."""

  def get(self):
    self.post()

  def post(self):
    """Returns the screenshots of a script."""
    script_id = self.GetRequiredParameter('id')
    # Key each entry by its step index for easy lookup on the client.
    rtn_obj = {}
    for step in storage.GetAllSteps(script_id):
      rtn_obj[step.step_index] = {'index': step.step_index,
                                  'id': step.script_id,
                                  'data': step.image_url}
    self.response.out.write(json.dumps(rtn_obj))
# URL routing: maps each /storage/* endpoint to its handler class.
app = webapp2.WSGIApplication(
    [('/storage/add_test_metadata', AddPreexistingDocsMetadata),
     ('/storage/gettestasjson', GetTestAsJson),
     ('/storage/updatetest', UpdateTest),
     ('/storage/addtest', SaveTest),
     ('/storage/savezip', SaveZipFile),
     ('/storage/getzip', GetZipFile),
     ('/storage/deletetest', DeleteTest),
     ('/storage/getproject', GetProject),
     ('/storage/saveproject', SaveProject),
     ('/storage/getprojectnames', GetProjectNames),
     ('/storage/addscreenshots', AddScreenshots),
     ('/storage/getscreenshots', GetScreenshots)
     ])
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite run handler."""
__author__ = 'phu@google.com (Po Hu)'
#Import not at top
#pylint: disable-msg=C6204
#Statement before imports
#pylint: disable-msg=C6205
#Invalid method name
#pylint: disable-msg=C6409
#Catch Exception
#pylint: disable-msg=W0703
import datetime
import logging
import webapp2
from google.appengine.api import users
from common.handlers import base
from handlers import deferred_util
from models import bite_project
from models import bite_result
from models import bite_run
from models import bite_suite
from utils import basic_util
# Maximum number of runs returned per run-listing query.
DEFAULT_RUNS_NUMBER_PER_PAGE = 50
class Error(Exception):
  # Base exception for this module; subclass for specific failures.
  pass
class NotEnoughInfoError(Error):
  """Raised when there is not enough info passed."""
class ShowRunsOfSameSuiteHandler(base.BaseHandler):
  """The handler for showing the runs of the same suite."""

  def get(self):
    self.post()

  def post(self):
    """Shows the requested runs."""
    suite_key_str = self.GetOptionalParameter('suiteKey')
    suite_name = self.GetOptionalParameter('suiteName')
    project_name = self.GetOptionalParameter('projectName')
    run_filter = self.GetOptionalParameter('filter', 'all')
    if not suite_key_str:
      # Resolve the suite key from its name within the project.
      suite_key_str = str(bite_suite.LoadSuite(suite_name, project_name).key())
    runs = []
    if run_filter in ('running', 'queued', 'completed'):
      runs = bite_run.GetRunsOfSuite(suite_key_str, True, status=run_filter,
                                     max_num=DEFAULT_RUNS_NUMBER_PER_PAGE)
    elif run_filter.startswith('day'):
      # Filters shaped like 'dayN' mean "runs from the past N days".
      days = int(run_filter[3:])
      logging.info('Start from the past : ' + str(days))
      runs = bite_run.GetRunsOfSuite(suite_key_str, True, past_days=days,
                                     max_num=DEFAULT_RUNS_NUMBER_PER_PAGE)
    elif run_filter == 'all':
      runs = bite_run.GetRunsOfSuite(suite_key_str, True,
                                     max_num=DEFAULT_RUNS_NUMBER_PER_PAGE)
    self.response.out.write(basic_util.DumpJsonStr(
        {'runs': bite_run.GetRunsData(runs)}))
class ShowRunsHandler(base.BaseHandler):
  """The handler for showing the Bite runs."""

  def get(self):
    self.post()

  def setSelectedNav(self, selected_scope, name):
    """Marks the filter whose name matches as selected, all others not."""
    for nav_filter in selected_scope['filters']:
      nav_filter['selected'] = nav_filter['name'] == name

  def post(self):
    """Shows the Bite runs info."""
    run_filter = self.GetOptionalParameter('filter', 'all')
    project_name = self.GetOptionalParameter('projectName', '')
    if run_filter == 'scheduled':
      data = self._GetScheduledRunsData(project_name)
    else:
      runs, templates = bite_run.GetLatestRunsThroughTemplate(
          run_filter, project_name)
      data = bite_run.GetRunsData(runs)
      data.extend(bite_run.GetEmptyTemplateData(templates))
    self.response.out.write(
        basic_util.DumpJsonStr({'details': data}))

  def _GetScheduledRunsData(self, project_name):
    """Gets the scheduled runs data."""
    jobs = bite_run.GetAllScheduledJobs(project_name)
    runs_data = []
    for job in jobs:
      run_template = job.run_template
      suite = run_template.suite
      last_time = basic_util.CreateStartStr(job.watchdog_last_time)
      interval = bite_suite.ParseWatchdogSetting(
          run_template.watchdog_setting)
      run_data = {
          'id': str(job.key()),
          'type': 'run',
          'title': run_template.name,
          # Bug fix: list.extend() returns None, so the original
          # '[...].extend(...)' always produced labels=None; concatenate
          # the two lists instead.
          'labels': [suite.parent().name] + list(run_template.filtered_labels),
          'icon': '/images/run00-pie.png',
          'props': [{'label': '# of tests', 'value': suite.tests_number},
                    {'label': 'last time', 'value': last_time},
                    {'label': 'interval', 'value': str(interval) + ' min'}]
      }
      runs_data.append(run_data)
    return runs_data
class GetDetailsHandler(base.BaseHandler):
  """Gets the details info of a given run."""

  def get(self):
    self.post()

  def _CreateDurationStr(self, time_delta):
    # Formats a timedelta as 'Nd Nh Nm'; seconds are dropped.
    # NOTE(review): '/' assumes Python 2 integer division -- confirm before
    # any Python 3 migration.
    hours, remainder = divmod(time_delta.seconds, 3600)
    minutes = remainder / 60
    return '%dd %dh %dm' % (time_delta.days, hours, minutes)

  def AddRunSummary(self, params, passed_num, failed_num, run_start_time,
                    run_lead, run):
    """Adds the run summary info in the given dict.

    Fills params with formatted 'PP% (x of y)' strings plus raw counts,
    the run lead, and start/elapsed time strings.
    """
    completed_num = passed_num + failed_num
    uncompleted_num = run.tests_number - passed_num - failed_num
    completed_str = '%s (%d of %d)' % (
        basic_util.GetPercentStr(completed_num, run.tests_number),
        completed_num, run.tests_number)
    passed_str = '%s (%d of %d)' % (
        basic_util.GetPercentStr(passed_num, completed_num),
        passed_num, completed_num)
    failed_str = '%s (%d of %d)' % (
        basic_util.GetPercentStr(failed_num, completed_num),
        failed_num, completed_num)
    uncompleted_str = '%s (%d of %d)' % (
        basic_util.GetPercentStr(uncompleted_num, run.tests_number),
        uncompleted_num, run.tests_number)
    start_time_pst = basic_util.ConvertFromUtcToPst(run_start_time)
    # NOTE(review): assumes the server clock is UTC (App Engine) so that
    # now() - run_start_time is meaningful -- confirm.
    elapsed_time = datetime.datetime.now() - run_start_time
    elapsed_time_str = self._CreateDurationStr(elapsed_time)
    start_time_str = basic_util.CreateStartStr(start_time_pst)
    params['completed_str'] = completed_str
    params['passed_str'] = passed_str
    params['failed_str'] = failed_str
    params['uncompleted_str'] = uncompleted_str
    params['elapsed_time_str'] = elapsed_time_str
    params['start_time_str'] = start_time_str
    params['run_lead'] = run_lead
    params['passed_num'] = passed_num
    params['failed_num'] = failed_num
    params['uncompleted_num'] = uncompleted_num

  def AddRunDetails(self, results):
    """Creates the details info object: one dict per result entity."""
    details = []
    for result in results:
      finish_time_str = ''
      if result.finished_time:
        finish_time_pst = basic_util.ConvertFromUtcToPst(result.finished_time)
        finish_time_str = basic_util.CreateStartStr(finish_time_pst)
      details.append({'id': result.test_id,
                      'resultKey': str(result.key()),
                      'log': str(result.log),
                      'name': result.test_name,
                      'status': result.status,
                      'when': finish_time_str,
                      'tester': result.executor_ip,
                      'labels': result.labels})
    return details

  def post(self):
    """Writes the summary and up to resultsNum detailed results of a run."""
    run_key_str = self.GetRequiredParameter('runKey')
    run = bite_run.GetModel(run_key_str)
    results_num = self.GetOptionalIntParameter('resultsNum', 5)
    completed_numbers = bite_run.GetTestsNumberOfStatus(run_key_str)
    passed_num = completed_numbers['passed']
    failed_num = completed_numbers['failed']
    run_start_time = run.start_time
    run_lead = ''
    if run.created_by:
      run_lead = run.created_by.email()
    results = bite_result.GetResultsOfRun(run_key_str, results_num)
    details = {}
    self.AddRunSummary(details, passed_num, failed_num,
                       run_start_time, run_lead, run)
    details['results'] = self.AddRunDetails(results)
    self.response.out.write(
        basic_util.DumpJsonStr({'details': details}))
class LoadResultsSummaryHandler(GetDetailsHandler):
  """Loads the results summary handler."""

  def get(self):
    self.post()

  def post(self):
    """Returns to client the run's results summary."""
    run_key_str = self.GetRequiredParameter('runKey')
    run = bite_run.GetModel(run_key_str)
    status_counts = bite_run.GetTestsNumberOfStatus(run_key_str)
    lead_email = run.created_by.email() if run.created_by else ''
    # Reuse the base class to compute the formatted summary fields.
    data = {}
    self.AddRunSummary(data, status_counts['passed'], status_counts['failed'],
                       run.start_time, lead_email, run)
    details = {
        'startTimeStr': data['start_time_str'],
        'elapsedTimeStr': data['elapsed_time_str'],
        'numOfTesters': '',
        'resultsLabels': run.labels,
        'passedNum': data['passed_num'],
        'failedNum': data['failed_num'],
        'uncompletedNum': data['uncompleted_num'],
        'summaryRows': [
            {'type': 'All',
             'pass': data['passed_str'],
             'fail': data['failed_str'],
             'notRun': data['uncompleted_str'],
             'total': run.tests_number}
        ],
    }
    self.response.out.write(
        basic_util.DumpJsonStr({'data': details}))
class LoadResultsDetailsHandler(GetDetailsHandler):
  """Loads the run's detailed per-test results."""

  def get(self):
    self.post()

  def post(self):
    """Returns the run's detailed results."""
    run_key_str = self.GetRequiredParameter('runKey')
    # Generalized: callers may cap the number of results fetched; the
    # default preserves the old fetch-everything behavior (magic 9999).
    results_num = self.GetOptionalIntParameter('resultsNum', 9999)
    results = bite_result.GetResultsOfRun(run_key_str, results_num)
    result_rows = self.AddRunDetails(results)
    details = {
        'numOfTests': len(result_rows),
        'resultRows': result_rows
    }
    self.response.out.write(
        basic_util.DumpJsonStr({'data': details}))
class LoadRunTemplateHandler(base.BaseHandler):
  """Loads a run template."""

  def get(self):
    self.post()

  def post(self):
    """Loads a run template."""
    run_key_str = self.GetOptionalParameter('runKey', '')
    template_key_str = self.GetOptionalParameter('runTemplateKey', '')
    if run_key_str:
      # Prefer the template attached to an existing run.
      template = bite_run.GetModel(run_key_str).run_template
    else:
      template = bite_run.GetTemplateEntity(template_key_str)
    if not template:
      error_log = 'No specified run template is available.'
      logging.error(error_log)
      self.response.out.write(error_log)
      return
    self.response.out.write(
        basic_util.DumpJsonStr(
            {'suiteKey': str(template.suite.key()),
             'runKey': run_key_str,
             'runTemplateKey': str(template.key()),
             'runName': template.name,
             'runDesc': template.description,
             'filteredLabels': template.filtered_labels,
             'dimensionLabels': template.test_dimension_labels,
             'runTokens': template.tokens,
             'runRecurMethod': not template.run_once,
             'interval': bite_suite.ParseWatchdogSetting(
                 template.watchdog_setting),
             'runStartUrl': template.start_url}))
class AddRunTemplateHandler(base.BaseHandler):
  """Adds or updates a run template."""

  def get(self):
    self.post()

  def post(self):
    """Adds a run template."""
    suite_key_str = self.GetOptionalParameter('suiteKey', '')
    run_template_key_str = self.GetOptionalParameter('runTemplateKey', '')
    run_name = self.GetRequiredParameter('runName')
    run_description = self.GetOptionalParameter('runDesc', '')
    run_filtered_labels = self.GetOptionalParameter('filteredLabels', '')
    run_dimension_labels = self.GetOptionalParameter('dimensionLabels', '')
    run_tokens = self.GetOptionalParameter('runTokens', '')
    run_recurring = self.GetOptionalParameter('runRecurMethod', True)
    run_start_url = self.GetOptionalParameter('runStartUrl', '')
    watchdog_setting_str = self.GetOptionalParameter('watchdogSetting', '')
    interval = self.GetOptionalParameter('interval', '')
    if interval:
      # An explicit interval overrides any posted watchdog setting.
      watchdog_setting_str = bite_suite.GetSuiteWatchdogStr(
          {}, int(interval))
    if not suite_key_str:
      # Typo fix: message used to read 'run tmeplate key str'.
      logging.info('run template key str: %s', run_template_key_str)
      suite_key_str = str(
          bite_run.GetTemplateEntity(run_template_key_str).suite.key())
    if run_filtered_labels:
      run_filtered_labels = basic_util.ParseJsonStr(run_filtered_labels)
    if run_dimension_labels:
      run_dimension_labels = basic_util.ParseJsonStr(run_dimension_labels)
    if (not run_template_key_str or
        not bite_run.CheckRunTemplateExists(run_template_key_str)):
      run_template = bite_run.AddRunTemplate(
          run_name, suite_key_str, run_description, run_recurring,
          watchdog_setting_str, run_filtered_labels,
          run_dimension_labels, run_tokens,
          run_start_url)
    else:
      run_template = bite_run.UpdateRunTemplate(
          run_name, suite_key_str, run_template_key_str,
          run_description, run_recurring,
          watchdog_setting_str, run_filtered_labels,
          run_dimension_labels, run_tokens,
          run_start_url)
    if watchdog_setting_str:
      # Bug fix: int(interval) used to raise ValueError when the caller
      # supplied 'watchdogSetting' directly and left 'interval' empty;
      # derive the minutes from the setting in that case.
      if interval:
        interval_minutes = int(interval)
      else:
        interval_minutes = bite_suite.ParseWatchdogSetting(
            watchdog_setting_str)
      bite_run.AddScheduledJob(str(run_template.key()), interval_minutes)
    self.response.out.write(str(run_template.key()))
class CheckScheduledJobs(base.BaseHandler):
  """Checks whether to run the scheduled jobs."""

  def get(self):
    self.post()

  def _checkHitInterval(self, last_time, interval, test):
    # Returns True when more than `interval` minutes have elapsed since
    # last_time; always True when `test` is truthy (forced mode).
    elapsed_time = datetime.datetime.now() - last_time
    # Whole minutes only; '/' relies on Python 2 integer division.
    elapsed_minutes = elapsed_time.days * 24 * 60 + elapsed_time.seconds / 60
    logging.info(
        'Since last run, it has passed ' + str(elapsed_minutes) + ' mins')
    if test:
      return True
    else:
      return elapsed_minutes > interval

  def CheckScheduledJob(self, test):
    """Checks whether to launch the scheduled jobs.

    Args:
      test: Truthy to force every job to fire regardless of its interval.
    """
    jobs = bite_run.GetAllScheduledJobs()
    updated_jobs = []
    for job in jobs:
      interval = bite_suite.ParseWatchdogSetting(
          job.run_template.watchdog_setting)
      # Jobs without a parseable interval are skipped entirely.
      if (interval and self._checkHitInterval(job.watchdog_last_time,
                                              interval, test)):
        self.kickOffScheduledJob(job.run_template)
        # Record the launch time so the next check measures from now.
        job.watchdog_last_time = datetime.datetime.now()
        updated_jobs.append(job)
    if updated_jobs:
      bite_run.UpdateScheduledJobs(updated_jobs)

  def kickOffScheduledJob(self, run_template):
    # Starts a run from the template's stored settings; the empty test
    # list makes StartRun load the suite's tests itself.
    deferred_util.StartRun(
        str(run_template.suite.key()),
        run_template.name, [],
        run_template.tokens,
        run_template.filtered_labels,
        run_template.test_dimension_labels,
        run_template.start_url,
        str(run_template.key()),
        run_template.created_by)

  def post(self):
    """Checks whether to run a suite."""
    test = self.GetOptionalParameter('test', '')
    self.CheckScheduledJob(test)
    self.response.out.write('done')
class AddRealTimeRunHandler(base.BaseHandler):
  """The handler for adding a realtime run."""

  def get(self):
    self.post()

  def post(self):
    """Starts a run and kicks off the tests."""
    user = users.get_current_user()
    run_name = self.GetRequiredParameter('runName')
    test_info_list_str = self.GetRequiredParameter('testInfoList')
    project_name = self.GetOptionalParameter('projectName', 'default')
    suite_name = self.GetOptionalParameter('suiteName', 'default')
    user_agent = self.GetOptionalParameter('userAgent', '')
    test_info_list = basic_util.ParseJsonStr(test_info_list_str)
    # Creates the test info list based on the given tests info.
    test_list = []
    for test in test_info_list:
      test_list.append({'id': str(test['id']),
                        'title': test['name'],
                        'name': test['name'],
                        'automated': True,
                        'author': '',
                        'labels': [run_name]})
    # Creates the project if it doesn't exist.  (Comment fix: this block
    # ensures the *project*; the returned entities were unused locals and
    # the calls alone guarantee existence.)
    try:
      bite_project.GetProject(project_name)
    except bite_project.NoProjectFoundError:
      bite_project.AddProject(project_name, '')
    # Creates the suite if the required suite doesn't exist.
    if not bite_suite.CheckSuiteExists(suite_name, project_name):
      suite = bite_suite.AddSuite(suite_name, project_name)
    else:
      suite = bite_suite.LoadSuite(suite_name, project_name)
    # Creates the run.
    run_key = deferred_util.StartRun(suite.key(), run_name, test_list, '',
                                     [suite_name, user_agent], [], None, '',
                                     user)
    self.response.out.write(run_key)
class AddRunHandler(base.BaseHandler):
  """The handler for adding a Bite run."""

  def get(self):
    self.post()

  def post(self):
    """Starts a run, kicks off the tests and returns the run key."""
    user = users.get_current_user()
    suite_key = self.GetOptionalParameter('suiteKey', '')
    run_template_key = self.GetOptionalParameter('runTemplateKey', '')
    run_key = self.GetOptionalParameter('runKey', '')
    run_name = self.GetOptionalParameter('runName', '')
    test_info_list = self.GetOptionalParameter('testInfoList', [])
    tokens = self.GetOptionalParameter('tokens', '')
    labels = self.GetOptionalParameter('labels', [])
    dimensions = self.GetOptionalParameter('dimensions', [])
    start_url = self.GetOptionalParameter('startUrl', '')
    if not (suite_key or run_template_key or run_key):
      raise NotEnoughInfoError('Not enough info to start the run.')
    run_template = {}
    if run_key:
      # An existing run implies its template, when it has one.
      run_entity = bite_run.GetModel(run_key)
      if run_entity.run_template:
        run_template = run_entity.run_template
        run_template_key = str(run_template.key())
    if run_template_key:
      if not run_template:
        run_template = bite_run.GetTemplateEntity(run_template_key)
      # Template values override whatever was posted.
      suite_key = str(run_template.suite.key())
      run_name = run_template.name
      tokens = run_template.tokens
      labels = run_template.filtered_labels
      dimensions = run_template.test_dimension_labels
      start_url = run_template.start_url
    deferred_util.StartRun(
        suite_key, run_name, test_info_list, tokens, labels,
        dimensions, start_url, run_template_key, user)
    self.response.out.write('done')
class DeleteRunHandler(base.BaseHandler):
  """The handler for deleting a Bite run."""

  def get(self):
    self.post()

  def post(self):
    """Deletes a run's results in batches, then the run entity itself."""
    doomed_run_key = self.GetRequiredParameter('runKey')
    batch_size = deferred_util.DEFAULT_RESULTS_NUM_PER_TASK
    deferred_util.DeleteResultsOfRun(doomed_run_key, batch_size)
    bite_run.DeleteRun(doomed_run_key)
    self.response.out.write('done deleting!')
class GetRunsOfSuiteHandler(base.BaseHandler):
  """The handler for getting Bite runs based on suites."""

  def get(self):
    self.post()

  def post(self):
    """Writes name/key pairs for every run of the given suite."""
    suite_key_str = self.GetRequiredParameter('suiteKey')
    runs_info = [{'name': run.name, 'key': str(run.key())}
                 for run in bite_run.GetRunsOfSuite(suite_key_str)]
    self.response.out.write(
        basic_util.DumpJsonStr({'runs': runs_info}))
class GetNumOfStatus(base.BaseHandler):
  """Gets number of tests in a specified status."""

  def get(self):
    self.post()

  def post(self):
    """Writes the count of tests currently in the requested status."""
    status = self.GetRequiredParameter('status')
    run_key_str = self.GetRequiredParameter('runKey')
    count = bite_run.GetTestsNumberOfStatus(run_key_str, status)
    self.response.out.write(str(count))
# URL routing: maps each /run/* endpoint to its handler class.
app = webapp2.WSGIApplication(
    [('/run/add', AddRunHandler),
     ('/run/delete', DeleteRunHandler),
     ('/run/get_runs', GetRunsOfSuiteHandler),
     ('/run/get_num', GetNumOfStatus),
     ('/run/show_all', ShowRunsHandler),
     ('/run/same_suite', ShowRunsOfSameSuiteHandler),
     ('/run/get_details', GetDetailsHandler),
     ('/run/load_results_summary', LoadResultsSummaryHandler),
     ('/run/load_results_details', LoadResultsDetailsHandler),
     ('/run/check_scheduled_jobs', CheckScheduledJobs),
     ('/run/add_template', AddRunTemplateHandler),
     ('/run/load_template', LoadRunTemplateHandler),
     ('/run/add_realtime_run', AddRealTimeRunHandler)],
    debug=True)
| Python |
#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides an interface to Google docs.
The DocsGateway provides an interface to the Google Docs gdata service
using version 3 of the API. It uses either AuthSub or ClientLogin for
authentication.
For detailed documentation, see
http://code.google.com/apis/documents/docs/3.0/developers_guide_python.html
NOTE: An annoyance of this interface is that we use the resource_id for
some of the functions and the resource_url for others. This is an
unfortunate consequence of the current gdata docs client which uses
the two interchangeably, and there is not an easy way to generate
one from the other.
"""
__author__ = 'michaelwill@google.com (Michael Williamson)'
# Disable 'Import not at top of file' lint error.
# pylint: disable-msg=C6204
try:
import auto_import_fixer
except ImportError:
pass # This will fail on unittest, ok to pass.
import StringIO
import urllib
# For these imports to work on AppEngine, each has to be imported
# individually...e.g. below you need both gdata and gdata.docs.
# Be careful when changing.
import gdata
import gdata.docs
import gdata.docs.client
import gdata.docs.data
import gdata.gauth
from google.appengine.api import users
def GatewayForAppEngine(email):
  """Creates a DocsGateway to access the user's google docs.

  NOTE: We assume the given user has already granted access to Google
  Docs and that a gdata access token is stored in the datastore via
  gdata.gauth.AeSave(email).

  Args:
    email: The user's email string.

  Returns:
    An instantiated DocsGateway object.

  Raises:
    Exception: If the user provided does not have a gdata
        access token stored in the datastore.
  """
  token = gdata.gauth.AeLoad(email)
  if not token:
    raise Exception('Error no gdata token for the user')
  docs_client = gdata.docs.client.DocsClient(
      source='Bite', auth_token=token)
  return DocsGateway(docs_client)
def GatewayForAppEngineForCurrentUser():
  """Creates a DocsGateway for the currently signed-in user's docs.

  NOTE: See GatewayForAppEngine() for assumptions we make.

  Returns:
    An instantiated DocsGateway object.

  Raises:
    Exception: If there is no current user or the current user
        does not have a stored gdata access token.
  """
  current_user = users.get_current_user()
  if not current_user:
    raise Exception('Login required!')
  return GatewayForAppEngine(current_user.email())
def GatewayForClientLogin(username, password):
  """Creates a DocsGateway using ClientLogin (not AuthSub).

  Args:
    username: The username to use to authenticate.
    password: The password for the given username.

  Returns:
    An instantiated DocsGateway object.
  """
  docs_client = gdata.docs.client.DocsClient(source='Bite')
  docs_client.ClientLogin(username, password, docs_client.source)
  return DocsGateway(docs_client)
class EasyDocsEntry(object):
  """Thin convenience wrapper around a gdata docs entry."""

  def __init__(self, docs_entry):
    """Constructs a new EasyDocsEntry."""
    self.title = docs_entry.title.text
    self.src_url = docs_entry.content.src
    self.resource_id = docs_entry.resource_id.text
    self.docs_entry = docs_entry

  def GetDocumentType(self):
    """Returns the document type of this entry.

    Possible values are document, presentation, spreadsheet, folder, pdf, etc.

    Returns:
      The document type in string format.
    """
    return self.docs_entry.get_document_type()

  def IsFolder(self):
    """Returns true if this docs entry represents a folder."""
    doc_type = self.docs_entry.get_document_type()
    return doc_type == gdata.docs.data.COLLECTION_LABEL

  def __repr__(self):
    return ('(' + self.title + ', ' + self.src_url + ', ' +
            self.resource_id + ')')
class DocsGateway(object):
"""Provides an interface to interact with Google Docs."""
  def __init__(self, client):
    """Constructs a new DocsGateway object.

    Args:
      client: An instantiated gdata.docs.client.DocsClient() to
          use to access docs.
    """
    self.client = client
def GetRootFolders(self):
"""Returns the root folders in the user's docs account.
Returns:
A list of EasyDocsEntry objects, one for each root folder.
"""
return self.GetFolderContents('/feeds/default/private/full/-/folder')
def GetAllDocs(self):
returned_entries = []
entries = self.client.GetEverything()
for entry in entries:
returned_entries.append(EasyDocsEntry(entry))
return returned_entries
def GetFolderContents(self, folder_src_url):
"""Returns all the doc entries in the given folder including subfolders.
Args:
folder_src_url: The source url for the folder entry.
Returns:
A list of EasyDocsEntry objects, one for each file in the folder.
"""
feed = self.client.GetAllResources(uri=folder_src_url)
folder_entries = []
for entry in feed:
folder_entries.append(EasyDocsEntry(entry))
return folder_entries
def GetSubFolders(self, folder_src_url):
"""Returns only the folders beneath a given parent folder.
Args:
folder_src_url: The source url for the parent folder.
Returns:
A list of EasyDocsEntry objects, one for each subfolder.
"""
entries = self.GetFolderContents(folder_src_url)
folders = []
for entry in entries:
if entry.IsFolder():
folders.append(entry)
return folders
def CreateRootFolder(self, folder_name):
"""Creates a folder in the user's docs account at the root level.
Args:
folder_name: What the folder will be called. Spaces allowed.
Returns:
An EasyDocsEntry describing the new folder.
"""
resource = gdata.docs.data.Resource(type=gdata.docs.data.COLLECTION_LABEL,
title=folder_name)
return EasyDocsEntry(self.client.CreateResource(resource))
def CreateSubFolder(self, folder_name, parent_folder_resource_id):
"""Creates a folder in the user's docs account within the specified folder.
Args:
folder_name: What the folder will be called. Spaces allowed.
parent_folder_resource_id: (string) The resource id of the parent folder.
Returns:
An EasyDocsEntry describing the new folder.
"""
resource = gdata.docs.data.Resource(type=gdata.docs.data.COLLECTION_LABEL,
title=folder_name)
parent = self.client.GetResourceById(parent_folder_resource_id)
return EasyDocsEntry(
self.client.CreateResource(resource, collection=parent))
def CreateNewDoc(self, title, contents=None):
"""Creates a new doc in the user's root document list."""
return self.CreateNewDocInFolder(title=title, contents=contents)
def CreateNewDocInFolder(self, title, parent_folder_src_url=None,
contents=None, content_type='text/plain'):
"""Creates a new document inside of the specified folder.
All new documents are saved as plain text.
Args:
title: What the new doc will be called.
parent_folder_src_url: The docs url of the parent folder.
contents: An optional argument used to set the initial contents.
content_type: An optional argument used to set the initial content type.
Returns:
An EasyDocEntry object describing the new doc.
"""
if not contents:
contents = ''
resource = gdata.docs.data.Resource(title=title)
parent = None
if parent_folder_src_url:
parent = self.client.GetResourceBySelfLink(parent_folder_src_url)
media_source = gdata.data.MediaSource(
file_handle=StringIO.StringIO(contents),
content_length=len(contents),
content_type=content_type)
return EasyDocsEntry(self.client.CreateResource(resource, media_source,
collection=parent))
def GetFileContentAsText(self, src_url):
"""Returns a doc's content as text.
Args:
src_url: The DocsEntry resource url.
Returns:
The document text in string format.
"""
src_url += '&' + urllib.urlencode({'exportFormat': 'txt'})
contents = self.client._get_content(src_url)
# Get rid of the byte order mark (BOM):
# ef bb bf
# that Google Docs inserts at the beginning of files.
return contents[3:]
def UpdateDocument(self, resource_id, new_title=None, new_contents=None,
content_type='text/plain'):
"""Replaces the title and/or text of a document with those passed in.
Args:
resource_id: The resource id of the docs entry.
new_title: An optional new title string, if the title is to be changed.
new_contents: An optional new content string.
content_type: An optional string describing the content type of the
new contents.
"""
if not new_title and not new_contents:
return
doc_entry = self.client.GetResourceById(resource_id)
if new_title:
doc_entry.title.text = new_title
media_source = None
if new_contents:
media_source = gdata.data.MediaSource(
file_handle=StringIO.StringIO(new_contents),
content_length=len(new_contents),
content_type=content_type)
self.client.UpdateResource(doc_entry, media=media_source)
def Delete(self, resource_id):
"""Deletes a document."""
doc_entry = self.client.GetResourceById(resource_id)
self.client.DeleteResource(doc_entry)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides an interface to Google docs.
The DocsGateway provides an interface to the Google Docs gdata service
using version 3 of the API. It uses either AuthSub or ClientLogin for
authentication.
For detailed documentation, see
http://code.google.com/apis/documents/docs/3.0/developers_guide_python.html
NOTE: An annoyance of this interface is that we use the resource_id for
some of the functions and the resource_url for others. This is an
unfortunate consequence of the current gdata docs client which uses
the two interchangeably, and there is not an easy way to generate
one from the other.
"""
__author__ = 'michaelwill@google.com (Michael Williamson)'
# Disable 'Import not at top of file' lint error.
# pylint: disable-msg=C6204
try:
import auto_import_fixer
except ImportError:
pass # This will fail on unittest, ok to pass.
import StringIO
import urllib
# For these imports to work on AppEngine, each has to be imported
# individually...e.g. below you need both gdata and gdata.docs.
# Be careful when changing.
import gdata
import gdata.docs
import gdata.docs.client
import gdata.docs.data
import gdata.gauth
from google.appengine.api import users
def GatewayForAppEngine(email):
  """Creates a DocsGateway to access the user's google docs.

  NOTE: We assume here that the user provided has already granted
  access to Google Docs and that a gdata access token for them was
  stored in the datastore using gdata.gauth.AeSave(email).

  Args:
    email: The user's email string.

  Returns:
    An instantiated DocsGateway object.

  Raises:
    Exception: If the user provided does not have a gdata
        access token stored in the datastore.
  """
  token = gdata.gauth.AeLoad(email)
  if not token:
    raise Exception('Error no gdata token for the user')
  docs_client = gdata.docs.client.DocsClient(source='Bite', auth_token=token)
  return DocsGateway(docs_client)
def GatewayForAppEngineForCurrentUser():
  """Creates a DocsGateway for the currently signed-in user's docs.

  NOTE: See GatewayForAppEngine() for assumptions we make.

  Returns:
    An instantiated DocsGateway object.

  Raises:
    Exception: If there is no current user or the current user
        does not have a stored gdata access token.
  """
  current_user = users.get_current_user()
  if not current_user:
    raise Exception('Login required!')
  return GatewayForAppEngine(current_user.email())
def GatewayForClientLogin(username, password):
  """Builds a DocsGateway authenticated through ClientLogin.

  Uses ClientLogin (username/password) rather than AuthSub.

  Args:
    username: The account name to authenticate as.
    password: The password for that account.

  Returns:
    A DocsGateway wrapping the authenticated client.
  """
  docs_client = gdata.docs.client.DocsClient(source='Bite')
  docs_client.ClientLogin(username, password, docs_client.source)
  return DocsGateway(docs_client)
class EasyDocsEntry(object):
  """Provides an abstraction layer between users and the Google Docs api."""

  def __init__(self, docs_entry):
    """Caches the commonly used fields of a raw docs entry."""
    self.title = docs_entry.title.text
    self.src_url = docs_entry.content.src
    self.resource_id = docs_entry.resource_id.text
    self.docs_entry = docs_entry

  def GetDocumentType(self):
    """Returns this entry's document type (document, spreadsheet, ...)."""
    return self.docs_entry.get_document_type()

  def IsFolder(self):
    """Returns True iff this entry represents a folder (collection)."""
    doc_type = self.docs_entry.get_document_type()
    return doc_type == gdata.docs.data.COLLECTION_LABEL

  def __repr__(self):
    return '(' + ', '.join([self.title, self.src_url, self.resource_id]) + ')'
class DocsGateway(object):
  """Provides an interface to interact with Google Docs.

  Thin wrapper around a gdata.docs.client.DocsClient that returns
  EasyDocsEntry objects instead of raw gdata resources.
  """

  def __init__(self, client):
    """Constructs a new DocsGateway object.

    Args:
      client: An instantiated gdata.docs.client.DocsClient() to
          use to access docs.
    """
    self.client = client

  def GetRootFolders(self):
    """Returns the root folders in the user's docs account.

    Returns:
      A list of EasyDocsEntry objects, one for each root folder.
    """
    return self.GetFolderContents('/feeds/default/private/full/-/folder')

  def GetAllDocs(self):
    """Returns every resource in the user's docs account.

    Returns:
      A list of EasyDocsEntry objects, one per resource.
    """
    return [EasyDocsEntry(entry) for entry in self.client.GetEverything()]

  def GetFolderContents(self, folder_src_url):
    """Returns all the doc entries in the given folder including subfolders.

    Args:
      folder_src_url: The source url for the folder entry.

    Returns:
      A list of EasyDocsEntry objects, one for each file in the folder.
    """
    feed = self.client.GetAllResources(uri=folder_src_url)
    return [EasyDocsEntry(entry) for entry in feed]

  def GetSubFolders(self, folder_src_url):
    """Returns only the folders beneath a given parent folder.

    Args:
      folder_src_url: The source url for the parent folder.

    Returns:
      A list of EasyDocsEntry objects, one for each subfolder.
    """
    entries = self.GetFolderContents(folder_src_url)
    return [entry for entry in entries if entry.IsFolder()]

  def CreateRootFolder(self, folder_name):
    """Creates a folder in the user's docs account at the root level.

    Args:
      folder_name: What the folder will be called. Spaces allowed.

    Returns:
      An EasyDocsEntry describing the new folder.
    """
    resource = gdata.docs.data.Resource(type=gdata.docs.data.COLLECTION_LABEL,
                                        title=folder_name)
    return EasyDocsEntry(self.client.CreateResource(resource))

  def CreateSubFolder(self, folder_name, parent_folder_resource_id):
    """Creates a folder in the user's docs account within the specified folder.

    Args:
      folder_name: What the folder will be called. Spaces allowed.
      parent_folder_resource_id: (string) The resource id of the parent folder.

    Returns:
      An EasyDocsEntry describing the new folder.
    """
    resource = gdata.docs.data.Resource(type=gdata.docs.data.COLLECTION_LABEL,
                                        title=folder_name)
    parent = self.client.GetResourceById(parent_folder_resource_id)
    return EasyDocsEntry(
        self.client.CreateResource(resource, collection=parent))

  def CreateNewDoc(self, title, contents=None):
    """Creates a new doc in the user's root document list."""
    return self.CreateNewDocInFolder(title=title, contents=contents)

  def CreateNewDocInFolder(self, title, parent_folder_src_url=None,
                           contents=None, content_type='text/plain'):
    """Creates a new document inside of the specified folder.

    All new documents are saved as plain text unless content_type
    says otherwise.

    Args:
      title: What the new doc will be called.
      parent_folder_src_url: The docs url of the parent folder.
      contents: An optional argument used to set the initial contents.
      content_type: An optional argument used to set the initial content type.

    Returns:
      An EasyDocsEntry object describing the new doc.
    """
    if not contents:
      contents = ''
    resource = gdata.docs.data.Resource(title=title)
    parent = None
    if parent_folder_src_url:
      parent = self.client.GetResourceBySelfLink(parent_folder_src_url)
    media_source = gdata.data.MediaSource(
        file_handle=StringIO.StringIO(contents),
        content_length=len(contents),
        content_type=content_type)
    return EasyDocsEntry(self.client.CreateResource(resource, media_source,
                                                    collection=parent))

  def GetFileContentAsText(self, src_url):
    """Returns a doc's content as text.

    Args:
      src_url: The DocsEntry resource url.

    Returns:
      The document text in string format.
    """
    # Bug fix: only join with '&' when the url already carries a query
    # string; a bare resource url needs '?' before the first parameter.
    separator = '&' if '?' in src_url else '?'
    src_url += separator + urllib.urlencode({'exportFormat': 'txt'})
    contents = self.client._get_content(src_url)
    # Strip the UTF-8 byte order mark (ef bb bf) that Google Docs
    # inserts at the beginning of exported files.
    # NOTE(review): assumes the BOM is always present; if it ever is
    # not, this drops the first three real bytes -- confirm.
    return contents[3:]

  def UpdateDocument(self, resource_id, new_title=None, new_contents=None,
                     content_type='text/plain'):
    """Replaces the title and/or text of a document with those passed in.

    Args:
      resource_id: The resource id of the docs entry.
      new_title: An optional new title string, if the title is to be changed.
      new_contents: An optional new content string.
      content_type: An optional string describing the content type of the
          new contents.
    """
    if not new_title and not new_contents:
      return  # Nothing to change; avoid a pointless round trip.
    doc_entry = self.client.GetResourceById(resource_id)
    if new_title:
      doc_entry.title.text = new_title
    media_source = None
    if new_contents:
      media_source = gdata.data.MediaSource(
          file_handle=StringIO.StringIO(new_contents),
          content_length=len(new_contents),
          content_type=content_type)
    self.client.UpdateResource(doc_entry, media=media_source)

  def Delete(self, resource_id):
    """Deletes a document."""
    doc_entry = self.client.GetResourceById(resource_id)
    self.client.DeleteResource(doc_entry)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""RunTesterMap model and related logic.
RunTesterMap stores the relationship between a CompatRun
and a User. It tracks the runs a user is subscribed to.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.api import memcache
from google.appengine.ext import db
from models.compat import run as compat_run
class RunTesterMap(db.Model):
  """Tracks the relationship between a CompatRun and a User."""
  # The compatibility run the tester is subscribed to.
  run = db.ReferenceProperty(compat_run.CompatRun)
  # The subscribed tester; required so a mapping is never anonymous.
  user = db.UserProperty(required=True)
def GetMappingKeyName(run, user):
  """Builds the unique key-name string for a run/user mapping."""
  run_name = run.key().name()
  tester_id = str(user.user_id())
  return 'RunTesterMap_%s_%s' % (run_name, tester_id)
def GetMappingKey(run, user):
  """Returns the unique db.Key object for the given a run and user."""
  key_name = GetMappingKeyName(run, user)
  return db.Key.from_path('RunTesterMap', key_name)
def AddMapping(run, user):
  """Transactionally creates the mapping between a run and a user.

  The tester's cached mapping list is invalidated so the next read
  recomputes it.
  """
  def _Txn():
    new_mapping = RunTesterMap(key_name=GetMappingKeyName(run, user),
                               user=user, run=run)
    new_mapping.put()
    # Drop the cached per-user mappings so they get rebuilt.
    memcache.delete(GetMappingsForTesterKeyName(user))
    return new_mapping
  return db.run_in_transaction(_Txn)
def RemoveMapping(run, user):
  """Transactionally deletes the mapping between run and user."""
  def _Txn():
    db.delete(GetMappingKey(run, user))
    # Invalidate the cached per-user mappings.
    memcache.delete(GetMappingsForTesterKeyName(user))
  db.run_in_transaction(_Txn)
def GetMappingsForTesterKeyName(user):
  """Builds the memcache key under which a tester's mappings are cached."""
  return 'RunTesterMap_Tester_' + str(user.user_id())
def _PrefetchRefprops(entities, *props):
  """Prefetches reference properties on the given list of entities.

  Issues a single batch db.get() for all referenced keys instead of
  one datastore round trip per entity/property pair.
  """
  fields = [(entity, prop) for entity in entities for prop in props]
  ref_keys = [prop.get_value_for_datastore(entity) for entity, prop in fields]
  resolved = dict((e.key(), e) for e in db.get(set(ref_keys)))
  for (entity, prop), ref_key in zip(fields, ref_keys):
    prop.__set__(entity, resolved[ref_key])
  return entities
def GetMappingsForTester(user, prefetch_ref_properties=True):
  """Returns a list of mappings associated with the given user."""
  cache_key = GetMappingsForTesterKeyName(user)
  # NOTE(review): the memcache read is disabled (was memcache.get(cache_key)),
  # making the cache write-only -- confirm whether that is still intended.
  mappings = None
  if mappings is None:
    all_runs = compat_run.GetRuns()
    mapping_keys = [GetMappingKey(run, user) for run in all_runs]
    mappings = RunTesterMap.get(mapping_keys)
    if mappings:
      # get() returns None for keys with no entity; drop those,
      # e.g. [None, None, None] -> [].
      mappings = [item for item in mappings if item is not None]
      memcache.set(cache_key, mappings)
  if prefetch_ref_properties:
    return _PrefetchRefprops(mappings, RunTesterMap.run)
  else:
    return mappings
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests model and associated logic."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import hashlib
import sha
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import deferred
from models.compat import browser as compat_browser
from models.compat import result as compat_result
from models.compat import run as compat_run
from models.compat import run_site_map
class Test(db.Model):
  """Tracks assignments of sites to users."""
  # Creation/modification timestamps, maintained automatically.
  created = db.DateTimeProperty(required=False, auto_now_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
  # Optional starting URL and textual verification steps for the test.
  start_url = db.LinkProperty(required=False)
  steps = db.TextProperty(required=False)
  # The compatibility run this test belongs to.
  run = db.ReferenceProperty(
      required=True,
      reference_class=compat_run.CompatRun,
      collection_name='tests')
  # The browser version the test should be executed against.
  browser_version = db.ReferenceProperty(
      required=True,
      reference_class=compat_browser.BrowserVersion,
      collection_name='tests')
  # Optional back-reference to the run/site mapping that spawned this test.
  mapping = db.ReferenceProperty(
      required=False,
      reference_class=run_site_map.RunSiteMap,
      collection_name='tests')
  # Filled in once the test has been executed.
  result = db.ReferenceProperty(
      required=False,
      reference_class=compat_result.Result,
      collection_name='test')
  # Reference to the assignment claiming this test (no reference_class,
  # so any model kind may be referenced here).
  assignment = db.ReferenceProperty(required=False)
def GetTestKeyName(start_url, steps, browser_version,
                   run, mapping):
  """Returns a str used to uniquely identify a test.

  Args:
    start_url: Start url (if any) this test needs to start at.
    steps: Verification steps that need to be performed on a given site.
    browser_version: BrowserVersion object this test is associated with.
    run: Run the test is associated with.
    mapping: RunSiteMap object this test is associated with.

  Returns:
    A str that can be used to uniquely identify a given test.
  """
  mapping_id = ''
  if mapping:
    mapping_id = str(mapping.key().id_or_name())
  # TODO(alexto): Use dictionary substitution.
  fingerprint = '%s_%s_%s_%s_%s' % (start_url,
                                    steps,
                                    str(browser_version.key().id_or_name()),
                                    str(run.key().id_or_name()),
                                    mapping_id)
  # hashlib.sha1 replaces the deprecated sha module; it produces the
  # same digest, so existing key names are unchanged.
  return 'Test_%s' % hashlib.sha1(fingerprint.encode('utf-8')).hexdigest()
def AddTest(start_url, steps, browser_version, run,
            mapping=None):
  """Creates (or fetches the existing) test for the given parameters.

  Args:
    start_url: Start url (if any) this test needs to start at.
    steps: Verification steps that need to be performed on a given site.
    browser_version: BrowserVersion object this test is associated with.
    run: Run the test is associated with.
    mapping: RunSiteMap object this test is associated with.

  Returns:
    A Test object.
  """
  key_name = GetTestKeyName(start_url=start_url,
                            steps=steps,
                            browser_version=browser_version,
                            run=run,
                            mapping=mapping)
  # get_or_insert makes the add idempotent for re-run deferred tasks.
  return Test.get_or_insert(key_name=key_name,
                            start_url=start_url,
                            steps=steps,
                            run=run,
                            browser_version=browser_version,
                            mapping=mapping)
def _FetchAll(q):
"""Gets all entities from the datastore."""
results = []
curr_result = q.fetch(9999)
cursor = q.cursor()
while curr_result:
results.extend(curr_result)
q = q.with_cursor(cursor)
curr_result = q.fetch(9999)
cursor = q.cursor()
return results
def GetTestsForBrowser(browser_version, keys_only=False):
  """Gets all tests associated with the given browser version."""
  query = Test.all(keys_only=keys_only)
  query.filter('browser_version = ', browser_version)
  return _FetchAll(query)
def GetTestsRemainingForBrowser(browser_version, keys_only=False):
  """Gets all tests still awaiting execution for the given browser version.

  A test is 'remaining' when it has neither a result nor an assignment.
  """
  query = Test.all(keys_only=keys_only)
  query.filter('browser_version = ', browser_version)
  query.filter('result = ', None)
  query.filter('assignment = ', None)
  query.order('modified')
  return _FetchAll(query)
def GetTestsRemainingFroRunAndBrowser(
    run, browser_version, keys_only=False):
  """Gets all tests remaining to execute for the given browser and run.

  NOTE: the 'Fro' typo in the function name is kept so existing
  callers continue to work.
  """
  query = Test.all(keys_only=keys_only)
  query.filter('run = ', run)
  query.filter('browser_version = ', browser_version)
  query.filter('result = ', None)
  query.filter('assignment = ', None)
  query.order('modified')
  return _FetchAll(query)
def SetAssignment(key, assignment):
  """Transactionally records which assignment claimed the given test."""
  def _ApplyAssignment():
    entity = Test.get(key)
    entity.assignment = assignment
    entity.put()
    return entity
  return db.run_in_transaction(_ApplyAssignment)
def SetResult(key, result):
  """Transactionally stores the result for the given test."""
  def _ApplyResult():
    entity = Test.get(key)
    entity.result = result
    entity.put()
    return entity
  return db.run_in_transaction(_ApplyResult)
def AddTestsForBrowserVersion(browser_version):
  """Kicks off a deferred fan-out mapping all known tests to the browser."""
  deferred.defer(SpawnAddTestsForMappings,
                 _queue='tests-queue',
                 browser_version=browser_version)
def SpawnAddTestsForMappings(browser_version):
  """Fan-out operation to map tests with the given browser version."""
  for site_mapping in run_site_map.GetMappings():
    deferred.defer(AddTestForMapping,
                   mapping=site_mapping,
                   browser_version=browser_version,
                   _queue='tests-queue')
def AddTestForMapping(mapping, browser_version):
  """Defers creation of the test joining the mapping and browser version."""
  # TODO(alexto): Make queue names constants.
  deferred.defer(AddTest,
                 start_url=mapping.site.url,
                 steps=mapping.verification.steps,
                 run=mapping.run,
                 browser_version=browser_version,
                 mapping=mapping,
                 _queue='tests-queue')
def AddTestForAllBrowserVersions(mapping):
  """Adds tests for the given RunSiteMap across all browser versions."""
  all_versions = compat_browser.GetBrowserVersions()
  AddTestForBrowserVersions(mapping, all_versions)
def AddTestForBrowserVersions(mapping, browser_versions):
  """Adds tests pairing the given RunSiteMap with each browser version."""
  for version in browser_versions:
    deferred.defer(AddTestForMapping,
                   mapping=mapping,
                   browser_version=version,
                   _queue='tests-queue')
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""RunSiteMap model and associated logic.
RunSiteMap stores the relationship between a CompatRun, Site,
and VerificationSteps. It keeps tracks of the sites that need
to be tested for a particular run. This information can be used
to also answer which sites remain to be tested for a
particular run and/or browser version.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.ext import db
from models.compat import run as compat_run
from models.compat import site as compat_site
from models.compat import verification as compat_verification
class RunSiteMap(db.Model):
  """Tracks the relationship between a Run, Site and VerificationSteps."""
  run = db.ReferenceProperty(compat_run.CompatRun)
  site = db.ReferenceProperty(compat_site.Site)
  verification = db.ReferenceProperty(compat_verification.VerificationSteps)
  # Hidden mappings are excluded from the default GetMappings() listing.
  hidden = db.BooleanProperty(required=False, default=False)
def GetMappingKeyName(run, site, verification):
  """Builds the unique key-name string for a run/site/verification triple."""
  parts = (run.key().name(), site.key().name(), verification.key().name())
  return 'RunSiteMap_%s_%s_%s' % parts
def AddMapping(run, site, verification):
  """Transactionally creates the mapping joining run, site and verification."""
  def _Txn():
    new_mapping = RunSiteMap(
        key_name=GetMappingKeyName(run, site, verification),
        run=run,
        site=site,
        verification=verification)
    new_mapping.put()
    return new_mapping
  return db.run_in_transaction(_Txn)
def GetMappingByKeyName(key_name):
  """Returns the RunSiteMap entity stored under the given key name, if any."""
  return RunSiteMap.get_by_key_name(key_name)
def _PrefetchRefprops(entities, *props):
  """Pre-fetches reference properties on the given list of entities.

  Resolves every referenced key with a single batch db.get() call.
  """
  fields = [(entity, prop) for entity in entities for prop in props]
  ref_keys = [prop.get_value_for_datastore(entity) for entity, prop in fields]
  resolved = dict((e.key(), e) for e in db.get(set(ref_keys)))
  for (entity, prop), ref_key in zip(fields, ref_keys):
    prop.__set__(entity, resolved[ref_key])
  return entities
def PrefetchRefProps(entities):
  """Pre-fetches the run, site, and verification reference properties."""
  ref_props = (RunSiteMap.run, RunSiteMap.site, RunSiteMap.verification)
  return _PrefetchRefprops(entities, *ref_props)
def _FetchAll(q, prefetch_ref_properties):
"""Gets all entities from the datastore."""
results = []
curr_result = q.fetch(9999)
cursor = q.cursor()
while curr_result:
results.extend(curr_result)
q = q.with_cursor(cursor)
curr_result = q.fetch(9999)
cursor = q.cursor()
if prefetch_ref_properties:
return PrefetchRefProps(results)
else:
return results
def GetMappings(exclude_hidden=True, prefetch_ref_properties=True):
  """Returns all RunSiteMap entities, optionally including hidden ones."""
  query = RunSiteMap.all()
  if exclude_hidden:
    query.filter('hidden = ', False)
  return _FetchAll(query, prefetch_ref_properties)
def GetMappingsForRun(run, prefetch_ref_properties=True):
  """Returns a list of mappings associated with a given run."""
  query = RunSiteMap.all()
  query.filter('run = ', run)
  return _FetchAll(query, prefetch_ref_properties)
def SetVisibility(key_name, hidden):
  """Transactionally sets the hidden flag on the mapping."""
  def _ApplyVisibility():
    target = RunSiteMap.get_by_key_name(key_name)
    target.hidden = hidden
    target.put()
    return target
  return db.run_in_transaction(_ApplyVisibility)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""VerificationSteps model and associated logic.
VerificationSteps stores information about the verification steps that need
to be performed on a given site during a compatibility run.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.ext import db
class VerificationSteps(db.Model):
  """Tracks verification steps."""
  # Short unique name; also embedded in the entity key name.
  name = db.StringProperty(required=True)
  description = db.StringProperty(required=False)
  # The actual steps text to perform on a site.
  steps = db.TextProperty(required=True)
  # Hidden steps are excluded from the default GetVerificationSteps() listing.
  hidden = db.BooleanProperty(required=False, default=False)
  # Audit fields, maintained automatically by the datastore bindings.
  created = db.DateTimeProperty(required=False, auto_now_add=True)
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
  modified_by = db.UserProperty(required=False, auto_current_user=True)
def GetVerificationStepsKeyName(name):
  """Builds the datastore key name for a VerificationSteps entity."""
  return 'VerificationSteps_' + name
def AddVerificationSteps(name, steps, description=''):
  """Creates and stores verification steps with the given properties."""
  key_name = GetVerificationStepsKeyName(name)
  verification = VerificationSteps(key_name=key_name,
                                   name=name,
                                   description=description,
                                   steps=steps)
  verification.put()
  return verification
def GetVerificationStepsByKeyName(key_name):
  """Fetches the VerificationSteps entity stored under key_name, if any."""
  return VerificationSteps.get_by_key_name(key_name)
def GetVerificationSteps(exclude_hidden=True):
  """Returns every VerificationSteps entity, following query cursors."""
  query = VerificationSteps.all()
  if exclude_hidden:
    query.filter('hidden = ', False)
  collected = []
  while True:
    batch = query.fetch(9999)
    cursor = query.cursor()
    if not batch:
      break
    collected.extend(batch)
    query = query.with_cursor(cursor)
  return collected
def Update(key_name, name, description, steps):
  """Updates the verification steps entity with the specified key name."""
  def _Txn():
    verification = VerificationSteps.get_by_key_name(key_name)
    verification.name = name
    verification.description = description
    verification.steps = steps
    verification.put()
    return verification
  return db.run_in_transaction(_Txn)
def SetVisibility(key_name, hidden):
  """Sets the visibility of the verification steps with the given key name."""
  def _Txn():
    verification = VerificationSteps.get_by_key_name(key_name)
    verification.hidden = hidden
    verification.put()
    return verification
  return db.run_in_transaction(_Txn)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Site model and associated logic.
Site stores information about a site used during a compatibility run.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.ext import db
from utils import url_util
class Site(db.Model):
  """Tracks sites available."""
  url = db.LinkProperty(required=True)
  # Hidden sites are excluded from GetSites().
  hidden = db.BooleanProperty(required=False, default=False)
  # Audit fields, maintained automatically by the datastore bindings.
  created = db.DateTimeProperty(required=False, auto_now_add=True)
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
  modified_by = db.UserProperty(required=False, auto_current_user=True)
def GetSiteKeyName(url):
  """Returns the datastore key name for a site, keyed by the url's hash."""
  return 'Site_' + url_util.HashUrl(url)
def AddSite(url):
  """Adds a site with the specified URL.

  The url is lower-cased before being stored and hashed into the key.
  """
  normalized_url = url.lower()
  site = Site(key_name=GetSiteKeyName(normalized_url), url=normalized_url)
  site.put()
  return site
def GetSiteByKeyName(key_name):
  """Fetches the Site entity stored under the given key name, if any."""
  return Site.get_by_key_name(key_name)
def GetSites():
  """Returns a query over all non-hidden Site entities."""
  visible = Site.all()
  visible.filter('hidden = ', False)
  return visible
def Update(key_name, url):
  """Transactionally replaces the url of the site with the given key name."""
  def _ApplyUpdate():
    site = Site.get_by_key_name(key_name)
    site.url = url
    site.put()
    return site
  return db.run_in_transaction(_ApplyUpdate)
def SetVisibility(key_name, hidden):
  """Transactionally sets the hidden flag of the given site."""
  def _ApplyVisibility():
    site = Site.get_by_key_name(key_name)
    site.hidden = hidden
    site.put()
    return site
  return db.run_in_transaction(_ApplyVisibility)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BrowserVersion model and associated logic.
BrowserVersion model stores information about a given browser version.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.api import memcache
from google.appengine.ext import db
class BrowserVersion(db.Model):
  """Tracks a unique browser/platform version combination.

  Entities are keyed by a composite key name built from all five
  identifying fields (see GetBrowserVersionKeyName).
  """
  # Name of the OS.
  platform = db.StringProperty(required=True)
  # Platform version string.
  platform_version = db.StringProperty(required=True)
  # WebKit version number.
  webkit_version = db.StringProperty(required=True)
  # Chrome version number.
  chrome_version = db.StringProperty(required=True)
  # User agent language.
  user_locale = db.StringProperty(required=True)
  # Auto-populated audit fields.
  created = db.DateTimeProperty(required=False, auto_now_add=True)
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
def GetBrowserVersionKeyName(
    platform, platform_version, webkit_version,
    chrome_version, locale):
  """Builds the key name string that uniquely identifies a browser.

  Args:
    platform: str with the name of the OS.
    platform_version: str with the Platform version string.
    webkit_version: str with the WebKit version number.
    chrome_version: str with the Chrome version number.
    locale: str with the user agent language.

  Returns:
    A str that can be used to uniquely identify a given browser version.
  """
  parts = (platform, platform_version, webkit_version,
           chrome_version, locale)
  return 'BrowserVersion_' + '_'.join(parts)
def GetBrowserVersionKey(
    platform, platform_version, webkit_version,
    chrome_version, locale):
  """Builds the db.Key that uniquely identifies a browser version.

  Args:
    platform: str with the name of the OS.
    platform_version: str with the Platform version string.
    webkit_version: str with the WebKit version number.
    chrome_version: str with the Chrome version number.
    locale: str with the user agent language.

  Returns:
    A db.Key object.
  """
  key_name = GetBrowserVersionKeyName(platform, platform_version,
                                      webkit_version, chrome_version,
                                      locale)
  return db.Key.from_path('BrowserVersion', key_name)
def GetBrowserVersionByKeyName(key_name):
  """Fetches the BrowserVersion stored under the given key name.

  Args:
    key_name: str that uniquely identifies a browser version.

  Returns:
    The matching BrowserVersion entity, or None.
  """
  return BrowserVersion.get_by_key_name(key_name)
def GetBrowserVersionsByKeyName(key_names):
  """Batch-fetches the browsers for the specified key names.

  Args:
    key_names: list of key name strings.

  Returns:
    A list of BrowserVersion entities (None entries for missing keys).
  """
  keys = []
  for name in key_names:
    keys.append(db.Key.from_path('BrowserVersion', name))
  return db.get(keys)
def GetBrowserVersion(platform, platform_version,
                      webkit_version, chrome_version, locale):
  """Gets the browser entity for the specified parameters, via memcache."""
  key_name = GetBrowserVersionKeyName(platform, platform_version,
                                      webkit_version, chrome_version,
                                      locale)
  cached = memcache.get(key_name)
  if cached:
    return cached
  # Cache miss (or a previously cached None): hit the datastore and
  # refresh the cache with whatever we found.
  fetched = GetBrowserVersionByKeyName(key_name)
  memcache.set(key_name, fetched)
  return fetched
def GetOrInsertBrowserVersion(platform, platform_version,
                              webkit_version, chrome_version, locale):
  """Gets or inserts the BrowserVersion object for the given parameters.

  Args:
    platform: str with the name of the OS.
    platform_version: str with the Platform version string.
    webkit_version: str with the WebKit version number.
    chrome_version: str with the Chrome version number.
    locale: str with the user agent language.

  Returns:
    A BrowserVersion object.
  """
  browser_version = GetBrowserVersion(platform,
                                      platform_version,
                                      webkit_version,
                                      chrome_version,
                                      locale)
  if not browser_version:
    key_name = GetBrowserVersionKeyName(platform,
                                        platform_version,
                                        webkit_version,
                                        chrome_version,
                                        locale)
    # get_or_insert runs in a transaction, so two concurrent requests for
    # a brand-new browser version cannot both construct and put() the
    # entity (the previous check-then-put sequence allowed that race).
    browser_version = BrowserVersion.get_or_insert(
        key_name,
        platform=platform,
        platform_version=platform_version,
        webkit_version=webkit_version,
        chrome_version=chrome_version,
        user_locale=locale)
  return browser_version
def _FetchAll(q):
"""Gets all entities from the datastore."""
results = []
curr_result = q.fetch(9999)
cursor = q.cursor()
while curr_result:
results.extend(curr_result)
q = q.with_cursor(cursor)
curr_result = q.fetch(9999)
cursor = q.cursor()
return results
def GetBrowserVersionsByChromeVersion(chrome_version):
  """Returns all BrowserVersion entities matching the Chrome version."""
  query = BrowserVersion.all().filter('chrome_version =', chrome_version)
  return _FetchAll(query)
def GetBrowserVersions(order_by='chrome_version', reverse=False):
  """Returns all BrowserVersion entities, optionally ordered.

  Args:
    order_by: property name to order by, or a falsy value for no order.
    reverse: whether to sort in descending order.

  Returns:
    A list of BrowserVersion entities.
  """
  query = BrowserVersion.all()
  if order_by:
    query.order('-%s' % order_by if reverse else order_by)
  return _FetchAll(query)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Assignment model and associated logic.
Assignment stores information about run-site-maps assignment
to individual users (a.k.a. testers).
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from datetime import datetime
from google.appengine.api import memcache
from google.appengine.ext import db
from models.compat import browser
from models.compat import result as compat_result
from models.compat import run as compat_run
from models.compat import run_site_map as compat_run_site_map
from models.compat import run_tester_map
from models.compat import tests
class Assignment(db.Model):
  """Tracks assignments of sites to users.

  Links a tester (user) to an assigned test, the run/site mapping it
  came from, and the browser version it targets, and records the
  tester's visit activity while the assignment is open.
  """
  # Run/site/verification mapping the assigned test came from.
  run_site_map = db.ReferenceProperty(
      required=False, reference_class=compat_run_site_map.RunSiteMap)
  # The tester this assignment belongs to.
  user = db.UserProperty(required=True)
  created = db.DateTimeProperty(required=False, auto_now_add=True)
  # First/last navigation time and visit count, maintained by
  # IncrementNavigationData.
  first_visit = db.DateTimeProperty(required=False)
  last_visit = db.DateTimeProperty(required=False)
  visits = db.IntegerProperty(required=False, default=0)
  # Browser version the test is assigned against.
  browser_version = db.ReferenceProperty(
      browser.BrowserVersion, required=True)
  # The assigned test.
  test = db.ReferenceProperty(required=True,
                              reference_class=tests.Test)
def IncrementNavigationData(assignment):
  """Records one navigation by a tester to his/her assigned test.

  Updates last_visit (and first_visit on the first call), bumps the
  visit counter, and persists the assignment.
  """
  now = datetime.now()
  assignment.last_visit = now
  if assignment.first_visit is None or not assignment.first_visit:
    assignment.first_visit = now
  assignment.visits += 1
  assignment.put()
def GetAssignmentForTesterKeyName(user):
  """Returns the key name string that identifies a user's assignment."""
  return 'Assignment_' + str(user.user_id())
def GetAssignmentForTesterKey(user):
  """Returns the db.Key that uniquely identifies the user's assignment."""
  key_name = GetAssignmentForTesterKeyName(user)
  return db.Key.from_path('Assignment', key_name)
def GetAssignmentByKeyName(key_name):
  """Fetches the Assignment stored under the given key name, or None."""
  return Assignment.get_by_key_name(key_name)
def GetAssignmentForTester(user):
  """Gets the Assignment for the given user, if any, via memcache."""
  key_name = GetAssignmentForTesterKeyName(user)
  cached = memcache.get(key_name)
  if cached:
    return cached
  # Cache miss: consult the datastore and refresh the cache.
  fetched = Assignment.get_by_key_name(key_name)
  memcache.set(key_name, fetched)
  return fetched
def GetOrAssignTest(user, browser_version):
  """Returns the user's current assignment, creating one if needed."""
  existing = GetAssignmentForTester(user)
  if existing:
    return existing
  return AssignTest(user=user, browser_version=browser_version)
def AssignTest(user, browser_version):
  """Assigns a mapping to the given user.

  Walks the runs the tester is signed up for and assigns the first test
  that still lacks a result for the given browser version.

  Args:
    user: the tester to assign a test to.
    browser_version: BrowserVersion the test must target.

  Returns:
    The newly created Assignment, or None if no tests remain.
  """
  tester_mappings = run_tester_map.GetMappingsForTester(user)
  for tester_mapping in tester_mappings:
    run = tester_mapping.run
    tests_remaining = GetTestsRemainingForRun(run, browser_version)
    if tests_remaining:
      test = tests_remaining[0]
      key_name = GetAssignmentForTesterKeyName(user)
      assignment = Assignment(key_name=key_name,
                              run_site_map=test.mapping,
                              user=user,
                              browser_version=browser_version,
                              test=test)
      assignment.put()
      # Record the assignment on the test so it is not handed out twice.
      tests.SetAssignment(test.key(), assignment=assignment)
      return assignment
  return None
def LogResult(user, assignment, browser_version, succeed, comment, bugs):
  """Commits the result for the given mapping and removes the assignment.

  Copies the assignment's visit bookkeeping into a new Result, clears
  the assignment reference on the test, records the result on the test,
  and finally deletes the assignment itself.

  Args:
    user: the tester submitting the result.
    assignment: the Assignment being completed.
    browser_version: BrowserVersion the result applies to.
    succeed: bool pass/fail outcome.
    comment: str free-form tester notes.
    bugs: str associated bug references.

  Returns:
    The newly created Result.
  """
  result = compat_result.AddResult(user=user,
                                   mapping=assignment.run_site_map,
                                   browser_version=browser_version,
                                   succeed=succeed,
                                   assigned=assignment.created,
                                   first_visit=assignment.first_visit,
                                   last_visit=assignment.last_visit,
                                   visits=assignment.visits,
                                   comment=comment,
                                   bugs=bugs)
  test_key = assignment.test.key()
  tests.SetAssignment(test_key, assignment=None)
  tests.SetResult(test_key, result=result)
  # Remove assignment from the given user.
  RemoveAssignment(assignment)
  return result
def RemoveAssignment(assignment):
  """Deletes the assignment and evicts its memcache entry."""
  # Drop the cached copy first so readers do not see a stale assignment.
  memcache.delete(assignment.key().name())
  return assignment.delete()
def SkipAssignment(user, browser_version, assignment):
  """Drops the current assignment and tries to hand out a new one."""
  RemoveAssignment(assignment=assignment)
  return AssignTest(user=user, browser_version=browser_version)
def GetTestsRemainingForRun(run, browser_version):
  """Returns a list of mappings with no corresponding results."""
  # NOTE(review): the helper's name is misspelled ('Fro' for 'For') in
  # the tests module; renaming it requires a coordinated change there.
  return tests.GetTestsRemainingFroRunAndBrowser(
      run, browser_version)
def GetTestsRemainingForBrowser(browser_version):
  """Returns mappings that still lack a result across all runs."""
  return tests.GetTestsRemainingForBrowser(browser_version)
def AddMapping(run, site, verification, apply_to_all_versions=False,
               browser_versions=None):
  """Creates a run/site/verification mapping and its associated tests.

  Args:
    run: the run the mapping belongs to.
    site: the site being verified.
    verification: the verification to apply.
    apply_to_all_versions: create tests for every known browser version.
    browser_versions: otherwise, the specific versions to create tests for.

  Returns:
    The new mapping.
  """
  mapping = compat_run_site_map.AddMapping(run=run,
                                           site=site,
                                           verification=verification)
  if apply_to_all_versions:
    tests.AddTestForAllBrowserVersions(mapping)
  elif browser_versions:
    tests.AddTestForBrowserVersions(mapping, browser_versions)
  return mapping
def AddRun(name, description, source='local', project_id=None,
           project_name=None, labels=None):
  """Creates a new compatibility run via the compat_run model."""
  return compat_run.AddRun(name=name,
                           description=description,
                           source=source,
                           project_id=project_id,
                           project_name=project_name,
                           labels=labels)
def GetBrowserVersion(platform, platform_version,
                      webkit_version, chrome_version, locale):
  """Gets the BrowserVersion for the given parameter combination.

  If the browser is not a known version (presumably a new build), the
  new version is stored and tests are created for all mappings present
  at that moment.

  Args:
    platform: str with the name of the OS.
    platform_version: str with the Platform version string.
    webkit_version: str with the WebKit version number.
    chrome_version: str with the Chrome version number.
    locale: str with the user agent language.

  Returns:
    A BrowserVersion object.
  """
  version = browser.GetBrowserVersion(platform=platform,
                                      platform_version=platform_version,
                                      webkit_version=webkit_version,
                                      chrome_version=chrome_version,
                                      locale=locale)
  if version:
    return version
  version = browser.GetOrInsertBrowserVersion(
      platform=platform,
      platform_version=platform_version,
      webkit_version=webkit_version,
      chrome_version=chrome_version,
      locale=locale)
  # Create tests for this new browser version.
  tests.AddTestsForBrowserVersion(version)
  return version
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Admin model and associated logic.
Admin stores information necessary to manage
access to administrative tasks for site compatibility.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.ext import db
class Admin(db.Model):
  """Tracks admins by email.

  Entities are keyed by 'Admin_<lowercased email>' (see GetAdminKeyName).
  """
  email = db.StringProperty(required=True)
  # DeleteAdmin flips this flag rather than deleting the entity.
  active = db.BooleanProperty(required=True, default=True)
  # Auto-populated audit fields.
  created = db.DateTimeProperty(required=False, auto_now_add=True)
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
  modified_by = db.UserProperty(required=False, auto_current_user=True)
def GetAdminKeyName(email):
  """Returns a str used to uniquely identify an administrator.

  Args:
    email: str user email.

  Returns:
    A str that can be used to uniquely identify a given administrator.
  """
  return 'Admin_%s' % email.lower()
def GetAdminKey(email):
  """Returns the unique db.Key object for the given email.

  Args:
    email: str user email.

  Returns:
    A db.Key object.
  """
  key_name = GetAdminKeyName(email)
  return db.Key.from_path('Admin', key_name)
def GetAdmin(email):
  """Returns the Admin object for the given email.

  Args:
    email: str user email.

  Returns:
    An Admin object if one exists for this email, or None.
  """
  return db.get(GetAdminKey(email))
def AddAdmin(email):
  """Adds (or fetches) an admin with the specified email.

  Args:
    email: str user email.

  Returns:
    An Admin object.
  """
  return Admin.get_or_insert(key_name=GetAdminKeyName(email),
                             email=email.lower())
def DeleteAdmin(email):
  """Marks the admin with the given email as inactive.

  Args:
    email: str user email.
  """
  SetActive(email, False)
def SetActive(email, is_active):
  """Transactionally sets the active flag on the admin with this email.

  Args:
    email: str user email.
    is_active: whether the user is an active admin or not.

  Returns:
    The updated Admin object, or None if no such admin exists.
  """

  def _SetFlag():
    admin = GetAdmin(email)
    if admin:
      admin.active = is_active
      admin.put()
    return admin

  return db.run_in_transaction(_SetFlag)
def IsAdmin(email):
  """Returns whether the given email belongs to an active admin.

  Args:
    email: str user email.

  Returns:
    True iff an Admin entity exists for the email and is active.
  """
  admin = GetAdmin(email)
  return admin.active if admin else False
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Result model and associated logic.
Result stores information about a pass/fail result for
a given browser/run combination.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.ext import db
from models.compat import browser
from models.compat import run_site_map
class Result(db.Model):
  """Tracks results submitted by users for a given run and browser."""
  # Submitting tester; auto-filled with the current user on creation.
  user = db.UserProperty(required=True, auto_current_user_add=True)
  # Run/site/verification mapping the result belongs to.
  mapping = db.ReferenceProperty(
      required=False, reference_class=run_site_map.RunSiteMap)
  browser_version = db.ReferenceProperty(
      required=True, reference_class=browser.BrowserVersion)
  # Visit bookkeeping carried over from the assignment that produced
  # this result.
  assigned = db.DateTimeProperty(required=False)
  first_visit = db.DateTimeProperty(required=False)
  last_visit = db.DateTimeProperty(required=False)
  visits = db.IntegerProperty(required=False)
  # Pass/fail outcome plus optional notes and bug references.
  succeed = db.BooleanProperty(required=True)
  comment = db.StringProperty(required=False, default='')
  bugs = db.StringProperty(required=False, default='')
  created = db.DateTimeProperty(required=True, auto_now_add=True)
def AddResult(user, mapping, browser_version, succeed,
              assigned, first_visit, last_visit, visits,
              comment='', bugs=''):
  """Creates and stores a new Result entity."""
  new_result = Result(user=user,
                      mapping=mapping,
                      browser_version=browser_version,
                      assigned=assigned,
                      first_visit=first_visit,
                      last_visit=last_visit,
                      visits=visits,
                      succeed=succeed,
                      comment=comment,
                      bugs=bugs)
  new_result.put()
  return new_result
def _PrefetchRefprops(entities, *props):
  """Pre-fetches reference properties on the given list of entities.

  Resolves every (entity, reference property) pair with one batch db.get
  instead of a datastore round trip per dereference.

  Args:
    entities: list of model instances, patched in place.
    *props: ReferenceProperty descriptors to resolve on each entity.

  Returns:
    The same list of entities, with the reference properties populated.
  """
  fields = [(entity, prop) for entity in entities for prop in props]
  # Read the raw keys without dereferencing (which would hit the
  # datastore once per entity).
  ref_keys = [prop.get_value_for_datastore(x) for x, prop in fields]
  ref_entities = dict((x.key(), x) for x in db.get(set(ref_keys)))
  # Write the fetched entities back through the property descriptors.
  for (entity, prop), ref_key in zip(fields, ref_keys):
    prop.__set__(entity, ref_entities[ref_key])
  return entities
def PrefetchRefProps(entities):
  """Pre-fetches the browser_version reference on each result entity."""
  return _PrefetchRefprops(entities, Result.browser_version)
def _FetchAll(q, prefetch_ref_properties):
"""Gets all entities from the datastore."""
results = []
curr_result = q.fetch(9999)
cursor = q.cursor()
while curr_result:
results.extend(curr_result)
q = q.with_cursor(cursor)
curr_result = q.fetch(9999)
cursor = q.cursor()
if prefetch_ref_properties:
return PrefetchRefProps(results)
else:
return results
def GetResultsForUser(user, prefetch_ref_properties=True):
  """Returns all results submitted by the given user, newest first."""
  query = Result.all().filter('user = ', user).order('-created')
  return _FetchAll(query, prefetch_ref_properties)
def GetResultsForBrowserVersion(browser_version, keys_only=False,
                                prefetch_ref_properties=True):
  """Returns all results submitted for a given browser version."""
  query = Result.all(keys_only=keys_only)
  query.filter('browser_version = ', browser_version)
  return _FetchAll(query, prefetch_ref_properties)
def GetResultsQuery(keys_only=False):
  """Returns a fresh query over the Result model."""
  return Result.all(keys_only=keys_only)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""CompatRun model and associated logic.
CompatRun stores information about a compatibility run. A run
is the top most grouping for compatibility tests.It might contains
one or more Sites to be verified.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.ext import db
class Sources:
  """Enumerates the known origins of a compatibility run."""
  # Run created locally in this app (the default for CompatRun.source).
  LOCAL = 'local'
class CompatRun(db.Model):
  """Tracks the compatibility runs available.

  A run is the top-level grouping for compatibility tests.
  """
  name = db.StringProperty(required=True)
  description = db.StringProperty(required=True)
  # Hidden runs are excluded from GetRuns by default.
  hidden = db.BooleanProperty(required=False, default=False)
  # Auto-populated audit fields.
  created = db.DateTimeProperty(required=False, auto_now_add=True)
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
  modified_by = db.UserProperty(required=False, auto_current_user=True)
  # Where the run originated (see Sources).
  source = db.StringProperty(required=False, default=Sources.LOCAL)
  # Optional external project association and labels.
  project_id = db.StringProperty(required=False, default=None)
  project_name = db.StringProperty(required=False, default=None)
  labels = db.StringProperty(required=False, default=None)
def GetRunKeyName(name):
  """Returns the key name string that uniquely identifies a run."""
  return 'Run_' + name
def AddRun(name, description='', source='local', project_id=None,
           project_name=None, labels=None):
  """Transactionally creates a run with the specified attributes."""

  def _CreateRun():
    new_run = CompatRun(key_name=GetRunKeyName(name),
                        name=name,
                        description=description,
                        source=source,
                        project_id=project_id,
                        project_name=project_name,
                        labels=labels)
    new_run.put()
    return new_run

  return db.run_in_transaction(_CreateRun)
def GetRunByKeyName(key_name):
  """Fetches the CompatRun stored under the given key name, or None."""
  return CompatRun.get_by_key_name(key_name)
def GetRuns(exclude_hidden=True, source=None):
  """Returns all CompatRun entities, paging through with cursors.

  Args:
    exclude_hidden: whether to filter out hidden runs.
    source: if set, only return runs with this source.

  Returns:
    A list of CompatRun entities.
  """
  query = CompatRun.all()
  if exclude_hidden:
    query.filter('hidden = ', False)
  if source:
    query.filter('source = ', source)
  all_runs = []
  batch = query.fetch(9999)
  cursor = query.cursor()
  while batch:
    all_runs.extend(batch)
    query = query.with_cursor(cursor)
    batch = query.fetch(9999)
    cursor = query.cursor()
  return all_runs
def Update(key_name, name, description):
  """Transactionally updates the run specified by the given key_name."""

  def _UpdateRun():
    target = GetRunByKeyName(key_name)
    target.name = name
    target.description = description
    target.put()
    return target

  return db.run_in_transaction(_UpdateRun)
def SetVisibility(key_name, hidden):
  """Transactionally sets the hidden flag on the run with this key name."""

  def _SetHidden():
    target = GetRunByKeyName(key_name)
    target.hidden = hidden
    target.put()
    return target

  return db.run_in_transaction(_SetHidden)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tester model and associated logic.
Tester stores information necessary to manage
access to the test features of the site compatibility.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import re
from google.appengine.ext import db
class Tester(db.Model):
  """Tracks testers by email.

  Entities are keyed by 'Tester_<lowercased email>' (see GetKeyName).
  """
  email = db.StringProperty(required=True)
  # Whether the tester currently has access (see SetActive/IsActive).
  active = db.BooleanProperty(required=True, default=True)
  # Auto-populated audit fields.
  created = db.DateTimeProperty(required=True, auto_now_add=True)
  created_by = db.UserProperty(required=True, auto_current_user_add=True)
  modified = db.DateTimeProperty(required=True, auto_now=True)
  modified_by = db.UserProperty(required=True, auto_current_user=True)
def GetKeyName(email):
  """Returns a str used to uniquely identify a tester.

  Args:
    email: str user email.

  Returns:
    A str that can be used to uniquely identify a given tester.
  """
  return 'Tester_%s' % email.lower()
def GetKey(email):
  """Returns the unique db.Key object for the given email.

  Args:
    email: str user email.

  Returns:
    A db.Key object.
  """
  key_name = GetKeyName(email)
  return db.Key.from_path('Tester', key_name)
def Get(email):
  """Returns the Tester object for the given email.

  Args:
    email: str user email.

  Returns:
    A Tester object if one exists for this email, or None.
  """
  return db.get(GetKey(email))
def Add(email):
  """Creates and stores a tester with the specified email.

  Args:
    email: str user email.

  Returns:
    The stored Tester object.
  """
  new_tester = Tester(key_name=GetKeyName(email), email=email)
  new_tester.put()
  return new_tester
def AddOrUpdate(email, is_active):
  """Adds or updates the tester with the specified email.

  Args:
    email: str user email.
    is_active: whether the tester should be active.

  Returns:
    The stored Tester object with its active flag set to is_active.
  """
  tester = Tester.get_or_insert(key_name=GetKeyName(email),
                                email=email)
  tester.active = is_active
  # Persist the flag change: get_or_insert only writes the entity when
  # it is first created, so without this put() an update to an existing
  # tester's active status was silently lost.
  tester.put()
  return tester
def SetActive(email, is_active):
  """Transactionally sets the active flag of the tester with this email.

  Args:
    email: str user email.
    is_active: whether the user is active or not.

  Returns:
    The updated Tester object, or None if no such tester exists.
  """

  def _SetFlag():
    tester = Tester.get_by_key_name(GetKeyName(email))
    if tester:
      tester.active = is_active
      tester.put()
    return tester

  return db.run_in_transaction(_SetFlag)
def IsActive(email):
  """Returns whether the tester with the given email is active.

  Unknown google.com addresses are auto-registered as active testers.

  Args:
    email: str user email.

  Returns:
    True iff the tester exists and is active, or was auto-granted access.
  """
  tester = Tester.get_by_key_name(GetKeyName(email))
  if tester:
    return tester.active
  # The dot in 'google.com' must be escaped: the previous pattern
  # 'google.com$' also matched addresses such as 'user@googleXcom'.
  elif email and re.search(r'@(.+\.)?google\.com$', email,
                           flags=re.IGNORECASE):
    # Automatically grant Googlers access.
    Add(email)
    return True
  return False
def GetTesters():
  """Returns all Tester entities, paging through the query with cursors."""
  all_testers = []
  query = Tester.all()
  batch = query.fetch(9999)
  cursor = query.cursor()
  while batch:
    all_testers.extend(batch)
    query = query.with_cursor(cursor)
    batch = query.fetch(9999)
    cursor = query.cursor()
  return all_testers
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Describes metadata for the test storage back-end.
We currently use Google Docs as a storage back-end, but
this should be easy to change in the future.
"""
__author__ = 'michaelwill@google.com (Michael Williamson)'
import json
import logging
import re
import uuid
from google.appengine.ext import db
from config import settings
DEFAULT_NUMBER_PER_BATCH = 500
# In the bite test code, any references to legacy wtf ids have
# been replaced with the string 'legacy-<id>'.
# The most common occurrence of this was where a test
# case had a 'call()' statement
# defined inline. This is a regular expression to
# determine if the id passed to an AppEngine handler
# is a legacy id or not.
LEGACY_ID_REGEX = re.compile(r'legacy-([0-9]+)')
class StorageMetadata(db.Model):
  """Stores metadata associated with persistent bite text objects."""
  # An id that is independent of the AppEngine datastore.
  # When switching out the storage model, be sure to keep
  # this field the same, as it may be referenced in
  # the test code.
  id = db.StringProperty(required=True)
  # TODO(michaelwill): Ideally this would be
  # a reference property to our project model,
  # but at the moment, our project implementation
  # is not ready for prime time.
  project = db.StringProperty(required=True)
  # There are still certain situations where having
  # the old legacy wtf id around is useful, but new
  # tests should not set this property.
  legacy_wtf_id = db.StringProperty()
  # JSON blob holding the test revisions, with keys 'active',
  # 'backup1', and 'backup2' (see GetTestString).
  test = db.TextProperty(required=False)
  # Google Docs back-end references; set to '' by _SaveTransaction.
  docs_resource_url = db.StringProperty(required=False)
  docs_resource_id = db.StringProperty(required=False)
  test_name = db.StringProperty(required=True)

  def GetText(self):
    """Retrieves the active revision text blob for this storage entity."""
    return self._GetActiveTestVersion()

  def _GetActiveTestVersion(self):
    """Gets the active test version.

    Returns:
      The 'active' entry of the JSON test blob, or '' when no blob is set.
    """
    result = ''
    if self.test:
      test = json.loads(self.test)
      result = test['active']
    return result

  def Update(self, new_project, new_name, new_contents):
    """Updates the metadata and Google Docs using a transaction."""
    db.run_in_transaction(self._UpdateTransaction,
                          new_project, new_name, new_contents)

  def _UpdateTransaction(self, new_project, new_name, new_contents):
    """This transaction ensures the metadata and Google Docs are in sync."""
    self.project = new_project
    self.test_name = new_name
    self.test = self._UpdateTestMetadata(new_contents)
    self.put()

  def _UpdateTestMetadata(self, new_contents):
    """Updates the test metadata stored.

    Returns:
      The re-serialized JSON blob, or '' when no blob was set.
    """
    result = ''
    if self.test:
      cur_test = json.loads(self.test)
      # NOTE(review): both backup slots are reset to '' rather than
      # rotated (active -> backup1 -> backup2); confirm this is intended.
      cur_test['backup2'] = ''
      cur_test['backup1'] = ''
      cur_test['active'] = new_contents
      result = json.dumps(cur_test)
    return result
class ZipData(db.Model):
  """Stores the zip string data (see SaveZipData / LoadZipByKeyStr)."""
  # Serialized zip payload as a JSON string.
  json_str = db.TextProperty(required=True)
class ScriptStep(db.Model):
  """Stores the screenshot for a single step of a script."""
  # Id of the script this step belongs to.
  script_id = db.StringProperty()
  # Position of the step within the script, stored as a string.
  step_index = db.StringProperty()
  # Screenshot location/data for the step.
  image_url = db.TextProperty()
class ScriptActivity(db.Model):
  """Stores the script activity.

  Keyed by '<script id>_activity' (see IncreaseAndGetLoadedTimes).
  """
  # Number of times the script has been loaded.
  loaded_times = db.IntegerProperty()
  modified = db.DateTimeProperty(required=False, auto_now=True)
def IncreaseAndGetLoadedTimes(id):
  """Increments and returns the load counter for the given script id."""
  activity = ScriptActivity.get_or_insert(id + '_activity', loaded_times=0)
  activity.loaded_times += 1
  activity.put()
  return activity.loaded_times
def AddNewScriptStep(id, index, data):
  """Stores a screenshot for one step of a script."""
  ScriptStep(script_id=id,
             step_index=index,
             image_url=data).put()
def GetAllSteps(id):
  """Returns a query over all screenshots recorded for the script."""
  return db.GqlQuery('SELECT * FROM ScriptStep WHERE script_id = :1', id)
def DeleteAllSteps(id):
  """Deletes every screenshot recorded for the given script."""
  # Fetch keys only; db.delete accepts the key query directly.
  step_keys = db.GqlQuery(
      'SELECT __key__ FROM ScriptStep WHERE script_id = :1', id)
  db.delete(step_keys)
def DeleteAllStepsByScriptIds(ids):
  """Deletes the stored screenshots for every script id given."""
  for script_id in ids:
    DeleteAllSteps(script_id)
def SaveZipData(json_str):
  """Persists a zip payload and returns its datastore key as a string."""
  entity = ZipData(json_str=json_str)
  return str(entity.put())
def LoadZipByKeyStr(key_str):
  """Loads the stored zip data for the given datastore key string."""
  return ZipData.get(db.Key(key_str))
def GetTestString(contents):
  """Serializes test contents into the JSON revision envelope.

  Args:
    contents: str active test contents.

  Returns:
    A JSON string with 'active' set and empty 'backup1'/'backup2' slots.
  """
  envelope = {'active': contents,
              'backup1': '',
              'backup2': ''}
  return json.dumps(envelope)
def Save(project, new_test_name, contents):
  """Saves both new metadata and a new docs object, transactionally."""
  return db.run_in_transaction(
      _SaveTransaction, project, new_test_name, contents)
def _SaveTransaction(project, new_test_name, contents):
  """Creates and stores the metadata entity inside a transaction."""
  metadata = StorageMetadata(
      id=GetUniqueId(),
      project=project,
      # No Docs object exists yet for a freshly saved test.
      docs_resource_url='',
      docs_resource_id='',
      test_name=new_test_name,
      test=GetTestString(contents))
  metadata.put()
  return metadata
def FetchById(id_string):
  """Fetches a storage metadata instance by its id field.

  Also supports legacy wtf ids prefixed with 'legacy-'; those are looked
  up via the legacy_wtf_id property instead of the storage id.

  Args:
    id_string: either a pure numerical id string, or one prefixed
      with the string 'legacy-'.

  Returns:
    The corresponding StorageMetadata instance, or None if no instance
    is found for the given id.
  """
  query = StorageMetadata.all()
  legacy_match = LEGACY_ID_REGEX.search(id_string)
  if legacy_match:
    query.filter('legacy_wtf_id = ', legacy_match.group(1))
  else:
    query.filter('id = ', id_string)
  return query.get()
def FetchByIds(ids):
  """Fetches the metadata instances for each of the given ids."""
  return [FetchById(one_id) for one_id in ids]
def DeleteMetadata(instances):
  """Deletes all of the given metadata entities in bounded batches.

  The caller's list is consumed (sliced away) as batches are deleted.
  """

  def _DeleteBatch(batch):
    db.delete(batch)

  while instances:
    if len(instances) <= DEFAULT_NUMBER_PER_BATCH:
      _DeleteBatch(instances)
      del instances
      break
    _DeleteBatch(instances[:DEFAULT_NUMBER_PER_BATCH])
    del instances[:DEFAULT_NUMBER_PER_BATCH]
def FetchByDocsResourceId(resource_id):
  """Fetches a storage metadata instance by its docs resource id."""
  query = StorageMetadata.all()
  query.filter('docs_resource_id = ', resource_id)
  return query.get()
def FetchByProjectAndTestName(project_name, test_name):
  """Fetches the first test with the given project and name."""
  query = StorageMetadata.all()
  query.filter('project = ', project_name)
  query.filter('test_name = ', test_name)
  return query.get()
def FetchByProject(project_name):
  """Fetches a list of metadata objects belonging to the project."""
  query = StorageMetadata.all()
  query.filter('project = ', project_name)
  return [metadata for metadata in query]
def AddPreexisting(project, test_name, resource_url, resource_id,
                   legacy_wtf_id=None):
  """Adds the metadata for a storage instance that already exists in Docs."""
  metadata = StorageMetadata(
      id=GetUniqueId(),
      project=project,
      test_name=test_name,
      docs_resource_url=resource_url,
      docs_resource_id=resource_id,
      legacy_wtf_id=legacy_wtf_id)
  metadata.put()
  return metadata
def GetUniqueId():
  """Returns a unique 128 bit identifier as a string."""
  return '%s' % uuid.uuid4()
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for bug templates.
Bug Templates provides a model for a template for a type of bug. A project
owner can define templates for their project, which pre-populate the
backend project that the bug should be filed to and provide a starting place for
the bug report writer to write their notes. Bug Templates are stored in
AppEngine's Datastore.
"""
__author__ = 'ralphj@google.com (Julie Ralph)'
import json
from google.appengine.ext import db
class BugTemplate(db.Model):
  """Models a Bug Template stored in AppEngine's Datastore.

  TODO(ralphj): urls should be more flexible and should be able to
  handle patterns.

  Attributes:
    template_id: A unique string identifier for this template.
    name: A human-readable name for this template.
    urls: A list of urls that this template should be used for.
    project: The human-readable project that this template is associated with.
    backend_project: An identifier for the project that is compatible with the
        backend provider.
    backend_provider: The issue tracking system that this template is
        associated with.
    selector_text: Text that should appear when the user is asked to pick a
        template, under 'What kind of problem are you reporting?'
    note_text: Text that should appear in the notes field.
    display_order: An integer declaring the relative position where this
        template should be displayed in lists. Higher numbers are displayed
        after lower numbers.
  """
  template_id = db.StringProperty(required=True)
  name = db.StringProperty(required=True)
  urls = db.StringListProperty(required=True)
  project = db.StringProperty(required=True)
  backend_project = db.StringProperty(required=True)
  backend_provider = db.StringProperty(required=True)
  selector_text = db.StringProperty(required=True)
  # TextProperty: note text may exceed the 500-character StringProperty limit.
  note_text = db.TextProperty(required=True)
  display_order = db.IntegerProperty(required=True, default=0)
class BugTemplateEncoder(json.JSONEncoder):
  """JSON encoder that understands BugTemplate model objects."""

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def default(self, obj):
    """Overrides the default JSONEncoder.

    Args:
      obj: Object to serialize.

    Returns:
      A serializable representation of the Bug Template object.
    """
    if not isinstance(obj, BugTemplate):
      return json.JSONEncoder.default(self, obj)
    return {
        'id': obj.template_id,
        'name': obj.name,
        'urls': obj.urls,
        'project': obj.project,
        'backendProject': obj.backend_project,
        'backendProvider': obj.backend_provider,
        'selectorText': obj.selector_text,
        'noteText': obj.note_text,
        'displayOrder': obj.display_order,
    }
def JsonEncode(template):
  """Encodes a bug template model as JSON.

  Args:
    template: A bug template to encode.

  Returns:
    A JSON-encoded string representation of the bug template.
  """
  encoded = json.dumps(template, cls=BugTemplateEncoder)
  return encoded
def StoreBugTemplate(template_id, name, urls, project, backend_project,
                     backend_provider, selector_text, note_text, display_order):
  """Stores a new bug template in the App Engine Datastore.

  If there is already a Bug Template with the same template_id, overwrites
  the old template.

  Args:
    template_id: A unique string identifier for this template.
    name: A human-readable name for this template.
    urls: A list of urls that this template should be used for.
    project: The project that this template is associated with.
    backend_project: An identifier for the project that is compatible with the
        backend provider.
    backend_provider: The issue tracking system that this template is
        associated with.
    selector_text: Text that should appear when the user is asked to pick a
        template, under 'What kind of problem are you reporting?'
    note_text: Text that should appear in the notes field.
    display_order: An integer declaring the relative position where this
        template should be displayed in lists. Higher numbers are displayed
        after lower numbers.

  Returns:
    The newly created or updated bug template.
  """
  template = BugTemplate.get_by_key_name(template_id)
  if template is not None:
    # Overwrite every mutable field of the existing template.
    template.name = name
    template.urls = urls
    template.project = project
    template.backend_project = backend_project
    template.backend_provider = backend_provider
    template.selector_text = selector_text
    template.note_text = note_text
    template.display_order = display_order
  else:
    # The key name doubles as the template id so lookups stay cheap.
    template = BugTemplate(key_name=template_id,
                           template_id=template_id,
                           name=name,
                           urls=urls,
                           project=project,
                           backend_project=backend_project,
                           backend_provider=backend_provider,
                           selector_text=selector_text,
                           note_text=note_text,
                           display_order=display_order)
  template.put()
  return template
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite Result model.
Bite Result model is used to store a test's result during a run.
"""
__author__ = 'phu@google.com (Po Hu)'
import datetime
import logging
import random
from google.appengine.ext import db
from models import bite_event
from models import bite_run
class Error(Exception):
  """Base error for the bite result model."""
  pass
class MissingRunError(Error):
  """Raised when no run is given for a result (see AddResult)."""
class MissingTestInfoError(Error):
  """Raised when required test info (e.g. the test id) is missing."""
class ResultAlreadyPickedError(Error):
  """The result has already been picked and is not in queued status."""
class BiteResult(db.Model):
  """Contains result related info for a single test within a run.

  A result starts 'queued', is claimed by an executor ('assigned'), and
  finishes as 'passed', 'failed', or 'undefined'.
  """
  run = db.ReferenceProperty(bite_run.BiteRun)
  test_id = db.StringProperty(required=False)
  status = db.StringProperty(
      choices=('queued', 'assigned', 'passed', 'failed',
               'undefined'))
  screenshot = db.TextProperty(required=False)
  last_picked_time = db.DateTimeProperty(required=False)
  retried_times = db.IntegerProperty(required=False, default=0)
  created_time = db.DateTimeProperty(required=False, auto_now_add=True)
  log = db.TextProperty(required=False)
  finished_time = db.DateTimeProperty(required=False)
  executor_ip = db.StringProperty(required=False)
  # Random float used by GetRandomQueuedJob to pick a pseudo-random queued
  # result of a run via indexed >=/< comparisons.
  random = db.FloatProperty(required=True)
  automated = db.BooleanProperty(required=False, default=True)
  test_name = db.StringProperty(required=False)
  labels = db.StringListProperty(default=None)
def GetResult(run_key, test_id='', test_name=''):
  """Builds a query for results of the given run, optionally narrowed.

  Args:
    run_key: String datastore key of the BiteRun to query.
    test_id: Optional test id used to narrow the query.
    test_name: Optional test name used to narrow the query.

  Returns:
    A query over the matching BiteResult entities.
  """
  query = BiteResult.all().filter('run = ', db.Key(run_key))
  for prop, value in (('test_id =', test_id), ('test_name =', test_name)):
    if value:
      query.filter(prop, value)
  return query
def LoadResultByKeyStr(key_str):
  """Loads the BiteResult identified by the given key string."""
  result_key = db.Key(key_str)
  return BiteResult.get(result_key)
def AddResult(run_key, test_id):
  """Adds a result entity for the given run and test.

  Args:
    run_key: The key of the BiteRun the result belongs to.
    test_id: A string id of the test this result is for.

  Returns:
    The existing or newly created BiteResult entity.

  Raises:
    MissingRunError: If no run key is given.
    MissingTestInfoError: If no test id is given.
  """
  if not run_key:
    raise MissingRunError('There is no run defined.')
  if not test_id:
    raise MissingTestInfoError('No test id is specified.')
  # get_or_insert requires an explicit key name as its first argument;
  # derive a stable one from the run and test so repeated calls return the
  # same entity.  Assume the name is ascii.  The 'random' property is
  # required=True, so it must be supplied on creation.
  key_name = '%s_%s' % (run_key, test_id)
  return BiteResult.get_or_insert(key_name,
                                  run=run_key,
                                  test_id=test_id,
                                  random=random.random())
def GetResultsOfRunSlice(run_slice_key):
  """Gets the results of a run slice.

  Returns a keys-only query over the BiteResult entities whose datastore
  ancestor is the given run slice.
  """
  return BiteResult.all(keys_only=True).ancestor(run_slice_key)
def UpdateStatusAfterFetched(result):
  """Claims a queued result by marking it 'assigned'.

  Intended to run inside a datastore transaction (see GetRandomQueuedJob).

  Args:
    result: The BiteResult to claim.

  Returns:
    True if the result was queued and is now assigned; False if another
    worker already picked it.
  """
  # Re-fetch inside the transaction so the status check is current.
  result = BiteResult.get(result.key())
  if result.status == 'queued':
    result.status = 'assigned'
    result.put()
    # NOTE(review): this decrement assumes result.parent() returns a cached
    # instance, so the second parent() call puts the decremented entity —
    # confirm the SDK caches the parent on the model.
    result.parent().queued_number -= 1
    result.parent().put()
    return True
  else:
    # TODO(phu): Fetch another result in queued status.
    #raise ResultAlreadyPickedError()
    return False
def GetRandomQueuedJob(run):
  """Gets a random queued job given a run.

  Draws a random number and uses the indexed 'random' property to pick a
  pseudo-random queued result of the run, then claims it transactionally.

  Args:
    run: The BiteRun to pick a queued result from.

  Returns:
    The claimed BiteResult, or '' if no queued result could be claimed.
  """
  rand_num = random.random()
  # First look for a queued result at or above the drawn number.
  result = BiteResult.all().order('random').filter(
      'run =', run).filter('status =', 'queued').filter(
          'random >=', rand_num).get()
  logging.info('Looked at larger random :')
  if result is None:
    logging.info('Looking for a result smaller than the random.')
    # Wrap around: search below the drawn number instead.
    result = BiteResult.all().order('-random').filter(
        'run =', run).filter('status =', 'queued').filter(
            'random <', rand_num).get()
  if result:
    logging.info('Found a valid result')
    # Claim the result; False means another worker picked it first.
    success = db.run_in_transaction(UpdateStatusAfterFetched, result)
    logging.info('The final result is:' + str(success))
    if success:
      if result.parent().queued_number == 0:
        run.queued_number -= result.parent().tests_number
        # TODO(phu): Need to run_in_transaction.
        run.put()
      return result
  # Callers treat the empty string as "no job available".
  return ''
def UpdateResult(result_id, parent_key_str, status, screenshot='',
                 log='', finished_time='', executor_ip=''):
  """Updates the result and run slice info in a transaction.

  After the transactional update, if every test in the slice has finished,
  the slice's totals are rolled up into the run and the run is marked
  completed.

  Args:
    result_id: Integer id of the BiteResult to update.
    parent_key_str: String key of the result's parent run slice.
    status: The new status string.
    screenshot: Optional screenshot data.
    log: Optional log text.
    finished_time: Optional finish time; _UpdateResult defaults it to now.
    executor_ip: Optional IP of the executor that ran the test.

  Returns:
    The updated BiteResult.
  """
  result = db.run_in_transaction(
      _UpdateResult, result_id, parent_key_str,
      status, screenshot, log, finished_time, executor_ip)
  run_slice = result.parent()
  run = run_slice.run
  # NOTE(review): the run is marked completed as soon as this slice's tests
  # all finish — confirm the intended single-slice-per-run assumption.
  if (run_slice.passed_number + run_slice.failed_number ==
      run_slice.tests_number):
    run.passed_number += run_slice.passed_number
    run.failed_number += run_slice.failed_number
    run.end_time = datetime.datetime.now()
    run.status = 'completed'
    run.put()
    bite_event.AddEvent(run, action='complete', event_type='run',
                        name=run.name, labels=run.labels,
                        project=run.suite.parent().name)
  return result
def _UpdateResult(result_id, parent_key_str, status, screenshot='',
                  log='', finished_time='', executor_ip=''):
  """Updates the result after it's executed.

  Intended to run inside a datastore transaction (see UpdateResult).
  Also bumps the parent run slice's passed/failed counters.
  """
  parent_key = None
  if parent_key_str:
    parent_key = db.Key(str(parent_key_str))
  result = BiteResult.get_by_id(result_id, parent_key)
  result.status = status
  result.screenshot = screenshot
  result.log = log
  if not finished_time:
    # Default the finish time to now when the caller did not supply one.
    finished_time = datetime.datetime.now()
  result.finished_time = finished_time
  result.executor_ip = executor_ip
  result.put()
  # NOTE(review): the counter bumps below assume result.parent() returns a
  # cached instance, so the final parent().put() persists them — confirm
  # the SDK caches the parent on the model.
  if status == 'passed':
    result.parent().passed_number += 1
  elif status == 'failed':
    result.parent().failed_number += 1
  result.parent().put()
  return result
def GetResultsOfRun(run_key_str, number):
  """Gets up to `number` most recently finished results of the given run."""
  query = BiteResult.all()
  query.filter('run =', db.Key(run_key_str))
  query.order('-finished_time')
  return query.fetch(number)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bugs utilities."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import logging
from utils import encoding_util
# Allowed bug states.  These are the normalized states that provider-specific
# statuses are mapped onto by StateFromStatus.
ACTIVE = 'active'
RESOLVED = 'resolved'
CLOSED = 'closed'
UNKNOWN = 'unknown'
# Bug providers.
class Provider(object):
  """Identifiers for the supported bug provider backends."""
  ISSUETRACKER = 'issuetracker'
  LOCAL = 'local'
class InvalidProvider(Exception):
  """Thrown when the caller uses an invalid bug provider.

  Raised by StateFromStatus when the provider is not one of the Provider
  class's known values.
  """
  pass
# Maps lowercased IssueTracker statuses onto the normalized states above.
ISSUETRACKER_STATE_STATUS_MAP = {
    'untriaged': ACTIVE,
    'assigned': ACTIVE,
    'available': ACTIVE,
    'unconfirmed': ACTIVE,
    'awaitingtranslation': ACTIVE,
    'externaldependency': ACTIVE,
    'started': ACTIVE,
    'upstream': ACTIVE,
    'resolved': RESOLVED,
    'fixed': RESOLVED,
    'wontfix': CLOSED,
    'verified': CLOSED,
    'duplicate': CLOSED,
    'confirmed': CLOSED,
    'invalid': CLOSED,
    'unknown': UNKNOWN
}
# Maps lowercased local-provider statuses onto the normalized states above.
LOCAL_STATE_STATUS_MAP = {
    'unconfirmed': ACTIVE,
    'approved': CLOSED,
    'rejected': CLOSED,
    'unknown': UNKNOWN
}
def StateFromStatus(status, provider):
  """Maps status to a state of active, resolved, closed, or unknown.

  Args:
    status: The provider-specific status string, possibly None or empty.
    provider: One of the Provider constants.

  Returns:
    One of ACTIVE, RESOLVED, CLOSED, or UNKNOWN.

  Raises:
    InvalidProvider: If the provider is not recognized.
  """
  if not status:
    return UNKNOWN
  lowered = status.lower()
  # Plain string equality replaces the previous cmp(...) == 0 calls:
  # cmp() is Python-2-only and was redundant for an equality test.
  if provider == Provider.ISSUETRACKER:
    if lowered in ISSUETRACKER_STATE_STATUS_MAP:
      return ISSUETRACKER_STATE_STATUS_MAP[lowered]
    else:
      logging.error('Unrecognized IssueTracker status: %s', lowered)
      return UNKNOWN
  elif provider == Provider.LOCAL:
    if lowered in LOCAL_STATE_STATUS_MAP:
      return LOCAL_STATE_STATUS_MAP[lowered]
    else:
      logging.error('Unrecognized Local status: %s', lowered)
      return UNKNOWN
  else:
    raise InvalidProvider('Unrecognized provider: %s' % provider)
def GetUserLink(provider, email):
  """Retrieves a url to the profile of the specified user on the given provider.

  Args:
    provider: The name of the provider.
    email: The email alias of the user.

  Returns:
    Str of the url to the profile of the user, or '' when unavailable.
  """
  if provider == Provider.ISSUETRACKER and email:
    alias = email.split('@')[0]
    link = 'http://code.google.com/u/' + alias
  else:
    link = ''
  return encoding_util.EncodeToAscii(link)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Describes metadata for the rpf project storage back-end.
We currently use Google Docs as a storage back-end, but
this should be easy to change in the future.
"""
__author__ = 'jasonstredwick@google.com (Jason Stredwick)'
import logging
import json
from google.appengine.ext import db
from models import store
class StorageProjectMetadata(db.Model):
  """Stores metadata associated with persistent rpf project objects.

  The entity's key name is the project name (see GetOrInsertProject).

  Attributes:
    name: A string representing the name of the project.
    page_map: A long json string containing an encoded mapping of url patterns
        matched to names.
    params: Additional project parameters; presumably a JSON string given the
        '{}' default — TODO confirm against callers.
    java_package_path: The Java package path. (string)
    common_methods: A list of common method names; deps for these are looked
        up via store.GetDepsByNames in GetProjectObject.
  """
  # Project specific information
  name = db.StringProperty(required=True)
  page_map = db.TextProperty(required=False, default='{}')
  params = db.TextProperty(required=False, default='{}')
  # Translation configuration data
  # Java
  java_package_path = db.StringProperty(required=False, default='')
  # NOTE(review): default=[] is a shared mutable default on the property
  # definition — confirm the SDK copies it per-entity.
  common_methods = db.StringListProperty(default=[])
def GetOrInsertProject(name):
  """Gets or inserts a project object.

  Args:
    name: A string representing the name of a project and its key.

  Returns:
    The newly created or existing StorageProjectMetadata object, or None if
    name is invalid.
  """
  if name is None:
    return None
  project = StorageProjectMetadata.get_by_key_name(name)
  if project is None:
    # The constructor never returns None, so the previous post-construction
    # None check was dead code and has been removed.
    project = StorageProjectMetadata(key_name=name, name=name)
  # Unconditional put preserves the original write-on-every-call behavior.
  project.put()
  return project
def GetProjectJson(name):
  """Gets project object and returns it in json form.

  Args:
    name: A string representing the name of a project and its key.

  Returns:
    A string form of the object in JSON, or None upon failure to create a
    new object.
  """
  project_obj = GetProjectObject(name)
  if project_obj is None:
    return None
  try:
    encoded = json.dumps(project_obj)
  except (TypeError, OverflowError, ValueError):
    return None
  return encoded
def GetJsFiles(project):
  """Gets the JS files associated with a project.

  Args:
    project: The StorageProjectMetadata entity whose files to collect.

  Returns:
    A list of {'name': ..., 'code': ...} dicts, one per associated JsFile.
  """
  # The previous implementation called jsfile_set.get(), which returns only
  # the FIRST entity, so at most one file was ever returned.  Iterate the
  # full back-reference query instead.
  return [{'name': js_file.name, 'code': js_file.code}
          for js_file in project.jsfile_set]
def GetProjectObject(name):
  """Gets an object representation of the specified project.

  Args:
    name: A string representing the name of a project and its key.

  Returns:
    The object form of the entry or None if the project does not exist.
  """
  project = StorageProjectMetadata.get_by_key_name(name)
  if project is None:
    return None
  method_names = project.common_methods
  if method_names:
    common_methods = ','.join(method_names)
    common_methods_deps = store.GetDepsByNames(method_names)
  else:
    common_methods = ''
    common_methods_deps = ''
  return {
      'name': project.name,
      'page_map': project.page_map,
      'params': project.params,
      'js_files': GetJsFiles(project),
      'common_methods': common_methods,
      'common_methods_deps': common_methods_deps,
      'java_package_path': project.java_package_path
  }
def UpdateProject(name, data):
  """Updates a project's data.

  Args:
    name: A string representing the name of the project and its key.
    data: An object containing the values to update within the project
        details.  Note that any given attribute is optional.

  Returns:
    Returns the project that was updated or None if no project was updated.
  """
  project = GetOrInsertProject(name)
  if project is None:
    return None
  if data.get('page_map'):
    project.page_map = data['page_map']
  if 'java_package_path' in data:
    project.java_package_path = data['java_package_path'] or ''
  if data.get('params'):
    project.params = data['params']
  if data.get('js_files') is not None:
    # Replace the stored files wholesale with the supplied set.
    DeleteFiles(project)
    SaveFiles(project, data['js_files'])
  if data.get('common_methods') is not None:
    project.common_methods = data['common_methods']
  project.put()
  return project
def GetProjectNames():
  """Returns a list of all stored project names."""
  names = []
  for project in StorageProjectMetadata.all():
    names.append(project.name)
  return names
class JsFile(db.Model):
  """Stores the JS file associated with a project.

  Attributes:
    name: The JS file name.
    code: The JS code.
    project: Reference to the owning StorageProjectMetadata entity; gives
        the project a 'jsfile_set' back-reference query.
  """
  name = db.StringProperty(required=True)
  code = db.TextProperty(required=False, default='')
  project = db.ReferenceProperty(StorageProjectMetadata)
def SaveFiles(project, files):
  """Saves the JS files associated with the given project.

  Args:
    project: The StorageProjectMetadata entity the files belong to.
    files: An iterable of {'name': ..., 'code': ...} dicts.
  """
  # The loop variable was renamed from 'file', which shadowed the builtin.
  entities = [JsFile(name=file_info['name'],
                     code=file_info['code'],
                     project=project)
              for file_info in files]
  # Batch put: one datastore RPC for all entities.
  db.put(entities)
def DeleteFiles(project):
  """Removes the files associated with the given project.

  Args:
    project: The StorageProjectMetadata entity whose files to delete.
  """
  # The previous implementation called jsfile_set.get(), which returns only
  # the FIRST entity, so at most one file was deleted per call.  Materialize
  # the whole back-reference query and delete everything in one batch.
  js_files = list(project.jsfile_set)
  if js_files:
    db.delete(js_files)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Screenshots captures model.
Used to store screnshots captured while logging a bug or during
an automation run.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.ext import db
# Default maximum number of results returned by GetScreenshots.
DEFAULT_LIMIT = 10
class Screenshot(db.Model):
  """Stores a screenshot and metadata about where it came from."""
  # Raw image bytes.
  data = db.BlobProperty(required=True)
  # Source name; see Add() for the expected values.
  source = db.StringProperty(required=True)
  # Id of the artifact (test, bug, automation, etc.) it is associated with.
  source_id = db.StringProperty(required=False)
  project = db.StringProperty(required=False)
  # Reporter/modifier and timestamps are filled in automatically.
  reported_by = db.UserProperty(required=False, auto_current_user_add=True)
  reported_date = db.DateTimeProperty(required=False, auto_now_add=True)
  modified_by = db.UserProperty(required=False, auto_current_user_add=True)
  modified_date = db.DateTimeProperty(required=False, auto_now=True)
  caption = db.StringProperty(required=False)
  details = db.TextProperty(required=False)
  labels = db.StringListProperty()
def Add(data, source, source_id='', project='',
        caption=None, details=None, labels=None):
  """Adds a new screenshot entry.

  Args:
    data: The screenshot data to store.
    source: The source name, should be one of the types in the Sources class.
    source_id: ID of the artifact (test, bug, automation, etc.) the screenshot
        is associated with.
    project: Project the screenshot is associated with.
    caption: Caption string.
    details: More detailed string about the screenshot.
    labels: List of strings used to label the screenshot.

  Returns:
    The model object for the new screenshot.
  """

  def _Transaction():
    # Fall back to an empty label list when none was provided.
    new_screenshot = Screenshot(data=db.Blob(data),
                                source=source,
                                source_id=source_id,
                                project=project,
                                caption=caption,
                                details=details,
                                labels=labels or [])
    new_screenshot.put()
    return new_screenshot

  return db.run_in_transaction(_Transaction)
def GetById(screenshot_id):
  """Gets a screenshot by ID."""
  numeric_id = int(screenshot_id)
  return Screenshot.get_by_id(numeric_id)
def GetScreenshots(source, source_id=None, project=None,
                   limit=DEFAULT_LIMIT):
  """Fetches screenshots matching the given source and optional filters.

  Args:
    source: The source name to match.
    source_id: Optional artifact id to match.
    project: Optional project name to match.
    limit: Maximum number of screenshots to return.

  Returns:
    A list of matching Screenshot entities.
  """
  # The filter strings previously lacked the space between the property
  # name and the '=' operator (e.g. 'source='), which the db query parser
  # does not recognize as an equality filter on the property.
  query = Screenshot.all().filter('source =', source)
  if source_id:
    query.filter('source_id =', source_id)
  if project:
    query.filter('project =', project)
  return query.fetch(limit)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for bug data.
Bug is a model for crawled bug information stored in AppEngine's Datastore.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import json
import logging
from google.appengine.ext import db
from models import bugs_util
from models import test_cycle
from utils import encoding_util
from utils import url_util
class InvalidProvider(Exception):
  """Thrown when the caller uses an invalid bug provider."""
  pass
class Bug(db.Model):
  """Models a Bug stored in AppEngine's Datastore.

  After crawling the bug information, relevant bug data is stored in the Bug
  model and one or more association is created in the UrlToBugMap model.
  The bug data stored is normalized to contain some basic fields
  (eg. id, title, domain, etc.), to facilitate its retrieval.

  Attributes:
    bug_id: The ID of the bug in the source (original) bug database.
    title: The bug's title.
    summary: The bug's summary.
    priority: The bug's priority (string; defaults to '2').
    project: Name of the project this bug is associated with.
    provider: Source provider of the bug information.
    author: The user who first reported this bug.
    author_id: Identifies the user in the provider backend.
    author_url: URL to the profile of the bug's author.
    status: Status of the bug (eg. active, fixed, closed) when it
        was crawled.
    state: Normalized state derived from status (see bugs_util).
    details_link: Link to the bug details on the original source.
    reported_on: The date the bug was first opened.
    last_update: Date the bug was last updated in the original bug database.
    last_updater: The last user to update the bug.
    last_updater_url: URL to the profile of the last user to update the bug.
    target_element: When specified, describes an element on the page the bug
        is associated with.
    screenshot: Url to an associated screenshot.
    has_screenshot: Whether a screenshot is attached.
    has_recording: True, if the bug has recorded script attached.
    recording_link: Link to recorded script.
    test_cycle: Reference to the test cycle this bug was filed in.
    expected: Text describing the expected behavior.
    result: Text describing the observed result.
  """
  bug_id = db.StringProperty(required=False)
  title = db.StringProperty(required=True)
  summary = db.TextProperty(required=False)
  priority = db.StringProperty(required=False, default='2')
  project = db.StringProperty(required=True)
  provider = db.StringProperty(required=False)
  author = db.StringProperty(required=False)
  author_id = db.StringProperty(required=False)
  author_url = db.StringProperty(required=False, default='')
  status = db.StringProperty(required=True)
  state = db.StringProperty(required=True,
                            choices=(bugs_util.ACTIVE,
                                     bugs_util.RESOLVED,
                                     bugs_util.CLOSED,
                                     bugs_util.UNKNOWN))
  details_link = db.StringProperty(required=False)
  reported_on = db.StringProperty(required=False)
  last_update = db.StringProperty(required=True)
  last_updater = db.StringProperty(required=False)
  last_updater_url = db.StringProperty(required=False, default='')
  has_target_element = db.BooleanProperty(required=False, default=False)
  target_element = db.TextProperty(required=False)
  has_screenshot = db.BooleanProperty(required=False, default=False)
  screenshot = db.StringProperty(required=False, default='')
  has_recording = db.BooleanProperty(required=False, default=False)
  recording_link = db.TextProperty(required=False, default='')
  # Tracks when an entry is added and modified.
  added = db.DateTimeProperty(required=False, auto_now_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
  # Test cycle differentiates the various test runs.
  test_cycle = db.ReferenceProperty(reference_class=test_cycle.TestCycle,
                                    collection_name='testcycle_bugs')
  expected = db.TextProperty(required=False)
  result = db.TextProperty(required=False)
class BugEncoder(json.JSONEncoder):
  """Encoder that knows how to encode Bugs objects."""

  # Disable 'Invalid method name' lint error.
  # pylint: disable-msg=C6409
  def default(self, obj):
    """Overrides the default JSONEncoder.

    Args:
      obj: Object to serialize.

    Returns:
      A serializable representation of the Object.
    """
    if not isinstance(obj, Bug):
      return json.JSONEncoder.default(self, obj)
    return {
        'key': obj.key().id(),
        'id': obj.bug_id,
        'title': obj.title,
        'summary': obj.summary,
        'priority': obj.priority,
        'project': obj.project,
        'provider': obj.provider,
        'status': obj.status,
        'author': obj.author,
        'author_url': obj.author_url,
        'state': obj.state,
        'details_link': obj.details_link,
        'reported_on': obj.reported_on,
        'last_update': obj.last_update,
        'last_updater': obj.last_updater,
        'last_updater_url': obj.last_updater_url,
        'target_element': obj.target_element,
        'has_target_element': obj.has_target_element,
        'screenshot': obj.screenshot,
        'has_screenshot': obj.has_screenshot,
        'has_recording': obj.has_recording,
        'recording_link': obj.recording_link,
    }
def Store(bug_id, title, summary, priority, project, provider, status, author, author_id,
          details_link, reported_on, last_update, last_updater,
          target_element='', screenshot='', recording_link='',
          cycle=None, expected=None, result=None):
  """Creates or updates a bug into the Datastore.

  Creates a bug with the given properties into the datastore. If there is
  already a bug that shares the same bug_id and project name, the bug
  information is simply updated.

  Args:
    bug_id: The ID of the bug in the source (original) bug database.
    title: The bug's title.
    summary: The bug's summary.
    priority: The bug's priority.
    project: Name of the project this bug is associated with.
    provider: Source provider of the bug information.
    status: Status of the bug (eg. Active, Fixed, Closed) when it
        was crawled.
    author: The author of the bug.
    author_id: Identifier of the author in the provider backend.
    details_link: Link to the bug details on the original source.
    reported_on: The date the bug was first opened.
    last_update: Date the bug was last updated in the original bug
        database.
    last_updater: The last user to update the bug.
    target_element: Optional str describing a specific element on the page
        the bug is associated with.
    screenshot: Optional str url to an associated screenshot.
    recording_link: Optional link to recorded steps.
    cycle: Optional TestCycle the bug belongs to.
    expected: Optional text of the expected behavior.
    result: Optional text of the observed result.

  Returns:
    The newly created or updated entry.
  """
  logging.info('Status: %s', status)
  # Status is normalized to lower case before deriving the state.
  status = status.lower()
  state = bugs_util.StateFromStatus(status, provider)
  last_updater_url = bugs_util.GetUserLink(provider, last_updater)
  bug = None
  if bug_id:
    # Check if bug is already in the cache, in which case, we just update it.
    bug = GetBug(bug_id, project, provider)
  # Normalize free-form text fields to ascii before storing.
  title = encoding_util.EncodeToAscii(title)
  summary = encoding_util.EncodeToAscii(summary)
  expected = encoding_util.EncodeToAscii(expected)
  result = encoding_util.EncodeToAscii(result)
  author = encoding_util.EncodeToAscii(author)
  if bug:
    # Update path: author/recording fields of the existing bug are kept.
    bug.title = title
    bug.summary = summary
    bug.priority = priority
    bug.status = status
    bug.state = state
    bug.details_link = details_link
    bug.last_update = last_update
    bug.last_updater = last_updater
    bug.last_updater_url = last_updater_url
    bug.target_element = target_element
    bug.has_target_element = bool(target_element)
    bug.screenshot = screenshot
    bug.has_screenshot = bool(screenshot)
    bug.test_cycle = cycle
    bug.expected = expected
    bug.result = result
  else:
    bug = Bug(bug_id=bug_id,
              title=title,
              summary=summary,
              priority=priority,
              project=project,
              provider=provider,
              status=status,
              author=author,
              author_id=author_id,
              author_url=bugs_util.GetUserLink(provider, author),
              state=state,
              details_link=details_link,
              reported_on=reported_on,
              last_update=last_update,
              last_updater=last_updater,
              last_updater_url=last_updater_url,
              target_element=target_element,
              has_target_element=bool(target_element),
              screenshot=screenshot,
              has_screenshot=bool(screenshot),
              recording_link=recording_link,
              has_recording=bool(recording_link),
              test_cycle=cycle,
              expected=expected,
              result=result)
  bug.put()
  return bug
def GetBugsById(bug_id, project=None, provider=None,
                query_method=Bug.all, limit=1000):
  """Retrieves a list of bugs from the datastore based on ID.

  Args:
    bug_id: The Id of the bug to retrieve.
    project: The project of the bug.
    provider: The provider of the bug.
    query_method: The method that returns instance of db.Query(Bug).
    limit: The maximum number of results to return.

  Returns:
    A list of bugs matching the ID passed in.
  """
  query = query_method().filter('bug_id =', bug_id)
  for prop, value in (('project =', project), ('provider =', provider)):
    if value:
      query.filter(prop, value)
  return query.fetch(limit=limit)
def GetBug(bug_id, project, provider, keys_only=False):
  """Retrieves a bug from the Datastore.

  Args:
    bug_id: Id of bug to retrieve.
    project: Project of bug in question.
    provider: Source provider of the bug information.
    keys_only: Whether the query should return full entities or just keys.

  Returns:
    Bug object if one exists with the specified id, project, and provider
    combination, or None.
  """
  # Fixed two defects: the model class is Bug (there is no 'Bugs'), and the
  # provider constraint must go through filter() — Query has no 'provider'
  # method.
  query = Bug.all(keys_only=keys_only).filter('bug_id =', bug_id)
  query.filter('project =', project)
  query.filter('provider =', provider)
  return query.get()
def GetBugByKey(key_name):
  """Retrieves a bug from the Datastore.

  Args:
    key_name: The key name of the bug.

  Returns:
    Bug object with the given key_name or None.
  """
  numeric_id = int(key_name)
  return Bug.get_by_id(numeric_id)
def UpdateTargetElement(key_name, target_element):
  """Update the target element information on the specified bug.

  Args:
    key_name: Key name of the bug to update.
    target_element: Str describing a specific element on the page
        the bug is associated with.

  Returns:
    Bug object with the updated target_element information.
  """
  bug = GetBugByKey(key_name)
  bug.has_target_element = bool(target_element)
  bug.target_element = target_element
  bug.put()
  return bug
def UpdateRecording(key_name, recording_link):
  """Update the specified bug with the link to recorded steps.

  Args:
    key_name: Key name of the bug to update.
    recording_link: Link to recorded steps.

  Returns:
    Bug object with the updated recording link.
  """
  bug = GetBugByKey(key_name)
  bug.has_recording = bool(recording_link)
  bug.recording_link = recording_link
  bug.put()
  return bug
def UpdateStatus(key_name, status):
  """Update the status/state of the specified bug.

  Args:
    key_name: Key name of the bug to update.
    status: A string containing the new status of the bug.

  Returns:
    Bug object with the updated status information.
  """
  bug = GetBugByKey(key_name)
  new_state = bugs_util.StateFromStatus(status, bug.provider)
  bug.status = status
  bug.state = new_state
  bug.put()
  return bug
def JsonEncode(bugs):
  """JSON encode the given bugs list.

  Args:
    bugs: A list of Bugs.

  Returns:
    JSON encoded str representation of the list.
  """
  encoded = json.dumps(bugs, cls=BugEncoder)
  return encoded
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model to store URL to Bugs associations.
Each bug is associated with one or more URLs. Each association is stored
as a separate entry in the UrlBugMap table.
Attributes:
  _MAX_RESULTS_CAP: Private static constant used to cap the number of
    results a client can request.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import logging
import re
from google.appengine.ext import db
from models import bugs
from models import bugs_util
from models import test_cycle
from models import test_cycle_user
from utils import encoding_util
from utils import url_util
_MAX_RESULTS_CAP = 500
class UrlPosition(object):
  """Enum-like constants describing where a URL appeared in a bug report."""
  TITLE = 1
  MAIN = 2
  COMMENTS = 3
  OTHER = 0
class UrlBugMap(db.Model):
  """Represents a relationship between a URL and a Bug.

  There are 3 fields a typical query will try to search on:
  url, hostname, path, and status. These properties are stored as
  indexed properties to speed up searches.
  """
  # Indices:
  url = db.StringProperty(required=True)
  hostname = db.StringProperty(required=False)
  path = db.StringProperty(required=False)
  status = db.StringProperty(required=False)
  # Normalized bug state; restricted to the bugs_util vocabulary.
  state = db.StringProperty(required=False,
                            choices=(bugs_util.ACTIVE,
                                     bugs_util.RESOLVED,
                                     bugs_util.CLOSED,
                                     bugs_util.UNKNOWN))
  provider = db.StringProperty(required=False)
  # Non-indexed information.
  bug = db.ReferenceProperty(reference_class=bugs.Bug,
                             collection_name='bug_urls')
  last_update = db.StringProperty(required=True)
  # Where in the bug report the URL appeared; see UrlPosition.
  position = db.IntegerProperty(required=False, default=UrlPosition.OTHER,
                                choices=(UrlPosition.TITLE,
                                         UrlPosition.MAIN,
                                         UrlPosition.COMMENTS,
                                         UrlPosition.OTHER))
  # Tracks when an entry is added and modified.
  added = db.DateTimeProperty(required=False, auto_now_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
  # Test cycle is the differentiates various test runs.
  test_cycle = db.ReferenceProperty(reference_class=test_cycle.TestCycle,
                                    collection_name='testcycle_urls')
  author = db.StringProperty(required=False)
  author_id = db.StringProperty(required=False)
def TruncateStr(text, max_len=500):
  """Return text clipped to at most max_len characters.

  Args:
    text: Text to truncate if longer than max_len.
    max_len: Maximum length of the returned string (default 500, the
        Datastore StringProperty limit).

  Returns:
    The original string when it fits, otherwise its first max_len chars.
  """
  if len(text) <= max_len:
    return text
  logging.warning(
      'Text length of %d is greater than the max length allowed. '
      'Truncating to a length of %d. Text: %s', len(text), max_len, text)
  return text[:max_len]
def StoreUrlBugMapping(target_url, bug, position=UrlPosition.OTHER):
  """Stores a new URL to bug mapping into the Datastore.

  Args:
    target_url: Fully qualified URL of the page associated with the given Bug.
    bug: Bug object containing the details of an issue.
    position: Position of the URL inside of the bug report.

  Returns:
    The newly created UrlBugMap entry.
  """
  hostname = ''
  path = ''
  urlnorm = url_util.NormalizeUrl(target_url)
  if urlnorm:
    url = urlnorm.url
    hostname = urlnorm.hostname
    path = urlnorm.path
  else:
    # Not inside an except block, so use logging.error; logging.exception
    # here would emit a bogus "NoneType: None" traceback.
    logging.error('URL normalization failed, converting to ASCII: %s',
                  target_url)
    url = target_url
  lowered = bug.status.lower()
  # Indexed fields must be plain ASCII strings.
  url = encoding_util.EncodeToAscii(url)
  hostname = encoding_util.EncodeToAscii(hostname)
  path = encoding_util.EncodeToAscii(path)
  url_bug = UrlBugMap(url=TruncateStr(url),
                      hostname=TruncateStr(hostname),
                      path=TruncateStr(path),
                      status=lowered,
                      state=bugs_util.StateFromStatus(
                          lowered, bug.provider),
                      provider=bug.provider,
                      bug=bug,
                      last_update=bug.last_update,
                      position=position,
                      test_cycle=bug.test_cycle,
                      author=bug.author,
                      author_id=bug.author_id)
  url_bug.put()
  return url_bug
def CacheKey(state, status, urlkey):
  """Calculates the cache key for the given combination of parameters."""
  # Key shape: GetBugs_state_<state>_status_<status>_key_<url hash>.
  return 'GetBugs_state_%s_status_%s_key_%s' % (state, status, urlkey)
def GetCacheKeys(urlnorm, state, status):
  """Calculates the cache keys for the given combination of parameters.

  Args:
    urlnorm: NormalizeUrl result with url, hostname and path attributes.
    state: Bug state used in the key.
    status: Bug status used in the key.

  Returns:
    A list of (url, cache_key) tuples, most-specific URL first.
  """
  # Python lists have no .push(); the original code raised AttributeError
  # whenever a second or third URL variant needed to be added.
  urls = [re.sub('https?://', '', urlnorm.url)]
  url = urlnorm.hostname + urlnorm.path
  if url not in urls:
    urls.append(url)
  url = urlnorm.hostname
  if url not in urls:
    urls.append(url)
  return [(url, CacheKey(state, status, url_util.HashUrl(url))) for url in urls]
def GetBugsForUrlUserIsAuthorized(
    url, user, max_results, state, status):
  """Retrieves bugs for a URL, restricted to the user's test cycles.

  Args:
    url: Str containing information about the target URL.
    user: User whose test-cycle membership scopes the results.
    max_results: Maximum number of bugs to return.
    state: State to filter on, or None for no state filtering.
    status: Status to filter on, or None for no status filtering.

  Returns:
    A list of (key, bug list) pairs for the specified URL.
  """
  # The previous version discarded the result and always returned None.
  return GetBugsForUrl(url, user, max_results, state, status,
                       enforce_cycle_scoping=True)
def GetBugsForUrl(
    url, user, max_results, state, status, enforce_cycle_scoping=False):
  """Retrieves a list of bugs for a given URL up to the specified amount.

  Args:
    url: Str containing information about the target URL.
    user: User whose test-cycle membership scopes the results.
    max_results: Maximum number of bugs to return.
    state: State of the bugs to retrieve. If no value is specified,
        the list of bugs returned will not be filtered based on state.
    status: Status of the bugs to retrieve.
        If no value is specified, the list of
        bugs returned will not be filtered based on status.
    enforce_cycle_scoping: When True, return nothing if the user belongs to
        no test cycles.

  Returns:
    A list of [key, bug list] pairs of known bugs for the specified URL.
  """
  urlnorm = url_util.NormalizeUrl(url)
  if not urlnorm:
    logging.error('Unable to normalize URL.')
    return []
  # Cap the client-requested limit at the module-wide maximum.
  limit = _MAX_RESULTS_CAP
  if max_results < limit:
    limit = max_results
  cycles = test_cycle_user.GetTestCyclesForUser(user)
  if enforce_cycle_scoping and not cycles:
    # Nothing to do, user is not authorized to see bugs.
    return []
  queries = GetQueriesForUrl(urlnorm, state, status, cycles)
  results = []
  results_dict = {}  # Bug keys already seen, used to de-duplicate.
  for (key, query) in queries:
    mappings = query.fetch(limit)
    if mappings:
      keys = []
      for curr in mappings:
        # Read the reference key without dereferencing (avoids one fetch
        # per mapping).
        curr_key = UrlBugMap.bug.get_value_for_datastore(curr)
        key_name = str(curr_key)
        logging.info('Considering key: %s', key_name)
        if key_name in results_dict:
          continue
        results_dict[key_name] = True
        keys.append(curr_key)
      if keys:
        result = db.get(keys)
        # Drop stale references whose bug entity no longer exists.
        result = [r for r in result if r]
        results.append([key, result])
  return results
def GetQueriesForUrl(urlnorm, state, status, cycles=None):
  """Retrieves a list of queries to try for a given URL.

  Each query represents a possible way to find matches, each one has different
  relevancy implications:
     query[0] = Does a full URL match (considered the most relevant).
     query[1] = Does a hostname + path match.
     query[2] = Does a hostname match (considered the least relevant).

  Args:
    urlnorm: NormalizUrlResult object.
    state: State to filter on. If set to None,
        bugs will not be filtered based on state.
    status: Status to filter on. If set to None,
        bugs will not be filtered based on status.
    cycles: Optional list of test cycles to scope the queries to.

  Returns:
    A list of (key, Query) tuples.
  """
  url_no_schema = re.sub('^https?://', '', urlnorm.url)
  hostname_path = urlnorm.hostname + urlnorm.path
  url_query = UrlBugMap.all().filter('url = ', TruncateStr(urlnorm.url))
  # NOTE: the original code built this tuple under the name url_query_tuple
  # but appended the undefined name url_tuple below, raising NameError on
  # any URL with a distinct scheme-full form.
  url_tuple = (urlnorm.url, url_query)
  hostname_path_query = UrlBugMap.all()
  hostname_path_query = hostname_path_query.filter(
      'hostname = ', TruncateStr(urlnorm.hostname))
  hostname_path_query = hostname_path_query.filter(
      'path = ', TruncateStr(urlnorm.path))
  hostname_path_tuple = (hostname_path, hostname_path_query)
  hostname_query = UrlBugMap.all().filter(
      'hostname = ', TruncateStr(urlnorm.hostname))
  hostname_tuple = (urlnorm.hostname, hostname_query)
  queries = []
  if url_no_schema == hostname_path:
    if urlnorm.path:
      queries.append(hostname_path_tuple)
    queries.append(hostname_tuple)
  elif hostname_path == urlnorm.hostname:
    queries.append(url_tuple)
    queries.append(hostname_tuple)
  else:
    queries.append(url_tuple)
    queries.append(hostname_path_tuple)
    queries.append(hostname_tuple)
  queries = [(k, q.order('-last_update')) for (k, q) in queries]
  if cycles:
    queries = [(k, q.filter('test_cycle in ', cycles)) for (k, q) in queries]
  # If states is specified, filter results to query bug matching it's value.
  if state:
    queries = [(k, q.filter('state = ', state.lower())) for (k, q) in queries]
  if status:
    queries = [(k, q.filter('status = ', status.lower())) for (k, q) in queries]
  return queries
def DeleteAllMappingsForBug(bug):
  """Deletes every URL mapping attached to the given bug.

  Args:
    bug: The bug whose mappings should be removed.

  Returns:
    The total number of mappings deleted.
  """
  total_deleted = 0
  while True:
    # bug_urls is the reverse-reference query from UrlBugMap; fetch and
    # delete in capped batches until nothing is left.
    batch = bug.bug_urls.fetch(_MAX_RESULTS_CAP)
    if not batch:
      break
    total_deleted += len(batch)
    db.delete(batch)
  return total_deleted
def DeleteBugAndMappings(bug_id, project, provider):
  """Delete bug and all mappings associated with that bug.

  Args:
    bug_id: ID of the bug to delete.
    project: Project the bug belongs to.
    provider: Issue provider the bug came from.

  Returns:
    The total amount of mappings deleted.
  """
  mappings_deleted = 0
  bug = bugs.GetBug(bug_id, project, provider)
  if bug:
    # Remove the URL mappings first, then the bug entity itself.
    mappings_deleted = DeleteAllMappingsForBug(bug)
    bug.delete()
  return mappings_deleted
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite Run model.
Bite Run model is used to identify each run of a group of tests.
"""
__author__ = 'phu@google.com (Po Hu)'
import datetime
import logging
#Import not at top
#pylint: disable-msg=C6204
from google.appengine.ext import db
from models import bite_event
from models import bite_suite
from utils import basic_util
class Error(Exception):
  """Base exception for the bite_run module."""
  pass
class MissingRunNameError(Error):
  """Misses the run name."""
class MissingRunError(Error):
  """Misses the run."""
class DuplicatedProjectNameError(Error):
  """Has the project name existing."""
class BiteRunTemplate(db.Model):
  """Contains the run template info."""
  name = db.StringProperty(required=True)
  description = db.StringProperty(required=False)
  # The suite this template creates runs for.
  suite = db.ReferenceProperty(bite_suite.BiteSuite, required=True)
  run_once = db.BooleanProperty(required=True, default=True)
  # Serialized watchdog (recurring schedule) configuration.
  watchdog_setting = db.StringProperty(required=False)
  filtered_labels = db.StringListProperty(default=None)
  test_dimension_labels = db.StringListProperty(default=None)
  tokens = db.StringProperty(required=False)
  start_url = db.StringProperty(required=False)
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
class BiteScheduledJob(db.Model):
  """Contains scheduled jobs info."""
  # Template whose schedule this job tracks.
  run_template = db.ReferenceProperty(BiteRunTemplate, required=True)
  suite = db.StringProperty(required=False)
  project = db.StringProperty(required=False)
  # Last time the watchdog fired for this job.
  watchdog_last_time = db.DateTimeProperty()
  target_url_versions = db.StringListProperty()
class BiteRun(db.Model):
  """Contains run related info."""
  name = db.StringProperty(required=True)
  # Aggregate test counters for this run.
  tests_number = db.IntegerProperty(required=False)
  passed_number = db.IntegerProperty(required=False)
  failed_number = db.IntegerProperty(required=False)
  queued_number = db.IntegerProperty(required=False)
  suite = db.ReferenceProperty(bite_suite.BiteSuite, required=True)
  visible = db.BooleanProperty(required=True, default=True)
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
  start_time = db.DateTimeProperty(required=False)
  end_time = db.DateTimeProperty(required=False)
  # Lifecycle: queued -> running -> completed.
  status = db.StringProperty(
      required=False,
      choices=('queued', 'running', 'completed'))
  labels = db.StringListProperty(default=None)
  tokens = db.StringProperty(required=False)
  test_dimension_labels = db.StringListProperty(default=None)
  start_url = db.StringProperty(required=False)
  # Template that spawned this run, when applicable.
  run_template = db.ReferenceProperty(BiteRunTemplate, required=False)
class BiteRunSlice(db.Model):
  """Contains a run slice info which is used to control results."""
  run = db.ReferenceProperty(BiteRun, required=False)
  # Per-slice counters; summed across slices by GetTestsNumberOfStatus.
  passed_number = db.IntegerProperty(required=False)
  failed_number = db.IntegerProperty(required=False)
  queued_number = db.IntegerProperty(required=False)
  tests_number = db.IntegerProperty(required=False)
def GetAllScheduledJobs(project_name=''):
  """Returns all scheduled jobs, optionally scoped to a single project."""
  query = BiteScheduledJob.all()
  if project_name:
    query.filter('project =', project_name)
  return query
def AddScheduledJob(run_template_key_str, interval):
  """Adds a scheduled job for a suite.

  Args:
    run_template_key_str: Str key of the run template to schedule.
    interval: Minutes between runs; when falsy, the value is parsed from
        the template's watchdog setting instead.
  """
  run_template = BiteRunTemplate.get(db.Key(str(run_template_key_str)))
  suite = run_template.suite
  if not interval:
    # ParseWatchdogSetting is defined in bite_suite, not this module; the
    # unqualified call raised NameError whenever interval was falsy.
    interval = bite_suite.ParseWatchdogSetting(run_template.watchdog_setting)
  # Replace any existing schedule for this template.
  for obj in run_template.bitescheduledjob_set:
    obj.delete()
  # add scheduled job
  if int(interval) > 0:
    scheduled_job = BiteScheduledJob(
        watchdog_last_time=datetime.datetime.now(),
        target_url_versions=[],
        run_template=run_template,
        suite=suite.name,
        project=suite.parent().name)
    scheduled_job.put()
    bite_event.AddEvent(scheduled_job, action='schedule', event_type='schedule',
                        name=run_template.name,
                        labels=run_template.filtered_labels,
                        project=suite.parent().name)
def UpdateScheduledJobs(jobs):
  """Updates the scheduled jobs.

  Args:
    jobs: Iterable of BiteScheduledJob entities to persist in one batch.
  """
  db.put(jobs)
def GetRunTemplatesWithSuite(suite):
  """Gets the run templates associated with a suite.

  Args:
    suite: BiteSuite entity (or key) to match templates against.

  Returns:
    A Query over BiteRunTemplate entities referencing the suite.
  """
  return BiteRunTemplate.all().filter('suite =', suite)
def CheckRunTemplateExists(run_template_key_str):
  """Checks if the run template exists or not.

  Args:
    run_template_key_str: Str form of the template's Datastore key.

  Returns:
    True when the entity exists, False otherwise.
  """
  try:
    # get() returns None for a valid key with no entity; the previous
    # version reported True in that case.
    return BiteRunTemplate.get(db.Key(run_template_key_str)) is not None
  except (db.BadKeyError, db.KindError) as err:
    # BadKeyError: malformed key string; KindError: key of the wrong kind.
    logging.info('The given run template key is not found:' +
                 str(err))
    return False
def AddRunTemplate(name, suite_key_str, description='', run_once=True,
                   watchdog_setting='', filtered_labels=None,
                   test_dimension_labels=None, tokens='',
                   start_url=''):
  """Adds a run template.

  Args:
    name: Name of the template; required.
    suite_key_str: Str key of the suite the template belongs to.
    description: Optional description text.
    run_once: Whether the template should only run once.
    watchdog_setting: Serialized recurring-schedule configuration.
    filtered_labels: Labels used to filter tests; defaults to [].
    test_dimension_labels: Test dimension labels; defaults to [].
    tokens: Worker tokens string.
    start_url: URL the run starts from.

  Returns:
    The newly created BiteRunTemplate.

  Raises:
    MissingRunNameError: When no name is given.
  """
  suite = bite_suite.BiteSuite.get(db.Key(suite_key_str))
  if not name:
    raise MissingRunNameError()
  def PutRun():
    """Saves the run entity."""
    run = BiteRunTemplate(
        name=name,
        description=description,
        suite=suite,
        run_once=run_once,
        watchdog_setting=watchdog_setting,
        filtered_labels=filtered_labels or [],
        test_dimension_labels=test_dimension_labels or [],
        tokens=tokens,
        start_url=start_url)
    run.put()
    return run
  # Create the entity transactionally, then log the create event.
  run = db.run_in_transaction(PutRun)
  bite_event.AddEvent(run, action=bite_event.EventActions.CREATE,
                      event_type=bite_event.EventTypes.RUN_TEMPLATE,
                      name=run.name, labels=run.filtered_labels,
                      project=run.suite.parent().name)
  return run
def UpdateRunTemplate(name, suite_key_str, run_template_key_str,
                      description='', run_once=True,
                      watchdog_setting='', filtered_labels=None,
                      test_dimension_labels=None, tokens='',
                      start_url=''):
  """Updates a run template.

  Args:
    name: New name for the template; required.
    suite_key_str: Str key of the suite the template belongs to.
    run_template_key_str: Str key of the template to update.
    description: Optional description text.
    run_once: Whether the template should only run once.
    watchdog_setting: Serialized recurring-schedule configuration.
    filtered_labels: Labels used to filter tests; defaults to [].
    test_dimension_labels: Test dimension labels; defaults to [].
    tokens: Worker tokens string.
    start_url: URL the run starts from.

  Returns:
    The updated BiteRunTemplate.

  Raises:
    MissingRunNameError: When no name is given.
  """
  suite = bite_suite.BiteSuite.get(db.Key(suite_key_str))
  if not name:
    raise MissingRunNameError()
  logging.info(tokens)
  def Update():
    """Updates the bite run template instance."""
    run = BiteRunTemplate.get(db.Key(run_template_key_str))
    run.name = name
    run.description = description
    run.suite = suite
    run.run_once = run_once
    run.watchdog_setting = watchdog_setting
    run.filtered_labels = filtered_labels or []
    run.test_dimension_labels = test_dimension_labels or []
    run.tokens = tokens
    run.start_url = start_url
    run.put()
    return run
  # Apply the update transactionally, then log the modify event.
  run = db.run_in_transaction(Update)
  bite_event.AddEvent(run, action=bite_event.EventActions.MODIFY,
                      event_type=bite_event.EventTypes.RUN_TEMPLATE,
                      name=run.name, labels=run.filtered_labels,
                      project=run.suite.parent().name)
  return run
def AddRun(name, suite_key, start_time, test_info_list,
           tokens='', labels=None, dimensions=None, start_url='',
           run_template_key_str=''):
  """Adds a run.

  Args:
    name: Run name; falls back to the suite's name when empty.
    suite_key: Key of the suite this run executes.
    start_time: Datetime the run starts; part of the run's key name.
    test_info_list: List of tests in the run (used for counters only).
    tokens: Worker tokens string.
    labels: Run labels; defaults to the suite's labels.
    dimensions: Test dimension labels.
    start_url: URL the run starts from.
    run_template_key_str: Optional str key of the originating template.

  Returns:
    The created (or pre-existing) BiteRun.

  Raises:
    MissingRunNameError: When neither a name nor a suite key is given.
  """
  suite = bite_suite.BiteSuite.get(suite_key)
  if not name:
    if not suite_key:
      raise MissingRunNameError()
    else:
      name = suite.name
  # Add a run entity.
  # Key name embeds the start time so repeated runs of a suite are distinct.
  run_key = db.Key.from_path(bite_suite.BiteSuite.kind(),
                             suite.key().name(),
                             'BiteRun',
                             name + '_' + str(start_time))
  tests_len = len(test_info_list)
  if not labels:
    labels = suite.labels
  run_template = None
  if run_template_key_str:
    run_template = db.Key(run_template_key_str)
  # get_or_insert makes the creation idempotent for the same key name.
  run = BiteRun.get_or_insert(run_key.name(),
                              name=name,
                              tests_number=tests_len,
                              passed_number=0,
                              failed_number=0,
                              queued_number=tests_len,
                              suite=suite,
                              start_time=start_time,
                              end_time=None,
                              status='queued',
                              labels=labels,
                              tokens=tokens,
                              test_dimension_labels=dimensions,
                              start_url=start_url,
                              run_template=run_template)
  bite_event.AddEvent(run, action='create', event_type='run',
                      name=run.name, labels=run.labels,
                      project=run.suite.parent().name)
  return run
def DeleteRun(run_key_str):
  """Deletes the run identified by the given str key."""
  run_key = db.Key(run_key_str)
  db.delete(run_key)
  #bite_event.AddEvent(run_key, action='delete', event_type='run')
def GetRunsOfSuite(suite_key_str, reverse_start_order=False, max_num=None,
                   status=None, past_days=None):
  """Queries the runs belonging to a suite.

  Args:
    suite_key_str: Str key of the suite.
    reverse_start_order: Order newest-first by start_time when True.
    max_num: Fetch at most this many runs; return the raw query when falsy.
    status: Only runs with this status ('all'/falsy disables the filter).
    past_days: Only runs started within this many days.

  Returns:
    A fetched list when max_num is given, otherwise the Query itself.
  """
  query = BiteRun.all().filter('suite = ', db.Key(suite_key_str))
  if status and status != 'all':
    query.filter('status =', status)
  if reverse_start_order:
    query.order('-start_time')
  if past_days:
    cutoff = datetime.datetime.now() - datetime.timedelta(days=past_days)
    query.filter('start_time >', cutoff)
  return query.fetch(max_num) if max_num else query
def GetRunsOfTemplate(template_key, reverse_start_order=False, max_num=None,
                      status=None, past_days=None):
  """Queries the runs spawned from a run template.

  Args:
    template_key: BiteRunTemplate entity, Key, or str key.
    reverse_start_order: Order newest-first by start_time when True.
    max_num: Fetch at most this many runs; return the raw query when falsy.
    status: Only runs with this status ('all'/falsy disables the filter).
    past_days: Only runs started within this many days.

  Returns:
    A fetched list when max_num is given, otherwise the Query itself.
  """
  if isinstance(template_key, str):
    template_key = db.Key(template_key)
  query = BiteRun.all().filter('run_template = ', template_key)
  if status and status != 'all':
    query.filter('status =', status)
  if reverse_start_order:
    query.order('-start_time')
  if past_days:
    cutoff = datetime.datetime.now() - datetime.timedelta(days=past_days)
    query.filter('start_time >', cutoff)
  return query.fetch(max_num) if max_num else query
def AddRunSlice(run_key, index):
  """Creates (or fetches) the run slice with the given index.

  Args:
    run_key: Key of the owning BiteRun.
    index: Slice index, appended to the run's key name.

  Returns:
    The BiteRunSlice entity.

  Raises:
    MissingRunError: When no run key is given.
  """
  if not run_key:
    raise MissingRunError()
  slice_key_name = '%s_%s' % (run_key.name(), index)
  return BiteRunSlice.get_or_insert(slice_key_name,
                                    run=run_key,
                                    passed_number=0,
                                    failed_number=0,
                                    queued_number=0)
def GetSpecifiedRun(tokens):
  """Gets a specified run matching given conditions.

  Args:
    tokens: Worker tokens string to match against runs with queued tests.

  Returns:
    The matched BiteRun, promoted to 'running' if it was still queued.

  Raises:
    IndexError: When no run matches (fetch(1)[0] on an empty result).
  """
  runs = (BiteRun.all().filter('queued_number >', 0).
          filter('tokens =', tokens))
  run = runs.fetch(1)[0]
  # First worker to pick the run up flips it from queued to running.
  if run.status == 'queued':
    run.status = 'running'
    run.put()
  return run
def GetLatestRunsThroughSuite(status):
  """Collects the most recent run (with the given status) of every suite."""
  latest = []
  for suite in bite_suite.LoadAllSuitesOfProjects():
    latest.extend(GetRunsOfSuite(str(suite.key()), True, 1, status))
  return latest
def GetLatestRunsThroughTemplate(status, project_name):
  """Gets the latest runs through run template info.

  Args:
    status: Run status to filter on ('all'/falsy disables the filter).
    project_name: Project to scope to; falsy means all projects.

  Returns:
    A (latest_runs, empty_templates) tuple: the newest run per template,
    and the templates that have no runs yet.
  """
  projects = None
  # Lazy %s formatting also avoids a TypeError when project_name is None,
  # which the previous string concatenation raised.
  logging.info('The project name in run is: %s', project_name)
  if project_name:
    projects = [project_name]
  suites = bite_suite.LoadAllSuitesOfProjects(projects)
  latest_runs = []
  empty_templates = []
  for suite in suites:
    run_templates = GetRunTemplatesWithSuite(suite)
    for run_template in run_templates:
      runs = GetRunsOfTemplate(run_template, True, 1, status)
      if not runs:
        empty_templates.append(run_template)
      latest_runs.extend(runs)
  return latest_runs, empty_templates
def GetTestsNumberOfStatus(run_key_str):
  """Sums pass/fail counters across all slices of a run.

  Args:
    run_key_str: Str key of the run.

  Returns:
    A dict {'passed': int, 'failed': int}.

  Raises:
    MissingRunError: When no run key is given.
  """
  if not run_key_str:
    raise MissingRunError()
  passed = failed = 0
  for run_slice in BiteRunSlice.all().filter('run =', db.Key(run_key_str)):
    passed += run_slice.passed_number
    failed += run_slice.failed_number
  return {'passed': passed, 'failed': failed}
def GetModel(run_key_str):
  """Fetches the BiteRun entity for the given str key."""
  return BiteRun.get(db.Key(run_key_str))
def GetTemplateEntity(run_template_key_str):
  """Fetches the BiteRunTemplate entity for the given str key."""
  return BiteRunTemplate.get(db.Key(run_template_key_str))
def GetEmptyTemplateData(templates):
  """Gets the UI row data for templates that have no runs yet.

  Args:
    templates: Iterable of BiteRunTemplate entities.

  Returns:
    A list of dicts, one per template, describing the template for the UI.
  """
  return [{
      'id': '',
      'templateId': str(template.key()),
      'type': 'runTemplate',
      'title': template.name,
      # list.extend returns None, which the old code stored here; use
      # concatenation so 'labels' is the combined list and the template's
      # filtered_labels list is not mutated in place.
      'labels': (template.filtered_labels +
                 template.test_dimension_labels),
      'icon': '/images/run00-pie.png',
      'state': 'template',
      'actions': [{'title': 'View details',
                   'operation': 'runDetails'},
                  {'title': 'Start a run',
                   'operation': 'startARunTemplate'}],
      'props': []
  } for template in templates]
def GetRunsData(runs):
  """Gets all the relevant runs info.

  Args:
    runs: Iterable of BiteRun entities.

  Returns:
    A list of dicts, one per run, with display fields for the UI.
  """
  runs_data = []
  for run in runs:
    # A run with an end_time is finished; otherwise treat it as running.
    state = 'running'
    if run.end_time:
      state = 'finished'
    number_obj = GetTestsNumberOfStatus(str(run.key()))
    passed_num = number_obj['passed']
    failed_num = number_obj['failed']
    total_num = passed_num + failed_num
    # Display values are "percent (count)" strings.
    complete_value = '%s (%d)' % (
        basic_util.GetPercentStr(total_num, run.tests_number),
        total_num)
    passed_value = '%s (%d)' % (
        basic_util.GetPercentStr(passed_num, total_num),
        passed_num)
    failed_value = '%s (%d)' % (
        basic_util.GetPercentStr(failed_num, total_num),
        failed_num)
    # Project name is surfaced as the first label.
    labels = [run.suite.parent().name]
    labels.extend(run.labels)
    start_time_pst = basic_util.ConvertFromUtcToPst(run.start_time)
    start_time = basic_util.CreateStartStr(start_time_pst)
    template_id = ''
    if run.run_template:
      template_id = str(run.run_template.key())
    run_data = {
        'id': str(run.key()),
        'templateId': template_id,
        'type': 'run',
        'title': run.name,
        'labels': labels,
        'icon': '/images/run00-pie.png',
        'state': state,
        'actions': [
            {'title': 'View details',
             'operation': 'runDetails'},
            {'title': 'Delete',
             'operation': 'deleteRun'}],
        'props': [{'label': '# of tests', 'value': run.tests_number},
                  {'label': 'started', 'value': start_time},
                  {'label': 'complete', 'value': complete_value},
                  {'label': 'passed', 'value': passed_value},
                  {'label': 'failed', 'value': failed_value}]
    }
    runs_data.append(run_data)
  return runs_data
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Model for bug comments."""
__author__ = 'alexis.torres@gmail.com (Alexis O. Torres)'
from google.appengine.ext import db
from models import bugs
class Comment(db.Model):
  """Models a bug comment stored in AppEngine's Datastore."""
  message = db.TextProperty(required=True)
  # Owning bug; reverse query available as bug.bug_comments.
  bug = db.ReferenceProperty(reference_class=bugs.Bug,
                             collection_name='bug_comments')
  # Tracks when an entry is added and modified.
  added = db.DateTimeProperty(required=False, auto_now_add=True)
def AddComment(bug_key, message):
  """Creates and persists a new comment on the given bug.

  Args:
    bug_key: Key of the bug the comment belongs to.
    message: Comment text.

  Returns:
    The stored Comment entity.
  """
  new_comment = Comment(message=message, bug=bugs.GetBugByKey(bug_key))
  new_comment.put()
  return new_comment
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite suite and test map model.
This model is used to store the association between a suite and tests.
"""
__author__ = 'phu@google.com (Po Hu)'
#Import not at top
#pylint: disable-msg=C6204
try:
from google.appengine.ext import db
from models import bite_suite
from utils import basic_util
except ImportError:
from google.appengine.ext import db
from models import bite_suite
from utils import basic_util
DEFAULT_TEST_ID_LIST_LENGTH = 200
DEFAULT_PUT_DELETE_MAX = 500
class Error(Exception):
  """Base exception for the suite-tests map module."""
  pass
class MissingSuiteError(Error):
  """Misses the suite."""
class DuplicatedProjectNameError(Error):
  """Has the project name existing."""
class SuiteTestsMap(db.Model):
  """Contains suite and tests association."""
  suite = db.ReferenceProperty(bite_suite.BiteSuite,
                               collection_name='suite_tests_map')
  created_time = db.DateTimeProperty(required=False, auto_now_add=True)
  # JSON-serialized chunk of test info dicts (see AddTestsToSuite).
  test_info_list_str = db.TextProperty()
def GetAllTestInfoOfSuite(suite_key_str):
  """Collects the test info entries from every map row of a suite.

  Args:
    suite_key_str: Str key of the suite.

  Returns:
    A flat list of all test info dicts stored for the suite.
  """
  suite_key = db.Key(suite_key_str)
  rows = SuiteTestsMap.all().filter('suite =', suite_key)
  combined = []
  for row in rows:
    combined.extend(basic_util.ParseJsonStr(row.test_info_list_str))
  return combined
def AddTestsToSuite(suite_key_str, test_info_list):
  """Adds tests to a suite.

  The tests are stored in chunks of DEFAULT_TEST_ID_LIST_LENGTH entries per
  SuiteTestsMap row, written to the Datastore in batches of at most
  DEFAULT_PUT_DELETE_MAX rows.

  Args:
    suite_key_str: Str key of the suite.
    test_info_list: List of test info dicts to associate with the suite.
        (Left unmodified; the previous version destructively consumed it.)

  Raises:
    MissingSuiteError: When no suite key is given.
  """
  if not suite_key_str:
    raise MissingSuiteError('There is no suite defined.')
  suite_key = db.Key(suite_key_str)
  if not test_info_list:
    return
  batch = []
  # Iterate over slices instead of del-ing chunks off the caller's list.
  for start in range(0, len(test_info_list), DEFAULT_TEST_ID_LIST_LENGTH):
    chunk = test_info_list[start:start + DEFAULT_TEST_ID_LIST_LENGTH]
    batch.append(SuiteTestsMap(
        suite=suite_key,
        test_info_list_str=basic_util.DumpJsonStr(chunk)))
    if len(batch) == DEFAULT_PUT_DELETE_MAX:
      db.put(batch)
      batch = []
  if batch:
    db.put(batch)
def DeleteTestsFromSuite(suite_key_str):
  """Deletes tests from a suite.

  Args:
    suite_key_str: Str key of the suite whose map rows are removed.

  Raises:
    MissingSuiteError: When no suite key is given.
  """
  if not suite_key_str:
    raise MissingSuiteError('There is no suite defined.')
  suite_key = db.Key(suite_key_str)
  key_query = SuiteTestsMap.all(keys_only=True).filter(
      'suite = ', suite_key)
  if not key_query:
    return
  pending = list(key_query)
  # Delete in Datastore-sized batches.
  while len(pending) > DEFAULT_PUT_DELETE_MAX:
    db.delete(pending[:DEFAULT_PUT_DELETE_MAX])
    del pending[:DEFAULT_PUT_DELETE_MAX]
  db.delete(pending)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite suite model.
Bite Suite model is one of the main meat of BITE models. Users could add
a list of tests to a suite, and then configure the suite from various of
aspects like watchdog job, reports, dimensions, retry logic, etc.
"""
__author__ = 'phu@google.com (Po Hu)'
import datetime
import logging
from google.appengine.ext import db
from models import bite_event
from models import bite_project
from utils import basic_util
DEFAULT_SUITE_TIMEOUT = 9999
DEFAULT_AUTO_DELETE_DEADLINE = 9999
class Error(Exception):
  """Base exception for the bite_suite module."""
  pass
class MissingSuiteNameError(Error):
  """Misses the suite name."""
class DuplicatedSuiteNameError(Error):
  """Has the suite name existing under project."""
class MissingProjectError(Error):
  """Misses the project name."""
class BiteSuite(db.Model):
  """Contains a group of tests as well as configurations."""
  name = db.StringProperty(required=True)
  visible = db.BooleanProperty(required=True, default=True)
  description = db.TextProperty(required=False)
  # project = db.ReferenceProperty(bite_project.BiteProject)
  labels = db.StringListProperty(default=None)
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
  created_time = db.DateTimeProperty(required=False, auto_now_add=True)
  configs = db.TextProperty(required=False)  # Used to override test's config.
  # Either interval or concrete time.
  watchdog_setting = db.StringProperty(required=False)
  latest_version_url = db.LinkProperty(required=False)
  # Includes sender, recipient, pass rate, etc info.
  report_setting = db.TextProperty(required=False)
  retry_times = db.IntegerProperty(required=False)
  default_timeout = db.IntegerProperty(required=False)
  # In how long should the job be auto deleted if not executed.
  auto_delete_deadline = db.IntegerProperty(required=False)
  reminder_setting = db.TextProperty(required=False)
  last_modified_time = db.DateTimeProperty(required=False, auto_now=True)
  last_modified_by = db.UserProperty(required=False, auto_current_user=True)
  tests_number = db.IntegerProperty(required=False)
  # choices must be an iterable of valid values. The previous ('acc') is
  # just the string 'acc', which makes the valid choices its characters
  # ('a', 'c') and rejects the intended value 'acc'.
  test_source = db.StringProperty(required=False,
                                  choices=('acc',))
  test_src_dict = db.TextProperty(required=False)
def ParseWatchdogSetting(watchdog_setting):
  """Parses the watchdog settings and returns the interval in mins.

  Args:
    watchdog_setting: JSON string, expected to decode to a dict with an
        optional 'every' entry.

  Returns:
    The interval in minutes as an int, or 0 when not configured.
  """
  watchdog_setting_obj = basic_util.ParseJsonStr(watchdog_setting)
  # dict.has_key is deprecated (removed in Python 3); use the "in" operator.
  if (isinstance(watchdog_setting_obj, dict) and
      'every' in watchdog_setting_obj):
    return int(watchdog_setting_obj['every'])
  else:
    return 0
def LoadSuite(name, project_name, suite_key_str=''):
  """Loads a bite suite by key string, or by project and suite name.

  Args:
    name: Suite name (required unless suite_key_str is given).
    project_name: Owning project name (required unless suite_key_str given).
    suite_key_str: Optional str key; when present it wins outright.

  Returns:
    The BiteSuite entity, or None if absent.

  Raises:
    MissingSuiteNameError: When name is empty.
    MissingProjectError: When project_name is empty.
  """
  if suite_key_str:
    return BiteSuite.get(db.Key(suite_key_str))
  if not name:
    raise MissingSuiteNameError('There is no suite name defined.')
  if not project_name:
    raise MissingProjectError('No project name was given.')
  # Suites are child entities of their project.
  key = db.Key.from_path(bite_project.BiteProject.kind(),
                         project_name, 'BiteSuite', name)
  return BiteSuite.get(key)
def LoadAllSuitesOfProjects(project_names=None):
  """Loads all the suites of the given projects (all projects when None)."""
  if project_names is None:
    project_names = [project.name
                     for project in bite_project.GetAllProjects()]
  all_suites = []
  for project_name in project_names:
    all_suites.extend(LoadAllSuitesOfProject(project_name))
  return all_suites
def LoadAllSuitesOfProject(project_name):
  """Loads all of the suites of a project.

  Args:
    project_name: Name of the owning project.

  Returns:
    A Query over the project's BiteSuite child entities.
  """
  project_key = db.Key.from_path(bite_project.BiteProject.kind(),
                                 project_name)
  return BiteSuite.all().ancestor(project_key)
def GetSuiteWatchdogStr(watchdog_setting, interval):
  """Creates a watchdog setting string to save.

  Args:
    watchdog_setting: Existing settings dict, or falsy for a fresh one.
    interval: Interval in minutes; stored under 'every' when truthy.

  Returns:
    JSON string of the (possibly updated) settings dict.
  """
  if not watchdog_setting:
    watchdog_setting = {}
  if interval:
    watchdog_setting['every'] = interval
  return basic_util.DumpJsonStr(watchdog_setting)
def GetSuiteConfigStr(configs, tokens, start_url=''):
  """Creates a suite config str.

  Args:
    configs: Existing configs as a dict, a JSON str of {'configs': {...}},
        or falsy for a fresh dict.
    tokens: Worker tokens to store.
    start_url: Start URL to store.

  Returns:
    JSON string of the form {'configs': {...}}.
  """
  if not configs:
    configs = {}
  if isinstance(configs, str):
    configs = basic_util.ParseJsonStr(configs)['configs']
  configs['tokens'] = tokens
  configs['start_url'] = start_url
  return basic_util.DumpJsonStr({'configs': configs})
def GetSuiteReportStr(report, email_from, email_to, failure_thresh):
  """Creates a suite report str.

  Args:
    report: Existing report as a dict, a JSON str of {'report': {...}},
        or falsy for a fresh dict.
    email_from: Report sender address.
    email_to: Report recipient address.
    failure_thresh: Failure-rate threshold that triggers the report.

  Returns:
    JSON string of the form {'report': {...}}.
  """
  if not report:
    report = {}
  if isinstance(report, str):
    report = basic_util.ParseJsonStr(report)['report']
  report['email_from'] = email_from
  report['email_to'] = email_to
  report['failure_thresh'] = failure_thresh
  return basic_util.DumpJsonStr({'report': report})
def ParseReportStr(report_str):
  """Parses the report string.

  The report string in Json format should be:
  {'report': {'name1': 'value1',
              ...}}

  Args:
    report_str: A Json format report string.

  Returns:
    A dict of report info.
  """
  if not report_str:
    return {}
  return basic_util.ParseJsonStr(report_str)['report']
def ParseConfigStr(configs_str):
  """Parses the configs string.

  The configs string in Json format should be:
  {'configs': {'name1': 'value1',
               ...}}

  Args:
    configs_str: A Json format configs string.

  Returns:
    A dict of configs.
  """
  if not configs_str:
    return {}
  return basic_util.ParseJsonStr(configs_str)['configs']
def GetSuiteTokens(suite):
  """Gets the tokens of the given suite.

  Args:
    suite: A BiteSuite entity or its str key.

  Returns:
    The tokens string, or '' when the configs cannot be parsed.
  """
  try:
    return GetSuiteAttribute(suite, 'configs', 'tokens')
  except Exception:
    # A bare except also swallowed KeyboardInterrupt/SystemExit; keep the
    # best-effort behavior but only for ordinary errors.
    return ''
def GetStartUrl(suite):
  """Gets the start url of the given suite."""
  return GetSuiteAttribute(suite, 'configs', 'start_url')
def GetSuiteAttribute(suite, prop, attr):
  """Gets the attribute of the given suite.

  Args:
    suite: A BiteSuite entity or its str/unicode key.
    prop: Property group: 'configs', 'watchdog_setting' or 'report_setting'.
    attr: Attribute name inside the property group (ignored for
        'watchdog_setting', whose parsed interval is returned directly).

  Returns:
    The attribute value, or '' when absent.
  """
  if isinstance(suite, unicode) or isinstance(suite, str):
    suite = BiteSuite.get(db.Key(suite))
  prop_obj = {}
  if prop == 'configs':
    prop_obj = ParseConfigStr(str(suite.configs))
  elif prop == 'watchdog_setting':
    return ParseWatchdogSetting(suite.watchdog_setting)
  elif prop == 'report_setting':
    prop_obj = ParseReportStr(str(suite.report_setting))
  result = ''
  # dict.has_key is deprecated (removed in Python 3); use the "in" operator.
  if attr in prop_obj:
    result = prop_obj[attr]
  return result
def CheckSuiteExists(suite_name, project_name):
  """Returns True when the named suite exists under the given project."""
  key = db.Key.from_path(bite_project.BiteProject.kind(),
                         project_name, 'BiteSuite', suite_name)
  return BiteSuite.get(key) is not None
def UpdateSuite(name, project_name, description='', labels=None, configs='',
                watchdog_setting='', latest_version_url=None, report_setting='',
                retry_times=0, default_timeout=DEFAULT_SUITE_TIMEOUT,
                auto_delete_deadline=DEFAULT_AUTO_DELETE_DEADLINE,
                reminder_setting='', tests_num=0,
                test_source='', test_src_dict=''):
  """Updates the given suite.

  Args:
    name: Str name of the suite (also its key name under the project).
    project_name: Str name of the parent project.
    description: Str description of the suite.
    labels: Optional list of str labels; None is treated as [].
    configs: Str Json configs blob.
    watchdog_setting: Str watchdog setting.
    latest_version_url: Optional str url of the latest version.
    report_setting: Str Json report setting blob.
    retry_times: Int number of retries.
    default_timeout: Int default timeout.
    auto_delete_deadline: Int auto-delete deadline.
    reminder_setting: Str reminder setting.
    tests_num: Int number of tests (stored as tests_number).
    test_source: Str source of the tests.
    test_src_dict: Str test source dict blob.

  Returns:
    The updated BiteSuite entity.
  """
  suite_key = db.Key.from_path(bite_project.BiteProject.kind(),
                               project_name, 'BiteSuite', name)
  suite = BiteSuite.get(suite_key)
  suite.name = name
  suite.description = description
  # Normalize None to [] for the list property, matching AddSuite.
  suite.labels = labels or []
  suite.configs = configs
  suite.watchdog_setting = watchdog_setting
  suite.latest_version_url = latest_version_url
  suite.report_setting = report_setting
  suite.retry_times = retry_times
  suite.default_timeout = default_timeout
  suite.auto_delete_deadline = auto_delete_deadline
  suite.reminder_setting = reminder_setting
  suite.tests_number = tests_num
  suite.test_source = test_source
  suite.test_src_dict = test_src_dict
  suite.put()
  bite_event.AddEvent(suite, action='modify', event_type='set',
                      name=suite.name, labels=suite.labels,
                      project=suite.parent().name)
  return suite
def AddSuite(name, project_name, description='', labels=None, configs='',
             watchdog_setting='', latest_version_url=None, report_setting='',
             retry_times=0, default_timeout=DEFAULT_SUITE_TIMEOUT,
             auto_delete_deadline=DEFAULT_AUTO_DELETE_DEADLINE,
             reminder_setting='', tests_num=0,
             test_source='', test_src_dict=''):
  """Adds a bite suite.

  Args:
    name: Str name of the suite; also used as the entity key name.
    project_name: Str name of the parent project.
    description: Str description of the suite.
    labels: Optional list of str labels.
    configs: Str Json configs blob.
    watchdog_setting: Str watchdog setting.
    latest_version_url: Optional str url of the latest version.
    report_setting: Str Json report setting blob.
    retry_times: Int number of retries.
    default_timeout: Int default timeout.
    auto_delete_deadline: Int auto-delete deadline.
    reminder_setting: Str reminder setting.
    tests_num: Int number of tests (stored as tests_number).
    test_source: Str source of the tests.
    test_src_dict: Str test source dict blob.

  Returns:
    The created BiteSuite entity.

  Raises:
    MissingSuiteNameError: No suite name given.
    MissingProjectError: No project name given.
    DuplicatedSuiteNameError: A suite with that name already exists.
  """
  if not name:
    raise MissingSuiteNameError('There is no suite name defined.')
  if not project_name:
    raise MissingProjectError('No project name was given.')
  project_key = db.Key.from_path(bite_project.BiteProject.kind(),
                                 project_name)
  if CheckSuiteExists(name, project_name):
    raise DuplicatedSuiteNameError('Duplicated suite name.')
  suite_fields = {
      'parent': project_key,
      'name': name,
      'description': description,
      'labels': labels or [],
      'configs': configs,
      'watchdog_setting': watchdog_setting,
      'latest_version_url': latest_version_url,
      'report_setting': report_setting,
      'retry_times': retry_times,
      'default_timeout': default_timeout,
      'auto_delete_deadline': auto_delete_deadline,
      'reminder_setting': reminder_setting,
      'tests_number': tests_num,
      'test_source': test_source,
      'test_src_dict': test_src_dict,
  }
  # Assume name is ascii.
  suite = BiteSuite.get_or_insert(str(name), **suite_fields)
  bite_event.AddEvent(suite, action='create', event_type='set',
                      name=suite.name, labels=suite.labels,
                      project=suite.parent().name)
  return suite
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Counter sharding class."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import random
from google.appengine.ext import db
class Error(Exception):
  """Base exception for the counter-shard module."""
  pass
class MissingShardNameError(Error):
  """Raised when a shard name is empty or not provided."""
  pass
class CounterShard(db.Model):
  """Shards for the counter."""
  # Partial count held by this partition; the real total is the sum over
  # all partitions (see GetCount).
  count = db.IntegerProperty(required=True, default=0)
# Number of partitions each named counter is split across, to spread
# datastore write contention.
_NUM_SHARDS = 9
def _GetShardKeyName(name, index):
"""Gets a key name for a partition on a given shard.
Args:
name: Str name of the shard.
index: Int partitin number.
Returns:
Str key name for the given partition.
"""
return 'CounterShard_%s_%d' % (name, index)
def _GetShardKey(name, index):
  """Returns the unique db.Key for the specified shard partition.

  Args:
    name: Str name of the shard.
    index: Int partition number.

  Returns:
    A db.Key object.
  """
  return db.Key.from_path('CounterShard', _GetShardKeyName(name, index))
def GetCount(name):
  """Retrieves the total count value for a given sharded counter.

  The counter value is calculated each time this method is called.
  This value is eventually consistent.

  Args:
    name: Str name of the shard.

  Returns:
    The total downloads count as an int.

  Raises:
    MissingShardNameError: Raised if name is not specified.
  """
  if not name:
    raise MissingShardNameError()
  shard_keys = [_GetShardKey(name, index) for index in range(_NUM_SHARDS)]
  # db.get returns None entries for partitions that were never written.
  return sum(shard.count for shard in db.get(shard_keys) if shard)
def Increment(name):
  """Increment the value for a given sharded counter.

  Args:
    name: Str name of the shard.

  Raises:
    MissingShardNameError: Raised if name is not specified.
  """
  if not name:
    raise MissingShardNameError()

  def _IncrementRandomShard():
    # Pick a random partition so concurrent increments hit different
    # entities, reducing transaction contention.
    index = random.randint(0, _NUM_SHARDS - 1)
    counter = db.get(_GetShardKey(name, index))
    if not counter:
      counter = CounterShard(key_name=_GetShardKeyName(name, index))
    counter.count += 1
    counter.put()

  db.run_in_transaction(_IncrementRandomShard)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite Event model is used to log activities performed."""
__author__ = 'phu@google.com (Po Hu)'
# Import not at top
#pylint: disable-msg=C6204
from google.appengine.ext import db
# Default cap on the number of events fetched by GetAllEvents.
DEFAULT_MAX_EVENTS = 10000
class Error(Exception):
  """Base exception for the bite_event module."""
  pass
class EventActions(object):
  """The actions that performed for an entity object."""
  # These string values are persisted in BiteEvent.action (see its
  # `choices` tuple), so they must not be changed for existing data.
  CREATE = 'create'
  MODIFY = 'modify'
  PASS = 'pass'
  FAIL = 'fail'
  START = 'start'
  SCHEDULE = 'schedule'
  DELETE = 'delete'
  COMPLETE = 'complete'
class EventTypes(object):
  """The entity object types."""
  # Persisted in BiteEvent.event_type (see its `choices` tuple); do not
  # rename values without migrating existing data.
  PROJECT = 'project'
  SUITE = 'suite'
  RUN = 'run'
  RUN_TEMPLATE = 'run_template'
  SCHEDULE = 'schedule'
  SET = 'set'
class BiteEvent(db.Model):
  """Contains event related info."""
  # Entity the event refers to (suite, run, etc.); optional back-reference.
  host = db.ReferenceProperty(required=False)
  # Display name of the affected entity.
  name = db.StringProperty(required=False)
  # Name of the project the entity belongs to.
  project = db.StringProperty(required=False)
  labels = db.StringListProperty(default=None)
  # What happened to the entity; constrained to the EventActions values.
  action = db.StringProperty(
      required=False,
      choices=(EventActions.CREATE,
               EventActions.MODIFY,
               EventActions.PASS,
               EventActions.FAIL,
               EventActions.START,
               EventActions.SCHEDULE,
               EventActions.DELETE,
               EventActions.COMPLETE))
  # Free-form log message attached to the event.
  log = db.StringProperty(required=False)
  # Kind of entity the event refers to; constrained to EventTypes values.
  event_type = db.StringProperty(
      choices=(EventTypes.PROJECT,
               EventTypes.SUITE,
               EventTypes.RUN,
               EventTypes.RUN_TEMPLATE,
               EventTypes.SCHEDULE,
               EventTypes.SET),
      required=False)
  # Automatically captured at creation time.
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
  created_time = db.DateTimeProperty(required=False, auto_now_add=True)
def AddEvent(host=None, action='', log='', event_type='',
             name='', labels=None, project=''):
  """Adds an event.

  Args:
    host: Optional entity the event refers to.
    action: Str action performed (see EventActions).
    log: Str log message.
    event_type: Str entity type (see EventTypes).
    name: Str display name of the entity; assumed ascii.
    labels: Optional list of str labels; None is treated as [].
    project: Str project name.

  Returns:
    The result of the datastore transaction.
  """
  # Assume name is ascii.
  event_labels = labels or []

  def _CreateEvent():
    new_event = BiteEvent(host=host, action=action, log=log,
                          event_type=event_type,
                          name=name, labels=event_labels,
                          project=project)
    new_event.put()

  return db.run_in_transaction(_CreateEvent)
def GetAllEvents(limit=DEFAULT_MAX_EVENTS, project_name=''):
  """Gets all the events.

  Args:
    limit: Int maximum number of events to fetch.
    project_name: Optional str project name to filter on; '' fetches all.

  Returns:
    A list of BiteEvent entities, newest first.
  """
  query = BiteEvent.all().order('-created_time')
  if project_name:
    query.filter('project =', project_name)
  return query.fetch(limit)
def GetEventsData(get_event_func, project_name, limit=10):
  """Gets events data.

  Args:
    get_event_func: Callable mapping a BiteEvent to its display data;
        events for which it returns a false value are skipped.
    project_name: Str project name to filter on ('' for all projects).
    limit: Int maximum number of events to inspect. Defaults to 10, the
        previously hard-coded value, so existing callers are unaffected.

  Returns:
    A list of the truthy values produced by get_event_func.
  """
  events_data = []
  for event in GetAllEvents(limit, project_name):
    temp_data = get_event_func(event)
    if temp_data:
      events_data.append(temp_data)
  return events_data
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BITE Project model.
Bite Project model is used to differ projects, which is created by
a lead.
"""
__author__ = ('phu@google.com (Po Hu)'
'jasonstredwick@google.com (Jason Stredwick)')
import json
from google.appengine.ext import db
from models import model_helper
class Error(Exception):
  """Base exception for the bite_project module."""
  pass
class MissingProjectNameError(Error):
  """Missing the project name."""
class DuplicatedProjectNameError(Error):
  """Has the project name existing."""
class NoProjectFoundError(Error):
  """A project with the given name was not found."""
class BiteProject(db.Model):
  """Contains project related info."""
  # General information
  name = db.StringProperty(required=False, default='')
  description = db.TextProperty(required=False, default='')
  # NOTE(review): a mutable [] default is shared in plain Python; assumed
  # StringListProperty copies it per entity — confirm against the db API.
  emails = db.StringListProperty(default=[])
  # Creation Information
  visible = db.BooleanProperty(required=False, default=True)
  # User/time stamps are captured automatically by the datastore.
  created_by = db.UserProperty(required=False, auto_current_user_add=True)
  created_time = db.DateTimeProperty(required=False, auto_now_add=True)
  last_modified_time = db.DateTimeProperty(required=False, auto_now=True)
  last_modified_by = db.UserProperty(required=False, auto_current_user=True)
  # External affiliations
  provider = db.StringProperty(required=False)
  provider_info = db.StringProperty(required=False, default='')
  # Run settings
  worker_mode_token = db.StringProperty(required=False, default='')
  start_url_replacement = db.StringProperty(required=False, default='')
  line_timeout_limit = db.IntegerProperty(required=False, default=15)
  max_runs_per_test = db.IntegerProperty(required=False, default=5)
  test_case_line_length = db.IntegerProperty(required=False, default=80)
  save_screen_shot = db.BooleanProperty(required=False, default=True)
def _LookupProject(name):
  """Retrieve a project database entry.

  Args:
    name: Str key name of the project.

  Returns:
    The BiteProject entity.

  Raises:
    MissingProjectNameError: name is empty.
    NoProjectFoundError: no project with that name exists.
  """
  if not name:
    raise MissingProjectNameError('There is no project name defined.')
  project = BiteProject.get_by_key_name(name)
  if project is None:
    raise NoProjectFoundError(''.join(['Project (', name, ') not found.']))
  return project
def AddProject(name, data):
  """Adds a project.

  Args:
    name: Str name of the project; also used as the entity key name.
    data: Optional Json string of property values to apply (the 'name'
        key is ignored).

  Returns:
    The created BiteProject entity.

  Raises:
    MissingProjectNameError: name is empty.
    DuplicatedProjectNameError: a project with that name already exists.
  """
  if not name:
    raise MissingProjectNameError('There is no project name defined.')
  if BiteProject.get_by_key_name(name):
    raise DuplicatedProjectNameError('Duplicated project name error.')
  # Assume name is ascii.
  project = BiteProject.get_or_insert(key_name=str(name), name=name)
  if not data:
    return project
  model_helper.Update(project, json.loads(data), exclude=['name'])
  project.put()
  return project
def GetAllProjects():
  """Returns a list of all the BiteProject entities.

  Returns:
    A BiteProject query over every project entity.
  """
  return BiteProject.all()
def GetDefaultProject():
  """Return a project in the default state.

  Returns:
    A fresh, unsaved BiteProject with all defaults applied.
  """
  return BiteProject()
def GetProject(name):
  """Get a project.

  Args:
    name: Str key name of the project.

  Returns:
    The BiteProject entity (see _LookupProject for raised errors).
  """
  return _LookupProject(name)
def ListProjects():
  """List all projects.

  Returns:
    A Json string holding the list of project key names.
  """
  # keys_only avoids fetching full entities just to read their names.
  project_names = [key.name() for key in BiteProject.all(keys_only=True)]
  return json.dumps(project_names)
def DeleteProject(name):
  """Delete a project.

  Args:
    name: Str key name of the project (see _LookupProject for errors).
  """
  _LookupProject(name).delete()
def UpdateProject(name, data):
  """Update a project.

  Args:
    name: Str key name of the project.
    data: Optional Json string of property values to apply (the 'name'
        key is ignored).

  Returns:
    The (possibly updated) BiteProject entity.
  """
  project = _LookupProject(name)
  if not data:
    return project
  model_helper.Update(project, json.loads(data), exclude=['name'])
  project.put()
  return project
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The store db class and the related functions."""
__author__ = 'phu@google.com (Po Hu)'
import json
import logging
from google.appengine.ext import db
class CodeData(db.Model):
  """Stores the code."""
  # Raw source text; referenced by MethodMetaData and DependencyMetaData.
  text = db.TextProperty(required=True)
class DependencyMetaData(db.Model):
  """Stores the dependency metadata."""
  # Unique dependency name used for lookup (see GetDepsByName).
  name = db.StringProperty(required=True)
  # The dependency's source, stored separately as a CodeData entity.
  code = db.ReferenceProperty(reference_class=CodeData)
class MethodMetaData(db.Model):
  """Stores the js method metadata."""
  # Unique method name used for lookup (see GetMethodByName).
  name = db.StringProperty(required=True)
  description = db.TextProperty(required=True)
  # The method's source, stored separately as a CodeData entity.
  code = db.ReferenceProperty(reference_class=CodeData)
  # Optional dependency whose code must precede this method's code.
  dependency = db.ReferenceProperty(reference_class=DependencyMetaData)
  # Main label used to group methods (see GetMethodsByPrimaryLabel).
  primary_label = db.StringProperty(required=True)
  # NOTE(review): mutable [] default is shared in plain Python; assumed
  # StringListProperty copies it per entity — confirm against the db API.
  addl_labels = db.StringListProperty(default=[])
  author = db.StringProperty(required=False)
  # Timestamps captured automatically by the datastore.
  added = db.DateTimeProperty(required=False, auto_now_add=True)
  modified = db.DateTimeProperty(required=False, auto_now=True)
def GetDepsByNames(names):
  """Gets the deps string by names.

  Args:
    names: Iterable of str method names.

  Returns:
    Str concatenation of (dependency code + method code) for every name
    that resolves to a stored method. Names with no matching method
    contribute nothing.
  """
  parts = []
  for name in names:
    method = GetMethodByName(name)
    if not method:
      # Previously temp_code was initialized outside the loop, so a name
      # with no matching method could re-append the previous iteration's
      # stale code; unknown names are now skipped entirely.
      continue
    code = method.code.text
    if method.dependency:
      code = method.dependency.code.text + code
    parts.append(code)
  # join avoids quadratic string concatenation.
  return ''.join(parts)
def UpdateDependency(deps_name, deps_code):
  """Updates the dependency.

  Creates the dependency (and its CodeData entity) if it does not exist,
  otherwise rewrites the stored code in place.

  Args:
    deps_name: Str unique name of the dependency.
    deps_code: Str source text to store.

  Returns:
    The DependencyMetaData entity.
  """
  deps_instance = GetDepsByName(deps_name)
  if deps_instance:
    code_entity = deps_instance.code
    code_entity.text = deps_code
    code_entity.put()
  else:
    code_entity = CodeData(text=deps_code)
    code_entity.put()
    deps_instance = DependencyMetaData(name=deps_name,
                                       code=code_entity)
    deps_instance.put()
  return deps_instance
def InsertMethod(method_code, method_name, description,
                 primary_label, addl_labels, deps_reference,
                 author):
  """Inserts the method.

  Args:
    method_code: Str source text of the method.
    method_name: Str unique method name.
    description: Str human-readable description.
    primary_label: Str primary label used for grouping/lookup.
    addl_labels: List of additional str labels.
    deps_reference: A DependencyMetaData entity the method depends on, or
        None.
    author: Str author identifier.

  Returns:
    The stored MethodMetaData entity.
  """
  # The code body lives in its own CodeData entity, referenced by the
  # method metadata.
  code_instance = CodeData(text=method_code)
  code_instance.put()
  method = MethodMetaData(name=method_name,
                          description=description,
                          code=code_instance,
                          dependency=deps_reference,
                          primary_label=primary_label,
                          addl_labels=addl_labels,
                          author=author)
  method.put()
  return method
def UpdateMethod(method_code, method_name, description,
                 primary_label, addl_labels):
  """Updates the method.

  Args:
    method_code: Str new source text of the method.
    method_name: Str name of an existing method.
    description: Str new description.
    primary_label: Str new primary label.
    addl_labels: List of new additional str labels.
  """
  # NOTE(review): assumes a method with this name exists; if
  # GetMethodByName returns None this raises AttributeError — confirm
  # callers guarantee existence.
  method_instance = GetMethodByName(method_name)
  code_instance = method_instance.code
  code_instance.text = method_code
  code_instance.put()
  method_instance.description = description
  method_instance.primary_label = primary_label
  method_instance.addl_labels = addl_labels
  method_instance.put()
def GetDepsByName(deps_name):
  """Gets the entity of the given dependency name.

  Args:
    deps_name: Str name of the dependency.

  Returns:
    The first matching DependencyMetaData entity, or None.
  """
  query = DependencyMetaData.all()
  query.filter('name = ', deps_name)
  return query.get()
def GetMethodByName(method_name):
  """Gets the entity of the given method name.

  Args:
    method_name: Str name of the method.

  Returns:
    The first matching MethodMetaData entity, or None.
  """
  query = MethodMetaData.all()
  query.filter('name = ', method_name)
  return query.get()
def GetMethodsByPrimaryLabel(label, limit=9999):
  """Gets the methods by primary label.

  Args:
    label: Str primary label to filter on; a falsy label fetches all
        methods.
    limit: Int maximum number of methods to fetch. Defaults to 9999, the
        previously hard-coded cap, so existing callers are unaffected.

  Returns:
    A list of method-details dicts (see GetMethodDetailsByInstance).
  """
  query = MethodMetaData.all()
  if label:
    query.filter('primary_label = ', label)
  # TODO(review): paginate instead of relying on a single large fetch.
  return [GetMethodDetailsByInstance(method) for method in query.fetch(limit)]
def GetMethodDetails(method_name):
  """Gets the method details object.

  Args:
    method_name: Str name of the stored method.

  Returns:
    A dict of method details, or {} if no method with that name exists
    (see GetMethodDetailsByInstance).
  """
  method = GetMethodByName(method_name)
  return GetMethodDetailsByInstance(method)
def DeleteMethod(key):
  """Deletes the given method.

  Args:
    key: Str datastore key of the MethodMetaData entity.
  """
  method = MethodMetaData.get(db.Key(key))
  if not method:
    return
  # Remove the referenced code entity along with the metadata.
  db.delete(method.code)
  db.delete(method)
def GetMethodDetailsByInstance(method):
  """Gets the method details object from the given method instance.

  Args:
    method: A MethodMetaData entity, or a false value.

  Returns:
    A dict of method display fields, or {} if method is falsy.
  """
  if not method:
    return {}
  deps_name = ''
  deps_code = ''
  if method.dependency:
    deps_name = method.dependency.name
    deps_code = method.dependency.code.text
  return {
      'methodName': method.name,
      'methodCode': method.code.text,
      'description': method.description,
      'primaryLabel': method.primary_label,
      'addlLabels': method.addl_labels,
      'author': method.author,
      'depsName': deps_name,
      'depsCode': deps_code,
      'key': str(method.key()),
  }
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Preserves the state of a crawl in the datastore.
CrawlState: Models the state of a crawl in AppEngine's datastore.
GetLastCrawlResults(): Retrieves the last crawl state saved for a given
project.
StoreCrawlState(): Stores the specified state into the datastore.
"""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import logging
from google.appengine.ext import db
class CrawlState(db.Model):
  """Represents the state of a crawl.

  A crawl is said to go from start_index to end_index, those indexes map to
  bug ID's from the original bug db.

  Attributes:
    provider: Str name of the bugs provider.
    project_name: Str name of the project crawled.
    start_index: ID of the bug where the crawl started as an int.
    end_index: ID of the last bug crawled (inclusive) as an int.
    total_imported: Number of bugs indexed and stored into the
      datastore as an int.
    last_modified: Optional Date the crawl state was saved. When not provided,
      the field is automatically set by the datastore to Now.
  """
  # Indices:
  provider = db.StringProperty(required=True)
  project_name = db.StringProperty(required=True)
  # auto_now keeps this current on every put; GetLastCrawlResults orders
  # on it to find the newest state.
  last_modified = db.DateTimeProperty(auto_now=True)
  # Non-indexed information about a crawl state:
  start_index = db.IntegerProperty(required=True)
  end_index = db.IntegerProperty(required=True)
  total_imported = db.IntegerProperty(required=True)
def GetLastCrawlResults(provider, project_name, model=CrawlState):
  """Gets the last crawled state for the specified project.

  Args:
    provider: Str name of the bugs provider.
    project_name: Str name of the project.
    model: Used as a test hook for mocking the CrawlState object.

  Returns:
    The last CrawlState stored in the datastore for the specified project,
    or None if no results have been stored for the specifed project.
  """
  logging.debug('Getting last crawl results for %s, project: %s .',
                provider, project_name)
  # Fetch the status of the last crawl (if any).
  query = model.all().order('-last_modified')
  query.filter('provider = ', provider)
  query.filter('project_name = ', project_name)
  last_crawl = query.get()
  if not last_crawl:
    logging.debug('Last crawl results NOT found.')
    return None
  logging.debug('Last crawl results found. start_index: %d, end_index: %d, '
                'last_modified: %s, total_imported: %d .',
                last_crawl.start_index, last_crawl.end_index,
                last_crawl.last_modified, last_crawl.total_imported)
  return last_crawl
def StoreCrawlResults(provider, project_name, start_index, end_index,
                      total_imported=0, model=CrawlState, _db=db):
  """Creates, puts and returns a CrawlState object with the given params.

  Args:
    provider: Str name of the bugs provider.
    project_name: Str name of the project crawled.
    start_index: ID of the bug where the crawl started as an int.
    end_index: ID of the last bug crawled as an int.
    total_imported: Number of bugs indexed and stored into the
        datastore as an int.
    model: Used as a test hook for mocking the CrawlState object.
    _db: Used as a test hook for mocking the db module.

  Returns:
    The CrawlState object with the stored information.
  """
  def _PutState():
    logging.debug('Storing last crawl state: start_index: %d, end_index: %d, '
                  'total_imported: %d, provider: %s provider, '
                  'project_name: %s.',
                  start_index, end_index,
                  total_imported, provider,
                  project_name)
    state = model(provider=provider,
                  project_name=project_name,
                  start_index=int(start_index),
                  end_index=int(end_index),
                  total_imported=int(total_imported))
    state.put()
    return state

  return _db.run_in_transaction(_PutState)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""BITE model help.
The model helper is a series of functions that work on model objects and
perform convenience functions.
"""
__author__ = 'jasonstredwick@google.com (Jason Stredwick)'
import json
def Compare(obj, data):
  """Compare the given dictionary to see if it is the same as the model.

  Args:
    obj: A db.Model object to compare against.
    data: A dict of property name -> expected value.

  Returns:
    True if every entry in data matches the model (string-compared);
    False for a falsy data dict or any mismatch.
  """
  if not data:
    return False
  return all(CompareProperty(obj, key, value)
             for key, value in data.items())
def CompareProperty(obj, key, value):
  """Compare the property value for the given key with the given value.

  Args:
    obj: The object whose attribute is inspected.
    key: Str attribute name.
    value: Expected value; both sides are compared as strings.

  Returns:
    True if obj has the attribute and its str() form equals str(value).
  """
  if not hasattr(obj, key):
    return False
  return str(getattr(obj, key)) == str(value)
def ToDict(obj):
  """Convert model into a json serializable dict.

  Args:
    obj: A db.Model object (anything exposing properties()).

  Returns:
    A dict mapping each property name to the str() of its value.
  """
  # The properties function is part of db.Model.
  return dict((name, str(getattr(obj, name))) for name in obj.properties())
def ToJson(obj):
  """Convert the dictionary version of the model to a json string.

  Args:
    obj: A db.Model object (anything ToDict accepts).

  Returns:
    A Json string of the model's stringified properties.
  """
  return json.dumps(ToDict(obj))
def Update(obj, data, exclude=None):
  """Given a dictionary, update appropriate properties.

  Args:
    obj: A db.Model object that is to be update.
    data: A dictionary of data used to update the model's properties. Only
        those keys that exist that are in both model and data will be used
        to update the model.
    exclude: A set of strings corresponding to model keys that should not
        be updated within the model even if data values are present.
  """
  if not data:
    return
  skip = set(exclude) if exclude else set()
  for key, value in data.items():
    if key in skip or not hasattr(obj, key):
      continue
    setattr(obj, key, value)
| Python |
#!/usr/bin/python
#
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Appengine configuration options."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
from google.appengine.ext.appstats import recording
def webapp_add_wsgi_middleware(app):
  """Adds support for Appstats, the appengine RPC instrumentation service.

  Args:
    app: The WSGI application to wrap.

  Returns:
    The application wrapped with the Appstats recording middleware.
  """
  return recording.appstats_wsgi_middleware(app)
| Python |
# Copyright 2011 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Handles conversion of data into a string representing a zip file."""
__author__ = 'jasonstredwick@google.com (Jason Stredwick)'
import StringIO
import zipfile
import json
class BadInput(Exception):
  """Thrown for bad function input."""
def JsonStringToZip(data):
  """A utility that takes a json string and converts it into a zip file string.

  Parses the given json string into an object and sends it to the conversion
  function to generate a string form of a zip file.

  Args:
    data: A string hold a json representation of the data to be zipped.

  Returns:
    A tuple of (zip file title, zip file embedded in a string).

  Raises:
    BadInput: Raised for unparseable input, or by ObjToZip.
  """
  try:
    parsed = json.loads(data)
  except (ValueError, OverflowError, TypeError):
    raise BadInput('Invalid data received.')
  return ObjToZip(parsed)
def ObjToZip(data):
  """Convert an object of file related information into a zip file.

  Args:
    data: An object containing the information related to the files to be
        zipped. The format is {'title': string, 'files': {filename: contents} }
        where filename and contents are strings. Title is required, but files
        is optional. Files can also be an empty object.

  Returns:
    A tuple of (zip file title, zip file embedded in a string).

  Raises:
    BadInput: Raised if data is not a valid object or has an incorrect
        structure.
  """
  # Process archive title; i.e. archive file name.
  files = None
  title = 'untitled.zip'
  try:
    # Throws exception if data is not an object.
    if 'title' in data:
      title = data['title'] or title
    # This will raise an exception if title is not a string.
    if not title.endswith('.zip'):
      title += '.zip'
    # Throws exception if data is not an object.
    if 'files' in data:
      # Throws exception if data['files'] is not iteratable.
      files = data['files'].iteritems()
  except Exception:
    # Broad on purpose: any structural problem in untrusted input is
    # reported uniformly as BadInput.
    raise BadInput('Invalid data received.')
  # Create zip file.
  output = StringIO.StringIO()
  zip_file = zipfile.ZipFile(output, 'w')
  if files:
    # Add each file in data['files'] to the zip file where the filename is
    # mapped to its content string within the archive.
    # Names/contents must be encoded or ZipFile raises UnicodeError.
    # A plain loop replaces the previous side-effect-only list
    # comprehension, which built and discarded a useless list.
    for file_name, content in files:
      zip_file.writestr(file_name.encode('utf-8'), content.encode('utf-8'))
  zip_file.close()
  return (title, output.getvalue())
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Bite basic util lib.
Bite basic util lib contains a bunch of common useful functions.
"""
__author__ = 'phu@google.com (Po Hu)'
import datetime
import json
import logging
import urllib2
class Error(Exception):
  """Base exception for the basic_util module."""
  pass
class ParsingJsonError(Error):
  """Exception encountered while parsing a Json string."""
class DumpingJsonError(Error):
  """Exception encountered while dumping a Json object."""
def ParseJsonStr(json_str):
  """Parses a Json string.

  Args:
    json_str: A Json string, or a false value.

  Returns:
    The parsed object, or '' for a falsy input (kept for backward
    compatibility with existing callers).

  Raises:
    ParsingJsonError: Raised if json_str is not valid Json.
  """
  if not json_str:
    return ''
  try:
    return json.loads(json_str)
  except ValueError:
    # Lazy %-args instead of string concatenation: the message is only
    # built if the record is actually emitted, and non-str input cannot
    # break the logging call itself.
    logging.error('The json string is: %s', json_str)
    raise ParsingJsonError()
def DumpJsonStr(json_obj):
  """Dumps a Json object.

  Args:
    json_obj: A Json-serializable object, or a false value.

  Returns:
    The Json string, or '' for a falsy input (kept for backward
    compatibility with existing callers).

  Raises:
    DumpingJsonError: Raised if json_obj cannot be serialized.
  """
  if not json_obj:
    return ''
  try:
    return json.dumps(json_obj)
  except (TypeError, ValueError):
    # json.dumps signals unserializable objects with TypeError, which the
    # previous `except ValueError` never caught.
    raise DumpingJsonError()
def GetPercentStr(first, second, digits=0):
  """Gets a percent string.

  Args:
    first: Numerator.
    second: Denominator; a falsy value yields '0%'.
    digits: Int number of decimal digits to round to.

  Returns:
    Str percentage such as '50.0%'.
  """
  if not second:
    return '0%'
  percent = round(float(first) / second * 100, digits)
  return str(percent) + '%'
def ConvertFromUtcToPst(date_time):
  """Converts a datetime from UTC to PST format.

  Args:
    date_time: A naive datetime.datetime assumed to be in UTC.

  Returns:
    An aware datetime in US Pacific time (see PacificTzinfo).
  """
  return date_time.replace(tzinfo=UTC()).astimezone(PacificTzinfo())
def CreateStartStr(date_time):
  """Formats a datetime as 'M/D/YYYY H:MM PST'.

  Args:
    date_time: A datetime.datetime, assumed already in Pacific time
        (see ConvertFromUtcToPst).

  Returns:
    The formatted str. Minutes are zero-padded (%02d): previously 9:05
    rendered as '9:5'.
  """
  return '%d/%d/%d %d:%02d PST' % (date_time.month, date_time.day,
                                   date_time.year,
                                   date_time.hour, date_time.minute)
class UTC(datetime.tzinfo):
  """tzinfo implementation for Coordinated Universal Time."""

  def utcoffset(self, dt):
    # UTC is the reference timezone: zero offset.
    return datetime.timedelta()

  def tzname(self, dt):
    return 'UTC'

  def dst(self, dt):
    # UTC never observes daylight saving time.
    return datetime.timedelta()
class PacificTzinfo(datetime.tzinfo):
  """Implementation of the Pacific timezone."""
  def utcoffset(self, dt):
    # PST is UTC-8; dst() adds one hour while daylight saving is in
    # effect (PDT = UTC-7).
    return datetime.timedelta(hours=-8) + self.dst(dt)
  def _FirstSunday(self, dt):
    """First Sunday on or after dt."""
    return dt + datetime.timedelta(days=(6-dt.weekday()))
  def dst(self, dt):
    # Hardcodes the US DST rules in effect since 2007.
    # 2 am on the second Sunday in March
    dst_start = self._FirstSunday(datetime.datetime(dt.year, 3, 8, 2))
    # 1 am on the first Sunday in November
    dst_end = self._FirstSunday(datetime.datetime(dt.year, 11, 1, 1))
    if dst_start <= dt.replace(tzinfo=None) < dst_end:
      return datetime.timedelta(hours=1)
    else:
      return datetime.timedelta(hours=0)
  def tzname(self, dt):
    if self.dst(dt) == datetime.timedelta(hours=0):
      return 'PST'
    else:
      return 'PDT'
def SetMainNav(main_nav, name):
  """Sets the main nav.

  Marks the first scope whose 'name' matches as selected, and records the
  selection on the nav dict itself. Mutates main_nav in place.

  Args:
    main_nav: A dict with a 'scopes' list of {'name': ...} dicts.
    name: Str name of the scope to select.
  """
  for scope in main_nav['scopes']:
    if scope['name'] == name:
      scope['selected'] = True
      break
  main_nav['name'] = name
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Screenstho utilities."""
__author__ = 'alexto@google.com (Alexis O. Torres)'
import base64
from utils import url_util
# Path to the get screenshots API.
GET_PATH = '/screenshots/fetch'


def RetrievalUrl(request_url, screenshot_id):
  """Returns URL to fetch a screenshot by id.

  Args:
    request_url: Str url of the current request, used to derive the host.
    screenshot_id: Id of the screenshot to fetch.

  Returns:
    Str absolute url of the screenshot fetch endpoint.
  """
  base_url = url_util.GetBaseUrl(request_url)
  query_suffix = '?id=' + str(screenshot_id)
  return base_url + GET_PATH + query_suffix
def DecodeBase64PNG(data):
  """Decodes a data-URI encoded PNG screenshot.

  Args:
    data: Str screenshot payload, normally prefixed with the
        'data:image/png;base64,' data-URI header.

  Returns:
    The decoded binary PNG content.
  """
  prefix = 'data:image/png;base64,'
  if data.startswith(prefix):
    content = data[len(prefix):]
  else:
    # Previously the first len(prefix) characters were stripped
    # unconditionally, corrupting input without the data-URI header.
    content = data
  return base64.b64decode(content)
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Utilities which operate on URL strings."""
__author__ = ('alexto@google.com (Alexis O. Torres)',
'jcarollo@google.com (Jeff Carollo)')
import logging
import re
import sha
from third_party import urlnorm
from urlparse import urlparse
from urlparse import urlunparse
from utils import encoding_util
class InvalidUrlError(Exception):
  """Raised when a URL cannot be parsed (see GetBaseUrl)."""
  pass
# Compiled regular expression used by GetBaseUrl.
_GET_BASE_URL_RE = re.compile('(^http[s:]*//[a-zA-Z0-9.-]*[:]?[0-9]*)(.*)')


def GetBaseUrl(url):
  """Turns http://www.foo.com/bar/baz -> http://www.foo.com.

  Needed for making requests to ourselves.

  Args:
    url: URL to extract base from.

  Example:
    url = self.request.uri
    base_url = GetBaseUrl(url)
    forarding_url = base_url + '/tasks/foo'

  Returns:
    The base str for the given URL.

  Raises:
    InvalidUrlError: Raised when the passed URL is not a valid one.
  """
  match = _GET_BASE_URL_RE.match(url)
  if match is None:
    raise InvalidUrlError(url)
  return match.group(1)
class NormalizedUrlResult(object):
  """Normalized URL result object.

  All attribute values are converted to lowercase as part of the
  normalization process.

  Attributes:
    url: The fully qualified normalized URL.
    hostname: The hostname part of the URL.
    path: The path part of the URL.
  """
  def __init__(self, url='', hostname='', path=''):
    """Initializes NormalizeUrlResult object."""
    # Each component is ASCII-encoded then lowercased.
    self.url = encoding_util.EncodeToAscii(url).lower()
    self.hostname = encoding_util.EncodeToAscii(hostname).lower()
    self.path = encoding_util.EncodeToAscii(path).lower()
def _GetNormalizationTuple(url):
  """Parse a URL into a components tuple.

  Parse a URL into 6 components:
  <scheme>://<netloc>/<path>;<params>?<query>#<fragment>

  Args:
    url: A URL string.

  Returns:
    A 6-tuple: (scheme, netloc, path, params, query, fragment).
  """
  parsed = urlparse(encoding_util.EncodeToAscii(url), 'http')
  scheme, netloc, path = parsed[0], parsed[1], parsed[2]
  if not netloc:
    # No authority recognized (e.g. 'www.foo.com/bar'): treat everything
    # up to the first slash as the host, the rest as the path.
    slash = path.find('/')
    if slash < 0:
      netloc, path = path, ''
    else:
      netloc, path = path[:slash], path[slash:]
  # Ignore trailing slashes on the path.
  return (scheme, netloc, path.rstrip('/'), parsed[3], parsed[4], parsed[5])
def NormalizeUrl(url):
  """Normalizes the given URL.

  Normalizes a URL, and adds the http schema to a URL without one,
  converting a URL of the form:
      'www.foo.com' to 'http://www.foo.com'.
  Additionally, it prepends 'www.' to the hostname, when only the
  url is in the form of 'foo.com', resulting in 'www.foo.com'.

  Args:
    url: A URL string.

  Returns:
    A NormalizedUrlResult object containing the normalized URL, or None
    when the input is empty/blank or normalization fails.
  """
  logging.debug('Normalizing %s', url)
  # Empty or whitespace-only input cannot be normalized.
  if not url or not url.strip():
    return None
  try:
    norm_result = urlnorm.norm(_GetNormalizationTuple(url))
  except AttributeError, err:
    # urlnorm.norm raised on the parsed tuple. If the URL contains more
    # than one ':' it may carry a bogus scheme-like prefix; retry once
    # with everything before the first ':' stripped off.
    if url.count(':') > 1:
      try:
        logging.warning('Failed to normalize, try to without schema. Error: %s',
                        str(err))
        url_modified = url[url.find(':') + 1:]
        logging.debug('Original: %s, modified: %s', url, url_modified)
        norm_result = urlnorm.norm(_GetNormalizationTuple(url_modified))
      except AttributeError, err2:
        # The stripped form failed too; give up.
        logging.error('urlnorm.norm raise an error for the second time: %s',
                      str(err2))
        return None
    else:
      logging.error('urlnorm.norm raise an error: %s', str(err))
      return None
  # norm_result is a 6-tuple: (scheme, netloc, path, params, query, fragment).
  hostname = norm_result[1]
  if hostname.count('.') == 1:
    # Seems like we have a host name in the form of 'foo.com',
    # prefix with 'www' to produce 'www.foo.com'.
    logging.debug('Prefixing www. in front of hostname: %s', hostname)
    hostname = 'www.%s' %hostname
  path = norm_result[2]
  # Reassemble with the (possibly www-prefixed) hostname.
  normalized_url = urlunparse(
      (norm_result[0], hostname, norm_result[2],
       norm_result[3], norm_result[4], norm_result[5]))
  logging.debug('Normalized URL from %s to %s.', url, normalized_url)
  return NormalizedUrlResult(
      url=normalized_url, hostname=hostname, path=path)
def HashUrl(url):
  """Hashes the given URL with SHA-1.

  Args:
    url: URL str (or bytes) to hash.

  Returns:
    A string containing only hexadecimal digits (40-char SHA-1 digest).
  """
  # hashlib replaces the deprecated 'sha' module (removed in Python 3).
  # On Python 2, str is bytes, so this hashes exactly as sha.sha(url) did.
  if isinstance(url, bytes):
    data = url
  else:
    data = url.encode('utf-8')
  return hashlib.sha1(data).hexdigest()
# Regular expression pattern used used to find URLs in a string of text.
_REGEX_PATTERNS_URLS_MATCH = [
re.compile('\s+https?://[\w\d\.\:%@#&=/\-\?]{2,}[\w\d]'),
re.compile('(@?[\w\d][\w\d\.:/\-]{2,}\.com([\w\d\.\:@%#&=/\-\?]*[\w\d])?)'),
re.compile('(@?[\w\d][\w\d\.:/\-]{2,}\.org([\w\d\.\:@%#&=/\-\?]*[\w\d])?)')]
# Regular expression pattern used to identify URLs we want to ignore.
# Most of the time, these are considered noise, for example a link to the
# fix should not be considered a repro URL. The dot in 'chromium.org' is
# escaped so it no longer matches arbitrary characters (e.g. 'chromiumXorg').
_REGEX_URL_IGNORE = re.compile(
    r'(chromium\.org|http://crash/|^@)')


def IsIgnorableUrl(url):
  """Identifies ignorable URLs.

  Args:
    url: Str with the URL to categorize as ignorable or not.

  Returns:
    Whether the given URL is an ignorable URL.
  """
  return bool(_REGEX_URL_IGNORE.search(url))
def ExtractUrls(text):
  """Extracts URLs from the given text.

  Args:
    text: Text as a str.

  Returns:
    A list of the unique URLs found in the text, in first-seen order,
    if any.
  """
  urls = []
  seen = set()
  for pattern in _REGEX_PATTERNS_URLS_MATCH:
    for match in pattern.findall(text):
      if isinstance(match, tuple):
        match = match[0]  # Use the full match.
      url = match.strip().replace('\n', '').replace('\r', '')
      # Deduplicate while preserving first-seen order; the previous
      # list(set(urls)) returned a nondeterministic ordering.
      if url not in seen and not IsIgnorableUrl(url):
        seen.add(url)
        urls.append(url)
  return urls
| Python |
# Copyright 2010 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Class to define how to export datasore entities to a file.
BugExporter tells appcfg.py how to export UrlBugMap entries to a file.
Use:
appcfg.py download_data --config_file=bug_map_exporter.py
--filename=urlbugmap.csv --kind=UrlBugMap <app-directory>
"""
# pylint: disable-msg=C6204
# NOTE(review): both branches import the exact same module, so this
# try/except is a no-op; the fallback was probably meant to use a
# different import path (e.g. a bundled SDK copy). Confirm the intended
# fallback before simplifying to a single import.
try:
  # COV_NF_START
  from google.appengine.tools import bulkloader
  # COV_NF_END
except ImportError:
  from google.appengine.tools import bulkloader
class UrlBugMapExporter(bulkloader.Exporter):
  """Exporter describing how to dump UrlBugMap entities to CSV columns."""

  def __init__(self):
    # Every property is exported as a plain string with no default value.
    columns = ['url', 'hostname', 'path', 'status', 'bug', 'last_update']
    bulkloader.Exporter.__init__(
        self, 'UrlBugMap', [(name, str, None) for name in columns])


# Registry the bulkloader tool looks up to find exporters by kind name.
exporters = [UrlBugMapExporter]
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.