code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A mix-in handler for bulk loading data into an application.
For complete documentation, see the Tools and Libraries section of the
documentation.
To use this in your app, first write a script, e.g. bulkload.py, that
instantiates a Loader for each entity kind you want to import and call
bulkload.main(instance). For example:
person = bulkload.Loader(
'Person',
[('name', str),
('email', datastore_types.Email),
('cool', bool), # ('0', 'False', 'No', '')=False, otherwise bool(value)
('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
])
if __name__ == '__main__':
bulkload.main(person)
See the Loader class for more information. Then, add a handler for it in your
app.yaml, e.g.:
handlers:
- url: /load
script: bulkload.py
login: admin
Finally, deploy your app and run bulkloader.py. For example, to load the
file people.csv into a dev_appserver running on your local machine:
./bulkloader.py --filename people.csv --kind Person --cookie ... \
--url http://localhost:8080/load
The kind parameter is used to look up the Loader instance that will be used.
The bulkload handler should usually be admin_only, so that non-admins can't use
the shell to modify your app's data. The bulkload client uses the cookie
parameter to piggyback its HTTP requests on your login session. A GET request
to the URL specified for your bulkload script will give you a cookie parameter
you can use (/load in the example above). If your bulkload handler is not
admin_only, you may omit the cookie parameter.
If you want to do extra processing before the entities are stored, you can
subclass Loader and override HandleEntity. HandleEntity is called once with
each entity that is imported from the CSV data. You can return one or more
entities from HandleEntity to be stored in its place, or None if nothing
should be stored.
For example, this loads calendar events and stores them as
datastore_entities.Event entities. It also populates their author field with a
reference to the corresponding datastore_entities.Contact entity. If no Contact
entity exists yet for the given author, it creates one and stores it first.
class EventLoader(bulkload.Loader):
def __init__(self):
bulkload.Loader.__init__(self, 'Event',
[('title', str),
('creator', str),
('where', str),
('startTime', lambda x:
datetime.datetime.fromtimestamp(float(x))),
])
def HandleEntity(self, entity):
event = datastore_entities.Event(entity.title)
event.update(entity)
creator = event['creator']
if creator:
contact = datastore.Query('Contact', {'title': creator}).Get(1)
if not contact:
contact = [datastore_entities.Contact(creator)]
datastore.Put(contact[0])
event['author'] = contact[0].key()
return event
if __name__ == '__main__':
bulkload.main(EventLoader())
"""
import Cookie
import StringIO
import csv
import httplib
import os
import traceback
import google
import wsgiref.handlers
from google.appengine.api import datastore
from google.appengine.ext import webapp
from google.appengine.ext.bulkload import constants
def Validate(value, type):
  """Ensures that value is truthy and an instance of the expected type.

  Args:
    value: any value
    type: a type or tuple of types

  Raises:
    ValueError: if value is None or otherwise empty/falsy.
    TypeError: if value is not an instance of the given type(s).
  """
  if not value:
    raise ValueError('Value should not be empty; received %s.' % value)
  if not isinstance(value, type):
    raise TypeError('Expected a %s, but received %s (a %s).' %
                    (type, value, value.__class__))
class Loader(object):
  """A base class for creating datastore entities from input data.

  To add a handler for bulk loading a new entity kind into your datastore,
  write a subclass of this class that calls Loader.__init__ from your
  class's __init__.

  If you need to run extra code to convert entities from the input
  data, create new properties, or otherwise modify the entities before
  they're inserted, override HandleEntity.

  See the CreateEntity method for the creation of entities from the
  (parsed) input data.
  """

  # Class-level registry mapping kind name -> Loader instance. All
  # subclasses share this one dict via the Loader.__loaders reference
  # in __init__ below (double-underscore name mangling keeps it private).
  __loaders = {}
  __kind = None
  __properties = None

  def __init__(self, kind, properties):
    """ Constructor.

    Populates this Loader's kind and properties map. Also registers it with
    the bulk loader, so that all you need to do is instantiate your Loader,
    and the bulkload handler will automatically use it.

    Args:
      kind: a string containing the entity kind that this loader handles
      properties: list of (name, converter) tuples.

        This is used to automatically convert the CSV columns into properties.
        The converter should be a function that takes one argument, a string
        value from the CSV file, and returns a correctly typed property value
        that should be inserted. The tuples in this list should match the
        columns in your CSV file, in order.

        For example:
          [('name', str),
           ('id_number', int),
           ('email', datastore_types.Email),
           ('user', users.User),
           ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
           ('description', datastore_types.Text),
           ]
    """
    Validate(kind, basestring)
    self.__kind = kind

    Validate(properties, list)
    for name, fn in properties:
      Validate(name, basestring)
      assert callable(fn), (
        'Conversion function %s for property %s is not callable.' % (fn, name))

    self.__properties = properties

    # Register this instance so BulkLoad.Load can look it up by kind.
    Loader.__loaders[kind] = self

  def kind(self):
    """ Returns the entity kind that this Loader handles.
    """
    return self.__kind

  def CreateEntity(self, values, key_name=None):
    """ Creates an entity from a list of property values.

    Args:
      values: list/tuple of str
      key_name: if provided, the name for the (single) resulting Entity

    Returns:
      list of datastore.Entity

      The returned entities are populated with the property values from the
      argument, converted to native types using the properties map given in
      the constructor, and passed through HandleEntity. They're ready to be
      inserted.

    Raises:
      AssertionError if the number of values doesn't match the number
      of properties in the properties map.
    """
    Validate(values, (list, tuple))
    assert len(values) == len(self.__properties), (
      'Expected %d CSV columns, found %d.' %
      (len(self.__properties), len(values)))

    entity = datastore.Entity(self.__kind, name=key_name)
    for (name, converter), val in zip(self.__properties, values):
      # Special-case bool: common "false" spellings would otherwise become
      # True via bool(non-empty string).
      if converter is bool and val.lower() in ('0', 'false', 'no'):
        val = False
      entity[name] = converter(val)

    # HandleEntity may return a single entity, a list of entities, or None;
    # normalize a single entity to a one-element list.
    entities = self.HandleEntity(entity)

    if entities is not None:
      if not isinstance(entities, (list, tuple)):
        entities = [entities]

      for entity in entities:
        if not isinstance(entity, datastore.Entity):
          raise TypeError('Expected a datastore.Entity, received %s (a %s).' %
                          (entity, entity.__class__))

    return entities

  def HandleEntity(self, entity):
    """ Subclasses can override this to add custom entity conversion code.

    This is called for each entity, after its properties are populated from
    CSV but before it is stored. Subclasses can override this to add custom
    entity handling code.

    The entity to be inserted should be returned. If multiple entities should
    be inserted, return a list of entities. If no entities should be inserted,
    return None or [].

    Args:
      entity: datastore.Entity

    Returns:
      datastore.Entity or list of datastore.Entity
    """
    return entity

  @staticmethod
  def RegisteredLoaders():
    """ Returns a dict mapping kind names to their Loader instances.

    A copy is returned so callers cannot mutate the private registry.
    """
    return dict(Loader.__loaders)
class BulkLoad(webapp.RequestHandler):
  """A handler for bulk load requests.

  This class contains handlers for the bulkloading process. One for
  GET to provide cookie information for the upload script, and one
  handler for a POST request to upload the entities.

  In the POST request, the body contains the data representing the
  entities' property values. The original format was a sequences of
  lines of comma-separated values (and is handled by the Load
  method). The current (version 1) format is a binary format described
  in the Tools and Libraries section of the documentation, and is
  handled by the LoadV1 method).
  """

  def get(self):
    """ Handle a GET. Just show an info page.
    """
    page = self.InfoPage(self.request.uri)
    self.response.out.write(page)

  def post(self):
    """ Handle a POST. Reads CSV data, converts to entities, and stores them.
    """
    self.response.headers['Content-Type'] = 'text/plain'
    response, output = self.Load(self.request.get(constants.KIND_PARAM),
                                 self.request.get(constants.CSV_PARAM))
    self.response.set_status(response)
    self.response.out.write(output)

  def InfoPage(self, uri):
    """ Renders an information page with the POST endpoint and cookie flag.

    Args:
      uri: a string containing the request URI

    Returns:
      A string with the contents of the info page to be displayed
    """
    page = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html><head>
<title>Bulk Loader</title>
</head><body>"""

    page += ('The bulk load endpoint is: <a href="%s">%s</a><br />\n' %
             (uri, uri))

    # Surface the login cookie (production ACSID or dev_appserver_login)
    # so the user can pass it to the bulkloader client's --cookie flag.
    cookies = os.environ.get('HTTP_COOKIE', None)
    if cookies:
      cookie = Cookie.BaseCookie(cookies)
      for param in ['ACSID', 'dev_appserver_login']:
        value = cookie.get(param)
        if value:
          page += ("Pass this flag to the client: --cookie='%s=%s'\n" %
                   (param, value.value))
          break

    else:
      page += 'No cookie found!\n'

    page += '</body></html>'
    return page

  def IterRows(self, reader):
    """ Yields a tuple of a line number and row for each row of the CSV data.

    Line numbers start at 1 and match the CSV line for error reporting.

    Args:
      reader: a csv reader for the input data.
    """
    line_num = 1
    for columns in reader:
      yield (line_num, columns)
      line_num += 1

  def LoadEntities(self, iter, loader, key_format=None):
    """Generates entities and loads them into the datastore. Returns
    a tuple of HTTP code and string reply.

    Args:
      iter: an iterator yielding pairs of a line number and row contents.
      loader: the Loader instance used to convert each row into entities.
      key_format: a format string to convert a line number into an
        entity id. If None, then entity ID's are automatically generated.
    """
    entities = []
    output = []
    for line_num, columns in iter:
      key_name = None
      if key_format is not None:
        key_name = key_format % line_num
      if columns:
        try:
          output.append('\nLoading from line %d...' % line_num)
          new_entities = loader.CreateEntity(columns, key_name=key_name)
          if new_entities:
            entities.extend(new_entities)
          output.append('done.')
        except:
          # Fail fast: report the traceback for the offending line and
          # store nothing (entities are only Put after every row parses).
          stacktrace = traceback.format_exc()
          output.append('error:\n%s' % stacktrace)
          return (httplib.BAD_REQUEST, ''.join(output))

    datastore.Put(entities)

    return (httplib.OK, ''.join(output))

  def Load(self, kind, data):
    """Parses CSV data, uses a Loader to convert to entities, and stores them.

    On error, fails fast. Returns a "bad request" HTTP response code and
    includes the traceback in the output.

    Args:
      kind: a string containing the entity kind that this loader handles
      data: a string containing the CSV data to load

    Returns:
      tuple (response code, output) where:
        response code: integer HTTP response code to return
        output: string containing the HTTP response body
    """
    # The request parameter arrives as unicode; the Python 2 csv module
    # works on byte strings, so encode to UTF-8 up front.
    data = data.encode('utf-8')
    Validate(kind, basestring)
    Validate(data, basestring)
    output = []

    try:
      loader = Loader.RegisteredLoaders()[kind]
    except KeyError:
      output.append('Error: no Loader defined for kind %s.' % kind)
      return (httplib.BAD_REQUEST, ''.join(output))

    buffer = StringIO.StringIO(data)
    reader = csv.reader(buffer, skipinitialspace=True)

    try:
      # Raise the default per-field size cap; older csv modules don't
      # expose field_size_limit, hence the AttributeError guard.
      csv.field_size_limit(800000)
    except AttributeError:
      pass

    return self.LoadEntities(self.IterRows(reader), loader)
def main(*loaders):
  """Runs the bulkload WSGI application for the given Loader instances.

  Args:
    loaders: one or more Loader instances.

  Raises:
    TypeError: if no arguments are given, or if any argument is not a
      Loader instance.
  """
  if not loaders:
    raise TypeError('Expected at least one argument.')

  for candidate in loaders:
    if not isinstance(candidate, Loader):
      raise TypeError('Expected a Loader instance; received %r' % candidate)

  app = webapp.WSGIApplication([('.*', BulkLoad)])
  wsgiref.handlers.CGIHandler().run(app)


if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple datastore view and interactive console, for use in dev_appserver."""
import cgi
import csv
import cStringIO
import datetime
import logging
import math
import mimetypes
import os
import os.path
import pickle
import pprint
import random
import sys
import time
import traceback
import types
import urllib
import urlparse
import wsgiref.handlers
try:
from google.appengine.cron import groctimespecification
from google.appengine.api import croninfo
except ImportError:
HAVE_CRON = False
else:
HAVE_CRON = True
from google.appengine.api import datastore
from google.appengine.api import datastore_admin
from google.appengine.api import datastore_types
from google.appengine.api import datastore_errors
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
_DEBUG = True
class ImageHandler(webapp.RequestHandler):
  """Serves one of the static images bundled with the admin templates.

  This exists because we don't want to burden the user with specifying
  a static file handler for the image resources used by the admin tool.
  """

  PATH = '/images/.*'

  def get(self):
    """Looks up the requested image under templates/images and serves it."""
    image_name = os.path.basename(self.request.path)
    content_type, encoding = mimetypes.guess_type(image_name)
    if not (content_type and content_type.startswith('image/')):
      logging.debug('image_name=%r, content_type=%r, encoding=%r',
                    image_name, content_type, encoding)
      self.error(404)
      return

    path = os.path.join(os.path.dirname(__file__), 'templates', 'images',
                        image_name)
    try:
      image_stream = open(path, 'rb')
    except IOError as e:
      logging.error('Cannot open image %s: %s', image_name, e)
      self.error(404)
      return

    try:
      image_data = image_stream.read()
    finally:
      image_stream.close()

    self.response.headers['Content-Type'] = content_type
    self.response.out.write(image_data)
class BaseRequestHandler(webapp.RequestHandler):
  """Supplies a common template generation function.

  When you call generate(), we augment the template variables supplied with
  the current user in the 'user' variable and the current webapp request
  in the 'request' variable.
  """

  def generate(self, template_name, template_values=None):
    """Renders a template with common admin-console variables filled in.

    Args:
      template_name: name of a template file under the templates/ directory.
      template_values: optional dict of template variables; entries here
        override the defaults computed below.
    """
    # The default used to be a shared mutable dict ({}) -- the classic
    # mutable-default-argument pitfall; use None as the sentinel instead.
    if template_values is None:
      template_values = {}
    base_path = self.base_path()
    values = {
      'application_name': self.request.environ['APPLICATION_ID'],
      'user': users.get_current_user(),
      'request': self.request,
      'home_path': base_path + DefaultPageHandler.PATH,
      'datastore_path': base_path + DatastoreQueryHandler.PATH,
      'datastore_edit_path': base_path + DatastoreEditHandler.PATH,
      'datastore_batch_edit_path': base_path + DatastoreBatchEditHandler.PATH,
      'interactive_path': base_path + InteractivePageHandler.PATH,
      'interactive_execute_path': base_path + InteractiveExecuteHandler.PATH,
      'memcache_path': base_path + MemcachePageHandler.PATH,
    }
    if HAVE_CRON:
      values['cron_path'] = base_path + CronPageHandler.PATH
    values.update(template_values)
    directory = os.path.dirname(__file__)
    path = os.path.join(directory, os.path.join('templates', template_name))
    self.response.out.write(template.render(path, values, debug=_DEBUG))

  def base_path(self):
    """Returns the base path of this admin app, which is chosen by the user.

    The user specifies which paths map to this application in their app.cfg.
    You can get that base path with this method. Combine with the constant
    paths specified by the classes to construct URLs.
    """
    # Strip this handler's own PATH suffix from the request path to
    # recover the mount point.
    path = self.__class__.PATH
    return self.request.path[:-len(path)]

  def filter_url(self, args):
    """Filters the current URL to only have the given list of arguments.

    For example, if your URL is /search?q=foo&num=100&start=10, then

    self.filter_url(['start', 'num']) => /search?num=100&start=10
    self.filter_url(['q']) => /search?q=foo
    self.filter_url(['random']) => /search?

    Args:
      args: list of query parameter names to keep.

    Returns:
      The request path plus the filtered, re-encoded query string.
    """
    queries = []
    for arg in args:
      value = self.request.get(arg)
      if value:
        # Reuse the value already fetched rather than a second
        # self.request.get(arg) lookup.
        queries.append(arg + '=' + urllib.quote_plus(value))
    return self.request.path + '?' + '&'.join(queries)

  def in_production(self):
    """Detects if app is running in production.

    Returns a boolean.
    """
    server_software = os.environ['SERVER_SOFTWARE']
    return not server_software.startswith('Development')
class DefaultPageHandler(BaseRequestHandler):
  """Redirects to the Datastore application by default."""

  PATH = '/'

  def get(self):
    """Sends the visitor to the datastore viewer under the same mount point."""
    base = self.request.path
    if base.endswith('/'):
      base = base[:-1]
    self.redirect(base + DatastoreQueryHandler.PATH)
class InteractivePageHandler(BaseRequestHandler):
  """Renders the HTML page for the interactive console."""

  PATH = '/interactive'

  def get(self):
    """Serves the console template; execution happens via the POST handler."""
    self.generate('interactive.html')
class InteractiveExecuteHandler(BaseRequestHandler):
  """Executes the Python code submitted in a POST within this context.

  For obvious reasons, this should only be available to administrators
  of the applications.
  """

  PATH = InteractivePageHandler.PATH + '/execute'

  def post(self):
    """Compiles and runs the submitted code, capturing stdout for display."""
    original_stdout = sys.stdout
    captured = cStringIO.StringIO()
    try:
      sys.stdout = captured

      # Normalize browser line endings before compiling.
      source = self.request.get('code').replace("\r\n", "\n")
      try:
        exec(compile(source, '<string>', 'exec'), globals())
      except Exception:
        # Show the traceback to the admin instead of failing the request.
        traceback.print_exc(file=captured)
    finally:
      sys.stdout = original_stdout

    self.generate('interactive-output.html', {'output': captured.getvalue()})
class CronPageHandler(BaseRequestHandler):
  """Shows information about configured cron jobs in this application."""

  PATH = '/cron'

  def get(self, now=None):
    """Renders the cron template with each job and its next three run times.

    Args:
      now: optional datetime treated as the current time; defaults to
        datetime.datetime.now().
    """
    if not now:
      now = datetime.datetime.now()
    values = {'request': self.request, 'cronjobs': [], 'now': str(now)}
    cron_info = _ParseCronYaml()
    if cron_info:
      for entry in cron_info.cron:
        job = {'url': entry.url, 'schedule': entry.schedule}
        job['description'] = entry.description or '(no description)'
        if entry.timezone:
          job['timezone'] = entry.timezone
        schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
        job['times'] = [{'runtime': match.strftime("%Y-%m-%d %H:%M:%SZ"),
                         'difference': str(match - now)}
                        for match in schedule.GetMatches(now, 3)]
        values['cronjobs'].append(job)
    self.generate('cron.html', values)
class MemcachePageHandler(BaseRequestHandler):
  """Shows stats about memcache and query form to get values."""

  PATH = '/memcache'

  # (python_type, converter, display_name) triples. The converter turns the
  # form's string value into the typed value stored in memcache; isinstance
  # against python_type classifies values read back out.
  TYPES = ((str, str, 'String'),
           (unicode, unicode, 'Unicode String'),
           (bool, lambda value: MemcachePageHandler._ToBool(value), 'Boolean'),
           (int, int, 'Integer'),
           (long, long, 'Long Integer'),
           (float, float, 'Float'))
  DEFAULT_TYPESTR_FOR_NEW = 'String'

  @staticmethod
  def _ToBool(string_value):
    """Convert string to boolean value.

    Args:
      string_value: A string.

    Returns:
      Boolean. True if string_value is "true", False if string_value is
      "false". This is case-insensitive.

    Raises:
      ValueError: string_value not "true" or "false".
    """
    string_value_low = string_value.lower()
    if string_value_low not in ('false', 'true'):
      raise ValueError('invalid literal for boolean: %s' % string_value)
    return string_value_low == 'true'

  def _GetValueAndType(self, key):
    """Fetch value from memcache and detect its type.

    Args:
      key: String

    Returns:
      (value, type), value is a Python object or None if the key was not set in
      the cache, type is a string describing the type of the value.
    """
    try:
      value = memcache.get(key)
    except (pickle.UnpicklingError, AttributeError, EOFError, ImportError,
            IndexError), e:
      # Pickled values in the cache may reference classes this admin
      # process cannot unpickle; surface the error instead of crashing.
      msg = 'Failed to retrieve value from cache: %s' % e
      return msg, 'error'

    if value is None:
      return None, self.DEFAULT_TYPESTR_FOR_NEW

    # for/else: `typestr` comes from the matching TYPES row, or falls
    # through to 'pickled' if the value is not an editable primitive.
    for typeobj, _, typestr in self.TYPES:
      if isinstance(value, typeobj):
        break
    else:
      typestr = 'pickled'
      value = pprint.pformat(value, indent=2)

    return value, typestr

  def _SetValue(self, key, type_, value):
    """Convert a string value and store the result in memcache.

    Args:
      key: String
      type_: String, describing what type the value should have in the cache.
      value: String, will be converted according to type_.

    Returns:
      Result of memcache.set(key, converted_value). True if value was set.

    Raises:
      ValueError: Value can't be converted according to type_.
    """
    for _, converter, typestr in self.TYPES:
      if typestr == type_:
        value = converter(value)
        break
    else:
      raise ValueError('Type %s not supported.' % type_)
    return memcache.set(key, value)

  def get(self):
    """Show template and prepare stats and/or key+value to display/edit."""
    values = {'request': self.request,
              'message': self.request.get('message')}

    edit = self.request.get('edit')
    key = self.request.get('key')
    if edit:
      # Edit mode: only show the value form.
      key = edit
      values['show_stats'] = False
      values['show_value'] = False
      values['show_valueform'] = True
      values['types'] = [typestr for _, _, typestr in self.TYPES]
    elif key:
      # Display mode: stats plus the looked-up value.
      values['show_stats'] = True
      values['show_value'] = True
      values['show_valueform'] = False
    else:
      # Default: stats only.
      values['show_stats'] = True
      values['show_valueform'] = False
      values['show_value'] = False

    if key:
      values['key'] = key
      values['value'], values['type'] = self._GetValueAndType(key)
      values['key_exists'] = values['value'] is not None
      # Pickled and errored values can be displayed but not edited.
      if values['type'] in ('pickled', 'error'):
        values['writable'] = False
      else:
        values['writable'] = True

    if values['show_stats']:
      memcache_stats = memcache.get_stats()
      if not memcache_stats:
        # NOTE(review): get_stats appears to return a falsy value when no
        # stats are available (e.g. dev server); substitute zeroed stats.
        memcache_stats = {'hits': 0, 'misses': 0, 'byte_hits': 0, 'items': 0,
                          'bytes': 0, 'oldest_item_age': 0}
      values['stats'] = memcache_stats
      try:
        hitratio = memcache_stats['hits'] * 100 / (memcache_stats['hits']
                                                   + memcache_stats['misses'])
      except ZeroDivisionError:
        hitratio = 0
      values['hitratio'] = hitratio
      delta_t = datetime.timedelta(seconds=memcache_stats['oldest_item_age'])
      values['oldest_item_age'] = datetime.datetime.now() - delta_t

    self.generate('memcache.html', values)

  def _urlencode(self, query):
    """Encode a dictionary into a URL query string.

    In contrast to urllib this encodes unicode characters as UTF8.

    Args:
      query: Dictionary of key/value pairs.

    Returns:
      String.
    """
    return '&'.join('%s=%s' % (urllib.quote_plus(k.encode('utf8')),
                               urllib.quote_plus(v.encode('utf8')))
                    for k, v in query.iteritems())

  def post(self):
    """Handle modifying actions and/or redirect to GET page."""
    next_param = {}

    if self.request.get('action:flush'):
      if memcache.flush_all():
        next_param['message'] = 'Cache flushed, all keys dropped.'
      else:
        next_param['message'] = 'Flushing the cache failed. Please try again.'

    elif self.request.get('action:display'):
      next_param['key'] = self.request.get('key')

    elif self.request.get('action:edit'):
      next_param['edit'] = self.request.get('key')

    elif self.request.get('action:delete'):
      key = self.request.get('key')
      result = memcache.delete(key)
      if result == memcache.DELETE_NETWORK_FAILURE:
        next_param['message'] = ('ERROR: Network failure, key "%s" not deleted.'
                                 % key)
      elif result == memcache.DELETE_ITEM_MISSING:
        next_param['message'] = 'Key "%s" not in cache.' % key
      elif result == memcache.DELETE_SUCCESSFUL:
        next_param['message'] = 'Key "%s" deleted.' % key
      else:
        next_param['message'] = ('Unknown return value. Key "%s" might still '
                                 'exist.' % key)

    elif self.request.get('action:save'):
      key = self.request.get('key')
      value = self.request.get('value')
      type_ = self.request.get('type')
      next_param['key'] = key
      try:
        if self._SetValue(key, type_, value):
          next_param['message'] = 'Key "%s" saved.' % key
        else:
          next_param['message'] = 'ERROR: Failed to save key "%s".' % key
      except ValueError, e:
        next_param['message'] = 'ERROR: Unable to encode value: %s' % e

    elif self.request.get('action:cancel'):
      next_param['key'] = self.request.get('key')

    else:
      next_param['message'] = 'Unknown action.'

    # Redirect back to the GET page, carrying state in the query string.
    next = self.request.path_url
    if next_param:
      next = '%s?%s' % (next, self._urlencode(next_param))
    self.redirect(next)
class DatastoreRequestHandler(BaseRequestHandler):
  """The base request handler for our datastore admin pages.

  We provide utility functions for querying the datastore and inferring the
  types of entity properties.
  """

  def start(self):
    """Returns the sanitized "start" argument from the URL."""
    return self.request.get_range('start', min_value=0, default=0)

  def num(self):
    """Returns the sanitized "num" argument from the URL."""
    return self.request.get_range('num', min_value=1, max_value=100,
                                  default=10)

  def execute_query(self, start=0, num=0, no_order=False):
    """Parses the URL arguments and executes the query.

    We return a tuple (list of entities, total entity count).

    If the appropriate URL arguments are not given, we return an empty
    set of results and 0 for the entity count.
    """
    # NOTE(review): the no_order parameter is accepted but never consulted
    # here; it is kept for interface compatibility with callers.
    kind = self.request.get('kind')
    if not kind:
      return ([], 0)
    query = datastore.Query(kind)

    order = self.request.get('order')
    order_type = self.request.get('order_type')
    if order and order_type:
      order_type = DataType.get_by_name(order_type).python_type()
      # A leading '-' on the order property means descending.
      if order.startswith('-'):
        direction = datastore.Query.DESCENDING
        order = order[1:]
      else:
        direction = datastore.Query.ASCENDING
      try:
        query.Order((order, order_type, direction))
      except datastore_errors.BadArgumentError:
        # Fall back to an unordered query if the datastore rejects the
        # requested ordering.
        pass

    if not start:
      start = self.start()
    if not num:
      num = self.num()
    total = query.Count()
    # Fetch start+num results and slice off the first `start` locally
    # (no offset support is used here).
    entities = query.Get(start + num)[start:]
    return (entities, total)

  def get_key_values(self, entities):
    """Returns the union of key names used by the given list of entities.

    We return the union as a dictionary mapping each key name to a list of
    sample values for that key, gathered from the given entities.
    """
    key_dict = {}
    for entity in entities:
      for key, value in entity.iteritems():
        if key_dict.has_key(key):
          key_dict[key].append(value)
        else:
          key_dict[key] = [value]
    return key_dict
class DatastoreQueryHandler(DatastoreRequestHandler):
  """Our main request handler that executes queries and lists entities.

  We use execute_query() in our base request handler to parse URL arguments
  and execute the datastore query.
  """

  PATH = '/datastore'

  def get_kinds(self):
    """Get sorted list of kind names the datastore knows about.

    This should only be called in the development environment as GetSchema is
    expensive and no caching is done.
    """
    schema = datastore_admin.GetSchema()
    kinds = []
    for entity_proto in schema:
      # The kind name is the type of the last element in the key path.
      kinds.append(entity_proto.key().path().element_list()[-1].type())
    kinds.sort()
    return kinds

  def get(self):
    """Formats the results from execute_query() for datastore.html.

    The only complex part of that process is calculating the pager variables
    to generate the Gooooogle pager at the bottom of the page.
    """
    result_set, total = self.execute_query()
    key_values = self.get_key_values(result_set)
    keys = key_values.keys()
    keys.sort()

    # One column header per property name; the type shown is inferred from
    # the first sample value seen for that property.
    headers = []
    for key in keys:
      sample_value = key_values[key][0]
      headers.append({
        'name': key,
        'type': DataType.get(sample_value).name(),
      })

    entities = []
    edit_path = self.base_path() + DatastoreEditHandler.PATH
    for entity in result_set:
      attributes = []
      for key in keys:
        if entity.has_key(key):
          raw_value = entity[key]
          value = DataType.get(raw_value).format(raw_value)
          short_value = DataType.get(raw_value).short_format(raw_value)
        else:
          # Entity lacks this property: render empty cells.
          value = ''
          short_value = ''
        attributes.append({
          'name': key,
          'value': value,
          'short_value': short_value,
        })
      entities.append({
        'key': str(entity.key()),
        'key_name': entity.key().name(),
        'key_id': entity.key().id(),
        'shortened_key': str(entity.key())[:8] + '...',
        'attributes': attributes,
        'edit_uri': edit_path + '?key=' + str(entity.key()) + '&kind=' + urllib.quote(self.request.get('kind')) + '&next=' + urllib.quote(self.request.uri),
      })

    start = self.start()
    num = self.num()
    max_pager_links = 8
    # Pager math: current_page is zero-based here and converted to the
    # one-based page number shown in the UI by the += 1 below.
    current_page = start / num
    num_pages = int(math.ceil(total * 1.0 / num))
    page_start = max(math.floor(current_page - max_pager_links / 2), 0)
    page_end = min(page_start + max_pager_links, num_pages)

    pages = []
    for page in range(page_start + 1, page_end + 1):
      pages.append({
        'number': page,
        'start': (page - 1) * num,
      })
    current_page += 1

    in_production = self.in_production()
    if in_production:
      # GetSchema is too expensive to call in production; omit the kind list.
      kinds = None
    else:
      kinds = self.get_kinds()

    values = {
      'request': self.request,
      'in_production': in_production,
      'kinds': kinds,
      'kind': self.request.get('kind'),
      'order': self.request.get('order'),
      'headers': headers,
      'entities': entities,
      'message': self.request.get('msg'),
      'pages': pages,
      'current_page': current_page,
      'num': num,
      'next_start': -1,
      'prev_start': -1,
      'start': start,
      'total': total,
      'start_base_url': self.filter_url(['kind', 'order', 'order_type',
                                         'num']),
      'order_base_url': self.filter_url(['kind', 'num']),
    }
    # -1 in next_start/prev_start means the corresponding link is disabled.
    if current_page > 1:
      values['prev_start'] = int((current_page - 2) * num)
    if current_page < num_pages:
      values['next_start'] = int(current_page * num)

    self.generate('datastore.html', values)
class DatastoreBatchEditHandler(DatastoreRequestHandler):
  """Request handler for a batch operation on entities.

  Supports deleting multiple entities by key, then redirecting to another url.
  """

  PATH = DatastoreQueryHandler.PATH + '/batchedit'

  def post(self):
    """Deletes the selected entities, then redirects to the 'next' URL.

    Expects 'numkeys' to hold the number of key1..keyN form parameters;
    blank key slots (unchecked rows) are skipped. Responds 404 for any
    action other than 'Delete'.
    """
    # Collect the non-empty key parameters. (Removed unused locals `kind`
    # and `index` from the original implementation.)
    keys = []
    num_keys = int(self.request.get('numkeys'))
    for i in xrange(1, num_keys + 1):
      key = self.request.get('key%d' % i)
      if key:
        keys.append(key)

    if self.request.get('action') == 'Delete':
      num_deleted = 0
      for key in keys:
        datastore.Delete(datastore.Key(key))
        num_deleted = num_deleted + 1
      # Pluralize "entity"/"entities" in the status message.
      message = '%d entit%s deleted.' % (
          num_deleted, ('ies', 'y')[num_deleted == 1])
      self.redirect(
          '%s&msg=%s' % (self.request.get('next'), urllib.quote_plus(message)))
      return

    self.error(404)
class DatastoreEditHandler(DatastoreRequestHandler):
  """Request handler for the entity create/edit form.

  We determine how to generate a form to edit an entity by doing a query
  on the entity kind and looking at the set of keys and their types in
  the result set. We use the DataType subclasses for those introspected types
  to generate the form and parse the form results.
  """

  PATH = DatastoreQueryHandler.PATH + '/edit'

  def get(self):
    """Renders the create/edit form for an entity of the requested kind."""
    kind = self.request.get('kind')
    sample_entities = self.execute_query()[0]
    if len(sample_entities) < 1:
      # No entities of this kind to introspect: bounce back to the query
      # page, making sure the kind parameter survives the redirect.
      next_uri = self.request.get('next')
      kind_param = 'kind=%s' % kind
      if not kind_param in next_uri:
        if '?' in next_uri:
          next_uri += '&' + kind_param
        else:
          next_uri += '?' + kind_param
      self.redirect(next_uri)
      return

    entity_key = self.request.get('key')
    if entity_key:
      # Editing an existing entity.
      key_instance = datastore.Key(entity_key)
      entity_key_name = key_instance.name()
      entity_key_id = key_instance.id()
      parent_key = key_instance.parent()
      entity = datastore.Get(key_instance)
    else:
      # Creating a new entity.
      key_instance = None
      entity_key_name = None
      entity_key_id = None
      parent_key = None
      entity = None

    if parent_key:
      parent_kind = parent_key.kind()
    else:
      parent_kind = None

    fields = []
    key_values = self.get_key_values(sample_entities)
    for key, sample_values in key_values.iteritems():
      # Prefer the type of the entity's own value; fall back to a sample
      # value from another entity of the same kind.
      if entity and entity.has_key(key):
        data_type = DataType.get(entity[key])
      else:
        data_type = DataType.get(sample_values[0])
      # Encode the type into the field name as "TypeName|key" so post()
      # can recover the right DataType when parsing the form.
      name = data_type.name() + "|" + key
      if entity and entity.has_key(key):
        value = entity[key]
      else:
        value = None
      field = data_type.input_field(name, value, sample_values)
      fields.append((key, data_type.name(), field))

    self.generate('datastore_edit.html', {
      'kind': kind,
      'key': entity_key,
      'key_name': entity_key_name,
      'key_id': entity_key_id,
      'fields': fields,
      'focus': self.request.get('focus'),
      'next': self.request.get('next'),
      'parent_key': parent_key,
      'parent_kind': parent_kind,
    })

  def post(self):
    """Applies the submitted form: delete, update, or create the entity."""
    kind = self.request.get('kind')
    entity_key = self.request.get('key')
    if entity_key:
      if self.request.get('action') == 'Delete':
        datastore.Delete(datastore.Key(entity_key))
        self.redirect(self.request.get('next'))
        return
      entity = datastore.Get(datastore.Key(entity_key))
    else:
      entity = datastore.Entity(kind)

    args = self.request.arguments()
    for arg in args:
      # Form field names are encoded as "TypeName|field_name".
      bar = arg.find('|')
      if bar > 0:
        data_type_name = arg[:bar]
        field_name = arg[bar + 1:]
        form_value = self.request.get(arg)
        data_type = DataType.get_by_name(data_type_name)
        if entity and entity.has_key(field_name):
          old_formatted_value = data_type.format(entity[field_name])
          if old_formatted_value == form_value:
            # Value unchanged: skip the format/parse round-trip so values
            # that don't survive it losslessly aren't clobbered.
            continue

        if len(form_value) > 0:
          value = data_type.parse(form_value)
          entity[field_name] = value
        elif entity.has_key(field_name):
          # An empty form value deletes the property.
          del entity[field_name]

    datastore.Put(entity)

    self.redirect(self.request.get('next'))
class DataType(object):
  """Describes how one datastore value type is displayed, rendered as a
  form field, and parsed back from form input.
  Subclasses supply name()/parse()/python_type() and may override format(),
  short_format(), input_field() and input_field_size().
  """
  @staticmethod
  def get(value):
    """Returns the DataType instance registered for value's class."""
    return _DATA_TYPES[value.__class__]
  @staticmethod
  def get_by_name(name):
    """Returns the DataType instance registered under a friendly name."""
    return _NAMED_DATA_TYPES[name]
  def format(self, value):
    """Default display rendering: the value's str()."""
    return str(value)
  def short_format(self, value):
    """Abbreviated rendering; same as format() unless overridden."""
    return self.format(value)
  def input_field(self, name, value, sample_values):
    """Renders a single-line HTML text input for this value."""
    string_value = self.format(value) if value is not None else ''
    return '<input class="%s" name="%s" type="text" size="%d" value="%s"/>' % (
        cgi.escape(self.name()), cgi.escape(name), self.input_field_size(),
        cgi.escape(string_value, True))
  def input_field_size(self):
    """Width, in characters, of the generated text input."""
    return 30
class StringType(DataType):
  """Plain (byte or unicode) string properties."""
  def format(self, value):
    # A string is already its own display form.
    return value
  def input_field(self, name, value, sample_values):
    """Uses a <textarea> when this or any sample value is long or multiline."""
    def _needs_textarea(text):
      return bool(text) and (len(text) > 255 or text.find('\n') >= 0)
    multiline = _needs_textarea(value)
    if not multiline:
      for sample_value in sample_values:
        if _needs_textarea(sample_value):
          multiline = True
          break
    if not multiline:
      return DataType.input_field(self, name, value, sample_values)
    return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (
        cgi.escape(name), cgi.escape(value or ''))
  def name(self):
    return 'string'
  def parse(self, value):
    # Form input is already a string; nothing to convert.
    return value
  def python_type(self):
    return str
  def input_field_size(self):
    return 50
class TextType(StringType):
  """datastore_types.Text properties; always edited in a <textarea>."""
  def python_type(self):
    return datastore_types.Text
  def parse(self, value):
    return datastore_types.Text(value)
  def name(self):
    return 'Text'
  def input_field(self, name, value, sample_values):
    # Text values can be arbitrarily long, so always use a textarea.
    return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (
        cgi.escape(name), cgi.escape(str(value)))
class BlobType(StringType):
  """datastore_types.Blob properties: opaque binary, shown as a placeholder."""
  def python_type(self):
    return datastore_types.Blob
  def name(self):
    return 'Blob'
  def format(self, value):
    # Binary payloads are never rendered inline.
    return '<binary>'
  def input_field(self, name, value, sample_values):
    # Blobs cannot be edited through this form.
    return '<binary>'
class TimeType(DataType):
  """datetime.datetime values, rendered as 'YYYY-MM-DD HH:MM:SS'."""
  _FORMAT = '%Y-%m-%d %H:%M:%S'
  def name(self):
    return 'datetime'
  def python_type(self):
    return datetime.datetime
  def format(self, value):
    return value.strftime(TimeType._FORMAT)
  def parse(self, value):
    # Rebuild a datetime from the first six struct_time fields
    # (year..second); sub-second precision is not preserved.
    parsed = time.strptime(value, TimeType._FORMAT)
    return datetime.datetime(*parsed[0:6])
class ListType(DataType):
  """Python list properties, round-tripped through a single CSV row."""
  def format(self, value):
    stream = cStringIO.StringIO()
    try:
      csv.writer(stream).writerow(value)
      return stream.getvalue()
    finally:
      stream.close()
  def parse(self, value):
    stream = cStringIO.StringIO(value)
    try:
      # The first (only) CSV row holds the list elements.
      return csv.reader(stream).next()
    finally:
      stream.close()
  def name(self):
    return 'list'
  def python_type(self):
    return list
class BoolType(DataType):
  """Boolean properties, edited via a three-way <select> (unset/False/True)."""
  def name(self):
    return 'bool'
  def input_field(self, name, value, sample_values):
    """Renders a <select> marking the option for the current value."""
    selected = { None: '', False: '', True: '' }
    selected[value] = "selected"
    return """<select class="%s" name="%s">
    <option %s value=''></option>
    <option %s value='0'>False</option>
    <option %s value='1'>True</option></select>""" % (cgi.escape(self.name()), cgi.escape(name), selected[None],
        selected[False], selected[True])
  def parse(self, value):
    """Parses 'true'/'false' (any case) or an integer string into a bool.
    Bug fix: the original used 'is' (identity) to compare against the
    literals 'true'/'false'; value.lower() returns a fresh string object,
    so the identity test essentially never succeeded and those inputs fell
    through to int(), raising ValueError. Use equality instead.
    """
    lowered = value.lower()
    if lowered == 'true':
      return True
    if lowered == 'false':
      return False
    return bool(int(value))
  def python_type(self):
    return bool
class NumberType(DataType):
  """Shared base for numeric types: narrower input field than the default."""
  def input_field_size(self):
    return 10
class IntType(NumberType):
  """Plain Python int properties."""
  def python_type(self):
    return int
  def parse(self, value):
    return int(value)
  def name(self):
    return 'int'
class LongType(NumberType):
  """Python long integer properties."""
  def python_type(self):
    return long
  def parse(self, value):
    return long(value)
  def name(self):
    return 'long'
class FloatType(NumberType):
  """Python float properties."""
  def python_type(self):
    return float
  def parse(self, value):
    return float(value)
  def name(self):
    return 'float'
class UserType(DataType):
  """users.User properties, constructed from the submitted string."""
  def python_type(self):
    return users.User
  def parse(self, value):
    return users.User(value)
  def name(self):
    return 'User'
  def input_field_size(self):
    return 15
class ReferenceType(DataType):
  """datastore_types.Key references to other entities."""
  def python_type(self):
    return datastore_types.Key
  def parse(self, value):
    return datastore_types.Key(value)
  def name(self):
    return 'Key'
  def short_format(self, value):
    # Encoded keys are long; show only a leading fragment in list views.
    return str(value)[:8] + '...'
  def input_field_size(self):
    return 85
class EmailType(StringType):
  """datastore_types.Email properties."""
  def python_type(self):
    return datastore_types.Email
  def parse(self, value):
    return datastore_types.Email(value)
  def name(self):
    return 'Email'
class CategoryType(StringType):
  """datastore_types.Category properties."""
  def python_type(self):
    return datastore_types.Category
  def parse(self, value):
    return datastore_types.Category(value)
  def name(self):
    return 'Category'
class LinkType(StringType):
  """datastore_types.Link (URL) properties."""
  def python_type(self):
    return datastore_types.Link
  def parse(self, value):
    return datastore_types.Link(value)
  def name(self):
    return 'Link'
class GeoPtType(DataType):
  """datastore_types.GeoPt properties, constructed from the input string."""
  def python_type(self):
    return datastore_types.GeoPt
  def parse(self, value):
    return datastore_types.GeoPt(value)
  def name(self):
    return 'GeoPt'
class ImType(DataType):
  """datastore_types.IM properties, constructed from the input string."""
  def python_type(self):
    return datastore_types.IM
  def parse(self, value):
    return datastore_types.IM(value)
  def name(self):
    return 'IM'
class PhoneNumberType(StringType):
  """datastore_types.PhoneNumber properties."""
  def python_type(self):
    return datastore_types.PhoneNumber
  def parse(self, value):
    return datastore_types.PhoneNumber(value)
  def name(self):
    return 'PhoneNumber'
class PostalAddressType(StringType):
  """datastore_types.PostalAddress properties."""
  def python_type(self):
    return datastore_types.PostalAddress
  def parse(self, value):
    return datastore_types.PostalAddress(value)
  def name(self):
    return 'PostalAddress'
class RatingType(NumberType):
  """datastore_types.Rating properties."""
  def python_type(self):
    return datastore_types.Rating
  def parse(self, value):
    return datastore_types.Rating(value)
  def name(self):
    return 'Rating'
class NoneType(DataType):
  """The None value; displayed as 'None' and never parsed back."""
  def name(self):
    return 'None'
  def format(self, value):
    return 'None'
  def parse(self, value):
    # There is nothing to parse; any input maps back to None.
    return None
# Registry mapping a value's Python class to the singleton DataType
# instance used to display, render and parse values of that class.
_DATA_TYPES = {
  types.NoneType: NoneType(),
  types.StringType: StringType(),
  types.UnicodeType: StringType(),
  datastore_types.Text: TextType(),
  datastore_types.Blob: BlobType(),
  types.BooleanType: BoolType(),
  types.IntType: IntType(),
  types.LongType: LongType(),
  types.FloatType: FloatType(),
  datetime.datetime: TimeType(),
  users.User: UserType(),
  datastore_types.Key: ReferenceType(),
  types.ListType: ListType(),
  datastore_types.Email: EmailType(),
  datastore_types.Category: CategoryType(),
  datastore_types.Link: LinkType(),
  datastore_types.GeoPt: GeoPtType(),
  datastore_types.IM: ImType(),
  datastore_types.PhoneNumber: PhoneNumberType(),
  datastore_types.PostalAddress: PostalAddressType(),
  datastore_types.Rating: RatingType(),
  }

# Secondary index by friendly name (DataType.name()); used to decode the
# "<type>|<property>" field names posted by the edit form.
_NAMED_DATA_TYPES = {}
for data_type in _DATA_TYPES.values():
  _NAMED_DATA_TYPES[data_type.name()] = data_type
def _ParseCronYaml():
  """Loads and parses the application's cron configuration.
  Tries 'cron.yaml' then 'cron.yml' in the working directory and returns
  the parsed cron info from the first file that can be opened, or None if
  neither exists.
  """
  for filename in ('cron.yaml', 'cron.yml'):
    try:
      stream = open(filename, "r")
    except IOError:
      continue
    try:
      return croninfo.LoadSingleCron(stream)
    finally:
      stream.close()
  return None
def main():
  """Maps handler classes to URL patterns and runs the WSGI application."""
  handlers = [
    ('.*' + DatastoreQueryHandler.PATH, DatastoreQueryHandler),
    ('.*' + DatastoreEditHandler.PATH, DatastoreEditHandler),
    ('.*' + DatastoreBatchEditHandler.PATH, DatastoreBatchEditHandler),
    ('.*' + InteractivePageHandler.PATH, InteractivePageHandler),
    ('.*' + InteractiveExecuteHandler.PATH, InteractiveExecuteHandler),
    ('.*' + MemcachePageHandler.PATH, MemcachePageHandler),
    ('.*' + ImageHandler.PATH, ImageHandler),
    ('.*', DefaultPageHandler),
  ]
  if HAVE_CRON:
    # Cron support is optional; register its page only when importable.
    handlers.insert(0, ('.*' + CronPageHandler.PATH, CronPageHandler))
  application = webapp.WSGIApplication(handlers, debug=_DEBUG)
  wsgiref.handlers.CGIHandler().run(application)

import django
if django.VERSION[:2] < (0, 97):
  # Older Django template libraries lack the 'safe' filter used by our
  # templates; register a pass-through implementation so rendering works.
  from django.template import defaultfilters
  def safe(text, dummy=None):
    return text
  defaultfilters.register.filter("safe", safe)

if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple datastore view and interactive console, for use in dev_appserver."""
import cgi
import csv
import cStringIO
import datetime
import logging
import math
import mimetypes
import os
import os.path
import pickle
import pprint
import random
import sys
import time
import traceback
import types
import urllib
import urlparse
import wsgiref.handlers
try:
from google.appengine.cron import groctimespecification
from google.appengine.api import croninfo
except ImportError:
HAVE_CRON = False
else:
HAVE_CRON = True
from google.appengine.api import datastore
from google.appengine.api import datastore_admin
from google.appengine.api import datastore_types
from google.appengine.api import datastore_errors
from google.appengine.api import memcache
from google.appengine.api import users
from google.appengine.ext import db
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
# Render templates with debug=True so template errors surface in dev.
_DEBUG = True
class ImageHandler(webapp.RequestHandler):
  """Serves a static image.
  This exists because we don't want to burden the user with specifying
  a static file handler for the image resources used by the admin tool.
  """
  PATH = '/images/.*'
  def get(self):
    """Streams one image file from the bundled templates/images directory."""
    image_name = os.path.basename(self.request.path)
    content_type, encoding = mimetypes.guess_type(image_name)
    if not content_type or not content_type.startswith('image/'):
      # Refuse anything whose name doesn't look like an image.
      logging.debug('image_name=%r, content_type=%r, encoding=%r',
                    image_name, content_type, encoding)
      self.error(404)
      return
    directory = os.path.dirname(__file__)
    path = os.path.join(directory, 'templates', 'images', image_name)
    try:
      image_stream = open(path, 'rb')
    except IOError, e:
      logging.error('Cannot open image %s: %s', image_name, e)
      self.error(404)
      return
    try:
      image_data = image_stream.read()
    finally:
      image_stream.close()
    self.response.headers['Content-Type'] = content_type
    self.response.out.write(image_data)
class BaseRequestHandler(webapp.RequestHandler):
  """Supplies a common template generation function.
  When you call generate(), we augment the template variables supplied with
  the current user in the 'user' variable and the current webapp request
  in the 'request' variable.
  """
  def generate(self, template_name, template_values=None):
    """Renders template_name with shared admin-console template values.

    Args:
      template_name: file name under the local 'templates' directory.
      template_values: optional dict of extra values; overrides defaults.

    Bug fix: the original used a mutable default argument ({}), the classic
    shared-default pitfall; use None and create a fresh dict per call.
    """
    if template_values is None:
      template_values = {}
    base_path = self.base_path()
    values = {
      'application_name': self.request.environ['APPLICATION_ID'],
      'user': users.get_current_user(),
      'request': self.request,
      'home_path': base_path + DefaultPageHandler.PATH,
      'datastore_path': base_path + DatastoreQueryHandler.PATH,
      'datastore_edit_path': base_path + DatastoreEditHandler.PATH,
      'datastore_batch_edit_path': base_path + DatastoreBatchEditHandler.PATH,
      'interactive_path': base_path + InteractivePageHandler.PATH,
      'interactive_execute_path': base_path + InteractiveExecuteHandler.PATH,
      'memcache_path': base_path + MemcachePageHandler.PATH,
    }
    if HAVE_CRON:
      values['cron_path'] = base_path + CronPageHandler.PATH
    values.update(template_values)
    directory = os.path.dirname(__file__)
    path = os.path.join(directory, os.path.join('templates', template_name))
    self.response.out.write(template.render(path, values, debug=_DEBUG))
  def base_path(self):
    """Returns the base path of this admin app, which is chosen by the user.
    The user specifies which paths map to this application in their app.cfg.
    You can get that base path with this method. Combine with the constant
    paths specified by the classes to construct URLs.
    """
    path = self.__class__.PATH
    return self.request.path[:-len(path)]
  def filter_url(self, args):
    """Filters the current URL to only have the given list of arguments.
    For example, if your URL is /search?q=foo&num=100&start=10, then
    self.filter_url(['start', 'num']) => /search?num=100&start=10
    self.filter_url(['q']) => /search?q=10
    self.filter_url(['random']) => /search?
    """
    queries = []
    for arg in args:
      value = self.request.get(arg)
      if value:
        queries.append(arg + '=' + urllib.quote_plus(self.request.get(arg)))
    return self.request.path + '?' + '&'.join(queries)
  def in_production(self):
    """Detects if app is running in production.
    Returns a boolean.
    """
    # .get() avoids a KeyError when SERVER_SOFTWARE is unset; an unknown
    # environment is treated as production (the conservative choice).
    server_software = os.environ.get('SERVER_SOFTWARE', '')
    return not server_software.startswith('Development')
class DefaultPageHandler(BaseRequestHandler):
  """Redirects to the Datastore application by default."""
  PATH = '/'
  def get(self):
    # Strip a single trailing slash so the redirect target has no '//'.
    base = self.request.path
    if base.endswith('/'):
      base = base[:-1]
    self.redirect(base + DatastoreQueryHandler.PATH)
class InteractivePageHandler(BaseRequestHandler):
  """Shows our interactive console HTML."""
  PATH = '/interactive'
  def get(self):
    # The console page posts submitted code to InteractiveExecuteHandler.
    self.generate('interactive.html')
class InteractiveExecuteHandler(BaseRequestHandler):
  """Executes the Python code submitted in a POST within this context.
  For obvious reasons, this should only be available to administrators
  of the applications.
  """
  PATH = InteractivePageHandler.PATH + '/execute'
  def post(self):
    """Compiles and exec()s the posted code, capturing stdout for display.
    SECURITY NOTE: this exec()s arbitrary submitted code in this module's
    globals; the handler must only ever be reachable by administrators.
    """
    save_stdout = sys.stdout
    results_io = cStringIO.StringIO()
    try:
      # Capture everything the snippet prints.
      sys.stdout = results_io
      code = self.request.get('code')
      code = code.replace("\r\n", "\n")
      try:
        compiled_code = compile(code, '<string>', 'exec')
        exec(compiled_code, globals())
      except Exception, e:
        # Show the traceback in the console output instead of failing.
        traceback.print_exc(file=results_io)
    finally:
      sys.stdout = save_stdout
    results = results_io.getvalue()
    self.generate('interactive-output.html', {'output': results})
class CronPageHandler(BaseRequestHandler):
  """Shows information about configured cron jobs in this application."""
  PATH = '/cron'
  def get(self, now=None):
    """Shows template displaying the configured cron jobs.

    Args:
      now: optional datetime treated as "current time" (useful for tests);
        defaults to datetime.datetime.now().
    """
    if not now:
      now = datetime.datetime.now()
    values = {'request': self.request}
    cron_info = _ParseCronYaml()
    values['cronjobs'] = []
    values['now'] = str(now)
    if cron_info:
      for entry in cron_info.cron:
        job = {}
        values['cronjobs'].append(job)
        if entry.description:
          job['description'] = entry.description
        else:
          job['description'] = '(no description)'
        if entry.timezone:
          job['timezone'] = entry.timezone
        job['url'] = entry.url
        job['schedule'] = entry.schedule
        # Compute the next three run times for display.
        schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
        matches = schedule.GetMatches(now, 3)
        job['times'] = []
        for match in matches:
          job['times'].append({'runtime': match.strftime("%Y-%m-%d %H:%M:%SZ"),
                               'difference': str(match - now)})
    self.generate('cron.html', values)
class MemcachePageHandler(BaseRequestHandler):
  """Shows stats about memcache and query form to get values."""
  PATH = '/memcache'
  # (python type, converter-from-string, display name) for every value type
  # the edit form can round-trip.
  TYPES = ((str, str, 'String'),
           (unicode, unicode, 'Unicode String'),
           (bool, lambda value: MemcachePageHandler._ToBool(value), 'Boolean'),
           (int, int, 'Integer'),
           (long, long, 'Long Integer'),
           (float, float, 'Float'))
  # Type preselected in the edit form for keys not yet in the cache.
  DEFAULT_TYPESTR_FOR_NEW = 'String'
  @staticmethod
  def _ToBool(string_value):
    """Convert string to boolean value.
    Args:
      string_value: A string.
    Returns:
      Boolean. True if string_value is "true", False if string_value is
      "false". This is case-insensitive.
    Raises:
      ValueError: string_value not "true" or "false".
    """
    string_value_low = string_value.lower()
    if string_value_low not in ('false', 'true'):
      raise ValueError('invalid literal for boolean: %s' % string_value)
    return string_value_low == 'true'
  def _GetValueAndType(self, key):
    """Fetch value from memcache and detect its type.
    Args:
      key: String
    Returns:
      (value, type), value is a Python object or None if the key was not set in
      the cache, type is a string describing the type of the value.
    """
    try:
      value = memcache.get(key)
    except (pickle.UnpicklingError, AttributeError, EOFError, ImportError,
            IndexError), e:
      # Pickled values written by incompatible code can fail to unpickle;
      # report the problem on the page instead of crashing.
      msg = 'Failed to retrieve value from cache: %s' % e
      return msg, 'error'
    if value is None:
      return None, self.DEFAULT_TYPESTR_FOR_NEW
    for typeobj, _, typestr in self.TYPES:
      if isinstance(value, typeobj):
        break
    else:
      # Not one of the editable primitives: show a pretty-printed repr.
      typestr = 'pickled'
      value = pprint.pformat(value, indent=2)
    return value, typestr
  def _SetValue(self, key, type_, value):
    """Convert a string value and store the result in memcache.
    Args:
      key: String
      type_: String, describing what type the value should have in the cache.
      value: String, will be converted according to type_.
    Returns:
      Result of memcache.set(ket, converted_value). True if value was set.
    Raises:
      ValueError: Value can't be converted according to type_.
    """
    for _, converter, typestr in self.TYPES:
      if typestr == type_:
        value = converter(value)
        break
    else:
      raise ValueError('Type %s not supported.' % type_)
    return memcache.set(key, value)
  def get(self):
    """Show template and prepare stats and/or key+value to display/edit."""
    values = {'request': self.request,
              'message': self.request.get('message')}
    edit = self.request.get('edit')
    key = self.request.get('key')
    if edit:
      # Edit mode: show the value form instead of the stats.
      key = edit
      values['show_stats'] = False
      values['show_value'] = False
      values['show_valueform'] = True
      values['types'] = [typestr for _, _, typestr in self.TYPES]
    elif key:
      values['show_stats'] = True
      values['show_value'] = True
      values['show_valueform'] = False
    else:
      values['show_stats'] = True
      values['show_valueform'] = False
      values['show_value'] = False
    if key:
      values['key'] = key
      values['value'], values['type'] = self._GetValueAndType(key)
      values['key_exists'] = values['value'] is not None
      # Pickled and unreadable values cannot be edited through the form.
      if values['type'] in ('pickled', 'error'):
        values['writable'] = False
      else:
        values['writable'] = True
    if values['show_stats']:
      memcache_stats = memcache.get_stats()
      if not memcache_stats:
        # The dev_appserver may return no stats; substitute zeroes so the
        # template always has the keys it expects.
        memcache_stats = {'hits': 0, 'misses': 0, 'byte_hits': 0, 'items': 0,
                          'bytes': 0, 'oldest_item_age': 0}
      values['stats'] = memcache_stats
      try:
        hitratio = memcache_stats['hits'] * 100 / (memcache_stats['hits']
                                                   + memcache_stats['misses'])
      except ZeroDivisionError:
        hitratio = 0
      values['hitratio'] = hitratio
      delta_t = datetime.timedelta(seconds=memcache_stats['oldest_item_age'])
      values['oldest_item_age'] = datetime.datetime.now() - delta_t
    self.generate('memcache.html', values)
  def _urlencode(self, query):
    """Encode a dictionary into a URL query string.
    In contrast to urllib this encodes unicode characters as UTF8.
    Args:
      query: Dictionary of key/value pairs.
    Returns:
      String.
    """
    return '&'.join('%s=%s' % (urllib.quote_plus(k.encode('utf8')),
                               urllib.quote_plus(v.encode('utf8')))
                    for k, v in query.iteritems())
  def post(self):
    """Handle modifying actions and/or redirect to GET page."""
    next_param = {}
    if self.request.get('action:flush'):
      if memcache.flush_all():
        next_param['message'] = 'Cache flushed, all keys dropped.'
      else:
        next_param['message'] = 'Flushing the cache failed. Please try again.'
    elif self.request.get('action:display'):
      next_param['key'] = self.request.get('key')
    elif self.request.get('action:edit'):
      next_param['edit'] = self.request.get('key')
    elif self.request.get('action:delete'):
      key = self.request.get('key')
      result = memcache.delete(key)
      if result == memcache.DELETE_NETWORK_FAILURE:
        next_param['message'] = ('ERROR: Network failure, key "%s" not deleted.'
                                 % key)
      elif result == memcache.DELETE_ITEM_MISSING:
        next_param['message'] = 'Key "%s" not in cache.' % key
      elif result == memcache.DELETE_SUCCESSFUL:
        next_param['message'] = 'Key "%s" deleted.' % key
      else:
        next_param['message'] = ('Unknown return value. Key "%s" might still '
                                 'exist.' % key)
    elif self.request.get('action:save'):
      key = self.request.get('key')
      value = self.request.get('value')
      type_ = self.request.get('type')
      next_param['key'] = key
      try:
        if self._SetValue(key, type_, value):
          next_param['message'] = 'Key "%s" saved.' % key
        else:
          next_param['message'] = 'ERROR: Failed to save key "%s".' % key
      except ValueError, e:
        next_param['message'] = 'ERROR: Unable to encode value: %s' % e
    elif self.request.get('action:cancel'):
      next_param['key'] = self.request.get('key')
    else:
      next_param['message'] = 'Unknown action.'
    # Redirect so a page refresh cannot repeat the modifying action.
    next = self.request.path_url
    if next_param:
      next = '%s?%s' % (next, self._urlencode(next_param))
    self.redirect(next)
class DatastoreRequestHandler(BaseRequestHandler):
  """The base request handler for our datastore admin pages.
  We provide utility functions for querying the datastore and inferring the
  types of entity properties.
  """
  def start(self):
    """Returns the sanitized "start" argument from the URL."""
    return self.request.get_range('start', min_value=0, default=0)
  def num(self):
    """Returns the sanitized "num" argument from the URL."""
    return self.request.get_range('num', min_value=1, max_value=100,
                                  default=10)
  def execute_query(self, start=0, num=0, no_order=False):
    """Parses the URL arguments and executes the query.
    We return a tuple (list of entities, total entity count).
    If the appropriate URL arguments are not given, we return an empty
    set of results and 0 for the entity count.
    NOTE: the no_order argument is currently unused; it is kept so the
    signature stays compatible with existing callers.
    """
    kind = self.request.get('kind')
    if not kind:
      return ([], 0)
    query = datastore.Query(kind)
    order = self.request.get('order')
    order_type = self.request.get('order_type')
    if order and order_type:
      order_type = DataType.get_by_name(order_type).python_type()
      # A leading '-' on the property name requests descending order.
      if order.startswith('-'):
        direction = datastore.Query.DESCENDING
        order = order[1:]
      else:
        direction = datastore.Query.ASCENDING
      try:
        query.Order((order, order_type, direction))
      except datastore_errors.BadArgumentError:
        # Ignore unsortable property requests rather than failing the page.
        pass
    if not start:
      start = self.start()
    if not num:
      num = self.num()
    total = query.Count()
    entities = query.Get(start + num)[start:]
    return (entities, total)
  def get_key_values(self, entities):
    """Returns the union of key names used by the given list of entities.
    We return the union as a dictionary mapping the key names to a sample
    value from one of the entities for the key name.
    """
    key_dict = {}
    for entity in entities:
      for key, value in entity.iteritems():
        # setdefault replaces the deprecated has_key() check-then-append.
        key_dict.setdefault(key, []).append(value)
    return key_dict
class DatastoreQueryHandler(DatastoreRequestHandler):
  """Our main request handler that executes queries and lists entities.
  We use execute_query() in our base request handler to parse URL arguments
  and execute the datastore query.
  """
  PATH = '/datastore'
  def get_kinds(self):
    """Get sorted list of kind names the datastore knows about.
    This should only be called in the development environment as GetSchema is
    expensive and no caching is done.
    """
    schema = datastore_admin.GetSchema()
    kinds = []
    for entity_proto in schema:
      # The kind name is the type of the last element of the key path.
      kinds.append(entity_proto.key().path().element_list()[-1].type())
    kinds.sort()
    return kinds
  def get(self):
    """Formats the results from execute_query() for datastore.html.
    The only complex part of that process is calculating the pager variables
    to generate the Gooooogle pager at the bottom of the page.
    """
    result_set, total = self.execute_query()
    key_values = self.get_key_values(result_set)
    keys = key_values.keys()
    keys.sort()
    # One column header per property name seen in the result set.
    headers = []
    for key in keys:
      sample_value = key_values[key][0]
      headers.append({
        'name': key,
        'type': DataType.get(sample_value).name(),
      })
    entities = []
    edit_path = self.base_path() + DatastoreEditHandler.PATH
    for entity in result_set:
      attributes = []
      for key in keys:
        if entity.has_key(key):
          raw_value = entity[key]
          value = DataType.get(raw_value).format(raw_value)
          short_value = DataType.get(raw_value).short_format(raw_value)
        else:
          # This entity lacks the property: render empty cells.
          value = ''
          short_value = ''
        attributes.append({
          'name': key,
          'value': value,
          'short_value': short_value,
        })
      entities.append({
        'key': str(entity.key()),
        'key_name': entity.key().name(),
        'key_id': entity.key().id(),
        'shortened_key': str(entity.key())[:8] + '...',
        'attributes': attributes,
        'edit_uri': edit_path + '?key=' + str(entity.key()) + '&kind=' + urllib.quote(self.request.get('kind')) + '&next=' + urllib.quote(self.request.uri),
      })
    start = self.start()
    num = self.num()
    # Build a window of up to max_pager_links page links centered on the
    # current page (integer division: this file runs on Python 2).
    max_pager_links = 8
    current_page = start / num
    num_pages = int(math.ceil(total * 1.0 / num))
    page_start = max(math.floor(current_page - max_pager_links / 2), 0)
    page_end = min(page_start + max_pager_links, num_pages)
    pages = []
    for page in range(page_start + 1, page_end + 1):
      pages.append({
        'number': page,
        'start': (page - 1) * num,
      })
    current_page += 1
    in_production = self.in_production()
    if in_production:
      # GetSchema is too expensive to call in production; hide kind list.
      kinds = None
    else:
      kinds = self.get_kinds()
    values = {
      'request': self.request,
      'in_production': in_production,
      'kinds': kinds,
      'kind': self.request.get('kind'),
      'order': self.request.get('order'),
      'headers': headers,
      'entities': entities,
      'message': self.request.get('msg'),
      'pages': pages,
      'current_page': current_page,
      'num': num,
      'next_start': -1,
      'prev_start': -1,
      'start': start,
      'total': total,
      'start_base_url': self.filter_url(['kind', 'order', 'order_type',
                                         'num']),
      'order_base_url': self.filter_url(['kind', 'num']),
    }
    # -1 start values tell the template to omit the prev/next links.
    if current_page > 1:
      values['prev_start'] = int((current_page - 2) * num)
    if current_page < num_pages:
      values['next_start'] = int(current_page * num)
    self.generate('datastore.html', values)
class DatastoreBatchEditHandler(DatastoreRequestHandler):
  """Request handler for a batch operation on entities.
  Supports deleting multiple entities by key, then redirecting to another url.
  """
  PATH = DatastoreQueryHandler.PATH + '/batchedit'
  def post(self):
    """Deletes the posted keys (key1..key<numkeys>) and redirects to 'next'.
    Responds with 404 for any action other than 'Delete'.
    Cleanup: removed the unused 'kind' and 'index' locals and replaced the
    obscure ('ies', 'y')[...] tuple-indexing plural with a conditional
    expression; behavior is unchanged.
    """
    num_keys = int(self.request.get('numkeys'))
    keys = []
    for i in xrange(1, num_keys + 1):
      key = self.request.get('key%d' % i)
      if key:
        keys.append(key)
    if self.request.get('action') == 'Delete':
      num_deleted = 0
      for key in keys:
        datastore.Delete(datastore.Key(key))
        num_deleted = num_deleted + 1
      message = '%d entit%s deleted.' % (
          num_deleted, 'y' if num_deleted == 1 else 'ies')
      self.redirect(
          '%s&msg=%s' % (self.request.get('next'), urllib.quote_plus(message)))
      return
    self.error(404)
class DatastoreEditHandler(DatastoreRequestHandler):
  """Request handler for the entity create/edit form.
  We determine how to generate a form to edit an entity by doing a query
  on the entity kind and looking at the set of keys and their types in
  the result set. We use the DataType subclasses for those introspected types
  to generate the form and parse the form results.
  """
  PATH = DatastoreQueryHandler.PATH + '/edit'
  def get(self):
    """Renders the create/edit form for one entity.
    Requires at least one existing entity of the requested kind (the sample
    entities drive field introspection); otherwise redirects back to 'next'
    with the kind preserved as a query parameter.
    """
    kind = self.request.get('kind')
    sample_entities = self.execute_query()[0]
    if len(sample_entities) < 1:
      # Nothing to introspect: bounce back, keeping kind= in the URL.
      next_uri = self.request.get('next')
      kind_param = 'kind=%s' % kind
      if not kind_param in next_uri:
        if '?' in next_uri:
          next_uri += '&' + kind_param
        else:
          next_uri += '?' + kind_param
      self.redirect(next_uri)
      return
    entity_key = self.request.get('key')
    if entity_key:
      # Editing an existing entity: load it and its key metadata.
      key_instance = datastore.Key(entity_key)
      entity_key_name = key_instance.name()
      entity_key_id = key_instance.id()
      parent_key = key_instance.parent()
      entity = datastore.Get(key_instance)
    else:
      # Creating a new entity: no key yet.
      key_instance = None
      entity_key_name = None
      entity_key_id = None
      parent_key = None
      entity = None
    if parent_key:
      parent_kind = parent_key.kind()
    else:
      parent_kind = None
    fields = []
    key_values = self.get_key_values(sample_entities)
    for key, sample_values in key_values.iteritems():
      # Prefer the type of the entity's own value; fall back to a sample.
      if entity and entity.has_key(key):
        data_type = DataType.get(entity[key])
      else:
        data_type = DataType.get(sample_values[0])
      # The field name encodes the type so post() can parse it back.
      name = data_type.name() + "|" + key
      if entity and entity.has_key(key):
        value = entity[key]
      else:
        value = None
      field = data_type.input_field(name, value, sample_values)
      fields.append((key, data_type.name(), field))
    self.generate('datastore_edit.html', {
      'kind': kind,
      'key': entity_key,
      'key_name': entity_key_name,
      'key_id': entity_key_id,
      'fields': fields,
      'focus': self.request.get('focus'),
      'next': self.request.get('next'),
      'parent_key': parent_key,
      'parent_kind': parent_kind,
    })
  def post(self):
    """Applies the submitted form: delete, update or create, then redirect."""
    kind = self.request.get('kind')
    entity_key = self.request.get('key')
    if entity_key:
      if self.request.get('action') == 'Delete':
        datastore.Delete(datastore.Key(entity_key))
        self.redirect(self.request.get('next'))
        return
      entity = datastore.Get(datastore.Key(entity_key))
    else:
      entity = datastore.Entity(kind)
    args = self.request.arguments()
    for arg in args:
      # Form field names look like "<type name>|<property name>"; skip others.
      bar = arg.find('|')
      if bar > 0:
        data_type_name = arg[:bar]
        field_name = arg[bar + 1:]
        form_value = self.request.get(arg)
        data_type = DataType.get_by_name(data_type_name)
        if entity and entity.has_key(field_name):
          old_formatted_value = data_type.format(entity[field_name])
          if old_formatted_value == form_value:
            # Unchanged field: skip re-parsing (avoids lossy round-trips).
            continue
        if len(form_value) > 0:
          value = data_type.parse(form_value)
          entity[field_name] = value
        elif entity.has_key(field_name):
          # An emptied field means "delete this property".
          del entity[field_name]
    datastore.Put(entity)
    self.redirect(self.request.get('next'))
class DataType(object):
  """Describes how one datastore value type is displayed, rendered as a
  form field, and parsed back from form input.
  Subclasses supply name()/parse()/python_type() and may override format(),
  short_format(), input_field() and input_field_size().
  """
  @staticmethod
  def get(value):
    """Returns the DataType instance registered for value's class."""
    return _DATA_TYPES[value.__class__]
  @staticmethod
  def get_by_name(name):
    """Returns the DataType instance registered under a friendly name."""
    return _NAMED_DATA_TYPES[name]
  def format(self, value):
    """Default display rendering: the value's str()."""
    return str(value)
  def short_format(self, value):
    """Abbreviated rendering; same as format() unless overridden."""
    return self.format(value)
  def input_field(self, name, value, sample_values):
    """Renders a single-line HTML text input for this value."""
    string_value = self.format(value) if value is not None else ''
    return '<input class="%s" name="%s" type="text" size="%d" value="%s"/>' % (
        cgi.escape(self.name()), cgi.escape(name), self.input_field_size(),
        cgi.escape(string_value, True))
  def input_field_size(self):
    """Width, in characters, of the generated text input."""
    return 30
class StringType(DataType):
  """Plain (byte or unicode) string properties."""
  def format(self, value):
    # A string is already its own display form.
    return value
  def input_field(self, name, value, sample_values):
    """Uses a <textarea> when this or any sample value is long or multiline."""
    def _needs_textarea(text):
      return bool(text) and (len(text) > 255 or text.find('\n') >= 0)
    multiline = _needs_textarea(value)
    if not multiline:
      for sample_value in sample_values:
        if _needs_textarea(sample_value):
          multiline = True
          break
    if not multiline:
      return DataType.input_field(self, name, value, sample_values)
    return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (
        cgi.escape(name), cgi.escape(value or ''))
  def name(self):
    return 'string'
  def parse(self, value):
    # Form input is already a string; nothing to convert.
    return value
  def python_type(self):
    return str
  def input_field_size(self):
    return 50
class TextType(StringType):
  """datastore_types.Text properties; always edited in a <textarea>."""
  def python_type(self):
    return datastore_types.Text
  def parse(self, value):
    return datastore_types.Text(value)
  def name(self):
    return 'Text'
  def input_field(self, name, value, sample_values):
    # Text values can be arbitrarily long, so always use a textarea.
    return '<textarea name="%s" rows="5" cols="50">%s</textarea>' % (
        cgi.escape(name), cgi.escape(str(value)))
class BlobType(StringType):
  """Datastore Blob values; binary payloads are neither shown nor editable."""

  def name(self):
    return 'Blob'

  def input_field(self, name, value, sample_values):
    # Binary contents cannot be edited in an HTML form; show a placeholder.
    return '<binary>'

  def format(self, value):
    return '<binary>'

  def python_type(self):
    return datastore_types.Blob
class TimeType(DataType):
  """datetime.datetime values, formatted as 'YYYY-MM-DD HH:MM:SS'."""

  _FORMAT = '%Y-%m-%d %H:%M:%S'

  def format(self, value):
    return value.strftime(TimeType._FORMAT)

  def name(self):
    return 'datetime'

  def parse(self, value):
    # strptime yields a struct_time; its first six fields are
    # year..second (any sub-second precision is dropped).
    fields = time.strptime(value, TimeType._FORMAT)
    return datetime.datetime(*fields[:6])

  def python_type(self):
    return datetime.datetime
class ListType(DataType):
  """List values, round-tripped through a single CSV row."""

  def format(self, value):
    # Serialize the list as one CSV row.
    stream = cStringIO.StringIO()
    try:
      csv.writer(stream).writerow(value)
      return stream.getvalue()
    finally:
      stream.close()

  def name(self):
    return 'list'

  def parse(self, value):
    # Parse the submitted string back as a single CSV row.
    stream = cStringIO.StringIO(value)
    try:
      return csv.reader(stream).next()
    finally:
      stream.close()

  def python_type(self):
    return list
class BoolType(DataType):
  """Boolean values, edited via a three-way (empty/False/True) <select>."""

  def name(self):
    return 'bool'

  def input_field(self, name, value, sample_values):
    # Mark the option matching the current value as selected; None selects
    # the blank option.
    selected = {None: '', False: '', True: ''}
    selected[value] = "selected"
    return """<select class="%s" name="%s">
    <option %s value=''></option>
    <option %s value='0'>False</option>
    <option %s value='1'>True</option></select>""" % (cgi.escape(self.name()), cgi.escape(name), selected[None],
                      selected[False], selected[True])

  def parse(self, value):
    """Parse a submitted form value into a bool.

    Accepts 'true'/'false' (case-insensitive) as well as the numeric
    '0'/'1' values emitted by input_field.

    Bug fix: the original compared with `is`, which tests object identity
    rather than equality.  value.lower() returns a new string object, so
    `value.lower() is 'true'` was effectively always False and the strings
    'true'/'false' fell through to int(), raising ValueError.
    """
    lowered = value.lower()
    if lowered == 'true':
      return True
    if lowered == 'false':
      return False
    return bool(int(value))

  def python_type(self):
    return bool
class NumberType(DataType):
  """Shared base for numeric DataTypes; narrows the default input field."""

  def input_field_size(self):
    return 10
class IntType(NumberType):
  """Python int values."""

  def name(self):
    return 'int'

  def parse(self, value):
    return int(value)

  def python_type(self):
    return int
class LongType(NumberType):
  """Python long integer values (Python 2 only)."""

  def name(self):
    return 'long'

  def parse(self, value):
    # `long` is the Python 2 arbitrary-precision integer type.
    return long(value)

  def python_type(self):
    return long
class FloatType(NumberType):
  """Python float values."""

  def name(self):
    return 'float'

  def parse(self, value):
    return float(value)

  def python_type(self):
    return float
class UserType(DataType):
  """users.User values, edited via a string (presumably the user's email)."""

  def name(self):
    return 'User'

  def parse(self, value):
    return users.User(value)

  def python_type(self):
    return users.User

  def input_field_size(self):
    return 15
class ReferenceType(DataType):
  """Datastore Key values; long opaque strings, truncated in list views."""

  def name(self):
    return 'Key'

  def short_format(self, value):
    # Encoded keys are long; show only a leading fragment in result tables.
    prefix = str(value)[:8]
    return '%s...' % prefix

  def parse(self, value):
    return datastore_types.Key(value)

  def python_type(self):
    return datastore_types.Key

  def input_field_size(self):
    return 85
class EmailType(StringType):
  """Datastore Email values, edited as plain strings."""

  def name(self):
    return 'Email'

  def parse(self, value):
    return datastore_types.Email(value)

  def python_type(self):
    return datastore_types.Email
class CategoryType(StringType):
  """Datastore Category values, edited as plain strings."""

  def name(self):
    return 'Category'

  def parse(self, value):
    return datastore_types.Category(value)

  def python_type(self):
    return datastore_types.Category
class LinkType(StringType):
  """Datastore Link (URL) values, edited as plain strings."""

  def name(self):
    return 'Link'

  def parse(self, value):
    return datastore_types.Link(value)

  def python_type(self):
    return datastore_types.Link
class GeoPtType(DataType):
  """Datastore GeoPt (geographical point) values."""

  def name(self):
    return 'GeoPt'

  def parse(self, value):
    return datastore_types.GeoPt(value)

  def python_type(self):
    return datastore_types.GeoPt
class ImType(DataType):
  """Datastore IM (instant-messaging handle) values."""

  def name(self):
    return 'IM'

  def parse(self, value):
    return datastore_types.IM(value)

  def python_type(self):
    return datastore_types.IM
class PhoneNumberType(StringType):
  """Datastore PhoneNumber values, edited as plain strings."""

  def name(self):
    return 'PhoneNumber'

  def parse(self, value):
    return datastore_types.PhoneNumber(value)

  def python_type(self):
    return datastore_types.PhoneNumber
class PostalAddressType(StringType):
  """Datastore PostalAddress values, edited as plain strings."""

  def name(self):
    return 'PostalAddress'

  def parse(self, value):
    return datastore_types.PostalAddress(value)

  def python_type(self):
    return datastore_types.PostalAddress
class RatingType(NumberType):
  """Datastore Rating values, edited as numbers."""

  def name(self):
    return 'Rating'

  def parse(self, value):
    return datastore_types.Rating(value)

  def python_type(self):
    return datastore_types.Rating
class NoneType(DataType):
  """The None value; any submitted form value parses back to None."""

  def name(self):
    return 'None'

  def parse(self, value):
    return None

  def format(self, value):
    return 'None'
# Maps each canonical Python/datastore value class to the DataType
# singleton that knows how to format, parse and render values of that
# class.  Uses the Python 2 `types` aliases (StringType, LongType, ...).
_DATA_TYPES = {
  types.NoneType: NoneType(),
  types.StringType: StringType(),
  types.UnicodeType: StringType(),
  datastore_types.Text: TextType(),
  datastore_types.Blob: BlobType(),
  types.BooleanType: BoolType(),
  types.IntType: IntType(),
  types.LongType: LongType(),
  types.FloatType: FloatType(),
  datetime.datetime: TimeType(),
  users.User: UserType(),
  datastore_types.Key: ReferenceType(),
  types.ListType: ListType(),
  datastore_types.Email: EmailType(),
  datastore_types.Category: CategoryType(),
  datastore_types.Link: LinkType(),
  datastore_types.GeoPt: GeoPtType(),
  datastore_types.IM: ImType(),
  datastore_types.PhoneNumber: PhoneNumberType(),
  datastore_types.PostalAddress: PostalAddressType(),
  datastore_types.Rating: RatingType(),
  }

# Reverse lookup built once at import time: friendly DataType name
# (e.g. 'string', 'Key') -> DataType singleton.
_NAMED_DATA_TYPES = {}
for data_type in _DATA_TYPES.values():
  _NAMED_DATA_TYPES[data_type.name()] = data_type
def _ParseCronYaml():
"""Load the cron.yaml file and parse it."""
cronyaml_files = 'cron.yaml', 'cron.yml'
for cronyaml in cronyaml_files:
try:
fh = open(cronyaml, "r")
except IOError:
continue
try:
cron_info = croninfo.LoadSingleCron(fh)
return cron_info
finally:
fh.close()
return None
def main():
  """Wire up the admin-console request handlers and serve one CGI request."""
  page_classes = [
      DatastoreQueryHandler,
      DatastoreEditHandler,
      DatastoreBatchEditHandler,
      InteractivePageHandler,
      InteractiveExecuteHandler,
      MemcachePageHandler,
      ImageHandler,
  ]
  if HAVE_CRON:
    # Cron support is optional; expose its page first only when available.
    page_classes.insert(0, CronPageHandler)
  handlers = [('.*' + page.PATH, page) for page in page_classes]
  handlers.append(('.*', DefaultPageHandler))
  application = webapp.WSGIApplication(handlers, debug=_DEBUG)
  wsgiref.handlers.CGIHandler().run(application)
import django
if django.VERSION[:2] < (0, 97):
  # Django releases before 0.97 lack the 'safe' template filter; register
  # a no-op implementation so templates using |safe still render.
  from django.template import defaultfilters
  def safe(text, dummy=None):
    return text
  defaultfilters.register.filter("safe", safe)

if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Support for polymorphic models and queries.
The Model class on its own is only able to support functional polymorphism.
It is possible to create a subclass of Model and then subclass that one as
many generations as necessary and those classes will share all the same
properties and behaviors. The problem is that subclassing Model in this way
places each subclass in their own Kind. This means that it is not possible
to do polymorphic queries. Building a query on a base class will only return
instances of that class from the Datastore, while queries on a subclass will
only return those instances.
This module allows applications to specify class hierarchies that support
polymorphic queries.
"""
from google.appengine.ext import db
# Registry mapping class-key tuples (e.g. ('Foo', 'Bar')) to the PolyModel
# subclass implementing them; populated by the PolymorphicClass metaclass
# and consulted by PolyModel.from_entity().
_class_map = {}

# Name of the entity property that stores the class-key discriminator.
_CLASS_KEY_PROPERTY = 'class'
class _ClassKeyProperty(db.ListProperty):
  """Derived list-of-strings property holding a model's class key.

  The class key names every class on the path from the hierarchy root
  down to the instance's own class, e.g.:

    class Foo(PolyModel): ...
    class Bar(Foo): ...
    class Baz(Bar): ...

    Foo.class_key() == ['Foo']
    Bar.class_key() == ['Foo', 'Bar']
    Baz.class_key() == ['Foo', 'Bar', 'Baz']

  The value is computed from __class_hierarchy__ on read and can never be
  assigned.
  """

  def __init__(self, name):
    super(_ClassKeyProperty, self).__init__(
        name=name, item_type=str, default=None)

  def __set__(self, *args):
    # The value is derived from the class hierarchy; assignment is an error.
    raise db.DerivedPropertyError(
        'Class-key is a derived property and cannot be set.')

  def __get__(self, model_instance, model_class):
    # Class-level access behaves like a normal descriptor.
    if model_instance is None:
      return self
    # Instance access: compute the key from the class hierarchy.
    return [hierarchy_class.__name__
            for hierarchy_class in model_class.__class_hierarchy__]
class PolymorphicClass(db.PropertiedClass):
  """Meta-class for initializing PolymorphicClasses.

  This class extends PropertiedClass to add a few static attributes to
  new polymorphic classes necessary for their correct functioning.
  """

  def __init__(cls, name, bases, dct):
    """Initializes a class that belongs to a polymorphic hierarchy.

    This method configures a few built-in attributes of polymorphic
    models:

      __root_class__: If the new class is a root class, __root_class__ is
        set to itself so that its subclasses can quickly know what the
        root of their hierarchy is and what kind they are stored in.

      __class_hierarchy__: List of classes describing the new model's
        place in the class hierarchy.  The first element is always the
        root element while the last element is the new class itself.
        For example:

          class Foo(PolymorphicClass): ...
          class Bar(Foo): ...
          class Baz(Bar): ...

          Foo.__class_hierarchy__ == [Foo]
          Bar.__class_hierarchy__ == [Foo, Bar]
          Baz.__class_hierarchy__ == [Foo, Bar, Baz]

    Unless the class is a root class or PolyModel itself, it is not
    inserted in to the kind-map like other models.  However, all
    polymorphic classes are inserted in to the class-map which maps the
    class-key to implementation.  This class key is consulted using the
    polymorphic instance's discriminator (the 'class' property of the
    entity) when loading from the datastore.
    """
    if name == 'PolyModel' or PolyModel not in bases:
      # PolyModel itself, or a non-root subclass: initialize properties
      # but bypass PropertiedClass.__init__ so the class is NOT added to
      # the kind-map (all hierarchy members share the root's kind).
      db._initialize_properties(cls, name, bases, dct)
      super(db.PropertiedClass, cls).__init__(name, bases, dct)
    else:
      # Root class (direct child of PolyModel): record itself as root and
      # take the normal PropertiedClass path.
      cls.__root_class__ = cls
      super(PolymorphicClass, cls).__init__(name, bases, dct)

    if name == 'PolyModel':
      # PolyModel itself belongs to no hierarchy; nothing more to do.
      return

    if cls is not cls.__root_class__:
      # Non-root subclass: extend the hierarchy of the nearest PolyModel
      # base with this class.
      poly_class = None
      for base in cls.__bases__:
        if issubclass(base, PolyModel):
          poly_class = base
          break
      else:
        # for/else: no base subclasses PolyModel.
        raise db.ConfigurationError(
            "Polymorphic class '%s' does not inherit from PolyModel."
            % cls.__name__)

      cls.__class_hierarchy__ = poly_class.__class_hierarchy__ + [cls]
    else:
      cls.__class_hierarchy__ = [cls]

    # Register by class-key so from_entity() can find the implementation.
    _class_map[cls.class_key()] = cls
class PolyModel(db.Model):
  """Base-class for models that supports polymorphic queries.

  Use this class to build hierarchies that can be queried based
  on their types.

  Example:

    consider the following model hierarchy:

          +------+
          |Animal|
          +------+
             |
      +-----------------+
      |                 |
    +------+         +------+
    |Canine|         |Feline|
    +------+         +------+
      |                 |
    +-------+        +-------+
    |       |        |       |
  +---+  +----+    +---+  +-------+
  |Dog|  |Wolf|    |Cat|  |Panther|
  +---+  +----+    +---+  +-------+

  This class hierarchy has three levels.  The first is the "root class".
  All models in a single class hierarchy must inherit from this root.
  All models in the hierarchy are stored as the same kind as the root
  class.  For example, Panther entities when stored to the datastore are
  of the kind 'Animal'.  Querying against the Animal kind will retrieve
  Cats, Dogs and Canines, for example, that match your query.  Different
  classes stored in the root class' kind are identified by their
  class-key.  When loaded from the datastore, it is mapped to the
  appropriate implementation class.

  Polymorphic properties:

    Properties that are defined in a given base-class within a hierarchy
    are stored in the datastore for all sub-classes only.  So, if the
    Feline class had a property called 'whiskers', the Cat and Panther
    entities would also have whiskers, but not Animal, Canine, Dog or
    Wolf.

  Polymorphic queries:

    When written to the datastore, all polymorphic objects automatically
    have a property called 'class' that you can query against.  Using
    this property it is possible to easily write a GQL query against any
    sub-hierarchy.  For example, to fetch only Canine objects, including
    all Dogs and Wolves:

      db.GqlQuery("SELECT * FROM Animal WHERE class='Canine'")

    An alternate method is to use the 'all' or 'gql' methods of the
    Canine class:

      Canine.all()
      Canine.gql('')

    The 'class' property is not meant to be used by your code other than
    for queries.  Since it is supposed to represent the real Python class
    it is intended to be hidden from view.

  Root class:

    The root class is the class from which all other classes of the
    hierarchy inherit.  Each hierarchy has a single root class.  A class
    is a root class if it is an immediate child of PolyModel.  The
    subclasses of the root class are all the same kind as the root class.
    In other words:

      Animal.kind() == Feline.kind() == Panther.kind() == 'Animal'
  """

  __metaclass__ = PolymorphicClass

  # Discriminator property storing the class-key (see _ClassKeyProperty).
  _class = _ClassKeyProperty(name=_CLASS_KEY_PROPERTY)

  def __new__(cls, *args, **kwds):
    """Prevents direct instantiation of PolyModel."""
    if cls is PolyModel:
      raise NotImplementedError()
    return super(PolyModel, cls).__new__(cls, *args, **kwds)

  @classmethod
  def kind(cls):
    """Get kind of polymorphic model.

    Overridden so that all subclasses of root classes are the same kind
    as the root.

    Returns:
      Kind of entity to write to datastore.
    """
    if cls is cls.__root_class__:
      return super(PolyModel, cls).kind()
    else:
      return cls.__root_class__.kind()

  @classmethod
  def class_key(cls):
    """Calculate the class-key for this class.

    Returns:
      Class key for class.  This is the tuple of class names of the
      hierarchy, starting with the root class and walking its way down
      to cls.
    """
    if not hasattr(cls, '__class_hierarchy__'):
      raise NotImplementedError(
          'Cannot determine class key without class hierarchy')
    return tuple(cls.class_name() for cls in cls.__class_hierarchy__)

  @classmethod
  def class_name(cls):
    """Calculate class name for this class.

    Returns name to use for each class's element within its class-key.
    Used to discriminate between different classes within a class
    hierarchy's Datastore kind.

    The presence of this method allows developers to use a different
    class name in the datastore from what is used in Python code.  This
    is useful, for example, for renaming classes without having to
    migrate instances already written to the datastore.  For example, to
    rename a polymorphic class Contact to SimpleContact, you could
    convert:

      # Class key is ['Information']
      class Information(PolyModel): ...

      # Class key is ['Information', 'Contact']
      class Contact(Information): ...

    to:

      # Class key is still ['Information', 'Contact']
      class SimpleContact(Information):
        ...
        @classmethod
        def class_name(cls):
          return 'Contact'

      # Class key is ['Information', 'Contact', 'ExtendedContact']
      class ExtendedContact(SimpleContact): ...

    This would ensure that all objects written previously using the old
    class name would still be loaded.

    Returns:
      Name of this class.
    """
    return cls.__name__

  @classmethod
  def from_entity(cls, entity):
    """Load from entity to class based on discriminator.

    Rather than instantiating a new Model instance based on the kind
    mapping, this creates an instance of the correct model class based
    on the entity's class-key.

    Args:
      entity: Entity loaded directly from datastore.

    Raises:
      KindError when there is no class mapping based on discriminator.
    """
    if (_CLASS_KEY_PROPERTY in entity and
        tuple(entity[_CLASS_KEY_PROPERTY]) != cls.class_key()):
      # The entity belongs to a different class in the hierarchy;
      # dispatch to the implementation registered for its class-key.
      key = tuple(entity[_CLASS_KEY_PROPERTY])
      try:
        poly_class = _class_map[key]
      except KeyError:
        raise db.KindError('No implementation for class \'%s\'' % key)
      return poly_class.from_entity(entity)
    return super(PolyModel, cls).from_entity(entity)

  @classmethod
  def all(cls):
    """Get all instances of a class hierarchy.

    Returns:
      Query with filter set to match this class' discriminator.
    """
    query = super(PolyModel, cls).all()
    if cls != cls.__root_class__:
      # Restrict to this sub-hierarchy via the 'class' discriminator.
      query.filter(_CLASS_KEY_PROPERTY + ' =', cls.class_name())
    return query

  @classmethod
  def gql(cls, query_string, *args, **kwds):
    """Returns a polymorphic query using GQL query string.

    This query is polymorphic in that it has its filters configured in a
    way to retrieve instances of the model or an instance of a subclass
    of the model.

    Args:
      query_string: properly formatted GQL query string with the
        'SELECT * FROM <entity>' part omitted
      *args: rest of the positional arguments used to bind numeric
        references in the query.
      **kwds: dictionary-based arguments (for named parameters).
    """
    if cls == cls.__root_class__:
      return super(PolyModel, cls).gql(query_string, *args, **kwds)
    else:
      from google.appengine.ext import gql

      query = db.GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string))

      # Inject a class-discriminator filter directly into the compiled
      # query's filter map.  NOTE(review): this relies on GqlQuery
      # internals (_proto_query, the 'nop' operation marker) -- verify
      # against the gql module before changing.
      query_filter = [('nop',
                       [gql.Literal(cls.class_name())])]
      query._proto_query.filters()[('class', '=')] = query_filter

      query.bind(*args, **kwds)
      return query
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Support for polymorphic models and queries.
The Model class on its own is only able to support functional polymorphism.
It is possible to create a subclass of Model and then subclass that one as
many generations as necessary and those classes will share all the same
properties and behaviors. The problem is that subclassing Model in this way
places each subclass in their own Kind. This means that it is not possible
to do polymorphic queries. Building a query on a base class will only return
instances of that class from the Datastore, while queries on a subclass will
only return those instances.
This module allows applications to specify class hierarchies that support
polymorphic queries.
"""
from google.appengine.ext import db
# Registry mapping class-key tuples (e.g. ('Foo', 'Bar')) to the PolyModel
# subclass implementing them; populated by the PolymorphicClass metaclass
# and consulted by PolyModel.from_entity().
_class_map = {}

# Name of the entity property that stores the class-key discriminator.
_CLASS_KEY_PROPERTY = 'class'
class _ClassKeyProperty(db.ListProperty):
  """Derived list-of-strings property holding a model's class key.

  The class key names every class on the path from the hierarchy root
  down to the instance's own class, e.g.:

    class Foo(PolyModel): ...
    class Bar(Foo): ...
    class Baz(Bar): ...

    Foo.class_key() == ['Foo']
    Bar.class_key() == ['Foo', 'Bar']
    Baz.class_key() == ['Foo', 'Bar', 'Baz']

  The value is computed from __class_hierarchy__ on read and can never be
  assigned.
  """

  def __init__(self, name):
    super(_ClassKeyProperty, self).__init__(
        name=name, item_type=str, default=None)

  def __set__(self, *args):
    # The value is derived from the class hierarchy; assignment is an error.
    raise db.DerivedPropertyError(
        'Class-key is a derived property and cannot be set.')

  def __get__(self, model_instance, model_class):
    # Class-level access behaves like a normal descriptor.
    if model_instance is None:
      return self
    # Instance access: compute the key from the class hierarchy.
    return [hierarchy_class.__name__
            for hierarchy_class in model_class.__class_hierarchy__]
class PolymorphicClass(db.PropertiedClass):
  """Meta-class for initializing PolymorphicClasses.

  This class extends PropertiedClass to add a few static attributes to
  new polymorphic classes necessary for their correct functioning.
  """

  def __init__(cls, name, bases, dct):
    """Initializes a class that belongs to a polymorphic hierarchy.

    This method configures a few built-in attributes of polymorphic
    models:

      __root_class__: If the new class is a root class, __root_class__ is
        set to itself so that its subclasses can quickly know what the
        root of their hierarchy is and what kind they are stored in.

      __class_hierarchy__: List of classes describing the new model's
        place in the class hierarchy.  The first element is always the
        root element while the last element is the new class itself.
        For example:

          class Foo(PolymorphicClass): ...
          class Bar(Foo): ...
          class Baz(Bar): ...

          Foo.__class_hierarchy__ == [Foo]
          Bar.__class_hierarchy__ == [Foo, Bar]
          Baz.__class_hierarchy__ == [Foo, Bar, Baz]

    Unless the class is a root class or PolyModel itself, it is not
    inserted in to the kind-map like other models.  However, all
    polymorphic classes are inserted in to the class-map which maps the
    class-key to implementation.  This class key is consulted using the
    polymorphic instance's discriminator (the 'class' property of the
    entity) when loading from the datastore.
    """
    if name == 'PolyModel' or PolyModel not in bases:
      # PolyModel itself, or a non-root subclass: initialize properties
      # but bypass PropertiedClass.__init__ so the class is NOT added to
      # the kind-map (all hierarchy members share the root's kind).
      db._initialize_properties(cls, name, bases, dct)
      super(db.PropertiedClass, cls).__init__(name, bases, dct)
    else:
      # Root class (direct child of PolyModel): record itself as root and
      # take the normal PropertiedClass path.
      cls.__root_class__ = cls
      super(PolymorphicClass, cls).__init__(name, bases, dct)

    if name == 'PolyModel':
      # PolyModel itself belongs to no hierarchy; nothing more to do.
      return

    if cls is not cls.__root_class__:
      # Non-root subclass: extend the hierarchy of the nearest PolyModel
      # base with this class.
      poly_class = None
      for base in cls.__bases__:
        if issubclass(base, PolyModel):
          poly_class = base
          break
      else:
        # for/else: no base subclasses PolyModel.
        raise db.ConfigurationError(
            "Polymorphic class '%s' does not inherit from PolyModel."
            % cls.__name__)

      cls.__class_hierarchy__ = poly_class.__class_hierarchy__ + [cls]
    else:
      cls.__class_hierarchy__ = [cls]

    # Register by class-key so from_entity() can find the implementation.
    _class_map[cls.class_key()] = cls
class PolyModel(db.Model):
  """Base-class for models that supports polymorphic queries.

  Use this class to build hierarchies that can be queried based
  on their types.

  Example:

    consider the following model hierarchy:

          +------+
          |Animal|
          +------+
             |
      +-----------------+
      |                 |
    +------+         +------+
    |Canine|         |Feline|
    +------+         +------+
      |                 |
    +-------+        +-------+
    |       |        |       |
  +---+  +----+    +---+  +-------+
  |Dog|  |Wolf|    |Cat|  |Panther|
  +---+  +----+    +---+  +-------+

  This class hierarchy has three levels.  The first is the "root class".
  All models in a single class hierarchy must inherit from this root.
  All models in the hierarchy are stored as the same kind as the root
  class.  For example, Panther entities when stored to the datastore are
  of the kind 'Animal'.  Querying against the Animal kind will retrieve
  Cats, Dogs and Canines, for example, that match your query.  Different
  classes stored in the root class' kind are identified by their
  class-key.  When loaded from the datastore, it is mapped to the
  appropriate implementation class.

  Polymorphic properties:

    Properties that are defined in a given base-class within a hierarchy
    are stored in the datastore for all sub-classes only.  So, if the
    Feline class had a property called 'whiskers', the Cat and Panther
    entities would also have whiskers, but not Animal, Canine, Dog or
    Wolf.

  Polymorphic queries:

    When written to the datastore, all polymorphic objects automatically
    have a property called 'class' that you can query against.  Using
    this property it is possible to easily write a GQL query against any
    sub-hierarchy.  For example, to fetch only Canine objects, including
    all Dogs and Wolves:

      db.GqlQuery("SELECT * FROM Animal WHERE class='Canine'")

    An alternate method is to use the 'all' or 'gql' methods of the
    Canine class:

      Canine.all()
      Canine.gql('')

    The 'class' property is not meant to be used by your code other than
    for queries.  Since it is supposed to represent the real Python class
    it is intended to be hidden from view.

  Root class:

    The root class is the class from which all other classes of the
    hierarchy inherit.  Each hierarchy has a single root class.  A class
    is a root class if it is an immediate child of PolyModel.  The
    subclasses of the root class are all the same kind as the root class.
    In other words:

      Animal.kind() == Feline.kind() == Panther.kind() == 'Animal'
  """

  __metaclass__ = PolymorphicClass

  # Discriminator property storing the class-key (see _ClassKeyProperty).
  _class = _ClassKeyProperty(name=_CLASS_KEY_PROPERTY)

  def __new__(cls, *args, **kwds):
    """Prevents direct instantiation of PolyModel."""
    if cls is PolyModel:
      raise NotImplementedError()
    return super(PolyModel, cls).__new__(cls, *args, **kwds)

  @classmethod
  def kind(cls):
    """Get kind of polymorphic model.

    Overridden so that all subclasses of root classes are the same kind
    as the root.

    Returns:
      Kind of entity to write to datastore.
    """
    if cls is cls.__root_class__:
      return super(PolyModel, cls).kind()
    else:
      return cls.__root_class__.kind()

  @classmethod
  def class_key(cls):
    """Calculate the class-key for this class.

    Returns:
      Class key for class.  This is the tuple of class names of the
      hierarchy, starting with the root class and walking its way down
      to cls.
    """
    if not hasattr(cls, '__class_hierarchy__'):
      raise NotImplementedError(
          'Cannot determine class key without class hierarchy')
    return tuple(cls.class_name() for cls in cls.__class_hierarchy__)

  @classmethod
  def class_name(cls):
    """Calculate class name for this class.

    Returns name to use for each class's element within its class-key.
    Used to discriminate between different classes within a class
    hierarchy's Datastore kind.

    The presence of this method allows developers to use a different
    class name in the datastore from what is used in Python code.  This
    is useful, for example, for renaming classes without having to
    migrate instances already written to the datastore.  For example, to
    rename a polymorphic class Contact to SimpleContact, you could
    convert:

      # Class key is ['Information']
      class Information(PolyModel): ...

      # Class key is ['Information', 'Contact']
      class Contact(Information): ...

    to:

      # Class key is still ['Information', 'Contact']
      class SimpleContact(Information):
        ...
        @classmethod
        def class_name(cls):
          return 'Contact'

      # Class key is ['Information', 'Contact', 'ExtendedContact']
      class ExtendedContact(SimpleContact): ...

    This would ensure that all objects written previously using the old
    class name would still be loaded.

    Returns:
      Name of this class.
    """
    return cls.__name__

  @classmethod
  def from_entity(cls, entity):
    """Load from entity to class based on discriminator.

    Rather than instantiating a new Model instance based on the kind
    mapping, this creates an instance of the correct model class based
    on the entity's class-key.

    Args:
      entity: Entity loaded directly from datastore.

    Raises:
      KindError when there is no class mapping based on discriminator.
    """
    if (_CLASS_KEY_PROPERTY in entity and
        tuple(entity[_CLASS_KEY_PROPERTY]) != cls.class_key()):
      # The entity belongs to a different class in the hierarchy;
      # dispatch to the implementation registered for its class-key.
      key = tuple(entity[_CLASS_KEY_PROPERTY])
      try:
        poly_class = _class_map[key]
      except KeyError:
        raise db.KindError('No implementation for class \'%s\'' % key)
      return poly_class.from_entity(entity)
    return super(PolyModel, cls).from_entity(entity)

  @classmethod
  def all(cls):
    """Get all instances of a class hierarchy.

    Returns:
      Query with filter set to match this class' discriminator.
    """
    query = super(PolyModel, cls).all()
    if cls != cls.__root_class__:
      # Restrict to this sub-hierarchy via the 'class' discriminator.
      query.filter(_CLASS_KEY_PROPERTY + ' =', cls.class_name())
    return query

  @classmethod
  def gql(cls, query_string, *args, **kwds):
    """Returns a polymorphic query using GQL query string.

    This query is polymorphic in that it has its filters configured in a
    way to retrieve instances of the model or an instance of a subclass
    of the model.

    Args:
      query_string: properly formatted GQL query string with the
        'SELECT * FROM <entity>' part omitted
      *args: rest of the positional arguments used to bind numeric
        references in the query.
      **kwds: dictionary-based arguments (for named parameters).
    """
    if cls == cls.__root_class__:
      return super(PolyModel, cls).gql(query_string, *args, **kwds)
    else:
      from google.appengine.ext import gql

      query = db.GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string))

      # Inject a class-discriminator filter directly into the compiled
      # query's filter map.  NOTE(review): this relies on GqlQuery
      # internals (_proto_query, the 'nop' operation marker) -- verify
      # against the gql module before changing.
      query_filter = [('nop',
                       [gql.Literal(cls.class_name())])]
      query._proto_query.filters()[('class', '=')] = query_filter

      query.bind(*args, **kwds)
      return query
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple, schema-based database abstraction layer for the datastore.
Modeled after Django's abstraction layer on top of SQL databases,
http://www.djangoproject.com/documentation/model_api/. Ours is a little simpler
and a lot less code because the datastore is so much simpler than SQL
databases.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want to
publish a story with title, body, and created date, you would do it like this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
You can create a new Story in the datastore with this usage pattern:
story = Story(title='My title')
story.body = 'My body'
story.put()
You query for Story entities using built in query interfaces that map directly
to the syntax and semantics of the datastore:
stories = Story.all().filter('date >=', yesterday).order('-date')
for story in stories:
print story.title
The Property declarations enforce types by performing validation on assignment.
For example, the DateTimeProperty enforces that you assign valid datetime
objects, and if you supply the "required" option for a property, you will not
be able to assign None to that property.
We also support references between models, so if a story has comments, you
would represent it like this:
class Comment(db.Model):
story = db.ReferenceProperty(Story)
body = db.TextProperty()
When you get a story out of the datastore, the story reference is resolved
automatically the first time it is referenced, which makes it easy to use
model instances without performing additional queries by hand:
comment = Comment.get(key)
print comment.story.title
Likewise, you can access the set of comments that refer to each story through
this property through a reverse reference called comment_set, which is a Query
preconfigured to return all matching comments:
story = Story.get(key)
for comment in story.comment_set:
print comment.body
"""
import copy
import datetime
import logging
import re
import time
import urlparse
import warnings
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
# Re-export the datastore error hierarchy at db module level so users of
# this API do not need to import datastore_errors directly.
Error = datastore_errors.Error
BadValueError = datastore_errors.BadValueError
BadPropertyError = datastore_errors.BadPropertyError
BadRequestError = datastore_errors.BadRequestError
EntityNotFoundError = datastore_errors.EntityNotFoundError
BadArgumentError = datastore_errors.BadArgumentError
QueryNotFoundError = datastore_errors.QueryNotFoundError
TransactionNotFoundError = datastore_errors.TransactionNotFoundError
Rollback = datastore_errors.Rollback
TransactionFailedError = datastore_errors.TransactionFailedError
BadFilterError = datastore_errors.BadFilterError
BadQueryError = datastore_errors.BadQueryError
BadKeyError = datastore_errors.BadKeyError
InternalError = datastore_errors.InternalError
NeedIndexError = datastore_errors.NeedIndexError
Timeout = datastore_errors.Timeout

# Property validation failures are reported as BadValueError.
ValidationError = BadValueError

# Re-export the datastore value types for the same convenience.
Key = datastore_types.Key
Category = datastore_types.Category
Link = datastore_types.Link
Email = datastore_types.Email
GeoPt = datastore_types.GeoPt
IM = datastore_types.IM
PhoneNumber = datastore_types.PhoneNumber
PostalAddress = datastore_types.PostalAddress
Rating = datastore_types.Rating
Text = datastore_types.Text
Blob = datastore_types.Blob
ByteString = datastore_types.ByteString

# Registry of kind name -> implementing Model class, consulted by
# class_for_kind().  Presumably populated as model classes are defined --
# see the metaclass machinery later in this file.
_kind_map = {}

# Sentinel object.  NOTE(review): looks like a marker for self-referencing
# ReferenceProperty declarations -- confirm where it is consumed.
_SELF_REFERENCE = object()

# Attribute names that may never be used as property names (in addition
# to dir(Model) and '__'-wrapped names; see check_reserved_word()).
_RESERVED_WORDS = set(['key_name'])
class NotSavedError(Error):
  """Raised when an operation that requires a saved entity gets an unsaved one."""
class KindError(BadValueError):
  """Raised when an entity is used with the wrong Model class (kind mismatch)."""
class PropertyError(Error):
  """Raised when a non-existent property is referenced."""
class DuplicatePropertyError(Error):
  """Raised when a property name is duplicated in a model definition."""
class ConfigurationError(Error):
  """Raised when a property or model is improperly configured."""
class ReservedWordError(Error):
  """Raised when a property is defined using a reserved name."""
class DerivedPropertyError(Error):
  """Raised on an attempt to assign a value to a derived property."""
# Python types that may be stored in a declared (non-dynamic) model property.
_ALLOWED_PROPERTY_TYPES = set([
  basestring,
  str,
  unicode,
  bool,
  int,
  long,
  float,
  Key,
  datetime.datetime,
  datetime.date,
  datetime.time,
  Blob,
  ByteString,
  Text,
  users.User,
  Category,
  Link,
  Email,
  GeoPt,
  IM,
  PhoneNumber,
  PostalAddress,
  Rating,
  ])
# Dynamic (Expando) properties additionally allow lists, tuples and None.
_ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
_ALLOWED_EXPANDO_PROPERTY_TYPES.update((list, tuple, type(None)))
# Comparison operators recognized in Query filter strings.
_OPERATORS = ['<', '<=', '>', '>=', '=', '==', '!=', 'in']
# Matches "<property> [<operator>]" filter strings, e.g. "age >=".
# Fix: use a raw string so the regex escapes (\s) are passed to re verbatim
# instead of relying on Python leaving unknown string escapes untouched.
_FILTER_REGEX = re.compile(
    r'^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(_OPERATORS),
    re.IGNORECASE | re.UNICODE)
def class_for_kind(kind):
  """Look up the Model subclass registered for a datastore kind.

  Args:
    kind: Entity kind string.

  Returns:
    The Model subclass that implements the given kind.

  Raises:
    KindError: when no class has been registered for the kind.
  """
  if kind not in _kind_map:
    raise KindError('No implementation for kind \'%s\'' % kind)
  return _kind_map[kind]
def check_reserved_word(attr_name):
  """Raise an exception if an attribute name is reserved.

  Names matching the datastore reserved-name pattern (dunder names) are
  rejected, as are names in _RESERVED_WORDS and attributes of Model.

  Args:
    attr_name: Name to check to see if it is a reserved word.

  Raises:
    ReservedWordError: when attr_name is determined to be a reserved word.
  """
  if datastore_types.RESERVED_PROPERTY_NAME.match(attr_name):
    raise ReservedWordError(
        "Cannot define property. All names both beginning and "
        "ending with '__' are reserved.")
  reserved = attr_name in _RESERVED_WORDS or attr_name in dir(Model)
  if reserved:
    raise ReservedWordError(
        "Cannot define property using reserved word '%(attr_name)s'. "
        "If you would like to use this name in the datastore consider "
        "using a different name like %(attr_name)s_ and adding "
        "name='%(attr_name)s' to the parameter list of the property "
        "definition." % {'attr_name': attr_name})
def _initialize_properties(model_class, name, bases, dct):
  """Collect and configure the Property attributes of a Model class.

  Merges inherited _properties from each base class, then registers every
  Property found in the new class dictionary, rejecting duplicates and
  reserved names.

  Args:
    model_class: Model class whose _properties dict is being built.
    name: Name of the new class (unused here; kept for metaclass symmetry).
    bases: Base classes of the new class.
    dct: Dictionary of new attribute definitions for the class.

  Raises:
    DuplicatePropertyError: when a property name is defined more than once.
    ReservedWordError: when a property uses a reserved name.
  """
  model_class._properties = {}
  defined = set()
  for base in bases:
    if not hasattr(base, '_properties'):
      continue
    property_keys = base._properties.keys()
    duplicate_properties = defined.intersection(property_keys)
    if duplicate_properties:
      raise DuplicatePropertyError(
          'Duplicate properties in base class %s already defined: %s' %
          (base.__name__, list(duplicate_properties)))
    defined.update(property_keys)
    model_class._properties.update(base._properties)
  for attr_name, attr in dct.items():
    if not isinstance(attr, Property):
      continue
    check_reserved_word(attr_name)
    if attr_name in defined:
      raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
    defined.add(attr_name)
    model_class._properties[attr_name] = attr
    attr.__property_config__(model_class, attr_name)
class PropertiedClass(type):
  """Meta-class for initializing Model classes properties.

  Used for initializing Properties defined in the context of a model.
  By using a meta-class much of the configuration of a Property
  descriptor becomes implicit. By using this meta-class, descriptors
  that are of class Model are notified about which class they
  belong to and what attribute they are associated with and can
  do appropriate initialization via __property_config__.

  Duplicate properties are not permitted.
  """
  def __init__(cls, name, bases, dct, map_kind=True):
    """Initializes a class that might have property definitions.

    This method is called when a class is created with the PropertiedClass
    meta-class.

    Loads all properties for this model and its base classes in to a dictionary
    for easy reflection via the 'properties' method.

    Configures each property defined in the new class.

    Duplicate properties, either defined in the new class or defined separately
    in two base classes are not permitted.

    Properties may not assigned to names which are in the list of
    _RESERVED_WORDS. It is still possible to store a property using a reserved
    word in the datastore by using the 'name' keyword argument to the Property
    constructor.

    Args:
      cls: Class being initialized.
      name: Name of new class.
      bases: Base classes of new class.
      dct: Dictionary of new definitions for class.
      map_kind: Whether to register the class in the global kind registry.

    Raises:
      DuplicatePropertyError when a property is duplicated either in the new
        class or separately in two base classes.
      ReservedWordError when a property is given a name that is in the list of
        reserved words, attributes of Model and names of the form '__.*__'.
    """
    # Run type.__init__ first so the class object is fully constructed
    # before properties are attached to it.
    super(PropertiedClass, cls).__init__(name, bases, dct)
    _initialize_properties(cls, name, bases, dct)
    if map_kind:
      # Register in the kind registry so class_for_kind() / get() can map
      # entities of this kind back to this class.
      _kind_map[cls.kind()] = cls
class Property(object):
  """Base descriptor class for a typed, persisted attribute of a Model.

  A Property defines the type of a model attribute, which determines how it
  is stored in the datastore and how assigned values are validated.
  Different property types support different options, which change
  validation rules, default values, etc. The simplest example of a property
  is a StringProperty:

     class Story(db.Model):
       title = db.StringProperty()
  """

  # Class-wide counter giving each Property instance a creation ordinal so
  # declaration order can be recovered.
  creation_counter = 0

  def __init__(self, verbose_name=None, name=None, default=None,
               required=False, validator=None, choices=None):
    """Initializes this Property with the given options.

    Args:
      verbose_name: User friendly name of property.
      name: Storage name for property. By default, uses attribute name
        as it is assigned in the Model sub-class.
      default: Default value for property if none is assigned.
      required: Whether property is required.
      validator: User provided method used for validation.
      choices: User provided set of valid property values.
    """
    self.verbose_name = verbose_name
    self.name = name
    self.default = default
    self.required = required
    self.validator = validator
    self.choices = choices
    # Record this instance's position, then advance the class counter.
    self.creation_counter = Property.creation_counter
    Property.creation_counter += 1

  def __property_config__(self, model_class, property_name):
    """Attach this property to its model class.

    Args:
      model_class: Model class which Property will belong to.
      property_name: Attribute name the property was assigned to in the
        model; used as the storage name unless one was passed to __init__.
    """
    self.model_class = model_class
    if self.name is None:
      self.name = property_name

  def __get__(self, model_instance, model_class):
    """Descriptor protocol: return this property's value on an instance.

    Accessed on the class itself (model_instance is None), the property
    object is returned, per the usual descriptor convention. An instance
    that has never had a value assigned yields None.
    """
    if model_instance is None:
      return self
    try:
      return getattr(model_instance, self._attr_name())
    except AttributeError:
      return None

  def __set__(self, model_instance, value):
    """Descriptor protocol: validate and store a value on an instance."""
    setattr(model_instance, self._attr_name(), self.validate(value))

  def default_value(self):
    """Default value for unassigned values.

    Returns:
      Default value as provided by __init__(default).
    """
    return self.default

  def validate(self, value):
    """Assert that the provided value is compatible with this property.

    Args:
      value: Value to validate against this Property.

    Returns:
      A valid value, either the input unchanged or adapted to the
      required type.

    Raises:
      BadValueError: if the value is empty but required, or is not among
        the configured choices.
    """
    if self.empty(value):
      if self.required:
        raise BadValueError('Property %s is required' % self.name)
    elif self.choices:
      if not any(choice == value for choice in self.choices):
        raise BadValueError('Property %s is %r; must be one of %r' %
                            (self.name, value, self.choices))
    if self.validator is not None:
      self.validator(value)
    return value

  def empty(self, value):
    """Determine if value is empty in the context of this property.

    For most kinds this is simple falseness, but subclasses (e.g. boolean
    properties) override it when that test is too blunt.

    Args:
      value: Value to validate against this Property.

    Returns:
      True if this value is considered empty for this Property type.
    """
    return not value

  def get_value_for_datastore(self, model_instance):
    """Datastore representation of this property's current value.

    Subclasses override this to convert model-level values (most notably
    reference properties) into storable datastore values.

    Args:
      model_instance: Instance to fetch datastore value from.

    Returns:
      Value in a form appropriate for storing in a datastore entity.
    """
    return self.__get__(model_instance, model_instance.__class__)

  def make_value_from_datastore(self, value):
    """Convert a raw datastore value to its model-instance representation.

    The base implementation returns the value unchanged; property classes
    that keep a different datatype on the model instance override this.
    Not quite symmetric with get_value_for_datastore() because the model
    instance may not exist yet when values are being collected for a
    constructor call.

    Args:
      value: value retrieved from the datastore entity.

    Returns:
      The value converted for use as a model instance attribute.
    """
    return value

  def _attr_name(self):
    """Attribute name we use for this property in model instances.

    DO NOT USE THIS METHOD.
    """
    return '_' + self.name

  # Datastore type stored by this property; subclasses override.
  data_type = str

  def datastore_type(self):
    """Deprecated backwards-compatible accessor method for self.data_type."""
    return self.data_type
class Model(object):
"""Model is the superclass of all object entities in the datastore.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want
to publish a story with title, body, and created date, you would do it like
this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
A model instance can have a single parent. Model instances without any
parent are root entities. It is possible to efficiently query for
instances by their shared parent. All descendents of a single root
instance also behave as a transaction group. This means that when you
work one member of the group within a transaction all descendents of that
root join the transaction. All operations within a transaction on this
group are ACID.
"""
__metaclass__ = PropertiedClass
def __init__(self,
parent=None,
key_name=None,
_app=None,
_from_entity=False,
**kwds):
"""Creates a new instance of this model.
To create a new entity, you instantiate a model and then call put(),
which saves the entity to the datastore:
person = Person()
person.name = 'Bret'
person.put()
You can initialize properties in the model in the constructor with keyword
arguments:
person = Person(name='Bret')
We initialize all other properties to the default value (as defined by the
properties in the model definition) if they are not provided in the
constructor.
Args:
parent: Parent instance for this instance or None, indicating a top-
level instance.
key_name: Name for new model instance.
_app: Intentionally undocumented.
_from_entity: Intentionally undocumented.
args: Keyword arguments mapping to properties of model.
"""
if key_name == '':
raise BadKeyError('Name cannot be empty.')
elif key_name is not None and not isinstance(key_name, basestring):
raise BadKeyError('Name must be string type, not %s' %
key_name.__class__.__name__)
if parent is not None:
if not isinstance(parent, (Model, Key)):
raise TypeError('Expected Model type; received %s (is %s)' %
(parent, parent.__class__.__name__))
if isinstance(parent, Model) and not parent.has_key():
raise BadValueError(
"%s instance must have a complete key before it can be used as a "
"parent." % parent.kind())
if isinstance(parent, Key):
self._parent_key = parent
self._parent = None
else:
self._parent_key = parent.key()
self._parent = parent
else:
self._parent_key = None
self._parent = None
self._entity = None
self._key_name = key_name
self._app = _app
properties = self.properties()
for prop in self.properties().values():
if prop.name in kwds:
value = kwds[prop.name]
else:
value = prop.default_value()
try:
prop.__set__(self, value)
except DerivedPropertyError, e:
if prop.name in kwds and not _from_entity:
raise
def key(self):
"""Unique key for this entity.
This property is only available if this entity is already stored in the
datastore, so it is available if this entity was fetched returned from a
query, or after put() is called the first time for new entities.
Returns:
Datastore key of persisted entity.
Raises:
NotSavedError when entity is not persistent.
"""
if self.is_saved():
return self._entity.key()
elif self._key_name:
if self._parent_key:
parent_key = self._parent_key
elif self._parent:
parent_key = self._parent.key()
parent = self._parent_key or (self._parent and self._parent.key())
return Key.from_path(self.kind(), self._key_name, parent=parent)
else:
raise NotSavedError()
def _to_entity(self, entity):
"""Copies information from this model to provided entity.
Args:
entity: Entity to save information on.
"""
for prop in self.properties().values():
datastore_value = prop.get_value_for_datastore(self)
if datastore_value == []:
try:
del entity[prop.name]
except KeyError:
pass
else:
entity[prop.name] = datastore_value
  def _populate_internal_entity(self, _entity_class=datastore.Entity):
    """Populates self._entity, saving its state to the datastore.

    After this method is called, calling is_saved() will return True.

    Args:
      _entity_class: Entity class used to build the entity (test hook).

    Returns:
      Populated self._entity
    """
    self._entity = self._populate_entity(_entity_class=_entity_class)
    # Once the entity exists, the key name is carried by the entity itself,
    # so the transient attribute is removed.
    if hasattr(self, '_key_name'):
      del self._key_name
    return self._entity
  def put(self):
    """Writes this model instance to the datastore.

    If this instance is new, we add an entity to the datastore.
    Otherwise, we update this instance, and the key will remain the
    same.

    Returns:
      The key of the instance (either the existing key or a new key).

    Raises:
      TransactionFailedError if the data could not be committed.
    """
    self._populate_internal_entity()
    return datastore.Put(self._entity)
  # Historical alias: instance.save() behaves exactly like instance.put().
  save = put
def _populate_entity(self, _entity_class=datastore.Entity):
"""Internal helper -- Populate self._entity or create a new one
if that one does not exist. Does not change any state of the instance
other than the internal state of the entity.
This method is separate from _populate_internal_entity so that it is
possible to call to_xml without changing the state of an unsaved entity
to saved.
Returns:
self._entity or a new Entity which is not stored on the instance.
"""
if self.is_saved():
entity = self._entity
else:
if self._parent_key is not None:
entity = _entity_class(self.kind(),
parent=self._parent_key,
name=self._key_name,
_app=self._app)
elif self._parent is not None:
entity = _entity_class(self.kind(),
parent=self._parent._entity,
name=self._key_name,
_app=self._app)
else:
entity = _entity_class(self.kind(),
name=self._key_name,
_app=self._app)
self._to_entity(entity)
return entity
  def delete(self):
    """Deletes this entity from the datastore.

    Raises:
      TransactionFailedError if the data could not be committed.
    """
    datastore.Delete(self.key())
    # Clearing the cached entity returns the instance to the unsaved state.
    self._entity = None
  def is_saved(self):
    """Determine if entity is persisted in the datastore.

    New instances of Model do not start out saved in the data. Objects which
    are saved to or loaded from the Datastore will have a True saved state.

    Returns:
      True if object has been persisted to the datastore, otherwise False.
    """
    return self._entity is not None
  def has_key(self):
    """Determine if this model instance has a complete key.

    Ids are not assigned until the data is saved to the Datastore, but
    instances with a key name always have a full key.

    Returns:
      True if the object has been persisted to the datastore or has a key_name,
      otherwise False.
    """
    # NOTE(review): when unsaved but named, this returns the truthy key_name
    # string itself rather than a strict bool; callers use it as a boolean.
    return self.is_saved() or self._key_name
  def dynamic_properties(self):
    """Returns a list of all dynamic properties defined for instance.

    Plain Model instances never have dynamic properties; Expando overrides
    this with a real implementation.
    """
    return []
  def instance_properties(self):
    """Alias for dynamic_properties."""
    return self.dynamic_properties()
def parent(self):
"""Get the parent of the model instance.
Returns:
Parent of contained entity or parent provided in constructor, None if
instance has no parent.
"""
if self._parent is None:
parent_key = self.parent_key()
if parent_key is not None:
self._parent = get(parent_key)
return self._parent
def parent_key(self):
"""Get the parent's key.
This method is useful for avoiding a potential fetch from the datastore
but still get information about the instances parent.
Returns:
Parent key of entity, None if there is no parent.
"""
if self._parent_key is not None:
return self._parent_key
elif self._parent is not None:
return self._parent.key()
elif self._entity is not None:
return self._entity.parent()
else:
return None
  def to_xml(self, _entity_class=datastore.Entity):
    """Generate an XML representation of this model instance.

    atom and gd:namespace properties are converted to XML according to their
    respective schemas. For more information, see:

      http://www.atomenabled.org/developers/syndication/
      http://code.google.com/apis/gdata/common-elements.html

    Args:
      _entity_class: Entity class used to build the entity (test hook).
    """
    # Uses _populate_entity (not _populate_internal_entity) so serializing
    # an unsaved instance does not flip it into the saved state.
    entity = self._populate_entity(_entity_class)
    return entity.ToXml()
@classmethod
def get(cls, keys):
"""Fetch instance from the datastore of a specific Model type using key.
We support Key objects and string keys (we convert them to Key objects
automatically).
Useful for ensuring that specific instance types are retrieved from the
datastore. It also helps that the source code clearly indicates what
kind of object is being retreived. Example:
story = Story.get(story_key)
Args:
keys: Key within datastore entity collection to find; or string key;
or list of Keys or string keys.
Returns:
If a single key was given: a Model instance associated with key
for provided class if it exists in the datastore, otherwise
None; if a list of keys was given: a list whose items are either
a Model instance or None.
Raises:
KindError if any of the retreived objects are not instances of the
type associated with call to 'get'.
"""
results = get(keys)
if results is None:
return None
if isinstance(results, Model):
instances = [results]
else:
instances = results
for instance in instances:
if not(instance is None or isinstance(instance, cls)):
raise KindError('Kind %r is not a subclass of kind %r' %
(instance.kind(), cls.kind()))
return results
@classmethod
def get_by_key_name(cls, key_names, parent=None):
"""Get instance of Model class by its key's name.
Args:
key_names: A single key-name or a list of key-names.
parent: Parent of instances to get. Can be a model or key.
"""
if isinstance(parent, Model):
parent = parent.key()
key_names, multiple = datastore.NormalizeAndTypeCheck(key_names, basestring)
keys = [datastore.Key.from_path(cls.kind(), name, parent=parent)
for name in key_names]
if multiple:
return get(keys)
else:
return get(*keys)
@classmethod
def get_by_id(cls, ids, parent=None):
"""Get instance of Model class by id.
Args:
key_names: A single id or a list of ids.
parent: Parent of instances to get. Can be a model or key.
"""
if isinstance(parent, Model):
parent = parent.key()
ids, multiple = datastore.NormalizeAndTypeCheck(ids, (int, long))
keys = [datastore.Key.from_path(cls.kind(), id, parent=parent)
for id in ids]
if multiple:
return get(keys)
else:
return get(*keys)
@classmethod
def get_or_insert(cls, key_name, **kwds):
"""Transactionally retrieve or create an instance of Model class.
This acts much like the Python dictionary setdefault() method, where we
first try to retrieve a Model instance with the given key name and parent.
If it's not present, then we create a new instance (using the *kwds
supplied) and insert that with the supplied key name.
Subsequent calls to this method with the same key_name and parent will
always yield the same entity (though not the same actual object instance),
regardless of the *kwds supplied. If the specified entity has somehow
been deleted separately, then the next call will create a new entity and
return it.
If the 'parent' keyword argument is supplied, it must be a Model instance.
It will be used as the parent of the new instance of this Model class if
one is created.
This method is especially useful for having just one unique entity for
a specific identifier. Insertion/retrieval is done transactionally, which
guarantees uniqueness.
Example usage:
class WikiTopic(db.Model):
creation_date = db.DatetimeProperty(auto_now_add=True)
body = db.TextProperty(required=True)
# The first time through we'll create the new topic.
wiki_word = 'CommonIdioms'
topic = WikiTopic.get_or_insert(wiki_word,
body='This topic is totally new!')
assert topic.key().name() == 'CommonIdioms'
assert topic.body == 'This topic is totally new!'
# The second time through will just retrieve the entity.
overwrite_topic = WikiTopic.get_or_insert(wiki_word,
body='A totally different message!')
assert topic.key().name() == 'CommonIdioms'
assert topic.body == 'This topic is totally new!'
Args:
key_name: Key name to retrieve or create.
**kwds: Keyword arguments to pass to the constructor of the model class
if an instance for the specified key name does not already exist. If
an instance with the supplied key_name and parent already exists, the
rest of these arguments will be discarded.
Returns:
Existing instance of Model class with the specified key_name and parent
or a new one that has just been created.
Raises:
TransactionFailedError if the specified Model instance could not be
retrieved or created transactionally (due to high contention, etc).
"""
def txn():
entity = cls.get_by_key_name(key_name, parent=kwds.get('parent'))
if entity is None:
entity = cls(key_name=key_name, **kwds)
entity.put()
return entity
return run_in_transaction(txn)
  @classmethod
  def all(cls):
    """Returns a query over all instances of this model from the datastore.

    Returns:
      Query that will retrieve all instances from entity collection.
    """
    return Query(cls)
@classmethod
def gql(cls, query_string, *args, **kwds):
"""Returns a query using GQL query string.
See appengine/ext/gql for more information about GQL.
Args:
query_string: properly formatted GQL query string with the
'SELECT * FROM <entity>' part omitted
*args: rest of the positional arguments used to bind numeric references
in the query.
**kwds: dictionary-based arguments (for named parameters).
"""
return GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string),
*args, **kwds)
@classmethod
def _load_entity_values(cls, entity):
"""Load dynamic properties from entity.
Loads attributes which are not defined as part of the entity in
to the model instance.
Args:
entity: Entity which contain values to search dyanmic properties for.
"""
entity_values = {}
for prop in cls.properties().values():
if prop.name in entity:
try:
value = prop.make_value_from_datastore(entity[prop.name])
entity_values[prop.name] = value
except KeyError:
entity_values[prop.name] = []
return entity_values
@classmethod
def from_entity(cls, entity):
"""Converts the entity representation of this model to an instance.
Converts datastore.Entity instance to an instance of cls.
Args:
entity: Entity loaded directly from datastore.
Raises:
KindError when cls is incorrect model for entity.
"""
if cls.kind() != entity.kind():
raise KindError('Class %s cannot handle kind \'%s\'' %
(repr(cls), entity.kind()))
entity_values = cls._load_entity_values(entity)
instance = cls(None, _from_entity=True, **entity_values)
instance._entity = entity
del instance._key_name
return instance
  @classmethod
  def kind(cls):
    """Returns the datastore kind we use for this model.

    We just use the name of the model for now, ignoring potential collisions.
    """
    return cls.__name__
  @classmethod
  def entity_type(cls):
    """Soon to be removed alias for kind."""
    return cls.kind()
  @classmethod
  def properties(cls):
    """Returns a dictionary of all the properties defined for this model."""
    # Returns a copy so callers cannot mutate the class-level registry.
    return dict(cls._properties)
  @classmethod
  def fields(cls):
    """Soon to be removed alias for properties."""
    return cls.properties()
def get(keys):
  """Fetch the specific Model instance(s) with the given key(s).

  We support Key objects and string keys (we convert them to Key objects
  automatically).

  Args:
    keys: Key within datastore entity collection to find; or string key;
      or list of Keys or string keys.

  Returns:
    For a single key: the associated Model instance if it exists in the
    datastore, otherwise None. For a list of keys: a list whose items are
    either a Model instance or None.
  """
  keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
  try:
    entities = datastore.Get(keys)
  except datastore_errors.EntityNotFoundError:
    # A single-key lookup reports a miss via this exception.
    assert not multiple
    return None
  models = []
  for entity in entities:
    if entity is None:
      models.append(None)
    else:
      models.append(class_for_kind(entity.kind()).from_entity(entity))
  if multiple:
    return models
  assert len(models) == 1
  return models[0]
def put(models):
  """Store one or more Model instances.

  Args:
    models: Model instance or list of Model instances.

  Returns:
    A Key or a list of Keys (corresponding to the argument's plurality).

  Raises:
    TransactionFailedError if the data could not be committed.
  """
  instances, multiple = datastore.NormalizeAndTypeCheck(models, Model)
  keys = datastore.Put([instance._populate_internal_entity()
                        for instance in instances])
  if multiple:
    return keys
  assert len(keys) == 1
  return keys[0]
# Module-level alias mirroring Model.save.
save = put
def delete(models):
  """Delete one or more Model instances (or keys) from the datastore.

  Args:
    models: Model instance, Key, string-encoded key, or a list containing
      any mix of these.

  Raises:
    TransactionFailedError if the data could not be committed.
  """
  models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
      models, (Model, Key, basestring))
  keys = []
  for model_or_key in models_or_keys:
    # Fix: removed the pointless 'key = model_or_key = ...' double
    # assignment; rebinding the loop variable had no effect.
    if isinstance(model_or_key, Model):
      key = model_or_key.key()
    elif isinstance(model_or_key, basestring):
      key = Key(model_or_key)
    else:
      key = model_or_key
    keys.append(key)
  datastore.Delete(keys)
class Expando(Model):
"""Dynamically expandable model.
An Expando does not require (but can still benefit from) the definition
of any properties before it can be used to store information in the
datastore. Properties can be added to an expando object by simply
performing an assignment. The assignment of properties is done on
an instance by instance basis, so it is possible for one object of an
expando type to have different properties from another or even the same
properties with different types. It is still possible to define
properties on an expando, allowing those properties to behave the same
as on any other model.
Example:
import datetime
class Song(db.Expando):
title = db.StringProperty()
crazy = Song(title='Crazy like a diamond',
author='Lucy Sky',
publish_date='yesterday',
rating=5.0)
hoboken = Song(title='The man from Hoboken',
author=['Anthony', 'Lou'],
publish_date=datetime.datetime(1977, 5, 3))
crazy.last_minute_note=db.Text('Get a train to the station.')
Possible Uses:
One use of an expando is to create an object without any specific
structure and later, when your application mature and it in the right
state, change it to a normal model object and define explicit properties.
Additional exceptions for expando:
Protected attributes (ones whose names begin with '_') cannot be used
as dynamic properties. These are names that are reserved for protected
transient (non-persisted) attributes.
Order of lookup:
When trying to set or access an attribute value, any other defined
properties, such as methods and other values in __dict__ take precedence
over values in the datastore.
1 - Because it is not possible for the datastore to know what kind of
property to store on an undefined expando value, setting a property to
None is the same as deleting it from the expando.
2 - Persistent variables on Expando must not begin with '_'. These
variables considered to be 'protected' in Python, and are used
internally.
3 - Expando's dynamic properties are not able to store empty lists.
Attempting to assign an empty list to a dynamic property will raise
ValueError. Static properties on Expando can still support empty
lists but like normal Model properties is restricted from using
None.
"""
_dynamic_properties = None
def __init__(self, parent=None, key_name=None, _app=None, **kwds):
"""Creates a new instance of this expando model.
Args:
parent: Parent instance for this instance or None, indicating a top-
level instance.
key_name: Name for new model instance.
_app: Intentionally undocumented.
args: Keyword arguments mapping to properties of model.
"""
super(Expando, self).__init__(parent, key_name, _app, **kwds)
self._dynamic_properties = {}
for prop, value in kwds.iteritems():
if prop not in self.properties() and value is not None:
setattr(self, prop, value)
def __setattr__(self, key, value):
"""Dynamically set field values that are not defined.
Tries to set the value on the object normally, but failing that
sets the value on the contained entity.
Args:
key: Name of attribute.
value: Value to set for attribute. Must be compatible with
datastore.
Raises:
ValueError on attempt to assign empty list.
"""
check_reserved_word(key)
if key[:1] != '_' and key not in self.properties():
if value == []:
raise ValueError('Cannot store empty list to dynamic property %s' %
key)
if type(value) not in _ALLOWED_EXPANDO_PROPERTY_TYPES:
raise TypeError("Expando cannot accept values of type '%s'." %
type(value).__name__)
if self._dynamic_properties is None:
self._dynamic_properties = {}
self._dynamic_properties[key] = value
else:
super(Expando, self).__setattr__(key, value)
  def __getattr__(self, key):
    """If no explicit attribute defined, retrieve value from entity.

    Only invoked when normal attribute lookup fails, so methods, declared
    properties and instance attributes always take precedence.

    Args:
      key: Name of attribute.

    Raises:
      AttributeError when there is no attribute for key on object or
      contained entity.
    """
    if self._dynamic_properties and key in self._dynamic_properties:
      return self._dynamic_properties[key]
    else:
      # NOTE(review): getattr on the super() proxy resolves only inherited
      # class-level attributes and raises AttributeError otherwise, which
      # matches the documented contract here.
      return getattr(super(Expando, self), key)
def __delattr__(self, key):
"""Remove attribute from expando.
Expando is not like normal entities in that undefined fields
can be removed.
Args:
key: Dynamic property to be deleted.
"""
if self._dynamic_properties and key in self._dynamic_properties:
del self._dynamic_properties[key]
else:
object.__delattr__(self, key)
def dynamic_properties(self):
"""Determine which properties are particular to instance of entity.
Returns:
Set of names which correspond only to the dynamic properties.
"""
if self._dynamic_properties is None:
return []
return self._dynamic_properties.keys()
def _to_entity(self, entity):
"""Store to entity, deleting dynamic properties that no longer exist.
When the expando is saved, it is possible that a given property no longer
exists. In this case, the property will be removed from the saved instance.
Args:
entity: Entity which will receive dynamic properties.
"""
super(Expando, self)._to_entity(entity)
if self._dynamic_properties is None:
self._dynamic_properties = {}
for key, value in self._dynamic_properties.iteritems():
entity[key] = value
all_properties = set(self._dynamic_properties.iterkeys())
all_properties.update(self.properties().iterkeys())
for key in entity.keys():
if key not in all_properties:
del entity[key]
@classmethod
def _load_entity_values(cls, entity):
"""Load dynamic properties from entity.
Expando needs to do a second pass to add the entity values which were
ignored by Model because they didn't have an corresponding predefined
property on the model.
Args:
entity: Entity which contain values to search dyanmic properties for.
"""
entity_values = super(Expando, cls)._load_entity_values(entity)
for key, value in entity.iteritems():
if key not in entity_values:
entity_values[str(key)] = value
return entity_values
class _BaseQuery(object):
  """Base class for both Query and GqlQuery.

  Subclasses must implement _get_query(); iteration, get(), count(),
  fetch() and indexing are all implemented here in terms of it.
  """
  def __init__(self, model_class):
    """Constructor.

    Args:
      model_class: Model class from which entities are constructed.
    """
    self._model_class = model_class
  def _get_query(self):
    """Subclass must override (and not call their super method).

    Returns:
      A datastore.Query instance representing the query.
    """
    raise NotImplementedError
  def run(self):
    """Iterator for this query.

    If you know the number of results you need, consider fetch() instead,
    or use a GQL query with a LIMIT clause. It's more efficient.

    Returns:
      Iterator for this query.
    """
    return _QueryIterator(self._model_class, iter(self._get_query().Run()))
  def __iter__(self):
    """Iterator for this query.

    If you know the number of results you need, consider fetch() instead,
    or use a GQL query with a LIMIT clause. It's more efficient.
    """
    return self.run()
  def get(self):
    """Get first result from this.

    Beware: get() ignores the LIMIT clause on GQL queries.

    Returns:
      First result from running the query if there are any, else None.
    """
    results = self.fetch(1)
    try:
      return results[0]
    except IndexError:
      return None
  def count(self, limit=None):
    """Number of entities this query fetches.

    Beware: count() ignores the LIMIT clause on GQL queries.

    Args:
      limit, a number. If there are more results than this, stop short and
      just return this number. Providing this argument makes the count
      operation more efficient.

    Returns:
      Number of entities this query fetches.
    """
    return self._get_query().Count(limit=limit)
  def fetch(self, limit, offset=0):
    """Return a list of items selected using SQL-like limit and offset.

    Whenever possible, use fetch() instead of iterating over the query
    results with run() or __iter__() . fetch() is more efficient.

    Beware: fetch() ignores the LIMIT clause on GQL queries.

    Args:
      limit: Maximum number of results to return.
      offset: Optional number of results to skip first; default zero.

    Returns:
      A list of db.Model instances. There may be fewer than 'limit'
      results if there aren't enough results to satisfy the request.

    Raises:
      TypeError if limit or offset is not an integer.
      ValueError if limit or offset is negative.
    """
    # 'long' is the Python 2 arbitrary-precision integer type.
    accepted = (int, long)
    if not (isinstance(limit, accepted) and isinstance(offset, accepted)):
      raise TypeError('Arguments to fetch() must be integers')
    if limit < 0 or offset < 0:
      raise ValueError('Arguments to fetch() must be >= 0')
    if limit == 0:
      return []
    raw = self._get_query().Get(limit, offset)
    # Convert each raw datastore entity into a model instance.
    return map(self._model_class.from_entity, raw)
  def __getitem__(self, arg):
    """Support for query[index] and query[start:stop].

    Beware: this ignores the LIMIT clause on GQL queries.

    Args:
      arg: Either a single integer, corresponding to the query[index]
        syntax, or a Python slice object, corresponding to the
        query[start:stop] or query[start:stop:step] syntax.

    Returns:
      A single Model instance when the argument is a single integer.
      A list of Model instances when the argument is a slice.

    Raises:
      ValueError for negative or open-ended slices and indices, or a step
      other than 1; IndexError when a plain index is past the last result;
      TypeError for any other argument type.
    """
    if isinstance(arg, slice):
      start, stop, step = arg.start, arg.stop, arg.step
      if start is None:
        start = 0
      if stop is None:
        raise ValueError('Open-ended slices are not supported')
      if step is None:
        step = 1
      if start < 0 or stop < 0 or step != 1:
        raise ValueError(
            'Only slices with start>=0, stop>=0, step==1 are supported')
      # A slice maps directly onto fetch(limit, offset).
      limit = stop - start
      if limit < 0:
        return []
      return self.fetch(limit, start)
    elif isinstance(arg, (int, long)):
      if arg < 0:
        raise ValueError('Only indices >= 0 are supported')
      # Fetch exactly one result at the requested offset.
      results = self.fetch(1, arg)
      if results:
        return results[0]
      else:
        raise IndexError('The query returned fewer than %d results' % (arg+1))
    else:
      raise TypeError('Only integer indices and slices are supported')
class _QueryIterator(object):
"""Wraps the datastore iterator to return Model instances.
The datastore returns entities. We wrap the datastore iterator to
return Model instances instead.
"""
def __init__(self, model_class, datastore_iterator):
"""Iterator constructor
Args:
model_class: Model class from which entities are constructed.
datastore_iterator: Underlying datastore iterator.
"""
self.__model_class = model_class
self.__iterator = datastore_iterator
def __iter__(self):
"""Iterator on self.
Returns:
Self.
"""
return self
def next(self):
"""Get next Model instance in query results.
Returns:
Next model instance.
Raises:
StopIteration when there are no more results in query.
"""
return self.__model_class.from_entity(self.__iterator.next())
def _normalize_query_parameter(value):
  """Make any necessary type conversions to a query parameter.

  The following conversions are made:
  - Model instances are converted to Key instances. This is necessary so
    that querying reference properties will work.
  - datetime.date objects are converted to datetime.datetime objects (see
    _date_to_datetime for details on this conversion). This is necessary so
    that querying date properties with date objects will work.
  - datetime.time objects are converted to datetime.datetime objects (see
    _time_to_datetime for details on this conversion). This is necessary so
    that querying time properties with time objects will work.

  Args:
    value: The query parameter value.

  Returns:
    The input value, or a converted value if value matches one of the
    conversions specified above.
  """
  if isinstance(value, Model):
    value = value.key()
  # datetime is a subclass of date, so it must be checked first and passed
  # through untouched.
  if isinstance(value, datetime.datetime):
    return value
  if isinstance(value, datetime.date):
    return _date_to_datetime(value)
  if isinstance(value, datetime.time):
    return _time_to_datetime(value)
  return value
class Query(_BaseQuery):
  """A Query instance queries over instances of Models.

  You construct a query with a model class, like this:

     class Story(db.Model):
       title = db.StringProperty()
       date = db.DateTimeProperty()

     query = Query(Story)

  You modify a query with filters and orders like this:

     query.filter('title =', 'Foo')
     query.order('-date')
     query.ancestor(key_or_model_instance)

  Every query can return an iterator, so you access the results of a query
  by iterating over it:

     for story in query:
       print story.title

  For convenience, all of the filtering and ordering methods return "self",
  so the easiest way to use the query interface is to cascade all filters and
  orders in the iterator line like this:

     for story in Query(Story).filter('title =', 'Foo').order('-date'):
       print story.title
  """
  def __init__(self, model_class):
    """Constructs a query over instances of the given Model.

    Args:
      model_class: Model class to build query for.
    """
    super(Query, self).__init__(model_class)
    # Each dict in __query_sets is one datastore-level filter set; more
    # than one entry means the query is a disjunction that must run as a
    # MultiQuery (used to implement 'IN' and '!=').
    self.__query_sets = [{}]
    self.__orderings = []
    self.__ancestor = None
  def _get_query(self,
                 _query_class=datastore.Query,
                 _multi_query_class=datastore.MultiQuery):
    """Build the datastore query object(s) for this Query.

    Args:
      _query_class: Class used for each individual filter set.
      _multi_query_class: Class used to combine multiple filter sets.

    Returns:
      A _query_class instance, or a _multi_query_class instance when
      several underlying queries are required.
    """
    queries = []
    for query_set in self.__query_sets:
      query = _query_class(self._model_class.kind(), query_set)
      if self.__ancestor is not None:
        query.Ancestor(self.__ancestor)
      queries.append(query)
    # Overriding only one of the two query classes is unsupported, because
    # a disjunction silently switches between them.
    if (_query_class != datastore.Query and
        _multi_query_class == datastore.MultiQuery):
      warnings.warn(
          'Custom _query_class specified without corresponding custom'
          ' _query_multi_class. Things will break if you use queries with'
          ' the "IN" or "!=" operators.', RuntimeWarning)
      if len(queries) > 1:
        raise datastore_errors.BadArgumentError(
            'Query requires multiple subqueries to satisfy. If _query_class'
            ' is overridden, _multi_query_class must also be overridden.')
    elif (_query_class == datastore.Query and
          _multi_query_class != datastore.MultiQuery):
      # NOTE(review): bare BadArgumentError here vs the qualified
      # datastore_errors.BadArgumentError above -- confirm both names
      # resolve to the same exception class in this module.
      raise BadArgumentError('_query_class must also be overridden if'
                             ' _multi_query_class is overridden.')
    if len(queries) == 1:
      queries[0].Order(*self.__orderings)
      return queries[0]
    else:
      return _multi_query_class(queries, self.__orderings)
  def __filter_disjunction(self, operations, values):
    """Add a disjunction of several filters and several values to the query.

    This is implemented by duplicating queries and combining the
    results later.

    Args:
      operations: a string or list of strings. Each string contains a
        property name and an operator to filter by. The operators
        themselves must not require multiple queries to evaluate
        (currently, this means that 'in' and '!=' are invalid).
      values: a value or list of filter values, normalized by
        _normalize_query_parameter.
    """
    if not isinstance(operations, (list, tuple)):
      operations = [operations]
    if not isinstance(values, (list, tuple)):
      values = [values]
    new_query_sets = []
    for operation in operations:
      if operation.lower().endswith('in') or operation.endswith('!='):
        raise BadQueryError('Cannot use "in" or "!=" in a disjunction.')
      for query_set in self.__query_sets:
        for value in values:
          # Cartesian product: every existing filter set is cloned once
          # per (operation, value) pair.
          new_query_set = copy.copy(query_set)
          datastore._AddOrAppend(new_query_set, operation, value)
          new_query_sets.append(new_query_set)
    self.__query_sets = new_query_sets
  def filter(self, property_operator, value):
    """Add filter to query.

    Args:
      property_operator: string with the property and operator to filter by.
      value: the filter value.

    Returns:
      Self to support method chaining.
    """
    # group(1) is the property name; group(3), when present, is the
    # operator (parsed by _FILTER_REGEX, defined elsewhere in this module).
    match = _FILTER_REGEX.match(property_operator)
    prop = match.group(1)
    if match.group(3) is not None:
      operator = match.group(3)
    else:
      operator = '=='
    if operator.lower() == 'in':
      if not isinstance(value, (list, tuple)):
        raise BadValueError('Argument to the "in" operator must be a list')
      values = [_normalize_query_parameter(v) for v in value]
      # 'prop in [a, b]' becomes the disjunction 'prop = a OR prop = b'.
      self.__filter_disjunction(prop + ' =', values)
    else:
      if isinstance(value, (list, tuple)):
        raise BadValueError('Filtering on lists is not supported')
      if operator == '!=':
        # 'prop != x' becomes the disjunction 'prop < x OR prop > x'.
        self.__filter_disjunction([prop + ' <', prop + ' >'],
                                  _normalize_query_parameter(value))
      else:
        value = _normalize_query_parameter(value)
        for query_set in self.__query_sets:
          # The raw 'property operator' string is passed through unchanged.
          datastore._AddOrAppend(query_set, property_operator, value)
    return self
  def order(self, property):
    """Set order of query result.

    To use descending order, prepend '-' (minus) to the property
    name, e.g., '-date' rather than 'date'.

    Args:
      property: Property to sort on.

    Returns:
      Self to support method chaining.

    Raises:
      PropertyError if invalid property name is provided.
    """
    if property.startswith('-'):
      property = property[1:]
      order = datastore.Query.DESCENDING
    else:
      order = datastore.Query.ASCENDING
    # Expando instances may carry dynamic properties, so the name check
    # only applies to fixed-schema models.
    if not issubclass(self._model_class, Expando):
      if (property not in self._model_class.properties() and
          property not in datastore_types._SPECIAL_PROPERTIES):
        raise PropertyError('Invalid property name \'%s\'' % property)
    self.__orderings.append((property, order))
    return self
  def ancestor(self, ancestor):
    """Sets an ancestor for this query.

    This restricts the query to only return results that descend from
    a given model instance. In other words, all of the results will
    have the ancestor as their parent, or parent's parent, etc. The
    ancestor itself is also a possible result!

    Args:
      ancestor: Model or Key (that has already been saved)

    Returns:
      Self to support method chaining.

    Raises:
      TypeError if the argument isn't a Key or Model; NotSavedError
      if it is, but isn't saved yet.
    """
    if isinstance(ancestor, datastore.Key):
      if ancestor.has_id_or_name():
        self.__ancestor = ancestor
      else:
        raise NotSavedError()
    elif isinstance(ancestor, Model):
      if ancestor.has_key():
        self.__ancestor = ancestor.key()
      else:
        raise NotSavedError()
    else:
      raise TypeError('ancestor should be Key or Model')
    return self
class GqlQuery(_BaseQuery):
  """A Query class that uses GQL query syntax instead of .filter() etc."""
  def __init__(self, query_string, *args, **kwds):
    """Constructor.

    Args:
      query_string: Properly formatted GQL query string.
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.
    """
    # Imported locally so the gql module is only loaded when GQL is
    # actually used -- presumably also avoids an import cycle; confirm.
    from google.appengine.ext import gql
    app = kwds.pop('_app', None)
    self._proto_query = gql.GQL(query_string, _app=app)
    # The model class is resolved from the kind named in the query string.
    super(GqlQuery, self).__init__(class_for_kind(self._proto_query._entity))
    self.bind(*args, **kwds)
  def bind(self, *args, **kwds):
    """Bind arguments (positional or keyword) to the query.

    Note that you can also pass arguments directly to the query
    constructor. Each time you call bind() the previous set of
    arguments is replaced with the new set. This is useful because
    the hard work is in parsing the query; so if you expect to be
    using the same query with different sets of arguments, you should
    hold on to the GqlQuery() object and call bind() on it each time.

    Args:
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.
    """
    self._args = []
    for arg in args:
      self._args.append(_normalize_query_parameter(arg))
    self._kwds = {}
    for name, arg in kwds.iteritems():
      self._kwds[name] = _normalize_query_parameter(arg)
  def run(self):
    """Override _BaseQuery.run() so the LIMIT clause is handled properly."""
    query_run = self._proto_query.Run(*self._args, **self._kwds)
    return _QueryIterator(self._model_class, iter(query_run))
  def _get_query(self):
    """Bind the saved arguments into the underlying proto-query.

    Returns:
      The bound query object produced by gql.GQL.Bind().
    """
    return self._proto_query.Bind(self._args, self._kwds)
class TextProperty(Property):
  """A string that can be longer than 500 bytes.

  This type should be used for large text values to make sure the datastore
  has good performance for queries.
  """
  def validate(self, value):
    """Validate text property.

    Coerces plain string values to Text before delegating to the base
    class validator.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not convertible to a Text instance.
    """
    if value is not None and not isinstance(value, Text):
      try:
        value = Text(value)
      except TypeError, err:
        raise BadValueError('Property %s must be convertible '
                            'to a Text instance (%s)' % (self.name, err))
    value = super(TextProperty, self).validate(value)
    # Re-check after base validation, which may substitute the value.
    if value is not None and not isinstance(value, Text):
      raise BadValueError('Property %s must be a Text instance' % self.name)
    return value
  data_type = Text
class StringProperty(Property):
  """A textual property, which can be multi- or single-line."""
  def __init__(self, verbose_name=None, multiline=False, **kwds):
    """Construct string property.

    Args:
      verbose_name: Verbose name is always first parameter.
      multiline: Whether newline characters are permitted in values.
      **kwds: Additional keyword arguments passed to the Property base class.
    """
    super(StringProperty, self).__init__(verbose_name, **kwds)
    self.multiline = multiline
  def validate(self, value):
    """Validate string property.

    Returns:
      A valid value.

    Raises:
      BadValueError if the value is not a string, or contains a newline
      while this property is single-line.
    """
    value = super(StringProperty, self).validate(value)
    if value is not None and not isinstance(value, basestring):
      raise BadValueError(
          'Property %s must be a str or unicode instance, not a %s'
          % (self.name, type(value).__name__))
    if not self.multiline and value and '\n' in value:
      raise BadValueError('Property %s is not multi-line' % self.name)
    return value
  data_type = basestring
class _CoercingProperty(Property):
  """A Property subclass that extends validate() to coerce to self.data_type."""
  def validate(self, value):
    """Coerce values (except None) to self.data_type.

    data_type is supplied by each concrete subclass.

    Args:
      value: The value to be validated and coerced.

    Returns:
      The coerced and validated value. It is guaranteed that this is
      either None or an instance of self.data_type; otherwise an exception
      is raised.

    Raises:
      BadValueError if the value could not be validated or coerced.
    """
    value = super(_CoercingProperty, self).validate(value)
    if value is not None and not isinstance(value, self.data_type):
      # The constructor call itself may raise when the value is ill-formed
      # for the target type.
      value = self.data_type(value)
    return value
class CategoryProperty(_CoercingProperty):
  """A property whose values are Category instances."""
  # Coercion to this type is performed by _CoercingProperty.validate().
  data_type = Category
class LinkProperty(_CoercingProperty):
  """A property whose values are Link instances."""
  def validate(self, value):
    """Coerce and validate a URL value.

    Returns:
      The coerced value, or None.

    Raises:
      BadValueError if the value cannot be coerced, or is not a full URL
      (both a scheme and a network location are required).
    """
    value = super(LinkProperty, self).validate(value)
    if value is not None:
      scheme, netloc, path, query, fragment = urlparse.urlsplit(value)
      if not scheme or not netloc:
        raise BadValueError('Property %s must be a full URL (\'%s\')' %
                            (self.name, value))
    return value
  data_type = Link
# URLProperty is an alias for LinkProperty.
URLProperty = LinkProperty
class EmailProperty(_CoercingProperty):
  """A property whose values are Email instances."""
  # Coercion to this type is performed by _CoercingProperty.validate().
  data_type = Email
class GeoPtProperty(_CoercingProperty):
  """A property whose values are GeoPt instances."""
  # Coercion to this type is performed by _CoercingProperty.validate().
  data_type = GeoPt
class IMProperty(_CoercingProperty):
  """A property whose values are IM instances."""
  # Coercion to this type is performed by _CoercingProperty.validate().
  data_type = IM
class PhoneNumberProperty(_CoercingProperty):
  """A property whose values are PhoneNumber instances."""
  # Coercion to this type is performed by _CoercingProperty.validate().
  data_type = PhoneNumber
class PostalAddressProperty(_CoercingProperty):
  """A property whose values are PostalAddress instances."""
  # Coercion to this type is performed by _CoercingProperty.validate().
  data_type = PostalAddress
class BlobProperty(Property):
  """A byte string that can be longer than 500 bytes.

  This type should be used for large binary values to make sure the datastore
  has good performance for queries.
  """
  def validate(self, value):
    """Validate blob property.

    Coerces plain string values to Blob before delegating to the base
    class validator.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not convertible to a Blob instance.
    """
    if value is not None and not isinstance(value, Blob):
      try:
        value = Blob(value)
      except TypeError, err:
        raise BadValueError('Property %s must be convertible '
                            'to a Blob instance (%s)' % (self.name, err))
    value = super(BlobProperty, self).validate(value)
    # Re-check after base validation, which may substitute the value.
    if value is not None and not isinstance(value, Blob):
      raise BadValueError('Property %s must be a Blob instance' % self.name)
    return value
  data_type = Blob
class ByteStringProperty(Property):
  """A short (<=500 bytes) byte string.

  This type should be used for short binary values that need to be indexed. If
  you do not require indexing (regardless of length), use BlobProperty instead.
  """
  def validate(self, value):
    """Validate ByteString property.

    Coerces plain string values to ByteString before delegating to the
    base class validator.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not convertible to a ByteString instance.
    """
    if value is not None and not isinstance(value, ByteString):
      try:
        value = ByteString(value)
      except TypeError, err:
        raise BadValueError('Property %s must be convertible '
                            'to a ByteString instance (%s)' % (self.name, err))
    value = super(ByteStringProperty, self).validate(value)
    # Re-check after base validation, which may substitute the value.
    if value is not None and not isinstance(value, ByteString):
      raise BadValueError('Property %s must be a ByteString instance'
                          % self.name)
    return value
  data_type = ByteString
class DateTimeProperty(Property):
  """The base class of all of our date/time properties.

  We handle common operations, like converting between time tuples and
  datetime instances.
  """
  def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False,
               **kwds):
    """Construct a DateTimeProperty.

    Args:
      verbose_name: Verbose name is always first parameter.
      auto_now: Date/time property is updated with the current time every
        time it is saved to the datastore. Useful for properties that want
        to track the modification time of an instance.
      auto_now_add: Date/time is set to when its instance is created.
        Useful for properties that record the creation time of an entity.
    """
    super(DateTimeProperty, self).__init__(verbose_name, **kwds)
    self.auto_now = auto_now
    self.auto_now_add = auto_now_add
  def validate(self, value):
    """Validate datetime.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not instance of 'datetime'.
    """
    value = super(DateTimeProperty, self).validate(value)
    if value and not isinstance(value, self.data_type):
      raise BadValueError('Property %s must be a %s' %
                          (self.name, self.data_type.__name__))
    return value
  def default_value(self):
    """Default value for datetime.

    Returns:
      value of now() as appropriate to the date-time instance if auto_now
      or auto_now_add is set, else user configured default value
      implementation.
    """
    if self.auto_now or self.auto_now_add:
      return self.now()
    return Property.default_value(self)
  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    Returns:
      now() as appropriate to the date-time instance in the odd case where
      auto_now is set to True, else the default implementation.
    """
    if self.auto_now:
      return self.now()
    else:
      return super(DateTimeProperty,
                   self).get_value_for_datastore(model_instance)
  data_type = datetime.datetime
  @staticmethod
  def now():
    """Get now as a full datetime value.

    Subclasses (e.g. DateProperty, TimeProperty) override this to return
    only the relevant portion of the timestamp.

    Returns:
      'now' as a whole timestamp, including both time and date.
    """
    return datetime.datetime.now()
def _date_to_datetime(value):
  """Convert a date to a datetime for datastore storage.

  Args:
    value: A datetime.date object.

  Returns:
    A datetime object with time set to 0:00.
  """
  assert isinstance(value, datetime.date)
  # combine() pairs the date with midnight, producing a naive datetime.
  return datetime.datetime.combine(value, datetime.time())
def _time_to_datetime(value):
  """Convert a time to a datetime for datastore storage.

  Args:
    value: A datetime.time object.

  Returns:
    A datetime object with date set to 1970-01-01.
  """
  assert isinstance(value, datetime.time)
  # The datastore has no time-only type, so times are anchored to the Unix
  # epoch date.  tzinfo is stripped first so the result is naive, matching
  # the field-by-field construction this replaces.
  naive_time = value.replace(tzinfo=None)
  return datetime.datetime.combine(datetime.date(1970, 1, 1), naive_time)
class DateProperty(DateTimeProperty):
  """A date property, which stores a date without a time."""
  @staticmethod
  def now():
    """Get now as a date datetime value.

    Returns:
      'date' part of 'now' only.
    """
    return datetime.datetime.now().date()
  def validate(self, value):
    """Validate date.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not instance of 'date',
      or if it is an instance of 'datetime' (which is a subclass
      of 'date', but for all practical purposes a different type).
    """
    value = super(DateProperty, self).validate(value)
    # datetime passes the base-class isinstance check because datetime
    # subclasses date, so it must be rejected explicitly here.
    if isinstance(value, datetime.datetime):
      raise BadValueError('Property %s must be a %s, not a datetime' %
                          (self.name, self.data_type.__name__))
    return value
  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    We retrieve a datetime.date from the model instance and return a
    datetime.datetime instance with the time set to zero.

    See base class method documentation for details.
    """
    value = super(DateProperty, self).get_value_for_datastore(model_instance)
    if value is not None:
      assert isinstance(value, datetime.date)
      value = _date_to_datetime(value)
    return value
  def make_value_from_datastore(self, value):
    """Native representation of this property.

    We receive a datetime.datetime retrieved from the entity and return
    a datetime.date instance representing its date portion.

    See base class method documentation for details.
    """
    if value is not None:
      assert isinstance(value, datetime.datetime)
      value = value.date()
    return value
  data_type = datetime.date
class TimeProperty(DateTimeProperty):
  """A time property, which stores a time without a date."""
  @staticmethod
  def now():
    """Get now as a time datetime value.

    Returns:
      'time' part of 'now' only.
    """
    return datetime.datetime.now().time()
  def empty(self, value):
    """Is time property empty.

    "0:0" (midnight) is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None
  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    We retrieve a datetime.time from the model instance and return a
    datetime.datetime instance with the date set to 1/1/1970.

    See base class method documentation for details.
    """
    value = super(TimeProperty, self).get_value_for_datastore(model_instance)
    if value is not None:
      assert isinstance(value, datetime.time), repr(value)
      value = _time_to_datetime(value)
    return value
  def make_value_from_datastore(self, value):
    """Native representation of this property.

    We receive a datetime.datetime retrieved from the entity and return
    a datetime.time instance representing its time portion.

    See base class method documentation for details.
    """
    if value is not None:
      assert isinstance(value, datetime.datetime)
      value = value.time()
    return value
  data_type = datetime.time
class IntegerProperty(Property):
  """An integer property."""
  def validate(self, value):
    """Validate integer property.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is not an integer or long instance, or does
      not fit in the signed 64-bit range.
    """
    value = super(IntegerProperty, self).validate(value)
    if value is None:
      return value
    # bool is rejected explicitly even though it is a subclass of int.
    if not isinstance(value, (int, long)) or isinstance(value, bool):
      raise BadValueError('Property %s must be an int or long, not a %s'
                          % (self.name, type(value).__name__))
    # Values must fit in a signed 64-bit integer.
    if value < -0x8000000000000000 or value > 0x7fffffffffffffff:
      raise BadValueError('Property %s must fit in 64 bits' % self.name)
    return value
  data_type = int
  def empty(self, value):
    """Is integer property empty.

    0 is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None
class RatingProperty(_CoercingProperty, IntegerProperty):
  """A property whose values are Rating instances.

  Via the MRO, _CoercingProperty.validate() first runs the integer checks
  from IntegerProperty through super(), then coerces the result to Rating.
  """
  data_type = Rating
class FloatProperty(Property):
  """A float property."""
  def validate(self, value):
    """Validate float.

    Note that int values are not accepted; the value must already be a
    float instance.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not instance of 'float'.
    """
    value = super(FloatProperty, self).validate(value)
    if value is not None and not isinstance(value, float):
      raise BadValueError('Property %s must be a float' % self.name)
    return value
  data_type = float
  def empty(self, value):
    """Is float property empty.

    0.0 is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None
class BooleanProperty(Property):
  """A boolean property."""
  def validate(self, value):
    """Validate boolean.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not instance of 'bool'.
    """
    value = super(BooleanProperty, self).validate(value)
    if value is not None and not isinstance(value, bool):
      raise BadValueError('Property %s must be a bool' % self.name)
    return value
  data_type = bool
  def empty(self, value):
    """Is boolean property empty.

    False is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None
class UserProperty(Property):
  """A user property."""
  def __init__(self, verbose_name=None, name=None,
               required=False, validator=None, choices=None,
               auto_current_user=False, auto_current_user_add=False):
    """Initializes this Property with the given options.

    Note: this does *not* support the 'default' keyword argument.
    Use auto_current_user_add=True instead.

    Args:
      verbose_name: User friendly name of property.
      name: Storage name for property. By default, uses attribute name
        as it is assigned in the Model sub-class.
      required: Whether property is required.
      validator: User provided method used for validation.
      choices: User provided set of valid property values.
      auto_current_user: If true, the value is set to the current user
        each time the entity is written to the datastore.
      auto_current_user_add: If true, the value is set to the current user
        the first time the entity is written to the datastore.
    """
    # 'default' is intentionally not forwarded; default_value() below
    # implements the auto_current_user* behavior instead.
    super(UserProperty, self).__init__(verbose_name, name,
                                       required=required,
                                       validator=validator,
                                       choices=choices)
    self.auto_current_user = auto_current_user
    self.auto_current_user_add = auto_current_user_add
  def validate(self, value):
    """Validate user.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not instance of 'User'.
    """
    value = super(UserProperty, self).validate(value)
    if value is not None and not isinstance(value, users.User):
      raise BadValueError('Property %s must be a User' % self.name)
    return value
  def default_value(self):
    """Default value for user.

    Returns:
      Value of users.get_current_user() if auto_current_user or
      auto_current_user_add is set; else None. (But *not* the default
      implementation, since we don't support the 'default' keyword
      argument.)
    """
    if self.auto_current_user or self.auto_current_user_add:
      return users.get_current_user()
    return None
  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    Returns:
      Value of users.get_current_user() if auto_current_user is set;
      else the default implementation.
    """
    if self.auto_current_user:
      return users.get_current_user()
    return super(UserProperty, self).get_value_for_datastore(model_instance)
  data_type = users.User
class ListProperty(Property):
  """A property that stores a list of things.

  This is a parameterized property; the parameter must be a valid
  non-list data type, and all items must conform to this type.
  """
  def __init__(self, item_type, verbose_name=None, default=None, **kwds):
    """Construct ListProperty.

    Args:
      item_type: Type for the list items; must be one of the allowed property
        types.
      verbose_name: Optional verbose name.
      default: Optional default value; if omitted, an empty list is used.
      **kwds: Optional additional keyword arguments, passed to base class.
        Note that the only permissible value for 'required' is True.

    Raises:
      TypeError if item_type is not a type object.
      ValueError if item_type is not an allowed property type, or if any
        value other than True is supplied for 'required'.
    """
    if item_type is str:
      item_type = basestring
    if not isinstance(item_type, type):
      raise TypeError('Item type should be a type object')
    if item_type not in _ALLOWED_PROPERTY_TYPES:
      raise ValueError('Item type %s is not acceptable' % item_type.__name__)
    # Pop 'required' before forwarding **kwds: previously an explicit (and
    # documented-as-permissible) required=True raised "got multiple values
    # for keyword argument 'required'" in the super() call below.
    if kwds.pop('required', True) is not True:
      raise ValueError('List values must be required')
    if default is None:
      default = []
    self.item_type = item_type
    super(ListProperty, self).__init__(verbose_name,
                                       required=True,
                                       default=default,
                                       **kwds)
  def validate(self, value):
    """Validate list.

    Returns:
      A valid value.

    Raises:
      BadValueError if property is not a list whose items are instances of
      the item_type given to the constructor.
    """
    value = super(ListProperty, self).validate(value)
    if value is not None:
      if not isinstance(value, list):
        raise BadValueError('Property %s must be a list' % self.name)
      value = self.validate_list_contents(value)
    return value
  def validate_list_contents(self, value):
    """Validates that all items in the list are of the correct type.

    Returns:
      The validated list.

    Raises:
      BadValueError if the list has items that are not instances of the
      item_type given to the constructor.
    """
    # int and long are treated as interchangeable item types.
    if self.item_type in (int, long):
      item_type = (int, long)
    else:
      item_type = self.item_type
    for item in value:
      if not isinstance(item, item_type):
        if item_type == (int, long):
          raise BadValueError('Items in the %s list must all be integers.' %
                              self.name)
        else:
          raise BadValueError(
              'Items in the %s list must all be %s instances' %
              (self.name, self.item_type.__name__))
    return value
  def empty(self, value):
    """Is list property empty.

    [] is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None
  data_type = list
  def default_value(self):
    """Default value for list.

    Because the property supplied to 'default' is a static value,
    that value must be shallow copied to prevent all fields with
    default values from sharing the same instance.

    Returns:
      Copy of the default value.
    """
    return list(super(ListProperty, self).default_value())
  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    Returns:
      validated list appropriate to save in the datastore.
    """
    value = self.validate_list_contents(
        super(ListProperty, self).get_value_for_datastore(model_instance))
    if self.validator:
      self.validator(value)
    return value
class StringListProperty(ListProperty):
  """A property that stores a list of strings.

  A shorthand for the most common type of ListProperty.
  """
  def __init__(self, verbose_name=None, default=None, **kwds):
    """Construct StringListProperty.

    Args:
      verbose_name: Optional verbose name.
      default: Optional default value; if omitted, an empty list is used.
      **kwds: Optional additional keyword arguments, passed to ListProperty().
    """
    # basestring accepts both str and unicode items.
    super(StringListProperty, self).__init__(basestring,
                                             verbose_name=verbose_name,
                                             default=default,
                                             **kwds)
class ReferenceProperty(Property):
"""A property that represents a many-to-one reference to another model.
For example, a reference property in model A that refers to model B forms
a many-to-one relationship from A to B: every instance of A refers to a
single B instance, and every B instance can have many A instances refer
to it.
"""
  def __init__(self,
               reference_class=None,
               verbose_name=None,
               collection_name=None,
               **attrs):
    """Construct ReferenceProperty.

    Args:
      reference_class: Which model class this property references; defaults
        to Model, which permits a reference to any kind. _SELF_REFERENCE
        makes the property reference its own model class.
      verbose_name: User friendly name of property.
      collection_name: If provided, alternate name of collection on
        reference_class to store back references. Use this to allow
        a Model to have multiple fields which refer to the same class.
      **attrs: Additional keyword arguments passed to the Property base
        class.

    Raises:
      KindError if reference_class is neither a Model subclass nor
      _SELF_REFERENCE.
    """
    super(ReferenceProperty, self).__init__(verbose_name, **attrs)
    self.collection_name = collection_name
    if reference_class is None:
      reference_class = Model
    if not ((isinstance(reference_class, type) and
             issubclass(reference_class, Model)) or
            reference_class is _SELF_REFERENCE):
      raise KindError('reference_class must be Model or _SELF_REFERENCE')
    # data_type mirrors reference_class so the generic property machinery
    # sees the referenced kind as this property's type.
    self.reference_class = self.data_type = reference_class
def __property_config__(self, model_class, property_name):
"""Loads all of the references that point to this model.
We need to do this to create the ReverseReferenceProperty properties for
this model and create the <reference>_set attributes on the referenced
model, e.g.:
class Story(db.Model):
title = db.StringProperty()
class Comment(db.Model):
story = db.ReferenceProperty(Story)
story = Story.get(id)
print [c for c in story.comment_set]
In this example, the comment_set property was created based on the reference
from Comment to Story (which is inherently one to many).
Args:
model_class: Model class which will have its reference properties
initialized.
property_name: Name of property being configured.
Raises:
DuplicatePropertyError if referenced class already has the provided
collection name as a property.
"""
super(ReferenceProperty, self).__property_config__(model_class,
property_name)
if self.reference_class is _SELF_REFERENCE:
self.reference_class = self.data_type = model_class
if self.collection_name is None:
self.collection_name = '%s_set' % (model_class.__name__.lower())
if hasattr(self.reference_class, self.collection_name):
raise DuplicatePropertyError('Class %s already has property %s'
% (self.reference_class.__name__,
self.collection_name))
setattr(self.reference_class,
self.collection_name,
_ReverseReferenceProperty(model_class, property_name))
def __get__(self, model_instance, model_class):
"""Get reference object.
This method will fetch unresolved entities from the datastore if
they are not already loaded.
Returns:
ReferenceProperty to Model object if property is set, else None.
"""
if model_instance is None:
return self
if hasattr(model_instance, self.__id_attr_name()):
reference_id = getattr(model_instance, self.__id_attr_name())
else:
reference_id = None
if reference_id is not None:
resolved = getattr(model_instance, self.__resolved_attr_name())
if resolved is not None:
return resolved
else:
instance = get(reference_id)
if instance is None:
raise Error('ReferenceProperty failed to be resolved')
setattr(model_instance, self.__resolved_attr_name(), instance)
return instance
else:
return None
def __set__(self, model_instance, value):
"""Set reference."""
value = self.validate(value)
if value is not None:
if isinstance(value, datastore.Key):
setattr(model_instance, self.__id_attr_name(), value)
setattr(model_instance, self.__resolved_attr_name(), None)
else:
setattr(model_instance, self.__id_attr_name(), value.key())
setattr(model_instance, self.__resolved_attr_name(), value)
else:
setattr(model_instance, self.__id_attr_name(), None)
setattr(model_instance, self.__resolved_attr_name(), None)
def get_value_for_datastore(self, model_instance):
"""Get key of reference rather than reference itself."""
return getattr(model_instance, self.__id_attr_name())
def validate(self, value):
"""Validate reference.
Returns:
A valid value.
Raises:
BadValueError for the following reasons:
- Value is not saved.
- Object not of correct model type for reference.
"""
if isinstance(value, datastore.Key):
return value
if value is not None and not value.has_key():
raise BadValueError(
'%s instance must have a complete key before it can be stored as a '
'reference' % self.reference_class.kind())
value = super(ReferenceProperty, self).validate(value)
if value is not None and not isinstance(value, self.reference_class):
raise KindError('Property %s must be an instance of %s' %
(self.name, self.reference_class.kind()))
return value
def __id_attr_name(self):
"""Get attribute of referenced id.
Returns:
Attribute where to store id of referenced entity.
"""
return self._attr_name()
def __resolved_attr_name(self):
"""Get attribute of resolved attribute.
The resolved attribute is where the actual loaded reference instance is
stored on the referring model instance.
Returns:
Attribute name of where to store resolved reference model instance.
"""
return '_RESOLVED' + self._attr_name()
# Shorthand alias for ReferenceProperty.
Reference = ReferenceProperty
def SelfReferenceProperty(verbose_name=None, collection_name=None, **attrs):
  """Create a self reference.

  Function for declaring a self referencing property on a model.

  Example:
    class HtmlNode(db.Model):
      parent = db.SelfReferenceProperty('Parent', 'children')

  Args:
    verbose_name: User friendly name of property.
    collection_name: Name of collection on model.
    **attrs: Remaining keyword arguments, forwarded to ReferenceProperty().

  Raises:
    ConfigurationError if reference_class provided as parameter.
  """
  # reference_class is implied (the owning model), so passing it explicitly
  # is a configuration mistake.
  if 'reference_class' in attrs:
    raise ConfigurationError(
        'Do not provide reference_class to self-reference.')
  return ReferenceProperty(_SELF_REFERENCE, verbose_name, collection_name,
                           **attrs)
# Shorthand alias for SelfReferenceProperty.
SelfReference = SelfReferenceProperty
class _ReverseReferenceProperty(Property):
  """The inverse of the Reference property above.

  We construct reverse references automatically for the model to which
  the Reference property is pointing to create the one-to-many property for
  that model.  For example, if you put a Reference property in model A that
  refers to model B, we automatically create a _ReverseReference property in
  B called a_set that can fetch all of the model A instances that refer to
  that instance of model B.
  """

  def __init__(self, model, prop):
    """Constructor for reverse reference.

    Constructor does not take standard values of other property types.

    Args:
      model: Model that this property is a collection of.
      prop: Foreign property on referred model that points back to this
        property's entity.
    """
    self.__model = model
    self.__property = prop

  def __get__(self, model_instance, model_class):
    """Fetches collection of model instances of this collection property."""
    # Class-level access returns the descriptor itself.
    if model_instance is None:
      return self
    # Instance-level access returns a lazy Query filtered on the foreign
    # property pointing back at this instance's key.
    query = Query(self.__model)
    return query.filter(self.__property + ' =', model_instance.key())

  def __set__(self, model_instance, value):
    """Not possible to set a new collection."""
    raise BadValueError('Virtual property is read-only')
# Transaction helpers re-exported from the low-level datastore module, under
# both snake_case and CamelCase names.
run_in_transaction = datastore.RunInTransaction
run_in_transaction_custom_retries = datastore.RunInTransactionCustomRetries

RunInTransaction = run_in_transaction
RunInTransactionCustomRetries = run_in_transaction_custom_retries
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Support for creating Django (new) forms from Datastore data models.
This is our best shot at supporting as much of Django as possible: you
won't be able to use Django's db package, but you can use our
db package instead, and create Django forms from it, either fully
automatically, or with overrides.
Note, you should not import these classes from this module. Importing
this module patches the classes in place, and you should continue to
import them from google.appengine.ext.db.
Some of the code here is strongly inspired by Django's own ModelForm
class (new in Django 0.97). Our code also supports Django 0.96 (so as
to be maximally compatible). Note that our API is always similar to
Django 0.97's API, even when used with Django 0.96 (which uses a
different API, chiefly form_for_model()).
Terminology notes:
- forms: always refers to the Django newforms subpackage
- field: always refers to a Django forms.Field instance
- property: always refers to a db.Property instance
Mapping between properties and fields:
+====================+===================+==============+====================+
| Property subclass | Field subclass | datatype | widget; notes |
+====================+===================+==============+====================+
| StringProperty | CharField | unicode | Textarea |
| | | | if multiline |
+--------------------+-------------------+--------------+--------------------+
| TextProperty | CharField | unicode | Textarea |
+--------------------+-------------------+--------------+--------------------+
| BlobProperty | FileField | str | skipped in v0.96 |
+--------------------+-------------------+--------------+--------------------+
| DateTimeProperty | DateTimeField | datetime | skipped |
| | | | if auto_now[_add] |
+--------------------+-------------------+--------------+--------------------+
| DateProperty | DateField | date | ditto |
+--------------------+-------------------+--------------+--------------------+
| TimeProperty | TimeField | time | ditto |
+--------------------+-------------------+--------------+--------------------+
| IntegerProperty | IntegerField | int or long | |
+--------------------+-------------------+--------------+--------------------+
| FloatProperty | FloatField | float | CharField in v0.96 |
+--------------------+-------------------+--------------+--------------------+
| BooleanProperty | BooleanField | bool | |
+--------------------+-------------------+--------------+--------------------+
| UserProperty | CharField | users.User | |
+--------------------+-------------------+--------------+--------------------+
| StringListProperty | CharField | list of str | Textarea |
+--------------------+-------------------+--------------+--------------------+
| LinkProperty | URLField | str | |
+--------------------+-------------------+--------------+--------------------+
| ReferenceProperty | ModelChoiceField* | db.Model | |
+--------------------+-------------------+--------------+--------------------+
| _ReverseReferenceP.| None | <iterable> | always skipped |
+====================+===================+==============+====================+
Notes:
  *: this Field subclass is defined by us, not in Django.
"""
import itertools
import logging
import django.core.exceptions
import django.utils.datastructures
try:
from django import newforms as forms
except ImportError:
from django import forms
try:
from django.utils.translation import ugettext_lazy as _
except ImportError:
pass
from google.appengine.api import users
from google.appengine.ext import db
def monkey_patch(name, bases, namespace):
  """A 'metaclass' for adding new methods to an existing class.

  In this version, existing methods can't be overridden; this is by
  design, to avoid accidents.

  Usage example:

    class PatchClass(TargetClass):
      __metaclass__ = monkey_patch
      def foo(self, ...): ...
      def bar(self, ...): ...

  This is equivalent to:

    def foo(self, ...): ...
    def bar(self, ...): ...
    TargetClass.foo = foo
    TargetClass.bar = bar
    PatchClass = TargetClass

  Note that PatchClass becomes an alias for TargetClass; by convention
  it is recommended to give PatchClass the same name as TargetClass.
  """
  assert len(bases) == 1, 'Exactly one base class is required'
  target = bases[0]
  for attr, value in namespace.iteritems():
    # These two keys always appear in a class namespace and must not be
    # copied onto the target.
    if attr in ('__metaclass__', '__module__'):
      continue
    assert attr not in target.__dict__, "Won't override attribute %r" % (attr,)
    setattr(target, attr, value)
  return target
# NOTE: no class docstring on purpose -- monkey_patch would try to copy
# __doc__ onto db.Property and trip its no-override assertion.
class Property(db.Property):
  __metaclass__ = monkey_patch

  def get_form_field(self, form_class=forms.CharField, **kwargs):
    """Return a Django form field appropriate for this property.

    Args:
      form_class: a forms.Field subclass, default forms.CharField

    Additional keyword arguments are passed to the form_class constructor,
    with certain defaults:
      required: self.required
      label: prettified self.verbose_name, if not None
      widget: a forms.Select instance if self.choices is non-empty
      initial: self.default, if not None

    Returns:
      A fully configured instance of form_class, or None if no form
      field should be generated for this property.
    """
    defaults = {'required': self.required}
    if self.verbose_name:
      defaults['label'] = self.verbose_name.capitalize().replace('_', ' ')
    if self.choices:
      choices = []
      # Prepend a blank choice unless the field is required and already has
      # a default or an explicit initial value.
      if not self.required or (self.default is None and
                               'initial' not in kwargs):
        choices.append(('', '---------'))
      for choice in self.choices:
        choices.append((str(choice), unicode(choice)))
      defaults['widget'] = forms.Select(choices=choices)
    if self.default is not None:
      defaults['initial'] = self.default
    # Caller-supplied keyword arguments win over all computed defaults.
    defaults.update(kwargs)
    return form_class(**defaults)

  def get_value_for_form(self, instance):
    """Extract the property value from the instance for use in a form.

    Override this to do a property- or field-specific type conversion.

    Args:
      instance: a db.Model instance

    Returns:
      The property's value extracted from the instance, possibly
      converted to a type suitable for a form field; possibly None.

    By default this returns the instance attribute's value unchanged.
    """
    return getattr(instance, self.name)

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Override this to do a property- or field-specific type conversion.

    Args:
      value: the cleaned value retrieved from the form field

    Returns:
      A value suitable for assignment to a model instance's property;
      possibly None.

    By default this converts the value to self.data_type if it
    isn't already an instance of that type, except if the value is
    empty, in which case we return None.
    """
    if value in (None, ''):
      return None
    if not isinstance(value, self.data_type):
      value = self.data_type(value)
    return value
class UserProperty(db.Property):
  """This class exists solely to log a warning when it is used."""

  def __init__(self, *args, **kwds):
    """Log a deprecation warning, then defer to db.Property.__init__().

    Args:
      *args, **kwds: passed through to db.Property unchanged.
    """
    # logging.warn is a deprecated alias; logging.warning is the
    # documented spelling.
    logging.warning("Please don't use modelforms.UserProperty; "
                    "use db.UserProperty instead.")
    super(UserProperty, self).__init__(*args, **kwds)
class StringProperty(db.StringProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a string property.

    The widget default becomes forms.Textarea when the property's
    multiline attribute is set.
    """
    options = {'widget': forms.Textarea} if self.multiline else {}
    options.update(kwargs)
    return super(StringProperty, self).get_form_field(**options)
class TextProperty(db.TextProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a text property.

    The widget always defaults to forms.Textarea.
    """
    options = dict(widget=forms.Textarea)
    options.update(kwargs)
    return super(TextProperty, self).get_form_field(**options)
class BlobProperty(db.BlobProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a blob property.

    Returns a forms.FileField instance under Django 0.97 or later.
    Under 0.96 (no FileField attribute) this returns None, as file
    uploads are not really supported in that version.
    """
    if not hasattr(forms, 'FileField'):
      return None
    options = dict(form_class=forms.FileField)
    options.update(kwargs)
    return super(BlobProperty, self).get_form_field(**options)

  def get_value_for_form(self, instance):
    """Extract the property value from the instance for use in a form.

    Always returns None: a Blob cannot seed a file-upload widget.
    """
    return None

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Wraps the content of the UploadedFile returned by the FileField
    in a db.Blob; anything else is handled by the base implementation.
    """
    if value.__class__.__name__ == 'UploadedFile':
      return db.Blob(value.content)
    return super(BlobProperty, self).make_value_from_form(value)
class DateTimeProperty(db.DateTimeProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a date-time property.

    Returns None for auto_now/auto_now_add properties, since such 'auto'
    fields should not be rendered as part of the form; otherwise defaults
    to a forms.DateTimeField.
    """
    if self.auto_now or self.auto_now_add:
      return None
    options = dict(form_class=forms.DateTimeField)
    options.update(kwargs)
    return super(DateTimeProperty, self).get_form_field(**options)
class DateProperty(db.DateProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a date property.

    Returns None for auto_now/auto_now_add properties, since such 'auto'
    fields should not be rendered as part of the form; otherwise defaults
    to a forms.DateField.
    """
    if self.auto_now or self.auto_now_add:
      return None
    options = dict(form_class=forms.DateField)
    options.update(kwargs)
    return super(DateProperty, self).get_form_field(**options)
class TimeProperty(db.TimeProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a time property.

    Returns None for auto_now/auto_now_add properties, since such 'auto'
    fields should not be rendered as part of the form; otherwise defaults
    to a forms.TimeField.
    """
    if self.auto_now or self.auto_now_add:
      return None
    options = dict(form_class=forms.TimeField)
    options.update(kwargs)
    return super(TimeProperty, self).get_form_field(**options)
class IntegerProperty(db.IntegerProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for an integer property.

    Defaults to a forms.IntegerField.
    """
    options = dict(form_class=forms.IntegerField)
    options.update(kwargs)
    return super(IntegerProperty, self).get_form_field(**options)
class FloatProperty(db.FloatProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a float property.

    This defaults to a FloatField instance when using Django 0.97 or
    later.  For 0.96 (which has no FloatField) this falls back to the
    base class's default, the CharField class.
    """
    defaults = {}
    if hasattr(forms, 'FloatField'):
      defaults['form_class'] = forms.FloatField
    defaults.update(kwargs)
    return super(FloatProperty, self).get_form_field(**defaults)
class BooleanProperty(db.BooleanProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a boolean property.

    Defaults to a forms.BooleanField.
    """
    options = dict(form_class=forms.BooleanField)
    options.update(kwargs)
    return super(BooleanProperty, self).get_form_field(**options)

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Needed so that False is not silently replaced with None; the string
    'false' (in any case) also maps to False.
    """
    if value is None:
      return None
    if isinstance(value, basestring) and value.lower() == 'false':
      return False
    return bool(value)
class StringListProperty(db.StringListProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a StringList property.

    Defaults to a Textarea widget with a blank initial value.
    """
    options = dict(widget=forms.Textarea, initial='')
    options.update(kwargs)
    return super(StringListProperty, self).get_form_field(**options)

  def get_value_for_form(self, instance):
    """Extract the property value from the instance for use in a form.

    Joins a list of strings into a single newline-separated string;
    an empty or missing value becomes None.
    """
    value = super(StringListProperty, self).get_value_for_form(instance)
    if not value:
      return None
    if isinstance(value, list):
      return '\n'.join(value)
    return value

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Splits the string into a list of lines; an empty value becomes [].
    """
    if not value:
      return []
    if isinstance(value, basestring):
      return value.splitlines()
    return value
class LinkProperty(db.LinkProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a URL property.

    Defaults to a forms.URLField.
    """
    options = dict(form_class=forms.URLField)
    options.update(kwargs)
    return super(LinkProperty, self).get_form_field(**options)
class _WrapIter(object):
"""Helper class whose iter() calls a given function to get an iterator."""
def __init__(self, function):
self._function = function
def __iter__(self):
return self._function()
class ModelChoiceField(forms.Field):
  """A Django form field whose choices are datastore model instances.

  Used by ReferenceProperty.get_form_field(); the selectable values are
  the keys of the entities produced by a db.Query.
  """

  # NOTE(review): relies on _ from the optional ugettext import at the top
  # of this module -- confirm the Django 0.96 fallback path defines it.
  default_error_messages = {
      'invalid_choice': _(u'Please select a valid choice. '
                          u'That choice is not one of the available choices.'),
  }

  def __init__(self, reference_class, query=None, choices=None,
               empty_label=u'---------',
               required=True, widget=forms.Select, label=None, initial=None,
               help_text=None, *args, **kwargs):
    """Constructor.

    Args:
      reference_class: required; the db.Model subclass used in the reference
      query: optional db.Query; default db.Query(reference_class)
      choices: optional explicit list of (value, label) pairs representing
        available choices; defaults to dynamically iterating over the
        query argument (or its default)
      empty_label: label to be used for the default selection item in
        the widget; this is prepended to the choices
      required, widget, label, initial, help_text, *args, **kwargs:
        like for forms.Field.__init__(); widget defaults to forms.Select
    """
    assert issubclass(reference_class, db.Model)
    if query is None:
      query = db.Query(reference_class)
    assert isinstance(query, db.Query)
    super(ModelChoiceField, self).__init__(required, widget, label, initial,
                                           help_text, *args, **kwargs)
    self.empty_label = empty_label
    self.reference_class = reference_class
    # Assign the private attributes directly (not via the properties below)
    # and sync the widget once, after both are in place.
    self._query = query
    self._choices = choices
    self._update_widget_choices()

  def _update_widget_choices(self):
    """Helper to copy the choices to the widget."""
    self.widget.choices = self.choices

  def _get_query(self):
    """Getter for the query attribute."""
    return self._query

  def _set_query(self, query):
    """Setter for the query attribute.

    As a side effect, the widget's choices are updated.
    """
    self._query = query
    self._update_widget_choices()

  query = property(_get_query, _set_query)

  def _generate_choices(self):
    """Generator yielding (key, label) pairs from the query results."""
    # The empty choice comes first so the widget renders a blank option.
    yield ('', self.empty_label)
    for inst in self._query:
      yield (inst.key(), unicode(inst))

  def _get_choices(self):
    """Getter for the choices attribute.

    This is required to return an object that can be iterated over
    multiple times.
    """
    if self._choices is not None:
      return self._choices
    # _WrapIter re-runs the query generator on every iteration, so the
    # choice list stays current with the datastore.
    return _WrapIter(self._generate_choices)

  def _set_choices(self, choices):
    """Setter for the choices attribute.

    As a side effect, the widget's choices are updated.
    """
    self._choices = choices
    self._update_widget_choices()

  choices = property(_get_choices, _set_choices)

  def clean(self, value):
    """Override Field.clean() to do reference-specific value cleaning.

    This turns a non-empty value into a model instance.
    """
    value = super(ModelChoiceField, self).clean(value)
    if not value:
      return None
    instance = db.get(value)
    if instance is None:
      raise db.BadValueError(self.error_messages['invalid_choice'])
    return instance
class ReferenceProperty(db.ReferenceProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a reference property.

    Defaults to a ModelChoiceField over self.reference_class.
    """
    options = dict(form_class=ModelChoiceField,
                   reference_class=self.reference_class)
    options.update(kwargs)
    return super(ReferenceProperty, self).get_form_field(**options)

  def get_value_for_form(self, instance):
    """Extract the property value from the instance for use in a form.

    Returns the key object for the referenced object, or None.
    """
    value = super(ReferenceProperty, self).get_value_for_form(instance)
    if value is None:
      return None
    return value.key()

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Turns a key string or object into a model instance; empty values and
    model instances pass through unchanged.
    """
    if value and not isinstance(value, db.Model):
      return db.get(value)
    return value
class _ReverseReferenceProperty(db._ReverseReferenceProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return None: reverse references are automatic and never rendered
    as a form field.
    """
    return None
def property_clean(prop, value):
"""Apply Property level validation to value.
Calls .make_value_from_form() and .validate() on the property and catches
exceptions generated by either. The exceptions are converted to
forms.ValidationError exceptions.
Args:
prop: The property to validate against.
value: The value to validate.
Raises:
forms.ValidationError if the value cannot be validated.
"""
if value is not None:
try:
prop.validate(prop.make_value_from_form(value))
except (db.BadValueError, ValueError), e:
raise forms.ValidationError(unicode(e))
class ModelFormOptions(object):
  """A simple class to hold internal options for a ModelForm class.

  Instance attributes:
    model: a db.Model class, or None
    fields: list of field names to be defined, or None
    exclude: list of field names to be skipped, or None

  These instance attributes are copied from the 'Meta' class that is
  usually present in a ModelForm class, and all default to None.
  """

  def __init__(self, options=None):
    # Copy the recognized attributes off the Meta class, defaulting each
    # to None when absent (including when options itself is None).
    for attr in ('model', 'fields', 'exclude'):
      setattr(self, attr, getattr(options, attr, None))
class ModelFormMetaclass(type):
  """The metaclass for the ModelForm class defined below.

  This is our analog of Django's own ModelFormMetaclass.  (We
  can't conveniently subclass that class because there are quite a few
  differences.)

  See the docs for ModelForm below for a usage example.
  """

  def __new__(cls, class_name, bases, attrs):
    """Constructor for a new ModelForm class instance.

    The signature of this method is determined by Python internals.

    All Django Field instances are removed from attrs and added to
    the base_fields attribute instead.  Additional Field instances
    are added to this based on the Datastore Model class specified
    by the Meta attribute.
    """
    # Pull explicitly declared Field instances out of attrs, ordered by
    # creation_counter (the order they appear in the class body).  The
    # attrs.pop() calls run lazily as sorted() consumes the generator.
    fields = sorted(((field_name, attrs.pop(field_name))
                     for field_name, obj in attrs.items()
                     if isinstance(obj, forms.Field)),
                    key=lambda obj: obj[1].creation_counter)
    # Walk bases in reverse so fields from earlier bases come first.
    for base in bases[::-1]:
      if hasattr(base, 'base_fields'):
        fields = base.base_fields.items() + fields
    declared_fields = django.utils.datastructures.SortedDict()
    for field_name, obj in fields:
      declared_fields[field_name] = obj
    opts = ModelFormOptions(attrs.get('Meta', None))
    attrs['_meta'] = opts
    # A form may inherit a model from at most one base, and may not declare
    # a model different from its parent's.
    base_models = []
    for base in bases:
      base_opts = getattr(base, '_meta', None)
      base_model = getattr(base_opts, 'model', None)
      if base_model is not None:
        base_models.append(base_model)
    if len(base_models) > 1:
      raise django.core.exceptions.ImproperlyConfigured(
          "%s's base classes define more than one model." % class_name)
    if opts.model is not None:
      if base_models and base_models[0] is not opts.model:
        raise django.core.exceptions.ImproperlyConfigured(
            '%s defines a different model than its parent.' % class_name)
      # Derive form fields from the model's properties (in declaration
      # order), honoring Meta.fields / Meta.exclude; properties whose
      # get_form_field() returns None are skipped.
      model_fields = django.utils.datastructures.SortedDict()
      for name, prop in sorted(opts.model.properties().iteritems(),
                               key=lambda prop: prop[1].creation_counter):
        if opts.fields and name not in opts.fields:
          continue
        if opts.exclude and name in opts.exclude:
          continue
        form_field = prop.get_form_field()
        if form_field is not None:
          model_fields[name] = form_field
      # Explicitly declared fields override model-derived ones.
      model_fields.update(declared_fields)
      attrs['base_fields'] = model_fields
      # Wrap each model-backed field's clean() so property-level validation
      # runs during form validation.  prop and old_clean are bound as
      # default arguments to avoid the late-binding closure pitfall.
      props = opts.model.properties()
      for name, field in model_fields.iteritems():
        prop = props.get(name)
        if prop:
          def clean_for_property_field(value, prop=prop, old_clean=field.clean):
            value = old_clean(value)
            property_clean(prop, value)
            return value
          field.clean = clean_for_property_field
    else:
      attrs['base_fields'] = declared_fields
    return super(ModelFormMetaclass, cls).__new__(cls,
                                                  class_name, bases, attrs)
class BaseModelForm(forms.BaseForm):
  """Base class for ModelForm.

  This overrides the forms.BaseForm constructor and adds a save() method.

  This class does not have a special metaclass; the magic metaclass is
  added by the subclass ModelForm.
  """

  def __init__(self, data=None, files=None, auto_id=None, prefix=None,
               initial=None, error_class=None, label_suffix=None,
               instance=None):
    """Constructor.

    Args (all optional and defaulting to None):
      data: dict of data values, typically from a POST request
      files: dict of file upload values; Django 0.97 or later only
      auto_id, prefix: see Django documentation
      initial: dict of initial values
      error_class, label_suffix: see Django 0.97 or later documentation
      instance: Model instance to be used for additional initial values

    Except for initial and instance, these arguments are passed on to
    the forms.BaseForm constructor unchanged, but only if not None.
    Some arguments (files, error_class, label_suffix) are only
    supported by Django 0.97 or later.  Leave these blank (i.e. None)
    when using Django 0.96.  Their default values will be used with
    Django 0.97 or later even when they are explicitly set to None.
    """
    opts = self._meta
    self.instance = instance
    object_data = {}
    if instance is not None:
      # Seed the form's initial data from the instance's properties,
      # honoring the Meta.fields / Meta.exclude filters.
      for name, prop in instance.properties().iteritems():
        if opts.fields and name not in opts.fields:
          continue
        if opts.exclude and name in opts.exclude:
          continue
        object_data[name] = prop.get_value_for_form(instance)
    if initial is not None:
      # Explicit initial values override instance-derived ones.
      object_data.update(initial)
    kwargs = dict(data=data, files=files, auto_id=auto_id,
                  prefix=prefix, initial=object_data,
                  error_class=error_class, label_suffix=label_suffix)
    # Drop None-valued arguments so Django 0.96's narrower BaseForm
    # signature still works and 0.97+ defaults apply.
    kwargs = dict((name, value)
                  for name, value in kwargs.iteritems()
                  if value is not None)
    super(BaseModelForm, self).__init__(**kwargs)

  def save(self, commit=True):
    """Save this form's cleaned data into a model instance.

    Args:
      commit: optional bool, default True; if true, the model instance
        is also saved to the datastore.

    Returns:
      A model instance.  If a model instance was already associated
      with this form instance (either passed to the constructor with
      instance=... or by a previous save() call), that same instance
      is updated and returned; if no instance was associated yet, one
      is created by this call.

    Raises:
      ValueError if the data couldn't be validated.
    """
    if not self.is_bound:
      raise ValueError('Cannot save an unbound form')
    opts = self._meta
    instance = self.instance
    if instance is None:
      fail_message = 'created'
    else:
      fail_message = 'updated'
    if self.errors:
      raise ValueError("The %s could not be %s because the data didn't "
                       'validate.' % (opts.model.kind(), fail_message))
    cleaned_data = self._cleaned_data()
    converted_data = {}
    # key_name is appended as a pseudo-property so a cleaned 'key_name'
    # value can be passed to the model constructor below.
    propiter = itertools.chain(
        opts.model.properties().iteritems(),
        iter([('key_name', StringProperty(name='key_name'))])
        )
    for name, prop in propiter:
      value = cleaned_data.get(name)
      if value is not None:
        converted_data[name] = prop.make_value_from_form(value)
    try:
      if instance is None:
        instance = opts.model(**converted_data)
        self.instance = instance
      else:
        for name, value in converted_data.iteritems():
          if name == 'key_name':
            # The key of an existing entity cannot be changed.
            continue
          setattr(instance, name, value)
    except db.BadValueError, err:
      raise ValueError('The %s could not be %s (%s)' %
                       (opts.model.kind(), fail_message, err))
    if commit:
      instance.put()
    return instance

  def _cleaned_data(self):
    """Helper to retrieve the cleaned data attribute.

    In Django 0.96 this attribute was called self.clean_data.  In 0.97
    and later it's been renamed to self.cleaned_data, to avoid a name
    conflict.  This helper abstracts the difference between the
    versions away from its caller.
    """
    try:
      return self.cleaned_data
    except AttributeError:
      return self.clean_data
class ModelForm(BaseModelForm):
  """A Django form tied to a Datastore model.

  Note that this particular class just sets the metaclass; all other
  functionality is defined in the base class, BaseModelForm, above.

  Usage example:

    from google.appengine.ext import db
    from google.appengine.ext.db import djangoforms

    # First, define a model class
    class MyModel(db.Model):
      foo = db.StringProperty()
      bar = db.IntegerProperty(required=True, default=42)

    # Now define a form class
    class MyForm(djangoforms.ModelForm):
      class Meta:
        model = MyModel

  You can now instantiate MyForm without arguments to create an
  unbound form, or with data from a POST request to create a bound
  form.  You can also pass a model instance with the instance=...
  keyword argument to create an unbound (!) form whose initial values
  are taken from the instance.  For bound forms, use the save() method
  to return a model instance.

  Like Django's own corresponding ModelForm class, the nested Meta
  class can have two other attributes:

    fields: if present and non-empty, a list of field names to be
      included in the form; properties not listed here are
      excluded from the form

    exclude: if present and non-empty, a list of field names to be
      excluded from the form

  If exclude and fields are both non-empty, names occurring in both
  are excluded (i.e. exclude wins).  By default all properties in the
  model have a corresponding form field defined.

  It is also possible to define form fields explicitly.  This gives
  more control over the widget used, constraints, initial value, and
  so on.  Such form fields are not affected by the nested Meta class's
  fields and exclude attributes.

  If you define a form field named 'key_name' it will be treated
  specially and will be used as the value for the key_name parameter
  to the Model constructor.  This allows you to create instances with
  named keys.  The 'key_name' field will be ignored when updating an
  instance (although it will still be shown on the form).
  """

  # The metaclass builds base_fields from the nested Meta class's model.
  __metaclass__ = ModelFormMetaclass
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Support for creating Django (new) forms from Datastore data models.
This is our best shot at supporting as much of Django as possible: you
won't be able to use Django's db package, but you can use our
db package instead, and create Django forms from it, either fully
automatically, or with overrides.
Note, you should not import these classes from this module. Importing
this module patches the classes in place, and you should continue to
import them from google.appengine.db.
Some of the code here is strongly inspired by Django's own ModelForm
class (new in Django 0.97). Our code also supports Django 0.96 (so as
to be maximally compatible). Note that our API is always similar to
Django 0.97's API, even when used with Django 0.96 (which uses a
different API, chiefly form_for_model()).
Terminology notes:
- forms: always refers to the Django newforms subpackage
- field: always refers to a Django forms.Field instance
- property: always refers to a db.Property instance
Mapping between properties and fields:
+====================+===================+==============+====================+
| Property subclass | Field subclass | datatype | widget; notes |
+====================+===================+==============+====================+
| StringProperty | CharField | unicode | Textarea |
| | | | if multiline |
+--------------------+-------------------+--------------+--------------------+
| TextProperty | CharField | unicode | Textarea |
+--------------------+-------------------+--------------+--------------------+
| BlobProperty | FileField | str | skipped in v0.96 |
+--------------------+-------------------+--------------+--------------------+
| DateTimeProperty | DateTimeField | datetime | skipped |
| | | | if auto_now[_add] |
+--------------------+-------------------+--------------+--------------------+
| DateProperty | DateField | date | ditto |
+--------------------+-------------------+--------------+--------------------+
| TimeProperty | TimeField | time | ditto |
+--------------------+-------------------+--------------+--------------------+
| IntegerProperty | IntegerField | int or long | |
+--------------------+-------------------+--------------+--------------------+
| FloatProperty | FloatField | float | CharField in v0.96 |
+--------------------+-------------------+--------------+--------------------+
| BooleanProperty | BooleanField | bool | |
+--------------------+-------------------+--------------+--------------------+
| UserProperty | CharField | users.User | |
+--------------------+-------------------+--------------+--------------------+
| StringListProperty | CharField | list of str | Textarea |
+--------------------+-------------------+--------------+--------------------+
| LinkProperty | URLField | str | |
+--------------------+-------------------+--------------+--------------------+
| ReferenceProperty | ModelChoiceField* | db.Model | |
+--------------------+-------------------+--------------+--------------------+
| _ReverseReferenceP.| None | <iterable> | always skipped |
+====================+===================+==============+====================+
Notes:
  *: this Field subclass is defined by us, not in Django.
"""
import itertools
import logging
import django.core.exceptions
import django.utils.datastructures
try:
from django import newforms as forms
except ImportError:
from django import forms
try:
from django.utils.translation import ugettext_lazy as _
except ImportError:
pass
from google.appengine.api import users
from google.appengine.ext import db
def monkey_patch(name, bases, namespace):
  """A 'metaclass' that injects new methods into an existing class.

  In this version, existing methods can't be overridden; this is by
  design, to avoid accidents.

  Usage example:

    class PatchClass(TargetClass):
      __metaclass__ = monkey_patch
      def foo(self, ...): ...
      def bar(self, ...): ...

  This is equivalent to:

    def foo(self, ...): ...
    def bar(self, ...): ...
    TargetClass.foo = foo
    TargetClass.bar = bar
    PatchClass = TargetClass

  Note that PatchClass becomes an alias for TargetClass; by convention
  it is recommended to give PatchClass the same name as TargetClass.
  """
  assert len(bases) == 1, 'Exactly one base class is required'
  target = bases[0]
  # These two keys are artifacts of the class statement itself, not
  # attributes the author intends to graft onto the target class.
  skipped = ('__metaclass__', '__module__')
  for attr, value in namespace.iteritems():
    if attr in skipped:
      continue
    assert attr not in target.__dict__, "Won't override attribute %r" % (attr,)
    setattr(target, attr, value)
  return target
class Property(db.Property):
  __metaclass__ = monkey_patch

  def get_form_field(self, form_class=forms.CharField, **kwargs):
    """Return a Django form field appropriate for this property.

    Args:
      form_class: a forms.Field subclass, default forms.CharField

    Additional keyword arguments are passed to the form_class
    constructor, with these defaults derived from the property:

      required: self.required
      label: prettified self.verbose_name, if not None
      widget: a forms.Select instance if self.choices is non-empty
      initial: self.default, if not None

    Returns:
      A fully configured instance of form_class, or None if no form
      field should be generated for this property.
    """
    params = {'required': self.required}
    if self.verbose_name:
      params['label'] = self.verbose_name.capitalize().replace('_', ' ')
    if self.choices:
      # A blank option is offered when the field may legitimately be
      # left unset, or when there is no default/initial to preselect.
      include_blank = (not self.required or
                       (self.default is None and 'initial' not in kwargs))
      options = []
      if include_blank:
        options.append(('', '---------'))
      options.extend((str(c), unicode(c)) for c in self.choices)
      params['widget'] = forms.Select(choices=options)
    if self.default is not None:
      params['initial'] = self.default
    # Caller-supplied keyword arguments always win over the defaults.
    params.update(kwargs)
    return form_class(**params)

  def get_value_for_form(self, instance):
    """Extract the property value from the instance for use in a form.

    Override this to do a property- or field-specific type conversion.

    Args:
      instance: a db.Model instance

    Returns:
      The property's value extracted from the instance, possibly
      converted to a type suitable for a form field; possibly None.

    By default this returns the instance attribute's value unchanged.
    """
    return getattr(instance, self.name)

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Override this to do a property- or field-specific type conversion.

    Args:
      value: the cleaned value retrieved from the form field

    Returns:
      A value suitable for assignment to a model instance's property;
      possibly None.

    By default this converts the value to self.data_type if it
    isn't already an instance of that type, except if the value is
    empty, in which case we return None.
    """
    if value is None or value == '':
      return None
    if isinstance(value, self.data_type):
      return value
    return self.data_type(value)
class UserProperty(db.Property):
  """This class exists solely to log a warning when it is used."""

  def __init__(self, *args, **kwds):
    # Deprecated: warn whenever such a property is instantiated and
    # point the developer at db.UserProperty, then delegate unchanged.
    logging.warn("Please don't use modelforms.UserProperty; "
                 "use db.UserProperty instead.")
    super(UserProperty, self).__init__(*args, **kwds)
class StringProperty(db.StringProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a string property.

    A multiline string property gets a Textarea widget by default;
    an explicit widget passed by the caller always takes precedence.
    """
    if self.multiline:
      kwargs.setdefault('widget', forms.Textarea)
    return super(StringProperty, self).get_form_field(**kwargs)
class TextProperty(db.TextProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a text property.

    Defaults the widget to forms.Textarea unless the caller supplies
    one.
    """
    kwargs.setdefault('widget', forms.Textarea)
    return super(TextProperty, self).get_form_field(**kwargs)
class BlobProperty(db.BlobProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a blob property.

    Defaults to a forms.FileField under Django 0.97 or later.  Django
    0.96 has no FileField and no real file-upload support, so None is
    returned there (no form field is generated).
    """
    if not hasattr(forms, 'FileField'):
      return None
    kwargs.setdefault('form_class', forms.FileField)
    return super(BlobProperty, self).get_form_field(**kwargs)

  def get_value_for_form(self, instance):
    """Extract the property value from the instance for use in a form.

    A Blob cannot seed a file-upload widget, so this always returns
    None.
    """
    return None

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Wraps the content of an UploadedFile (returned by FileField) in a
    db.Blob; any other value is passed to the base implementation.
    """
    # Compared by class name to avoid importing a Django-version-
    # specific type.
    if value.__class__.__name__ == 'UploadedFile':
      return db.Blob(value.content)
    return super(BlobProperty, self).make_value_from_form(value)
class DateTimeProperty(db.DateTimeProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a date-time property.

    Returns None for auto_now/auto_now_add properties: such 'auto'
    values are filled in automatically and should not be rendered on
    the form.  Otherwise defaults to a DateTimeField.
    """
    if self.auto_now or self.auto_now_add:
      return None
    kwargs.setdefault('form_class', forms.DateTimeField)
    return super(DateTimeProperty, self).get_form_field(**kwargs)
class DateProperty(db.DateProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a date property.

    Returns None for auto_now/auto_now_add properties: such 'auto'
    values are filled in automatically and should not be rendered on
    the form.  Otherwise defaults to a DateField.
    """
    if self.auto_now or self.auto_now_add:
      return None
    kwargs.setdefault('form_class', forms.DateField)
    return super(DateProperty, self).get_form_field(**kwargs)
class TimeProperty(db.TimeProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a time property.

    Returns None for auto_now/auto_now_add properties: such 'auto'
    values are filled in automatically and should not be rendered on
    the form.  Otherwise defaults to a TimeField.
    """
    if self.auto_now or self.auto_now_add:
      return None
    kwargs.setdefault('form_class', forms.TimeField)
    return super(TimeProperty, self).get_form_field(**kwargs)
class IntegerProperty(db.IntegerProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for an integer property.

    Defaults to an IntegerField unless the caller overrides
    form_class.
    """
    kwargs.setdefault('form_class', forms.IntegerField)
    return super(IntegerProperty, self).get_form_field(**kwargs)
class FloatProperty(db.FloatProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a float property.

    This defaults to a FloatField instance when using Django 0.97 or
    later.  For 0.96 this defaults to the CharField class, because
    Django 0.96 has no FloatField.
    """
    defaults = {}
    if hasattr(forms, 'FloatField'):
      defaults['form_class'] = forms.FloatField
    defaults.update(kwargs)
    return super(FloatProperty, self).get_form_field(**defaults)
class BooleanProperty(db.BooleanProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a boolean property.

    Defaults to a BooleanField unless the caller overrides form_class.
    """
    kwargs.setdefault('form_class', forms.BooleanField)
    return super(BooleanProperty, self).get_form_field(**kwargs)

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Needed so that an explicit False is not collapsed into None, and
    so the literal string 'false' (any case) means False.
    """
    if value is None:
      return None
    is_false_string = isinstance(value, basestring) and value.lower() == 'false'
    if is_false_string:
      return False
    return bool(value)
class StringListProperty(db.StringListProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a StringList property.

    Defaults to a Textarea widget with a blank initial value; either
    default can be overridden by the caller.
    """
    kwargs.setdefault('widget', forms.Textarea)
    kwargs.setdefault('initial', '')
    return super(StringListProperty, self).get_form_field(**kwargs)

  def get_value_for_form(self, instance):
    """Extract the property value from the instance for use in a form.

    A list of strings is joined with newlines so it can populate the
    Textarea; an empty value maps to None.
    """
    value = super(StringListProperty, self).get_value_for_form(instance)
    if not value:
      return None
    if isinstance(value, list):
      return '\n'.join(value)
    return value

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Splits the Textarea text back into a list of lines; an empty
    value maps to an empty list.
    """
    if not value:
      return []
    if isinstance(value, basestring):
      return value.splitlines()
    return value
class LinkProperty(db.LinkProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a URL property.

    Defaults to a URLField unless the caller overrides form_class.
    """
    kwargs.setdefault('form_class', forms.URLField)
    return super(LinkProperty, self).get_form_field(**kwargs)
class _WrapIter(object):
  """Helper class whose iter() calls a given function to get an iterator."""

  def __init__(self, function):
    # function: zero-argument callable returning a fresh iterator.
    self._function = function

  def __iter__(self):
    # Each iteration pass invokes the factory again, so the object can
    # be iterated over multiple times.
    return self._function()
class ModelChoiceField(forms.Field):
  """A form field whose choices are entities matching a datastore query.

  The cleaned value of this field is the referenced model instance
  itself (or None for an empty selection).
  """

  default_error_messages = {
      'invalid_choice': _(u'Please select a valid choice. '
                          u'That choice is not one of the available choices.'),
  }

  def __init__(self, reference_class, query=None, choices=None,
               empty_label=u'---------',
               required=True, widget=forms.Select, label=None, initial=None,
               help_text=None, *args, **kwargs):
    """Constructor.

    Args:
      reference_class: required; the db.Model subclass used in the reference
      query: optional db.Query; default db.Query(reference_class)
      choices: optional explicit list of (value, label) pairs representing
        available choices; defaults to dynamically iterating over the
        query argument (or its default)
      empty_label: label to be used for the default selection item in
        the widget; this is prepended to the choices
      required, widget, label, initial, help_text, *args, **kwargs:
        like for forms.Field.__init__(); widget defaults to forms.Select
    """
    assert issubclass(reference_class, db.Model)
    if query is None:
      query = db.Query(reference_class)
    assert isinstance(query, db.Query)
    super(ModelChoiceField, self).__init__(required, widget, label, initial,
                                           help_text, *args, **kwargs)
    self.empty_label = empty_label
    self.reference_class = reference_class
    # Assign the private attributes directly (not via the query/choices
    # properties) so the widget is refreshed exactly once, just below.
    self._query = query
    self._choices = choices
    self._update_widget_choices()

  def _update_widget_choices(self):
    """Helper to copy the choices to the widget."""
    self.widget.choices = self.choices

  def _get_query(self):
    """Getter for the query attribute."""
    return self._query

  def _set_query(self, query):
    """Setter for the query attribute.

    As a side effect, the widget's choices are updated.
    """
    self._query = query
    self._update_widget_choices()

  query = property(_get_query, _set_query)

  def _generate_choices(self):
    """Generator yielding (key, label) pairs from the query results."""
    # The empty selection comes first; its value is the empty string.
    yield ('', self.empty_label)
    for inst in self._query:
      yield (inst.key(), unicode(inst))

  def _get_choices(self):
    """Getter for the choices attribute.

    This is required to return an object that can be iterated over
    multiple times.
    """
    if self._choices is not None:
      return self._choices
    # No explicit choices: return a lazy iterable so that every
    # iteration re-runs the query via _generate_choices().
    return _WrapIter(self._generate_choices)

  def _set_choices(self, choices):
    """Setter for the choices attribute.

    As a side effect, the widget's choices are updated.
    """
    self._choices = choices
    self._update_widget_choices()

  choices = property(_get_choices, _set_choices)

  def clean(self, value):
    """Override Field.clean() to do reference-specific value cleaning.

    This turns a non-empty value into a model instance.
    """
    value = super(ModelChoiceField, self).clean(value)
    if not value:
      return None
    instance = db.get(value)
    if instance is None:
      # The submitted key no longer resolves to an entity (e.g. it was
      # deleted after the form was rendered).
      raise db.BadValueError(self.error_messages['invalid_choice'])
    return instance
class ReferenceProperty(db.ReferenceProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a reference property.

    Defaults to a ModelChoiceField over this property's
    reference_class; both defaults may be overridden by the caller.
    """
    kwargs.setdefault('form_class', ModelChoiceField)
    kwargs.setdefault('reference_class', self.reference_class)
    return super(ReferenceProperty, self).get_form_field(**kwargs)

  def get_value_for_form(self, instance):
    """Extract the property value from the instance for use in a form.

    Returns the key of the referenced entity, or None if there is no
    reference.
    """
    referenced = super(ReferenceProperty, self).get_value_for_form(instance)
    if referenced is None:
      return None
    return referenced.key()

  def make_value_from_form(self, value):
    """Convert a form value to a property value.

    Resolves a key (string or object) into the referenced model
    instance; a model instance or empty value passes through
    unchanged.
    """
    if not value:
      return value
    if isinstance(value, db.Model):
      return value
    return db.get(value)
class _ReverseReferenceProperty(db._ReverseReferenceProperty):
  __metaclass__ = monkey_patch

  def get_form_field(self, **kwargs):
    """Return a Django form field appropriate for a reverse reference.

    This always returns None, since reverse references are always
    automatic and should never be rendered as part of a form.
    """
    return None
def property_clean(prop, value):
"""Apply Property level validation to value.
Calls .make_value_from_form() and .validate() on the property and catches
exceptions generated by either. The exceptions are converted to
forms.ValidationError exceptions.
Args:
prop: The property to validate against.
value: The value to validate.
Raises:
forms.ValidationError if the value cannot be validated.
"""
if value is not None:
try:
prop.validate(prop.make_value_from_form(value))
except (db.BadValueError, ValueError), e:
raise forms.ValidationError(unicode(e))
class ModelFormOptions(object):
  """A simple class to hold internal options for a ModelForm class.

  Mirrors the attributes of the nested 'Meta' class usually present in
  a ModelForm class; each attribute defaults to None when the Meta
  class (or the individual attribute) is absent.

  Instance attributes:
    model: a db.Model class, or None
    fields: list of field names to be defined, or None
    exclude: list of field names to be skipped, or None
  """

  def __init__(self, options=None):
    for attr in ('model', 'fields', 'exclude'):
      setattr(self, attr, getattr(options, attr, None))
class ModelFormMetaclass(type):
  """The metaclass for the ModelForm class defined below.

  This is our analog of Django's own ModelFormMetaclass.  (We
  can't conveniently subclass that class because there are quite a few
  differences.)

  See the docs for ModelForm below for a usage example.
  """

  def __new__(cls, class_name, bases, attrs):
    """Constructor for a new ModelForm class instance.

    The signature of this method is determined by Python internals.

    All Django Field instances are removed from attrs and added to
    the base_fields attribute instead.  Additional Field instances
    are added to this based on the Datastore Model class specified
    by the Meta attribute.
    """
    # Pull explicitly declared form fields out of the class body,
    # ordered by creation_counter (i.e. source-declaration order).
    fields = sorted(((field_name, attrs.pop(field_name))
                     for field_name, obj in attrs.items()
                     if isinstance(obj, forms.Field)),
                    key=lambda obj: obj[1].creation_counter)
    # Prepend fields inherited from base classes; bases are walked in
    # reverse so earlier bases end up first in the ordering.
    for base in bases[::-1]:
      if hasattr(base, 'base_fields'):
        fields = base.base_fields.items() + fields
    declared_fields = django.utils.datastructures.SortedDict()
    for field_name, obj in fields:
      declared_fields[field_name] = obj
    opts = ModelFormOptions(attrs.get('Meta', None))
    attrs['_meta'] = opts
    # Collect models declared by base classes so conflicting
    # declarations can be rejected below.
    base_models = []
    for base in bases:
      base_opts = getattr(base, '_meta', None)
      base_model = getattr(base_opts, 'model', None)
      if base_model is not None:
        base_models.append(base_model)
    if len(base_models) > 1:
      raise django.core.exceptions.ImproperlyConfigured(
          "%s's base classes define more than one model." % class_name)
    if opts.model is not None:
      if base_models and base_models[0] is not opts.model:
        raise django.core.exceptions.ImproperlyConfigured(
            '%s defines a different model than its parent.' % class_name)
      # Auto-generate a form field for each model property, honoring
      # the Meta.fields allow-list and the Meta.exclude deny-list.
      model_fields = django.utils.datastructures.SortedDict()
      for name, prop in sorted(opts.model.properties().iteritems(),
                               key=lambda prop: prop[1].creation_counter):
        if opts.fields and name not in opts.fields:
          continue
        if opts.exclude and name in opts.exclude:
          continue
        form_field = prop.get_form_field()
        if form_field is not None:
          model_fields[name] = form_field
      # Explicitly declared fields override the auto-generated ones.
      model_fields.update(declared_fields)
      attrs['base_fields'] = model_fields
      # Wrap each field's clean() so property-level validation runs as
      # well.  prop and old_clean are bound through default arguments
      # to avoid the late-binding closure pitfall inside the loop.
      props = opts.model.properties()
      for name, field in model_fields.iteritems():
        prop = props.get(name)
        if prop:
          def clean_for_property_field(value, prop=prop, old_clean=field.clean):
            value = old_clean(value)
            property_clean(prop, value)
            return value
          field.clean = clean_for_property_field
    else:
      # No model declared: the form only has the explicit fields.
      attrs['base_fields'] = declared_fields
    return super(ModelFormMetaclass, cls).__new__(cls,
                                                  class_name, bases, attrs)
class BaseModelForm(forms.BaseForm):
  """Base class for ModelForm.

  This overrides the forms.BaseForm constructor and adds a save() method.

  This class does not have a special metaclass; the magic metaclass is
  added by the subclass ModelForm.
  """

  def __init__(self, data=None, files=None, auto_id=None, prefix=None,
               initial=None, error_class=None, label_suffix=None,
               instance=None):
    """Constructor.

    Args (all optional and defaulting to None):
      data: dict of data values, typically from a POST request)
      files: dict of file upload values; Django 0.97 or later only
      auto_id, prefix: see Django documentation
      initial: dict of initial values
      error_class, label_suffix: see Django 0.97 or later documentation
      instance: Model instance to be used for additional initial values

    Except for initial and instance, these arguments are passed on to
    the forms.BaseForm constructor unchanged, but only if not None.
    Some arguments (files, error_class, label_suffix) are only
    supported by Django 0.97 or later.  Leave these blank (i.e. None)
    when using Django 0.96.  Their default values will be used with
    Django 0.97 or later even when they are explicitly set to None.
    """
    opts = self._meta
    self.instance = instance
    # Seed the form's initial data from the instance's property
    # values, honoring the Meta fields/exclude lists.
    object_data = {}
    if instance is not None:
      for name, prop in instance.properties().iteritems():
        if opts.fields and name not in opts.fields:
          continue
        if opts.exclude and name in opts.exclude:
          continue
        object_data[name] = prop.get_value_for_form(instance)
    # An explicit initial dict takes precedence over instance values.
    if initial is not None:
      object_data.update(initial)
    kwargs = dict(data=data, files=files, auto_id=auto_id,
                  prefix=prefix, initial=object_data,
                  error_class=error_class, label_suffix=label_suffix)
    # Drop None-valued arguments so Django 0.96's BaseForm, which does
    # not accept files/error_class/label_suffix, still works and the
    # newer versions fall back to their own defaults.
    kwargs = dict((name, value)
                  for name, value in kwargs.iteritems()
                  if value is not None)
    super(BaseModelForm, self).__init__(**kwargs)

  def save(self, commit=True):
    """Save this form's cleaned data into a model instance.

    Args:
      commit: optional bool, default True; if true, the model instance
        is also saved to the datastore.

    Returns:
      A model instance.  If a model instance was already associated
      with this form instance (either passed to the constructor with
      instance=... or by a previous save() call), that same instance
      is updated and returned; if no instance was associated yet, one
      is created by this call.

    Raises:
      ValueError if the data couldn't be validated.
    """
    if not self.is_bound:
      raise ValueError('Cannot save an unbound form')
    opts = self._meta
    instance = self.instance
    if instance is None:
      fail_message = 'created'
    else:
      fail_message = 'updated'
    if self.errors:
      raise ValueError("The %s could not be %s because the data didn't "
                       'validate.' % (opts.model.kind(), fail_message))
    cleaned_data = self._cleaned_data()
    converted_data = {}
    # Iterate over the model's properties plus a synthetic 'key_name'
    # property, so a key_name form field is converted like any other.
    propiter = itertools.chain(
        opts.model.properties().iteritems(),
        iter([('key_name', StringProperty(name='key_name'))])
        )
    for name, prop in propiter:
      value = cleaned_data.get(name)
      if value is not None:
        converted_data[name] = prop.make_value_from_form(value)
    try:
      if instance is None:
        # key_name (if present) is passed to the constructor here,
        # allowing creation of entities with named keys.
        instance = opts.model(**converted_data)
        self.instance = instance
      else:
        for name, value in converted_data.iteritems():
          # An existing entity's key cannot change; ignore key_name.
          if name == 'key_name':
            continue
          setattr(instance, name, value)
    except db.BadValueError, err:
      raise ValueError('The %s could not be %s (%s)' %
                       (opts.model.kind(), fail_message, err))
    if commit:
      instance.put()
    return instance

  def _cleaned_data(self):
    """Helper to retrieve the cleaned data attribute.

    In Django 0.96 this attribute was called self.clean_data.  In 0.97
    and later it's been renamed to self.cleaned_data, to avoid a name
    conflict.  This helper abstracts the difference between the
    versions away from its caller.
    """
    try:
      return self.cleaned_data
    except AttributeError:
      return self.clean_data
class ModelForm(BaseModelForm):
  """A Django form tied to a Datastore model.

  Note that this particular class just sets the metaclass; all other
  functionality is defined in the base class, BaseModelForm, above.

  Usage example:

    from google.appengine.ext import db
    from google.appengine.ext.db import djangoforms

    # First, define a model class
    class MyModel(db.Model):
      foo = db.StringProperty()
      bar = db.IntegerProperty(required=True, default=42)

    # Now define a form class
    class MyForm(djangoforms.ModelForm):
      class Meta:
        model = MyModel

  You can now instantiate MyForm without arguments to create an
  unbound form, or with data from a POST request to create a bound
  form.  You can also pass a model instance with the instance=...
  keyword argument to create an unbound (!) form whose initial values
  are taken from the instance.  For bound forms, use the save() method
  to return a model instance.

  Like Django's own corresponding ModelForm class, the nested Meta
  class can have two other attributes:

    fields: if present and non-empty, a list of field names to be
            included in the form; properties not listed here are
            excluded from the form

    exclude: if present and non-empty, a list of field names to be
             excluded from the form

  If exclude and fields are both non-empty, names occurring in both
  are excluded (i.e. exclude wins).  By default all properties in the
  model have a corresponding form field defined.

  It is also possible to define form fields explicitly.  This gives
  more control over the widget used, constraints, initial value, and
  so on.  Such form fields are not affected by the nested Meta class's
  fields and exclude attributes.

  If you define a form field named 'key_name' it will be treated
  specially and will be used as the value for the key_name parameter
  to the Model constructor.  This allows you to create instances with
  named keys.  The 'key_name' field will be ignored when updating an
  instance (although it will still be shown on the form).
  """

  __metaclass__ = ModelFormMetaclass
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Simple, schema-based database abstraction layer for the datastore.
Modeled after Django's abstraction layer on top of SQL databases,
http://www.djangoproject.com/documentation/model-api/. Ours is a little simpler
and a lot less code because the datastore is so much simpler than SQL
databases.
The programming model is to declare Python subclasses of the Model class,
declaring datastore properties as class members of that class. So if you want to
publish a story with title, body, and created date, you would do it like this:
class Story(db.Model):
title = db.StringProperty()
body = db.TextProperty()
created = db.DateTimeProperty(auto_now_add=True)
You can create a new Story in the datastore with this usage pattern:
story = Story(title='My title')
story.body = 'My body'
story.put()
You query for Story entities using built in query interfaces that map directly
to the syntax and semantics of the datastore:
stories = Story.all().filter('date >=', yesterday).order('-date')
for story in stories:
print story.title
The Property declarations enforce types by performing validation on assignment.
For example, the DateTimeProperty enforces that you assign valid datetime
objects, and if you supply the "required" option for a property, you will not
be able to assign None to that property.
We also support references between models, so if a story has comments, you
would represent it like this:
class Comment(db.Model):
story = db.ReferenceProperty(Story)
body = db.TextProperty()
When you get a story out of the datastore, the story reference is resolved
automatically the first time it is referenced, which makes it easy to use
model instances without performing additional queries by hand:
comment = Comment.get(key)
print comment.story.title
Likewise, you can access the set of comments that refer to each story through
this property through a reverse reference called comment_set, which is a Query
preconfigured to return all matching comments:
story = Story.get(key)
for comment in story.comment_set:
print comment.body
"""
import copy
import datetime
import logging
import re
import time
import urlparse
import warnings
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
# Re-export the datastore error types at db module level so users of
# this package never need to import datastore_errors directly.
Error = datastore_errors.Error
BadValueError = datastore_errors.BadValueError
BadPropertyError = datastore_errors.BadPropertyError
BadRequestError = datastore_errors.BadRequestError
EntityNotFoundError = datastore_errors.EntityNotFoundError
BadArgumentError = datastore_errors.BadArgumentError
QueryNotFoundError = datastore_errors.QueryNotFoundError
TransactionNotFoundError = datastore_errors.TransactionNotFoundError
Rollback = datastore_errors.Rollback
TransactionFailedError = datastore_errors.TransactionFailedError
BadFilterError = datastore_errors.BadFilterError
BadQueryError = datastore_errors.BadQueryError
BadKeyError = datastore_errors.BadKeyError
InternalError = datastore_errors.InternalError
NeedIndexError = datastore_errors.NeedIndexError
Timeout = datastore_errors.Timeout

# Alias: property validation failures are reported as BadValueError.
ValidationError = BadValueError

# Re-export the datastore value types likewise.
Key = datastore_types.Key
Category = datastore_types.Category
Link = datastore_types.Link
Email = datastore_types.Email
GeoPt = datastore_types.GeoPt
IM = datastore_types.IM
PhoneNumber = datastore_types.PhoneNumber
PostalAddress = datastore_types.PostalAddress
Rating = datastore_types.Rating
Text = datastore_types.Text
Blob = datastore_types.Blob
ByteString = datastore_types.ByteString

# Registry mapping entity kind name -> implementing model class; see
# class_for_kind() below.
_kind_map = {}

# Unique sentinel object; presumably used to mark a self-referencing
# ReferenceProperty -- confirm at its usage sites.
_SELF_REFERENCE = object()

# Attribute names that user models may not define as properties.
_RESERVED_WORDS = set(['key_name'])
class NotSavedError(Error):
  """Raised when a saved-object action (e.g. key()) is performed on an object
  that has not been saved to the datastore."""
class KindError(BadValueError):
  """Raised when an entity is used with a Model class of the wrong kind."""
class PropertyError(Error):
  """Raised when a non-existent property is referenced."""
class DuplicatePropertyError(Error):
  """Raised when a property name is defined more than once in a model
  definition (including across its base classes)."""
class ConfigurationError(Error):
  """Raised when a property or model is improperly configured."""
class ReservedWordError(Error):
  """Raised when a property is defined using a reserved attribute name."""
class DerivedPropertyError(Error):
  """Raised when attempting to assign a value to a derived property."""
# Value types that may be assigned to a declared model property.
_ALLOWED_PROPERTY_TYPES = set([
    basestring,
    str,
    unicode,
    bool,
    int,
    long,
    float,
    Key,
    datetime.datetime,
    datetime.date,
    datetime.time,
    Blob,
    ByteString,
    Text,
    users.User,
    Category,
    Link,
    Email,
    GeoPt,
    IM,
    PhoneNumber,
    PostalAddress,
    Rating,
    ])
# Expando dynamic properties additionally allow lists, tuples and None.
_ALLOWED_EXPANDO_PROPERTY_TYPES = set(_ALLOWED_PROPERTY_TYPES)
_ALLOWED_EXPANDO_PROPERTY_TYPES.update((list, tuple, type(None)))
# Comparison operators recognized in query filter strings.
_OPERATORS = ['<', '<=', '>', '>=', '=', '==', '!=', 'in']
# Matches "<property> [<operator>]" filter strings. Use a raw string so the
# \s escapes reach the regex engine verbatim instead of relying on Python
# passing unknown string escapes through unchanged.
_FILTER_REGEX = re.compile(
    r'^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(_OPERATORS),
    re.IGNORECASE | re.UNICODE)
def class_for_kind(kind):
  """Return the Model class registered as the implementation of a kind.
  Looks the kind up in the module-level kind map that PropertiedClass
  maintains as model classes are defined.
  Args:
    kind: Entity kind string.
  Returns:
    Class implementation for kind.
  Raises:
    KindError when there is no implementation for kind.
  """
  if kind not in _kind_map:
    raise KindError("No implementation for kind '%s'" % kind)
  return _kind_map[kind]
def check_reserved_word(attr_name):
  """Raise ReservedWordError if attr_name cannot be used as a property name.
  Names of the form '__...__', names in _RESERVED_WORDS and attributes of
  Model are all rejected.
  Args:
    attr_name: Name to check to see if it is a reserved word.
  Raises:
    ReservedWordError when attr_name is determined to be a reserved word.
  """
  if datastore_types.RESERVED_PROPERTY_NAME.match(attr_name):
    raise ReservedWordError(
        "Cannot define property. All names both beginning and "
        "ending with '__' are reserved.")
  clashes_with_model = attr_name in _RESERVED_WORDS or attr_name in dir(Model)
  if clashes_with_model:
    raise ReservedWordError(
        "Cannot define property using reserved word '%(attr_name)s'. "
        "If you would like to use this name in the datastore consider "
        "using a different name like %(attr_name)s_ and adding "
        "name='%(attr_name)s' to the parameter list of the property "
        "definition." % {'attr_name': attr_name})
def _initialize_properties(model_class, name, bases, dct):
  """Initialize Property attributes for Model-class.
  Merges the _properties dictionaries of all base classes into a fresh
  dictionary on model_class, then adds every Property instance found in the
  new class definition, configuring each descriptor as it goes.
  Args:
    model_class: Model class to initialize properties for.
    name: Name of the new class (unused here; kept to mirror the metaclass
      __init__ signature).
    bases: Base classes whose _properties are inherited.
    dct: Class definition dictionary, scanned for Property instances.
  Raises:
    DuplicatePropertyError: if the same property name is defined in two
      base classes, or in the new class and a base class.
    ReservedWordError: if a property uses a reserved attribute name.
  """
  model_class._properties = {}
  defined = set()
  for base in bases:
    if hasattr(base, '_properties'):
      property_keys = base._properties.keys()
      duplicate_properties = defined.intersection(property_keys)
      if duplicate_properties:
        raise DuplicatePropertyError(
            'Duplicate properties in base class %s already defined: %s' %
            (base.__name__, list(duplicate_properties)))
      defined.update(property_keys)
      model_class._properties.update(base._properties)
  for attr_name in dct.keys():
    attr = dct[attr_name]
    if isinstance(attr, Property):
      check_reserved_word(attr_name)
      if attr_name in defined:
        raise DuplicatePropertyError('Duplicate property: %s' % attr_name)
      defined.add(attr_name)
      model_class._properties[attr_name] = attr
      # Tell the descriptor which model class and attribute name it serves.
      attr.__property_config__(model_class, attr_name)
class PropertiedClass(type):
  """Meta-class for initializing Model classes properties.
  Used for initializing Properties defined in the context of a model.
  By using a meta-class much of the configuration of a Property
  descriptor becomes implicit. By using this meta-class, descriptors
  that are of class Model are notified about which class they
  belong to and what attribute they are associated with and can
  do appropriate initialization via __property_config__.
  Duplicate properties are not permitted.
  """
  def __init__(cls, name, bases, dct, map_kind=True):
    """Initializes a class that might have property definitions.
    This method is called when a class is created with the PropertiedClass
    meta-class.
    Loads all properties for this model and its base classes in to a dictionary
    for easy reflection via the 'properties' method.
    Configures each property defined in the new class.
    Duplicate properties, either defined in the new class or defined separately
    in two base classes are not permitted.
    Properties may not assigned to names which are in the list of
    _RESERVED_WORDS. It is still possible to store a property using a reserved
    word in the datastore by using the 'name' keyword argument to the Property
    constructor.
    Args:
      cls: Class being initialized.
      name: Name of new class.
      bases: Base classes of new class.
      dct: Dictionary of new definitions for class.
      map_kind: If True, register the new class in the global kind map so
        class_for_kind() can resolve entities of this kind back to it.
    Raises:
      DuplicatePropertyError when a property is duplicated either in the new
        class or separately in two base classes.
      ReservedWordError when a property is given a name that is in the list of
        reserved words, attributes of Model and names of the form '__.*__'.
    """
    super(PropertiedClass, cls).__init__(name, bases, dct)
    _initialize_properties(cls, name, bases, dct)
    if map_kind:
      # Register kind -> class so entities loaded from the datastore can be
      # converted back into instances of this model (see class_for_kind).
      _kind_map[cls.kind()] = cls
class Property(object):
  """A Property is an attribute of a Model.
  It defines the type of the attribute, which determines how it is stored
  in the datastore and how the property values are validated. Different property
  types support different options, which change validation rules, default
  values, etc. The simplest example of a property is a StringProperty:
     class Story(db.Model):
       title = db.StringProperty()
  """
  # Class-wide counter; each instance snapshots it so the order in which
  # properties were declared on a model can be recovered.
  creation_counter = 0
  def __init__(self, verbose_name=None, name=None, default=None,
               required=False, validator=None, choices=None):
    """Initializes this Property with the given options.
    Args:
      verbose_name: User friendly name of property.
      name: Storage name for property. By default, uses attribute name
        as it is assigned in the Model sub-class.
      default: Default value for property if none is assigned.
      required: Whether property is required.
      validator: User provided method used for validation.
      choices: User provided set of valid property values.
    """
    self.verbose_name = verbose_name
    self.name = name
    self.default = default
    self.required = required
    self.validator = validator
    self.choices = choices
    # Snapshot the declaration-order counter for this instance.
    self.creation_counter = Property.creation_counter
    Property.creation_counter += 1
  def __property_config__(self, model_class, property_name):
    """Configure property, connecting it to its model.
    Configure the property so that it knows its property name and what class
    it belongs to.
    Args:
      model_class: Model class which Property will belong to.
      property_name: Name of property within Model instance to store property
        values in. By default this will be the property name preceded by
        an underscore, but may change for different subclasses.
    """
    self.model_class = model_class
    # An explicit name= argument takes precedence over the attribute name.
    if self.name is None:
      self.name = property_name
  def __get__(self, model_instance, model_class):
    """Returns the value for this property on the given model instance.
    See http://docs.python.org/ref/descriptors.html for a description of
    the arguments to this class and what they mean."""
    if model_instance is None:
      # Accessed on the class itself; return the descriptor for reflection.
      return self
    try:
      return getattr(model_instance, self._attr_name())
    except AttributeError:
      return None
  def __set__(self, model_instance, value):
    """Sets the value for this property on the given model instance.
    See http://docs.python.org/ref/descriptors.html for a description of
    the arguments to this class and what they mean.
    """
    # Validate on every assignment so instances never hold invalid values.
    value = self.validate(value)
    setattr(model_instance, self._attr_name(), value)
  def default_value(self):
    """Default value for unassigned values.
    Returns:
      Default value as provided by __init__(default).
    """
    return self.default
  def validate(self, value):
    """Assert that provided value is compatible with this property.
    Args:
      value: Value to validate against this Property.
    Returns:
      A valid value, either the input unchanged or adapted to the
      required type.
    Raises:
      BadValueError if the value is not appropriate for this
      property in any way.
    """
    if self.empty(value):
      if self.required:
        raise BadValueError('Property %s is required' % self.name)
    else:
      # Membership uses == per element, matching the original explicit
      # comparison loop over self.choices.
      if self.choices and value not in self.choices:
        raise BadValueError('Property %s is %r; must be one of %r' %
                            (self.name, value, self.choices))
    # The user-supplied validator runs even for empty values.
    if self.validator is not None:
      self.validator(value)
    return value
  def empty(self, value):
    """Determine if value is empty in the context of this property.
    For most kinds, this is equivalent to "not value", but for kinds like
    bool, the test is more subtle, so subclasses can override this method
    if necessary.
    Args:
      value: Value to validate against this Property.
    Returns:
      True if this value is considered empty in the context of this Property
      type, otherwise False.
    """
    return not value
  def get_value_for_datastore(self, model_instance):
    """Datastore representation of this property.
    Looks for this property in the given model instance, and returns the proper
    datastore representation of the value that can be stored in a datastore
    entity. Most critically, it will fetch the datastore key value for
    reference properties.
    Args:
      model_instance: Instance to fetch datastore value from.
    Returns:
      Datastore representation of the model value in a form that is
      appropriate for storing in the datastore.
    """
    return self.__get__(model_instance, model_instance.__class__)
  def make_value_from_datastore(self, value):
    """Native representation of this property.
    Given a value retrieved from a datastore entity, return a value,
    possibly converted, to be stored on the model instance. Usually
    this returns the value unchanged, but a property class may
    override this when it uses a different datatype on the model
    instance than on the entity.
    This API is not quite symmetric with get_value_for_datastore(),
    because the model instance on which to store the converted value
    may not exist yet -- we may be collecting values to be passed to a
    model constructor.
    Args:
      value: value retrieved from the datastore entity.
    Returns:
      The value converted for use as a model instance attribute.
    """
    return value
  def _attr_name(self):
    """Attribute name we use for this property in model instances.
    DO NOT USE THIS METHOD.
    """
    return '_' + self.name
  # Python type of this property's values; exposed via datastore_type().
  data_type = str
  def datastore_type(self):
    """Deprecated backwards-compatible accessor method for self.data_type."""
    return self.data_type
class Model(object):
  """Model is the superclass of all object entities in the datastore.
  The programming model is to declare Python subclasses of the Model class,
  declaring datastore properties as class members of that class. So if you want
  to publish a story with title, body, and created date, you would do it like
  this:
    class Story(db.Model):
      title = db.StringProperty()
      body = db.TextProperty()
      created = db.DateTimeProperty(auto_now_add=True)
  A model instance can have a single parent. Model instances without any
  parent are root entities. It is possible to efficiently query for
  instances by their shared parent. All descendents of a single root
  instance also behave as a transaction group. This means that when you
  work one member of the group within a transaction all descendents of that
  root join the transaction. All operations within a transaction on this
  group are ACID.
  """
  # PropertiedClass wires up Property descriptors and registers each
  # subclass in the global kind map.
  __metaclass__ = PropertiedClass
  def __init__(self,
               parent=None,
               key_name=None,
               _app=None,
               _from_entity=False,
               **kwds):
    """Creates a new instance of this model.
    To create a new entity, you instantiate a model and then call put(),
    which saves the entity to the datastore:
       person = Person()
       person.name = 'Bret'
       person.put()
    You can initialize properties in the model in the constructor with keyword
    arguments:
       person = Person(name='Bret')
    We initialize all other properties to the default value (as defined by the
    properties in the model definition) if they are not provided in the
    constructor.
    Args:
      parent: Parent instance for this instance or None, indicating a top-
        level instance.
      key_name: Name for new model instance.
      _app: Intentionally undocumented.
      _from_entity: Intentionally undocumented.
      kwds: Keyword arguments mapping to properties of model.
    """
    if key_name == '':
      raise BadKeyError('Name cannot be empty.')
    elif key_name is not None and not isinstance(key_name, basestring):
      raise BadKeyError('Name must be string type, not %s' %
                        key_name.__class__.__name__)
    if parent is not None:
      if not isinstance(parent, (Model, Key)):
        raise TypeError('Expected Model type; received %s (is %s)' %
                        (parent, parent.__class__.__name__))
      if isinstance(parent, Model) and not parent.has_key():
        raise BadValueError(
            "%s instance must have a complete key before it can be used as a "
            "parent." % parent.kind())
      if isinstance(parent, Key):
        self._parent_key = parent
        self._parent = None
      else:
        self._parent_key = parent.key()
        self._parent = parent
    else:
      self._parent_key = None
      self._parent = None
    self._entity = None
    self._key_name = key_name
    self._app = _app
    # NOTE(review): the 'properties' local below is never used; the loop
    # calls self.properties() again.
    properties = self.properties()
    for prop in self.properties().values():
      if prop.name in kwds:
        value = kwds[prop.name]
      else:
        value = prop.default_value()
      try:
        prop.__set__(self, value)
      except DerivedPropertyError, e:
        # Only surface the error when the caller explicitly supplied a value
        # for a derived property; defaults and entity-loads are ignored.
        if prop.name in kwds and not _from_entity:
          raise
  def key(self):
    """Unique key for this entity.
    This property is only available if this entity is already stored in the
    datastore, so it is available if this entity was fetched returned from a
    query, or after put() is called the first time for new entities.
    Returns:
      Datastore key of persisted entity.
    Raises:
      NotSavedError when entity is not persistent.
    """
    if self.is_saved():
      return self._entity.key()
    elif self._key_name:
      # NOTE(review): 'parent_key' computed in this if/elif is never used;
      # the 'parent' expression below recomputes the same value.
      if self._parent_key:
        parent_key = self._parent_key
      elif self._parent:
        parent_key = self._parent.key()
      parent = self._parent_key or (self._parent and self._parent.key())
      return Key.from_path(self.kind(), self._key_name, parent=parent)
    else:
      raise NotSavedError()
  def _to_entity(self, entity):
    """Copies information from this model to provided entity.
    Args:
      entity: Entity to save information on.
    """
    for prop in self.properties().values():
      datastore_value = prop.get_value_for_datastore(self)
      if datastore_value == []:
        # Empty lists cannot be stored; remove the property from the entity
        # instead of writing it.
        try:
          del entity[prop.name]
        except KeyError:
          pass
      else:
        entity[prop.name] = datastore_value
  def _populate_internal_entity(self, _entity_class=datastore.Entity):
    """Populates self._entity, saving its state to the datastore.
    After this method is called, calling is_saved() will return True.
    Returns:
      Populated self._entity
    """
    self._entity = self._populate_entity(_entity_class=_entity_class)
    # Once an entity exists, the key name lives on it; drop the instance copy
    # (its absence is what makes from_entity-loaded instances 'saved').
    if hasattr(self, '_key_name'):
      del self._key_name
    return self._entity
  def put(self):
    """Writes this model instance to the datastore.
    If this instance is new, we add an entity to the datastore.
    Otherwise, we update this instance, and the key will remain the
    same.
    Returns:
      The key of the instance (either the existing key or a new key).
    Raises:
      TransactionFailedError if the data could not be committed.
    """
    self._populate_internal_entity()
    return datastore.Put(self._entity)
  # Alias for put().
  save = put
  def _populate_entity(self, _entity_class=datastore.Entity):
    """Internal helper -- Populate self._entity or create a new one
    if that one does not exist. Does not change any state of the instance
    other than the internal state of the entity.
    This method is separate from _populate_internal_entity so that it is
    possible to call to_xml without changing the state of an unsaved entity
    to saved.
    Returns:
      self._entity or a new Entity which is not stored on the instance.
    """
    if self.is_saved():
      entity = self._entity
    else:
      # Prefer an explicit parent key; fall back to the parent model's
      # entity; otherwise create a parentless (root) entity.
      if self._parent_key is not None:
        entity = _entity_class(self.kind(),
                               parent=self._parent_key,
                               name=self._key_name,
                               _app=self._app)
      elif self._parent is not None:
        entity = _entity_class(self.kind(),
                               parent=self._parent._entity,
                               name=self._key_name,
                               _app=self._app)
      else:
        entity = _entity_class(self.kind(),
                               name=self._key_name,
                               _app=self._app)
    self._to_entity(entity)
    return entity
  def delete(self):
    """Deletes this entity from the datastore.
    Raises:
      TransactionFailedError if the data could not be committed.
    """
    datastore.Delete(self.key())
    # Clearing the entity makes is_saved() report False again.
    self._entity = None
  def is_saved(self):
    """Determine if entity is persisted in the datastore.
    New instances of Model do not start out saved in the data. Objects which
    are saved to or loaded from the Datastore will have a True saved state.
    Returns:
      True if object has been persisted to the datastore, otherwise False.
    """
    return self._entity is not None
  def has_key(self):
    """Determine if this model instance has a complete key.
    Ids are not assigned until the data is saved to the Datastore, but
    instances with a key name always have a full key.
    Returns:
      True if the object has been persisted to the datastore or has a key_name,
      otherwise False.
    """
    return self.is_saved() or self._key_name
  def dynamic_properties(self):
    """Returns a list of all dynamic properties defined for instance."""
    # Plain models have no dynamic properties; Expando overrides this.
    return []
  def instance_properties(self):
    """Alias for dynamic_properties."""
    return self.dynamic_properties()
  def parent(self):
    """Get the parent of the model instance.
    Returns:
      Parent of contained entity or parent provided in constructor, None if
      instance has no parent.
    """
    if self._parent is None:
      # Lazily fetch and cache the parent model from the datastore.
      parent_key = self.parent_key()
      if parent_key is not None:
        self._parent = get(parent_key)
    return self._parent
  def parent_key(self):
    """Get the parent's key.
    This method is useful for avoiding a potential fetch from the datastore
    but still get information about the instances parent.
    Returns:
      Parent key of entity, None if there is no parent.
    """
    if self._parent_key is not None:
      return self._parent_key
    elif self._parent is not None:
      return self._parent.key()
    elif self._entity is not None:
      return self._entity.parent()
    else:
      return None
  def to_xml(self, _entity_class=datastore.Entity):
    """Generate an XML representation of this model instance.
    atom and gd:namespace properties are converted to XML according to their
    respective schemas. For more information, see:
      http://www.atomenabled.org/developers/syndication/
      http://code.google.com/apis/gdata/common-elements.html
    """
    entity = self._populate_entity(_entity_class)
    return entity.ToXml()
  @classmethod
  def get(cls, keys):
    """Fetch instance from the datastore of a specific Model type using key.
    We support Key objects and string keys (we convert them to Key objects
    automatically).
    Useful for ensuring that specific instance types are retrieved from the
    datastore. It also helps that the source code clearly indicates what
    kind of object is being retrieved. Example:
      story = Story.get(story_key)
    Args:
      keys: Key within datastore entity collection to find; or string key;
        or list of Keys or string keys.
    Returns:
      If a single key was given: a Model instance associated with key
      for provided class if it exists in the datastore, otherwise
      None; if a list of keys was given: a list whose items are either
      a Model instance or None.
    Raises:
      KindError if any of the retrieved objects are not instances of the
      type associated with call to 'get'.
    """
    # Delegates to the module-level get(); inside this body the bare name
    # 'get' resolves to the module global, not this classmethod.
    results = get(keys)
    if results is None:
      return None
    if isinstance(results, Model):
      instances = [results]
    else:
      instances = results
    for instance in instances:
      if not(instance is None or isinstance(instance, cls)):
        raise KindError('Kind %r is not a subclass of kind %r' %
                        (instance.kind(), cls.kind()))
    return results
  @classmethod
  def get_by_key_name(cls, key_names, parent=None):
    """Get instance of Model class by its key's name.
    Args:
      key_names: A single key-name or a list of key-names.
      parent: Parent of instances to get. Can be a model or key.
    """
    if isinstance(parent, Model):
      parent = parent.key()
    key_names, multiple = datastore.NormalizeAndTypeCheck(key_names, basestring)
    keys = [datastore.Key.from_path(cls.kind(), name, parent=parent)
            for name in key_names]
    if multiple:
      return get(keys)
    else:
      # Single name: unpack the one-element list so the module-level get()
      # returns a single instance rather than a list.
      return get(*keys)
  @classmethod
  def get_by_id(cls, ids, parent=None):
    """Get instance of Model class by id.
    Args:
      ids: A single id or a list of ids.
      parent: Parent of instances to get. Can be a model or key.
    """
    if isinstance(parent, Model):
      parent = parent.key()
    ids, multiple = datastore.NormalizeAndTypeCheck(ids, (int, long))
    keys = [datastore.Key.from_path(cls.kind(), id, parent=parent)
            for id in ids]
    if multiple:
      return get(keys)
    else:
      return get(*keys)
  @classmethod
  def get_or_insert(cls, key_name, **kwds):
    """Transactionally retrieve or create an instance of Model class.
    This acts much like the Python dictionary setdefault() method, where we
    first try to retrieve a Model instance with the given key name and parent.
    If it's not present, then we create a new instance (using the *kwds
    supplied) and insert that with the supplied key name.
    Subsequent calls to this method with the same key_name and parent will
    always yield the same entity (though not the same actual object instance),
    regardless of the *kwds supplied. If the specified entity has somehow
    been deleted separately, then the next call will create a new entity and
    return it.
    If the 'parent' keyword argument is supplied, it must be a Model instance.
    It will be used as the parent of the new instance of this Model class if
    one is created.
    This method is especially useful for having just one unique entity for
    a specific identifier. Insertion/retrieval is done transactionally, which
    guarantees uniqueness.
    Example usage:
      class WikiTopic(db.Model):
        creation_date = db.DatetimeProperty(auto_now_add=True)
        body = db.TextProperty(required=True)
      # The first time through we'll create the new topic.
      wiki_word = 'CommonIdioms'
      topic = WikiTopic.get_or_insert(wiki_word,
                                      body='This topic is totally new!')
      assert topic.key().name() == 'CommonIdioms'
      assert topic.body == 'This topic is totally new!'
      # The second time through will just retrieve the entity.
      overwrite_topic = WikiTopic.get_or_insert(wiki_word,
                                      body='A totally different message!')
      assert topic.key().name() == 'CommonIdioms'
      assert topic.body == 'This topic is totally new!'
    Args:
      key_name: Key name to retrieve or create.
      **kwds: Keyword arguments to pass to the constructor of the model class
        if an instance for the specified key name does not already exist. If
        an instance with the supplied key_name and parent already exists, the
        rest of these arguments will be discarded.
    Returns:
      Existing instance of Model class with the specified key_name and parent
      or a new one that has just been created.
    Raises:
      TransactionFailedError if the specified Model instance could not be
      retrieved or created transactionally (due to high contention, etc).
    """
    def txn():
      # Look up and (if absent) create within one transaction so concurrent
      # callers cannot both insert.
      entity = cls.get_by_key_name(key_name, parent=kwds.get('parent'))
      if entity is None:
        entity = cls(key_name=key_name, **kwds)
        entity.put()
      return entity
    return run_in_transaction(txn)
  @classmethod
  def all(cls):
    """Returns a query over all instances of this model from the datastore.
    Returns:
      Query that will retrieve all instances from entity collection.
    """
    return Query(cls)
  @classmethod
  def gql(cls, query_string, *args, **kwds):
    """Returns a query using GQL query string.
    See appengine/ext/gql for more information about GQL.
    Args:
      query_string: properly formatted GQL query string with the
        'SELECT * FROM <entity>' part omitted
      *args: rest of the positional arguments used to bind numeric references
        in the query.
      **kwds: dictionary-based arguments (for named parameters).
    """
    return GqlQuery('SELECT * FROM %s %s' % (cls.kind(), query_string),
                    *args, **kwds)
  @classmethod
  def _load_entity_values(cls, entity):
    """Load declared property values from entity.
    Args:
      entity: Entity which contain values to search dynamic properties for.
    Returns:
      Dictionary mapping declared property names to converted values.
    """
    entity_values = {}
    for prop in cls.properties().values():
      if prop.name in entity:
        try:
          value = prop.make_value_from_datastore(entity[prop.name])
          entity_values[prop.name] = value
        except KeyError:
          # NOTE(review): this guard looks redundant given the 'in' check
          # above; presumably it protects against entities that report a key
          # as present but raise on access -- confirm against Entity.
          entity_values[prop.name] = []
    return entity_values
  @classmethod
  def from_entity(cls, entity):
    """Converts the entity representation of this model to an instance.
    Converts datastore.Entity instance to an instance of cls.
    Args:
      entity: Entity loaded directly from datastore.
    Raises:
      KindError when cls is incorrect model for entity.
    """
    if cls.kind() != entity.kind():
      raise KindError('Class %s cannot handle kind \'%s\'' %
                      (repr(cls), entity.kind()))
    entity_values = cls._load_entity_values(entity)
    instance = cls(None, _from_entity=True, **entity_values)
    instance._entity = entity
    # A loaded instance is 'saved'; its key name lives on the entity, so the
    # instance-level copy set by __init__ must go (see key()/is_saved()).
    del instance._key_name
    return instance
  @classmethod
  def kind(cls):
    """Returns the datastore kind we use for this model.
    We just use the name of the model for now, ignoring potential collisions.
    """
    return cls.__name__
  @classmethod
  def entity_type(cls):
    """Soon to be removed alias for kind."""
    return cls.kind()
  @classmethod
  def properties(cls):
    """Returns a dictionary of all the properties defined for this model."""
    # Copy so callers cannot mutate the class-level property registry.
    return dict(cls._properties)
  @classmethod
  def fields(cls):
    """Soon to be removed alias for properties."""
    return cls.properties()
def get(keys):
  """Fetch the Model instance(s) for the given datastore key(s).
  Accepts Key objects and string-encoded keys (strings are converted to
  Key objects automatically).
  Args:
    keys: Key within datastore entity collection to find; or string key;
      or list of Keys or string keys.
  Returns:
    If a single key was given: a Model instance associated with key
    for if it exists in the datastore, otherwise None; if a list of
    keys was given: a list whose items are either a Model instance or
    None.
  """
  keys, multiple = datastore.NormalizeAndTypeCheckKeys(keys)
  try:
    entities = datastore.Get(keys)
  except datastore_errors.EntityNotFoundError:
    assert not multiple
    return None
  def _to_model(entity):
    # Resolve the entity's kind to its registered Model class and convert.
    if entity is None:
      return None
    return class_for_kind(entity.kind()).from_entity(entity)
  models = [_to_model(entity) for entity in entities]
  if not multiple:
    assert len(models) == 1
    return models[0]
  return models
def put(models):
  """Store one or more Model instances.
  Args:
    models: Model instance or list of Model instances.
  Returns:
    A Key or a list of Keys (corresponding to the argument's plurality).
  Raises:
    TransactionFailedError if the data could not be committed.
  """
  models, multiple = datastore.NormalizeAndTypeCheck(models, Model)
  keys = datastore.Put(
      [model._populate_internal_entity() for model in models])
  if not multiple:
    assert len(keys) == 1
    return keys[0]
  return keys
# Alias for put().
save = put
def delete(models):
  """Delete one or more Model instances.
  Args:
    models: Model instance, Key, string-encoded key, or a list of any of
      these, identifying the entities to delete.
  Raises:
    TransactionFailedError if the data could not be committed.
  """
  models_or_keys, multiple = datastore.NormalizeAndTypeCheck(
      models, (Model, Key, basestring))
  keys = []
  for model_or_key in models_or_keys:
    # Normalize every element to a Key.
    if isinstance(model_or_key, Model):
      key = model_or_key.key()
    elif isinstance(model_or_key, basestring):
      key = Key(model_or_key)
    else:
      key = model_or_key
    keys.append(key)
  datastore.Delete(keys)
class Expando(Model):
  """Dynamically expandable model.
  An Expando does not require (but can still benefit from) the definition
  of any properties before it can be used to store information in the
  datastore. Properties can be added to an expando object by simply
  performing an assignment. The assignment of properties is done on
  an instance by instance basis, so it is possible for one object of an
  expando type to have different properties from another or even the same
  properties with different types. It is still possible to define
  properties on an expando, allowing those properties to behave the same
  as on any other model.
  Example:
    import datetime
    class Song(db.Expando):
      title = db.StringProperty()
    crazy = Song(title='Crazy like a diamond',
                 author='Lucy Sky',
                 publish_date='yesterday',
                 rating=5.0)
    hoboken = Song(title='The man from Hoboken',
                   author=['Anthony', 'Lou'],
                   publish_date=datetime.datetime(1977, 5, 3))
    crazy.last_minute_note=db.Text('Get a train to the station.')
  Possible Uses:
    One use of an expando is to create an object without any specific
    structure and later, when your application mature and it in the right
    state, change it to a normal model object and define explicit properties.
  Additional exceptions for expando:
    Protected attributes (ones whose names begin with '_') cannot be used
    as dynamic properties. These are names that are reserved for protected
    transient (non-persisted) attributes.
  Order of lookup:
    When trying to set or access an attribute value, any other defined
    properties, such as methods and other values in __dict__ take precedence
    over values in the datastore.
    1 - Because it is not possible for the datastore to know what kind of
        property to store on an undefined expando value, setting a property to
        None is the same as deleting it from the expando.
    2 - Persistent variables on Expando must not begin with '_'. These
        variables considered to be 'protected' in Python, and are used
        internally.
    3 - Expando's dynamic properties are not able to store empty lists.
        Attempting to assign an empty list to a dynamic property will raise
        ValueError. Static properties on Expando can still support empty
        lists but like normal Model properties is restricted from using
        None.
  """
  # Class-level default; stays None until __init__ runs. __setattr__ and
  # __getattr__ tolerate the None state so attribute access works while the
  # base-class constructor is still executing.
  _dynamic_properties = None
  def __init__(self, parent=None, key_name=None, _app=None, **kwds):
    """Creates a new instance of this expando model.
    Args:
      parent: Parent instance for this instance or None, indicating a top-
        level instance.
      key_name: Name for new model instance.
      _app: Intentionally undocumented.
      kwds: Keyword arguments mapping to properties of model.
    """
    super(Expando, self).__init__(parent, key_name, _app, **kwds)
    self._dynamic_properties = {}
    # Keywords that do not match declared properties become dynamic
    # properties; None values are skipped (None means 'absent' on expandos).
    for prop, value in kwds.iteritems():
      if prop not in self.properties() and value is not None:
        setattr(self, prop, value)
  def __setattr__(self, key, value):
    """Dynamically set field values that are not defined.
    Tries to set the value on the object normally, but failing that
    sets the value on the contained entity.
    Args:
      key: Name of attribute.
      value: Value to set for attribute. Must be compatible with
        datastore.
    Raises:
      ValueError on attempt to assign empty list.
    """
    check_reserved_word(key)
    # Underscore-prefixed names and declared properties go through the
    # normal attribute path; everything else is a dynamic property.
    if key[:1] != '_' and key not in self.properties():
      if value == []:
        raise ValueError('Cannot store empty list to dynamic property %s' %
                         key)
      if type(value) not in _ALLOWED_EXPANDO_PROPERTY_TYPES:
        raise TypeError("Expando cannot accept values of type '%s'." %
                        type(value).__name__)
      if self._dynamic_properties is None:
        self._dynamic_properties = {}
      self._dynamic_properties[key] = value
    else:
      super(Expando, self).__setattr__(key, value)
  def __getattr__(self, key):
    """If no explicit attribute defined, retrieve value from entity.
    Tries to get the value on the object normally, but failing that
    retrieves value from contained entity.
    Args:
      key: Name of attribute.
    Raises:
      AttributeError when there is no attribute for key on object or
      contained entity.
    """
    # Only reached when normal attribute lookup has already failed.
    if self._dynamic_properties and key in self._dynamic_properties:
      return self._dynamic_properties[key]
    else:
      # NOTE(review): getattr on the super() proxy only resolves names the
      # base classes expose (descriptors/methods); confirm this is the
      # intended AttributeError path for unknown names.
      return getattr(super(Expando, self), key)
  def __delattr__(self, key):
    """Remove attribute from expando.
    Expando is not like normal entities in that undefined fields
    can be removed.
    Args:
      key: Dynamic property to be deleted.
    """
    if self._dynamic_properties and key in self._dynamic_properties:
      del self._dynamic_properties[key]
    else:
      object.__delattr__(self, key)
  def dynamic_properties(self):
    """Determine which properties are particular to instance of entity.
    Returns:
      Set of names which correspond only to the dynamic properties.
    """
    if self._dynamic_properties is None:
      return []
    return self._dynamic_properties.keys()
  def _to_entity(self, entity):
    """Store to entity, deleting dynamic properties that no longer exist.
    When the expando is saved, it is possible that a given property no longer
    exists. In this case, the property will be removed from the saved instance.
    Args:
      entity: Entity which will receive dynamic properties.
    """
    super(Expando, self)._to_entity(entity)
    if self._dynamic_properties is None:
      self._dynamic_properties = {}
    for key, value in self._dynamic_properties.iteritems():
      entity[key] = value
    # Drop entity values that are neither declared nor dynamic properties --
    # they were deleted from this instance since the entity was loaded.
    all_properties = set(self._dynamic_properties.iterkeys())
    all_properties.update(self.properties().iterkeys())
    for key in entity.keys():
      if key not in all_properties:
        del entity[key]
  @classmethod
  def _load_entity_values(cls, entity):
    """Load dynamic properties from entity.
    Expando needs to do a second pass to add the entity values which were
    ignored by Model because they didn't have an corresponding predefined
    property on the model.
    Args:
      entity: Entity which contain values to search dynamic properties for.
    """
    entity_values = super(Expando, cls)._load_entity_values(entity)
    for key, value in entity.iteritems():
      if key not in entity_values:
        # str() so the name is usable as a keyword argument in __init__.
        entity_values[str(key)] = value
    return entity_values
class _BaseQuery(object):
  """Base class shared by Query and GqlQuery."""

  def __init__(self, model_class):
    """Constructor.

    Args:
      model_class: Model class from which entities are constructed.
    """
    self._model_class = model_class

  def _get_query(self):
    """Subclass must override (and not call their super method).

    Returns:
      A datastore.Query instance representing the query.
    """
    raise NotImplementedError

  def run(self):
    """Iterator for this query.

    If you know the number of results you need, consider fetch() instead,
    or use a GQL query with a LIMIT clause. It's more efficient.

    Returns:
      Iterator yielding Model instances for this query.
    """
    raw_results = self._get_query().Run()
    return _QueryIterator(self._model_class, iter(raw_results))

  def __iter__(self):
    """Iterator for this query.

    If you know the number of results you need, consider fetch() instead,
    or use a GQL query with a LIMIT clause. It's more efficient.
    """
    return self.run()

  def get(self):
    """Get first result from this query.

    Beware: get() ignores the LIMIT clause on GQL queries.

    Returns:
      First result from running the query if there are any, else None.
    """
    for result in self.fetch(1):
      return result
    return None

  def count(self, limit=None):
    """Number of entities this query fetches.

    Beware: count() ignores the LIMIT clause on GQL queries.

    Args:
      limit, a number. If there are more results than this, stop short and
      just return this number. Providing this argument makes the count
      operation more efficient.

    Returns:
      Number of entities this query fetches.
    """
    return self._get_query().Count(limit=limit)

  def fetch(self, limit, offset=0):
    """Return a list of items selected using SQL-like limit and offset.

    Whenever possible, use fetch() instead of iterating over the query
    results with run() or __iter__(); fetch() is more efficient.

    Beware: fetch() ignores the LIMIT clause on GQL queries.

    Args:
      limit: Maximum number of results to return.
      offset: Optional number of results to skip first; default zero.

    Returns:
      A list of db.Model instances. There may be fewer than 'limit'
      results if there aren't enough results to satisfy the request.
    """
    integer_types = (int, long)
    if not (isinstance(limit, integer_types) and
            isinstance(offset, integer_types)):
      raise TypeError('Arguments to fetch() must be integers')
    if limit < 0 or offset < 0:
      raise ValueError('Arguments to fetch() must be >= 0')
    if limit == 0:
      return []
    entities = self._get_query().Get(limit, offset)
    return [self._model_class.from_entity(entity) for entity in entities]

  def __getitem__(self, arg):
    """Support for query[index] and query[start:stop].

    Beware: this ignores the LIMIT clause on GQL queries.

    Args:
      arg: Either a single integer, corresponding to the query[index]
        syntax, or a Python slice object, corresponding to the
        query[start:stop] or query[start:stop:step] syntax.

    Returns:
      A single Model instance when the argument is a single integer.
      A list of Model instances when the argument is a slice.
    """
    if isinstance(arg, slice):
      start, stop, step = arg.start, arg.stop, arg.step
      if stop is None:
        raise ValueError('Open-ended slices are not supported')
      if start is None:
        start = 0
      if step is None:
        step = 1
      if start < 0 or stop < 0 or step != 1:
        raise ValueError(
            'Only slices with start>=0, stop>=0, step==1 are supported')
      count = stop - start
      if count < 0:
        return []
      return self.fetch(count, start)
    if isinstance(arg, (int, long)):
      if arg < 0:
        raise ValueError('Only indices >= 0 are supported')
      results = self.fetch(1, arg)
      if not results:
        raise IndexError('The query returned fewer than %d results' % (arg+1))
      return results[0]
    raise TypeError('Only integer indices and slices are supported')
class _QueryIterator(object):
  """Wraps the datastore iterator to return Model instances.

  The datastore yields raw entities; this adapter converts each one to
  an instance of the appropriate Model subclass as it is consumed.
  """

  def __init__(self, model_class, datastore_iterator):
    """Iterator constructor.

    Args:
      model_class: Model class from which entities are constructed.
      datastore_iterator: Underlying datastore iterator.
    """
    self.__model_class = model_class
    self.__iterator = datastore_iterator

  def __iter__(self):
    """An iterator is its own iterator.

    Returns:
      Self.
    """
    return self

  def next(self):
    """Get next Model instance in query results.

    Returns:
      Next model instance.

    Raises:
      StopIteration when the underlying iterator is exhausted.
    """
    entity = self.__iterator.next()
    return self.__model_class.from_entity(entity)
def _normalize_query_parameter(value):
  """Make any necessary type conversions to a query parameter.

  The following conversions are made:
  - Model instances are converted to Key instances, so that querying
    reference properties works.
  - datetime.date objects are converted to datetime.datetime objects
    (see _date_to_datetime), so that querying date properties with date
    objects works.
  - datetime.time objects are converted to datetime.datetime objects
    (see _time_to_datetime), so that querying time properties with time
    objects works.

  Args:
    value: The query parameter value.

  Returns:
    The input value, or a converted value if value matches one of the
    conversions specified above.
  """
  if isinstance(value, Model):
    value = value.key()
  is_date_only = (isinstance(value, datetime.date) and
                  not isinstance(value, datetime.datetime))
  if is_date_only:
    return _date_to_datetime(value)
  if isinstance(value, datetime.time):
    return _time_to_datetime(value)
  return value
class Query(_BaseQuery):
  """A Query instance queries over instances of Models.

  You construct a query with a model class, like this:

     class Story(db.Model):
       title = db.StringProperty()
       date = db.DateTimeProperty()

     query = Query(Story)

  You modify a query with filters and orders like this:

     query.filter('title =', 'Foo')
     query.order('-date')
     query.ancestor(key_or_model_instance)

  Every query can return an iterator, so you access the results of a query
  by iterating over it:

     for story in query:
       print story.title

  For convenience, all of the filtering and ordering methods return "self",
  so the easiest way to use the query interface is to cascade all filters and
  orders in the iterator line like this:

     for story in Query(Story).filter('title =', 'Foo').order('-date'):
       print story.title
  """

  def __init__(self, model_class):
    """Constructs a query over instances of the given Model.

    Args:
      model_class: Model class to build query for.
    """
    super(Query, self).__init__(model_class)
    # Each dict in __query_sets is one conjunction of filters.  More than
    # one dict means the overall query is a disjunction ('IN' and '!='
    # filters expand into several sets) executed via datastore.MultiQuery.
    self.__query_sets = [{}]
    self.__orderings = []
    self.__ancestor = None

  def _get_query(self,
                 _query_class=datastore.Query,
                 _multi_query_class=datastore.MultiQuery):
    """Build the low-level datastore query (or multi-query).

    Args:
      _query_class: Class to use for a single conjunctive query.
      _multi_query_class: Class to use when multiple subqueries are needed.

    Returns:
      A _query_class instance when one query set suffices, otherwise a
      _multi_query_class combining one query per query set.
    """
    # One low-level query per filter conjunction, all sharing the ancestor.
    queries = []
    for query_set in self.__query_sets:
      query = _query_class(self._model_class.kind(), query_set)
      if self.__ancestor is not None:
        query.Ancestor(self.__ancestor)
      queries.append(query)
    # A custom query class must come with a matching multi-query class,
    # otherwise disjunctions would silently use the wrong implementation.
    if (_query_class != datastore.Query and
        _multi_query_class == datastore.MultiQuery):
      warnings.warn(
          'Custom _query_class specified without corresponding custom'
          ' _query_multi_class. Things will break if you use queries with'
          ' the "IN" or "!=" operators.', RuntimeWarning)
      if len(queries) > 1:
        raise datastore_errors.BadArgumentError(
            'Query requires multiple subqueries to satisfy. If _query_class'
            ' is overridden, _multi_query_class must also be overridden.')
    elif (_query_class == datastore.Query and
          _multi_query_class != datastore.MultiQuery):
      raise BadArgumentError('_query_class must also be overridden if'
                             ' _multi_query_class is overridden.')
    if len(queries) == 1:
      # Orderings can be applied directly to a single query.
      queries[0].Order(*self.__orderings)
      return queries[0]
    else:
      # The multi-query merges and orders results from every subquery.
      return _multi_query_class(queries, self.__orderings)

  def __filter_disjunction(self, operations, values):
    """Add a disjunction of several filters and several values to the query.

    This is implemented by duplicating queries and combining the
    results later.

    Args:
      operations: a string or list of strings. Each string contains a
        property name and an operator to filter by. The operators
        themselves must not require multiple queries to evaluate
        (currently, this means that 'in' and '!=' are invalid).
      values: a value or list of filter values, normalized by
        _normalize_query_parameter.
    """
    if not isinstance(operations, (list, tuple)):
      operations = [operations]
    if not isinstance(values, (list, tuple)):
      values = [values]
    # Build the cross product: every existing query set gets one copy per
    # (operation, value) pair, so the result is the OR of all combinations.
    new_query_sets = []
    for operation in operations:
      if operation.lower().endswith('in') or operation.endswith('!='):
        raise BadQueryError('Cannot use "in" or "!=" in a disjunction.')
      for query_set in self.__query_sets:
        for value in values:
          new_query_set = copy.copy(query_set)
          datastore._AddOrAppend(new_query_set, operation, value)
          new_query_sets.append(new_query_set)
    self.__query_sets = new_query_sets

  def filter(self, property_operator, value):
    """Add filter to query.

    Args:
      property_operator: string with the property and operator to filter by.
      value: the filter value.

    Returns:
      Self to support method chaining.
    """
    match = _FILTER_REGEX.match(property_operator)
    prop = match.group(1)
    if match.group(3) is not None:
      operator = match.group(3)
    else:
      # Bare property name defaults to an equality filter.
      operator = '=='
    if operator.lower() == 'in':
      # 'x in [a, b]' is expanded into the disjunction 'x = a OR x = b'.
      if not isinstance(value, (list, tuple)):
        raise BadValueError('Argument to the "in" operator must be a list')
      values = [_normalize_query_parameter(v) for v in value]
      self.__filter_disjunction(prop + ' =', values)
    else:
      if isinstance(value, (list, tuple)):
        raise BadValueError('Filtering on lists is not supported')
      if operator == '!=':
        # 'x != v' is expanded into the disjunction 'x < v OR x > v'.
        self.__filter_disjunction([prop + ' <', prop + ' >'],
                                  _normalize_query_parameter(value))
      else:
        value = _normalize_query_parameter(value)
        for query_set in self.__query_sets:
          datastore._AddOrAppend(query_set, property_operator, value)
    return self

  def order(self, property):
    """Set order of query result.

    To use descending order, prepend '-' (minus) to the property
    name, e.g., '-date' rather than 'date'.

    Args:
      property: Property to sort on.

    Returns:
      Self to support method chaining.

    Raises:
      PropertyError if invalid property name is provided.
    """
    if property.startswith('-'):
      property = property[1:]
      order = datastore.Query.DESCENDING
    else:
      order = datastore.Query.ASCENDING
    # Expando subclasses may order on dynamic properties, so the name
    # check only applies to fixed-schema models.
    if not issubclass(self._model_class, Expando):
      if (property not in self._model_class.properties() and
          property not in datastore_types._SPECIAL_PROPERTIES):
        raise PropertyError('Invalid property name \'%s\'' % property)
    self.__orderings.append((property, order))
    return self

  def ancestor(self, ancestor):
    """Sets an ancestor for this query.

    This restricts the query to only return results that descend from
    a given model instance. In other words, all of the results will
    have the ancestor as their parent, or parent's parent, etc.  The
    ancestor itself is also a possible result!

    Args:
      ancestor: Model or Key (that has already been saved)

    Returns:
      Self to support method chaining.

    Raises:
      TypeError if the argument isn't a Key or Model; NotSavedError
      if it is, but isn't saved yet.
    """
    if isinstance(ancestor, datastore.Key):
      if ancestor.has_id_or_name():
        self.__ancestor = ancestor
      else:
        raise NotSavedError()
    elif isinstance(ancestor, Model):
      if ancestor.has_key():
        self.__ancestor = ancestor.key()
      else:
        raise NotSavedError()
    else:
      raise TypeError('ancestor should be Key or Model')
    return self
class GqlQuery(_BaseQuery):
  """A Query class that uses GQL query syntax instead of .filter() etc."""

  def __init__(self, query_string, *args, **kwds):
    """Constructor.

    Args:
      query_string: Properly formatted GQL query string.
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.
    """
    from google.appengine.ext import gql
    app = kwds.pop('_app', None)
    self._proto_query = gql.GQL(query_string, _app=app)
    model_class = class_for_kind(self._proto_query._entity)
    super(GqlQuery, self).__init__(model_class)
    self.bind(*args, **kwds)

  def bind(self, *args, **kwds):
    """Bind arguments (positional or keyword) to the query.

    Note that you can also pass arguments directly to the query
    constructor.  Each time you call bind() the previous set of
    arguments is replaced with the new set.  This is useful because
    the hard work is in parsing the query; so if you expect to be
    using the same query with different sets of arguments, you should
    hold on to the GqlQuery() object and call bind() on it each time.

    Args:
      *args: Positional arguments used to bind numeric references in the query.
      **kwds: Dictionary-based arguments for named references.
    """
    self._args = [_normalize_query_parameter(arg) for arg in args]
    self._kwds = dict((name, _normalize_query_parameter(arg))
                      for name, arg in kwds.iteritems())

  def run(self):
    """Override _BaseQuery.run() so the LIMIT clause is handled properly."""
    raw_iterator = self._proto_query.Run(*self._args, **self._kwds)
    return _QueryIterator(self._model_class, iter(raw_iterator))

  def _get_query(self):
    """Build the bound low-level query from the parsed GQL."""
    return self._proto_query.Bind(self._args, self._kwds)
class TextProperty(Property):
"""A string that can be longer than 500 bytes.
This type should be used for large text values to make sure the datastore
has good performance for queries.
"""
def validate(self, value):
"""Validate text property.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'Text'.
"""
if value is not None and not isinstance(value, Text):
try:
value = Text(value)
except TypeError, err:
raise BadValueError('Property %s must be convertible '
'to a Text instance (%s)' % (self.name, err))
value = super(TextProperty, self).validate(value)
if value is not None and not isinstance(value, Text):
raise BadValueError('Property %s must be a Text instance' % self.name)
return value
data_type = Text
class StringProperty(Property):
  """A textual property, which can be multi- or single-line."""

  def __init__(self, verbose_name=None, multiline=False, **kwds):
    """Construct string property.

    Args:
      verbose_name: Verbose name is always first parameter.
      multiline: Whether carriage returns are permitted in the value.
    """
    super(StringProperty, self).__init__(verbose_name, **kwds)
    self.multiline = multiline

  def validate(self, value):
    """Validate string property.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is not a string, or contains newlines while
      the property is not multi-line.
    """
    value = super(StringProperty, self).validate(value)
    if value is not None and not isinstance(value, basestring):
      raise BadValueError(
          'Property %s must be a str or unicode instance, not a %s'
          % (self.name, type(value).__name__))
    if not self.multiline and value and '\n' in value:
      raise BadValueError('Property %s is not multi-line' % self.name)
    return value

  data_type = basestring
class _CoercingProperty(Property):
  """A Property subclass that extends validate() to coerce to self.data_type."""

  def validate(self, value):
    """Coerce values (except None) to self.data_type.

    Args:
      value: The value to be validated and coerced.

    Returns:
      The coerced and validated value. It is guaranteed that this is
      either None or an instance of self.data_type; otherwise an exception
      is raised.

    Raises:
      BadValueError if the value could not be validated or coerced.
    """
    value = super(_CoercingProperty, self).validate(value)
    if value is None or isinstance(value, self.data_type):
      return value
    return self.data_type(value)
class CategoryProperty(_CoercingProperty):
  """A property whose values are Category instances."""

  # Coercion from plain values is inherited from _CoercingProperty.validate().
  data_type = Category
class LinkProperty(_CoercingProperty):
  """A property whose values are Link instances."""

  def validate(self, value):
    """Validate and coerce the value, requiring a complete URL.

    Raises:
      BadValueError if the value lacks a scheme or network location.
    """
    value = super(LinkProperty, self).validate(value)
    if value is not None:
      split_result = urlparse.urlsplit(value)
      scheme, netloc = split_result[0], split_result[1]
      if not (scheme and netloc):
        raise BadValueError('Property %s must be a full URL (\'%s\')' %
                            (self.name, value))
    return value

  data_type = Link
URLProperty = LinkProperty  # Backwards/readability alias for LinkProperty.
class EmailProperty(_CoercingProperty):
  """A property whose values are Email instances."""

  # Coercion from plain values is inherited from _CoercingProperty.validate().
  data_type = Email
class GeoPtProperty(_CoercingProperty):
  """A property whose values are GeoPt instances."""

  # Coercion from plain values is inherited from _CoercingProperty.validate().
  data_type = GeoPt
class IMProperty(_CoercingProperty):
  """A property whose values are IM instances."""

  # Coercion from plain values is inherited from _CoercingProperty.validate().
  data_type = IM
class PhoneNumberProperty(_CoercingProperty):
  """A property whose values are PhoneNumber instances."""

  # Coercion from plain values is inherited from _CoercingProperty.validate().
  data_type = PhoneNumber
class PostalAddressProperty(_CoercingProperty):
  """A property whose values are PostalAddress instances."""

  # Coercion from plain values is inherited from _CoercingProperty.validate().
  data_type = PostalAddress
class BlobProperty(Property):
"""A string that can be longer than 500 bytes.
This type should be used for large binary values to make sure the datastore
has good performance for queries.
"""
def validate(self, value):
"""Validate blob property.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'Blob'.
"""
if value is not None and not isinstance(value, Blob):
try:
value = Blob(value)
except TypeError, err:
raise BadValueError('Property %s must be convertible '
'to a Blob instance (%s)' % (self.name, err))
value = super(BlobProperty, self).validate(value)
if value is not None and not isinstance(value, Blob):
raise BadValueError('Property %s must be a Blob instance' % self.name)
return value
data_type = Blob
class ByteStringProperty(Property):
"""A short (<=500 bytes) byte string.
This type should be used for short binary values that need to be indexed. If
you do not require indexing (regardless of length), use BlobProperty instead.
"""
def validate(self, value):
"""Validate ByteString property.
Returns:
A valid value.
Raises:
BadValueError if property is not instance of 'ByteString'.
"""
if value is not None and not isinstance(value, ByteString):
try:
value = ByteString(value)
except TypeError, err:
raise BadValueError('Property %s must be convertible '
'to a ByteString instance (%s)' % (self.name, err))
value = super(ByteStringProperty, self).validate(value)
if value is not None and not isinstance(value, ByteString):
raise BadValueError('Property %s must be a ByteString instance'
% self.name)
return value
data_type = ByteString
class DateTimeProperty(Property):
  """The base class of all of our date/time properties.

  We handle common operations, like converting between time tuples and
  datetime instances.
  """

  def __init__(self, verbose_name=None, auto_now=False, auto_now_add=False,
               **kwds):
    """Construct a DateTimeProperty.

    Args:
      verbose_name: Verbose name is always first parameter.
      auto_now: If True, the property is refreshed to the current time every
        time the instance is saved to the datastore.  Useful for properties
        that want to track the modification time of an instance.
      auto_now_add: If True, the property is set to the current time when the
        instance is created.  Useful for properties that record the creation
        time of an entity.
    """
    super(DateTimeProperty, self).__init__(verbose_name, **kwds)
    self.auto_now = auto_now
    self.auto_now_add = auto_now_add

  def validate(self, value):
    """Validate datetime.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is truthy and not an instance of self.data_type.
    """
    value = super(DateTimeProperty, self).validate(value)
    if value and not isinstance(value, self.data_type):
      raise BadValueError('Property %s must be a %s' %
                          (self.name, self.data_type.__name__))
    return value

  def default_value(self):
    """Default value for datetime.

    Returns:
      Value of now() as appropriate to the date-time instance if auto_now
      or auto_now_add is set, else the user-configured default value.
    """
    if self.auto_now or self.auto_now_add:
      return self.now()
    return Property.default_value(self)

  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    Returns:
      now() as appropriate to the date-time instance when auto_now is set,
      else the inherited behavior.
    """
    if not self.auto_now:
      return super(DateTimeProperty,
                   self).get_value_for_datastore(model_instance)
    return self.now()

  data_type = datetime.datetime

  @staticmethod
  def now():
    """Get now as a full datetime value.

    Returns:
      'now' as a whole timestamp, including both time and date.
    """
    return datetime.datetime.now()
def _date_to_datetime(value):
"""Convert a date to a datetime for datastore storage.
Args:
value: A datetime.date object.
Returns:
A datetime object with time set to 0:00.
"""
assert isinstance(value, datetime.date)
return datetime.datetime(value.year, value.month, value.day)
def _time_to_datetime(value):
"""Convert a time to a datetime for datastore storage.
Args:
value: A datetime.time object.
Returns:
A datetime object with date set to 1970-01-01.
"""
assert isinstance(value, datetime.time)
return datetime.datetime(1970, 1, 1,
value.hour, value.minute, value.second,
value.microsecond)
class DateProperty(DateTimeProperty):
  """A date property, which stores a date without a time."""

  @staticmethod
  def now():
    """Get now as a date datetime value.

    Returns:
      'date' part of 'now' only.
    """
    return datetime.datetime.now().date()

  def validate(self, value):
    """Validate date.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is not an instance of 'date', or if it is an
      instance of 'datetime' (which is a subclass of 'date', but for all
      practical purposes a different type).
    """
    value = super(DateProperty, self).validate(value)
    if isinstance(value, datetime.datetime):
      raise BadValueError('Property %s must be a %s, not a datetime' %
                          (self.name, self.data_type.__name__))
    return value

  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    The stored datetime.date is widened into a datetime.datetime with a
    zero time component, since that is what the datastore persists.
    """
    value = super(DateProperty, self).get_value_for_datastore(model_instance)
    if value is None:
      return None
    assert isinstance(value, datetime.date)
    return _date_to_datetime(value)

  def make_value_from_datastore(self, value):
    """Native representation of this property.

    The datetime.datetime retrieved from the entity is narrowed back to
    its datetime.date portion.
    """
    if value is None:
      return None
    assert isinstance(value, datetime.datetime)
    return value.date()

  data_type = datetime.date
class TimeProperty(DateTimeProperty):
  """A time property, which stores a time without a date."""

  @staticmethod
  def now():
    """Get now as a time datetime value.

    Returns:
      'time' part of 'now' only.
    """
    return datetime.datetime.now().time()

  def empty(self, value):
    """Is time property empty.

    "0:0" (midnight) is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None

  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    The stored datetime.time is widened into a datetime.datetime anchored
    at 1970-01-01, since that is what the datastore persists.
    """
    value = super(TimeProperty, self).get_value_for_datastore(model_instance)
    if value is None:
      return None
    assert isinstance(value, datetime.time), repr(value)
    return _time_to_datetime(value)

  def make_value_from_datastore(self, value):
    """Native representation of this property.

    The datetime.datetime retrieved from the entity is narrowed back to
    its datetime.time portion.
    """
    if value is None:
      return None
    assert isinstance(value, datetime.datetime)
    return value.time()

  data_type = datetime.time
class IntegerProperty(Property):
  """An integer property."""

  def validate(self, value):
    """Validate integer property.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is not an integer or long instance, or does
      not fit in a signed 64-bit range.
    """
    value = super(IntegerProperty, self).validate(value)
    if value is None:
      return value
    # bool is a subclass of int, but booleans are not valid integers here.
    if isinstance(value, bool) or not isinstance(value, (int, long)):
      raise BadValueError('Property %s must be an int or long, not a %s'
                          % (self.name, type(value).__name__))
    if not (-0x8000000000000000 <= value <= 0x7fffffffffffffff):
      raise BadValueError('Property %s must fit in 64 bits' % self.name)
    return value

  data_type = int

  def empty(self, value):
    """Is integer property empty.

    0 is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None
class RatingProperty(_CoercingProperty, IntegerProperty):
  """A property whose values are Rating instances."""

  # MRO runs _CoercingProperty.validate() first, whose super() call then
  # reaches IntegerProperty.validate() (including its 64-bit range check).
  # NOTE(review): presumably Rating is an int subclass so the integer
  # validation accepts coerced values -- confirm against datastore_types.
  data_type = Rating
class FloatProperty(Property):
  """A float property."""

  def validate(self, value):
    """Validate float.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is not None and not a float instance.
    """
    value = super(FloatProperty, self).validate(value)
    if value is None or isinstance(value, float):
      return value
    raise BadValueError('Property %s must be a float' % self.name)

  data_type = float

  def empty(self, value):
    """Is float property empty.

    0.0 is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None
class BooleanProperty(Property):
  """A boolean property."""

  def validate(self, value):
    """Validate boolean.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is not None and not a bool instance.
    """
    value = super(BooleanProperty, self).validate(value)
    if value is None or isinstance(value, bool):
      return value
    raise BadValueError('Property %s must be a bool' % self.name)

  data_type = bool

  def empty(self, value):
    """Is boolean property empty.

    False is not an empty value.

    Returns:
      True if value is None, else False.
    """
    return value is None
class UserProperty(Property):
  """A user property."""

  def __init__(self, verbose_name=None, name=None,
               required=False, validator=None, choices=None,
               auto_current_user=False, auto_current_user_add=False):
    """Initializes this Property with the given options.

    Note: this does *not* support the 'default' keyword argument.
    Use auto_current_user_add=True instead.

    Args:
      verbose_name: User friendly name of property.
      name: Storage name for property.  By default, uses attribute name
        as it is assigned in the Model sub-class.
      required: Whether property is required.
      validator: User provided method used for validation.
      choices: User provided set of valid property values.
      auto_current_user: If true, the value is set to the current user
        each time the entity is written to the datastore.
      auto_current_user_add: If true, the value is set to the current user
        the first time the entity is written to the datastore.
    """
    super(UserProperty, self).__init__(verbose_name, name,
                                       required=required,
                                       validator=validator,
                                       choices=choices)
    self.auto_current_user = auto_current_user
    self.auto_current_user_add = auto_current_user_add

  def validate(self, value):
    """Validate user.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is not None and not a users.User instance.
    """
    value = super(UserProperty, self).validate(value)
    if value is not None and not isinstance(value, users.User):
      raise BadValueError('Property %s must be a User' % self.name)
    return value

  def default_value(self):
    """Default value for user.

    Returns:
      Value of users.get_current_user() if auto_current_user or
      auto_current_user_add is set; else None.  (But *not* the default
      implementation, since we don't support the 'default' keyword
      argument.)
    """
    if not (self.auto_current_user or self.auto_current_user_add):
      return None
    return users.get_current_user()

  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    Returns:
      Value of users.get_current_user() if auto_current_user is set;
      else the default implementation.
    """
    if not self.auto_current_user:
      return super(UserProperty, self).get_value_for_datastore(model_instance)
    return users.get_current_user()

  data_type = users.User
class ListProperty(Property):
  """A property that stores a list of things.

  This is a parameterized property; the parameter must be a valid
  non-list data type, and all items must conform to this type.
  """

  def __init__(self, item_type, verbose_name=None, default=None, **kwds):
    """Construct ListProperty.

    Args:
      item_type: Type for the list items; must be one of the allowed property
        types.
      verbose_name: Optional verbose name.
      default: Optional default value; if omitted, an empty list is used.
      **kwds: Optional additional keyword arguments, passed to base class.
        Note that the only permissible value for 'required' is True.

    Raises:
      TypeError if item_type is not a type object.
      ValueError if item_type is not an allowed property type, or if
        'required' is passed as anything other than True.
    """
    # Plain str is widened to basestring so both str and unicode items pass.
    if item_type is str:
      item_type = basestring
    if not isinstance(item_type, type):
      raise TypeError('Item type should be a type object')
    if item_type not in _ALLOWED_PROPERTY_TYPES:
      raise ValueError('Item type %s is not acceptable' % item_type.__name__)
    # A list property is always required; an "empty" value is just [].
    if kwds.get('required', True) is not True:
      raise ValueError('List values must be required')
    if default is None:
      default = []
    self.item_type = item_type
    super(ListProperty, self).__init__(verbose_name,
                                       required=True,
                                       default=default,
                                       **kwds)

  def validate(self, value):
    """Validate list.

    Returns:
      A valid value.

    Raises:
      BadValueError if value is not a list whose items are instances of
      the item_type given to the constructor.
    """
    value = super(ListProperty, self).validate(value)
    if value is None:
      return value
    if not isinstance(value, list):
      raise BadValueError('Property %s must be a list' % self.name)
    return self.validate_list_contents(value)

  def validate_list_contents(self, value):
    """Validates that all items in the list are of the correct type.

    Returns:
      The validated list.

    Raises:
      BadValueError if the list contains items that are not instances of
      the item_type given to the constructor.
    """
    # int items may also be longs (and vice versa); accept either.
    if self.item_type in (int, long):
      item_type = (int, long)
    else:
      item_type = self.item_type
    for item in value:
      if isinstance(item, item_type):
        continue
      if item_type == (int, long):
        raise BadValueError('Items in the %s list must all be integers.' %
                            self.name)
      raise BadValueError(
          'Items in the %s list must all be %s instances' %
          (self.name, self.item_type.__name__))
    return value

  def empty(self, value):
    """Is list property empty.

    [] is not an empty value.

    Returns:
      True if value is None, else false.
    """
    return value is None

  data_type = list

  def default_value(self):
    """Default value for list.

    Because the property supplied to 'default' is a static value,
    that value must be shallow copied to prevent all fields with
    default values from sharing the same instance.

    Returns:
      Copy of the default value.
    """
    default = super(ListProperty, self).default_value()
    return list(default)

  def get_value_for_datastore(self, model_instance):
    """Get value from property to send to datastore.

    Returns:
      validated list appropriate to save in the datastore.
    """
    raw = super(ListProperty, self).get_value_for_datastore(model_instance)
    value = self.validate_list_contents(raw)
    if self.validator:
      self.validator(value)
    return value
class StringListProperty(ListProperty):
  """A property that stores a list of strings.

  A shorthand for the most common type of ListProperty.
  """

  def __init__(self, verbose_name=None, default=None, **kwds):
    """Construct StringListProperty.

    Args:
      verbose_name: Optional verbose name.
      default: Optional default value; if omitted, an empty list is used.
      **kwds: Optional additional keyword arguments, passed to ListProperty().
    """
    # Simply a ListProperty fixed to basestring items (str or unicode).
    super(StringListProperty, self).__init__(basestring,
                                             verbose_name=verbose_name,
                                             default=default,
                                             **kwds)
class ReferenceProperty(Property):
  """A property that represents a many-to-one reference to another model.

  For example, a reference property in model A that refers to model B forms
  a many-to-one relationship from A to B: every instance of A refers to a
  single B instance, and every B instance can have many A instances refer
  to it.
  """
def __init__(self,
reference_class=None,
verbose_name=None,
collection_name=None,
**attrs):
"""Construct ReferenceProperty.
Args:
reference_class: Which model class this property references.
verbose_name: User friendly name of property.
collection_name: If provided, alternate name of collection on
reference_class to store back references. Use this to allow
a Model to have multiple fields which refer to the same class.
"""
super(ReferenceProperty, self).__init__(verbose_name, **attrs)
self.collection_name = collection_name
if reference_class is None:
reference_class = Model
if not ((isinstance(reference_class, type) and
issubclass(reference_class, Model)) or
reference_class is _SELF_REFERENCE):
raise KindError('reference_class must be Model or _SELF_REFERENCE')
self.reference_class = self.data_type = reference_class
def __property_config__(self, model_class, property_name):
"""Loads all of the references that point to this model.
We need to do this to create the ReverseReferenceProperty properties for
this model and create the <reference>_set attributes on the referenced
model, e.g.:
class Story(db.Model):
title = db.StringProperty()
class Comment(db.Model):
story = db.ReferenceProperty(Story)
story = Story.get(id)
print [c for c in story.comment_set]
In this example, the comment_set property was created based on the reference
from Comment to Story (which is inherently one to many).
Args:
model_class: Model class which will have its reference properties
initialized.
property_name: Name of property being configured.
Raises:
DuplicatePropertyError if referenced class already has the provided
collection name as a property.
"""
super(ReferenceProperty, self).__property_config__(model_class,
property_name)
if self.reference_class is _SELF_REFERENCE:
self.reference_class = self.data_type = model_class
if self.collection_name is None:
self.collection_name = '%s_set' % (model_class.__name__.lower())
if hasattr(self.reference_class, self.collection_name):
raise DuplicatePropertyError('Class %s already has property %s'
% (self.reference_class.__name__,
self.collection_name))
setattr(self.reference_class,
self.collection_name,
_ReverseReferenceProperty(model_class, property_name))
def __get__(self, model_instance, model_class):
"""Get reference object.
This method will fetch unresolved entities from the datastore if
they are not already loaded.
Returns:
ReferenceProperty to Model object if property is set, else None.
"""
if model_instance is None:
return self
if hasattr(model_instance, self.__id_attr_name()):
reference_id = getattr(model_instance, self.__id_attr_name())
else:
reference_id = None
if reference_id is not None:
resolved = getattr(model_instance, self.__resolved_attr_name())
if resolved is not None:
return resolved
else:
instance = get(reference_id)
if instance is None:
raise Error('ReferenceProperty failed to be resolved')
setattr(model_instance, self.__resolved_attr_name(), instance)
return instance
else:
return None
def __set__(self, model_instance, value):
"""Set reference."""
value = self.validate(value)
if value is not None:
if isinstance(value, datastore.Key):
setattr(model_instance, self.__id_attr_name(), value)
setattr(model_instance, self.__resolved_attr_name(), None)
else:
setattr(model_instance, self.__id_attr_name(), value.key())
setattr(model_instance, self.__resolved_attr_name(), value)
else:
setattr(model_instance, self.__id_attr_name(), None)
setattr(model_instance, self.__resolved_attr_name(), None)
def get_value_for_datastore(self, model_instance):
"""Get key of reference rather than reference itself."""
return getattr(model_instance, self.__id_attr_name())
def validate(self, value):
"""Validate reference.
Returns:
A valid value.
Raises:
BadValueError for the following reasons:
- Value is not saved.
- Object not of correct model type for reference.
"""
if isinstance(value, datastore.Key):
return value
if value is not None and not value.has_key():
raise BadValueError(
'%s instance must have a complete key before it can be stored as a '
'reference' % self.reference_class.kind())
value = super(ReferenceProperty, self).validate(value)
if value is not None and not isinstance(value, self.reference_class):
raise KindError('Property %s must be an instance of %s' %
(self.name, self.reference_class.kind()))
return value
def __id_attr_name(self):
"""Get attribute of referenced id.
Returns:
Attribute where to store id of referenced entity.
"""
return self._attr_name()
def __resolved_attr_name(self):
"""Get attribute of resolved attribute.
The resolved attribute is where the actual loaded reference instance is
stored on the referring model instance.
Returns:
Attribute name of where to store resolved reference model instance.
"""
return '_RESOLVED' + self._attr_name()
# Convenience alias for ReferenceProperty.
Reference = ReferenceProperty
def SelfReferenceProperty(verbose_name=None, collection_name=None, **attrs):
  """Create a self reference.
  Function for declaring a self referencing property on a model.
  Example:
    class HtmlNode(db.Model):
      parent = db.SelfReferenceProperty('Parent', 'children')
  Args:
    verbose_name: User friendly name of property.
    collection_name: Name of collection on model.
  Raises:
    ConfigurationError if reference_class provided as parameter.
  """
  # reference_class is implied by the sentinel; passing it explicitly is a
  # caller error.
  if 'reference_class' in attrs:
    raise ConfigurationError(
        'Do not provide reference_class to self-reference.')
  return ReferenceProperty(
      _SELF_REFERENCE, verbose_name, collection_name, **attrs)
# Convenience alias for SelfReferenceProperty.
SelfReference = SelfReferenceProperty
class _ReverseReferenceProperty(Property):
  """The inverse of the Reference property above.
  We construct reverse references automatically for the model to which
  the Reference property is pointing to create the one-to-many property for
  that model. For example, if you put a Reference property in model A that
  refers to model B, we automatically create a _ReverseReference property in
  B called a_set that can fetch all of the model A instances that refer to
  that instance of model B.
  """
  def __init__(self, model, prop):
    """Constructor for reverse reference.
    Constructor does not take standard values of other property types.
    Args:
      model: Model that this property is a collection of.
      prop: Foreign property on referred model that points back to this
        properties entity.
    """
    self.__referring_model = model
    self.__referring_prop = prop
  def __get__(self, model_instance, model_class):
    """Fetches collection of model instances of this collection property."""
    # Accessed on the class itself: return the descriptor, not a query.
    if model_instance is None:
      return self
    query = Query(self.__referring_model)
    filter_string = self.__referring_prop + ' ='
    return query.filter(filter_string, model_instance.key())
  def __set__(self, model_instance, value):
    """Not possible to set a new collection."""
    raise BadValueError('Virtual property is read-only')
# Transaction helpers re-exported from the datastore module, in both
# lower_case and CamelCase spellings.
run_in_transaction = datastore.RunInTransaction
run_in_transaction_custom_retries = datastore.RunInTransactionCustomRetries
RunInTransaction = run_in_transaction
RunInTransactionCustomRetries = run_in_transaction_custom_retries
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""GQL -- the SQL-like interface to the datastore.
Defines the GQL-based query class, which is a query mechanism
for the datastore which provides an alternative model for interacting with
data stored.
"""
import calendar
import datetime
import logging
import re
import time
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
# Re-export for callers that build multi-backend queries directly.
MultiQuery = datastore.MultiQuery
# One step below DEBUG: GQL tracing is very chatty, so it is kept off even
# at normal debug verbosity.
LOG_LEVEL = logging.DEBUG - 1
# Unix epoch as a naive datetime; base point for time-only casts.
_EPOCH = datetime.datetime.utcfromtimestamp(0)
def Execute(query_string, *args, **keyword_args):
  """Execute command to parse and run the query.
  Calls the query parser code to build a proto-query which is an
  unbound query. The proto-query is then bound into a real query and
  executed.
  Args:
    query_string: properly formatted GQL query string.
    args: rest of the positional arguments used to bind numeric references in
      the query.
    keyword_args: dictionary-based arguments (for named parameters).
  Returns:
    the result of running the query with *args.
  """
  # '_app' is an internal option, not a bindable query parameter: strip it
  # before binding.
  app = keyword_args.pop('_app', None)
  bound_query = GQL(query_string, _app=app).Bind(args, keyword_args)
  return bound_query.Run()
class GQL(object):
  """A GQL interface to the datastore.
  GQL is a SQL-like language which supports more object-like semantics
  in a language that is familiar to SQL users. The language supported by
  GQL will change over time, but will start off with fairly simple
  semantics.
  - reserved words are case insensitive
  - names are case sensitive
  The syntax for SELECT is fairly straightforward:
  SELECT * FROM <entity>
    [WHERE <condition> [AND <condition> ...]]
    [ORDER BY <property> [ASC | DESC] [, <property> [ASC | DESC] ...]]
    [LIMIT [<offset>,]<count>]
    [OFFSET <offset>]
    [HINT (ORDER_FIRST | HINT FILTER_FIRST | HINT ANCESTOR_FIRST)]
  <condition> := <property> {< | <= | > | >= | = | != | IN} <value>
  <condition> := <property> {< | <= | > | >= | = | != | IN} CAST(<value>)
  <condition> := <property> IN (<value>, ...)
  <condition> := ANCESTOR IS <entity or key>
  Currently the parser is LL(1) because of the simplicity of the grammar
  (as it is largely predictive with one token lookahead).
  The class is implemented using some basic regular expression tokenization
  to pull out reserved tokens and then the recursive descent parser will act
  as a builder for the pre-compiled query. This pre-compiled query is then
  bound to arguments before executing the query.
  Initially, three parameter passing mechanisms are supported when calling
  Execute():
  - Positional parameters
  Execute('SELECT * FROM Story WHERE Author = :1 AND Date > :2')
  - Named parameters
  Execute('SELECT * FROM Story WHERE Author = :author AND Date > :date')
  - Literals (numbers, and strings)
  Execute('SELECT * FROM Story WHERE Author = \'James\'')
  Users are also given the option of doing type conversions to other datastore
  types (e.g. db.Email, db.GeoPt). The language provides a conversion function
  which allows the caller to express conversions of both literals and
  parameters. The current conversion operators are:
  - GEOPT(float, float)
  - USER(str)
  - KEY(kind, id/name[, kind, id/name...])
  - DATETIME(year, month, day, hour, minute, second)
  - DATETIME('YYYY-MM-DD HH:MM:SS')
  - DATE(year, month, day)
  - DATE('YYYY-MM-DD')
  - TIME(hour, minute, second)
  - TIME('HH:MM:SS')
  We will properly serialize and quote all values.
  It should also be noted that there are some caveats to the queries that can
  be expressed in the syntax. The parser will attempt to make these clear as
  much as possible, but some of the caveats include:
  - There is no OR operation. In most cases, you should prefer to use IN to
    express the idea of wanting data matching one of a set of values.
  - You cannot express inequality operators on multiple different properties
  - You can only have one != operator per query (related to the previous
    rule).
  - The IN and != operators must be used carefully because they can
    dramatically raise the amount of work done by the datastore. As such,
    there is a limit on the number of elements you can use in IN statements.
    This limit is set fairly low. Currently, a max of 30 datastore queries is
    allowed in a given GQL query. != translates into 2x the number of
    datastore queries, and IN multiplies by the number of elements in the
    clause (so having two IN clauses, one with 5 elements, the other with 6
    will cause 30 queries to occur).
  - Literals can take the form of basic types or as type-cast literals. On
    the other hand, literals within lists can currently only take the form of
    simple types (strings, integers, floats).
  SELECT * will return an iterable set of entries, but other operations (schema
  queries, updates, inserts or field selections) will return alternative
  result types.
  """
TOKENIZE_REGEX = re.compile(r"""
(?:'[^'\n\r]*')+|
<=|>=|!=|=|<|>|
:\w+|
,|
\*|
-?\d+(?:\.\d+)?|
\w+|
\(|\)|
\S+
""", re.VERBOSE | re.IGNORECASE)
MAX_ALLOWABLE_QUERIES = datastore.MAX_ALLOWABLE_QUERIES
__ANCESTOR = -1
def __init__(self, query_string, _app=None, _auth_domain=None):
"""Ctor.
Parses the input query into the class as a pre-compiled query, allowing
for a later call to Bind() to bind arguments as defined in the
documentation.
Args:
query_string: properly formatted GQL query string.
Raises:
datastore_errors.BadQueryError: if the query is not parsable.
"""
self._entity = ''
self.__filters = {}
self.__has_ancestor = False
self.__orderings = []
self.__offset = -1
self.__limit = -1
self.__hint = ''
self.__app = _app
self.__auth_domain = _auth_domain
self.__symbols = self.TOKENIZE_REGEX.findall(query_string)
self.__next_symbol = 0
if not self.__Select():
raise datastore_errors.BadQueryError(
'Unable to parse query')
else:
pass
  def Bind(self, args, keyword_args):
    """Bind the existing query to the argument list.
    Assumes that the input args are first positional, then a dictionary.
    So, if the query contains references to :1, :2 and :name, it is assumed
    that arguments are passed as (:1, :2, dict) where dict contains a mapping
    [name] -> value.
    Args:
      args: the arguments to bind to the object's unbound references.
      keyword_args: dictionary-based arguments (for named parameters).
    Raises:
      datastore_errors.BadArgumentError: when arguments are left unbound
        (missing from the inputs arguments) or when arguments do not match the
        expected type.
    Returns:
      The bound datastore.Query object. This may take the form of a MultiQuery
      object if the GQL query will require multiple backend queries to satisfy.
    """
    num_args = len(args)
    input_args = frozenset(xrange(num_args))
    used_args = set()
    queries = []
    # IN/!= conditions expand into multiple backend queries; enumerate the
    # full set of filter combinations first.
    enumerated_queries = self.EnumerateQueries(used_args, args, keyword_args)
    if enumerated_queries:
      query_count = len(enumerated_queries)
    else:
      query_count = 1
    for i in xrange(query_count):
      queries.append(datastore.Query(self._entity, _app=self.__app))
    logging.log(LOG_LEVEL,
                'Binding with %i positional args %s and %i keywords %s'
                , len(args), args, len(keyword_args), keyword_args)
    # Apply every non-multi filter to every backend query; multi filters were
    # already folded into enumerated_queries above.
    for ((identifier, condition), value_list) in self.__filters.iteritems():
      for (operator, params) in value_list:
        value = self.__Operate(args, keyword_args, used_args, operator, params)
        if not self.__IsMultiQuery(condition):
          for query in queries:
            self.__AddFilterToQuery(identifier, condition, value, query)
    # Any positional argument not consumed by some filter is a caller error.
    unused_args = input_args - used_args
    if unused_args:
      unused_values = [unused_arg + 1 for unused_arg in unused_args]
      raise datastore_errors.BadArgumentError('Unused positional arguments %s' %
                                              unused_values)
    if enumerated_queries:
      logging.log(LOG_LEVEL,
                  'Multiple Queries Bound: %s',
                  enumerated_queries)
      for (query, enumerated_query) in zip(queries, enumerated_queries):
        query.update(enumerated_query)
    if self.__orderings:
      for query in queries:
        query.Order(*tuple(self.__orderings))
    if query_count > 1:
      return MultiQuery(queries, self.__orderings)
    else:
      return queries[0]
  def EnumerateQueries(self, used_args, args, keyword_args):
    """Create a list of all multi-query filter combinations required.
    To satisfy multi-query requests ("IN" and "!=" filters), multiple queries
    may be required. This code will enumerate the power-set of all multi-query
    filters.
    Args:
      used_args: set of used positional parameters (output only variable used in
        reporting for unused positional args)
      args: positional arguments referenced by the proto-query in self. This
        assumes the input is a tuple (and can also be called with a varargs
        param).
      keyword_args: dict of keyword arguments referenced by the proto-query in
        self.
    Returns:
      A list of maps [(identifier, condition) -> value] of all queries needed
      to satisfy the GQL query with the given input arguments.
    """
    enumerated_queries = []
    # __AddMultiQuery mutates enumerated_queries in place, multiplying it by
    # the fan-out of each IN/!= filter; non-multi conditions leave it alone.
    for ((identifier, condition), value_list) in self.__filters.iteritems():
      for (operator, params) in value_list:
        value = self.__Operate(args, keyword_args, used_args, operator, params)
        self.__AddMultiQuery(identifier, condition, value, enumerated_queries)
    return enumerated_queries
def __CastError(self, operator, values, error_message):
"""Query building error for type cast operations.
Args:
operator: the failed cast operation
values: value list passed to the cast operator
error_message: string to emit as part of the 'Cast Error' string.
Raises:
BadQueryError and passes on an error message from the caller. Will raise
BadQueryError on all calls.
"""
raise datastore_errors.BadQueryError(
'Type Cast Error: unable to cast %r with operation %s (%s)' %
(values, operator.upper(), error_message))
def __CastNop(self, values):
"""Return values[0] if it exists -- default for most where clauses."""
if len(values) != 1:
self.__CastError(values, 'nop', 'requires one and only one value')
else:
return values[0]
def __CastList(self, values):
"""Return the full list of values -- only useful for IN clause."""
if values:
return values
else:
return None
def __CastKey(self, values):
"""Cast input values to Key() class using encoded string or tuple list."""
if not len(values) % 2:
return datastore_types.Key.from_path(_app=self.__app, *values)
elif len(values) == 1 and isinstance(values[0], str):
return datastore_types.Key(values[0])
else:
self.__CastError('KEY', values,
'requires an even number of operands'
'or a single encoded string')
def __CastGeoPt(self, values):
"""Cast input to GeoPt() class using 2 input parameters."""
if len(values) != 2:
self.__CastError('GEOPT', values, 'requires 2 input parameters')
return datastore_types.GeoPt(*values)
def __CastUser(self, values):
"""Cast to User() class using the email address in values[0]."""
if len(values) != 1:
self.__CastError(values, 'user', 'requires one and only one value')
else:
return users.User(email=values[0], _auth_domain=self.__auth_domain)
def __EncodeIfNeeded(self, value):
"""Simple helper function to create an str from possibly unicode strings.
Args:
value: input string (should pass as an instance of str or unicode).
"""
if isinstance(value, unicode):
return value.encode('utf8')
else:
return value
  def __CastDate(self, values):
    """Cast DATE values (year/month/day) from input (to datetime.datetime).
    Casts DATE input values formulated as ISO string or time tuple inputs.
    Args:
      values: either a single string with ISO time representation or 3
        integer valued date tuple (year, month, day).
    Returns:
      datetime.datetime value parsed from the input values.
    """
    if len(values) == 1:
      value = self.__EncodeIfNeeded(values[0])
      if isinstance(value, str):
        try:
          time_tuple = time.strptime(value, '%Y-%m-%d')[0:6]
        except ValueError, err:
          # __CastError always raises, so time_tuple is guaranteed to be
          # bound on every path that reaches the constructor below.
          self.__CastError('DATE', values, err)
      else:
        self.__CastError('DATE', values, 'Single input value not a string')
    elif len(values) == 3:
      # Tuple form: midnight on the given (year, month, day).
      time_tuple = (values[0], values[1], values[2], 0, 0, 0)
    else:
      self.__CastError('DATE', values,
                       'function takes 1 string or 3 integer values')
    try:
      return datetime.datetime(*time_tuple)
    except ValueError, err:
      self.__CastError('DATE', values, err)
def __CastTime(self, values):
"""Cast TIME values (hour/min/sec) from input (to datetime.datetime).
Casts TIME input values formulated as ISO string or time tuple inputs.
Args:
values: either a single string with ISO time representation or 1-4
integer valued time tuple (hour), (hour, minute),
(hour, minute, second), (hour, minute, second, microsec).
Returns:
datetime.datetime value parsed from the input values.
"""
if len(values) == 1:
value = self.__EncodeIfNeeded(values[0])
if isinstance(value, str):
try:
time_tuple = time.strptime(value, '%H:%M:%S')
except ValueError, err:
self.__CastError('TIME', values, err)
time_tuple = (1970, 1, 1) + time_tuple[3:]
time_tuple = time_tuple[0:6]
elif isinstance(value, int):
time_tuple = (1970, 1, 1, value)
else:
self.__CastError('TIME', values,
'Single input value not a string or integer hour')
elif len(values) <= 4:
time_tuple = (1970, 1, 1) + tuple(values)
else:
self.__CastError('TIME', values, err)
try:
return datetime.datetime(*time_tuple)
except ValueError, err:
self.__CastError('TIME', values, err)
  def __CastDatetime(self, values):
    """Cast DATETIME values (string or tuple) from input (to datetime.datetime).
    Casts DATETIME input values formulated as ISO string or datetime tuple
    inputs.
    Args:
      values: either a single string with ISO representation or 3-7
        integer valued time tuple (year, month, day, ...).
    Returns:
      datetime.datetime value parsed from the input values.
    """
    if len(values) == 1:
      value = self.__EncodeIfNeeded(values[0])
      if isinstance(value, str):
        try:
          time_tuple = time.strptime(str(value), '%Y-%m-%d %H:%M:%S')[0:6]
        except ValueError, err:
          # __CastError always raises; time_tuple is bound on all paths
          # that reach the constructor below.
          self.__CastError('DATETIME', values, err)
      else:
        self.__CastError('DATETIME', values, 'Single input value not a string')
    else:
      # Tuple form is handed to datetime.datetime unchanged; a bad arity or
      # value surfaces as ValueError below.
      time_tuple = values
    try:
      return datetime.datetime(*time_tuple)
    except ValueError, err:
      self.__CastError('DATETIME', values, err)
  def __Operate(self, args, keyword_args, used_args, operator, params):
    """Create a single output value from params using the operator string given.
    Args:
      args,keyword_args: arguments passed in for binding purposes (used in
        binding positional and keyword based arguments).
      used_args: set of numeric arguments accessed in this call.
        values are ints representing used zero-based positional arguments.
        used as an output parameter with new used arguments appended to the
        list.
      operator: string representing the operator to use 'nop' just returns
        the first value from params.
      params: parameter list to operate on (positional references, named
        references, or literals).
    Returns:
      A value which can be used as part of a GQL filter description (either a
      list of datastore types -- for use with IN, or a single datastore type --
      for use with other filters).
    """
    if not params:
      return None
    param_values = []
    for param in params:
      if isinstance(param, Literal):
        value = param.Get()
      else:
        value = self.__GetParam(param, args, keyword_args)
        # Positional references are 1-based in the query text; record them
        # 0-based for Bind()'s unused-argument bookkeeping.
        if isinstance(param, int):
          used_args.add(param - 1)
        logging.log(LOG_LEVEL, 'found param for bind: %s value: %s',
                    param, value)
      param_values.append(value)
    logging.log(LOG_LEVEL, '%s Operating on values: %s',
                operator, repr(param_values))
    if operator in self.__cast_operators:
      # Cast functions are stored unbound; pass self explicitly.
      result = self.__cast_operators[operator](self, param_values)
    else:
      # __Error always raises, so result is bound on every path below.
      self.__Error('Operation %s is invalid' % operator)
    return result
def __IsMultiQuery(self, condition):
"""Return whether or not this condition could require multiple queries."""
return condition.lower() in ('in', '!=')
def __GetParam(self, reference, args, keyword_args):
"""Get the specified parameter from the input arguments.
Args:
reference: id for a filter reference in the filter list (string or
number)
args: positional args passed in by the user (tuple of arguments, indexed
numerically by "reference")
keyword_args: dict of keyword based arguments (strings in "reference")
Returns:
The specified param from the input list.
Raises:
BadArgumentError if the referenced argument doesn't exist.
"""
num_args = len(args)
if isinstance(reference, int):
if reference <= num_args:
return args[reference - 1]
else:
raise datastore_errors.BadArgumentError(
'Missing argument for bind, requires argument #%i, '
'but only has %i args.' % (reference, num_args))
elif isinstance(reference, str):
if reference in keyword_args:
return keyword_args[reference]
else:
raise datastore_errors.BadArgumentError(
'Missing named arguments for bind, requires argument %s' %
reference)
else:
assert False, 'Unknown reference %s' % reference
  def __AddMultiQuery(self, identifier, condition, value, enumerated_queries):
    """Helper function to add a muti-query to previously enumerated queries.
    Args:
      identifier: property being filtered by this condition
      condition: filter condition (e.g. !=,in)
      value: value being bound
      enumerated_queries: in/out list of already bound queries -> expanded list
        with the full enumeration required to satisfy the condition query
    Raises:
      BadArgumentError if the filter is invalid (namely non-list with IN)
    """
    def CloneQueries(queries, n):
      """Do a full copy of the queries and append to the end of the queries.
      Does an in-place replication of the input list and sorts the result to
      put copies next to one-another.
      Args:
        queries: list of all filters to clone
        n: number of copies to make
      Returns:
        Number of iterations needed to fill the structure
      """
      if not enumerated_queries:
        # First multi-filter encountered: seed n empty filter maps.
        for i in xrange(n):
          queries.append({})
        return 1
      else:
        old_size = len(queries)
        tmp_queries = []
        for i in xrange(n - 1):
          [tmp_queries.append(filter_map.copy()) for filter_map in queries]
        queries.extend(tmp_queries)
        # Sorting groups the n copies of each original map adjacently, so
        # index arithmetic below can address each copy.
        queries.sort()
        return old_size
    if condition == '!=':
      # A != B expands to (A < B) OR (A > B): two queries per existing clone.
      if len(enumerated_queries) * 2 > self.MAX_ALLOWABLE_QUERIES:
        raise datastore_errors.BadArgumentError(
            'Cannot satisfy query -- too many IN/!= values.')
      num_iterations = CloneQueries(enumerated_queries, 2)
      for i in xrange(num_iterations):
        enumerated_queries[2 * i]['%s <' % identifier] = value
        enumerated_queries[2 * i + 1]['%s >' % identifier] = value
    elif condition.lower() == 'in':
      if not isinstance(value, list):
        raise datastore_errors.BadArgumentError('List expected for "IN" filter')
      in_list_size = len(value)
      if len(enumerated_queries) * in_list_size > self.MAX_ALLOWABLE_QUERIES:
        raise datastore_errors.BadArgumentError(
            'Cannot satisfy query -- too many IN/!= values.')
      num_iterations = CloneQueries(enumerated_queries, in_list_size)
      # One equality query per element in the IN list, per existing clone.
      for clone_num in xrange(num_iterations):
        for value_num in xrange(len(value)):
          list_val = value[value_num]
          query_num = in_list_size * clone_num + value_num
          filt = '%s =' % identifier
          enumerated_queries[query_num][filt] = list_val
def __AddFilterToQuery(self, identifier, condition, value, query):
"""Add a filter condition to a query based on the inputs.
Args:
identifier: name of the property (or self.__ANCESTOR for ancestors)
condition: test condition
value: test value passed from the caller
query: query to add the filter to
"""
if identifier != self.__ANCESTOR:
filter_condition = '%s %s' % (identifier, condition)
logging.log(LOG_LEVEL, 'Setting filter on "%s" with value "%s"',
filter_condition, value.__class__)
datastore._AddOrAppend(query, filter_condition, value)
else:
logging.log(LOG_LEVEL, 'Setting ancestor query for ancestor %s', value)
query.Ancestor(value)
def Run(self, *args, **keyword_args):
"""Runs this query.
Similar to datastore.Query.Run.
Assumes that limit == -1 or > 0
Args:
args: arguments used to bind to references in the compiled query object.
keyword_args: dictionary-based arguments (for named parameters).
Returns:
A list of results if a query count limit was passed.
A result iterator if no limit was given.
"""
bind_results = self.Bind(args, keyword_args)
offset = 0
if self.__offset != -1:
offset = self.__offset
if self.__limit == -1:
it = bind_results.Run()
try:
for i in xrange(offset):
it.next()
except StopIteration:
pass
return it
else:
res = bind_results.Get(self.__limit, offset)
return res
  # Read-only accessors over the parsed proto-query state.
  def filters(self):
    """Return the compiled list of filters."""
    return self.__filters
  def hint(self):
    """Return the datastore hint."""
    return self.__hint
  def limit(self):
    """Return numerical result count limit."""
    return self.__limit
  def orderings(self):
    """Return the result ordering list."""
    return self.__orderings
__iter__ = Run
__quoted_string_regex = re.compile(r'((?:\'[^\'\n\r]*\')+)')
__ordinal_regex = re.compile(r':(\d+)$')
__named_regex = re.compile(r':(\w+)$')
__identifier_regex = re.compile(r'(\w+)$')
__conditions_regex = re.compile(r'(<=|>=|!=|=|<|>|is|in)$', re.IGNORECASE)
__number_regex = re.compile(r'(\d+)$')
__cast_regex = re.compile(
r'(geopt|user|key|date|time|datetime)$', re.IGNORECASE)
__cast_operators = {
'geopt': __CastGeoPt,
'user': __CastUser,
'key': __CastKey,
'datetime': __CastDatetime,
'date': __CastDate,
'time': __CastTime,
'list': __CastList,
'nop': __CastNop,
}
def __Error(self, error_message):
"""Generic query error.
Args:
error_message: string to emit as part of the 'Parse Error' string.
Raises:
BadQueryError and passes on an error message from the caller. Will raise
BadQueryError on all calls to __Error()
"""
if self.__next_symbol >= len(self.__symbols):
raise datastore_errors.BadQueryError(
'Parse Error: %s at end of string' % error_message)
else:
raise datastore_errors.BadQueryError(
'Parse Error: %s at symbol %s' %
(error_message, self.__symbols[self.__next_symbol]))
def __Accept(self, symbol_string):
"""Advance the symbol and return true iff the next symbol matches input."""
if self.__next_symbol < len(self.__symbols):
logging.log(LOG_LEVEL, '\t%s', self.__symbols)
logging.log(LOG_LEVEL, '\tExpect: %s Got: %s',
symbol_string, self.__symbols[self.__next_symbol].upper())
if self.__symbols[self.__next_symbol].upper() == symbol_string:
self.__next_symbol += 1
return True
return False
def __Expect(self, symbol_string):
"""Require that the next symbol matches symbol_string, or emit an error.
Args:
symbol_string: next symbol expected by the caller
Raises:
BadQueryError if the next symbol doesn't match the parameter passed in.
"""
if not self.__Accept(symbol_string):
self.__Error('Unexpected Symbol: %s' % symbol_string)
  def __AcceptRegex(self, regex):
    """Advance and return the symbol if the next symbol matches the regex.
    Args:
      regex: the compiled regular expression to attempt acceptance on.
    Returns:
      The first group in the expression to allow for convenient access
      to simple matches. Requires () around some objects in the regex.
      None if no match is found.
    """
    if self.__next_symbol < len(self.__symbols):
      match_symbol = self.__symbols[self.__next_symbol]
      logging.log(LOG_LEVEL, '\taccept %s on symbol %s', regex, match_symbol)
      match = regex.match(match_symbol)
      if match:
        self.__next_symbol += 1
        # NOTE: a match with no capturing groups still consumes the symbol
        # but falls through to return None.
        if match.groups():
          matched_string = match.group(1)
          logging.log(LOG_LEVEL, '\taccepted %s', matched_string)
          return matched_string
    return None
def __AcceptTerminal(self):
"""Only accept an empty string.
Returns:
True
Raises:
BadQueryError if there are unconsumed symbols in the query.
"""
if self.__next_symbol < len(self.__symbols):
self.__Error('Expected no additional symbols')
return True
def __Select(self):
"""Consume the SELECT clause and everything that follows it.
Assumes SELECT * to start.
Transitions to a FROM clause.
Returns:
True if parsing completed okay.
"""
self.__Expect('SELECT')
self.__Expect('*')
return self.__From()
def __From(self):
"""Consume the FROM clause.
Assumes a single well formed entity in the clause.
Assumes FROM <Entity Name>
Transitions to a WHERE clause.
Returns:
True if parsing completed okay.
"""
self.__Expect('FROM')
entity = self.__AcceptRegex(self.__identifier_regex)
if entity:
self._entity = entity
return self.__Where()
else:
self.__Error('Identifier Expected')
return False
def __Where(self):
"""Consume the WHERE cluase.
These can have some recursion because of the AND symbol.
Returns:
True if parsing the WHERE clause completed correctly, as well as all
subsequent clauses
"""
if self.__Accept('WHERE'):
return self.__FilterList()
return self.__OrderBy()
  def __FilterList(self):
    """Consume the filter list (remainder of the WHERE clause)."""
    identifier = self.__AcceptRegex(self.__identifier_regex)
    if not identifier:
      self.__Error('Invalid WHERE Identifier')
      return False
    condition = self.__AcceptRegex(self.__conditions_regex)
    if not condition:
      self.__Error('Invalid WHERE Condition')
      return False
    self.__CheckFilterSyntax(identifier, condition)
    # Try, in order: a bound reference (:1 / :name), a literal value, and
    # finally a type-cast expression such as DATE(...) or KEY(...).
    if not self.__AddSimpleFilter(identifier, condition, self.__Reference()):
      if not self.__AddSimpleFilter(identifier, condition, self.__Literal()):
        type_cast = self.__TypeCast()
        if (not type_cast or
            not self.__AddProcessedParameterFilter(identifier, condition,
                                                   *type_cast)):
          self.__Error('Invalid WHERE condition')
    # AND chains recurse back into the filter list.
    if self.__Accept('AND'):
      return self.__FilterList()
    return self.__OrderBy()
def __GetValueList(self):
"""Read in a list of parameters from the tokens and return the list.
Reads in a set of tokens, but currently only accepts literals, positional
parameters, or named parameters. Or empty list if nothing was parsed.
Returns:
A list of values parsed from the input, with values taking the form of
strings (unbound, named reference), integers (unbound, positional
reference), or Literal() (bound value usable directly as part of a filter
with no additional information).
"""
params = []
while True:
reference = self.__Reference()
if reference:
params.append(reference)
else:
literal = self.__Literal()
if literal:
params.append(literal)
else:
self.__Error('Parameter list requires literal or reference parameter')
if not self.__Accept(','):
break
return params
def __CheckFilterSyntax(self, identifier, condition):
"""Check that filter conditions are valid and throw errors if not.
Args:
identifier: identifier being used in comparison
condition: string form of the comparison operator used in the filter
"""
if identifier.lower() == 'ancestor':
if condition.lower() == 'is':
if self.__has_ancestor:
self.__Error('Only one ANCESTOR IS" clause allowed')
else:
self.__Error('"IS" expected to follow "ANCESTOR"')
elif condition.lower() == 'is':
self.__Error('"IS" can only be used when comparing against "ANCESTOR"')
def __AddProcessedParameterFilter(self, identifier, condition,
operator, parameters):
"""Add a filter with post-processing required.
Args:
identifier: property being compared.
condition: comparison operation being used with the property (e.g. !=).
operator: operation to perform on the parameters before adding the filter.
parameters: list of bound parameters passed to 'operator' before creating
the filter. When using the parameters as a pass-through, pass 'nop'
into the operator field and the first value will be used unprocessed).
Returns:
True if the filter was okay to add.
"""
if parameters is None:
return False
if parameters[0] is None:
return False
logging.log(LOG_LEVEL, 'Adding Filter %s %s %s',
identifier, condition, repr(parameters))
filter_rule = (identifier, condition)
if identifier.lower() == 'ancestor':
self.__has_ancestor = True
filter_rule = (self.__ANCESTOR, 'is')
assert condition.lower() == 'is'
if condition.lower() != 'in' and operator == 'list':
sef.__Error('Only IN can process a list of values')
self.__filters.setdefault(filter_rule, []).append((operator, parameters))
return True
def __AddSimpleFilter(self, identifier, condition, parameter):
"""Add a filter to the query being built (no post-processing on parameter).
Args:
identifier: identifier being used in comparison
condition: string form of the comparison operator used in the filter
parameter: ID of the reference being made or a value of type Literal
Returns:
True if the filter could be added.
False otherwise.
"""
return self.__AddProcessedParameterFilter(identifier, condition,
'nop', [parameter])
def __Reference(self):
"""Consume a parameter reference and return it.
Consumes a reference to a positional parameter (:1) or a named parameter
(:email). Only consumes a single reference (not lists).
Returns:
The name of the reference (integer for positional parameters or string
for named parameters) to a bind-time parameter.
"""
logging.log(LOG_LEVEL, 'Try Reference')
reference = self.__AcceptRegex(self.__ordinal_regex)
if reference:
return int(reference)
else:
reference = self.__AcceptRegex(self.__named_regex)
if reference:
return reference
return None
  def __Literal(self):
    """Parse literals from our token list.

    Tries, in order: integer, float, quoted string, then TRUE/FALSE. The
    ordering matters: int() is attempted before float() so '3' stays an int.

    Returns:
      The parsed literal from the input string wrapped in Literal() (currently
      either a string, integer, floating point value, or boolean), or None if
      the next token is not a literal.
    """
    logging.log(LOG_LEVEL, 'Try Literal')
    literal = None
    try:
      literal = int(self.__symbols[self.__next_symbol])
    except ValueError:
      pass
    else:
      self.__next_symbol += 1  # only consume the token if int() succeeded

    if literal is None:
      try:
        literal = float(self.__symbols[self.__next_symbol])
      except ValueError:
        pass
      else:
        self.__next_symbol += 1

    if literal is None:
      literal = self.__AcceptRegex(self.__quoted_string_regex)
      if literal:
        # Strip the surrounding quotes and collapse '' escapes to '.
        literal = literal[1:-1].replace("''", "'")

    if literal is None:
      if self.__Accept('TRUE'):
        literal = True
      elif self.__Accept('FALSE'):
        literal = False

    if literal is not None:
      return Literal(literal)
    else:
      return None
def __TypeCast(self):
"""Check if the next operation is a type-cast and return the cast if so.
Casting operators look like simple function calls on their parameters. This
code returns the cast operator found and the list of parameters provided by
the user to complete the cast operation.
Returns:
A tuple (cast operator, params) which represents the cast operation
requested and the parameters parsed from the cast clause.
None - if there is no TypeCast function.
"""
logging.log(LOG_LEVEL, 'Try Type Cast')
cast_op = self.__AcceptRegex(self.__cast_regex)
if not cast_op:
if self.__Accept('('):
cast_op = 'list'
else:
return None
else:
cast_op = cast_op.lower()
self.__Expect('(')
params = self.__GetValueList()
self.__Expect(')')
logging.log(LOG_LEVEL, 'Got casting operator %s with params %s',
cast_op, repr(params))
return (cast_op, params)
def __OrderBy(self):
"""Consume the ORDER BY clause."""
if self.__Accept('ORDER'):
self.__Expect('BY')
return self.__OrderList()
return self.__Limit()
def __OrderList(self):
"""Consume variables and sort order for ORDER BY clause."""
identifier = self.__AcceptRegex(self.__identifier_regex)
if identifier:
if self.__Accept('DESC'):
self.__orderings.append((identifier, datastore.Query.DESCENDING))
elif self.__Accept('ASC'):
self.__orderings.append((identifier, datastore.Query.ASCENDING))
else:
self.__orderings.append((identifier, datastore.Query.ASCENDING))
else:
self.__Error('Invalid ORDER BY Property')
logging.log(LOG_LEVEL, self.__orderings)
if self.__Accept(','):
return self.__OrderList()
return self.__Limit()
def __Limit(self):
"""Consume the LIMIT clause."""
if self.__Accept('LIMIT'):
maybe_limit = self.__AcceptRegex(self.__number_regex)
if maybe_limit:
if self.__Accept(','):
self.__offset = int(maybe_limit)
if self.__offset < 0:
self.__Error('Bad offset in LIMIT Value')
else:
logging.log(LOG_LEVEL, 'Set offset to %i', self.__offset)
maybe_limit = self.__AcceptRegex(self.__number_regex)
self.__limit = int(maybe_limit)
if self.__limit < 1:
self.__Error('Bad Limit in LIMIT Value')
else:
logging.log(LOG_LEVEL, 'Set limit to %i', self.__limit)
else:
self.__Error('Non-number limit in LIMIT clause')
return self.__Offset()
def __Offset(self):
"""Consume the OFFSET clause."""
if self.__Accept('OFFSET'):
if self.__offset != -1:
self.__Error('Offset already defined in LIMIT clause')
offset = self.__AcceptRegex(self.__number_regex)
if offset:
self.__offset = int(offset)
if self.__offset < 0:
self.__Error('Bad offset in OFFSET clause')
else:
logging.log(LOG_LEVEL, 'Set offset to %i', self.__offset)
else:
self.__Error('Non-number offset in OFFSET clause')
return self.__Hint()
def __Hint(self):
"""Consume the HINT clause.
Requires one of three options (mirroring the rest of the datastore):
HINT ORDER_FIRST
HINT ANCESTOR_FIRST
HINT FILTER_FIRST
Returns:
True if the hint clause and later clauses all parsed okay
"""
if self.__Accept('HINT'):
if self.__Accept('ORDER_FIRST'):
self.__hint = 'ORDER_FIRST'
elif self.__Accept('FILTER_FIRST'):
self.__hint = 'FILTER_FIRST'
elif self.__Accept('ANCESTOR_FIRST'):
self.__hint = 'ANCESTOR_FIRST'
else:
self.__Error('Unknown HINT')
return False
return self.__AcceptTerminal()
class Literal(object):
  """Wrapper distinguishing bound literal values from unbound parameters.

  A thin wrapper around basic and datastore types so the parser can tell an
  already-bound value apart from a positional/named parameter reference.
  """

  def __init__(self, value):
    self.__value = value

  def Get(self):
    """Return the wrapped literal value."""
    return self.__value

  def __repr__(self):
    return 'Literal(%r)' % (self.__value,)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""GQL -- the SQL-like interface to the datastore.
Defines the GQL-based query class, which is a query mechanism
for the datastore which provides an alternative model for interacting with
data stored.
"""
import calendar
import datetime
import logging
import re
import time
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
# Re-exported so callers of this module can reference MultiQuery directly.
MultiQuery = datastore.MultiQuery

# Verbose parser tracing is logged just below DEBUG so it stays hidden
# unless a handler is explicitly configured for this level.
LOG_LEVEL = logging.DEBUG - 1

# Unix epoch as a naive datetime. NOTE(review): not referenced within this
# part of the file -- presumably used by time conversions elsewhere; verify.
_EPOCH = datetime.datetime.utcfromtimestamp(0)
def Execute(query_string, *args, **keyword_args):
  """Parse, bind and run a GQL query in one step.

  Builds an unbound proto-query from query_string, binds the supplied
  arguments into it, and executes the resulting query.

  Args:
    query_string: properly formatted GQL query string.
    args: positional arguments used to bind numeric references in the query.
    keyword_args: dictionary-based arguments (for named parameters).

  Returns:
    the result of running the query with the bound arguments.
  """
  app = keyword_args.pop('_app', None)
  return GQL(query_string, _app=app).Bind(args, keyword_args).Run()
class GQL(object):
"""A GQL interface to the datastore.
GQL is a SQL-like language which supports more object-like semantics
in a langauge that is familiar to SQL users. The language supported by
GQL will change over time, but will start off with fairly simple
semantics.
- reserved words are case insensitive
- names are case sensitive
The syntax for SELECT is fairly straightforward:
SELECT * FROM <entity>
[WHERE <condition> [AND <condition> ...]]
[ORDER BY <property> [ASC | DESC] [, <property> [ASC | DESC] ...]]
[LIMIT [<offset>,]<count>]
[OFFSET <offset>]
[HINT (ORDER_FIRST | HINT FILTER_FIRST | HINT ANCESTOR_FIRST)]
<condition> := <property> {< | <= | > | >= | = | != | IN} <value>
<condition> := <property> {< | <= | > | >= | = | != | IN} CAST(<value>)
<condition> := <property> IN (<value>, ...)
<condition> := ANCESTOR IS <entity or key>
Currently the parser is LL(1) because of the simplicity of the grammer
(as it is largely predictive with one token lookahead).
The class is implemented using some basic regular expression tokenization
to pull out reserved tokens and then the recursive descent parser will act
as a builder for the pre-compiled query. This pre-compiled query is then
bound to arguments before executing the query.
Initially, three parameter passing mechanisms are supported when calling
Execute():
- Positional parameters
Execute('SELECT * FROM Story WHERE Author = :1 AND Date > :2')
- Named parameters
Execute('SELECT * FROM Story WHERE Author = :author AND Date > :date')
- Literals (numbers, and strings)
Execute('SELECT * FROM Story WHERE Author = \'James\'')
Users are also given the option of doing type conversions to other datastore
types (e.g. db.Email, db.GeoPt). The language provides a conversion function
which allows the caller to express conversions of both literals and
parameters. The current conversion operators are:
- GEOPT(float, float)
- USER(str)
- KEY(kind, id/name[, kind, id/name...])
- DATETIME(year, month, day, hour, minute, second)
- DATETIME('YYYY-MM-DD HH:MM:SS')
- DATE(year, month, day)
- DATE('YYYY-MM-DD')
- TIME(hour, minute, second)
- TIME('HH:MM:SS')
We will properly serialize and quote all values.
It should also be noted that there are some caveats to the queries that can
be expressed in the syntax. The parser will attempt to make these clear as
much as possible, but some of the caveats include:
- There is no OR operation. In most cases, you should prefer to use IN to
express the idea of wanting data matching one of a set of values.
- You cannot express inequality operators on multiple different properties
- You can only have one != operator per query (related to the previous
rule).
- The IN and != operators must be used carefully because they can
dramatically raise the amount of work done by the datastore. As such,
there is a limit on the number of elements you can use in IN statements.
This limit is set fairly low. Currently, a max of 30 datastore queries is
allowed in a given GQL query. != translates into 2x the number of
datastore queries, and IN multiplies by the number of elements in the
clause (so having two IN clauses, one with 5 elements, the other with 6
will cause 30 queries to occur).
- Literals can take the form of basic types or as type-cast literals. On
the other hand, literals within lists can currently only take the form of
simple types (strings, integers, floats).
SELECT * will return an iterable set of entries, but other operations (schema
queries, updates, inserts or field selections) will return alternative
result types.
"""
TOKENIZE_REGEX = re.compile(r"""
(?:'[^'\n\r]*')+|
<=|>=|!=|=|<|>|
:\w+|
,|
\*|
-?\d+(?:\.\d+)?|
\w+|
\(|\)|
\S+
""", re.VERBOSE | re.IGNORECASE)
MAX_ALLOWABLE_QUERIES = datastore.MAX_ALLOWABLE_QUERIES
__ANCESTOR = -1
def __init__(self, query_string, _app=None, _auth_domain=None):
"""Ctor.
Parses the input query into the class as a pre-compiled query, allowing
for a later call to Bind() to bind arguments as defined in the
documentation.
Args:
query_string: properly formatted GQL query string.
Raises:
datastore_errors.BadQueryError: if the query is not parsable.
"""
self._entity = ''
self.__filters = {}
self.__has_ancestor = False
self.__orderings = []
self.__offset = -1
self.__limit = -1
self.__hint = ''
self.__app = _app
self.__auth_domain = _auth_domain
self.__symbols = self.TOKENIZE_REGEX.findall(query_string)
self.__next_symbol = 0
if not self.__Select():
raise datastore_errors.BadQueryError(
'Unable to parse query')
else:
pass
  def Bind(self, args, keyword_args):
    """Bind the existing query to the argument list.

    Assumes that the input args are first positional, then a dictionary.
    So, if the query contains references to :1, :2 and :name, it is assumed
    that arguments are passed as (:1, :2, dict) where dict contains a mapping
    [name] -> value.

    Args:
      args: the arguments to bind to the object's unbound references.
      keyword_args: dictionary-based arguments (for named parameters).

    Raises:
      datastore_errors.BadArgumentError: when arguments are left unbound
        (missing from the input arguments) or when arguments do not match the
        expected type.

    Returns:
      The bound datastore.Query object. This may take the form of a MultiQuery
      object if the GQL query will require multiple backend queries to
      satisfy.
    """
    num_args = len(args)
    input_args = frozenset(xrange(num_args))
    used_args = set()

    queries = []
    # IN/!= filters may require several datastore queries; enumerate the
    # per-query filter maps first so we know how many Query copies to make.
    enumerated_queries = self.EnumerateQueries(used_args, args, keyword_args)
    if enumerated_queries:
      query_count = len(enumerated_queries)
    else:
      query_count = 1

    for i in xrange(query_count):
      queries.append(datastore.Query(self._entity, _app=self.__app))

    logging.log(LOG_LEVEL,
                'Binding with %i positional args %s and %i keywords %s'
                , len(args), args, len(keyword_args), keyword_args)
    # Bind the simple (single-query) filters onto every query copy; the
    # multi-query conditions are applied below via enumerated_queries.
    for ((identifier, condition), value_list) in self.__filters.iteritems():
      for (operator, params) in value_list:
        value = self.__Operate(args, keyword_args, used_args, operator, params)
        if not self.__IsMultiQuery(condition):
          for query in queries:
            self.__AddFilterToQuery(identifier, condition, value, query)

    # Reject positional arguments the query never referenced (reported with
    # their 1-based ids, matching the :N syntax).
    unused_args = input_args - used_args
    if unused_args:
      unused_values = [unused_arg + 1 for unused_arg in unused_args]
      raise datastore_errors.BadArgumentError('Unused positional arguments %s' %
                                              unused_values)

    if enumerated_queries:
      logging.log(LOG_LEVEL,
                  'Multiple Queries Bound: %s',
                  enumerated_queries)

      for (query, enumerated_query) in zip(queries, enumerated_queries):
        query.update(enumerated_query)

    if self.__orderings:
      for query in queries:
        query.Order(*tuple(self.__orderings))

    if query_count > 1:
      return MultiQuery(queries, self.__orderings)
    else:
      return queries[0]
def EnumerateQueries(self, used_args, args, keyword_args):
"""Create a list of all multi-query filter combinations required.
To satisfy multi-query requests ("IN" and "!=" filters), multiple queries
may be required. This code will enumerate the power-set of all multi-query
filters.
Args:
used_args: set of used positional parameters (output only variable used in
reporting for unused positional args)
args: positional arguments referenced by the proto-query in self. This
assumes the input is a tuple (and can also be called with a varargs
param).
keyword_args: dict of keyword arguments referenced by the proto-query in
self.
Returns:
A list of maps [(identifier, condition) -> value] of all queries needed
to satisfy the GQL query with the given input arguments.
"""
enumerated_queries = []
for ((identifier, condition), value_list) in self.__filters.iteritems():
for (operator, params) in value_list:
value = self.__Operate(args, keyword_args, used_args, operator, params)
self.__AddMultiQuery(identifier, condition, value, enumerated_queries)
return enumerated_queries
def __CastError(self, operator, values, error_message):
"""Query building error for type cast operations.
Args:
operator: the failed cast operation
values: value list passed to the cast operator
error_message: string to emit as part of the 'Cast Error' string.
Raises:
BadQueryError and passes on an error message from the caller. Will raise
BadQueryError on all calls.
"""
raise datastore_errors.BadQueryError(
'Type Cast Error: unable to cast %r with operation %s (%s)' %
(values, operator.upper(), error_message))
def __CastNop(self, values):
"""Return values[0] if it exists -- default for most where clauses."""
if len(values) != 1:
self.__CastError(values, 'nop', 'requires one and only one value')
else:
return values[0]
def __CastList(self, values):
"""Return the full list of values -- only useful for IN clause."""
if values:
return values
else:
return None
def __CastKey(self, values):
"""Cast input values to Key() class using encoded string or tuple list."""
if not len(values) % 2:
return datastore_types.Key.from_path(_app=self.__app, *values)
elif len(values) == 1 and isinstance(values[0], str):
return datastore_types.Key(values[0])
else:
self.__CastError('KEY', values,
'requires an even number of operands'
'or a single encoded string')
def __CastGeoPt(self, values):
"""Cast input to GeoPt() class using 2 input parameters."""
if len(values) != 2:
self.__CastError('GEOPT', values, 'requires 2 input parameters')
return datastore_types.GeoPt(*values)
def __CastUser(self, values):
"""Cast to User() class using the email address in values[0]."""
if len(values) != 1:
self.__CastError(values, 'user', 'requires one and only one value')
else:
return users.User(email=values[0], _auth_domain=self.__auth_domain)
def __EncodeIfNeeded(self, value):
"""Simple helper function to create an str from possibly unicode strings.
Args:
value: input string (should pass as an instance of str or unicode).
"""
if isinstance(value, unicode):
return value.encode('utf8')
else:
return value
  def __CastDate(self, values):
    """Cast DATE values (year/month/day) from input (to datetime.datetime).

    Casts DATE input values formulated as ISO string or time tuple inputs.

    Args:
      values: either a single string with ISO time representation or 3
        integer valued date tuple (year, month, day).

    Returns:
      datetime.datetime value parsed from the input values.

    Raises:
      BadQueryError (via __CastError) on malformed input.
    """
    if len(values) == 1:
      value = self.__EncodeIfNeeded(values[0])
      if isinstance(value, str):
        try:
          # Keep only (year..second); the time fields are zero for a DATE.
          time_tuple = time.strptime(value, '%Y-%m-%d')[0:6]
        except ValueError, err:
          self.__CastError('DATE', values, err)
      else:
        self.__CastError('DATE', values, 'Single input value not a string')
    elif len(values) == 3:
      time_tuple = (values[0], values[1], values[2], 0, 0, 0)
    else:
      self.__CastError('DATE', values,
                       'function takes 1 string or 3 integer values')

    try:
      return datetime.datetime(*time_tuple)
    except ValueError, err:
      self.__CastError('DATE', values, err)
def __CastTime(self, values):
"""Cast TIME values (hour/min/sec) from input (to datetime.datetime).
Casts TIME input values formulated as ISO string or time tuple inputs.
Args:
values: either a single string with ISO time representation or 1-4
integer valued time tuple (hour), (hour, minute),
(hour, minute, second), (hour, minute, second, microsec).
Returns:
datetime.datetime value parsed from the input values.
"""
if len(values) == 1:
value = self.__EncodeIfNeeded(values[0])
if isinstance(value, str):
try:
time_tuple = time.strptime(value, '%H:%M:%S')
except ValueError, err:
self.__CastError('TIME', values, err)
time_tuple = (1970, 1, 1) + time_tuple[3:]
time_tuple = time_tuple[0:6]
elif isinstance(value, int):
time_tuple = (1970, 1, 1, value)
else:
self.__CastError('TIME', values,
'Single input value not a string or integer hour')
elif len(values) <= 4:
time_tuple = (1970, 1, 1) + tuple(values)
else:
self.__CastError('TIME', values, err)
try:
return datetime.datetime(*time_tuple)
except ValueError, err:
self.__CastError('TIME', values, err)
  def __CastDatetime(self, values):
    """Cast DATETIME values (string or tuple) from input (to datetime.datetime).

    Casts DATETIME input values formulated as ISO string or datetime tuple
    inputs.

    Args:
      values: either a single string with ISO representation or 3-7
        integer valued time tuple (year, month, day, ...).

    Returns:
      datetime.datetime value parsed from the input values.

    Raises:
      BadQueryError (via __CastError) on malformed input.
    """
    if len(values) == 1:
      value = self.__EncodeIfNeeded(values[0])
      if isinstance(value, str):
        try:
          time_tuple = time.strptime(str(value), '%Y-%m-%d %H:%M:%S')[0:6]
        except ValueError, err:
          self.__CastError('DATETIME', values, err)
      else:
        self.__CastError('DATETIME', values, 'Single input value not a string')
    else:
      # Tuple input is handed to datetime() directly; it validates the arity.
      time_tuple = values

    try:
      return datetime.datetime(*time_tuple)
    except ValueError, err:
      self.__CastError('DATETIME', values, err)
  def __Operate(self, args, keyword_args, used_args, operator, params):
    """Create a single output value from params using the operator string given.

    Args:
      args,keyword_args: arguments passed in for binding purposes (used in
          binding positional and keyword based arguments).
      used_args: set of numeric arguments accessed in this call.
          values are ints representing used zero-based positional arguments.
          used as an output parameter with new used arguments appended to the
          list.
      operator: string representing the operator to use 'nop' just returns
          the first value from params.
      params: parameter list to operate on (positional references, named
          references, or literals).

    Returns:
      A value which can be used as part of a GQL filter description (either a
      list of datastore types -- for use with IN, or a single datastore type --
      for use with other filters).
    """
    if not params:
      return None

    param_values = []
    for param in params:
      if isinstance(param, Literal):
        value = param.Get()
      else:
        # Unbound reference: resolve it against the caller's arguments.
        value = self.__GetParam(param, args, keyword_args)
        if isinstance(param, int):
          # Record consumption of the positional arg (zero-based index).
          used_args.add(param - 1)
        logging.log(LOG_LEVEL, 'found param for bind: %s value: %s',
                    param, value)
      param_values.append(value)

    logging.log(LOG_LEVEL, '%s Operating on values: %s',
                operator, repr(param_values))

    if operator in self.__cast_operators:
      # Table entries are plain (unbound) functions, so self is explicit.
      result = self.__cast_operators[operator](self, param_values)
    else:
      self.__Error('Operation %s is invalid' % operator)

    # __Error always raises, so 'result' is guaranteed bound here.
    return result
def __IsMultiQuery(self, condition):
"""Return whether or not this condition could require multiple queries."""
return condition.lower() in ('in', '!=')
def __GetParam(self, reference, args, keyword_args):
"""Get the specified parameter from the input arguments.
Args:
reference: id for a filter reference in the filter list (string or
number)
args: positional args passed in by the user (tuple of arguments, indexed
numerically by "reference")
keyword_args: dict of keyword based arguments (strings in "reference")
Returns:
The specified param from the input list.
Raises:
BadArgumentError if the referenced argument doesn't exist.
"""
num_args = len(args)
if isinstance(reference, int):
if reference <= num_args:
return args[reference - 1]
else:
raise datastore_errors.BadArgumentError(
'Missing argument for bind, requires argument #%i, '
'but only has %i args.' % (reference, num_args))
elif isinstance(reference, str):
if reference in keyword_args:
return keyword_args[reference]
else:
raise datastore_errors.BadArgumentError(
'Missing named arguments for bind, requires argument %s' %
reference)
else:
assert False, 'Unknown reference %s' % reference
  def __AddMultiQuery(self, identifier, condition, value, enumerated_queries):
    """Helper function to add a multi-query to previously enumerated queries.

    Args:
      identifier: property being filtered by this condition
      condition: filter condition (e.g. !=,in)
      value: value being bound
      enumerated_queries: in/out list of already bound queries -> expanded
        list with the full enumeration required to satisfy the condition
        query

    Raises:
      BadArgumentError if the filter is invalid (namely non-list with IN)
    """
    def CloneQueries(queries, n):
      """Do a full copy of the queries and append to the end of the queries.

      Does an in-place replication of the input list and sorts the result to
      put copies next to one-another.

      Args:
        queries: list of all filters to clone
        n: number of copies to make

      Returns:
        Number of iterations needed to fill the structure
      """
      if not enumerated_queries:
        # First multi-query filter seen: seed the list with n empty maps.
        for i in xrange(n):
          queries.append({})
        return 1
      else:
        old_size = len(queries)
        tmp_queries = []
        for i in xrange(n - 1):
          [tmp_queries.append(filter_map.copy()) for filter_map in queries]
        queries.extend(tmp_queries)
        # Sorting groups the copies of each original map next to each other.
        queries.sort()
        return old_size

    if condition == '!=':
      if len(enumerated_queries) * 2 > self.MAX_ALLOWABLE_QUERIES:
        raise datastore_errors.BadArgumentError(
          'Cannot satisfy query -- too many IN/!= values.')

      num_iterations = CloneQueries(enumerated_queries, 2)
      for i in xrange(num_iterations):
        # != is expressed as two queries: one strictly below, one above.
        enumerated_queries[2 * i]['%s <' % identifier] = value
        enumerated_queries[2 * i + 1]['%s >' % identifier] = value
    elif condition.lower() == 'in':
      if not isinstance(value, list):
        raise datastore_errors.BadArgumentError('List expected for "IN" filter')

      in_list_size = len(value)
      if len(enumerated_queries) * in_list_size > self.MAX_ALLOWABLE_QUERIES:
        raise datastore_errors.BadArgumentError(
          'Cannot satisfy query -- too many IN/!= values.')

      num_iterations = CloneQueries(enumerated_queries, in_list_size)
      for clone_num in xrange(num_iterations):
        for value_num in xrange(len(value)):
          # IN expands into one equality filter per list element.
          list_val = value[value_num]
          query_num = in_list_size * clone_num + value_num
          filt = '%s =' % identifier
          enumerated_queries[query_num][filt] = list_val
def __AddFilterToQuery(self, identifier, condition, value, query):
"""Add a filter condition to a query based on the inputs.
Args:
identifier: name of the property (or self.__ANCESTOR for ancestors)
condition: test condition
value: test value passed from the caller
query: query to add the filter to
"""
if identifier != self.__ANCESTOR:
filter_condition = '%s %s' % (identifier, condition)
logging.log(LOG_LEVEL, 'Setting filter on "%s" with value "%s"',
filter_condition, value.__class__)
datastore._AddOrAppend(query, filter_condition, value)
else:
logging.log(LOG_LEVEL, 'Setting ancestor query for ancestor %s', value)
query.Ancestor(value)
  def Run(self, *args, **keyword_args):
    """Runs this query.

    Similar to datastore.Query.Run.
    Assumes that limit == -1 or > 0

    Args:
      args: arguments used to bind to references in the compiled query object.
      keyword_args: dictionary-based arguments (for named parameters).

    Returns:
      A list of results if a query count limit was passed.
      A result iterator if no limit was given.
    """
    bind_results = self.Bind(args, keyword_args)

    offset = 0
    if self.__offset != -1:
      offset = self.__offset

    if self.__limit == -1:
      # No limit: return an iterator, manually skipping 'offset' results.
      it = bind_results.Run()
      try:
        for i in xrange(offset):
          it.next()
      except StopIteration:
        pass

      return it
    else:
      # With a limit, Get() applies both the limit and the offset for us.
      res = bind_results.Get(self.__limit, offset)
      return res
  def filters(self):
    """Return the compiled list of filters."""
    return self.__filters

  def hint(self):
    """Return the datastore hint."""
    return self.__hint

  def limit(self):
    """Return numerical result count limit."""
    return self.__limit

  def orderings(self):
    """Return the result ordering list."""
    return self.__orderings

  # Iterating over a GQL object runs the (argument-free) query.
  __iter__ = Run
__quoted_string_regex = re.compile(r'((?:\'[^\'\n\r]*\')+)')
__ordinal_regex = re.compile(r':(\d+)$')
__named_regex = re.compile(r':(\w+)$')
__identifier_regex = re.compile(r'(\w+)$')
__conditions_regex = re.compile(r'(<=|>=|!=|=|<|>|is|in)$', re.IGNORECASE)
__number_regex = re.compile(r'(\d+)$')
__cast_regex = re.compile(
r'(geopt|user|key|date|time|datetime)$', re.IGNORECASE)
__cast_operators = {
'geopt': __CastGeoPt,
'user': __CastUser,
'key': __CastKey,
'datetime': __CastDatetime,
'date': __CastDate,
'time': __CastTime,
'list': __CastList,
'nop': __CastNop,
}
def __Error(self, error_message):
"""Generic query error.
Args:
error_message: string to emit as part of the 'Parse Error' string.
Raises:
BadQueryError and passes on an error message from the caller. Will raise
BadQueryError on all calls to __Error()
"""
if self.__next_symbol >= len(self.__symbols):
raise datastore_errors.BadQueryError(
'Parse Error: %s at end of string' % error_message)
else:
raise datastore_errors.BadQueryError(
'Parse Error: %s at symbol %s' %
(error_message, self.__symbols[self.__next_symbol]))
def __Accept(self, symbol_string):
"""Advance the symbol and return true iff the next symbol matches input."""
if self.__next_symbol < len(self.__symbols):
logging.log(LOG_LEVEL, '\t%s', self.__symbols)
logging.log(LOG_LEVEL, '\tExpect: %s Got: %s',
symbol_string, self.__symbols[self.__next_symbol].upper())
if self.__symbols[self.__next_symbol].upper() == symbol_string:
self.__next_symbol += 1
return True
return False
def __Expect(self, symbol_string):
"""Require that the next symbol matches symbol_string, or emit an error.
Args:
symbol_string: next symbol expected by the caller
Raises:
BadQueryError if the next symbol doesn't match the parameter passed in.
"""
if not self.__Accept(symbol_string):
self.__Error('Unexpected Symbol: %s' % symbol_string)
def __AcceptRegex(self, regex):
"""Advance and return the symbol if the next symbol matches the regex.
Args:
regex: the compiled regular expression to attempt acceptance on.
Returns:
The first group in the expression to allow for convenient access
to simple matches. Requires () around some objects in the regex.
None if no match is found.
"""
if self.__next_symbol < len(self.__symbols):
match_symbol = self.__symbols[self.__next_symbol]
logging.log(LOG_LEVEL, '\taccept %s on symbol %s', regex, match_symbol)
match = regex.match(match_symbol)
if match:
self.__next_symbol += 1
if match.groups():
matched_string = match.group(1)
logging.log(LOG_LEVEL, '\taccepted %s', matched_string)
return matched_string
return None
def __AcceptTerminal(self):
"""Only accept an empty string.
Returns:
True
Raises:
BadQueryError if there are unconsumed symbols in the query.
"""
if self.__next_symbol < len(self.__symbols):
self.__Error('Expected no additional symbols')
return True
def __Select(self):
"""Consume the SELECT clause and everything that follows it.
Assumes SELECT * to start.
Transitions to a FROM clause.
Returns:
True if parsing completed okay.
"""
self.__Expect('SELECT')
self.__Expect('*')
return self.__From()
def __From(self):
"""Consume the FROM clause.
Assumes a single well formed entity in the clause.
Assumes FROM <Entity Name>
Transitions to a WHERE clause.
Returns:
True if parsing completed okay.
"""
self.__Expect('FROM')
entity = self.__AcceptRegex(self.__identifier_regex)
if entity:
self._entity = entity
return self.__Where()
else:
self.__Error('Identifier Expected')
return False
def __Where(self):
"""Consume the WHERE cluase.
These can have some recursion because of the AND symbol.
Returns:
True if parsing the WHERE clause completed correctly, as well as all
subsequent clauses
"""
if self.__Accept('WHERE'):
return self.__FilterList()
return self.__OrderBy()
  def __FilterList(self):
    """Consume the filter list (remainder of the WHERE clause).

    Grammar: <identifier> <condition> <value> [AND <filter list>], where the
    value may be a parameter reference, a literal, or a type-cast expression.

    Returns:
      True if the filter list and all subsequent clauses parsed okay.
    """
    identifier = self.__AcceptRegex(self.__identifier_regex)
    if not identifier:
      self.__Error('Invalid WHERE Identifier')
      return False

    condition = self.__AcceptRegex(self.__conditions_regex)
    if not condition:
      self.__Error('Invalid WHERE Condition')
      return False
    self.__CheckFilterSyntax(identifier, condition)

    # Try each value form in turn: unbound reference, bound literal, and
    # finally a type-cast (or parenthesized list) expression.
    if not self.__AddSimpleFilter(identifier, condition, self.__Reference()):
      if not self.__AddSimpleFilter(identifier, condition, self.__Literal()):
        type_cast = self.__TypeCast()
        if (not type_cast or
            not self.__AddProcessedParameterFilter(identifier, condition,
                                                   *type_cast)):
          self.__Error('Invalid WHERE condition')

    if self.__Accept('AND'):
      return self.__FilterList()

    return self.__OrderBy()
def __GetValueList(self):
"""Read in a list of parameters from the tokens and return the list.
Reads in a set of tokens, but currently only accepts literals, positional
parameters, or named parameters. Or empty list if nothing was parsed.
Returns:
A list of values parsed from the input, with values taking the form of
strings (unbound, named reference), integers (unbound, positional
reference), or Literal() (bound value usable directly as part of a filter
with no additional information).
"""
params = []
while True:
reference = self.__Reference()
if reference:
params.append(reference)
else:
literal = self.__Literal()
if literal:
params.append(literal)
else:
self.__Error('Parameter list requires literal or reference parameter')
if not self.__Accept(','):
break
return params
def __CheckFilterSyntax(self, identifier, condition):
"""Check that filter conditions are valid and throw errors if not.
Args:
identifier: identifier being used in comparison
condition: string form of the comparison operator used in the filter
"""
if identifier.lower() == 'ancestor':
if condition.lower() == 'is':
if self.__has_ancestor:
self.__Error('Only one ANCESTOR IS" clause allowed')
else:
self.__Error('"IS" expected to follow "ANCESTOR"')
elif condition.lower() == 'is':
self.__Error('"IS" can only be used when comparing against "ANCESTOR"')
def __AddProcessedParameterFilter(self, identifier, condition,
operator, parameters):
"""Add a filter with post-processing required.
Args:
identifier: property being compared.
condition: comparison operation being used with the property (e.g. !=).
operator: operation to perform on the parameters before adding the filter.
parameters: list of bound parameters passed to 'operator' before creating
the filter. When using the parameters as a pass-through, pass 'nop'
into the operator field and the first value will be used unprocessed).
Returns:
True if the filter was okay to add.
"""
if parameters is None:
return False
if parameters[0] is None:
return False
logging.log(LOG_LEVEL, 'Adding Filter %s %s %s',
identifier, condition, repr(parameters))
filter_rule = (identifier, condition)
if identifier.lower() == 'ancestor':
self.__has_ancestor = True
filter_rule = (self.__ANCESTOR, 'is')
assert condition.lower() == 'is'
if condition.lower() != 'in' and operator == 'list':
sef.__Error('Only IN can process a list of values')
self.__filters.setdefault(filter_rule, []).append((operator, parameters))
return True
def __AddSimpleFilter(self, identifier, condition, parameter):
"""Add a filter to the query being built (no post-processing on parameter).
Args:
identifier: identifier being used in comparison
condition: string form of the comparison operator used in the filter
parameter: ID of the reference being made or a value of type Literal
Returns:
True if the filter could be added.
False otherwise.
"""
return self.__AddProcessedParameterFilter(identifier, condition,
'nop', [parameter])
def __Reference(self):
"""Consume a parameter reference and return it.
Consumes a reference to a positional parameter (:1) or a named parameter
(:email). Only consumes a single reference (not lists).
Returns:
The name of the reference (integer for positional parameters or string
for named parameters) to a bind-time parameter.
"""
logging.log(LOG_LEVEL, 'Try Reference')
reference = self.__AcceptRegex(self.__ordinal_regex)
if reference:
return int(reference)
else:
reference = self.__AcceptRegex(self.__named_regex)
if reference:
return reference
return None
def __Literal(self):
"""Parse literals from our token list.
Returns:
The parsed literal from the input string (currently either a string,
integer, or floating point value).
"""
logging.log(LOG_LEVEL, 'Try Literal')
literal = None
try:
literal = int(self.__symbols[self.__next_symbol])
except ValueError:
pass
else:
self.__next_symbol += 1
if literal is None:
try:
literal = float(self.__symbols[self.__next_symbol])
except ValueError:
pass
else:
self.__next_symbol += 1
if literal is None:
literal = self.__AcceptRegex(self.__quoted_string_regex)
if literal:
literal = literal[1:-1].replace("''", "'")
if literal is None:
if self.__Accept('TRUE'):
literal = True
elif self.__Accept('FALSE'):
literal = False
if literal is not None:
return Literal(literal)
else:
return None
def __TypeCast(self):
"""Check if the next operation is a type-cast and return the cast if so.
Casting operators look like simple function calls on their parameters. This
code returns the cast operator found and the list of parameters provided by
the user to complete the cast operation.
Returns:
A tuple (cast operator, params) which represents the cast operation
requested and the parameters parsed from the cast clause.
None - if there is no TypeCast function.
"""
logging.log(LOG_LEVEL, 'Try Type Cast')
cast_op = self.__AcceptRegex(self.__cast_regex)
if not cast_op:
if self.__Accept('('):
cast_op = 'list'
else:
return None
else:
cast_op = cast_op.lower()
self.__Expect('(')
params = self.__GetValueList()
self.__Expect(')')
logging.log(LOG_LEVEL, 'Got casting operator %s with params %s',
cast_op, repr(params))
return (cast_op, params)
def __OrderBy(self):
"""Consume the ORDER BY clause."""
if self.__Accept('ORDER'):
self.__Expect('BY')
return self.__OrderList()
return self.__Limit()
def __OrderList(self):
"""Consume variables and sort order for ORDER BY clause."""
identifier = self.__AcceptRegex(self.__identifier_regex)
if identifier:
if self.__Accept('DESC'):
self.__orderings.append((identifier, datastore.Query.DESCENDING))
elif self.__Accept('ASC'):
self.__orderings.append((identifier, datastore.Query.ASCENDING))
else:
self.__orderings.append((identifier, datastore.Query.ASCENDING))
else:
self.__Error('Invalid ORDER BY Property')
logging.log(LOG_LEVEL, self.__orderings)
if self.__Accept(','):
return self.__OrderList()
return self.__Limit()
  def __Limit(self):
    """Consume the LIMIT clause.
    Supports both 'LIMIT count' and 'LIMIT offset,count'; in the
    two-number form the first number is stored as the query offset.
    """
    if self.__Accept('LIMIT'):
      maybe_limit = self.__AcceptRegex(self.__number_regex)
      if maybe_limit:
        if self.__Accept(','):
          # Two-number form: the number already read was the offset.
          self.__offset = int(maybe_limit)
          if self.__offset < 0:
            self.__Error('Bad offset in LIMIT Value')
          else:
            logging.log(LOG_LEVEL, 'Set offset to %i', self.__offset)
          # Re-read for the actual limit (the number after the comma).
          # NOTE(review): if this second number is missing, AcceptRegex
          # returns None and int(None) raises TypeError rather than a
          # BadQueryError -- confirm whether that is intended.
          maybe_limit = self.__AcceptRegex(self.__number_regex)
        self.__limit = int(maybe_limit)
        if self.__limit < 1:
          self.__Error('Bad Limit in LIMIT Value')
        else:
          logging.log(LOG_LEVEL, 'Set limit to %i', self.__limit)
      else:
        self.__Error('Non-number limit in LIMIT clause')
    return self.__Offset()
def __Offset(self):
"""Consume the OFFSET clause."""
if self.__Accept('OFFSET'):
if self.__offset != -1:
self.__Error('Offset already defined in LIMIT clause')
offset = self.__AcceptRegex(self.__number_regex)
if offset:
self.__offset = int(offset)
if self.__offset < 0:
self.__Error('Bad offset in OFFSET clause')
else:
logging.log(LOG_LEVEL, 'Set offset to %i', self.__offset)
else:
self.__Error('Non-number offset in OFFSET clause')
return self.__Hint()
def __Hint(self):
"""Consume the HINT clause.
Requires one of three options (mirroring the rest of the datastore):
HINT ORDER_FIRST
HINT ANCESTOR_FIRST
HINT FILTER_FIRST
Returns:
True if the hint clause and later clauses all parsed okay
"""
if self.__Accept('HINT'):
if self.__Accept('ORDER_FIRST'):
self.__hint = 'ORDER_FIRST'
elif self.__Accept('FILTER_FIRST'):
self.__hint = 'FILTER_FIRST'
elif self.__Accept('ANCESTOR_FIRST'):
self.__hint = 'ANCESTOR_FIRST'
else:
self.__Error('Unknown HINT')
return False
return self.__AcceptTerminal()
class Literal(object):
  """Represents a literal value, as distinct from an unbound parameter.
  This is a simple wrapper class around basic types and datastore types.
  """
  def __init__(self, value):
    self.__value = value
  def Get(self):
    """Return the value of the literal."""
    return self.__value
  def __repr__(self):
    return 'Literal(%r)' % (self.__value,)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A handler that exports various App Engine services over HTTP.
You can export this handler in your app by adding it directly to app.yaml's
list of handlers:
handlers:
- url: /remote_api
script: $PYTHON_LIB/google/appengine/ext/remote_api/handler.py
login: admin
Then, you can use remote_api_stub to remotely access services exported by this
handler. See the documentation in remote_api_stub.py for details on how to do
this.
Using this handler without specifying "login: admin" would be extremely unwise.
So unwise that the default handler insists on checking for itself.
"""
import google
import pickle
import sha
import wsgiref.handlers
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
from google.appengine.ext import webapp
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.runtime import apiproxy_errors
class RemoteDatastoreStub(apiproxy_stub.APIProxyStub):
  """Provides a stub that permits execution of stateful datastore queries.
  Some operations aren't possible using the standard interface. Notably,
  datastore RunQuery operations internally store a cursor that is referenced
  in later Next calls, and cleaned up at the end of each request. Because
  every call to ApiCallHandler takes place in its own request, this isn't
  possible. To work around this, RemoteDatastoreStub provides its own
  implementation of RunQuery that immediately returns the query results.
  """
  def _Dynamic_RunQuery(self, request, response):
    """Handle a RunQuery request.
    We handle RunQuery by executing a Query and a Next and returning the
    result of the Next request.
    """
    runquery_response = datastore_pb.QueryResult()
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery',
                                   request, runquery_response)
    # Immediately drain the cursor with a Next call sized by the request
    # limit, so no server-side cursor state must survive this request.
    next_request = datastore_pb.NextRequest()
    next_request.mutable_cursor().CopyFrom(runquery_response.cursor())
    next_request.set_count(request.limit())
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next',
                                   next_request, response)
  def _Dynamic_Transaction(self, request, response):
    """Handle a Transaction request.
    We handle transactions by accumulating Put requests on the client end,
    as well as recording the key and hash of Get requests. When Commit is
    called, Transaction is invoked, which verifies that all the entities in
    the precondition list still exist and their hashes match, then performs
    a transaction of its own to make the updates.
    """
    tx = datastore_pb.Transaction()
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
                                   api_base_pb.VoidProto(), tx)
    preconditions = request.precondition_list()
    if preconditions:
      # Re-fetch every precondition entity inside the new transaction.
      get_request = datastore_pb.GetRequest()
      get_request.mutable_transaction().CopyFrom(tx)
      for precondition in preconditions:
        key = get_request.add_key()
        key.CopyFrom(precondition.key())
      get_response = datastore_pb.GetResponse()
      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', get_request,
                                     get_response)
      entities = get_response.entity_list()
      assert len(entities) == request.precondition_size()
      for precondition, entity in zip(preconditions, entities):
        # A precondition with no hash means "entity must not exist";
        # one with a hash means "entity must exist with this content".
        if precondition.has_hash() != entity.has_entity():
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.CONCURRENT_TRANSACTION,
              "Transaction precondition failed.")
        elif entity.has_entity():
          entity_hash = sha.new(entity.entity().Encode()).digest()
          if precondition.hash() != entity_hash:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.CONCURRENT_TRANSACTION,
                "Transaction precondition failed.")
    if request.has_puts():
      put_request = request.puts()
      put_request.mutable_transaction().CopyFrom(tx)
      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put',
                                     put_request, response)
    if request.has_deletes():
      delete_request = request.deletes()
      delete_request.mutable_transaction().CopyFrom(tx)
      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete',
                                     delete_request, api_base_pb.VoidProto())
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit', tx,
                                   api_base_pb.VoidProto())
  def _Dynamic_GetIDs(self, request, response):
    """Fetch unique IDs for a set of paths."""
    # Each entity must be a bare key path with an unassigned final element.
    for entity in request.entity_list():
      assert entity.property_size() == 0
      assert entity.raw_property_size() == 0
      assert entity.entity_group().element_size() == 0
      lastpart = entity.key().path().element_list()[-1]
      assert lastpart.id() == 0 and not lastpart.has_name()
    # The Put runs inside a transaction that is immediately rolled back --
    # presumably so IDs get assigned without persisting anything; confirm
    # against datastore_v3 Put/Rollback semantics.
    tx = datastore_pb.Transaction()
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
                                   api_base_pb.VoidProto(), tx)
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', request, response)
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback', tx,
                                   api_base_pb.VoidProto())
# Maps service name -> {method name: (request PB class, response PB class)}.
# ApiCallHandler.ExecuteRequest consults this table to deserialize incoming
# payloads and to instantiate the response message; methods not listed here
# cannot be invoked through the remote API endpoint.
SERVICE_PB_MAP = {
    'datastore_v3': {
        'Get': (datastore_pb.GetRequest, datastore_pb.GetResponse),
        'Put': (datastore_pb.PutRequest, datastore_pb.PutResponse),
        'Delete': (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
        'Count': (datastore_pb.Query, api_base_pb.Integer64Proto),
        'GetIndices': (api_base_pb.StringProto, datastore_pb.CompositeIndices),
    },
    'remote_datastore': {
        'RunQuery': (datastore_pb.Query, datastore_pb.QueryResult),
        'Transaction': (remote_api_pb.TransactionRequest,
                        datastore_pb.PutResponse),
        'GetIDs': (remote_api_pb.PutRequest, datastore_pb.PutResponse),
    },
}
class ApiCallHandler(webapp.RequestHandler):
  """A webapp handler that accepts API calls over HTTP and executes them."""
  # Services dispatched in-process instead of via apiproxy_stub_map.
  LOCAL_STUBS = {
      'remote_datastore': RemoteDatastoreStub('remote_datastore'),
  }
  def CheckIsAdmin(self):
    """Reject the request unless it is from an admin using a remote-API tool.
    Returns:
      True if the request may proceed, False if a rejection was written.
    """
    if not users.is_current_user_admin():
      self.response.set_status(401)
      self.response.out.write(
          "You must be logged in as an administrator to access this.")
      self.response.headers['Content-Type'] = 'text/plain'
      return False
    elif 'X-appcfg-api-version' not in self.request.headers:
      # Requires the header set by appcfg/remote_api_stub clients,
      # presumably as a CSRF guard -- a browser form won't send it.
      self.response.set_status(403)
      self.response.out.write("This request did not contain a necessary header")
      return False
    return True
  def get(self):
    """Handle a GET. Just show an info page."""
    if not self.CheckIsAdmin():
      return
    page = self.InfoPage()
    self.response.out.write(page)
  def post(self):
    """Handle POST requests by executing the API call."""
    if not self.CheckIsAdmin():
      return
    self.response.headers['Content-Type'] = 'application/octet-stream'
    response = remote_api_pb.Response()
    try:
      request = remote_api_pb.Request()
      request.ParseFromString(self.request.body)
      response_data = self.ExecuteRequest(request)
      response.mutable_response().set_contents(response_data.Encode())
      self.response.set_status(200)
    except Exception, e:
      # Errors are reported in-band: HTTP 200 with a pickled exception the
      # client-side stub re-raises. NOTE(review): pickle is only safe here
      # because access is restricted to admins by CheckIsAdmin.
      self.response.set_status(200)
      response.mutable_exception().set_contents(pickle.dumps(e))
    self.response.out.write(response.Encode())
  def ExecuteRequest(self, request):
    """Executes an API invocation and returns the response object."""
    service = request.service_name()
    method = request.method()
    # Only (service, method) pairs listed in SERVICE_PB_MAP may be called.
    service_methods = SERVICE_PB_MAP.get(service, {})
    request_class, response_class = service_methods.get(method, (None, None))
    if not request_class:
      raise apiproxy_errors.CallNotFoundError()
    request_data = request_class()
    request_data.ParseFromString(request.request().contents())
    response_data = response_class()
    if service in self.LOCAL_STUBS:
      self.LOCAL_STUBS[service].MakeSyncCall(service, method, request_data,
                                             response_data)
    else:
      apiproxy_stub_map.MakeSyncCall(service, method, request_data,
                                     response_data)
    return response_data
  def InfoPage(self):
    """Renders an information page."""
    return """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html><head>
<title>App Engine API endpoint.</title>
</head><body>
<h1>App Engine API endpoint.</h1>
<p>This is an endpoint for the App Engine remote API interface.
Point your stubs (google.appengine.ext.remote_api.remote_api_stub) here.</p>
</body>
</html>"""
def main():
  """Run the remote API endpoint as a CGI/WSGI application."""
  app = webapp.WSGIApplication([('.*', ApiCallHandler)])
  wsgiref.handlers.CGIHandler().run(app)
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.net.proto.RawMessage import RawMessage
from google.appengine.datastore.datastore_pb import PutRequest
from google.appengine.datastore.datastore_pb import DeleteRequest
from google.appengine.datastore.entity_pb import Reference
class Request(ProtocolBuffer.ProtocolMessage):
  """Wire format for a single remote API invocation.
  Required fields: service_name (tag 2), method (tag 3), and the
  serialized request payload as a RawMessage (tag 4). Generated-style
  protocol buffer code; tag 1 ('ErrorCode' in _TEXT) appears reserved --
  presumably for framework use -- and is never set here.
  """
  # Presence flag / default value pairs for each field.
  has_service_name_ = 0
  service_name_ = ""
  has_method_ = 0
  method_ = ""
  has_request_ = 0
  def __init__(self, contents=None):
    # The request payload is created eagerly (no lazy-init lock needed).
    self.request_ = RawMessage()
    if contents is not None: self.MergeFromString(contents)
  def service_name(self): return self.service_name_
  def set_service_name(self, x):
    self.has_service_name_ = 1
    self.service_name_ = x
  def clear_service_name(self):
    if self.has_service_name_:
      self.has_service_name_ = 0
      self.service_name_ = ""
  def has_service_name(self): return self.has_service_name_
  def method(self): return self.method_
  def set_method(self, x):
    self.has_method_ = 1
    self.method_ = x
  def clear_method(self):
    if self.has_method_:
      self.has_method_ = 0
      self.method_ = ""
  def has_method(self): return self.has_method_
  def request(self): return self.request_
  def mutable_request(self): self.has_request_ = 1; return self.request_
  def clear_request(self):self.has_request_ = 0; self.request_.Clear()
  def has_request(self): return self.has_request_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_service_name()): self.set_service_name(x.service_name())
    if (x.has_method()): self.set_method(x.method())
    if (x.has_request()): self.mutable_request().MergeFrom(x.request())
  def Equals(self, x):
    if x is self: return 1
    if self.has_service_name_ != x.has_service_name_: return 0
    if self.has_service_name_ and self.service_name_ != x.service_name_: return 0
    if self.has_method_ != x.has_method_: return 0
    if self.has_method_ and self.method_ != x.method_: return 0
    if self.has_request_ != x.has_request_: return 0
    if self.has_request_ and self.request_ != x.request_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # All three fields are required.
    initialized = 1
    if (not self.has_service_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: service_name not set.')
    if (not self.has_method_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: method not set.')
    if (not self.has_request_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: request not set.')
    elif not self.request_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.service_name_))
    n += self.lengthString(len(self.method_))
    n += self.lengthString(self.request_.ByteSize())
    return n + 3  # one one-byte tag per field
  def Clear(self):
    self.clear_service_name()
    self.clear_method()
    self.clear_request()
  def OutputUnchecked(self, out):
    # Tags 18/26/34 = field numbers 2/3/4, wire type 2 (length-delimited).
    out.putVarInt32(18)
    out.putPrefixedString(self.service_name_)
    out.putVarInt32(26)
    out.putPrefixedString(self.method_)
    out.putVarInt32(34)
    out.putVarInt32(self.request_.ByteSize())
    self.request_.OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 18:
        self.set_service_name(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_method(d.getPrefixedString())
        continue
      if tt == 34:
        # Decode the nested message from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_request().TryMerge(tmp)
        continue
      # Tag 0 is malformed; any other unknown field is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_service_name_: res+=prefix+("service_name: %s\n" % self.DebugFormatString(self.service_name_))
    if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatString(self.method_))
    if self.has_request_:
      res+=prefix+"request <\n"
      res+=self.request_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field-number constants and text-format metadata (indexed by field number).
  kservice_name = 2
  kmethod = 3
  krequest = 4
  _TEXT = (
   "ErrorCode",
   None,
   "service_name",
   "method",
   "request",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Response(ProtocolBuffer.ProtocolMessage):
  """Wire format for the result of a remote API invocation.
  Optional fields: response (tag 1, the serialized service response) and
  exception (tag 2, which the handler fills with a pickled exception).
  Both are RawMessages, created lazily under a lock.
  """
  has_response_ = 0
  response_ = None
  has_exception_ = 0
  exception_ = None
  def __init__(self, contents=None):
    # Guards lazy creation of response_/exception_.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def response(self):
    # Double-checked lazy init of the nested RawMessage.
    if self.response_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.response_ is None: self.response_ = RawMessage()
      finally:
        self.lazy_init_lock_.release()
    return self.response_
  def mutable_response(self): self.has_response_ = 1; return self.response()
  def clear_response(self):
    if self.has_response_:
      self.has_response_ = 0;
      if self.response_ is not None: self.response_.Clear()
  def has_response(self): return self.has_response_
  def exception(self):
    # Double-checked lazy init of the nested RawMessage.
    if self.exception_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.exception_ is None: self.exception_ = RawMessage()
      finally:
        self.lazy_init_lock_.release()
    return self.exception_
  def mutable_exception(self): self.has_exception_ = 1; return self.exception()
  def clear_exception(self):
    if self.has_exception_:
      self.has_exception_ = 0;
      if self.exception_ is not None: self.exception_.Clear()
  def has_exception(self): return self.has_exception_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_response()): self.mutable_response().MergeFrom(x.response())
    if (x.has_exception()): self.mutable_exception().MergeFrom(x.exception())
  def Equals(self, x):
    if x is self: return 1
    if self.has_response_ != x.has_response_: return 0
    if self.has_response_ and self.response_ != x.response_: return 0
    if self.has_exception_ != x.has_exception_: return 0
    if self.has_exception_ and self.exception_ != x.exception_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are optional; only set fields are validated.
    initialized = 1
    if (self.has_response_ and not self.response_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_exception_ and not self.exception_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_response_): n += 1 + self.lengthString(self.response_.ByteSize())
    if (self.has_exception_): n += 1 + self.lengthString(self.exception_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_response()
    self.clear_exception()
  def OutputUnchecked(self, out):
    # Tags 10/18 = field numbers 1/2, wire type 2 (length-delimited).
    if (self.has_response_):
      out.putVarInt32(10)
      out.putVarInt32(self.response_.ByteSize())
      self.response_.OutputUnchecked(out)
    if (self.has_exception_):
      out.putVarInt32(18)
      out.putVarInt32(self.exception_.ByteSize())
      self.exception_.OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_response().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_exception().TryMerge(tmp)
        continue
      # Tag 0 is malformed; any other unknown field is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_response_:
      res+=prefix+"response <\n"
      res+=self.response_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_exception_:
      res+=prefix+"exception <\n"
      res+=self.exception_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field-number constants and text-format metadata.
  kresponse = 1
  kexception = 2
  _TEXT = (
   "ErrorCode",
   "response",
   "exception",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class TransactionRequest_Precondition(ProtocolBuffer.ProtocolMessage):
  """One precondition of a TransactionRequest (encoded as group 1).
  Holds a required entity key (tag 2) and an optional content hash
  (tag 3). Per the handler's usage, a missing hash asserts the entity
  does not exist; a present hash asserts its contents are unchanged.
  """
  has_key_ = 0
  has_hash_ = 0
  hash_ = ""
  def __init__(self, contents=None):
    self.key_ = Reference()
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def mutable_key(self): self.has_key_ = 1; return self.key_
  def clear_key(self):self.has_key_ = 0; self.key_.Clear()
  def has_key(self): return self.has_key_
  def hash(self): return self.hash_
  def set_hash(self, x):
    self.has_hash_ = 1
    self.hash_ = x
  def clear_hash(self):
    if self.has_hash_:
      self.has_hash_ = 0
      self.hash_ = ""
  def has_hash(self): return self.has_hash_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_hash()): self.set_hash(x.hash())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_hash_ != x.has_hash_: return 0
    if self.has_hash_ and self.hash_ != x.hash_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Only the key is required; hash is optional.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    elif not self.key_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(self.key_.ByteSize())
    if (self.has_hash_): n += 1 + self.lengthString(len(self.hash_))
    return n + 1
  def Clear(self):
    self.clear_key()
    self.clear_hash()
  def OutputUnchecked(self, out):
    out.putVarInt32(18)
    out.putVarInt32(self.key_.ByteSize())
    self.key_.OutputUnchecked(out)
    if (self.has_hash_):
      out.putVarInt32(26)
      out.putPrefixedString(self.hash_)
  def TryMerge(self, d):
    # Group member: decode until the END_GROUP tag (12) is seen.
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 26:
        self.set_hash(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_hash_: res+=prefix+("hash: %s\n" % self.DebugFormatString(self.hash_))
    return res
class TransactionRequest(ProtocolBuffer.ProtocolMessage):
  """Wire format for a client-accumulated datastore transaction.
  Contains a repeated group of preconditions (group 1), plus optional
  puts (tag 4, a PutRequest) and deletes (tag 5, a DeleteRequest),
  both created lazily under a lock.
  """
  has_puts_ = 0
  puts_ = None
  has_deletes_ = 0
  deletes_ = None
  def __init__(self, contents=None):
    self.precondition_ = []
    # Guards lazy creation of puts_/deletes_.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def precondition_size(self): return len(self.precondition_)
  def precondition_list(self): return self.precondition_
  def precondition(self, i):
    return self.precondition_[i]
  def mutable_precondition(self, i):
    return self.precondition_[i]
  def add_precondition(self):
    x = TransactionRequest_Precondition()
    self.precondition_.append(x)
    return x
  def clear_precondition(self):
    self.precondition_ = []
  def puts(self):
    # Double-checked lazy init of the nested PutRequest.
    if self.puts_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.puts_ is None: self.puts_ = PutRequest()
      finally:
        self.lazy_init_lock_.release()
    return self.puts_
  def mutable_puts(self): self.has_puts_ = 1; return self.puts()
  def clear_puts(self):
    if self.has_puts_:
      self.has_puts_ = 0;
      if self.puts_ is not None: self.puts_.Clear()
  def has_puts(self): return self.has_puts_
  def deletes(self):
    # Double-checked lazy init of the nested DeleteRequest.
    if self.deletes_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.deletes_ is None: self.deletes_ = DeleteRequest()
      finally:
        self.lazy_init_lock_.release()
    return self.deletes_
  def mutable_deletes(self): self.has_deletes_ = 1; return self.deletes()
  def clear_deletes(self):
    if self.has_deletes_:
      self.has_deletes_ = 0;
      if self.deletes_ is not None: self.deletes_.Clear()
  def has_deletes(self): return self.has_deletes_
  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.precondition_size()): self.add_precondition().CopyFrom(x.precondition(i))
    if (x.has_puts()): self.mutable_puts().MergeFrom(x.puts())
    if (x.has_deletes()): self.mutable_deletes().MergeFrom(x.deletes())
  def Equals(self, x):
    if x is self: return 1
    if len(self.precondition_) != len(x.precondition_): return 0
    for e1, e2 in zip(self.precondition_, x.precondition_):
      if e1 != e2: return 0
    if self.has_puts_ != x.has_puts_: return 0
    if self.has_puts_ and self.puts_ != x.puts_: return 0
    if self.has_deletes_ != x.has_deletes_: return 0
    if self.has_deletes_ and self.deletes_ != x.deletes_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.precondition_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_puts_ and not self.puts_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_deletes_ and not self.deletes_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    # Each precondition group costs two one-byte tags (start + end).
    n += 2 * len(self.precondition_)
    for i in xrange(len(self.precondition_)): n += self.precondition_[i].ByteSize()
    if (self.has_puts_): n += 1 + self.lengthString(self.puts_.ByteSize())
    if (self.has_deletes_): n += 1 + self.lengthString(self.deletes_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_precondition()
    self.clear_puts()
    self.clear_deletes()
  def OutputUnchecked(self, out):
    # Tags 11/12 = field 1 START_GROUP/END_GROUP; 34/42 = fields 4/5.
    for i in xrange(len(self.precondition_)):
      out.putVarInt32(11)
      self.precondition_[i].OutputUnchecked(out)
      out.putVarInt32(12)
    if (self.has_puts_):
      out.putVarInt32(34)
      out.putVarInt32(self.puts_.ByteSize())
      self.puts_.OutputUnchecked(out)
    if (self.has_deletes_):
      out.putVarInt32(42)
      out.putVarInt32(self.deletes_.ByteSize())
      self.deletes_.OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        # Precondition group: the member consumes up to its END_GROUP tag.
        self.add_precondition().TryMerge(d)
        continue
      if tt == 34:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_puts().TryMerge(tmp)
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_deletes().TryMerge(tmp)
        continue
      # Tag 0 is malformed; any other unknown field is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.precondition_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Precondition%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_puts_:
      res+=prefix+"puts <\n"
      res+=self.puts_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_deletes_:
      res+=prefix+"deletes <\n"
      res+=self.deletes_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field-number constants and text-format metadata.
  kPreconditionGroup = 1
  kPreconditionkey = 2
  kPreconditionhash = 3
  kputs = 4
  kdeletes = 5
  _TEXT = (
   "ErrorCode",
   "Precondition",
   "key",
   "hash",
   "puts",
   "deletes",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
__all__ = ['Request','Response','TransactionRequest','TransactionRequest_Precondition']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A handler that exports various App Engine services over HTTP.
You can export this handler in your app by adding it directly to app.yaml's
list of handlers:
handlers:
- url: /remote_api
script: $PYTHON_LIB/google/appengine/ext/remote_api/handler.py
login: admin
Then, you can use remote_api_stub to remotely access services exported by this
handler. See the documentation in remote_api_stub.py for details on how to do
this.
Using this handler without specifying "login: admin" would be extremely unwise.
So unwise that the default handler insists on checking for itself.
"""
import google
import pickle
import sha
import wsgiref.handlers
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
from google.appengine.ext import webapp
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.runtime import apiproxy_errors
class RemoteDatastoreStub(apiproxy_stub.APIProxyStub):
  """Provides a stub that permits execution of stateful datastore queries.
  Some operations aren't possible using the standard interface. Notably,
  datastore RunQuery operations internally store a cursor that is referenced in
  later Next calls, and cleaned up at the end of each request. Because every
  call to ApiCallHandler takes place in its own request, this isn't possible.
  To work around this, RemoteDatastoreStub provides its own implementation of
  RunQuery that immediately returns the query results.
  """
  def _Dynamic_RunQuery(self, request, response):
    """Handle a RunQuery request.
    We handle RunQuery by executing a Query and a Next and returning the result
    of the Next request.
    """
    runquery_response = datastore_pb.QueryResult()
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery',
                                   request, runquery_response)
    # Drain the cursor immediately with a Next call, since the cursor would
    # not survive past the end of this request.
    next_request = datastore_pb.NextRequest()
    next_request.mutable_cursor().CopyFrom(runquery_response.cursor())
    next_request.set_count(request.limit())
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next',
                                   next_request, response)
  def _Dynamic_Transaction(self, request, response):
    """Handle a Transaction request.
    We handle transactions by accumulating Put requests on the client end, as
    well as recording the key and hash of Get requests. When Commit is called,
    Transaction is invoked, which verifies that all the entities in the
    precondition list still exist and their hashes match, then performs a
    transaction of its own to make the updates.
    """
    tx = datastore_pb.Transaction()
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
                                   api_base_pb.VoidProto(), tx)
    # Check each precondition inside the transaction: the entity's current
    # existence and content hash must match what the client saw.
    preconditions = request.precondition_list()
    if preconditions:
      get_request = datastore_pb.GetRequest()
      get_request.mutable_transaction().CopyFrom(tx)
      for precondition in preconditions:
        key = get_request.add_key()
        key.CopyFrom(precondition.key())
      get_response = datastore_pb.GetResponse()
      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', get_request,
                                     get_response)
      entities = get_response.entity_list()
      assert len(entities) == request.precondition_size()
      for precondition, entity in zip(preconditions, entities):
        # A precondition without a hash means the client saw no entity; any
        # mismatch with the current existence state is a concurrent change.
        if precondition.has_hash() != entity.has_entity():
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.CONCURRENT_TRANSACTION,
              "Transaction precondition failed.")
        elif entity.has_entity():
          entity_hash = sha.new(entity.entity().Encode()).digest()
          if precondition.hash() != entity_hash:
            raise apiproxy_errors.ApplicationError(
                datastore_pb.Error.CONCURRENT_TRANSACTION,
                "Transaction precondition failed.")
    # Apply the accumulated writes inside the same transaction, then commit.
    if request.has_puts():
      put_request = request.puts()
      put_request.mutable_transaction().CopyFrom(tx)
      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put',
                                     put_request, response)
    if request.has_deletes():
      delete_request = request.deletes()
      delete_request.mutable_transaction().CopyFrom(tx)
      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete',
                                     delete_request, api_base_pb.VoidProto())
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit', tx,
                                   api_base_pb.VoidProto())
  def _Dynamic_GetIDs(self, request, response):
    """Fetch unique IDs for a set of paths."""
    # NOTE(review): these asserts validate the request but are stripped
    # under python -O; confirm whether stricter validation is needed.
    for entity in request.entity_list():
      assert entity.property_size() == 0
      assert entity.raw_property_size() == 0
      assert entity.entity_group().element_size() == 0
      lastpart = entity.key().path().element_list()[-1]
      assert lastpart.id() == 0 and not lastpart.has_name()
    # Put the skeleton entities inside a transaction that is immediately
    # rolled back: IDs get allocated and returned, but nothing persists.
    tx = datastore_pb.Transaction()
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction',
                                   api_base_pb.VoidProto(), tx)
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', request, response)
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback', tx,
                                   api_base_pb.VoidProto())
# Maps service name -> method name -> (request class, response class) for
# every call this handler will deserialize and execute. Note that
# remote_api_pb re-exports PutRequest from datastore_pb, so
# remote_api_pb.PutRequest below is the datastore PutRequest type.
SERVICE_PB_MAP = {
    'datastore_v3': {
        'Get': (datastore_pb.GetRequest, datastore_pb.GetResponse),
        'Put': (datastore_pb.PutRequest, datastore_pb.PutResponse),
        'Delete': (datastore_pb.DeleteRequest, datastore_pb.DeleteResponse),
        'Count': (datastore_pb.Query, api_base_pb.Integer64Proto),
        'GetIndices': (api_base_pb.StringProto, datastore_pb.CompositeIndices),
    },
    'remote_datastore': {
        'RunQuery': (datastore_pb.Query, datastore_pb.QueryResult),
        'Transaction': (remote_api_pb.TransactionRequest,
                        datastore_pb.PutResponse),
        'GetIDs': (remote_api_pb.PutRequest, datastore_pb.PutResponse),
    },
}
class ApiCallHandler(webapp.RequestHandler):
  """A webapp handler that accepts API calls over HTTP and executes them."""
  # Services dispatched to in-process stubs instead of the real apiproxy.
  LOCAL_STUBS = {
    'remote_datastore': RemoteDatastoreStub('remote_datastore'),
  }
  def CheckIsAdmin(self):
    # Reject any caller who is not a logged-in administrator, and any
    # request missing the appcfg API-version header; writes an error body
    # and returns False in either case.
    if not users.is_current_user_admin():
      self.response.set_status(401)
      self.response.out.write(
          "You must be logged in as an administrator to access this.")
      self.response.headers['Content-Type'] = 'text/plain'
      return False
    elif 'X-appcfg-api-version' not in self.request.headers:
      self.response.set_status(403)
      self.response.out.write("This request did not contain a necessary header")
      return False
    return True
  def get(self):
    """Handle a GET. Just show an info page."""
    if not self.CheckIsAdmin():
      return
    page = self.InfoPage()
    self.response.out.write(page)
  def post(self):
    """Handle POST requests by executing the API call."""
    if not self.CheckIsAdmin():
      return
    self.response.headers['Content-Type'] = 'application/octet-stream'
    response = remote_api_pb.Response()
    try:
      request = remote_api_pb.Request()
      request.ParseFromString(self.request.body)
      response_data = self.ExecuteRequest(request)
      response.mutable_response().set_contents(response_data.Encode())
      self.response.set_status(200)
    except Exception, e:
      # Failures are reported inside the response envelope as a pickled
      # exception (HTTP status stays 200); remote_api_stub re-raises it
      # on the client side.
      self.response.set_status(200)
      response.mutable_exception().set_contents(pickle.dumps(e))
    self.response.out.write(response.Encode())
  def ExecuteRequest(self, request):
    """Executes an API invocation and returns the response object."""
    service = request.service_name()
    method = request.method()
    # Only (service, method) pairs listed in SERVICE_PB_MAP are callable.
    service_methods = SERVICE_PB_MAP.get(service, {})
    request_class, response_class = service_methods.get(method, (None, None))
    if not request_class:
      raise apiproxy_errors.CallNotFoundError()
    request_data = request_class()
    request_data.ParseFromString(request.request().contents())
    response_data = response_class()
    if service in self.LOCAL_STUBS:
      self.LOCAL_STUBS[service].MakeSyncCall(service, method, request_data,
                                             response_data)
    else:
      apiproxy_stub_map.MakeSyncCall(service, method, request_data,
                                     response_data)
    return response_data
  def InfoPage(self):
    """Renders an information page."""
    return """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
 "http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html><head>
<title>App Engine API endpoint.</title>
</head><body>
<h1>App Engine API endpoint.</h1>
<p>This is an endpoint for the App Engine remote API interface.
Point your stubs (google.appengine.ext.remote_api.remote_api_stub) here.</p>
</body>
</html>"""
def main():
  """Serve the remote API handler as a CGI application on every path."""
  app = webapp.WSGIApplication([('.*', ApiCallHandler)])
  wsgiref.handlers.CGIHandler().run(app)
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An apiproxy stub that calls a remote handler via HTTP.
This allows easy remote access to the App Engine datastore, and potentially any
of the other App Engine APIs, using the same interface you use when accessing
the service locally.
An example Python script:
---
from google.appengine.ext import db
from google.appengine.ext.remote_api import remote_api_stub
from myapp import models
import getpass
def auth_func():
return (raw_input('Username:'), getpass.getpass('Password:'))
remote_api_stub.ConfigureRemoteDatastore('my-app', '/remote_api', auth_func)
# Now you can access the remote datastore just as if your code was running on
# App Engine!
houses = models.House.all().fetch(100)
for a_house in houses:
a_house.doors += 1
db.put(houses)
---
A few caveats:
- Where possible, avoid iterating over queries directly. Fetching as many
results as you will need is faster and more efficient.
- If you need to iterate, consider instead fetching items in batches with a sort
order and constructing a new query starting from where the previous one left
off. The __key__ pseudo-property can be used as a sort key for this purpose,
and does not even require a custom index if you are iterating over all
entities of a given type.
- Likewise, it's a good idea to put entities in batches. Instead of calling put
for each individual entity, accumulate them and put them in batches using
db.put(), if you can.
- Requests and responses are still limited to 1MB each, so if you have large
entities or try and fetch or put many of them at once, your requests may fail.
"""
import os
import pickle
import sha
import sys
import thread
import threading
from google.appengine.api import apiproxy_stub_map
from google.appengine.datastore import datastore_pb
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.runtime import apiproxy_errors
from google.appengine.tools import appengine_rpc
def GetUserAgent():
  """Determines the value of the 'User-agent' header to use for HTTP requests.
  Returns:
    String containing the 'user-agent' header value, which includes the SDK
    version, the platform information, and the version of Python;
    e.g., "remote_api/1.0.1 Darwin/9.2.0 Python/2.5.2".
  """
  python_version = ".".join(str(part) for part in sys.version_info)
  return " ".join(["Google-remote_api/1.0",
                   appengine_rpc.GetPlatformToken(),
                   "Python/%s" % python_version])
def GetSourceName():
  """Returns the source-name token reported to the remote API server."""
  return "Google-remote_api-1.0"
class TransactionData(object):
  """Encapsulates data about an individual transaction.

  Attributes:
    thread_id: identifier of the thread that owns this transaction.
    preconditions: encoded key -> (key, entity hash or None) seen via Get.
    entities: encoded key -> (key, entity or None) pending Put/Delete.
  """
  def __init__(self, thread_id):
    self.thread_id = thread_id
    self.preconditions = {}
    self.entities = {}
class RemoteStub(object):
  """A stub for calling services on a remote server over HTTP.
  You can use this to stub out any service that the remote server supports.
  """
  def __init__(self, server, path):
    """Creates a stub that sends its calls to the given server.
    Args:
      server: An instance of a subclass of
        google.appengine.tools.appengine_rpc.AbstractRpcServer.
      path: The path to the handler this stub should send requests to.
    """
    self._server = server
    self._path = path
  def MakeSyncCall(self, service, call, request, response):
    """Serializes the call, POSTs it to the server, and decodes the reply.

    Re-raises the server-side exception if the handler pickled one into
    the response envelope.
    """
    envelope = remote_api_pb.Request()
    envelope.set_service_name(service)
    envelope.set_method(call)
    envelope.mutable_request().set_contents(request.Encode())
    wire_reply = self._server.Send(self._path, envelope.Encode())
    reply = remote_api_pb.Response()
    reply.ParseFromString(wire_reply)
    if reply.has_exception():
      raise pickle.loads(reply.exception().contents())
    response.ParseFromString(reply.response().contents())
class RemoteDatastoreStub(RemoteStub):
  """A specialised stub for accessing the App Engine datastore remotely.
  A specialised stub is required because there are some datastore operations
  that preserve state between calls. This stub makes queries possible.
  Transactions on the remote datastore are unfortunately still impossible.
  """
  def __init__(self, server, path):
    """Constructs a stub that talks to the remote_api handler at path.
    Args:
      server: An instance of a subclass of
        google.appengine.tools.appengine_rpc.AbstractRpcServer.
      path: The path to the remote_api handler.
    """
    super(RemoteDatastoreStub, self).__init__(server, path)
    self.__queries = {}
    self.__transactions = {}
    # Locally-allocated cursor and transaction handles, guarded by locks
    # since multiple threads may use this stub.
    self.__next_local_cursor = 1
    self.__local_cursor_lock = threading.Lock()
    self.__next_local_tx = 1
    self.__local_tx_lock = threading.Lock()
  def MakeSyncCall(self, service, call, request, response):
    """Dispatches a datastore call, intercepting stateful calls locally."""
    assert service == 'datastore_v3'
    explanation = []
    assert request.IsInitialized(explanation), explanation
    handler = getattr(self, '_Dynamic_' + call, None)
    if handler:
      handler(request, response)
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(service, call, request,
                                                    response)
    assert response.IsInitialized(explanation), explanation
  def _Dynamic_RunQuery(self, query, query_result):
    """Defers the query: allocates a local cursor; Next runs it remotely."""
    self.__local_cursor_lock.acquire()
    try:
      cursor_id = self.__next_local_cursor
      self.__next_local_cursor += 1
    finally:
      self.__local_cursor_lock.release()
    self.__queries[cursor_id] = query
    query_result.mutable_cursor().set_cursor(cursor_id)
    query_result.set_more_results(True)
  def _Dynamic_Next(self, next_request, query_result):
    """Runs the saved query remotely and advances its offset/limit."""
    cursor = next_request.cursor().cursor()
    if cursor not in self.__queries:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'Cursor %d not found' % cursor)
    query = self.__queries[cursor]
    if query is None:
      # A None entry means the query was already exhausted.
      query_result.set_more_results(False)
      return
    request = datastore_pb.Query()
    request.CopyFrom(query)
    if request.has_limit():
      request.set_limit(min(request.limit(), next_request.count()))
    else:
      request.set_limit(next_request.count())
    super(RemoteDatastoreStub, self).MakeSyncCall(
        'remote_datastore', 'RunQuery', request, query_result)
    # Advance the saved query past the results just returned so the next
    # Next call resumes where this one left off.
    query.set_offset(query.offset() + query_result.result_size())
    if query.has_limit():
      query.set_limit(query.limit() - query_result.result_size())
    if not query_result.more_results():
      self.__queries[cursor] = None
  def _Dynamic_Get(self, get_request, get_response):
    """Fetches entities, merging in uncommitted transaction-local writes.

    Inside a transaction, entities already written locally are served from
    the transaction cache, and a (key, hash) precondition is recorded for
    every entity actually fetched from the server.
    """
    txid = None
    if get_request.has_transaction():
      txid = get_request.transaction().handle()
      txdata = self.__transactions[txid]
      # BUGFIX: was `assert (cond, msg)` — a non-empty tuple, so the
      # assertion could never fire.
      assert txdata.thread_id == thread.get_ident(), (
          "Transactions are single-threaded.")
      keys = [(k, k.Encode()) for k in get_request.key_list()]
      # Only fetch keys we don't already have a locally-written copy of.
      new_request = datastore_pb.GetRequest()
      for key, enckey in keys:
        if enckey not in txdata.entities:
          new_request.add_key().CopyFrom(key)
    else:
      new_request = get_request
    if new_request.key_size() > 0:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Get', new_request, get_response)
    if txid is not None:
      # Record preconditions for everything we fetched remotely.
      newkeys = new_request.key_list()
      entities = get_response.entity_list()
      for key, entity in zip(newkeys, entities):
        entity_hash = None
        if entity.has_entity():
          entity_hash = sha.new(entity.entity().Encode()).digest()
        txdata.preconditions[key.Encode()] = (key, entity_hash)
      # Rebuild the response in the caller's key order, preferring any
      # transaction-local (uncommitted) entity over the fetched one.
      new_response = datastore_pb.GetResponse()
      it = iter(get_response.entity_list())
      for key, enckey in keys:
        if enckey in txdata.entities:
          cached_entity = txdata.entities[enckey][1]
          if cached_entity:
            new_response.add_entity().mutable_entity().CopyFrom(cached_entity)
          else:
            new_response.add_entity()
        else:
          new_entity = it.next()
          if new_entity.has_entity():
            assert new_entity.entity().key() == key
            new_response.add_entity().CopyFrom(new_entity)
          else:
            new_response.add_entity()
      get_response.CopyFrom(new_response)
  def _Dynamic_Put(self, put_request, put_response):
    """Puts entities; inside a transaction they are buffered until Commit."""
    if put_request.has_transaction():
      entities = put_request.entity_list()
      # Entities with no id or name need one allocated remotely first.
      requires_id = lambda x: x.id() == 0 and not x.has_name()
      new_ents = [e for e in entities
                  if requires_id(e.key().path().element_list()[-1])]
      id_request = remote_api_pb.PutRequest()
      if new_ents:
        for ent in new_ents:
          e = id_request.add_entity()
          e.mutable_key().CopyFrom(ent.key())
          e.mutable_entity_group()
        id_response = datastore_pb.PutResponse()
        super(RemoteDatastoreStub, self).MakeSyncCall(
            'remote_datastore', 'GetIDs', id_request, id_response)
        assert id_request.entity_size() == id_response.key_size()
        for key, ent in zip(id_response.key_list(), new_ents):
          ent.mutable_key().CopyFrom(key)
          ent.mutable_entity_group().add_element().CopyFrom(
              key.path().element(0))
      txid = put_request.transaction().handle()
      txdata = self.__transactions[txid]
      # BUGFIX: was an always-true tuple assert.
      assert txdata.thread_id == thread.get_ident(), (
          "Transactions are single-threaded.")
      for entity in entities:
        txdata.entities[entity.key().Encode()] = (entity.key(), entity)
        put_response.add_key().CopyFrom(entity.key())
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Put', put_request, put_response)
  def _Dynamic_Delete(self, delete_request, response):
    """Deletes entities; inside a transaction the deletes are buffered."""
    if delete_request.has_transaction():
      txid = delete_request.transaction().handle()
      txdata = self.__transactions[txid]
      # BUGFIX: was an always-true tuple assert.
      assert txdata.thread_id == thread.get_ident(), (
          "Transactions are single-threaded.")
      for key in delete_request.key_list():
        # A None entity marks this key as deleted within the transaction.
        txdata.entities[key.Encode()] = (key, None)
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Delete', delete_request, response)
  def _Dynamic_BeginTransaction(self, request, transaction):
    """Allocates a purely-local transaction handle; no RPC is made."""
    self.__local_tx_lock.acquire()
    try:
      txid = self.__next_local_tx
      self.__transactions[txid] = TransactionData(thread.get_ident())
      self.__next_local_tx += 1
    finally:
      self.__local_tx_lock.release()
    transaction.set_handle(txid)
  def _Dynamic_Commit(self, transaction, transaction_response):
    """Sends the buffered preconditions and writes as one Transaction RPC."""
    txid = transaction.handle()
    if txid not in self.__transactions:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'Transaction %d not found.' % (txid,))
    txdata = self.__transactions[txid]
    # BUGFIX: was an always-true tuple assert.
    assert txdata.thread_id == thread.get_ident(), (
        "Transactions are single-threaded.")
    del self.__transactions[txid]
    tx = remote_api_pb.TransactionRequest()
    for key, hash in txdata.preconditions.values():
      precond = tx.add_precondition()
      precond.mutable_key().CopyFrom(key)
      if hash:
        precond.set_hash(hash)
    puts = tx.mutable_puts()
    deletes = tx.mutable_deletes()
    for key, entity in txdata.entities.values():
      if entity:
        puts.add_entity().CopyFrom(entity)
      else:
        deletes.add_key().CopyFrom(key)
    super(RemoteDatastoreStub, self).MakeSyncCall(
        'remote_datastore', 'Transaction',
        tx, datastore_pb.PutResponse())
  def _Dynamic_Rollback(self, transaction, transaction_response):
    """Discards the local transaction state; no RPC is made."""
    txid = transaction.handle()
    self.__local_tx_lock.acquire()
    try:
      if txid not in self.__transactions:
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.BAD_REQUEST,
            'Transaction %d not found.' % (txid,))
      # BUGFIX: referenced undefined name `txdata[txid]` (NameError when
      # reached) inside an always-true tuple assert; look the transaction
      # up properly and assert for real.
      assert self.__transactions[txid].thread_id == thread.get_ident(), (
          "Transactions are single-threaded.")
      del self.__transactions[txid]
    finally:
      self.__local_tx_lock.release()
  def _Dynamic_CreateIndex(self, index, id_response):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')
  def _Dynamic_UpdateIndex(self, index, void):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')
  def _Dynamic_DeleteIndex(self, index, void):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')
def ConfigureRemoteDatastore(app_id,
                             path,
                             auth_func,
                             servername=None,
                             rpc_server_factory=appengine_rpc.HttpRpcServer):
  """Does necessary setup to allow easy remote access to an AppEngine datastore.
  Args:
    app_id: The app_id of your app, as declared in app.yaml.
    path: The path to the remote_api handler for your app
      (for example, '/remote_api').
    auth_func: A function that takes no arguments and returns a
      (username, password) tuple. This will be called if your application
      requires authentication to access the remote_api handler (it should!)
      and you do not already have a valid auth cookie.
    servername: The hostname your app is deployed on. Defaults to
      <app_id>.appspot.com.
    rpc_server_factory: A factory to construct the rpc server for the datastore.
  """
  servername = servername or '%s.appspot.com' % (app_id,)
  os.environ['APPLICATION_ID'] = app_id
  # Replace the whole apiproxy so the remote stub handles datastore calls.
  apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
  rpc_server = rpc_server_factory(servername, auth_func, GetUserAgent(),
                                  GetSourceName())
  apiproxy_stub_map.apiproxy.RegisterStub(
      'datastore_v3', RemoteDatastoreStub(rpc_server, path))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.net.proto.RawMessage import RawMessage
from google.appengine.datastore.datastore_pb import PutRequest
from google.appengine.datastore.datastore_pb import DeleteRequest
from google.appengine.datastore.entity_pb import Reference
class Request(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: envelope for a single remote API call.

  Wire fields: service_name (tag 2, required string), method (tag 3,
  required string), request (tag 4, required RawMessage holding the
  encoded call payload).
  """
  has_service_name_ = 0
  service_name_ = ""
  has_method_ = 0
  method_ = ""
  has_request_ = 0
  def __init__(self, contents=None):
    self.request_ = RawMessage()
    if contents is not None: self.MergeFromString(contents)
  def service_name(self): return self.service_name_
  def set_service_name(self, x):
    self.has_service_name_ = 1
    self.service_name_ = x
  def clear_service_name(self):
    if self.has_service_name_:
      self.has_service_name_ = 0
      self.service_name_ = ""
  def has_service_name(self): return self.has_service_name_
  def method(self): return self.method_
  def set_method(self, x):
    self.has_method_ = 1
    self.method_ = x
  def clear_method(self):
    if self.has_method_:
      self.has_method_ = 0
      self.method_ = ""
  def has_method(self): return self.has_method_
  def request(self): return self.request_
  def mutable_request(self): self.has_request_ = 1; return self.request_
  def clear_request(self):self.has_request_ = 0; self.request_.Clear()
  def has_request(self): return self.has_request_
  def MergeFrom(self, x):
    # Field-wise merge from another Request; x must be a distinct object.
    assert x is not self
    if (x.has_service_name()): self.set_service_name(x.service_name())
    if (x.has_method()): self.set_method(x.method())
    if (x.has_request()): self.mutable_request().MergeFrom(x.request())
  def Equals(self, x):
    if x is self: return 1
    if self.has_service_name_ != x.has_service_name_: return 0
    if self.has_service_name_ and self.service_name_ != x.service_name_: return 0
    if self.has_method_ != x.has_method_: return 0
    if self.has_method_ and self.method_ != x.method_: return 0
    if self.has_request_ != x.has_request_: return 0
    if self.has_request_ and self.request_ != x.request_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # All three fields are required on the wire.
    initialized = 1
    if (not self.has_service_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: service_name not set.')
    if (not self.has_method_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: method not set.')
    if (not self.has_request_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: request not set.')
    elif not self.request_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    # The trailing +3 accounts for one tag byte per required field.
    n = 0
    n += self.lengthString(len(self.service_name_))
    n += self.lengthString(len(self.method_))
    n += self.lengthString(self.request_.ByteSize())
    return n + 3
  def Clear(self):
    self.clear_service_name()
    self.clear_method()
    self.clear_request()
  def OutputUnchecked(self, out):
    # Tags: 18 = (2<<3)|STRING, 26 = (3<<3)|STRING, 34 = (4<<3)|STRING.
    out.putVarInt32(18)
    out.putPrefixedString(self.service_name_)
    out.putVarInt32(26)
    out.putPrefixedString(self.method_)
    out.putVarInt32(34)
    out.putVarInt32(self.request_.ByteSize())
    self.request_.OutputUnchecked(out)
  def TryMerge(self, d):
    # Decode until the buffer is exhausted; unknown tags are skipped.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 18:
        self.set_service_name(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_method(d.getPrefixedString())
        continue
      if tt == 34:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_request().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_service_name_: res+=prefix+("service_name: %s\n" % self.DebugFormatString(self.service_name_))
    if self.has_method_: res+=prefix+("method: %s\n" % self.DebugFormatString(self.method_))
    if self.has_request_:
      res+=prefix+"request <\n"
      res+=self.request_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field tag numbers.
  kservice_name = 2
  kmethod = 3
  krequest = 4
  # Field names and wire types indexed by tag number (index 0 unused,
  # index 1 reserved).
  _TEXT = (
   "ErrorCode",
   None,
   "service_name",
   "method",
   "request",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Response(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: result envelope for a remote API call.

  Wire fields: response (tag 1) and exception (tag 2), both optional
  RawMessages built lazily on first access. The handler sets exactly one
  of them per call.
  """
  has_response_ = 0
  response_ = None
  has_exception_ = 0
  exception_ = None
  def __init__(self, contents=None):
    # Guards lazy construction of the optional submessages.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def response(self):
    # Lazily create the submessage under the lock (double-checked).
    if self.response_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.response_ is None: self.response_ = RawMessage()
      finally:
        self.lazy_init_lock_.release()
    return self.response_
  def mutable_response(self): self.has_response_ = 1; return self.response()
  def clear_response(self):
    if self.has_response_:
      self.has_response_ = 0;
      if self.response_ is not None: self.response_.Clear()
  def has_response(self): return self.has_response_
  def exception(self):
    # Lazily create the submessage under the lock (double-checked).
    if self.exception_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.exception_ is None: self.exception_ = RawMessage()
      finally:
        self.lazy_init_lock_.release()
    return self.exception_
  def mutable_exception(self): self.has_exception_ = 1; return self.exception()
  def clear_exception(self):
    if self.has_exception_:
      self.has_exception_ = 0;
      if self.exception_ is not None: self.exception_.Clear()
  def has_exception(self): return self.has_exception_
  def MergeFrom(self, x):
    # Field-wise merge from another Response; x must be a distinct object.
    assert x is not self
    if (x.has_response()): self.mutable_response().MergeFrom(x.response())
    if (x.has_exception()): self.mutable_exception().MergeFrom(x.exception())
  def Equals(self, x):
    if x is self: return 1
    if self.has_response_ != x.has_response_: return 0
    if self.has_response_ and self.response_ != x.response_: return 0
    if self.has_exception_ != x.has_exception_: return 0
    if self.has_exception_ and self.exception_ != x.exception_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are optional; only set submessages are checked.
    initialized = 1
    if (self.has_response_ and not self.response_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_exception_ and not self.exception_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_response_): n += 1 + self.lengthString(self.response_.ByteSize())
    if (self.has_exception_): n += 1 + self.lengthString(self.exception_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_response()
    self.clear_exception()
  def OutputUnchecked(self, out):
    # Tags: 10 = (1<<3)|STRING, 18 = (2<<3)|STRING.
    if (self.has_response_):
      out.putVarInt32(10)
      out.putVarInt32(self.response_.ByteSize())
      self.response_.OutputUnchecked(out)
    if (self.has_exception_):
      out.putVarInt32(18)
      out.putVarInt32(self.exception_.ByteSize())
      self.exception_.OutputUnchecked(out)
  def TryMerge(self, d):
    # Decode until the buffer is exhausted; unknown tags are skipped.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_response().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_exception().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_response_:
      res+=prefix+"response <\n"
      res+=self.response_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_exception_:
      res+=prefix+"exception <\n"
      res+=self.exception_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field tag numbers.
  kresponse = 1
  kexception = 2
  # Field names and wire types indexed by tag number (index 0 unused).
  _TEXT = (
   "ErrorCode",
   "response",
   "exception",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class TransactionRequest_Precondition(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: one entity precondition of a transaction.

  Encoded as a group (terminated by end tag 12): key (tag 2, required
  Reference) and hash (tag 3, optional content digest). The remote_api
  handler treats an absent hash as "the entity did not exist".
  """
  has_key_ = 0
  has_hash_ = 0
  hash_ = ""
  def __init__(self, contents=None):
    self.key_ = Reference()
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def mutable_key(self): self.has_key_ = 1; return self.key_
  def clear_key(self):self.has_key_ = 0; self.key_.Clear()
  def has_key(self): return self.has_key_
  def hash(self): return self.hash_
  def set_hash(self, x):
    self.has_hash_ = 1
    self.hash_ = x
  def clear_hash(self):
    if self.has_hash_:
      self.has_hash_ = 0
      self.hash_ = ""
  def has_hash(self): return self.has_hash_
  def MergeFrom(self, x):
    # Field-wise merge from another Precondition; x must be distinct.
    assert x is not self
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_hash()): self.set_hash(x.hash())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_hash_ != x.has_hash_: return 0
    if self.has_hash_ and self.hash_ != x.hash_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # key is required; hash is optional.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    elif not self.key_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    # The trailing +1 accounts for the key field's tag byte.
    n = 0
    n += self.lengthString(self.key_.ByteSize())
    if (self.has_hash_): n += 1 + self.lengthString(len(self.hash_))
    return n + 1
  def Clear(self):
    self.clear_key()
    self.clear_hash()
  def OutputUnchecked(self, out):
    # Tags: 18 = (2<<3)|STRING, 26 = (3<<3)|STRING. The enclosing
    # TransactionRequest writes the group's start/end tags (11/12).
    out.putVarInt32(18)
    out.putVarInt32(self.key_.ByteSize())
    self.key_.OutputUnchecked(out)
    if (self.has_hash_):
      out.putVarInt32(26)
      out.putPrefixedString(self.hash_)
  def TryMerge(self, d):
    # Groups decode until their end tag (12) rather than end of buffer.
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 26:
        self.set_hash(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_hash_: res+=prefix+("hash: %s\n" % self.DebugFormatString(self.hash_))
    return res
class TransactionRequest(ProtocolBuffer.ProtocolMessage):
  """Generated protobuf message describing a transaction to apply remotely.

  Contains repeated Precondition groups (field 1) plus an optional `puts`
  PutRequest (field 4) and `deletes` DeleteRequest (field 5).  The two
  optional sub-messages are allocated lazily under lazy_init_lock_.
  """
  has_puts_ = 0
  puts_ = None
  has_deletes_ = 0
  deletes_ = None

  def __init__(self, contents=None):
    self.precondition_ = []
    # Guards the lazy allocation of puts_ and deletes_ below.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def precondition_size(self): return len(self.precondition_)

  def precondition_list(self): return self.precondition_

  def precondition(self, i):
    return self.precondition_[i]

  def mutable_precondition(self, i):
    return self.precondition_[i]

  def add_precondition(self):
    """Appends a fresh Precondition and returns it for the caller to fill."""
    x = TransactionRequest_Precondition()
    self.precondition_.append(x)
    return x

  def clear_precondition(self):
    self.precondition_ = []

  def puts(self):
    # Double-checked lazy creation of the PutRequest.
    if self.puts_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.puts_ is None: self.puts_ = PutRequest()
      finally:
        self.lazy_init_lock_.release()
    return self.puts_

  def mutable_puts(self): self.has_puts_ = 1; return self.puts()

  def clear_puts(self):
    # Warning: this method does not acquire the lock.
    if self.has_puts_:
      self.has_puts_ = 0;
      if self.puts_ is not None: self.puts_.Clear()

  def has_puts(self): return self.has_puts_

  def deletes(self):
    # Double-checked lazy creation of the DeleteRequest.
    if self.deletes_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.deletes_ is None: self.deletes_ = DeleteRequest()
      finally:
        self.lazy_init_lock_.release()
    return self.deletes_

  def mutable_deletes(self): self.has_deletes_ = 1; return self.deletes()

  def clear_deletes(self):
    # Warning: this method does not acquire the lock.
    if self.has_deletes_:
      self.has_deletes_ = 0;
      if self.deletes_ is not None: self.deletes_.Clear()

  def has_deletes(self): return self.has_deletes_

  def MergeFrom(self, x):
    """Merges all set fields of message `x` into this message."""
    assert x is not self
    for i in xrange(x.precondition_size()): self.add_precondition().CopyFrom(x.precondition(i))
    if (x.has_puts()): self.mutable_puts().MergeFrom(x.puts())
    if (x.has_deletes()): self.mutable_deletes().MergeFrom(x.deletes())

  def Equals(self, x):
    """Field-by-field equality; returns 1 or 0 (generated-code convention)."""
    if x is self: return 1
    if len(self.precondition_) != len(x.precondition_): return 0
    for e1, e2 in zip(self.precondition_, x.precondition_):
      if e1 != e2: return 0
    if self.has_puts_ != x.has_puts_: return 0
    if self.has_puts_ and self.puts_ != x.puts_: return 0
    if self.has_deletes_ != x.has_deletes_: return 0
    if self.has_deletes_ and self.deletes_ != x.deletes_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff every precondition and set sub-message is initialized."""
    initialized = 1
    for p in self.precondition_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_puts_ and not self.puts_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_deletes_ and not self.deletes_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    n = 0
    # Two bytes per precondition: one START_GROUP tag + one END_GROUP tag.
    n += 2 * len(self.precondition_)
    for i in xrange(len(self.precondition_)): n += self.precondition_[i].ByteSize()
    if (self.has_puts_): n += 1 + self.lengthString(self.puts_.ByteSize())
    if (self.has_deletes_): n += 1 + self.lengthString(self.deletes_.ByteSize())
    return n + 0

  def Clear(self):
    self.clear_precondition()
    self.clear_puts()
    self.clear_deletes()

  def OutputUnchecked(self, out):
    """Serializes without checking IsInitialized (caller's responsibility)."""
    for i in xrange(len(self.precondition_)):
      # Each precondition is a group: START_GROUP (11) ... END_GROUP (12).
      out.putVarInt32(11)
      self.precondition_[i].OutputUnchecked(out)
      out.putVarInt32(12)
    if (self.has_puts_):
      # Field 4 (puts), length-delimited: tag 34 == (4 << 3) | 2.
      out.putVarInt32(34)
      out.putVarInt32(self.puts_.ByteSize())
      self.puts_.OutputUnchecked(out)
    if (self.has_deletes_):
      # Field 5 (deletes), length-delimited: tag 42 == (5 << 3) | 2.
      out.putVarInt32(42)
      out.putVarInt32(self.deletes_.ByteSize())
      self.deletes_.OutputUnchecked(out)

  def TryMerge(self, d):
    """Merges fields decoded from decoder `d`; unknown fields are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        # START_GROUP: the group decoder consumes through END_GROUP.
        self.add_precondition().TryMerge(d)
        continue
      if tt == 34:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_puts().TryMerge(tmp)
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_deletes().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Returns a text rendering of the set fields."""
    res=""
    cnt=0
    for e in self.precondition_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Precondition%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_puts_:
      res+=prefix+"puts <\n"
      res+=self.puts_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_deletes_:
      res+=prefix+"deletes <\n"
      res+=self.deletes_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res

  # Field numbers for this message.
  kPreconditionGroup = 1
  kPreconditionkey = 2
  kPreconditionhash = 3
  kputs = 4
  kdeletes = 5

  # Field names indexed by field number (index 0 is the ErrorCode placeholder).
  _TEXT = (
   "ErrorCode",
   "Precondition",
   "key",
   "hash",
   "puts",
   "deletes",
  )

  # Encoder wire types indexed by field number.
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public message classes exported by this generated module.
__all__ = ['Request','Response','TransactionRequest','TransactionRequest_Precondition']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An apiproxy stub that calls a remote handler via HTTP.
This allows easy remote access to the App Engine datastore, and potentially any
of the other App Engine APIs, using the same interface you use when accessing
the service locally.
An example Python script:
---
from google.appengine.ext import db
from google.appengine.ext.remote_api import remote_api_stub
from myapp import models
import getpass
def auth_func():
return (raw_input('Username:'), getpass.getpass('Password:'))
remote_api_stub.ConfigureRemoteDatastore('my-app', '/remote_api', auth_func)
# Now you can access the remote datastore just as if your code was running on
# App Engine!
houses = models.House.all().fetch(100)
for a_house in houses:
a_house.doors += 1
db.put(houses)
---
A few caveats:
- Where possible, avoid iterating over queries directly. Fetching as many
results as you will need is faster and more efficient.
- If you need to iterate, consider instead fetching items in batches with a sort
order and constructing a new query starting from where the previous one left
off. The __key__ pseudo-property can be used as a sort key for this purpose,
and does not even require a custom index if you are iterating over all
entities of a given type.
- Likewise, it's a good idea to put entities in batches. Instead of calling put
for each individual entity, accumulate them and put them in batches using
db.put(), if you can.
- Requests and responses are still limited to 1MB each, so if you have large
entities or try and fetch or put many of them at once, your requests may fail.
"""
import os
import pickle
import sha
import sys
import thread
import threading
from google.appengine.api import apiproxy_stub_map
from google.appengine.datastore import datastore_pb
from google.appengine.ext.remote_api import remote_api_pb
from google.appengine.runtime import apiproxy_errors
from google.appengine.tools import appengine_rpc
def GetUserAgent():
  """Determines the value of the 'User-agent' header to use for HTTP requests.

  Returns:
    String containing the 'user-agent' header value, which includes the SDK
    version, the platform information, and the version of Python;
    e.g., "remote_api/1.0.1 Darwin/9.2.0 Python/2.5.2".
  """
  python_version = ".".join(str(part) for part in sys.version_info)
  tokens = [
      "Google-remote_api/1.0",
      appengine_rpc.GetPlatformToken(),
      "Python/%s" % python_version,
  ]
  return " ".join(tokens)
def GetSourceName():
  """Returns the fixed source-name token reported to the remote endpoint."""
  return "Google-remote_api-1.0"
class TransactionData(object):
  """Encapsulates the client-side bookkeeping for one open transaction."""

  def __init__(self, thread_id):
    # Identifier of the thread that opened (and therefore owns) this tx.
    self.thread_id = thread_id
    # Maps encoded entity key -> (key, entity hash) snapshots read in the tx.
    self.preconditions = {}
    # Maps encoded entity key -> (key, entity-or-None) pending writes/deletes.
    self.entities = {}
class RemoteStub(object):
  """A stub for calling services on a remote server over HTTP.

  You can use this to stub out any service that the remote server supports.
  """

  def __init__(self, server, path):
    """Constructs a new RemoteStub that communicates with the specified server.

    Args:
      server: An instance of a subclass of
        google.appengine.tools.appengine_rpc.AbstractRpcServer.
      path: The path to the handler this stub should send requests to.
    """
    self._server = server
    self._path = path

  def MakeSyncCall(self, service, call, request, response):
    """Wraps the call in a remote_api request, POSTs it, decodes the reply.

    Raises:
      The unpickled server-side exception, if the remote handler reports one.
    """
    wrapped_request = remote_api_pb.Request()
    wrapped_request.set_service_name(service)
    wrapped_request.set_method(call)
    wrapped_request.mutable_request().set_contents(request.Encode())

    encoded_reply = self._server.Send(self._path, wrapped_request.Encode())
    wrapped_response = remote_api_pb.Response()
    wrapped_response.ParseFromString(encoded_reply)

    if wrapped_response.has_exception():
      raise pickle.loads(wrapped_response.exception().contents())
    response.ParseFromString(wrapped_response.response().contents())
class RemoteDatastoreStub(RemoteStub):
  """A specialised stub for accessing the App Engine datastore remotely.

  A specialised stub is required because there are some datastore operations
  that preserve state between calls. This stub makes queries possible.
  Transactions on the remote datastore are only emulated: reads and writes
  are buffered locally (with per-entity hashes as preconditions) and applied
  in a single remote 'Transaction' call on commit.
  """

  def __init__(self, server, path):
    super(RemoteDatastoreStub, self).__init__(server, path)
    # Maps local cursor id -> pending Query (None once exhausted).
    self.__queries = {}
    # Maps local transaction handle -> TransactionData.
    self.__transactions = {}

    self.__next_local_cursor = 1
    self.__local_cursor_lock = threading.Lock()

    self.__next_local_tx = 1
    self.__local_tx_lock = threading.Lock()

  def __AssertTxOwnership(self, txdata):
    """Asserts that the calling thread owns the given transaction.

    BUGFIX: the original code wrote ``assert (cond, "msg")`` — asserting on
    a two-element tuple, which is always true, so the check never fired.
    """
    assert txdata.thread_id == thread.get_ident(), \
        "Transactions are single-threaded."

  def MakeSyncCall(self, service, call, request, response):
    """Dispatches to a local _Dynamic_* handler, else to the remote server."""
    assert service == 'datastore_v3'

    explanation = []
    assert request.IsInitialized(explanation), explanation

    handler = getattr(self, '_Dynamic_' + call, None)
    if handler:
      handler(request, response)
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(service, call, request,
                                                    response)

    assert response.IsInitialized(explanation), explanation

  def _Dynamic_RunQuery(self, query, query_result):
    """Defers execution: hands out a local cursor id; Next() does the work."""
    self.__local_cursor_lock.acquire()
    try:
      cursor_id = self.__next_local_cursor
      self.__next_local_cursor += 1
    finally:
      self.__local_cursor_lock.release()
    self.__queries[cursor_id] = query

    query_result.mutable_cursor().set_cursor(cursor_id)
    query_result.set_more_results(True)

  def _Dynamic_Next(self, next_request, query_result):
    """Fetches up to count() results for a locally-registered cursor.

    Raises:
      apiproxy_errors.ApplicationError: if the cursor id is unknown.
    """
    cursor = next_request.cursor().cursor()
    if cursor not in self.__queries:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'Cursor %d not found' % cursor)
    query = self.__queries[cursor]
    if query is None:
      # Cursor already exhausted on a previous Next() call.
      query_result.set_more_results(False)
      return

    request = datastore_pb.Query()
    request.CopyFrom(query)
    # Never fetch more than the caller asked for, nor more than remain.
    if request.has_limit():
      request.set_limit(min(request.limit(), next_request.count()))
    else:
      request.set_limit(next_request.count())

    super(RemoteDatastoreStub, self).MakeSyncCall(
        'remote_datastore', 'RunQuery', request, query_result)

    # Advance the saved query past the results just returned.
    query.set_offset(query.offset() + query_result.result_size())
    if query.has_limit():
      query.set_limit(query.limit() - query_result.result_size())
    if not query_result.more_results():
      self.__queries[cursor] = None

  def _Dynamic_Get(self, get_request, get_response):
    """Gets entities; inside a transaction, serves buffered writes and
    records per-entity hashes as commit preconditions."""
    txid = None
    if get_request.has_transaction():
      txid = get_request.transaction().handle()
      txdata = self.__transactions[txid]
      self.__AssertTxOwnership(txdata)

      keys = [(k, k.Encode()) for k in get_request.key_list()]

      # Only fetch keys the transaction has not already written locally.
      new_request = datastore_pb.GetRequest()
      for key, enckey in keys:
        if enckey not in txdata.entities:
          new_request.add_key().CopyFrom(key)
    else:
      new_request = get_request

    if new_request.key_size() > 0:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Get', new_request, get_response)

    if txid is not None:
      # Record a hash of each fetched entity so commit can verify nothing
      # changed remotely in the meantime (None hash == entity was absent).
      newkeys = new_request.key_list()
      entities = get_response.entity_list()
      for key, entity in zip(newkeys, entities):
        entity_hash = None
        if entity.has_entity():
          entity_hash = sha.new(entity.entity().Encode()).digest()
        txdata.preconditions[key.Encode()] = (key, entity_hash)

      # Merge locally-buffered writes with fetched entities, preserving the
      # order of the original request.
      new_response = datastore_pb.GetResponse()
      it = iter(get_response.entity_list())
      for key, enckey in keys:
        if enckey in txdata.entities:
          cached_entity = txdata.entities[enckey][1]
          if cached_entity:
            new_response.add_entity().mutable_entity().CopyFrom(cached_entity)
          else:
            # Deleted inside this transaction: report as missing.
            new_response.add_entity()
        else:
          new_entity = it.next()
          if new_entity.has_entity():
            assert new_entity.entity().key() == key
            new_response.add_entity().CopyFrom(new_entity)
          else:
            new_response.add_entity()
      get_response.CopyFrom(new_response)

  def _Dynamic_Put(self, put_request, put_response):
    """Puts entities; inside a transaction, buffers them locally after
    reserving ids remotely for any entity without a complete key."""
    if put_request.has_transaction():
      entities = put_request.entity_list()

      # Entities whose last path element has neither id nor name need a
      # remotely-assigned id before they can be buffered.
      requires_id = lambda x: x.id() == 0 and not x.has_name()
      new_ents = [e for e in entities
                  if requires_id(e.key().path().element_list()[-1])]
      id_request = remote_api_pb.PutRequest()
      if new_ents:
        for ent in new_ents:
          e = id_request.add_entity()
          e.mutable_key().CopyFrom(ent.key())
          e.mutable_entity_group()
        id_response = datastore_pb.PutResponse()
        super(RemoteDatastoreStub, self).MakeSyncCall(
            'remote_datastore', 'GetIDs', id_request, id_response)
        assert id_request.entity_size() == id_response.key_size()
        for key, ent in zip(id_response.key_list(), new_ents):
          ent.mutable_key().CopyFrom(key)
          ent.mutable_entity_group().add_element().CopyFrom(
              key.path().element(0))

      txid = put_request.transaction().handle()
      txdata = self.__transactions[txid]
      self.__AssertTxOwnership(txdata)
      for entity in entities:
        txdata.entities[entity.key().Encode()] = (entity.key(), entity)
        put_response.add_key().CopyFrom(entity.key())
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Put', put_request, put_response)

  def _Dynamic_Delete(self, delete_request, response):
    """Deletes entities; inside a transaction, buffers tombstones locally."""
    if delete_request.has_transaction():
      txid = delete_request.transaction().handle()
      txdata = self.__transactions[txid]
      self.__AssertTxOwnership(txdata)
      for key in delete_request.key_list():
        # A None entity marks a pending delete.
        txdata.entities[key.Encode()] = (key, None)
    else:
      super(RemoteDatastoreStub, self).MakeSyncCall(
          'datastore_v3', 'Delete', delete_request, response)

  def _Dynamic_BeginTransaction(self, request, transaction):
    """Allocates a local transaction handle owned by the calling thread."""
    self.__local_tx_lock.acquire()
    try:
      txid = self.__next_local_tx
      self.__transactions[txid] = TransactionData(thread.get_ident())
      self.__next_local_tx += 1
    finally:
      self.__local_tx_lock.release()
    transaction.set_handle(txid)

  def _Dynamic_Commit(self, transaction, transaction_response):
    """Sends the buffered transaction (preconditions + puts + deletes) as a
    single remote 'Transaction' call.

    Raises:
      apiproxy_errors.ApplicationError: if the handle is unknown.
    """
    txid = transaction.handle()
    if txid not in self.__transactions:
      raise apiproxy_errors.ApplicationError(
          datastore_pb.Error.BAD_REQUEST,
          'Transaction %d not found.' % (txid,))

    txdata = self.__transactions[txid]
    self.__AssertTxOwnership(txdata)
    del self.__transactions[txid]

    tx = remote_api_pb.TransactionRequest()
    # Renamed from `hash`, which shadowed the builtin.
    for key, entity_hash in txdata.preconditions.values():
      precond = tx.add_precondition()
      precond.mutable_key().CopyFrom(key)
      if entity_hash:
        precond.set_hash(entity_hash)

    puts = tx.mutable_puts()
    deletes = tx.mutable_deletes()
    for key, entity in txdata.entities.values():
      if entity:
        puts.add_entity().CopyFrom(entity)
      else:
        deletes.add_key().CopyFrom(key)

    super(RemoteDatastoreStub, self).MakeSyncCall(
        'remote_datastore', 'Transaction',
        tx, datastore_pb.PutResponse())

  def _Dynamic_Rollback(self, transaction, transaction_response):
    """Discards the locally-buffered transaction state.

    BUGFIX: the original referenced an undefined name (``txdata[txid]``),
    which would have raised NameError had the (also broken, always-true)
    tuple assert ever been evaluated; look the transaction up properly.
    """
    txid = transaction.handle()
    self.__local_tx_lock.acquire()
    try:
      if txid not in self.__transactions:
        raise apiproxy_errors.ApplicationError(
            datastore_pb.Error.BAD_REQUEST,
            'Transaction %d not found.' % (txid,))

      self.__AssertTxOwnership(self.__transactions[txid])
      del self.__transactions[txid]
    finally:
      self.__local_tx_lock.release()

  def _Dynamic_CreateIndex(self, index, id_response):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')

  def _Dynamic_UpdateIndex(self, index, void):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')

  def _Dynamic_DeleteIndex(self, index, void):
    raise apiproxy_errors.CapabilityDisabledError(
        'The remote datastore does not support index manipulation.')
def ConfigureRemoteDatastore(app_id,
                             path,
                             auth_func,
                             servername=None,
                             rpc_server_factory=appengine_rpc.HttpRpcServer):
  """Does necessary setup to allow easy remote access to an AppEngine datastore.

  Args:
    app_id: The app_id of your app, as declared in app.yaml.
    path: The path to the remote_api handler for your app
      (for example, '/remote_api').
    auth_func: A function that takes no arguments and returns a
      (username, password) tuple. This will be called if your application
      requires authentication to access the remote_api handler (it should!)
      and you do not already have a valid auth cookie.
    servername: The hostname your app is deployed on. Defaults to
      <app_id>.appspot.com.
    rpc_server_factory: A factory to construct the rpc server for the datastore.
  """
  servername = servername or '%s.appspot.com' % (app_id,)
  os.environ['APPLICATION_ID'] = app_id
  apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()
  rpc_server = rpc_server_factory(servername, auth_func, GetUserAgent(),
                                  GetSourceName())
  apiproxy_stub_map.apiproxy.RegisterStub(
      'datastore_v3', RemoteDatastoreStub(rpc_server, path))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A simple wrapper for Django templates.
The main purpose of this module is to hide all of the package import pain
you normally have to go through to get Django to work. We expose the Django
Template and Context classes from this module, handling the import nonsense
on behalf of clients.
Typical usage:
from google.appengine.ext.webapp import template
print template.render('templates/index.html', {'foo': 'bar'})
Django uses a global setting for the directory in which it looks for templates.
This is not natural in the context of the webapp module, so our load method
takes in a complete template path, and we set these settings on the fly
automatically. Because we have to set and use a global setting on every
method call, this module is not thread safe, though that is not an issue
for applications.
Django template documentation is available at:
http://www.djangoproject.com/documentation/templates/
"""
import md5
import os
try:
from django import v0_96
except ImportError:
pass
import django
import django.conf
# Configure Django's global settings once per process.
# NOTE(review): the except clause presumably covers the error Django raises
# when settings have already been configured (e.g. by another module) —
# confirm which Django versions raise EnvironmentError vs RuntimeError here.
try:
  django.conf.settings.configure(
    DEBUG=False,
    TEMPLATE_DEBUG=False,
    TEMPLATE_LOADERS=(
      'django.template.loaders.filesystem.load_template_source',
    ),
  )
except (EnvironmentError, RuntimeError):
  pass
import django.template
import django.template.loader
from google.appengine.ext import webapp
def render(template_path, template_dict, debug=False):
  """Renders the template at the given path with the given dict of values.

  Example usage:
    render("templates/index.html", {"name": "Bret", "values": [1, 2, 3]})

  Args:
    template_path: path to a Django template
    template_dict: dictionary of values to apply to the template
    debug: if True, skip the template cache and load with debug settings
  """
  compiled_template = load(template_path, debug)
  return compiled_template.render(Context(template_dict))
# Cache of compiled Django Template objects keyed by absolute template path.
# Only consulted/populated when load() is called with debug=False.
template_cache = {}
def load(path, debug=False):
  """Loads the Django template from the given path.

  It is better to use this function than to construct a Template using the
  class below because Django requires you to load the template with a method
  if you want imports and extends to work in the template.

  Args:
    path: path to a Django template file
    debug: if True, bypass the cache and enable Django debug settings

  Returns:
    A compiled django.template.Template whose render method has been wrapped
    to swap in this module's settings and {% url %} replacement.
  """
  abspath = os.path.abspath(path)

  # Cache hits are only possible in non-debug mode.
  if not debug:
    template = template_cache.get(abspath, None)
  else:
    template = None

  if not template:
    # Django resolves templates relative to a global TEMPLATE_DIRS setting,
    # so temporarily point it at this template's directory.
    directory, file_name = os.path.split(abspath)
    new_settings = {
      'TEMPLATE_DIRS': (directory,),
      'TEMPLATE_DEBUG': debug,
      'DEBUG': debug,
      }
    old_settings = _swap_settings(new_settings)
    try:
      template = django.template.loader.get_template(file_name)
    finally:
      _swap_settings(old_settings)

    if not debug:
      template_cache[abspath] = template

    def wrap_render(context, orig_render=template.render):
      # Re-apply this template's settings and substitute our {% url %}
      # implementation for the duration of each render call, restoring
      # both afterwards.
      URLNode = django.template.defaulttags.URLNode
      save_urlnode_render = URLNode.render
      old_settings = _swap_settings(new_settings)
      try:
        URLNode.render = _urlnode_render_replacement
        return orig_render(context)
      finally:
        _swap_settings(old_settings)
        URLNode.render = save_urlnode_render
    template.render = wrap_render

  return template
def _swap_settings(new):
  """Swap in selected Django settings, returning old settings.

  Example:
    save = _swap_settings({'X': 1, 'Y': 2})
    try:
      ...new settings for X and Y are in effect here...
    finally:
      _swap_settings(save)

  Args:
    new: A dict containing settings to change; the keys should
      be setting names and the values settings values.

  Returns:
    Another dict structured the same way as the argument containing
    the original settings. Original settings that were not set at all
    are returned as None, and will be restored as None by the
    'finally' clause in the example above. This shouldn't matter; we
    can't delete settings that are given as None, since None is also a
    legitimate value for some settings. Creating a separate flag value
    for 'unset' settings seems overkill as there is no known use case.
  """
  settings = django.conf.settings
  previous = {}
  for name, value in new.iteritems():
    previous[name] = getattr(settings, name, None)
    setattr(settings, name, value)
  return previous
def create_template_register():
  """Used to extend the Django template library with custom filters and tags.

  To extend the template library with a custom filter module, create a Python
  module, and create a module-level variable named "register", and register
  all custom filters to it as described at
  http://www.djangoproject.com/documentation/templates_python/
    #extending-the-template-system:

    templatefilters.py
    ==================
    register = webapp.template.create_template_register()

    def cut(value, arg):
      return value.replace(arg, '')
    register.filter(cut)

  Then, register the custom template module with the register_template_library
  function below in your application module:

    myapp.py
    ========
    webapp.template.register_template_library('templatefilters')
  """
  register = django.template.Library()
  return register
def register_template_library(package_name):
  """Registers a template extension module to make it usable in templates.

  See the documentation for create_template_register for more information."""
  already_registered = django.template.libraries.get(package_name, None)
  if not already_registered:
    django.template.add_to_builtins(package_name)
# Re-export Django's classes so callers need only import this module.
Template = django.template.Template
Context = django.template.Context
def _urlnode_render_replacement(self, context):
  """Replacement for django's {% url %} block.

  This version uses WSGIApplication's url mapping to create urls.

  Examples:

    <a href="{% url MyPageHandler "overview" %}">
    {% url MyPageHandler implicit_args=False %}
    {% url MyPageHandler "calendar" %}
    {% url MyPageHandler "jsmith","calendar" %}
  """
  resolved_args = [arg.resolve(context) for arg in self.args]
  try:
    application = webapp.WSGIApplication.active_instance
    handler = application.get_registered_handler_by_name(self.view_name)
    return handler.get_url(implicit_args=True, *resolved_args)
  except webapp.NoUrlFoundError:
    # Mirror Django's behaviour of rendering nothing for unresolvable urls.
    return ''
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Convience functions for the Webapp framework."""
__all__ = ["login_required", "run_wsgi_app"]
import os
import sys
import wsgiref.util
from google.appengine.api import users
from google.appengine.ext import webapp
def login_required(handler_method):
  """A decorator to require that a user be logged in to access a handler.

  To use it, decorate your get() method like this:

    @login_required
    def get(self):
      user = users.get_current_user()
      self.response.out.write('Hello, ' + user.nickname())

  We will redirect to a login page if the user is not logged in. We always
  redirect to the request URI, and Google Accounts only redirects back as a GET
  request, so this should not be used for POSTs.
  """
  def check_login(self, *args):
    # Only GET is safe: the post-login redirect comes back as a GET.
    if self.request.method != 'GET':
      raise webapp.Error('The check_login decorator can only be used for GET '
                         'requests')
    user = users.get_current_user()
    if not user:
      # Not logged in: bounce through the login page back to this URI.
      self.redirect(users.create_login_url(self.request.uri))
      return
    else:
      handler_method(self, *args)
  return check_login
def run_wsgi_app(application):
  """Runs your WSGI-compliant application object in a CGI environment.

  Compared to wsgiref.handlers.CGIHandler().run(application), this
  function takes some shortcuts. Those are possible because the
  app server makes stronger promises than the CGI standard.
  """
  environ = dict(os.environ)
  environ["wsgi.input"] = sys.stdin
  environ["wsgi.errors"] = sys.stderr
  environ["wsgi.version"] = (1, 0)
  environ["wsgi.run_once"] = True
  environ["wsgi.url_scheme"] = wsgiref.util.guess_scheme(environ)
  environ["wsgi.multithread"] = False
  environ["wsgi.multiprocess"] = False
  body_iterable = application(environ, _start_response)
  if body_iterable is not None:
    for chunk in body_iterable:
      sys.stdout.write(chunk)
def _start_response(status, headers, exc_info=None):
  """A start_response() callable as specified by PEP 333.

  Emits CGI-style response headers on stdout and returns a write() callable.
  """
  if exc_info is not None:
    # Python 2 three-expression raise: re-raise with the original traceback.
    raise exc_info[0], exc_info[1], exc_info[2]
  print "Status: %s" % status
  for name, val in headers:
    print "%s: %s" % (name, val)
  # Blank line terminates the CGI header block.
  print
  return sys.stdout.write
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A simple wrapper for Django templates.
The main purpose of this module is to hide all of the package import pain
you normally have to go through to get Django to work. We expose the Django
Template and Context classes from this module, handling the import nonsense
on behalf of clients.
Typical usage:
from google.appengine.ext.webapp import template
print template.render('templates/index.html', {'foo': 'bar'})
Django uses a global setting for the directory in which it looks for templates.
This is not natural in the context of the webapp module, so our load method
takes in a complete template path, and we set these settings on the fly
automatically. Because we have to set and use a global setting on every
method call, this module is not thread safe, though that is not an issue
for applications.
Django template documentation is available at:
http://www.djangoproject.com/documentation/templates/
"""
import md5
import os
try:
from django import v0_96
except ImportError:
pass
import django
import django.conf
# Configure Django's global settings once per process.
# NOTE(review): the except clause presumably covers the error Django raises
# when settings have already been configured (e.g. by another module) —
# confirm which Django versions raise EnvironmentError vs RuntimeError here.
try:
  django.conf.settings.configure(
    DEBUG=False,
    TEMPLATE_DEBUG=False,
    TEMPLATE_LOADERS=(
      'django.template.loaders.filesystem.load_template_source',
    ),
  )
except (EnvironmentError, RuntimeError):
  pass
import django.template
import django.template.loader
from google.appengine.ext import webapp
def render(template_path, template_dict, debug=False):
  """Renders the template at the given path with the given dict of values.

  Example usage:
    render("templates/index.html", {"name": "Bret", "values": [1, 2, 3]})

  Args:
    template_path: path to a Django template
    template_dict: dictionary of values to apply to the template
    debug: if True, skip the template cache and load with debug settings
  """
  compiled_template = load(template_path, debug)
  return compiled_template.render(Context(template_dict))
# Cache of compiled Django Template objects keyed by absolute template path.
# Only consulted/populated when load() is called with debug=False.
template_cache = {}
def load(path, debug=False):
  """Loads the Django template from the given path.

  It is better to use this function than to construct a Template using the
  class below because Django requires you to load the template with a method
  if you want imports and extends to work in the template.

  Args:
    path: path to a Django template file
    debug: if True, bypass the cache and enable Django debug settings

  Returns:
    A compiled django.template.Template whose render method has been wrapped
    to swap in this module's settings and {% url %} replacement.
  """
  abspath = os.path.abspath(path)

  # Cache hits are only possible in non-debug mode.
  if not debug:
    template = template_cache.get(abspath, None)
  else:
    template = None

  if not template:
    # Django resolves templates relative to a global TEMPLATE_DIRS setting,
    # so temporarily point it at this template's directory.
    directory, file_name = os.path.split(abspath)
    new_settings = {
      'TEMPLATE_DIRS': (directory,),
      'TEMPLATE_DEBUG': debug,
      'DEBUG': debug,
      }
    old_settings = _swap_settings(new_settings)
    try:
      template = django.template.loader.get_template(file_name)
    finally:
      _swap_settings(old_settings)

    if not debug:
      template_cache[abspath] = template

    def wrap_render(context, orig_render=template.render):
      # Re-apply this template's settings and substitute our {% url %}
      # implementation for the duration of each render call, restoring
      # both afterwards.
      URLNode = django.template.defaulttags.URLNode
      save_urlnode_render = URLNode.render
      old_settings = _swap_settings(new_settings)
      try:
        URLNode.render = _urlnode_render_replacement
        return orig_render(context)
      finally:
        _swap_settings(old_settings)
        URLNode.render = save_urlnode_render
    template.render = wrap_render

  return template
def _swap_settings(new):
  """Swap in selected Django settings, returning old settings.

  Example:
    save = _swap_settings({'X': 1, 'Y': 2})
    try:
      ...new settings for X and Y are in effect here...
    finally:
      _swap_settings(save)

  Args:
    new: A dict mapping setting names to the values to install.

  Returns:
    A dict of the same shape holding the displaced values.  Settings that
    were never set at all come back as None and will be restored as None by
    the 'finally' clause in the example above.  This shouldn't matter; we
    can't delete settings that are given as None, since None is also a
    legitimate value for some settings, and a separate 'unset' sentinel
    seems overkill with no known use case.
  """
  settings = django.conf.settings
  displaced = {}
  for name in new:
    displaced[name] = getattr(settings, name, None)
    setattr(settings, name, new[name])
  return displaced
def create_template_register():
  """Used to extend the Django template library with custom filters and tags.

  To extend the template library with a custom filter module, create a
  Python module with a module-level variable named "register", and register
  all custom filters on it as described at
  http://www.djangoproject.com/documentation/templates_python/
  #extending-the-template-system:

    templatefilters.py
    ==================
    register = webapp.template.create_template_register()

    def cut(value, arg):
      return value.replace(arg, '')
    register.filter(cut)

  Then, register the custom template module with the
  register_template_library function below in your application module:

    myapp.py
    ========
    webapp.template.register_template_library('templatefilters')

  Returns:
    A new, empty django.template.Library instance.
  """
  library = django.template.Library()
  return library
def register_template_library(package_name):
  """Registers a template extension module to make it usable in templates.

  See the documentation for create_template_register for more information.

  Args:
    package_name: name of the Python module containing the module-level
      'register' Library, as understood by Django's add_to_builtins.
  """
  existing = django.template.libraries.get(package_name, None)
  if not existing:
    django.template.add_to_builtins(package_name)
# Re-export Django's Template and Context classes so callers can spell them
# as webapp.template.Template / webapp.template.Context.
Template = django.template.Template
Context = django.template.Context
def _urlnode_render_replacement(self, context):
  """Replacement for django's {% url %} block.

  This version uses WSGIApplication's url mapping to create urls.

  Examples:

    <a href="{% url MyPageHandler "overview" %}">
    {% url MyPageHandler implicit_args=False %}
    {% url MyPageHandler "calendar" %}
    {% url MyPageHandler "jsmith","calendar" %}
  """
  # Resolve each template-expression argument against the current context.
  args = [arg.resolve(context) for arg in self.args]
  try:
    app = webapp.WSGIApplication.active_instance
    handler = app.get_registered_handler_by_name(self.view_name)
    # NOTE(review): implicit_args is hard-coded to True here, although the
    # docstring example shows {% url ... implicit_args=False %} -- confirm
    # this matches the intended tag semantics.
    return handler.get_url(implicit_args=True, *args)
  except webapp.NoUrlFoundError:
    # Render nothing when no url pattern matches, mirroring Django's
    # default {% url %} behavior.
    return ''
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An extremely simple WSGI web application framework.
This module exports three primary classes: Request, Response, and
RequestHandler. You implement a web application by subclassing RequestHandler.
As WSGI requests come in, they are passed to instances of your RequestHandlers.
The RequestHandler class provides access to the easy-to-use Request and
Response objects so you can interpret the request and write the response with
no knowledge of the esoteric WSGI semantics. Here is a simple example:
from google.appengine.ext import webapp
import wsgiref.simple_server
class MainPage(webapp.RequestHandler):
def get(self):
self.response.out.write(
'<html><body><form action="/hello" method="post">'
'Name: <input name="name" type="text" size="20"> '
'<input type="submit" value="Say Hello"></form></body></html>')
class HelloPage(webapp.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write('Hello, %s' % self.request.get('name'))
application = webapp.WSGIApplication([
('/', MainPage),
('/hello', HelloPage)
], debug=True)
server = wsgiref.simple_server.make_server('', 8080, application)
print 'Serving on port 8080...'
server.serve_forever()
The WSGIApplication class maps URI regular expressions to your RequestHandler
classes. It is a WSGI-compatible application object, so you can use it in
conjunction with wsgiref to make your web application into, e.g., a CGI
script or a simple HTTP server, as in the example above.
The framework does not support streaming output. All output from a response
is stored in memory before it is written.
"""
import cgi
import StringIO
import logging
import re
import sys
import traceback
import urlparse
import webob
import wsgiref.headers
import wsgiref.util
RE_FIND_GROUPS = re.compile('\(.*?\)')
_CHARSET_RE = re.compile(r';\s*charset=([^;\s]*)', re.I)
class Error(Exception):
  """Base class for all exceptions raised by the webapp module."""
class NoUrlFoundError(Error):
  """Raised when RequestHandler.get_url() cannot build a matching url."""
class Request(webob.Request):
  """Abstraction for an HTTP request.

  Properties:
    uri: the complete URI requested by the user
    scheme: 'http' or 'https'
    host: the host, including the port
    path: the path up to the ';' or '?' in the URL
    parameters: the part of the URL between the ';' and the '?', if any
    query: the part of the URL after the '?'

  You can access parsed query and POST values with the get() method; do not
  parse the query string yourself.
  """

  # webapp's historical names for webob's url/query_string attributes.
  uri = property(lambda self: self.url)
  query = property(lambda self: self.query_string)

  def __init__(self, environ):
    """Constructs a Request object from a WSGI environment.

    If the charset isn't specified in the Content-Type header, defaults
    to UTF-8.

    Args:
      environ: A WSGI-compliant environment dictionary.
    """
    match = _CHARSET_RE.search(environ.get('CONTENT_TYPE', ''))
    if match:
      charset = match.group(1).lower()
    else:
      charset = 'utf-8'
    # unicode_errors='ignore' drops undecodable bytes instead of raising;
    # decode_param_names decodes parameter names with the same charset.
    webob.Request.__init__(self, environ, charset=charset,
                           unicode_errors= 'ignore', decode_param_names=True)

  def get(self, argument_name, default_value='', allow_multiple=False):
    """Returns the query or POST argument with the given name.

    We parse the query string and POST payload lazily, so this will be a
    slower operation on the first call.

    Args:
      argument_name: the name of the query or POST argument
      default_value: the value to return if the given argument is not present
      allow_multiple: return a list of values with the given name (deprecated)

    Returns:
      If allow_multiple is False (which it is by default), we return the first
      value with the given name given in the request. If it is True, we always
      return a list.
    """
    param_value = self.get_all(argument_name)
    if allow_multiple:
      return param_value
    else:
      if len(param_value) > 0:
        return param_value[0]
      else:
        return default_value

  def get_all(self, argument_name):
    """Returns a list of query or POST arguments with the given name.

    We parse the query string and POST payload lazily, so this will be a
    slower operation on the first call.

    Args:
      argument_name: the name of the query or POST argument

    Returns:
      A (possibly empty) list of values.
    """
    if self.charset:
      # Parameters are keyed by encoded byte strings, so encode the lookup
      # name with the request's charset before the lookup.
      argument_name = argument_name.encode(self.charset)
    param_value = self.params.getall(argument_name)
    for i in xrange(len(param_value)):
      # File uploads arrive as cgi.FieldStorage; unwrap them to their
      # raw value in place.
      if isinstance(param_value[i], cgi.FieldStorage):
        param_value[i] = param_value[i].value
    return param_value

  def arguments(self):
    """Returns a list of the arguments provided in the query and/or POST.

    The return value is a list of strings.
    """
    # set() removes duplicate names; the resulting order is unspecified.
    return list(set(self.params.keys()))

  def get_range(self, name, min_value=None, max_value=None, default=0):
    """Parses the given int argument, limiting it to the given range.

    Args:
      name: the name of the argument
      min_value: the minimum int value of the argument (if any)
      max_value: the maximum int value of the argument (if any)
      default: the default value of the argument if it is not given

    Returns:
      An int within the given range for the argument
    """
    try:
      value = int(self.get(name, default))
    except ValueError:
      # Non-numeric input falls back to the default instead of raising.
      value = default
    # Clamp to the requested bounds; max is applied before min, so when
    # min_value > max_value the min bound wins.
    if max_value != None:
      value = min(value, max_value)
    if min_value != None:
      value = max(value, min_value)
    return value
class Response(object):
  """Abstraction for an HTTP response.

  Properties:
    out: file pointer for the output stream
    headers: wsgiref.headers.Headers instance representing the output headers
  """

  def __init__(self):
    """Constructs a response with the default settings."""
    # The body is buffered entirely in memory; streaming is not supported.
    self.out = StringIO.StringIO()
    self.__wsgi_headers = []
    self.headers = wsgiref.headers.Headers(self.__wsgi_headers)
    self.headers['Content-Type'] = 'text/html; charset=utf-8'
    self.headers['Cache-Control'] = 'no-cache'
    self.set_status(200)

  def set_status(self, code, message=None):
    """Sets the HTTP status code of this response.

    Args:
      code: the numeric HTTP status code (e.g., 404)
      message: the HTTP status string to use

    If no status string is given, we use the default from the HTTP/1.1
    specification.
    """
    if not message:
      message = Response.http_status_message(code)
    self.__status = (code, message)

  def clear(self):
    """Clears all data written to the output stream so that it is empty."""
    self.out.seek(0)
    self.out.truncate(0)

  def wsgi_write(self, start_response):
    """Writes this response using WSGI semantics with the given WSGI function.

    Args:
      start_response: the WSGI-compatible start_response function
    """
    body = self.out.getvalue()
    if isinstance(body, unicode):
      # Encode unicode bodies as UTF-8, matching the default Content-Type.
      body = body.encode('utf-8')
    elif self.headers.get('Content-Type', '').endswith('; charset=utf-8'):
      # Byte bodies that claim UTF-8 are validated; invalid data is only
      # logged as a warning, never rejected.
      try:
        body.decode('utf-8')
      except UnicodeError, e:
        logging.warning('Response written is not UTF-8: %s', e)
    self.headers['Content-Length'] = str(len(body))
    write = start_response('%d %s' % self.__status, self.__wsgi_headers)
    write(body)
    # The buffer is closed here, so a response can only be written once.
    self.out.close()

  def http_status_message(code):
    """Returns the default HTTP status message for the given code.

    Args:
      code: the HTTP code for which we want a message

    Raises:
      Error: if the code is not a known HTTP status code.
    """
    if not Response.__HTTP_STATUS_MESSAGES.has_key(code):
      raise Error('Invalid HTTP status code: %d' % code)
    return Response.__HTTP_STATUS_MESSAGES[code]
  # Pre-decorator style: converted to a staticmethod after definition.
  http_status_message = staticmethod(http_status_message)

  # Default reason phrases keyed by status code (per HTTP/1.1).
  __HTTP_STATUS_MESSAGES = {
    100: 'Continue',
    101: 'Switching Protocols',
    200: 'OK',
    201: 'Created',
    202: 'Accepted',
    203: 'Non-Authoritative Information',
    204: 'No Content',
    205: 'Reset Content',
    206: 'Partial Content',
    300: 'Multiple Choices',
    301: 'Moved Permanently',
    302: 'Moved Temporarily',
    303: 'See Other',
    304: 'Not Modified',
    305: 'Use Proxy',
    306: 'Unused',
    307: 'Temporary Redirect',
    400: 'Bad Request',
    401: 'Unauthorized',
    402: 'Payment Required',
    403: 'Forbidden',
    404: 'Not Found',
    405: 'Method Not Allowed',
    406: 'Not Acceptable',
    407: 'Proxy Authentication Required',
    408: 'Request Time-out',
    409: 'Conflict',
    410: 'Gone',
    411: 'Length Required',
    412: 'Precondition Failed',
    413: 'Request Entity Too Large',
    414: 'Request-URI Too Large',
    415: 'Unsupported Media Type',
    416: 'Requested Range Not Satisfiable',
    417: 'Expectation Failed',
    500: 'Internal Server Error',
    501: 'Not Implemented',
    502: 'Bad Gateway',
    503: 'Service Unavailable',
    504: 'Gateway Time-out',
    505: 'HTTP Version not supported'
  }
class RequestHandler(object):
  """Our base HTTP request handler. Clients should subclass this class.

  Subclasses should override get(), post(), head(), options(), etc to handle
  different HTTP methods.  The default implementations all answer 405
  (Method Not Allowed).
  """

  def initialize(self, request, response):
    """Initializes this request handler with the given Request and Response."""
    self.request = request
    self.response = response

  def get(self, *args):
    """Handler method for GET requests."""
    self.error(405)

  def post(self, *args):
    """Handler method for POST requests."""
    self.error(405)

  def head(self, *args):
    """Handler method for HEAD requests."""
    self.error(405)

  def options(self, *args):
    """Handler method for OPTIONS requests."""
    self.error(405)

  def put(self, *args):
    """Handler method for PUT requests."""
    self.error(405)

  def delete(self, *args):
    """Handler method for DELETE requests."""
    self.error(405)

  def trace(self, *args):
    """Handler method for TRACE requests."""
    self.error(405)

  def error(self, code):
    """Clears the response output stream and sets the given HTTP error code.

    Args:
      code: the HTTP status error code (e.g., 501)
    """
    self.response.set_status(code)
    self.response.clear()

  def redirect(self, uri, permanent=False):
    """Issues an HTTP redirect to the given relative URL.

    Args:
      uri: a relative or absolute URI (e.g., '../flowers.html')
      permanent: if true, we use a 301 redirect instead of a 302 redirect
    """
    if permanent:
      self.response.set_status(301)
    else:
      self.response.set_status(302)
    # Resolve relative URIs against the current request URI before emitting
    # the Location header; any buffered body is discarded.
    absolute_url = urlparse.urljoin(self.request.uri, uri)
    self.response.headers['Location'] = str(absolute_url)
    self.response.clear()

  def handle_exception(self, exception, debug_mode):
    """Called if this handler throws an exception during execution.

    The default behavior is to call self.error(500) and print a stack trace
    if debug_mode is True.

    Args:
      exception: the exception that was thrown
      debug_mode: True if the web application is running in debug mode
    """
    self.error(500)
    logging.exception(exception)
    if debug_mode:
      # In debug mode the (cleared) body is replaced with the traceback,
      # HTML-escaped so it renders literally inside <pre>.
      lines = ''.join(traceback.format_exception(*sys.exc_info()))
      self.response.clear()
      self.response.out.write('<pre>%s</pre>' % (cgi.escape(lines, quote=True)))

  @classmethod
  def get_url(cls, *args, **kargs):
    """Returns the url for the given handler.

    The default implementation uses the patterns passed to the active
    WSGIApplication and the django urlresolvers module to create a url.
    However, it is different from urlresolvers.reverse() in the following ways:
      - It does not try to resolve handlers via module loading
      - It does not support named arguments
      - It performs some post-prosessing on the url to remove some regex
        operators that urlresolvers.reverse_helper() seems to miss.
      - It will try to fill in the left-most missing arguments with the args
        used in the active request.

    Args:
      args: Parameters for the url pattern's groups.
      kwargs: Optionally contains 'implicit_args' that can either be a boolean
        or a tuple. When it is True, it will use the arguments to the
        active request as implicit arguments. When it is False (default),
        it will not use any implicit arguments. When it is a tuple, it
        will use the tuple to fill in the left-most args if some are
        missing from args.

    Returns:
      The url for this handler/args combination.

    Raises:
      NoUrlFoundError: No url pattern for this handler has the same
        number of args that were passed in.
    """
    app = WSGIApplication.active_instance
    pattern_map = app._pattern_map
    implicit_args = kargs.get('implicit_args', ())
    if implicit_args == True:
      implicit_args = app.current_request_args
    # A pattern can only match if it captures at least len(args) groups.
    min_params = len(args)
    urlresolvers = None
    for pattern_tuple in pattern_map.get(cls, ()):
      num_params_in_pattern = pattern_tuple[1]
      if num_params_in_pattern < min_params:
        continue
      if urlresolvers is None:
        # Imported lazily so django is only needed when get_url is used.
        from django.core import urlresolvers
      try:
        # Pad missing leading parameters with the left-most implicit args.
        num_implicit_args = max(0, num_params_in_pattern - len(args))
        merged_args = implicit_args[:num_implicit_args] + args
        url = urlresolvers.reverse_helper(pattern_tuple[0], *merged_args)
        # Strip regex metacharacters that reverse_helper leaves behind.
        url = url.replace('\\', '')
        url = url.replace('?', '')
        return url
      except urlresolvers.NoReverseMatch:
        continue
    logging.warning('get_url failed for Handler name: %r, Args: %r',
                    cls.__name__, args)
    raise NoUrlFoundError
class WSGIApplication(object):
  """Wraps a set of webapp RequestHandlers in a WSGI-compatible application.

  To use this class, pass a list of (URI regular expression, RequestHandler)
  pairs to the constructor, and pass the class instance to a WSGI handler.
  See the example in the module comments for details.

  The URL mapping is first-match based on the list ordering.
  """

  def __init__(self, url_mapping, debug=False):
    """Initializes this application with the given URL mapping.

    Args:
      url_mapping: list of (URI, RequestHandler) pairs (e.g., [('/', ReqHan)])
      debug: if true, we send Python stack traces to the browser on errors
    """
    self._init_url_mappings(url_mapping)
    self.__debug = debug
    # Class-level pointer to the most recently active application; used by
    # RequestHandler.get_url and the template {% url %} replacement.
    WSGIApplication.active_instance = self
    self.current_request_args = ()

  def __call__(self, environ, start_response):
    """Called by WSGI when a request comes in."""
    request = Request(environ)
    response = Response()
    WSGIApplication.active_instance = self
    handler = None
    groups = ()
    # First-match wins: scan the patterns in registration order.
    for regexp, handler_class in self._url_mapping:
      match = regexp.match(request.path)
      if match:
        handler = handler_class()
        handler.initialize(request, response)
        groups = match.groups()
        break
    # Remember the captured groups so get_url() can reuse them as
    # implicit args for the duration of this request.
    self.current_request_args = groups
    if handler:
      try:
        # Dispatch to the handler method named after the HTTP verb.
        method = environ['REQUEST_METHOD']
        if method == 'GET':
          handler.get(*groups)
        elif method == 'POST':
          handler.post(*groups)
        elif method == 'HEAD':
          handler.head(*groups)
        elif method == 'OPTIONS':
          handler.options(*groups)
        elif method == 'PUT':
          handler.put(*groups)
        elif method == 'DELETE':
          handler.delete(*groups)
        elif method == 'TRACE':
          handler.trace(*groups)
        else:
          handler.error(501)
      except Exception, e:
        # All handler failures funnel through the handler's exception hook.
        handler.handle_exception(e, self.__debug)
    else:
      response.set_status(404)
    response.wsgi_write(start_response)
    return ['']

  def _init_url_mappings(self, handler_tuples):
    """Initializes the maps needed for mapping urls to handlers and handlers
    to urls.

    Args:
      handler_tuples: list of (URI, RequestHandler) pairs.
    """
    handler_map = {}
    pattern_map = {}
    url_mapping = []
    for regexp, handler in handler_tuples:
      # NOTE(review): keyed by class __name__, so two handler classes with
      # the same name silently shadow each other -- confirm acceptable.
      handler_map[handler.__name__] = handler
      # Anchor each pattern so it must match the entire request path.
      if not regexp.startswith('^'):
        regexp = '^' + regexp
      if not regexp.endswith('$'):
        regexp += '$'
      compiled = re.compile(regexp)
      url_mapping.append((compiled, handler))
      # Count capture groups to know how many args the pattern supplies
      # when reversing urls in get_url().
      num_groups = len(RE_FIND_GROUPS.findall(regexp))
      handler_patterns = pattern_map.setdefault(handler, [])
      handler_patterns.append((compiled, num_groups))
    self._handler_map = handler_map
    self._pattern_map = pattern_map
    self._url_mapping = url_mapping

  def get_registered_handler_by_name(self, handler_name):
    """Returns the handler given the handler's name.

    This uses the application's url mapping.

    Args:
      handler_name: The __name__ of a handler to return.

    Returns:
      The handler with the given name.

    Raises:
      KeyError: If the handler name is not found in the parent application.
    """
    try:
      return self._handler_map[handler_name]
    except:
      # Log before re-raising so failed {% url %} lookups are visible.
      logging.error('Handler does not map to any urls: %s', handler_name)
      raise
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Convience functions for the Webapp framework."""
__all__ = ["login_required", "run_wsgi_app"]
import os
import sys
import wsgiref.util
from google.appengine.api import users
from google.appengine.ext import webapp
def login_required(handler_method):
  """A decorator to require that a user be logged in to access a handler.

  To use it, decorate your get() method like this:

    @login_required
    def get(self):
      user = users.get_current_user(self)
      self.response.out.write('Hello, ' + user.nickname())

  We will redirect to a login page if the user is not logged in. We always
  redirect to the request URI, and Google Accounts only redirects back as a GET
  request, so this should not be used for POSTs.

  Args:
    handler_method: the RequestHandler method (usually get) to wrap.

  Returns:
    The wrapped method, which redirects to the login page when no user is
    signed in and raises webapp.Error for non-GET requests.
  """
  def check_login(self, *args):
    if self.request.method != 'GET':
      raise webapp.Error('The check_login decorator can only be used for GET '
                         'requests')
    user = users.get_current_user()
    if not user:
      self.redirect(users.create_login_url(self.request.uri))
      return
    else:
      handler_method(self, *args)
  # Fix: preserve the wrapped method's identity so introspection, logging
  # and debugging report the original handler name instead of 'check_login'.
  check_login.__name__ = handler_method.__name__
  check_login.__doc__ = handler_method.__doc__
  return check_login
def run_wsgi_app(application):
  """Runs your WSGI-compliant application object in a CGI environment.

  Compared to wsgiref.handlers.CGIHandler().run(application), this
  function takes some shortcuts.  Those are possible because the
  app server makes stronger promises than the CGI standard.
  """
  env = dict(os.environ)
  env.update({
      "wsgi.input": sys.stdin,
      "wsgi.errors": sys.stderr,
      "wsgi.version": (1, 0),
      "wsgi.run_once": True,
      "wsgi.multithread": False,
      "wsgi.multiprocess": False,
  })
  env["wsgi.url_scheme"] = wsgiref.util.guess_scheme(env)
  result = application(env, _start_response)
  if result is not None:
    for data in result:
      sys.stdout.write(data)
def _start_response(status, headers, exc_info=None):
  """A start_response() callable as specified by PEP 333"""
  if exc_info is not None:
    # Re-raise the original exception with its traceback, as PEP 333
    # requires when start_response is invoked from an error handler.
    raise exc_info[0], exc_info[1], exc_info[2]
  # Emit the status line and headers CGI-style on stdout; the final bare
  # print produces the blank line that separates headers from the body.
  print "Status: %s" % status
  for name, val in headers:
    print "%s: %s" % (name, val)
  print
  # Per PEP 333, return a write() callable for the response body.
  return sys.stdout.write
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""An extremely simple WSGI web application framework.
This module exports three primary classes: Request, Response, and
RequestHandler. You implement a web application by subclassing RequestHandler.
As WSGI requests come in, they are passed to instances of your RequestHandlers.
The RequestHandler class provides access to the easy-to-use Request and
Response objects so you can interpret the request and write the response with
no knowledge of the esoteric WSGI semantics. Here is a simple example:
from google.appengine.ext import webapp
import wsgiref.simple_server
class MainPage(webapp.RequestHandler):
def get(self):
self.response.out.write(
'<html><body><form action="/hello" method="post">'
'Name: <input name="name" type="text" size="20"> '
'<input type="submit" value="Say Hello"></form></body></html>')
class HelloPage(webapp.RequestHandler):
def post(self):
self.response.headers['Content-Type'] = 'text/plain'
self.response.out.write('Hello, %s' % self.request.get('name'))
application = webapp.WSGIApplication([
('/', MainPage),
('/hello', HelloPage)
], debug=True)
server = wsgiref.simple_server.make_server('', 8080, application)
print 'Serving on port 8080...'
server.serve_forever()
The WSGIApplication class maps URI regular expressions to your RequestHandler
classes. It is a WSGI-compatible application object, so you can use it in
conjunction with wsgiref to make your web application into, e.g., a CGI
script or a simple HTTP server, as in the example above.
The framework does not support streaming output. All output from a response
is stored in memory before it is written.
"""
import cgi
import StringIO
import logging
import re
import sys
import traceback
import urlparse
import webob
import wsgiref.headers
import wsgiref.util
RE_FIND_GROUPS = re.compile('\(.*?\)')
_CHARSET_RE = re.compile(r';\s*charset=([^;\s]*)', re.I)
class Error(Exception):
  """Base class for all exceptions raised by the webapp module."""
class NoUrlFoundError(Error):
  """Raised when RequestHandler.get_url() cannot build a matching url."""
class Request(webob.Request):
  """Abstraction for an HTTP request.

  Properties:
    uri: the complete URI requested by the user
    scheme: 'http' or 'https'
    host: the host, including the port
    path: the path up to the ';' or '?' in the URL
    parameters: the part of the URL between the ';' and the '?', if any
    query: the part of the URL after the '?'

  You can access parsed query and POST values with the get() method; do not
  parse the query string yourself.
  """

  # webapp's historical names for webob's url/query_string attributes.
  uri = property(lambda self: self.url)
  query = property(lambda self: self.query_string)

  def __init__(self, environ):
    """Constructs a Request object from a WSGI environment.

    If the charset isn't specified in the Content-Type header, defaults
    to UTF-8.

    Args:
      environ: A WSGI-compliant environment dictionary.
    """
    match = _CHARSET_RE.search(environ.get('CONTENT_TYPE', ''))
    if match:
      charset = match.group(1).lower()
    else:
      charset = 'utf-8'
    # unicode_errors='ignore' drops undecodable bytes instead of raising;
    # decode_param_names decodes parameter names with the same charset.
    webob.Request.__init__(self, environ, charset=charset,
                           unicode_errors= 'ignore', decode_param_names=True)

  def get(self, argument_name, default_value='', allow_multiple=False):
    """Returns the query or POST argument with the given name.

    We parse the query string and POST payload lazily, so this will be a
    slower operation on the first call.

    Args:
      argument_name: the name of the query or POST argument
      default_value: the value to return if the given argument is not present
      allow_multiple: return a list of values with the given name (deprecated)

    Returns:
      If allow_multiple is False (which it is by default), we return the first
      value with the given name given in the request. If it is True, we always
      return a list.
    """
    param_value = self.get_all(argument_name)
    if allow_multiple:
      return param_value
    else:
      if len(param_value) > 0:
        return param_value[0]
      else:
        return default_value

  def get_all(self, argument_name):
    """Returns a list of query or POST arguments with the given name.

    We parse the query string and POST payload lazily, so this will be a
    slower operation on the first call.

    Args:
      argument_name: the name of the query or POST argument

    Returns:
      A (possibly empty) list of values.
    """
    if self.charset:
      # Parameters are keyed by encoded byte strings, so encode the lookup
      # name with the request's charset before the lookup.
      argument_name = argument_name.encode(self.charset)
    param_value = self.params.getall(argument_name)
    for i in xrange(len(param_value)):
      # File uploads arrive as cgi.FieldStorage; unwrap them to their
      # raw value in place.
      if isinstance(param_value[i], cgi.FieldStorage):
        param_value[i] = param_value[i].value
    return param_value

  def arguments(self):
    """Returns a list of the arguments provided in the query and/or POST.

    The return value is a list of strings.
    """
    # set() removes duplicate names; the resulting order is unspecified.
    return list(set(self.params.keys()))

  def get_range(self, name, min_value=None, max_value=None, default=0):
    """Parses the given int argument, limiting it to the given range.

    Args:
      name: the name of the argument
      min_value: the minimum int value of the argument (if any)
      max_value: the maximum int value of the argument (if any)
      default: the default value of the argument if it is not given

    Returns:
      An int within the given range for the argument
    """
    try:
      value = int(self.get(name, default))
    except ValueError:
      # Non-numeric input falls back to the default instead of raising.
      value = default
    # Clamp to the requested bounds; max is applied before min, so when
    # min_value > max_value the min bound wins.
    if max_value != None:
      value = min(value, max_value)
    if min_value != None:
      value = max(value, min_value)
    return value
class Response(object):
  """Abstraction for an HTTP response.

  Properties:
    out: file pointer for the output stream
    headers: wsgiref.headers.Headers instance representing the output headers
  """

  def __init__(self):
    """Constructs a response with the default settings."""
    # The body is buffered entirely in memory; streaming is not supported.
    self.out = StringIO.StringIO()
    self.__wsgi_headers = []
    self.headers = wsgiref.headers.Headers(self.__wsgi_headers)
    self.headers['Content-Type'] = 'text/html; charset=utf-8'
    self.headers['Cache-Control'] = 'no-cache'
    self.set_status(200)

  def set_status(self, code, message=None):
    """Sets the HTTP status code of this response.

    Args:
      code: the numeric HTTP status code (e.g., 404)
      message: the HTTP status string to use

    If no status string is given, we use the default from the HTTP/1.1
    specification.
    """
    if not message:
      message = Response.http_status_message(code)
    self.__status = (code, message)

  def clear(self):
    """Clears all data written to the output stream so that it is empty."""
    self.out.seek(0)
    self.out.truncate(0)

  def wsgi_write(self, start_response):
    """Writes this response using WSGI semantics with the given WSGI function.

    Args:
      start_response: the WSGI-compatible start_response function
    """
    body = self.out.getvalue()
    if isinstance(body, unicode):
      # Encode unicode bodies as UTF-8, matching the default Content-Type.
      body = body.encode('utf-8')
    elif self.headers.get('Content-Type', '').endswith('; charset=utf-8'):
      # Byte bodies that claim UTF-8 are validated; invalid data is only
      # logged as a warning, never rejected.
      try:
        body.decode('utf-8')
      except UnicodeError, e:
        logging.warning('Response written is not UTF-8: %s', e)
    self.headers['Content-Length'] = str(len(body))
    write = start_response('%d %s' % self.__status, self.__wsgi_headers)
    write(body)
    # The buffer is closed here, so a response can only be written once.
    self.out.close()

  def http_status_message(code):
    """Returns the default HTTP status message for the given code.

    Args:
      code: the HTTP code for which we want a message

    Raises:
      Error: if the code is not a known HTTP status code.
    """
    if not Response.__HTTP_STATUS_MESSAGES.has_key(code):
      raise Error('Invalid HTTP status code: %d' % code)
    return Response.__HTTP_STATUS_MESSAGES[code]
  # Pre-decorator style: converted to a staticmethod after definition.
  http_status_message = staticmethod(http_status_message)

  # Default reason phrases keyed by status code (per HTTP/1.1).
  __HTTP_STATUS_MESSAGES = {
    100: 'Continue',
    101: 'Switching Protocols',
    200: 'OK',
    201: 'Created',
    202: 'Accepted',
    203: 'Non-Authoritative Information',
    204: 'No Content',
    205: 'Reset Content',
    206: 'Partial Content',
    300: 'Multiple Choices',
    301: 'Moved Permanently',
    302: 'Moved Temporarily',
    303: 'See Other',
    304: 'Not Modified',
    305: 'Use Proxy',
    306: 'Unused',
    307: 'Temporary Redirect',
    400: 'Bad Request',
    401: 'Unauthorized',
    402: 'Payment Required',
    403: 'Forbidden',
    404: 'Not Found',
    405: 'Method Not Allowed',
    406: 'Not Acceptable',
    407: 'Proxy Authentication Required',
    408: 'Request Time-out',
    409: 'Conflict',
    410: 'Gone',
    411: 'Length Required',
    412: 'Precondition Failed',
    413: 'Request Entity Too Large',
    414: 'Request-URI Too Large',
    415: 'Unsupported Media Type',
    416: 'Requested Range Not Satisfiable',
    417: 'Expectation Failed',
    500: 'Internal Server Error',
    501: 'Not Implemented',
    502: 'Bad Gateway',
    503: 'Service Unavailable',
    504: 'Gateway Time-out',
    505: 'HTTP Version not supported'
  }
class RequestHandler(object):
"""Our base HTTP request handler. Clients should subclass this class.
Subclasses should override get(), post(), head(), options(), etc to handle
different HTTP methods.
"""
def initialize(self, request, response):
"""Initializes this request handler with the given Request and Response."""
self.request = request
self.response = response
def get(self, *args):
"""Handler method for GET requests."""
self.error(405)
def post(self, *args):
"""Handler method for POST requests."""
self.error(405)
def head(self, *args):
"""Handler method for HEAD requests."""
self.error(405)
def options(self, *args):
"""Handler method for OPTIONS requests."""
self.error(405)
def put(self, *args):
"""Handler method for PUT requests."""
self.error(405)
def delete(self, *args):
"""Handler method for DELETE requests."""
self.error(405)
def trace(self, *args):
"""Handler method for TRACE requests."""
self.error(405)
def error(self, code):
"""Clears the response output stream and sets the given HTTP error code.
Args:
code: the HTTP status error code (e.g., 501)
"""
self.response.set_status(code)
self.response.clear()
def redirect(self, uri, permanent=False):
"""Issues an HTTP redirect to the given relative URL.
Args:
uri: a relative or absolute URI (e.g., '../flowers.html')
permanent: if true, we use a 301 redirect instead of a 302 redirect
"""
if permanent:
self.response.set_status(301)
else:
self.response.set_status(302)
absolute_url = urlparse.urljoin(self.request.uri, uri)
self.response.headers['Location'] = str(absolute_url)
self.response.clear()
def handle_exception(self, exception, debug_mode):
"""Called if this handler throws an exception during execution.
The default behavior is to call self.error(500) and print a stack trace
if debug_mode is True.
Args:
exception: the exception that was thrown
debug_mode: True if the web application is running in debug mode
"""
self.error(500)
logging.exception(exception)
if debug_mode:
lines = ''.join(traceback.format_exception(*sys.exc_info()))
self.response.clear()
self.response.out.write('<pre>%s</pre>' % (cgi.escape(lines, quote=True)))
  @classmethod
  def get_url(cls, *args, **kargs):
    """Returns the url for the given handler.

    The default implementation uses the patterns passed to the active
    WSGIApplication and the django urlresolvers module to create a url.
    However, it is different from urlresolvers.reverse() in the following ways:
      - It does not try to resolve handlers via module loading
      - It does not support named arguments
      - It performs some post-processing on the url to remove some regex
        operators that urlresolvers.reverse_helper() seems to miss.
      - It will try to fill in the left-most missing arguments with the args
        used in the active request.

    Args:
      args: Parameters for the url pattern's groups.
      kargs: Optionally contains 'implicit_args' that can either be a boolean
          or a tuple. When it is True, it will use the arguments to the
          active request as implicit arguments. When it is False (default),
          it will not use any implicit arguments. When it is a tuple, it
          will use the tuple as the implicit arguments.

    Returns:
      The url for this handler/args combination.

    Raises:
      NoUrlFoundError: No url pattern for this handler has the same
          number of args that were passed in.
    """
    app = WSGIApplication.active_instance
    pattern_map = app._pattern_map
    # 'implicit_args' may be the boolean True (meaning: borrow the active
    # request's matched args) or an explicit tuple of values.
    implicit_args = kargs.get('implicit_args', ())
    if implicit_args == True:
      implicit_args = app.current_request_args
    # Only patterns with at least len(args) capture groups can possibly match.
    min_params = len(args)
    urlresolvers = None
    for pattern_tuple in pattern_map.get(cls, ()):
      num_params_in_pattern = pattern_tuple[1]
      if num_params_in_pattern < min_params:
        continue
      # Import django lazily, and only once, so apps that never call
      # get_url() don't pay for (or require) django.
      if urlresolvers is None:
        from django.core import urlresolvers
      try:
        # Left-pad the explicit args with implicit ones to reach the
        # pattern's group count.
        num_implicit_args = max(0, num_params_in_pattern - len(args))
        merged_args = implicit_args[:num_implicit_args] + args
        url = urlresolvers.reverse_helper(pattern_tuple[0], *merged_args)
        # Strip regex metacharacters that reverse_helper() leaves behind.
        url = url.replace('\\', '')
        url = url.replace('?', '')
        return url
      except urlresolvers.NoReverseMatch:
        continue
    logging.warning('get_url failed for Handler name: %r, Args: %r',
                    cls.__name__, args)
    raise NoUrlFoundError
class WSGIApplication(object):
  """Wraps a set of webapp RequestHandlers in a WSGI-compatible application.

  To use this class, pass a list of (URI regular expression, RequestHandler)
  pairs to the constructor, and pass the class instance to a WSGI handler.
  See the example in the module comments for details.

  The URL mapping is first-match based on the list ordering.
  """

  def __init__(self, url_mapping, debug=False):
    """Initializes this application with the given URL mapping.

    Args:
      url_mapping: list of (URI, RequestHandler) pairs (e.g., [('/', ReqHan)])
      debug: if true, we send Python stack traces to the browser on errors
    """
    self._init_url_mappings(url_mapping)
    self.__debug = debug
    # Remember the most recently constructed application so that
    # RequestHandler.get_url() can consult its URL patterns.
    WSGIApplication.active_instance = self
    self.current_request_args = ()

  def __call__(self, environ, start_response):
    """Called by WSGI when a request comes in."""
    request = Request(environ)
    response = Response()
    WSGIApplication.active_instance = self
    handler = None
    groups = ()
    # First matching pattern wins, per the class docstring.
    for regexp, handler_class in self._url_mapping:
      match = regexp.match(request.path)
      if match:
        handler = handler_class()
        handler.initialize(request, response)
        groups = match.groups()
        break
    self.current_request_args = groups
    if handler:
      try:
        method = environ['REQUEST_METHOD']
        if method == 'GET':
          handler.get(*groups)
        elif method == 'POST':
          handler.post(*groups)
        elif method == 'HEAD':
          handler.head(*groups)
        elif method == 'OPTIONS':
          handler.options(*groups)
        elif method == 'PUT':
          handler.put(*groups)
        elif method == 'DELETE':
          handler.delete(*groups)
        elif method == 'TRACE':
          handler.trace(*groups)
        else:
          handler.error(501)
      # FIX: 'except Exception, e' is Python-2-only syntax; 'as' works on
      # Python 2.6+ and 3.x. Catching Exception (not BaseException) is
      # deliberate: it routes handler errors to handle_exception() without
      # swallowing SystemExit/KeyboardInterrupt.
      except Exception as e:
        handler.handle_exception(e, self.__debug)
    else:
      response.set_status(404)
    response.wsgi_write(start_response)
    return ['']

  def _init_url_mappings(self, handler_tuples):
    """Initializes the maps needed for mapping urls to handlers and handlers
    to urls.

    Args:
      handler_tuples: list of (URI, RequestHandler) pairs.
    """
    handler_map = {}
    pattern_map = {}
    url_mapping = []
    for regexp, handler in handler_tuples:
      handler_map[handler.__name__] = handler
      # Anchor the pattern so it must match the whole request path.
      if not regexp.startswith('^'):
        regexp = '^' + regexp
      if not regexp.endswith('$'):
        regexp += '$'
      compiled = re.compile(regexp)
      url_mapping.append((compiled, regexp and handler or handler))
      num_groups = len(RE_FIND_GROUPS.findall(regexp))
      handler_patterns = pattern_map.setdefault(handler, [])
      handler_patterns.append((compiled, num_groups))
    self._handler_map = handler_map
    self._pattern_map = pattern_map
    self._url_mapping = url_mapping

  def get_registered_handler_by_name(self, handler_name):
    """Returns the handler given the handler's name.

    This uses the application's url mapping.

    Args:
      handler_name: The __name__ of a handler to return.

    Returns:
      The handler with the given name.

    Raises:
      KeyError: If the handler name is not found in the parent application.
    """
    try:
      return self._handler_map[handler_name]
    # FIX: the original used a bare 'except:', which caught and logged
    # every exception type; only KeyError (the documented failure) belongs.
    except KeyError:
      logging.error('Handler does not map to any urls: %s', handler_name)
      raise
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Serve static files from a zipfile.
This is a solution for apps that want to serve 1000s of small static
files while staying within the 1000 file limit.
The simplest use case is driven purely from the handlers section in
app.yaml, e.g.:
- url: /images/.*
script: $PYTHON_LIB/google/appengine/ext/zipserve
This would invoke a main() within zipserve/__init__.py. This code
would then take the URL path, and look for a .zip file under the first
component of the path, in this case "images.zip" in the app's working
directory. If found, it will then serve any matching paths below that
from the zip file. In other words, /images/foo/icon.gif would map to
foo/icon.gif in the zip file images.zip.
You can also customize the behavior by adding a custom line to your
WSGIApplication() invocation:
def main():
app = webapp.WSGIApplication(
[('/', MainPage),
('/static/(.*)', zipserve.make_zip_handler('staticfiles.zip')),
])
You can pass max_age=N to the make_zip_handler() call to override the
expiration time in seconds, which defaults to 600.
To customize the behavior even more, you can subclass ZipHandler and
override the get() method, or override it and call ServeFromZipFile()
directly.
Note that by default, a Cache-control is added that makes these pages
cacheable even if they require authentication. If this is not what
you want, override ZipHandler.SetCachingHeaders().
"""
import email.Utils
import logging
import mimetypes
import time
import zipfile
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
def make_zip_handler(zipfilename, max_age=None, public=None):
  """Factory function to construct a custom ZipHandler subclass.

  Args:
    zipfilename: The filename of a zipfile.
    max_age: Optional expiration time; defaults to ZipHandler.MAX_AGE.
    public: Optional public flag; defaults to ZipHandler.PUBLIC.

  Returns:
    A ZipHandler subclass bound to the given zipfile.
  """
  class CustomZipHandler(ZipHandler):
    ZIPFILENAME = zipfilename

    def get(self, name):
      self.ServeFromZipFile(self.ZIPFILENAME, name)

  # Only override the class defaults when explicitly requested.
  if max_age is not None:
    CustomZipHandler.MAX_AGE = max_age
  if public is not None:
    CustomZipHandler.PUBLIC = public
  return CustomZipHandler
class ZipHandler(webapp.RequestHandler):
  """Request handler serving static files from zipfiles."""

  MAX_AGE = 600
  PUBLIC = True

  # Class-level cache shared by all instances: maps a zipfile name to an open
  # zipfile.ZipFile, or to '' when a previous open attempt failed.
  zipfile_cache = {}

  def get(self, prefix, name):
    """GET request handler.

    Typically the arguments are passed from the matching groups in the
    URL pattern passed to WSGIApplication().

    Args:
      prefix: The zipfilename without the .zip suffix.
      name: The name within the zipfile.
    """
    self.ServeFromZipFile(prefix + '.zip', name)

  def ServeFromZipFile(self, zipfilename, name):
    """Serves one member file out of a zipfile, or a 404 on any failure.

    Logs a message and returns a 404 response if either the zipfile cannot
    be opened or the named file cannot be read from it.

    Args:
      zipfilename: The name of the zipfile.
      name: The name within the zipfile.
    """
    cached = self.zipfile_cache.get(zipfilename)
    if cached is None:
      try:
        cached = zipfile.ZipFile(zipfilename)
      except (IOError, RuntimeError) as err:
        logging.error("Can't open zipfile %s: %s", zipfilename, err)
        cached = ''
      self.zipfile_cache[zipfilename] = cached
    if cached == '':
      # The open attempt failed (now or on a previous request).
      self.error(404)
      self.response.out.write('Not found')
      return
    try:
      data = cached.read(name)
    except (KeyError, RuntimeError):
      self.error(404)
      self.response.out.write('Not found')
      return
    content_type, _ = mimetypes.guess_type(name)
    if content_type:
      self.response.headers['Content-Type'] = content_type
    self.SetCachingHeaders()
    self.response.out.write(data)

  def SetCachingHeaders(self):
    """Sets the Expires and Cache-Control response headers.

    Override this to customize the headers beyond setting MAX_AGE.
    """
    max_age = self.MAX_AGE
    self.response.headers['Expires'] = email.Utils.formatdate(
        time.time() + max_age, usegmt=True)
    directives = []
    if self.PUBLIC:
      directives.append('public')
    directives.append('max-age=%d' % max_age)
    self.response.headers['Cache-Control'] = ', '.join(directives)
def main():
  """Main program.

  This is invoked when this package is referenced from app.yaml.
  """
  app = webapp.WSGIApplication([('/([^/]+)/(.*)', ZipHandler)])
  util.run_wsgi_app(app)


if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Serve static files from a zipfile.
This is a solution for apps that want to serve 1000s of small static
files while staying within the 1000 file limit.
The simplest use case is driven purely from the handlers section in
app.yaml, e.g.:
- url: /images/.*
script: $PYTHON_LIB/google/appengine/ext/zipserve
This would invoke a main() within zipserve/__init__.py. This code
would then take the URL path, and look for a .zip file under the first
component of the path, in this case "images.zip" in the app's working
directory. If found, it will then serve any matching paths below that
from the zip file. In other words, /images/foo/icon.gif would map to
foo/icon.gif in the zip file images.zip.
You can also customize the behavior by adding a custom line to your
WSGIApplication() invocation:
def main():
app = webapp.WSGIApplication(
[('/', MainPage),
('/static/(.*)', zipserve.make_zip_handler('staticfiles.zip')),
])
You can pass max_age=N to the make_zip_handler() call to override the
expiration time in seconds, which defaults to 600.
To customize the behavior even more, you can subclass ZipHandler and
override the get() method, or override it and call ServeFromZipFile()
directly.
Note that by default, a Cache-control is added that makes these pages
cacheable even if they require authentication. If this is not what
you want, override ZipHandler.SetCachingHeaders().
"""
import email.Utils
import logging
import mimetypes
import time
import zipfile
from google.appengine.ext import webapp
from google.appengine.ext.webapp import util
def make_zip_handler(zipfilename, max_age=None, public=None):
  """Factory function to construct a custom ZipHandler subclass.

  Args:
    zipfilename: The filename of a zipfile.
    max_age: Optional expiration time; defaults to ZipHandler.MAX_AGE.
    public: Optional public flag; defaults to ZipHandler.PUBLIC.

  Returns:
    A ZipHandler subclass bound to the given zipfile.
  """
  class CustomZipHandler(ZipHandler):
    ZIPFILENAME = zipfilename

    def get(self, name):
      self.ServeFromZipFile(self.ZIPFILENAME, name)

  # Only override the class defaults when explicitly requested.
  if max_age is not None:
    CustomZipHandler.MAX_AGE = max_age
  if public is not None:
    CustomZipHandler.PUBLIC = public
  return CustomZipHandler
class ZipHandler(webapp.RequestHandler):
  """Request handler serving static files from zipfiles."""

  MAX_AGE = 600
  PUBLIC = True

  # Class-level cache shared by all instances: maps a zipfile name to an open
  # zipfile.ZipFile, or to '' when a previous open attempt failed.
  zipfile_cache = {}

  def get(self, prefix, name):
    """GET request handler.

    Typically the arguments are passed from the matching groups in the
    URL pattern passed to WSGIApplication().

    Args:
      prefix: The zipfilename without the .zip suffix.
      name: The name within the zipfile.
    """
    self.ServeFromZipFile(prefix + '.zip', name)

  def ServeFromZipFile(self, zipfilename, name):
    """Serves one member file out of a zipfile, or a 404 on any failure.

    Logs a message and returns a 404 response if either the zipfile cannot
    be opened or the named file cannot be read from it.

    Args:
      zipfilename: The name of the zipfile.
      name: The name within the zipfile.
    """
    cached = self.zipfile_cache.get(zipfilename)
    if cached is None:
      try:
        cached = zipfile.ZipFile(zipfilename)
      except (IOError, RuntimeError) as err:
        logging.error("Can't open zipfile %s: %s", zipfilename, err)
        cached = ''
      self.zipfile_cache[zipfilename] = cached
    if cached == '':
      # The open attempt failed (now or on a previous request).
      self.error(404)
      self.response.out.write('Not found')
      return
    try:
      data = cached.read(name)
    except (KeyError, RuntimeError):
      self.error(404)
      self.response.out.write('Not found')
      return
    content_type, _ = mimetypes.guess_type(name)
    if content_type:
      self.response.headers['Content-Type'] = content_type
    self.SetCachingHeaders()
    self.response.out.write(data)

  def SetCachingHeaders(self):
    """Sets the Expires and Cache-Control response headers.

    Override this to customize the headers beyond setting MAX_AGE.
    """
    max_age = self.MAX_AGE
    self.response.headers['Expires'] = email.Utils.formatdate(
        time.time() + max_age, usegmt=True)
    directives = []
    if self.PUBLIC:
      directives.append('public')
    directives.append('max-age=%d' % max_age)
    self.response.headers['Cache-Control'] = ', '.join(directives)
def main():
  """Main program.

  This is invoked when this package is referenced from app.yaml.
  """
  app = webapp.WSGIApplication([('/([^/]+)/(.*)', ZipHandler)])
  util.run_wsgi_app(app)


if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Full text indexing and search, implemented in pure python.
Defines a SearchableModel subclass of db.Model that supports full text
indexing and search, based on the datastore's existing indexes.
Don't expect too much. First, there's no ranking, which is a killer drawback.
There's also no exact phrase match, substring match, boolean operators,
stemming, or other common full text search features. Finally, support for stop
words (common words that are not indexed) is currently limited to English.
To be indexed, entities must be created and saved as SearchableModel
instances, e.g.:
class Article(search.SearchableModel):
text = db.TextProperty()
...
article = Article(text=...)
article.save()
To search the full text index, use the SearchableModel.all() method to get an
instance of SearchableModel.Query, which subclasses db.Query. Use its search()
method to provide a search query, in addition to any other filters or sort
orders, e.g.:
query = article.all().search('a search query').filter(...).order(...)
for result in query:
...
The full text index is stored in a property named __searchable_text_index.
In general, if you just want to provide full text search, you *don't* need to
add any extra indexes to your index.yaml. However, if you want to use search()
in a query *in addition to* an ancestor, filter, or sort order, you'll need to
create an index in index.yaml with the __searchable_text_index property. For
example:
- kind: Article
properties:
- name: __searchable_text_index
- name: date
direction: desc
...
Note that using SearchableModel will noticeably increase the latency of save()
operations, since it writes an index row for each indexable word. This also
means that the latency of save() will increase roughly with the size of the
properties in a given entity. Caveat hacker!
"""
import re
import string
import sys
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.ext import db
from google.appengine.datastore import datastore_pb
class SearchableEntity(datastore.Entity):
  """A subclass of datastore.Entity that supports full text indexing.

  Automatically indexes all string and Text properties, using the datastore's
  built-in per-property indices. To search, use the SearchableQuery class and
  its Search() method.
  """
  # Name of the hidden multi-valued property that stores the index keywords.
  _FULL_TEXT_INDEX_PROPERTY = '__searchable_text_index'

  # Words shorter than this many characters are not indexed.
  _FULL_TEXT_MIN_LENGTH = 3

  # English stop words; these are never indexed (see module docstring).
  _FULL_TEXT_STOP_WORDS = frozenset([
   'a', 'about', 'according', 'accordingly', 'affected', 'affecting', 'after',
   'again', 'against', 'all', 'almost', 'already', 'also', 'although',
   'always', 'am', 'among', 'an', 'and', 'any', 'anyone', 'apparently', 'are',
   'arise', 'as', 'aside', 'at', 'away', 'be', 'became', 'because', 'become',
   'becomes', 'been', 'before', 'being', 'between', 'both', 'briefly', 'but',
   'by', 'came', 'can', 'cannot', 'certain', 'certainly', 'could', 'did', 'do',
   'does', 'done', 'during', 'each', 'either', 'else', 'etc', 'ever', 'every',
   'following', 'for', 'found', 'from', 'further', 'gave', 'gets', 'give',
   'given', 'giving', 'gone', 'got', 'had', 'hardly', 'has', 'have', 'having',
   'here', 'how', 'however', 'i', 'if', 'in', 'into', 'is', 'it', 'itself',
   'just', 'keep', 'kept', 'knowledge', 'largely', 'like', 'made', 'mainly',
   'make', 'many', 'might', 'more', 'most', 'mostly', 'much', 'must', 'nearly',
   'necessarily', 'neither', 'next', 'no', 'none', 'nor', 'normally', 'not',
   'noted', 'now', 'obtain', 'obtained', 'of', 'often', 'on', 'only', 'or',
   'other', 'our', 'out', 'owing', 'particularly', 'past', 'perhaps', 'please',
   'poorly', 'possible', 'possibly', 'potentially', 'predominantly', 'present',
   'previously', 'primarily', 'probably', 'prompt', 'promptly', 'put',
   'quickly', 'quite', 'rather', 'readily', 'really', 'recently', 'regarding',
   'regardless', 'relatively', 'respectively', 'resulted', 'resulting',
   'results', 'said', 'same', 'seem', 'seen', 'several', 'shall', 'should',
   'show', 'showed', 'shown', 'shows', 'significantly', 'similar', 'similarly',
   'since', 'slightly', 'so', 'some', 'sometime', 'somewhat', 'soon',
   'specifically', 'state', 'states', 'strongly', 'substantially',
   'successfully', 'such', 'sufficiently', 'than', 'that', 'the', 'their',
   'theirs', 'them', 'then', 'there', 'therefore', 'these', 'they', 'this',
   'those', 'though', 'through', 'throughout', 'to', 'too', 'toward', 'under',
   'unless', 'until', 'up', 'upon', 'use', 'used', 'usefully', 'usefulness',
   'using', 'usually', 'various', 'very', 'was', 'we', 'were', 'what', 'when',
   'where', 'whether', 'which', 'while', 'who', 'whose', 'why', 'widely',
   'will', 'with', 'within', 'without', 'would', 'yet', 'you'])

  # Default word delimiter: any single ASCII punctuation character.
  _word_delimiter_regex = re.compile('[' + re.escape(string.punctuation) + ']')

  def __init__(self, kind_or_entity, word_delimiter_regex=None, *args,
               **kwargs):
    """Constructor. May be called as a copy constructor.

    If kind_or_entity is a datastore.Entity, copies it into this Entity.
    datastore.Get() and Query() returns instances of datastore.Entity, so this
    is useful for converting them back to SearchableEntity so that they'll be
    indexed when they're stored back in the datastore.

    Otherwise, passes through the positional and keyword args to the
    datastore.Entity constructor.

    Args:
      kind_or_entity: string or datastore.Entity
      word_delimiter_regex: a regex matching characters that delimit words
    """
    self._word_delimiter_regex = word_delimiter_regex
    if isinstance(kind_or_entity, datastore.Entity):
      # Copy-constructor path: adopt the source entity's key and properties.
      # (self._Entity__key reaches datastore.Entity's name-mangled __key.)
      self._Entity__key = kind_or_entity._Entity__key
      self.update(kind_or_entity)
    else:
      super(SearchableEntity, self).__init__(kind_or_entity, *args, **kwargs)

  def _ToPb(self):
    """Rebuilds the full text index, then delegates to the superclass.

    Returns:
      entity_pb.Entity
    """
    # Drop any stale index so it is rebuilt from the current property values.
    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
      del self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY]
    index = set()
    for (name, values) in self.items():
      if not isinstance(values, list):
        values = [values]
      # Index only (non-Blob) string-valued properties.
      # NOTE(review): assumes values is non-empty; the datastore rejects
      # empty list properties, but confirm for hand-built entities.
      if (isinstance(values[0], basestring) and
          not isinstance(values[0], datastore_types.Blob)):
        for value in values:
          index.update(SearchableEntity._FullTextIndex(
              value, self._word_delimiter_regex))
    index_list = list(index)
    if index_list:
      self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY] = index_list
    return super(SearchableEntity, self)._ToPb()

  @classmethod
  def _FullTextIndex(cls, text, word_delimiter_regex=None):
    """Returns a set of keywords appropriate for full text indexing.

    See SearchableQuery.Search() for details.

    Args:
      text: string
      word_delimiter_regex: optional regex matching characters that delimit
          words; defaults to cls._word_delimiter_regex (ASCII punctuation).

    Returns:
      set of strings
    """
    if word_delimiter_regex is None:
      word_delimiter_regex = cls._word_delimiter_regex
    if text:
      datastore_types.ValidateString(text, 'text', max_len=sys.maxint)
      # Split on delimiters, lowercase, and deduplicate.
      text = word_delimiter_regex.sub(' ', text)
      words = text.lower().split()
      words = set(unicode(w) for w in words)
      # Remove stop words and words too short to index.
      words -= cls._FULL_TEXT_STOP_WORDS
      for word in list(words):
        if len(word) < cls._FULL_TEXT_MIN_LENGTH:
          words.remove(word)
    else:
      words = set()
    return words
class SearchableQuery(datastore.Query):
  """A subclass of datastore.Query that supports full text search.

  Only searches over entities that were created and stored using the
  SearchableEntity or SearchableModel classes.
  """

  def Search(self, search_query, word_delimiter_regex=None):
    """Adds a search query; may be combined with filters.

    Note that keywords in the search query will be silently dropped if they
    are stop words or too short, ie if they wouldn't be indexed.

    Args:
      search_query: string

    Returns:
      # this query
      SearchableQuery
    """
    datastore_types.ValidateString(search_query, 'search query')
    self._search_query = search_query
    self._word_delimiter_regex = word_delimiter_regex
    return self

  def _ToPb(self, limit=None, offset=None):
    """Adds filters for the search query, then delegates to the superclass.

    Raises BadFilterError if a filter on the index property already exists.

    Args:
      # an upper bound on the number of results returned by the query.
      limit: int
      # number of results that match the query to skip. limit is applied
      # after the offset is fulfilled.
      offset: int

    Returns:
      datastore_pb.Query
    """
    # Callers may not filter on the reserved index property themselves.
    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
      raise datastore_errors.BadFilterError(
          '%s is a reserved name.' % SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
    pb = super(SearchableQuery, self)._ToPb(limit=limit, offset=offset)
    if hasattr(self, '_search_query'):
      keywords = SearchableEntity._FullTextIndex(
          self._search_query, self._word_delimiter_regex)
      multiple = len(keywords) > 1
      # One EQUAL filter per keyword; all must match.
      for keyword in keywords:
        keyword_filter = pb.add_filter()
        keyword_filter.set_op(datastore_pb.Query_Filter.EQUAL)
        prop = keyword_filter.add_property()
        prop.set_name(SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
        prop.set_multiple(multiple)
        prop.mutable_value().set_stringvalue(unicode(keyword).encode('utf-8'))
    return pb
class SearchableMultiQuery(datastore.MultiQuery):
  """A multiquery that supports Search() by searching subqueries."""

  def Search(self, *args, **kwargs):
    """Forwards a search query to every subquery.

    Args:
      args: Passed to Search on each subquery.
      kwargs: Passed to Search on each subquery.

    Returns:
      self for consistency with SearchableQuery.
    """
    for subquery in self:
      subquery.Search(*args, **kwargs)
    return self
class SearchableModel(db.Model):
  """A subclass of db.Model that supports full text search and indexing.

  Automatically indexes all string-based properties. To search, use the all()
  method to get a SearchableModel.Query, then use its search() method.
  """

  class Query(db.Query):
    """A subclass of db.Query that supports full text search."""

    _search_query = None

    def search(self, search_query):
      """Adds a full text search to this query.

      Args:
        search_query: a string containing the full text search query.

      Returns:
        self
      """
      self._search_query = search_query
      return self

    def _get_query(self):
      """Wraps db.Query._get_query() and injects SearchableQuery."""
      wrapped = db.Query._get_query(
          self,
          _query_class=SearchableQuery,
          _multi_query_class=SearchableMultiQuery)
      if self._search_query:
        wrapped.Search(self._search_query)
      return wrapped

  def _populate_internal_entity(self):
    """Wraps db.Model._populate_internal_entity() and injects
    SearchableEntity."""
    return db.Model._populate_internal_entity(
        self, _entity_class=SearchableEntity)

  @classmethod
  def from_entity(cls, entity):
    """Wraps db.Model.from_entity() and injects SearchableEntity."""
    if not isinstance(entity, SearchableEntity):
      entity = SearchableEntity(entity)
    return super(SearchableModel, cls).from_entity(entity)

  @classmethod
  def all(cls):
    """Returns a SearchableModel.Query for this kind."""
    return SearchableModel.Query(cls)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Full text indexing and search, implemented in pure python.
Defines a SearchableModel subclass of db.Model that supports full text
indexing and search, based on the datastore's existing indexes.
Don't expect too much. First, there's no ranking, which is a killer drawback.
There's also no exact phrase match, substring match, boolean operators,
stemming, or other common full text search features. Finally, support for stop
words (common words that are not indexed) is currently limited to English.
To be indexed, entities must be created and saved as SearchableModel
instances, e.g.:
class Article(search.SearchableModel):
text = db.TextProperty()
...
article = Article(text=...)
article.save()
To search the full text index, use the SearchableModel.all() method to get an
instance of SearchableModel.Query, which subclasses db.Query. Use its search()
method to provide a search query, in addition to any other filters or sort
orders, e.g.:
query = article.all().search('a search query').filter(...).order(...)
for result in query:
...
The full text index is stored in a property named __searchable_text_index.
In general, if you just want to provide full text search, you *don't* need to
add any extra indexes to your index.yaml. However, if you want to use search()
in a query *in addition to* an ancestor, filter, or sort order, you'll need to
create an index in index.yaml with the __searchable_text_index property. For
example:
- kind: Article
properties:
- name: __searchable_text_index
- name: date
direction: desc
...
Note that using SearchableModel will noticeably increase the latency of save()
operations, since it writes an index row for each indexable word. This also
means that the latency of save() will increase roughly with the size of the
properties in a given entity. Caveat hacker!
"""
import re
import string
import sys
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.ext import db
from google.appengine.datastore import datastore_pb
class SearchableEntity(datastore.Entity):
"""A subclass of datastore.Entity that supports full text indexing.
Automatically indexes all string and Text properties, using the datastore's
built-in per-property indices. To search, use the SearchableQuery class and
its Search() method.
"""
_FULL_TEXT_INDEX_PROPERTY = '__searchable_text_index'
_FULL_TEXT_MIN_LENGTH = 3
_FULL_TEXT_STOP_WORDS = frozenset([
'a', 'about', 'according', 'accordingly', 'affected', 'affecting', 'after',
'again', 'against', 'all', 'almost', 'already', 'also', 'although',
'always', 'am', 'among', 'an', 'and', 'any', 'anyone', 'apparently', 'are',
'arise', 'as', 'aside', 'at', 'away', 'be', 'became', 'because', 'become',
'becomes', 'been', 'before', 'being', 'between', 'both', 'briefly', 'but',
'by', 'came', 'can', 'cannot', 'certain', 'certainly', 'could', 'did', 'do',
'does', 'done', 'during', 'each', 'either', 'else', 'etc', 'ever', 'every',
'following', 'for', 'found', 'from', 'further', 'gave', 'gets', 'give',
'given', 'giving', 'gone', 'got', 'had', 'hardly', 'has', 'have', 'having',
'here', 'how', 'however', 'i', 'if', 'in', 'into', 'is', 'it', 'itself',
'just', 'keep', 'kept', 'knowledge', 'largely', 'like', 'made', 'mainly',
'make', 'many', 'might', 'more', 'most', 'mostly', 'much', 'must', 'nearly',
'necessarily', 'neither', 'next', 'no', 'none', 'nor', 'normally', 'not',
'noted', 'now', 'obtain', 'obtained', 'of', 'often', 'on', 'only', 'or',
'other', 'our', 'out', 'owing', 'particularly', 'past', 'perhaps', 'please',
'poorly', 'possible', 'possibly', 'potentially', 'predominantly', 'present',
'previously', 'primarily', 'probably', 'prompt', 'promptly', 'put',
'quickly', 'quite', 'rather', 'readily', 'really', 'recently', 'regarding',
'regardless', 'relatively', 'respectively', 'resulted', 'resulting',
'results', 'said', 'same', 'seem', 'seen', 'several', 'shall', 'should',
'show', 'showed', 'shown', 'shows', 'significantly', 'similar', 'similarly',
'since', 'slightly', 'so', 'some', 'sometime', 'somewhat', 'soon',
'specifically', 'state', 'states', 'strongly', 'substantially',
'successfully', 'such', 'sufficiently', 'than', 'that', 'the', 'their',
'theirs', 'them', 'then', 'there', 'therefore', 'these', 'they', 'this',
'those', 'though', 'through', 'throughout', 'to', 'too', 'toward', 'under',
'unless', 'until', 'up', 'upon', 'use', 'used', 'usefully', 'usefulness',
'using', 'usually', 'various', 'very', 'was', 'we', 'were', 'what', 'when',
'where', 'whether', 'which', 'while', 'who', 'whose', 'why', 'widely',
'will', 'with', 'within', 'without', 'would', 'yet', 'you'])
_word_delimiter_regex = re.compile('[' + re.escape(string.punctuation) + ']')
def __init__(self, kind_or_entity, word_delimiter_regex=None, *args,
**kwargs):
"""Constructor. May be called as a copy constructor.
If kind_or_entity is a datastore.Entity, copies it into this Entity.
datastore.Get() and Query() returns instances of datastore.Entity, so this
is useful for converting them back to SearchableEntity so that they'll be
indexed when they're stored back in the datastore.
Otherwise, passes through the positional and keyword args to the
datastore.Entity constructor.
Args:
kind_or_entity: string or datastore.Entity
word_delimiter_regex: a regex matching characters that delimit words
"""
self._word_delimiter_regex = word_delimiter_regex
if isinstance(kind_or_entity, datastore.Entity):
self._Entity__key = kind_or_entity._Entity__key
self.update(kind_or_entity)
else:
super(SearchableEntity, self).__init__(kind_or_entity, *args, **kwargs)
def _ToPb(self):
"""Rebuilds the full text index, then delegates to the superclass.
Returns:
entity_pb.Entity
"""
if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
del self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY]
index = set()
for (name, values) in self.items():
if not isinstance(values, list):
values = [values]
if (isinstance(values[0], basestring) and
not isinstance(values[0], datastore_types.Blob)):
for value in values:
index.update(SearchableEntity._FullTextIndex(
value, self._word_delimiter_regex))
index_list = list(index)
if index_list:
self[SearchableEntity._FULL_TEXT_INDEX_PROPERTY] = index_list
return super(SearchableEntity, self)._ToPb()
@classmethod
def _FullTextIndex(cls, text, word_delimiter_regex=None):
"""Returns a set of keywords appropriate for full text indexing.
See SearchableQuery.Search() for details.
Args:
text: string
Returns:
set of strings
"""
if word_delimiter_regex is None:
word_delimiter_regex = cls._word_delimiter_regex
if text:
datastore_types.ValidateString(text, 'text', max_len=sys.maxint)
text = word_delimiter_regex.sub(' ', text)
words = text.lower().split()
words = set(unicode(w) for w in words)
words -= cls._FULL_TEXT_STOP_WORDS
for word in list(words):
if len(word) < cls._FULL_TEXT_MIN_LENGTH:
words.remove(word)
else:
words = set()
return words
class SearchableQuery(datastore.Query):
  """A subclass of datastore.Query that supports full text search.
  Only searches over entities that were created and stored using the
  SearchableEntity or SearchableModel classes.
  """
  def Search(self, search_query, word_delimiter_regex=None):
    """Add a search query. This may be combined with filters.
    Note that keywords in the search query will be silently dropped if they
    are stop words or too short, ie if they wouldn't be indexed.
    Args:
      search_query: string
      word_delimiter_regex: optional compiled regex used to split the query
        into keywords; None selects SearchableEntity's default splitter.
    Returns:
      # this query
      SearchableQuery
    """
    datastore_types.ValidateString(search_query, 'search query')
    # Stored here and consumed later by _ToPb(), which turns the keywords
    # into equality filters on the full text index property.
    self._search_query = search_query
    self._word_delimiter_regex = word_delimiter_regex
    return self
  def _ToPb(self, limit=None, offset=None):
    """Adds filters for the search query, then delegates to the superclass.
    Raises BadFilterError if a filter on the index property already exists.
    Args:
      # an upper bound on the number of results returned by the query.
      limit: int
      # number of results that match the query to skip. limit is applied
      # after the offset is fulfilled.
      offset: int
    Returns:
      datastore_pb.Query
    """
    # NOTE(review): relies on datastore.Query behaving as a mapping of filter
    # strings; membership here indicates the caller filtered directly on the
    # reserved index property, which is disallowed.
    if SearchableEntity._FULL_TEXT_INDEX_PROPERTY in self:
      raise datastore_errors.BadFilterError(
          '%s is a reserved name.' % SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
    pb = super(SearchableQuery, self)._ToPb(limit=limit, offset=offset)
    if hasattr(self, '_search_query'):
      keywords = SearchableEntity._FullTextIndex(
          self._search_query, self._word_delimiter_regex)
      # One EQUAL filter per keyword: every keyword must appear in the
      # entity's stored full text index for it to match.
      for keyword in keywords:
        filter = pb.add_filter()
        filter.set_op(datastore_pb.Query_Filter.EQUAL)
        prop = filter.add_property()
        prop.set_name(SearchableEntity._FULL_TEXT_INDEX_PROPERTY)
        prop.set_multiple(len(keywords) > 1)
        prop.mutable_value().set_stringvalue(unicode(keyword).encode('utf-8'))
    return pb
class SearchableMultiQuery(datastore.MultiQuery):
  """A multiquery that supports Search() by searching subqueries."""
  def Search(self, *args, **kwargs):
    """Applies a full text search to every subquery.
    Args:
      args: Passed through to Search() on each subquery.
      kwargs: Passed through to Search() on each subquery.
    Returns:
      self for consistency with SearchableQuery.
    """
    for subquery in self:
      subquery.Search(*args, **kwargs)
    return self
class SearchableModel(db.Model):
  """A subclass of db.Model that supports full text search and indexing.
  Automatically indexes all string-based properties. To search, use the all()
  method to get a SearchableModel.Query, then use its search() method.
  """
  class Query(db.Query):
    """A subclass of db.Query that supports full text search."""
    # Raw search string set by search(); None means no full-text restriction.
    _search_query = None
    def search(self, search_query):
      """Adds a full text search to this query.
      Args:
        search_query, a string containing the full text search query.
      Returns:
        self
      """
      self._search_query = search_query
      return self
    def _get_query(self):
      """Wraps db.Query._get_query() and injects SearchableQuery."""
      # The private keyword args make db build SearchableQuery /
      # SearchableMultiQuery instead of the plain datastore classes.
      query = db.Query._get_query(self,
                                  _query_class=SearchableQuery,
                                  _multi_query_class=SearchableMultiQuery)
      if self._search_query:
        query.Search(self._search_query)
      return query
  def _populate_internal_entity(self):
    """Wraps db.Model._populate_internal_entity() and injects
    SearchableEntity."""
    return db.Model._populate_internal_entity(self,
                                              _entity_class=SearchableEntity)
  @classmethod
  def from_entity(cls, entity):
    """Wraps db.Model.from_entity() and injects SearchableEntity."""
    # Re-wrapping ensures the full text index is rebuilt when the entity is
    # stored again (see SearchableEntity.__init__).
    if not isinstance(entity, SearchableEntity):
      entity = SearchableEntity(entity)
    return super(SearchableModel, cls).from_entity(entity)
  @classmethod
  def all(cls):
    """Returns a SearchableModel.Query for this kind."""
    return SearchableModel.Query(cls)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Preloads many modules to reduce loading time of third-party code."""
import os
# Temporarily break os.urandom while 'random' is imported: the random module
# calls os.urandom() at import time to seed itself, and making it raise here
# forces the cheaper fallback seed. NOTE(review): presumably done to avoid
# touching system entropy in the sandboxed runtime -- confirm.
_original_os_urandom = os.urandom
def os_urandom_replacement(n):
  raise NotImplementedError
os.urandom = os_urandom_replacement
import random
# Restore the real implementation for application code.
os.urandom = _original_os_urandom
random._urandom = _original_os_urandom
import BaseHTTPServer
import Bastion
import CGIHTTPServer
import ConfigParser
import Cookie
import DocXMLRPCServer
import HTMLParser
import MimeWriter
import Queue
import SimpleHTTPServer
import SimpleXMLRPCServer
import SocketServer
import StringIO
import UserDict
import UserList
import UserString
import aifc
import anydbm
import atexit
import audiodev
import base64
import bdb
import binhex
import bisect
import bz2
import calendar
import cgi
import cgitb
import chunk
import cmd
import code
import codecs
import codeop
import colorsys
import commands
import cookielib
import copy
import copy_reg
import csv
import datetime
import difflib
import dircache
import dis
import doctest
import dumbdbm
import filecmp
import fileinput
import fnmatch
import formatter
import fpformat
import ftplib
import getopt
import getpass
import gettext
import glob
import gzip
import heapq
import hmac
import htmlentitydefs
import htmllib
import httplib
import imaplib
import imghdr
import imputil
import inspect
import keyword
import linecache
import locale
import logging
import macpath
import macurl2path
import mailbox
import mailcap
import markupbase
import math
import md5
import mhlib
import mimetools
import mimetypes
import modulefinder
import multifile
import mutex
import netrc
import new
import nntplib
import ntpath
import nturl2path
import opcode
import optparse
import os2emxpath
import pdb
import pickle
import pickletools
import pipes
import pkgutil
import popen2
import poplib
import posixpath
import pprint
import profile
import pstats
import pyclbr
import pydoc
import quopri
import re
import repr
import rfc822
import robotparser
import sched
import sets
import sgmllib
import sha
import shelve
import shlex
import shutil
import site
import smtplib
import sndhdr
import socket
import stat
import statvfs
import string
import stringold
import stringprep
import struct
import sunau
import sunaudio
import symbol
import sys
import tabnanny
import tarfile
import telnetlib
import tempfile
import textwrap
import time
import timeit
import toaiff
import token
import tokenize
import trace
import traceback
import types
import unittest
import urllib
import urllib2
import urlparse
import uu
import uuid
import warnings
import wave
import weakref
import whichdb
import xdrlib
import xml.parsers.expat
import xml.dom
import xml.sax
import xmlrpclib
import zipfile
import zlib
import django
import neo_cs
import neo_util
import webob
import wsgiref.handlers
from google.appengine.api import datastore
from google.appengine.api import images
from google.appengine.api import mail
from google.appengine.api import memcache
from google.appengine.api import urlfetch
from google.appengine.api import users
from google.appengine.ext import admin
from google.appengine.ext import bulkload
from google.appengine.ext import db
from google.appengine.ext import gql
from google.appengine.ext import search
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.runtime import apiproxy
if __name__ == '__main__':
  # Importing this module is its entire purpose; there is nothing to run.
  pass
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Preloads many modules to reduce loading time of third-party code."""
import os
# Temporarily break os.urandom while 'random' is imported: the random module
# calls os.urandom() at import time to seed itself, and making it raise here
# forces the cheaper fallback seed. NOTE(review): presumably done to avoid
# touching system entropy in the sandboxed runtime -- confirm.
_original_os_urandom = os.urandom
def os_urandom_replacement(n):
  raise NotImplementedError
os.urandom = os_urandom_replacement
import random
# Restore the real implementation for application code.
os.urandom = _original_os_urandom
random._urandom = _original_os_urandom
import BaseHTTPServer
import Bastion
import CGIHTTPServer
import ConfigParser
import Cookie
import DocXMLRPCServer
import HTMLParser
import MimeWriter
import Queue
import SimpleHTTPServer
import SimpleXMLRPCServer
import SocketServer
import StringIO
import UserDict
import UserList
import UserString
import aifc
import anydbm
import atexit
import audiodev
import base64
import bdb
import binhex
import bisect
import bz2
import calendar
import cgi
import cgitb
import chunk
import cmd
import code
import codecs
import codeop
import colorsys
import commands
import cookielib
import copy
import copy_reg
import csv
import datetime
import difflib
import dircache
import dis
import doctest
import dumbdbm
import filecmp
import fileinput
import fnmatch
import formatter
import fpformat
import ftplib
import getopt
import getpass
import gettext
import glob
import gzip
import heapq
import hmac
import htmlentitydefs
import htmllib
import httplib
import imaplib
import imghdr
import imputil
import inspect
import keyword
import linecache
import locale
import logging
import macpath
import macurl2path
import mailbox
import mailcap
import markupbase
import math
import md5
import mhlib
import mimetools
import mimetypes
import modulefinder
import multifile
import mutex
import netrc
import new
import nntplib
import ntpath
import nturl2path
import opcode
import optparse
import os2emxpath
import pdb
import pickle
import pickletools
import pipes
import pkgutil
import popen2
import poplib
import posixpath
import pprint
import profile
import pstats
import pyclbr
import pydoc
import quopri
import re
import repr
import rfc822
import robotparser
import sched
import sets
import sgmllib
import sha
import shelve
import shlex
import shutil
import site
import smtplib
import sndhdr
import socket
import stat
import statvfs
import string
import stringold
import stringprep
import struct
import sunau
import sunaudio
import symbol
import sys
import tabnanny
import tarfile
import telnetlib
import tempfile
import textwrap
import time
import timeit
import toaiff
import token
import tokenize
import trace
import traceback
import types
import unittest
import urllib
import urllib2
import urlparse
import uu
import uuid
import warnings
import wave
import weakref
import whichdb
import xdrlib
import xml.parsers.expat
import xml.dom
import xml.sax
import xmlrpclib
import zipfile
import zlib
import django
import neo_cs
import neo_util
import webob
import wsgiref.handlers
from google.appengine.api import datastore
from google.appengine.api import images
from google.appengine.api import mail
from google.appengine.api import memcache
from google.appengine.api import urlfetch
from google.appengine.api import users
from google.appengine.ext import admin
from google.appengine.ext import bulkload
from google.appengine.ext import db
from google.appengine.ext import gql
from google.appengine.ext import search
from google.appengine.ext import webapp
from google.appengine.ext.webapp import template
from google.appengine.runtime import apiproxy
if __name__ == '__main__':
  # Importing this module is its entire purpose; there is nothing to run.
  pass
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors thrown by apiproxy.MakeSyncCall.
"""
class Error(Exception):
  """Base APIProxy error type; catch this to handle any APIProxy failure."""
class RPCFailedError(Error):
  """Raised by APIProxy calls when the RPC to the application server fails."""
class CallNotFoundError(Error):
  """Raised by APIProxy calls when the requested package or method is unknown."""
class ArgumentError(Error):
  """Raised by APIProxy calls if there is an error parsing the arguments."""
class DeadlineExceededError(Error):
  """Raised by APIProxy calls if the call took too long to respond."""
class CancelledError(Error):
  """Raised by APIProxy calls if the call was cancelled, such as when
  the user's request is exiting."""
class ApplicationError(Error):
  """Raised by APIProxy in the event of an application-level error.

  Carries the service-specific error code and an optional detail string.
  """
  def __init__(self, application_error, error_detail=''):
    self.application_error = application_error
    self.error_detail = error_detail
    super(ApplicationError, self).__init__(application_error)
  def __str__(self):
    # Same rendering as before: 'ApplicationError: <code> <detail>'.
    return 'ApplicationError: %d %s' % (self.application_error,
                                        self.error_detail)
class OverQuotaError(Error):
  """Raised by APIProxy calls when they have been blocked due to a lack of
  available quota."""
class RequestTooLargeError(Error):
  """Raised by APIProxy calls if the request payload was too large."""
class CapabilityDisabledError(Error):
  """Raised by APIProxy when API calls are temporarily disabled."""
class InterruptedError(Error):
  """Raised by APIProxy.Wait() when the wait is interrupted by an uncaught
  exception from some callback, not necessarily associated with the RPC in
  question."""
  def __init__(self, exception, rpc):
    # args is set explicitly so str()/repr() show the offending exception.
    self.args = ("The Wait() request was interrupted by an exception from "
                 "another callback:", exception)
    # Private storage; exposed read-only via the properties below.
    self.__rpc = rpc
    self.__exception = exception
  @property
  def rpc(self):
    # The RPC the offending callback belonged to; may be None if unknown.
    return self.__rpc
  @property
  def exception(self):
    # The original exception raised by the callback.
    return self.__exception
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Makes API calls to various Google-provided services.
Provides methods for making calls into Google Apphosting services and APIs
from your application code. This code will only work properly from within
the Google Apphosting environment.
"""
import sys
from google.net.proto import ProtocolBuffer
from google.appengine import runtime
from google.appengine.api import apiproxy_rpc
from google3.apphosting.runtime import _apphosting_runtime___python__apiproxy
from google.appengine.runtime import apiproxy_errors
# Status codes reported by the runtime in the RPC result dict's 'error' key.
OK = 0
RPC_FAILED = 1
CALL_NOT_FOUND = 2
ARGUMENT_ERROR = 3
DEADLINE_EXCEEDED = 4
CANCELLED = 5
APPLICATION_ERROR = 6
OTHER_ERROR = 7
OVER_QUOTA = 8
REQUEST_TOO_LARGE = 9
CAPABILITY_DISABLED = 10
# Maps error codes to (exception class, message template); the template is
# filled with (package, call). OK means success, and APPLICATION_ERROR and
# CAPABILITY_DISABLED are handled specially in RPC.__MakeCallDone, so none of
# those three appear here.
_ExceptionsMap = {
  RPC_FAILED:
  (apiproxy_errors.RPCFailedError,
   "The remote RPC to the application server failed for the call %s.%s()."),
  CALL_NOT_FOUND:
  (apiproxy_errors.CallNotFoundError,
   "The API package '%s' or call '%s()' was not found."),
  ARGUMENT_ERROR:
  (apiproxy_errors.ArgumentError,
   "An error occurred parsing (locally or remotely) the arguments to %s.%s()."),
  DEADLINE_EXCEEDED:
  (apiproxy_errors.DeadlineExceededError,
   "The API call %s.%s() took too long to respond and was cancelled."),
  CANCELLED:
  (apiproxy_errors.CancelledError,
   "The API call %s.%s() was explicitly cancelled."),
  OTHER_ERROR:
  (apiproxy_errors.Error,
   "An error occurred for the API request %s.%s()."),
  OVER_QUOTA:
  (apiproxy_errors.OverQuotaError,
   "The API call %s.%s() required more quota than is available."),
  REQUEST_TOO_LARGE:
  (apiproxy_errors.RequestTooLargeError,
   "The request to API call %s.%s() was too large."),
}
class RPC(apiproxy_rpc.RPC):
  """A RPC object, suitable for talking to remote services.
  Each instance of this object can be used only once, and should not be reused.
  Stores the data members and methods for making RPC calls via the APIProxy.
  """
  def __init__(self, *args, **kargs):
    """Constructor for the RPC object. All arguments are optional, and
    simply set members on the class. These data members will be
    overriden by values passed to MakeCall.
    """
    super(RPC, self).__init__(*args, **kargs)
    # Populated by the runtime's MakeCall with an 'error' key and, depending
    # on the outcome, 'application_error', 'error_detail' and 'result_string'.
    self.__result_dict = {}
  def _WaitImpl(self):
    """Waits on the API call associated with this RPC. The callback,
    if provided, will be executed before Wait() returns. If this RPC
    is already complete, or if the RPC was never started, this
    function will return immediately.
    Raises:
      InterruptedError if a callback throws an uncaught exception.
    """
    try:
      rpc_completed = _apphosting_runtime___python__apiproxy.Wait(self)
    except (runtime.DeadlineExceededError, apiproxy_errors.InterruptedError):
      # These propagate unchanged; only unexpected callback exceptions are
      # wrapped in InterruptedError below.
      raise
    except:
      # Deliberate bare except: any other exception escaping a callback is
      # wrapped so the caller can distinguish it from an RPC failure.
      exc_class, exc, tb = sys.exc_info()
      if (isinstance(exc, SystemError) and
          exc.args[0] == 'uncaught RPC exception'):
        # This SystemError comes from the runtime itself, not from a user
        # callback, so it is re-raised as-is.
        raise
      rpc = None
      # The runtime may tag the exception with the RPC it belongs to.
      if hasattr(exc, "_appengine_apiproxy_rpc"):
        rpc = exc._appengine_apiproxy_rpc
      new_exc = apiproxy_errors.InterruptedError(exc, rpc)
      # Three-argument raise (Python 2) preserves the original traceback.
      raise new_exc.__class__, new_exc, tb
    return True
  def _MakeCallImpl(self):
    # Serializes the request protobuf and hands it to the runtime; the
    # runtime invokes __MakeCallDone when the call completes.
    assert isinstance(self.request, ProtocolBuffer.ProtocolMessage)
    assert isinstance(self.response, ProtocolBuffer.ProtocolMessage)
    e = ProtocolBuffer.Encoder()
    self.request.Output(e)
    self.__state = RPC.RUNNING
    _apphosting_runtime___python__apiproxy.MakeCall(
        self.package, self.call, e.buffer(), self.__result_dict,
        self.__MakeCallDone, self, deadline=(self.deadline or -1))
  def __MakeCallDone(self):
    """Completion callback: translates __result_dict into either a parsed
    response protobuf or a stored exception, then fires the user callback."""
    self.__state = RPC.FINISHING
    if self.__result_dict['error'] == APPLICATION_ERROR:
      # Application-level errors carry a service-specific code and detail.
      self.__exception = apiproxy_errors.ApplicationError(
          self.__result_dict['application_error'],
          self.__result_dict['error_detail'])
    elif self.__result_dict['error'] == CAPABILITY_DISABLED:
      if self.__result_dict['error_detail']:
        self.__exception = apiproxy_errors.CapabilityDisabledError(
            self.__result_dict['error_detail'])
      else:
        self.__exception = apiproxy_errors.CapabilityDisabledError(
            "The API call %s.%s() is temporarily unavailable." % (
            self.package, self.call))
    elif self.__result_dict['error'] in _ExceptionsMap:
      exception_entry = _ExceptionsMap[self.__result_dict['error']]
      self.__exception = exception_entry[0](
          exception_entry[1] % (self.package, self.call))
    else:
      # Success (or an unmapped code): parse the serialized response bytes.
      try:
        self.response.ParseFromString(self.__result_dict['result_string'])
      except Exception, e:
        self.__exception = e
    self.__Callback()
def CreateRPC():
  """Creates a fresh RPC instance for talking to remote services.

  RPC objects are single-use; create a new one for every call.

  Returns:
    an instance of RPC object
  """
  rpc = RPC()
  return rpc
def MakeSyncCall(package, call, request, response):
  """Makes a synchronous (i.e. blocking) API call within the specified
  package for the specified call method. request and response must be the
  appropriately typed ProtocolBuffers for the API call. An exception is
  thrown if an error occurs when communicating with the system.
  Args:
    package: string, name of the API package (used in error messages).
    call: string, name of the method within the package.
    request: request ProtocolBuffer, filled in by the caller.
    response: response ProtocolBuffer, filled in by this call.
  Raises:
    See CheckSuccess() (base class); any exception recorded while the call
    ran -- typically an apiproxy_errors subclass -- is raised from here.
  """
  rpc = CreateRPC()
  rpc.MakeCall(package, call, request, response)
  # Block until the call completes, then surface any stored exception.
  rpc.Wait()
  rpc.CheckSuccess()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Makes API calls to various Google-provided services.
Provides methods for making calls into Google Apphosting services and APIs
from your application code. This code will only work properly from within
the Google Apphosting environment.
"""
import sys
from google.net.proto import ProtocolBuffer
from google.appengine import runtime
from google.appengine.api import apiproxy_rpc
from google3.apphosting.runtime import _apphosting_runtime___python__apiproxy
from google.appengine.runtime import apiproxy_errors
# Status codes reported by the runtime in the RPC result dict's 'error' key.
OK = 0
RPC_FAILED = 1
CALL_NOT_FOUND = 2
ARGUMENT_ERROR = 3
DEADLINE_EXCEEDED = 4
CANCELLED = 5
APPLICATION_ERROR = 6
OTHER_ERROR = 7
OVER_QUOTA = 8
REQUEST_TOO_LARGE = 9
CAPABILITY_DISABLED = 10
# Maps error codes to (exception class, message template); the template is
# filled with (package, call). OK means success, and APPLICATION_ERROR and
# CAPABILITY_DISABLED are handled specially in RPC.__MakeCallDone, so none of
# those three appear here.
_ExceptionsMap = {
  RPC_FAILED:
  (apiproxy_errors.RPCFailedError,
   "The remote RPC to the application server failed for the call %s.%s()."),
  CALL_NOT_FOUND:
  (apiproxy_errors.CallNotFoundError,
   "The API package '%s' or call '%s()' was not found."),
  ARGUMENT_ERROR:
  (apiproxy_errors.ArgumentError,
   "An error occurred parsing (locally or remotely) the arguments to %s.%s()."),
  DEADLINE_EXCEEDED:
  (apiproxy_errors.DeadlineExceededError,
   "The API call %s.%s() took too long to respond and was cancelled."),
  CANCELLED:
  (apiproxy_errors.CancelledError,
   "The API call %s.%s() was explicitly cancelled."),
  OTHER_ERROR:
  (apiproxy_errors.Error,
   "An error occurred for the API request %s.%s()."),
  OVER_QUOTA:
  (apiproxy_errors.OverQuotaError,
   "The API call %s.%s() required more quota than is available."),
  REQUEST_TOO_LARGE:
  (apiproxy_errors.RequestTooLargeError,
   "The request to API call %s.%s() was too large."),
}
class RPC(apiproxy_rpc.RPC):
  """A RPC object, suitable for talking to remote services.
  Each instance of this object can be used only once, and should not be reused.
  Stores the data members and methods for making RPC calls via the APIProxy.
  """
  def __init__(self, *args, **kargs):
    """Constructor for the RPC object. All arguments are optional, and
    simply set members on the class. These data members will be
    overriden by values passed to MakeCall.
    """
    super(RPC, self).__init__(*args, **kargs)
    # Populated by the runtime's MakeCall with an 'error' key and, depending
    # on the outcome, 'application_error', 'error_detail' and 'result_string'.
    self.__result_dict = {}
  def _WaitImpl(self):
    """Waits on the API call associated with this RPC. The callback,
    if provided, will be executed before Wait() returns. If this RPC
    is already complete, or if the RPC was never started, this
    function will return immediately.
    Raises:
      InterruptedError if a callback throws an uncaught exception.
    """
    try:
      rpc_completed = _apphosting_runtime___python__apiproxy.Wait(self)
    except (runtime.DeadlineExceededError, apiproxy_errors.InterruptedError):
      # These propagate unchanged; only unexpected callback exceptions are
      # wrapped in InterruptedError below.
      raise
    except:
      # Deliberate bare except: any other exception escaping a callback is
      # wrapped so the caller can distinguish it from an RPC failure.
      exc_class, exc, tb = sys.exc_info()
      if (isinstance(exc, SystemError) and
          exc.args[0] == 'uncaught RPC exception'):
        # This SystemError comes from the runtime itself, not from a user
        # callback, so it is re-raised as-is.
        raise
      rpc = None
      # The runtime may tag the exception with the RPC it belongs to.
      if hasattr(exc, "_appengine_apiproxy_rpc"):
        rpc = exc._appengine_apiproxy_rpc
      new_exc = apiproxy_errors.InterruptedError(exc, rpc)
      # Three-argument raise (Python 2) preserves the original traceback.
      raise new_exc.__class__, new_exc, tb
    return True
  def _MakeCallImpl(self):
    # Serializes the request protobuf and hands it to the runtime; the
    # runtime invokes __MakeCallDone when the call completes.
    assert isinstance(self.request, ProtocolBuffer.ProtocolMessage)
    assert isinstance(self.response, ProtocolBuffer.ProtocolMessage)
    e = ProtocolBuffer.Encoder()
    self.request.Output(e)
    self.__state = RPC.RUNNING
    _apphosting_runtime___python__apiproxy.MakeCall(
        self.package, self.call, e.buffer(), self.__result_dict,
        self.__MakeCallDone, self, deadline=(self.deadline or -1))
  def __MakeCallDone(self):
    """Completion callback: translates __result_dict into either a parsed
    response protobuf or a stored exception, then fires the user callback."""
    self.__state = RPC.FINISHING
    if self.__result_dict['error'] == APPLICATION_ERROR:
      # Application-level errors carry a service-specific code and detail.
      self.__exception = apiproxy_errors.ApplicationError(
          self.__result_dict['application_error'],
          self.__result_dict['error_detail'])
    elif self.__result_dict['error'] == CAPABILITY_DISABLED:
      if self.__result_dict['error_detail']:
        self.__exception = apiproxy_errors.CapabilityDisabledError(
            self.__result_dict['error_detail'])
      else:
        self.__exception = apiproxy_errors.CapabilityDisabledError(
            "The API call %s.%s() is temporarily unavailable." % (
            self.package, self.call))
    elif self.__result_dict['error'] in _ExceptionsMap:
      exception_entry = _ExceptionsMap[self.__result_dict['error']]
      self.__exception = exception_entry[0](
          exception_entry[1] % (self.package, self.call))
    else:
      # Success (or an unmapped code): parse the serialized response bytes.
      try:
        self.response.ParseFromString(self.__result_dict['result_string'])
      except Exception, e:
        self.__exception = e
    self.__Callback()
def CreateRPC():
  """Creates a fresh RPC instance for talking to remote services.

  RPC objects are single-use; create a new one for every call.

  Returns:
    an instance of RPC object
  """
  rpc = RPC()
  return rpc
def MakeSyncCall(package, call, request, response):
  """Makes a synchronous (i.e. blocking) API call within the specified
  package for the specified call method. request and response must be the
  appropriately typed ProtocolBuffers for the API call. An exception is
  thrown if an error occurs when communicating with the system.
  Args:
    package: string, name of the API package (used in error messages).
    call: string, name of the method within the package.
    request: request ProtocolBuffer, filled in by the caller.
    response: response ProtocolBuffer, filled in by this call.
  Raises:
    See CheckSuccess() (base class); any exception recorded while the call
    ran -- typically an apiproxy_errors subclass -- is raised from here.
  """
  rpc = CreateRPC()
  rpc.MakeCall(package, call, request, response)
  # Block until the call completes, then surface any stored exception.
  rpc.Wait()
  rpc.CheckSuccess()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Define the DeadlineExceededError exception."""
try:
  # Python 2.4 compatibility: BaseException was only added in Python 2.5.
  BaseException
except NameError:
  BaseException = Exception
class DeadlineExceededError(BaseException):
  """Exception raised when the request reaches its overall time limit.

  Deriving from BaseException (rather than Exception) means a bare
  'except Exception' handler in application code will not catch it.

  Not to be confused with runtime.apiproxy_errors.DeadlineExceededError.
  That one is raised when individual API calls take too long.
  """
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors thrown by apiproxy.MakeSyncCall.
"""
class Error(Exception):
  """Base APIProxy error type; catch this to handle any APIProxy failure."""
class RPCFailedError(Error):
  """Raised by APIProxy calls when the RPC to the application server fails."""
class CallNotFoundError(Error):
  """Raised by APIProxy calls when the requested package or method is unknown."""
class ArgumentError(Error):
  """Raised by APIProxy calls if there is an error parsing the arguments."""
class DeadlineExceededError(Error):
  """Raised by APIProxy calls if the call took too long to respond."""
class CancelledError(Error):
  """Raised by APIProxy calls if the call was cancelled, such as when
  the user's request is exiting."""
class ApplicationError(Error):
  """Raised by APIProxy in the event of an application-level error.

  Carries the service-specific error code and an optional detail string.
  """
  def __init__(self, application_error, error_detail=''):
    self.application_error = application_error
    self.error_detail = error_detail
    super(ApplicationError, self).__init__(application_error)
  def __str__(self):
    # Same rendering as before: 'ApplicationError: <code> <detail>'.
    return 'ApplicationError: %d %s' % (self.application_error,
                                        self.error_detail)
class OverQuotaError(Error):
  """Raised by APIProxy calls when they have been blocked due to a lack of
  available quota."""
class RequestTooLargeError(Error):
  """Raised by APIProxy calls if the request payload was too large."""
class CapabilityDisabledError(Error):
  """Raised by APIProxy when API calls are temporarily disabled."""
class InterruptedError(Error):
  """Raised by APIProxy.Wait() when the wait is interrupted by an uncaught
  exception from some callback, not necessarily associated with the RPC in
  question."""
  def __init__(self, exception, rpc):
    # args is set explicitly so str()/repr() show the offending exception.
    self.args = ("The Wait() request was interrupted by an exception from "
                 "another callback:", exception)
    # Private storage; exposed read-only via the properties below.
    self.__rpc = rpc
    self.__exception = exception
  @property
  def rpc(self):
    # The RPC the offending callback belonged to; may be None if unknown.
    return self.__rpc
  @property
  def exception(self):
    # The original exception raised by the callback.
    return self.__exception
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Define the DeadlineExceededError exception."""
try:
  # Python 2.4 compatibility: BaseException was only added in Python 2.5.
  BaseException
except NameError:
  BaseException = Exception
class DeadlineExceededError(BaseException):
  """Exception raised when the request reaches its overall time limit.

  Deriving from BaseException (rather than Exception) means a bare
  'except Exception' handler in application code will not catch it.

  Not to be confused with runtime.apiproxy_errors.DeadlineExceededError.
  That one is raised when individual API calls take too long.
  """
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import Integer64Proto;
from google.appengine.api.api_base_pb import StringProto;
from google.appengine.api.api_base_pb import VoidProto;
from google.appengine.datastore.entity_pb import CompositeIndex
from google.appengine.datastore.entity_pb import EntityProto
from google.appengine.datastore.entity_pb import Index
from google.appengine.datastore.entity_pb import Property
from google.appengine.datastore.entity_pb import Reference
class Transaction(ProtocolBuffer.ProtocolMessage):
  """Datastore transaction message: a single required fixed64 handle.

  Wire format: field 1, wire type 1 (64-bit), i.e. tag byte 9.
  """

  # Presence flag and default value for the required handle field.
  has_handle_ = 0
  handle_ = 0

  def __init__(self, contents=None):
    """Optionally initialize from a serialized byte string."""
    if contents is not None:
      self.MergeFromString(contents)

  def handle(self):
    return self.handle_

  def set_handle(self, x):
    self.has_handle_ = 1
    self.handle_ = x

  def clear_handle(self):
    if self.has_handle_:
      self.has_handle_ = 0
      self.handle_ = 0

  def has_handle(self):
    return self.has_handle_

  def MergeFrom(self, x):
    """Copy every set field of another Transaction into this one."""
    assert x is not self
    if x.has_handle():
      self.set_handle(x.handle())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, else 0."""
    if x is self:
      return 1
    if self.has_handle_ != x.has_handle_:
      return 0
    if self.has_handle_ and self.handle_ != x.handle_:
      return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required handle field has been set."""
    if self.has_handle_:
      return 1
    if debug_strs is not None:
      debug_strs.append('Required field: handle not set.')
    return 0

  def ByteSize(self):
    # One tag byte plus an 8-byte fixed64 value.
    return 9

  def Clear(self):
    self.clear_handle()

  def OutputUnchecked(self, out):
    out.putVarInt32(9)
    out.put64(self.handle_)

  def TryMerge(self, d):
    """Decode fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 9:
        self.set_handle(d.get64())
      elif tag == 0:
        # Tag 0 is never valid; treat as corrupt input.
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    pieces = []
    if self.has_handle_:
      pieces.append(prefix + ("handle: %s\n" % self.DebugFormatFixed64(self.handle_)))
    return "".join(pieces)

  khandle = 1

  _TEXT = (
   "ErrorCode",
   "handle",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.DOUBLE,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Query_Filter(ProtocolBuffer.ProtocolMessage):
  """Group submessage of Query: one filter clause.

  Fields:
    op: required Operator enum (tag 48 = field 6 varint).
    property: repeated Property operands (tag 114 = field 14 string).
  Encoded as a protobuf group: decoding stops at ENDGROUP tag 36.
  """

  # Operator enum values.
  LESS_THAN = 1
  LESS_THAN_OR_EQUAL = 2
  GREATER_THAN = 3
  GREATER_THAN_OR_EQUAL = 4
  EQUAL = 5
  IN = 6
  EXISTS = 7

  _Operator_NAMES = {
    1: "LESS_THAN",
    2: "LESS_THAN_OR_EQUAL",
    3: "GREATER_THAN",
    4: "GREATER_THAN_OR_EQUAL",
    5: "EQUAL",
    6: "IN",
    7: "EXISTS",
  }

  def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "")
  Operator_Name = classmethod(Operator_Name)

  # Presence flag and default for the required op field.
  has_op_ = 0
  op_ = 0

  def __init__(self, contents=None):
    self.property_ = []
    if contents is not None: self.MergeFromString(contents)

  def op(self): return self.op_

  def set_op(self, x):
    self.has_op_ = 1
    self.op_ = x

  def clear_op(self):
    if self.has_op_:
      self.has_op_ = 0
      self.op_ = 0

  def has_op(self): return self.has_op_

  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_

  def property(self, i):
    return self.property_[i]

  def mutable_property(self, i):
    return self.property_[i]

  def add_property(self):
    x = Property()
    self.property_.append(x)
    return x

  def clear_property(self):
    self.property_ = []

  def MergeFrom(self, x):
    """Copy every set field of another Query_Filter into this one."""
    assert x is not self
    if (x.has_op()): self.set_op(x.op())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, else 0."""
    if x is self: return 1
    if self.has_op_ != x.has_op_: return 0
    if self.has_op_ and self.op_ != x.op_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required op is set and every property is initialized."""
    initialized = 1
    if (not self.has_op_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: op not set.')
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size in bytes (excluding this group's own start/end tags)."""
    n = 0
    n += self.lengthVarInt64(self.op_)
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
    return n + 1

  def Clear(self):
    self.clear_op()
    self.clear_property()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized; field order is fixed."""
    out.putVarInt32(48)
    out.putVarInt32(self.op_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(114)
      out.putVarInt32(self.property_[i].ByteSize())
      self.property_[i].OutputUnchecked(out)

  def TryMerge(self, d):
    """Decode group fields from decoder d until the ENDGROUP tag (36)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 36: break
      if tt == 48:
        self.set_op(d.getVarInt32())
        continue
      if tt == 114:
        # Length-delimited nested message: decode from a bounded sub-window.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_property().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug dump; printElemNumber adds (n) indices to repeats."""
    res=""
    if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
class Query_Order(ProtocolBuffer.ProtocolMessage):
  """Group submessage of Query: one sort-order clause.

  Fields:
    property: required property name (tag 82 = field 10 string).
    direction: optional Direction enum, default ASCENDING
      (tag 88 = field 11 varint).
  Encoded as a protobuf group: decoding stops at ENDGROUP tag 76.
  """

  # Direction enum values.
  ASCENDING = 1
  DESCENDING = 2

  _Direction_NAMES = {
    1: "ASCENDING",
    2: "DESCENDING",
  }

  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  Direction_Name = classmethod(Direction_Name)

  # Presence flags and defaults; direction defaults to ASCENDING (1).
  has_property_ = 0
  property_ = ""
  has_direction_ = 0
  direction_ = 1

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def property(self): return self.property_

  def set_property(self, x):
    self.has_property_ = 1
    self.property_ = x

  def clear_property(self):
    if self.has_property_:
      self.has_property_ = 0
      self.property_ = ""

  def has_property(self): return self.has_property_

  def direction(self): return self.direction_

  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x

  def clear_direction(self):
    if self.has_direction_:
      self.has_direction_ = 0
      self.direction_ = 1

  def has_direction(self): return self.has_direction_

  def MergeFrom(self, x):
    """Copy every set field of another Query_Order into this one."""
    assert x is not self
    if (x.has_property()): self.set_property(x.property())
    if (x.has_direction()): self.set_direction(x.direction())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, else 0."""
    if x is self: return 1
    if self.has_property_ != x.has_property_: return 0
    if self.has_property_ and self.property_ != x.property_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required property field has been set."""
    initialized = 1
    if (not self.has_property_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: property not set.')
    return initialized

  def ByteSize(self):
    """Serialized size in bytes (excluding this group's own start/end tags)."""
    n = 0
    n += self.lengthString(len(self.property_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    return n + 1

  def Clear(self):
    self.clear_property()
    self.clear_direction()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized; field order is fixed."""
    out.putVarInt32(82)
    out.putPrefixedString(self.property_)
    if (self.has_direction_):
      out.putVarInt32(88)
      out.putVarInt32(self.direction_)

  def TryMerge(self, d):
    """Decode group fields from decoder d until the ENDGROUP tag (76)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 76: break
      if tt == 82:
        self.set_property(d.getPrefixedString())
        continue
      if tt == 88:
        self.set_direction(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug dump of the set fields."""
    res=""
    if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    return res
class Query(ProtocolBuffer.ProtocolMessage):
  """Datastore query request message (generated protocol-buffer code).

  Fields visible below: required app id; optional kind, ancestor Reference,
  search_query, hint, offset, limit, require_perfect_plan; repeated
  Filter/Order groups and composite_index CompositeIndex messages.
  NOTE: field tag numbers in ByteSize/OutputUnchecked/TryMerge are
  wire-format constants and must stay in sync with each other.
  """

  # Hint enum: which part of the query plan to apply first.
  ORDER_FIRST = 1
  ANCESTOR_FIRST = 2
  FILTER_FIRST = 3

  _Hint_NAMES = {
    1: "ORDER_FIRST",
    2: "ANCESTOR_FIRST",
    3: "FILTER_FIRST",
  }

  def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
  Hint_Name = classmethod(Hint_Name)

  # Presence flags and default values for optional scalar/message fields.
  has_app_ = 0
  app_ = ""
  has_kind_ = 0
  kind_ = ""
  has_ancestor_ = 0
  ancestor_ = None
  has_search_query_ = 0
  search_query_ = ""
  has_hint_ = 0
  hint_ = 0
  has_offset_ = 0
  offset_ = 0
  has_limit_ = 0
  limit_ = 0
  has_require_perfect_plan_ = 0
  require_perfect_plan_ = 0

  def __init__(self, contents=None):
    # Repeated fields, plus the lock guarding lazy creation of ancestor_.
    self.filter_ = []
    self.order_ = []
    self.composite_index_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def app(self): return self.app_

  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x

  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""

  def has_app(self): return self.has_app_

  def kind(self): return self.kind_

  def set_kind(self, x):
    self.has_kind_ = 1
    self.kind_ = x

  def clear_kind(self):
    if self.has_kind_:
      self.has_kind_ = 0
      self.kind_ = ""

  def has_kind(self): return self.has_kind_

  def ancestor(self):
    # Double-checked lazy initialization of the Reference submessage.
    # Note: reading ancestor() does NOT set has_ancestor_; use
    # mutable_ancestor() when the field should be marked present.
    if self.ancestor_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.ancestor_ is None: self.ancestor_ = Reference()
      finally:
        self.lazy_init_lock_.release()
    return self.ancestor_

  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()

  def clear_ancestor(self):
    if self.has_ancestor_:
      self.has_ancestor_ = 0;
      if self.ancestor_ is not None: self.ancestor_.Clear()

  def has_ancestor(self): return self.has_ancestor_

  def filter_size(self): return len(self.filter_)
  def filter_list(self): return self.filter_

  def filter(self, i):
    return self.filter_[i]

  def mutable_filter(self, i):
    return self.filter_[i]

  def add_filter(self):
    x = Query_Filter()
    self.filter_.append(x)
    return x

  def clear_filter(self):
    self.filter_ = []

  def search_query(self): return self.search_query_

  def set_search_query(self, x):
    self.has_search_query_ = 1
    self.search_query_ = x

  def clear_search_query(self):
    if self.has_search_query_:
      self.has_search_query_ = 0
      self.search_query_ = ""

  def has_search_query(self): return self.has_search_query_

  def order_size(self): return len(self.order_)
  def order_list(self): return self.order_

  def order(self, i):
    return self.order_[i]

  def mutable_order(self, i):
    return self.order_[i]

  def add_order(self):
    x = Query_Order()
    self.order_.append(x)
    return x

  def clear_order(self):
    self.order_ = []

  def hint(self): return self.hint_

  def set_hint(self, x):
    self.has_hint_ = 1
    self.hint_ = x

  def clear_hint(self):
    if self.has_hint_:
      self.has_hint_ = 0
      self.hint_ = 0

  def has_hint(self): return self.has_hint_

  def offset(self): return self.offset_

  def set_offset(self, x):
    self.has_offset_ = 1
    self.offset_ = x

  def clear_offset(self):
    if self.has_offset_:
      self.has_offset_ = 0
      self.offset_ = 0

  def has_offset(self): return self.has_offset_

  def limit(self): return self.limit_

  def set_limit(self, x):
    self.has_limit_ = 1
    self.limit_ = x

  def clear_limit(self):
    if self.has_limit_:
      self.has_limit_ = 0
      self.limit_ = 0

  def has_limit(self): return self.has_limit_

  def composite_index_size(self): return len(self.composite_index_)
  def composite_index_list(self): return self.composite_index_

  def composite_index(self, i):
    return self.composite_index_[i]

  def mutable_composite_index(self, i):
    return self.composite_index_[i]

  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x

  def clear_composite_index(self):
    self.composite_index_ = []

  def require_perfect_plan(self): return self.require_perfect_plan_

  def set_require_perfect_plan(self, x):
    self.has_require_perfect_plan_ = 1
    self.require_perfect_plan_ = x

  def clear_require_perfect_plan(self):
    if self.has_require_perfect_plan_:
      self.has_require_perfect_plan_ = 0
      self.require_perfect_plan_ = 0

  def has_require_perfect_plan(self): return self.has_require_perfect_plan_

  def MergeFrom(self, x):
    """Copy every set field of another Query into this one."""
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    if (x.has_kind()): self.set_kind(x.kind())
    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
    for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i))
    if (x.has_search_query()): self.set_search_query(x.search_query())
    for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
    if (x.has_hint()): self.set_hint(x.hint())
    if (x.has_offset()): self.set_offset(x.offset())
    if (x.has_limit()): self.set_limit(x.limit())
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, else 0."""
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_kind_ != x.has_kind_: return 0
    if self.has_kind_ and self.kind_ != x.kind_: return 0
    if self.has_ancestor_ != x.has_ancestor_: return 0
    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
    if len(self.filter_) != len(x.filter_): return 0
    for e1, e2 in zip(self.filter_, x.filter_):
      if e1 != e2: return 0
    if self.has_search_query_ != x.has_search_query_: return 0
    if self.has_search_query_ and self.search_query_ != x.search_query_: return 0
    if len(self.order_) != len(x.order_): return 0
    for e1, e2 in zip(self.order_, x.order_):
      if e1 != e2: return 0
    if self.has_hint_ != x.has_hint_: return 0
    if self.has_hint_ and self.hint_ != x.hint_: return 0
    if self.has_offset_ != x.has_offset_: return 0
    if self.has_offset_ and self.offset_ != x.offset_: return 0
    if self.has_limit_ != x.has_limit_: return 0
    if self.has_limit_ and self.limit_ != x.limit_: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
    if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required app is set and all submessages are initialized."""
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
    for p in self.filter_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.order_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size in bytes; group fields cost their start+end tags."""
    n = 0
    n += self.lengthString(len(self.app_))
    if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
    n += 2 * len(self.filter_)
    for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize()
    if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
    n += 2 * len(self.order_)
    for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
    if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
    n += 2 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_require_perfect_plan_): n += 3
    return n + 1

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized.

    Filter and Order are groups: each element is bracketed by its
    STARTGROUP/ENDGROUP tag pair (35/36 and 75/76 respectively).
    """
    out.putVarInt32(10)
    out.putPrefixedString(self.app_)
    if (self.has_kind_):
      out.putVarInt32(26)
      out.putPrefixedString(self.kind_)
    for i in xrange(len(self.filter_)):
      out.putVarInt32(35)
      self.filter_[i].OutputUnchecked(out)
      out.putVarInt32(36)
    if (self.has_search_query_):
      out.putVarInt32(66)
      out.putPrefixedString(self.search_query_)
    for i in xrange(len(self.order_)):
      out.putVarInt32(75)
      self.order_[i].OutputUnchecked(out)
      out.putVarInt32(76)
    if (self.has_offset_):
      out.putVarInt32(96)
      out.putVarInt32(self.offset_)
    if (self.has_limit_):
      out.putVarInt32(128)
      out.putVarInt32(self.limit_)
    if (self.has_ancestor_):
      out.putVarInt32(138)
      out.putVarInt32(self.ancestor_.ByteSize())
      self.ancestor_.OutputUnchecked(out)
    if (self.has_hint_):
      out.putVarInt32(144)
      out.putVarInt32(self.hint_)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(154)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
    if (self.has_require_perfect_plan_):
      out.putVarInt32(160)
      out.putBoolean(self.require_perfect_plan_)

  def Clear(self):
    self.clear_app()
    self.clear_kind()
    self.clear_ancestor()
    self.clear_filter()
    self.clear_search_query()
    self.clear_order()
    self.clear_hint()
    self.clear_offset()
    self.clear_limit()
    self.clear_composite_index()
    self.clear_require_perfect_plan()

  def TryMerge(self, d):
    """Decode fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_kind(d.getPrefixedString())
        continue
      if tt == 35:
        # STARTGROUP for Filter; the group's TryMerge consumes up to tag 36.
        self.add_filter().TryMerge(d)
        continue
      if tt == 66:
        self.set_search_query(d.getPrefixedString())
        continue
      if tt == 75:
        # STARTGROUP for Order; the group's TryMerge consumes up to tag 76.
        self.add_order().TryMerge(d)
        continue
      if tt == 96:
        self.set_offset(d.getVarInt32())
        continue
      if tt == 128:
        self.set_limit(d.getVarInt32())
        continue
      if tt == 138:
        # Length-delimited nested message: decode from a bounded sub-window.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_ancestor().TryMerge(tmp)
        continue
      if tt == 144:
        self.set_hint(d.getVarInt32())
        continue
      if tt == 154:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 160:
        self.set_require_perfect_plan(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug dump; printElemNumber adds (n) indices to repeats."""
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
    if self.has_ancestor_:
      res+=prefix+"ancestor <\n"
      res+=self.ancestor_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.filter_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Filter%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_))
    cnt=0
    for e in self.order_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Order%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
    return res

  # Field numbers (k<field> = proto field number).
  kapp = 1
  kkind = 3
  kancestor = 17
  kFilterGroup = 4
  kFilterop = 6
  kFilterproperty = 14
  ksearch_query = 8
  kOrderGroup = 9
  kOrderproperty = 10
  kOrderdirection = 11
  khint = 18
  koffset = 12
  klimit = 16
  kcomposite_index = 19
  krequire_perfect_plan = 20

  # Text names and wire types indexed by field number (0 is the error slot;
  # None/MAX_TYPE mark unused field numbers).
  _TEXT = (
   "ErrorCode",
   "app",
   None,
   "kind",
   "Filter",
   None,
   "op",
   None,
   "search_query",
   "Order",
   "property",
   "direction",
   "offset",
   None,
   "property",
   None,
   "limit",
   "ancestor",
   "hint",
   "composite_index",
   "require_perfect_plan",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class QueryExplanation(ProtocolBuffer.ProtocolMessage):
  """Message describing native execution of a query.

  Fields: optional bool native_ancestor (field 1), repeated Index
  native_index (field 2), optional int32 native_offset (field 3) and
  native_limit (field 4).  All fields optional/repeated; no required ones.
  """

  # Presence flags and defaults for the optional scalar fields.
  has_native_ancestor_ = 0
  native_ancestor_ = 0
  has_native_offset_ = 0
  native_offset_ = 0
  has_native_limit_ = 0
  native_limit_ = 0

  def __init__(self, contents=None):
    self.native_index_ = []
    if contents is not None: self.MergeFromString(contents)

  def native_ancestor(self): return self.native_ancestor_

  def set_native_ancestor(self, x):
    self.has_native_ancestor_ = 1
    self.native_ancestor_ = x

  def clear_native_ancestor(self):
    if self.has_native_ancestor_:
      self.has_native_ancestor_ = 0
      self.native_ancestor_ = 0

  def has_native_ancestor(self): return self.has_native_ancestor_

  def native_index_size(self): return len(self.native_index_)
  def native_index_list(self): return self.native_index_

  def native_index(self, i):
    return self.native_index_[i]

  def mutable_native_index(self, i):
    return self.native_index_[i]

  def add_native_index(self):
    x = Index()
    self.native_index_.append(x)
    return x

  def clear_native_index(self):
    self.native_index_ = []

  def native_offset(self): return self.native_offset_

  def set_native_offset(self, x):
    self.has_native_offset_ = 1
    self.native_offset_ = x

  def clear_native_offset(self):
    if self.has_native_offset_:
      self.has_native_offset_ = 0
      self.native_offset_ = 0

  def has_native_offset(self): return self.has_native_offset_

  def native_limit(self): return self.native_limit_

  def set_native_limit(self, x):
    self.has_native_limit_ = 1
    self.native_limit_ = x

  def clear_native_limit(self):
    if self.has_native_limit_:
      self.has_native_limit_ = 0
      self.native_limit_ = 0

  def has_native_limit(self): return self.has_native_limit_

  def MergeFrom(self, x):
    """Copy every set field of another QueryExplanation into this one."""
    assert x is not self
    if (x.has_native_ancestor()): self.set_native_ancestor(x.native_ancestor())
    for i in xrange(x.native_index_size()): self.add_native_index().CopyFrom(x.native_index(i))
    if (x.has_native_offset()): self.set_native_offset(x.native_offset())
    if (x.has_native_limit()): self.set_native_limit(x.native_limit())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, else 0."""
    if x is self: return 1
    if self.has_native_ancestor_ != x.has_native_ancestor_: return 0
    if self.has_native_ancestor_ and self.native_ancestor_ != x.native_ancestor_: return 0
    if len(self.native_index_) != len(x.native_index_): return 0
    for e1, e2 in zip(self.native_index_, x.native_index_):
      if e1 != e2: return 0
    if self.has_native_offset_ != x.has_native_offset_: return 0
    if self.has_native_offset_ and self.native_offset_ != x.native_offset_: return 0
    if self.has_native_limit_ != x.has_native_limit_: return 0
    if self.has_native_limit_ and self.native_limit_ != x.native_limit_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff every native_index submessage is initialized."""
    initialized = 1
    for p in self.native_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size in bytes."""
    n = 0
    if (self.has_native_ancestor_): n += 2
    n += 1 * len(self.native_index_)
    for i in xrange(len(self.native_index_)): n += self.lengthString(self.native_index_[i].ByteSize())
    if (self.has_native_offset_): n += 1 + self.lengthVarInt64(self.native_offset_)
    if (self.has_native_limit_): n += 1 + self.lengthVarInt64(self.native_limit_)
    return n + 0

  def Clear(self):
    self.clear_native_ancestor()
    self.clear_native_index()
    self.clear_native_offset()
    self.clear_native_limit()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized; field order is fixed."""
    if (self.has_native_ancestor_):
      out.putVarInt32(8)
      out.putBoolean(self.native_ancestor_)
    for i in xrange(len(self.native_index_)):
      out.putVarInt32(18)
      out.putVarInt32(self.native_index_[i].ByteSize())
      self.native_index_[i].OutputUnchecked(out)
    if (self.has_native_offset_):
      out.putVarInt32(24)
      out.putVarInt32(self.native_offset_)
    if (self.has_native_limit_):
      out.putVarInt32(32)
      out.putVarInt32(self.native_limit_)

  def TryMerge(self, d):
    """Decode fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_native_ancestor(d.getBoolean())
        continue
      if tt == 18:
        # Length-delimited nested message: decode from a bounded sub-window.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_native_index().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_native_offset(d.getVarInt32())
        continue
      if tt == 32:
        self.set_native_limit(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Text-format debug dump; printElemNumber adds (n) indices to repeats."""
    res=""
    if self.has_native_ancestor_: res+=prefix+("native_ancestor: %s\n" % self.DebugFormatBool(self.native_ancestor_))
    cnt=0
    for e in self.native_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("native_index%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_native_offset_: res+=prefix+("native_offset: %s\n" % self.DebugFormatInt32(self.native_offset_))
    if self.has_native_limit_: res+=prefix+("native_limit: %s\n" % self.DebugFormatInt32(self.native_limit_))
    return res

  # Field numbers.
  knative_ancestor = 1
  knative_index = 2
  knative_offset = 3
  knative_limit = 4

  _TEXT = (
   "ErrorCode",
   "native_ancestor",
   "native_index",
   "native_offset",
   "native_limit",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Cursor(ProtocolBuffer.ProtocolMessage):
  """Datastore query cursor message: a single required fixed64 cursor id.

  Wire format: field 1, wire type 1 (64-bit), i.e. tag byte 9.
  """

  # Presence flag and default value for the required cursor field.
  has_cursor_ = 0
  cursor_ = 0

  def __init__(self, contents=None):
    """Optionally initialize from a serialized byte string."""
    if contents is not None:
      self.MergeFromString(contents)

  def cursor(self):
    return self.cursor_

  def set_cursor(self, x):
    self.has_cursor_ = 1
    self.cursor_ = x

  def clear_cursor(self):
    if self.has_cursor_:
      self.has_cursor_ = 0
      self.cursor_ = 0

  def has_cursor(self):
    return self.has_cursor_

  def MergeFrom(self, x):
    """Copy every set field of another Cursor into this one."""
    assert x is not self
    if x.has_cursor():
      self.set_cursor(x.cursor())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, else 0."""
    if x is self:
      return 1
    if self.has_cursor_ != x.has_cursor_:
      return 0
    if self.has_cursor_ and self.cursor_ != x.cursor_:
      return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required cursor field has been set."""
    if self.has_cursor_:
      return 1
    if debug_strs is not None:
      debug_strs.append('Required field: cursor not set.')
    return 0

  def ByteSize(self):
    # One tag byte plus an 8-byte fixed64 value.
    return 9

  def Clear(self):
    self.clear_cursor()

  def OutputUnchecked(self, out):
    out.putVarInt32(9)
    out.put64(self.cursor_)

  def TryMerge(self, d):
    """Decode fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 9:
        self.set_cursor(d.get64())
      elif tag == 0:
        # Tag 0 is never valid; treat as corrupt input.
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    pieces = []
    if self.has_cursor_:
      pieces.append(prefix + ("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_)))
    return "".join(pieces)

  kcursor = 1

  _TEXT = (
   "ErrorCode",
   "cursor",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.DOUBLE,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Error(ProtocolBuffer.ProtocolMessage):
  """Empty message whose ErrorCode enum names the datastore error codes."""

  # ErrorCode enum values.
  BAD_REQUEST = 1
  CONCURRENT_TRANSACTION = 2
  INTERNAL_ERROR = 3
  NEED_INDEX = 4
  TIMEOUT = 5

  _ErrorCode_NAMES = {
    1: "BAD_REQUEST",
    2: "CONCURRENT_TRANSACTION",
    3: "INTERNAL_ERROR",
    4: "NEED_INDEX",
    5: "TIMEOUT",
  }

  def ErrorCode_Name(cls, x):
    """Return the symbolic name for an ErrorCode value, or "" if unknown."""
    return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    """Optionally initialize from a serialized byte string."""
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    # No fields to merge; just guard against self-merge.
    assert x is not self

  def Equals(self, x):
    # All Error instances are equal: the message carries no fields.
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields.
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    """Skip all fields; the message defines none."""
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Cost(ProtocolBuffer.ProtocolMessage):
  """Write-cost message: one optional varint field, index_writes (field 1)."""

  # Presence flag and default value for the optional index_writes field.
  has_index_writes_ = 0
  index_writes_ = 0

  def __init__(self, contents=None):
    """Optionally initialize from a serialized byte string."""
    if contents is not None:
      self.MergeFromString(contents)

  def index_writes(self):
    return self.index_writes_

  def set_index_writes(self, x):
    self.has_index_writes_ = 1
    self.index_writes_ = x

  def clear_index_writes(self):
    if self.has_index_writes_:
      self.has_index_writes_ = 0
      self.index_writes_ = 0

  def has_index_writes(self):
    return self.has_index_writes_

  def MergeFrom(self, x):
    """Copy every set field of another Cost into this one."""
    assert x is not self
    if x.has_index_writes():
      self.set_index_writes(x.index_writes())

  def Equals(self, x):
    """Field-by-field equality; returns 1 when equal, else 0."""
    if x is self:
      return 1
    if self.has_index_writes_ != x.has_index_writes_:
      return 0
    if self.has_index_writes_ and self.index_writes_ != x.index_writes_:
      return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields.
    return 1

  def ByteSize(self):
    # One tag byte plus the varint payload, when the field is set.
    if self.has_index_writes_:
      return 1 + self.lengthVarInt64(self.index_writes_)
    return 0

  def Clear(self):
    self.clear_index_writes()

  def OutputUnchecked(self, out):
    if self.has_index_writes_:
      out.putVarInt32(8)
      out.putVarInt32(self.index_writes_)

  def TryMerge(self, d):
    """Decode fields from decoder d, skipping unknown tags."""
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 8:
        self.set_index_writes(d.getVarInt32())
      elif tag == 0:
        # Tag 0 is never valid; treat as corrupt input.
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    pieces = []
    if self.has_index_writes_:
      pieces.append(prefix + ("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_)))
    return "".join(pieces)

  kindex_writes = 1

  _TEXT = (
   "ErrorCode",
   "index_writes",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class GetRequest(ProtocolBuffer.ProtocolMessage):
has_transaction_ = 0
transaction_ = None
def __init__(self, contents=None):
self.key_ = []
self.lazy_init_lock_ = thread.allocate_lock()
if contents is not None: self.MergeFromString(contents)
def key_size(self): return len(self.key_)
def key_list(self): return self.key_
def key(self, i):
return self.key_[i]
def mutable_key(self, i):
return self.key_[i]
def add_key(self):
x = Reference()
self.key_.append(x)
return x
def clear_key(self):
self.key_ = []
def transaction(self):
if self.transaction_ is None:
self.lazy_init_lock_.acquire()
try:
if self.transaction_ is None: self.transaction_ = Transaction()
finally:
self.lazy_init_lock_.release()
return self.transaction_
def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
def clear_transaction(self):
if self.has_transaction_:
self.has_transaction_ = 0;
if self.transaction_ is not None: self.transaction_.Clear()
def has_transaction(self): return self.has_transaction_
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
def Equals(self, x):
if x is self: return 1
if len(self.key_) != len(x.key_): return 0
for e1, e2 in zip(self.key_, x.key_):
if e1 != e2: return 0
if self.has_transaction_ != x.has_transaction_: return 0
if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.key_:
if not p.IsInitialized(debug_strs): initialized=0
if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.key_)
for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
return n + 0
def Clear(self):
self.clear_key()
self.clear_transaction()
def OutputUnchecked(self, out):
for i in xrange(len(self.key_)):
out.putVarInt32(10)
out.putVarInt32(self.key_[i].ByteSize())
self.key_[i].OutputUnchecked(out)
if (self.has_transaction_):
out.putVarInt32(18)
out.putVarInt32(self.transaction_.ByteSize())
self.transaction_.OutputUnchecked(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_key().TryMerge(tmp)
continue
if tt == 18:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.mutable_transaction().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
kkey = 1
ktransaction = 2
_TEXT = (
"ErrorCode",
"key",
"transaction",
)
_TYPES = (
ProtocolBuffer.Encoder.NUMERIC,
ProtocolBuffer.Encoder.STRING,
ProtocolBuffer.Encoder.STRING,
)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
  """The repeated `Entity` group inside GetResponse.

  Holds one optional EntityProto result (absent when the corresponding
  requested key was not found).
  """
  has_entity_ = 0
  entity_ = None
  def __init__(self, contents=None):
    # Lock guards the lazy creation of entity_ against concurrent callers.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def entity(self):
    # Lazily allocate the submessage; double-checked under the lock.
    if self.entity_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.entity_ is None: self.entity_ = EntityProto()
      finally:
        self.lazy_init_lock_.release()
    return self.entity_
  def mutable_entity(self): self.has_entity_ = 1; return self.entity()
  def clear_entity(self):
    if self.has_entity_:
      self.has_entity_ = 0;
      if self.entity_ is not None: self.entity_.Clear()
  def has_entity(self): return self.has_entity_
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())
  def Equals(self, x):
    if x is self: return 1
    if self.has_entity_ != x.has_entity_: return 0
    if self.has_entity_ and self.entity_ != x.entity_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_entity()
  def OutputUnchecked(self, out):
    # Wire tag 18 == (field 2, length-delimited).
    if (self.has_entity_):
      out.putVarInt32(18)
      out.putVarInt32(self.entity_.ByteSize())
      self.entity_.OutputUnchecked(out)
  def TryMerge(self, d):
    # Group member: loop until the enclosing group's ENDGROUP tag (12).
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_entity().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    if self.has_entity_:
      res+=prefix+"entity <\n"
      res+=self.entity_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
class GetResponse(ProtocolBuffer.ProtocolMessage):
  """Response to a datastore Get: a repeated group `Entity`, one per
  requested key (see GetResponse_Entity)."""
  def __init__(self, contents=None):
    self.entity_ = []
    if contents is not None: self.MergeFromString(contents)
  def entity_size(self): return len(self.entity_)
  def entity_list(self): return self.entity_
  def entity(self, i):
    return self.entity_[i]
  def mutable_entity(self, i):
    return self.entity_[i]
  def add_entity(self):
    # Append a fresh group element and return it for the caller to fill in.
    x = GetResponse_Entity()
    self.entity_.append(x)
    return x
  def clear_entity(self):
    self.entity_ = []
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
  def Equals(self, x):
    if x is self: return 1
    if len(self.entity_) != len(x.entity_): return 0
    for e1, e2 in zip(self.entity_, x.entity_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.entity_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # Groups cost 2 tag bytes each (STARTGROUP + ENDGROUP), no length prefix.
    n = 0
    n += 2 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
    return n + 0
  def Clear(self):
    self.clear_entity()
  def OutputUnchecked(self, out):
    # Tags 11/12 == STARTGROUP/ENDGROUP for field 1.
    for i in xrange(len(self.entity_)):
      out.putVarInt32(11)
      self.entity_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    # The group element consumes its own ENDGROUP tag (12) in its TryMerge.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_entity().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    cnt=0
    for e in self.entity_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Entity%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  # Field numbers and text-format metadata for the generic machinery;
  # index 0 of _TEXT/_TYPES is the unused "ErrorCode" slot.
  kEntityGroup = 1
  kEntityentity = 2
  _TEXT = (
   "ErrorCode",
   "Entity",
   "entity",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class PutRequest(ProtocolBuffer.ProtocolMessage):
  """Datastore Put request.

  Fields: repeated entity (EntityProto, field 1), optional transaction
  (field 2), repeated composite_index (field 3), optional bool trusted
  (field 4, default 0).
  """
  has_transaction_ = 0
  transaction_ = None
  has_trusted_ = 0
  trusted_ = 0
  def __init__(self, contents=None):
    self.entity_ = []
    self.composite_index_ = []
    # Lock guards the lazy creation of transaction_ (see transaction()).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def entity_size(self): return len(self.entity_)
  def entity_list(self): return self.entity_
  def entity(self, i):
    return self.entity_[i]
  def mutable_entity(self, i):
    return self.entity_[i]
  def add_entity(self):
    # Append a fresh element and return it for the caller to fill in.
    x = EntityProto()
    self.entity_.append(x)
    return x
  def clear_entity(self):
    self.entity_ = []
  def transaction(self):
    # Lazily allocate the submessage; double-checked under the lock.
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_
  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
  def clear_transaction(self):
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()
  def has_transaction(self): return self.has_transaction_
  def composite_index_size(self): return len(self.composite_index_)
  def composite_index_list(self): return self.composite_index_
  def composite_index(self, i):
    return self.composite_index_[i]
  def mutable_composite_index(self, i):
    return self.composite_index_[i]
  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x
  def clear_composite_index(self):
    self.composite_index_ = []
  def trusted(self): return self.trusted_
  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x
  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0
  def has_trusted(self): return self.has_trusted_
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_trusted()): self.set_trusted(x.trusted())
  def Equals(self, x):
    if x is self: return 1
    if len(self.entity_) != len(x.entity_): return 0
    for e1, e2 in zip(self.entity_, x.entity_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.entity_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    n = 0
    n += 1 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_trusted_): n += 2
    return n + 0
  def Clear(self):
    self.clear_entity()
    self.clear_transaction()
    self.clear_composite_index()
    self.clear_trusted()
  def OutputUnchecked(self, out):
    # Tags: 10/18/26 == fields 1/2/3 length-delimited; 32 == field 4 varint.
    for i in xrange(len(self.entity_)):
      out.putVarInt32(10)
      out.putVarInt32(self.entity_[i].ByteSize())
      self.entity_[i].OutputUnchecked(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(26)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_entity().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 32:
        self.set_trusted(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    cnt=0
    for e in self.entity_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("entity%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    return res
  # Field numbers and text-format metadata for the generic machinery;
  # index 0 of _TEXT/_TYPES is the unused "ErrorCode" slot.
  kentity = 1
  ktransaction = 2
  kcomposite_index = 3
  ktrusted = 4
  _TEXT = (
   "ErrorCode",
   "entity",
   "transaction",
   "composite_index",
   "trusted",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class PutResponse(ProtocolBuffer.ProtocolMessage):
  """Datastore Put response: the written keys (Reference, field 1) plus an
  optional Cost (field 2)."""
  has_cost_ = 0
  cost_ = None
  def __init__(self, contents=None):
    self.key_ = []
    # Lock guards the lazy creation of cost_ (see cost()).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_
  def key(self, i):
    return self.key_[i]
  def mutable_key(self, i):
    return self.key_[i]
  def add_key(self):
    # Append a fresh element and return it for the caller to fill in.
    x = Reference()
    self.key_.append(x)
    return x
  def clear_key(self):
    self.key_ = []
  def cost(self):
    # Lazily allocate the submessage; double-checked under the lock.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_
  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
  def clear_cost(self):
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()
  def has_cost(self): return self.has_cost_
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
  def Equals(self, x):
    if x is self: return 1
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_key()
    self.clear_cost()
  def OutputUnchecked(self, out):
    # Tags: 10 == field 1 (key), 18 == field 2 (cost); both length-delimited.
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    if (self.has_cost_):
      out.putVarInt32(18)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field numbers and text-format metadata for the generic machinery.
  kkey = 1
  kcost = 2
  _TEXT = (
   "ErrorCode",
   "key",
   "cost",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
  """Datastore Delete request.

  Fields: repeated key (Reference, field 6), optional transaction
  (field 5), optional bool trusted (field 4, default 0). Note the
  non-sequential field numbers -- see kkey/ktransaction/ktrusted below.
  """
  has_transaction_ = 0
  transaction_ = None
  has_trusted_ = 0
  trusted_ = 0
  def __init__(self, contents=None):
    self.key_ = []
    # Lock guards the lazy creation of transaction_ (see transaction()).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_
  def key(self, i):
    return self.key_[i]
  def mutable_key(self, i):
    return self.key_[i]
  def add_key(self):
    # Append a fresh element and return it for the caller to fill in.
    x = Reference()
    self.key_.append(x)
    return x
  def clear_key(self):
    self.key_ = []
  def transaction(self):
    # Lazily allocate the submessage; double-checked under the lock.
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_
  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()
  def clear_transaction(self):
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()
  def has_transaction(self): return self.has_transaction_
  def trusted(self): return self.trusted_
  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x
  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0
  def has_trusted(self): return self.has_trusted_
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    if (x.has_trusted()): self.set_trusted(x.trusted())
  def Equals(self, x):
    if x is self: return 1
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    if (self.has_trusted_): n += 2
    return n + 0
  def Clear(self):
    self.clear_key()
    self.clear_transaction()
    self.clear_trusted()
  def OutputUnchecked(self, out):
    # Tags: 32 == field 4 varint (trusted); 42 == field 5 (transaction)
    # and 50 == field 6 (key), both length-delimited.
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_transaction_):
      out.putVarInt32(42)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    for i in xrange(len(self.key_)):
      out.putVarInt32(50)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 32:
        self.set_trusted(d.getBoolean())
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 50:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    return res
  # Field numbers (non-sequential: 1-3 are retired/reserved slots, hence
  # the None/MAX_TYPE placeholders in _TEXT/_TYPES below).
  kkey = 6
  ktransaction = 5
  ktrusted = 4
  _TEXT = (
   "ErrorCode",
   None,
   None,
   None,
   "trusted",
   "transaction",
   "key",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
  """Datastore Delete response: a single optional Cost (field 1)."""
  has_cost_ = 0
  cost_ = None
  def __init__(self, contents=None):
    # Lock guards the lazy creation of cost_ (see cost()).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def cost(self):
    # Lazily allocate the submessage; double-checked under the lock.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_
  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
  def clear_cost(self):
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()
  def has_cost(self): return self.has_cost_
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
  def Equals(self, x):
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_cost()
  def OutputUnchecked(self, out):
    # Wire tag 10 == (field 1, length-delimited).
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field numbers and text-format metadata for the generic machinery.
  kcost = 1
  _TEXT = (
   "ErrorCode",
   "cost",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class NextRequest(ProtocolBuffer.ProtocolMessage):
  """Request for the next batch of query results.

  Fields: required cursor (field 1), optional int32 count (field 2,
  default 1).
  """
  has_cursor_ = 0
  has_count_ = 0
  count_ = 1
  def __init__(self, contents=None):
    # cursor is a required field, so it is allocated eagerly (no lazy lock).
    self.cursor_ = Cursor()
    if contents is not None: self.MergeFromString(contents)
  def cursor(self): return self.cursor_
  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_
  def clear_cursor(self):self.has_cursor_ = 0; self.cursor_.Clear()
  def has_cursor(self): return self.has_cursor_
  def count(self): return self.count_
  def set_count(self, x):
    self.has_count_ = 1
    self.count_ = x
  def clear_count(self):
    if self.has_count_:
      self.has_count_ = 0
      self.count_ = 1
  def has_count(self): return self.has_count_
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
    if (x.has_count()): self.set_count(x.count())
  def Equals(self, x):
    if x is self: return 1
    if self.has_cursor_ != x.has_cursor_: return 0
    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
    if self.has_count_ != x.has_count_: return 0
    if self.has_count_ and self.count_ != x.count_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # cursor is required: missing presence is an initialization error.
    initialized = 1
    if (not self.has_cursor_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: cursor not set.')
    elif not self.cursor_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    # Trailing "+ 1" accounts for the required cursor field's tag byte.
    n = 0
    n += self.lengthString(self.cursor_.ByteSize())
    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
    return n + 1
  def Clear(self):
    self.clear_cursor()
    self.clear_count()
  def OutputUnchecked(self, out):
    # Tags: 10 == field 1 length-delimited; 16 == field 2 varint.
    out.putVarInt32(10)
    out.putVarInt32(self.cursor_.ByteSize())
    self.cursor_.OutputUnchecked(out)
    if (self.has_count_):
      out.putVarInt32(16)
      out.putVarInt32(self.count_)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cursor().TryMerge(tmp)
        continue
      if tt == 16:
        self.set_count(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    if self.has_cursor_:
      res+=prefix+"cursor <\n"
      res+=self.cursor_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
    return res
  # Field numbers and text-format metadata for the generic machinery.
  kcursor = 1
  kcount = 2
  _TEXT = (
   "ErrorCode",
   "cursor",
   "count",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class QueryResult(ProtocolBuffer.ProtocolMessage):
  """A batch of query results.

  Fields: optional cursor (field 1), repeated result (EntityProto,
  field 2), required bool more_results (field 3).
  """
  has_cursor_ = 0
  cursor_ = None
  has_more_results_ = 0
  more_results_ = 0
  def __init__(self, contents=None):
    self.result_ = []
    # Lock guards the lazy creation of cursor_ (see cursor()).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def cursor(self):
    # Lazily allocate the submessage; double-checked under the lock.
    if self.cursor_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cursor_ is None: self.cursor_ = Cursor()
      finally:
        self.lazy_init_lock_.release()
    return self.cursor_
  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor()
  def clear_cursor(self):
    if self.has_cursor_:
      self.has_cursor_ = 0;
      if self.cursor_ is not None: self.cursor_.Clear()
  def has_cursor(self): return self.has_cursor_
  def result_size(self): return len(self.result_)
  def result_list(self): return self.result_
  def result(self, i):
    return self.result_[i]
  def mutable_result(self, i):
    return self.result_[i]
  def add_result(self):
    # Append a fresh element and return it for the caller to fill in.
    x = EntityProto()
    self.result_.append(x)
    return x
  def clear_result(self):
    self.result_ = []
  def more_results(self): return self.more_results_
  def set_more_results(self, x):
    self.has_more_results_ = 1
    self.more_results_ = x
  def clear_more_results(self):
    if self.has_more_results_:
      self.has_more_results_ = 0
      self.more_results_ = 0
  def has_more_results(self): return self.has_more_results_
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
    for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
    if (x.has_more_results()): self.set_more_results(x.more_results())
  def Equals(self, x):
    if x is self: return 1
    if self.has_cursor_ != x.has_cursor_: return 0
    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
    if len(self.result_) != len(x.result_): return 0
    for e1, e2 in zip(self.result_, x.result_):
      if e1 != e2: return 0
    if self.has_more_results_ != x.has_more_results_: return 0
    if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # more_results is required: missing presence is an initialization error.
    initialized = 1
    if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0
    for p in self.result_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (not self.has_more_results_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: more_results not set.')
    return initialized
  def ByteSize(self):
    # Trailing "+ 2" accounts for the required more_results tag + bool byte.
    n = 0
    if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
    n += 1 * len(self.result_)
    for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
    return n + 2
  def Clear(self):
    self.clear_cursor()
    self.clear_result()
    self.clear_more_results()
  def OutputUnchecked(self, out):
    # Tags: 10/18 == fields 1/2 length-delimited; 24 == field 3 varint.
    if (self.has_cursor_):
      out.putVarInt32(10)
      out.putVarInt32(self.cursor_.ByteSize())
      self.cursor_.OutputUnchecked(out)
    for i in xrange(len(self.result_)):
      out.putVarInt32(18)
      out.putVarInt32(self.result_[i].ByteSize())
      self.result_[i].OutputUnchecked(out)
    out.putVarInt32(24)
    out.putBoolean(self.more_results_)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cursor().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_result().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_more_results(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    if self.has_cursor_:
      res+=prefix+"cursor <\n"
      res+=self.cursor_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.result_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("result%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
    return res
  # Field numbers and text-format metadata for the generic machinery.
  kcursor = 1
  kresult = 2
  kmore_results = 3
  _TEXT = (
   "ErrorCode",
   "cursor",
   "result",
   "more_results",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Schema(ProtocolBuffer.ProtocolMessage):
  """Datastore schema: repeated kind (field 1).

  Each element is an EntityProto describing one kind.
  """
  def __init__(self, contents=None):
    self.kind_ = []
    if contents is not None: self.MergeFromString(contents)
  def kind_size(self): return len(self.kind_)
  def kind_list(self): return self.kind_
  def kind(self, i):
    return self.kind_[i]
  def mutable_kind(self, i):
    return self.kind_[i]
  def add_kind(self):
    # Append a fresh element and return it for the caller to fill in.
    x = EntityProto()
    self.kind_.append(x)
    return x
  def clear_kind(self):
    self.kind_ = []
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    for i in xrange(x.kind_size()): self.add_kind().CopyFrom(x.kind(i))
  def Equals(self, x):
    if x is self: return 1
    if len(self.kind_) != len(x.kind_): return 0
    for e1, e2 in zip(self.kind_, x.kind_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.kind_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    n = 0
    n += 1 * len(self.kind_)
    for i in xrange(len(self.kind_)): n += self.lengthString(self.kind_[i].ByteSize())
    return n + 0
  def Clear(self):
    self.clear_kind()
  def OutputUnchecked(self, out):
    # Wire tag 10 == (field 1, length-delimited).
    for i in xrange(len(self.kind_)):
      out.putVarInt32(10)
      out.putVarInt32(self.kind_[i].ByteSize())
      self.kind_[i].OutputUnchecked(out)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_kind().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    cnt=0
    for e in self.kind_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("kind%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  # Field numbers and text-format metadata for the generic machinery.
  kkind = 1
  _TEXT = (
   "ErrorCode",
   "kind",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class CompositeIndices(ProtocolBuffer.ProtocolMessage):
  """A list of composite indexes: repeated index (CompositeIndex, field 1)."""
  def __init__(self, contents=None):
    self.index_ = []
    if contents is not None: self.MergeFromString(contents)
  def index_size(self): return len(self.index_)
  def index_list(self): return self.index_
  def index(self, i):
    return self.index_[i]
  def mutable_index(self, i):
    return self.index_[i]
  def add_index(self):
    # Append a fresh element and return it for the caller to fill in.
    x = CompositeIndex()
    self.index_.append(x)
    return x
  def clear_index(self):
    self.index_ = []
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
  def Equals(self, x):
    if x is self: return 1
    if len(self.index_) != len(x.index_): return 0
    for e1, e2 in zip(self.index_, x.index_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.index_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    n = 0
    n += 1 * len(self.index_)
    for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
    return n + 0
  def Clear(self):
    self.clear_index()
  def OutputUnchecked(self, out):
    # Wire tag 10 == (field 1, length-delimited).
    for i in xrange(len(self.index_)):
      out.putVarInt32(10)
      out.putVarInt32(self.index_[i].ByteSize())
      self.index_[i].OutputUnchecked(out)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_index().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    cnt=0
    for e in self.index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("index%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  # Field numbers and text-format metadata for the generic machinery.
  kindex = 1
  _TEXT = (
   "ErrorCode",
   "index",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class CommitResponse(ProtocolBuffer.ProtocolMessage):
  """Transaction commit response: a single optional Cost (field 1)."""
  has_cost_ = 0
  cost_ = None
  def __init__(self, contents=None):
    # Lock guards the lazy creation of cost_ (see cost()).
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def cost(self):
    # Lazily allocate the submessage; double-checked under the lock.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_
  def mutable_cost(self): self.has_cost_ = 1; return self.cost()
  def clear_cost(self):
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()
  def has_cost(self): return self.has_cost_
  def MergeFrom(self, x):
    """Merge every field set in x into self."""
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())
  def Equals(self, x):
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_cost()
  def OutputUnchecked(self, out):
    # Wire tag 10 == (field 1, length-delimited).
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)
  def TryMerge(self, d):
    """Decode fields from Decoder d, merging into self; skips unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable text-format dump, indented by `prefix`."""
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field numbers and text-format metadata for the generic machinery.
  kcost = 1
  _TEXT = (
   "ErrorCode",
   "cost",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module: one entry per top-level message class.
__all__ = ['Transaction','Query','Query_Filter','Query_Order','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','Schema','CompositeIndices','CommitResponse']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
class PropertyValue_ReferenceValuePathElement(ProtocolBuffer.ProtocolMessage):
  """One element of a reference-value key path (group field 14 inside
  PropertyValue.ReferenceValue): a required kind name ('type') plus an
  optional numeric id or string name.
  """
  has_type_ = 0
  type_ = ""
  has_id_ = 0
  id_ = 0
  has_name_ = 0
  name_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def type(self): return self.type_

  def set_type(self, x):
    self.has_type_ = 1
    self.type_ = x

  def clear_type(self):
    if self.has_type_:
      self.has_type_ = 0
      self.type_ = ""

  def has_type(self): return self.has_type_

  def id(self): return self.id_

  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x

  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0

  def has_id(self): return self.has_id_

  def name(self): return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self): return self.has_name_


  def MergeFrom(self, x):
    """Merge all present fields of x (same message type) into self."""
    assert x is not self
    if (x.has_type()): self.set_type(x.type())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_name()): self.set_name(x.name())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_type_ != x.has_type_: return 0
    if self.has_type_ and self.type_ != x.type_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """'type' is the only required field."""
    initialized = 1
    if (not self.has_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: type not set.')
    return initialized

  def ByteSize(self):
    # Optional fields here use 2-byte tags (field numbers 16/17 need a
    # two-byte varint tag); the trailing +1 covers the 'type' tag byte.
    n = 0
    n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 2 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 2 + self.lengthString(len(self.name_))
    return n + 1

  def Clear(self):
    self.clear_type()
    self.clear_id()
    self.clear_name()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized. Tags: 122 = field 15
    (string), 128 = field 16 (varint), 138 = field 17 (string)."""
    out.putVarInt32(122)
    out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(128)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(138)
      out.putPrefixedString(self.name_)

  def TryMerge(self, d):
    """Parse until the group END tag (116 = field 14, ENDGROUP) is seen."""
    while 1:
      tt = d.getVarInt32()
      if tt == 116: break
      if tt == 122:
        self.set_type(d.getPrefixedString())
        continue
      if tt == 128:
        self.set_id(d.getVarInt64())
        continue
      if tt == 138:
        self.set_name(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    return res
class PropertyValue_PointValue(ProtocolBuffer.ProtocolMessage):
  """Geographic point group (field 5 of PropertyValue): required doubles
  x and y.
  """
  has_x_ = 0
  x_ = 0.0
  has_y_ = 0
  y_ = 0.0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def x(self): return self.x_

  def set_x(self, x):
    self.has_x_ = 1
    self.x_ = x

  def clear_x(self):
    if self.has_x_:
      self.has_x_ = 0
      self.x_ = 0.0

  def has_x(self): return self.has_x_

  def y(self): return self.y_

  def set_y(self, x):
    self.has_y_ = 1
    self.y_ = x

  def clear_y(self):
    if self.has_y_:
      self.has_y_ = 0
      self.y_ = 0.0

  def has_y(self): return self.has_y_


  def MergeFrom(self, x):
    """Merge all present fields of x (same message type) into self."""
    assert x is not self
    if (x.has_x()): self.set_x(x.x())
    if (x.has_y()): self.set_y(x.y())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_x_ != x.has_x_: return 0
    if self.has_x_ and self.x_ != x.x_: return 0
    if self.has_y_ != x.has_y_: return 0
    if self.has_y_ and self.y_ != x.y_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Both x and y are required."""
    initialized = 1
    if (not self.has_x_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: x not set.')
    if (not self.has_y_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: y not set.')
    return initialized

  def ByteSize(self):
    # Fixed size: two 1-byte tags + two 8-byte doubles = 18.
    n = 0
    return n + 18

  def Clear(self):
    self.clear_x()
    self.clear_y()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized. Tags: 49 = field 6
    (fixed64/double), 57 = field 7 (fixed64/double)."""
    out.putVarInt32(49)
    out.putDouble(self.x_)
    out.putVarInt32(57)
    out.putDouble(self.y_)

  def TryMerge(self, d):
    """Parse until the group END tag (44 = field 5, ENDGROUP) is seen."""
    while 1:
      tt = d.getVarInt32()
      if tt == 44: break
      if tt == 49:
        self.set_x(d.getDouble())
        continue
      if tt == 57:
        self.set_y(d.getDouble())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_x_: res+=prefix+("x: %s\n" % self.DebugFormat(self.x_))
    if self.has_y_: res+=prefix+("y: %s\n" % self.DebugFormat(self.y_))
    return res
class PropertyValue_UserValue(ProtocolBuffer.ProtocolMessage):
  """User account group (field 8 of PropertyValue): required email,
  auth_domain and gaiaid; optional nickname.
  """
  has_email_ = 0
  email_ = ""
  has_auth_domain_ = 0
  auth_domain_ = ""
  has_nickname_ = 0
  nickname_ = ""
  has_gaiaid_ = 0
  gaiaid_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def email(self): return self.email_

  def set_email(self, x):
    self.has_email_ = 1
    self.email_ = x

  def clear_email(self):
    if self.has_email_:
      self.has_email_ = 0
      self.email_ = ""

  def has_email(self): return self.has_email_

  def auth_domain(self): return self.auth_domain_

  def set_auth_domain(self, x):
    self.has_auth_domain_ = 1
    self.auth_domain_ = x

  def clear_auth_domain(self):
    if self.has_auth_domain_:
      self.has_auth_domain_ = 0
      self.auth_domain_ = ""

  def has_auth_domain(self): return self.has_auth_domain_

  def nickname(self): return self.nickname_

  def set_nickname(self, x):
    self.has_nickname_ = 1
    self.nickname_ = x

  def clear_nickname(self):
    if self.has_nickname_:
      self.has_nickname_ = 0
      self.nickname_ = ""

  def has_nickname(self): return self.has_nickname_

  def gaiaid(self): return self.gaiaid_

  def set_gaiaid(self, x):
    self.has_gaiaid_ = 1
    self.gaiaid_ = x

  def clear_gaiaid(self):
    if self.has_gaiaid_:
      self.has_gaiaid_ = 0
      self.gaiaid_ = 0

  def has_gaiaid(self): return self.has_gaiaid_


  def MergeFrom(self, x):
    """Merge all present fields of x (same message type) into self."""
    assert x is not self
    if (x.has_email()): self.set_email(x.email())
    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
    if (x.has_nickname()): self.set_nickname(x.nickname())
    if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_email_ != x.has_email_: return 0
    if self.has_email_ and self.email_ != x.email_: return 0
    if self.has_auth_domain_ != x.has_auth_domain_: return 0
    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
    if self.has_nickname_ != x.has_nickname_: return 0
    if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
    if self.has_gaiaid_ != x.has_gaiaid_: return 0
    if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """email, auth_domain and gaiaid are required; nickname is optional."""
    initialized = 1
    if (not self.has_email_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: email not set.')
    if (not self.has_auth_domain_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: auth_domain not set.')
    if (not self.has_gaiaid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: gaiaid not set.')
    return initialized

  def ByteSize(self):
    # +4 covers the tag bytes of the three required fields (gaiaid's
    # field number 18 needs a two-byte tag).
    n = 0
    n += self.lengthString(len(self.email_))
    n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    n += self.lengthVarInt64(self.gaiaid_)
    return n + 4

  def Clear(self):
    self.clear_email()
    self.clear_auth_domain()
    self.clear_nickname()
    self.clear_gaiaid()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized. Tags: 74 = field 9,
    82 = field 10, 90 = field 11 (strings); 144 = field 18 (varint)."""
    out.putVarInt32(74)
    out.putPrefixedString(self.email_)
    out.putVarInt32(82)
    out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(90)
      out.putPrefixedString(self.nickname_)
    out.putVarInt32(144)
    out.putVarInt64(self.gaiaid_)

  def TryMerge(self, d):
    """Parse until the group END tag (68 = field 8, ENDGROUP) is seen."""
    while 1:
      tt = d.getVarInt32()
      if tt == 68: break
      if tt == 74:
        self.set_email(d.getPrefixedString())
        continue
      if tt == 82:
        self.set_auth_domain(d.getPrefixedString())
        continue
      if tt == 90:
        self.set_nickname(d.getPrefixedString())
        continue
      if tt == 144:
        self.set_gaiaid(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
    if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
    if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
    return res
class PropertyValue_ReferenceValue(ProtocolBuffer.ProtocolMessage):
  """Entity-reference group (field 12 of PropertyValue): a required app
  id plus a repeated path of PathElement groups.
  """
  has_app_ = 0
  app_ = ""

  def __init__(self, contents=None):
    # Repeated field: list of PropertyValue_ReferenceValuePathElement.
    self.pathelement_ = []
    if contents is not None: self.MergeFromString(contents)

  def app(self): return self.app_

  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x

  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""

  def has_app(self): return self.has_app_

  def pathelement_size(self): return len(self.pathelement_)
  def pathelement_list(self): return self.pathelement_

  def pathelement(self, i):
    return self.pathelement_[i]

  def mutable_pathelement(self, i):
    return self.pathelement_[i]

  def add_pathelement(self):
    # Append a new empty element and return it for in-place filling.
    x = PropertyValue_ReferenceValuePathElement()
    self.pathelement_.append(x)
    return x

  def clear_pathelement(self):
    self.pathelement_ = []

  def MergeFrom(self, x):
    """Merge all present fields of x (same message type) into self;
    repeated elements are appended, not replaced."""
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    for i in xrange(x.pathelement_size()): self.add_pathelement().CopyFrom(x.pathelement(i))

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if len(self.pathelement_) != len(x.pathelement_): return 0
    for e1, e2 in zip(self.pathelement_, x.pathelement_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """'app' is required, and every path element must itself be initialized."""
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    for p in self.pathelement_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # Each group element costs 2 tag bytes (START + END); +1 for 'app'.
    n = 0
    n += self.lengthString(len(self.app_))
    n += 2 * len(self.pathelement_)
    for i in xrange(len(self.pathelement_)): n += self.pathelement_[i].ByteSize()
    return n + 1

  def Clear(self):
    self.clear_app()
    self.clear_pathelement()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized. Tags: 106 = field 13
    (string); 115/116 = field 14 STARTGROUP/ENDGROUP pair."""
    out.putVarInt32(106)
    out.putPrefixedString(self.app_)
    for i in xrange(len(self.pathelement_)):
      out.putVarInt32(115)
      self.pathelement_[i].OutputUnchecked(out)
      out.putVarInt32(116)

  def TryMerge(self, d):
    """Parse until the group END tag (100 = field 12, ENDGROUP) is seen.
    Nested group parsing stops at its own END tag (116)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 100: break
      if tt == 106:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 115:
        self.add_pathelement().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    cnt=0
    for e in self.pathelement_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("PathElement%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
class PropertyValue(ProtocolBuffer.ProtocolMessage):
  """Polymorphic datastore property value.

  Holds at most one of: int64, boolean, string, double, or one of three
  group submessages (PointValue, UserValue, ReferenceValue). Group
  submessages are lazily constructed under a lock. Note: nothing here
  enforces that only one variant is set at a time — callers are
  responsible for that.
  """
  has_int64value_ = 0
  int64value_ = 0
  has_booleanvalue_ = 0
  booleanvalue_ = 0
  has_stringvalue_ = 0
  stringvalue_ = ""
  has_doublevalue_ = 0
  doublevalue_ = 0.0
  has_pointvalue_ = 0
  pointvalue_ = None
  has_uservalue_ = 0
  uservalue_ = None
  has_referencevalue_ = 0
  referencevalue_ = None

  def __init__(self, contents=None):
    # Lock guarding lazy creation of the three group submessages.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def int64value(self): return self.int64value_

  def set_int64value(self, x):
    self.has_int64value_ = 1
    self.int64value_ = x

  def clear_int64value(self):
    if self.has_int64value_:
      self.has_int64value_ = 0
      self.int64value_ = 0

  def has_int64value(self): return self.has_int64value_

  def booleanvalue(self): return self.booleanvalue_

  def set_booleanvalue(self, x):
    self.has_booleanvalue_ = 1
    self.booleanvalue_ = x

  def clear_booleanvalue(self):
    if self.has_booleanvalue_:
      self.has_booleanvalue_ = 0
      self.booleanvalue_ = 0

  def has_booleanvalue(self): return self.has_booleanvalue_

  def stringvalue(self): return self.stringvalue_

  def set_stringvalue(self, x):
    self.has_stringvalue_ = 1
    self.stringvalue_ = x

  def clear_stringvalue(self):
    if self.has_stringvalue_:
      self.has_stringvalue_ = 0
      self.stringvalue_ = ""

  def has_stringvalue(self): return self.has_stringvalue_

  def doublevalue(self): return self.doublevalue_

  def set_doublevalue(self, x):
    self.has_doublevalue_ = 1
    self.doublevalue_ = x

  def clear_doublevalue(self):
    if self.has_doublevalue_:
      self.has_doublevalue_ = 0
      self.doublevalue_ = 0.0

  def has_doublevalue(self): return self.has_doublevalue_

  def pointvalue(self):
    # Double-checked lazy init: build the submessage only on first access.
    if self.pointvalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.pointvalue_ is None: self.pointvalue_ = PropertyValue_PointValue()
      finally:
        self.lazy_init_lock_.release()
    return self.pointvalue_

  def mutable_pointvalue(self): self.has_pointvalue_ = 1; return self.pointvalue()

  def clear_pointvalue(self):
    # Clear the presence bit but keep the cached instance for reuse.
    if self.has_pointvalue_:
      self.has_pointvalue_ = 0;
      if self.pointvalue_ is not None: self.pointvalue_.Clear()

  def has_pointvalue(self): return self.has_pointvalue_

  def uservalue(self):
    # Double-checked lazy init, same pattern as pointvalue().
    if self.uservalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.uservalue_ is None: self.uservalue_ = PropertyValue_UserValue()
      finally:
        self.lazy_init_lock_.release()
    return self.uservalue_

  def mutable_uservalue(self): self.has_uservalue_ = 1; return self.uservalue()

  def clear_uservalue(self):
    if self.has_uservalue_:
      self.has_uservalue_ = 0;
      if self.uservalue_ is not None: self.uservalue_.Clear()

  def has_uservalue(self): return self.has_uservalue_

  def referencevalue(self):
    # Double-checked lazy init, same pattern as pointvalue().
    if self.referencevalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.referencevalue_ is None: self.referencevalue_ = PropertyValue_ReferenceValue()
      finally:
        self.lazy_init_lock_.release()
    return self.referencevalue_

  def mutable_referencevalue(self): self.has_referencevalue_ = 1; return self.referencevalue()

  def clear_referencevalue(self):
    if self.has_referencevalue_:
      self.has_referencevalue_ = 0;
      if self.referencevalue_ is not None: self.referencevalue_.Clear()

  def has_referencevalue(self): return self.has_referencevalue_


  def MergeFrom(self, x):
    """Merge all present fields of x (another PropertyValue) into self."""
    assert x is not self
    if (x.has_int64value()): self.set_int64value(x.int64value())
    if (x.has_booleanvalue()): self.set_booleanvalue(x.booleanvalue())
    if (x.has_stringvalue()): self.set_stringvalue(x.stringvalue())
    if (x.has_doublevalue()): self.set_doublevalue(x.doublevalue())
    if (x.has_pointvalue()): self.mutable_pointvalue().MergeFrom(x.pointvalue())
    if (x.has_uservalue()): self.mutable_uservalue().MergeFrom(x.uservalue())
    if (x.has_referencevalue()): self.mutable_referencevalue().MergeFrom(x.referencevalue())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_int64value_ != x.has_int64value_: return 0
    if self.has_int64value_ and self.int64value_ != x.int64value_: return 0
    if self.has_booleanvalue_ != x.has_booleanvalue_: return 0
    if self.has_booleanvalue_ and self.booleanvalue_ != x.booleanvalue_: return 0
    if self.has_stringvalue_ != x.has_stringvalue_: return 0
    if self.has_stringvalue_ and self.stringvalue_ != x.stringvalue_: return 0
    if self.has_doublevalue_ != x.has_doublevalue_: return 0
    if self.has_doublevalue_ and self.doublevalue_ != x.doublevalue_: return 0
    if self.has_pointvalue_ != x.has_pointvalue_: return 0
    if self.has_pointvalue_ and self.pointvalue_ != x.pointvalue_: return 0
    if self.has_uservalue_ != x.has_uservalue_: return 0
    if self.has_uservalue_ and self.uservalue_ != x.uservalue_: return 0
    if self.has_referencevalue_ != x.has_referencevalue_: return 0
    if self.has_referencevalue_ and self.referencevalue_ != x.referencevalue_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """All fields are optional; only present submessages are validated."""
    initialized = 1
    if (self.has_pointvalue_ and not self.pointvalue_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_uservalue_ and not self.uservalue_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_referencevalue_ and not self.referencevalue_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    # Per-field constants: boolean = tag + 1 byte; double = tag + 8 bytes;
    # groups cost 2 tag bytes each (START + END).
    n = 0
    if (self.has_int64value_): n += 1 + self.lengthVarInt64(self.int64value_)
    if (self.has_booleanvalue_): n += 2
    if (self.has_stringvalue_): n += 1 + self.lengthString(len(self.stringvalue_))
    if (self.has_doublevalue_): n += 9
    if (self.has_pointvalue_): n += 2 + self.pointvalue_.ByteSize()
    if (self.has_uservalue_): n += 2 + self.uservalue_.ByteSize()
    if (self.has_referencevalue_): n += 2 + self.referencevalue_.ByteSize()
    return n + 0

  def Clear(self):
    self.clear_int64value()
    self.clear_booleanvalue()
    self.clear_stringvalue()
    self.clear_doublevalue()
    self.clear_pointvalue()
    self.clear_uservalue()
    self.clear_referencevalue()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized. Tags: 8=f1 varint,
    16=f2 varint, 26=f3 string, 33=f4 double, 43/44=f5 group,
    67/68=f8 group, 99/100=f12 group."""
    if (self.has_int64value_):
      out.putVarInt32(8)
      out.putVarInt64(self.int64value_)
    if (self.has_booleanvalue_):
      out.putVarInt32(16)
      out.putBoolean(self.booleanvalue_)
    if (self.has_stringvalue_):
      out.putVarInt32(26)
      out.putPrefixedString(self.stringvalue_)
    if (self.has_doublevalue_):
      out.putVarInt32(33)
      out.putDouble(self.doublevalue_)
    if (self.has_pointvalue_):
      out.putVarInt32(43)
      self.pointvalue_.OutputUnchecked(out)
      out.putVarInt32(44)
    if (self.has_uservalue_):
      out.putVarInt32(67)
      self.uservalue_.OutputUnchecked(out)
      out.putVarInt32(68)
    if (self.has_referencevalue_):
      out.putVarInt32(99)
      self.referencevalue_.OutputUnchecked(out)
      out.putVarInt32(100)

  def TryMerge(self, d):
    """Parse fields from decoder d until exhausted; group submessages
    consume their own ENDGROUP tags. Unknown fields are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_int64value(d.getVarInt64())
        continue
      if tt == 16:
        self.set_booleanvalue(d.getBoolean())
        continue
      if tt == 26:
        self.set_stringvalue(d.getPrefixedString())
        continue
      if tt == 33:
        self.set_doublevalue(d.getDouble())
        continue
      if tt == 43:
        self.mutable_pointvalue().TryMerge(d)
        continue
      if tt == 67:
        self.mutable_uservalue().TryMerge(d)
        continue
      if tt == 99:
        self.mutable_referencevalue().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_int64value_: res+=prefix+("int64Value: %s\n" % self.DebugFormatInt64(self.int64value_))
    if self.has_booleanvalue_: res+=prefix+("booleanValue: %s\n" % self.DebugFormatBool(self.booleanvalue_))
    if self.has_stringvalue_: res+=prefix+("stringValue: %s\n" % self.DebugFormatString(self.stringvalue_))
    if self.has_doublevalue_: res+=prefix+("doubleValue: %s\n" % self.DebugFormat(self.doublevalue_))
    if self.has_pointvalue_:
      res+=prefix+"PointValue {\n"
      res+=self.pointvalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_uservalue_:
      res+=prefix+"UserValue {\n"
      res+=self.uservalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_referencevalue_:
      res+=prefix+"ReferenceValue {\n"
      res+=self.referencevalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    return res

  # Field numbers, including nested group members (flattened naming).
  kint64Value = 1
  kbooleanValue = 2
  kstringValue = 3
  kdoubleValue = 4
  kPointValueGroup = 5
  kPointValuex = 6
  kPointValuey = 7
  kUserValueGroup = 8
  kUserValueemail = 9
  kUserValueauth_domain = 10
  kUserValuenickname = 11
  kUserValuegaiaid = 18
  kReferenceValueGroup = 12
  kReferenceValueapp = 13
  kReferenceValuePathElementGroup = 14
  kReferenceValuePathElementtype = 15
  kReferenceValuePathElementid = 16
  kReferenceValuePathElementname = 17

  # Generic text-format metadata; index 0 is the "ErrorCode" placeholder.
  # Note: indices follow field numbers, so "gaiaid" (field 18) is last.
  _TEXT = (
   "ErrorCode",
   "int64Value",
   "booleanValue",
   "stringValue",
   "doubleValue",
   "PointValue",
   "x",
   "y",
   "UserValue",
   "email",
   "auth_domain",
   "nickname",
   "ReferenceValue",
   "app",
   "PathElement",
   "type",
   "id",
   "name",
   "gaiaid",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.DOUBLE,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.DOUBLE,
   ProtocolBuffer.Encoder.DOUBLE,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  # Style metadata emitted by the generator; unused here.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Property(ProtocolBuffer.ProtocolMessage):
  """A named datastore property: required name and PropertyValue, plus an
  optional 'meaning' enum (with URI refinement) and a 'multiple' flag.

  Unlike PropertyValue's lazy submessages, 'value' is eagerly constructed
  in __init__ because it is a required field.
  """

  # Meaning enum constants (semantic interpretation of the value).
  BLOB         =   14
  TEXT         =   15
  BYTESTRING   =   16
  ATOM_CATEGORY =    1
  ATOM_LINK    =    2
  ATOM_TITLE   =    3
  ATOM_CONTENT =    4
  ATOM_SUMMARY =    5
  ATOM_AUTHOR  =    6
  GD_WHEN      =    7
  GD_EMAIL     =    8
  GEORSS_POINT =    9
  GD_IM        =   10
  GD_PHONENUMBER =   11
  GD_POSTALADDRESS =   12
  GD_RATING    =   13

  # Reverse lookup: enum value -> name (empty string for unknown values).
  _Meaning_NAMES = {
    14: "BLOB",
    15: "TEXT",
    16: "BYTESTRING",
    1: "ATOM_CATEGORY",
    2: "ATOM_LINK",
    3: "ATOM_TITLE",
    4: "ATOM_CONTENT",
    5: "ATOM_SUMMARY",
    6: "ATOM_AUTHOR",
    7: "GD_WHEN",
    8: "GD_EMAIL",
    9: "GEORSS_POINT",
    10: "GD_IM",
    11: "GD_PHONENUMBER",
    12: "GD_POSTALADDRESS",
    13: "GD_RATING",
  }

  def Meaning_Name(cls, x): return cls._Meaning_NAMES.get(x, "")
  Meaning_Name = classmethod(Meaning_Name)

  has_meaning_ = 0
  meaning_ = 0
  has_meaning_uri_ = 0
  meaning_uri_ = ""
  has_name_ = 0
  name_ = ""
  has_value_ = 0
  has_multiple_ = 0
  multiple_ = 0

  def __init__(self, contents=None):
    # 'value' is required, so it is created eagerly (no lazy-init lock).
    self.value_ = PropertyValue()
    if contents is not None: self.MergeFromString(contents)

  def meaning(self): return self.meaning_

  def set_meaning(self, x):
    self.has_meaning_ = 1
    self.meaning_ = x

  def clear_meaning(self):
    if self.has_meaning_:
      self.has_meaning_ = 0
      self.meaning_ = 0

  def has_meaning(self): return self.has_meaning_

  def meaning_uri(self): return self.meaning_uri_

  def set_meaning_uri(self, x):
    self.has_meaning_uri_ = 1
    self.meaning_uri_ = x

  def clear_meaning_uri(self):
    if self.has_meaning_uri_:
      self.has_meaning_uri_ = 0
      self.meaning_uri_ = ""

  def has_meaning_uri(self): return self.has_meaning_uri_

  def name(self): return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self): return self.has_name_

  def value(self): return self.value_

  def mutable_value(self): self.has_value_ = 1; return self.value_

  def clear_value(self):self.has_value_ = 0; self.value_.Clear()

  def has_value(self): return self.has_value_

  def multiple(self): return self.multiple_

  def set_multiple(self, x):
    self.has_multiple_ = 1
    self.multiple_ = x

  def clear_multiple(self):
    if self.has_multiple_:
      self.has_multiple_ = 0
      self.multiple_ = 0

  def has_multiple(self): return self.has_multiple_


  def MergeFrom(self, x):
    """Merge all present fields of x (another Property) into self."""
    assert x is not self
    if (x.has_meaning()): self.set_meaning(x.meaning())
    if (x.has_meaning_uri()): self.set_meaning_uri(x.meaning_uri())
    if (x.has_name()): self.set_name(x.name())
    if (x.has_value()): self.mutable_value().MergeFrom(x.value())
    if (x.has_multiple()): self.set_multiple(x.multiple())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_meaning_ != x.has_meaning_: return 0
    if self.has_meaning_ and self.meaning_ != x.meaning_: return 0
    if self.has_meaning_uri_ != x.has_meaning_uri_: return 0
    if self.has_meaning_uri_ and self.meaning_uri_ != x.meaning_uri_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    if self.has_multiple_ != x.has_multiple_: return 0
    if self.has_multiple_ and self.multiple_ != x.multiple_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """'name' and 'value' are required; 'value' is validated recursively."""
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    elif not self.value_.IsInitialized(debug_strs): initialized = 0
    return initialized

  def ByteSize(self):
    # +2 covers the tag bytes of the two required fields (name, value);
    # multiple = tag + 1 boolean byte.
    n = 0
    if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
    if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
    n += self.lengthString(len(self.name_))
    n += self.lengthString(self.value_.ByteSize())
    if (self.has_multiple_): n += 2
    return n + 2

  def Clear(self):
    self.clear_meaning()
    self.clear_meaning_uri()
    self.clear_name()
    self.clear_value()
    self.clear_multiple()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized. Tags: 8=f1 varint,
    18=f2 string, 26=f3 string, 32=f4 boolean, 42=f5 message.
    Note 'multiple' (f4) is written before 'value' (f5) in field order."""
    if (self.has_meaning_):
      out.putVarInt32(8)
      out.putVarInt32(self.meaning_)
    if (self.has_meaning_uri_):
      out.putVarInt32(18)
      out.putPrefixedString(self.meaning_uri_)
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    if (self.has_multiple_):
      out.putVarInt32(32)
      out.putBoolean(self.multiple_)
    out.putVarInt32(42)
    out.putVarInt32(self.value_.ByteSize())
    self.value_.OutputUnchecked(out)

  def TryMerge(self, d):
    """Parse fields from decoder d until exhausted; unknown fields are
    skipped, tag 0 is a framing error."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_meaning(d.getVarInt32())
        continue
      if tt == 18:
        self.set_meaning_uri(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_multiple(d.getBoolean())
        continue
      if tt == 42:
        length = d.getVarInt32()
        # Bound a sub-decoder to exactly this field's bytes.
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_value().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_))
    if self.has_meaning_uri_: res+=prefix+("meaning_uri: %s\n" % self.DebugFormatString(self.meaning_uri_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_value_:
      res+=prefix+"value <\n"
      res+=self.value_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_multiple_: res+=prefix+("multiple: %s\n" % self.DebugFormatBool(self.multiple_))
    return res

  # Field numbers.
  kmeaning = 1
  kmeaning_uri = 2
  kname = 3
  kvalue = 5
  kmultiple = 4

  # Generic text-format metadata; index 0 is the "ErrorCode" placeholder.
  # Order follows field numbers, so "multiple" (f4) precedes "value" (f5).
  _TEXT = (
   "ErrorCode",
   "meaning",
   "meaning_uri",
   "name",
   "multiple",
   "value",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )

  # Style metadata emitted by the generator; unused here.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Path_Element(ProtocolBuffer.ProtocolMessage):
  """One element of an entity key path (group field 1 of Path): a
  required kind name ('type') plus an optional numeric id or string name.
  """
  has_type_ = 0
  type_ = ""
  has_id_ = 0
  id_ = 0
  has_name_ = 0
  name_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def type(self): return self.type_

  def set_type(self, x):
    self.has_type_ = 1
    self.type_ = x

  def clear_type(self):
    if self.has_type_:
      self.has_type_ = 0
      self.type_ = ""

  def has_type(self): return self.has_type_

  def id(self): return self.id_

  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x

  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0

  def has_id(self): return self.has_id_

  def name(self): return self.name_

  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x

  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""

  def has_name(self): return self.has_name_


  def MergeFrom(self, x):
    """Merge all present fields of x (same message type) into self."""
    assert x is not self
    if (x.has_type()): self.set_type(x.type())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_name()): self.set_name(x.name())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if self.has_type_ != x.has_type_: return 0
    if self.has_type_ and self.type_ != x.type_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """'type' is the only required field."""
    initialized = 1
    if (not self.has_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: type not set.')
    return initialized

  def ByteSize(self):
    # All tags here fit in one byte; the trailing +1 covers 'type''s tag.
    n = 0
    n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
    return n + 1

  def Clear(self):
    self.clear_type()
    self.clear_id()
    self.clear_name()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized. Tags: 18 = field 2
    (string), 24 = field 3 (varint), 34 = field 4 (string)."""
    out.putVarInt32(18)
    out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(24)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(34)
      out.putPrefixedString(self.name_)

  def TryMerge(self, d):
    """Parse until the group END tag (12 = field 1, ENDGROUP) is seen."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_type(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_id(d.getVarInt64())
        continue
      if tt == 34:
        self.set_name(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    return res
class Path(ProtocolBuffer.ProtocolMessage):
  """An entity key path: a repeated sequence of Path_Element groups."""

  def __init__(self, contents=None):
    # Repeated field: list of Path_Element.
    self.element_ = []
    if contents is not None: self.MergeFromString(contents)

  def element_size(self): return len(self.element_)
  def element_list(self): return self.element_

  def element(self, i):
    return self.element_[i]

  def mutable_element(self, i):
    return self.element_[i]

  def add_element(self):
    # Append a new empty element and return it for in-place filling.
    x = Path_Element()
    self.element_.append(x)
    return x

  def clear_element(self):
    self.element_ = []

  def MergeFrom(self, x):
    """Merge x (another Path) into self; elements are appended."""
    assert x is not self
    for i in xrange(x.element_size()): self.add_element().CopyFrom(x.element(i))

  def Equals(self, x):
    """Element-wise equality; returns 1/0 per generated-code convention."""
    if x is self: return 1
    if len(self.element_) != len(x.element_): return 0
    for e1, e2 in zip(self.element_, x.element_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Every element must itself be initialized."""
    initialized = 1
    for p in self.element_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # Each group element costs 2 tag bytes (START + END).
    n = 0
    n += 2 * len(self.element_)
    for i in xrange(len(self.element_)): n += self.element_[i].ByteSize()
    return n + 0

  def Clear(self):
    self.clear_element()

  def OutputUnchecked(self, out):
    """Serialize without checking IsInitialized. Tags 11/12 = field 1
    STARTGROUP/ENDGROUP pair."""
    for i in xrange(len(self.element_)):
      out.putVarInt32(11)
      self.element_[i].OutputUnchecked(out)
      out.putVarInt32(12)

  def TryMerge(self, d):
    """Parse fields from decoder d until exhausted; each group element
    consumes its own ENDGROUP tag (12)."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_element().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.element_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Element%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  # Field numbers, including nested group members (flattened naming).
  kElementGroup = 1
  kElementtype = 2
  kElementid = 3
  kElementname = 4

  # Generic text-format metadata; index 0 is the "ErrorCode" placeholder.
  _TEXT = (
   "ErrorCode",
   "Element",
   "type",
   "id",
   "name",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )

  # Style metadata emitted by the generator; unused here.
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Reference(ProtocolBuffer.ProtocolMessage):
  """Generated protocol-buffer message: a datastore entity key.

  Required fields: `app` (string, tag 106) and `path` (Path message,
  tag 114). Machine-generated code: do not edit by hand.
  """
  has_app_ = 0
  app_ = ""
  has_path_ = 0
  def __init__(self, contents=None):
    self.path_ = Path()
    if contents is not None: self.MergeFromString(contents)
  def app(self): return self.app_
  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x
  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""
  def has_app(self): return self.has_app_
  def path(self): return self.path_
  def mutable_path(self): self.has_path_ = 1; return self.path_
  def clear_path(self):self.has_path_ = 0; self.path_.Clear()
  def has_path(self): return self.has_path_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    if (x.has_path()): self.mutable_path().MergeFrom(x.path())
  def Equals(self, x):
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_path_ != x.has_path_: return 0
    if self.has_path_ and self.path_ != x.path_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    if (not self.has_path_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: path not set.')
    elif not self.path_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.app_))
    n += self.lengthString(self.path_.ByteSize())
    # + 2 accounts for the two one-byte field tags (106 and 114).
    return n + 2
  def Clear(self):
    self.clear_app()
    self.clear_path()
  def OutputUnchecked(self, out):
    out.putVarInt32(106)
    out.putPrefixedString(self.app_)
    out.putVarInt32(114)
    out.putVarInt32(self.path_.ByteSize())
    self.path_.OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 106:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 114:
        # Length-prefixed submessage: decode from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_path().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_path_:
      res+=prefix+"path <\n"
      res+=self.path_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
  kapp = 13
  kpath = 14
  _TEXT = (
   "ErrorCode",
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   "app",
   "path",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class User(ProtocolBuffer.ProtocolMessage):
  """Generated protocol-buffer message: a stored user value.

  Required: `email` (tag 10), `auth_domain` (tag 18), `gaiaid` (tag 32).
  Optional: `nickname` (tag 26). Machine-generated code: do not edit by hand.
  """
  has_email_ = 0
  email_ = ""
  has_auth_domain_ = 0
  auth_domain_ = ""
  has_nickname_ = 0
  nickname_ = ""
  has_gaiaid_ = 0
  gaiaid_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def email(self): return self.email_
  def set_email(self, x):
    self.has_email_ = 1
    self.email_ = x
  def clear_email(self):
    if self.has_email_:
      self.has_email_ = 0
      self.email_ = ""
  def has_email(self): return self.has_email_
  def auth_domain(self): return self.auth_domain_
  def set_auth_domain(self, x):
    self.has_auth_domain_ = 1
    self.auth_domain_ = x
  def clear_auth_domain(self):
    if self.has_auth_domain_:
      self.has_auth_domain_ = 0
      self.auth_domain_ = ""
  def has_auth_domain(self): return self.has_auth_domain_
  def nickname(self): return self.nickname_
  def set_nickname(self, x):
    self.has_nickname_ = 1
    self.nickname_ = x
  def clear_nickname(self):
    if self.has_nickname_:
      self.has_nickname_ = 0
      self.nickname_ = ""
  def has_nickname(self): return self.has_nickname_
  def gaiaid(self): return self.gaiaid_
  def set_gaiaid(self, x):
    self.has_gaiaid_ = 1
    self.gaiaid_ = x
  def clear_gaiaid(self):
    if self.has_gaiaid_:
      self.has_gaiaid_ = 0
      self.gaiaid_ = 0
  def has_gaiaid(self): return self.has_gaiaid_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_email()): self.set_email(x.email())
    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
    if (x.has_nickname()): self.set_nickname(x.nickname())
    if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
  def Equals(self, x):
    if x is self: return 1
    if self.has_email_ != x.has_email_: return 0
    if self.has_email_ and self.email_ != x.email_: return 0
    if self.has_auth_domain_ != x.has_auth_domain_: return 0
    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
    if self.has_nickname_ != x.has_nickname_: return 0
    if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
    if self.has_gaiaid_ != x.has_gaiaid_: return 0
    if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_email_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: email not set.')
    if (not self.has_auth_domain_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: auth_domain not set.')
    if (not self.has_gaiaid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: gaiaid not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.email_))
    n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    n += self.lengthVarInt64(self.gaiaid_)
    # + 3 accounts for the three unconditional one-byte field tags.
    return n + 3
  def Clear(self):
    self.clear_email()
    self.clear_auth_domain()
    self.clear_nickname()
    self.clear_gaiaid()
  def OutputUnchecked(self, out):
    out.putVarInt32(10)
    out.putPrefixedString(self.email_)
    out.putVarInt32(18)
    out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(26)
      out.putPrefixedString(self.nickname_)
    out.putVarInt32(32)
    out.putVarInt64(self.gaiaid_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_email(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_auth_domain(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_nickname(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_gaiaid(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
    if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
    if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
    return res
  kemail = 1
  kauth_domain = 2
  knickname = 3
  kgaiaid = 4
  _TEXT = (
   "ErrorCode",
   "email",
   "auth_domain",
   "nickname",
   "gaiaid",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class EntityProto(ProtocolBuffer.ProtocolMessage):
  """Generated protocol-buffer message: a full datastore entity.

  Fields: required `key` (Reference, tag 106) and `entity_group` (Path,
  tag 130); optional `owner` (User, tag 138, lazily allocated), `kind`
  (enum, tag 32), `kind_uri` (string, tag 42); repeated `property` and
  `raw_property` (Property, tags 114 / 122).
  Machine-generated code: do not edit by hand.
  """
  # Kind enum values.
  GD_CONTACT = 1
  GD_EVENT = 2
  GD_MESSAGE = 3
  _Kind_NAMES = {
    1: "GD_CONTACT",
    2: "GD_EVENT",
    3: "GD_MESSAGE",
  }
  def Kind_Name(cls, x): return cls._Kind_NAMES.get(x, "")
  Kind_Name = classmethod(Kind_Name)
  has_key_ = 0
  has_entity_group_ = 0
  has_owner_ = 0
  owner_ = None
  has_kind_ = 0
  kind_ = 0
  has_kind_uri_ = 0
  kind_uri_ = ""
  def __init__(self, contents=None):
    self.key_ = Reference()
    self.entity_group_ = Path()
    self.property_ = []
    self.raw_property_ = []
    # Guards lazy construction of owner_ in owner().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def mutable_key(self): self.has_key_ = 1; return self.key_
  def clear_key(self):self.has_key_ = 0; self.key_.Clear()
  def has_key(self): return self.has_key_
  def entity_group(self): return self.entity_group_
  def mutable_entity_group(self): self.has_entity_group_ = 1; return self.entity_group_
  def clear_entity_group(self):self.has_entity_group_ = 0; self.entity_group_.Clear()
  def has_entity_group(self): return self.has_entity_group_
  def owner(self):
    # Lazily allocate the User submessage, double-checked under the lock.
    if self.owner_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.owner_ is None: self.owner_ = User()
      finally:
        self.lazy_init_lock_.release()
    return self.owner_
  def mutable_owner(self): self.has_owner_ = 1; return self.owner()
  def clear_owner(self):
    # Avoid allocating owner_ just to clear it.
    if self.has_owner_:
      self.has_owner_ = 0;
      if self.owner_ is not None: self.owner_.Clear()
  def has_owner(self): return self.has_owner_
  def kind(self): return self.kind_
  def set_kind(self, x):
    self.has_kind_ = 1
    self.kind_ = x
  def clear_kind(self):
    if self.has_kind_:
      self.has_kind_ = 0
      self.kind_ = 0
  def has_kind(self): return self.has_kind_
  def kind_uri(self): return self.kind_uri_
  def set_kind_uri(self, x):
    self.has_kind_uri_ = 1
    self.kind_uri_ = x
  def clear_kind_uri(self):
    if self.has_kind_uri_:
      self.has_kind_uri_ = 0
      self.kind_uri_ = ""
  def has_kind_uri(self): return self.has_kind_uri_
  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_
  def property(self, i):
    return self.property_[i]
  def mutable_property(self, i):
    return self.property_[i]
  def add_property(self):
    x = Property()
    self.property_.append(x)
    return x
  def clear_property(self):
    self.property_ = []
  def raw_property_size(self): return len(self.raw_property_)
  def raw_property_list(self): return self.raw_property_
  def raw_property(self, i):
    return self.raw_property_[i]
  def mutable_raw_property(self, i):
    return self.raw_property_[i]
  def add_raw_property(self):
    x = Property()
    self.raw_property_.append(x)
    return x
  def clear_raw_property(self):
    self.raw_property_ = []
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_entity_group()): self.mutable_entity_group().MergeFrom(x.entity_group())
    if (x.has_owner()): self.mutable_owner().MergeFrom(x.owner())
    if (x.has_kind()): self.set_kind(x.kind())
    if (x.has_kind_uri()): self.set_kind_uri(x.kind_uri())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
    for i in xrange(x.raw_property_size()): self.add_raw_property().CopyFrom(x.raw_property(i))
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_entity_group_ != x.has_entity_group_: return 0
    if self.has_entity_group_ and self.entity_group_ != x.entity_group_: return 0
    if self.has_owner_ != x.has_owner_: return 0
    if self.has_owner_ and self.owner_ != x.owner_: return 0
    if self.has_kind_ != x.has_kind_: return 0
    if self.has_kind_ and self.kind_ != x.kind_: return 0
    if self.has_kind_uri_ != x.has_kind_uri_: return 0
    if self.has_kind_uri_ and self.kind_uri_ != x.kind_uri_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    if len(self.raw_property_) != len(x.raw_property_): return 0
    for e1, e2 in zip(self.raw_property_, x.raw_property_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    elif not self.key_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_entity_group_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: entity_group not set.')
    elif not self.entity_group_.IsInitialized(debug_strs): initialized = 0
    if (self.has_owner_ and not self.owner_.IsInitialized(debug_strs)): initialized = 0
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.raw_property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(self.key_.ByteSize())
    n += self.lengthString(self.entity_group_.ByteSize())
    # owner tag 138 takes a two-byte varint, hence the + 2.
    if (self.has_owner_): n += 2 + self.lengthString(self.owner_.ByteSize())
    if (self.has_kind_): n += 1 + self.lengthVarInt64(self.kind_)
    if (self.has_kind_uri_): n += 1 + self.lengthString(len(self.kind_uri_))
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
    n += 1 * len(self.raw_property_)
    for i in xrange(len(self.raw_property_)): n += self.lengthString(self.raw_property_[i].ByteSize())
    # + 3: one-byte tag for key (106) plus two-byte tag for entity_group (130).
    return n + 3
  def Clear(self):
    self.clear_key()
    self.clear_entity_group()
    self.clear_owner()
    self.clear_kind()
    self.clear_kind_uri()
    self.clear_property()
    self.clear_raw_property()
  def OutputUnchecked(self, out):
    if (self.has_kind_):
      out.putVarInt32(32)
      out.putVarInt32(self.kind_)
    if (self.has_kind_uri_):
      out.putVarInt32(42)
      out.putPrefixedString(self.kind_uri_)
    out.putVarInt32(106)
    out.putVarInt32(self.key_.ByteSize())
    self.key_.OutputUnchecked(out)
    for i in xrange(len(self.property_)):
      out.putVarInt32(114)
      out.putVarInt32(self.property_[i].ByteSize())
      self.property_[i].OutputUnchecked(out)
    for i in xrange(len(self.raw_property_)):
      out.putVarInt32(122)
      out.putVarInt32(self.raw_property_[i].ByteSize())
      self.raw_property_[i].OutputUnchecked(out)
    out.putVarInt32(130)
    out.putVarInt32(self.entity_group_.ByteSize())
    self.entity_group_.OutputUnchecked(out)
    if (self.has_owner_):
      out.putVarInt32(138)
      out.putVarInt32(self.owner_.ByteSize())
      self.owner_.OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 32:
        self.set_kind(d.getVarInt32())
        continue
      if tt == 42:
        self.set_kind_uri(d.getPrefixedString())
        continue
      if tt == 106:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 114:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_property().TryMerge(tmp)
        continue
      if tt == 122:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_raw_property().TryMerge(tmp)
        continue
      if tt == 130:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_entity_group().TryMerge(tmp)
        continue
      if tt == 138:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_owner().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_entity_group_:
      res+=prefix+"entity_group <\n"
      res+=self.entity_group_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_owner_:
      res+=prefix+"owner <\n"
      res+=self.owner_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatInt32(self.kind_))
    if self.has_kind_uri_: res+=prefix+("kind_uri: %s\n" % self.DebugFormatString(self.kind_uri_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    cnt=0
    for e in self.raw_property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("raw_property%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  kkey = 13
  kentity_group = 16
  kowner = 17
  kkind = 4
  kkind_uri = 5
  kproperty = 14
  kraw_property = 15
  _TEXT = (
   "ErrorCode",
   None,
   None,
   None,
   "kind",
   "kind_uri",
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   "key",
   "property",
   "raw_property",
   "entity_group",
   "owner",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class CompositeProperty(ProtocolBuffer.ProtocolMessage):
  """Generated protocol-buffer message: a composite-index property value.

  Required `index_id` (int64, tag 8) plus repeated string `value`
  (tag 18). Machine-generated code: do not edit by hand.
  """
  has_index_id_ = 0
  index_id_ = 0
  def __init__(self, contents=None):
    self.value_ = []
    if contents is not None: self.MergeFromString(contents)
  def index_id(self): return self.index_id_
  def set_index_id(self, x):
    self.has_index_id_ = 1
    self.index_id_ = x
  def clear_index_id(self):
    if self.has_index_id_:
      self.has_index_id_ = 0
      self.index_id_ = 0
  def has_index_id(self): return self.has_index_id_
  def value_size(self): return len(self.value_)
  def value_list(self): return self.value_
  def value(self, i):
    return self.value_[i]
  def set_value(self, i, x):
    self.value_[i] = x
  def add_value(self, x):
    self.value_.append(x)
  def clear_value(self):
    self.value_ = []
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_index_id()): self.set_index_id(x.index_id())
    for i in xrange(x.value_size()): self.add_value(x.value(i))
  def Equals(self, x):
    if x is self: return 1
    if self.has_index_id_ != x.has_index_id_: return 0
    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
    if len(self.value_) != len(x.value_): return 0
    for e1, e2 in zip(self.value_, x.value_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_index_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: index_id not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.index_id_)
    # One tag byte per repeated value entry, plus the index_id tag (+ 1).
    n += 1 * len(self.value_)
    for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
    return n + 1
  def Clear(self):
    self.clear_index_id()
    self.clear_value()
  def OutputUnchecked(self, out):
    out.putVarInt32(8)
    out.putVarInt64(self.index_id_)
    for i in xrange(len(self.value_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_[i])
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_id(d.getVarInt64())
        continue
      if tt == 18:
        self.add_value(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
    cnt=0
    for e in self.value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("value%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res
  kindex_id = 1
  kvalue = 2
  _TEXT = (
   "ErrorCode",
   "index_id",
   "value",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Index_Property(ProtocolBuffer.ProtocolMessage):
  """Generated protocol-buffer group: one property of an Index definition.

  Fields: required `name` (string, tag 26) and optional `direction`
  (enum, tag 32, defaults to ASCENDING). Encoded as a group inside
  Index, so TryMerge stops at the ENDGROUP tag (20) rather than at end
  of buffer. Machine-generated code: do not edit by hand.
  """
  # Direction enum values.
  ASCENDING = 1
  DESCENDING = 2
  _Direction_NAMES = {
    1: "ASCENDING",
    2: "DESCENDING",
  }
  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  Direction_Name = classmethod(Direction_Name)
  has_name_ = 0
  name_ = ""
  has_direction_ = 0
  direction_ = 1
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def direction(self): return self.direction_
  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x
  def clear_direction(self):
    if self.has_direction_:
      self.has_direction_ = 0
      self.direction_ = 1
  def has_direction(self): return self.has_direction_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_name()): self.set_name(x.name())
    if (x.has_direction()): self.set_direction(x.direction())
  def Equals(self, x):
    if x is self: return 1
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.name_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    return n + 1
  def Clear(self):
    self.clear_name()
    self.clear_direction()
  def OutputUnchecked(self, out):
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    if (self.has_direction_):
      out.putVarInt32(32)
      out.putVarInt32(self.direction_)
  def TryMerge(self, d):
    while 1:
      tt = d.getVarInt32()
      # Tag 20 is the enclosing group's ENDGROUP marker.
      if tt == 20: break
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_direction(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    return res
class Index(ProtocolBuffer.ProtocolMessage):
  """Generated protocol-buffer message: a composite index definition.

  Fields: required `entity_type` (string, tag 10) and `ancestor`
  (bool, tag 40); repeated group `property` (Index_Property, group
  tags 19/20). Machine-generated code: do not edit by hand.
  """
  has_entity_type_ = 0
  entity_type_ = ""
  has_ancestor_ = 0
  ancestor_ = 0
  def __init__(self, contents=None):
    self.property_ = []
    if contents is not None: self.MergeFromString(contents)
  def entity_type(self): return self.entity_type_
  def set_entity_type(self, x):
    self.has_entity_type_ = 1
    self.entity_type_ = x
  def clear_entity_type(self):
    if self.has_entity_type_:
      self.has_entity_type_ = 0
      self.entity_type_ = ""
  def has_entity_type(self): return self.has_entity_type_
  def ancestor(self): return self.ancestor_
  def set_ancestor(self, x):
    self.has_ancestor_ = 1
    self.ancestor_ = x
  def clear_ancestor(self):
    if self.has_ancestor_:
      self.has_ancestor_ = 0
      self.ancestor_ = 0
  def has_ancestor(self): return self.has_ancestor_
  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_
  def property(self, i):
    return self.property_[i]
  def mutable_property(self, i):
    return self.property_[i]
  def add_property(self):
    x = Index_Property()
    self.property_.append(x)
    return x
  def clear_property(self):
    self.property_ = []
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_entity_type()): self.set_entity_type(x.entity_type())
    if (x.has_ancestor()): self.set_ancestor(x.ancestor())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
  def Equals(self, x):
    if x is self: return 1
    if self.has_entity_type_ != x.has_entity_type_: return 0
    if self.has_entity_type_ and self.entity_type_ != x.entity_type_: return 0
    if self.has_ancestor_ != x.has_ancestor_: return 0
    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_entity_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: entity_type not set.')
    if (not self.has_ancestor_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: ancestor not set.')
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.entity_type_))
    # Two group-delimiter bytes per property entry.
    n += 2 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.property_[i].ByteSize()
    # + 3: entity_type tag, ancestor tag, and the one-byte boolean payload.
    return n + 3
  def Clear(self):
    self.clear_entity_type()
    self.clear_ancestor()
    self.clear_property()
  def OutputUnchecked(self, out):
    out.putVarInt32(10)
    out.putPrefixedString(self.entity_type_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(19)
      self.property_[i].OutputUnchecked(out)
      out.putVarInt32(20)
    out.putVarInt32(40)
    out.putBoolean(self.ancestor_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_entity_type(d.getPrefixedString())
        continue
      if tt == 19:
        # Group-encoded sub-message: it consumes up to its own ENDGROUP tag.
        self.add_property().TryMerge(d)
        continue
      if tt == 40:
        self.set_ancestor(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_entity_type_: res+=prefix+("entity_type: %s\n" % self.DebugFormatString(self.entity_type_))
    if self.has_ancestor_: res+=prefix+("ancestor: %s\n" % self.DebugFormatBool(self.ancestor_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Property%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  kentity_type = 1
  kancestor = 5
  kPropertyGroup = 2
  kPropertyname = 3
  kPropertydirection = 4
  _TEXT = (
   "ErrorCode",
   "entity_type",
   "Property",
   "name",
   "direction",
   "ancestor",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class CompositeIndex(ProtocolBuffer.ProtocolMessage):
  """Generated protocol-buffer message: a stored composite index.

  All four fields are required: `app_id` (string, tag 10), `id`
  (int64, tag 16), `definition` (Index message, tag 26), and `state`
  (enum, tag 32). Machine-generated code: do not edit by hand.
  """
  # State enum values.
  WRITE_ONLY = 1
  READ_WRITE = 2
  DELETED = 3
  ERROR = 4
  _State_NAMES = {
    1: "WRITE_ONLY",
    2: "READ_WRITE",
    3: "DELETED",
    4: "ERROR",
  }
  def State_Name(cls, x): return cls._State_NAMES.get(x, "")
  State_Name = classmethod(State_Name)
  has_app_id_ = 0
  app_id_ = ""
  has_id_ = 0
  id_ = 0
  has_definition_ = 0
  has_state_ = 0
  state_ = 0
  def __init__(self, contents=None):
    self.definition_ = Index()
    if contents is not None: self.MergeFromString(contents)
  def app_id(self): return self.app_id_
  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x
  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""
  def has_app_id(self): return self.has_app_id_
  def id(self): return self.id_
  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x
  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0
  def has_id(self): return self.has_id_
  def definition(self): return self.definition_
  def mutable_definition(self): self.has_definition_ = 1; return self.definition_
  def clear_definition(self):self.has_definition_ = 0; self.definition_.Clear()
  def has_definition(self): return self.has_definition_
  def state(self): return self.state_
  def set_state(self, x):
    self.has_state_ = 1
    self.state_ = x
  def clear_state(self):
    if self.has_state_:
      self.has_state_ = 0
      self.state_ = 0
  def has_state(self): return self.has_state_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_definition()): self.mutable_definition().MergeFrom(x.definition())
    if (x.has_state()): self.set_state(x.state())
  def Equals(self, x):
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_definition_ != x.has_definition_: return 0
    if self.has_definition_ and self.definition_ != x.definition_: return 0
    if self.has_state_ != x.has_state_: return 0
    if self.has_state_ and self.state_ != x.state_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_app_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app_id not set.')
    if (not self.has_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: id not set.')
    if (not self.has_definition_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: definition not set.')
    elif not self.definition_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_state_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: state not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.app_id_))
    n += self.lengthVarInt64(self.id_)
    n += self.lengthString(self.definition_.ByteSize())
    n += self.lengthVarInt64(self.state_)
    # + 4: one tag byte per required field.
    return n + 4
  def Clear(self):
    self.clear_app_id()
    self.clear_id()
    self.clear_definition()
    self.clear_state()
  def OutputUnchecked(self, out):
    out.putVarInt32(10)
    out.putPrefixedString(self.app_id_)
    out.putVarInt32(16)
    out.putVarInt64(self.id_)
    out.putVarInt32(26)
    out.putVarInt32(self.definition_.ByteSize())
    self.definition_.OutputUnchecked(out)
    out.putVarInt32(32)
    out.putVarInt32(self.state_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_id(d.getVarInt64())
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_definition().TryMerge(tmp)
        continue
      if tt == 32:
        self.set_state(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_definition_:
      res+=prefix+"definition <\n"
      res+=self.definition_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
    return res
  kapp_id = 1
  kid = 2
  kdefinition = 3
  kstate = 4
  _TEXT = (
   "ErrorCode",
   "app_id",
   "id",
   "definition",
   "state",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public names exported by this generated module.
__all__ = ['PropertyValue','PropertyValue_ReferenceValuePathElement','PropertyValue_PointValue','PropertyValue_UserValue','PropertyValue_ReferenceValue','Property','Path','Path_Element','Reference','User','EntityProto','CompositeProperty','Index','Index_Property','CompositeIndex']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Primitives for dealing with datastore indexes.
Example index.yaml file:
------------------------
indexes:
- kind: Cat
ancestor: no
properties:
- name: name
- name: age
direction: desc
- kind: Cat
properties:
- name: name
direction: ascending
- name: whiskers
direction: descending
- kind: Store
ancestor: yes
properties:
- name: business
direction: asc
- name: owner
direction: asc
"""
from google.appengine.api import datastore_types
from google.appengine.api import validation
from google.appengine.api import yaml_errors
from google.appengine.api import yaml_object
from google.appengine.datastore import datastore_pb
class Property(validation.Validated):
  """Representation for an individual property of an index.

  Attributes:
    name: Name of attribute to sort by.
    direction: Direction of sort; 'asc' (alias 'ascending') or
      'desc' (alias 'descending'). Defaults to 'asc'.
  """

  ATTRIBUTES = {
    'name': validation.TYPE_STR,
    'direction': validation.Options(('asc', ('ascending',)),
                                    ('desc', ('descending',)),
                                    default='asc'),
  }
class Index(validation.Validated):
  """Individual index definition.

  The order of the properties determines a given index's sort priority.

  Attributes:
    kind: Datastore kind that the index belongs to.
    ancestor: Whether to include ancestors in the index.  Defaults to False.
    properties: Optional list of Property objects to sort on.
  """
  ATTRIBUTES = {
    'kind': validation.TYPE_STR,
    'ancestor': validation.Type(bool, default=False),
    'properties': validation.Optional(validation.Repeated(Property)),
  }
class IndexDefinitions(validation.Validated):
  """Top level for an index definition file (index.yaml).

  Attributes:
    indexes: Optional list of Index definitions.
  """
  ATTRIBUTES = {
    'indexes': validation.Optional(validation.Repeated(Index)),
  }
def ParseIndexDefinitions(document):
  """Parse a single index-definitions document from a string or stream.

  Args:
    document: Yaml document as a string or file-like stream.

  Raises:
    MultipleConfigurationFile when the configuration source contains more
    than one yaml document.

  Returns:
    The parsed IndexDefinitions object, or None when the document is empty.
  """
  try:
    parsed = yaml_object.BuildSingleObject(IndexDefinitions, document)
  except yaml_errors.EmptyConfigurationFile:
    parsed = None
  return parsed
def ParseMultipleIndexDefinitions(document):
  """Parse multiple index definitions documents from a string or stream.

  Args:
    document: Yaml document as a string or file-like stream.

  Returns:
    A list of datastore_index.IndexDefinitions objects, one for each document.
  """
  return yaml_object.BuildObjects(IndexDefinitions, document)
def IndexDefinitionsToKeys(indexes):
  """Convert an IndexDefinitions object into a set of index keys.

  Args:
    indexes: A datastore_index.IndexDefinitions instance, or None.

  Returns:
    A set of keys, each a tuple (kind, ancestor, properties) where
    properties is a tuple of (name, direction) pairs and direction is
    ASCENDING or DESCENDING (the enums).  Empty when indexes is None or
    holds no index list.
  """
  if indexes is None or not indexes.indexes:
    return set()
  return set(IndexToKey(definition) for definition in indexes.indexes)
def IndexToKey(index):
  """Convert a single Index definition into its key tuple.

  Args:
    index: A datastore_index.Index instance (not None!).

  Returns:
    A tuple (kind, ancestor, properties) where properties is a tuple of
    (name, direction) pairs, direction being ASCENDING or DESCENDING
    (the enums).
  """
  pairs = []
  if index.properties is not None:
    for index_property in index.properties:
      sort_order = ASCENDING if index_property.direction == 'asc' else DESCENDING
      pairs.append((index_property.name, sort_order))
  return index.kind, index.ancestor, tuple(pairs)
# Shorthand aliases for the datastore sort-direction enum values.
ASCENDING = datastore_pb.Query_Order.ASCENDING
DESCENDING = datastore_pb.Query_Order.DESCENDING
# Query filter operators grouped by how they affect composite-index layout
# (consumed by CompositeIndexForQuery).
EQUALITY_OPERATORS = set((datastore_pb.Query_Filter.EQUAL,
                          ))
INEQUALITY_OPERATORS = set((datastore_pb.Query_Filter.LESS_THAN,
                            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
                            datastore_pb.Query_Filter.GREATER_THAN,
                            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
                            ))
EXISTS_OPERATORS = set((datastore_pb.Query_Filter.EXISTS,
                        ))
def CompositeIndexForQuery(query):
  """Return the composite index needed for a query.

  A query is translated into a tuple, as follows:

  - The first item is the kind string, or None if we're not filtering
    on kind (see below).

  - The second item is a bool giving whether the query specifies an
    ancestor.

  - After that come (property, ASCENDING) pairs for those Filter
    entries whose operator is EQUAL or IN.  Since the order of these
    doesn't matter, they are sorted by property name to normalize them
    in order to avoid duplicates.

  - After that comes at most one (property, ASCENDING) pair for a
    Filter entry whose operator is one of the four inequalities.  There
    can be at most one of these.

  - After that come all the (property, direction) pairs for the Order
    entries, in the order given in the query.  Exceptions: (a) if
    there is a Filter entry with an inequality operator that matches
    the first Order entry, the first order pair is omitted (or,
    equivalently, in this case the inequality pair is omitted); (b) if
    an Order entry corresponds to an equality filter, it is ignored
    (since there will only ever be one value returned).

  - Finally, if there are Filter entries whose operator is EXISTS, and
    whose property names are not already listed, they are added, with
    the direction set to ASCENDING.

  This algorithm should consume all Filter and Order entries.

  Additional notes:

  - The low-level implementation allows queries that don't specify a
    kind; but the Python API doesn't support this yet.

  - If there's an inequality filter and one or more sort orders, the
    first sort order *must* match the inequality filter.

  - The following indexes are always built in and should be suppressed:
    - query on kind only;
    - query on kind and one filter *or* one order;
    - query on ancestor only, without kind (not exposed in Python yet);
    - query on kind and equality filters only, no order (with or without
      ancestor).

  - While the protocol buffer allows a Filter to contain multiple
    properties, we don't use this.  It is only needed for the IN operator
    but this is (currently) handled on the client side, so in practice
    each Filter is expected to have exactly one property.

  Args:
    query: A datastore_pb.Query instance.

  Returns:
    A tuple of the form (required, kind, ancestor, (prop1, prop2, ...), neq):
      required: boolean, whether the index is required;
      kind: the kind or None;
      ancestor: True if this is an ancestor query;
      prop1, prop2, ...: tuples of the form (name, direction) where:
        name: a property name;
        direction: datastore_pb.Query_Order.ASCENDING or ...DESCENDING;
      neq: the number of prop tuples corresponding to equality filters.
  """
  required = True

  kind = query.kind()
  ancestor = query.has_ancestor()
  filters = query.filter_list()
  orders = query.order_list()

  # Sanity-check the filters: IN must have been expanded by the client
  # already, and each filter names exactly one property.  (Loop variable
  # renamed from 'filter' to stop shadowing the builtin.)
  for filt in filters:
    assert filt.op() != datastore_pb.Query_Filter.IN, 'Filter.op()==IN'
    nprops = len(filt.property_list())
    assert nprops == 1, 'Filter has %s properties, expected 1' % nprops

  # An ancestor-only query (no kind, filters or orders) is built in.
  if ancestor and not kind and not filters and not orders:
    required = False

  # Partition the filters by operator class; every filter must land in
  # exactly one bucket.
  eq_filters = [f for f in filters if f.op() in EQUALITY_OPERATORS]
  ineq_filters = [f for f in filters if f.op() in INEQUALITY_OPERATORS]
  exists_filters = [f for f in filters if f.op() in EXISTS_OPERATORS]
  assert (len(eq_filters) + len(ineq_filters) +
          len(exists_filters)) == len(filters), 'Not all filters used'

  # Kind plus equality filters only is built in, unless a special
  # (reserved) property is involved.
  if (kind and eq_filters and not ineq_filters and not exists_filters and
      not orders):
    names = set(f.property(0).name() for f in eq_filters)
    if not names.intersection(datastore_types._SPECIAL_PROPERTIES):
      required = False

  # All inequality filters must target the same property.
  ineq_property = None
  if ineq_filters:
    ineq_property = ineq_filters[0].property(0).name()
    for filt in ineq_filters:
      assert filt.property(0).name() == ineq_property

  # Drop orders that correspond to equality filters: only one value can
  # come back for those properties, so ordering on them is a no-op.
  new_orders = []
  for order in orders:
    name = order.property()
    for filt in eq_filters:
      if filt.property(0).name() == name:
        break
    else:
      new_orders.append(order)
  orders = new_orders

  props = []

  # Equality pairs come first, sorted by name to normalize the key.
  for f in eq_filters:
    prop = f.property(0)
    props.append((prop.name(), ASCENDING))
  props.sort()

  # The inequality property either matches the first order entry (and is
  # then represented by it) or contributes its own ascending pair.
  if ineq_property:
    if orders:
      assert ineq_property == orders[0].property()
    else:
      props.append((ineq_property, ASCENDING))

  for order in orders:
    props.append((order.property(), order.direction()))

  # EXISTS filters add ascending pairs for properties not already listed.
  for filt in exists_filters:
    prop = filt.property(0)
    prop_name = prop.name()
    for name, direction in props:
      if name == prop_name:
        break
    else:
      props.append((prop_name, ASCENDING))

  # Zero- or one-property non-ancestor kind queries are built in, except
  # that a descending sort on a special property still needs an index.
  if kind and not ancestor and len(props) <= 1:
    required = False
    if props:
      prop, direction = props[0]
      # BUGFIX: compare with ==, not 'is' -- the direction enums are plain
      # ints, so identity comparison only worked via CPython int caching.
      if (prop in datastore_types._SPECIAL_PROPERTIES and
          direction == DESCENDING):
        required = True

  # An index whose pairs all name the same property is never required.
  unique_names = set(name for name, _ in props)
  if len(props) > 1 and len(unique_names) == 1:
    required = False

  return (required, kind, ancestor, tuple(props), len(eq_filters))
def IndexYamlForQuery(kind, ancestor, props):
  """Return the composite index definition YAML needed for a query.

  The arguments are the same as the tuples returned by
  CompositeIndexForQuery, without the trailing neq element.

  Args:
    kind: the kind or None.
    ancestor: True if this is an ancestor query, False otherwise.
    props: tuples of the form (name, direction) where:
      name: a property name;
      direction: datastore_pb.Query_Order.ASCENDING or ...DESCENDING.

  Returns:
    A string with the YAML for the composite index needed by the query.
  """
  lines = ['- kind: %s' % kind]
  if ancestor:
    lines.append('  ancestor: yes')
  if props:
    lines.append('  properties:')
    for prop_name, prop_direction in props:
      lines.append('  - name: %s' % prop_name)
      if prop_direction == DESCENDING:
        lines.append('    direction: desc')
  return '\n'.join(lines)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
class PropertyValue_ReferenceValuePathElement(ProtocolBuffer.ProtocolMessage):
  """One (kind, id-or-name) element of a reference value's entity path.

  Generated protocol-buffer message encoded as a group (field 14) inside
  PropertyValue_ReferenceValue.  'type' is required; 'id' and 'name' are
  optional alternatives for identifying the element.
  """
  has_type_ = 0
  type_ = ""
  has_id_ = 0
  id_ = 0
  has_name_ = 0
  name_ = ""
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def type(self): return self.type_
  def set_type(self, x):
    self.has_type_ = 1
    self.type_ = x
  def clear_type(self):
    if self.has_type_:
      self.has_type_ = 0
      self.type_ = ""
  def has_type(self): return self.has_type_
  def id(self): return self.id_
  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x
  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0
  def has_id(self): return self.has_id_
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def MergeFrom(self, x):
    # Copy every set field of x into self; x's values win.
    assert x is not self
    if (x.has_type()): self.set_type(x.type())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_name()): self.set_name(x.name())
  def Equals(self, x):
    if x is self: return 1
    if self.has_type_ != x.has_type_: return 0
    if self.has_type_ and self.type_ != x.type_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Only 'type' is required.
    initialized = 1
    if (not self.has_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: type not set.')
    return initialized
  def ByteSize(self):
    # Wire size of the group body; the trailing +1 is type's 1-byte tag.
    n = 0
    n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 2 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 2 + self.lengthString(len(self.name_))
    return n + 1
  def Clear(self):
    self.clear_type()
    self.clear_id()
    self.clear_name()
  def OutputUnchecked(self, out):
    # Tags: 122 type (field 15, string), 128 id (field 16, varint),
    # 138 name (field 17, string).
    out.putVarInt32(122)
    out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(128)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(138)
      out.putPrefixedString(self.name_)
  def TryMerge(self, d):
    # Decode fields until the enclosing group's END_GROUP tag (116).
    while 1:
      tt = d.getVarInt32()
      if tt == 116: break
      if tt == 122:
        self.set_type(d.getPrefixedString())
        continue
      if tt == 128:
        self.set_id(d.getVarInt64())
        continue
      if tt == 138:
        self.set_name(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    return res
class PropertyValue_PointValue(ProtocolBuffer.ProtocolMessage):
  """A geographical point value, encoded as group 5 inside PropertyValue.

  Both coordinates are required doubles.
  """
  has_x_ = 0
  x_ = 0.0
  has_y_ = 0
  y_ = 0.0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def x(self): return self.x_
  def set_x(self, x):
    self.has_x_ = 1
    self.x_ = x
  def clear_x(self):
    if self.has_x_:
      self.has_x_ = 0
      self.x_ = 0.0
  def has_x(self): return self.has_x_
  def y(self): return self.y_
  def set_y(self, x):
    self.has_y_ = 1
    self.y_ = x
  def clear_y(self):
    if self.has_y_:
      self.has_y_ = 0
      self.y_ = 0.0
  def has_y(self): return self.has_y_
  def MergeFrom(self, x):
    # Copy every set field of x into self; x's values win.
    assert x is not self
    if (x.has_x()): self.set_x(x.x())
    if (x.has_y()): self.set_y(x.y())
  def Equals(self, x):
    if x is self: return 1
    if self.has_x_ != x.has_x_: return 0
    if self.has_x_ and self.x_ != x.x_: return 0
    if self.has_y_ != x.has_y_: return 0
    if self.has_y_ and self.y_ != x.y_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both coordinates are required.
    initialized = 1
    if (not self.has_x_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: x not set.')
    if (not self.has_y_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: y not set.')
    return initialized
  def ByteSize(self):
    # Fixed size: two 1-byte tags plus two 8-byte doubles = 18 bytes.
    n = 0
    return n + 18
  def Clear(self):
    self.clear_x()
    self.clear_y()
  def OutputUnchecked(self, out):
    # Tags: 49 x (field 6, 64-bit), 57 y (field 7, 64-bit).
    out.putVarInt32(49)
    out.putDouble(self.x_)
    out.putVarInt32(57)
    out.putDouble(self.y_)
  def TryMerge(self, d):
    # Decode fields until the enclosing group's END_GROUP tag (44).
    while 1:
      tt = d.getVarInt32()
      if tt == 44: break
      if tt == 49:
        self.set_x(d.getDouble())
        continue
      if tt == 57:
        self.set_y(d.getDouble())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_x_: res+=prefix+("x: %s\n" % self.DebugFormat(self.x_))
    if self.has_y_: res+=prefix+("y: %s\n" % self.DebugFormat(self.y_))
    return res
class PropertyValue_UserValue(ProtocolBuffer.ProtocolMessage):
  """A users.User property value, encoded as group 8 inside PropertyValue.

  email, auth_domain and gaiaid are required; nickname is optional.
  """
  has_email_ = 0
  email_ = ""
  has_auth_domain_ = 0
  auth_domain_ = ""
  has_nickname_ = 0
  nickname_ = ""
  has_gaiaid_ = 0
  gaiaid_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def email(self): return self.email_
  def set_email(self, x):
    self.has_email_ = 1
    self.email_ = x
  def clear_email(self):
    if self.has_email_:
      self.has_email_ = 0
      self.email_ = ""
  def has_email(self): return self.has_email_
  def auth_domain(self): return self.auth_domain_
  def set_auth_domain(self, x):
    self.has_auth_domain_ = 1
    self.auth_domain_ = x
  def clear_auth_domain(self):
    if self.has_auth_domain_:
      self.has_auth_domain_ = 0
      self.auth_domain_ = ""
  def has_auth_domain(self): return self.has_auth_domain_
  def nickname(self): return self.nickname_
  def set_nickname(self, x):
    self.has_nickname_ = 1
    self.nickname_ = x
  def clear_nickname(self):
    if self.has_nickname_:
      self.has_nickname_ = 0
      self.nickname_ = ""
  def has_nickname(self): return self.has_nickname_
  def gaiaid(self): return self.gaiaid_
  def set_gaiaid(self, x):
    self.has_gaiaid_ = 1
    self.gaiaid_ = x
  def clear_gaiaid(self):
    if self.has_gaiaid_:
      self.has_gaiaid_ = 0
      self.gaiaid_ = 0
  def has_gaiaid(self): return self.has_gaiaid_
  def MergeFrom(self, x):
    # Copy every set field of x into self; x's values win.
    assert x is not self
    if (x.has_email()): self.set_email(x.email())
    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
    if (x.has_nickname()): self.set_nickname(x.nickname())
    if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
  def Equals(self, x):
    if x is self: return 1
    if self.has_email_ != x.has_email_: return 0
    if self.has_email_ and self.email_ != x.email_: return 0
    if self.has_auth_domain_ != x.has_auth_domain_: return 0
    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
    if self.has_nickname_ != x.has_nickname_: return 0
    if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
    if self.has_gaiaid_ != x.has_gaiaid_: return 0
    if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # email, auth_domain and gaiaid are required fields.
    initialized = 1
    if (not self.has_email_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: email not set.')
    if (not self.has_auth_domain_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: auth_domain not set.')
    if (not self.has_gaiaid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: gaiaid not set.')
    return initialized
  def ByteSize(self):
    # +4 covers the required fields' tag bytes (1 each for email and
    # auth_domain, 2 for gaiaid whose tag encodes as two varint bytes).
    n = 0
    n += self.lengthString(len(self.email_))
    n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    n += self.lengthVarInt64(self.gaiaid_)
    return n + 4
  def Clear(self):
    self.clear_email()
    self.clear_auth_domain()
    self.clear_nickname()
    self.clear_gaiaid()
  def OutputUnchecked(self, out):
    # Tags: 74 email (field 9), 82 auth_domain (10), 90 nickname (11),
    # 144 gaiaid (field 18, varint).
    out.putVarInt32(74)
    out.putPrefixedString(self.email_)
    out.putVarInt32(82)
    out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(90)
      out.putPrefixedString(self.nickname_)
    out.putVarInt32(144)
    out.putVarInt64(self.gaiaid_)
  def TryMerge(self, d):
    # Decode fields until the enclosing group's END_GROUP tag (68).
    while 1:
      tt = d.getVarInt32()
      if tt == 68: break
      if tt == 74:
        self.set_email(d.getPrefixedString())
        continue
      if tt == 82:
        self.set_auth_domain(d.getPrefixedString())
        continue
      if tt == 90:
        self.set_nickname(d.getPrefixedString())
        continue
      if tt == 144:
        self.set_gaiaid(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
    if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
    if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
    return res
class PropertyValue_ReferenceValue(ProtocolBuffer.ProtocolMessage):
  """An entity-reference value, encoded as group 12 inside PropertyValue.

  Holds a required app id plus a repeated path of
  PropertyValue_ReferenceValuePathElement groups.
  """
  has_app_ = 0
  app_ = ""
  def __init__(self, contents=None):
    self.pathelement_ = []
    if contents is not None: self.MergeFromString(contents)
  def app(self): return self.app_
  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x
  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""
  def has_app(self): return self.has_app_
  def pathelement_size(self): return len(self.pathelement_)
  def pathelement_list(self): return self.pathelement_
  def pathelement(self, i):
    return self.pathelement_[i]
  def mutable_pathelement(self, i):
    return self.pathelement_[i]
  def add_pathelement(self):
    # Append a new empty path element and return it for in-place filling.
    x = PropertyValue_ReferenceValuePathElement()
    self.pathelement_.append(x)
    return x
  def clear_pathelement(self):
    self.pathelement_ = []
  def MergeFrom(self, x):
    # Copy app (if set) and deep-copy every path element of x.
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    for i in xrange(x.pathelement_size()): self.add_pathelement().CopyFrom(x.pathelement(i))
  def Equals(self, x):
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if len(self.pathelement_) != len(x.pathelement_): return 0
    for e1, e2 in zip(self.pathelement_, x.pathelement_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # 'app' is required; every path element must itself be initialized.
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    for p in self.pathelement_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # 2 bytes per path element cover its START_GROUP/END_GROUP tags;
    # the trailing +1 is app's tag byte.
    n = 0
    n += self.lengthString(len(self.app_))
    n += 2 * len(self.pathelement_)
    for i in xrange(len(self.pathelement_)): n += self.pathelement_[i].ByteSize()
    return n + 1
  def Clear(self):
    self.clear_app()
    self.clear_pathelement()
  def OutputUnchecked(self, out):
    # Tags: 106 app (field 13, string); each path element is wrapped in
    # START_GROUP 115 / END_GROUP 116 (field 14).
    out.putVarInt32(106)
    out.putPrefixedString(self.app_)
    for i in xrange(len(self.pathelement_)):
      out.putVarInt32(115)
      self.pathelement_[i].OutputUnchecked(out)
      out.putVarInt32(116)
  def TryMerge(self, d):
    # Decode fields until the enclosing group's END_GROUP tag (100).
    while 1:
      tt = d.getVarInt32()
      if tt == 100: break
      if tt == 106:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 115:
        self.add_pathelement().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    cnt=0
    for e in self.pathelement_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("PathElement%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
class PropertyValue(ProtocolBuffer.ProtocolMessage):
  """The value of a datastore property.

  Carries at most one of several typed alternatives: the scalars
  int64value, booleanvalue, stringvalue and doublevalue, and the
  lazily-allocated group-valued sub-messages pointvalue, uservalue and
  referencevalue.  All fields are optional on the wire.
  """
  has_int64value_ = 0
  int64value_ = 0
  has_booleanvalue_ = 0
  booleanvalue_ = 0
  has_stringvalue_ = 0
  stringvalue_ = ""
  has_doublevalue_ = 0
  doublevalue_ = 0.0
  has_pointvalue_ = 0
  pointvalue_ = None
  has_uservalue_ = 0
  uservalue_ = None
  has_referencevalue_ = 0
  referencevalue_ = None
  def __init__(self, contents=None):
    # Lock guarding lazy construction of the group-valued sub-messages.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def int64value(self): return self.int64value_
  def set_int64value(self, x):
    self.has_int64value_ = 1
    self.int64value_ = x
  def clear_int64value(self):
    if self.has_int64value_:
      self.has_int64value_ = 0
      self.int64value_ = 0
  def has_int64value(self): return self.has_int64value_
  def booleanvalue(self): return self.booleanvalue_
  def set_booleanvalue(self, x):
    self.has_booleanvalue_ = 1
    self.booleanvalue_ = x
  def clear_booleanvalue(self):
    if self.has_booleanvalue_:
      self.has_booleanvalue_ = 0
      self.booleanvalue_ = 0
  def has_booleanvalue(self): return self.has_booleanvalue_
  def stringvalue(self): return self.stringvalue_
  def set_stringvalue(self, x):
    self.has_stringvalue_ = 1
    self.stringvalue_ = x
  def clear_stringvalue(self):
    if self.has_stringvalue_:
      self.has_stringvalue_ = 0
      self.stringvalue_ = ""
  def has_stringvalue(self): return self.has_stringvalue_
  def doublevalue(self): return self.doublevalue_
  def set_doublevalue(self, x):
    self.has_doublevalue_ = 1
    self.doublevalue_ = x
  def clear_doublevalue(self):
    if self.has_doublevalue_:
      self.has_doublevalue_ = 0
      self.doublevalue_ = 0.0
  def has_doublevalue(self): return self.has_doublevalue_
  def pointvalue(self):
    # Lazily allocate the sub-message, double-checked under the lock.
    if self.pointvalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.pointvalue_ is None: self.pointvalue_ = PropertyValue_PointValue()
      finally:
        self.lazy_init_lock_.release()
    return self.pointvalue_
  def mutable_pointvalue(self): self.has_pointvalue_ = 1; return self.pointvalue()
  def clear_pointvalue(self):
    # Note: the sub-message object is kept (cleared, not dropped).
    if self.has_pointvalue_:
      self.has_pointvalue_ = 0;
      if self.pointvalue_ is not None: self.pointvalue_.Clear()
  def has_pointvalue(self): return self.has_pointvalue_
  def uservalue(self):
    # Lazily allocate the sub-message, double-checked under the lock.
    if self.uservalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.uservalue_ is None: self.uservalue_ = PropertyValue_UserValue()
      finally:
        self.lazy_init_lock_.release()
    return self.uservalue_
  def mutable_uservalue(self): self.has_uservalue_ = 1; return self.uservalue()
  def clear_uservalue(self):
    if self.has_uservalue_:
      self.has_uservalue_ = 0;
      if self.uservalue_ is not None: self.uservalue_.Clear()
  def has_uservalue(self): return self.has_uservalue_
  def referencevalue(self):
    # Lazily allocate the sub-message, double-checked under the lock.
    if self.referencevalue_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.referencevalue_ is None: self.referencevalue_ = PropertyValue_ReferenceValue()
      finally:
        self.lazy_init_lock_.release()
    return self.referencevalue_
  def mutable_referencevalue(self): self.has_referencevalue_ = 1; return self.referencevalue()
  def clear_referencevalue(self):
    if self.has_referencevalue_:
      self.has_referencevalue_ = 0;
      if self.referencevalue_ is not None: self.referencevalue_.Clear()
  def has_referencevalue(self): return self.has_referencevalue_
  def MergeFrom(self, x):
    # Copy every set field of x into self; x's values win.
    assert x is not self
    if (x.has_int64value()): self.set_int64value(x.int64value())
    if (x.has_booleanvalue()): self.set_booleanvalue(x.booleanvalue())
    if (x.has_stringvalue()): self.set_stringvalue(x.stringvalue())
    if (x.has_doublevalue()): self.set_doublevalue(x.doublevalue())
    if (x.has_pointvalue()): self.mutable_pointvalue().MergeFrom(x.pointvalue())
    if (x.has_uservalue()): self.mutable_uservalue().MergeFrom(x.uservalue())
    if (x.has_referencevalue()): self.mutable_referencevalue().MergeFrom(x.referencevalue())
  def Equals(self, x):
    if x is self: return 1
    if self.has_int64value_ != x.has_int64value_: return 0
    if self.has_int64value_ and self.int64value_ != x.int64value_: return 0
    if self.has_booleanvalue_ != x.has_booleanvalue_: return 0
    if self.has_booleanvalue_ and self.booleanvalue_ != x.booleanvalue_: return 0
    if self.has_stringvalue_ != x.has_stringvalue_: return 0
    if self.has_stringvalue_ and self.stringvalue_ != x.stringvalue_: return 0
    if self.has_doublevalue_ != x.has_doublevalue_: return 0
    if self.has_doublevalue_ and self.doublevalue_ != x.doublevalue_: return 0
    if self.has_pointvalue_ != x.has_pointvalue_: return 0
    if self.has_pointvalue_ and self.pointvalue_ != x.pointvalue_: return 0
    if self.has_uservalue_ != x.has_uservalue_: return 0
    if self.has_uservalue_ and self.uservalue_ != x.uservalue_: return 0
    if self.has_referencevalue_ != x.has_referencevalue_: return 0
    if self.has_referencevalue_ and self.referencevalue_ != x.referencevalue_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; set sub-messages must be complete.
    initialized = 1
    if (self.has_pointvalue_ and not self.pointvalue_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_uservalue_ and not self.uservalue_.IsInitialized(debug_strs)): initialized = 0
    if (self.has_referencevalue_ and not self.referencevalue_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    # Every field is optional, so size is a pure sum of set fields.
    n = 0
    if (self.has_int64value_): n += 1 + self.lengthVarInt64(self.int64value_)
    if (self.has_booleanvalue_): n += 2
    if (self.has_stringvalue_): n += 1 + self.lengthString(len(self.stringvalue_))
    if (self.has_doublevalue_): n += 9
    if (self.has_pointvalue_): n += 2 + self.pointvalue_.ByteSize()
    if (self.has_uservalue_): n += 2 + self.uservalue_.ByteSize()
    if (self.has_referencevalue_): n += 2 + self.referencevalue_.ByteSize()
    return n + 0
  def Clear(self):
    self.clear_int64value()
    self.clear_booleanvalue()
    self.clear_stringvalue()
    self.clear_doublevalue()
    self.clear_pointvalue()
    self.clear_uservalue()
    self.clear_referencevalue()
  def OutputUnchecked(self, out):
    # Tags: 8 int64 (field 1), 16 bool (2), 26 string (3), 33 double (4);
    # groups 43/44 point (5), 67/68 user (8), 99/100 reference (12).
    if (self.has_int64value_):
      out.putVarInt32(8)
      out.putVarInt64(self.int64value_)
    if (self.has_booleanvalue_):
      out.putVarInt32(16)
      out.putBoolean(self.booleanvalue_)
    if (self.has_stringvalue_):
      out.putVarInt32(26)
      out.putPrefixedString(self.stringvalue_)
    if (self.has_doublevalue_):
      out.putVarInt32(33)
      out.putDouble(self.doublevalue_)
    if (self.has_pointvalue_):
      out.putVarInt32(43)
      self.pointvalue_.OutputUnchecked(out)
      out.putVarInt32(44)
    if (self.has_uservalue_):
      out.putVarInt32(67)
      self.uservalue_.OutputUnchecked(out)
      out.putVarInt32(68)
    if (self.has_referencevalue_):
      out.putVarInt32(99)
      self.referencevalue_.OutputUnchecked(out)
      out.putVarInt32(100)
  def TryMerge(self, d):
    # Top-level message: consume the decoder until it is exhausted.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_int64value(d.getVarInt64())
        continue
      if tt == 16:
        self.set_booleanvalue(d.getBoolean())
        continue
      if tt == 26:
        self.set_stringvalue(d.getPrefixedString())
        continue
      if tt == 33:
        self.set_doublevalue(d.getDouble())
        continue
      if tt == 43:
        self.mutable_pointvalue().TryMerge(d)
        continue
      if tt == 67:
        self.mutable_uservalue().TryMerge(d)
        continue
      if tt == 99:
        self.mutable_referencevalue().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_int64value_: res+=prefix+("int64Value: %s\n" % self.DebugFormatInt64(self.int64value_))
    if self.has_booleanvalue_: res+=prefix+("booleanValue: %s\n" % self.DebugFormatBool(self.booleanvalue_))
    if self.has_stringvalue_: res+=prefix+("stringValue: %s\n" % self.DebugFormatString(self.stringvalue_))
    if self.has_doublevalue_: res+=prefix+("doubleValue: %s\n" % self.DebugFormat(self.doublevalue_))
    if self.has_pointvalue_:
      res+=prefix+"PointValue {\n"
      res+=self.pointvalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_uservalue_:
      res+=prefix+"UserValue {\n"
      res+=self.uservalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    if self.has_referencevalue_:
      res+=prefix+"ReferenceValue {\n"
      res+=self.referencevalue_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
    return res
  # Protocol-buffer field numbers for every field and group.
  kint64Value = 1
  kbooleanValue = 2
  kstringValue = 3
  kdoubleValue = 4
  kPointValueGroup = 5
  kPointValuex = 6
  kPointValuey = 7
  kUserValueGroup = 8
  kUserValueemail = 9
  kUserValueauth_domain = 10
  kUserValuenickname = 11
  kUserValuegaiaid = 18
  kReferenceValueGroup = 12
  kReferenceValueapp = 13
  kReferenceValuePathElementGroup = 14
  kReferenceValuePathElementtype = 15
  kReferenceValuePathElementid = 16
  kReferenceValuePathElementname = 17
  # Field-name and wire-type tables indexed by field number (slot 0 is
  # the error slot), used by the generic ProtocolBuffer text machinery.
  _TEXT = (
   "ErrorCode",
   "int64Value",
   "booleanValue",
   "stringValue",
   "doubleValue",
   "PointValue",
   "x",
   "y",
   "UserValue",
   "email",
   "auth_domain",
   "nickname",
   "ReferenceValue",
   "app",
   "PathElement",
   "type",
   "id",
   "name",
   "gaiaid",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.DOUBLE,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.DOUBLE,
   ProtocolBuffer.Encoder.DOUBLE,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Property(ProtocolBuffer.ProtocolMessage):
BLOB = 14
TEXT = 15
BYTESTRING = 16
ATOM_CATEGORY = 1
ATOM_LINK = 2
ATOM_TITLE = 3
ATOM_CONTENT = 4
ATOM_SUMMARY = 5
ATOM_AUTHOR = 6
GD_WHEN = 7
GD_EMAIL = 8
GEORSS_POINT = 9
GD_IM = 10
GD_PHONENUMBER = 11
GD_POSTALADDRESS = 12
GD_RATING = 13
_Meaning_NAMES = {
14: "BLOB",
15: "TEXT",
16: "BYTESTRING",
1: "ATOM_CATEGORY",
2: "ATOM_LINK",
3: "ATOM_TITLE",
4: "ATOM_CONTENT",
5: "ATOM_SUMMARY",
6: "ATOM_AUTHOR",
7: "GD_WHEN",
8: "GD_EMAIL",
9: "GEORSS_POINT",
10: "GD_IM",
11: "GD_PHONENUMBER",
12: "GD_POSTALADDRESS",
13: "GD_RATING",
}
def Meaning_Name(cls, x): return cls._Meaning_NAMES.get(x, "")
Meaning_Name = classmethod(Meaning_Name)
has_meaning_ = 0
meaning_ = 0
has_meaning_uri_ = 0
meaning_uri_ = ""
has_name_ = 0
name_ = ""
has_value_ = 0
has_multiple_ = 0
multiple_ = 0
def __init__(self, contents=None):
self.value_ = PropertyValue()
if contents is not None: self.MergeFromString(contents)
def meaning(self): return self.meaning_
def set_meaning(self, x):
self.has_meaning_ = 1
self.meaning_ = x
def clear_meaning(self):
if self.has_meaning_:
self.has_meaning_ = 0
self.meaning_ = 0
def has_meaning(self): return self.has_meaning_
def meaning_uri(self): return self.meaning_uri_
def set_meaning_uri(self, x):
self.has_meaning_uri_ = 1
self.meaning_uri_ = x
def clear_meaning_uri(self):
if self.has_meaning_uri_:
self.has_meaning_uri_ = 0
self.meaning_uri_ = ""
def has_meaning_uri(self): return self.has_meaning_uri_
def name(self): return self.name_
def set_name(self, x):
self.has_name_ = 1
self.name_ = x
def clear_name(self):
if self.has_name_:
self.has_name_ = 0
self.name_ = ""
def has_name(self): return self.has_name_
def value(self): return self.value_
def mutable_value(self): self.has_value_ = 1; return self.value_
def clear_value(self):self.has_value_ = 0; self.value_.Clear()
def has_value(self): return self.has_value_
def multiple(self): return self.multiple_
def set_multiple(self, x):
self.has_multiple_ = 1
self.multiple_ = x
def clear_multiple(self):
if self.has_multiple_:
self.has_multiple_ = 0
self.multiple_ = 0
def has_multiple(self): return self.has_multiple_
  def MergeFrom(self, x):
    """Copies every field that is present on x into self (protobuf merge)."""
    assert x is not self
    if (x.has_meaning()): self.set_meaning(x.meaning())
    if (x.has_meaning_uri()): self.set_meaning_uri(x.meaning_uri())
    if (x.has_name()): self.set_name(x.name())
    if (x.has_value()): self.mutable_value().MergeFrom(x.value())
    if (x.has_multiple()): self.set_multiple(x.multiple())
  def Equals(self, x):
    """Field-by-field equality: presence flags must match, then values."""
    if x is self: return 1
    if self.has_meaning_ != x.has_meaning_: return 0
    if self.has_meaning_ and self.meaning_ != x.meaning_: return 0
    if self.has_meaning_uri_ != x.has_meaning_uri_: return 0
    if self.has_meaning_uri_ and self.meaning_uri_ != x.meaning_uri_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    if self.has_multiple_ != x.has_multiple_: return 0
    if self.has_multiple_ and self.multiple_ != x.multiple_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff required fields name and value are set (and value is
    itself initialized); appends human-readable reasons to debug_strs."""
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    elif not self.value_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    """Serialized size in bytes.  The trailing + 2 covers the one-byte wire
    tags of the two required fields: name (tag 26) and value (tag 42)."""
    n = 0
    if (self.has_meaning_): n += 1 + self.lengthVarInt64(self.meaning_)
    if (self.has_meaning_uri_): n += 1 + self.lengthString(len(self.meaning_uri_))
    n += self.lengthString(len(self.name_))
    n += self.lengthString(self.value_.ByteSize())
    if (self.has_multiple_): n += 2
    return n + 2
  def Clear(self):
    # Resets every field to its default and clears all presence flags.
    self.clear_meaning()
    self.clear_meaning_uri()
    self.clear_name()
    self.clear_value()
    self.clear_multiple()
  def OutputUnchecked(self, out):
    """Writes fields to out in ascending tag order: 8 meaning, 18
    meaning_uri, 26 name, 32 multiple, 42 value (length-prefixed)."""
    if (self.has_meaning_):
      out.putVarInt32(8)
      out.putVarInt32(self.meaning_)
    if (self.has_meaning_uri_):
      out.putVarInt32(18)
      out.putPrefixedString(self.meaning_uri_)
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    if (self.has_multiple_):
      out.putVarInt32(32)
      out.putBoolean(self.multiple_)
    out.putVarInt32(42)
    out.putVarInt32(self.value_.ByteSize())
    self.value_.OutputUnchecked(out)
  def TryMerge(self, d):
    """Decodes fields from decoder d until it is exhausted; unknown tags
    are skipped, and tag 0 means a corrupt stream."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_meaning(d.getVarInt32())
        continue
      if tt == 18:
        self.set_meaning_uri(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_multiple(d.getBoolean())
        continue
      if tt == 42:
        # Nested value message: decode through a sub-decoder bounded to the
        # length prefix, then skip past it in the outer decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_value().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # ASCII debug dump of all present fields, indented by prefix.
    res=""
    if self.has_meaning_: res+=prefix+("meaning: %s\n" % self.DebugFormatInt32(self.meaning_))
    if self.has_meaning_uri_: res+=prefix+("meaning_uri: %s\n" % self.DebugFormatString(self.meaning_uri_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_value_:
      res+=prefix+"value <\n"
      res+=self.value_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_multiple_: res+=prefix+("multiple: %s\n" % self.DebugFormatBool(self.multiple_))
    return res
kmeaning = 1
kmeaning_uri = 2
kname = 3
kvalue = 5
kmultiple = 4
_TEXT = (
"ErrorCode",
"meaning",
"meaning_uri",
"name",
"multiple",
"value",
)
_TYPES = (
ProtocolBuffer.Encoder.NUMERIC,
ProtocolBuffer.Encoder.NUMERIC,
ProtocolBuffer.Encoder.STRING,
ProtocolBuffer.Encoder.STRING,
ProtocolBuffer.Encoder.NUMERIC,
ProtocolBuffer.Encoder.STRING,
)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
class Path_Element(ProtocolBuffer.ProtocolMessage):
  """One element of an entity key path: a required kind string (type) plus
  at most one identifier, either a numeric id or a string name.

  This message is encoded as a protobuf *group* inside Path (start/end
  tags 11/12 are written by Path.OutputUnchecked), which is why TryMerge
  loops until it sees the END_GROUP tag 12 rather than end-of-buffer.
  """
  # Presence flags and defaults (class-level; shadowed per-instance on set).
  has_type_ = 0
  type_ = ""
  has_id_ = 0
  id_ = 0
  has_name_ = 0
  name_ = ""
  def __init__(self, contents=None):
    # Optionally deserialize from a byte string.
    if contents is not None: self.MergeFromString(contents)
  # --- type: required string, field 2 ---
  def type(self): return self.type_
  def set_type(self, x):
    self.has_type_ = 1
    self.type_ = x
  def clear_type(self):
    if self.has_type_:
      self.has_type_ = 0
      self.type_ = ""
  def has_type(self): return self.has_type_
  # --- id: optional int64, field 3 ---
  def id(self): return self.id_
  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x
  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0
  def has_id(self): return self.has_id_
  # --- name: optional string, field 4 ---
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  def MergeFrom(self, x):
    """Copies every present field of x into self."""
    assert x is not self
    if (x.has_type()): self.set_type(x.type())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_name()): self.set_name(x.name())
  def Equals(self, x):
    """Presence-and-value equality over all three fields."""
    if x is self: return 1
    if self.has_type_ != x.has_type_: return 0
    if self.has_type_ and self.type_ != x.type_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the required type field is set."""
    initialized = 1
    if (not self.has_type_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: type not set.')
    return initialized
  def ByteSize(self):
    # + 1 is the one-byte wire tag (18) of the required type field.
    n = 0
    n += self.lengthString(len(self.type_))
    if (self.has_id_): n += 1 + self.lengthVarInt64(self.id_)
    if (self.has_name_): n += 1 + self.lengthString(len(self.name_))
    return n + 1
  def Clear(self):
    self.clear_type()
    self.clear_id()
    self.clear_name()
  def OutputUnchecked(self, out):
    # Emits tags 18 (type), 24 (id), 34 (name); the enclosing Path writes
    # the surrounding group start/end tags.
    out.putVarInt32(18)
    out.putPrefixedString(self.type_)
    if (self.has_id_):
      out.putVarInt32(24)
      out.putVarInt64(self.id_)
    if (self.has_name_):
      out.putVarInt32(34)
      out.putPrefixedString(self.name_)
  def TryMerge(self, d):
    # Group decoding: consume tags until the END_GROUP tag (12) closes the
    # element; tag 0 indicates a corrupt stream.
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_type(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_id(d.getVarInt64())
        continue
      if tt == 34:
        self.set_name(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # ASCII debug dump of present fields.
    res=""
    if self.has_type_: res+=prefix+("type: %s\n" % self.DebugFormatString(self.type_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    return res
class Path(ProtocolBuffer.ProtocolMessage):
  """An entity key path: a repeated group of Path_Element messages
  (kind/id/name triples) from root ancestor down to the entity itself.
  """
  def __init__(self, contents=None):
    # element is the only field; repeated, so held as a Python list.
    self.element_ = []
    if contents is not None: self.MergeFromString(contents)
  # --- element: repeated group Element, field 1 ---
  def element_size(self): return len(self.element_)
  def element_list(self): return self.element_
  def element(self, i):
    return self.element_[i]
  def mutable_element(self, i):
    return self.element_[i]
  def add_element(self):
    # Appends a fresh, empty element and returns it for in-place filling.
    x = Path_Element()
    self.element_.append(x)
    return x
  def clear_element(self):
    self.element_ = []
  def MergeFrom(self, x):
    """Appends deep copies of all of x's elements onto self."""
    assert x is not self
    for i in xrange(x.element_size()): self.add_element().CopyFrom(x.element(i))
  def Equals(self, x):
    """Equal iff the element lists match pairwise in order."""
    if x is self: return 1
    if len(self.element_) != len(x.element_): return 0
    for e1, e2 in zip(self.element_, x.element_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Initialized iff every contained element is initialized."""
    initialized = 1
    for p in self.element_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # Each element costs its payload plus 2 bytes for the group start/end
    # tags (11 and 12, one byte each).
    n = 0
    n += 2 * len(self.element_)
    for i in xrange(len(self.element_)): n += self.element_[i].ByteSize()
    return n + 0
  def Clear(self):
    self.clear_element()
  def OutputUnchecked(self, out):
    # Each element is framed with START_GROUP (11) / END_GROUP (12) tags.
    for i in xrange(len(self.element_)):
      out.putVarInt32(11)
      self.element_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    # Tag 11 opens an element group; Path_Element.TryMerge consumes
    # everything up to and including the matching END_GROUP tag.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_element().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # ASCII debug dump; printElemNumber adds a (index) suffix per element.
    res=""
    cnt=0
    for e in self.element_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Element%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  # Field numbers: the Element group is field 1; its members occupy the
  # generator's flat numbering 2-4.
  kElementGroup = 1
  kElementtype = 2
  kElementid = 3
  kElementname = 4
  # Text names and wire types indexed by field number (slot 0 is the
  # implicit "ErrorCode" entry).
  _TEXT = (
   "ErrorCode",
   "Element",
   "type",
   "id",
   "name",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Reference(ProtocolBuffer.ProtocolMessage):
  """A complete entity key: the owning app id plus the key Path.
  Both fields are required; field numbers start at 13 (app) and 14 (path),
  hence the wire tags 106 and 114 below.
  """
  has_app_ = 0
  app_ = ""
  has_path_ = 0
  def __init__(self, contents=None):
    # path is a required message field, constructed eagerly.
    self.path_ = Path()
    if contents is not None: self.MergeFromString(contents)
  # --- app: required string, field 13 ---
  def app(self): return self.app_
  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x
  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""
  def has_app(self): return self.has_app_
  # --- path: required Path message, field 14; mutable_path marks presence ---
  def path(self): return self.path_
  def mutable_path(self): self.has_path_ = 1; return self.path_
  def clear_path(self):self.has_path_ = 0; self.path_.Clear()
  def has_path(self): return self.has_path_
  def MergeFrom(self, x):
    """Copies every present field of x into self."""
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    if (x.has_path()): self.mutable_path().MergeFrom(x.path())
  def Equals(self, x):
    """Presence-and-value equality over app and path."""
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_path_ != x.has_path_: return 0
    if self.has_path_ and self.path_ != x.path_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff required app and path are set and path is initialized."""
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    if (not self.has_path_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: path not set.')
    elif not self.path_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    # + 2 covers the one-byte wire tags of app (106) and path (114).
    n = 0
    n += self.lengthString(len(self.app_))
    n += self.lengthString(self.path_.ByteSize())
    return n + 2
  def Clear(self):
    self.clear_app()
    self.clear_path()
  def OutputUnchecked(self, out):
    # Emits app (tag 106) then path (tag 114, length-prefixed submessage).
    out.putVarInt32(106)
    out.putPrefixedString(self.app_)
    out.putVarInt32(114)
    out.putVarInt32(self.path_.ByteSize())
    self.path_.OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 106:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 114:
        # Length-delimited Path submessage, decoded via a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_path().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # ASCII debug dump of present fields.
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_path_:
      res+=prefix+"path <\n"
      res+=self.path_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field numbers 1-12 are unused in this message, so the metadata tables
  # below pad those slots with None / MAX_TYPE placeholders.
  kapp = 13
  kpath = 14
  _TEXT = (
   "ErrorCode",
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   "app",
   "path",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class User(ProtocolBuffer.ProtocolMessage):
  """A user value stored in the datastore: required email, auth_domain and
  gaiaid, plus an optional nickname.
  """
  # Presence flags and defaults.
  has_email_ = 0
  email_ = ""
  has_auth_domain_ = 0
  auth_domain_ = ""
  has_nickname_ = 0
  nickname_ = ""
  has_gaiaid_ = 0
  gaiaid_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  # --- email: required string, field 1 ---
  def email(self): return self.email_
  def set_email(self, x):
    self.has_email_ = 1
    self.email_ = x
  def clear_email(self):
    if self.has_email_:
      self.has_email_ = 0
      self.email_ = ""
  def has_email(self): return self.has_email_
  # --- auth_domain: required string, field 2 ---
  def auth_domain(self): return self.auth_domain_
  def set_auth_domain(self, x):
    self.has_auth_domain_ = 1
    self.auth_domain_ = x
  def clear_auth_domain(self):
    if self.has_auth_domain_:
      self.has_auth_domain_ = 0
      self.auth_domain_ = ""
  def has_auth_domain(self): return self.has_auth_domain_
  # --- nickname: optional string, field 3 ---
  def nickname(self): return self.nickname_
  def set_nickname(self, x):
    self.has_nickname_ = 1
    self.nickname_ = x
  def clear_nickname(self):
    if self.has_nickname_:
      self.has_nickname_ = 0
      self.nickname_ = ""
  def has_nickname(self): return self.has_nickname_
  # --- gaiaid: required int64, field 4 ---
  def gaiaid(self): return self.gaiaid_
  def set_gaiaid(self, x):
    self.has_gaiaid_ = 1
    self.gaiaid_ = x
  def clear_gaiaid(self):
    if self.has_gaiaid_:
      self.has_gaiaid_ = 0
      self.gaiaid_ = 0
  def has_gaiaid(self): return self.has_gaiaid_
  def MergeFrom(self, x):
    """Copies every present field of x into self."""
    assert x is not self
    if (x.has_email()): self.set_email(x.email())
    if (x.has_auth_domain()): self.set_auth_domain(x.auth_domain())
    if (x.has_nickname()): self.set_nickname(x.nickname())
    if (x.has_gaiaid()): self.set_gaiaid(x.gaiaid())
  def Equals(self, x):
    """Presence-and-value equality over all four fields."""
    if x is self: return 1
    if self.has_email_ != x.has_email_: return 0
    if self.has_email_ and self.email_ != x.email_: return 0
    if self.has_auth_domain_ != x.has_auth_domain_: return 0
    if self.has_auth_domain_ and self.auth_domain_ != x.auth_domain_: return 0
    if self.has_nickname_ != x.has_nickname_: return 0
    if self.has_nickname_ and self.nickname_ != x.nickname_: return 0
    if self.has_gaiaid_ != x.has_gaiaid_: return 0
    if self.has_gaiaid_ and self.gaiaid_ != x.gaiaid_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff email, auth_domain and gaiaid are all set."""
    initialized = 1
    if (not self.has_email_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: email not set.')
    if (not self.has_auth_domain_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: auth_domain not set.')
    if (not self.has_gaiaid_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: gaiaid not set.')
    return initialized
  def ByteSize(self):
    # + 3 covers the one-byte wire tags of the three required fields:
    # email (10), auth_domain (18), gaiaid (32).
    n = 0
    n += self.lengthString(len(self.email_))
    n += self.lengthString(len(self.auth_domain_))
    if (self.has_nickname_): n += 1 + self.lengthString(len(self.nickname_))
    n += self.lengthVarInt64(self.gaiaid_)
    return n + 3
  def Clear(self):
    self.clear_email()
    self.clear_auth_domain()
    self.clear_nickname()
    self.clear_gaiaid()
  def OutputUnchecked(self, out):
    # Emits fields in ascending tag order: 10, 18, 26 (optional), 32.
    out.putVarInt32(10)
    out.putPrefixedString(self.email_)
    out.putVarInt32(18)
    out.putPrefixedString(self.auth_domain_)
    if (self.has_nickname_):
      out.putVarInt32(26)
      out.putPrefixedString(self.nickname_)
    out.putVarInt32(32)
    out.putVarInt64(self.gaiaid_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_email(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_auth_domain(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_nickname(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_gaiaid(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # ASCII debug dump of present fields.
    res=""
    if self.has_email_: res+=prefix+("email: %s\n" % self.DebugFormatString(self.email_))
    if self.has_auth_domain_: res+=prefix+("auth_domain: %s\n" % self.DebugFormatString(self.auth_domain_))
    if self.has_nickname_: res+=prefix+("nickname: %s\n" % self.DebugFormatString(self.nickname_))
    if self.has_gaiaid_: res+=prefix+("gaiaid: %s\n" % self.DebugFormatInt64(self.gaiaid_))
    return res
  # Field numbers and per-field metadata (slot 0 is "ErrorCode").
  kemail = 1
  kauth_domain = 2
  knickname = 3
  kgaiaid = 4
  _TEXT = (
   "ErrorCode",
   "email",
   "auth_domain",
   "nickname",
   "gaiaid",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class EntityProto(ProtocolBuffer.ProtocolMessage):
  """The wire form of a datastore entity: required key (Reference) and
  entity_group (Path), optional owner (User, lazily allocated), optional
  kind enum + kind_uri, and repeated property / raw_property lists.
  """
  # Kind enum values (GData-style kinds).
  GD_CONTACT   =    1
  GD_EVENT     =    2
  GD_MESSAGE   =    3
  _Kind_NAMES = {
    1: "GD_CONTACT",
    2: "GD_EVENT",
    3: "GD_MESSAGE",
  }
  # Returns the symbolic name for a Kind enum value, or "" if unknown.
  def Kind_Name(cls, x): return cls._Kind_NAMES.get(x, "")
  Kind_Name = classmethod(Kind_Name)
  # Presence flags and scalar defaults.  owner_ defaults to None because
  # the User submessage is created lazily (see owner()).
  has_key_ = 0
  has_entity_group_ = 0
  has_owner_ = 0
  owner_ = None
  has_kind_ = 0
  kind_ = 0
  has_kind_uri_ = 0
  kind_uri_ = ""
  def __init__(self, contents=None):
    # Required submessages are built eagerly; owner stays lazy, guarded by
    # lazy_init_lock_ (Python 2 `thread` module lock).
    self.key_ = Reference()
    self.entity_group_ = Path()
    self.property_ = []
    self.raw_property_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  # --- key: required Reference, field 13 ---
  def key(self): return self.key_
  def mutable_key(self): self.has_key_ = 1; return self.key_
  def clear_key(self):self.has_key_ = 0; self.key_.Clear()
  def has_key(self): return self.has_key_
  # --- entity_group: required Path, field 16 ---
  def entity_group(self): return self.entity_group_
  def mutable_entity_group(self): self.has_entity_group_ = 1; return self.entity_group_
  def clear_entity_group(self):self.has_entity_group_ = 0; self.entity_group_.Clear()
  def has_entity_group(self): return self.has_entity_group_
  # --- owner: optional User, field 17; allocated on first access using
  #     double-checked locking so concurrent readers build it only once ---
  def owner(self):
    if self.owner_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.owner_ is None: self.owner_ = User()
      finally:
        self.lazy_init_lock_.release()
    return self.owner_
  def mutable_owner(self): self.has_owner_ = 1; return self.owner()
  def clear_owner(self):
    # Unlike scalar fields, the submessage (if built) is kept and reset,
    # not discarded.
    if self.has_owner_:
      self.has_owner_ = 0;
      if self.owner_ is not None: self.owner_.Clear()
  def has_owner(self): return self.has_owner_
  # --- kind: optional enum, field 4 ---
  def kind(self): return self.kind_
  def set_kind(self, x):
    self.has_kind_ = 1
    self.kind_ = x
  def clear_kind(self):
    if self.has_kind_:
      self.has_kind_ = 0
      self.kind_ = 0
  def has_kind(self): return self.has_kind_
  # --- kind_uri: optional string, field 5 ---
  def kind_uri(self): return self.kind_uri_
  def set_kind_uri(self, x):
    self.has_kind_uri_ = 1
    self.kind_uri_ = x
  def clear_kind_uri(self):
    if self.has_kind_uri_:
      self.has_kind_uri_ = 0
      self.kind_uri_ = ""
  def has_kind_uri(self): return self.has_kind_uri_
  # --- property: repeated Property, field 14 ---
  def property_size(self): return len(self.property_)
  def property_list(self): return self.property_
  def property(self, i):
    return self.property_[i]
  def mutable_property(self, i):
    return self.property_[i]
  def add_property(self):
    x = Property()
    self.property_.append(x)
    return x
  def clear_property(self):
    self.property_ = []
  # --- raw_property: repeated Property, field 15 (unindexed values) ---
  def raw_property_size(self): return len(self.raw_property_)
  def raw_property_list(self): return self.raw_property_
  def raw_property(self, i):
    return self.raw_property_[i]
  def mutable_raw_property(self, i):
    return self.raw_property_[i]
  def add_raw_property(self):
    x = Property()
    self.raw_property_.append(x)
    return x
  def clear_raw_property(self):
    self.raw_property_ = []
  def MergeFrom(self, x):
    """Copies x's present singular fields and appends copies of its
    repeated property/raw_property entries."""
    assert x is not self
    if (x.has_key()): self.mutable_key().MergeFrom(x.key())
    if (x.has_entity_group()): self.mutable_entity_group().MergeFrom(x.entity_group())
    if (x.has_owner()): self.mutable_owner().MergeFrom(x.owner())
    if (x.has_kind()): self.set_kind(x.kind())
    if (x.has_kind_uri()): self.set_kind_uri(x.kind_uri())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
    for i in xrange(x.raw_property_size()): self.add_raw_property().CopyFrom(x.raw_property(i))
  def Equals(self, x):
    """Presence-and-value equality; repeated fields compare pairwise."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_entity_group_ != x.has_entity_group_: return 0
    if self.has_entity_group_ and self.entity_group_ != x.entity_group_: return 0
    if self.has_owner_ != x.has_owner_: return 0
    if self.has_owner_ and self.owner_ != x.owner_: return 0
    if self.has_kind_ != x.has_kind_: return 0
    if self.has_kind_ and self.kind_ != x.kind_: return 0
    if self.has_kind_uri_ != x.has_kind_uri_: return 0
    if self.has_kind_uri_ and self.kind_uri_ != x.kind_uri_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    if len(self.raw_property_) != len(x.raw_property_): return 0
    for e1, e2 in zip(self.raw_property_, x.raw_property_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff required key and entity_group are set and every
    present submessage (including each property) is initialized."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    elif not self.key_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_entity_group_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: entity_group not set.')
    elif not self.entity_group_.IsInitialized(debug_strs): initialized = 0
    if (self.has_owner_ and not self.owner_.IsInitialized(debug_strs)): initialized = 0
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.raw_property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # owner costs 2 tag bytes because field 17's tag (138) needs a 2-byte
    # varint; the final + 3 is key's 1-byte tag (106) plus entity_group's
    # 2-byte tag (130).
    n = 0
    n += self.lengthString(self.key_.ByteSize())
    n += self.lengthString(self.entity_group_.ByteSize())
    if (self.has_owner_): n += 2 + self.lengthString(self.owner_.ByteSize())
    if (self.has_kind_): n += 1 + self.lengthVarInt64(self.kind_)
    if (self.has_kind_uri_): n += 1 + self.lengthString(len(self.kind_uri_))
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
    n += 1 * len(self.raw_property_)
    for i in xrange(len(self.raw_property_)): n += self.lengthString(self.raw_property_[i].ByteSize())
    return n + 3
  def Clear(self):
    self.clear_key()
    self.clear_entity_group()
    self.clear_owner()
    self.clear_kind()
    self.clear_kind_uri()
    self.clear_property()
    self.clear_raw_property()
  def OutputUnchecked(self, out):
    # Emits fields in ascending tag order: 32 kind, 42 kind_uri, 106 key,
    # 114 property*, 122 raw_property*, 130 entity_group, 138 owner.
    if (self.has_kind_):
      out.putVarInt32(32)
      out.putVarInt32(self.kind_)
    if (self.has_kind_uri_):
      out.putVarInt32(42)
      out.putPrefixedString(self.kind_uri_)
    out.putVarInt32(106)
    out.putVarInt32(self.key_.ByteSize())
    self.key_.OutputUnchecked(out)
    for i in xrange(len(self.property_)):
      out.putVarInt32(114)
      out.putVarInt32(self.property_[i].ByteSize())
      self.property_[i].OutputUnchecked(out)
    for i in xrange(len(self.raw_property_)):
      out.putVarInt32(122)
      out.putVarInt32(self.raw_property_[i].ByteSize())
      self.raw_property_[i].OutputUnchecked(out)
    out.putVarInt32(130)
    out.putVarInt32(self.entity_group_.ByteSize())
    self.entity_group_.OutputUnchecked(out)
    if (self.has_owner_):
      out.putVarInt32(138)
      out.putVarInt32(self.owner_.ByteSize())
      self.owner_.OutputUnchecked(out)
  def TryMerge(self, d):
    # Each length-delimited submessage is decoded through a bounded
    # sub-decoder; unknown tags are skipped, tag 0 is malformed.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 32:
        self.set_kind(d.getVarInt32())
        continue
      if tt == 42:
        self.set_kind_uri(d.getPrefixedString())
        continue
      if tt == 106:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_key().TryMerge(tmp)
        continue
      if tt == 114:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_property().TryMerge(tmp)
        continue
      if tt == 122:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_raw_property().TryMerge(tmp)
        continue
      if tt == 130:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_entity_group().TryMerge(tmp)
        continue
      if tt == 138:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_owner().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # ASCII debug dump; repeated fields get an optional (index) suffix.
    res=""
    if self.has_key_:
      res+=prefix+"key <\n"
      res+=self.key_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_entity_group_:
      res+=prefix+"entity_group <\n"
      res+=self.entity_group_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_owner_:
      res+=prefix+"owner <\n"
      res+=self.owner_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatInt32(self.kind_))
    if self.has_kind_uri_: res+=prefix+("kind_uri: %s\n" % self.DebugFormatString(self.kind_uri_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    cnt=0
    for e in self.raw_property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("raw_property%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  # Field numbers; 1-3 and 6-12 are unused, hence the None / MAX_TYPE
  # padding in the metadata tables below.
  kkey = 13
  kentity_group = 16
  kowner = 17
  kkind = 4
  kkind_uri = 5
  kproperty = 14
  kraw_property = 15
  _TEXT = (
   "ErrorCode",
   None,
   None,
   None,
   "kind",
   "kind_uri",
   None,
   None,
   None,
   None,
   None,
   None,
   None,
   "key",
   "property",
   "raw_property",
   "entity_group",
   "owner",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class CompositeProperty(ProtocolBuffer.ProtocolMessage):
  """A composite-index entry: the owning index id plus the repeated,
  pre-encoded index value strings.
  """
  has_index_id_ = 0
  index_id_ = 0
  def __init__(self, contents=None):
    # value is repeated, held as a plain list of byte strings.
    self.value_ = []
    if contents is not None: self.MergeFromString(contents)
  # --- index_id: required int64, field 1 ---
  def index_id(self): return self.index_id_
  def set_index_id(self, x):
    self.has_index_id_ = 1
    self.index_id_ = x
  def clear_index_id(self):
    if self.has_index_id_:
      self.has_index_id_ = 0
      self.index_id_ = 0
  def has_index_id(self): return self.has_index_id_
  # --- value: repeated string, field 2 ---
  def value_size(self): return len(self.value_)
  def value_list(self): return self.value_
  def value(self, i):
    return self.value_[i]
  def set_value(self, i, x):
    self.value_[i] = x
  def add_value(self, x):
    self.value_.append(x)
  def clear_value(self):
    self.value_ = []
  def MergeFrom(self, x):
    """Copies x's index_id (if set) and appends all of x's values."""
    assert x is not self
    if (x.has_index_id()): self.set_index_id(x.index_id())
    for i in xrange(x.value_size()): self.add_value(x.value(i))
  def Equals(self, x):
    """Equal iff index_id matches and the value lists match in order."""
    if x is self: return 1
    if self.has_index_id_ != x.has_index_id_: return 0
    if self.has_index_id_ and self.index_id_ != x.index_id_: return 0
    if len(self.value_) != len(x.value_): return 0
    for e1, e2 in zip(self.value_, x.value_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the required index_id field is set."""
    initialized = 1
    if (not self.has_index_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: index_id not set.')
    return initialized
  def ByteSize(self):
    # One tag byte per repeated value (tag 18) plus the required
    # index_id tag (8): hence the + 1.
    n = 0
    n += self.lengthVarInt64(self.index_id_)
    n += 1 * len(self.value_)
    for i in xrange(len(self.value_)): n += self.lengthString(len(self.value_[i]))
    return n + 1
  def Clear(self):
    self.clear_index_id()
    self.clear_value()
  def OutputUnchecked(self, out):
    # Emits index_id (tag 8) then each value (tag 18) in list order.
    out.putVarInt32(8)
    out.putVarInt64(self.index_id_)
    for i in xrange(len(self.value_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.value_[i])
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_index_id(d.getVarInt64())
        continue
      if tt == 18:
        self.add_value(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # ASCII debug dump of present fields.
    res=""
    if self.has_index_id_: res+=prefix+("index_id: %s\n" % self.DebugFormatInt64(self.index_id_))
    cnt=0
    for e in self.value_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("value%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res
  # Field numbers and per-field metadata (slot 0 is "ErrorCode").
  kindex_id = 1
  kvalue = 2
  _TEXT = (
   "ErrorCode",
   "index_id",
   "value",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Index_Property(ProtocolBuffer.ProtocolMessage):
  """One property entry of an Index definition: the property name plus a
  sort direction (defaults to ASCENDING).

  Encoded as a protobuf *group* inside Index (start/end tags 19/20 are
  written by Index.OutputUnchecked), so TryMerge loops until the
  END_GROUP tag 20.
  """
  # Direction enum values.
  ASCENDING    =    1
  DESCENDING   =    2
  _Direction_NAMES = {
    1: "ASCENDING",
    2: "DESCENDING",
  }
  # Returns the symbolic name for a Direction value, or "" if unknown.
  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  Direction_Name = classmethod(Direction_Name)
  # Presence flags and defaults; direction defaults to ASCENDING (1).
  has_name_ = 0
  name_ = ""
  has_direction_ = 0
  direction_ = 1
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  # --- name: required string, field 3 ---
  def name(self): return self.name_
  def set_name(self, x):
    self.has_name_ = 1
    self.name_ = x
  def clear_name(self):
    if self.has_name_:
      self.has_name_ = 0
      self.name_ = ""
  def has_name(self): return self.has_name_
  # --- direction: optional enum, field 4, default ASCENDING ---
  def direction(self): return self.direction_
  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x
  def clear_direction(self):
    if self.has_direction_:
      self.has_direction_ = 0
      self.direction_ = 1
  def has_direction(self): return self.has_direction_
  def MergeFrom(self, x):
    """Copies every present field of x into self."""
    assert x is not self
    if (x.has_name()): self.set_name(x.name())
    if (x.has_direction()): self.set_direction(x.direction())
  def Equals(self, x):
    """Presence-and-value equality over name and direction."""
    if x is self: return 1
    if self.has_name_ != x.has_name_: return 0
    if self.has_name_ and self.name_ != x.name_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the required name field is set."""
    initialized = 1
    if (not self.has_name_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: name not set.')
    return initialized
  def ByteSize(self):
    # + 1 is the one-byte wire tag (26) of the required name field.
    n = 0
    n += self.lengthString(len(self.name_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    return n + 1
  def Clear(self):
    self.clear_name()
    self.clear_direction()
  def OutputUnchecked(self, out):
    # Emits name (tag 26) then direction (tag 32) if explicitly set; the
    # enclosing Index writes the surrounding group start/end tags.
    out.putVarInt32(26)
    out.putPrefixedString(self.name_)
    if (self.has_direction_):
      out.putVarInt32(32)
      out.putVarInt32(self.direction_)
  def TryMerge(self, d):
    # Group decoding: consume tags until END_GROUP (20); tag 0 is malformed.
    while 1:
      tt = d.getVarInt32()
      if tt == 20: break
      if tt == 26:
        self.set_name(d.getPrefixedString())
        continue
      if tt == 32:
        self.set_direction(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # ASCII debug dump of present fields.
    res=""
    if self.has_name_: res+=prefix+("name: %s\n" % self.DebugFormatString(self.name_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    return res
class Index(ProtocolBuffer.ProtocolMessage):
has_entity_type_ = 0
entity_type_ = ""
has_ancestor_ = 0
ancestor_ = 0
def __init__(self, contents=None):
self.property_ = []
if contents is not None: self.MergeFromString(contents)
def entity_type(self): return self.entity_type_
def set_entity_type(self, x):
self.has_entity_type_ = 1
self.entity_type_ = x
def clear_entity_type(self):
if self.has_entity_type_:
self.has_entity_type_ = 0
self.entity_type_ = ""
def has_entity_type(self): return self.has_entity_type_
def ancestor(self): return self.ancestor_
def set_ancestor(self, x):
self.has_ancestor_ = 1
self.ancestor_ = x
def clear_ancestor(self):
if self.has_ancestor_:
self.has_ancestor_ = 0
self.ancestor_ = 0
def has_ancestor(self): return self.has_ancestor_
def property_size(self): return len(self.property_)
def property_list(self): return self.property_
def property(self, i):
return self.property_[i]
def mutable_property(self, i):
return self.property_[i]
def add_property(self):
x = Index_Property()
self.property_.append(x)
return x
def clear_property(self):
self.property_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_entity_type()): self.set_entity_type(x.entity_type())
if (x.has_ancestor()): self.set_ancestor(x.ancestor())
for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))
def Equals(self, x):
if x is self: return 1
if self.has_entity_type_ != x.has_entity_type_: return 0
if self.has_entity_type_ and self.entity_type_ != x.entity_type_: return 0
if self.has_ancestor_ != x.has_ancestor_: return 0
if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
if len(self.property_) != len(x.property_): return 0
for e1, e2 in zip(self.property_, x.property_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_entity_type_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: entity_type not set.')
if (not self.has_ancestor_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: ancestor not set.')
for p in self.property_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.entity_type_))
n += 2 * len(self.property_)
for i in xrange(len(self.property_)): n += self.property_[i].ByteSize()
return n + 3
def Clear(self):
self.clear_entity_type()
self.clear_ancestor()
self.clear_property()
  def OutputUnchecked(self, out):
    """Serialize to out without checking IsInitialized.

    Tag bytes are part of the wire format and must not change:
    10 = field 1 (entity_type, string), 19/20 = field 2 group start/end,
    40 = field 5 (ancestor, varint).
    """
    out.putVarInt32(10)
    out.putPrefixedString(self.entity_type_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(19)
      self.property_[i].OutputUnchecked(out)
      out.putVarInt32(20)
    out.putVarInt32(40)
    out.putBoolean(self.ancestor_)
  def TryMerge(self, d):
    """Decode fields from Decoder d and merge them into self.

    Unknown tags are skipped; tag 0 signals corrupt framing and raises.
    """
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_entity_type(d.getPrefixedString())
        continue
      if tt == 19:
        # Group-encoded Property element; the element consumes its own
        # END_GROUP tag.
        self.add_property().TryMerge(d)
        continue
      if tt == 40:
        self.set_ancestor(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Debug text dump of the set fields, one per line, nested by prefix."""
    res=""
    if self.has_entity_type_: res+=prefix+("entity_type: %s\n" % self.DebugFormatString(self.entity_type_))
    if self.has_ancestor_: res+=prefix+("ancestor: %s\n" % self.DebugFormatBool(self.ancestor_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Property%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
kentity_type = 1
kancestor = 5
kPropertyGroup = 2
kPropertyname = 3
kPropertydirection = 4
_TEXT = (
"ErrorCode",
"entity_type",
"Property",
"name",
"direction",
"ancestor",
)
_TYPES = (
ProtocolBuffer.Encoder.NUMERIC,
ProtocolBuffer.Encoder.STRING,
ProtocolBuffer.Encoder.STARTGROUP,
ProtocolBuffer.Encoder.STRING,
ProtocolBuffer.Encoder.NUMERIC,
ProtocolBuffer.Encoder.NUMERIC,
)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
class CompositeIndex(ProtocolBuffer.ProtocolMessage):
  """Generated message pairing a composite datastore Index definition with
  its owning app, numeric id and serving state.

  Required fields: app_id (string, field 1), id (int64, field 2),
  definition (embedded Index, field 3), state (State enum, field 4).
  Generated protocol-buffer code: the tag values in OutputUnchecked and
  TryMerge define the wire format and must not be edited by hand.
  """

  # State enum: index lifecycle.
  WRITE_ONLY = 1
  READ_WRITE = 2
  DELETED = 3
  ERROR = 4

  # Debug names for the State enum values.
  _State_NAMES = {
    1: "WRITE_ONLY",
    2: "READ_WRITE",
    3: "DELETED",
    4: "ERROR",
  }

  def State_Name(cls, x): return cls._State_NAMES.get(x, "")
  State_Name = classmethod(State_Name)

  # Presence flags and scalar defaults (class-level; shadowed per-instance
  # once a setter runs).
  has_app_id_ = 0
  app_id_ = ""
  has_id_ = 0
  id_ = 0
  has_definition_ = 0
  has_state_ = 0
  state_ = 0

  def __init__(self, contents=None):
    # The embedded Index sub-message is allocated eagerly.
    self.definition_ = Index()
    if contents is not None: self.MergeFromString(contents)

  def app_id(self): return self.app_id_

  def set_app_id(self, x):
    self.has_app_id_ = 1
    self.app_id_ = x

  def clear_app_id(self):
    if self.has_app_id_:
      self.has_app_id_ = 0
      self.app_id_ = ""

  def has_app_id(self): return self.has_app_id_

  def id(self): return self.id_

  def set_id(self, x):
    self.has_id_ = 1
    self.id_ = x

  def clear_id(self):
    if self.has_id_:
      self.has_id_ = 0
      self.id_ = 0

  def has_id(self): return self.has_id_

  def definition(self): return self.definition_

  # Marks the sub-message present and returns it for mutation.
  def mutable_definition(self): self.has_definition_ = 1; return self.definition_

  def clear_definition(self):self.has_definition_ = 0; self.definition_.Clear()

  def has_definition(self): return self.has_definition_

  def state(self): return self.state_

  def set_state(self, x):
    self.has_state_ = 1
    self.state_ = x

  def clear_state(self):
    if self.has_state_:
      self.has_state_ = 0
      self.state_ = 0

  def has_state(self): return self.has_state_

  def MergeFrom(self, x):
    """Merge every set field of x into self (scalars overwrite)."""
    assert x is not self
    if (x.has_app_id()): self.set_app_id(x.app_id())
    if (x.has_id()): self.set_id(x.id())
    if (x.has_definition()): self.mutable_definition().MergeFrom(x.definition())
    if (x.has_state()): self.set_state(x.state())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0."""
    if x is self: return 1
    if self.has_app_id_ != x.has_app_id_: return 0
    if self.has_app_id_ and self.app_id_ != x.app_id_: return 0
    if self.has_id_ != x.has_id_: return 0
    if self.has_id_ and self.id_ != x.id_: return 0
    if self.has_definition_ != x.has_definition_: return 0
    if self.has_definition_ and self.definition_ != x.definition_: return 0
    if self.has_state_ != x.has_state_: return 0
    if self.has_state_ and self.state_ != x.state_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff all required fields are set (recursing into definition).

    When debug_strs is a list, a reason is appended per missing field.
    """
    initialized = 1
    if (not self.has_app_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app_id not set.')
    if (not self.has_id_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: id not set.')
    if (not self.has_definition_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: definition not set.')
    elif not self.definition_.IsInitialized(debug_strs): initialized = 0
    if (not self.has_state_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: state not set.')
    return initialized

  def ByteSize(self):
    """Serialized size; the trailing +4 is one tag byte per field."""
    n = 0
    n += self.lengthString(len(self.app_id_))
    n += self.lengthVarInt64(self.id_)
    n += self.lengthString(self.definition_.ByteSize())
    n += self.lengthVarInt64(self.state_)
    return n + 4

  def Clear(self):
    """Reset all fields and presence bits to defaults."""
    self.clear_app_id()
    self.clear_id()
    self.clear_definition()
    self.clear_state()

  def OutputUnchecked(self, out):
    """Serialize without validation.

    Tags: 10 = app_id (string), 16 = id (varint),
    26 = definition (length-prefixed sub-message), 32 = state (varint).
    """
    out.putVarInt32(10)
    out.putPrefixedString(self.app_id_)
    out.putVarInt32(16)
    out.putVarInt64(self.id_)
    out.putVarInt32(26)
    out.putVarInt32(self.definition_.ByteSize())
    self.definition_.OutputUnchecked(out)
    out.putVarInt32(32)
    out.putVarInt32(self.state_)

  def TryMerge(self, d):
    """Decode from Decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app_id(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_id(d.getVarInt64())
        continue
      if tt == 26:
        # Length-delimited sub-message: bound a sub-decoder to its bytes.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_definition().TryMerge(tmp)
        continue
      if tt == 32:
        self.set_state(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Debug text dump of the set fields."""
    res=""
    if self.has_app_id_: res+=prefix+("app_id: %s\n" % self.DebugFormatString(self.app_id_))
    if self.has_id_: res+=prefix+("id: %s\n" % self.DebugFormatInt64(self.id_))
    if self.has_definition_:
      res+=prefix+"definition <\n"
      res+=self.definition_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_state_: res+=prefix+("state: %s\n" % self.DebugFormatInt32(self.state_))
    return res

  # Field numbers.
  kapp_id = 1
  kid = 2
  kdefinition = 3
  kstate = 4

  # Debug-text names and wire types, indexed by field number.
  _TEXT = (
   "ErrorCode",
   "app_id",
   "id",
   "definition",
   "state",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module.
__all__ = ['PropertyValue','PropertyValue_ReferenceValuePathElement','PropertyValue_PointValue','PropertyValue_UserValue','PropertyValue_ReferenceValue','Property','Path','Path_Element','Reference','User','EntityProto','CompositeProperty','Index','Index_Property','CompositeIndex']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import Integer64Proto;
from google.appengine.api.api_base_pb import StringProto;
from google.appengine.api.api_base_pb import VoidProto;
from google.appengine.datastore.entity_pb import CompositeIndex
from google.appengine.datastore.entity_pb import EntityProto
from google.appengine.datastore.entity_pb import Index
from google.appengine.datastore.entity_pb import Property
from google.appengine.datastore.entity_pb import Reference
class Transaction(ProtocolBuffer.ProtocolMessage):
  """Generated message carrying one required field: handle.

  Wire format: field 1, fixed64 (tag byte 9).
  """

  has_handle_ = 0
  handle_ = 0

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def handle(self): return self.handle_

  def set_handle(self, x):
    """Store x as the handle and mark it present."""
    self.handle_ = x
    self.has_handle_ = 1

  def clear_handle(self):
    """Reset handle to 0 and drop its presence bit."""
    if not self.has_handle_:
      return
    self.handle_ = 0
    self.has_handle_ = 0

  def has_handle(self): return self.has_handle_

  def MergeFrom(self, x):
    """Copy the handle from x when x has one set."""
    assert x is not self
    if x.has_handle():
      self.set_handle(x.handle())

  def Equals(self, x):
    """Return 1 when both messages agree on presence and value of handle."""
    if x is self:
      return 1
    if self.has_handle_ != x.has_handle_:
      return 0
    return 0 if (self.has_handle_ and self.handle_ != x.handle_) else 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required handle field is set."""
    if self.has_handle_:
      return 1
    if debug_strs is not None:
      debug_strs.append('Required field: handle not set.')
    return 0

  def ByteSize(self):
    # One tag byte plus a fixed 8-byte value.
    return 9

  def Clear(self):
    self.clear_handle()

  def OutputUnchecked(self, out):
    # Tag 9 = (field 1 << 3) | fixed64 wire type.
    out.putVarInt32(9)
    out.put64(self.handle_)

  def TryMerge(self, d):
    """Decode fields from d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 9:
        self.set_handle(d.get64())
      elif tt == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    if not self.has_handle_:
      return ""
    return prefix + ("handle: %s\n" % self.DebugFormatFixed64(self.handle_))

  khandle = 1

  _TEXT = (
   "ErrorCode",
   "handle",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.DOUBLE,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Query_Filter(ProtocolBuffer.ProtocolMessage):
  """Generated group message for Query.filter: a comparison operator plus
  the repeated Property operands it applies to.

  Encoded as a protobuf *group* (field 4 of Query), so TryMerge stops at
  the END_GROUP tag (36) rather than at end-of-buffer, and this class has
  no _TEXT/_TYPES tables of its own.
  """

  # Operator enum values.
  LESS_THAN = 1
  LESS_THAN_OR_EQUAL = 2
  GREATER_THAN = 3
  GREATER_THAN_OR_EQUAL = 4
  EQUAL = 5
  IN = 6
  EXISTS = 7

  # Debug names for the Operator enum values.
  _Operator_NAMES = {
    1: "LESS_THAN",
    2: "LESS_THAN_OR_EQUAL",
    3: "GREATER_THAN",
    4: "GREATER_THAN_OR_EQUAL",
    5: "EQUAL",
    6: "IN",
    7: "EXISTS",
  }

  def Operator_Name(cls, x): return cls._Operator_NAMES.get(x, "")
  Operator_Name = classmethod(Operator_Name)

  # Presence flag and default for the required op field.
  has_op_ = 0
  op_ = 0

  def __init__(self, contents=None):
    self.property_ = []
    if contents is not None: self.MergeFromString(contents)

  def op(self): return self.op_

  def set_op(self, x):
    self.has_op_ = 1
    self.op_ = x

  def clear_op(self):
    if self.has_op_:
      self.has_op_ = 0
      self.op_ = 0

  def has_op(self): return self.has_op_

  def property_size(self): return len(self.property_)

  def property_list(self): return self.property_

  def property(self, i):
    return self.property_[i]

  def mutable_property(self, i):
    return self.property_[i]

  def add_property(self):
    x = Property()
    self.property_.append(x)
    return x

  def clear_property(self):
    self.property_ = []

  def MergeFrom(self, x):
    """Merge set fields of x into self; property elements are appended."""
    assert x is not self
    if (x.has_op()): self.set_op(x.op())
    for i in xrange(x.property_size()): self.add_property().CopyFrom(x.property(i))

  def Equals(self, x):
    """Field-by-field equality; returns 1/0."""
    if x is self: return 1
    if self.has_op_ != x.has_op_: return 0
    if self.has_op_ and self.op_ != x.op_: return 0
    if len(self.property_) != len(x.property_): return 0
    for e1, e2 in zip(self.property_, x.property_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff op is set and every property element is initialized."""
    initialized = 1
    if (not self.has_op_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: op not set.')
    for p in self.property_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size; +1 per property tag and +1 for the op tag."""
    n = 0
    n += self.lengthVarInt64(self.op_)
    n += 1 * len(self.property_)
    for i in xrange(len(self.property_)): n += self.lengthString(self.property_[i].ByteSize())
    return n + 1

  def Clear(self):
    self.clear_op()
    self.clear_property()

  def OutputUnchecked(self, out):
    """Serialize the group body: 48 = op (varint), 114 = property (string)."""
    out.putVarInt32(48)
    out.putVarInt32(self.op_)
    for i in xrange(len(self.property_)):
      out.putVarInt32(114)
      out.putVarInt32(self.property_[i].ByteSize())
      self.property_[i].OutputUnchecked(out)

  def TryMerge(self, d):
    """Decode the group body; loops until the END_GROUP tag 36."""
    while 1:
      tt = d.getVarInt32()
      if tt == 36: break
      if tt == 48:
        self.set_op(d.getVarInt32())
        continue
      if tt == 114:
        # Length-delimited Property sub-message.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_property().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Debug text dump of the set fields."""
    res=""
    if self.has_op_: res+=prefix+("op: %s\n" % self.DebugFormatInt32(self.op_))
    cnt=0
    for e in self.property_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("property%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
class Query_Order(ProtocolBuffer.ProtocolMessage):
  """Generated group message for Query.order: a property name plus an
  optional sort direction.

  Encoded as a protobuf *group* (field 9 of Query): TryMerge stops at the
  END_GROUP tag (76).  Note the non-zero default: direction defaults to
  1 (ASCENDING) when unset.
  """

  # Direction enum values.
  ASCENDING = 1
  DESCENDING = 2

  # Debug names for the Direction enum values.
  _Direction_NAMES = {
    1: "ASCENDING",
    2: "DESCENDING",
  }

  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  Direction_Name = classmethod(Direction_Name)

  # Presence flags and defaults (direction defaults to ASCENDING = 1).
  has_property_ = 0
  property_ = ""
  has_direction_ = 0
  direction_ = 1

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def property(self): return self.property_

  def set_property(self, x):
    self.has_property_ = 1
    self.property_ = x

  def clear_property(self):
    if self.has_property_:
      self.has_property_ = 0
      self.property_ = ""

  def has_property(self): return self.has_property_

  def direction(self): return self.direction_

  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x

  def clear_direction(self):
    if self.has_direction_:
      self.has_direction_ = 0
      # Reset to the declared default, not zero.
      self.direction_ = 1

  def has_direction(self): return self.has_direction_

  def MergeFrom(self, x):
    """Merge set fields of x into self (scalars overwrite)."""
    assert x is not self
    if (x.has_property()): self.set_property(x.property())
    if (x.has_direction()): self.set_direction(x.direction())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0."""
    if x is self: return 1
    if self.has_property_ != x.has_property_: return 0
    if self.has_property_ and self.property_ != x.property_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff the required property field is set."""
    initialized = 1
    if (not self.has_property_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: property not set.')
    return initialized

  def ByteSize(self):
    """Serialized size; direction contributes only when explicitly set."""
    n = 0
    n += self.lengthString(len(self.property_))
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    return n + 1

  def Clear(self):
    self.clear_property()
    self.clear_direction()

  def OutputUnchecked(self, out):
    """Serialize the group body: 82 = property (string), 88 = direction."""
    out.putVarInt32(82)
    out.putPrefixedString(self.property_)
    if (self.has_direction_):
      out.putVarInt32(88)
      out.putVarInt32(self.direction_)

  def TryMerge(self, d):
    """Decode the group body; loops until the END_GROUP tag 76."""
    while 1:
      tt = d.getVarInt32()
      if tt == 76: break
      if tt == 82:
        self.set_property(d.getPrefixedString())
        continue
      if tt == 88:
        self.set_direction(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Debug text dump of the set fields."""
    res=""
    if self.has_property_: res+=prefix+("property: %s\n" % self.DebugFormatString(self.property_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    return res
class Query(ProtocolBuffer.ProtocolMessage):
  """Generated message describing a datastore query.

  Required: app (field 1).  Optional: kind, ancestor (a lazily-allocated
  Reference), search_query, hint, offset, limit, require_perfect_plan.
  Repeated: Filter and Order groups, composite_index sub-messages.
  Generated protocol-buffer code: tag values in OutputUnchecked/TryMerge
  define the wire format and must not be edited by hand.
  """

  # Hint enum: which query component the planner should satisfy first.
  ORDER_FIRST = 1
  ANCESTOR_FIRST = 2
  FILTER_FIRST = 3

  # Debug names for the Hint enum values.
  _Hint_NAMES = {
    1: "ORDER_FIRST",
    2: "ANCESTOR_FIRST",
    3: "FILTER_FIRST",
  }

  def Hint_Name(cls, x): return cls._Hint_NAMES.get(x, "")
  Hint_Name = classmethod(Hint_Name)

  # Presence flags and scalar defaults.  ancestor_ starts as None and is
  # allocated on first access (see ancestor()).
  has_app_ = 0
  app_ = ""
  has_kind_ = 0
  kind_ = ""
  has_ancestor_ = 0
  ancestor_ = None
  has_search_query_ = 0
  search_query_ = ""
  has_hint_ = 0
  hint_ = 0
  has_offset_ = 0
  offset_ = 0
  has_limit_ = 0
  limit_ = 0
  has_require_perfect_plan_ = 0
  require_perfect_plan_ = 0

  def __init__(self, contents=None):
    self.filter_ = []
    self.order_ = []
    self.composite_index_ = []
    # Guards the lazy allocation of self.ancestor_ in ancestor().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def app(self): return self.app_

  def set_app(self, x):
    self.has_app_ = 1
    self.app_ = x

  def clear_app(self):
    if self.has_app_:
      self.has_app_ = 0
      self.app_ = ""

  def has_app(self): return self.has_app_

  def kind(self): return self.kind_

  def set_kind(self, x):
    self.has_kind_ = 1
    self.kind_ = x

  def clear_kind(self):
    if self.has_kind_:
      self.has_kind_ = 0
      self.kind_ = ""

  def has_kind(self): return self.has_kind_

  def ancestor(self):
    # Double-checked lazy init: allocate the Reference at most once,
    # under lazy_init_lock_.  Accessing does NOT set the presence bit.
    if self.ancestor_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.ancestor_ is None: self.ancestor_ = Reference()
      finally:
        self.lazy_init_lock_.release()
    return self.ancestor_

  def mutable_ancestor(self): self.has_ancestor_ = 1; return self.ancestor()

  def clear_ancestor(self):
    # Unset the presence bit but keep the allocated object for reuse.
    if self.has_ancestor_:
      self.has_ancestor_ = 0;
      if self.ancestor_ is not None: self.ancestor_.Clear()

  def has_ancestor(self): return self.has_ancestor_

  def filter_size(self): return len(self.filter_)

  def filter_list(self): return self.filter_

  def filter(self, i):
    return self.filter_[i]

  def mutable_filter(self, i):
    return self.filter_[i]

  def add_filter(self):
    x = Query_Filter()
    self.filter_.append(x)
    return x

  def clear_filter(self):
    self.filter_ = []

  def search_query(self): return self.search_query_

  def set_search_query(self, x):
    self.has_search_query_ = 1
    self.search_query_ = x

  def clear_search_query(self):
    if self.has_search_query_:
      self.has_search_query_ = 0
      self.search_query_ = ""

  def has_search_query(self): return self.has_search_query_

  def order_size(self): return len(self.order_)

  def order_list(self): return self.order_

  def order(self, i):
    return self.order_[i]

  def mutable_order(self, i):
    return self.order_[i]

  def add_order(self):
    x = Query_Order()
    self.order_.append(x)
    return x

  def clear_order(self):
    self.order_ = []

  def hint(self): return self.hint_

  def set_hint(self, x):
    self.has_hint_ = 1
    self.hint_ = x

  def clear_hint(self):
    if self.has_hint_:
      self.has_hint_ = 0
      self.hint_ = 0

  def has_hint(self): return self.has_hint_

  def offset(self): return self.offset_

  def set_offset(self, x):
    self.has_offset_ = 1
    self.offset_ = x

  def clear_offset(self):
    if self.has_offset_:
      self.has_offset_ = 0
      self.offset_ = 0

  def has_offset(self): return self.has_offset_

  def limit(self): return self.limit_

  def set_limit(self, x):
    self.has_limit_ = 1
    self.limit_ = x

  def clear_limit(self):
    if self.has_limit_:
      self.has_limit_ = 0
      self.limit_ = 0

  def has_limit(self): return self.has_limit_

  def composite_index_size(self): return len(self.composite_index_)

  def composite_index_list(self): return self.composite_index_

  def composite_index(self, i):
    return self.composite_index_[i]

  def mutable_composite_index(self, i):
    return self.composite_index_[i]

  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x

  def clear_composite_index(self):
    self.composite_index_ = []

  def require_perfect_plan(self): return self.require_perfect_plan_

  def set_require_perfect_plan(self, x):
    self.has_require_perfect_plan_ = 1
    self.require_perfect_plan_ = x

  def clear_require_perfect_plan(self):
    if self.has_require_perfect_plan_:
      self.has_require_perfect_plan_ = 0
      self.require_perfect_plan_ = 0

  def has_require_perfect_plan(self): return self.has_require_perfect_plan_

  def MergeFrom(self, x):
    """Merge set fields of x into self; repeated elements are appended."""
    assert x is not self
    if (x.has_app()): self.set_app(x.app())
    if (x.has_kind()): self.set_kind(x.kind())
    if (x.has_ancestor()): self.mutable_ancestor().MergeFrom(x.ancestor())
    for i in xrange(x.filter_size()): self.add_filter().CopyFrom(x.filter(i))
    if (x.has_search_query()): self.set_search_query(x.search_query())
    for i in xrange(x.order_size()): self.add_order().CopyFrom(x.order(i))
    if (x.has_hint()): self.set_hint(x.hint())
    if (x.has_offset()): self.set_offset(x.offset())
    if (x.has_limit()): self.set_limit(x.limit())
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_require_perfect_plan()): self.set_require_perfect_plan(x.require_perfect_plan())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0."""
    if x is self: return 1
    if self.has_app_ != x.has_app_: return 0
    if self.has_app_ and self.app_ != x.app_: return 0
    if self.has_kind_ != x.has_kind_: return 0
    if self.has_kind_ and self.kind_ != x.kind_: return 0
    if self.has_ancestor_ != x.has_ancestor_: return 0
    if self.has_ancestor_ and self.ancestor_ != x.ancestor_: return 0
    if len(self.filter_) != len(x.filter_): return 0
    for e1, e2 in zip(self.filter_, x.filter_):
      if e1 != e2: return 0
    if self.has_search_query_ != x.has_search_query_: return 0
    if self.has_search_query_ and self.search_query_ != x.search_query_: return 0
    if len(self.order_) != len(x.order_): return 0
    for e1, e2 in zip(self.order_, x.order_):
      if e1 != e2: return 0
    if self.has_hint_ != x.has_hint_: return 0
    if self.has_hint_ and self.hint_ != x.hint_: return 0
    if self.has_offset_ != x.has_offset_: return 0
    if self.has_offset_ and self.offset_ != x.offset_: return 0
    if self.has_limit_ != x.has_limit_: return 0
    if self.has_limit_ and self.limit_ != x.limit_: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_require_perfect_plan_ != x.has_require_perfect_plan_: return 0
    if self.has_require_perfect_plan_ and self.require_perfect_plan_ != x.require_perfect_plan_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff app is set and every sub-message is initialized."""
    initialized = 1
    if (not self.has_app_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: app not set.')
    if (self.has_ancestor_ and not self.ancestor_.IsInitialized(debug_strs)): initialized = 0
    for p in self.filter_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.order_:
      if not p.IsInitialized(debug_strs): initialized=0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size; the 2 * len(...) terms are group start/end tags,
    and optional-field terms include their own tag-byte counts."""
    n = 0
    n += self.lengthString(len(self.app_))
    if (self.has_kind_): n += 1 + self.lengthString(len(self.kind_))
    if (self.has_ancestor_): n += 2 + self.lengthString(self.ancestor_.ByteSize())
    n += 2 * len(self.filter_)
    for i in xrange(len(self.filter_)): n += self.filter_[i].ByteSize()
    if (self.has_search_query_): n += 1 + self.lengthString(len(self.search_query_))
    n += 2 * len(self.order_)
    for i in xrange(len(self.order_)): n += self.order_[i].ByteSize()
    if (self.has_hint_): n += 2 + self.lengthVarInt64(self.hint_)
    if (self.has_offset_): n += 1 + self.lengthVarInt64(self.offset_)
    if (self.has_limit_): n += 2 + self.lengthVarInt64(self.limit_)
    n += 2 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_require_perfect_plan_): n += 3
    return n + 1

  def Clear(self):
    """Reset every field and presence bit to its default."""
    self.clear_app()
    self.clear_kind()
    self.clear_ancestor()
    self.clear_filter()
    self.clear_search_query()
    self.clear_order()
    self.clear_hint()
    self.clear_offset()
    self.clear_limit()
    self.clear_composite_index()
    self.clear_require_perfect_plan()

  def OutputUnchecked(self, out):
    """Serialize without validation, in ascending tag order.

    Tags: 10=app, 26=kind, 35/36=Filter group, 66=search_query,
    75/76=Order group, 96=offset, 128=limit, 138=ancestor, 144=hint,
    154=composite_index, 160=require_perfect_plan.
    """
    out.putVarInt32(10)
    out.putPrefixedString(self.app_)
    if (self.has_kind_):
      out.putVarInt32(26)
      out.putPrefixedString(self.kind_)
    for i in xrange(len(self.filter_)):
      out.putVarInt32(35)
      self.filter_[i].OutputUnchecked(out)
      out.putVarInt32(36)
    if (self.has_search_query_):
      out.putVarInt32(66)
      out.putPrefixedString(self.search_query_)
    for i in xrange(len(self.order_)):
      out.putVarInt32(75)
      self.order_[i].OutputUnchecked(out)
      out.putVarInt32(76)
    if (self.has_offset_):
      out.putVarInt32(96)
      out.putVarInt32(self.offset_)
    if (self.has_limit_):
      out.putVarInt32(128)
      out.putVarInt32(self.limit_)
    if (self.has_ancestor_):
      out.putVarInt32(138)
      out.putVarInt32(self.ancestor_.ByteSize())
      self.ancestor_.OutputUnchecked(out)
    if (self.has_hint_):
      out.putVarInt32(144)
      out.putVarInt32(self.hint_)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(154)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
    if (self.has_require_perfect_plan_):
      out.putVarInt32(160)
      out.putBoolean(self.require_perfect_plan_)

  def TryMerge(self, d):
    """Decode from Decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_app(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_kind(d.getPrefixedString())
        continue
      if tt == 35:
        # Group element: Query_Filter.TryMerge consumes its END_GROUP tag.
        self.add_filter().TryMerge(d)
        continue
      if tt == 66:
        self.set_search_query(d.getPrefixedString())
        continue
      if tt == 75:
        self.add_order().TryMerge(d)
        continue
      if tt == 96:
        self.set_offset(d.getVarInt32())
        continue
      if tt == 128:
        self.set_limit(d.getVarInt32())
        continue
      if tt == 138:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_ancestor().TryMerge(tmp)
        continue
      if tt == 144:
        self.set_hint(d.getVarInt32())
        continue
      if tt == 154:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 160:
        self.set_require_perfect_plan(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Debug text dump of the set fields."""
    res=""
    if self.has_app_: res+=prefix+("app: %s\n" % self.DebugFormatString(self.app_))
    if self.has_kind_: res+=prefix+("kind: %s\n" % self.DebugFormatString(self.kind_))
    if self.has_ancestor_:
      res+=prefix+"ancestor <\n"
      res+=self.ancestor_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.filter_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Filter%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_search_query_: res+=prefix+("search_query: %s\n" % self.DebugFormatString(self.search_query_))
    cnt=0
    for e in self.order_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Order%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_hint_: res+=prefix+("hint: %s\n" % self.DebugFormatInt32(self.hint_))
    if self.has_offset_: res+=prefix+("offset: %s\n" % self.DebugFormatInt32(self.offset_))
    if self.has_limit_: res+=prefix+("limit: %s\n" % self.DebugFormatInt32(self.limit_))
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_require_perfect_plan_: res+=prefix+("require_perfect_plan: %s\n" % self.DebugFormatBool(self.require_perfect_plan_))
    return res

  # Field numbers, including the nested group members.
  kapp = 1
  kkind = 3
  kancestor = 17
  kFilterGroup = 4
  kFilterop = 6
  kFilterproperty = 14
  ksearch_query = 8
  kOrderGroup = 9
  kOrderproperty = 10
  kOrderdirection = 11
  khint = 18
  koffset = 12
  klimit = 16
  kcomposite_index = 19
  krequire_perfect_plan = 20

  # Debug-text names and wire types, indexed by field number
  # (None marks unused/retired field numbers).
  _TEXT = (
   "ErrorCode",
   "app",
   None,
   "kind",
   "Filter",
   None,
   "op",
   None,
   "search_query",
   "Order",
   "property",
   "direction",
   "offset",
   None,
   "property",
   None,
   "limit",
   "ancestor",
   "hint",
   "composite_index",
   "require_perfect_plan",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class QueryExplanation(ProtocolBuffer.ProtocolMessage):
  """Generated message reporting how a query would be executed natively.

  All fields are optional: native_ancestor (bool, field 1), repeated
  native_index (Index, field 2), native_offset (field 3) and
  native_limit (field 4).  Generated protocol-buffer code: tag values in
  OutputUnchecked/TryMerge define the wire format.
  """

  # Presence flags and scalar defaults.
  has_native_ancestor_ = 0
  native_ancestor_ = 0
  has_native_offset_ = 0
  native_offset_ = 0
  has_native_limit_ = 0
  native_limit_ = 0

  def __init__(self, contents=None):
    self.native_index_ = []
    if contents is not None: self.MergeFromString(contents)

  def native_ancestor(self): return self.native_ancestor_

  def set_native_ancestor(self, x):
    self.has_native_ancestor_ = 1
    self.native_ancestor_ = x

  def clear_native_ancestor(self):
    if self.has_native_ancestor_:
      self.has_native_ancestor_ = 0
      self.native_ancestor_ = 0

  def has_native_ancestor(self): return self.has_native_ancestor_

  def native_index_size(self): return len(self.native_index_)

  def native_index_list(self): return self.native_index_

  def native_index(self, i):
    return self.native_index_[i]

  def mutable_native_index(self, i):
    return self.native_index_[i]

  def add_native_index(self):
    x = Index()
    self.native_index_.append(x)
    return x

  def clear_native_index(self):
    self.native_index_ = []

  def native_offset(self): return self.native_offset_

  def set_native_offset(self, x):
    self.has_native_offset_ = 1
    self.native_offset_ = x

  def clear_native_offset(self):
    if self.has_native_offset_:
      self.has_native_offset_ = 0
      self.native_offset_ = 0

  def has_native_offset(self): return self.has_native_offset_

  def native_limit(self): return self.native_limit_

  def set_native_limit(self, x):
    self.has_native_limit_ = 1
    self.native_limit_ = x

  def clear_native_limit(self):
    if self.has_native_limit_:
      self.has_native_limit_ = 0
      self.native_limit_ = 0

  def has_native_limit(self): return self.has_native_limit_

  def MergeFrom(self, x):
    """Merge set fields of x into self; native_index elements append."""
    assert x is not self
    if (x.has_native_ancestor()): self.set_native_ancestor(x.native_ancestor())
    for i in xrange(x.native_index_size()): self.add_native_index().CopyFrom(x.native_index(i))
    if (x.has_native_offset()): self.set_native_offset(x.native_offset())
    if (x.has_native_limit()): self.set_native_limit(x.native_limit())

  def Equals(self, x):
    """Field-by-field equality; returns 1/0."""
    if x is self: return 1
    if self.has_native_ancestor_ != x.has_native_ancestor_: return 0
    if self.has_native_ancestor_ and self.native_ancestor_ != x.native_ancestor_: return 0
    if len(self.native_index_) != len(x.native_index_): return 0
    for e1, e2 in zip(self.native_index_, x.native_index_):
      if e1 != e2: return 0
    if self.has_native_offset_ != x.has_native_offset_: return 0
    if self.has_native_offset_ and self.native_offset_ != x.native_offset_: return 0
    if self.has_native_limit_ != x.has_native_limit_: return 0
    if self.has_native_limit_ and self.native_limit_ != x.native_limit_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """No required fields here; only sub-messages need to be initialized."""
    initialized = 1
    for p in self.native_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Serialized size; every field is optional, hence the n + 0 base."""
    n = 0
    if (self.has_native_ancestor_): n += 2
    n += 1 * len(self.native_index_)
    for i in xrange(len(self.native_index_)): n += self.lengthString(self.native_index_[i].ByteSize())
    if (self.has_native_offset_): n += 1 + self.lengthVarInt64(self.native_offset_)
    if (self.has_native_limit_): n += 1 + self.lengthVarInt64(self.native_limit_)
    return n + 0

  def Clear(self):
    self.clear_native_ancestor()
    self.clear_native_index()
    self.clear_native_offset()
    self.clear_native_limit()

  def OutputUnchecked(self, out):
    """Serialize without validation.  Tags: 8=native_ancestor,
    18=native_index (length-prefixed), 24=native_offset, 32=native_limit."""
    if (self.has_native_ancestor_):
      out.putVarInt32(8)
      out.putBoolean(self.native_ancestor_)
    for i in xrange(len(self.native_index_)):
      out.putVarInt32(18)
      out.putVarInt32(self.native_index_[i].ByteSize())
      self.native_index_[i].OutputUnchecked(out)
    if (self.has_native_offset_):
      out.putVarInt32(24)
      out.putVarInt32(self.native_offset_)
    if (self.has_native_limit_):
      out.putVarInt32(32)
      out.putVarInt32(self.native_limit_)

  def TryMerge(self, d):
    """Decode from Decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_native_ancestor(d.getBoolean())
        continue
      if tt == 18:
        # Length-delimited Index sub-message.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_native_index().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_native_offset(d.getVarInt32())
        continue
      if tt == 32:
        self.set_native_limit(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Debug text dump of the set fields."""
    res=""
    if self.has_native_ancestor_: res+=prefix+("native_ancestor: %s\n" % self.DebugFormatBool(self.native_ancestor_))
    cnt=0
    for e in self.native_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("native_index%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_native_offset_: res+=prefix+("native_offset: %s\n" % self.DebugFormatInt32(self.native_offset_))
    if self.has_native_limit_: res+=prefix+("native_limit: %s\n" % self.DebugFormatInt32(self.native_limit_))
    return res

  # Field numbers.
  knative_ancestor = 1
  knative_index = 2
  knative_offset = 3
  knative_limit = 4

  # Debug-text names and wire types, indexed by field number.
  _TEXT = (
   "ErrorCode",
   "native_ancestor",
   "native_index",
   "native_offset",
   "native_limit",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Cursor(ProtocolBuffer.ProtocolMessage):
  """Datastore query cursor: one required fixed64 `cursor` field (tag 9)."""
  has_cursor_ = 0
  cursor_ = 0

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def cursor(self): return self.cursor_

  def set_cursor(self, x):
    self.has_cursor_ = 1
    self.cursor_ = x

  def clear_cursor(self):
    if not self.has_cursor_:
      return
    self.has_cursor_ = 0
    self.cursor_ = 0

  def has_cursor(self): return self.has_cursor_

  def MergeFrom(self, x):
    assert x is not self
    if x.has_cursor():
      self.set_cursor(x.cursor())

  def Equals(self, x):
    if x is self:
      return 1
    if self.has_cursor_ != x.has_cursor_:
      return 0
    if self.has_cursor_ and self.cursor_ != x.cursor_:
      return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # `cursor` is required; report it when unset.
    if self.has_cursor_:
      return 1
    if debug_strs is not None:
      debug_strs.append('Required field: cursor not set.')
    return 0

  def ByteSize(self):
    # One-byte tag (9) plus the 8-byte fixed64 payload, always present.
    return 9

  def Clear(self):
    self.clear_cursor()

  def OutputUnchecked(self, out):
    out.putVarInt32(9)
    out.put64(self.cursor_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 9:
        self.set_cursor(d.get64())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    if not self.has_cursor_:
      return ""
    return prefix + ("cursor: %s\n" % self.DebugFormatFixed64(self.cursor_))

  kcursor = 1
  _TEXT = (
   "ErrorCode",
   "cursor",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.DOUBLE,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Error(ProtocolBuffer.ProtocolMessage):
  """Field-less message that exists to carry the datastore ErrorCode enum."""

  BAD_REQUEST = 1
  CONCURRENT_TRANSACTION = 2
  INTERNAL_ERROR = 3
  NEED_INDEX = 4
  TIMEOUT = 5

  _ErrorCode_NAMES = {
    1: "BAD_REQUEST",
    2: "CONCURRENT_TRANSACTION",
    3: "INTERNAL_ERROR",
    4: "NEED_INDEX",
    5: "TIMEOUT",
  }

  @classmethod
  def ErrorCode_Name(cls, x):
    """Return the symbolic name for enum value x, or "" if unknown."""
    return cls._ErrorCode_NAMES.get(x, "")

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # No fields, so any two Error instances are equal.
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip everything; tag 0 is malformed input.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Cost(ProtocolBuffer.ProtocolMessage):
  """Cost of a datastore write: optional varint `index_writes` (field 1)."""
  has_index_writes_ = 0
  index_writes_ = 0

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def index_writes(self): return self.index_writes_

  def set_index_writes(self, x):
    self.has_index_writes_ = 1
    self.index_writes_ = x

  def clear_index_writes(self):
    if not self.has_index_writes_:
      return
    self.has_index_writes_ = 0
    self.index_writes_ = 0

  def has_index_writes(self): return self.has_index_writes_

  def MergeFrom(self, x):
    assert x is not self
    if x.has_index_writes():
      self.set_index_writes(x.index_writes())

  def Equals(self, x):
    if x is self:
      return 1
    if self.has_index_writes_ != x.has_index_writes_:
      return 0
    if self.has_index_writes_ and self.index_writes_ != x.index_writes_:
      return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # All fields optional; always initialized.
    return 1

  def ByteSize(self):
    if self.has_index_writes_:
      # 1-byte tag plus varint payload.
      return 1 + self.lengthVarInt64(self.index_writes_)
    return 0

  def Clear(self):
    self.clear_index_writes()

  def OutputUnchecked(self, out):
    if self.has_index_writes_:
      out.putVarInt32(8)
      out.putVarInt32(self.index_writes_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 8:
        self.set_index_writes(d.getVarInt32())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    if not self.has_index_writes_:
      return ""
    return prefix + ("index_writes: %s\n" % self.DebugFormatInt32(self.index_writes_))

  kindex_writes = 1
  _TEXT = (
   "ErrorCode",
   "index_writes",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class GetRequest(ProtocolBuffer.ProtocolMessage):
  """Datastore Get RPC request: repeated `key` (Reference, field 1) and an
  optional embedded `transaction` (Transaction, field 2).

  The transaction submessage is created lazily under lazy_init_lock_
  (double-checked locking) so plain key-only requests never allocate it.
  """
  has_transaction_ = 0
  transaction_ = None

  def __init__(self, contents=None):
    self.key_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_

  def key(self, i):
    return self.key_[i]

  def mutable_key(self, i):
    return self.key_[i]

  def add_key(self):
    # Append a fresh, empty Reference and return it for the caller to fill in.
    x = Reference()
    self.key_.append(x)
    return x

  def clear_key(self):
    self.key_ = []

  def transaction(self):
    # Lazily construct the submessage; double-checked under the lock.
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_

  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()

  def clear_transaction(self):
    # Clear the presence bit but reuse the allocated submessage, if any.
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()

  def has_transaction(self): return self.has_transaction_

  def MergeFrom(self, x):
    """Merge all set fields of x into self (keys are appended, not replaced)."""
    assert x is not self
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())

  def Equals(self, x):
    if x is self: return 1
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # Initialized iff every key and (when present) the transaction are.
    initialized = 1
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    """Return the encoded size in bytes (1-byte tag per submessage)."""
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    return n + 0

  def Clear(self):
    self.clear_key()
    self.clear_transaction()

  def OutputUnchecked(self, out):
    # Tags: 10 = field 1 length-delimited, 18 = field 2 length-delimited.
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)

  def TryMerge(self, d):
    """Decode fields from decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Return an indented text dump of the set fields."""
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res

  # Field numbers, text-format names, and wire types (index 0 is a placeholder).
  kkey = 1
  ktransaction = 2
  _TEXT = (
   "ErrorCode",
   "key",
   "transaction",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class GetResponse_Entity(ProtocolBuffer.ProtocolMessage):
  """One element of GetResponse's repeated `Entity` group.

  Holds an optional embedded EntityProto (`entity`, field 2); an absent
  entity means the corresponding requested key was not found.  As a group
  member it is framed by START_GROUP/END_GROUP tags (11/12) emitted by
  the enclosing GetResponse, which is why TryMerge stops at tag 12 rather
  than at end-of-buffer.
  """
  has_entity_ = 0
  entity_ = None

  def __init__(self, contents=None):
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def entity(self):
    # Lazily construct the submessage; double-checked under the lock.
    if self.entity_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.entity_ is None: self.entity_ = EntityProto()
      finally:
        self.lazy_init_lock_.release()
    return self.entity_

  def mutable_entity(self): self.has_entity_ = 1; return self.entity()

  def clear_entity(self):
    # Clear the presence bit but reuse the allocated submessage, if any.
    if self.has_entity_:
      self.has_entity_ = 0;
      if self.entity_ is not None: self.entity_.Clear()

  def has_entity(self): return self.has_entity_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_entity()): self.mutable_entity().MergeFrom(x.entity())

  def Equals(self, x):
    if x is self: return 1
    if self.has_entity_ != x.has_entity_: return 0
    if self.has_entity_ and self.entity_ != x.entity_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (self.has_entity_ and not self.entity_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    n = 0
    if (self.has_entity_): n += 1 + self.lengthString(self.entity_.ByteSize())
    return n + 0

  def Clear(self):
    self.clear_entity()

  def OutputUnchecked(self, out):
    # Tag 18 = field 2, length-delimited embedded message.
    if (self.has_entity_):
      out.putVarInt32(18)
      out.putVarInt32(self.entity_.ByteSize())
      self.entity_.OutputUnchecked(out)

  def TryMerge(self, d):
    # Loop until the enclosing group's END_GROUP tag (12); tag 0 is malformed.
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_entity().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_entity_:
      res+=prefix+"entity <\n"
      res+=self.entity_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res
class GetResponse(ProtocolBuffer.ProtocolMessage):
  """Datastore Get RPC response: a repeated `Entity` group (field 1), one
  GetResponse_Entity per requested key, in request order.

  Groups use START_GROUP/END_GROUP framing (tags 11/12) instead of a
  length prefix, which shapes ByteSize, OutputUnchecked and TryMerge below.
  """
  def __init__(self, contents=None):
    self.entity_ = []
    if contents is not None: self.MergeFromString(contents)

  def entity_size(self): return len(self.entity_)
  def entity_list(self): return self.entity_

  def entity(self, i):
    return self.entity_[i]

  def mutable_entity(self, i):
    return self.entity_[i]

  def add_entity(self):
    # Append a fresh group element and return it for the caller to fill in.
    x = GetResponse_Entity()
    self.entity_.append(x)
    return x

  def clear_entity(self):
    self.entity_ = []

  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.entity_) != len(x.entity_): return 0
    for e1, e2 in zip(self.entity_, x.entity_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.entity_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # 2 bytes per element: one START_GROUP and one END_GROUP tag.
    n = 0
    n += 2 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.entity_[i].ByteSize()
    return n + 0

  def Clear(self):
    self.clear_entity()

  def OutputUnchecked(self, out):
    # Frame each element with START_GROUP (11) / END_GROUP (12); no length prefix.
    for i in xrange(len(self.entity_)):
      out.putVarInt32(11)
      self.entity_[i].OutputUnchecked(out)
      out.putVarInt32(12)

  def TryMerge(self, d):
    # The element's own TryMerge consumes up to (and including) END_GROUP.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_entity().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.entity_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Entity%s {\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  # Field numbers, text-format names, and wire types (index 0 is a placeholder).
  kEntityGroup = 1
  kEntityentity = 2
  _TEXT = (
   "ErrorCode",
   "Entity",
   "entity",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class PutRequest(ProtocolBuffer.ProtocolMessage):
  """Datastore Put RPC request.

  Fields: repeated `entity` (EntityProto, field 1), optional embedded
  `transaction` (Transaction, field 2, lazily allocated), repeated
  `composite_index` (CompositeIndex, field 3), and optional bool
  `trusted` (field 4).
  """
  has_transaction_ = 0
  transaction_ = None
  has_trusted_ = 0
  trusted_ = 0

  def __init__(self, contents=None):
    self.entity_ = []
    self.composite_index_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def entity_size(self): return len(self.entity_)
  def entity_list(self): return self.entity_

  def entity(self, i):
    return self.entity_[i]

  def mutable_entity(self, i):
    return self.entity_[i]

  def add_entity(self):
    # Append a fresh EntityProto and return it for the caller to fill in.
    x = EntityProto()
    self.entity_.append(x)
    return x

  def clear_entity(self):
    self.entity_ = []

  def transaction(self):
    # Lazily construct the submessage; double-checked under the lock.
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_

  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()

  def clear_transaction(self):
    # Clear the presence bit but reuse the allocated submessage, if any.
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()

  def has_transaction(self): return self.has_transaction_

  def composite_index_size(self): return len(self.composite_index_)
  def composite_index_list(self): return self.composite_index_

  def composite_index(self, i):
    return self.composite_index_[i]

  def mutable_composite_index(self, i):
    return self.composite_index_[i]

  def add_composite_index(self):
    x = CompositeIndex()
    self.composite_index_.append(x)
    return x

  def clear_composite_index(self):
    self.composite_index_ = []

  def trusted(self): return self.trusted_

  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x

  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0

  def has_trusted(self): return self.has_trusted_

  def MergeFrom(self, x):
    """Merge all set fields of x into self (repeated fields are appended)."""
    assert x is not self
    for i in xrange(x.entity_size()): self.add_entity().CopyFrom(x.entity(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    for i in xrange(x.composite_index_size()): self.add_composite_index().CopyFrom(x.composite_index(i))
    if (x.has_trusted()): self.set_trusted(x.trusted())

  def Equals(self, x):
    if x is self: return 1
    if len(self.entity_) != len(x.entity_): return 0
    for e1, e2 in zip(self.entity_, x.entity_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if len(self.composite_index_) != len(x.composite_index_): return 0
    for e1, e2 in zip(self.composite_index_, x.composite_index_):
      if e1 != e2: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.entity_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    for p in self.composite_index_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Return the encoded size in bytes (trusted = 1-byte tag + 1-byte bool)."""
    n = 0
    n += 1 * len(self.entity_)
    for i in xrange(len(self.entity_)): n += self.lengthString(self.entity_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    n += 1 * len(self.composite_index_)
    for i in xrange(len(self.composite_index_)): n += self.lengthString(self.composite_index_[i].ByteSize())
    if (self.has_trusted_): n += 2
    return n + 0

  def Clear(self):
    self.clear_entity()
    self.clear_transaction()
    self.clear_composite_index()
    self.clear_trusted()

  def OutputUnchecked(self, out):
    # Tags: 10/18/26 = fields 1-3 length-delimited, 32 = field 4 varint bool.
    for i in xrange(len(self.entity_)):
      out.putVarInt32(10)
      out.putVarInt32(self.entity_[i].ByteSize())
      self.entity_[i].OutputUnchecked(out)
    if (self.has_transaction_):
      out.putVarInt32(18)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    for i in xrange(len(self.composite_index_)):
      out.putVarInt32(26)
      out.putVarInt32(self.composite_index_[i].ByteSize())
      self.composite_index_[i].OutputUnchecked(out)
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)

  def TryMerge(self, d):
    """Decode fields from decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_entity().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_composite_index().TryMerge(tmp)
        continue
      if tt == 32:
        self.set_trusted(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Return an indented text dump of the set fields."""
    res=""
    cnt=0
    for e in self.entity_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("entity%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.composite_index_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("composite_index%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    return res

  # Field numbers, text-format names, and wire types (index 0 is a placeholder).
  kentity = 1
  ktransaction = 2
  kcomposite_index = 3
  ktrusted = 4
  _TEXT = (
   "ErrorCode",
   "entity",
   "transaction",
   "composite_index",
   "trusted",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class PutResponse(ProtocolBuffer.ProtocolMessage):
  """Datastore Put RPC response: repeated `key` (Reference, field 1) echoing
  the written entities' final keys, plus an optional embedded `cost`
  (Cost, field 2, lazily allocated).
  """
  has_cost_ = 0
  cost_ = None

  def __init__(self, contents=None):
    self.key_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_

  def key(self, i):
    return self.key_[i]

  def mutable_key(self, i):
    return self.key_[i]

  def add_key(self):
    # Append a fresh Reference and return it for the caller to fill in.
    x = Reference()
    self.key_.append(x)
    return x

  def clear_key(self):
    self.key_ = []

  def cost(self):
    # Lazily construct the submessage; double-checked under the lock.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_

  def mutable_cost(self): self.has_cost_ = 1; return self.cost()

  def clear_cost(self):
    # Clear the presence bit but reuse the allocated submessage, if any.
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()

  def has_cost(self): return self.has_cost_

  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())

  def Equals(self, x):
    if x is self: return 1
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n + 0

  def Clear(self):
    self.clear_key()
    self.clear_cost()

  def OutputUnchecked(self, out):
    # Tags: 10 = field 1 length-delimited, 18 = field 2 length-delimited.
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)
    if (self.has_cost_):
      out.putVarInt32(18)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)

  def TryMerge(self, d):
    """Decode fields from decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res

  # Field numbers, text-format names, and wire types (index 0 is a placeholder).
  kkey = 1
  kcost = 2
  _TEXT = (
   "ErrorCode",
   "key",
   "cost",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class DeleteRequest(ProtocolBuffer.ProtocolMessage):
  """Datastore Delete RPC request.

  Fields use non-contiguous numbers: optional bool `trusted` (field 4,
  wire tag 32), optional embedded `transaction` (Transaction, field 5,
  tag 42, lazily allocated) and repeated `key` (Reference, field 6,
  tag 50).  Field numbers 1-3 are reserved/unused, hence the None
  placeholders in _TEXT and MAX_TYPE entries in _TYPES.
  """
  has_transaction_ = 0
  transaction_ = None
  has_trusted_ = 0
  trusted_ = 0

  def __init__(self, contents=None):
    self.key_ = []
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_

  def key(self, i):
    return self.key_[i]

  def mutable_key(self, i):
    return self.key_[i]

  def add_key(self):
    # Append a fresh Reference and return it for the caller to fill in.
    x = Reference()
    self.key_.append(x)
    return x

  def clear_key(self):
    self.key_ = []

  def transaction(self):
    # Lazily construct the submessage; double-checked under the lock.
    if self.transaction_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.transaction_ is None: self.transaction_ = Transaction()
      finally:
        self.lazy_init_lock_.release()
    return self.transaction_

  def mutable_transaction(self): self.has_transaction_ = 1; return self.transaction()

  def clear_transaction(self):
    # Clear the presence bit but reuse the allocated submessage, if any.
    if self.has_transaction_:
      self.has_transaction_ = 0;
      if self.transaction_ is not None: self.transaction_.Clear()

  def has_transaction(self): return self.has_transaction_

  def trusted(self): return self.trusted_

  def set_trusted(self, x):
    self.has_trusted_ = 1
    self.trusted_ = x

  def clear_trusted(self):
    if self.has_trusted_:
      self.has_trusted_ = 0
      self.trusted_ = 0

  def has_trusted(self): return self.has_trusted_

  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.key_size()): self.add_key().CopyFrom(x.key(i))
    if (x.has_transaction()): self.mutable_transaction().MergeFrom(x.transaction())
    if (x.has_trusted()): self.set_trusted(x.trusted())

  def Equals(self, x):
    if x is self: return 1
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    if self.has_transaction_ != x.has_transaction_: return 0
    if self.has_transaction_ and self.transaction_ != x.transaction_: return 0
    if self.has_trusted_ != x.has_trusted_: return 0
    if self.has_trusted_ and self.trusted_ != x.trusted_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    for p in self.key_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_transaction_ and not self.transaction_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    """Return the encoded size in bytes (trusted = 1-byte tag + 1-byte bool)."""
    n = 0
    n += 1 * len(self.key_)
    for i in xrange(len(self.key_)): n += self.lengthString(self.key_[i].ByteSize())
    if (self.has_transaction_): n += 1 + self.lengthString(self.transaction_.ByteSize())
    if (self.has_trusted_): n += 2
    return n + 0

  def Clear(self):
    self.clear_key()
    self.clear_transaction()
    self.clear_trusted()

  def OutputUnchecked(self, out):
    # Emitted in ascending tag order: 32 (trusted), 42 (transaction), 50 (key).
    if (self.has_trusted_):
      out.putVarInt32(32)
      out.putBoolean(self.trusted_)
    if (self.has_transaction_):
      out.putVarInt32(42)
      out.putVarInt32(self.transaction_.ByteSize())
      self.transaction_.OutputUnchecked(out)
    for i in xrange(len(self.key_)):
      out.putVarInt32(50)
      out.putVarInt32(self.key_[i].ByteSize())
      self.key_[i].OutputUnchecked(out)

  def TryMerge(self, d):
    """Decode fields from decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 32:
        self.set_trusted(d.getBoolean())
        continue
      if tt == 42:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_transaction().TryMerge(tmp)
        continue
      if tt == 50:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_key().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_transaction_:
      res+=prefix+"transaction <\n"
      res+=self.transaction_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_trusted_: res+=prefix+("trusted: %s\n" % self.DebugFormatBool(self.trusted_))
    return res

  # Field numbers; 1-3 are unused, so _TEXT/_TYPES carry placeholders there.
  kkey = 6
  ktransaction = 5
  ktrusted = 4
  _TEXT = (
   "ErrorCode",
   None,
   None,
   None,
   "trusted",
   "transaction",
   "key",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.MAX_TYPE,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class DeleteResponse(ProtocolBuffer.ProtocolMessage):
  """Datastore Delete RPC response: optional embedded `cost` (Cost,
  field 1, lazily allocated under lazy_init_lock_).
  """
  has_cost_ = 0
  cost_ = None

  def __init__(self, contents=None):
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def cost(self):
    # Lazily construct the submessage; double-checked under the lock.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_

  def mutable_cost(self): self.has_cost_ = 1; return self.cost()

  def clear_cost(self):
    # Clear the presence bit but reuse the allocated submessage, if any.
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()

  def has_cost(self): return self.has_cost_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())

  def Equals(self, x):
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n + 0

  def Clear(self):
    self.clear_cost()

  def OutputUnchecked(self, out):
    # Tag 10 = field 1, length-delimited embedded message.
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)

  def TryMerge(self, d):
    """Decode fields from decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res

  # Field numbers, text-format names, and wire types (index 0 is a placeholder).
  kcost = 1
  _TEXT = (
   "ErrorCode",
   "cost",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class NextRequest(ProtocolBuffer.ProtocolMessage):
  """Datastore Next RPC request: required embedded `cursor` (Cursor,
  field 1) and optional varint `count` (field 2, default 1).

  Unlike the lazily-allocated submessages elsewhere in this file, the
  cursor member is constructed eagerly in __init__ because the field is
  required and always serialized.
  """
  has_cursor_ = 0
  has_count_ = 0
  count_ = 1

  def __init__(self, contents=None):
    self.cursor_ = Cursor()
    if contents is not None: self.MergeFromString(contents)

  def cursor(self): return self.cursor_
  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor_

  def clear_cursor(self):self.has_cursor_ = 0; self.cursor_.Clear()
  def has_cursor(self): return self.has_cursor_

  def count(self): return self.count_

  def set_count(self, x):
    self.has_count_ = 1
    self.count_ = x

  def clear_count(self):
    # Reset to the field's declared default of 1, not 0.
    if self.has_count_:
      self.has_count_ = 0
      self.count_ = 1

  def has_count(self): return self.has_count_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
    if (x.has_count()): self.set_count(x.count())

  def Equals(self, x):
    if x is self: return 1
    if self.has_cursor_ != x.has_cursor_: return 0
    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
    if self.has_count_ != x.has_count_: return 0
    if self.has_count_ and self.count_ != x.count_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # cursor is required and must itself be initialized.
    initialized = 1
    if (not self.has_cursor_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: cursor not set.')
    elif not self.cursor_.IsInitialized(debug_strs): initialized = 0
    return initialized

  def ByteSize(self):
    # The trailing +1 is the always-present cursor field's tag byte.
    n = 0
    n += self.lengthString(self.cursor_.ByteSize())
    if (self.has_count_): n += 1 + self.lengthVarInt64(self.count_)
    return n + 1

  def Clear(self):
    self.clear_cursor()
    self.clear_count()

  def OutputUnchecked(self, out):
    # Tags: 10 = field 1 length-delimited (unconditional), 16 = field 2 varint.
    out.putVarInt32(10)
    out.putVarInt32(self.cursor_.ByteSize())
    self.cursor_.OutputUnchecked(out)
    if (self.has_count_):
      out.putVarInt32(16)
      out.putVarInt32(self.count_)

  def TryMerge(self, d):
    """Decode fields from decoder d; unknown tags are skipped, tag 0 raises."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cursor().TryMerge(tmp)
        continue
      if tt == 16:
        self.set_count(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_cursor_:
      res+=prefix+"cursor <\n"
      res+=self.cursor_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    if self.has_count_: res+=prefix+("count: %s\n" % self.DebugFormatInt32(self.count_))
    return res

  # Field numbers, text-format names, and wire types (index 0 is a placeholder).
  kcursor = 1
  kcount = 2
  _TEXT = (
   "ErrorCode",
   "cursor",
   "count",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class QueryResult(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: one batch of datastore query results.

  Wire fields:
    cursor (tag 1, optional message): resume position for the next batch.
    result (tag 2, repeated message): the EntityProto results in this batch.
    more_results (tag 3, required bool): whether another batch is available.
  """

  # Presence flags and scalar defaults live on the class; setters create
  # instance attributes that shadow them.
  has_cursor_ = 0
  cursor_ = None
  has_more_results_ = 0
  more_results_ = 0

  def __init__(self, contents=None):
    self.result_ = []
    # Guards lazy construction of the cursor submessage in cursor().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def cursor(self):
    # Lazily create the submessage under the lock (double-checked) so that
    # concurrent readers end up sharing a single Cursor instance.
    if self.cursor_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cursor_ is None: self.cursor_ = Cursor()
      finally:
        self.lazy_init_lock_.release()
    return self.cursor_

  def mutable_cursor(self): self.has_cursor_ = 1; return self.cursor()

  def clear_cursor(self):
    # Clears the existing submessage in place rather than discarding it.
    if self.has_cursor_:
      self.has_cursor_ = 0;
      if self.cursor_ is not None: self.cursor_.Clear()

  def has_cursor(self): return self.has_cursor_

  def result_size(self): return len(self.result_)

  def result_list(self): return self.result_

  def result(self, i):
    return self.result_[i]

  def mutable_result(self, i):
    return self.result_[i]

  def add_result(self):
    # Appends and returns a fresh, empty EntityProto for the caller to fill.
    x = EntityProto()
    self.result_.append(x)
    return x

  def clear_result(self):
    self.result_ = []

  def more_results(self): return self.more_results_

  def set_more_results(self, x):
    self.has_more_results_ = 1
    self.more_results_ = x

  def clear_more_results(self):
    if self.has_more_results_:
      self.has_more_results_ = 0
      self.more_results_ = 0

  def has_more_results(self): return self.has_more_results_

  def MergeFrom(self, x):
    """Merges every set field of x into self (repeated fields append)."""
    assert x is not self
    if (x.has_cursor()): self.mutable_cursor().MergeFrom(x.cursor())
    for i in xrange(x.result_size()): self.add_result().CopyFrom(x.result(i))
    if (x.has_more_results()): self.set_more_results(x.more_results())

  def Equals(self, x):
    """Field-by-field equality; presence flags must match too."""
    if x is self: return 1
    if self.has_cursor_ != x.has_cursor_: return 0
    if self.has_cursor_ and self.cursor_ != x.cursor_: return 0
    if len(self.result_) != len(x.result_): return 0
    for e1, e2 in zip(self.result_, x.result_):
      if e1 != e2: return 0
    if self.has_more_results_ != x.has_more_results_: return 0
    if self.has_more_results_ and self.more_results_ != x.more_results_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff all required fields, recursively, are set."""
    initialized = 1
    if (self.has_cursor_ and not self.cursor_.IsInitialized(debug_strs)): initialized = 0
    for p in self.result_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (not self.has_more_results_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: more_results not set.')
    return initialized

  def ByteSize(self):
    """Serialized size in bytes (assumes required fields are set)."""
    n = 0
    if (self.has_cursor_): n += 1 + self.lengthString(self.cursor_.ByteSize())
    n += 1 * len(self.result_)
    for i in xrange(len(self.result_)): n += self.lengthString(self.result_[i].ByteSize())
    # +2: one-byte tag plus one-byte bool for required more_results.
    return n + 2

  def Clear(self):
    self.clear_cursor()
    self.clear_result()
    self.clear_more_results()

  def OutputUnchecked(self, out):
    """Serializes to out without an IsInitialized() check.

    Wire tags: 10 = (1<<3)|2 length-delimited, 18 = (2<<3)|2,
    24 = (3<<3)|0 varint.
    """
    if (self.has_cursor_):
      out.putVarInt32(10)
      out.putVarInt32(self.cursor_.ByteSize())
      self.cursor_.OutputUnchecked(out)
    for i in xrange(len(self.result_)):
      out.putVarInt32(18)
      out.putVarInt32(self.result_[i].ByteSize())
      self.result_[i].OutputUnchecked(out)
    out.putVarInt32(24)
    out.putBoolean(self.more_results_)

  def TryMerge(self, d):
    """Decodes from Decoder d, merging into self; unknown tags are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Bounded sub-decoder over the submessage bytes; no copy of payload.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cursor().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_result().TryMerge(tmp)
        continue
      if tt == 24:
        self.set_more_results(d.getBoolean())
        continue
      # Tag 0 signals corrupt input; any other unknown tag is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text, indented by prefix."""
    res=""
    if self.has_cursor_:
      res+=prefix+"cursor <\n"
      res+=self.cursor_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.result_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("result%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_more_results_: res+=prefix+("more_results: %s\n" % self.DebugFormatBool(self.more_results_))
    return res

  # Field tag numbers.
  kcursor = 1
  kresult = 2
  kmore_results = 3

  # Text-format metadata, indexed by tag number (index 0 is the error slot).
  _TEXT = (
   "ErrorCode",
   "cursor",
   "result",
   "more_results",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Schema(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message describing a datastore schema.

  Wire fields:
    kind (tag 1, repeated message): one EntityProto per kind
      (presumably an exemplar entity per kind -- verify against the service).
  """

  def __init__(self, contents=None):
    self.kind_ = []
    if contents is not None: self.MergeFromString(contents)

  def kind_size(self): return len(self.kind_)

  def kind_list(self): return self.kind_

  def kind(self, i):
    return self.kind_[i]

  def mutable_kind(self, i):
    return self.kind_[i]

  def add_kind(self):
    # Appends and returns a fresh, empty EntityProto for the caller to fill.
    x = EntityProto()
    self.kind_.append(x)
    return x

  def clear_kind(self):
    self.kind_ = []

  def MergeFrom(self, x):
    """Appends copies of x's kind entries onto self."""
    assert x is not self
    for i in xrange(x.kind_size()): self.add_kind().CopyFrom(x.kind(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.kind_) != len(x.kind_): return 0
    for e1, e2 in zip(self.kind_, x.kind_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; only submessages can be uninitialized.
    initialized = 1
    for p in self.kind_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    n = 0
    n += 1 * len(self.kind_)
    for i in xrange(len(self.kind_)): n += self.lengthString(self.kind_[i].ByteSize())
    return n + 0

  def Clear(self):
    self.clear_kind()

  def OutputUnchecked(self, out):
    # Tag 10 = (field 1 << 3) | 2 (length-delimited).
    for i in xrange(len(self.kind_)):
      out.putVarInt32(10)
      out.putVarInt32(self.kind_[i].ByteSize())
      self.kind_[i].OutputUnchecked(out)

  def TryMerge(self, d):
    """Decodes from Decoder d; tag 0 is corrupt, unknown tags are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_kind().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text, indented by prefix."""
    res=""
    cnt=0
    for e in self.kind_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("kind%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res

  # Field tag numbers.
  kkind = 1

  # Text-format metadata, indexed by tag number (index 0 is the error slot).
  _TEXT = (
   "ErrorCode",
   "kind",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class CompositeIndices(ProtocolBuffer.ProtocolMessage):
def __init__(self, contents=None):
self.index_ = []
if contents is not None: self.MergeFromString(contents)
def index_size(self): return len(self.index_)
def index_list(self): return self.index_
def index(self, i):
return self.index_[i]
def mutable_index(self, i):
return self.index_[i]
def add_index(self):
x = CompositeIndex()
self.index_.append(x)
return x
def clear_index(self):
self.index_ = []
def MergeFrom(self, x):
assert x is not self
for i in xrange(x.index_size()): self.add_index().CopyFrom(x.index(i))
def Equals(self, x):
if x is self: return 1
if len(self.index_) != len(x.index_): return 0
for e1, e2 in zip(self.index_, x.index_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
for p in self.index_:
if not p.IsInitialized(debug_strs): initialized=0
return initialized
def ByteSize(self):
n = 0
n += 1 * len(self.index_)
for i in xrange(len(self.index_)): n += self.lengthString(self.index_[i].ByteSize())
return n + 0
def Clear(self):
self.clear_index()
def OutputUnchecked(self, out):
for i in xrange(len(self.index_)):
out.putVarInt32(10)
out.putVarInt32(self.index_[i].ByteSize())
self.index_[i].OutputUnchecked(out)
def TryMerge(self, d):
while d.avail() > 0:
tt = d.getVarInt32()
if tt == 10:
length = d.getVarInt32()
tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
d.skip(length)
self.add_index().TryMerge(tmp)
continue
if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
d.skipData(tt)
def __str__(self, prefix="", printElemNumber=0):
res=""
cnt=0
for e in self.index_:
elm=""
if printElemNumber: elm="(%d)" % cnt
res+=prefix+("index%s <\n" % elm)
res+=e.__str__(prefix + " ", printElemNumber)
res+=prefix+">\n"
cnt+=1
return res
kindex = 1
_TEXT = (
"ErrorCode",
"index",
)
_TYPES = (
ProtocolBuffer.Encoder.NUMERIC,
ProtocolBuffer.Encoder.STRING,
)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
class CommitResponse(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: response to a datastore commit.

  Wire fields:
    cost (tag 1, optional message): the Cost incurred by the commit.
  """

  # Presence flag and default are class attributes; instances shadow them.
  has_cost_ = 0
  cost_ = None

  def __init__(self, contents=None):
    # Guards lazy construction of the cost submessage in cost().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def cost(self):
    # Lazily create the submessage under the lock (double-checked) so that
    # concurrent readers end up sharing a single Cost instance.
    if self.cost_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.cost_ is None: self.cost_ = Cost()
      finally:
        self.lazy_init_lock_.release()
    return self.cost_

  def mutable_cost(self): self.has_cost_ = 1; return self.cost()

  def clear_cost(self):
    # Clears the existing submessage in place rather than discarding it.
    if self.has_cost_:
      self.has_cost_ = 0;
      if self.cost_ is not None: self.cost_.Clear()

  def has_cost(self): return self.has_cost_

  def MergeFrom(self, x):
    """Merges every set field of x into self."""
    assert x is not self
    if (x.has_cost()): self.mutable_cost().MergeFrom(x.cost())

  def Equals(self, x):
    if x is self: return 1
    if self.has_cost_ != x.has_cost_: return 0
    if self.has_cost_ and self.cost_ != x.cost_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; only the submessage can be uninitialized.
    initialized = 1
    if (self.has_cost_ and not self.cost_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    n = 0
    if (self.has_cost_): n += 1 + self.lengthString(self.cost_.ByteSize())
    return n + 0

  def Clear(self):
    self.clear_cost()

  def OutputUnchecked(self, out):
    # Tag 10 = (field 1 << 3) | 2 (length-delimited).
    if (self.has_cost_):
      out.putVarInt32(10)
      out.putVarInt32(self.cost_.ByteSize())
      self.cost_.OutputUnchecked(out)

  def TryMerge(self, d):
    """Decodes from Decoder d; tag 0 is corrupt, unknown tags are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_cost().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text, indented by prefix."""
    res=""
    if self.has_cost_:
      res+=prefix+"cost <\n"
      res+=self.cost_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res

  # Field tag numbers.
  kcost = 1

  # Text-format metadata, indexed by tag number (index 0 is the error slot).
  _TEXT = (
   "ErrorCode",
   "cost",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module.
__all__ = ['Transaction','Query','Query_Filter','Query_Order','QueryExplanation','Cursor','Error','Cost','GetRequest','GetResponse','GetResponse_Entity','PutRequest','PutResponse','DeleteRequest','DeleteResponse','NextRequest','QueryResult','Schema','CompositeIndices','CommitResponse']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Primitives for dealing with datastore indexes.
Example index.yaml file:
------------------------
indexes:
- kind: Cat
ancestor: no
properties:
- name: name
- name: age
direction: desc
- kind: Cat
properties:
- name: name
direction: ascending
- name: whiskers
direction: descending
- kind: Store
ancestor: yes
properties:
- name: business
direction: asc
- name: owner
direction: asc
"""
from google.appengine.api import datastore_types
from google.appengine.api import validation
from google.appengine.api import yaml_errors
from google.appengine.api import yaml_object
from google.appengine.datastore import datastore_pb
class Property(validation.Validated):
  """Representation for an individual property of an index.

  Attributes:
    name: Name of attribute to sort by.
    direction: Direction of sort; 'asc' or 'desc', defaulting to 'asc'.
  """

  ATTRIBUTES = {
      'name': validation.TYPE_STR,
      # 'ascending'/'descending' are accepted aliases that normalize to
      # 'asc'/'desc' respectively.
      'direction': validation.Options(('asc', ('ascending',)),
                                      ('desc', ('descending',)),
                                      default='asc'),
  }
class Index(validation.Validated):
  """Individual index definition.

  The order of the properties determines a given index's sort priority.

  Attributes:
    kind: Datastore kind that index belongs to.
    ancestor: Include ancestors in index.
    properties: Properties to sort on.
  """

  ATTRIBUTES = {
      'kind': validation.TYPE_STR,
      'ancestor': validation.Type(bool, default=False),
      'properties': validation.Optional(validation.Repeated(Property)),
  }
class IndexDefinitions(validation.Validated):
  """Top level for index definition file (index.yaml).

  Attributes:
    indexes: List of Index definitions; may be None when the file is empty.
  """

  ATTRIBUTES = {
      'indexes': validation.Optional(validation.Repeated(Index)),
  }
def ParseIndexDefinitions(document):
  """Parse an individual index definitions document from string or stream.

  Args:
    document: Yaml document as a string or file-like stream.

  Raises:
    MultipleConfigurationFile when the configuration file contains more than
    one document.

  Returns:
    A single parsed IndexDefinitions object, or None when the configuration
    file is empty.
  """
  try:
    parsed = yaml_object.BuildSingleObject(IndexDefinitions, document)
  except yaml_errors.EmptyConfigurationFile:
    # An empty index.yaml is legal; signal it with None rather than raising.
    parsed = None
  return parsed
def ParseMultipleIndexDefinitions(document):
  """Parse multiple index definitions documents from a string or stream.

  Args:
    document: Yaml document as a string or file-like stream.

  Returns:
    A list of datastore_index.IndexDefinitions objects, one for each document.
  """
  return yaml_object.BuildObjects(IndexDefinitions, document)
def IndexDefinitionsToKeys(indexes):
  """Convert IndexDefinitions to a set of keys.

  Args:
    indexes: A datastore_index.IndexDefinitions instance, or None.

  Returns:
    A set of keys constructed from the argument, each key being a
    tuple of the form (kind, ancestor, properties) where properties is
    a tuple of (name, direction) pairs, direction being ASCENDING or
    DESCENDING (the enums).
  """
  # Both a missing definitions object and an empty/None index list yield
  # an empty key set.
  if indexes is None or not indexes.indexes:
    return set()
  return set(IndexToKey(index) for index in indexes.indexes)
def IndexToKey(index):
  """Convert Index to key.

  Args:
    index: A datastore_index.Index instance (not None!).

  Returns:
    A tuple of the form (kind, ancestor, properties) where properties
    is a tuple of (name, direction) pairs, direction being ASCENDING
    or DESCENDING (the enums).
  """
  # A missing properties list is treated the same as an empty one.
  props = tuple(
      (prop.name, ASCENDING if prop.direction == 'asc' else DESCENDING)
      for prop in (index.properties or ()))
  return index.kind, index.ancestor, props
# Sort-direction enum aliases, re-exported from the datastore protocol buffer.
ASCENDING = datastore_pb.Query_Order.ASCENDING
DESCENDING = datastore_pb.Query_Order.DESCENDING

# Filter operators partitioned by how they constrain an index; every filter
# op used by CompositeIndexForQuery must fall into exactly one of these sets.
EQUALITY_OPERATORS = set((datastore_pb.Query_Filter.EQUAL,
                          ))
INEQUALITY_OPERATORS = set((datastore_pb.Query_Filter.LESS_THAN,
                            datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
                            datastore_pb.Query_Filter.GREATER_THAN,
                            datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
                            ))
EXISTS_OPERATORS = set((datastore_pb.Query_Filter.EXISTS,
                        ))
def CompositeIndexForQuery(query):
  """Return the composite index needed for a query.

  A query is translated into a tuple, as follows:

  - The first item is the kind string, or None if we're not filtering
    on kind (see below).

  - The second item is a bool giving whether the query specifies an
    ancestor.

  - After that come (property, ASCENDING) pairs for those Filter
    entries whose operator is EQUAL or IN.  Since the order of these
    doesn't matter, they are sorted by property name to normalize them
    in order to avoid duplicates.

  - After that comes at most one (property, ASCENDING) pair for a
    Filter entry whose operator is on of the four inequalities.  There
    can be at most one of these.

  - After that come all the (property, direction) pairs for the Order
    entries, in the order given in the query.  Exceptions: (a) if
    there is a Filter entry with an inequality operator that matches
    the first Order entry, the first order pair is omitted (or,
    equivalently, in this case the inequality pair is omitted); (b) if
    an Order entry corresponds to an equality filter, it is ignored
    (since there will only ever be one value returned).

  - Finally, if there are Filter entries whose operator is EXISTS, and
    whose property names are not already listed, they are added, with
    the direction set to ASCENDING.

  This algorithm should consume all Filter and Order entries.

  Additional notes:

  - The low-level implementation allows queries that don't specify a
    kind; but the Python API doesn't support this yet.

  - If there's an inequality filter and one or more sort orders, the
    first sort order *must* match the inequality filter.

  - The following indexes are always built in and should be suppressed:
    - query on kind only;
    - query on kind and one filter *or* one order;
    - query on ancestor only, without kind (not exposed in Python yet);
    - query on kind and equality filters only, no order (with or without
      ancestor).

  - While the protocol buffer allows a Filter to contain multiple
    properties, we don't use this.  It is only needed for the IN operator
    but this is (currently) handled on the client side, so in practice
    each Filter is expected to have exactly one property.

  Args:
    query: A datastore_pb.Query instance.

  Returns:
    A tuple of the form (required, kind, ancestor, (prop1, prop2, ...), neq):
      required: boolean, whether the index is required
      kind: the kind or None;
      ancestor: True if this is an ancestor query;
      prop1, prop2, ...: tuples of the form (name, direction) where:
        name: a property name;
        direction: datastore_pb.Query_Order.ASCENDING or ...DESCENDING;
      neq: the number of prop tuples corresponding to equality filters.
  """
  required = True

  kind = query.kind()
  ancestor = query.has_ancestor()
  filters = query.filter_list()
  orders = query.order_list()

  # Sanity-check filters: IN is client-side, and each filter must carry
  # exactly one property.  NOTE: 'filter' shadows the builtin throughout.
  for filter in filters:
    assert filter.op() != datastore_pb.Query_Filter.IN, 'Filter.op()==IN'
    nprops = len(filter.property_list())
    assert nprops == 1, 'Filter has %s properties, expected 1' % nprops

  # Built-in index: pure ancestor query (no kind, filters or orders).
  if ancestor and not kind and not filters and not orders:
    required = False

  # Partition the filters by operator class; every filter must land in
  # exactly one bucket.
  eq_filters = [f for f in filters if f.op() in EQUALITY_OPERATORS]
  ineq_filters = [f for f in filters if f.op() in INEQUALITY_OPERATORS]
  exists_filters = [f for f in filters if f.op() in EXISTS_OPERATORS]
  assert (len(eq_filters) + len(ineq_filters) +
          len(exists_filters)) == len(filters), 'Not all filters used'

  # Built-in index: kind + equality-only filters with no order -- unless a
  # special property (e.g. __key__) is among them.
  if (kind and eq_filters and not ineq_filters and not exists_filters and
      not orders):
    names = set(f.property(0).name() for f in eq_filters)
    if not names.intersection(datastore_types._SPECIAL_PROPERTIES):
      required = False

  # All inequality filters must refer to the same property.
  ineq_property = None
  if ineq_filters:
    ineq_property = ineq_filters[0].property(0).name()
    for filter in ineq_filters:
      assert filter.property(0).name() == ineq_property

  # Drop Order entries that correspond to equality filters: only a single
  # value can ever be returned for those properties.
  new_orders = []
  for order in orders:
    name = order.property()
    for filter in eq_filters:
      if filter.property(0).name() == name:
        break
    else:
      new_orders.append(order)
  orders = new_orders

  props = []

  # Equality properties first, ascending, sorted by name to normalize.
  for f in eq_filters:
    prop = f.property(0)
    props.append((prop.name(), ASCENDING))
  props.sort()

  # The inequality property: either it must match the first remaining sort
  # order, or it contributes its own ascending pair.
  if ineq_property:
    if orders:
      assert ineq_property == orders[0].property()
    else:
      props.append((ineq_property, ASCENDING))

  # Then the sort orders, in query order.
  for order in orders:
    props.append((order.property(), order.direction()))

  # Finally, EXISTS properties not already listed, ascending.
  for filter in exists_filters:
    prop = filter.property(0)
    prop_name = prop.name()
    for name, direction in props:
      if name == prop_name:
        break
    else:
      props.append((prop_name, ASCENDING))

  # Built-in index: kind plus at most one property...
  if kind and not ancestor and len(props) <= 1:
    required = False

    # ...except that a descending special property still needs a composite
    # index.
    if props:
      prop, dir = props[0]
      if prop in datastore_types._SPECIAL_PROPERTIES and dir is DESCENDING:
        required = True

  # A multi-entry index over a single repeated property is never required.
  unique_names = set(name for name, dir in props)
  if len(props) > 1 and len(unique_names) == 1:
    required = False

  return (required, kind, ancestor, tuple(props), len(eq_filters))
def IndexYamlForQuery(kind, ancestor, props):
  """Return the composite index definition YAML needed for a query.

  The arguments are the same as the tuples returned by CompositeIndexForQuery,
  without the last neq element.

  Args:
    kind: the kind or None
    ancestor: True if this is an ancestor query, False otherwise
    props: tuples of the form (name, direction) where:
      name: a property name;
      direction: datastore_pb.Query_Order.ASCENDING or ...DESCENDING;

  Returns:
    A string with the YAML for the composite index needed by the query.
  """
  lines = ['- kind: %s' % kind]
  if ancestor:
    lines.append('  ancestor: yes')
  if props:
    lines.append('  properties:')
    for name, direction in props:
      lines.append('  - name: %s' % name)
      # Ascending is the default in index.yaml, so only 'desc' is emitted.
      if direction == DESCENDING:
        lines.append('    direction: desc')
  return '\n'.join(lines)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
class CapabilityConfigList(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: a list of capability configurations.

  Wire fields:
    config (tag 1, repeated message): CapabilityConfig entries.
    default_config (tag 2, optional message): fallback CapabilityConfig.
  """

  # Presence flag and default are class attributes; instances shadow them.
  has_default_config_ = 0
  default_config_ = None

  def __init__(self, contents=None):
    self.config_ = []
    # Guards lazy construction of default_config in default_config().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  def config_size(self): return len(self.config_)

  def config_list(self): return self.config_

  def config(self, i):
    return self.config_[i]

  def mutable_config(self, i):
    return self.config_[i]

  def add_config(self):
    # Appends and returns a fresh CapabilityConfig for the caller to fill.
    x = CapabilityConfig()
    self.config_.append(x)
    return x

  def clear_config(self):
    self.config_ = []

  def default_config(self):
    # Lazily create the submessage under the lock (double-checked) so that
    # concurrent readers end up sharing a single instance.
    if self.default_config_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.default_config_ is None: self.default_config_ = CapabilityConfig()
      finally:
        self.lazy_init_lock_.release()
    return self.default_config_

  def mutable_default_config(self): self.has_default_config_ = 1; return self.default_config()

  def clear_default_config(self):
    # Clears the existing submessage in place rather than discarding it.
    if self.has_default_config_:
      self.has_default_config_ = 0;
      if self.default_config_ is not None: self.default_config_.Clear()

  def has_default_config(self): return self.has_default_config_

  def MergeFrom(self, x):
    """Merges every set field of x into self (repeated fields append)."""
    assert x is not self
    for i in xrange(x.config_size()): self.add_config().CopyFrom(x.config(i))
    if (x.has_default_config()): self.mutable_default_config().MergeFrom(x.default_config())

  def Equals(self, x):
    if x is self: return 1
    if len(self.config_) != len(x.config_): return 0
    for e1, e2 in zip(self.config_, x.config_):
      if e1 != e2: return 0
    if self.has_default_config_ != x.has_default_config_: return 0
    if self.has_default_config_ and self.default_config_ != x.default_config_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields of its own; only submessages can be uninitialized.
    initialized = 1
    for p in self.config_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_default_config_ and not self.default_config_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    n = 0
    n += 1 * len(self.config_)
    for i in xrange(len(self.config_)): n += self.lengthString(self.config_[i].ByteSize())
    if (self.has_default_config_): n += 1 + self.lengthString(self.default_config_.ByteSize())
    return n + 0

  def Clear(self):
    self.clear_config()
    self.clear_default_config()

  def OutputUnchecked(self, out):
    # Wire tags: 10 = (1<<3)|2, 18 = (2<<3)|2 (both length-delimited).
    for i in xrange(len(self.config_)):
      out.putVarInt32(10)
      out.putVarInt32(self.config_[i].ByteSize())
      self.config_[i].OutputUnchecked(out)
    if (self.has_default_config_):
      out.putVarInt32(18)
      out.putVarInt32(self.default_config_.ByteSize())
      self.default_config_.OutputUnchecked(out)

  def TryMerge(self, d):
    """Decodes from Decoder d; tag 0 is corrupt, unknown tags are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_config().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_default_config().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text, indented by prefix."""
    res=""
    cnt=0
    for e in self.config_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("config%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_default_config_:
      res+=prefix+"default_config <\n"
      res+=self.default_config_.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
    return res

  # Field tag numbers.
  kconfig = 1
  kdefault_config = 2

  # Text-format metadata, indexed by tag number (index 0 is the error slot).
  _TEXT = (
   "ErrorCode",
   "config",
   "default_config",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class CapabilityConfig(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message: the status of one service capability.

  Wire fields:
    package (tag 1, required string): service package name.
    capability (tag 2, required string): capability identifier.
    status (tag 3, optional enum Status, default UNKNOWN).
    internal_message (tag 4, optional string).
    admin_message (tag 5, optional string).
    error_message (tag 6, optional string).
    scheduled_time (tag 7, optional string).
  """

  # Status enum values.
  ENABLED      =    1
  SCHEDULED    =    2
  DISABLED     =    3
  UNKNOWN      =    4

  # Reverse mapping used by Status_Name for debug output.
  _Status_NAMES = {
    1: "ENABLED",
    2: "SCHEDULED",
    3: "DISABLED",
    4: "UNKNOWN",
  }

  def Status_Name(cls, x): return cls._Status_NAMES.get(x, "")
  Status_Name = classmethod(Status_Name)

  # Presence flags and scalar defaults are class attributes; setters create
  # instance attributes that shadow them.
  has_package_ = 0
  package_ = ""
  has_capability_ = 0
  capability_ = ""
  has_status_ = 0
  status_ = 4
  has_scheduled_time_ = 0
  scheduled_time_ = ""
  has_internal_message_ = 0
  internal_message_ = ""
  has_admin_message_ = 0
  admin_message_ = ""
  has_error_message_ = 0
  error_message_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def package(self): return self.package_

  def set_package(self, x):
    self.has_package_ = 1
    self.package_ = x

  def clear_package(self):
    if self.has_package_:
      self.has_package_ = 0
      self.package_ = ""

  def has_package(self): return self.has_package_

  def capability(self): return self.capability_

  def set_capability(self, x):
    self.has_capability_ = 1
    self.capability_ = x

  def clear_capability(self):
    if self.has_capability_:
      self.has_capability_ = 0
      self.capability_ = ""

  def has_capability(self): return self.has_capability_

  def status(self): return self.status_

  def set_status(self, x):
    self.has_status_ = 1
    self.status_ = x

  def clear_status(self):
    # Resets to the default enum value, UNKNOWN (4).
    if self.has_status_:
      self.has_status_ = 0
      self.status_ = 4

  def has_status(self): return self.has_status_

  def scheduled_time(self): return self.scheduled_time_

  def set_scheduled_time(self, x):
    self.has_scheduled_time_ = 1
    self.scheduled_time_ = x

  def clear_scheduled_time(self):
    if self.has_scheduled_time_:
      self.has_scheduled_time_ = 0
      self.scheduled_time_ = ""

  def has_scheduled_time(self): return self.has_scheduled_time_

  def internal_message(self): return self.internal_message_

  def set_internal_message(self, x):
    self.has_internal_message_ = 1
    self.internal_message_ = x

  def clear_internal_message(self):
    if self.has_internal_message_:
      self.has_internal_message_ = 0
      self.internal_message_ = ""

  def has_internal_message(self): return self.has_internal_message_

  def admin_message(self): return self.admin_message_

  def set_admin_message(self, x):
    self.has_admin_message_ = 1
    self.admin_message_ = x

  def clear_admin_message(self):
    if self.has_admin_message_:
      self.has_admin_message_ = 0
      self.admin_message_ = ""

  def has_admin_message(self): return self.has_admin_message_

  def error_message(self): return self.error_message_

  def set_error_message(self, x):
    self.has_error_message_ = 1
    self.error_message_ = x

  def clear_error_message(self):
    if self.has_error_message_:
      self.has_error_message_ = 0
      self.error_message_ = ""

  def has_error_message(self): return self.has_error_message_

  def MergeFrom(self, x):
    """Merges every set field of x into self."""
    assert x is not self
    if (x.has_package()): self.set_package(x.package())
    if (x.has_capability()): self.set_capability(x.capability())
    if (x.has_status()): self.set_status(x.status())
    if (x.has_scheduled_time()): self.set_scheduled_time(x.scheduled_time())
    if (x.has_internal_message()): self.set_internal_message(x.internal_message())
    if (x.has_admin_message()): self.set_admin_message(x.admin_message())
    if (x.has_error_message()): self.set_error_message(x.error_message())

  def Equals(self, x):
    """Field-by-field equality; presence flags must match too."""
    if x is self: return 1
    if self.has_package_ != x.has_package_: return 0
    if self.has_package_ and self.package_ != x.package_: return 0
    if self.has_capability_ != x.has_capability_: return 0
    if self.has_capability_ and self.capability_ != x.capability_: return 0
    if self.has_status_ != x.has_status_: return 0
    if self.has_status_ and self.status_ != x.status_: return 0
    if self.has_scheduled_time_ != x.has_scheduled_time_: return 0
    if self.has_scheduled_time_ and self.scheduled_time_ != x.scheduled_time_: return 0
    if self.has_internal_message_ != x.has_internal_message_: return 0
    if self.has_internal_message_ and self.internal_message_ != x.internal_message_: return 0
    if self.has_admin_message_ != x.has_admin_message_: return 0
    if self.has_admin_message_ and self.admin_message_ != x.admin_message_: return 0
    if self.has_error_message_ != x.has_error_message_: return 0
    if self.has_error_message_ and self.error_message_ != x.error_message_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff required fields package and capability are set."""
    initialized = 1
    if (not self.has_package_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: package not set.')
    if (not self.has_capability_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: capability not set.')
    return initialized

  def ByteSize(self):
    """Serialized size in bytes (assumes required fields are set)."""
    n = 0
    n += self.lengthString(len(self.package_))
    n += self.lengthString(len(self.capability_))
    if (self.has_status_): n += 1 + self.lengthVarInt64(self.status_)
    if (self.has_scheduled_time_): n += 1 + self.lengthString(len(self.scheduled_time_))
    if (self.has_internal_message_): n += 1 + self.lengthString(len(self.internal_message_))
    if (self.has_admin_message_): n += 1 + self.lengthString(len(self.admin_message_))
    if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
    # +2: one-byte tags for the two required string fields.
    return n + 2

  def Clear(self):
    self.clear_package()
    self.clear_capability()
    self.clear_status()
    self.clear_scheduled_time()
    self.clear_internal_message()
    self.clear_admin_message()
    self.clear_error_message()

  def OutputUnchecked(self, out):
    """Serializes to out without an IsInitialized() check.

    Fields are emitted in tag order 1..6, then scheduled_time (tag 7, wire
    tag 58) last.
    """
    out.putVarInt32(10)
    out.putPrefixedString(self.package_)
    out.putVarInt32(18)
    out.putPrefixedString(self.capability_)
    if (self.has_status_):
      out.putVarInt32(24)
      out.putVarInt32(self.status_)
    if (self.has_internal_message_):
      out.putVarInt32(34)
      out.putPrefixedString(self.internal_message_)
    if (self.has_admin_message_):
      out.putVarInt32(42)
      out.putPrefixedString(self.admin_message_)
    if (self.has_error_message_):
      out.putVarInt32(50)
      out.putPrefixedString(self.error_message_)
    if (self.has_scheduled_time_):
      out.putVarInt32(58)
      out.putPrefixedString(self.scheduled_time_)

  def TryMerge(self, d):
    """Decodes from Decoder d; tag 0 is corrupt, unknown tags are skipped."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_package(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_capability(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_status(d.getVarInt32())
        continue
      if tt == 34:
        self.set_internal_message(d.getPrefixedString())
        continue
      if tt == 42:
        self.set_admin_message(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_error_message(d.getPrefixedString())
        continue
      if tt == 58:
        self.set_scheduled_time(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Human-readable debug text, indented by prefix."""
    res=""
    if self.has_package_: res+=prefix+("package: %s\n" % self.DebugFormatString(self.package_))
    if self.has_capability_: res+=prefix+("capability: %s\n" % self.DebugFormatString(self.capability_))
    if self.has_status_: res+=prefix+("status: %s\n" % self.DebugFormatInt32(self.status_))
    if self.has_scheduled_time_: res+=prefix+("scheduled_time: %s\n" % self.DebugFormatString(self.scheduled_time_))
    if self.has_internal_message_: res+=prefix+("internal_message: %s\n" % self.DebugFormatString(self.internal_message_))
    if self.has_admin_message_: res+=prefix+("admin_message: %s\n" % self.DebugFormatString(self.admin_message_))
    if self.has_error_message_: res+=prefix+("error_message: %s\n" % self.DebugFormatString(self.error_message_))
    return res

  # Field tag numbers (scheduled_time was added later, hence tag 7).
  kpackage = 1
  kcapability = 2
  kstatus = 3
  kscheduled_time = 7
  kinternal_message = 4
  kadmin_message = 5
  kerror_message = 6

  # Text-format metadata, indexed by tag number (index 0 is the error slot).
  _TEXT = (
   "ErrorCode",
   "package",
   "capability",
   "status",
   "internal_message",
   "admin_message",
   "error_message",
   "scheduled_time",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module.
__all__ = ['CapabilityConfigList','CapabilityConfig']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
class CapabilityConfigList(ProtocolBuffer.ProtocolMessage):
  # Generated protocol message: a repeated list of CapabilityConfig entries
  # plus an optional default_config applied when no explicit entry matches.

  # Presence flag and lazily-created value for the optional default_config
  # sub-message (field 2).
  has_default_config_ = 0
  default_config_ = None

  def __init__(self, contents=None):
    # config_ holds the repeated CapabilityConfig field (field 1).
    self.config_ = []
    # Lock guarding lazy construction of default_config_.
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)

  # --- Accessors for the repeated `config` field ---
  def config_size(self): return len(self.config_)

  def config_list(self): return self.config_

  def config(self, i):
    return self.config_[i]

  def mutable_config(self, i):
    return self.config_[i]

  def add_config(self):
    # Appends a fresh empty element and returns it for in-place population.
    x = CapabilityConfig()
    self.config_.append(x)
    return x

  def clear_config(self):
    self.config_ = []

  # --- Accessors for the optional `default_config` field ---
  def default_config(self):
    # Lazily constructs the sub-message; re-checks under the lock so
    # concurrent readers create it at most once.
    if self.default_config_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.default_config_ is None: self.default_config_ = CapabilityConfig()
      finally:
        self.lazy_init_lock_.release()
    return self.default_config_

  def mutable_default_config(self): self.has_default_config_ = 1; return self.default_config()

  def clear_default_config(self):
    # Clears the sub-message in place rather than dropping the object.
    if self.has_default_config_:
      self.has_default_config_ = 0;
      if self.default_config_ is not None: self.default_config_.Clear()

  def has_default_config(self): return self.has_default_config_
  def MergeFrom(self, x):
    # Merges another CapabilityConfigList into this one: appends all repeated
    # config entries and merges the optional default_config if set.
    assert x is not self
    for i in xrange(x.config_size()): self.add_config().CopyFrom(x.config(i))
    if (x.has_default_config()): self.mutable_default_config().MergeFrom(x.default_config())

  def Equals(self, x):
    # Field-by-field equality; returns 1/0 (generated-code convention).
    if x is self: return 1
    if len(self.config_) != len(x.config_): return 0
    for e1, e2 in zip(self.config_, x.config_):
      if e1 != e2: return 0
    if self.has_default_config_ != x.has_default_config_: return 0
    if self.has_default_config_ and self.default_config_ != x.default_config_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # A list is initialized iff every contained sub-message is initialized.
    initialized = 1
    for p in self.config_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (self.has_default_config_ and not self.default_config_.IsInitialized(debug_strs)): initialized = 0
    return initialized

  def ByteSize(self):
    # Serialized size: one tag byte per repeated element plus each element's
    # length-prefixed payload; same for the optional default_config.
    n = 0
    n += 1 * len(self.config_)
    for i in xrange(len(self.config_)): n += self.lengthString(self.config_[i].ByteSize())
    if (self.has_default_config_): n += 1 + self.lengthString(self.default_config_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_config()
    self.clear_default_config()

  def OutputUnchecked(self, out):
    # Serializes to the wire format without checking IsInitialized().
    # Tag 10 = field 1 (config, length-delimited); tag 18 = field 2.
    for i in xrange(len(self.config_)):
      out.putVarInt32(10)
      out.putVarInt32(self.config_[i].ByteSize())
      self.config_[i].OutputUnchecked(out)
    if (self.has_default_config_):
      out.putVarInt32(18)
      out.putVarInt32(self.default_config_.ByteSize())
      self.default_config_.OutputUnchecked(out)

  def TryMerge(self, d):
    # Decodes fields from decoder `d`, skipping unknown tags for forward
    # compatibility. Tag 0 is invalid and signals a corrupt stream.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_config().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_default_config().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering; printElemNumber adds "(i)" indices to repeated
    # elements.
    res=""
    cnt=0
    for e in self.config_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("config%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_default_config_:
      res+=prefix+"default_config <\n"
      res+=self.default_config_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res

  # Field numbers.
  kconfig = 1
  kdefault_config = 2

  # Index-parallel tables mapping field number -> name / wire type
  # (index 0 is the reserved "ErrorCode" slot).
  _TEXT = (
   "ErrorCode",
   "config",
   "default_config",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class CapabilityConfig(ProtocolBuffer.ProtocolMessage):
  # Generated protocol message describing the status of a single capability
  # of a service package (required: package, capability).

  # Status enum values.
  ENABLED = 1
  SCHEDULED = 2
  DISABLED = 3
  UNKNOWN = 4

  _Status_NAMES = {
    1: "ENABLED",
    2: "SCHEDULED",
    3: "DISABLED",
    4: "UNKNOWN",
  }

  def Status_Name(cls, x): return cls._Status_NAMES.get(x, "")
  # Pre-2.4 decorator-free classmethod idiom, kept as generated.
  Status_Name = classmethod(Status_Name)

  # Presence flags and default values for each field.
  # status_ defaults to 4 (UNKNOWN).
  has_package_ = 0
  package_ = ""
  has_capability_ = 0
  capability_ = ""
  has_status_ = 0
  status_ = 4
  has_scheduled_time_ = 0
  scheduled_time_ = ""
  has_internal_message_ = 0
  internal_message_ = ""
  has_admin_message_ = 0
  admin_message_ = ""
  has_error_message_ = 0
  error_message_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  # --- Generated get/set/clear/has accessors, one quartet per field ---

  def package(self): return self.package_

  def set_package(self, x):
    self.has_package_ = 1
    self.package_ = x

  def clear_package(self):
    # The has_ guard avoids writing a class-level default back onto the
    # instance when the field was never set.
    if self.has_package_:
      self.has_package_ = 0
      self.package_ = ""

  def has_package(self): return self.has_package_

  def capability(self): return self.capability_

  def set_capability(self, x):
    self.has_capability_ = 1
    self.capability_ = x

  def clear_capability(self):
    if self.has_capability_:
      self.has_capability_ = 0
      self.capability_ = ""

  def has_capability(self): return self.has_capability_

  def status(self): return self.status_

  def set_status(self, x):
    self.has_status_ = 1
    self.status_ = x

  def clear_status(self):
    # Resets to the UNKNOWN enum value (4).
    if self.has_status_:
      self.has_status_ = 0
      self.status_ = 4

  def has_status(self): return self.has_status_

  def scheduled_time(self): return self.scheduled_time_

  def set_scheduled_time(self, x):
    self.has_scheduled_time_ = 1
    self.scheduled_time_ = x

  def clear_scheduled_time(self):
    if self.has_scheduled_time_:
      self.has_scheduled_time_ = 0
      self.scheduled_time_ = ""

  def has_scheduled_time(self): return self.has_scheduled_time_

  def internal_message(self): return self.internal_message_

  def set_internal_message(self, x):
    self.has_internal_message_ = 1
    self.internal_message_ = x

  def clear_internal_message(self):
    if self.has_internal_message_:
      self.has_internal_message_ = 0
      self.internal_message_ = ""

  def has_internal_message(self): return self.has_internal_message_

  def admin_message(self): return self.admin_message_

  def set_admin_message(self, x):
    self.has_admin_message_ = 1
    self.admin_message_ = x

  def clear_admin_message(self):
    if self.has_admin_message_:
      self.has_admin_message_ = 0
      self.admin_message_ = ""

  def has_admin_message(self): return self.has_admin_message_

  def error_message(self): return self.error_message_

  def set_error_message(self, x):
    self.has_error_message_ = 1
    self.error_message_ = x

  def clear_error_message(self):
    if self.has_error_message_:
      self.has_error_message_ = 0
      self.error_message_ = ""

  def has_error_message(self): return self.has_error_message_
  def MergeFrom(self, x):
    # Copies every set field of x into this message (last-writer-wins).
    assert x is not self
    if (x.has_package()): self.set_package(x.package())
    if (x.has_capability()): self.set_capability(x.capability())
    if (x.has_status()): self.set_status(x.status())
    if (x.has_scheduled_time()): self.set_scheduled_time(x.scheduled_time())
    if (x.has_internal_message()): self.set_internal_message(x.internal_message())
    if (x.has_admin_message()): self.set_admin_message(x.admin_message())
    if (x.has_error_message()): self.set_error_message(x.error_message())

  def Equals(self, x):
    # Field-by-field equality: presence flags must match, and values must
    # match for every field that is present. Returns 1/0.
    if x is self: return 1
    if self.has_package_ != x.has_package_: return 0
    if self.has_package_ and self.package_ != x.package_: return 0
    if self.has_capability_ != x.has_capability_: return 0
    if self.has_capability_ and self.capability_ != x.capability_: return 0
    if self.has_status_ != x.has_status_: return 0
    if self.has_status_ and self.status_ != x.status_: return 0
    if self.has_scheduled_time_ != x.has_scheduled_time_: return 0
    if self.has_scheduled_time_ and self.scheduled_time_ != x.scheduled_time_: return 0
    if self.has_internal_message_ != x.has_internal_message_: return 0
    if self.has_internal_message_ and self.internal_message_ != x.internal_message_: return 0
    if self.has_admin_message_ != x.has_admin_message_: return 0
    if self.has_admin_message_ and self.admin_message_ != x.admin_message_: return 0
    if self.has_error_message_ != x.has_error_message_: return 0
    if self.has_error_message_ and self.error_message_ != x.error_message_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # package and capability are the two required fields; debug_strs (if
    # provided) collects human-readable reasons for failure.
    initialized = 1
    if (not self.has_package_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: package not set.')
    if (not self.has_capability_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: capability not set.')
    return initialized
  def ByteSize(self):
    # Serialized size. The trailing "+ 2" accounts for the tag bytes of the
    # two required fields (package, capability), which are always written.
    n = 0
    n += self.lengthString(len(self.package_))
    n += self.lengthString(len(self.capability_))
    if (self.has_status_): n += 1 + self.lengthVarInt64(self.status_)
    if (self.has_scheduled_time_): n += 1 + self.lengthString(len(self.scheduled_time_))
    if (self.has_internal_message_): n += 1 + self.lengthString(len(self.internal_message_))
    if (self.has_admin_message_): n += 1 + self.lengthString(len(self.admin_message_))
    if (self.has_error_message_): n += 1 + self.lengthString(len(self.error_message_))
    return n + 2

  def Clear(self):
    self.clear_package()
    self.clear_capability()
    self.clear_status()
    self.clear_scheduled_time()
    self.clear_internal_message()
    self.clear_admin_message()
    self.clear_error_message()

  def OutputUnchecked(self, out):
    # Writes fields in ascending tag order. scheduled_time is field 7
    # (tag 58), so it is emitted last even though it is declared earlier.
    out.putVarInt32(10)
    out.putPrefixedString(self.package_)
    out.putVarInt32(18)
    out.putPrefixedString(self.capability_)
    if (self.has_status_):
      out.putVarInt32(24)
      out.putVarInt32(self.status_)
    if (self.has_internal_message_):
      out.putVarInt32(34)
      out.putPrefixedString(self.internal_message_)
    if (self.has_admin_message_):
      out.putVarInt32(42)
      out.putPrefixedString(self.admin_message_)
    if (self.has_error_message_):
      out.putVarInt32(50)
      out.putPrefixedString(self.error_message_)
    if (self.has_scheduled_time_):
      out.putVarInt32(58)
      out.putPrefixedString(self.scheduled_time_)
  def TryMerge(self, d):
    # Decodes fields from decoder `d`; unknown tags are skipped for forward
    # compatibility, and tag 0 signals a corrupt stream.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_package(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_capability(d.getPrefixedString())
        continue
      if tt == 24:
        self.set_status(d.getVarInt32())
        continue
      if tt == 34:
        self.set_internal_message(d.getPrefixedString())
        continue
      if tt == 42:
        self.set_admin_message(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_error_message(d.getPrefixedString())
        continue
      if tt == 58:
        self.set_scheduled_time(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering: one "name: value" line per field that is set.
    res=""
    if self.has_package_: res+=prefix+("package: %s\n" % self.DebugFormatString(self.package_))
    if self.has_capability_: res+=prefix+("capability: %s\n" % self.DebugFormatString(self.capability_))
    if self.has_status_: res+=prefix+("status: %s\n" % self.DebugFormatInt32(self.status_))
    if self.has_scheduled_time_: res+=prefix+("scheduled_time: %s\n" % self.DebugFormatString(self.scheduled_time_))
    if self.has_internal_message_: res+=prefix+("internal_message: %s\n" % self.DebugFormatString(self.internal_message_))
    if self.has_admin_message_: res+=prefix+("admin_message: %s\n" % self.DebugFormatString(self.admin_message_))
    if self.has_error_message_: res+=prefix+("error_message: %s\n" % self.DebugFormatString(self.error_message_))
    return res

  # Field numbers. Note scheduled_time is field 7, added after the message's
  # original fields 1-6.
  kpackage = 1
  kcapability = 2
  kstatus = 3
  kscheduled_time = 7
  kinternal_message = 4
  kadmin_message = 5
  kerror_message = 6

  # Index-parallel tables mapping field number -> name / wire type
  # (index 0 is the reserved "ErrorCode" slot).
  _TEXT = (
   "ErrorCode",
   "package",
   "capability",
   "status",
   "internal_message",
   "admin_message",
   "error_message",
   "scheduled_time",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""

# Public API of this generated module.
__all__ = ['CapabilityConfigList','CapabilityConfig']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from antlr3 import *
from antlr3.compat import set, frozenset
HIDDEN = BaseRecognizer.HIDDEN

# Token type constants generated by ANTLR 3 from Groc.g (the "groc"
# natural-language schedule grammar, e.g. "every 2 hours",
# "2nd,third mon,wed of march 12:00"). Values are assigned by the
# generator and must stay in sync with GrocParser.
THIRD=12
SEPTEMBER=34
FOURTH=13
SECOND=11
WEDNESDAY=20
NOVEMBER=36
SATURDAY=23
JULY=32
APRIL=29
DIGITS=8
OCTOBER=35
MAY=30
EVERY=6
FEBRUARY=27
MONDAY=18
SUNDAY=24
JUNE=31
OF=4
MARCH=28
EOF=-1
JANUARY=26
MONTH=25
FRIDAY=22
MINUTES=17
FIFTH=14
TIME=5
WS=39
QUARTER=38
THURSDAY=21
COMMA=9
DECEMBER=37
AUGUST=33
DIGIT=7
TUESDAY=19
HOURS=16
FOURTH_OR_FIFTH=15
FIRST=10
class GrocLexer(Lexer):
    # ANTLR-generated lexer for the groc schedule grammar (Groc.g).

    grammarFileName = "Groc.g"
    antlr_version = version_str_to_tuple("3.1.1")
    antlr_version_str = "3.1.1"

    def __init__(self, input=None, state=None):
        if state is None:
            state = RecognizerSharedState()
        Lexer.__init__(self, input, state)
        # DFA used by mTokens() to predict which token rule applies.
        self.dfa25 = self.DFA25(
            self, 25,
            eot = self.DFA25_eot,
            eof = self.DFA25_eof,
            min = self.DFA25_min,
            max = self.DFA25_max,
            accept = self.DFA25_accept,
            special = self.DFA25_special,
            transition = self.DFA25_transition
            )
    def mTIME(self, ):
        # Matches a clock time "H:MM" or "HH:MM" (hours 0-24 range by prefix:
        # single digit, 0X, 1X, or 20-24), then ':' and minutes 00-59.
        # Character codes: 48-57 are '0'-'9', 58 is ':'.
        try:
            _type = TIME
            _channel = DEFAULT_CHANNEL

            pass
            # Predict which hour form applies by looking two characters ahead.
            alt1 = 4
            LA1 = self.input.LA(1)
            if LA1 == 48:
                LA1_1 = self.input.LA(2)
                if ((48 <= LA1_1 <= 57)) :
                    alt1 = 2
                elif (LA1_1 == 58) :
                    alt1 = 1
                else:
                    nvae = NoViableAltException("", 1, 1, self.input)
                    raise nvae
            elif LA1 == 49:
                LA1_2 = self.input.LA(2)
                if ((48 <= LA1_2 <= 57)) :
                    alt1 = 3
                elif (LA1_2 == 58) :
                    alt1 = 1
                else:
                    nvae = NoViableAltException("", 1, 2, self.input)
                    raise nvae
            elif LA1 == 50:
                LA1_3 = self.input.LA(2)
                if ((48 <= LA1_3 <= 52)) :
                    alt1 = 4
                elif (LA1_3 == 58) :
                    alt1 = 1
                else:
                    nvae = NoViableAltException("", 1, 3, self.input)
                    raise nvae
            elif LA1 == 51 or LA1 == 52 or LA1 == 53 or LA1 == 54 or LA1 == 55 or LA1 == 56 or LA1 == 57:
                alt1 = 1
            else:
                nvae = NoViableAltException("", 1, 0, self.input)
                raise nvae

            if alt1 == 1:
                # Single-digit hour.
                pass
                self.mDIGIT()
            elif alt1 == 2:
                # "0" followed by a digit.
                pass
                pass
                self.match(48)
                self.mDIGIT()
            elif alt1 == 3:
                # "1" followed by a digit.
                pass
                pass
                self.match(49)
                self.mDIGIT()
            elif alt1 == 4:
                # "2" followed by 0-4 (hours 20-24).
                pass
                pass
                self.match(50)
                self.matchRange(48, 52)

            # ':' then minutes: first digit 0-5, second any digit.
            self.match(58)
            pass
            self.matchRange(48, 53)
            self.mDIGIT()

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    def mFIRST(self, ):
        # Matches the ordinal "1st" or "first" (49 = '1', 102 = 'f').
        try:
            _type = FIRST
            _channel = DEFAULT_CHANNEL

            pass
            alt2 = 2
            LA2_0 = self.input.LA(1)
            if (LA2_0 == 49) :
                alt2 = 1
            elif (LA2_0 == 102) :
                alt2 = 2
            else:
                nvae = NoViableAltException("", 2, 0, self.input)
                raise nvae

            if alt2 == 1:
                pass
                self.match("1st")
            elif alt2 == 2:
                pass
                self.match("first")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mSECOND(self, ):
        # Matches the ordinal "2nd" or "second" (50 = '2', 115 = 's').
        try:
            _type = SECOND
            _channel = DEFAULT_CHANNEL

            pass
            alt3 = 2
            LA3_0 = self.input.LA(1)
            if (LA3_0 == 50) :
                alt3 = 1
            elif (LA3_0 == 115) :
                alt3 = 2
            else:
                nvae = NoViableAltException("", 3, 0, self.input)
                raise nvae

            if alt3 == 1:
                pass
                self.match("2nd")
            elif alt3 == 2:
                pass
                self.match("second")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mTHIRD(self, ):
        # Matches the ordinal "3rd" or "third" (51 = '3', 116 = 't').
        try:
            _type = THIRD
            _channel = DEFAULT_CHANNEL

            pass
            alt4 = 2
            LA4_0 = self.input.LA(1)
            if (LA4_0 == 51) :
                alt4 = 1
            elif (LA4_0 == 116) :
                alt4 = 2
            else:
                nvae = NoViableAltException("", 4, 0, self.input)
                raise nvae

            if alt4 == 1:
                pass
                self.match("3rd")
            elif alt4 == 2:
                pass
                self.match("third")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    def mFOURTH(self, ):
        # Matches the numeric ordinal "4th".
        try:
            _type = FOURTH
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.match("4th")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mFIFTH(self, ):
        # Matches the numeric ordinal "5th".
        try:
            _type = FIFTH
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.match("5th")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mFOURTH_OR_FIFTH(self, ):
        # Matches the word forms "fourth" or "fifth" and rewrites the token
        # type to FOURTH / FIFTH respectively (111 = 'o', 105 = 'i').
        try:
            _type = FOURTH_OR_FIFTH
            _channel = DEFAULT_CHANNEL

            pass
            alt5 = 2
            LA5_0 = self.input.LA(1)
            if (LA5_0 == 102) :
                LA5_1 = self.input.LA(2)
                if (LA5_1 == 111) :
                    alt5 = 1
                elif (LA5_1 == 105) :
                    alt5 = 2
                else:
                    nvae = NoViableAltException("", 5, 1, self.input)
                    raise nvae
            else:
                nvae = NoViableAltException("", 5, 0, self.input)
                raise nvae

            if alt5 == 1:
                pass
                pass
                self.match("fourth")
                _type = FOURTH;
            elif alt5 == 2:
                pass
                pass
                self.match("fifth")
                _type = FIFTH;

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    # Weekday rules: each matches a three-letter abbreviation and an optional
    # suffix completing the full word (e.g. "mon" + "day"). The one-character
    # lookahead codes are the suffix's first letter.

    def mMONDAY(self, ):
        # "mon" optionally followed by "day" (100 = 'd').
        try:
            _type = MONDAY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("mon")
            alt6 = 2
            LA6_0 = self.input.LA(1)
            if (LA6_0 == 100) :
                alt6 = 1
            if alt6 == 1:
                pass
                self.match("day")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mTUESDAY(self, ):
        # "tue" optionally followed by "sday" (115 = 's').
        try:
            _type = TUESDAY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("tue")
            alt7 = 2
            LA7_0 = self.input.LA(1)
            if (LA7_0 == 115) :
                alt7 = 1
            if alt7 == 1:
                pass
                self.match("sday")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mWEDNESDAY(self, ):
        # "wed" optionally followed by "nesday" (110 = 'n').
        try:
            _type = WEDNESDAY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("wed")
            alt8 = 2
            LA8_0 = self.input.LA(1)
            if (LA8_0 == 110) :
                alt8 = 1
            if alt8 == 1:
                pass
                self.match("nesday")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mTHURSDAY(self, ):
        # "thu" optionally followed by "rsday" (114 = 'r').
        try:
            _type = THURSDAY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("thu")
            alt9 = 2
            LA9_0 = self.input.LA(1)
            if (LA9_0 == 114) :
                alt9 = 1
            if alt9 == 1:
                pass
                self.match("rsday")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mFRIDAY(self, ):
        # "fri" optionally followed by "day" (100 = 'd').
        try:
            _type = FRIDAY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("fri")
            alt10 = 2
            LA10_0 = self.input.LA(1)
            if (LA10_0 == 100) :
                alt10 = 1
            if alt10 == 1:
                pass
                self.match("day")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mSATURDAY(self, ):
        # "sat" optionally followed by "urday" (117 = 'u').
        try:
            _type = SATURDAY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("sat")
            alt11 = 2
            LA11_0 = self.input.LA(1)
            if (LA11_0 == 117) :
                alt11 = 1
            if alt11 == 1:
                pass
                self.match("urday")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mSUNDAY(self, ):
        # "sun" optionally followed by "day" (100 = 'd').
        try:
            _type = SUNDAY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("sun")
            alt12 = 2
            LA12_0 = self.input.LA(1)
            if (LA12_0 == 100) :
                alt12 = 1
            if alt12 == 1:
                pass
                self.match("day")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    # Month rules: each matches a three-letter abbreviation and an optional
    # suffix completing the full month name ("may" has no suffix).

    def mJANUARY(self, ):
        # "jan" optionally followed by "uary" (117 = 'u').
        try:
            _type = JANUARY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("jan")
            alt13 = 2
            LA13_0 = self.input.LA(1)
            if (LA13_0 == 117) :
                alt13 = 1
            if alt13 == 1:
                pass
                self.match("uary")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mFEBRUARY(self, ):
        # "feb" optionally followed by "ruary" (114 = 'r').
        try:
            _type = FEBRUARY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("feb")
            alt14 = 2
            LA14_0 = self.input.LA(1)
            if (LA14_0 == 114) :
                alt14 = 1
            if alt14 == 1:
                pass
                self.match("ruary")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mMARCH(self, ):
        # "mar" optionally followed by "ch" (99 = 'c').
        try:
            _type = MARCH
            _channel = DEFAULT_CHANNEL

            pass
            self.match("mar")
            alt15 = 2
            LA15_0 = self.input.LA(1)
            if (LA15_0 == 99) :
                alt15 = 1
            if alt15 == 1:
                pass
                self.match("ch")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mAPRIL(self, ):
        # "apr" optionally followed by "il" (105 = 'i').
        try:
            _type = APRIL
            _channel = DEFAULT_CHANNEL

            pass
            self.match("apr")
            alt16 = 2
            LA16_0 = self.input.LA(1)
            if (LA16_0 == 105) :
                alt16 = 1
            if alt16 == 1:
                pass
                self.match("il")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mMAY(self, ):
        # "may" — no longer form exists.
        try:
            _type = MAY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("may")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mJUNE(self, ):
        # "jun" optionally followed by 'e' (101).
        try:
            _type = JUNE
            _channel = DEFAULT_CHANNEL

            pass
            self.match("jun")
            alt17 = 2
            LA17_0 = self.input.LA(1)
            if (LA17_0 == 101) :
                alt17 = 1
            if alt17 == 1:
                pass
                self.match(101)

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mJULY(self, ):
        # "jul" optionally followed by 'y' (121).
        try:
            _type = JULY
            _channel = DEFAULT_CHANNEL

            pass
            self.match("jul")
            alt18 = 2
            LA18_0 = self.input.LA(1)
            if (LA18_0 == 121) :
                alt18 = 1
            if alt18 == 1:
                pass
                self.match(121)

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mAUGUST(self, ):
        # "aug" optionally followed by "ust" (117 = 'u').
        try:
            _type = AUGUST
            _channel = DEFAULT_CHANNEL

            pass
            self.match("aug")
            alt19 = 2
            LA19_0 = self.input.LA(1)
            if (LA19_0 == 117) :
                alt19 = 1
            if alt19 == 1:
                pass
                self.match("ust")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mSEPTEMBER(self, ):
        # "sep" optionally followed by "tember" (116 = 't').
        try:
            _type = SEPTEMBER
            _channel = DEFAULT_CHANNEL

            pass
            self.match("sep")
            alt20 = 2
            LA20_0 = self.input.LA(1)
            if (LA20_0 == 116) :
                alt20 = 1
            if alt20 == 1:
                pass
                self.match("tember")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mOCTOBER(self, ):
        # "oct" optionally followed by "ober" (111 = 'o').
        try:
            _type = OCTOBER
            _channel = DEFAULT_CHANNEL

            pass
            self.match("oct")
            alt21 = 2
            LA21_0 = self.input.LA(1)
            if (LA21_0 == 111) :
                alt21 = 1
            if alt21 == 1:
                pass
                self.match("ober")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mNOVEMBER(self, ):
        # "nov" optionally followed by "ember" (101 = 'e').
        try:
            _type = NOVEMBER
            _channel = DEFAULT_CHANNEL

            pass
            self.match("nov")
            alt22 = 2
            LA22_0 = self.input.LA(1)
            if (LA22_0 == 101) :
                alt22 = 1
            if alt22 == 1:
                pass
                self.match("ember")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mDECEMBER(self, ):
        # "dec" optionally followed by "ember" (101 = 'e').
        try:
            _type = DECEMBER
            _channel = DEFAULT_CHANNEL

            pass
            self.match("dec")
            alt23 = 2
            LA23_0 = self.input.LA(1)
            if (LA23_0 == 101) :
                alt23 = 1
            if alt23 == 1:
                pass
                self.match("ember")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    # Fixed-keyword rules: each matches one literal word.

    def mMONTH(self, ):
        try:
            _type = MONTH
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.match("month")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mQUARTER(self, ):
        try:
            _type = QUARTER
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.match("quarter")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mEVERY(self, ):
        try:
            _type = EVERY
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.match("every")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mHOURS(self, ):
        try:
            _type = HOURS
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.match("hours")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    def mMINUTES(self, ):
        # Matches "mins" or "minutes", disambiguated by the 4th character
        # (115 = 's' -> "mins", 117 = 'u' -> "minutes"). Lookahead codes:
        # 109 = 'm', 105 = 'i', 110 = 'n'.
        try:
            _type = MINUTES
            _channel = DEFAULT_CHANNEL

            pass
            alt24 = 2
            LA24_0 = self.input.LA(1)
            if (LA24_0 == 109) :
                LA24_1 = self.input.LA(2)
                if (LA24_1 == 105) :
                    LA24_2 = self.input.LA(3)
                    if (LA24_2 == 110) :
                        LA24_3 = self.input.LA(4)
                        if (LA24_3 == 115) :
                            alt24 = 1
                        elif (LA24_3 == 117) :
                            alt24 = 2
                        else:
                            nvae = NoViableAltException("", 24, 3, self.input)
                            raise nvae
                    else:
                        nvae = NoViableAltException("", 24, 2, self.input)
                        raise nvae
                else:
                    nvae = NoViableAltException("", 24, 1, self.input)
                    raise nvae
            else:
                nvae = NoViableAltException("", 24, 0, self.input)
                raise nvae

            if alt24 == 1:
                pass
                self.match("mins")
            elif alt24 == 2:
                pass
                self.match("minutes")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    def mCOMMA(self, ):
        # Matches ',' (character code 44).
        try:
            _type = COMMA
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.match(44)

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mOF(self, ):
        # Matches the keyword "of".
        try:
            _type = OF
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.match("of")

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mWS(self, ):
        # Matches a single whitespace character (tab 9, LF 10, CR 13,
        # space 32) and routes it to the HIDDEN channel so the parser
        # never sees it.
        try:
            _type = WS
            _channel = DEFAULT_CHANNEL

            pass
            if (9 <= self.input.LA(1) <= 10) or self.input.LA(1) == 13 or self.input.LA(1) == 32:
                self.input.consume()
            else:
                mse = MismatchedSetException(None, self.input)
                self.recover(mse)
                raise mse

            _channel=HIDDEN;

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
    def mDIGIT(self, ):
        # Matches a single digit '0'-'9' (codes 48-57).
        try:
            _type = DIGIT
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.matchRange(48, 57)

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass

    def mDIGITS(self, ):
        # Matches exactly two digits.
        try:
            _type = DIGITS
            _channel = DEFAULT_CHANNEL

            pass
            pass
            self.mDIGIT()
            self.mDIGIT()

            self._state.type = _type
            self._state.channel = _channel
        finally:
            pass
def mTokens(self):
alt25 = 36
alt25 = self.dfa25.predict(self.input)
if alt25 == 1:
pass
self.mTIME()
elif alt25 == 2:
pass
self.mFIRST()
elif alt25 == 3:
pass
self.mSECOND()
elif alt25 == 4:
pass
self.mTHIRD()
elif alt25 == 5:
pass
self.mFOURTH()
elif alt25 == 6:
pass
self.mFIFTH()
elif alt25 == 7:
pass
self.mFOURTH_OR_FIFTH()
elif alt25 == 8:
pass
self.mMONDAY()
elif alt25 == 9:
pass
self.mTUESDAY()
elif alt25 == 10:
pass
self.mWEDNESDAY()
elif alt25 == 11:
pass
self.mTHURSDAY()
elif alt25 == 12:
pass
self.mFRIDAY()
elif alt25 == 13:
pass
self.mSATURDAY()
elif alt25 == 14:
pass
self.mSUNDAY()
elif alt25 == 15:
pass
self.mJANUARY()
elif alt25 == 16:
pass
self.mFEBRUARY()
elif alt25 == 17:
pass
self.mMARCH()
elif alt25 == 18:
pass
self.mAPRIL()
elif alt25 == 19:
pass
self.mMAY()
elif alt25 == 20:
pass
self.mJUNE()
elif alt25 == 21:
pass
self.mJULY()
elif alt25 == 22:
pass
self.mAUGUST()
elif alt25 == 23:
pass
self.mSEPTEMBER()
elif alt25 == 24:
pass
self.mOCTOBER()
elif alt25 == 25:
pass
self.mNOVEMBER()
elif alt25 == 26:
pass
self.mDECEMBER()
elif alt25 == 27:
pass
self.mMONTH()
elif alt25 == 28:
pass
self.mQUARTER()
elif alt25 == 29:
pass
self.mEVERY()
elif alt25 == 30:
pass
self.mHOURS()
elif alt25 == 31:
pass
self.mMINUTES()
elif alt25 == 32:
pass
self.mCOMMA()
elif alt25 == 33:
pass
self.mOF()
elif alt25 == 34:
pass
self.mWS()
elif alt25 == 35:
pass
self.mDIGIT()
elif alt25 == 36:
pass
self.mDIGITS()
DFA25_eot = DFA.unpack(
u"\1\uffff\4\27\2\uffff\1\27\1\uffff\2\27\16\uffff\1\36\1\uffff\2"
u"\36\31\uffff\1\74\6\uffff"
)
DFA25_eof = DFA.unpack(
u"\75\uffff"
)
DFA25_min = DFA.unpack(
u"\1\11\4\60\1\145\1\141\1\60\1\150\2\60\1\141\1\uffff\1\141\1\160"
u"\1\143\11\uffff\1\72\1\uffff\2\72\3\uffff\1\146\3\uffff\1\143\3"
u"\uffff\1\151\2\uffff\1\156\1\162\2\uffff\1\154\6\uffff\1\164\6"
u"\uffff"
)
DFA25_max = DFA.unpack(
u"\1\167\1\72\1\163\1\156\2\162\1\165\1\164\1\165\1\164\1\72\1\157"
u"\1\uffff\2\165\1\146\11\uffff\1\72\1\uffff\2\72\3\uffff\1\162\3"
u"\uffff\1\160\3\uffff\1\165\2\uffff\1\156\1\171\2\uffff\1\156\6"
u"\uffff\1\164\6\uffff"
)
DFA25_accept = DFA.unpack(
u"\14\uffff\1\12\3\uffff\1\31\1\32\1\34\1\35\1\36\1\40\1\42\1\43"
u"\1\1\1\uffff\1\2\2\uffff\1\3\1\44\1\4\1\uffff\1\7\1\14\1\20\1\uffff"
u"\1\15\1\16\1\5\1\uffff\1\11\1\6\2\uffff\1\37\1\17\1\uffff\1\22"
u"\1\26\1\30\1\41\1\27\1\13\1\uffff\1\21\1\23\1\24\1\25\1\33\1\10"
)
DFA25_special = DFA.unpack(
u"\75\uffff"
)
DFA25_transition = [
DFA.unpack(u"\2\26\2\uffff\1\26\22\uffff\1\26\13\uffff\1\25\3\uffff"
u"\1\1\1\2\1\3\1\4\1\7\1\11\4\12\47\uffff\1\16\2\uffff\1\21\1\23"
u"\1\5\1\uffff\1\24\1\uffff\1\15\2\uffff\1\13\1\20\1\17\1\uffff\1"
u"\22\1\uffff\1\6\1\10\2\uffff\1\14"),
DFA.unpack(u"\12\31\1\30"),
DFA.unpack(u"\12\33\1\30\70\uffff\1\32"),
DFA.unpack(u"\5\34\5\36\1\30\63\uffff\1\35"),
DFA.unpack(u"\12\36\1\30\67\uffff\1\37"),
DFA.unpack(u"\1\43\3\uffff\1\40\5\uffff\1\41\2\uffff\1\42"),
DFA.unpack(u"\1\45\3\uffff\1\44\17\uffff\1\46"),
DFA.unpack(u"\12\36\1\30\71\uffff\1\47"),
DFA.unpack(u"\1\50\14\uffff\1\51"),
DFA.unpack(u"\12\36\1\30\71\uffff\1\52"),
DFA.unpack(u"\12\36\1\30"),
DFA.unpack(u"\1\54\7\uffff\1\55\5\uffff\1\53"),
DFA.unpack(u""),
DFA.unpack(u"\1\56\23\uffff\1\57"),
DFA.unpack(u"\1\60\4\uffff\1\61"),
DFA.unpack(u"\1\62\2\uffff\1\63"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\30"),
DFA.unpack(u""),
DFA.unpack(u"\1\30"),
DFA.unpack(u"\1\30"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\41\13\uffff\1\32"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\35\14\uffff\1\64"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\37\13\uffff\1\65"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\66"),
DFA.unpack(u"\1\67\6\uffff\1\70"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\72\1\uffff\1\71"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"\1\73"),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u""),
DFA.unpack(u"")
]
DFA25 = DFA
def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
    """Command-line entry point: run GrocLexer over stdin via ANTLR's LexerMain.

    The stream arguments default to the process streams but may be
    overridden for testing.
    """
    from antlr3.main import LexerMain
    # Use a distinct local name instead of shadowing this function's name.
    lexer_main = LexerMain(GrocLexer)
    lexer_main.stdin = stdin
    lexer_main.stdout = stdout
    lexer_main.stderr = stderr
    lexer_main.execute(argv)


if __name__ == '__main__':
    main(sys.argv)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from antlr3 import *
from antlr3.compat import set, frozenset
# The full set of week-of-month ordinals (1st through 5th) used when a
# schedule says "every <weekday>".
allOrdinals = set([1, 2, 3, 4, 5])
numOrdinals = len(allOrdinals)

HIDDEN = BaseRecognizer.HIDDEN

# Token type constants generated by ANTLR 3 from Groc.g; values must match
# the GrocLexer module.
THIRD=12
SEPTEMBER=34
FOURTH=13
SECOND=11
WEDNESDAY=20
NOVEMBER=36
SATURDAY=23
JULY=32
APRIL=29
DIGITS=8
OCTOBER=35
MAY=30
EVERY=6
FEBRUARY=27
MONDAY=18
SUNDAY=24
JUNE=31
MARCH=28
OF=4
EOF=-1
JANUARY=26
MONTH=25
FRIDAY=22
FIFTH=14
MINUTES=17
TIME=5
WS=39
QUARTER=38
THURSDAY=21
COMMA=9
DECEMBER=37
AUGUST=33
DIGIT=7
TUESDAY=19
HOURS=16
FIRST=10
FOURTH_OR_FIFTH=15

# Token display names indexed by token type (generated; first four slots are
# ANTLR's built-in pseudo-tokens).
tokenNames = [
    "<invalid>", "<EOR>", "<DOWN>", "<UP>",
    "OF", "TIME", "EVERY", "DIGIT", "DIGITS", "COMMA", "FIRST", "SECOND",
    "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES", "MONDAY",
    "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY", "SUNDAY",
    "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE", "JULY",
    "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
    "WS"
]
class GrocParser(Parser):
    # ANTLR-generated parser for the groc schedule grammar. A successful
    # parse populates ordinal_set / weekday_set / month_set / time_string /
    # interval_mins / period_string on the instance.

    grammarFileName = "Groc.g"
    antlr_version = version_str_to_tuple("3.1.1")
    antlr_version_str = "3.1.1"
    tokenNames = tokenNames

    def __init__(self, input, state=None):
        if state is None:
            state = RecognizerSharedState()

        Parser.__init__(self, input, state)

        # Results accumulated while parsing one schedule specification.
        self.ordinal_set = set()
        self.weekday_set = set()
        self.month_set = set()
        self.time_string = '';
        self.interval_mins = 0;
        self.period_string = '';

    # Maps a token type to its numeric schedule value (weekday number,
    # ordinal index, or month number). Shared class-level table, read via
    # self.valuesDict in ValueOf(). NOTE(review): placement as a class
    # attribute reconstructed from the original SDK layout — the dump's
    # indentation was lost; confirm against the upstream file.
    valuesDict = {
        SUNDAY: 0,
        FIRST: 1,
        MONDAY: 1,
        JANUARY: 1,
        TUESDAY: 2,
        SECOND: 2,
        FEBRUARY: 2,
        WEDNESDAY: 3,
        THIRD: 3,
        MARCH: 3,
        THURSDAY: 4,
        FOURTH: 4,
        APRIL: 4,
        FRIDAY: 5,
        FIFTH: 5,
        MAY: 5,
        SATURDAY: 6,
        JUNE: 6,
        JULY: 7,
        AUGUST: 8,
        SEPTEMBER: 9,
        OCTOBER: 10,
        NOVEMBER: 11,
        DECEMBER: 12,
      }

    def ValueOf(self, token_type):
        # Returns -1 for token types with no numeric value.
        return self.valuesDict.get(token_type, -1)
    def timespec(self, ):
        # Entry rule: a schedule is either a specific time spec
        # ("<ordinals> <weekdays> of <month|quarter> <time>") or an interval
        # ("every N hours|minutes"). "every <weekday>" routes to
        # specifictime, "every <number>" to interval.
        try:
            try:
                pass
                alt1 = 2
                LA1_0 = self.input.LA(1)

                if (LA1_0 == EVERY) :
                    LA1_1 = self.input.LA(2)

                    if ((DIGIT <= LA1_1 <= DIGITS)) :
                        alt1 = 2
                    elif ((MONDAY <= LA1_1 <= SUNDAY)) :
                        alt1 = 1
                    else:
                        nvae = NoViableAltException("", 1, 1, self.input)
                        raise nvae

                elif ((FIRST <= LA1_0 <= FOURTH_OR_FIFTH)) :
                    alt1 = 1
                else:
                    nvae = NoViableAltException("", 1, 0, self.input)
                    raise nvae

                if alt1 == 1:
                    pass
                    self._state.following.append(self.FOLLOW_specifictime_in_timespec44)
                    self.specifictime()
                    self._state.following.pop()
                elif alt1 == 2:
                    pass
                    self._state.following.append(self.FOLLOW_interval_in_timespec48)
                    self.interval()
                    self._state.following.pop()

            # Python 2 except syntax; `re` is the caught exception here,
            # not the regex module.
            except RecognitionException, re:
                self.reportError(re)
                self.recover(self.input, re)
        finally:
            pass

        return
def specifictime(self, ):
    """Parse 'ordinals weekdays OF (monthspec|quarterspec) TIME'.

    Side effect: stores the matched TIME token's text in self.time_string.
    """
    TIME1 = None
    try:
        try:
            pass
            pass
            pass
            pass
            pass
            self._state.following.append(self.FOLLOW_ordinals_in_specifictime69)
            self.ordinals()
            self._state.following.pop()
            self._state.following.append(self.FOLLOW_weekdays_in_specifictime71)
            self.weekdays()
            self._state.following.pop()
            self.match(self.input, OF, self.FOLLOW_OF_in_specifictime75)
            # Month-range tokens select monthspec; ordinals or QUARTER select
            # quarterspec.
            alt2 = 2
            LA2_0 = self.input.LA(1)
            if ((MONTH <= LA2_0 <= DECEMBER)) :
                alt2 = 1
            elif ((FIRST <= LA2_0 <= THIRD) or LA2_0 == QUARTER) :
                alt2 = 2
            else:
                nvae = NoViableAltException("", 2, 0, self.input)
                raise nvae
            if alt2 == 1:
                pass
                self._state.following.append(self.FOLLOW_monthspec_in_specifictime78)
                self.monthspec()
                self._state.following.pop()
            elif alt2 == 2:
                pass
                self._state.following.append(self.FOLLOW_quarterspec_in_specifictime80)
                self.quarterspec()
                self._state.following.pop()
            TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime93)
            self.time_string = TIME1.text
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def interval(self, ):
    """Parse 'EVERY <number> (hours|minutes)'.

    Side effects: sets self.interval_mins to the number and
    self.period_string to "hours" or "minutes".
    """
    intervalnum = None
    period2 = None
    try:
        try:
            pass
            pass
            self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval112)
            intervalnum = self.input.LT(1)
            if (DIGIT <= self.input.LA(1) <= DIGITS):
                self.input.consume()
                self._state.errorRecovery = False
            else:
                mse = MismatchedSetException(None, self.input)
                raise mse
            self.interval_mins = int(intervalnum.text)
            self._state.following.append(self.FOLLOW_period_in_interval138)
            period2 = self.period()
            self._state.following.pop()
            # ANTLR's Python-2 "and/or ternary": text spanned by the matched
            # period rule, or None when the rule returned nothing.
            if ((period2 is not None) and [self.input.toString(period2.start,period2.stop)] or [None])[0] == "hours":
                self.period_string = "hours"
            else:
                self.period_string = "minutes"
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def ordinals(self, ):
    """Parse 'EVERY' (all ordinals) or a comma-separated list of ordinals.

    Side effect: unions the parsed values into self.ordinal_set.
    """
    try:
        try:
            pass
            alt4 = 2
            LA4_0 = self.input.LA(1)
            if (LA4_0 == EVERY) :
                alt4 = 1
            elif ((FIRST <= LA4_0 <= FOURTH_OR_FIFTH)) :
                alt4 = 2
            else:
                nvae = NoViableAltException("", 4, 0, self.input)
                raise nvae
            if alt4 == 1:
                pass
                self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals157)
                # "every" means all ordinals 1..5 (module-level allOrdinals).
                self.ordinal_set = self.ordinal_set.union(allOrdinals)
            elif alt4 == 2:
                pass
                pass
                self._state.following.append(self.FOLLOW_ordinal_in_ordinals173)
                self.ordinal()
                self._state.following.pop()
                # Zero or more ', ordinal' continuations.
                while True:
                    alt3 = 2
                    LA3_0 = self.input.LA(1)
                    if (LA3_0 == COMMA) :
                        alt3 = 1
                    if alt3 == 1:
                        pass
                        self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals176)
                        self._state.following.append(self.FOLLOW_ordinal_in_ordinals178)
                        self.ordinal()
                        self._state.following.pop()
                    else:
                        break
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def ordinal(self, ):
    """Parse one ordinal token (FIRST..FOURTH_OR_FIFTH) into self.ordinal_set."""
    # NOTE: generated code shadows the builtin 'ord' with a local token ref.
    ord = None
    try:
        try:
            pass
            ord = self.input.LT(1)
            if (FIRST <= self.input.LA(1) <= FOURTH_OR_FIFTH):
                self.input.consume()
                self._state.errorRecovery = False
            else:
                mse = MismatchedSetException(None, self.input)
                raise mse
            self.ordinal_set.add(self.ValueOf(ord.type));
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
class period_return(ParserRuleReturnScope):
    """Return scope for the 'period' rule: carries start/stop tokens."""
    def __init__(self):
        ParserRuleReturnScope.__init__(self)

def period(self, ):
    """Parse one period token (HOURS or MINUTES); returns its token span."""
    retval = self.period_return()
    retval.start = self.input.LT(1)
    try:
        try:
            pass
            if (HOURS <= self.input.LA(1) <= MINUTES):
                self.input.consume()
                self._state.errorRecovery = False
            else:
                mse = MismatchedSetException(None, self.input)
                raise mse
            retval.stop = self.input.LT(-1)
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return retval
def weekdays(self, ):
    """Parse a comma-separated list of weekday tokens."""
    try:
        try:
            pass
            pass
            self._state.following.append(self.FOLLOW_weekday_in_weekdays261)
            self.weekday()
            self._state.following.pop()
            # Zero or more ', weekday' continuations.
            while True:
                alt5 = 2
                LA5_0 = self.input.LA(1)
                if (LA5_0 == COMMA) :
                    alt5 = 1
                if alt5 == 1:
                    pass
                    self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays264)
                    self._state.following.append(self.FOLLOW_weekday_in_weekdays266)
                    self.weekday()
                    self._state.following.pop()
                else:
                    break
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def weekday(self, ):
    """Parse one weekday token (MONDAY..SUNDAY) into self.weekday_set."""
    dayname = None
    try:
        try:
            pass
            dayname = self.input.LT(1)
            if (MONDAY <= self.input.LA(1) <= SUNDAY):
                self.input.consume()
                self._state.errorRecovery = False
            else:
                mse = MismatchedSetException(None, self.input)
                raise mse
            self.weekday_set.add(self.ValueOf(dayname.type))
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def monthspec(self, ):
    """Parse 'MONTH' (meaning every month) or an explicit month list."""
    try:
        try:
            pass
            alt6 = 2
            LA6_0 = self.input.LA(1)
            if (LA6_0 == MONTH) :
                alt6 = 1
            elif ((JANUARY <= LA6_0 <= DECEMBER)) :
                alt6 = 2
            else:
                nvae = NoViableAltException("", 6, 0, self.input)
                raise nvae
            if alt6 == 1:
                pass
                self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec344)
                # Bare "month" selects all twelve months.
                self.month_set = self.month_set.union(set([
                    self.ValueOf(JANUARY), self.ValueOf(FEBRUARY), self.ValueOf(MARCH),
                    self.ValueOf(APRIL), self.ValueOf(MAY), self.ValueOf(JUNE),
                    self.ValueOf(JULY), self.ValueOf(AUGUST), self.ValueOf(SEPTEMBER),
                    self.ValueOf(OCTOBER), self.ValueOf(NOVEMBER),
                    self.ValueOf(DECEMBER)]))
            elif alt6 == 2:
                pass
                self._state.following.append(self.FOLLOW_months_in_monthspec354)
                self.months()
                self._state.following.pop()
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def months(self, ):
    """Parse a comma-separated list of month tokens."""
    try:
        try:
            pass
            pass
            self._state.following.append(self.FOLLOW_month_in_months371)
            self.month()
            self._state.following.pop()
            # Zero or more ', month' continuations.
            while True:
                alt7 = 2
                LA7_0 = self.input.LA(1)
                if (LA7_0 == COMMA) :
                    alt7 = 1
                if alt7 == 1:
                    pass
                    self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months374)
                    self._state.following.append(self.FOLLOW_month_in_months376)
                    self.month()
                    self._state.following.pop()
                else:
                    break
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def month(self, ):
    """Parse one month token (JANUARY..DECEMBER) into self.month_set."""
    monthname = None
    try:
        try:
            pass
            monthname = self.input.LT(1)
            if (JANUARY <= self.input.LA(1) <= DECEMBER):
                self.input.consume()
                self._state.errorRecovery = False
            else:
                mse = MismatchedSetException(None, self.input)
                raise mse
            self.month_set.add(self.ValueOf(monthname.type));
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def quarterspec(self, ):
    """Parse 'QUARTER' (first month of each quarter) or
    '<ordinals> MONTH OF QUARTER'.
    """
    try:
        try:
            pass
            alt8 = 2
            LA8_0 = self.input.LA(1)
            if (LA8_0 == QUARTER) :
                alt8 = 1
            elif ((FIRST <= LA8_0 <= THIRD)) :
                alt8 = 2
            else:
                nvae = NoViableAltException("", 8, 0, self.input)
                raise nvae
            if alt8 == 1:
                pass
                self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec468)
                # Bare "quarter" selects Jan/Apr/Jul/Oct.
                self.month_set = self.month_set.union(set([
                    self.ValueOf(JANUARY), self.ValueOf(APRIL), self.ValueOf(JULY),
                    self.ValueOf(OCTOBER)]))
            elif alt8 == 2:
                pass
                pass
                self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec480)
                self.quarter_ordinals()
                self._state.following.pop()
                self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec482)
                self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec484)
                self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec486)
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def quarter_ordinals(self, ):
    """Parse a comma-separated list of month-of-quarter ordinals."""
    try:
        try:
            pass
            pass
            self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals505)
            self.month_of_quarter_ordinal()
            self._state.following.pop()
            # Zero or more ', ordinal' continuations.
            while True:
                alt9 = 2
                LA9_0 = self.input.LA(1)
                if (LA9_0 == COMMA) :
                    alt9 = 1
                if alt9 == 1:
                    pass
                    self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals508)
                    self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals510)
                    self.month_of_quarter_ordinal()
                    self._state.following.pop()
                else:
                    break
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def month_of_quarter_ordinal(self, ):
    """Parse FIRST..THIRD and add that month of every quarter to month_set.

    E.g. "second" adds Feb/May/Aug/Nov (offset 1 from Jan/Apr/Jul/Oct).
    """
    offset = None
    try:
        try:
            pass
            offset = self.input.LT(1)
            if (FIRST <= self.input.LA(1) <= THIRD):
                self.input.consume()
                self._state.errorRecovery = False
            else:
                mse = MismatchedSetException(None, self.input)
                raise mse
            # 0-based offset within each quarter.
            jOffset = self.ValueOf(offset.type) - 1
            self.month_set = self.month_set.union(set([
                jOffset + self.ValueOf(JANUARY), jOffset + self.ValueOf(APRIL),
                jOffset + self.ValueOf(JULY), jOffset + self.ValueOf(OCTOBER)]))
        except RecognitionException, re:
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
# ANTLR-generated FOLLOW bitsets (frozensets of token types), one per rule
# invocation site; used by the recognizer for error recovery/resync.
FOLLOW_specifictime_in_timespec44 = frozenset([1])
FOLLOW_interval_in_timespec48 = frozenset([1])
FOLLOW_ordinals_in_specifictime69 = frozenset([18, 19, 20, 21, 22, 23, 24])
FOLLOW_weekdays_in_specifictime71 = frozenset([4])
FOLLOW_OF_in_specifictime75 = frozenset([10, 11, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
FOLLOW_monthspec_in_specifictime78 = frozenset([5])
FOLLOW_quarterspec_in_specifictime80 = frozenset([5])
FOLLOW_TIME_in_specifictime93 = frozenset([1])
FOLLOW_EVERY_in_interval112 = frozenset([7, 8])
FOLLOW_set_in_interval122 = frozenset([16, 17])
FOLLOW_period_in_interval138 = frozenset([1])
FOLLOW_EVERY_in_ordinals157 = frozenset([1])
FOLLOW_ordinal_in_ordinals173 = frozenset([1, 9])
FOLLOW_COMMA_in_ordinals176 = frozenset([10, 11, 12, 13, 14, 15])
FOLLOW_ordinal_in_ordinals178 = frozenset([1, 9])
FOLLOW_set_in_ordinal199 = frozenset([1])
FOLLOW_set_in_period238 = frozenset([1])
FOLLOW_weekday_in_weekdays261 = frozenset([1, 9])
FOLLOW_COMMA_in_weekdays264 = frozenset([18, 19, 20, 21, 22, 23, 24])
FOLLOW_weekday_in_weekdays266 = frozenset([1, 9])
FOLLOW_set_in_weekday285 = frozenset([1])
FOLLOW_MONTH_in_monthspec344 = frozenset([1])
FOLLOW_months_in_monthspec354 = frozenset([1])
FOLLOW_month_in_months371 = frozenset([1, 9])
FOLLOW_COMMA_in_months374 = frozenset([25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37])
FOLLOW_month_in_months376 = frozenset([1, 9])
FOLLOW_set_in_month395 = frozenset([1])
FOLLOW_QUARTER_in_quarterspec468 = frozenset([1])
FOLLOW_quarter_ordinals_in_quarterspec480 = frozenset([25])
FOLLOW_MONTH_in_quarterspec482 = frozenset([4])
FOLLOW_OF_in_quarterspec484 = frozenset([38])
FOLLOW_QUARTER_in_quarterspec486 = frozenset([1])
FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals505 = frozenset([1, 9])
FOLLOW_COMMA_in_quarter_ordinals508 = frozenset([10, 11, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals510 = frozenset([1, 9])
FOLLOW_set_in_month_of_quarter_ordinal529 = frozenset([1])
def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
    """Run the generated GrocParser from the command line via ANTLR's harness.

    Args:
      argv: command-line arguments passed through to ParserMain.
      stdin/stdout/stderr: stream overrides for the harness (default to the
        process streams, bound at definition time).
    """
    from antlr3.main import ParserMain
    # Fix: the original bound the harness to a local also named 'main',
    # shadowing this function inside its own body; renamed for clarity.
    harness = ParserMain("GrocLexer", GrocParser)
    harness.stdin = stdin
    harness.stdout = stdout
    harness.stderr = stderr
    harness.execute(argv)

if __name__ == '__main__':
    main(sys.argv)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from antlr3 import *
from antlr3.compat import set, frozenset
# Token channel and token-type constants generated by ANTLR from Groc.g.
# These must stay numerically in sync with the parser's constants.
HIDDEN = BaseRecognizer.HIDDEN
THIRD=12
SEPTEMBER=34
FOURTH=13
SECOND=11
WEDNESDAY=20
NOVEMBER=36
SATURDAY=23
JULY=32
APRIL=29
DIGITS=8
OCTOBER=35
MAY=30
EVERY=6
FEBRUARY=27
MONDAY=18
SUNDAY=24
JUNE=31
OF=4
MARCH=28
EOF=-1
JANUARY=26
MONTH=25
FRIDAY=22
MINUTES=17
FIFTH=14
TIME=5
WS=39
QUARTER=38
THURSDAY=21
COMMA=9
DECEMBER=37
AUGUST=33
DIGIT=7
TUESDAY=19
HOURS=16
FOURTH_OR_FIFTH=15
FIRST=10
class GrocLexer(Lexer):
    """ANTLR 3.1.1-generated lexer for the 'groc' schedule language (Groc.g)."""

    grammarFileName = "Groc.g"
    antlr_version = version_str_to_tuple("3.1.1")
    antlr_version_str = "3.1.1"

    def __init__(self, input=None, state=None):
        if state is None:
            state = RecognizerSharedState()
        Lexer.__init__(self, input, state)
        # DFA used by mTokens() to predict which token rule to invoke.
        self.dfa25 = self.DFA25(
            self, 25,
            eot = self.DFA25_eot,
            eof = self.DFA25_eof,
            min = self.DFA25_min,
            max = self.DFA25_max,
            accept = self.DFA25_accept,
            special = self.DFA25_special,
            transition = self.DFA25_transition
            )
def mTIME(self, ):
    """Lex a TIME token of the form H:MM or HH:MM (hour 0-24 range by prefix).

    Character codes below are ASCII: 48-57 are '0'-'9', 58 is ':'.
    """
    try:
        _type = TIME
        _channel = DEFAULT_CHANNEL
        pass
        # Predict the hour shape from two characters of lookahead:
        # single digit, '0'+digit, '1'+digit, or '2'+('0'..'4').
        alt1 = 4
        LA1 = self.input.LA(1)
        if LA1 == 48:
            LA1_1 = self.input.LA(2)
            if ((48 <= LA1_1 <= 57)) :
                alt1 = 2
            elif (LA1_1 == 58) :
                alt1 = 1
            else:
                nvae = NoViableAltException("", 1, 1, self.input)
                raise nvae
        elif LA1 == 49:
            LA1_2 = self.input.LA(2)
            if ((48 <= LA1_2 <= 57)) :
                alt1 = 3
            elif (LA1_2 == 58) :
                alt1 = 1
            else:
                nvae = NoViableAltException("", 1, 2, self.input)
                raise nvae
        elif LA1 == 50:
            LA1_3 = self.input.LA(2)
            if ((48 <= LA1_3 <= 52)) :
                alt1 = 4
            elif (LA1_3 == 58) :
                alt1 = 1
            else:
                nvae = NoViableAltException("", 1, 3, self.input)
                raise nvae
        elif LA1 == 51 or LA1 == 52 or LA1 == 53 or LA1 == 54 or LA1 == 55 or LA1 == 56 or LA1 == 57:
            alt1 = 1
        else:
            nvae = NoViableAltException("", 1, 0, self.input)
            raise nvae
        if alt1 == 1:
            pass
            self.mDIGIT()
        elif alt1 == 2:
            pass
            pass
            self.match(48)
            self.mDIGIT()
        elif alt1 == 3:
            pass
            pass
            self.match(49)
            self.mDIGIT()
        elif alt1 == 4:
            pass
            pass
            self.match(50)
            self.matchRange(48, 52)
        # Common suffix: ':' then minutes ('0'..'5' followed by a digit).
        self.match(58)
        pass
        self.matchRange(48, 53)
        self.mDIGIT()
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass
def mFIRST(self, ):
    """Lex FIRST: "1st" or "first" (49 = '1', 102 = 'f')."""
    try:
        _type = FIRST
        _channel = DEFAULT_CHANNEL
        pass
        alt2 = 2
        LA2_0 = self.input.LA(1)
        if (LA2_0 == 49) :
            alt2 = 1
        elif (LA2_0 == 102) :
            alt2 = 2
        else:
            nvae = NoViableAltException("", 2, 0, self.input)
            raise nvae
        if alt2 == 1:
            pass
            self.match("1st")
        elif alt2 == 2:
            pass
            self.match("first")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mSECOND(self, ):
    """Lex SECOND: "2nd" or "second" (50 = '2', 115 = 's')."""
    try:
        _type = SECOND
        _channel = DEFAULT_CHANNEL
        pass
        alt3 = 2
        LA3_0 = self.input.LA(1)
        if (LA3_0 == 50) :
            alt3 = 1
        elif (LA3_0 == 115) :
            alt3 = 2
        else:
            nvae = NoViableAltException("", 3, 0, self.input)
            raise nvae
        if alt3 == 1:
            pass
            self.match("2nd")
        elif alt3 == 2:
            pass
            self.match("second")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mTHIRD(self, ):
    """Lex THIRD: "3rd" or "third" (51 = '3', 116 = 't')."""
    try:
        _type = THIRD
        _channel = DEFAULT_CHANNEL
        pass
        alt4 = 2
        LA4_0 = self.input.LA(1)
        if (LA4_0 == 51) :
            alt4 = 1
        elif (LA4_0 == 116) :
            alt4 = 2
        else:
            nvae = NoViableAltException("", 4, 0, self.input)
            raise nvae
        if alt4 == 1:
            pass
            self.match("3rd")
        elif alt4 == 2:
            pass
            self.match("third")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mFOURTH(self, ):
    """Lex FOURTH: "4th" (the word form is handled by mFOURTH_OR_FIFTH)."""
    try:
        _type = FOURTH
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.match("4th")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mFIFTH(self, ):
    """Lex FIFTH: "5th" (the word form is handled by mFOURTH_OR_FIFTH)."""
    try:
        _type = FIFTH
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.match("5th")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass
def mFOURTH_OR_FIFTH(self, ):
    """Lex "fourth" or "fifth" and retarget _type to FOURTH or FIFTH.

    Disambiguates on the second character (111 = 'o', 105 = 'i').
    """
    try:
        _type = FOURTH_OR_FIFTH
        _channel = DEFAULT_CHANNEL
        pass
        alt5 = 2
        LA5_0 = self.input.LA(1)
        if (LA5_0 == 102) :
            LA5_1 = self.input.LA(2)
            if (LA5_1 == 111) :
                alt5 = 1
            elif (LA5_1 == 105) :
                alt5 = 2
            else:
                nvae = NoViableAltException("", 5, 1, self.input)
                raise nvae
        else:
            nvae = NoViableAltException("", 5, 0, self.input)
            raise nvae
        if alt5 == 1:
            pass
            pass
            self.match("fourth")
            _type = FOURTH;
        elif alt5 == 2:
            pass
            pass
            self.match("fifth")
            _type = FIFTH;
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass
def mMONDAY(self, ):
    """Lex MONDAY: "mon" with optional "day" suffix (100 = 'd')."""
    try:
        _type = MONDAY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("mon")
        alt6 = 2
        LA6_0 = self.input.LA(1)
        if (LA6_0 == 100) :
            alt6 = 1
        if alt6 == 1:
            pass
            self.match("day")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mTUESDAY(self, ):
    """Lex TUESDAY: "tue" with optional "sday" suffix (115 = 's')."""
    try:
        _type = TUESDAY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("tue")
        alt7 = 2
        LA7_0 = self.input.LA(1)
        if (LA7_0 == 115) :
            alt7 = 1
        if alt7 == 1:
            pass
            self.match("sday")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mWEDNESDAY(self, ):
    """Lex WEDNESDAY: "wed" with optional "nesday" suffix (110 = 'n')."""
    try:
        _type = WEDNESDAY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("wed")
        alt8 = 2
        LA8_0 = self.input.LA(1)
        if (LA8_0 == 110) :
            alt8 = 1
        if alt8 == 1:
            pass
            self.match("nesday")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mTHURSDAY(self, ):
    """Lex THURSDAY: "thu" with optional "rsday" suffix (114 = 'r')."""
    try:
        _type = THURSDAY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("thu")
        alt9 = 2
        LA9_0 = self.input.LA(1)
        if (LA9_0 == 114) :
            alt9 = 1
        if alt9 == 1:
            pass
            self.match("rsday")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mFRIDAY(self, ):
    """Lex FRIDAY: "fri" with optional "day" suffix (100 = 'd')."""
    try:
        _type = FRIDAY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("fri")
        alt10 = 2
        LA10_0 = self.input.LA(1)
        if (LA10_0 == 100) :
            alt10 = 1
        if alt10 == 1:
            pass
            self.match("day")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mSATURDAY(self, ):
    """Lex SATURDAY: "sat" with optional "urday" suffix (117 = 'u')."""
    try:
        _type = SATURDAY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("sat")
        alt11 = 2
        LA11_0 = self.input.LA(1)
        if (LA11_0 == 117) :
            alt11 = 1
        if alt11 == 1:
            pass
            self.match("urday")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mSUNDAY(self, ):
    """Lex SUNDAY: "sun" with optional "day" suffix (100 = 'd')."""
    try:
        _type = SUNDAY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("sun")
        alt12 = 2
        LA12_0 = self.input.LA(1)
        if (LA12_0 == 100) :
            alt12 = 1
        if alt12 == 1:
            pass
            self.match("day")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass
def mJANUARY(self, ):
    """Lex JANUARY: "jan" with optional "uary" suffix (117 = 'u')."""
    try:
        _type = JANUARY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("jan")
        alt13 = 2
        LA13_0 = self.input.LA(1)
        if (LA13_0 == 117) :
            alt13 = 1
        if alt13 == 1:
            pass
            self.match("uary")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mFEBRUARY(self, ):
    """Lex FEBRUARY: "feb" with optional "ruary" suffix (114 = 'r')."""
    try:
        _type = FEBRUARY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("feb")
        alt14 = 2
        LA14_0 = self.input.LA(1)
        if (LA14_0 == 114) :
            alt14 = 1
        if alt14 == 1:
            pass
            self.match("ruary")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mMARCH(self, ):
    """Lex MARCH: "mar" with optional "ch" suffix (99 = 'c')."""
    try:
        _type = MARCH
        _channel = DEFAULT_CHANNEL
        pass
        self.match("mar")
        alt15 = 2
        LA15_0 = self.input.LA(1)
        if (LA15_0 == 99) :
            alt15 = 1
        if alt15 == 1:
            pass
            self.match("ch")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mAPRIL(self, ):
    """Lex APRIL: "apr" with optional "il" suffix (105 = 'i')."""
    try:
        _type = APRIL
        _channel = DEFAULT_CHANNEL
        pass
        self.match("apr")
        alt16 = 2
        LA16_0 = self.input.LA(1)
        if (LA16_0 == 105) :
            alt16 = 1
        if alt16 == 1:
            pass
            self.match("il")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mMAY(self, ):
    """Lex MAY: "may" (no long/short variants)."""
    try:
        _type = MAY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("may")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mJUNE(self, ):
    """Lex JUNE: "jun" with optional trailing 'e' (101 = 'e')."""
    try:
        _type = JUNE
        _channel = DEFAULT_CHANNEL
        pass
        self.match("jun")
        alt17 = 2
        LA17_0 = self.input.LA(1)
        if (LA17_0 == 101) :
            alt17 = 1
        if alt17 == 1:
            pass
            self.match(101)
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mJULY(self, ):
    """Lex JULY: "jul" with optional trailing 'y' (121 = 'y')."""
    try:
        _type = JULY
        _channel = DEFAULT_CHANNEL
        pass
        self.match("jul")
        alt18 = 2
        LA18_0 = self.input.LA(1)
        if (LA18_0 == 121) :
            alt18 = 1
        if alt18 == 1:
            pass
            self.match(121)
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mAUGUST(self, ):
    """Lex AUGUST: "aug" with optional "ust" suffix (117 = 'u')."""
    try:
        _type = AUGUST
        _channel = DEFAULT_CHANNEL
        pass
        self.match("aug")
        alt19 = 2
        LA19_0 = self.input.LA(1)
        if (LA19_0 == 117) :
            alt19 = 1
        if alt19 == 1:
            pass
            self.match("ust")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mSEPTEMBER(self, ):
    """Lex SEPTEMBER: "sep" with optional "tember" suffix (116 = 't')."""
    try:
        _type = SEPTEMBER
        _channel = DEFAULT_CHANNEL
        pass
        self.match("sep")
        alt20 = 2
        LA20_0 = self.input.LA(1)
        if (LA20_0 == 116) :
            alt20 = 1
        if alt20 == 1:
            pass
            self.match("tember")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mOCTOBER(self, ):
    """Lex OCTOBER: "oct" with optional "ober" suffix (111 = 'o')."""
    try:
        _type = OCTOBER
        _channel = DEFAULT_CHANNEL
        pass
        self.match("oct")
        alt21 = 2
        LA21_0 = self.input.LA(1)
        if (LA21_0 == 111) :
            alt21 = 1
        if alt21 == 1:
            pass
            self.match("ober")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mNOVEMBER(self, ):
    """Lex NOVEMBER: "nov" with optional "ember" suffix (101 = 'e')."""
    try:
        _type = NOVEMBER
        _channel = DEFAULT_CHANNEL
        pass
        self.match("nov")
        alt22 = 2
        LA22_0 = self.input.LA(1)
        if (LA22_0 == 101) :
            alt22 = 1
        if alt22 == 1:
            pass
            self.match("ember")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mDECEMBER(self, ):
    """Lex DECEMBER: "dec" with optional "ember" suffix (101 = 'e')."""
    try:
        _type = DECEMBER
        _channel = DEFAULT_CHANNEL
        pass
        self.match("dec")
        alt23 = 2
        LA23_0 = self.input.LA(1)
        if (LA23_0 == 101) :
            alt23 = 1
        if alt23 == 1:
            pass
            self.match("ember")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass
def mMONTH(self, ):
    """Lex the keyword MONTH: "month"."""
    try:
        _type = MONTH
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.match("month")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mQUARTER(self, ):
    """Lex the keyword QUARTER: "quarter"."""
    try:
        _type = QUARTER
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.match("quarter")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mEVERY(self, ):
    """Lex the keyword EVERY: "every"."""
    try:
        _type = EVERY
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.match("every")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mHOURS(self, ):
    """Lex the keyword HOURS: "hours"."""
    try:
        _type = HOURS
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.match("hours")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass
def mMINUTES(self, ):
    """Lex MINUTES: "mins" or "minutes".

    Both forms share the prefix "min"; the fourth character disambiguates
    (115 = 's' -> "mins", 117 = 'u' -> "minutes").
    """
    try:
        _type = MINUTES
        _channel = DEFAULT_CHANNEL
        pass
        alt24 = 2
        LA24_0 = self.input.LA(1)
        if (LA24_0 == 109) :
            LA24_1 = self.input.LA(2)
            if (LA24_1 == 105) :
                LA24_2 = self.input.LA(3)
                if (LA24_2 == 110) :
                    LA24_3 = self.input.LA(4)
                    if (LA24_3 == 115) :
                        alt24 = 1
                    elif (LA24_3 == 117) :
                        alt24 = 2
                    else:
                        nvae = NoViableAltException("", 24, 3, self.input)
                        raise nvae
                else:
                    nvae = NoViableAltException("", 24, 2, self.input)
                    raise nvae
            else:
                nvae = NoViableAltException("", 24, 1, self.input)
                raise nvae
        else:
            nvae = NoViableAltException("", 24, 0, self.input)
            raise nvae
        if alt24 == 1:
            pass
            self.match("mins")
        elif alt24 == 2:
            pass
            self.match("minutes")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass
def mCOMMA(self, ):
    """Lex COMMA: ',' (ASCII 44)."""
    try:
        _type = COMMA
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.match(44)
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mOF(self, ):
    """Lex the keyword OF: "of"."""
    try:
        _type = OF
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.match("of")
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mWS(self, ):
    """Lex WS: tab, LF, CR, or space; emitted on the HIDDEN channel."""
    try:
        _type = WS
        _channel = DEFAULT_CHANNEL
        pass
        if (9 <= self.input.LA(1) <= 10) or self.input.LA(1) == 13 or self.input.LA(1) == 32:
            self.input.consume()
        else:
            mse = MismatchedSetException(None, self.input)
            self.recover(mse)
            raise mse
        # Whitespace is hidden from the parser's token stream.
        _channel=HIDDEN;
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mDIGIT(self, ):
    """Lex DIGIT: one character '0'..'9' (ASCII 48-57)."""
    try:
        _type = DIGIT
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.matchRange(48, 57)
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass

def mDIGITS(self, ):
    """Lex DIGITS: exactly two DIGITs."""
    try:
        _type = DIGITS
        _channel = DEFAULT_CHANNEL
        pass
        pass
        self.mDIGIT()
        self.mDIGIT()
        self._state.type = _type
        self._state.channel = _channel
    finally:
        pass
def mTokens(self):
    """Dispatch to the token rule selected by the DFA25 prediction.

    Fix: removed the dead store 'alt25 = 36' that was unconditionally
    overwritten by the DFA prediction on the next line, along with the
    generated no-op 'pass' statements.
    """
    alt25 = self.dfa25.predict(self.input)
    if alt25 == 1:
        self.mTIME()
    elif alt25 == 2:
        self.mFIRST()
    elif alt25 == 3:
        self.mSECOND()
    elif alt25 == 4:
        self.mTHIRD()
    elif alt25 == 5:
        self.mFOURTH()
    elif alt25 == 6:
        self.mFIFTH()
    elif alt25 == 7:
        self.mFOURTH_OR_FIFTH()
    elif alt25 == 8:
        self.mMONDAY()
    elif alt25 == 9:
        self.mTUESDAY()
    elif alt25 == 10:
        self.mWEDNESDAY()
    elif alt25 == 11:
        self.mTHURSDAY()
    elif alt25 == 12:
        self.mFRIDAY()
    elif alt25 == 13:
        self.mSATURDAY()
    elif alt25 == 14:
        self.mSUNDAY()
    elif alt25 == 15:
        self.mJANUARY()
    elif alt25 == 16:
        self.mFEBRUARY()
    elif alt25 == 17:
        self.mMARCH()
    elif alt25 == 18:
        self.mAPRIL()
    elif alt25 == 19:
        self.mMAY()
    elif alt25 == 20:
        self.mJUNE()
    elif alt25 == 21:
        self.mJULY()
    elif alt25 == 22:
        self.mAUGUST()
    elif alt25 == 23:
        self.mSEPTEMBER()
    elif alt25 == 24:
        self.mOCTOBER()
    elif alt25 == 25:
        self.mNOVEMBER()
    elif alt25 == 26:
        self.mDECEMBER()
    elif alt25 == 27:
        self.mMONTH()
    elif alt25 == 28:
        self.mQUARTER()
    elif alt25 == 29:
        self.mEVERY()
    elif alt25 == 30:
        self.mHOURS()
    elif alt25 == 31:
        self.mMINUTES()
    elif alt25 == 32:
        self.mCOMMA()
    elif alt25 == 33:
        self.mOF()
    elif alt25 == 34:
        self.mWS()
    elif alt25 == 35:
        self.mDIGIT()
    elif alt25 == 36:
        self.mDIGITS()
# Packed transition tables for lookahead DFA 25, generated by ANTLR.
# The octal-escaped unicode strings are decoded by DFA.unpack(); do not edit
# by hand — regenerate from Groc.g instead.
DFA25_eot = DFA.unpack(
    u"\1\uffff\4\27\2\uffff\1\27\1\uffff\2\27\16\uffff\1\36\1\uffff\2"
    u"\36\31\uffff\1\74\6\uffff"
    )

DFA25_eof = DFA.unpack(
    u"\75\uffff"
    )

DFA25_min = DFA.unpack(
    u"\1\11\4\60\1\145\1\141\1\60\1\150\2\60\1\141\1\uffff\1\141\1\160"
    u"\1\143\11\uffff\1\72\1\uffff\2\72\3\uffff\1\146\3\uffff\1\143\3"
    u"\uffff\1\151\2\uffff\1\156\1\162\2\uffff\1\154\6\uffff\1\164\6"
    u"\uffff"
    )

DFA25_max = DFA.unpack(
    u"\1\167\1\72\1\163\1\156\2\162\1\165\1\164\1\165\1\164\1\72\1\157"
    u"\1\uffff\2\165\1\146\11\uffff\1\72\1\uffff\2\72\3\uffff\1\162\3"
    u"\uffff\1\160\3\uffff\1\165\2\uffff\1\156\1\171\2\uffff\1\156\6"
    u"\uffff\1\164\6\uffff"
    )

DFA25_accept = DFA.unpack(
    u"\14\uffff\1\12\3\uffff\1\31\1\32\1\34\1\35\1\36\1\40\1\42\1\43"
    u"\1\1\1\uffff\1\2\2\uffff\1\3\1\44\1\4\1\uffff\1\7\1\14\1\20\1\uffff"
    u"\1\15\1\16\1\5\1\uffff\1\11\1\6\2\uffff\1\37\1\17\1\uffff\1\22"
    u"\1\26\1\30\1\41\1\27\1\13\1\uffff\1\21\1\23\1\24\1\25\1\33\1\10"
    )

DFA25_special = DFA.unpack(
    u"\75\uffff"
    )

DFA25_transition = [
    DFA.unpack(u"\2\26\2\uffff\1\26\22\uffff\1\26\13\uffff\1\25\3\uffff"
    u"\1\1\1\2\1\3\1\4\1\7\1\11\4\12\47\uffff\1\16\2\uffff\1\21\1\23"
    u"\1\5\1\uffff\1\24\1\uffff\1\15\2\uffff\1\13\1\20\1\17\1\uffff\1"
    u"\22\1\uffff\1\6\1\10\2\uffff\1\14"),
    DFA.unpack(u"\12\31\1\30"),
    DFA.unpack(u"\12\33\1\30\70\uffff\1\32"),
    DFA.unpack(u"\5\34\5\36\1\30\63\uffff\1\35"),
    DFA.unpack(u"\12\36\1\30\67\uffff\1\37"),
    DFA.unpack(u"\1\43\3\uffff\1\40\5\uffff\1\41\2\uffff\1\42"),
    DFA.unpack(u"\1\45\3\uffff\1\44\17\uffff\1\46"),
    DFA.unpack(u"\12\36\1\30\71\uffff\1\47"),
    DFA.unpack(u"\1\50\14\uffff\1\51"),
    DFA.unpack(u"\12\36\1\30\71\uffff\1\52"),
    DFA.unpack(u"\12\36\1\30"),
    DFA.unpack(u"\1\54\7\uffff\1\55\5\uffff\1\53"),
    DFA.unpack(u""),
    DFA.unpack(u"\1\56\23\uffff\1\57"),
    DFA.unpack(u"\1\60\4\uffff\1\61"),
    DFA.unpack(u"\1\62\2\uffff\1\63"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"\1\30"),
    DFA.unpack(u""),
    DFA.unpack(u"\1\30"),
    DFA.unpack(u"\1\30"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"\1\41\13\uffff\1\32"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"\1\35\14\uffff\1\64"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"\1\37\13\uffff\1\65"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"\1\66"),
    DFA.unpack(u"\1\67\6\uffff\1\70"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"\1\72\1\uffff\1\71"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"\1\73"),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u""),
    DFA.unpack(u"")
]

# Alias so self.DFA25(...) in __init__ resolves to the runtime DFA class.
DFA25 = DFA
def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
    """Run the generated GrocLexer from the command line via ANTLR's harness.

    Args:
      argv: command-line arguments passed through to LexerMain.
      stdin/stdout/stderr: stream overrides for the harness (default to the
        process streams, bound at definition time).
    """
    from antlr3.main import LexerMain
    # Fix: the original bound the harness to a local also named 'main',
    # shadowing this function inside its own body; renamed for clarity.
    harness = LexerMain(GrocLexer)
    harness.stdin = stdin
    harness.stdout = stdout
    harness.stderr = stderr
    harness.execute(argv)

if __name__ == '__main__':
    main(sys.argv)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A wrapper around the generated Groc parser and lexer."""
import google
import antlr3
import GrocLexer
import GrocParser
class GrocException(Exception):
    """Raised when the groc input string fails to parse."""
class GrocLexerWithErrors(GrocLexer.GrocLexer):
    """GrocLexer variant whose error reporting raises instead of printing."""

    def emitErrorMessage(self, msg):
        """Convert a lexer error message into a GrocException.

        The base-class implementation writes the message to stderr; this
        override turns every reported error into an exception instead.

        Arguments:
          msg: the error message

        Raises:
          GrocException: always.
        """
        raise GrocException(msg)
class GrocParserWithErrors(GrocParser.GrocParser):
    """GrocParser variant whose error reporting raises instead of printing."""

    def emitErrorMessage(self, msg):
        """Convert a parser error message into a GrocException.

        The base-class implementation writes the message to stderr; this
        override turns every reported error into an exception instead.

        Arguments:
          msg: the error message

        Raises:
          GrocException: always.
        """
        raise GrocException(msg)
def CreateParser(parse_string):
    """Build an error-raising Groc parser over the given schedule string."""
    char_stream = antlr3.ANTLRStringStream(parse_string)
    error_lexer = GrocLexerWithErrors(char_stream)
    token_stream = antlr3.CommonTokenStream(error_lexer)
    return GrocParserWithErrors(token_stream)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import sys
from antlr3 import *
from antlr3.compat import set, frozenset
# "every" in the ordinals rule expands to all five ordinals.
allOrdinals = set([1, 2, 3, 4, 5])
numOrdinals = len(allOrdinals)

# Token channel and token-type constants generated by ANTLR from Groc.g.
HIDDEN = BaseRecognizer.HIDDEN
THIRD=12
SEPTEMBER=34
FOURTH=13
SECOND=11
WEDNESDAY=20
NOVEMBER=36
SATURDAY=23
JULY=32
APRIL=29
DIGITS=8
OCTOBER=35
MAY=30
EVERY=6
FEBRUARY=27
MONDAY=18
SUNDAY=24
JUNE=31
MARCH=28
OF=4
EOF=-1
JANUARY=26
MONTH=25
FRIDAY=22
FIFTH=14
MINUTES=17
TIME=5
WS=39
QUARTER=38
THURSDAY=21
COMMA=9
DECEMBER=37
AUGUST=33
DIGIT=7
TUESDAY=19
HOURS=16
FIRST=10
FOURTH_OR_FIFTH=15

# Human-readable token names indexed by token type; the first four slots are
# ANTLR's built-in pseudo-tokens.
tokenNames = [
    "<invalid>", "<EOR>", "<DOWN>", "<UP>",
    "OF", "TIME", "EVERY", "DIGIT", "DIGITS", "COMMA", "FIRST", "SECOND",
    "THIRD", "FOURTH", "FIFTH", "FOURTH_OR_FIFTH", "HOURS", "MINUTES", "MONDAY",
    "TUESDAY", "WEDNESDAY", "THURSDAY", "FRIDAY", "SATURDAY", "SUNDAY",
    "MONTH", "JANUARY", "FEBRUARY", "MARCH", "APRIL", "MAY", "JUNE", "JULY",
    "AUGUST", "SEPTEMBER", "OCTOBER", "NOVEMBER", "DECEMBER", "QUARTER",
    "WS"
]
class GrocParser(Parser):
    """ANTLR 3.1.1-generated parser for the 'groc' schedule language (Groc.g)."""

    grammarFileName = "Groc.g"
    antlr_version = version_str_to_tuple("3.1.1")
    antlr_version_str = "3.1.1"
    tokenNames = tokenNames

    def __init__(self, input, state=None):
        # Parse results are accumulated into these instance attributes as the
        # rule methods run.
        if state is None:
            state = RecognizerSharedState()
        Parser.__init__(self, input, state)
        self.ordinal_set = set()    # ordinal values seen ("first", "2nd", ...)
        self.weekday_set = set()    # weekday values (Sunday=0 .. Saturday=6)
        self.month_set = set()      # month values (January=1 .. December=12)
        self.time_string = '';      # text of the TIME token ("HH:MM")
        self.interval_mins = 0;     # N in "every N hours|minutes"
        self.period_string = '';    # "hours" or "minutes"

    # Token-type -> calendar-value map used by ValueOf().
    # NOTE(review): indentation was lost in this copy of the file; ValueOf
    # reads self.valuesDict, so this is reconstructed as a class attribute.
    valuesDict = {
        SUNDAY: 0,
        FIRST: 1,
        MONDAY: 1,
        JANUARY: 1,
        TUESDAY: 2,
        SECOND: 2,
        FEBRUARY: 2,
        WEDNESDAY: 3,
        THIRD: 3,
        MARCH: 3,
        THURSDAY: 4,
        FOURTH: 4,
        APRIL: 4,
        FRIDAY: 5,
        FIFTH: 5,
        MAY: 5,
        SATURDAY: 6,
        JUNE: 6,
        JULY: 7,
        AUGUST: 8,
        SEPTEMBER: 9,
        OCTOBER: 10,
        NOVEMBER: 11,
        DECEMBER: 12,
    }

    def ValueOf(self, token_type):
        # Map a token type to its calendar value; -1 for unknown token types.
        return self.valuesDict.get(token_type, -1)
def timespec(self, ):
    """Parse rule 'timespec': either a specific time or an interval."""
    try:
        try:
            pass
            # Predict the alternative from up to two tokens of lookahead:
            # EVERY followed by DIGIT/DIGITS -> interval (alt 2);
            # EVERY followed by a weekday, or a leading ordinal -> specifictime.
            alt1 = 2
            LA1_0 = self.input.LA(1)
            if (LA1_0 == EVERY) :
                LA1_1 = self.input.LA(2)
                if ((DIGIT <= LA1_1 <= DIGITS)) :
                    alt1 = 2
                elif ((MONDAY <= LA1_1 <= SUNDAY)) :
                    alt1 = 1
                else:
                    nvae = NoViableAltException("", 1, 1, self.input)
                    raise nvae
            elif ((FIRST <= LA1_0 <= FOURTH_OR_FIFTH)) :
                alt1 = 1
            else:
                nvae = NoViableAltException("", 1, 0, self.input)
                raise nvae
            if alt1 == 1:
                pass
                self._state.following.append(self.FOLLOW_specifictime_in_timespec44)
                self.specifictime()
                self._state.following.pop()
            elif alt1 == 2:
                pass
                self._state.following.append(self.FOLLOW_interval_in_timespec48)
                self.interval()
                self._state.following.pop()
        except RecognitionException, re:
            # Standard ANTLR recovery: report and resynchronize.
            self.reportError(re)
            self.recover(self.input, re)
    finally:
        pass
    return
def specifictime(self, ):
TIME1 = None
try:
try:
pass
pass
pass
pass
pass
self._state.following.append(self.FOLLOW_ordinals_in_specifictime69)
self.ordinals()
self._state.following.pop()
self._state.following.append(self.FOLLOW_weekdays_in_specifictime71)
self.weekdays()
self._state.following.pop()
self.match(self.input, OF, self.FOLLOW_OF_in_specifictime75)
alt2 = 2
LA2_0 = self.input.LA(1)
if ((MONTH <= LA2_0 <= DECEMBER)) :
alt2 = 1
elif ((FIRST <= LA2_0 <= THIRD) or LA2_0 == QUARTER) :
alt2 = 2
else:
nvae = NoViableAltException("", 2, 0, self.input)
raise nvae
if alt2 == 1:
pass
self._state.following.append(self.FOLLOW_monthspec_in_specifictime78)
self.monthspec()
self._state.following.pop()
elif alt2 == 2:
pass
self._state.following.append(self.FOLLOW_quarterspec_in_specifictime80)
self.quarterspec()
self._state.following.pop()
TIME1=self.match(self.input, TIME, self.FOLLOW_TIME_in_specifictime93)
self.time_string = TIME1.text
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def interval(self, ):
intervalnum = None
period2 = None
try:
try:
pass
pass
self.match(self.input, EVERY, self.FOLLOW_EVERY_in_interval112)
intervalnum = self.input.LT(1)
if (DIGIT <= self.input.LA(1) <= DIGITS):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.interval_mins = int(intervalnum.text)
self._state.following.append(self.FOLLOW_period_in_interval138)
period2 = self.period()
self._state.following.pop()
if ((period2 is not None) and [self.input.toString(period2.start,period2.stop)] or [None])[0] == "hours":
self.period_string = "hours"
else:
self.period_string = "minutes"
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def ordinals(self, ):
try:
try:
pass
alt4 = 2
LA4_0 = self.input.LA(1)
if (LA4_0 == EVERY) :
alt4 = 1
elif ((FIRST <= LA4_0 <= FOURTH_OR_FIFTH)) :
alt4 = 2
else:
nvae = NoViableAltException("", 4, 0, self.input)
raise nvae
if alt4 == 1:
pass
self.match(self.input, EVERY, self.FOLLOW_EVERY_in_ordinals157)
self.ordinal_set = self.ordinal_set.union(allOrdinals)
elif alt4 == 2:
pass
pass
self._state.following.append(self.FOLLOW_ordinal_in_ordinals173)
self.ordinal()
self._state.following.pop()
while True:
alt3 = 2
LA3_0 = self.input.LA(1)
if (LA3_0 == COMMA) :
alt3 = 1
if alt3 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_ordinals176)
self._state.following.append(self.FOLLOW_ordinal_in_ordinals178)
self.ordinal()
self._state.following.pop()
else:
break
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def ordinal(self, ):
ord = None
try:
try:
pass
ord = self.input.LT(1)
if (FIRST <= self.input.LA(1) <= FOURTH_OR_FIFTH):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.ordinal_set.add(self.ValueOf(ord.type));
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
class period_return(ParserRuleReturnScope):
def __init__(self):
ParserRuleReturnScope.__init__(self)
def period(self, ):
retval = self.period_return()
retval.start = self.input.LT(1)
try:
try:
pass
if (HOURS <= self.input.LA(1) <= MINUTES):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
retval.stop = self.input.LT(-1)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return retval
def weekdays(self, ):
try:
try:
pass
pass
self._state.following.append(self.FOLLOW_weekday_in_weekdays261)
self.weekday()
self._state.following.pop()
while True:
alt5 = 2
LA5_0 = self.input.LA(1)
if (LA5_0 == COMMA) :
alt5 = 1
if alt5 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_weekdays264)
self._state.following.append(self.FOLLOW_weekday_in_weekdays266)
self.weekday()
self._state.following.pop()
else:
break
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def weekday(self, ):
dayname = None
try:
try:
pass
dayname = self.input.LT(1)
if (MONDAY <= self.input.LA(1) <= SUNDAY):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.weekday_set.add(self.ValueOf(dayname.type))
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def monthspec(self, ):
try:
try:
pass
alt6 = 2
LA6_0 = self.input.LA(1)
if (LA6_0 == MONTH) :
alt6 = 1
elif ((JANUARY <= LA6_0 <= DECEMBER)) :
alt6 = 2
else:
nvae = NoViableAltException("", 6, 0, self.input)
raise nvae
if alt6 == 1:
pass
self.match(self.input, MONTH, self.FOLLOW_MONTH_in_monthspec344)
self.month_set = self.month_set.union(set([
self.ValueOf(JANUARY), self.ValueOf(FEBRUARY), self.ValueOf(MARCH),
self.ValueOf(APRIL), self.ValueOf(MAY), self.ValueOf(JUNE),
self.ValueOf(JULY), self.ValueOf(AUGUST), self.ValueOf(SEPTEMBER),
self.ValueOf(OCTOBER), self.ValueOf(NOVEMBER),
self.ValueOf(DECEMBER)]))
elif alt6 == 2:
pass
self._state.following.append(self.FOLLOW_months_in_monthspec354)
self.months()
self._state.following.pop()
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def months(self, ):
try:
try:
pass
pass
self._state.following.append(self.FOLLOW_month_in_months371)
self.month()
self._state.following.pop()
while True:
alt7 = 2
LA7_0 = self.input.LA(1)
if (LA7_0 == COMMA) :
alt7 = 1
if alt7 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_months374)
self._state.following.append(self.FOLLOW_month_in_months376)
self.month()
self._state.following.pop()
else:
break
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def month(self, ):
monthname = None
try:
try:
pass
monthname = self.input.LT(1)
if (JANUARY <= self.input.LA(1) <= DECEMBER):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
self.month_set.add(self.ValueOf(monthname.type));
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def quarterspec(self, ):
try:
try:
pass
alt8 = 2
LA8_0 = self.input.LA(1)
if (LA8_0 == QUARTER) :
alt8 = 1
elif ((FIRST <= LA8_0 <= THIRD)) :
alt8 = 2
else:
nvae = NoViableAltException("", 8, 0, self.input)
raise nvae
if alt8 == 1:
pass
self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec468)
self.month_set = self.month_set.union(set([
self.ValueOf(JANUARY), self.ValueOf(APRIL), self.ValueOf(JULY),
self.ValueOf(OCTOBER)]))
elif alt8 == 2:
pass
pass
self._state.following.append(self.FOLLOW_quarter_ordinals_in_quarterspec480)
self.quarter_ordinals()
self._state.following.pop()
self.match(self.input, MONTH, self.FOLLOW_MONTH_in_quarterspec482)
self.match(self.input, OF, self.FOLLOW_OF_in_quarterspec484)
self.match(self.input, QUARTER, self.FOLLOW_QUARTER_in_quarterspec486)
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def quarter_ordinals(self, ):
try:
try:
pass
pass
self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals505)
self.month_of_quarter_ordinal()
self._state.following.pop()
while True:
alt9 = 2
LA9_0 = self.input.LA(1)
if (LA9_0 == COMMA) :
alt9 = 1
if alt9 == 1:
pass
self.match(self.input, COMMA, self.FOLLOW_COMMA_in_quarter_ordinals508)
self._state.following.append(self.FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals510)
self.month_of_quarter_ordinal()
self._state.following.pop()
else:
break
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
def month_of_quarter_ordinal(self, ):
offset = None
try:
try:
pass
offset = self.input.LT(1)
if (FIRST <= self.input.LA(1) <= THIRD):
self.input.consume()
self._state.errorRecovery = False
else:
mse = MismatchedSetException(None, self.input)
raise mse
jOffset = self.ValueOf(offset.type) - 1
self.month_set = self.month_set.union(set([
jOffset + self.ValueOf(JANUARY), jOffset + self.ValueOf(APRIL),
jOffset + self.ValueOf(JULY), jOffset + self.ValueOf(OCTOBER)]))
except RecognitionException, re:
self.reportError(re)
self.recover(self.input, re)
finally:
pass
return
FOLLOW_specifictime_in_timespec44 = frozenset([1])
FOLLOW_interval_in_timespec48 = frozenset([1])
FOLLOW_ordinals_in_specifictime69 = frozenset([18, 19, 20, 21, 22, 23, 24])
FOLLOW_weekdays_in_specifictime71 = frozenset([4])
FOLLOW_OF_in_specifictime75 = frozenset([10, 11, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
FOLLOW_monthspec_in_specifictime78 = frozenset([5])
FOLLOW_quarterspec_in_specifictime80 = frozenset([5])
FOLLOW_TIME_in_specifictime93 = frozenset([1])
FOLLOW_EVERY_in_interval112 = frozenset([7, 8])
FOLLOW_set_in_interval122 = frozenset([16, 17])
FOLLOW_period_in_interval138 = frozenset([1])
FOLLOW_EVERY_in_ordinals157 = frozenset([1])
FOLLOW_ordinal_in_ordinals173 = frozenset([1, 9])
FOLLOW_COMMA_in_ordinals176 = frozenset([10, 11, 12, 13, 14, 15])
FOLLOW_ordinal_in_ordinals178 = frozenset([1, 9])
FOLLOW_set_in_ordinal199 = frozenset([1])
FOLLOW_set_in_period238 = frozenset([1])
FOLLOW_weekday_in_weekdays261 = frozenset([1, 9])
FOLLOW_COMMA_in_weekdays264 = frozenset([18, 19, 20, 21, 22, 23, 24])
FOLLOW_weekday_in_weekdays266 = frozenset([1, 9])
FOLLOW_set_in_weekday285 = frozenset([1])
FOLLOW_MONTH_in_monthspec344 = frozenset([1])
FOLLOW_months_in_monthspec354 = frozenset([1])
FOLLOW_month_in_months371 = frozenset([1, 9])
FOLLOW_COMMA_in_months374 = frozenset([25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37])
FOLLOW_month_in_months376 = frozenset([1, 9])
FOLLOW_set_in_month395 = frozenset([1])
FOLLOW_QUARTER_in_quarterspec468 = frozenset([1])
FOLLOW_quarter_ordinals_in_quarterspec480 = frozenset([25])
FOLLOW_MONTH_in_quarterspec482 = frozenset([4])
FOLLOW_OF_in_quarterspec484 = frozenset([38])
FOLLOW_QUARTER_in_quarterspec486 = frozenset([1])
FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals505 = frozenset([1, 9])
FOLLOW_COMMA_in_quarter_ordinals508 = frozenset([10, 11, 12, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38])
FOLLOW_month_of_quarter_ordinal_in_quarter_ordinals510 = frozenset([1, 9])
FOLLOW_set_in_month_of_quarter_ordinal529 = frozenset([1])
def main(argv, stdin=sys.stdin, stdout=sys.stdout, stderr=sys.stderr):
    """Command-line entry point: drive GrocParser via the ANTLR test harness.

    Arguments:
      argv: command-line arguments passed through to the ANTLR ParserMain.
      stdin, stdout, stderr: streams the harness should use; default to the
        process streams.
    """
    from antlr3.main import ParserMain
    harness = ParserMain("GrocLexer", GrocParser)
    harness.stdin = stdin
    harness.stdout = stdout
    harness.stderr = stderr
    harness.execute(argv)


if __name__ == '__main__':
    main(sys.argv)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"this file is needed to make this a package"
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Implementation of scheduling for Groc format schedules.
A Groc schedule looks like '1st,2nd monday 9:00', or 'every 20 mins'. This
module takes a parsed schedule (produced by Antlr) and creates objects that
can produce times that match this schedule.
A parsed schedule is one of two types - an Interval or a Specific Time.
See the class docstrings for more.
Extensions to be considered:
allowing a comma separated list of times to run
allowing the user to specify particular days of the month to run
"""
import calendar
import datetime
try:
import pytz
except ImportError:
pytz = None
import groc
# Canonical period names, matching the strings the Groc parser stores in
# period_string.
HOURS = 'hours'
MINUTES = 'minutes'
try:
  from pytz import NonExistentTimeError
except ImportError:
  # pytz is optional; define a stand-in so that `except NonExistentTimeError`
  # clauses elsewhere still work when pytz is not installed.
  class NonExistentTimeError(Exception):
    pass
def GrocTimeSpecification(schedule):
  """Factory function.

  Turns a schedule specification into a TimeSpecification.

  Arguments:
    schedule: the schedule specification, as a string

  Returns:
    a TimeSpecification instance
  """
  parser = groc.CreateParser(schedule)
  parser.timespec()
  # A non-zero interval means an "every N ..." schedule; otherwise the
  # parser produced a specific-time schedule.
  if not parser.interval_mins:
    return SpecificTimeSpecification(parser.ordinal_set, parser.weekday_set,
                                     parser.month_set, None,
                                     parser.time_string)
  return IntervalTimeSpecification(parser.interval_mins, parser.period_string)
class TimeSpecification(object):
  """Base class for time specifications."""

  def GetMatches(self, start, n):
    """Returns the next n times that match the schedule, starting at start.

    Arguments:
      start: a datetime to start from. Matches will start from after this time.
      n: the number of matching times to return

    Returns:
      a list of n datetime objects
    """
    matches = []
    while len(matches) < n:
      # Each match becomes the starting point for finding the next one.
      start = self.GetMatch(start)
      matches.append(start)
    return matches

  def GetMatch(self, start):
    """Returns the next match after time start.

    Must be implemented in subclasses.

    Arguments:
      start: a datetime to start with. Matches will start from this time.

    Returns:
      a datetime object
    """
    raise NotImplementedError
class IntervalTimeSpecification(TimeSpecification):
  """A time specification for a given interval.

  An Interval type spec runs at the given fixed interval. It has two
  attributes:
  period - the type of interval, either "hours" or "minutes"
  interval - the number of units of type period.
  """

  def __init__(self, interval, period):
    # BUG FIX: the original called super(...).__init__(self), passing an
    # extra argument through to object.__init__ (TimeSpecification defines
    # no __init__), which raises TypeError on modern Pythons.
    super(IntervalTimeSpecification, self).__init__()
    self.interval = interval
    self.period = period

  def GetMatch(self, t):
    """Returns the next match after time 't'.

    Arguments:
      t: a datetime to start from. Matches will start from after this time.

    Returns:
      a datetime object
    """
    if self.period == HOURS:
      return t + datetime.timedelta(hours=self.interval)
    else:
      return t + datetime.timedelta(minutes=self.interval)
class SpecificTimeSpecification(TimeSpecification):
  """Specific time specification.

  A Specific interval is more complex, but defines a certain time to run and
  the days that it should run. It has the following attributes:
  time     - the time of day to run, as "HH:MM"
  ordinals - first, second, third &c, as a set of integers in 1..5
  months   - the months that this should run, as a set of integers in 1..12
  weekdays - the days of the week that this should run, as a set of integers,
             0=Sunday, 6=Saturday
  timezone - the optional timezone as a string for this specification.
             Defaults to UTC - valid entries are things like Australia/Victoria
             or PST8PDT.

  A specific time schedule can be quite complex. A schedule could look like
  this:
  "1st,third sat,sun of jan,feb,mar 09:15"

  In this case, ordinals would be {1,3}, weekdays {0,6}, months {1,2,3} and
  time would be "09:15".
  """
  # Class-level default; replaced with a pytz timezone per instance when one
  # is requested.
  timezone = None

  def __init__(self, ordinals=None, weekdays=None, months=None, monthdays=None,
               timestr='00:00', timezone=None):
    """Initializes the specification; None for a field means "all values".

    Raises:
      ValueError: if both weekdays and monthdays are given, or if a timezone
        is requested but pytz is not installed.
    """
    # BUG FIX: the original called super(...).__init__(self), passing an
    # extra argument through to object.__init__, which raises TypeError on
    # modern Pythons.
    super(SpecificTimeSpecification, self).__init__()
    if weekdays is not None and monthdays is not None:
      raise ValueError("can't supply both monthdays and weekdays")
    if ordinals is None:
      self.ordinals = set(range(1, 6))
    else:
      self.ordinals = set(ordinals)
    if weekdays is None:
      self.weekdays = set(range(7))
    else:
      self.weekdays = set(weekdays)
    if months is None:
      self.months = set(range(1, 13))
    else:
      self.months = set(months)
    if monthdays is None:
      self.monthdays = set()
    else:
      self.monthdays = set(monthdays)
    hourstr, minutestr = timestr.split(':')
    self.time = datetime.time(int(hourstr), int(minutestr))
    if timezone:
      if pytz is None:
        raise ValueError("need pytz in order to specify a timezone")
      self.timezone = pytz.timezone(timezone)

  def _MatchingDays(self, year, month):
    """Returns matching days for the given year and month.

    For the given year and month, return the days that match this instance's
    day specification, based on the ordinals and weekdays.

    Arguments:
      year: the year as an integer
      month: the month as an integer, in range 1-12

    Returns:
      a list of matching days, as ints in range 1-31
    """
    out_days = []
    start_day, last_day = calendar.monthrange(year, month)
    # calendar.monthrange reports Monday=0; shift so Sunday=0 to match the
    # encoding used by self.weekdays.
    start_day = (start_day + 1) % 7
    for ordinal in self.ordinals:
      for weekday in self.weekdays:
        day = ((weekday - start_day) % 7) + 1
        day += 7 * (ordinal - 1)
        if day <= last_day:
          out_days.append(day)
    return sorted(out_days)

  def _NextMonthGenerator(self, start, matches):
    """Creates a generator that produces results from the set 'matches'.

    Matches must be >= 'start'. If none match, the wrap counter is
    incremented, and the result set is reset to the full set. Yields a
    2-tuple of (match, wrapcount).

    Arguments:
      start: first set of matches will be >= this value (an int)
      matches: the set of potential matches (a sequence of ints)

    Yields:
      a two-tuple of (match, wrap counter). match is an int in range (1-12),
      wrapcount is an int indicating how many times we've wrapped around.
    """
    potential = matches = sorted(matches)
    after = start - 1
    wrapcount = 0
    while True:
      potential = [x for x in potential if x > after]
      if not potential:
        wrapcount += 1
        potential = matches
      after = potential[0]
      yield (after, wrapcount)

  def GetMatch(self, start):
    """Returns the next time that matches the schedule after time start.

    Arguments:
      start: a UTC datetime to start from. Matches will start after this time

    Returns:
      a datetime object
    """
    start_time = start
    if self.timezone and pytz is not None:
      # Work in the schedule's local time, as a naive datetime.
      if not start_time.tzinfo:
        start_time = pytz.utc.localize(start_time)
      start_time = start_time.astimezone(self.timezone)
      start_time = start_time.replace(tzinfo=None)
    if self.months:
      # NOTE(review): if a caller passes an explicitly empty `months`
      # sequence, `months` below is never bound and the loop raises
      # NameError; __init__'s defaulting makes that unlikely -- confirm.
      months = self._NextMonthGenerator(start_time.month, self.months)
    while True:
      # BUG FIX: was months.next(), which only exists on Python 2
      # iterators; the next() builtin works on Python 2.6+ and 3.x.
      month, yearwraps = next(months)
      candidate_month = start_time.replace(day=1, month=month,
                                           year=start_time.year + yearwraps)
      if self.monthdays:
        _, last_day = calendar.monthrange(candidate_month.year,
                                          candidate_month.month)
        day_matches = sorted(x for x in self.monthdays if x <= last_day)
      else:
        day_matches = self._MatchingDays(candidate_month.year, month)
      if ((candidate_month.year, candidate_month.month)
          == (start_time.year, start_time.month)):
        # Still in the starting month: drop earlier days, and drop today if
        # its scheduled time has already passed.
        day_matches = [x for x in day_matches if x >= start_time.day]
        while (day_matches and day_matches[0] == start_time.day
               and start_time.time() >= self.time):
          day_matches.pop(0)
      while day_matches:
        out = candidate_month.replace(day=day_matches[0], hour=self.time.hour,
                                      minute=self.time.minute, second=0,
                                      microsecond=0)
        if self.timezone and pytz is not None:
          try:
            out = self.timezone.localize(out)
          except (NonExistentTimeError, IndexError):
            # The candidate falls in a DST gap; slide forward an hour at a
            # time (up to a day) until the local time exists.
            for _ in range(24):
              out = out.replace(minute=1) + datetime.timedelta(minutes=60)
              try:
                out = self.timezone.localize(out)
              except (NonExistentTimeError, IndexError):
                continue
              break
          out = out.astimezone(pytz.utc)
        return out
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Implementation of scheduling for Groc format schedules.
A Groc schedule looks like '1st,2nd monday 9:00', or 'every 20 mins'. This
module takes a parsed schedule (produced by Antlr) and creates objects that
can produce times that match this schedule.
A parsed schedule is one of two types - an Interval or a Specific Time.
See the class docstrings for more.
Extensions to be considered:
allowing a comma separated list of times to run
allowing the user to specify particular days of the month to run
"""
import calendar
import datetime
try:
import pytz
except ImportError:
pytz = None
import groc
# Canonical period names, matching the strings the Groc parser stores in
# period_string.
HOURS = 'hours'
MINUTES = 'minutes'
try:
  from pytz import NonExistentTimeError
except ImportError:
  # pytz is optional; define a stand-in so that `except NonExistentTimeError`
  # clauses elsewhere still work when pytz is not installed.
  class NonExistentTimeError(Exception):
    pass
def GrocTimeSpecification(schedule):
  """Factory function.

  Turns a schedule specification into a TimeSpecification.

  Arguments:
    schedule: the schedule specification, as a string

  Returns:
    a TimeSpecification instance
  """
  parser = groc.CreateParser(schedule)
  parser.timespec()
  # A non-zero interval means an "every N ..." schedule; otherwise the
  # parser produced a specific-time schedule.
  if not parser.interval_mins:
    return SpecificTimeSpecification(parser.ordinal_set, parser.weekday_set,
                                     parser.month_set, None,
                                     parser.time_string)
  return IntervalTimeSpecification(parser.interval_mins, parser.period_string)
class TimeSpecification(object):
  """Base class for time specifications."""

  def GetMatches(self, start, n):
    """Returns the next n times that match the schedule, starting at start.

    Arguments:
      start: a datetime to start from. Matches will start from after this time.
      n: the number of matching times to return

    Returns:
      a list of n datetime objects
    """
    matches = []
    while len(matches) < n:
      # Each match becomes the starting point for finding the next one.
      start = self.GetMatch(start)
      matches.append(start)
    return matches

  def GetMatch(self, start):
    """Returns the next match after time start.

    Must be implemented in subclasses.

    Arguments:
      start: a datetime to start with. Matches will start from this time.

    Returns:
      a datetime object
    """
    raise NotImplementedError
class IntervalTimeSpecification(TimeSpecification):
  """A time specification for a given interval.

  An Interval type spec runs at the given fixed interval. It has two
  attributes:
  period - the type of interval, either "hours" or "minutes"
  interval - the number of units of type period.
  """

  def __init__(self, interval, period):
    # BUG FIX: the original called super(...).__init__(self), passing an
    # extra argument through to object.__init__ (TimeSpecification defines
    # no __init__), which raises TypeError on modern Pythons.
    super(IntervalTimeSpecification, self).__init__()
    self.interval = interval
    self.period = period

  def GetMatch(self, t):
    """Returns the next match after time 't'.

    Arguments:
      t: a datetime to start from. Matches will start from after this time.

    Returns:
      a datetime object
    """
    if self.period == HOURS:
      return t + datetime.timedelta(hours=self.interval)
    else:
      return t + datetime.timedelta(minutes=self.interval)
class SpecificTimeSpecification(TimeSpecification):
  """Specific time specification.

  A Specific interval is more complex, but defines a certain time to run and
  the days that it should run. It has the following attributes:
  time     - the time of day to run, as "HH:MM"
  ordinals - first, second, third &c, as a set of integers in 1..5
  months   - the months that this should run, as a set of integers in 1..12
  weekdays - the days of the week that this should run, as a set of integers,
             0=Sunday, 6=Saturday
  timezone - the optional timezone as a string for this specification.
             Defaults to UTC - valid entries are things like Australia/Victoria
             or PST8PDT.

  A specific time schedule can be quite complex. A schedule could look like
  this:
  "1st,third sat,sun of jan,feb,mar 09:15"

  In this case, ordinals would be {1,3}, weekdays {0,6}, months {1,2,3} and
  time would be "09:15".
  """
  # Class-level default; replaced with a pytz timezone per instance when one
  # is requested.
  timezone = None

  def __init__(self, ordinals=None, weekdays=None, months=None, monthdays=None,
               timestr='00:00', timezone=None):
    """Initializes the specification; None for a field means "all values".

    Raises:
      ValueError: if both weekdays and monthdays are given, or if a timezone
        is requested but pytz is not installed.
    """
    # BUG FIX: the original called super(...).__init__(self), passing an
    # extra argument through to object.__init__, which raises TypeError on
    # modern Pythons.
    super(SpecificTimeSpecification, self).__init__()
    if weekdays is not None and monthdays is not None:
      raise ValueError("can't supply both monthdays and weekdays")
    if ordinals is None:
      self.ordinals = set(range(1, 6))
    else:
      self.ordinals = set(ordinals)
    if weekdays is None:
      self.weekdays = set(range(7))
    else:
      self.weekdays = set(weekdays)
    if months is None:
      self.months = set(range(1, 13))
    else:
      self.months = set(months)
    if monthdays is None:
      self.monthdays = set()
    else:
      self.monthdays = set(monthdays)
    hourstr, minutestr = timestr.split(':')
    self.time = datetime.time(int(hourstr), int(minutestr))
    if timezone:
      if pytz is None:
        raise ValueError("need pytz in order to specify a timezone")
      self.timezone = pytz.timezone(timezone)

  def _MatchingDays(self, year, month):
    """Returns matching days for the given year and month.

    For the given year and month, return the days that match this instance's
    day specification, based on the ordinals and weekdays.

    Arguments:
      year: the year as an integer
      month: the month as an integer, in range 1-12

    Returns:
      a list of matching days, as ints in range 1-31
    """
    out_days = []
    start_day, last_day = calendar.monthrange(year, month)
    # calendar.monthrange reports Monday=0; shift so Sunday=0 to match the
    # encoding used by self.weekdays.
    start_day = (start_day + 1) % 7
    for ordinal in self.ordinals:
      for weekday in self.weekdays:
        day = ((weekday - start_day) % 7) + 1
        day += 7 * (ordinal - 1)
        if day <= last_day:
          out_days.append(day)
    return sorted(out_days)

  def _NextMonthGenerator(self, start, matches):
    """Creates a generator that produces results from the set 'matches'.

    Matches must be >= 'start'. If none match, the wrap counter is
    incremented, and the result set is reset to the full set. Yields a
    2-tuple of (match, wrapcount).

    Arguments:
      start: first set of matches will be >= this value (an int)
      matches: the set of potential matches (a sequence of ints)

    Yields:
      a two-tuple of (match, wrap counter). match is an int in range (1-12),
      wrapcount is an int indicating how many times we've wrapped around.
    """
    potential = matches = sorted(matches)
    after = start - 1
    wrapcount = 0
    while True:
      potential = [x for x in potential if x > after]
      if not potential:
        wrapcount += 1
        potential = matches
      after = potential[0]
      yield (after, wrapcount)

  def GetMatch(self, start):
    """Returns the next time that matches the schedule after time start.

    Arguments:
      start: a UTC datetime to start from. Matches will start after this time

    Returns:
      a datetime object
    """
    start_time = start
    if self.timezone and pytz is not None:
      # Work in the schedule's local time, as a naive datetime.
      if not start_time.tzinfo:
        start_time = pytz.utc.localize(start_time)
      start_time = start_time.astimezone(self.timezone)
      start_time = start_time.replace(tzinfo=None)
    if self.months:
      # NOTE(review): if a caller passes an explicitly empty `months`
      # sequence, `months` below is never bound and the loop raises
      # NameError; __init__'s defaulting makes that unlikely -- confirm.
      months = self._NextMonthGenerator(start_time.month, self.months)
    while True:
      # BUG FIX: was months.next(), which only exists on Python 2
      # iterators; the next() builtin works on Python 2.6+ and 3.x.
      month, yearwraps = next(months)
      candidate_month = start_time.replace(day=1, month=month,
                                           year=start_time.year + yearwraps)
      if self.monthdays:
        _, last_day = calendar.monthrange(candidate_month.year,
                                          candidate_month.month)
        day_matches = sorted(x for x in self.monthdays if x <= last_day)
      else:
        day_matches = self._MatchingDays(candidate_month.year, month)
      if ((candidate_month.year, candidate_month.month)
          == (start_time.year, start_time.month)):
        # Still in the starting month: drop earlier days, and drop today if
        # its scheduled time has already passed.
        day_matches = [x for x in day_matches if x >= start_time.day]
        while (day_matches and day_matches[0] == start_time.day
               and start_time.time() >= self.time):
          day_matches.pop(0)
      while day_matches:
        out = candidate_month.replace(day=day_matches[0], hour=self.time.hour,
                                      minute=self.time.minute, second=0,
                                      microsecond=0)
        if self.timezone and pytz is not None:
          try:
            out = self.timezone.localize(out)
          except (NonExistentTimeError, IndexError):
            # The candidate falls in a DST gap; slide forward an hour at a
            # time (up to a day) until the local time exists.
            for _ in range(24):
              out = out.replace(minute=1) + datetime.timedelta(minutes=60)
              try:
                out = self.timezone.localize(out)
              except (NonExistentTimeError, IndexError):
                continue
              break
          out = out.astimezone(pytz.utc)
        return out
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A wrapper around the generated Groc parser and lexer."""
import google
import antlr3
import GrocLexer
import GrocParser
class GrocException(Exception):
  """Signals that a groc schedule string failed to lex or parse."""
class GrocLexerWithErrors(GrocLexer.GrocLexer):
  """A GrocLexer variant whose error reporting raises instead of printing."""

  def emitErrorMessage(self, msg):
    """Converts a lexer error message into an exception.

    The stock antlr3 behavior is to print the message to stderr and keep
    going; raising lets callers detect malformed schedule strings.

    Arguments:
      msg: the error message

    Raises:
      GrocException: always.
    """
    raise GrocException(msg)
class GrocParserWithErrors(GrocParser.GrocParser):
  """A GrocParser variant whose error reporting raises instead of printing."""

  def emitErrorMessage(self, msg):
    """Converts a parser error message into an exception.

    The stock antlr3 behavior is to print the message to stderr and keep
    going; raising lets callers detect malformed schedule strings.

    Arguments:
      msg: the error message

    Raises:
      GrocException: always.
    """
    raise GrocException(msg)
def CreateParser(parse_string):
  """Creates a Groc Parser.

  Builds the full antlr3 pipeline (string stream -> lexer -> token stream)
  around the exception-raising lexer/parser subclasses.
  """
  char_stream = antlr3.ANTLRStringStream(parse_string)
  lexer = GrocLexerWithErrors(char_stream)
  token_stream = antlr3.CommonTokenStream(lexer)
  return GrocParserWithErrors(token_stream)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"this file is needed to make this a package"
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import struct
import array
import string
import re
from google.pyglib.gexcept import AbstractMethod
import httplib
# Public API of this module.
__all__ = ['ProtocolMessage', 'Encoder', 'Decoder',
           'ProtocolBufferDecodeError',
           'ProtocolBufferEncodeError',
           'ProtocolBufferReturnError']

# Splits an absolute http(s) URL into (protocol, host, path); used by
# ProtocolMessage.sendCommand to follow HTTP 302 redirects.
URL_RE = re.compile('^(https?)://([^/]+)(/.*)$')
class ProtocolMessage:
  def __init__(self, contents=None):
    # Abstract: generated subclasses implement construction (optionally
    # initializing from an encoded string).
    raise AbstractMethod
  def Clear(self):
    # Abstract: generated subclasses reset all fields to their defaults.
    raise AbstractMethod
  def IsInitialized(self, debug_strs=None):
    # Abstract: generated subclasses report whether all required fields are
    # set, appending human-readable reasons to debug_strs when given.
    raise AbstractMethod
  def Encode(self):
    """Serializes this message to its wire-format string.

    Prefers the C-accelerated _CEncode hook when the generated subclass
    provides one; otherwise falls back to the pure-Python Encoder via
    self.Output.
    """
    try:
      return self._CEncode()
    except AbstractMethod:
      # No C implementation available; use the pure-Python path.
      e = Encoder()
      self.Output(e)
      return e.buffer().tostring()
  def _CEncode(self):
    # Hook for a C-accelerated encoder; generated subclasses may override.
    # The default raise is caught by Encode() to select the Python path.
    raise AbstractMethod
  def ParseFromString(self, s):
    """Resets this message and fills it from the serialized string s."""
    self.Clear()
    self.MergeFromString(s)
    return
def MergeFromString(self, s):
try:
self._CMergeFromString(s)
dbg = []
if not self.IsInitialized(dbg):
raise ProtocolBufferDecodeError, '\n\t'.join(dbg)
except AbstractMethod:
a = array.array('B')
a.fromstring(s)
d = Decoder(a, 0, len(a))
self.Merge(d)
return
def _CMergeFromString(self, s):
raise AbstractMethod
def __getstate__(self):
return self.Encode()
def __setstate__(self, contents_):
self.__init__(contents=contents_)
def sendCommand(self, server, url, response, follow_redirects=1,
secure=0, keyfile=None, certfile=None):
data = self.Encode()
if secure:
if keyfile and certfile:
conn = httplib.HTTPSConnection(server, key_file=keyfile,
cert_file=certfile)
else:
conn = httplib.HTTPSConnection(server)
else:
conn = httplib.HTTPConnection(server)
conn.putrequest("POST", url)
conn.putheader("Content-Length", "%d" %len(data))
conn.endheaders()
conn.send(data)
resp = conn.getresponse()
if follow_redirects > 0 and resp.status == 302:
m = URL_RE.match(resp.getheader('Location'))
if m:
protocol, server, url = m.groups()
return self.sendCommand(server, url, response,
follow_redirects=follow_redirects - 1,
secure=(protocol == 'https'),
keyfile=keyfile,
certfile=certfile)
if resp.status != 200:
raise ProtocolBufferReturnError(resp.status)
if response is not None:
response.ParseFromString(resp.read())
return response
def sendSecureCommand(self, server, keyfile, certfile, url, response,
follow_redirects=1):
return self.sendCommand(server, url, response,
follow_redirects=follow_redirects,
secure=1, keyfile=keyfile, certfile=certfile)
def __str__(self, prefix="", printElemNumber=0):
raise AbstractMethod
def ToASCII(self):
return self._CToASCII(ProtocolMessage._SYMBOLIC_FULL_ASCII)
def ToCompactASCII(self):
return self._CToASCII(ProtocolMessage._NUMERIC_ASCII)
def ToShortASCII(self):
return self._CToASCII(ProtocolMessage._SYMBOLIC_SHORT_ASCII)
_NUMERIC_ASCII = 0
_SYMBOLIC_SHORT_ASCII = 1
_SYMBOLIC_FULL_ASCII = 2
def _CToASCII(self, output_format):
raise AbstractMethod
def ParseASCII(self, ascii_string):
raise AbstractMethod
def ParseASCIIIgnoreUnknown(self, ascii_string):
raise AbstractMethod
def Equals(self, other):
raise AbstractMethod
def __eq__(self, other):
if other.__class__ is self.__class__:
return self.Equals(other)
return NotImplemented
def __ne__(self, other):
if other.__class__ is self.__class__:
return not self.Equals(other)
return NotImplemented
def Output(self, e):
dbg = []
if not self.IsInitialized(dbg):
raise ProtocolBufferEncodeError, '\n\t'.join(dbg)
self.OutputUnchecked(e)
return
def OutputUnchecked(self, e):
raise AbstractMethod
def Parse(self, d):
self.Clear()
self.Merge(d)
return
def Merge(self, d):
self.TryMerge(d)
dbg = []
if not self.IsInitialized(dbg):
raise ProtocolBufferDecodeError, '\n\t'.join(dbg)
return
def TryMerge(self, d):
raise AbstractMethod
def CopyFrom(self, pb):
if (pb == self): return
self.Clear()
self.MergeFrom(pb)
def MergeFrom(self, pb):
raise AbstractMethod
def lengthVarInt32(self, n):
return self.lengthVarInt64(n)
def lengthVarInt64(self, n):
if n < 0:
return 10
result = 0
while 1:
result += 1
n >>= 7
if n == 0:
break
return result
def lengthString(self, n):
return self.lengthVarInt32(n) + n
def DebugFormat(self, value):
return "%s" % value
def DebugFormatInt32(self, value):
if (value <= -2000000000 or value >= 2000000000):
return self.DebugFormatFixed32(value)
return "%d" % value
def DebugFormatInt64(self, value):
if (value <= -2000000000 or value >= 2000000000):
return self.DebugFormatFixed64(value)
return "%d" % value
def DebugFormatString(self, value):
def escape(c):
o = ord(c)
if o == 10: return r"\n"
if o == 39: return r"\'"
if o == 34: return r'\"'
if o == 92: return r"\\"
if o >= 127 or o < 32: return "\\%03o" % o
return c
return '"' + "".join([escape(c) for c in value]) + '"'
def DebugFormatFloat(self, value):
return "%ff" % value
def DebugFormatFixed32(self, value):
if (value < 0): value += (1L<<32)
return "0x%x" % value
def DebugFormatFixed64(self, value):
if (value < 0): value += (1L<<64)
return "0x%x" % value
def DebugFormatBool(self, value):
if value:
return "true"
else:
return "false"
class Encoder:
  """Serializes values into protocol buffer wire format.

  Encoded bytes accumulate in an array('B'); callers retrieve the result
  via buffer().  The class constants are the wire-type codes carried in
  the low 3 bits of every field tag.
  """
  NUMERIC = 0
  DOUBLE = 1
  STRING = 2
  STARTGROUP = 3
  ENDGROUP = 4
  FLOAT = 5
  MAX_TYPE = 6
  def __init__(self):
    self.buf = array.array('B')
    return
  def buffer(self):
    # Returns the underlying byte array (not a copy).
    return self.buf
  def put8(self, v):
    if v < 0 or v >= (1<<8): raise ProtocolBufferEncodeError, "u8 too big"
    self.buf.append(v & 255)
    return
  def put16(self, v):
    # Fixed-width values are written little-endian.
    if v < 0 or v >= (1<<16): raise ProtocolBufferEncodeError, "u16 too big"
    self.buf.append((v >> 0) & 255)
    self.buf.append((v >> 8) & 255)
    return
  def put32(self, v):
    if v < 0 or v >= (1L<<32): raise ProtocolBufferEncodeError, "u32 too big"
    self.buf.append((v >> 0) & 255)
    self.buf.append((v >> 8) & 255)
    self.buf.append((v >> 16) & 255)
    self.buf.append((v >> 24) & 255)
    return
  def put64(self, v):
    if v < 0 or v >= (1L<<64): raise ProtocolBufferEncodeError, "u64 too big"
    self.buf.append((v >> 0) & 255)
    self.buf.append((v >> 8) & 255)
    self.buf.append((v >> 16) & 255)
    self.buf.append((v >> 24) & 255)
    self.buf.append((v >> 32) & 255)
    self.buf.append((v >> 40) & 255)
    self.buf.append((v >> 48) & 255)
    self.buf.append((v >> 56) & 255)
    return
  def putVarInt32(self, v):
    # Varint-encode a signed 32-bit value: 7 bits per byte, low bits
    # first, high bit of each byte meaning "more bytes follow".
    # Negatives are sign-extended to 64 bits, so they take 10 bytes.
    buf_append = self.buf.append
    if v & 127 == v:
      buf_append(v)
      return
    if v >= 0x80000000 or v < -0x80000000:
      raise ProtocolBufferEncodeError, "int32 too big"
    if v < 0:
      v += 0x10000000000000000
    while True:
      bits = v & 127
      v >>= 7
      if v:
        bits |= 128
      buf_append(bits)
      if not v:
        break
    return
  def putVarInt64(self, v):
    buf_append = self.buf.append
    if v >= 0x8000000000000000 or v < -0x8000000000000000:
      raise ProtocolBufferEncodeError, "int64 too big"
    if v < 0:
      # Two's complement: encode as the equivalent unsigned 64-bit value.
      v += 0x10000000000000000
    while True:
      bits = v & 127
      v >>= 7
      if v:
        bits |= 128
      buf_append(bits)
      if not v:
        break
    return
  def putVarUint64(self, v):
    buf_append = self.buf.append
    if v < 0 or v >= 0x10000000000000000:
      raise ProtocolBufferEncodeError, "uint64 too big"
    while True:
      bits = v & 127
      v >>= 7
      if v:
        bits |= 128
      buf_append(bits)
      if not v:
        break
    return
  def putFloat(self, v):
    # Native-endian IEEE-754 single precision (struct format "f").
    a = array.array('B')
    a.fromstring(struct.pack("f", v))
    self.buf.extend(a)
    return
  def putDouble(self, v):
    # Native-endian IEEE-754 double precision (struct format "d").
    a = array.array('B')
    a.fromstring(struct.pack("d", v))
    self.buf.extend(a)
    return
  def putBoolean(self, v):
    if v:
      self.buf.append(1)
    else:
      self.buf.append(0)
    return
  def putPrefixedString(self, v):
    # Length-delimited string: varint byte count, then the raw bytes.
    self.putVarInt32(len(v))
    self.buf.fromstring(v)
    return
  def putRawString(self, v):
    # Appends v with no length prefix.
    self.buf.fromstring(v)
class Decoder:
  """Reads wire-format values from buf[idx:limit].

  Every read advances idx.  Exhausting the buffer raises
  ProtocolBufferDecodeError("truncated"); malformed data raises
  ProtocolBufferDecodeError("corrupted").
  """
  def __init__(self, buf, idx, limit):
    self.buf = buf
    self.idx = idx
    self.limit = limit
    return
  def avail(self):
    # Number of unread bytes remaining.
    return self.limit - self.idx
  def buffer(self):
    return self.buf
  def pos(self):
    return self.idx
  def skip(self, n):
    if self.idx + n > self.limit: raise ProtocolBufferDecodeError, "truncated"
    self.idx += n
    return
  def skipData(self, tag):
    # Skip one field's payload; the wire type is the low 3 bits of tag.
    t = tag & 7
    if t == Encoder.NUMERIC:
      self.getVarInt64()
    elif t == Encoder.DOUBLE:
      self.skip(8)
    elif t == Encoder.STRING:
      n = self.getVarInt32()
      self.skip(n)
    elif t == Encoder.STARTGROUP:
      # Recursively skip nested fields until the matching ENDGROUP tag.
      while 1:
        t = self.getVarInt32()
        if (t & 7) == Encoder.ENDGROUP:
          break
        else:
          self.skipData(t)
      if (t - Encoder.ENDGROUP) != (tag - Encoder.STARTGROUP):
        raise ProtocolBufferDecodeError, "corrupted"
    elif t == Encoder.ENDGROUP:
      # A bare ENDGROUP without a preceding STARTGROUP is invalid.
      raise ProtocolBufferDecodeError, "corrupted"
    elif t == Encoder.FLOAT:
      self.skip(4)
    else:
      raise ProtocolBufferDecodeError, "corrupted"
  def get8(self):
    if self.idx >= self.limit: raise ProtocolBufferDecodeError, "truncated"
    c = self.buf[self.idx]
    self.idx += 1
    return c
  def get16(self):
    # Fixed-width values are read little-endian.
    if self.idx + 2 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    c = self.buf[self.idx]
    d = self.buf[self.idx + 1]
    self.idx += 2
    return (d << 8) | c
  def get32(self):
    if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    c = self.buf[self.idx]
    d = self.buf[self.idx + 1]
    e = self.buf[self.idx + 2]
    f = long(self.buf[self.idx + 3])
    self.idx += 4
    return (f << 24) | (e << 16) | (d << 8) | c
  def get64(self):
    if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    c = self.buf[self.idx]
    d = self.buf[self.idx + 1]
    e = self.buf[self.idx + 2]
    f = long(self.buf[self.idx + 3])
    g = long(self.buf[self.idx + 4])
    h = long(self.buf[self.idx + 5])
    i = long(self.buf[self.idx + 6])
    j = long(self.buf[self.idx + 7])
    self.idx += 8
    return ((j << 56) | (i << 48) | (h << 40) | (g << 32) | (f << 24)
            | (e << 16) | (d << 8) | c)
  def getVarInt32(self):
    # Decode a varint and interpret it as a signed 32-bit value.  The
    # encoder sign-extends negatives to 64 bits, so decode up to 64 bits
    # and then range-check the signed result.
    b = self.get8()
    if not (b & 128):
      return b
    result = long(0)
    shift = 0
    while 1:
      result |= (long(b & 127) << shift)
      shift += 7
      if not (b & 128):
        if result >= 0x10000000000000000L:
          raise ProtocolBufferDecodeError, "corrupted"
        break
      if shift >= 64: raise ProtocolBufferDecodeError, "corrupted"
      b = self.get8()
    if result >= 0x8000000000000000L:
      result -= 0x10000000000000000L
    if result >= 0x80000000L or result < -0x80000000L:
      raise ProtocolBufferDecodeError, "corrupted"
    return result
  def getVarInt64(self):
    # Unsigned decode, then reinterpret as two's-complement 64-bit.
    result = self.getVarUint64()
    if result >= (1L << 63):
      result -= (1L << 64)
    return result
  def getVarUint64(self):
    result = long(0)
    shift = 0
    while 1:
      if shift >= 64: raise ProtocolBufferDecodeError, "corrupted"
      b = self.get8()
      result |= (long(b & 127) << shift)
      shift += 7
      if not (b & 128):
        if result >= (1L << 64): raise ProtocolBufferDecodeError, "corrupted"
        return result
    return result
  def getFloat(self):
    if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    a = self.buf[self.idx:self.idx+4]
    self.idx += 4
    return struct.unpack("f", a)[0]
  def getDouble(self):
    if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    a = self.buf[self.idx:self.idx+8]
    self.idx += 8
    return struct.unpack("d", a)[0]
  def getBoolean(self):
    b = self.get8()
    if b != 0 and b != 1: raise ProtocolBufferDecodeError, "corrupted"
    return b
  def getPrefixedString(self):
    # Varint length, then that many raw bytes.
    length = self.getVarInt32()
    if self.idx + length > self.limit:
      raise ProtocolBufferDecodeError, "truncated"
    r = self.buf[self.idx : self.idx + length]
    self.idx += length
    return r.tostring()
  def getRawString(self):
    # Everything from the current position to the limit.
    r = self.buf[self.idx:self.limit]
    self.idx = self.limit
    return r.tostring()
class ProtocolBufferDecodeError(Exception):
  """Raised when wire-format input is truncated or corrupted."""


class ProtocolBufferEncodeError(Exception):
  """Raised when a value cannot be encoded (out of range or uninitialized)."""


class ProtocolBufferReturnError(Exception):
  """Raised by sendCommand() when the server returns a non-200 status."""
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This is the Python counterpart to the RawMessage class defined in rawmessage.h.
To use this, put the following line in your .proto file:
python from google.net.proto.RawMessage import RawMessage
"""
__pychecker__ = 'no-callinit no-argsused'
from google.net.proto import ProtocolBuffer
class RawMessage(ProtocolBuffer.ProtocolMessage):
  """
  This is a special subclass of ProtocolMessage that doesn't interpret its data
  in any way. Instead, it just stores it in a string.
  See rawmessage.h for more details.
  """
  def __init__(self, initial=None):
    # `initial`, if given, is the raw byte string to store.
    self.__contents = ''
    if initial is not None:
      self.MergeFromString(initial)
  def contents(self):
    return self.__contents
  def set_contents(self, contents):
    self.__contents = contents
  def Clear(self):
    self.__contents = ''
  def IsInitialized(self, debug_strs=None):
    # A raw message has no required fields, so it is always initialized.
    return 1
  def __str__(self, prefix="", printElemNumber=0):
    return prefix + self.DebugFormatString(self.__contents)
  def OutputUnchecked(self, e):
    # The wire format is simply the stored bytes, unmodified.
    e.putRawString(self.__contents)
  def TryMerge(self, d):
    # Consume everything remaining in the decoder.
    self.__contents = d.getRawString()
  def MergeFrom(self, pb):
    # Returns 1 on success, 0 when pb is not a RawMessage.
    assert pb is not self
    if pb.__class__ != self.__class__:
      return 0
    self.__contents = pb.__contents
    return 1
  def Equals(self, pb):
    return self.__contents == pb.__contents
  def __eq__(self, other):
    return (other is not None) and (other.__class__ == self.__class__) and self.Equals(other)
  def __ne__(self, other):
    return not (self == other)
  def ByteSize(self):
    # Encoded size equals the raw payload length.
    return len(self.__contents)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
This is the Python counterpart to the RawMessage class defined in rawmessage.h.
To use this, put the following line in your .proto file:
python from google.net.proto.RawMessage import RawMessage
"""
__pychecker__ = 'no-callinit no-argsused'
from google.net.proto import ProtocolBuffer
class RawMessage(ProtocolBuffer.ProtocolMessage):
  """
  This is a special subclass of ProtocolMessage that doesn't interpret its data
  in any way. Instead, it just stores it in a string.
  See rawmessage.h for more details.
  """
  def __init__(self, initial=None):
    # `initial`, if given, is the raw byte string to store.
    self.__contents = ''
    if initial is not None:
      self.MergeFromString(initial)
  def contents(self):
    return self.__contents
  def set_contents(self, contents):
    self.__contents = contents
  def Clear(self):
    self.__contents = ''
  def IsInitialized(self, debug_strs=None):
    # A raw message has no required fields, so it is always initialized.
    return 1
  def __str__(self, prefix="", printElemNumber=0):
    return prefix + self.DebugFormatString(self.__contents)
  def OutputUnchecked(self, e):
    # The wire format is simply the stored bytes, unmodified.
    e.putRawString(self.__contents)
  def TryMerge(self, d):
    # Consume everything remaining in the decoder.
    self.__contents = d.getRawString()
  def MergeFrom(self, pb):
    # Returns 1 on success, 0 when pb is not a RawMessage.
    assert pb is not self
    if pb.__class__ != self.__class__:
      return 0
    self.__contents = pb.__contents
    return 1
  def Equals(self, pb):
    return self.__contents == pb.__contents
  def __eq__(self, other):
    return (other is not None) and (other.__class__ == self.__class__) and self.Equals(other)
  def __ne__(self, other):
    return not (self == other)
  def ByteSize(self):
    # Encoded size equals the raw payload length.
    return len(self.__contents)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import struct
import array
import string
import re
from google.pyglib.gexcept import AbstractMethod
import httplib
__all__ = ['ProtocolMessage', 'Encoder', 'Decoder',
'ProtocolBufferDecodeError',
'ProtocolBufferEncodeError',
'ProtocolBufferReturnError']
# Matches an absolute http(s) URL; groups are (scheme, host, path).
URL_RE = re.compile('^(https?)://([^/]+)(/.*)$')
class ProtocolMessage:
  """Abstract base class for protocol buffer messages.

  Generated message classes override the methods below that raise
  AbstractMethod.  Methods prefixed with _C are hooks for an optional
  C-accelerated implementation; when they raise AbstractMethod the
  pure-Python Encoder/Decoder fallback in this module is used instead.
  """
  def __init__(self, contents=None):
    # Subclasses initialize their fields, then merge `contents` if given.
    raise AbstractMethod
  def Clear(self):
    # Reset all fields to their default (unset) state.
    raise AbstractMethod
  def IsInitialized(self, debug_strs=None):
    # True when every required field is set; if debug_strs is a list,
    # human-readable problem descriptions are appended to it.
    raise AbstractMethod
  def Encode(self):
    # Serialize this message to its wire-format string, preferring the
    # C implementation and falling back to the pure-Python Encoder.
    try:
      return self._CEncode()
    except AbstractMethod:
      e = Encoder()
      self.Output(e)
      return e.buffer().tostring()
  def _CEncode(self):
    raise AbstractMethod
  def ParseFromString(self, s):
    # Clear-then-merge; the inverse of Encode().
    self.Clear()
    self.MergeFromString(s)
    return
  def MergeFromString(self, s):
    # Merge wire-format data from string s into this message, raising
    # ProtocolBufferDecodeError if required fields remain unset.
    try:
      self._CMergeFromString(s)
      dbg = []
      if not self.IsInitialized(dbg):
        raise ProtocolBufferDecodeError, '\n\t'.join(dbg)
    except AbstractMethod:
      a = array.array('B')
      a.fromstring(s)
      d = Decoder(a, 0, len(a))
      self.Merge(d)
    return
  def _CMergeFromString(self, s):
    raise AbstractMethod
  def __getstate__(self):
    # Pickle support: a message pickles as its wire-format string.
    return self.Encode()
  def __setstate__(self, contents_):
    self.__init__(contents=contents_)
  def sendCommand(self, server, url, response, follow_redirects=1,
                  secure=0, keyfile=None, certfile=None):
    # POST this message to http(s)://server/url and parse the reply body
    # into `response` (another ProtocolMessage, or None to discard it).
    # Follows up to `follow_redirects` HTTP 302 redirects, switching to
    # HTTPS when the Location URL requires it.  Returns `response`.
    # Raises ProtocolBufferReturnError on a non-200 final status.
    data = self.Encode()
    if secure:
      if keyfile and certfile:
        conn = httplib.HTTPSConnection(server, key_file=keyfile,
                                       cert_file=certfile)
      else:
        conn = httplib.HTTPSConnection(server)
    else:
      conn = httplib.HTTPConnection(server)
    conn.putrequest("POST", url)
    conn.putheader("Content-Length", "%d" %len(data))
    conn.endheaders()
    conn.send(data)
    resp = conn.getresponse()
    if follow_redirects > 0 and resp.status == 302:
      m = URL_RE.match(resp.getheader('Location'))
      if m:
        protocol, server, url = m.groups()
        return self.sendCommand(server, url, response,
                                follow_redirects=follow_redirects - 1,
                                secure=(protocol == 'https'),
                                keyfile=keyfile,
                                certfile=certfile)
    if resp.status != 200:
      raise ProtocolBufferReturnError(resp.status)
    if response is not None:
      response.ParseFromString(resp.read())
    return response
  def sendSecureCommand(self, server, keyfile, certfile, url, response,
                        follow_redirects=1):
    # Convenience wrapper around sendCommand() with secure=1.
    return self.sendCommand(server, url, response,
                            follow_redirects=follow_redirects,
                            secure=1, keyfile=keyfile, certfile=certfile)
  def __str__(self, prefix="", printElemNumber=0):
    raise AbstractMethod
  def ToASCII(self):
    # Text-format output using full symbolic field names.
    return self._CToASCII(ProtocolMessage._SYMBOLIC_FULL_ASCII)
  def ToCompactASCII(self):
    # Text-format output using numeric field tags.
    return self._CToASCII(ProtocolMessage._NUMERIC_ASCII)
  def ToShortASCII(self):
    # Text-format output using short symbolic names.
    return self._CToASCII(ProtocolMessage._SYMBOLIC_SHORT_ASCII)
  _NUMERIC_ASCII = 0
  _SYMBOLIC_SHORT_ASCII = 1
  _SYMBOLIC_FULL_ASCII = 2
  def _CToASCII(self, output_format):
    raise AbstractMethod
  def ParseASCII(self, ascii_string):
    raise AbstractMethod
  def ParseASCIIIgnoreUnknown(self, ascii_string):
    raise AbstractMethod
  def Equals(self, other):
    # Deep value comparison; callers ensure `other` is the same class.
    raise AbstractMethod
  def __eq__(self, other):
    if other.__class__ is self.__class__:
      return self.Equals(other)
    return NotImplemented
  def __ne__(self, other):
    if other.__class__ is self.__class__:
      return not self.Equals(other)
    return NotImplemented
  def Output(self, e):
    # Encode into Encoder `e`, first checking required fields are set.
    dbg = []
    if not self.IsInitialized(dbg):
      raise ProtocolBufferEncodeError, '\n\t'.join(dbg)
    self.OutputUnchecked(e)
    return
  def OutputUnchecked(self, e):
    raise AbstractMethod
  def Parse(self, d):
    # Clear-then-merge from Decoder `d`.
    self.Clear()
    self.Merge(d)
    return
  def Merge(self, d):
    # Merge from Decoder `d`, validating required fields afterwards.
    self.TryMerge(d)
    dbg = []
    if not self.IsInitialized(dbg):
      raise ProtocolBufferDecodeError, '\n\t'.join(dbg)
    return
  def TryMerge(self, d):
    raise AbstractMethod
  def CopyFrom(self, pb):
    # Make self an exact copy of pb (no-op when pb equals self).
    if (pb == self): return
    self.Clear()
    self.MergeFrom(pb)
  def MergeFrom(self, pb):
    raise AbstractMethod
  def lengthVarInt32(self, n):
    return self.lengthVarInt64(n)
  def lengthVarInt64(self, n):
    # Number of bytes needed to varint-encode n; negative values always
    # take the maximum 10 bytes (encoded as 64-bit two's complement).
    if n < 0:
      return 10
    result = 0
    while 1:
      result += 1
      n >>= 7
      if n == 0:
        break
    return result
  def lengthString(self, n):
    # Encoded size of a length-delimited field whose payload is n bytes.
    return self.lengthVarInt32(n) + n
  def DebugFormat(self, value):
    return "%s" % value
  def DebugFormatInt32(self, value):
    # Values of large magnitude are printed as unsigned hex.
    if (value <= -2000000000 or value >= 2000000000):
      return self.DebugFormatFixed32(value)
    return "%d" % value
  def DebugFormatInt64(self, value):
    if (value <= -2000000000 or value >= 2000000000):
      return self.DebugFormatFixed64(value)
    return "%d" % value
  def DebugFormatString(self, value):
    # Quote and escape a string for text-format output.
    def escape(c):
      o = ord(c)
      if o == 10: return r"\n"
      if o == 39: return r"\'"
      if o == 34: return r'\"'
      if o == 92: return r"\\"
      if o >= 127 or o < 32: return "\\%03o" % o
      return c
    return '"' + "".join([escape(c) for c in value]) + '"'
  def DebugFormatFloat(self, value):
    return "%ff" % value
  def DebugFormatFixed32(self, value):
    # Print as unsigned 32-bit hex.
    if (value < 0): value += (1L<<32)
    return "0x%x" % value
  def DebugFormatFixed64(self, value):
    # Print as unsigned 64-bit hex.
    if (value < 0): value += (1L<<64)
    return "0x%x" % value
  def DebugFormatBool(self, value):
    if value:
      return "true"
    else:
      return "false"
class Encoder:
  """Serializes values into protocol buffer wire format.

  Encoded bytes accumulate in an array('B'); callers retrieve the result
  via buffer().  The class constants are the wire-type codes carried in
  the low 3 bits of every field tag.
  """
  NUMERIC = 0
  DOUBLE = 1
  STRING = 2
  STARTGROUP = 3
  ENDGROUP = 4
  FLOAT = 5
  MAX_TYPE = 6
  def __init__(self):
    self.buf = array.array('B')
    return
  def buffer(self):
    # Returns the underlying byte array (not a copy).
    return self.buf
  def put8(self, v):
    if v < 0 or v >= (1<<8): raise ProtocolBufferEncodeError, "u8 too big"
    self.buf.append(v & 255)
    return
  def put16(self, v):
    # Fixed-width values are written little-endian.
    if v < 0 or v >= (1<<16): raise ProtocolBufferEncodeError, "u16 too big"
    self.buf.append((v >> 0) & 255)
    self.buf.append((v >> 8) & 255)
    return
  def put32(self, v):
    if v < 0 or v >= (1L<<32): raise ProtocolBufferEncodeError, "u32 too big"
    self.buf.append((v >> 0) & 255)
    self.buf.append((v >> 8) & 255)
    self.buf.append((v >> 16) & 255)
    self.buf.append((v >> 24) & 255)
    return
  def put64(self, v):
    if v < 0 or v >= (1L<<64): raise ProtocolBufferEncodeError, "u64 too big"
    self.buf.append((v >> 0) & 255)
    self.buf.append((v >> 8) & 255)
    self.buf.append((v >> 16) & 255)
    self.buf.append((v >> 24) & 255)
    self.buf.append((v >> 32) & 255)
    self.buf.append((v >> 40) & 255)
    self.buf.append((v >> 48) & 255)
    self.buf.append((v >> 56) & 255)
    return
  def putVarInt32(self, v):
    # Varint-encode a signed 32-bit value: 7 bits per byte, low bits
    # first, high bit of each byte meaning "more bytes follow".
    # Negatives are sign-extended to 64 bits, so they take 10 bytes.
    buf_append = self.buf.append
    if v & 127 == v:
      buf_append(v)
      return
    if v >= 0x80000000 or v < -0x80000000:
      raise ProtocolBufferEncodeError, "int32 too big"
    if v < 0:
      v += 0x10000000000000000
    while True:
      bits = v & 127
      v >>= 7
      if v:
        bits |= 128
      buf_append(bits)
      if not v:
        break
    return
  def putVarInt64(self, v):
    buf_append = self.buf.append
    if v >= 0x8000000000000000 or v < -0x8000000000000000:
      raise ProtocolBufferEncodeError, "int64 too big"
    if v < 0:
      # Two's complement: encode as the equivalent unsigned 64-bit value.
      v += 0x10000000000000000
    while True:
      bits = v & 127
      v >>= 7
      if v:
        bits |= 128
      buf_append(bits)
      if not v:
        break
    return
  def putVarUint64(self, v):
    buf_append = self.buf.append
    if v < 0 or v >= 0x10000000000000000:
      raise ProtocolBufferEncodeError, "uint64 too big"
    while True:
      bits = v & 127
      v >>= 7
      if v:
        bits |= 128
      buf_append(bits)
      if not v:
        break
    return
  def putFloat(self, v):
    # Native-endian IEEE-754 single precision (struct format "f").
    a = array.array('B')
    a.fromstring(struct.pack("f", v))
    self.buf.extend(a)
    return
  def putDouble(self, v):
    # Native-endian IEEE-754 double precision (struct format "d").
    a = array.array('B')
    a.fromstring(struct.pack("d", v))
    self.buf.extend(a)
    return
  def putBoolean(self, v):
    if v:
      self.buf.append(1)
    else:
      self.buf.append(0)
    return
  def putPrefixedString(self, v):
    # Length-delimited string: varint byte count, then the raw bytes.
    self.putVarInt32(len(v))
    self.buf.fromstring(v)
    return
  def putRawString(self, v):
    # Appends v with no length prefix.
    self.buf.fromstring(v)
class Decoder:
  """Reads wire-format values from buf[idx:limit].

  Every read advances idx.  Exhausting the buffer raises
  ProtocolBufferDecodeError("truncated"); malformed data raises
  ProtocolBufferDecodeError("corrupted").
  """
  def __init__(self, buf, idx, limit):
    self.buf = buf
    self.idx = idx
    self.limit = limit
    return
  def avail(self):
    # Number of unread bytes remaining.
    return self.limit - self.idx
  def buffer(self):
    return self.buf
  def pos(self):
    return self.idx
  def skip(self, n):
    if self.idx + n > self.limit: raise ProtocolBufferDecodeError, "truncated"
    self.idx += n
    return
  def skipData(self, tag):
    # Skip one field's payload; the wire type is the low 3 bits of tag.
    t = tag & 7
    if t == Encoder.NUMERIC:
      self.getVarInt64()
    elif t == Encoder.DOUBLE:
      self.skip(8)
    elif t == Encoder.STRING:
      n = self.getVarInt32()
      self.skip(n)
    elif t == Encoder.STARTGROUP:
      # Recursively skip nested fields until the matching ENDGROUP tag.
      while 1:
        t = self.getVarInt32()
        if (t & 7) == Encoder.ENDGROUP:
          break
        else:
          self.skipData(t)
      if (t - Encoder.ENDGROUP) != (tag - Encoder.STARTGROUP):
        raise ProtocolBufferDecodeError, "corrupted"
    elif t == Encoder.ENDGROUP:
      # A bare ENDGROUP without a preceding STARTGROUP is invalid.
      raise ProtocolBufferDecodeError, "corrupted"
    elif t == Encoder.FLOAT:
      self.skip(4)
    else:
      raise ProtocolBufferDecodeError, "corrupted"
  def get8(self):
    if self.idx >= self.limit: raise ProtocolBufferDecodeError, "truncated"
    c = self.buf[self.idx]
    self.idx += 1
    return c
  def get16(self):
    # Fixed-width values are read little-endian.
    if self.idx + 2 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    c = self.buf[self.idx]
    d = self.buf[self.idx + 1]
    self.idx += 2
    return (d << 8) | c
  def get32(self):
    if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    c = self.buf[self.idx]
    d = self.buf[self.idx + 1]
    e = self.buf[self.idx + 2]
    f = long(self.buf[self.idx + 3])
    self.idx += 4
    return (f << 24) | (e << 16) | (d << 8) | c
  def get64(self):
    if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    c = self.buf[self.idx]
    d = self.buf[self.idx + 1]
    e = self.buf[self.idx + 2]
    f = long(self.buf[self.idx + 3])
    g = long(self.buf[self.idx + 4])
    h = long(self.buf[self.idx + 5])
    i = long(self.buf[self.idx + 6])
    j = long(self.buf[self.idx + 7])
    self.idx += 8
    return ((j << 56) | (i << 48) | (h << 40) | (g << 32) | (f << 24)
            | (e << 16) | (d << 8) | c)
  def getVarInt32(self):
    # Decode a varint and interpret it as a signed 32-bit value.  The
    # encoder sign-extends negatives to 64 bits, so decode up to 64 bits
    # and then range-check the signed result.
    b = self.get8()
    if not (b & 128):
      return b
    result = long(0)
    shift = 0
    while 1:
      result |= (long(b & 127) << shift)
      shift += 7
      if not (b & 128):
        if result >= 0x10000000000000000L:
          raise ProtocolBufferDecodeError, "corrupted"
        break
      if shift >= 64: raise ProtocolBufferDecodeError, "corrupted"
      b = self.get8()
    if result >= 0x8000000000000000L:
      result -= 0x10000000000000000L
    if result >= 0x80000000L or result < -0x80000000L:
      raise ProtocolBufferDecodeError, "corrupted"
    return result
  def getVarInt64(self):
    # Unsigned decode, then reinterpret as two's-complement 64-bit.
    result = self.getVarUint64()
    if result >= (1L << 63):
      result -= (1L << 64)
    return result
  def getVarUint64(self):
    result = long(0)
    shift = 0
    while 1:
      if shift >= 64: raise ProtocolBufferDecodeError, "corrupted"
      b = self.get8()
      result |= (long(b & 127) << shift)
      shift += 7
      if not (b & 128):
        if result >= (1L << 64): raise ProtocolBufferDecodeError, "corrupted"
        return result
    return result
  def getFloat(self):
    if self.idx + 4 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    a = self.buf[self.idx:self.idx+4]
    self.idx += 4
    return struct.unpack("f", a)[0]
  def getDouble(self):
    if self.idx + 8 > self.limit: raise ProtocolBufferDecodeError, "truncated"
    a = self.buf[self.idx:self.idx+8]
    self.idx += 8
    return struct.unpack("d", a)[0]
  def getBoolean(self):
    b = self.get8()
    if b != 0 and b != 1: raise ProtocolBufferDecodeError, "corrupted"
    return b
  def getPrefixedString(self):
    # Varint length, then that many raw bytes.
    length = self.getVarInt32()
    if self.idx + length > self.limit:
      raise ProtocolBufferDecodeError, "truncated"
    r = self.buf[self.idx : self.idx + length]
    self.idx += length
    return r.tostring()
  def getRawString(self):
    # Everything from the current position to the limit.
    r = self.buf[self.idx:self.limit]
    self.idx = self.limit
    return r.tostring()
class ProtocolBufferDecodeError(Exception):
  """Raised when wire-format input is truncated or corrupted."""


class ProtocolBufferEncodeError(Exception):
  """Raised when a value cannot be encoded (out of range or uninitialized)."""


class ProtocolBufferReturnError(Exception):
  """Raised by sendCommand() when the server returns a non-200 status."""
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import cgi
import datetime
import wsgiref.handlers
from google.appengine.ext import db
from google.appengine.api import users
from google.appengine.ext import webapp
class Greeting(db.Model):
  # A single guestbook entry: optional signed-in author, the message
  # text, and an auto-populated creation timestamp.
  author = db.UserProperty()
  content = db.StringProperty(multiline=True)
  date = db.DateTimeProperty(auto_now_add=True)
class MainPage(webapp.RequestHandler):
  """Renders the ten most recent greetings followed by the signing form."""
  def get(self):
    self.response.out.write('<html><body>')
    greetings = db.GqlQuery("SELECT * "
                            "FROM Greeting "
                            "ORDER BY date DESC LIMIT 10")
    for greeting in greetings:
      if greeting.author:
        # Escape the nickname: it is user-controlled text being
        # interpolated into HTML (stored-XSS risk otherwise, just like
        # the content below).
        self.response.out.write('<b>%s</b> wrote:' %
                                cgi.escape(greeting.author.nickname()))
      else:
        self.response.out.write('An anonymous person wrote:')
      self.response.out.write('<blockquote>%s</blockquote>' %
                              cgi.escape(greeting.content))
    self.response.out.write("""
          <form action="/sign" method="post">
            <div><textarea name="content" rows="3" cols="60"></textarea></div>
            <div><input type="submit" value="Sign Guestbook"></div>
          </form>
        </body>
      </html>""")
class Guestbook(webapp.RequestHandler):
  """Handles POSTs from the signing form by storing a new Greeting."""
  def post(self):
    greeting = Greeting()
    if users.get_current_user():
      # Record the author only for signed-in users; anonymous posts
      # leave `author` unset.
      greeting.author = users.get_current_user()
    greeting.content = self.request.get('content')
    greeting.put()
    self.redirect('/')
# URL-to-handler routing table for the guestbook application.
application = webapp.WSGIApplication([
  ('/', MainPage),
  ('/sign', Guestbook)
], debug=True)
def main():
  # Run the WSGI application under App Engine's CGI environment.
  wsgiref.handlers.CGIHandler().run(application)
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import cgi
import datetime
import wsgiref.handlers
from google.appengine.ext import db
from google.appengine.api import users
from google.appengine.ext import webapp
class Greeting(db.Model):
  # A single guestbook entry: optional signed-in author, the message
  # text, and an auto-populated creation timestamp.
  author = db.UserProperty()
  content = db.StringProperty(multiline=True)
  date = db.DateTimeProperty(auto_now_add=True)
class MainPage(webapp.RequestHandler):
  """Renders the ten most recent greetings followed by the signing form."""
  def get(self):
    self.response.out.write('<html><body>')
    greetings = db.GqlQuery("SELECT * "
                            "FROM Greeting "
                            "ORDER BY date DESC LIMIT 10")
    for greeting in greetings:
      if greeting.author:
        # Escape the nickname: it is user-controlled text being
        # interpolated into HTML (stored-XSS risk otherwise, just like
        # the content below).
        self.response.out.write('<b>%s</b> wrote:' %
                                cgi.escape(greeting.author.nickname()))
      else:
        self.response.out.write('An anonymous person wrote:')
      self.response.out.write('<blockquote>%s</blockquote>' %
                              cgi.escape(greeting.content))
    self.response.out.write("""
          <form action="/sign" method="post">
            <div><textarea name="content" rows="3" cols="60"></textarea></div>
            <div><input type="submit" value="Sign Guestbook"></div>
          </form>
        </body>
      </html>""")
class Guestbook(webapp.RequestHandler):
  """Accepts POSTs from the signing form and stores a new Greeting."""

  def post(self):
    greeting = Greeting()
    # Look the current user up once instead of issuing the same API call
    # twice (once for the truth test, once for the assignment).
    user = users.get_current_user()
    if user:
      greeting.author = user
    greeting.content = self.request.get('content')
    greeting.put()
    self.redirect('/')
# URL routing table for the guestbook app.  debug=True makes webapp show
# tracebacks in the browser -- a development-only setting.
application = webapp.WSGIApplication([
  ('/', MainPage),
  ('/sign', Guestbook)
], debug=True)
def main():
  # CGI entry point: App Engine runs this script for each request.
  wsgiref.handlers.CGIHandler().run(application)
if __name__ == '__main__':
  main()
| Python |
import sys, time
from django.conf import settings
from django.db import connection, transaction, backend
from django.core import management
from django.dispatch import dispatcher
from django.test import signals
from django.template import Template
# The prefix to put on the default database name when creating
# the test database.
TEST_DATABASE_PREFIX = 'test_'
def instrumented_test_render(self, context):
    """An instrumented Template render method, providing a signal
    that can be intercepted by the test system Client
    """
    # Fire the signal *before* delegating to the real node rendering, so
    # listeners observe the template and context even if rendering raises.
    dispatcher.send(signal=signals.template_rendered, sender=self, template=self, context=context)
    return self.nodelist.render(context)
def setup_test_environment():
    """Perform any global pre-test setup. This involves:
        - Installing the instrumented test renderer
    """
    # Keep a reference to the real render method so that
    # teardown_test_environment() can restore it afterwards.
    Template.original_render = Template.render
    Template.render = instrumented_test_render
def teardown_test_environment():
    """Perform any global post-test teardown. This involves:
        - Restoring the original test renderer
    """
    # Undo what setup_test_environment() did, and drop the stashed
    # reference so Template carries no leftover test attribute.
    Template.render = Template.original_render
    del Template.original_render
def _set_autocommit(connection):
    "Make sure a connection is in autocommit mode."
    raw = connection.connection
    if hasattr(raw, "autocommit"):
        # DB-API drivers such as MySQLdb expose an autocommit() method.
        raw.autocommit(True)
    elif hasattr(raw, "set_isolation_level"):
        # psycopg exposes set_isolation_level(); level 0 means autocommit.
        raw.set_isolation_level(0)
def create_test_db(verbosity=1, autoclobber=False):
    """Create (and connect to) the throwaway database used for the test run.

    Side effect: rebinds settings.DATABASE_NAME to the test database's
    name, so all subsequent ORM traffic targets the test database.  If a
    database with that name already exists, the user is asked for
    confirmation before it is destroyed -- unless `autoclobber` is true,
    in which case it is dropped and recreated without prompting.
    """
    if verbosity >= 1:
        print "Creating test database..."
    # If we're using SQLite, it's more convenient to test against an
    # in-memory database.
    if settings.DATABASE_ENGINE == "sqlite3":
        TEST_DATABASE_NAME = ":memory:"
    else:
        if settings.TEST_DATABASE_NAME:
            TEST_DATABASE_NAME = settings.TEST_DATABASE_NAME
        else:
            TEST_DATABASE_NAME = TEST_DATABASE_PREFIX + settings.DATABASE_NAME
    # Create the test database and connect to it. We need to autocommit
    # if the database supports it because PostgreSQL doesn't allow
    # CREATE/DROP DATABASE statements within transactions.
    # NOTE(review): the CREATE DATABASE below also runs in the sqlite3
    # ":memory:" case, where it presumably fails into the except branch --
    # confirm this is the intended flow for sqlite.
    cursor = connection.cursor()
    _set_autocommit(connection)
    try:
        cursor.execute("CREATE DATABASE %s" % backend.quote_name(TEST_DATABASE_NAME))
    except Exception, e:
        sys.stderr.write("Got an error creating the test database: %s\n" % e)
        if not autoclobber:
            confirm = raw_input("It appears the test database, %s, already exists. Type 'yes' to delete it, or 'no' to cancel: " % TEST_DATABASE_NAME)
        # `confirm` is only evaluated when autoclobber is false (the `or`
        # short-circuits), so it is never read while unbound.
        if autoclobber or confirm == 'yes':
            try:
                if verbosity >= 1:
                    print "Destroying old test database..."
                cursor.execute("DROP DATABASE %s" % backend.quote_name(TEST_DATABASE_NAME))
                if verbosity >= 1:
                    print "Creating test database..."
                cursor.execute("CREATE DATABASE %s" % backend.quote_name(TEST_DATABASE_NAME))
            except Exception, e:
                sys.stderr.write("Got an error recreating the test database: %s\n" % e)
                sys.exit(2)
        else:
            print "Tests cancelled."
            sys.exit(1)
    connection.close()
    # Point the ORM at the new database and build the app schema in it.
    settings.DATABASE_NAME = TEST_DATABASE_NAME
    management.syncdb(verbosity, interactive=False)
    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database.
    cursor = connection.cursor()
def destroy_test_db(old_database_name, verbosity=1):
    """Drop the test database and restore settings.DATABASE_NAME.

    `old_database_name` is the name settings held *before*
    create_test_db() rebound it.
    """
    # Unless we're using SQLite, remove the test database to clean up after
    # ourselves. Connect to the previous database (not the test database)
    # to do so, because it's not allowed to delete a database while being
    # connected to it.
    if verbosity >= 1:
        print "Destroying test database..."
    connection.close()
    # Swap the names: remember the test DB's name, restore the original.
    TEST_DATABASE_NAME = settings.DATABASE_NAME
    settings.DATABASE_NAME = old_database_name
    if settings.DATABASE_ENGINE != "sqlite3":
        # Opening a cursor reconnects -- now against the original database.
        cursor = connection.cursor()
        _set_autocommit(connection)
        time.sleep(1) # To avoid "database is being accessed by other users" errors.
        cursor.execute("DROP DATABASE %s" % backend.quote_name(TEST_DATABASE_NAME))
        connection.close()
| Python |
import re, doctest, unittest
from django.db import transaction
from django.core import management
from django.db.models import get_apps
def normalize_long_ints(s):
    """Rewrite Python long-integer literals ("22L") as plain ones ("22").

    Only standalone numbers immediately followed by ``L`` are rewritten;
    tokens embedded in identifiers (e.g. "fooL", "x22L", "22Lx") are left
    untouched.  (A lambda assigned to a name is discouraged by PEP 8, so
    this is a proper, documented ``def``.)
    """
    return re.sub(r'(?<![\w])(\d+)L(?![\w])', '\\1', s)
class OutputChecker(doctest.OutputChecker):
    """Output checker that treats longs ("22L") and plain ints ("22") alike."""

    def check_output(self, want, got, optionflags):
        # Doctest compares output as exact text, so "22L" never equals
        # "22".  Accept the standard comparison first, then retry with
        # long-integer suffixes normalized away on both sides.
        if doctest.OutputChecker.check_output(self, want, got, optionflags):
            return True
        return normalize_long_ints(want) == normalize_long_ints(got)
class DocTestRunner(doctest.DocTestRunner):
    """Doctest runner that enables ELLIPSIS and rolls back on DB errors."""

    def __init__(self, *args, **kwargs):
        doctest.DocTestRunner.__init__(self, *args, **kwargs)
        # Allow '...' wildcards in expected output by default.
        self.optionflags = doctest.ELLIPSIS

    def report_unexpected_exception(self, out, test, example, exc_info):
        doctest.DocTestRunner.report_unexpected_exception(self, out, test,
                                                          example, exc_info)
        # Rollback, in case of database errors. Otherwise they'd have
        # side effects on other tests.  (`transaction` is already imported
        # at module level; the former local re-import was redundant.)
        transaction.rollback_unless_managed()
class TestCase(unittest.TestCase):
    def install_fixtures(self):
        """If the Test Case class has a 'fixtures' member, clear the database and
        install the named fixtures at the start of each test.
        """
        # The flush runs unconditionally, so every test starts from a clean
        # database whether or not the class declares fixtures.
        management.flush(verbosity=0, interactive=False)
        if hasattr(self, 'fixtures'):
            management.load_data(self.fixtures, verbosity=0)
    def run(self, result=None):
        """Wrapper around default run method so that user-defined Test Cases
        automatically call install_fixtures without having to include a call to
        super().
        """
        self.install_fixtures()
        super(TestCase, self).run(result)
| Python |
# Signal sent by the instrumented template renderer.  A bare object() is
# used purely as a unique sentinel; it carries no data of its own.
template_rendered = object()
import unittest, doctest
from django.conf import settings
from django.test.utils import setup_test_environment, teardown_test_environment
from django.test.utils import create_test_db, destroy_test_db
from django.test.testcases import OutputChecker, DocTestRunner
# The module name for tests outside models.py
TEST_MODULE = 'tests'
# One shared checker instance suffices: OutputChecker keeps no per-test state.
doctestOutputChecker = OutputChecker()
def build_suite(app_module):
    """Create a complete Django test suite for the provided application module.

    The suite holds the unittests and doctests found in the app's models
    module itself, plus those in a sibling 'tests' module when one exists.
    A broken (but present) tests module re-raises its ImportError rather
    than being silently skipped.
    """
    suite = unittest.TestSuite()

    # Load unit and doctests in the models.py file
    suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(app_module))
    try:
        suite.addTest(doctest.DocTestSuite(app_module,
                                           checker=doctestOutputChecker,
                                           runner=DocTestRunner))
    except ValueError:
        # No doc tests in models.py
        pass

    # Check to see if a separate 'tests' module exists parallel to the
    # models module
    try:
        app_path = app_module.__name__.split('.')[:-1]
        test_module = __import__('.'.join(app_path + [TEST_MODULE]), {}, {}, TEST_MODULE)

        suite.addTest(unittest.defaultTestLoader.loadTestsFromModule(test_module))
        try:
            suite.addTest(doctest.DocTestSuite(test_module,
                                               checker=doctestOutputChecker,
                                               runner=DocTestRunner))
        except ValueError:
            # No doc tests in tests.py
            pass
    except ImportError:
        # (The previously-bound exception variable was never used.)
        # Couldn't import tests.py. Was it due to a missing file, or
        # due to an import error in a tests.py that actually exists?
        import os.path
        from imp import find_module
        try:
            mod = find_module(TEST_MODULE, [os.path.dirname(app_module.__file__)])
        except ImportError:
            # 'tests' module doesn't exist. Move on.
            pass
        else:
            # The module exists, so there must be an import error in the
            # test module itself. We don't need the module; close the file
            # handle returned by find_module.
            mod[0].close()
            raise
    return suite
def run_tests(module_list, verbosity=1, extra_tests=None):
    """
    Run the unit tests for all the modules in the provided list.

    This testrunner will search each of the modules in the provided list,
    looking for doctests and unittests in models.py or tests.py within
    the module. A list of 'extra' tests may also be provided; these tests
    will be added to the test suite.

    Returns the number of tests that did not pass (failures plus errors).
    """
    # None instead of a mutable [] default: a shared default list would
    # survive across calls.  Behavior for callers is unchanged.
    if extra_tests is None:
        extra_tests = []

    setup_test_environment()

    settings.DEBUG = False
    suite = unittest.TestSuite()

    for module in module_list:
        suite.addTest(build_suite(module))

    for test in extra_tests:
        suite.addTest(test)

    old_name = settings.DATABASE_NAME
    create_test_db(verbosity)
    result = unittest.TextTestRunner(verbosity=verbosity).run(suite)
    destroy_test_db(old_name, verbosity)
    teardown_test_environment()

    # Count errors as well as failures: a test that raised an unexpected
    # exception did not pass, and callers use this value as an exit code.
    return len(result.failures) + len(result.errors)
| Python |
# Module doctest.
# Released to the public domain 16-Jan-2001, by Tim Peters (tim@python.org).
# Major enhancements and refactoring by:
# Jim Fulton
# Edward Loper
# Provided as-is; use at your own risk; no warranty; no promises; enjoy!
r"""Module doctest -- a framework for running examples in docstrings.
In simplest use, end each module M to be tested with:
def _test():
import doctest
doctest.testmod()
if __name__ == "__main__":
_test()
Then running the module as a script will cause the examples in the
docstrings to get executed and verified:
python M.py
This won't display anything unless an example fails, in which case the
failing example(s) and the cause(s) of the failure(s) are printed to stdout
(why not stderr? because stderr is a lame hack <0.2 wink>), and the final
line of output is "Test failed.".
Run it with the -v switch instead:
python M.py -v
and a detailed report of all examples tried is printed to stdout, along
with assorted summaries at the end.
You can force verbose mode by passing "verbose=True" to testmod, or prohibit
it by passing "verbose=False". In either of those cases, sys.argv is not
examined by testmod.
There are a variety of other ways to run doctests, including integration
with the unittest framework, and support for running non-Python text
files containing doctests. There are also many ways to override parts
of doctest's default behaviors. See the Library Reference Manual for
details.
"""
__docformat__ = 'reStructuredText en'
__all__ = [
    # 0. Option Flags
'register_optionflag',
'DONT_ACCEPT_TRUE_FOR_1',
'DONT_ACCEPT_BLANKLINE',
'NORMALIZE_WHITESPACE',
'ELLIPSIS',
'IGNORE_EXCEPTION_DETAIL',
'COMPARISON_FLAGS',
'REPORT_UDIFF',
'REPORT_CDIFF',
'REPORT_NDIFF',
'REPORT_ONLY_FIRST_FAILURE',
'REPORTING_FLAGS',
# 1. Utility Functions
'is_private',
# 2. Example & DocTest
'Example',
'DocTest',
# 3. Doctest Parser
'DocTestParser',
# 4. Doctest Finder
'DocTestFinder',
# 5. Doctest Runner
'DocTestRunner',
'OutputChecker',
'DocTestFailure',
'UnexpectedException',
'DebugRunner',
# 6. Test Functions
'testmod',
'testfile',
'run_docstring_examples',
# 7. Tester
'Tester',
# 8. Unittest Support
'DocTestSuite',
'DocFileSuite',
'set_unittest_reportflags',
# 9. Debugging Support
'script_from_examples',
'testsource',
'debug_src',
'debug',
]
import __future__
import sys, traceback, inspect, linecache, os, re, types
import unittest, difflib, pdb, tempfile
import warnings
from StringIO import StringIO
# Don't whine about the deprecated is_private function in this
# module's tests.
warnings.filterwarnings("ignore", "is_private", DeprecationWarning,
__name__, 0)
# There are 4 basic classes:
# - Example: a <source, want> pair, plus an intra-docstring line number.
# - DocTest: a collection of examples, parsed from a docstring, plus
# info about where the docstring came from (name, filename, lineno).
# - DocTestFinder: extracts DocTests from a given object's docstring and
# its contained objects' docstrings.
# - DocTestRunner: runs DocTest cases, and accumulates statistics.
#
# So the basic picture is:
#
# list of:
# +------+ +---------+ +-------+
# |object| --DocTestFinder-> | DocTest | --DocTestRunner-> |results|
# +------+ +---------+ +-------+
# | Example |
# | ... |
# | Example |
# +---------+
# Option constants.
OPTIONFLAGS_BY_NAME = {}

def register_optionflag(name):
    """Allocate the next free bit as an option flag, register it under
    `name`, and return it.  Successive flags are distinct powers of two,
    so they can be combined with bitwise OR."""
    bit = 1 << len(OPTIONFLAGS_BY_NAME)
    OPTIONFLAGS_BY_NAME[name] = bit
    return bit
DONT_ACCEPT_TRUE_FOR_1 = register_optionflag('DONT_ACCEPT_TRUE_FOR_1')
DONT_ACCEPT_BLANKLINE = register_optionflag('DONT_ACCEPT_BLANKLINE')
NORMALIZE_WHITESPACE = register_optionflag('NORMALIZE_WHITESPACE')
ELLIPSIS = register_optionflag('ELLIPSIS')
IGNORE_EXCEPTION_DETAIL = register_optionflag('IGNORE_EXCEPTION_DETAIL')
COMPARISON_FLAGS = (DONT_ACCEPT_TRUE_FOR_1 |
DONT_ACCEPT_BLANKLINE |
NORMALIZE_WHITESPACE |
ELLIPSIS |
IGNORE_EXCEPTION_DETAIL)
REPORT_UDIFF = register_optionflag('REPORT_UDIFF')
REPORT_CDIFF = register_optionflag('REPORT_CDIFF')
REPORT_NDIFF = register_optionflag('REPORT_NDIFF')
REPORT_ONLY_FIRST_FAILURE = register_optionflag('REPORT_ONLY_FIRST_FAILURE')
REPORTING_FLAGS = (REPORT_UDIFF |
REPORT_CDIFF |
REPORT_NDIFF |
REPORT_ONLY_FIRST_FAILURE)
# Special string markers for use in `want` strings:
BLANKLINE_MARKER = '<BLANKLINE>'
ELLIPSIS_MARKER = '...'
######################################################################
## Table of Contents
######################################################################
# 1. Utility Functions
# 2. Example & DocTest -- store test cases
# 3. DocTest Parser -- extracts examples from strings
# 4. DocTest Finder -- extracts test cases from objects
# 5. DocTest Runner -- runs test cases
# 6. Test Functions -- convenient wrappers for testing
# 7. Tester Class -- for backwards compatibility
# 8. Unittest Support
# 9. Debugging Support
# 10. Example Usage
######################################################################
## 1. Utility Functions
######################################################################
def is_private(prefix, base):
    """prefix, base -> true iff name prefix + "." + base is "private".

    Prefix may be an empty string, and base does not contain a period.
    Prefix is ignored (although functions you write conforming to this
    protocol may make use of it).

    Return true iff base begins with an (at least one) underscore, but
    does not both begin and end with (at least) two underscores.

    >>> is_private("a.b", "my_func")
    False
    >>> is_private("____", "_my_func")
    True
    >>> is_private("someclass", "__init__")
    False
    >>> is_private("sometypo", "__init_")
    True
    >>> is_private("x.y.z", "_")
    True
    >>> is_private("_x.y.z", "__")
    False
    >>> is_private("", "") # senseless but consistent
    False
    """
    warnings.warn("is_private is deprecated; it wasn't useful; "
                  "examine DocTestFinder.find() lists instead",
                  DeprecationWarning, stacklevel=2)
    # "Private" means at least one leading underscore, excluding names that
    # both start and end with a double underscore (dunder names).
    if not base.startswith("_"):
        return False
    return not (base.startswith("__") and base.endswith("__"))
def _extract_future_flags(globs):
    """
    Return the compiler flags for every __future__ feature that has been
    imported into the namespace `globs`.
    """
    flags = 0
    for name in __future__.all_feature_names:
        feature = getattr(__future__, name)
        # Count the feature only when globs holds the genuine feature
        # object, i.e. the module really did `from __future__ import name`.
        if globs.get(name, None) is feature:
            flags |= feature.compiler_flag
    return flags
def _normalize_module(module, depth=2):
    """
    Return the module specified by `module`. In particular:
    - If `module` is a module, then return module.
    - If `module` is a string, then import and return the
    module with that name.
    - If `module` is None, then return the calling module.
    The calling module is assumed to be the module of
    the stack frame at the given depth in the call stack.
    """
    if inspect.ismodule(module):
        return module
    if isinstance(module, (str, unicode)):
        return __import__(module, globals(), locals(), ["*"])
    if module is None:
        # The caller's module is identified via its stack frame's globals.
        caller_globals = sys._getframe(depth).f_globals
        return sys.modules[caller_globals['__name__']]
    raise TypeError("Expected a module, string, or None")
def _indent(s, indent=4):
    """
    Prefix every non-blank line of `s` with `indent` space characters and
    return the result.  ("Blank" means completely empty; lines containing
    only spaces are still prefixed.)
    """
    pad = indent * ' '
    out = []
    for line in s.split('\n'):
        if line:
            out.append(pad + line)
        else:
            out.append(line)
    return '\n'.join(out)
def _exception_traceback(exc_info):
    """
    Format the (type, value, traceback) triple `exc_info` -- as produced
    by sys.exc_info() -- into a traceback string and return it.
    """
    buf = StringIO()
    etype, evalue, tb = exc_info
    traceback.print_exception(etype, evalue, tb, file=buf)
    return buf.getvalue()
# Override some StringIO methods.
class _SpoofOut(StringIO):
    """A StringIO that guarantees a trailing newline on non-empty output
    and keeps `softspace` from leaking between examples."""

    def getvalue(self):
        value = StringIO.getvalue(self)
        # Expected doctest output has no way to say "no final newline",
        # so normalize any non-empty capture to end with one.
        if value and not value.endswith("\n"):
            value += "\n"
        # Drop `softspace` so a trailing-comma print in one example cannot
        # leak formatting state into the next test case.
        if hasattr(self, "softspace"):
            del self.softspace
        return value

    def truncate(self, size=None):
        StringIO.truncate(self, size)
        if hasattr(self, "softspace"):
            del self.softspace
# Worst-case linear-time ellipsis matching.
def _ellipsis_match(want, got):
    """
    Essentially the only subtle case:
    >>> _ellipsis_match('aa...aa', 'aaa')
    False
    """
    if ELLIPSIS_MARKER not in want:
        return want == got

    pieces = want.split(ELLIPSIS_MARKER)
    assert len(pieces) >= 2

    # `lo` and `hi` bracket the portion of `got` still unmatched.
    lo, hi = 0, len(got)

    # A non-empty first piece must match `got` exactly at the start.
    head = pieces[0]
    if head:
        if not got.startswith(head):
            return False
        lo = len(head)
        del pieces[0]

    # A non-empty last piece must match `got` exactly at the end.
    tail = pieces[-1]
    if tail:
        if not got.endswith(tail):
            return False
        hi -= len(tail)
        del pieces[-1]

    if lo > hi:
        # The anchored ends alone need more characters than `got` has,
        # as in _ellipsis_match('aa...aa', 'aaa').
        return False

    # For the remaining pieces, greedily take the leftmost non-overlapping
    # match of each; if that assignment fails, none can succeed.
    for piece in pieces:
        # `piece` may be '' (consecutive or edge ellipses); find('') then
        # succeeds without advancing `lo`, which is fine.
        lo = got.find(piece, lo, hi)
        if lo < 0:
            return False
        lo += len(piece)
    return True
def _comment_line(line):
    "Return a commented form of the given line"
    stripped = line.rstrip()
    # An empty (or whitespace-only) line becomes a bare '#', with no
    # trailing space.
    return ('# ' + stripped) if stripped else '#'
class _OutputRedirectingPdb(pdb.Pdb):
    """
    A specialized version of the python debugger that redirects stdout
    to a given stream when interacting with the user. Stdout is *not*
    redirected when traced code is executed.
    """
    def __init__(self, out):
        # Stream that receives the debugger's own output (prompts,
        # command results) in place of the real stdout.
        self.__out = out
        pdb.Pdb.__init__(self)
    def trace_dispatch(self, *args):
        # Redirect stdout to the given stream.
        save_stdout = sys.stdout
        sys.stdout = self.__out
        # Call Pdb's trace dispatch method.
        try:
            return pdb.Pdb.trace_dispatch(self, *args)
        finally:
            # Always restore stdout, even if dispatch raises, so traced
            # code never runs with the redirected stream.
            sys.stdout = save_stdout
# [XX] Normalize with respect to os.path.pardir?
def _module_relative_path(module, path):
    """
    Resolve the '/'-separated `path` relative to `module`'s directory and
    return it as an OS-specific path.

    Raises TypeError if `module` is not a module, and ValueError if `path`
    is absolute or the module's location cannot be determined.
    """
    if not inspect.ismodule(module):
        raise TypeError('Expected a module: %r' % module)
    if path.startswith('/'):
        raise ValueError('Module-relative files may not have absolute paths')

    # Find the base directory for the path.
    if hasattr(module, '__file__'):
        # A normal module/package: use the directory containing it.
        basedir = os.path.split(module.__file__)[0]
    elif module.__name__ == '__main__':
        # An interactive session: fall back to the script's directory,
        # or the current directory when there is no script.
        if len(sys.argv) > 0 and sys.argv[0] != '':
            basedir = os.path.split(sys.argv[0])[0]
        else:
            basedir = os.curdir
    else:
        # A module w/o __file__ (this includes builtins).  Bug fix: the
        # old code concatenated the module *object* into the message
        # ("..." + module + "..."), which raised TypeError instead of
        # this intended ValueError; format via %r instead.
        raise ValueError("Can't resolve paths relative to the module %r "
                         "(it has no __file__)" % module)

    # Combine the base directory and the '/'-separated path components.
    return os.path.join(basedir, *(path.split('/')))
######################################################################
## 2. Example & DocTest
######################################################################
## - An "example" is a <source, want> pair, where "source" is a
## fragment of source code, and "want" is the expected output for
## "source." The Example class also includes information about
## where the example was extracted from.
##
## - A "doctest" is a collection of examples, typically extracted from
## a string (such as an object's docstring). The DocTest class also
## includes information about where the string was extracted from.
class Example:
    """
    One doctest example: a fragment of source code plus the output it is
    expected to produce.  Attributes:

      - source: A single Python statement, always ending with a newline
        (the constructor appends one if missing).
      - want: The expected output of running the source (stdout text, or
        a traceback for an expected exception).  Ends with a newline
        unless it is the empty string; the constructor appends one if
        missing.
      - exc_msg: The exception message the example is expected to raise,
        compared against traceback.format_exception_only(); `None` when
        no exception is expected.  Ends with a newline unless `None`.
      - lineno: Zero-based line number of the example within the DocTest
        string it came from.
      - indent: Number of space characters before the example's first
        prompt in the DocTest string.
      - options: Mapping of option flags to True/False, overriding the
        runner's default optionflags for this example only.  Flags not
        present keep their defaults; by default no options are set.
    """
    def __init__(self, source, want, exc_msg=None, lineno=0, indent=0,
                 options=None):
        # Enforce the trailing-newline invariants documented above.
        if not source.endswith('\n'):
            source = source + '\n'
        if want and not want.endswith('\n'):
            want = want + '\n'
        if exc_msg is not None and not exc_msg.endswith('\n'):
            exc_msg = exc_msg + '\n'
        self.source = source
        self.want = want
        self.exc_msg = exc_msg
        self.lineno = lineno
        self.indent = indent
        # A fresh dict per instance when no options are given -- never a
        # shared mutable default.
        self.options = {} if options is None else options
class DocTest:
    """
    A group of doctest examples that should run in a single shared
    namespace.  Attributes:

      - examples: the list of Example objects.
      - globs: the globals namespace the examples run in.
      - name: identifies the DocTest (typically the name of the object
        whose docstring it was extracted from).
      - filename: the file the docstring came from, or `None` if unknown.
      - lineno: zero-based line number within `filename` where the
        docstring begins, or `None` if unavailable.
      - docstring: the original string the examples were extracted from,
        or `None` if unavailable.
    """
    def __init__(self, examples, globs, name, filename, lineno, docstring):
        """
        Create a new DocTest containing the given examples.  The
        DocTest's globals are initialized with a copy of `globs`.
        """
        assert not isinstance(examples, basestring), \
               "DocTest no longer accepts str; use DocTestParser instead"
        self.examples = examples
        self.docstring = docstring
        # Copy so that running the examples never mutates the caller's
        # namespace.
        self.globs = globs.copy()
        self.name = name
        self.filename = filename
        self.lineno = lineno

    def __repr__(self):
        count = len(self.examples)
        if count == 0:
            examples = 'no examples'
        elif count == 1:
            examples = '1 example'
        else:
            examples = '%d examples' % count
        return ('<DocTest %s from %s:%s (%s)>' %
                (self.name, self.filename, self.lineno, examples))

    # This lets us sort tests by name:
    def __cmp__(self, other):
        if not isinstance(other, DocTest):
            return -1
        # id() acts as a tie-breaker so the ordering is total.
        return cmp((self.name, self.filename, self.lineno, id(self)),
                   (other.name, other.filename, other.lineno, id(other)))
######################################################################
## 3. DocTestParser
######################################################################
class DocTestParser:
    """
    A class used to parse strings containing doctest examples.
    """
    # This regular expression is used to find doctest examples in a
    # string. It defines three groups: `source` is the source code
    # (including leading indentation and prompts); `indent` is the
    # indentation of the first (PS1) line of the source code; and
    # `want` is the expected output (including leading indentation).
    _EXAMPLE_RE = re.compile(r'''
        # Source consists of a PS1 line followed by zero or more PS2 lines.
        (?P<source>
            (?:^(?P<indent> [ ]*) >>> .*) # PS1 line
            (?:\n [ ]* \.\.\. .*)*) # PS2 lines
        \n?
        # Want consists of any non-blank lines that do not start with PS1.
        (?P<want> (?:(?![ ]*$) # Not a blank line
            (?![ ]*>>>) # Not a line starting with PS1
            .*$\n? # But any other line
            )*)
        ''', re.MULTILINE | re.VERBOSE)
    # A regular expression for handling `want` strings that contain
    # expected exceptions. It divides `want` into three pieces:
    # - the traceback header line (`hdr`)
    # - the traceback stack (`stack`)
    # - the exception message (`msg`), as generated by
    # traceback.format_exception_only()
    # `msg` may have multiple lines. We assume/require that the
    # exception message is the first non-indented line starting with a word
    # character following the traceback header line.
    _EXCEPTION_RE = re.compile(r"""
        # Grab the traceback header. Different versions of Python have
        # said different things on the first traceback line.
        ^(?P<hdr> Traceback\ \(
            (?: most\ recent\ call\ last
            | innermost\ last
            ) \) :
        )
        \s* $ # toss trailing whitespace on the header.
        (?P<stack> .*?) # don't blink: absorb stuff until...
        ^ (?P<msg> \w+ .*) # a line *starts* with alphanum.
        """, re.VERBOSE | re.MULTILINE | re.DOTALL)
    # A callable returning a true value iff its argument is a blank line
    # or contains a single comment.
    _IS_BLANK_OR_COMMENT = re.compile(r'^[ ]*(#.*)?$').match
    def parse(self, string, name='<string>'):
        """
        Divide the given string into examples and intervening text,
        and return them as a list of alternating Examples and strings.
        Line numbers for the Examples are 0-based. The optional
        argument `name` is a name identifying this string, and is only
        used for error messages.
        """
        string = string.expandtabs()
        # If all lines begin with the same indentation, then strip it.
        min_indent = self._min_indent(string)
        if min_indent > 0:
            string = '\n'.join([l[min_indent:] for l in string.split('\n')])
        output = []
        # `charno`/`lineno` track our position in `string` just past the
        # end of the previous regexp match.
        charno, lineno = 0, 0
        # Find all doctest examples in the string:
        for m in self._EXAMPLE_RE.finditer(string):
            # Add the pre-example text to `output`.
            output.append(string[charno:m.start()])
            # Update lineno (lines before this example)
            lineno += string.count('\n', charno, m.start())
            # Extract info from the regexp match.
            (source, options, want, exc_msg) = \
                self._parse_example(m, name, lineno)
            # Create an Example, and add it to the list.
            if not self._IS_BLANK_OR_COMMENT(source):
                output.append( Example(source, want, exc_msg,
                                    lineno=lineno,
                                    indent=min_indent+len(m.group('indent')),
                                    options=options) )
            # Update lineno (lines inside this example)
            lineno += string.count('\n', m.start(), m.end())
            # Update charno.
            charno = m.end()
        # Add any remaining post-example text to `output`.
        output.append(string[charno:])
        return output
    def get_doctest(self, string, globs, name, filename, lineno):
        """
        Extract all doctest examples from the given string, and
        collect them into a `DocTest` object.
        `globs`, `name`, `filename`, and `lineno` are attributes for
        the new `DocTest` object. See the documentation for `DocTest`
        for more information.
        """
        return DocTest(self.get_examples(string, name), globs,
                       name, filename, lineno, string)
    def get_examples(self, string, name='<string>'):
        """
        Extract all doctest examples from the given string, and return
        them as a list of `Example` objects. Line numbers are
        0-based, because it's most common in doctests that nothing
        interesting appears on the same line as opening triple-quote,
        and so the first interesting line is called \"line 1\" then.
        The optional argument `name` is a name identifying this
        string, and is only used for error messages.
        """
        # Filter the alternating text/Example list down to the Examples.
        return [x for x in self.parse(string, name)
                if isinstance(x, Example)]
    def _parse_example(self, m, name, lineno):
        """
        Given a regular expression match from `_EXAMPLE_RE` (`m`),
        return a pair `(source, want)`, where `source` is the matched
        example's source code (with prompts and indentation stripped);
        and `want` is the example's expected output (with indentation
        stripped).
        `name` is the string's name, and `lineno` is the line number
        where the example starts; both are used for error messages.
        """
        # Get the example's indentation level.
        indent = len(m.group('indent'))
        # Divide source into lines; check that they're properly
        # indented; and then strip their indentation & prompts.
        source_lines = m.group('source').split('\n')
        self._check_prompt_blank(source_lines, indent, name, lineno)
        self._check_prefix(source_lines[1:], ' '*indent + '.', name, lineno)
        # sl[indent+4:] drops the indentation plus the 4-character prompt
        # (">>> " on the first line, "... " on continuation lines).
        source = '\n'.join([sl[indent+4:] for sl in source_lines])
        # Divide want into lines; check that it's properly indented; and
        # then strip the indentation. Spaces before the last newline should
        # be preserved, so plain rstrip() isn't good enough.
        want = m.group('want')
        want_lines = want.split('\n')
        if len(want_lines) > 1 and re.match(r' *$', want_lines[-1]):
            del want_lines[-1] # forget final newline & spaces after it
        self._check_prefix(want_lines, ' '*indent, name,
                           lineno + len(source_lines))
        want = '\n'.join([wl[indent:] for wl in want_lines])
        # If `want` contains a traceback message, then extract it.
        m = self._EXCEPTION_RE.match(want)
        if m:
            exc_msg = m.group('msg')
        else:
            exc_msg = None
        # Extract options from the source.
        options = self._find_options(source, name, lineno)
        return source, options, want, exc_msg
    # This regular expression looks for option directives in the
    # source code of an example. Option directives are comments
    # starting with "doctest:". Warning: this may give false
    # positives for string-literals that contain the string
    # "#doctest:". Eliminating these false positives would require
    # actually parsing the string; but we limit them by ignoring any
    # line containing "#doctest:" that is *followed* by a quote mark.
    _OPTION_DIRECTIVE_RE = re.compile(r'#\s*doctest:\s*([^\n\'"]*)$',
                                      re.MULTILINE)
    def _find_options(self, source, name, lineno):
        """
        Return a dictionary containing option overrides extracted from
        option directives in the given source string.
        `name` is the string's name, and `lineno` is the line number
        where the example starts; both are used for error messages.
        """
        options = {}
        # (note: with the current regexp, this will match at most once:)
        for m in self._OPTION_DIRECTIVE_RE.finditer(source):
            # Directives look like "# doctest: +FLAG, -FLAG"; commas are
            # optional separators.
            option_strings = m.group(1).replace(',', ' ').split()
            for option in option_strings:
                if (option[0] not in '+-' or
                    option[1:] not in OPTIONFLAGS_BY_NAME):
                    raise ValueError('line %r of the doctest for %s '
                                     'has an invalid option: %r' %
                                     (lineno+1, name, option))
                flag = OPTIONFLAGS_BY_NAME[option[1:]]
                options[flag] = (option[0] == '+')
        if options and self._IS_BLANK_OR_COMMENT(source):
            raise ValueError('line %r of the doctest for %s has an option '
                             'directive on a line with no example: %r' %
                             (lineno, name, source))
        return options
    # This regular expression finds the indentation of every non-blank
    # line in a string.
    _INDENT_RE = re.compile('^([ ]*)(?=\S)', re.MULTILINE)
    def _min_indent(self, s):
        "Return the minimum indentation of any non-blank line in `s`"
        indents = [len(indent) for indent in self._INDENT_RE.findall(s)]
        if len(indents) > 0:
            return min(indents)
        else:
            return 0
    def _check_prompt_blank(self, lines, indent, name, lineno):
        """
        Given the lines of a source string (including prompts and
        leading indentation), check to make sure that every prompt is
        followed by a space character. If any line is not followed by
        a space character, then raise ValueError.
        """
        for i, line in enumerate(lines):
            # line[indent:indent+3] is the 3-character prompt (">>>" or
            # "..."); the character right after it must be a space.
            if len(line) >= indent+4 and line[indent+3] != ' ':
                raise ValueError('line %r of the docstring for %s '
                                 'lacks blank after %s: %r' %
                                 (lineno+i+1, name,
                                  line[indent:indent+3], line))
    def _check_prefix(self, lines, prefix, name, lineno):
        """
        Check that every line in the given list starts with the given
        prefix; if any line does not, then raise a ValueError.
        """
        for i, line in enumerate(lines):
            if line and not line.startswith(prefix):
                raise ValueError('line %r of the docstring for %s has '
                                 'inconsistent leading whitespace: %r' %
                                 (lineno+i+1, name, line))
######################################################################
## 4. DocTest Finder
######################################################################
class DocTestFinder:
    """
    A class used to extract the DocTests that are relevant to a given
    object, from its docstring and the docstrings of its contained
    objects.  Doctests can currently be extracted from the following
    object types: modules, functions, classes, methods, staticmethods,
    classmethods, and properties.
    """
    def __init__(self, verbose=False, parser=DocTestParser(),
                 recurse=True, _namefilter=None, exclude_empty=True):
        """
        Create a new doctest finder.

        The optional argument `parser` specifies a class or
        function that should be used to create new DocTest objects (or
        objects that implement the same interface as DocTest).  The
        signature for this factory function should match the signature
        of the DocTest constructor.

        If the optional argument `recurse` is false, then `find` will
        only examine the given object, and not any contained objects.

        If the optional argument `exclude_empty` is false, then `find`
        will include tests for objects with empty docstrings.
        """
        # NOTE: the default `parser` is a single DocTestParser instance
        # created at class-definition time and shared by all finders
        # that do not pass their own.
        self._parser = parser
        self._verbose = verbose
        self._recurse = recurse
        self._exclude_empty = exclude_empty
        # _namefilter is undocumented, and exists only for temporary backward-
        # compatibility support of testmod's deprecated isprivate mess.
        self._namefilter = _namefilter
    def find(self, obj, name=None, module=None, globs=None,
             extraglobs=None):
        """
        Return a list of the DocTests that are defined by the given
        object's docstring, or by any of its contained objects'
        docstrings.

        The optional parameter `module` is the module that contains
        the given object.  If the module is not specified or is None, then
        the test finder will attempt to automatically determine the
        correct module.  The object's module is used:

            - As a default namespace, if `globs` is not specified.
            - To prevent the DocTestFinder from extracting DocTests
              from objects that are imported from other modules.
            - To find the name of the file containing the object.
            - To help find the line number of the object within its
              file.

        Contained objects whose module does not match `module` are ignored.

        If `module` is False, no attempt to find the module will be made.
        This is obscure, of use mostly in tests:  if `module` is False, or
        is None but cannot be found automatically, then all objects are
        considered to belong to the (non-existent) module, so all contained
        objects will (recursively) be searched for doctests.

        The globals for each DocTest is formed by combining `globs`
        and `extraglobs` (bindings in `extraglobs` override bindings
        in `globs`).  A new copy of the globals dictionary is created
        for each DocTest.  If `globs` is not specified, then it
        defaults to the module's `__dict__`, if specified, or {}
        otherwise.  If `extraglobs` is not specified, then it defaults
        to {}.
        """
        # If name was not specified, then extract it from the object.
        if name is None:
            name = getattr(obj, '__name__', None)
            if name is None:
                raise ValueError("DocTestFinder.find: name must be given "
                                 "when obj.__name__ doesn't exist: %r" %
                                 (type(obj),))
        # Find the module that contains the given object (if obj is
        # a module, then module=obj.).  Note: this may fail, in which
        # case module will be None.
        if module is False:
            module = None
        elif module is None:
            module = inspect.getmodule(obj)
        # Read the module's source code.  This is used by
        # DocTestFinder._find_lineno to find the line number for a
        # given object's docstring.
        try:
            # getsourcefile can return None for C objects; fall back to
            # getfile, which raises TypeError for builtins.
            file = inspect.getsourcefile(obj) or inspect.getfile(obj)
            source_lines = linecache.getlines(file)
            if not source_lines:
                source_lines = None
        except TypeError:
            source_lines = None
        # Initialize globals, and merge in extraglobs.
        if globs is None:
            if module is None:
                globs = {}
            else:
                globs = module.__dict__.copy()
        else:
            globs = globs.copy()
        if extraglobs is not None:
            globs.update(extraglobs)
        # Recursively explore `obj`, extracting DocTests.  The final {}
        # is the `seen` dict used to break cycles.
        tests = []
        self._find(tests, obj, name, module, source_lines, globs, {})
        return tests
    def _filter(self, obj, prefix, base):
        """
        Return true if the given object should not be examined.
        """
        # With the default _namefilter of None, nothing is filtered out;
        # otherwise delegate to the deprecated isprivate-style hook.
        return (self._namefilter is not None and
                self._namefilter(prefix, base))
    def _from_module(self, module, object):
        """
        Return true if the given object is defined in the given
        module.
        """
        if module is None:
            return True
        elif inspect.isfunction(object):
            return module.__dict__ is object.func_globals
        elif inspect.isclass(object):
            return module.__name__ == object.__module__
        elif inspect.getmodule(object) is not None:
            return module is inspect.getmodule(object)
        elif hasattr(object, '__module__'):
            return module.__name__ == object.__module__
        elif isinstance(object, property):
            return True # [XX] no way to be sure.
        else:
            raise ValueError("object must be a class or function")
    def _find(self, tests, obj, name, module, source_lines, globs, seen):
        """
        Find tests for the given object and any contained objects, and
        add them to `tests`.

        `seen` maps id(obj) -> 1 for every object already visited, so
        cyclic or duplicated references are only processed once.
        """
        if self._verbose:
            print 'Finding tests in %s' % name
        # If we've already processed this object, then ignore it.
        if id(obj) in seen:
            return
        seen[id(obj)] = 1
        # Find a test for this object, and add it to the list of tests.
        test = self._get_test(obj, name, module, globs, source_lines)
        if test is not None:
            tests.append(test)
        # Look for tests in a module's contained objects.
        if inspect.ismodule(obj) and self._recurse:
            for valname, val in obj.__dict__.items():
                # Check if this contained object should be ignored.
                if self._filter(val, name, valname):
                    continue
                valname = '%s.%s' % (name, valname)
                # Recurse to functions & classes.
                if ((inspect.isfunction(val) or inspect.isclass(val)) and
                    self._from_module(module, val)):
                    self._find(tests, val, valname, module, source_lines,
                               globs, seen)
        # Look for tests in a module's __test__ dictionary.
        if inspect.ismodule(obj) and self._recurse:
            for valname, val in getattr(obj, '__test__', {}).items():
                if not isinstance(valname, basestring):
                    raise ValueError("DocTestFinder.find: __test__ keys "
                                     "must be strings: %r" %
                                     (type(valname),))
                if not (inspect.isfunction(val) or inspect.isclass(val) or
                        inspect.ismethod(val) or inspect.ismodule(val) or
                        isinstance(val, basestring)):
                    raise ValueError("DocTestFinder.find: __test__ values "
                                     "must be strings, functions, methods, "
                                     "classes, or modules: %r" %
                                     (type(val),))
                valname = '%s.__test__.%s' % (name, valname)
                self._find(tests, val, valname, module, source_lines,
                           globs, seen)
        # Look for tests in a class's contained objects.
        if inspect.isclass(obj) and self._recurse:
            for valname, val in obj.__dict__.items():
                # Check if this contained object should be ignored.
                if self._filter(val, name, valname):
                    continue
                # Special handling for staticmethod/classmethod:
                # unwrap them so the checks below see the underlying
                # function.
                if isinstance(val, staticmethod):
                    val = getattr(obj, valname)
                if isinstance(val, classmethod):
                    val = getattr(obj, valname).im_func
                # Recurse to methods, properties, and nested classes.
                if ((inspect.isfunction(val) or inspect.isclass(val) or
                     isinstance(val, property)) and
                    self._from_module(module, val)):
                    valname = '%s.%s' % (name, valname)
                    self._find(tests, val, valname, module, source_lines,
                               globs, seen)
    def _get_test(self, obj, name, module, globs, source_lines):
        """
        Return a DocTest for the given object, if it defines a docstring;
        otherwise, return None.
        """
        # Extract the object's docstring.  If it doesn't have one,
        # then return None (no test for this object).
        if isinstance(obj, basestring):
            # A bare string (e.g. a __test__ value) is itself the
            # docstring to parse.
            docstring = obj
        else:
            try:
                if obj.__doc__ is None:
                    docstring = ''
                else:
                    docstring = obj.__doc__
                    if not isinstance(docstring, basestring):
                        docstring = str(docstring)
            except (TypeError, AttributeError):
                docstring = ''
        # Find the docstring's location in the file.
        lineno = self._find_lineno(obj, source_lines)
        # Don't bother if the docstring is empty.
        if self._exclude_empty and not docstring:
            return None
        # Return a DocTest for this object.
        if module is None:
            filename = None
        else:
            filename = getattr(module, '__file__', module.__name__)
            # Report the .py file, not the compiled bytecode file.
            if filename[-4:] in (".pyc", ".pyo"):
                filename = filename[:-1]
        return self._parser.get_doctest(docstring, globs, name,
                                        filename, lineno)
    def _find_lineno(self, obj, source_lines):
        """
        Return a line number of the given object's docstring.  Note:
        this method assumes that the object has a docstring.
        """
        lineno = None
        # Find the line number for modules.
        if inspect.ismodule(obj):
            lineno = 0
        # Find the line number for classes.
        # Note: this could be fooled if a class is defined multiple
        # times in a single file.
        if inspect.isclass(obj):
            if source_lines is None:
                return None
            pat = re.compile(r'^\s*class\s*%s\b' %
                             getattr(obj, '__name__', '-'))
            for i, line in enumerate(source_lines):
                if pat.match(line):
                    lineno = i
                    break
        # Find the line number for functions & methods.  Unwrap down to
        # the code object, whose co_firstlineno is authoritative.
        if inspect.ismethod(obj): obj = obj.im_func
        if inspect.isfunction(obj): obj = obj.func_code
        if inspect.istraceback(obj): obj = obj.tb_frame
        if inspect.isframe(obj): obj = obj.f_code
        if inspect.iscode(obj):
            lineno = getattr(obj, 'co_firstlineno', None)-1
        # Find the line number where the docstring starts.  Assume
        # that it's the first line that begins with a quote mark.
        # Note: this could be fooled by a multiline function
        # signature, where a continuation line begins with a quote
        # mark.
        if lineno is not None:
            if source_lines is None:
                return lineno+1
            pat = re.compile('(^|.*:)\s*\w*("|\')')
            for lineno in range(lineno, len(source_lines)):
                if pat.match(source_lines[lineno]):
                    return lineno
        # We couldn't find the line number.
        return None
######################################################################
## 5. DocTest Runner
######################################################################
class DocTestRunner:
    """
    A class used to run DocTest test cases, and accumulate statistics.
    The `run` method is used to process a single DocTest case.  It
    returns a tuple `(f, t)`, where `t` is the number of test cases
    tried, and `f` is the number of test cases that failed.

        >>> tests = DocTestFinder().find(_TestClass)
        >>> runner = DocTestRunner(verbose=False)
        >>> for test in tests:
        ...     print runner.run(test)
        (0, 2)
        (0, 1)
        (0, 2)
        (0, 2)

    The `summarize` method prints a summary of all the test cases that
    have been run by the runner, and returns an aggregated `(f, t)`
    tuple:

        >>> runner.summarize(verbose=1)
        4 items passed all tests:
           2 tests in _TestClass
           2 tests in _TestClass.__init__
           2 tests in _TestClass.get
           1 tests in _TestClass.square
        7 tests in 4 items.
        7 passed and 0 failed.
        Test passed.
        (0, 7)

    The aggregated number of tried examples and failed examples is
    also available via the `tries` and `failures` attributes:

        >>> runner.tries
        7
        >>> runner.failures
        0

    The comparison between expected outputs and actual outputs is done
    by an `OutputChecker`.  This comparison may be customized with a
    number of option flags; see the documentation for `testmod` for
    more information.  If the option flags are insufficient, then the
    comparison may also be customized by passing a subclass of
    `OutputChecker` to the constructor.

    The test runner's display output can be controlled in two ways.
    First, an output function (`out`) can be passed to
    `TestRunner.run`; this function will be called with strings that
    should be displayed.  It defaults to `sys.stdout.write`.  If
    capturing the output is not sufficient, then the display output
    can be also customized by subclassing DocTestRunner, and
    overriding the methods `report_start`, `report_success`,
    `report_unexpected_exception`, and `report_failure`.
    """
    # This divider string is used to separate failure messages, and to
    # separate sections of the summary.
    DIVIDER = "*" * 70
    def __init__(self, checker=None, verbose=None, optionflags=0):
        """
        Create a new test runner.

        Optional keyword arg `checker` is the `OutputChecker` that
        should be used to compare the expected outputs and actual
        outputs of doctest examples.

        Optional keyword arg 'verbose' prints lots of stuff if true,
        only failures if false; by default, it's true iff '-v' is in
        sys.argv.

        Optional argument `optionflags` can be used to control how the
        test runner compares expected output to actual output, and how
        it displays failures.  See the documentation for `testmod` for
        more information.
        """
        self._checker = checker or OutputChecker()
        if verbose is None:
            verbose = '-v' in sys.argv
        self._verbose = verbose
        self.optionflags = optionflags
        self.original_optionflags = optionflags
        # Keep track of the examples we've run.
        self.tries = 0
        self.failures = 0
        # Maps test name -> (failures, tries) across all runs.
        self._name2ft = {}
        # Create a fake output target for capturing doctest output.
        self._fakeout = _SpoofOut()
    #/////////////////////////////////////////////////////////////////
    # Reporting methods
    #/////////////////////////////////////////////////////////////////
    def report_start(self, out, test, example):
        """
        Report that the test runner is about to process the given
        example.  (Only displays a message if verbose=True)
        """
        if self._verbose:
            if example.want:
                out('Trying:\n' + _indent(example.source) +
                    'Expecting:\n' + _indent(example.want))
            else:
                out('Trying:\n' + _indent(example.source) +
                    'Expecting nothing\n')
    def report_success(self, out, test, example, got):
        """
        Report that the given example ran successfully.  (Only
        displays a message if verbose=True)
        """
        if self._verbose:
            out("ok\n")
    def report_failure(self, out, test, example, got):
        """
        Report that the given example failed.
        """
        out(self._failure_header(test, example) +
            self._checker.output_difference(example, got, self.optionflags))
    def report_unexpected_exception(self, out, test, example, exc_info):
        """
        Report that the given example raised an unexpected exception.
        """
        out(self._failure_header(test, example) +
            'Exception raised:\n' + _indent(_exception_traceback(exc_info)))
    def _failure_header(self, test, example):
        """
        Return the common header used by the failure reports: the
        divider line, the file/line/test-name location (when known),
        and the source of the failing example.
        """
        out = [self.DIVIDER]
        if test.filename:
            if test.lineno is not None and example.lineno is not None:
                # example.lineno is 0-based and relative to the test.
                lineno = test.lineno + example.lineno + 1
            else:
                lineno = '?'
            out.append('File "%s", line %s, in %s' %
                       (test.filename, lineno, test.name))
        else:
            out.append('Line %s, in %s' % (example.lineno+1, test.name))
        out.append('Failed example:')
        source = example.source
        out.append(_indent(source))
        return '\n'.join(out)
    #/////////////////////////////////////////////////////////////////
    # DocTest Running
    #/////////////////////////////////////////////////////////////////
    def __run(self, test, compileflags, out):
        """
        Run the examples in `test`.  Write the outcome of each example
        with one of the `DocTestRunner.report_*` methods, using the
        writer function `out`.  `compileflags` is the set of compiler
        flags that should be used to execute examples.  Return a tuple
        `(f, t)`, where `t` is the number of examples tried, and `f`
        is the number of examples that failed.  The examples are run
        in the namespace `test.globs`.
        """
        # Keep track of the number of failures and tries.
        failures = tries = 0
        # Save the option flags (since option directives can be used
        # to modify them).
        original_optionflags = self.optionflags
        SUCCESS, FAILURE, BOOM = range(3) # `outcome` state
        check = self._checker.check_output
        # Process each example.
        for examplenum, example in enumerate(test.examples):
            # If REPORT_ONLY_FIRST_FAILURE is set, then suppress
            # reporting after the first failure.
            quiet = (self.optionflags & REPORT_ONLY_FIRST_FAILURE and
                     failures > 0)
            # Merge in the example's options.
            self.optionflags = original_optionflags
            if example.options:
                for (optionflag, val) in example.options.items():
                    if val:
                        self.optionflags |= optionflag
                    else:
                        self.optionflags &= ~optionflag
            # Record that we started this example.
            tries += 1
            if not quiet:
                self.report_start(out, test, example)
            # Use a special filename for compile(), so we can retrieve
            # the source code during interactive debugging (see
            # __patched_linecache_getlines).
            filename = '<doctest %s[%d]>' % (test.name, examplenum)
            # Run the example in the given context (globs), and record
            # any exception that gets raised.  (But don't intercept
            # keyboard interrupts.)
            try:
                # Don't blink!  This is where the user's code gets run.
                exec compile(example.source, filename, "single",
                             compileflags, 1) in test.globs
                self.debugger.set_continue() # ==== Example Finished ====
                exception = None
            except KeyboardInterrupt:
                raise
            except:
                exception = sys.exc_info()
                self.debugger.set_continue() # ==== Example Finished ====
            got = self._fakeout.getvalue()  # the actual output
            self._fakeout.truncate(0)
            outcome = FAILURE   # guilty until proved innocent or insane
            # If the example executed without raising any exceptions,
            # verify its output.
            if exception is None:
                if check(example.want, got, self.optionflags):
                    outcome = SUCCESS
            # The example raised an exception:  check if it was expected.
            else:
                exc_info = sys.exc_info()
                exc_msg = traceback.format_exception_only(*exc_info[:2])[-1]
                if not quiet:
                    got += _exception_traceback(exc_info)
                # If `example.exc_msg` is None, then we weren't expecting
                # an exception.
                if example.exc_msg is None:
                    outcome = BOOM
                # We expected an exception:  see whether it matches.
                elif check(example.exc_msg, exc_msg, self.optionflags):
                    outcome = SUCCESS
                # Another chance if they didn't care about the detail.
                elif self.optionflags & IGNORE_EXCEPTION_DETAIL:
                    # Compare only the exception-type prefix up to the
                    # first colon.
                    m1 = re.match(r'[^:]*:', example.exc_msg)
                    m2 = re.match(r'[^:]*:', exc_msg)
                    if m1 and m2 and check(m1.group(0), m2.group(0),
                                           self.optionflags):
                        outcome = SUCCESS
            # Report the outcome.
            if outcome is SUCCESS:
                if not quiet:
                    self.report_success(out, test, example, got)
            elif outcome is FAILURE:
                if not quiet:
                    self.report_failure(out, test, example, got)
                failures += 1
            elif outcome is BOOM:
                if not quiet:
                    self.report_unexpected_exception(out, test, example,
                                                     exc_info)
                failures += 1
            else:
                assert False, ("unknown outcome", outcome)
        # Restore the option flags (in case they were modified)
        self.optionflags = original_optionflags
        # Record and return the number of failures and tries.
        self.__record_outcome(test, failures, tries)
        return failures, tries
    def __record_outcome(self, test, f, t):
        """
        Record the fact that the given DocTest (`test`) generated `f`
        failures out of `t` tried examples.
        """
        f2, t2 = self._name2ft.get(test.name, (0,0))
        self._name2ft[test.name] = (f+f2, t+t2)
        self.failures += f
        self.tries += t
    __LINECACHE_FILENAME_RE = re.compile(r'<doctest '
                                         r'(?P<name>[\w\.]+)'
                                         r'\[(?P<examplenum>\d+)\]>$')
    def __patched_linecache_getlines(self, filename, module_globals=None):
        """
        Replacement for linecache.getlines() installed while a test
        runs: serve the example's source for the synthetic
        '<doctest NAME[NUM]>' filenames (so debuggers can display it),
        and defer to the saved original implementation otherwise.
        """
        m = self.__LINECACHE_FILENAME_RE.match(filename)
        if m and m.group('name') == self.test.name:
            example = self.test.examples[int(m.group('examplenum'))]
            return example.source.splitlines(True)
        else:
            # linecache.getlines() grew the module_globals argument in
            # Python 2.5; older versions reject it.
            if sys.version_info < (2, 5, 0):
                return self.save_linecache_getlines(filename)
            else:
                return self.save_linecache_getlines(filename, module_globals)
    def run(self, test, compileflags=None, out=None, clear_globs=True):
        """
        Run the examples in `test`, and display the results using the
        writer function `out`.

        The examples are run in the namespace `test.globs`.  If
        `clear_globs` is true (the default), then this namespace will
        be cleared after the test runs, to help with garbage
        collection.  If you would like to examine the namespace after
        the test completes, then use `clear_globs=False`.

        `compileflags` gives the set of flags that should be used by
        the Python compiler when running the examples.  If not
        specified, then it will default to the set of future-import
        flags that apply to `globs`.

        The output of each example is checked using
        `DocTestRunner.check_output`, and the results are formatted by
        the `DocTestRunner.report_*` methods.
        """
        self.test = test
        if compileflags is None:
            compileflags = _extract_future_flags(test.globs)
        save_stdout = sys.stdout
        if out is None:
            out = save_stdout.write
        sys.stdout = self._fakeout
        # Patch pdb.set_trace to restore sys.stdout during interactive
        # debugging (so it's not still redirected to self._fakeout).
        # Note that the interactive output will go to *our*
        # save_stdout, even if that's not the real sys.stdout; this
        # allows us to write test cases for the set_trace behavior.
        save_set_trace = pdb.set_trace
        self.debugger = _OutputRedirectingPdb(save_stdout)
        self.debugger.reset()
        pdb.set_trace = self.debugger.set_trace
        # Patch linecache.getlines, so we can see the example's source
        # when we're inside the debugger.
        self.save_linecache_getlines = linecache.getlines
        linecache.getlines = self.__patched_linecache_getlines
        try:
            return self.__run(test, compileflags, out)
        finally:
            # Always undo the monkey-patching, even on error.
            sys.stdout = save_stdout
            pdb.set_trace = save_set_trace
            linecache.getlines = self.save_linecache_getlines
            if clear_globs:
                test.globs.clear()
    #/////////////////////////////////////////////////////////////////
    # Summarization
    #/////////////////////////////////////////////////////////////////
    def summarize(self, verbose=None):
        """
        Print a summary of all the test cases that have been run by
        this DocTestRunner, and return a tuple `(f, t)`, where `f` is
        the total number of failed examples, and `t` is the total
        number of tried examples.

        The optional `verbose` argument controls how detailed the
        summary is.  If the verbosity is not specified, then the
        DocTestRunner's verbosity is used.
        """
        if verbose is None:
            verbose = self._verbose
        notests = []
        passed = []
        failed = []
        totalt = totalf = 0
        for x in self._name2ft.items():
            name, (f, t) = x
            assert f <= t
            totalt += t
            totalf += f
            if t == 0:
                notests.append(name)
            elif f == 0:
                passed.append( (name, t) )
            else:
                failed.append(x)
        if verbose:
            if notests:
                print len(notests), "items had no tests:"
                notests.sort()
                for thing in notests:
                    print "   ", thing
            if passed:
                print len(passed), "items passed all tests:"
                passed.sort()
                for thing, count in passed:
                    print " %3d tests in %s" % (count, thing)
        if failed:
            print self.DIVIDER
            print len(failed), "items had failures:"
            failed.sort()
            for thing, (f, t) in failed:
                print " %3d of %3d in %s" % (f, t, thing)
        if verbose:
            print totalt, "tests in", len(self._name2ft), "items."
            print totalt - totalf, "passed and", totalf, "failed."
        if totalf:
            print "***Test Failed***", totalf, "failures."
        elif verbose:
            print "Test passed."
        return totalf, totalt
    #/////////////////////////////////////////////////////////////////
    # Backward compatibility cruft to maintain doctest.master.
    #/////////////////////////////////////////////////////////////////
    def merge(self, other):
        """
        Fold `other`'s per-test (failures, tries) counts into this
        runner's `_name2ft` map, summing the counts for any test name
        present in both runners.
        """
        d = self._name2ft
        for name, (f, t) in other._name2ft.items():
            if name in d:
                print "*** DocTestRunner.merge: '" + name + "' in both" \
                    " testers; summing outcomes."
                f2, t2 = d[name]
                f = f + f2
                t = t + t2
            d[name] = f, t
class OutputChecker:
    """
    A class used to check the whether the actual output from a doctest
    example matches the expected output.  `OutputChecker` defines two
    methods: `check_output`, which compares a given pair of outputs,
    and returns true if they match; and `output_difference`, which
    returns a string describing the differences between two outputs.
    """
    def check_output(self, want, got, optionflags):
        """
        Return True iff the actual output from an example (`got`)
        matches the expected output (`want`).  These strings are
        always considered to match if they are identical; but
        depending on what option flags the test runner is using,
        several non-exact match types are also possible.  See the
        documentation for `TestRunner` for more information about
        option flags.
        """
        # Handle the common case first, for efficiency:
        # if they're string-identical, always return true.
        if got == want:
            return True
        # The values True and False replaced 1 and 0 as the return
        # value for boolean comparisons in Python 2.3.
        if not (optionflags & DONT_ACCEPT_TRUE_FOR_1):
            if (got,want) == ("True\n", "1\n"):
                return True
            if (got,want) == ("False\n", "0\n"):
                return True
        # <BLANKLINE> can be used as a special sequence to signify a
        # blank line, unless the DONT_ACCEPT_BLANKLINE flag is used.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            # Replace <BLANKLINE> in want with a blank line.
            want = re.sub('(?m)^%s\s*?$' % re.escape(BLANKLINE_MARKER),
                          '', want)
            # If a line in got contains only spaces, then remove the
            # spaces.
            got = re.sub('(?m)^\s*?$', '', got)
            if got == want:
                return True
        # This flag causes doctest to ignore any differences in the
        # contents of whitespace strings.  Note that this can be used
        # in conjunction with the ELLIPSIS flag.
        if optionflags & NORMALIZE_WHITESPACE:
            got = ' '.join(got.split())
            want = ' '.join(want.split())
            if got == want:
                return True
        # The ELLIPSIS flag says to let the sequence "..." in `want`
        # match any substring in `got`.
        if optionflags & ELLIPSIS:
            if _ellipsis_match(want, got):
                return True
        # We didn't find any match; return false.
        return False
    # Should we do a fancy diff?
    def _do_a_fancy_diff(self, want, got, optionflags):
        """
        Return True if the mismatch between `want` and `got` should be
        shown with one of difflib's diff styles (per `optionflags`)
        rather than the plain Expected/Got listing.
        """
        # Not unless they asked for a fancy diff.
        if not optionflags & (REPORT_UDIFF |
                              REPORT_CDIFF |
                              REPORT_NDIFF):
            return False
        # If expected output uses ellipsis, a meaningful fancy diff is
        # too hard ... or maybe not.  In two real-life failures Tim saw,
        # a diff was a major help anyway, so this is commented out.
        # [todo] _ellipsis_match() knows which pieces do and don't match,
        # and could be the basis for a kick-ass diff in this case.
        ##if optionflags & ELLIPSIS and ELLIPSIS_MARKER in want:
        ##    return False
        # ndiff does intraline difference marking, so can be useful even
        # for 1-line differences.
        if optionflags & REPORT_NDIFF:
            return True
        # The other diff types need at least a few lines to be helpful.
        return want.count('\n') > 2 and got.count('\n') > 2
    def output_difference(self, example, got, optionflags):
        """
        Return a string describing the differences between the
        expected output for a given example (`example`) and the actual
        output (`got`).  `optionflags` is the set of option flags used
        to compare `want` and `got`.
        """
        want = example.want
        # If <BLANKLINE>s are being used, then replace blank lines
        # with <BLANKLINE> in the actual output string.
        if not (optionflags & DONT_ACCEPT_BLANKLINE):
            got = re.sub('(?m)^[ ]*(?=\n)', BLANKLINE_MARKER, got)
        # Check if we should use diff.
        if self._do_a_fancy_diff(want, got, optionflags):
            # Split want & got into lines.
            want_lines = want.splitlines(True)  # True == keep line ends
            got_lines = got.splitlines(True)
            # Use difflib to find their differences.
            if optionflags & REPORT_UDIFF:
                diff = difflib.unified_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'unified diff with -expected +actual'
            elif optionflags & REPORT_CDIFF:
                diff = difflib.context_diff(want_lines, got_lines, n=2)
                diff = list(diff)[2:] # strip the diff header
                kind = 'context diff with expected followed by actual'
            elif optionflags & REPORT_NDIFF:
                engine = difflib.Differ(charjunk=difflib.IS_CHARACTER_JUNK)
                diff = list(engine.compare(want_lines, got_lines))
                kind = 'ndiff with -expected +actual'
            else:
                assert 0, 'Bad diff option'
            # Remove trailing whitespace on diff output.
            diff = [line.rstrip() + '\n' for line in diff]
            return 'Differences (%s):\n' % kind + _indent(''.join(diff))
        # If we're not using diff, then simply list the expected
        # output followed by the actual output.
        if want and got:
            return 'Expected:\n%sGot:\n%s' % (_indent(want), _indent(got))
        elif want:
            return 'Expected:\n%sGot nothing\n' % _indent(want)
        elif got:
            return 'Expected nothing\nGot:\n%s' % _indent(got)
        else:
            return 'Expected nothing\nGot nothing\n'
class DocTestFailure(Exception):
    """A DocTest example has failed in debugging mode.

    The exception instance has variables:

    - test: the DocTest object being run

    - example: the Example object that failed

    - got: the actual output
    """
    def __init__(self, test, example, got):
        self.test = test
        self.example = example
        self.got = got
    def __str__(self):
        # Identify the failing DocTest; details live in the attributes.
        return str(self.test)
class UnexpectedException(Exception):
    """A DocTest example has encountered an unexpected exception.

    The exception instance has variables:

    - test: the DocTest object being run

    - example: the Example object that failed

    - exc_info: the exception info
    """
    def __init__(self, test, example, exc_info):
        self.test = test
        self.example = example
        self.exc_info = exc_info
    def __str__(self):
        # Identify the failing DocTest; details live in the attributes.
        return str(self.test)
class DebugRunner(DocTestRunner):
    r"""Run doc tests but raise an exception as soon as there is a failure.

    If an unexpected exception occurs, an UnexpectedException is raised.
    It contains the test, the example, and the original exception:

        >>> runner = DebugRunner(verbose=False)
        >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
        ...                                    {}, 'foo', 'foo.py', 0)
        >>> try:
        ...     runner.run(test)
        ... except UnexpectedException, failure:
        ...     pass

        >>> failure.test is test
        True

        >>> failure.example.want
        '42\n'

        >>> exc_info = failure.exc_info
        >>> raise exc_info[0], exc_info[1], exc_info[2]
        Traceback (most recent call last):
        ...
        KeyError

    We wrap the original exception to give the calling application
    access to the test and example information.

    If the output doesn't match, then a DocTestFailure is raised:

        >>> test = DocTestParser().get_doctest('''
        ...      >>> x = 1
        ...      >>> x
        ...      2
        ...      ''', {}, 'foo', 'foo.py', 0)

        >>> try:
        ...     runner.run(test)
        ... except DocTestFailure, failure:
        ...     pass

    DocTestFailure objects provide access to the test:

        >>> failure.test is test
        True

    As well as to the example:

        >>> failure.example.want
        '2\n'

    and the actual output:

        >>> failure.got
        '1\n'

    If a failure or error occurs, the globals are left intact:

        >>> del test.globs['__builtins__']
        >>> test.globs
        {'x': 1}

        >>> test = DocTestParser().get_doctest('''
        ...      >>> x = 2
        ...      >>> raise KeyError
        ...      ''', {}, 'foo', 'foo.py', 0)

        >>> runner.run(test)
        Traceback (most recent call last):
        ...
        UnexpectedException: <DocTest foo from foo.py:0 (2 examples)>

        >>> del test.globs['__builtins__']
        >>> test.globs
        {'x': 2}

    But the globals are cleared if there is no error:

        >>> test = DocTestParser().get_doctest('''
        ...      >>> x = 2
        ...      ''', {}, 'foo', 'foo.py', 0)

        >>> runner.run(test)
        (0, 1)

        >>> test.globs
        {}
    """
    def run(self, test, compileflags=None, out=None, clear_globs=True):
        """
        Run `test` as DocTestRunner.run does, but let the first failure
        or unexpected exception propagate; the globals are cleared only
        when the run completes (so post-mortem inspection keeps them).
        """
        # Pass clear_globs=False to the base class: if a report_*
        # override below raises, test.globs must survive for debugging.
        r = DocTestRunner.run(self, test, compileflags, out, False)
        if clear_globs:
            test.globs.clear()
        return r
    def report_unexpected_exception(self, out, test, example, exc_info):
        # Abort the run immediately, wrapping the original exc_info.
        raise UnexpectedException(test, example, exc_info)
    def report_failure(self, out, test, example, got):
        # Abort the run immediately on the first output mismatch.
        raise DocTestFailure(test, example, got)
######################################################################
## 6. Test Functions
######################################################################
# These should be backwards compatible.
# For backward compatibility, a global instance of a DocTestRunner
# class, updated by testmod.  (testmod merges each run's results into
# this runner so legacy callers can inspect doctest.master directly.)
master = None
def testmod(m=None, name=None, globs=None, verbose=None, isprivate=None,
            report=True, optionflags=0, extraglobs=None,
            raise_on_error=False, exclude_empty=False):
    """m=None, name=None, globs=None, verbose=None, isprivate=None,
       report=True, optionflags=0, extraglobs=None, raise_on_error=False,
       exclude_empty=False

    Test examples in docstrings in functions and classes reachable
    from module m (or the current module if m is not supplied), starting
    with m.__doc__.  Unless isprivate is specified, private names
    are not skipped.

    Also test examples reachable from dict m.__test__ if it exists and is
    not None.  m.__test__ maps names to functions, classes and strings;
    function and class docstrings are tested even if the name is private;
    strings are tested directly, as if they were docstrings.

    Return (#failures, #tests).

    See doctest.__doc__ for an overview.

    Optional keyword arg "name" gives the name of the module; by default
    use m.__name__.

    Optional keyword arg "globs" gives a dict to be used as the globals
    when executing examples; by default, use m.__dict__.  A copy of this
    dict is actually used for each docstring, so that each docstring's
    examples start with a clean slate.

    Optional keyword arg "extraglobs" gives a dictionary that should be
    merged into the globals that are used to execute examples.  By
    default, no extra globals are used.  This is new in 2.4.

    Optional keyword arg "verbose" prints lots of stuff if true, prints
    only failures if false; by default, it's true iff "-v" is in sys.argv.

    Optional keyword arg "report" prints a summary at the end when true,
    else prints nothing at the end.  In verbose mode, the summary is
    detailed, else very brief (in fact, empty if all tests passed).

    Optional keyword arg "optionflags" or's together module constants,
    and defaults to 0.  This is new in 2.3.  Possible values (see the
    docs for details):

        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE

    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure.  This allows failures to be
    post-mortem debugged.

    Deprecated in Python 2.4:
    Optional keyword arg "isprivate" specifies a function used to
    determine whether a name is private.  The default function is
    treat all functions as public.  Optionally, "isprivate" can be
    set to doctest.is_private to skip over functions marked as private
    using the underscore naming convention; see its docs for details.

    Advanced tomfoolery:  testmod runs methods of a local instance of
    class doctest.Tester, then merges the results into (or creates)
    global Tester instance doctest.master.  Methods of doctest.master
    can be called directly too, if you want to do something unusual.
    Passing report=0 to testmod is especially useful then, to delay
    displaying a summary.  Invoke doctest.master.summarize(verbose)
    when you're done fiddling.
    """
    global master

    if isprivate is not None:
        warnings.warn("the isprivate argument is deprecated; "
                      "examine DocTestFinder.find() lists instead",
                      DeprecationWarning)

    # If no module was given, then use __main__.
    if m is None:
        # DWA - m will still be None if this wasn't invoked from the command
        # line, in which case the following TypeError is about as good an error
        # as we should expect
        m = sys.modules.get('__main__')

    # Check that we were actually given a module.
    if not inspect.ismodule(m):
        raise TypeError("testmod: module required; %r" % (m,))

    # If no name was given, then use the module's name.
    if name is None:
        name = m.__name__

    # Find, parse, and run all tests in the given module.
    finder = DocTestFinder(_namefilter=isprivate, exclude_empty=exclude_empty)

    if raise_on_error:
        # DebugRunner raises on the first failure or unexpected exception,
        # leaving state intact for post-mortem debugging.
        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)

    for test in finder.find(m, name, globs=globs, extraglobs=extraglobs):
        runner.run(test)

    if report:
        runner.summarize()

    # Accumulate results into the module-global "master" runner so repeated
    # testmod() calls can be summarized together.
    if master is None:
        master = runner
    else:
        master.merge(runner)

    return runner.failures, runner.tries
def testfile(filename, module_relative=True, name=None, package=None,
             globs=None, verbose=None, report=True, optionflags=0,
             extraglobs=None, raise_on_error=False, parser=DocTestParser()):
    """
    Test examples in the given file.  Return (#failures, #tests).

    Optional keyword arg "module_relative" specifies how filenames
    should be interpreted:

      - If "module_relative" is True (the default), then "filename"
        specifies a module-relative path.  By default, this path is
        relative to the calling module's directory; but if the
        "package" argument is specified, then it is relative to that
        package.  To ensure os-independence, "filename" should use
        "/" characters to separate path segments, and should not
        be an absolute path (i.e., it may not begin with "/").

      - If "module_relative" is False, then "filename" specifies an
        os-specific path.  The path may be absolute or relative (to
        the current working directory).

    Optional keyword arg "name" gives the name of the test; by default
    use the file's basename.

    Optional keyword argument "package" is a Python package or the
    name of a Python package whose directory should be used as the
    base directory for a module relative filename.  If no package is
    specified, then the calling module's directory is used as the base
    directory for module relative filenames.  It is an error to
    specify "package" if "module_relative" is False.

    Optional keyword arg "globs" gives a dict to be used as the globals
    when executing examples; by default, use {}.  A copy of this dict
    is actually used for each docstring, so that each docstring's
    examples start with a clean slate.

    Optional keyword arg "extraglobs" gives a dictionary that should be
    merged into the globals that are used to execute examples.  By
    default, no extra globals are used.

    Optional keyword arg "verbose" prints lots of stuff if true, prints
    only failures if false; by default, it's true iff "-v" is in sys.argv.

    Optional keyword arg "report" prints a summary at the end when true,
    else prints nothing at the end.  In verbose mode, the summary is
    detailed, else very brief (in fact, empty if all tests passed).

    Optional keyword arg "optionflags" or's together module constants,
    and defaults to 0.  Possible values (see the docs for details):

        DONT_ACCEPT_TRUE_FOR_1
        DONT_ACCEPT_BLANKLINE
        NORMALIZE_WHITESPACE
        ELLIPSIS
        IGNORE_EXCEPTION_DETAIL
        REPORT_UDIFF
        REPORT_CDIFF
        REPORT_NDIFF
        REPORT_ONLY_FIRST_FAILURE

    Optional keyword arg "raise_on_error" raises an exception on the
    first unexpected exception or failure.  This allows failures to be
    post-mortem debugged.

    Optional keyword arg "parser" specifies a DocTestParser (or
    subclass) that should be used to extract tests from the files.

    Advanced tomfoolery:  testmod runs methods of a local instance of
    class doctest.Tester, then merges the results into (or creates)
    global Tester instance doctest.master.  Methods of doctest.master
    can be called directly too, if you want to do something unusual.
    Passing report=0 to testmod is especially useful then, to delay
    displaying a summary.  Invoke doctest.master.summarize(verbose)
    when you're done fiddling.
    """
    global master

    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")

    # Relativize the path
    if module_relative:
        package = _normalize_module(package)
        filename = _module_relative_path(package, filename)

    # If no name was given, then use the file's name.
    if name is None:
        name = os.path.basename(filename)

    # Assemble the globals.  Copy the caller's dict so the examples can't
    # mutate it.
    if globs is None:
        globs = {}
    else:
        globs = globs.copy()
    if extraglobs is not None:
        globs.update(extraglobs)

    if raise_on_error:
        # DebugRunner raises on the first failure or unexpected exception.
        runner = DebugRunner(verbose=verbose, optionflags=optionflags)
    else:
        runner = DocTestRunner(verbose=verbose, optionflags=optionflags)

    # Read the file, convert it to a test, and run it.  Close the handle
    # explicitly instead of leaking it until garbage collection.
    f = open(filename)
    try:
        s = f.read()
    finally:
        f.close()
    test = parser.get_doctest(s, globs, name, filename, 0)
    runner.run(test)

    if report:
        runner.summarize()

    # Merge results into the module-global "master" runner (see testmod).
    if master is None:
        master = runner
    else:
        master.merge(runner)

    return runner.failures, runner.tries
def run_docstring_examples(f, globs, verbose=False, name="NoName",
                           compileflags=None, optionflags=0):
    """
    Test examples in the given object's docstring (`f`), using `globs`
    as globals.  Optional argument `name` is used in failure messages.
    If the optional argument `verbose` is true, then generate output
    even if there are no failures.

    `compileflags` gives the set of flags that should be used by the
    Python compiler when running the examples.  If not specified, then
    it will default to the set of future-import flags that apply to
    `globs`.

    Optional keyword arg `optionflags` specifies options for the
    testing and output.  See the documentation for `testmod` for more
    information.

    Note: results are reported via the runner's own output only; this
    function returns None and does not merge into the global master.
    """
    # Find, parse, and run all tests in the given object's docstring.
    # recurse=False: do not descend into contained objects.
    finder = DocTestFinder(verbose=verbose, recurse=False)
    runner = DocTestRunner(verbose=verbose, optionflags=optionflags)
    for test in finder.find(f, name, globs=globs):
        runner.run(test, compileflags=compileflags)
######################################################################
## 7. Tester
######################################################################
# This is provided only for backwards compatibility.  It's not
# actually used in any way.

class Tester:
    # Deprecated pre-2.4 doctest interface: a thin stateful wrapper over
    # DocTestFinder/DocTestRunner.  Emits a DeprecationWarning on creation.
    def __init__(self, mod=None, globs=None, verbose=None,
                 isprivate=None, optionflags=0):
        warnings.warn("class Tester is deprecated; "
                      "use class doctest.DocTestRunner instead",
                      DeprecationWarning, stacklevel=2)
        if mod is None and globs is None:
            raise TypeError("Tester.__init__: must specify mod or globs")
        if mod is not None and not inspect.ismodule(mod):
            raise TypeError("Tester.__init__: mod must be a module; %r" %
                            (mod,))
        if globs is None:
            globs = mod.__dict__
        self.globs = globs
        self.verbose = verbose
        self.isprivate = isprivate
        self.optionflags = optionflags
        self.testfinder = DocTestFinder(_namefilter=isprivate)
        self.testrunner = DocTestRunner(verbose=verbose,
                                        optionflags=optionflags)

    def runstring(self, s, name):
        # Parse `s` as a doctest and run it; returns (#failures, #tries).
        test = DocTestParser().get_doctest(s, self.globs, name, None, None)
        if self.verbose:
            print "Running string", name
        (f,t) = self.testrunner.run(test)
        if self.verbose:
            print f, "of", t, "examples failed in string", name
        return (f,t)

    def rundoc(self, object, name=None, module=None):
        # Run every doctest found in `object`, summing failure/try counts.
        f = t = 0
        tests = self.testfinder.find(object, name, module=module,
                                     globs=self.globs)
        for test in tests:
            (f2, t2) = self.testrunner.run(test)
            (f,t) = (f+f2, t+t2)
        return (f,t)

    def rundict(self, d, name, module=None):
        import new
        # Wrap the dict in a synthetic module so rundoc can walk it.
        m = new.module(name)
        m.__dict__.update(d)
        # module=False tells the finder not to filter by defining module.
        if module is None:
            module = False
        return self.rundoc(m, name, module)

    def run__test__(self, d, name):
        import new
        # Expose `d` as a synthetic module's __test__ dict (see testmod).
        m = new.module(name)
        m.__test__ = d
        return self.rundoc(m, name)

    def summarize(self, verbose=None):
        return self.testrunner.summarize(verbose)

    def merge(self, other):
        self.testrunner.merge(other.testrunner)
######################################################################
## 8. Unittest Support
######################################################################

# Default reporting flags applied by DocTestCase.runTest when a test's own
# optionflags contain no reporting bits; change via set_unittest_reportflags.
_unittest_reportflags = 0
def set_unittest_reportflags(flags):
    """Sets the unittest option flags.

    The old flag is returned so that a runner could restore the old
    value if it wished to:

      >>> old = _unittest_reportflags
      >>> set_unittest_reportflags(REPORT_NDIFF |
      ...                          REPORT_ONLY_FIRST_FAILURE) == old
      True

      >>> import doctest
      >>> doctest._unittest_reportflags == (REPORT_NDIFF |
      ...                                   REPORT_ONLY_FIRST_FAILURE)
      True

    Only reporting flags can be set:

      >>> set_unittest_reportflags(ELLIPSIS)
      Traceback (most recent call last):
      ...
      ValueError: ('Only reporting flags allowed', 8)

      >>> set_unittest_reportflags(old) == (REPORT_NDIFF |
      ...                                   REPORT_ONLY_FIRST_FAILURE)
      True
    """
    global _unittest_reportflags

    # Reject any bit outside the REPORTING_FLAGS mask.
    if (flags & REPORTING_FLAGS) != flags:
        raise ValueError("Only reporting flags allowed", flags)
    old = _unittest_reportflags
    _unittest_reportflags = flags
    return old
class DocTestCase(unittest.TestCase):
    # Adapts a single DocTest object to the unittest.TestCase protocol.
    def __init__(self, test, optionflags=0, setUp=None, tearDown=None,
                 checker=None, runner=DocTestRunner):
        unittest.TestCase.__init__(self)
        self._dt_optionflags = optionflags
        self._dt_checker = checker
        self._dt_test = test
        self._dt_setUp = setUp
        self._dt_tearDown = tearDown
        # Runner *class*, not instance; instantiated per runTest() call.
        self._dt_runner = runner

    def setUp(self):
        # User-supplied hook receives the DocTest so it can seed test.globs.
        test = self._dt_test
        if self._dt_setUp is not None:
            self._dt_setUp(test)

    def tearDown(self):
        test = self._dt_test
        if self._dt_tearDown is not None:
            self._dt_tearDown(test)
        # Drop the test's globals so fixtures can't leak between tests.
        test.globs.clear()

    def runTest(self):
        test = self._dt_test
        old = sys.stdout
        new = StringIO()
        optionflags = self._dt_optionflags
        if not (optionflags & REPORTING_FLAGS):
            # The option flags don't include any reporting flags,
            # so add the default reporting flags
            optionflags |= _unittest_reportflags
        runner = self._dt_runner(optionflags=optionflags,
                                 checker=self._dt_checker, verbose=False)
        try:
            runner.DIVIDER = "-"*70
            # Failure reports are routed into `new` via out=new.write.
            # NOTE(review): sys.stdout is restored below but never
            # reassigned here; presumably runner.run manages the
            # redirection internally -- confirm against DocTestRunner.run.
            failures, tries = runner.run(
                test, out=new.write, clear_globs=False)
        finally:
            sys.stdout = old
        if failures:
            raise self.failureException(self.format_failure(new.getvalue()))

    def format_failure(self, err):
        # Build the message attached to failureException: test name,
        # file/line location, and the runner's captured report (`err`).
        test = self._dt_test
        if test.lineno is None:
            lineno = 'unknown line number'
        else:
            lineno = '%s' % test.lineno
        # Last component of the dotted test name.
        lname = '.'.join(test.name.split('.')[-1:])
        return ('Failed doctest test for %s\n'
                ' File "%s", line %s, in %s\n\n%s'
                % (test.name, test.filename, lineno, lname, err)
                )

    def debug(self):
        r"""Run the test case without results and without catching exceptions

        The unit test framework includes a debug method on test cases
        and test suites to support post-mortem debugging.  The test code
        is run in such a way that errors are not caught.  This way a
        caller can catch the errors and initiate post-mortem debugging.

        The DocTestCase provides a debug method that raises
        UnexpectedException errors if there is an unexpected
        exception:

          >>> test = DocTestParser().get_doctest('>>> raise KeyError\n42',
          ...                {}, 'foo', 'foo.py', 0)
          >>> case = DocTestCase(test)
          >>> try:
          ...     case.debug()
          ... except UnexpectedException, failure:
          ...     pass

        The UnexpectedException contains the test, the example, and
        the original exception:

          >>> failure.test is test
          True

          >>> failure.example.want
          '42\n'

          >>> exc_info = failure.exc_info
          >>> raise exc_info[0], exc_info[1], exc_info[2]
          Traceback (most recent call last):
          ...
          KeyError

        If the output doesn't match, then a DocTestFailure is raised:

          >>> test = DocTestParser().get_doctest('''
          ...      >>> x = 1
          ...      >>> x
          ...      2
          ...      ''', {}, 'foo', 'foo.py', 0)
          >>> case = DocTestCase(test)

          >>> try:
          ...    case.debug()
          ... except DocTestFailure, failure:
          ...    pass

        DocTestFailure objects provide access to the test:

          >>> failure.test is test
          True

        As well as to the example:

          >>> failure.example.want
          '2\n'

        and the actual output:

          >>> failure.got
          '1\n'
        """
        self.setUp()
        # DebugRunner raises on the first problem, so errors propagate to
        # the caller; tearDown only runs when all examples pass.
        runner = DebugRunner(optionflags=self._dt_optionflags,
                             checker=self._dt_checker, verbose=False)
        runner.run(self._dt_test)
        self.tearDown()

    def id(self):
        return self._dt_test.name

    def __repr__(self):
        # "<leaf name> (<dotted container path>)", like unittest's repr.
        name = self._dt_test.name.split('.')
        return "%s (%s)" % (name[-1], '.'.join(name[:-1]))
    __str__ = __repr__

    def shortDescription(self):
        return "Doctest: " + self._dt_test.name
def DocTestSuite(module=None, globs=None, extraglobs=None, test_finder=None,
                 test_class=DocTestCase, **options):
    """
    Convert doctest tests for a module to a unittest test suite.

    This converts each documentation string in a module that
    contains doctest tests to a unittest test case.  If any of the
    tests in a doc string fail, then the test case fails.  An exception
    is raised showing the name of the file containing the test and a
    (sometimes approximate) line number.

    The `module` argument provides the module to be tested.  The argument
    can be either a module or a module name.

    If no argument is given, the calling module is used.

    A number of options may be provided as keyword arguments:

    setUp
      A set-up function.  This is called before running the
      tests in each file. The setUp function will be passed a DocTest
      object.  The setUp function can access the test globals as the
      globs attribute of the test passed.

    tearDown
      A tear-down function.  This is called after running the
      tests in each file.  The tearDown function will be passed a DocTest
      object.  The tearDown function can access the test globals as the
      globs attribute of the test passed.

    globs
      A dictionary containing initial global variables for the tests.

    optionflags
      A set of doctest option flags expressed as an integer.
    """

    if test_finder is None:
        test_finder = DocTestFinder()

    module = _normalize_module(module)
    tests = test_finder.find(module, globs=globs, extraglobs=extraglobs)
    if globs is None:
        globs = module.__dict__
    if not tests:
        # Why do we want to do this? Because it reveals a bug that might
        # otherwise be hidden.
        raise ValueError(module, "has no tests")

    tests.sort()
    suite = unittest.TestSuite()
    for test in tests:
        # Skip docstrings with no runnable examples.
        if len(test.examples) == 0:
            continue
        if not test.filename:
            filename = module.__file__
            if filename[-4:] in (".pyc", ".pyo"):
                # Point at the .py source rather than the compiled file.
                filename = filename[:-1]
            test.filename = filename
        suite.addTest(test_class(test, **options))

    return suite
class DocFileCase(DocTestCase):
    """A DocTestCase for a doctest held in a standalone text file."""

    def id(self):
        # Dotted test name with the dots flattened, e.g. "a.b" -> "a_b".
        return '_'.join(self._dt_test.name.split('.'))

    def __repr__(self):
        return self._dt_test.filename
    __str__ = __repr__

    def format_failure(self, err):
        # File-based tests have no meaningful line number; report line 0.
        doc_test = self._dt_test
        return ('Failed doctest test for %s\n File "%s", line 0\n\n%s'
                % (doc_test.name, doc_test.filename, err))
def DocFileTest(path, module_relative=True, package=None,
                globs=None, parser=DocTestParser(), **options):
    """Create a DocFileCase for the doctest file at `path`.

    See DocFileSuite for the meaning of `module_relative`, `package`,
    `globs` and `parser`; `options` are forwarded to DocFileCase.
    """
    if globs is None:
        globs = {}

    if package and not module_relative:
        raise ValueError("Package may only be specified for module-"
                         "relative paths.")

    # Relativize the path.
    if module_relative:
        package = _normalize_module(package)
        path = _module_relative_path(package, path)

    # Find the file and read it.  Close the handle explicitly instead of
    # leaking it until garbage collection.
    name = os.path.basename(path)
    doc_file = open(path)
    try:
        doc = doc_file.read()
    finally:
        doc_file.close()

    # Convert it to a test, and wrap it in a DocFileCase.
    test = parser.get_doctest(doc, globs, name, path, 0)
    return DocFileCase(test, **options)
def DocFileSuite(*paths, **kw):
    """A unittest suite for one or more doctest files.

    The path to each doctest file is given as a string; the
    interpretation of that string depends on the keyword argument
    "module_relative".

    A number of options may be provided as keyword arguments:

    module_relative
      If "module_relative" is True, then the given file paths are
      interpreted as os-independent module-relative paths.  By
      default, these paths are relative to the calling module's
      directory; but if the "package" argument is specified, then
      they are relative to that package.  To ensure os-independence,
      "filename" should use "/" characters to separate path
      segments, and may not be an absolute path (i.e., it may not
      begin with "/").

      If "module_relative" is False, then the given file paths are
      interpreted as os-specific paths.  These paths may be absolute
      or relative (to the current working directory).

    package
      A Python package or the name of a Python package whose directory
      should be used as the base directory for module relative paths.
      If "package" is not specified, then the calling module's
      directory is used as the base directory for module relative
      filenames.  It is an error to specify "package" if
      "module_relative" is False.

    setUp
      A set-up function.  This is called before running the
      tests in each file. The setUp function will be passed a DocTest
      object.  The setUp function can access the test globals as the
      globs attribute of the test passed.

    tearDown
      A tear-down function.  This is called after running the
      tests in each file.  The tearDown function will be passed a DocTest
      object.  The tearDown function can access the test globals as the
      globs attribute of the test passed.

    globs
      A dictionary containing initial global variables for the tests.

    optionflags
      A set of doctest option flags expressed as an integer.

    parser
      A DocTestParser (or subclass) that should be used to extract
      tests from the files.
    """
    suite = unittest.TestSuite()

    # We do this here so that _normalize_module is called at the right
    # level.  If it were called in DocFileTest, then this function
    # would be the caller and we might guess the package incorrectly.
    if kw.get('module_relative', True):
        kw['package'] = _normalize_module(kw.get('package'))

    for path in paths:
        suite.addTest(DocFileTest(path, **kw))

    return suite
######################################################################
## 9. Debugging Support
######################################################################
def script_from_examples(s):
    r"""Extract script from text with examples.

    Converts text with examples to a Python script.  Example input is
    converted to regular code.  Example output and all other words
    are converted to comments:

       >>> text = '''
       ...       Here are examples of simple math.
       ...
       ...           Python has super accurate integer addition
       ...
       ...           >>> 2 + 2
       ...           5
       ...
       ...           And very friendly error messages:
       ...
       ...           >>> 1/0
       ...           To Infinity
       ...           And
       ...           Beyond
       ...
       ...           You can use logic if you want:
       ...
       ...           >>> if 0:
       ...           ...    blah
       ...           ...    blah
       ...           ...
       ...
       ...           Ho hum
       ...           '''

       >>> print script_from_examples(text)
       # Here are examples of simple math.
       #
       #     Python has super accurate integer addition
       #
       2 + 2
       # Expected:
       ## 5
       #
       #     And very friendly error messages:
       #
       1/0
       # Expected:
       ## To Infinity
       ## And
       ## Beyond
       #
       #     You can use logic if you want:
       #
       if 0:
          blah
          blah
       #
       # Ho hum
       """
    output = []
    for piece in DocTestParser().parse(s):
        if isinstance(piece, Example):
            # Add the example's source code (strip trailing NL)
            output.append(piece.source[:-1])
            # Add the expected output:
            want = piece.want
            if want:
                output.append('# Expected:')
                output += ['## '+l for l in want.split('\n')[:-1]]
        else:
            # Add non-example text.
            output += [_comment_line(l)
                       for l in piece.split('\n')[:-1]]

    # Trim junk on both ends.
    while output and output[-1] == '#':
        output.pop()
    while output and output[0] == '#':
        output.pop(0)
    # Combine the output, and return it.
    return '\n'.join(output)
def testsource(module, name):
    """Extract the test sources from a doctest docstring as a script.

    Provide the module (or dotted name of the module) containing the
    test to be debugged and the name (within the module) of the object
    with the doc string with tests to be debugged.
    """
    module = _normalize_module(module)
    # Locate the doctest whose dotted name matches exactly.
    matches = [t for t in DocTestFinder().find(module) if t.name == name]
    if not matches:
        raise ValueError(name, "not found in tests")
    return script_from_examples(matches[0].docstring)
def debug_src(src, pm=False, globs=None):
    """Debug a single doctest docstring, in argument `src`.

    `pm` and `globs` are forwarded to debug_script (post-mortem flag and
    the globals dict the extracted script runs in).
    """
    testsrc = script_from_examples(src)
    debug_script(testsrc, pm, globs)
def debug_script(src, pm=False, globs=None):
    "Debug a test script.  `src` is the script, as a string."
    import pdb

    # Note that tempfile.NamedTemporaryFile() cannot be used.  As the
    # docs say, a file so created cannot be opened by name a second time
    # on modern Windows boxes, and execfile() needs to open it.
    # NOTE(review): tempfile.mktemp() is race-prone (the chosen name can
    # be claimed before we open it); tolerable only because this is an
    # interactive debugging aid.
    srcfilename = tempfile.mktemp(".py", "doctestdebug")
    f = open(srcfilename, 'w')
    f.write(src)
    f.close()

    try:
        # Copy the caller's globals so the debugged script can't mutate them.
        if globs:
            globs = globs.copy()
        else:
            globs = {}

        if pm:
            # Post-mortem mode: run to the failure, then drop into pdb on
            # the traceback.
            try:
                execfile(srcfilename, globs, globs)
            except:
                print sys.exc_info()[1]
                pdb.post_mortem(sys.exc_info()[2])
        else:
            # Note that %r is vital here.  '%s' instead can, e.g., cause
            # backslashes to get treated as metacharacters on Windows.
            pdb.run("execfile(%r)" % srcfilename, globs, globs)
    finally:
        os.remove(srcfilename)
def debug(module, name, pm=False):
    """Debug a single doctest docstring.

    Provide the module (or dotted name of the module) containing the
    test to be debugged and the name (within the module) of the object
    with the docstring with tests to be debugged.
    """
    module = _normalize_module(module)
    # Extract the named doctest as a script and run it under pdb, using
    # the module's own namespace as globals.
    debug_script(testsource(module, name), pm, module.__dict__)
######################################################################
## 10. Example Usage
######################################################################
class _TestClass:
"""
A pointless class, for sanity-checking of docstring testing.
Methods:
square()
get()
>>> _TestClass(13).get() + _TestClass(-12).get()
1
>>> hex(_TestClass(13).square().get())
'0xa9'
"""
def __init__(self, val):
"""val -> _TestClass object with associated value val.
>>> t = _TestClass(123)
>>> print t.get()
123
"""
self.val = val
def square(self):
"""square() -> square TestClass's associated value
>>> _TestClass(13).square().get()
169
"""
self.val = self.val ** 2
return self
def get(self):
"""get() -> return TestClass's associated value.
>>> x = _TestClass(-42)
>>> print x.get()
-42
"""
return self.val
__test__ = {"_TestClass": _TestClass,
"string": r"""
Example of a string object, searched as-is.
>>> x = 1; y = 2
>>> x + y, x * y
(3, 2)
""",
"bool-int equivalence": r"""
In 2.2, boolean expressions displayed
0 or 1. By default, we still accept
them. This can be disabled by passing
DONT_ACCEPT_TRUE_FOR_1 to the new
optionflags argument.
>>> 4 == 4
1
>>> 4 == 4
True
>>> 4 > 4
0
>>> 4 > 4
False
""",
"blank lines": r"""
Blank lines can be marked with <BLANKLINE>:
>>> print 'foo\n\nbar\n'
foo
<BLANKLINE>
bar
<BLANKLINE>
""",
"ellipsis": r"""
If the ellipsis flag is used, then '...' can be used to
elide substrings in the desired output:
>>> print range(1000) #doctest: +ELLIPSIS
[0, 1, 2, ..., 999]
""",
"whitespace normalization": r"""
If the whitespace normalization flag is used, then
differences in whitespace are ignored.
>>> print range(30) #doctest: +NORMALIZE_WHITESPACE
[0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14,
15, 16, 17, 18, 19, 20, 21, 22, 23, 24, 25, 26,
27, 28, 29]
""",
}
def _test():
    # Run this module's own doctests through the unittest integration.
    unittest.TextTestRunner().run(DocTestSuite())


if __name__ == "__main__":
    _test()
| Python |
"""
Django Unit Test and Doctest framework.
"""
from django.test.client import Client
from django.test.testcases import TestCase
| Python |
import sys
from cStringIO import StringIO
from urlparse import urlparse
from django.conf import settings
from django.core.handlers.base import BaseHandler
from django.core.handlers.wsgi import WSGIRequest
from django.core.signals import got_request_exception
from django.dispatch import dispatcher
from django.http import urlencode, SimpleCookie
from django.test import signals
from django.utils.functional import curry
# Fixed MIME multipart boundary used for every test POST; any token that
# never appears in the payload would do.
BOUNDARY = 'BoUnDaRyStRiNg'
MULTIPART_CONTENT = 'multipart/form-data; boundary=%s' % BOUNDARY
class ClientHandler(BaseHandler):
    """
    A HTTP Handler that can be used for testing purposes.
    Uses the WSGI interface to compose requests, but returns
    the raw HttpResponse object rather than a WSGI iterable.
    """
    def __call__(self, environ):
        from django.conf import settings
        from django.core import signals

        # Set up middleware if needed. We couldn't do this earlier, because
        # settings weren't available.
        if self._request_middleware is None:
            self.load_middleware()

        dispatcher.send(signal=signals.request_started)
        try:
            request = WSGIRequest(environ)
            response = self.get_response(request)

            # Apply response middleware
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
        finally:
            # Always signal completion, even if view/middleware raised,
            # so request_finished listeners (e.g. DB cleanup) still run.
            dispatcher.send(signal=signals.request_finished)

        return response
def store_rendered_templates(store, signal, sender, template, context):
    "A utility function for storing templates and contexts that are rendered"
    # Append to the 'template'/'context' lists in `store`, creating each
    # list on first use so a fresh dict works as the accumulator.
    for key, value in (('template', template), ('context', context)):
        store.setdefault(key, []).append(value)
def encode_multipart(boundary, data):
    """
    A simple method for encoding multipart POST data from a dictionary of
    form values.

    The key will be used as the form data name; the value will be transmitted
    as content. If the value is a file, the contents of the file will be sent
    as an application/octet-stream; otherwise, str(value) will be sent.
    """
    lines = []
    for (key, value) in data.items():
        # `file` is the Python 2 builtin file type.
        if isinstance(value, file):
            # NOTE(review): this emits an *empty* form field named `key`
            # followed by a file field named "<key>_file" -- confirm the
            # server side expects this two-part layout.
            lines.extend([
                '--' + boundary,
                'Content-Disposition: form-data; name="%s"' % key,
                '',
                '--' + boundary,
                'Content-Disposition: form-data; name="%s_file"; filename="%s"' % (key, value.name),
                'Content-Type: application/octet-stream',
                '',
                value.read()
            ])
        elif hasattr(value, '__iter__'):
            # Iterable value (e.g. multi-select): one part per item, all
            # sharing the same field name.  (Python 2 strings have no
            # __iter__, so they fall through to the plain branch.)
            for item in value:
                lines.extend([
                    '--' + boundary,
                    'Content-Disposition: form-data; name="%s"' % key,
                    '',
                    str(item)
                ])
        else:
            lines.extend([
                '--' + boundary,
                'Content-Disposition: form-data; name="%s"' % key,
                '',
                str(value)
            ])

    # Closing boundary plus trailing CRLF.
    lines.extend([
        '--' + boundary + '--',
        '',
    ])
    return '\r\n'.join(lines)
class Client:
"""
A class that can act as a client for testing purposes.
It allows the user to compose GET and POST requests, and
obtain the response that the server gave to those requests.
The server Response objects are annotated with the details
of the contexts and templates that were rendered during the
process of serving the request.
Client objects are stateful - they will retain cookie (and
thus session) details for the lifetime of the Client instance.
This is not intended as a replacement for Twill/Selenium or
the like - it is here to allow testing against the
contexts and templates produced by a view, rather than the
HTML rendered to the end-user.
"""
def __init__(self, **defaults):
self.handler = ClientHandler()
self.defaults = defaults
self.cookies = SimpleCookie()
self.session = {}
self.exc_info = None
def store_exc_info(self, *args, **kwargs):
"""
Utility method that can be used to store exceptions when they are
generated by a view.
"""
self.exc_info = sys.exc_info()
def request(self, **request):
"""
The master request method. Composes the environment dictionary
and passes to the handler, returning the result of the handler.
Assumes defaults for the query environment, which can be overridden
using the arguments to the request.
"""
environ = {
'HTTP_COOKIE': self.cookies,
'PATH_INFO': '/',
'QUERY_STRING': '',
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': None,
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
'SERVER_PROTOCOL': 'HTTP/1.1',
}
environ.update(self.defaults)
environ.update(request)
# Curry a data dictionary into an instance of
# the template renderer callback function
data = {}
on_template_render = curry(store_rendered_templates, data)
dispatcher.connect(on_template_render, signal=signals.template_rendered)
# Capture exceptions created by the handler
dispatcher.connect(self.store_exc_info, signal=got_request_exception)
response = self.handler(environ)
# Add any rendered template detail to the response
# If there was only one template rendered (the most likely case),
# flatten the list to a single element
for detail in ('template', 'context'):
if data.get(detail):
if len(data[detail]) == 1:
setattr(response, detail, data[detail][0]);
else:
setattr(response, detail, data[detail])
else:
setattr(response, detail, None)
# Look for a signalled exception and reraise it
if self.exc_info:
raise self.exc_info[1], None, self.exc_info[2]
# Update persistent cookie and session data
if response.cookies:
self.cookies.update(response.cookies)
if 'django.contrib.sessions' in settings.INSTALLED_APPS:
from django.contrib.sessions.middleware import SessionWrapper
cookie = self.cookies.get(settings.SESSION_COOKIE_NAME, None)
if cookie:
self.session = SessionWrapper(cookie.value)
return response
def get(self, path, data={}, **extra):
"Request a response from the server using GET."
r = {
'CONTENT_LENGTH': None,
'CONTENT_TYPE': 'text/html; charset=utf-8',
'PATH_INFO': path,
'QUERY_STRING': urlencode(data),
'REQUEST_METHOD': 'GET',
}
r.update(extra)
return self.request(**r)
def post(self, path, data={}, content_type=MULTIPART_CONTENT, **extra):
"Request a response from the server using POST."
if content_type is MULTIPART_CONTENT:
post_data = encode_multipart(BOUNDARY, data)
else:
post_data = data
r = {
'CONTENT_LENGTH': len(post_data),
'CONTENT_TYPE': content_type,
'PATH_INFO': path,
'REQUEST_METHOD': 'POST',
'wsgi.input': StringIO(post_data),
}
r.update(extra)
return self.request(**r)
def login(self, path, username, password, **extra):
"""
A specialized sequence of GET and POST to log into a view that
is protected by a @login_required access decorator.
path should be the URL of the page that is login protected.
Returns the response from GETting the requested URL after
login is complete. Returns False if login process failed.
"""
# First, GET the page that is login protected.
# This page will redirect to the login page.
response = self.get(path)
if response.status_code != 302:
return False
_, _, login_path, _, data, _= urlparse(response['Location'])
next = data.split('=')[1]
# Second, GET the login page; required to set up cookies
response = self.get(login_path, **extra)
if response.status_code != 200:
return False
# Last, POST the login data.
form_data = {
'username': username,
'password': password,
'next' : next,
}
response = self.post(login_path, data=form_data, **extra)
# Login page should 302 redirect to the originally requested page
if (response.status_code != 302 or
urlparse(response['Location'])[2] != path):
return False
# Since we are logged in, request the actual page again
return self.get(path)
| Python |
"""
A library of validators that return None and raise ValidationError when the
provided data isn't valid.
Validators may be callable classes, and they may have an 'always_test'
attribute. If an 'always_test' attribute exists (regardless of value), the
validator will *always* be run, regardless of whether its associated
form field is required.
"""
import urllib2
from django.conf import settings
from django.utils.translation import gettext, gettext_lazy, ngettext
from django.utils.functional import Promise, lazy
import re
# Date and time component patterns shared by the ANSI date/time regexes.
_datere = r'\d{4}-\d{1,2}-\d{1,2}'
_timere = r'(?:[01]?[0-9]|2[0-3]):[0-5][0-9](?::[0-5][0-9])?'
alnum_re = re.compile(r'^\w+$')
alnumurl_re = re.compile(r'^[-\w/]+$')
ansi_date_re = re.compile('^%s$' % _datere)
ansi_time_re = re.compile('^%s$' % _timere)
ansi_datetime_re = re.compile('^%s %s$' % (_datere, _timere))
email_re = re.compile(
    r"(^[-!#$%&'*+/=?^_`{}|~0-9A-Z]+(\.[-!#$%&'*+/=?^_`{}|~0-9A-Z]+)*" # dot-atom
    # quoted-string; the escaped-char alternative must allow \001-\011
    # (through TAB). The previous "\\[\001-011..." lacked the backslash on
    # 011, so the class matched the literal digits '0' and '1' instead.
    r'|^"([\001-\010\013\014\016-\037!#-\[\]-\177]|\\[\001-\011\013\014\016-\177])*"' # quoted-string
    r')@(?:[A-Z0-9-]+\.)+[A-Z]{2,6}$', re.IGNORECASE) # domain
integer_re = re.compile(r'^-?\d+$')
ip4_re = re.compile(r'^(25[0-5]|2[0-4]\d|[0-1]?\d?\d)(\.(25[0-5]|2[0-4]\d|[0-1]?\d?\d)){3}$')
phone_re = re.compile(r'^[A-PR-Y0-9]{3}-[A-PR-Y0-9]{3}-[A-PR-Y0-9]{4}$', re.IGNORECASE)
slug_re = re.compile(r'^[-\w]+$')
url_re = re.compile(r'^https?://\S+$')
# Lazily interpolates a (possibly lazy) format string with its argument.
lazy_inter = lazy(lambda a,b: str(a) % b, str)
class ValidationError(Exception):
    """
    Raised when a validator rejects its input. self.messages is always a
    list, whether the error was constructed from one message or several.
    """
    def __init__(self, message):
        "ValidationError can be passed a string or a list."
        if not isinstance(message, list):
            assert isinstance(message, (basestring, Promise)), ("%s should be a string" % repr(message))
            message = [message]
        self.messages = message
    def __str__(self):
        # Without an explicit __str__, printing an instance would raise:
        # AttributeError: ValidationError instance has no attribute 'args'
        return str(self.messages)
class CriticalValidationError(Exception):
    """
    Like ValidationError, but signals an error severe enough that further
    validation of the field should stop.
    """
    def __init__(self, message):
        "ValidationError can be passed a string or a list."
        if not isinstance(message, list):
            assert isinstance(message, (basestring, Promise)), ("'%s' should be a string" % message)
            message = [message]
        self.messages = message
    def __str__(self):
        return str(self.messages)
def isAlphaNumeric(field_data, all_data):
    "Validates that field_data contains only letters, digits and underscores."
    if alnum_re.search(field_data):
        return
    raise ValidationError(gettext("This value must contain only letters, numbers and underscores."))
def isAlphaNumericURL(field_data, all_data):
    "Validates that field_data is a URL path: letters, digits, _, - and /."
    if alnumurl_re.search(field_data):
        return
    raise ValidationError(gettext("This value must contain only letters, numbers, underscores, dashes or slashes."))
def isSlug(field_data, all_data):
    "Validates that field_data is a slug: letters, digits, underscores, hyphens."
    if slug_re.search(field_data):
        return
    raise ValidationError(gettext("This value must contain only letters, numbers, underscores or hyphens."))
def isLowerCase(field_data, all_data):
    "Validates that field_data contains no uppercase letters."
    if field_data != field_data.lower():
        raise ValidationError(gettext("Uppercase letters are not allowed here."))
def isUpperCase(field_data, all_data):
    "Validates that field_data contains no lowercase letters."
    if field_data != field_data.upper():
        raise ValidationError(gettext("Lowercase letters are not allowed here."))
def isCommaSeparatedIntegerList(field_data, all_data):
    "Validates that field_data is a comma-separated list of integer literals."
    for chunk in field_data.split(','):
        try:
            int(chunk)
        except ValueError:
            raise ValidationError(gettext("Enter only digits separated by commas."))
def isCommaSeparatedEmailList(field_data, all_data):
    """
    Checks that field_data is a string of e-mail addresses separated by commas.
    Blank field_data values will not throw a validation error, and whitespace
    is allowed around the commas.
    """
    for candidate in field_data.split(','):
        try:
            isValidEmail(candidate.strip(), '')
        except ValidationError:
            raise ValidationError(gettext("Enter valid e-mail addresses separated by commas."))
def isValidIPAddress4(field_data, all_data):
    "Validates that field_data is a dotted-quad IPv4 address."
    if ip4_re.search(field_data):
        return
    raise ValidationError(gettext("Please enter a valid IP address."))
def isNotEmpty(field_data, all_data):
    "Validates that field_data is not blank or whitespace-only."
    if not field_data.strip():
        raise ValidationError(gettext("Empty values are not allowed here."))
def isOnlyDigits(field_data, all_data):
    "Validates that field_data consists solely of digits."
    if field_data.isdigit():
        return
    raise ValidationError(gettext("Non-numeric characters aren't allowed here."))
def isNotOnlyDigits(field_data, all_data):
    "Validates that field_data is not made up exclusively of digits."
    if not field_data.isdigit():
        return
    raise ValidationError(gettext("This value can't be comprised solely of digits."))
def isInteger(field_data, all_data):
    "Validates a whole number; unlike isOnlyDigits, a leading '-' is accepted."
    if integer_re.search(field_data):
        return
    raise ValidationError(gettext("Enter a whole number."))
def isOnlyLetters(field_data, all_data):
    "Validates that field_data is purely alphabetical."
    if not field_data.isalpha():
        raise ValidationError(gettext("Only alphabetical characters are allowed here."))
def _isValidDate(date_string):
"""
A helper function used by isValidANSIDate and isValidANSIDatetime to
check if the date is valid. The date string is assumed to already be in
YYYY-MM-DD format.
"""
from datetime import date
# Could use time.strptime here and catch errors, but datetime.date below
# produces much friendlier error messages.
year, month, day = map(int, date_string.split('-'))
# This check is needed because strftime is used when saving the date
# value to the database, and strftime requires that the year be >=1900.
if year < 1900:
raise ValidationError, gettext('Year must be 1900 or later.')
try:
date(year, month, day)
except ValueError, e:
msg = gettext('Invalid date: %s') % gettext(str(e))
raise ValidationError, msg
def isValidANSIDate(field_data, all_data):
    "Validates a YYYY-MM-DD string that also denotes a real calendar date."
    if not ansi_date_re.search(field_data):
        raise ValidationError(gettext('Enter a valid date in YYYY-MM-DD format.'))
    _isValidDate(field_data)
def isValidANSITime(field_data, all_data):
    "Validates an HH:MM (optionally HH:MM:SS) time string."
    if ansi_time_re.search(field_data):
        return
    raise ValidationError(gettext('Enter a valid time in HH:MM format.'))
def isValidANSIDatetime(field_data, all_data):
    "Validates a 'YYYY-MM-DD HH:MM' string with a real calendar date."
    if not ansi_datetime_re.search(field_data):
        raise ValidationError(gettext('Enter a valid date/time in YYYY-MM-DD HH:MM format.'))
    _isValidDate(field_data.split()[0])
def isValidEmail(field_data, all_data):
    "Validates field_data against the module-level e-mail address regex."
    if email_re.search(field_data):
        return
    raise ValidationError(gettext('Enter a valid e-mail address.'))
def isValidImage(field_data, all_data):
    """
    Checks that the file-upload field data contains a valid image (GIF, JPG,
    PNG, possibly others -- whatever the Python Imaging Library supports).
    """
    from PIL import Image
    from cStringIO import StringIO
    try:
        content = field_data['content']
    except TypeError:
        # field_data isn't an upload dict at all.
        raise ValidationError(gettext("No file was submitted. Check the encoding type on the form."))
    try:
        Image.open(StringIO(content))
    except IOError:
        # Python Imaging Library doesn't recognize it as an image.
        raise ValidationError(gettext("Upload a valid image. The file you uploaded was either not an image or a corrupted image."))
def isValidImageURL(field_data, all_data):
    "Validates that field_data is a URL serving a JPEG, GIF or PNG image."
    checker = URLMimeTypeCheck(('image/jpeg', 'image/gif', 'image/png'))
    try:
        checker(field_data, all_data)
    except URLMimeTypeCheck.InvalidContentType:
        raise ValidationError(gettext("The URL %s does not point to a valid image.") % field_data)
def isValidPhone(field_data, all_data):
    "Validates a U.S.-style XXX-XXX-XXXX phone number (letters allowed)."
    if phone_re.search(field_data):
        return
    raise ValidationError(gettext('Phone numbers must be in XXX-XXX-XXXX format. "%s" is invalid.') % field_data)
def isValidQuicktimeVideoURL(field_data, all_data):
    "Checks that the given URL is a video that can be played by QuickTime (qt, mpeg)"
    checker = URLMimeTypeCheck(('video/quicktime', 'video/mpeg',))
    try:
        checker(field_data, all_data)
    except URLMimeTypeCheck.InvalidContentType:
        raise ValidationError(gettext("The URL %s does not point to a valid QuickTime video.") % field_data)
def isValidURL(field_data, all_data):
    "Validates that field_data is syntactically an http(s) URL."
    if url_re.search(field_data):
        return
    raise ValidationError(gettext("A valid URL is required."))
def isValidHTML(field_data, all_data):
    "Validates field_data against the W3C online markup validator service."
    import urllib, urllib2
    try:
        u = urllib2.urlopen('http://validator.w3.org/check',
            urllib.urlencode({'fragment': field_data, 'output': 'xml'}))
    except:
        # Validator or Internet connection is unavailable. Fail silently.
        return
    if u.headers.get('x-w3c-validator-status', 'Invalid') == 'Valid':
        return
    # Collect the individual <msg> error texts from the validator's XML.
    from xml.dom.minidom import parseString
    message_root = parseString(u.read()).getElementsByTagName('messages')[0]
    error_messages = [m.firstChild.wholeText for m in message_root.getElementsByTagName('msg')]
    raise ValidationError(gettext("Valid HTML is required. Specific errors are:\n%s") % "\n".join(error_messages))
def isWellFormedXml(field_data, all_data):
from xml.dom.minidom import parseString
try:
parseString(field_data)
except Exception, e: # Naked except because we're not sure what will be thrown
raise ValidationError, gettext("Badly formed XML: %s") % str(e)
def isWellFormedXmlFragment(field_data, all_data):
    "Validates an XML fragment by wrapping it in a synthetic root element."
    isWellFormedXml('<root>%s</root>' % field_data, all_data)
def isExistingURL(field_data, all_data):
try:
headers = {
"Accept" : "text/xml,application/xml,application/xhtml+xml,text/html;q=0.9,text/plain;q=0.8,image/png,*/*;q=0.5",
"Accept-Language" : "en-us,en;q=0.5",
"Accept-Charset": "ISO-8859-1,utf-8;q=0.7,*;q=0.7",
"Connection" : "close",
"User-Agent": settings.URL_VALIDATOR_USER_AGENT
}
req = urllib2.Request(field_data,None, headers)
u = urllib2.urlopen(req)
except ValueError:
raise ValidationError, _("Invalid URL: %s") % field_data
except urllib2.HTTPError, e:
# 401s are valid; they just mean authorization is required.
# 301 and 302 are redirects; they just mean look somewhere else.
if str(e.code) not in ('401','301','302'):
raise ValidationError, _("The URL %s is a broken link.") % field_data
except: # urllib2.URLError, httplib.InvalidURL, etc.
raise ValidationError, _("The URL %s is a broken link.") % field_data
def isValidUSState(field_data, all_data):
    "Checks that the given string is a valid two-letter U.S. state abbreviation"
    # Includes military (AA/AE/AP) and territory (AS/GU/...) codes.
    states = ['AA', 'AE', 'AK', 'AL', 'AP', 'AR', 'AS', 'AZ', 'CA', 'CO', 'CT', 'DC', 'DE', 'FL', 'FM', 'GA', 'GU', 'HI', 'IA', 'ID', 'IL', 'IN', 'KS', 'KY', 'LA', 'MA', 'MD', 'ME', 'MH', 'MI', 'MN', 'MO', 'MP', 'MS', 'MT', 'NC', 'ND', 'NE', 'NH', 'NJ', 'NM', 'NV', 'NY', 'OH', 'OK', 'OR', 'PA', 'PR', 'PW', 'RI', 'SC', 'SD', 'TN', 'TX', 'UT', 'VA', 'VI', 'VT', 'WA', 'WI', 'WV', 'WY']
    if field_data.upper() in states:
        return
    raise ValidationError(gettext("Enter a valid U.S. state abbreviation."))
def hasNoProfanities(field_data, all_data):
    """
    Checks that the given string has no profanities in it. This does a simple
    check for whether each profanity exists within the string, so 'fuck' will
    catch 'motherfucker' as well. Raises a ValidationError such as:
        Watch your mouth! The words "f--k" and "s--t" are not allowed here.
    """
    field_data = field_data.lower() # normalize
    words_seen = [w for w in settings.PROFANITIES_LIST if w in field_data]
    if words_seen:
        from django.utils.text import get_text_list
        # Fix: ngettext takes the actual count. The old code passed a bool
        # (len > 1), so True == 1 picked the singular form for multiple
        # words and False == 0 picked the plural form for a single word.
        raise ValidationError(ngettext("Watch your mouth! The word %s is not allowed here.",
            "Watch your mouth! The words %s are not allowed here.", len(words_seen)) % \
            get_text_list(['"%s%s%s"' % (i[0], '-'*(len(i)-2), i[-1]) for i in words_seen], 'and'))
class AlwaysMatchesOtherField(object):
    """
    Validates that field_data equals the value of another field. Marked
    always_test so it runs even when its field isn't required.
    """
    def __init__(self, other_field_name, error_message=None):
        self.always_test = True
        self.other = other_field_name
        # Build a lazy default message only when none was supplied.
        self.error_message = error_message or lazy_inter(gettext_lazy("This field must match the '%s' field."), self.other)
    def __call__(self, field_data, all_data):
        if field_data != all_data[self.other]:
            raise ValidationError(self.error_message)
class ValidateIfOtherFieldEquals(object):
    """
    Runs every validator in validator_list, but only when another field
    holds a specific value. Marked always_test so the check is not skipped
    for optional fields.
    """
    def __init__(self, other_field, other_value, validator_list):
        self.other_field, self.other_value = other_field, other_value
        self.validator_list = validator_list
        self.always_test = True
    def __call__(self, field_data, all_data):
        if self.other_field in all_data and all_data[self.other_field] == self.other_value:
            for validate in self.validator_list:
                validate(field_data, all_data)
class RequiredIfOtherFieldNotGiven(object):
    """
    Requires this field whenever the named other field is empty/absent, so
    at least one of the pair is always filled in.
    """
    def __init__(self, other_field_name, error_message=gettext_lazy("Please enter something for at least one field.")):
        self.other, self.error_message = other_field_name, error_message
        self.always_test = True
    def __call__(self, field_data, all_data):
        other_given = all_data.get(self.other, False)
        if not other_given and not field_data:
            raise ValidationError(self.error_message)
class RequiredIfOtherFieldsGiven(object):
    """
    Requires this field whenever any of the named other fields has a value,
    so related fields are filled in together.
    """
    def __init__(self, other_field_names, error_message=gettext_lazy("Please enter both fields or leave them both empty.")):
        self.other, self.error_message = other_field_names, error_message
        self.always_test = True
    def __call__(self, field_data, all_data):
        if field_data:
            return
        for field in self.other:
            if all_data.get(field, False):
                raise ValidationError(self.error_message)
class RequiredIfOtherFieldGiven(RequiredIfOtherFieldsGiven):
    "Like RequiredIfOtherFieldsGiven, but takes a single field name instead of a list."
    def __init__(self, other_field_name, error_message=gettext_lazy("Please enter both fields or leave them both empty.")):
        # Delegate with a one-element list.
        RequiredIfOtherFieldsGiven.__init__(self, [other_field_name], error_message)
class RequiredIfOtherFieldEquals(object):
    """
    Requires this field whenever the named other field equals other_value.
    other_label, if given, is used instead of other_value in the default
    error message.
    """
    def __init__(self, other_field, other_value, error_message=None, other_label=None):
        self.other_field = other_field
        self.other_value = other_value
        self.always_test = True
        if error_message:
            self.error_message = error_message
        else:
            self.error_message = lazy_inter(gettext_lazy("This field must be given if %(field)s is %(value)s"), {
                'field': other_field, 'value': other_label or other_value})
    def __call__(self, field_data, all_data):
        if field_data:
            return
        if self.other_field in all_data and all_data[self.other_field] == self.other_value:
            raise ValidationError(self.error_message)
class RequiredIfOtherFieldDoesNotEqual(object):
    """
    Requires this field whenever the named other field does NOT equal
    other_value. other_label, if given, replaces other_value in the default
    error message.
    """
    def __init__(self, other_field, other_value, other_label=None, error_message=None):
        self.other_field = other_field
        self.other_value = other_value
        self.always_test = True
        if error_message:
            self.error_message = error_message
        else:
            self.error_message = lazy_inter(gettext_lazy("This field must be given if %(field)s is not %(value)s"), {
                'field': other_field, 'value': other_label or other_value})
    def __call__(self, field_data, all_data):
        if field_data:
            return
        if self.other_field in all_data and all_data[self.other_field] != self.other_value:
            raise ValidationError(self.error_message)
class IsLessThanOtherField(object):
    """
    Validates that field_data does not exceed the value of another field.
    (Equal values are accepted -- the check only rejects strictly greater.)
    """
    def __init__(self, other_field_name, error_message):
        self.other, self.error_message = other_field_name, error_message
    def __call__(self, field_data, all_data):
        if field_data > all_data[self.other]:
            raise ValidationError(self.error_message)
class UniqueAmongstFieldsWithPrefix(object):
    """
    Validates that field_data differs from the value of every *other* field
    whose name starts with the given prefix.
    """
    def __init__(self, field_name, prefix, error_message):
        self.field_name, self.prefix = field_name, prefix
        self.error_message = error_message or gettext_lazy("Duplicate values are not allowed.")
    def __call__(self, field_data, all_data):
        for field_name, value in all_data.items():
            # Fix: the old code never consulted self.prefix, so it compared
            # against every field in the form instead of just the prefixed
            # family this class's name (and constructor) promises.
            if field_name != self.field_name and field_name.startswith(self.prefix) and value == field_data:
                raise ValidationError(self.error_message)
class NumberIsInRange(object):
    """
    Validator that tests if a value is in a range (inclusive).
    """
    def __init__(self, lower=None, upper=None, error_message=''):
        self.lower, self.upper = lower, upper
        self.error_message = error_message
        if not error_message:
            # Fix: use "is not None" so a bound of 0 still produces the
            # right default message (the old truthiness tests treated a
            # zero bound as "no bound").
            if lower is not None and upper is not None:
                self.error_message = gettext("This value must be between %(lower)s and %(upper)s.") % {'lower': lower, 'upper': upper}
            elif lower is not None:
                self.error_message = gettext("This value must be at least %s.") % lower
            elif upper is not None:
                self.error_message = gettext("This value must be no more than %s.") % upper
    def __call__(self, field_data, all_data):
        # Try to make the value numeric. If this fails, we assume another
        # validator will catch the problem.
        try:
            val = float(field_data)
        except ValueError:
            return
        # Fix: compare against bounds with "is not None" so lower=0 and
        # upper=0 are actually enforced.
        if self.lower is not None and val < self.lower:
            raise ValidationError(self.error_message)
        if self.upper is not None and val > self.upper:
            raise ValidationError(self.error_message)
class IsAPowerOf(object):
    """
    Validates that the value is an integral power of self.power_of.
    >>> v = IsAPowerOf(2)
    >>> v(4, None)
    >>> v(8, None)
    >>> v(16, None)
    >>> v(17, None)
    django.core.validators.ValidationError: ['This value must be a power of 2.']
    """
    def __init__(self, power_of):
        self.power_of = power_of
    def __call__(self, field_data, all_data):
        from math import log
        # The value is a power iff the log ratio comes out integral.
        exponent = log(int(field_data)) / log(self.power_of)
        if exponent != int(exponent):
            raise ValidationError(gettext("This value must be a power of %s.") % self.power_of)
class IsValidFloat(object):
    """
    Validates that a string is a decimal number fitting in max_digits total
    digits, with at most decimal_places of them after the decimal point.
    """
    def __init__(self, max_digits, decimal_places):
        self.max_digits, self.decimal_places = max_digits, decimal_places
    def __call__(self, field_data, all_data):
        data = str(field_data)
        try:
            float(data)
        except ValueError:
            raise ValidationError, gettext("Please enter a valid decimal number.")
        # Negative floats require more space to input.
        # Budget: max_digits digits plus one character for the decimal
        # point, plus one more for a leading minus sign when present.
        max_allowed_length = data.startswith('-') and (self.max_digits + 2) or (self.max_digits + 1)
        if len(data) > max_allowed_length:
            raise ValidationError, ngettext("Please enter a valid decimal number with at most %s total digit.",
                "Please enter a valid decimal number with at most %s total digits.", self.max_digits) % self.max_digits
        # Whole-part check: without a point the string may only use the
        # non-fractional digit budget; with a point, unused fractional
        # digits don't extend the allowance for the whole part.
        if (not '.' in data and len(data) > (max_allowed_length - self.decimal_places - 1)) or ('.' in data and len(data) > (max_allowed_length - (self.decimal_places - len(data.split('.')[1])))):
            raise ValidationError, ngettext( "Please enter a valid decimal number with a whole part of at most %s digit.",
                "Please enter a valid decimal number with a whole part of at most %s digits.", str(self.max_digits-self.decimal_places)) % str(self.max_digits-self.decimal_places)
        # Fractional-part check.
        if '.' in data and len(data.split('.')[1]) > self.decimal_places:
            raise ValidationError, ngettext("Please enter a valid decimal number with at most %s decimal place.",
                "Please enter a valid decimal number with at most %s decimal places.", self.decimal_places) % self.decimal_places
class HasAllowableSize(object):
    """
    Checks that the file-upload field data is a certain size. min_size and
    max_size are measurements in bytes.
    """
    def __init__(self, min_size=None, max_size=None, min_error_message=None, max_error_message=None):
        self.min_size, self.max_size = min_size, max_size
        # Lazy default messages are only built when none were supplied.
        self.min_error_message = min_error_message or lazy_inter(gettext_lazy("Make sure your uploaded file is at least %s bytes big."), min_size)
        self.max_error_message = max_error_message or lazy_inter(gettext_lazy("Make sure your uploaded file is at most %s bytes big."), max_size)
    def __call__(self, field_data, all_data):
        try:
            content = field_data['content']
        except TypeError:
            raise ValidationError(gettext_lazy("No file was submitted. Check the encoding type on the form."))
        size = len(content)
        if self.min_size is not None and size < self.min_size:
            raise ValidationError(self.min_error_message)
        if self.max_size is not None and size > self.max_size:
            raise ValidationError(self.max_error_message)
class MatchesRegularExpression(object):
    """
    Checks that the field matches the given regular-expression. The regex
    should be in string format, not already compiled.
    """
    def __init__(self, regexp, error_message=gettext_lazy("The format for this field is wrong.")):
        # Compile once at construction; __call__ just searches.
        self.regexp = re.compile(regexp)
        self.error_message = error_message
    def __call__(self, field_data, all_data):
        if self.regexp.search(field_data) is None:
            raise ValidationError(self.error_message)
class AnyValidator(object):
    """
    This validator tries all given validators. If any one of them succeeds,
    validation passes. If none of them succeeds, the given message is thrown
    as a validation error. The message is rather unspecific, so it's best to
    specify one on instantiation.
    """
    def __init__(self, validator_list=None, error_message=gettext_lazy("This field is invalid.")):
        if validator_list is None:
            validator_list = []
        self.validator_list = validator_list
        self.error_message = error_message
        # Propagate always_test so this wrapper runs whenever any of its
        # members would have.
        for v in validator_list:
            if hasattr(v, 'always_test'):
                self.always_test = True
    def __call__(self, field_data, all_data):
        for v in self.validator_list:
            try:
                v(field_data, all_data)
            except ValidationError:
                continue
            return
        raise ValidationError(self.error_message)
class URLMimeTypeCheck(object):
    "Checks that the provided URL points to a document with a listed mime type"
    class CouldNotRetrieve(ValidationError):
        # Raised when the URL could not be fetched at all.
        pass
    class InvalidContentType(ValidationError):
        # Raised when the fetched document has an unexpected Content-Type.
        pass
    def __init__(self, mime_type_list):
        self.mime_type_list = mime_type_list
    def __call__(self, field_data, all_data):
        import urllib2
        # The URL must at least be syntactically valid; let that error
        # propagate unchanged.
        isValidURL(field_data, all_data)
        try:
            info = urllib2.urlopen(field_data).info()
        except (urllib2.HTTPError, urllib2.URLError):
            raise URLMimeTypeCheck.CouldNotRetrieve(gettext("Could not retrieve anything from %s.") % field_data)
        content_type = info['content-type']
        if content_type not in self.mime_type_list:
            raise URLMimeTypeCheck.InvalidContentType(gettext("The URL %(url)s returned the invalid Content-Type header '%(contenttype)s'.") % {
                'url': field_data, 'contenttype': content_type})
class RelaxNGCompact(object):
    "Validate against a Relax NG compact schema"
    def __init__(self, schema_path, additional_root_element=None):
        self.schema_path = schema_path
        self.additional_root_element = additional_root_element
    def __call__(self, field_data, all_data):
        import os, tempfile
        # Optionally wrap the fragment in a synthetic root element so it
        # parses as a complete document.
        if self.additional_root_element:
            field_data = '<%(are)s>%(data)s\n</%(are)s>' % {
                'are': self.additional_root_element,
                'data': field_data
            }
        filename = tempfile.mktemp() # Insecure, but nothing else worked
        fp = open(filename, 'w')
        fp.write(field_data)
        fp.close()
        if not os.path.exists(settings.JING_PATH):
            raise Exception("%s not found!" % settings.JING_PATH)
        # Run the external Jing validator and collect its per-line output.
        p = os.popen('%s -c %s %s' % (settings.JING_PATH, self.schema_path, filename))
        errors = [line.strip() for line in p.readlines()]
        p.close()
        os.unlink(filename)
        display_errors = []
        lines = field_data.split('\n')
        # Fix throughout this loop: the original used the undefined name
        # "_", which raised NameError instead of reporting the validation
        # problems; this module imports gettext, so use it directly.
        for error in errors:
            ignored, line, level, message = error.split(':', 3)
            # Scrape the Jing error messages to reword them more nicely.
            m = re.search(r'Expected "(.*?)" to terminate element starting on line (\d+)', message)
            if m:
                display_errors.append(gettext('Please close the unclosed %(tag)s tag from line %(line)s. (Line starts with "%(start)s".)') % \
                    {'tag':m.group(1).replace('/', ''), 'line':m.group(2), 'start':lines[int(m.group(2)) - 1][:30]})
                continue
            if message.strip() == 'text not allowed here':
                display_errors.append(gettext('Some text starting on line %(line)s is not allowed in that context. (Line starts with "%(start)s".)') % \
                    {'line':line, 'start':lines[int(line) - 1][:30]})
                continue
            m = re.search(r'\s*attribute "(.*?)" not allowed at this point; ignored', message)
            if m:
                display_errors.append(gettext('"%(attr)s" on line %(line)s is an invalid attribute. (Line starts with "%(start)s".)') % \
                    {'attr':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
                continue
            m = re.search(r'\s*unknown element "(.*?)"', message)
            if m:
                display_errors.append(gettext('"<%(tag)s>" on line %(line)s is an invalid tag. (Line starts with "%(start)s".)') % \
                    {'tag':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
                continue
            if message.strip() == 'required attributes missing':
                display_errors.append(gettext('A tag on line %(line)s is missing one or more required attributes. (Line starts with "%(start)s".)') % \
                    {'line':line, 'start':lines[int(line) - 1][:30]})
                continue
            m = re.search(r'\s*bad value for attribute "(.*?)"', message)
            if m:
                display_errors.append(gettext('The "%(attr)s" attribute on line %(line)s has an invalid value. (Line starts with "%(start)s".)') % \
                    {'attr':m.group(1), 'line':line, 'start':lines[int(line) - 1][:30]})
                continue
            # Failing all those checks, use the default error message.
            display_errors.append('Line %s: %s [%s]' % (line, message, level.strip()))
        if len(display_errors) > 0:
            raise ValidationError(display_errors)
| Python |
# Use this module for e-mailing.
from django.conf import settings
from email.MIMEText import MIMEText
from email.Header import Header
from email.Utils import formatdate
import smtplib
import socket
import time
import random
# Cache the hostname, but do it lazily: socket.getfqdn() can take a couple of
# seconds, which slows down the restart of the server.
class CachedDnsName(object):
    """
    Lazily resolves and caches the local fully-qualified domain name.
    socket.getfqdn() can take a couple of seconds, so the lookup is deferred
    until the name is first needed rather than slowing down server restart.
    """
    def __str__(self):
        return self.get_fqdn()
    def get_fqdn(self):
        # Resolve once, then serve the cached value forever after.
        try:
            return self._fqdn
        except AttributeError:
            self._fqdn = socket.getfqdn()
            return self._fqdn
DNS_NAME = CachedDnsName()
class BadHeaderError(ValueError):
    "Raised when a mail header value contains a newline (header injection)."
class SafeMIMEText(MIMEText):
    # MIMEText subclass whose header assignment rejects embedded newlines,
    # preventing mail-header injection via user-supplied values.
    def __setitem__(self, name, val):
        "Forbids multi-line headers, to prevent header injection."
        if '\n' in val or '\r' in val:
            raise BadHeaderError, "Header values can't contain newlines (got %r for header %r)" % (val, name)
        # Subjects may contain non-ASCII text; encode them with the
        # project's default charset via an email Header object.
        if name == "Subject":
            val = Header(val, settings.DEFAULT_CHARSET)
        MIMEText.__setitem__(self, name, val)
def send_mail(subject, message, from_email, recipient_list, fail_silently=False, auth_user=None, auth_password=None):
    """
    Easy wrapper for sending a single message to a recipient list. All members
    of the recipient list will see the other recipients in the 'To' field.
    If auth_user is None, the EMAIL_HOST_USER setting is used.
    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
    """
    if auth_user is None:
        auth_user = settings.EMAIL_HOST_USER
    if auth_password is None:
        auth_password = settings.EMAIL_HOST_PASSWORD
    # Delegate to the batch sender with a single-message datatuple.
    datatuple = [[subject, message, from_email, recipient_list]]
    return send_mass_mail(datatuple, fail_silently, auth_user, auth_password)
def send_mass_mail(datatuple, fail_silently=False, auth_user=None, auth_password=None):
    """
    Given a datatuple of (subject, message, from_email, recipient_list), sends
    each message to each recipient list. Returns the number of e-mails sent.
    If from_email is None, the DEFAULT_FROM_EMAIL setting is used.
    If auth_user and auth_password are set, they're used to log in.
    If auth_user is None, the EMAIL_HOST_USER setting is used.
    If auth_password is None, the EMAIL_HOST_PASSWORD setting is used.
    """
    if auth_user is None:
        auth_user = settings.EMAIL_HOST_USER
    if auth_password is None:
        auth_password = settings.EMAIL_HOST_PASSWORD
    # Open a single SMTP connection for the whole batch. The bare except
    # is deliberate: with fail_silently, any connection/login failure is
    # swallowed; otherwise it propagates unchanged.
    try:
        server = smtplib.SMTP(settings.EMAIL_HOST, settings.EMAIL_PORT)
        if auth_user and auth_password:
            server.login(auth_user, auth_password)
    except:
        if fail_silently:
            return
        raise
    num_sent = 0
    for subject, message, from_email, recipient_list in datatuple:
        # Skip messages with nobody to send to.
        if not recipient_list:
            continue
        from_email = from_email or settings.DEFAULT_FROM_EMAIL
        # SafeMIMEText rejects newline-containing header values
        # (header-injection protection).
        msg = SafeMIMEText(message, 'plain', settings.DEFAULT_CHARSET)
        msg['Subject'] = subject
        msg['From'] = from_email
        msg['To'] = ', '.join(recipient_list)
        msg['Date'] = formatdate()
        try:
            random_bits = str(random.getrandbits(64))
        except AttributeError: # Python 2.3 doesn't have random.getrandbits().
            random_bits = ''.join([random.choice('1234567890') for i in range(19)])
        # Unique Message-ID: timestamp + randomness + cached local fqdn.
        msg['Message-ID'] = "<%d.%s@%s>" % (time.time(), random_bits, DNS_NAME)
        # Per-message failures only abort the batch when not fail_silently.
        try:
            server.sendmail(from_email, recipient_list, msg.as_string())
            num_sent += 1
        except:
            if not fail_silently:
                raise
    # Closing failures are subject to the same fail_silently policy.
    try:
        server.quit()
    except:
        if fail_silently:
            return
        raise
    return num_sent
def mail_admins(subject, message, fail_silently=False):
    "Sends a message to the admins, as defined by the ADMINS setting."
    # ADMINS is a sequence of (name, address) pairs; mail the addresses.
    recipients = [address for name, address in settings.ADMINS]
    send_mail(settings.EMAIL_SUBJECT_PREFIX + subject, message,
        settings.SERVER_EMAIL, recipients, fail_silently)
def mail_managers(subject, message, fail_silently=False):
    "Sends a message to the managers, as defined by the MANAGERS setting."
    # MANAGERS is a sequence of (name, address) pairs; mail the addresses.
    recipients = [address for name, address in settings.MANAGERS]
    send_mail(settings.EMAIL_SUBJECT_PREFIX + subject, message,
        settings.SERVER_EMAIL, recipients, fail_silently)
| Python |
"""
BaseHTTPServer that implements the Python WSGI protocol (PEP 333, rev 1.21).
Adapted from wsgiref.simple_server: http://svn.eby-sarna.com/wsgiref/
This is a simple server for use in testing or debugging Django apps. It hasn't
been reviewed for security issues. Don't use it for production use.
"""
from BaseHTTPServer import BaseHTTPRequestHandler, HTTPServer
from types import ListType, StringType
import os, re, sys, time, urllib
# Identification strings advertised in the HTTP Server: response header.
__version__ = "0.1"
__all__ = ['WSGIServer','WSGIRequestHandler','demo_app']
server_version = "WSGIServer/" + __version__            # e.g. "WSGIServer/0.1"
sys_version = "Python/" + sys.version.split()[0]        # e.g. "Python/2.5.1"
software_version = server_version + ' ' + sys_version
class WSGIServerException(Exception):
    "Raised for errors while starting or running the development WSGI server."
class FileWrapper(object):
    """Wrapper to convert file-like objects to iterables"""
    def __init__(self, filelike, blksize=8192):
        self.filelike = filelike
        self.blksize = blksize
        # Expose the underlying close() so the server can release the file.
        if hasattr(filelike, 'close'):
            self.close = filelike.close
    def __getitem__(self, key):
        # Old-style sequence protocol: the index is ignored; each access
        # simply reads the next chunk, ending with IndexError at EOF.
        data = self.filelike.read(self.blksize)
        if not data:
            raise IndexError
        return data
    def __iter__(self):
        return self
    def next(self):
        # Iterator protocol: one chunk per call, StopIteration at EOF.
        data = self.filelike.read(self.blksize)
        if not data:
            raise StopIteration
        return data
# Regular expression that matches `special' characters in parameters, the
# existence of which force quoting of the parameter value.
tspecials = re.compile(r'[ \(\)<>@,;:\\"/\[\]\?=]')
def _formatparam(param, value=None, quote=1):
"""Convenience function to format and return a key=value pair.
This will quote the value if needed or if quote is true.
"""
if value is not None and len(value) > 0:
if quote or tspecials.search(value):
value = value.replace('\\', '\\\\').replace('"', r'\"')
return '%s="%s"' % (param, value)
else:
return '%s=%s' % (param, value)
else:
return param
class Headers(object):
    """Manage a collection of HTTP response headers"""
    # Internally a list of (name, value) tuples so duplicates and insertion
    # order are preserved; name comparisons are case-insensitive throughout.
    def __init__(self,headers):
        if type(headers) is not ListType:
            raise TypeError("Headers must be a list of name/value tuples")
        self._headers = headers
    def __len__(self):
        """Return the total number of headers, including duplicates."""
        return len(self._headers)
    def __setitem__(self, name, val):
        """Set the value of a header."""
        # Replace-all semantics: drop every existing occurrence first.
        del self[name]
        self._headers.append((name, val))
    def __delitem__(self,name):
        """Delete all occurrences of a header, if present.
        Does *not* raise an exception if the header is missing.
        """
        name = name.lower()
        self._headers[:] = [kv for kv in self._headers if kv[0].lower()<>name]
    def __getitem__(self,name):
        """Get the first header value for 'name'.
        Return None if the header is missing instead of raising an exception.
        Note that if the header appeared multiple times, exactly which
        occurrence gets returned is undefined. Use getall() to get all
        the values matching a header field name.
        """
        return self.get(name)
    def has_key(self, name):
        """Return true if the message contains the header."""
        return self.get(name) is not None
    __contains__ = has_key
    def get_all(self, name):
        """Return a list of all the values for the named field.
        These will be sorted in the order they appeared in the original header
        list or were added to this instance, and may contain duplicates. Any
        fields deleted and re-inserted are always appended to the header list.
        If no fields exist with the given name, returns an empty list.
        """
        name = name.lower()
        return [kv[1] for kv in self._headers if kv[0].lower()==name]
    def get(self,name,default=None):
        """Get the first header value for 'name', or return 'default'"""
        name = name.lower()
        for k,v in self._headers:
            if k.lower()==name:
                return v
        return default
    def keys(self):
        """Return a list of all the header field names.
        These will be sorted in the order they appeared in the original header
        list, or were added to this instance, and may contain duplicates.
        Any fields deleted and re-inserted are always appended to the header
        list.
        """
        return [k for k, v in self._headers]
    def values(self):
        """Return a list of all header values.
        These will be sorted in the order they appeared in the original header
        list, or were added to this instance, and may contain duplicates.
        Any fields deleted and re-inserted are always appended to the header
        list.
        """
        return [v for k, v in self._headers]
    def items(self):
        """Get all the header fields and values.
        These will be sorted in the order they were in the original header
        list, or were added to this instance, and may contain duplicates.
        Any fields deleted and re-inserted are always appended to the header
        list.
        """
        # Return a copy so callers can't mutate our internal list.
        return self._headers[:]
    def __repr__(self):
        return "Headers(%s)" % `self._headers`
    def __str__(self):
        """str() returns the formatted headers, complete with end line,
        suitable for direct HTTP transmission."""
        # The two trailing '' entries yield the blank line ending the headers.
        return '\r\n'.join(["%s: %s" % kv for kv in self._headers]+['',''])
    def setdefault(self,name,value):
        """Return first matching header value for 'name', or 'value'.
        If there is no header named 'name', add a new header with name 'name'
        and value 'value'."""
        result = self.get(name)
        if result is None:
            self._headers.append((name,value))
            return value
        else:
            return result
    def add_header(self, _name, _value, **_params):
        """Extended header setting.
        _name is the header field to add. keyword arguments can be used to set
        additional parameters for the header field, with underscores converted
        to dashes. Normally the parameter will be added as key="value" unless
        value is None, in which case only the key will be added.
        Example:
        h.add_header('content-disposition', 'attachment', filename='bud.gif')
        Note that unlike the corresponding 'email.Message' method, this does
        *not* handle '(charset, language, value)' tuples: all values must be
        strings or None.
        """
        parts = []
        if _value is not None:
            parts.append(_value)
        for k, v in _params.items():
            if v is None:
                # Bare parameter: just the (dash-converted) name.
                parts.append(k.replace('_', '-'))
            else:
                parts.append(_formatparam(k.replace('_', '-'), v))
        self._headers.append((_name, "; ".join(parts)))
def guess_scheme(environ):
    """Guess whether 'wsgi.url_scheme' should be 'http' or 'https'.

    The decision rests solely on the CGI 'HTTPS' variable, which
    front-end servers conventionally set to 'yes', 'on' or '1' for TLS
    connections.
    """
    https_flag = environ.get("HTTPS")
    if https_flag in ('yes','on','1'):
        return 'https'
    return 'http'
# Hop-by-hop header names (HTTP/1.1, RFC 2616 section 13.5.1), stored
# lowercase.  The dict's __contains__ is bound once so each lookup is a
# single fast membership test.  (__contains__ replaces the deprecated
# dict.has_key, which Python 3 removed; behavior is identical.)
_hoppish = {
    'connection':1, 'keep-alive':1, 'proxy-authenticate':1,
    'proxy-authorization':1, 'te':1, 'trailers':1, 'transfer-encoding':1,
    'upgrade':1
}.__contains__
def is_hop_by_hop(header_name):
    """Return true if 'header_name' is an HTTP/1.1 "Hop-by-Hop" header"""
    return _hoppish(header_name.lower())
class ServerHandler(object):
"""Manage the invocation of a WSGI application"""
# Configuration parameters; can override per-subclass or per-instance
wsgi_version = (1,0)
wsgi_multithread = True
wsgi_multiprocess = True
wsgi_run_once = False
origin_server = True # We are transmitting direct to client
http_version = "1.0" # Version that should be used for response
server_software = software_version
# os_environ is used to supply configuration from the OS environment:
# by default it's a copy of 'os.environ' as of import time, but you can
# override this in e.g. your __init__ method.
os_environ = dict(os.environ.items())
# Collaborator classes
wsgi_file_wrapper = FileWrapper # set to None to disable
headers_class = Headers # must be a Headers-like class
# Error handling (also per-subclass or per-instance)
traceback_limit = None # Print entire traceback to self.get_stderr()
error_status = "500 INTERNAL SERVER ERROR"
error_headers = [('Content-Type','text/plain')]
# State variables (don't mess with these)
status = result = None
headers_sent = False
headers = None
bytes_sent = 0
def __init__(self, stdin, stdout, stderr, environ, multithread=True,
multiprocess=False):
self.stdin = stdin
self.stdout = stdout
self.stderr = stderr
self.base_env = environ
self.wsgi_multithread = multithread
self.wsgi_multiprocess = multiprocess
def run(self, application):
"""Invoke the application"""
# Note to self: don't move the close()! Asynchronous servers shouldn't
# call close() from finish_response(), so if you close() anywhere but
# the double-error branch here, you'll break asynchronous servers by
# prematurely closing. Async servers must return from 'run()' without
# closing if there might still be output to iterate over.
try:
self.setup_environ()
self.result = application(self.environ, self.start_response)
self.finish_response()
except:
try:
self.handle_error()
except:
# If we get an error handling an error, just give up already!
self.close()
raise # ...and let the actual server figure it out.
def setup_environ(self):
"""Set up the environment for one request"""
env = self.environ = self.os_environ.copy()
self.add_cgi_vars()
env['wsgi.input'] = self.get_stdin()
env['wsgi.errors'] = self.get_stderr()
env['wsgi.version'] = self.wsgi_version
env['wsgi.run_once'] = self.wsgi_run_once
env['wsgi.url_scheme'] = self.get_scheme()
env['wsgi.multithread'] = self.wsgi_multithread
env['wsgi.multiprocess'] = self.wsgi_multiprocess
if self.wsgi_file_wrapper is not None:
env['wsgi.file_wrapper'] = self.wsgi_file_wrapper
if self.origin_server and self.server_software:
env.setdefault('SERVER_SOFTWARE',self.server_software)
def finish_response(self):
"""Send any iterable data, then close self and the iterable
Subclasses intended for use in asynchronous servers will
want to redefine this method, such that it sets up callbacks
in the event loop to iterate over the data, and to call
'self.close()' once the response is finished.
"""
if not self.result_is_file() and not self.sendfile():
for data in self.result:
self.write(data)
self.finish_content()
self.close()
def get_scheme(self):
"""Return the URL scheme being used"""
return guess_scheme(self.environ)
def set_content_length(self):
"""Compute Content-Length or switch to chunked encoding if possible"""
try:
blocks = len(self.result)
except (TypeError,AttributeError,NotImplementedError):
pass
else:
if blocks==1:
self.headers['Content-Length'] = str(self.bytes_sent)
return
# XXX Try for chunked encoding if origin server and client is 1.1
def cleanup_headers(self):
"""Make any necessary header changes or defaults
Subclasses can extend this to add other defaults.
"""
if not self.headers.has_key('Content-Length'):
self.set_content_length()
def start_response(self, status, headers,exc_info=None):
"""'start_response()' callable as specified by PEP 333"""
if exc_info:
try:
if self.headers_sent:
# Re-raise original exception if headers sent
raise exc_info[0], exc_info[1], exc_info[2]
finally:
exc_info = None # avoid dangling circular ref
elif self.headers is not None:
raise AssertionError("Headers already set!")
assert type(status) is StringType,"Status must be a string"
assert len(status)>=4,"Status must be at least 4 characters"
assert int(status[:3]),"Status message must begin w/3-digit code"
assert status[3]==" ", "Status message must have a space after code"
if __debug__:
for name,val in headers:
assert type(name) is StringType,"Header names must be strings"
assert type(val) is StringType,"Header values must be strings"
assert not is_hop_by_hop(name),"Hop-by-hop headers not allowed"
self.status = status
self.headers = self.headers_class(headers)
return self.write
def send_preamble(self):
"""Transmit version/status/date/server, via self._write()"""
if self.origin_server:
if self.client_is_modern():
self._write('HTTP/%s %s\r\n' % (self.http_version,self.status))
if not self.headers.has_key('Date'):
self._write(
'Date: %s\r\n' % time.asctime(time.gmtime(time.time()))
)
if self.server_software and not self.headers.has_key('Server'):
self._write('Server: %s\r\n' % self.server_software)
else:
self._write('Status: %s\r\n' % self.status)
def write(self, data):
"""'write()' callable as specified by PEP 333"""
assert type(data) is StringType,"write() argument must be string"
if not self.status:
raise AssertionError("write() before start_response()")
elif not self.headers_sent:
# Before the first output, send the stored headers
self.bytes_sent = len(data) # make sure we know content-length
self.send_headers()
else:
self.bytes_sent += len(data)
# XXX check Content-Length and truncate if too many bytes written?
self._write(data)
self._flush()
def sendfile(self):
"""Platform-specific file transmission
Override this method in subclasses to support platform-specific
file transmission. It is only called if the application's
return iterable ('self.result') is an instance of
'self.wsgi_file_wrapper'.
This method should return a true value if it was able to actually
transmit the wrapped file-like object using a platform-specific
approach. It should return a false value if normal iteration
should be used instead. An exception can be raised to indicate
that transmission was attempted, but failed.
NOTE: this method should call 'self.send_headers()' if
'self.headers_sent' is false and it is going to attempt direct
transmission of the file1.
"""
return False # No platform-specific transmission by default
def finish_content(self):
"""Ensure headers and content have both been sent"""
if not self.headers_sent:
self.headers['Content-Length'] = "0"
self.send_headers()
else:
pass # XXX check if content-length was too short?
def close(self):
try:
self.request_handler.log_request(self.status.split(' ',1)[0], self.bytes_sent)
finally:
try:
if hasattr(self.result,'close'):
self.result.close()
finally:
self.result = self.headers = self.status = self.environ = None
self.bytes_sent = 0; self.headers_sent = False
def send_headers(self):
"""Transmit headers to the client, via self._write()"""
self.cleanup_headers()
self.headers_sent = True
if not self.origin_server or self.client_is_modern():
self.send_preamble()
self._write(str(self.headers))
def result_is_file(self):
"""True if 'self.result' is an instance of 'self.wsgi_file_wrapper'"""
wrapper = self.wsgi_file_wrapper
return wrapper is not None and isinstance(self.result,wrapper)
def client_is_modern(self):
"""True if client can accept status and headers"""
return self.environ['SERVER_PROTOCOL'].upper() != 'HTTP/0.9'
def log_exception(self,exc_info):
"""Log the 'exc_info' tuple in the server log
Subclasses may override to retarget the output or change its format.
"""
try:
from traceback import print_exception
stderr = self.get_stderr()
print_exception(
exc_info[0], exc_info[1], exc_info[2],
self.traceback_limit, stderr
)
stderr.flush()
finally:
exc_info = None
def handle_error(self):
"""Log current error, and send error output to client if possible"""
self.log_exception(sys.exc_info())
if not self.headers_sent:
self.result = self.error_output(self.environ, self.start_response)
self.finish_response()
# XXX else: attempt advanced recovery techniques for HTML or text?
def error_output(self, environ, start_response):
import traceback
start_response(self.error_status, self.error_headers[:], sys.exc_info())
return ['\n'.join(traceback.format_exception(*sys.exc_info()))]
# Pure abstract methods; *must* be overridden in subclasses
def _write(self,data):
self.stdout.write(data)
self._write = self.stdout.write
def _flush(self):
self.stdout.flush()
self._flush = self.stdout.flush
def get_stdin(self):
return self.stdin
def get_stderr(self):
return self.stderr
def add_cgi_vars(self):
self.environ.update(self.base_env)
class WSGIServer(HTTPServer):
    """BaseHTTPServer that implements the Python WSGI protocol"""
    # The WSGI application served by this server; set via set_app().
    application = None
    def server_bind(self):
        """Override server_bind to store the server name."""
        try:
            HTTPServer.server_bind(self)
        except Exception, e:
            # Re-wrap bind/socket errors in the project's exception type
            # (WSGIServerException is defined elsewhere in this module).
            raise WSGIServerException, e
        self.setup_environ()
    def setup_environ(self):
        # Set up base environment
        # Built once at bind time; copied per-request by the handler.
        env = self.base_environ = {}
        env['SERVER_NAME'] = self.server_name
        env['GATEWAY_INTERFACE'] = 'CGI/1.1'
        env['SERVER_PORT'] = str(self.server_port)
        env['REMOTE_HOST']=''
        env['CONTENT_LENGTH']=''
        env['SCRIPT_NAME'] = ''
    def get_app(self):
        # Return the WSGI application currently being served.
        return self.application
    def set_app(self,application):
        # Install the WSGI application to serve.
        self.application = application
class WSGIRequestHandler(BaseHTTPRequestHandler):
    """Request handler that translates one HTTP request into a WSGI call."""
    server_version = "WSGIServer/" + __version__
    def __init__(self, *args, **kwargs):
        from django.conf import settings
        # Cached so log_message() can skip admin-media requests.
        self.admin_media_prefix = settings.ADMIN_MEDIA_PREFIX
        BaseHTTPRequestHandler.__init__(self, *args, **kwargs)
    def get_environ(self):
        """Build the per-request CGI/WSGI environ dictionary."""
        env = self.server.base_environ.copy()
        env['SERVER_PROTOCOL'] = self.request_version
        env['REQUEST_METHOD'] = self.command
        if '?' in self.path:
            path,query = self.path.split('?',1)
        else:
            path,query = self.path,''
        env['PATH_INFO'] = urllib.unquote(path)
        env['QUERY_STRING'] = query
        env['REMOTE_ADDR'] = self.client_address[0]
        # self.headers is a Python 2 mimetools.Message; 'typeheader' is the
        # raw Content-Type line, 'type' the parsed/defaulted media type.
        if self.headers.typeheader is None:
            env['CONTENT_TYPE'] = self.headers.type
        else:
            env['CONTENT_TYPE'] = self.headers.typeheader
        length = self.headers.getheader('content-length')
        if length:
            env['CONTENT_LENGTH'] = length
        for h in self.headers.headers:
            k,v = h.split(':',1)
            k=k.replace('-','_').upper(); v=v.strip()
            if k in env:
                continue # skip content length, type,etc.
            if 'HTTP_'+k in env:
                env['HTTP_'+k] += ','+v # comma-separate multiple headers
            else:
                env['HTTP_'+k] = v
        return env
    def get_stderr(self):
        # Destination for wsgi.errors; subclasses may redirect.
        return sys.stderr
    def handle(self):
        """Handle a single HTTP request"""
        self.raw_requestline = self.rfile.readline()
        if not self.parse_request(): # An error code has been sent, just exit
            return
        handler = ServerHandler(self.rfile, self.wfile, self.get_stderr(), self.get_environ())
        handler.request_handler = self # backpointer for logging
        handler.run(self.server.get_app())
    def log_message(self, format, *args):
        # Don't bother logging requests for admin images or the favicon.
        if self.path.startswith(self.admin_media_prefix) or self.path == '/favicon.ico':
            return
        sys.stderr.write("[%s] %s\n" % (self.log_date_time_string(), format % args))
class AdminMediaHandler(object):
    """
    WSGI middleware that intercepts calls to the admin media directory, as
    defined by the ADMIN_MEDIA_PREFIX setting, and serves those images.
    Use this ONLY LOCALLY, for development! This hasn't been tested for
    security and is not super efficient.
    """
    def __init__(self, application, media_dir=None):
        from django.conf import settings
        self.application = application
        if not media_dir:
            import django
            # Default to the admin media shipped inside Django itself.
            self.media_dir = django.__path__[0] + '/contrib/admin/media'
        else:
            self.media_dir = media_dir
        self.media_url = settings.ADMIN_MEDIA_PREFIX
    def __call__(self, environ, start_response):
        import os.path
        import mimetypes
        # Ignore requests that aren't under ADMIN_MEDIA_PREFIX. Also ignore
        # all requests if ADMIN_MEDIA_PREFIX isn't a relative URL.
        if self.media_url.startswith('http://') or self.media_url.startswith('https://') \
            or not environ['PATH_INFO'].startswith(self.media_url):
            return self.application(environ, start_response)
        # Find the admin file and serve it up, if it exists and is readable.
        relative_url = environ['PATH_INFO'][len(self.media_url):]
        file_path = os.path.join(self.media_dir, relative_url)
        if not os.path.exists(file_path):
            status = '404 NOT FOUND'
            headers = {'Content-type': 'text/plain'}
            output = ['Page not found: %s' % file_path]
        else:
            try:
                fp = open(file_path, 'rb')
            except IOError:
                # BUGFIX: was '401 UNAUTHORIZED'.  401 demands a
                # WWW-Authenticate challenge; a filesystem permission
                # problem is properly a 403.
                status = '403 FORBIDDEN'
                headers = {'Content-type': 'text/plain'}
                output = ['Permission denied: %s' % file_path]
            else:
                status = '200 OK'
                headers = {}
                # BUGFIX: previously no Content-Type was sent for served
                # files; guess it from the name so browsers treat CSS/JS/
                # images correctly.
                mime_type, encoding = mimetypes.guess_type(file_path)
                if mime_type:
                    headers['Content-Type'] = mime_type
                output = [fp.read()]
                fp.close()
        start_response(status, headers.items())
        return output
def run(addr, port, wsgi_handler):
    """Serve 'wsgi_handler' forever on (addr, port) with WSGIServer.

    Blocks until the process is killed.
    """
    httpd = WSGIServer((addr, port), WSGIRequestHandler)
    httpd.set_app(wsgi_handler)
    httpd.serve_forever()
| Python |
"""
FastCGI server that implements the WSGI protocol.
Uses the flup python package: http://www.saddi.com/software/flup/
This is an adaptation of the flup package to add FastCGI server support
to run Django apps from Web servers that support the FastCGI protocol.
This module can be run standalone or from the django-admin / manage.py
scripts using the "runfcgi" directive.
Run with the extra option "help" for a list of additional options you can
pass to this server.
"""
import sys, os
__version__ = "0.1"
__all__ = ["runfastcgi"]
# Usage text printed by "runfcgi help" (see fastcgi_help()).
FASTCGI_HELP = r"""runfcgi:
  Run this project as a fastcgi application. To do this, the
  flup package from http://www.saddi.com/software/flup/ is
  required.
 Usage:
   django-admin.py runfcgi --settings=yourproject.settings [fcgi settings]
   manage.py runfcgi [fcgi settings]
 Optional Fcgi settings: (setting=value)
  host=HOSTNAME hostname to listen on.
  port=PORTNUM port to listen on.
  socket=FILE UNIX socket to listen on.
  method=IMPL prefork or threaded (default prefork)
  maxrequests=NUMBER number of requests a child handles before it is
  killed and a new child is forked (0 = no limit).
  maxspare=NUMBER max number of spare processes / threads
  minspare=NUMBER min number of spare processes / threads.
  maxchildren=NUMBER hard limit number of processes / threads
  daemonize=BOOL whether to detach from terminal.
  pidfile=FILE write the spawned process-id to this file.
  workdir=DIRECTORY change to this directory when daemonizing
 Examples:
  Run a "standard" fastcgi process on a file-descriptor
  (for webservers which spawn your processes for you)
   $ manage.py runfcgi method=threaded
  Run a fastcgi server on a TCP host/port
   $ manage.py runfcgi method=prefork host=127.0.0.1 port=8025
  Run a fastcgi server on a UNIX domain socket (posix platforms only)
   $ manage.py runfcgi method=prefork socket=/tmp/fcgi.sock
  Run a fastCGI as a daemon and write the spawned PID in a file
   $ manage.py runfcgi socket=/tmp/fcgi.sock method=prefork \
     daemonize=true pidfile=/var/run/django-fcgi.pid
"""
# Default values for every supported runfcgi option; "key=value" pairs
# from the command line (and keyword arguments to runfastcgi) override
# these.  See FASTCGI_HELP for what each option means.
FASTCGI_OPTIONS = {
    'host': None,
    'port': None,
    'socket': None,
    'method': 'fork',
    'daemonize': None,
    'workdir': '/',
    'pidfile': None,
    'maxspare': 5,
    'minspare': 2,
    'maxchildren': 50,
    'maxrequests': 0,
}
def fastcgi_help(message=None):
    """Print the FastCGI usage text, plus an optional extra message.

    Always returns False so callers can write 'return fastcgi_help(...)'
    to signal failure in one statement.
    """
    print FASTCGI_HELP
    if message:
        print message
    return False
def runfastcgi(argset=[], **kwargs):
    """Run this Django project as a FastCGI application via flup.

    'argset' is a list of "key=value" strings (bare keys become True);
    keyword arguments override FASTCGI_OPTIONS defaults directly.
    Returns False on configuration errors, otherwise blocks serving
    requests.  NOTE: the mutable default 'argset=[]' is safe here because
    the list is only iterated, never mutated.
    """
    options = FASTCGI_OPTIONS.copy()
    options.update(kwargs)
    for x in argset:
        if "=" in x:
            k, v = x.split('=', 1)
        else:
            k, v = x, True
        options[k.lower()] = v
    if "help" in options:
        return fastcgi_help()
    try:
        import flup
    except ImportError, e:
        print >> sys.stderr, "ERROR: %s" % e
        print >> sys.stderr, " Unable to load the flup package. In order to run django"
        print >> sys.stderr, " as a FastCGI application, you will need to get flup from"
        print >> sys.stderr, " http://www.saddi.com/software/flup/ If you've already"
        print >> sys.stderr, " installed flup, then make sure you have it in your PYTHONPATH."
        return False
    # Pick the flup server implementation and its option spelling.
    if options['method'] in ('prefork', 'fork'):
        from flup.server.fcgi_fork import WSGIServer
        wsgi_opts = {
            'maxSpare': int(options["maxspare"]),
            'minSpare': int(options["minspare"]),
            'maxChildren': int(options["maxchildren"]),
            'maxRequests': int(options["maxrequests"]),
        }
    elif options['method'] in ('thread', 'threaded'):
        from flup.server.fcgi import WSGIServer
        wsgi_opts = {
            'maxSpare': int(options["maxspare"]),
            'minSpare': int(options["minspare"]),
            'maxThreads': int(options["maxchildren"]),
        }
    else:
        return fastcgi_help("ERROR: Implementation must be one of prefork or thread.")
    wsgi_opts['debug'] = False # Turn off flup tracebacks
    # Prep up and go
    from django.core.handlers.wsgi import WSGIHandler
    # Exactly one of (host+port) or socket may be given; neither means
    # we inherit the FastCGI file descriptor from the web server.
    if options["host"] and options["port"] and not options["socket"]:
        wsgi_opts['bindAddress'] = (options["host"], int(options["port"]))
    elif options["socket"] and not options["host"] and not options["port"]:
        wsgi_opts['bindAddress'] = options["socket"]
    elif not options["socket"] and not options["host"] and not options["port"]:
        wsgi_opts['bindAddress'] = None
    else:
        return fastcgi_help("Invalid combination of host, port, socket.")
    if options["daemonize"] is None:
        # Default to daemonizing if we're running on a socket/named pipe.
        daemonize = (wsgi_opts['bindAddress'] is not None)
    else:
        if options["daemonize"].lower() in ('true', 'yes', 't'):
            daemonize = True
        elif options["daemonize"].lower() in ('false', 'no', 'f'):
            daemonize = False
        else:
            return fastcgi_help("ERROR: Invalid option for daemonize parameter.")
    if daemonize:
        from django.utils.daemonize import become_daemon
        become_daemon(our_home_dir=options["workdir"])
    if options["pidfile"]:
        # Record the (possibly daemonized) server PID for init scripts.
        fp = open(options["pidfile"], "w")
        fp.write("%d\n" % os.getpid())
        fp.close()
    WSGIServer(WSGIHandler(), **wsgi_opts).run()
if __name__ == '__main__':
    runfastcgi(sys.argv[1:])
| Python |
# This module is DEPRECATED!
#
# You should no longer be pointing your mod_python configuration
# at "django.core.handler".
#
# Use "django.core.handlers.modpython" instead.
from django.core.handlers.modpython import ModPythonHandler
def handler(req):
    """Deprecated mod_python entry point; delegates to the new handler."""
    # Build a fresh ModPythonHandler per request and hand the request over.
    mp_handler = ModPythonHandler()
    return mp_handler(req)
| Python |
"""
Pages in Django are served up with custom HTTP headers containing useful
information about those pages -- namely, the content type and object ID.
This module contains utility functions for retrieving and doing interesting
things with these special "X-Headers" (so called because the HTTP spec demands
that custom headers are prefixed with "X-").
Next time you're at slashdot.org, watch out for X-Fry and X-Bender. :)
"""
def populate_xheaders(request, response, model, object_id):
    """
    Adds the "X-Object-Type" and "X-Object-Id" headers to the given
    HttpResponse according to the given model and object_id -- but only if
    the given HttpRequest object has an IP address within the INTERNAL_IPS
    setting or if the request is from a logged in staff member.
    """
    from django.conf import settings
    allowed = request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS
    if not allowed:
        # Fall back to the staff-member check; hasattr guards requests
        # without authentication middleware installed.
        allowed = (hasattr(request, 'user') and
                   request.user.is_authenticated() and
                   request.user.is_staff)
    if allowed:
        opts = model._meta
        response['X-Object-Type'] = "%s.%s" % (opts.app_label,
                                               opts.object_name.lower())
        response['X-Object-Id'] = str(object_id)
| Python |
"""
A set of request processors that return dictionaries to be merged into a
template context. Each function takes the request object as its only parameter
and returns a dictionary to add to the context.
These are referenced from the setting TEMPLATE_CONTEXT_PROCESSORS and used by
RequestContext.
"""
from django.conf import settings
def auth(request):
    """
    Returns context variables required by apps that use Django's
    authentication system: the current user, any queued messages, and a
    template-friendly permission wrapper.
    """
    user = request.user
    return {
        'user': user,
        'messages': user.get_and_delete_messages(),
        'perms': PermWrapper(user),
    }
def debug(request):
    """Return context variables helpful for debugging.

    Only populated when DEBUG is on and the request comes from an IP
    listed in INTERNAL_IPS.
    """
    extras = {}
    if settings.DEBUG and request.META.get('REMOTE_ADDR') in settings.INTERNAL_IPS:
        extras['debug'] = True
        from django.db import connection
        # Expose the SQL queries run so far so templates can display them.
        extras['sql_queries'] = connection.queries
    return extras
def i18n(request):
    """Expose i18n settings: LANGUAGES, LANGUAGE_CODE and LANGUAGE_BIDI."""
    from django.utils import translation
    # Prefer the language negotiated for this request (set by the locale
    # middleware); otherwise fall back to the site-wide default.
    context_extras = {
        'LANGUAGES': settings.LANGUAGES,
        'LANGUAGE_CODE': getattr(request, 'LANGUAGE_CODE',
                                 settings.LANGUAGE_CODE),
        'LANGUAGE_BIDI': translation.get_language_bidi(),
    }
    return context_extras
def request(request):
    """Make the HttpRequest object itself available as 'request'."""
    return dict(request=request)
# PermWrapper and PermLookupDict proxy the permissions system into objects that
# the template system can understand.
class PermLookupDict(object):
    """Dict-like proxy exposing one app's permissions to the template layer.

    Lets templates write {{ perms.app_label.perm_codename }}.
    """
    def __init__(self, user, module_name):
        self.user = user
        self.module_name = module_name
    def __repr__(self):
        # Show the user's full permission set when debugging templates.
        return str(self.user.get_all_permissions())
    def __getitem__(self, perm_name):
        # perms.app.codename -> user.has_perm("app.codename")
        return self.user.has_perm("%s.%s" % (self.module_name, perm_name))
    def __nonzero__(self):
        # Python 2 truth protocol: true when the user has any permission
        # at all in this app.
        return self.user.has_module_perms(self.module_name)
class PermWrapper(object):
    """Lazy mapping from app label to that app's PermLookupDict.

    Accessed in templates as {{ perms.some_app }}.
    """
    def __init__(self, user):
        self.user = user
    def __getitem__(self, module_name):
        # Build the per-app proxy on demand; nothing is cached.
        return PermLookupDict(self.user, module_name)
| Python |
"""
A Python "serializer". Doesn't do much serializing per se -- just converts to
and from basic Python data types (lists, dicts, strings, etc.). Useful as a basis for
other serializers.
"""
from django.conf import settings
from django.core.serializers import base
from django.db import models
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to basic Python objects (dicts, lists, strings).
    """
    def start_serialization(self):
        # _current accumulates the fields of the object being serialized;
        # objects collects the finished per-object dicts.
        self._current = None
        self.objects = []
    def end_serialization(self):
        pass
    def start_object(self, obj):
        self._current = {}
    def end_object(self, obj):
        record = {
            "model" : str(obj._meta),
            "pk" : str(obj._get_pk_val()),
            "fields" : self._current
        }
        self.objects.append(record)
        self._current = None
    def handle_field(self, obj, field):
        # Plain fields are stored as their raw attribute value.
        self._current[field.name] = getattr(obj, field.name)
    def handle_fk_field(self, obj, field):
        # Foreign keys are flattened to the related object's primary key
        # (or None when the relation is empty).
        target = getattr(obj, field.name)
        if target is not None:
            target = target._get_pk_val()
        self._current[field.name] = target
    def handle_m2m_field(self, obj, field):
        # Many-to-many relations become a list of related primary keys.
        related_pks = []
        for related in getattr(obj, field.name).iterator():
            related_pks.append(related._get_pk_val())
        self._current[field.name] = related_pks
    def getvalue(self):
        # The "stream" for this serializer is just the accumulated list.
        return self.objects
def Deserializer(object_list, **options):
    """
    Deserialize simple Python objects back into Django ORM instances.
    It's expected that you pass the Python objects themselves (instead of a
    stream or a string) to the constructor
    """
    # Ensure all model classes are loaded before lookups begin.
    models.get_apps()
    for d in object_list:
        # Look up the model and start building a dict of data for it.
        Model = _get_model(d["model"])
        data = {Model._meta.pk.attname : Model._meta.pk.to_python(d["pk"])}
        m2m_data = {}
        # Handle each field
        for (field_name, field_value) in d["fields"].iteritems():
            # Re-encode unicode values to the requested byte encoding
            # (defaults to the project's DEFAULT_CHARSET).
            if isinstance(field_value, unicode):
                field_value = field_value.encode(options.get("encoding", settings.DEFAULT_CHARSET))
            field = Model._meta.get_field(field_name)
            # Handle M2M relations
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                # Convert each related pk via the target model's pk field.
                pks = []
                m2m_convert = field.rel.to._meta.pk.to_python
                for pk in field_value:
                    if isinstance(pk, unicode):
                        pks.append(m2m_convert(pk.encode(options.get("encoding", settings.DEFAULT_CHARSET))))
                    else:
                        pks.append(m2m_convert(pk))
                m2m_data[field.name] = pks
            # Handle FK fields
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                data[field.attname] = field.rel.to._meta.pk.to_python(field_value)
            # Handle all other fields
            else:
                data[field.name] = field.to_python(field_value)
        # m2m data is applied later, when DeserializedObject.save() runs.
        yield base.DeserializedObject(Model(**data), m2m_data)
def _get_model(model_identifier):
    """
    Helper to look up a model from an "app_label.module_name" string.
    """
    try:
        found = models.get_model(*model_identifier.split("."))
    except TypeError:
        # Wrong number of dotted components -> get_model() rejected them.
        found = None
    if found is None:
        raise base.DeserializationError(
            "Invalid model identifier: '%s'" % model_identifier)
    return found
| Python |
"""
Module for abstract serializer/unserializer base classes.
"""
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
from django.db import models
class SerializationError(Exception):
    """Something bad happened during serialization."""
class DeserializationError(Exception):
    """Something bad happened during deserialization."""
class Serializer(object):
    """
    Abstract serializer base class.

    Subclasses implement the start/handle/end hooks; serialize() drives
    the iteration over the queryset.
    """
    def serialize(self, queryset, **options):
        """
        Serialize a queryset.

        Recognized options: 'stream' (defaults to a fresh StringIO) and
        'fields' (an optional whitelist of field names to include).
        """
        self.options = options
        self.stream = options.get("stream", StringIO())
        self.selected_fields = options.get("fields")
        self.start_serialization()
        for obj in queryset:
            self.start_object(obj)
            for field in obj._meta.fields:
                if not field.serialize:
                    continue
                if field.rel is None:
                    if self._wanted(field.attname):
                        self.handle_field(obj, field)
                elif self._wanted(field.attname[:-3]):
                    # FK attnames end in "_id"; strip it when matching
                    # against the 'fields' whitelist.
                    self.handle_fk_field(obj, field)
            for field in obj._meta.many_to_many:
                if field.serialize and self._wanted(field.attname):
                    self.handle_m2m_field(obj, field)
            self.end_object(obj)
        self.end_serialization()
        return self.getvalue()
    def _wanted(self, name):
        # True when no field whitelist was given, or 'name' is in it.
        return self.selected_fields is None or name in self.selected_fields
    def get_string_value(self, obj, field):
        """
        Convert a field's value to a string.
        """
        if isinstance(field, models.DateTimeField):
            value = getattr(obj, field.name).strftime("%Y-%m-%d %H:%M:%S")
        elif isinstance(field, models.FileField):
            url_getter = getattr(obj, "get_%s_url" % field.name, lambda: None)
            value = url_getter()
        else:
            value = field.flatten_data(follow=None, obj=obj).get(field.name, "")
        return str(value)
    def start_serialization(self):
        """
        Called when serializing of the queryset starts.
        """
        raise NotImplementedError
    def end_serialization(self):
        """
        Called when serializing of the queryset ends.
        """
        pass
    def start_object(self, obj):
        """
        Called when serializing of an object starts.
        """
        raise NotImplementedError
    def end_object(self, obj):
        """
        Called when serializing of an object ends.
        """
        pass
    def handle_field(self, obj, field):
        """
        Called to handle each individual (non-relational) field on an object.
        """
        raise NotImplementedError
    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey field.
        """
        raise NotImplementedError
    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField.
        """
        raise NotImplementedError
    def getvalue(self):
        """
        Return the fully serialized queryset.
        """
        return self.stream.getvalue()
class Deserializer(object):
    """
    Abstract base deserializer class.
    """
    def __init__(self, stream_or_string, **options):
        """
        Init this serializer given a stream or a string of data.
        """
        self.options = options
        if isinstance(stream_or_string, basestring):
            # Wrap raw strings so subclasses can always read from a stream.
            self.stream = StringIO(stream_or_string)
        else:
            self.stream = stream_or_string
        # Make sure that the models have all been loaded before
        # deserialization starts (otherwise subclass calls to get_model()
        # and friends might fail...)
        models.get_apps()
    def __iter__(self):
        return self
    def next(self):
        """Iteration interface -- return the next item in the stream"""
        raise NotImplementedError
class DeserializedObject(object):
    """
    A deserialized model instance plus its pending many-to-many data.

    Call ``save()`` to save the object (with the many-to-many data) to the
    database; call ``save(save_m2m=False)`` to save just the object fields
    (and not touch the many-to-many stuff.)
    """
    def __init__(self, obj, m2m_data=None):
        self.object = obj
        self.m2m_data = m2m_data
    def __repr__(self):
        return "<DeserializedObject: %s>" % str(self.object)
    def save(self, save_m2m=True):
        self.object.save()
        if save_m2m and self.m2m_data:
            for accessor_name, object_list in self.m2m_data.items():
                setattr(self.object, accessor_name, object_list)
        # Drop the m2m data unconditionally so a second (possibly
        # accidental) call to save() can't apply it twice.
        self.m2m_data = None
| Python |
"""
Serialize data to/from JSON
"""
import datetime
from django.utils import simplejson
from django.core.serializers.python import Serializer as PythonSerializer
from django.core.serializers.python import Deserializer as PythonDeserializer
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
class Serializer(PythonSerializer):
    """
    Convert a queryset to JSON.
    """
    def end_serialization(self):
        # Dump the accumulated Python structure as JSON, using the
        # datetime-aware encoder so date/time values serialize cleanly.
        simplejson.dump(self.objects, self.stream,
                        cls=DateTimeAwareJSONEncoder, **self.options)
    def getvalue(self):
        # Unlike PythonSerializer, the payload lives in the text stream.
        return self.stream.getvalue()
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of JSON data.
    """
    if isinstance(stream_or_string, basestring):
        source = StringIO(stream_or_string)
    else:
        source = stream_or_string
    # Parse the JSON first, then let the Python deserializer rebuild
    # the ORM instances from the plain data structures.
    for obj in PythonDeserializer(simplejson.load(source)):
        yield obj
class DateTimeAwareJSONEncoder(simplejson.JSONEncoder):
    """
    JSONEncoder subclass that knows how to encode date/time types.
    """
    DATE_FORMAT = "%Y-%m-%d"
    TIME_FORMAT = "%H:%M:%S"
    def default(self, o):
        # NOTE: datetime must be tested before date -- datetime.datetime
        # is a subclass of datetime.date.
        if isinstance(o, datetime.datetime):
            return o.strftime(self.DATE_FORMAT + " " + self.TIME_FORMAT)
        if isinstance(o, datetime.date):
            return o.strftime(self.DATE_FORMAT)
        if isinstance(o, datetime.time):
            return o.strftime(self.TIME_FORMAT)
        return super(DateTimeAwareJSONEncoder, self).default(o)
| Python |
"""
YAML serializer.
Requires PyYaml (http://pyyaml.org/), but that's checked for in __init__.
"""
import datetime
from django.core.serializers.python import Serializer as PythonSerializer
from django.core.serializers.python import Deserializer as PythonDeserializer
try:
from cStringIO import StringIO
except ImportError:
from StringIO import StringIO
import yaml
class Serializer(PythonSerializer):
    """
    Convert a queryset to YAML.
    """
    def end_serialization(self):
        # Emit the accumulated Python objects as a YAML document.
        yaml.dump(self.objects, self.stream, **self.options)
    def getvalue(self):
        # The YAML text lives in the stream, not in self.objects.
        return self.stream.getvalue()
def Deserializer(stream_or_string, **options):
    """
    Deserialize a stream or string of YAML data.
    """
    if isinstance(stream_or_string, basestring):
        source = StringIO(stream_or_string)
    else:
        source = stream_or_string
    # NOTE(review): yaml.load() without a safe Loader can construct
    # arbitrary Python objects -- never feed it untrusted input.
    for obj in PythonDeserializer(yaml.load(source)):
        yield obj
| Python |
"""
Interfaces for serializing Django objects.
Usage::
>>> from django.core import serializers
>>> json = serializers.serialize("json", some_query_set)
>>> objects = list(serializers.deserialize("json", json))
To add your own serializers, use the SERIALIZATION_MODULES setting::
SERIALIZATION_MODULES = {
"csv" : "path.to.csv.serializer",
"txt" : "path.to.txt.serializer",
}
"""
from django.conf import settings
# Built-in serializers
BUILTIN_SERIALIZERS = {
    "xml" : "django.core.serializers.xml_serializer",
    "python" : "django.core.serializers.python",
    "json" : "django.core.serializers.json",
}
# Check for PyYaml and register the serializer if it's available.
try:
    import yaml
    BUILTIN_SERIALIZERS["yaml"] = "django.core.serializers.pyyaml"
except ImportError:
    pass
# Maps format name -> serializer module; populated lazily by
# _load_serializers() so user settings can take effect first.
_serializers = {}
def register_serializer(format, serializer_module):
    """Register a serializer under *format*, given its module's dotted path."""
    _serializers[format] = __import__(serializer_module, {}, {}, [''])
def unregister_serializer(format):
    """Unregister a given serializer (KeyError if it was never registered)."""
    _serializers.pop(format)
def get_serializer(format):
    """Return the Serializer class registered for *format*."""
    if not _serializers:
        _load_serializers()
    module = _serializers[format]
    return module.Serializer
def get_serializer_formats():
    """Return the names of every registered serialization format."""
    if not _serializers:
        _load_serializers()
    return _serializers.keys()
def get_deserializer(format):
    """Return the Deserializer callable registered for *format*."""
    if not _serializers:
        _load_serializers()
    module = _serializers[format]
    return module.Deserializer
def serialize(format, queryset, **options):
    """
    Serialize a queryset (or any iterator that returns database objects)
    in the given format and return the serializer's accumulated output.
    """
    serializer = get_serializer(format)()
    serializer.serialize(queryset, **options)
    return serializer.getvalue()
def deserialize(format, stream_or_string):
    """
    Deserialize a stream or a string.

    Returns an iterator that yields ``(obj, m2m_relation_dict)`` pairs,
    where ``obj`` is an instantiated -- but *unsaved* -- object and
    ``m2m_relation_dict`` maps ``m2m_field_name`` to its list of related
    objects.
    """
    return get_deserializer(format)(stream_or_string)
def _load_serializers():
    """
    Register built-in and settings-defined serializers.

    Done lazily so that user code has a chance to (e.g.) set up custom
    settings without needing to be careful of import order.
    """
    for format, module_path in BUILTIN_SERIALIZERS.items():
        register_serializer(format, module_path)
    user_modules = getattr(settings, "SERIALIZATION_MODULES", {})
    for format in user_modules:
        register_serializer(format, user_modules[format])
"""
XML serializer.
"""
from django.conf import settings
from django.core.serializers import base
from django.db import models
from django.utils.xmlutils import SimplerXMLGenerator
from xml.dom import pulldom
class Serializer(base.Serializer):
    """
    Serializes a QuerySet to XML.

    Output: a <django-objects version="1.0"> root with one
    <object pk=... model=...> element per instance, each containing a
    <field> element per serialized field.
    """
    def indent(self, level):
        # Emit newline + indentation only when the 'indent' option was
        # supplied; otherwise the document stays on a single line.
        if self.options.get('indent', None) is not None:
            self.xml.ignorableWhitespace('\n' + ' ' * self.options.get('indent', None) * level)
    def start_serialization(self):
        """
        Start serialization -- open the XML document and the root element.
        """
        # Output encoding defaults to the project-wide DEFAULT_CHARSET.
        self.xml = SimplerXMLGenerator(self.stream, self.options.get("encoding", settings.DEFAULT_CHARSET))
        self.xml.startDocument()
        self.xml.startElement("django-objects", {"version" : "1.0"})
    def end_serialization(self):
        """
        End serialization -- end the document.
        """
        self.indent(0)
        self.xml.endElement("django-objects")
        self.xml.endDocument()
    def start_object(self, obj):
        """
        Called as each object is handled.

        Rejects anything that is not a model instance (detected via the
        _meta attribute).
        """
        if not hasattr(obj, "_meta"):
            raise base.SerializationError("Non-model object (%s) encountered during serialization" % type(obj))
        self.indent(1)
        self.xml.startElement("object", {
            "pk" : str(obj._get_pk_val()),
            "model" : str(obj._meta),
        })
    def end_object(self, obj):
        """
        Called after handling all fields for an object.
        """
        self.indent(1)
        self.xml.endElement("object")
    def handle_field(self, obj, field):
        """
        Called to handle each field on an object (except for ForeignKeys and
        ManyToManyFields)
        """
        self.indent(2)
        self.xml.startElement("field", {
            "name" : field.name,
            "type" : field.get_internal_type()
        })
        # Get a "string version" of the object's data (this is handled by the
        # serializer base class). NULL values are written as an empty
        # <None/> child element rather than as text.
        if getattr(obj, field.name) is not None:
            value = self.get_string_value(obj, field)
            self.xml.characters(str(value))
        else:
            self.xml.addQuickElement("None")
        self.xml.endElement("field")
    def handle_fk_field(self, obj, field):
        """
        Called to handle a ForeignKey (we need to treat them slightly
        differently from regular fields).

        Only the related object's primary key is written, never its data.
        """
        self._start_relational_field(field)
        related = getattr(obj, field.name)
        if related is not None:
            self.xml.characters(str(related._get_pk_val()))
        else:
            self.xml.addQuickElement("None")
        self.xml.endElement("field")
    def handle_m2m_field(self, obj, field):
        """
        Called to handle a ManyToManyField. Related objects are only
        serialized as references to the object's PK (i.e. the related *data*
        is not dumped, just the relation).
        """
        self._start_relational_field(field)
        for relobj in getattr(obj, field.name).iterator():
            self.xml.addQuickElement("object", attrs={"pk" : str(relobj._get_pk_val())})
        self.xml.endElement("field")
    def _start_relational_field(self, field):
        """
        Helper to output the <field> element for relational fields
        (records the relation class name and the target model identifier).
        """
        self.indent(2)
        self.xml.startElement("field", {
            "name" : field.name,
            "rel" : field.rel.__class__.__name__,
            "to" : str(field.rel.to._meta),
        })
class Deserializer(base.Deserializer):
    """
    Deserialize XML.

    Streams <object> elements out of a pulldom event stream and converts
    each into a base.DeserializedObject.
    """
    def __init__(self, stream_or_string, **options):
        super(Deserializer, self).__init__(stream_or_string, **options)
        # Encoding used when turning node text back into byte strings.
        self.encoding = self.options.get("encoding", settings.DEFAULT_CHARSET)
        # Event-driven parse: only one <object> subtree is expanded in
        # memory at a time.
        self.event_stream = pulldom.parse(self.stream)
    def next(self):
        # Python 2 iterator protocol: return the next deserialized object,
        # or StopIteration once the event stream is exhausted.
        for event, node in self.event_stream:
            if event == "START_ELEMENT" and node.nodeName == "object":
                self.event_stream.expandNode(node)
                return self._handle_object(node)
        raise StopIteration
    def _handle_object(self, node):
        """
        Convert an <object> node to a DeserializedObject.
        """
        # Look up the model using the model loading mechanism. If this fails, bail.
        Model = self._get_model_from_node(node, "model")
        # Start building a data dictionary from the object. If the node is
        # missing the pk attribute, bail.
        pk = node.getAttribute("pk")
        if not pk:
            raise base.DeserializationError("<object> node is missing the 'pk' attribute")
        data = {Model._meta.pk.attname : Model._meta.pk.to_python(pk)}
        # Also start building a dict of m2m data (this is saved as
        # {m2m_accessor_attribute : [list_of_related_objects]})
        m2m_data = {}
        # Deseralize each field.
        for field_node in node.getElementsByTagName("field"):
            # If the field is missing the name attribute, bail (are you
            # sensing a pattern here?)
            field_name = field_node.getAttribute("name")
            if not field_name:
                raise base.DeserializationError("<field> node is missing the 'name' attribute")
            # Get the field from the Model. This will raise a
            # FieldDoesNotExist if, well, the field doesn't exist, which will
            # be propagated correctly.
            field = Model._meta.get_field(field_name)
            # As is usually the case, relation fields get the special treatment.
            if field.rel and isinstance(field.rel, models.ManyToManyRel):
                m2m_data[field.name] = self._handle_m2m_field_node(field_node, field)
            elif field.rel and isinstance(field.rel, models.ManyToOneRel):
                data[field.attname] = self._handle_fk_field_node(field_node, field)
            else:
                # A single <None/> child marks a NULL value; otherwise the
                # value is the node's inner text, coerced by the field.
                if len(field_node.childNodes) == 1 and field_node.childNodes[0].nodeName == 'None':
                    value = None
                else:
                    value = field.to_python(getInnerText(field_node).strip().encode(self.encoding))
                data[field.name] = value
        # Return a DeserializedObject so that the m2m data has a place to live.
        return base.DeserializedObject(Model(**data), m2m_data)
    def _handle_fk_field_node(self, node, field):
        """
        Handle a <field> node for a ForeignKey
        """
        # Check if there is a child node named 'None', returning None if so.
        if len(node.childNodes) == 1 and node.childNodes[0].nodeName == 'None':
            return None
        else:
            # Coerce the raw pk text with the *related* model's pk field.
            return field.rel.to._meta.pk.to_python(
                getInnerText(node).strip().encode(self.encoding))
    def _handle_m2m_field_node(self, node, field):
        """
        Handle a <field> node for a ManyToManyField
        (returns the list of related primary keys).
        """
        return [field.rel.to._meta.pk.to_python(
            c.getAttribute("pk").encode(self.encoding))
            for c in node.getElementsByTagName("object")]
    def _get_model_from_node(self, node, attr):
        """
        Helper to look up a model from a <object model=...> or a <field
        rel=... to=...> node.
        """
        model_identifier = node.getAttribute(attr)
        if not model_identifier:
            raise base.DeserializationError(
                "<%s> node is missing the required '%s' attribute" \
                % (node.nodeName, attr))
        try:
            Model = models.get_model(*model_identifier.split("."))
        except TypeError:
            Model = None
        if Model is None:
            raise base.DeserializationError(
                "<%s> node has invalid model identifier: '%s'" % \
                (node.nodeName, model_identifier))
        return Model
def getInnerText(node):
    """
    Get all the inner text of a DOM node (recursively).

    Concatenates the data of every descendant text/CDATA node in document
    order; other node types (comments, processing instructions, ...) are
    ignored.
    """
    # inspired by http://mail.python.org/pipermail/xml-sig/2005-March/011022.html
    inner_text = []
    for child in node.childNodes:
        if child.nodeType == child.TEXT_NODE or child.nodeType == child.CDATA_SECTION_NODE:
            inner_text.append(child.data)
        elif child.nodeType == child.ELEMENT_NODE:
            # append(), not extend(): the recursive call returns a string,
            # and extend() would add it one character at a time.
            inner_text.append(getInnerText(child))
    return "".join(inner_text)
"Global Django exceptions"
class ObjectDoesNotExist(Exception):
    """The requested object does not exist."""
    # Template variable resolution treats this exception as a silent
    # failure instead of a rendering error.
    silent_variable_failure = True
class SuspiciousOperation(Exception):
    """The user did something suspicious."""
class PermissionDenied(Exception):
    """The user did not have permission to do that."""
class ViewDoesNotExist(Exception):
    """The requested view does not exist."""
class MiddlewareNotUsed(Exception):
    """This middleware is not used in this server configuration."""
class ImproperlyConfigured(Exception):
    """Django is somehow improperly configured."""
| Python |
# This module is DEPRECATED!
#
# You should no longer be using django.template_loader.
#
# Use django.template.loader instead.
from django.template.loader import *
| Python |
"""
This module converts requested URLs to callback view functions.
RegexURLResolver is the main class here. Its resolve() method takes a URL (as
a string) and returns a tuple in this format:
(view_function, function_args, function_kwargs)
"""
from django.http import Http404
from django.core.exceptions import ImproperlyConfigured, ViewDoesNotExist
import re
# Raised when no URL pattern matches; subclassing Http404 means an
# unhandled resolver miss surfaces as a regular 404.
class Resolver404(Http404):
    pass
class NoReverseMatch(Exception):
    """Raised when no URL pattern can be reversed for the given view/args."""
    # Don't make this raise an error when used in a template.
    silent_variable_failure = True
def get_mod_func(callback):
    """
    Split a dotted path at its last dot.

    'django.views.news.stories.story_detail' ->
    ('django.views.news.stories', 'story_detail'); a string with no dot is
    returned unchanged, paired with an empty function name.
    """
    mod, sep, func = callback.rpartition('.')
    if not sep:
        return callback, ''
    return mod, func
def reverse_helper(regex, *args, **kwargs):
    """
    Does a "reverse" lookup -- returns the URL for the given args/kwargs.
    The args/kwargs are applied to the given compiled regular expression.
    For example:
        >>> reverse_helper(re.compile('^places/(\d+)/$'), 3)
        'places/3/'
        >>> reverse_helper(re.compile('^places/(?P<id>\d+)/$'), id=3)
        'places/3/'
        >>> reverse_helper(re.compile('^people/(?P<state>\w\w)/(\w+)/$'), 'adrian', state='il')
        'people/il/adrian/'
    Raises NoReverseMatch if the args/kwargs aren't valid for the regex.
    """
    # TODO: Handle nested parenthesis in the following regex.
    checker = MatchChecker(args, kwargs)
    substituted = re.sub(r'\(([^)]+)\)', checker, regex.pattern)
    # Strip the anchors: the result is a URL fragment, not a pattern.
    return substituted.replace('^', '').replace('$', '')
class MatchChecker(object):
    """
    Callable used as the re.sub replacement in reverse RegexURLPattern
    lookup: each regex group is replaced by the matching caller-supplied
    value, validated against the group's sub-pattern.
    """
    def __init__(self, args, kwargs):
        self.args = args
        self.kwargs = kwargs
        self.current_arg = 0
    def _next_positional(self):
        # Consume the next positional argument, failing loudly when the
        # pattern has more groups than the caller supplied values for.
        try:
            value = self.args[self.current_arg]
        except IndexError:
            raise NoReverseMatch('Not enough positional arguments passed in')
        self.current_arg += 1
        return value
    def __call__(self, match_obj):
        # match_obj.group(1) is the contents of the parenthesis; first work
        # out whether it is a named or an unnamed group.
        grouped = match_obj.group(1)
        named = re.search(r'^\?P<(\w+)>(.*?)$', grouped)
        if named:
            # Named group: prefer the keyword argument, falling back to the
            # positional list when the value was passed positionally.
            if named.group(1) in self.kwargs:
                value = self.kwargs[named.group(1)]
            else:
                value = self._next_positional()
            test_regex = named.group(2)
        else:
            value = self._next_positional()
            test_regex = grouped
        # re.match on purpose: the value must match from the start of the
        # string.
        if not re.match(test_regex + '$', str(value)): # TODO: Unicode?
            raise NoReverseMatch("Value %r didn't match regular expression %r" % (value, test_regex))
        return str(value) # TODO: Unicode?
class RegexURLPattern(object):
    """
    One URL pattern: a compiled regex plus its view callback (given either
    directly as a callable or lazily as a dotted-path string) and any extra
    keyword arguments for the view.
    """
    def __init__(self, regex, callback, default_args=None):
        # regex is a string representing a regular expression.
        # callback is either a string like 'foo.views.news.stories.story_detail'
        # which represents the path to a module and a view function name, or a
        # callable object (view).
        self.regex = re.compile(regex)
        if callable(callback):
            self._callback = callback
        else:
            # Defer importing the dotted path until the callback is needed.
            self._callback = None
            self._callback_str = callback
        self.default_args = default_args or {}
    def resolve(self, path):
        # Returns (callback, args, kwargs) when the regex matches *path*,
        # or (implicitly) None when it does not.
        match = self.regex.search(path)
        if match:
            # If there are any named groups, use those as kwargs, ignoring
            # non-named groups. Otherwise, pass all non-named arguments as
            # positional arguments.
            kwargs = match.groupdict()
            if kwargs:
                args = ()
            else:
                args = match.groups()
            # In both cases, pass any extra_kwargs as **kwargs.
            kwargs.update(self.default_args)
            return self.callback, args, kwargs
    def _get_callback(self):
        # Lazily import the dotted-path callback the first time it is used;
        # import problems are surfaced as ViewDoesNotExist.
        if self._callback is not None:
            return self._callback
        mod_name, func_name = get_mod_func(self._callback_str)
        try:
            self._callback = getattr(__import__(mod_name, {}, {}, ['']), func_name)
        except ImportError, e:
            raise ViewDoesNotExist, "Could not import %s. Error was: %s" % (mod_name, str(e))
        except AttributeError, e:
            raise ViewDoesNotExist, "Tried %s in module %s. Error was: %s" % (func_name, mod_name, str(e))
        return self._callback
    callback = property(_get_callback)
    def reverse(self, viewname, *args, **kwargs):
        # Reverse by view *name*: import it and delegate to reverse_helper
        # only when it really is this pattern's callback.
        mod_name, func_name = get_mod_func(viewname)
        try:
            lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
        except (ImportError, AttributeError):
            raise NoReverseMatch
        if lookup_view != self.callback:
            raise NoReverseMatch
        return self.reverse_helper(*args, **kwargs)
    def reverse_helper(self, *args, **kwargs):
        # Build the URL fragment for this pattern from args/kwargs.
        return reverse_helper(self.regex, *args, **kwargs)
class RegexURLResolver(object):
    """
    Resolves a URL against a whole URLconf module: strips its own regex
    prefix from the path, then tries each pattern in the module's
    urlpatterns in order.
    """
    def __init__(self, regex, urlconf_name, default_kwargs=None):
        # regex is a string representing a regular expression.
        # urlconf_name is a string representing the module containing urlconfs.
        self.regex = re.compile(regex)
        self.urlconf_name = urlconf_name
        self.callback = None
        self.default_kwargs = default_kwargs or {}
    def resolve(self, path):
        # Returns (view, args, kwargs) for the first matching pattern, or
        # raises Resolver404 carrying every pattern that was tried.
        tried = []
        match = self.regex.search(path)
        if match:
            # Only the part of the path after our own prefix is handed to
            # the sub-patterns.
            new_path = path[match.end():]
            for pattern in self.urlconf_module.urlpatterns:
                try:
                    sub_match = pattern.resolve(new_path)
                except Resolver404, e:
                    tried.extend([(pattern.regex.pattern + '   ' + t) for t in e.args[0]['tried']])
                else:
                    if sub_match:
                        # Merge resolver-level defaults and our own named
                        # groups into the sub-match's kwargs.
                        sub_match_dict = dict(self.default_kwargs, **sub_match[2])
                        return sub_match[0], sub_match[1], dict(match.groupdict(), **sub_match_dict)
                    tried.append(pattern.regex.pattern)
            raise Resolver404, {'tried': tried, 'path': new_path}
    def _get_urlconf_module(self):
        # Import (and cache) the URLconf module on first access.
        try:
            return self._urlconf_module
        except AttributeError:
            try:
                self._urlconf_module = __import__(self.urlconf_name, {}, {}, [''])
            except ValueError, e:
                # Invalid urlconf_name, such as "foo.bar." (note trailing period)
                raise ImproperlyConfigured, "Error while importing URLconf %r: %s" % (self.urlconf_name, e)
            return self._urlconf_module
    urlconf_module = property(_get_urlconf_module)
    def _get_url_patterns(self):
        return self.urlconf_module.urlpatterns
    url_patterns = property(_get_url_patterns)
    def _resolve_special(self, view_type):
        # Look up handler404/handler500 declared in the URLconf module and
        # import the view it names.
        callback = getattr(self.urlconf_module, 'handler%s' % view_type)
        mod_name, func_name = get_mod_func(callback)
        try:
            return getattr(__import__(mod_name, {}, {}, ['']), func_name), {}
        except (ImportError, AttributeError), e:
            raise ViewDoesNotExist, "Tried %s. Error was: %s" % (callback, str(e))
    def resolve404(self):
        return self._resolve_special('404')
    def resolve500(self):
        return self._resolve_special('500')
    def reverse(self, lookup_view, *args, **kwargs):
        # Accepts a callable or a dotted path; tries every pattern (and
        # nested resolver) until one reverses successfully.
        if not callable(lookup_view):
            mod_name, func_name = get_mod_func(lookup_view)
            try:
                lookup_view = getattr(__import__(mod_name, {}, {}, ['']), func_name)
            except (ImportError, AttributeError):
                raise NoReverseMatch
        for pattern in self.urlconf_module.urlpatterns:
            if isinstance(pattern, RegexURLResolver):
                try:
                    return pattern.reverse_helper(lookup_view, *args, **kwargs)
                except NoReverseMatch:
                    continue
            elif pattern.callback == lookup_view:
                try:
                    return pattern.reverse_helper(*args, **kwargs)
                except NoReverseMatch:
                    continue
        raise NoReverseMatch
    def reverse_helper(self, lookup_view, *args, **kwargs):
        # Prefix the sub-pattern's reversed fragment with our own.
        sub_match = self.reverse(lookup_view, *args, **kwargs)
        result = reverse_helper(self.regex, *args, **kwargs)
        return result + sub_match
def resolve(path, urlconf=None):
    """
    Resolve *path* to (view_function, args, kwargs) using *urlconf*
    (defaults to settings.ROOT_URLCONF).
    """
    if urlconf is None:
        from django.conf import settings
        urlconf = settings.ROOT_URLCONF
    return RegexURLResolver(r'^/', urlconf).resolve(path)
def reverse(viewname, urlconf=None, args=None, kwargs=None):
    """
    Return the URL (with leading '/') that maps to *viewname* for the given
    args/kwargs, using *urlconf* (defaults to settings.ROOT_URLCONF).
    """
    if urlconf is None:
        from django.conf import settings
        urlconf = settings.ROOT_URLCONF
    resolver = RegexURLResolver(r'^/', urlconf)
    return '/' + resolver.reverse(viewname, *(args or []), **(kwargs or {}))
| Python |
# Sentinel signal objects: the dispatcher compares signals by identity, so
# a bare object() is enough to name each event.
request_started = object()
request_finished = object()
got_request_exception = object()
| Python |
# Django management-related functions, including "CREATE TABLE" generation and
# development-server initialization.
import django
from django.core.exceptions import ImproperlyConfigured
import os, re, shutil, sys, textwrap
from optparse import OptionParser
from django.utils import termcolors
# For Python 2.3: the builtin set type appeared in 2.4, so fall back to the
# sets module's Set class on older interpreters.
if not hasattr(__builtins__, 'set'):
    from sets import Set as set
# Admin-index template snippet for one model: the row (and its Add/Change
# links) only renders when the user holds the relevant permissions.
MODULE_TEMPLATE = ''' {%% if perms.%(app)s.%(addperm)s or perms.%(app)s.%(changeperm)s %%}
<tr>
<th>{%% if perms.%(app)s.%(changeperm)s %%}<a href="%(app)s/%(mod)s/">{%% endif %%}%(name)s{%% if perms.%(app)s.%(changeperm)s %%}</a>{%% endif %%}</th>
<td class="x50">{%% if perms.%(app)s.%(addperm)s %%}<a href="%(app)s/%(mod)s/add/" class="addlink">{%% endif %%}Add{%% if perms.%(app)s.%(addperm)s %%}</a>{%% endif %%}</td>
<td class="x75">{%% if perms.%(app)s.%(changeperm)s %%}<a href="%(app)s/%(mod)s/" class="changelink">{%% endif %%}Change{%% if perms.%(app)s.%(changeperm)s %%}</a>{%% endif %%}</td>
</tr>
{%% endif %%}'''
# Usage string appended to the help text of actions that take app names.
APP_ARGS = '[appname ...]'
# Use django.__path__[0] because we don't know which directory django into
# which has been installed.
PROJECT_TEMPLATE_DIR = os.path.join(django.__path__[0], 'conf', '%s_template')
# Project names that would shadow real importable modules.
INVALID_PROJECT_NAMES = ('django', 'site', 'test')
# Set up the terminal color scheme. 'style' is a namespace of callables
# that wrap text in ANSI color codes; disable_termcolors() later replaces
# it with pass-through formatters when color output is unwanted.
class dummy: pass
style = dummy()
style.ERROR = termcolors.make_style(fg='red', opts=('bold',))
style.ERROR_OUTPUT = termcolors.make_style(fg='red', opts=('bold',))
style.NOTICE = termcolors.make_style(fg='red')
style.SQL_FIELD = termcolors.make_style(fg='green', opts=('bold',))
style.SQL_COLTYPE = termcolors.make_style(fg='green')
style.SQL_KEYWORD = termcolors.make_style(fg='yellow')
style.SQL_TABLE = termcolors.make_style(opts=('bold',))
# The throwaway namespace class isn't needed once instantiated.
del dummy
def disable_termcolors():
    """Replace the global ``style`` palette with no-op (identity) formatters."""
    class _plain:
        def __getattr__(self, attr):
            # Every style attribute becomes the identity function.
            return lambda text: text
    global style
    style = _plain()
# Disable terminal coloring on Windows, Pocket PC, or if somebody's piping
# the output (isatty() is False when stdout is redirected to a file/pipe).
if sys.platform == 'win32' or sys.platform == 'Pocket PC' or not sys.stdout.isatty():
    disable_termcolors()
def _is_valid_dir_name(s):
return bool(re.search(r'^\w+$', s))
def _get_installed_models(table_list):
    "Gets a set of all models that are installed, given a list of existing tables"
    from django.db import models
    all_models = []
    for app in models.get_apps():
        all_models.extend(models.get_models(app))
    # Keep only models whose table actually exists in the database.
    return set([m for m in all_models if m._meta.db_table in table_list])
def _get_table_list():
    "Gets a list of all db tables that are physically installed."
    from django.db import connection, get_introspection_module
    cur = connection.cursor()
    introspection = get_introspection_module()
    return introspection.get_table_list(cur)
def _get_sequence_list():
    "Returns a list of information about all DB sequences for all models in all apps"
    from django.db import models
    sequence_list = []
    for app in models.get_apps():
        for model in models.get_models(app):
            for f in model._meta.fields:
                if isinstance(f, models.AutoField):
                    sequence_list.append({'table': model._meta.db_table, 'column': f.column})
                    break # Only one AutoField is allowed per model, so don't bother continuing.
            # Each m2m join table has its own implicit id sequence.
            for f in model._meta.many_to_many:
                sequence_list.append({'table': f.m2m_db_table(), 'column': None})
    return sequence_list
def get_rel_data_type(f):
    """
    Return the internal type a foreign key column should use when it points
    at field *f*.

    If the foreign key points to an AutoField, a PositiveIntegerField or a
    PositiveSmallIntegerField, the foreign key should be an IntegerField,
    not the referred field type (those types only make sense on the
    referenced side). Otherwise, the foreign key should be the same type of
    field as the field to which it points.
    """
    # Was a lambda using the fragile 'cond and a or b' idiom; a def with an
    # explicit branch is equivalent and clearer.
    internal_type = f.get_internal_type()
    if internal_type in ('AutoField', 'PositiveIntegerField', 'PositiveSmallIntegerField'):
        return 'IntegerField'
    return internal_type
def get_version():
    "Returns the version as a human-format string."
    from django import VERSION
    version = '.'.join([str(piece) for piece in VERSION[:-1]])
    # A non-empty final component is a pre-release tag, e.g. '1.0-beta'.
    if VERSION[-1]:
        version = version + '-' + VERSION[-1]
    return version
def get_sql_create(app):
    "Returns a list of the CREATE TABLE SQL statements for the given app."
    from django.db import get_creation_module, models
    data_types = get_creation_module().DATA_TYPES
    if not data_types:
        # This must be the "dummy" database backend, which means the user
        # hasn't set DATABASE_ENGINE.
        sys.stderr.write(style.ERROR("Error: Django doesn't know which syntax to use for your SQL statements,\n" +
            "because you haven't specified the DATABASE_ENGINE setting.\n" +
            "Edit your settings file and change DATABASE_ENGINE to something like 'postgresql' or 'mysql'.\n"))
        sys.exit(1)
    # Get installed models, so we generate REFERENCES right.
    # We trim models from the current app so that the sqlreset command does not
    # generate invalid SQL (leaving models out of known_models is harmless, so
    # we can be conservative).
    app_models = models.get_models(app)
    final_output = []
    known_models = set([model for model in _get_installed_models(_get_table_list()) if model not in app_models])
    pending_references = {}
    for model in app_models:
        # Inline REFERENCES are only emitted for known models; anything else
        # is deferred into pending_references.
        output, references = _get_sql_model_create(model, known_models)
        final_output.extend(output)
        for refto, refs in references.items():
            pending_references.setdefault(refto,[]).extend(refs)
        final_output.extend(_get_sql_for_pending_references(model, pending_references))
        # Keep track of the fact that we've created the table for this model.
        known_models.add(model)
    # Create the many-to-many join tables.
    for model in app_models:
        final_output.extend(_get_many_to_many_sql_for_model(model))
    # Handle references to tables that are from other apps
    # but don't exist physically
    not_installed_models = set(pending_references.keys())
    if not_installed_models:
        alter_sql = []
        for model in not_installed_models:
            # Emit these as SQL comments: they depend on missing tables.
            alter_sql.extend(['-- ' + sql for sql in
                _get_sql_for_pending_references(model, pending_references)])
        if alter_sql:
            final_output.append('-- The following references should be added but depend on non-existent tables:')
            final_output.extend(alter_sql)
    return final_output
get_sql_create.help_doc = "Prints the CREATE TABLE SQL statements for the given app name(s)."
get_sql_create.args = APP_ARGS
def _get_sql_model_create(model, known_models=None):
    """
    Get the SQL required to create a single model.

    Returns (list_of_sql, pending_references_dict). *known_models* is the
    set of models whose tables already exist; references to any other model
    are deferred into the returned pending-references dict instead of being
    emitted inline.
    """
    from django.db import backend, get_creation_module, models
    # None sentinel instead of a shared mutable default set() instance.
    if known_models is None:
        known_models = set()
    data_types = get_creation_module().DATA_TYPES
    opts = model._meta
    final_output = []
    table_output = []
    pending_references = {}
    for f in opts.fields:
        if isinstance(f, (models.ForeignKey, models.OneToOneField)):
            # FK columns mirror the *referenced* field's type.
            rel_field = f.rel.get_related_field()
            data_type = get_rel_data_type(rel_field)
        else:
            rel_field = f
            data_type = f.get_internal_type()
        col_type = data_types[data_type]
        if col_type is not None:
            # Make the definition (e.g. 'foo VARCHAR(30)') for this field.
            field_output = [style.SQL_FIELD(backend.quote_name(f.column)),
                style.SQL_COLTYPE(col_type % rel_field.__dict__)]
            field_output.append(style.SQL_KEYWORD('%sNULL' % (not f.null and 'NOT ' or '')))
            if f.unique:
                field_output.append(style.SQL_KEYWORD('UNIQUE'))
            if f.primary_key:
                field_output.append(style.SQL_KEYWORD('PRIMARY KEY'))
            if f.rel:
                if f.rel.to in known_models:
                    field_output.append(style.SQL_KEYWORD('REFERENCES') + ' ' + \
                        style.SQL_TABLE(backend.quote_name(f.rel.to._meta.db_table)) + ' (' + \
                        style.SQL_FIELD(backend.quote_name(f.rel.to._meta.get_field(f.rel.field_name).column)) + ')' +
                        backend.get_deferrable_sql()
                    )
                else:
                    # We haven't yet created the table to which this field
                    # is related, so save it for later. (The original bound
                    # append()'s None return to an unused local here.)
                    pending_references.setdefault(f.rel.to, []).append((model, f))
            table_output.append(' '.join(field_output))
    if opts.order_with_respect_to:
        # Implicit ordering column for order_with_respect_to models.
        table_output.append(style.SQL_FIELD(backend.quote_name('_order')) + ' ' + \
            style.SQL_COLTYPE(data_types['IntegerField']) + ' ' + \
            style.SQL_KEYWORD('NULL'))
    for field_constraints in opts.unique_together:
        table_output.append(style.SQL_KEYWORD('UNIQUE') + ' (%s)' % \
            ", ".join([backend.quote_name(style.SQL_FIELD(opts.get_field(f).column)) for f in field_constraints]))
    full_statement = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + style.SQL_TABLE(backend.quote_name(opts.db_table)) + ' (']
    for i, line in enumerate(table_output): # Combine and add commas.
        full_statement.append('    %s%s' % (line, i < len(table_output)-1 and ',' or ''))
    full_statement.append(');')
    final_output.append('\n'.join(full_statement))
    return final_output, pending_references
def _get_sql_for_pending_references(model, pending_references):
    """
    Get any ALTER TABLE statements to add constraints after the fact.

    Pops *model*'s entry from *pending_references* and returns one
    ALTER TABLE ... ADD CONSTRAINT statement per deferred foreign key, or
    an empty list when the backend doesn't support constraints.
    """
    # The original also fetched get_creation_module().DATA_TYPES into an
    # unused local; removed.
    from django.db import backend
    final_output = []
    if backend.supports_constraints:
        opts = model._meta
        if model in pending_references:
            for rel_class, f in pending_references[model]:
                rel_opts = rel_class._meta
                r_table = rel_opts.db_table
                r_col = f.column
                table = opts.db_table
                col = opts.get_field(f.rel.field_name).column
                # For MySQL, r_name must be unique in the first 64 characters.
                # So we are careful with character usage here.
                r_name = '%s_refs_%s_%x' % (r_col, col, abs(hash((r_table, table))))
                final_output.append(style.SQL_KEYWORD('ALTER TABLE') + ' %s ADD CONSTRAINT %s FOREIGN KEY (%s) REFERENCES %s (%s)%s;' % \
                    (backend.quote_name(r_table), r_name,
                    backend.quote_name(r_col), backend.quote_name(table), backend.quote_name(col),
                    backend.get_deferrable_sql()))
            del pending_references[model]
    return final_output
def _get_many_to_many_sql_for_model(model):
    """
    Return CREATE TABLE statements for *model*'s many-to-many join tables.

    Each join table gets an auto-increment id, NOT NULL foreign keys to
    both sides, and a UNIQUE constraint over the column pair. Fields whose
    relation is a GenericRel are skipped.
    """
    from django.db import backend, get_creation_module
    from django.db.models import GenericRel
    data_types = get_creation_module().DATA_TYPES
    opts = model._meta
    final_output = []
    for f in opts.many_to_many:
        if not isinstance(f.rel, GenericRel):
            table_output = [style.SQL_KEYWORD('CREATE TABLE') + ' ' + \
                style.SQL_TABLE(backend.quote_name(f.m2m_db_table())) + ' (']
            table_output.append('    %s %s %s,' % \
                (style.SQL_FIELD(backend.quote_name('id')),
                style.SQL_COLTYPE(data_types['AutoField']),
                style.SQL_KEYWORD('NOT NULL PRIMARY KEY')))
            # FK column back to this model's table.
            table_output.append('    %s %s %s %s (%s)%s,' % \
                (style.SQL_FIELD(backend.quote_name(f.m2m_column_name())),
                style.SQL_COLTYPE(data_types[get_rel_data_type(opts.pk)] % opts.pk.__dict__),
                style.SQL_KEYWORD('NOT NULL REFERENCES'),
                style.SQL_TABLE(backend.quote_name(opts.db_table)),
                style.SQL_FIELD(backend.quote_name(opts.pk.column)),
                backend.get_deferrable_sql()))
            # FK column to the related model's table.
            table_output.append('    %s %s %s %s (%s)%s,' % \
                (style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name())),
                style.SQL_COLTYPE(data_types[get_rel_data_type(f.rel.to._meta.pk)] % f.rel.to._meta.pk.__dict__),
                style.SQL_KEYWORD('NOT NULL REFERENCES'),
                style.SQL_TABLE(backend.quote_name(f.rel.to._meta.db_table)),
                style.SQL_FIELD(backend.quote_name(f.rel.to._meta.pk.column)),
                backend.get_deferrable_sql()))
            # Each (model, related) pair may appear only once.
            table_output.append('    %s (%s, %s)' % \
                (style.SQL_KEYWORD('UNIQUE'),
                style.SQL_FIELD(backend.quote_name(f.m2m_column_name())),
                style.SQL_FIELD(backend.quote_name(f.m2m_reverse_name()))))
            table_output.append(');')
            final_output.append('\n'.join(table_output))
    return final_output
def get_sql_delete(app):
    """
    Returns a list of the DROP TABLE SQL statements for the given app.

    Only tables that physically exist are dropped; foreign-key constraints
    pointing at a table are removed before the table itself, and the final
    list is reversed to respect inter-table dependencies.
    """
    from django.db import backend, connection, models, get_introspection_module
    introspection = get_introspection_module()
    # This should work even if a connection isn't available.
    # (Intentionally broad except: any failure just means "no introspection".)
    try:
        cursor = connection.cursor()
    except:
        cursor = None
    # Figure out which tables already exist
    if cursor:
        table_names = introspection.get_table_list(cursor)
    else:
        table_names = []
    output = []
    # Output DROP TABLE statements for standard application tables.
    to_delete = set()
    references_to_delete = {}
    app_models = models.get_models(app)
    for model in app_models:
        if cursor and model._meta.db_table in table_names:
            # The table exists, so it needs to be dropped
            opts = model._meta
            for f in opts.fields:
                if f.rel and f.rel.to not in to_delete:
                    references_to_delete.setdefault(f.rel.to, []).append( (model, f) )
            to_delete.add(model)
    for model in app_models:
        if cursor and model._meta.db_table in table_names:
            # Drop the table now
            output.append('%s %s;' % (style.SQL_KEYWORD('DROP TABLE'),
                style.SQL_TABLE(backend.quote_name(model._meta.db_table))))
            if backend.supports_constraints and references_to_delete.has_key(model):
                for rel_class, f in references_to_delete[model]:
                    table = rel_class._meta.db_table
                    col = f.column
                    r_table = model._meta.db_table
                    r_col = model._meta.get_field(f.rel.field_name).column
                    # Must match the constraint name generated at create time.
                    output.append('%s %s %s %s;' % \
                        (style.SQL_KEYWORD('ALTER TABLE'),
                        style.SQL_TABLE(backend.quote_name(table)),
                        style.SQL_KEYWORD(backend.get_drop_foreignkey_sql()),
                        style.SQL_FIELD(backend.quote_name('%s_refs_%s_%x' % (col, r_col, abs(hash((table, r_table))))))))
                del references_to_delete[model]
    # Output DROP TABLE statements for many-to-many tables.
    for model in app_models:
        opts = model._meta
        for f in opts.many_to_many:
            if cursor and f.m2m_db_table() in table_names:
                output.append("%s %s;" % (style.SQL_KEYWORD('DROP TABLE'),
                    style.SQL_TABLE(backend.quote_name(f.m2m_db_table()))))
    # NOTE: a dead 'app_label = app_models[0]._meta.app_label' lived here;
    # it was unused and raised IndexError for apps with no models. Removed.
    # Close database connection explicitly, in case this output is being piped
    # directly into a database client, to avoid locking issues.
    if cursor:
        cursor.close()
        connection.close()
    return output[::-1] # Reverse it, to deal with table dependencies.
get_sql_delete.help_doc = "Prints the DROP TABLE SQL statements for the given app name(s)."
get_sql_delete.args = APP_ARGS
def get_sql_reset(app):
    "Returns a list of the DROP TABLE SQL, then the CREATE TABLE SQL, for the given module."
    # Drops come first so the subsequent CREATEs (via get_sql_all, defined
    # elsewhere in this module) start from a clean slate.
    return get_sql_delete(app) + get_sql_all(app)
get_sql_reset.help_doc = "Prints the DROP TABLE SQL, then the CREATE TABLE SQL, for the given app name(s)."
get_sql_reset.args = APP_ARGS
def get_sql_flush():
    "Returns a list of the SQL statements used to flush the database"
    from django.db import backend
    # Delegate to the backend, handing it the current tables and sequences.
    return backend.get_sql_flush(style, _get_table_list(), _get_sequence_list())
get_sql_flush.help_doc = "Returns a list of the SQL statements required to return all tables in the database to the state they were in just after they were installed."
get_sql_flush.args = ''
def get_custom_sql_for_model(model):
    """
    Return the custom SQL statements for *model* from its app's sql/
    directory: first "<model>.<engine>.sql", then "<model>.sql", split into
    individual statements with "--" comments stripped.
    """
    from django.db import models
    from django.conf import settings
    opts = model._meta
    app_dir = os.path.normpath(os.path.join(os.path.dirname(models.get_app(model._meta.app_label).__file__), 'sql'))
    output = []
    # Some backends can't execute more than one SQL statement at a time,
    # so split into separate statements.
    statements = re.compile(r";[ \t]*$", re.M)
    # Find custom SQL, if it's available.
    sql_files = [os.path.join(app_dir, "%s.%s.sql" % (opts.object_name.lower(), settings.DATABASE_ENGINE)),
                 os.path.join(app_dir, "%s.sql" % opts.object_name.lower())]
    for sql_file in sql_files:
        if os.path.exists(sql_file):
            fp = open(sql_file, 'U')
            for statement in statements.split(fp.read()):
                # Strip "--" comments up to end of line or end of string.
                # The original pattern used the character class [\n\Z],
                # where \Z is NOT an anchor (it matched a literal 'Z' and is
                # a hard error on modern re); the alternation is the intent.
                statement = re.sub(r"--.*(\n|\Z)", "", statement)
                if statement.strip():
                    output.append(statement + ";")
            fp.close()
    return output
def get_custom_sql(app):
    "Returns a list of the custom table modifying SQL statements for the given app."
    from django.db.models import get_models
    # Note: the per-model helper resolves the app's sql/ directory itself, so
    # no path handling is needed here (the old unused app_dir local is gone).
    output = []
    for model in get_models(app):
        output.extend(get_custom_sql_for_model(model))
    return output
get_custom_sql.help_doc = "Prints the custom table modifying SQL statements for the given app name(s)."
get_custom_sql.args = APP_ARGS
def get_sql_initial_data(apps):
    "Returns a list of the initial INSERT SQL statements for the given app."
    # The action was renamed; point users at the replacement, filling the
    # example command with either the apps they supplied or placeholders.
    example_apps = ' '.join(apps or ['app1', 'app2'])
    return style.ERROR("This action has been renamed. Try './manage.py sqlcustom %s'." % example_apps)
get_sql_initial_data.help_doc = "RENAMED: see 'sqlcustom'"
get_sql_initial_data.args = ''
def get_sql_sequence_reset(app):
    "Returns a list of the SQL statements to reset PostgreSQL sequences for the given app."
    from django.db import backend, models
    output = []
    kw = style.SQL_KEYWORD
    for model in models.get_models(app):
        table = model._meta.db_table
        for field in model._meta.fields:
            if isinstance(field, models.AutoField):
                # Reset the implicit <table>_<column>_seq sequence to the
                # current MAX of the column.
                output.append("%s setval('%s', (%s max(%s) %s %s));" % (
                    kw('SELECT'),
                    style.SQL_FIELD('%s_%s_seq' % (table, field.column)),
                    kw('SELECT'),
                    style.SQL_FIELD(backend.quote_name(field.column)),
                    kw('FROM'),
                    style.SQL_TABLE(backend.quote_name(table))))
                # Only one AutoField is allowed per model, so don't bother continuing.
                break
        for field in model._meta.many_to_many:
            # Each m2m join table has its own implicit id sequence.
            m2m_table = field.m2m_db_table()
            output.append("%s setval('%s', (%s max(%s) %s %s));" % (
                kw('SELECT'),
                style.SQL_FIELD('%s_id_seq' % m2m_table),
                kw('SELECT'),
                style.SQL_FIELD(backend.quote_name('id')),
                kw('FROM'),
                style.SQL_TABLE(m2m_table)))
    return output
get_sql_sequence_reset.help_doc = "Prints the SQL statements for resetting PostgreSQL sequences for the given app name(s)."
get_sql_sequence_reset.args = APP_ARGS
def get_sql_indexes(app):
    "Returns a list of the CREATE INDEX SQL statements for all models in the given app."
    from django.db import models
    statements = []
    for model in models.get_models(app):
        statements += get_sql_indexes_for_model(model)
    return statements
get_sql_indexes.help_doc = "Prints the CREATE INDEX SQL statements for the given model module name(s)."
get_sql_indexes.args = APP_ARGS
def get_sql_indexes_for_model(model):
    "Returns the CREATE INDEX SQL statements for a single model"
    from django.db import backend
    statements = []
    table = model._meta.db_table
    for field in model._meta.fields:
        if not field.db_index:
            continue
        unique_prefix = field.unique and 'UNIQUE ' or ''
        # Assemble the styled statement from its space-separated pieces:
        # CREATE [UNIQUE ]INDEX <table>_<column> ON <table> (<column>);
        parts = [
            style.SQL_KEYWORD('CREATE %sINDEX' % unique_prefix),
            style.SQL_TABLE('%s_%s' % (table, field.column)),
            style.SQL_KEYWORD('ON'),
            style.SQL_TABLE(backend.quote_name(table)),
            "(%s);" % style.SQL_FIELD(backend.quote_name(field.column)),
        ]
        statements.append(' '.join(parts))
    return statements
def get_sql_all(app):
    "Returns a list of CREATE TABLE SQL, initial-data inserts, and CREATE INDEX SQL for the given module."
    # Order matters: tables first, then custom SQL, then indexes.
    result = list(get_sql_create(app))
    result.extend(get_custom_sql(app))
    result.extend(get_sql_indexes(app))
    return result
get_sql_all.help_doc = "Prints the CREATE TABLE, initial-data and CREATE INDEX SQL statements for the given model module name(s)."
get_sql_all.args = APP_ARGS
def _emit_post_sync_signal(created_models, verbosity, interactive):
    """
    Sends the post_syncdb signal for every installed application, passing
    along the set of models whose tables were just created so receivers can
    act on them.
    """
    from django.db import models
    from django.dispatch import dispatcher
    # Emit the post_sync signal for every application.
    for app in models.get_apps():
        # 'myproject.myapp.models'.split('.')[-2] -> 'myapp'
        app_name = app.__name__.split('.')[-2]
        if verbosity >= 2:
            print "Running post-sync handlers for application", app_name
        dispatcher.send(signal=models.signals.post_syncdb, sender=app,
            app=app, created_models=created_models,
            verbosity=verbosity, interactive=interactive)
def syncdb(verbosity=1, interactive=True):
"Creates the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
from django.db import connection, transaction, models, get_creation_module
from django.conf import settings
disable_termcolors()
# First, try validating the models.
_check_for_validation_errors()
# Import the 'management' module within each installed app, to register
# dispatcher events.
for app_name in settings.INSTALLED_APPS:
try:
__import__(app_name + '.management', {}, {}, [''])
except ImportError:
pass
data_types = get_creation_module().DATA_TYPES
cursor = connection.cursor()
# Get a list of all existing database tables,
# so we know what needs to be added.
table_list = _get_table_list()
# Get a list of already installed *models* so that references work right.
seen_models = _get_installed_models(table_list)
created_models = set()
pending_references = {}
# Create the tables for each model
for app in models.get_apps():
app_name = app.__name__.split('.')[-2]
model_list = models.get_models(app)
for model in model_list:
# Create the model's database table, if it doesn't already exist.
if verbosity >= 2:
print "Processing %s.%s model" % (app_name, model._meta.object_name)
if model._meta.db_table in table_list:
continue
sql, references = _get_sql_model_create(model, seen_models)
seen_models.add(model)
created_models.add(model)
for refto, refs in references.items():
pending_references.setdefault(refto, []).extend(refs)
sql.extend(_get_sql_for_pending_references(model, pending_references))
if verbosity >= 1:
print "Creating table %s" % model._meta.db_table
for statement in sql:
cursor.execute(statement)
table_list.append(model._meta.db_table)
# Create the m2m tables. This must be done after all tables have been created
# to ensure that all referred tables will exist.
for app in models.get_apps():
app_name = app.__name__.split('.')[-2]
model_list = models.get_models(app)
for model in model_list:
if model in created_models:
sql = _get_many_to_many_sql_for_model(model)
if sql:
if verbosity >= 2:
print "Creating many-to-many tables for %s.%s model" % (app_name, model._meta.object_name)
for statement in sql:
cursor.execute(statement)
transaction.commit_unless_managed()
# Send the post_syncdb signal, so individual apps can do whatever they need
# to do at this point.
_emit_post_sync_signal(created_models, verbosity, interactive)
# Install custom SQL for the app (but only if this
# is a model we've just created)
for app in models.get_apps():
for model in models.get_models(app):
if model in created_models:
custom_sql = get_custom_sql_for_model(model)
if custom_sql:
if verbosity >= 1:
print "Installing custom SQL for %s.%s model" % (app_name, model._meta.object_name)
try:
for sql in custom_sql:
cursor.execute(sql)
except Exception, e:
sys.stderr.write("Failed to install custom SQL for %s.%s model: %s" % \
(app_name, model._meta.object_name, e))
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
# Install SQL indicies for all newly created models
for app in models.get_apps():
app_name = app.__name__.split('.')[-2]
for model in models.get_models(app):
if model in created_models:
index_sql = get_sql_indexes_for_model(model)
if index_sql:
if verbosity >= 1:
print "Installing index for %s.%s model" % (app_name, model._meta.object_name)
try:
for sql in index_sql:
cursor.execute(sql)
except Exception, e:
sys.stderr.write("Failed to install index for %s.%s model: %s" % \
(app_name, model._meta.object_name, e))
transaction.rollback_unless_managed()
else:
transaction.commit_unless_managed()
# Install the 'initialdata' fixture, using format discovery
load_data(['initial_data'], verbosity=verbosity)
syncdb.help_doc = "Create the database tables for all apps in INSTALLED_APPS whose tables haven't already been created."
syncdb.args = '[--verbosity] [--interactive]'
def get_admin_index(app):
    """
    Returns admin-index template snippet (in list form) for the given app.

    Only models with an inner Admin class get a row; the snippet is wrapped
    in a perms guard for the app's label.
    """
    from django.utils.text import capfirst
    from django.db.models import get_models
    output = []
    app_models = get_models(app)
    if not app_models:
        # An app with no models used to raise IndexError on the next line;
        # an empty snippet is the sensible result instead.
        return output
    app_label = app_models[0]._meta.app_label
    output.append('{%% if perms.%s %%}' % app_label)
    output.append('<div class="module"><h2>%s</h2><table>' % app_label.title())
    for model in app_models:
        if model._meta.admin:
            output.append(MODULE_TEMPLATE % {
                'app': app_label,
                'mod': model._meta.module_name,
                'name': capfirst(model._meta.verbose_name_plural),
                'addperm': model._meta.get_add_permission(),
                'changeperm': model._meta.get_change_permission(),
            })
    output.append('</table></div>')
    output.append('{% endif %}')
    return output
get_admin_index.help_doc = "Prints the admin-index template snippet for the given app name(s)."
get_admin_index.args = APP_ARGS
def _module_to_dict(module, omittable=lambda k: k.startswith('_')):
"Converts a module namespace to a Python dictionary. Used by get_settings_diff."
return dict([(k, repr(v)) for k, v in module.__dict__.items() if not omittable(k)])
def diffsettings():
"""
Displays differences between the current settings.py and Django's
default settings. Settings that don't appear in the defaults are
followed by "###".
"""
# Inspired by Postfix's "postconf -n".
from django.conf import settings, global_settings
user_settings = _module_to_dict(settings._target)
default_settings = _module_to_dict(global_settings)
output = []
keys = user_settings.keys()
keys.sort()
for key in keys:
if key not in default_settings:
output.append("%s = %s ###" % (key, user_settings[key]))
elif user_settings[key] != default_settings[key]:
output.append("%s = %s" % (key, user_settings[key]))
print '\n'.join(output)
diffsettings.args = ""
def reset(app, interactive=True):
    "Executes the equivalent of 'get_sql_reset' in the current database."
    from django.db import connection, transaction
    from django.conf import settings
    # 'myproject.myapp.models'.split('.')[-2] -> 'myapp'
    app_name = app.__name__.split('.')[-2]
    disable_termcolors()
    # First, try validating the models.
    _check_for_validation_errors(app)
    # Full DROP + CREATE statement list for every model in the app.
    sql_list = get_sql_reset(app)
    if interactive:
        confirm = raw_input("""
You have requested a database reset.
This will IRREVERSIBLY DESTROY any data for
the "%s" application in the database "%s".
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """ % (app_name, settings.DATABASE_NAME))
    else:
        # Non-interactive callers skip the safety prompt.
        confirm = 'yes'
    if confirm == 'yes':
        try:
            cursor = connection.cursor()
            for sql in sql_list:
                cursor.execute(sql)
        except Exception, e:
            sys.stderr.write(style.ERROR("""Error: %s couldn't be reset. Possible reasons:
  * The database isn't running or isn't configured correctly.
  * At least one of the database tables doesn't exist.
  * The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlreset %s'. That's the SQL this command wasn't able to run.
The full error: """ % (app_name, app_name)) + style.ERROR_OUTPUT(str(e)) + '\n')
            # Leave the database as it was before the failed statement.
            transaction.rollback_unless_managed()
            sys.exit(1)
        transaction.commit_unless_managed()
    else:
        print "Reset cancelled."
reset.help_doc = "Executes ``sqlreset`` for the given app(s) in the current database."
reset.args = '[--interactive]' + APP_ARGS
def flush(verbosity=1, interactive=True):
"Returns all tables in the database to the same state they were in immediately after syncdb."
from django.conf import settings
from django.db import connection, transaction, models
from django.dispatch import dispatcher
disable_termcolors()
# First, try validating the models.
_check_for_validation_errors()
# Import the 'management' module within each installed app, to register
# dispatcher events.
for app_name in settings.INSTALLED_APPS:
try:
__import__(app_name + '.management', {}, {}, [''])
except ImportError:
pass
sql_list = get_sql_flush()
if interactive:
confirm = raw_input("""
You have requested a flush of the database.
This will IRREVERSIBLY DESTROY all data currently in the database,
and return each table to the state it was in after syncdb.
Are you sure you want to do this?
Type 'yes' to continue, or 'no' to cancel: """)
else:
confirm = 'yes'
if confirm == 'yes':
try:
cursor = connection.cursor()
for sql in sql_list:
cursor.execute(sql)
except Exception, e:
sys.stderr.write(style.ERROR("""Error: Database %s couldn't be flushed. Possible reasons:
* The database isn't running or isn't configured correctly.
* At least one of the expected database tables doesn't exist.
* The SQL was invalid.
Hint: Look at the output of 'django-admin.py sqlflush'. That's the SQL this command wasn't able to run.
The full error: """ % settings.DATABASE_NAME + style.ERROR_OUTPUT(str(e)) + '\n'))
transaction.rollback_unless_managed()
sys.exit(1)
transaction.commit_unless_managed()
# Emit the post sync signal. This allows individual
# applications to respond as if the database had been
# sync'd from scratch.
_emit_post_sync_signal(models.get_models(), verbosity, interactive)
# Reinstall the initial_data fixture
load_data(['initial_data'], verbosity=verbosity)
else:
print "Flush cancelled."
flush.help_doc = "Executes ``sqlflush`` on the current database."
flush.args = '[--verbosity] [--interactive]'
def _start_helper(app_or_project, name, directory, other_name=''):
other = {'project': 'app', 'app': 'project'}[app_or_project]
if not _is_valid_dir_name(name):
sys.stderr.write(style.ERROR("Error: %r is not a valid %s name. Please use only numbers, letters and underscores.\n" % (name, app_or_project)))
sys.exit(1)
top_dir = os.path.join(directory, name)
try:
os.mkdir(top_dir)
except OSError, e:
sys.stderr.write(style.ERROR("Error: %s\n" % e))
sys.exit(1)
template_dir = PROJECT_TEMPLATE_DIR % app_or_project
for d, subdirs, files in os.walk(template_dir):
relative_dir = d[len(template_dir)+1:].replace('%s_name' % app_or_project, name)
if relative_dir:
os.mkdir(os.path.join(top_dir, relative_dir))
for i, subdir in enumerate(subdirs):
if subdir.startswith('.'):
del subdirs[i]
for f in files:
if f.endswith('.pyc'):
continue
path_old = os.path.join(d, f)
path_new = os.path.join(top_dir, relative_dir, f.replace('%s_name' % app_or_project, name))
fp_old = open(path_old, 'r')
fp_new = open(path_new, 'w')
fp_new.write(fp_old.read().replace('{{ %s_name }}' % app_or_project, name).replace('{{ %s_name }}' % other, other_name))
fp_old.close()
fp_new.close()
try:
shutil.copymode(path_old, path_new)
except OSError:
sys.stderr.write(style.NOTICE("Notice: Couldn't set permission bits on %s. You're probably using an uncommon filesystem setup. No problem.\n" % path_new))
def startproject(project_name, directory):
    "Creates a Django project for the given project_name in the given directory."
    from random import choice
    if project_name in INVALID_PROJECT_NAMES:
        sys.stderr.write(style.ERROR("Error: '%r' conflicts with the name of an existing Python module and cannot be used as a project name. Please try another name.\n" % project_name))
        sys.exit(1)
    _start_helper('project', project_name, directory)
    # Create a random SECRET_KEY hash, and put it in the main settings.
    main_settings_file = os.path.join(directory, project_name, 'settings.py')
    # Read the generated settings first and only reopen for writing once the
    # new contents are ready: the old code truncated the file before the
    # substitution ran and leaked the read-mode file handle.
    fp = open(main_settings_file, 'r')
    settings_contents = fp.read()
    fp.close()
    # NOTE(review): random.choice isn't cryptographically strong; a
    # SystemRandom-based source would be preferable for SECRET_KEY.
    secret_key = ''.join([choice('abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)') for i in range(50)])
    settings_contents = re.sub(r"(?<=SECRET_KEY = ')'", secret_key + "'", settings_contents)
    fp = open(main_settings_file, 'w')
    fp.write(settings_contents)
    fp.close()
startproject.help_doc = "Creates a Django project directory structure for the given project name in the current directory."
startproject.args = "[projectname]"
def startapp(app_name, directory):
    "Creates a Django app for the given app_name in the given directory."
    # Infer the project name naively from the basename of the parent
    # directory.
    parent_dir = os.path.normpath(os.path.join(directory, '..'))
    project_name = os.path.basename(parent_dir)
    if app_name == os.path.basename(directory):
        sys.stderr.write(style.ERROR("Error: You cannot create an app with the same name (%r) as your project.\n" % app_name))
        sys.exit(1)
    _start_helper('app', app_name, directory, project_name)
startapp.help_doc = "Creates a Django app directory structure for the given app name in the current directory."
startapp.args = "[appname]"
def inspectdb():
    "Generator that introspects the tables in the given database name and returns a Django model, one line at a time."
    from django.db import connection, get_introspection_module
    import keyword
    introspection_module = get_introspection_module()
    # 'some_table_name' -> 'SomeTableName'
    table2model = lambda table_name: table_name.title().replace('_', '')
    cursor = connection.cursor()
    yield "# This is an auto-generated Django model module."
    yield "# You'll have to do the following manually to clean this up:"
    yield "#     * Rearrange models' order"
    yield "#     * Make sure each model has one field with primary_key=True"
    yield "# Feel free to rename the models, but don't rename db_table values or field names."
    yield "#"
    yield "# Also note: You'll have to insert the output of 'django-admin.py sqlcustom [appname]'"
    yield "# into your database."
    yield ''
    yield 'from django.db import models'
    yield ''
    for table_name in introspection_module.get_table_list(cursor):
        yield 'class %s(models.Model):' % table2model(table_name)
        # Relations/indexes are optional backend features; fall back to
        # empty mappings when the backend doesn't implement them.
        try:
            relations = introspection_module.get_relations(cursor, table_name)
        except NotImplementedError:
            relations = {}
        try:
            indexes = introspection_module.get_indexes(cursor, table_name)
        except NotImplementedError:
            indexes = {}
        for i, row in enumerate(introspection_module.get_table_description(cursor, table_name)):
            att_name = row[0]
            comment_notes = [] # Holds Field notes, to be displayed in a Python comment.
            extra_params = {}  # Holds Field parameters such as 'db_column'.
            if ' ' in att_name:
                extra_params['db_column'] = att_name
                att_name = att_name.replace(' ', '')
                comment_notes.append('Field renamed to remove spaces.')
            if keyword.iskeyword(att_name):
                extra_params['db_column'] = att_name
                att_name += '_field'
                comment_notes.append('Field renamed because it was a Python reserved word.')
            # Use the 'in' operator rather than the deprecated (and
            # Python 3-incompatible) dict.has_key().
            if i in relations:
                rel_to = relations[i][1] == table_name and "'self'" or table2model(relations[i][1])
                field_type = 'ForeignKey(%s' % rel_to
                if att_name.endswith('_id'):
                    att_name = att_name[:-3]
                else:
                    extra_params['db_column'] = att_name
            else:
                try:
                    field_type = introspection_module.DATA_TYPES_REVERSE[row[1]]
                except KeyError:
                    field_type = 'TextField'
                    comment_notes.append('This field type is a guess.')
                # This is a hook for DATA_TYPES_REVERSE to return a tuple of
                # (field_type, extra_params_dict).
                if type(field_type) is tuple:
                    field_type, new_params = field_type
                    extra_params.update(new_params)
                # Add maxlength for all CharFields.
                if field_type == 'CharField' and row[3]:
                    extra_params['maxlength'] = row[3]
                if field_type == 'FloatField':
                    extra_params['max_digits'] = row[4]
                    extra_params['decimal_places'] = row[5]
            # Add primary_key and unique, if necessary.
            column_name = extra_params.get('db_column', att_name)
            if column_name in indexes:
                if indexes[column_name]['primary_key']:
                    extra_params['primary_key'] = True
                elif indexes[column_name]['unique']:
                    extra_params['unique'] = True
            field_type += '('
            # Don't output 'id = meta.AutoField(primary_key=True)', because
            # that's assumed if it doesn't exist.
            if att_name == 'id' and field_type == 'AutoField(' and extra_params == {'primary_key': True}:
                continue
            # Add 'null' and 'blank', if the 'null_ok' flag was present in the
            # table description.
            if row[6]: # If it's NULL...
                extra_params['blank'] = True
                if not field_type in ('TextField(', 'CharField('):
                    extra_params['null'] = True
            field_desc = '%s = models.%s' % (att_name, field_type)
            if extra_params:
                if not field_desc.endswith('('):
                    field_desc += ', '
                field_desc += ', '.join(['%s=%r' % (k, v) for k, v in extra_params.items()])
            field_desc += ')'
            if comment_notes:
                field_desc += ' # ' + ' '.join(comment_notes)
            yield '    %s' % field_desc
        yield '    class Meta:'
        yield '        db_table = %r' % table_name
        yield ''
inspectdb.help_doc = "Introspects the database tables in the given database and outputs a Django model module."
inspectdb.args = ""
class ModelErrorCollection:
    "Collects model-validation errors and echoes each one to an output stream."
    def __init__(self, outfile=sys.stdout):
        # (context, error) tuples recorded so far.
        self.errors = []
        self.outfile = outfile
    def add(self, context, error):
        "Records the error and writes a styled '<context>: <error>' line."
        self.errors.append((context, error))
        self.outfile.write(style.ERROR("%s: %s\n" % (context, error)))
def get_validation_errors(outfile, app=None):
    """
    Validates all models that are part of the specified app. If no app name is provided,
    validates all models of all installed apps. Writes errors, if any, to outfile.
    Returns number of errors.
    """
    from django.conf import settings
    from django.db import models, connection
    from django.db.models.loading import get_app_errors
    from django.db.models.fields.related import RelatedObject
    e = ModelErrorCollection(outfile)
    # Apps that failed to import are reported first, keyed by app name.
    for (app_name, error) in get_app_errors().items():
        e.add(app_name, error)
    for cls in models.get_models(app):
        opts = cls._meta
        # Do field-specific validation.
        for f in opts.fields:
            if f.name == 'id' and not f.primary_key and opts.pk.name == 'id':
                e.add(opts, '"%s": You can\'t use "id" as a field name, because each model automatically gets an "id" field if none of the fields have primary_key=True. You need to either remove/rename your "id" field or add primary_key=True to a field.' % f.name)
            if isinstance(f, models.CharField) and f.maxlength in (None, 0):
                e.add(opts, '"%s": CharFields require a "maxlength" attribute.' % f.name)
            if isinstance(f, models.FloatField):
                if f.decimal_places is None:
                    e.add(opts, '"%s": FloatFields require a "decimal_places" attribute.' % f.name)
                if f.max_digits is None:
                    e.add(opts, '"%s": FloatFields require a "max_digits" attribute.' % f.name)
            if isinstance(f, models.FileField) and not f.upload_to:
                e.add(opts, '"%s": FileFields require an "upload_to" attribute.' % f.name)
            if isinstance(f, models.ImageField):
                # ImageField needs PIL at runtime; the import is only a probe.
                try:
                    from PIL import Image
                except ImportError:
                    e.add(opts, '"%s": To use ImageFields, you need to install the Python Imaging Library. Get it at http://www.pythonware.com/products/pil/ .' % f.name)
            if f.prepopulate_from is not None and type(f.prepopulate_from) not in (list, tuple):
                e.add(opts, '"%s": prepopulate_from should be a list or tuple.' % f.name)
            if f.choices:
                if not hasattr(f.choices, '__iter__'):
                    e.add(opts, '"%s": "choices" should be iterable (e.g., a tuple or list).' % f.name)
                else:
                    for c in f.choices:
                        if not type(c) in (tuple, list) or len(c) != 2:
                            e.add(opts, '"%s": "choices" should be a sequence of two-tuples.' % f.name)
            if f.db_index not in (None, True, False):
                e.add(opts, '"%s": "db_index" should be either None, True or False.' % f.name)
            # Check that maxlength <= 255 if using older MySQL versions.
            if settings.DATABASE_ENGINE == 'mysql':
                db_version = connection.get_server_version()
                if db_version < (5, 0, 3) and isinstance(f, (models.CharField, models.CommaSeparatedIntegerField, models.SlugField)) and f.maxlength > 255:
                    e.add(opts, '"%s": %s cannot have a "maxlength" greater than 255 when you are using a version of MySQL prior to 5.0.3 (you are using %s).' % (f.name, f.__class__.__name__, '.'.join([str(n) for n in db_version[:3]])))
            # Check to see if the related field will clash with any
            # existing fields, m2m fields, m2m related objects or related objects
            if f.rel:
                rel_opts = f.rel.to._meta
                if f.rel.to not in models.get_models():
                    e.add(opts, "'%s' has relation with model %s, which has not been installed" % (f.name, rel_opts.object_name))
                # rel_name is the reverse accessor; rel_query_name the reverse
                # lookup name. Both must be unique on the related model.
                rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
                rel_query_name = f.related_query_name()
                for r in rel_opts.fields:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.many_to_many:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.get_all_related_many_to_many_objects():
                    if r.get_accessor_name() == rel_name:
                        e.add(opts, "Accessor for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                    if r.get_accessor_name() == rel_query_name:
                        e.add(opts, "Reverse query name for field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                for r in rel_opts.get_all_related_objects():
                    # A field doesn't clash with its own reverse descriptor.
                    if r.field is not f:
                        if r.get_accessor_name() == rel_name:
                            e.add(opts, "Accessor for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                        if r.get_accessor_name() == rel_query_name:
                            e.add(opts, "Reverse query name for field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
        for i, f in enumerate(opts.many_to_many):
            # Check to see if the related m2m field will clash with any
            # existing fields, m2m fields, m2m related objects or related objects
            rel_opts = f.rel.to._meta
            if f.rel.to not in models.get_models():
                e.add(opts, "'%s' has m2m relation with model %s, which has not been installed" % (f.name, rel_opts.object_name))
            rel_name = RelatedObject(f.rel.to, cls, f).get_accessor_name()
            rel_query_name = f.related_query_name()
            # If rel_name is none, there is no reverse accessor.
            # (This only occurs for symmetrical m2m relations to self).
            # If this is the case, there are no clashes to check for this field, as
            # there are no reverse descriptors for this field.
            if rel_name is not None:
                for r in rel_opts.fields:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.many_to_many:
                    if r.name == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                    if r.name == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.name, f.name))
                for r in rel_opts.get_all_related_many_to_many_objects():
                    if r.field is not f:
                        if r.get_accessor_name() == rel_name:
                            e.add(opts, "Accessor for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                        if r.get_accessor_name() == rel_query_name:
                            e.add(opts, "Reverse query name for m2m field '%s' clashes with related m2m field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                for r in rel_opts.get_all_related_objects():
                    if r.get_accessor_name() == rel_name:
                        e.add(opts, "Accessor for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
                    if r.get_accessor_name() == rel_query_name:
                        e.add(opts, "Reverse query name for m2m field '%s' clashes with related field '%s.%s'. Add a related_name argument to the definition for '%s'." % (f.name, rel_opts.object_name, r.get_accessor_name(), f.name))
        # Check admin attribute.
        if opts.admin is not None:
            if not isinstance(opts.admin, models.AdminOptions):
                e.add(opts, '"admin" attribute, if given, must be set to a models.AdminOptions() instance.')
            else:
                # list_display
                if not isinstance(opts.admin.list_display, (list, tuple)):
                    e.add(opts, '"admin.list_display", if given, must be set to a list or tuple.')
                else:
                    for fn in opts.admin.list_display:
                        try:
                            f = opts.get_field(fn)
                        except models.FieldDoesNotExist:
                            # Non-field entries are allowed if they resolve to
                            # a model attribute, method or property.
                            if not hasattr(cls, fn):
                                e.add(opts, '"admin.list_display" refers to %r, which isn\'t an attribute, method or property.' % fn)
                        else:
                            if isinstance(f, models.ManyToManyField):
                                e.add(opts, '"admin.list_display" doesn\'t support ManyToManyFields (%r).' % fn)
                # list_display_links
                if opts.admin.list_display_links and not opts.admin.list_display:
                    e.add(opts, '"admin.list_display" must be defined for "admin.list_display_links" to be used.')
                if not isinstance(opts.admin.list_display_links, (list, tuple)):
                    e.add(opts, '"admin.list_display_links", if given, must be set to a list or tuple.')
                else:
                    for fn in opts.admin.list_display_links:
                        try:
                            f = opts.get_field(fn)
                        except models.FieldDoesNotExist:
                            if not hasattr(cls, fn):
                                e.add(opts, '"admin.list_display_links" refers to %r, which isn\'t an attribute, method or property.' % fn)
                        if fn not in opts.admin.list_display:
                            e.add(opts, '"admin.list_display_links" refers to %r, which is not defined in "admin.list_display".' % fn)
                # list_filter
                if not isinstance(opts.admin.list_filter, (list, tuple)):
                    e.add(opts, '"admin.list_filter", if given, must be set to a list or tuple.')
                else:
                    for fn in opts.admin.list_filter:
                        try:
                            f = opts.get_field(fn)
                        except models.FieldDoesNotExist:
                            e.add(opts, '"admin.list_filter" refers to %r, which isn\'t a field.' % fn)
                # date_hierarchy
                if opts.admin.date_hierarchy:
                    try:
                        f = opts.get_field(opts.admin.date_hierarchy)
                    except models.FieldDoesNotExist:
                        e.add(opts, '"admin.date_hierarchy" refers to %r, which isn\'t a field.' % opts.admin.date_hierarchy)
        # Check ordering attribute.
        if opts.ordering:
            for field_name in opts.ordering:
                # '?' means random ordering; nothing to resolve.
                if field_name == '?': continue
                if field_name.startswith('-'):
                    field_name = field_name[1:]
                if opts.order_with_respect_to and field_name == '_order':
                    continue
                if '.' in field_name: continue # Skip ordering in the format 'table.field'.
                try:
                    opts.get_field(field_name, many_to_many=False)
                except models.FieldDoesNotExist:
                    e.add(opts, '"ordering" refers to "%s", a field that doesn\'t exist.' % field_name)
        # Check core=True, if needed.
        for related in opts.get_followed_related_objects():
            if not related.edit_inline:
                continue
            # StopIteration is (ab)used as a "found one" flag here.
            try:
                for f in related.opts.fields:
                    if f.core:
                        raise StopIteration
                e.add(related.opts, "At least one field in %s should have core=True, because it's being edited inline by %s.%s." % (related.opts.object_name, opts.module_name, opts.object_name))
            except StopIteration:
                pass
        # Check unique_together.
        for ut in opts.unique_together:
            for field_name in ut:
                try:
                    f = opts.get_field(field_name, many_to_many=True)
                except models.FieldDoesNotExist:
                    e.add(opts, '"unique_together" refers to %s, a field that doesn\'t exist. Check your syntax.' % field_name)
                else:
                    if isinstance(f.rel, models.ManyToManyRel):
                        e.add(opts, '"unique_together" refers to %s. ManyToManyFields are not supported in unique_together.' % f.name)
    return len(e.errors)
def validate(outfile=sys.stdout, silent_success=False):
    "Validates all installed models."
    try:
        error_count = get_validation_errors(outfile)
        # With silent_success, a clean run prints nothing at all.
        if error_count or not silent_success:
            suffix = error_count != 1 and 's' or ''
            outfile.write('%s error%s found.\n' % (error_count, suffix))
    except ImproperlyConfigured:
        outfile.write("Skipping validation because things aren't configured properly.")
validate.args = ''
def _check_for_validation_errors(app=None):
    """Check that an app has no validation errors, and exit with errors if it does."""
    try:
        from cStringIO import StringIO
    except ImportError:
        from StringIO import StringIO
    # Collect the error report into a buffer so we can decide whether to
    # show it at all.
    buf = StringIO()
    if not get_validation_errors(buf, app):
        return
    if app:
        sys.stderr.write(style.ERROR("Error: %s couldn't be installed, because there were errors in your model:\n" % app))
    else:
        sys.stderr.write(style.ERROR("Error: Couldn't install apps, because there were errors in one or more models:\n"))
    buf.seek(0)
    sys.stderr.write(buf.read())
    sys.exit(1)
def runserver(addr, port, use_reloader=True, admin_media_dir=''):
    "Starts a lightweight Web server for development."
    # addr -- IP to bind to ('' -> 127.0.0.1); port -- port number as a STRING
    # (validated with isdigit below); use_reloader -- restart on code changes;
    # admin_media_dir -- optional override for the admin media directory.
    from django.core.servers.basehttp import run, AdminMediaHandler, WSGIServerException
    from django.core.handlers.wsgi import WSGIHandler
    if not addr:
        addr = '127.0.0.1'
    if not port.isdigit():
        sys.stderr.write(style.ERROR("Error: %r is not a valid port number.\n" % port))
        sys.exit(1)
    quit_command = sys.platform == 'win32' and 'CTRL-BREAK' or 'CONTROL-C'
    def inner_run():
        # The actual serve loop; wrapped in a closure so the autoreloader can
        # re-invoke it after each detected code change.
        from django.conf import settings
        print "Validating models..."
        validate()
        print "\nDjango version %s, using settings %r" % (get_version(), settings.SETTINGS_MODULE)
        print "Development server is running at http://%s:%s/" % (addr, port)
        print "Quit the server with %s." % quit_command
        try:
            import django
            # Serve admin media alongside the app, defaulting to the copy
            # shipped inside the django package itself.
            path = admin_media_dir or django.__path__[0] + '/contrib/admin/media'
            handler = AdminMediaHandler(WSGIHandler(), path)
            run(addr, int(port), handler)
        except WSGIServerException, e:
            # Use helpful error messages instead of ugly tracebacks.
            # Keys look like socket errno values (presumably 13=EACCES,
            # 98=EADDRINUSE, 99=EADDRNOTAVAIL) -- TODO confirm; these are
            # platform-dependent.
            ERRORS = {
                13: "You don't have permission to access that port.",
                98: "That port is already in use.",
                99: "That IP address can't be assigned-to.",
            }
            try:
                error_text = ERRORS[e.args[0].args[0]]
            except (AttributeError, KeyError):
                error_text = str(e)
            sys.stderr.write(style.ERROR("Error: %s" % error_text) + '\n')
            sys.exit(1)
        except KeyboardInterrupt:
            # Ctrl-C is a clean shutdown, not an error.
            sys.exit(0)
    if use_reloader:
        from django.utils import autoreload
        autoreload.main(inner_run)
    else:
        inner_run()
runserver.args = '[--noreload] [--adminmedia=ADMIN_MEDIA_PATH] [optional port number, or ipaddr:port]'
def createcachetable(tablename):
    "Creates the table needed to use the SQL cache backend"
    from django.db import backend, connection, transaction, get_creation_module, models
    data_types = get_creation_module().DATA_TYPES
    # Describe the cache table using ordinary model fields so the backend's
    # own type mapping produces the column types.
    # "key" is a reserved word in MySQL, so use "cache_key" instead.
    cache_fields = (
        models.CharField(name='cache_key', maxlength=255, unique=True, primary_key=True),
        models.TextField(name='value'),
        models.DateTimeField(name='expires', db_index=True),
    )
    column_defs = []
    index_statements = []
    for field in cache_fields:
        bits = [
            backend.quote_name(field.name),
            data_types[field.get_internal_type()] % field.__dict__,
            "%sNULL" % (not field.null and "NOT " or ""),
        ]
        if field.unique:
            bits.append("UNIQUE")
        if field.primary_key:
            bits.append("PRIMARY KEY")
        if field.db_index:
            unique = field.unique and "UNIQUE " or ""
            index_statements.append("CREATE %sINDEX %s_%s ON %s (%s);" % \
                (unique, tablename, field.name, backend.quote_name(tablename),
                backend.quote_name(field.name)))
        column_defs.append(" ".join(bits))
    # Assemble the CREATE TABLE statement, comma-separating all but the last
    # column definition.
    create_sql = ["CREATE TABLE %s (" % backend.quote_name(tablename)]
    last = len(column_defs) - 1
    for i, column in enumerate(column_defs):
        create_sql.append('    %s%s' % (column, i < last and ',' or ''))
    create_sql.append(');')
    cursor = connection.cursor()
    cursor.execute("\n".join(create_sql))
    for statement in index_statements:
        cursor.execute(statement)
    transaction.commit_unless_managed()
createcachetable.args = "[tablename]"
def run_shell(use_plain=False):
    "Runs a Python interactive interpreter. Tries to use IPython, if it's available."
    # XXX: (Temporary) workaround for ticket #1796: force early loading of all
    # models from installed apps.
    from django.db.models.loading import get_models
    loaded_models = get_models()
    if not use_plain:
        # Prefer IPython unless the user explicitly asked for plain Python.
        try:
            import IPython
            # Explicitly pass an empty list as arguments, because otherwise
            # IPython would use sys.argv from this script.
            shell = IPython.Shell.IPShell(argv=[])
            shell.mainloop()
            return
        except ImportError:
            pass
    # Plain interpreter, with tab completion when readline is available.
    import code
    try:
        import readline
    except ImportError:
        pass
    else:
        # We don't have to wrap the following import in a 'try', because
        # we already know 'readline' was imported successfully.
        import rlcompleter
        readline.parse_and_bind("tab:complete")
    code.interact()
run_shell.args = '[--plain]'
def dbshell():
    "Runs the command-line client for the current DATABASE_ENGINE."
    # Delegates to the backend-specific runshell() (e.g. psql, mysql, sqlite3),
    # which replaces this process with the client.
    from django.db import runshell
    runshell()
dbshell.args = ""
def runfcgi(args):
    "Runs this project as a FastCGI application. Requires flup."
    from django.conf import settings
    from django.utils import translation
    # Activate the configured language up front -- the FastCGI server loop
    # won't do it for us. Settings lacking LANGUAGE_CODE are tolerated.
    try:
        language = settings.LANGUAGE_CODE
        translation.activate(language)
    except AttributeError:
        pass
    from django.core.servers.fastcgi import runfastcgi
    runfastcgi(args)
runfcgi.args = '[various KEY=val options, use `runfcgi help` for help]'
def test(app_labels, verbosity=1):
    "Runs the test suite for the specified applications"
    from django.conf import settings
    from django.db.models import get_app, get_apps
    # No labels means "test everything".
    if app_labels:
        app_list = [get_app(label) for label in app_labels]
    else:
        app_list = get_apps()
    # TEST_RUNNER is a dotted path like 'path.to.module.runner_callable'.
    dotted = settings.TEST_RUNNER.split('.')
    if len(dotted) > 1:
        runner_module_name = '.'.join(dotted[:-1])
    else:
        # Allow for Python 2.5 relative paths
        runner_module_name = '.'
    runner_module = __import__(runner_module_name, {}, {}, dotted[-1])
    test_runner = getattr(runner_module, dotted[-1])
    failures = test_runner(app_list, verbosity)
    if failures:
        sys.exit(failures)
test.help_doc = 'Runs the test suite for the specified applications, or the entire site if no apps are specified'
test.args = '[--verbosity] ' + APP_ARGS
def load_data(fixture_labels, verbosity=1):
    "Installs the provided fixture file(s) as data in the database."
    from django.db.models import get_apps
    from django.core import serializers
    from django.db import connection, transaction
    from django.conf import settings
    import sys
    # Keep a count of the installed objects and fixtures
    # (count[0] = objects saved, count[1] = fixture files loaded).
    count = [0,0]
    # Render a fixture directory for human-readable messages; '' means the
    # label was given as an absolute path.
    humanize = lambda dirname: dirname and "'%s'" % dirname or 'absolute path'
    # Get a cursor (even though we don't need one yet). This has
    # the side effect of initializing the test database (if
    # it isn't already initialized).
    cursor = connection.cursor()
    # Start transaction management. All fixtures are installed in a
    # single transaction to ensure that all references are resolved.
    transaction.commit_unless_managed()
    transaction.enter_transaction_management()
    transaction.managed(True)
    # Candidate directories: each app's fixtures/ dir, then FIXTURE_DIRS,
    # then '' so absolute-path labels resolve as-is.
    app_fixtures = [os.path.join(os.path.dirname(app.__file__),'fixtures') for app in get_apps()]
    for fixture_label in fixture_labels:
        if verbosity > 0:
            print "Loading '%s' fixtures..." % fixture_label
        for fixture_dir in app_fixtures + list(settings.FIXTURE_DIRS) + ['']:
            if verbosity > 1:
                print "Checking %s for fixtures..." % humanize(fixture_dir)
            parts = fixture_label.split('.')
            # A "name.format" label pins one serializer; a bare name tries
            # every registered format.
            if len(parts) == 1:
                fixture_name = fixture_label
                formats = serializers.get_serializer_formats()
            else:
                fixture_name, format = '.'.join(parts[:-1]), parts[-1]
                formats = [format]
            label_found = False
            for format in formats:
                serializer = serializers.get_serializer(format)
                if verbosity > 1:
                    print "Trying %s for %s fixture '%s'..." % \
                        (humanize(fixture_dir), format, fixture_name)
                try:
                    full_path = os.path.join(fixture_dir, '.'.join([fixture_name, format]))
                    fixture = open(full_path, 'r')
                    if label_found:
                        # Two files matched the same label in this directory:
                        # ambiguous, so undo everything and stop.
                        fixture.close()
                        print style.ERROR("Multiple fixtures named '%s' in %s. Aborting." %
                            (fixture_name, humanize(fixture_dir)))
                        transaction.rollback()
                        transaction.leave_transaction_management()
                        return
                    else:
                        count[1] += 1
                        if verbosity > 0:
                            print "Installing %s fixture '%s' from %s." % \
                                (format, fixture_name, humanize(fixture_dir))
                        try:
                            objects = serializers.deserialize(format, fixture)
                            for obj in objects:
                                count[0] += 1
                                obj.save()
                            label_found = True
                        except Exception, e:
                            # Any deserialization/save error aborts the whole
                            # run and rolls back previously loaded fixtures.
                            fixture.close()
                            sys.stderr.write(
                                style.ERROR("Problem installing fixture '%s': %s\n" %
                                    (full_path, str(e))))
                            transaction.rollback()
                            transaction.leave_transaction_management()
                            return
                        fixture.close()
                except:
                    # NOTE(review): bare except -- a failed open() just means
                    # "no such fixture here", but this also hides unrelated
                    # errors (even KeyboardInterrupt). Consider catching
                    # IOError only.
                    if verbosity > 1:
                        print "No %s fixture '%s' in %s." % \
                            (format, fixture_name, humanize(fixture_dir))
    if count[0] == 0:
        if verbosity > 0:
            print "No fixtures found."
        # NOTE(review): the transaction management entered above is never
        # left on this path; nothing was written, but the managed state leaks.
    else:
        if verbosity > 0:
            print "Installed %d object(s) from %d fixture(s)" % tuple(count)
        transaction.commit()
        transaction.leave_transaction_management()
load_data.help_doc = 'Installs the named fixture(s) in the database'
load_data.args = "[--verbosity] fixture, fixture, ..."
def dump_data(app_labels, format='json', indent=None):
"Output the current contents of the database as a fixture of the given format"
from django.db.models import get_app, get_apps, get_models
from django.core import serializers
if len(app_labels) == 0:
app_list = get_apps()
else:
app_list = [get_app(app_label) for app_label in app_labels]
# Check that the serialization format exists; this is a shortcut to
# avoid collating all the objects and _then_ failing.
try:
serializers.get_serializer(format)
except KeyError:
sys.stderr.write(style.ERROR("Unknown serialization format: %s\n" % format))
objects = []
for app in app_list:
for model in get_models(app):
objects.extend(model.objects.all())
try:
return serializers.serialize(format, objects, indent=indent)
except Exception, e:
sys.stderr.write(style.ERROR("Unable to serialize database: %s\n" % e))
dump_data.help_doc = 'Output the contents of the database as a fixture of the given format'
dump_data.args = '[--format]' + APP_ARGS
# Utilities for command-line script
# Maps each command-line action name to the function implementing it. The
# functions referenced here carry .args (and optionally .help_doc) attributes,
# which get_usage() reads to build the help text.
DEFAULT_ACTION_MAPPING = {
    'adminindex': get_admin_index,
    'createcachetable' : createcachetable,
    'dbshell': dbshell,
    'diffsettings': diffsettings,
    'dumpdata': dump_data,
    'flush': flush,
    'inspectdb': inspectdb,
    'loaddata': load_data,
    'reset': reset,
    'runfcgi': runfcgi,
    'runserver': runserver,
    'shell': run_shell,
    'sql': get_sql_create,
    'sqlall': get_sql_all,
    'sqlclear': get_sql_delete,
    'sqlcustom': get_custom_sql,
    'sqlflush': get_sql_flush,
    'sqlindexes': get_sql_indexes,
    'sqlinitialdata': get_sql_initial_data,
    'sqlreset': get_sql_reset,
    'sqlsequencereset': get_sql_sequence_reset,
    'startapp': startapp,
    'startproject': startproject,
    'syncdb': syncdb,
    'validate': validate,
    'test':test,
}
# Actions whose SQL output must NOT be wrapped in BEGIN/COMMIT by
# execute_from_command_line.
NO_SQL_TRANSACTION = (
    'adminindex',
    'createcachetable',
    'dbshell',
    'diffsettings',
    'reset',
    'sqlindexes',
    'syncdb',
)
class DjangoOptionParser(OptionParser):
    """OptionParser subclass whose usage errors show the full help text.

    print_usage_and_exit() writes the complete option/action help to stderr
    and terminates the process with exit status 1.
    """
    def print_usage_and_exit(self):
        # Full help (not just the one-line usage string) is friendlier for a
        # tool with this many actions; exit non-zero so scripts notice.
        self.print_help(sys.stderr)
        sys.exit(1)
def get_usage(action_mapping):
    """
    Returns a usage string. Doesn't do the options stuff, because optparse
    takes care of that.

    action_mapping -- dict mapping action names to functions that carry an
    .args attribute and either a .help_doc attribute or a docstring.
    """
    usage = ["%prog action [options]\nactions:"]
    # sorted() instead of .keys() + in-place .sort(): identical output, but
    # works for any mapping and doesn't rely on .keys() returning a list.
    for a in sorted(action_mapping):
        func = action_mapping[a]
        usage.append("  %s %s" % (a, func.args))
        # Prefer an explicit help_doc attribute; fall back to the docstring.
        doc = getattr(func, 'help_doc', textwrap.dedent(func.__doc__.strip()))
        usage.extend(textwrap.wrap(doc, initial_indent='    ', subsequent_indent='    '))
        usage.append("")
    return '\n'.join(usage[:-1]) # Cut off last list element, an empty space.
def print_error(msg, cmd):
    """Write a styled error message plus a --help hint to stderr, then exit 1."""
    hint = '\nRun "%s --help" for help.\n' % cmd
    sys.stderr.write(style.ERROR('Error: %s' % msg) + hint)
    sys.exit(1)
def execute_from_command_line(action_mapping=DEFAULT_ACTION_MAPPING, argv=None):
    """
    Parse argv, dispatch to the matching action function, and handle the
    per-action positional-argument/option plumbing for django-admin.py and
    manage.py.
    """
    # Use sys.argv if we've not passed in a custom argv
    if argv is None:
        argv = sys.argv
    # Parse the command-line arguments. optparse handles the dirty work.
    parser = DjangoOptionParser(usage=get_usage(action_mapping), version=get_version())
    parser.add_option('--settings',
        help='Python path to settings module, e.g. "myproject.settings.main". If this isn\'t provided, the DJANGO_SETTINGS_MODULE environment variable will be used.')
    parser.add_option('--pythonpath',
        help='Lets you manually add a directory the Python path, e.g. "/home/djangoprojects/myproject".')
    parser.add_option('--plain', action='store_true', dest='plain',
        help='Tells Django to use plain Python, not IPython, for "shell" command.')
    parser.add_option('--noinput', action='store_false', dest='interactive', default=True,
        help='Tells Django to NOT prompt the user for input of any kind.')
    parser.add_option('--noreload', action='store_false', dest='use_reloader', default=True,
        help='Tells Django to NOT use the auto-reloader when running the development server.')
    parser.add_option('--format', default='json', dest='format',
        help='Specifies the output serialization format for fixtures')
    parser.add_option('--indent', default=None, dest='indent',
        type='int', help='Specifies the indent level to use when pretty-printing output')
    # NOTE(review): the trailing commas on the next two statements turn them
    # into one-element tuples; harmless (the option is still registered) but
    # almost certainly unintended.
    parser.add_option('--verbosity', action='store', dest='verbosity', default='1',
        type='choice', choices=['0', '1', '2'],
        help='Verbosity level; 0=minimal output, 1=normal output, 2=all output'),
    parser.add_option('--adminmedia', dest='admin_media_path', default='', help='Specifies the directory from which to serve admin media for runserver.'),
    options, args = parser.parse_args(argv[1:])
    # Take care of options.
    if options.settings:
        os.environ['DJANGO_SETTINGS_MODULE'] = options.settings
    if options.pythonpath:
        sys.path.insert(0, options.pythonpath)
    # Run the appropriate action. Unfortunately, optparse can't handle
    # positional arguments, so this has to parse/validate them.
    try:
        action = args[0]
    except IndexError:
        parser.print_usage_and_exit()
    if not action_mapping.has_key(action):
        print_error("Your action, %r, was invalid." % action, argv[0])
    # Switch to English, because django-admin.py creates database content
    # like permissions, and those shouldn't contain any translations.
    # But only do this if we should have a working settings file.
    if action not in ('startproject', 'startapp'):
        from django.utils import translation
        translation.activate('en-us')
    # Dispatch: each action has its own positional-argument signature, so
    # the branches below unpack args accordingly.
    if action == 'shell':
        action_mapping[action](options.plain is True)
    elif action in ('validate', 'diffsettings', 'dbshell'):
        action_mapping[action]()
    elif action in ('flush', 'syncdb'):
        action_mapping[action](int(options.verbosity), options.interactive)
    elif action == 'inspectdb':
        try:
            for line in action_mapping[action]():
                print line
        except NotImplementedError:
            sys.stderr.write(style.ERROR("Error: %r isn't supported for the currently selected database backend.\n" % action))
            sys.exit(1)
    elif action == 'createcachetable':
        try:
            action_mapping[action](args[1])
        except IndexError:
            parser.print_usage_and_exit()
    elif action in ('test', 'loaddata'):
        try:
            action_mapping[action](args[1:], int(options.verbosity))
        except IndexError:
            parser.print_usage_and_exit()
    elif action == 'dumpdata':
        try:
            print action_mapping[action](args[1:], options.format, options.indent)
        except IndexError:
            parser.print_usage_and_exit()
    elif action in ('startapp', 'startproject'):
        try:
            name = args[1]
        except IndexError:
            parser.print_usage_and_exit()
        action_mapping[action](name, os.getcwd())
    elif action == 'runserver':
        if len(args) < 2:
            addr = ''
            port = '8000'
        else:
            # Accept either "port" or "ipaddr:port".
            try:
                addr, port = args[1].split(':')
            except ValueError:
                addr, port = '', args[1]
        action_mapping[action](addr, port, options.use_reloader, options.admin_media_path)
    elif action == 'runfcgi':
        action_mapping[action](args[1:])
    elif action == 'sqlinitialdata':
        print action_mapping[action](args[1:])
    elif action == 'sqlflush':
        print '\n'.join(action_mapping[action]())
    else:
        # Remaining actions are the per-app SQL commands: validate models
        # first, then run the action once per named app module.
        from django.db import models
        validate(silent_success=True)
        try:
            mod_list = [models.get_app(app_label) for app_label in args[1:]]
        except ImportError, e:
            sys.stderr.write(style.ERROR("Error: %s. Are you sure your INSTALLED_APPS setting is correct?\n" % e))
            sys.exit(1)
        if not mod_list:
            parser.print_usage_and_exit()
        # Wrap the emitted SQL in a transaction unless the action opted out.
        if action not in NO_SQL_TRANSACTION:
            print style.SQL_KEYWORD("BEGIN;")
        for mod in mod_list:
            if action == 'reset':
                output = action_mapping[action](mod, options.interactive)
            else:
                output = action_mapping[action](mod)
            if output:
                print '\n'.join(output)
        if action not in NO_SQL_TRANSACTION:
            print style.SQL_KEYWORD("COMMIT;")
def setup_environ(settings_mod):
    """
    Configure the runtime environment. This can also be used by external
    scripts wanting to set up a similar environment to manage.py.
    Returns the project directory (the directory containing settings_mod).
    """
    # Add this project to sys.path so that it's importable in the conventional
    # way. For example, if this file (manage.py) lives in a directory
    # "myproject", this code would add "/path/to/myproject" to sys.path.
    project_directory = os.path.dirname(settings_mod.__file__)
    project_name = os.path.basename(project_directory)
    # Temporarily put the project's parent on sys.path so the project package
    # itself is importable, then restore the path.
    sys.path.append(os.path.join(project_directory, '..'))
    project_module = __import__(project_name, {}, {}, [''])
    sys.path.pop()
    # Set DJANGO_SETTINGS_MODULE appropriately.
    os.environ['DJANGO_SETTINGS_MODULE'] = '%s.settings' % project_name
    return project_directory
def execute_manager(settings_mod, argv=None):
    """Set up the project environment, then run a manage.py-flavored command line."""
    project_directory = setup_environ(settings_mod)
    action_mapping = DEFAULT_ACTION_MAPPING.copy()
    # Remove the "startproject" command from the action_mapping, because that's
    # a django-admin.py command, not a manage.py command.
    del action_mapping['startproject']
    # Override the startapp handler so that it always uses the
    # project_directory, not the current working directory (which is default).
    def project_startapp(app_name, directory):
        return startapp(app_name, project_directory)
    project_startapp.__doc__ = startapp.__doc__
    project_startapp.help_doc = startapp.help_doc
    project_startapp.args = startapp.args
    action_mapping['startapp'] = project_startapp
    # Run the django-admin.py command.
    execute_from_command_line(action_mapping, argv)
| Python |
class InvalidPage(Exception):
    """Raised when a requested page number is out of range or not an integer."""
    pass

class ObjectPaginator(object):
    """
    This class makes pagination easy. Feed it a QuerySet or list, plus the number
    of objects you want on each page. Then read the hits and pages properties to
    see how many pages it involves. Call get_page with a page number (starting
    at 0) to get back a list of objects for that page.

    Finally, check if a page number has a next/prev page using
    has_next_page(page_number) and has_previous_page(page_number).

    Use orphans to avoid small final pages. For example:
    13 records, num_per_page=10, orphans=2 --> pages==2, len(self.get_page(0))==10
    12 records, num_per_page=10, orphans=2 --> pages==1, len(self.get_page(0))==12
    """
    def __init__(self, query_set, num_per_page, orphans=0):
        self.query_set = query_set
        self.num_per_page = num_per_page
        self.orphans = orphans
        # Computed lazily by the hits/pages properties and cached.
        self._hits = self._pages = None

    def validate_page_number(self, page_number):
        """
        Coerce page_number to an int and check that it's within range.

        Raises InvalidPage for anything that isn't an in-range page number.
        Catches TypeError as well as ValueError so non-numeric values such
        as None (which int() rejects with TypeError) also raise InvalidPage
        instead of leaking an unexpected exception to the caller.
        """
        try:
            page_number = int(page_number)
        except (TypeError, ValueError):
            raise InvalidPage
        if page_number < 0 or page_number > self.pages - 1:
            raise InvalidPage
        return page_number

    def get_page(self, page_number):
        """Return the slice of objects for the given zero-based page."""
        page_number = self.validate_page_number(page_number)
        bottom = page_number * self.num_per_page
        top = bottom + self.num_per_page
        # Fold orphans into the final page rather than creating a tiny page.
        if top + self.orphans >= self.hits:
            top = self.hits
        return self.query_set[bottom:top]

    def has_next_page(self, page_number):
        "Does page $page_number have a 'next' page?"
        return page_number < self.pages - 1

    def has_previous_page(self, page_number):
        "Does page $page_number have a 'previous' page?"
        return page_number > 0

    def first_on_page(self, page_number):
        """
        Returns the 1-based index of the first object on the given page,
        relative to total objects found (hits).
        """
        page_number = self.validate_page_number(page_number)
        return (self.num_per_page * page_number) + 1

    def last_on_page(self, page_number):
        """
        Returns the 1-based index of the last object on the given page,
        relative to total objects found (hits).
        """
        page_number = self.validate_page_number(page_number)
        page_number += 1 # 1-base
        if page_number == self.pages:
            return self.hits
        return page_number * self.num_per_page

    def _get_hits(self):
        """Total number of objects; computed once and cached."""
        if self._hits is None:
            # Try .count() or fall back to len().
            try:
                self._hits = int(self.query_set.count())
            except (AttributeError, TypeError, ValueError):
                # AttributeError if query_set has no object count.
                # TypeError if query_set.count() required arguments.
                # ValueError if int() fails.
                self._hits = len(self.query_set)
        return self._hits

    def _get_pages(self):
        """Total number of pages, taking orphans into account; cached."""
        if self._pages is None:
            hits = (self.hits - 1 - self.orphans)
            if hits < 1:
                hits = 0
            self._pages = hits // self.num_per_page + 1
        return self._pages

    hits = property(_get_hits)
    pages = property(_get_pages)
| Python |
from django.core.handlers.base import BaseHandler
from django.core import signals
from django.dispatch import dispatcher
from django.utils import datastructures
from django import http
from pprint import pformat
import os
# NOTE: do *not* import settings (or any module which eventually imports
# settings) until after ModPythonHandler has been called; otherwise os.environ
# won't be set up correctly (with respect to settings).
class ModPythonRequest(http.HttpRequest):
    """HttpRequest implementation backed by a mod_python request object.

    GET/POST/COOKIES/FILES/META are all computed lazily from self._req on
    first access and cached on the instance.
    """
    def __init__(self, req):
        # Keep the raw mod_python request; everything else is derived from it.
        self._req = req
        self.path = req.uri
    def __repr__(self):
        # Since this is called as part of error handling, we need to be very
        # robust against potentially malformed input.
        try:
            get = pformat(self.GET)
        except:
            get = '<could not parse>'
        try:
            post = pformat(self.POST)
        except:
            post = '<could not parse>'
        try:
            cookies = pformat(self.COOKIES)
        except:
            cookies = '<could not parse>'
        try:
            meta = pformat(self.META)
        except:
            meta = '<could not parse>'
        return '<ModPythonRequest\npath:%s,\nGET:%s,\nPOST:%s,\nCOOKIES:%s,\nMETA:%s>' % \
            (self.path, get, post, cookies, meta)
    def get_full_path(self):
        # Re-attach the query string (req.args) when there is one.
        return '%s%s' % (self.path, self._req.args and ('?' + self._req.args) or '')
    def is_secure(self):
        # Note: modpython 3.2.10+ has req.is_https(), but we need to support previous versions
        return self._req.subprocess_env.has_key('HTTPS') and self._req.subprocess_env['HTTPS'] == 'on'
    def _load_post_and_files(self):
        "Populates self._post and self._files"
        # Multipart bodies (file uploads) go through the upload parser;
        # everything else is parsed as a urlencoded query string.
        if self._req.headers_in.has_key('content-type') and self._req.headers_in['content-type'].startswith('multipart'):
            self._post, self._files = http.parse_file_upload(self._req.headers_in, self.raw_post_data)
        else:
            self._post, self._files = http.QueryDict(self.raw_post_data), datastructures.MultiValueDict()
    def _get_request(self):
        # Combined POST-then-GET lookup, built on first access.
        if not hasattr(self, '_request'):
            self._request = datastructures.MergeDict(self.POST, self.GET)
        return self._request
    def _get_get(self):
        if not hasattr(self, '_get'):
            self._get = http.QueryDict(self._req.args)
        return self._get
    def _set_get(self, get):
        self._get = get
    def _get_post(self):
        if not hasattr(self, '_post'):
            self._load_post_and_files()
        return self._post
    def _set_post(self, post):
        self._post = post
    def _get_cookies(self):
        if not hasattr(self, '_cookies'):
            self._cookies = http.parse_cookie(self._req.headers_in.get('cookie', ''))
        return self._cookies
    def _set_cookies(self, cookies):
        self._cookies = cookies
    def _get_files(self):
        if not hasattr(self, '_files'):
            self._load_post_and_files()
        return self._files
    def _get_meta(self):
        "Lazy loader that returns self.META dictionary"
        # Builds a CGI-style environ dict from the mod_python request, then
        # folds every incoming header in under an HTTP_* key.
        if not hasattr(self, '_meta'):
            self._meta = {
                'AUTH_TYPE': self._req.ap_auth_type,
                'CONTENT_LENGTH': self._req.clength, # This may be wrong
                'CONTENT_TYPE': self._req.content_type, # This may be wrong
                'GATEWAY_INTERFACE': 'CGI/1.1',
                'PATH_INFO': self._req.path_info,
                'PATH_TRANSLATED': None, # Not supported
                'QUERY_STRING': self._req.args,
                'REMOTE_ADDR': self._req.connection.remote_ip,
                'REMOTE_HOST': None, # DNS lookups not supported
                'REMOTE_IDENT': self._req.connection.remote_logname,
                'REMOTE_USER': self._req.user,
                'REQUEST_METHOD': self._req.method,
                'SCRIPT_NAME': None, # Not supported
                'SERVER_NAME': self._req.server.server_hostname,
                'SERVER_PORT': self._req.server.port,
                'SERVER_PROTOCOL': self._req.protocol,
                'SERVER_SOFTWARE': 'mod_python'
            }
            for key, value in self._req.headers_in.items():
                key = 'HTTP_' + key.upper().replace('-', '_')
                self._meta[key] = value
        return self._meta
    def _get_raw_post_data(self):
        # Read the request body once and cache it (the socket can only be
        # read a single time).
        try:
            return self._raw_post_data
        except AttributeError:
            self._raw_post_data = self._req.read()
            return self._raw_post_data
    def _get_method(self):
        return self.META['REQUEST_METHOD'].upper()
    GET = property(_get_get, _set_get)
    POST = property(_get_post, _set_post)
    COOKIES = property(_get_cookies, _set_cookies)
    FILES = property(_get_files)
    META = property(_get_meta)
    REQUEST = property(_get_request)
    raw_post_data = property(_get_raw_post_data)
    method = property(_get_method)
class ModPythonHandler(BaseHandler):
    """Bridges mod_python requests to Django's request/response machinery."""
    def __call__(self, req):
        # mod_python fakes the environ, and thus doesn't process SetEnv. This fixes that
        os.environ.update(req.subprocess_env)
        # now that the environ works we can see the correct settings, so imports
        # that use settings now can work
        from django.conf import settings
        # if we need to set up middleware, now that settings works we can do it now.
        if self._request_middleware is None:
            self.load_middleware()
        dispatcher.send(signal=signals.request_started)
        try:
            request = ModPythonRequest(req)
            response = self.get_response(request)
            # Apply response middleware
            for middleware_method in self._response_middleware:
                response = middleware_method(request, response)
        finally:
            # request_finished fires even if get_response/middleware raised.
            dispatcher.send(signal=signals.request_finished)
        # Convert our custom HttpResponse object back into the mod_python req.
        req.content_type = response['Content-Type']
        for key, value in response.headers.items():
            if key != 'Content-Type':
                req.headers_out[key] = value
        for c in response.cookies.values():
            req.headers_out.add('Set-Cookie', c.output(header=''))
        req.status = response.status_code
        try:
            # Stream the body; close() runs even if the client disconnects
            # mid-write.
            for chunk in response:
                req.write(chunk)
        finally:
            response.close()
        return 0 # mod_python.apache.OK
def handler(req):
    """Module-level entry point that mod_python invokes for each request."""
    # Build a handler instance and hand off the raw mod_python request.
    request_handler = ModPythonHandler()
    return request_handler(req)
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.