code stringlengths 1 1.72M | language stringclasses 1
value |
|---|---|
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A NotImplemented Images API stub for when the PIL library is not found."""
class ImagesNotImplementedServiceStub(object):
  """Placeholder images API stub used when PIL is unavailable.

  Every RPC made through this stub fails with NotImplementedError so the
  developer gets an actionable message instead of a silent failure.
  """

  def MakeSyncCall(self, service, call, request, response):
    """Main entry point.

    Args:
      service: str, must be 'images'.
      call: str, name of the RPC to make, must be part of ImagesService.
      request: pb object, corresponding args to the 'call' argument.
      response: pb object, return value for the 'call' argument.

    Raises:
      NotImplementedError: always, since PIL could not be imported.
    """
    raise NotImplementedError("Unable to find the Python PIL library. Please "
                              "view the SDK documentation for details about "
                              "installing PIL on your system.")
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Image manipulation API.
Classes defined in this module:
Image: class used to encapsulate image information and transformations for
that image.
The current manipulations that are available are resize, rotate,
horizontal_flip, vertical_flip, crop and im_feeling_lucky.
It should be noted that each transform can only be called once per image
per execute_transforms() call.
"""
import struct
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.images import images_service_pb
from google.appengine.runtime import apiproxy_errors
# Output encodings accepted by execute_transforms(); mirrored from the
# OutputSettings protocol buffer enum so callers never touch the pb module.
JPEG = images_service_pb.OutputSettings.JPEG
PNG = images_service_pb.OutputSettings.PNG

OUTPUT_ENCODING_TYPES = frozenset([JPEG, PNG])

# Server-side limit on how many transforms one request may carry.
MAX_TRANSFORMS_PER_REQUEST = 10
class Error(Exception):
  """Root of the exception hierarchy for the images API module."""
class TransformationError(Error):
  """Raised when the service fails while applying a transform."""
class BadRequestError(Error):
  """Raised when the supplied request parameters are invalid."""
class NotImageError(Error):
  """Raised when the supplied data is not recognizable as an image."""
class BadImageError(Error):
  """Raised when the supplied image data is corrupt."""
class LargeImageError(Error):
  """Raised when the supplied image data is too large to process."""
class Image(object):
  """Image object to manipulate.

  Holds raw image bytes plus a queue of pending Transform protocol
  buffers; execute_transforms() ships both to the images service in one
  RPC.  Width/height are parsed lazily from the raw header bytes.

  NOTE: this module is Python 2 code — image_data is a byte string
  (py2 str) and is indexed/compared as bytes throughout.
  """

  def __init__(self, image_data):
    """Constructor.

    Args:
      image_data: str, image data in string form.

    Raises:
      NotImageError if the given data is empty.
    """
    if not image_data:
      raise NotImageError("Empty image data.")
    self._image_data = image_data
    self._transforms = []
    # Dimensions are computed on demand by _update_dimensions() and reset
    # to None whenever the image data changes.
    self._width = None
    self._height = None

  def _check_transform_limits(self):
    """Ensure some simple limits on the number of transforms allowed.

    Raises:
      BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
      requested for this image
    """
    if len(self._transforms) >= MAX_TRANSFORMS_PER_REQUEST:
      raise BadRequestError("%d transforms have already been requested on this "
                            "image." % MAX_TRANSFORMS_PER_REQUEST)

  def _update_dimensions(self):
    """Updates the width and height fields of the image.

    Dispatches on the file's magic bytes to a per-format header parser.

    Raises:
      NotImageError if the image data is not an image.
      BadImageError if the image data is corrupt.
    """
    size = len(self._image_data)
    if size >= 6 and self._image_data.startswith("GIF"):
      self._update_gif_dimensions()
    elif size >= 8 and self._image_data.startswith("\x89PNG\x0D\x0A\x1A\x0A"):
      self._update_png_dimensions()
    elif size >= 2 and self._image_data.startswith("\xff\xD8"):
      # JPEG SOI marker.
      self._update_jpeg_dimensions()
    elif (size >= 8 and (self._image_data.startswith("II\x2a\x00") or
                         self._image_data.startswith("MM\x00\x2a"))):
      # TIFF, little-endian ("II") or big-endian ("MM") byte order.
      self._update_tiff_dimensions()
    elif size >= 2 and self._image_data.startswith("BM"):
      self._update_bmp_dimensions()
    elif size >= 4 and self._image_data.startswith("\x00\x00\x01\x00"):
      self._update_ico_dimensions()
    else:
      raise NotImageError("Unrecognized image format")

  def _update_gif_dimensions(self):
    """Updates the width and height fields of the gif image.

    Raises:
      BadImageError if the image string is not a valid gif image.
    """
    size = len(self._image_data)
    if size >= 10:
      # Logical screen width/height: little-endian uint16 pair at offset 6.
      self._width, self._height = struct.unpack("<HH", self._image_data[6:10])
    else:
      raise BadImageError("Corrupt GIF format")

  def _update_png_dimensions(self):
    """Updates the width and height fields of the png image.

    Raises:
      BadImageError if the image string is not a valid png image.
    """
    size = len(self._image_data)
    # The IHDR chunk must come first; width/height are big-endian uint32s
    # immediately after the chunk type at offset 16.
    if size >= 24 and self._image_data[12:16] == "IHDR":
      self._width, self._height = struct.unpack(">II", self._image_data[16:24])
    else:
      raise BadImageError("Corrupt PNG format")

  def _update_jpeg_dimensions(self):
    """Updates the width and height fields of the jpeg image.

    Scans the marker segments for a start-of-frame (SOF) marker and reads
    the frame dimensions from it.

    Raises:
      BadImageError if the image string is not a valid jpeg image.
    """
    size = len(self._image_data)
    offset = 2
    while offset < size:
      # Seek to the next 0xFF marker prefix, then skip any fill bytes.
      while offset < size and ord(self._image_data[offset]) != 0xFF:
        offset += 1
      while offset < size and ord(self._image_data[offset]) == 0xFF:
        offset += 1
      # 0xC0-0xCF are SOF markers, except 0xC4 (DHT) which carries no frame
      # header.
      if (offset < size and ord(self._image_data[offset]) & 0xF0 == 0xC0 and
          ord(self._image_data[offset]) != 0xC4):
        # Skip marker byte, segment length, and sample precision.
        offset += 4
        if offset + 4 <= size:
          # SOF stores height before width, both big-endian uint16.
          self._height, self._width = struct.unpack(
              ">HH",
              self._image_data[offset:offset + 4])
          break
        else:
          raise BadImageError("Corrupt JPEG format")
      elif offset + 3 <= size:
        # Not a SOF: skip the whole segment using its declared length.
        offset += 1
        offset += struct.unpack(">H", self._image_data[offset:offset + 2])[0]
      else:
        raise BadImageError("Corrupt JPEG format")
    if self._height is None or self._width is None:
      raise BadImageError("Corrupt JPEG format")

  def _update_tiff_dimensions(self):
    """Updates the width and height fields of the tiff image.

    Walks the first image file directory (IFD) looking for the ImageWidth
    (0x100) and ImageLength (0x101) tags.

    Raises:
      BadImageError if the image string is not a valid tiff image.
    """
    size = len(self._image_data)
    # Byte order is declared by the first two bytes: "II" = little-endian.
    if self._image_data.startswith("II"):
      endianness = "<"
    else:
      endianness = ">"
    ifd_offset = struct.unpack(endianness + "I", self._image_data[4:8])[0]
    if ifd_offset + 14 <= size:
      ifd_size = struct.unpack(
          endianness + "H",
          self._image_data[ifd_offset:ifd_offset + 2])[0]
      ifd_offset += 2
      for unused_i in range(0, ifd_size):
        if ifd_offset + 12 <= size:
          tag = struct.unpack(
              endianness + "H",
              self._image_data[ifd_offset:ifd_offset + 2])[0]
          if tag == 0x100 or tag == 0x101:
            value_type = struct.unpack(
                endianness + "H",
                self._image_data[ifd_offset + 2:ifd_offset + 4])[0]
            # Value type selects the width of the stored value:
            # 3 = SHORT (uint16), 4 = LONG (uint32), otherwise read a byte.
            if value_type == 3:
              format = endianness + "H"
              end_offset = ifd_offset + 10
            elif value_type == 4:
              format = endianness + "I"
              end_offset = ifd_offset + 12
            else:
              format = endianness + "B"
              end_offset = ifd_offset + 9
            if tag == 0x100:
              self._width = struct.unpack(
                  format,
                  self._image_data[ifd_offset + 8:end_offset])[0]
              if self._height is not None:
                break
            else:
              self._height = struct.unpack(
                  format,
                  self._image_data[ifd_offset + 8:end_offset])[0]
              if self._width is not None:
                break
          ifd_offset += 12
        else:
          raise BadImageError("Corrupt TIFF format")
    if self._width is None or self._height is None:
      raise BadImageError("Corrupt TIFF format")

  def _update_bmp_dimensions(self):
    """Updates the width and height fields of the bmp image.

    Raises:
      BadImageError if the image string is not a valid bmp image.
    """
    size = len(self._image_data)
    if size >= 18:
      # DIB header length at offset 14 identifies the header flavor.
      header_length = struct.unpack("<I", self._image_data[14:18])[0]
      if ((header_length == 40 or header_length == 108 or
           header_length == 124 or header_length == 64) and size >= 26):
        # BITMAPINFOHEADER-style: int32 width/height.
        self._width, self._height = struct.unpack("<II",
                                                  self._image_data[18:26])
      elif header_length == 12 and size >= 22:
        # Legacy BITMAPCOREHEADER: uint16 width/height.
        self._width, self._height = struct.unpack("<HH",
                                                  self._image_data[18:22])
      else:
        raise BadImageError("Corrupt BMP format")
    else:
      raise BadImageError("Corrupt BMP format")

  def _update_ico_dimensions(self):
    """Updates the width and height fields of the ico image.

    Raises:
      BadImageError if the image string is not a valid ico image.
    """
    size = len(self._image_data)
    if size >= 8:
      self._width, self._height = struct.unpack("<BB", self._image_data[6:8])
      # ICO stores 256 as 0, since the field is a single byte.
      if not self._width:
        self._width = 256
      if not self._height:
        self._height = 256
    else:
      raise BadImageError("Corrupt ICO format")

  def resize(self, width=0, height=0):
    """Resize the image maintaining the aspect ratio.

    If both width and height are specified, the more restricting of the two
    values will be used when resizing the photo. The maximum dimension allowed
    for both width and height is 4000 pixels.

    Args:
      width: int, width (in pixels) to change the image width to.
      height: int, height (in pixels) to change the image height to.

    Raises:
      TypeError when width or height is not either 'int' or 'long' types.
      BadRequestError when there is something wrong with the given height or
        width or if MAX_TRANSFORMS_PER_REQUEST transforms have already been
        requested on this image.
    """
    if (not isinstance(width, (int, long)) or
        not isinstance(height, (int, long))):
      raise TypeError("Width and height must be integers.")
    if width < 0 or height < 0:
      raise BadRequestError("Width and height must be >= 0.")
    if not width and not height:
      raise BadRequestError("At least one of width or height must be > 0.")
    if width > 4000 or height > 4000:
      raise BadRequestError("Both width and height must be < 4000.")
    self._check_transform_limits()
    transform = images_service_pb.Transform()
    transform.set_width(width)
    transform.set_height(height)
    self._transforms.append(transform)

  def rotate(self, degrees):
    """Rotate an image a given number of degrees clockwise.

    Args:
      degrees: int, must be a multiple of 90.

    Raises:
      TypeError when degrees is not either 'int' or 'long' types.
      BadRequestError when there is something wrong with the given degrees or
        if MAX_TRANSFORMS_PER_REQUEST transforms have already been requested.
    """
    if not isinstance(degrees, (int, long)):
      raise TypeError("Degrees must be integers.")
    if degrees % 90 != 0:
      raise BadRequestError("degrees argument must be multiple of 90.")
    # Normalize to [0, 360) so e.g. -90 and 270 are sent identically.
    degrees = degrees % 360
    self._check_transform_limits()
    transform = images_service_pb.Transform()
    transform.set_rotate(degrees)
    self._transforms.append(transform)

  def horizontal_flip(self):
    """Flip the image horizontally.

    Raises:
      BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
      requested on the image.
    """
    self._check_transform_limits()
    transform = images_service_pb.Transform()
    transform.set_horizontal_flip(True)
    self._transforms.append(transform)

  def vertical_flip(self):
    """Flip the image vertically.

    Raises:
      BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already been
      requested on the image.
    """
    self._check_transform_limits()
    transform = images_service_pb.Transform()
    transform.set_vertical_flip(True)
    self._transforms.append(transform)

  def _validate_crop_arg(self, val, val_name):
    """Validate the given value of a Crop() method argument.

    Args:
      val: float, value of the argument.
      val_name: str, name of the argument.

    Raises:
      TypeError if the args are not of type 'float'.
      BadRequestError when there is something wrong with the given bounding box.
    """
    if type(val) != float:
      raise TypeError("arg '%s' must be of type 'float'." % val_name)
    if not (0 <= val <= 1.0):
      raise BadRequestError("arg '%s' must be between 0.0 and 1.0 "
                            "(inclusive)" % val_name)

  def crop(self, left_x, top_y, right_x, bottom_y):
    """Crop the image.

    The four arguments are the scaling numbers to describe the bounding box
    which will crop the image. The upper left point of the bounding box will
    be at (left_x*image_width, top_y*image_height) the lower right point will
    be at (right_x*image_width, bottom_y*image_height).

    Args:
      left_x: float value between 0.0 and 1.0 (inclusive).
      top_y: float value between 0.0 and 1.0 (inclusive).
      right_x: float value between 0.0 and 1.0 (inclusive).
      bottom_y: float value between 0.0 and 1.0 (inclusive).

    Raises:
      TypeError if the args are not of type 'float'.
      BadRequestError when there is something wrong with the given bounding box
        or if MAX_TRANSFORMS_PER_REQUEST transforms have already been requested
        for this image.
    """
    self._validate_crop_arg(left_x, "left_x")
    self._validate_crop_arg(top_y, "top_y")
    self._validate_crop_arg(right_x, "right_x")
    self._validate_crop_arg(bottom_y, "bottom_y")
    if left_x >= right_x:
      raise BadRequestError("left_x must be less than right_x")
    if top_y >= bottom_y:
      raise BadRequestError("top_y must be less than bottom_y")
    self._check_transform_limits()
    transform = images_service_pb.Transform()
    transform.set_crop_left_x(left_x)
    transform.set_crop_top_y(top_y)
    transform.set_crop_right_x(right_x)
    transform.set_crop_bottom_y(bottom_y)
    self._transforms.append(transform)

  def im_feeling_lucky(self):
    """Automatically adjust image contrast and color levels.

    This is similar to the "I'm Feeling Lucky" button in Picasa.

    Raises:
      BadRequestError if MAX_TRANSFORMS_PER_REQUEST transforms have already
      been requested for this image.
    """
    self._check_transform_limits()
    transform = images_service_pb.Transform()
    transform.set_autolevels(True)
    self._transforms.append(transform)

  def execute_transforms(self, output_encoding=PNG):
    """Perform transformations on given image.

    Sends the queued transforms to the images service in a single RPC and
    replaces this object's image data with the result.

    Args:
      output_encoding: A value from OUTPUT_ENCODING_TYPES.

    Returns:
      str, image data after the transformations have been performed on it.

    Raises:
      BadRequestError when there is something wrong with the request
        specifications.
      NotImageError when the image data given is not an image.
      BadImageError when the image data given is corrupt.
      LargeImageError when the image data given is too large to process.
      TransformationError when something errors during image manipulation.
      Error when something unknown, but bad, happens.
    """
    if output_encoding not in OUTPUT_ENCODING_TYPES:
      raise BadRequestError("Output encoding type not in recognized set "
                            "%s" % OUTPUT_ENCODING_TYPES)
    if not self._transforms:
      raise BadRequestError("Must specify at least one transformation.")
    request = images_service_pb.ImagesTransformRequest()
    response = images_service_pb.ImagesTransformResponse()
    request.mutable_image().set_content(self._image_data)
    for transform in self._transforms:
      request.add_transform().CopyFrom(transform)
    request.mutable_output().set_mime_type(output_encoding)
    try:
      apiproxy_stub_map.MakeSyncCall("images",
                                     "Transform",
                                     request,
                                     response)
    # NOTE: Python 2 exception syntax; maps service error codes onto this
    # module's exception hierarchy.
    except apiproxy_errors.ApplicationError, e:
      if (e.application_error ==
          images_service_pb.ImagesServiceError.BAD_TRANSFORM_DATA):
        raise BadRequestError()
      elif (e.application_error ==
            images_service_pb.ImagesServiceError.NOT_IMAGE):
        raise NotImageError()
      elif (e.application_error ==
            images_service_pb.ImagesServiceError.BAD_IMAGE_DATA):
        raise BadImageError()
      elif (e.application_error ==
            images_service_pb.ImagesServiceError.IMAGE_TOO_LARGE):
        raise LargeImageError()
      elif (e.application_error ==
            images_service_pb.ImagesServiceError.UNSPECIFIED_ERROR):
        raise TransformationError()
      else:
        raise Error()
    # Adopt the transformed bytes and invalidate cached state.
    self._image_data = response.image().content()
    self._transforms = []
    self._width = None
    self._height = None
    return self._image_data

  @property
  def width(self):
    """Gets the width of the image."""
    if self._width is None:
      self._update_dimensions()
    return self._width

  @property
  def height(self):
    """Gets the height of the image."""
    if self._height is None:
      self._update_dimensions()
    return self._height
def resize(image_data, width=0, height=0, output_encoding=PNG):
  """Resize a given image file maintaining the aspect ratio.

  If both width and height are specified, the more restricting of the two
  values will be used when resizing the photo. The maximum dimension allowed
  for both width and height is 4000 pixels.

  Args:
    image_data: str, source image data.
    width: int, width (in pixels) to change the image width to.
    height: int, height (in pixels) to change the image height to.
    output_encoding: a value from OUTPUT_ENCODING_TYPES.

  Returns:
    str, the transformed image data.

  Raises:
    TypeError when width or height not either 'int' or 'long' types.
    BadRequestError when there is something wrong with the given height or
      width.
    Error when something went wrong with the call. See Image.ExecuteTransforms
      for more details.
  """
  img = Image(image_data)
  img.resize(width, height)
  return img.execute_transforms(output_encoding=output_encoding)
def rotate(image_data, degrees, output_encoding=PNG):
  """Rotate a given image a given number of degrees clockwise.

  Args:
    image_data: str, source image data.
    degrees: int, number of degrees to rotate; must be a multiple of 90.
    output_encoding: a value from OUTPUT_ENCODING_TYPES.

  Returns:
    str, the transformed image data.

  Raises:
    TypeError when degrees is not either 'int' or 'long' types.
    BadRequestError when there is something wrong with the given degrees.
    Error when something went wrong with the call. See Image.ExecuteTransforms
      for more details.
  """
  img = Image(image_data)
  img.rotate(degrees)
  return img.execute_transforms(output_encoding=output_encoding)
def horizontal_flip(image_data, output_encoding=PNG):
  """Flip the image horizontally.

  Args:
    image_data: str, source image data.
    output_encoding: a value from OUTPUT_ENCODING_TYPES.

  Returns:
    str, the transformed image data.

  Raises:
    Error when something went wrong with the call. See Image.ExecuteTransforms
      for more details.
  """
  img = Image(image_data)
  img.horizontal_flip()
  return img.execute_transforms(output_encoding=output_encoding)
def vertical_flip(image_data, output_encoding=PNG):
  """Flip the image vertically.

  Args:
    image_data: str, source image data.
    output_encoding: a value from OUTPUT_ENCODING_TYPES.

  Returns:
    str, the transformed image data.

  Raises:
    Error when something went wrong with the call. See Image.ExecuteTransforms
      for more details.
  """
  img = Image(image_data)
  img.vertical_flip()
  return img.execute_transforms(output_encoding=output_encoding)
def crop(image_data, left_x, top_y, right_x, bottom_y, output_encoding=PNG):
  """Crop the given image.

  The four bounding-box arguments are fractions of the image size: the upper
  left corner of the box is at (left_x*image_width, top_y*image_height) and
  the lower right corner is at (right_x*image_width, bottom_y*image_height).

  Args:
    image_data: str, source image data.
    left_x: float value between 0.0 and 1.0 (inclusive).
    top_y: float value between 0.0 and 1.0 (inclusive).
    right_x: float value between 0.0 and 1.0 (inclusive).
    bottom_y: float value between 0.0 and 1.0 (inclusive).
    output_encoding: a value from OUTPUT_ENCODING_TYPES.

  Returns:
    str, the transformed image data.

  Raises:
    TypeError if the args are not of type 'float'.
    BadRequestError when there is something wrong with the given bounding box.
    Error when something went wrong with the call. See Image.ExecuteTransforms
      for more details.
  """
  img = Image(image_data)
  img.crop(left_x, top_y, right_x, bottom_y)
  return img.execute_transforms(output_encoding=output_encoding)
def im_feeling_lucky(image_data, output_encoding=PNG):
  """Automatically adjust image levels.

  This is similar to the "I'm Feeling Lucky" button in Picasa.

  Args:
    image_data: str, source image data.
    output_encoding: a value from OUTPUT_ENCODING_TYPES.

  Returns:
    str, the transformed image data.

  Raises:
    Error when something went wrong with the call. See Image.ExecuteTransforms
      for more details.
  """
  img = Image(image_data)
  img.im_feeling_lucky()
  return img.execute_transforms(output_encoding=output_encoding)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
# Suppress pychecker warnings that are expected in generated protocol
# buffer code (many returns/branches, unused debug parameters, etc.).
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
class ImagesServiceError(ProtocolBuffer.ProtocolMessage):
  """Generated message holding the images service ErrorCode enum.

  The message itself carries no fields; it exists to namespace the error
  codes compared against ApplicationError.application_error by callers.
  """

  # ErrorCode enum values.
  UNSPECIFIED_ERROR = 1
  BAD_TRANSFORM_DATA = 2
  NOT_IMAGE = 3
  BAD_IMAGE_DATA = 4
  IMAGE_TOO_LARGE = 5

  _ErrorCode_NAMES = {
    1: "UNSPECIFIED_ERROR",
    2: "BAD_TRANSFORM_DATA",
    3: "NOT_IMAGE",
    4: "BAD_IMAGE_DATA",
    5: "IMAGE_TOO_LARGE",
  }

  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # No known fields: skip every tag; tag 0 signals corrupt input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class ImagesServiceTransform(ProtocolBuffer.ProtocolMessage):
  """Generated message holding the transform Type enum.

  Carries no fields; it namespaces the transform-type constants.
  """

  # Type enum values.
  RESIZE = 1
  ROTATE = 2
  HORIZONTAL_FLIP = 3
  VERTICAL_FLIP = 4
  CROP = 5
  IM_FEELING_LUCKY = 6

  _Type_NAMES = {
    1: "RESIZE",
    2: "ROTATE",
    3: "HORIZONTAL_FLIP",
    4: "VERTICAL_FLIP",
    5: "CROP",
    6: "IM_FEELING_LUCKY",
  }

  def Type_Name(cls, x): return cls._Type_NAMES.get(x, "")
  Type_Name = classmethod(Type_Name)

  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # No known fields: skip every tag; tag 0 signals corrupt input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Transform(ProtocolBuffer.ProtocolMessage):
  """Generated message describing one image transform.

  Optional fields: width/height (resize), rotate, horizontal_flip,
  vertical_flip, crop_* bounding box fractions, and autolevels.  Field
  accessors follow the generated has_/set_/clear_ convention.
  """

  # Presence flags and default values for each optional field.
  has_width_ = 0
  width_ = 0
  has_height_ = 0
  height_ = 0
  has_rotate_ = 0
  rotate_ = 0
  has_horizontal_flip_ = 0
  horizontal_flip_ = 0
  has_vertical_flip_ = 0
  vertical_flip_ = 0
  has_crop_left_x_ = 0
  crop_left_x_ = 0.0
  has_crop_top_y_ = 0
  crop_top_y_ = 0.0
  has_crop_right_x_ = 0
  crop_right_x_ = 1.0
  has_crop_bottom_y_ = 0
  crop_bottom_y_ = 1.0
  has_autolevels_ = 0
  autolevels_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def width(self): return self.width_

  def set_width(self, x):
    self.has_width_ = 1
    self.width_ = x

  def clear_width(self):
    if self.has_width_:
      self.has_width_ = 0
      self.width_ = 0

  def has_width(self): return self.has_width_

  def height(self): return self.height_

  def set_height(self, x):
    self.has_height_ = 1
    self.height_ = x

  def clear_height(self):
    if self.has_height_:
      self.has_height_ = 0
      self.height_ = 0

  def has_height(self): return self.has_height_

  def rotate(self): return self.rotate_

  def set_rotate(self, x):
    self.has_rotate_ = 1
    self.rotate_ = x

  def clear_rotate(self):
    if self.has_rotate_:
      self.has_rotate_ = 0
      self.rotate_ = 0

  def has_rotate(self): return self.has_rotate_

  def horizontal_flip(self): return self.horizontal_flip_

  def set_horizontal_flip(self, x):
    self.has_horizontal_flip_ = 1
    self.horizontal_flip_ = x

  def clear_horizontal_flip(self):
    if self.has_horizontal_flip_:
      self.has_horizontal_flip_ = 0
      self.horizontal_flip_ = 0

  def has_horizontal_flip(self): return self.has_horizontal_flip_

  def vertical_flip(self): return self.vertical_flip_

  def set_vertical_flip(self, x):
    self.has_vertical_flip_ = 1
    self.vertical_flip_ = x

  def clear_vertical_flip(self):
    if self.has_vertical_flip_:
      self.has_vertical_flip_ = 0
      self.vertical_flip_ = 0

  def has_vertical_flip(self): return self.has_vertical_flip_

  def crop_left_x(self): return self.crop_left_x_

  def set_crop_left_x(self, x):
    self.has_crop_left_x_ = 1
    self.crop_left_x_ = x

  def clear_crop_left_x(self):
    if self.has_crop_left_x_:
      self.has_crop_left_x_ = 0
      self.crop_left_x_ = 0.0

  def has_crop_left_x(self): return self.has_crop_left_x_

  def crop_top_y(self): return self.crop_top_y_

  def set_crop_top_y(self, x):
    self.has_crop_top_y_ = 1
    self.crop_top_y_ = x

  def clear_crop_top_y(self):
    if self.has_crop_top_y_:
      self.has_crop_top_y_ = 0
      self.crop_top_y_ = 0.0

  def has_crop_top_y(self): return self.has_crop_top_y_

  def crop_right_x(self): return self.crop_right_x_

  def set_crop_right_x(self, x):
    self.has_crop_right_x_ = 1
    self.crop_right_x_ = x

  def clear_crop_right_x(self):
    if self.has_crop_right_x_:
      self.has_crop_right_x_ = 0
      self.crop_right_x_ = 1.0

  def has_crop_right_x(self): return self.has_crop_right_x_

  def crop_bottom_y(self): return self.crop_bottom_y_

  def set_crop_bottom_y(self, x):
    self.has_crop_bottom_y_ = 1
    self.crop_bottom_y_ = x

  def clear_crop_bottom_y(self):
    if self.has_crop_bottom_y_:
      self.has_crop_bottom_y_ = 0
      self.crop_bottom_y_ = 1.0

  def has_crop_bottom_y(self): return self.has_crop_bottom_y_

  def autolevels(self): return self.autolevels_

  def set_autolevels(self, x):
    self.has_autolevels_ = 1
    self.autolevels_ = x

  def clear_autolevels(self):
    if self.has_autolevels_:
      self.has_autolevels_ = 0
      self.autolevels_ = 0

  def has_autolevels(self): return self.has_autolevels_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_width()): self.set_width(x.width())
    if (x.has_height()): self.set_height(x.height())
    if (x.has_rotate()): self.set_rotate(x.rotate())
    if (x.has_horizontal_flip()): self.set_horizontal_flip(x.horizontal_flip())
    if (x.has_vertical_flip()): self.set_vertical_flip(x.vertical_flip())
    if (x.has_crop_left_x()): self.set_crop_left_x(x.crop_left_x())
    if (x.has_crop_top_y()): self.set_crop_top_y(x.crop_top_y())
    if (x.has_crop_right_x()): self.set_crop_right_x(x.crop_right_x())
    if (x.has_crop_bottom_y()): self.set_crop_bottom_y(x.crop_bottom_y())
    if (x.has_autolevels()): self.set_autolevels(x.autolevels())

  def Equals(self, x):
    if x is self: return 1
    if self.has_width_ != x.has_width_: return 0
    if self.has_width_ and self.width_ != x.width_: return 0
    if self.has_height_ != x.has_height_: return 0
    if self.has_height_ and self.height_ != x.height_: return 0
    if self.has_rotate_ != x.has_rotate_: return 0
    if self.has_rotate_ and self.rotate_ != x.rotate_: return 0
    if self.has_horizontal_flip_ != x.has_horizontal_flip_: return 0
    if self.has_horizontal_flip_ and self.horizontal_flip_ != x.horizontal_flip_: return 0
    if self.has_vertical_flip_ != x.has_vertical_flip_: return 0
    if self.has_vertical_flip_ and self.vertical_flip_ != x.vertical_flip_: return 0
    if self.has_crop_left_x_ != x.has_crop_left_x_: return 0
    if self.has_crop_left_x_ and self.crop_left_x_ != x.crop_left_x_: return 0
    if self.has_crop_top_y_ != x.has_crop_top_y_: return 0
    if self.has_crop_top_y_ and self.crop_top_y_ != x.crop_top_y_: return 0
    if self.has_crop_right_x_ != x.has_crop_right_x_: return 0
    if self.has_crop_right_x_ and self.crop_right_x_ != x.crop_right_x_: return 0
    if self.has_crop_bottom_y_ != x.has_crop_bottom_y_: return 0
    if self.has_crop_bottom_y_ and self.crop_bottom_y_ != x.crop_bottom_y_: return 0
    if self.has_autolevels_ != x.has_autolevels_: return 0
    if self.has_autolevels_ and self.autolevels_ != x.autolevels_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    # Sizes below are 1-byte tag + payload; fixed32 floats cost 5, bools 2.
    n = 0
    if (self.has_width_): n += 1 + self.lengthVarInt64(self.width_)
    if (self.has_height_): n += 1 + self.lengthVarInt64(self.height_)
    if (self.has_rotate_): n += 1 + self.lengthVarInt64(self.rotate_)
    if (self.has_horizontal_flip_): n += 2
    if (self.has_vertical_flip_): n += 2
    if (self.has_crop_left_x_): n += 5
    if (self.has_crop_top_y_): n += 5
    if (self.has_crop_right_x_): n += 5
    if (self.has_crop_bottom_y_): n += 5
    if (self.has_autolevels_): n += 2
    return n + 0

  def Clear(self):
    self.clear_width()
    self.clear_height()
    self.clear_rotate()
    self.clear_horizontal_flip()
    self.clear_vertical_flip()
    self.clear_crop_left_x()
    self.clear_crop_top_y()
    self.clear_crop_right_x()
    self.clear_crop_bottom_y()
    self.clear_autolevels()

  def OutputUnchecked(self, out):
    # Literal ints are precomputed wire tags: (field_number << 3) | wire_type
    # (0 = varint, 5 = fixed32), e.g. 8 = field 1 varint, 53 = field 6 float.
    if (self.has_width_):
      out.putVarInt32(8)
      out.putVarInt32(self.width_)
    if (self.has_height_):
      out.putVarInt32(16)
      out.putVarInt32(self.height_)
    if (self.has_rotate_):
      out.putVarInt32(24)
      out.putVarInt32(self.rotate_)
    if (self.has_horizontal_flip_):
      out.putVarInt32(32)
      out.putBoolean(self.horizontal_flip_)
    if (self.has_vertical_flip_):
      out.putVarInt32(40)
      out.putBoolean(self.vertical_flip_)
    if (self.has_crop_left_x_):
      out.putVarInt32(53)
      out.putFloat(self.crop_left_x_)
    if (self.has_crop_top_y_):
      out.putVarInt32(61)
      out.putFloat(self.crop_top_y_)
    if (self.has_crop_right_x_):
      out.putVarInt32(69)
      out.putFloat(self.crop_right_x_)
    if (self.has_crop_bottom_y_):
      out.putVarInt32(77)
      out.putFloat(self.crop_bottom_y_)
    if (self.has_autolevels_):
      out.putVarInt32(80)
      out.putBoolean(self.autolevels_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_width(d.getVarInt32())
        continue
      if tt == 16:
        self.set_height(d.getVarInt32())
        continue
      if tt == 24:
        self.set_rotate(d.getVarInt32())
        continue
      if tt == 32:
        self.set_horizontal_flip(d.getBoolean())
        continue
      if tt == 40:
        self.set_vertical_flip(d.getBoolean())
        continue
      if tt == 53:
        self.set_crop_left_x(d.getFloat())
        continue
      if tt == 61:
        self.set_crop_top_y(d.getFloat())
        continue
      if tt == 69:
        self.set_crop_right_x(d.getFloat())
        continue
      if tt == 77:
        self.set_crop_bottom_y(d.getFloat())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_width_: res+=prefix+("width: %s\n" % self.DebugFormatInt32(self.width_))
    if self.has_height_: res+=prefix+("height: %s\n" % self.DebugFormatInt32(self.height_))
    if self.has_rotate_: res+=prefix+("rotate: %s\n" % self.DebugFormatInt32(self.rotate_))
    if self.has_horizontal_flip_: res+=prefix+("horizontal_flip: %s\n" % self.DebugFormatBool(self.horizontal_flip_))
    if self.has_vertical_flip_: res+=prefix+("vertical_flip: %s\n" % self.DebugFormatBool(self.vertical_flip_))
    if self.has_crop_left_x_: res+=prefix+("crop_left_x: %s\n" % self.DebugFormatFloat(self.crop_left_x_))
    if self.has_crop_top_y_: res+=prefix+("crop_top_y: %s\n" % self.DebugFormatFloat(self.crop_top_y_))
    if self.has_crop_right_x_: res+=prefix+("crop_right_x: %s\n" % self.DebugFormatFloat(self.crop_right_x_))
    if self.has_crop_bottom_y_: res+=prefix+("crop_bottom_y: %s\n" % self.DebugFormatFloat(self.crop_bottom_y_))
    if self.has_autolevels_: res+=prefix+("autolevels: %s\n" % self.DebugFormatBool(self.autolevels_))
    return res

  # Field numbers.
  kwidth = 1
  kheight = 2
  krotate = 3
  khorizontal_flip = 4
  kvertical_flip = 5
  kcrop_left_x = 6
  kcrop_top_y = 7
  kcrop_right_x = 8
  kcrop_bottom_y = 9
  kautolevels = 10

  _TEXT = (
   "ErrorCode",
   "width",
   "height",
   "rotate",
   "horizontal_flip",
   "vertical_flip",
   "crop_left_x",
   "crop_top_y",
   "crop_right_x",
   "crop_bottom_y",
   "autolevels",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.FLOAT,
   ProtocolBuffer.Encoder.FLOAT,
   ProtocolBuffer.Encoder.FLOAT,
   ProtocolBuffer.Encoder.FLOAT,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class ImageData(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message holding raw image bytes.

  Wire format: a single required length-delimited 'content' field (tag 1).
  This is machine-generated serialization code; tag values and output order
  are part of the wire contract and must not change.
  """
  has_content_ = 0
  content_ = ""
  def __init__(self, contents=None):
    # Optionally initialize by decoding a serialized message string.
    if contents is not None: self.MergeFromString(contents)
  def content(self): return self.content_
  def set_content(self, x):
    self.has_content_ = 1
    self.content_ = x
  def clear_content(self):
    if self.has_content_:
      self.has_content_ = 0
      self.content_ = ""
  def has_content(self): return self.has_content_
  def MergeFrom(self, x):
    # Copy set fields from another ImageData into this instance.
    assert x is not self
    if (x.has_content()): self.set_content(x.content())
  def Equals(self, x):
    # Field-wise equality; returns 1/0 per the generated-code convention.
    if x is self: return 1
    if self.has_content_ != x.has_content_: return 0
    if self.has_content_ and self.content_ != x.content_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # The required 'content' field must be set before serialization.
    initialized = 1
    if (not self.has_content_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: content not set.')
    return initialized
  def ByteSize(self):
    # One byte for the tag plus the length-prefixed string payload.
    n = 0
    n += self.lengthString(len(self.content_))
    return n + 1
  def Clear(self):
    self.clear_content()
  def OutputUnchecked(self, out):
    # Tag 10 == (field 1 << 3) | wire type 2 (length-delimited).
    out.putVarInt32(10)
    out.putPrefixedString(self.content_)
  def TryMerge(self, d):
    # Decode fields from a Decoder, skipping any unknown tags.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_content(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_content_: res+=prefix+("content: %s\n" % self.DebugFormatString(self.content_))
    return res
  # Field tag number.
  kcontent = 1
  # Field names indexed by tag (index 0 is the implicit ErrorCode slot).
  _TEXT = (
   "ErrorCode",
   "content",
  )
  # Encoder wire types, parallel to _TEXT.
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class OutputSettings(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message describing the requested output encoding.

  Carries a single optional MIME_TYPE enum field (PNG or JPEG, tag 1).
  Machine-generated wire-format code; do not change tags or output order.
  """
  # MIME_TYPE enum values.
  PNG = 0
  JPEG = 1
  # Reverse mapping from enum value to its symbolic name.
  _MIME_TYPE_NAMES = {
    0: "PNG",
    1: "JPEG",
  }
  def MIME_TYPE_Name(cls, x): return cls._MIME_TYPE_NAMES.get(x, "")
  MIME_TYPE_Name = classmethod(MIME_TYPE_Name)
  has_mime_type_ = 0
  mime_type_ = 0
  def __init__(self, contents=None):
    # Optionally initialize by decoding a serialized message string.
    if contents is not None: self.MergeFromString(contents)
  def mime_type(self): return self.mime_type_
  def set_mime_type(self, x):
    self.has_mime_type_ = 1
    self.mime_type_ = x
  def clear_mime_type(self):
    if self.has_mime_type_:
      self.has_mime_type_ = 0
      self.mime_type_ = 0
  def has_mime_type(self): return self.has_mime_type_
  def MergeFrom(self, x):
    # Copy set fields from another OutputSettings into this instance.
    assert x is not self
    if (x.has_mime_type()): self.set_mime_type(x.mime_type())
  def Equals(self, x):
    if x is self: return 1
    if self.has_mime_type_ != x.has_mime_type_: return 0
    if self.has_mime_type_ and self.mime_type_ != x.mime_type_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # All fields are optional, so the message is always initialized.
    initialized = 1
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_mime_type_): n += 1 + self.lengthVarInt64(self.mime_type_)
    return n + 0
  def Clear(self):
    self.clear_mime_type()
  def OutputUnchecked(self, out):
    # Tag 8 == (field 1 << 3) | wire type 0 (varint).
    if (self.has_mime_type_):
      out.putVarInt32(8)
      out.putVarInt32(self.mime_type_)
  def TryMerge(self, d):
    # Decode fields from a Decoder, skipping any unknown tags.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_mime_type(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_mime_type_: res+=prefix+("mime_type: %s\n" % self.DebugFormatInt32(self.mime_type_))
    return res
  # Field tag number.
  kmime_type = 1
  _TEXT = (
   "ErrorCode",
   "mime_type",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class ImagesTransformRequest(ProtocolBuffer.ProtocolMessage):
  """Generated request message for the images Transform RPC.

  Fields: required 'image' (ImageData, tag 1), repeated 'transform'
  (Transform, tag 2), required 'output' (OutputSettings, tag 3).
  Machine-generated wire-format code; tags and output order are part of
  the wire contract.
  """
  has_image_ = 0
  has_output_ = 0
  def __init__(self, contents=None):
    # Sub-messages are eagerly constructed; has_*_ flags track presence.
    self.image_ = ImageData()
    self.transform_ = []
    self.output_ = OutputSettings()
    if contents is not None: self.MergeFromString(contents)
  def image(self): return self.image_
  def mutable_image(self): self.has_image_ = 1; return self.image_
  def clear_image(self):self.has_image_ = 0; self.image_.Clear()
  def has_image(self): return self.has_image_
  def transform_size(self): return len(self.transform_)
  def transform_list(self): return self.transform_
  def transform(self, i):
    return self.transform_[i]
  def mutable_transform(self, i):
    return self.transform_[i]
  def add_transform(self):
    # Append a fresh Transform and return it for the caller to populate.
    x = Transform()
    self.transform_.append(x)
    return x
  def clear_transform(self):
    self.transform_ = []
  def output(self): return self.output_
  def mutable_output(self): self.has_output_ = 1; return self.output_
  def clear_output(self):self.has_output_ = 0; self.output_.Clear()
  def has_output(self): return self.has_output_
  def MergeFrom(self, x):
    # Merge set fields from another ImagesTransformRequest; repeated
    # transforms are appended, not replaced.
    assert x is not self
    if (x.has_image()): self.mutable_image().MergeFrom(x.image())
    for i in xrange(x.transform_size()): self.add_transform().CopyFrom(x.transform(i))
    if (x.has_output()): self.mutable_output().MergeFrom(x.output())
  def Equals(self, x):
    if x is self: return 1
    if self.has_image_ != x.has_image_: return 0
    if self.has_image_ and self.image_ != x.image_: return 0
    if len(self.transform_) != len(x.transform_): return 0
    for e1, e2 in zip(self.transform_, x.transform_):
      if e1 != e2: return 0
    if self.has_output_ != x.has_output_: return 0
    if self.has_output_ and self.output_ != x.output_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Required 'image' and 'output' must be set, and every sub-message
    # must itself be initialized.
    initialized = 1
    if (not self.has_image_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: image not set.')
    elif not self.image_.IsInitialized(debug_strs): initialized = 0
    for p in self.transform_:
      if not p.IsInitialized(debug_strs): initialized=0
    if (not self.has_output_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: output not set.')
    elif not self.output_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    # Two bytes cover the tags of the two required fields; each repeated
    # transform adds one tag byte plus its length-prefixed payload.
    n = 0
    n += self.lengthString(self.image_.ByteSize())
    n += 1 * len(self.transform_)
    for i in xrange(len(self.transform_)): n += self.lengthString(self.transform_[i].ByteSize())
    n += self.lengthString(self.output_.ByteSize())
    return n + 2
  def Clear(self):
    self.clear_image()
    self.clear_transform()
    self.clear_output()
  def OutputUnchecked(self, out):
    # Tags 10/18/26 == fields 1/2/3, wire type 2 (length-delimited).
    out.putVarInt32(10)
    out.putVarInt32(self.image_.ByteSize())
    self.image_.OutputUnchecked(out)
    for i in xrange(len(self.transform_)):
      out.putVarInt32(18)
      out.putVarInt32(self.transform_[i].ByteSize())
      self.transform_[i].OutputUnchecked(out)
    out.putVarInt32(26)
    out.putVarInt32(self.output_.ByteSize())
    self.output_.OutputUnchecked(out)
  def TryMerge(self, d):
    # Decode sub-messages by handing each one a bounded sub-Decoder,
    # skipping unknown tags.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_image().TryMerge(tmp)
        continue
      if tt == 18:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_transform().TryMerge(tmp)
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_output().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_image_:
      res+=prefix+"image <\n"
      res+=self.image_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    cnt=0
    for e in self.transform_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("transform%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    if self.has_output_:
      res+=prefix+"output <\n"
      res+=self.output_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field tag numbers.
  kimage = 1
  ktransform = 2
  koutput = 3
  _TEXT = (
   "ErrorCode",
   "image",
   "transform",
   "output",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class ImagesTransformResponse(ProtocolBuffer.ProtocolMessage):
  """Generated response message for the images Transform RPC.

  Carries a single required 'image' sub-message (ImageData, tag 1) holding
  the transformed image bytes. Machine-generated wire-format code.
  """
  has_image_ = 0
  def __init__(self, contents=None):
    self.image_ = ImageData()
    if contents is not None: self.MergeFromString(contents)
  def image(self): return self.image_
  def mutable_image(self): self.has_image_ = 1; return self.image_
  def clear_image(self):self.has_image_ = 0; self.image_.Clear()
  def has_image(self): return self.has_image_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_image()): self.mutable_image().MergeFrom(x.image())
  def Equals(self, x):
    if x is self: return 1
    if self.has_image_ != x.has_image_: return 0
    if self.has_image_ and self.image_ != x.image_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Required 'image' must be set and itself initialized.
    initialized = 1
    if (not self.has_image_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: image not set.')
    elif not self.image_.IsInitialized(debug_strs): initialized = 0
    return initialized
  def ByteSize(self):
    # One tag byte plus the length-prefixed sub-message payload.
    n = 0
    n += self.lengthString(self.image_.ByteSize())
    return n + 1
  def Clear(self):
    self.clear_image()
  def OutputUnchecked(self, out):
    # Tag 10 == (field 1 << 3) | wire type 2 (length-delimited).
    out.putVarInt32(10)
    out.putVarInt32(self.image_.ByteSize())
    self.image_.OutputUnchecked(out)
  def TryMerge(self, d):
    # Decode the image sub-message via a bounded sub-Decoder, skipping
    # unknown tags.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_image().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_image_:
      res+=prefix+"image <\n"
      res+=self.image_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field tag number.
  kimage = 1
  _TEXT = (
   "ErrorCode",
   "image",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
__all__ = ['ImagesServiceError','ImagesServiceTransform','Transform','ImageData','OutputSettings','ImagesTransformRequest','ImagesTransformResponse']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""PyYAML event builder handler
Receives events from YAML listener and forwards them to a builder
object so that it can construct a properly structured object.
"""
from google.appengine.api import yaml_errors
from google.appengine.api import yaml_listener
import yaml
# Structural token types pushed on the BuilderHandler stack to track which
# kind of YAML scope (document, sequence, mapping, or pending mapping key)
# is currently being built.
_TOKEN_DOCUMENT = 'document'
_TOKEN_SEQUENCE = 'sequence'
_TOKEN_MAPPING = 'mapping'
_TOKEN_KEY = 'key'
# The complete set of legal stack tokens.
_TOKEN_VALUES = frozenset((
    _TOKEN_DOCUMENT,
    _TOKEN_SEQUENCE,
    _TOKEN_MAPPING,
    _TOKEN_KEY))
class Builder(object):
  """Interface for building documents and types from YAML events.

  Implement this interface to create a new builder. Builders are
  passed to the BuilderHandler and used as a factory and assembler
  for creating concrete representations of YAML files.

  All methods are intentionally no-ops here; subclasses override the ones
  they need.
  """
  def BuildDocument(self):
    """Build new document.

    The object built by this method becomes the top level entity
    that the builder handler constructs. The actual type is
    determined by the sub-class of the Builder class and can essentially
    be any type at all. This method is always called when the parser
    encounters the start of a new document.

    Returns:
      New object instance representing concrete document which is
      returned to user via BuilderHandler.GetResults().
    """
  def InitializeDocument(self, document, value):
    """Initialize document with value from top level of document.

    This method is called when the root document element is encountered at
    the top level of a YAML document. It should get called immediately
    after BuildDocument.

    Receiving the None value indicates the empty document.

    Args:
      document: Document as constructed in BuildDocument.
      value: Scalar value to initialize the document with.
    """
  def BuildMapping(self, top_value):
    """Build a new mapping representation.

    Called when StartMapping event received. Type of object is determined
    by Builder sub-class.

    Args:
      top_value: Object which will be the new mapping's parent. Will be
        the object returned from a previous call to BuildMapping or
        BuildSequence.

    Returns:
      Instance of new object that represents a mapping type in target model.
    """
  def EndMapping(self, top_value, mapping):
    """Previously constructed mapping scope is at an end.

    Called when the end of a mapping block is encountered. Useful for
    additional clean up or end of scope validation.

    Args:
      top_value: Value which is parent of the mapping.
      mapping: Mapping which is at the end of its scope.
    """
  def BuildSequence(self, top_value):
    """Build a new sequence representation.

    Called when StartSequence event received. Type of object is determined
    by Builder sub-class.

    Args:
      top_value: Object which will be the new sequence's parent. Will be
        the object returned from a previous call to BuildMapping or
        BuildSequence.

    Returns:
      Instance of new object that represents a sequence type in target model.
    """
  def EndSequence(self, top_value, sequence):
    """Previously constructed sequence scope is at an end.

    Called when the end of a sequence block is encountered. Useful for
    additional clean up or end of scope validation.

    Args:
      top_value: Value which is parent of the sequence.
      sequence: Sequence which is at the end of its scope.
    """
  def MapTo(self, subject, key, value):
    """Map value to a mapping representation.

    Implementation is defined by sub-class of Builder.

    Args:
      subject: Object that represents mapping. Value returned from
        BuildMapping.
      key: Key used to map value to subject. Can be any scalar value.
      value: Value which is mapped to subject. Can be any kind of value.
    """
  def AppendTo(self, subject, value):
    """Append value to a sequence representation.

    Implementation is defined by sub-class of Builder.

    Args:
      subject: Object that represents sequence. Value returned from
        BuildSequence.
      value: Value to be appended to subject. Can be any kind of value.
    """
class BuilderHandler(yaml_listener.EventHandler):
  """PyYAML event handler used to build objects.

  Maintains state information as it receives parse events so that object
  nesting is maintained. Uses provided builder object to construct and
  assemble objects as it goes.

  As it receives events from the YAML parser, it builds a stack of data
  representing structural tokens. As the scope of documents, mappings
  and sequences end, those (token, value) pairs are popped from the top of
  the stack so that the original scope can resume processing.

  A special case is made for the _KEY token. It represents a temporary
  value which only occurs inside mappings. It is immediately popped off
  the stack when its associated value is encountered in the parse stream.
  It is necessary to do this because the YAML parser does not combine
  key and value information in to a single event.
  """
  def __init__(self, builder):
    """Initialization for builder handler.

    Args:
      builder: Instance of Builder class.

    Raises:
      ListenerConfigurationError when builder is not a Builder class.
    """
    if not isinstance(builder, Builder):
      raise yaml_errors.ListenerConfigurationError(
        'Must provide builder of type yaml_listener.Builder')
    self._builder = builder
    # _stack is None outside of a stream; a list while parsing.
    self._stack = None
    # _top mirrors _stack[-1] (or None) to avoid repeated indexing.
    self._top = None
    self._results = []
  def _Push(self, token, value):
    """Push values to stack at start of nesting.

    When a new object scope is beginning, will push the token (type of scope)
    along with the new objects value, the latter of which is provided through
    the various build methods of the builder.

    Args:
      token: Token indicating the type of scope which is being created; must
        belong to _TOKEN_VALUES.
      value: Value to associate with given token. Construction of value is
        determined by the builder provided to this handler at construction.
    """
    self._top = (token, value)
    self._stack.append(self._top)
  def _Pop(self):
    """Pop values from stack at end of nesting.

    Called to indicate the end of a nested scope.

    Returns:
      Previously pushed value at the top of the stack.
    """
    assert self._stack != [] and self._stack is not None
    token, value = self._stack.pop()
    # Keep _top in sync with the new top of stack.
    if self._stack:
      self._top = self._stack[-1]
    else:
      self._top = None
    return value
  def _HandleAnchor(self, event):
    """Handle anchor attached to event.

    Currently will raise an error if anchor is used. Anchors are used to
    define a document wide tag to a given value (scalar, mapping or sequence).

    Args:
      event: Event which may have anchor property set.

    Raises:
      NotImplementedError if event attempts to use an anchor.
    """
    if hasattr(event, 'anchor') and event.anchor is not None:
      raise NotImplementedError, 'Anchors not supported in this handler'
  def _HandleValue(self, value):
    """Handle given value based on state of parser.

    This method handles the various values that are created by the builder
    at the beginning of scope events (such as mappings and sequences) or
    when a scalar value is received.

    Called from the Scalar, SequenceStart and MappingStart event handlers.

    Args:
      value: Value received as scalar value or newly constructed mapping or
        sequence instance.

    Raises:
      InternalError if the building process encounters an unexpected token.
      This is an indication of an implementation error in BuilderHandler.
    """
    token, top_value = self._top
    if token == _TOKEN_KEY:
      # A key is pending: this value completes the key/value pair.
      key = self._Pop()
      mapping_token, mapping = self._top
      assert _TOKEN_MAPPING == mapping_token
      self._builder.MapTo(mapping, key, value)
    elif token == _TOKEN_MAPPING:
      # Inside a mapping with no pending key: this value IS the next key.
      self._Push(_TOKEN_KEY, value)
    elif token == _TOKEN_SEQUENCE:
      self._builder.AppendTo(top_value, value)
    elif token == _TOKEN_DOCUMENT:
      self._builder.InitializeDocument(top_value, value)
    else:
      raise yaml_errors.InternalError('Unrecognized builder token:\n%s' % token)
  def StreamStart(self, event, loader):
    """Initializes internal state of handler.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._stack is None
    self._stack = []
    self._top = None
    self._results = []
  def StreamEnd(self, event, loader):
    """Cleans up internal state of handler after parsing.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._stack == [] and self._top is None
    self._stack = None
  def DocumentStart(self, event, loader):
    """Build new document.

    Pushes new document on to stack.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._stack == []
    self._Push(_TOKEN_DOCUMENT, self._builder.BuildDocument())
  def DocumentEnd(self, event, loader):
    """End of document.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._top[0] == _TOKEN_DOCUMENT
    self._results.append(self._Pop())
  def Alias(self, event, loader):
    """Not implemented yet.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    raise NotImplementedError('Anchors not supported in this handler')
  def Scalar(self, event, loader):
    """Handle scalar value.

    Since scalars are simple values that are passed directly in by the
    parser, handle like any value with no additional processing.

    Of course, key values will be handled specially. A key value is
    recognized when the top token is _TOKEN_MAPPING.

    Args:
      event: Event containing scalar value.
      loader: Loader used to resolve and construct the scalar's type.
    """
    self._HandleAnchor(event)
    if event.tag is None and self._top[0] != _TOKEN_MAPPING:
      # Untagged non-key scalars get their implicit type resolved by the
      # loader (e.g. '1' -> int, 'true' -> bool).
      try:
        tag = loader.resolve(yaml.nodes.ScalarNode,
                             event.value, event.implicit)
      except IndexError:
        tag = loader.DEFAULT_SCALAR_TAG
    else:
      tag = event.tag
    if tag is None:
      value = event.value
    else:
      node = yaml.nodes.ScalarNode(tag,
                                   event.value,
                                   event.start_mark,
                                   event.end_mark,
                                   event.style)
      value = loader.construct_object(node)
    self._HandleValue(value)
  def SequenceStart(self, event, loader):
    """Start of sequence scope.

    Create a new sequence from the builder and then handle in the context
    of its parent.

    Args:
      event: SequenceStartEvent generated by loader.
      loader: Loader that generated event.
    """
    self._HandleAnchor(event)
    token, parent = self._top
    # If a key is pending, the sequence's real parent is one level down.
    if token == _TOKEN_KEY:
      token, parent = self._stack[-2]
    sequence = self._builder.BuildSequence(parent)
    self._HandleValue(sequence)
    self._Push(_TOKEN_SEQUENCE, sequence)
  def SequenceEnd(self, event, loader):
    """End of sequence.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._top[0] == _TOKEN_SEQUENCE
    end_object = self._Pop()
    top_value = self._top[1]
    self._builder.EndSequence(top_value, end_object)
  def MappingStart(self, event, loader):
    """Start of mapping scope.

    Create a mapping from builder and then handle in the context of its
    parent.

    Args:
      event: MappingStartEvent generated by loader.
      loader: Loader that generated event.
    """
    self._HandleAnchor(event)
    token, parent = self._top
    # If a key is pending, the mapping's real parent is one level down.
    if token == _TOKEN_KEY:
      token, parent = self._stack[-2]
    mapping = self._builder.BuildMapping(parent)
    self._HandleValue(mapping)
    self._Push(_TOKEN_MAPPING, mapping)
  def MappingEnd(self, event, loader):
    """End of mapping.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._top[0] == _TOKEN_MAPPING
    end_object = self._Pop()
    top_value = self._top[1]
    self._builder.EndMapping(top_value, end_object)
  def GetResults(self):
    """Get results of document stream processing.

    This method can be invoked after fully parsing the entire YAML file
    to retrieve constructed contents of YAML file. Called after EndStream.

    Returns:
      A tuple of all document objects that were parsed from YAML stream.

    Raises:
      InternalError if the builder stack is not empty by the end of parsing.
    """
    if self._stack is not None:
      raise yaml_errors.InternalError('Builder stack is not empty.')
    return tuple(self._results)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""AppInfo tools
Library for working with AppInfo records in memory, store and load from
configuration files.
"""
import re
from google.appengine.api import appinfo_errors
from google.appengine.api import validation
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_object
# Regex validators for handler url / file patterns. These reject patterns
# that begin with '^' or end with '$' since the matcher anchors implicitly.
_URL_REGEX = r'(?!\^)/|\.|(\(.).*(?!\$).'
_FILES_REGEX = r'(?!\^).*(?!\$).'
# One expiration amount, e.g. '4d' or '30m'; group 1 is the count,
# group 2 the (optional) unit letter.
_DELTA_REGEX = r'([1-9][0-9]*)([DdHhMm]|[sS]?)'
# Whitespace-separated list of delta amounts, e.g. '1d 6h 30m'.
_EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)
# Seconds per expiration unit; a missing unit is treated as seconds.
_EXPIRATION_CONVERSIONS = {
    'd': 60 * 60 * 24,
    'h': 60 * 60,
    'm': 60,
    's': 1,
}
# Limits applied when validating app.yaml contents.
APP_ID_MAX_LEN = 100
MAJOR_VERSION_ID_MAX_LEN = 100
MAX_URL_MAPS = 100
APPLICATION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % APP_ID_MAX_LEN
VERSION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % MAJOR_VERSION_ID_MAX_LEN
RUNTIME_RE_STRING = r'[a-z]{1,30}'
API_VERSION_RE_STRING = r'[\w.]{1,32}'
# Handler-id attribute names; exactly one must be set per URLMap.
HANDLER_STATIC_FILES = 'static_files'
HANDLER_STATIC_DIR = 'static_dir'
HANDLER_SCRIPT = 'script'
# Values for the 'login' attribute.
LOGIN_OPTIONAL = 'optional'
LOGIN_REQUIRED = 'required'
LOGIN_ADMIN = 'admin'
# Values for the 'secure' attribute.
SECURE_HTTP = 'never'
SECURE_HTTPS = 'always'
SECURE_HTTP_OR_HTTPS = 'optional'
# Default skip_files pattern: editor backups, compiled files, VCS data,
# dotfiles and the config files themselves.
# NOTE(review): the trailing '|' before ')$' creates an empty alternative,
# so the inner group also matches the empty string -- confirm intended.
DEFAULT_SKIP_FILES = (r"^(.*/)?("
                      r"(app\.yaml)|"
                      r"(app\.yml)|"
                      r"(index\.yaml)|"
                      r"(index\.yml)|"
                      r"(#.*#)|"
                      r"(.*~)|"
                      r"(.*\.py[co])|"
                      r"(.*/RCS/.*)|"
                      r"(\..*)|"
                      r")$")
# YAML attribute names used in app.yaml.
LOGIN = 'login'
SECURE = 'secure'
URL = 'url'
STATIC_FILES = 'static_files'
UPLOAD = 'upload'
STATIC_DIR = 'static_dir'
MIME_TYPE = 'mime_type'
SCRIPT = 'script'
EXPIRATION = 'expiration'
APPLICATION = 'application'
VERSION = 'version'
RUNTIME = 'runtime'
API_VERSION = 'api_version'
HANDLERS = 'handlers'
DEFAULT_EXPIRATION = 'default_expiration'
SKIP_FILES = 'skip_files'
class URLMap(validation.Validated):
  """Mapping from URLs to handlers.

  This class acts like something of a union type. Its purpose is to
  describe a mapping between a set of URLs and their handlers. What
  handler type a given instance has is determined by which handler-id
  attribute is used.

  Each mapping can have one and only one handler type. Attempting to
  use more than one handler-id attribute will cause an UnknownHandlerType
  to be raised during validation. Failure to provide any handler-id
  attributes will cause MissingHandlerType to be raised during validation.

  The regular expression used by the url field will be used to match against
  the entire URL path and query string of the request. This means that
  partial maps will not be matched. Specifying a url, say /admin, is the
  same as matching against the regular expression '^/admin$'. Don't begin
  your matching url with ^ or end them with $. These regular expressions
  won't be accepted and will raise ValueError.

  Attributes:
    login: Whether or not login is required to access URL. Defaults to
      'optional'.
    secure: Restriction on the protocol which can be used to serve
      this URL/handler (HTTP, HTTPS or either).
    url: Regular expression used to fully match against the request URLs path.
      See Special Cases for using static_dir.
    static_files: Handler id attribute that maps URL to the appropriate
      file. Can use back regex references to the string matched to url.
    upload: Regular expression used by the application configuration
      program to know which files are uploaded as blobs. It's very
      difficult to determine this using just the url and static_files
      so this attribute must be included. Required when defining a
      static_files mapping.
      A matching file name must fully match against the upload regex, similar
      to how url is matched against the request path. Do not begin upload
      with ^ or end it with $.
    static_dir: Handler id that maps the provided url to a sub-directory
      within the application directory. See Special Cases.
    mime_type: When used with static_files and static_dir the mime-type
      of files served from those directories are overridden with this
      value.
    script: Handler id that maps URLs to script handler within the application
      directory that will run using CGI.
    expiration: When used with static files and directories, the time delta to
      use for cache expiration. Has the form '4d 5h 30m 15s', where each letter
      signifies days, hours, minutes, and seconds, respectively. The 's' for
      seconds may be omitted. Only one amount must be specified, combining
      multiple amounts is optional. Example good values: '10', '1d 6h',
      '1h 30m', '7d 7d 7d', '5m 30'.

  Special cases:
    When defining a static_dir handler, do not use a regular expression
    in the url attribute. Both the url and static_dir attributes are
    automatically mapped to these equivalents:

      <url>/(.*)
      <static_dir>/\1

    For example:

      url: /images
      static_dir: images_folder

    Is the same as this static_files declaration:

      url: /images/(.*)
      static_files: images/\1
      upload: images/(.*)
  """
  ATTRIBUTES = {
    URL: validation.Optional(_URL_REGEX),
    LOGIN: validation.Options(LOGIN_OPTIONAL,
                              LOGIN_REQUIRED,
                              LOGIN_ADMIN,
                              default=LOGIN_OPTIONAL),
    SECURE: validation.Options(SECURE_HTTP,
                               SECURE_HTTPS,
                               SECURE_HTTP_OR_HTTPS,
                               default=SECURE_HTTP),
    HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
    UPLOAD: validation.Optional(_FILES_REGEX),
    HANDLER_STATIC_DIR: validation.Optional(_FILES_REGEX),
    MIME_TYPE: validation.Optional(str),
    EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
    HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
  }
  # Attributes shared by every handler type.
  COMMON_FIELDS = set([URL, LOGIN, SECURE])
  # Per-handler-type attributes which may accompany that handler id.
  ALLOWED_FIELDS = {
    HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION),
    HANDLER_STATIC_DIR: (MIME_TYPE, EXPIRATION),
    HANDLER_SCRIPT: (),
  }
  def GetHandler(self):
    """Get handler for mapping.

    Returns:
      Value of the handler (determined by handler id attribute).
    """
    return getattr(self, self.GetHandlerType())
  def GetHandlerType(self):
    """Get handler type of mapping.

    Returns:
      Handler type determined by which handler id attribute is set.

    Raises:
      UnknownHandlerType when no handler id attributes are set.
      UnexpectedHandlerAttribute when an unexpected attribute
        is set for the discovered handler type.
      MissingHandlerAttribute when the handler is missing a
        required attribute for its handler type.
    """
    # Iterate the dict directly instead of the Python-2-only iterkeys()
    # so this code also works under Python 3; behavior is identical.
    for id_field in URLMap.ALLOWED_FIELDS:
      if getattr(self, id_field) is not None:
        # The first handler-id attribute found determines the mapping type.
        mapping_type = id_field
        break
    else:
      raise appinfo_errors.UnknownHandlerType(
          'Unknown url handler type.\n%s' % str(self))
    allowed_fields = URLMap.ALLOWED_FIELDS[mapping_type]
    # Any other set attribute must be common, allowed for this type, or the
    # handler id itself.
    for attribute in self.ATTRIBUTES:
      if (getattr(self, attribute) is not None and
          not (attribute in allowed_fields or
               attribute in URLMap.COMMON_FIELDS or
               attribute == mapping_type)):
        raise appinfo_errors.UnexpectedHandlerAttribute(
            'Unexpected attribute "%s" for mapping type %s.' %
            (attribute, mapping_type))
    # static_files handlers must also say which files get uploaded as blobs.
    if mapping_type == HANDLER_STATIC_FILES and not self.upload:
      raise appinfo_errors.MissingHandlerAttribute(
          'Missing "%s" attribute for URL "%s".' % (UPLOAD, self.url))
    return mapping_type
  def CheckInitialized(self):
    """Adds additional checking to make sure handler has correct fields.

    In addition to normal Validated checks, calls GetHandlerType
    which validates all the handler fields are configured properly.

    Raises:
      UnknownHandlerType when no handler id attributes are set.
      UnexpectedHandlerAttribute when an unexpected attribute
        is set for the discovered handler type.
      MissingHandlerAttribute when the handler is missing a
        required attribute for its handler type.
    """
    super(URLMap, self).CheckInitialized()
    self.GetHandlerType()
class AppInfoExternal(validation.Validated):
  """Class representing users application info.

  This class is passed to a yaml_object builder to provide the validation
  for the application information file format parser.

  Attributes:
    application: Unique identifier for application.
    version: Application's major version number.
    runtime: Runtime used by application.
    api_version: Which version of APIs to use.
    handlers: List of URL handlers.
    default_expiration: Default time delta to use for cache expiration for
      all static files, unless they have their own specific 'expiration' set.
      See the URLMap.expiration field's documentation for more information.
    skip_files: An re object. Files that match this regular expression will
      not be uploaded by appcfg.py. For example:
        skip_files: |
          .svn.*|
          #.*#
  """
  ATTRIBUTES = {
    APPLICATION: APPLICATION_RE_STRING,
    VERSION: VERSION_RE_STRING,
    RUNTIME: RUNTIME_RE_STRING,
    API_VERSION: API_VERSION_RE_STRING,
    HANDLERS: validation.Optional(validation.Repeated(URLMap)),
    DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
    SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES)
  }
  def CheckInitialized(self):
    """Ensures that at least one url mapping is provided.

    Raises:
      MissingURLMapping when no URLMap objects are present in object.
      TooManyURLMappings when there are too many URLMap entries.
    """
    super(AppInfoExternal, self).CheckInitialized()
    # A configuration with no handlers can never serve anything; reject it.
    if not self.handlers:
      raise appinfo_errors.MissingURLMapping(
          'No URLMap entries found in application configuration')
    if len(self.handlers) > MAX_URL_MAPS:
      raise appinfo_errors.TooManyURLMappings(
          'Found more than %d URLMap entries in application configuration' %
          MAX_URL_MAPS)
def LoadSingleAppInfo(app_info):
  """Load a single AppInfo object where one and only one is expected.

  Args:
    app_info: A file-like object or string. If it is a string, parse it as
      a configuration file. If it is a file-like object, read in data and
      parse.

  Returns:
    An instance of AppInfoExternal as loaded from a YAML file.

  Raises:
    EmptyConfigurationFile when there are no documents in YAML file.
    MultipleConfigurationFile when there is more than one document in YAML
      file.
  """
  # Wire up the standard builder -> handler -> listener parsing pipeline.
  object_builder = yaml_object.ObjectBuilder(AppInfoExternal)
  event_handler = yaml_builder.BuilderHandler(object_builder)
  yaml_listener.EventListener(event_handler).Parse(app_info)
  # Exactly one parsed document is acceptable.
  parsed = event_handler.GetResults()
  if not parsed:
    raise appinfo_errors.EmptyConfigurationFile()
  if len(parsed) > 1:
    raise appinfo_errors.MultipleConfigurationFile()
  return parsed[0]
def ParseExpiration(expiration):
  """Parses an expiration delta string.

  Args:
    expiration: String of whitespace-separated amounts, each matching
      _DELTA_REGEX (e.g. '4d 5h 30m 15s').

  Returns:
    Time delta in seconds.
  """
  # Sum count * unit-seconds over every amount in the string. A missing or
  # unrecognized unit suffix defaults to seconds (factor 1).
  return sum(
      int(match.group(1)) *
      _EXPIRATION_CONVERSIONS.get(match.group(2).lower(), 1)
      for match in re.finditer(_DELTA_REGEX, expiration))
# Whitelist: allowed characters and maximum length for an uploaded path.
_file_path_positive_re = re.compile(r'^[ 0-9a-zA-Z\._\+/\$-]{1,256}$')
# Blacklists: relative-path components ('..', './', trailing '.') or a
# leading '-'; empty components or a trailing '/'; badly placed spaces.
_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-')
_file_path_negative_2_re = re.compile(r'//|/$')
_file_path_negative_3_re = re.compile(r'^ | $|/ | /')
def ValidFilename(filename):
"""Determines if filename is valid.
filename must be a valid pathname.
- It must contain only letters, numbers, _, +, /, $, ., and -.
- It must be less than 256 chars.
- It must not contain "/./", "/../", or "//".
- It must not end in "/".
- All spaces must be in the middle of a directory or file name.
Args:
filename: The filename to validate.
Returns:
An error string if the filename is invalid. Returns '' if the filename
is valid.
"""
if _file_path_positive_re.match(filename) is None:
return 'Invalid character in filename: %s' % filename
if _file_path_negative_1_re.search(filename) is not None:
return ('Filename cannot contain "." or ".." or start with "-": %s' %
filename)
if _file_path_negative_2_re.search(filename) is not None:
return 'Filename cannot have trailing / or contain //: %s' % filename
if _file_path_negative_3_re.search(filename) is not None:
return 'Any spaces must be in the middle of a filename: %s' % filename
return ''
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""The Python datastore admin API for managing indices and schemas.
"""
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.datastore import datastore_index
from google.appengine.datastore import datastore_pb
from google.appengine.runtime import apiproxy_errors
from google.appengine.datastore import entity_pb
# Maps index-definition direction strings (both short and long spellings)
# to the entity_pb.Index_Property direction enum values.
_DIRECTION_MAP = {
    'asc': entity_pb.Index_Property.ASCENDING,
    'ascending': entity_pb.Index_Property.ASCENDING,
    'desc': entity_pb.Index_Property.DESCENDING,
    'descending': entity_pb.Index_Property.DESCENDING,
}
def GetSchema(_app=None):
  """Infers an app's schema from the entities in the datastore.

  Note that the PropertyValue PBs in the returned EntityProtos are empty
  placeholders, so they may cause problems if you try to convert them to
  python values with e.g. datastore_types.  In particular, user values will
  throw UserNotFoundError because their email and auth domain fields will
  be empty.

  Args:
    _app: Optional application id; resolved to the current app when None.

  Returns:
    list of entity_pb.EntityProto, with kind and property names and types
  """
  request = api_base_pb.StringProto()
  request.set_value(datastore_types.ResolveAppId(_app))
  schema = datastore_pb.Schema()
  _Call('GetSchema', request, schema)
  return schema.kind_list()
def GetIndices(_app=None):
  """Fetches all composite indices in the datastore for this app.

  Args:
    _app: Optional application id; resolved to the current app when None.

  Returns:
    list of entity_pb.CompositeIndex

  Raises:
    A datastore error translated by datastore._ToDatastoreError if the
    underlying RPC fails.
  """
  req = api_base_pb.StringProto()
  req.set_value(datastore_types.ResolveAppId(_app))
  resp = datastore_pb.CompositeIndices()
  # Delegate to _Call so RPC dispatch and error translation stay in one
  # place, matching GetSchema and the other index operations above.
  _Call('GetIndices', req, resp)
  return resp.index_list()
def CreateIndex(index):
  """Creates a new composite index in the datastore for this app.

  Args:
    index: entity_pb.CompositeIndex to create.

  Returns:
    int, the id allocated to the index
  """
  response = api_base_pb.Integer64Proto()
  _Call('CreateIndex', index, response)
  return response.value()
def UpdateIndex(index):
  """Updates an index's status.  The entire index definition must be present.

  Args:
    index: entity_pb.CompositeIndex carrying the full definition and the
      new state.
  """
  void_response = api_base_pb.VoidProto()
  _Call('UpdateIndex', index, void_response)
def DeleteIndex(index):
  """Deletes an index.  The entire index definition must be present.

  Args:
    index: entity_pb.CompositeIndex identifying the index to delete.
  """
  void_response = api_base_pb.VoidProto()
  _Call('DeleteIndex', index, void_response)
def _Call(call, req, resp):
"""Generic method for making a datastore API call.
Args:
call: string, the name of the RPC call
req: the request PB. if the app_id field is not set, it defaults to the
local app.
resp: the response PB
"""
if hasattr(req, 'app_id'):
req.set_app_id(datastore_types.ResolveAppId(req.app_id(), 'req.app_id()'))
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', call, req, resp)
except apiproxy_errors.ApplicationError, err:
raise datastore._ToDatastoreError(err)
def IndexDefinitionToProto(app_id, index_definition):
  """Converts a single index definition into a CompositeIndex protobuf.

  Args:
    app_id: Application id for the new protocol buffer CompositeIndex.
    index_definition: datastore_index.Index object to convert.

  Returns:
    A new entity_pb.CompositeIndex with id 0, state WRITE_ONLY, and the
    kind/ancestor/property information copied from index_definition.
  """
  composite = entity_pb.CompositeIndex()
  composite.set_app_id(app_id)
  composite.set_id(0)
  composite.set_state(entity_pb.CompositeIndex.WRITE_ONLY)

  definition = composite.mutable_definition()
  definition.set_entity_type(index_definition.kind)
  definition.set_ancestor(index_definition.ancestor)

  props = index_definition.properties
  if props is not None:
    for prop in props:
      prop_pb = definition.add_property()
      prop_pb.set_name(prop.name)
      prop_pb.set_direction(_DIRECTION_MAP[prop.direction])
  return composite
def IndexDefinitionsToProtos(app_id, index_definitions):
  """Converts multiple index definitions to composite index records.

  Args:
    app_id: Application id for each new protocol buffer CompositeIndex.
    index_definitions: A list of datastore_index.Index objects to convert.

  Returns:
    A list of transformed entity_pb.CompositeIndex entities with default
    values set and index information filled in.
  """
  protos = []
  for definition in index_definitions:
    protos.append(IndexDefinitionToProto(app_id, definition))
  return protos
def ProtoToIndexDefinition(proto):
  """Converts an index protocol buffer into an index definition.

  Args:
    proto: An instance of entity_pb.CompositeIndex to convert.

  Returns:
    A new instance of datastore_index.Index.
  """
  definition = proto.definition()
  props = []
  for prop_pb in definition.property_list():
    prop = datastore_index.Property(name=prop_pb.name())
    # Properties default to ascending; only mark the descending ones.
    if prop_pb.direction() == entity_pb.Index_Property.DESCENDING:
      prop.direction = 'descending'
    props.append(prop)

  result = datastore_index.Index(kind=definition.entity_type(),
                                 properties=props)
  if definition.ancestor():
    result.ancestor = True
  return result
def ProtosToIndexDefinitions(protos):
  """Converts multiple index protocol buffers to index definitions.

  Args:
    protos: A list of entity_pb.CompositeIndex records.

  Returns:
    A list of datastore_index.Index instances.
  """
  results = []
  for record in protos:
    results.append(ProtoToIndexDefinition(record))
  return results
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Exceptions raised my mail API."""
class Error(Exception):
  """Base error type for all mail API exceptions."""
class BadRequestError(Error):
  """Raised when the email message is not valid."""
class InvalidSenderError(Error):
  """Raised when the sender is not permitted to send mail for this application."""
class InvalidEmailError(Error):
  """Raised when a bad email address is set on an email field."""
class InvalidAttachmentTypeError(Error):
  """Raised for an invalid attachment file type.  We don't send viruses!"""
class MissingRecipientsError(Error):
  """Raised when no recipients are specified in the message."""
class MissingSenderError(Error):
  """Raised when no sender is specified in the message."""
class MissingSubjectError(Error):
  """Raised when no subject is specified in the message."""
class MissingBodyError(Error):
  """Raised when no body is specified in the message."""
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""PyYAML event listener
Contains class which interprets YAML events and forwards them to
a handler object.
"""
from google.appengine.api import yaml_errors
import yaml
# Maps each PyYAML event class to the name of the EventHandler method that
# should receive it.  EventListener pre-binds these at construction time.
_EVENT_METHOD_MAP = {
  yaml.events.StreamStartEvent: 'StreamStart',
  yaml.events.StreamEndEvent: 'StreamEnd',
  yaml.events.DocumentStartEvent: 'DocumentStart',
  yaml.events.DocumentEndEvent: 'DocumentEnd',
  yaml.events.AliasEvent: 'Alias',
  yaml.events.ScalarEvent: 'Scalar',
  yaml.events.SequenceStartEvent: 'SequenceStart',
  yaml.events.SequenceEndEvent: 'SequenceEnd',
  yaml.events.MappingStartEvent: 'MappingStart',
  yaml.events.MappingEndEvent: 'MappingEnd',
}
class EventHandler(object):
  """Handler interface for parsing YAML files.

  Implement this interface to define a specific YAML event handling class.
  Instances of implementing classes are passed to the constructor of
  EventListener to act as receivers of YAML parse events.  Every handler
  method receives the PyYAML event and the loader that produced it; the
  default implementations do nothing.
  """

  def StreamStart(self, event, loader):
    """Handles a stream-start event."""

  def StreamEnd(self, event, loader):
    """Handles a stream-end event."""

  def DocumentStart(self, event, loader):
    """Handles a document-start event."""

  def DocumentEnd(self, event, loader):
    """Handles a document-end event."""

  def Alias(self, event, loader):
    """Handles an alias event."""

  def Scalar(self, event, loader):
    """Handles a scalar event."""

  def SequenceStart(self, event, loader):
    """Handles a sequence-start event."""

  def SequenceEnd(self, event, loader):
    """Handles a sequence-end event."""

  def MappingStart(self, event, loader):
    """Handles a mapping-start event."""

  def MappingEnd(self, event, loader):
    """Handles a mapping-end event."""
class EventListener(object):
  """Helper class to re-map PyYAML events to method calls.

  By default, PyYAML generates its events via a Python generator.  This class
  is a helper that iterates over the events from the PyYAML parser and
  forwards them to a handler class in the form of method calls.  For
  simplicity, the underlying event is forwarded to the handler as a parameter
  to the call.

  This object does not itself produce iterable objects, but is really a
  mapping to a given handler instance.

  Example use:

    class PrintDocumentHandler(object):
      def DocumentStart(event):
        print "A new document has been started"

    EventListener(PrintDocumentHandler()).Parse('''
      key1: value1
      ---
      key2: value2
      '''

    >>> A new document has been started
        A new document has been started

  In the example above, the implemented handler class (PrintDocumentHandler)
  has a single method which reports each time a new document is started
  within a YAML file.  It is not necessary to subclass the EventListener,
  merely it receives a PrintDocumentHandler instance.  Every time a new
  document begins, PrintDocumentHandler.DocumentStart is called with the
  PyYAML event passed in as its parameter.
  """

  def __init__(self, event_handler):
    """Initialize PyYAML event listener.

    Constructs internal mapping directly from event type to method on actual
    handler.  This prevents reflection being used during actual parse time.

    Args:
      event_handler: Event handler that will receive mapped events.  Must
        implement at least one appropriate handler method named from
        the values of the _EVENT_METHOD_MAP.

    Raises:
      ListenerConfigurationError if event_handler is not an EventHandler.
    """
    if not isinstance(event_handler, EventHandler):
      raise yaml_errors.ListenerConfigurationError(
          'Must provide event handler of type yaml_listener.EventHandler')
    # Pre-bind each PyYAML event class to the handler's bound method so that
    # dispatch during parsing is a single dictionary lookup.
    self._event_method_map = {}
    for event, method in _EVENT_METHOD_MAP.iteritems():
      self._event_method_map[event] = getattr(event_handler, method)

  def HandleEvent(self, event, loader=None):
    """Handle individual PyYAML event.

    Args:
      event: Event to forward to the mapped handler method.
      loader: Loader instance that produced the event; forwarded to the
        handler alongside the event.

    Raises:
      IllegalEvent when receives an unrecognized or unsupported event type.
    """
    if event.__class__ not in _EVENT_METHOD_MAP:
      raise yaml_errors.IllegalEvent(
          "%s is not a valid PyYAML class" % event.__class__.__name__)
    # Defensive re-check: _event_method_map is built from _EVENT_METHOD_MAP
    # in __init__, so for recognized events this is normally always true.
    if event.__class__ in self._event_method_map:
      self._event_method_map[event.__class__](event, loader)

  def _HandleEvents(self, events):
    """Iterate over all events and send them to handler.

    This method is not meant to be called from the interface.
    Only use in tests.

    Args:
      events: Iterator or generator containing (event, loader) pairs to
        process.

    Raises:
      EventListenerParserError when a yaml.parser.ParserError is raised.
      EventError when an exception occurs during the handling of an event.
    """
    for event in events:
      try:
        self.HandleEvent(*event)
      except Exception, e:
        # Wrap the failure together with the event being handled so callers
        # can report which part of the document broke.
        event_object, loader = event
        raise yaml_errors.EventError(e, event_object)

  def _GenerateEventParameters(self,
                               stream,
                               loader_class=yaml.loader.SafeLoader):
    """Creates a generator that yields event, loader parameter pairs.

    For use as parameters to HandleEvent method for use by Parse method.
    During testing, _GenerateEventParameters is simulated by allowing
    the harness to pass in a list of pairs as the parameter.

    A list of (event, loader) pairs must be passed to _HandleEvents
    otherwise it is not possible to pass the loader instance to the handler.

    Also responsible for instantiating the loader from the loader_class
    parameter.

    Args:
      stream: String document or open file object to process as per the
        yaml.parse method.  Any object that implements a 'read()' method
        which returns a string document will work.
      loader_class: Loader class to use as per the yaml.parse method.  Used
        to instantiate a new yaml.loader instance.  Also used for dependency
        injection in tests.

    Yields:
      Tuple(event, loader) where:
        event: Event emitted by PyYAML loader.
        loader: The loader instance that emitted the event.

    Raises:
      EventListenerYAMLError wrapping any yaml.error.YAMLError raised while
      parsing.
    """
    assert loader_class is not None
    try:
      loader = loader_class(stream)
      # Drain the loader's event stream, pairing each event with the loader
      # so handlers can query parser state.
      while loader.check_event():
        yield (loader.get_event(), loader)
    except yaml.error.YAMLError, e:
      raise yaml_errors.EventListenerYAMLError(e)

  def Parse(self, stream, loader_class=yaml.loader.SafeLoader):
    """Call YAML parser to generate and handle all events.

    Calls PyYAML parser and sends resulting generator to handle_event method
    for processing.

    Args:
      stream: String document or open file object to process as per the
        yaml.parse method.  Any object that implements a 'read()' method
        which returns a string document will work with the YAML parser.
      loader_class: Used for dependency injection.
    """
    self._HandleEvents(self._GenerateEventParameters(stream, loader_class))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import StringProto
class URLFetchServiceError(ProtocolBuffer.ProtocolMessage):
  """Field-less message hosting the urlfetch service ErrorCode enum.

  NOTE(review): this follows the machine-generated ProtocolMessage style;
  the enum values and wire-format handling should not be edited by hand.
  """

  # ErrorCode enum values returned by the urlfetch service.
  OK = 0
  INVALID_URL = 1
  FETCH_ERROR = 2
  UNSPECIFIED_ERROR = 3
  RESPONSE_TOO_LARGE = 4
  DEADLINE_EXCEEDED = 5

  # Maps numeric error codes back to their symbolic names.
  _ErrorCode_NAMES = {
    0: "OK",
    1: "INVALID_URL",
    2: "FETCH_ERROR",
    3: "UNSPECIFIED_ERROR",
    4: "RESPONSE_TOO_LARGE",
    5: "DEADLINE_EXCEEDED",
  }

  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    pass
    # Optionally initialize from an encoded protobuf string.
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # A message with no fields is equal to any other instance of its type.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # No known fields: skip all data, failing only on a corrupt zero tag.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class URLFetchRequest_Header(ProtocolBuffer.ProtocolMessage):
  """One HTTP request header: a required key/value string pair.

  NOTE(review): machine-generated ProtocolMessage style; the tag numbers
  (34 for key, 42 for value, 28 group-end) encode the wire format and must
  not be changed by hand.
  """

  # Presence flags and default values for the two required string fields.
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""

  def __init__(self, contents=None):
    # Optionally initialize from an encoded protobuf string.
    if contents is not None: self.MergeFromString(contents)

  def key(self): return self.key_

  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x

  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""

  def has_key(self): return self.has_key_

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""

  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    """Merges every set field of message x into this message."""
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    """Returns 1 when x has identical field presence and values."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 only when both required fields (key, value) are set."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    """Returns the encoded size in bytes (two 1-byte tags + both strings)."""
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2

  def Clear(self):
    self.clear_key()
    self.clear_value()

  def OutputUnchecked(self, out):
    """Serializes both fields to out without checking initialization."""
    out.putVarInt32(34)
    out.putPrefixedString(self.key_)
    out.putVarInt32(42)
    out.putPrefixedString(self.value_)

  def TryMerge(self, d):
    """Decodes fields from decoder d until the group-end tag (28)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 28: break
      if tt == 34:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 42:
        self.set_value(d.getPrefixedString())
        continue
      # Tag 0 means corrupt/truncated input; any other tag is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("Key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("Value: %s\n" % self.DebugFormatString(self.value_))
    return res
class URLFetchRequest(ProtocolBuffer.ProtocolMessage):
  """Request message for the urlfetch service.

  Fields: required method (RequestMethod enum) and url, a repeated group of
  headers, an optional payload, and an optional followredirects flag that
  defaults to true.

  NOTE(review): machine-generated ProtocolMessage style; field tag numbers
  encode the wire format and must not be changed by hand.
  """

  # RequestMethod enum values.
  GET = 1
  POST = 2
  HEAD = 3
  PUT = 4
  DELETE = 5

  # Maps numeric method codes back to their symbolic names.
  _RequestMethod_NAMES = {
    1: "GET",
    2: "POST",
    3: "HEAD",
    4: "PUT",
    5: "DELETE",
  }

  def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
  RequestMethod_Name = classmethod(RequestMethod_Name)

  # Presence flags and defaults; followredirects defaults to true (1).
  has_method_ = 0
  method_ = 0
  has_url_ = 0
  url_ = ""
  has_payload_ = 0
  payload_ = ""
  has_followredirects_ = 0
  followredirects_ = 1

  def __init__(self, contents=None):
    # Repeated group of URLFetchRequest_Header messages.
    self.header_ = []
    # Optionally initialize from an encoded protobuf string.
    if contents is not None: self.MergeFromString(contents)

  def method(self): return self.method_

  def set_method(self, x):
    self.has_method_ = 1
    self.method_ = x

  def clear_method(self):
    if self.has_method_:
      self.has_method_ = 0
      self.method_ = 0

  def has_method(self): return self.has_method_

  def url(self): return self.url_

  def set_url(self, x):
    self.has_url_ = 1
    self.url_ = x

  def clear_url(self):
    if self.has_url_:
      self.has_url_ = 0
      self.url_ = ""

  def has_url(self): return self.has_url_

  def header_size(self): return len(self.header_)
  def header_list(self): return self.header_

  def header(self, i):
    return self.header_[i]

  def mutable_header(self, i):
    return self.header_[i]

  def add_header(self):
    # Appends and returns a new empty header for the caller to fill in.
    x = URLFetchRequest_Header()
    self.header_.append(x)
    return x

  def clear_header(self):
    self.header_ = []

  def payload(self): return self.payload_

  def set_payload(self, x):
    self.has_payload_ = 1
    self.payload_ = x

  def clear_payload(self):
    if self.has_payload_:
      self.has_payload_ = 0
      self.payload_ = ""

  def has_payload(self): return self.has_payload_

  def followredirects(self): return self.followredirects_

  def set_followredirects(self, x):
    self.has_followredirects_ = 1
    self.followredirects_ = x

  def clear_followredirects(self):
    if self.has_followredirects_:
      self.has_followredirects_ = 0
      self.followredirects_ = 1

  def has_followredirects(self): return self.has_followredirects_

  def MergeFrom(self, x):
    """Merges every set field of message x into this message."""
    assert x is not self
    if (x.has_method()): self.set_method(x.method())
    if (x.has_url()): self.set_url(x.url())
    for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
    if (x.has_payload()): self.set_payload(x.payload())
    if (x.has_followredirects()): self.set_followredirects(x.followredirects())

  def Equals(self, x):
    """Returns 1 when x has identical field presence and values."""
    if x is self: return 1
    if self.has_method_ != x.has_method_: return 0
    if self.has_method_ and self.method_ != x.method_: return 0
    if self.has_url_ != x.has_url_: return 0
    if self.has_url_ and self.url_ != x.url_: return 0
    if len(self.header_) != len(x.header_): return 0
    for e1, e2 in zip(self.header_, x.header_):
      if e1 != e2: return 0
    if self.has_payload_ != x.has_payload_: return 0
    if self.has_payload_ and self.payload_ != x.payload_: return 0
    if self.has_followredirects_ != x.has_followredirects_: return 0
    if self.has_followredirects_ and self.followredirects_ != x.followredirects_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 only when required fields (method, url) and all headers are set."""
    initialized = 1
    if (not self.has_method_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: method not set.')
    if (not self.has_url_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: url not set.')
    for p in self.header_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Returns the encoded size of the message in bytes."""
    n = 0
    n += self.lengthVarInt64(self.method_)
    n += self.lengthString(len(self.url_))
    # Each repeated group costs two tag bytes (start + end) plus its body.
    n += 2 * len(self.header_)
    for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
    if (self.has_payload_): n += 1 + self.lengthString(len(self.payload_))
    if (self.has_followredirects_): n += 2
    return n + 2

  def Clear(self):
    self.clear_method()
    self.clear_url()
    self.clear_header()
    self.clear_payload()
    self.clear_followredirects()

  def OutputUnchecked(self, out):
    """Serializes all set fields to out without checking initialization."""
    out.putVarInt32(8)
    out.putVarInt32(self.method_)
    out.putVarInt32(18)
    out.putPrefixedString(self.url_)
    for i in xrange(len(self.header_)):
      # Header group: start tag 27, body, end tag 28.
      out.putVarInt32(27)
      self.header_[i].OutputUnchecked(out)
      out.putVarInt32(28)
    if (self.has_payload_):
      out.putVarInt32(50)
      out.putPrefixedString(self.payload_)
    if (self.has_followredirects_):
      out.putVarInt32(56)
      out.putBoolean(self.followredirects_)

  def TryMerge(self, d):
    """Decodes fields from decoder d until its data is exhausted."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_method(d.getVarInt32())
        continue
      if tt == 18:
        self.set_url(d.getPrefixedString())
        continue
      if tt == 27:
        self.add_header().TryMerge(d)
        continue
      if tt == 50:
        self.set_payload(d.getPrefixedString())
        continue
      if tt == 56:
        self.set_followredirects(d.getBoolean())
        continue
      # Tag 0 means corrupt/truncated input; any other tag is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_method_: res+=prefix+("Method: %s\n" % self.DebugFormatInt32(self.method_))
    if self.has_url_: res+=prefix+("Url: %s\n" % self.DebugFormatString(self.url_))
    cnt=0
    for e in self.header_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Header%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_payload_: res+=prefix+("Payload: %s\n" % self.DebugFormatString(self.payload_))
    if self.has_followredirects_: res+=prefix+("FollowRedirects: %s\n" % self.DebugFormatBool(self.followredirects_))
    return res

  # Field numbers for this message.
  kMethod = 1
  kUrl = 2
  kHeaderGroup = 3
  kHeaderKey = 4
  kHeaderValue = 5
  kPayload = 6
  kFollowRedirects = 7

  _TEXT = (
   "ErrorCode",
   "Method",
   "Url",
   "Header",
   "Key",
   "Value",
   "Payload",
   "FollowRedirects",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class URLFetchResponse_Header(ProtocolBuffer.ProtocolMessage):
  """One HTTP response header: a required key/value string pair.

  NOTE(review): machine-generated ProtocolMessage style; the tag numbers
  (34 for key, 42 for value, 28 group-end) encode the wire format and must
  not be changed by hand.
  """

  # Presence flags and default values for the two required string fields.
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""

  def __init__(self, contents=None):
    # Optionally initialize from an encoded protobuf string.
    if contents is not None: self.MergeFromString(contents)

  def key(self): return self.key_

  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x

  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""

  def has_key(self): return self.has_key_

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""

  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    """Merges every set field of message x into this message."""
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    """Returns 1 when x has identical field presence and values."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 only when both required fields (key, value) are set."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    """Returns the encoded size in bytes (two 1-byte tags + both strings)."""
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2

  def Clear(self):
    self.clear_key()
    self.clear_value()

  def OutputUnchecked(self, out):
    """Serializes both fields to out without checking initialization."""
    out.putVarInt32(34)
    out.putPrefixedString(self.key_)
    out.putVarInt32(42)
    out.putPrefixedString(self.value_)

  def TryMerge(self, d):
    """Decodes fields from decoder d until the group-end tag (28)."""
    while 1:
      tt = d.getVarInt32()
      if tt == 28: break
      if tt == 34:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 42:
        self.set_value(d.getPrefixedString())
        continue
      # Tag 0 means corrupt/truncated input; any other tag is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("Key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("Value: %s\n" % self.DebugFormatString(self.value_))
    return res
class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
  """Response message for the urlfetch service.

  Fields: optional content, required statuscode, a repeated group of
  headers, and an optional contentwastruncated flag.

  NOTE(review): machine-generated ProtocolMessage style; field tag numbers
  encode the wire format and must not be changed by hand.
  """

  # Presence flags and default values.
  has_content_ = 0
  content_ = ""
  has_statuscode_ = 0
  statuscode_ = 0
  has_contentwastruncated_ = 0
  contentwastruncated_ = 0

  def __init__(self, contents=None):
    # Repeated group of URLFetchResponse_Header messages.
    self.header_ = []
    # Optionally initialize from an encoded protobuf string.
    if contents is not None: self.MergeFromString(contents)

  def content(self): return self.content_

  def set_content(self, x):
    self.has_content_ = 1
    self.content_ = x

  def clear_content(self):
    if self.has_content_:
      self.has_content_ = 0
      self.content_ = ""

  def has_content(self): return self.has_content_

  def statuscode(self): return self.statuscode_

  def set_statuscode(self, x):
    self.has_statuscode_ = 1
    self.statuscode_ = x

  def clear_statuscode(self):
    if self.has_statuscode_:
      self.has_statuscode_ = 0
      self.statuscode_ = 0

  def has_statuscode(self): return self.has_statuscode_

  def header_size(self): return len(self.header_)
  def header_list(self): return self.header_

  def header(self, i):
    return self.header_[i]

  def mutable_header(self, i):
    return self.header_[i]

  def add_header(self):
    # Appends and returns a new empty header for the caller to fill in.
    x = URLFetchResponse_Header()
    self.header_.append(x)
    return x

  def clear_header(self):
    self.header_ = []

  def contentwastruncated(self): return self.contentwastruncated_

  def set_contentwastruncated(self, x):
    self.has_contentwastruncated_ = 1
    self.contentwastruncated_ = x

  def clear_contentwastruncated(self):
    if self.has_contentwastruncated_:
      self.has_contentwastruncated_ = 0
      self.contentwastruncated_ = 0

  def has_contentwastruncated(self): return self.has_contentwastruncated_

  def MergeFrom(self, x):
    """Merges every set field of message x into this message."""
    assert x is not self
    if (x.has_content()): self.set_content(x.content())
    if (x.has_statuscode()): self.set_statuscode(x.statuscode())
    for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
    if (x.has_contentwastruncated()): self.set_contentwastruncated(x.contentwastruncated())

  def Equals(self, x):
    """Returns 1 when x has identical field presence and values."""
    if x is self: return 1
    if self.has_content_ != x.has_content_: return 0
    if self.has_content_ and self.content_ != x.content_: return 0
    if self.has_statuscode_ != x.has_statuscode_: return 0
    if self.has_statuscode_ and self.statuscode_ != x.statuscode_: return 0
    if len(self.header_) != len(x.header_): return 0
    for e1, e2 in zip(self.header_, x.header_):
      if e1 != e2: return 0
    if self.has_contentwastruncated_ != x.has_contentwastruncated_: return 0
    if self.has_contentwastruncated_ and self.contentwastruncated_ != x.contentwastruncated_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 only when statuscode and all headers are set/initialized."""
    initialized = 1
    if (not self.has_statuscode_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: statuscode not set.')
    for p in self.header_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Returns the encoded size of the message in bytes."""
    n = 0
    if (self.has_content_): n += 1 + self.lengthString(len(self.content_))
    n += self.lengthVarInt64(self.statuscode_)
    # Each repeated group costs two tag bytes (start + end) plus its body.
    n += 2 * len(self.header_)
    for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
    if (self.has_contentwastruncated_): n += 2
    return n + 1

  def Clear(self):
    self.clear_content()
    self.clear_statuscode()
    self.clear_header()
    self.clear_contentwastruncated()

  def OutputUnchecked(self, out):
    """Serializes all set fields to out without checking initialization."""
    if (self.has_content_):
      out.putVarInt32(10)
      out.putPrefixedString(self.content_)
    out.putVarInt32(16)
    out.putVarInt32(self.statuscode_)
    for i in xrange(len(self.header_)):
      # Header group: start tag 27, body, end tag 28.
      out.putVarInt32(27)
      self.header_[i].OutputUnchecked(out)
      out.putVarInt32(28)
    if (self.has_contentwastruncated_):
      out.putVarInt32(48)
      out.putBoolean(self.contentwastruncated_)

  def TryMerge(self, d):
    """Decodes fields from decoder d until its data is exhausted."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_content(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_statuscode(d.getVarInt32())
        continue
      if tt == 27:
        self.add_header().TryMerge(d)
        continue
      if tt == 48:
        self.set_contentwastruncated(d.getBoolean())
        continue
      # Tag 0 means corrupt/truncated input; any other tag is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_content_: res+=prefix+("Content: %s\n" % self.DebugFormatString(self.content_))
    if self.has_statuscode_: res+=prefix+("StatusCode: %s\n" % self.DebugFormatInt32(self.statuscode_))
    cnt=0
    for e in self.header_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Header%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_contentwastruncated_: res+=prefix+("ContentWasTruncated: %s\n" % self.DebugFormatBool(self.contentwastruncated_))
    return res

  # Field numbers for this message.
  kContent = 1
  kStatusCode = 2
  kHeaderGroup = 3
  kHeaderKey = 4
  kHeaderValue = 5
  kContentWasTruncated = 6

  _TEXT = (
   "ErrorCode",
   "Content",
   "StatusCode",
   "Header",
   "Key",
   "Value",
   "ContentWasTruncated",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this module: the urlfetch request/response message classes.
__all__ = ['URLFetchServiceError','URLFetchRequest','URLFetchRequest_Header','URLFetchResponse','URLFetchResponse_Header']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
In-memory persistent stub for the Python datastore API. Gets, queries,
and searches are implemented as in-memory scans over all entities.
Stores entities across sessions as pickled proto bufs in a single file. On
startup, all entities are read from the file and loaded into memory. On
every Put(), the file is wiped and all entities are written from scratch.
Clients can also manually Read() and Write() the file themselves.
Transactions are serialized through __tx_lock. Each transaction acquires it
when it begins and releases it when it commits or rolls back. This is
important, since there are other member variables like __tx_snapshot that are
per-transaction, so they should only be used by one tx at a time.
"""
import datetime
import logging
import os
import struct
import sys
import tempfile
import threading
import warnings
import cPickle as pickle
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
from google.appengine.api import datastore
from google.appengine.api import datastore_admin
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
from google.appengine.datastore import datastore_index
from google.appengine.runtime import apiproxy_errors
from google.net.proto import ProtocolBuffer
from google.appengine.datastore import entity_pb
# os.tempnam (used by __WritePickled below) emits a RuntimeWarning about being
# a potential security risk; silence it, since this stub is dev-server-only.
warnings.filterwarnings('ignore', 'tempnam is a potential security risk')

# Make these protos usable as dict keys (e.g. __query_history maps Query
# protos to run counts) by hashing their serialized bytes.
entity_pb.Reference.__hash__ = lambda self: hash(self.Encode())
datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())

# Query limits enforced by this stub (presumably mirroring the production
# datastore's limits — TODO confirm against current service quotas).
_MAXIMUM_RESULTS = 1000       # max entities returned per query
_MAX_QUERY_OFFSET = 1000      # max allowed query offset
_MAX_QUERY_COMPONENTS = 100   # max filters + sort orders + ancestor, combined
class _StoredEntity(object):
  """Bundles the three representations of a single stored entity.

  Attributes:
    protobuf: the entity as a native entity_pb.EntityProto object.
    encoded_protobuf: the serialized (binary) form of that protobuf.
    native: the entity as a datastore.Entity instance.
  """

  def __init__(self, entity):
    """Wraps *entity* and precomputes its alternate representations.

    The serialized and native forms are computed once here, at store time,
    rather than on every read.

    Args:
      entity: entity_pb.EntityProto to store.
    """
    self.protobuf = entity
    self.encoded_protobuf = entity.Encode()
    self.native = datastore.Entity._FromPb(entity)
class DatastoreFileStub(apiproxy_stub.APIProxyStub):
  """ Persistent stub for the Python datastore API.

  Stores all entities in memory, and persists them to a file as pickled
  protocol buffers. A DatastoreFileStub instance handles a single app's data
  and is backed by files on disk.
  """

  # Maps Python value types to the entity_pb.PropertyValue tag number used to
  # encode them. Used both for filter type matching and for the tag-based
  # cross-type ordering in _Dynamic_RunQuery's order_compare_properties.
  _PROPERTY_TYPE_TAGS = {
    datastore_types.Blob: entity_pb.PropertyValue.kstringValue,
    bool: entity_pb.PropertyValue.kbooleanValue,
    datastore_types.Category: entity_pb.PropertyValue.kstringValue,
    datetime.datetime: entity_pb.PropertyValue.kint64Value,
    datastore_types.Email: entity_pb.PropertyValue.kstringValue,
    float: entity_pb.PropertyValue.kdoubleValue,
    datastore_types.GeoPt: entity_pb.PropertyValue.kPointValueGroup,
    datastore_types.IM: entity_pb.PropertyValue.kstringValue,
    int: entity_pb.PropertyValue.kint64Value,
    datastore_types.Key: entity_pb.PropertyValue.kReferenceValueGroup,
    datastore_types.Link: entity_pb.PropertyValue.kstringValue,
    long: entity_pb.PropertyValue.kint64Value,
    datastore_types.PhoneNumber: entity_pb.PropertyValue.kstringValue,
    datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
    datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
    str: entity_pb.PropertyValue.kstringValue,
    datastore_types.Text: entity_pb.PropertyValue.kstringValue,
    type(None): 0,
    unicode: entity_pb.PropertyValue.kstringValue,
    users.User: entity_pb.PropertyValue.kUserValueGroup,
    }

  # Composite-index lifecycle states (aliases of the proto enum values).
  WRITE_ONLY = entity_pb.CompositeIndex.WRITE_ONLY
  READ_WRITE = entity_pb.CompositeIndex.READ_WRITE
  DELETED = entity_pb.CompositeIndex.DELETED
  ERROR = entity_pb.CompositeIndex.ERROR

  # Legal index state transitions, checked by _Dynamic_UpdateIndex:
  # current state -> frozenset of states it may move to.
  _INDEX_STATE_TRANSITIONS = {
    WRITE_ONLY: frozenset((READ_WRITE, DELETED, ERROR)),
    READ_WRITE: frozenset((DELETED,)),
    ERROR: frozenset((DELETED,)),
    DELETED: frozenset((ERROR,)),
    }

  def __init__(self,
               app_id,
               datastore_file,
               history_file,
               require_indexes=False,
               service_name='datastore_v3'):
    """Constructor.

    Initializes and loads the datastore from the backing files, if they exist.

    Args:
      app_id: string
      datastore_file: string, stores all entities across sessions. Use None
        not to use a file.
      history_file: string, stores query history. Use None as with
        datastore_file.
      require_indexes: bool, default False. If True, composite indexes must
        exist in index.yaml for queries that need them.
      service_name: Service name expected for all calls.
    """
    super(DatastoreFileStub, self).__init__(service_name)

    assert isinstance(app_id, basestring) and app_id != ''
    self.__app_id = app_id
    self.__datastore_file = datastore_file
    self.__history_file = history_file

    self.__entities = {}          # (app, kind) -> {Reference: _StoredEntity}
    self.__schema_cache = {}      # (app, kind) -> cached schema EntityProto
    self.__tx_snapshot = {}       # copy of __entities taken at tx begin
    self.__queries = {}           # cursor id -> (remaining results, count)
    self.__transactions = {}      # tx handle -> None (existence check only)
    self.__indexes = {}           # app id -> [entity_pb.CompositeIndex]
    self.__require_indexes = require_indexes
    self.__query_history = {}     # datastore_pb.Query -> times run

    self.__next_id = 1
    self.__next_cursor = 1
    self.__next_tx_handle = 1
    self.__next_index_id = 1

    # One lock per shared counter/structure; see the attribute each guards.
    self.__id_lock = threading.Lock()          # guards __next_id
    self.__cursor_lock = threading.Lock()      # guards __next_cursor
    self.__tx_handle_lock = threading.Lock()   # guards __next_tx_handle
    self.__index_id_lock = threading.Lock()    # guards __next_index_id
    self.__tx_lock = threading.Lock()          # serializes whole transactions
    self.__entities_lock = threading.Lock()    # guards mutation of __entities
    self.__file_lock = threading.Lock()        # guards on-disk file access
    self.__indexes_lock = threading.Lock()     # guards __indexes

    self.Read()

  def Clear(self):
    """ Clears the datastore by deleting all currently stored entities and
    queries. """
    # Note: does not clear __indexes or the on-disk files.
    self.__entities = {}
    self.__queries = {}
    self.__transactions = {}
    self.__query_history = {}
    self.__schema_cache = {}

  def _AppKindForKey(self, key):
    """ Get (app, kind) tuple from given key.

    The (app, kind) tuple is used as an index into several internal
    dictionaries, e.g. __entities.

    Args:
      key: entity_pb.Reference

    Returns:
      Tuple (app, kind), both are unicode strings.
    """
    # The kind is the type of the last element of the key path.
    last_path = key.path().element_list()[-1]
    return key.app(), last_path.type()

  def _StoreEntity(self, entity):
    """ Store the given entity.

    Caller is responsible for locking (__entities_lock) and for persisting
    to disk afterwards if desired.

    Args:
      entity: entity_pb.EntityProto
    """
    key = entity.key()
    app_kind = self._AppKindForKey(key)
    if app_kind not in self.__entities:
      self.__entities[app_kind] = {}
    self.__entities[app_kind][key] = _StoredEntity(entity)

    # The cached schema for this kind may now be stale; drop it.
    if app_kind in self.__schema_cache:
      del self.__schema_cache[app_kind]

  # Exceptions that indicate a corrupt/incompatible pickled datastore file.
  READ_PB_EXCEPTIONS = (ProtocolBuffer.ProtocolBufferDecodeError, LookupError,
                        TypeError, ValueError)
  READ_ERROR_MSG = ('Data in %s is corrupt or a different version. '
                    'Try running with the --clear_datastore flag.\n%r')
  READ_PY250_MSG = ('Are you using FloatProperty and/or GeoPtProperty? '
                    'Unfortunately loading float values from the datastore '
                    'file does not work with Python 2.5.0. '
                    'Please upgrade to a newer Python 2.5 release or use '
                    'the --clear_datastore flag.\n')

  def Read(self):
    """ Reads the datastore and history files into memory.

    The in-memory query history is cleared, but the datastore is *not*
    cleared; the entities in the files are merged into the entities in memory.
    If you want them to overwrite the in-memory datastore, call Clear() before
    calling Read().

    If the datastore file contains an entity with the same app name, kind, and
    key as an entity already in the datastore, the entity from the file
    overwrites the entity in the datastore.

    Also sets __next_id to one greater than the highest id allocated so far.
    """
    if self.__datastore_file and self.__datastore_file != '/dev/null':
      for encoded_entity in self.__ReadPickled(self.__datastore_file):
        try:
          entity = entity_pb.EntityProto(encoded_entity)
        except self.READ_PB_EXCEPTIONS, e:
          raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                               (self.__datastore_file, e))
        except struct.error, e:
          # Python 2.5.0 has a known struct-unpack bug that breaks loading
          # of float property values; give a targeted error message.
          if (sys.version_info[0:3] == (2, 5, 0)
              and e.message.startswith('unpack requires a string argument')):
            raise datastore_errors.InternalError(self.READ_PY250_MSG +
                                                 self.READ_ERROR_MSG %
                                                 (self.__datastore_file, e))
          else:
            raise

        self._StoreEntity(entity)

        # Advance the id allocator past every numeric id seen on disk.
        last_path = entity.key().path().element_list()[-1]
        if last_path.has_id() and last_path.id() >= self.__next_id:
          self.__next_id = last_path.id() + 1

      self.__query_history = {}
      for encoded_query, count in self.__ReadPickled(self.__history_file):
        try:
          query_pb = datastore_pb.Query(encoded_query)
        except self.READ_PB_EXCEPTIONS, e:
          raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                               (self.__history_file, e))
        if query_pb in self.__query_history:
          self.__query_history[query_pb] += count
        else:
          self.__query_history[query_pb] = count

  def Write(self):
    """ Writes out the datastore and history files. Be careful! If the files
    already exist, this method overwrites them!
    """
    self.__WriteDatastore()
    self.__WriteHistory()

  def __WriteDatastore(self):
    """ Writes out the datastore file. Be careful! If the file already exist,
    this method overwrites it!
    """
    if self.__datastore_file and self.__datastore_file != '/dev/null':
      # Persist the encoded (serialized) form of every stored entity.
      encoded = []
      for kind_dict in self.__entities.values():
        for entity in kind_dict.values():
          encoded.append(entity.encoded_protobuf)

      self.__WritePickled(encoded, self.__datastore_file)

  def __WriteHistory(self):
    """ Writes out the history file. Be careful! If the file already exist,
    this method overwrites it!
    """
    if self.__history_file and self.__history_file != '/dev/null':
      # Persist (serialized query, run count) pairs.
      encoded = [(query.Encode(), count)
                 for query, count in self.__query_history.items()]

      self.__WritePickled(encoded, self.__history_file)

  def __ReadPickled(self, filename):
    """Reads a pickled object from the given file and returns it.

    Returns an empty list if the file is missing/unreadable or on the first
    read (the fall-through return at the bottom).
    """
    self.__file_lock.acquire()

    try:
      try:
        if filename and filename != '/dev/null' and os.path.isfile(filename):
          return pickle.load(open(filename, 'rb'))
        else:
          logging.warning('Could not read datastore data from %s', filename)
      except (AttributeError, LookupError, NameError, TypeError,
              ValueError, struct.error, pickle.PickleError), e:
        # Wrap any unpickling failure in a datastore error with guidance.
        raise datastore_errors.InternalError(
          'Could not read data from %s. Try running with the '
          '--clear_datastore flag. Cause:\n%r' % (filename, e))
    finally:
      self.__file_lock.release()

    return []

  def __WritePickled(self, obj, filename, openfile=file):
    """Pickles the object and writes it to the given file.

    Writes to a temp file in the same directory, then renames it over the
    destination so readers never see a half-written file.
    NOTE(review): os.tempnam is inherently racy (see the filterwarnings at
    module level); acceptable for a dev-only stub, but worth confirming.
    """
    if not filename or filename == '/dev/null' or not obj:
      return

    tmpfile = openfile(os.tempnam(os.path.dirname(filename)), 'wb')

    pickler = pickle.Pickler(tmpfile, protocol=1)
    pickler.fast = True
    pickler.dump(obj)

    tmpfile.close()

    self.__file_lock.acquire()
    try:
      try:
        os.rename(tmpfile.name, filename)
      except OSError:
        # On platforms where rename can't overwrite (e.g. Windows), remove
        # the destination first and retry.
        try:
          os.remove(filename)
        except:
          pass
        os.rename(tmpfile.name, filename)
    finally:
      self.__file_lock.release()

  def MakeSyncCall(self, service, call, request, response):
    """ The main RPC entry point. service must be 'datastore_v3'. So far, the
    supported calls are 'Get', 'Put', 'RunQuery', 'Next', and 'Count'.
    """
    # Superclass dispatches to the matching _Dynamic_<call> method.
    super(DatastoreFileStub, self).MakeSyncCall(service,
                                                call,
                                                request,
                                                response)

    explanation = []
    assert response.IsInitialized(explanation), explanation

  def QueryHistory(self):
    """Returns a dict that maps Query PBs to times they've been run.

    Only queries belonging to this stub's app id are included.
    """
    return dict((pb, times) for pb, times in self.__query_history.items()
                if pb.app() == self.__app_id)

  def _Dynamic_Put(self, put_request, put_response):
    """Handles a Put RPC: clones incoming entities, assigns ids to new keys,
    stores the clones, and returns their keys in put_response."""
    clones = []
    for entity in put_request.entity_list():
      # Work on a copy so the caller's request protos are never mutated.
      clone = entity_pb.EntityProto()
      clone.CopyFrom(entity)
      clones.append(clone)

      assert clone.has_key()
      assert clone.key().path().element_size() > 0

      last_path = clone.key().path().element_list()[-1]
      if last_path.id() == 0 and not last_path.has_name():
        # Incomplete key: allocate a fresh numeric id under the id lock.
        self.__id_lock.acquire()
        last_path.set_id(self.__next_id)
        self.__next_id += 1
        self.__id_lock.release()

        # A new entity's entity group is rooted at the first path element.
        assert clone.entity_group().element_size() == 0
        group = clone.mutable_entity_group()
        root = clone.key().path().element(0)
        group.add_element().CopyFrom(root)
      else:
        # Complete keys must already carry an entity group.
        assert (clone.has_entity_group() and
                clone.entity_group().element_size() > 0)

    self.__entities_lock.acquire()
    try:
      for clone in clones:
        self._StoreEntity(clone)
    finally:
      self.__entities_lock.release()

    # Inside a transaction, persistence is deferred until Commit.
    if not put_request.has_transaction():
      self.__WriteDatastore()

    put_response.key_list().extend([c.key() for c in clones])

  def _Dynamic_Get(self, get_request, get_response):
    """Handles a Get RPC: looks up each requested key; missing keys yield an
    entry in the response with no entity set."""
    for key in get_request.key_list():
      app_kind = self._AppKindForKey(key)

      group = get_response.add_entity()
      try:
        entity = self.__entities[app_kind][key].protobuf
      except KeyError:
        entity = None

      if entity:
        group.mutable_entity().CopyFrom(entity)

  def _Dynamic_Delete(self, delete_request, delete_response):
    """Handles a Delete RPC: removes each keyed entity (silently ignoring
    missing ones) and persists unless inside a transaction."""
    self.__entities_lock.acquire()
    try:
      for key in delete_request.key_list():
        app_kind = self._AppKindForKey(key)
        try:
          del self.__entities[app_kind][key]
          if not self.__entities[app_kind]:
            # Last entity of this kind gone; drop the kind dict and its
            # cached schema (KeyError from either is swallowed below).
            del self.__entities[app_kind]
          del self.__schema_cache[app_kind]
        except KeyError:
          pass

        if not delete_request.has_transaction():
          self.__WriteDatastore()
    finally:
      self.__entities_lock.release()

  def _Dynamic_RunQuery(self, query, query_result):
    """Handles a RunQuery RPC by scanning, filtering, and sorting all entities
    of the queried kind in memory, then registering a cursor for Next()."""
    # Queries are not allowed while a transaction holds __tx_lock.
    if not self.__tx_lock.acquire(False):
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, 'Can\'t query inside a transaction.')
    else:
      self.__tx_lock.release()

    if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')

    num_components = len(query.filter_list()) + len(query.order_list())
    if query.has_ancestor():
      num_components += 1
    if num_components > _MAX_QUERY_COMPONENTS:
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        ('query is too large. may not have more than %s filters'
         ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))

    app = query.app()

    if self.__require_indexes:
      # Enforce index.yaml: the query must match a defined composite index,
      # either exactly or modulo reordering of its equality filters.
      required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
      if required:
        required_key = kind, ancestor, props
        indexes = self.__indexes.get(app)
        if not indexes:
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index, but none are defined. "
              "You must create an index.yaml file in your application root.")
        eq_filters_set = set(props[:num_eq_filters])
        remaining_filters = props[num_eq_filters:]
        for index in indexes:
          definition = datastore_admin.ProtoToIndexDefinition(index)
          index_key = datastore_index.IndexToKey(definition)
          if required_key == index_key:
            break
          # Equality filters commute, so compare them as a set.
          if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
            this_props = index_key[2]
            this_eq_filters_set = set(this_props[:num_eq_filters])
            this_remaining_filters = this_props[num_eq_filters:]
            if (eq_filters_set == this_eq_filters_set and
                remaining_filters == this_remaining_filters):
              break
        else:
          # No index matched (for-else: loop finished without break).
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index that is not defined. "
              "You must update the index.yaml file in your application root.")

    try:
      query.set_app(app)
      results = self.__entities[app, query.kind()].values()
      results = [entity.native for entity in results]
    except KeyError:
      results = []

    if query.has_ancestor():
      ancestor_path = query.ancestor().path().element_list()
      def is_descendant(entity):
        # A descendant's key path starts with the ancestor's full path.
        path = entity.key()._Key__reference.path().element_list()
        return path[:len(ancestor_path)] == ancestor_path
      results = filter(is_descendant, results)

    operators = {datastore_pb.Query_Filter.LESS_THAN: '<',
                 datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL: '<=',
                 datastore_pb.Query_Filter.GREATER_THAN: '>',
                 datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
                 datastore_pb.Query_Filter.EQUAL: '==',
                 }

    for filt in query.filter_list():
      assert filt.op() != datastore_pb.Query_Filter.IN

      prop = filt.property(0).name().decode('utf-8')
      op = operators[filt.op()]

      filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                         for filter_prop in filt.property_list()]

      def passes(entity):
        """ Returns True if the entity passes the filter, False otherwise. """
        if prop in datastore_types._SPECIAL_PROPERTIES:
          entity_vals = self.__GetSpecialPropertyValue(entity, prop)
        else:
          entity_vals = entity.get(prop, [])

        if not isinstance(entity_vals, list):
          entity_vals = [entity_vals]

        for fixed_entity_val in entity_vals:
          # Raw (unindexed) property values never match filters.
          if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
            continue

          for filter_val in filter_val_list:
            fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
              fixed_entity_val.__class__)
            filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
            if fixed_entity_type == filter_type:
              # Same type: compare the values themselves via their reprs.
              comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
            elif op != '==':
              # Different types: inequality is decided by type-tag ordering.
              comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
            else:
              continue

            logging.log(logging.DEBUG - 1,
                        'Evaling filter expression "%s"', comp)

            try:
              # NOTE(review): eval on repr()s of stored values — fine for a
              # local dev stub, but never acceptable on untrusted data.
              ret = eval(comp)
              if ret and ret != NotImplementedError:
                return True
            except TypeError:
              pass

        return False

      results = filter(passes, results)

    def has_prop_indexed(entity, prop):
      """Returns True if prop is in the entity and is not a raw property, or
      is a special property."""
      if prop in datastore_types._SPECIAL_PROPERTIES:
        return True

      values = entity.get(prop, [])
      if not isinstance(values, (tuple, list)):
        values = [values]

      for value in values:
        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
          return True
      return False

    for order in query.order_list():
      # Entities missing an indexed value for a sort property are excluded.
      prop = order.property().decode('utf-8')
      results = [entity for entity in results if has_prop_indexed(entity, prop)]

    def order_compare_entities(a, b):
      """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
      cmped = 0
      for o in query.order_list():
        prop = o.property().decode('utf-8')

        reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)

        if prop in datastore_types._SPECIAL_PROPERTIES:
          a_val = self.__GetSpecialPropertyValue(a, prop)
          b_val = self.__GetSpecialPropertyValue(b, prop)
        else:
          # For multi-valued properties, sort on the extreme value in the
          # order's direction.
          a_val = a[prop]
          if isinstance(a_val, list):
            a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]

          b_val = b[prop]
          if isinstance(b_val, list):
            b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]

        cmped = order_compare_properties(a_val, b_val)

        if o.direction() is datastore_pb.Query_Order.DESCENDING:
          cmped = -cmped

        if cmped != 0:
          return cmped

      # All orderings tied: fall back to key order for a stable result.
      if cmped == 0:
        return cmp(a.key(), b.key())

    def order_compare_properties(x, y):
      """Return a negative, zero or positive number depending on whether
      property value x is considered smaller than, equal to, or larger than
      property value y. If x and y are different types, they're compared based
      on the type ordering used in the real datastore, which is based on the
      tag numbers in the PropertyValue PB.
      """
      if isinstance(x, datetime.datetime):
        x = datastore_types.DatetimeToTimestamp(x)
      if isinstance(y, datetime.datetime):
        y = datastore_types.DatetimeToTimestamp(y)

      x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
      y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)

      if x_type == y_type:
        try:
          return cmp(x, y)
        except TypeError:
          return 0
      else:
        return cmp(x_type, y_type)

    results.sort(order_compare_entities)

    offset = 0
    limit = len(results)
    if query.has_offset():
      offset = query.offset()
    if query.has_limit():
      limit = query.limit()
    if limit > _MAXIMUM_RESULTS:
      limit = _MAXIMUM_RESULTS
    results = results[offset:limit + offset]

    # Record this query in the history (the hint is irrelevant to identity).
    clone = datastore_pb.Query()
    clone.CopyFrom(query)
    clone.clear_hint()
    if clone in self.__query_history:
      self.__query_history[clone] += 1
    else:
      self.__query_history[clone] = 1
    self.__WriteHistory()

    # Allocate a cursor so _Dynamic_Next/_Dynamic_Count can page the results.
    self.__cursor_lock.acquire()
    cursor = self.__next_cursor
    self.__next_cursor += 1
    self.__cursor_lock.release()
    self.__queries[cursor] = (results, len(results))

    query_result.mutable_cursor().set_cursor(cursor)
    query_result.set_more_results(len(results) > 0)

  def _Dynamic_Next(self, next_request, query_result):
    """Handles a Next RPC: pops up to count results off the cursor's
    remaining result list and reports whether more remain."""
    cursor = next_request.cursor().cursor()

    try:
      results, orig_count = self.__queries[cursor]
    except KeyError:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'Cursor %d not found' % cursor)

    count = next_request.count()
    results_pb = [r._ToPb() for r in results[:count]]
    query_result.result_list().extend(results_pb)
    del results[:count]

    query_result.set_more_results(len(results) > 0)

  def _Dynamic_Count(self, query, integer64proto):
    """Handles a Count RPC by running the query and returning the total
    result count, then discarding the cursor."""
    query_result = datastore_pb.QueryResult()
    self._Dynamic_RunQuery(query, query_result)
    cursor = query_result.cursor().cursor()
    results, count = self.__queries[cursor]
    integer64proto.set_value(count)
    del self.__queries[cursor]

  def _Dynamic_BeginTransaction(self, request, transaction):
    """Handles BeginTransaction: allocates a handle, takes __tx_lock (held
    until Commit/Rollback), and snapshots __entities for rollback."""
    self.__tx_handle_lock.acquire()
    handle = self.__next_tx_handle
    self.__next_tx_handle += 1
    self.__tx_handle_lock.release()

    self.__transactions[handle] = None
    transaction.set_handle(handle)

    self.__tx_lock.acquire()
    # Shallow-copy each per-kind dict so rollback can restore this state.
    snapshot = [(app_kind, dict(entities))
                for app_kind, entities in self.__entities.items()]
    self.__tx_snapshot = dict(snapshot)

  def _Dynamic_Commit(self, transaction, transaction_response):
    """Handles Commit: persists the datastore and releases __tx_lock
    (acquired in _Dynamic_BeginTransaction)."""
    if not self.__transactions.has_key(transaction.handle()):
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        'Transaction handle %d not found' % transaction.handle())

    self.__tx_snapshot = {}
    try:
      self.__WriteDatastore()
    finally:
      self.__tx_lock.release()

  def _Dynamic_Rollback(self, transaction, transaction_response):
    """Handles Rollback: restores the pre-transaction snapshot of
    __entities and releases __tx_lock."""
    if not self.__transactions.has_key(transaction.handle()):
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        'Transaction handle %d not found' % transaction.handle())

    self.__entities = self.__tx_snapshot
    self.__tx_snapshot = {}

    self.__tx_lock.release()

  def _Dynamic_GetSchema(self, app_str, schema):
    """Handles GetSchema: builds, per kind, a synthetic EntityProto whose
    properties carry sentinel minimum values for each value type seen."""
    minint = -sys.maxint - 1
    try:
      minfloat = float('-inf')
    except ValueError:
      # Some platforms can't parse 'inf'; this literal overflows to -inf.
      minfloat = -1e300000

    app_str = app_str.value()
    kinds = []

    for app, kind in self.__entities:
      if app == app_str:
        app_kind = (app, kind)
        if app_kind in self.__schema_cache:
          kinds.append(self.__schema_cache[app_kind])
          continue

        # Skeleton entity: empty app, path of just this kind, empty group.
        kind_pb = entity_pb.EntityProto()
        kind_pb.mutable_key().set_app('')
        kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
        kind_pb.mutable_entity_group()

        # Union of all property value variants seen across this kind.
        props = {}

        for entity in self.__entities[app_kind].values():
          for prop in entity.protobuf.property_list():
            if prop.name() not in props:
              props[prop.name()] = entity_pb.PropertyValue()
            props[prop.name()].MergeFrom(prop.value())

        # Replace concrete values with type-minimum sentinels.
        for value_pb in props.values():
          if value_pb.has_int64value():
            value_pb.set_int64value(minint)
          if value_pb.has_booleanvalue():
            value_pb.set_booleanvalue(False)
          if value_pb.has_stringvalue():
            value_pb.set_stringvalue('')
          if value_pb.has_doublevalue():
            value_pb.set_doublevalue(minfloat)
          if value_pb.has_pointvalue():
            value_pb.mutable_pointvalue().set_x(minfloat)
            value_pb.mutable_pointvalue().set_y(minfloat)
          if value_pb.has_uservalue():
            value_pb.mutable_uservalue().set_gaiaid(minint)
            value_pb.mutable_uservalue().set_email('')
            value_pb.mutable_uservalue().set_auth_domain('')
            value_pb.mutable_uservalue().clear_nickname()
          elif value_pb.has_referencevalue():
            value_pb.clear_referencevalue()
            value_pb.mutable_referencevalue().set_app('')

        for name, value_pb in props.items():
          prop_pb = kind_pb.add_property()
          prop_pb.set_name(name)
          prop_pb.set_multiple(False)
          prop_pb.mutable_value().CopyFrom(value_pb)

        kinds.append(kind_pb)
        self.__schema_cache[app_kind] = kind_pb

    for kind_pb in kinds:
      schema.add_kind().CopyFrom(kind_pb)

  def _Dynamic_CreateIndex(self, index, id_response):
    """Handles CreateIndex: assigns a fresh index id and stores a clone of
    the definition under its app id. Rejects preset ids and duplicates."""
    if index.id() != 0:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'New index id must be 0.')
    elif self.__FindIndex(index):
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'Index already exists.')

    self.__index_id_lock.acquire()
    index.set_id(self.__next_index_id)
    id_response.set_value(self.__next_index_id)
    self.__next_index_id += 1
    self.__index_id_lock.release()

    clone = entity_pb.CompositeIndex()
    clone.CopyFrom(index)
    app = index.app_id()
    clone.set_app_id(app)

    self.__indexes_lock.acquire()
    try:
      if app not in self.__indexes:
        self.__indexes[app] = []
      self.__indexes[app].append(clone)
    finally:
      self.__indexes_lock.release()

  def _Dynamic_GetIndices(self, app_str, composite_indices):
    """Handles GetIndices: returns all stored indexes for the given app."""
    composite_indices.index_list().extend(
      self.__indexes.get(app_str.value(), []))

  def _Dynamic_UpdateIndex(self, index, void):
    """Handles UpdateIndex: moves a stored index to a new state, enforcing
    the legal transitions in _INDEX_STATE_TRANSITIONS."""
    stored_index = self.__FindIndex(index)
    if not stored_index:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             "Index doesn't exist.")
    elif (index.state() != stored_index.state() and
          index.state() not in self._INDEX_STATE_TRANSITIONS[stored_index.state()]):
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        "cannot move index state from %s to %s" %
          (entity_pb.CompositeIndex.State_Name(stored_index.state()),
          (entity_pb.CompositeIndex.State_Name(index.state()))))

    self.__indexes_lock.acquire()
    try:
      stored_index.set_state(index.state())
    finally:
      self.__indexes_lock.release()

  def _Dynamic_DeleteIndex(self, index, void):
    """Handles DeleteIndex: removes the matching stored index, raising
    BAD_REQUEST if no index with that definition exists."""
    stored_index = self.__FindIndex(index)
    if not stored_index:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             "Index doesn't exist.")

    app = index.app_id()
    self.__indexes_lock.acquire()
    try:
      self.__indexes[app].remove(stored_index)
    finally:
      self.__indexes_lock.release()

  def __FindIndex(self, index):
    """Finds an existing index by definition.

    Args:
      index: entity_pb.CompositeIndex

    Returns:
      entity_pb.CompositeIndex, if it exists; otherwise None
    """
    app = index.app_id()
    if app in self.__indexes:
      for stored_index in self.__indexes[app]:
        if index.definition() == stored_index.definition():
          return stored_index

    return None

  @classmethod
  def __GetSpecialPropertyValue(cls, entity, property):
    """Returns an entity's value for a special property.

    Right now, the only special property is __key__, whose value is the
    entity's key.

    Args:
      entity: datastore.Entity
      property: str, name of the special property (e.g. '__key__')

    Returns:
      property value. For __key__, a datastore_types.Key.

    Raises:
      AssertionError, if the given property is not special.
    """
    assert property in datastore_types._SPECIAL_PROPERTIES
    if property == datastore_types._KEY_SPECIAL_PROPERTY:
      return entity.key()
# (file boundary marker left by concatenation)
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import VoidProto
class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
  """Memcache service error codes.

  NOTE(review): this appears to be generated protocol-buffer code; hand
  edits are likely to be clobbered on regeneration.
  The message itself carries no fields; it only hosts the ErrorCode enum.
  """

  # ErrorCode enum values.
  OK = 0
  UNSPECIFIED_ERROR = 1

  _ErrorCode_NAMES = {
    0: "OK",
    1: "UNSPECIFIED_ERROR",
  }

  # Returns the symbolic name for an ErrorCode value, or "" if unknown.
  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    # No fields to initialize; optionally parse from serialized bytes.
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # All instances are equal since the message has no fields.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip every field (none are known); tag 0 is always invalid.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheGetRequest(ProtocolBuffer.ProtocolMessage):
  """Request message for a memcache Get: a repeated string field `key`.

  NOTE(review): this appears to be generated protocol-buffer code; hand
  edits are likely to be clobbered on regeneration.
  """

  def __init__(self, contents=None):
    self.key_ = []   # repeated string field, tag number 1
    if contents is not None: self.MergeFromString(contents)

  # --- Generated accessors for the repeated `key` field. ---
  def key_size(self): return len(self.key_)
  def key_list(self): return self.key_

  def key(self, i):
    return self.key_[i]

  def set_key(self, i, x):
    self.key_[i] = x

  def add_key(self, x):
    self.key_.append(x)

  def clear_key(self):
    self.key_ = []

  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.key_size()): self.add_key(x.key(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.key_) != len(x.key_): return 0
    for e1, e2 in zip(self.key_, x.key_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields.
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    n += 1 * len(self.key_)   # one tag byte per key
    for i in xrange(len(self.key_)): n += self.lengthString(len(self.key_[i]))
    return n + 0

  def Clear(self):
    self.clear_key()

  def OutputUnchecked(self, out):
    # 10 = (field 1 << 3) | length-delimited wire type.
    for i in xrange(len(self.key_)):
      out.putVarInt32(10)
      out.putPrefixedString(self.key_[i])

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.add_key(d.getPrefixedString())
        continue
      # Unknown field: tag 0 is invalid, anything else is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.key_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("key%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res

  # Field number constants and text/type tables indexed by field number.
  kkey = 1

  _TEXT = (
   "ErrorCode",
   "key",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheGetResponse_Item(ProtocolBuffer.ProtocolMessage):
  """One Item group inside a MemcacheGetResponse: required key and value,
  plus an optional fixed32 flags field.

  NOTE(review): this appears to be generated protocol-buffer code; hand
  edits are likely to be clobbered on regeneration.
  """

  # Per-field presence bits and default values (class-level defaults are
  # shadowed by instance attributes when a field is set).
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  has_flags_ = 0
  flags_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  # --- Generated accessors. ---
  def key(self): return self.key_

  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x

  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""

  def has_key(self): return self.has_key_

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""

  def has_value(self): return self.has_value_

  def flags(self): return self.flags_

  def set_flags(self, x):
    self.has_flags_ = 1
    self.flags_ = x

  def clear_flags(self):
    if self.has_flags_:
      self.has_flags_ = 0
      self.flags_ = 0

  def has_flags(self): return self.has_flags_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
    if (x.has_flags()): self.set_flags(x.flags())

  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    if self.has_flags_ != x.has_flags_: return 0
    if self.has_flags_ and self.flags_ != x.flags_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # key and value are required fields.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    if (self.has_flags_): n += 5   # 1 tag byte + 4-byte fixed32
    return n + 2                   # tag bytes for the two required fields

  def Clear(self):
    self.clear_key()
    self.clear_value()
    self.clear_flags()

  def OutputUnchecked(self, out):
    # 18/26 = length-delimited tags for fields 2/3; 37 = fixed32 tag, field 4.
    out.putVarInt32(18)
    out.putPrefixedString(self.key_)
    out.putVarInt32(26)
    out.putPrefixedString(self.value_)
    if (self.has_flags_):
      out.putVarInt32(37)
      out.put32(self.flags_)

  def TryMerge(self, d):
    # Loops until the enclosing group's end tag (12) is read.
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_value(d.getPrefixedString())
        continue
      if tt == 37:
        self.set_flags(d.get32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatFixed32(self.flags_))
    return res
class MemcacheGetResponse(ProtocolBuffer.ProtocolMessage):
  """Response message for a memcache Get: a repeated Item group.

  NOTE(review): this appears to be generated protocol-buffer code; hand
  edits are likely to be clobbered on regeneration.
  """

  def __init__(self, contents=None):
    self.item_ = []   # repeated MemcacheGetResponse_Item group, field 1
    if contents is not None: self.MergeFromString(contents)

  # --- Generated accessors for the repeated `item` group. ---
  def item_size(self): return len(self.item_)
  def item_list(self): return self.item_

  def item(self, i):
    return self.item_[i]

  def mutable_item(self, i):
    return self.item_[i]

  def add_item(self):
    # Appends and returns a fresh, empty Item for the caller to fill in.
    x = MemcacheGetResponse_Item()
    self.item_.append(x)
    return x

  def clear_item(self):
    self.item_ = []

  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))

  def Equals(self, x):
    if x is self: return 1
    if len(self.item_) != len(x.item_): return 0
    for e1, e2 in zip(self.item_, x.item_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # Initialized only if every contained Item is (key/value required).
    initialized = 1
    for p in self.item_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    n = 0
    n += 2 * len(self.item_)   # group start + end tag per item
    for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
    return n + 0

  def Clear(self):
    self.clear_item()

  def OutputUnchecked(self, out):
    # 11/12 = start/end group tags for field 1.
    for i in xrange(len(self.item_)):
      out.putVarInt32(11)
      self.item_[i].OutputUnchecked(out)
      out.putVarInt32(12)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        # Item.TryMerge consumes everything up to the matching end tag.
        self.add_item().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.item_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Item%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res

  # Field number constants and text/type tables indexed by field number.
  kItemGroup = 1
  kItemkey = 2
  kItemvalue = 3
  kItemflags = 4

  _TEXT = (
   "ErrorCode",
   "Item",
   "key",
   "value",
   "flags",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.FLOAT,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheSetRequest_Item(ProtocolBuffer.ProtocolMessage):
  """A single item of a MemcacheSetRequest (group `Item`, field 1).

  Required fields: key (2) and value (3).  Optional: flags (fixed32,
  field 4), set_policy (varint enum, field 5, default SET=1) and
  expiration_time (fixed32, field 6).
  """
  # Presence flags (has_*_) and per-field default values.
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  has_flags_ = 0
  flags_ = 0
  has_set_policy_ = 0
  set_policy_ = 1
  has_expiration_time_ = 0
  expiration_time_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_
  def flags(self): return self.flags_
  def set_flags(self, x):
    self.has_flags_ = 1
    self.flags_ = x
  def clear_flags(self):
    if self.has_flags_:
      self.has_flags_ = 0
      self.flags_ = 0
  def has_flags(self): return self.has_flags_
  # NOTE: despite the set_ prefix, set_policy() is the *getter* for the
  # set_policy field; set_set_policy() is the setter.
  def set_policy(self): return self.set_policy_
  def set_set_policy(self, x):
    self.has_set_policy_ = 1
    self.set_policy_ = x
  def clear_set_policy(self):
    if self.has_set_policy_:
      self.has_set_policy_ = 0
      self.set_policy_ = 1
  def has_set_policy(self): return self.has_set_policy_
  def expiration_time(self): return self.expiration_time_
  def set_expiration_time(self, x):
    self.has_expiration_time_ = 1
    self.expiration_time_ = x
  def clear_expiration_time(self):
    if self.has_expiration_time_:
      self.has_expiration_time_ = 0
      self.expiration_time_ = 0
  def has_expiration_time(self): return self.has_expiration_time_
  def MergeFrom(self, x):
    # Copies only the fields that are present on x.
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
    if (x.has_flags()): self.set_flags(x.flags())
    if (x.has_set_policy()): self.set_set_policy(x.set_policy())
    if (x.has_expiration_time()): self.set_expiration_time(x.expiration_time())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    if self.has_flags_ != x.has_flags_: return 0
    if self.has_flags_ and self.flags_ != x.flags_: return 0
    if self.has_set_policy_ != x.has_set_policy_: return 0
    if self.has_set_policy_ and self.set_policy_ != x.set_policy_: return 0
    if self.has_expiration_time_ != x.has_expiration_time_: return 0
    if self.has_expiration_time_ and self.expiration_time_ != x.expiration_time_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # key and value are required by the message definition.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    if (self.has_flags_): n += 5
    if (self.has_set_policy_): n += 1 + self.lengthVarInt64(self.set_policy_)
    if (self.has_expiration_time_): n += 5
    # + 2 covers the one-byte tags of the two required string fields.
    return n + 2
  def Clear(self):
    self.clear_key()
    self.clear_value()
    self.clear_flags()
    self.clear_set_policy()
    self.clear_expiration_time()
  def OutputUnchecked(self, out):
    # Tags: 18/26 = length-delimited key/value, 37 = fixed32 flags,
    # 40 = varint set_policy, 53 = fixed32 expiration_time.
    out.putVarInt32(18)
    out.putPrefixedString(self.key_)
    out.putVarInt32(26)
    out.putPrefixedString(self.value_)
    if (self.has_flags_):
      out.putVarInt32(37)
      out.put32(self.flags_)
    if (self.has_set_policy_):
      out.putVarInt32(40)
      out.putVarInt32(self.set_policy_)
    if (self.has_expiration_time_):
      out.putVarInt32(53)
      out.put32(self.expiration_time_)
  def TryMerge(self, d):
    while 1:
      tt = d.getVarInt32()
      # Tag 12 is the enclosing group's END_GROUP marker.
      if tt == 12: break
      if tt == 18:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_value(d.getPrefixedString())
        continue
      if tt == 37:
        self.set_flags(d.get32())
        continue
      if tt == 40:
        self.set_set_policy(d.getVarInt32())
        continue
      if tt == 53:
        self.set_expiration_time(d.get32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatFixed32(self.flags_))
    if self.has_set_policy_: res+=prefix+("set_policy: %s\n" % self.DebugFormatInt32(self.set_policy_))
    if self.has_expiration_time_: res+=prefix+("expiration_time: %s\n" % self.DebugFormatFixed32(self.expiration_time_))
    return res
class MemcacheSetRequest(ProtocolBuffer.ProtocolMessage):
  """Request for the memcache Set RPC: a repeated group of Item messages.

  SetPolicy controls storage semantics per item: SET overwrites, ADD
  stores only if the key is absent, REPLACE only if it is present.
  """
  SET = 1
  ADD = 2
  REPLACE = 3
  # Maps enum values to symbolic names for SetPolicy_Name().
  _SetPolicy_NAMES = {
    1: "SET",
    2: "ADD",
    3: "REPLACE",
  }
  def SetPolicy_Name(cls, x): return cls._SetPolicy_NAMES.get(x, "")
  SetPolicy_Name = classmethod(SetPolicy_Name)
  def __init__(self, contents=None):
    # List of MemcacheSetRequest_Item, in wire order.
    self.item_ = []
    if contents is not None: self.MergeFromString(contents)
  def item_size(self): return len(self.item_)
  def item_list(self): return self.item_
  def item(self, i):
    return self.item_[i]
  def mutable_item(self, i):
    return self.item_[i]
  def add_item(self):
    # Appends a fresh, empty Item and returns it for in-place population.
    x = MemcacheSetRequest_Item()
    self.item_.append(x)
    return x
  def clear_item(self):
    self.item_ = []
  def MergeFrom(self, x):
    # Deep-copies every item from x; x must be a distinct instance.
    assert x is not self
    for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
  def Equals(self, x):
    if x is self: return 1
    if len(self.item_) != len(x.item_): return 0
    for e1, e2 in zip(self.item_, x.item_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Valid only if every contained item has its required fields set.
    initialized = 1
    for p in self.item_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    n = 0
    # Two bytes per item: one START_GROUP tag and one END_GROUP tag.
    n += 2 * len(self.item_)
    for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
    return n + 0
  def Clear(self):
    self.clear_item()
  def OutputUnchecked(self, out):
    # Emits each item framed by START_GROUP (11) / END_GROUP (12) tags.
    for i in xrange(len(self.item_)):
      out.putVarInt32(11)
      self.item_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_item().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.item_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Item%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  # Field numbers for the Item group and its nested fields.
  kItemGroup = 1
  kItemkey = 2
  kItemvalue = 3
  kItemflags = 4
  kItemset_policy = 5
  kItemexpiration_time = 6
  _TEXT = (
   "ErrorCode",
   "Item",
   "key",
   "value",
   "flags",
   "set_policy",
   "expiration_time",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.FLOAT,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.FLOAT,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheSetResponse(ProtocolBuffer.ProtocolMessage):
  """Response for the Set RPC: one SetStatusCode per requested item."""

  STORED = 1
  NOT_STORED = 2
  ERROR = 3

  _SetStatusCode_NAMES = {
    1: "STORED",
    2: "NOT_STORED",
    3: "ERROR",
  }

  @classmethod
  def SetStatusCode_Name(cls, x):
    """Returns the symbolic name of status code x, or '' if unknown."""
    return cls._SetStatusCode_NAMES.get(x, "")

  def __init__(self, contents=None):
    self.set_status_ = []
    if contents is not None:
      self.MergeFromString(contents)

  def set_status_size(self):
    return len(self.set_status_)

  def set_status_list(self):
    return self.set_status_

  def set_status(self, i):
    return self.set_status_[i]

  def set_set_status(self, i, x):
    self.set_status_[i] = x

  def add_set_status(self, x):
    self.set_status_.append(x)

  def clear_set_status(self):
    self.set_status_ = []

  def MergeFrom(self, x):
    # Appends every status code from x onto this message.
    assert x is not self
    for code in x.set_status_list():
      self.add_set_status(code)

  def Equals(self, x):
    if x is self:
      return 1
    if len(self.set_status_) != len(x.set_status_):
      return 0
    for mine, theirs in zip(self.set_status_, x.set_status_):
      if mine != theirs:
        return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields.
    return 1

  def ByteSize(self):
    # One tag byte plus a varint per status code.
    total = len(self.set_status_)
    for code in self.set_status_:
      total += self.lengthVarInt64(code)
    return total

  def Clear(self):
    self.clear_set_status()

  def OutputUnchecked(self, out):
    for code in self.set_status_:
      out.putVarInt32(8)
      out.putVarInt32(code)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 8:
        self.add_set_status(d.getVarInt32())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    pieces = []
    for idx, code in enumerate(self.set_status_):
      marker = "(%d)" % idx if printElemNumber else ""
      pieces.append(prefix + ("set_status%s: %s\n" % (marker, self.DebugFormatInt32(code))))
    return "".join(pieces)

  kset_status = 1

  _TEXT = (
   "ErrorCode",
   "set_status",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheDeleteRequest_Item(ProtocolBuffer.ProtocolMessage):
  """A single item of a MemcacheDeleteRequest (group `Item`, field 1).

  Required: key (field 2).  Optional: delete_time (fixed32, field 3).
  """
  # Presence flags (has_*_) and per-field default values.
  has_key_ = 0
  key_ = ""
  has_delete_time_ = 0
  delete_time_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def delete_time(self): return self.delete_time_
  def set_delete_time(self, x):
    self.has_delete_time_ = 1
    self.delete_time_ = x
  def clear_delete_time(self):
    if self.has_delete_time_:
      self.has_delete_time_ = 0
      self.delete_time_ = 0
  def has_delete_time(self): return self.has_delete_time_
  def MergeFrom(self, x):
    # Copies only the fields that are present on x.
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_delete_time()): self.set_delete_time(x.delete_time())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_delete_time_ != x.has_delete_time_: return 0
    if self.has_delete_time_ and self.delete_time_ != x.delete_time_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # key is required by the message definition.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.key_))
    if (self.has_delete_time_): n += 5
    # + 1 covers the one-byte tag of the required key field.
    return n + 1
  def Clear(self):
    self.clear_key()
    self.clear_delete_time()
  def OutputUnchecked(self, out):
    # Tags: 18 = length-delimited key, 29 = fixed32 delete_time.
    out.putVarInt32(18)
    out.putPrefixedString(self.key_)
    if (self.has_delete_time_):
      out.putVarInt32(29)
      out.put32(self.delete_time_)
  def TryMerge(self, d):
    while 1:
      tt = d.getVarInt32()
      # Tag 12 is the enclosing group's END_GROUP marker.
      if tt == 12: break
      if tt == 18:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 29:
        self.set_delete_time(d.get32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_delete_time_: res+=prefix+("delete_time: %s\n" % self.DebugFormatFixed32(self.delete_time_))
    return res
class MemcacheDeleteRequest(ProtocolBuffer.ProtocolMessage):
  """Request for the memcache Delete RPC: a repeated group of Item messages."""
  def __init__(self, contents=None):
    # List of MemcacheDeleteRequest_Item, in wire order.
    self.item_ = []
    if contents is not None: self.MergeFromString(contents)
  def item_size(self): return len(self.item_)
  def item_list(self): return self.item_
  def item(self, i):
    return self.item_[i]
  def mutable_item(self, i):
    return self.item_[i]
  def add_item(self):
    # Appends a fresh, empty Item and returns it for in-place population.
    x = MemcacheDeleteRequest_Item()
    self.item_.append(x)
    return x
  def clear_item(self):
    self.item_ = []
  def MergeFrom(self, x):
    # Deep-copies every item from x; x must be a distinct instance.
    assert x is not self
    for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
  def Equals(self, x):
    if x is self: return 1
    if len(self.item_) != len(x.item_): return 0
    for e1, e2 in zip(self.item_, x.item_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Valid only if every contained item has its required fields set.
    initialized = 1
    for p in self.item_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    n = 0
    # Two bytes per item: one START_GROUP tag and one END_GROUP tag.
    n += 2 * len(self.item_)
    for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
    return n + 0
  def Clear(self):
    self.clear_item()
  def OutputUnchecked(self, out):
    # Emits each item framed by START_GROUP (11) / END_GROUP (12) tags.
    for i in xrange(len(self.item_)):
      out.putVarInt32(11)
      self.item_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_item().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.item_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Item%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  # Field numbers for the Item group and its nested fields.
  kItemGroup = 1
  kItemkey = 2
  kItemdelete_time = 3
  _TEXT = (
   "ErrorCode",
   "Item",
   "key",
   "delete_time",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.FLOAT,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheDeleteResponse(ProtocolBuffer.ProtocolMessage):
  """Response for the Delete RPC: one DeleteStatusCode per requested item."""

  DELETED = 1
  NOT_FOUND = 2

  _DeleteStatusCode_NAMES = {
    1: "DELETED",
    2: "NOT_FOUND",
  }

  @classmethod
  def DeleteStatusCode_Name(cls, x):
    """Returns the symbolic name of status code x, or '' if unknown."""
    return cls._DeleteStatusCode_NAMES.get(x, "")

  def __init__(self, contents=None):
    self.delete_status_ = []
    if contents is not None:
      self.MergeFromString(contents)

  def delete_status_size(self):
    return len(self.delete_status_)

  def delete_status_list(self):
    return self.delete_status_

  def delete_status(self, i):
    return self.delete_status_[i]

  def set_delete_status(self, i, x):
    self.delete_status_[i] = x

  def add_delete_status(self, x):
    self.delete_status_.append(x)

  def clear_delete_status(self):
    self.delete_status_ = []

  def MergeFrom(self, x):
    # Appends every status code from x onto this message.
    assert x is not self
    for code in x.delete_status_list():
      self.add_delete_status(code)

  def Equals(self, x):
    if x is self:
      return 1
    if len(self.delete_status_) != len(x.delete_status_):
      return 0
    for mine, theirs in zip(self.delete_status_, x.delete_status_):
      if mine != theirs:
        return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields.
    return 1

  def ByteSize(self):
    # One tag byte plus a varint per status code.
    total = len(self.delete_status_)
    for code in self.delete_status_:
      total += self.lengthVarInt64(code)
    return total

  def Clear(self):
    self.clear_delete_status()

  def OutputUnchecked(self, out):
    for code in self.delete_status_:
      out.putVarInt32(8)
      out.putVarInt32(code)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 8:
        self.add_delete_status(d.getVarInt32())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    pieces = []
    for idx, code in enumerate(self.delete_status_):
      marker = "(%d)" % idx if printElemNumber else ""
      pieces.append(prefix + ("delete_status%s: %s\n" % (marker, self.DebugFormatInt32(code))))
    return "".join(pieces)

  kdelete_status = 1

  _TEXT = (
   "ErrorCode",
   "delete_status",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheIncrementRequest(ProtocolBuffer.ProtocolMessage):
  """Request for the atomic Increment/Decrement RPC.

  Required: key (string, field 1).  Optional: delta (varint uint64,
  field 2, default 1) and direction (varint enum, field 3, default
  INCREMENT).
  """
  INCREMENT = 1
  DECREMENT = 2
  # Maps enum values to symbolic names for Direction_Name().
  _Direction_NAMES = {
    1: "INCREMENT",
    2: "DECREMENT",
  }
  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  Direction_Name = classmethod(Direction_Name)
  # Presence flags (has_*_) and per-field default values.
  has_key_ = 0
  key_ = ""
  has_delta_ = 0
  delta_ = 1
  has_direction_ = 0
  direction_ = 1
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def delta(self): return self.delta_
  def set_delta(self, x):
    self.has_delta_ = 1
    self.delta_ = x
  def clear_delta(self):
    if self.has_delta_:
      self.has_delta_ = 0
      self.delta_ = 1
  def has_delta(self): return self.has_delta_
  def direction(self): return self.direction_
  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x
  def clear_direction(self):
    if self.has_direction_:
      self.has_direction_ = 0
      self.direction_ = 1
  def has_direction(self): return self.has_direction_
  def MergeFrom(self, x):
    # Copies only the fields that are present on x.
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_delta()): self.set_delta(x.delta())
    if (x.has_direction()): self.set_direction(x.direction())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_delta_ != x.has_delta_: return 0
    if self.has_delta_ and self.delta_ != x.delta_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # key is required by the message definition.
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.key_))
    if (self.has_delta_): n += 1 + self.lengthVarInt64(self.delta_)
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    # + 1 covers the one-byte tag of the required key field.
    return n + 1
  def Clear(self):
    self.clear_key()
    self.clear_delta()
    self.clear_direction()
  def OutputUnchecked(self, out):
    # Tags: 10 = length-delimited key, 16 = varint delta,
    # 24 = varint direction.
    out.putVarInt32(10)
    out.putPrefixedString(self.key_)
    if (self.has_delta_):
      out.putVarInt32(16)
      out.putVarUint64(self.delta_)
    if (self.has_direction_):
      out.putVarInt32(24)
      out.putVarInt32(self.direction_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_delta(d.getVarUint64())
        continue
      if tt == 24:
        self.set_direction(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_delta_: res+=prefix+("delta: %s\n" % self.DebugFormatInt64(self.delta_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    return res
  # Field numbers.
  kkey = 1
  kdelta = 2
  kdirection = 3
  _TEXT = (
   "ErrorCode",
   "key",
   "delta",
   "direction",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheIncrementResponse(ProtocolBuffer.ProtocolMessage):
  """Response for the Increment RPC: the post-operation value, if any.

  new_value is an optional varint uint64 (field 1).
  """

  has_new_value_ = 0
  new_value_ = 0

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def new_value(self):
    return self.new_value_

  def set_new_value(self, x):
    self.has_new_value_ = 1
    self.new_value_ = x

  def clear_new_value(self):
    if self.has_new_value_:
      self.has_new_value_ = 0
      self.new_value_ = 0

  def has_new_value(self):
    return self.has_new_value_

  def MergeFrom(self, x):
    assert x is not self
    if x.has_new_value():
      self.set_new_value(x.new_value())

  def Equals(self, x):
    if x is self:
      return 1
    if self.has_new_value_ != x.has_new_value_:
      return 0
    if self.has_new_value_ and self.new_value_ != x.new_value_:
      return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields.
    return 1

  def ByteSize(self):
    if self.has_new_value_:
      return 1 + self.lengthVarInt64(self.new_value_)
    return 0

  def Clear(self):
    self.clear_new_value()

  def OutputUnchecked(self, out):
    if self.has_new_value_:
      out.putVarInt32(8)
      out.putVarUint64(self.new_value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 8:
        self.set_new_value(d.getVarUint64())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    if not self.has_new_value_:
      return ""
    return prefix + ("new_value: %s\n" % self.DebugFormatInt64(self.new_value_))

  knew_value = 1

  _TEXT = (
   "ErrorCode",
   "new_value",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheFlushRequest(ProtocolBuffer.ProtocolMessage):
  """Empty request message for the Flush RPC; defines no fields."""

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # All instances are equal: there are no fields to compare.
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip unknown fields; tag 0 is malformed input.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheFlushResponse(ProtocolBuffer.ProtocolMessage):
  """Empty response message for the Flush RPC; defines no fields."""

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # All instances are equal: there are no fields to compare.
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip unknown fields; tag 0 is malformed input.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheStatsRequest(ProtocolBuffer.ProtocolMessage):
  """Empty request message for the Stats RPC; defines no fields."""

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # All instances are equal: there are no fields to compare.
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip unknown fields; tag 0 is malformed input.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MergedNamespaceStats(ProtocolBuffer.ProtocolMessage):
  """Aggregate memcache statistics returned by the Stats RPC.

  All six fields are required: hits, misses, byte_hits, items and bytes
  are varint uint64 (fields 1-5); oldest_item_age is fixed32 (field 6).
  """
  # Presence flags (has_*_) and per-field default values.
  has_hits_ = 0
  hits_ = 0
  has_misses_ = 0
  misses_ = 0
  has_byte_hits_ = 0
  byte_hits_ = 0
  has_items_ = 0
  items_ = 0
  has_bytes_ = 0
  bytes_ = 0
  has_oldest_item_age_ = 0
  oldest_item_age_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def hits(self): return self.hits_
  def set_hits(self, x):
    self.has_hits_ = 1
    self.hits_ = x
  def clear_hits(self):
    if self.has_hits_:
      self.has_hits_ = 0
      self.hits_ = 0
  def has_hits(self): return self.has_hits_
  def misses(self): return self.misses_
  def set_misses(self, x):
    self.has_misses_ = 1
    self.misses_ = x
  def clear_misses(self):
    if self.has_misses_:
      self.has_misses_ = 0
      self.misses_ = 0
  def has_misses(self): return self.has_misses_
  def byte_hits(self): return self.byte_hits_
  def set_byte_hits(self, x):
    self.has_byte_hits_ = 1
    self.byte_hits_ = x
  def clear_byte_hits(self):
    if self.has_byte_hits_:
      self.has_byte_hits_ = 0
      self.byte_hits_ = 0
  def has_byte_hits(self): return self.has_byte_hits_
  def items(self): return self.items_
  def set_items(self, x):
    self.has_items_ = 1
    self.items_ = x
  def clear_items(self):
    if self.has_items_:
      self.has_items_ = 0
      self.items_ = 0
  def has_items(self): return self.has_items_
  def bytes(self): return self.bytes_
  def set_bytes(self, x):
    self.has_bytes_ = 1
    self.bytes_ = x
  def clear_bytes(self):
    if self.has_bytes_:
      self.has_bytes_ = 0
      self.bytes_ = 0
  def has_bytes(self): return self.has_bytes_
  def oldest_item_age(self): return self.oldest_item_age_
  def set_oldest_item_age(self, x):
    self.has_oldest_item_age_ = 1
    self.oldest_item_age_ = x
  def clear_oldest_item_age(self):
    if self.has_oldest_item_age_:
      self.has_oldest_item_age_ = 0
      self.oldest_item_age_ = 0
  def has_oldest_item_age(self): return self.has_oldest_item_age_
  def MergeFrom(self, x):
    # Copies only the fields that are present on x.
    assert x is not self
    if (x.has_hits()): self.set_hits(x.hits())
    if (x.has_misses()): self.set_misses(x.misses())
    if (x.has_byte_hits()): self.set_byte_hits(x.byte_hits())
    if (x.has_items()): self.set_items(x.items())
    if (x.has_bytes()): self.set_bytes(x.bytes())
    if (x.has_oldest_item_age()): self.set_oldest_item_age(x.oldest_item_age())
  def Equals(self, x):
    if x is self: return 1
    if self.has_hits_ != x.has_hits_: return 0
    if self.has_hits_ and self.hits_ != x.hits_: return 0
    if self.has_misses_ != x.has_misses_: return 0
    if self.has_misses_ and self.misses_ != x.misses_: return 0
    if self.has_byte_hits_ != x.has_byte_hits_: return 0
    if self.has_byte_hits_ and self.byte_hits_ != x.byte_hits_: return 0
    if self.has_items_ != x.has_items_: return 0
    if self.has_items_ and self.items_ != x.items_: return 0
    if self.has_bytes_ != x.has_bytes_: return 0
    if self.has_bytes_ and self.bytes_ != x.bytes_: return 0
    if self.has_oldest_item_age_ != x.has_oldest_item_age_: return 0
    if self.has_oldest_item_age_ and self.oldest_item_age_ != x.oldest_item_age_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Every field of this message is required.
    initialized = 1
    if (not self.has_hits_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: hits not set.')
    if (not self.has_misses_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: misses not set.')
    if (not self.has_byte_hits_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: byte_hits not set.')
    if (not self.has_items_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: items not set.')
    if (not self.has_bytes_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: bytes not set.')
    if (not self.has_oldest_item_age_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: oldest_item_age not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.hits_)
    n += self.lengthVarInt64(self.misses_)
    n += self.lengthVarInt64(self.byte_hits_)
    n += self.lengthVarInt64(self.items_)
    n += self.lengthVarInt64(self.bytes_)
    # + 10 = five one-byte varint tags plus the five bytes of the
    # fixed32 oldest_item_age field (tag + 4 data bytes).
    return n + 10
  def Clear(self):
    self.clear_hits()
    self.clear_misses()
    self.clear_byte_hits()
    self.clear_items()
    self.clear_bytes()
    self.clear_oldest_item_age()
  def OutputUnchecked(self, out):
    # Varint tags 8/16/24/32/40 for fields 1-5; 53 = fixed32 field 6.
    out.putVarInt32(8)
    out.putVarUint64(self.hits_)
    out.putVarInt32(16)
    out.putVarUint64(self.misses_)
    out.putVarInt32(24)
    out.putVarUint64(self.byte_hits_)
    out.putVarInt32(32)
    out.putVarUint64(self.items_)
    out.putVarInt32(40)
    out.putVarUint64(self.bytes_)
    out.putVarInt32(53)
    out.put32(self.oldest_item_age_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_hits(d.getVarUint64())
        continue
      if tt == 16:
        self.set_misses(d.getVarUint64())
        continue
      if tt == 24:
        self.set_byte_hits(d.getVarUint64())
        continue
      if tt == 32:
        self.set_items(d.getVarUint64())
        continue
      if tt == 40:
        self.set_bytes(d.getVarUint64())
        continue
      if tt == 53:
        self.set_oldest_item_age(d.get32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_hits_: res+=prefix+("hits: %s\n" % self.DebugFormatInt64(self.hits_))
    if self.has_misses_: res+=prefix+("misses: %s\n" % self.DebugFormatInt64(self.misses_))
    if self.has_byte_hits_: res+=prefix+("byte_hits: %s\n" % self.DebugFormatInt64(self.byte_hits_))
    if self.has_items_: res+=prefix+("items: %s\n" % self.DebugFormatInt64(self.items_))
    if self.has_bytes_: res+=prefix+("bytes: %s\n" % self.DebugFormatInt64(self.bytes_))
    if self.has_oldest_item_age_: res+=prefix+("oldest_item_age: %s\n" % self.DebugFormatFixed32(self.oldest_item_age_))
    return res
  # Field numbers.
  khits = 1
  kmisses = 2
  kbyte_hits = 3
  kitems = 4
  kbytes = 5
  koldest_item_age = 6
  _TEXT = (
   "ErrorCode",
   "hits",
   "misses",
   "byte_hits",
   "items",
   "bytes",
   "oldest_item_age",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.FLOAT,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheStatsResponse(ProtocolBuffer.ProtocolMessage):
  """Response for the Stats RPC: an optional embedded MergedNamespaceStats."""
  has_stats_ = 0
  stats_ = None
  def __init__(self, contents=None):
    # Guards the lazy creation of stats_ in stats().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def stats(self):
    # Lazily creates the sub-message; the None check is repeated under
    # the lock so concurrent callers share a single instance.
    if self.stats_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.stats_ is None: self.stats_ = MergedNamespaceStats()
      finally:
        self.lazy_init_lock_.release()
    return self.stats_
  def mutable_stats(self): self.has_stats_ = 1; return self.stats()
  def clear_stats(self):
    # Marks the field absent but reuses the allocated sub-message.
    if self.has_stats_:
      self.has_stats_ = 0;
      if self.stats_ is not None: self.stats_.Clear()
  def has_stats(self): return self.has_stats_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_stats()): self.mutable_stats().MergeFrom(x.stats())
  def Equals(self, x):
    if x is self: return 1
    if self.has_stats_ != x.has_stats_: return 0
    if self.has_stats_ and self.stats_ != x.stats_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # stats itself is optional, but if present it must be complete.
    initialized = 1
    if (self.has_stats_ and not self.stats_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    # stats is length-delimited: 1 tag byte + length varint + payload.
    if (self.has_stats_): n += 1 + self.lengthString(self.stats_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_stats()
  def OutputUnchecked(self, out):
    if (self.has_stats_):
      out.putVarInt32(10)
      out.putVarInt32(self.stats_.ByteSize())
      self.stats_.OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Decode the embedded message through a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_stats().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_stats_:
      res+=prefix+"stats <\n"
      res+=self.stats_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field number for stats.
  kstats = 1
  _TEXT = (
   "ErrorCode",
   "stats",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Names exported when this generated module is star-imported.
__all__ = ['MemcacheServiceError','MemcacheGetRequest','MemcacheGetResponse','MemcacheGetResponse_Item','MemcacheSetRequest','MemcacheSetRequest_Item','MemcacheSetResponse','MemcacheDeleteRequest','MemcacheDeleteRequest_Item','MemcacheDeleteResponse','MemcacheIncrementRequest','MemcacheIncrementResponse','MemcacheFlushRequest','MemcacheFlushResponse','MemcacheStatsRequest','MergedNamespaceStats','MemcacheStatsResponse']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Memcache API.
Provides memcached-alike API to application developers to store
data in memory when reliable storage via the DataStore API isn't
required and higher performance is desired.
"""
import cStringIO
import math
import pickle
import types
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.memcache import memcache_service_pb
from google.appengine.runtime import apiproxy_errors
# Short aliases for the generated protocol buffer message classes.
MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
MemcacheGetResponse = memcache_service_pb.MemcacheGetResponse
MemcacheGetRequest = memcache_service_pb.MemcacheGetRequest
MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
MemcacheDeleteRequest = memcache_service_pb.MemcacheDeleteRequest
MemcacheIncrementResponse = memcache_service_pb.MemcacheIncrementResponse
MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
MemcacheFlushResponse = memcache_service_pb.MemcacheFlushResponse
MemcacheFlushRequest = memcache_service_pb.MemcacheFlushRequest
MemcacheStatsRequest = memcache_service_pb.MemcacheStatsRequest
MemcacheStatsResponse = memcache_service_pb.MemcacheStatsResponse

# Return codes for Client.delete(); usable as booleans where only a network
# failure (0) counts as an error.
DELETE_NETWORK_FAILURE = 0
DELETE_ITEM_MISSING = 1
DELETE_SUCCESSFUL = 2

# Server-imposed size limits on keys and values (bytes).
MAX_KEY_SIZE = 250
MAX_VALUE_SIZE = 10 ** 6

# Keys of the dictionary returned by Client.get_stats().
STAT_HITS = 'hits'
STAT_MISSES = 'misses'
STAT_BYTE_HITS = 'byte_hits'
STAT_ITEMS = 'items'
STAT_BYTES = 'bytes'
STAT_OLDEST_ITEM_AGES = 'oldest_item_age'

# Low three bits of an item's flags encode the stored value's type; bit 3
# marks compression (reserved; not produced by this module).
FLAG_TYPE_MASK = 7
FLAG_COMPRESSED = 1 << 3

# Type tags stored in the flags' low bits by _validate_encode_value().
TYPE_STR = 0
TYPE_UNICODE = 1
TYPE_PICKLED = 2
TYPE_INT = 3
TYPE_LONG = 4
TYPE_BOOL = 5
def _key_string(key, key_prefix='', server_to_user_dict=None):
  """Utility function to handle different ways of requesting keys.

  Args:
    key: Either a string or tuple of (shard_number, string). In Google App
      Engine the sharding is automatic so the shard number is ignored.
      To memcache, the key is just bytes (no defined encoding).
    key_prefix: Optional string prefix to prepend to key.
    server_to_user_dict: Optional dictionary to populate with a mapping of
      server-side key (which includes the key_prefix) to user-supplied key
      (which does not have the prefix).

  Returns:
    The key as a non-unicode string prepended with key_prefix. This is the key
    sent to and stored by the server.

  Raises:
    TypeError: If provided key isn't a string or tuple of (int, string)
      or key_prefix or server_to_user_dict are of the wrong type.
    ValueError: If the key, when translated to the server key, is more than
      250 bytes in length.
  """
  if type(key) is types.TupleType:
    key = key[1]
  if not isinstance(key, basestring):
    raise TypeError('Key must be a string instance, received %r' % key)
  if not isinstance(key_prefix, basestring):
    raise TypeError('key_prefix must be a string instance, received %r' %
                    key_prefix)
  server_key = key_prefix + key
  if isinstance(server_key, unicode):
    server_key = server_key.encode('utf-8')
  if len(server_key) > MAX_KEY_SIZE:
    raise ValueError('Keys may not be more than %d bytes in length, '
                     'received %d bytes' % (MAX_KEY_SIZE, len(server_key)))
  if server_to_user_dict is not None:
    if not isinstance(server_to_user_dict, dict):
      # Bug fix: this message previously interpolated the (valid) 'key'
      # argument instead of the offending 'server_to_user_dict' value.
      raise TypeError('server_to_user_dict must be a dict instance, '
                      'received %r' % server_to_user_dict)
    server_to_user_dict[server_key] = key
  return server_key
def _validate_encode_value(value, do_pickle):
  """Utility function to validate and encode server keys and values.

  Args:
    value: Value to store in memcache. Plain strings pass through unchanged;
      unicode strings are UTF-8 encoded and tagged so retrieval yields
      unicode again; ints, longs and bools are stored as decimal text; any
      other object is pickled via do_pickle and unpickled on retrieval.
    do_pickle: Callable that takes an object and returns a non-unicode
      string containing the pickled object.

  Returns:
    Tuple (stored_value, flags) where stored_value is the non-unicode string
    to send to the server and flags carries the TYPE_* tag describing how to
    decode it.

  Raises:
    ValueError: If the encoded value is too large.
    pickle.PicklingError: If the value is not a string and could not be
      pickled.
    RuntimeError: If a complicated data structure could not be pickled due to
      too many levels of recursion in its composition.
  """
  flags = 0
  if isinstance(value, str):
    encoded = value
  elif isinstance(value, unicode):
    encoded = value.encode('utf-8')
    flags |= TYPE_UNICODE
  elif isinstance(value, bool):
    # Must come before the int test: bool is an int subclass and needs to
    # keep its own type tag so retrieval round-trips to True/False.
    encoded = str(int(value))
    flags |= TYPE_BOOL
  elif isinstance(value, int):
    encoded = str(value)
    flags |= TYPE_INT
  elif isinstance(value, long):
    encoded = str(value)
    flags |= TYPE_LONG
  else:
    encoded = do_pickle(value)
    flags |= TYPE_PICKLED
  if len(encoded) > MAX_VALUE_SIZE:
    raise ValueError('Values may not be more than %d bytes in length; '
                     'received %d bytes' % (MAX_VALUE_SIZE, len(encoded)))
  return (encoded, flags)
def _decode_value(stored_value, flags, do_unpickle):
  """Utility function for decoding values retrieved from memcache.

  Args:
    stored_value: The value as a non-unicode string that was stored.
    flags: An integer whose low bits (FLAG_TYPE_MASK) carry the TYPE_* tag
      describing how the value was encoded.
    do_unpickle: Callable that takes a non-unicode string containing a
      pickled object and returns the unpickled object.

  Returns:
    The original object that was stored: a plain string, unicode string,
    int, long, bool, or an unpickled Python object.

  Raises:
    pickle.UnpicklingError: If the value could not be unpickled.
  """
  assert isinstance(stored_value, str)
  assert isinstance(flags, (int, long))
  tag = flags & FLAG_TYPE_MASK
  if tag == TYPE_STR:
    return stored_value
  if tag == TYPE_UNICODE:
    return stored_value.decode('utf-8')
  if tag == TYPE_PICKLED:
    return do_unpickle(stored_value)
  if tag == TYPE_BOOL:
    return bool(int(stored_value))
  if tag == TYPE_INT:
    return int(stored_value)
  if tag == TYPE_LONG:
    return long(stored_value)
  assert False, "Unknown stored type"
class Client(object):
  """Memcache client object, through which one invokes all memcache operations.

  Several methods are no-ops to retain source-level compatibility
  with the existing popular Python memcache library.

  Any method that takes a 'key' argument will accept that key as a string
  (unicode or not) or a tuple of (hash_value, string) where the hash_value,
  normally used for sharding onto a memcache instance, is instead ignored, as
  Google App Engine deals with the sharding transparently. Keys in memcache are
  just bytes, without a specified encoding. All such methods may raise TypeError
  if provided a bogus key value and a ValueError if the key is too large.

  Any method that takes a 'value' argument will accept as that value any
  string (unicode or not), int, long, or pickle-able Python object, including
  all native types. You'll get back from the cache the same type that you
  originally put in.
  """

  def __init__(self, servers=None, debug=0,
               pickleProtocol=pickle.HIGHEST_PROTOCOL,
               pickler=pickle.Pickler,
               unpickler=pickle.Unpickler,
               pload=None,
               pid=None,
               make_sync_call=apiproxy_stub_map.MakeSyncCall):
    """Create a new Client object.

    No parameters are required.

    Arguments:
      servers: Ignored; only for compatibility.
      debug: Ignored; only for compatibility.
      pickleProtocol: Pickle protocol to use for pickling the object.
      pickler: pickle.Pickler sub-class to use for pickling.
      unpickler: pickle.Unpickler sub-class to use for unpickling.
      pload: Callable to use for retrieving objects by persistent id.
      pid: Callable to use for determine the persistent id for objects, if any.
      make_sync_call: Function to use to make an App Engine service call.
        Used for testing.
    """
    # One shared cStringIO buffer backs both the pickler and unpickler; the
    # DoPickle/DoUnpickle closures below reset it (and the memo tables) on
    # every call.  NOTE(review): this sharing is not thread-safe — presumably
    # each request gets its own Client; verify before reusing across threads.
    self._pickle_data = cStringIO.StringIO()
    self._pickler_instance = pickler(self._pickle_data,
                                     protocol=pickleProtocol)
    self._unpickler_instance = unpickler(self._pickle_data)
    if pid is not None:
      self._pickler_instance.persistent_id = pid
    if pload is not None:
      self._unpickler_instance.persistent_load = pload

    def DoPickle(value):
      # Rewind/clear the shared buffer, then pickle 'value' into it.
      self._pickle_data.truncate(0)
      self._pickler_instance.clear_memo()
      self._pickler_instance.dump(value)
      return self._pickle_data.getvalue()
    self._do_pickle = DoPickle

    def DoUnpickle(value):
      # Load the serialized bytes into the shared buffer and unpickle them.
      self._pickle_data.truncate(0)
      self._pickle_data.write(value)
      self._pickle_data.seek(0)
      self._unpickler_instance.memo.clear()
      return self._unpickler_instance.load()
    self._do_unpickle = DoUnpickle

    self._make_sync_call = make_sync_call

  def set_servers(self, servers):
    """Sets the pool of memcache servers used by the client.

    This is purely a compatibility method.  In Google App Engine, it's a no-op.
    """
    pass

  def disconnect_all(self):
    """Closes all connections to memcache servers.

    This is purely a compatibility method.  In Google App Engine, it's a no-op.
    """
    pass

  def forget_dead_hosts(self):
    """Resets all servers to the alive status.

    This is purely a compatibility method.  In Google App Engine, it's a no-op.
    """
    pass

  def debuglog(self):
    """Logging function for debugging information.

    This is purely a compatibility method.  In Google App Engine, it's a no-op.
    """
    pass

  def get_stats(self):
    """Gets memcache statistics for this application.

    All of these statistics may reset due to various transient conditions. They
    provide the best information available at the time of being called.

    Returns:
      Dictionary mapping statistic names to associated values. Statistics and
      their associated meanings:

        hits: Number of cache get requests resulting in a cache hit.
        misses: Number of cache get requests resulting in a cache miss.
        byte_hits: Sum of bytes transferred on get requests. Rolls over to
          zero on overflow.
        items: Number of key/value pairs in the cache.
        bytes: Total size of all items in the cache.
        oldest_item_age: How long in seconds since the oldest item in the
          cache was accessed. Effectively, this indicates how long a new
          item will survive in the cache without being accessed. This is
          _not_ the amount of time that has elapsed since the item was
          created.

      On error, returns None.
    """
    request = MemcacheStatsRequest()
    response = MemcacheStatsResponse()
    try:
      self._make_sync_call('memcache', 'Stats', request, response)
    except apiproxy_errors.Error:
      return None
    if not response.has_stats():
      return None
    stats = response.stats()
    return {
      STAT_HITS: stats.hits(),
      STAT_MISSES: stats.misses(),
      STAT_BYTE_HITS: stats.byte_hits(),
      STAT_ITEMS: stats.items(),
      STAT_BYTES: stats.bytes(),
      STAT_OLDEST_ITEM_AGES: stats.oldest_item_age(),
    }

  def flush_all(self):
    """Deletes everything in memcache.

    Returns:
      True on success, False on RPC or server error.
    """
    request = MemcacheFlushRequest()
    response = MemcacheFlushResponse()
    try:
      self._make_sync_call('memcache', 'FlushAll', request, response)
    except apiproxy_errors.Error:
      return False
    return True

  def get(self, key):
    """Looks up a single key in memcache.

    If you have multiple items to load, though, it's much more efficient
    to use get_multi() instead, which loads them in one bulk operation,
    reducing the networking latency that'd otherwise be required to do
    many serialized get() operations.

    Args:
      key: The key in memcache to look up.  See docs on Client
        for details of format.

    Returns:
      The value of the key, if found in memcache, else None.
    """
    request = MemcacheGetRequest()
    request.add_key(_key_string(key))
    response = MemcacheGetResponse()
    try:
      self._make_sync_call('memcache', 'Get', request, response)
    except apiproxy_errors.Error:
      return None
    # A miss and an RPC failure are indistinguishable to the caller: both
    # return None.
    if not response.item_size():
      return None
    return _decode_value(response.item(0).value(),
                         response.item(0).flags(),
                         self._do_unpickle)

  def get_multi(self, keys, key_prefix=''):
    """Looks up multiple keys from memcache in one operation.

    This is the recommended way to do bulk loads.

    Args:
      keys: List of keys to look up.  Keys may be strings or
        tuples of (hash_value, string).  Google App Engine
        does the sharding and hashing automatically, though, so the hash
        value is ignored.  To memcache, keys are just series of bytes,
        and not in any particular encoding.
      key_prefix: Prefix to prepend to all keys when talking to the server;
        not included in the returned dictionary.

    Returns:
      A dictionary of the keys and values that were present in memcache.
      Even if the key_prefix was specified, that key_prefix won't be on
      the keys in the returned dictionary.
    """
    request = MemcacheGetRequest()
    response = MemcacheGetResponse()
    # user_key maps prefixed server keys back to the caller's original keys.
    user_key = {}
    for key in keys:
      request.add_key(_key_string(key, key_prefix, user_key))
    try:
      self._make_sync_call('memcache', 'Get', request, response)
    except apiproxy_errors.Error:
      return {}
    return_value = {}
    for returned_item in response.item_list():
      value = _decode_value(returned_item.value(), returned_item.flags(),
                            self._do_unpickle)
      return_value[user_key[returned_item.key()]] = value
    return return_value

  def delete(self, key, seconds=0):
    """Deletes a key from memcache.

    Args:
      key: Key to delete.  See docs on Client for details.
      seconds: Optional number of seconds to make deleted items 'locked'
        for 'add' operations. Value can be a delta from current time (up to
        1 month), or an absolute Unix epoch time.  Defaults to 0, which means
        items can be immediately added.  With or without this option,
        a 'set' operation will always work.  Float values will be rounded up to
        the nearest whole second.

    Returns:
      DELETE_NETWORK_FAILURE (0) on network failure,
      DELETE_ITEM_MISSING (1) if the server tried to delete the item but
      didn't have it, or
      DELETE_SUCCESSFUL (2) if the item was actually deleted.
      This can be used as a boolean value, where a network failure is the
      only bad condition.
    """
    if not isinstance(seconds, (int, long, float)):
      raise TypeError('Delete timeout must be a number.')
    if seconds < 0:
      raise ValueError('Delete timeout must be non-negative.')
    request = MemcacheDeleteRequest()
    response = MemcacheDeleteResponse()
    delete_item = request.add_item()
    delete_item.set_key(_key_string(key))
    delete_item.set_delete_time(int(math.ceil(seconds)))
    try:
      self._make_sync_call('memcache', 'Delete', request, response)
    except apiproxy_errors.Error:
      return DELETE_NETWORK_FAILURE
    assert response.delete_status_size() == 1, 'Unexpected status size.'
    if response.delete_status(0) == MemcacheDeleteResponse.DELETED:
      return DELETE_SUCCESSFUL
    elif response.delete_status(0) == MemcacheDeleteResponse.NOT_FOUND:
      return DELETE_ITEM_MISSING
    assert False, 'Unexpected deletion status code.'

  def delete_multi(self, keys, seconds=0, key_prefix=''):
    """Delete multiple keys at once.

    Args:
      keys: List of keys to delete.
      seconds: Optional number of seconds to make deleted items 'locked'
        for 'add' operations. Value can be a delta from current time (up to
        1 month), or an absolute Unix epoch time.  Defaults to 0, which means
        items can be immediately added.  With or without this option,
        a 'set' operation will always work.  Float values will be rounded up to
        the nearest whole second.
      key_prefix: Prefix to put on all keys when sending specified
        keys to memcache.  See docs for get_multi() and set_multi().

    Returns:
      True if all operations completed successfully.  False if one
      or more failed to complete.
    """
    if not isinstance(seconds, (int, long, float)):
      raise TypeError('Delete timeout must be a number.')
    if seconds < 0:
      raise ValueError('Delete timeout must not be negative.')
    request = MemcacheDeleteRequest()
    response = MemcacheDeleteResponse()
    for key in keys:
      delete_item = request.add_item()
      delete_item.set_key(_key_string(key, key_prefix=key_prefix))
      delete_item.set_delete_time(int(math.ceil(seconds)))
    try:
      self._make_sync_call('memcache', 'Delete', request, response)
    except apiproxy_errors.Error:
      return False
    return True

  def set(self, key, value, time=0, min_compress_len=0):
    """Sets a key's value, regardless of previous contents in cache.

    Unlike add() and replace(), this method always sets (or
    overwrites) the value in memcache, regardless of previous
    contents.

    Args:
      key: Key to set.  See docs on Client for details.
      value: Value to set.  Any type.  If complex, will be pickled.
      time: Optional expiration time, either relative number of seconds
        from current time (up to 1 month), or an absolute Unix epoch time.
        By default, items never expire, though items may be evicted due to
        memory pressure.  Float values will be rounded up to the nearest
        whole second.
      min_compress_len: Ignored option for compatibility.

    Returns:
      True if set.  False on error.
    """
    return self._set_with_policy(MemcacheSetRequest.SET, key, value, time=time)

  def add(self, key, value, time=0, min_compress_len=0):
    """Sets a key's value, iff item is not already in memcache.

    Args:
      key: Key to set.  See docs on Client for details.
      value: Value to set.  Any type.  If complex, will be pickled.
      time: Optional expiration time, either relative number of seconds
        from current time (up to 1 month), or an absolute Unix epoch time.
        By default, items never expire, though items may be evicted due to
        memory pressure.  Float values will be rounded up to the nearest
        whole second.
      min_compress_len: Ignored option for compatibility.

    Returns:
      True if added.  False on error.
    """
    return self._set_with_policy(MemcacheSetRequest.ADD, key, value, time=time)

  def replace(self, key, value, time=0, min_compress_len=0):
    """Replaces a key's value, failing if item isn't already in memcache.

    Args:
      key: Key to set.  See docs on Client for details.
      value: Value to set.  Any type.  If complex, will be pickled.
      time: Optional expiration time, either relative number of seconds
        from current time (up to 1 month), or an absolute Unix epoch time.
        By default, items never expire, though items may be evicted due to
        memory pressure.  Float values will be rounded up to the nearest
        whole second.
      min_compress_len: Ignored option for compatibility.

    Returns:
      True if replaced.  False on RPC error or cache miss.
    """
    return self._set_with_policy(MemcacheSetRequest.REPLACE,
                                 key, value, time=time)

  def _set_with_policy(self, policy, key, value, time=0):
    """Sets a single key with a specified policy.

    Helper function for set(), add(), and replace().

    Args:
      policy:  One of MemcacheSetRequest.SET, .ADD, or .REPLACE.
      key: Key to add, set, or replace.  See docs on Client for details.
      value: Value to set.
      time: Expiration time, defaulting to 0 (never expiring).

    Returns:
      True if stored, False on RPC error or policy error, e.g. a replace
      that failed due to the item not already existing, or an add
      failing due to the item not already existing.
    """
    if not isinstance(time, (int, long, float)):
      raise TypeError('Expiration must be a number.')
    if time < 0:
      raise ValueError('Expiration must not be negative.')
    request = MemcacheSetRequest()
    item = request.add_item()
    item.set_key(_key_string(key))
    stored_value, flags = _validate_encode_value(value, self._do_pickle)
    item.set_value(stored_value)
    item.set_flags(flags)
    item.set_set_policy(policy)
    item.set_expiration_time(int(math.ceil(time)))
    response = MemcacheSetResponse()
    try:
      self._make_sync_call('memcache', 'Set', request, response)
    except apiproxy_errors.Error:
      return False
    if response.set_status_size() != 1:
      return False
    return response.set_status(0) == MemcacheSetResponse.STORED

  def _set_multi_with_policy(self, policy, mapping, time=0, key_prefix=''):
    """Set multiple keys with a specified policy.

    Helper function for set_multi(), add_multi(), and replace_multi(). This
    reduces the network latency of doing many requests in serial.

    Args:
      policy:  One of MemcacheSetRequest.SET, ADD, or REPLACE.
      mapping: Dictionary of keys to values.
      time: Optional expiration time, either relative number of seconds
        from current time (up to 1 month), or an absolute Unix epoch time.
        By default, items never expire, though items may be evicted due to
        memory pressure.  Float values will be rounded up to the nearest
        whole second.
      key_prefix: Prefix to prepend to all keys.

    Returns:
      A list of keys whose values were NOT set.  On total success,
      this list should be empty.  On network/RPC/server errors,
      a list of all input keys is returned; in this case the keys
      may or may not have been updated.
    """
    if not isinstance(time, (int, long, float)):
      raise TypeError('Expiration must be a number.')
    if time < 0.0:
      raise ValueError('Expiration must not be negative.')
    request = MemcacheSetRequest()
    # user_key maps prefixed server keys back to the caller's original keys
    # so failures can be reported in the caller's terms.
    user_key = {}
    server_keys = []
    for key, value in mapping.iteritems():
      server_key = _key_string(key, key_prefix, user_key)
      stored_value, flags = _validate_encode_value(value, self._do_pickle)
      server_keys.append(server_key)
      item = request.add_item()
      item.set_key(server_key)
      item.set_value(stored_value)
      item.set_flags(flags)
      item.set_set_policy(policy)
      item.set_expiration_time(int(math.ceil(time)))
    response = MemcacheSetResponse()
    try:
      self._make_sync_call('memcache', 'Set', request, response)
    except apiproxy_errors.Error:
      # On RPC failure report every input key as not-set.
      return user_key.values()
    assert response.set_status_size() == len(server_keys)
    unset_list = []
    for server_key, set_status in zip(server_keys, response.set_status_list()):
      if set_status != MemcacheSetResponse.STORED:
        unset_list.append(user_key[server_key])
    return unset_list

  def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
    """Set multiple keys' values at once, regardless of previous contents.

    Args:
      mapping: Dictionary of keys to values.
      time: Optional expiration time, either relative number of seconds
        from current time (up to 1 month), or an absolute Unix epoch time.
        By default, items never expire, though items may be evicted due to
        memory pressure.  Float values will be rounded up to the nearest
        whole second.
      key_prefix: Prefix to prepend to all keys.
      min_compress_len: Unimplemented compatibility option.

    Returns:
      A list of keys whose values were NOT set.  On total success,
      this list should be empty.
    """
    return self._set_multi_with_policy(MemcacheSetRequest.SET, mapping,
                                       time=time, key_prefix=key_prefix)

  def add_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
    """Set multiple keys' values iff items are not already in memcache.

    Args:
      mapping: Dictionary of keys to values.
      time: Optional expiration time, either relative number of seconds
        from current time (up to 1 month), or an absolute Unix epoch time.
        By default, items never expire, though items may be evicted due to
        memory pressure.  Float values will be rounded up to the nearest
        whole second.
      key_prefix: Prefix to prepend to all keys.
      min_compress_len: Unimplemented compatibility option.

    Returns:
      A list of keys whose values were NOT set because they did not already
      exist in memcache.  On total success, this list should be empty.
    """
    return self._set_multi_with_policy(MemcacheSetRequest.ADD, mapping,
                                       time=time, key_prefix=key_prefix)

  def replace_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
    """Replace multiple keys' values, failing if the items aren't in memcache.

    Args:
      mapping: Dictionary of keys to values.
      time: Optional expiration time, either relative number of seconds
        from current time (up to 1 month), or an absolute Unix epoch time.
        By default, items never expire, though items may be evicted due to
        memory pressure.  Float values will be rounded up to the nearest
        whole second.
      key_prefix: Prefix to prepend to all keys.
      min_compress_len: Unimplemented compatibility option.

    Returns:
      A list of keys whose values were NOT set because they already existed
      in memcache.  On total success, this list should be empty.
    """
    return self._set_multi_with_policy(MemcacheSetRequest.REPLACE, mapping,
                                       time=time, key_prefix=key_prefix)

  def incr(self, key, delta=1):
    """Atomically increments a key's value.

    Internally, the value is a unsigned 64-bit integer.  Memcache
    doesn't check 64-bit overflows.  The value, if too large, will
    wrap around.

    The key must already exist in the cache to be incremented.  To
    initialize a counter, set() it to the initial value, as an
    ASCII decimal integer.  Future get()s of the key, post-increment,
    will still be an ASCII decimal value.

    Args:
      key: Key to increment.  See Client's docstring for details.
      delta: Non-negative integer value (int or long) to increment key by,
        defaulting to 1.

    Returns:
      New long integer value, or None if key was not in the cache, could not
      be incremented for any other reason, or a network/RPC/server error
      occurred.

    Raises:
      ValueError: If number is negative.
      TypeError: If delta isn't an int or long.
    """
    return self._incrdecr(key, False, delta)

  def decr(self, key, delta=1):
    """Atomically decrements a key's value.

    Internally, the value is a unsigned 64-bit integer.  Memcache
    caps decrementing below zero to zero.

    The key must already exist in the cache to be decremented.  See
    docs on incr() for details.

    Args:
      key: Key to decrement.  See Client's docstring for details.
      delta: Non-negative integer value (int or long) to decrement key by,
        defaulting to 1.

    Returns:
      New long integer value, or None if key wasn't in cache and couldn't
      be decremented, or a network/RPC/server error occurred.

    Raises:
      ValueError: If number is negative.
      TypeError: If delta isn't an int or long.
    """
    return self._incrdecr(key, True, delta)

  def _incrdecr(self, key, is_negative, delta):
    """Increment or decrement a key by a provided delta.

    Args:
      key: Key to increment or decrement.
      is_negative: Boolean, if this is a decrement.
      delta: Non-negative integer amount (int or long) to increment
        or decrement by.

    Returns:
      New long integer value, or None on cache miss or network/RPC/server
      error.

    Raises:
      ValueError: If delta is negative.
      TypeError: If delta isn't an int or long.
    """
    if not isinstance(delta, (int, long)):
      raise TypeError('Delta must be an integer or long, received %r' % delta)
    if delta < 0:
      raise ValueError('Delta must not be negative.')
    request = MemcacheIncrementRequest()
    response = MemcacheIncrementResponse()
    request.set_key(_key_string(key))
    request.set_delta(delta)
    if is_negative:
      request.set_direction(MemcacheIncrementRequest.DECREMENT)
    else:
      request.set_direction(MemcacheIncrementRequest.INCREMENT)
    try:
      self._make_sync_call('memcache', 'Increment', request, response)
    except apiproxy_errors.Error:
      return None
    if response.has_new_value():
      return response.new_value()
    return None
_CLIENT = None
def setup_client(client_obj):
  """Sets the Client object instance to use for all module-level methods.

  Use this method if you want to have custom persistent_id() or
  persistent_load() functions associated with your client.

  Args:
    client_obj: Instance of the memcache.Client object.
  """
  global _CLIENT
  _CLIENT = client_obj
  module_globals = globals()
  # Re-bind every module-level convenience function to the new client's
  # bound methods.
  for method_name in ('set_servers', 'disconnect_all', 'forget_dead_hosts',
                      'debuglog', 'get', 'get_multi', 'set', 'set_multi',
                      'add', 'add_multi', 'replace', 'replace_multi',
                      'delete', 'delete_multi', 'incr', 'decr',
                      'flush_all', 'get_stats'):
    module_globals[method_name] = getattr(_CLIENT, method_name)
setup_client(Client())
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the memcache API, keeping all data in process memory."""
import logging
import time
from google.appengine.api import apiproxy_stub
from google.appengine.api import memcache
from google.appengine.api.memcache import memcache_service_pb
# Short aliases for the generated protocol buffer message classes used by
# the stub below.
MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
class CacheEntry(object):
  """A single value held in the in-memory cache stub."""

  def __init__(self, value, expiration, flags, gettime):
    """Initializer.

    Args:
      value: String containing the data for this entry.
      expiration: Number containing the expiration time or offset in seconds
        for this entry.
      flags: Opaque flags used by the memcache implementation.
      gettime: Used for testing. Function that works like time.time().
    """
    assert isinstance(value, basestring)
    assert len(value) <= memcache.MAX_VALUE_SIZE
    assert isinstance(expiration, (int, long))
    self._gettime = gettime
    self.value = value
    self.flags = flags
    self.created_time = gettime()
    # An expiration of 0 means "never expires".
    self.will_expire = expiration != 0
    self.locked = False
    self._SetExpiration(expiration)

  def _SetExpiration(self, expiration):
    """Sets the expiration for this entry.

    Args:
      expiration: Number containing the expiration time or offset in seconds
        for this entry.  If expiration is above one month, then it's
        considered an absolute time since the UNIX epoch.
    """
    one_month = 86400 * 30
    if expiration > one_month:
      self.expiration_time = expiration
    else:
      self.expiration_time = self._gettime() + expiration

  def CheckExpired(self):
    """Returns True if this entry has expired; False otherwise."""
    if not self.will_expire:
      return False
    return self._gettime() >= self.expiration_time

  def ExpireAndLock(self, timeout):
    """Marks this entry as deleted and locks it for the expiration time.

    Used to implement memcache's delete timeout behavior.

    Args:
      timeout: Parameter originally passed to memcache.delete or
        memcache.delete_multi to control deletion timeout.
    """
    self.will_expire = True
    self.locked = True
    self._SetExpiration(timeout)

  def CheckLocked(self):
    """Returns True if this entry was deleted but has not yet timed out."""
    if not self.locked:
      return False
    return not self.CheckExpired()
class MemcacheServiceStub(apiproxy_stub.APIProxyStub):
  """Python only memcache service stub.

  This stub keeps all data in the local process' memory, not in any
  external servers.
  """

  def __init__(self, gettime=time.time, service_name='memcache'):
    """Initializer.

    Args:
      gettime: time.time()-like function used for testing.
      service_name: Service name expected for all calls.
    """
    super(MemcacheServiceStub, self).__init__(service_name)
    self._gettime = gettime
    self._ResetStats()
    # Maps server key (string) -> CacheEntry.
    self._the_cache = {}

  def _ResetStats(self):
    """Resets statistics information."""
    self._hits = 0
    self._misses = 0
    self._byte_hits = 0
    self._cache_creation_time = self._gettime()

  def _GetKey(self, key):
    """Retrieves a CacheEntry from the cache if it hasn't expired.

    Does not take deletion timeout into account.

    Args:
      key: The key to retrieve from the cache.

    Returns:
      The corresponding CacheEntry instance, or None if it was not found or
      has already expired.
    """
    entry = self._the_cache.get(key, None)
    if entry is None:
      return None
    elif entry.CheckExpired():
      # Lazy eviction: expired entries are removed on lookup.
      del self._the_cache[key]
      return None
    else:
      return entry

  def _Dynamic_Get(self, request, response):
    """Implementation of MemcacheService::Get().

    Args:
      request: A MemcacheGetRequest.
      response: A MemcacheGetResponse.
    """
    keys = set(request.key_list())
    for key in keys:
      entry = self._GetKey(key)
      # Entries locked by a timed delete are treated as misses.
      if entry is None or entry.CheckLocked():
        self._misses += 1
        continue
      self._hits += 1
      self._byte_hits += len(entry.value)
      item = response.add_item()
      item.set_key(key)
      item.set_value(entry.value)
      item.set_flags(entry.flags)

  def _Dynamic_Set(self, request, response):
    """Implementation of MemcacheService::Set().

    Args:
      request: A MemcacheSetRequest.
      response: A MemcacheSetResponse.
    """
    for item in request.item_list():
      key = item.key()
      set_policy = item.set_policy()
      old_entry = self._GetKey(key)
      set_status = MemcacheSetResponse.NOT_STORED
      if ((set_policy == MemcacheSetRequest.SET) or
          (set_policy == MemcacheSetRequest.ADD and old_entry is None) or
          (set_policy == MemcacheSetRequest.REPLACE and old_entry is not None)):
        # A delete-locked entry blocks ADD/REPLACE, but SET always wins.
        if (old_entry is None or
            set_policy == MemcacheSetRequest.SET
            or not old_entry.CheckLocked()):
          self._the_cache[key] = CacheEntry(item.value(),
                                            item.expiration_time(),
                                            item.flags(),
                                            gettime=self._gettime)
          set_status = MemcacheSetResponse.STORED
      response.add_set_status(set_status)

  def _Dynamic_Delete(self, request, response):
    """Implementation of MemcacheService::Delete().

    Args:
      request: A MemcacheDeleteRequest.
      response: A MemcacheDeleteResponse.
    """
    for item in request.item_list():
      key = item.key()
      entry = self._GetKey(key)
      delete_status = MemcacheDeleteResponse.DELETED
      if entry is None:
        delete_status = MemcacheDeleteResponse.NOT_FOUND
      elif item.delete_time() == 0:
        del self._the_cache[key]
      else:
        # Non-zero delete timeout: keep the entry around but locked so that
        # 'add' fails until the lock expires.
        entry.ExpireAndLock(item.delete_time())
      response.add_delete_status(delete_status)

  def _Dynamic_Increment(self, request, response):
    """Implementation of MemcacheService::Increment().

    Args:
      request: A MemcacheIncrementRequest.
      response: A MemcacheIncrementResponse.
    """
    key = request.key()
    entry = self._GetKey(key)
    if entry is None:
      # Cache miss: leave the response empty (no new_value).
      return
    try:
      old_value = long(entry.value)
      if old_value < 0:
        raise ValueError
    except ValueError, e:
      logging.error('Increment/decrement failed: Could not interpret '
                    'value for key = "%s" as an unsigned integer.', key)
      return
    delta = request.delta()
    if request.direction() == MemcacheIncrementRequest.DECREMENT:
      delta = -delta
    new_value = old_value + delta
    # NOTE(review): the stub clamps out-of-range results to 0; the real
    # service wraps increments mod 2**64 — behavior may differ at the edges.
    if not (0 <= new_value < 2**64):
      new_value = 0
    entry.value = str(new_value)
    response.set_new_value(new_value)

  def _Dynamic_FlushAll(self, request, response):
    """Implementation of MemcacheService::FlushAll().

    Args:
      request: A MemcacheFlushRequest.
      response: A MemcacheFlushResponse.
    """
    self._the_cache.clear()
    self._ResetStats()

  def _Dynamic_Stats(self, request, response):
    """Implementation of MemcacheService::Stats().

    Args:
      request: A MemcacheStatsRequest.
      response: A MemcacheStatsResponse.
    """
    stats = response.mutable_stats()
    stats.set_hits(self._hits)
    stats.set_misses(self._misses)
    stats.set_byte_hits(self._byte_hits)
    stats.set_items(len(self._the_cache))
    total_bytes = 0
    for key, entry in self._the_cache.iteritems():
      total_bytes += len(entry.value)
    stats.set_bytes(total_bytes)
    stats.set_oldest_item_age(self._gettime() - self._cache_creation_time)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import VoidProto
class MemcacheServiceError(ProtocolBuffer.ProtocolMessage):
  """Field-less protocol message that exists to carry the ErrorCode enum."""

  OK = 0
  UNSPECIFIED_ERROR = 1

  _ErrorCode_NAMES = {
    0: "OK",
    1: "UNSPECIFIED_ERROR",
  }

  @classmethod
  def ErrorCode_Name(cls, x):
    """Returns the symbolic name for error code x, or "" when unknown."""
    return cls._ErrorCode_NAMES.get(x, "")

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # No fields are defined, so every tag is skipped; tag 0 is malformed.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheGetRequest(ProtocolBuffer.ProtocolMessage):
  """Request for MemcacheService::Get().

  Holds a single repeated string field `key` (field 1, tag 10) listing the
  cache keys to fetch.
  """

  def __init__(self, contents=None):
    self.key_ = []
    if contents is not None:
      self.MergeFromString(contents)

  def key_size(self):
    return len(self.key_)

  def key_list(self):
    return self.key_

  def key(self, i):
    return self.key_[i]

  def set_key(self, i, x):
    self.key_[i] = x

  def add_key(self, x):
    self.key_.append(x)

  def clear_key(self):
    self.key_ = []

  def MergeFrom(self, x):
    assert x is not self
    for k in x.key_list():
      self.add_key(k)

  def Equals(self, x):
    if x is self: return 1
    if self.key_ != x.key_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    # One tag byte per key plus each key's length-prefixed size.
    n = len(self.key_)
    for k in self.key_:
      n += self.lengthString(len(k))
    return n

  def Clear(self):
    self.clear_key()

  def OutputUnchecked(self, out):
    for k in self.key_:
      out.putVarInt32(10)
      out.putPrefixedString(k)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 10:
        self.add_key(d.getPrefixedString())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    res = ""
    for i, k in enumerate(self.key_):
      elm = "(%d)" % i if printElemNumber else ""
      res += prefix + ("key%s: %s\n" % (elm, self.DebugFormatString(k)))
    return res

  kkey = 1

  _TEXT = (
   "ErrorCode",
   "key",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheGetResponse_Item(ProtocolBuffer.ProtocolMessage):
  """A single cache hit inside a MemcacheGetResponse (group field 1).

  Wire format within the group:
    key:   required string, field 2 (tag 18).
    value: required string, field 3 (tag 26).
    flags: optional fixed32, field 4 (tag 37); opaque application flags.
  """
  # Class-level defaults double as the "unset" values; the has_*_ bits
  # track explicit presence.
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  has_flags_ = 0
  flags_ = 0
  def __init__(self, contents=None):
    # contents: optional serialized byte string to parse into this message.
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_
  def flags(self): return self.flags_
  def set_flags(self, x):
    self.has_flags_ = 1
    self.flags_ = x
  def clear_flags(self):
    if self.has_flags_:
      self.has_flags_ = 0
      self.flags_ = 0
  def has_flags(self): return self.has_flags_
  def MergeFrom(self, x):
    """Copies every present field of x into this message."""
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
    if (x.has_flags()): self.set_flags(x.flags())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 rather than True/False."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    if self.has_flags_ != x.has_flags_: return 0
    if self.has_flags_ and self.flags_ != x.flags_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff both required fields (key, value) are set."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    # The trailing +2 covers the one-byte tags for key and value; flags'
    # tag plus its 4-byte fixed32 payload account for the +5.
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    if (self.has_flags_): n += 5
    return n + 2
  def Clear(self):
    self.clear_key()
    self.clear_value()
    self.clear_flags()
  def OutputUnchecked(self, out):
    """Serializes the fields without verifying IsInitialized()."""
    out.putVarInt32(18)
    out.putPrefixedString(self.key_)
    out.putVarInt32(26)
    out.putPrefixedString(self.value_)
    if (self.has_flags_):
      out.putVarInt32(37)
      out.put32(self.flags_)
  def TryMerge(self, d):
    """Parses fields from decoder d until the enclosing group's ENDGROUP
    tag (12) is reached; unknown tags are skipped, tag 0 is malformed."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_value(d.getPrefixedString())
        continue
      if tt == 37:
        self.set_flags(d.get32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Text-format dump of whichever fields are set."""
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatFixed32(self.flags_))
    return res
class MemcacheGetResponse(ProtocolBuffer.ProtocolMessage):
  """Response for MemcacheService::Get().

  Contains a repeated group `Item` (field 1, framed by tags 11/12); each
  element is a MemcacheGetResponse_Item holding one cache hit.
  """
  def __init__(self, contents=None):
    # contents: optional serialized byte string to parse into this message.
    self.item_ = []
    if contents is not None: self.MergeFromString(contents)
  def item_size(self): return len(self.item_)
  def item_list(self): return self.item_
  def item(self, i):
    return self.item_[i]
  def mutable_item(self, i):
    # Items are plain Python objects, so the mutable accessor is just the
    # element itself.
    return self.item_[i]
  def add_item(self):
    # Appends and returns a fresh empty item for the caller to fill in.
    x = MemcacheGetResponse_Item()
    self.item_.append(x)
    return x
  def clear_item(self):
    self.item_ = []
  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
  def Equals(self, x):
    if x is self: return 1
    if len(self.item_) != len(x.item_): return 0
    for e1, e2 in zip(self.item_, x.item_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Initialized only when every contained item is initialized.
    initialized = 1
    for p in self.item_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # 2 bytes per item for the STARTGROUP/ENDGROUP tags, plus each body.
    n = 0
    n += 2 * len(self.item_)
    for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
    return n + 0
  def Clear(self):
    self.clear_item()
  def OutputUnchecked(self, out):
    # Each item is framed by STARTGROUP (11) / ENDGROUP (12) tags.
    for i in xrange(len(self.item_)):
      out.putVarInt32(11)
      self.item_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_item().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.item_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Item%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  # Field numbers for the Item group and its members.
  kItemGroup = 1
  kItemkey = 2
  kItemvalue = 3
  kItemflags = 4
  _TEXT = (
   "ErrorCode",
   "Item",
   "key",
   "value",
   "flags",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.FLOAT,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheSetRequest_Item(ProtocolBuffer.ProtocolMessage):
  """One item to store, inside the MemcacheSetRequest Item group (field 1).

  Wire format within the group:
    key:             required string, field 2 (tag 18).
    value:           required string, field 3 (tag 26).
    flags:           optional fixed32, field 4 (tag 37).
    set_policy:      optional varint, field 5 (tag 40); defaults to 1,
                     which is MemcacheSetRequest.SET.
    expiration_time: optional fixed32, field 6 (tag 53); defaults to 0.
  """
  # Class-level defaults double as the "unset" values; the has_*_ bits
  # track explicit presence.
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  has_flags_ = 0
  flags_ = 0
  has_set_policy_ = 0
  set_policy_ = 1
  has_expiration_time_ = 0
  expiration_time_ = 0
  def __init__(self, contents=None):
    # contents: optional serialized byte string to parse into this message.
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_
  def flags(self): return self.flags_
  def set_flags(self, x):
    self.has_flags_ = 1
    self.flags_ = x
  def clear_flags(self):
    if self.has_flags_:
      self.has_flags_ = 0
      self.flags_ = 0
  def has_flags(self): return self.has_flags_
  def set_policy(self): return self.set_policy_
  def set_set_policy(self, x):
    self.has_set_policy_ = 1
    self.set_policy_ = x
  def clear_set_policy(self):
    if self.has_set_policy_:
      self.has_set_policy_ = 0
      self.set_policy_ = 1
  def has_set_policy(self): return self.has_set_policy_
  def expiration_time(self): return self.expiration_time_
  def set_expiration_time(self, x):
    self.has_expiration_time_ = 1
    self.expiration_time_ = x
  def clear_expiration_time(self):
    if self.has_expiration_time_:
      self.has_expiration_time_ = 0
      self.expiration_time_ = 0
  def has_expiration_time(self): return self.has_expiration_time_
  def MergeFrom(self, x):
    """Copies every present field of x into this message."""
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
    if (x.has_flags()): self.set_flags(x.flags())
    if (x.has_set_policy()): self.set_set_policy(x.set_policy())
    if (x.has_expiration_time()): self.set_expiration_time(x.expiration_time())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 rather than True/False."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    if self.has_flags_ != x.has_flags_: return 0
    if self.has_flags_ and self.flags_ != x.flags_: return 0
    if self.has_set_policy_ != x.has_set_policy_: return 0
    if self.has_set_policy_ and self.set_policy_ != x.set_policy_: return 0
    if self.has_expiration_time_ != x.has_expiration_time_: return 0
    if self.has_expiration_time_ and self.expiration_time_ != x.expiration_time_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff both required fields (key, value) are set."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    # The trailing +2 covers the one-byte tags for key and value; the
    # fixed32 fields each contribute tag + 4 payload bytes (+5).
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    if (self.has_flags_): n += 5
    if (self.has_set_policy_): n += 1 + self.lengthVarInt64(self.set_policy_)
    if (self.has_expiration_time_): n += 5
    return n + 2
  def Clear(self):
    self.clear_key()
    self.clear_value()
    self.clear_flags()
    self.clear_set_policy()
    self.clear_expiration_time()
  def OutputUnchecked(self, out):
    """Serializes the fields without verifying IsInitialized()."""
    out.putVarInt32(18)
    out.putPrefixedString(self.key_)
    out.putVarInt32(26)
    out.putPrefixedString(self.value_)
    if (self.has_flags_):
      out.putVarInt32(37)
      out.put32(self.flags_)
    if (self.has_set_policy_):
      out.putVarInt32(40)
      out.putVarInt32(self.set_policy_)
    if (self.has_expiration_time_):
      out.putVarInt32(53)
      out.put32(self.expiration_time_)
  def TryMerge(self, d):
    """Parses fields from decoder d until the enclosing group's ENDGROUP
    tag (12) is reached; unknown tags are skipped, tag 0 is malformed."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 26:
        self.set_value(d.getPrefixedString())
        continue
      if tt == 37:
        self.set_flags(d.get32())
        continue
      if tt == 40:
        self.set_set_policy(d.getVarInt32())
        continue
      if tt == 53:
        self.set_expiration_time(d.get32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Text-format dump of whichever fields are set."""
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    if self.has_flags_: res+=prefix+("flags: %s\n" % self.DebugFormatFixed32(self.flags_))
    if self.has_set_policy_: res+=prefix+("set_policy: %s\n" % self.DebugFormatInt32(self.set_policy_))
    if self.has_expiration_time_: res+=prefix+("expiration_time: %s\n" % self.DebugFormatFixed32(self.expiration_time_))
    return res
class MemcacheSetRequest(ProtocolBuffer.ProtocolMessage):
  """Request for MemcacheService::Set().

  Carries the SetPolicy enum (SET/ADD/REPLACE) and a repeated group
  `Item` (field 1, framed by tags 11/12) of MemcacheSetRequest_Item.
  """
  # SetPolicy enum values.
  SET          =    1
  ADD          =    2
  REPLACE      =    3
  _SetPolicy_NAMES = {
    1: "SET",
    2: "ADD",
    3: "REPLACE",
  }
  def SetPolicy_Name(cls, x): return cls._SetPolicy_NAMES.get(x, "")
  SetPolicy_Name = classmethod(SetPolicy_Name)
  def __init__(self, contents=None):
    # contents: optional serialized byte string to parse into this message.
    self.item_ = []
    if contents is not None: self.MergeFromString(contents)
  def item_size(self): return len(self.item_)
  def item_list(self): return self.item_
  def item(self, i):
    return self.item_[i]
  def mutable_item(self, i):
    # Items are plain Python objects, so the mutable accessor is just the
    # element itself.
    return self.item_[i]
  def add_item(self):
    # Appends and returns a fresh empty item for the caller to fill in.
    x = MemcacheSetRequest_Item()
    self.item_.append(x)
    return x
  def clear_item(self):
    self.item_ = []
  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
  def Equals(self, x):
    if x is self: return 1
    if len(self.item_) != len(x.item_): return 0
    for e1, e2 in zip(self.item_, x.item_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Initialized only when every contained item is initialized.
    initialized = 1
    for p in self.item_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # 2 bytes per item for the STARTGROUP/ENDGROUP tags, plus each body.
    n = 0
    n += 2 * len(self.item_)
    for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
    return n + 0
  def Clear(self):
    self.clear_item()
  def OutputUnchecked(self, out):
    # Each item is framed by STARTGROUP (11) / ENDGROUP (12) tags.
    for i in xrange(len(self.item_)):
      out.putVarInt32(11)
      self.item_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_item().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.item_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Item%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  # Field numbers for the Item group and its members.
  kItemGroup = 1
  kItemkey = 2
  kItemvalue = 3
  kItemflags = 4
  kItemset_policy = 5
  kItemexpiration_time = 6
  _TEXT = (
   "ErrorCode",
   "Item",
   "key",
   "value",
   "flags",
   "set_policy",
   "expiration_time",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.FLOAT,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.FLOAT,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheSetResponse(ProtocolBuffer.ProtocolMessage):
  """Response for MemcacheService::Set().

  Carries one SetStatusCode per request item, in request order, as the
  repeated varint field `set_status` (field 1, tag 8).
  """

  STORED = 1
  NOT_STORED = 2
  ERROR = 3

  _SetStatusCode_NAMES = {
    1: "STORED",
    2: "NOT_STORED",
    3: "ERROR",
  }

  @classmethod
  def SetStatusCode_Name(cls, x):
    """Returns the symbolic name of status code x, or "" when unknown."""
    return cls._SetStatusCode_NAMES.get(x, "")

  def __init__(self, contents=None):
    self.set_status_ = []
    if contents is not None:
      self.MergeFromString(contents)

  def set_status_size(self):
    return len(self.set_status_)

  def set_status_list(self):
    return self.set_status_

  def set_status(self, i):
    return self.set_status_[i]

  def set_set_status(self, i, x):
    self.set_status_[i] = x

  def add_set_status(self, x):
    self.set_status_.append(x)

  def clear_set_status(self):
    self.set_status_ = []

  def MergeFrom(self, x):
    assert x is not self
    for status in x.set_status_list():
      self.add_set_status(status)

  def Equals(self, x):
    if x is self: return 1
    if self.set_status_ != x.set_status_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    # One tag byte per status plus each status' varint size.
    n = len(self.set_status_)
    for status in self.set_status_:
      n += self.lengthVarInt64(status)
    return n

  def Clear(self):
    self.clear_set_status()

  def OutputUnchecked(self, out):
    for status in self.set_status_:
      out.putVarInt32(8)
      out.putVarInt32(status)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 8:
        self.add_set_status(d.getVarInt32())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    res = ""
    for i, status in enumerate(self.set_status_):
      elm = "(%d)" % i if printElemNumber else ""
      res += prefix + ("set_status%s: %s\n" % (elm, self.DebugFormatInt32(status)))
    return res

  kset_status = 1

  _TEXT = (
   "ErrorCode",
   "set_status",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheDeleteRequest_Item(ProtocolBuffer.ProtocolMessage):
  """One key to delete, inside the MemcacheDeleteRequest Item group.

  Wire format within the group:
    key:         required string, field 2 (tag 18).
    delete_time: optional fixed32, field 3 (tag 29); defaults to 0.
  """
  # Class-level defaults double as the "unset" values; the has_*_ bits
  # track explicit presence.
  has_key_ = 0
  key_ = ""
  has_delete_time_ = 0
  delete_time_ = 0
  def __init__(self, contents=None):
    # contents: optional serialized byte string to parse into this message.
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def delete_time(self): return self.delete_time_
  def set_delete_time(self, x):
    self.has_delete_time_ = 1
    self.delete_time_ = x
  def clear_delete_time(self):
    if self.has_delete_time_:
      self.has_delete_time_ = 0
      self.delete_time_ = 0
  def has_delete_time(self): return self.has_delete_time_
  def MergeFrom(self, x):
    """Copies every present field of x into this message."""
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_delete_time()): self.set_delete_time(x.delete_time())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 rather than True/False."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_delete_time_ != x.has_delete_time_: return 0
    if self.has_delete_time_ and self.delete_time_ != x.delete_time_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the required key field is set."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    return initialized
  def ByteSize(self):
    # The trailing +1 covers key's one-byte tag; delete_time contributes
    # tag + 4 payload bytes (+5) when present.
    n = 0
    n += self.lengthString(len(self.key_))
    if (self.has_delete_time_): n += 5
    return n + 1
  def Clear(self):
    self.clear_key()
    self.clear_delete_time()
  def OutputUnchecked(self, out):
    """Serializes the fields without verifying IsInitialized()."""
    out.putVarInt32(18)
    out.putPrefixedString(self.key_)
    if (self.has_delete_time_):
      out.putVarInt32(29)
      out.put32(self.delete_time_)
  def TryMerge(self, d):
    """Parses fields from decoder d until the enclosing group's ENDGROUP
    tag (12) is reached; unknown tags are skipped, tag 0 is malformed."""
    while 1:
      tt = d.getVarInt32()
      if tt == 12: break
      if tt == 18:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 29:
        self.set_delete_time(d.get32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Text-format dump of whichever fields are set."""
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_delete_time_: res+=prefix+("delete_time: %s\n" % self.DebugFormatFixed32(self.delete_time_))
    return res
class MemcacheDeleteRequest(ProtocolBuffer.ProtocolMessage):
  """Request for MemcacheService::Delete().

  Contains a repeated group `Item` (field 1, framed by tags 11/12) of
  MemcacheDeleteRequest_Item entries, one per key to delete.
  """
  def __init__(self, contents=None):
    # contents: optional serialized byte string to parse into this message.
    self.item_ = []
    if contents is not None: self.MergeFromString(contents)
  def item_size(self): return len(self.item_)
  def item_list(self): return self.item_
  def item(self, i):
    return self.item_[i]
  def mutable_item(self, i):
    # Items are plain Python objects, so the mutable accessor is just the
    # element itself.
    return self.item_[i]
  def add_item(self):
    # Appends and returns a fresh empty item for the caller to fill in.
    x = MemcacheDeleteRequest_Item()
    self.item_.append(x)
    return x
  def clear_item(self):
    self.item_ = []
  def MergeFrom(self, x):
    assert x is not self
    for i in xrange(x.item_size()): self.add_item().CopyFrom(x.item(i))
  def Equals(self, x):
    if x is self: return 1
    if len(self.item_) != len(x.item_): return 0
    for e1, e2 in zip(self.item_, x.item_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Initialized only when every contained item is initialized.
    initialized = 1
    for p in self.item_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # 2 bytes per item for the STARTGROUP/ENDGROUP tags, plus each body.
    n = 0
    n += 2 * len(self.item_)
    for i in xrange(len(self.item_)): n += self.item_[i].ByteSize()
    return n + 0
  def Clear(self):
    self.clear_item()
  def OutputUnchecked(self, out):
    # Each item is framed by STARTGROUP (11) / ENDGROUP (12) tags.
    for i in xrange(len(self.item_)):
      out.putVarInt32(11)
      self.item_[i].OutputUnchecked(out)
      out.putVarInt32(12)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 11:
        self.add_item().TryMerge(d)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    cnt=0
    for e in self.item_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Item%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    return res
  # Field numbers for the Item group and its members.
  kItemGroup = 1
  kItemkey = 2
  kItemdelete_time = 3
  _TEXT = (
   "ErrorCode",
   "Item",
   "key",
   "delete_time",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.FLOAT,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheDeleteResponse(ProtocolBuffer.ProtocolMessage):
  """Response for MemcacheService::Delete().

  Carries one DeleteStatusCode per request item, in request order, as the
  repeated varint field `delete_status` (field 1, tag 8).
  """

  DELETED = 1
  NOT_FOUND = 2

  _DeleteStatusCode_NAMES = {
    1: "DELETED",
    2: "NOT_FOUND",
  }

  @classmethod
  def DeleteStatusCode_Name(cls, x):
    """Returns the symbolic name of status code x, or "" when unknown."""
    return cls._DeleteStatusCode_NAMES.get(x, "")

  def __init__(self, contents=None):
    self.delete_status_ = []
    if contents is not None:
      self.MergeFromString(contents)

  def delete_status_size(self):
    return len(self.delete_status_)

  def delete_status_list(self):
    return self.delete_status_

  def delete_status(self, i):
    return self.delete_status_[i]

  def set_delete_status(self, i, x):
    self.delete_status_[i] = x

  def add_delete_status(self, x):
    self.delete_status_.append(x)

  def clear_delete_status(self):
    self.delete_status_ = []

  def MergeFrom(self, x):
    assert x is not self
    for status in x.delete_status_list():
      self.add_delete_status(status)

  def Equals(self, x):
    if x is self: return 1
    if self.delete_status_ != x.delete_status_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    # One tag byte per status plus each status' varint size.
    n = len(self.delete_status_)
    for status in self.delete_status_:
      n += self.lengthVarInt64(status)
    return n

  def Clear(self):
    self.clear_delete_status()

  def OutputUnchecked(self, out):
    for status in self.delete_status_:
      out.putVarInt32(8)
      out.putVarInt32(status)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 8:
        self.add_delete_status(d.getVarInt32())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    res = ""
    for i, status in enumerate(self.delete_status_):
      elm = "(%d)" % i if printElemNumber else ""
      res += prefix + ("delete_status%s: %s\n" % (elm, self.DebugFormatInt32(status)))
    return res

  kdelete_status = 1

  _TEXT = (
   "ErrorCode",
   "delete_status",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheIncrementRequest(ProtocolBuffer.ProtocolMessage):
  """Request for MemcacheService::Increment().

  Wire format:
    key:       required string, field 1 (tag 10).
    delta:     optional varint, field 2 (tag 16); defaults to 1.
    direction: optional varint, field 3 (tag 24); a Direction enum value,
               defaults to 1 (INCREMENT).
  """
  # Direction enum values.
  INCREMENT    =    1
  DECREMENT    =    2
  _Direction_NAMES = {
    1: "INCREMENT",
    2: "DECREMENT",
  }
  def Direction_Name(cls, x): return cls._Direction_NAMES.get(x, "")
  Direction_Name = classmethod(Direction_Name)
  # Class-level defaults double as the "unset" values; the has_*_ bits
  # track explicit presence.
  has_key_ = 0
  key_ = ""
  has_delta_ = 0
  delta_ = 1
  has_direction_ = 0
  direction_ = 1
  def __init__(self, contents=None):
    # contents: optional serialized byte string to parse into this message.
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def delta(self): return self.delta_
  def set_delta(self, x):
    self.has_delta_ = 1
    self.delta_ = x
  def clear_delta(self):
    if self.has_delta_:
      self.has_delta_ = 0
      self.delta_ = 1
  def has_delta(self): return self.has_delta_
  def direction(self): return self.direction_
  def set_direction(self, x):
    self.has_direction_ = 1
    self.direction_ = x
  def clear_direction(self):
    if self.has_direction_:
      self.has_direction_ = 0
      self.direction_ = 1
  def has_direction(self): return self.has_direction_
  def MergeFrom(self, x):
    """Copies every present field of x into this message."""
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_delta()): self.set_delta(x.delta())
    if (x.has_direction()): self.set_direction(x.direction())
  def Equals(self, x):
    """Field-by-field equality; returns 1/0 rather than True/False."""
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_delta_ != x.has_delta_: return 0
    if self.has_delta_ and self.delta_ != x.delta_: return 0
    if self.has_direction_ != x.has_direction_: return 0
    if self.has_direction_ and self.direction_ != x.direction_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff the required key field is set."""
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    return initialized
  def ByteSize(self):
    # The trailing +1 covers key's one-byte tag; the varint fields each
    # contribute a tag byte plus their encoded length.
    n = 0
    n += self.lengthString(len(self.key_))
    if (self.has_delta_): n += 1 + self.lengthVarInt64(self.delta_)
    if (self.has_direction_): n += 1 + self.lengthVarInt64(self.direction_)
    return n + 1
  def Clear(self):
    self.clear_key()
    self.clear_delta()
    self.clear_direction()
  def OutputUnchecked(self, out):
    """Serializes the fields without verifying IsInitialized()."""
    out.putVarInt32(10)
    out.putPrefixedString(self.key_)
    if (self.has_delta_):
      out.putVarInt32(16)
      out.putVarUint64(self.delta_)
    if (self.has_direction_):
      out.putVarInt32(24)
      out.putVarInt32(self.direction_)
  def TryMerge(self, d):
    """Parses fields from decoder d; unknown tags are skipped, tag 0 is
    malformed."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_delta(d.getVarUint64())
        continue
      if tt == 24:
        self.set_direction(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Text-format dump of whichever fields are set."""
    res=""
    if self.has_key_: res+=prefix+("key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_delta_: res+=prefix+("delta: %s\n" % self.DebugFormatInt64(self.delta_))
    if self.has_direction_: res+=prefix+("direction: %s\n" % self.DebugFormatInt32(self.direction_))
    return res
  # Field numbers.
  kkey = 1
  kdelta = 2
  kdirection = 3
  _TEXT = (
   "ErrorCode",
   "key",
   "delta",
   "direction",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheIncrementResponse(ProtocolBuffer.ProtocolMessage):
  """Response for MemcacheService::Increment().

  new_value (optional varint, field 1, tag 8) holds the post-increment
  value; it is left unset when the increment was skipped.
  """

  has_new_value_ = 0
  new_value_ = 0

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def new_value(self):
    return self.new_value_

  def set_new_value(self, x):
    self.has_new_value_ = 1
    self.new_value_ = x

  def clear_new_value(self):
    if self.has_new_value_:
      self.has_new_value_ = 0
      self.new_value_ = 0

  def has_new_value(self):
    return self.has_new_value_

  def MergeFrom(self, x):
    assert x is not self
    if x.has_new_value():
      self.set_new_value(x.new_value())

  def Equals(self, x):
    if x is self: return 1
    if self.has_new_value_ != x.has_new_value_: return 0
    if self.has_new_value_ and self.new_value_ != x.new_value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    if self.has_new_value_:
      return 1 + self.lengthVarInt64(self.new_value_)
    return 0

  def Clear(self):
    self.clear_new_value()

  def OutputUnchecked(self, out):
    if self.has_new_value_:
      out.putVarInt32(8)
      out.putVarUint64(self.new_value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 8:
        self.set_new_value(d.getVarUint64())
      elif tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      else:
        d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    if self.has_new_value_:
      return prefix + ("new_value: %s\n" % self.DebugFormatInt64(self.new_value_))
    return ""

  knew_value = 1

  _TEXT = (
   "ErrorCode",
   "new_value",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheFlushRequest(ProtocolBuffer.ProtocolMessage):
  """Empty request message for MemcacheService::FlushAll()."""

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # No fields are defined, so every tag is skipped; tag 0 is malformed.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheFlushResponse(ProtocolBuffer.ProtocolMessage):
  """Empty response message for MemcacheService::FlushAll()."""

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # No fields are defined, so every tag is skipped; tag 0 is malformed.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheStatsRequest(ProtocolBuffer.ProtocolMessage):
  """Empty request message for MemcacheService::Stats()."""

  def __init__(self, contents=None):
    if contents is not None:
      self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    return 1

  def IsInitialized(self, debug_strs=None):
    return 1

  def ByteSize(self):
    return 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # No fields are defined, so every tag is skipped; tag 0 is malformed.
    while d.avail() > 0:
      tag = d.getVarInt32()
      if tag == 0:
        raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tag)

  def __str__(self, prefix="", printElemNumber=0):
    return ""

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MergedNamespaceStats(ProtocolBuffer.ProtocolMessage):
  """Generated message holding cache-wide statistics.
  Fields (all required): hits, misses, byte_hits, items, bytes are varints;
  oldest_item_age is a fixed32 (see _TYPES below and OutputUnchecked).
  """
  # Generated presence flag (has_*_) and default value for each field.
  has_hits_ = 0
  hits_ = 0
  has_misses_ = 0
  misses_ = 0
  has_byte_hits_ = 0
  byte_hits_ = 0
  has_items_ = 0
  items_ = 0
  has_bytes_ = 0
  bytes_ = 0
  has_oldest_item_age_ = 0
  oldest_item_age_ = 0
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  # Generated accessors: getter, setter, clearer and presence test per field.
  def hits(self): return self.hits_
  def set_hits(self, x):
    self.has_hits_ = 1
    self.hits_ = x
  def clear_hits(self):
    if self.has_hits_:
      self.has_hits_ = 0
      self.hits_ = 0
  def has_hits(self): return self.has_hits_
  def misses(self): return self.misses_
  def set_misses(self, x):
    self.has_misses_ = 1
    self.misses_ = x
  def clear_misses(self):
    if self.has_misses_:
      self.has_misses_ = 0
      self.misses_ = 0
  def has_misses(self): return self.has_misses_
  def byte_hits(self): return self.byte_hits_
  def set_byte_hits(self, x):
    self.has_byte_hits_ = 1
    self.byte_hits_ = x
  def clear_byte_hits(self):
    if self.has_byte_hits_:
      self.has_byte_hits_ = 0
      self.byte_hits_ = 0
  def has_byte_hits(self): return self.has_byte_hits_
  def items(self): return self.items_
  def set_items(self, x):
    self.has_items_ = 1
    self.items_ = x
  def clear_items(self):
    if self.has_items_:
      self.has_items_ = 0
      self.items_ = 0
  def has_items(self): return self.has_items_
  def bytes(self): return self.bytes_
  def set_bytes(self, x):
    self.has_bytes_ = 1
    self.bytes_ = x
  def clear_bytes(self):
    if self.has_bytes_:
      self.has_bytes_ = 0
      self.bytes_ = 0
  def has_bytes(self): return self.has_bytes_
  def oldest_item_age(self): return self.oldest_item_age_
  def set_oldest_item_age(self, x):
    self.has_oldest_item_age_ = 1
    self.oldest_item_age_ = x
  def clear_oldest_item_age(self):
    if self.has_oldest_item_age_:
      self.has_oldest_item_age_ = 0
      self.oldest_item_age_ = 0
  def has_oldest_item_age(self): return self.has_oldest_item_age_
  def MergeFrom(self, x):
    # Copy only the fields that are present on the other message.
    assert x is not self
    if (x.has_hits()): self.set_hits(x.hits())
    if (x.has_misses()): self.set_misses(x.misses())
    if (x.has_byte_hits()): self.set_byte_hits(x.byte_hits())
    if (x.has_items()): self.set_items(x.items())
    if (x.has_bytes()): self.set_bytes(x.bytes())
    if (x.has_oldest_item_age()): self.set_oldest_item_age(x.oldest_item_age())
  def Equals(self, x):
    # Field-by-field comparison: presence flags must match, and values
    # are only compared when present.
    if x is self: return 1
    if self.has_hits_ != x.has_hits_: return 0
    if self.has_hits_ and self.hits_ != x.hits_: return 0
    if self.has_misses_ != x.has_misses_: return 0
    if self.has_misses_ and self.misses_ != x.misses_: return 0
    if self.has_byte_hits_ != x.has_byte_hits_: return 0
    if self.has_byte_hits_ and self.byte_hits_ != x.byte_hits_: return 0
    if self.has_items_ != x.has_items_: return 0
    if self.has_items_ and self.items_ != x.items_: return 0
    if self.has_bytes_ != x.has_bytes_: return 0
    if self.has_bytes_ and self.bytes_ != x.bytes_: return 0
    if self.has_oldest_item_age_ != x.has_oldest_item_age_: return 0
    if self.has_oldest_item_age_ and self.oldest_item_age_ != x.oldest_item_age_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Every field is required; report each missing one when debug_strs given.
    initialized = 1
    if (not self.has_hits_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: hits not set.')
    if (not self.has_misses_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: misses not set.')
    if (not self.has_byte_hits_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: byte_hits not set.')
    if (not self.has_items_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: items not set.')
    if (not self.has_bytes_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: bytes not set.')
    if (not self.has_oldest_item_age_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: oldest_item_age not set.')
    return initialized
  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.hits_)
    n += self.lengthVarInt64(self.misses_)
    n += self.lengthVarInt64(self.byte_hits_)
    n += self.lengthVarInt64(self.items_)
    n += self.lengthVarInt64(self.bytes_)
    # Fixed overhead: five 1-byte varint tags + 1-byte fixed32 tag + 4 payload
    # bytes for oldest_item_age = 10.
    return n + 10
  def Clear(self):
    self.clear_hits()
    self.clear_misses()
    self.clear_byte_hits()
    self.clear_items()
    self.clear_bytes()
    self.clear_oldest_item_age()
  def OutputUnchecked(self, out):
    # Tags are (field_number << 3) | wire_type: 8,16,24,32,40 are varints
    # for fields 1-5; 53 == (6 << 3) | 5 is the fixed32 oldest_item_age.
    out.putVarInt32(8)
    out.putVarUint64(self.hits_)
    out.putVarInt32(16)
    out.putVarUint64(self.misses_)
    out.putVarInt32(24)
    out.putVarUint64(self.byte_hits_)
    out.putVarInt32(32)
    out.putVarUint64(self.items_)
    out.putVarInt32(40)
    out.putVarUint64(self.bytes_)
    out.putVarInt32(53)
    out.put32(self.oldest_item_age_)
  def TryMerge(self, d):
    # Decode known tags; skip unknown fields; tag 0 is a wire-format error.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_hits(d.getVarUint64())
        continue
      if tt == 16:
        self.set_misses(d.getVarUint64())
        continue
      if tt == 24:
        self.set_byte_hits(d.getVarUint64())
        continue
      if tt == 32:
        self.set_items(d.getVarUint64())
        continue
      if tt == 40:
        self.set_bytes(d.getVarUint64())
        continue
      if tt == 53:
        self.set_oldest_item_age(d.get32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_hits_: res+=prefix+("hits: %s\n" % self.DebugFormatInt64(self.hits_))
    if self.has_misses_: res+=prefix+("misses: %s\n" % self.DebugFormatInt64(self.misses_))
    if self.has_byte_hits_: res+=prefix+("byte_hits: %s\n" % self.DebugFormatInt64(self.byte_hits_))
    if self.has_items_: res+=prefix+("items: %s\n" % self.DebugFormatInt64(self.items_))
    if self.has_bytes_: res+=prefix+("bytes: %s\n" % self.DebugFormatInt64(self.bytes_))
    if self.has_oldest_item_age_: res+=prefix+("oldest_item_age: %s\n" % self.DebugFormatFixed32(self.oldest_item_age_))
    return res
  # Field numbers as declared in the .proto definition.
  khits = 1
  kmisses = 2
  kbyte_hits = 3
  kitems = 4
  kbytes = 5
  koldest_item_age = 6
  _TEXT = (
   "ErrorCode",
   "hits",
   "misses",
   "byte_hits",
   "items",
   "bytes",
   "oldest_item_age",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.FLOAT,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MemcacheStatsResponse(ProtocolBuffer.ProtocolMessage):
  """Generated response message wrapping an optional MergedNamespaceStats."""
  has_stats_ = 0
  stats_ = None
  def __init__(self, contents=None):
    # Lock guards lazy construction of the stats_ submessage in stats().
    self.lazy_init_lock_ = thread.allocate_lock()
    if contents is not None: self.MergeFromString(contents)
  def stats(self):
    # Lazily create the submessage; double-checked under the lock so that
    # concurrent readers build it at most once.
    if self.stats_ is None:
      self.lazy_init_lock_.acquire()
      try:
        if self.stats_ is None: self.stats_ = MergedNamespaceStats()
      finally:
        self.lazy_init_lock_.release()
    return self.stats_
  def mutable_stats(self): self.has_stats_ = 1; return self.stats()
  def clear_stats(self):
    # Keep the allocated submessage (just Clear it) to avoid re-creation.
    if self.has_stats_:
      self.has_stats_ = 0;
      if self.stats_ is not None: self.stats_.Clear()
  def has_stats(self): return self.has_stats_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_stats()): self.mutable_stats().MergeFrom(x.stats())
  def Equals(self, x):
    if x is self: return 1
    if self.has_stats_ != x.has_stats_: return 0
    if self.has_stats_ and self.stats_ != x.stats_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # stats is optional, but when present its required fields must be set.
    initialized = 1
    if (self.has_stats_ and not self.stats_.IsInitialized(debug_strs)): initialized = 0
    return initialized
  def ByteSize(self):
    n = 0
    if (self.has_stats_): n += 1 + self.lengthString(self.stats_.ByteSize())
    return n + 0
  def Clear(self):
    self.clear_stats()
  def OutputUnchecked(self, out):
    # Tag 10 == (1 << 3) | 2: field 1, length-delimited submessage.
    if (self.has_stats_):
      out.putVarInt32(10)
      out.putVarInt32(self.stats_.ByteSize())
      self.stats_.OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        # Decode the embedded submessage from a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.mutable_stats().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_stats_:
      res+=prefix+"stats <\n"
      res+=self.stats_.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
    return res
  # Field number as declared in the .proto definition.
  kstats = 1
  _TEXT = (
   "ErrorCode",
   "stats",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public symbols exported by this generated module.
__all__ = ['MemcacheServiceError','MemcacheGetRequest','MemcacheGetResponse','MemcacheGetResponse_Item','MemcacheSetRequest','MemcacheSetRequest_Item','MemcacheSetResponse','MemcacheDeleteRequest','MemcacheDeleteRequest_Item','MemcacheDeleteResponse','MemcacheIncrementRequest','MemcacheIncrementResponse','MemcacheFlushRequest','MemcacheFlushResponse','MemcacheStatsRequest','MergedNamespaceStats','MemcacheStatsResponse']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the memcache API, keeping all data in process memory."""
import logging
import time
from google.appengine.api import apiproxy_stub
from google.appengine.api import memcache
from google.appengine.api.memcache import memcache_service_pb
# Short aliases for the generated request/response classes used by this stub.
MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
class CacheEntry(object):
  """A single value stored in the in-process cache.
  Tracks the raw value, its opaque flags, creation time, expiration, and the
  delete-timeout "locked" state used to emulate memcache delete semantics.
  """

  def __init__(self, value, expiration, flags, gettime):
    """Initializer.
    Args:
      value: String containing the data for this entry.
      expiration: Number containing the expiration time or offset in seconds
        for this entry.
      flags: Opaque flags used by the memcache implementation.
      gettime: Used for testing. Function that works like time.time().
    """
    assert isinstance(value, basestring)
    assert len(value) <= memcache.MAX_VALUE_SIZE
    assert isinstance(expiration, (int, long))
    self._gettime = gettime
    self.value = value
    self.flags = flags
    self.created_time = self._gettime()
    # An expiration of 0 means "never expires".
    self.will_expire = (expiration != 0)
    self.locked = False
    self._SetExpiration(expiration)

  def _SetExpiration(self, expiration):
    """Sets the expiration for this entry.
    Args:
      expiration: Number containing the expiration time or offset in seconds
        for this entry. If expiration is above one month, then it's considered
        an absolute time since the UNIX epoch.
    """
    one_month = 86400 * 30
    if expiration > one_month:
      self.expiration_time = expiration
    else:
      self.expiration_time = self._gettime() + expiration

  def CheckExpired(self):
    """Returns True if this entry has expired; False otherwise."""
    if not self.will_expire:
      return False
    return self._gettime() >= self.expiration_time

  def ExpireAndLock(self, timeout):
    """Marks this entry as deleted and locks it for the expiration time.
    Used to implement memcache's delete timeout behavior.
    Args:
      timeout: Parameter originally passed to memcache.delete or
        memcache.delete_multi to control deletion timeout.
    """
    self.will_expire = True
    self.locked = True
    self._SetExpiration(timeout)

  def CheckLocked(self):
    """Returns True if this entry was deleted but has not yet timed out."""
    if not self.locked:
      return False
    return not self.CheckExpired()
class MemcacheServiceStub(apiproxy_stub.APIProxyStub):
  """Python only memcache service stub.
  This stub keeps all data in the local process' memory, not in any
  external servers.
  """
  def __init__(self, gettime=time.time, service_name='memcache'):
    """Initializer.
    Args:
      gettime: time.time()-like function used for testing.
      service_name: Service name expected for all calls.
    """
    super(MemcacheServiceStub, self).__init__(service_name)
    self._gettime = gettime
    self._ResetStats()
    # Maps key string -> CacheEntry.
    self._the_cache = {}
  def _ResetStats(self):
    """Resets statistics information."""
    self._hits = 0
    self._misses = 0
    self._byte_hits = 0
    # Used to report oldest_item_age in _Dynamic_Stats.
    self._cache_creation_time = self._gettime()
  def _GetKey(self, key):
    """Retrieves a CacheEntry from the cache if it hasn't expired.
    Does not take deletion timeout into account.
    Args:
      key: The key to retrieve from the cache.
    Returns:
      The corresponding CacheEntry instance, or None if it was not found or
      has already expired.
    """
    entry = self._the_cache.get(key, None)
    if entry is None:
      return None
    elif entry.CheckExpired():
      # Lazily evict expired entries on access.
      del self._the_cache[key]
      return None
    else:
      return entry
  def _Dynamic_Get(self, request, response):
    """Implementation of MemcacheService::Get().
    Args:
      request: A MemcacheGetRequest.
      response: A MemcacheGetResponse.
    """
    keys = set(request.key_list())
    for key in keys:
      entry = self._GetKey(key)
      # Entries "locked" by a timed delete count as misses until the
      # lock times out.
      if entry is None or entry.CheckLocked():
        self._misses += 1
        continue
      self._hits += 1
      self._byte_hits += len(entry.value)
      item = response.add_item()
      item.set_key(key)
      item.set_value(entry.value)
      item.set_flags(entry.flags)
  def _Dynamic_Set(self, request, response):
    """Implementation of MemcacheService::Set().
    Args:
      request: A MemcacheSetRequest.
      response: A MemcacheSetResponse.
    """
    for item in request.item_list():
      key = item.key()
      set_policy = item.set_policy()
      old_entry = self._GetKey(key)
      set_status = MemcacheSetResponse.NOT_STORED
      # First gate: does the policy (SET/ADD/REPLACE) allow a store given
      # the presence/absence of an existing entry?
      if ((set_policy == MemcacheSetRequest.SET) or
          (set_policy == MemcacheSetRequest.ADD and old_entry is None) or
          (set_policy == MemcacheSetRequest.REPLACE and old_entry is not None)):
        # Second gate: a delete-locked entry blocks ADD/REPLACE, but a
        # plain SET always succeeds.
        if (old_entry is None or
            set_policy == MemcacheSetRequest.SET
            or not old_entry.CheckLocked()):
          self._the_cache[key] = CacheEntry(item.value(),
                                            item.expiration_time(),
                                            item.flags(),
                                            gettime=self._gettime)
          set_status = MemcacheSetResponse.STORED
      response.add_set_status(set_status)
  def _Dynamic_Delete(self, request, response):
    """Implementation of MemcacheService::Delete().
    Args:
      request: A MemcacheDeleteRequest.
      response: A MemcacheDeleteResponse.
    """
    for item in request.item_list():
      key = item.key()
      entry = self._GetKey(key)
      delete_status = MemcacheDeleteResponse.DELETED
      if entry is None:
        delete_status = MemcacheDeleteResponse.NOT_FOUND
      elif item.delete_time() == 0:
        del self._the_cache[key]
      else:
        # Non-zero delete_time: keep the entry but lock it so that ADD
        # fails until the timeout elapses.
        entry.ExpireAndLock(item.delete_time())
      response.add_delete_status(delete_status)
  def _Dynamic_Increment(self, request, response):
    """Implementation of MemcacheService::Increment().
    Args:
      request: A MemcacheIncrementRequest.
      response: A MemcacheIncrementResponse.
    """
    key = request.key()
    entry = self._GetKey(key)
    if entry is None:
      # Missing key: leave the response empty, signalling failure.
      return
    try:
      old_value = long(entry.value)
      if old_value < 0:
        raise ValueError
    except ValueError, e:
      logging.error('Increment/decrement failed: Could not interpret '
                    'value for key = "%s" as an unsigned integer.', key)
      return
    delta = request.delta()
    if request.direction() == MemcacheIncrementRequest.DECREMENT:
      delta = -delta
    new_value = old_value + delta
    # Out-of-range results (below 0 or >= 2**64) are clamped to 0 in
    # this stub.
    if not (0 <= new_value < 2**64):
      new_value = 0
    entry.value = str(new_value)
    response.set_new_value(new_value)
  def _Dynamic_FlushAll(self, request, response):
    """Implementation of MemcacheService::FlushAll().
    Args:
      request: A MemcacheFlushRequest.
      response: A MemcacheFlushResponse.
    """
    self._the_cache.clear()
    self._ResetStats()
  def _Dynamic_Stats(self, request, response):
    """Implementation of MemcacheService::Stats().
    Args:
      request: A MemcacheStatsRequest.
      response: A MemcacheStatsResponse.
    """
    stats = response.mutable_stats()
    stats.set_hits(self._hits)
    stats.set_misses(self._misses)
    stats.set_byte_hits(self._byte_hits)
    stats.set_items(len(self._the_cache))
    total_bytes = 0
    for key, entry in self._the_cache.iteritems():
      total_bytes += len(entry.value)
    stats.set_bytes(total_bytes)
    # Approximation: reports time since the cache (or last flush) was
    # created, not the true age of the oldest entry.
    stats.set_oldest_item_age(self._gettime() - self._cache_creation_time)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Memcache API.
Provides memcached-alike API to application developers to store
data in memory when reliable storage via the DataStore API isn't
required and higher performance is desired.
"""
import cStringIO
import math
import pickle
import types
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api.memcache import memcache_service_pb
from google.appengine.runtime import apiproxy_errors
# Short aliases for the generated protocol buffer classes.
MemcacheSetResponse = memcache_service_pb.MemcacheSetResponse
MemcacheSetRequest = memcache_service_pb.MemcacheSetRequest
MemcacheGetResponse = memcache_service_pb.MemcacheGetResponse
MemcacheGetRequest = memcache_service_pb.MemcacheGetRequest
MemcacheDeleteResponse = memcache_service_pb.MemcacheDeleteResponse
MemcacheDeleteRequest = memcache_service_pb.MemcacheDeleteRequest
MemcacheIncrementResponse = memcache_service_pb.MemcacheIncrementResponse
MemcacheIncrementRequest = memcache_service_pb.MemcacheIncrementRequest
MemcacheFlushResponse = memcache_service_pb.MemcacheFlushResponse
MemcacheFlushRequest = memcache_service_pb.MemcacheFlushRequest
MemcacheStatsRequest = memcache_service_pb.MemcacheStatsRequest
MemcacheStatsResponse = memcache_service_pb.MemcacheStatsResponse
# Return codes for Client.delete(); see its docstring.
DELETE_NETWORK_FAILURE = 0
DELETE_ITEM_MISSING = 1
DELETE_SUCCESSFUL = 2
# Server-side limits on key and value sizes, in bytes.
MAX_KEY_SIZE = 250
MAX_VALUE_SIZE = 10 ** 6
# Keys of the dictionary returned by Client.get_stats().
STAT_HITS = 'hits'
STAT_MISSES = 'misses'
STAT_BYTE_HITS = 'byte_hits'
STAT_ITEMS = 'items'
STAT_BYTES = 'bytes'
STAT_OLDEST_ITEM_AGES = 'oldest_item_age'
# Low bits of the flags field encode the stored value's type (TYPE_*);
# FLAG_COMPRESSED is reserved above the type mask.
FLAG_TYPE_MASK = 7
FLAG_COMPRESSED = 1 << 3
TYPE_STR = 0
TYPE_UNICODE = 1
TYPE_PICKLED = 2
TYPE_INT = 3
TYPE_LONG = 4
TYPE_BOOL = 5
def _key_string(key, key_prefix='', server_to_user_dict=None):
  """Utility function to handle different ways of requesting keys.
  Args:
    key: Either a string or tuple of (shard_number, string). In Google App
      Engine the sharding is automatic so the shard number is ignored.
      To memcache, the key is just bytes (no defined encoding).
    key_prefix: Optional string prefix to prepend to key.
    server_to_user_dict: Optional dictionary to populate with a mapping of
      server-side key (which includes the key_prefix) to user-supplied key
      (which does not have the prefix).
  Returns:
    The key as a non-unicode string prepended with key_prefix. This is the key
    sent to and stored by the server.
  Raises:
    TypeError: If provided key isn't a string or tuple of (int, string)
      or key_prefix or server_to_user_dict are of the wrong type.
    ValueError: If the key, when translated to the server key, is more than
      250 bytes in length.
  """
  if type(key) is types.TupleType:
    # The shard number (first tuple element) is ignored on App Engine.
    key = key[1]
  if not isinstance(key, basestring):
    raise TypeError('Key must be a string instance, received %r' % key)
  if not isinstance(key_prefix, basestring):
    raise TypeError('key_prefix must be a string instance, received %r' %
                    key_prefix)
  server_key = key_prefix + key
  if isinstance(server_key, unicode):
    # Keys are raw bytes on the server, so encode unicode keys as UTF-8.
    server_key = server_key.encode('utf-8')
  if len(server_key) > MAX_KEY_SIZE:
    raise ValueError('Keys may not be more than %d bytes in length, '
                     'received %d bytes' % (MAX_KEY_SIZE, len(server_key)))
  if server_to_user_dict is not None:
    if not isinstance(server_to_user_dict, dict):
      # Bug fix: this message previously reported the key instead of the
      # offending server_to_user_dict argument.
      raise TypeError('server_to_user_dict must be a dict instance, '
                      'received %r' % server_to_user_dict)
    server_to_user_dict[server_key] = key
  return server_key
def _validate_encode_value(value, do_pickle):
  """Validates a value and encodes it for storage on the memcache server.
  Args:
    value: Value to store in memcache. Plain strings pass through untouched;
      unicode strings are UTF-8 encoded and flagged so retrieval yields
      unicode again; bools, ints and longs are stored as decimal strings;
      anything else is pickled via do_pickle and unpickled on retrieval.
    do_pickle: Callable that takes an object and returns a non-unicode
      string containing the pickled object.
  Returns:
    Tuple (stored_value, flags) where:
      stored_value: The value as a non-unicode string that should be stored
        in memcache.
      flags: An integer with bits set from the FLAG_* constants in this file
        to indicate the encoding of the key and value.
  Raises:
    ValueError: If the encoded value is too large.
    pickle.PicklingError: If the value is not a string and could not be pickled.
    RuntimeError: If a complicated data structure could not be pickled due to
      too many levels of recursion in its composition.
  """
  flags = 0
  if isinstance(value, str):
    stored_value = value
  elif isinstance(value, unicode):
    stored_value = value.encode('utf-8')
    flags |= TYPE_UNICODE
  elif isinstance(value, bool):
    # bool must be tested before int: bool is a subclass of int.
    stored_value = str(int(value))
    flags |= TYPE_BOOL
  elif isinstance(value, int):
    stored_value = str(value)
    flags |= TYPE_INT
  elif isinstance(value, long):
    stored_value = str(value)
    flags |= TYPE_LONG
  else:
    stored_value = do_pickle(value)
    flags |= TYPE_PICKLED
  if len(stored_value) > MAX_VALUE_SIZE:
    raise ValueError('Values may not be more than %d bytes in length; '
                     'received %d bytes' % (MAX_VALUE_SIZE, len(stored_value)))
  return (stored_value, flags)
def _decode_value(stored_value, flags, do_unpickle):
  """Decodes a raw value fetched from memcache back into a Python object.
  Args:
    stored_value: The value as a non-unicode string that was stored.
    flags: An integer with bits set from the FLAG_* constants in this file
      that indicate the encoding of the key and value.
    do_unpickle: Callable that takes a non-unicode string object that contains
      a pickled object and returns the pickled object.
  Returns:
    The original object that was stored, be it a normal string, a unicode
    string, int, long, or a Python object that was pickled.
  Raises:
    pickle.UnpicklingError: If the value could not be unpickled.
  """
  assert isinstance(stored_value, str)
  assert isinstance(flags, (int, long))
  # The low bits of flags identify how the value was encoded on store.
  type_number = flags & FLAG_TYPE_MASK
  if type_number == TYPE_STR:
    return stored_value
  if type_number == TYPE_UNICODE:
    return stored_value.decode('utf-8')
  if type_number == TYPE_PICKLED:
    return do_unpickle(stored_value)
  if type_number == TYPE_BOOL:
    return bool(int(stored_value))
  if type_number == TYPE_INT:
    return int(stored_value)
  if type_number == TYPE_LONG:
    return long(stored_value)
  assert False, "Unknown stored type"
class Client(object):
"""Memcache client object, through which one invokes all memcache operations.
Several methods are no-ops to retain source-level compatibility
with the existing popular Python memcache library.
Any method that takes a 'key' argument will accept that key as a string
(unicode or not) or a tuple of (hash_value, string) where the hash_value,
normally used for sharding onto a memcache instance, is instead ignored, as
Google App Engine deals with the sharding transparently. Keys in memcache are
just bytes, without a specified encoding. All such methods may raise TypeError
if provided a bogus key value and a ValueError if the key is too large.
Any method that takes a 'value' argument will accept as that value any
string (unicode or not), int, long, or pickle-able Python object, including
all native types. You'll get back from the cache the same type that you
originally put in.
"""
def __init__(self, servers=None, debug=0,
pickleProtocol=pickle.HIGHEST_PROTOCOL,
pickler=pickle.Pickler,
unpickler=pickle.Unpickler,
pload=None,
pid=None,
make_sync_call=apiproxy_stub_map.MakeSyncCall):
"""Create a new Client object.
No parameters are required.
Arguments:
servers: Ignored; only for compatibility.
debug: Ignored; only for compatibility.
pickleProtocol: Pickle protocol to use for pickling the object.
pickler: pickle.Pickler sub-class to use for pickling.
unpickler: pickle.Unpickler sub-class to use for unpickling.
pload: Callable to use for retrieving objects by persistent id.
pid: Callable to use for determine the persistent id for objects, if any.
make_sync_call: Function to use to make an App Engine service call.
Used for testing.
"""
self._pickle_data = cStringIO.StringIO()
self._pickler_instance = pickler(self._pickle_data,
protocol=pickleProtocol)
self._unpickler_instance = unpickler(self._pickle_data)
if pid is not None:
self._pickler_instance.persistent_id = pid
if pload is not None:
self._unpickler_instance.persistent_load = pload
def DoPickle(value):
self._pickle_data.truncate(0)
self._pickler_instance.clear_memo()
self._pickler_instance.dump(value)
return self._pickle_data.getvalue()
self._do_pickle = DoPickle
def DoUnpickle(value):
self._pickle_data.truncate(0)
self._pickle_data.write(value)
self._pickle_data.seek(0)
self._unpickler_instance.memo.clear()
return self._unpickler_instance.load()
self._do_unpickle = DoUnpickle
self._make_sync_call = make_sync_call
def set_servers(self, servers):
"""Sets the pool of memcache servers used by the client.
This is purely a compatibility method. In Google App Engine, it's a no-op.
"""
pass
def disconnect_all(self):
"""Closes all connections to memcache servers.
This is purely a compatibility method. In Google App Engine, it's a no-op.
"""
pass
def forget_dead_hosts(self):
"""Resets all servers to the alive status.
This is purely a compatibility method. In Google App Engine, it's a no-op.
"""
pass
def debuglog(self):
"""Logging function for debugging information.
This is purely a compatibility method. In Google App Engine, it's a no-op.
"""
pass
def get_stats(self):
"""Gets memcache statistics for this application.
All of these statistics may reset due to various transient conditions. They
provide the best information available at the time of being called.
Returns:
Dictionary mapping statistic names to associated values. Statistics and
their associated meanings:
hits: Number of cache get requests resulting in a cache hit.
misses: Number of cache get requests resulting in a cache miss.
byte_hits: Sum of bytes transferred on get requests. Rolls over to
zero on overflow.
items: Number of key/value pairs in the cache.
bytes: Total size of all items in the cache.
oldest_item_age: How long in seconds since the oldest item in the
cache was accessed. Effectively, this indicates how long a new
item will survive in the cache without being accessed. This is
_not_ the amount of time that has elapsed since the item was
created.
On error, returns None.
"""
request = MemcacheStatsRequest()
response = MemcacheStatsResponse()
try:
self._make_sync_call('memcache', 'Stats', request, response)
except apiproxy_errors.Error:
return None
if not response.has_stats():
return None
stats = response.stats()
return {
STAT_HITS: stats.hits(),
STAT_MISSES: stats.misses(),
STAT_BYTE_HITS: stats.byte_hits(),
STAT_ITEMS: stats.items(),
STAT_BYTES: stats.bytes(),
STAT_OLDEST_ITEM_AGES: stats.oldest_item_age(),
}
def flush_all(self):
"""Deletes everything in memcache.
Returns:
True on success, False on RPC or server error.
"""
request = MemcacheFlushRequest()
response = MemcacheFlushResponse()
try:
self._make_sync_call('memcache', 'FlushAll', request, response)
except apiproxy_errors.Error:
return False
return True
def get(self, key):
"""Looks up a single key in memcache.
If you have multiple items to load, though, it's much more efficient
to use get_multi() instead, which loads them in one bulk operation,
reducing the networking latency that'd otherwise be required to do
many serialized get() operations.
Args:
key: The key in memcache to look up. See docs on Client
for details of format.
Returns:
The value of the key, if found in memcache, else None.
"""
request = MemcacheGetRequest()
request.add_key(_key_string(key))
response = MemcacheGetResponse()
try:
self._make_sync_call('memcache', 'Get', request, response)
except apiproxy_errors.Error:
return None
if not response.item_size():
return None
return _decode_value(response.item(0).value(),
response.item(0).flags(),
self._do_unpickle)
def get_multi(self, keys, key_prefix=''):
"""Looks up multiple keys from memcache in one operation.
This is the recommended way to do bulk loads.
Args:
keys: List of keys to look up. Keys may be strings or
tuples of (hash_value, string). Google App Engine
does the sharding and hashing automatically, though, so the hash
value is ignored. To memcache, keys are just series of bytes,
and not in any particular encoding.
key_prefix: Prefix to prepend to all keys when talking to the server;
not included in the returned dictionary.
Returns:
A dictionary of the keys and values that were present in memcache.
Even if the key_prefix was specified, that key_prefix won't be on
the keys in the returned dictionary.
"""
request = MemcacheGetRequest()
response = MemcacheGetResponse()
user_key = {}
for key in keys:
request.add_key(_key_string(key, key_prefix, user_key))
try:
self._make_sync_call('memcache', 'Get', request, response)
except apiproxy_errors.Error:
return {}
return_value = {}
for returned_item in response.item_list():
value = _decode_value(returned_item.value(), returned_item.flags(),
self._do_unpickle)
return_value[user_key[returned_item.key()]] = value
return return_value
def delete(self, key, seconds=0):
"""Deletes a key from memcache.
Args:
key: Key to delete. See docs on Client for detils.
seconds: Optional number of seconds to make deleted items 'locked'
for 'add' operations. Value can be a delta from current time (up to
1 month), or an absolute Unix epoch time. Defaults to 0, which means
items can be immediately added. With or without this option,
a 'set' operation will always work. Float values will be rounded up to
the nearest whole second.
Returns:
DELETE_NETWORK_FAILURE (0) on network failure,
DELETE_ITEM_MISSING (1) if the server tried to delete the item but
didn't have it, or
DELETE_SUCCESSFUL (2) if the item was actually deleted.
This can be used as a boolean value, where a network failure is the
only bad condition.
"""
if not isinstance(seconds, (int, long, float)):
raise TypeError('Delete timeout must be a number.')
if seconds < 0:
raise ValueError('Delete timeout must be non-negative.')
request = MemcacheDeleteRequest()
response = MemcacheDeleteResponse()
delete_item = request.add_item()
delete_item.set_key(_key_string(key))
delete_item.set_delete_time(int(math.ceil(seconds)))
try:
self._make_sync_call('memcache', 'Delete', request, response)
except apiproxy_errors.Error:
return DELETE_NETWORK_FAILURE
assert response.delete_status_size() == 1, 'Unexpected status size.'
if response.delete_status(0) == MemcacheDeleteResponse.DELETED:
return DELETE_SUCCESSFUL
elif response.delete_status(0) == MemcacheDeleteResponse.NOT_FOUND:
return DELETE_ITEM_MISSING
assert False, 'Unexpected deletion status code.'
def delete_multi(self, keys, seconds=0, key_prefix=''):
"""Delete multiple keys at once.
Args:
keys: List of keys to delete.
seconds: Optional number of seconds to make deleted items 'locked'
for 'add' operations. Value can be a delta from current time (up to
1 month), or an absolute Unix epoch time. Defaults to 0, which means
items can be immediately added. With or without this option,
a 'set' operation will always work. Float values will be rounded up to
the nearest whole second.
key_prefix: Prefix to put on all keys when sending specified
keys to memcache. See docs for get_multi() and set_multi().
Returns:
True if all operations completed successfully. False if one
or more failed to complete.
"""
if not isinstance(seconds, (int, long, float)):
raise TypeError('Delete timeout must be a number.')
if seconds < 0:
raise ValueError('Delete timeout must not be negative.')
request = MemcacheDeleteRequest()
response = MemcacheDeleteResponse()
for key in keys:
delete_item = request.add_item()
delete_item.set_key(_key_string(key, key_prefix=key_prefix))
delete_item.set_delete_time(int(math.ceil(seconds)))
try:
self._make_sync_call('memcache', 'Delete', request, response)
except apiproxy_errors.Error:
return False
return True
def set(self, key, value, time=0, min_compress_len=0):
"""Sets a key's value, regardless of previous contents in cache.
Unlike add() and replace(), this method always sets (or
overwrites) the value in memcache, regardless of previous
contents.
Args:
key: Key to set. See docs on Client for details.
value: Value to set. Any type. If complex, will be pickled.
time: Optional expiration time, either relative number of seconds
from current time (up to 1 month), or an absolute Unix epoch time.
By default, items never expire, though items may be evicted due to
memory pressure. Float values will be rounded up to the nearest
whole second.
min_compress_len: Ignored option for compatibility.
Returns:
True if set. False on error.
"""
return self._set_with_policy(MemcacheSetRequest.SET, key, value, time=time)
def add(self, key, value, time=0, min_compress_len=0):
"""Sets a key's value, iff item is not already in memcache.
Args:
key: Key to set. See docs on Client for details.
value: Value to set. Any type. If complex, will be pickled.
time: Optional expiration time, either relative number of seconds
from current time (up to 1 month), or an absolute Unix epoch time.
By default, items never expire, though items may be evicted due to
memory pressure. Float values will be rounded up to the nearest
whole second.
min_compress_len: Ignored option for compatibility.
Returns:
True if added. False on error.
"""
return self._set_with_policy(MemcacheSetRequest.ADD, key, value, time=time)
def replace(self, key, value, time=0, min_compress_len=0):
"""Replaces a key's value, failing if item isn't already in memcache.
Args:
key: Key to set. See docs on Client for details.
value: Value to set. Any type. If complex, will be pickled.
time: Optional expiration time, either relative number of seconds
from current time (up to 1 month), or an absolute Unix epoch time.
By default, items never expire, though items may be evicted due to
memory pressure. Float values will be rounded up to the nearest
whole second.
min_compress_len: Ignored option for compatibility.
Returns:
True if replaced. False on RPC error or cache miss.
"""
return self._set_with_policy(MemcacheSetRequest.REPLACE,
key, value, time=time)
def _set_with_policy(self, policy, key, value, time=0):
"""Sets a single key with a specified policy.
Helper function for set(), add(), and replace().
Args:
policy: One of MemcacheSetRequest.SET, .ADD, or .REPLACE.
key: Key to add, set, or replace. See docs on Client for details.
value: Value to set.
time: Expiration time, defaulting to 0 (never expiring).
Returns:
True if stored, False on RPC error or policy error, e.g. a replace
that failed due to the item not already existing, or an add
failing due to the item not already existing.
"""
if not isinstance(time, (int, long, float)):
raise TypeError('Expiration must be a number.')
if time < 0:
raise ValueError('Expiration must not be negative.')
request = MemcacheSetRequest()
item = request.add_item()
item.set_key(_key_string(key))
stored_value, flags = _validate_encode_value(value, self._do_pickle)
item.set_value(stored_value)
item.set_flags(flags)
item.set_set_policy(policy)
item.set_expiration_time(int(math.ceil(time)))
response = MemcacheSetResponse()
try:
self._make_sync_call('memcache', 'Set', request, response)
except apiproxy_errors.Error:
return False
if response.set_status_size() != 1:
return False
return response.set_status(0) == MemcacheSetResponse.STORED
def _set_multi_with_policy(self, policy, mapping, time=0, key_prefix=''):
"""Set multiple keys with a specified policy.
Helper function for set_multi(), add_multi(), and replace_multi(). This
reduces the network latency of doing many requests in serial.
Args:
policy: One of MemcacheSetRequest.SET, ADD, or REPLACE.
mapping: Dictionary of keys to values.
time: Optional expiration time, either relative number of seconds
from current time (up to 1 month), or an absolute Unix epoch time.
By default, items never expire, though items may be evicted due to
memory pressure. Float values will be rounded up to the nearest
whole second.
key_prefix: Prefix for to prepend to all keys.
Returns:
A list of keys whose values were NOT set. On total success,
this list should be empty. On network/RPC/server errors,
a list of all input keys is returned; in this case the keys
may or may not have been updated.
"""
if not isinstance(time, (int, long, float)):
raise TypeError('Expiration must be a number.')
if time < 0.0:
raise ValueError('Expiration must not be negative.')
request = MemcacheSetRequest()
user_key = {}
server_keys = []
for key, value in mapping.iteritems():
server_key = _key_string(key, key_prefix, user_key)
stored_value, flags = _validate_encode_value(value, self._do_pickle)
server_keys.append(server_key)
item = request.add_item()
item.set_key(server_key)
item.set_value(stored_value)
item.set_flags(flags)
item.set_set_policy(policy)
item.set_expiration_time(int(math.ceil(time)))
response = MemcacheSetResponse()
try:
self._make_sync_call('memcache', 'Set', request, response)
except apiproxy_errors.Error:
return user_key.values()
assert response.set_status_size() == len(server_keys)
unset_list = []
for server_key, set_status in zip(server_keys, response.set_status_list()):
if set_status != MemcacheSetResponse.STORED:
unset_list.append(user_key[server_key])
return unset_list
def set_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
"""Set multiple keys' values at once, regardless of previous contents.
Args:
mapping: Dictionary of keys to values.
time: Optional expiration time, either relative number of seconds
from current time (up to 1 month), or an absolute Unix epoch time.
By default, items never expire, though items may be evicted due to
memory pressure. Float values will be rounded up to the nearest
whole second.
key_prefix: Prefix for to prepend to all keys.
min_compress_len: Unimplemented compatibility option.
Returns:
A list of keys whose values were NOT set. On total success,
this list should be empty.
"""
return self._set_multi_with_policy(MemcacheSetRequest.SET, mapping,
time=time, key_prefix=key_prefix)
def add_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
"""Set multiple keys' values iff items are not already in memcache.
Args:
mapping: Dictionary of keys to values.
time: Optional expiration time, either relative number of seconds
from current time (up to 1 month), or an absolute Unix epoch time.
By default, items never expire, though items may be evicted due to
memory pressure. Float values will be rounded up to the nearest
whole second.
key_prefix: Prefix for to prepend to all keys.
min_compress_len: Unimplemented compatibility option.
Returns:
A list of keys whose values were NOT set because they did not already
exist in memcache. On total success, this list should be empty.
"""
return self._set_multi_with_policy(MemcacheSetRequest.ADD, mapping,
time=time, key_prefix=key_prefix)
def replace_multi(self, mapping, time=0, key_prefix='', min_compress_len=0):
"""Replace multiple keys' values, failing if the items aren't in memcache.
Args:
mapping: Dictionary of keys to values.
time: Optional expiration time, either relative number of seconds
from current time (up to 1 month), or an absolute Unix epoch time.
By default, items never expire, though items may be evicted due to
memory pressure. Float values will be rounded up to the nearest
whole second.
key_prefix: Prefix for to prepend to all keys.
min_compress_len: Unimplemented compatibility option.
Returns:
A list of keys whose values were NOT set because they already existed
in memcache. On total success, this list should be empty.
"""
return self._set_multi_with_policy(MemcacheSetRequest.REPLACE, mapping,
time=time, key_prefix=key_prefix)
def incr(self, key, delta=1):
"""Atomically increments a key's value.
Internally, the value is a unsigned 64-bit integer. Memcache
doesn't check 64-bit overflows. The value, if too large, will
wrap around.
The key must already exist in the cache to be incremented. To
initialize a counter, set() it to the initial value, as an
ASCII decimal integer. Future get()s of the key, post-increment,
will still be an ASCII decimal value.
Args:
key: Key to increment. See Client's docstring for details.
delta: Non-negative integer value (int or long) to increment key by,
defaulting to 1.
Returns:
New long integer value, or None if key was not in the cache, could not
be incremented for any other reason, or a network/RPC/server error
occurred.
Raises:
ValueError: If number is negative.
TypeError: If delta isn't an int or long.
"""
return self._incrdecr(key, False, delta)
def decr(self, key, delta=1):
"""Atomically decrements a key's value.
Internally, the value is a unsigned 64-bit integer. Memcache
caps decrementing below zero to zero.
The key must already exist in the cache to be decremented. See
docs on incr() for details.
Args:
key: Key to decrement. See Client's docstring for details.
delta: Non-negative integer value (int or long) to decrement key by,
defaulting to 1.
Returns:
New long integer value, or None if key wasn't in cache and couldn't
be decremented, or a network/RPC/server error occurred.
Raises:
ValueError: If number is negative.
TypeError: If delta isn't an int or long.
"""
return self._incrdecr(key, True, delta)
def _incrdecr(self, key, is_negative, delta):
"""Increment or decrement a key by a provided delta.
Args:
key: Key to increment or decrement.
is_negative: Boolean, if this is a decrement.
delta: Non-negative integer amount (int or long) to increment
or decrement by.
Returns:
New long integer value, or None on cache miss or network/RPC/server
error.
Raises:
ValueError: If delta is negative.
TypeError: If delta isn't an int or long.
"""
if not isinstance(delta, (int, long)):
raise TypeError('Delta must be an integer or long, received %r' % delta)
if delta < 0:
raise ValueError('Delta must not be negative.')
request = MemcacheIncrementRequest()
response = MemcacheIncrementResponse()
request.set_key(_key_string(key))
request.set_delta(delta)
if is_negative:
request.set_direction(MemcacheIncrementRequest.DECREMENT)
else:
request.set_direction(MemcacheIncrementRequest.INCREMENT)
try:
self._make_sync_call('memcache', 'Increment', request, response)
except apiproxy_errors.Error:
return None
if response.has_new_value():
return response.new_value()
return None
# Module-level Client instance backing the convenience functions below;
# installed/replaced by setup_client().
_CLIENT = None
def setup_client(client_obj):
  """Installs a Client instance behind the module-level convenience API.

  Use this when you need a client carrying custom persistent_id() or
  persistent_load() functions.

  Args:
    client_obj: Instance of the memcache.Client object.
  """
  global _CLIENT
  _CLIENT = client_obj
  module_globals = globals()
  # Re-point every module-level convenience function at the new client's
  # bound methods.
  for method_name in ('set_servers', 'disconnect_all', 'forget_dead_hosts',
                      'debuglog', 'get', 'get_multi', 'set', 'set_multi',
                      'add', 'add_multi', 'replace', 'replace_multi',
                      'delete', 'delete_multi', 'incr', 'decr',
                      'flush_all', 'get_stats'):
    module_globals[method_name] = getattr(_CLIENT, method_name)
# Install a default Client so the module-level functions work out of the box.
setup_client(Client())
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the capability service API, everything is always enabled."""
from google.appengine.api import apiproxy_stub
from google.appengine.api import capabilities
# Convenience aliases re-exported from the capabilities package.
IsEnabledRequest = capabilities.IsEnabledRequest
IsEnabledResponse = capabilities.IsEnabledResponse
CapabilityConfig = capabilities.CapabilityConfig
class CapabilityServiceStub(apiproxy_stub.APIProxyStub):
  """Python-only capability service stub.

  Every capability is reported as enabled, unconditionally.
  """

  def __init__(self, service_name='capability_service'):
    """Constructor.

    Args:
      service_name: Service name expected for all calls.
    """
    super(CapabilityServiceStub, self).__init__(service_name)

  def _Dynamic_IsEnabled(self, request, response):
    """Implementation of CapabilityService::IsEnabled().

    Always answers ENABLED, ignoring the request contents.

    Args:
      request: An IsEnabledRequest.
      response: An IsEnabledResponse.
    """
    response.set_summary_status(IsEnabledResponse.ENABLED)
    # Attach one catch-all config entry (empty package/capability) marked
    # ENABLED for callers that inspect the per-capability list.
    config = response.add_config()
    config.set_package('')
    config.set_capability('')
    config.set_status(CapabilityConfig.ENABLED)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
# Suppress pychecker warnings that are expected in generated protobuf code.
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.base.capabilities_pb import CapabilityConfig
class IsEnabledRequest(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message asking whether capabilities are enabled.

  Fields: package (required string, tag 1), capability (repeated string,
  tag 2), call (repeated string, tag 3). Do not edit by hand; the encode
  order in OutputUnchecked() defines the wire format.
  """
  # Presence flag and default for the required 'package' field.
  has_package_ = 0
  package_ = ""

  def __init__(self, contents=None):
    self.capability_ = []
    self.call_ = []
    if contents is not None: self.MergeFromString(contents)

  # Accessors for the required 'package' field.
  def package(self): return self.package_

  def set_package(self, x):
    self.has_package_ = 1
    self.package_ = x

  def clear_package(self):
    if self.has_package_:
      self.has_package_ = 0
      self.package_ = ""

  def has_package(self): return self.has_package_

  # Accessors for the repeated 'capability' field.
  def capability_size(self): return len(self.capability_)
  def capability_list(self): return self.capability_

  def capability(self, i):
    return self.capability_[i]

  def set_capability(self, i, x):
    self.capability_[i] = x

  def add_capability(self, x):
    self.capability_.append(x)

  def clear_capability(self):
    self.capability_ = []

  # Accessors for the repeated 'call' field.
  def call_size(self): return len(self.call_)
  def call_list(self): return self.call_

  def call(self, i):
    return self.call_[i]

  def set_call(self, i, x):
    self.call_[i] = x

  def add_call(self, x):
    self.call_.append(x)

  def clear_call(self):
    self.call_ = []

  def MergeFrom(self, x):
    # Copies every set field of x into self; repeated fields are appended.
    assert x is not self
    if (x.has_package()): self.set_package(x.package())
    for i in xrange(x.capability_size()): self.add_capability(x.capability(i))
    for i in xrange(x.call_size()): self.add_call(x.call(i))

  def Equals(self, x):
    # Field-by-field equality; returns 1/0 rather than True/False.
    if x is self: return 1
    if self.has_package_ != x.has_package_: return 0
    if self.has_package_ and self.package_ != x.package_: return 0
    if len(self.capability_) != len(x.capability_): return 0
    for e1, e2 in zip(self.capability_, x.capability_):
      if e1 != e2: return 0
    if len(self.call_) != len(x.call_): return 0
    for e1, e2 in zip(self.call_, x.call_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # 'package' is the only required field.
    initialized = 1
    if (not self.has_package_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: package not set.')
    return initialized

  def ByteSize(self):
    # One tag byte per element plus length-prefixed string payloads;
    # the trailing +1 is the tag byte of the required 'package' field.
    n = 0
    n += self.lengthString(len(self.package_))
    n += 1 * len(self.capability_)
    for i in xrange(len(self.capability_)): n += self.lengthString(len(self.capability_[i]))
    n += 1 * len(self.call_)
    for i in xrange(len(self.call_)): n += self.lengthString(len(self.call_[i]))
    return n + 1

  def Clear(self):
    self.clear_package()
    self.clear_capability()
    self.clear_call()

  def OutputUnchecked(self, out):
    # Tags 10/18/26 = fields 1/2/3 with length-delimited wire type.
    out.putVarInt32(10)
    out.putPrefixedString(self.package_)
    for i in xrange(len(self.capability_)):
      out.putVarInt32(18)
      out.putPrefixedString(self.capability_[i])
    for i in xrange(len(self.call_)):
      out.putVarInt32(26)
      out.putPrefixedString(self.call_[i])

  def TryMerge(self, d):
    # Decodes fields from d until the buffer is exhausted; unknown tags
    # are skipped, tag 0 is malformed input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_package(d.getPrefixedString())
        continue
      if tt == 18:
        self.add_capability(d.getPrefixedString())
        continue
      if tt == 26:
        self.add_call(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering of all set fields.
    res=""
    if self.has_package_: res+=prefix+("package: %s\n" % self.DebugFormatString(self.package_))
    cnt=0
    for e in self.capability_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("capability%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.call_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("call%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res

  # Field numbers.
  kpackage = 1
  kcapability = 2
  kcall = 3

  # Index-parallel field-name and wire-type tables (index 0 is ErrorCode).
  _TEXT = (
    "ErrorCode",
    "package",
    "capability",
    "call",
  )

  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.STRING,
    ProtocolBuffer.Encoder.STRING,
    ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class IsEnabledResponse(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message answering an IsEnabledRequest.

  Fields: summary_status (required enum, tag 1), time_until_scheduled
  (optional int64, tag 2), config (repeated CapabilityConfig, tag 3).
  Do not edit by hand; OutputUnchecked() defines the wire format.
  """
  # SummaryStatus enum values.
  ENABLED = 1
  SCHEDULED_FUTURE = 2
  SCHEDULED_NOW = 3
  DISABLED = 4
  UNKNOWN = 5

  _SummaryStatus_NAMES = {
    1: "ENABLED",
    2: "SCHEDULED_FUTURE",
    3: "SCHEDULED_NOW",
    4: "DISABLED",
    5: "UNKNOWN",
  }

  # Maps an enum value to its name; returns "" for unknown values.
  def SummaryStatus_Name(cls, x): return cls._SummaryStatus_NAMES.get(x, "")
  SummaryStatus_Name = classmethod(SummaryStatus_Name)

  # Presence flags and defaults for the scalar fields.
  has_summary_status_ = 0
  summary_status_ = 0
  has_time_until_scheduled_ = 0
  time_until_scheduled_ = 0

  def __init__(self, contents=None):
    self.config_ = []
    if contents is not None: self.MergeFromString(contents)

  # Accessors for the required 'summary_status' field.
  def summary_status(self): return self.summary_status_

  def set_summary_status(self, x):
    self.has_summary_status_ = 1
    self.summary_status_ = x

  def clear_summary_status(self):
    if self.has_summary_status_:
      self.has_summary_status_ = 0
      self.summary_status_ = 0

  def has_summary_status(self): return self.has_summary_status_

  # Accessors for the optional 'time_until_scheduled' field.
  def time_until_scheduled(self): return self.time_until_scheduled_

  def set_time_until_scheduled(self, x):
    self.has_time_until_scheduled_ = 1
    self.time_until_scheduled_ = x

  def clear_time_until_scheduled(self):
    if self.has_time_until_scheduled_:
      self.has_time_until_scheduled_ = 0
      self.time_until_scheduled_ = 0

  def has_time_until_scheduled(self): return self.has_time_until_scheduled_

  # Accessors for the repeated 'config' message field.
  def config_size(self): return len(self.config_)
  def config_list(self): return self.config_

  def config(self, i):
    return self.config_[i]

  def mutable_config(self, i):
    return self.config_[i]

  def add_config(self):
    # Appends and returns a fresh CapabilityConfig for the caller to fill.
    x = CapabilityConfig()
    self.config_.append(x)
    return x

  def clear_config(self):
    self.config_ = []

  def MergeFrom(self, x):
    # Copies every set field of x into self; config entries are appended.
    assert x is not self
    if (x.has_summary_status()): self.set_summary_status(x.summary_status())
    if (x.has_time_until_scheduled()): self.set_time_until_scheduled(x.time_until_scheduled())
    for i in xrange(x.config_size()): self.add_config().CopyFrom(x.config(i))

  def Equals(self, x):
    # Field-by-field equality; returns 1/0 rather than True/False.
    if x is self: return 1
    if self.has_summary_status_ != x.has_summary_status_: return 0
    if self.has_summary_status_ and self.summary_status_ != x.summary_status_: return 0
    if self.has_time_until_scheduled_ != x.has_time_until_scheduled_: return 0
    if self.has_time_until_scheduled_ and self.time_until_scheduled_ != x.time_until_scheduled_: return 0
    if len(self.config_) != len(x.config_): return 0
    for e1, e2 in zip(self.config_, x.config_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # Requires summary_status plus initialization of every nested config.
    initialized = 1
    if (not self.has_summary_status_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: summary_status not set.')
    for p in self.config_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # The trailing +1 is the tag byte of the required summary_status field.
    n = 0
    n += self.lengthVarInt64(self.summary_status_)
    if (self.has_time_until_scheduled_): n += 1 + self.lengthVarInt64(self.time_until_scheduled_)
    n += 1 * len(self.config_)
    for i in xrange(len(self.config_)): n += self.lengthString(self.config_[i].ByteSize())
    return n + 1

  def Clear(self):
    self.clear_summary_status()
    self.clear_time_until_scheduled()
    self.clear_config()

  def OutputUnchecked(self, out):
    # Tags 8/16 = varint fields 1/2; tag 26 = length-delimited field 3.
    out.putVarInt32(8)
    out.putVarInt32(self.summary_status_)
    if (self.has_time_until_scheduled_):
      out.putVarInt32(16)
      out.putVarInt64(self.time_until_scheduled_)
    for i in xrange(len(self.config_)):
      out.putVarInt32(26)
      out.putVarInt32(self.config_[i].ByteSize())
      self.config_[i].OutputUnchecked(out)

  def TryMerge(self, d):
    # Decodes fields from d until the buffer is exhausted; nested config
    # messages are decoded from a bounded sub-decoder. Unknown tags are
    # skipped, tag 0 is malformed input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_summary_status(d.getVarInt32())
        continue
      if tt == 16:
        self.set_time_until_scheduled(d.getVarInt64())
        continue
      if tt == 26:
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_config().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    # Debug text rendering of all set fields.
    res=""
    if self.has_summary_status_: res+=prefix+("summary_status: %s\n" % self.DebugFormatInt32(self.summary_status_))
    if self.has_time_until_scheduled_: res+=prefix+("time_until_scheduled: %s\n" % self.DebugFormatInt64(self.time_until_scheduled_))
    cnt=0
    for e in self.config_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("config%s <\n" % elm)
      res+=e.__str__(prefix + " ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res

  # Field numbers.
  ksummary_status = 1
  ktime_until_scheduled = 2
  kconfig = 3

  # Index-parallel field-name and wire-type tables (index 0 is ErrorCode).
  _TEXT = (
    "ErrorCode",
    "summary_status",
    "time_until_scheduled",
    "config",
  )

  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module.
__all__ = ['IsEnabledRequest','IsEnabledResponse']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Allows applications to identify API outages and scheduled downtime.
Some examples:
def StoreUploadedProfileImage(self):
uploaded_image = self.request.get('img')
# If the images API is unavailable, we'll just skip the resize.
if CapabilitySet('images').is_enabled():
uploaded_image = images.resize(uploaded_image, 64, 64)
store(uploaded_image)
def RenderHTMLForm(self):
datastore_readonly = CapabilitySet('datastore_v3', capabilities=['write'])
if datastore_readonly.may_be_disabled_in(60):
# self.response.out('<p>Not accepting submissions right now: %s</p>' %
datastore_readonly.admin_message())
# ...render form with form elements disabled...
else:
# ...render form normally...
Individual API wrapper modules should expose CapabilitySet objects
for users rather than relying on users to create them. They may
also create convenience methods (e.g. db.IsReadOnly()) that delegate
to the relevant CapabilitySet.
Classes defined here:
CapabilitySet: encapsulates one or more capabilities, allows introspection.
UnknownCapabilityError: thrown when an unknown capability is requested.
"""
from google.appengine.api.capabilities import capability_service_pb
from google.appengine.base import capabilities_pb
from google.appengine.api import apiproxy_stub_map
# Convenience aliases for the generated protocol buffer classes.
IsEnabledRequest = capability_service_pb.IsEnabledRequest
IsEnabledResponse = capability_service_pb.IsEnabledResponse
CapabilityConfig = capabilities_pb.CapabilityConfig
class UnknownCapabilityError(Exception):
  """Raised when an unknown capability was requested of the service."""
class CapabilitySet(object):
  """Encapsulates one or more capabilities for status introspection.

  Capabilities may be named explicitly or inferred from the list of
  methods supplied. With neither capabilities nor methods, the check
  applies to the package as a whole.
  """

  def __init__(self, package, capabilities=None, methods=None,
               stub_map=apiproxy_stub_map):
    """Constructor.

    Args:
      package: Name of the API package to query, e.g. 'datastore_v3'.
      capabilities: Optional list of capability name strings.
      methods: Optional list of method name strings.
      stub_map: Stub map used to issue the IsEnabled call; defaults to
        the global apiproxy_stub_map module.
    """
    self._package = package
    # '*' always asks about the package as a whole, in addition to any
    # explicitly named capabilities.
    self._capabilities = ['*'] + (capabilities if capabilities is not None
                                  else [])
    self._methods = methods if methods is not None else []
    self._stub_map = stub_map

  def is_enabled(self):
    """Tests whether the capabilities are currently enabled.

    Returns:
      True if API calls that require these capabilities will succeed.

    Raises:
      UnknownCapabilityError, if a specified capability was not recognized.
    """
    summary = self._get_status().summary_status()
    return summary in (IsEnabledResponse.ENABLED,
                       IsEnabledResponse.SCHEDULED_FUTURE,
                       IsEnabledResponse.SCHEDULED_NOW)

  def will_remain_enabled_for(self, time=60):
    """Checks that the capabilities stay enabled for a window of time.

    Args:
      time: Number of seconds in the future to look when checking for
        scheduled downtime.

    Returns:
      True if no scheduled downtime for the specified capability falls
      within the given window.

    Raises:
      UnknownCapabilityError, if a specified capability was not recognized.
    """
    config = self._get_status()
    status = config.summary_status()
    if status == IsEnabledResponse.ENABLED:
      return True
    if status == IsEnabledResponse.SCHEDULED_FUTURE:
      # Without a concrete schedule time, treat "future" as beyond the
      # requested window.
      if config.has_time_until_scheduled():
        return config.time_until_scheduled() >= time
      return True
    # SCHEDULED_NOW, DISABLED, and anything unexpected all mean no.
    return False

  def admin_message(self):
    """Gets any administrator notice messages for these capabilities.

    Returns:
      A string containing one or more admin messages, or an empty string.
      Duplicate messages are reported only once.

    Raises:
      UnknownCapabilityError, if a specified capability was not recognized.
    """
    messages = []
    for config in self._get_status().config_list():
      text = config.admin_message()
      if text and text not in messages:
        messages.append(text)
    return ' '.join(messages)

  def _get_status(self):
    """Issues the IsEnabled RPC for the configured capabilities.

    Returns:
      IsEnabledResponse for the specified capabilities.

    Raises:
      UnknownCapabilityError: If an unknown capability was requested.
    """
    request = IsEnabledRequest()
    request.set_package(self._package)
    for capability in self._capabilities:
      request.add_capability(capability)
    for method in self._methods:
      request.add_call(method)
    response = capability_service_pb.IsEnabledResponse()
    self._stub_map.MakeSyncCall('capability_service', 'IsEnabled',
                                request, response)
    if response.summary_status() == IsEnabledResponse.UNKNOWN:
      raise UnknownCapabilityError()
    return response
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the capability service API, everything is always enabled."""
from google.appengine.api import apiproxy_stub
from google.appengine.api import capabilities
# Convenience aliases re-exported from the capabilities package.
IsEnabledRequest = capabilities.IsEnabledRequest
IsEnabledResponse = capabilities.IsEnabledResponse
CapabilityConfig = capabilities.CapabilityConfig
class CapabilityServiceStub(apiproxy_stub.APIProxyStub):
  """Python-only capability service stub.

  Every capability is reported as enabled, unconditionally.
  """

  def __init__(self, service_name='capability_service'):
    """Constructor.

    Args:
      service_name: Service name expected for all calls.
    """
    super(CapabilityServiceStub, self).__init__(service_name)

  def _Dynamic_IsEnabled(self, request, response):
    """Implementation of CapabilityService::IsEnabled().

    Always answers ENABLED, ignoring the request contents.

    Args:
      request: An IsEnabledRequest.
      response: An IsEnabledResponse.
    """
    response.set_summary_status(IsEnabledResponse.ENABLED)
    # Attach one catch-all config entry (empty package/capability) marked
    # ENABLED for callers that inspect the per-capability list.
    config = response.add_config()
    config.set_package('')
    config.set_capability('')
    config.set_status(CapabilityConfig.ENABLED)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
# Suppress pychecker warnings that are expected in generated protobuf code.
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.base.capabilities_pb import CapabilityConfig
class IsEnabledRequest(ProtocolBuffer.ProtocolMessage):
has_package_ = 0
package_ = ""
def __init__(self, contents=None):
self.capability_ = []
self.call_ = []
if contents is not None: self.MergeFromString(contents)
def package(self): return self.package_
def set_package(self, x):
self.has_package_ = 1
self.package_ = x
def clear_package(self):
if self.has_package_:
self.has_package_ = 0
self.package_ = ""
def has_package(self): return self.has_package_
def capability_size(self): return len(self.capability_)
def capability_list(self): return self.capability_
def capability(self, i):
return self.capability_[i]
def set_capability(self, i, x):
self.capability_[i] = x
def add_capability(self, x):
self.capability_.append(x)
def clear_capability(self):
self.capability_ = []
def call_size(self): return len(self.call_)
def call_list(self): return self.call_
def call(self, i):
return self.call_[i]
def set_call(self, i, x):
self.call_[i] = x
def add_call(self, x):
self.call_.append(x)
def clear_call(self):
self.call_ = []
def MergeFrom(self, x):
assert x is not self
if (x.has_package()): self.set_package(x.package())
for i in xrange(x.capability_size()): self.add_capability(x.capability(i))
for i in xrange(x.call_size()): self.add_call(x.call(i))
def Equals(self, x):
if x is self: return 1
if self.has_package_ != x.has_package_: return 0
if self.has_package_ and self.package_ != x.package_: return 0
if len(self.capability_) != len(x.capability_): return 0
for e1, e2 in zip(self.capability_, x.capability_):
if e1 != e2: return 0
if len(self.call_) != len(x.call_): return 0
for e1, e2 in zip(self.call_, x.call_):
if e1 != e2: return 0
return 1
def IsInitialized(self, debug_strs=None):
initialized = 1
if (not self.has_package_):
initialized = 0
if debug_strs is not None:
debug_strs.append('Required field: package not set.')
return initialized
def ByteSize(self):
n = 0
n += self.lengthString(len(self.package_))
n += 1 * len(self.capability_)
for i in xrange(len(self.capability_)): n += self.lengthString(len(self.capability_[i]))
n += 1 * len(self.call_)
for i in xrange(len(self.call_)): n += self.lengthString(len(self.call_[i]))
return n + 1
def Clear(self):
self.clear_package()
self.clear_capability()
self.clear_call()
  def OutputUnchecked(self, out):
    """Serialize to the encoder without checking required fields."""
    out.putVarInt32(10)  # tag 1, wire type 2 (length-delimited)
    out.putPrefixedString(self.package_)
    for i in xrange(len(self.capability_)):
      out.putVarInt32(18)  # tag 2, wire type 2
      out.putPrefixedString(self.capability_[i])
    for i in xrange(len(self.call_)):
      out.putVarInt32(26)  # tag 3, wire type 2
      out.putPrefixedString(self.call_[i])
  def TryMerge(self, d):
    """Decode fields from decoder d until it is exhausted; skip unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_package(d.getPrefixedString())
        continue
      if tt == 18:
        self.add_capability(d.getPrefixedString())
        continue
      if tt == 26:
        self.add_call(d.getPrefixedString())
        continue
      # Tag 0 is never valid on the wire.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    """Render a debug text dump; printElemNumber adds (i) indices to repeats."""
    res=""
    if self.has_package_: res+=prefix+("package: %s\n" % self.DebugFormatString(self.package_))
    cnt=0
    for e in self.capability_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("capability%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.call_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("call%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    return res
kpackage = 1
kcapability = 2
kcall = 3
_TEXT = (
"ErrorCode",
"package",
"capability",
"call",
)
_TYPES = (
ProtocolBuffer.Encoder.NUMERIC,
ProtocolBuffer.Encoder.STRING,
ProtocolBuffer.Encoder.STRING,
ProtocolBuffer.Encoder.STRING,
)
_STYLE = """"""
_STYLE_CONTENT_TYPE = """"""
class IsEnabledResponse(ProtocolBuffer.ProtocolMessage):
  """Generated response message for the capability service IsEnabled call.

  Fields:
    summary_status: required SummaryStatus enum (tag 1).
    time_until_scheduled: optional int64 (tag 2).
    config: repeated CapabilityConfig submessage (tag 3).
  """

  # SummaryStatus enum values.
  ENABLED          =    1
  SCHEDULED_FUTURE =    2
  SCHEDULED_NOW    =    3
  DISABLED         =    4
  UNKNOWN          =    5

  _SummaryStatus_NAMES = {
    1: "ENABLED",
    2: "SCHEDULED_FUTURE",
    3: "SCHEDULED_NOW",
    4: "DISABLED",
    5: "UNKNOWN",
  }

  # Pre-decorator classmethod idiom: maps an enum value to its name ("" if unknown).
  def SummaryStatus_Name(cls, x): return cls._SummaryStatus_NAMES.get(x, "")
  SummaryStatus_Name = classmethod(SummaryStatus_Name)

  # Presence bits and defaults for the scalar fields.
  has_summary_status_ = 0
  summary_status_ = 0
  has_time_until_scheduled_ = 0
  time_until_scheduled_ = 0

  def __init__(self, contents=None):
    # Repeated field must be per-instance; optionally decode from bytes.
    self.config_ = []
    if contents is not None: self.MergeFromString(contents)

  # Accessors for 'summary_status' (tag 1).
  def summary_status(self): return self.summary_status_
  def set_summary_status(self, x):
    self.has_summary_status_ = 1
    self.summary_status_ = x
  def clear_summary_status(self):
    if self.has_summary_status_:
      self.has_summary_status_ = 0
      self.summary_status_ = 0
  def has_summary_status(self): return self.has_summary_status_

  # Accessors for 'time_until_scheduled' (tag 2).
  def time_until_scheduled(self): return self.time_until_scheduled_
  def set_time_until_scheduled(self, x):
    self.has_time_until_scheduled_ = 1
    self.time_until_scheduled_ = x
  def clear_time_until_scheduled(self):
    if self.has_time_until_scheduled_:
      self.has_time_until_scheduled_ = 0
      self.time_until_scheduled_ = 0
  def has_time_until_scheduled(self): return self.has_time_until_scheduled_

  # Accessors for the repeated submessage field 'config' (tag 3).
  def config_size(self): return len(self.config_)
  def config_list(self): return self.config_
  def config(self, i):
    return self.config_[i]
  def mutable_config(self, i):
    return self.config_[i]
  def add_config(self):
    # Append a fresh element and return it for in-place population.
    x = CapabilityConfig()
    self.config_.append(x)
    return x
  def clear_config(self):
    self.config_ = []

  def MergeFrom(self, x):
    """Merge the set fields of x into self (repeats are appended)."""
    assert x is not self
    if (x.has_summary_status()): self.set_summary_status(x.summary_status())
    if (x.has_time_until_scheduled()): self.set_time_until_scheduled(x.time_until_scheduled())
    for i in xrange(x.config_size()): self.add_config().CopyFrom(x.config(i))

  def Equals(self, x):
    """Field-by-field equality check; returns 1 or 0."""
    if x is self: return 1
    if self.has_summary_status_ != x.has_summary_status_: return 0
    if self.has_summary_status_ and self.summary_status_ != x.summary_status_: return 0
    if self.has_time_until_scheduled_ != x.has_time_until_scheduled_: return 0
    if self.has_time_until_scheduled_ and self.time_until_scheduled_ != x.time_until_scheduled_: return 0
    if len(self.config_) != len(x.config_): return 0
    for e1, e2 in zip(self.config_, x.config_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Return 1 iff required fields (summary_status + nested configs) are set."""
    initialized = 1
    if (not self.has_summary_status_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: summary_status not set.')
    for p in self.config_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    """Return the encoded size in bytes of this message."""
    n = 0
    n += self.lengthVarInt64(self.summary_status_)
    if (self.has_time_until_scheduled_): n += 1 + self.lengthVarInt64(self.time_until_scheduled_)
    n += 1 * len(self.config_)  # one tag byte per repeated element
    for i in xrange(len(self.config_)): n += self.lengthString(self.config_[i].ByteSize())
    return n + 1  # +1: tag byte for the required 'summary_status' field

  def Clear(self):
    """Reset every field to its default state."""
    self.clear_summary_status()
    self.clear_time_until_scheduled()
    self.clear_config()

  def OutputUnchecked(self, out):
    """Serialize to the encoder without checking required fields."""
    out.putVarInt32(8)   # tag 1, wire type 0 (varint)
    out.putVarInt32(self.summary_status_)
    if (self.has_time_until_scheduled_):
      out.putVarInt32(16)  # tag 2, wire type 0
      out.putVarInt64(self.time_until_scheduled_)
    for i in xrange(len(self.config_)):
      out.putVarInt32(26)  # tag 3, wire type 2 (length-delimited submessage)
      out.putVarInt32(self.config_[i].ByteSize())
      self.config_[i].OutputUnchecked(out)

  def TryMerge(self, d):
    """Decode fields from decoder d until it is exhausted; skip unknown tags."""
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_summary_status(d.getVarInt32())
        continue
      if tt == 16:
        self.set_time_until_scheduled(d.getVarInt64())
        continue
      if tt == 26:
        # Bounded sub-decoder over the submessage's byte span.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_config().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    """Render a debug text dump; printElemNumber adds (i) indices to repeats."""
    res=""
    if self.has_summary_status_: res+=prefix+("summary_status: %s\n" % self.DebugFormatInt32(self.summary_status_))
    if self.has_time_until_scheduled_: res+=prefix+("time_until_scheduled: %s\n" % self.DebugFormatInt64(self.time_until_scheduled_))
    cnt=0
    for e in self.config_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("config%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res

  # Field tag numbers for this message.
  ksummary_status = 1
  ktime_until_scheduled = 2
  kconfig = 3

  # Parallel per-tag name/type tables; index = field tag, slot 0 is ErrorCode.
  _TEXT = (
    "ErrorCode",
    "summary_status",
    "time_until_scheduled",
    "config",
  )
  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module.
__all__ = ['IsEnabledRequest','IsEnabledResponse']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Allows applications to identify API outages and scheduled downtime.
Some examples:
def StoreUploadedProfileImage(self):
uploaded_image = self.request.get('img')
# If the images API is unavailable, we'll just skip the resize.
if CapabilitySet('images').is_enabled():
uploaded_image = images.resize(uploaded_image, 64, 64)
store(uploaded_image)
def RenderHTMLForm(self):
datastore_readonly = CapabilitySet('datastore_v3', capabilities=['write'])
    if not datastore_readonly.will_remain_enabled_for(60):
      self.response.out('<p>Not accepting submissions right now: %s</p>' %
                        datastore_readonly.admin_message())
# ...render form with form elements disabled...
else:
# ...render form normally...
Individual API wrapper modules should expose CapabilitySet objects
for users rather than relying on users to create them. They may
also create convenience methods (e.g. db.IsReadOnly()) that delegate
to the relevant CapabilitySet.
Classes defined here:
CapabilitySet: encapsulates one or more capabilities, allows introspection.
UnknownCapabilityError: thrown when an unknown capability is requested.
"""
from google.appengine.api.capabilities import capability_service_pb
from google.appengine.base import capabilities_pb
from google.appengine.api import apiproxy_stub_map
IsEnabledRequest = capability_service_pb.IsEnabledRequest
IsEnabledResponse = capability_service_pb.IsEnabledResponse
CapabilityConfig = capabilities_pb.CapabilityConfig
class UnknownCapabilityError(Exception):
  """An unknown capability was requested.

  Raised by CapabilitySet._get_status() when the capability service reports
  a summary status of UNKNOWN for the requested package/capabilities.
  """
class CapabilitySet(object):
  """Encapsulates one or more capabilities for status introspection.

  Capabilities may be named explicitly, or inferred from the list of
  methods supplied. With neither given, the set covers the whole package
  (the implicit '*' capability).
  """

  def __init__(self, package, capabilities=None, methods=None,
               stub_map=apiproxy_stub_map):
    """Constructor.

    Args:
      package: str, name of the API package to query.
      capabilities: list of capability name strings.
      methods: list of RPC method name strings.
      stub_map: stub map used to issue the service call (dependency injection).
    """
    self._package = package
    # '*' always leads so the whole-package capability is checked too.
    self._capabilities = ['*'] + (capabilities if capabilities is not None else [])
    self._methods = methods if methods is not None else []
    self._stub_map = stub_map

  def is_enabled(self):
    """Report whether the capabilities are currently usable.

    Returns:
      True if API calls that require these capabilities will succeed.

    Raises:
      UnknownCapabilityError, if a specified capability was not recognized.
    """
    enabled_states = (IsEnabledResponse.ENABLED,
                      IsEnabledResponse.SCHEDULED_FUTURE,
                      IsEnabledResponse.SCHEDULED_NOW)
    return self._get_status().summary_status() in enabled_states

  def will_remain_enabled_for(self, time=60):
    """Report whether the capabilities stay enabled for a given window.

    Args:
      time: seconds into the future to check for scheduled downtime.

    Returns:
      True when no downtime is scheduled within the window.

    Raises:
      UnknownCapabilityError, if a specified capability was not recognized.
    """
    config = self._get_status()
    status = config.summary_status()
    if status == IsEnabledResponse.ENABLED:
      return True
    if status == IsEnabledResponse.SCHEDULED_FUTURE:
      # Without a concrete schedule we optimistically report availability.
      if not config.has_time_until_scheduled():
        return True
      return config.time_until_scheduled() >= time
    # SCHEDULED_NOW, DISABLED and anything unexpected all mean "no".
    return False

  def admin_message(self):
    """Collect administrator notice messages for these capabilities.

    Returns:
      A string with one or more de-duplicated admin messages, or ''.

    Raises:
      UnknownCapabilityError, if a specified capability was not recognized.
    """
    seen = []
    for config in self._get_status().config_list():
      note = config.admin_message()
      if note and note not in seen:
        seen.append(note)
    return ' '.join(seen)

  def _get_status(self):
    """Issue the IsEnabled RPC for the configured capabilities.

    Returns:
      IsEnabledResponse for the specified capabilities.

    Raises:
      UnknownCapabilityError: If an unknown capability was requested.
    """
    request = IsEnabledRequest()
    request.set_package(self._package)
    for capability in self._capabilities:
      request.add_capability(capability)
    for method in self._methods:
      request.add_call(method)
    response = IsEnabledResponse()
    self._stub_map.MakeSyncCall('capability_service', 'IsEnabled',
                                request, response)
    if response.summary_status() == IsEnabledResponse.UNKNOWN:
      raise UnknownCapabilityError()
    return response
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Builder for mapping YAML documents to object instances.
ObjectBuilder is responsible for mapping a YAML document to classes defined
using the validation mechanism (see google.appengine.api.validation.py).
"""
from google.appengine.api import validation
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_errors
import yaml
class _ObjectMapper(object):
"""Wrapper used for mapping attributes from a yaml file to an object.
This wrapper is required because objects do not know what property they are
associated with a creation time, and therefore can not be instantiated
with the correct class until they are mapped to their parents.
"""
def __init__(self):
"""Object mapper starts off with empty value."""
self.value = None
self.seen = set()
def set_value(self, value):
"""Set value of instance to map to.
Args:
value: Instance that this mapper maps to.
"""
self.value = value
def see(self, key):
if key in self.seen:
raise yaml_errors.DuplicateAttribute("Duplicate attribute '%s'." % key)
self.seen.add(key)
class _ObjectSequencer(object):
"""Wrapper used for building sequences from a yaml file to a list.
This wrapper is required because objects do not know what property they are
associated with a creation time, and therefore can not be instantiated
with the correct class until they are mapped to their parents.
"""
def __init__(self):
"""Object sequencer starts off with empty value."""
self.value = []
self.constructor = None
def set_constructor(self, constructor):
"""Set object used for constructing new sequence instances.
Args:
constructor: Callable which can accept no arguments. Must return
an instance of the appropriate class for the container.
"""
self.constructor = constructor
class ObjectBuilder(yaml_builder.Builder):
  """Builder used for constructing validated objects.

  Given a class that implements validation.Validated, it will parse a YAML
  document and attempt to build an instance of the class.  It does so by
  mapping YAML keys to Python attributes.  ObjectBuilder will only map YAML
  fields to attributes defined in the Validated subclasses 'ATTRIBUTE'
  definitions.  Lists are mapped to validated.Repeated attributes and maps
  are mapped to validated.Type properties.

  For a YAML map to be compatible with a class, the class must have a
  constructor that can be called with no parameters.  If the provided type
  does not have such a constructor a parse time error will occur.
  """

  def __init__(self, default_class):
    """Initialize validated object builder.

    Args:
      default_class: Class that is instantiated upon the detection of a new
        document.  An instance of this class will act as the document itself.
    """
    self.default_class = default_class

  def _GetRepeated(self, attribute):
    """Get the ultimate type of a repeated validator.

    Looks for an instance of validation.Repeated, returning its constructor.

    Args:
      attribute: Repeated validator attribute to find type for.

    Returns:
      The expected class of the Repeated validator, otherwise object.
    """
    # Unwrap an Optional wrapper before checking for Repeated.
    if isinstance(attribute, validation.Optional):
      attribute = attribute.validator
    if isinstance(attribute, validation.Repeated):
      return attribute.constructor
    return object

  def BuildDocument(self):
    """Instantiate new root validated object.

    Returns:
      New instance of validated object.
    """
    return self.default_class()

  def BuildMapping(self, top_value):
    """New instance of object mapper for opening map scope.

    Args:
      top_value: Parent of nested object.

    Returns:
      New instance of object mapper.
    """
    result = _ObjectMapper()
    # The root document object doubles as the mapper's target.
    if isinstance(top_value, self.default_class):
      result.value = top_value
    return result

  def EndMapping(self, top_value, mapping):
    """When leaving scope, makes sure new object is initialized.

    This method is mainly for picking up on any missing required attributes.

    Args:
      top_value: Parent of closing mapping object.
      mapping: _ObjectMapper instance that is leaving scope.
    """
    try:
      mapping.value.CheckInitialized()
    except validation.ValidationError:
      # Already the right exception type; propagate untouched.
      raise
    except Exception, e:
      # Wrap any other failure in a ValidationError, guarding against
      # exceptions whose str() itself raises.
      try:
        error_str = str(e)
      except Exception:
        error_str = '<unknown>'
      raise validation.ValidationError("Invalid object:\n%s" % error_str, e)

  def BuildSequence(self, top_value):
    """New instance of object sequence.

    Args:
      top_value: Object that contains the new sequence.

    Returns:
      A new _ObjectSequencer instance.
    """
    return _ObjectSequencer()

  def MapTo(self, subject, key, value):
    """Map key-value pair to an objects attribute.

    Args:
      subject: _ObjectMapper of object that will receive new attribute.
      key: Key of attribute.
      value: Value of new attribute.

    Raises:
      UnexpectedAttribute when the key is not a validated attribute of
      the subject value class.
    """
    assert subject.value is not None
    if key not in subject.value.ATTRIBUTES:
      raise yaml_errors.UnexpectedAttribute(
          'Unexpected attribute \'%s\' for object of type %s.' %
          (key, str(subject.value.__class__)))

    if isinstance(value, _ObjectMapper):
      # Nested mapping: instantiate the expected type, then unwrap.
      value.set_value(subject.value.GetAttribute(key).expected_type())
      value = value.value
    elif isinstance(value, _ObjectSequencer):
      # Nested sequence: give it the element constructor, then unwrap.
      value.set_constructor(self._GetRepeated(subject.value.ATTRIBUTES[key]))
      value = value.value

    subject.see(key)  # rejects duplicate keys
    try:
      setattr(subject.value, key, value)
    except validation.ValidationError, e:
      # Enrich the existing ValidationError with key/value context;
      # str() of either may itself raise, so guard both.
      try:
        error_str = str(e)
      except Exception:
        error_str = '<unknown>'
      try:
        value_str = str(value)
      except Exception:
        value_str = '<unknown>'
      e.message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
                   (value_str, key, error_str))
      raise e
    except Exception, e:
      # Any other failure is wrapped in a new ValidationError.
      try:
        error_str = str(e)
      except Exception:
        error_str = '<unknown>'
      try:
        value_str = str(value)
      except Exception:
        value_str = '<unknown>'
      message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
                 (value_str, key, error_str))
      raise validation.ValidationError(message, e)

  def AppendTo(self, subject, value):
    """Append a value to a sequence.

    Args:
      subject: _ObjectSequencer that is receiving new value.
      value: Value that is being appended to sequence.
    """
    if isinstance(value, _ObjectMapper):
      # Nested mapping element: build it with the sequence's constructor.
      value.set_value(subject.constructor())
      subject.value.append(value.value)
    else:
      subject.value.append(value)
def BuildObjects(default_class, stream, loader=yaml.loader.SafeLoader):
  """Build objects from stream.

  Handles the basic case of loading all the objects from a stream.

  Args:
    default_class: Class that is instantiated upon the detection of a new
      document.  An instance of this class will act as the document itself.
    stream: String document or open file object to process as per the
      yaml.parse method.  Any object that implements a 'read()' method which
      returns a string document will work with the YAML parser.
    loader: PyYAML loader class; used for dependency injection.

  Returns:
    List of default_class instances parsed from the stream.
  """
  builder = ObjectBuilder(default_class)
  handler = yaml_builder.BuilderHandler(builder)
  listener = yaml_listener.EventListener(handler)

  listener.Parse(stream, loader)
  return handler.GetResults()
def BuildSingleObject(default_class, stream, loader=yaml.loader.SafeLoader):
  """Build object from stream.

  Handles the basic case of loading a single object from a stream.

  Args:
    default_class: Class that is instantiated upon the detection of a new
      document.  An instance of this class will act as the document itself.
    stream: String document or open file object to process as per the
      yaml.parse method.  Any object that implements a 'read()' method which
      returns a string document will work with the YAML parser.
    loader: PyYAML loader class; used for dependency injection.

  Returns:
    The single default_class instance parsed from the stream.

  Raises:
    yaml_errors.EmptyConfigurationFile: when the stream contains no document.
    yaml_errors.MultipleConfigurationFile: when it contains more than one.
  """
  definitions = BuildObjects(default_class, stream, loader)

  if len(definitions) < 1:
    raise yaml_errors.EmptyConfigurationFile()
  if len(definitions) > 1:
    raise yaml_errors.MultipleConfigurationFile()
  return definitions[0]
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
class StringProto(ProtocolBuffer.ProtocolMessage):
  """Generated message wrapping a single required string 'value' (tag 1)."""

  # Presence bit and default for the lone field.
  has_value_ = 0
  value_ = ""

  def __init__(self, contents=None):
    # Optionally initialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    # Field-by-field equality; returns 1 or 0.
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # 'value' is required.
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.value_))
    return n + 1  # +1: tag byte

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    out.putVarInt32(10)  # tag 1, wire type 2 (length-delimited)
    out.putPrefixedString(self.value_)

  def TryMerge(self, d):
    # Decode until exhausted; skip unknown tags.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_value(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    return res

  kvalue = 1  # field tag

  # Per-tag name/type tables (slot 0 is the ErrorCode slot).
  _TEXT = (
    "ErrorCode",
    "value",
  )
  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Integer32Proto(ProtocolBuffer.ProtocolMessage):
  """Generated message wrapping a single required int32 'value' (tag 1)."""

  has_value_ = 0
  value_ = 0

  def __init__(self, contents=None):
    # Optionally initialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0
  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    # Field-by-field equality; returns 1 or 0.
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # 'value' is required.
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.value_)
    return n + 1  # +1: tag byte

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    out.putVarInt32(8)  # tag 1, wire type 0 (varint)
    out.putVarInt32(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_value(d.getVarInt32())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt32(self.value_))
    return res

  kvalue = 1  # field tag

  _TEXT = (
    "ErrorCode",
    "value",
  )
  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Integer64Proto(ProtocolBuffer.ProtocolMessage):
  """Generated message wrapping a single required int64 'value' (tag 1)."""

  has_value_ = 0
  value_ = 0

  def __init__(self, contents=None):
    # Optionally initialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0
  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    # Field-by-field equality; returns 1 or 0.
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # 'value' is required.
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.value_)
    return n + 1  # +1: tag byte

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    out.putVarInt32(8)  # tag 1, wire type 0 (varint)
    out.putVarInt64(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_value(d.getVarInt64())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt64(self.value_))
    return res

  kvalue = 1  # field tag

  _TEXT = (
    "ErrorCode",
    "value",
  )
  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class BoolProto(ProtocolBuffer.ProtocolMessage):
  """Generated message wrapping a single required bool 'value' (tag 1)."""

  has_value_ = 0
  value_ = 0

  def __init__(self, contents=None):
    # Optionally initialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0
  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    # Field-by-field equality; returns 1 or 0.
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # 'value' is required.
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    return n + 2  # fixed size: one tag byte + one boolean byte

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    out.putVarInt32(8)  # tag 1, wire type 0 (varint)
    out.putBoolean(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_value(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatBool(self.value_))
    return res

  kvalue = 1  # field tag

  _TEXT = (
    "ErrorCode",
    "value",
  )
  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class DoubleProto(ProtocolBuffer.ProtocolMessage):
  """Generated message wrapping a single required double 'value' (tag 1)."""

  has_value_ = 0
  value_ = 0.0

  def __init__(self, contents=None):
    # Optionally initialize from an encoded byte string.
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0.0
  def has_value(self): return self.has_value_

  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    # Field-by-field equality; returns 1 or 0.
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    # 'value' is required.
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    return n + 9  # fixed size: one tag byte + 8-byte double

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    out.putVarInt32(9)  # tag 1, wire type 1 (64-bit)
    out.putDouble(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 9:
        self.set_value(d.getDouble())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormat(self.value_))
    return res

  kvalue = 1  # field tag

  _TEXT = (
    "ErrorCode",
    "value",
  )
  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
    ProtocolBuffer.Encoder.DOUBLE,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class VoidProto(ProtocolBuffer.ProtocolMessage):
  """Generated empty message: carries no fields (zero-byte encoding)."""

  def __init__(self, contents=None):
    # No fields to initialize; dead 'pass' before the real body removed.
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    # Nothing to merge; still enforce the no-self-merge contract.
    assert x is not self

  def Equals(self, x):
    # All VoidProto instances are equal (no fields to compare).
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    # No required fields, so always initialized.
    initialized = 1
    return initialized

  def ByteSize(self):
    # Encodes to zero bytes.
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip (ignore) any unknown fields; tag 0 is never valid on the wire.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  # Per-tag name/type tables (slot 0 is the ErrorCode slot).
  _TEXT = (
    "ErrorCode",
  )
  _TYPES = (
    ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module.
__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','VoidProto']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the mail API, writes email to logs and can optionally
send real email via SMTP or sendmail."""
from email import MIMEBase
from email import MIMEMultipart
from email import MIMEText
import logging
import mail
import mimetypes
import subprocess
import smtplib
from google.appengine.api import apiproxy_stub
class MailServiceStub(apiproxy_stub.APIProxyStub):
"""Python only mail service stub.
  This stub does not actually attempt to send email.  Instead it merely logs
  a description of the email to the developers console.
Args:
host: Host of SMTP server to use. Blank disables sending SMTP.
port: Port of SMTP server to use.
user: User to log in to SMTP server as.
password: Password for SMTP server user.
"""
  def __init__(self,
               host=None,
               port=25,
               user='',
               password='',
               enable_sendmail=False,
               show_mail_body=False,
               service_name='mail'):
    """Constructor.

    Args:
      host: Host of SMTP mail server.  None disables sending via SMTP.
      port: Port of SMTP mail server.
      user: Sending user of SMTP mail.
      password: SMTP password.
      enable_sendmail: Whether sendmail enabled or not.
      show_mail_body: Whether to show mail body in log.
      service_name: Service name expected for all calls.
    """
    super(MailServiceStub, self).__init__(service_name)
    self._smtp_host = host
    self._smtp_port = port
    self._smtp_user = user
    self._smtp_password = password
    self._enable_sendmail = enable_sendmail
    self._show_mail_body = show_mail_body
  def _GenerateLog(self, method, message, log):
    """Generate a list of log messages representing sent mail.

    Args:
      method: Name of the mail-service method being logged (e.g. 'Send').
      message: Message to write to log.
      log: Log function of type string -> None
    """
    log('MailService.%s' % method)
    log('  From: %s' % message.sender())

    # Each recipient header gets its own line.
    for address in message.to_list():
      log('  To: %s' % address)
    for address in message.cc_list():
      log('  Cc: %s' % address)
    for address in message.bcc_list():
      log('  Bcc: %s' % address)

    if message.replyto():
      log('  Reply-to: %s' % message.replyto())

    log('  Subject: %s' % message.subject())

    # Bodies are summarized by size; full text only when configured.
    if message.has_textbody():
      log('  Body:')
      log('    Content-type: text/plain')
      log('    Data length: %d' % len(message.textbody()))
      if self._show_mail_body:
        log('-----\n' + message.textbody() + '\n-----')

    if message.has_htmlbody():
      log('  Body:')
      log('    Content-type: text/html')
      log('    Data length: %d' % len(message.htmlbody()))
      if self._show_mail_body:
        log('-----\n' + message.htmlbody() + '\n-----')

    # Attachment contents are never shown, only their sizes.
    for attachment in message.attachment_list():
      log('  Attachment:')
      log('    File name: %s' % attachment.filename())
      log('    Data length: %s' % len(attachment.data()))
def _SendSMTP(self, mime_message, smtp_lib=smtplib.SMTP):
"""Send MIME message via SMTP.
Connects to SMTP server and sends MIME message. If user is supplied
will try to login to that server to send as authenticated. Does not
currently support encryption.
Args:
mime_message: MimeMessage to send. Create using ToMIMEMessage.
smtp_lib: Class of SMTP library. Used for dependency injection.
"""
smtp = smtp_lib()
try:
smtp.connect(self._smtp_host, self._smtp_port)
if self._smtp_user:
smtp.login(self._smtp_user, self._smtp_password)
tos = ', '.join([mime_message[to] for to in ['To', 'Cc', 'Bcc']
if mime_message[to]])
smtp.sendmail(mime_message['From'], tos, str(mime_message))
finally:
smtp.quit()
  def _SendSendmail(self, mime_message,
                    popen=subprocess.Popen,
                    sendmail_command='sendmail'):
    """Send MIME message via sendmail, if exists on computer.

    Attempts to send email via sendmail. Any IO failure, including
    the program not being found is ignored.

    Args:
      mime_message: MimeMessage to send. Create using ToMIMEMessage.
      popen: popen function to create a new sub-process.
      sendmail_command: Name (or path) of the sendmail binary to invoke.
    """
    try:
      tos = [mime_message[to] for to in ['To', 'Cc', 'Bcc'] if mime_message[to]]
      # NOTE(review): recipient addresses are interpolated into a shell=True
      # command line below, so addresses containing shell metacharacters
      # could inject commands. Tolerable only because this is a local
      # dev-server stub; do not reuse this pattern with untrusted input.
      sendmail_command = '%s %s' % (sendmail_command, ' '.join(tos))
      try:
        child = popen(sendmail_command,
                      shell=True,
                      stdin=subprocess.PIPE,
                      stdout=subprocess.PIPE)
      except (IOError, OSError), e:
        logging.error('Unable to open pipe to sendmail')
        raise
      try:
        child.stdin.write(str(mime_message))
        child.stdin.close()
      finally:
        # Drain the child's stdout until it exits so it cannot block on a
        # full pipe, then release the descriptor.
        while child.poll() is None:
          child.stdout.read(100)
        child.stdout.close()
    except (IOError, OSError), e:
      logging.error('Error sending mail using sendmail: ' + str(e))
def _Send(self, request, response, log=logging.info,
smtp_lib=smtplib.SMTP,
popen=subprocess.Popen,
sendmail_command='sendmail'):
"""Implementation of MailServer::Send().
Logs email message. Contents of attachments are not shown, only
their sizes. If SMTP is configured, will send via SMTP, else
will use Sendmail if it is installed.
Args:
request: The message to send, a SendMailRequest.
response: The send response, a SendMailResponse.
log: Log function to send log information. Used for dependency
injection.
smtp_lib: Class of SMTP library. Used for dependency injection.
popen2: popen2 function to use for opening pipe to other process.
Used for dependency injection.
"""
self._GenerateLog('Send', request, log)
if self._smtp_host and self._enable_sendmail:
log('Both SMTP and sendmail are enabled. Ignoring sendmail.')
import email
mime_message = mail.MailMessageToMIMEMessage(request)
if self._smtp_host:
self._SendSMTP(mime_message, smtp_lib)
elif self._enable_sendmail:
self._SendSendmail(mime_message, popen, sendmail_command)
else:
logging.info('You are not currently sending out real email. '
'If you have sendmail installed you can use it '
'by using the server with --enable_sendmail')
_Dynamic_Send = _Send
def _SendToAdmins(self, request, response, log=logging.info):
"""Implementation of MailServer::SendToAdmins().
Logs email message. Contents of attachments are not shown, only
their sizes.
Given the difficulty of determining who the actual sender
is, Sendmail and SMTP are disabled for this action.
Args:
request: The message to send, a SendMailRequest.
response: The send response, a SendMailResponse.
log: Log function to send log information. Used for dependency
injection.
"""
self._GenerateLog('SendToAdmins', request, log)
if self._smtp_host and self._enable_sendmail:
log('Both SMTP and sendmail are enabled. Ignoring sendmail.')
_Dynamic_SendToAdmins = _SendToAdmins
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors used in the YAML API, which is used by app developers."""
class Error(Exception):
  """Base datastore yaml error type.

  All exceptions raised by this module derive from this class.
  """
class ProtocolBufferParseError(Error):
  """Error in protocol buffer parsing."""
class EmptyConfigurationFile(Error):
  """Raised when an attempt is made to load an empty configuration file."""
class MultipleConfigurationFile(Error):
  """Raised when a configuration file contains more than one object."""
class UnexpectedAttribute(Error):
  """Raised when an unexpected attribute is encountered."""
class DuplicateAttribute(Error):
  """Raised when the same attribute is assigned to twice."""
class ListenerConfigurationError(Error):
  """Raised when a parsing problem is due to listener configuration."""
class IllegalEvent(Error):
  """Raised when an unexpected event type is received by a listener."""
class InternalError(Error):
  """Raised when an internal implementation error is detected."""
class EventListenerError(Error):
  """Top level exception raised by YAML listener.

  Any exception raised within the process of parsing a YAML file via an
  EventListener is caught and wrapped in an EventListenerError. The causing
  exception is maintained, but additional useful information is saved which
  can be used for reporting useful information to users.

  Attributes:
    cause: The original exception which caused the EventListenerError.
  """

  def __init__(self, cause):
    """Initialize event-listener error.

    Reuses the wrapped exception's args when it has any so that this
    wrapper stringifies the same way; otherwise falls back to str(cause).
    """
    cause_args = getattr(cause, 'args', None)
    if cause_args:
      Error.__init__(self, *cause_args)
    else:
      Error.__init__(self, str(cause))
    self.cause = cause
class EventListenerYAMLError(EventListenerError):
  """Wrapper raised specifically for yaml.error.YAMLError causes."""
class EventError(EventListenerError):
  """Generated specifically when an error occurs in event handler.

  Attributes:
    cause: The original exception which caused the EventListenerError.
    event: Event being handled when exception occurred.
  """

  def __init__(self, cause, event):
    """Initialize event-listener error.

    Args:
      cause: The original exception raised by the handler.
      event: The YAML event being handled when the exception occurred.
    """
    EventListenerError.__init__(self, cause)
    self.event = event

  def __str__(self):
    # Append the event's start mark so the error can be located in the
    # source document.
    return '%s\n%s' % (self.cause, self.event.start_mark)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors used in the Python appinfo API, used by app developers."""
class Error(Exception):
  """Base appinfo error type; all errors in this module derive from it."""
class EmptyConfigurationFile(Error):
  """Raised when an attempt is made to load an empty configuration file."""
class MultipleConfigurationFile(Error):
  """Raised when a configuration file contains more than one AppInfo object."""
class UnknownHandlerType(Error):
  """Raised when it is not possible to determine URL mapping type."""
class UnexpectedHandlerAttribute(Error):
  """Raised when a handler type has an attribute that it does not use."""
class MissingHandlerAttribute(Error):
  """Raised when a handler is missing an attribute required by its type."""
class MissingURLMapping(Error):
  """Raised when there are no URL mappings in external appinfo."""
class TooManyURLMappings(Error):
  """Raised when there are too many URL mappings in external appinfo."""
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the urlfetch API, based on httplib."""
import httplib
import logging
import socket
import urllib
import urlparse
from google.appengine.api import apiproxy_stub
from google.appengine.api import urlfetch
from google.appengine.api import urlfetch_errors
from google.appengine.api import urlfetch_service_pb
from google.appengine.runtime import apiproxy_errors
# Response bodies larger than this (16 MB) are truncated and the response
# is flagged with contentwastruncated.
MAX_RESPONSE_SIZE = 2 ** 24

MAX_REDIRECTS = urlfetch.MAX_REDIRECTS

# HTTP status codes that are transparently followed when the request has
# followredirects enabled.
REDIRECT_STATUSES = frozenset([
  httplib.MOVED_PERMANENTLY,
  httplib.FOUND,
  httplib.SEE_OTHER,
  httplib.TEMPORARY_REDIRECT,
])

# Destination ports permitted by the production urlfetch service. In this
# stub a request to any other port only logs a warning; the fetch is still
# attempted.
PORTS_ALLOWED_IN_PRODUCTION = (
    None, '80', '443', '4443', '8080', '8081', '8082', '8083', '8084', '8085',
    '8086', '8087', '8088', '8089', '8188', '8444', '8990')

# Process-wide socket timeout (seconds) applied for the duration of each
# outgoing request.
_API_CALL_DEADLINE = 5.0

# Request headers applications may not override; they are stripped from the
# request before the fetch is issued.
_UNTRUSTED_REQUEST_HEADERS = frozenset([
  'accept-encoding',
  'content-length',
  'host',
  'referer',
  'user-agent',
  'vary',
  'via',
  'x-forwarded-for',
])
class URLFetchServiceStub(apiproxy_stub.APIProxyStub):
  """Stub version of the urlfetch API to be used with apiproxy_stub_map."""

  def __init__(self, service_name='urlfetch'):
    """Initializer.

    Args:
      service_name: Service name expected for all calls.
    """
    super(URLFetchServiceStub, self).__init__(service_name)

  def _Dynamic_Fetch(self, request, response):
    """Trivial implementation of URLFetchService::Fetch().

    Validates the method and protocol, strips untrusted headers, then
    delegates the actual retrieval to _RetrieveURL.

    Args:
      request: the fetch to perform, a URLFetchRequest
      response: the fetch response, a URLFetchResponse

    Raises:
      apiproxy_errors.ApplicationError: with UNSPECIFIED_ERROR for an
        unrecognized method, or INVALID_URL for a non-http(s) protocol.
    """
    (protocol, host, path, parameters, query, fragment) = urlparse.urlparse(request.url())

    payload = None
    if request.method() == urlfetch_service_pb.URLFetchRequest.GET:
      method = 'GET'
    elif request.method() == urlfetch_service_pb.URLFetchRequest.POST:
      method = 'POST'
      payload = request.payload()
    elif request.method() == urlfetch_service_pb.URLFetchRequest.HEAD:
      method = 'HEAD'
    elif request.method() == urlfetch_service_pb.URLFetchRequest.PUT:
      method = 'PUT'
      payload = request.payload()
    elif request.method() == urlfetch_service_pb.URLFetchRequest.DELETE:
      method = 'DELETE'
    else:
      logging.error('Invalid method: %s', request.method())
      raise apiproxy_errors.ApplicationError(
        urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR)

    if not (protocol == 'http' or protocol == 'https'):
      logging.error('Invalid protocol: %s', protocol)
      raise apiproxy_errors.ApplicationError(
        urlfetch_service_pb.URLFetchServiceError.INVALID_URL)

    # Drop headers the application is not allowed to set before issuing
    # the request on its behalf.
    sanitized_headers = self._SanitizeHttpHeaders(_UNTRUSTED_REQUEST_HEADERS,
                                                  request.header_list())
    request.clear_header()
    request.header_list().extend(sanitized_headers)

    self._RetrieveURL(request.url(), payload, method,
                      request.header_list(), response,
                      follow_redirects=request.followredirects())

  def _RetrieveURL(self, url, payload, method, headers, response,
                   follow_redirects=True):
    """Retrieves a URL.

    Args:
      url: String containing the URL to access.
      payload: Request payload to send, if any; None if no payload.
      method: HTTP method to use (e.g., 'GET')
      headers: List of additional header objects to use for the request;
        each exposes key() and value() accessors.
      response: Response object the result is written into.
      follow_redirects: optional setting (defaulting to True) for whether or not
        we should transparently follow redirects (up to MAX_REDIRECTS)

    Raises:
      Raises an apiproxy_errors.ApplicationError exception with FETCH_ERROR
      in cases where:
        - MAX_REDIRECTS is exceeded
        - The protocol of the redirected URL is bad or missing.
    """
    last_protocol = ''
    last_host = ''

    # One iteration per redirect hop; the for/else clause below fires only
    # when MAX_REDIRECTS + 1 attempts are exhausted without a `break`.
    for redirect_number in xrange(MAX_REDIRECTS + 1):
      parsed = urlparse.urlparse(url)
      protocol, host, path, parameters, query, fragment = parsed

      port = urllib.splitport(urllib.splituser(host)[1])[1]

      # Warn-only in the dev server: the fetch is still performed.
      if port not in PORTS_ALLOWED_IN_PRODUCTION:
        logging.warning(
          'urlfetch received %s ; port %s is not allowed in production!' %
          (url, port))

      if host == '' and protocol == '':
        # Relative redirect: reuse host/protocol of the previous request.
        host = last_host
        protocol = last_protocol

      adjusted_headers = {
        'Host': host,
        'Accept': '*/*',
      }
      if payload is not None:
        adjusted_headers['Content-Length'] = len(payload)
      if method == 'POST' and payload:
        adjusted_headers['Content-Type'] = 'application/x-www-form-urlencoded'

      # Caller-supplied headers override the defaults above.
      for header in headers:
        adjusted_headers[header.key().title()] = header.value()

      logging.debug('Making HTTP request: host = %s, '
                    'url = %s, payload = %s, headers = %s',
                    host, url, payload, adjusted_headers)
      try:
        if protocol == 'http':
          connection = httplib.HTTPConnection(host)
        elif protocol == 'https':
          connection = httplib.HTTPSConnection(host)
        else:
          error_msg = 'Redirect specified invalid protocol: "%s"' % protocol
          logging.error(error_msg)
          raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)

        last_protocol = protocol
        last_host = host

        if query != '':
          full_path = path + '?' + query
        else:
          full_path = path

        # Temporarily lower the process-wide socket timeout for the
        # duration of this request, restoring it afterwards.
        orig_timeout = socket.getdefaulttimeout()
        try:
          socket.setdefaulttimeout(_API_CALL_DEADLINE)
          connection.request(method, full_path, payload, adjusted_headers)
          http_response = connection.getresponse()
          http_response_data = http_response.read()
        finally:
          socket.setdefaulttimeout(orig_timeout)
          connection.close()
      except (httplib.error, socket.error, IOError), e:
        raise apiproxy_errors.ApplicationError(
          urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, str(e))

      if http_response.status in REDIRECT_STATUSES and follow_redirects:
        # Follow the redirect target on the next loop iteration.
        url = http_response.getheader('Location', None)
        if url is None:
          error_msg = 'Redirecting response was missing "Location" header'
          logging.error(error_msg)
          raise apiproxy_errors.ApplicationError(
            urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
      else:
        # Terminal response: copy status, (possibly truncated) body and
        # headers into the response proto.
        response.set_statuscode(http_response.status)
        response.set_content(http_response_data[:MAX_RESPONSE_SIZE])
        for header_key, header_value in http_response.getheaders():
          header_proto = response.add_header()
          header_proto.set_key(header_key)
          header_proto.set_value(header_value)

        if len(http_response_data) > MAX_RESPONSE_SIZE:
          response.set_contentwastruncated(True)

        break
    else:
      error_msg = 'Too many repeated redirects'
      logging.error(error_msg)
      raise apiproxy_errors.ApplicationError(
        urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)

  def _SanitizeHttpHeaders(self, untrusted_headers, headers):
    """Cleans "unsafe" headers from the HTTP request/response.

    Args:
      untrusted_headers: set of lowercased untrusted header names.
      headers: list of header objects exposing key() and value() accessors.

    Returns:
      A generator over the headers whose (lowercased) key is not in
      untrusted_headers.
    """
    return (h for h in headers if h.key().lower() not in untrusted_headers)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors used in the urlfetch API
developers.
"""
class Error(Exception):
  """Base URL fetcher error type; all errors in this module derive from it."""
class InvalidURLError(Error):
  """Raised when the URL given is empty or invalid.

  Only http: and https: URLs are allowed. The maximum URL length
  allowed is 2048 characters. The login/pass portion is not
  allowed. In deployed applications, only ports 80 and 443 for http
  and https respectively are allowed.
  """
class DownloadError(Error):
  """Raised when we could not fetch the URL for any reason.

  Note that this exception is only raised when we could not contact the
  server. HTTP errors (e.g., 404) are returned as the status_code field
  in the return value of Fetch, and no exception is raised.
  """
class ResponseTooLargeError(Error):
  """Raised when the response was too large and was truncated."""

  def __init__(self, response):
    # NOTE(review): Error.__init__ is not called here, so str() of this
    # exception is empty -- confirm that is intentional before relying on
    # the exception's message.
    self.response = response
# Fixed: this class was previously defined twice in a row; the second,
# identical definition silently rebound the name. One definition suffices.
class InvalidMethodError(Error):
  """Raised when an invalid value for 'method' is provided."""
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import VoidProto
class MailServiceError(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message namespacing the MailService error codes.

  The message carries no fields of its own; it exists to hold the
  ErrorCode enumeration returned by the mail API.
  """
  OK = 0
  INTERNAL_ERROR = 1
  BAD_REQUEST = 2
  UNAUTHORIZED_SENDER = 3
  INVALID_ATTACHMENT_TYPE = 4

  _ErrorCode_NAMES = {
    0: "OK",
    1: "INTERNAL_ERROR",
    2: "BAD_REQUEST",
    3: "UNAUTHORIZED_SENDER",
    4: "INVALID_ATTACHMENT_TYPE",
  }

  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    # Fixed: removed a stray dead `pass` statement that preceded this line.
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    assert x is not self

  def Equals(self, x):
    # Any two instances are equal: there are no fields to compare.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip over any unknown fields; tag 0 is invalid on the wire.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MailAttachment(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message for a single mail attachment.

  Wire format (both fields required):
    FileName (tag 1, string)
    Data     (tag 2, string)
  """
  has_filename_ = 0
  filename_ = ""
  has_data_ = 0
  data_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def filename(self): return self.filename_

  def set_filename(self, x):
    self.has_filename_ = 1
    self.filename_ = x

  def clear_filename(self):
    if self.has_filename_:
      self.has_filename_ = 0
      self.filename_ = ""

  def has_filename(self): return self.has_filename_

  def data(self): return self.data_

  def set_data(self, x):
    self.has_data_ = 1
    self.data_ = x

  def clear_data(self):
    if self.has_data_:
      self.has_data_ = 0
      self.data_ = ""

  def has_data(self): return self.has_data_

  def MergeFrom(self, x):
    """Merges every set field of x into self."""
    assert x is not self
    if (x.has_filename()): self.set_filename(x.filename())
    if (x.has_data()): self.set_data(x.data())

  def Equals(self, x):
    if x is self: return 1
    if self.has_filename_ != x.has_filename_: return 0
    if self.has_filename_ and self.filename_ != x.filename_: return 0
    if self.has_data_ != x.has_data_: return 0
    if self.has_data_ and self.data_ != x.data_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff both required fields are set."""
    initialized = 1
    if (not self.has_filename_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: filename not set.')
    if (not self.has_data_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: data not set.')
    return initialized

  def ByteSize(self):
    # The trailing + 2 accounts for the two one-byte field tags.
    n = 0
    n += self.lengthString(len(self.filename_))
    n += self.lengthString(len(self.data_))
    return n + 2

  def Clear(self):
    self.clear_filename()
    self.clear_data()

  def OutputUnchecked(self, out):
    # 10 == (1 << 3) | 2 and 18 == (2 << 3) | 2: field tags for the two
    # length-delimited string fields.
    out.putVarInt32(10)
    out.putPrefixedString(self.filename_)
    out.putVarInt32(18)
    out.putPrefixedString(self.data_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_filename(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_data(d.getPrefixedString())
        continue
      # Tag 0 is invalid; anything else unknown is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_filename_: res+=prefix+("FileName: %s\n" % self.DebugFormatString(self.filename_))
    if self.has_data_: res+=prefix+("Data: %s\n" % self.DebugFormatString(self.data_))
    return res

  # Field tag numbers.
  kFileName = 1
  kData = 2

  _TEXT = (
   "ErrorCode",
   "FileName",
   "Data",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MailMessage(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message describing an outgoing email.

  Wire format:
    Sender     (tag 1, required string)
    ReplyTo    (tag 2, optional string)
    To         (tag 3, repeated string)
    Cc         (tag 4, repeated string)
    Bcc        (tag 5, repeated string)
    Subject    (tag 6, required string)
    TextBody   (tag 7, optional string)
    HtmlBody   (tag 8, optional string)
    Attachment (tag 9, repeated MailAttachment)
  """
  has_sender_ = 0
  sender_ = ""
  has_replyto_ = 0
  replyto_ = ""
  has_subject_ = 0
  subject_ = ""
  has_textbody_ = 0
  textbody_ = ""
  has_htmlbody_ = 0
  htmlbody_ = ""

  def __init__(self, contents=None):
    # Repeated fields must be per-instance lists, not class attributes.
    self.to_ = []
    self.cc_ = []
    self.bcc_ = []
    self.attachment_ = []
    if contents is not None: self.MergeFromString(contents)

  def sender(self): return self.sender_

  def set_sender(self, x):
    self.has_sender_ = 1
    self.sender_ = x

  def clear_sender(self):
    if self.has_sender_:
      self.has_sender_ = 0
      self.sender_ = ""

  def has_sender(self): return self.has_sender_

  def replyto(self): return self.replyto_

  def set_replyto(self, x):
    self.has_replyto_ = 1
    self.replyto_ = x

  def clear_replyto(self):
    if self.has_replyto_:
      self.has_replyto_ = 0
      self.replyto_ = ""

  def has_replyto(self): return self.has_replyto_

  def to_size(self): return len(self.to_)
  def to_list(self): return self.to_

  def to(self, i):
    return self.to_[i]

  def set_to(self, i, x):
    self.to_[i] = x

  def add_to(self, x):
    self.to_.append(x)

  def clear_to(self):
    self.to_ = []

  def cc_size(self): return len(self.cc_)
  def cc_list(self): return self.cc_

  def cc(self, i):
    return self.cc_[i]

  def set_cc(self, i, x):
    self.cc_[i] = x

  def add_cc(self, x):
    self.cc_.append(x)

  def clear_cc(self):
    self.cc_ = []

  def bcc_size(self): return len(self.bcc_)
  def bcc_list(self): return self.bcc_

  def bcc(self, i):
    return self.bcc_[i]

  def set_bcc(self, i, x):
    self.bcc_[i] = x

  def add_bcc(self, x):
    self.bcc_.append(x)

  def clear_bcc(self):
    self.bcc_ = []

  def subject(self): return self.subject_

  def set_subject(self, x):
    self.has_subject_ = 1
    self.subject_ = x

  def clear_subject(self):
    if self.has_subject_:
      self.has_subject_ = 0
      self.subject_ = ""

  def has_subject(self): return self.has_subject_

  def textbody(self): return self.textbody_

  def set_textbody(self, x):
    self.has_textbody_ = 1
    self.textbody_ = x

  def clear_textbody(self):
    if self.has_textbody_:
      self.has_textbody_ = 0
      self.textbody_ = ""

  def has_textbody(self): return self.has_textbody_

  def htmlbody(self): return self.htmlbody_

  def set_htmlbody(self, x):
    self.has_htmlbody_ = 1
    self.htmlbody_ = x

  def clear_htmlbody(self):
    if self.has_htmlbody_:
      self.has_htmlbody_ = 0
      self.htmlbody_ = ""

  def has_htmlbody(self): return self.has_htmlbody_

  def attachment_size(self): return len(self.attachment_)
  def attachment_list(self): return self.attachment_

  def attachment(self, i):
    return self.attachment_[i]

  def mutable_attachment(self, i):
    return self.attachment_[i]

  def add_attachment(self):
    # Appends a fresh, empty MailAttachment and returns it for mutation.
    x = MailAttachment()
    self.attachment_.append(x)
    return x

  def clear_attachment(self):
    self.attachment_ = []

  def MergeFrom(self, x):
    """Merges every set/repeated field of x into self."""
    assert x is not self
    if (x.has_sender()): self.set_sender(x.sender())
    if (x.has_replyto()): self.set_replyto(x.replyto())
    for i in xrange(x.to_size()): self.add_to(x.to(i))
    for i in xrange(x.cc_size()): self.add_cc(x.cc(i))
    for i in xrange(x.bcc_size()): self.add_bcc(x.bcc(i))
    if (x.has_subject()): self.set_subject(x.subject())
    if (x.has_textbody()): self.set_textbody(x.textbody())
    if (x.has_htmlbody()): self.set_htmlbody(x.htmlbody())
    for i in xrange(x.attachment_size()): self.add_attachment().CopyFrom(x.attachment(i))

  def Equals(self, x):
    if x is self: return 1
    if self.has_sender_ != x.has_sender_: return 0
    if self.has_sender_ and self.sender_ != x.sender_: return 0
    if self.has_replyto_ != x.has_replyto_: return 0
    if self.has_replyto_ and self.replyto_ != x.replyto_: return 0
    if len(self.to_) != len(x.to_): return 0
    for e1, e2 in zip(self.to_, x.to_):
      if e1 != e2: return 0
    if len(self.cc_) != len(x.cc_): return 0
    for e1, e2 in zip(self.cc_, x.cc_):
      if e1 != e2: return 0
    if len(self.bcc_) != len(x.bcc_): return 0
    for e1, e2 in zip(self.bcc_, x.bcc_):
      if e1 != e2: return 0
    if self.has_subject_ != x.has_subject_: return 0
    if self.has_subject_ and self.subject_ != x.subject_: return 0
    if self.has_textbody_ != x.has_textbody_: return 0
    if self.has_textbody_ and self.textbody_ != x.textbody_: return 0
    if self.has_htmlbody_ != x.has_htmlbody_: return 0
    if self.has_htmlbody_ and self.htmlbody_ != x.htmlbody_: return 0
    if len(self.attachment_) != len(x.attachment_): return 0
    for e1, e2 in zip(self.attachment_, x.attachment_):
      if e1 != e2: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    """Returns 1 iff sender, subject and every attachment are initialized."""
    initialized = 1
    if (not self.has_sender_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: sender not set.')
    if (not self.has_subject_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: subject not set.')
    for p in self.attachment_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized

  def ByteSize(self):
    # Each field contributes one tag byte per occurrence; the trailing + 2
    # covers the tags of the two required fields (sender, subject).
    n = 0
    n += self.lengthString(len(self.sender_))
    if (self.has_replyto_): n += 1 + self.lengthString(len(self.replyto_))
    n += 1 * len(self.to_)
    for i in xrange(len(self.to_)): n += self.lengthString(len(self.to_[i]))
    n += 1 * len(self.cc_)
    for i in xrange(len(self.cc_)): n += self.lengthString(len(self.cc_[i]))
    n += 1 * len(self.bcc_)
    for i in xrange(len(self.bcc_)): n += self.lengthString(len(self.bcc_[i]))
    n += self.lengthString(len(self.subject_))
    if (self.has_textbody_): n += 1 + self.lengthString(len(self.textbody_))
    if (self.has_htmlbody_): n += 1 + self.lengthString(len(self.htmlbody_))
    n += 1 * len(self.attachment_)
    for i in xrange(len(self.attachment_)): n += self.lengthString(self.attachment_[i].ByteSize())
    return n + 2

  def Clear(self):
    self.clear_sender()
    self.clear_replyto()
    self.clear_to()
    self.clear_cc()
    self.clear_bcc()
    self.clear_subject()
    self.clear_textbody()
    self.clear_htmlbody()
    self.clear_attachment()

  def OutputUnchecked(self, out):
    # Varint tag values are (field_number << 3) | 2 (length-delimited).
    out.putVarInt32(10)
    out.putPrefixedString(self.sender_)
    if (self.has_replyto_):
      out.putVarInt32(18)
      out.putPrefixedString(self.replyto_)
    for i in xrange(len(self.to_)):
      out.putVarInt32(26)
      out.putPrefixedString(self.to_[i])
    for i in xrange(len(self.cc_)):
      out.putVarInt32(34)
      out.putPrefixedString(self.cc_[i])
    for i in xrange(len(self.bcc_)):
      out.putVarInt32(42)
      out.putPrefixedString(self.bcc_[i])
    out.putVarInt32(50)
    out.putPrefixedString(self.subject_)
    if (self.has_textbody_):
      out.putVarInt32(58)
      out.putPrefixedString(self.textbody_)
    if (self.has_htmlbody_):
      out.putVarInt32(66)
      out.putPrefixedString(self.htmlbody_)
    for i in xrange(len(self.attachment_)):
      out.putVarInt32(74)
      out.putVarInt32(self.attachment_[i].ByteSize())
      self.attachment_[i].OutputUnchecked(out)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_sender(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_replyto(d.getPrefixedString())
        continue
      if tt == 26:
        self.add_to(d.getPrefixedString())
        continue
      if tt == 34:
        self.add_cc(d.getPrefixedString())
        continue
      if tt == 42:
        self.add_bcc(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_subject(d.getPrefixedString())
        continue
      if tt == 58:
        self.set_textbody(d.getPrefixedString())
        continue
      if tt == 66:
        self.set_htmlbody(d.getPrefixedString())
        continue
      if tt == 74:
        # Embedded message: decode the length-delimited sub-buffer.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_attachment().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_sender_: res+=prefix+("Sender: %s\n" % self.DebugFormatString(self.sender_))
    if self.has_replyto_: res+=prefix+("ReplyTo: %s\n" % self.DebugFormatString(self.replyto_))
    cnt=0
    for e in self.to_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("To%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.cc_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Cc%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.bcc_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Bcc%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_subject_: res+=prefix+("Subject: %s\n" % self.DebugFormatString(self.subject_))
    if self.has_textbody_: res+=prefix+("TextBody: %s\n" % self.DebugFormatString(self.textbody_))
    if self.has_htmlbody_: res+=prefix+("HtmlBody: %s\n" % self.DebugFormatString(self.htmlbody_))
    cnt=0
    for e in self.attachment_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Attachment%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res

  # Field tag numbers.
  kSender = 1
  kReplyTo = 2
  kTo = 3
  kCc = 4
  kBcc = 5
  kSubject = 6
  kTextBody = 7
  kHtmlBody = 8
  kAttachment = 9

  _TEXT = (
   "ErrorCode",
   "Sender",
   "ReplyTo",
   "To",
   "Cc",
   "Bcc",
   "Subject",
   "TextBody",
   "HtmlBody",
   "Attachment",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Names exported when this generated module is star-imported.
__all__ = ['MailServiceError','MailAttachment','MailMessage']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Container of APIProxy stubs for more convenient unittesting.
Classes/variables/functions defined here:
APIProxyStubMap: container of APIProxy stubs.
apiproxy: global instance of an APIProxyStubMap.
MakeSyncCall: APIProxy entry point.
"""
import inspect
import sys
def CreateRPC(service):
  """Creates a RPC instance for the given service.

  The instance is suitable for talking to remote services.
  Each RPC instance can be used only once, and should not be reused.

  Args:
    service: string representing which service to call.

  Returns:
    the rpc object.

  Raises:
    AssertionError or RuntimeError if the stub for service doesn't supply a
    CreateRPC method.
  """
  stub = apiproxy.GetStub(service)
  assert stub, 'No api proxy found for service "%s"' % service
  # Fixed: the message was previously written as
  #   'The service "%s" doesn\'t have ' + 'a CreateRPC method.' % service
  # where % binds tighter than +, applying the format to a literal with no
  # conversion specifier and raising TypeError whenever the assertion
  # fired. Implicit string concatenation keeps % applied to the full text.
  assert hasattr(stub, 'CreateRPC'), ('The service "%s" doesn\'t have '
                                      'a CreateRPC method.' % service)
  return stub.CreateRPC()
def MakeSyncCall(service, call, request, response):
  """The APIProxy entry point for a synchronous API call.

  Delegates to the module-global APIProxyStubMap instance `apiproxy`.

  Args:
    service: string representing which service to call
    call: string representing which function to call
    request: protocol buffer for the request
    response: protocol buffer for the response

  Raises:
    apiproxy_errors.Error or a subclass.
  """
  apiproxy.MakeSyncCall(service, call, request, response)
class ListOfHooks(object):
"""An ordered collection of hooks for a particular API call.
A hook is a function that has exactly the same signature as
a service stub. It will be called before or after an api hook is
executed, depending on whether this list is for precall of postcall hooks.
Hooks can be used for debugging purposes (check certain
pre- or postconditions on api calls) or to apply patches to protocol
buffers before/after a call gets submitted.
"""
  def __init__(self):
    """Constructor."""
    # Ordered list of (key, function, service) triples; service is None
    # when the hook applies to every API.
    self.__content = []
    # (key, module) pairs already registered; used to reject duplicates.
    self.__unique_keys = set()
def __len__(self):
"""Returns the amount of elements in the collection."""
return self.__content.__len__()
def __Insert(self, index, key, function, service=None):
"""Appends a hook at a certain position in the list.
Args:
index: the index of where to insert the function
key: a unique key (within the module) for this particular function.
If something from the same module with the same key is already
registered, nothing will be added.
function: the hook to be added.
service: optional argument that restricts the hook to a particular api
Returns:
True if the collection was modified.
"""
unique_key = (key, inspect.getmodule(function))
if unique_key in self.__unique_keys:
return False
self.__content.insert(index, (key, function, service))
self.__unique_keys.add(unique_key)
return True
def Append(self, key, function, service=None):
"""Appends a hook at the end of the list.
Args:
key: a unique key (within the module) for this particular function.
If something from the same module with the same key is already
registered, nothing will be added.
function: the hook to be added.
service: optional argument that restricts the hook to a particular api
Returns:
True if the collection was modified.
"""
return self.__Insert(len(self), key, function, service)
def Push(self, key, function, service=None):
"""Inserts a hook at the beginning of the list.
Args:
key: a unique key (within the module) for this particular function.
If something from the same module with the same key is already
registered, nothing will be added.
function: the hook to be added.
service: optional argument that restricts the hook to a particular api
Returns:
True if the collection was modified.
"""
return self.__Insert(0, key, function, service)
def Clear(self):
"""Removes all hooks from the list (useful for unit tests)."""
self.__content = []
self.__unique_keys = set()
def Call(self, service, call, request, response):
"""Invokes all hooks in this collection.
Args:
service: string representing which service to call
call: string representing which function to call
request: protocol buffer for the request
response: protocol buffer for the response
"""
for key, function, srv in self.__content:
if srv is None or srv == service:
function(service, call, request, response)
class APIProxyStubMap:
  """Container of APIProxy stubs for more convenient unittesting.

  Stubs may be either trivial implementations of APIProxy services (e.g.
  DatastoreFileStub, UserServiceStub) or "real" implementations.

  For unittests, we may want to mix and match real and trivial implementations
  of services in order to better focus testing on individual service
  implementations. To achieve this, we allow the client to attach stubs to
  service names, as well as define a default stub to be used if no specific
  matching stub is identified.
  """

  def __init__(self, default_stub=None):
    """Constructor.

    Args:
      default_stub: optional stub

    'default_stub' will be used whenever no specific matching stub is found.
    """
    self.__stub_map = {}
    self.__default_stub = default_stub
    self.__precall_hooks = ListOfHooks()
    self.__postcall_hooks = ListOfHooks()

  def GetPreCallHooks(self):
    """Gets a collection for all precall hooks."""
    return self.__precall_hooks

  def GetPostCallHooks(self):
    """Gets a collection for all postcall hooks."""
    # Fixed docstring: it previously said "precall" (copy-paste error).
    return self.__postcall_hooks

  def RegisterStub(self, service, stub):
    """Register the provided stub for the specified service.

    Args:
      service: string
      stub: stub

    Raises:
      AssertionError if a stub is already registered for 'service'.
    """
    # 'in' instead of the deprecated dict.has_key().
    assert service not in self.__stub_map
    self.__stub_map[service] = stub
    # 'datastore' is an alias for 'datastore_v3'; register both names.
    if service == 'datastore':
      self.RegisterStub('datastore_v3', stub)

  def GetStub(self, service):
    """Retrieve the stub registered for the specified service.

    Args:
      service: string

    Returns:
      stub

    Returns the stub registered for 'service', and returns the default stub
    if no such stub is found.
    """
    return self.__stub_map.get(service, self.__default_stub)

  def MakeSyncCall(self, service, call, request, response):
    """The APIProxy entry point.

    Args:
      service: string representing which service to call
      call: string representing which function to call
      request: protocol buffer for the request
      response: protocol buffer for the response

    Raises:
      apiproxy_errors.Error or a subclass.
    """
    stub = self.GetStub(service)
    assert stub, 'No api proxy found for service "%s"' % service
    # Precall hooks run before the stub; postcall hooks only run if the
    # stub call did not raise.
    self.__precall_hooks.Call(service, call, request, response)
    stub.MakeSyncCall(service, call, request, response)
    self.__postcall_hooks.Call(service, call, request, response)
def GetDefaultAPIProxy():
  """Builds the module's default APIProxyStubMap.

  When running inside the production runtime, google.appengine.runtime
  exposes a real apiproxy which becomes the default stub; elsewhere (dev
  appserver, tests) the import or attribute lookup fails and an empty stub
  map is returned.
  """
  try:
    # __import__ with a fromlist returns the leaf package itself.
    runtime = __import__('google.appengine.runtime', globals(), locals(),
                         ['apiproxy'])
    default_stub = runtime.apiproxy
  except (AttributeError, ImportError):
    default_stub = None
  return APIProxyStubMap(default_stub)
# Module-level singleton stub map used by CreateRPC and MakeSyncCall above;
# tests register their stubs on this instance.
apiproxy = GetDefaultAPIProxy()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Higher-level, semantic data types for the datastore. These types
are expected to be set as attributes of Entities. See "Supported Data Types"
in the API Guide.
Most of these types are based on XML elements from Atom and GData elements
from the atom and gd namespaces. For more information, see:
http://www.atomenabled.org/developers/syndication/
http://code.google.com/apis/gdata/common-elements.html
The namespace schemas are:
http://www.w3.org/2005/Atom
http://schemas.google.com/g/2005
"""
import base64
import calendar
import datetime
import os
import re
import string
import time
import urlparse
from xml.sax import saxutils
from google.appengine.datastore import datastore_pb
from google.appengine.api import datastore_errors
from google.appengine.api import users
from google.net.proto import ProtocolBuffer
from google.appengine.datastore import entity_pb
# Maximum length, in bytes, accepted by ValidateString for an indexed string
# property value.
_MAX_STRING_LENGTH = 500

# Maximum length accepted for a Link property value.
# NOTE(review): 2083 matches the well-known Internet Explorer URL length
# limit -- presumably the source of this constant; confirm.
_MAX_LINK_PROPERTY_LENGTH = 2083

# Property names of the form __name__ are reserved for internal use.
RESERVED_PROPERTY_NAME = re.compile('^__.*__$')

# Pseudo-property name exposing an entity's key.
_KEY_SPECIAL_PROPERTY = '__key__'
_SPECIAL_PROPERTIES = frozenset([_KEY_SPECIAL_PROPERTY])
class UtcTzinfo(datetime.tzinfo):
  """Concrete tzinfo for UTC: zero offset, zero DST, named 'UTC'."""

  def utcoffset(self, dt):
    return datetime.timedelta(0)

  def dst(self, dt):
    return datetime.timedelta(0)

  def tzname(self, dt):
    return 'UTC'

  def __repr__(self):
    # Names the module-level singleton rather than the class.
    return 'datastore_types.UTC'
# Shared UTC tzinfo singleton.
UTC = UtcTzinfo()
def typename(obj):
  """Returns the type of obj as a string.

  More descriptive and specific than type(obj), and safe for any object,
  unlike __class__.
  """
  # Prefer __class__ so instances of old-style classes report their class
  # name instead of 'instance'.
  if hasattr(obj, '__class__'):
    return obj.__class__.__name__
  return type(obj).__name__
def ValidateString(value,
                   name='unused',
                   exception=datastore_errors.BadValueError,
                   max_len=_MAX_STRING_LENGTH):
  """Raises an exception if value is not a valid string or a subclass thereof.

  A string is valid if it's not empty, no more than _MAX_STRING_LENGTH bytes,
  and not a Blob. The exception type can be specified with the exception
  argument; it defaults to BadValueError.

  Args:
    value: the value to validate.
    name: the name of this value; used in the exception message.
    exception: the type of exception to raise.
    max_len: the maximum allowed length, in bytes.
  """
  # Blob subclasses str but is explicitly excluded here.
  is_plain_string = (isinstance(value, basestring)
                     and not isinstance(value, Blob))
  if not is_plain_string:
    raise exception('%s should be a string; received %s (a %s):' %
                    (name, value, typename(value)))
  if not value:
    raise exception('%s must not be empty.' % name)
  # The limit is measured in UTF-8 bytes, not characters.
  if len(value.encode('utf-8')) > max_len:
    raise exception('%s must be under %d bytes.' % (name, max_len))
def ResolveAppId(app, name='_app'):
  """Validate app id, providing a default.

  If the argument is None, $APPLICATION_ID is substituted.

  Args:
    app: The app id argument value to be validated.
    name: The argument name, for error messages.

  Returns:
    The value of app, or the substituted default. Always a non-empty string.

  Raises:
    BadArgumentError if the value is empty or not a string.
  """
  if app is None:
    app = os.environ.get('APPLICATION_ID', '')
  # Bug fix: the 'name' parameter (documented as "for error messages") was
  # previously ignored in favor of the hard-coded literal '_app'.
  ValidateString(app, name, datastore_errors.BadArgumentError)
  return app
class Key(object):
  """The primary key for a datastore entity.

  A datastore GUID. A Key instance uniquely identifies an entity across all
  apps, and includes all information necessary to fetch the entity from the
  datastore with Get().

  Key implements __hash__, and key instances are immutable, so Keys may be
  used in sets and as dictionary keys.
  """
  # Backing entity_pb.Reference protocol buffer; assigned by __init__,
  # from_path() or _FromPb() and treated as immutable afterwards.
  __reference = None

  def __init__(self, encoded=None):
    """Constructor. Creates a Key from a string.

    Args:
      # a base64-encoded primary key, generated by Key.__str__
      encoded: str
    """
    if encoded is not None:
      if not isinstance(encoded, basestring):
        try:
          repr_encoded = repr(encoded)
        except:
          # Bare except: repr() of an arbitrary object may raise anything.
          repr_encoded = "<couldn't encode>"
        raise datastore_errors.BadArgumentError(
          'Key() expects a string; received %s (a %s).' %
          (repr_encoded, typename(encoded)))
      try:
        # __str__ strips the base64 '=' padding, so restore it here before
        # decoding.
        modulo = len(encoded) % 4
        if modulo != 0:
          encoded += ('=' * (4 - modulo))
        # str() cast: urlsafe_b64decode does not accept unicode input.
        encoded_pb = base64.urlsafe_b64decode(str(encoded))
        self.__reference = entity_pb.Reference(encoded_pb)
        assert self.__reference.IsInitialized()
      except (AssertionError, TypeError), e:
        raise datastore_errors.BadKeyError(
          'Invalid string key %s. Details: %s' % (encoded, e))
      except Exception, e:
        # Matched by class name so a ProtocolBufferDecodeError from any proto
        # implementation converts to BadKeyError; everything else propagates.
        if e.__class__.__name__ == 'ProtocolBufferDecodeError':
          raise datastore_errors.BadKeyError('Invalid string key %s.' % encoded)
        else:
          raise
    else:
      # Empty (incomplete) reference; from_path() / _FromPb() fill it in.
      self.__reference = entity_pb.Reference()

  @staticmethod
  def from_path(*args, **kwds):
    """Static method to construct a Key out of a "path" (kind, id or name, ...).

    This is useful when an application wants to use just the id or name portion
    of a key in e.g. a URL, where the rest of the URL provides enough context to
    fill in the rest, i.e. the app id (always implicit), the entity kind, and
    possibly an ancestor key. Since ids and names are usually small, they're
    more attractive for use in end-user-visible URLs than the full string
    representation of a key.

    Args:
      kind: the entity kind (a str or unicode instance)
      id_or_name: the id (an int or long) or name (a str or unicode instance)
      Additional positional arguments are allowed and should be
      alternating kind and id/name.

    Keyword args:
      parent: optional parent Key; default None.

    Returns:
      A new Key instance whose .kind() and .id() or .name() methods return
      the *last* kind and id or name positional arguments passed.

    Raises:
      BadArgumentError for invalid arguments.
      BadKeyError if the parent key is incomplete.
    """
    parent = kwds.pop('parent', None)
    # _app defaults to the current $APPLICATION_ID via ResolveAppId.
    _app = ResolveAppId(kwds.pop('_app', None))
    if kwds:
      raise datastore_errors.BadArgumentError(
          'Excess keyword arguments ' + repr(kwds))
    if not args or len(args) % 2:
      raise datastore_errors.BadArgumentError(
          'A non-zero even number of positional arguments is required '
          '(kind, id or name, kind, id or name, ...); received %s' % repr(args))
    if parent is not None:
      if not isinstance(parent, Key):
        raise datastore_errors.BadArgumentError(
            'Expected None or a Key as parent; received %r (a %s).' %
            (parent, typename(parent)))
      if not parent.has_id_or_name():
        raise datastore_errors.BadKeyError(
            'The parent Key is incomplete.')
      if _app != parent.app():
        raise datastore_errors.BadArgumentError(
            'The _app argument (%r) should match parent.app() (%s)' %
            (_app, parent.app()))
    key = Key()
    # key.__reference is accessible here because name mangling is resolved at
    # class scope, even inside a staticmethod.
    ref = key.__reference
    if parent is not None:
      # Start from a copy of the parent's full path; new elements are appended.
      ref.CopyFrom(parent.__reference)
    else:
      ref.set_app(_app)
    path = ref.mutable_path()
    for i in xrange(0, len(args), 2):
      kind, id_or_name = args[i:i+2]
      if isinstance(kind, basestring):
        kind = kind.encode('utf-8')
      else:
        raise datastore_errors.BadArgumentError(
            'Expected a string kind as argument %d; received %r (a %s).' %
            (i + 1, kind, typename(kind)))
      elem = path.add_element()
      elem.set_type(kind)
      if isinstance(id_or_name, (int, long)):
        elem.set_id(id_or_name)
      elif isinstance(id_or_name, basestring):
        ValidateString(id_or_name, 'name')
        # Leading-digit names would be ambiguous with numeric ids.
        if id_or_name and id_or_name[0] in string.digits:
          raise datastore_errors.BadArgumentError(
              'Names may not begin with a digit; received %s.' % id_or_name)
        elem.set_name(id_or_name.encode('utf-8'))
      else:
        raise datastore_errors.BadArgumentError(
            'Expected an integer id or string name as argument %d; '
            'received %r (a %s).' % (i + 2, id_or_name, typename(id_or_name)))
    assert ref.IsInitialized()
    return key

  def app(self):
    """Returns this entity's app id, a string."""
    if self.__reference.app():
      # Stored bytes are UTF-8 encoded; decode back to unicode.
      return self.__reference.app().decode('utf-8')
    else:
      return None

  def kind(self):
    """Returns this entity's kind, as a string."""
    # The kind of a key is the type of the *last* path element.
    if self.__reference.path().element_size() > 0:
      encoded = self.__reference.path().element_list()[-1].type()
      return unicode(encoded.decode('utf-8'))
    else:
      return None

  def id(self):
    """Returns this entity's id, or None if it doesn't have one."""
    elems = self.__reference.path().element_list()
    # An id of 0 is treated as unset (see _ToPb, which uses 0 as placeholder).
    if elems and elems[-1].has_id() and elems[-1].id():
      return elems[-1].id()
    else:
      return None

  def name(self):
    """Returns this entity's name, or None if it doesn't have one."""
    elems = self.__reference.path().element_list()
    if elems and elems[-1].has_name() and elems[-1].name():
      return elems[-1].name().decode('utf-8')
    else:
      return None

  def id_or_name(self):
    """Returns this entity's id or name, whichever it has, or None."""
    if self.id() is not None:
      return self.id()
    else:
      return self.name()

  def has_id_or_name(self):
    """Returns True if this entity has an id or name, False otherwise.
    """
    return self.id_or_name() is not None

  def parent(self):
    """Returns this entity's parent, as a Key. If this entity has no parent,
    returns None."""
    if self.__reference.path().element_size() > 1:
      # Copy the full reference, then drop the last path element.
      parent = Key()
      parent.__reference.CopyFrom(self.__reference)
      parent.__reference.path().element_list().pop()
      return parent
    else:
      return None

  def ToTagUri(self):
    """Returns a tag: URI for this entity for use in XML output.

    Foreign keys for entities may be represented in XML output as tag URIs.
    RFC 4151 describes the tag URI scheme. From http://taguri.org/:
      The tag algorithm lets people mint - create - identifiers that no one
      else using the same algorithm could ever mint. It is simple enough to do
      in your head, and the resulting identifiers can be easy to read, write,
      and remember. The identifiers conform to the URI (URL) Syntax.
    Tag URIs for entities use the app's auth domain and the date that the URI
    is generated. The namespace-specific part is <kind>[<key>].
    For example, here is the tag URI for a Kitten with the key "Fluffy" in the
    catsinsinks app:
      tag:catsinsinks.googleapps.com,2006-08-29:Kitten[Fluffy]

    Raises a BadKeyError if this entity's key is incomplete.
    """
    if not self.has_id_or_name():
      raise datastore_errors.BadKeyError(
        'ToTagUri() called for an entity with an incomplete key.')
    # NOTE(review): raises KeyError if AUTH_DOMAIN is absent from the
    # environment -- presumably always set by the runtime; confirm.
    return u'tag:%s.%s,%s:%s[%s]' % (saxutils.escape(self.app()),
                                     os.environ['AUTH_DOMAIN'],
                                     datetime.date.today().isoformat(),
                                     saxutils.escape(self.kind()),
                                     saxutils.escape(str(self)))
  # Keys serialize to XML as their tag URI.
  ToXml = ToTagUri

  def entity_group(self):
    """Returns this key's entity group as a Key.

    Note that the returned Key will be incomplete if this Key is for a root
    entity and it is incomplete.
    """
    group = Key._FromPb(self.__reference)
    # The entity group is identified by the root (first) path element only.
    del group.__reference.path().element_list()[1:]
    return group

  @staticmethod
  def _FromPb(pb):
    """Static factory method. Creates a Key from an entity_pb.Reference.

    Not intended to be used by application developers. Enforced by hiding the
    entity_pb classes.

    Args:
      pb: entity_pb.Reference
    """
    if not isinstance(pb, entity_pb.Reference):
      raise datastore_errors.BadArgumentError(
        'Key constructor takes an entity_pb.Reference; received %s (a %s).' %
        (pb, typename(pb)))
    key = Key()
    # Defensive copy so the new Key does not alias the caller's PB.
    key.__reference = entity_pb.Reference()
    key.__reference.CopyFrom(pb)
    return key

  def _ToPb(self):
    """Converts this Key to its protocol buffer representation.

    Not intended to be used by application developers. Enforced by hiding the
    entity_pb classes.

    Returns:
      # the Reference PB representation of this Key
      entity_pb.Reference
    """
    pb = entity_pb.Reference()
    pb.CopyFrom(self.__reference)
    if not self.has_id_or_name():
      # Placeholder id 0 keeps the PB initialized for incomplete keys;
      # id() above treats 0 as "no id".
      pb.mutable_path().element_list()[-1].set_id(0)
    # These decode() calls only validate that the stored bytes are UTF-8;
    # their return values are deliberately discarded.
    pb.app().decode('utf-8')
    for pathelem in pb.path().element_list():
      pathelem.type().decode('utf-8')
    return pb

  def __str__(self):
    """Encodes this Key as an opaque string.

    Returns a string representation of this key, suitable for use in HTML,
    URLs, and other similar use cases. If the entity's key is incomplete,
    raises a BadKeyError.

    Unfortunately, this string encoding isn't particularly compact, and its
    length varies with the length of the path. If you want a shorter identifier
    and you know the kind and parent (if any) ahead of time, consider using just
    the entity's id or name.

    Returns:
      string
    """
    if (self.has_id_or_name()):
      encoded = base64.urlsafe_b64encode(self.__reference.Encode())
      # '=' padding is stripped for URL friendliness; __init__ restores it.
      return encoded.replace('=', '')
    else:
      raise datastore_errors.BadKeyError(
        'Cannot string encode an incomplete key!\n%s' % self.__reference)

  def __repr__(self):
    """Returns an eval()able string representation of this key.

    Returns a Python string of the form 'datastore_types.Key.from_path(...)'
    that can be used to recreate this key.

    Returns:
      string
    """
    args = []
    for elem in self.__reference.path().element_list():
      args.append(repr(elem.type()))
      if elem.has_name():
        args.append(repr(elem.name().decode('utf-8')))
      else:
        args.append(repr(elem.id()))
    args.append('_app=%r' % self.__reference.app().decode('utf-8'))
    return u'datastore_types.Key.from_path(%s)' % ', '.join(args)

  def __cmp__(self, other):
    """Returns negative, zero, or positive when comparing two keys.

    TODO(ryanb): for API v2, we should change this to make incomplete keys, ie
    keys without an id or name, not equal to any other keys.

    Args:
      other: Key to compare to.

    Returns:
      Negative if self is less than "other"
      Zero if "other" is equal to self
      Positive if self is greater than "other"
    """
    # -2 is an arbitrary "not comparable" result for non-Key operands.
    if not isinstance(other, Key):
      return -2
    # Keys compare as (app, kind1, id_or_name1, kind2, ...) tuples. Note that
    # kinds and names compare by their repr() strings while ids compare
    # numerically.
    self_args = []
    other_args = []
    self_args.append(self.__reference.app().decode('utf-8'))
    other_args.append(other.__reference.app().decode('utf-8'))
    for elem in self.__reference.path().element_list():
      self_args.append(repr(elem.type()))
      if elem.has_name():
        self_args.append(repr(elem.name().decode('utf-8')))
      else:
        self_args.append(elem.id())
    for elem in other.__reference.path().element_list():
      other_args.append(repr(elem.type()))
      if elem.has_name():
        other_args.append(repr(elem.name().decode('utf-8')))
      else:
        other_args.append(elem.id())
    result = cmp(self_args, other_args)
    return result

  def __hash__(self):
    """Returns a 32-bit integer hash of this key.

    Implements Python's hash protocol so that Keys may be used in sets and as
    dictionary keys.

    Returns:
      int
    """
    # Hashing the encoded string keeps hash consistent with == as defined
    # by __cmp__ for complete keys.
    return hash(self.__str__())
class Category(unicode):
  """A tag, ie a descriptive word or phrase.

  Entities may be tagged by users, and later returned by queries for that
  tag. Tags can also be used for ranking results (frequency), photo captions,
  clustering, activity, etc. A more in-depth description:
  http://www.zeldman.com/daily/0405d.shtml

  This is the Atom "category" element. In XML output, the tag is provided as
  the term attribute. See:
  http://www.atomenabled.org/developers/syndication/#category

  Raises BadValueError if tag is not a string or subtype.
  """
  TERM = 'user-tag'

  def __init__(self, tag):
    super(Category, self).__init__(self, tag)
    ValidateString(tag, 'tag')

  def ToXml(self):
    quoted_label = saxutils.quoteattr(self)
    return u'<category term="%s" label=%s />' % (Category.TERM, quoted_label)
class Link(unicode):
  """A fully qualified URL. Usually http: scheme, but may also be file:, ftp:,
  news:, among others.

  If you have email (mailto:) or instant messaging (aim:, xmpp:) links,
  consider using the Email or IM classes instead.

  This is the Atom "link" element. In XML output, the link is provided as the
  href attribute. See:
  http://www.atomenabled.org/developers/syndication/#link

  Raises BadValueError if link is not a fully qualified, well-formed URL.
  """

  def __init__(self, link):
    super(Link, self).__init__(self, link)
    ValidateString(link, 'link', max_len=_MAX_LINK_PROPERTY_LENGTH)

    scheme, domain, path = urlparse.urlparse(link)[:3]
    if scheme == 'file':
      # file: URLs have no network location; require a path instead.
      well_formed = bool(path)
    else:
      well_formed = bool(scheme) and bool(domain)
    if not well_formed:
      raise datastore_errors.BadValueError('Invalid URL: %s' % link)

  def ToXml(self):
    return u'<link href=%s />' % saxutils.quoteattr(self)
class Email(unicode):
  """An RFC2822 email address.

  Makes no attempt at validation beyond the generic string checks; apart
  from checking MX records, email address validation is a rathole.

  This is the gd:email element. In XML output, the email address is provided
  as the address attribute. See:
  http://code.google.com/apis/gdata/common-elements.html#gdEmail

  Raises BadValueError if email is not a valid email address.
  """

  def __init__(self, email):
    super(Email, self).__init__(self, email)
    ValidateString(email, 'email')

  def ToXml(self):
    quoted = saxutils.quoteattr(self)
    return u'<gd:email address=%s />' % quoted
class GeoPt(object):
  """A geographical point, specified by floating-point latitude and longitude
  coordinates. Often used to integrate with mapping sites like Google Maps.
  May also be used as ICBM coordinates.

  This is the georss:point element. In XML output, the coordinates are
  provided as the lat and lon attributes. See: http://georss.org/

  Serializes to '<lat>,<lon>'. Raises BadValueError if it's passed an invalid
  serialized string, or if lat and lon are not valid floating points in the
  ranges [-90, 90] and [-180, 180], respectively.
  """
  lat = None
  lon = None

  def __init__(self, lat, lon=None):
    if lon is None:
      # A single argument must be a '<lat>,<lon>' serialized string.
      try:
        lat, lon = lat.split(',')
      except (AttributeError, ValueError):
        raise datastore_errors.BadValueError(
            'Expected a "lat,long" formatted string; received %s (a %s).' %
            (lat, typename(lat)))

    try:
      lat = float(lat)
      lon = float(lon)
      if abs(lat) > 90:
        raise datastore_errors.BadValueError(
            'Latitude must be between -90 and 90; received %f' % lat)
      if abs(lon) > 180:
        raise datastore_errors.BadValueError(
            'Longitude must be between -180 and 180; received %f' % lon)
    except (TypeError, ValueError):
      raise datastore_errors.BadValueError(
          'Expected floats for lat and long; received %s (a %s) and %s (a %s).' %
          (lat, typename(lat), lon, typename(lon)))

    self.lat = lat
    self.lon = lon

  def __cmp__(self, other):
    if not isinstance(other, GeoPt):
      try:
        other = GeoPt(other)
      except datastore_errors.BadValueError:
        return NotImplemented
    # Latitude is the primary sort key; longitude breaks ties.
    return cmp((self.lat, self.lon), (other.lat, other.lon))

  def __hash__(self):
    """Returns a 32-bit integer hash of this point.

    Implements Python's hash protocol so that GeoPts may be used in sets and
    as dictionary keys.

    Returns:
      int
    """
    return hash((self.lat, self.lon))

  def __repr__(self):
    """Returns an eval()able string representation of this GeoPt.

    The returned string is of the form 'datastore_types.GeoPt([lat], [lon])'.

    Returns:
      string
    """
    return 'datastore_types.GeoPt(%r, %r)' % (self.lat, self.lon)

  def __unicode__(self):
    return u'%s,%s' % (unicode(self.lat), unicode(self.lon))

  __str__ = __unicode__

  def ToXml(self):
    coords = (unicode(self.lat), unicode(self.lon))
    return u'<georss:point>%s %s</georss:point>' % coords
class IM(object):
  """An instant messaging handle. Includes both an address and its protocol.

  The protocol value is either a standard IM scheme or a URL identifying the
  IM network for the protocol. Possible values include:

    Value                           Description
    sip                             SIP/SIMPLE
    unknown                         Unknown or unspecified
    xmpp                            XMPP/Jabber
    http://aim.com/                 AIM
    http://icq.com/                 ICQ
    http://talk.google.com/         Google Talk
    http://messenger.msn.com/       MSN Messenger
    http://messenger.yahoo.com/     Yahoo Messenger
    http://sametime.com/            Lotus Sametime
    http://gadu-gadu.pl/            Gadu-Gadu

  This is the gd:im element. In XML output, the address and protocol are
  provided as the address and protocol attributes, respectively. See:
  http://code.google.com/apis/gdata/common-elements.html#gdIm

  Serializes to '<protocol> <address>'. Raises BadValueError if tag is not a
  standard IM scheme or a URL.
  """
  PROTOCOLS = [ 'sip', 'unknown', 'xmpp' ]

  protocol = None
  address = None

  def __init__(self, protocol, address=None):
    if address is None:
      # A single argument must be a '<protocol> <address>' string.
      try:
        protocol, address = protocol.split(' ')
      except (AttributeError, ValueError):
        raise datastore_errors.BadValueError(
            'Expected string of format "protocol address"; received %s' %
            str(protocol))

    ValidateString(address, 'address')
    if protocol not in self.PROTOCOLS:
      # Non-standard protocols must be valid URLs; Link() raises otherwise
      # and its return value is deliberately discarded.
      Link(protocol)

    self.address = address
    self.protocol = protocol

  def __cmp__(self, other):
    if not isinstance(other, IM):
      try:
        other = IM(other)
      except datastore_errors.BadValueError:
        return NotImplemented
    # Address is the primary sort key; protocol breaks ties.
    return cmp((self.address, self.protocol),
               (other.address, other.protocol))

  def __repr__(self):
    """Returns an eval()able string representation of this IM.

    The returned string is of the form:
      datastore_types.IM('address', 'protocol')

    Returns:
      string
    """
    return 'datastore_types.IM(%r, %r)' % (self.protocol, self.address)

  def __unicode__(self):
    return u'%s %s' % (self.protocol, self.address)

  __str__ = __unicode__

  def ToXml(self):
    attrs = (saxutils.quoteattr(self.protocol),
             saxutils.quoteattr(self.address))
    return u'<gd:im protocol=%s address=%s />' % attrs

  def __len__(self):
    # Length of the serialized '<protocol> <address>' form.
    return len(unicode(self))
class PhoneNumber(unicode):
  """A human-readable phone number or address.

  No validation is performed. Phone numbers have many different formats -
  local, long distance, domestic, international, internal extension, TTY,
  VOIP, SMS, and alternative networks like Skype, XFire and Roger Wilco. They
  all have their own numbering and addressing formats.

  This is the gd:phoneNumber element. In XML output, the phone number is
  provided as the text of the element. See:
  http://code.google.com/apis/gdata/common-elements.html#gdPhoneNumber

  Raises BadValueError if phone is not a string or subtype.
  """

  def __init__(self, phone):
    super(PhoneNumber, self).__init__(self, phone)
    ValidateString(phone, 'phone')

  def ToXml(self):
    escaped = saxutils.escape(self)
    return u'<gd:phoneNumber>%s</gd:phoneNumber>' % escaped
class PostalAddress(unicode):
  """A human-readable mailing address.

  Mailing address formats vary widely, so no validation is performed.

  This is the gd:postalAddress element. In XML output, the address is provided
  as the text of the element. See:
  http://code.google.com/apis/gdata/common-elements.html#gdPostalAddress

  Raises BadValueError if address is not a string or subtype.
  """

  def __init__(self, address):
    super(PostalAddress, self).__init__(self, address)
    ValidateString(address, 'address')

  def ToXml(self):
    escaped = saxutils.escape(self)
    return u'<gd:postalAddress>%s</gd:postalAddress>' % escaped
class Rating(long):
  """A user-provided integer rating for a piece of content. Normalized to a
  0-100 scale.

  This is the gd:rating element. In XML output, the address is provided
  as the text of the element. See:
  http://code.google.com/apis/gdata/common-elements.html#gdRating

  Serializes to the decimal string representation of the rating. Raises
  BadValueError if the rating is not an integer in the range [0, 100].
  """
  MIN = 0
  MAX = 100

  def __init__(self, rating):
    super(Rating, self).__init__(self, rating)
    # Floats and complex numbers are rejected outright even though long()
    # would accept (truncate) a float.
    if isinstance(rating, (float, complex)):
      raise datastore_errors.BadValueError(
        'Expected int or long; received %s (a %s).' %
        (rating, typename(rating)))

    try:
      if not Rating.MIN <= long(rating) <= Rating.MAX:
        raise datastore_errors.BadValueError()
    except ValueError:
      # long() failed: the argument wasn't numeric at all.
      raise datastore_errors.BadValueError(
        'Expected int or long; received %s (a %s).' %
        (rating, typename(rating)))

  def ToXml(self):
    return (u'<gd:rating value="%d" min="%d" max="%d" />' %
            (self, Rating.MIN, Rating.MAX))
class Text(unicode):
  """A long string type.

  Strings of any length can be stored in the datastore using this
  type. It behaves identically to the Python unicode type, except for
  the constructor, which only accepts str and unicode arguments.
  """

  def __new__(cls, arg=None, encoding=None):
    """Constructor.

    We only accept unicode and str instances, the latter with encoding.

    Args:
      arg: optional unicode or str instance; default u''
      encoding: optional encoding; disallowed when isinstance(arg, unicode),
        defaults to 'ascii' when isinstance(arg, str);
    """
    if arg is None:
      arg = u''

    if isinstance(arg, str):
      # Byte strings are decoded; ascii is the default codec.
      if encoding is None:
        encoding = 'ascii'
      return super(Text, cls).__new__(cls, arg, encoding)

    if isinstance(arg, unicode):
      # An already-decoded argument must not carry an encoding.
      if encoding is not None:
        raise TypeError('Text() with a unicode argument '
                        'should not specify an encoding')
      return super(Text, cls).__new__(cls, arg)

    raise TypeError('Text() argument should be str or unicode, not %s' %
                    type(arg).__name__)
class Blob(str):
  """A blob type, appropriate for storing binary data of any length.

  This behaves identically to the Python str type, except for the
  constructor, which only accepts str arguments.
  """

  def __new__(cls, arg=None):
    """Constructor.

    We only accept str instances.

    Args:
      arg: optional str instance (default '')
    """
    if arg is None:
      arg = ''
    if not isinstance(arg, str):
      raise TypeError('Blob() argument should be str instance, not %s' %
                      type(arg).__name__)
    return super(Blob, cls).__new__(cls, arg)

  def ToXml(self):
    """Output a blob as XML.

    Returns:
      Base64 encoded version of itself for safe insertion in to an XML
      document.
    """
    return saxutils.escape(base64.urlsafe_b64encode(self))
class ByteString(str):
  """A byte-string type, appropriate for storing short amounts of indexed data.

  This behaves identically to Blob, except it's used only for short, indexed
  byte strings.
  """

  def __new__(cls, arg=None):
    """Constructor.

    We only accept str instances.

    Args:
      arg: optional str instance (default '')
    """
    if arg is None:
      arg = ''
    if not isinstance(arg, str):
      raise TypeError('ByteString() argument should be str instance, not %s' %
                      type(arg).__name__)
    return super(ByteString, cls).__new__(cls, arg)

  def ToXml(self):
    """Output a ByteString as XML.

    Returns:
      Base64 encoded version of itself for safe insertion in to an XML
      document.
    """
    return saxutils.escape(base64.urlsafe_b64encode(self))
# Maps property value classes to the entity_pb.Property "meaning" enum value
# recorded alongside the stored value.
_PROPERTY_MEANINGS = {
  Blob: entity_pb.Property.BLOB,
  ByteString: entity_pb.Property.BYTESTRING,
  Text: entity_pb.Property.TEXT,
  datetime.datetime: entity_pb.Property.GD_WHEN,
  Category: entity_pb.Property.ATOM_CATEGORY,
  Link: entity_pb.Property.ATOM_LINK,
  Email: entity_pb.Property.GD_EMAIL,
  GeoPt: entity_pb.Property.GEORSS_POINT,
  IM: entity_pb.Property.GD_IM,
  PhoneNumber: entity_pb.Property.GD_PHONENUMBER,
  PostalAddress: entity_pb.Property.GD_POSTALADDRESS,
  Rating: entity_pb.Property.GD_RATING,
  }

# The complete set of Python types accepted as property values; kept in sync
# with _VALIDATE_PROPERTY_VALUES below (see the assert after that dict).
_PROPERTY_TYPES = frozenset([
  Blob,
  ByteString,
  bool,
  Category,
  datetime.datetime,
  Email,
  float,
  GeoPt,
  IM,
  int,
  Key,
  Link,
  long,
  PhoneNumber,
  PostalAddress,
  Rating,
  str,
  Text,
  type(None),
  unicode,
  users.User,
  ])

# NOTE(review): presumably the property types stored without indexing (both
# Blob and Text are exempted from length validation above) -- confirm.
_RAW_PROPERTY_TYPES = (Blob, Text)
def ValidatePropertyInteger(name, value):
  """Raises an exception if the supplied integer is invalid.

  Args:
    name: Name of the property this is for.
    value: Integer value.

  Raises:
    OverflowError if the value does not fit within a signed int64.
  """
  # Signed 64-bit range: [-2**63, 2**63 - 1].
  if value < -0x8000000000000000 or value > 0x7fffffffffffffff:
    raise OverflowError('%d is out of bounds for int64' % value)
def ValidateStringLength(name, value, max_len):
  """Raises an exception if the supplied string is too long.

  Args:
    name: Name of the property this is for.
    value: String value.
    max_len: Maximum length the string may be.

  Raises:
    datastore_errors.BadValueError if the value is longer than the maximum
    length.
  """
  # Docstring fix only: it previously claimed OverflowError, but the code
  # raises BadValueError.
  if len(value) > max_len:
    raise datastore_errors.BadValueError(
      'Property %s is %d bytes long; it must be %d or less. '
      'Consider Text instead, which can store strings of any length.' %
      (name, len(value), max_len))
def ValidatePropertyString(name, value):
  """Validates the length of an indexed string property.

  Simply bounds the value at _MAX_STRING_LENGTH bytes via
  ValidateStringLength.

  Args:
    name: Name of the property this is for.
    value: String value.
  """
  ValidateStringLength(name, value, _MAX_STRING_LENGTH)
def ValidatePropertyLink(name, value):
  """Validates the length of an indexed Link property.

  Links get a larger bound (_MAX_LINK_PROPERTY_LENGTH) than plain strings.

  Args:
    name: Name of the property this is for.
    value: String value.
  """
  ValidateStringLength(name, value, _MAX_LINK_PROPERTY_LENGTH)
def ValidatePropertyNothing(name, value):
  """Validation placeholder that accepts any value.

  Args:
    name: Name of the property this is for (ignored).
    value: Ignored.
  """
def ValidatePropertyKey(name, value):
  """Checks that a datastore.Key property value is a complete key.

  Args:
    name: Name of the property this is for.
    value: A datastore.Key instance.

  Raises:
    datastore_errors.BadValueError: if the key has neither an id nor a name.
  """
  if value.has_id_or_name():
    return
  raise datastore_errors.BadValueError(
      'Incomplete key found for reference property %s.' % name)
# Maps each supported property type to the function used to validate values
# of that type before they are stored.
_VALIDATE_PROPERTY_VALUES = {
    Blob: ValidatePropertyNothing,
    ByteString: ValidatePropertyString,
    bool: ValidatePropertyNothing,
    Category: ValidatePropertyString,
    datetime.datetime: ValidatePropertyNothing,
    Email: ValidatePropertyString,
    float: ValidatePropertyNothing,
    GeoPt: ValidatePropertyNothing,
    IM: ValidatePropertyString,
    int: ValidatePropertyInteger,
    Key: ValidatePropertyKey,
    Link: ValidatePropertyLink,
    long: ValidatePropertyInteger,
    PhoneNumber: ValidatePropertyString,
    PostalAddress: ValidatePropertyString,
    Rating: ValidatePropertyInteger,
    str: ValidatePropertyString,
    Text: ValidatePropertyNothing,
    type(None): ValidatePropertyNothing,
    unicode: ValidatePropertyString,
    users.User: ValidatePropertyNothing,
}

# Every supported property type must have a validator.
assert set(_VALIDATE_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
def ValidateProperty(name, values, read_only=False):
"""Helper function for validating property values.
Args:
name: Name of the property this is for.
value: Value for the property as a Python native type.
Raises:
BadPropertyError if the property name is invalid. BadValueError if the
property did not validate correctly or the value was an empty list. Other
exception types (like OverflowError) if the property value does not meet
type-specific criteria.
"""
ValidateString(name, 'property name', datastore_errors.BadPropertyError)
if not read_only and RESERVED_PROPERTY_NAME.match(name):
raise datastore_errors.BadPropertyError(
'%s is a reserved property name.' % name)
values_type = type(values)
if values_type is tuple:
raise datastore_errors.BadValueError(
'May not use tuple property value; property %s is %s.' %
(name, repr(values)))
if values_type is list:
multiple = True
else:
multiple = False
values = [values]
if not values:
raise datastore_errors.BadValueError(
'May not use the empty list as a property value; property %s is %s.' %
(name, repr(values)))
try:
for v in values:
prop_validator = _VALIDATE_PROPERTY_VALUES.get(v.__class__)
if prop_validator is None:
raise datastore_errors.BadValueError(
'Unsupported type for property %s: %s' % (name, v.__class__))
prop_validator(name, v)
except (KeyError, ValueError, TypeError, IndexError, AttributeError), msg:
raise datastore_errors.BadValueError(
'Error type checking values for property %s: %s' % (name, msg))
ValidateReadProperty = ValidateProperty
def PackBlob(name, value, pbvalue):
  """Stores a Blob value, unmodified, in an entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string (unused).
    value: A Blob instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_stringvalue(value)
def PackString(name, value, pbvalue):
  """Stores a text-like value as a UTF-8 encoded string.

  Args:
    name: The name of the property as a string (unused).
    value: A string, unicode, or string-like value instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  encoded = unicode(value).encode('utf-8')
  pbvalue.set_stringvalue(encoded)
def PackDatetime(name, value, pbvalue):
  """Stores a datetime value as microseconds since the epoch.

  Args:
    name: The name of the property as a string (unused).
    value: A datetime.datetime instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_int64value(DatetimeToTimestamp(value))
def DatetimeToTimestamp(value):
  """Converts a datetime.datetime to microseconds since the epoch, as a long.

  Timezone-aware datetimes are converted to UTC first; naive datetimes are
  treated as already being in UTC (calendar.timegm interprets the time tuple
  as UTC).

  Args:
    value: datetime.datetime

  Returns: value as a long (microseconds since the Unix epoch)
  """
  if value.tzinfo:
    value = value.astimezone(UTC)
  return long(calendar.timegm(value.timetuple()) * 1000000L) + value.microsecond
def PackGeoPt(name, value, pbvalue):
  """Stores a GeoPt value as a point (x=lat, y=lon).

  Args:
    name: The name of the property as a string (unused).
    value: A GeoPt instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  point = pbvalue.mutable_pointvalue()
  point.set_x(value.lat)
  point.set_y(value.lon)
def PackUser(name, value, pbvalue):
  """Stores a users.User value (email and auth domain, UTF-8 encoded).

  Args:
    name: The name of the property as a string (unused).
    value: A users.User instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  user_value = pbvalue.mutable_uservalue()
  user_value.set_email(value.email().encode('utf-8'))
  user_value.set_auth_domain(value.auth_domain().encode('utf-8'))
  # The gaia id is always written as 0 here.  NOTE(review): presumably the
  # real id is filled in server-side - confirm.
  user_value.set_gaiaid(0)
def PackKey(name, value, pbvalue):
  """Stores a Key value as a reference (app id plus path elements).

  Args:
    name: The name of the property as a string (unused).
    value: A Key instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  # Reach into the Key's private Reference pb to copy app id and path.
  reference = value._Key__reference
  ref_value = pbvalue.mutable_referencevalue()
  ref_value.set_app(reference.app())
  for element in reference.path().element_list():
    ref_value.add_pathelement().CopyFrom(element)
def PackBool(name, value, pbvalue):
  """Stores a boolean value in an entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string (unused).
    value: A boolean instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_booleanvalue(value)
def PackInteger(name, value, pbvalue):
  """Stores an int or long value in an entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string (unused).
    value: An int or long instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_int64value(value)
def PackFloat(name, value, pbvalue):
  """Stores a float value in an entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string (unused).
    value: A float instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_doublevalue(value)
# Maps each supported property type to the function that serializes values of
# that type into an entity_pb.PropertyValue.
_PACK_PROPERTY_VALUES = {
    Blob: PackBlob,
    ByteString: PackBlob,
    bool: PackBool,
    Category: PackString,
    datetime.datetime: PackDatetime,
    Email: PackString,
    float: PackFloat,
    GeoPt: PackGeoPt,
    IM: PackString,
    int: PackInteger,
    Key: PackKey,
    Link: PackString,
    long: PackInteger,
    PhoneNumber: PackString,
    PostalAddress: PackString,
    Rating: PackInteger,
    str: PackString,
    Text: PackString,
    # None is represented by an empty PropertyValue; nothing to pack.
    type(None): lambda name, value, pbvalue: None,
    unicode: PackString,
    users.User: PackUser,
}

# Every supported property type must have a pack function.
assert set(_PACK_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
def ToPropertyPb(name, values):
  """Creates type-specific entity_pb.Property protocol buffers.

  Determines the type and meaning of the PropertyValue based on the Python
  type of the input value(s).

  NOTE: This function does not validate anything!

  Args:
    name: string or unicode; the property name
    values: The values for this property, either a single one or a list of
      them. All values must be a supported type. Lists of values must all be
      of the same type.

  Returns:
    A single entity_pb.Property if 'values' was not a list, otherwise a list
    of entity_pb.Property instances (one per value), each marked as multiple.
    (The previous docstring incorrectly said a list of PropertyValue was
    always returned.)
  """
  encoded_name = name.encode('utf-8')

  values_type = type(values)
  if values_type is list:
    multiple = True
  else:
    multiple = False
    values = [values]

  pbs = []
  for v in values:
    pb = entity_pb.Property()
    pb.set_name(encoded_name)
    pb.set_multiple(multiple)

    meaning = _PROPERTY_MEANINGS.get(v.__class__)
    if meaning is not None:
      pb.set_meaning(meaning)

    # Raises KeyError for unsupported types; callers are expected to have
    # validated the values first (e.g. via ValidateProperty).
    pack_prop = _PACK_PROPERTY_VALUES[v.__class__]
    pack_prop(name, v, pb.mutable_value())
    pbs.append(pb)

  if multiple:
    return pbs
  else:
    return pbs[0]
def FromReferenceProperty(value):
  """Converts a reference PropertyValue to a Key.

  Args:
    value: entity_pb.PropertyValue

  Returns:
    Key

  Raises:
    AssertionError: if the value is not a PropertyValue with a reference
      value set.  (Note: these are asserts, so they are stripped when
      running with python -O.)
  """
  assert isinstance(value, entity_pb.PropertyValue)
  assert value.has_referencevalue()
  ref = value.referencevalue()
  key = Key()
  # Reach into the Key's private Reference pb and rebuild it from the
  # reference value, one path element at a time.
  key_ref = key._Key__reference
  key_ref.set_app(ref.app())
  for pathelem in ref.pathelement_list():
    key_ref.mutable_path().add_element().CopyFrom(pathelem)
  return key
# Epoch used to convert GD_WHEN microsecond timestamps back into datetimes.
_EPOCH = datetime.datetime.utcfromtimestamp(0)

# Maps a property meaning to a callable that converts the raw deserialized
# value into the corresponding rich Python type.
_PROPERTY_CONVERSIONS = {
    entity_pb.Property.GD_WHEN:
        lambda val: _EPOCH + datetime.timedelta(microseconds=val),
    entity_pb.Property.ATOM_CATEGORY: Category,
    entity_pb.Property.ATOM_LINK: Link,
    entity_pb.Property.GD_EMAIL: Email,
    entity_pb.Property.GEORSS_POINT: lambda coords: GeoPt(*coords),
    entity_pb.Property.GD_IM: IM,
    entity_pb.Property.GD_PHONENUMBER: PhoneNumber,
    entity_pb.Property.GD_POSTALADDRESS: PostalAddress,
    entity_pb.Property.GD_RATING: Rating,
    entity_pb.Property.BLOB: Blob,
    entity_pb.Property.BYTESTRING: ByteString,
    entity_pb.Property.TEXT: Text,
}
def FromPropertyPb(pb):
  """Converts a property PB to a python value.

  Args:
    pb: entity_pb.Property

  Returns:
    # return type is determined by the type of the argument
    string, int, bool, double, users.User, or one of the atom or gd types
  """
  pbval = pb.value()
  meaning = pb.meaning()
  if pbval.has_stringvalue():
    value = pbval.stringvalue()
    # BLOB and BYTESTRING hold raw bytes; all other string values are UTF-8
    # encoded text and are decoded back to unicode here.
    if meaning not in (entity_pb.Property.BLOB, entity_pb.Property.BYTESTRING):
      value = unicode(value.decode('utf-8'))
  elif pbval.has_int64value():
    value = long(pbval.int64value())
  elif pbval.has_booleanvalue():
    value = bool(pbval.booleanvalue())
  elif pbval.has_doublevalue():
    value = pbval.doublevalue()
  elif pbval.has_referencevalue():
    value = FromReferenceProperty(pbval)
  elif pbval.has_pointvalue():
    # A (lat, lon) pair; the GEORSS_POINT conversion below turns it into a
    # GeoPt (see PackGeoPt for the x=lat, y=lon encoding).
    value = (pbval.pointvalue().x(), pbval.pointvalue().y())
  elif pbval.has_uservalue():
    email = unicode(pbval.uservalue().email().decode('utf-8'))
    auth_domain = unicode(pbval.uservalue().auth_domain().decode('utf-8'))
    # NOTE(review): the uservalue's gaiaid is not read here.
    value = users.User(email=email, _auth_domain=auth_domain)
  else:
    # A PropertyValue with no value fields set represents None.
    value = None
  try:
    # The meaning, if present, upgrades the raw value to its rich Python
    # type (e.g. GD_WHEN microseconds -> datetime.datetime).
    if pb.has_meaning():
      conversion = _PROPERTY_CONVERSIONS[meaning]
      value = conversion(value)
  except (KeyError, ValueError, IndexError, TypeError, AttributeError), msg:
    raise datastore_errors.BadValueError(
        'Error converting pb: %s\nException was: %s' % (pb, msg))
  return value
def PropertyTypeName(value):
  """Returns the name of the type of the given property value, as a string.

  Raises BadValueError if the value is not a valid property type.

  Args:
    value: any valid property value

  Returns:
    string
  """
  # Check the meanings map first so the gd/atom wrapper types (Category,
  # Link, Email, ...) get their specific names instead of falling through to
  # the basestring check below.
  if value.__class__ in _PROPERTY_MEANINGS:
    meaning = _PROPERTY_MEANINGS[value.__class__]
    name = entity_pb.Property._Meaning_NAMES[meaning]
    # e.g. GD_EMAIL -> 'gd:email', ATOM_LINK -> 'atom:link'.
    return name.lower().replace('_', ':')
  elif isinstance(value, basestring):
    return 'string'
  elif isinstance(value, users.User):
    return 'user'
  elif isinstance(value, long):
    return 'int'
  elif value is None:
    return 'null'
  else:
    # bool, int, float, Key, datetime, etc. use their lowercased type name.
    return typename(value).lower()
# Maps the type-name strings produced by PropertyTypeName() back to the
# corresponding Python types.
_PROPERTY_TYPE_STRINGS = {
    'string': unicode,
    'bool': bool,
    'int': long,
    'null': type(None),
    'float': float,
    'key': Key,
    'blob': Blob,
    'bytestring': ByteString,
    'text': Text,
    'user': users.User,
    'atom:category': Category,
    'atom:link': Link,
    'gd:email': Email,
    'gd:when': datetime.datetime,
    'georss:point': GeoPt,
    'gd:im': IM,
    'gd:phonenumber': PhoneNumber,
    'gd:postaladdress': PostalAddress,
    'gd:rating': Rating,
}
def FromPropertyTypeName(type_name):
  """Returns the Python type for a datastore type name.

  This is the inverse of PropertyTypeName().

  Args:
    type_name: A string representation of a datastore type name.

  Returns:
    A python type.

  Raises:
    KeyError: if type_name is not a known datastore type name.
  """
  return _PROPERTY_TYPE_STRINGS[type_name]
def PropertyValueFromString(type_, value_string, _auth_domain=None):
  """Returns an instance of a property value given a type and string value.

  The reverse of this method is just str() and type() of the python value.

  Note that this does *not* support non-UTC offsets in ISO 8601-formatted
  datetime strings, e.g. the -08:00 suffix in '2002-12-25 00:00:00-08:00'.
  It only supports -00:00 and +00:00 suffixes, which are UTC.

  Args:
    type_: A python class.
    value_string: A string representation of the value of the property.
    _auth_domain: Optional auth domain string, used only when type_ is
      users.User.

  Returns:
    An instance of 'type'.

  Raises:
    ValueError if type_ is datetime and value_string has a non-UTC timezone
    offset, or is not a valid datetime string.
  """
  if type_ == datetime.datetime:
    value_string = value_string.strip()

    # Strip a trailing UTC offset if present; reject any non-UTC offset.
    # (Guard on the length first: the original code raised IndexError on
    # strings shorter than six characters.)
    if len(value_string) >= 6 and value_string[-6] in ('+', '-'):
      if value_string[-5:] == '00:00':
        value_string = value_string[:-6]
      else:
        raise ValueError('Non-UTC offsets in datetimes are not supported.')

    # str(datetime) zero-pads microseconds to six digits, so the fractional
    # part (if any) parses directly as a microsecond count.
    split = value_string.split('.')
    iso_date = split[0]
    microseconds = 0
    if len(split) > 1:
      microseconds = int(split[1])

    time_struct = time.strptime(iso_date, '%Y-%m-%d %H:%M:%S')[0:6]
    value = datetime.datetime(*(time_struct + (microseconds,)))
    return value
  elif type_ == Rating:
    # Rating wraps an integer, so parse the string as int first.
    return Rating(int(value_string))
  elif type_ == bool:
    return value_string == 'True'
  elif type_ == users.User:
    return users.User(value_string, _auth_domain)
  elif type_ == type(None):
    return None
  # All remaining types (int, long, float, unicode, the gd wrappers, ...)
  # accept their own str() representation directly.
  return type_(value_string)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Classes for common kinds, including Contact, Message, and Event.
Most of these kinds are based on the gd namespace "kinds" from GData:
http://code.google.com/apis/gdata/common-elements.html
"""
import types
import urlparse
from xml.sax import saxutils
from google.appengine.datastore import datastore_pb
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
class GdKind(datastore.Entity):
  """ A base class for gd namespace kinds.

  This class contains common logic for all gd namespace kinds. For example,
  this class translates datastore (app id, kind, key) tuples to tag:
  URIs appropriate for use in <key> tags.
  """

  HEADER = u"""<entry xmlns:gd='http://schemas.google.com/g/2005'>
    <category scheme='http://schemas.google.com/g/2005#kind'
      term='http://schemas.google.com/g/2005#%s' />"""
  FOOTER = u"""
  </entry>"""

  # Per-instance sets assigned in __init__; class-level defaults keep the
  # attributes defined even before __init__ runs.
  _kind_properties = set()
  _contact_properties = set()

  def __init__(self, kind, title, kind_properties, contact_properties=None):
    """ Ctor.

    title is the name of this particular entity, e.g. Bob Jones or Mom's
    Birthday Party.

    kind_properties is a list of property names that should be included in
    this entity's XML encoding as first-class XML elements, instead of
    <property> elements. 'title' and 'content' are added to kind_properties
    automatically, and may not appear in contact_properties.

    contact_properties is a list of property names that are Keys that point
    to Contact entities, and should be included in this entity's XML encoding
    as <gd:who> elements. If a property name is included in both
    kind_properties and contact_properties, it is treated as a Contact
    property.

    Args:
      kind: string
      title: string
      kind_properties: list of strings
      contact_properties: list of strings, or None for no contact properties
        (the previous mutable-default [] was replaced; behavior is the same)
    """
    datastore.Entity.__init__(self, kind)

    if contact_properties is None:
      contact_properties = []

    if not isinstance(title, types.StringTypes):
      raise datastore_errors.BadValueError(
          'Expected a string for title; received %s (a %s).' %
          (title, datastore_types.typename(title)))
    self['title'] = title
    self['content'] = ''

    self._contact_properties = set(contact_properties)
    # Contact properties may not collide with the properties already set
    # above (title/content).
    assert not self._contact_properties.intersection(self.keys())

    self._kind_properties = set(kind_properties) - self._contact_properties
    self._kind_properties.add('title')
    self._kind_properties.add('content')

  def _KindPropertiesToXml(self):
    """ Convert the properties that are part of this gd kind to XML. For
    testability, the XML elements in the output are sorted alphabetically
    by property name.

    Returns:
      string  # the XML representation of the gd kind properties
    """
    properties = self._kind_properties.intersection(set(self.keys()))

    xml = u''
    for prop in sorted(properties):
      # quoteattr() returns the escaped text wrapped in quotes; strip them
      # to get an XML-safe element name.
      prop_xml = saxutils.quoteattr(prop)[1:-1]

      value = self[prop]
      has_toxml = (hasattr(value, 'ToXml') or
                   isinstance(value, list) and hasattr(value[0], 'ToXml'))
      for val in self._XmlEscapeValues(prop):
        if has_toxml:
          xml += '\n %s' % val
        else:
          xml += '\n <%s>%s</%s>' % (prop_xml, val, prop_xml)

    return xml

  def _ContactPropertiesToXml(self):
    """ Convert this kind's Contact properties kind to XML. For testability,
    the XML elements in the output are sorted alphabetically by property
    name.

    Returns:
      string  # the XML representation of the Contact properties
    """
    properties = self._contact_properties.intersection(set(self.keys()))

    xml = u''
    for prop in sorted(properties):
      values = self[prop]
      if not isinstance(values, list):
        values = [values]
      for value in values:
        assert isinstance(value, datastore_types.Key)
        # Bug fix: the rel attribute previously lacked its closing double
        # quote, producing malformed XML.
        xml += """
  <gd:who rel="http://schemas.google.com/g/2005#%s.%s">
    <gd:entryLink href="%s" />
  </gd:who>""" % (self.kind().lower(), prop, value.ToTagUri())

    return xml

  def _LeftoverPropertiesToXml(self):
    """ Convert all of this entity's properties that *aren't* part of this gd
    kind to XML.

    Returns:
      string  # the XML representation of the leftover properties
    """
    leftovers = set(self.keys())
    leftovers -= self._kind_properties
    leftovers -= self._contact_properties
    if leftovers:
      return u'\n ' + '\n '.join(self._PropertiesToXml(leftovers))
    else:
      return u''

  def ToXml(self):
    """ Returns an XML representation of this entity, as a string.
    """
    xml = GdKind.HEADER % self.kind().lower()
    xml += self._KindPropertiesToXml()
    xml += self._ContactPropertiesToXml()
    xml += self._LeftoverPropertiesToXml()
    xml += GdKind.FOOTER
    return xml
class Message(GdKind):
  """A message, such as an email, a discussion group posting, or a comment.

  Includes the message title, contents, participants, and other properties.

  This is the gd Message kind. See:
  http://code.google.com/apis/gdata/common-elements.html#gdMessageKind

  These properties are meaningful. They are all optional.

  property name  property type  meaning
  -------------------------------------
  title          string         message subject
  content        string         message body
  from           Contact*       sender
  to             Contact*       primary recipient
  cc             Contact*       CC recipient
  bcc            Contact*       BCC recipient
  reply-to       Contact*       intended recipient of replies
  link           Link*          attachment
  category       Category*      tag or label associated with this message
  geoPt          GeoPt*         geographic location the message was posted from
  rating         Rating*        message rating, as defined by the application

  * means this property may be repeated.

  The Contact properties should be Keys of Contact entities. They are
  represented in the XML encoding as linked <gd:who> elements.
  """
  KIND_PROPERTIES = ['title', 'content', 'link', 'category', 'geoPt', 'rating']
  CONTACT_PROPERTIES = ['from', 'to', 'cc', 'bcc', 'reply-to']
  def __init__(self, title, kind='Message'):
    """ Ctor.

    Args:
      title: string, the message subject.
      kind: string, the datastore kind (defaults to 'Message').
    """
    GdKind.__init__(self, kind, title, Message.KIND_PROPERTIES,
                    Message.CONTACT_PROPERTIES)
class Event(GdKind):
  """A calendar event.

  Includes the event title, description, location, organizer, start and end
  time, and other details.

  This is the gd Event kind. See:
  http://code.google.com/apis/gdata/common-elements.html#gdEventKind

  These properties are meaningful. They are all optional.

  property name  property type  meaning
  -------------------------------------
  title          string         event name
  content        string         event description
  author         string         the organizer's name
  where          string*        human-readable location (not a GeoPt)
  startTime      timestamp      start time
  endTime        timestamp      end time
  eventStatus    string         one of the Event.Status values
  link           Link*          page with more information
  category       Category*      tag or label associated with this event
  attendee       Contact*       attendees and other related people

  * means this property may be repeated.

  The Contact properties should be Keys of Contact entities. They are
  represented in the XML encoding as linked <gd:who> elements.
  """
  KIND_PROPERTIES = ['title', 'content', 'author', 'where', 'startTime',
                     'endTime', 'eventStatus', 'link', 'category']
  CONTACT_PROPERTIES = ['attendee']

  class Status:
    # Allowed values for the 'eventStatus' property.
    CONFIRMED = 'confirmed'
    TENTATIVE = 'tentative'
    CANCELED = 'canceled'

  def __init__(self, title, kind='Event'):
    """ Ctor.

    Args:
      title: string, the event name.
      kind: string, the datastore kind (defaults to 'Event').
    """
    GdKind.__init__(self, kind, title, Event.KIND_PROPERTIES,
                    Event.CONTACT_PROPERTIES)

  def ToXml(self):
    """ Override GdKind.ToXml() to special-case author, gd:where, gd:when, and
    gd:eventStatus.
    """
    xml = GdKind.HEADER % self.kind().lower()

    # Render only the generic kind properties here.  (This previously reused
    # Contact.KIND_PROPERTIES, which happened to equal exactly this list;
    # spell it out so a change to Contact cannot silently change Event.)
    self._kind_properties = set(['title', 'content', 'link', 'category'])
    xml += self._KindPropertiesToXml()

    if 'author' in self:
      xml += """
  <author><name>%s</name></author>""" % self['author']
    if 'eventStatus' in self:
      xml += """
  <gd:eventStatus value="http://schemas.google.com/g/2005#event.%s" />""" % (
        self['eventStatus'])
    if 'where' in self:
      lines = ['<gd:where valueString="%s" />' % val
               for val in self._XmlEscapeValues('where')]
      xml += '\n ' + '\n '.join(lines)

    # NOTE(review): an empty <gd:when /> element is emitted even when neither
    # startTime nor endTime is present - confirm that's intended.  (An unused
    # local 'iso_format' was removed here; isoformat() is used directly.)
    xml += '\n <gd:when'
    for key in ['startTime', 'endTime']:
      if key in self:
        xml += ' %s="%s"' % (key, self[key].isoformat())
    xml += ' />'

    # Restore the full kind-property set so the special-cased properties are
    # not repeated by _LeftoverPropertiesToXml().
    self._kind_properties.update(['author', 'where', 'startTime', 'endTime',
                                  'eventStatus'])

    xml += self._ContactPropertiesToXml()
    xml += self._LeftoverPropertiesToXml()
    xml += GdKind.FOOTER
    return xml
class Contact(GdKind):
  """A contact: a person, a venue such as a club or a restaurant, or an
  organization.

  This is the gd Contact kind. See:
  http://code.google.com/apis/gdata/common-elements.html#gdContactKind

  Most of the information about the contact is in the <gd:contactSection>
  element; see the reference section for that element for details.

  These properties are meaningful. They are all optional.

  property name  property type   meaning
  -------------------------------------
  title          string          contact's name
  content        string          notes
  email          Email*          email address
  geoPt          GeoPt*          geographic location
  im             IM*             IM address
  phoneNumber    Phonenumber*    phone number
  postalAddress  PostalAddress*  mailing address
  link           Link*           link to more information
  category       Category*       tag or label associated with this contact

  * means this property may be repeated.
  """
  CONTACT_SECTION_HEADER = """
    <gd:contactSection>"""
  CONTACT_SECTION_FOOTER = """
    </gd:contactSection>"""
  # Properties rendered as first-class elements directly under <entry>.
  KIND_PROPERTIES = ['title', 'content', 'link', 'category']
  # Properties rendered inside the <gd:contactSection> element.
  CONTACT_SECTION_PROPERTIES = ['email', 'geoPt', 'im', 'phoneNumber',
                                'postalAddress']
  def __init__(self, title, kind='Contact'):
    """ Ctor.

    Args:
      title: string, the contact's name.
      kind: string, the datastore kind (defaults to 'Contact').
    """
    GdKind.__init__(self, kind, title, Contact.KIND_PROPERTIES)
  def ToXml(self):
    """ Override GdKind.ToXml() to put some properties inside a
    gd:contactSection.
    """
    xml = GdKind.HEADER % self.kind().lower()
    # First render the generic kind properties, then swap _kind_properties
    # so the contact-section properties render inside <gd:contactSection>.
    self._kind_properties = set(Contact.KIND_PROPERTIES)
    xml += self._KindPropertiesToXml()
    xml += Contact.CONTACT_SECTION_HEADER
    self._kind_properties = set(Contact.CONTACT_SECTION_PROPERTIES)
    xml += self._KindPropertiesToXml()
    xml += Contact.CONTACT_SECTION_FOOTER
    # Merge both sets back so _LeftoverPropertiesToXml() skips everything
    # that was already rendered above.
    self._kind_properties.update(Contact.KIND_PROPERTIES)
    xml += self._LeftoverPropertiesToXml()
    xml += GdKind.FOOTER
    return xml
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""URL downloading API.
Methods defined in this module:
   Fetch(): fetches a given URL using an HTTP GET or POST
"""
import os
import UserDict
import urllib2
import urlparse
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import urlfetch_service_pb
from google.appengine.api.urlfetch_errors import *
from google.appengine.runtime import apiproxy_errors
MAX_REDIRECTS = 5

# Symbolic constants for the supported HTTP request methods.
GET = 1
POST = 2
HEAD = 3
PUT = 4
DELETE = 5

# Maps canonical (upper-case) method names to the constants above.
_URL_STRING_MAP = {
    'GET': GET,
    'POST': POST,
    'HEAD': HEAD,
    'PUT': PUT,
    'DELETE': DELETE,
}

_VALID_METHODS = frozenset(_URL_STRING_MAP.values())
class _CaselessDict(UserDict.IterableUserDict):
  """Case insensitive dictionary.

  Lookups, overwrites and deletions are case-insensitive, while iteration
  and copy() preserve the casing of the most recently stored key.

  This class was lifted from os.py and slightly modified.
  """
  def __init__(self):
    UserDict.IterableUserDict.__init__(self)
    # Maps lowercased key -> the cased key currently stored in self.data.
    self.caseless_keys = {}
  def __setitem__(self, key, item):
    """Set dictionary item.

    Args:
      key: Key of new item.  Key is case insensitive, so "d['Key'] = value "
        will replace previous values set by "d['key'] = old_value".
      item: Item to store.
    """
    caseless_key = key.lower()
    # Drop any previously stored spelling of the same key so self.data never
    # holds two spellings at once.
    if caseless_key in self.caseless_keys:
      del self.data[self.caseless_keys[caseless_key]]
    self.caseless_keys[caseless_key] = key
    self.data[key] = item
  def __getitem__(self, key):
    """Get dictionary item.

    Args:
      key: Key of item to get.  Key is case insensitive, so "d['Key']" is the
        same as "d['key']".

    Returns:
      Item associated with key.
    """
    return self.data[self.caseless_keys[key.lower()]]
  def __delitem__(self, key):
    """Remove item from dictionary.

    Args:
      key: Key of item to remove.  Key is case insensitive, so "del d['Key']"
        is the same as "del d['key']"
    """
    caseless_key = key.lower()
    del self.data[self.caseless_keys[caseless_key]]
    del self.caseless_keys[caseless_key]
  def has_key(self, key):
    """Determine if dictionary has item with specific key.

    Args:
      key: Key to check for presence.  Key is case insensitive, so
        "d.has_key('Key')" evaluates to the same value as "d.has_key('key')".

    Returns:
      True if dictionary contains key, else False.
    """
    return key.lower() in self.caseless_keys
  def __contains__(self, key):
    """Same as 'has_key', but used for 'in' operator.'"""
    return self.has_key(key)
  def get(self, key, failobj=None):
    """Get dictionary item, defaulting to another value if it does not exist.

    Args:
      key: Key of item to get.  Key is case insensitive, so "d['Key']" is the
        same as "d['key']".
      failobj: Value to return if key not in dictionary.
    """
    try:
      cased_key = self.caseless_keys[key.lower()]
    except KeyError:
      return failobj
    return self.data[cased_key]
  def update(self, dict=None, **kwargs):
    """Update dictionary using values from another dictionary and keywords.

    NOTE: the parameter name 'dict' shadows the builtin; it is kept for API
    compatibility with the standard dict.update signature of the era.

    Args:
      dict: Dictionary to update from.
      kwargs: Keyword arguments to update from.
    """
    if dict:
      try:
        keys = dict.keys()
      except AttributeError:
        # No keys() method: assume an iterable of (key, value) pairs.
        for k, v in dict:
          self[k] = v
      else:
        for k in keys:
          self[k] = dict[k]
    if kwargs:
      self.update(kwargs)
  def copy(self):
    """Make a shallow, case sensitive copy of self."""
    return dict(self)
def _is_fetching_self(url, method):
  """Checks if the fetch is for the same URL from which it originated.

  Only GET requests, with both HTTP_HOST and PATH_INFO present in the CGI
  environment, can be identified as self-fetches.

  Args:
    url: str, The URL being fetched.
    method: value from _VALID_METHODS.

  Returns:
    boolean indicating whether or not it seems that the app is trying to
    fetch itself.
  """
  if method != GET:
    return False
  environ = os.environ
  if 'HTTP_HOST' not in environ or 'PATH_INFO' not in environ:
    return False

  host_port = urlparse.urlsplit(url)[1]
  if host_port != environ['HTTP_HOST']:
    return False

  current_path = urllib2.unquote(environ['PATH_INFO'])
  desired_path = urllib2.unquote(urlparse.urlsplit(url)[2])
  if current_path == desired_path:
    return True
  # Treat '' and '/' as the same (root) path.
  return current_path in ('', '/') and desired_path in ('', '/')
def fetch(url, payload=None, method=GET, headers={}, allow_truncated=False,
          follow_redirects=True):
  """Fetches the given HTTP URL, blocking until the result is returned.

  Other optional parameters are:
    method: GET, POST, HEAD, PUT, or DELETE (a constant or its string name)
    payload: POST or PUT payload (implies method is not GET, HEAD, or DELETE)
    headers: dictionary of HTTP headers to send with the request
    allow_truncated: if true, truncate large responses and return them
      without error. otherwise, ResponseTooLargeError will be thrown when a
      response is truncated.
    follow_redirects: if true (the default), redirects are transparently
      followed and the response (if less than 5 redirects) contains the
      final destination's payload and the response status is 200. You lose,
      however, the redirect chain information. If false, you see the HTTP
      response yourself, including the 'Location' header, and redirects are
      not followed.

  We use a HTTP/1.1 compliant proxy to fetch the result.

  The returned data structure has the following fields:
    content: string containing the response from the server
    status_code: HTTP status code returned by the server
    headers: dictionary of headers returned by the server

  If the URL is an empty string or obviously invalid, we throw an
  urlfetch.InvalidURLError. If the server cannot be contacted, we throw a
  urlfetch.DownloadError. Note that HTTP errors are returned as a part
  of the returned structure, so HTTP errors like 404 do not result in an
  exception.
  """
  # NOTE(review): the mutable default for 'headers' is only iterated below,
  # never mutated, so the shared default dict is safe in practice.
  # Accept either a method name string ('get', 'POST', ...) or one of the
  # numeric method constants; anything else is rejected below.
  if isinstance(method, basestring):
    method = method.upper()
  method = _URL_STRING_MAP.get(method, method)
  if method not in _VALID_METHODS:
    raise InvalidMethodError('Invalid method %s.' % str(method))
  if _is_fetching_self(url, method):
    raise InvalidURLError("App cannot fetch the same URL as the one used for "
                          "the request.")
  request = urlfetch_service_pb.URLFetchRequest()
  response = urlfetch_service_pb.URLFetchResponse()
  request.set_url(url)
  if method == GET:
    request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
  elif method == POST:
    request.set_method(urlfetch_service_pb.URLFetchRequest.POST)
  elif method == HEAD:
    request.set_method(urlfetch_service_pb.URLFetchRequest.HEAD)
  elif method == PUT:
    request.set_method(urlfetch_service_pb.URLFetchRequest.PUT)
  elif method == DELETE:
    request.set_method(urlfetch_service_pb.URLFetchRequest.DELETE)
  # A payload is only sent for POST and PUT; it is silently dropped for
  # other methods.
  if payload and (method == POST or method == PUT):
    request.set_payload(payload)
  # Header values are coerced to str; keys are passed through unchanged.
  for key, value in headers.iteritems():
    header_proto = request.add_header()
    header_proto.set_key(key)
    header_proto.set_value(str(value))
  request.set_followredirects(follow_redirects)
  try:
    apiproxy_stub_map.MakeSyncCall('urlfetch', 'Fetch', request, response)
  except apiproxy_errors.ApplicationError, e:
    # Translate service-level error codes into this API's exception types;
    # unrecognized codes are re-raised unchanged.
    if (e.application_error ==
        urlfetch_service_pb.URLFetchServiceError.INVALID_URL):
      raise InvalidURLError(str(e))
    if (e.application_error ==
        urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR):
      raise DownloadError(str(e))
    if (e.application_error ==
        urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR):
      raise DownloadError(str(e))
    if (e.application_error ==
        urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
      raise ResponseTooLargeError(None)
    if (e.application_error ==
        urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
      raise DownloadError(str(e))
    raise e
  result = _URLFetchResult(response)
  if not allow_truncated and response.contentwastruncated():
    raise ResponseTooLargeError(result)
  return result
# Backwards-compatible alias for the lower-case function name.
Fetch = fetch
class _URLFetchResult(object):
  """A Pythonic representation of our fetch response protocol buffer."""

  def __init__(self, response_proto):
    """Unpacks the interesting fields of a fetch response proto.

    Args:
      response_proto: the URLFetchResponse proto to wrap.
    """
    self.__pb = response_proto
    self.content = response_proto.content()
    self.status_code = response_proto.statuscode()
    self.content_was_truncated = response_proto.contentwastruncated()
    # Response headers are exposed through a case-insensitive dict.
    self.headers = _CaselessDict()
    for header in response_proto.header_list():
      self.headers[header.key()] = header.value()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors used in the Python appinfo API, used by app developers."""
class Error(Exception):
  """Base appinfo error type."""
class EmptyConfigurationFile(Error):
  """Tried to load empty configuration file."""
class MultipleConfigurationFile(Error):
  """Tried to load configuration file with multiple AppInfo objects."""
class UnknownHandlerType(Error):
  """Raised when it is not possible to determine URL mapping type."""
class UnexpectedHandlerAttribute(Error):
  """Raised when a handler type has an attribute that it does not use."""
class MissingHandlerAttribute(Error):
  """Raised when a handler is missing an attribute required by its type."""
class MissingURLMapping(Error):
  """Raised when there are no URL mappings in external appinfo."""
class TooManyURLMappings(Error):
  """Raised when there are too many URL mappings in external appinfo."""
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Exceptions raised my mail API."""
class Error(Exception):
  """Base Mail error type."""
class BadRequestError(Error):
  """Email is not valid."""
class InvalidSenderError(Error):
  """Sender is not permitted to send mail for this application."""
class InvalidEmailError(Error):
  """Bad email set on an email field."""
class InvalidAttachmentTypeError(Error):
  """Invalid file type for attachments. We don't send viruses!"""
class MissingRecipientsError(Error):
  """No recipients specified in message."""
class MissingSenderError(Error):
  """No sender specified in message."""
class MissingSubjectError(Error):
  """Subject not specified in message."""
class MissingBodyError(Error):
  """No body specified in message."""
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors used in the Python datastore API."""
class Error(Exception):
  """Base datastore error type.
  """
class BadValueError(Error):
  """Raised by Entity.__setitem__(), Query.__setitem__(), Get(), and others
  when a property value or filter value is invalid.
  """
class BadPropertyError(Error):
  """Raised by Entity.__setitem__() when a property name isn't a string.
  """
class BadRequestError(Error):
  """Raised by datastore calls when the parameter(s) are invalid.
  """
class EntityNotFoundError(Error):
  """DEPRECATED: Raised by Get() when the requested entity is not found.
  """
class BadArgumentError(Error):
  """Raised by Query.Order(), Iterator.Next(), and others when they're
  passed an invalid argument.
  """
class QueryNotFoundError(Error):
  """DEPRECATED: Raised by Iterator methods when the Iterator is invalid. This
  should not happen during normal usage; it protects against malicious users
  and system errors.
  """
class TransactionNotFoundError(Error):
  """DEPRECATED: Raised by RunInTransaction. This is an internal error; you
  should not see this.
  """
class Rollback(Error):
  """May be raised by transaction functions when they want to roll back
  instead of committing. Note that *any* exception raised by a transaction
  function will cause a rollback. This is purely for convenience. See
  datastore.RunInTransaction for details.
  """
class TransactionFailedError(Error):
  """Raised by RunInTransaction methods when the transaction could not be
  committed, even after retrying. This is usually due to high contention.
  """
class BadFilterError(Error):
  """Raised by Query.__setitem__() and Query.Run() when a filter string is
  invalid.
  """
  # NOTE: the parameter name `filter` shadows the builtin, but it is part of
  # the public signature (callers may pass it by keyword), so it is kept.
  def __init__(self, filter):
    self.filter = filter
  def __str__(self):
    return (u'BadFilterError: invalid filter: %s.' % self.filter)
class BadQueryError(Error):
  """Raised by Query when a query or query string is invalid.
  """
class BadKeyError(Error):
  """Raised by Key.__str__ when the key is invalid.
  """
class InternalError(Error):
  """An internal datastore error. Please report this to Google.
  """
class NeedIndexError(Error):
  """No matching index was found for a query that requires an index. Check
  the Indexes page in the Admin Console and your index.yaml file.
  """
class Timeout(Error):
  """The datastore operation timed out. This can happen when you attempt to
  put, get, or delete too many entities or an entity with too many properties,
  or if the datastore is overloaded or having trouble.
  """
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Logging utilities for use by applications.
Classes defined here:
AppLogsHandler: StreamHandler subclass
"""
import logging
import sys
import types
# Newlines embedded in a formatted log message are replaced with this NUL
# character so that each log record occupies exactly one output line
# (see AppLogsHandler._AppLogsMessage).
NEWLINE_REPLACEMENT = "\0"
class AppLogsHandler(logging.StreamHandler):
  """Logging handler that will direct output to a persistent store of
  application logs.

  This handler will output log statements to stderr. This handler is
  automatically initialized and attached to the Python common logging library.
  """
  def __init__(self, stream=None):
    """Constructor.

    Args:
      # stream is optional. it defaults to sys.stderr.
      stream: destination for output
    """
    logging.StreamHandler.__init__(self, stream)
  def close(self):
    """Closes the stream.

    This implementation based on the implementation of FileHandler.close()."""
    self.flush()
    self.stream.close()
    logging.StreamHandler.close(self)
  def emit(self, record):
    """Emit a record.

    Formats the record into the app-logs line format and writes it to the
    stream as UTF-8 bytes.

    This implementation is based on the implementation of
    StreamHandler.emit()."""
    try:
      message = self._AppLogsMessage(record)
      # Encode to UTF-8 before writing so non-ASCII messages survive.
      self.stream.write(message.encode("UTF-8"))
      self.flush()
    except (KeyboardInterrupt, SystemExit):
      # Never swallow interpreter-exit signals.
      raise
    except:
      # Delegate all other failures to the standard logging error handler.
      self.handleError(record)
  def _AppLogsMessage(self, record):
    """Converts the log record into a log line.

    The line is "LOG <level> <timestamp> <message>" followed by a newline,
    where <timestamp> is the record's creation time converted from seconds
    to microseconds. Newlines inside the formatted message are replaced with
    NEWLINE_REPLACEMENT so the record stays on a single line.
    """
    message = self.format(record).replace("\n", NEWLINE_REPLACEMENT)
    # long() is Python 2; seconds -> microseconds.
    return "LOG %d %d %s\n" % (self._AppLogsLevel(record.levelno),
                               long(record.created * 1000 * 1000),
                               message)
  def _AppLogsLevel(self, level):
    """Converts the logging level used in Python to the API logging level"""
    if level >= logging.CRITICAL:
      return 4
    elif level >= logging.ERROR:
      return 3
    elif level >= logging.WARNING:
      return 2
    elif level >= logging.INFO:
      return 1
    else:
      return 0
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import StringProto
class UserServiceError(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message for users-service error codes.

  The message itself carries no fields; it exists to hold the ErrorCode
  enum constants and their printable names. Generated code -- do not edit
  the logic by hand.
  """
  OK = 0
  REDIRECT_URL_TOO_LONG = 1
  NOT_ALLOWED = 2
  # Maps ErrorCode values to their printable names.
  _ErrorCode_NAMES = {
    0: "OK",
    1: "REDIRECT_URL_TOO_LONG",
    2: "NOT_ALLOWED",
  }
  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)
  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)
  def MergeFrom(self, x):
    assert x is not self
  def Equals(self, x):
    # All instances are equal: the message has no fields.
    if x is self: return 1
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized
  def ByteSize(self):
    n = 0
    return n + 0
  def Clear(self):
    pass
  def OutputUnchecked(self, out):
    pass
  def TryMerge(self, d):
    # Skips over every field in the stream; a zero tag is malformed input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res
  _TEXT = (
   "ErrorCode",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
__all__ = ['UserServiceError']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""CronInfo tools.
A library for working with CronInfo records, describing cron entries for an
application. Supports loading the records from yaml.
"""
import logging
import sys
import traceback
try:
import pytz
except ImportError:
pytz = None
from google.appengine.cron import groc
from google.appengine.api import validation
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_object
# Validation patterns for cron.yaml entry attributes.
_URL_REGEX = r'^/.*$'  # handler URL must be an absolute path
_TIMEZONE_REGEX = r'^.{0,100}$'  # timezone name, at most 100 characters
_DESCRIPTION_REGEX = r'^.{0,499}$'  # free-form description, at most 499 chars
class GrocValidator(validation.Validator):
  """Checks that a schedule is in valid groc format."""
  def Validate(self, value):
    """Validates a schedule.

    Args:
      value: the schedule string; parsed with the groc grammar.

    Returns:
      The schedule string, unchanged, if it parses.

    Raises:
      validation.MissingAttribute: if value is None.
      TypeError: if value is not a string.
      validation.ValidationError: if the groc parser rejects the schedule.
    """
    if value is None:
      raise validation.MissingAttribute('schedule must be specified')
    if not isinstance(value, basestring):
      raise TypeError('schedule must be a string, not \'%r\''%type(value))
    schedule = groc.CreateParser(value)
    try:
      schedule.timespec()
    except groc.GrocException, e:
      raise validation.ValidationError('schedule \'%s\' failed to parse: %s'%(
          value, e.args[0]))
    return value
class TimezoneValidator(validation.Validator):
  """Checks that a timezone can be correctly parsed and is known."""
  def Validate(self, value):
    """Validates a timezone.

    Args:
      value: the timezone name, or None (timezone is optional).

    Returns:
      The timezone name (or None). If pytz is unavailable, or pytz raises
      IOError, the name is accepted without verification.

    Raises:
      TypeError: if value is not a string.
      validation.ValidationError: if pytz does not know the timezone.
    """
    if value is None:
      return
    if not isinstance(value, basestring):
      raise TypeError('timezone must be a string, not \'%r\'' % type(value))
    if pytz is None:
      # pytz import failed at module load; accept the name unverified.
      return value
    try:
      pytz.timezone(value)
    except pytz.UnknownTimeZoneError:
      raise validation.ValidationError('timezone \'%s\' is unknown' % value)
    except IOError:
      # pytz reads timezone data files; presumably they may be unreadable
      # in some environments, so the name is accepted unverified here.
      return value
    except:
      # Unexpected pytz failure: log with traceback, then re-raise.
      e, v, t = sys.exc_info()
      logging.warning("pytz raised an unexpected error: %s.\n" % (v) +
                      "Traceback:\n" + "\n".join(traceback.format_tb(t)))
      raise
    return value
# Attribute names used in cron.yaml.
CRON = 'cron'
URL = 'url'
SCHEDULE = 'schedule'
TIMEZONE = 'timezone'
DESCRIPTION = 'description'
# NOTE(review): "Cronfiguration" looks like a typo (or pun) for
# "Configuration", but the name is public API and is left unchanged.
class MalformedCronfigurationFile(Exception):
  """Configuration file for Cron is malformed."""
  pass
class CronEntry(validation.Validated):
  """A cron entry describes a single cron job."""
  # url and schedule are required; timezone and description are optional
  # (TimezoneValidator accepts None, DESCRIPTION is wrapped in Optional).
  ATTRIBUTES = {
      URL: _URL_REGEX,
      SCHEDULE: GrocValidator(),
      TIMEZONE: TimezoneValidator(),
      DESCRIPTION: validation.Optional(_DESCRIPTION_REGEX)
  }
class CronInfoExternal(validation.Validated):
  """CronInfoExternal describes all cron entries for an application."""
  ATTRIBUTES = {
      CRON: validation.Optional(validation.Repeated(CronEntry))
  }
def LoadSingleCron(cron_info):
  """Load a cron.yaml file or string and return a CronInfoExternal object.

  Args:
    cron_info: the contents of a cron.yaml file, as a string or open file.

  Returns:
    A single CronInfoExternal object built from the input.

  Raises:
    MalformedCronfigurationFile: if the input yields zero or more than one
      cron configuration section.
  """
  object_builder = yaml_object.ObjectBuilder(CronInfoExternal)
  builder_handler = yaml_builder.BuilderHandler(object_builder)
  yaml_listener.EventListener(builder_handler).Parse(cron_info)
  parsed_infos = builder_handler.GetResults()
  if not parsed_infos:
    raise MalformedCronfigurationFile('Empty cron configuration.')
  if len(parsed_infos) > 1:
    raise MalformedCronfigurationFile('Multiple cron sections '
                                      'in configuration.')
  return parsed_infos[0]
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import StringProto
class URLFetchServiceError(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message for urlfetch error codes.

  The message itself carries no fields; it exists to hold the ErrorCode
  enum constants and their printable names. Generated code -- do not edit
  the logic by hand.
  """
  OK = 0
  INVALID_URL = 1
  FETCH_ERROR = 2
  UNSPECIFIED_ERROR = 3
  RESPONSE_TOO_LARGE = 4
  DEADLINE_EXCEEDED = 5
  # Maps ErrorCode values to their printable names.
  _ErrorCode_NAMES = {
    0: "OK",
    1: "INVALID_URL",
    2: "FETCH_ERROR",
    3: "UNSPECIFIED_ERROR",
    4: "RESPONSE_TOO_LARGE",
    5: "DEADLINE_EXCEEDED",
  }
  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)
  def __init__(self, contents=None):
    pass
    if contents is not None: self.MergeFromString(contents)
  def MergeFrom(self, x):
    assert x is not self
  def Equals(self, x):
    # All instances are equal: the message has no fields.
    if x is self: return 1
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized
  def ByteSize(self):
    n = 0
    return n + 0
  def Clear(self):
    pass
  def OutputUnchecked(self, out):
    pass
  def TryMerge(self, d):
    # Skips over every field in the stream; a zero tag is malformed input.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res
  _TEXT = (
   "ErrorCode",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class URLFetchRequest_Header(ProtocolBuffer.ProtocolMessage):
  """Generated message: one HTTP header (key/value) of a URLFetchRequest.

  Encoded as protobuf group field 3 of URLFetchRequest: key is string
  field 4 (wire tag 34), value is string field 5 (wire tag 42), and tag 28
  marks the end of the group. Both fields are required. Generated code --
  do not edit the logic by hand.
  """
  # Presence flags and default values (class-level defaults; instances
  # shadow them on first set_*).
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    # +2 accounts for the two one-byte field tags (34 and 42).
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2
  def Clear(self):
    self.clear_key()
    self.clear_value()
  def OutputUnchecked(self, out):
    out.putVarInt32(34)
    out.putPrefixedString(self.key_)
    out.putVarInt32(42)
    out.putPrefixedString(self.value_)
  def TryMerge(self, d):
    # Loops until the enclosing group's end tag (28); zero tag is malformed.
    while 1:
      tt = d.getVarInt32()
      if tt == 28: break
      if tt == 34:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 42:
        self.set_value(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("Key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("Value: %s\n" % self.DebugFormatString(self.value_))
    return res
class URLFetchRequest(ProtocolBuffer.ProtocolMessage):
  """Generated message: a urlfetch request.

  Fields: method (required enum, field 1), url (required string, field 2),
  repeated header group (field 3), payload (optional string, field 6), and
  followredirects (optional bool, field 7, default true). Generated code --
  do not edit the logic by hand.
  """
  # RequestMethod enum values.
  GET = 1
  POST = 2
  HEAD = 3
  PUT = 4
  DELETE = 5
  # Maps RequestMethod values to their printable names.
  _RequestMethod_NAMES = {
    1: "GET",
    2: "POST",
    3: "HEAD",
    4: "PUT",
    5: "DELETE",
  }
  def RequestMethod_Name(cls, x): return cls._RequestMethod_NAMES.get(x, "")
  RequestMethod_Name = classmethod(RequestMethod_Name)
  # Presence flags and default values (class-level defaults; instances
  # shadow them on first set_*). followredirects defaults to true (1).
  has_method_ = 0
  method_ = 0
  has_url_ = 0
  url_ = ""
  has_payload_ = 0
  payload_ = ""
  has_followredirects_ = 0
  followredirects_ = 1
  def __init__(self, contents=None):
    self.header_ = []
    if contents is not None: self.MergeFromString(contents)
  def method(self): return self.method_
  def set_method(self, x):
    self.has_method_ = 1
    self.method_ = x
  def clear_method(self):
    if self.has_method_:
      self.has_method_ = 0
      self.method_ = 0
  def has_method(self): return self.has_method_
  def url(self): return self.url_
  def set_url(self, x):
    self.has_url_ = 1
    self.url_ = x
  def clear_url(self):
    if self.has_url_:
      self.has_url_ = 0
      self.url_ = ""
  def has_url(self): return self.has_url_
  def header_size(self): return len(self.header_)
  def header_list(self): return self.header_
  def header(self, i):
    return self.header_[i]
  def mutable_header(self, i):
    return self.header_[i]
  def add_header(self):
    # Appends a new empty header group and returns it for the caller to fill.
    x = URLFetchRequest_Header()
    self.header_.append(x)
    return x
  def clear_header(self):
    self.header_ = []
  def payload(self): return self.payload_
  def set_payload(self, x):
    self.has_payload_ = 1
    self.payload_ = x
  def clear_payload(self):
    if self.has_payload_:
      self.has_payload_ = 0
      self.payload_ = ""
  def has_payload(self): return self.has_payload_
  def followredirects(self): return self.followredirects_
  def set_followredirects(self, x):
    self.has_followredirects_ = 1
    self.followredirects_ = x
  def clear_followredirects(self):
    if self.has_followredirects_:
      self.has_followredirects_ = 0
      self.followredirects_ = 1
  def has_followredirects(self): return self.has_followredirects_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_method()): self.set_method(x.method())
    if (x.has_url()): self.set_url(x.url())
    for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
    if (x.has_payload()): self.set_payload(x.payload())
    if (x.has_followredirects()): self.set_followredirects(x.followredirects())
  def Equals(self, x):
    if x is self: return 1
    if self.has_method_ != x.has_method_: return 0
    if self.has_method_ and self.method_ != x.method_: return 0
    if self.has_url_ != x.has_url_: return 0
    if self.has_url_ and self.url_ != x.url_: return 0
    if len(self.header_) != len(x.header_): return 0
    for e1, e2 in zip(self.header_, x.header_):
      if e1 != e2: return 0
    if self.has_payload_ != x.has_payload_: return 0
    if self.has_payload_ and self.payload_ != x.payload_: return 0
    if self.has_followredirects_ != x.has_followredirects_: return 0
    if self.has_followredirects_ and self.followredirects_ != x.followredirects_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_method_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: method not set.')
    if (not self.has_url_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: url not set.')
    for p in self.header_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # +2 covers the method and url tags; each header group costs two
    # one-byte tags (start 27, end 28), hence 2 * len(self.header_).
    n = 0
    n += self.lengthVarInt64(self.method_)
    n += self.lengthString(len(self.url_))
    n += 2 * len(self.header_)
    for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
    if (self.has_payload_): n += 1 + self.lengthString(len(self.payload_))
    if (self.has_followredirects_): n += 2
    return n + 2
  def Clear(self):
    self.clear_method()
    self.clear_url()
    self.clear_header()
    self.clear_payload()
    self.clear_followredirects()
  def OutputUnchecked(self, out):
    out.putVarInt32(8)
    out.putVarInt32(self.method_)
    out.putVarInt32(18)
    out.putPrefixedString(self.url_)
    for i in xrange(len(self.header_)):
      out.putVarInt32(27)
      self.header_[i].OutputUnchecked(out)
      out.putVarInt32(28)
    if (self.has_payload_):
      out.putVarInt32(50)
      out.putPrefixedString(self.payload_)
    if (self.has_followredirects_):
      out.putVarInt32(56)
      out.putBoolean(self.followredirects_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_method(d.getVarInt32())
        continue
      if tt == 18:
        self.set_url(d.getPrefixedString())
        continue
      if tt == 27:
        self.add_header().TryMerge(d)
        continue
      if tt == 50:
        self.set_payload(d.getPrefixedString())
        continue
      if tt == 56:
        self.set_followredirects(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_method_: res+=prefix+("Method: %s\n" % self.DebugFormatInt32(self.method_))
    if self.has_url_: res+=prefix+("Url: %s\n" % self.DebugFormatString(self.url_))
    cnt=0
    for e in self.header_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Header%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_payload_: res+=prefix+("Payload: %s\n" % self.DebugFormatString(self.payload_))
    if self.has_followredirects_: res+=prefix+("FollowRedirects: %s\n" % self.DebugFormatBool(self.followredirects_))
    return res
  # Field numbers.
  kMethod = 1
  kUrl = 2
  kHeaderGroup = 3
  kHeaderKey = 4
  kHeaderValue = 5
  kPayload = 6
  kFollowRedirects = 7
  _TEXT = (
   "ErrorCode",
   "Method",
   "Url",
   "Header",
   "Key",
   "Value",
   "Payload",
   "FollowRedirects",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class URLFetchResponse_Header(ProtocolBuffer.ProtocolMessage):
  """Generated message: one HTTP header (key/value) of a URLFetchResponse.

  Encoded as protobuf group field 3 of URLFetchResponse: key is string
  field 4 (wire tag 34), value is string field 5 (wire tag 42), and tag 28
  marks the end of the group. Both fields are required. Generated code --
  do not edit the logic by hand.
  """
  # Presence flags and default values (class-level defaults; instances
  # shadow them on first set_*).
  has_key_ = 0
  key_ = ""
  has_value_ = 0
  value_ = ""
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  def key(self): return self.key_
  def set_key(self, x):
    self.has_key_ = 1
    self.key_ = x
  def clear_key(self):
    if self.has_key_:
      self.has_key_ = 0
      self.key_ = ""
  def has_key(self): return self.has_key_
  def value(self): return self.value_
  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x
  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""
  def has_value(self): return self.has_value_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_key()): self.set_key(x.key())
    if (x.has_value()): self.set_value(x.value())
  def Equals(self, x):
    if x is self: return 1
    if self.has_key_ != x.has_key_: return 0
    if self.has_key_ and self.key_ != x.key_: return 0
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_key_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: key not set.')
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized
  def ByteSize(self):
    # +2 accounts for the two one-byte field tags (34 and 42).
    n = 0
    n += self.lengthString(len(self.key_))
    n += self.lengthString(len(self.value_))
    return n + 2
  def Clear(self):
    self.clear_key()
    self.clear_value()
  def OutputUnchecked(self, out):
    out.putVarInt32(34)
    out.putPrefixedString(self.key_)
    out.putVarInt32(42)
    out.putPrefixedString(self.value_)
  def TryMerge(self, d):
    # Loops until the enclosing group's end tag (28); zero tag is malformed.
    while 1:
      tt = d.getVarInt32()
      if tt == 28: break
      if tt == 34:
        self.set_key(d.getPrefixedString())
        continue
      if tt == 42:
        self.set_value(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_key_: res+=prefix+("Key: %s\n" % self.DebugFormatString(self.key_))
    if self.has_value_: res+=prefix+("Value: %s\n" % self.DebugFormatString(self.value_))
    return res
class URLFetchResponse(ProtocolBuffer.ProtocolMessage):
  """Generated message: a urlfetch response.

  Fields: content (optional string, field 1), statuscode (required int,
  field 2), repeated header group (field 3), and contentwastruncated
  (optional bool, field 6, default false). Generated code -- do not edit
  the logic by hand.
  """
  # Presence flags and default values (class-level defaults; instances
  # shadow them on first set_*).
  has_content_ = 0
  content_ = ""
  has_statuscode_ = 0
  statuscode_ = 0
  has_contentwastruncated_ = 0
  contentwastruncated_ = 0
  def __init__(self, contents=None):
    self.header_ = []
    if contents is not None: self.MergeFromString(contents)
  def content(self): return self.content_
  def set_content(self, x):
    self.has_content_ = 1
    self.content_ = x
  def clear_content(self):
    if self.has_content_:
      self.has_content_ = 0
      self.content_ = ""
  def has_content(self): return self.has_content_
  def statuscode(self): return self.statuscode_
  def set_statuscode(self, x):
    self.has_statuscode_ = 1
    self.statuscode_ = x
  def clear_statuscode(self):
    if self.has_statuscode_:
      self.has_statuscode_ = 0
      self.statuscode_ = 0
  def has_statuscode(self): return self.has_statuscode_
  def header_size(self): return len(self.header_)
  def header_list(self): return self.header_
  def header(self, i):
    return self.header_[i]
  def mutable_header(self, i):
    return self.header_[i]
  def add_header(self):
    # Appends a new empty header group and returns it for the caller to fill.
    x = URLFetchResponse_Header()
    self.header_.append(x)
    return x
  def clear_header(self):
    self.header_ = []
  def contentwastruncated(self): return self.contentwastruncated_
  def set_contentwastruncated(self, x):
    self.has_contentwastruncated_ = 1
    self.contentwastruncated_ = x
  def clear_contentwastruncated(self):
    if self.has_contentwastruncated_:
      self.has_contentwastruncated_ = 0
      self.contentwastruncated_ = 0
  def has_contentwastruncated(self): return self.has_contentwastruncated_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_content()): self.set_content(x.content())
    if (x.has_statuscode()): self.set_statuscode(x.statuscode())
    for i in xrange(x.header_size()): self.add_header().CopyFrom(x.header(i))
    if (x.has_contentwastruncated()): self.set_contentwastruncated(x.contentwastruncated())
  def Equals(self, x):
    if x is self: return 1
    if self.has_content_ != x.has_content_: return 0
    if self.has_content_ and self.content_ != x.content_: return 0
    if self.has_statuscode_ != x.has_statuscode_: return 0
    if self.has_statuscode_ and self.statuscode_ != x.statuscode_: return 0
    if len(self.header_) != len(x.header_): return 0
    for e1, e2 in zip(self.header_, x.header_):
      if e1 != e2: return 0
    if self.has_contentwastruncated_ != x.has_contentwastruncated_: return 0
    if self.has_contentwastruncated_ and self.contentwastruncated_ != x.contentwastruncated_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_statuscode_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: statuscode not set.')
    for p in self.header_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # +1 covers the statuscode tag; each header group costs two one-byte
    # tags (start 27, end 28), hence 2 * len(self.header_).
    n = 0
    if (self.has_content_): n += 1 + self.lengthString(len(self.content_))
    n += self.lengthVarInt64(self.statuscode_)
    n += 2 * len(self.header_)
    for i in xrange(len(self.header_)): n += self.header_[i].ByteSize()
    if (self.has_contentwastruncated_): n += 2
    return n + 1
  def Clear(self):
    self.clear_content()
    self.clear_statuscode()
    self.clear_header()
    self.clear_contentwastruncated()
  def OutputUnchecked(self, out):
    if (self.has_content_):
      out.putVarInt32(10)
      out.putPrefixedString(self.content_)
    out.putVarInt32(16)
    out.putVarInt32(self.statuscode_)
    for i in xrange(len(self.header_)):
      out.putVarInt32(27)
      self.header_[i].OutputUnchecked(out)
      out.putVarInt32(28)
    if (self.has_contentwastruncated_):
      out.putVarInt32(48)
      out.putBoolean(self.contentwastruncated_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_content(d.getPrefixedString())
        continue
      if tt == 16:
        self.set_statuscode(d.getVarInt32())
        continue
      if tt == 27:
        self.add_header().TryMerge(d)
        continue
      if tt == 48:
        self.set_contentwastruncated(d.getBoolean())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_content_: res+=prefix+("Content: %s\n" % self.DebugFormatString(self.content_))
    if self.has_statuscode_: res+=prefix+("StatusCode: %s\n" % self.DebugFormatInt32(self.statuscode_))
    cnt=0
    for e in self.header_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Header%s {\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+"}\n"
      cnt+=1
    if self.has_contentwastruncated_: res+=prefix+("ContentWasTruncated: %s\n" % self.DebugFormatBool(self.contentwastruncated_))
    return res
  # Field numbers.
  kContent = 1
  kStatusCode = 2
  kHeaderGroup = 3
  kHeaderKey = 4
  kHeaderValue = 5
  kContentWasTruncated = 6
  _TEXT = (
   "ErrorCode",
   "Content",
   "StatusCode",
   "Header",
   "Key",
   "Value",
   "ContentWasTruncated",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STARTGROUP,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
__all__ = ['URLFetchServiceError','URLFetchRequest','URLFetchRequest_Header','URLFetchResponse','URLFetchResponse_Header']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""The Python datastore API used by app developers.
Defines Entity, Query, and Iterator classes, as well as methods for all of the
datastore's calls. Also defines conversions between the Python classes and
their PB counterparts.
The datastore errors are defined in the datastore_errors module. That module is
only required to avoid circular imports. datastore imports datastore_types,
which needs BadValueError, so it can't be defined in datastore.
"""
import heapq
import itertools
import logging
import re
import string
import sys
import traceback
from xml.sax import saxutils
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.datastore import datastore_index
from google.appengine.datastore import datastore_pb
from google.appengine.runtime import apiproxy_errors
from google.appengine.datastore import entity_pb
MAX_ALLOWABLE_QUERIES = 30
DEFAULT_TRANSACTION_RETRIES = 3
_MAX_INDEXED_PROPERTIES = 5000
# Re-exported aliases so callers can use datastore.Key / datastore.typename.
Key = datastore_types.Key
typename = datastore_types.typename
# Bookkeeping for in-flight transactions; presumably keyed per transaction
# by _MaybeSetupTransaction and friends -- TODO confirm against full module.
_txes = {}
def NormalizeAndTypeCheck(arg, types):
  """Normalizes and type checks the given argument.

  Args:
    arg: an instance, tuple, list, iterator, or generator of the given type(s)
    types: allowed type or tuple of types

  Returns:
    A (list, bool) tuple. The list is a normalized, shallow copy of the
    argument. The boolean is True if the argument was a sequence, False
    if it was a single object.

  Raises:
    AssertionError: types includes list or tuple.
    BadArgumentError: arg is not an instance or sequence of one of the given
      types.
  """
  if not isinstance(types, (list, tuple)):
    types = (types,)
  assert list not in types and tuple not in types
  if isinstance(arg, types):
    return ([arg], False)
  else:
    # Materialize the sequence exactly once BEFORE validating. The previous
    # implementation iterated arg in the validation loop and then called
    # list(arg) again, which silently returned [] for iterators and
    # generators (they were exhausted by the first pass), despite the
    # docstring promising iterator/generator support.
    try:
      arg_list = list(arg)
    except TypeError:
      raise datastore_errors.BadArgumentError(
          'Expected an instance or sequence of %s; received %s (a %s).' %
          (types, arg, typename(arg)))
    for val in arg_list:
      if not isinstance(val, types):
        raise datastore_errors.BadArgumentError(
            'Expected one of %s; received %s (a %s).' %
            (types, val, typename(val)))
    return (arg_list, True)
def NormalizeAndTypeCheckKeys(keys):
  """Normalizes and type checks that the given argument is a valid key or keys.

  A wrapper around NormalizeAndTypeCheck() that accepts strings, Keys, and
  Entities, and normalizes the result to a list of Keys.

  Args:
    keys: a Key or sequence of Keys

  Returns:
    A (list of Keys, bool) tuple. See NormalizeAndTypeCheck.

  Raises:
    BadArgumentError: arg is not an instance or sequence of one of the given
      types.
  """
  normalized, multiple = NormalizeAndTypeCheck(keys, (basestring, Entity, Key))

  # Convert every accepted form (string, Entity, Key) to a complete Key,
  # raising if any key is incomplete.
  complete_keys = []
  for key in normalized:
    complete_keys.append(_GetCompleteKeyOrError(key))

  return (complete_keys, multiple)
def Put(entities):
  """Store one or more entities in the datastore.

  The entities may be new or previously existing. For new entities, Put() will
  fill in the app id and key assigned by the datastore.

  If the argument is a single Entity, a single Key will be returned. If the
  argument is a list of Entity, a list of Keys will be returned.

  Args:
    entities: Entity or list of Entities

  Returns:
    Key or list of Keys

  Raises:
    TransactionFailedError, if the Put could not be committed.
  """
  entities, multiple = NormalizeAndTypeCheck(entities, Entity)
  # Empty list in, empty list out; no RPC is made.
  if multiple and not entities:
    return []
  for entity in entities:
    if not entity.kind() or not entity.app():
      raise datastore_errors.BadRequestError(
          'App and kind must not be empty, in entity: %s' % entity)
  req = datastore_pb.PutRequest()
  req.entity_list().extend([e._ToPb() for e in entities])
  keys = [e.key() for e in entities]
  # If a transaction is in progress, attach it to the request and record the
  # keys that already have an id or name; presumably used for conflict
  # detection -- see the transaction helpers elsewhere in this module.
  tx = _MaybeSetupTransaction(req, keys)
  if tx:
    tx.RecordModifiedKeys([k for k in keys if k.has_id_or_name()])
  resp = datastore_pb.PutResponse()
  try:
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', req, resp)
  except apiproxy_errors.ApplicationError, err:
    # Translate the low-level RPC error into a datastore_errors subclass.
    raise _ToDatastoreError(err)
  keys = resp.key_list()
  num_keys = len(keys)
  num_entities = len(entities)
  # Sanity check: the backend must return exactly one key per entity sent.
  if num_keys != num_entities:
    raise datastore_errors.InternalError(
        'Put accepted %d entities but returned %d keys.' %
        (num_entities, num_keys))
  # Copy the (possibly newly assigned) key PBs back into the caller's Entity
  # objects in place, through the name-mangled private attributes.
  for entity, key in zip(entities, keys):
    entity._Entity__key._Key__reference.CopyFrom(key)
  if tx:
    # Record all keys again now that ids have been assigned; error_on_repeat
    # is disabled because the complete keys were recorded above.
    tx.RecordModifiedKeys([e.key() for e in entities], error_on_repeat=False)
  if multiple:
    return [Key._FromPb(k) for k in keys]
  else:
    return Key._FromPb(resp.key(0))
def Get(keys):
"""Retrieves one or more entities from the datastore.
Retrieves the entity or entities with the given key(s) from the datastore
and returns them as fully populated Entity objects, as defined below. If
there is an error, raises a subclass of datastore_errors.Error.
If keys is a single key or string, an Entity will be returned, or
EntityNotFoundError will be raised if no existing entity matches the key.
However, if keys is a list or tuple, a list of entities will be returned
that corresponds to the sequence of keys. It will include entities for keys
that were found and None placeholders for keys that were not found.
Args:
# the primary key(s) of the entity(ies) to retrieve
keys: Key or string or list of Keys or strings
Returns:
Entity or list of Entity objects
"""
keys, multiple = NormalizeAndTypeCheckKeys(keys)
if multiple and not keys:
return []
req = datastore_pb.GetRequest()
req.key_list().extend([key._Key__reference for key in keys])
_MaybeSetupTransaction(req, keys)
resp = datastore_pb.GetResponse()
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', req, resp)
except apiproxy_errors.ApplicationError, err:
raise _ToDatastoreError(err)
entities = []
for group in resp.entity_list():
if group.has_entity():
entities.append(Entity._FromPb(group.entity()))
else:
entities.append(None)
if multiple:
return entities
else:
if entities[0] is None:
raise datastore_errors.EntityNotFoundError()
return entities[0]
def Delete(keys):
"""Deletes one or more entities from the datastore. Use with care!
Deletes the given entity(ies) from the datastore. You can only delete
entities from your app. If there is an error, raises a subclass of
datastore_errors.Error.
Args:
# the primary key(s) of the entity(ies) to delete
keys: Key or string or list of Keys or strings
Raises:
TransactionFailedError, if the Delete could not be committed.
"""
keys, multiple = NormalizeAndTypeCheckKeys(keys)
if multiple and not keys:
return
req = datastore_pb.DeleteRequest()
req.key_list().extend([key._Key__reference for key in keys])
tx = _MaybeSetupTransaction(req, keys)
if tx:
tx.RecordModifiedKeys(keys)
resp = datastore_pb.DeleteResponse()
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete', req, resp)
except apiproxy_errors.ApplicationError, err:
raise _ToDatastoreError(err)
class Entity(dict):
"""A datastore entity.
Includes read-only accessors for app id, kind, and primary key. Also
provides dictionary-style access to properties.
"""
def __init__(self, kind, parent=None, _app=None, name=None):
"""Constructor. Takes the kind and transaction root, which cannot be
changed after the entity is constructed, and an optional parent. Raises
BadArgumentError or BadKeyError if kind is invalid or parent is not an
existing Entity or Key in the datastore.
Args:
# this entity's kind
kind: string
# if provided, this entity's parent. Its key must be complete.
parent: Entity or Key
# if provided, this entity's name.
name: string
"""
ref = entity_pb.Reference()
_app = datastore_types.ResolveAppId(_app)
ref.set_app(_app)
datastore_types.ValidateString(kind, 'kind',
datastore_errors.BadArgumentError)
if parent is not None:
parent = _GetCompleteKeyOrError(parent)
if _app != parent.app():
raise datastore_errors.BadArgumentError(
"_app %s doesn't match parent's app %s" % (_app, parent.app()))
ref.CopyFrom(parent._Key__reference)
last_path = ref.mutable_path().add_element()
last_path.set_type(kind.encode('utf-8'))
if name is not None:
datastore_types.ValidateString(name, 'name')
if name[0] in string.digits:
raise datastore_errors.BadValueError('name cannot begin with a digit')
last_path.set_name(name.encode('utf-8'))
self.__key = Key._FromPb(ref)
def app(self):
"""Returns the name of the application that created this entity, a
string.
"""
return self.__key.app()
def kind(self):
"""Returns this entity's kind, a string.
"""
return self.__key.kind()
def key(self):
"""Returns this entity's primary key, a Key instance.
"""
return self.__key
def parent(self):
"""Returns this entity's parent, as a Key. If this entity has no parent,
returns None.
"""
return self.key().parent()
def entity_group(self):
"""Returns this entitys's entity group as a Key.
Note that the returned Key will be incomplete if this is a a root entity
and its key is incomplete.
"""
return self.key().entity_group()
def __setitem__(self, name, value):
"""Implements the [] operator. Used to set property value(s).
If the property name is the empty string or not a string, raises
BadPropertyError. If the value is not a supported type, raises
BadValueError.
"""
datastore_types.ValidateProperty(name, value)
dict.__setitem__(self, name, value)
def setdefault(self, name, value):
"""If the property exists, returns its value. Otherwise sets it to value.
If the property name is the empty string or not a string, raises
BadPropertyError. If the value is not a supported type, raises
BadValueError.
"""
datastore_types.ValidateProperty(name, value)
return dict.setdefault(self, name, value)
def update(self, other):
"""Updates this entity's properties from the values in other.
If any property name is the empty string or not a string, raises
BadPropertyError. If any value is not a supported type, raises
BadValueError.
"""
for name, value in other.items():
self.__setitem__(name, value)
def copy(self):
"""The copy method is not supported.
"""
raise NotImplementedError('Entity does not support the copy() method.')
def ToXml(self):
"""Returns an XML representation of this entity. Atom and gd:namespace
properties are converted to XML according to their respective schemas. For
more information, see:
http://www.atomenabled.org/developers/syndication/
http://code.google.com/apis/gdata/common-elements.html
This is *not* optimized. It shouldn't be used anywhere near code that's
performance-critical.
"""
xml = u'<entity kind=%s' % saxutils.quoteattr(self.kind())
if self.__key.has_id_or_name():
xml += ' key=%s' % saxutils.quoteattr(str(self.__key))
xml += '>'
if self.__key.has_id_or_name():
xml += '\n <key>%s</key>' % self.__key.ToTagUri()
properties = self.keys()
if properties:
properties.sort()
xml += '\n ' + '\n '.join(self._PropertiesToXml(properties))
xml += '\n</entity>\n'
return xml
def _PropertiesToXml(self, properties):
""" Returns a list of the XML representations of each of the given
properties. Ignores properties that don't exist in this entity.
Arg:
properties: string or list of strings
Returns:
list of strings
"""
xml_properties = []
for propname in properties:
if not self.has_key(propname):
continue
propname_xml = saxutils.quoteattr(propname)
values = self[propname]
if not isinstance(values, list):
values = [values]
proptype = datastore_types.PropertyTypeName(values[0])
proptype_xml = saxutils.quoteattr(proptype)
escaped_values = self._XmlEscapeValues(propname)
open_tag = u'<property name=%s type=%s>' % (propname_xml, proptype_xml)
close_tag = u'</property>'
xml_properties += [open_tag + val + close_tag for val in escaped_values]
return xml_properties
def _XmlEscapeValues(self, property):
""" Returns a list of the XML-escaped string values for the given property.
Raises an AssertionError if the property doesn't exist.
Arg:
property: string
Returns:
list of strings
"""
assert self.has_key(property)
xml = []
values = self[property]
if not isinstance(values, list):
values = [values]
for val in values:
if hasattr(val, 'ToXml'):
xml.append(val.ToXml())
else:
if val is None:
xml.append('')
else:
xml.append(saxutils.escape(unicode(val)))
return xml
def _ToPb(self):
"""Converts this Entity to its protocol buffer representation. Not
intended to be used by application developers.
Returns:
entity_pb.Entity
"""
pb = entity_pb.EntityProto()
pb.mutable_key().CopyFrom(self.key()._ToPb())
group = pb.mutable_entity_group()
if self.__key.has_id_or_name():
root = pb.key().path().element(0)
group.add_element().CopyFrom(root)
properties = self.items()
properties.sort()
for (name, values) in properties:
properties = datastore_types.ToPropertyPb(name, values)
if not isinstance(properties, list):
properties = [properties]
sample = values
if isinstance(sample, list):
sample = values[0]
if isinstance(sample, datastore_types._RAW_PROPERTY_TYPES):
pb.raw_property_list().extend(properties)
else:
pb.property_list().extend(properties)
if pb.property_size() > _MAX_INDEXED_PROPERTIES:
raise datastore_errors.BadRequestError(
'Too many indexed properties for entity %r.' % self.key())
return pb
@staticmethod
def _FromPb(pb):
"""Static factory method. Returns the Entity representation of the
given protocol buffer (datastore_pb.Entity). Not intended to be used by
application developers.
The Entity PB's key must be complete. If it isn't, an AssertionError is
raised.
Args:
# a protocol buffer Entity
pb: datastore_pb.Entity
Returns:
# the Entity representation of the argument
Entity
"""
assert pb.key().path().element_size() > 0
last_path = pb.key().path().element_list()[-1]
assert last_path.has_id() ^ last_path.has_name()
if last_path.has_id():
assert last_path.id() != 0
else:
assert last_path.has_name()
assert last_path.name()
e = Entity(unicode(last_path.type().decode('utf-8')))
ref = e.__key._Key__reference
ref.CopyFrom(pb.key())
temporary_values = {}
for prop_list in (pb.property_list(), pb.raw_property_list()):
for prop in prop_list:
if not prop.has_multiple():
raise datastore_errors.Error(
'Property %s is corrupt in the datastore; it\'s missing the '
'multiple valued field.' % prop.name())
try:
value = datastore_types.FromPropertyPb(prop)
except (AssertionError, AttributeError, TypeError, ValueError), e:
raise datastore_errors.Error(
'Property %s is corrupt in the datastore. %s: %s' %
(e.__class__, prop.name(), e))
multiple = prop.multiple()
if multiple:
value = [value]
name = prop.name()
cur_value = temporary_values.get(name)
if cur_value is None:
temporary_values[name] = value
elif not multiple:
raise datastore_errors.Error(
'Property %s is corrupt in the datastore; it has multiple '
'values, but is not marked as multiply valued.' % name)
else:
cur_value.extend(value)
for name, value in temporary_values.iteritems():
decoded_name = unicode(name.decode('utf-8'))
datastore_types.ValidateReadProperty(decoded_name, value)
dict.__setitem__(e, decoded_name, value)
return e
class Query(dict):
"""A datastore query.
(Instead of this, consider using appengine.ext.gql.Query! It provides a
query language interface on top of the same functionality.)
Queries are used to retrieve entities that match certain criteria, including
app id, kind, and property filters. Results may also be sorted by properties.
App id and kind are required. Only entities from the given app, of the given
type, are returned. If an ancestor is set, with Ancestor(), only entities
with that ancestor are returned.
Property filters are used to provide criteria based on individual property
values. A filter compares a specific property in each entity to a given
value or list of possible values.
An entity is returned if its property values match *all* of the query's
filters. In other words, filters are combined with AND, not OR. If an
entity does not have a value for a property used in a filter, it is not
returned.
Property filters map filter strings of the form '<property name> <operator>'
to filter values. Use dictionary accessors to set property filters, like so:
> query = Query('Person')
> query['name ='] = 'Ryan'
> query['age >='] = 21
This query returns all Person entities where the name property is 'Ryan',
'Ken', or 'Bret', and the age property is at least 21.
Another way to build this query is:
> query = Query('Person')
> query.update({'name =': 'Ryan', 'age >=': 21})
The supported operators are =, >, <, >=, and <=. Only one inequality
filter may be used per query. Any number of equals filters may be used in
a single Query.
A filter value may be a list or tuple of values. This is interpreted as
multiple filters with the same filter string and different values, all ANDed
together. For example, this query returns everyone with the tags "google"
and "app engine":
> Query('Person', {'tag =': ('google', 'app engine')})
Result entities can be returned in different orders. Use the Order()
method to specify properties that results will be sorted by, and in which
direction.
Note that filters and orderings may be provided at any time before the query
is run. When the query is fully specified, Run() runs the query and returns
an iterator. The query results can be accessed through the iterator.
A query object may be reused after it's been run. Its filters and
orderings can be changed to create a modified query.
If you know how many result entities you need, use Get() to fetch them:
> query = Query('Person', {'age >': 21})
> for person in query.Get(4):
> print 'I have four pints left. Have one on me, %s!' % person['name']
If you don't know how many results you need, or if you need them all, you
can get an iterator over the results by calling Run():
> for person in Query('Person', {'age >': 21}).Run():
> print 'Have a pint on me, %s!' % person['name']
Get() is more efficient than Run(), so use Get() whenever possible.
Finally, the Count() method returns the number of result entities matched by
the query. The returned count is cached; successive Count() calls will not
re-scan the datastore unless the query is changed.
"""
ASCENDING = datastore_pb.Query_Order.ASCENDING
DESCENDING = datastore_pb.Query_Order.DESCENDING
ORDER_FIRST = datastore_pb.Query.ORDER_FIRST
ANCESTOR_FIRST = datastore_pb.Query.ANCESTOR_FIRST
FILTER_FIRST = datastore_pb.Query.FILTER_FIRST
OPERATORS = {'<': datastore_pb.Query_Filter.LESS_THAN,
'<=': datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
'>': datastore_pb.Query_Filter.GREATER_THAN,
'>=': datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
'=': datastore_pb.Query_Filter.EQUAL,
'==': datastore_pb.Query_Filter.EQUAL,
}
INEQUALITY_OPERATORS = frozenset(['<', '<=', '>', '>='])
FILTER_REGEX = re.compile(
'^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS.keys()),
re.IGNORECASE | re.UNICODE)
__kind = None
__app = None
__orderings = None
__cached_count = None
__hint = None
__ancestor = None
__filter_order = None
__filter_counter = 0
__inequality_prop = None
__inequality_count = 0
def __init__(self, kind, filters={}, _app=None):
"""Constructor.
Raises BadArgumentError if kind is not a string. Raises BadValueError or
BadFilterError if filters is not a dictionary of valid filters.
Args:
# kind is required. filters is optional; if provided, it's used
# as an initial set of property filters.
kind: string
filters: dict
"""
datastore_types.ValidateString(kind, 'kind',
datastore_errors.BadArgumentError)
self.__kind = kind
self.__orderings = []
self.__filter_order = {}
self.update(filters)
self.__app = datastore_types.ResolveAppId(_app)
def Order(self, *orderings):
"""Specify how the query results should be sorted.
Result entities will be sorted by the first property argument, then by the
second, and so on. For example, this:
> query = Query('Person')
> query.Order('bday', ('age', Query.DESCENDING))
sorts everyone in order of their birthday, starting with January 1.
People with the same birthday are sorted by age, oldest to youngest.
The direction for each sort property may be provided; if omitted, it
defaults to ascending.
Order() may be called multiple times. Each call resets the sort order
from scratch.
If an inequality filter exists in this Query it must be the first property
passed to Order. Any number of sort orders may be used after the
inequality filter property. Without inequality filters, any number of
filters with different orders may be specified.
Entities with multiple values for an order property are sorted by their
lowest value.
Note that a sort order implies an existence filter! In other words,
Entities without the sort order property are filtered out, and *not*
included in the query results.
If the sort order property has different types in different entities - ie,
if bob['id'] is an int and fred['id'] is a string - the entities will be
grouped first by the property type, then sorted within type. No attempt is
made to compare property values across types.
Raises BadArgumentError if any argument is of the wrong format.
Args:
# the properties to sort by, in sort order. each argument may be either a
# string or (string, direction) 2-tuple.
Returns:
# this query
Query
"""
orderings = list(orderings)
for (order, i) in zip(orderings, range(len(orderings))):
if not (isinstance(order, basestring) or
(isinstance(order, tuple) and len(order) in [2, 3])):
raise datastore_errors.BadArgumentError(
'Order() expects strings or 2- or 3-tuples; received %s (a %s). ' %
(order, typename(order)))
if isinstance(order, basestring):
order = (order,)
datastore_types.ValidateString(order[0], 'sort order property',
datastore_errors.BadArgumentError)
property = order[0]
direction = order[-1]
if direction not in (Query.ASCENDING, Query.DESCENDING):
if len(order) == 3:
raise datastore_errors.BadArgumentError(
'Order() expects Query.ASCENDING or DESCENDING; received %s' %
str(direction))
direction = Query.ASCENDING
orderings[i] = (property, direction)
if (orderings and self.__inequality_prop and
orderings[0][0] != self.__inequality_prop):
raise datastore_errors.BadArgumentError(
'First ordering property must be the same as inequality filter '
'property, if specified for this query; received %s, expected %s' %
(orderings[0][0], self.__inequality_prop))
self.__orderings = orderings
return self
def Hint(self, hint):
"""Sets a hint for how this query should run.
The query hint gives us information about how best to execute your query.
Currently, we can only do one index scan, so the query hint should be used
to indicates which index we should scan against.
Use FILTER_FIRST if your first filter will only match a few results. In
this case, it will be most efficient to scan against the index for this
property, load the results into memory, and apply the remaining filters
and sort orders there.
Similarly, use ANCESTOR_FIRST if the query's ancestor only has a few
descendants. In this case, it will be most efficient to scan all entities
below the ancestor and load them into memory first.
Use ORDER_FIRST if the query has a sort order and the result set is large
or you only plan to fetch the first few results. In that case, we
shouldn't try to load all of the results into memory; instead, we should
scan the index for this property, which is in sorted order.
Note that hints are currently ignored in the v3 datastore!
Arg:
one of datastore.Query.[ORDER_FIRST, ANCESTOR_FIRST, FILTER_FIRST]
Returns:
# this query
Query
"""
if hint not in [self.ORDER_FIRST, self.ANCESTOR_FIRST, self.FILTER_FIRST]:
raise datastore_errors.BadArgumentError(
'Query hint must be ORDER_FIRST, ANCESTOR_FIRST, or FILTER_FIRST.')
self.__hint = hint
return self
def Ancestor(self, ancestor):
"""Sets an ancestor for this query.
This restricts the query to only return result entities that are descended
from a given entity. In other words, all of the results will have the
ancestor as their parent, or parent's parent, or etc.
Raises BadArgumentError or BadKeyError if parent is not an existing Entity
or Key in the datastore.
Args:
# the key must be complete
ancestor: Entity or Key
Returns:
# this query
Query
"""
key = _GetCompleteKeyOrError(ancestor)
self.__ancestor = datastore_pb.Reference()
self.__ancestor.CopyFrom(key._Key__reference)
return self
def Run(self):
"""Runs this query.
If a filter string is invalid, raises BadFilterError. If a filter value is
invalid, raises BadValueError. If an IN filter is provided, and a sort
order on another property is provided, raises BadQueryError.
If you know in advance how many results you want, use Get() instead. It's
more efficient.
Returns:
# an iterator that provides access to the query results
Iterator
"""
return self._Run()
def _Run(self, limit=None, offset=None):
"""Runs this query, with an optional result limit and an optional offset.
Identical to Run, with the extra optional limit and offset parameters.
limit and offset must both be integers >= 0.
This is not intended to be used by application developers. Use Get()
instead!
"""
if _CurrentTransactionKey():
raise datastore_errors.BadRequestError(
"Can't query inside a transaction.")
pb = self._ToPb(limit, offset)
result = datastore_pb.QueryResult()
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', pb, result)
except apiproxy_errors.ApplicationError, err:
try:
_ToDatastoreError(err)
except datastore_errors.NeedIndexError, exc:
yaml = datastore_index.IndexYamlForQuery(
*datastore_index.CompositeIndexForQuery(pb)[1:-1])
raise datastore_errors.NeedIndexError(
str(exc) + '\nThis query needs this index:\n' + yaml)
return Iterator._FromPb(result.cursor())
def Get(self, limit, offset=0):
"""Fetches and returns a maximum number of results from the query.
This method fetches and returns a list of resulting entities that matched
the query. If the query specified a sort order, entities are returned in
that order. Otherwise, the order is undefined.
The limit argument specifies the maximum number of entities to return. If
it's greater than the number of remaining entities, all of the remaining
entities are returned. In that case, the length of the returned list will
be smaller than limit.
The offset argument specifies the number of entities that matched the
query criteria to skip before starting to return results. The limit is
applied after the offset, so if you provide a limit of 10 and an offset of 5
and your query matches 20 records, the records whose index is 0 through 4
will be skipped and the records whose index is 5 through 14 will be
returned.
The results are always returned as a list. If there are no results left,
an empty list is returned.
If you know in advance how many results you want, this method is more
efficient than Run(), since it fetches all of the results at once. (The
datastore backend sets the the limit on the underlying
scan, which makes the scan significantly faster.)
Args:
# the maximum number of entities to return
int or long
# the number of entities to skip
int or long
Returns:
# a list of entities
[Entity, ...]
"""
if not isinstance(limit, (int, long)) or limit <= 0:
raise datastore_errors.BadArgumentError(
'Argument to Get named \'limit\' must be an int greater than 0; '
'received %s (a %s)' % (limit, typename(limit)))
if not isinstance(offset, (int, long)) or offset < 0:
raise datastore_errors.BadArgumentError(
'Argument to Get named \'offset\' must be an int greater than or '
'equal to 0; received %s (a %s)' % (offset, typename(offset)))
return self._Run(limit, offset)._Next(limit)
def Count(self, limit=None):
"""Returns the number of entities that this query matches. The returned
count is cached; successive Count() calls will not re-scan the datastore
unless the query is changed.
Args:
limit, a number. If there are more results than this, stop short and
just return this number. Providing this argument makes the count
operation more efficient.
Returns:
The number of results.
"""
if self.__cached_count:
return self.__cached_count
resp = api_base_pb.Integer64Proto()
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Count',
self._ToPb(limit=limit), resp)
except apiproxy_errors.ApplicationError, err:
raise _ToDatastoreError(err)
else:
self.__cached_count = resp.value()
return self.__cached_count
def __iter__(self):
raise NotImplementedError(
'Query objects should not be used as iterators. Call Run() first.')
def __setitem__(self, filter, value):
"""Implements the [] operator. Used to set filters.
If the filter string is empty or not a string, raises BadFilterError. If
the value is not a supported type, raises BadValueError.
"""
if isinstance(value, tuple):
value = list(value)
datastore_types.ValidateProperty(' ', value, read_only=True)
match = self._CheckFilter(filter, value)
property = match.group(1)
operator = match.group(3)
dict.__setitem__(self, filter, value)
if operator in self.INEQUALITY_OPERATORS:
if self.__inequality_prop is None:
self.__inequality_prop = property
else:
assert self.__inequality_prop == property
self.__inequality_count += 1
if filter not in self.__filter_order:
self.__filter_order[filter] = self.__filter_counter
self.__filter_counter += 1
self.__cached_count = None
def setdefault(self, filter, value):
"""If the filter exists, returns its value. Otherwise sets it to value.
If the property name is the empty string or not a string, raises
BadPropertyError. If the value is not a supported type, raises
BadValueError.
"""
datastore_types.ValidateProperty(' ', value)
self._CheckFilter(filter, value)
self.__cached_count = None
return dict.setdefault(self, filter, value)
def __delitem__(self, filter):
"""Implements the del [] operator. Used to remove filters.
"""
dict.__delitem__(self, filter)
del self.__filter_order[filter]
self.__cached_count = None
match = Query.FILTER_REGEX.match(filter)
property = match.group(1)
operator = match.group(3)
if operator in self.INEQUALITY_OPERATORS:
assert self.__inequality_count >= 1
assert property == self.__inequality_prop
self.__inequality_count -= 1
if self.__inequality_count == 0:
self.__inequality_prop = None
def update(self, other):
"""Updates this query's filters from the ones in other.
If any filter string is invalid, raises BadFilterError. If any value is
not a supported type, raises BadValueError.
"""
for filter, value in other.items():
self.__setitem__(filter, value)
def copy(self):
"""The copy method is not supported.
"""
raise NotImplementedError('Query does not support the copy() method.')
def _CheckFilter(self, filter, values):
"""Type check a filter string and list of values.
Raises BadFilterError if the filter string is empty, not a string, or
invalid. Raises BadValueError if the value type is not supported.
Args:
filter: String containing the filter text.
values: List of associated filter values.
Returns:
re.MatchObject (never None) that matches the 'filter'. Group 1 is the
property name, group 3 is the operator. (Group 2 is unused.)
"""
try:
match = Query.FILTER_REGEX.match(filter)
if not match:
raise datastore_errors.BadFilterError(
'Could not parse filter string: %s' % str(filter))
except TypeError:
raise datastore_errors.BadFilterError(
'Could not parse filter string: %s' % str(filter))
property = match.group(1)
operator = match.group(3)
if operator is None:
operator = '='
if isinstance(values, tuple):
values = list(values)
elif not isinstance(values, list):
values = [values]
if isinstance(values[0], datastore_types._RAW_PROPERTY_TYPES):
raise datastore_errors.BadValueError(
'Filtering on %s properties is not supported.' % typename(values[0]))
if operator in self.INEQUALITY_OPERATORS:
if self.__inequality_prop and property != self.__inequality_prop:
raise datastore_errors.BadFilterError(
'Only one property per query may have inequality filters (%s).' %
', '.join(self.INEQUALITY_OPERATORS))
elif len(self.__orderings) >= 1 and self.__orderings[0][0] != property:
raise datastore_errors.BadFilterError(
'Inequality operators (%s) must be on the same property as the '
'first sort order, if any sort orders are supplied' %
', '.join(self.INEQUALITY_OPERATORS))
if property in datastore_types._SPECIAL_PROPERTIES:
if property == datastore_types._KEY_SPECIAL_PROPERTY:
for value in values:
if not isinstance(value, Key):
raise datastore_errors.BadFilterError(
'%s filter value must be a Key; received %s (a %s)' %
(datastore_types._KEY_SPECIAL_PROPERTY, value, typename(value)))
return match
def _ToPb(self, limit=None, offset=None):
"""Converts this Query to its protocol buffer representation. Not
intended to be used by application developers. Enforced by hiding the
datastore_pb classes.
Args:
# an upper bound on the number of results returned by the query.
limit: int
# number of results that match the query to skip. limit is applied
# after the offset is fulfilled
offset: int
Returns:
# the PB representation of this Query
datastore_pb.Query
"""
pb = datastore_pb.Query()
pb.set_kind(self.__kind.encode('utf-8'))
if self.__app:
pb.set_app(self.__app.encode('utf-8'))
if limit is not None:
pb.set_limit(limit)
if offset is not None:
pb.set_offset(offset)
if self.__ancestor:
pb.mutable_ancestor().CopyFrom(self.__ancestor)
if ((self.__hint == self.ORDER_FIRST and self.__orderings) or
(self.__hint == self.ANCESTOR_FIRST and self.__ancestor) or
(self.__hint == self.FILTER_FIRST and len(self) > 0)):
pb.set_hint(self.__hint)
ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
ordered_filters.sort()
for i, filter_str in ordered_filters:
if filter_str not in self:
continue
values = self[filter_str]
match = self._CheckFilter(filter_str, values)
name = match.group(1)
props = datastore_types.ToPropertyPb(name, values)
if not isinstance(props, list):
props = [props]
op = match.group(3)
if op is None:
op = '='
for prop in props:
filter = pb.add_filter()
filter.set_op(self.OPERATORS[op])
filter.add_property().CopyFrom(prop)
for property, direction in self.__orderings:
order = pb.add_order()
order.set_property(property.encode('utf-8'))
order.set_direction(direction)
return pb
class MultiQuery(Query):
  """Class representing a query which requires multiple datastore queries.

  This class is actually a subclass of datastore.Query as it is intended to act
  like a normal Query object (supporting the same interface).

  A MultiQuery fans a single logical query out over several bound subqueries
  (one per IN/!= filter value) and merges their result streams, de-duplicating
  entities that match more than one subquery.
  """

  def __init__(self, bound_queries, orderings):
    # Each IN/!= filter multiplies the number of subqueries; cap the fan-out
    # so the merge below stays tractable.
    if len(bound_queries) > MAX_ALLOWABLE_QUERIES:
      raise datastore_errors.BadArgumentError(
          'Cannot satisfy query -- too many subqueries (max: %d, got %d).'
          ' Probable cause: too many IN/!= filters in query.' %
          (MAX_ALLOWABLE_QUERIES, len(bound_queries)))
    self.__bound_queries = bound_queries
    self.__orderings = orderings

  def __str__(self):
    # Concatenates the string form of every subquery.
    res = 'MultiQuery: '
    for query in self.__bound_queries:
      res = '%s %s' % (res, str(query))
    return res

  def Get(self, limit, offset=0):
    """Get results of the query with a limit on the number of results.

    Args:
      limit: maximum number of values to return.
      offset: offset requested -- if nonzero, this will override the offset in
        the original query

    Returns:
      A list of entities with at most "limit" entries (less if the query
      completes before reading limit values).
    """
    count = 1
    result = []

    iterator = self.Run()

    # Skip the first 'offset' merged results; the merged stream may end early,
    # in which case the result is simply empty.
    try:
      for i in xrange(offset):
        val = iterator.next()
    except StopIteration:
      pass

    # Pull up to 'limit' results from the merged, de-duplicated stream.
    try:
      while count <= limit:
        val = iterator.next()
        result.append(val)
        count += 1
    except StopIteration:
      pass
    return result

  class SortOrderEntity(object):
    """Allow entity comparisons using provided orderings.

    The iterator passed to the constructor is eventually consumed via
    calls to GetNext(), which generate new SortOrderEntity s with the
    same orderings.
    """

    def __init__(self, entity_iterator, orderings):
      """Ctor.

      Args:
        entity_iterator: an iterator of entities which will be wrapped.
        orderings: an iterable of (identifier, order) pairs. order
          should be either Query.ASCENDING or Query.DESCENDING.
      """
      self.__entity_iterator = entity_iterator
      self.__entity = None
      self.__min_max_value_cache = {}
      try:
        self.__entity = entity_iterator.next()
      except StopIteration:
        pass
      else:
        # __orderings is only bound when the iterator yielded an entity;
        # exhausted wrappers never reach the ordering loop in CmpProperties.
        self.__orderings = orderings

    def __str__(self):
      return str(self.__entity)

    def GetEntity(self):
      """Gets the wrapped entity."""
      return self.__entity

    def GetNext(self):
      """Wrap and return the next entity.

      The entity is retrieved from the iterator given at construction time.
      """
      return MultiQuery.SortOrderEntity(self.__entity_iterator,
                                        self.__orderings)

    def CmpProperties(self, that):
      """Compare two entities and return their relative order.

      Compares self to that based on the current sort orderings and the
      key orders between them. Returns negative, 0, or positive depending on
      whether self is less, equal to, or greater than that. This
      comparison returns as if all values were to be placed in ascending order
      (highest value last). Only uses the sort orderings to compare (ignores
      keys).

      Args:
        that: SortOrderEntity

      Returns:
        Negative if self < that
        Zero if self == that
        Positive if self > that
      """
      # A wrapper with no entity (exhausted iterator) sorts purely by the
      # truthiness of the wrapped entities.
      if not self.__entity:
        return cmp(self.__entity, that.__entity)

      for (identifier, order) in self.__orderings:
        value1 = self.__GetValueForId(self, identifier, order)
        value2 = self.__GetValueForId(that, identifier, order)

        result = cmp(value1, value2)
        if order == Query.DESCENDING:
          result = -result
        if result:
          return result
      return 0

    def __GetValueForId(self, sort_order_entity, identifier, sort_order):
      # Pick a single representative value for a (possibly multi-valued)
      # property: the min for descending orders, the max otherwise, cached
      # per (entity key, property) pair so repeated comparisons are cheap.
      value = sort_order_entity.__entity[identifier]
      entity_key = sort_order_entity.__entity.key()
      if (entity_key, identifier) in self.__min_max_value_cache:
        value = self.__min_max_value_cache[(entity_key, identifier)]
      elif isinstance(value, list):
        if sort_order == Query.DESCENDING:
          value = min(value)
        else:
          value = max(value)
        self.__min_max_value_cache[(entity_key, identifier)] = value
      return value

    def __cmp__(self, that):
      """Compare self to that w.r.t. values defined in the sort order.

      Compare an entity with another, using sort-order first, then the key
      order to break ties. This can be used in a heap to have faster min-value
      lookup.

      Args:
        that: other entity to compare to

      Returns:
        negative: if self is less than that in sort order
        zero: if self is equal to that in sort order
        positive: if self is greater than that in sort order
      """
      property_compare = self.CmpProperties(that)
      if property_compare:
        return property_compare
      else:
        # Tie-break on the entity key so the heap ordering is total.
        return cmp(self.__entity.key(), that.__entity.key())

  def Run(self):
    """Return an iterable output with all results in order."""
    results = []
    count = 1
    log_level = logging.DEBUG - 1
    for bound_query in self.__bound_queries:
      logging.log(log_level, 'Running query #%i' % count)
      results.append(bound_query.Run())
      count += 1

    def IterateResults(results):
      """Iterator function to return all results in sorted order.

      Iterate over the array of results, yielding the next element, in
      sorted order. This function is destructive (results will be empty
      when the operation is complete).

      Args:
        results: list of result iterators to merge and iterate through

      Yields:
        The next result in sorted order.
      """
      # Heap of SortOrderEntity wrappers, one per subquery iterator; the
      # smallest entry (per the sort orderings, then key) is always on top.
      result_heap = []
      for result in results:
        heap_value = MultiQuery.SortOrderEntity(result, self.__orderings)
        if heap_value.GetEntity():
          heapq.heappush(result_heap, heap_value)

      # The same entity may match several subqueries; remember yielded keys
      # so each entity is produced at most once.
      used_keys = set()

      while result_heap:
        top_result = heapq.heappop(result_heap)

        results_to_push = []
        if top_result.GetEntity().key() not in used_keys:
          yield top_result.GetEntity()
        else:
          pass

        used_keys.add(top_result.GetEntity().key())

        results_to_push = []
        while result_heap:
          next = heapq.heappop(result_heap)
          if cmp(top_result, next):
            # The heap top differs from the entity just processed; push it
            # back unchanged and stop scanning for duplicates.
            results_to_push.append(next)
            break
          else:
            # Duplicate of the processed entity: advance its iterator instead
            # of re-pushing the same entity.
            results_to_push.append(next.GetNext())
        results_to_push.append(top_result.GetNext())

        # Re-add any non-exhausted wrappers to the heap.
        for popped_result in results_to_push:
          if popped_result.GetEntity():
            heapq.heappush(result_heap, popped_result)

    return IterateResults(results)

  def Count(self, limit=None):
    """Return the number of matched entities for this query.

    Will return the de-duplicated count of results.

    Will call the more efficient Get() function if a limit is given.

    Args:
      limit: maximum number of entries to count (for any result > limit, return
        limit).

    Returns:
      count of the number of entries returned.
    """
    if limit is None:
      count = 0
      for i in self.Run():
        count += 1
      return count
    else:
      return len(self.Get(limit))

  def __setitem__(self, query_filter, value):
    """Add a new filter by setting it on all subqueries.

    If any of the setting operations raise an exception, the ones
    that succeeded are undone and the exception is propagated
    upward.

    Args:
      query_filter: a string of the form "property operand".
      value: the value that the given property is compared against.
    """
    saved_items = []
    for index, query in enumerate(self.__bound_queries):
      saved_items.append(query.get(query_filter, None))
      try:
        query[query_filter] = value
      except:
        # Roll back the subqueries modified so far, then re-raise.
        for q, old_value in itertools.izip(self.__bound_queries[:index],
                                           saved_items):
          if old_value is not None:
            q[query_filter] = old_value
          else:
            del q[query_filter]
        raise

  def __delitem__(self, query_filter):
    """Delete a filter by deleting it from all subqueries.

    If a KeyError is raised during the attempt, it is ignored, unless
    every subquery raised a KeyError. If any other exception is
    raised, any deletes will be rolled back.

    Args:
      query_filter: the filter to delete.

    Raises:
      KeyError: No subquery had an entry containing query_filter.
    """
    subquery_count = len(self.__bound_queries)
    keyerror_count = 0
    saved_items = []
    for index, query in enumerate(self.__bound_queries):
      try:
        saved_items.append(query.get(query_filter, None))
        del query[query_filter]
      except KeyError:
        keyerror_count += 1
      except:
        # Restore the filters deleted so far, then re-raise.
        for q, old_value in itertools.izip(self.__bound_queries[:index],
                                           saved_items):
          if old_value is not None:
            q[query_filter] = old_value
        raise

    if keyerror_count == subquery_count:
      raise KeyError(query_filter)

  def __iter__(self):
    return iter(self.__bound_queries)
class Iterator(object):
"""An iterator over the results of a datastore query.
Iterators are used to access the results of a Query. An iterator is
obtained by building a Query, then calling Run() on it.
Iterator implements Python's iterator protocol, so results can be accessed
with the for and in statements:
> it = Query('Person').Run()
> for person in it:
> print 'Hi, %s!' % person['name']
"""
def __init__(self, cursor):
self.__cursor = cursor
self.__buffer = []
self.__more_results = True
def _Next(self, count):
"""Returns the next result(s) of the query.
Not intended to be used by application developers. Use the python
iterator protocol instead.
This method returns the next entities from the list of resulting
entities that matched the query. If the query specified a sort
order, entities are returned in that order. Otherwise, the order
is undefined.
The argument specifies the number of entities to return. If it's
greater than the number of remaining entities, all of the
remaining entities are returned. In that case, the length of the
returned list will be smaller than count.
There is an internal buffer for use with the next() method. If
this buffer is not empty, up to 'count' values are removed from
this buffer and returned. It's best not to mix _Next() and
next().
The results are always returned as a list. If there are no results
left, an empty list is returned.
Args:
# the number of entities to return; must be >= 1
count: int or long
Returns:
# a list of entities
[Entity, ...]
"""
if not isinstance(count, (int, long)) or count <= 0:
raise datastore_errors.BadArgumentError(
'Argument to _Next must be an int greater than 0; received %s (a %s)' %
(count, typename(count)))
if self.__buffer:
raise datastore_errors.BadRequestError(
'You can\'t mix next() and _Next()')
if not self.__more_results:
return []
req = datastore_pb.NextRequest()
req.set_count(count)
req.mutable_cursor().CopyFrom(self._ToPb())
result = datastore_pb.QueryResult()
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', req, result)
except apiproxy_errors.ApplicationError, err:
raise _ToDatastoreError(err)
self.__more_results = result.more_results()
ret = [Entity._FromPb(r) for r in result.result_list()]
return ret
_BUFFER_SIZE = 20
def next(self):
if not self.__buffer:
self.__buffer = self._Next(self._BUFFER_SIZE)
try:
return self.__buffer.pop(0)
except IndexError:
raise StopIteration
def __iter__(self): return self
def _ToPb(self):
"""Converts this Iterator to its protocol buffer representation. Not
intended to be used by application developers. Enforced by hiding the
datastore_pb classes.
Returns:
# the PB representation of this Iterator
datastore_pb.Cursor
"""
pb = datastore_pb.Cursor()
pb.set_cursor(self.__cursor)
return pb
@staticmethod
def _FromPb(pb):
"""Static factory method. Returns the Iterator representation of the given
protocol buffer (datastore_pb.Cursor). Not intended to be used by
application developers. Enforced by not hiding the datastore_pb classes.
Args:
# a protocol buffer Cursor
pb: datastore_pb.Cursor
Returns:
# the Iterator representation of the argument
Iterator
"""
return Iterator(pb.cursor())
class _Transaction(object):
"""Encapsulates a transaction currently in progress.
If we've sent a BeginTransaction call, then handle will be a
datastore_pb.Transaction that holds the transaction handle.
If we know the entity group for this transaction, it's stored in the
entity_group attribute, which is set by RecordModifiedKeys().
modified_keys is a set containing the Keys of all entities modified (ie put
or deleted) in this transaction. If an entity is modified more than once, a
BadRequestError is raised.
"""
def __init__(self):
"""Initializes modified_keys to the empty set."""
self.handle = None
self.entity_group = None
self.modified_keys = None
self.modified_keys = set()
def RecordModifiedKeys(self, keys, error_on_repeat=True):
"""Updates the modified keys seen so far.
Also sets entity_group if it hasn't yet been set.
If error_on_repeat is True and any of the given keys have already been
modified, raises BadRequestError.
Args:
keys: sequence of Keys
"""
keys, _ = NormalizeAndTypeCheckKeys(keys)
if keys and not self.entity_group:
self.entity_group = keys[0].entity_group()
keys = set(keys)
if error_on_repeat:
already_modified = self.modified_keys.intersection(keys)
if already_modified:
raise datastore_errors.BadRequestError(
"Can't update entity more than once in a transaction: %r" %
already_modified.pop())
self.modified_keys.update(keys)
def RunInTransaction(function, *args, **kwargs):
  """Runs a function inside a datastore transaction.

  Convenience wrapper around RunInTransactionCustomRetries() that uses the
  default retry count, DEFAULT_TRANSACTION_RETRIES.

  Args:
    # a function to be run inside the transaction
    function: callable
    # positional arguments to pass to the function
    args: variable number of any type

  Returns:
    the function's return value, if any

  Raises:
    TransactionFailedError, if the transaction could not be committed.
  """
  return RunInTransactionCustomRetries(
      DEFAULT_TRANSACTION_RETRIES, function, *args, **kwargs)
def RunInTransactionCustomRetries(retries, function, *args, **kwargs):
  """Runs a function inside a datastore transaction.

  Runs the user-provided function inside a full-featured, ACID datastore
  transaction. Every Put, Get, and Delete call in the function is made within
  the transaction. All entities involved in these calls must belong to the
  same entity group. Queries are not supported.

  The trailing arguments are passed to the function as positional arguments.
  If the function returns a value, that value will be returned by
  RunInTransaction. Otherwise, it will return None.

  The function may raise any exception to roll back the transaction instead of
  committing it. If this happens, the transaction will be rolled back and the
  exception will be re-raised up to RunInTransaction's caller.

  If you want to roll back intentionally, but don't have an appropriate
  exception to raise, you can raise an instance of datastore_errors.Rollback.
  It will cause a rollback, but will *not* be re-raised up to the caller.

  The function may be run more than once, so it should be idempotent. It
  should avoid side effects, and it shouldn't have *any* side effects that
  aren't safe to occur multiple times. This includes modifying the arguments,
  since they persist across invocations of the function. However, this doesn't
  include Put, Get, and Delete calls, of course.

  Example usage:

  > def decrement(key, amount=1):
  >   counter = datastore.Get(key)
  >   counter['count'] -= amount
  >   if counter['count'] < 0:    # don't let the counter go negative
  >     raise datastore_errors.Rollback()
  >   datastore.Put(counter)
  >
  > counter = datastore.Query('Counter', {'name': 'foo'})
  > datastore.RunInTransaction(decrement, counter.key(), amount=5)

  Transactions satisfy the traditional ACID properties. They are:

  - Atomic. All of a transaction's operations are executed or none of them are.

  - Consistent. The datastore's state is consistent before and after a
  transaction, whether it committed or rolled back. Invariants such as
  "every entity has a primary key" are preserved.

  - Isolated. Transactions operate on a snapshot of the datastore. Other
  datastore operations do not see intermediated effects of the transaction;
  they only see its effects after it has committed.

  - Durable. On commit, all writes are persisted to the datastore.

  Nested transactions are not supported.

  Args:
    # number of retries
    retries: integer
    # a function to be run inside the transaction
    function: callable
    # positional arguments to pass to the function
    args: variable number of any type

  Returns:
    the function's return value, if any

  Raises:
    TransactionFailedError, if the transaction could not be committed.
  """
  # _CurrentTransactionKey() is truthy while a transaction frame is on the
  # stack, so a non-None result means we're already inside one.
  if _CurrentTransactionKey():
    raise datastore_errors.BadRequestError(
      'Nested transactions are not supported.')

  if retries < 0:
    raise datastore_errors.BadRequestError(
      'Number of retries should be non-negative number.')

  tx_key = None
  try:
    # The transaction key is this call's own stack frame (see
    # _NewTransactionKey); it indexes the transaction in the global _txes map.
    tx_key = _NewTransactionKey()
    tx = _Transaction()
    _txes[tx_key] = tx

    # One initial attempt plus 'retries' retries on commit collisions.
    for i in range(0, retries + 1):
      tx.modified_keys.clear()

      try:
        result = function(*args, **kwargs)
      except:
        # The user function failed: roll back (best effort), then either
        # swallow an intentional Rollback or re-raise the original exception.
        original_exception = sys.exc_info()

        if tx.handle:
          try:
            resp = api_base_pb.VoidProto()
            apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
                                           tx.handle, resp)
          except:
            # Rollback failures are logged but never mask the user's error.
            exc_info = sys.exc_info()
            logging.info('Exception sending Rollback:\n' +
                         ''.join(traceback.format_exception(*exc_info)))

        type, value, trace = original_exception
        if type is datastore_errors.Rollback:
          return
        else:
          # Py2 three-argument raise preserves the original traceback.
          raise type, value, trace

      if tx.handle:
        try:
          resp = datastore_pb.CommitResponse()
          apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
                                         tx.handle, resp)
        except apiproxy_errors.ApplicationError, err:
          if (err.application_error ==
              datastore_pb.Error.CONCURRENT_TRANSACTION):
            # Another transaction committed to this entity group first;
            # reset our handle/group and retry the whole function.
            logging.warning('Transaction collision for entity group with '
                            'key %r. Retrying...', tx.entity_group)
            tx.handle = None
            tx.entity_group = None
            continue
          else:
            raise _ToDatastoreError(err)

      # Commit succeeded (or no datastore call ever started a transaction).
      return result

    # All retries were consumed by commit collisions.
    raise datastore_errors.TransactionFailedError(
        'The transaction could not be committed. Please try again.')

  finally:
    # Always unregister the transaction and drop the frame reference so the
    # frame (and its locals) can be collected.
    if tx_key in _txes:
      del _txes[tx_key]
    del tx_key
def _MaybeSetupTransaction(request, keys):
  """Begins a transaction, if necessary, and populates it in the request.

  If we're currently inside a transaction, this records the entity group,
  checks that the keys are all in that entity group, creates the transaction
  PB, and sends the BeginTransaction. It then populates the transaction handle
  in the request.

  Raises BadRequestError if the entity has a different entity group than the
  current transaction.

  Args:
    request: GetRequest, PutRequest, or DeleteRequest
    keys: sequence of Keys

  Returns:
    _Transaction if we're inside a transaction, otherwise None
  """
  assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,
                              datastore_pb.DeleteRequest))
  tx_key = None
  try:
    tx_key = _CurrentTransactionKey()
    if tx_key:
      tx = _txes[tx_key]

      # All keys in a transaction must share one entity group: compare each
      # key's group against the recorded group (or the first key's group if
      # none has been recorded yet).
      groups = [k.entity_group() for k in keys]
      if tx.entity_group:
        expected_group = tx.entity_group
      else:
        expected_group = groups[0]
      for group in groups:
        if (group != expected_group or

            (not group.has_id_or_name() and group is not expected_group)):
          # _DifferentEntityGroupError always raises; the 'raise' here makes
          # that explicit at the call site.
          raise _DifferentEntityGroupError(expected_group, group)

      # Lazily send BeginTransaction on the first datastore call made inside
      # the transaction.
      if not tx.handle:
        tx.handle = datastore_pb.Transaction()
        req = api_base_pb.VoidProto()
        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction', req,
                                       tx.handle)

      request.mutable_transaction().CopyFrom(tx.handle)
      return tx
  finally:
    # tx_key is a stack frame; drop the reference promptly so the frame and
    # its locals aren't kept alive.
    del tx_key
def _DifferentEntityGroupError(a, b):
  """Raises a BadRequestError that says the given entity groups are different.

  Includes the two entity groups in the message, formatted more clearly and
  concisely than repr(Key).

  Args:
    a, b are both Keys that represent entity groups.
  """
  def describe(key):
    # Prefer the name when present; fall back to the numeric id.
    if key.name():
      return 'name=%r' % key.name()
    return 'id=%r' % key.id()

  raise datastore_errors.BadRequestError(
      'Cannot operate on different entity groups in a transaction: '
      '(kind=%r, %s) and (kind=%r, %s).' % (a.kind(), describe(a),
                                            b.kind(), describe(b)))
def _FindTransactionFrameInStack():
"""Walks the stack to find a RunInTransaction() call.
Returns:
# this is the RunInTransactionCustomRetries() frame record, if found
frame record or None
"""
frame = sys._getframe()
filename = frame.f_code.co_filename
frame = frame.f_back.f_back
while frame:
if (frame.f_code.co_filename == filename and
frame.f_code.co_name == 'RunInTransactionCustomRetries'):
return frame
frame = frame.f_back
return None
# A "transaction key" is the stack frame of the active
# RunInTransactionCustomRetries() call; it doubles as the index into _txes.
_CurrentTransactionKey = _FindTransactionFrameInStack
# A new key is minted by grabbing the current frame from inside
# RunInTransactionCustomRetries() itself.
_NewTransactionKey = sys._getframe
def _GetCompleteKeyOrError(arg):
  """Expects an Entity or a Key, and returns the corresponding Key. Raises
  BadArgumentError or BadKeyError if arg is a different type or is incomplete.

  Args:
    arg: Entity or Key (or a string encoding of a Key)

  Returns:
    Key

  Raises:
    BadArgumentError: arg is not an Entity, Key, or string.
    BadKeyError: the resulting key has neither an id nor a name.
  """
  if isinstance(arg, Key):
    key = arg
  elif isinstance(arg, basestring):
    key = Key(arg)
  elif isinstance(arg, Entity):
    key = arg.key()
  else:
    # Fix: the previous 'elif not isinstance(arg, Key)' was always true at
    # this point (Key was handled by the first branch), so a plain else is
    # equivalent and makes clear that every type is covered.
    raise datastore_errors.BadArgumentError(
      'Expects argument to be an Entity or Key; received %s (a %s).' %
      (arg, typename(arg)))
  assert isinstance(key, Key)

  # A key is complete only once it carries an id or a name.
  if not key.has_id_or_name():
    raise datastore_errors.BadKeyError('Key %r is not complete.' % key)

  return key
def _AddOrAppend(dictionary, key, value):
"""Adds the value to the existing values in the dictionary, if any.
If dictionary[key] doesn't exist, sets dictionary[key] to value.
If dictionary[key] is not a list, sets dictionary[key] to [old_value, value].
If dictionary[key] is a list, appends value to that list.
Args:
dictionary: a dict
key, value: anything
"""
if key in dictionary:
existing_value = dictionary[key]
if isinstance(existing_value, list):
existing_value.append(value)
else:
dictionary[key] = [existing_value, value]
else:
dictionary[key] = value
def _ToDatastoreError(err):
  """Converts an apiproxy.ApplicationError to an error in datastore_errors.

  NOTE: despite looking like a converter, this function *raises* the mapped
  error instead of returning it; callers in this module write
  "raise _ToDatastoreError(err)", which works either way.

  Args:
    err: apiproxy.ApplicationError

  Raises:
    a subclass of datastore_errors.Error (or datastore_errors.Error itself
    for unrecognized error codes)
  """
  # Maps datastore_pb error codes to datastore_errors exception classes.
  errors = {
      datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,
      datastore_pb.Error.CONCURRENT_TRANSACTION:
      datastore_errors.TransactionFailedError,
      datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
      datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
      datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
      }

  if err.application_error in errors:
    raise errors[err.application_error](err.error_detail)
  else:
    raise datastore_errors.Error(err.error_detail)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""URL downloading API.
Methods defined in this module:
   Fetch(): fetches a given URL using an HTTP GET or POST
"""
import os
import UserDict
import urllib2
import urlparse
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import urlfetch_service_pb
from google.appengine.api.urlfetch_errors import *
from google.appengine.runtime import apiproxy_errors
# Maximum number of redirects the fetch service follows transparently.
MAX_REDIRECTS = 5

# Numeric constants for the HTTP methods accepted by fetch().
GET = 1
POST = 2
HEAD = 3
PUT = 4
DELETE = 5

# Maps method name strings to the numeric constants above, so fetch() can
# accept either form.
_URL_STRING_MAP = {
    'GET': GET,
    'POST': POST,
    'HEAD': HEAD,
    'PUT': PUT,
    'DELETE': DELETE,
}

# The complete set of numeric method values fetch() will accept.
_VALID_METHODS = frozenset(_URL_STRING_MAP.values())
class _CaselessDict(UserDict.IterableUserDict):
  """Case insensitive dictionary.

  Keys compare case-insensitively, while the most recently set capitalization
  of each key is preserved in the underlying data dict.

  This class was lifted from os.py and slightly modified.
  """

  def __init__(self):
    UserDict.IterableUserDict.__init__(self)
    # Maps lowercased key -> the exact cased key currently stored in
    # self.data.
    self.caseless_keys = {}

  def __setitem__(self, key, item):
    """Set dictionary item.

    Args:
      key: Key of new item.  Key is case insensitive, so "d['Key'] = value "
        will replace previous values set by "d['key'] = old_value".
      item: Item to store.
    """
    caseless_key = key.lower()

    # A key differing only in case already exists; remove its entry before
    # storing the new capitalization.
    if caseless_key in self.caseless_keys:
      del self.data[self.caseless_keys[caseless_key]]
    self.caseless_keys[caseless_key] = key
    self.data[key] = item

  def __getitem__(self, key):
    """Get dictionary item.

    Args:
      key: Key of item to get.  Key is case insensitive, so "d['Key']" is the
        same as "d['key']".

    Returns:
      Item associated with key.
    """
    return self.data[self.caseless_keys[key.lower()]]

  def __delitem__(self, key):
    """Remove item from dictionary.

    Args:
      key: Key of item to remove.  Key is case insensitive, so "del d['Key']"
        is the same as "del d['key']"
    """
    caseless_key = key.lower()
    del self.data[self.caseless_keys[caseless_key]]
    del self.caseless_keys[caseless_key]

  def has_key(self, key):
    """Determine if dictionary has item with specific key.

    Args:
      key: Key to check for presence.  Key is case insensitive, so
        "d.has_key('Key')" evaluates to the same value as "d.has_key('key')".

    Returns:
      True if dictionary contains key, else False.
    """
    return key.lower() in self.caseless_keys

  def __contains__(self, key):
    """Same as 'has_key', but used for 'in' operator.'"""
    return self.has_key(key)

  def get(self, key, failobj=None):
    """Get dictionary item, defaulting to another value if it does not exist.

    Args:
      key: Key of item to get.  Key is case insensitive, so "d['Key']" is the
        same as "d['key']".
      failobj: Value to return if key not in dictionary.
    """
    try:
      cased_key = self.caseless_keys[key.lower()]
    except KeyError:
      return failobj
    return self.data[cased_key]

  def update(self, dict=None, **kwargs):
    """Update dictionary using values from another dictionary and keywords.

    Args:
      dict: Dictionary to update from.
      kwargs: Keyword arguments to update from.
    """
    if dict:
      try:
        keys = dict.keys()
      except AttributeError:
        # Not a mapping: assume a sequence of (key, value) pairs.
        for k, v in dict:
          self[k] = v
      else:
        for k in keys:
          self[k] = dict[k]
    if kwargs:
      # Recurse once with the keyword arguments packed into a real dict.
      self.update(kwargs)

  def copy(self):
    """Make a shallow, case sensitive copy of self."""
    return dict(self)
def _is_fetching_self(url, method):
  """Checks if the fetch is for the same URL from which it originated.

  Args:
    url: str, The URL being fetched.
    method: value from _VALID_METHODS.

  Returns:
    boolean indicating whether or not it seems that the app is trying to fetch
      itself.
  """
  # Only a GET aimed at our own host+path counts as self-fetching, and we
  # can only tell when the CGI environment describes the current request.
  if method != GET:
    return False
  if "HTTP_HOST" not in os.environ or "PATH_INFO" not in os.environ:
    return False

  scheme, host_port, path, query, fragment = urlparse.urlsplit(url)
  if host_port != os.environ['HTTP_HOST']:
    return False

  current_path = urllib2.unquote(os.environ['PATH_INFO'])
  desired_path = urllib2.unquote(path)
  # '' and '/' both denote the root path.
  return (current_path == desired_path or
          (current_path in ('', '/') and desired_path in ('', '/')))
def fetch(url, payload=None, method=GET, headers={}, allow_truncated=False,
follow_redirects=True):
"""Fetches the given HTTP URL, blocking until the result is returned.
Other optional parameters are:
method: GET, POST, HEAD, PUT, or DELETE
payload: POST or PUT payload (implies method is not GET, HEAD, or DELETE)
headers: dictionary of HTTP headers to send with the request
allow_truncated: if true, truncate large responses and return them without
error. otherwise, ResponseTooLargeError will be thrown when a response is
truncated.
follow_redirects: if true (the default), redirects are
transparently followed and the response (if less than 5
redirects) contains the final destination's payload and the
response status is 200. You lose, however, the redirect chain
information. If false, you see the HTTP response yourself,
including the 'Location' header, and redirects are not
followed.
We use a HTTP/1.1 compliant proxy to fetch the result.
The returned data structure has the following fields:
content: string containing the response from the server
status_code: HTTP status code returned by the server
headers: dictionary of headers returned by the server
If the URL is an empty string or obviously invalid, we throw an
urlfetch.InvalidURLError. If the server cannot be contacted, we throw a
urlfetch.DownloadError. Note that HTTP errors are returned as a part
of the returned structure, so HTTP errors like 404 do not result in an
exception.
"""
if isinstance(method, basestring):
method = method.upper()
method = _URL_STRING_MAP.get(method, method)
if method not in _VALID_METHODS:
raise InvalidMethodError('Invalid method %s.' % str(method))
if _is_fetching_self(url, method):
raise InvalidURLError("App cannot fetch the same URL as the one used for "
"the request.")
request = urlfetch_service_pb.URLFetchRequest()
response = urlfetch_service_pb.URLFetchResponse()
request.set_url(url)
if method == GET:
request.set_method(urlfetch_service_pb.URLFetchRequest.GET)
elif method == POST:
request.set_method(urlfetch_service_pb.URLFetchRequest.POST)
elif method == HEAD:
request.set_method(urlfetch_service_pb.URLFetchRequest.HEAD)
elif method == PUT:
request.set_method(urlfetch_service_pb.URLFetchRequest.PUT)
elif method == DELETE:
request.set_method(urlfetch_service_pb.URLFetchRequest.DELETE)
if payload and (method == POST or method == PUT):
request.set_payload(payload)
for key, value in headers.iteritems():
header_proto = request.add_header()
header_proto.set_key(key)
header_proto.set_value(str(value))
request.set_followredirects(follow_redirects)
try:
apiproxy_stub_map.MakeSyncCall('urlfetch', 'Fetch', request, response)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
urlfetch_service_pb.URLFetchServiceError.INVALID_URL):
raise InvalidURLError(str(e))
if (e.application_error ==
urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR):
raise DownloadError(str(e))
if (e.application_error ==
urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR):
raise DownloadError(str(e))
if (e.application_error ==
urlfetch_service_pb.URLFetchServiceError.RESPONSE_TOO_LARGE):
raise ResponseTooLargeError(None)
if (e.application_error ==
urlfetch_service_pb.URLFetchServiceError.DEADLINE_EXCEEDED):
raise DownloadError(str(e))
raise e
result = _URLFetchResult(response)
if not allow_truncated and response.contentwastruncated():
raise ResponseTooLargeError(result)
return result
# CamelCase alias for fetch(), matching the module docstring's Fetch() name.
Fetch = fetch
class _URLFetchResult(object):
  """A Pythonic representation of our fetch response protocol buffer."""

  def __init__(self, response_proto):
    """Copies content, status code and headers out of the response proto."""
    self.__pb = response_proto
    self.content = response_proto.content()
    self.status_code = response_proto.statuscode()
    self.content_was_truncated = response_proto.contentwastruncated()
    # Expose headers through a case-insensitive dictionary.
    caseless_headers = _CaselessDict()
    for header in response_proto.header_list():
      caseless_headers[header.key()] = header.value()
    self.headers = caseless_headers
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Container of APIProxy stubs for more convenient unittesting.
Classes/variables/functions defined here:
APIProxyStubMap: container of APIProxy stubs.
apiproxy: global instance of an APIProxyStubMap.
MakeSyncCall: APIProxy entry point.
"""
import inspect
import sys
def CreateRPC(service):
  """Creates a RPC instance for the given service.

  The instance is suitable for talking to remote services.
  Each RPC instance can be used only once, and should not be reused.

  Args:
    service: string representing which service to call.

  Returns:
    the rpc object.

  Raises:
    AssertionError or RuntimeError if the stub for service doesn't supply a
    CreateRPC method.
  """
  stub = apiproxy.GetStub(service)
  assert stub, 'No api proxy found for service "%s"' % service
  # Fix: the message was previously built as
  #   'The service "%s" ... ' + 'a CreateRPC method.' % service
  # where % bound to the second literal only (which has no placeholder), so
  # formatting raised TypeError whenever the assert actually fired. Adjacent
  # string literals let % format the full message.
  assert hasattr(stub, 'CreateRPC'), ('The service "%s" doesn\'t have '
                                      'a CreateRPC method.' % service)
  return stub.CreateRPC()
def MakeSyncCall(service, call, request, response):
  """The APIProxy entry point for a synchronous API call.

  Delegates to the module-global 'apiproxy' APIProxyStubMap instance.

  Args:
    service: string representing which service to call
    call: string representing which function to call
    request: protocol buffer for the request
    response: protocol buffer for the response

  Raises:
    apiproxy_errors.Error or a subclass.
  """
  apiproxy.MakeSyncCall(service, call, request, response)
class ListOfHooks(object):
  """An ordered collection of hooks for a particular API call.

  A hook is a function with exactly the same signature as a service stub.
  It will be called before or after an api call is executed, depending on
  whether this list holds precall or postcall hooks. Hooks can be used for
  debugging purposes (checking pre- or postconditions on api calls) or to
  apply patches to protocol buffers before/after a call gets submitted.
  """

  def __init__(self):
    """Constructor."""
    # Ordered (key, function, service) triples.
    self.__content = []
    # (key, module) pairs already registered, for de-duplication.
    self.__unique_keys = set()

  def __len__(self):
    """Returns the amount of elements in the collection."""
    return len(self.__content)

  def __Register(self, index, key, function, service=None):
    """Inserts a hook at a certain position unless it is already registered.

    Args:
      index: the index of where to insert the function
      key: a unique key (within the module) for this particular function.
        If something from the same module with the same key is already
        registered, nothing will be added.
      function: the hook to be added.
      service: optional argument that restricts the hook to a particular api

    Returns:
      True if the collection was modified.
    """
    dedup_key = (key, inspect.getmodule(function))
    if dedup_key in self.__unique_keys:
      return False
    self.__content.insert(index, (key, function, service))
    self.__unique_keys.add(dedup_key)
    return True

  def Append(self, key, function, service=None):
    """Appends a hook at the end of the list.

    Args:
      key: a unique key (within the module) for this particular function.
        If something from the same module with the same key is already
        registered, nothing will be added.
      function: the hook to be added.
      service: optional argument that restricts the hook to a particular api

    Returns:
      True if the collection was modified.
    """
    return self.__Register(len(self), key, function, service)

  def Push(self, key, function, service=None):
    """Inserts a hook at the beginning of the list.

    Args:
      key: a unique key (within the module) for this particular function.
        If something from the same module with the same key is already
        registered, nothing will be added.
      function: the hook to be added.
      service: optional argument that restricts the hook to a particular api

    Returns:
      True if the collection was modified.
    """
    return self.__Register(0, key, function, service)

  def Clear(self):
    """Removes all hooks from the list (useful for unit tests)."""
    del self.__content[:]
    self.__unique_keys.clear()

  def Call(self, service, call, request, response):
    """Invokes all hooks in this collection.

    Hooks bound to a specific service are only invoked for that service;
    unbound hooks run for every service.

    Args:
      service: string representing which service to call
      call: string representing which function to call
      request: protocol buffer for the request
      response: protocol buffer for the response
    """
    for unused_key, function, bound_service in self.__content:
      if bound_service is None or bound_service == service:
        function(service, call, request, response)
class APIProxyStubMap:
  """Container of APIProxy stubs for more convenient unittesting.

  Stubs may be either trivial implementations of APIProxy services (e.g.
  DatastoreFileStub, UserServiceStub) or "real" implementations.

  For unittests, we may want to mix and match real and trivial implementations
  of services in order to better focus testing on individual service
  implementations. To achieve this, we allow the client to attach stubs to
  service names, as well as define a default stub to be used if no specific
  matching stub is identified.
  """

  def __init__(self, default_stub=None):
    """Constructor.

    Args:
      default_stub: optional stub

    'default_stub' will be used whenever no specific matching stub is found.
    """
    self.__stub_map = {}
    self.__default_stub = default_stub
    self.__precall_hooks = ListOfHooks()
    self.__postcall_hooks = ListOfHooks()

  def GetPreCallHooks(self):
    """Gets a collection for all precall hooks."""
    return self.__precall_hooks

  def GetPostCallHooks(self):
    """Gets a collection for all postcall hooks."""
    return self.__postcall_hooks

  def RegisterStub(self, service, stub):
    """Register the provided stub for the specified service.

    Args:
      service: string
      stub: stub
    """
    assert service not in self.__stub_map
    self.__stub_map[service] = stub

    if service == 'datastore':
      # 'datastore' and 'datastore_v3' are aliases for the same service.
      self.RegisterStub('datastore_v3', stub)

  def GetStub(self, service):
    """Retrieve the stub registered for the specified service.

    Args:
      service: string

    Returns:
      stub

    Returns the stub registered for 'service', and returns the default stub
    if no such stub is found.
    """
    return self.__stub_map.get(service, self.__default_stub)

  def MakeSyncCall(self, service, call, request, response):
    """The APIProxy entry point.

    Args:
      service: string representing which service to call
      call: string representing which function to call
      request: protocol buffer for the request
      response: protocol buffer for the response

    Raises:
      apiproxy_errors.Error or a subclass.
    """
    stub = self.GetStub(service)
    assert stub, 'No api proxy found for service "%s"' % service
    self.__precall_hooks.Call(service, call, request, response)
    stub.MakeSyncCall(service, call, request, response)
    self.__postcall_hooks.Call(service, call, request, response)
def GetDefaultAPIProxy():
  """Creates an APIProxyStubMap, wired to the real runtime when available.

  Returns:
    An APIProxyStubMap whose default stub is the App Engine runtime's
    apiproxy when 'google.appengine.runtime' is importable; otherwise an
    empty APIProxyStubMap (e.g. when running outside the runtime).
  """
  try:
    runtime = __import__('google.appengine.runtime', globals(), locals(),
                         ['apiproxy'])
    return APIProxyStubMap(runtime.apiproxy)
  except (AttributeError, ImportError):
    # AttributeError also covers a runtime module lacking 'apiproxy', so the
    # constructor call must stay inside the try block.
    return APIProxyStubMap()
# Module-level singleton stub map used by the free function MakeSyncCall().
apiproxy = GetDefaultAPIProxy()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Higher-level, semantic data types for the datastore. These types
are expected to be set as attributes of Entities. See "Supported Data Types"
in the API Guide.
Most of these types are based on XML elements from Atom and GData elements
from the atom and gd namespaces. For more information, see:
http://www.atomenabled.org/developers/syndication/
http://code.google.com/apis/gdata/common-elements.html
The namespace schemas are:
http://www.w3.org/2005/Atom
http://schemas.google.com/g/2005
"""
import base64
import calendar
import datetime
import os
import re
import string
import time
import urlparse
from xml.sax import saxutils
from google.appengine.datastore import datastore_pb
from google.appengine.api import datastore_errors
from google.appengine.api import users
from google.net.proto import ProtocolBuffer
from google.appengine.datastore import entity_pb
# Maximum length, in bytes, of an indexed (short) string property value.
_MAX_STRING_LENGTH = 500

# Maximum length, in bytes, of a Link property value.
_MAX_LINK_PROPERTY_LENGTH = 2083

# Property names of the form '__name__' are reserved for internal use.
RESERVED_PROPERTY_NAME = re.compile('^__.*__$')

# Special property name that exposes an entity's key.
_KEY_SPECIAL_PROPERTY = '__key__'
_SPECIAL_PROPERTIES = frozenset([_KEY_SPECIAL_PROPERTY])
class UtcTzinfo(datetime.tzinfo):
  """Concrete tzinfo for UTC: zero offset and no daylight saving time."""

  def utcoffset(self, dt):
    return datetime.timedelta(0)

  def dst(self, dt):
    return datetime.timedelta(0)

  def tzname(self, dt):
    return 'UTC'

  def __repr__(self):
    return 'datastore_types.UTC'


# Shared UTC tzinfo singleton.
UTC = UtcTzinfo()
def typename(obj):
  """Returns the name of obj's type, as a string.

  More descriptive and specific than type(obj), and safe for any object,
  unlike __class__.
  """
  cls = getattr(obj, '__class__', None)
  if cls is None:
    return type(obj).__name__
  return cls.__name__
def ValidateString(value,
                   name='unused',
                   exception=datastore_errors.BadValueError,
                   max_len=_MAX_STRING_LENGTH):
  """Raises an exception if value is not a valid string or a subclass thereof.

  A string is valid when it is non-empty, at most max_len bytes once encoded
  as UTF-8, and not a Blob. The exception type can be specified with the
  exception argument; it defaults to BadValueError.

  Args:
    value: the value to validate.
    name: the name of this value; used in the exception message.
    exception: the type of exception to raise.
    max_len: the maximum allowed length, in bytes.
  """
  is_plain_string = (isinstance(value, basestring) and
                     not isinstance(value, Blob))
  if not is_plain_string:
    raise exception('%s should be a string; received %s (a %s):' %
                    (name, value, typename(value)))
  if not value:
    raise exception('%s must not be empty.' % name)

  # Length is measured in encoded bytes, not characters.
  if len(value.encode('utf-8')) > max_len:
    raise exception('%s must be under %d bytes.' % (name, max_len))
def ResolveAppId(app, name='_app'):
  """Validate app id, providing a default.

  If the argument is None, $APPLICATION_ID is substituted.

  Args:
    app: The app id argument value to be validated.
    name: The argument name, for error messages.

  Returns:
    The value of app, or the substituted default. Always a non-empty string.

  Raises:
    BadArgumentError if the value is empty or not a string.
  """
  if app is None:
    app = os.environ.get('APPLICATION_ID', '')
  # Report errors under the caller-supplied argument name; previously the
  # 'name' parameter was ignored and '_app' was hard-coded here.
  ValidateString(app, name, datastore_errors.BadArgumentError)
  return app
class Key(object):
  """The primary key for a datastore entity.

  A datastore GUID. A Key instance uniquely identifies an entity across all
  apps, and includes all information necessary to fetch the entity from the
  datastore with Get().

  Key implements __hash__, and key instances are immutable, so Keys may be
  used in sets and as dictionary keys.
  """
  # The underlying entity_pb.Reference protocol buffer.
  __reference = None

  def __init__(self, encoded=None):
    """Constructor. Creates a Key from a string.

    Args:
      # a base64-encoded primary key, generated by Key.__str__
      encoded: str
    """
    if encoded is not None:
      if not isinstance(encoded, basestring):
        try:
          repr_encoded = repr(encoded)
        except:
          repr_encoded = "<couldn't encode>"
        raise datastore_errors.BadArgumentError(
            'Key() expects a string; received %s (a %s).' %
            (repr_encoded, typename(encoded)))
      try:
        # __str__ strips the trailing '=' base64 padding, so restore it
        # before decoding.
        modulo = len(encoded) % 4
        if modulo != 0:
          encoded += ('=' * (4 - modulo))

        encoded_pb = base64.urlsafe_b64decode(str(encoded))
        self.__reference = entity_pb.Reference(encoded_pb)
        assert self.__reference.IsInitialized()
      except (AssertionError, TypeError), e:
        raise datastore_errors.BadKeyError(
            'Invalid string key %s. Details: %s' % (encoded, e))
      except Exception, e:
        # ProtocolBufferDecodeError is matched by name to avoid a hard
        # dependency on its defining module here.
        if e.__class__.__name__ == 'ProtocolBufferDecodeError':
          raise datastore_errors.BadKeyError('Invalid string key %s.' % encoded)
        else:
          raise
    else:
      # Empty (incomplete) key; callers fill in the path via from_path or
      # _FromPb.
      self.__reference = entity_pb.Reference()

  @staticmethod
  def from_path(*args, **kwds):
    """Static method to construct a Key out of a "path" (kind, id or name, ...).

    This is useful when an application wants to use just the id or name portion
    of a key in e.g. a URL, where the rest of the URL provides enough context to
    fill in the rest, i.e. the app id (always implicit), the entity kind, and
    possibly an ancestor key. Since ids and names are usually small, they're
    more attractive for use in end-user-visible URLs than the full string
    representation of a key.

    Args:
      kind: the entity kind (a str or unicode instance)
      id_or_name: the id (an int or long) or name (a str or unicode instance)
      Additional positional arguments are allowed and should be
      alternating kind and id/name.

    Keyword args:
      parent: optional parent Key; default None.

    Returns:
      A new Key instance whose .kind() and .id() or .name() methods return
      the *last* kind and id or name positional arguments passed.

    Raises:
      BadArgumentError for invalid arguments.
      BadKeyError if the parent key is incomplete.
    """
    parent = kwds.pop('parent', None)
    _app = ResolveAppId(kwds.pop('_app', None))

    if kwds:
      raise datastore_errors.BadArgumentError(
          'Excess keyword arguments ' + repr(kwds))

    # The path must be non-empty, alternating (kind, id_or_name) pairs.
    if not args or len(args) % 2:
      raise datastore_errors.BadArgumentError(
          'A non-zero even number of positional arguments is required '
          '(kind, id or name, kind, id or name, ...); received %s' % repr(args))

    if parent is not None:
      if not isinstance(parent, Key):
        raise datastore_errors.BadArgumentError(
            'Expected None or a Key as parent; received %r (a %s).' %
            (parent, typename(parent)))
      if not parent.has_id_or_name():
        raise datastore_errors.BadKeyError(
            'The parent Key is incomplete.')
      if _app != parent.app():
        raise datastore_errors.BadArgumentError(
            'The _app argument (%r) should match parent.app() (%s)' %
            (_app, parent.app()))

    key = Key()
    ref = key.__reference
    if parent is not None:
      # Start from the parent's full path; new elements are appended below.
      ref.CopyFrom(parent.__reference)
    else:
      ref.set_app(_app)

    path = ref.mutable_path()
    # Walk the positional args two at a time: (kind, id_or_name).
    for i in xrange(0, len(args), 2):
      kind, id_or_name = args[i:i+2]
      if isinstance(kind, basestring):
        kind = kind.encode('utf-8')
      else:
        raise datastore_errors.BadArgumentError(
            'Expected a string kind as argument %d; received %r (a %s).' %
            (i + 1, kind, typename(kind)))
      elem = path.add_element()
      elem.set_type(kind)
      if isinstance(id_or_name, (int, long)):
        elem.set_id(id_or_name)
      elif isinstance(id_or_name, basestring):
        ValidateString(id_or_name, 'name')
        # A name starting with a digit would be ambiguous with a numeric id.
        if id_or_name and id_or_name[0] in string.digits:
          raise datastore_errors.BadArgumentError(
              'Names may not begin with a digit; received %s.' % id_or_name)
        elem.set_name(id_or_name.encode('utf-8'))
      else:
        raise datastore_errors.BadArgumentError(
            'Expected an integer id or string name as argument %d; '
            'received %r (a %s).' % (i + 2, id_or_name, typename(id_or_name)))

    assert ref.IsInitialized()
    return key

  def app(self):
    """Returns this entity's app id, a string."""
    if self.__reference.app():
      return self.__reference.app().decode('utf-8')
    else:
      return None

  def kind(self):
    """Returns this entity's kind, as a string."""
    if self.__reference.path().element_size() > 0:
      # The key's kind is the kind of the last path element.
      encoded = self.__reference.path().element_list()[-1].type()
      return unicode(encoded.decode('utf-8'))
    else:
      return None

  def id(self):
    """Returns this entity's id, or None if it doesn't have one."""
    elems = self.__reference.path().element_list()
    if elems and elems[-1].has_id() and elems[-1].id():
      return elems[-1].id()
    else:
      return None

  def name(self):
    """Returns this entity's name, or None if it doesn't have one."""
    elems = self.__reference.path().element_list()
    if elems and elems[-1].has_name() and elems[-1].name():
      return elems[-1].name().decode('utf-8')
    else:
      return None

  def id_or_name(self):
    """Returns this entity's id or name, whichever it has, or None."""
    if self.id() is not None:
      return self.id()
    else:
      return self.name()

  def has_id_or_name(self):
    """Returns True if this entity has an id or name, False otherwise.
    """
    return self.id_or_name() is not None

  def parent(self):
    """Returns this entity's parent, as a Key. If this entity has no parent,
    returns None."""
    if self.__reference.path().element_size() > 1:
      parent = Key()
      parent.__reference.CopyFrom(self.__reference)
      # Dropping the last path element yields the parent's path.
      parent.__reference.path().element_list().pop()
      return parent
    else:
      return None

  def ToTagUri(self):
    """Returns a tag: URI for this entity for use in XML output.

    Foreign keys for entities may be represented in XML output as tag URIs.
    RFC 4151 describes the tag URI scheme. From http://taguri.org/:

      The tag algorithm lets people mint - create - identifiers that no one
      else using the same algorithm could ever mint. It is simple enough to do
      in your head, and the resulting identifiers can be easy to read, write,
      and remember. The identifiers conform to the URI (URL) Syntax.

    Tag URIs for entities use the app's auth domain and the date that the URI
    is generated. The namespace-specific part is <kind>[<key>].

    For example, here is the tag URI for a Kitten with the key "Fluffy" in the
    catsinsinks app:

      tag:catsinsinks.googleapps.com,2006-08-29:Kitten[Fluffy]

    Raises a BadKeyError if this entity's key is incomplete.
    """
    if not self.has_id_or_name():
      raise datastore_errors.BadKeyError(
          'ToTagUri() called for an entity with an incomplete key.')

    return u'tag:%s.%s,%s:%s[%s]' % (saxutils.escape(self.app()),
                                     os.environ['AUTH_DOMAIN'],
                                     datetime.date.today().isoformat(),
                                     saxutils.escape(self.kind()),
                                     saxutils.escape(str(self)))

  # Keys are rendered in XML as tag URIs.
  ToXml = ToTagUri

  def entity_group(self):
    """Returns this key's entity group as a Key.

    Note that the returned Key will be incomplete if this Key is for a root
    entity and it is incomplete.
    """
    group = Key._FromPb(self.__reference)
    # The entity group is rooted at the first path element; drop the rest.
    del group.__reference.path().element_list()[1:]
    return group

  @staticmethod
  def _FromPb(pb):
    """Static factory method. Creates a Key from an entity_pb.Reference.

    Not intended to be used by application developers. Enforced by hiding the
    entity_pb classes.

    Args:
      pb: entity_pb.Reference
    """
    if not isinstance(pb, entity_pb.Reference):
      raise datastore_errors.BadArgumentError(
          'Key constructor takes an entity_pb.Reference; received %s (a %s).' %
          (pb, typename(pb)))

    key = Key()
    # Copy so later mutations of pb don't affect this Key.
    key.__reference = entity_pb.Reference()
    key.__reference.CopyFrom(pb)
    return key

  def _ToPb(self):
    """Converts this Key to its protocol buffer representation.

    Not intended to be used by application developers. Enforced by hiding the
    entity_pb classes.

    Returns:
      # the Reference PB representation of this Key
      entity_pb.Reference
    """
    pb = entity_pb.Reference()
    pb.CopyFrom(self.__reference)
    if not self.has_id_or_name():
      # Incomplete keys are serialized with a placeholder id of 0.
      pb.mutable_path().element_list()[-1].set_id(0)

    # These decode() calls are validation only: they raise if the app id or
    # any path element kind is not valid UTF-8.
    pb.app().decode('utf-8')
    for pathelem in pb.path().element_list():
      pathelem.type().decode('utf-8')

    return pb

  def __str__(self):
    """Encodes this Key as an opaque string.

    Returns a string representation of this key, suitable for use in HTML,
    URLs, and other similar use cases. If the entity's key is incomplete,
    raises a BadKeyError.

    Unfortunately, this string encoding isn't particularly compact, and its
    length varies with the length of the path. If you want a shorter identifier
    and you know the kind and parent (if any) ahead of time, consider using just
    the entity's id or name.

    Returns:
      string
    """
    if (self.has_id_or_name()):
      encoded = base64.urlsafe_b64encode(self.__reference.Encode())
      # Strip the '=' padding for URL friendliness; __init__ restores it.
      return encoded.replace('=', '')
    else:
      raise datastore_errors.BadKeyError(
          'Cannot string encode an incomplete key!\n%s' % self.__reference)

  def __repr__(self):
    """Returns an eval()able string representation of this key.

    Returns a Python string of the form 'datastore_types.Key.from_path(...)'
    that can be used to recreate this key.

    Returns:
      string
    """
    args = []
    for elem in self.__reference.path().element_list():
      args.append(repr(elem.type()))
      if elem.has_name():
        args.append(repr(elem.name().decode('utf-8')))
      else:
        args.append(repr(elem.id()))

    args.append('_app=%r' % self.__reference.app().decode('utf-8'))
    return u'datastore_types.Key.from_path(%s)' % ', '.join(args)

  def __cmp__(self, other):
    """Returns negative, zero, or positive when comparing two keys.

    TODO(ryanb): for API v2, we should change this to make incomplete keys, ie
    keys without an id or name, not equal to any other keys.

    Args:
      other: Key to compare to.

    Returns:
      Negative if self is less than "other"
      Zero if "other" is equal to self
      Positive if self is greater than "other"
    """
    # -2 signals "not comparable" for non-Key operands.
    if not isinstance(other, Key):
      return -2

    self_args = []
    other_args = []

    self_args.append(self.__reference.app().decode('utf-8'))
    other_args.append(other.__reference.app().decode('utf-8'))

    # Note: kinds are compared via repr() but ids are compared raw, so ids
    # order numerically while names order lexically.
    for elem in self.__reference.path().element_list():
      self_args.append(repr(elem.type()))
      if elem.has_name():
        self_args.append(repr(elem.name().decode('utf-8')))
      else:
        self_args.append(elem.id())

    for elem in other.__reference.path().element_list():
      other_args.append(repr(elem.type()))
      if elem.has_name():
        other_args.append(repr(elem.name().decode('utf-8')))
      else:
        other_args.append(elem.id())

    result = cmp(self_args, other_args)
    return result

  def __hash__(self):
    """Returns a 32-bit integer hash of this key.

    Implements Python's hash protocol so that Keys may be used in sets and as
    dictionary keys.

    Returns:
      int
    """
    # Hash of the encoded string form, so equal keys hash equally.
    return hash(self.__str__())
class Category(unicode):
  """A user-supplied tag: a descriptive word or phrase attached to an entity.

  Tagged entities can later be returned by queries for that tag, and tags can
  drive ranking (frequency), photo captions, clustering, activity, and so on.
  A longer discussion: http://www.zeldman.com/daily/0405d.shtml

  Maps to the Atom "category" element; in XML output the tag is emitted as
  the term attribute. See:
  http://www.atomenabled.org/developers/syndication/#category

  Raises BadValueError if tag is not a string or subtype.
  """
  TERM = 'user-tag'

  def __init__(self, tag):
    super(Category, self).__init__(self, tag)
    ValidateString(tag, 'tag')

  def ToXml(self):
    quoted_label = saxutils.quoteattr(self)
    return u'<category term="%s" label=%s />' % (Category.TERM, quoted_label)
class Link(unicode):
  """A fully qualified URL. Usually http: scheme, but may also be file:, ftp:,
  news:, among others.

  For email (mailto:) or instant messaging (aim:, xmpp:) links, consider the
  Email or IM classes instead.

  Maps to the Atom "link" element; in XML output the link is emitted as the
  href attribute. See:
  http://www.atomenabled.org/developers/syndication/#link

  Raises BadValueError if link is not a fully qualified, well-formed URL.
  """
  def __init__(self, link):
    super(Link, self).__init__(self, link)
    ValidateString(link, 'link', max_len=_MAX_LINK_PROPERTY_LENGTH)

    pieces = urlparse.urlparse(link)
    scheme, domain, path = pieces[0], pieces[1], pieces[2]
    # file: URLs need a path; every other scheme needs a network location.
    if not scheme or (scheme != 'file' and not domain):
      raise datastore_errors.BadValueError('Invalid URL: %s' % link)
    if scheme == 'file' and not path:
      raise datastore_errors.BadValueError('Invalid URL: %s' % link)

  def ToXml(self):
    return u'<link href=%s />' % saxutils.quoteattr(self)
class Email(unicode):
  """An RFC2822 email address.

  No validation beyond the generic string checks is attempted; short of
  checking MX records, email address validation is a rathole.

  Maps to the gd:email element; in XML output the address is emitted as the
  address attribute. See:
  http://code.google.com/apis/gdata/common-elements.html#gdEmail

  Raises BadValueError if email is not a valid email address.
  """
  def __init__(self, email):
    super(Email, self).__init__(self, email)
    ValidateString(email, 'email')

  def ToXml(self):
    quoted = saxutils.quoteattr(self)
    return u'<gd:email address=%s />' % quoted
class GeoPt(object):
  """A geographical point: floating-point latitude and longitude coordinates.

  Often used to integrate with mapping sites like Google Maps, and may also
  serve as ICBM coordinates.

  Maps to the georss:point element; in XML output the coordinates are emitted
  as the element's text. See: http://georss.org/

  Serializes to '<lat>,<lon>'. Raises BadValueError when given an invalid
  serialized string, or when lat and lon are not valid floating point numbers
  in the ranges [-90, 90] and [-180, 180], respectively.
  """
  lat = None
  lon = None

  def __init__(self, lat, lon=None):
    if lon is None:
      # A single argument must be a 'lat,lon' serialized string.
      try:
        lat, lon = lat.split(',')
      except (AttributeError, ValueError):
        raise datastore_errors.BadValueError(
            'Expected a "lat,long" formatted string; received %s (a %s).' %
            (lat, typename(lat)))

    try:
      lat = float(lat)
      lon = float(lon)
      if abs(lat) > 90:
        raise datastore_errors.BadValueError(
            'Latitude must be between -90 and 90; received %f' % lat)
      if abs(lon) > 180:
        raise datastore_errors.BadValueError(
            'Longitude must be between -180 and 180; received %f' % lon)
    except (TypeError, ValueError):
      raise datastore_errors.BadValueError(
          'Expected floats for lat and long; received %s (a %s) and %s (a %s).' %
          (lat, typename(lat), lon, typename(lon)))

    self.lat = lat
    self.lon = lon

  def __cmp__(self, other):
    if not isinstance(other, GeoPt):
      try:
        other = GeoPt(other)
      except datastore_errors.BadValueError:
        return NotImplemented

    # Order by latitude first, then longitude.
    return cmp(self.lat, other.lat) or cmp(self.lon, other.lon)

  def __hash__(self):
    """Returns a 32-bit integer hash of this point.

    Implements Python's hash protocol so that GeoPts may be used in sets and
    as dictionary keys.

    Returns:
      int
    """
    return hash((self.lat, self.lon))

  def __repr__(self):
    """Returns an eval()able string representation of this GeoPt.

    The returned string is of the form 'datastore_types.GeoPt([lat], [lon])'.

    Returns:
      string
    """
    return 'datastore_types.GeoPt(%r, %r)' % (self.lat, self.lon)

  def __unicode__(self):
    return u'%s,%s' % (unicode(self.lat), unicode(self.lon))

  __str__ = __unicode__

  def ToXml(self):
    return u'<georss:point>%s %s</georss:point>' % (unicode(self.lat),
                                                    unicode(self.lon))
class IM(object):
  """An instant messaging handle, pairing an address with its protocol.

  The protocol value is either a standard IM scheme or a URL identifying the
  IM network for the protocol. Possible values include:

    Value                           Description
    sip                             SIP/SIMPLE
    unknown                         Unknown or unspecified
    xmpp                            XMPP/Jabber
    http://aim.com/                 AIM
    http://icq.com/                 ICQ
    http://talk.google.com/         Google Talk
    http://messenger.msn.com/       MSN Messenger
    http://messenger.yahoo.com/     Yahoo Messenger
    http://sametime.com/            Lotus Sametime
    http://gadu-gadu.pl/            Gadu-Gadu

  Maps to the gd:im element; in XML output the address and protocol are
  emitted as the address and protocol attributes, respectively. See:
  http://code.google.com/apis/gdata/common-elements.html#gdIm

  Serializes to '<protocol> <address>'. Raises BadValueError if tag is not a
  standard IM scheme or a URL.
  """
  PROTOCOLS = [ 'sip', 'unknown', 'xmpp' ]

  protocol = None
  address = None

  def __init__(self, protocol, address=None):
    if address is None:
      # A single argument must be a '<protocol> <address>' serialized string.
      try:
        protocol, address = protocol.split(' ')
      except (AttributeError, ValueError):
        raise datastore_errors.BadValueError(
            'Expected string of format "protocol address"; received %s' %
            str(protocol))

    ValidateString(address, 'address')
    if protocol not in self.PROTOCOLS:
      # Non-standard protocols must be well-formed URLs; Link() validates.
      Link(protocol)

    self.address = address
    self.protocol = protocol

  def __cmp__(self, other):
    if not isinstance(other, IM):
      try:
        other = IM(other)
      except datastore_errors.BadValueError:
        return NotImplemented

    return cmp((self.address, self.protocol),
               (other.address, other.protocol))

  def __repr__(self):
    """Returns an eval()able string representation of this IM.

    The returned string is of the form:

      datastore_types.IM('address', 'protocol')

    Returns:
      string
    """
    return 'datastore_types.IM(%r, %r)' % (self.protocol, self.address)

  def __unicode__(self):
    return u'%s %s' % (self.protocol, self.address)

  __str__ = __unicode__

  def ToXml(self):
    return (u'<gd:im protocol=%s address=%s />' %
            (saxutils.quoteattr(self.protocol),
             saxutils.quoteattr(self.address)))

  def __len__(self):
    return len(unicode(self))
class PhoneNumber(unicode):
  """A human-readable phone number or address.

  No validation is performed beyond the generic string checks: phone numbers
  have many different formats - local, long distance, domestic, international,
  internal extension, TTY, VOIP, SMS, and alternative networks like Skype,
  XFire and Roger Wilco - each with its own numbering and addressing scheme.

  Maps to the gd:phoneNumber element; in XML output the number is emitted as
  the element's text. See:
  http://code.google.com/apis/gdata/common-elements.html#gdPhoneNumber

  Raises BadValueError if phone is not a string or subtype.
  """
  def __init__(self, phone):
    super(PhoneNumber, self).__init__(self, phone)
    ValidateString(phone, 'phone')

  def ToXml(self):
    escaped = saxutils.escape(self)
    return u'<gd:phoneNumber>%s</gd:phoneNumber>' % escaped
class PostalAddress(unicode):
  """A human-readable mailing address.

  As with phone numbers, mailing address formats vary widely, so no
  validation beyond the generic string checks is performed.

  Maps to the gd:postalAddress element; in XML output the address is emitted
  as the element's text. See:
  http://code.google.com/apis/gdata/common-elements.html#gdPostalAddress

  Raises BadValueError if address is not a string or subtype.
  """
  def __init__(self, address):
    super(PostalAddress, self).__init__(self, address)
    ValidateString(address, 'address')

  def ToXml(self):
    escaped = saxutils.escape(self)
    return u'<gd:postalAddress>%s</gd:postalAddress>' % escaped
class Rating(long):
  """A user-provided integer rating for a piece of content, normalized to a
  0-100 scale.

  Maps to the gd:rating element; in XML output the rating is emitted as the
  value attribute, alongside the min and max bounds. See:
  http://code.google.com/apis/gdata/common-elements.html#gdRating

  Serializes to the decimal string representation of the rating. Raises
  BadValueError if the rating is not an integer in the range [MIN, MAX].
  """
  MIN = 0
  MAX = 100

  def __init__(self, rating):
    super(Rating, self).__init__(self, rating)
    # Reject non-integral numeric types outright.
    if isinstance(rating, (float, complex)):
      raise datastore_errors.BadValueError(
          'Expected int or long; received %s (a %s).' %
          (rating, typename(rating)))

    try:
      if long(rating) < Rating.MIN or long(rating) > Rating.MAX:
        raise datastore_errors.BadValueError()
    except ValueError:
      # long() failed, so the value wasn't integer-like.
      raise datastore_errors.BadValueError(
          'Expected int or long; received %s (a %s).' %
          (rating, typename(rating)))

  def ToXml(self):
    return (u'<gd:rating value="%d" min="%d" max="%d" />' %
            (self, Rating.MIN, Rating.MAX))
class Text(unicode):
  """A long string type.

  Strings of any length can be stored in the datastore using this type.
  It behaves identically to the Python unicode type, except for the
  constructor, which only accepts str and unicode arguments.
  """

  def __new__(cls, arg=None, encoding=None):
    """Constructor.

    We only accept unicode and str instances, the latter with encoding.

    Args:
      arg: optional unicode or str instance; default u''
      encoding: optional encoding; disallowed when isinstance(arg, unicode),
                defaults to 'ascii' when isinstance(arg, str);
    """
    arg = u'' if arg is None else arg

    if isinstance(arg, unicode):
      # Already decoded; an encoding would be meaningless.
      if encoding is not None:
        raise TypeError('Text() with a unicode argument '
                        'should not specify an encoding')
      return super(Text, cls).__new__(cls, arg)

    if isinstance(arg, str):
      if encoding is None:
        encoding = 'ascii'
      return super(Text, cls).__new__(cls, arg, encoding)

    raise TypeError('Text() argument should be str or unicode, not %s' %
                    type(arg).__name__)
class Blob(str):
  """A blob type, appropriate for storing binary data of any length.

  This behaves identically to the Python str type, except for the
  constructor, which only accepts str arguments.
  """

  def __new__(cls, arg=None):
    """Constructor.

    We only accept str instances.

    Args:
      arg: optional str instance (default '')
    """
    if arg is None:
      arg = ''
    if not isinstance(arg, str):
      raise TypeError('Blob() argument should be str instance, not %s' %
                      type(arg).__name__)
    return super(Blob, cls).__new__(cls, arg)

  def ToXml(self):
    """Output a blob as XML.

    Returns:
      Base64 encoded version of itself for safe insertion in to an XML document.
    """
    return saxutils.escape(base64.urlsafe_b64encode(self))
class ByteString(str):
  """A byte-string type, appropriate for storing short amounts of indexed data.

  This behaves identically to Blob, except it's used only for short, indexed
  byte strings.
  """

  def __new__(cls, arg=None):
    """Constructor.

    We only accept str instances.

    Args:
      arg: optional str instance (default '')
    """
    if arg is None:
      arg = ''
    if not isinstance(arg, str):
      raise TypeError('ByteString() argument should be str instance, not %s' %
                      type(arg).__name__)
    return super(ByteString, cls).__new__(cls, arg)

  def ToXml(self):
    """Output a ByteString as XML.

    Returns:
      Base64 encoded version of itself for safe insertion in to an XML document.
    """
    return saxutils.escape(base64.urlsafe_b64encode(self))
# Maps property value types to the entity_pb.Property meaning used to tag
# them when serialized. Types absent from this map are stored with no
# special meaning.
_PROPERTY_MEANINGS = {
  Blob: entity_pb.Property.BLOB,
  ByteString: entity_pb.Property.BYTESTRING,
  Text: entity_pb.Property.TEXT,
  datetime.datetime: entity_pb.Property.GD_WHEN,
  Category: entity_pb.Property.ATOM_CATEGORY,
  Link: entity_pb.Property.ATOM_LINK,
  Email: entity_pb.Property.GD_EMAIL,
  GeoPt: entity_pb.Property.GEORSS_POINT,
  IM: entity_pb.Property.GD_IM,
  PhoneNumber: entity_pb.Property.GD_PHONENUMBER,
  PostalAddress: entity_pb.Property.GD_POSTALADDRESS,
  Rating: entity_pb.Property.GD_RATING,
}
# The complete set of Python types that may be stored as datastore property
# values.
_PROPERTY_TYPES = frozenset([
  Blob,
  ByteString,
  bool,
  Category,
  datetime.datetime,
  Email,
  float,
  GeoPt,
  IM,
  int,
  Key,
  Link,
  long,
  PhoneNumber,
  PostalAddress,
  Rating,
  str,
  Text,
  type(None),
  unicode,
  users.User,
])

# Types stored as raw (unindexed) values.
_RAW_PROPERTY_TYPES = (Blob, Text)
def ValidatePropertyInteger(name, value):
  """Raises an exception if the supplied integer is invalid.

  Args:
    name: Name of the property this is for.
    value: Integer value.

  Raises:
    OverflowError if the value does not fit within a signed int64.
  """
  if value < -0x8000000000000000 or value > 0x7fffffffffffffff:
    raise OverflowError('%d is out of bounds for int64' % value)
def ValidateStringLength(name, value, max_len):
  """Raises an exception if the supplied string is too long.

  Args:
    name: Name of the property this is for.
    value: String value.
    max_len: Maximum length the string may be.

  Raises:
    datastore_errors.BadValueError if the value is longer than max_len.
  """
  if len(value) <= max_len:
    return
  raise datastore_errors.BadValueError(
      'Property %s is %d bytes long; it must be %d or less. '
      'Consider Text instead, which can store strings of any length.' %
      (name, len(value), max_len))
def ValidatePropertyString(name, value):
  """Validates the length of an indexed string property.

  Args:
    name: Name of the property this is for.
    value: String value.
  """
  ValidateStringLength(name, value, _MAX_STRING_LENGTH)
def ValidatePropertyLink(name, value):
  """Validates the length of an indexed Link property.

  Args:
    name: Name of the property this is for.
    value: String value.
  """
  ValidateStringLength(name, value, _MAX_LINK_PROPERTY_LENGTH)
def ValidatePropertyNothing(name, value):
  """No-op validation function that accepts any value.

  Args:
    name: Name of the property this is for.
    value: Not used.
  """
  return None
def ValidatePropertyKey(name, value):
  """Checks that a datastore.Key used as a property value is complete.

  Args:
    name: Name of the property this is for.
    value: A datastore.Key instance.

  Raises:
    datastore_errors.BadValueError if the key has neither an id nor a name.
  """
  if value.has_id_or_name():
    return
  raise datastore_errors.BadValueError(
      'Incomplete key found for reference property %s.' % name)
# Maps each supported property type to the function that validates a value
# of that type.  Must cover exactly _PROPERTY_TYPES (checked below).
_VALIDATE_PROPERTY_VALUES = {
    Blob: ValidatePropertyNothing,
    ByteString: ValidatePropertyString,
    bool: ValidatePropertyNothing,
    Category: ValidatePropertyString,
    datetime.datetime: ValidatePropertyNothing,
    Email: ValidatePropertyString,
    float: ValidatePropertyNothing,
    GeoPt: ValidatePropertyNothing,
    IM: ValidatePropertyString,
    int: ValidatePropertyInteger,
    Key: ValidatePropertyKey,
    Link: ValidatePropertyLink,
    long: ValidatePropertyInteger,
    PhoneNumber: ValidatePropertyString,
    PostalAddress: ValidatePropertyString,
    Rating: ValidatePropertyInteger,
    str: ValidatePropertyString,
    Text: ValidatePropertyNothing,
    type(None): ValidatePropertyNothing,
    unicode: ValidatePropertyString,
    users.User: ValidatePropertyNothing,
}

assert set(_VALIDATE_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
def ValidateProperty(name, values, read_only=False):
  """Helper function for validating property values.

  Args:
    name: Name of the property this is for.
    values: Value for the property as a Python native type, or a list of
      such values.
    read_only: If True, skips the reserved-property-name check (used when
      validating properties read back from the datastore).

  Raises:
    BadPropertyError if the property name is invalid. BadValueError if the
    property did not validate correctly or the value was an empty list. Other
    exception types (like OverflowError) if the property value does not meet
    type-specific criteria.
  """
  ValidateString(name, 'property name', datastore_errors.BadPropertyError)
  if not read_only and RESERVED_PROPERTY_NAME.match(name):
    raise datastore_errors.BadPropertyError(
        '%s is a reserved property name.' % name)
  values_type = type(values)

  # Tuples are rejected outright so they cannot be confused with
  # multi-valued (list) properties.
  if values_type is tuple:
    raise datastore_errors.BadValueError(
        'May not use tuple property value; property %s is %s.' %
        (name, repr(values)))

  # Normalize a single value to a one-element list so a single loop below
  # handles both cases.
  if values_type is list:
    multiple = True
  else:
    multiple = False
    values = [values]
  if not values:
    raise datastore_errors.BadValueError(
        'May not use the empty list as a property value; property %s is %s.' %
        (name, repr(values)))
  try:
    for v in values:
      prop_validator = _VALIDATE_PROPERTY_VALUES.get(v.__class__)
      if prop_validator is None:
        raise datastore_errors.BadValueError(
            'Unsupported type for property %s: %s' % (name, v.__class__))
      prop_validator(name, v)
  except (KeyError, ValueError, TypeError, IndexError, AttributeError), msg:
    # Any type-specific failure is normalized into BadValueError.
    raise datastore_errors.BadValueError(
        'Error type checking values for property %s: %s' % (name, msg))

# Reads are currently validated with the same rules as writes.
ValidateReadProperty = ValidateProperty
def PackBlob(name, value, pbvalue):
  """Stores a Blob property value in an entity_pb.PropertyValue.

  The raw bytes are copied verbatim into the string field.

  Args:
    name: The name of the property as a string.
    value: A Blob instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_stringvalue(value)
def PackString(name, value, pbvalue):
  """Packs a string-typed property into a entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string.
    value: A string, unicode, or string-like value instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  # Normalize to unicode first so string-like subclasses (Email, Link, ...)
  # are stored as their UTF-8 encoded text.
  pbvalue.set_stringvalue(unicode(value).encode('utf-8'))
def PackDatetime(name, value, pbvalue):
  """Packs a datetime-typed property into a entity_pb.PropertyValue.

  The timestamp is stored as microseconds since the Unix epoch in the
  int64 field.

  Args:
    name: The name of the property as a string.
    value: A datetime.datetime instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_int64value(DatetimeToTimestamp(value))
def DatetimeToTimestamp(value):
  """Converts a datetime.datetime to microseconds since the epoch, as a long.

  Timezone-aware datetimes are converted to UTC first; naive datetimes are
  treated as already being UTC (calendar.timegm interprets the tuple as UTC).

  Args:
    value: datetime.datetime

  Returns: value as a long
  """
  if value.tzinfo:
    value = value.astimezone(UTC)
  return long(calendar.timegm(value.timetuple()) * 1000000L) + value.microsecond
def PackGeoPt(name, value, pbvalue):
  """Stores a GeoPt's coordinates in an entity_pb.PropertyValue point.

  Latitude goes into x and longitude into y.

  Args:
    name: The name of the property as a string.
    value: A GeoPt instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  point = pbvalue.mutable_pointvalue()
  point.set_x(value.lat)
  point.set_y(value.lon)
def PackUser(name, value, pbvalue):
  """Stores a users.User in an entity_pb.PropertyValue's UserValue.

  The email and auth domain are UTF-8 encoded; the gaia id is always
  written as 0 here.

  Args:
    name: The name of the property as a string.
    value: A users.User instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  uservalue = pbvalue.mutable_uservalue()
  uservalue.set_email(value.email().encode('utf-8'))
  uservalue.set_auth_domain(value.auth_domain().encode('utf-8'))
  uservalue.set_gaiaid(0)
def PackKey(name, value, pbvalue):
  """Packs a reference property into a entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string.
    value: A Key instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  # Reach into the Key's name-mangled private Reference pb and copy its
  # app id and each path element into the reference value.
  ref = value._Key__reference
  pbvalue.mutable_referencevalue().set_app(ref.app())
  for elem in ref.path().element_list():
    pbvalue.mutable_referencevalue().add_pathelement().CopyFrom(elem)
def PackBool(name, value, pbvalue):
  """Stores a boolean property value in an entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string.
    value: A boolean instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_booleanvalue(value)
def PackInteger(name, value, pbvalue):
  """Stores an int/long property value in an entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string.
    value: An int or long instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_int64value(value)
def PackFloat(name, value, pbvalue):
  """Stores a float property value in an entity_pb.PropertyValue.

  Args:
    name: The name of the property as a string.
    value: A float instance.
    pbvalue: The entity_pb.PropertyValue to pack this value into.
  """
  pbvalue.set_doublevalue(value)
# Maps each supported property type to the function that packs a value of
# that type into an entity_pb.PropertyValue.  Must cover exactly
# _PROPERTY_TYPES (checked below); None packs to an empty PropertyValue.
_PACK_PROPERTY_VALUES = {
    Blob: PackBlob,
    ByteString: PackBlob,
    bool: PackBool,
    Category: PackString,
    datetime.datetime: PackDatetime,
    Email: PackString,
    float: PackFloat,
    GeoPt: PackGeoPt,
    IM: PackString,
    int: PackInteger,
    Key: PackKey,
    Link: PackString,
    long: PackInteger,
    PhoneNumber: PackString,
    PostalAddress: PackString,
    Rating: PackInteger,
    str: PackString,
    Text: PackString,
    type(None): lambda name, value, pbvalue: None,
    unicode: PackString,
    users.User: PackUser,
}

assert set(_PACK_PROPERTY_VALUES.iterkeys()) == _PROPERTY_TYPES
def ToPropertyPb(name, values):
  """Creates type-specific entity_pb.PropertyValues.

  Determines the type and meaning of the PropertyValue based on the Python
  type of the input value(s).

  NOTE: This function does not validate anything!

  Args:
    name: string or unicode; the property name
    values: The values for this property, either a single one or a list of them.
      All values must be a supported type. Lists of values must all be of the
      same type.

  Returns:
    A list of entity_pb.Property instances if values was a list, otherwise a
    single entity_pb.Property instance.
  """
  encoded_name = name.encode('utf-8')
  values_type = type(values)
  # Normalize single values to a one-element list; remember whether the
  # property was multi-valued so each pb can record it.
  if values_type is list:
    multiple = True
  else:
    multiple = False
    values = [values]
  pbs = []
  for v in values:
    pb = entity_pb.Property()
    pb.set_name(encoded_name)
    pb.set_multiple(multiple)
    # The meaning distinguishes rich types (Text, Blob, gd/atom types, ...)
    # that share the same underlying wire representation.
    meaning = _PROPERTY_MEANINGS.get(v.__class__)
    if meaning is not None:
      pb.set_meaning(meaning)
    pack_prop = _PACK_PROPERTY_VALUES[v.__class__]
    pbvalue = pack_prop(name, v, pb.mutable_value())
    pbs.append(pb)
  if multiple:
    return pbs
  else:
    return pbs[0]
def FromReferenceProperty(value):
  """Converts a reference PropertyValue to a Key.

  Args:
    value: entity_pb.PropertyValue

  Returns:
    Key

  Raises:
    BadValueError if the value is not a PropertyValue.
  """
  # NOTE(review): the checks below are asserts, so under -O they are
  # stripped rather than raising BadValueError as documented.
  assert isinstance(value, entity_pb.PropertyValue)
  assert value.has_referencevalue()
  ref = value.referencevalue()

  # Build an empty Key and fill its private Reference pb from the value.
  key = Key()
  key_ref = key._Key__reference
  key_ref.set_app(ref.app())
  for pathelem in ref.pathelement_list():
    key_ref.mutable_path().add_element().CopyFrom(pathelem)
  return key
# Datastore timestamps are microseconds since the Unix epoch (UTC).
_EPOCH = datetime.datetime.utcfromtimestamp(0)

# Maps a Property meaning to a callable that converts the raw unpacked
# value (string, long, or coordinate tuple) into the rich Python type.
_PROPERTY_CONVERSIONS = {
    entity_pb.Property.GD_WHEN:
    lambda val: _EPOCH + datetime.timedelta(microseconds=val),
    entity_pb.Property.ATOM_CATEGORY: Category,
    entity_pb.Property.ATOM_LINK: Link,
    entity_pb.Property.GD_EMAIL: Email,
    entity_pb.Property.GEORSS_POINT: lambda coords: GeoPt(*coords),
    entity_pb.Property.GD_IM: IM,
    entity_pb.Property.GD_PHONENUMBER: PhoneNumber,
    entity_pb.Property.GD_POSTALADDRESS: PostalAddress,
    entity_pb.Property.GD_RATING: Rating,
    entity_pb.Property.BLOB: Blob,
    entity_pb.Property.BYTESTRING: ByteString,
    entity_pb.Property.TEXT: Text,
}
def FromPropertyPb(pb):
  """Converts a property PB to a python value.

  First unpacks the raw wire value by whichever field is set, then applies
  the meaning-specific conversion (if any) to recover the rich type.

  Args:
    pb: entity_pb.Property

  Returns:
    # return type is determined by the type of the argument
    string, int, bool, double, users.User, or one of the atom or gd types

  Raises:
    datastore_errors.BadValueError if the meaning conversion fails.
  """
  pbval = pb.value()
  meaning = pb.meaning()
  if pbval.has_stringvalue():
    value = pbval.stringvalue()
    # Blob/ByteString stay as raw bytes; everything else is UTF-8 text.
    if meaning not in (entity_pb.Property.BLOB, entity_pb.Property.BYTESTRING):
      value = unicode(value.decode('utf-8'))
  elif pbval.has_int64value():
    value = long(pbval.int64value())
  elif pbval.has_booleanvalue():
    value = bool(pbval.booleanvalue())
  elif pbval.has_doublevalue():
    value = pbval.doublevalue()
  elif pbval.has_referencevalue():
    value = FromReferenceProperty(pbval)
  elif pbval.has_pointvalue():
    # Left as an (x, y) tuple here; GEORSS_POINT conversion below turns it
    # into a GeoPt.
    value = (pbval.pointvalue().x(), pbval.pointvalue().y())
  elif pbval.has_uservalue():
    email = unicode(pbval.uservalue().email().decode('utf-8'))
    auth_domain = unicode(pbval.uservalue().auth_domain().decode('utf-8'))
    value = users.User(email=email, _auth_domain=auth_domain)
  else:
    value = None
  try:
    if pb.has_meaning():
      conversion = _PROPERTY_CONVERSIONS[meaning]
      value = conversion(value)
  except (KeyError, ValueError, IndexError, TypeError, AttributeError), msg:
    raise datastore_errors.BadValueError(
        'Error converting pb: %s\nException was: %s' % (pb, msg))
  return value
def PropertyTypeName(value):
  """Returns the name of the type of the given property value, as a string.

  Raises BadValueError if the value is not a valid property type.

  Args:
    value: any valid property value

  Returns:
    string
  """
  # Types with a meaning get the lowercased meaning name, with '_' mapped
  # to ':' (e.g. GD_EMAIL -> 'gd:email').
  if value.__class__ in _PROPERTY_MEANINGS:
    meaning = _PROPERTY_MEANINGS[value.__class__]
    name = entity_pb.Property._Meaning_NAMES[meaning]
    return name.lower().replace('_', ':')
  elif isinstance(value, basestring):
    return 'string'
  elif isinstance(value, users.User):
    return 'user'
  elif isinstance(value, long):
    return 'int'
  elif value is None:
    return 'null'
  else:
    return typename(value).lower()
# Maps the type-name strings produced by PropertyTypeName() back to Python
# types; the lookup table behind FromPropertyTypeName().
_PROPERTY_TYPE_STRINGS = {
    'string': unicode,
    'bool': bool,
    'int': long,
    'null': type(None),
    'float': float,
    'key': Key,
    'blob': Blob,
    'bytestring': ByteString,
    'text': Text,
    'user': users.User,
    'atom:category': Category,
    'atom:link': Link,
    'gd:email': Email,
    'gd:when': datetime.datetime,
    'georss:point': GeoPt,
    'gd:im': IM,
    'gd:phonenumber': PhoneNumber,
    'gd:postaladdress': PostalAddress,
    'gd:rating': Rating,
}
def FromPropertyTypeName(type_name):
  """Returns the python type given a type name.

  Args:
    type_name: A string representation of a datastore type name.

  Returns:
    A python type.

  Raises:
    KeyError if type_name is not a recognized datastore type name.
  """
  return _PROPERTY_TYPE_STRINGS[type_name]
def PropertyValueFromString(type_, value_string, _auth_domain=None):
  """Returns an instance of a property value given a type and string value.

  The reverse of this method is just str() and type() of the python value.

  Note that this does *not* support non-UTC offsets in ISO 8601-formatted
  datetime strings, e.g. the -08:00 suffix in '2002-12-25 00:00:00-08:00'.
  It only supports -00:00 and +00:00 suffixes, which are UTC.

  Args:
    type_: A python class.
    value_string: A string representation of the value of the property.
    _auth_domain: Optional auth domain passed through when type_ is
      users.User.

  Returns:
    An instance of 'type'.

  Raises:
    ValueError if type_ is datetime and value_string has a timezone offset.
  """
  if type_ == datetime.datetime:
    value_string = value_string.strip()
    # Strip a UTC offset suffix; any other offset is rejected.
    if value_string[-6] in ('+', '-'):
      if value_string[-5:] == '00:00':
        value_string = value_string[:-6]
      else:
        raise ValueError('Non-UTC offsets in datetimes are not supported.')
    # The fractional part is read as a literal microsecond count, which
    # matches str(datetime)'s fixed six-digit '.%f' output.
    split = value_string.split('.')
    iso_date = split[0]
    microseconds = 0
    if len(split) > 1:
      microseconds = int(split[1])
    time_struct = time.strptime(iso_date, '%Y-%m-%d %H:%M:%S')[0:6]
    value = datetime.datetime(*(time_struct + (microseconds,)))
    return value
  elif type_ == Rating:
    return Rating(int(value_string))
  elif type_ == bool:
    return value_string == 'True'
  elif type_ == users.User:
    return users.User(value_string, _auth_domain)
  elif type_ == type(None):
    return None
  return type_(value_string)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""AppInfo tools
Library for working with AppInfo records in memory, store and load from
configuration files.
"""
import re
from google.appengine.api import appinfo_errors
from google.appengine.api import validation
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_object
# Patterns used to validate handler fields.  The negative lookaheads
# reject values anchored with ^ or $; URLMap adds its own anchoring (see
# the URLMap docstring).
_URL_REGEX = r'(?!\^)/|\.|(\(.).*(?!\$).'
_FILES_REGEX = r'(?!\^).*(?!\$).'

# One cache-expiration component, e.g. '4d' or '30m' (the unit letter for
# seconds may be omitted), and a full value of whitespace-separated
# components.
_DELTA_REGEX = r'([1-9][0-9]*)([DdHhMm]|[sS]?)'
_EXPIRATION_REGEX = r'\s*(%s)(\s+%s)*\s*' % (_DELTA_REGEX, _DELTA_REGEX)

# Seconds per expiration unit; used by ParseExpiration.
_EXPIRATION_CONVERSIONS = {
    'd': 60 * 60 * 24,
    'h': 60 * 60,
    'm': 60,
    's': 1,
}

# Limits on application configuration values.
APP_ID_MAX_LEN = 100
MAJOR_VERSION_ID_MAX_LEN = 100
MAX_URL_MAPS = 100

# Validation patterns for the top-level app.yaml fields.
APPLICATION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % APP_ID_MAX_LEN
VERSION_RE_STRING = r'(?!-)[a-z\d\-]{1,%d}' % MAJOR_VERSION_ID_MAX_LEN
RUNTIME_RE_STRING = r'[a-z]{1,30}'
API_VERSION_RE_STRING = r'[\w.]{1,32}'

# The three mutually exclusive handler-id attributes a URLMap may set.
HANDLER_STATIC_FILES = 'static_files'
HANDLER_STATIC_DIR = 'static_dir'
HANDLER_SCRIPT = 'script'

# Accepted values for the 'login' handler field.
LOGIN_OPTIONAL = 'optional'
LOGIN_REQUIRED = 'required'
LOGIN_ADMIN = 'admin'

# Accepted values for the 'secure' handler field.
SECURE_HTTP = 'never'
SECURE_HTTPS = 'always'
SECURE_HTTP_OR_HTTPS = 'optional'

# Files never uploaded by appcfg.py unless skip_files is overridden.
DEFAULT_SKIP_FILES = (r"^(.*/)?("
                      r"(app\.yaml)|"
                      r"(app\.yml)|"
                      r"(index\.yaml)|"
                      r"(index\.yml)|"
                      r"(#.*#)|"
                      r"(.*~)|"
                      r"(.*\.py[co])|"
                      r"(.*/RCS/.*)|"
                      r"(\..*)|"
                      r")$")

# Attribute names appearing in app.yaml and used as ATTRIBUTES keys below.
LOGIN = 'login'
SECURE = 'secure'
URL = 'url'
STATIC_FILES = 'static_files'
UPLOAD = 'upload'
STATIC_DIR = 'static_dir'
MIME_TYPE = 'mime_type'
SCRIPT = 'script'
EXPIRATION = 'expiration'

APPLICATION = 'application'
VERSION = 'version'
RUNTIME = 'runtime'
API_VERSION = 'api_version'
HANDLERS = 'handlers'
DEFAULT_EXPIRATION = 'default_expiration'
SKIP_FILES = 'skip_files'
class URLMap(validation.Validated):
  """Mapping from URLs to handlers.

  This class acts like something of a union type.  Its purpose is to
  describe a mapping between a set of URLs and their handlers.  What
  handler type a given instance has is determined by which handler-id
  attribute is used.

  Each mapping can have one and only one handler type.  Attempting to
  use more than one handler-id attribute will cause an UnknownHandlerType
  to be raised during validation.  Failure to provide any handler-id
  attributes will cause MissingHandlerType to be raised during validation.

  The regular expression used by the url field will be used to match against
  the entire URL path and query string of the request.  This means that
  partial maps will not be matched.  Specifying a url, say /admin, is the
  same as matching against the regular expression '^/admin$'.  Don't begin
  your matching url with ^ or end them with $.  These regular expressions
  won't be accepted and will raise ValueError.

  Attributes:
    login: Whether or not login is required to access URL.  Defaults to
      'optional'.
    secure: Restriction on the protocol which can be used to serve
      this URL/handler (HTTP, HTTPS or either).
    url: Regular expression used to fully match against the request URLs path.
      See Special Cases for using static_dir.
    static_files: Handler id attribute that maps URL to the appropriate
      file.  Can use back regex references to the string matched to url.
    upload: Regular expression used by the application configuration
      program to know which files are uploaded as blobs.  It's very
      difficult to determine this using just the url and static_files
      so this attribute must be included.  Required when defining a
      static_files mapping.
      A matching file name must fully match against the upload regex, similar
      to how url is matched against the request path.  Do not begin upload
      with ^ or end it with $.
    static_dir: Handler id that maps the provided url to a sub-directory
      within the application directory.  See Special Cases.
    mime_type: When used with static_files and static_dir the mime-type
      of files served from those directories are overridden with this
      value.
    script: Handler id that maps URLs to script handler within the application
      directory that will run using CGI.
    expiration: When used with static files and directories, the time delta to
      use for cache expiration.  Has the form '4d 5h 30m 15s', where each
      letter signifies days, hours, minutes, and seconds, respectively.  The
      's' for seconds may be omitted.  Only one amount must be specified,
      combining multiple amounts is optional.  Example good values: '10',
      '1d 6h', '1h 30m', '7d 7d 7d', '5m 30'.

  Special cases:
    When defining a static_dir handler, do not use a regular expression
    in the url attribute.  Both the url and static_dir attributes are
    automatically mapped to these equivalents:

      <url>/(.*)
      <static_dir>/\1

    For example:

      url: /images
      static_dir: images_folder

    Is the same as this static_files declaration:

      url: /images/(.*)
      static_files: images/\1
      upload: images/(.*)
  """
  ATTRIBUTES = {
      URL: validation.Optional(_URL_REGEX),
      LOGIN: validation.Options(LOGIN_OPTIONAL,
                                LOGIN_REQUIRED,
                                LOGIN_ADMIN,
                                default=LOGIN_OPTIONAL),
      SECURE: validation.Options(SECURE_HTTP,
                                 SECURE_HTTPS,
                                 SECURE_HTTP_OR_HTTPS,
                                 default=SECURE_HTTP),

      HANDLER_STATIC_FILES: validation.Optional(_FILES_REGEX),
      UPLOAD: validation.Optional(_FILES_REGEX),

      HANDLER_STATIC_DIR: validation.Optional(_FILES_REGEX),

      MIME_TYPE: validation.Optional(str),
      EXPIRATION: validation.Optional(_EXPIRATION_REGEX),

      HANDLER_SCRIPT: validation.Optional(_FILES_REGEX),
  }

  # Fields that may appear with any handler type.
  COMMON_FIELDS = set([URL, LOGIN, SECURE])

  # Maps each handler-id attribute to the extra fields it allows.
  ALLOWED_FIELDS = {
      HANDLER_STATIC_FILES: (MIME_TYPE, UPLOAD, EXPIRATION),
      HANDLER_STATIC_DIR: (MIME_TYPE, EXPIRATION),
      HANDLER_SCRIPT: (),
  }

  def GetHandler(self):
    """Get handler for mapping.

    Returns:
      Value of the handler (determined by handler id attribute).
    """
    return getattr(self, self.GetHandlerType())

  def GetHandlerType(self):
    """Get handler type of mapping.

    Returns:
      Handler type determined by which handler id attribute is set.

    Raises:
      UnknownHandlerType when none of the handler id attributes
        are set.

      UnexpectedHandlerAttribute when an unexpected attribute
        is set for the discovered handler type.

      MissingHandlerAttribute when the handler is missing a
        required attribute for its handler type.
    """
    # First handler-id attribute that is set determines the mapping type;
    # the for/else raises when none is set.
    for id_field in URLMap.ALLOWED_FIELDS.iterkeys():
      if getattr(self, id_field) is not None:
        mapping_type = id_field
        break
    else:
      raise appinfo_errors.UnknownHandlerType(
          'Unknown url handler type.\n%s' % str(self))

    allowed_fields = URLMap.ALLOWED_FIELDS[mapping_type]

    # Every other set attribute must be allowed for this mapping type.
    for attribute in self.ATTRIBUTES.iterkeys():
      if (getattr(self, attribute) is not None and
          not (attribute in allowed_fields or
               attribute in URLMap.COMMON_FIELDS or
               attribute == mapping_type)):
        raise appinfo_errors.UnexpectedHandlerAttribute(
            'Unexpected attribute "%s" for mapping type %s.' %
            (attribute, mapping_type))

    # static_files handlers additionally require an upload pattern.
    if mapping_type == HANDLER_STATIC_FILES and not self.upload:
      raise appinfo_errors.MissingHandlerAttribute(
          'Missing "%s" attribute for URL "%s".' % (UPLOAD, self.url))

    return mapping_type

  def CheckInitialized(self):
    """Adds additional checking to make sure handler has correct fields.

    In addition to normal Validated checks, calls GetHandlerType
    which validates all the handler fields are configured
    properly.

    Raises:
      UnknownHandlerType when none of the handler id attributes
        are set.

      UnexpectedHandlerAttribute when an unexpected attribute
        is set for the discovered handler type.

      MissingHandlerAttribute when the handler is missing a
        required attribute for its handler type.
    """
    super(URLMap, self).CheckInitialized()
    self.GetHandlerType()
class AppInfoExternal(validation.Validated):
  """Class representing users application info.

  This class is passed to a yaml_object builder to provide the validation
  for the application information file format parser.

  Attributes:
    application: Unique identifier for application.
    version: Application's major version number.
    runtime: Runtime used by application.
    api_version: Which version of APIs to use.
    handlers: List of URL handlers.
    default_expiration: Default time delta to use for cache expiration for
      all static files, unless they have their own specific 'expiration' set.
      See the URLMap.expiration field's documentation for more information.
    skip_files: An re object.  Files that match this regular expression will
      not be uploaded by appcfg.py.  For example:
        skip_files: |
          .svn.*|
          #.*#
  """
  ATTRIBUTES = {
      APPLICATION: APPLICATION_RE_STRING,
      VERSION: VERSION_RE_STRING,
      RUNTIME: RUNTIME_RE_STRING,
      API_VERSION: API_VERSION_RE_STRING,
      HANDLERS: validation.Optional(validation.Repeated(URLMap)),
      DEFAULT_EXPIRATION: validation.Optional(_EXPIRATION_REGEX),
      SKIP_FILES: validation.RegexStr(default=DEFAULT_SKIP_FILES)
  }

  def CheckInitialized(self):
    """Ensures that at least one url mapping is provided.

    Raises:
      MissingURLMapping when no URLMap objects are present in object.
      TooManyURLMappings when there are too many URLMap entries.
    """
    super(AppInfoExternal, self).CheckInitialized()
    if not self.handlers:
      raise appinfo_errors.MissingURLMapping(
          'No URLMap entries found in application configuration')
    if len(self.handlers) > MAX_URL_MAPS:
      raise appinfo_errors.TooManyURLMappings(
          'Found more than %d URLMap entries in application configuration' %
          MAX_URL_MAPS)
def LoadSingleAppInfo(app_info):
  """Load a single AppInfo object where one and only one is expected.

  Args:
    app_info: A file-like object or string.  If it is a string, parse it as
      a configuration file.  If it is a file-like object, read in data and
      parse.

  Returns:
    An instance of AppInfoExternal as loaded from a YAML file.

  Raises:
    EmptyConfigurationFile when there are no documents in YAML file.
    MultipleConfigurationFile when there is more than one document in YAML
      file.
  """
  # Wire up the YAML event listener -> builder pipeline that constructs
  # validated AppInfoExternal objects from the stream.
  builder = yaml_object.ObjectBuilder(AppInfoExternal)
  handler = yaml_builder.BuilderHandler(builder)
  listener = yaml_listener.EventListener(handler)
  listener.Parse(app_info)

  app_infos = handler.GetResults()
  if len(app_infos) < 1:
    raise appinfo_errors.EmptyConfigurationFile()
  if len(app_infos) > 1:
    raise appinfo_errors.MultipleConfigurationFile()
  return app_infos[0]
def ParseExpiration(expiration):
  """Converts an expiration specification string into seconds.

  Each '<amount><unit>' component found in the string is converted with
  _EXPIRATION_CONVERSIONS (a missing unit letter means seconds) and the
  results are summed.

  Args:
    expiration: String that matches _DELTA_REGEX.

  Returns:
    Time delta in seconds.
  """
  total = 0
  for component in re.finditer(_DELTA_REGEX, expiration):
    unit_seconds = _EXPIRATION_CONVERSIONS.get(component.group(2).lower(), 1)
    total += int(component.group(1)) * unit_seconds
  return total
# Whitelist of acceptable path characters, capped at 256 chars.
_file_path_positive_re = re.compile(r'^[ 0-9a-zA-Z\._\+/\$-]{1,256}$')

# Rejected shapes: '..', leading './', trailing '.', '/./', leading '-'.
_file_path_negative_1_re = re.compile(r'\.\.|^\./|\.$|/\./|^-')

# Rejected shapes: '//' anywhere, trailing '/'.
_file_path_negative_2_re = re.compile(r'//|/$')

# Rejected shapes: leading/trailing spaces or spaces adjacent to '/'.
_file_path_negative_3_re = re.compile(r'^ | $|/ | /')


def ValidFilename(filename):
  """Determines if filename is valid.

  filename must be a valid pathname.
  - It must contain only letters, numbers, _, +, /, $, ., and -.
  - It must be less than 256 chars.
  - It must not contain "/./", "/../", or "//".
  - It must not end in "/".
  - All spaces must be in the middle of a directory or file name.

  Args:
    filename: The filename to validate.

  Returns:
    An error string if the filename is invalid.  Returns '' if the filename
    is valid.
  """
  if not _file_path_positive_re.match(filename):
    return 'Invalid character in filename: %s' % filename
  if _file_path_negative_1_re.search(filename):
    return ('Filename cannot contain "." or ".." or start with "-": %s' %
            filename)
  if _file_path_negative_2_re.search(filename):
    return 'Filename cannot have trailing / or contain //: %s' % filename
  if _file_path_negative_3_re.search(filename):
    return 'Any spaces must be in the middle of a filename: %s' % filename
  return ''
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
class StringProto(ProtocolBuffer.ProtocolMessage):
  """Generated message holding a single required string field `value`."""
  # Presence flag and default for field 1 (value).
  has_value_ = 0
  value_ = ""

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = ""

  def has_value(self): return self.has_value_


  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthString(len(self.value_))
    return n + 1

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    # Tag 10 = field number 1, length-delimited wire type.
    out.putVarInt32(10)
    out.putPrefixedString(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_value(d.getPrefixedString())
        continue
      # Tag 0 is never valid; anything else unknown is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatString(self.value_))
    return res

  kvalue = 1

  _TEXT = (
   "ErrorCode",
   "value",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Integer32Proto(ProtocolBuffer.ProtocolMessage):
  """Generated message holding a single required int32 field `value`."""
  # Presence flag and default for field 1 (value).
  has_value_ = 0
  value_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0

  def has_value(self): return self.has_value_


  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.value_)
    return n + 1

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    # Tag 8 = field number 1, varint wire type.
    out.putVarInt32(8)
    out.putVarInt32(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_value(d.getVarInt32())
        continue
      # Tag 0 is never valid; anything else unknown is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt32(self.value_))
    return res

  kvalue = 1

  _TEXT = (
   "ErrorCode",
   "value",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class Integer64Proto(ProtocolBuffer.ProtocolMessage):
  """Generated message holding a single required int64 field `value`."""
  # Presence flag and default for field 1 (value).
  has_value_ = 0
  value_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0

  def has_value(self): return self.has_value_


  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    n = 0
    n += self.lengthVarInt64(self.value_)
    return n + 1

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    # Tag 8 = field number 1, varint wire type; 64-bit varint payload.
    out.putVarInt32(8)
    out.putVarInt64(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_value(d.getVarInt64())
        continue
      # Tag 0 is never valid; anything else unknown is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatInt64(self.value_))
    return res

  kvalue = 1

  _TEXT = (
   "ErrorCode",
   "value",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class BoolProto(ProtocolBuffer.ProtocolMessage):
  """Generated message holding a single required bool field `value`."""
  # Presence flag and default for field 1 (value).
  has_value_ = 0
  value_ = 0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0

  def has_value(self): return self.has_value_


  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    # Fixed size: one tag byte plus one boolean byte.
    n = 0
    return n + 2

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    # Tag 8 = field number 1, varint wire type.
    out.putVarInt32(8)
    out.putBoolean(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 8:
        self.set_value(d.getBoolean())
        continue
      # Tag 0 is never valid; anything else unknown is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormatBool(self.value_))
    return res

  kvalue = 1

  _TEXT = (
   "ErrorCode",
   "value",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class DoubleProto(ProtocolBuffer.ProtocolMessage):
  """Generated message holding a single required double field `value`."""
  # Presence flag and default for field 1 (value).
  has_value_ = 0
  value_ = 0.0

  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)

  def value(self): return self.value_

  def set_value(self, x):
    self.has_value_ = 1
    self.value_ = x

  def clear_value(self):
    if self.has_value_:
      self.has_value_ = 0
      self.value_ = 0.0

  def has_value(self): return self.has_value_


  def MergeFrom(self, x):
    assert x is not self
    if (x.has_value()): self.set_value(x.value())

  def Equals(self, x):
    if x is self: return 1
    if self.has_value_ != x.has_value_: return 0
    if self.has_value_ and self.value_ != x.value_: return 0
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    if (not self.has_value_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: value not set.')
    return initialized

  def ByteSize(self):
    # Fixed size: one tag byte plus an 8-byte double.
    n = 0
    return n + 9

  def Clear(self):
    self.clear_value()

  def OutputUnchecked(self, out):
    # Tag 9 = field number 1, 64-bit fixed wire type.
    out.putVarInt32(9)
    out.putDouble(self.value_)

  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 9:
        self.set_value(d.getDouble())
        continue
      # Tag 0 is never valid; anything else unknown is skipped.
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)


  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_value_: res+=prefix+("value: %s\n" % self.DebugFormat(self.value_))
    return res

  kvalue = 1

  _TEXT = (
   "ErrorCode",
   "value",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.DOUBLE,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class VoidProto(ProtocolBuffer.ProtocolMessage):
  """Empty generated message used for calls with no arguments or results."""
  def __init__(self, contents=None):
    pass
    # NOTE: the dead `pass` above is an artifact of the code generator.
    if contents is not None: self.MergeFromString(contents)
  def MergeFrom(self, x):
    assert x is not self
  def Equals(self, x):
    if x is self: return 1
    return 1
  # No required fields, so a VoidProto is always initialized.
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized
  # Serializes to zero bytes.
  def ByteSize(self):
    n = 0
    return n + 0
  def Clear(self):
    pass
  def OutputUnchecked(self, out):
    pass
  # Every encountered field is unknown and skipped; tag 0 is malformed.
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res
  # Reflection tables used by the PB runtime.
  _TEXT = (
   "ErrorCode",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public surface of this module: the simple wrapper messages defined above.
__all__ = ['StringProto','Integer32Proto','Integer64Proto','BoolProto','DoubleProto','VoidProto']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Base class for implementing RPC of API proxy stubs."""
import sys
class RPC(object):
"""Base class for implementing RPC of API proxy stubs.
To implement a RPC to make real asynchronous API call:
- Extend this class.
- Override _MakeCallImpl and/or _WaitImpl to do a real asynchronous call.
"""
IDLE = 0
RUNNING = 1
FINISHING = 2
def __init__(self, package=None, call=None, request=None, response=None,
callback=None, deadline=None, stub=None):
"""Constructor for the RPC object.
All arguments are optional, and simply set members on the class.
These data members will be overriden by values passed to MakeCall.
Args:
package: string, the package for the call
call: string, the call within the package
request: ProtocolMessage instance, appropriate for the arguments
response: ProtocolMessage instance, appropriate for the response
callback: callable, called when call is complete
deadline: A double specifying the deadline for this call as the number of
seconds from the current time. Ignored if non-positive.
stub: APIProxyStub instance, used in default _WaitImpl to do real call
"""
self.__exception = None
self.__state = RPC.IDLE
self.__traceback = None
self.package = package
self.call = call
self.request = request
self.response = response
self.callback = callback
self.deadline = deadline
self.stub = stub
def MakeCall(self, package=None, call=None, request=None, response=None,
callback=None, deadline=None):
"""Makes an asynchronous (i.e. non-blocking) API call within the
specified package for the specified call method.
It will call the _MakeRealCall to do the real job.
Args:
Same as constructor; see __init__.
Raises:
TypeError or AssertionError if an argument is of an invalid type.
AssertionError or RuntimeError is an RPC is already in use.
"""
self.callback = callback or self.callback
self.package = package or self.package
self.call = call or self.call
self.request = request or self.request
self.response = response or self.response
self.deadline = deadline or self.deadline
assert self.__state is RPC.IDLE, ('RPC for %s.%s has already been started' %
(self.package, self.call))
assert self.callback is None or callable(self.callback)
self._MakeCallImpl()
def Wait(self):
"""Waits on the API call associated with this RPC."""
rpc_completed = self._WaitImpl()
assert rpc_completed, ('RPC for %s.%s was not completed, and no other ' +
'exception was raised ' % (self.package, self.call))
def CheckSuccess(self):
"""If there was an exception, raise it now.
Raises:
Exception of the API call or the callback, if any.
"""
if self.exception and self.__traceback:
raise self.exception.__class__, self.exception, self.__traceback
elif self.exception:
raise self.exception
@property
def exception(self):
return self.__exception
@property
def state(self):
return self.__state
def _MakeCallImpl(self):
"""Override this method to implement a real asynchronous call rpc."""
self.__state = RPC.RUNNING
def _WaitImpl(self):
"""Override this method to implement a real asynchronous call rpc.
Returns:
True if the async call was completed successfully.
"""
try:
try:
self.stub.MakeSyncCall(self.package, self.call,
self.request, self.response)
except Exception, e:
self.__exception = e
finally:
self.__state = RPC.FINISHING
self.__Callback()
return True
def __Callback(self):
if self.callback:
try:
self.callback()
except:
exc_class, self.__exception, self.__traceback = sys.exc_info()
self.__exception._appengine_apiproxy_rpc = self
raise
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Validation tools for generic object structures.
This library is used for defining classes with constrained attributes.
Attributes are defined on the class which contains them using validators.
Although validators can be defined by any client of this library, a number
of standard validators are provided here.
Validators can be any callable that takes a single parameter which checks
the new value before it is assigned to the attribute. Validators are
permitted to modify a received value so that it is appropriate for the
attribute definition. For example, using int as a validator will cast
a correctly formatted string to a number, or raise an exception if it
can not. This is not recommended, however. the correct way to use a
validator that ensure the correct type is to use the Type validator.
This validation library is mainly intended for use with the YAML object
builder. See yaml_object.py.
"""
import re
import google
import yaml
class Error(Exception):
  """Base class for all package errors.

  Catching this exception covers every error raised by this module.
  """
class AttributeDefinitionError(Error):
  """An error occurred in the definition of class attributes.

  Signals programmer misuse of ATTRIBUTES maps or validator constructors,
  as opposed to invalid data encountered at validation time.
  """
class ValidationError(Error):
  """Base class for raising exceptions during validation."""

  def __init__(self, message, cause=None):
    """Initialize exception.

    Args:
      message: Human readable description of the validation failure.
      cause: Underlying exception that triggered this error, if any; its
        args are folded into this error's args for extra context.
    """
    cause_args = getattr(cause, 'args', None)
    if cause_args:
      Error.__init__(self, message, *cause_args)
    else:
      Error.__init__(self, message)
    self.message = message
    self.cause = cause

  def __str__(self):
    # Only the primary message is displayed, regardless of extra args.
    return str(self.message)
class MissingAttribute(ValidationError):
  """Raised when a required attribute is missing from object.

  CheckInitialized rewrites the message to name the missing attribute.
  """
def AsValidator(validator):
  """Wrap various types as instances of a validator.

  Used to allow shorthand for common validator types.  It
  converts the following types to the following Validators.

    strings -> Regex
    type -> Type
    collection -> Options
    Validator -> Its self!

  Args:
    validator: Object to wrap in a validator.

  Returns:
    Validator instance that wraps the given value.

  Raises:
    AttributeDefinitionError if validator is not one of the above described
    types.
  """
  # Ordered dispatch table: the first matching entry wins, mirroring the
  # precedence strings > types > collections > Validator instances.
  conversions = (
      ((str, unicode), lambda v: Regex(v, type(v))),
      (type, Type),
      ((list, tuple, set), lambda v: Options(*tuple(v))),
      (Validator, lambda v: v),
  )
  for match_types, wrap in conversions:
    if isinstance(validator, match_types):
      return wrap(validator)
  raise AttributeDefinitionError('%s is not a valid validator' %
                                 str(validator))
class Validated(object):
"""Base class for other classes that require validation.
A class which intends to use validated fields should sub-class itself from
this class. Each class should define an 'ATTRIBUTES' class variable which
should be a map from attribute name to its validator. For example:
class Story(Validated):
ATTRIBUTES = {'title': Type(str),
'authors': Repeated(Type(str)),
'isbn': Optional(Type(str)),
'pages': Type(int),
}
Attributes that are not listed under ATTRIBUTES work like normal and are
not validated upon assignment.
"""
ATTRIBUTES = None
def __init__(self, **attributes):
"""Constructor for Validated classes.
This constructor can optionally assign values to the class via its
keyword arguments.
Raises:
AttributeDefinitionError when class instance is missing ATTRIBUTE
definition or when ATTRIBUTE is of the wrong type.
"""
if not isinstance(self.ATTRIBUTES, dict):
raise AttributeDefinitionError(
'The class %s does not define an ATTRIBUTE variable.'
% self.__class__)
for key in self.ATTRIBUTES.keys():
object.__setattr__(self, key, self.GetAttribute(key).default)
self.Set(**attributes)
@classmethod
def GetAttribute(self, key):
"""Safely get the underlying attribute definition as a Validator.
Args:
key: Name of attribute to get.
Returns:
Validator associated with key or attribute value wrapped in a
validator.
"""
return AsValidator(self.ATTRIBUTES[key])
def Set(self, **attributes):
"""Set multiple values on Validated instance.
This method can only be used to assign validated methods.
Args:
attributes: Attributes to set on object.
Raises:
ValidationError when no validated attribute exists on class.
"""
for key, value in attributes.iteritems():
if key not in self.ATTRIBUTES:
raise ValidationError('Class \'%s\' does not have attribute \'%s\''
% (self.__class__, key))
setattr(self, key, value)
def CheckInitialized(self):
"""Checks that all required fields are initialized.
Since an instance of Validated starts off in an uninitialized state, it
is sometimes necessary to check that it has been fully initialized.
The main problem this solves is how to validate that an instance has
all of its required fields set. By default, Validator classes do not
allow None, but all attributes are initialized to None when instantiated.
Raises:
Exception relevant to the kind of validation. The type of the exception
is determined by the validator. Typically this will be ValueError or
TypeError.
"""
for key in self.ATTRIBUTES.iterkeys():
try:
self.GetAttribute(key)(getattr(self, key))
except MissingAttribute, e:
e.message = "Missing required value '%s'." % key
raise e
def __setattr__(self, key, value):
"""Set attribute.
Setting a value on an object of this type will only work for attributes
defined in ATTRIBUTES. To make other assignments possible it is necessary
to override this method in subclasses.
It is important that assignment is restricted in this way because
this validation is used as validation for parsing. Absent this restriction
it would be possible for method names to be overwritten.
Args:
key: Name of attribute to set.
value: Attributes new value.
Raises:
ValidationError when trying to assign to a value that does not exist.
"""
if key in self.ATTRIBUTES:
value = self.GetAttribute(key)(value)
object.__setattr__(self, key, value)
else:
raise ValidationError('Class \'%s\' does not have attribute \'%s\''
% (self.__class__, key))
def __str__(self):
"""Formatted view of validated object and nested values."""
return repr(self)
def __repr__(self):
"""Formatted view of validated object and nested values."""
values = [(attr, getattr(self, attr)) for attr in self.ATTRIBUTES]
dent = ' '
value_list = []
for attr, value in values:
value_list.append('\n%s%s=%s' % (dent, attr, value))
return "<%s %s\n%s>" % (self.__class__.__name__, ' '.join(value_list), dent)
def __eq__(self, other):
"""Equality operator.
Comparison is done by comparing all attribute values to those in the other
instance. Objects which are not of the same type are not equal.
Args:
other: Other object to compare against.
Returns:
True if validated objects are equal, else False.
"""
if type(self) != type(other):
return False
for key in self.ATTRIBUTES.iterkeys():
if getattr(self, key) != getattr(other, key):
return False
return True
def __ne__(self, other):
"""Inequality operator."""
return not self.__eq__(other)
def __hash__(self):
"""Hash function for using Validated objects in sets and maps.
Hash is done by hashing all keys and values and xor'ing them together.
Returns:
Hash of validated object.
"""
result = 0
for key in self.ATTRIBUTES.iterkeys():
value = getattr(self, key)
if isinstance(value, list):
value = tuple(value)
result = result ^ hash(key) ^ hash(value)
return result
@staticmethod
def _ToValue(validator, value):
"""Convert any value to simplified collections and basic types.
Args:
validator: An instance of Validator that corresponds with 'value'.
May also be 'str' or 'int' if those were used instead of a full
Validator.
value: Value to convert to simplified collections.
Returns:
The value as a dictionary if it is a Validated object.
A list of items converted to simplified collections if value is a list
or a tuple.
Otherwise, just the value.
"""
if isinstance(value, Validated):
return value.ToDict()
elif isinstance(value, (list, tuple)):
return [Validated._ToValue(validator, item) for item in value]
else:
if isinstance(validator, Validator):
return validator.ToValue(value)
return value
def ToDict(self):
"""Convert Validated object to a dictionary.
Recursively traverses all of its elements and converts everything to
simplified collections.
Returns:
A dict of all attributes defined in this classes ATTRIBUTES mapped
to its value. This structure is recursive in that Validated objects
that are referenced by this object and in lists are also converted to
dicts.
"""
result = {}
for name, validator in self.ATTRIBUTES.iteritems():
value = getattr(self, name)
if not(isinstance(validator, Validator) and value == validator.default):
result[name] = Validated._ToValue(validator, value)
return result
def ToYAML(self):
"""Print validated object as simplified YAML.
Returns:
Object as a simplified YAML string compatible with parsing using the
SafeLoader.
"""
return yaml.dump(self.ToDict(),
default_flow_style=False,
Dumper=yaml.SafeDumper)
class Validator(object):
  """Validator base class.

  Though any callable can be used as a validator, this class encapsulates
  the case when a specific validator needs to hold a particular state or
  configuration.

  To implement a Validator sub-class, override the Validate method.

  A sub-class is permitted to change the ultimate value that is set to the
  attribute if there is a reasonable way to perform the conversion.
  """

  expected_type = object  # Narrowed by sub-classes that know their type.

  def __init__(self, default=None):
    """Constructor.

    Args:
      default: Default assignment is made during initialization and will
        not pass through validation.
    """
    self.default = default

  def __call__(self, value):
    """Calling a validator instance validates; delegates to Validate."""
    return self.Validate(value)

  def Validate(self, value):
    """Override this method to customize sub-class behavior.

    Args:
      value: Value to validate.

    Returns:
      Value if value is valid, or a valid representation of value.
    """
    return value

  def ToValue(self, value):
    """Convert 'value' to a simplified collection or basic type.

    Subclasses of Validator should override this method when the dumped
    representation of 'value' is not simply <type>(value) (e.g. a regex).

    Args:
      value: An object of the same type that was returned from Validate().

    Returns:
      An instance of a builtin type (e.g. int, str, dict, etc).  By default
      it returns 'value' unmodified.
    """
    return value
class Type(Validator):
"""Verifies property is of expected type.
Can optionally convert value if it is not of the expected type.
It is possible to specify a required field of a specific type in shorthand
by merely providing the type. This method is slightly less efficient than
providing an explicit type but is not significant unless parsing a large
amount of information:
class Person(Validated):
ATTRIBUTES = {'name': unicode,
'age': int,
}
However, in most instances it is best to use the type constants:
class Person(Validated):
ATTRIBUTES = {'name': TypeUnicode,
'age': TypeInt,
}
"""
def __init__(self, expected_type, convert=True, default=None):
"""Initialize Type validator.
Args:
expected_type: Type that attribute should validate against.
convert: Cause conversion if value is not the right type.
Conversion is done by calling the constructor of the type
with the value as its first parameter.
"""
super(Type, self).__init__(default)
self.expected_type = expected_type
self.convert = convert
def Validate(self, value):
"""Validate that value is correct type.
Args:
value: Value to validate.
Returns:
None if value is None, value if value is of correct type, converted
value if the validator is configured to convert.
Raises:
ValidationError if value is not of the right type and validator
is not configured to convert.
"""
if not isinstance(value, self.expected_type):
if value is not None and self.convert:
try:
return self.expected_type(value)
except ValueError, e:
raise ValidationError('Type conversion failed for value \'%s\'.'
% value,
e)
except TypeError, e:
raise ValidationError('Expected value of type %s, but got \'%s\'.'
% (self.expected_type, value))
else:
raise MissingAttribute('Missing value is required.')
else:
return value
# Shorthand singleton validators for the common builtin types.  Prefer these
# in ATTRIBUTES maps over bare types to avoid re-wrapping on every access.
TYPE_BOOL = Type(bool)
TYPE_INT = Type(int)
TYPE_LONG = Type(long)
TYPE_STR = Type(str)
TYPE_UNICODE = Type(unicode)
TYPE_FLOAT = Type(float)
class Options(Validator):
  """Limit field based on pre-determined values.

  Options are used to make sure an enumerated set of values are the only
  one permitted for assignment.  It is possible to define aliases which
  map multiple string values to a single original.  An example of usage:

    class ZooAnimal(validated.Class):
      ATTRIBUTES = {
        'name': str,
        'kind': Options('platypus',                   # No aliases
                        ('rhinoceros', ['rhino']),    # One alias
                        ('canine', ('dog', 'puppy')), # Two aliases
                        ),
        }
  """

  def __init__(self, *options, **kw):
    """Initialize options.

    Args:
      options: List of allowed values.  Each entry is either a plain str
        (allowed as-is) or an (original, aliases) pair where aliases is a
        list or tuple of str values that map to original.
      kw: Only 'default' is recognized; default value for the attribute.

    Raises:
      AttributeDefinitionError when an option is malformed or duplicated.
    """
    default = kw.get('default', None)

    alias_map = {}
    def AddAlias(alias, original):
      """Register alias -> original in alias_map.

      Raises:
        AttributeDefinitionError when option already exists or if alias is
        not of type str.
      """
      if not isinstance(alias, str):
        raise AttributeDefinitionError(
            'All option values must be of type str.')
      elif alias in alias_map:
        raise AttributeDefinitionError(
            "Option '%s' already defined for options property." % alias)
      alias_map[alias] = original

    for option in options:
      if isinstance(option, str):
        # Plain option: maps to itself.
        AddAlias(option, option)

      elif isinstance(option, (list, tuple)):
        if len(option) != 2:
          # BUGFIX: the example in this message was missing its closing
          # parentheses.
          raise AttributeDefinitionError("Alias is defined as a list of tuple "
                                         "with two items. The first is the "
                                         "original option, while the second "
                                         "is a list or tuple of str aliases.\n"
                                         "\n Example:\n"
                                         " ('original', ('alias1', "
                                         "'alias2'))")
        original, aliases = option
        AddAlias(original, original)
        if not isinstance(aliases, (list, tuple)):
          raise AttributeDefinitionError('Alias lists must be a list or tuple')

        for alias in aliases:
          AddAlias(alias, original)

      else:
        raise AttributeDefinitionError("All options must be of type str "
                                       "or of the form (str, [str...]).")

    super(Options, self).__init__(default)
    self.options = alias_map

  def Validate(self, value):
    """Validate options.

    Args:
      value: Value to check against the allowed options and aliases.

    Returns:
      Original value for provided alias.

    Raises:
      ValidationError when value is not one of predefined values.
    """
    if value is None:
      raise ValidationError('Value for options field must not be None.')
    value = str(value)
    if value not in self.options:
      raise ValidationError('Value \'%s\' not in %s.'
                            % (value, self.options))
    return self.options[value]
class Optional(Validator):
  """Definition of optional attributes.

  Optional values are attributes which can be set to None or left
  unset.  All values in a basic Validated class are set to None
  at initialization.  Failure to assign to non-optional values
  will result in a validation error when calling CheckInitialized.
  """

  def __init__(self, validator, default=None):
    """Initializer.

    This constructor will make a few guesses about the value passed in
    as the validator:

      - If the validator argument is a type, it automatically creates a Type
        validator around it.
      - If the validator argument is a list or tuple, it automatically
        creates an Options validator around it.

    Args:
      validator: Optional validation condition.
      default: Default value when the attribute is left unset.

    Raises:
      AttributeDefinitionError if validator is not callable.
    """
    # Normalize shorthand (types, sequences, strings) into a real Validator.
    self.validator = AsValidator(validator)
    self.expected_type = self.validator.expected_type
    self.default = default

  def Validate(self, value):
    """Optionally require a value.

    Normal validators do not accept None.  This accepts None on behalf of
    the contained validator.

    Args:
      value: Value to be validated as optional.

    Returns:
      None if value is None, else result of the contained validation.
    """
    return None if value is None else self.validator(value)
class Regex(Validator):
  """Regular expression validator.

  Regular expression validator always converts value to string.  Note that
  matches must be exact: the pattern is anchored as '^...$', so partial
  matches will not validate.  For example:

    class ClassDescr(Validated):
      ATTRIBUTES = { 'name': Regex(r'[a-zA-Z_][a-zA-Z_0-9]*'),
                     'parent': Type(type),
                     }

  Alternatively, any attribute that is defined as a string is automatically
  interpreted to be of type Regex.  It is possible to specify unicode regex
  strings as well.  This approach is slightly less efficient, but usually
  is not significant unless parsing large amounts of data:

    class ClassDescr(Validated):
      ATTRIBUTES = { 'name': r'[a-zA-Z_][a-zA-Z_0-9]*',
                     'parent': Type(type),
                     }

    # This will raise a ValidationError exception.
    my_class(name='AName with space', parent=AnotherClass)
  """

  def __init__(self, regex, string_type=unicode, default=None):
    """Initialized regex validator.

    Args:
      regex: Regular expression string to use for comparison.
      string_type: Concrete string type (str or unicode) values must have.
      default: Default value; bypasses validation.

    Raises:
      AttributeDefinitionError if string_type is not a kind of string or
      if regex is not a string.
    """
    super(Regex, self).__init__(default)
    if string_type is basestring or not issubclass(string_type, basestring):
      raise AttributeDefinitionError(
          'Regex fields must be a string type not %s.' % str(string_type))
    if not isinstance(regex, basestring):
      raise AttributeDefinitionError(
          'Regular expression must be string. Found %s.' % str(regex))

    # Anchor the pattern so only full-string matches validate.
    self.re = re.compile('^%s$' % regex)
    self.expected_type = string_type

  def Validate(self, value):
    """Does validation of a string against a regular expression.

    Args:
      value: String to match against regular expression.

    Returns:
      The value cast to the configured string type.

    Raises:
      ValidationError when value does not match regular expression or
      when value does not match provided string type.
    """
    caster = TYPE_STR if issubclass(self.expected_type, str) else TYPE_UNICODE
    cast_value = caster(value)

    if self.re.match(cast_value) is None:
      raise ValidationError('Value \'%s\' does not match expression \'%s\''
                            % (value, self.re.pattern))
    return cast_value
class _RegexStrValue(object):
"""Simulates the regex object to support recomplation when necessary.
Used by the RegexStr class to dynamically build and recompile regular
expression attributes of a validated object. This object replaces the normal
object returned from re.compile which is immutable.
When the value of this object is a string, that string is simply used as the
regular expression when recompilation is needed. If the state of this object
is a list of strings, the strings are joined in to a single 'or' expression.
"""
def __init__(self, attribute, value):
"""Initialize recompilable regex value.
Args:
attribute: Attribute validator associated with this regex value.
value: Initial underlying python value for regex string. Either a single
regex string or a list of regex strings.
"""
self.__attribute = attribute
self.__value = value
self.__regex = None
def __AsString(self, value):
"""Convert a value to appropriate string.
Returns:
String version of value with all carriage returns and line feeds removed.
"""
if issubclass(self.__attribute.expected_type, str):
cast_value = TYPE_STR(value)
else:
cast_value = TYPE_UNICODE(value)
cast_value = cast_value.replace('\n', '')
cast_value = cast_value.replace('\r', '')
return cast_value
def __BuildRegex(self):
"""Build regex string from state.
Returns:
String version of regular expression. Sequence objects are constructed
as larger regular expression where each regex in the list is joined with
all the others as single 'or' expression.
"""
if isinstance(self.__value, list):
value_list = self.__value
sequence = True
else:
value_list = [self.__value]
sequence = False
regex_list = []
for item in value_list:
regex_list.append(self.__AsString(item))
if sequence:
return '|'.join('(?:%s)' % item for item in regex_list)
else:
return regex_list[0]
def __Compile(self):
"""Build regular expression object from state.
Returns:
Compiled regular expression based on internal value.
"""
regex = self.__BuildRegex()
try:
return re.compile(regex)
except re.error, e:
raise ValidationError('Value \'%s\' does not compile: %s' % (regex, e), e)
@property
def regex(self):
"""Compiled regular expression as described by underlying value."""
return self.__Compile()
def match(self, value):
"""Match against internal regular expression.
Returns:
Regular expression object built from underlying value.
"""
return re.match(self.__BuildRegex(), value)
def Validate(self):
"""Ensure that regex string compiles."""
self.__Compile()
def __str__(self):
"""Regular expression string as described by underlying value."""
return self.__BuildRegex()
def __eq__(self, other):
"""Comparison against other regular expression string values."""
if isinstance(other, _RegexStrValue):
return self.__BuildRegex() == other.__BuildRegex()
return str(self) == other
def __ne__(self, other):
"""Inequality operator for regular expression string value."""
return not self.__eq__(other)
class RegexStr(Validator):
  """Validates that a string can compile as a regex without errors.

  Use this validator when the value of a field should be a regex.  That
  means that the value must be a string that can be compiled by
  re.compile().  The attribute will then be a compiled re object.
  """

  def __init__(self, string_type=unicode, default=None):
    """Initialized regex validator.

    Args:
      string_type: Concrete string type (str or unicode) for values.
      default: Default regex; eagerly compiled so a bad default fails fast.

    Raises:
      AttributeDefinitionError if string_type is not a kind of string.
    """
    if default is not None:
      default = _RegexStrValue(self, default)
      re.compile(str(default))  # validate the default pattern immediately
    super(RegexStr, self).__init__(default)
    if string_type is basestring or not issubclass(string_type, basestring):
      raise AttributeDefinitionError(
          'RegexStr fields must be a string type not %s.' % str(string_type))

    self.expected_type = string_type

  def Validate(self, value):
    """Validates that the string compiles as a regular expression.

    Because the regular expression might have been expressed as a multiline
    string, this function also strips newlines out of value.

    Args:
      value: String to compile as a regular expression.

    Returns:
      A _RegexStrValue wrapping the pattern.

    Raises:
      ValueError when value does not compile as a regular expression.
      TypeError when value does not match provided string type.
    """
    if not isinstance(value, _RegexStrValue):
      value = _RegexStrValue(self, value)
      value.Validate()
    return value

  def ToValue(self, value):
    """Returns the RE pattern for this validator."""
    return str(value)
class Range(Validator):
  """Validates that numbers fall within the correct range.

  In theory this class can be emulated using Options, however error
  messages generated from that class will not be very intelligible.
  This class essentially does the same thing, but knows the intended
  integer range.

  Also, this range class supports floats and other types that implement
  ordinality.

  The range is inclusive, meaning 3 is considered in the range
  in Range(1,3).
  """

  def __init__(self, minimum, maximum, range_type=int, default=None):
    """Initializer for range.

    Args:
      minimum: Minimum for attribute.
      maximum: Maximum for attribute.
      range_type: Type of field.  Defaults to int.
      default: Default value; bypasses validation.

    Raises:
      AttributeDefinitionError when either bound is not of range_type.
    """
    super(Range, self).__init__(default)
    # Both bounds must already be of the declared range type.
    for label, bound in (('Minimum', minimum), ('Maximum', maximum)):
      if not isinstance(bound, range_type):
        raise AttributeDefinitionError(
            '%s value must be of type %s, instead it is %s (%s).' %
            (label, str(range_type), str(type(bound)), str(bound)))

    self.minimum = minimum
    self.maximum = maximum
    self.expected_type = range_type
    self._type_validator = Type(range_type)

  def Validate(self, value):
    """Validate that value is within range.

    Validates against range-type then checks the range.

    Args:
      value: Value to validate.

    Returns:
      The value cast to range_type.

    Raises:
      ValidationError when value is out of range.  ValidationError when
      value is not of the same range type.
    """
    cast_value = self._type_validator.Validate(value)
    if cast_value < self.minimum or cast_value > self.maximum:
      raise ValidationError('Value \'%s\' is out of range %s - %s'
                            % (str(value),
                               str(self.minimum),
                               str(self.maximum)))
    return cast_value
class Repeated(Validator):
  """Repeated field validator.

  Indicates that attribute is expected to be a repeated value, ie,
  a sequence.  This adds additional validation over just Type(list)
  in that it retains information about what can be stored in the list by
  use of its constructor field.
  """

  def __init__(self, constructor, default=None):
    """Initializer for repeated field.

    Args:
      constructor: Type used for verifying elements of sequence attribute.
      default: Default value; bypasses validation.
    """
    super(Repeated, self).__init__(default)
    self.constructor = constructor
    self.expected_type = list

  def Validate(self, value):
    """Do validation of sequence.

    Value must be a list and all elements must be of type 'constructor'.

    Args:
      value: Value to validate.

    Returns:
      The validated list, unchanged.

    Raises:
      ValidationError if value is None, not a list or one of its elements
      is the wrong type.
    """
    if not isinstance(value, list):
      raise ValidationError('Repeated fields must be sequence, '
                            'but found \'%s\'.' % value)

    for element in value:
      if isinstance(element, self.constructor):
        continue
      # Report the first mismatched element, as the original did.
      raise ValidationError('Repeated items must be %s, but found \'%s\'.'
                            % (str(self.constructor), str(element)))

    return value
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import VoidProto
class MailServiceError(ProtocolBuffer.ProtocolMessage):
  """Empty generated message carrying the mail-service error-code enum."""
  OK = 0
  INTERNAL_ERROR = 1
  BAD_REQUEST = 2
  UNAUTHORIZED_SENDER = 3
  INVALID_ATTACHMENT_TYPE = 4

  # Reverse mapping used by ErrorCode_Name for debugging and logging.
  _ErrorCode_NAMES = {
    0: "OK",
    1: "INTERNAL_ERROR",
    2: "BAD_REQUEST",
    3: "UNAUTHORIZED_SENDER",
    4: "INVALID_ATTACHMENT_TYPE",
  }

  # Returns the symbolic name for an error code, or "" if unknown.
  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    pass
    # NOTE: the dead `pass` above is an artifact of the code generator.
    if contents is not None: self.MergeFromString(contents)
  def MergeFrom(self, x):
    assert x is not self
  def Equals(self, x):
    if x is self: return 1
    return 1
  # No required fields, so this message is always initialized.
  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized
  # Serializes to zero bytes.
  def ByteSize(self):
    n = 0
    return n + 0
  def Clear(self):
    pass
  def OutputUnchecked(self, out):
    pass
  # Every encountered field is unknown and skipped; tag 0 is malformed.
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res
  # Reflection tables used by the PB runtime.
  _TEXT = (
   "ErrorCode",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MailAttachment(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message for a single mail attachment.

  Fields (both required):
    filename (field 1, wire tag 10): attachment file name.
    data     (field 2, wire tag 18): raw attachment bytes.
  """
  has_filename_ = 0
  filename_ = ""
  has_data_ = 0
  data_ = ""
  def __init__(self, contents=None):
    if contents is not None: self.MergeFromString(contents)
  # --- filename accessors ---
  def filename(self): return self.filename_
  def set_filename(self, x):
    self.has_filename_ = 1
    self.filename_ = x
  def clear_filename(self):
    if self.has_filename_:
      self.has_filename_ = 0
      self.filename_ = ""
  def has_filename(self): return self.has_filename_
  # --- data accessors ---
  def data(self): return self.data_
  def set_data(self, x):
    self.has_data_ = 1
    self.data_ = x
  def clear_data(self):
    if self.has_data_:
      self.has_data_ = 0
      self.data_ = ""
  def has_data(self): return self.has_data_
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_filename()): self.set_filename(x.filename())
    if (x.has_data()): self.set_data(x.data())
  def Equals(self, x):
    if x is self: return 1
    if self.has_filename_ != x.has_filename_: return 0
    if self.has_filename_ and self.filename_ != x.filename_: return 0
    if self.has_data_ != x.has_data_: return 0
    if self.has_data_ and self.data_ != x.data_: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # Both fields are required by the .proto definition.
    initialized = 1
    if (not self.has_filename_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: filename not set.')
    if (not self.has_data_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: data not set.')
    return initialized
  def ByteSize(self):
    # + 2 accounts for the two one-byte wire tags.
    n = 0
    n += self.lengthString(len(self.filename_))
    n += self.lengthString(len(self.data_))
    return n + 2
  def Clear(self):
    self.clear_filename()
    self.clear_data()
  def OutputUnchecked(self, out):
    # Tags 10 and 18 are fields 1 and 2 with wire type 2 (length-delimited).
    out.putVarInt32(10)
    out.putPrefixedString(self.filename_)
    out.putVarInt32(18)
    out.putPrefixedString(self.data_)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_filename(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_data(d.getPrefixedString())
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_filename_: res+=prefix+("FileName: %s\n" % self.DebugFormatString(self.filename_))
    if self.has_data_: res+=prefix+("Data: %s\n" % self.DebugFormatString(self.data_))
    return res
  # Field numbers.
  kFileName = 1
  kData = 2
  _TEXT = (
   "ErrorCode",
   "FileName",
   "Data",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
class MailMessage(ProtocolBuffer.ProtocolMessage):
  """Generated protocol message describing an outgoing email.

  Fields:
    sender     (1, required): 'From' address.
    replyto    (2, optional): 'Reply-To' address.
    to/cc/bcc  (3/4/5, repeated): recipient addresses.
    subject    (6, required): subject line.
    textbody   (7, optional): text/plain body.
    htmlbody   (8, optional): text/html body.
    attachment (9, repeated): MailAttachment messages.
  """
  has_sender_ = 0
  sender_ = ""
  has_replyto_ = 0
  replyto_ = ""
  has_subject_ = 0
  subject_ = ""
  has_textbody_ = 0
  textbody_ = ""
  has_htmlbody_ = 0
  htmlbody_ = ""
  def __init__(self, contents=None):
    # Repeated fields must be per-instance lists (not class attributes).
    self.to_ = []
    self.cc_ = []
    self.bcc_ = []
    self.attachment_ = []
    if contents is not None: self.MergeFromString(contents)
  # --- sender accessors ---
  def sender(self): return self.sender_
  def set_sender(self, x):
    self.has_sender_ = 1
    self.sender_ = x
  def clear_sender(self):
    if self.has_sender_:
      self.has_sender_ = 0
      self.sender_ = ""
  def has_sender(self): return self.has_sender_
  # --- replyto accessors ---
  def replyto(self): return self.replyto_
  def set_replyto(self, x):
    self.has_replyto_ = 1
    self.replyto_ = x
  def clear_replyto(self):
    if self.has_replyto_:
      self.has_replyto_ = 0
      self.replyto_ = ""
  def has_replyto(self): return self.has_replyto_
  # --- to (repeated) accessors ---
  def to_size(self): return len(self.to_)
  def to_list(self): return self.to_
  def to(self, i):
    return self.to_[i]
  def set_to(self, i, x):
    self.to_[i] = x
  def add_to(self, x):
    self.to_.append(x)
  def clear_to(self):
    self.to_ = []
  # --- cc (repeated) accessors ---
  def cc_size(self): return len(self.cc_)
  def cc_list(self): return self.cc_
  def cc(self, i):
    return self.cc_[i]
  def set_cc(self, i, x):
    self.cc_[i] = x
  def add_cc(self, x):
    self.cc_.append(x)
  def clear_cc(self):
    self.cc_ = []
  # --- bcc (repeated) accessors ---
  def bcc_size(self): return len(self.bcc_)
  def bcc_list(self): return self.bcc_
  def bcc(self, i):
    return self.bcc_[i]
  def set_bcc(self, i, x):
    self.bcc_[i] = x
  def add_bcc(self, x):
    self.bcc_.append(x)
  def clear_bcc(self):
    self.bcc_ = []
  # --- subject accessors ---
  def subject(self): return self.subject_
  def set_subject(self, x):
    self.has_subject_ = 1
    self.subject_ = x
  def clear_subject(self):
    if self.has_subject_:
      self.has_subject_ = 0
      self.subject_ = ""
  def has_subject(self): return self.has_subject_
  # --- textbody accessors ---
  def textbody(self): return self.textbody_
  def set_textbody(self, x):
    self.has_textbody_ = 1
    self.textbody_ = x
  def clear_textbody(self):
    if self.has_textbody_:
      self.has_textbody_ = 0
      self.textbody_ = ""
  def has_textbody(self): return self.has_textbody_
  # --- htmlbody accessors ---
  def htmlbody(self): return self.htmlbody_
  def set_htmlbody(self, x):
    self.has_htmlbody_ = 1
    self.htmlbody_ = x
  def clear_htmlbody(self):
    if self.has_htmlbody_:
      self.has_htmlbody_ = 0
      self.htmlbody_ = ""
  def has_htmlbody(self): return self.has_htmlbody_
  # --- attachment (repeated message) accessors ---
  def attachment_size(self): return len(self.attachment_)
  def attachment_list(self): return self.attachment_
  def attachment(self, i):
    return self.attachment_[i]
  def mutable_attachment(self, i):
    return self.attachment_[i]
  def add_attachment(self):
    # Appends a new empty MailAttachment and returns it for the caller
    # to populate.
    x = MailAttachment()
    self.attachment_.append(x)
    return x
  def clear_attachment(self):
    self.attachment_ = []
  def MergeFrom(self, x):
    assert x is not self
    if (x.has_sender()): self.set_sender(x.sender())
    if (x.has_replyto()): self.set_replyto(x.replyto())
    for i in xrange(x.to_size()): self.add_to(x.to(i))
    for i in xrange(x.cc_size()): self.add_cc(x.cc(i))
    for i in xrange(x.bcc_size()): self.add_bcc(x.bcc(i))
    if (x.has_subject()): self.set_subject(x.subject())
    if (x.has_textbody()): self.set_textbody(x.textbody())
    if (x.has_htmlbody()): self.set_htmlbody(x.htmlbody())
    for i in xrange(x.attachment_size()): self.add_attachment().CopyFrom(x.attachment(i))
  def Equals(self, x):
    if x is self: return 1
    if self.has_sender_ != x.has_sender_: return 0
    if self.has_sender_ and self.sender_ != x.sender_: return 0
    if self.has_replyto_ != x.has_replyto_: return 0
    if self.has_replyto_ and self.replyto_ != x.replyto_: return 0
    if len(self.to_) != len(x.to_): return 0
    for e1, e2 in zip(self.to_, x.to_):
      if e1 != e2: return 0
    if len(self.cc_) != len(x.cc_): return 0
    for e1, e2 in zip(self.cc_, x.cc_):
      if e1 != e2: return 0
    if len(self.bcc_) != len(x.bcc_): return 0
    for e1, e2 in zip(self.bcc_, x.bcc_):
      if e1 != e2: return 0
    if self.has_subject_ != x.has_subject_: return 0
    if self.has_subject_ and self.subject_ != x.subject_: return 0
    if self.has_textbody_ != x.has_textbody_: return 0
    if self.has_textbody_ and self.textbody_ != x.textbody_: return 0
    if self.has_htmlbody_ != x.has_htmlbody_: return 0
    if self.has_htmlbody_ and self.htmlbody_ != x.htmlbody_: return 0
    if len(self.attachment_) != len(x.attachment_): return 0
    for e1, e2 in zip(self.attachment_, x.attachment_):
      if e1 != e2: return 0
    return 1
  def IsInitialized(self, debug_strs=None):
    # sender and subject are required; attachments must themselves be
    # initialized.
    initialized = 1
    if (not self.has_sender_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: sender not set.')
    if (not self.has_subject_):
      initialized = 0
      if debug_strs is not None:
        debug_strs.append('Required field: subject not set.')
    for p in self.attachment_:
      if not p.IsInitialized(debug_strs): initialized=0
    return initialized
  def ByteSize(self):
    # Each "1 *" / "+ 1" term accounts for a one-byte wire tag; the
    # trailing + 2 covers the tags of the two required fields.
    n = 0
    n += self.lengthString(len(self.sender_))
    if (self.has_replyto_): n += 1 + self.lengthString(len(self.replyto_))
    n += 1 * len(self.to_)
    for i in xrange(len(self.to_)): n += self.lengthString(len(self.to_[i]))
    n += 1 * len(self.cc_)
    for i in xrange(len(self.cc_)): n += self.lengthString(len(self.cc_[i]))
    n += 1 * len(self.bcc_)
    for i in xrange(len(self.bcc_)): n += self.lengthString(len(self.bcc_[i]))
    n += self.lengthString(len(self.subject_))
    if (self.has_textbody_): n += 1 + self.lengthString(len(self.textbody_))
    if (self.has_htmlbody_): n += 1 + self.lengthString(len(self.htmlbody_))
    n += 1 * len(self.attachment_)
    for i in xrange(len(self.attachment_)): n += self.lengthString(self.attachment_[i].ByteSize())
    return n + 2
  def Clear(self):
    self.clear_sender()
    self.clear_replyto()
    self.clear_to()
    self.clear_cc()
    self.clear_bcc()
    self.clear_subject()
    self.clear_textbody()
    self.clear_htmlbody()
    self.clear_attachment()
  def OutputUnchecked(self, out):
    # Wire tags: (field_number << 3) | 2 for length-delimited fields.
    out.putVarInt32(10)
    out.putPrefixedString(self.sender_)
    if (self.has_replyto_):
      out.putVarInt32(18)
      out.putPrefixedString(self.replyto_)
    for i in xrange(len(self.to_)):
      out.putVarInt32(26)
      out.putPrefixedString(self.to_[i])
    for i in xrange(len(self.cc_)):
      out.putVarInt32(34)
      out.putPrefixedString(self.cc_[i])
    for i in xrange(len(self.bcc_)):
      out.putVarInt32(42)
      out.putPrefixedString(self.bcc_[i])
    out.putVarInt32(50)
    out.putPrefixedString(self.subject_)
    if (self.has_textbody_):
      out.putVarInt32(58)
      out.putPrefixedString(self.textbody_)
    if (self.has_htmlbody_):
      out.putVarInt32(66)
      out.putPrefixedString(self.htmlbody_)
    for i in xrange(len(self.attachment_)):
      out.putVarInt32(74)
      out.putVarInt32(self.attachment_[i].ByteSize())
      self.attachment_[i].OutputUnchecked(out)
  def TryMerge(self, d):
    while d.avail() > 0:
      tt = d.getVarInt32()
      if tt == 10:
        self.set_sender(d.getPrefixedString())
        continue
      if tt == 18:
        self.set_replyto(d.getPrefixedString())
        continue
      if tt == 26:
        self.add_to(d.getPrefixedString())
        continue
      if tt == 34:
        self.add_cc(d.getPrefixedString())
        continue
      if tt == 42:
        self.add_bcc(d.getPrefixedString())
        continue
      if tt == 50:
        self.set_subject(d.getPrefixedString())
        continue
      if tt == 58:
        self.set_textbody(d.getPrefixedString())
        continue
      if tt == 66:
        self.set_htmlbody(d.getPrefixedString())
        continue
      if tt == 74:
        # Embedded message: decode exactly `length` bytes into a new
        # attachment using a bounded sub-decoder.
        length = d.getVarInt32()
        tmp = ProtocolBuffer.Decoder(d.buffer(), d.pos(), d.pos() + length)
        d.skip(length)
        self.add_attachment().TryMerge(tmp)
        continue
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)
  def __str__(self, prefix="", printElemNumber=0):
    res=""
    if self.has_sender_: res+=prefix+("Sender: %s\n" % self.DebugFormatString(self.sender_))
    if self.has_replyto_: res+=prefix+("ReplyTo: %s\n" % self.DebugFormatString(self.replyto_))
    cnt=0
    for e in self.to_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("To%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.cc_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Cc%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    cnt=0
    for e in self.bcc_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Bcc%s: %s\n" % (elm, self.DebugFormatString(e)))
      cnt+=1
    if self.has_subject_: res+=prefix+("Subject: %s\n" % self.DebugFormatString(self.subject_))
    if self.has_textbody_: res+=prefix+("TextBody: %s\n" % self.DebugFormatString(self.textbody_))
    if self.has_htmlbody_: res+=prefix+("HtmlBody: %s\n" % self.DebugFormatString(self.htmlbody_))
    cnt=0
    for e in self.attachment_:
      elm=""
      if printElemNumber: elm="(%d)" % cnt
      res+=prefix+("Attachment%s <\n" % elm)
      res+=e.__str__(prefix + "  ", printElemNumber)
      res+=prefix+">\n"
      cnt+=1
    return res
  # Field numbers.
  kSender = 1
  kReplyTo = 2
  kTo = 3
  kCc = 4
  kBcc = 5
  kSubject = 6
  kTextBody = 7
  kHtmlBody = 8
  kAttachment = 9
  _TEXT = (
   "ErrorCode",
   "Sender",
   "ReplyTo",
   "To",
   "Cc",
   "Bcc",
   "Subject",
   "TextBody",
   "HtmlBody",
   "Attachment",
  )
  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
   ProtocolBuffer.Encoder.STRING,
  )
  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module.
__all__ = ['MailServiceError','MailAttachment','MailMessage']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors used in the YAML API, which is used by app developers."""
class Error(Exception):
  """Base error type for the datastore YAML API."""
class ProtocolBufferParseError(Error):
  """Error in protocol buffer parsing."""
class EmptyConfigurationFile(Error):
  """Raised when attempting to load an empty configuration file."""
class MultipleConfigurationFile(Error):
  """Raised when a configuration file contains multiple objects."""
class UnexpectedAttribute(Error):
  """Raised when an unexpected attribute is encountered."""
class DuplicateAttribute(Error):
  """Raised when an attribute is assigned to twice."""
class ListenerConfigurationError(Error):
  """Raised when there is a parsing problem due to configuration."""
class IllegalEvent(Error):
  """Raised when an unexpected event type is received by a listener."""
class InternalError(Error):
  """Raised when an internal implementation error is detected."""
class EventListenerError(Error):
  """Top level exception raised by the YAML listener.
  Any exception raised while parsing a YAML file via an EventListener is
  caught and wrapped in an EventListenerError.  The original exception is
  kept, along with additional information useful for reporting to users.
  Attributes:
    cause: The original exception which caused this error.
  """
  def __init__(self, cause):
    """Wrap the causing exception, mirroring its args when it has any."""
    cause_args = getattr(cause, 'args', None)
    if cause_args:
      Error.__init__(self, *cause_args)
    else:
      Error.__init__(self, str(cause))
    self.cause = cause
class EventListenerYAMLError(EventListenerError):
  """Raised specifically to wrap yaml.error.YAMLError."""
class EventError(EventListenerError):
  """Raised when an error occurs inside a YAML event handler.
  Attributes:
    cause: The original exception which caused this error.
    event: Event being handled when the exception occurred.
  """
  def __init__(self, cause, event):
    """Remember the offending event in addition to the causing exception."""
    super(EventError, self).__init__(cause)
    self.event = event
  def __str__(self):
    """Render the cause followed by the event's start position."""
    return '%s\n%s' % (self.cause, self.event.start_mark)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Base class for implementing API proxy stubs."""
from google.appengine.api import apiproxy_rpc
from google.appengine.runtime import apiproxy_errors
MAX_REQUEST_SIZE = 1 << 20
class APIProxyStub(object):
  """Base class for implementing API proxy stub classes.
  To implement an API proxy stub:
    - Extend this class.
    - Override __init__ to pass in appropriate default service name.
    - Implement service methods as _Dynamic_<method>(request, response).
  """
  def __init__(self, service_name, max_request_size=MAX_REQUEST_SIZE):
    """Constructor.
    Args:
      service_name: Service name expected for all calls.
      max_request_size: int, maximum allowable size of the incoming request. An
        apiproxy_errors.RequestTooLargeError will be raised if the inbound
        request exceeds this size.  Default is 1 MB.
    """
    # Name-mangled so subclasses cannot accidentally shadow these.
    self.__service_name = service_name
    self.__max_request_size = max_request_size
  def CreateRPC(self):
    """Creates RPC object instance.
    Returns:
      an instance of RPC.
    """
    return apiproxy_rpc.RPC(stub=self)
  def MakeSyncCall(self, service, call, request, response):
    """The main RPC entry point.
    Dispatches to the subclass's _Dynamic_<call> method after validating
    the service name, the request size, and request initialization.
    Args:
      service: Must be the name provided to service_name of the constructor.
      call: A string representing the rpc to make.  Must be part of
        the underlying service's methods and implemented by _Dynamic_<call>.
      request: A protocol buffer of the type corresponding to 'call'.
      response: A protocol buffer of the type corresponding to 'call'.
    Raises:
      apiproxy_errors.RequestTooLargeError: if the serialized request exceeds
        the configured maximum size.
    """
    assert service == self.__service_name, ('Expected "%s" service name, '
                                            'was "%s"' % (self.__service_name,
                                                          service))
    if request.ByteSize() > self.__max_request_size:
      raise apiproxy_errors.RequestTooLargeError(
          'The request to API call %s.%s() was too large.' % (service, call))
    messages = []
    assert request.IsInitialized(messages), messages
    # Subclasses implement the actual behavior as _Dynamic_<call>.
    method = getattr(self, '_Dynamic_' + call)
    method(request, response)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Sends email on behalf of application.
Provides functions for application developers to provide email services
for their applications. Also provides a few utility methods.
"""
from email import MIMEBase
from email import MIMEMultipart
from email import MIMEText
import mimetypes
import types
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import mail_service_pb
from google.appengine.api import users
from google.appengine.api.mail_errors import *
from google.appengine.runtime import apiproxy_errors
# Maps MailServiceError protocol error codes to the mail_errors exception
# classes raised to callers of the mail API (see _EmailMessageBase.send).
ERROR_MAP = {
    mail_service_pb.MailServiceError.BAD_REQUEST:
      BadRequestError,
    mail_service_pb.MailServiceError.UNAUTHORIZED_SENDER:
      InvalidSenderError,
    mail_service_pb.MailServiceError.INVALID_ATTACHMENT_TYPE:
      InvalidAttachmentTypeError,
}
# File extensions permitted for attachments; anything else raises
# InvalidAttachmentTypeError during message validation.
EXTENSION_WHITELIST = set([
    'bmp',
    'css',
    'csv',
    'gif',
    'html', 'htm',
    'jpeg', 'jpg', 'jpe',
    'pdf',
    'png',
    'rss',
    'text', 'txt', 'asc', 'diff', 'pot',
    'tiff', 'tif',
    'wbmp',
])
def invalid_email_reason(email_address, field):
  """Determine the reason why an email address is invalid.
  Args:
    email_address: Email to check; may be a string, a users.User, or None.
    field: Name of the field the address came from, used in messages.
  Returns:
    String indicating invalid email reason if there is one, else None.
  """
  if email_address is None:
    return 'None email address for %s.' % field
  address = email_address
  if isinstance(address, users.User):
    address = address.email()
  if not isinstance(address, types.StringTypes):
    return 'Invalid email address type for %s.' % field
  if not address.strip():
    return 'Empty email address for %s.' % field
  return None
InvalidEmailReason = invalid_email_reason
def is_email_valid(email_address):
  """Tell whether an email address is valid.
  Args:
    email_address: Email to check.
  Returns:
    True if email is valid, else False.
  """
  reason = invalid_email_reason(email_address, '')
  return reason is None
IsEmailValid = is_email_valid
def check_email_valid(email_address, field):
  """Ensure an email address is valid, raising if it is not.
  Args:
    email_address: Email to check.
    field: Name of the field the address came from, used in the message.
  Raises:
    InvalidEmailError if email_address is invalid.
  """
  message = invalid_email_reason(email_address, field)
  if message:
    raise InvalidEmailError(message)
CheckEmailValid = check_email_valid
def _email_check_and_list(emails, field):
  """Validate a single email address or a collection of addresses.
  Note: despite the historical name, this function does not return a list;
  it only validates.  Callers use _email_sequence() to get a sequence.
  Args:
    emails: Single email address or iterable of email addresses.
    field: Name of the field the addresses belong to, used in error messages.
  Raises:
    InvalidEmailError if any email address is invalid.
  """
  if isinstance(emails, types.StringTypes):
    # Bug fix: previously called check_email_valid(value), referencing an
    # undefined name and raising NameError for any single-string input.
    check_email_valid(emails, field)
  else:
    for address in iter(emails):
      check_email_valid(address, field)
def _email_sequence(emails):
  """Coerce emails to a sequenceable type.
  Iterable values are returned as is; a single email string is wrapped in
  a one-element tuple.
  Args:
    emails: Emails (or email) to coerce to sequence.
  Returns:
    Single tuple with the email in it if only one email string provided,
    else returns emails as is.
  """
  return (emails,) if isinstance(emails, types.StringTypes) else emails
def _attachment_sequence(attachments):
  """Coerce attachments to a sequenceable type.
  Iterable values are returned as is; a lone (file_name, data) pair is
  wrapped in a one-element tuple.
  Args:
    attachments: Attachments (or attachment) to coerce to sequence.
  Returns:
    Single tuple with the attachment tuple in it if only one attachment
    provided, else returns attachments as is.
  """
  is_single = (len(attachments) == 2 and
               isinstance(attachments[0], types.StringTypes))
  return (attachments,) if is_single else attachments
def send_mail(sender,
              to,
              subject,
              body,
              make_sync_call=apiproxy_stub_map.MakeSyncCall,
              **kw):
  """Sends mail on behalf of application.
  Convenience wrapper that builds an EmailMessage from the positional
  arguments plus any extra keyword properties and sends it.
  Args:
    sender: Sender email address as appears in the 'from' email line.
    to: List of 'to' addresses or a single address.
    subject: Message subject string.
    body: Body of type text/plain.
    make_sync_call: Function used to make sync call to API proxy.
    kw: Keyword arguments compatible with EmailMessage keyword based
      constructor.
  Raises:
    InvalidEmailError when invalid email address provided.
  """
  kw.update(sender=sender, to=to, subject=subject, body=body)
  EmailMessage(**kw).send(make_sync_call)
SendMail = send_mail
def send_mail_to_admins(sender,
                        subject,
                        body,
                        make_sync_call=apiproxy_stub_map.MakeSyncCall,
                        **kw):
  """Sends mail to admins on behalf of application.
  Convenience wrapper that builds an AdminEmailMessage from the positional
  arguments plus any extra keyword properties and sends it.
  Args:
    sender: Sender email address as appears in the 'from' email line.
    subject: Message subject string.
    body: Body of type text/plain.
    make_sync_call: Function used to make sync call to API proxy.
    kw: Keyword arguments compatible with AdminEmailMessage keyword based
      constructor.
  Raises:
    InvalidEmailError when invalid email address provided.
  """
  kw.update(sender=sender, subject=subject, body=body)
  AdminEmailMessage(**kw).send(make_sync_call)
SendMailToAdmins = send_mail_to_admins
def mail_message_to_mime_message(protocol_message):
  """Generate a MIMEMultitype message from protocol buffer.
  Generates a complete MIME multi-part email object from a MailMessage
  protocol buffer.  The body fields are sent as individual alternatives
  if they are both present, otherwise, only one body part is sent.
  Multiple entry email fields such as 'To', 'Cc' and 'Bcc' are converted
  to a list of comma separated email addresses.
  Args:
    protocol_message: MailMessage PB to convert to MIMEMultitype.
  Returns:
    MIMEMultitype representing the provided MailMessage.
  """
  parts = []
  if protocol_message.has_textbody():
    parts.append(MIMEText.MIMEText(protocol_message.textbody()))
  if protocol_message.has_htmlbody():
    parts.append(MIMEText.MIMEText(protocol_message.htmlbody(),
                                   _subtype='html'))
  if len(parts) == 1:
    # Only one body type: attach it directly to the outer multipart.
    payload = parts
  else:
    # Both text and HTML: wrap them in a multipart/alternative part.
    payload = [MIMEMultipart.MIMEMultipart('alternative', _subparts=parts)]
  result = MIMEMultipart.MIMEMultipart(_subparts=payload)
  for attachment in protocol_message.attachment_list():
    mime_type, encoding = mimetypes.guess_type(attachment.filename())
    # Filenames are validated against EXTENSION_WHITELIST upstream, so a
    # MIME type should always be guessable here.
    assert mime_type is not None
    maintype, subtype = mime_type.split('/')
    mime_attachment = MIMEBase.MIMEBase(maintype, subtype)
    mime_attachment.add_header('Content-Disposition',
                               'attachment',
                               filename=attachment.filename())
    mime_attachment.set_charset(encoding)
    mime_attachment.set_payload(attachment.data())
    result.attach(mime_attachment)
  if protocol_message.to_size():
    result['To'] = ', '.join(protocol_message.to_list())
  if protocol_message.cc_size():
    result['Cc'] = ', '.join(protocol_message.cc_list())
  if protocol_message.bcc_size():
    result['Bcc'] = ', '.join(protocol_message.bcc_list())
  result['From'] = protocol_message.sender()
  # NOTE(review): 'ReplyTo' is set unconditionally (empty string when the
  # field is unset) and uses a non-standard header name (not 'Reply-To') --
  # preserved as-is; confirm before changing.
  result['ReplyTo'] = protocol_message.replyto()
  result['Subject'] = protocol_message.subject()
  return result
MailMessageToMIMEMessage = mail_message_to_mime_message
def _to_str(value):
  """Encode unicode values as UTF-8, leaving other values untouched.
  Args:
    value: str or unicode to convert to utf-8.
  Returns:
    UTF-8 encoded str of value, otherwise value unchanged.
  """
  if isinstance(value, unicode):
    value = value.encode('utf-8')
  return value
class _EmailMessageBase(object):
  """Base class for email API service objects.
  Field presence is modeled with plain attributes: a field is "set" iff
  hasattr(self, name) is true, and __setattr__ restricts assignment to the
  names in PROPERTIES.
  Subclasses must define a class variable called _API_CALL with the name
  of its underlying mail sending API call.
  """
  # Names that may be assigned on instances; subclasses may extend this.
  PROPERTIES = set([
      'sender',
      'reply_to',
      'subject',
      'body',
      'html',
      'attachments',
  ])
  def __init__(self, **kw):
    """Initialize Email message.
    Creates new MailMessage protocol buffer and initializes it with any
    keyword arguments.
    Args:
      kw: List of keyword properties as defined by PROPERTIES.
    """
    self.initialize(**kw)
  def initialize(self, **kw):
    """Keyword initialization.
    Used to set all fields of the email message using keyword arguments.
    Each assignment goes through __setattr__ and is therefore validated.
    Args:
      kw: List of keyword properties as defined by PROPERTIES.
    """
    for name, value in kw.iteritems():
      setattr(self, name, value)
  def Initialize(self, **kw):
    self.initialize(**kw)
  def check_initialized(self):
    """Check if EmailMessage is properly initialized.
    Test used to determine if EmailMessage meets basic requirements
    for being used with the mail API.  This means that the following
    fields must be set or have at least one value in the case of
    multi value fields:
      - Subject must be set.
      - A recipient must be specified.
      - Must contain a body.
    This check does not include determining if the sender is actually
    authorized to send email for the application.
    Raises:
      Appropriate exception for initialization failure.
        InvalidAttachmentTypeError: Use of incorrect attachment type.
        MissingRecipientsError:     No recipients specified in to, cc or bcc.
        MissingSenderError:         No sender specified.
        MissingSubjectError:        Subject is not specified.
        MissingBodyError:           No body specified.
    """
    if not hasattr(self, 'sender'):
      raise MissingSenderError()
    if not hasattr(self, 'subject'):
      raise MissingSubjectError()
    if not hasattr(self, 'body') and not hasattr(self, 'html'):
      raise MissingBodyError()
    if hasattr(self, 'attachments'):
      for filename, data in _attachment_sequence(self.attachments):
        # The file must have an extension, the extension must be
        # whitelisted, and mimetypes must be able to guess a MIME type.
        split_filename = filename.split('.')
        if len(split_filename) < 2:
          raise InvalidAttachmentTypeError()
        if split_filename[-1] not in EXTENSION_WHITELIST:
          raise InvalidAttachmentTypeError()
        mime_type, encoding = mimetypes.guess_type(filename)
        if mime_type is None:
          raise InvalidAttachmentTypeError()
  def CheckInitialized(self):
    self.check_initialized()
  def is_initialized(self):
    """Determine if EmailMessage is properly initialized.
    Returns:
      True if message is properly initialized, otherwise False.
    """
    try:
      self.check_initialized()
      return True
    except Error:
      return False
  def IsInitialized(self):
    return self.is_initialized()
  def ToProto(self):
    """Convert mail message to protocol message.
    Unicode strings are converted to UTF-8 for all fields.
    This method is overridden by EmailMessage to support the recipient
    fields.
    Returns:
      MailMessage protocol version of mail message.
    """
    self.check_initialized()
    message = mail_service_pb.MailMessage()
    message.set_sender(_to_str(self.sender))
    if hasattr(self, 'reply_to'):
      message.set_replyto(_to_str(self.reply_to))
    message.set_subject(_to_str(self.subject))
    if hasattr(self, 'body'):
      message.set_textbody(_to_str(self.body))
    if hasattr(self, 'html'):
      message.set_htmlbody(_to_str(self.html))
    if hasattr(self, 'attachments'):
      for file_name, data in _attachment_sequence(self.attachments):
        attachment = message.add_attachment()
        attachment.set_filename(_to_str(file_name))
        attachment.set_data(_to_str(data))
    return message
  def to_mime_message(self):
    """Generate a MIMEMultitype message from EmailMessage.
    Calls MailMessageToMessage after converting self to protocol
    buffer.  Protocol buffer is better at handling corner cases
    than EmailMessage class.
    Returns:
      MIMEMultitype representing the provided MailMessage.
    Raises:
      Appropriate exception for initialization failure.
        InvalidAttachmentTypeError: Use of incorrect attachment type.
        MissingSenderError:         No sender specified.
        MissingSubjectError:        Subject is not specified.
        MissingBodyError:           No body specified.
    """
    return mail_message_to_mime_message(self.ToProto())
  def ToMIMEMessage(self):
    return self.to_mime_message()
  def send(self, make_sync_call=apiproxy_stub_map.MakeSyncCall):
    """Send email message.
    Send properly initialized email message via email API.
    Args:
      make_sync_call: Method which will make synchronous call to api proxy.
    Raises:
      Errors defined in this file above.
    """
    message = self.ToProto()
    response = api_base_pb.VoidProto()
    try:
      make_sync_call('mail', self._API_CALL, message, response)
    except apiproxy_errors.ApplicationError, e:
      # Translate known service error codes to the public exceptions.
      if e.application_error in ERROR_MAP:
        raise ERROR_MAP[e.application_error](e.error_detail)
      raise e
  def Send(self, *args, **kwds):
    self.send(*args, **kwds)
  def _check_attachment(self, attachment):
    # NOTE(review): uses 'or', so this only raises when BOTH elements are
    # non-strings -- looks like 'and' was intended; preserved as-is.
    file_name, data = attachment
    if not (isinstance(file_name, types.StringTypes) or
            isinstance(data, types.StringTypes)):
      raise TypeError()
  def _check_attachments(self, attachments):
    """Checks values going to attachment field.
    Mainly used to check type safety of the values.  Each value of the list
    must be a pair of the form (file_name, data), and both values a string
    type.
    Args:
      attachments: Collection of attachment tuples.
    Raises:
      TypeError if values are not string type.
    """
    # A lone (file_name, data) pair is accepted as a single attachment.
    if len(attachments) == 2 and isinstance(attachments[0], types.StringTypes):
      self._check_attachment(attachments)
    else:
      for attachment in attachments:
        self._check_attachment(attachment)
  def __setattr__(self, attr, value):
    """Property setting access control.
    Controls write access to email fields: validates sender/reply_to
    addresses, rejects empty values and unknown attribute names, and
    type-checks attachments before storing.
    Args:
      attr: Attribute to access.
      value: New value for field.
    Raises:
      ValueError: when assigning an empty value.
      AttributeError: when attr is not listed in PROPERTIES.
    """
    if attr in ['sender', 'reply_to']:
      check_email_valid(value, attr)
    if not value:
      raise ValueError('May not set empty value for \'%s\'' % attr)
    if attr not in self.PROPERTIES:
      raise AttributeError('\'EmailMessage\' has no attribute \'%s\'' % attr)
    if attr == 'attachments':
      self._check_attachments(value)
    super(_EmailMessageBase, self).__setattr__(attr, value)
class EmailMessage(_EmailMessageBase):
  """Main interface to email API service.
  This class is used to programmatically build an email message to send via
  the Mail API.  The usage is to construct an instance, populate its fields
  and call Send().
  Example Usage:
    An EmailMessage can be built completely by the constructor.
      EmailMessage(sender='sender@nowhere.com',
                   to='recipient@nowhere.com',
                   subject='a subject',
                   body='This is an email to you').Send()
    It might be desirable for an application to build an email in different
    places throughout the code.  For this, EmailMessage is mutable.
      message = EmailMessage()
      message.sender = 'sender@nowhere.com'
      message.to = ['recipient1@nowhere.com', 'recipient2@nowhere.com']
      message.subject = 'a subject'
      message.body = 'This is an email to you'
      message.check_initialized()
      message.send()
  """
  _API_CALL = 'Send'
  PROPERTIES = _EmailMessageBase.PROPERTIES
  # NOTE(review): the line above aliases the base class's set, so update()
  # below also makes 'to'/'cc'/'bcc' settable on AdminEmailMessage.  That
  # appears intentional (admin messages accept but ignore recipients, per
  # AdminEmailMessage's docstring) -- confirm before changing.
  PROPERTIES.update(('to', 'cc', 'bcc'))
  def check_initialized(self):
    """Provide additional checks to ensure recipients have been specified.
    Raises:
      MissingRecipientsError when no recipients specified in to, cc or bcc.
    """
    if (not hasattr(self, 'to') and
        not hasattr(self, 'cc') and
        not hasattr(self, 'bcc')):
      raise MissingRecipientsError()
    super(EmailMessage, self).check_initialized()
  def CheckInitialized(self):
    self.check_initialized()
  def ToProto(self):
    """Performs additional conversion of recipient fields to protocol buffer.
    Returns:
      MailMessage protocol version of mail message including recipient
      fields.
    """
    message = super(EmailMessage, self).ToProto()
    for attribute, adder in (('to', message.add_to),
                             ('cc', message.add_cc),
                             ('bcc', message.add_bcc)):
      if hasattr(self, attribute):
        for address in _email_sequence(getattr(self, attribute)):
          adder(_to_str(address))
    return message
  def __setattr__(self, attr, value):
    """Provides additional validation on recipient fields."""
    if attr in ['to', 'cc', 'bcc']:
      if isinstance(value, types.StringTypes):
        check_email_valid(value, attr)
      else:
        _email_check_and_list(value, attr)
    super(EmailMessage, self).__setattr__(attr, value)
class AdminEmailMessage(_EmailMessageBase):
  """Interface to sending email messages to all admins via the mail API.

  This class is used to programmatically build an admin email message to send
  via the Mail API.  The usage is to construct an instance, populate its fields
  and call Send().

  Unlike the normal email message, addresses in the recipient fields are
  ignored and not used for sending.

  Example Usage:
    An AdminEmailMessage can be built completely by the constructor.

      AdminEmailMessage(sender='sender@nowhere.com',
                        subject='a subject',
                        body='This is an email to you').Send()

    It might be desirable for an application to build an admin email in
    different places throughout the code.  For this, AdminEmailMessage is
    mutable.

      message = AdminEmailMessage()
      message.sender = 'sender@nowhere.com'
      message.subject = 'a subject'
      message.body = 'This is an email to you'
      message.check_initialized()
      message.send()
  """

  # RPC name used when issuing the send call for this message type.
  _API_CALL = 'SendToAdmins'
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""PyYAML event builder handler
Receives events from YAML listener and forwards them to a builder
object so that it can construct a properly structured object.
"""
from google.appengine.api import yaml_errors
from google.appengine.api import yaml_listener
import yaml
_TOKEN_DOCUMENT = 'document'
_TOKEN_SEQUENCE = 'sequence'
_TOKEN_MAPPING = 'mapping'
_TOKEN_KEY = 'key'
_TOKEN_VALUES = frozenset((
_TOKEN_DOCUMENT,
_TOKEN_SEQUENCE,
_TOKEN_MAPPING,
_TOKEN_KEY))
class Builder(object):
  """Interface for building documents and types from YAML events.

  Implement this interface to create a new builder.  Builders are
  passed to the BuilderHandler and used as a factory and assembler
  for creating concrete representations of YAML files.
  """

  def BuildDocument(self):
    """Build new document.

    The object built by this method becomes the top level entity
    that the builder handler constructs.  The actual type is
    determined by the sub-class of the Builder class and can essentially
    be any type at all.  This method is always called when the parser
    encounters the start of a new document.

    Returns:
      New object instance representing concrete document which is
      returned to user via BuilderHandler.GetResults().
    """

  def InitializeDocument(self, document, value):
    """Initialize document with value from top level of document.

    This method is called when the root document element is encountered at
    the top level of a YAML document.  It should get called immediately
    after BuildDocument.

    Receiving the None value indicates the empty document.

    Args:
      document: Document as constructed in BuildDocument.
      value: Scalar value to initialize the document with.
    """

  def BuildMapping(self, top_value):
    """Build a new mapping representation.

    Called when StartMapping event received.  Type of object is determined
    by Builder sub-class.

    Args:
      top_value: Object which will be the new mapping's parent.  Will be
        the object returned from a previous call to BuildMapping or
        BuildSequence.

    Returns:
      Instance of new object that represents a mapping type in target model.
    """

  def EndMapping(self, top_value, mapping):
    """Previously constructed mapping scope is at an end.

    Called when the end of a mapping block is encountered.  Useful for
    additional clean up or end of scope validation.

    Args:
      top_value: Value which is parent of the mapping.
      mapping: Mapping which is at the end of its scope.
    """

  def BuildSequence(self, top_value):
    """Build a new sequence representation.

    Called when StartSequence event received.  Type of object is determined
    by Builder sub-class.

    Args:
      top_value: Object which will be the new sequence's parent.  Will be
        the object returned from a previous call to BuildMapping or
        BuildSequence.

    Returns:
      Instance of new object that represents a sequence type in target model.
    """

  def EndSequence(self, top_value, sequence):
    """Previously constructed sequence scope is at an end.

    Called when the end of a sequence block is encountered.  Useful for
    additional clean up or end of scope validation.

    Args:
      top_value: Value which is parent of the sequence.
      sequence: Sequence which is at the end of its scope.
    """

  def MapTo(self, subject, key, value):
    """Map value to a mapping representation.

    Implementation is defined by sub-class of Builder.

    Args:
      subject: Object that represents mapping.  Value returned from
        BuildMapping.
      key: Key used to map value to subject.  Can be any scalar value.
      value: Value which is mapped to subject.  Can be any kind of value.
    """

  def AppendTo(self, subject, value):
    """Append value to a sequence representation.

    Implementation is defined by sub-class of Builder.

    Args:
      subject: Object that represents sequence.  Value returned from
        BuildSequence.
      value: Value to be appended to subject.  Can be any kind of value.
    """
class BuilderHandler(yaml_listener.EventHandler):
  """PyYAML event handler used to build objects.

  Maintains state information as it receives parse events so that object
  nesting is maintained.  Uses provided builder object to construct and
  assemble objects as it goes.

  As it receives events from the YAML parser, it builds a stack of data
  representing structural tokens.  As the scope of documents, mappings
  and sequences end, those token, value pairs are popped from the top of
  the stack so that the original scope can resume processing.

  A special case is made for the _KEY token.  It represents a temporary
  value which only occurs inside mappings.  It is immediately popped off
  the stack when its associated value is encountered in the parse stream.
  It is necessary to do this because the YAML parser does not combine
  key and value information into a single event.
  """

  def __init__(self, builder):
    """Initialization for builder handler.

    Args:
      builder: Instance of Builder class.

    Raises:
      ListenerConfigurationError when builder is not a Builder class.
    """
    if not isinstance(builder, Builder):
      raise yaml_errors.ListenerConfigurationError(
          'Must provide builder of type yaml_listener.Builder')
    self._builder = builder
    # _stack is None between streams; a list while a stream is being parsed.
    self._stack = None
    # _top mirrors _stack[-1] (or None when the stack is empty).
    self._top = None
    self._results = []

  def _Push(self, token, value):
    """Push values to stack at start of nesting.

    When a new object scope is beginning, will push the token (type of scope)
    along with the new objects value, the latter of which is provided through
    the various build methods of the builder.

    Args:
      token: Token indicating the type of scope which is being created; must
        belong to _TOKEN_VALUES.
      value: Value to associate with given token.  Construction of value is
        determined by the builder provided to this handler at construction.
    """
    self._top = (token, value)
    self._stack.append(self._top)

  def _Pop(self):
    """Pop values from stack at end of nesting.

    Called to indicate the end of a nested scope.

    Returns:
      Previously pushed value at the top of the stack.
    """
    assert self._stack != [] and self._stack is not None
    token, value = self._stack.pop()

    # Keep the _top cache in sync with the stack.
    if self._stack:
      self._top = self._stack[-1]
    else:
      self._top = None
    return value

  def _HandleAnchor(self, event):
    """Handle anchor attached to event.

    Currently will raise an error if anchor is used.  Anchors are used to
    define a document wide tag to a given value (scalar, mapping or sequence).

    Args:
      event: Event which may have anchor property set.

    Raises:
      NotImplementedError if event attempts to use an anchor.
    """
    if hasattr(event, 'anchor') and event.anchor is not None:
      # Use call syntax for the raise, matching Alias() below (the old
      # Python 2 comma form was inconsistent with the rest of the class).
      raise NotImplementedError('Anchors not supported in this handler')

  def _HandleValue(self, value):
    """Handle given value based on state of parser.

    This method handles the various values that are created by the builder
    at the beginning of scope events (such as mappings and sequences) or
    when a scalar value is received.

    Method is called when handler receives a parser, MappingStart or
    SequenceStart.

    Args:
      value: Value received as scalar value or newly constructed mapping or
        sequence instance.

    Raises:
      InternalError if the building process encounters an unexpected token.
      This is an indication of an implementation error in BuilderHandler.
    """
    token, top_value = self._top

    # A key is already on the stack: this value completes the pair.
    if token == _TOKEN_KEY:
      key = self._Pop()
      mapping_token, mapping = self._top
      assert _TOKEN_MAPPING == mapping_token
      self._builder.MapTo(mapping, key, value)

    # Inside a mapping with no pending key: this value IS the next key.
    elif token == _TOKEN_MAPPING:
      self._Push(_TOKEN_KEY, value)

    elif token == _TOKEN_SEQUENCE:
      self._builder.AppendTo(top_value, value)

    elif token == _TOKEN_DOCUMENT:
      self._builder.InitializeDocument(top_value, value)

    else:
      raise yaml_errors.InternalError('Unrecognized builder token:\n%s' % token)

  def StreamStart(self, event, loader):
    """Initializes internal state of handler.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._stack is None
    self._stack = []
    self._top = None
    self._results = []

  def StreamEnd(self, event, loader):
    """Cleans up internal state of handler after parsing.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._stack == [] and self._top is None
    self._stack = None

  def DocumentStart(self, event, loader):
    """Build new document.

    Pushes new document on to stack.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._stack == []
    self._Push(_TOKEN_DOCUMENT, self._builder.BuildDocument())

  def DocumentEnd(self, event, loader):
    """End of document.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._top[0] == _TOKEN_DOCUMENT
    self._results.append(self._Pop())

  def Alias(self, event, loader):
    """Not implemented yet.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    raise NotImplementedError('Anchors not supported in this handler')

  def Scalar(self, event, loader):
    """Handle scalar value.

    Since scalars are simple values that are passed directly in by the
    parser, handle like any value with no additional processing.

    Of course, key values will be handled specially.  A key value is
    recognized when the top token is _TOKEN_MAPPING.

    Args:
      event: Event containing scalar value.
      loader: Loader that generated the event; used to resolve and construct
        the typed value for non-key scalars.
    """
    self._HandleAnchor(event)
    if event.tag is None and self._top[0] != _TOKEN_MAPPING:
      # Untagged non-key scalar: ask the loader to infer the tag.
      try:
        tag = loader.resolve(yaml.nodes.ScalarNode,
                             event.value, event.implicit)
      except IndexError:
        tag = loader.DEFAULT_SCALAR_TAG
    else:
      tag = event.tag

    if tag is None:
      value = event.value
    else:
      # Build a scalar node so the loader can construct the typed value.
      node = yaml.nodes.ScalarNode(tag,
                                   event.value,
                                   event.start_mark,
                                   event.end_mark,
                                   event.style)
      value = loader.construct_object(node)
    self._HandleValue(value)

  def SequenceStart(self, event, loader):
    """Start of sequence scope.

    Create a new sequence from the builder and then handle in the context
    of its parent.

    Args:
      event: SequenceStartEvent generated by loader.
      loader: Loader that generated event.
    """
    self._HandleAnchor(event)
    token, parent = self._top

    # When the sequence is a mapping value, its real parent is one below
    # the pending key on the stack.
    if token == _TOKEN_KEY:
      token, parent = self._stack[-2]
    sequence = self._builder.BuildSequence(parent)
    self._HandleValue(sequence)
    self._Push(_TOKEN_SEQUENCE, sequence)

  def SequenceEnd(self, event, loader):
    """End of sequence.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._top[0] == _TOKEN_SEQUENCE
    end_object = self._Pop()
    top_value = self._top[1]
    self._builder.EndSequence(top_value, end_object)

  def MappingStart(self, event, loader):
    """Start of mapping scope.

    Create a mapping from builder and then handle in the context of its
    parent.

    Args:
      event: MappingStartEvent generated by loader.
      loader: Loader that generated event.
    """
    self._HandleAnchor(event)
    token, parent = self._top

    # When the mapping is itself a mapping value, its real parent is one
    # below the pending key on the stack.
    if token == _TOKEN_KEY:
      token, parent = self._stack[-2]
    mapping = self._builder.BuildMapping(parent)
    self._HandleValue(mapping)
    self._Push(_TOKEN_MAPPING, mapping)

  def MappingEnd(self, event, loader):
    """End of mapping.

    Args:
      event: Ignored.
      loader: Ignored.
    """
    assert self._top[0] == _TOKEN_MAPPING
    end_object = self._Pop()
    top_value = self._top[1]
    self._builder.EndMapping(top_value, end_object)

  def GetResults(self):
    """Get results of document stream processing.

    This method can be invoked after fully parsing the entire YAML file
    to retrieve constructed contents of YAML file.  Called after EndStream.

    Returns:
      A tuple of all document objects that were parsed from YAML stream.

    Raises:
      InternalError if the builder stack is not empty by the end of parsing.
    """
    if self._stack is not None:
      raise yaml_errors.InternalError('Builder stack is not empty.')
    return tuple(self._results)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors used in the urlfetch API
developers.
"""
class Error(Exception):
  """Base URL fetcher error type.  All urlfetch errors subclass this."""
class InvalidURLError(Error):
  """Raised when the URL given is empty or invalid.

  Only http: and https: URLs are allowed.  The maximum URL length
  allowed is 2048 characters.  The login/pass portion is not
  allowed.  In deployed applications, only ports 80 and 443 for http
  and https respectively are allowed.
  """
class DownloadError(Error):
  """Raised when we could not fetch the URL for any reason.

  Note that this exception is only raised when we could not contact the
  server.  HTTP errors (e.g., 404) are returned in the status_code field
  in the return value of Fetch, and no exception is raised.
  """
class ResponseTooLargeError(Error):
  """Raised when the response was too large and was truncated."""

  def __init__(self, response):
    # The truncated response object — presumably a URLFetchResponse from the
    # fetch implementation; TODO confirm against the caller.
    self.response = response
class InvalidMethodError(Error):
  """Raised when an invalid value for 'method' is provided."""
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""PyYAML event listener
Contains class which interprets YAML events and forwards them to
a handler object.
"""
from google.appengine.api import yaml_errors
import yaml
# Maps each PyYAML event class to the name of the EventHandler method it is
# dispatched to.  EventListener.__init__ resolves these names to bound
# methods once up front so no reflection happens during parsing.
_EVENT_METHOD_MAP = {
    yaml.events.StreamStartEvent: 'StreamStart',
    yaml.events.StreamEndEvent: 'StreamEnd',
    yaml.events.DocumentStartEvent: 'DocumentStart',
    yaml.events.DocumentEndEvent: 'DocumentEnd',
    yaml.events.AliasEvent: 'Alias',
    yaml.events.ScalarEvent: 'Scalar',
    yaml.events.SequenceStartEvent: 'SequenceStart',
    yaml.events.SequenceEndEvent: 'SequenceEnd',
    yaml.events.MappingStartEvent: 'MappingStart',
    yaml.events.MappingEndEvent: 'MappingEnd',
}
class EventHandler(object):
  """Handler interface for parsing YAML files.

  Implement this interface to define specific YAML event handling class.
  Implementing class instances are passed to the constructor of
  EventListener to act as a receiver of YAML parse events.

  All methods default to no-ops; subclasses override only the events they
  care about.
  """

  def StreamStart(self, event, loader):
    """Handle start of stream event."""

  def StreamEnd(self, event, loader):
    """Handle end of stream event."""

  def DocumentStart(self, event, loader):
    """Handle start of document event."""

  def DocumentEnd(self, event, loader):
    """Handle end of document event."""

  def Alias(self, event, loader):
    """Handle alias event."""

  def Scalar(self, event, loader):
    """Handle scalar event."""

  def SequenceStart(self, event, loader):
    """Handle start of sequence event."""

  def SequenceEnd(self, event, loader):
    """Handle end of sequence event."""

  def MappingStart(self, event, loader):
    """Handle start of mapping event."""

  def MappingEnd(self, event, loader):
    """Handle end of mapping event."""
class EventListener(object):
"""Helper class to re-map PyYAML events to method calls.
By default, PyYAML generates its events via a Python generator. This class
is a helper that iterates over the events from the PyYAML parser and forwards
them to a handle class in the form of method calls. For simplicity, the
underlying event is forwarded to the handler as a parameter to the call.
This object does not itself produce iterable objects, but is really a mapping
to a given handler instance.
Example use:
class PrintDocumentHandler(object):
def DocumentStart(event):
print "A new document has been started"
EventListener(PrintDocumentHandler()).Parse('''
key1: value1
---
key2: value2
'''
>>> A new document has been started
A new document has been started
In the example above, the implemented handler class (PrintDocumentHandler)
has a single method which reports each time a new document is started within
a YAML file. It is not necessary to subclass the EventListener, merely it
receives a PrintDocumentHandler instance. Every time a new document begins,
PrintDocumentHandler.DocumentStart is called with the PyYAML event passed
in as its parameter..
"""
def __init__(self, event_handler):
"""Initialize PyYAML event listener.
Constructs internal mapping directly from event type to method on actual
handler. This prevents reflection being used during actual parse time.
Args:
event_handler: Event handler that will receive mapped events. Must
implement at least one appropriate handler method named from
the values of the _EVENT_METHOD_MAP.
Raises:
ListenerConfigurationError if event_handler is not an EventHandler.
"""
if not isinstance(event_handler, EventHandler):
raise yaml_errors.ListenerConfigurationError(
'Must provide event handler of type yaml_listener.EventHandler')
self._event_method_map = {}
for event, method in _EVENT_METHOD_MAP.iteritems():
self._event_method_map[event] = getattr(event_handler, method)
def HandleEvent(self, event, loader=None):
"""Handle individual PyYAML event.
Args:
event: Event to forward to method call in method call.
Raises:
IllegalEvent when receives an unrecognized or unsupported event type.
"""
if event.__class__ not in _EVENT_METHOD_MAP:
raise yaml_errors.IllegalEvent(
"%s is not a valid PyYAML class" % event.__class__.__name__)
if event.__class__ in self._event_method_map:
self._event_method_map[event.__class__](event, loader)
def _HandleEvents(self, events):
"""Iterate over all events and send them to handler.
This method is not meant to be called from the interface.
Only use in tests.
Args:
events: Iterator or generator containing events to process.
raises:
EventListenerParserError when a yaml.parser.ParserError is raised.
EventError when an exception occurs during the handling of an event.
"""
for event in events:
try:
self.HandleEvent(*event)
except Exception, e:
event_object, loader = event
raise yaml_errors.EventError(e, event_object)
def _GenerateEventParameters(self,
stream,
loader_class=yaml.loader.SafeLoader):
"""Creates a generator that yields event, loader parameter pairs.
For use as parameters to HandleEvent method for use by Parse method.
During testing, _GenerateEventParameters is simulated by allowing
the harness to pass in a list of pairs as the parameter.
A list of (event, loader) pairs must be passed to _HandleEvents otherwise
it is not possible to pass the loader instance to the handler.
Also responsible for instantiating the loader from the Loader
parameter.
Args:
stream: String document or open file object to process as per the
yaml.parse method. Any object that implements a 'read()' method which
returns a string document will work.
Loader: Loader class to use as per the yaml.parse method. Used to
instantiate new yaml.loader instance.
Yields:
Tuple(event, loader) where:
event: Event emitted by PyYAML loader.
loader_class: Used for dependency injection.
"""
assert loader_class is not None
try:
loader = loader_class(stream)
while loader.check_event():
yield (loader.get_event(), loader)
except yaml.error.YAMLError, e:
raise yaml_errors.EventListenerYAMLError(e)
def Parse(self, stream, loader_class=yaml.loader.SafeLoader):
"""Call YAML parser to generate and handle all events.
Calls PyYAML parser and sends resulting generator to handle_event method
for processing.
Args:
stream: String document or open file object to process as per the
yaml.parse method. Any object that implements a 'read()' method which
returns a string document will work with the YAML parser.
loader_class: Used for dependency injection.
"""
self._HandleEvents(self._GenerateEventParameters(stream, loader_class))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""The Python datastore admin API for managing indices and schemas.
"""
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.datastore import datastore_index
from google.appengine.datastore import datastore_pb
from google.appengine.runtime import apiproxy_errors
from google.appengine.datastore import entity_pb
# Maps index property direction name strings (both short and long spellings)
# to the corresponding entity_pb.Index_Property direction constants.  Used
# by IndexDefinitionToProto below.
_DIRECTION_MAP = {
    'asc': entity_pb.Index_Property.ASCENDING,
    'ascending': entity_pb.Index_Property.ASCENDING,
    'desc': entity_pb.Index_Property.DESCENDING,
    'descending': entity_pb.Index_Property.DESCENDING,
}
def GetSchema(_app=None):
  """Infers an app's schema from the entities in the datastore.

  Note that the PropertyValue PBs in the returned EntityProtos are empty
  placeholders, so they may cause problems if you try to convert them to
  python values with e.g. datastore_types.  In particular, user values will
  throw UserNotFoundError because their email and auth domain fields will be
  empty.

  Args:
    _app: Application id; defaults to the current application.

  Returns:
    list of entity_pb.EntityProto, with kind and property names and types
  """
  request = api_base_pb.StringProto()
  request.set_value(datastore_types.ResolveAppId(_app))
  schema = datastore_pb.Schema()

  _Call('GetSchema', request, schema)
  return schema.kind_list()
def GetIndices(_app=None):
  """Fetches all composite indices in the datastore for this app.

  Args:
    _app: Application id; defaults to the current application.

  Returns:
    list of entity_pb.CompositeIndex
  """
  req = api_base_pb.StringProto()
  req.set_value(datastore_types.ResolveAppId(_app))
  resp = datastore_pb.CompositeIndices()
  # Delegate to _Call for consistency with the other RPC wrappers in this
  # module (it performs the same try/except-to-datastore-error translation
  # this function previously inlined).
  _Call('GetIndices', req, resp)
  return resp.index_list()
def CreateIndex(index):
  """Creates a new composite index in the datastore for this app.

  Args:
    index: entity_pb.CompositeIndex to create.

  Returns:
    int, the id allocated to the index
  """
  allocated_id = api_base_pb.Integer64Proto()
  _Call('CreateIndex', index, allocated_id)
  return allocated_id.value()
def UpdateIndex(index):
  """Updates an index's status.  The entire index definition must be present.

  Args:
    index: entity_pb.CompositeIndex to update.
  """
  # The RPC returns nothing useful, so a VoidProto receives the response.
  void_response = api_base_pb.VoidProto()
  _Call('UpdateIndex', index, void_response)
def DeleteIndex(index):
  """Deletes an index.  The entire index definition must be present.

  Args:
    index: entity_pb.CompositeIndex to delete.
  """
  # The RPC returns nothing useful, so a VoidProto receives the response.
  void_response = api_base_pb.VoidProto()
  _Call('DeleteIndex', index, void_response)
def _Call(call, req, resp):
"""Generic method for making a datastore API call.
Args:
call: string, the name of the RPC call
req: the request PB. if the app_id field is not set, it defaults to the
local app.
resp: the response PB
"""
if hasattr(req, 'app_id'):
req.set_app_id(datastore_types.ResolveAppId(req.app_id(), 'req.app_id()'))
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', call, req, resp)
except apiproxy_errors.ApplicationError, err:
raise datastore._ToDatastoreError(err)
def IndexDefinitionToProto(app_id, index_definition):
  """Transform individual Index definition to protocol buffer.

  Args:
    app_id: Application id for new protocol buffer CompositeIndex.
    index_definition: datastore_index.Index object to transform.

  Returns:
    New entity_pb.CompositeIndex with default values set and index
    information filled in.
  """
  composite_index = entity_pb.CompositeIndex()
  composite_index.set_app_id(app_id)
  composite_index.set_id(0)
  composite_index.set_state(entity_pb.CompositeIndex.WRITE_ONLY)

  definition = composite_index.mutable_definition()
  definition.set_entity_type(index_definition.kind)
  definition.set_ancestor(index_definition.ancestor)

  # properties may be None, which means no property records at all.
  for prop in index_definition.properties or ():
    prop_proto = definition.add_property()
    prop_proto.set_name(prop.name)
    prop_proto.set_direction(_DIRECTION_MAP[prop.direction])

  return composite_index
def IndexDefinitionsToProtos(app_id, index_definitions):
  """Transform multiple index definitions to composite index records.

  Args:
    app_id: Application id for new protocol buffer CompositeIndex.
    index_definitions: A list of datastore_index.Index objects to transform.

  Returns:
    A list of transformed entity_pb.CompositeIndex entities with default
    values set and index information filled in.
  """
  protos = []
  for definition in index_definitions:
    protos.append(IndexDefinitionToProto(app_id, definition))
  return protos
def ProtoToIndexDefinition(proto):
  """Transform individual index protocol buffer to index definition.

  Args:
    proto: An instance of entity_pb.CompositeIndex to transform.

  Returns:
    A new instance of datastore_index.Index.
  """
  definition = proto.definition()

  properties = []
  for prop_proto in definition.property_list():
    prop = datastore_index.Property(name=prop_proto.name())
    # Direction defaults to ascending; only mark the descending case.
    if prop_proto.direction() == entity_pb.Index_Property.DESCENDING:
      prop.direction = 'descending'
    properties.append(prop)

  result = datastore_index.Index(kind=definition.entity_type(),
                                 properties=properties)
  if definition.ancestor():
    result.ancestor = True
  return result
def ProtosToIndexDefinitions(protos):
  """Transform multiple index protocol buffers to index definitions.

  Args:
    protos: A list of entity_pb.CompositeIndex records.

  Returns:
    A list of datastore_index.Index objects, one per input record.
  """
  return [ProtoToIndexDefinition(proto) for proto in protos]
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the urlfetch API, based on httplib."""
import httplib
import logging
import socket
import urllib
import urlparse
from google.appengine.api import apiproxy_stub
from google.appengine.api import urlfetch
from google.appengine.api import urlfetch_errors
from google.appengine.api import urlfetch_service_pb
from google.appengine.runtime import apiproxy_errors
# 16 MB.  Presumably the cap applied to fetched response bodies — TODO
# confirm against the part of _RetrieveURL below this chunk.
MAX_RESPONSE_SIZE = 2 ** 24

# Mirror the redirect limit exposed by the public urlfetch API.
MAX_REDIRECTS = urlfetch.MAX_REDIRECTS

# HTTP status codes treated as redirects (301, 302, 303, 307).
REDIRECT_STATUSES = frozenset([
  httplib.MOVED_PERMANENTLY,
  httplib.FOUND,
  httplib.SEE_OTHER,
  httplib.TEMPORARY_REDIRECT,
])

# Ports a deployed app may fetch from; _RetrieveURL logs a warning for any
# other port.  None covers URLs with no explicit port.
PORTS_ALLOWED_IN_PRODUCTION = (
    None, '80', '443', '4443', '8080', '8081', '8082', '8083', '8084', '8085',
    '8086', '8087', '8088', '8089', '8188', '8444', '8990')

# Seconds.  Presumably the deadline applied to outgoing requests — TODO
# confirm where it is consumed (not visible in this chunk).
_API_CALL_DEADLINE = 5.0

# Request headers stripped from app-supplied requests by _Dynamic_Fetch
# before the fetch is performed (compared case-insensitively via .title()
# normalization in _SanitizeHttpHeaders).
_UNTRUSTED_REQUEST_HEADERS = frozenset([
  'accept-encoding',
  'content-length',
  'host',
  'referer',
  'user-agent',
  'vary',
  'via',
  'x-forwarded-for',
])
class URLFetchServiceStub(apiproxy_stub.APIProxyStub):
"""Stub version of the urlfetch API to be used with apiproxy_stub_map."""
def __init__(self, service_name='urlfetch'):
  """Initializer.

  Simply forwards to the base stub; all state lives in the parent class.

  Args:
    service_name: Service name expected for all calls.
  """
  super(URLFetchServiceStub, self).__init__(service_name)
def _Dynamic_Fetch(self, request, response):
  """Trivial implementation of URLFetchService::Fetch().

  Validates the HTTP method and URL scheme, strips untrusted headers, then
  delegates the actual retrieval to _RetrieveURL.

  Args:
    request: the fetch to perform, a URLFetchRequest
    response: the fetch response, a URLFetchResponse

  Raises:
    apiproxy_errors.ApplicationError with UNSPECIFIED_ERROR for an unknown
    HTTP method, or INVALID_URL for a non-http(s) scheme.
  """
  (protocol, host, path,
   parameters, query, fragment) = urlparse.urlparse(request.url())

  # Dispatch table replaces the if/elif chain; only POST and PUT carry a
  # request body.
  pb_request = urlfetch_service_pb.URLFetchRequest
  method_names = {
      pb_request.GET: 'GET',
      pb_request.POST: 'POST',
      pb_request.HEAD: 'HEAD',
      pb_request.PUT: 'PUT',
      pb_request.DELETE: 'DELETE',
  }
  method = method_names.get(request.method())
  if method is None:
    logging.error('Invalid method: %s', request.method())
    raise apiproxy_errors.ApplicationError(
        urlfetch_service_pb.URLFetchServiceError.UNSPECIFIED_ERROR)

  payload = None
  if method in ('POST', 'PUT'):
    payload = request.payload()

  if protocol not in ('http', 'https'):
    logging.error('Invalid protocol: %s', protocol)
    raise apiproxy_errors.ApplicationError(
        urlfetch_service_pb.URLFetchServiceError.INVALID_URL)

  # Remove headers the application is not allowed to control.
  sanitized_headers = self._SanitizeHttpHeaders(_UNTRUSTED_REQUEST_HEADERS,
                                                request.header_list())
  request.clear_header()
  request.header_list().extend(sanitized_headers)

  self._RetrieveURL(request.url(), payload, method,
                    request.header_list(), response,
                    follow_redirects=request.followredirects())
def _RetrieveURL(self, url, payload, method, headers, response,
follow_redirects=True):
"""Retrieves a URL.
Args:
url: String containing the URL to access.
payload: Request payload to send, if any; None if no payload.
method: HTTP method to use (e.g., 'GET')
headers: List of additional header objects to use for the request.
response: Response object
follow_redirects: optional setting (defaulting to True) for whether or not
we should transparently follow redirects (up to MAX_REDIRECTS)
Raises:
Raises an apiproxy_errors.ApplicationError exception with FETCH_ERROR
in cases where:
- MAX_REDIRECTS is exceeded
- The protocol of the redirected URL is bad or missing.
"""
last_protocol = ''
last_host = ''
for redirect_number in xrange(MAX_REDIRECTS + 1):
parsed = urlparse.urlparse(url)
protocol, host, path, parameters, query, fragment = parsed
port = urllib.splitport(urllib.splituser(host)[1])[1]
if port not in PORTS_ALLOWED_IN_PRODUCTION:
logging.warning(
'urlfetch received %s ; port %s is not allowed in production!' %
(url, port))
if host == '' and protocol == '':
host = last_host
protocol = last_protocol
adjusted_headers = {
'Host': host,
'Accept': '*/*',
}
if payload is not None:
adjusted_headers['Content-Length'] = len(payload)
if method == 'POST' and payload:
adjusted_headers['Content-Type'] = 'application/x-www-form-urlencoded'
for header in headers:
adjusted_headers[header.key().title()] = header.value()
logging.debug('Making HTTP request: host = %s, '
'url = %s, payload = %s, headers = %s',
host, url, payload, adjusted_headers)
try:
if protocol == 'http':
connection = httplib.HTTPConnection(host)
elif protocol == 'https':
connection = httplib.HTTPSConnection(host)
else:
error_msg = 'Redirect specified invalid protocol: "%s"' % protocol
logging.error(error_msg)
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
last_protocol = protocol
last_host = host
if query != '':
full_path = path + '?' + query
else:
full_path = path
orig_timeout = socket.getdefaulttimeout()
try:
socket.setdefaulttimeout(_API_CALL_DEADLINE)
connection.request(method, full_path, payload, adjusted_headers)
http_response = connection.getresponse()
http_response_data = http_response.read()
finally:
socket.setdefaulttimeout(orig_timeout)
connection.close()
except (httplib.error, socket.error, IOError), e:
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, str(e))
if http_response.status in REDIRECT_STATUSES and follow_redirects:
url = http_response.getheader('Location', None)
if url is None:
error_msg = 'Redirecting response was missing "Location" header'
logging.error(error_msg)
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
else:
response.set_statuscode(http_response.status)
response.set_content(http_response_data[:MAX_RESPONSE_SIZE])
for header_key, header_value in http_response.getheaders():
header_proto = response.add_header()
header_proto.set_key(header_key)
header_proto.set_value(header_value)
if len(http_response_data) > MAX_RESPONSE_SIZE:
response.set_contentwastruncated(True)
break
else:
error_msg = 'Too many repeated redirects'
logging.error(error_msg)
raise apiproxy_errors.ApplicationError(
urlfetch_service_pb.URLFetchServiceError.FETCH_ERROR, error_msg)
def _SanitizeHttpHeaders(self, untrusted_headers, headers):
"""Cleans "unsafe" headers from the HTTP request/response.
Args:
untrusted_headers: set of untrusted headers names
headers: list of string pairs, first is header name and the second is header's value
"""
return (h for h in headers if h.key().lower() not in untrusted_headers)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""
In-memory persistent stub for the Python datastore API. Gets, queries,
and searches are implemented as in-memory scans over all entities.
Stores entities across sessions as pickled proto bufs in a single file. On
startup, all entities are read from the file and loaded into memory. On
every Put(), the file is wiped and all entities are written from scratch.
Clients can also manually Read() and Write() the file themselves.
Transactions are serialized through __tx_lock. Each transaction acquires it
when it begins and releases it when it commits or rolls back. This is
important, since there are other member variables like __tx_snapshot that are
per-transaction, so they should only be used by one tx at a time.
"""
import datetime
import logging
import os
import struct
import sys
import tempfile
import threading
import warnings
import cPickle as pickle
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub
from google.appengine.api import datastore
from google.appengine.api import datastore_admin
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.api import users
from google.appengine.datastore import datastore_pb
from google.appengine.datastore import datastore_index
from google.appengine.runtime import apiproxy_errors
from google.net.proto import ProtocolBuffer
from google.appengine.datastore import entity_pb
# __WritePickled uses os.tempnam, which emits a RuntimeWarning; silence it.
warnings.filterwarnings('ignore', 'tempnam is a potential security risk')
# Make these protobuf messages hashable by their encoded bytes so they can be
# used as dict keys (entity keys in __entities, queries in __query_history).
entity_pb.Reference.__hash__ = lambda self: hash(self.Encode())
datastore_pb.Query.__hash__ = lambda self: hash(self.Encode())
# Hard limits mirroring the production datastore's query restrictions.
_MAXIMUM_RESULTS = 1000
_MAX_QUERY_OFFSET = 1000
_MAX_QUERY_COMPONENTS = 100
class _StoredEntity(object):
  """Bundles the three representations of one entity held by the stub.

  Public properties:
    protobuf: the entity as a native entity_pb.EntityProto object.
    encoded_protobuf: the serialized (binary) form of that protobuf.
    native: the entity converted to a datastore.Entity instance.
  """
  def __init__(self, entity):
    """Caches the protobuf, its encoding, and its native conversion.

    Args:
      entity: entity_pb.EntityProto to store.
    """
    self.native = datastore.Entity._FromPb(entity)
    self.encoded_protobuf = entity.Encode()
    self.protobuf = entity
class DatastoreFileStub(apiproxy_stub.APIProxyStub):
  """ Persistent stub for the Python datastore API.

  Stores all entities in memory, and persists them to a file as pickled
  protocol buffers. A DatastoreFileStub instance handles a single app's data
  and is backed by files on disk.
  """
  # Maps a Python value type to the PropertyValue tag number used by the real
  # datastore; tag numbers define the cross-type ordering applied when
  # filtering and sorting (see _Dynamic_RunQuery).
  _PROPERTY_TYPE_TAGS = {
    datastore_types.Blob: entity_pb.PropertyValue.kstringValue,
    bool: entity_pb.PropertyValue.kbooleanValue,
    datastore_types.Category: entity_pb.PropertyValue.kstringValue,
    datetime.datetime: entity_pb.PropertyValue.kint64Value,
    datastore_types.Email: entity_pb.PropertyValue.kstringValue,
    float: entity_pb.PropertyValue.kdoubleValue,
    datastore_types.GeoPt: entity_pb.PropertyValue.kPointValueGroup,
    datastore_types.IM: entity_pb.PropertyValue.kstringValue,
    int: entity_pb.PropertyValue.kint64Value,
    datastore_types.Key: entity_pb.PropertyValue.kReferenceValueGroup,
    datastore_types.Link: entity_pb.PropertyValue.kstringValue,
    long: entity_pb.PropertyValue.kint64Value,
    datastore_types.PhoneNumber: entity_pb.PropertyValue.kstringValue,
    datastore_types.PostalAddress: entity_pb.PropertyValue.kstringValue,
    datastore_types.Rating: entity_pb.PropertyValue.kint64Value,
    str: entity_pb.PropertyValue.kstringValue,
    datastore_types.Text: entity_pb.PropertyValue.kstringValue,
    type(None): 0,
    unicode: entity_pb.PropertyValue.kstringValue,
    users.User: entity_pb.PropertyValue.kUserValueGroup,
    }
  WRITE_ONLY = entity_pb.CompositeIndex.WRITE_ONLY
  READ_WRITE = entity_pb.CompositeIndex.READ_WRITE
  DELETED = entity_pb.CompositeIndex.DELETED
  ERROR = entity_pb.CompositeIndex.ERROR
  # Legal composite-index state changes; _Dynamic_UpdateIndex rejects any
  # transition not listed for the index's current state.
  _INDEX_STATE_TRANSITIONS = {
    WRITE_ONLY: frozenset((READ_WRITE, DELETED, ERROR)),
    READ_WRITE: frozenset((DELETED,)),
    ERROR: frozenset((DELETED,)),
    DELETED: frozenset((ERROR,)),
  }
  def __init__(self,
               app_id,
               datastore_file,
               history_file,
               require_indexes=False,
               service_name='datastore_v3'):
    """Constructor.

    Initializes and loads the datastore from the backing files, if they exist.

    Args:
      app_id: string
      datastore_file: string, stores all entities across sessions. Use None
        not to use a file.
      history_file: string, stores query history. Use None as with
        datastore_file.
      require_indexes: bool, default False. If True, composite indexes must
        exist in index.yaml for queries that need them.
      service_name: Service name expected for all calls.
    """
    super(DatastoreFileStub, self).__init__(service_name)
    assert isinstance(app_id, basestring) and app_id != ''
    self.__app_id = app_id
    self.__datastore_file = datastore_file
    self.__history_file = history_file
    # (app id, kind) -> {entity_pb.Reference: _StoredEntity}
    self.__entities = {}
    # (app id, kind) -> cached entity_pb.EntityProto schema; invalidated by
    # _StoreEntity and _Dynamic_Delete.
    self.__schema_cache = {}
    # Copy of __entities taken when a transaction begins; restored wholesale
    # on rollback.
    self.__tx_snapshot = {}
    # cursor id -> (remaining result list, original result count)
    self.__queries = {}
    # transaction handle -> None (only key existence matters)
    self.__transactions = {}
    # app id -> list of entity_pb.CompositeIndex
    self.__indexes = {}
    self.__require_indexes = require_indexes
    self.__query_history = {}
    # Next auto-allocated entity id / query cursor / tx handle / index id.
    self.__next_id = 1
    self.__next_cursor = 1
    self.__next_tx_handle = 1
    self.__next_index_id = 1
    self.__id_lock = threading.Lock()
    self.__cursor_lock = threading.Lock()
    self.__tx_handle_lock = threading.Lock()
    self.__index_id_lock = threading.Lock()
    # Serializes transactions: acquired by BeginTransaction, released by
    # Commit/Rollback (see the module docstring).
    self.__tx_lock = threading.Lock()
    self.__entities_lock = threading.Lock()
    self.__file_lock = threading.Lock()
    self.__indexes_lock = threading.Lock()
    self.Read()
  def Clear(self):
    """ Clears the datastore by deleting all currently stored entities and
    queries. """
    self.__entities = {}
    self.__queries = {}
    self.__transactions = {}
    self.__query_history = {}
    self.__schema_cache = {}
  def _AppKindForKey(self, key):
    """ Get (app, kind) tuple from given key.

    The (app, kind) tuple is used as an index into several internal
    dictionaries, e.g. __entities.

    Args:
      key: entity_pb.Reference

    Returns:
      Tuple (app, kind), both are unicode strings.
    """
    # The kind is the type of the last element of the key path.
    last_path = key.path().element_list()[-1]
    return key.app(), last_path.type()
  def _StoreEntity(self, entity):
    """ Store the given entity.

    Args:
      entity: entity_pb.EntityProto
    """
    key = entity.key()
    app_kind = self._AppKindForKey(key)
    if app_kind not in self.__entities:
      self.__entities[app_kind] = {}
    self.__entities[app_kind][key] = _StoredEntity(entity)
    # Any cached schema for this kind is now stale.
    if app_kind in self.__schema_cache:
      del self.__schema_cache[app_kind]
  # Exceptions that indicate a corrupt or incompatible pickled file.
  READ_PB_EXCEPTIONS = (ProtocolBuffer.ProtocolBufferDecodeError, LookupError,
                        TypeError, ValueError)
  READ_ERROR_MSG = ('Data in %s is corrupt or a different version. '
                    'Try running with the --clear_datastore flag.\n%r')
  READ_PY250_MSG = ('Are you using FloatProperty and/or GeoPtProperty? '
                    'Unfortunately loading float values from the datastore '
                    'file does not work with Python 2.5.0. '
                    'Please upgrade to a newer Python 2.5 release or use '
                    'the --clear_datastore flag.\n')
  def Read(self):
    """ Reads the datastore and history files into memory.

    The in-memory query history is cleared, but the datastore is *not*
    cleared; the entities in the files are merged into the entities in memory.
    If you want them to overwrite the in-memory datastore, call Clear() before
    calling Read().

    If the datastore file contains an entity with the same app name, kind, and
    key as an entity already in the datastore, the entity from the file
    overwrites the entity in the datastore.

    Also sets __next_id to one greater than the highest id allocated so far.
    """
    if self.__datastore_file and self.__datastore_file != '/dev/null':
      for encoded_entity in self.__ReadPickled(self.__datastore_file):
        try:
          entity = entity_pb.EntityProto(encoded_entity)
        except self.READ_PB_EXCEPTIONS, e:
          raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                               (self.__datastore_file, e))
        except struct.error, e:
          # Python 2.5.0's struct module cannot unpack some floats written by
          # other versions; surface a targeted message for that case.
          if (sys.version_info[0:3] == (2, 5, 0)
              and e.message.startswith('unpack requires a string argument')):
            raise datastore_errors.InternalError(self.READ_PY250_MSG +
                                                 self.READ_ERROR_MSG %
                                                 (self.__datastore_file, e))
          else:
            raise
        self._StoreEntity(entity)
        # Advance the id allocator past every numeric id seen in the file.
        last_path = entity.key().path().element_list()[-1]
        if last_path.has_id() and last_path.id() >= self.__next_id:
          self.__next_id = last_path.id() + 1
      self.__query_history = {}
      for encoded_query, count in self.__ReadPickled(self.__history_file):
        try:
          query_pb = datastore_pb.Query(encoded_query)
        except self.READ_PB_EXCEPTIONS, e:
          raise datastore_errors.InternalError(self.READ_ERROR_MSG %
                                               (self.__history_file, e))
        if query_pb in self.__query_history:
          self.__query_history[query_pb] += count
        else:
          self.__query_history[query_pb] = count
  def Write(self):
    """ Writes out the datastore and history files. Be careful! If the files
    already exist, this method overwrites them!
    """
    self.__WriteDatastore()
    self.__WriteHistory()
  def __WriteDatastore(self):
    """ Writes out the datastore file. Be careful! If the file already exist,
    this method overwrites it!
    """
    if self.__datastore_file and self.__datastore_file != '/dev/null':
      encoded = []
      for kind_dict in self.__entities.values():
        for entity in kind_dict.values():
          encoded.append(entity.encoded_protobuf)
      self.__WritePickled(encoded, self.__datastore_file)
  def __WriteHistory(self):
    """ Writes out the history file. Be careful! If the file already exist,
    this method overwrites it!
    """
    if self.__history_file and self.__history_file != '/dev/null':
      encoded = [(query.Encode(), count)
                 for query, count in self.__query_history.items()]
      self.__WritePickled(encoded, self.__history_file)
  def __ReadPickled(self, filename):
    """Reads a pickled object from the given file and returns it.

    Returns [] when the file is missing or no filename was configured; raises
    datastore_errors.InternalError when the file exists but cannot be
    unpickled.
    """
    self.__file_lock.acquire()
    try:
      try:
        if filename and filename != '/dev/null' and os.path.isfile(filename):
          return pickle.load(open(filename, 'rb'))
        else:
          logging.warning('Could not read datastore data from %s', filename)
      except (AttributeError, LookupError, NameError, TypeError,
              ValueError, struct.error, pickle.PickleError), e:
        raise datastore_errors.InternalError(
          'Could not read data from %s. Try running with the '
          '--clear_datastore flag. Cause:\n%r' % (filename, e))
    finally:
      self.__file_lock.release()
    return []
  def __WritePickled(self, obj, filename, openfile=file):
    """Pickles the object and writes it to the given file.

    Writes to a temp file in the target directory first, then renames it over
    the destination so readers never see a half-written file.
    """
    if not filename or filename == '/dev/null' or not obj:
      return
    tmpfile = openfile(os.tempnam(os.path.dirname(filename)), 'wb')
    # Binary pickle protocol 1 with memo-free "fast" mode.
    pickler = pickle.Pickler(tmpfile, protocol=1)
    pickler.fast = True
    pickler.dump(obj)
    tmpfile.close()
    self.__file_lock.acquire()
    try:
      try:
        os.rename(tmpfile.name, filename)
      except OSError:
        # NOTE(review): fallback for platforms where rename cannot replace an
        # existing file (presumably Windows); remove-then-rename is not
        # atomic there.
        try:
          os.remove(filename)
        except:
          pass
        os.rename(tmpfile.name, filename)
    finally:
      self.__file_lock.release()
  def MakeSyncCall(self, service, call, request, response):
    """ The main RPC entry point. service must be 'datastore_v3'. So far, the
    supported calls are 'Get', 'Put', 'RunQuery', 'Next', and 'Count'.
    """
    # Dispatches to the matching _Dynamic_<call> method via the base class.
    super(DatastoreFileStub, self).MakeSyncCall(service,
                                                call,
                                                request,
                                                response)
    explanation = []
    assert response.IsInitialized(explanation), explanation
  def QueryHistory(self):
    """Returns a dict that maps Query PBs to times they've been run.

    Only queries belonging to this stub's app id are included.
    """
    return dict((pb, times) for pb, times in self.__query_history.items()
                if pb.app() == self.__app_id)
  def _Dynamic_Put(self, put_request, put_response):
    """Stores the entities in put_request and returns their (possibly newly
    allocated) keys in put_response. Entities are cloned so the caller's
    request message is never mutated after the call."""
    clones = []
    for entity in put_request.entity_list():
      clone = entity_pb.EntityProto()
      clone.CopyFrom(entity)
      clones.append(clone)
      assert clone.has_key()
      assert clone.key().path().element_size() > 0
      last_path = clone.key().path().element_list()[-1]
      if last_path.id() == 0 and not last_path.has_name():
        # Incomplete key: allocate the next numeric id and fill in the
        # entity group from the key path's root element.
        self.__id_lock.acquire()
        last_path.set_id(self.__next_id)
        self.__next_id += 1
        self.__id_lock.release()
        assert clone.entity_group().element_size() == 0
        group = clone.mutable_entity_group()
        root = clone.key().path().element(0)
        group.add_element().CopyFrom(root)
      else:
        # Complete key: the caller must already have set the entity group.
        assert (clone.has_entity_group() and
                clone.entity_group().element_size() > 0)
    self.__entities_lock.acquire()
    try:
      for clone in clones:
        self._StoreEntity(clone)
    finally:
      self.__entities_lock.release()
    # Inside a transaction, persistence is deferred until Commit.
    if not put_request.has_transaction():
      self.__WriteDatastore()
    put_response.key_list().extend([c.key() for c in clones])
  def _Dynamic_Get(self, get_request, get_response):
    """Looks up each requested key; a missing entity yields an empty result
    group (entity field unset) rather than an error."""
    for key in get_request.key_list():
      app_kind = self._AppKindForKey(key)
      group = get_response.add_entity()
      try:
        entity = self.__entities[app_kind][key].protobuf
      except KeyError:
        entity = None
      if entity:
        group.mutable_entity().CopyFrom(entity)
  def _Dynamic_Delete(self, delete_request, delete_response):
    """Deletes the requested keys; deleting a nonexistent key is a no-op."""
    self.__entities_lock.acquire()
    try:
      for key in delete_request.key_list():
        app_kind = self._AppKindForKey(key)
        try:
          del self.__entities[app_kind][key]
          # Drop empty kind buckets and their cached schema.
          if not self.__entities[app_kind]:
            del self.__entities[app_kind]
          del self.__schema_cache[app_kind]
        except KeyError:
          pass
      if not delete_request.has_transaction():
        self.__WriteDatastore()
    finally:
      self.__entities_lock.release()
  def _Dynamic_RunQuery(self, query, query_result):
    """Runs a query as an in-memory scan over all entities of the kind,
    applying ancestor, filter, order, offset and limit, then registers a
    cursor for _Dynamic_Next."""
    # Queries are forbidden inside a transaction; the tx lock being held is
    # the signal that a transaction is in progress.
    if not self.__tx_lock.acquire(False):
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, 'Can\'t query inside a transaction.')
    else:
      self.__tx_lock.release()
    if query.has_offset() and query.offset() > _MAX_QUERY_OFFSET:
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST, 'Too big query offset.')
    num_components = len(query.filter_list()) + len(query.order_list())
    if query.has_ancestor():
      num_components += 1
    if num_components > _MAX_QUERY_COMPONENTS:
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        ('query is too large. may not have more than %s filters'
         ' + sort orders ancestor total' % _MAX_QUERY_COMPONENTS))
    app = query.app()
    if self.__require_indexes:
      # Emulate production: reject queries whose required composite index is
      # not declared. Equality filters may match in any order.
      required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
      if required:
        required_key = kind, ancestor, props
        indexes = self.__indexes.get(app)
        if not indexes:
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index, but none are defined. "
              "You must create an index.yaml file in your application root.")
        eq_filters_set = set(props[:num_eq_filters])
        remaining_filters = props[num_eq_filters:]
        for index in indexes:
          definition = datastore_admin.ProtoToIndexDefinition(index)
          index_key = datastore_index.IndexToKey(definition)
          if required_key == index_key:
            break
          # Allow a permutation of the equality-filter properties.
          if num_eq_filters > 1 and (kind, ancestor) == index_key[:2]:
            this_props = index_key[2]
            this_eq_filters_set = set(this_props[:num_eq_filters])
            this_remaining_filters = this_props[num_eq_filters:]
            if (eq_filters_set == this_eq_filters_set and
                remaining_filters == this_remaining_filters):
              break
        else:
          raise apiproxy_errors.ApplicationError(
              datastore_pb.Error.NEED_INDEX,
              "This query requires a composite index that is not defined. "
              "You must update the index.yaml file in your application root.")
    try:
      query.set_app(app)
      results = self.__entities[app, query.kind()].values()
      results = [entity.native for entity in results]
    except KeyError:
      results = []
    if query.has_ancestor():
      # An entity matches if the ancestor's path is a prefix of its own path.
      ancestor_path = query.ancestor().path().element_list()
      def is_descendant(entity):
        path = entity.key()._Key__reference.path().element_list()
        return path[:len(ancestor_path)] == ancestor_path
      results = filter(is_descendant, results)
    operators = {datastore_pb.Query_Filter.LESS_THAN:             '<',
                 datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL:    '<=',
                 datastore_pb.Query_Filter.GREATER_THAN:          '>',
                 datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL: '>=',
                 datastore_pb.Query_Filter.EQUAL:                 '==',
                 }
    for filt in query.filter_list():
      assert filt.op() != datastore_pb.Query_Filter.IN
      prop = filt.property(0).name().decode('utf-8')
      op = operators[filt.op()]
      filter_val_list = [datastore_types.FromPropertyPb(filter_prop)
                         for filter_prop in filt.property_list()]
      def passes(entity):
        """ Returns True if the entity passes the filter, False otherwise. """
        if prop in datastore_types._SPECIAL_PROPERTIES:
          entity_vals = self.__GetSpecialPropertyValue(entity, prop)
        else:
          entity_vals = entity.get(prop, [])
        if not isinstance(entity_vals, list):
          entity_vals = [entity_vals]
        for fixed_entity_val in entity_vals:
          if type(fixed_entity_val) in datastore_types._RAW_PROPERTY_TYPES:
            continue
          for filter_val in filter_val_list:
            fixed_entity_type = self._PROPERTY_TYPE_TAGS.get(
              fixed_entity_val.__class__)
            filter_type = self._PROPERTY_TYPE_TAGS.get(filter_val.__class__)
            # Same type: compare the values; different types: compare the
            # type tags (cross-type ordering), except for equality which can
            # never hold across types.
            if fixed_entity_type == filter_type:
              comp = u'%r %s %r' % (fixed_entity_val, op, filter_val)
            elif op != '==':
              comp = '%r %s %r' % (fixed_entity_type, op, filter_type)
            else:
              continue
            logging.log(logging.DEBUG - 1,
                        'Evaling filter expression "%s"', comp)
            try:
              # NOTE(review): the comparison is evaluated by eval() on a
              # string built from repr()s of the values. Acceptable for a
              # local dev stub on the developer's own data, but would be
              # unsafe on untrusted input.
              ret = eval(comp)
              if ret and ret != NotImplementedError:
                return True
            except TypeError:
              pass
        return False
      results = filter(passes, results)
    def has_prop_indexed(entity, prop):
      """Returns True if prop is in the entity and is not a raw property, or
      is a special property."""
      if prop in datastore_types._SPECIAL_PROPERTIES:
        return True
      values = entity.get(prop, [])
      if not isinstance(values, (tuple, list)):
        values = [values]
      for value in values:
        if type(value) not in datastore_types._RAW_PROPERTY_TYPES:
          return True
      return False
    # As in production, entities missing an indexed value for a sort
    # property are excluded from the results.
    for order in query.order_list():
      prop = order.property().decode('utf-8')
      results = [entity for entity in results if has_prop_indexed(entity, prop)]
    def order_compare_entities(a, b):
      """ Return a negative, zero or positive number depending on whether
      entity a is considered smaller than, equal to, or larger than b,
      according to the query's orderings. """
      cmped = 0
      for o in query.order_list():
        prop = o.property().decode('utf-8')
        reverse = (o.direction() is datastore_pb.Query_Order.DESCENDING)
        if prop in datastore_types._SPECIAL_PROPERTIES:
          a_val = self.__GetSpecialPropertyValue(a, prop)
          b_val = self.__GetSpecialPropertyValue(b, prop)
        else:
          # For multi-valued properties, sort on the extreme value in the
          # sort direction.
          a_val = a[prop]
          if isinstance(a_val, list):
            a_val = sorted(a_val, order_compare_properties, reverse=reverse)[0]
          b_val = b[prop]
          if isinstance(b_val, list):
            b_val = sorted(b_val, order_compare_properties, reverse=reverse)[0]
        cmped = order_compare_properties(a_val, b_val)
        if o.direction() is datastore_pb.Query_Order.DESCENDING:
          cmped = -cmped
        if cmped != 0:
          return cmped
      # Fall back to key order for a stable, deterministic result.
      if cmped == 0:
        return cmp(a.key(), b.key())
    def order_compare_properties(x, y):
      """Return a negative, zero or positive number depending on whether
      property value x is considered smaller than, equal to, or larger than
      property value y. If x and y are different types, they're compared based
      on the type ordering used in the real datastore, which is based on the
      tag numbers in the PropertyValue PB.
      """
      if isinstance(x, datetime.datetime):
        x = datastore_types.DatetimeToTimestamp(x)
      if isinstance(y, datetime.datetime):
        y = datastore_types.DatetimeToTimestamp(y)
      x_type = self._PROPERTY_TYPE_TAGS.get(x.__class__)
      y_type = self._PROPERTY_TYPE_TAGS.get(y.__class__)
      if x_type == y_type:
        try:
          return cmp(x, y)
        except TypeError:
          return 0
      else:
        return cmp(x_type, y_type)
    results.sort(order_compare_entities)
    # Apply offset and (capped) limit after sorting.
    offset = 0
    limit = len(results)
    if query.has_offset():
      offset = query.offset()
    if query.has_limit():
      limit = query.limit()
    if limit > _MAXIMUM_RESULTS:
      limit = _MAXIMUM_RESULTS
    results = results[offset:limit + offset]
    # Record the query (minus its hint) in the persisted query history.
    clone = datastore_pb.Query()
    clone.CopyFrom(query)
    clone.clear_hint()
    if clone in self.__query_history:
      self.__query_history[clone] += 1
    else:
      self.__query_history[clone] = 1
    self.__WriteHistory()
    # Register a cursor so _Dynamic_Next can page through the results.
    self.__cursor_lock.acquire()
    cursor = self.__next_cursor
    self.__next_cursor += 1
    self.__cursor_lock.release()
    self.__queries[cursor] = (results, len(results))
    query_result.mutable_cursor().set_cursor(cursor)
    query_result.set_more_results(len(results) > 0)
  def _Dynamic_Next(self, next_request, query_result):
    """Returns the next batch of results for a cursor created by
    _Dynamic_RunQuery, consuming them from the stored list."""
    cursor = next_request.cursor().cursor()
    try:
      results, orig_count = self.__queries[cursor]
    except KeyError:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'Cursor %d not found' % cursor)
    count = next_request.count()
    results_pb = [r._ToPb() for r in results[:count]]
    query_result.result_list().extend(results_pb)
    del results[:count]
    query_result.set_more_results(len(results) > 0)
  def _Dynamic_Count(self, query, integer64proto):
    """Counts matching entities by running the full query and reporting the
    result-set size, then discarding the cursor."""
    query_result = datastore_pb.QueryResult()
    self._Dynamic_RunQuery(query, query_result)
    cursor = query_result.cursor().cursor()
    results, count = self.__queries[cursor]
    integer64proto.set_value(count)
    del self.__queries[cursor]
  def _Dynamic_BeginTransaction(self, request, transaction):
    """Starts a transaction: allocates a handle, grabs the global tx lock and
    snapshots all entities for a possible rollback."""
    self.__tx_handle_lock.acquire()
    handle = self.__next_tx_handle
    self.__next_tx_handle += 1
    self.__tx_handle_lock.release()
    self.__transactions[handle] = None
    transaction.set_handle(handle)
    # Held until Commit or Rollback; serializes all transactions.
    self.__tx_lock.acquire()
    # Shallow-copy each kind's entity dict so rollback can restore it.
    snapshot = [(app_kind, dict(entities))
                for app_kind, entities in self.__entities.items()]
    self.__tx_snapshot = dict(snapshot)
  def _Dynamic_Commit(self, transaction, transaction_response):
    """Commits: discards the snapshot, persists entities, releases tx lock.

    NOTE(review): the handle appears never to be removed from
    __transactions (same in Rollback), so the dict grows per transaction —
    presumably acceptable for a dev stub; confirm.
    """
    if not self.__transactions.has_key(transaction.handle()):
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        'Transaction handle %d not found' % transaction.handle())
    self.__tx_snapshot = {}
    try:
      self.__WriteDatastore()
    finally:
      self.__tx_lock.release()
  def _Dynamic_Rollback(self, transaction, transaction_response):
    """Rolls back: restores the entity snapshot taken at BeginTransaction and
    releases the tx lock."""
    if not self.__transactions.has_key(transaction.handle()):
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        'Transaction handle %d not found' % transaction.handle())
    self.__entities = self.__tx_snapshot
    self.__tx_snapshot = {}
    self.__tx_lock.release()
  def _Dynamic_GetSchema(self, app_str, schema):
    """Builds one pseudo-entity per kind describing the union of property
    names/types seen, with values reset to per-type minimum placeholders."""
    minint = -sys.maxint - 1
    try:
      minfloat = float('-inf')
    except ValueError:
      # Platforms where float('-inf') fails; this literal overflows to -inf.
      minfloat = -1e300000
    app_str = app_str.value()
    kinds = []
    for app, kind in self.__entities:
      if app == app_str:
        app_kind = (app, kind)
        if app_kind in self.__schema_cache:
          kinds.append(self.__schema_cache[app_kind])
          continue
        kind_pb = entity_pb.EntityProto()
        kind_pb.mutable_key().set_app('')
        kind_pb.mutable_key().mutable_path().add_element().set_type(kind)
        kind_pb.mutable_entity_group()
        # Merge every property of every entity of this kind into one
        # PropertyValue per property name.
        props = {}
        for entity in self.__entities[app_kind].values():
          for prop in entity.protobuf.property_list():
            if prop.name() not in props:
              props[prop.name()] = entity_pb.PropertyValue()
            props[prop.name()].MergeFrom(prop.value())
        # Replace actual values with minimum placeholders of each type.
        for value_pb in props.values():
          if value_pb.has_int64value():
            value_pb.set_int64value(minint)
          if value_pb.has_booleanvalue():
            value_pb.set_booleanvalue(False)
          if value_pb.has_stringvalue():
            value_pb.set_stringvalue('')
          if value_pb.has_doublevalue():
            value_pb.set_doublevalue(minfloat)
          if value_pb.has_pointvalue():
            value_pb.mutable_pointvalue().set_x(minfloat)
            value_pb.mutable_pointvalue().set_y(minfloat)
          if value_pb.has_uservalue():
            value_pb.mutable_uservalue().set_gaiaid(minint)
            value_pb.mutable_uservalue().set_email('')
            value_pb.mutable_uservalue().set_auth_domain('')
            value_pb.mutable_uservalue().clear_nickname()
          elif value_pb.has_referencevalue():
            value_pb.clear_referencevalue()
            value_pb.mutable_referencevalue().set_app('')
        for name, value_pb in props.items():
          prop_pb = kind_pb.add_property()
          prop_pb.set_name(name)
          prop_pb.set_multiple(False)
          prop_pb.mutable_value().CopyFrom(value_pb)
        kinds.append(kind_pb)
        self.__schema_cache[app_kind] = kind_pb
    for kind_pb in kinds:
      schema.add_kind().CopyFrom(kind_pb)
  def _Dynamic_CreateIndex(self, index, id_response):
    """Registers a new composite index, allocating and returning its id."""
    if index.id() != 0:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'New index id must be 0.')
    elif self.__FindIndex(index):
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             'Index already exists.')
    self.__index_id_lock.acquire()
    index.set_id(self.__next_index_id)
    id_response.set_value(self.__next_index_id)
    self.__next_index_id += 1
    self.__index_id_lock.release()
    # Store a clone so later caller mutations don't affect our copy.
    clone = entity_pb.CompositeIndex()
    clone.CopyFrom(index)
    app = index.app_id()
    clone.set_app_id(app)
    self.__indexes_lock.acquire()
    try:
      if app not in self.__indexes:
        self.__indexes[app] = []
      self.__indexes[app].append(clone)
    finally:
      self.__indexes_lock.release()
  def _Dynamic_GetIndices(self, app_str, composite_indices):
    """Returns all composite indexes registered for the given app."""
    composite_indices.index_list().extend(
      self.__indexes.get(app_str.value(), []))
  def _Dynamic_UpdateIndex(self, index, void):
    """Updates a stored index's state, enforcing the legal state machine in
    _INDEX_STATE_TRANSITIONS."""
    stored_index = self.__FindIndex(index)
    if not stored_index:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             "Index doesn't exist.")
    elif (index.state() != stored_index.state() and
          index.state() not in self._INDEX_STATE_TRANSITIONS[stored_index.state()]):
      raise apiproxy_errors.ApplicationError(
        datastore_pb.Error.BAD_REQUEST,
        "cannot move index state from %s to %s" %
          (entity_pb.CompositeIndex.State_Name(stored_index.state()),
          (entity_pb.CompositeIndex.State_Name(index.state()))))
    self.__indexes_lock.acquire()
    try:
      stored_index.set_state(index.state())
    finally:
      self.__indexes_lock.release()
  def _Dynamic_DeleteIndex(self, index, void):
    """Removes a stored index matching the given index's definition."""
    stored_index = self.__FindIndex(index)
    if not stored_index:
      raise apiproxy_errors.ApplicationError(datastore_pb.Error.BAD_REQUEST,
                                             "Index doesn't exist.")
    app = index.app_id()
    self.__indexes_lock.acquire()
    try:
      self.__indexes[app].remove(stored_index)
    finally:
      self.__indexes_lock.release()
  def __FindIndex(self, index):
    """Finds an existing index by definition.

    Args:
      index: entity_pb.CompositeIndex

    Returns:
      entity_pb.CompositeIndex, if it exists; otherwise None
    """
    app = index.app_id()
    if app in self.__indexes:
      for stored_index in self.__indexes[app]:
        if index.definition() == stored_index.definition():
          return stored_index
    return None
  @classmethod
  def __GetSpecialPropertyValue(cls, entity, property):
    """Returns an entity's value for a special property.

    Right now, the only special property is __key__, whose value is the
    entity's key.

    Args:
      entity: datastore.Entity
      property: string, the name of the special property

    Returns:
      property value. For __key__, a datastore_types.Key.

    Raises:
      AssertionError, if the given property is not special.
    """
    assert property in datastore_types._SPECIAL_PROPERTIES
    if property == datastore_types._KEY_SPECIAL_PROPERTY:
      return entity.key()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Python datastore class User to be used as a datastore data type.
Classes defined here:
User: object representing a user.
Error: base exception type
UserNotFoundError: UserService exception
RedirectTooLongError: UserService exception
NotAllowedError: UserService exception
"""
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import user_service_pb
from google.appengine.api import api_base_pb
from google.appengine.runtime import apiproxy_errors
class Error(Exception):
  """Base exception type for the users API."""
class UserNotFoundError(Error):
  """Raised by User.__init__() when there's no email argument and no user is
  logged in."""
class RedirectTooLongError(Error):
  """Raised by UserService calls if the generated redirect URL was too long.
  """
class NotAllowedError(Error):
  """Raised by UserService calls if the requested redirect URL is not allowed.
  """
class User(object):
  """Represents a single user of the application.

  Exposes the user's email address, nickname, and auth domain. The nickname
  is a human-readable string uniquely identifying a Google user, akin to a
  username; for some (but not all) users it is simply their email address.
  """
  def __init__(self, email=None, _auth_domain=None):
    """Builds a User, defaulting to the currently logged-in user.

    Args:
      email: string, optional. When omitted, the current user's email is
        taken from the environment; UserNotFoundError is raised if none.
    """
    if _auth_domain is not None:
      # An explicit auth domain is only meaningful with an explicit email.
      assert email is not None
    else:
      _auth_domain = os.environ.get('AUTH_DOMAIN')
    assert _auth_domain
    if email is None:
      assert 'USER_EMAIL' in os.environ
      email = os.environ['USER_EMAIL']
    if not email:
      raise UserNotFoundError
    self.__email = email
    self.__auth_domain = _auth_domain
  def nickname(self):
    """Returns a unique, human readable identifier for this user.

    For users on this app's auth domain this is the local part of their
    email address; for everyone else it is the full email address.
    """
    suffix = '@' + (self.__auth_domain or '')
    if self.__email and self.__auth_domain and self.__email.endswith(suffix):
      return self.__email[:-len(suffix)]
    return self.__email
  def email(self):
    """Returns this user's email address."""
    return self.__email
  def auth_domain(self):
    """Returns this user's auth domain."""
    return self.__auth_domain
  def __unicode__(self):
    return unicode(self.nickname())
  def __str__(self):
    return str(self.nickname())
  def __repr__(self):
    return "users.User(email='%s')" % self.email()
  def __hash__(self):
    return hash((self.__email, self.__auth_domain))
  def __cmp__(self, other):
    # Only comparable to other User instances.
    if not isinstance(other, User):
      return NotImplemented
    mine = (self.__email, self.__auth_domain)
    theirs = (other.__email, other.__auth_domain)
    return cmp(mine, theirs)
def create_login_url(dest_url):
"""Computes the login URL for this request and specified destination URL.
Args:
dest_url: String that is the desired final destination URL for the user
once login is complete. If 'dest_url' does not have a host
specified, we will use the host from the current request.
Returns:
string
"""
req = user_service_pb.StringProto()
resp = user_service_pb.StringProto()
req.set_value(dest_url)
try:
apiproxy_stub_map.MakeSyncCall('user', 'CreateLoginURL', req, resp)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
user_service_pb.UserServiceError.REDIRECT_URL_TOO_LONG):
raise RedirectTooLongError
elif (e.application_error ==
user_service_pb.UserServiceError.NOT_ALLOWED):
raise NotAllowedError
else:
raise e
return resp.value()
CreateLoginURL = create_login_url
def create_logout_url(dest_url):
"""Computes the logout URL for this request and specified destination URL.
Args:
dest_url: String that is the desired final destination URL for the user
once logout is complete. If 'dest_url' does not have a host
specified, we will use the host from the current request.
Returns:
string
"""
req = user_service_pb.StringProto()
resp = user_service_pb.StringProto()
req.set_value(dest_url)
try:
apiproxy_stub_map.MakeSyncCall('user', 'CreateLogoutURL', req, resp)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
user_service_pb.UserServiceError.REDIRECT_URL_TOO_LONG):
raise RedirectTooLongError
else:
raise e
return resp.value()
CreateLogoutURL = create_logout_url
def get_current_user():
  """Returns the User for the currently logged-in user, or None if no user
  is logged in."""
  try:
    return User()
  except UserNotFoundError:
    return None
GetCurrentUser = get_current_user
def is_current_user_admin():
  """Returns True if the user making this request is an application admin.

  This is deliberately a free function rather than a User method: admin
  status is not persisted in the datastore, it only exists for the user
  making the current request (via the USER_IS_ADMIN environment variable).
  """
  return os.environ.get('USER_IS_ADMIN', '0') == '1'
IsCurrentUserAdmin = is_current_user_admin
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Builder for mapping YAML documents to object instances.
ObjectBuilder is responsible for mapping a YAML document to classes defined
using the validation mechanism (see google.appengine.api.validation.py).
"""
from google.appengine.api import validation
from google.appengine.api import yaml_listener
from google.appengine.api import yaml_builder
from google.appengine.api import yaml_errors
import yaml
class _ObjectMapper(object):
"""Wrapper used for mapping attributes from a yaml file to an object.
This wrapper is required because objects do not know what property they are
associated with a creation time, and therefore can not be instantiated
with the correct class until they are mapped to their parents.
"""
def __init__(self):
"""Object mapper starts off with empty value."""
self.value = None
self.seen = set()
def set_value(self, value):
"""Set value of instance to map to.
Args:
value: Instance that this mapper maps to.
"""
self.value = value
def see(self, key):
if key in self.seen:
raise yaml_errors.DuplicateAttribute("Duplicate attribute '%s'." % key)
self.seen.add(key)
class _ObjectSequencer(object):
"""Wrapper used for building sequences from a yaml file to a list.
This wrapper is required because objects do not know what property they are
associated with a creation time, and therefore can not be instantiated
with the correct class until they are mapped to their parents.
"""
def __init__(self):
"""Object sequencer starts off with empty value."""
self.value = []
self.constructor = None
def set_constructor(self, constructor):
"""Set object used for constructing new sequence instances.
Args:
constructor: Callable which can accept no arguments. Must return
an instance of the appropriate class for the container.
"""
self.constructor = constructor
class ObjectBuilder(yaml_builder.Builder):
  """Builder used for constructing validated objects.

  Given a class that implements validation.Validated, it will parse a YAML
  document and attempt to build an instance of the class.  It does so by
  mapping YAML keys to Python attributes.  ObjectBuilder will only map YAML
  fields to attributes defined in the Validated subclasses 'ATTRIBUTE'
  definitions.  Lists are mapped to validated.Repeated attributes and maps
  are mapped to validated.Type properties.

  For a YAML map to be compatible with a class, the class must have a
  constructor that can be called with no parameters.  If the provided type
  does not have such a constructor a parse time error will occur.
  """

  def __init__(self, default_class):
    """Initialize validated object builder.

    Args:
      default_class: Class that is instantiated upon the detection of a new
        document.  An instance of this class will act as the document itself.
    """
    self.default_class = default_class

  def _GetRepeated(self, attribute):
    """Get the ultimate element constructor of a repeated validator.

    Unwraps an Optional wrapper if present, then looks for an instance of
    validation.Repeated and returns its element constructor.

    Args:
      attribute: Repeated validator attribute to find type for.

    Returns:
      The constructor of the Repeated validator's elements, otherwise object.
    """
    if isinstance(attribute, validation.Optional):
      attribute = attribute.validator
    if isinstance(attribute, validation.Repeated):
      return attribute.constructor
    return object

  def BuildDocument(self):
    """Instantiate a new root validated object.

    Returns:
      New instance of validated object (self.default_class).
    """
    return self.default_class()

  def BuildMapping(self, top_value):
    """New instance of object mapper for opening map scope.

    Args:
      top_value: Parent of nested object.

    Returns:
      New instance of object mapper.  When the parent is the document root,
      the mapper is bound directly to it so top-level keys are set on the
      document object itself.
    """
    result = _ObjectMapper()
    if isinstance(top_value, self.default_class):
      result.value = top_value
    return result

  def EndMapping(self, top_value, mapping):
    """When leaving scope, makes sure the new object is fully initialized.

    This method is mainly for picking up on any missing required attributes.

    Args:
      top_value: Parent of closing mapping object.
      mapping: _ObjectMapper instance that is leaving scope.

    Raises:
      validation.ValidationError: propagated unchanged if raised by
        CheckInitialized; any other exception is wrapped in one (str(e) can
        itself fail, hence the guarded conversion).
    """
    try:
      mapping.value.CheckInitialized()
    except validation.ValidationError:
      raise
    except Exception, e:
      try:
        error_str = str(e)
      except Exception:
        error_str = '<unknown>'
      raise validation.ValidationError("Invalid object:\n%s" % error_str, e)

  def BuildSequence(self, top_value):
    """New instance of object sequence.

    Args:
      top_value: Object that contains the new sequence.

    Returns:
      A new _ObjectSequencer instance.
    """
    return _ObjectSequencer()

  def MapTo(self, subject, key, value):
    """Map a key-value pair to an object's attribute.

    Nested mappers/sequencers are resolved to concrete instances here, once
    the expected type of the attribute is known from the parent.

    Args:
      subject: _ObjectMapper of object that will receive new attribute.
      key: Key of attribute.
      value: Value of new attribute.

    Raises:
      yaml_errors.UnexpectedAttribute: when the key is not a validated
        attribute of the subject value's class.
      yaml_errors.DuplicateAttribute: via subject.see(), when the key
        appears twice in the same mapping.
      validation.ValidationError: when assignment fails validation; other
        exceptions are wrapped in one.
    """
    assert subject.value is not None
    if key not in subject.value.ATTRIBUTES:
      raise yaml_errors.UnexpectedAttribute(
          'Unexpected attribute \'%s\' for object of type %s.' %
          (key, str(subject.value.__class__)))
    if isinstance(value, _ObjectMapper):
      # Nested mapping: instantiate the expected type for this attribute.
      value.set_value(subject.value.GetAttribute(key).expected_type())
      value = value.value
    elif isinstance(value, _ObjectSequencer):
      # Nested sequence: inject the element constructor for this attribute.
      value.set_constructor(self._GetRepeated(subject.value.ATTRIBUTES[key]))
      value = value.value
    subject.see(key)
    try:
      setattr(subject.value, key, value)
    except validation.ValidationError, e:
      # str() of the error or the value may itself raise; guard both before
      # rewriting the error message with assignment context.
      try:
        error_str = str(e)
      except Exception:
        error_str = '<unknown>'
      try:
        value_str = str(value)
      except Exception:
        value_str = '<unknown>'
      e.message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
                   (value_str, key, error_str))
      raise e
    except Exception, e:
      try:
        error_str = str(e)
      except Exception:
        error_str = '<unknown>'
      try:
        value_str = str(value)
      except Exception:
        value_str = '<unknown>'
      message = ("Unable to assign value '%s' to attribute '%s':\n%s" %
                 (value_str, key, error_str))
      raise validation.ValidationError(message, e)

  def AppendTo(self, subject, value):
    """Append a value to a sequence.

    Args:
      subject: _ObjectSequencer that is receiving new value.
      value: Value that is being appended to sequence.  A nested
        _ObjectMapper is first resolved using the sequence's constructor.
    """
    if isinstance(value, _ObjectMapper):
      value.set_value(subject.constructor())
      subject.value.append(value.value)
    else:
      subject.value.append(value)
def BuildObjects(default_class, stream, loader=yaml.loader.SafeLoader):
  """Builds a list of objects from a YAML stream.

  Handles the basic case of loading all the objects from a stream.

  Args:
    default_class: Class that is instantiated upon the detection of a new
      document.  An instance of this class will act as the document itself.
    stream: String document or open file object to process as per the
      yaml.parse method.  Any object that implements a 'read()' method which
      returns a string document will work with the YAML parser.
    loader: YAML loader class to use.  Used for dependency injection.

  Returns:
    List of default_class instances parsed from the stream.
  """
  handler = yaml_builder.BuilderHandler(ObjectBuilder(default_class))
  yaml_listener.EventListener(handler).Parse(stream, loader)
  return handler.GetResults()
def BuildSingleObject(default_class, stream, loader=yaml.loader.SafeLoader):
  """Builds exactly one object from a YAML stream.

  Handles the basic case of loading a single object from a stream.

  Args:
    default_class: Class that is instantiated upon the detection of a new
      document.  An instance of this class will act as the document itself.
    stream: String document or open file object to process as per the
      yaml.parse method.  Any object that implements a 'read()' method which
      returns a string document will work with the YAML parser.
    loader: YAML loader class to use.  Used for dependency injection.

  Returns:
    The single default_class instance parsed from the stream.

  Raises:
    yaml_errors.EmptyConfigurationFile: if the stream holds no documents.
    yaml_errors.MultipleConfigurationFile: if it holds more than one.
  """
  parsed = BuildObjects(default_class, stream, loader)
  if not parsed:
    raise yaml_errors.EmptyConfigurationFile()
  if len(parsed) > 1:
    raise yaml_errors.MultipleConfigurationFile()
  return parsed[0]
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the mail API, writes email to logs and can optionally
send real email via SMTP or sendmail."""
from email import MIMEBase
from email import MIMEMultipart
from email import MIMEText
import logging
import mail
import mimetypes
import subprocess
import smtplib
from google.appengine.api import apiproxy_stub
class MailServiceStub(apiproxy_stub.APIProxyStub):
"""Python only mail service stub.
This stub does not actually attempt to send email. instead it merely logs
a description of the email to the developers console.
Args:
host: Host of SMTP server to use. Blank disables sending SMTP.
port: Port of SMTP server to use.
user: User to log in to SMTP server as.
password: Password for SMTP server user.
"""
def __init__(self,
host=None,
port=25,
user='',
password='',
enable_sendmail=False,
show_mail_body=False,
service_name='mail'):
"""Constructor.
Args:
host: Host of SMTP mail server.
post: Port of SMTP mail server.
user: Sending user of SMTP mail.
password: SMTP password.
enable_sendmail: Whether sendmail enabled or not.
show_mail_body: Whether to show mail body in log.
service_name: Service name expected for all calls.
"""
super(MailServiceStub, self).__init__(service_name)
self._smtp_host = host
self._smtp_port = port
self._smtp_user = user
self._smtp_password = password
self._enable_sendmail = enable_sendmail
self._show_mail_body = show_mail_body
def _GenerateLog(self, method, message, log):
"""Generate a list of log messages representing sent mail.
Args:
message: Message to write to log.
log: Log function of type string -> None
"""
log('MailService.%s' % method)
log(' From: %s' % message.sender())
for address in message.to_list():
log(' To: %s' % address)
for address in message.cc_list():
log(' Cc: %s' % address)
for address in message.bcc_list():
log(' Bcc: %s' % address)
if message.replyto():
log(' Reply-to: %s' % message.replyto())
log(' Subject: %s' % message.subject())
if message.has_textbody():
log(' Body:')
log(' Content-type: text/plain')
log(' Data length: %d' % len(message.textbody()))
if self._show_mail_body:
log('-----\n' + message.textbody() + '\n-----')
if message.has_htmlbody():
log(' Body:')
log(' Content-type: text/html')
log(' Data length: %d' % len(message.htmlbody()))
if self._show_mail_body:
log('-----\n' + message.htmlbody() + '\n-----')
for attachment in message.attachment_list():
log(' Attachment:')
log(' File name: %s' % attachment.filename())
log(' Data length: %s' % len(attachment.data()))
def _SendSMTP(self, mime_message, smtp_lib=smtplib.SMTP):
"""Send MIME message via SMTP.
Connects to SMTP server and sends MIME message. If user is supplied
will try to login to that server to send as authenticated. Does not
currently support encryption.
Args:
mime_message: MimeMessage to send. Create using ToMIMEMessage.
smtp_lib: Class of SMTP library. Used for dependency injection.
"""
smtp = smtp_lib()
try:
smtp.connect(self._smtp_host, self._smtp_port)
if self._smtp_user:
smtp.login(self._smtp_user, self._smtp_password)
tos = ', '.join([mime_message[to] for to in ['To', 'Cc', 'Bcc']
if mime_message[to]])
smtp.sendmail(mime_message['From'], tos, str(mime_message))
finally:
smtp.quit()
def _SendSendmail(self, mime_message,
popen=subprocess.Popen,
sendmail_command='sendmail'):
"""Send MIME message via sendmail, if exists on computer.
Attempts to send email via sendmail. Any IO failure, including
the program not being found is ignored.
Args:
mime_message: MimeMessage to send. Create using ToMIMEMessage.
popen: popen function to create a new sub-process.
"""
try:
tos = [mime_message[to] for to in ['To', 'Cc', 'Bcc'] if mime_message[to]]
sendmail_command = '%s %s' % (sendmail_command, ' '.join(tos))
try:
child = popen(sendmail_command,
shell=True,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE)
except (IOError, OSError), e:
logging.error('Unable to open pipe to sendmail')
raise
try:
child.stdin.write(str(mime_message))
child.stdin.close()
finally:
while child.poll() is None:
child.stdout.read(100)
child.stdout.close()
except (IOError, OSError), e:
logging.error('Error sending mail using sendmail: ' + str(e))
def _Send(self, request, response, log=logging.info,
smtp_lib=smtplib.SMTP,
popen=subprocess.Popen,
sendmail_command='sendmail'):
"""Implementation of MailServer::Send().
Logs email message. Contents of attachments are not shown, only
their sizes. If SMTP is configured, will send via SMTP, else
will use Sendmail if it is installed.
Args:
request: The message to send, a SendMailRequest.
response: The send response, a SendMailResponse.
log: Log function to send log information. Used for dependency
injection.
smtp_lib: Class of SMTP library. Used for dependency injection.
popen2: popen2 function to use for opening pipe to other process.
Used for dependency injection.
"""
self._GenerateLog('Send', request, log)
if self._smtp_host and self._enable_sendmail:
log('Both SMTP and sendmail are enabled. Ignoring sendmail.')
import email
mime_message = mail.MailMessageToMIMEMessage(request)
if self._smtp_host:
self._SendSMTP(mime_message, smtp_lib)
elif self._enable_sendmail:
self._SendSendmail(mime_message, popen, sendmail_command)
else:
logging.info('You are not currently sending out real email. '
'If you have sendmail installed you can use it '
'by using the server with --enable_sendmail')
_Dynamic_Send = _Send
def _SendToAdmins(self, request, response, log=logging.info):
"""Implementation of MailServer::SendToAdmins().
Logs email message. Contents of attachments are not shown, only
their sizes.
Given the difficulty of determining who the actual sender
is, Sendmail and SMTP are disabled for this action.
Args:
request: The message to send, a SendMailRequest.
response: The send response, a SendMailResponse.
log: Log function to send log information. Used for dependency
injection.
"""
self._GenerateLog('SendToAdmins', request, log)
if self._smtp_host and self._enable_sendmail:
log('Both SMTP and sendmail are enabled. Ignoring sendmail.')
_Dynamic_SendToAdmins = _SendToAdmins
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Trivial implementation of the UserService."""
import os
import urllib
import urlparse
from google.appengine.api import apiproxy_stub
from google.appengine.api import user_service_pb
_DEFAULT_LOGIN_URL = 'https://www.google.com/accounts/Login?continue=%s'
_DEFAULT_LOGOUT_URL = 'https://www.google.com/accounts/Logout?continue=%s'
class UserServiceStub(apiproxy_stub.APIProxyStub):
  """Trivial implementation of the UserService."""

  def __init__(self,
               login_url=_DEFAULT_LOGIN_URL,
               logout_url=_DEFAULT_LOGOUT_URL,
               service_name='user'):
    """Initializer.

    Args:
      login_url: String containing the URL to use for logging in.
      logout_url: String containing the URL to use for logging out.
      service_name: Service name expected for all calls.

    Note: Both the login_url and logout_url arguments must contain one
    format parameter, which will be replaced with the continuation URL
    where the user should be redirected after log-in or log-out has been
    completed.
    """
    super(UserServiceStub, self).__init__(service_name)
    self.__num_requests = 0
    self._login_url = login_url
    self._logout_url = logout_url
    # The dev server always pretends users authenticate against gmail.com.
    os.environ['AUTH_DOMAIN'] = 'gmail.com'

  def num_requests(self):
    """Returns how many URL-creation requests this stub has served."""
    return self.__num_requests

  def _Dynamic_CreateLoginURL(self, request, response):
    """Trivial implementation of UserService.CreateLoginURL().

    Args:
      request: the URL to redirect to after login; a base.StringProto
      response: the login URL; a base.StringProto
    """
    self.__num_requests += 1
    continue_url = self._AddHostToContinueURL(request.value())
    response.set_value(self._login_url % urllib.quote(continue_url))

  def _Dynamic_CreateLogoutURL(self, request, response):
    """Trivial implementation of UserService.CreateLogoutURL().

    Args:
      request: the URL to redirect to after logout; a base.StringProto
      response: the logout URL; a base.StringProto
    """
    self.__num_requests += 1
    continue_url = self._AddHostToContinueURL(request.value())
    response.set_value(self._logout_url % urllib.quote(continue_url))

  def _AddHostToContinueURL(self, continue_url):
    """Adds the request host to the continue url if no host is specified.

    Args:
      continue_url: the URL which may or may not have a host specified

    Returns:
      string
    """
    parts = urlparse.urlparse(continue_url, 'http')
    (protocol, host, path, parameters, query, fragment) = parts
    if host:
      return continue_url
    # No host in the URL: borrow it from the current request's environment.
    host = os.environ['SERVER_NAME']
    port = os.environ['SERVER_PORT']
    if port != '80':
      host = '%s:%s' % (host, port)
    return urlparse.urlunparse(
        (protocol, host, path or '/', parameters, query, fragment))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Classes for common kinds, including Contact, Message, and Event.
Most of these kinds are based on the gd namespace "kinds" from GData:
http://code.google.com/apis/gdata/common-elements.html
"""
import types
import urlparse
from xml.sax import saxutils
from google.appengine.datastore import datastore_pb
from google.appengine.api import datastore
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
class GdKind(datastore.Entity):
  """ A base class for gd namespace kinds.

  This class contains common logic for all gd namespace kinds.  For example,
  this class translates datastore (app id, kind, key) tuples to tag:
  URIs appropriate for use in <key> tags.
  """

  HEADER = u"""<entry xmlns:gd='http://schemas.google.com/g/2005'>
  <category scheme='http://schemas.google.com/g/2005#kind'
            term='http://schemas.google.com/g/2005#%s' />"""
  FOOTER = u"""
</entry>"""

  # Property-name sets used to partition this entity's properties during
  # XML serialization; populated per instance in __init__.
  _kind_properties = set()
  _contact_properties = set()

  def __init__(self, kind, title, kind_properties, contact_properties=[]):
    """ Ctor.

    title is the name of this particular entity, e.g. Bob Jones or Mom's
    Birthday Party.

    kind_properties is a list of property names that should be included in
    this entity's XML encoding as first-class XML elements, instead of
    <property> elements.  'title' and 'content' are added to kind_properties
    automatically, and may not appear in contact_properties.

    contact_properties is a list of property names that are Keys that point
    to Contact entities, and should be included in this entity's XML encoding
    as <gd:who> elements.  If a property name is included in both
    kind_properties and contact_properties, it is treated as a Contact
    property.

    Args:
      kind: string
      title: string
      kind_properties: list of strings
      contact_properties: list of string
    """
    datastore.Entity.__init__(self, kind)
    if not isinstance(title, types.StringTypes):
      raise datastore_errors.BadValueError(
        'Expected a string for title; received %s (a %s).' %
        (title, datastore_types.typename(title)))
    self['title'] = title
    self['content'] = ''
    self._contact_properties = set(contact_properties)
    # 'title'/'content' are always kind properties and must not also be
    # declared as contact properties.
    assert not self._contact_properties.intersection(self.keys())
    self._kind_properties = set(kind_properties) - self._contact_properties
    self._kind_properties.add('title')
    self._kind_properties.add('content')

  def _KindPropertiesToXml(self):
    """ Convert the properties that are part of this gd kind to XML.  For
    testability, the XML elements in the output are sorted alphabetically
    by property name.

    Returns:
      string  # the XML representation of the gd kind properties
    """
    properties = self._kind_properties.intersection(set(self.keys()))

    xml = u''
    for prop in sorted(properties):
      prop_xml = saxutils.quoteattr(prop)[1:-1]

      value = self[prop]
      # Values whose type provides ToXml are assumed to be pre-escaped XML
      # fragments; guard the empty-list case before peeking at element 0.
      has_toxml = (hasattr(value, 'ToXml') or
                   isinstance(value, list) and value and
                   hasattr(value[0], 'ToXml'))

      # _XmlEscapeValues is provided by the datastore.Entity base class
      # (not defined in this file) -- presumably returns escaped string
      # values for the property; confirm against datastore.Entity.
      for val in self._XmlEscapeValues(prop):
        if has_toxml:
          xml += '\n    %s' % val
        else:
          xml += '\n    <%s>%s</%s>' % (prop_xml, val, prop_xml)

    return xml

  def _ContactPropertiesToXml(self):
    """ Convert this kind's Contact properties kind to XML.  For testability,
    the XML elements in the output are sorted alphabetically by property name.

    Returns:
      string  # the XML representation of the Contact properties
    """
    properties = self._contact_properties.intersection(set(self.keys()))

    xml = u''
    for prop in sorted(properties):
      values = self[prop]
      if not isinstance(values, list):
        values = [values]
      for value in values:
        assert isinstance(value, datastore_types.Key)
        # BUG FIX: the rel attribute was missing its closing quote, which
        # produced malformed XML.
        xml += """
  <gd:who rel="http://schemas.google.com/g/2005#%s.%s">
    <gd:entryLink href="%s" />
  </gd:who>""" % (self.kind().lower(), prop, value.ToTagUri())

    return xml

  def _LeftoverPropertiesToXml(self):
    """ Convert all of this entity's properties that *aren't* part of this gd
    kind to XML.

    Returns:
      string  # the XML representation of the leftover properties
    """
    leftovers = set(self.keys())
    leftovers -= self._kind_properties
    leftovers -= self._contact_properties
    if leftovers:
      # _PropertiesToXml is provided by the datastore.Entity base class.
      return u'\n  ' + '\n  '.join(self._PropertiesToXml(leftovers))
    else:
      return u''

  def ToXml(self):
    """ Returns an XML representation of this entity, as a string.
    """
    xml = GdKind.HEADER % self.kind().lower()
    xml += self._KindPropertiesToXml()
    xml += self._ContactPropertiesToXml()
    xml += self._LeftoverPropertiesToXml()
    xml += GdKind.FOOTER
    return xml
class Message(GdKind):
  """The gd Message kind: an email, a discussion group posting, or a comment.

  See http://code.google.com/apis/gdata/common-elements.html#gdMessageKind

  All of these properties are optional; a * marks properties that may be
  repeated:

    title (string): message subject
    content (string): message body
    from, to, cc, bcc, reply-to (Contact*): sender and recipients
    link (Link*): attachment
    category (Category*): tag or label associated with this message
    geoPt (GeoPt*): geographic location the message was posted from
    rating (Rating*): message rating, as defined by the application

  The Contact properties should be Keys of Contact entities.  They are
  represented in the XML encoding as linked <gd:who> elements.
  """

  KIND_PROPERTIES = ['title', 'content', 'link', 'category', 'geoPt', 'rating']
  CONTACT_PROPERTIES = ['from', 'to', 'cc', 'bcc', 'reply-to']

  def __init__(self, title, kind='Message'):
    """Creates a Message entity titled 'title'; kind defaults to 'Message'."""
    GdKind.__init__(self, kind, title, Message.KIND_PROPERTIES,
                    Message.CONTACT_PROPERTIES)
class Event(GdKind):
  """A calendar event.

  Includes the event title, description, location, organizer, start and end
  time, and other details.

  This is the gd Event kind.  See:
  http://code.google.com/apis/gdata/common-elements.html#gdEventKind

  These properties are meaningful.  They are all optional.

  property name    property type    meaning
  -------------------------------------
  title            string           event name
  content          string           event description
  author           string           the organizer's name
  where            string*          human-readable location (not a GeoPt)
  startTime        timestamp        start time
  endTime          timestamp        end time
  eventStatus      string           one of the Event.Status values
  link             Link*            page with more information
  category         Category*        tag or label associated with this event
  attendee         Contact*         attendees and other related people

  * means this property may be repeated.

  The Contact properties should be Keys of Contact entities.  They are
  represented in the XML encoding as linked <gd:who> elements.
  """

  KIND_PROPERTIES = ['title', 'content', 'author', 'where', 'startTime',
                     'endTime', 'eventStatus', 'link', 'category']
  CONTACT_PROPERTIES = ['attendee']

  # Properties that ToXml() serializes with dedicated gd elements instead
  # of the generic kind-property encoding.
  _SPECIAL_PROPERTIES = ['author', 'where', 'startTime', 'endTime',
                         'eventStatus']

  class Status:
    CONFIRMED = 'confirmed'
    TENTATIVE = 'tentative'
    CANCELED = 'canceled'

  def __init__(self, title, kind='Event'):
    GdKind.__init__(self, kind, title, Event.KIND_PROPERTIES,
                    Event.CONTACT_PROPERTIES)

  def ToXml(self):
    """ Override GdKind.ToXml() to special-case author, gd:where, gd:when, and
    gd:eventStatus.
    """
    xml = GdKind.HEADER % self.kind().lower()

    # BUG FIX: previously this borrowed Contact.KIND_PROPERTIES, coupling
    # Event's serialization to an unrelated class.  Derive the same set
    # ({title, content, link, category}) from Event's own constants instead.
    self._kind_properties = (set(Event.KIND_PROPERTIES) -
                             set(Event._SPECIAL_PROPERTIES))
    xml += self._KindPropertiesToXml()

    if 'author' in self:
      xml += """
  <author><name>%s</name></author>""" % self['author']

    if 'eventStatus' in self:
      xml += """
  <gd:eventStatus value="http://schemas.google.com/g/2005#event.%s" />""" % (
        self['eventStatus'])

    if 'where' in self:
      lines = ['<gd:where valueString="%s" />' % val
               for val in self._XmlEscapeValues('where')]
      xml += '\n  ' + '\n  '.join(lines)

    # Emit start/end times as attributes of a single gd:when element.
    # (The unused local 'iso_format' from the original has been removed;
    # datetime.isoformat() already produces the desired format.)
    xml += '\n  <gd:when'
    for key in ['startTime', 'endTime']:
      if key in self:
        xml += ' %s="%s"' % (key, self[key].isoformat())
    xml += ' />'

    # Mark the specially-handled properties as consumed so they are not
    # re-emitted by _LeftoverPropertiesToXml().
    self._kind_properties.update(Event._SPECIAL_PROPERTIES)

    xml += self._ContactPropertiesToXml()
    xml += self._LeftoverPropertiesToXml()
    xml += GdKind.FOOTER
    return xml
class Contact(GdKind):
  """A contact: a person, a venue such as a club or a restaurant, or an
  organization.

  This is the gd Contact kind.  See:
  http://code.google.com/apis/gdata/common-elements.html#gdContactKind

  Most of the information about the contact lives in the
  <gd:contactSection> element; see the reference section for that element
  for details.

  All of these properties are optional; a * marks properties that may be
  repeated:

    title (string): contact's name
    content (string): notes
    email (Email*): email address
    geoPt (GeoPt*): geographic location
    im (IM*): IM address
    phoneNumber (Phonenumber*): phone number
    postalAddress (PostalAddress*): mailing address
    link (Link*): link to more information
    category (Category*): tag or label associated with this contact
  """

  CONTACT_SECTION_HEADER = """
  <gd:contactSection>"""
  CONTACT_SECTION_FOOTER = """
  </gd:contactSection>"""

  KIND_PROPERTIES = ['title', 'content', 'link', 'category']
  CONTACT_SECTION_PROPERTIES = ['email', 'geoPt', 'im', 'phoneNumber',
                                'postalAddress']

  def __init__(self, title, kind='Contact'):
    """Creates a Contact entity titled 'title'; kind defaults to 'Contact'."""
    GdKind.__init__(self, kind, title, Contact.KIND_PROPERTIES)

  def ToXml(self):
    """ Override GdKind.ToXml() to put some properties inside a
    gd:contactSection.
    """
    pieces = [GdKind.HEADER % self.kind().lower()]
    # Top-level kind properties first...
    self._kind_properties = set(Contact.KIND_PROPERTIES)
    pieces.append(self._KindPropertiesToXml())
    # ...then the contact-section properties inside gd:contactSection.
    pieces.append(Contact.CONTACT_SECTION_HEADER)
    self._kind_properties = set(Contact.CONTACT_SECTION_PROPERTIES)
    pieces.append(self._KindPropertiesToXml())
    pieces.append(Contact.CONTACT_SECTION_FOOTER)
    # Mark both groups as consumed so leftovers exclude them.
    self._kind_properties.update(Contact.KIND_PROPERTIES)
    pieces.append(self._LeftoverPropertiesToXml())
    pieces.append(GdKind.FOOTER)
    return ''.join(pieces)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Base class for implementing RPC of API proxy stubs."""
import sys
class RPC(object):
"""Base class for implementing RPC of API proxy stubs.
To implement a RPC to make real asynchronous API call:
- Extend this class.
- Override _MakeCallImpl and/or _WaitImpl to do a real asynchronous call.
"""
IDLE = 0
RUNNING = 1
FINISHING = 2
def __init__(self, package=None, call=None, request=None, response=None,
callback=None, deadline=None, stub=None):
"""Constructor for the RPC object.
All arguments are optional, and simply set members on the class.
These data members will be overriden by values passed to MakeCall.
Args:
package: string, the package for the call
call: string, the call within the package
request: ProtocolMessage instance, appropriate for the arguments
response: ProtocolMessage instance, appropriate for the response
callback: callable, called when call is complete
deadline: A double specifying the deadline for this call as the number of
seconds from the current time. Ignored if non-positive.
stub: APIProxyStub instance, used in default _WaitImpl to do real call
"""
self.__exception = None
self.__state = RPC.IDLE
self.__traceback = None
self.package = package
self.call = call
self.request = request
self.response = response
self.callback = callback
self.deadline = deadline
self.stub = stub
def MakeCall(self, package=None, call=None, request=None, response=None,
callback=None, deadline=None):
"""Makes an asynchronous (i.e. non-blocking) API call within the
specified package for the specified call method.
It will call the _MakeRealCall to do the real job.
Args:
Same as constructor; see __init__.
Raises:
TypeError or AssertionError if an argument is of an invalid type.
AssertionError or RuntimeError is an RPC is already in use.
"""
self.callback = callback or self.callback
self.package = package or self.package
self.call = call or self.call
self.request = request or self.request
self.response = response or self.response
self.deadline = deadline or self.deadline
assert self.__state is RPC.IDLE, ('RPC for %s.%s has already been started' %
(self.package, self.call))
assert self.callback is None or callable(self.callback)
self._MakeCallImpl()
def Wait(self):
"""Waits on the API call associated with this RPC."""
rpc_completed = self._WaitImpl()
assert rpc_completed, ('RPC for %s.%s was not completed, and no other ' +
'exception was raised ' % (self.package, self.call))
def CheckSuccess(self):
"""If there was an exception, raise it now.
Raises:
Exception of the API call or the callback, if any.
"""
if self.exception and self.__traceback:
raise self.exception.__class__, self.exception, self.__traceback
elif self.exception:
raise self.exception
@property
def exception(self):
return self.__exception
@property
def state(self):
return self.__state
def _MakeCallImpl(self):
"""Override this method to implement a real asynchronous call rpc."""
self.__state = RPC.RUNNING
def _WaitImpl(self):
"""Override this method to implement a real asynchronous call rpc.
Returns:
True if the async call was completed successfully.
"""
try:
try:
self.stub.MakeSyncCall(self.package, self.call,
self.request, self.response)
except Exception, e:
self.__exception = e
finally:
self.__state = RPC.FINISHING
self.__Callback()
return True
def __Callback(self):
if self.callback:
try:
self.callback()
except:
exc_class, self.__exception, self.__traceback = sys.exc_info()
self.__exception._appengine_apiproxy_rpc = self
raise
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""The Python datastore API used by app developers.
Defines Entity, Query, and Iterator classes, as well as methods for all of the
datastore's calls. Also defines conversions between the Python classes and
their PB counterparts.
The datastore errors are defined in the datastore_errors module. That module is
only required to avoid circular imports. datastore imports datastore_types,
which needs BadValueError, so it can't be defined in datastore.
"""
import heapq
import itertools
import logging
import re
import string
import sys
import traceback
from xml.sax import saxutils
from google.appengine.api import api_base_pb
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_errors
from google.appengine.api import datastore_types
from google.appengine.datastore import datastore_index
from google.appengine.datastore import datastore_pb
from google.appengine.runtime import apiproxy_errors
from google.appengine.datastore import entity_pb
# NOTE(review): the constants below are not referenced in this chunk; the
# descriptions are inferred from their names -- confirm against callers.

# Presumed cap on the number of sub-queries allowed in one operation.
MAX_ALLOWABLE_QUERIES = 30

# Presumed default retry count for transactions that hit contention.
DEFAULT_TRANSACTION_RETRIES = 3

# Upper bound on indexed property values per entity; enforced in
# Entity._ToPb below.
_MAX_INDEXED_PROPERTIES = 5000

# Re-exported for convenience so callers can use datastore.Key / typename.
Key = datastore_types.Key
typename = datastore_types.typename

# Registry of open transactions; key type is determined by
# _CurrentTransactionKey(), which is not visible in this chunk.
_txes = {}
def NormalizeAndTypeCheck(arg, types):
  """Normalizes and type checks the given argument.

  Args:
    arg: an instance, tuple, list, iterator, or generator of the given type(s)
    types: allowed type or tuple of types

  Returns:
    A (list, bool) tuple. The list is a normalized, shallow copy of the
    argument. The boolean is True if the argument was a sequence, False
    if it was a single object.

  Raises:
    AssertionError: types includes list or tuple.
    BadArgumentError: arg is not an instance or sequence of one of the given
    types.
  """
  if not isinstance(types, (list, tuple)):
    types = (types,)

  assert list not in types and tuple not in types

  if isinstance(arg, types):
    return ([arg], False)
  else:
    try:
      # Bug fix: materialize the argument exactly once. The original
      # iterated it for type checking and then called list(arg) again,
      # which silently returned an empty list for one-shot iterables
      # such as generators.
      normalized = list(arg)
    except TypeError:
      raise datastore_errors.BadArgumentError(
          'Expected an instance or sequence of %s; received %s (a %s).' %
          (types, arg, typename(arg)))

    for val in normalized:
      if not isinstance(val, types):
        raise datastore_errors.BadArgumentError(
            'Expected one of %s; received %s (a %s).' %
            (types, val, typename(val)))

    return (normalized, True)
def NormalizeAndTypeCheckKeys(keys):
  """Normalizes the argument into a list of complete Keys.

  A wrapper around NormalizeAndTypeCheck() that accepts strings, Keys, and
  Entities, converting each element to a complete Key.

  Args:
    keys: a Key or sequence of Keys

  Returns:
    A (list of Keys, bool) tuple. See NormalizeAndTypeCheck.

  Raises:
    BadArgumentError: arg is not an instance or sequence of one of the given
    types.
  """
  normalized, multiple = NormalizeAndTypeCheck(keys, (basestring, Entity, Key))
  complete_keys = [_GetCompleteKeyOrError(value) for value in normalized]
  return (complete_keys, multiple)
def Put(entities):
  """Store one or more entities in the datastore.

  The entities may be new or previously existing. For new entities, Put() will
  fill in the app id and key assigned by the datastore.

  If the argument is a single Entity, a single Key will be returned. If the
  argument is a list of Entity, a list of Keys will be returned.

  Args:
    entities: Entity or list of Entities

  Returns:
    Key or list of Keys

  Raises:
    TransactionFailedError, if the Put could not be committed.
  """
  entities, multiple = NormalizeAndTypeCheck(entities, Entity)

  # An empty sequence is a no-op; skip the RPC entirely.
  if multiple and not entities:
    return []

  for entity in entities:
    if not entity.kind() or not entity.app():
      raise datastore_errors.BadRequestError(
          'App and kind must not be empty, in entity: %s' % entity)

  req = datastore_pb.PutRequest()
  req.entity_list().extend([e._ToPb() for e in entities])

  keys = [e.key() for e in entities]
  # Attach the current transaction, if any, to the request (helper defined
  # elsewhere in this module).
  tx = _MaybeSetupTransaction(req, keys)
  if tx:
    # Only keys that already have an id or name can be recorded up front;
    # incomplete keys are recorded again after the datastore assigns ids.
    tx.RecordModifiedKeys([k for k in keys if k.has_id_or_name()])

  resp = datastore_pb.PutResponse()
  try:
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Put', req, resp)
  except apiproxy_errors.ApplicationError, err:
    raise _ToDatastoreError(err)

  # Sanity-check that the backend returned one key per entity.
  keys = resp.key_list()
  num_keys = len(keys)
  num_entities = len(entities)
  if num_keys != num_entities:
    raise datastore_errors.InternalError(
        'Put accepted %d entities but returned %d keys.' %
        (num_entities, num_keys))

  # Copy each (possibly newly assigned) key back into its entity's private
  # Key reference so callers see the datastore-assigned ids.
  for entity, key in zip(entities, keys):
    entity._Entity__key._Key__reference.CopyFrom(key)

  if tx:
    # Re-record now that every key is complete; repeats are expected here.
    tx.RecordModifiedKeys([e.key() for e in entities], error_on_repeat=False)

  if multiple:
    return [Key._FromPb(k) for k in keys]
  else:
    return Key._FromPb(resp.key(0))
def Get(keys):
  """Retrieves one or more entities from the datastore.

  Retrieves the entity or entities with the given key(s) from the datastore
  and returns them as fully populated Entity objects, as defined below. If
  there is an error, raises a subclass of datastore_errors.Error.

  If keys is a single key or string, an Entity will be returned, or
  EntityNotFoundError will be raised if no existing entity matches the key.

  However, if keys is a list or tuple, a list of entities will be returned
  that corresponds to the sequence of keys. It will include entities for keys
  that were found and None placeholders for keys that were not found.

  Args:
    # the primary key(s) of the entity(ies) to retrieve
    keys: Key or string or list of Keys or strings

  Returns:
    Entity or list of Entity objects
  """
  keys, multiple = NormalizeAndTypeCheckKeys(keys)

  # An empty sequence is a no-op; skip the RPC entirely.
  if multiple and not keys:
    return []

  req = datastore_pb.GetRequest()
  req.key_list().extend([key._Key__reference for key in keys])
  # Attach the current transaction, if any, to the request.
  _MaybeSetupTransaction(req, keys)

  resp = datastore_pb.GetResponse()
  try:
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Get', req, resp)
  except apiproxy_errors.ApplicationError, err:
    raise _ToDatastoreError(err)

  entities = []
  for group in resp.entity_list():
    if group.has_entity():
      entities.append(Entity._FromPb(group.entity()))
    else:
      # Keep a placeholder so results stay positionally aligned with keys.
      entities.append(None)

  if multiple:
    return entities
  else:
    # Single-key form: a missing entity is an error rather than None.
    if entities[0] is None:
      raise datastore_errors.EntityNotFoundError()
    return entities[0]
def Delete(keys):
  """Deletes one or more entities from the datastore. Use with care!

  Deletes the given entity(ies) from the datastore. You can only delete
  entities from your app. If there is an error, raises a subclass of
  datastore_errors.Error.

  Args:
    # the primary key(s) of the entity(ies) to delete
    keys: Key or string or list of Keys or strings

  Raises:
    TransactionFailedError, if the Delete could not be committed.
  """
  keys, multiple = NormalizeAndTypeCheckKeys(keys)

  # An empty sequence is a no-op; skip the RPC entirely.
  if multiple and not keys:
    return

  req = datastore_pb.DeleteRequest()
  req.key_list().extend([key._Key__reference for key in keys])

  # Attach the current transaction, if any, and record the deleted keys in
  # it (keys are complete here, unlike in Put).
  tx = _MaybeSetupTransaction(req, keys)
  if tx:
    tx.RecordModifiedKeys(keys)

  resp = datastore_pb.DeleteResponse()
  try:
    apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Delete', req, resp)
  except apiproxy_errors.ApplicationError, err:
    raise _ToDatastoreError(err)
class Entity(dict):
"""A datastore entity.
Includes read-only accessors for app id, kind, and primary key. Also
provides dictionary-style access to properties.
"""
def __init__(self, kind, parent=None, _app=None, name=None):
"""Constructor. Takes the kind and transaction root, which cannot be
changed after the entity is constructed, and an optional parent. Raises
BadArgumentError or BadKeyError if kind is invalid or parent is not an
existing Entity or Key in the datastore.
Args:
# this entity's kind
kind: string
# if provided, this entity's parent. Its key must be complete.
parent: Entity or Key
# if provided, this entity's name.
name: string
"""
ref = entity_pb.Reference()
_app = datastore_types.ResolveAppId(_app)
ref.set_app(_app)
datastore_types.ValidateString(kind, 'kind',
datastore_errors.BadArgumentError)
if parent is not None:
parent = _GetCompleteKeyOrError(parent)
if _app != parent.app():
raise datastore_errors.BadArgumentError(
"_app %s doesn't match parent's app %s" % (_app, parent.app()))
ref.CopyFrom(parent._Key__reference)
last_path = ref.mutable_path().add_element()
last_path.set_type(kind.encode('utf-8'))
if name is not None:
datastore_types.ValidateString(name, 'name')
if name[0] in string.digits:
raise datastore_errors.BadValueError('name cannot begin with a digit')
last_path.set_name(name.encode('utf-8'))
self.__key = Key._FromPb(ref)
def app(self):
"""Returns the name of the application that created this entity, a
string.
"""
return self.__key.app()
def kind(self):
"""Returns this entity's kind, a string.
"""
return self.__key.kind()
def key(self):
"""Returns this entity's primary key, a Key instance.
"""
return self.__key
def parent(self):
"""Returns this entity's parent, as a Key. If this entity has no parent,
returns None.
"""
return self.key().parent()
def entity_group(self):
"""Returns this entitys's entity group as a Key.
Note that the returned Key will be incomplete if this is a a root entity
and its key is incomplete.
"""
return self.key().entity_group()
def __setitem__(self, name, value):
"""Implements the [] operator. Used to set property value(s).
If the property name is the empty string or not a string, raises
BadPropertyError. If the value is not a supported type, raises
BadValueError.
"""
datastore_types.ValidateProperty(name, value)
dict.__setitem__(self, name, value)
def setdefault(self, name, value):
"""If the property exists, returns its value. Otherwise sets it to value.
If the property name is the empty string or not a string, raises
BadPropertyError. If the value is not a supported type, raises
BadValueError.
"""
datastore_types.ValidateProperty(name, value)
return dict.setdefault(self, name, value)
def update(self, other):
"""Updates this entity's properties from the values in other.
If any property name is the empty string or not a string, raises
BadPropertyError. If any value is not a supported type, raises
BadValueError.
"""
for name, value in other.items():
self.__setitem__(name, value)
def copy(self):
"""The copy method is not supported.
"""
raise NotImplementedError('Entity does not support the copy() method.')
def ToXml(self):
"""Returns an XML representation of this entity. Atom and gd:namespace
properties are converted to XML according to their respective schemas. For
more information, see:
http://www.atomenabled.org/developers/syndication/
http://code.google.com/apis/gdata/common-elements.html
This is *not* optimized. It shouldn't be used anywhere near code that's
performance-critical.
"""
xml = u'<entity kind=%s' % saxutils.quoteattr(self.kind())
if self.__key.has_id_or_name():
xml += ' key=%s' % saxutils.quoteattr(str(self.__key))
xml += '>'
if self.__key.has_id_or_name():
xml += '\n <key>%s</key>' % self.__key.ToTagUri()
properties = self.keys()
if properties:
properties.sort()
xml += '\n ' + '\n '.join(self._PropertiesToXml(properties))
xml += '\n</entity>\n'
return xml
def _PropertiesToXml(self, properties):
""" Returns a list of the XML representations of each of the given
properties. Ignores properties that don't exist in this entity.
Arg:
properties: string or list of strings
Returns:
list of strings
"""
xml_properties = []
for propname in properties:
if not self.has_key(propname):
continue
propname_xml = saxutils.quoteattr(propname)
values = self[propname]
if not isinstance(values, list):
values = [values]
proptype = datastore_types.PropertyTypeName(values[0])
proptype_xml = saxutils.quoteattr(proptype)
escaped_values = self._XmlEscapeValues(propname)
open_tag = u'<property name=%s type=%s>' % (propname_xml, proptype_xml)
close_tag = u'</property>'
xml_properties += [open_tag + val + close_tag for val in escaped_values]
return xml_properties
def _XmlEscapeValues(self, property):
""" Returns a list of the XML-escaped string values for the given property.
Raises an AssertionError if the property doesn't exist.
Arg:
property: string
Returns:
list of strings
"""
assert self.has_key(property)
xml = []
values = self[property]
if not isinstance(values, list):
values = [values]
for val in values:
if hasattr(val, 'ToXml'):
xml.append(val.ToXml())
else:
if val is None:
xml.append('')
else:
xml.append(saxutils.escape(unicode(val)))
return xml
def _ToPb(self):
"""Converts this Entity to its protocol buffer representation. Not
intended to be used by application developers.
Returns:
entity_pb.Entity
"""
pb = entity_pb.EntityProto()
pb.mutable_key().CopyFrom(self.key()._ToPb())
group = pb.mutable_entity_group()
if self.__key.has_id_or_name():
root = pb.key().path().element(0)
group.add_element().CopyFrom(root)
properties = self.items()
properties.sort()
for (name, values) in properties:
properties = datastore_types.ToPropertyPb(name, values)
if not isinstance(properties, list):
properties = [properties]
sample = values
if isinstance(sample, list):
sample = values[0]
if isinstance(sample, datastore_types._RAW_PROPERTY_TYPES):
pb.raw_property_list().extend(properties)
else:
pb.property_list().extend(properties)
if pb.property_size() > _MAX_INDEXED_PROPERTIES:
raise datastore_errors.BadRequestError(
'Too many indexed properties for entity %r.' % self.key())
return pb
@staticmethod
def _FromPb(pb):
"""Static factory method. Returns the Entity representation of the
given protocol buffer (datastore_pb.Entity). Not intended to be used by
application developers.
The Entity PB's key must be complete. If it isn't, an AssertionError is
raised.
Args:
# a protocol buffer Entity
pb: datastore_pb.Entity
Returns:
# the Entity representation of the argument
Entity
"""
assert pb.key().path().element_size() > 0
last_path = pb.key().path().element_list()[-1]
assert last_path.has_id() ^ last_path.has_name()
if last_path.has_id():
assert last_path.id() != 0
else:
assert last_path.has_name()
assert last_path.name()
e = Entity(unicode(last_path.type().decode('utf-8')))
ref = e.__key._Key__reference
ref.CopyFrom(pb.key())
temporary_values = {}
for prop_list in (pb.property_list(), pb.raw_property_list()):
for prop in prop_list:
if not prop.has_multiple():
raise datastore_errors.Error(
'Property %s is corrupt in the datastore; it\'s missing the '
'multiple valued field.' % prop.name())
try:
value = datastore_types.FromPropertyPb(prop)
except (AssertionError, AttributeError, TypeError, ValueError), e:
raise datastore_errors.Error(
'Property %s is corrupt in the datastore. %s: %s' %
(e.__class__, prop.name(), e))
multiple = prop.multiple()
if multiple:
value = [value]
name = prop.name()
cur_value = temporary_values.get(name)
if cur_value is None:
temporary_values[name] = value
elif not multiple:
raise datastore_errors.Error(
'Property %s is corrupt in the datastore; it has multiple '
'values, but is not marked as multiply valued.' % name)
else:
cur_value.extend(value)
for name, value in temporary_values.iteritems():
decoded_name = unicode(name.decode('utf-8'))
datastore_types.ValidateReadProperty(decoded_name, value)
dict.__setitem__(e, decoded_name, value)
return e
class Query(dict):
  """A datastore query.

  (Instead of this, consider using appengine.ext.gql.Query! It provides a
  query language interface on top of the same functionality.)

  Queries are used to retrieve entities that match certain criteria, including
  app id, kind, and property filters. Results may also be sorted by properties.

  App id and kind are required. Only entities from the given app, of the given
  type, are returned. If an ancestor is set, with Ancestor(), only entities
  with that ancestor are returned.

  Property filters are used to provide criteria based on individual property
  values. A filter compares a specific property in each entity to a given
  value or list of possible values.

  An entity is returned if its property values match *all* of the query's
  filters. In other words, filters are combined with AND, not OR. If an
  entity does not have a value for a property used in a filter, it is not
  returned.

  Property filters map filter strings of the form '<property name> <operator>'
  to filter values. Use dictionary accessors to set property filters, like so:

  > query = Query('Person')
  > query['name ='] = 'Ryan'
  > query['age >='] = 21

  This query returns all Person entities where the name property is 'Ryan'
  and the age property is at least 21.

  Another way to build this query is:

  > query = Query('Person')
  > query.update({'name =': 'Ryan', 'age >=': 21})

  The supported operators are =, >, <, >=, and <=. Only one inequality
  filter may be used per query. Any number of equals filters may be used in
  a single Query.

  A filter value may be a list or tuple of values. This is interpreted as
  multiple filters with the same filter string and different values, all ANDed
  together. For example, this query returns everyone with the tags "google"
  and "app engine":

  > Query('Person', {'tag =': ('google', 'app engine')})

  Result entities can be returned in different orders. Use the Order()
  method to specify properties that results will be sorted by, and in which
  direction.

  Note that filters and orderings may be provided at any time before the query
  is run. When the query is fully specified, Run() runs the query and returns
  an iterator. The query results can be accessed through the iterator.

  A query object may be reused after it's been run. Its filters and
  orderings can be changed to create a modified query.

  If you know how many result entities you need, use Get() to fetch them:

  > query = Query('Person', {'age >': 21})
  > for person in query.Get(4):
  >   print 'I have four pints left. Have one on me, %s!' % person['name']

  If you don't know how many results you need, or if you need them all, you
  can get an iterator over the results by calling Run():

  > for person in Query('Person', {'age >': 21}).Run():
  >   print 'Have a pint on me, %s!' % person['name']

  Get() is more efficient than Run(), so use Get() whenever possible.

  Finally, the Count() method returns the number of result entities matched by
  the query. The returned count is cached; successive Count() calls will not
  re-scan the datastore unless the query is changed.
  """

  # Sort directions and execution-plan hints, mirrored from the query PB.
  ASCENDING = datastore_pb.Query_Order.ASCENDING
  DESCENDING = datastore_pb.Query_Order.DESCENDING

  ORDER_FIRST = datastore_pb.Query.ORDER_FIRST
  ANCESTOR_FIRST = datastore_pb.Query.ANCESTOR_FIRST
  FILTER_FIRST = datastore_pb.Query.FILTER_FIRST

  # Maps filter operator strings to their PB constants; '=' and '==' are
  # synonyms.
  OPERATORS = {'<': datastore_pb.Query_Filter.LESS_THAN,
               '<=': datastore_pb.Query_Filter.LESS_THAN_OR_EQUAL,
               '>': datastore_pb.Query_Filter.GREATER_THAN,
               '>=': datastore_pb.Query_Filter.GREATER_THAN_OR_EQUAL,
               '=': datastore_pb.Query_Filter.EQUAL,
               '==': datastore_pb.Query_Filter.EQUAL,
              }
  INEQUALITY_OPERATORS = frozenset(['<', '<=', '>', '>='])

  # Parses a filter string: group 1 is the property name, group 3 the
  # (optional) operator.  Group 2 only exists to make the operator optional.
  FILTER_REGEX = re.compile(
    '^\s*([^\s]+)(\s+(%s)\s*)?$' % '|'.join(OPERATORS.keys()),
    re.IGNORECASE | re.UNICODE)

  # Class-level defaults for per-instance state; __init__ overwrites the
  # mutable ones.
  __kind = None
  __app = None
  __orderings = None
  __cached_count = None
  __hint = None
  __ancestor = None
  __filter_order = None
  __filter_counter = 0

  # Bookkeeping for the single property allowed in inequality filters.
  __inequality_prop = None
  __inequality_count = 0
def __init__(self, kind, filters={}, _app=None):
"""Constructor.
Raises BadArgumentError if kind is not a string. Raises BadValueError or
BadFilterError if filters is not a dictionary of valid filters.
Args:
# kind is required. filters is optional; if provided, it's used
# as an initial set of property filters.
kind: string
filters: dict
"""
datastore_types.ValidateString(kind, 'kind',
datastore_errors.BadArgumentError)
self.__kind = kind
self.__orderings = []
self.__filter_order = {}
self.update(filters)
self.__app = datastore_types.ResolveAppId(_app)
def Order(self, *orderings):
"""Specify how the query results should be sorted.
Result entities will be sorted by the first property argument, then by the
second, and so on. For example, this:
> query = Query('Person')
> query.Order('bday', ('age', Query.DESCENDING))
sorts everyone in order of their birthday, starting with January 1.
People with the same birthday are sorted by age, oldest to youngest.
The direction for each sort property may be provided; if omitted, it
defaults to ascending.
Order() may be called multiple times. Each call resets the sort order
from scratch.
If an inequality filter exists in this Query it must be the first property
passed to Order. Any number of sort orders may be used after the
inequality filter property. Without inequality filters, any number of
filters with different orders may be specified.
Entities with multiple values for an order property are sorted by their
lowest value.
Note that a sort order implies an existence filter! In other words,
Entities without the sort order property are filtered out, and *not*
included in the query results.
If the sort order property has different types in different entities - ie,
if bob['id'] is an int and fred['id'] is a string - the entities will be
grouped first by the property type, then sorted within type. No attempt is
made to compare property values across types.
Raises BadArgumentError if any argument is of the wrong format.
Args:
# the properties to sort by, in sort order. each argument may be either a
# string or (string, direction) 2-tuple.
Returns:
# this query
Query
"""
orderings = list(orderings)
for (order, i) in zip(orderings, range(len(orderings))):
if not (isinstance(order, basestring) or
(isinstance(order, tuple) and len(order) in [2, 3])):
raise datastore_errors.BadArgumentError(
'Order() expects strings or 2- or 3-tuples; received %s (a %s). ' %
(order, typename(order)))
if isinstance(order, basestring):
order = (order,)
datastore_types.ValidateString(order[0], 'sort order property',
datastore_errors.BadArgumentError)
property = order[0]
direction = order[-1]
if direction not in (Query.ASCENDING, Query.DESCENDING):
if len(order) == 3:
raise datastore_errors.BadArgumentError(
'Order() expects Query.ASCENDING or DESCENDING; received %s' %
str(direction))
direction = Query.ASCENDING
orderings[i] = (property, direction)
if (orderings and self.__inequality_prop and
orderings[0][0] != self.__inequality_prop):
raise datastore_errors.BadArgumentError(
'First ordering property must be the same as inequality filter '
'property, if specified for this query; received %s, expected %s' %
(orderings[0][0], self.__inequality_prop))
self.__orderings = orderings
return self
def Hint(self, hint):
"""Sets a hint for how this query should run.
The query hint gives us information about how best to execute your query.
Currently, we can only do one index scan, so the query hint should be used
to indicates which index we should scan against.
Use FILTER_FIRST if your first filter will only match a few results. In
this case, it will be most efficient to scan against the index for this
property, load the results into memory, and apply the remaining filters
and sort orders there.
Similarly, use ANCESTOR_FIRST if the query's ancestor only has a few
descendants. In this case, it will be most efficient to scan all entities
below the ancestor and load them into memory first.
Use ORDER_FIRST if the query has a sort order and the result set is large
or you only plan to fetch the first few results. In that case, we
shouldn't try to load all of the results into memory; instead, we should
scan the index for this property, which is in sorted order.
Note that hints are currently ignored in the v3 datastore!
Arg:
one of datastore.Query.[ORDER_FIRST, ANCESTOR_FIRST, FILTER_FIRST]
Returns:
# this query
Query
"""
if hint not in [self.ORDER_FIRST, self.ANCESTOR_FIRST, self.FILTER_FIRST]:
raise datastore_errors.BadArgumentError(
'Query hint must be ORDER_FIRST, ANCESTOR_FIRST, or FILTER_FIRST.')
self.__hint = hint
return self
def Ancestor(self, ancestor):
"""Sets an ancestor for this query.
This restricts the query to only return result entities that are descended
from a given entity. In other words, all of the results will have the
ancestor as their parent, or parent's parent, or etc.
Raises BadArgumentError or BadKeyError if parent is not an existing Entity
or Key in the datastore.
Args:
# the key must be complete
ancestor: Entity or Key
Returns:
# this query
Query
"""
key = _GetCompleteKeyOrError(ancestor)
self.__ancestor = datastore_pb.Reference()
self.__ancestor.CopyFrom(key._Key__reference)
return self
  def Run(self):
    """Runs this query.

    If a filter string is invalid, raises BadFilterError. If a filter value is
    invalid, raises BadValueError. If an IN filter is provided, and a sort
    order on another property is provided, raises BadQueryError.

    If you know in advance how many results you want, use Get() instead. It's
    more efficient.

    Returns:
      # an iterator that provides access to the query results
      Iterator
    """
    # Delegates to _Run with no limit or offset.
    return self._Run()
def _Run(self, limit=None, offset=None):
"""Runs this query, with an optional result limit and an optional offset.
Identical to Run, with the extra optional limit and offset parameters.
limit and offset must both be integers >= 0.
This is not intended to be used by application developers. Use Get()
instead!
"""
if _CurrentTransactionKey():
raise datastore_errors.BadRequestError(
"Can't query inside a transaction.")
pb = self._ToPb(limit, offset)
result = datastore_pb.QueryResult()
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', 'RunQuery', pb, result)
except apiproxy_errors.ApplicationError, err:
try:
_ToDatastoreError(err)
except datastore_errors.NeedIndexError, exc:
yaml = datastore_index.IndexYamlForQuery(
*datastore_index.CompositeIndexForQuery(pb)[1:-1])
raise datastore_errors.NeedIndexError(
str(exc) + '\nThis query needs this index:\n' + yaml)
return Iterator._FromPb(result.cursor())
  def Get(self, limit, offset=0):
    """Fetches and returns a maximum number of results from the query.

    This method fetches and returns a list of resulting entities that matched
    the query. If the query specified a sort order, entities are returned in
    that order. Otherwise, the order is undefined.

    The limit argument specifies the maximum number of entities to return. If
    it's greater than the number of remaining entities, all of the remaining
    entities are returned. In that case, the length of the returned list will
    be smaller than limit.

    The offset argument specifies the number of entities that matched the
    query criteria to skip before starting to return results.  The limit is
    applied after the offset, so if you provide a limit of 10 and an offset of 5
    and your query matches 20 records, the records whose index is 0 through 4
    will be skipped and the records whose index is 5 through 14 will be
    returned.

    The results are always returned as a list. If there are no results left,
    an empty list is returned.

    If you know in advance how many results you want, this method is more
    efficient than Run(), since it fetches all of the results at once. (The
    datastore backend sets the the limit on the underlying
    scan, which makes the scan significantly faster.)

    Args:
      # the maximum number of entities to return
      int or long
      # the number of entities to skip
      int or long

    Returns:
      # a list of entities
      [Entity, ...]
    """
    if not isinstance(limit, (int, long)) or limit <= 0:
      raise datastore_errors.BadArgumentError(
        'Argument to Get named \'limit\' must be an int greater than 0; '
        'received %s (a %s)' % (limit, typename(limit)))

    if not isinstance(offset, (int, long)) or offset < 0:
      raise datastore_errors.BadArgumentError(
        'Argument to Get named \'offset\' must be an int greater than or '
        'equal to 0; received %s (a %s)' % (offset, typename(offset)))

    # _Run applies the limit and offset to the scan; _Next then pulls at
    # most 'limit' entities from the resulting iterator.
    return self._Run(limit, offset)._Next(limit)
def Count(self, limit=None):
"""Returns the number of entities that this query matches. The returned
count is cached; successive Count() calls will not re-scan the datastore
unless the query is changed.
Args:
limit, a number. If there are more results than this, stop short and
just return this number. Providing this argument makes the count
operation more efficient.
Returns:
The number of results.
"""
if self.__cached_count:
return self.__cached_count
resp = api_base_pb.Integer64Proto()
try:
apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Count',
self._ToPb(limit=limit), resp)
except apiproxy_errors.ApplicationError, err:
raise _ToDatastoreError(err)
else:
self.__cached_count = resp.value()
return self.__cached_count
  def __iter__(self):
    """Disallows iteration to catch a likely mistake.

    Query subclasses dict, so without this override iterating a Query would
    silently yield its filter strings rather than its results.
    """
    raise NotImplementedError(
      'Query objects should not be used as iterators. Call Run() first.')
  def __setitem__(self, filter, value):
    """Implements the [] operator. Used to set filters.

    If the filter string is empty or not a string, raises BadFilterError. If
    the value is not a supported type, raises BadValueError.
    """
    if isinstance(value, tuple):
      value = list(value)

    # ' ' is a placeholder property name: only the value is validated here;
    # the filter string itself is validated by _CheckFilter below.
    datastore_types.ValidateProperty(' ', value, read_only=True)
    match = self._CheckFilter(filter, value)
    property = match.group(1)
    operator = match.group(3)

    dict.__setitem__(self, filter, value)

    # Only one property may appear in inequality filters; track it and how
    # many inequality filters reference it (see __delitem__).
    if operator in self.INEQUALITY_OPERATORS:
      if self.__inequality_prop is None:
        self.__inequality_prop = property
      else:
        assert self.__inequality_prop == property
      self.__inequality_count += 1

    # Remember first-insertion order of each filter string.
    if filter not in self.__filter_order:
      self.__filter_order[filter] = self.__filter_counter
      self.__filter_counter += 1

    # Any filter change invalidates the cached Count() result.
    self.__cached_count = None
  def setdefault(self, filter, value):
    """If the filter exists, returns its value. Otherwise sets it to value.
    If the property name is the empty string or not a string, raises
    BadPropertyError. If the value is not a supported type, raises
    BadValueError.
    """
    # ' ' is a placeholder property name; only the value is being validated.
    datastore_types.ValidateProperty(' ', value)
    self._CheckFilter(filter, value)
    self.__cached_count = None
    # NOTE(review): unlike __setitem__, this path does not record the filter
    # in self.__filter_order, and _ToPb() only serializes filters listed
    # there — confirm whether filters added via setdefault are intended to
    # be skipped during serialization.
    return dict.setdefault(self, filter, value)
  def __delitem__(self, filter):
    """Implements the del [] operator. Used to remove filters.
    """
    dict.__delitem__(self, filter)
    del self.__filter_order[filter]
    # Removing a filter invalidates the cached Count() result.
    self.__cached_count = None
    # Re-parse the filter string to learn which property/operator is going
    # away, so the inequality bookkeeping can be unwound.
    match = Query.FILTER_REGEX.match(filter)
    property = match.group(1)
    operator = match.group(3)
    if operator in self.INEQUALITY_OPERATORS:
      assert self.__inequality_count >= 1
      assert property == self.__inequality_prop
      self.__inequality_count -= 1
      # Once the last inequality filter is gone, any property may take
      # inequality filters again.
      if self.__inequality_count == 0:
        self.__inequality_prop = None
def update(self, other):
"""Updates this query's filters from the ones in other.
If any filter string is invalid, raises BadFilterError. If any value is
not a supported type, raises BadValueError.
"""
for filter, value in other.items():
self.__setitem__(filter, value)
def copy(self):
"""The copy method is not supported.
"""
raise NotImplementedError('Query does not support the copy() method.')
  def _CheckFilter(self, filter, values):
    """Type check a filter string and list of values.
    Raises BadFilterError if the filter string is empty, not a string, or
    invalid. Raises BadValueError if the value type is not supported.
    Args:
      filter: String containing the filter text.
      values: List of associated filter values.
    Returns:
      re.MatchObject (never None) that matches the 'filter'. Group 1 is the
      property name, group 3 is the operator. (Group 2 is unused.)
    """
    try:
      match = Query.FILTER_REGEX.match(filter)
      if not match:
        raise datastore_errors.BadFilterError(
          'Could not parse filter string: %s' % str(filter))
    except TypeError:
      # re.match raises TypeError for non-string input; report it the same
      # way as an unparseable filter.
      raise datastore_errors.BadFilterError(
        'Could not parse filter string: %s' % str(filter))
    property = match.group(1)
    operator = match.group(3)
    # A bare "property" filter with no operator defaults to equality.
    if operator is None:
      operator = '='
    # Normalize single values and tuples to a list.
    if isinstance(values, tuple):
      values = list(values)
    elif not isinstance(values, list):
      values = [values]
    # NOTE(review): only the first value's type is checked here — confirm
    # whether mixed lists with a raw property later in the list are intended
    # to pass.
    if isinstance(values[0], datastore_types._RAW_PROPERTY_TYPES):
      raise datastore_errors.BadValueError(
        'Filtering on %s properties is not supported.' % typename(values[0]))
    if operator in self.INEQUALITY_OPERATORS:
      # Inequality filters must all target one property, and that property
      # must also be the first sort order if any orderings exist.
      if self.__inequality_prop and property != self.__inequality_prop:
        raise datastore_errors.BadFilterError(
          'Only one property per query may have inequality filters (%s).' %
          ', '.join(self.INEQUALITY_OPERATORS))
      elif len(self.__orderings) >= 1 and self.__orderings[0][0] != property:
        raise datastore_errors.BadFilterError(
          'Inequality operators (%s) must be on the same property as the '
          'first sort order, if any sort orders are supplied' %
          ', '.join(self.INEQUALITY_OPERATORS))
    # Special properties (currently __key__) take Key values only.
    if property in datastore_types._SPECIAL_PROPERTIES:
      if property == datastore_types._KEY_SPECIAL_PROPERTY:
        for value in values:
          if not isinstance(value, Key):
            raise datastore_errors.BadFilterError(
              '%s filter value must be a Key; received %s (a %s)' %
              (datastore_types._KEY_SPECIAL_PROPERTY, value, typename(value)))
    return match
  def _ToPb(self, limit=None, offset=None):
    """Converts this Query to its protocol buffer representation. Not
    intended to be used by application developers. Enforced by hiding the
    datastore_pb classes.
    Args:
      # an upper bound on the number of results returned by the query.
      limit: int
      # number of results that match the query to skip. limit is applied
      # after the offset is fulfilled
      offset: int
    Returns:
      # the PB representation of this Query
      datastore_pb.Query
    """
    pb = datastore_pb.Query()
    pb.set_kind(self.__kind.encode('utf-8'))
    if self.__app:
      pb.set_app(self.__app.encode('utf-8'))
    if limit is not None:
      pb.set_limit(limit)
    if offset is not None:
      pb.set_offset(offset)
    if self.__ancestor:
      pb.mutable_ancestor().CopyFrom(self.__ancestor)
    # Only emit the hint when it is consistent with the query's actual
    # shape (e.g. an ORDER_FIRST hint is pointless without orderings).
    if ((self.__hint == self.ORDER_FIRST and self.__orderings) or
        (self.__hint == self.ANCESTOR_FIRST and self.__ancestor) or
        (self.__hint == self.FILTER_FIRST and len(self) > 0)):
      pb.set_hint(self.__hint)
    # Serialize filters in their original insertion order, as recorded by
    # __setitem__ in self.__filter_order.
    ordered_filters = [(i, f) for f, i in self.__filter_order.iteritems()]
    ordered_filters.sort()
    for i, filter_str in ordered_filters:
      # Skip order entries whose filter has since been deleted from self.
      if filter_str not in self:
        continue
      values = self[filter_str]
      match = self._CheckFilter(filter_str, values)
      name = match.group(1)
      props = datastore_types.ToPropertyPb(name, values)
      if not isinstance(props, list):
        props = [props]
      op = match.group(3)
      if op is None:
        op = '='
      # A multi-valued filter expands to one pb filter per value.
      for prop in props:
        filter = pb.add_filter()
        filter.set_op(self.OPERATORS[op])
        filter.add_property().CopyFrom(prop)
    for property, direction in self.__orderings:
      order = pb.add_order()
      order.set_property(property.encode('utf-8'))
      order.set_direction(direction)
    return pb
class MultiQuery(Query):
  """Class representing a query which requires multiple datastore queries.
  This class is actually a subclass of datastore.Query as it is intended to act
  like a normal Query object (supporting the same interface).
  """
  def __init__(self, bound_queries, orderings):
    """Ctor.
    Args:
      bound_queries: list of Query objects to run and merge.
      orderings: iterable of (identifier, order) pairs used to merge-sort
        the subquery results.
    """
    if len(bound_queries) > MAX_ALLOWABLE_QUERIES:
      raise datastore_errors.BadArgumentError(
          'Cannot satisfy query -- too many subqueries (max: %d, got %d).'
          ' Probable cause: too many IN/!= filters in query.' %
          (MAX_ALLOWABLE_QUERIES, len(bound_queries)))
    self.__bound_queries = bound_queries
    self.__orderings = orderings
  def __str__(self):
    res = 'MultiQuery: '
    for query in self.__bound_queries:
      res = '%s %s' % (res, str(query))
    return res
  def Get(self, limit, offset=0):
    """Get results of the query with a limit on the number of results.
    Args:
      limit: maximum number of values to return.
      offset: offset requested -- if nonzero, this will override the offset in
              the original query
    Returns:
      A list of entities with at most "limit" entries (less if the query
      completes before reading limit values).
    """
    count = 1
    result = []
    # The merged iterator already de-duplicates and sorts; the offset is
    # applied client-side by discarding the first 'offset' results.
    iterator = self.Run()
    try:
      for i in xrange(offset):
        val = iterator.next()
    except StopIteration:
      pass
    try:
      while count <= limit:
        val = iterator.next()
        result.append(val)
        count += 1
    except StopIteration:
      pass
    return result
  class SortOrderEntity(object):
    """Allow entity comparisons using provided orderings.
    The iterator passed to the constructor is eventually consumed via
    calls to GetNext(), which generate new SortOrderEntity s with the
    same orderings.
    """
    def __init__(self, entity_iterator, orderings):
      """Ctor.
      Args:
        entity_iterator: an iterator of entities which will be wrapped.
        orderings: an iterable of (identifier, order) pairs. order
          should be either Query.ASCENDING or Query.DESCENDING.
      """
      self.__entity_iterator = entity_iterator
      self.__entity = None
      self.__min_max_value_cache = {}
      try:
        # Eagerly pull the first entity so GetEntity() reflects whether the
        # underlying iterator is exhausted.
        self.__entity = entity_iterator.next()
      except StopIteration:
        pass
      else:
        # NOTE(review): __orderings is only assigned when an entity was
        # obtained; an exhausted wrapper never reaches CmpProperties'
        # ordering loop, but GetNext() on it would raise AttributeError —
        # confirm callers never do that.
        self.__orderings = orderings
    def __str__(self):
      return str(self.__entity)
    def GetEntity(self):
      """Gets the wrapped entity."""
      return self.__entity
    def GetNext(self):
      """Wrap and return the next entity.
      The entity is retrieved from the iterator given at construction time.
      """
      return MultiQuery.SortOrderEntity(self.__entity_iterator,
                                        self.__orderings)
    def CmpProperties(self, that):
      """Compare two entities and return their relative order.
      Compares self to that based on the current sort orderings and the
      key orders between them. Returns negative, 0, or positive depending on
      whether self is less, equal to, or greater than that. This
      comparison returns as if all values were to be placed in ascending order
      (highest value last). Only uses the sort orderings to compare (ignores
      keys).
      Args:
        that: SortOrderEntity
      Returns:
        Negative if self < that
        Zero if self == that
        Positive if self > that
      """
      # An exhausted wrapper (entity None) sorts relative to the other's
      # entity via plain cmp.
      if not self.__entity:
        return cmp(self.__entity, that.__entity)
      for (identifier, order) in self.__orderings:
        value1 = self.__GetValueForId(self, identifier, order)
        value2 = self.__GetValueForId(that, identifier, order)
        result = cmp(value1, value2)
        # Descending orders invert the comparison result.
        if order == Query.DESCENDING:
          result = -result
        if result:
          return result
      return 0
    def __GetValueForId(self, sort_order_entity, identifier, sort_order):
      """Returns the value used to sort the entity on the given property.
      Multi-valued properties are collapsed to a single representative
      value, which is cached per (entity key, identifier).
      """
      value = sort_order_entity.__entity[identifier]
      entity_key = sort_order_entity.__entity.key()
      if (entity_key, identifier) in self.__min_max_value_cache:
        value = self.__min_max_value_cache[(entity_key, identifier)]
      elif isinstance(value, list):
        # NOTE(review): descending sorts take min() and ascending take
        # max() here; the datastore sorts multi-valued properties ascending
        # by smallest and descending by largest value, so this choice looks
        # inverted — confirm against production merge semantics.
        if sort_order == Query.DESCENDING:
          value = min(value)
        else:
          value = max(value)
        self.__min_max_value_cache[(entity_key, identifier)] = value
      return value
    def __cmp__(self, that):
      """Compare self to that w.r.t. values defined in the sort order.
      Compare an entity with another, using sort-order first, then the key
      order to break ties. This can be used in a heap to have faster min-value
      lookup.
      Args:
        that: other entity to compare to
      Returns:
        negative: if self is less than that in sort order
        zero: if self is equal to that in sort order
        positive: if self is greater than that in sort order
      """
      property_compare = self.CmpProperties(that)
      if property_compare:
        return property_compare
      else:
        # Tie-break on the entity key so heap ordering is total.
        return cmp(self.__entity.key(), that.__entity.key())
  def Run(self):
    """Return an iterable output with all results in order."""
    results = []
    count = 1
    log_level = logging.DEBUG - 1
    for bound_query in self.__bound_queries:
      logging.log(log_level, 'Running query #%i' % count)
      results.append(bound_query.Run())
      count += 1
    def IterateResults(results):
      """Iterator function to return all results in sorted order.
      Iterate over the array of results, yielding the next element, in
      sorted order. This function is destructive (results will be empty
      when the operation is complete).
      Args:
        results: list of result iterators to merge and iterate through
      Yields:
        The next result in sorted order.
      """
      # Seed the heap with the first entity of each subquery; SortOrderEntity
      # defines __cmp__, so heapq keeps the smallest (per orderings) on top.
      result_heap = []
      for result in results:
        heap_value = MultiQuery.SortOrderEntity(result, self.__orderings)
        if heap_value.GetEntity():
          heapq.heappush(result_heap, heap_value)
      # used_keys de-duplicates entities that match several subqueries.
      used_keys = set()
      while result_heap:
        top_result = heapq.heappop(result_heap)
        results_to_push = []
        if top_result.GetEntity().key() not in used_keys:
          yield top_result.GetEntity()
        else:
          pass
        used_keys.add(top_result.GetEntity().key())
        results_to_push = []
        # Drain any heap entries equal to top_result (duplicates of the same
        # entity from other subqueries), advancing their iterators; the first
        # non-equal entry is pushed back untouched.
        while result_heap:
          next = heapq.heappop(result_heap)
          if cmp(top_result, next):
            results_to_push.append(next)
            break
          else:
            results_to_push.append(next.GetNext())
        results_to_push.append(top_result.GetNext())
        # Re-insert advanced (and the one unconsumed) wrappers that still
        # hold an entity.
        for popped_result in results_to_push:
          if popped_result.GetEntity():
            heapq.heappush(result_heap, popped_result)
    return IterateResults(results)
  def Count(self, limit=None):
    """Return the number of matched entities for this query.
    Will return the de-duplicated count of results. Will call the more
    efficient Get() function if a limit is given.
    Args:
      limit: maximum number of entries to count (for any result > limit, return
        limit).
    Returns:
      count of the number of entries returned.
    """
    if limit is None:
      count = 0
      for i in self.Run():
        count += 1
      return count
    else:
      return len(self.Get(limit))
  def __setitem__(self, query_filter, value):
    """Add a new filter by setting it on all subqueries.
    If any of the setting operations raise an exception, the ones
    that succeeded are undone and the exception is propagated
    upward.
    Args:
      query_filter: a string of the form "property operand".
      value: the value that the given property is compared against.
    """
    saved_items = []
    for index, query in enumerate(self.__bound_queries):
      # Remember each subquery's previous value so a failure can roll back.
      saved_items.append(query.get(query_filter, None))
      try:
        query[query_filter] = value
      except:
        for q, old_value in itertools.izip(self.__bound_queries[:index],
                                           saved_items):
          if old_value is not None:
            q[query_filter] = old_value
          else:
            del q[query_filter]
        raise
  def __delitem__(self, query_filter):
    """Delete a filter by deleting it from all subqueries.
    If a KeyError is raised during the attempt, it is ignored, unless
    every subquery raised a KeyError. If any other exception is
    raised, any deletes will be rolled back.
    Args:
      query_filter: the filter to delete.
    Raises:
      KeyError: No subquery had an entry containing query_filter.
    """
    subquery_count = len(self.__bound_queries)
    keyerror_count = 0
    saved_items = []
    for index, query in enumerate(self.__bound_queries):
      try:
        saved_items.append(query.get(query_filter, None))
        del query[query_filter]
      except KeyError:
        # Missing on one subquery is tolerated; only all-missing raises.
        keyerror_count += 1
      except:
        for q, old_value in itertools.izip(self.__bound_queries[:index],
                                           saved_items):
          if old_value is not None:
            q[query_filter] = old_value
        raise
    if keyerror_count == subquery_count:
      raise KeyError(query_filter)
  def __iter__(self):
    return iter(self.__bound_queries)
class Iterator(object):
  """An iterator over the results of a datastore query.
  Iterators are used to access the results of a Query. An iterator is
  obtained by building a Query, then calling Run() on it.
  Iterator implements Python's iterator protocol, so results can be accessed
  with the for and in statements:
  > it = Query('Person').Run()
  > for person in it:
  >   print 'Hi, %s!' % person['name']
  """
  def __init__(self, cursor):
    # cursor: opaque server-side cursor id used in Next RPCs.
    self.__cursor = cursor
    # Internal buffer consumed by next(); see _Next() for the mixing caveat.
    self.__buffer = []
    self.__more_results = True
  def _Next(self, count):
    """Returns the next result(s) of the query.
    Not intended to be used by application developers. Use the python
    iterator protocol instead.
    This method returns the next entities from the list of resulting
    entities that matched the query. If the query specified a sort
    order, entities are returned in that order. Otherwise, the order
    is undefined.
    The argument specifies the number of entities to return. If it's
    greater than the number of remaining entities, all of the
    remaining entities are returned. In that case, the length of the
    returned list will be smaller than count.
    There is an internal buffer for use with the next() method. If
    this buffer is not empty, up to 'count' values are removed from
    this buffer and returned. It's best not to mix _Next() and
    next().
    The results are always returned as a list. If there are no results
    left, an empty list is returned.
    Args:
      # the number of entities to return; must be >= 1
      count: int or long
    Returns:
      # a list of entities
      [Entity, ...]
    """
    if not isinstance(count, (int, long)) or count <= 0:
      raise datastore_errors.BadArgumentError(
        'Argument to _Next must be an int greater than 0; received %s (a %s)' %
        (count, typename(count)))
    # A non-empty buffer means next() is in use; mixing the two would
    # silently skip buffered entities.
    if self.__buffer:
      raise datastore_errors.BadRequestError(
          'You can\'t mix next() and _Next()')
    if not self.__more_results:
      return []
    req = datastore_pb.NextRequest()
    req.set_count(count)
    req.mutable_cursor().CopyFrom(self._ToPb())
    result = datastore_pb.QueryResult()
    try:
      apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Next', req, result)
    except apiproxy_errors.ApplicationError, err:
      raise _ToDatastoreError(err)
    self.__more_results = result.more_results()
    ret = [Entity._FromPb(r) for r in result.result_list()]
    return ret
  # Number of entities fetched per RPC by the next() protocol.
  _BUFFER_SIZE = 20
  def next(self):
    # Refill the buffer in batches of _BUFFER_SIZE entities per RPC.
    if not self.__buffer:
      self.__buffer = self._Next(self._BUFFER_SIZE)
    try:
      return self.__buffer.pop(0)
    except IndexError:
      # An empty refill means the query is exhausted.
      raise StopIteration
  def __iter__(self): return self
  def _ToPb(self):
    """Converts this Iterator to its protocol buffer representation. Not
    intended to be used by application developers. Enforced by hiding the
    datastore_pb classes.
    Returns:
      # the PB representation of this Iterator
      datastore_pb.Cursor
    """
    pb = datastore_pb.Cursor()
    pb.set_cursor(self.__cursor)
    return pb
  @staticmethod
  def _FromPb(pb):
    """Static factory method. Returns the Iterator representation of the given
    protocol buffer (datastore_pb.Cursor). Not intended to be used by
    application developers. Enforced by not hiding the datastore_pb classes.
    Args:
      # a protocol buffer Cursor
      pb: datastore_pb.Cursor
    Returns:
      # the Iterator representation of the argument
      Iterator
    """
    return Iterator(pb.cursor())
class _Transaction(object):
  """Encapsulates a transaction currently in progress.
  If we've sent a BeginTransaction call, then handle will be a
  datastore_pb.Transaction that holds the transaction handle.
  If we know the entity group for this transaction, it's stored in the
  entity_group attribute, which is set by RecordModifiedKeys().
  modified_keys is a set containing the Keys of all entities modified (ie put
  or deleted) in this transaction. If an entity is modified more than once, a
  BadRequestError is raised.
  """
  def __init__(self):
    """Initializes modified_keys to the empty set."""
    self.handle = None
    self.entity_group = None
    # (Fixed: a redundant 'self.modified_keys = None' that was immediately
    # overwritten has been removed.)
    self.modified_keys = set()
  def RecordModifiedKeys(self, keys, error_on_repeat=True):
    """Updates the modified keys seen so far.
    Also sets entity_group if it hasn't yet been set.
    If error_on_repeat is True and any of the given keys have already been
    modified, raises BadRequestError.
    Args:
      keys: sequence of Keys
    Raises:
      BadRequestError: if error_on_repeat is True and any key in keys was
        already recorded by an earlier call.
    """
    keys, _ = NormalizeAndTypeCheckKeys(keys)
    # The first recorded key pins this transaction's entity group.
    if keys and not self.entity_group:
      self.entity_group = keys[0].entity_group()
    keys = set(keys)
    if error_on_repeat:
      already_modified = self.modified_keys.intersection(keys)
      if already_modified:
        raise datastore_errors.BadRequestError(
            "Can't update entity more than once in a transaction: %r" %
            already_modified.pop())
    self.modified_keys.update(keys)
def RunInTransaction(function, *args, **kwargs):
  """Runs a function inside a datastore transaction.
  Runs the user-provided function inside transaction, retries default
  number of times.
  Args:
    # a function to be run inside the transaction
    function: callable
    # positional arguments to pass to the function
    args: variable number of any type
  Returns:
    the function's return value, if any
  Raises:
    TransactionFailedError, if the transaction could not be committed.
  """
  # Thin wrapper: delegates with the module-wide default retry count.
  return RunInTransactionCustomRetries(
      DEFAULT_TRANSACTION_RETRIES, function, *args, **kwargs)
def RunInTransactionCustomRetries(retries, function, *args, **kwargs):
  """Runs a function inside a datastore transaction.
  Runs the user-provided function inside a full-featured, ACID datastore
  transaction. Every Put, Get, and Delete call in the function is made within
  the transaction. All entities involved in these calls must belong to the
  same entity group. Queries are not supported.
  The trailing arguments are passed to the function as positional arguments.
  If the function returns a value, that value will be returned by
  RunInTransaction. Otherwise, it will return None.
  The function may raise any exception to roll back the transaction instead of
  committing it. If this happens, the transaction will be rolled back and the
  exception will be re-raised up to RunInTransaction's caller.
  If you want to roll back intentionally, but don't have an appropriate
  exception to raise, you can raise an instance of datastore_errors.Rollback.
  It will cause a rollback, but will *not* be re-raised up to the caller.
  The function may be run more than once, so it should be idempotent. It
  should avoid side effects, and it shouldn't have *any* side effects that
  aren't safe to occur multiple times. This includes modifying the arguments,
  since they persist across invocations of the function. However, this doesn't
  include Put, Get, and Delete calls, of course.
  Example usage:
  > def decrement(key, amount=1):
  >   counter = datastore.Get(key)
  >   counter['count'] -= amount
  >   if counter['count'] < 0:    # don't let the counter go negative
  >     raise datastore_errors.Rollback()
  >   datastore.Put(counter)
  >
  > counter = datastore.Query('Counter', {'name': 'foo'})
  > datastore.RunInTransaction(decrement, counter.key(), amount=5)
  Transactions satisfy the traditional ACID properties. They are:
  - Atomic. All of a transaction's operations are executed or none of them are.
  - Consistent. The datastore's state is consistent before and after a
  transaction, whether it committed or rolled back. Invariants such as
  "every entity has a primary key" are preserved.
  - Isolated. Transactions operate on a snapshot of the datastore. Other
  datastore operations do not see intermediated effects of the transaction;
  they only see its effects after it has committed.
  - Durable. On commit, all writes are persisted to the datastore.
  Nested transactions are not supported.
  Args:
    # number of retries
    retries: integer
    # a function to be run inside the transaction
    function: callable
    # positional arguments to pass to the function
    args: variable number of any type
  Returns:
    the function's return value, if any
  Raises:
    TransactionFailedError, if the transaction could not be committed.
  """
  # Nesting is detected by walking the stack for an enclosing call to this
  # same function (see _CurrentTransactionKey).
  if _CurrentTransactionKey():
    raise datastore_errors.BadRequestError(
      'Nested transactions are not supported.')
  if retries < 0:
    raise datastore_errors.BadRequestError(
      'Number of retries should be non-negative number.')
  tx_key = None
  try:
    # The transaction is keyed by this invocation's own stack frame, which
    # is what _CurrentTransactionKey() finds for nested datastore calls.
    tx_key = _NewTransactionKey()
    tx = _Transaction()
    _txes[tx_key] = tx
    for i in range(0, retries + 1):
      # Each attempt starts with a clean set of modified keys.
      tx.modified_keys.clear()
      try:
        result = function(*args, **kwargs)
      except:
        # The user function failed: roll back (best-effort) and re-raise,
        # except for the intentional-rollback sentinel.
        original_exception = sys.exc_info()
        if tx.handle:
          try:
            resp = api_base_pb.VoidProto()
            apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Rollback',
                                           tx.handle, resp)
          except:
            # A failed Rollback is only logged; the original exception from
            # the user function takes precedence.
            exc_info = sys.exc_info()
            logging.info('Exception sending Rollback:\n' +
                         ''.join(traceback.format_exception(*exc_info)))
        type, value, trace = original_exception
        if type is datastore_errors.Rollback:
          # Deliberate rollback: swallow the exception, return None.
          return
        else:
          raise type, value, trace
      if tx.handle:
        try:
          resp = datastore_pb.CommitResponse()
          apiproxy_stub_map.MakeSyncCall('datastore_v3', 'Commit',
                                         tx.handle, resp)
        except apiproxy_errors.ApplicationError, err:
          if (err.application_error ==
              datastore_pb.Error.CONCURRENT_TRANSACTION):
            # Collision with another transaction: reset and retry the loop.
            logging.warning('Transaction collision for entity group with '
                            'key %r. Retrying...', tx.entity_group)
            tx.handle = None
            tx.entity_group = None
            continue
          else:
            raise _ToDatastoreError(err)
      return result
    # All retry attempts hit CONCURRENT_TRANSACTION.
    raise datastore_errors.TransactionFailedError(
        'The transaction could not be committed. Please try again.')
  finally:
    # Always unregister this transaction, however we exit.
    if tx_key in _txes:
      del _txes[tx_key]
    del tx_key
def _MaybeSetupTransaction(request, keys):
  """Begins a transaction, if necessary, and populates it in the request.
  If we're currently inside a transaction, this records the entity group,
  checks that the keys are all in that entity group, creates the transaction
  PB, and sends the BeginTransaction. It then populates the transaction handle
  in the request.
  Raises BadRequestError if the entity has a different entity group than the
  current transaction.
  Args:
    request: GetRequest, PutRequest, or DeleteRequest
    keys: sequence of Keys
  Returns:
    _Transaction if we're inside a transaction, otherwise None
  """
  assert isinstance(request, (datastore_pb.GetRequest, datastore_pb.PutRequest,
                              datastore_pb.DeleteRequest))
  tx_key = None
  try:
    tx_key = _CurrentTransactionKey()
    if tx_key:
      tx = _txes[tx_key]
      groups = [k.entity_group() for k in keys]
      # The transaction's first-seen entity group (if any) is authoritative;
      # otherwise the first key's group becomes the reference.
      if tx.entity_group:
        expected_group = tx.entity_group
      else:
        expected_group = groups[0]
      for group in groups:
        # Incomplete groups (no id/name yet) must be the same object to
        # count as matching, since value equality is meaningless for them.
        if (group != expected_group or
            (not group.has_id_or_name() and group is not expected_group)):
          raise _DifferentEntityGroupError(expected_group, group)
      # Lazily send BeginTransaction on the first datastore call inside the
      # transaction.
      if not tx.handle:
        tx.handle = datastore_pb.Transaction()
        req = api_base_pb.VoidProto()
        apiproxy_stub_map.MakeSyncCall('datastore_v3', 'BeginTransaction', req,
                                       tx.handle)
      request.mutable_transaction().CopyFrom(tx.handle)
      return tx
  finally:
    del tx_key
def _DifferentEntityGroupError(a, b):
  """Raises a BadRequestError that says the given entity groups are different.
  Includes the two entity groups in the message, formatted more clearly and
  concisely than repr(Key).
  Args:
    a, b are both Keys that represent entity groups.
  """
  def id_or_name(key):
    # Prefer the name when set; otherwise fall back to the numeric id.
    name = key.name()
    if name:
      return 'name=%r' % name
    return 'id=%r' % key.id()
  raise datastore_errors.BadRequestError(
      'Cannot operate on different entity groups in a transaction: '
      '(kind=%r, %s) and (kind=%r, %s).' % (a.kind(), id_or_name(a),
                                            b.kind(), id_or_name(b)))
def _FindTransactionFrameInStack():
  """Walks the stack to find a RunInTransaction() call.
  Returns:
    # this is the RunInTransactionCustomRetries() frame record, if found
    frame record or None
  """
  # Frames belonging to this module share this filename.
  this_file = sys._getframe().f_code.co_filename
  # Skip our own frame and our immediate caller before scanning.
  frame = sys._getframe().f_back.f_back
  while frame is not None:
    code = frame.f_code
    if (code.co_filename == this_file and
        code.co_name == 'RunInTransactionCustomRetries'):
      return frame
    frame = frame.f_back
  return None
# The key identifying the current transaction is the stack frame of the
# enclosing RunInTransactionCustomRetries() call (None when not in one).
_CurrentTransactionKey = _FindTransactionFrameInStack
# A new transaction's key is simply the caller's own frame object.
_NewTransactionKey = sys._getframe
def _GetCompleteKeyOrError(arg):
  """Expects an Entity or a Key, and returns the corresponding Key. Raises
  BadArgumentError or BadKeyError if arg is a different type or is incomplete.
  Args:
    arg: Entity or Key
  Returns:
    Key
  """
  if isinstance(arg, Key):
    key = arg
  elif isinstance(arg, basestring):
    # NOTE(review): strings are accepted and parsed as encoded Keys even
    # though the error message below only mentions Entity or Key.
    key = Key(arg)
  elif isinstance(arg, Entity):
    key = arg.key()
  else:
    # (Fixed: this branch was 'elif not isinstance(arg, Key)', which is
    # always true here since Keys were caught by the first branch; a plain
    # else expresses the same behavior without the dead condition.)
    raise datastore_errors.BadArgumentError(
        'Expects argument to be an Entity or Key; received %s (a %s).' %
        (arg, typename(arg)))
  assert isinstance(key, Key)
  # A key must carry an id or a name to be usable for Get/Delete.
  if not key.has_id_or_name():
    raise datastore_errors.BadKeyError('Key %r is not complete.' % key)
  return key
def _AddOrAppend(dictionary, key, value):
  """Inserts value under key, accumulating repeated inserts into a list.
  If dictionary[key] doesn't exist, sets dictionary[key] to value.
  If dictionary[key] is not a list, sets dictionary[key] to [old_value, value].
  If dictionary[key] is a list, appends value to that list.
  Args:
    dictionary: a dict
    key, value: anything
  """
  if key not in dictionary:
    dictionary[key] = value
    return
  current = dictionary[key]
  if isinstance(current, list):
    current.append(value)
  else:
    dictionary[key] = [current, value]
def _ToDatastoreError(err):
  """Converts an apiproxy.ApplicationError to an error in datastore_errors.
  Args:
    err: apiproxy.ApplicationError
  Returns:
    a subclass of datastore_errors.Error
  """
  # Map of RPC error codes to their datastore_errors equivalents.
  errors = {
    datastore_pb.Error.BAD_REQUEST: datastore_errors.BadRequestError,
    datastore_pb.Error.CONCURRENT_TRANSACTION:
    datastore_errors.TransactionFailedError,
    datastore_pb.Error.INTERNAL_ERROR: datastore_errors.InternalError,
    datastore_pb.Error.NEED_INDEX: datastore_errors.NeedIndexError,
    datastore_pb.Error.TIMEOUT: datastore_errors.Timeout,
    }
  # (Fixed: this function previously raised the converted error itself even
  # though its docstring promised a return value and every call site already
  # does 'raise _ToDatastoreError(err)'. Returning matches the documented
  # contract and the call-site idiom.)
  if err.application_error in errors:
    return errors[err.application_error](err.error_detail)
  else:
    return datastore_errors.Error(err.error_detail)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Validation tools for generic object structures.
This library is used for defining classes with constrained attributes.
Attributes are defined on the class which contains them using validators.
Although validators can be defined by any client of this library, a number
of standard validators are provided here.
Validators can be any callable that takes a single parameter which checks
the new value before it is assigned to the attribute. Validators are
permitted to modify a received value so that it is appropriate for the
attribute definition. For example, using int as a validator will cast
a correctly formatted string to a number, or raise an exception if it
can not. This is not recommended, however. The correct way to use a
validator that ensures the correct type is to use the Type validator.
This validation library is mainly intended for use with the YAML object
builder. See yaml_object.py.
"""
import re
import google
import yaml
class Error(Exception):
  """Base class for all package errors."""
  # Root of the validation package's exception hierarchy; catch this to
  # handle any error raised by this module.
class AttributeDefinitionError(Error):
  """An error occurred in the definition of class attributes."""
  # Raised e.g. by AsValidator() for an unrecognized validator type, or by
  # Validated.__init__ when a subclass fails to define ATTRIBUTES.
class ValidationError(Error):
  """Base class for raising exceptions during validation."""
  def __init__(self, message, cause=None):
    """Initialize exception."""
    # Propagate the underlying cause's args into this exception, when the
    # cause carries any.
    cause_args = getattr(cause, 'args', None)
    if cause_args:
      Error.__init__(self, message, *cause_args)
    else:
      Error.__init__(self, message)
    self.message = message
    self.cause = cause
  def __str__(self):
    return str(self.message)
class MissingAttribute(ValidationError):
  """Raised when a required attribute is missing from object."""
  # Caught by Validated.CheckInitialized() to report which attribute is
  # missing by name.
def AsValidator(validator):
  """Wrap various types as instances of a validator.
  Used to allow shorthand for common validator types. It
  converts the following types to the following Validators.
    strings -> Regex
    type -> Type
    collection -> Options
    Validator -> Its self!
  Args:
    validator: Object to wrap in a validator.
  Returns:
    Validator instance that wraps the given value.
  Raises:
    AttributeDefinitionError if validator is not one of the above described
    types.
  """
  # A string is treated as a regular expression the value must match; the
  # string's own type (str or unicode) becomes the expected value type.
  if isinstance(validator, (str, unicode)):
    return Regex(validator, type(validator))
  if isinstance(validator, type):
    return Type(validator)
  # Any list/tuple/set enumerates the allowed values.
  if isinstance(validator, (list, tuple, set)):
    return Options(*tuple(validator))
  if isinstance(validator, Validator):
    return validator
  else:
    raise AttributeDefinitionError('%s is not a valid validator' %
                                   str(validator))
class Validated(object):
  """Base class for other classes that require validation.
  A class which intends to use validated fields should sub-class itself from
  this class. Each class should define an 'ATTRIBUTES' class variable which
  should be a map from attribute name to its validator. For example:
    class Story(Validated):
      ATTRIBUTES = {'title': Type(str),
                    'authors': Repeated(Type(str)),
                    'isbn': Optional(Type(str)),
                    'pages': Type(int),
                    }
  Attributes that are not listed under ATTRIBUTES work like normal and are
  not validated upon assignment.
  """
  # Subclasses must override this with a dict mapping attribute name to a
  # validator (or a shorthand accepted by AsValidator); __init__ rejects
  # classes that leave it as None.
  ATTRIBUTES = None
  def __init__(self, **attributes):
    """Constructor for Validated classes.
    This constructor can optionally assign values to the class via its
    keyword arguments.
    Raises:
      AttributeDefinitionError when class instance is missing ATTRIBUTE
      definition or when ATTRIBUTE is of the wrong type.
    """
    if not isinstance(self.ATTRIBUTES, dict):
      # NOTE(review): the message says 'ATTRIBUTE' but the class variable is
      # named ATTRIBUTES — confirm before changing the user-visible string.
      raise AttributeDefinitionError(
          'The class %s does not define an ATTRIBUTE variable.'
          % self.__class__)
    for key in self.ATTRIBUTES.keys():
      # Use object.__setattr__ to bypass our validating __setattr__, since
      # a validator's default need not itself pass validation here.
      object.__setattr__(self, key, self.GetAttribute(key).default)
    self.Set(**attributes)
@classmethod
def GetAttribute(self, key):
"""Safely get the underlying attribute definition as a Validator.
Args:
key: Name of attribute to get.
Returns:
Validator associated with key or attribute value wrapped in a
validator.
"""
return AsValidator(self.ATTRIBUTES[key])
  def Set(self, **attributes):
    """Set multiple values on Validated instance.
    This method can only be used to assign validated methods.
    Args:
      attributes: Attributes to set on object.
    Raises:
      ValidationError when no validated attribute exists on class.
    """
    for key, value in attributes.iteritems():
      if key not in self.ATTRIBUTES:
        raise ValidationError('Class \'%s\' does not have attribute \'%s\''
                              % (self.__class__, key))
      # setattr routes through __setattr__, which runs the validator.
      setattr(self, key, value)
  def CheckInitialized(self):
    """Checks that all required fields are initialized.
    Since an instance of Validated starts off in an uninitialized state, it
    is sometimes necessary to check that it has been fully initialized.
    The main problem this solves is how to validate that an instance has
    all of its required fields set. By default, Validator classes do not
    allow None, but all attributes are initialized to None when instantiated.
    Raises:
      Exception relevant to the kind of validation. The type of the exception
      is determined by the validator. Typically this will be ValueError or
      TypeError.
    """
    # Re-run every attribute's validator against its current value; a
    # still-unset required attribute surfaces as MissingAttribute.
    for key in self.ATTRIBUTES.iterkeys():
      try:
        self.GetAttribute(key)(getattr(self, key))
      except MissingAttribute, e:
        # Rewrite the message to name the offending attribute, then re-raise
        # the same exception instance.
        e.message = "Missing required value '%s'." % key
        raise e
def __setattr__(self, key, value):
  """Validated attribute assignment.

  Setting a value on an object of this type will only work for attributes
  defined in ATTRIBUTES; the value is first passed through the attribute's
  validator.  To make other assignments possible it is necessary to
  override this method in subclasses.

  Restricting assignment this way matters because this validation is used
  as validation for parsing; absent the restriction it would be possible
  for method names to be overwritten.

  Args:
    key: Name of attribute to set.
    value: Attributes new value.

  Raises:
    ValidationError: when trying to assign to a value that does not exist.
  """
  if key not in self.ATTRIBUTES:
    raise ValidationError('Class \'%s\' does not have attribute \'%s\''
                          % (self.__class__, key))
  validated_value = self.GetAttribute(key)(value)
  object.__setattr__(self, key, validated_value)
def __str__(self):
  """Formatted view of validated object and nested values."""
  return self.__repr__()
def __repr__(self):
  """Formatted view of validated object and nested values."""
  indent = '  '
  rendered = []
  for name in self.ATTRIBUTES:
    rendered.append('\n%s%s=%s' % (indent, name, getattr(self, name)))
  return "<%s %s\n%s>" % (self.__class__.__name__, ' '.join(rendered), indent)
def __eq__(self, other):
  """Equality operator.

  Two instances are equal when they are of exactly the same type and every
  declared attribute compares equal.

  Args:
    other: Other object to compare against.

  Returns:
    True if validated objects are equal, else False.
  """
  if type(self) != type(other):
    return False
  return all(getattr(self, key) == getattr(other, key)
             for key in self.ATTRIBUTES.iterkeys())
def __ne__(self, other):
  """Inequality operator; the negation of __eq__."""
  return not self.__eq__(other)
def __hash__(self):
  """Hash function for using Validated objects in sets and maps.

  The hash xor's together the hash of every attribute name with the hash
  of its value, converting lists to tuples so they are hashable.

  Returns:
    Hash of validated object.
  """
  digest = 0
  for name in self.ATTRIBUTES.iterkeys():
    attr_value = getattr(self, name)
    if isinstance(attr_value, list):
      attr_value = tuple(attr_value)
    digest ^= hash(name) ^ hash(attr_value)
  return digest
@staticmethod
def _ToValue(validator, value):
  """Convert any value to simplified collections and basic types.

  Args:
    validator: An instance of Validator that corresponds with 'value'.
      May also be 'str' or 'int' if those were used instead of a full
      Validator.
    value: Value to convert to simplified collections.

  Returns:
    The value as a dictionary if it is a Validated object.  A list of
    items converted to simplified collections if value is a list or a
    tuple.  Otherwise, just the value.
  """
  if isinstance(value, Validated):
    return value.ToDict()
  if isinstance(value, (list, tuple)):
    return [Validated._ToValue(validator, element) for element in value]
  if isinstance(validator, Validator):
    return validator.ToValue(value)
  return value
def ToDict(self):
  """Convert Validated object to a dictionary.

  Recursively traverses all of its elements and converts everything to
  simplified collections.  Attributes still holding their validator's
  default value are omitted from the result.

  Returns:
    A dict of all attributes defined in this classes ATTRIBUTES mapped
    to its value.  This structure is recursive in that Validated objects
    that are referenced by this object and in lists are also converted
    to dicts.
  """
  simplified = {}
  for name, validator in self.ATTRIBUTES.iteritems():
    current = getattr(self, name)
    is_default = (isinstance(validator, Validator) and
                  current == validator.default)
    if not is_default:
      simplified[name] = Validated._ToValue(validator, current)
  return simplified
def ToYAML(self):
  """Print validated object as simplified YAML.

  Returns:
    Object as a simplified YAML string compatible with parsing using the
    SafeLoader.
  """
  simplified = self.ToDict()
  return yaml.dump(simplified,
                   default_flow_style=False,
                   Dumper=yaml.SafeDumper)
class Validator(object):
  """Validator base class.

  Though any callable can be used as a validator, this class encapsulates
  the case when a specific validator needs to hold a particular state or
  configuration.  Sub-classes customize behavior by overriding Validate.

  A validator is permitted to change the ultimate value that is set to the
  attribute if there is a reasonable way to perform the conversion.
  """

  # Shorthand type checks fall back to this when a subclass does not
  # publish a more specific type.
  expected_type = object

  def __init__(self, default=None):
    """Constructor.

    Args:
      default: Default assignment is made during initialization and will
        not pass through validation.
    """
    self.default = default

  def __call__(self, value):
    """Main interface to validator is call mechanism."""
    return self.Validate(value)

  def Validate(self, value):
    """Override this method to customize sub-class behavior.

    Args:
      value: Value to validate.

    Returns:
      Value if value is valid, or a valid representation of value.
    """
    return value

  def ToValue(self, value):
    """Convert 'value' to a simplified collection or basic type.

    Subclasses of Validator should override this method when the dumped
    representation of 'value' is not simply <type>(value) (e.g. a regex).

    Args:
      value: An object of the same type that was returned from Validate().

    Returns:
      An instance of a builtin type (e.g. int, str, dict, etc).  By
      default it returns 'value' unmodified.
    """
    return value
class Type(Validator):
"""Verifies property is of expected type.
Can optionally convert value if it is not of the expected type.
It is possible to specify a required field of a specific type in shorthand
by merely providing the type. This method is slightly less efficient than
providing an explicit type but is not significant unless parsing a large
amount of information:
class Person(Validated):
ATTRIBUTES = {'name': unicode,
'age': int,
}
However, in most instances it is best to use the type constants:
class Person(Validated):
ATTRIBUTES = {'name': TypeUnicode,
'age': TypeInt,
}
"""
def __init__(self, expected_type, convert=True, default=None):
"""Initialize Type validator.
Args:
expected_type: Type that attribute should validate against.
convert: Cause conversion if value is not the right type.
Conversion is done by calling the constructor of the type
with the value as its first parameter.
"""
super(Type, self).__init__(default)
self.expected_type = expected_type
self.convert = convert
def Validate(self, value):
"""Validate that value is correct type.
Args:
value: Value to validate.
Returns:
None if value is None, value if value is of correct type, converted
value if the validator is configured to convert.
Raises:
ValidationError if value is not of the right type and validator
is not configured to convert.
"""
if not isinstance(value, self.expected_type):
if value is not None and self.convert:
try:
return self.expected_type(value)
except ValueError, e:
raise ValidationError('Type conversion failed for value \'%s\'.'
% value,
e)
except TypeError, e:
raise ValidationError('Expected value of type %s, but got \'%s\'.'
% (self.expected_type, value))
else:
raise MissingAttribute('Missing value is required.')
else:
return value
# Pre-built Type validators for the common builtin types.  Prefer these
# shared constants over instantiating Type directly in ATTRIBUTES maps.
TYPE_BOOL = Type(bool)
TYPE_INT = Type(int)
TYPE_LONG = Type(long)
TYPE_STR = Type(str)
TYPE_UNICODE = Type(unicode)
TYPE_FLOAT = Type(float)
class Options(Validator):
  """Limit field based on pre-determined values.

  Options are used to make sure an enumerated set of values are the only
  one permitted for assignment.  It is possible to define aliases which
  map multiple string values to a single original.  An example of usage:

    class ZooAnimal(validated.Class):
      ATTRIBUTES = {
        'name': str,
        'kind': Options('platypus',                   # No aliases
                        ('rhinoceros', ['rhino']),    # One alias
                        ('canine', ('dog', 'puppy')), # Two aliases
                        )
  """

  def __init__(self, *options, **kw):
    """Initialize options.

    Args:
      options: List of allowed values.  Each entry is either a plain str
        (no aliases) or a two-item sequence (original, aliases) where
        aliases is a list or tuple of str aliases for the original.
      kw: Only 'default' is recognized; the field's default value.

    Raises:
      AttributeDefinitionError: when an option is duplicated, is not a
        str, or an alias entry is malformed.
    """
    if 'default' in kw:
      default = kw['default']
    else:
      default = None
    # Maps every accepted string (originals and aliases alike) to its
    # canonical original value.
    alias_map = {}
    def AddAlias(alias, original):
      """Register alias -> original in alias_map.

      Raises:
        AttributeDefinitionError when option already exists or if alias
        is not of type str.
      """
      if not isinstance(alias, str):
        raise AttributeDefinitionError(
            'All option values must be of type str.')
      elif alias in alias_map:
        raise AttributeDefinitionError(
            "Option '%s' already defined for options property." % alias)
      alias_map[alias] = original
    for option in options:
      if isinstance(option, str):
        # Plain option: maps to itself.
        AddAlias(option, option)
      elif isinstance(option, (list, tuple)):
        if len(option) != 2:
          raise AttributeDefinitionError("Alias is defined as a list of tuple "
                                         "with two items. The first is the "
                                         "original option, while the second "
                                         "is a list or tuple of str aliases.\n"
                                         "\n Example:\n"
                                         " ('original', ('alias1', "
                                         "'alias2'")
        original, aliases = option
        # The original value is always accepted on its own.
        AddAlias(original, original)
        if not isinstance(aliases, (list, tuple)):
          raise AttributeDefinitionError('Alias lists must be a list or tuple')
        for alias in aliases:
          AddAlias(alias, original)
      else:
        raise AttributeDefinitionError("All options must be of type str "
                                       "or of the form (str, [str...]).")
    super(Options, self).__init__(default)
    self.options = alias_map

  def Validate(self, value):
    """Validate options.

    Args:
      value: Value to check; converted to str before lookup.

    Returns:
      Original value for provided alias.

    Raises:
      ValidationError when value is not one of predefined values.
    """
    if value is None:
      raise ValidationError('Value for options field must not be None.')
    value = str(value)
    if value not in self.options:
      raise ValidationError('Value \'%s\' not in %s.'
                            % (value, self.options))
    return self.options[value]
class Optional(Validator):
  """Definition of optional attributes.

  Optional values are attributes which can be set to None or left
  unset.  All values in a basic Validated class are set to None
  at initialization.  Failure to assign to non-optional values
  will result in a validation error when calling CheckInitialized.
  """

  def __init__(self, validator, default=None):
    """Initializer.

    This constructor will make a few guesses about the value passed in
    as the validator:

      - If the validator argument is a type, it automatically creates a
        Type validator around it.
      - If the validator argument is a list or tuple, it automatically
        creates an Options validator around it.

    Args:
      validator: Optional validation condition.
      default: Default assignment; made during initialization and will
        not pass through validation.

    Raises:
      AttributeDefinitionError if validator is not callable.
    """
    # Consistency fix: route 'default' through the base-class constructor
    # like every other Validator subclass, instead of assigning
    # self.default directly.
    super(Optional, self).__init__(default)
    self.validator = AsValidator(validator)
    # Surface the wrapped validator's expected type so shorthand
    # type checks against this attribute keep working.
    self.expected_type = self.validator.expected_type

  def Validate(self, value):
    """Optionally require a value.

    Normal validators do not accept None.  This will accept None on
    behalf of the contained validator.

    Args:
      value: Value to be validated as optional.

    Returns:
      None if value is None, else results of contained validation.
    """
    if value is None:
      return None
    return self.validator(value)
class Regex(Validator):
  """Regular expression validator.

  Values are always converted to the configured string type and must match
  the regular expression exactly; partial matches will not validate.
  For example:

    class ClassDescr(Validated):
      ATTRIBUTES = { 'name': Regex(r'[a-zA-Z_][a-zA-Z_0-9]*'),
                     'parent': Type(type),
                     }

  Alternatively, any attribute that is defined as a string is
  automatically interpreted to be of type Regex.  It is possible to
  specify unicode regex strings as well.  This approach is slightly less
  efficient, but usually is not significant unless parsing large amounts
  of data:

    class ClassDescr(Validated):
      ATTRIBUTES = { 'name': r'[a-zA-Z_][a-zA-Z_0-9]*',
                     'parent': Type(type),
                     }

    # This will raise a ValidationError exception.
    my_class(name='AName with space', parent=AnotherClass)
  """

  def __init__(self, regex, string_type=unicode, default=None):
    """Initialized regex validator.

    Args:
      regex: Regular expression string to use for comparison.
      string_type: Concrete string type values are cast to; must be a
        strict subclass of basestring.
      default: Default assignment; does not pass through validation.

    Raises:
      AttributeDefinitionError if string_type is not a kind of string.
    """
    super(Regex, self).__init__(default)
    if string_type is basestring or not issubclass(string_type, basestring):
      raise AttributeDefinitionError(
          'Regex fields must be a string type not %s.' % str(string_type))
    if not isinstance(regex, basestring):
      raise AttributeDefinitionError(
          'Regular expression must be string. Found %s.' % str(regex))
    # Anchor the pattern so only full matches validate.
    self.re = re.compile('^%s$' % regex)
    self.expected_type = string_type

  def Validate(self, value):
    """Does validation of a string against a regular expression.

    Args:
      value: String to match against regular expression.

    Returns:
      The value cast to the configured string type.

    Raises:
      ValidationError when value does not match regular expression or
      when value does not match provided string type.
    """
    if issubclass(self.expected_type, str):
      cast_value = TYPE_STR(value)
    else:
      cast_value = TYPE_UNICODE(value)
    if self.re.match(cast_value) is None:
      raise ValidationError('Value \'%s\' does not match expression \'%s\''
                            % (value, self.re.pattern))
    return cast_value
class _RegexStrValue(object):
  """Simulates the regex object to support recompilation when necessary.

  Used by the RegexStr class to dynamically build and recompile regular
  expression attributes of a validated object.  This object replaces the
  normal object returned from re.compile which is immutable.

  When the value of this object is a string, that string is simply used as
  the regular expression when recompilation is needed.  If the state of
  this object is a list of strings, the strings are joined in to a single
  'or' expression.
  """

  def __init__(self, attribute, value):
    """Initialize recompilable regex value.

    Args:
      attribute: Attribute validator associated with this regex value.
      value: Initial underlying python value for regex string.  Either a
        single regex string or a list of regex strings.
    """
    self.__attribute = attribute
    self.__value = value
    # Placeholder for a cached compiled form; the current implementation
    # recompiles on demand instead of using this cache.
    self.__regex = None

  def __AsString(self, value):
    """Convert a value to appropriate string.

    Returns:
      String version of value with all carriage returns and line feeds
      removed.
    """
    # Cast through the attribute's declared string type so the regex text
    # matches what the owning validator would accept.
    if issubclass(self.__attribute.expected_type, str):
      cast_value = TYPE_STR(value)
    else:
      cast_value = TYPE_UNICODE(value)
    # Regexes may be written as multi-line strings (e.g. in YAML); strip
    # line breaks so they form one continuous pattern.
    cast_value = cast_value.replace('\n', '')
    cast_value = cast_value.replace('\r', '')
    return cast_value

  def __BuildRegex(self):
    """Build regex string from state.

    Returns:
      String version of regular expression.  Sequence objects are
      constructed as larger regular expression where each regex in the
      list is joined with all the others as single 'or' expression.
    """
    if isinstance(self.__value, list):
      value_list = self.__value
      sequence = True
    else:
      value_list = [self.__value]
      sequence = False
    regex_list = []
    for item in value_list:
      regex_list.append(self.__AsString(item))
    if sequence:
      # Wrap each alternative in a non-capturing group so the 'or' does
      # not change the meaning of the individual patterns.
      return '|'.join('(?:%s)' % item for item in regex_list)
    else:
      return regex_list[0]

  def __Compile(self):
    """Build regular expression object from state.

    Returns:
      Compiled regular expression based on internal value.

    Raises:
      ValidationError: when the built regex string does not compile.
    """
    regex = self.__BuildRegex()
    try:
      return re.compile(regex)
    except re.error, e:
      raise ValidationError('Value \'%s\' does not compile: %s' % (regex, e), e)

  @property
  def regex(self):
    """Compiled regular expression as described by underlying value."""
    return self.__Compile()

  def match(self, value):
    """Match against internal regular expression.

    Returns:
      Regular expression object built from underlying value.
    """
    return re.match(self.__BuildRegex(), value)

  def Validate(self):
    """Ensure that regex string compiles."""
    self.__Compile()

  def __str__(self):
    """Regular expression string as described by underlying value."""
    return self.__BuildRegex()

  def __eq__(self, other):
    """Comparison against other regular expression string values."""
    if isinstance(other, _RegexStrValue):
      # Name mangling permits access to the other instance's builder.
      return self.__BuildRegex() == other.__BuildRegex()
    return str(self) == other

  def __ne__(self, other):
    """Inequality operator for regular expression string value."""
    return not self.__eq__(other)
class RegexStr(Validator):
  """Validates that a string can compile as a regex without errors.

  Use this validator when the value of a field should be a regex.  That
  means that the value must be a string that can be compiled by
  re.compile().  The attribute will then be a compiled re object.
  """

  def __init__(self, string_type=unicode, default=None):
    """Initialized regex validator.

    Args:
      string_type: Concrete string type values are cast to; must be a
        strict subclass of basestring.
      default: Default regex string (or list of regex strings); compiled
        eagerly so definition errors surface immediately.

    Raises:
      AttributeDefinitionError if string_type is not a kind of string.
    """
    # Fix: validate string_type and publish expected_type *before*
    # wrapping the default.  _RegexStrValue reads this validator's
    # expected_type when stringifying; previously it saw the base-class
    # fallback (object) because expected_type was assigned last.
    if (not issubclass(string_type, basestring) or
        string_type is basestring):
      raise AttributeDefinitionError(
          'RegexStr fields must be a string type not %s.' % str(string_type))
    self.expected_type = string_type
    if default is not None:
      default = _RegexStrValue(self, default)
      # Eagerly compile so a bad default fails at definition time.
      re.compile(str(default))
    super(RegexStr, self).__init__(default)

  def Validate(self, value):
    """Validates that the string compiles as a regular expression.

    Because the regular expression might have been expressed as a
    multiline string, this function also strips newlines out of value.

    Args:
      value: String to compile as a regular expression.

    Returns:
      A _RegexStrValue wrapping 'value'.

    Raises:
      ValueError when value does not compile as a regular expression.
      TypeError when value does not match provided string type.
    """
    if isinstance(value, _RegexStrValue):
      return value
    value = _RegexStrValue(self, value)
    value.Validate()
    return value

  def ToValue(self, value):
    """Returns the RE pattern for this validator."""
    return str(value)
class Range(Validator):
  """Validates that numbers fall within the correct range.

  In theory this class can be emulated using Options, however error
  messages generated from that class will not be very intelligible.
  This class essentially does the same thing, but knows the intended
  integer range.

  Also, this range class supports floats and other types that implement
  ordinality.

  The range is inclusive, meaning 3 is considered in the range
  in Range(1,3).
  """

  def __init__(self, minimum, maximum, range_type=int, default=None):
    """Initializer for range.

    Args:
      minimum: Minimum for attribute.
      maximum: Maximum for attribute.
      range_type: Type of field.  Defaults to int.
      default: Default assignment; does not pass through validation.

    Raises:
      AttributeDefinitionError when either bound is not of range_type.
    """
    super(Range, self).__init__(default)
    if not isinstance(minimum, range_type):
      raise AttributeDefinitionError(
          'Minimum value must be of type %s, instead it is %s (%s).' %
          (str(range_type), str(type(minimum)), str(minimum)))
    if not isinstance(maximum, range_type):
      raise AttributeDefinitionError(
          'Maximum value must be of type %s, instead it is %s (%s).' %
          (str(range_type), str(type(maximum)), str(maximum)))
    self.minimum = minimum
    self.maximum = maximum
    self.expected_type = range_type
    # Delegate the type check/conversion to a nested Type validator.
    self._type_validator = Type(range_type)

  def Validate(self, value):
    """Validate that value is within range.

    Validates against range-type then checks the range.

    Args:
      value: Value to validate.

    Returns:
      The value cast to range_type.

    Raises:
      ValidationError when value is out of range.  ValidationError when
      value is not of the same range type.
    """
    cast_value = self._type_validator.Validate(value)
    if cast_value < self.minimum or cast_value > self.maximum:
      raise ValidationError('Value \'%s\' is out of range %s - %s'
                            % (str(value),
                               str(self.minimum),
                               str(self.maximum)))
    return cast_value
class Repeated(Validator):
  """Repeated field validator.

  Indicates that attribute is expected to be a repeated value, ie,
  a sequence.  This adds additional validation over just Type(list) in
  that it retains information about what can be stored in the list by
  use of its constructor field.
  """

  def __init__(self, constructor, default=None):
    """Initializer for repeated field.

    Args:
      constructor: Type used for verifying elements of sequence attribute.
      default: Default assignment; does not pass through validation.
    """
    super(Repeated, self).__init__(default)
    self.constructor = constructor
    self.expected_type = list

  def Validate(self, value):
    """Do validation of sequence.

    Value must be a list and all elements must be of type 'constructor'.

    Args:
      value: Value to validate.

    Returns:
      The validated list, unchanged.

    Raises:
      ValidationError if value is None, not a list or one of its elements
      is the wrong type.
    """
    if not isinstance(value, list):
      raise ValidationError('Repeated fields must be sequence, '
                            'but found \'%s\'.' % value)
    for element in value:
      if isinstance(element, self.constructor):
        continue
      raise ValidationError('Repeated items must be %s, but found \'%s\'.'
                            % (str(self.constructor), str(element)))
    return value
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Python datastore class User to be used as a datastore data type.
Classes defined here:
User: object representing a user.
Error: base exception type
UserNotFoundError: UserService exception
RedirectTooLongError: UserService exception
NotAllowedError: UserService exception
"""
import os
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import user_service_pb
from google.appengine.api import api_base_pb
from google.appengine.runtime import apiproxy_errors
class Error(Exception):
  """Base User error type."""


class UserNotFoundError(Error):
  """Raised by User.__init__() when there's no email argument and no user
  is logged in."""


class RedirectTooLongError(Error):
  """Raised by UserService calls if the generated redirect URL was too
  long.
  """


class NotAllowedError(Error):
  """Raised by UserService calls if the requested redirect URL is not
  allowed.
  """
class User(object):
  """A user.

  We provide here the email address, nickname, and auth domain for a user.

  A nickname is a human-readable string which uniquely identifies a Google
  user, akin to a username.  It will be an email address for some users,
  but not all.
  """

  def __init__(self, email=None, _auth_domain=None):
    """Constructor.

    Args:
      email: Optional string; defaults to the email of the currently
        logged-in user (read from the USER_EMAIL environment variable).
      _auth_domain: Internal use only.  When omitted it is read from the
        AUTH_DOMAIN environment variable; when supplied, an explicit
        email must also be given.

    Raises:
      UserNotFoundError: when no email was supplied and no user is
        logged in (USER_EMAIL is empty).
    """
    if _auth_domain is None:
      _auth_domain = os.environ.get('AUTH_DOMAIN')
    else:
      # An explicit auth domain is only valid with an explicit email.
      assert email is not None
    assert _auth_domain
    if email is None:
      # No explicit email: fall back to the currently logged-in user.
      assert 'USER_EMAIL' in os.environ
      email = os.environ['USER_EMAIL']
    if not email:
      raise UserNotFoundError
    self.__email = email
    self.__auth_domain = _auth_domain

  def nickname(self):
    """Return this user's nickname.

    The nickname will be a unique, human readable identifier for this
    user with respect to this application.  It will be an email address
    for some users, but not all.
    """
    if (self.__email and self.__auth_domain and
        self.__email.endswith('@' + self.__auth_domain)):
      # Strip the '@auth_domain' suffix from the email address.
      suffix_len = len(self.__auth_domain) + 1
      return self.__email[:-suffix_len]
    else:
      return self.__email

  def email(self):
    """Return this user's email address."""
    return self.__email

  def auth_domain(self):
    """Return this user's auth domain."""
    return self.__auth_domain

  def __unicode__(self):
    return unicode(self.nickname())

  def __str__(self):
    return str(self.nickname())

  def __repr__(self):
    return "users.User(email='%s')" % self.email()

  def __hash__(self):
    # Hash on the same (email, auth_domain) pair used for comparison.
    return hash((self.__email, self.__auth_domain))

  def __cmp__(self, other):
    # Python 2 three-way comparison: order users by (email, auth_domain).
    if not isinstance(other, User):
      return NotImplemented
    return cmp((self.__email, self.__auth_domain),
               (other.__email, other.__auth_domain))
def create_login_url(dest_url):
"""Computes the login URL for this request and specified destination URL.
Args:
dest_url: String that is the desired final destination URL for the user
once login is complete. If 'dest_url' does not have a host
specified, we will use the host from the current request.
Returns:
string
"""
req = user_service_pb.StringProto()
resp = user_service_pb.StringProto()
req.set_value(dest_url)
try:
apiproxy_stub_map.MakeSyncCall('user', 'CreateLoginURL', req, resp)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
user_service_pb.UserServiceError.REDIRECT_URL_TOO_LONG):
raise RedirectTooLongError
elif (e.application_error ==
user_service_pb.UserServiceError.NOT_ALLOWED):
raise NotAllowedError
else:
raise e
return resp.value()
CreateLoginURL = create_login_url
def create_logout_url(dest_url):
"""Computes the logout URL for this request and specified destination URL.
Args:
dest_url: String that is the desired final destination URL for the user
once logout is complete. If 'dest_url' does not have a host
specified, we will use the host from the current request.
Returns:
string
"""
req = user_service_pb.StringProto()
resp = user_service_pb.StringProto()
req.set_value(dest_url)
try:
apiproxy_stub_map.MakeSyncCall('user', 'CreateLogoutURL', req, resp)
except apiproxy_errors.ApplicationError, e:
if (e.application_error ==
user_service_pb.UserServiceError.REDIRECT_URL_TOO_LONG):
raise RedirectTooLongError
else:
raise e
return resp.value()
CreateLogoutURL = create_logout_url
def get_current_user():
  """Return the User making this request, or None if nobody is logged in."""
  try:
    return User()
  except UserNotFoundError:
    return None

GetCurrentUser = get_current_user
def is_current_user_admin():
  """Return True if the user making this request is an admin for this
  application, False otherwise.

  We specifically make this a separate function, and not a member function
  of the User class, because admin status is not persisted in the
  datastore.  It only exists for the user making this request right now.
  """
  # The runtime exposes admin status via the USER_IS_ADMIN env variable.
  return os.environ.get('USER_IS_ADMIN', '0') == "1"

IsCurrentUserAdmin = is_current_user_admin
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Base class for implementing API proxy stubs."""
from google.appengine.api import apiproxy_rpc
from google.appengine.runtime import apiproxy_errors
# Default cap on inbound request size: 1 MB.
MAX_REQUEST_SIZE = 1 << 20


class APIProxyStub(object):
  """Base class for implementing API proxy stub classes.

  To implement an API proxy stub:
    - Extend this class.
    - Override __init__ to pass in appropriate default service name.
    - Implement service methods as _Dynamic_<method>(request, response).
  """

  def __init__(self, service_name, max_request_size=MAX_REQUEST_SIZE):
    """Constructor.

    Args:
      service_name: Service name expected for all calls.
      max_request_size: int, maximum allowable size of the incoming
        request.  An apiproxy_errors.RequestTooLargeError will be raised
        if the inbound request exceeds this size.  Default is 1 MB.
    """
    self.__service_name = service_name
    self.__max_request_size = max_request_size

  def CreateRPC(self):
    """Creates RPC object instance.

    Returns:
      An instance of RPC.
    """
    return apiproxy_rpc.RPC(stub=self)

  def MakeSyncCall(self, service, call, request, response):
    """The main RPC entry point.

    Args:
      service: Must be name as provided to service_name of constructor.
      call: A string representing the rpc to make.  Must be part of the
        underlying services methods and implemented by _Dynamic_<call>.
      request: A protocol buffer of the type corresponding to 'call'.
      response: A protocol buffer of the type corresponding to 'call'.

    Raises:
      apiproxy_errors.RequestTooLargeError: if the serialized request
        exceeds the configured maximum size.
    """
    assert service == self.__service_name, ('Expected "%s" service name, '
                                            'was "%s"' % (self.__service_name,
                                                          service))
    if request.ByteSize() > self.__max_request_size:
      raise apiproxy_errors.RequestTooLargeError(
          'The request to API call %s.%s() was too large.' % (service, call))

    # The request must be fully initialized before dispatching.
    messages = []
    assert request.IsInitialized(messages), messages

    # Dispatch to the subclass-provided handler for this call.
    handler = getattr(self, '_Dynamic_' + call)
    handler(request, response)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Trivial implementation of the UserService."""
import os
import urllib
import urlparse
from google.appengine.api import apiproxy_stub
from google.appengine.api import user_service_pb
# Format strings for the stub's login/logout redirects; '%s' is replaced
# with the URL-escaped continuation URL.
_DEFAULT_LOGIN_URL = 'https://www.google.com/accounts/Login?continue=%s'
_DEFAULT_LOGOUT_URL = 'https://www.google.com/accounts/Logout?continue=%s'


class UserServiceStub(apiproxy_stub.APIProxyStub):
  """Trivial implementation of the UserService."""

  def __init__(self,
               login_url=_DEFAULT_LOGIN_URL,
               logout_url=_DEFAULT_LOGOUT_URL,
               service_name='user'):
    """Initializer.

    Args:
      login_url: String containing the URL to use for logging in.
      logout_url: String containing the URL to use for logging out.
      service_name: Service name expected for all calls.

    Note: Both the login_url and logout_url arguments must contain one
    format parameter, which will be replaced with the continuation URL
    where the user should be redirected after log-in or log-out has been
    completed.
    """
    super(UserServiceStub, self).__init__(service_name)
    self.__num_requests = 0
    self._login_url = login_url
    self._logout_url = logout_url

    # The stub pretends everyone authenticates against gmail.com.
    os.environ['AUTH_DOMAIN'] = 'gmail.com'

  def num_requests(self):
    # Number of CreateLoginURL/CreateLogoutURL calls served so far.
    return self.__num_requests

  def _Dynamic_CreateLoginURL(self, request, response):
    """Trivial implementation of UserService.CreateLoginURL().

    Args:
      request: the URL to redirect to after login; a base.StringProto
      response: the login URL; a base.StringProto
    """
    self.__num_requests += 1
    response.set_value(
        self._login_url %
        urllib.quote(self._AddHostToContinueURL(request.value())))

  def _Dynamic_CreateLogoutURL(self, request, response):
    """Trivial implementation of UserService.CreateLogoutURL().

    Args:
      request: the URL to redirect to after logout; a base.StringProto
      response: the logout URL; a base.StringProto
    """
    self.__num_requests += 1
    response.set_value(
        self._logout_url %
        urllib.quote(self._AddHostToContinueURL(request.value())))

  def _AddHostToContinueURL(self, continue_url):
    """Adds the request host to the continue url if no host is specified.

    Args:
      continue_url: the URL which may or may not have a host specified

    Returns:
      string
    """
    (protocol, host, path, parameters, query, fragment) = urlparse.urlparse(continue_url, 'http')
    if host:
      # Already an absolute URL; nothing to do.
      return continue_url

    # Rebuild the URL against the host serving this request.
    host = os.environ['SERVER_NAME']
    if os.environ['SERVER_PORT'] != '80':
      host = host + ":" + os.environ['SERVER_PORT']

    if path == '':
      path = '/'

    return urlparse.urlunparse(
        (protocol, host, path, parameters, query, fragment))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Errors used in the Python datastore API."""
class Error(Exception):
"""Base datastore error type.
"""
class BadValueError(Error):
"""Raised by Entity.__setitem__(), Query.__setitem__(), Get(), and others
when a property value or filter value is invalid.
"""
class BadPropertyError(Error):
"""Raised by Entity.__setitem__() when a property name isn't a string.
"""
class BadRequestError(Error):
"""Raised by datastore calls when the parameter(s) are invalid.
"""
class EntityNotFoundError(Error):
"""DEPRECATED: Raised by Get() when the requested entity is not found.
"""
class BadArgumentError(Error):
"""Raised by Query.Order(), Iterator.Next(), and others when they're
passed an invalid argument.
"""
class QueryNotFoundError(Error):
"""DEPRECATED: Raised by Iterator methods when the Iterator is invalid. This
should not happen during normal usage; it protects against malicious users
and system errors.
"""
class TransactionNotFoundError(Error):
"""DEPRECATED: Raised by RunInTransaction. This is an internal error; you
should not see this.
"""
class Rollback(Error):
  """May be raised by transaction functions when they want to roll back
  instead of committing. Note that *any* exception raised by a transaction
  function will cause a rollback. This is purely for convenience. See
  datastore.RunInTransaction for details.
  """
class TransactionFailedError(Error):
  """Raised by RunInTransaction methods when the transaction could not be
  committed, even after retrying. This is usually due to high contention.
  """
class BadFilterError(Error):
  """Raised by Query.__setitem__() and Query.Run() when a filter string is
  invalid.
  """

  def __init__(self, filter):
    # Keep the offending filter expression around for the error message.
    self.filter = filter

  def __str__(self):
    return u'BadFilterError: invalid filter: %s.' % self.filter
class BadQueryError(Error):
  """Raised by Query when a query or query string is invalid.
  """
class BadKeyError(Error):
  """Raised by Key.__str__ when the key is invalid.
  """
class InternalError(Error):
  """An internal datastore error. Please report this to Google.
  """
class NeedIndexError(Error):
  """No matching index was found for a query that requires an index. Check
  the Indexes page in the Admin Console and your index.yaml file.
  """
class Timeout(Error):
  """The datastore operation timed out. This can happen when you attempt to
  put, get, or delete too many entities or an entity with too many properties,
  or if the datastore is overloaded or having trouble.
  """
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from google.net.proto import ProtocolBuffer
import array
import dummy_thread as thread
__pychecker__ = """maxreturns=0 maxbranches=0 no-callinit
unusednames=printElemNumber,debug_strs no-special"""
from google.appengine.api.api_base_pb import StringProto
class UserServiceError(ProtocolBuffer.ProtocolMessage):
  """Protocol message carrying user-service error codes.

  NOTE: machine-generated protocol buffer code. The message itself has no
  fields; it only defines the ErrorCode enumeration values below.
  """

  # ErrorCode enumeration values.
  OK = 0
  REDIRECT_URL_TOO_LONG = 1
  NOT_ALLOWED = 2

  # Maps ErrorCode values to their symbolic names.
  _ErrorCode_NAMES = {
    0: "OK",
    1: "REDIRECT_URL_TOO_LONG",
    2: "NOT_ALLOWED",
  }

  def ErrorCode_Name(cls, x): return cls._ErrorCode_NAMES.get(x, "")
  ErrorCode_Name = classmethod(ErrorCode_Name)

  def __init__(self, contents=None):
    # No fields to initialize; optionally decode from serialized bytes.
    pass
    if contents is not None: self.MergeFromString(contents)

  def MergeFrom(self, x):
    # Nothing to merge for a fieldless message.
    assert x is not self

  def Equals(self, x):
    # All instances are equal since there are no fields.
    if x is self: return 1
    return 1

  def IsInitialized(self, debug_strs=None):
    initialized = 1
    return initialized

  def ByteSize(self):
    n = 0
    return n + 0

  def Clear(self):
    pass

  def OutputUnchecked(self, out):
    pass

  def TryMerge(self, d):
    # Skip any unknown fields in the wire data; tag 0 is invalid.
    while d.avail() > 0:
      tt = d.getVarInt32()
      if (tt == 0): raise ProtocolBuffer.ProtocolBufferDecodeError
      d.skipData(tt)

  def __str__(self, prefix="", printElemNumber=0):
    res=""
    return res

  _TEXT = (
   "ErrorCode",
  )

  _TYPES = (
   ProtocolBuffer.Encoder.NUMERIC,
  )

  _STYLE = """"""
  _STYLE_CONTENT_TYPE = """"""
# Public API of this generated module.
__all__ = ['UserServiceError']
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pure-Python application server for testing applications locally.
Given a port and the paths to a valid application directory (with an 'app.yaml'
file), the external library directory, and a relative URL to use for logins,
creates an HTTP server that can be used to test an application locally. Uses
stubs instead of actual APIs when SetupStubs() is called first.
Example:
root_path = '/path/to/application/directory'
login_url = '/login'
port = 8080
template_dir = '/path/to/appserver/templates'
server = dev_appserver.CreateServer(root_path, login_url, port, template_dir)
server.serve_forever()
"""
from google.appengine.tools import os_compat
import __builtin__
import BaseHTTPServer
import Cookie
import cStringIO
import cgi
import cgitb
import dummy_thread
import email.Utils
import errno
import httplib
import imp
import inspect
import itertools
import locale
import logging
import mimetools
import mimetypes
import os
import pickle
import pprint
import random
import re
import sre_compile
import sre_constants
import sre_parse
import mimetypes
import socket
import sys
import time
import traceback
import types
import urlparse
import urllib
import google
from google.pyglib import gexcept
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import appinfo
from google.appengine.api import croninfo
from google.appengine.api import datastore_admin
from google.appengine.api import datastore_file_stub
from google.appengine.api import mail_stub
from google.appengine.api import urlfetch_stub
from google.appengine.api import user_service_stub
from google.appengine.api import yaml_errors
from google.appengine.api.capabilities import capability_stub
from google.appengine.api.memcache import memcache_stub
from google.appengine import dist
from google.appengine.tools import dev_appserver_index
from google.appengine.tools import dev_appserver_login
# Placeholder expanded to the SDK's Python library directory in app handlers.
PYTHON_LIB_VAR = '$PYTHON_LIB'
DEVEL_CONSOLE_PATH = PYTHON_LIB_VAR + '/google/appengine/ext/admin'

# errno values that indicate a missing file rather than a real I/O failure.
FILE_MISSING_EXCEPTIONS = frozenset([errno.ENOENT, errno.ENOTDIR])

MAX_URL_LENGTH = 2047

# Template files used to render the in-page debugging/logging console.
HEADER_TEMPLATE = 'logging_console_header.html'
SCRIPT_TEMPLATE = 'logging_console.js'
MIDDLE_TEMPLATE = 'logging_console_middle.html'
FOOTER_TEMPLATE = 'logging_console_footer.html'

# Baseline CGI environment for every dispatched request.
DEFAULT_ENV = {
  'GATEWAY_INTERFACE': 'CGI/1.1',
  'AUTH_DOMAIN': 'gmail.com',
  'TZ': 'UTC',
}

# Register extra MIME types missing from the stdlib mimetypes table.
for ext, mime_type in (('.asc', 'text/plain'),
                       ('.diff', 'text/plain'),
                       ('.csv', 'text/comma-separated-values'),
                       ('.rss', 'application/rss+xml'),
                       ('.text', 'text/plain'),
                       ('.wbmp', 'image/vnd.wap.wbmp')):
  mimetypes.add_type(mime_type, ext)

# Response/request size limits (10 MB each).
MAX_RUNTIME_RESPONSE_SIZE = 10 << 20
MAX_REQUEST_SIZE = 10 * 1024 * 1024

API_VERSION = '1'
class Error(Exception):
  """Base-class for exceptions in this module."""
class InvalidAppConfigError(Error):
  """The supplied application configuration file is invalid."""
class AppConfigNotFoundError(Error):
  """Application configuration file not found."""
class TemplatesNotLoadedError(Error):
  """Templates for the debugging console were not loaded."""
def SplitURL(relative_url):
  """Splits a relative URL into its path and query-string components.

  Args:
    relative_url: String containing the relative URL (often starting with '/')
      to split. Should be properly escaped as www-form-urlencoded data.

  Returns:
    Tuple (script_name, query_string) where:
      script_name: Relative URL of the script that was accessed.
      query_string: String containing everything after the '?' character.
  """
  # urlsplit returns (scheme, netloc, path, query, fragment); only the
  # path and query components are of interest here.
  split_result = urlparse.urlsplit(relative_url)
  return split_result[2], split_result[3]
def GetFullURL(server_name, server_port, relative_url):
  """Returns the full, original URL used to access the relative URL.

  Args:
    server_name: Name of the local host, or the value of the 'host' header
      from the request.
    server_port: Port on which the request was served (string or int).
    relative_url: Relative URL that was accessed, including query string.

  Returns:
    String containing the original URL.
  """
  # Port 80 is implicit in http URLs; anything else must be spelled out.
  netloc = (server_name if str(server_port) == '80'
            else '%s:%s' % (server_name, server_port))
  return 'http://%s%s' % (netloc, relative_url)
class URLDispatcher(object):
  """Base-class for handling HTTP requests."""

  def Dispatch(self,
               relative_url,
               path,
               headers,
               infile,
               outfile,
               base_env_dict=None):
    """Dispatch and handle an HTTP request.

    base_env_dict should contain at least these CGI variables:
      REQUEST_METHOD, REMOTE_ADDR, SERVER_SOFTWARE, SERVER_NAME,
      SERVER_PROTOCOL, SERVER_PORT

    Args:
      relative_url: String containing the URL accessed.
      path: Local path of the resource that was matched; back-references will be
        replaced by values matched in the relative_url. Path may be relative
        or absolute, depending on the resource being served (e.g., static files
        will have an absolute path; scripts will be relative).
      headers: Instance of mimetools.Message with headers from the request.
      infile: File-like object with input data from the request.
      outfile: File-like object where output data should be written.
      base_env_dict: Dictionary of CGI environment parameters if available.
        Defaults to None.

    Returns:
      None if request handling is complete.
      Tuple (path, headers, input_file) for an internal redirect:
        path: Path of URL to redirect to.
        headers: Headers to send to other dispatcher.
        input_file: New input to send to new dispatcher.
    """
    # Sub-classes must implement the actual request handling.
    raise NotImplementedError

  def EndRedirect(self, dispatched_output, original_output):
    """Process the end of an internal redirect.

    This method is called after all subsequent dispatch requests have finished.
    By default the output from the dispatched process is copied to the original.

    This will not be called on dispatchers that do not return an internal
    redirect.

    Args:
      dispatched_output: StringIO buffer containing the results from the
        dispatched request.
      original_output: File-like object that receives the final response.
    """
    original_output.write(dispatched_output.read())
class URLMatcher(object):
  """Matches an arbitrary URL using a list of URL patterns from an application.

  Each URL pattern has an associated URLDispatcher instance and path to the
  resource's location on disk. See AddURL for more details. The first pattern
  that matches an inputted URL will have its associated values returned by
  Match().
  """

  def __init__(self):
    """Initializer."""
    # Ordered list of (url_re, dispatcher, path, requires_login, admin_only)
    # tuples; the first matching entry wins in Match().
    self._url_patterns = []

  def AddURL(self, regex, dispatcher, path, requires_login, admin_only):
    """Adds a URL pattern to the list of patterns.

    If the supplied regex starts with a '^' or ends with a '$' an
    InvalidAppConfigError exception will be raised. Start and end symbols
    and implicitly added to all regexes, meaning we assume that all regexes
    consume all input from a URL.

    Args:
      regex: String containing the regular expression pattern.
      dispatcher: Instance of URLDispatcher that should handle requests that
        match this regex.
      path: Path on disk for the resource. May contain back-references like
        r'\1', r'\2', etc, which will be replaced by the corresponding groups
        matched by the regex if present.
      requires_login: True if the user must be logged-in before accessing this
        URL; False if anyone can access this URL.
      admin_only: True if the user must be a logged-in administrator to
        access the URL; False if anyone can access the URL.

    Raises:
      TypeError: if dispatcher is not a URLDispatcher instance.
      InvalidAppConfigError: if the regex is anchored or does not compile.
    """
    if not isinstance(dispatcher, URLDispatcher):
      raise TypeError, 'dispatcher must be a URLDispatcher sub-class'

    if regex.startswith('^') or regex.endswith('$'):
      raise InvalidAppConfigError, 'regex starts with "^" or ends with "$"'

    # Anchor the pattern so it must consume the entire URL path.
    adjusted_regex = '^%s$' % regex

    try:
      url_re = re.compile(adjusted_regex)
    except re.error, e:
      raise InvalidAppConfigError, 'regex invalid: %s' % e

    match_tuple = (url_re, dispatcher, path, requires_login, admin_only)
    self._url_patterns.append(match_tuple)

  def Match(self,
            relative_url,
            split_url=SplitURL):
    """Matches a URL from a request against the list of URL patterns.

    The supplied relative_url may include the query string (i.e., the '?'
    character and everything following).

    Args:
      relative_url: Relative URL being accessed in a request.
      split_url: Used for dependency injection.

    Returns:
      Tuple (dispatcher, matched_path, requires_login, admin_only), which are
      the corresponding values passed to AddURL when the matching URL pattern
      was added to this matcher. The matched_path will have back-references
      replaced using values matched by the URL pattern. If no match was found,
      dispatcher will be None.
    """
    # Only the path participates in matching; the query string is dropped.
    adjusted_url, query_string = split_url(relative_url)

    for url_tuple in self._url_patterns:
      url_re, dispatcher, path, requires_login, admin_only = url_tuple
      the_match = url_re.match(adjusted_url)

      if the_match:
        # Expand back-references (e.g. r'\1') in the on-disk path.
        adjusted_path = the_match.expand(path)
        return dispatcher, adjusted_path, requires_login, admin_only

    return None, None, None, None

  def GetDispatchers(self):
    """Retrieves the URLDispatcher objects that could be matched.

    Should only be used in tests.

    Returns:
      A set of URLDispatcher objects.
    """
    return set([url_tuple[1] for url_tuple in self._url_patterns])
class MatcherDispatcher(URLDispatcher):
  """Dispatcher across multiple URLMatcher instances."""

  def __init__(self,
               login_url,
               url_matchers,
               get_user_info=dev_appserver_login.GetUserInfo,
               login_redirect=dev_appserver_login.LoginRedirect):
    """Initializer.

    Args:
      login_url: Relative URL which should be used for handling user logins.
      url_matchers: Sequence of URLMatcher objects.
      get_user_info, login_redirect: Used for dependency injection.
    """
    self._login_url = login_url
    self._url_matchers = tuple(url_matchers)
    self._get_user_info = get_user_info
    self._login_redirect = login_redirect

  def Dispatch(self,
               relative_url,
               path,
               headers,
               infile,
               outfile,
               base_env_dict=None):
    """Dispatches a request to the first matching dispatcher.

    Matchers are checked in the order they were supplied to the constructor.
    If no matcher matches, a 404 error will be written to the outfile. The
    path variable supplied to this method is ignored.
    """
    # Identify the logged-in user (if any) from the request cookies.
    cookies = ', '.join(headers.getheaders('cookie'))
    email, admin = self._get_user_info(cookies)

    for matcher in self._url_matchers:
      dispatcher, matched_path, requires_login, admin_only = matcher.Match(relative_url)
      if dispatcher is None:
        continue

      logging.debug('Matched "%s" to %s with path %s',
                    relative_url, dispatcher, matched_path)

      if (requires_login or admin_only) and not email:
        logging.debug('Login required, redirecting user')
        self._login_redirect(
          self._login_url,
          base_env_dict['SERVER_NAME'],
          base_env_dict['SERVER_PORT'],
          relative_url,
          outfile)
      elif admin_only and not admin:
        outfile.write('Status: %d Not authorized\r\n'
                      '\r\n'
                      'Current logged in user %s is not '
                      'authorized to view this page.'
                      % (httplib.FORBIDDEN, email))
      else:
        forward = dispatcher.Dispatch(relative_url,
                                      matched_path,
                                      headers,
                                      infile,
                                      outfile,
                                      base_env_dict=base_env_dict)

        if forward:
          # Internal redirect: re-dispatch the new request into a buffer,
          # then let the original dispatcher post-process the output.
          new_path, new_headers, new_input = forward
          logging.info('Internal redirection to %s' % new_path)
          new_outfile = cStringIO.StringIO()
          self.Dispatch(new_path,
                        None,
                        new_headers,
                        new_input,
                        new_outfile,
                        dict(base_env_dict))
          new_outfile.seek(0)
          dispatcher.EndRedirect(new_outfile, outfile)

      # First matching pattern handles the request; stop here.
      return

    outfile.write('Status: %d URL did not match\r\n'
                  '\r\n'
                  'Not found error: %s did not match any patterns '
                  'in application configuration.'
                  % (httplib.NOT_FOUND, relative_url))
class ApplicationLoggingHandler(logging.Handler):
  """Python Logging handler that displays the debugging console to users."""

  # Cookie that stores the user's minimum-severity preference.
  _COOKIE_NAME = '_ah_severity'

  # Class-level template cache, populated by InitializeTemplates().
  _TEMPLATES_INITIALIZED = False
  _HEADER = None
  _SCRIPT = None
  _MIDDLE = None
  _FOOTER = None

  @staticmethod
  def InitializeTemplates(header, script, middle, footer):
    """Initializes the templates used to render the debugging console.

    This method must be called before any ApplicationLoggingHandler instances
    are created.

    Args:
      header: The header template that is printed first.
      script: The script template that is printed after the logging messages.
      middle: The middle element that's printed before the footer.
      footer: The last element that's printed at the end of the document.
    """
    ApplicationLoggingHandler._HEADER = header
    ApplicationLoggingHandler._SCRIPT = script
    ApplicationLoggingHandler._MIDDLE = middle
    ApplicationLoggingHandler._FOOTER = footer
    ApplicationLoggingHandler._TEMPLATES_INITIALIZED = True

  @staticmethod
  def AreTemplatesInitialized():
    """Returns True if InitializeTemplates has been called, False otherwise."""
    return ApplicationLoggingHandler._TEMPLATES_INITIALIZED

  def __init__(self, *args, **kwargs):
    """Initializer.

    Args:
      args, kwargs: See logging.Handler.

    Raises:
      TemplatesNotLoadedError exception if the InitializeTemplates method was
      not called before creating this instance.
    """
    if not self._TEMPLATES_INITIALIZED:
      raise TemplatesNotLoadedError

    logging.Handler.__init__(self, *args, **kwargs)
    self._record_list = []
    self._start_time = time.time()

  def emit(self, record):
    """Called by the logging module each time the application logs a message.

    Args:
      record: logging.LogRecord instance corresponding to the newly logged
        message.
    """
    # Records are buffered; they are rendered later by AddDebuggingConsole.
    self._record_list.append(record)

  def AddDebuggingConsole(self, relative_url, env, outfile):
    """Prints an HTML debugging console to an output stream, if requested.

    Args:
      relative_url: Relative URL that was accessed, including the query string.
        Used to determine if the parameter 'debug' was supplied, in which case
        the console will be shown.
      env: Dictionary containing CGI environment variables. Checks for the
        HTTP_COOKIE entry to see if the accessing user has any logging-related
        cookies set.
      outfile: Output stream to which the console should be written if either
        a debug parameter was supplied or a logging cookie is present.
    """
    script_name, query_string = SplitURL(relative_url)
    param_dict = cgi.parse_qs(query_string, True)
    cookie_dict = Cookie.SimpleCookie(env.get('HTTP_COOKIE', ''))
    # Render only when explicitly requested via ?debug or the severity cookie.
    if 'debug' not in param_dict and self._COOKIE_NAME not in cookie_dict:
      return

    outfile.write(self._HEADER)
    for record in self._record_list:
      self._PrintRecord(record, outfile)

    outfile.write(self._MIDDLE)
    outfile.write(self._SCRIPT)
    outfile.write(self._FOOTER)

  def _PrintRecord(self, record, outfile):
    """Prints a single logging record to an output stream.

    Args:
      record: logging.LogRecord instance to print.
      outfile: Output stream to which the LogRecord should be printed.
    """
    # HTML-escape the message since it is embedded directly in the page.
    message = cgi.escape(record.getMessage())
    level_name = logging.getLevelName(record.levelno).lower()
    level_letter = level_name[:1].upper()
    time_diff = record.created - self._start_time
    outfile.write('<span class="_ah_logline_%s">\n' % level_name)
    outfile.write('<span class="_ah_logline_%s_prefix">%2.5f %s ></span>\n'
                  % (level_name, time_diff, level_letter))
    outfile.write('%s\n' % message)
    outfile.write('</span>\n')
# Request headers that are handled specially and must not be copied into the
# CGI environment as HTTP_* variables (see SetupEnvironment).
_IGNORE_REQUEST_HEADERS = frozenset(['content-type', 'content-length',
                                     'accept-encoding', 'transfer-encoding'])
def SetupEnvironment(cgi_path,
                     relative_url,
                     headers,
                     split_url=SplitURL,
                     get_user_info=dev_appserver_login.GetUserInfo):
  """Sets up environment variables for a CGI.

  Args:
    cgi_path: Full file-system path to the CGI being executed.
    relative_url: Relative URL used to access the CGI.
    headers: Instance of mimetools.Message containing request headers.
    split_url, get_user_info: Used for dependency injection.

  Returns:
    Dictionary containing CGI environment variables.
  """
  env = DEFAULT_ENV.copy()

  script_name, query_string = split_url(relative_url)

  # SCRIPT_NAME is intentionally empty; the full URL path is exposed via
  # PATH_INFO instead.
  env['SCRIPT_NAME'] = ''
  env['QUERY_STRING'] = query_string
  env['PATH_INFO'] = urllib.unquote(script_name)
  env['PATH_TRANSLATED'] = cgi_path
  env['CONTENT_TYPE'] = headers.getheader('content-type',
                                          'application/x-www-form-urlencoded')
  env['CONTENT_LENGTH'] = headers.getheader('content-length', '')

  # Identify the logged-in user (if any) from the request cookies.
  cookies = ', '.join(headers.getheaders('cookie'))
  email, admin = get_user_info(cookies)
  env['USER_EMAIL'] = email
  if admin:
    env['USER_IS_ADMIN'] = '1'

  # Copy the remaining request headers through as HTTP_* variables.
  for key in headers:
    if key in _IGNORE_REQUEST_HEADERS:
      continue
    adjusted_name = key.replace('-', '_').upper()
    env['HTTP_' + adjusted_name] = ', '.join(headers.getheaders(key))

  return env
def NotImplementedFake(*args, **kwargs):
  """Stand-in for methods/functions that do not exist in production.

  Raises:
    NotImplementedError: always, regardless of the arguments supplied.
  """
  raise NotImplementedError("This class/method is not available.")
class NotImplementedFakeClass(object):
  """Fake class for classes that are not implemented in the production
  environment.
  """
  # Instantiation immediately raises NotImplementedError.
  __init__ = NotImplementedFake
def IsEncodingsModule(module_name):
  """Determines if the supplied module is related to encodings in any way.

  Encodings-related modules cannot be reloaded, so they need to be treated
  specially when sys.modules is modified in any way.

  Args:
    module_name: Absolute name of the module regardless of how it is imported
      into the local namespace (e.g., foo.bar.baz).

  Returns:
    True if it's an encodings-related module; False otherwise.
  """
  return (module_name in ('codecs', 'encodings') or
          module_name.startswith('encodings.'))
def ClearAllButEncodingsModules(module_dict):
  """Clear all modules in a module dictionary except for those modules that
  are in any way related to encodings.

  Args:
    module_dict: Dictionary in the form used by sys.modules.
  """
  # Collect names first so the dictionary is not mutated while scanning it.
  removable = [name for name in module_dict.keys()
               if not IsEncodingsModule(name)]
  for name in removable:
    del module_dict[name]
def FakeURandom(n):
  """Fake version of os.urandom.

  Args:
    n: Number of byte characters to produce.

  Returns:
    A str of length n whose characters have ordinals in [0, 255]. NOTE: this
    is backed by the `random` module, so unlike the real os.urandom it is NOT
    cryptographically secure.
  """
  # ''.join avoids the quadratic cost of repeated string concatenation; the
  # generator also avoids shadowing the `bytes` builtin.
  return ''.join(chr(random.randint(0, 255))
                 for _ in itertools.repeat(None, n))
def FakeUname():
  """Fake version of os.uname reporting a generic Linux host.

  Returns:
    A 5-tuple in the shape of os.uname() with empty node/release/version/
    machine fields.
  """
  return ('Linux', '', '', '', '')
def FakeUnlink(path):
  """Fake version of os.unlink; deletion is never permitted.

  Raises:
    OSError: always. EPERM-style (errno 1) for non-directories, errno 2
      with "Is a directory" for directories.
  """
  if not os.path.isdir(path):
    raise OSError(1, "Operation not permitted", path)
  raise OSError(2, "Is a directory", path)
def FakeReadlink(path):
  """Fake version of os.readlink; links can never be resolved.

  Raises:
    OSError: always, with errno 22 (invalid argument).
  """
  raise OSError(22, "Invalid argument", path)
def FakeAccess(path, mode):
  """Fake version of os.access where only reads are supported.

  Args:
    path: Path to test.
    mode: Access mode; only os.R_OK can succeed.

  Returns:
    True only when the path exists and read access was requested.
  """
  return mode == os.R_OK and os.path.exists(path)
def FakeSetLocale(category, value=None, original_setlocale=locale.setlocale):
  """Fake version of locale.setlocale that only supports the default.

  Args:
    category: locale category constant (e.g. locale.LC_ALL).
    value: Requested locale; only None, '', 'C' and 'POSIX' are accepted.
    original_setlocale: Used for dependency injection.

  Raises:
    locale.Error: if any other locale is requested.
  """
  if value in (None, '', 'C', 'POSIX'):
    return original_setlocale(category, 'C')
  raise locale.Error('locale emulation only supports "C" locale')
def IsPathInSubdirectories(filename,
                           subdirectories,
                           normcase=os.path.normcase):
  """Determines if a filename is contained within one of a set of directories.

  Args:
    filename: Path of the file (relative or absolute).
    subdirectories: Iterable collection of paths to subdirectories which the
      given filename may be under.
    normcase: Used for dependency injection.

  Returns:
    True if the supplied filename is in one of the given sub-directories or
    its hierarchy of children. False otherwise.
  """
  file_dir = normcase(os.path.dirname(os.path.abspath(filename)))
  # A file is inside a directory when that directory is a path prefix of
  # the file's own directory.
  candidates = (normcase(os.path.abspath(parent)) for parent in subdirectories)
  return any(os.path.commonprefix([file_dir, candidate]) == candidate
             for candidate in candidates)
# Module-name prefixes that are shared between the hardened and unhardened
# parts of the process (consumed by SetupSharedModules below).
SHARED_MODULE_PREFIXES = set([
  'google',
  'logging',
  'sys',
  'warnings',

  're',
  'sre_compile',
  'sre_constants',
  'sre_parse',

  'wsgiref',
])

# Exceptions to the prefixes above: these must never be shared.
NOT_SHARED_MODULE_PREFIXES = set([
  'google.appengine.ext',
])
def ModuleNameHasPrefix(module_name, prefix_set):
  """Determines if a module's name belongs to a set of prefix strings.

  Args:
    module_name: String containing the fully qualified module name.
    prefix_set: Iterable set of module name prefixes to check against.

  Returns:
    True if the module_name belongs to the prefix set or is a submodule of
    any of the modules specified in the prefix_set. Otherwise False.
  """
  # 'a.b' matches prefix 'a' but 'ab' does not, hence the trailing dot.
  return any(module_name == prefix or module_name.startswith(prefix + '.')
             for prefix in prefix_set)
def SetupSharedModules(module_dict):
  """Creates a module dictionary for the hardened part of the process.

  Module dictionary will contain modules that should be shared between the
  hardened and unhardened parts of the process.

  Args:
    module_dict: Module dictionary from which existing modules should be
      pulled (usually sys.modules).

  Returns:
    A new module dictionary.
  """
  shared = {}
  for name, module in module_dict.iteritems():
    # Placeholder entries for failed imports are not carried over.
    if module is None:
      continue
    # Encodings modules cannot be reloaded and must always be shared.
    if IsEncodingsModule(name):
      shared[name] = module
      continue
    if (ModuleNameHasPrefix(name, SHARED_MODULE_PREFIXES) and
        not ModuleNameHasPrefix(name, NOT_SHARED_MODULE_PREFIXES)):
      shared[name] = module
  return shared
class FakeFile(file):
  """File sub-class that enforces the security restrictions of the production
  environment.
  """

  # Only read-only open modes may be used by application code.
  ALLOWED_MODES = frozenset(['r', 'rb', 'U', 'rU'])

  # Files that may always be read (the mimetypes module's config files).
  ALLOWED_FILES = set(os.path.normcase(filename)
                      for filename in mimetypes.knownfiles
                      if os.path.isfile(filename))

  # The Python standard library directory is readable (real and abs forms
  # are both stored so symlinked installs match)...
  ALLOWED_DIRS = set([
      os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
      os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
  ])

  # ...except for its site-packages subdirectory.
  NOT_ALLOWED_DIRS = set([
      os.path.normcase(os.path.join(os.path.dirname(os.__file__),
                                    'site-packages'))
  ])

  # Specific site-packages subdirectories whitelisted anyway (currently none).
  ALLOWED_SITE_PACKAGE_DIRS = set(
      os.path.normcase(os.path.abspath(os.path.join(
          os.path.dirname(os.__file__), 'site-packages', path)))
      for path in [

  ])

  # The unrestricted builtin file type, kept for internal use.
  _original_file = file

  # Class-level configuration, populated by the static setters below.
  _root_path = None
  _application_paths = None
  _skip_files = None
  _static_file_config_matcher = None

  _allow_skipped_files = True

  # Memoizes IsFileAccessible() results; reset whenever configuration changes.
  _availability_cache = {}

  @staticmethod
  def SetAllowedPaths(root_path, application_paths):
    """Configures which paths are allowed to be accessed.

    Must be called at least once before any file objects are created in the
    hardened environment.

    Args:
      root_path: Absolute path to the root of the application.
      application_paths: List of additional paths that the application may
                         access, this must include the App Engine runtime but
                         not the Python library directories.
    """
    # Store both realpath and abspath forms so symlinked paths also match.
    FakeFile._application_paths = (set(os.path.realpath(path)
                                       for path in application_paths) |
                                   set(os.path.abspath(path)
                                       for path in application_paths))
    FakeFile._application_paths.add(root_path)

    # The trailing separator lets _IsFileAccessibleNoCache strip the root
    # prefix and obtain an application-relative filename.
    FakeFile._root_path = os.path.join(root_path, '')

    FakeFile._availability_cache = {}

  @staticmethod
  def SetAllowSkippedFiles(allow_skipped_files):
    """Configures access to files matching FakeFile._skip_files

    Args:
      allow_skipped_files: Boolean whether to allow access to skipped files
    """
    FakeFile._allow_skipped_files = allow_skipped_files
    FakeFile._availability_cache = {}

  @staticmethod
  def SetSkippedFiles(skip_files):
    """Sets which files in the application directory are to be ignored.

    Must be called at least once before any file objects are created in the
    hardened environment.

    Must be called whenever the configuration was updated.

    Args:
      skip_files: Object with .match() method (e.g. compiled regexp).
    """
    FakeFile._skip_files = skip_files
    FakeFile._availability_cache = {}

  @staticmethod
  def SetStaticFileConfigMatcher(static_file_config_matcher):
    """Sets StaticFileConfigMatcher instance for checking if a file is static.

    Must be called at least once before any file objects are created in the
    hardened environment.

    Must be called whenever the configuration was updated.

    Args:
      static_file_config_matcher: StaticFileConfigMatcher instance.
    """
    FakeFile._static_file_config_matcher = static_file_config_matcher
    FakeFile._availability_cache = {}

  @staticmethod
  def IsFileAccessible(filename, normcase=os.path.normcase):
    """Determines if a file's path is accessible.

    SetAllowedPaths(), SetSkippedFiles() and SetStaticFileConfigMatcher() must
    be called before this method or else all file accesses will raise an error.

    Args:
      filename: Path of the file to check (relative or absolute). May be a
        directory, in which case access for files inside that directory will
        be checked.
      normcase: Used for dependency injection.

    Returns:
      True if the file is accessible, False otherwise.
    """
    logical_filename = normcase(os.path.abspath(filename))

    # For a directory, check access via a hypothetical file inside it.
    if os.path.isdir(logical_filename):
      logical_filename = os.path.join(logical_filename, 'foo')

    result = FakeFile._availability_cache.get(logical_filename)
    if result is None:
      result = FakeFile._IsFileAccessibleNoCache(logical_filename,
                                                 normcase=normcase)
      FakeFile._availability_cache[logical_filename] = result
    return result

  @staticmethod
  def _IsFileAccessibleNoCache(logical_filename, normcase=os.path.normcase):
    """Determines if a file's path is accessible.

    This is an internal part of the IsFileAccessible implementation.

    Args:
      logical_filename: Absolute path of the file to check.
      normcase: Used for dependency injection.

    Returns:
      True if the file is accessible, False otherwise.
    """
    if IsPathInSubdirectories(logical_filename, [FakeFile._root_path],
                              normcase=normcase):
      # Inside the application root: skipped and static files are blocked.
      relative_filename = logical_filename[len(FakeFile._root_path):]

      if (not FakeFile._allow_skipped_files and
          FakeFile._skip_files.match(relative_filename)):
        logging.warning('Blocking access to skipped file "%s"',
                        logical_filename)
        return False

      if FakeFile._static_file_config_matcher.IsStaticFile(relative_filename):
        logging.warning('Blocking access to static file "%s"',
                        logical_filename)
        return False

    if logical_filename in FakeFile.ALLOWED_FILES:
      return True

    if IsPathInSubdirectories(logical_filename,
                              FakeFile.ALLOWED_SITE_PACKAGE_DIRS,
                              normcase=normcase):
      return True

    # Otherwise the file must be under an allowed directory (application
    # paths or the stdlib) and not under a disallowed one (site-packages).
    allowed_dirs = FakeFile._application_paths | FakeFile.ALLOWED_DIRS
    if (IsPathInSubdirectories(logical_filename,
                               allowed_dirs,
                               normcase=normcase) and
        not IsPathInSubdirectories(logical_filename,
                                   FakeFile.NOT_ALLOWED_DIRS,
                                   normcase=normcase)):
      return True

    return False

  def __init__(self, filename, mode='r', bufsize=-1, **kwargs):
    """Initializer. See file built-in documentation.

    Raises:
      IOError: if the mode is not read-only or the path is not accessible.
    """
    if mode not in FakeFile.ALLOWED_MODES:
      raise IOError('invalid mode: %s' % mode)

    if not FakeFile.IsFileAccessible(filename):
      raise IOError(errno.EACCES, 'file not accessible')

    super(FakeFile, self).__init__(filename, mode, bufsize, **kwargs)
class RestrictedPathFunction(object):
  """Enforces access restrictions for functions that have a file or
  directory path as their first argument."""

  # Handle to the unpatched os module, for internal use.
  _original_os = os

  def __init__(self, original_func):
    """Initializer.

    Args:
      original_func: Callable that takes as its first argument the path to a
        file or directory on disk; all subsequent arguments may be variable.
    """
    self._original_func = original_func

  def __call__(self, path, *args, **kwargs):
    """Enforces access permissions for the function passed to the constructor.

    Raises:
      OSError: with errno.EACCES if FakeFile denies access to the path.
    """
    if not FakeFile.IsFileAccessible(path):
      raise OSError(errno.EACCES, 'path not accessible')

    return self._original_func(path, *args, **kwargs)
def GetSubmoduleName(fullname):
  """Determines the leaf submodule name of a full module name.

  Args:
    fullname: Fully qualified module name, e.g. 'foo.bar.baz'

  Returns:
    Submodule name, e.g. 'baz'. If the supplied module has no submodule (e.g.,
    'stuff'), the returned value will just be that module name ('stuff').
  """
  # rfind returns -1 when there is no dot, so the slice starts at 0 and
  # yields the whole name.
  return fullname[fullname.rfind('.') + 1:]
class CouldNotFindModuleError(ImportError):
  """Raised when a module could not be found.

  In contrast to when a module has been found, but cannot be loaded because of
  hardening restrictions.
  """
def Trace(func):
  """Decorator that logs the call stack of the HardenedModulesHook class as
  it executes, indenting logging messages based on the current stack depth.

  Args:
    func: Method to wrap; must belong to a class providing self.log and
      self._indent_level (i.e. HardenedModulesHook).

  Returns:
    The wrapped method.
  """
  def decorate(self, *args, **kwargs):
    # Render positional and keyword arguments for the log lines.
    args_to_show = []
    if args is not None:
      args_to_show.extend(str(argument) for argument in args)
    if kwargs is not None:
      args_to_show.extend('%s=%s' % (key, value)
                          for key, value in kwargs.iteritems())

    args_string = ', '.join(args_to_show)

    self.log('Entering %s(%s)', func.func_name, args_string)
    self._indent_level += 1
    try:
      return func(self, *args, **kwargs)
    finally:
      # Always restore the indent and log the exit, even on exceptions.
      self._indent_level -= 1
      self.log('Exiting %s(%s)', func.func_name, args_string)

  return decorate
class HardenedModulesHook(object):
  """Meta import hook that restricts the modules used by applications to match
  the production environment.

  Module controls supported:
  - Disallow native/extension modules from being loaded
  - Disallow built-in and/or Python-distributed modules from being loaded
  - Replace modules with completely empty modules
  - Override specific module attributes
  - Replace one module with another

  After creation, this object should be added to the front of the sys.meta_path
  list (which may need to be created). The sys.path_importer_cache dictionary
  should also be cleared, to prevent loading any non-restricted modules.

  See PEP302 for more info on how this works:
    http://www.python.org/dev/peps/pep-0302/
  """

  # Flip to True to get a verbose, indented trace of every import decision.
  ENABLE_LOGGING = False

  def log(self, message, *args):
    """Logs an import-related message to stderr, with indentation based on
    current call-stack depth.

    Args:
      message: Logging format string.
      args: Positional format parameters for the logging message.
    """
    if HardenedModulesHook.ENABLE_LOGGING:
      indent = self._indent_level * '  '
      print >>sys.stderr, indent + (message % args)

  # C/extension modules that applications are allowed to import.
  _WHITE_LIST_C_MODULES = [
      'array',
      'binascii',
      'bz2',
      'cmath',
      'collections',
      'crypt',
      'cStringIO',
      'datetime',
      'errno',
      'exceptions',
      'gc',
      'itertools',
      'math',
      'md5',
      'operator',
      'posix',
      'posixpath',
      'pyexpat',
      'sha',
      'struct',
      'sys',
      'time',
      'timing',
      'unicodedata',
      'zlib',
      '_bisect',
      '_codecs',
      '_codecs_cn',
      '_codecs_hk',
      '_codecs_iso2022',
      '_codecs_jp',
      '_codecs_kr',
      '_codecs_tw',
      '_collections',
      '_csv',
      '_elementtree',
      '_functools',
      '_hashlib',
      '_heapq',
      '_locale',
      '_lsprof',
      '_md5',
      '_multibytecodec',
      '_random',
      '_sha',
      '_sha256',
      '_sha512',
      '_sre',
      '_struct',
      '_types',
      '_weakref',
      '__main__',
  ]

  # Modules that are only partially exposed: any attribute not listed here
  # (other than dunder names) is deleted by FixModule().
  _WHITE_LIST_PARTIAL_MODULES = {
      'gc': [
          'enable',
          'disable',
          'isenabled',
          'collect',
          'get_debug',
          'set_threshold',
          'get_threshold',
          'get_count'
      ],

      'os': [
          'access',
          'altsep',
          'curdir',
          'defpath',
          'devnull',
          'environ',
          'error',
          'extsep',
          'EX_NOHOST',
          'EX_NOINPUT',
          'EX_NOPERM',
          'EX_NOUSER',
          'EX_OK',
          'EX_OSERR',
          'EX_OSFILE',
          'EX_PROTOCOL',
          'EX_SOFTWARE',
          'EX_TEMPFAIL',
          'EX_UNAVAILABLE',
          'EX_USAGE',
          'F_OK',
          'getcwd',
          'getcwdu',
          'getenv',
          'listdir',
          'lstat',
          'name',
          'NGROUPS_MAX',
          'O_APPEND',
          'O_CREAT',
          'O_DIRECT',
          'O_DIRECTORY',
          'O_DSYNC',
          'O_EXCL',
          'O_LARGEFILE',
          'O_NDELAY',
          'O_NOCTTY',
          'O_NOFOLLOW',
          'O_NONBLOCK',
          'O_RDONLY',
          'O_RDWR',
          'O_RSYNC',
          'O_SYNC',
          'O_TRUNC',
          'O_WRONLY',
          'pardir',
          'path',
          'pathsep',
          'R_OK',
          'readlink',
          'remove',
          'SEEK_CUR',
          'SEEK_END',
          'SEEK_SET',
          'sep',
          'stat',
          'stat_float_times',
          'stat_result',
          'strerror',
          'TMP_MAX',
          'unlink',
          'urandom',
          'walk',
          'WCOREDUMP',
          'WEXITSTATUS',
          'WIFEXITED',
          'WIFSIGNALED',
          'WIFSTOPPED',
          'WNOHANG',
          'WSTOPSIG',
          'WTERMSIG',
          'WUNTRACED',
          'W_OK',
          'X_OK',
      ],
  }

  # Attributes that are replaced outright with sandboxed fakes after a
  # module is loaded (applied by FixModule()).
  _MODULE_OVERRIDES = {
      'locale': {
          'setlocale': FakeSetLocale,
      },

      'os': {
          'access': FakeAccess,
          'listdir': RestrictedPathFunction(os.listdir),

          'lstat': RestrictedPathFunction(os.stat),
          'readlink': FakeReadlink,
          'remove': FakeUnlink,
          'stat': RestrictedPathFunction(os.stat),
          'uname': FakeUname,
          'unlink': FakeUnlink,
          'urandom': FakeURandom,
      },
  }

  # File types that may be imported at all; C extensions (C_EXTENSION) are
  # deliberately absent and must instead appear in _WHITE_LIST_C_MODULES.
  _ENABLED_FILE_TYPES = (
      imp.PKG_DIRECTORY,
      imp.PY_SOURCE,
      imp.PY_COMPILED,
      imp.C_BUILTIN,
  )

  def __init__(self,
               module_dict,
               imp_module=imp,
               os_module=os,
               dummy_thread_module=dummy_thread,
               pickle_module=pickle):
    """Initializer.

    Args:
      module_dict: Module dictionary to use for managing system modules.
        Should be sys.modules.
      imp_module, os_module, dummy_thread_module, pickle_module: References to
        modules that exist in the dev_appserver that must be used by this class
        in order to function, even if these modules have been unloaded from
        sys.modules.
    """
    self._module_dict = module_dict
    self._imp = imp_module
    self._os = os_module
    self._dummy_thread = dummy_thread_module
    # Fixed: previously this stored the module-global 'pickle' instead of the
    # injected pickle_module parameter, defeating dependency injection for
    # the cPickle replacement in FindAndLoadModule().
    self._pickle = pickle_module
    self._indent_level = 0

  @Trace
  def find_module(self, fullname, path=None):
    """See PEP 302."""
    # thread and cPickle are always served by this hook so they can be
    # replaced with dummy_thread/pickle in FindAndLoadModule().
    if fullname in ('cPickle', 'thread'):
      return self

    search_path = path
    all_modules = fullname.split('.')
    try:
      # Walk each package level of the dotted name, loading parents so the
      # final component's search path can be determined.
      for index, current_module in enumerate(all_modules):
        current_module_fullname = '.'.join(all_modules[:index + 1])
        if (current_module_fullname == fullname and not
            self.StubModuleExists(fullname)):
          # Leaf module: only verify it can be found under the restrictions.
          self.FindModuleRestricted(current_module,
                                    current_module_fullname,
                                    search_path)
        else:
          # Parent package (or stubbed leaf): make sure it is loaded.
          if current_module_fullname in self._module_dict:
            module = self._module_dict[current_module_fullname]
          else:
            module = self.FindAndLoadModule(current_module,
                                            current_module_fullname,
                                            search_path)

          if hasattr(module, '__path__'):
            search_path = module.__path__
    except CouldNotFindModuleError:
      # PEP 302: returning None defers to the next finder on sys.meta_path.
      return None

    return self

  def StubModuleExists(self, name):
    """Check if the named module has a stub replacement."""
    if name in sys.builtin_module_names:
      name = 'py_%s' % name
    if name in dist.__all__:
      return True
    return False

  def ImportStubModule(self, name):
    """Import the stub module replacement for the specified module."""
    if name in sys.builtin_module_names:
      name = 'py_%s' % name
    module = __import__(dist.__name__, {}, {}, [name])
    return getattr(module, name)

  @Trace
  def FixModule(self, module):
    """Prunes and overrides restricted module attributes.

    Args:
      module: The module to prune. This should be a new module whose attributes
        reference back to the real module's __dict__ members.
    """
    if module.__name__ in self._WHITE_LIST_PARTIAL_MODULES:
      allowed_symbols = self._WHITE_LIST_PARTIAL_MODULES[module.__name__]
      for symbol in set(module.__dict__) - set(allowed_symbols):
        # Dunder attributes (__name__, __doc__, ...) are always kept.
        if not (symbol.startswith('__') and symbol.endswith('__')):
          del module.__dict__[symbol]

    if module.__name__ in self._MODULE_OVERRIDES:
      module.__dict__.update(self._MODULE_OVERRIDES[module.__name__])

  @Trace
  def FindModuleRestricted(self,
                           submodule,
                           submodule_fullname,
                           search_path):
    """Locates a module while enforcing module import restrictions.

    Args:
      submodule: The short name of the submodule (i.e., the last section of
        the fullname; for 'foo.bar' this would be 'bar').
      submodule_fullname: The fully qualified name of the module to find (e.g.,
        'foo.bar').
      search_path: List of paths to search for to find this module. Should be
        None if the current sys.path should be used.

    Returns:
      Tuple (source_file, pathname, description) where:
        source_file: File-like object that contains the module; in the case
          of packages, this will be None, which implies to look at __init__.py.
        pathname: String containing the full path of the module on disk.
        description: Tuple returned by imp.find_module().
      However, in the case of an import using a path hook (e.g. a zipfile),
      source_file will be a PEP-302-style loader object, pathname will be None,
      and description will be a tuple filled with None values.

    Raises:
      ImportError exception if the requested module was found, but importing
      it is disallowed.

      CouldNotFindModuleError exception if the request module could not even
      be found for import.
    """
    if search_path is None:
      # None is a magic "check the builtins first" entry for FindPathHook.
      search_path = [None] + sys.path
    for path_entry in search_path:
      result = self.FindPathHook(submodule, submodule_fullname, path_entry)
      if result is not None:
        source_file, pathname, description = result
        if description == (None, None, None):
          # Loader object from a path hook: no restriction checks possible.
          return result
        else:
          break
    else:
      self.log('Could not find module "%s"', submodule_fullname)
      raise CouldNotFindModuleError()

    suffix, mode, file_type = description

    # Disk-backed modules must live under an accessible path.
    if (file_type not in (self._imp.C_BUILTIN, self._imp.C_EXTENSION) and
        not FakeFile.IsFileAccessible(pathname)):
      error_message = 'Access to module file denied: %s' % pathname
      logging.debug(error_message)
      raise ImportError(error_message)

    # C extensions and other disabled file types are only allowed when
    # explicitly white-listed.
    if (file_type not in self._ENABLED_FILE_TYPES and
        submodule not in self._WHITE_LIST_C_MODULES):
      error_message = ('Could not import "%s": Disallowed C-extension '
                       'or built-in module' % submodule_fullname)
      logging.debug(error_message)
      raise ImportError(error_message)

    return source_file, pathname, description

  def FindPathHook(self, submodule, submodule_fullname, path_entry):
    """Helper for FindModuleRestricted to find a module in a sys.path entry.

    Args:
      submodule: The short name of the submodule being searched for.
      submodule_fullname: The fully qualified name of the module.
      path_entry: A single sys.path entry, or None representing the builtins.

    Returns:
      Either None (if nothing was found), or a triple (source_file, path_name,
      description). See the doc string for FindModuleRestricted() for the
      meaning of the latter.
    """
    if path_entry is None:
      # Only check the builtins if the name could be a top-level builtin.
      if submodule_fullname in sys.builtin_module_names:
        try:
          result = self._imp.find_module(submodule)
        except ImportError:
          pass
        else:
          source_file, pathname, description = result
          suffix, mode, file_type = description
          if file_type == self._imp.C_BUILTIN:
            return result
      return None

    # Resolve (and cache) the PEP-302 importer for this path entry, if any.
    if path_entry in sys.path_importer_cache:
      importer = sys.path_importer_cache[path_entry]
    else:
      importer = None
      for hook in sys.path_hooks:
        try:
          importer = hook(path_entry)
          break
        except ImportError:
          pass
      sys.path_importer_cache[path_entry] = importer

    if importer is None:
      # Ordinary filesystem entry: fall back to imp.
      try:
        return self._imp.find_module(submodule, [path_entry])
      except ImportError:
        pass
    else:
      # Path hook importer (e.g. zipimporter).
      loader = importer.find_module(submodule)
      if loader is not None:
        return (loader, None, (None, None, None))

    return None

  @Trace
  def LoadModuleRestricted(self,
                           submodule_fullname,
                           source_file,
                           pathname,
                           description):
    """Loads a module while enforcing module import restrictions.

    As a byproduct, the new module will be added to the module dictionary.

    Args:
      submodule_fullname: The fully qualified name of the module to find (e.g.,
        'foo.bar').
      source_file: File-like object that contains the module's source code,
        or a PEP-302-style loader object.
      pathname: String containing the full path of the module on disk.
      description: Tuple returned by imp.find_module(), or (None, None, None)
        in case source_file is a PEP-302-style loader object.

    Returns:
      The new module.

    Raises:
      ImportError exception of the specified module could not be loaded for
      whatever reason.
    """
    if description == (None, None, None):
      return source_file.load_module(submodule_fullname)

    try:
      try:
        return self._imp.load_module(submodule_fullname,
                                     source_file,
                                     pathname,
                                     description)
      except:
        # imp.load_module may leave a half-initialized module behind; purge
        # it so a retry starts clean, then propagate the original error.
        if submodule_fullname in self._module_dict:
          del self._module_dict[submodule_fullname]
        raise

    finally:
      if source_file is not None:
        source_file.close()

  @Trace
  def FindAndLoadModule(self,
                        submodule,
                        submodule_fullname,
                        search_path):
    """Finds and loads a module, loads it, and adds it to the module dictionary.

    Args:
      submodule: Name of the module to import (e.g., baz).
      submodule_fullname: Full name of the module to import (e.g., foo.bar.baz).
      search_path: Path to use for searching for this submodule. For top-level
        modules this should be None; otherwise it should be the __path__
        attribute from the parent package.

    Returns:
      A new module instance that has been inserted into the module dictionary
      supplied to __init__.

    Raises:
      ImportError exception if the module could not be loaded for whatever
      reason (e.g., missing, not allowed).
    """
    module = self._imp.new_module(submodule_fullname)

    # Special-cased replacements: thread -> dummy_thread, cPickle -> pickle,
    # os -> dev_appserver's own os (then pruned by FixModule below).
    if submodule_fullname == 'thread':
      module.__dict__.update(self._dummy_thread.__dict__)
      module.__name__ = 'thread'
    elif submodule_fullname == 'cPickle':
      module.__dict__.update(self._pickle.__dict__)
      module.__name__ = 'cPickle'
    elif submodule_fullname == 'os':
      module.__dict__.update(self._os.__dict__)
      self._module_dict['os.path'] = module.path
    elif self.StubModuleExists(submodule_fullname):
      module = self.ImportStubModule(submodule_fullname)
    else:
      source_file, pathname, description = self.FindModuleRestricted(submodule, submodule_fullname, search_path)
      module = self.LoadModuleRestricted(submodule_fullname,
                                         source_file,
                                         pathname,
                                         description)

    module.__loader__ = self
    self.FixModule(module)
    if submodule_fullname not in self._module_dict:
      self._module_dict[submodule_fullname] = module

    return module

  @Trace
  def GetParentPackage(self, fullname):
    """Retrieves the parent package of a fully qualified module name.

    Args:
      fullname: Full name of the module whose parent should be retrieved (e.g.,
        foo.bar).

    Returns:
      Module instance for the parent or None if there is no parent module.

    Raise:
      ImportError exception if the module's parent could not be found.
    """
    all_modules = fullname.split('.')
    parent_module_fullname = '.'.join(all_modules[:-1])
    if parent_module_fullname:
      # find_module has the side effect of loading all parent packages into
      # self._module_dict, which is what makes the lookup below valid.
      if self.find_module(fullname) is None:
        raise ImportError('Could not find module %s' % fullname)

      return self._module_dict[parent_module_fullname]
    return None

  @Trace
  def GetParentSearchPath(self, fullname):
    """Determines the search path of a module's parent package.

    Args:
      fullname: Full name of the module to look up (e.g., foo.bar).

    Returns:
      Tuple (submodule, search_path) where:
        submodule: The last portion of the module name from fullname (e.g.,
          if fullname is foo.bar, then this is bar).
        search_path: List of paths that belong to the parent package's search
          path or None if there is no parent package.

    Raises:
      ImportError exception if the module or its parent could not be found.
    """
    submodule = GetSubmoduleName(fullname)
    parent_package = self.GetParentPackage(fullname)
    search_path = None
    if parent_package is not None and hasattr(parent_package, '__path__'):
      search_path = parent_package.__path__
    return submodule, search_path

  @Trace
  def GetModuleInfo(self, fullname):
    """Determines the path on disk and the search path of a module or package.

    Args:
      fullname: Full name of the module to look up (e.g., foo.bar).

    Returns:
      Tuple (pathname, search_path, submodule) where:
        pathname: String containing the full path of the module on disk,
          or None if the module wasn't loaded from disk (e.g. from a zipfile).
        search_path: List of paths that belong to the found package's search
          path or None if found module is not a package.
        submodule: The relative name of the submodule that's being imported.
    """
    submodule, search_path = self.GetParentSearchPath(fullname)
    source_file, pathname, description = self.FindModuleRestricted(submodule, fullname, search_path)
    suffix, mode, file_type = description
    module_search_path = None

    if file_type == self._imp.PKG_DIRECTORY:
      # For packages, report the package's __init__.py as the module file.
      module_search_path = [pathname]
      pathname = os.path.join(pathname, '__init__%spy' % os.extsep)

    return pathname, module_search_path, submodule

  @Trace
  def load_module(self, fullname):
    """See PEP 302."""
    all_modules = fullname.split('.')
    submodule = all_modules[-1]
    parent_module_fullname = '.'.join(all_modules[:-1])
    search_path = None
    if parent_module_fullname and parent_module_fullname in self._module_dict:
      parent_module = self._module_dict[parent_module_fullname]
      if hasattr(parent_module, '__path__'):
        search_path = parent_module.__path__

    return self.FindAndLoadModule(submodule, fullname, search_path)

  @Trace
  def is_package(self, fullname):
    """See PEP 302 extensions."""
    submodule, search_path = self.GetParentSearchPath(fullname)
    source_file, pathname, description = self.FindModuleRestricted(submodule, fullname, search_path)
    suffix, mode, file_type = description

    if file_type == self._imp.PKG_DIRECTORY:
      return True

    return False

  @Trace
  def get_source(self, fullname):
    """See PEP 302 extensions."""
    full_path, search_path, submodule = self.GetModuleInfo(fullname)
    if full_path is None:
      return None
    source_file = open(full_path)
    try:
      return source_file.read()
    finally:
      source_file.close()

  @Trace
  def get_code(self, fullname):
    """See PEP 302 extensions."""
    full_path, search_path, submodule = self.GetModuleInfo(fullname)
    if full_path is None:
      return None
    source_file = open(full_path)
    try:
      source_code = source_file.read()
    finally:
      source_file.close()

    # compile() requires Unix-style newlines and a trailing newline.
    source_code = source_code.replace('\r\n', '\n')
    if not source_code.endswith('\n'):
      source_code += '\n'

    return compile(source_code, full_path, 'exec')
def ModuleHasValidMainFunction(module):
  """Determines if a module has a main function that takes no arguments.

  This includes functions that have arguments with defaults that are all
  assigned, thus requiring no additional arguments in order to be called.

  Args:
    module: A types.ModuleType instance.

  Returns:
    True if the module has a valid, reusable main function; False otherwise.
  """
  main = getattr(module, 'main', None)
  if not isinstance(main, types.FunctionType):
    return False

  # inspect.getargspec was removed in Python 3.11; getfullargspec is a
  # drop-in superset (args are element 0, defaults element 3 in both).
  argspec_func = getattr(inspect, 'getfullargspec', None)
  if argspec_func is None:
    argspec_func = inspect.getargspec
  spec = argspec_func(main)
  arg_names = spec[0]
  default_values = spec[3]

  if not arg_names:
    return True
  # Every positional argument has a default, so main() is callable as-is.
  return default_values is not None and len(arg_names) == len(default_values)
def GetScriptModuleName(handler_path):
  """Determines the fully-qualified Python module name of a script on disk.

  Args:
    handler_path: CGI path stored in the application configuration (as a path
      like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.

  Returns:
    String containing the corresponding module name (e.g., 'foo.bar.baz').
  """
  if handler_path.startswith(PYTHON_LIB_VAR + '/'):
    handler_path = handler_path[len(PYTHON_LIB_VAR):]
  normalized = os.path.normpath(handler_path)

  # Cut at the right-most '.py' occurrence, when one exists.
  suffix_start = normalized.rfind('.py')
  if suffix_start != -1:
    normalized = normalized[:suffix_start]

  # Turn path separators into dots, then trim and collapse stray dots.
  dotted = normalized.replace(os.sep, '.')
  dotted = re.sub(r'\.+', '.', dotted.strip('.'))

  # A package's __init__ module is addressed by the package name itself.
  init_suffix = '.__init__'
  if dotted.endswith(init_suffix):
    dotted = dotted[:-len(init_suffix)]

  return dotted
def FindMissingInitFiles(cgi_path, module_fullname, isfile=os.path.isfile):
  """Determines which __init__.py files are missing from a module's parent
  packages.

  Args:
    cgi_path: Absolute path of the CGI module file on disk.
    module_fullname: Fully qualified Python module name used to import the
      cgi_path module.
    isfile: Used for dependency injection.

  Returns:
    List containing the paths to the missing __init__.py files.
  """
  missing_init_files = []

  if cgi_path.endswith('.py'):
    module_base = os.path.dirname(cgi_path)
  else:
    module_base = cgi_path

  # One directory level per dot in the module name; packages (directories or
  # __init__.py files) need their own __init__.py too.
  depth_count = module_fullname.count('.')
  if cgi_path.endswith('__init__.py') or not cgi_path.endswith('.py'):
    depth_count += 1

  # range (not the Python 2-only xrange) keeps this portable; the loop is
  # tiny, so materializing the sequence costs nothing on Python 2.
  for _ in range(depth_count):
    current_init_file = os.path.abspath(
        os.path.join(module_base, '__init__.py'))

    if not isfile(current_init_file):
      missing_init_files.append(current_init_file)

    module_base = os.path.abspath(os.path.join(module_base, os.pardir))

  return missing_init_files
def LoadTargetModule(handler_path,
                     cgi_path,
                     import_hook,
                     module_dict=sys.modules):
  """Loads a target CGI script by importing it as a Python module.

  If the module for the target CGI script has already been loaded before,
  the new module will be loaded in its place using the same module object,
  possibly overwriting existing module attributes.

  Args:
    handler_path: CGI path stored in the application configuration (as a path
      like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
    cgi_path: Absolute path to the CGI script file on disk.
    import_hook: Instance of HardenedModulesHook to use for module loading.
    module_dict: Used for dependency injection.

  Returns:
    Tuple (module_fullname, script_module, module_code) where:
      module_fullname: Fully qualified module name used to import the script.
      script_module: The ModuleType object corresponding to the module_fullname.
        If the module has not already been loaded, this will be an empty
        shell of a module.
      module_code: Code object (returned by compile built-in) corresponding
        to the cgi_path to run. If the script_module was previously loaded
        and has a main() function that can be reused, this will be None.
  """
  module_fullname = GetScriptModuleName(handler_path)
  script_module = module_dict.get(module_fullname)
  module_code = None
  if script_module != None and ModuleHasValidMainFunction(script_module):
    # Fast path: the module is already loaded and exposes a reusable
    # zero-argument main(), so there is no code object to (re)compile.
    logging.debug('Reusing main() function of module "%s"', module_fullname)
  else:
    if script_module is None:
      script_module = imp.new_module(module_fullname)
      script_module.__loader__ = import_hook
    try:
      # Normal path: let the hardened import hook compile the script and
      # report its on-disk location / package search path.
      module_code = import_hook.get_code(module_fullname)
      full_path, search_path, submodule = import_hook.GetModuleInfo(module_fullname)
      script_module.__file__ = full_path
      if search_path is not None:
        script_module.__path__ = search_path
    except:
      # NOTE(review): the bare except is deliberate -- any failure during
      # hook-based loading triggers the standalone-compile fallback below,
      # and the original exception is re-raised only if that also fails.
      exc_type, exc_value, exc_tb = sys.exc_info()
      import_error_message = str(exc_type)
      if exc_value:
        import_error_message += ': ' + str(exc_value)
      logging.exception('Encountered error loading module "%s": %s',
                        module_fullname, import_error_message)
      missing_inits = FindMissingInitFiles(cgi_path, module_fullname)
      if missing_inits:
        logging.warning('Missing package initialization files: %s',
                        ', '.join(missing_inits))
      else:
        logging.error('Parent package initialization files are present, '
                      'but must be broken')
      # Fallback: compile the CGI script directly from disk, bypassing the
      # package/import machinery entirely.
      independent_load_successful = True
      if not os.path.isfile(cgi_path):
        independent_load_successful = False
      else:
        try:
          source_file = open(cgi_path)
          try:
            module_code = compile(source_file.read(), cgi_path, 'exec')
            script_module.__file__ = cgi_path
          finally:
            source_file.close()
        except OSError:
          independent_load_successful = False
      if not independent_load_successful:
        # Python 2 three-argument raise: re-raise the original exception
        # with its traceback preserved.
        raise exc_type, exc_value, exc_tb
    module_dict[module_fullname] = script_module
  return module_fullname, script_module, module_code
def ExecuteOrImportScript(handler_path, cgi_path, import_hook):
  """Executes a CGI script by importing it as a new module; possibly reuses
  the module's main() function if it is defined and takes no arguments.

  Basic technique lifted from PEP 338 and Python2.5's runpy module. See:
    http://www.python.org/dev/peps/pep-0338/

  See the section entitled "Import Statements and the Main Module" to understand
  why a module named '__main__' cannot do relative imports. To get around this,
  the requested module's path could be added to sys.path on each request.

  Args:
    handler_path: CGI path stored in the application configuration (as a path
      like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
    cgi_path: Absolute path to the CGI script file on disk.
    import_hook: Instance of HardenedModulesHook to use for module loading.

  Returns:
    True if the response code had an error status (e.g., 404), or False if it
    did not.

  Raises:
    Any kind of exception that could have been raised when loading the target
    module, running a target script, or executing the application code itself.
  """
  module_fullname, script_module, module_code = LoadTargetModule(
      handler_path, cgi_path, import_hook)
  # Run the script under the name __main__, as a real CGI invocation would.
  script_module.__name__ = '__main__'
  sys.modules['__main__'] = script_module
  try:
    if module_code:
      # Fresh code object: execute it in the module's namespace
      # (Python 2 'exec ... in ...' statement form).
      exec module_code in script_module.__dict__
    else:
      # Previously loaded module with a reusable zero-argument main().
      script_module.main()
    # The CGI wrote headers + body to sys.stdout (a seekable buffer here,
    # not a real pipe). Rewind to parse the response headers, then seek back
    # to the end so subsequent writes append.
    sys.stdout.flush()
    sys.stdout.seek(0)
    try:
      headers = mimetools.Message(sys.stdout)
    finally:
      sys.stdout.seek(0, 2)
    status_header = headers.get('status')
    error_response = False
    if status_header:
      try:
        status_code = int(status_header.split(' ', 1)[0])
        error_response = status_code >= 400
      except ValueError:
        # Malformed status line counts as an error response.
        error_response = True
    if not error_response:
      try:
        parent_package = import_hook.GetParentPackage(module_fullname)
      except Exception:
        parent_package = None
      if parent_package is not None:
        # Re-attach the module to its parent package under its real name so
        # later imports observe the freshly executed module.
        submodule = GetSubmoduleName(module_fullname)
        setattr(parent_package, submodule, script_module)
    return error_response
  finally:
    # Restore the module's real name regardless of how execution ended.
    script_module.__name__ = module_fullname
def ExecuteCGI(root_path,
               handler_path,
               cgi_path,
               env,
               infile,
               outfile,
               module_dict,
               exec_script=ExecuteOrImportScript):
  """Executes Python file in this process as if it were a CGI.

  Does not return an HTTP response line. CGIs should output headers followed by
  the body content.

  The modules in sys.modules should be the same before and after the CGI is
  executed, with the specific exception of encodings-related modules, which
  cannot be reloaded and thus must always stay in sys.modules.

  Args:
    root_path: Path to the root of the application.
    handler_path: CGI path stored in the application configuration (as a path
      like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.
    cgi_path: Absolute path to the CGI script file on disk.
    env: Dictionary of environment variables to use for the execution.
    infile: File-like object to read HTTP request input data from.
    outfile: FIle-like object to write HTTP response data to.
    module_dict: Dictionary in which application-loaded modules should be
      preserved between requests. This removes the need to reload modules that
      are reused between requests, significantly increasing load performance.
      This dictionary must be separate from the sys.modules dictionary.
    exec_script: Used for dependency injection.
  """
  # Snapshot every piece of process-global state the CGI may touch; it is all
  # restored in the finally block below.
  old_module_dict = sys.modules.copy()
  old_builtin = __builtin__.__dict__.copy()
  old_argv = sys.argv
  old_stdin = sys.stdin
  old_stdout = sys.stdout
  old_env = os.environ.copy()
  old_cwd = os.getcwd()
  old_file_type = types.FileType
  # NOTE(review): reset_modules is assigned but never read in this function;
  # presumably vestigial -- confirm before removing.
  reset_modules = False
  try:
    ClearAllButEncodingsModules(sys.modules)
    sys.modules.update(module_dict)
    sys.argv = [cgi_path]
    sys.stdin = infile
    sys.stdout = outfile
    os.environ.clear()
    os.environ.update(env)
    # NOTE(review): before_path is only bound inside this try; if an earlier
    # statement raised, the finally block below would hit a NameError.
    before_path = sys.path[:]
    cgi_dir = os.path.normpath(os.path.dirname(cgi_path))
    root_path = os.path.normpath(os.path.abspath(root_path))
    if cgi_dir.startswith(root_path + os.sep):
      os.chdir(cgi_dir)
    else:
      os.chdir(root_path)
    # Install the restricted import hook and the sandboxed file type for the
    # duration of the request.
    hook = HardenedModulesHook(sys.modules)
    sys.meta_path = [hook]
    if hasattr(sys, 'path_importer_cache'):
      sys.path_importer_cache.clear()
    __builtin__.file = FakeFile
    __builtin__.open = FakeFile
    types.FileType = FakeFile
    __builtin__.buffer = NotImplementedFakeClass
    logging.debug('Executing CGI with env:\n%s', pprint.pformat(env))
    try:
      reset_modules = exec_script(handler_path, cgi_path, hook)
    except SystemExit, e:
      # sys.exit() from a CGI is a normal termination, not an error.
      logging.debug('CGI exited with status: %s', e)
    except:
      reset_modules = True
      raise
  finally:
    # Uninstall the import hook, preserve the application's modules for the
    # next request, then restore the dev_appserver's own global state.
    sys.meta_path = []
    sys.path_importer_cache.clear()
    _ClearTemplateCache(sys.modules)
    module_dict.update(sys.modules)
    ClearAllButEncodingsModules(sys.modules)
    sys.modules.update(old_module_dict)
    __builtin__.__dict__.update(old_builtin)
    sys.argv = old_argv
    sys.stdin = old_stdin
    sys.stdout = old_stdout
    sys.path[:] = before_path
    os.environ.clear()
    os.environ.update(old_env)
    os.chdir(old_cwd)
    types.FileType = old_file_type
class CGIDispatcher(URLDispatcher):
  """Dispatcher that executes Python CGI scripts."""

  def __init__(self,
               module_dict,
               root_path,
               path_adjuster,
               setup_env=SetupEnvironment,
               exec_cgi=ExecuteCGI,
               create_logging_handler=ApplicationLoggingHandler):
    """Initializer.

    Args:
      module_dict: Dictionary in which application-loaded modules should be
        preserved between requests. This dictionary must be separate from the
        sys.modules dictionary.
      root_path: Path to the root of the application.
      path_adjuster: Instance of PathAdjuster to use for finding absolute
        paths of CGI files on disk.
      setup_env, exec_cgi, create_logging_handler: Used for dependency
        injection.
    """
    self._module_dict = module_dict
    self._root_path = root_path
    self._path_adjuster = path_adjuster
    self._setup_env = setup_env
    self._exec_cgi = exec_cgi
    self._create_logging_handler = create_logging_handler

  def Dispatch(self,
               relative_url,
               path,
               headers,
               infile,
               outfile,
               base_env_dict=None):
    """Dispatches the Python CGI."""
    # Capture the application's log output for this request and keep the
    # root logger's level from leaking between requests.
    log_handler = self._create_logging_handler()
    logging.getLogger().addHandler(log_handler)
    saved_level = logging.root.level
    try:
      cgi_env = dict(base_env_dict) if base_env_dict else {}
      script_path = self._path_adjuster.AdjustPath(path)
      cgi_env.update(self._setup_env(script_path, relative_url, headers))
      self._exec_cgi(self._root_path,
                     path,
                     script_path,
                     cgi_env,
                     infile,
                     outfile,
                     self._module_dict)
      log_handler.AddDebuggingConsole(relative_url, cgi_env, outfile)
    finally:
      logging.root.level = saved_level
      logging.getLogger().removeHandler(log_handler)

  def __str__(self):
    """Returns a string representation of this dispatcher."""
    return 'CGI dispatcher'
class LocalCGIDispatcher(CGIDispatcher):
  """Dispatcher that executes local functions like they're CGIs.

  The contents of sys.modules will be preserved for local CGIs running this
  dispatcher, but module hardening will still occur for any new imports. Thus,
  be sure that any local CGIs have loaded all of their dependent modules
  _before_ they are executed.
  """

  def __init__(self, module_dict, path_adjuster, cgi_func):
    """Initializer.

    Args:
      module_dict: Passed to CGIDispatcher.
      path_adjuster: Passed to CGIDispatcher.
      cgi_func: Callable function taking no parameters that should be
        executed in a CGI environment in the current process.
    """
    self._cgi_func = cgi_func

    def run_local_script(*unused_args, **unused_kwargs):
      # Invoke the local function in place of a script; returning False
      # signals "no error response" to ExecuteCGI.
      cgi_func()
      return False

    def run_local_cgi(*args, **kwargs):
      # Force ExecuteCGI to run our local function instead of a script.
      kwargs['exec_script'] = run_local_script
      return ExecuteCGI(*args, **kwargs)

    CGIDispatcher.__init__(self,
                           module_dict,
                           '',
                           path_adjuster,
                           exec_cgi=run_local_cgi)

  def Dispatch(self, *args, **kwargs):
    """Preserves sys.modules for CGIDispatcher.Dispatch."""
    self._module_dict.update(sys.modules)
    CGIDispatcher.Dispatch(self, *args, **kwargs)

  def __str__(self):
    """Returns a string representation of this dispatcher."""
    return 'Local CGI dispatcher for %s' % self._cgi_func
class PathAdjuster(object):
  """Adjusts application file paths to paths relative to the application or
  external library directories."""

  def __init__(self, root_path):
    """Initializer.

    Args:
      root_path: Path to the root of the application running on the server.
    """
    self._root_path = os.path.abspath(root_path)

  def AdjustPath(self, path):
    """Adjusts application file path to paths relative to the application or
    external library directories.

    Handler paths that start with $PYTHON_LIB will be converted to paths
    relative to the google directory.

    Args:
      path: File path that should be adjusted.

    Returns:
      The adjusted path.
    """
    if not path.startswith(PYTHON_LIB_VAR):
      # Ordinary handler path: resolve it relative to the application root.
      return os.path.join(self._root_path, path)

    # $PYTHON_LIB/... maps onto the directory containing the 'google'
    # package (i.e. the SDK installation root).
    sdk_root = os.path.dirname(os.path.dirname(google.__file__))
    return os.path.join(sdk_root, path[len(PYTHON_LIB_VAR) + 1:])
class StaticFileConfigMatcher(object):
  """Keeps track of file/directory specific application configuration.

  Specifically:
  - Computes mime type based on URLMap and file extension.
  - Decides on cache expiration time based on URLMap and default expiration.

  To determine the mime type, we first see if there is any mime-type property
  on each URLMap entry. If none is specified, we use the mimetypes module to
  guess the mime type from the file path extension, and use
  application/octet-stream if we can't find the mimetype.
  """

  def __init__(self,
               url_map_list,
               path_adjuster,
               default_expiration):
    """Initializer.

    Args:
      url_map_list: List of appinfo.URLMap objects.
        If empty or None, then we always use the mime type chosen by the
        mimetypes module.
      path_adjuster: PathAdjuster object used to adjust application file paths.
      default_expiration: String describing default expiration time for browser
        based caching of static files. If set to None this disallows any
        browser caching of static content.
    """
    # NOTE(review): path_adjuster is accepted but never used in this class --
    # presumably kept for interface compatibility; confirm before removing.
    if default_expiration is not None:
      self._default_expiration = appinfo.ParseExpiration(default_expiration)
    else:
      self._default_expiration = None
    # List of (compiled_path_regex, mime_type_or_None, expiration_seconds)
    # tuples, in configuration order.
    self._patterns = []
    if url_map_list:
      for entry in url_map_list:
        handler_type = entry.GetHandlerType()
        if handler_type not in (appinfo.STATIC_FILES, appinfo.STATIC_DIR):
          continue
        if handler_type == appinfo.STATIC_FILES:
          regex = entry.upload + '$'
        else:
          # static_dir: match any file below the directory.
          path = entry.static_dir
          if path[-1] == '/':
            path = path[:-1]
          regex = re.escape(path + os.path.sep) + r'(.*)'
        try:
          path_re = re.compile(regex)
        except re.error, e:
          raise InvalidAppConfigError('regex %s does not compile: %s' %
                                      (regex, e))
        if self._default_expiration is None:
          # Caching disabled globally: ignore any per-entry expiration.
          expiration = 0
        elif entry.expiration is None:
          expiration = self._default_expiration
        else:
          expiration = appinfo.ParseExpiration(entry.expiration)
        self._patterns.append((path_re, entry.mime_type, expiration))

  def IsStaticFile(self, path):
    """Tests if the given path points to a "static" file.

    Args:
      path: String containing the file's path relative to the app.

    Returns:
      Boolean, True if the file was configured to be static.
    """
    for (path_re, _, _) in self._patterns:
      if path_re.match(path):
        return True
    return False

  def GetMimeType(self, path):
    """Returns the mime type that we should use when serving the specified file.

    Args:
      path: String containing the file's path relative to the app.

    Returns:
      String containing the mime type to use. Will be 'application/octet-stream'
      if we have no idea what it should be.
    """
    # Only an entry with an explicit mime-type property wins; entries that
    # match but have no mime_type fall through to extension guessing.
    for (path_re, mime_type, expiration) in self._patterns:
      if mime_type is not None:
        the_match = path_re.match(path)
        if the_match:
          return mime_type
    filename, extension = os.path.splitext(path)
    return mimetypes.types_map.get(extension, 'application/octet-stream')

  def GetExpiration(self, path):
    """Returns the cache expiration duration to be used for the given file.

    Args:
      path: String containing the file's path relative to the app.

    Returns:
      Integer number of seconds to be used for browser cache expiration time.
    """
    # First matching entry decides the expiration.
    for (path_re, mime_type, expiration) in self._patterns:
      the_match = path_re.match(path)
      if the_match:
        return expiration
    return self._default_expiration or 0
def ReadDataFile(data_path, openfile=file):
  """Reads a file on disk, returning a corresponding HTTP status and data.

  Args:
    data_path: Path to the file on disk to read.
    openfile: Used for dependency injection.

  Returns:
    Tuple (status, data) where status is an HTTP response code, and data is
      the data read; will be an empty string if an error occurred or the
      file was empty.
  """
  status = httplib.INTERNAL_SERVER_ERROR
  data = ""
  try:
    data_file = openfile(data_path, 'rb')
    try:
      data = data_file.read()
    finally:
      data_file.close()
    # Only reached when open/read/close all succeeded.
    status = httplib.OK
  except (OSError, IOError), e:
    logging.error('Error encountered reading file "%s":\n%s', data_path, e)
    # Map "file does not exist" errnos to 404; everything else to 403.
    if e.errno in FILE_MISSING_EXCEPTIONS:
      status = httplib.NOT_FOUND
    else:
      status = httplib.FORBIDDEN
  return status, data
class FileDispatcher(URLDispatcher):
  """Dispatcher that serves static file content directly from disk."""

  def __init__(self,
               path_adjuster,
               static_file_config_matcher,
               read_data_file=ReadDataFile):
    """Initializer.

    Args:
      path_adjuster: Instance of PathAdjuster to use for finding absolute
        paths of data files on disk.
      static_file_config_matcher: StaticFileConfigMatcher object.
      read_data_file: Used for dependency injection.
    """
    self._path_adjuster = path_adjuster
    self._static_file_config_matcher = static_file_config_matcher
    self._read_data_file = read_data_file

  def Dispatch(self,
               relative_url,
               path,
               headers,
               infile,
               outfile,
               base_env_dict=None):
    """Reads the file and returns the response status and data."""
    content_type = self._static_file_config_matcher.GetMimeType(path)
    expiration = self._static_file_config_matcher.GetExpiration(path)

    disk_path = self._path_adjuster.AdjustPath(path)
    status, data = self._read_data_file(disk_path)

    outfile.write('Status: %d\r\n' % status)
    outfile.write('Content-type: %s\r\n' % content_type)
    if expiration:
      expires_value = email.Utils.formatdate(time.time() + expiration,
                                             usegmt=True)
      outfile.write('Expires: %s\r\n' % expires_value)
      outfile.write('Cache-Control: public, max-age=%i\r\n' % expiration)
    outfile.write('\r\n')
    outfile.write(data)

  def __str__(self):
    """Returns a string representation of this dispatcher."""
    return 'File dispatcher'
_IGNORE_RESPONSE_HEADERS = frozenset([
    'accept-encoding',
    'content-encoding',
    'date',
    'server',
    'transfer-encoding',
])


def IgnoreHeadersRewriter(status_code, status_message, headers, body):
  """Drop response headers that applications may not set themselves.

  Certain response headers cannot be modified by an Application. For a
  complete list of these headers please see:
    http://code.google.com/appengine/docs/webapp/responseclass.html#Disallowed_HTTP_Response_Headers

  This rewriter simply removes those headers.
  """
  for forbidden in _IGNORE_RESPONSE_HEADERS:
    if forbidden in headers:
      del headers[forbidden]

  return status_code, status_message, headers, body
def ParseStatusRewriter(status_code, status_message, headers, body):
  """Parse status header, if it exists.

  Handles the server-side 'status' header, which instructs the server to change
  the HTTP response code accordingly. Handles the 'location' header, which
  issues an HTTP 302 redirect to the client. Also corrects the 'content-length'
  header to reflect actual content length in case extra information has been
  appended to the response body.

  If the 'status' header supplied by the client is invalid, this method will
  set the response to a 500 with an error message as content.
  """
  location_value = headers.getheader('location')
  status_value = headers.getheader('status')
  if status_value:
    response_status = status_value
    # The 'status' header is internal; strip it from the outgoing response.
    del headers['status']
  elif location_value:
    # A Location header with no explicit status implies a 302 redirect.
    response_status = '%d Redirecting' % httplib.FOUND
  else:
    # Neither header present: pass the response through unchanged.
    return status_code, status_message, headers, body

  # 'status' is formatted as '<code> <optional message>'; pad the split so a
  # missing message becomes the empty string.
  status_parts = response_status.split(' ', 1)
  status_code, status_message = (status_parts + [''])[:2]
  try:
    status_code = int(status_code)
  except ValueError:
    # Non-numeric status code: report a 500 with an explanatory body.
    status_code = 500
    body = cStringIO.StringIO('Error: Invalid "status" header value returned.')

  return status_code, status_message, headers, body
def CacheRewriter(status_code, status_message, headers, body):
  """Ensure a Cache-Control header is present, defaulting to 'no-cache'."""
  if 'Cache-Control' not in headers:
    headers['Cache-Control'] = 'no-cache'
  return status_code, status_message, headers, body
def ContentLengthRewriter(status_code, status_message, headers, body):
  """Rewrite the Content-Length header.

  Even though Content-Length is not a user modifiable header, App Engine
  sends a correct Content-Length to the user based on the actual response.
  The length counted is only the unread remainder of the body stream; the
  stream's read position is restored afterwards.
  """
  start_position = body.tell()
  body.seek(0, 2)
  headers['Content-Length'] = str(body.tell() - start_position)
  body.seek(start_position)
  return status_code, status_message, headers, body
def CreateResponseRewritersChain():
  """Create the default response rewriter chain.

  A response rewriter is a function that gets a final chance to change part
  of the dev_appserver's response.  A rewriter is not like a dispatcher in
  that it is called after every request has been handled by the dispatchers
  regardless of which dispatcher was used.

  The order in which rewriters are registered will be the order in which they
  are used to rewrite the response.  Modifications from earlier rewriters
  are used as input to later rewriters.

  A response rewriter is a function that can rewrite the request in any way.
  The function can return modified values or the original values it was
  passed.

  A rewriter function has the following parameters and return values:

    Args:
      status_code: Status code of response from dev_appserver or previous
        rewriter.
      status_message: Text corresponding to status code.
      headers: mimetools.Message instance with parsed headers.  NOTE: These
        headers can contain its own 'status' field, but the default
        dev_appserver implementation will remove this.  Future rewriters
        should avoid re-introducing the status field and return new codes
        instead.
      body: File object containing the body of the response.  The position of
        this file may not be at the start of the file.  Any content before the
        file's position is considered not to be part of the final body.

    Returns:
      status_code: Rewritten status code or original.
      status_message: Rewritten message or original.
      headers: Rewritten/modified headers or original.
      body: Rewritten/modified body or original.

  Returns:
    List of response rewriters.
  """
  # Order matters: headers are sanitized first, then the 'status' header is
  # parsed, then caching defaults applied, then the final length computed.
  return [IgnoreHeadersRewriter,
          ParseStatusRewriter,
          CacheRewriter,
          ContentLengthRewriter,
         ]
def RewriteResponse(response_file, response_rewriters=None):
  """Allows final rewrite of dev_appserver response.

  This function receives the unparsed HTTP response from the application
  or internal handler, parses out the basic structure and feeds that structure
  in to a chain of response rewriters.

  It also makes sure the final HTTP headers are properly terminated.

  For more about response rewriters, please see documentation for
  CreateResponseRewritersChain.

  Args:
    response_file: File-like object containing the full HTTP response including
      the response code, all headers, and the request body.
    response_rewriters: A list of response rewriters.  If none is provided it
      will create a new chain using CreateResponseRewritersChain.

  Returns:
    Tuple (status_code, status_message, header, body) where:
      status_code: Integer HTTP response status (e.g., 200, 302, 404, 500)
      status_message: String containing an informational message about the
        response code, possibly derived from the 'status' header, if supplied.
      header: String containing the HTTP headers of the response, without
        a trailing new-line (CRLF).
      body: String containing the body of the response.
  """
  if response_rewriters is None:
    response_rewriters = CreateResponseRewritersChain()

  # Defaults used when no rewriter overrides them (e.g. no 'status' header).
  status_code = 200
  status_message = 'Good to go'
  # Parsing the headers leaves response_file positioned at the body start.
  headers = mimetools.Message(response_file)

  # Each rewriter receives the output of the previous one.
  for response_rewriter in response_rewriters:
    status_code, status_message, headers, response_file = response_rewriter(
      status_code,
      status_message,
      headers,
      response_file)

  # Normalize line endings: strip whatever terminator each raw header line
  # carried, then rejoin with CRLF as HTTP requires.
  header_list = []
  for header in headers.headers:
    header = header.rstrip('\n')
    header = header.rstrip('\r')
    header_list.append(header)

  header_data = '\r\n'.join(header_list) + '\r\n'
  return status_code, status_message, header_data, response_file.read()
class ModuleManager(object):
  """Manages loaded modules in the runtime.

  Responsible for monitoring and reporting about file modification times.
  Modules can be loaded from source or precompiled byte-code files.  When a
  file has source code, the ModuleManager monitors the modification time of
  the source file even if the module itself is loaded from byte-code.
  """

  def __init__(self, modules):
    """Initializer.

    Args:
      modules: Dictionary containing monitored modules.
    """
    self._modules = modules
    # Snapshot of the initial module set; ResetModules() restores it so the
    # application's modules are re-imported on the next request.
    self._default_modules = self._modules.copy()
    # Copy of sys.path_hooks at startup, restored on reset in case imports
    # performed by the application installed extra hooks.
    self._save_path_hooks = sys.path_hooks[:]
    # Maps module name -> (mtime, filename) as of the last call to
    # UpdateModuleFileModificationTimes().
    self._modification_times = {}

  @staticmethod
  def GetModuleFile(module, is_file=os.path.isfile):
    """Helper method to try to determine modules source file.

    Args:
      module: Module object to get file for.
      is_file: Function used to determine if a given path is a file.

    Returns:
      Path of the module's corresponding Python source file if it exists, or
      just the module's compiled Python file. If the module has an invalid
      __file__ attribute, None will be returned.
    """
    module_file = getattr(module, '__file__', None)
    if module_file is None:
      return None

    # Truncate after the last 'py' to turn '.pyc'/'.pyo' into '.py'.
    # NOTE(review): this assumes __file__ ends in .py/.pyc/.pyo; a 'py'
    # occurring later in the path would confuse it -- confirm acceptable.
    source_file = module_file[:module_file.rfind('py') + 2]

    if is_file(source_file):
      return source_file
    return module.__file__

  def AreModuleFilesModified(self):
    """Determines if any monitored files have been modified.

    Returns:
      True if one or more files have been modified, False otherwise.
    """
    for name, (mtime, fname) in self._modification_times.iteritems():
      # Skip modules that were unloaded after the snapshot was taken.
      if name not in self._modules:
        continue

      module = self._modules[name]

      # A deleted source file counts as a modification.
      if not os.path.isfile(fname):
        return True

      if mtime != os.path.getmtime(fname):
        return True
    return False

  def UpdateModuleFileModificationTimes(self):
    """Records the current modification times of all monitored modules.
    """
    self._modification_times.clear()
    for name, module in self._modules.items():
      if not isinstance(module, types.ModuleType):
        continue
      module_file = self.GetModuleFile(module)
      if not module_file:
        continue
      try:
        self._modification_times[name] = (os.path.getmtime(module_file),
                                          module_file)
      except OSError, e:
        # A module's file may legitimately be gone by now; only re-raise
        # errors that do not indicate a missing file.
        if e.errno not in FILE_MISSING_EXCEPTIONS:
          raise e

  def ResetModules(self):
    """Clear modules so that when request is run they are reloaded."""
    self._modules.clear()
    self._modules.update(self._default_modules)
    # Slice-assign so existing references to sys.path_hooks stay valid.
    sys.path_hooks[:] = self._save_path_hooks
def _ClearTemplateCache(module_dict=sys.modules):
"""Clear template cache in webapp.template module.
Attempts to load template module. Ignores failure. If module loads, the
template cache is cleared.
"""
template_module = module_dict.get('google.appengine.ext.webapp.template')
if template_module is not None:
template_module.template_cache.clear()
def CreateRequestHandler(root_path,
                         login_url,
                         require_indexes=False,
                         static_caching=True):
  """Creates a new BaseHTTPRequestHandler sub-class for use with the Python
  BaseHTTPServer module's HTTP server.

  Python's built-in HTTP server does not support passing context information
  along to instances of its request handlers. This function gets around that
  by creating a sub-class of the handler in a closure that has access to
  this context information.

  Args:
    root_path: Path to the root of the application running on the server.
    login_url: Relative URL which should be used for handling user logins.
    require_indexes: True if index.yaml is read-only gospel; default False.
    static_caching: True if browser caching of static files should be allowed.

  Returns:
    Sub-class of BaseHTTPRequestHandler.
  """
  application_module_dict = SetupSharedModules(sys.modules)

  if require_indexes:
    # index.yaml is authoritative; never rewrite it.
    index_yaml_updater = None
  else:
    index_yaml_updater = dev_appserver_index.IndexYamlUpdater(root_path)

  application_config_cache = AppConfigCache()

  class DevAppServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Dispatches URLs using patterns from a URLMatcher, which is created by
    loading an application's configuration file. Executes CGI scripts in the
    local process so the scripts can use mock versions of APIs.

    HTTP requests that correctly specify a user info cookie
    (dev_appserver_login.COOKIE_NAME) will have the 'USER_EMAIL' environment
    variable set accordingly. If the user is also an admin, the
    'USER_IS_ADMIN' variable will exist and be set to '1'. If the user is not
    logged in, 'USER_EMAIL' will be set to the empty string.

    On each request, raises an InvalidAppConfigError exception if the
    application configuration file in the directory specified by the root_path
    argument is invalid.
    """
    server_version = 'Development/1.0'

    # Class-level state is shared by every handler instance so loaded
    # application modules and the parsed configuration survive requests.
    module_dict = application_module_dict
    module_manager = ModuleManager(application_module_dict)

    config_cache = application_config_cache

    rewriter_chain = CreateResponseRewritersChain()

    def __init__(self, *args, **kwargs):
      """Initializer.

      Args:
        args, kwargs: Positional and keyword arguments passed to the
          constructor of the super class.
      """
      BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)

    def version_string(self):
      """Returns server's version string used for Server HTTP header."""
      return self.server_version

    def do_GET(self):
      """Handle GET requests."""
      self._HandleRequest()

    def do_POST(self):
      """Handles POST requests."""
      self._HandleRequest()

    def do_PUT(self):
      """Handle PUT requests."""
      self._HandleRequest()

    def do_HEAD(self):
      """Handle HEAD requests."""
      self._HandleRequest()

    def do_OPTIONS(self):
      """Handles OPTIONS requests."""
      self._HandleRequest()

    def do_DELETE(self):
      """Handle DELETE requests."""
      self._HandleRequest()

    def do_TRACE(self):
      """Handles TRACE requests."""
      self._HandleRequest()

    def _HandleRequest(self):
      """Handles any type of request and prints exceptions if they occur."""
      # The Host header may carry a port; keep only the host part.
      server_name = self.headers.get('host') or self.server.server_name
      server_name = server_name.split(':', 1)[0]

      env_dict = {
        'REQUEST_METHOD': self.command,
        'REMOTE_ADDR': self.client_address[0],
        'SERVER_SOFTWARE': self.server_version,
        'SERVER_NAME': server_name,
        'SERVER_PROTOCOL': self.protocol_version,
        'SERVER_PORT': str(self.server.server_port),
      }

      full_url = GetFullURL(server_name, self.server.server_port, self.path)
      if len(full_url) > MAX_URL_LENGTH:
        msg = 'Requested URI too long: %s' % full_url
        logging.error(msg)
        self.send_response(httplib.REQUEST_URI_TOO_LONG, msg)
        return

      # cgitb renders an HTML traceback directly to the client socket.
      tbhandler = cgitb.Hook(file=self.wfile).handle
      try:
        # Reload application modules whenever a monitored file changed.
        if self.module_manager.AreModuleFilesModified():
          self.module_manager.ResetModules()

        implicit_matcher = CreateImplicitMatcher(self.module_dict,
                                                 root_path,
                                                 login_url)
        config, explicit_matcher = LoadAppConfig(root_path, self.module_dict,
                                                 cache=self.config_cache,
                                                 static_caching=static_caching)
        if config.api_version != API_VERSION:
          logging.error("API versions cannot be switched dynamically: %r != %r"
                        % (config.api_version, API_VERSION))
          sys.exit(1)
        env_dict['CURRENT_VERSION_ID'] = config.version + ".1"
        env_dict['APPLICATION_ID'] = config.application
        # Implicit (internal) URLs take precedence over app-configured ones.
        dispatcher = MatcherDispatcher(login_url,
                                       [implicit_matcher, explicit_matcher])

        if require_indexes:
          # Fail requests that need indexes not present in index.yaml.
          dev_appserver_index.SetupIndexes(config.application, root_path)

        # Buffer the whole request body in memory before dispatching.
        infile = cStringIO.StringIO(self.rfile.read(
            int(self.headers.get('content-length', 0))))

        request_size = len(infile.getvalue())
        if request_size > MAX_REQUEST_SIZE:
          msg = ('HTTP request was too large: %d. The limit is: %d.'
                 % (request_size, MAX_REQUEST_SIZE))
          logging.error(msg)
          self.send_response(httplib.REQUEST_ENTITY_TOO_LARGE, msg)
          return

        outfile = cStringIO.StringIO()
        try:
          dispatcher.Dispatch(self.path,
                              None,
                              self.headers,
                              infile,
                              outfile,
                              base_env_dict=env_dict)
        finally:
          # Snapshot mtimes even on failure so the next request can detect
          # edits made while this one ran.
          self.module_manager.UpdateModuleFileModificationTimes()

        outfile.flush()
        outfile.seek(0)

        status_code, status_message, header_data, body = RewriteResponse(outfile, self.rewriter_chain)

        runtime_response_size = len(outfile.getvalue())
        if runtime_response_size > MAX_RUNTIME_RESPONSE_SIZE:
          # Replace the oversized response with a 403; the stale
          # Content-Length header must be dropped along with the body.
          status_code = 403
          status_message = 'Forbidden'
          new_headers = []
          for header in header_data.split('\n'):
            if not header.lower().startswith('content-length'):
              new_headers.append(header)
          header_data = '\n'.join(new_headers)
          body = ('HTTP response was too large: %d. The limit is: %d.'
                  % (runtime_response_size, MAX_RUNTIME_RESPONSE_SIZE))
      except yaml_errors.EventListenerError, e:
        title = 'Fatal error when loading application configuration'
        msg = '%s:\n%s' % (title, str(e))
        logging.error(msg)
        self.send_response(httplib.INTERNAL_SERVER_ERROR, title)
        self.wfile.write('Content-Type: text/html\n\n')
        self.wfile.write('<pre>%s</pre>' % cgi.escape(msg))
      except:
        msg = 'Exception encountered handling request'
        logging.exception(msg)
        self.send_response(httplib.INTERNAL_SERVER_ERROR, msg)
        tbhandler()
      else:
        try:
          self.send_response(status_code, status_message)
          self.wfile.write(header_data)
          self.wfile.write('\r\n')
          if self.command != 'HEAD':
            self.wfile.write(body)
          elif body:
            logging.warning('Dropping unexpected body in response '
                            'to HEAD request')
        except (IOError, OSError), e:
          # A client that hung up early produces EPIPE; that is benign.
          if e.errno != errno.EPIPE:
            raise e
        except socket.error, e:
          if len(e.args) >= 1 and e.args[0] != errno.EPIPE:
            raise e
        else:
          # Only update index.yaml after a fully successful request.
          if index_yaml_updater is not None:
            index_yaml_updater.UpdateIndexYaml()

    def log_error(self, format, *args):
      """Redirect error messages through the logging module."""
      logging.error(format, *args)

    def log_message(self, format, *args):
      """Redirect log messages through the logging module."""
      logging.info(format, *args)

  return DevAppServerRequestHandler
def ReadAppConfig(appinfo_path, parse_app_config=appinfo.LoadSingleAppInfo):
  """Reads app.yaml file and returns its app id and list of URLMap instances.

  Args:
    appinfo_path: String containing the path to the app.yaml file.
    parse_app_config: Used for dependency injection.

  Returns:
    AppInfoExternal instance.

  Raises:
    If the config file could not be read or the config does not contain any
    URLMap instances, this function will raise an InvalidAppConfigError
    exception.
  """
  try:
    appinfo_file = file(appinfo_path, 'r')
  except IOError, e:
    # The underlying OS error is summarized in the message; 'e' is
    # intentionally unused beyond triggering this branch.
    raise InvalidAppConfigError(
      'Application configuration could not be read from "%s"' % appinfo_path)
  try:
    return parse_app_config(appinfo_file)
  finally:
    # Always release the file handle, even if parsing raises.
    appinfo_file.close()
def CreateURLMatcherFromMaps(root_path,
                             url_map_list,
                             module_dict,
                             default_expiration,
                             create_url_matcher=URLMatcher,
                             create_cgi_dispatcher=CGIDispatcher,
                             create_file_dispatcher=FileDispatcher,
                             create_path_adjuster=PathAdjuster,
                             normpath=os.path.normpath):
  """Creates a URLMatcher instance from URLMap.

  Creates all of the correct URLDispatcher instances to handle the various
  content types in the application configuration.

  Args:
    root_path: Path to the root of the application running on the server.
    url_map_list: List of appinfo.URLMap objects to initialize this
      matcher with. Can be an empty list if you would like to add patterns
      manually.
    module_dict: Dictionary in which application-loaded modules should be
      preserved between requests. This dictionary must be separate from the
      sys.modules dictionary.
    default_expiration: String describing default expiration time for browser
      based caching of static files.  If set to None this disallows any
      browser caching of static content.
    create_url_matcher, create_cgi_dispatcher, create_file_dispatcher,
    create_path_adjuster, normpath: Used for dependency injection.

  Returns:
    Instance of URLMatcher with the supplied URLMap objects properly loaded.

  Raises:
    InvalidAppConfigError: if a handler has an unrecognized type.
  """
  url_matcher = create_url_matcher()
  path_adjuster = create_path_adjuster(root_path)
  cgi_dispatcher = create_cgi_dispatcher(module_dict, root_path, path_adjuster)
  static_file_config_matcher = StaticFileConfigMatcher(url_map_list,
                                                       path_adjuster,
                                                       default_expiration)
  file_dispatcher = create_file_dispatcher(path_adjuster,
                                           static_file_config_matcher)

  FakeFile.SetStaticFileConfigMatcher(static_file_config_matcher)

  for url_map in url_map_list:
    admin_only = url_map.login == appinfo.LOGIN_ADMIN
    # Admin-only URLs implicitly require login.
    requires_login = url_map.login == appinfo.LOGIN_REQUIRED or admin_only

    handler_type = url_map.GetHandlerType()
    if handler_type == appinfo.HANDLER_SCRIPT:
      dispatcher = cgi_dispatcher
    elif handler_type in (appinfo.STATIC_FILES, appinfo.STATIC_DIR):
      dispatcher = file_dispatcher
    else:
      raise InvalidAppConfigError('Unknown handler type "%s"' % handler_type)

    regex = url_map.url
    path = url_map.GetHandler()
    if handler_type == appinfo.STATIC_DIR:
      # For static directories the URL prefix is matched literally and the
      # remainder of the URL becomes the file path under the handler dir.
      if regex[-1] == r'/':
        regex = regex[:-1]
      if path[-1] == os.path.sep:
        path = path[:-1]
      regex = '/'.join((re.escape(regex), '(.*)'))
      if os.path.sep == '\\':
        # On Windows the backreference itself must be backslash-escaped.
        backref = r'\\1'
      else:
        backref = r'\1'
      # Escape path backslashes so they survive regex template expansion.
      path = (normpath(path).replace('\\', '\\\\') +
              os.path.sep + backref)

    url_matcher.AddURL(regex,
                       dispatcher,
                       path,
                       requires_login, admin_only)

  return url_matcher
class AppConfigCache(object):
  """Cache used by LoadAppConfig.

  If given to LoadAppConfig instances of this class are used to cache contents
  of the app config (app.yaml or app.yml) and the Matcher created from it.

  Code outside LoadAppConfig should treat instances of this class as opaque
  objects and not access its members.
  """

  # Absolute path of the app.yaml/app.yml file the cached values came from.
  path = None
  # Modification time of that file when it was last parsed.
  mtime = None
  # Parsed application configuration (AppInfoExternal).
  config = None
  # URLMatcher built from the parsed configuration.
  matcher = None
def LoadAppConfig(root_path,
                  module_dict,
                  cache=None,
                  static_caching=True,
                  read_app_config=ReadAppConfig,
                  create_matcher=CreateURLMatcherFromMaps):
  """Creates a Matcher instance for an application configuration file.

  Raises an InvalidAppConfigError exception if there is anything wrong with
  the application configuration file.

  Args:
    root_path: Path to the root of the application to load.
    module_dict: Dictionary in which application-loaded modules should be
      preserved between requests. This dictionary must be separate from the
      sys.modules dictionary.
    cache: Instance of AppConfigCache or None.
    static_caching: True if browser caching of static files should be allowed.
    read_app_config, create_matcher: Used for dependency injection.

  Returns:
    tuple: (AppInfoExternal, URLMatcher)

  Raises:
    AppConfigNotFoundError: if neither app.yaml nor app.yml exists under
      root_path.
  """
  for appinfo_path in [os.path.join(root_path, 'app.yaml'),
                       os.path.join(root_path, 'app.yml')]:
    if os.path.isfile(appinfo_path):
      if cache is not None:
        mtime = os.path.getmtime(appinfo_path)
        if cache.path == appinfo_path and cache.mtime == mtime:
          # Cache hit: the config file is unchanged since the last parse.
          return (cache.config, cache.matcher)

        # Invalidate before parsing so a failed parse cannot leave stale
        # entries behind; the new mtime is recorded up front.
        cache.config = cache.matcher = cache.path = None
        cache.mtime = mtime

      try:
        config = read_app_config(appinfo_path, appinfo.LoadSingleAppInfo)

        if static_caching:
          if config.default_expiration:
            default_expiration = config.default_expiration
          else:
            # No app-specified default: expire immediately ('0' seconds).
            default_expiration = '0'
        else:
          # None disables browser caching of static content entirely.
          default_expiration = None

        matcher = create_matcher(root_path,
                                 config.handlers,
                                 module_dict,
                                 default_expiration)

        FakeFile.SetSkippedFiles(config.skip_files)

        if cache is not None:
          cache.path = appinfo_path
          cache.config = config
          cache.matcher = matcher

        return (config, matcher)
      except gexcept.AbstractMethod:
        # NOTE(review): swallowed so the next candidate config file is
        # tried -- confirm this fall-through is still intended.
        pass

  raise AppConfigNotFoundError
def ReadCronConfig(croninfo_path, parse_cron_config=croninfo.LoadSingleCron):
  """Reads cron.yaml file and returns a list of CronEntry instances.

  Args:
    croninfo_path: String containing the path to the cron.yaml file.
    parse_cron_config: Used for dependency injection.

  Returns:
    A CronInfoExternal object.

  Raises:
    If the config file is unreadable, empty or invalid, this function will
    raise an InvalidAppConfigError or a MalformedCronConfiguration exception.
  """
  try:
    croninfo_file = file(croninfo_path, 'r')
  except IOError, e:
    # The underlying OS error is summarized in the message; 'e' is
    # intentionally unused beyond triggering this branch.
    raise InvalidAppConfigError(
      'Cron configuration could not be read from "%s"' % croninfo_path)
  try:
    return parse_cron_config(croninfo_file)
  finally:
    # Always release the file handle, even if parsing raises.
    croninfo_file.close()
def SetupStubs(app_id, **config):
  """Sets up testing stubs of APIs.

  Registers local, in-process stand-ins for each production App Engine API
  in the global apiproxy so application code runs without real backends.

  Args:
    app_id: Application ID being served.

  Keywords:
    login_url: Relative URL which should be used for handling user
      login/logout.
    datastore_path: Path to the file to store Datastore file stub data in.
    history_path: Path to the file to store Datastore history in.
    clear_datastore: If the datastore and history should be cleared on
      startup.
    require_indexes: True if index.yaml is read-only gospel; default False.
    smtp_host: SMTP host used for sending test mail.
    smtp_port: SMTP port.
    smtp_user: SMTP user.
    smtp_password: SMTP password.
    enable_sendmail: Whether to use sendmail as an alternative to SMTP.
    show_mail_body: Whether to log the body of emails.
    remove: Used for dependency injection.
  """
  login_url = config['login_url']
  datastore_path = config['datastore_path']
  history_path = config['history_path']
  clear_datastore = config['clear_datastore']
  require_indexes = config.get('require_indexes', False)
  smtp_host = config.get('smtp_host', None)
  smtp_port = config.get('smtp_port', 25)
  smtp_user = config.get('smtp_user', '')
  smtp_password = config.get('smtp_password', '')
  enable_sendmail = config.get('enable_sendmail', False)
  show_mail_body = config.get('show_mail_body', False)
  remove = config.get('remove', os.remove)

  os.environ['APPLICATION_ID'] = app_id

  if clear_datastore:
    for path in (datastore_path, history_path):
      # lexists: also remove dangling symlinks.
      if os.path.lexists(path):
        logging.info('Attempting to remove file at %s', path)
        try:
          remove(path)
        except OSError, e:
          # Best-effort removal: a leftover file only means old data
          # survives into this run.
          logging.warning('Removing file failed: %s', e)

  apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()

  datastore = datastore_file_stub.DatastoreFileStub(
      app_id, datastore_path, history_path, require_indexes=require_indexes)
  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore)

  # The login URL gets a continue-param placeholder; the logout URL is the
  # same template with the logout param appended.
  fixed_login_url = '%s?%s=%%s' % (login_url,
                                   dev_appserver_login.CONTINUE_PARAM)
  fixed_logout_url = '%s&%s' % (fixed_login_url,
                                dev_appserver_login.LOGOUT_PARAM)

  apiproxy_stub_map.apiproxy.RegisterStub(
    'user',
    user_service_stub.UserServiceStub(login_url=fixed_login_url,
                                      logout_url=fixed_logout_url))

  apiproxy_stub_map.apiproxy.RegisterStub(
    'urlfetch',
    urlfetch_stub.URLFetchServiceStub())

  apiproxy_stub_map.apiproxy.RegisterStub(
    'mail',
    mail_stub.MailServiceStub(smtp_host,
                              smtp_port,
                              smtp_user,
                              smtp_password,
                              enable_sendmail=enable_sendmail,
                              show_mail_body=show_mail_body))

  apiproxy_stub_map.apiproxy.RegisterStub(
    'memcache',
    memcache_stub.MemcacheServiceStub())

  apiproxy_stub_map.apiproxy.RegisterStub(
    'capability_service',
    capability_stub.CapabilityServiceStub())

  try:
    from google.appengine.api.images import images_stub
    apiproxy_stub_map.apiproxy.RegisterStub(
      'images',
      images_stub.ImagesServiceStub())
  except ImportError, e:
    # PIL is optional: without it, register a stub whose calls raise
    # NotImplementedError instead of failing at import time.
    logging.warning('Could not initialize images API; you are likely missing '
                    'the Python "PIL" module. ImportError: %s', e)
    from google.appengine.api.images import images_not_implemented_stub
    apiproxy_stub_map.apiproxy.RegisterStub('images',
                                            images_not_implemented_stub.ImagesNotImplementedServiceStub())
def CreateImplicitMatcher(module_dict,
                          root_path,
                          login_url,
                          create_path_adjuster=PathAdjuster,
                          create_local_dispatcher=LocalCGIDispatcher,
                          create_cgi_dispatcher=CGIDispatcher):
  """Creates a URLMatcher instance that handles internal URLs.

  Used to facilitate handling user login/logout, debugging, info about the
  currently running app, etc.

  Args:
    module_dict: Dictionary in the form used by sys.modules.
    root_path: Path to the root of the application.
    login_url: Relative URL which should be used for handling user
      login/logout.
    create_path_adjuster: Used for dependency injection.
    create_local_dispatcher: Used for dependency injection.
    create_cgi_dispatcher: Used for dependency injection.

  Returns:
    Instance of URLMatcher with appropriate dispatchers.
  """
  url_matcher = URLMatcher()
  path_adjuster = create_path_adjuster(root_path)

  # The login handler runs in-process (not as a CGI script on disk), hence
  # the empty path and the local dispatcher.
  login_dispatcher = create_local_dispatcher(sys.modules, path_adjuster,
                                             dev_appserver_login.main)
  url_matcher.AddURL(login_url,
                     login_dispatcher,
                     '',
                     False,
                     False)

  # The development admin console is served as a regular CGI application.
  admin_dispatcher = create_cgi_dispatcher(module_dict, root_path,
                                           path_adjuster)
  url_matcher.AddURL('/_ah/admin(?:/.*)?',
                     admin_dispatcher,
                     DEVEL_CONSOLE_PATH,
                     False,
                     False)

  return url_matcher
def SetupTemplates(template_dir):
  """Reads debugging console template files and initializes the console.

  Does nothing if templates have already been initialized.

  Args:
    template_dir: Path to the directory containing the templates files.

  Raises:
    OSError or IOError if any of the template files could not be read.
  """
  if ApplicationLoggingHandler.AreTemplatesInitialized():
    return

  try:
    header = open(os.path.join(template_dir, HEADER_TEMPLATE)).read()
    script = open(os.path.join(template_dir, SCRIPT_TEMPLATE)).read()
    middle = open(os.path.join(template_dir, MIDDLE_TEMPLATE)).read()
    footer = open(os.path.join(template_dir, FOOTER_TEMPLATE)).read()
  except (OSError, IOError):
    # Log which directory failed before propagating so startup errors are
    # easy to diagnose.
    logging.error('Could not read template files from %s', template_dir)
    raise

  ApplicationLoggingHandler.InitializeTemplates(header, script, middle, footer)
def CreateServer(root_path,
                 login_url,
                 port,
                 template_dir,
                 serve_address='',
                 require_indexes=False,
                 allow_skipped_files=False,
                 static_caching=True,
                 python_path_list=sys.path,
                 sdk_dir=os.path.dirname(os.path.dirname(google.__file__))):
  """Creates an new HTTPServer for an application.

  The sdk_dir argument must be specified for the directory storing all code for
  the SDK so as to allow for the sandboxing of module access to work for any
  and all SDK code. While typically this is where the 'google' package lives,
  it can be in another location because of API version support.

  Args:
    root_path: String containing the path to the root directory of the
      application where the app.yaml file is.
    login_url: Relative URL which should be used for handling user
      login/logout.
    port: Port to start the application server on.
    template_dir: Path to the directory in which the debug console templates
      are stored.
    serve_address: Address on which the server should serve.
    require_indexes: True if index.yaml is read-only gospel; default False.
    allow_skipped_files: True if the application may read files matched by
      its skip_files configuration; default False.
    static_caching: True if browser caching of static files should be allowed.
    python_path_list: Used for dependency injection.
    sdk_dir: Directory where the SDK is stored.

  Returns:
    Instance of BaseHTTPServer.HTTPServer that's ready to start accepting.
  """
  absolute_root_path = os.path.realpath(root_path)

  SetupTemplates(template_dir)
  # Restrict the sandboxed file API to the app, the SDK, and the templates.
  FakeFile.SetAllowedPaths(absolute_root_path,
                           [sdk_dir,
                            template_dir])
  FakeFile.SetAllowSkippedFiles(allow_skipped_files)

  handler_class = CreateRequestHandler(absolute_root_path,
                                       login_url,
                                       require_indexes,
                                       static_caching)

  # Put the application root first on the path so its modules are importable.
  if absolute_root_path not in python_path_list:
    python_path_list.insert(0, absolute_root_path)

  return BaseHTTPServer.HTTPServer((serve_address, port), handler_class)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tool for performing authenticated RPCs against App Engine."""
import cookielib
import logging
import os
import re
import socket
import sys
import urllib
import urllib2
# Default HTTPS handler; replaced below with a certificate-validating one
# when both the https_wrapper module and the CA bundle are available.
https_handler = urllib2.HTTPSHandler
uses_cert_verification = False
certpath = os.path.join(os.path.dirname(__file__), "cacerts.txt")
cert_file_available = os.path.exists(certpath)
try:
  import https_wrapper
  if cert_file_available:
    # Factory (not an instance) so each opener gets its own handler.
    https_handler = lambda: https_wrapper.CertValidatingHTTPSHandler(
        ca_certs=certpath)
    uses_cert_verification = True
except ImportError:
  # https_wrapper is optional; fall back to non-validating HTTPS.
  pass
def GetPlatformToken(os_module=os, sys_module=sys, platform=sys.platform):
  """Returns a 'User-agent' token for the host system platform.

  Args:
    os_module, sys_module, platform: Used for testing.

  Returns:
    String containing the platform token for the host system.
  """
  if hasattr(sys_module, "getwindowsversion"):
    # Windows: '<platform>/<major>.<minor>.<build>.<platform_id>'.
    version = sys_module.getwindowsversion()
    return "%s/%s" % (platform, ".".join(str(part) for part in version[:4]))
  if hasattr(os_module, "uname"):
    # POSIX: '<sysname>/<release>'.
    name_info = os_module.uname()
    return "%s/%s" % (name_info[0], name_info[2])
  return "unknown"
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""

  def __init__(self, url, code, msg, headers, args):
    # fp=None: there is no response body stream to expose for this error.
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    self.args = args
    # ClientLogin reports the failure kind in the 'Error' response field.
    self.reason = args["Error"]
class AbstractRpcServer(object):
"""Provides a common interface for a simple RPC server."""
def __init__(self, host, auth_function, user_agent, source,
host_override=None, extra_headers=None, save_cookies=False,
auth_tries=3, account_type=None):
"""Creates a new HttpRpcServer.
Args:
host: The host to send requests to.
auth_function: A function that takes no arguments and returns an
(email, password) tuple when called. Will be called if authentication
is required.
user_agent: The user-agent string to send to the server. Specify None to
omit the user-agent header.
source: The source to specify in authentication requests.
host_override: The host header to send to the server (defaults to host).
extra_headers: A dict of extra headers to append to every request. Values
supplied here will override other default headers that are supplied.
save_cookies: If True, save the authentication cookies to local disk.
If False, use an in-memory cookiejar instead. Subclasses must
implement this functionality. Defaults to False.
auth_tries: The number of times to attempt auth_function before failing.
account_type: One of GOOGLE, HOSTED_OR_GOOGLE, or None for automatic.
"""
self.host = host
self.host_override = host_override
self.auth_function = auth_function
self.source = source
self.authenticated = False
self.auth_tries = auth_tries
self.account_type = account_type
self.extra_headers = {}
if user_agent:
self.extra_headers["User-Agent"] = user_agent
if extra_headers:
self.extra_headers.update(extra_headers)
self.save_cookies = save_cookies
self.cookie_jar = cookielib.MozillaCookieJar()
self.opener = self._GetOpener()
if self.host_override:
logging.info("Server: %s; Host: %s", self.host, self.host_override)
else:
logging.info("Server: %s", self.host)
if ((self.host_override and self.host_override == "localhost") or
self.host == "localhost" or self.host.startswith("localhost:")):
self._DevAppServerAuthenticate()
def _GetOpener(self):
"""Returns an OpenerDirector for making HTTP requests.
Returns:
A urllib2.OpenerDirector object.
"""
raise NotImplemented()
def _CreateRequest(self, url, data=None):
"""Creates a new urllib request."""
req = urllib2.Request(url, data=data)
if self.host_override:
req.add_header("Host", self.host_override)
for key, value in self.extra_headers.iteritems():
req.add_header(key, value)
return req
def _GetAuthToken(self, email, password):
"""Uses ClientLogin to authenticate the user, returning an auth token.
Args:
email: The user's email address
password: The user's password
Raises:
ClientLoginError: If there was an error authenticating with ClientLogin.
HTTPError: If there was some other form of HTTP error.
Returns:
The authentication token returned by ClientLogin.
"""
account_type = self.account_type
if not account_type:
if (self.host.split(':')[0].endswith(".google.com")
or (self.host_override
and self.host_override.split(':')[0].endswith(".google.com"))):
account_type = "HOSTED_OR_GOOGLE"
else:
account_type = "GOOGLE"
data = {
"Email": email,
"Passwd": password,
"service": "ah",
"source": self.source,
"accountType": account_type
}
req = self._CreateRequest(
url="https://www.google.com/accounts/ClientLogin",
data=urllib.urlencode(data))
try:
response = self.opener.open(req)
response_body = response.read()
response_dict = dict(x.split("=")
for x in response_body.split("\n") if x)
return response_dict["Auth"]
except urllib2.HTTPError, e:
if e.code == 403:
body = e.read()
response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
raise ClientLoginError(req.get_full_url(), e.code, e.msg,
e.headers, response_dict)
else:
raise
def _GetAuthCookie(self, auth_token):
"""Fetches authentication cookies for an authentication token.
Args:
auth_token: The authentication token returned by ClientLogin.
Raises:
HTTPError: If there was an error fetching the authentication cookies.
"""
continue_location = "http://localhost/"
args = {"continue": continue_location, "auth": auth_token}
login_path = os.environ.get("APPCFG_LOGIN_PATH", "/_ah")
req = self._CreateRequest("http://%s%s/login?%s" %
(self.host, login_path, urllib.urlencode(args)))
try:
response = self.opener.open(req)
except urllib2.HTTPError, e:
response = e
if (response.code != 302 or
response.info()["location"] != continue_location):
raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
response.headers, response.fp)
self.authenticated = True
def _Authenticate(self):
"""Authenticates the user.
The authentication process works as follows:
1) We get a username and password from the user
2) We use ClientLogin to obtain an AUTH token for the user
(see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
3) We pass the auth token to /_ah/login on the server to obtain an
authentication cookie. If login was successful, it tries to redirect
us to the URL we provided.
If we attempt to access the upload API without first obtaining an
authentication cookie, it returns a 401 response and directs us to
authenticate ourselves with ClientLogin.
"""
for unused_i in range(self.auth_tries):
credentials = self.auth_function()
try:
auth_token = self._GetAuthToken(credentials[0], credentials[1])
except ClientLoginError, e:
if e.reason == "BadAuthentication":
print >>sys.stderr, "Invalid username or password."
continue
if e.reason == "CaptchaRequired":
print >>sys.stderr, (
"Please go to\n"
"https://www.google.com/accounts/DisplayUnlockCaptcha\n"
"and verify you are a human. Then try again.")
break
if e.reason == "NotVerified":
print >>sys.stderr, "Account not verified."
break
if e.reason == "TermsNotAgreed":
print >>sys.stderr, "User has not agreed to TOS."
break
if e.reason == "AccountDeleted":
print >>sys.stderr, "The user account has been deleted."
break
if e.reason == "AccountDisabled":
print >>sys.stderr, "The user account has been disabled."
break
if e.reason == "ServiceDisabled":
print >>sys.stderr, ("The user's access to the service has been "
"disabled.")
break
if e.reason == "ServiceUnavailable":
print >>sys.stderr, "The service is not available; try again later."
break
raise
self._GetAuthCookie(auth_token)
return
def _DevAppServerAuthenticate(self):
"""Authenticates the user on the dev_appserver."""
credentials = self.auth_function()
self.extra_headers["Cookie"] = ('dev_appserver_login="%s:True"; Path=/;' %
(credentials[0],))
def Send(self, request_path, payload="",
content_type="application/octet-stream",
timeout=None,
**kwargs):
"""Sends an RPC and returns the response.
Args:
request_path: The path to send the request to, eg /api/appversion/create.
payload: The body of the request, or None to send an empty request.
content_type: The Content-Type header to use.
timeout: timeout in seconds; default None i.e. no timeout.
(Note: for large requests on OS X, the timeout doesn't work right.)
kwargs: Any keyword arguments are converted into query string parameters.
Returns:
The response body, as a string.
"""
old_timeout = socket.getdefaulttimeout()
socket.setdefaulttimeout(timeout)
try:
tries = 0
while True:
tries += 1
args = dict(kwargs)
url = "http://%s%s?%s" % (self.host, request_path,
urllib.urlencode(args))
req = self._CreateRequest(url=url, data=payload)
req.add_header("Content-Type", content_type)
req.add_header("X-appcfg-api-version", "1")
try:
f = self.opener.open(req)
response = f.read()
f.close()
return response
except urllib2.HTTPError, e:
logging.debug("Got http error, this is try #%s" % tries)
if tries > self.auth_tries:
raise
elif e.code == 401:
self._Authenticate()
elif e.code >= 500 and e.code < 600:
continue
elif e.code == 302:
loc = e.info()["location"]
logging.debug("Got 302 redirect. Location: %s" % loc)
if loc.startswith("https://www.google.com/accounts/ServiceLogin"):
self._Authenticate()
elif re.match(r"https://www.google.com/a/[a-z0-9.-]+/ServiceLogin",
loc):
self.account_type = "HOSTED"
self._Authenticate()
elif loc.startswith("http://%s/_ah/login" % (self.host,)):
self._DevAppServerAuthenticate()
else:
raise
finally:
socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""

  # Location on disk where authentication cookies persist when
  # save_cookies is enabled.
  DEFAULT_COOKIE_FILE_PATH = "~/.appcfg_cookies"

  def _Authenticate(self):
    """Save the cookie jar after authentication."""
    if cert_file_available and not uses_cert_verification:
      # The CA bundle exists but validation could not be enabled (see module
      # top): warn that the HTTPS connection is unverified.
      logging.warn("ssl module not found. Without this the identity of the "
                   "remote host cannot be verified, and connections are NOT "
                   "secure. To fix this, please install the ssl module from "
                   "http://pypi.python.org/pypi/ssl")
    super(HttpRpcServer, self)._Authenticate()
    if self.cookie_jar.filename is not None and self.save_cookies:
      logging.info("Saving authentication cookies to %s" %
                   self.cookie_jar.filename)
      self.cookie_jar.save()

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    # Built by hand (not urllib2.build_opener) so that no redirect handler is
    # installed: 302 responses must surface to the caller (see Send).
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    opener.add_handler(https_handler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_jar.filename = os.path.expanduser(
          HttpRpcServer.DEFAULT_COOKIE_FILE_PATH)
      if os.path.exists(self.cookie_jar.filename):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          logging.info("Loaded authentication cookies from %s" %
                       self.cookie_jar.filename)
        except (OSError, IOError, cookielib.LoadError), e:
          # Unreadable/corrupt cookie file: fall back to in-memory cookies.
          logging.debug("Could not load authentication cookies; %s: %s",
                        e.__class__.__name__, e)
          self.cookie_jar.filename = None
      else:
        try:
          # Pre-create the cookie file with owner-only permissions (0600)
          # since it will hold credentials.
          fd = os.open(self.cookie_jar.filename, os.O_CREAT, 0600)
          os.close(fd)
        except (OSError, IOError), e:
          logging.debug("Could not create authentication cookies file; %s: %s",
                        e.__class__.__name__, e)
          self.cookie_jar.filename = None
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Helper CGI for logins/logout in the development application server.
This CGI has these parameters:
continue: URL to redirect to after a login or logout has completed.
email: Email address to set for the client.
admin: If 'True', the client should be logged in as an admin.
action: What action to take ('Login' or 'Logout').
To view the current user information and a form for logging in and out,
supply no parameters.
"""
import Cookie
import cgi
import os
import sys
import urllib
# CGI parameter names recognized by this login handler.
CONTINUE_PARAM = 'continue'
EMAIL_PARAM = 'email'
ADMIN_PARAM = 'admin'
ACTION_PARAM = 'action'
# Values accepted for the 'action' parameter.
LOGOUT_ACTION = 'Logout'
LOGIN_ACTION = 'Login'
# Pre-built query-string fragment that triggers a logout.
LOGOUT_PARAM = 'action=%s' % LOGOUT_ACTION
# Cookie carrying the fake login state ("email:admin") in the dev server.
COOKIE_NAME = 'dev_appserver_login'
def GetUserInfo(http_cookie, cookie_name=COOKIE_NAME):
  """Get the requestor's user info from the HTTP cookie in the CGI environment.

  Args:
    http_cookie: Value of the HTTP_COOKIE environment variable.
    cookie_name: Name of the cookie that stores the user info.

  Returns:
    Tuple (email, admin) where:
      email: The user's email address, if any.
      admin: True if the user is an admin; False otherwise.
  """
  parsed = Cookie.SimpleCookie(http_cookie)
  if cookie_name in parsed:
    payload = parsed[cookie_name].value
  else:
    payload = ''
  # Pad so a missing or malformed cookie still yields both fields.
  fields = payload.split(':')
  while len(fields) < 2:
    fields.append('')
  return fields[0], fields[1] == 'True'
def CreateCookieData(email, admin):
  """Creates cookie payload data.

  Args:
    email, admin: Parameters to incorporate into the cookie.

  Returns:
    String containing the cookie payload ("email:True" or "email:False").
  """
  return '%s:%s' % (email, 'True' if admin else 'False')
def SetUserInfoCookie(email, admin, cookie_name=COOKIE_NAME):
  """Creates a cookie to set the user information for the requestor.

  Args:
    email: Email to set for the user.
    admin: True if the user should be admin; False otherwise.
    cookie_name: Name of the cookie that stores the user info.

  Returns:
    'Set-Cookie' header for setting the user info of the requestor.
  """
  cookie_value = CreateCookieData(email, admin)
  set_cookie = Cookie.SimpleCookie()
  set_cookie[cookie_name] = cookie_value
  # Scope the cookie to the whole site so every handler sees the login state.
  set_cookie[cookie_name]['path'] = '/'
  # str(SimpleCookie) renders a full 'Set-Cookie: ...' header line.
  return '%s\r\n' % set_cookie
def ClearUserInfoCookie(cookie_name=COOKIE_NAME):
  """Clears the user info cookie from the requestor, logging them out.

  Args:
    cookie_name: Name of the cookie that stores the user info.

  Returns:
    'Set-Cookie' header for clearing the user info of the requestor.
  """
  set_cookie = Cookie.SimpleCookie()
  set_cookie[cookie_name] = ''
  set_cookie[cookie_name]['path'] = '/'
  # max-age=0 tells the browser to discard the cookie immediately.
  set_cookie[cookie_name]['max-age'] = '0'
  return '%s\r\n' % set_cookie
LOGIN_TEMPLATE = """<html>
<head>
<title>Login</title>
</head>
<body>
<form method='get' action='%(login_url)s'
style='text-align:center; font: 13px sans-serif'>
<div style='width: 20em; margin: 1em auto;
text-align:left;
padding: 0 2em 1.25em 2em;
background-color: #d6e9f8;
border: 2px solid #67a7e3'>
<h3>%(login_message)s</h3>
<p style='padding: 0; margin: 0'>
<label for='email' style="width: 3em">Email:</label>
<input name='email' type='text' value='%(email)s' id='email'/>
</p>
<p style='margin: .5em 0 0 3em; font-size:12px'>
<input name='admin' type='checkbox' value='True'
%(admin_checked)s id='admin'/>
<label for='admin'>Sign in as Administrator</label>
</p>
<p style='margin-left: 3em'>
<input name='action' value='Login' type='submit'
id='submit-login' />
<input name='action' value='Logout' type='submit'
id='submit-logout' />
</p>
</div>
<input name='continue' type='hidden' value='%(continue_url)s'/>
</form>
</body>
</html>
"""
def RenderLoginTemplate(login_url, continue_url, email, admin):
  """Renders the login page.

  Args:
    login_url, continue_url, email, admin: Parameters passed to
      LoginCGI.

  Returns:
    String containing the contents of the login page.
  """
  if email:
    login_message = 'Logged in'
  else:
    login_message = 'Not logged in'
  # '\x40' is '@'; the placeholder address is test@example.com.
  return LOGIN_TEMPLATE % {
      'email': email or 'test\x40example.com',
      'admin_checked': 'checked' if admin else '',
      'login_message': login_message,
      'login_url': login_url,
      'continue_url': continue_url,
  }
def LoginRedirect(login_url,
                  hostname,
                  port,
                  relative_url,
                  outfile):
  """Writes a login redirection URL to a user.

  Args:
    login_url: Relative URL which should be used for handling user logins.
    hostname: Name of the host on which the webserver is running.
    port: Port on which the webserver is running.
    relative_url: String containing the URL accessed.
    outfile: File-like object to which the response should be written.
  """
  # Pass the original destination along (URL-escaped) as the 'continue'
  # parameter so the login handler can send the user back afterwards.
  dest_url = "http://%s:%s%s" % (hostname, port, relative_url)
  redirect_url = 'http://%s:%s%s?%s=%s' % (hostname,
                                           port,
                                           login_url,
                                           CONTINUE_PARAM,
                                           urllib.quote(dest_url))
  outfile.write('Status: 302 Requires login\r\n')
  outfile.write('Location: %s\r\n\r\n' % redirect_url)
def LoginCGI(login_url,
             email,
             admin,
             action,
             set_email,
             set_admin,
             continue_url,
             outfile):
  """Runs the login CGI.

  This CGI does not care about the method at all. For both POST and GET the
  client will be redirected to the continue URL.

  Args:
    login_url: URL used to run the CGI.
    email: Current email address of the requesting user.
    admin: True if the requesting user is an admin; False otherwise.
    action: The action used to run the CGI; 'Login' for a login action, 'Logout'
      for when a logout should occur.
    set_email: Email to set for the user; Empty if no email should be set.
    set_admin: True if the user should be an admin; False otherwise.
    continue_url: URL to which the user should be redirected when the CGI
      finishes loading; defaults to the login_url with no parameters (showing
      current status) if not supplied.
    outfile: File-like object to which all output data should be written.
  """
  redirect_url = ''
  output_headers = []
  if action:
    # 'Logout' clears the cookie; any other action with an email supplied
    # sets a fresh login cookie.
    if action.lower() == LOGOUT_ACTION.lower():
      output_headers.append(ClearUserInfoCookie())
    elif set_email:
      output_headers.append(SetUserInfoCookie(set_email, set_admin))
    redirect_url = continue_url or login_url
  if redirect_url:
    # Emit the Set-Cookie header(s) along with the redirect.
    outfile.write('Status: 302 Redirecting to continue URL\r\n')
    for header in output_headers:
      outfile.write(header)
    outfile.write('Location: %s\r\n' % redirect_url)
    outfile.write('\r\n')
  else:
    # No action requested: render the current-status/login form page.
    outfile.write('Status: 200\r\n')
    outfile.write('Content-Type: text/html\r\n')
    outfile.write('\r\n')
    outfile.write(RenderLoginTemplate(login_url,
                                      continue_url,
                                      email,
                                      admin))
def main():
  """Runs the login and logout CGI script."""
  form = cgi.FieldStorage()
  # PATH_INFO is the URL this CGI was reached at; reused as the form target.
  login_url = os.environ['PATH_INFO']
  # Current login state, as provided in the CGI environment.
  email = os.environ.get('USER_EMAIL', '')
  admin = os.environ.get('USER_IS_ADMIN', '0') == '1'
  # Requested changes, from the query string / form fields.
  action = form.getfirst(ACTION_PARAM)
  set_email = form.getfirst(EMAIL_PARAM, '')
  set_admin = form.getfirst(ADMIN_PARAM, '') == 'True'
  continue_url = form.getfirst(CONTINUE_PARAM, '')
  LoginCGI(login_url,
           email,
           admin,
           action,
           set_email,
           set_admin,
           continue_url,
           sys.stdout)
  return 0
# Entry point when executed directly as a CGI script.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Imports CSV data over HTTP.
Usage:
%s [flags]
--debug Show debugging information. (Optional)
--cookie=<string> Whole Cookie header to supply to the server, including
the parameter name (e.g., "ACSID=..."). (Optional)
--url=<string> URL endpoint to post to for importing data. (Required)
--batch_size=<int> Number of Entity objects to include in each post to
the URL endpoint. The more data per row/Entity, the
smaller the batch size should be. (Default 10)
--filename=<path> Path to the CSV file to import. (Required)
--kind=<string> Name of the Entity object kind to put in the datastore.
(Required)
The exit status will be 0 on success, non-zero on import failure.
Works with the bulkload mix-in library for google.appengine.ext.bulkload.
Please look there for documentation about how to setup the server side.
"""
import StringIO
import httplib
import logging
import csv
import getopt
import socket
import sys
import urllib
import urlparse
from google.appengine.ext.bulkload import constants
class Error(Exception):
  """Base-class for exceptions in this module.

  Catching this type catches every error raised by the importer.
  """
class PostError(Error):
  """An error has occurred while trying to post data to the server."""
class BadServerStatusError(PostError):
  """The server has returned an error while importing data.

  Raised when the server responds with any HTTP status other than 200 OK.
  """
def ContentGenerator(csv_file,
                     batch_size,
                     create_csv_reader=csv.reader,
                     create_csv_writer=csv.writer):
  """Retrieves CSV data up to a batch size at a time.

  Args:
    csv_file: A file-like object for reading CSV data.
    batch_size: Maximum number of CSV rows to yield on each iteration.
    create_csv_reader, create_csv_writer: Used for dependency injection.

  Yields:
    Tuple (entity_count, csv_content) where:
      entity_count: Number of entities contained in the csv_content. Will be
        less than or equal to the batch_size and greater than 0.
      csv_content: String containing the CSV content containing the next
        entity_count entities.
  """
  try:
    # Allow long rows; older Python versions lack field_size_limit, so the
    # failure is ignored there.
    csv.field_size_limit(800000)
  except AttributeError:
    pass
  reader = create_csv_reader(csv_file, skipinitialspace=True)
  exhausted = False
  while not exhausted:
    rows_written = 0
    # Re-serialize up to batch_size parsed rows into an in-memory CSV chunk.
    content = StringIO.StringIO()
    writer = create_csv_writer(content)
    try:
      for i in xrange(batch_size):
        row = reader.next()
        writer.writerow(row)
        rows_written += 1
    except StopIteration:
      # Input ran out mid-batch; emit whatever was collected, then stop.
      exhausted = True
    if rows_written > 0:
      yield rows_written, content.getvalue()
def PostEntities(host_port, uri, cookie, kind, content):
  """Posts Entity records to a remote endpoint over HTTP.

  Args:
    host_port: String containing the "host:port" pair; the port is optional.
    uri: Relative URI to access on the remote host (e.g., '/bulkload').
    cookie: String containing the Cookie header to use, if any.
    kind: Kind of the Entity records being posted.
    content: String containing the CSV data for the entities.

  Raises:
    BadServerStatusError if the server was contactable but returns an error.
    PostError If an error occurred while connecting to the server or reading
      or writing data.
  """
  logging.debug('Connecting to %s', host_port)
  try:
    # The server-side bulkload handler expects the kind and CSV payload as
    # form-encoded parameters.
    body = urllib.urlencode({
        constants.KIND_PARAM: kind,
        constants.CSV_PARAM: content,
    })
    headers = {
        'Content-Type': 'application/x-www-form-urlencoded',
        'Content-Length': len(body),
        'Cookie': cookie,
    }
    logging.debug('Posting %d bytes to http://%s%s', len(body), host_port, uri)
    connection = httplib.HTTPConnection(host_port)
    try:
      connection.request('POST', uri, body, headers)
      response = connection.getresponse()
      status = response.status
      reason = response.reason
      content = response.read()
      logging.debug('Received response code %d: %s', status, reason)
      # Anything other than 200 OK counts as an import failure; the
      # BadServerStatusError propagates past the except clause below.
      if status != httplib.OK:
        raise BadServerStatusError('Received code %d: %s\n%s' % (
            status, reason, content))
    finally:
      connection.close()
  except (IOError, httplib.HTTPException, socket.error), e:
    logging.debug('Encountered exception accessing HTTP server: %s', e)
    raise PostError(e)
def SplitURL(url):
  """Splits an HTTP URL into pieces.

  Args:
    url: String containing a full URL string (e.g.,
      'http://blah.com:8080/stuff?param=1#foo')

  Returns:
    Tuple (netloc, uri) where:
      netloc: String containing the host/port combination from the URL. The
        port is optional. (e.g., 'blah.com:8080').
      uri: String containing the relative URI of the URL. (e.g., '/stuff').
  """
  # urlsplit returns (scheme, netloc, path, query, fragment); only the
  # host/port and path components are needed here.
  split_result = urlparse.urlsplit(url)
  return split_result[1], split_result[2]
def ImportCSV(filename,
              post_url,
              cookie,
              batch_size,
              kind,
              split_url=SplitURL,
              openfile=file,
              create_content_generator=ContentGenerator,
              post_entities=PostEntities):
  """Imports CSV data using a series of HTTP posts.

  Args:
    filename: File on disk containing CSV data.
    post_url: URL to post the Entity data to.
    cookie: Full cookie header to use while connecting.
    batch_size: Maximum number of Entity objects to post with each request.
    kind: Entity kind of the objects being posted.
    split_url, openfile, create_content_generator, post_entities: Used for
      dependency injection.

  Returns:
    True if all entities were imported successfully; False otherwise.
  """
  host_port, uri = split_url(post_url)
  csv_file = openfile(filename, 'r')
  try:
    content_gen = create_content_generator(csv_file, batch_size)
    logging.info('Starting import; maximum %d entities per post', batch_size)
    for num_entities, content in content_gen:
      logging.info('Importing %d entities in %d bytes',
                   num_entities, len(content))
      try:
        # NOTE(review): PostEntities returns None, so rebinding 'content'
        # here looks accidental -- harmless, since the loop reassigns it on
        # the next iteration.
        content = post_entities(host_port, uri, cookie, kind, content)
      except PostError, e:
        logging.error('An error occurred while importing: %s', e)
        return False
  finally:
    csv_file.close()
  return True
def PrintUsageExit(code):
  """Prints usage information and exits with a status code.

  Args:
    code: Status code to pass to sys.exit() after displaying usage information.
  """
  # The module docstring doubles as the usage text; it takes the program
  # name as its single substitution argument.
  print sys.modules['__main__'].__doc__ % sys.argv[0]
  sys.stdout.flush()
  sys.stderr.flush()
  sys.exit(code)
def ParseArguments(argv):
  """Parses command-line arguments.

  Prints out a help message if -h or --help is supplied.

  Args:
    argv: List of command-line arguments.

  Returns:
    Tuple (url, filename, cookie, batch_size, kind) containing the values from
    each corresponding command-line flag.
  """
  opts, args = getopt.getopt(
      argv[1:],
      'h',
      ['debug',
       'help',
       'url=',
       'filename=',
       'cookie=',
       'batch_size=',
       'kind='])
  # Flag defaults; url/filename/kind stay None when missing so main() can
  # detect required flags that were not supplied.
  url = None
  filename = None
  cookie = ''
  batch_size = 10
  kind = None
  # (Removed an unused local 'encoding = None' that was never read.)
  for option, value in opts:
    if option == '--debug':
      logging.getLogger().setLevel(logging.DEBUG)
    if option in ('-h', '--help'):
      PrintUsageExit(0)
    if option == '--url':
      url = value
    if option == '--filename':
      filename = value
    if option == '--cookie':
      cookie = value
    if option == '--batch_size':
      batch_size = int(value)
      if batch_size <= 0:
        print >>sys.stderr, 'batch_size must be 1 or larger'
        PrintUsageExit(1)
    if option == '--kind':
      kind = value
  return (url, filename, cookie, batch_size, kind)
def main(argv):
  """Runs the importer.

  Args:
    argv: Command-line arguments, including the program name in argv[0].

  Returns:
    0 on success, 1 on failure (suitable for passing to sys.exit).
  """
  logging.basicConfig(
      level=logging.INFO,
      format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')
  args = ParseArguments(argv)
  # url, filename and kind are required; ParseArguments leaves them None
  # when the corresponding flag was not supplied.
  if [arg for arg in args if arg is None]:
    print >>sys.stderr, 'Invalid arguments'
    PrintUsageExit(1)
  url, filename, cookie, batch_size, kind = args
  if ImportCSV(filename, url, cookie, batch_size, kind):
    # Fixed log-message typo: was 'Import succcessful'.
    logging.info('Import successful')
    return 0
  logging.error('Import failed')
  return 1
# Entry point: the process exit status is 0 on success, non-zero on failure.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""CGI for displaying info about the currently running app in dev_appserver.
This serves pages under /_ah/info/ that display information about the app
currently running in the dev_appserver. It currently serves on these URLs:
/_ah/info/queries:
A list of datastore queries run so far, grouped by kind. Used to suggest
composite indices that should be built.
/_ah/info/index.yaml:
Produces an index.yaml file that can be uploaded to the real app
server by appcfg.py. This information is derived from the query
history above, by removing queries that don't need any indexes to
be built and by combining queries that can use the same index.
"""
import cgi
import wsgiref.handlers
from google.appengine.api import apiproxy_stub_map
from google.appengine.datastore import datastore_pb
from google.appengine.ext import webapp
from google.appengine.tools import dev_appserver_index
class QueriesHandler(webapp.RequestHandler):
  """A handler that displays a list of the datastore queries run so far."""

  # Static HTML fragments assembled by Render(); ROW is filled per query.
  HEADER = """<html>
<head><title>Query History</title></head>
<body>
<h3>Query History</h3>
<p>This is a list of datastore queries your app has run. You have to
make composite indices for these queries before deploying your app.
This is normally done automatically by running dev_appserver, which
will write the file index.yaml into your app's root directory, and
then deploying your app with appcfg, which will upload that
index.yaml.</p>
<p>You can also view a 'clean' <a href="index.yaml">index.yaml</a>
file and save that to your app's root directory.</p>
<table>
<tr><th>Times run</th><th>Query</th></tr>
"""

  ROW = """<tr><td>%(count)s</td><td>%(query)s</td></tr>"""

  FOOTER = """
</table>
</body>
</html>"""

  def Render(self):
    """Renders and returns the query history page HTML.

    Returns:
      A string, formatted as an HTML page.
    """
    history = apiproxy_stub_map.apiproxy.GetStub('datastore_v3').QueryHistory()
    # Swap to (count, query) tuples so sorting puts the most-run queries
    # first.
    history_items = [(count, query) for query, count in history.items()]
    history_items.sort(reverse=True)
    rows = [self.ROW % {'query': _FormatQuery(query),
                        'count': count}
            for count, query in history_items]
    return self.HEADER + '\n'.join(rows) + self.FOOTER

  def get(self):
    """Handle a GET. Just calls Render()."""
    self.response.out.write(self.Render())
class IndexYamlHandler(webapp.RequestHandler):
  """A handler that renders an index.yaml file suitable for upload."""

  def Render(self):
    """Renders and returns the index.yaml file.

    Returns:
      A string, formatted as an index.yaml file.
    """
    datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
    query_history = datastore_stub.QueryHistory()
    # Derive index definitions from the queries observed so far.
    body = dev_appserver_index.GenerateIndexFromHistory(query_history)
    return 'indexes:\n' + body

  def get(self):
    """Handle a GET. Just calls Render()."""
    # Plain text so the user can save the output directly as index.yaml.
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.out.write(self.Render())
def _FormatQuery(query):
"""Format a Query protobuf as (very simple) HTML.
Args:
query: A datastore_pb.Query instance.
Returns:
A string containing formatted HTML. This is mostly the output of
str(query) with '<' etc. escaped, and '<br>' inserted in front of
Order and Filter parts.
"""
res = cgi.escape(str(query))
res = res.replace('Order', '<br>Order')
res = res.replace('Filter', '<br>Filter')
return res
def _DirectionToString(direction):
  """Turn a direction enum into a string.

  Args:
    direction: ASCENDING or DESCENDING

  Returns:
    Either 'asc' or 'descending'.
  """
  is_descending = (direction == datastore_pb.Query_Order.DESCENDING)
  return 'descending' if is_descending else 'asc'
# Maps the URL paths served by this CGI to their handler classes.
URL_MAP = {
  '/_ah/info/queries': QueriesHandler,
  '/_ah/info/index.yaml': IndexYamlHandler,
}
def main():
  """Serves the info URLs as a WSGI application via the CGI bridge."""
  application = webapp.WSGIApplication(URL_MAP.items())
  wsgiref.handlers.CGIHandler().run(application)
# Entry point when run as a CGI script.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Helper CGI for logins/logout in the development application server.
This CGI has these parameters:
continue: URL to redirect to after a login or logout has completed.
email: Email address to set for the client.
admin: If 'True', the client should be logged in as an admin.
action: What action to take ('Login' or 'Logout').
To view the current user information and a form for logging in and out,
supply no parameters.
"""
import Cookie
import cgi
import os
import sys
import urllib
# CGI parameter names recognized by this login handler.
CONTINUE_PARAM = 'continue'
EMAIL_PARAM = 'email'
ADMIN_PARAM = 'admin'
ACTION_PARAM = 'action'
# Values accepted for the 'action' parameter.
LOGOUT_ACTION = 'Logout'
LOGIN_ACTION = 'Login'
# Pre-built query-string fragment that triggers a logout.
LOGOUT_PARAM = 'action=%s' % LOGOUT_ACTION
# Cookie carrying the fake login state ("email:admin") in the dev server.
COOKIE_NAME = 'dev_appserver_login'
def GetUserInfo(http_cookie, cookie_name=COOKIE_NAME):
  """Get the requestor's user info from the HTTP cookie in the CGI environment.

  Args:
    http_cookie: Value of the HTTP_COOKIE environment variable.
    cookie_name: Name of the cookie that stores the user info.

  Returns:
    Tuple (email, admin) where:
      email: The user's email address, if any.
      admin: True if the user is an admin; False otherwise.
  """
  cookie = Cookie.SimpleCookie(http_cookie)
  cookie_value = ''
  if cookie_name in cookie:
    cookie_value = cookie[cookie_name].value
  # Pad with empty fields so a missing or malformed cookie still unpacks
  # to (email, admin) without raising.
  email, admin = (cookie_value.split(':') + ['', ''])[:2]
  return email, (admin == 'True')
def CreateCookieData(email, admin):
  """Creates cookie payload data.

  Args:
    email, admin: Parameters to incorporate into the cookie.

  Returns:
    String containing the cookie payload ("email:True" or "email:False").
  """
  return '%s:%s' % (email, 'True' if admin else 'False')
def SetUserInfoCookie(email, admin, cookie_name=COOKIE_NAME):
  """Creates a cookie to set the user information for the requestor.

  Args:
    email: Email to set for the user.
    admin: True if the user should be admin; False otherwise.
    cookie_name: Name of the cookie that stores the user info.

  Returns:
    'Set-Cookie' header for setting the user info of the requestor.
  """
  cookie_value = CreateCookieData(email, admin)
  set_cookie = Cookie.SimpleCookie()
  set_cookie[cookie_name] = cookie_value
  # Scope the cookie to the whole site so every handler sees the login state.
  set_cookie[cookie_name]['path'] = '/'
  # str(SimpleCookie) renders a full 'Set-Cookie: ...' header line.
  return '%s\r\n' % set_cookie
def ClearUserInfoCookie(cookie_name=COOKIE_NAME):
  """Clears the user info cookie from the requestor, logging them out.

  Args:
    cookie_name: Name of the cookie that stores the user info.

  Returns:
    'Set-Cookie' header for clearing the user info of the requestor.
  """
  expired = Cookie.SimpleCookie()
  expired[cookie_name] = ''
  expired[cookie_name]['path'] = '/'
  # max-age=0 tells the browser to discard the cookie immediately.
  expired[cookie_name]['max-age'] = '0'
  return '%s\r\n' % expired
# HTML page shown when the login CGI is visited without an action parameter.
# The %(...)s placeholders are filled in by RenderLoginTemplate().
LOGIN_TEMPLATE = """<html>
<head>
<title>Login</title>
</head>
<body>
<form method='get' action='%(login_url)s'
style='text-align:center; font: 13px sans-serif'>
<div style='width: 20em; margin: 1em auto;
text-align:left;
padding: 0 2em 1.25em 2em;
background-color: #d6e9f8;
border: 2px solid #67a7e3'>
<h3>%(login_message)s</h3>
<p style='padding: 0; margin: 0'>
<label for='email' style="width: 3em">Email:</label>
<input name='email' type='text' value='%(email)s' id='email'/>
</p>
<p style='margin: .5em 0 0 3em; font-size:12px'>
<input name='admin' type='checkbox' value='True'
%(admin_checked)s id='admin'/>
<label for='admin'>Sign in as Administrator</label>
</p>
<p style='margin-left: 3em'>
<input name='action' value='Login' type='submit'
id='submit-login' />
<input name='action' value='Logout' type='submit'
id='submit-logout' />
</p>
</div>
<input name='continue' type='hidden' value='%(continue_url)s'/>
</form>
</body>
</html>
"""
def RenderLoginTemplate(login_url, continue_url, email, admin):
  """Renders the login page.

  Args:
    login_url: URL the login form posts back to.
    continue_url: URL to redirect to after a login/logout completes.
    email: Email address of the signed-in user, or '' if signed out.
    admin: True if the current user is an administrator.

  Returns:
    String containing the contents of the login page.
  """
  values = {
      # '\x40' is '@'; suggest a placeholder address when signed out.
      'email': email or 'test\x40example.com',
      'admin_checked': 'checked' if admin else '',
      'login_message': 'Logged in' if email else 'Not logged in',
      'login_url': login_url,
      'continue_url': continue_url,
  }
  return LOGIN_TEMPLATE % values
def LoginRedirect(login_url,
                  hostname,
                  port,
                  relative_url,
                  outfile):
  """Writes a login redirection URL to a user.

  Args:
    login_url: Relative URL which should be used for handling user logins.
    hostname: Name of the host on which the webserver is running.
    port: Port on which the webserver is running.
    relative_url: String containing the URL accessed.
    outfile: File-like object to which the response should be written.
  """
  # The originally requested URL is passed through as the 'continue'
  # parameter so the login CGI can bounce the user back afterwards.
  dest_url = "http://%s:%s%s" % (hostname, port, relative_url)
  redirect_url = 'http://%s:%s%s?%s=%s' % (
      hostname, port, login_url, CONTINUE_PARAM, urllib.quote(dest_url))
  outfile.write('Status: 302 Requires login\r\n')
  outfile.write('Location: %s\r\n\r\n' % redirect_url)
def LoginCGI(login_url,
             email,
             admin,
             action,
             set_email,
             set_admin,
             continue_url,
             outfile):
  """Runs the login CGI.

  This CGI does not care about the method at all. For both POST and GET the
  client will be redirected to the continue URL.

  Args:
    login_url: URL used to run the CGI.
    email: Current email address of the requesting user.
    admin: True if the requesting user is an admin; False otherwise.
    action: The action used to run the CGI; 'Login' for a login action,
      'Logout' for when a logout should occur.
    set_email: Email to set for the user; Empty if no email should be set.
    set_admin: True if the user should be an admin; False otherwise.
    continue_url: URL to which the user should be redirected when the CGI
      finishes loading; defaults to the login_url with no parameters (showing
      current status) if not supplied.
    outfile: File-like object to which all output data should be written.
  """
  headers = []
  if action:
    # Logout wins over login; otherwise only set a cookie when an email was
    # actually supplied.
    if action.lower() == LOGOUT_ACTION.lower():
      headers.append(ClearUserInfoCookie())
    elif set_email:
      headers.append(SetUserInfoCookie(set_email, set_admin))
    redirect_url = continue_url or login_url
  else:
    redirect_url = ''

  if redirect_url:
    outfile.write('Status: 302 Redirecting to continue URL\r\n')
    for header in headers:
      outfile.write(header)
    outfile.write('Location: %s\r\n' % redirect_url)
    outfile.write('\r\n')
  else:
    # No action requested: show the status/login form instead.
    outfile.write('Status: 200\r\n')
    outfile.write('Content-Type: text/html\r\n')
    outfile.write('\r\n')
    outfile.write(RenderLoginTemplate(login_url,
                                      continue_url,
                                      email,
                                      admin))
def main():
  """Runs the login and logout CGI script.

  Reads the current login state from the CGI environment and the form
  parameters, then delegates to LoginCGI, writing to stdout.
  """
  form = cgi.FieldStorage()
  env = os.environ
  LoginCGI(env['PATH_INFO'],
           env.get('USER_EMAIL', ''),
           env.get('USER_IS_ADMIN', '0') == '1',
           form.getfirst(ACTION_PARAM),
           form.getfirst(EMAIL_PARAM, ''),
           form.getfirst(ADMIN_PARAM, '') == 'True',
           form.getfirst(CONTINUE_PARAM, ''),
           sys.stdout)
  return 0
# CGI entry point: the dev_appserver executes this file directly.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Imports CSV data over HTTP.
Usage:
%s [flags]
--debug Show debugging information. (Optional)
--cookie=<string> Whole Cookie header to supply to the server, including
the parameter name (e.g., "ACSID=..."). (Optional)
--url=<string> URL endpoint to post to for importing data. (Required)
--batch_size=<int> Number of Entity objects to include in each post to
the URL endpoint. The more data per row/Entity, the
smaller the batch size should be. (Default 10)
--filename=<path> Path to the CSV file to import. (Required)
--kind=<string> Name of the Entity object kind to put in the datastore.
(Required)
The exit status will be 0 on success, non-zero on import failure.
Works with the bulkload mix-in library for google.appengine.ext.bulkload.
Please look there for documentation about how to setup the server side.
"""
import StringIO
import httplib
import logging
import csv
import getopt
import socket
import sys
import urllib
import urlparse
from google.appengine.ext.bulkload import constants
class Error(Exception):
  """Base class for exceptions in this module."""
class PostError(Error):
  """An error has occurred while trying to post data to the server."""
class BadServerStatusError(PostError):
  """The server has returned a non-OK HTTP status while importing data."""
def ContentGenerator(csv_file,
                     batch_size,
                     create_csv_reader=csv.reader,
                     create_csv_writer=csv.writer):
  """Retrieves CSV data up to a batch size at a time.

  Args:
    csv_file: A file-like object for reading CSV data.
    batch_size: Maximum number of CSV rows to yield on each iteration.
    create_csv_reader, create_csv_writer: Used for dependency injection.

  Yields:
    Tuple (entity_count, csv_content) where:
      entity_count: Number of entities contained in the csv_content. Will be
        less than or equal to the batch_size and greater than 0.
      csv_content: String containing the CSV content containing the next
        entity_count entities.
  """
  try:
    # Raise the per-field size cap; very old Python versions lack this hook.
    csv.field_size_limit(800000)
  except AttributeError:
    pass

  reader = create_csv_reader(csv_file, skipinitialspace=True)
  exhausted = False
  while not exhausted:
    batch = StringIO.StringIO()
    writer = create_csv_writer(batch)
    rows_in_batch = 0
    try:
      for _ in xrange(batch_size):
        writer.writerow(reader.next())
        rows_in_batch += 1
    except StopIteration:
      exhausted = True
    # Skip the final empty batch when the row count divides evenly.
    if rows_in_batch > 0:
      yield rows_in_batch, batch.getvalue()
def PostEntities(host_port, uri, cookie, kind, content):
"""Posts Entity records to a remote endpoint over HTTP.
Args:
host_port: String containing the "host:port" pair; the port is optional.
uri: Relative URI to access on the remote host (e.g., '/bulkload').
cookie: String containing the Cookie header to use, if any.
kind: Kind of the Entity records being posted.
content: String containing the CSV data for the entities.
Raises:
BadServerStatusError if the server was contactable but returns an error.
PostError If an error occurred while connecting to the server or reading
or writing data.
"""
logging.debug('Connecting to %s', host_port)
try:
body = urllib.urlencode({
constants.KIND_PARAM: kind,
constants.CSV_PARAM: content,
})
headers = {
'Content-Type': 'application/x-www-form-urlencoded',
'Content-Length': len(body),
'Cookie': cookie,
}
logging.debug('Posting %d bytes to http://%s%s', len(body), host_port, uri)
connection = httplib.HTTPConnection(host_port)
try:
connection.request('POST', uri, body, headers)
response = connection.getresponse()
status = response.status
reason = response.reason
content = response.read()
logging.debug('Received response code %d: %s', status, reason)
if status != httplib.OK:
raise BadServerStatusError('Received code %d: %s\n%s' % (
status, reason, content))
finally:
connection.close()
except (IOError, httplib.HTTPException, socket.error), e:
logging.debug('Encountered exception accessing HTTP server: %s', e)
raise PostError(e)
def SplitURL(url):
  """Splits an HTTP URL into pieces.

  Args:
    url: String containing a full URL string (e.g.,
      'http://blah.com:8080/stuff?param=1#foo')

  Returns:
    Tuple (netloc, uri) where:
      netloc: String containing the host/port combination from the URL. The
        port is optional. (e.g., 'blah.com:8080').
      uri: String containing the relative URI of the URL. (e.g., '/stuff').
  """
  # Only the network location and path are needed; the scheme, query and
  # fragment components are deliberately discarded, so avoid binding the
  # three unused locals the previous unpacking created.
  netloc, path = urlparse.urlsplit(url)[1:3]
  return netloc, path
def ImportCSV(filename,
post_url,
cookie,
batch_size,
kind,
split_url=SplitURL,
openfile=file,
create_content_generator=ContentGenerator,
post_entities=PostEntities):
"""Imports CSV data using a series of HTTP posts.
Args:
filename: File on disk containing CSV data.
post_url: URL to post the Entity data to.
cookie: Full cookie header to use while connecting.
batch_size: Maximum number of Entity objects to post with each request.
kind: Entity kind of the objects being posted.
split_url, openfile, create_content_generator, post_entities: Used for
dependency injection.
Returns:
True if all entities were imported successfully; False otherwise.
"""
host_port, uri = split_url(post_url)
csv_file = openfile(filename, 'r')
try:
content_gen = create_content_generator(csv_file, batch_size)
logging.info('Starting import; maximum %d entities per post', batch_size)
for num_entities, content in content_gen:
logging.info('Importing %d entities in %d bytes',
num_entities, len(content))
try:
content = post_entities(host_port, uri, cookie, kind, content)
except PostError, e:
logging.error('An error occurred while importing: %s', e)
return False
finally:
csv_file.close()
return True
def PrintUsageExit(code):
"""Prints usage information and exits with a status code.
Args:
code: Status code to pass to sys.exit() after displaying usage information.
"""
print sys.modules['__main__'].__doc__ % sys.argv[0]
sys.stdout.flush()
sys.stderr.flush()
sys.exit(code)
def ParseArguments(argv):
  """Parses command-line arguments.

  Prints out a help message if -h or --help is supplied.

  Args:
    argv: List of command-line arguments.

  Returns:
    Tuple (url, filename, cookie, batch_size, kind) containing the values from
    each corresponding command-line flag.
  """
  opts, args = getopt.getopt(
      argv[1:],
      'h',
      ['debug',
       'help',
       'url=',
       'filename=',
       'cookie=',
       'batch_size=',
       'kind='])

  url = None
  filename = None
  cookie = ''
  batch_size = 10
  kind = None
  # (The dead 'encoding' local the old code initialized has been removed.)
  for option, value in opts:
    # Options are mutually exclusive per iteration, so chain with elif.
    if option == '--debug':
      logging.getLogger().setLevel(logging.DEBUG)
    elif option in ('-h', '--help'):
      PrintUsageExit(0)
    elif option == '--url':
      url = value
    elif option == '--filename':
      filename = value
    elif option == '--cookie':
      cookie = value
    elif option == '--batch_size':
      batch_size = int(value)
      if batch_size <= 0:
        sys.stderr.write('batch_size must be 1 or larger\n')
        PrintUsageExit(1)
    elif option == '--kind':
      kind = value
  return (url, filename, cookie, batch_size, kind)
def main(argv):
  """Runs the importer.

  Args:
    argv: Command-line arguments, including the program name.

  Returns:
    0 if the import succeeded, 1 on failure (used as the exit status).
  """
  logging.basicConfig(
      level=logging.INFO,
      format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')

  args = ParseArguments(argv)
  # url, filename and kind are all required flags.
  if [arg for arg in args if arg is None]:
    print >>sys.stderr, 'Invalid arguments'
    PrintUsageExit(1)

  url, filename, cookie, batch_size, kind = args
  if ImportCSV(filename, url, cookie, batch_size, kind):
    # Fix: log message previously misspelled 'succcessful'.
    logging.info('Import successful')
    return 0
  logging.error('Import failed')
  return 1
# Script entry point: the exit status reports import success or failure.
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Runs a development application server for an application.
%(script)s [options] <application root>
Application root must be the path to the application to run in this server.
Must contain a valid app.yaml or app.yml file.
Options:
--help, -h View this helpful message.
--debug, -d Use debug logging. (Default false)
--clear_datastore, -c Clear the Datastore on startup. (Default false)
--address=ADDRESS, -a ADDRESS
Address to which this server should bind. (Default
%(address)s).
--port=PORT, -p PORT Port for the server to run on. (Default %(port)s)
--datastore_path=PATH Path to use for storing Datastore file stub data.
(Default %(datastore_path)s)
--history_path=PATH Path to use for storing Datastore history.
(Default %(history_path)s)
--require_indexes Disallows queries that require composite indexes
not defined in index.yaml.
--smtp_host=HOSTNAME SMTP host to send test mail to. Leaving this
unset will disable SMTP mail sending.
(Default '%(smtp_host)s')
--smtp_port=PORT SMTP port to send test mail to.
(Default %(smtp_port)s)
--smtp_user=USER SMTP user to connect as. Stub will only attempt
to login if this field is non-empty.
(Default '%(smtp_user)s').
--smtp_password=PASSWORD Password for SMTP server.
(Default '%(smtp_password)s')
--enable_sendmail Enable sendmail when SMTP not configured.
(Default false)
--show_mail_body Log the body of emails in mail stub.
(Default false)
--auth_domain Authorization domain that this app runs in.
(Default gmail.com)
--debug_imports Enables debug logging for module imports, showing
search paths used for finding modules and any
errors encountered during the import process.
--allow_skipped_files Allow access to files matched by app.yaml's
skipped_files (default False)
--disable_static_caching Never allow the browser to cache static files.
(Default enable if expiration set in app.yaml)
"""
from google.appengine.tools import os_compat
import getopt
import logging
import os
import re
import sys
import traceback
import tempfile
def SetGlobals():
  """Set various global variables involving the 'google' package.

  This function should not be called until sys.path has been properly set.
  """
  # The imports are deferred and published as module globals because the
  # correct location of the 'google' package is only known once SetPaths()
  # has adjusted sys.path for the app's api_version.
  global yaml_errors, appcfg, appengine_rpc, dev_appserver, os_compat
  from google.appengine.api import yaml_errors
  from google.appengine.tools import appcfg
  from google.appengine.tools import appengine_rpc
  from google.appengine.tools import dev_appserver
  from google.appengine.tools import os_compat
# Server consulted when checking whether a newer SDK is available.
DEFAULT_ADMIN_CONSOLE_SERVER = 'appengine.google.com'

# Keys of the option dictionary built by ParseArguments().
ARG_ADDRESS = 'address'
ARG_ADMIN_CONSOLE_SERVER = 'admin_console_server'
ARG_ADMIN_CONSOLE_HOST = 'admin_console_host'
ARG_AUTH_DOMAIN = 'auth_domain'
ARG_CLEAR_DATASTORE = 'clear_datastore'
ARG_DATASTORE_PATH = 'datastore_path'
ARG_DEBUG_IMPORTS = 'debug_imports'
ARG_ENABLE_SENDMAIL = 'enable_sendmail'
ARG_SHOW_MAIL_BODY = 'show_mail_body'
ARG_HISTORY_PATH = 'history_path'
ARG_LOGIN_URL = 'login_url'
ARG_LOG_LEVEL = 'log_level'
ARG_PORT = 'port'
ARG_REQUIRE_INDEXES = 'require_indexes'
ARG_ALLOW_SKIPPED_FILES = 'allow_skipped_files'
ARG_SMTP_HOST = 'smtp_host'
ARG_SMTP_PASSWORD = 'smtp_password'
ARG_SMTP_PORT = 'smtp_port'
ARG_SMTP_USER = 'smtp_user'
ARG_STATIC_CACHING = 'static_caching'
ARG_TEMPLATE_DIR = 'template_dir'

# Root of the SDK: four directory levels above the os_compat module
# (google/appengine/tools/os_compat.py).
SDK_PATH = os.path.dirname(
    os.path.dirname(
        os.path.dirname(
            os.path.dirname(os_compat.__file__)
        )
    )
)

# Default value for every recognized command-line option.
DEFAULT_ARGS = {
    ARG_PORT: 8080,
    ARG_LOG_LEVEL: logging.INFO,
    ARG_DATASTORE_PATH: os.path.join(tempfile.gettempdir(),
                                     'dev_appserver.datastore'),
    ARG_HISTORY_PATH: os.path.join(tempfile.gettempdir(),
                                   'dev_appserver.datastore.history'),
    ARG_LOGIN_URL: '/_ah/login',
    ARG_CLEAR_DATASTORE: False,
    ARG_REQUIRE_INDEXES: False,
    ARG_TEMPLATE_DIR: os.path.join(SDK_PATH, 'templates'),
    ARG_SMTP_HOST: '',
    ARG_SMTP_PORT: 25,
    ARG_SMTP_USER: '',
    ARG_SMTP_PASSWORD: '',
    ARG_ENABLE_SENDMAIL: False,
    ARG_SHOW_MAIL_BODY: False,
    ARG_AUTH_DOMAIN: 'gmail.com',
    ARG_ADDRESS: 'localhost',
    ARG_ADMIN_CONSOLE_SERVER: DEFAULT_ADMIN_CONSOLE_SERVER,
    ARG_ADMIN_CONSOLE_HOST: None,
    ARG_ALLOW_SKIPPED_FILES: False,
    ARG_STATIC_CACHING: True,
}

# Maps an api_version value to a dict of package name -> path components
# (relative to the SDK root) that must be prepended to sys.path.
API_PATHS = {'1':
             {'google': (),
              'antlr3': ('lib', 'antlr3'),
              'django': ('lib', 'django'),
              'webob': ('lib', 'webob'),
              'yaml': ('lib', 'yaml', 'lib'),
             }
            }

DEFAULT_API_VERSION = '1'

# The 'test' pseudo-version mirrors the default paths and adds one extra,
# deliberately nonexistent package path.
API_PATHS['test'] = API_PATHS[DEFAULT_API_VERSION].copy()
API_PATHS['test']['_test'] = ('nonexistent', 'test', 'path')
def SetPaths(app_config_path):
  """Set the interpreter to use the specified API version.

  The app.yaml file is scanned for the api_version field and the value is
  extracted. With that information, the paths in API_PATHS are added to the
  front of sys.paths to make sure that they take precedent over any other
  paths to older versions of a package. All modules for each package set are
  cleared out of sys.modules to make sure only the newest version is used.

  Args:
    app_config_path: Path to the app.yaml file.

  Returns:
    The api_version string found in the configuration file.
  """
  api_version_re = re.compile(r'api_version:\s*(?P<api_version>[\w.]{1,32})')
  api_version = None
  app_config_file = open(app_config_path, 'r')
  try:
    for line in app_config_file:
      re_match = api_version_re.match(line)
      if re_match:
        api_version = re_match.group('api_version')
        break
  finally:
    app_config_file.close()

  if api_version is None:
    logging.error("Application configuration file missing an 'api_version' "
                  "value:\n%s" % app_config_path)
    sys.exit(1)
  if api_version not in API_PATHS:
    logging.error("Value of %r for 'api_version' from the application "
                  "configuration file is not valid:\n%s" %
                  (api_version, app_config_path))
    sys.exit(1)
  if api_version == DEFAULT_API_VERSION:
    # The default version's packages are already on sys.path.
    return DEFAULT_API_VERSION

  for pkg_name, path_parts in API_PATHS[api_version].iteritems():
    # Drop any already-imported modules from this package so the version on
    # the newly-inserted path wins on the next import.
    for name in sys.modules.keys():
      if name == pkg_name or name.startswith('%s.' % pkg_name):
        del sys.modules[name]
    # Fix: reuse the module-level SDK_PATH constant instead of recomputing
    # the identical four-level dirname chain in a local variable.
    pkg_path = os.path.join(SDK_PATH, *path_parts)
    sys.path.insert(0, pkg_path)
  return api_version
def PrintUsageExit(code):
"""Prints usage information and exits with a status code.
Args:
code: Status code to pass to sys.exit() after displaying usage information.
"""
render_dict = DEFAULT_ARGS.copy()
render_dict['script'] = os.path.basename(sys.argv[0])
print sys.modules['__main__'].__doc__ % render_dict
sys.stdout.flush()
sys.exit(code)
def ParseArguments(argv):
  """Parses command-line arguments.

  Args:
    argv: Command-line arguments, including the executable name, used to
      execute this application.

  Returns:
    Tuple (args, option_dict) where:
      args: List of command-line arguments following the executable name.
      option_dict: Dictionary of parsed flags that maps keys from DEFAULT_ARGS
        to their values, which are either pulled from the defaults, or from
        command-line flags.
  """
  option_dict = DEFAULT_ARGS.copy()
  try:
    # gnu_getopt lets flags and the positional application root appear in
    # any order.
    opts, args = getopt.gnu_getopt(
        argv[1:],
        'a:cdhp:',
        [ 'address=',
          'admin_console_server=',
          'admin_console_host=',
          'allow_skipped_files',
          'auth_domain=',
          'clear_datastore',
          'datastore_path=',
          'debug',
          'debug_imports',
          'enable_sendmail',
          'disable_static_caching',
          'show_mail_body',
          'help',
          'history_path=',
          'port=',
          'require_indexes',
          'smtp_host=',
          'smtp_password=',
          'smtp_port=',
          'smtp_user=',
          'template_dir=',
        ])
  except getopt.GetoptError, e:
    print >>sys.stderr, 'Error: %s' % e
    PrintUsageExit(1)

  # Each recognized option overwrites its default in option_dict; flags may
  # exit the process via PrintUsageExit on invalid values.
  for option, value in opts:
    if option in ('-h', '--help'):
      PrintUsageExit(0)
    if option in ('-d', '--debug'):
      option_dict[ARG_LOG_LEVEL] = logging.DEBUG
    if option in ('-p', '--port'):
      try:
        option_dict[ARG_PORT] = int(value)
        # Port must be a valid TCP port number.
        if not (65535 > option_dict[ARG_PORT] > 0):
          raise ValueError
      except ValueError:
        print >>sys.stderr, 'Invalid value supplied for port'
        PrintUsageExit(1)
    if option in ('-a', '--address'):
      option_dict[ARG_ADDRESS] = value
    if option == '--datastore_path':
      option_dict[ARG_DATASTORE_PATH] = os.path.abspath(value)
    if option == '--history_path':
      option_dict[ARG_HISTORY_PATH] = os.path.abspath(value)
    if option in ('-c', '--clear_datastore'):
      option_dict[ARG_CLEAR_DATASTORE] = True
    if option == '--require_indexes':
      option_dict[ARG_REQUIRE_INDEXES] = True
    if option == '--smtp_host':
      option_dict[ARG_SMTP_HOST] = value
    if option == '--smtp_port':
      try:
        option_dict[ARG_SMTP_PORT] = int(value)
        if not (65535 > option_dict[ARG_SMTP_PORT] > 0):
          raise ValueError
      except ValueError:
        print >>sys.stderr, 'Invalid value supplied for SMTP port'
        PrintUsageExit(1)
    if option == '--smtp_user':
      option_dict[ARG_SMTP_USER] = value
    if option == '--smtp_password':
      option_dict[ARG_SMTP_PASSWORD] = value
    if option == '--enable_sendmail':
      option_dict[ARG_ENABLE_SENDMAIL] = True
    if option == '--show_mail_body':
      option_dict[ARG_SHOW_MAIL_BODY] = True
    # Underscore-prefixed keys are consumed directly by main(), not by
    # dev_appserver option handling.
    if option == '--auth_domain':
      option_dict['_DEFAULT_ENV_AUTH_DOMAIN'] = value
    if option == '--debug_imports':
      option_dict['_ENABLE_LOGGING'] = True
    if option == '--template_dir':
      option_dict[ARG_TEMPLATE_DIR] = value
    if option == '--admin_console_server':
      option_dict[ARG_ADMIN_CONSOLE_SERVER] = value.strip()
    if option == '--admin_console_host':
      option_dict[ARG_ADMIN_CONSOLE_HOST] = value
    if option == '--allow_skipped_files':
      option_dict[ARG_ALLOW_SKIPPED_FILES] = True
    if option == '--disable_static_caching':
      option_dict[ARG_STATIC_CACHING] = False

  return args, option_dict
def MakeRpcServer(option_dict):
  """Create a new HttpRpcServer.

  Creates a new HttpRpcServer to check for updates to the SDK.

  Args:
    option_dict: The dict of command line options.

  Returns:
    A HttpRpcServer.
  """
  host = option_dict[ARG_ADMIN_CONSOLE_SERVER]
  host_override = option_dict[ARG_ADMIN_CONSOLE_HOST]
  server = appengine_rpc.HttpRpcServer(
      host,
      # The update check never needs real credentials.
      lambda: ('unused_email', 'unused_password'),
      appcfg.GetUserAgent(),
      appcfg.GetSourceName(),
      host_override=host_override)
  # Mark the connection authenticated so no login round-trip is attempted.
  server.authenticated = True
  return server
def main(argv):
  """Runs the development application server.

  Args:
    argv: Command-line arguments, including the executable name.

  Returns:
    0 on clean shutdown, 1 on any fatal error (used as the exit status).
  """
  args, option_dict = ParseArguments(argv)

  # Exactly one positional argument: the application root directory.
  if len(args) != 1:
    print >>sys.stderr, 'Invalid arguments'
    PrintUsageExit(1)

  root_path = args[0]
  # Find app.yaml (or app.yml) and configure sys.path for its api_version
  # before any google.appengine modules are imported.
  for suffix in ('yaml', 'yml'):
    path = os.path.join(root_path, 'app.%s' % suffix)
    if os.path.exists(path):
      api_version = SetPaths(path)
      break
  else:
    logging.error("Application configuration file not found in %s" % root_path)
    return 1

  SetGlobals()
  dev_appserver.API_VERSION = api_version

  # Pass through the pseudo-options collected by ParseArguments.
  if '_DEFAULT_ENV_AUTH_DOMAIN' in option_dict:
    auth_domain = option_dict['_DEFAULT_ENV_AUTH_DOMAIN']
    dev_appserver.DEFAULT_ENV['AUTH_DOMAIN'] = auth_domain
  if '_ENABLE_LOGGING' in option_dict:
    enable_logging = option_dict['_ENABLE_LOGGING']
    dev_appserver.HardenedModulesHook.ENABLE_LOGGING = enable_logging

  log_level = option_dict[ARG_LOG_LEVEL]
  port = option_dict[ARG_PORT]
  datastore_path = option_dict[ARG_DATASTORE_PATH]
  login_url = option_dict[ARG_LOGIN_URL]
  template_dir = option_dict[ARG_TEMPLATE_DIR]
  serve_address = option_dict[ARG_ADDRESS]
  require_indexes = option_dict[ARG_REQUIRE_INDEXES]
  allow_skipped_files = option_dict[ARG_ALLOW_SKIPPED_FILES]
  static_caching = option_dict[ARG_STATIC_CACHING]

  logging.basicConfig(
      level=log_level,
      format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')

  config = None
  try:
    config, matcher = dev_appserver.LoadAppConfig(root_path, {})
  except yaml_errors.EventListenerError, e:
    logging.error('Fatal error when loading application configuration:\n' +
                  str(e))
    return 1
  except dev_appserver.InvalidAppConfigError, e:
    logging.error('Application configuration file invalid:\n%s', e)
    return 1

  # Empty server string disables the SDK update check entirely.
  if option_dict[ARG_ADMIN_CONSOLE_SERVER] != '':
    server = MakeRpcServer(option_dict)
    update_check = appcfg.UpdateCheck(server, config)
    update_check.CheckSupportedVersion()
    if update_check.AllowedToCheckForUpdates():
      update_check.CheckForUpdates()

  try:
    dev_appserver.SetupStubs(config.application, **option_dict)
  except:
    exc_type, exc_value, exc_traceback = sys.exc_info()
    logging.error(str(exc_type) + ': ' + str(exc_value))
    logging.debug(''.join(traceback.format_exception(
        exc_type, exc_value, exc_traceback)))
    return 1

  http_server = dev_appserver.CreateServer(
      root_path,
      login_url,
      port,
      template_dir,
      sdk_dir=SDK_PATH,
      serve_address=serve_address,
      require_indexes=require_indexes,
      allow_skipped_files=allow_skipped_files,
      static_caching=static_caching)

  logging.info('Running application %s on port %d: http://%s:%d',
               config.application, port, serve_address, port)
  try:
    try:
      http_server.serve_forever()
    except KeyboardInterrupt:
      logging.info('Server interrupted by user, terminating')
    except:
      exc_info = sys.exc_info()
      info_string = '\n'.join(traceback.format_exception(*exc_info))
      logging.error('Error encountered:\n%s\nNow terminating.', info_string)
      return 1
  finally:
    # Always release the listening socket, even on error paths.
    http_server.server_close()

  return 0
if __name__ == '__main__':
  sys.exit(main(sys.argv))
else:
  # Imported as a module (e.g. by tests): sys.path is assumed to already be
  # configured, so publish the SDK module globals immediately.
  SetGlobals()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""CGI for displaying info about the currently running app in dev_appserver.
This serves pages under /_ah/info/ that display information about the app
currently running in the dev_appserver. It currently serves on these URLs:
/_ah/info/queries:
A list of datastore queries run so far, grouped by kind. Used to suggest
composite indices that should be built.
/_ah/info/index.yaml:
Produces an index.yaml file that can be uploaded to the real app
server by appcfg.py. This information is derived from the query
history above, by removing queries that don't need any indexes to
be built and by combining queries that can use the same index.
"""
import cgi
import wsgiref.handlers
from google.appengine.api import apiproxy_stub_map
from google.appengine.datastore import datastore_pb
from google.appengine.ext import webapp
from google.appengine.tools import dev_appserver_index
class QueriesHandler(webapp.RequestHandler):
  """A handler that displays a list of the datastore queries run so far."""

  HEADER = """<html>
<head><title>Query History</title></head>
<body>
<h3>Query History</h3>
<p>This is a list of datastore queries your app has run. You have to
make composite indices for these queries before deploying your app.
This is normally done automatically by running dev_appserver, which
will write the file index.yaml into your app's root directory, and
then deploying your app with appcfg, which will upload that
index.yaml.</p>
<p>You can also view a 'clean' <a href="index.yaml">index.yaml</a>
file and save that to your app's root directory.</p>
<table>
<tr><th>Times run</th><th>Query</th></tr>
"""

  ROW = """<tr><td>%(count)s</td><td>%(query)s</td></tr>"""

  FOOTER = """
</table>
</body>
</html>"""

  def Render(self):
    """Renders and returns the query history page HTML.

    Returns:
      A string, formatted as an HTML page.
    """
    history = apiproxy_stub_map.apiproxy.GetStub('datastore_v3').QueryHistory()
    # Order by (count, query) descending so the most frequent queries lead.
    ordered = sorted(((count, query) for query, count in history.items()),
                     reverse=True)
    rows = []
    for count, query in ordered:
      rows.append(self.ROW % {'query': _FormatQuery(query),
                              'count': count})
    return self.HEADER + '\n'.join(rows) + self.FOOTER

  def get(self):
    """Handle a GET. Just calls Render()."""
    self.response.out.write(self.Render())
class IndexYamlHandler(webapp.RequestHandler):
  """A handler that renders an index.yaml file suitable for upload."""

  def Render(self):
    """Renders and returns the index.yaml file.

    Returns:
      A string, formatted as an index.yaml file.
    """
    stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
    body = dev_appserver_index.GenerateIndexFromHistory(stub.QueryHistory())
    return 'indexes:\n' + body

  def get(self):
    """Handle a GET. Just calls Render()."""
    # Serve as plain text so the browser shows raw YAML.
    self.response.headers['Content-Type'] = 'text/plain'
    self.response.out.write(self.Render())
def _FormatQuery(query):
  """Format a Query protobuf as (very simple) HTML.

  Args:
    query: A datastore_pb.Query instance.

  Returns:
    A string containing formatted HTML. This is mostly the output of
    str(query) with '<' etc. escaped, and '<br>' inserted in front of
    Order and Filter parts.
  """
  formatted = cgi.escape(str(query))
  # Break the flat protobuf string onto new lines at each Order/Filter part.
  for keyword in ('Order', 'Filter'):
    formatted = formatted.replace(keyword, '<br>%s' % keyword)
  return formatted
def _DirectionToString(direction):
  """Turn a direction enum into a string.

  Args:
    direction: ASCENDING or DESCENDING (a datastore_pb.Query_Order value).

  Returns:
    'descending' for DESCENDING; 'asc' for any other value.
  """
  if direction != datastore_pb.Query_Order.DESCENDING:
    return 'asc'
  return 'descending'
# Maps each served URL path to its webapp handler class.
URL_MAP = {
    '/_ah/info/queries': QueriesHandler,
    '/_ah/info/index.yaml': IndexYamlHandler,
}
def main():
  """Runs the info pages as a CGI-hosted WSGI application."""
  app = webapp.WSGIApplication(URL_MAP.items())
  wsgiref.handlers.CGIHandler().run(app)
# CGI entry point: dev_appserver executes this file directly.
if __name__ == '__main__':
  main()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Utilities for generating and updating index.yaml."""
import os
import logging
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_admin
from google.appengine.api import yaml_errors
from google.appengine.datastore import datastore_index
import yaml
# Marker line that separates the manually-managed part of index.yaml from
# the automatically-managed section.
AUTO_MARKER = '\n# AUTOGENERATED\n'

# Explanatory comment written into index.yaml after the marker line.
AUTO_COMMENT = '''
# This index.yaml is automatically updated whenever the dev_appserver
# detects that a new type of query is run. If you want to manage the
# index.yaml file manually, remove the above marker line (the line
# saying "# AUTOGENERATED"). If you want to manage some indexes
# manually, move them above the marker line. The index.yaml file is
# automatically uploaded to the admin console when you next deploy
# your application using appcfg.py.
'''
def GenerateIndexFromHistory(query_history,
                             all_indexes=None, manual_indexes=None):
  """Generate most of the text for index.yaml from the query history.

  Args:
    query_history: Query history, a dict mapping query protocol buffers to
      the number of times each query was issued.
    all_indexes: Optional datastore_index.IndexDefinitions instance
      representing all the indexes found in the input file. May be None.
    manual_indexes: Optional datastore_index.IndexDefinitions instance
      containing indexes for which we should not generate output. May be None.

  Returns:
    A string representation that can safely be appended to an
    existing index.yaml file.
  """
  all_keys = datastore_index.IndexDefinitionsToKeys(all_indexes)
  manual_keys = datastore_index.IndexDefinitionsToKeys(manual_indexes)

  # Seed every non-manual index from the input file with a zero use count;
  # counts observed in the query history are accumulated on top.
  indexes = dict((key, 0) for key in all_keys - manual_keys)

  for query, count in query_history.iteritems():
    required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
    if required:
      key = (kind, ancestor, props)
      # Manually managed indexes are never emitted, even if used.
      if key not in manual_keys:
        if key in indexes:
          indexes[key] += count
        else:
          indexes[key] = count

  # Emit one YAML stanza per index, sorted for deterministic output, each
  # preceded by a comment describing how often the index was used.
  res = []
  for (kind, ancestor, props), count in sorted(indexes.iteritems()):
    res.append('')
    if count == 0:
      message = '# Unused in query history -- copied from input.'
    elif count == 1:
      message = '# Used once in query history.'
    else:
      message = '# Used %d times in query history.' % count
    res.append(message)
    res.append(datastore_index.IndexYamlForQuery(kind, ancestor, props))

  res.append('')
  return '\n'.join(res)
class IndexYamlUpdater(object):
  """Helper class for updating index.yaml.

  This class maintains some state about the query history and the
  index.yaml file in order to minimize the number of times index.yaml
  is actually overwritten.
  """

  # True once index.yaml was found to lack the AUTO_MARKER line, meaning the
  # developer manages it by hand and it must never be rewritten.
  index_yaml_is_manual = False
  # Last observed mtime of index.yaml (0 = never checked; None = missing).
  index_yaml_mtime = 0
  # Size of the datastore query history at the last update attempt.
  last_history_size = 0

  def __init__(self, root_path):
    """Constructor.

    Args:
      root_path: Path to the app's root directory.
    """
    self.root_path = root_path

  def UpdateIndexYaml(self, openfile=open):
    """Update index.yaml.

    Args:
      openfile: Used for dependency injection.

    We only ever write to index.yaml if either:
    - it doesn't exist yet; or
    - it contains an 'AUTOGENERATED' comment.

    All indexes *before* the AUTOGENERATED comment will be written
    back unchanged.  All indexes *after* the AUTOGENERATED comment
    will be updated with the latest query counts (query counts are
    reset by --clear_datastore).  Indexes that aren't yet in the file
    will be appended to the AUTOGENERATED section.

    We keep track of some data in order to avoid doing repetitive work:
    - if index.yaml is fully manual, we keep track of its mtime to
      avoid parsing it over and over;
    - we keep track of the number of keys in the history dict since
      the last time we updated index.yaml (or decided there was
      nothing to update).
    """
    index_yaml_file = os.path.join(self.root_path, 'index.yaml')

    try:
      index_yaml_mtime = os.path.getmtime(index_yaml_file)
    except os.error:
      index_yaml_mtime = None

    # Only do work if the file changed on disk or new queries were observed.
    index_yaml_changed = (index_yaml_mtime != self.index_yaml_mtime)
    self.index_yaml_mtime = index_yaml_mtime

    datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
    query_history = datastore_stub.QueryHistory()
    history_changed = (len(query_history) != self.last_history_size)
    self.last_history_size = len(query_history)

    if not (index_yaml_changed or history_changed):
      logging.debug('No need to update index.yaml')
      return

    if self.index_yaml_is_manual and not index_yaml_changed:
      logging.debug('Will not update manual index.yaml')
      return

    if index_yaml_mtime is None:
      index_yaml_data = None
    else:
      try:
        # NOTE: reads with the builtin open(), not the injected openfile.
        fh = open(index_yaml_file, 'r')
      except IOError:
        index_yaml_data = None
      else:
        try:
          index_yaml_data = fh.read()
        finally:
          fh.close()

    # A file without the AUTO_MARKER is considered manually managed.
    self.index_yaml_is_manual = (index_yaml_data is not None and
                                 AUTO_MARKER not in index_yaml_data)
    if self.index_yaml_is_manual:
      logging.info('Detected manual index.yaml, will not update')
      return

    if index_yaml_data is None:
      all_indexes = None
    else:
      try:
        all_indexes = datastore_index.ParseIndexDefinitions(index_yaml_data)
      except yaml_errors.EventListenerError, e:
        logging.error('Error parsing %s:\n%s', index_yaml_file, e)
        return
      except Exception, err:
        logging.error('Error parsing %s:\n%s.%s: %s', index_yaml_file,
                      err.__class__.__module__, err.__class__.__name__, err)
        return

    if index_yaml_data is None:
      manual_part, automatic_part = 'indexes:\n', ''
      manual_indexes = None
    else:
      # Everything before the marker is preserved verbatim on rewrite.
      manual_part, automatic_part = index_yaml_data.split(AUTO_MARKER, 1)
      try:
        manual_indexes = datastore_index.ParseIndexDefinitions(manual_part)
      except Exception, err:
        logging.error('Error parsing manual part of %s: %s',
                      index_yaml_file, err)
        return

    automatic_part = GenerateIndexFromHistory(query_history,
                                              all_indexes, manual_indexes)

    try:
      fh = openfile(index_yaml_file, 'w')
    except IOError, err:
      logging.error('Can\'t write index.yaml: %s', err)
      return

    try:
      logging.info('Updating %s', index_yaml_file)
      fh.write(manual_part)
      fh.write(AUTO_MARKER)
      fh.write(AUTO_COMMENT)
      fh.write(automatic_part)
    finally:
      fh.close()

    # Remember the mtime of the file we just wrote so the next call does not
    # mistake our own write for an external edit.
    try:
      self.index_yaml_mtime = os.path.getmtime(index_yaml_file)
    except os.error, err:
      logging.error('Can\'t stat index.yaml we just wrote: %s', err)
      self.index_yaml_mtime = None
def SetupIndexes(app_id, root_path):
  """Ensure that the set of existing composite indexes matches index.yaml.

  Note: this is similar to the algorithm used by the admin console for
  the same purpose.

  Args:
    app_id: Application ID being served.
    root_path: Path to the root of the application.
  """
  yaml_path = os.path.join(root_path, 'index.yaml')
  try:
    yaml_fh = open(yaml_path, 'r')
  except IOError:
    yaml_text = None
  else:
    try:
      yaml_text = yaml_fh.read()
    finally:
      yaml_fh.close()

  # Indexes declared in index.yaml; empty when the file is missing or empty.
  declared = []
  if yaml_text is not None:
    definitions = datastore_index.ParseIndexDefinitions(yaml_text)
    if definitions is not None and definitions.indexes is not None:
      declared = definitions.indexes

  requested_protos = datastore_admin.IndexDefinitionsToProtos(app_id, declared)
  existing_protos = datastore_admin.GetIndices(app_id)

  # Key each index by its encoded definition so the two sets can be diffed.
  requested = dict((proto.definition().Encode(), proto)
                   for proto in requested_protos)
  existing = dict((proto.definition().Encode(), proto)
                  for proto in existing_protos)

  created = 0
  for encoded, proto in requested.iteritems():
    if encoded not in existing:
      datastore_admin.CreateIndex(proto)
      created += 1

  deleted = 0
  for encoded, proto in existing.iteritems():
    if encoded not in requested:
      datastore_admin.DeleteIndex(proto)
      deleted += 1

  if created or deleted:
    logging.info("Created %d and deleted %d index(es); total %d",
                 created, deleted, len(requested))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""OS cross-platform compatibility tweaks.
This module will, on import, change some parts of the running evironment so
that other modules do not need special handling when running on different
operating systems, such as Linux/Mac OSX/Windows.
Some of these changes must be done before other modules are imported, so
always import this module first.
"""
import os

# Normalize the timezone before the time module is first used so every
# platform sees the same (UTC) clock behavior.
os.environ['TZ'] = 'UTC'
import time
if hasattr(time, 'tzset'):
  # tzset() only exists on Unix; on Windows the TZ env var alone suffices.
  time.tzset()

import __builtin__

if 'WindowsError' in __builtin__.__dict__:
  # Running on Windows: re-export the real builtin under this module's name.
  WindowsError = WindowsError
else:
  class WindowsError(Exception):
    """A fake Windows Error exception which should never be thrown."""

# Windows error code for "path not found" (winerror.h ERROR_PATH_NOT_FOUND).
ERROR_PATH_NOT_FOUND = 3
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Pure-Python application server for testing applications locally.
Given a port and the paths to a valid application directory (with an 'app.yaml'
file), the external library directory, and a relative URL to use for logins,
creates an HTTP server that can be used to test an application locally. Uses
stubs instead of actual APIs when SetupStubs() is called first.
Example:
root_path = '/path/to/application/directory'
login_url = '/login'
port = 8080
template_dir = '/path/to/appserver/templates'
server = dev_appserver.CreateServer(root_path, login_url, port, template_dir)
server.serve_forever()
"""
from google.appengine.tools import os_compat
import __builtin__
import BaseHTTPServer
import Cookie
import cStringIO
import cgi
import cgitb
import dummy_thread
import email.Utils
import errno
import httplib
import imp
import inspect
import itertools
import locale
import logging
import mimetools
import mimetypes
import os
import pickle
import pprint
import random
import re
import sre_compile
import sre_constants
import sre_parse
import mimetypes
import socket
import sys
import time
import traceback
import types
import urlparse
import urllib
import google
from google.pyglib import gexcept
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import appinfo
from google.appengine.api import croninfo
from google.appengine.api import datastore_admin
from google.appengine.api import datastore_file_stub
from google.appengine.api import mail_stub
from google.appengine.api import urlfetch_stub
from google.appengine.api import user_service_stub
from google.appengine.api import yaml_errors
from google.appengine.api.capabilities import capability_stub
from google.appengine.api.memcache import memcache_stub
from google.appengine import dist
from google.appengine.tools import dev_appserver_index
from google.appengine.tools import dev_appserver_login
# Placeholder expanded to the SDK's Python library directory.
PYTHON_LIB_VAR = '$PYTHON_LIB'
DEVEL_CONSOLE_PATH = PYTHON_LIB_VAR + '/google/appengine/ext/admin'

# errno values that indicate a missing file (as opposed to other I/O errors).
FILE_MISSING_EXCEPTIONS = frozenset([errno.ENOENT, errno.ENOTDIR])

# Longest URL (path plus query string) the server will accept.
MAX_URL_LENGTH = 2047

# Template file names used to render the in-page debugging console.
HEADER_TEMPLATE = 'logging_console_header.html'
SCRIPT_TEMPLATE = 'logging_console.js'
MIDDLE_TEMPLATE = 'logging_console_middle.html'
FOOTER_TEMPLATE = 'logging_console_footer.html'

# Baseline CGI environment shared by all requests.
DEFAULT_ENV = {
  'GATEWAY_INTERFACE': 'CGI/1.1',
  'AUTH_DOMAIN': 'gmail.com',
  'TZ': 'UTC',
}

# Register extra MIME types the mimetypes module does not know by default so
# static files are served with sensible Content-Types.
for ext, mime_type in (('.asc', 'text/plain'),
                       ('.diff', 'text/plain'),
                       ('.csv', 'text/comma-separated-values'),
                       ('.rss', 'application/rss+xml'),
                       ('.text', 'text/plain'),
                       ('.wbmp', 'image/vnd.wap.wbmp')):
  mimetypes.add_type(mime_type, ext)

# Largest response body a runtime may produce (10 MB).
MAX_RUNTIME_RESPONSE_SIZE = 10 << 20

# Largest request body accepted (10 MB).
MAX_REQUEST_SIZE = 10 * 1024 * 1024

API_VERSION = '1'
# Exception hierarchy: all module-specific errors derive from Error so
# callers can catch them uniformly.
class Error(Exception):
  """Base-class for exceptions in this module."""


class InvalidAppConfigError(Error):
  """The supplied application configuration file is invalid."""


class AppConfigNotFoundError(Error):
  """Application configuration file not found."""


class TemplatesNotLoadedError(Error):
  """Templates for the debugging console were not loaded."""
def SplitURL(relative_url):
  """Splits a relative URL into its path and query-string components.

  Args:
    relative_url: String containing the relative URL (often starting with '/')
      to split. Should be properly escaped as www-form-urlencoded data.

  Returns:
    Tuple (script_name, query_string) where:
      script_name: Relative URL of the script that was accessed.
      query_string: String containing everything after the '?' character.
  """
  # urlsplit yields (scheme, netloc, path, query, fragment); only the path
  # and query components are of interest here.
  split_result = urlparse.urlsplit(relative_url)
  return split_result[2], split_result[3]
def GetFullURL(server_name, server_port, relative_url):
  """Returns the full, original URL used to access the relative URL.

  Args:
    server_name: Name of the local host, or the value of the 'host' header
      from the request.
    server_port: Port on which the request was served (string or int).
    relative_url: Relative URL that was accessed, including query string.

  Returns:
    String containing the original URL.
  """
  netloc = server_name
  # Omit the port from the netloc only for the default HTTP port.
  if str(server_port) != '80':
    netloc = '%s:%s' % (server_name, server_port)
  return 'http://%s%s' % (netloc, relative_url)
class URLDispatcher(object):
  """Base-class for handling HTTP requests."""

  def Dispatch(self,
               relative_url,
               path,
               headers,
               infile,
               outfile,
               base_env_dict=None):
    """Dispatch and handle an HTTP request.

    base_env_dict should contain at least these CGI variables:
      REQUEST_METHOD, REMOTE_ADDR, SERVER_SOFTWARE, SERVER_NAME,
      SERVER_PROTOCOL, SERVER_PORT

    Args:
      relative_url: String containing the URL accessed.
      path: Local path of the resource that was matched; back-references will be
        replaced by values matched in the relative_url. Path may be relative
        or absolute, depending on the resource being served (e.g., static files
        will have an absolute path; scripts will be relative).
      headers: Instance of mimetools.Message with headers from the request.
      infile: File-like object with input data from the request.
      outfile: File-like object where output data should be written.
      base_env_dict: Dictionary of CGI environment parameters if available.
        Defaults to None.

    Returns:
      None if request handling is complete.
      Tuple (path, headers, input_file) for an internal redirect:
        path: Path of URL to redirect to.
        headers: Headers to send to other dispatcher.
        input_file: New input to send to new dispatcher.
    """
    raise NotImplementedError

  def EndRedirect(self, dispatched_output, original_output):
    """Process the end of an internal redirect.

    This method is called after all subsequent dispatch requests have finished.
    By default the output from the dispatched process is copied to the original.

    This will not be called on dispatchers that do not return an internal
    redirect.

    Args:
      dispatched_output: StringIO buffer containing the results from the
        dispatched handler.
      original_output: Output stream to which the dispatched results should
        be copied.
    """
    original_output.write(dispatched_output.read())
class URLMatcher(object):
  """Matches an arbitrary URL using a list of URL patterns from an application.

  Each URL pattern has an associated URLDispatcher instance and path to the
  resource's location on disk. See AddURL for more details. The first pattern
  that matches an inputted URL will have its associated values returned by
  Match().
  """

  def __init__(self):
    """Initializer."""
    # List of (compiled_regex, dispatcher, path, requires_login, admin_only)
    # tuples, checked in insertion order by Match().
    self._url_patterns = []

  def AddURL(self, regex, dispatcher, path, requires_login, admin_only):
    """Adds a URL pattern to the list of patterns.

    If the supplied regex starts with a '^' or ends with a '$' an
    InvalidAppConfigError exception will be raised. Start and end symbols
    are implicitly added to all regexes, meaning we assume that all regexes
    consume all input from a URL.

    Args:
      regex: String containing the regular expression pattern.
      dispatcher: Instance of URLDispatcher that should handle requests that
        match this regex.
      path: Path on disk for the resource. May contain back-references like
        r'\1', r'\2', etc, which will be replaced by the corresponding groups
        matched by the regex if present.
      requires_login: True if the user must be logged-in before accessing this
        URL; False if anyone can access this URL.
      admin_only: True if the user must be a logged-in administrator to
        access the URL; False if anyone can access the URL.

    Raises:
      TypeError: The dispatcher is not a URLDispatcher sub-class.
      InvalidAppConfigError: The regex is explicitly anchored or invalid.
    """
    if not isinstance(dispatcher, URLDispatcher):
      raise TypeError, 'dispatcher must be a URLDispatcher sub-class'

    if regex.startswith('^') or regex.endswith('$'):
      raise InvalidAppConfigError, 'regex starts with "^" or ends with "$"'

    adjusted_regex = '^%s$' % regex

    try:
      url_re = re.compile(adjusted_regex)
    except re.error, e:
      raise InvalidAppConfigError, 'regex invalid: %s' % e

    match_tuple = (url_re, dispatcher, path, requires_login, admin_only)
    self._url_patterns.append(match_tuple)

  def Match(self,
            relative_url,
            split_url=SplitURL):
    """Matches a URL from a request against the list of URL patterns.

    The supplied relative_url may include the query string (i.e., the '?'
    character and everything following).

    Args:
      relative_url: Relative URL being accessed in a request.
      split_url: Used for dependency injection.

    Returns:
      Tuple (dispatcher, matched_path, requires_login, admin_only), which are
      the corresponding values passed to AddURL when the matching URL pattern
      was added to this matcher. The matched_path will have back-references
      replaced using values matched by the URL pattern. If no match was found,
      dispatcher will be None.
    """
    # Only the path portion of the URL participates in pattern matching.
    adjusted_url, query_string = split_url(relative_url)

    for url_tuple in self._url_patterns:
      url_re, dispatcher, path, requires_login, admin_only = url_tuple
      the_match = url_re.match(adjusted_url)

      if the_match:
        adjusted_path = the_match.expand(path)
        return dispatcher, adjusted_path, requires_login, admin_only

    return None, None, None, None

  def GetDispatchers(self):
    """Retrieves the URLDispatcher objects that could be matched.

    Should only be used in tests.

    Returns:
      A set of URLDispatcher objects.
    """
    return set([url_tuple[1] for url_tuple in self._url_patterns])
class MatcherDispatcher(URLDispatcher):
  """Dispatcher across multiple URLMatcher instances."""

  def __init__(self,
               login_url,
               url_matchers,
               get_user_info=dev_appserver_login.GetUserInfo,
               login_redirect=dev_appserver_login.LoginRedirect):
    """Initializer.

    Args:
      login_url: Relative URL which should be used for handling user logins.
      url_matchers: Sequence of URLMatcher objects.
      get_user_info, login_redirect: Used for dependency injection.
    """
    self._login_url = login_url
    self._url_matchers = tuple(url_matchers)
    self._get_user_info = get_user_info
    self._login_redirect = login_redirect

  def Dispatch(self,
               relative_url,
               path,
               headers,
               infile,
               outfile,
               base_env_dict=None):
    """Dispatches a request to the first matching dispatcher.

    Matchers are checked in the order they were supplied to the constructor.
    If no matcher matches, a 404 error will be written to the outfile. The
    path variable supplied to this method is ignored.
    """
    cookies = ', '.join(headers.getheaders('cookie'))
    email, admin = self._get_user_info(cookies)

    for matcher in self._url_matchers:
      dispatcher, matched_path, requires_login, admin_only = matcher.Match(relative_url)
      if dispatcher is None:
        continue

      logging.debug('Matched "%s" to %s with path %s',
                    relative_url, dispatcher, matched_path)

      # Enforce login/admin restrictions before handing off to the handler.
      if (requires_login or admin_only) and not email:
        logging.debug('Login required, redirecting user')
        self._login_redirect(
          self._login_url,
          base_env_dict['SERVER_NAME'],
          base_env_dict['SERVER_PORT'],
          relative_url,
          outfile)
      elif admin_only and not admin:
        outfile.write('Status: %d Not authorized\r\n'
                      '\r\n'
                      'Current logged in user %s is not '
                      'authorized to view this page.'
                      % (httplib.FORBIDDEN, email))
      else:
        forward = dispatcher.Dispatch(relative_url,
                                      matched_path,
                                      headers,
                                      infile,
                                      outfile,
                                      base_env_dict=base_env_dict)

        if forward:
          # Internal redirect: recursively dispatch the new request into a
          # buffer so the original dispatcher can merge the output.
          new_path, new_headers, new_input = forward
          logging.info('Internal redirection to %s' % new_path)
          new_outfile = cStringIO.StringIO()
          self.Dispatch(new_path,
                        None,
                        new_headers,
                        new_input,
                        new_outfile,
                        dict(base_env_dict))
          new_outfile.seek(0)
          dispatcher.EndRedirect(new_outfile, outfile)

      # Only the first matching URL pattern handles the request.
      return

    outfile.write('Status: %d URL did not match\r\n'
                  '\r\n'
                  'Not found error: %s did not match any patterns '
                  'in application configuration.'
                  % (httplib.NOT_FOUND, relative_url))
class ApplicationLoggingHandler(logging.Handler):
  """Python Logging handler that displays the debugging console to users."""

  # Cookie whose presence enables the console even without ?debug.
  _COOKIE_NAME = '_ah_severity'

  # Class-level template cache, populated by InitializeTemplates().
  _TEMPLATES_INITIALIZED = False
  _HEADER = None
  _SCRIPT = None
  _MIDDLE = None
  _FOOTER = None

  @staticmethod
  def InitializeTemplates(header, script, middle, footer):
    """Initializes the templates used to render the debugging console.

    This method must be called before any ApplicationLoggingHandler instances
    are created.

    Args:
      header: The header template that is printed first.
      script: The script template that is printed after the logging messages.
      middle: The middle element that's printed before the footer.
      footer: The last element that's printed at the end of the document.
    """
    ApplicationLoggingHandler._HEADER = header
    ApplicationLoggingHandler._SCRIPT = script
    ApplicationLoggingHandler._MIDDLE = middle
    ApplicationLoggingHandler._FOOTER = footer
    ApplicationLoggingHandler._TEMPLATES_INITIALIZED = True

  @staticmethod
  def AreTemplatesInitialized():
    """Returns True if InitializeTemplates has been called, False otherwise."""
    return ApplicationLoggingHandler._TEMPLATES_INITIALIZED

  def __init__(self, *args, **kwargs):
    """Initializer.

    Args:
      args, kwargs: See logging.Handler.

    Raises:
      TemplatesNotLoadedError exception if the InitializeTemplates method was
      not called before creating this instance.
    """
    if not self._TEMPLATES_INITIALIZED:
      raise TemplatesNotLoadedError

    logging.Handler.__init__(self, *args, **kwargs)
    # Records accumulated during the request, replayed by AddDebuggingConsole.
    self._record_list = []
    self._start_time = time.time()

  def emit(self, record):
    """Called by the logging module each time the application logs a message.

    Args:
      record: logging.LogRecord instance corresponding to the newly logged
        message.
    """
    self._record_list.append(record)

  def AddDebuggingConsole(self, relative_url, env, outfile):
    """Prints an HTML debugging console to an output stream, if requested.

    Args:
      relative_url: Relative URL that was accessed, including the query string.
        Used to determine if the parameter 'debug' was supplied, in which case
        the console will be shown.
      env: Dictionary containing CGI environment variables. Checks for the
        HTTP_COOKIE entry to see if the accessing user has any logging-related
        cookies set.
      outfile: Output stream to which the console should be written if either
        a debug parameter was supplied or a logging cookie is present.
    """
    script_name, query_string = SplitURL(relative_url)
    param_dict = cgi.parse_qs(query_string, True)
    cookie_dict = Cookie.SimpleCookie(env.get('HTTP_COOKIE', ''))
    if 'debug' not in param_dict and self._COOKIE_NAME not in cookie_dict:
      return

    outfile.write(self._HEADER)
    for record in self._record_list:
      self._PrintRecord(record, outfile)

    outfile.write(self._MIDDLE)
    outfile.write(self._SCRIPT)
    outfile.write(self._FOOTER)

  def _PrintRecord(self, record, outfile):
    """Prints a single logging record to an output stream.

    Args:
      record: logging.LogRecord instance to print.
      outfile: Output stream to which the LogRecord should be printed.
    """
    # HTML-escape the message since it is embedded directly in the page.
    message = cgi.escape(record.getMessage())
    level_name = logging.getLevelName(record.levelno).lower()
    level_letter = level_name[:1].upper()
    time_diff = record.created - self._start_time
    outfile.write('<span class="_ah_logline_%s">\n' % level_name)
    outfile.write('<span class="_ah_logline_%s_prefix">%2.5f %s ></span>\n'
                  % (level_name, time_diff, level_letter))
    outfile.write('%s\n' % message)
    outfile.write('</span>\n')
# Request headers that are handled specially (or dropped) rather than being
# copied into the CGI environment as HTTP_* variables; see SetupEnvironment.
_IGNORE_REQUEST_HEADERS = frozenset(['content-type', 'content-length',
                                     'accept-encoding', 'transfer-encoding'])
def SetupEnvironment(cgi_path,
                     relative_url,
                     headers,
                     split_url=SplitURL,
                     get_user_info=dev_appserver_login.GetUserInfo):
  """Sets up environment variables for a CGI.

  Args:
    cgi_path: Full file-system path to the CGI being executed.
    relative_url: Relative URL used to access the CGI.
    headers: Instance of mimetools.Message containing request headers.
    split_url, get_user_info: Used for dependency injection.

  Returns:
    Dictionary containing CGI environment variables.
  """
  env = DEFAULT_ENV.copy()

  script_name, query_string = split_url(relative_url)

  env['SCRIPT_NAME'] = ''
  env['QUERY_STRING'] = query_string
  env['PATH_INFO'] = urllib.unquote(script_name)
  env['PATH_TRANSLATED'] = cgi_path
  env['CONTENT_TYPE'] = headers.getheader('content-type',
                                          'application/x-www-form-urlencoded')
  env['CONTENT_LENGTH'] = headers.getheader('content-length', '')

  # Derive the user identity from the login cookies.
  cookies = ', '.join(headers.getheaders('cookie'))
  email, admin = get_user_info(cookies)
  env['USER_EMAIL'] = email
  if admin:
    env['USER_IS_ADMIN'] = '1'

  # Copy the remaining request headers into the environment using the
  # standard HTTP_* naming convention, joining duplicates with ', '.
  for key in headers:
    if key in _IGNORE_REQUEST_HEADERS:
      continue
    adjusted_name = key.replace('-', '_').upper()
    env['HTTP_' + adjusted_name] = ', '.join(headers.getheaders(key))

  return env
def NotImplementedFake(*args, **kwargs):
  """Fake for methods/functions that are not implemented in the production
  environment.

  Raises:
    NotImplementedError: always, regardless of the arguments supplied.
  """
  raise NotImplementedError("This class/method is not available.")
class NotImplementedFakeClass(object):
  """Fake class for classes that are not implemented in the production
  environment.
  """

  # Any attempt to instantiate this class raises NotImplementedError.
  __init__ = NotImplementedFake
def IsEncodingsModule(module_name):
  """Determines if the supplied module is related to encodings in any way.

  Encodings-related modules cannot be reloaded, so they need to be treated
  specially when sys.modules is modified in any way.

  Args:
    module_name: Absolute name of the module regardless of how it is imported
      into the local namespace (e.g., foo.bar.baz).

  Returns:
    True if it's an encodings-related module; False otherwise.
  """
  return (module_name in ('codecs', 'encodings') or
          module_name.startswith('encodings.'))
def ClearAllButEncodingsModules(module_dict):
  """Clear all modules in a module dictionary except for those modules that
  are in any way related to encodings.

  Args:
    module_dict: Dictionary in the form used by sys.modules.
  """
  # Snapshot the candidate names first so the dictionary is not mutated
  # while being iterated.
  removable = [name for name in module_dict if not IsEncodingsModule(name)]
  for name in removable:
    del module_dict[name]
def FakeURandom(n):
  """Fake version of os.urandom.

  Uses the (non-cryptographic) random module, so the output is only as
  unpredictable as the module's global generator state.

  Args:
    n: Number of pseudo-random bytes to produce.

  Returns:
    A string of n characters whose ordinals are uniformly drawn from [0, 255].
  """
  # ''.join over a generator avoids the quadratic behavior of repeated
  # string += and works without the Python-2-only xrange builtin.
  return ''.join(chr(random.randint(0, 255)) for _ in range(n))
def FakeUname():
  """Fake version of os.uname that always reports a generic Linux host."""
  return ('Linux',) + ('',) * 4
def FakeUnlink(path):
  """Fake version of os.unlink; always fails.

  Raises:
    OSError: errno 2 with "Is a directory" for directories, errno 1
      ("Operation not permitted") for everything else.
      NOTE(review): the errno/message pairing for the directory case looks
      mismatched but is preserved as-is — confirm before changing.
  """
  if not os.path.isdir(path):
    raise OSError(1, "Operation not permitted", path)
  raise OSError(2, "Is a directory", path)
def FakeReadlink(path):
  """Fake version of os.readlink; always fails as if the path were invalid.

  Raises:
    OSError: always, with errno EINVAL (22).
  """
  raise OSError(errno.EINVAL, "Invalid argument", path)
def FakeAccess(path, mode):
  """Fake version of os.access where only reads are supported.

  Args:
    path: Path to test.
    mode: Access mode; anything other than os.R_OK is denied.

  Returns:
    True only if the path exists and read access was requested.
  """
  return os.path.exists(path) and mode == os.R_OK
def FakeSetLocale(category, value=None, original_setlocale=locale.setlocale):
  """Fake version of locale.setlocale that only supports the default.

  Args:
    category: locale category constant (e.g. locale.LC_ALL).
    value: Requested locale; only None, '', 'C' and 'POSIX' are accepted.
    original_setlocale: Used for dependency injection.

  Raises:
    locale.Error: If any locale other than the "C" default is requested.
  """
  if value in (None, '', 'C', 'POSIX'):
    return original_setlocale(category, 'C')
  raise locale.Error('locale emulation only supports "C" locale')
def IsPathInSubdirectories(filename,
                           subdirectories,
                           normcase=os.path.normcase):
  """Determines if a filename is contained within one of a set of directories.

  Args:
    filename: Path of the file (relative or absolute).
    subdirectories: Iterable collection of paths to subdirectories which the
      given filename may be under.
    normcase: Used for dependency injection.

  Returns:
    True if the supplied filename is in one of the given sub-directories or
    its hierarchy of children. False otherwise.
  """
  file_dir = normcase(os.path.dirname(os.path.abspath(filename)))
  for candidate in subdirectories:
    candidate_dir = normcase(os.path.abspath(candidate))
    # NOTE(review): a pure string-prefix check — '/a/bc' would be treated as
    # inside '/a/b'; preserved as-is, confirm callers pass normalized roots.
    if os.path.commonprefix([file_dir, candidate_dir]) == candidate_dir:
      return True
  return False
# Module-name prefixes shared between the hardened and unhardened parts of
# the process; see SetupSharedModules.
SHARED_MODULE_PREFIXES = set([
  'google',
  'logging',
  'sys',
  'warnings',

  're',
  'sre_compile',
  'sre_constants',
  'sre_parse',

  'wsgiref',
])

# Exceptions to SHARED_MODULE_PREFIXES: these must never be shared even
# though they fall under a shared prefix.
NOT_SHARED_MODULE_PREFIXES = set([
  'google.appengine.ext',
])
def ModuleNameHasPrefix(module_name, prefix_set):
  """Determines if a module's name belongs to a set of prefix strings.

  Args:
    module_name: String containing the fully qualified module name.
    prefix_set: Iterable set of module name prefixes to check against.

  Returns:
    True if the module_name belongs to the prefix set or is a submodule of
    any of the modules specified in the prefix_set. Otherwise False.
  """
  return any(module_name == prefix or module_name.startswith(prefix + '.')
             for prefix in prefix_set)
def SetupSharedModules(module_dict):
  """Creates a module dictionary for the hardened part of the process.

  Module dictionary will contain modules that should be shared between the
  hardened and unhardened parts of the process.

  Args:
    module_dict: Module dictionary from which existing modules should be
      pulled (usually sys.modules).

  Returns:
    A new module dictionary.
  """
  shared = {}
  for name, module in module_dict.iteritems():
    # Skip placeholder entries for failed relative imports.
    if module is None:
      continue
    # Encodings modules cannot be reloaded, so they are always shared.
    if IsEncodingsModule(name):
      shared[name] = module
      continue
    if (ModuleNameHasPrefix(name, SHARED_MODULE_PREFIXES) and
        not ModuleNameHasPrefix(name, NOT_SHARED_MODULE_PREFIXES)):
      shared[name] = module
  return shared
class FakeFile(file):
"""File sub-class that enforces the security restrictions of the production
environment.
"""
ALLOWED_MODES = frozenset(['r', 'rb', 'U', 'rU'])
ALLOWED_FILES = set(os.path.normcase(filename)
for filename in mimetypes.knownfiles
if os.path.isfile(filename))
ALLOWED_DIRS = set([
os.path.normcase(os.path.realpath(os.path.dirname(os.__file__))),
os.path.normcase(os.path.abspath(os.path.dirname(os.__file__))),
])
NOT_ALLOWED_DIRS = set([
os.path.normcase(os.path.join(os.path.dirname(os.__file__),
'site-packages'))
])
ALLOWED_SITE_PACKAGE_DIRS = set(
os.path.normcase(os.path.abspath(os.path.join(
os.path.dirname(os.__file__), 'site-packages', path)))
for path in [
])
_original_file = file
_root_path = None
_application_paths = None
_skip_files = None
_static_file_config_matcher = None
_allow_skipped_files = True
_availability_cache = {}
@staticmethod
def SetAllowedPaths(root_path, application_paths):
"""Configures which paths are allowed to be accessed.
Must be called at least once before any file objects are created in the
hardened environment.
Args:
root_path: Absolute path to the root of the application.
application_paths: List of additional paths that the application may
access, this must include the App Engine runtime but
not the Python library directories.
"""
FakeFile._application_paths = (set(os.path.realpath(path)
for path in application_paths) |
set(os.path.abspath(path)
for path in application_paths))
FakeFile._application_paths.add(root_path)
FakeFile._root_path = os.path.join(root_path, '')
FakeFile._availability_cache = {}
@staticmethod
def SetAllowSkippedFiles(allow_skipped_files):
"""Configures access to files matching FakeFile._skip_files
Args:
allow_skipped_files: Boolean whether to allow access to skipped files
"""
FakeFile._allow_skipped_files = allow_skipped_files
FakeFile._availability_cache = {}
@staticmethod
def SetSkippedFiles(skip_files):
"""Sets which files in the application directory are to be ignored.
Must be called at least once before any file objects are created in the
hardened environment.
Must be called whenever the configuration was updated.
Args:
skip_files: Object with .match() method (e.g. compiled regexp).
"""
FakeFile._skip_files = skip_files
FakeFile._availability_cache = {}
@staticmethod
def SetStaticFileConfigMatcher(static_file_config_matcher):
"""Sets StaticFileConfigMatcher instance for checking if a file is static.
Must be called at least once before any file objects are created in the
hardened environment.
Must be called whenever the configuration was updated.
Args:
static_file_config_matcher: StaticFileConfigMatcher instance.
"""
FakeFile._static_file_config_matcher = static_file_config_matcher
FakeFile._availability_cache = {}
@staticmethod
def IsFileAccessible(filename, normcase=os.path.normcase):
  """Determines if a file's path is accessible.

  SetAllowedPaths(), SetSkippedFiles() and SetStaticFileConfigMatcher() must
  be called before this method or else all file accesses will raise an error.

  Args:
    filename: Path of the file to check (relative or absolute). May be a
      directory, in which case access for files inside that directory will
      be checked.
    normcase: Used for dependency injection.

  Returns:
    True if the file is accessible, False otherwise.
  """
  target = normcase(os.path.abspath(filename))

  # Directories are checked by probing a hypothetical file inside them, so
  # the same path logic applies to both cases.
  if os.path.isdir(target):
    target = os.path.join(target, 'foo')

  cached = FakeFile._availability_cache.get(target)
  if cached is not None:
    return cached

  accessible = FakeFile._IsFileAccessibleNoCache(target, normcase=normcase)
  FakeFile._availability_cache[target] = accessible
  return accessible
@staticmethod
def _IsFileAccessibleNoCache(logical_filename, normcase=os.path.normcase):
  """Determines if a file's path is accessible.

  This is an internal part of the IsFileAccessible implementation.

  Args:
    logical_filename: Absolute path of the file to check.
    normcase: Used for dependency injection.

  Returns:
    True if the file is accessible, False otherwise.
  """
  # Files inside the application root are additionally subject to the
  # skip_files and static-file restrictions; these denials take precedence
  # over all of the allow-lists below.
  if IsPathInSubdirectories(logical_filename, [FakeFile._root_path],
                            normcase=normcase):
    relative_filename = logical_filename[len(FakeFile._root_path):]
    if (not FakeFile._allow_skipped_files and
        FakeFile._skip_files.match(relative_filename)):
      logging.warning('Blocking access to skipped file "%s"',
                      logical_filename)
      return False

    if FakeFile._static_file_config_matcher.IsStaticFile(relative_filename):
      logging.warning('Blocking access to static file "%s"',
                      logical_filename)
      return False

  # Individually allow-listed files.
  if logical_filename in FakeFile.ALLOWED_FILES:
    return True

  # Allow-listed site-packages directories.
  if IsPathInSubdirectories(logical_filename,
                            FakeFile.ALLOWED_SITE_PACKAGE_DIRS,
                            normcase=normcase):
    return True

  # Otherwise the path must lie under an allowed directory and not under an
  # explicitly disallowed one.
  allowed_dirs = FakeFile._application_paths | FakeFile.ALLOWED_DIRS

  if (IsPathInSubdirectories(logical_filename,
                             allowed_dirs,
                             normcase=normcase) and
      not IsPathInSubdirectories(logical_filename,
                                 FakeFile.NOT_ALLOWED_DIRS,
                                 normcase=normcase)):
    return True

  return False
def __init__(self, filename, mode='r', bufsize=-1, **kwargs):
  """Initializer. See file built-in documentation.

  Raises:
    IOError: if the mode is not allowed, or if the file may not be accessed
      under the configured hardening restrictions (errno set to EACCES).
  """
  if mode not in FakeFile.ALLOWED_MODES:
    raise IOError('invalid mode: %s' % mode)

  if not FakeFile.IsFileAccessible(filename):
    raise IOError(errno.EACCES, 'file not accessible')

  # Delegate the actual open to the real built-in file type.
  super(FakeFile, self).__init__(filename, mode, bufsize, **kwargs)
class RestrictedPathFunction(object):
  """Enforces access restrictions for functions that have a file or
  directory path as their first argument."""

  # Keep a reference to the real os module in case it is swapped out.
  _original_os = os

  def __init__(self, original_func):
    """Initializer.

    Args:
      original_func: Callable that takes as its first argument the path to a
        file or directory on disk; all subsequent arguments may be variable.
    """
    self._original_func = original_func

  def __call__(self, path, *args, **kwargs):
    """Enforces access permissions for the function passed to the constructor.

    Raises:
      OSError with errno.EACCES when the path may not be accessed.
    """
    if FakeFile.IsFileAccessible(path):
      return self._original_func(path, *args, **kwargs)

    raise OSError(errno.EACCES, 'path not accessible')
def GetSubmoduleName(fullname):
  """Determines the leaf submodule name of a full module name.

  Args:
    fullname: Fully qualified module name, e.g. 'foo.bar.baz'

  Returns:
    Submodule name, e.g. 'baz'. If the supplied module has no submodule (e.g.,
    'stuff'), the returned value will just be that module name ('stuff').
  """
  # rpartition yields ('', '', fullname) when there is no dot, so the last
  # element is correct in both the dotted and undotted cases.
  return fullname.rpartition('.')[2]
class CouldNotFindModuleError(ImportError):
  """Raised when a module could not be found.

  In contrast to when a module has been found, but cannot be loaded because of
  hardening restrictions. Callers (e.g. find_module) catch this to signal
  that the next import mechanism should be tried.
  """
def Trace(func):
  """Decorator that logs the call stack of the HardenedModulesHook class as
  it executes, indenting logging messages based on the current stack depth.

  Args:
    func: Unbound HardenedModulesHook method to wrap.

  Returns:
    The wrapped method.
  """
  def decorate(self, *args, **kwargs):
    # Render positional and keyword arguments once, so the same string is
    # used for both the Entering and Exiting log lines.
    args_to_show = []
    if args is not None:
      args_to_show.extend(str(argument) for argument in args)
    if kwargs is not None:
      args_to_show.extend('%s=%s' % (key, value)
                          for key, value in kwargs.iteritems())

    args_string = ', '.join(args_to_show)

    self.log('Entering %s(%s)', func.func_name, args_string)
    self._indent_level += 1
    try:
      return func(self, *args, **kwargs)
    finally:
      # Restore the depth even when the wrapped call raises.
      self._indent_level -= 1
      self.log('Exiting %s(%s)', func.func_name, args_string)

  return decorate
class HardenedModulesHook(object):
  """Meta import hook that restricts the modules used by applications to match
  the production environment.

  Module controls supported:
  - Disallow native/extension modules from being loaded
  - Disallow built-in and/or Python-distributed modules from being loaded
  - Replace modules with completely empty modules
  - Override specific module attributes
  - Replace one module with another

  After creation, this object should be added to the front of the sys.meta_path
  list (which may need to be created). The sys.path_importer_cache dictionary
  should also be cleared, to prevent loading any non-restricted modules.

  See PEP302 for more info on how this works:
    http://www.python.org/dev/peps/pep-0302/
  """

  ENABLE_LOGGING = False

  def log(self, message, *args):
    """Logs an import-related message to stderr, with indentation based on
    current call-stack depth.

    Args:
      message: Logging format string.
      args: Positional format parameters for the logging message.
    """
    if HardenedModulesHook.ENABLE_LOGGING:
      indent = self._indent_level * ' '
      print >>sys.stderr, indent + (message % args)

  # C-extension and built-in modules that applications are permitted to
  # import even though they are not plain Python source.
  _WHITE_LIST_C_MODULES = [
    'array',
    'binascii',
    'bz2',
    'cmath',
    'collections',
    'crypt',
    'cStringIO',
    'datetime',
    'errno',
    'exceptions',
    'gc',
    'itertools',
    'math',
    'md5',
    'operator',
    'posix',
    'posixpath',
    'pyexpat',
    'sha',
    'struct',
    'sys',
    'time',
    'timing',
    'unicodedata',
    'zlib',
    '_bisect',
    '_codecs',
    '_codecs_cn',
    '_codecs_hk',
    '_codecs_iso2022',
    '_codecs_jp',
    '_codecs_kr',
    '_codecs_tw',
    '_collections',
    '_csv',
    '_elementtree',
    '_functools',
    '_hashlib',
    '_heapq',
    '_locale',
    '_lsprof',
    '_md5',
    '_multibytecodec',
    '_random',
    '_sha',
    '_sha256',
    '_sha512',
    '_sre',
    '_struct',
    '_types',
    '_weakref',
    '__main__',
  ]

  # Modules whose attribute sets are pruned down to exactly these symbols
  # (plus dunder names) by FixModule.
  _WHITE_LIST_PARTIAL_MODULES = {
    'gc': [
      'enable',
      'disable',
      'isenabled',
      'collect',
      'get_debug',
      'set_threshold',
      'get_threshold',
      'get_count'
    ],
    'os': [
      'access',
      'altsep',
      'curdir',
      'defpath',
      'devnull',
      'environ',
      'error',
      'extsep',
      'EX_NOHOST',
      'EX_NOINPUT',
      'EX_NOPERM',
      'EX_NOUSER',
      'EX_OK',
      'EX_OSERR',
      'EX_OSFILE',
      'EX_PROTOCOL',
      'EX_SOFTWARE',
      'EX_TEMPFAIL',
      'EX_UNAVAILABLE',
      'EX_USAGE',
      'F_OK',
      'getcwd',
      'getcwdu',
      'getenv',
      'listdir',
      'lstat',
      'name',
      'NGROUPS_MAX',
      'O_APPEND',
      'O_CREAT',
      'O_DIRECT',
      'O_DIRECTORY',
      'O_DSYNC',
      'O_EXCL',
      'O_LARGEFILE',
      'O_NDELAY',
      'O_NOCTTY',
      'O_NOFOLLOW',
      'O_NONBLOCK',
      'O_RDONLY',
      'O_RDWR',
      'O_RSYNC',
      'O_SYNC',
      'O_TRUNC',
      'O_WRONLY',
      'pardir',
      'path',
      'pathsep',
      'R_OK',
      'readlink',
      'remove',
      'SEEK_CUR',
      'SEEK_END',
      'SEEK_SET',
      'sep',
      'stat',
      'stat_float_times',
      'stat_result',
      'strerror',
      'TMP_MAX',
      'unlink',
      'urandom',
      'walk',
      'WCOREDUMP',
      'WEXITSTATUS',
      'WIFEXITED',
      'WIFSIGNALED',
      'WIFSTOPPED',
      'WNOHANG',
      'WSTOPSIG',
      'WTERMSIG',
      'WUNTRACED',
      'W_OK',
      'X_OK',
    ],
  }

  # Attributes that are replaced on the named modules after loading, mostly
  # to substitute sandboxed fakes for filesystem/OS operations.
  _MODULE_OVERRIDES = {
    'locale': {
      'setlocale': FakeSetLocale,
    },

    'os': {
      'access': FakeAccess,
      'listdir': RestrictedPathFunction(os.listdir),

      # NOTE(review): lstat is mapped to os.stat (not os.lstat), so symlinks
      # are followed — presumably to match production behavior; confirm.
      'lstat': RestrictedPathFunction(os.stat),
      'readlink': FakeReadlink,
      'remove': FakeUnlink,
      'stat': RestrictedPathFunction(os.stat),
      'uname': FakeUname,
      'unlink': FakeUnlink,
      'urandom': FakeURandom,
    },
  }

  # Module file types that may be imported at all; C extensions
  # (imp.C_EXTENSION) are deliberately absent.
  _ENABLED_FILE_TYPES = (
    imp.PKG_DIRECTORY,
    imp.PY_SOURCE,
    imp.PY_COMPILED,
    imp.C_BUILTIN,
  )

  def __init__(self,
               module_dict,
               imp_module=imp,
               os_module=os,
               dummy_thread_module=dummy_thread,
               pickle_module=pickle):
    """Initializer.

    Args:
      module_dict: Module dictionary to use for managing system modules.
        Should be sys.modules.
      imp_module, os_module, dummy_thread_module, pickle_module: References to
        modules that exist in the dev_appserver that must be used by this class
        in order to function, even if these modules have been unloaded from
        sys.modules.
    """
    self._module_dict = module_dict
    self._imp = imp_module
    self._os = os_module
    self._dummy_thread = dummy_thread_module
    # NOTE(review): this assigns the global `pickle`, ignoring the injected
    # pickle_module parameter — looks like a dependency-injection bug; confirm.
    self._pickle = pickle
    self._indent_level = 0

  @Trace
  def find_module(self, fullname, path=None):
    """See PEP 302."""
    # cPickle and thread are always handled by this hook so they can be
    # replaced with pickle/dummy_thread in FindAndLoadModule.
    if fullname in ('cPickle', 'thread'):
      return self

    search_path = path
    all_modules = fullname.split('.')
    try:
      # Walk every package prefix of fullname, verifying (and loading, for
      # parents) each one so that the final submodule can be located.
      for index, current_module in enumerate(all_modules):
        current_module_fullname = '.'.join(all_modules[:index + 1])
        if (current_module_fullname == fullname and not
            self.StubModuleExists(fullname)):
          # The leaf module only needs to be findable; it is loaded later.
          self.FindModuleRestricted(current_module,
                                    current_module_fullname,
                                    search_path)
        else:
          if current_module_fullname in self._module_dict:
            module = self._module_dict[current_module_fullname]
          else:
            module = self.FindAndLoadModule(current_module,
                                            current_module_fullname,
                                            search_path)

          # Descend into the package's own search path for the next level.
          if hasattr(module, '__path__'):
            search_path = module.__path__
    except CouldNotFindModuleError:
      # Returning None defers to the next entry on sys.meta_path.
      return None

    return self

  def StubModuleExists(self, name):
    """Check if the named module has a stub replacement."""
    if name in sys.builtin_module_names:
      # Stubs for built-ins are named with a 'py_' prefix in the dist package.
      name = 'py_%s' % name
    if name in dist.__all__:
      return True
    return False

  def ImportStubModule(self, name):
    """Import the stub module replacement for the specified module."""
    if name in sys.builtin_module_names:
      name = 'py_%s' % name
    # fromlist import, then pull the submodule off the dist package.
    module = __import__(dist.__name__, {}, {}, [name])
    return getattr(module, name)

  @Trace
  def FixModule(self, module):
    """Prunes and overrides restricted module attributes.

    Args:
      module: The module to prune. This should be a new module whose attributes
        reference back to the real module's __dict__ members.
    """
    if module.__name__ in self._WHITE_LIST_PARTIAL_MODULES:
      allowed_symbols = self._WHITE_LIST_PARTIAL_MODULES[module.__name__]
      # Remove everything not allow-listed, but keep dunder attributes.
      for symbol in set(module.__dict__) - set(allowed_symbols):
        if not (symbol.startswith('__') and symbol.endswith('__')):
          del module.__dict__[symbol]

    if module.__name__ in self._MODULE_OVERRIDES:
      module.__dict__.update(self._MODULE_OVERRIDES[module.__name__])

  @Trace
  def FindModuleRestricted(self,
                           submodule,
                           submodule_fullname,
                           search_path):
    """Locates a module while enforcing module import restrictions.

    Args:
      submodule: The short name of the submodule (i.e., the last section of
        the fullname; for 'foo.bar' this would be 'bar').
      submodule_fullname: The fully qualified name of the module to find (e.g.,
        'foo.bar').
      search_path: List of paths to search for to find this module. Should be
        None if the current sys.path should be used.

    Returns:
      Tuple (source_file, pathname, description) where:
        source_file: File-like object that contains the module; in the case
          of packages, this will be None, which implies to look at __init__.py.
        pathname: String containing the full path of the module on disk.
        description: Tuple returned by imp.find_module().
      However, in the case of an import using a path hook (e.g. a zipfile),
      source_file will be a PEP-302-style loader object, pathname will be None,
      and description will be a tuple filled with None values.

    Raises:
      ImportError exception if the requested module was found, but importing
      it is disallowed.

      CouldNotFindModuleError exception if the request module could not even
      be found for import.
    """
    if search_path is None:
      # None at the front stands for the built-in module namespace.
      search_path = [None] + sys.path
    for path_entry in search_path:
      result = self.FindPathHook(submodule, submodule_fullname, path_entry)
      if result is not None:
        source_file, pathname, description = result
        if description == (None, None, None):
          # Found by a PEP-302 path hook loader; no further checks possible.
          return result
        else:
          break
    else:
      self.log('Could not find module "%s"', submodule_fullname)
      raise CouldNotFindModuleError()

    suffix, mode, file_type = description

    # Built-ins and C extensions have no file path to check for access.
    if (file_type not in (self._imp.C_BUILTIN, self._imp.C_EXTENSION) and
        not FakeFile.IsFileAccessible(pathname)):
      error_message = 'Access to module file denied: %s' % pathname
      logging.debug(error_message)
      raise ImportError(error_message)

    if (file_type not in self._ENABLED_FILE_TYPES and
        submodule not in self._WHITE_LIST_C_MODULES):
      error_message = ('Could not import "%s": Disallowed C-extension '
                       'or built-in module' % submodule_fullname)
      logging.debug(error_message)
      raise ImportError(error_message)

    return source_file, pathname, description

  def FindPathHook(self, submodule, submodule_fullname, path_entry):
    """Helper for FindModuleRestricted to find a module in a sys.path entry.

    Args:
      submodule: Short (leaf) name of the submodule to find.
      submodule_fullname: Fully qualified name of the module to find.
      path_entry: A single sys.path entry, or None representing the builtins.

    Returns:
      Either None (if nothing was found), or a triple (source_file, path_name,
      description). See the doc string for FindModuleRestricted() for the
      meaning of the latter.
    """
    if path_entry is None:
      # This is the magic entry that tells us to look for a built-in module.
      if submodule_fullname in sys.builtin_module_names:
        try:
          result = self._imp.find_module(submodule)
        except ImportError:
          pass
        else:
          # Only accept it if imp actually classified it as a built-in.
          source_file, pathname, description = result
          suffix, mode, file_type = description
          if file_type == self._imp.C_BUILTIN:
            return result
      return None

    # Consult (and populate) the path-hook importer cache per PEP 302.
    if path_entry in sys.path_importer_cache:
      importer = sys.path_importer_cache[path_entry]
    else:
      importer = None
      for hook in sys.path_hooks:
        try:
          importer = hook(path_entry)
          break
        except ImportError:
          pass
      sys.path_importer_cache[path_entry] = importer

    if importer is None:
      # No path hook handles this entry; fall back to ordinary file search.
      try:
        return self._imp.find_module(submodule, [path_entry])
      except ImportError:
        pass
    else:
      # The importer is a PEP-302 loader; signal that with a None description.
      loader = importer.find_module(submodule)
      if loader is not None:
        return (loader, None, (None, None, None))

    return None

  @Trace
  def LoadModuleRestricted(self,
                           submodule_fullname,
                           source_file,
                           pathname,
                           description):
    """Loads a module while enforcing module import restrictions.

    As a byproduct, the new module will be added to the module dictionary.

    Args:
      submodule_fullname: The fully qualified name of the module to find (e.g.,
        'foo.bar').
      source_file: File-like object that contains the module's source code,
        or a PEP-302-style loader object.
      pathname: String containing the full path of the module on disk.
      description: Tuple returned by imp.find_module(), or (None, None, None)
        in case source_file is a PEP-302-style loader object.

    Returns:
      The new module.

    Raises:
      ImportError exception of the specified module could not be loaded for
      whatever reason.
    """
    if description == (None, None, None):
      # PEP-302 loader path: delegate the whole load to the loader object.
      return source_file.load_module(submodule_fullname)
    try:
      try:
        return self._imp.load_module(submodule_fullname,
                                     source_file,
                                     pathname,
                                     description)
      except:
        # imp.load_module may leave a half-initialized module behind; remove
        # it so a retry starts clean, then re-raise the original error.
        if submodule_fullname in self._module_dict:
          del self._module_dict[submodule_fullname]
        raise

    finally:
      if source_file is not None:
        source_file.close()

  @Trace
  def FindAndLoadModule(self,
                        submodule,
                        submodule_fullname,
                        search_path):
    """Finds and loads a module, loads it, and adds it to the module dictionary.

    Args:
      submodule: Name of the module to import (e.g., baz).
      submodule_fullname: Full name of the module to import (e.g., foo.bar.baz).
      search_path: Path to use for searching for this submodule. For top-level
        modules this should be None; otherwise it should be the __path__
        attribute from the parent package.

    Returns:
      A new module instance that has been inserted into the module dictionary
      supplied to __init__.

    Raises:
      ImportError exception if the module could not be loaded for whatever
      reason (e.g., missing, not allowed).
    """
    module = self._imp.new_module(submodule_fullname)

    # thread and cPickle are backed by safe pure-Python replacements;
    # os gets the real module dict but is pruned/overridden by FixModule below.
    if submodule_fullname == 'thread':
      module.__dict__.update(self._dummy_thread.__dict__)
      module.__name__ = 'thread'
    elif submodule_fullname == 'cPickle':
      module.__dict__.update(self._pickle.__dict__)
      module.__name__ = 'cPickle'
    elif submodule_fullname == 'os':
      module.__dict__.update(self._os.__dict__)
      # Register os.path so 'import os.path' resolves to the same module.
      self._module_dict['os.path'] = module.path
    elif self.StubModuleExists(submodule_fullname):
      module = self.ImportStubModule(submodule_fullname)
    else:
      source_file, pathname, description = self.FindModuleRestricted(submodule, submodule_fullname, search_path)
      module = self.LoadModuleRestricted(submodule_fullname,
                                         source_file,
                                         pathname,
                                         description)

    module.__loader__ = self
    self.FixModule(module)
    if submodule_fullname not in self._module_dict:
      self._module_dict[submodule_fullname] = module

    return module

  @Trace
  def GetParentPackage(self, fullname):
    """Retrieves the parent package of a fully qualified module name.

    Args:
      fullname: Full name of the module whose parent should be retrieved (e.g.,
        foo.bar).

    Returns:
      Module instance for the parent or None if there is no parent module.

    Raise:
      ImportError exception if the module's parent could not be found.
    """
    all_modules = fullname.split('.')
    parent_module_fullname = '.'.join(all_modules[:-1])
    if parent_module_fullname:
      # find_module loads all parent packages as a side effect, which is what
      # populates self._module_dict for the lookup below.
      if self.find_module(fullname) is None:
        raise ImportError('Could not find module %s' % fullname)

      return self._module_dict[parent_module_fullname]
    return None

  @Trace
  def GetParentSearchPath(self, fullname):
    """Determines the search path of a module's parent package.

    Args:
      fullname: Full name of the module to look up (e.g., foo.bar).

    Returns:
      Tuple (submodule, search_path) where:
        submodule: The last portion of the module name from fullname (e.g.,
          if fullname is foo.bar, then this is bar).
        search_path: List of paths that belong to the parent package's search
          path or None if there is no parent package.

    Raises:
      ImportError exception if the module or its parent could not be found.
    """
    submodule = GetSubmoduleName(fullname)
    parent_package = self.GetParentPackage(fullname)
    search_path = None
    if parent_package is not None and hasattr(parent_package, '__path__'):
      search_path = parent_package.__path__
    return submodule, search_path

  @Trace
  def GetModuleInfo(self, fullname):
    """Determines the path on disk and the search path of a module or package.

    Args:
      fullname: Full name of the module to look up (e.g., foo.bar).

    Returns:
      Tuple (pathname, search_path, submodule) where:
        pathname: String containing the full path of the module on disk,
          or None if the module wasn't loaded from disk (e.g. from a zipfile).
        search_path: List of paths that belong to the found package's search
          path or None if found module is not a package.
        submodule: The relative name of the submodule that's being imported.
    """
    submodule, search_path = self.GetParentSearchPath(fullname)
    source_file, pathname, description = self.FindModuleRestricted(submodule, fullname, search_path)
    suffix, mode, file_type = description
    module_search_path = None

    if file_type == self._imp.PKG_DIRECTORY:
      # For a package, report the __init__ file as the module's path.
      module_search_path = [pathname]
      pathname = os.path.join(pathname, '__init__%spy' % os.extsep)

    return pathname, module_search_path, submodule

  @Trace
  def load_module(self, fullname):
    """See PEP 302."""
    all_modules = fullname.split('.')
    submodule = all_modules[-1]
    parent_module_fullname = '.'.join(all_modules[:-1])
    search_path = None
    if parent_module_fullname and parent_module_fullname in self._module_dict:
      parent_module = self._module_dict[parent_module_fullname]
      if hasattr(parent_module, '__path__'):
        search_path = parent_module.__path__

    return self.FindAndLoadModule(submodule, fullname, search_path)

  @Trace
  def is_package(self, fullname):
    """See PEP 302 extensions."""
    submodule, search_path = self.GetParentSearchPath(fullname)
    source_file, pathname, description = self.FindModuleRestricted(submodule, fullname, search_path)
    suffix, mode, file_type = description
    if file_type == self._imp.PKG_DIRECTORY:
      return True
    return False

  @Trace
  def get_source(self, fullname):
    """See PEP 302 extensions."""
    full_path, search_path, submodule = self.GetModuleInfo(fullname)
    if full_path is None:
      return None
    source_file = open(full_path)
    try:
      return source_file.read()
    finally:
      source_file.close()

  @Trace
  def get_code(self, fullname):
    """See PEP 302 extensions."""
    full_path, search_path, submodule = self.GetModuleInfo(fullname)
    if full_path is None:
      return None
    source_file = open(full_path)
    try:
      source_code = source_file.read()
    finally:
      source_file.close()

    # compile() requires Unix newlines and a trailing newline.
    source_code = source_code.replace('\r\n', '\n')
    if not source_code.endswith('\n'):
      source_code += '\n'

    return compile(source_code, full_path, 'exec')
def ModuleHasValidMainFunction(module):
  """Determines if a module has a main function that takes no arguments.

  This includes functions that have arguments with defaults that are all
  assigned, thus requiring no additional arguments in order to be called.

  Args:
    module: A types.ModuleType instance.

  Returns:
    True if the module has a valid, reusable main function; False otherwise.
  """
  # Exact type check deliberately excludes callables that merely look like
  # functions (e.g. classes, bound methods).
  if hasattr(module, 'main') and type(module.main) is types.FunctionType:
    arg_names, var_args, var_kwargs, default_values = inspect.getargspec(module.main)
    if len(arg_names) == 0:
      return True
    # All positional arguments have defaults, so main() is callable as-is.
    if default_values is not None and len(arg_names) == len(default_values):
      return True
  return False
def GetScriptModuleName(handler_path):
  """Determines the fully-qualified Python module name of a script on disk.

  Args:
    handler_path: CGI path stored in the application configuration (as a path
      like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.

  Returns:
    String containing the corresponding module name (e.g., 'foo.bar.baz').
  """
  if handler_path.startswith(PYTHON_LIB_VAR + '/'):
    handler_path = handler_path[len(PYTHON_LIB_VAR):]
  handler_path = os.path.normpath(handler_path)

  extension_index = handler_path.rfind('.py')
  if extension_index != -1:
    handler_path = handler_path[:extension_index]
  module_fullname = handler_path.replace(os.sep, '.')
  module_fullname = module_fullname.strip('.')
  # Collapse runs of dots left over by path stripping. Raw string: '\.' in a
  # plain literal is an invalid escape sequence and only works by accident.
  module_fullname = re.sub(r'\.+', '.', module_fullname)

  # A package's __init__ module is addressed by the package name itself.
  if module_fullname.endswith('.__init__'):
    module_fullname = module_fullname[:-len('.__init__')]

  return module_fullname
def FindMissingInitFiles(cgi_path, module_fullname, isfile=os.path.isfile):
  """Determines which __init__.py files are missing from a module's parent
  packages.

  Args:
    cgi_path: Absolute path of the CGI module file on disk.
    module_fullname: Fully qualified Python module name used to import the
      cgi_path module.
    isfile: Used for dependency injection.

  Returns:
    List containing the paths to the missing __init__.py files.
  """
  missing_init_files = []

  if cgi_path.endswith('.py'):
    module_base = os.path.dirname(cgi_path)
  else:
    module_base = cgi_path

  depth_count = module_fullname.count('.')
  # Packages (and scripts addressed by directory rather than file) also need
  # an __init__.py in their own directory, not just in each ancestor.
  if cgi_path.endswith('__init__.py') or not cgi_path.endswith('.py'):
    depth_count += 1

  # The loop index is unused; range is equivalent to xrange for these small
  # counts and is portable across Python versions.
  for _ in range(depth_count):
    current_init_file = os.path.abspath(
        os.path.join(module_base, '__init__.py'))

    if not isfile(current_init_file):
      missing_init_files.append(current_init_file)

    # Walk one package level up towards the filesystem root.
    module_base = os.path.abspath(os.path.join(module_base, os.pardir))

  return missing_init_files
def LoadTargetModule(handler_path,
                     cgi_path,
                     import_hook,
                     module_dict=sys.modules):
  """Loads a target CGI script by importing it as a Python module.

  If the module for the target CGI script has already been loaded before,
  the new module will be loaded in its place using the same module object,
  possibly overwriting existing module attributes.

  Args:
    handler_path: CGI path stored in the application configuration (as a path
      like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
    cgi_path: Absolute path to the CGI script file on disk.
    import_hook: Instance of HardenedModulesHook to use for module loading.
    module_dict: Used for dependency injection.

  Returns:
    Tuple (module_fullname, script_module, module_code) where:
      module_fullname: Fully qualified module name used to import the script.
      script_module: The ModuleType object corresponding to the module_fullname.
        If the module has not already been loaded, this will be an empty
        shell of a module.
      module_code: Code object (returned by compile built-in) corresponding
        to the cgi_path to run. If the script_module was previously loaded
        and has a main() function that can be reused, this will be None.
  """
  module_fullname = GetScriptModuleName(handler_path)
  script_module = module_dict.get(module_fullname)
  module_code = None
  # NOTE(review): `!= None` would conventionally be `is not None`; equivalent
  # here for module objects.
  if script_module != None and ModuleHasValidMainFunction(script_module):
    # Fast path: no recompilation needed, the cached main() is re-invoked.
    logging.debug('Reusing main() function of module "%s"', module_fullname)
  else:
    if script_module is None:
      script_module = imp.new_module(module_fullname)
      script_module.__loader__ = import_hook

    try:
      module_code = import_hook.get_code(module_fullname)
      full_path, search_path, submodule = import_hook.GetModuleInfo(module_fullname)
      script_module.__file__ = full_path
      if search_path is not None:
        script_module.__path__ = search_path
    except:
      # Deliberate bare except: any failure in hook-based loading falls back
      # to compiling the script directly; the original exception is preserved
      # for re-raising if the fallback also fails.
      exc_type, exc_value, exc_tb = sys.exc_info()
      import_error_message = str(exc_type)
      if exc_value:
        import_error_message += ': ' + str(exc_value)

      logging.exception('Encountered error loading module "%s": %s',
                        module_fullname, import_error_message)
      missing_inits = FindMissingInitFiles(cgi_path, module_fullname)
      if missing_inits:
        logging.warning('Missing package initialization files: %s',
                        ', '.join(missing_inits))
      else:
        logging.error('Parent package initialization files are present, '
                      'but must be broken')

      # Fallback: compile the script file directly, bypassing the hook.
      independent_load_successful = True

      if not os.path.isfile(cgi_path):
        independent_load_successful = False
      else:
        try:
          source_file = open(cgi_path)
          try:
            module_code = compile(source_file.read(), cgi_path, 'exec')
            script_module.__file__ = cgi_path
          finally:
            source_file.close()

        except OSError:
          independent_load_successful = False

      if not independent_load_successful:
        # Re-raise the original hook-loading error with its traceback.
        raise exc_type, exc_value, exc_tb

  module_dict[module_fullname] = script_module

  return module_fullname, script_module, module_code
def ExecuteOrImportScript(handler_path, cgi_path, import_hook):
  """Executes a CGI script by importing it as a new module; possibly reuses
  the module's main() function if it is defined and takes no arguments.

  Basic technique lifted from PEP 338 and Python2.5's runpy module. See:
    http://www.python.org/dev/peps/pep-0338/

  See the section entitled "Import Statements and the Main Module" to understand
  why a module named '__main__' cannot do relative imports. To get around this,
  the requested module's path could be added to sys.path on each request.

  Args:
    handler_path: CGI path stored in the application configuration (as a path
      like 'foo/bar/baz.py'). Should not have $PYTHON_LIB references.
    cgi_path: Absolute path to the CGI script file on disk.
    import_hook: Instance of HardenedModulesHook to use for module loading.

  Returns:
    True if the response code had an error status (e.g., 404), or False if it
    did not.

  Raises:
    Any kind of exception that could have been raised when loading the target
    module, running a target script, or executing the application code itself.
  """
  module_fullname, script_module, module_code = LoadTargetModule(
      handler_path, cgi_path, import_hook)
  # Run under the name __main__ so `if __name__ == '__main__'` blocks fire.
  script_module.__name__ = '__main__'
  sys.modules['__main__'] = script_module
  try:
    if module_code:
      exec module_code in script_module.__dict__
    else:
      script_module.main()

    # sys.stdout holds the CGI output; rewind it to parse the response
    # headers, then seek back to the end so later writes append.
    sys.stdout.flush()
    sys.stdout.seek(0)
    try:
      headers = mimetools.Message(sys.stdout)
    finally:
      sys.stdout.seek(0, 2)
    status_header = headers.get('status')
    error_response = False
    if status_header:
      try:
        status_code = int(status_header.split(' ', 1)[0])
        error_response = status_code >= 400
      except ValueError:
        # Malformed status line counts as an error response.
        error_response = True

    if not error_response:
      # On success, bind the module into its parent package so subsequent
      # imports see the freshly executed module.
      try:
        parent_package = import_hook.GetParentPackage(module_fullname)
      except Exception:
        parent_package = None

      if parent_package is not None:
        submodule = GetSubmoduleName(module_fullname)
        setattr(parent_package, submodule, script_module)

    return error_response
  finally:
    # Always restore the module's real name, even if execution raised.
    script_module.__name__ = module_fullname
def ExecuteCGI(root_path,
               handler_path,
               cgi_path,
               env,
               infile,
               outfile,
               module_dict,
               exec_script=ExecuteOrImportScript):
  """Executes Python file in this process as if it were a CGI.

  Does not return an HTTP response line. CGIs should output headers followed by
  the body content.

  The modules in sys.modules should be the same before and after the CGI is
  executed, with the specific exception of encodings-related modules, which
  cannot be reloaded and thus must always stay in sys.modules.

  Args:
    root_path: Path to the root of the application.
    handler_path: CGI path stored in the application configuration (as a path
      like 'foo/bar/baz.py'). May contain $PYTHON_LIB references.
    cgi_path: Absolute path to the CGI script file on disk.
    env: Dictionary of environment variables to use for the execution.
    infile: File-like object to read HTTP request input data from.
    outfile: File-like object to write HTTP response data to.
    module_dict: Dictionary in which application-loaded modules should be
      preserved between requests. This removes the need to reload modules that
      are reused between requests, significantly increasing load performance.
      This dictionary must be separate from the sys.modules dictionary.
    exec_script: Used for dependency injection.
  """
  # Snapshot all interpreter-level state so it can be restored after the CGI
  # runs, regardless of what the application does.
  old_module_dict = sys.modules.copy()
  old_builtin = __builtin__.__dict__.copy()
  old_argv = sys.argv
  old_stdin = sys.stdin
  old_stdout = sys.stdout
  old_env = os.environ.copy()
  old_cwd = os.getcwd()
  old_file_type = types.FileType
  reset_modules = False

  try:
    ClearAllButEncodingsModules(sys.modules)
    sys.modules.update(module_dict)
    sys.argv = [cgi_path]
    sys.stdin = infile
    sys.stdout = outfile
    os.environ.clear()
    os.environ.update(env)
    before_path = sys.path[:]
    # Run the CGI from its own directory when it lives under the app root,
    # otherwise from the root itself.
    cgi_dir = os.path.normpath(os.path.dirname(cgi_path))
    root_path = os.path.normpath(os.path.abspath(root_path))
    if cgi_dir.startswith(root_path + os.sep):
      os.chdir(cgi_dir)
    else:
      os.chdir(root_path)

    # Install the hardened import hook and the FakeFile sandbox for the
    # duration of the request.
    hook = HardenedModulesHook(sys.modules)
    sys.meta_path = [hook]
    if hasattr(sys, 'path_importer_cache'):
      sys.path_importer_cache.clear()

    __builtin__.file = FakeFile
    __builtin__.open = FakeFile
    types.FileType = FakeFile

    __builtin__.buffer = NotImplementedFakeClass

    logging.debug('Executing CGI with env:\n%s', pprint.pformat(env))
    try:
      reset_modules = exec_script(handler_path, cgi_path, hook)
    except SystemExit, e:
      # sys.exit() from a CGI is a normal termination, not an error.
      logging.debug('CGI exited with status: %s', e)
    except:
      reset_modules = True
      raise

  finally:
    # Tear down the sandbox and restore all saved interpreter state, keeping
    # application-loaded modules in module_dict for the next request.
    sys.meta_path = []
    sys.path_importer_cache.clear()

    _ClearTemplateCache(sys.modules)
    module_dict.update(sys.modules)
    ClearAllButEncodingsModules(sys.modules)
    sys.modules.update(old_module_dict)

    __builtin__.__dict__.update(old_builtin)
    sys.argv = old_argv
    sys.stdin = old_stdin
    sys.stdout = old_stdout
    sys.path[:] = before_path

    os.environ.clear()
    os.environ.update(old_env)
    os.chdir(old_cwd)

    types.FileType = old_file_type
class CGIDispatcher(URLDispatcher):
  """Dispatcher that executes Python CGI scripts."""

  def __init__(self,
               module_dict,
               root_path,
               path_adjuster,
               setup_env=SetupEnvironment,
               exec_cgi=ExecuteCGI,
               create_logging_handler=ApplicationLoggingHandler):
    """Initializer.

    Args:
      module_dict: Dictionary in which application-loaded modules should be
        preserved between requests. This dictionary must be separate from the
        sys.modules dictionary.
      root_path: Path to the root of the application.
      path_adjuster: Instance of PathAdjuster to use for finding absolute
        paths of CGI files on disk.
      setup_env, exec_cgi, create_logging_handler: Used for dependency
        injection.
    """
    self._module_dict = module_dict
    self._root_path = root_path
    self._path_adjuster = path_adjuster
    self._setup_env = setup_env
    self._exec_cgi = exec_cgi
    self._create_logging_handler = create_logging_handler

  def Dispatch(self,
               relative_url,
               path,
               headers,
               infile,
               outfile,
               base_env_dict=None):
    """Dispatches the Python CGI."""
    # Capture application log records for this request only.
    handler = self._create_logging_handler()
    logging.getLogger().addHandler(handler)
    before_level = logging.root.level
    try:
      env = {}
      if base_env_dict:
        env.update(base_env_dict)
      cgi_path = self._path_adjuster.AdjustPath(path)
      env.update(self._setup_env(cgi_path, relative_url, headers))
      self._exec_cgi(self._root_path,
                     path,
                     cgi_path,
                     env,
                     infile,
                     outfile,
                     self._module_dict)
      handler.AddDebuggingConsole(relative_url, env, outfile)
    finally:
      # Undo any log-level changes made by the application and detach the
      # per-request handler.
      logging.root.level = before_level
      logging.getLogger().removeHandler(handler)

  def __str__(self):
    """Returns a string representation of this dispatcher."""
    return 'CGI dispatcher'
class LocalCGIDispatcher(CGIDispatcher):
  """Dispatcher that executes local functions like they're CGIs.

  The contents of sys.modules will be preserved for local CGIs running this
  dispatcher, but module hardening will still occur for any new imports. Thus,
  be sure that any local CGIs have loaded all of their dependent modules
  _before_ they are executed.
  """

  def __init__(self, module_dict, path_adjuster, cgi_func):
    """Initializer.

    Args:
      module_dict: Passed to CGIDispatcher.
      path_adjuster: Passed to CGIDispatcher.
      cgi_func: Callable function taking no parameters that should be
        executed in a CGI environment in the current process.
    """
    self._cgi_func = cgi_func

    # Substitute the local function for the script-execution step while still
    # running it inside ExecuteCGI's sandboxed environment.
    def curried_exec_script(*args, **kwargs):
      cgi_func()
      return False

    def curried_exec_cgi(*args, **kwargs):
      kwargs['exec_script'] = curried_exec_script
      return ExecuteCGI(*args, **kwargs)

    CGIDispatcher.__init__(self,
                           module_dict,
                           '',
                           path_adjuster,
                           exec_cgi=curried_exec_cgi)

  def Dispatch(self, *args, **kwargs):
    """Preserves sys.modules for CGIDispatcher.Dispatch."""
    # Pre-seed the module dict so ExecuteCGI's module swap keeps the
    # currently loaded modules visible to the local function.
    self._module_dict.update(sys.modules)
    CGIDispatcher.Dispatch(self, *args, **kwargs)

  def __str__(self):
    """Returns a string representation of this dispatcher."""
    return 'Local CGI dispatcher for %s' % self._cgi_func
class PathAdjuster(object):
  """Maps handler file paths onto absolute filesystem locations.

  Paths beginning with $PYTHON_LIB resolve against the SDK installation
  (the directory containing the google package); everything else resolves
  against the application root.
  """

  def __init__(self, root_path):
    """Initializer.

    Args:
      root_path: Path to the root of the application running on the server.
    """
    self._root_path = os.path.abspath(root_path)

  def AdjustPath(self, path):
    """Returns the absolute on-disk path for a configured handler path.

    Args:
      path: File path taken from the application configuration.

    Returns:
      The adjusted, absolute path.
    """
    if path.startswith(PYTHON_LIB_VAR):
      sdk_root = os.path.dirname(os.path.dirname(google.__file__))
      # Skip the variable name plus the following separator character.
      return os.path.join(sdk_root, path[len(PYTHON_LIB_VAR) + 1:])
    return os.path.join(self._root_path, path)
class StaticFileConfigMatcher(object):
  """Keeps track of file/directory specific application configuration.

  Specifically:
  - Computes mime type based on URLMap and file extension.
  - Decides on cache expiration time based on URLMap and default expiration.

  To determine the mime type, we first see if there is any mime-type property
  on each URLMap entry. If none is specified, we use the mimetypes module to
  guess the mime type from the file path extension, and use
  application/octet-stream if we can't find the mimetype.
  """

  def __init__(self,
               url_map_list,
               path_adjuster,
               default_expiration):
    """Initializer.

    Args:
      url_map_list: List of appinfo.URLMap objects.
        If empty or None, then we always use the mime type chosen by the
        mimetypes module.
      path_adjuster: PathAdjuster object used to adjust application file paths.
      default_expiration: String describing default expiration time for browser
        based caching of static files. If set to None this disallows any
        browser caching of static content.
    """
    if default_expiration is not None:
      self._default_expiration = appinfo.ParseExpiration(default_expiration)
    else:
      self._default_expiration = None
    # List of (compiled_path_regex, mime_type_or_None, expiration_seconds)
    # in configuration order; first match wins in the lookups below.
    self._patterns = []
    if url_map_list:
      for entry in url_map_list:
        handler_type = entry.GetHandlerType()
        # Only static handlers participate in mime-type/expiration matching.
        if handler_type not in (appinfo.STATIC_FILES, appinfo.STATIC_DIR):
          continue
        if handler_type == appinfo.STATIC_FILES:
          regex = entry.upload + '$'
        else:
          path = entry.static_dir
          if path[-1] == '/':
            path = path[:-1]
          # Match any file underneath the static directory.
          regex = re.escape(path + os.path.sep) + r'(.*)'
        try:
          path_re = re.compile(regex)
        except re.error, e:
          raise InvalidAppConfigError('regex %s does not compile: %s' %
                                      (regex, e))
        # With browser caching disabled entirely (None default), per-entry
        # expiration values are intentionally ignored.
        if self._default_expiration is None:
          expiration = 0
        elif entry.expiration is None:
          expiration = self._default_expiration
        else:
          expiration = appinfo.ParseExpiration(entry.expiration)
        self._patterns.append((path_re, entry.mime_type, expiration))

  def IsStaticFile(self, path):
    """Tests if the given path points to a "static" file.

    Args:
      path: String containing the file's path relative to the app.

    Returns:
      Boolean, True if the file was configured to be static.
    """
    for (path_re, _, _) in self._patterns:
      if path_re.match(path):
        return True
    return False

  def GetMimeType(self, path):
    """Returns the mime type that we should use when serving the specified file.

    Args:
      path: String containing the file's path relative to the app.

    Returns:
      String containing the mime type to use. Will be 'application/octet-stream'
      if we have no idea what it should be.
    """
    for (path_re, mime_type, expiration) in self._patterns:
      # Only entries with an explicit mime-type override the guess below.
      if mime_type is not None:
        the_match = path_re.match(path)
        if the_match:
          return mime_type
    # Fall back to an extension-based guess.
    filename, extension = os.path.splitext(path)
    return mimetypes.types_map.get(extension, 'application/octet-stream')

  def GetExpiration(self, path):
    """Returns the cache expiration duration to be used for the given file.

    Args:
      path: String containing the file's path relative to the app.

    Returns:
      Integer number of seconds to be used for browser cache expiration time.
    """
    for (path_re, mime_type, expiration) in self._patterns:
      the_match = path_re.match(path)
      if the_match:
        return expiration
    return self._default_expiration or 0
def ReadDataFile(data_path, openfile=file):
  """Reads a file on disk, returning a corresponding HTTP status and data.

  Args:
    data_path: Path to the file on disk to read.
    openfile: Used for dependency injection.

  Returns:
    Tuple (status, data) where status is an HTTP response code, and data is
      the data read; will be an empty string if an error occurred or the
      file was empty.
  """
  # Default to 500 so any unexpected failure mode reads as a server error.
  status = httplib.INTERNAL_SERVER_ERROR
  data = ""
  try:
    data_file = openfile(data_path, 'rb')
    try:
      data = data_file.read()
    finally:
      data_file.close()
    status = httplib.OK
  except (OSError, IOError), e:
    logging.error('Error encountered reading file "%s":\n%s', data_path, e)
    # Missing files map to 404; any other I/O failure (e.g. permission
    # denied) maps to 403.
    if e.errno in FILE_MISSING_EXCEPTIONS:
      status = httplib.NOT_FOUND
    else:
      status = httplib.FORBIDDEN
  return status, data
class FileDispatcher(URLDispatcher):
  """Dispatcher that serves static file content straight from disk."""

  def __init__(self,
               path_adjuster,
               static_file_config_matcher,
               read_data_file=ReadDataFile):
    """Initializer.

    Args:
      path_adjuster: Instance of PathAdjuster used to resolve the absolute
        on-disk location of data files.
      static_file_config_matcher: StaticFileConfigMatcher object.
      read_data_file: Used for dependency injection.
    """
    self._path_adjuster = path_adjuster
    self._static_file_config_matcher = static_file_config_matcher
    self._read_data_file = read_data_file

  def Dispatch(self,
               relative_url,
               path,
               headers,
               infile,
               outfile,
               base_env_dict=None):
    """Reads the file and writes a CGI-style response to outfile."""
    full_path = self._path_adjuster.AdjustPath(path)
    status, data = self._read_data_file(full_path)
    config = self._static_file_config_matcher
    mime_type = config.GetMimeType(path)
    expiration = config.GetExpiration(path)

    outfile.write('Status: %d\r\n' % status)
    outfile.write('Content-type: %s\r\n' % mime_type)
    if expiration:
      expires_at = time.time() + expiration
      outfile.write('Expires: %s\r\n'
                    % email.Utils.formatdate(expires_at, usegmt=True))
      outfile.write('Cache-Control: public, max-age=%i\r\n' % expiration)
    outfile.write('\r\n')
    outfile.write(data)

  def __str__(self):
    """Human-readable dispatcher name used in log output."""
    return 'File dispatcher'
# Response headers that applications may not set; IgnoreHeadersRewriter
# strips them from CGI output before the response goes to the client.
_IGNORE_RESPONSE_HEADERS = frozenset([
    'content-encoding', 'accept-encoding', 'transfer-encoding',
    'server', 'date',
])


def IgnoreHeadersRewriter(status_code, status_message, headers, body):
  """Strips response headers applications are not allowed to set.

  Certain response headers cannot be modified by an Application. For a
  complete list of these headers please see:
  http://code.google.com/appengine/docs/webapp/responseclass.html#Disallowed_HTTP_Response_Headers

  This rewriter simply removes those headers.
  """
  for name in _IGNORE_RESPONSE_HEADERS:
    if name in headers:
      del headers[name]
  return status_code, status_message, headers, body
def ParseStatusRewriter(status_code, status_message, headers, body):
  """Interprets application-supplied 'status' and 'location' headers.

  A 'status' header overrides the HTTP response code (and is removed from
  the outgoing headers).  A 'location' header without a 'status' header
  turns the response into an HTTP 302 redirect.  If the supplied status
  value is not numeric, the response becomes a 500 with an error body.
  Responses with neither header pass through untouched.
  """
  status_header = headers.getheader('status')
  location_header = headers.getheader('location')
  if status_header:
    del headers['status']
    response_status = status_header
  elif location_header:
    response_status = '%d Redirecting' % httplib.FOUND
  else:
    return status_code, status_message, headers, body

  parts = response_status.split(' ', 1)
  status_code = parts[0]
  status_message = parts[1] if len(parts) > 1 else ''
  try:
    status_code = int(status_code)
  except ValueError:
    status_code = 500
    body = cStringIO.StringIO('Error: Invalid "status" header value returned.')
  return status_code, status_message, headers, body
def CacheRewriter(status_code, status_message, headers, body):
  """Supplies a default Cache-Control header.

  Responses that do not explicitly set Cache-Control are marked 'no-cache'
  so browsers do not cache dynamic dev_appserver output.

  Args:
    status_code: Response status code; returned unchanged.
    status_message: Response status message; returned unchanged.
    headers: Mapping of response headers; may gain a Cache-Control entry.
    body: Response body; returned unchanged.

  Returns:
    Tuple (status_code, status_message, headers, body).
  """
  # Idiomatic membership test ('x not in y' rather than 'not x in y').
  if 'Cache-Control' not in headers:
    headers['Cache-Control'] = 'no-cache'
  return status_code, status_message, headers, body
def ContentLengthRewriter(status_code, status_message, headers, body):
  """Sets Content-Length from the actual remaining body size.

  Even though Content-Length is not a user modifiable header, App Engine
  sends a correct Content-Length based on the real response content.
  """
  # Measure from the current position to EOF, then restore the position so
  # later rewriters (and the final read) see the body unchanged.
  start = body.tell()
  body.seek(0, 2)
  end = body.tell()
  headers['Content-Length'] = str(end - start)
  body.seek(start)
  return status_code, status_message, headers, body
def CreateResponseRewritersChain():
  """Builds the default response rewriter chain.

  A response rewriter gets a final chance to change part of the
  dev_appserver's response.  Unlike a dispatcher, every rewriter runs after
  every request regardless of which dispatcher handled it.  Rewriters run
  in list order, each one receiving the previous rewriter's output.

  Each rewriter is a callable with the signature:

  Args:
    status_code: Status code of response from dev_appserver or previous
      rewriter.
    status_message: Text corresponding to the status code.
    headers: mimetools.Message instance with parsed headers. NOTE: These
      headers can contain their own 'status' field, but the default
      dev_appserver implementation removes it. Future rewriters should
      avoid re-introducing the status field and return new codes instead.
    body: File object containing the body of the response. Its position may
      not be at the start of the file; any content before the current
      position is not part of the final body.

  Returns:
    Tuple (status_code, status_message, headers, body) — each element
    either rewritten or passed through unchanged.

  Returns:
    List of response rewriters.
  """
  chain = [
      IgnoreHeadersRewriter,
      ParseStatusRewriter,
      CacheRewriter,
      ContentLengthRewriter,
  ]
  return chain
def RewriteResponse(response_file, response_rewriters=None):
  """Applies the rewriter chain to an unparsed HTTP response.

  Parses the basic structure out of the raw response produced by an
  application or internal handler, feeds it through a chain of response
  rewriters, and ensures the final headers are CRLF-terminated.  See
  CreateResponseRewritersChain for the rewriter contract.

  Args:
    response_file: File-like object containing the full HTTP response,
      including the response code, all headers, and the request body.
    response_rewriters: A list of response rewriters; when None, a fresh
      chain from CreateResponseRewritersChain is used.

  Returns:
    Tuple (status_code, status_message, header_data, body) where:
      status_code: Integer HTTP response status (e.g., 200, 302, 404, 500).
      status_message: Informational message about the response code,
        possibly derived from a 'status' header, if supplied.
      header_data: String of HTTP headers, each line CRLF-terminated.
      body: String containing the body of the response.
  """
  if response_rewriters is None:
    response_rewriters = CreateResponseRewritersChain()

  status_code = 200
  status_message = 'Good to go'
  headers = mimetools.Message(response_file)

  for rewrite in response_rewriters:
    status_code, status_message, headers, response_file = rewrite(
        status_code,
        status_message,
        headers,
        response_file)

  # Normalize line endings: strip any trailing LF then CR from each header
  # line, then rejoin with CRLF.
  stripped = [line.rstrip('\n').rstrip('\r') for line in headers.headers]
  header_data = '\r\n'.join(stripped) + '\r\n'
  return status_code, status_message, header_data, response_file.read()
class ModuleManager(object):
  """Manages loaded modules in the runtime.

  Responsible for monitoring and reporting about file modification times.
  Modules can be loaded from source or precompiled byte-code files. When a
  file has source code, the ModuleManager monitors the modification time of
  the source file even if the module itself is loaded from byte-code.
  """

  def __init__(self, modules):
    """Initializer.

    Args:
      modules: Dictionary containing monitored modules.
    """
    self._modules = modules
    # Snapshot of the initial module set; ResetModules() restores it.
    self._default_modules = self._modules.copy()
    # Copy of sys.path_hooks taken now so ResetModules() can undo any hooks
    # installed while serving requests.
    self._save_path_hooks = sys.path_hooks[:]
    # Maps module name -> (mtime, monitored file path) as of the last call
    # to UpdateModuleFileModificationTimes().
    self._modification_times = {}

  @staticmethod
  def GetModuleFile(module, is_file=os.path.isfile):
    """Helper method to try to determine modules source file.

    Args:
      module: Module object to get file for.
      is_file: Function used to determine if a given path is a file.

    Returns:
      Path of the module's corresponding Python source file if it exists, or
      just the module's compiled Python file. If the module has an invalid
      __file__ attribute, None will be returned.
    """
    module_file = getattr(module, '__file__', None)
    if module_file is None:
      return None
    # Truncate after the last 'py' occurrence: turns a '.pyc'/'.pyo' path
    # into the corresponding '.py' source path.
    source_file = module_file[:module_file.rfind('py') + 2]
    if is_file(source_file):
      return source_file
    return module.__file__

  def AreModuleFilesModified(self):
    """Determines if any monitored files have been modified.

    Returns:
      True if one or more files have been modified, False otherwise.
    """
    for name, (mtime, fname) in self._modification_times.iteritems():
      # Skip modules that have been unloaded since the last snapshot.
      if name not in self._modules:
        continue
      module = self._modules[name]
      # A deleted file also counts as a modification.
      if not os.path.isfile(fname):
        return True
      if mtime != os.path.getmtime(fname):
        return True
    return False

  def UpdateModuleFileModificationTimes(self):
    """Records the current modification times of all monitored modules.
    """
    self._modification_times.clear()
    for name, module in self._modules.items():
      if not isinstance(module, types.ModuleType):
        continue
      module_file = self.GetModuleFile(module)
      if not module_file:
        continue
      try:
        self._modification_times[name] = (os.path.getmtime(module_file),
                                          module_file)
      except OSError, e:
        # A module whose file vanished between discovery and stat is simply
        # not monitored; any other OS error is a real failure.
        if e.errno not in FILE_MISSING_EXCEPTIONS:
          raise e

  def ResetModules(self):
    """Clear modules so that when request is run they are reloaded."""
    self._modules.clear()
    self._modules.update(self._default_modules)
    # Restore the original import path hooks as well.
    sys.path_hooks[:] = self._save_path_hooks
def _ClearTemplateCache(module_dict=sys.modules):
  """Empties the webapp template cache, if the template module is loaded.

  Looking the module up in module_dict (instead of importing it) means a
  missing or unloaded template module is silently ignored.
  """
  mod = module_dict.get('google.appengine.ext.webapp.template')
  if mod is None:
    return
  mod.template_cache.clear()
def CreateRequestHandler(root_path,
                         login_url,
                         require_indexes=False,
                         static_caching=True):
  """Creates a new BaseHTTPRequestHandler sub-class for use with the Python
  BaseHTTPServer module's HTTP server.

  Python's built-in HTTP server does not support passing context information
  along to instances of its request handlers. This function gets around that
  by creating a sub-class of the handler in a closure that has access to
  this context information.

  Args:
    root_path: Path to the root of the application running on the server.
    login_url: Relative URL which should be used for handling user logins.
    require_indexes: True if index.yaml is read-only gospel; default False.
    static_caching: True if browser caching of static files should be allowed.

  Returns:
    Sub-class of BaseHTTPRequestHandler.
  """
  # State below is shared by every request handled by the returned class.
  application_module_dict = SetupSharedModules(sys.modules)
  if require_indexes:
    # When indexes are read-only gospel, index.yaml is never rewritten.
    index_yaml_updater = None
  else:
    index_yaml_updater = dev_appserver_index.IndexYamlUpdater(root_path)
  application_config_cache = AppConfigCache()

  class DevAppServerRequestHandler(BaseHTTPServer.BaseHTTPRequestHandler):
    """Dispatches URLs using patterns from a URLMatcher, which is created by
    loading an application's configuration file. Executes CGI scripts in the
    local process so the scripts can use mock versions of APIs.

    HTTP requests that correctly specify a user info cookie
    (dev_appserver_login.COOKIE_NAME) will have the 'USER_EMAIL' environment
    variable set accordingly. If the user is also an admin, the
    'USER_IS_ADMIN' variable will exist and be set to '1'. If the user is not
    logged in, 'USER_EMAIL' will be set to the empty string.

    On each request, raises an InvalidAppConfigError exception if the
    application configuration file in the directory specified by the root_path
    argument is invalid.
    """
    # Class-level attributes are shared across all requests served by this
    # handler class (one per CreateRequestHandler call).
    server_version = 'Development/1.0'
    module_dict = application_module_dict
    module_manager = ModuleManager(application_module_dict)
    config_cache = application_config_cache
    rewriter_chain = CreateResponseRewritersChain()

    def __init__(self, *args, **kwargs):
      """Initializer.

      Args:
        args, kwargs: Positional and keyword arguments passed to the constructor
          of the super class.
      """
      BaseHTTPServer.BaseHTTPRequestHandler.__init__(self, *args, **kwargs)

    def version_string(self):
      """Returns server's version string used for Server HTTP header"""
      return self.server_version

    def do_GET(self):
      """Handle GET requests."""
      self._HandleRequest()

    def do_POST(self):
      """Handles POST requests."""
      self._HandleRequest()

    def do_PUT(self):
      """Handle PUT requests."""
      self._HandleRequest()

    def do_HEAD(self):
      """Handle HEAD requests."""
      self._HandleRequest()

    def do_OPTIONS(self):
      """Handles OPTIONS requests."""
      self._HandleRequest()

    def do_DELETE(self):
      """Handle DELETE requests."""
      self._HandleRequest()

    def do_TRACE(self):
      """Handles TRACE requests."""
      self._HandleRequest()

    def _HandleRequest(self):
      """Handles any type of request and prints exceptions if they occur."""
      # Derive the server name from the Host header when present, stripping
      # any ':port' suffix.
      server_name = self.headers.get('host') or self.server.server_name
      server_name = server_name.split(':', 1)[0]

      env_dict = {
          'REQUEST_METHOD': self.command,
          'REMOTE_ADDR': self.client_address[0],
          'SERVER_SOFTWARE': self.server_version,
          'SERVER_NAME': server_name,
          'SERVER_PROTOCOL': self.protocol_version,
          'SERVER_PORT': str(self.server.server_port),
      }

      full_url = GetFullURL(server_name, self.server.server_port, self.path)
      if len(full_url) > MAX_URL_LENGTH:
        msg = 'Requested URI too long: %s' % full_url
        logging.error(msg)
        self.send_response(httplib.REQUEST_URI_TOO_LONG, msg)
        return

      # cgitb renders an HTML traceback to the client on unexpected errors.
      tbhandler = cgitb.Hook(file=self.wfile).handle
      try:
        # Reload application modules whose files changed since last request.
        if self.module_manager.AreModuleFilesModified():
          self.module_manager.ResetModules()

        implicit_matcher = CreateImplicitMatcher(self.module_dict,
                                                 root_path,
                                                 login_url)
        config, explicit_matcher = LoadAppConfig(root_path, self.module_dict,
                                                 cache=self.config_cache,
                                                 static_caching=static_caching)
        if config.api_version != API_VERSION:
          logging.error("API versions cannot be switched dynamically: %r != %r"
                        % (config.api_version, API_VERSION))
          sys.exit(1)
        env_dict['CURRENT_VERSION_ID'] = config.version + ".1"
        env_dict['APPLICATION_ID'] = config.application
        # Implicit (internal) URLs take precedence over app-configured ones.
        dispatcher = MatcherDispatcher(login_url,
                                       [implicit_matcher, explicit_matcher])

        if require_indexes:
          dev_appserver_index.SetupIndexes(config.application, root_path)

        # Buffer the whole request body so its size can be validated.
        infile = cStringIO.StringIO(self.rfile.read(
            int(self.headers.get('content-length', 0))))

        request_size = len(infile.getvalue())
        if request_size > MAX_REQUEST_SIZE:
          msg = ('HTTP request was too large: %d. The limit is: %d.'
                 % (request_size, MAX_REQUEST_SIZE))
          logging.error(msg)
          self.send_response(httplib.REQUEST_ENTITY_TOO_LARGE, msg)
          return

        outfile = cStringIO.StringIO()
        try:
          dispatcher.Dispatch(self.path,
                              None,
                              self.headers,
                              infile,
                              outfile,
                              base_env_dict=env_dict)
        finally:
          # Snapshot mtimes even on failure so edits made while the request
          # was running are noticed by the next request.
          self.module_manager.UpdateModuleFileModificationTimes()

        outfile.flush()
        outfile.seek(0)

        status_code, status_message, header_data, body = RewriteResponse(outfile, self.rewriter_chain)

        runtime_response_size = len(outfile.getvalue())
        if runtime_response_size > MAX_RUNTIME_RESPONSE_SIZE:
          # Replace the oversized response with a 403 and strip the stale
          # Content-Length header.
          status_code = 403
          status_message = 'Forbidden'

          new_headers = []
          for header in header_data.split('\n'):
            if not header.lower().startswith('content-length'):
              new_headers.append(header)
          header_data = '\n'.join(new_headers)
          body = ('HTTP response was too large: %d. The limit is: %d.'
                  % (runtime_response_size, MAX_RUNTIME_RESPONSE_SIZE))

      except yaml_errors.EventListenerError, e:
        title = 'Fatal error when loading application configuration'
        msg = '%s:\n%s' % (title, str(e))
        logging.error(msg)
        self.send_response(httplib.INTERNAL_SERVER_ERROR, title)
        self.wfile.write('Content-Type: text/html\n\n')
        self.wfile.write('<pre>%s</pre>' % cgi.escape(msg))
      except:
        msg = 'Exception encountered handling request'
        logging.exception(msg)
        self.send_response(httplib.INTERNAL_SERVER_ERROR, msg)
        tbhandler()
      else:
        try:
          self.send_response(status_code, status_message)
          self.wfile.write(header_data)
          self.wfile.write('\r\n')
          # HEAD responses must not carry a body.
          if self.command != 'HEAD':
            self.wfile.write(body)
          elif body:
            logging.warning('Dropping unexpected body in response '
                            'to HEAD request')
        except (IOError, OSError), e:
          # A broken pipe just means the client went away mid-response.
          if e.errno != errno.EPIPE:
            raise e
        except socket.error, e:
          if len(e.args) >= 1 and e.args[0] != errno.EPIPE:
            raise e
        else:
          if index_yaml_updater is not None:
            # Only update index.yaml for requests that completed cleanly.
            index_yaml_updater.UpdateIndexYaml()

    def log_error(self, format, *args):
      """Redirect error messages through the logging module."""
      logging.error(format, *args)

    def log_message(self, format, *args):
      """Redirect log messages through the logging module."""
      logging.info(format, *args)

  return DevAppServerRequestHandler
def ReadAppConfig(appinfo_path, parse_app_config=appinfo.LoadSingleAppInfo):
  """Reads app.yaml file and returns its app id and list of URLMap instances.

  Args:
    appinfo_path: String containing the path to the app.yaml file.
    parse_app_config: Used for dependency injection.

  Returns:
    AppInfoExternal instance.

  Raises:
    InvalidAppConfigError: If the config file could not be read or the config
      does not contain any URLMap instances.
  """
  try:
    appinfo_file = file(appinfo_path, 'r')
  except IOError, e:
    # The underlying IOError is intentionally swallowed; the path in the
    # message is what matters to the user.
    raise InvalidAppConfigError(
        'Application configuration could not be read from "%s"' % appinfo_path)
  try:
    return parse_app_config(appinfo_file)
  finally:
    appinfo_file.close()
def CreateURLMatcherFromMaps(root_path,
                             url_map_list,
                             module_dict,
                             default_expiration,
                             create_url_matcher=URLMatcher,
                             create_cgi_dispatcher=CGIDispatcher,
                             create_file_dispatcher=FileDispatcher,
                             create_path_adjuster=PathAdjuster,
                             normpath=os.path.normpath):
  """Builds a URLMatcher wired with a dispatcher per URLMap entry.

  Creates the correct URLDispatcher instances to handle the various content
  types in the application configuration.

  Args:
    root_path: Path to the root of the application running on the server.
    url_map_list: List of appinfo.URLMap objects to initialize this matcher
      with; may be empty if patterns will be added manually.
    module_dict: Dictionary in which application-loaded modules should be
      preserved between requests; must be separate from sys.modules.
    default_expiration: String describing the default expiration time for
      browser-based caching of static files; None disables browser caching
      of static content entirely.
    create_url_matcher, create_cgi_dispatcher, create_file_dispatcher,
    create_path_adjuster: Used for dependency injection.

  Returns:
    Instance of URLMatcher with the supplied URLMap objects properly loaded.

  Raises:
    InvalidAppConfigError: An entry has an unrecognized handler type.
  """
  matcher = create_url_matcher()
  adjuster = create_path_adjuster(root_path)
  script_dispatcher = create_cgi_dispatcher(module_dict, root_path, adjuster)
  config_matcher = StaticFileConfigMatcher(url_map_list,
                                           adjuster,
                                           default_expiration)
  static_dispatcher = create_file_dispatcher(adjuster, config_matcher)
  FakeFile.SetStaticFileConfigMatcher(config_matcher)

  for entry in url_map_list:
    admin_only = entry.login == appinfo.LOGIN_ADMIN
    requires_login = admin_only or entry.login == appinfo.LOGIN_REQUIRED

    handler_type = entry.GetHandlerType()
    if handler_type == appinfo.HANDLER_SCRIPT:
      dispatcher = script_dispatcher
    elif handler_type in (appinfo.STATIC_FILES, appinfo.STATIC_DIR):
      dispatcher = static_dispatcher
    else:
      raise InvalidAppConfigError('Unknown handler type "%s"' % handler_type)

    regex = entry.url
    path = entry.GetHandler()
    if handler_type == appinfo.STATIC_DIR:
      # Turn the directory mapping into a capturing pattern plus a
      # backreference path so matched URL suffixes map into the directory.
      if regex[-1] == '/':
        regex = regex[:-1]
      if path[-1] == os.path.sep:
        path = path[:-1]
      regex = '/'.join((re.escape(regex), '(.*)'))
      # On Windows the separator is '\', which must be escaped so the
      # backreference survives regex substitution.
      backref = r'\\1' if os.path.sep == '\\' else r'\1'
      path = normpath(path).replace('\\', '\\\\') + os.path.sep + backref

    matcher.AddURL(regex,
                   dispatcher,
                   path,
                   requires_login, admin_only)

  return matcher
class AppConfigCache(object):
  """Opaque cache record used by LoadAppConfig.

  When passed to LoadAppConfig, instances hold the parsed contents of the
  app config (app.yaml or app.yml) and the Matcher created from it so
  unchanged configs are not re-parsed.  Code outside LoadAppConfig must not
  access these members.
  """

  # Path of the cached config file; None when the cache is empty/invalid.
  path = None
  # Modification time of the config file when it was parsed.
  mtime = None
  # Parsed AppInfoExternal instance.
  config = None
  # URLMatcher built from the parsed config.
  matcher = None
def LoadAppConfig(root_path,
                  module_dict,
                  cache=None,
                  static_caching=True,
                  read_app_config=ReadAppConfig,
                  create_matcher=CreateURLMatcherFromMaps):
  """Creates a Matcher instance for an application configuration file.

  Raises an InvalidAppConfigError exception if there is anything wrong with
  the application configuration file.

  Args:
    root_path: Path to the root of the application to load.
    module_dict: Dictionary in which application-loaded modules should be
      preserved between requests. This dictionary must be separate from the
      sys.modules dictionary.
    cache: Instance of AppConfigCache or None.
    static_caching: True if browser caching of static files should be allowed.
    read_app_config, create_matcher: Used for dependency injection.

  Returns:
    tuple: (AppInfoExternal, URLMatcher)

  Raises:
    AppConfigNotFoundError: Neither app.yaml nor app.yml exists in root_path.
  """
  # app.yaml is preferred; app.yml is the fallback spelling.
  for appinfo_path in [os.path.join(root_path, 'app.yaml'),
                       os.path.join(root_path, 'app.yml')]:
    if os.path.isfile(appinfo_path):
      if cache is not None:
        mtime = os.path.getmtime(appinfo_path)
        # Unchanged file: reuse the cached parse and matcher.
        if cache.path == appinfo_path and cache.mtime == mtime:
          return (cache.config, cache.matcher)

        # Invalidate before parsing so a parse failure cannot leave stale
        # config/matcher entries paired with the new mtime.
        cache.config = cache.matcher = cache.path = None
        cache.mtime = mtime

      try:
        config = read_app_config(appinfo_path, appinfo.LoadSingleAppInfo)

        if static_caching:
          if config.default_expiration:
            default_expiration = config.default_expiration
          else:
            # Caching enabled but no app-wide default: expire immediately
            # unless a handler configures its own expiration.
            default_expiration = '0'
        else:
          # None disables browser caching of static content entirely.
          default_expiration = None

        matcher = create_matcher(root_path,
                                 config.handlers,
                                 module_dict,
                                 default_expiration)

        FakeFile.SetSkippedFiles(config.skip_files)

        if cache is not None:
          cache.path = appinfo_path
          cache.config = config
          cache.matcher = matcher

        return (config, matcher)
      except gexcept.AbstractMethod:
        # NOTE(review): AbstractMethod from the parser is silently skipped so
        # the next candidate config file can be tried — confirm this is the
        # intended failure mode.
        pass

  raise AppConfigNotFoundError
def ReadCronConfig(croninfo_path, parse_cron_config=croninfo.LoadSingleCron):
  """Reads cron.yaml file and returns a list of CronEntry instances.

  Args:
    croninfo_path: String containing the path to the cron.yaml file.
    parse_cron_config: Used for dependency injection.

  Returns:
    A CronInfoExternal object.

  Raises:
    If the config file is unreadable, empty or invalid, this function will
    raise an InvalidAppConfigError or a MalformedCronConfiguration exception.
  """
  try:
    croninfo_file = file(croninfo_path, 'r')
  except IOError, e:
    # The underlying IOError is intentionally swallowed; the path in the
    # message is what matters to the user.
    raise InvalidAppConfigError(
        'Cron configuration could not be read from "%s"' % croninfo_path)
  try:
    return parse_cron_config(croninfo_file)
  finally:
    croninfo_file.close()
def SetupStubs(app_id, **config):
  """Sets up testing stubs of APIs.

  Args:
    app_id: Application ID being served.

  Keywords:
    login_url: Relative URL which should be used for handling user login/logout.
    datastore_path: Path to the file to store Datastore file stub data in.
    history_path: Path to the file to store Datastore history in.
    clear_datastore: If the datastore and history should be cleared on startup.
    smtp_host: SMTP host used for sending test mail.
    smtp_port: SMTP port.
    smtp_user: SMTP user.
    smtp_password: SMTP password.
    enable_sendmail: Whether to use sendmail as an alternative to SMTP.
    show_mail_body: Whether to log the body of emails.
    remove: Used for dependency injection.
  """
  # Required keywords raise KeyError when missing; the rest have defaults.
  login_url = config['login_url']
  datastore_path = config['datastore_path']
  history_path = config['history_path']
  clear_datastore = config['clear_datastore']
  require_indexes = config.get('require_indexes', False)
  smtp_host = config.get('smtp_host', None)
  smtp_port = config.get('smtp_port', 25)
  smtp_user = config.get('smtp_user', '')
  smtp_password = config.get('smtp_password', '')
  enable_sendmail = config.get('enable_sendmail', False)
  show_mail_body = config.get('show_mail_body', False)
  remove = config.get('remove', os.remove)

  os.environ['APPLICATION_ID'] = app_id

  if clear_datastore:
    for path in (datastore_path, history_path):
      # lexists so dangling symlinks are also removed.
      if os.path.lexists(path):
        logging.info('Attempting to remove file at %s', path)
        try:
          remove(path)
        except OSError, e:
          # Best-effort cleanup: a failed removal is logged, not fatal.
          logging.warning('Removing file failed: %s', e)

  # Replace the global stub map so stale stubs from a prior run are dropped.
  apiproxy_stub_map.apiproxy = apiproxy_stub_map.APIProxyStubMap()

  datastore = datastore_file_stub.DatastoreFileStub(
      app_id, datastore_path, history_path, require_indexes=require_indexes)
  apiproxy_stub_map.apiproxy.RegisterStub('datastore_v3', datastore)

  # '%%s' leaves a literal %s placeholder for the continue URL that the user
  # stub fills in per-request.
  fixed_login_url = '%s?%s=%%s' % (login_url,
                                   dev_appserver_login.CONTINUE_PARAM)
  fixed_logout_url = '%s&%s' % (fixed_login_url,
                                dev_appserver_login.LOGOUT_PARAM)

  apiproxy_stub_map.apiproxy.RegisterStub(
      'user',
      user_service_stub.UserServiceStub(login_url=fixed_login_url,
                                        logout_url=fixed_logout_url))

  apiproxy_stub_map.apiproxy.RegisterStub(
      'urlfetch',
      urlfetch_stub.URLFetchServiceStub())

  apiproxy_stub_map.apiproxy.RegisterStub(
      'mail',
      mail_stub.MailServiceStub(smtp_host,
                                smtp_port,
                                smtp_user,
                                smtp_password,
                                enable_sendmail=enable_sendmail,
                                show_mail_body=show_mail_body))

  apiproxy_stub_map.apiproxy.RegisterStub(
      'memcache',
      memcache_stub.MemcacheServiceStub())

  apiproxy_stub_map.apiproxy.RegisterStub(
      'capability_service',
      capability_stub.CapabilityServiceStub())

  try:
    # The real images stub requires PIL; import lazily so a missing PIL only
    # disables the images API instead of breaking server startup.
    from google.appengine.api.images import images_stub
    apiproxy_stub_map.apiproxy.RegisterStub(
        'images',
        images_stub.ImagesServiceStub())
  except ImportError, e:
    logging.warning('Could not initialize images API; you are likely missing '
                    'the Python "PIL" module. ImportError: %s', e)
    # Fall back to a stub that raises NotImplementedError on every call.
    from google.appengine.api.images import images_not_implemented_stub
    apiproxy_stub_map.apiproxy.RegisterStub('images',
        images_not_implemented_stub.ImagesNotImplementedServiceStub())
def CreateImplicitMatcher(module_dict,
                          root_path,
                          login_url,
                          create_path_adjuster=PathAdjuster,
                          create_local_dispatcher=LocalCGIDispatcher,
                          create_cgi_dispatcher=CGIDispatcher):
  """Creates a URLMatcher for dev_appserver's internal URLs.

  Facilitates handling of user login/logout and the admin/debugging console
  for the currently running app.

  Args:
    module_dict: Dictionary in the form used by sys.modules.
    root_path: Path to the root of the application.
    login_url: Relative URL which should be used for handling user
      login/logout.
    create_path_adjuster, create_local_dispatcher,
    create_cgi_dispatcher: Used for dependency injection.

  Returns:
    Instance of URLMatcher with appropriate dispatchers.
  """
  matcher = URLMatcher()
  adjuster = create_path_adjuster(root_path)

  # Login/logout runs in-process via the local dispatcher.
  login_dispatcher = create_local_dispatcher(sys.modules, adjuster,
                                             dev_appserver_login.main)
  matcher.AddURL(login_url,
                 login_dispatcher,
                 '',
                 False,
                 False)

  # The admin console is executed as a regular CGI script.
  admin_dispatcher = create_cgi_dispatcher(module_dict, root_path,
                                           adjuster)
  matcher.AddURL('/_ah/admin(?:/.*)?',
                 admin_dispatcher,
                 DEVEL_CONSOLE_PATH,
                 False,
                 False)

  return matcher
def SetupTemplates(template_dir):
  """Loads the debugging-console template files into the logging handler.

  Does nothing if the templates have already been initialized.

  Args:
    template_dir: Path to the directory containing the templates files.

  Raises:
    OSError or IOError if any of the template files could not be read.
  """
  if ApplicationLoggingHandler.AreTemplatesInitialized():
    return

  def read_template(name):
    return open(os.path.join(template_dir, name)).read()

  try:
    header = read_template(HEADER_TEMPLATE)
    script = read_template(SCRIPT_TEMPLATE)
    middle = read_template(MIDDLE_TEMPLATE)
    footer = read_template(FOOTER_TEMPLATE)
  except (OSError, IOError):
    logging.error('Could not read template files from %s', template_dir)
    raise
  ApplicationLoggingHandler.InitializeTemplates(header, script, middle, footer)
def CreateServer(root_path,
                 login_url,
                 port,
                 template_dir,
                 serve_address='',
                 require_indexes=False,
                 allow_skipped_files=False,
                 static_caching=True,
                 python_path_list=sys.path,
                 sdk_dir=os.path.dirname(os.path.dirname(google.__file__))):
  """Creates an new HTTPServer for an application.

  The sdk_dir argument must be specified for the directory storing all code for
  the SDK so as to allow for the sandboxing of module access to work for any
  and all SDK code. While typically this is where the 'google' package lives,
  it can be in another location because of API version support.

  Args:
    root_path: String containing the path to the root directory of the
      application where the app.yaml file is.
    login_url: Relative URL which should be used for handling user login/logout.
    port: Port to start the application server on.
    template_dir: Path to the directory in which the debug console templates
      are stored.
    serve_address: Address on which the server should serve.
    require_indexes: True if index.yaml is read-only gospel; default False.
    allow_skipped_files: True to allow access to files matched by the
      skip_files pattern; default False.
    static_caching: True if browser caching of static files should be allowed.
    python_path_list: Used for dependency injection.
    sdk_dir: Directory where the SDK is stored.

  Returns:
    Instance of BaseHTTPServer.HTTPServer that's ready to start accepting.
  """
  absolute_root_path = os.path.realpath(root_path)

  SetupTemplates(template_dir)
  # Limit the paths FakeFile will allow access to: the application root plus
  # the SDK and template directories.
  FakeFile.SetAllowedPaths(absolute_root_path,
                           [sdk_dir,
                            template_dir])
  FakeFile.SetAllowSkippedFiles(allow_skipped_files)

  handler_class = CreateRequestHandler(absolute_root_path,
                                       login_url,
                                       require_indexes,
                                       static_caching)

  # Make modules in the application root importable; note this mutates the
  # injected list (by default sys.path) on purpose.
  if absolute_root_path not in python_path_list:
    python_path_list.insert(0, absolute_root_path)

  return BaseHTTPServer.HTTPServer((serve_address, port), handler_class)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tool for deploying apps to an app server.
Currently, the application only uploads new appversions. To do this, it first
walks the directory tree rooted at the path the user specifies, adding all the
files it finds to a list. It then uploads the application configuration
(app.yaml) to the server using HTTP, followed by uploading each of the files.
It then commits the transaction with another request.
The bulk of this work is handled by the AppVersionUpload class, which exposes
methods to add to the list of files, fetch a list of modified files, upload
files, and commit or rollback the transaction.
"""
import calendar
import datetime
import getpass
import logging
import mimetypes
import optparse
import os
import re
import sha
import sys
import tempfile
import time
import urllib2
import google
import yaml
from google.appengine.cron import groctimespecification
from google.appengine.api import appinfo
from google.appengine.api import croninfo
from google.appengine.api import validation
from google.appengine.api import yaml_errors
from google.appengine.api import yaml_object
from google.appengine.datastore import datastore_index
from google.appengine.tools import appengine_rpc
# Maximum number of files sent per /api/appversion/clone{files,blobs} batch.
MAX_FILES_TO_CLONE = 100
# Delimiters used by BuildClonePostBody: LIST_DELIMITER separates tuples,
# TUPLE_DELIMITER separates fields within a tuple.
LIST_DELIMITER = "\n"
TUPLE_DELIMITER = "|"

# Location of the SDK's VERSION file, relative to the 'google' package.
VERSION_FILE = "../VERSION"

# Seconds to wait for a reply from /api/updatecheck.
UPDATE_CHECK_TIMEOUT = 3

# Name of the nag-preferences file stored in the user's home directory.
NAG_FILE = ".appcfg_nag"

# Highest app log severity that can be requested (see LogsRequester).
MAX_LOG_LEVEL = 4

# Global verbosity level; StatusUpdate prints only when this is > 0.
verbosity = 1


appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = "python"
# Allow overriding the set of accepted api_versions for testing.
_api_versions = os.environ.get('GOOGLE_TEST_API_VERSIONS', '1')
_options = validation.Options(*_api_versions.split(','))
appinfo.AppInfoExternal.ATTRIBUTES[appinfo.API_VERSION] = _options
del _api_versions, _options
def StatusUpdate(msg):
  """Writes a status message to stderr.

  The message is suppressed entirely when the module-level 'verbosity'
  is zero or negative.

  Args:
    msg: The string to print.
  """
  if verbosity <= 0:
    return
  sys.stderr.write("%s\n" % msg)
def GetMimeTypeIfStaticFile(config, filename):
  """Looks up the mime type for 'filename'.

  Uses the handlers in 'config' to determine if the file should
  be treated as a static file.

  Args:
    config: The app.yaml object to check the filename against.
    filename: The name of the file.

  Returns:
    The mime type string. For example, 'text/plain' or 'image/gif'.
    None if this is not a static file.
  """
  for url_map in config.handlers:
    handler_type = url_map.GetHandlerType()
    # Only static handlers can make a file static; skip script handlers.
    if handler_type not in ("static_dir", "static_files"):
      continue
    if handler_type == "static_dir":
      file_regex = os.path.join(re.escape(url_map.GetHandler()), ".*")
    else:
      file_regex = url_map.upload
    if not re.match(file_regex, filename):
      continue
    # The handler may pin an explicit mime type; otherwise guess from the
    # file extension, falling back to a generic binary type.
    if url_map.mime_type is not None:
      return url_map.mime_type
    guess = mimetypes.guess_type(filename)[0]
    if guess is None:
      default = "application/octet-stream"
      print >>sys.stderr, ("Could not guess mimetype for %s. Using %s."
                           % (filename, default))
      return default
    return guess
  return None
def BuildClonePostBody(file_tuples):
  """Build the post body for the /api/clone{files,blobs} urls.

  Args:
    file_tuples: A list of tuples. Each tuple should contain the entries
      appropriate for the endpoint in question.

  Returns:
    A string containing the properly delimited tuples.
  """
  # Each tuple's fields (path first) are joined with TUPLE_DELIMITER, and the
  # encoded tuples are joined with LIST_DELIMITER.
  encoded_tuples = [TUPLE_DELIMITER.join(entry) for entry in file_tuples]
  return LIST_DELIMITER.join(encoded_tuples)
class NagFile(validation.Validated):
  """A validated YAML class to represent the user's nag preferences.

  Attributes:
    timestamp: The timestamp of the last nag.
    opt_in: True if the user wants to check for updates on dev_appserver
      start. False if not. May be None if we have not asked the user yet.
  """

  # Schema for the ~/.appcfg_nag yaml document.
  ATTRIBUTES = {
      "timestamp": validation.TYPE_FLOAT,
      "opt_in": validation.Optional(validation.TYPE_BOOL),
  }

  @staticmethod
  def Load(nag_file):
    """Load a single NagFile object where one and only one is expected.

    Args:
      nag_file: A file-like object or string containing the yaml data to parse.

    Returns:
      A NagFile instance.
    """
    return yaml_object.BuildSingleObject(NagFile, nag_file)
def GetVersionObject(isfile=os.path.isfile, open_fn=open):
  """Returns the parsed contents of the SDK's VERSION file.

  Args:
    isfile: used for testing.
    open_fn: Used for testing.

  Returns:
    A Yaml object or None if the VERSION file does not exist.
  """
  # The VERSION file lives one directory above the 'google' package.
  version_filename = os.path.join(os.path.dirname(google.__file__),
                                  VERSION_FILE)
  if not isfile(version_filename):
    logging.error("Could not find version file at %s", version_filename)
    return None

  version_fh = open_fn(version_filename, "r")
  try:
    return yaml.safe_load(version_fh)
  finally:
    version_fh.close()
def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable):
  """Calls a function multiple times, backing off more and more each time.

  Args:
    initial_delay: Initial delay after first try, in seconds.
    backoff_factor: Delay will be multiplied by this factor after each try.
    max_tries: Maximum number of tries.
    callable: The method to call, will pass no arguments.

  Returns:
    True if the function succeeded in one of its tries.

  Raises:
    Whatever the function raises--an exception will immediately stop retries.
  """
  # NOTE: 'callable' shadows the builtin; the name is kept for interface
  # compatibility with existing keyword callers.
  #
  # Fixes two defects of the earlier implementation: the loop condition
  # re-evaluated callable() one extra time after retries were exhausted
  # (max_tries + 1 calls), and a success on that final evaluation was
  # discarded and reported as failure.
  delay = initial_delay
  while max_tries > 0:
    if callable():
      return True
    max_tries -= 1
    if max_tries > 0:
      # Only sleep when another try will actually happen.
      StatusUpdate("Will check again in %s seconds." % delay)
      time.sleep(delay)
      delay *= backoff_factor
  return False
class UpdateCheck(object):
  """Determines if the local SDK is the latest version.

  Nags the user when there are updates to the SDK. As the SDK becomes
  more out of date, the language in the nagging gets stronger. We
  store a little yaml file in the user's home directory so that we nag
  the user only once a week.

  The yaml file has the following field:
    'timestamp': Last time we nagged the user in seconds since the epoch.

  Attributes:
    server: An AbstractRpcServer instance used to check for the latest SDK.
    config: The app's AppInfoExternal. Needed to determine which api_version
      the app is using.
  """

  def __init__(self,
               server,
               config,
               isdir=os.path.isdir,
               isfile=os.path.isfile,
               open_fn=open):
    """Create a new UpdateCheck.

    Args:
      server: The AbstractRpcServer to use.
      config: The yaml object that specifies the configuration of this
        application.
      isdir: Replacement for os.path.isdir (for testing).
      isfile: Replacement for os.path.isfile (for testing).
      open_fn: Replacement for the open builtin (for testing).
    """
    self.server = server
    self.config = config
    self.isdir = isdir
    self.isfile = isfile
    self.open = open_fn

  @staticmethod
  def MakeNagFilename():
    """Returns the filename for the nag file for this user."""
    user_homedir = os.path.expanduser("~/")
    if not os.path.isdir(user_homedir):
      # On Windows, expanduser can produce a nonexistent directory when
      # HOMEDRIVE is unset; derive the drive from the Python installation
      # so the second expanduser call below yields a usable path.
      drive, unused_tail = os.path.splitdrive(os.__file__)
      if drive:
        os.environ["HOMEDRIVE"] = drive

    return os.path.expanduser("~/" + NAG_FILE)

  def _ParseVersionFile(self):
    """Parse the local VERSION file.

    Returns:
      A Yaml object or None if the file does not exist.
    """
    return GetVersionObject(isfile=self.isfile, open_fn=self.open)

  def CheckSupportedVersion(self):
    """Determines if the app's api_version is supported by the SDK.

    Uses the api_version field from the AppInfoExternal to determine if
    the SDK supports that api_version.

    Raises:
      SystemExit if the api_version is not supported.
    """
    version = self._ParseVersionFile()
    if version is None:
      # Can't verify; assume the user knows what they are doing.
      logging.error("Could not determine if the SDK supports the api_version "
                    "requested in app.yaml.")
      return
    if self.config.api_version not in version["api_versions"]:
      logging.critical("The api_version specified in app.yaml (%s) is not "
                       "supported by this release of the SDK. The supported "
                       "api_versions are %s.",
                       self.config.api_version, version["api_versions"])
      sys.exit(1)

  def CheckForUpdates(self):
    """Queries the server for updates and nags the user if appropriate.

    Queries the server for the latest SDK version at the same time reporting
    the local SDK version. The server will respond with a yaml document
    containing the fields:
      "release": The name of the release (e.g. 1.2).
      "timestamp": The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ).
      "api_versions": A list of api_version strings (e.g. ['1', 'beta']).

    We will nag the user with increasing severity if:
    - There is a new release.
    - There is a new release with a new api_version.
    - There is a new release that does not support the api_version named in
      self.config.
    """
    version = self._ParseVersionFile()
    if version is None:
      logging.info("Skipping update check")
      return
    logging.info("Checking for updates to the SDK.")

    try:
      response = self.server.Send("/api/updatecheck",
                                  timeout=UPDATE_CHECK_TIMEOUT,
                                  release=version["release"],
                                  timestamp=version["timestamp"],
                                  api_versions=version["api_versions"])
    except urllib2.URLError, e:
      # An update check failure should never break the actual operation.
      logging.info("Update check failed: %s", e)
      return

    latest = yaml.safe_load(response)
    if latest["release"] == version["release"]:
      logging.info("The SDK is up to date.")
      return

    api_versions = latest["api_versions"]
    if self.config.api_version not in api_versions:
      # Strongest nag: the app's api_version is gone from the latest SDK.
      self._Nag(
          "The api version you are using (%s) is obsolete! You should\n"
          "upgrade your SDK and test that your code works with the new\n"
          "api version." % self.config.api_version,
          latest, version, force=True)
      return

    if self.config.api_version != api_versions[len(api_versions) - 1]:
      # The app's api_version still works but is no longer the newest.
      self._Nag(
          "The api version you are using (%s) is deprecated. You should\n"
          "upgrade your SDK to try the new functionality." %
          self.config.api_version, latest, version)
      return

    self._Nag("There is a new release of the SDK available.",
              latest, version)

  def _ParseNagFile(self):
    """Parses the nag file.

    Returns:
      A NagFile if the file was present else None.
    """
    nag_filename = UpdateCheck.MakeNagFilename()
    if self.isfile(nag_filename):
      fh = self.open(nag_filename, "r")
      try:
        nag = NagFile.Load(fh)
      finally:
        fh.close()
      return nag
    return None

  def _WriteNagFile(self, nag):
    """Writes the NagFile to the user's nag file.

    If the destination path does not exist, this method will log an error
    and fail silently.

    Args:
      nag: The NagFile to write.
    """
    nagfilename = UpdateCheck.MakeNagFilename()
    try:
      fh = self.open(nagfilename, "w")
      try:
        fh.write(nag.ToYAML())
      finally:
        fh.close()
    except (OSError, IOError), e:
      logging.error("Could not write nag file to %s. Error: %s", nagfilename, e)

  def _Nag(self, msg, latest, version, force=False):
    """Prints a nag message and updates the nag file's timestamp.

    Because we don't want to nag the user every time, we store a simple
    yaml document in the user's home directory. If the timestamp in this
    doc is over a week old, we'll nag the user. And when we nag the user,
    we update the timestamp in this doc.

    Args:
      msg: The formatted message to print to the user.
      latest: The yaml document received from the server.
      version: The local yaml version document.
      force: If True, always nag the user, ignoring the nag file.
    """
    nag = self._ParseNagFile()
    if nag and not force:
      # Throttle: at most one nag per week unless forced.
      last_nag = datetime.datetime.fromtimestamp(nag.timestamp)
      if datetime.datetime.now() - last_nag < datetime.timedelta(weeks=1):
        logging.debug("Skipping nag message")
        return

    if nag is None:
      nag = NagFile()
    nag.timestamp = time.time()
    self._WriteNagFile(nag)

    print "****************************************************************"
    print msg
    print "-----------"
    print "Latest SDK:"
    print yaml.dump(latest)
    print "-----------"
    print "Your SDK:"
    print yaml.dump(version)
    print "-----------"
    print "Please visit http://code.google.com/appengine for the latest SDK"
    print "****************************************************************"

  def AllowedToCheckForUpdates(self, input_fn=raw_input):
    """Determines if the user wants to check for updates.

    On startup, the dev_appserver wants to check for updates to the SDK.
    Because this action reports usage to Google when the user is not
    otherwise communicating with Google (e.g. pushing a new app version),
    the user must opt in.

    If the user does not have a nag file, we will query the user and
    save the response in the nag file. Subsequent calls to this function
    will re-use that response.

    Args:
      input_fn: used to collect user input. This is for testing only.

    Returns:
      True if the user wants to check for updates. False otherwise.
    """
    nag = self._ParseNagFile()
    if nag is None:
      nag = NagFile()
      nag.timestamp = time.time()

    if nag.opt_in is None:
      # First run: ask once and persist the answer. Default (empty) is yes.
      answer = input_fn("Allow dev_appserver to check for updates on startup? "
                        "(Y/n): ")
      answer = answer.strip().lower()
      if answer == "n" or answer == "no":
        print ("dev_appserver will not check for updates on startup. To "
               "change this setting, edit %s" % UpdateCheck.MakeNagFilename())
        nag.opt_in = False
      else:
        print ("dev_appserver will check for updates on startup. To change "
               "this setting, edit %s" % UpdateCheck.MakeNagFilename())
        nag.opt_in = True
      self._WriteNagFile(nag)
    return nag.opt_in
class IndexDefinitionUpload(object):
  """Pushes datastore index definitions to the hosting service."""

  def __init__(self, server, config, definitions):
    """Initializes the index uploader.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: The AppInfoExternal object derived from the app.yaml file.
      definitions: An IndexDefinitions object.
    """
    self.definitions = definitions
    self.config = config
    self.server = server

  def DoUpload(self):
    """Sends the index definitions to the server as YAML."""
    StatusUpdate("Uploading index definitions.")
    self.server.Send("/api/datastore/index/add",
                     app_id=self.config.application,
                     version=self.config.version,
                     payload=self.definitions.ToYAML())
class CronEntryUpload(object):
  """Pushes cron entries from cron.yaml to the hosting service."""

  def __init__(self, server, config, cron):
    """Initializes the cron uploader.

    Args:
      server: The RPC server to use. Should be an instance of a subclass of
        AbstractRpcServer
      config: The AppInfoExternal object derived from the app.yaml file.
      cron: The CronInfoExternal object loaded from the cron.yaml file.
    """
    self.cron = cron
    self.config = config
    self.server = server

  def DoUpload(self):
    """Sends the cron entries to the server as YAML."""
    StatusUpdate("Uploading cron entries.")
    self.server.Send("/api/datastore/cron/update",
                     app_id=self.config.application,
                     version=self.config.version,
                     payload=self.cron.ToYAML())
class IndexOperation(object):
  """Base class providing the datastore index RPC primitives."""

  def __init__(self, server, config):
    """Initializes the operation.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
    """
    self.config = config
    self.server = server

  def DoDiff(self, definitions):
    """Fetches the index diff between local definitions and the server's.

    Args:
      definitions: datastore_index.IndexDefinitions as loaded from the user's
        index.yaml file.

    Returns:
      A pair of datastore_index.IndexDefinitions objects. The first holds the
      indexes present in index.yaml but missing from the server; the second
      holds the indexes present on the server but missing from index.yaml
      (indicating that these indexes should probably be vacuumed).
    """
    StatusUpdate("Fetching index definitions diff.")
    response = self.server.Send("/api/datastore/index/diff",
                                app_id=self.config.application,
                                payload=definitions.ToYAML())
    return datastore_index.ParseMultipleIndexDefinitions(response)

  def DoDelete(self, definitions):
    """Asks the server to delete the given index definitions.

    Args:
      definitions: Index definitions to delete from datastore.

    Returns:
      A single datastore_index.IndexDefinitions containing the indexes that
      were not deleted, probably because they were already removed. This may
      be normal behavior: there is a potential race between fetching the
      index-diff and sending the deletion confirmation through.
    """
    StatusUpdate("Deleting selected index definitions.")
    response = self.server.Send("/api/datastore/index/delete",
                                app_id=self.config.application,
                                payload=definitions.ToYAML())
    return datastore_index.ParseIndexDefinitions(response)
class VacuumIndexesOperation(IndexOperation):
"""Provide facilities to request the deletion of datastore indexes."""
def __init__(self, server, config, force,
confirmation_fn=raw_input):
"""Creates a new VacuumIndexesOperation.
Args:
server: The RPC server to use. Should be an instance of HttpRpcServer
or TestRpcServer.
config: appinfo.AppInfoExternal configuration object.
force: True to force deletion of indexes, else False.
confirmation_fn: Function used for getting input form user.
"""
super(VacuumIndexesOperation, self).__init__(server, config)
self.force = force
self.confirmation_fn = confirmation_fn
def GetConfirmation(self, index):
"""Get confirmation from user to delete an index.
This method will enter an input loop until the user provides a
response it is expecting. Valid input is one of three responses:
y: Confirm deletion of index.
n: Do not delete index.
a: Delete all indexes without asking for further confirmation.
If the user enters nothing at all, the default action is to skip
that index and do not delete.
If the user selects 'a', as a side effect, the 'force' flag is set.
Args:
index: Index to confirm.
Returns:
True if user enters 'y' or 'a'. False if user enter 'n'.
"""
while True:
print "This index is no longer defined in your index.yaml file."
print
print index.ToYAML()
print
confirmation = self.confirmation_fn(
"Are you sure you want to delete this index? (N/y/a): ")
confirmation = confirmation.strip().lower()
if confirmation == "y":
return True
elif confirmation == "n" or not confirmation:
return False
elif confirmation == "a":
self.force = True
return True
else:
print "Did not understand your response."
def DoVacuum(self, definitions):
"""Vacuum indexes in datastore.
This method will query the server to determine which indexes are not
being used according to the user's local index.yaml file. Once it has
made this determination, it confirms with the user which unused indexes
should be deleted. Once confirmation for each index is receives, it
deletes those indexes.
Because another user may in theory delete the same indexes at the same
time as the user, there is a potential race condition. In this rare cases,
some of the indexes previously confirmed for deletion will not be found.
The user is notified which indexes these were.
Args:
definitions: datastore_index.IndexDefinitions as loaded from users
index.yaml file.
"""
unused_new_indexes, notused_indexes = self.DoDiff(definitions)
deletions = datastore_index.IndexDefinitions(indexes=[])
if notused_indexes.indexes is not None:
for index in notused_indexes.indexes:
if self.force or self.GetConfirmation(index):
deletions.indexes.append(index)
if deletions.indexes:
not_deleted = self.DoDelete(deletions)
if not_deleted.indexes:
not_deleted_count = len(not_deleted.indexes)
if not_deleted_count == 1:
warning_message = ("An index was not deleted. Most likely this is "
"because it no longer exists.\n\n")
else:
warning_message = ("%d indexes were not deleted. Most likely this "
"is because they no longer exist.\n\n"
% not_deleted_count)
for index in not_deleted.indexes:
warning_message += index.ToYAML()
logging.warning(warning_message)
class LogsRequester(object):
  """Provide facilities to export request logs."""

  def __init__(self, server, config, output_file,
               num_days, append, severity, now):
    """Constructor.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
      output_file: Output file name.
      num_days: Number of days worth of logs to export; 0 for all available.
      append: True if appending to an existing file.
      severity: App log severity to request (0-4); None for no app logs.
      now: POSIX timestamp used for calculating valid dates for num_days.
    """
    self.server = server
    self.config = config
    self.output_file = output_file
    self.append = append
    self.num_days = num_days
    self.severity = severity

    # NOTE(review): the ".1" suffix is presumably the minor version of the
    # deployed app -- confirm against the server's log version naming.
    self.version_id = self.config.version + ".1"
    self.sentinel = None
    self.write_mode = "w"
    if self.append:
      # When appending, remember the last complete line already written so
      # the download can stop once it reaches data we already have.
      self.sentinel = FindSentinel(self.output_file)
      self.write_mode = "a"
    self.valid_dates = None
    if self.num_days:
      # Build a regex matching log lines whose request date (in Pacific
      # time, either %d/%m/%Y or %d/%b/%Y format) is within num_days days.
      patterns = []
      now = PacificTime(now)
      for i in xrange(self.num_days):
        then = time.gmtime(now - 24*3600 * i)
        patterns.append(re.escape(time.strftime("%d/%m/%Y", then)))
        patterns.append(re.escape(time.strftime("%d/%b/%Y", then)))
      self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):")

  def DownloadLogs(self):
    """Download the requested logs.

    This will write the logs to the file designated by
    self.output_file, or to stdout if the filename is '-'.

    Multiple roundtrips to the server may be made.
    """
    StatusUpdate("Downloading request logs for %s %s." %
                 (self.config.application, self.version_id))
    # Fetch into a temporary file first (server returns newest records
    # first); CopyReversedLines later writes them out oldest-first.
    tf = tempfile.TemporaryFile()
    offset = None
    try:
      while True:
        try:
          offset = self.RequestLogLines(tf, offset)
          if not offset:
            break
        except KeyboardInterrupt:
          # Partial data is still useful; fall through and save it.
          StatusUpdate("Keyboard interrupt; saving data downloaded so far.")
          break
      StatusUpdate("Copying request logs to %r." % self.output_file)
      if self.output_file == "-":
        of = sys.stdout
      else:
        try:
          of = open(self.output_file, self.write_mode)
        except IOError, err:
          StatusUpdate("Can't write %r: %s." % (self.output_file, err))
          sys.exit(1)
      try:
        line_count = CopyReversedLines(tf, of)
      finally:
        of.flush()
        if of is not sys.stdout:
          of.close()
    finally:
      tf.close()
    StatusUpdate("Copied %d records." % line_count)

  def RequestLogLines(self, tf, offset):
    """Make a single roundtrip to the server.

    Args:
      tf: Writable binary stream to which the log lines returned by
        the server are written, stripped of headers, and excluding
        lines skipped due to self.sentinel or self.valid_dates filtering.
      offset: Offset string for a continued request; None for the first.

    Returns:
      The offset string to be used for the next request, if another
      request should be issued; or None, if not.
    """
    logging.info("Request with offset %r.", offset)
    kwds = {"app_id": self.config.application,
            "version": self.version_id,
            "limit": 100,
            }
    if offset:
      kwds["offset"] = offset
    if self.severity is not None:
      kwds["severity"] = str(self.severity)
    response = self.server.Send("/api/request_logs", payload=None, **kwds)
    # Encode embedded carriage returns as NULs so each record stays on one
    # physical line; CopyReversedLines turns NULs into "\n\t" continuations.
    response = response.replace("\r", "\0")
    lines = response.splitlines()
    logging.info("Received %d bytes, %d records.", len(response), len(lines))
    offset = None
    # A leading "#" header line may carry the continuation offset.
    if lines and lines[0].startswith("#"):
      match = re.match(r"^#\s*next_offset=(\S+)\s*$", lines[0])
      del lines[0]
      if match:
        offset = match.group(1)
    # A trailing "#" line is a footer; discard it.
    if lines and lines[-1].startswith("#"):
      del lines[-1]
    valid_dates = self.valid_dates
    sentinel = self.sentinel
    len_sentinel = None
    if sentinel:
      len_sentinel = len(sentinel)
    for line in lines:
      # Stop the whole download at the sentinel (data already on disk) or at
      # the first record older than the requested date range.
      if ((sentinel and
           line.startswith(sentinel) and
           line[len_sentinel : len_sentinel+1] in ("", "\0")) or
          (valid_dates and not valid_dates.match(line))):
        return None
      tf.write(line + "\n")
    if not lines:
      return None
    return offset
def PacificTime(now):
  """Converts a UTC timestamp to a pseudo-posix Pacific-time timestamp.

  Request logs are reported in Mountain View local time regardless of
  where the servers are physically located, so today's date must be
  computed in Pacific time. Pacific time is 8 hours west of UTC, or 7
  while US DST is in effect; since 2007 US DST runs from the second
  Sunday in March to the first Sunday in November (see
  http://aa.usno.navy.mil/faq/docs/daylight_time.php). Note that the
  server doesn't report its local time (the HTTP Date header uses UTC),
  and the client's local time is irrelevant.

  Args:
    now: A posix timestamp giving current UTC time.

  Returns:
    A pseudo-posix timestamp giving current Pacific time. Passing
    this through time.gmtime() will produce a tuple in Pacific local
    time.
  """
  pacific = now - 8 * 3600
  if IsPacificDST(pacific):
    pacific += 3600
  return pacific
def IsPacificDST(now):
  """Helper for PacificTime to decide whether now is Pacific DST (PDT).

  Args:
    now: A pseudo-posix timestamp giving current time in PST.

  Returns:
    True if now falls within the range of DST, False otherwise.
  """
  day_seconds = 24 * 3600
  sunday = 6
  year = time.gmtime(now)[0]
  assert year >= 2007

  def first_sunday_from(timestamp):
    """Advances timestamp day by day until it lands on a Sunday."""
    while time.gmtime(timestamp).tm_wday != sunday:
      timestamp += day_seconds
    return timestamp

  # DST starts at 2:00 on the second Sunday in March (earliest Mar 8) and
  # ends at 2:00 on the first Sunday in November.
  dst_start = first_sunday_from(calendar.timegm((year, 3, 8, 2, 0, 0,
                                                 0, 0, 0)))
  dst_end = first_sunday_from(calendar.timegm((year, 11, 1, 2, 0, 0,
                                               0, 0, 0)))
  return dst_start <= now < dst_end
def CopyReversedLines(instream, outstream, blocksize=2**16):
  r"""Copy lines from input stream to output stream in reverse order.

  As a special feature, null bytes in the input are turned into
  newlines followed by tabs in the output, but these "sub-lines"
  separated by null bytes are not reversed. E.g. If the input is
  "A\0B\nC\0D\n", the output is "C\n\tD\nA\n\tB\n".

  Args:
    instream: A seekable stream open for reading in binary mode.
    outstream: A stream open for writing; doesn't have to be seekable or binary.
    blocksize: Optional block size for buffering, for unit testing.

  Returns:
    The number of lines copied.
  """
  line_count = 0
  instream.seek(0, 2)
  last_block = instream.tell() // blocksize
  # Partial first line of the block processed previously; blocks are visited
  # back-to-front, so the carry flows toward the start of the file.
  spillover = ""
  # Start one block past the end: the first read returns "" and just seeds
  # the spillover machinery.
  for iblock in xrange(last_block + 1, -1, -1):
    instream.seek(iblock * blocksize)
    data = instream.read(blocksize)
    lines = data.splitlines(True)
    # Glue this block's trailing (possibly partial) line onto the spillover
    # carried from the following block, then re-split on newlines.
    lines[-1:] = "".join(lines[-1:] + [spillover]).splitlines(True)
    if lines and not lines[-1].endswith("\n"):
      # Ensure the final line is newline-terminated so output stays
      # line-shaped.
      lines[-1] += "\n"
    lines.reverse()
    if lines and iblock > 0:
      # This block's first line may be a fragment of a line that starts in
      # the preceding block; defer it.
      spillover = lines.pop()
    if lines:
      line_count += len(lines)
      # Expand NUL-encoded sub-lines into newline + tab continuations.
      data = "".join(lines).replace("\0", "\n\t")
      outstream.write(data)
  return line_count
def FindSentinel(filename, blocksize=2**16):
"""Return the sentinel line from the output file.
Args:
filename: The filename of the output file. (We'll read this file.)
blocksize: Optional block size for buffering, for unit testing.
Returns:
The contents of the last line in the file that doesn't start with
a tab, with its trailing newline stripped; or None if the file
couldn't be opened or no such line could be found by inspecting
the last 'blocksize' bytes of the file.
"""
if filename == "-":
StatusUpdate("Can't combine --append with output to stdout.")
sys.exit(2)
try:
fp = open(filename, "rb")
except IOError, err:
StatusUpdate("Append mode disabled: can't read %r: %s." % (filename, err))
return None
try:
fp.seek(0, 2)
fp.seek(max(0, fp.tell() - blocksize))
lines = fp.readlines()
del lines[:1]
sentinel = None
for line in lines:
if not line.startswith("\t"):
sentinel = line
if not sentinel:
StatusUpdate("Append mode disabled: can't find sentinel in %r." %
filename)
return None
return sentinel.rstrip("\n")
finally:
fp.close()
class AppVersionUpload(object):
"""Provides facilities to upload a new appversion to the hosting service.
Attributes:
server: The AbstractRpcServer to use for the upload.
config: The AppInfoExternal object derived from the app.yaml file.
app_id: The application string from 'config'.
version: The version string from 'config'.
files: A dictionary of files to upload to the server, mapping path to
hash of the file contents.
in_transaction: True iff a transaction with the server has started.
An AppVersionUpload can do only one transaction at a time.
deployed: True iff the Deploy method has been called.
"""
def __init__(self, server, config):
"""Creates a new AppVersionUpload.
Args:
server: The RPC server to use. Should be an instance of HttpRpcServer or
TestRpcServer.
config: An AppInfoExternal object that specifies the configuration for
this application.
"""
self.server = server
self.config = config
self.app_id = self.config.application
self.version = self.config.version
self.files = {}
self.in_transaction = False
self.deployed = False
def _Hash(self, content):
"""Compute the hash of the content.
Args:
content: The data to hash as a string.
Returns:
The string representation of the hash.
"""
h = sha.new(content).hexdigest()
return "%s_%s_%s_%s_%s" % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
def AddFile(self, path, file_handle):
"""Adds the provided file to the list to be pushed to the server.
Args:
path: The path the file should be uploaded as.
file_handle: A stream containing data to upload.
"""
assert not self.in_transaction, "Already in a transaction."
assert file_handle is not None
reason = appinfo.ValidFilename(path)
if reason:
logging.error(reason)
return
pos = file_handle.tell()
content_hash = self._Hash(file_handle.read())
file_handle.seek(pos, 0)
self.files[path] = content_hash
  def Begin(self):
    """Begins the transaction, returning a list of files that need uploading.

    All calls to AddFile must be made before calling Begin().

    Returns:
      A list of pathnames for files that should be uploaded using UploadFile()
      before Commit() can be called.
    """
    assert not self.in_transaction, "Already in a transaction."

    StatusUpdate("Initiating update.")
    self.server.Send("/api/appversion/create", app_id=self.app_id,
                     version=self.version, payload=self.config.ToYAML())
    self.in_transaction = True

    # Partition queued files: static files become blobs (with a mime type),
    # everything else is application code.
    files_to_clone = []
    blobs_to_clone = []
    for path, content_hash in self.files.iteritems():
      mime_type = GetMimeTypeIfStaticFile(self.config, path)
      if mime_type is not None:
        blobs_to_clone.append((path, content_hash, mime_type))
      else:
        files_to_clone.append((path, content_hash))

    # Populated by CloneFiles below with the files the server doesn't have.
    files_to_upload = {}

    def CloneFiles(url, files, file_type):
      """Sends files to the given url.

      Args:
        url: the server URL to use.
        files: a list of files
        file_type: the type of the files
      """
      if not files:
        return

      StatusUpdate("Cloning %d %s file%s." %
                   (len(files), file_type, len(files) != 1 and "s" or ""))
      # Send in batches of MAX_FILES_TO_CLONE; the server replies with the
      # subset of paths it does not already have by hash.
      for i in xrange(0, len(files), MAX_FILES_TO_CLONE):
        if i > 0 and i % MAX_FILES_TO_CLONE == 0:
          StatusUpdate("Cloned %d files." % i)

        chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)]
        result = self.server.Send(url,
                                  app_id=self.app_id, version=self.version,
                                  payload=BuildClonePostBody(chunk))
        if result:
          files_to_upload.update(dict(
              (f, self.files[f]) for f in result.split(LIST_DELIMITER)))

    CloneFiles("/api/appversion/cloneblobs", blobs_to_clone, "static")
    CloneFiles("/api/appversion/clonefiles", files_to_clone, "application")

    logging.info("Files to upload: " + str(files_to_upload))

    # From here on, self.files tracks only what still needs to be uploaded.
    self.files = files_to_upload
    return sorted(files_to_upload.iterkeys())
def UploadFile(self, path, file_handle):
"""Uploads a file to the hosting service.
Must only be called after Begin().
The path provided must be one of those that were returned by Begin().
Args:
path: The path the file is being uploaded as.
file_handle: A file-like object containing the data to upload.
Raises:
KeyError: The provided file is not amongst those to be uploaded.
"""
assert self.in_transaction, "Begin() must be called before UploadFile()."
if path not in self.files:
raise KeyError("File '%s' is not in the list of files to be uploaded."
% path)
del self.files[path]
mime_type = GetMimeTypeIfStaticFile(self.config, path)
if mime_type is not None:
self.server.Send("/api/appversion/addblob", app_id=self.app_id,
version=self.version, path=path, content_type=mime_type,
payload=file_handle.read())
else:
self.server.Send("/api/appversion/addfile", app_id=self.app_id,
version=self.version, path=path,
payload=file_handle.read())
  def Commit(self):
    """Commits the transaction, making the new app version available.

    All the files returned by Begin() must have been uploaded with UploadFile()
    before Commit() can be called.

    This tries the new 'deploy' method; if that fails it uses the old 'commit'.

    Raises:
      Exception: Some required files were not uploaded.
    """
    assert self.in_transaction, "Begin() must be called before Commit()."
    if self.files:
      raise Exception("Not all required files have been uploaded.")

    try:
      self.Deploy()
      # Poll IsReady() with exponential backoff (1s doubling up to 8s)
      # until the new version reports ready, then flip it to serving.
      if not RetryWithBackoff(1, 2, 8, self.IsReady):
        logging.warning("Version still not ready to serve, aborting.")
        raise Exception("Version not ready.")
      self.StartServing()
    except urllib2.HTTPError, e:
      # An older server lacking the deploy/isready/startserving endpoints
      # answers 404; fall back to the legacy single-step commit.
      if e.code != 404:
        raise
      StatusUpdate("Closing update.")
      self.server.Send("/api/appversion/commit", app_id=self.app_id,
                       version=self.version)
    self.in_transaction = False
def Deploy(self):
"""Deploys the new app version but does not make it default.
All the files returned by Begin() must have been uploaded with UploadFile()
before Deploy() can be called.
Raises:
Exception: Some required files were not uploaded.
"""
assert self.in_transaction, "Begin() must be called before Deploy()."
if self.files:
raise Exception("Not all required files have been uploaded.")
StatusUpdate("Deploying new version.")
self.server.Send("/api/appversion/deploy", app_id=self.app_id,
version=self.version)
self.deployed = True
def IsReady(self):
"""Check if the new app version is ready to serve traffic.
Raises:
Exception: Deploy has not yet been called.
Returns:
True if the server returned the app is ready to serve.
"""
assert self.deployed, "Deploy() must be called before IsReady()."
StatusUpdate("Checking if new version is ready to serve.")
result = self.server.Send("/api/appversion/isready", app_id=self.app_id,
version=self.version)
return result == "1"
def StartServing(self):
"""Start serving with the newly created version.
Raises:
Exception: Deploy has not yet been called.
"""
assert self.deployed, "Deploy() must be called before IsReady()."
StatusUpdate("Closing update: new version is ready to start serving.")
self.server.Send("/api/appversion/startserving",
app_id=self.app_id, version=self.version)
def Rollback(self):
"""Rolls back the transaction if one is in progress."""
if not self.in_transaction:
return
StatusUpdate("Rolling back the update.")
self.server.Send("/api/appversion/rollback", app_id=self.app_id,
version=self.version)
self.in_transaction = False
self.files = {}
  def DoUpload(self, paths, max_size, openfunc):
    """Uploads a new appversion with the given config and files to the server.

    Args:
      paths: An iterator that yields the relative paths of the files to upload.
      max_size: The maximum size file to upload.
      openfunc: A function that takes a path and returns a file-like object.
    """
    logging.info("Reading app configuration.")

    # Tracked so the EnvironmentError handler can name the offending file.
    path = ""
    try:
      StatusUpdate("Scanning files on local disk.")
      num_files = 0
      for path in paths:
        file_handle = openfunc(path)
        try:
          if self.config.skip_files.match(path):
            logging.info("Ignoring file '%s': File matches ignore regex.",
                         path)
          else:
            file_length = GetFileLength(file_handle)
            if file_length > max_size:
              logging.error("Ignoring file '%s': Too long "
                            "(max %d bytes, file is %d bytes)",
                            path, max_size, file_length)
            else:
              logging.info("Processing file '%s'", path)
              self.AddFile(path, file_handle)
        finally:
          file_handle.close()
        num_files += 1
        if num_files % 500 == 0:
          StatusUpdate("Scanned %d files." % num_files)
    except KeyboardInterrupt:
      logging.info("User interrupted. Aborting.")
      raise
    except EnvironmentError, e:
      logging.error("An error occurred processing file '%s': %s. Aborting.",
                    path, e)
      raise

    try:
      # Begin() returns only the files the server could not clone from a
      # previous version; upload those, then commit the transaction.
      missing_files = self.Begin()
      if missing_files:
        StatusUpdate("Uploading %d files." % len(missing_files))
        num_files = 0
        for missing_file in missing_files:
          logging.info("Uploading file '%s'" % missing_file)
          file_handle = openfunc(missing_file)
          try:
            self.UploadFile(missing_file, file_handle)
          finally:
            file_handle.close()
          num_files += 1
          if num_files % 500 == 0:
            StatusUpdate("Uploaded %d files." % num_files)

      self.Commit()
    except KeyboardInterrupt:
      logging.info("User interrupted. Aborting.")
      self.Rollback()
      raise
    except:
      # Bare except is deliberate: any failure must roll back the
      # server-side transaction before propagating.
      logging.exception("An unexpected error occurred. Aborting.")
      self.Rollback()
      raise

    logging.info("Done!")
def FileIterator(base, separator=os.path.sep):
  """Walks a directory tree, returning all the files. Follows symlinks.

  Args:
    base: The base path to search for files under.
    separator: Path separator used by the running system's platform.

  Yields:
    Paths of files found, relative to base.
  """
  # Iterative depth-first walk; entries are directories (relative to base)
  # still awaiting a visit.
  pending = [""]
  while pending:
    rel_dir = pending.pop()
    for entry in os.listdir(os.path.join(base, rel_dir)):
      rel_name = os.path.join(rel_dir, entry)
      full_path = os.path.join(base, rel_name)
      if os.path.isdir(full_path):
        pending.append(rel_name)
      elif os.path.isfile(full_path):
        # Report Windows paths with forward slashes.
        if separator == "\\":
          rel_name = rel_name.replace("\\", "/")
        yield rel_name
def GetFileLength(fh):
  """Returns the length of the file represented by fh.

  This function is capable of finding the length of any seekable stream,
  unlike os.fstat, which only works on file streams.

  Args:
    fh: The stream to get the length of.

  Returns:
    The length of the stream.
  """
  saved_offset = fh.tell()
  # Seek to the end (whence=2) to learn the total size, then restore the
  # caller's position.
  fh.seek(0, 2)
  end_offset = fh.tell()
  fh.seek(saved_offset, 0)
  return end_offset
def GetUserAgent(get_version=GetVersionObject,
                 get_platform=appengine_rpc.GetPlatformToken):
  """Determines the value of the 'User-agent' header to use for HTTP requests.

  If the 'APPCFG_SDK_NAME' environment variable is present, that will be
  used as the first product token in the user-agent.

  Args:
    get_version: Used for testing.
    get_platform: Used for testing.

  Returns:
    String containing the 'user-agent' header value, which includes the SDK
    version, the platform information, and the version of Python;
    e.g., "appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2".
  """
  # APPCFG_SDK_NAME overrides the default appcfg_py/<release> product token.
  sdk_name = os.environ.get("APPCFG_SDK_NAME")
  if sdk_name:
    first_token = sdk_name
  else:
    version = get_version()
    if version is None:
      release = "unknown"
    else:
      release = version["release"]
    first_token = "appcfg_py/%s" % release

  python_version = ".".join(str(part) for part in sys.version_info)
  return " ".join([first_token,
                   get_platform(),
                   "Python/%s" % python_version])
def GetSourceName(get_version=GetVersionObject):
  """Gets the name of this source version."""
  # Fall back to "unknown" when no version information is available.
  version = get_version()
  if version is not None:
    release = version["release"]
  else:
    release = "unknown"
  return "Google-appcfg-%s" % (release,)
class AppCfgApp(object):
  """Singleton class to wrap AppCfg tool functionality.

  This class is responsible for parsing the command line and executing
  the desired action on behalf of the user.  Processing files and
  communicating with the server is handled by other classes.

  Attributes:
    actions: A dictionary mapping action names to Action objects.
    action: The Action specified on the command line.
    parser: An instance of optparse.OptionParser.
    options: The command line options parsed by 'parser'.
    argv: The original command line as a list.
    args: The positional command line args left over after parsing the options.
    raw_input_fn: Function used for getting raw user input, like email.
    password_input_fn: Function used for getting user password.
    error_fh: Unexpected HTTPErrors are printed to this file handle.

  Attributes for testing:
    parser_class: The class to use for parsing the command line.  Because
      OptionsParser will exit the program when there is a parse failure, it
      is nice to subclass OptionsParser and catch the error before exiting.
  """
  def __init__(self, argv, parser_class=optparse.OptionParser,
               rpc_server_class=appengine_rpc.HttpRpcServer,
               raw_input_fn=raw_input,
               password_input_fn=getpass.getpass,
               error_fh=sys.stderr,
               update_check_class=UpdateCheck):
    """Initializer.  Parses the cmdline and selects the Action to use.

    Initializes all of the attributes described in the class docstring.
    Prints help or error messages if there is an error parsing the cmdline.

    Args:
      argv: The list of arguments passed to this program.
      parser_class: Options parser to use for this application.
      rpc_server_class: RPC server class to use for this application.
      raw_input_fn: Function used for getting user email.
      password_input_fn: Function used for getting user password.
      error_fh: Unexpected HTTPErrors are printed to this file handle.
      update_check_class: UpdateCheck class (can be replaced for testing).
    """
    self.parser_class = parser_class
    self.argv = argv
    self.rpc_server_class = rpc_server_class
    self.raw_input_fn = raw_input_fn
    self.password_input_fn = password_input_fn
    self.error_fh = error_fh
    self.update_check_class = update_check_class

    # First pass: parse with a generic parser that has every action's extra
    # options registered, purely to identify the requested action.
    self.parser = self._GetOptionParser()
    for action in self.actions.itervalues():
      action.options(self, self.parser)

    self.options, self.args = self.parser.parse_args(argv[1:])

    if len(self.args) < 1:
      self._PrintHelpAndExit()
    if self.args[0] not in self.actions:
      self.parser.error("Unknown action '%s'\n%s" %
                        (self.args[0], self.parser.get_description()))
    action_name = self.args.pop(0)
    self.action = self.actions[action_name]

    # Second pass: re-parse with an action-specific parser so that the help
    # text and options match the chosen action.
    self.parser, self.options = self._MakeSpecificParser(self.action)

    if self.options.help:
      self._PrintHelpAndExit()

    if self.options.verbose == 2:
      logging.getLogger().setLevel(logging.INFO)
    elif self.options.verbose == 3:
      logging.getLogger().setLevel(logging.DEBUG)

    # Mirror the parsed verbosity into the module-level global (presumably
    # consumed by StatusUpdate(), which is defined earlier in this file).
    global verbosity
    verbosity = self.options.verbose
  def Run(self):
    """Executes the requested action.

    Catches any HTTPErrors raised by the action and prints them to stderr.

    Returns:
      0 on success; 1 if the action raised an HTTP or yaml parsing error.
    """
    try:
      self.action(self)
    except urllib2.HTTPError, e:
      body = e.read()
      print >>self.error_fh, ("Error %d: --- begin server output ---\n"
                              "%s\n--- end server output ---" %
                              (e.code, body.rstrip("\n")))
      return 1
    except yaml_errors.EventListenerError, e:
      print >>self.error_fh, ("Error parsing yaml file:\n%s" % e)
      return 1
    return 0
def _GetActionDescriptions(self):
"""Returns a formatted string containing the short_descs for all actions."""
action_names = self.actions.keys()
action_names.sort()
desc = ""
for action_name in action_names:
desc += " %s: %s\n" % (action_name, self.actions[action_name].short_desc)
return desc
  def _GetOptionParser(self):
    """Creates an OptionParser with generic usage and description strings.

    Returns:
      An OptionParser instance.
    """

    class Formatter(optparse.IndentedHelpFormatter):
      """Custom help formatter that does not reformat the description."""

      def format_description(self, description):
        """Very simple formatter."""
        return description + "\n"

    desc = self._GetActionDescriptions()
    desc = ("Action must be one of:\n%s"
            "Use 'help <action>' for a detailed description.") % desc

    # -h/--help is redefined below, so conflict_handler="resolve" is needed
    # to silence the clash with optparse's built-in help option.
    parser = self.parser_class(usage="%prog [options] <action>",
                               description=desc,
                               formatter=Formatter(),
                               conflict_handler="resolve")
    parser.add_option("-h", "--help", action="store_true",
                      dest="help", help="Show the help message and exit.")
    parser.add_option("-q", "--quiet", action="store_const", const=0,
                      dest="verbose", help="Print errors only.")
    parser.add_option("-v", "--verbose", action="store_const", const=2,
                      dest="verbose", default=1,
                      help="Print info level logs.")
    parser.add_option("--noisy", action="store_const", const=3,
                      dest="verbose", help="Print all logs.")
    parser.add_option("-s", "--server", action="store", dest="server",
                      default="appengine.google.com",
                      metavar="SERVER", help="The server to connect to.")
    parser.add_option("-e", "--email", action="store", dest="email",
                      metavar="EMAIL", default=None,
                      help="The username to use. Will prompt if omitted.")
    parser.add_option("-H", "--host", action="store", dest="host",
                      metavar="HOST", default=None,
                      help="Overrides the Host header sent with all RPCs.")
    parser.add_option("--no_cookies", action="store_false",
                      dest="save_cookies", default=True,
                      help="Do not save authentication cookies to local disk.")
    parser.add_option("--passin", action="store_true",
                      dest="passin", default=False,
                      help="Read the login password from stdin.")
    return parser
def _MakeSpecificParser(self, action):
"""Creates a new parser with documentation specific to 'action'.
Args:
action: An Action instance to be used when initializing the new parser.
Returns:
A tuple containing:
parser: An instance of OptionsParser customized to 'action'.
options: The command line options after re-parsing.
"""
parser = self._GetOptionParser()
parser.set_usage(action.usage)
parser.set_description("%s\n%s" % (action.short_desc, action.long_desc))
action.options(self, parser)
options, unused_args = parser.parse_args(self.argv[1:])
return parser, options
  def _PrintHelpAndExit(self, exit_code=2):
    """Prints the parser's help message and exits the program.

    Args:
      exit_code: The integer code to pass to sys.exit().
    """
    self.parser.print_help()
    # Does not return; 2 is the conventional usage-error exit status.
    sys.exit(exit_code)
  def _GetRpcServer(self):
    """Returns an instance of an AbstractRpcServer.

    Returns:
      A new AbstractRpcServer, on which RPC calls can be made.
    """

    def GetUserCredentials():
      """Prompts the user for a username and password."""
      email = self.options.email
      if email is None:
        email = self.raw_input_fn("Email: ")
      password_prompt = "Password for %s: " % email
      # --passin reads the password from stdin instead of a no-echo prompt.
      if self.options.passin:
        password = self.raw_input_fn(password_prompt)
      else:
        password = self.password_input_fn(password_prompt)
      return (email, password)

    # Talking to a local dev_appserver: use fake credentials and mark the
    # connection pre-authenticated, skipping ClientLogin entirely.
    if self.options.host and self.options.host == "localhost":
      email = self.options.email
      if email is None:
        email = "test@example.com"
      logging.info("Using debug user %s. Override with --email" % email)
      server = self.rpc_server_class(
          self.options.server,
          lambda: (email, "password"),
          GetUserAgent(),
          GetSourceName(),
          host_override=self.options.host,
          save_cookies=self.options.save_cookies)
      server.authenticated = True
      return server

    # With --passin, stdin can only supply the password once, so do not retry.
    if self.options.passin:
      auth_tries = 1
    else:
      auth_tries = 3

    return self.rpc_server_class(self.options.server, GetUserCredentials,
                                 GetUserAgent(), GetSourceName(),
                                 host_override=self.options.host,
                                 save_cookies=self.options.save_cookies,
                                 auth_tries=auth_tries,
                                 account_type="HOSTED_OR_GOOGLE")
def _FindYaml(self, basepath, file_name):
"""Find yaml files in application directory.
Args:
basepath: Base application directory.
file_name: Filename without extension to search for.
Returns:
Path to located yaml file if one exists, else None.
"""
if not os.path.isdir(basepath):
self.parser.error("Not a directory: %s" % basepath)
for yaml_file in (file_name + ".yaml", file_name + ".yml"):
yaml_path = os.path.join(basepath, yaml_file)
if os.path.isfile(yaml_path):
return yaml_path
return None
  def _ParseAppYaml(self, basepath):
    """Parses the app.yaml file.

    Args:
      basepath: the directory of the application.

    Returns:
      An AppInfoExternal object.
    """
    appyaml_filename = self._FindYaml(basepath, "app")
    if appyaml_filename is None:
      # parser.error() exits the process, so no explicit return is needed.
      self.parser.error("Directory does not contain an app.yaml "
                        "configuration file.")

    fh = open(appyaml_filename, "r")
    try:
      appyaml = appinfo.LoadSingleAppInfo(fh)
    finally:
      fh.close()
    return appyaml
def _ParseIndexYaml(self, basepath):
"""Parses the index.yaml file.
Args:
basepath: the directory of the application.
Returns:
A single parsed yaml file or None if the file does not exist.
"""
file_name = self._FindYaml(basepath, "index")
if file_name is not None:
fh = open(file_name, "r")
try:
index_defs = datastore_index.ParseIndexDefinitions(fh)
finally:
fh.close()
return index_defs
return None
def _ParseCronYaml(self, basepath):
"""Parses the cron.yaml file.
Args:
basepath: the directory of the application.
Returns:
A CronInfoExternal object.
"""
file_name = self._FindYaml(basepath, "cron")
if file_name is not None:
fh = open(file_name, "r")
try:
cron_info = croninfo.LoadSingleCron(fh)
finally:
fh.close()
return cron_info
return None
def Help(self):
"""Prints help for a specific action.
Expects self.args[0] to contain the name of the action in question.
Exits the program after printing the help message.
"""
if len(self.args) != 1 or self.args[0] not in self.actions:
self.parser.error("Expected a single action argument. Must be one of:\n" +
self._GetActionDescriptions())
action = self.actions[self.args[0]]
self.parser, unused_options = self._MakeSpecificParser(action)
self._PrintHelpAndExit(exit_code=0)
  def Update(self):
    """Updates and deploys a new appversion."""
    if len(self.args) != 1:
      self.parser.error("Expected a single <directory> argument.")

    basepath = self.args[0]
    appyaml = self._ParseAppYaml(basepath)
    rpc_server = self._GetRpcServer()

    # Nag/notify the user if the SDK itself is out of date.
    updatecheck = self.update_check_class(rpc_server, appyaml)
    updatecheck.CheckForUpdates()

    appversion = AppVersionUpload(rpc_server, appyaml)
    appversion.DoUpload(FileIterator(basepath), self.options.max_size,
                        lambda path: open(os.path.join(basepath, path), "rb"))

    # Indexes are uploaded best-effort after the app itself: a failure here
    # must not look like a failed app update.
    index_defs = self._ParseIndexYaml(basepath)
    if index_defs:
      index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
      try:
        index_upload.DoUpload()
      except urllib2.HTTPError, e:
        StatusUpdate("Error %d: --- begin server output ---\n"
                     "%s\n--- end server output ---" %
                     (e.code, e.read().rstrip("\n")))
        print >> self.error_fh, (
            "Your app was updated, but there was an error updating your "
            "indexes. Please retry later with appcfg.py update_indexes.")

    # Push cron definitions too, if a cron.yaml is present.
    cron_entries = self._ParseCronYaml(basepath)
    if cron_entries:
      cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
      cron_upload.DoUpload()
  def _UpdateOptions(self, parser):
    """Adds update-specific options to 'parser'.

    Args:
      parser: An instance of OptionsParser.
    """
    # 10485760 bytes == 10 MB; presumably mirrors the server-side per-file
    # limit -- confirm before changing.
    parser.add_option("-S", "--max_size", type="int", dest="max_size",
                      default=10485760, metavar="SIZE",
                      help="Maximum size of a file to upload.")
def VacuumIndexes(self):
"""Deletes unused indexes."""
if len(self.args) != 1:
self.parser.error("Expected a single <directory> argument.")
basepath = self.args[0]
config = self._ParseAppYaml(basepath)
index_defs = self._ParseIndexYaml(basepath)
if index_defs is None:
index_defs = datastore_index.IndexDefinitions()
rpc_server = self._GetRpcServer()
vacuum = VacuumIndexesOperation(rpc_server,
config,
self.options.force_delete)
vacuum.DoVacuum(index_defs)
  def _VacuumIndexesOptions(self, parser):
    """Adds vacuum_indexes-specific options to 'parser'.

    Args:
      parser: An instance of OptionsParser.
    """
    # Index deletion is destructive, so it prompts unless -f/--force is given.
    parser.add_option("-f", "--force", action="store_true", dest="force_delete",
                      default=False,
                      help="Force deletion without being prompted.")
def UpdateCron(self):
"""Updates any new or changed cron definitions."""
if len(self.args) != 1:
self.parser.error("Expected a single <directory> argument.")
basepath = self.args[0]
appyaml = self._ParseAppYaml(basepath)
rpc_server = self._GetRpcServer()
cron_entries = self._ParseCronYaml(basepath)
if cron_entries:
cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
cron_upload.DoUpload()
def UpdateIndexes(self):
"""Updates indexes."""
if len(self.args) != 1:
self.parser.error("Expected a single <directory> argument.")
basepath = self.args[0]
appyaml = self._ParseAppYaml(basepath)
rpc_server = self._GetRpcServer()
index_defs = self._ParseIndexYaml(basepath)
if index_defs:
index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
index_upload.DoUpload()
def Rollback(self):
"""Does a rollback of any existing transaction for this app version."""
if len(self.args) != 1:
self.parser.error("Expected a single <directory> argument.")
basepath = self.args[0]
appyaml = self._ParseAppYaml(basepath)
appversion = AppVersionUpload(self._GetRpcServer(), appyaml)
appversion.in_transaction = True
appversion.Rollback()
  def RequestLogs(self):
    """Write request logs to a file."""
    if len(self.args) != 2:
      self.parser.error(
          "Expected a <directory> argument and an <output_file> argument.")
    if (self.options.severity is not None and
        not 0 <= self.options.severity <= MAX_LOG_LEVEL):
      self.parser.error(
          "Severity range is 0 (DEBUG) through %s (CRITICAL)." % MAX_LOG_LEVEL)

    if self.options.num_days is None:
      # Default: 1 day for a fresh file, all available logs (0) on --append.
      self.options.num_days = int(not self.options.append)

    basepath = self.args[0]
    appyaml = self._ParseAppYaml(basepath)
    rpc_server = self._GetRpcServer()
    logs_requester = LogsRequester(rpc_server, appyaml, self.args[1],
                                   self.options.num_days,
                                   self.options.append,
                                   self.options.severity,
                                   time.time())
    logs_requester.DownloadLogs()
  def _RequestLogsOptions(self, parser):
    """Adds request_logs-specific options to 'parser'.

    Args:
      parser: An instance of OptionsParser.
    """
    # The num_days default of None is resolved in RequestLogs(), because it
    # depends on whether --append was also given.
    parser.add_option("-n", "--num_days", type="int", dest="num_days",
                      action="store", default=None,
                      help="Number of days worth of log data to get. "
                           "The cut-off point is midnight UTC. "
                           "Use 0 to get all available logs. "
                           "Default is 1, unless --append is also given; "
                           "then the default is 0.")
    parser.add_option("-a", "--append", dest="append",
                      action="store_true", default=False,
                      help="Append to existing file.")
    parser.add_option("--severity", type="int", dest="severity",
                      action="store", default=None,
                      help="Severity of app-level log messages to get. "
                           "The range is 0 (DEBUG) through 4 (CRITICAL). "
                           "If omitted, only request logs are returned.")
  def CronInfo(self, now=None, output=sys.stdout):
    """Displays information about cron definitions.

    Args:
      now: used for testing.
      output: Used for testing.
    """
    if len(self.args) != 1:
      self.parser.error("Expected a single <directory> argument.")
    if now is None:
      now = datetime.datetime.now()

    basepath = self.args[0]
    cron_entries = self._ParseCronYaml(basepath)
    if cron_entries:
      for entry in cron_entries.cron:
        description = entry.description
        if not description:
          description = "<no description>"
        print >>output, "\n%s:\nURL: %s\nSchedule: %s" % (description,
                                                          entry.url,
                                                          entry.schedule)
        # Show the next num_runs firing times for this entry's schedule.
        schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
        matches = schedule.GetMatches(now, self.options.num_runs)
        for match in matches:
          print >>output, "%s, %s from now" % (
              match.strftime("%Y-%m-%d %H:%M:%S"), match - now)
def _CronInfoOptions(self, parser):
"""Adds cron_info-specific options to 'parser'.
Args:
parser: An instance of OptionsParser.
"""
parser.add_option("-n", "--num_runs", type="int", dest="num_runs",
action="store", default=5,
help="Number of runs of each cron job to display"
"Default is 5")
  class Action(object):
    """Contains information about a command line action.

    Attributes:
      function: The name of a function defined on AppCfg or its subclasses
        that will perform the appropriate action.
      usage: A command line usage string.
      short_desc: A one-line description of the action.
      long_desc: A detailed description of the action.  Whitespace and
        formatting will be preserved.
      options: A function that will add extra options to a given OptionParser
        object.
    """

    def __init__(self, function, usage, short_desc, long_desc="",
                 options=lambda obj, parser: None):
      """Initializer for the class attributes."""
      self.function = function
      self.usage = usage
      self.short_desc = short_desc
      self.long_desc = long_desc
      self.options = options

    def __call__(self, appcfg):
      """Invoke this Action on the specified AppCfg.

      This calls the function of the appropriate name on AppCfg, and
      respects polymorphic overrides.

      Args:
        appcfg: The AppCfgApp instance to dispatch to.

      Returns:
        The result of the dispatched method.
      """
      method = getattr(appcfg, self.function)
      return method()
actions = {
"help": Action(
function="Help",
usage="%prog help <action>",
short_desc="Print help for a specific action."),
"update": Action(
function="Update",
usage="%prog [options] update <directory>",
options=_UpdateOptions,
short_desc="Create or update an app version.",
long_desc="""
Specify a directory that contains all of the files required by
the app, and appcfg.py will create/update the app version referenced
in the app.yaml file at the top level of that directory. appcfg.py
will follow symlinks and recursively upload all files to the server.
Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""),
"update_cron": Action(
function="UpdateCron",
usage="%prog [options] update_cron <directory>",
short_desc="Update application cron definitions.",
long_desc="""
The 'update_cron' command will update any new, removed or changed cron
definitions from the cron.yaml file."""),
"update_indexes": Action(
function="UpdateIndexes",
usage="%prog [options] update_indexes <directory>",
short_desc="Update application indexes.",
long_desc="""
The 'update_indexes' command will add additional indexes which are not currently
in production as well as restart any indexes that were not completed."""),
"vacuum_indexes": Action(
function="VacuumIndexes",
usage="%prog [options] vacuum_indexes <directory>",
options=_VacuumIndexesOptions,
short_desc="Delete unused indexes from application.",
long_desc="""
The 'vacuum_indexes' command will help clean up indexes which are no longer
in use. It does this by comparing the local index configuration with
indexes that are actually defined on the server. If any indexes on the
server do not exist in the index configuration file, the user is given the
option to delete them."""),
"rollback": Action(
function="Rollback",
usage="%prog [options] rollback <directory>",
short_desc="Rollback an in-progress update.",
long_desc="""
The 'update' command requires a server-side transaction. Use 'rollback'
if you get an error message about another transaction being in progress
and you are sure that there is no such transaction."""),
"request_logs": Action(
function="RequestLogs",
usage="%prog [options] request_logs <directory> <output_file>",
options=_RequestLogsOptions,
short_desc="Write request logs in Apache common log format.",
long_desc="""
The 'request_logs' command exports the request logs from your application
to a file. It will write Apache common log format records ordered
chronologically. If output file is '-' stdout will be written."""),
"cron_info": Action(
function="CronInfo",
usage="%prog [options] cron_info <directory>",
options=_CronInfoOptions,
short_desc="Display information about cron jobs.",
long_desc="""
The 'cron_info' command will display the next 'number' runs (default 5) for
each cron job defined in the cron.yaml file."""),
}
def main(argv):
  """Parses argv, runs the selected action and exits with its status."""
  # Configure root logging once for the whole tool.
  logging.basicConfig(format=("%(asctime)s %(levelname)s %(filename)s:"
                              "%(lineno)s %(message)s "))
  try:
    exit_code = AppCfgApp(argv).Run()
    if exit_code:
      sys.exit(exit_code)
  except KeyboardInterrupt:
    StatusUpdate("Interrupted.")
    sys.exit(1)
# Script entry point when run directly (not imported).
if __name__ == "__main__":
  main(sys.argv)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tool for performing authenticated RPCs against App Engine."""
import cookielib
import logging
import os
import re
import socket
import sys
import urllib
import urllib2
# Default to urllib2's plain HTTPS handler, which performs no certificate
# validation.
https_handler = urllib2.HTTPSHandler
uses_cert_verification = False
certpath = os.path.join(os.path.dirname(__file__), "cacerts.txt")
cert_file_available = os.path.exists(certpath)
try:
  # Upgrade to a certificate-validating handler when both the optional
  # https_wrapper module and the bundled CA certificate file are available.
  import https_wrapper
  if cert_file_available:
    https_handler = lambda: https_wrapper.CertValidatingHTTPSHandler(
        ca_certs=certpath)
    uses_cert_verification = True
except ImportError:
  # https_wrapper is optional; fall back to unvalidated HTTPS.
  pass
def GetPlatformToken(os_module=os, sys_module=sys, platform=sys.platform):
  """Returns a 'User-agent' token for the host system platform.

  Args:
    os_module, sys_module, platform: Used for testing.

  Returns:
    String containing the platform token for the host system.
  """
  # Windows: sys.getwindowsversion() exists only there; use the first four
  # numeric fields (major, minor, build, platform id).
  if hasattr(sys_module, "getwindowsversion"):
    major, minor, build, platform_id = sys_module.getwindowsversion()[:4]
    return "%s/%s.%s.%s.%s" % (platform, major, minor, build, platform_id)
  # POSIX: os.uname() yields (sysname, nodename, release, ...).
  if hasattr(os_module, "uname"):
    name_fields = os_module.uname()
    return "%s/%s" % (name_fields[0], name_fields[2])
  return "unknown"
class ClientLoginError(urllib2.HTTPError):
  """Raised to indicate there was an error authenticating with ClientLogin."""

  def __init__(self, url, code, msg, headers, args):
    urllib2.HTTPError.__init__(self, url, code, msg, headers, None)
    # NOTE(review): deliberately shadows BaseException.args (normally a
    # tuple) with the parsed ClientLogin response dict.
    self.args = args
    # Assumes the response dict always carries an "Error" key (the
    # ClientLogin error code, e.g. "BadAuthentication") -- raises KeyError
    # otherwise.
    self.reason = args["Error"]
class AbstractRpcServer(object):
"""Provides a common interface for a simple RPC server."""
  def __init__(self, host, auth_function, user_agent, source,
               host_override=None, extra_headers=None, save_cookies=False,
               auth_tries=3, account_type=None):
    """Creates a new HttpRpcServer.

    Args:
      host: The host to send requests to.
      auth_function: A function that takes no arguments and returns an
        (email, password) tuple when called. Will be called if authentication
        is required.
      user_agent: The user-agent string to send to the server. Specify None to
        omit the user-agent header.
      source: The source to specify in authentication requests.
      host_override: The host header to send to the server (defaults to host).
      extra_headers: A dict of extra headers to append to every request. Values
        supplied here will override other default headers that are supplied.
      save_cookies: If True, save the authentication cookies to local disk.
        If False, use an in-memory cookiejar instead. Subclasses must
        implement this functionality. Defaults to False.
      auth_tries: The number of times to attempt auth_function before failing.
      account_type: One of GOOGLE, HOSTED_OR_GOOGLE, or None for automatic.
    """
    self.host = host
    self.host_override = host_override
    self.auth_function = auth_function
    self.source = source
    self.authenticated = False
    self.auth_tries = auth_tries
    self.account_type = account_type

    # extra_headers entries override the default User-Agent header.
    self.extra_headers = {}
    if user_agent:
      self.extra_headers["User-Agent"] = user_agent
    if extra_headers:
      self.extra_headers.update(extra_headers)

    self.save_cookies = save_cookies
    # NOTE(review): a MozillaCookieJar is created even when save_cookies is
    # False; persisting it to disk is left to subclasses.
    self.cookie_jar = cookielib.MozillaCookieJar()
    self.opener = self._GetOpener()
    if self.host_override:
      logging.info("Server: %s; Host: %s", self.host, self.host_override)
    else:
      logging.info("Server: %s", self.host)

    # Talking to the local dev_appserver: authenticate immediately with its
    # fake auth instead of waiting for a ClientLogin challenge.
    if ((self.host_override and self.host_override == "localhost") or
        self.host == "localhost" or self.host.startswith("localhost:")):
      self._DevAppServerAuthenticate()
def _GetOpener(self):
"""Returns an OpenerDirector for making HTTP requests.
Returns:
A urllib2.OpenerDirector object.
"""
raise NotImplemented()
def _CreateRequest(self, url, data=None):
"""Creates a new urllib request."""
req = urllib2.Request(url, data=data)
if self.host_override:
req.add_header("Host", self.host_override)
for key, value in self.extra_headers.iteritems():
req.add_header(key, value)
return req
def _GetAuthToken(self, email, password):
"""Uses ClientLogin to authenticate the user, returning an auth token.
Args:
email: The user's email address
password: The user's password
Raises:
ClientLoginError: If there was an error authenticating with ClientLogin.
HTTPError: If there was some other form of HTTP error.
Returns:
The authentication token returned by ClientLogin.
"""
account_type = self.account_type
if not account_type:
if (self.host.split(':')[0].endswith(".google.com")
or (self.host_override
and self.host_override.split(':')[0].endswith(".google.com"))):
account_type = "HOSTED_OR_GOOGLE"
else:
account_type = "GOOGLE"
data = {
"Email": email,
"Passwd": password,
"service": "ah",
"source": self.source,
"accountType": account_type
}
req = self._CreateRequest(
url="https://www.google.com/accounts/ClientLogin",
data=urllib.urlencode(data))
try:
response = self.opener.open(req)
response_body = response.read()
response_dict = dict(x.split("=")
for x in response_body.split("\n") if x)
return response_dict["Auth"]
except urllib2.HTTPError, e:
if e.code == 403:
body = e.read()
response_dict = dict(x.split("=", 1) for x in body.split("\n") if x)
raise ClientLoginError(req.get_full_url(), e.code, e.msg,
e.headers, response_dict)
else:
raise
  def _GetAuthCookie(self, auth_token):
    """Fetches authentication cookies for an authentication token.

    Args:
      auth_token: The authentication token returned by ClientLogin.

    Raises:
      HTTPError: If there was an error fetching the authentication cookies.
    """
    # The continue URL is never fetched; a 302 redirect to it is the
    # server's signal that login succeeded.
    continue_location = "http://localhost/"
    args = {"continue": continue_location, "auth": auth_token}
    # APPCFG_LOGIN_PATH lets alternate servers relocate the /_ah prefix.
    login_path = os.environ.get("APPCFG_LOGIN_PATH", "/_ah")
    req = self._CreateRequest("http://%s%s/login?%s" %
                              (self.host, login_path, urllib.urlencode(args)))
    try:
      response = self.opener.open(req)
    except urllib2.HTTPError, e:
      # The opener installs no redirect handler, so the expected 302
      # arrives here as an HTTPError; treat it as the response object.
      response = e
    if (response.code != 302 or
        response.info()["location"] != continue_location):
      raise urllib2.HTTPError(req.get_full_url(), response.code, response.msg,
                              response.headers, response.fp)
    self.authenticated = True
  def _Authenticate(self):
    """Authenticates the user.

    The authentication process works as follows:
     1) We get a username and password from the user
     2) We use ClientLogin to obtain an AUTH token for the user
        (see http://code.google.com/apis/accounts/AuthForInstalledApps.html).
     3) We pass the auth token to /_ah/login on the server to obtain an
        authentication cookie. If login was successful, it tries to redirect
        us to the URL we provided.

    If we attempt to access the upload API without first obtaining an
    authentication cookie, it returns a 401 response and directs us to
    authenticate ourselves with ClientLogin.

    Note: on any non-retryable ClientLogin failure (captcha, disabled
    account, etc.) an error is printed to stderr and this method returns
    without raising; self.authenticated remains False in that case.
    """
    for unused_i in range(self.auth_tries):
      credentials = self.auth_function()
      try:
        auth_token = self._GetAuthToken(credentials[0], credentials[1])
      except ClientLoginError, e:
        # Only BadAuthentication is retried (the user may have mistyped);
        # every other reason breaks out of the retry loop.
        if e.reason == "BadAuthentication":
          print >>sys.stderr, "Invalid username or password."
          continue
        if e.reason == "CaptchaRequired":
          print >>sys.stderr, (
              "Please go to\n"
              "https://www.google.com/accounts/DisplayUnlockCaptcha\n"
              "and verify you are a human. Then try again.")
          break
        if e.reason == "NotVerified":
          print >>sys.stderr, "Account not verified."
          break
        if e.reason == "TermsNotAgreed":
          print >>sys.stderr, "User has not agreed to TOS."
          break
        if e.reason == "AccountDeleted":
          print >>sys.stderr, "The user account has been deleted."
          break
        if e.reason == "AccountDisabled":
          print >>sys.stderr, "The user account has been disabled."
          break
        if e.reason == "ServiceDisabled":
          print >>sys.stderr, ("The user's access to the service has been "
                               "disabled.")
          break
        if e.reason == "ServiceUnavailable":
          print >>sys.stderr, "The service is not available; try again later."
          break
        # Unrecognized ClientLogin failure: propagate to the caller.
        raise
      # Token obtained: exchange it for the auth cookie and stop retrying.
      self._GetAuthCookie(auth_token)
      return
def _DevAppServerAuthenticate(self):
"""Authenticates the user on the dev_appserver."""
credentials = self.auth_function()
self.extra_headers["Cookie"] = ('dev_appserver_login="%s:True"; Path=/;' %
(credentials[0],))
  def Send(self, request_path, payload="",
           content_type="application/octet-stream",
           timeout=None,
           **kwargs):
    """Sends an RPC and returns the response.

    Retries on 401 (re-authenticating first), on 5xx errors, and on 302
    redirects to a recognized login page; gives up after self.auth_tries
    attempts by re-raising the HTTPError.

    Args:
      request_path: The path to send the request to, eg /api/appversion/create.
      payload: The body of the request, or None to send an empty request.
      content_type: The Content-Type header to use.
      timeout: timeout in seconds; default None i.e. no timeout.
        (Note: for large requests on OS X, the timeout doesn't work right.)
      kwargs: Any keyword arguments are converted into query string parameters.

    Returns:
      The response body, as a string.
    """
    # The socket default timeout is process-global; save and restore it.
    old_timeout = socket.getdefaulttimeout()
    socket.setdefaulttimeout(timeout)
    try:
      tries = 0
      while True:
        tries += 1
        args = dict(kwargs)
        url = "http://%s%s?%s" % (self.host, request_path,
                                  urllib.urlencode(args))
        req = self._CreateRequest(url=url, data=payload)
        req.add_header("Content-Type", content_type)
        req.add_header("X-appcfg-api-version", "1")
        try:
          f = self.opener.open(req)
          response = f.read()
          f.close()
          return response
        except urllib2.HTTPError, e:
          logging.debug("Got http error, this is try #%s" % tries)
          if tries > self.auth_tries:
            raise
          elif e.code == 401:
            # Unauthenticated: log in and retry the request.
            self._Authenticate()
          elif e.code >= 500 and e.code < 600:
            # Transient server error; retry with the same request.
            continue
          elif e.code == 302:
            # No redirect handler is installed, so redirects surface here.
            # A redirect to a login page means our cookie is missing or
            # stale; anything else falls through and the loop retries.
            loc = e.info()["location"]
            logging.debug("Got 302 redirect. Location: %s" % loc)
            if loc.startswith("https://www.google.com/accounts/ServiceLogin"):
              self._Authenticate()
            elif re.match(r"https://www.google.com/a/[a-z0-9.-]+/ServiceLogin",
                          loc):
              # Google Apps (hosted domain) login page.
              self.account_type = "HOSTED"
              self._Authenticate()
            elif loc.startswith("http://%s/_ah/login" % (self.host,)):
              self._DevAppServerAuthenticate()
          else:
            raise
    finally:
      socket.setdefaulttimeout(old_timeout)
class HttpRpcServer(AbstractRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests."""
  # Where authentication cookies are persisted when save_cookies is True.
  DEFAULT_COOKIE_FILE_PATH = "~/.appcfg_cookies"
  def _Authenticate(self):
    """Authenticates, then saves the cookie jar if it is file-backed.

    Also warns when certificate verification is unavailable, since the
    remote host's identity then cannot be checked.
    """
    # cert_file_available and uses_cert_verification are module-level
    # flags defined outside this chunk -- presumably set at import time
    # based on ssl module availability; confirm before relying on them.
    if cert_file_available and not uses_cert_verification:
      logging.warn("ssl module not found. Without this the identity of the "
                   "remote host cannot be verified, and connections are NOT "
                   "secure. To fix this, please install the ssl module from "
                   "http://pypi.python.org/pypi/ssl")
    super(HttpRpcServer, self)._Authenticate()
    # filename is only non-None when _GetOpener successfully set up the
    # on-disk cookie file.
    if self.cookie_jar.filename is not None and self.save_cookies:
      logging.info("Saving authentication cookies to %s" %
                   self.cookie_jar.filename)
      self.cookie_jar.save()
  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    The handler chain is assembled by hand (rather than via
    urllib2.build_opener) so that no HTTPRedirectHandler is installed:
    redirects must surface as HTTPErrors for Send()'s login detection.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = urllib2.OpenerDirector()
    opener.add_handler(urllib2.ProxyHandler())
    opener.add_handler(urllib2.UnknownHandler())
    opener.add_handler(urllib2.HTTPHandler())
    opener.add_handler(urllib2.HTTPDefaultErrorHandler())
    # https_handler is a module-level name defined outside this chunk.
    opener.add_handler(https_handler())
    opener.add_handler(urllib2.HTTPErrorProcessor())
    if self.save_cookies:
      self.cookie_jar.filename = os.path.expanduser(
          HttpRpcServer.DEFAULT_COOKIE_FILE_PATH)
      if os.path.exists(self.cookie_jar.filename):
        try:
          self.cookie_jar.load()
          self.authenticated = True
          logging.info("Loaded authentication cookies from %s" %
                       self.cookie_jar.filename)
        except (OSError, IOError, cookielib.LoadError), e:
          # Unreadable or corrupt cookie file: fall back to in-memory
          # cookies by clearing the filename.
          logging.debug("Could not load authentication cookies; %s: %s",
                        e.__class__.__name__, e)
          self.cookie_jar.filename = None
      else:
        try:
          # Pre-create the cookie file with owner-only permissions (0600).
          fd = os.open(self.cookie_jar.filename, os.O_CREAT, 0600)
          os.close(fd)
        except (OSError, IOError), e:
          logging.debug("Could not create authentication cookies file; %s: %s",
                        e.__class__.__name__, e)
          self.cookie_jar.filename = None
    opener.add_handler(urllib2.HTTPCookieProcessor(self.cookie_jar))
    return opener
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Imports CSV data over HTTP.
Usage:
%(arg0)s [flags]
--debug Show debugging information. (Optional)
--app_id=<string> Application ID of endpoint (Optional for
*.appspot.com)
--auth_domain=<domain> The auth domain to use for logging in and for
UserProperties. (Default: gmail.com)
--bandwidth_limit=<int> The maximum number of bytes per second for the
                        aggregate transfer of data to the server. Bursts
                        may exceed this, but overall transfer rate is
                        restricted to this rate. (Default 250000)
--batch_size=<int> Number of Entity objects to include in each post to
                        the URL endpoint. The more data per row/Entity, the
                        smaller the batch size should be. (Default 10)
--config_file=<path> File containing Model and Loader definitions.
                        (Required)
--db_filename=<path> Specific progress database to write to, or to
                        resume from. If not supplied, then a new database
                        will be started, named:
                        bulkloader-progress-TIMESTAMP.
                        The special filename "skip" may be used to simply
                        skip reading/writing any progress information.
--filename=<path> Path to the CSV file to import. (Required)
--http_limit=<int> The maximum number of HTTP requests per second to
                        send to the server. (Default: 8)
--kind=<string> Name of the Entity object kind to put in the
                        datastore. (Required)
--num_threads=<int> Number of threads to use for uploading entities
                        (Default 10)
--rps_limit=<int> The maximum number of records per second to
transfer to the server. (Default: 20)
--url=<string> URL endpoint to post to for importing data.
(Required)
The exit status will be 0 on success, non-zero on import failure.
Works with the remote_api mix-in library for google.appengine.ext.remote_api.
Please look there for documentation about how to setup the server side.
Example:
%(arg0)s --url=http://app.appspot.com/remote_api --kind=Model \
--filename=data.csv --config_file=loader_config.py
"""
import csv
import getopt
import getpass
import logging
import new
import os
import Queue
import signal
import sys
import threading
import time
import traceback
import urllib2
import urlparse
from google.appengine.ext import db
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.tools import appengine_rpc
try:
import sqlite3
except ImportError:
pass
# Version string reported to the server via GAE-Uploader-Version.
UPLOADER_VERSION = '1'
# Defaults for the corresponding command-line flags.
DEFAULT_THREAD_COUNT = 10
DEFAULT_BATCH_SIZE = 10
DEFAULT_QUEUE_SIZE = DEFAULT_THREAD_COUNT * 10
# Sentinel value; presumably placed on work queues to tell worker threads
# to shut down (usage is outside this chunk).
_THREAD_SHOULD_EXIT = '_THREAD_SHOULD_EXIT'
# WorkItem lifecycle states; see StateMessage for the human-readable text.
STATE_READ = 0
STATE_SENDING = 1
STATE_SENT = 2
STATE_NOT_SENT = 3
MINIMUM_THROTTLE_SLEEP_DURATION = 0.001
# Progress-database marker: all rows up to the recorded key have been read.
DATA_CONSUMED_TO_HERE = 'DATA_CONSUMED_TO_HERE'
# Exponential backoff parameters for retrying failed posts (seconds).
INITIAL_BACKOFF = 1.0
BACKOFF_FACTOR = 2.0
# Defaults for --bandwidth_limit, --rps_limit and --http_limit.
DEFAULT_BANDWIDTH_LIMIT = 250000
DEFAULT_RPS_LIMIT = 20
DEFAULT_REQUEST_LIMIT = 8
# Names of the individual throttle streams tracked by the Throttle.
BANDWIDTH_UP = 'http-bandwidth-up'
BANDWIDTH_DOWN = 'http-bandwidth-down'
REQUESTS = 'http-requests'
HTTPS_BANDWIDTH_UP = 'https-bandwidth-up'
HTTPS_BANDWIDTH_DOWN = 'https-bandwidth-down'
HTTPS_REQUESTS = 'https-requests'
RECORDS = 'records'
def StateMessage(state):
  """Converts a numeric state identifier to a status message.

  Args:
    state: One of STATE_READ, STATE_SENDING, STATE_SENT, STATE_NOT_SENT.

  Returns:
    A short human-readable description of the state.

  Raises:
    KeyError: If state is not one of the known state constants.
  """
  messages = {
      STATE_READ: 'Batch read from file.',
      STATE_SENDING: 'Sending batch to server.',
      STATE_SENT: 'Batch successfully sent.',
      STATE_NOT_SENT: 'Error while sending batch.',
  }
  return messages[state]
# All errors raised by this module derive from Error, so callers can
# catch the whole hierarchy with a single except clause.
class Error(Exception):
  """Base-class for exceptions in this module."""
class FatalServerError(Error):
  """An unrecoverable error occurred while trying to post data to the server."""
class ResumeError(Error):
  """Error while trying to resume a partial upload."""
class ConfigurationError(Error):
  """Error in configuration options."""
class AuthenticationError(Error):
  """Error while trying to authenticate with the server."""
def GetCSVGeneratorFactory(csv_filename, batch_size,
                           openfile=open, create_csv_reader=csv.reader):
  """Return a factory that creates a CSV-based WorkItem generator.

  Args:
    csv_filename: File on disk containing CSV data.
    batch_size: Maximum number of CSV rows to stash into a WorkItem.
    openfile: Used for dependency injection.
    create_csv_reader: Used for dependency injection.

  Returns: A callable (accepting the Progress Queue and Progress
  Generators as input) which creates the WorkItem generator.
  """

  def CreateGenerator(progress_queue, progress_generator):
    """Builds a CSVGenerator bound to the captured CSV configuration.

    Args:
      progress_queue: A ProgressQueue instance to send progress information.
      progress_generator: A generator of progress information or None.

    Returns:
      A CSVGenerator instance.
    """
    return CSVGenerator(progress_queue, progress_generator, csv_filename,
                        batch_size, openfile, create_csv_reader)

  return CreateGenerator
class CSVGenerator(object):
  """Reads a CSV file and generates WorkItems containing batches of records."""
  def __init__(self,
               progress_queue,
               progress_generator,
               csv_filename,
               batch_size,
               openfile,
               create_csv_reader):
    """Initializes a CSV generator.

    Args:
      progress_queue: A queue used for tracking progress information.
      progress_generator: A generator of prior progress information, or None
        if there is no prior status.
      csv_filename: File on disk containing CSV data.
      batch_size: Maximum number of CSV rows to stash into a WorkItem.
      openfile: Used for dependency injection of 'open'.
      create_csv_reader: Used for dependency injection of 'csv.reader'.
    """
    self.progress_queue = progress_queue
    self.progress_generator = progress_generator
    self.csv_filename = csv_filename
    self.batch_size = batch_size
    self.openfile = openfile
    self.create_csv_reader = create_csv_reader
    # CSV line numbers are 1-based and double as WorkItem keys.
    self.line_number = 1
    # Column count of the first data row; every later row must match it.
    self.column_count = None
    # Rows buffered for the WorkItem currently being assembled.
    self.read_rows = []
    self.reader = None
    # row_count counts every row consumed from the reader; sent_count
    # only counts rows skipped via _AdvanceTo (already handled earlier).
    self.row_count = 0
    self.sent_count = 0
  def _AdvanceTo(self, line):
    """Advance the reader to the given line.

    Rows skipped here are counted as already sent (they were handled by
    a previous run according to the progress database).

    Args:
      line: A line number to advance to.
    """
    while self.line_number < line:
      self.reader.next()
      self.line_number += 1
      self.row_count += 1
      self.sent_count += 1
  def _ReadRows(self, key_start, key_end):
    """Attempts to read and encode rows [key_start, key_end].

    The encoded rows are stored in self.read_rows.

    Args:
      key_start: The starting line number.
      key_end: The ending line number.

    Raises:
      StopIteration: if the reader runs out of rows
      ResumeError: if there are an inconsistent number of columns.
    """
    assert self.line_number == key_start
    self.read_rows = []
    while self.line_number <= key_end:
      row = self.reader.next()
      self.row_count += 1
      # Enforce a rectangular file: every row must have the same number
      # of columns as the first one seen.
      if self.column_count is None:
        self.column_count = len(row)
      else:
        if self.column_count != len(row):
          raise ResumeError('Column count mismatch, %d: %s' %
                            (self.column_count, str(row)))
      self.read_rows.append((self.line_number, row))
      self.line_number += 1
  def _MakeItem(self, key_start, key_end, rows, progress_key=None):
    """Makes a WorkItem containing the given rows, with the given keys.

    Args:
      key_start: The start key for the WorkItem.
      key_end: The end key for the WorkItem.
      rows: A list of the rows for the WorkItem.
      progress_key: The progress key for the WorkItem

    Returns:
      A WorkItem instance for the given batch.
    """
    assert rows
    item = WorkItem(self.progress_queue, rows,
                    key_start, key_end,
                    progress_key=progress_key)
    return item
  def Batches(self):
    """Reads the CSV data file and generates WorkItems.

    When resuming (progress_generator is not None), previously recorded
    batches are re-read and re-yielded first; fresh rows are then read
    only if the progress records end in DATA_CONSUMED_TO_HERE.

    Yields:
      Instances of class WorkItem

    Raises:
      ResumeError: If the progress database and data file indicate a different
        number of rows.
    """
    csv_file = self.openfile(self.csv_filename, 'r')
    # NOTE: the whole file is read into memory for header sniffing --
    # acceptable for typical import sizes, but worth knowing.
    csv_content = csv_file.read()
    if csv_content:
      has_headers = csv.Sniffer().has_header(csv_content)
    else:
      has_headers = False
    csv_file.seek(0)
    self.reader = self.create_csv_reader(csv_file, skipinitialspace=True)
    if has_headers:
      logging.info('The CSV file appears to have a header line, skipping.')
      self.reader.next()
    exhausted = False
    self.line_number = 1
    self.column_count = None
    logging.info('Starting import; maximum %d entities per post',
                 self.batch_size)
    state = None
    if self.progress_generator is not None:
      for progress_key, state, key_start, key_end in self.progress_generator:
        if key_start:
          # A previously recorded batch: skip ahead, re-read its exact
          # rows and replay it with its original progress key.
          try:
            self._AdvanceTo(key_start)
            self._ReadRows(key_start, key_end)
            yield self._MakeItem(key_start,
                                 key_end,
                                 self.read_rows,
                                 progress_key=progress_key)
          except StopIteration:
            logging.error('Mismatch between data file and progress database')
            raise ResumeError(
                'Mismatch between data file and progress database')
        elif state == DATA_CONSUMED_TO_HERE:
          # Everything up to key_end was fully consumed; skip past it.
          try:
            self._AdvanceTo(key_end + 1)
          except StopIteration:
            # File ended exactly at the consumed marker; clear state so
            # no fresh reading is attempted below.
            state = None
    if self.progress_generator is None or state == DATA_CONSUMED_TO_HERE:
      # Fresh (or continued) reading: emit batch_size-row WorkItems until
      # the reader is exhausted; the last batch may be short.
      while not exhausted:
        key_start = self.line_number
        key_end = self.line_number + self.batch_size - 1
        try:
          self._ReadRows(key_start, key_end)
        except StopIteration:
          exhausted = True
          key_end = self.line_number - 1
        if key_start <= key_end:
          yield self._MakeItem(key_start, key_end, self.read_rows)
class ReQueue(object):
  """A special thread-safe queue.

  A ReQueue allows unfinished work items to be returned with a call to
  reput(). When an item is reput, task_done() should *not* be called
  in addition; getting an item that has been reput does not increase
  the number of outstanding tasks.

  This class shares an interface with Queue.Queue and provides the
  additional reput method.
  """
  def __init__(self,
               queue_capacity,
               requeue_capacity=None,
               queue_factory=Queue.Queue,
               get_time=time.time):
    """Initialize a ReQueue instance.

    Args:
      queue_capacity: The number of items that can be put in the ReQueue.
      requeue_capacity: The number of items that can be reput in the ReQueue.
      queue_factory: Used for dependency injection.
      get_time: Used for dependency injection.
    """
    if requeue_capacity is None:
      requeue_capacity = queue_capacity
    self.get_time = get_time
    # Fresh items live on self.queue; returned items on self.requeue.
    self.queue = queue_factory(queue_capacity)
    self.requeue = queue_factory(requeue_capacity)
    # A single lock backs both condition variables so that producers and
    # consumers wake each other through put_cond/get_cond.
    self.lock = threading.Lock()
    self.put_cond = threading.Condition(self.lock)
    self.get_cond = threading.Condition(self.lock)
  def _DoWithTimeout(self,
                     action,
                     exc,
                     wait_cond,
                     done_cond,
                     lock,
                     timeout=None,
                     block=True):
    """Performs the given action with a timeout.

    The action must be non-blocking, and raise an instance of exc on a
    recoverable failure. If the action fails with an instance of exc,
    we wait on wait_cond before trying again. Failure after the
    timeout is reached is propagated as an exception. Success is
    signalled by notifying on done_cond and returning the result of
    the action. If action raises any exception besides an instance of
    exc, it is immediately propagated.

    Args:
      action: A callable that performs a non-blocking action.
      exc: An exception type that is thrown by the action to indicate
        a recoverable error.
      wait_cond: A condition variable which should be waited on when
        action throws exc.
      done_cond: A condition variable to signal if the action returns.
      lock: The lock used by wait_cond and done_cond.
      timeout: A non-negative float indicating the maximum time to wait.
      block: Whether to block if the action cannot complete immediately.

    Returns:
      The result of the action, if it is successful.

    Raises:
      ValueError: If the timeout argument is negative.
    """
    if timeout is not None and timeout < 0.0:
      raise ValueError('\'timeout\' must not be a negative number')
    if not block:
      # Non-blocking mode is modelled as a zero timeout.
      timeout = 0.0
    result = None
    success = False
    start_time = self.get_time()
    lock.acquire()
    try:
      while not success:
        try:
          result = action()
          success = True
        except Exception, e:
          if not isinstance(e, exc):
            raise e
          # Recoverable failure: shrink the remaining timeout by the
          # time already spent, then wait to be signalled.
          if timeout is not None:
            elapsed_time = self.get_time() - start_time
            timeout -= elapsed_time
            if timeout <= 0.0:
              raise e
          wait_cond.wait(timeout)
    finally:
      # Wake one waiter on the opposite condition only on success, so a
      # failed put does not spuriously wake a blocked get (and vice versa).
      if success:
        done_cond.notify()
      lock.release()
    return result
  def put(self, item, block=True, timeout=None):
    """Put an item into the requeue.

    Args:
      item: An item to add to the requeue.
      block: Whether to block if the requeue is full.
      timeout: Maximum on how long to wait until the queue is non-full.

    Raises:
      Queue.Full if the queue is full and the timeout expires.
    """
    def PutAction():
      self.queue.put(item, block=False)
    self._DoWithTimeout(PutAction,
                        Queue.Full,
                        self.get_cond,
                        self.put_cond,
                        self.lock,
                        timeout=timeout,
                        block=block)
  def reput(self, item, block=True, timeout=None):
    """Re-put an item back into the requeue.

    Re-putting an item does not increase the number of outstanding
    tasks, so the reput item should be uniquely associated with an
    item that was previously removed from the requeue and for which
    task_done has not been called.

    Args:
      item: An item to add to the requeue.
      block: Whether to block if the requeue is full.
      timeout: Maximum on how long to wait until the queue is non-full.

    Raises:
      Queue.Full if the queue is full and the timeout expires.
    """
    def ReputAction():
      self.requeue.put(item, block=False)
    self._DoWithTimeout(ReputAction,
                        Queue.Full,
                        self.get_cond,
                        self.put_cond,
                        self.lock,
                        timeout=timeout,
                        block=block)
  def get(self, block=True, timeout=None):
    """Get an item from the requeue.

    Reput items are preferred over fresh ones.

    Args:
      block: Whether to block if the requeue is empty.
      timeout: Maximum on how long to wait until the requeue is non-empty.

    Returns:
      An item from the requeue.

    Raises:
      Queue.Empty if the queue is empty and the timeout expires.
    """
    def GetAction():
      try:
        # Drain reput items first; task_done is called on the requeue
        # immediately because reputs never add outstanding tasks.
        result = self.requeue.get(block=False)
        self.requeue.task_done()
      except Queue.Empty:
        result = self.queue.get(block=False)
      return result
    return self._DoWithTimeout(GetAction,
                               Queue.Empty,
                               self.put_cond,
                               self.get_cond,
                               self.lock,
                               timeout=timeout,
                               block=block)
  def join(self):
    """Blocks until all of the items in the requeue have been processed."""
    self.queue.join()
  def task_done(self):
    """Indicate that a previously enqueued item has been fully processed."""
    self.queue.task_done()
  def empty(self):
    """Returns true if the requeue is empty."""
    return self.queue.empty() and self.requeue.empty()
  def get_nowait(self):
    """Try to get an item from the queue without blocking."""
    return self.get(block=False)
class ThrottleHandler(urllib2.BaseHandler):
  """A urllib2 handler for http and https requests that adds to a throttle."""
  def __init__(self, throttle):
    """Initialize a ThrottleHandler.

    Args:
      throttle: A Throttle instance to call for bandwidth and http/https request
        throttling.
    """
    self.throttle = throttle
  def AddRequest(self, throttle_name, req):
    """Add to bandwidth throttle for given request.

    The size charged is an estimate of the on-the-wire bytes: all
    headers, the request line, and the body if present.

    Args:
      throttle_name: The name of the bandwidth throttle to add to.
      req: The request whose size will be added to the throttle.
    """
    size = 0
    for key, value in req.headers.iteritems():
      size += len('%s: %s\n' % (key, value))
    for key, value in req.unredirected_hdrs.iteritems():
      size += len('%s: %s\n' % (key, value))
    (unused_scheme,
     unused_host_port, url_path,
     unused_query, unused_fragment) = urlparse.urlsplit(req.get_full_url())
    size += len('%s %s HTTP/1.1\n' % (req.get_method(), url_path))
    data = req.get_data()
    if data:
      size += len(data)
    self.throttle.AddTransfer(throttle_name, size)
  def AddResponse(self, throttle_name, res):
    """Add to bandwidth throttle for given response.

    Args:
      throttle_name: The name of the bandwidth throttle to add to.
      res: The response whose size will be added to the throttle.
    """
    # Reading the body consumes it, so res.read is replaced with a
    # closure that replays the cached content for downstream consumers.
    content = res.read()
    def ReturnContent():
      return content
    res.read = ReturnContent
    size = len(content)
    headers = res.info()
    for key, value in headers.items():
      size += len('%s: %s\n' % (key, value))
    self.throttle.AddTransfer(throttle_name, size)
  def http_request(self, req):
    """Process an HTTP request.

    If the throttle is over quota, sleep first. Then add request size to
    throttle before returning it to be sent.

    Args:
      req: A urllib2.Request object.

    Returns:
      The request passed in.
    """
    self.throttle.Sleep()
    self.AddRequest(BANDWIDTH_UP, req)
    return req
  def https_request(self, req):
    """Process an HTTPS request.

    If the throttle is over quota, sleep first. Then add request size to
    throttle before returning it to be sent.

    Args:
      req: A urllib2.Request object.

    Returns:
      The request passed in.
    """
    self.throttle.Sleep()
    self.AddRequest(HTTPS_BANDWIDTH_UP, req)
    return req
  def http_response(self, unused_req, res):
    """Process an HTTP response.

    The size of the response is added to the bandwidth throttle and the request
    throttle is incremented by one.

    Args:
      unused_req: The urllib2 request for this response.
      res: A urllib2 response object.

    Returns:
      The response passed in.
    """
    self.AddResponse(BANDWIDTH_DOWN, res)
    self.throttle.AddTransfer(REQUESTS, 1)
    return res
  def https_response(self, unused_req, res):
    """Process an HTTPS response.

    The size of the response is added to the bandwidth throttle and the request
    throttle is incremented by one.

    Args:
      unused_req: The urllib2 request for this response.
      res: A urllib2 response object.

    Returns:
      The response passed in.
    """
    self.AddResponse(HTTPS_BANDWIDTH_DOWN, res)
    self.throttle.AddTransfer(HTTPS_REQUESTS, 1)
    return res
class ThrottledHttpRpcServer(appengine_rpc.HttpRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests.

  This RPC server uses a Throttle to prevent exceeding quotas.
  """
  def __init__(self, throttle, request_manager, *args, **kwargs):
    """Initialize a ThrottledHttpRpcServer.

    Also sets request_manager.rpc_server to the ThrottledHttpRpcServer instance.

    Args:
      throttle: A Throttle instance.
      request_manager: A RequestManager instance.
      args: Positional arguments to pass through to
        appengine_rpc.HttpRpcServer.__init__
      kwargs: Keyword arguments to pass through to
        appengine_rpc.HttpRpcServer.__init__
    """
    # self.throttle must be assigned before the base __init__ runs: the
    # base constructor calls _GetOpener, which reads self.throttle.
    self.throttle = throttle
    appengine_rpc.HttpRpcServer.__init__(self, *args, **kwargs)
    request_manager.rpc_server = self
  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Extends the base opener with a ThrottleHandler so every request and
    response is accounted against the throttle.

    Returns:
      A urllib2.OpenerDirector object.
    """
    opener = appengine_rpc.HttpRpcServer._GetOpener(self)
    opener.add_handler(ThrottleHandler(self.throttle))
    return opener
def ThrottledHttpRpcServerFactory(throttle, request_manager):
  """Create a factory to produce ThrottledHttpRpcServer for a given throttle.

  Args:
    throttle: A Throttle instance to use for the ThrottledHttpRpcServer.
    request_manager: A RequestManager instance.

  Returns:
    A factory to produce a ThrottledHttpRpcServer.
  """

  def MakeRpcServer(*args, **kwargs):
    """Instantiates a ThrottledHttpRpcServer with the captured throttle."""
    kwargs.update(account_type='HOSTED_OR_GOOGLE', save_cookies=True)
    return ThrottledHttpRpcServer(throttle, request_manager, *args, **kwargs)

  return MakeRpcServer
class RequestManager(object):
  """A class which wraps a connection to the server."""
  # Identifies this tool in ClientLogin requests and the User-Agent header.
  source = 'google-bulkloader-%s' % UPLOADER_VERSION
  user_agent = source
  def __init__(self,
               app_id,
               host_port,
               url_path,
               kind,
               throttle):
    """Initialize a RequestManager object.

    Args:
      app_id: String containing the application id for requests.
      host_port: String containing the "host:port" pair; the port is optional.
      url_path: partial URL (path) to post entity data to.
      kind: Kind of the Entity records being posted.
      throttle: A Throttle instance.
    """
    self.app_id = app_id
    self.host_port = host_port
    self.host = host_port.split(':')[0]
    # Normalize to an absolute path for the remote_api endpoint.
    if url_path and url_path[0] != '/':
      url_path = '/' + url_path
    self.url_path = url_path
    self.kind = kind
    self.throttle = throttle
    # Cached (email, password) pair; populated lazily by AuthFunction.
    self.credentials = None
    # The factory sets self.rpc_server as a side effect when remote_api
    # instantiates the ThrottledHttpRpcServer.
    throttled_rpc_server_factory = ThrottledHttpRpcServerFactory(
        self.throttle, self)
    logging.debug('Configuring remote_api. app_id = %s, url_path = %s, '
                  'servername = %s' % (app_id, url_path, host_port))
    remote_api_stub.ConfigureRemoteDatastore(
        app_id,
        url_path,
        self.AuthFunction,
        servername=host_port,
        rpc_server_factory=throttled_rpc_server_factory)
    self.authenticated = False
  def Authenticate(self):
    """Invoke authentication if necessary."""
    # An empty-payload request forces the RPC server through its login
    # flow before any real data is sent.
    self.rpc_server.Send(self.url_path, payload=None)
    self.authenticated = True
  def AuthFunction(self,
                   raw_input_fn=raw_input,
                   password_input_fn=getpass.getpass):
    """Prompts the user for a username and password.

    Caches the results the first time it is called and returns the
    same result every subsequent time.

    Args:
      raw_input_fn: Used for dependency injection.
      password_input_fn: Used for dependency injection.

    Returns:
      A pair of the username and password.
    """
    if self.credentials is not None:
      return self.credentials
    print 'Please enter login credentials for %s (%s)' % (
        self.host, self.app_id)
    email = raw_input_fn('Email: ')
    if email:
      password_prompt = 'Password for %s: ' % email
      password = password_input_fn(password_prompt)
    else:
      # No email given: skip the password prompt entirely.
      password = None
    self.credentials = (email, password)
    return self.credentials
  def _GetHeaders(self):
    """Constructs a dictionary of extra headers to send with a request."""
    headers = {
        'GAE-Uploader-Version': UPLOADER_VERSION,
        'GAE-Uploader-Kind': self.kind
    }
    return headers
  def EncodeContent(self, rows):
    """Encodes row data to the wire format.

    Args:
      rows: A list of pairs of a line number and a list of column values.

    Returns:
      A list of db.Model instances.

    Raises:
      ConfigurationError: If no Loader is registered for self.kind.
    """
    try:
      loader = Loader.RegisteredLoaders()[self.kind]
    except KeyError:
      logging.error('No Loader defined for kind %s.' % self.kind)
      raise ConfigurationError('No Loader defined for kind %s.' % self.kind)
    entities = []
    for line_number, values in rows:
      key = loader.GenerateKey(line_number, values)
      entity = loader.CreateEntity(values, key_name=key)
      # NOTE(review): extend() treats CreateEntity's return value as an
      # iterable; if loaders return a single model instance this should
      # presumably be append() -- confirm against Loader.CreateEntity.
      entities.extend(entity)
    return entities
  def PostEntities(self, item):
    """Posts Entity records to a remote endpoint over HTTP.

    The datastore has been configured (in __init__) to use remote_api,
    so db.put here issues the HTTP request to the server. Nothing is
    returned.

    Args:
      item: A WorkItem containing the entities to post.
    """
    entities = item.content
    db.put(entities)
class WorkItem(object):
  """Holds a unit of uploading work.

  A WorkItem represents a number of entities that need to be uploaded to
  Google App Engine. These entities are encoded in the "content" field of
  the WorkItem, and will be POST'd as-is to the server.

  The entities are identified by a range of numeric keys, inclusively. In
  the case of a resumption of an upload, or a replay to correct errors,
  these keys must be able to identify the same set of entities.

  Note that keys specify a range. The entities do not have to sequentially
  fill the entire range, they must simply bound a range of valid keys.
  """
  def __init__(self, progress_queue, rows, key_start, key_end,
               progress_key=None):
    """Initialize the WorkItem instance.

    Args:
      progress_queue: A queue used for tracking progress information.
      rows: A list of pairs of a line number and a list of column values
      key_start: The (numeric) starting key, inclusive.
      key_end: The (numeric) ending key, inclusive.
      progress_key: If this WorkItem represents state from a prior run,
        then this will be the key within the progress database.
    """
    self.state = STATE_READ
    self.progress_queue = progress_queue
    assert isinstance(key_start, (int, long))
    assert isinstance(key_end, (int, long))
    assert key_start <= key_end
    self.key_start = key_start
    self.key_end = key_end
    self.progress_key = progress_key
    # Set by the progress consumer to acknowledge a state transition;
    # see _StateTransition.
    self.progress_event = threading.Event()
    self.rows = rows
    # Encoded entity payload; populated outside this class (starts None).
    self.content = None
    self.count = len(rows)
  def MarkAsRead(self):
    """Mark this WorkItem as read/consumed from the data source."""
    assert self.state == STATE_READ
    self._StateTransition(STATE_READ, blocking=True)
    # progress_key is presumably assigned by the progress consumer while
    # handling the blocking transition above -- confirm against the
    # progress-thread implementation.
    assert self.progress_key is not None
  def MarkAsSending(self):
    """Mark this WorkItem as in-process on being uploaded to the server."""
    assert self.state == STATE_READ or self.state == STATE_NOT_SENT
    assert self.progress_key is not None
    self._StateTransition(STATE_SENDING, blocking=True)
  def MarkAsSent(self):
    """Mark this WorkItem as successfully sent to the server."""
    assert self.state == STATE_SENDING
    assert self.progress_key is not None
    # Non-blocking: nothing depends on the progress record once sent.
    self._StateTransition(STATE_SENT, blocking=False)
  def MarkAsError(self):
    """Mark this WorkItem as requiring manual error recovery."""
    assert self.state == STATE_SENDING
    assert self.progress_key is not None
    self._StateTransition(STATE_NOT_SENT, blocking=True)
  def _StateTransition(self, new_state, blocking=False):
    """Transition the work item to a new state, storing progress information.

    Args:
      new_state: The state to transition to.
      blocking: Whether to block for the progress thread to acknowledge the
        transition.
    """
    logging.debug('[%s-%s] %s' %
                  (self.key_start, self.key_end, StateMessage(self.state)))
    assert not self.progress_event.isSet()
    self.state = new_state
    self.progress_queue.put(self)
    if blocking:
      # Wait for the progress consumer to set the event, then reset it
      # so the next transition can block on it again.
      self.progress_event.wait()
      self.progress_event.clear()
def InterruptibleSleep(sleep_time):
  """Sleep for sleep_time seconds, polling this thread's exit_flag.

  The sleep is broken into increments of at most half a second so the
  current thread's exit_flag is consulted roughly twice per second,
  allowing a prompt return once shutdown has been requested.

  Args:
    sleep_time: Time to sleep, in seconds.
  """
  current = threading.currentThread()
  fudge = .0001
  total_slept = 0.0
  while total_slept < sleep_time - fudge:
    interval = min(sleep_time - total_slept, 0.5)
    time.sleep(interval)
    total_slept += interval
    if current.exit_flag:
      return
class ThreadGate(object):
  """Manage the number of active worker threads.

  The ThreadGate limits the number of threads that are simultaneously
  uploading batches of records in order to implement adaptive rate
  control. The number of simultaneous upload threads that it takes to
  start causing timeout varies widely over the course of the day, so
  adaptive rate control allows the uploader to do many uploads while
  reducing the error rate and thus increasing the throughput.

  Initially the ThreadGate allows only one uploader thread to be active.
  For each successful upload, another thread is activated and for each
  failed upload, the number of active threads is reduced by one.
  """

  def __init__(self, enabled, sleep=InterruptibleSleep):
    """Initialize the ThreadGate.

    Args:
      enabled: Whether thread throttling is enabled; when False, StartWork
        and friends are no-ops.
      sleep: Sleep function taking a duration in seconds; injectable for
        testing. Defaults to InterruptibleSleep.
    """
    self.enabled = enabled
    # Number of threads currently allowed through the gate at once.
    self.enabled_count = 1
    # Guards enabled_count.
    self.lock = threading.Lock()
    self.thread_semaphore = threading.Semaphore(self.enabled_count)
    self._threads = []
    # Seconds to pause before each work unit; grows only while a single
    # thread remains enabled and uploads keep failing (see DecreaseWorkers).
    self.backoff_time = 0
    self.sleep = sleep

  def Register(self, thread):
    """Register a thread with the thread gate."""
    self._threads.append(thread)

  def Threads(self):
    """Yields the registered threads."""
    for thread in self._threads:
      yield thread

  def EnableThread(self):
    """Enable one more worker thread."""
    self.lock.acquire()
    try:
      self.enabled_count += 1
    finally:
      self.lock.release()
    # Release outside the lock; semaphore release never blocks.
    self.thread_semaphore.release()

  def EnableAllThreads(self):
    """Enable all worker threads."""
    for unused_idx in range(len(self._threads) - self.enabled_count):
      self.EnableThread()

  def StartWork(self):
    """Starts a critical section in which the number of workers is limited.

    If thread throttling is enabled then this method starts a critical
    section which allows self.enabled_count simultaneously operating
    threads. The critical section is ended by calling self.FinishWork().
    """
    if self.enabled:
      self.thread_semaphore.acquire()
      if self.backoff_time > 0.0:
        # Only log when the thread is not already shutting down, but
        # always sleep for the backoff period.
        if not threading.currentThread().exit_flag:
          logging.info('Backing off: %.1f seconds',
                       self.backoff_time)
        self.sleep(self.backoff_time)

  def FinishWork(self):
    """Ends a critical section started with self.StartWork()."""
    if self.enabled:
      self.thread_semaphore.release()

  def IncreaseWorkers(self):
    """Informs the throttler that an item was successfully sent.

    If thread throttling is enabled, this method will cause an
    additional thread to run in the critical section.
    """
    if self.enabled:
      # A success resets any accumulated backoff.
      if self.backoff_time > 0.0:
        logging.info('Resetting backoff to 0.0')
        self.backoff_time = 0.0
      do_enable = False
      self.lock.acquire()
      try:
        # Only enable another slot while there are registered threads that
        # are not yet allowed through the gate.
        if self.enabled and len(self._threads) > self.enabled_count:
          do_enable = True
          self.enabled_count += 1
      finally:
        self.lock.release()
      # Release the semaphore outside the lock to avoid holding it while
      # waking another thread.
      if do_enable:
        self.thread_semaphore.release()

  def DecreaseWorkers(self):
    """Informs the thread_gate that an item failed to send.

    If thread throttling is enabled, this method will cause the
    throttler to allow one fewer thread in the critical section. If
    there is only one thread remaining, failures will result in
    exponential backoff until there is a success.
    """
    if self.enabled:
      do_disable = False
      self.lock.acquire()
      try:
        if self.enabled:
          if self.enabled_count > 1:
            do_disable = True
            self.enabled_count -= 1
          else:
            # Already down to one thread: grow the backoff instead.
            # INITIAL_BACKOFF/BACKOFF_FACTOR are module-level constants
            # defined elsewhere in this file.
            if self.backoff_time == 0.0:
              self.backoff_time = INITIAL_BACKOFF
            else:
              self.backoff_time *= BACKOFF_FACTOR
      finally:
        self.lock.release()
      # Acquiring here (outside the lock) removes one slot from the gate;
      # it may block until a worker calls FinishWork.
      if do_disable:
        self.thread_semaphore.acquire()
class Throttle(object):
  """A base class for upload rate throttling.

  Transferring large number of records, too quickly, to an application
  could trigger quota limits and cause the transfer process to halt.
  In order to stay within the application's quota, we throttle the
  data transfer to a specified limit (across all transfer threads).
  This limit defaults to about half of the Google App Engine default
  for an application, but can be manually adjusted faster/slower as
  appropriate.

  This class tracks a moving average of some aspect of the transfer
  rate (bandwidth, records per second, http connections per
  second). It keeps two windows of counts of bytes transferred, on a
  per-thread basis. One block is the "current" block, and the other is
  the "prior" block. It will rotate the counts from current to prior
  when ROTATE_PERIOD has passed. Thus, the current block will
  represent from 0 seconds to ROTATE_PERIOD seconds of activity
  (determined by: time.time() - self.last_rotate). The prior block
  will always represent a full ROTATE_PERIOD.

  Sleeping is performed just before a transfer of another block, and is
  based on the counts transferred *before* the next transfer. It really
  does not matter how much will be transferred, but only that for all the
  data transferred SO FAR that we have interspersed enough pauses to
  ensure the aggregate transfer rate is within the specified limit.

  These counts are maintained on a per-thread basis, so we do not require
  any interlocks around incrementing the counts. There IS an interlock on
  the rotation of the counts because we do not want multiple threads to
  multiply-rotate the counts.

  There are various race conditions in the computation and collection
  of these counts. We do not require precise values, but simply to
  keep the overall transfer within the bandwidth limits. If a given
  pause is a little short, or a little long, then the aggregate delays
  will be correct.
  """

  # Seconds between rotations of the current/prior count windows.
  ROTATE_PERIOD = 600

  def __init__(self,
               get_time=time.time,
               thread_sleep=InterruptibleSleep,
               layout=None):
    """Initialize the Throttle.

    Args:
      get_time: Clock function returning seconds; injectable for testing.
      thread_sleep: Sleep function taking seconds; injectable for testing.
      layout: Optional dict mapping throttle name to its rate limit; if
        given, AddThrottles is called with it.
    """
    self.get_time = get_time
    self.thread_sleep = thread_sleep
    self.start_time = get_time()
    # Each of the following maps throttle name -> (thread name -> count).
    self.transferred = {}
    self.prior_block = {}
    self.totals = {}
    # Maps throttle name -> rate limit.
    self.throttles = {}
    self.last_rotate = {}
    self.rotate_mutex = {}
    if layout:
      self.AddThrottles(layout)

  def AddThrottle(self, name, limit):
    """Add a single named throttle with the given rate limit."""
    self.throttles[name] = limit
    self.transferred[name] = {}
    self.prior_block[name] = {}
    self.totals[name] = {}
    self.last_rotate[name] = self.get_time()
    self.rotate_mutex[name] = threading.Lock()

  def AddThrottles(self, layout):
    """Add a throttle for each (name, limit) pair in the layout dict."""
    for key, value in layout.iteritems():
      self.AddThrottle(key, value)

  def Register(self, thread):
    """Register this thread with the throttler."""
    thread_name = thread.getName()
    for throttle_name in self.throttles.iterkeys():
      self.transferred[throttle_name][thread_name] = 0
      self.prior_block[throttle_name][thread_name] = 0
      self.totals[throttle_name][thread_name] = 0

  def VerifyName(self, throttle_name):
    """Raise AssertionError unless throttle_name was added via AddThrottle."""
    if throttle_name not in self.throttles:
      raise AssertionError('%s is not a registered throttle' % throttle_name)

  def AddTransfer(self, throttle_name, token_count):
    """Add a count to the amount this thread has transferred.

    Each time a thread transfers some data, it should call this method to
    note the amount sent. The counts may be rotated if sufficient time
    has passed since the last rotation.

    Note: this method should only be called by the BulkLoaderThread
    instances. The token count is allocated towards the
    "current thread".

    Args:
      throttle_name: The name of the throttle to add to.
      token_count: The number to add to the throttle counter.
    """
    self.VerifyName(throttle_name)
    transferred = self.transferred[throttle_name]
    # Per-thread slot, so no lock is needed for the increment.
    transferred[threading.currentThread().getName()] += token_count

    if self.last_rotate[throttle_name] + self.ROTATE_PERIOD < self.get_time():
      self._RotateCounts(throttle_name)

  def Sleep(self, throttle_name=None):
    """Possibly sleep in order to limit the transfer rate.

    Note that we sleep based on *prior* transfers rather than what we
    may be about to transfer. The next transfer could put us under/over
    and that will be rectified *after* that transfer. Net result is that
    the average transfer rate will remain within bounds. Spiky behavior
    or uneven rates among the threads could possibly bring the transfer
    rate above the requested limit for short durations.

    Args:
      throttle_name: The name of the throttle to sleep on. If None or
        omitted, then sleep on all throttles.
    """
    if throttle_name is None:
      for throttle_name in self.throttles:
        self.Sleep(throttle_name=throttle_name)
      return

    self.VerifyName(throttle_name)

    thread = threading.currentThread()

    while True:
      duration = self.get_time() - self.last_rotate[throttle_name]

      total = 0
      for count in self.prior_block[throttle_name].values():
        total += count

      # A non-empty prior block accounts for a full rotation period of
      # elapsed time.
      if total:
        duration += self.ROTATE_PERIOD

      for count in self.transferred[throttle_name].values():
        total += count

      # Time we *should* have spent minus time actually elapsed.
      sleep_time = (float(total) / self.throttles[throttle_name]) - duration

      if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION:
        break

      logging.debug('[%s] Throttling on %s. Sleeping for %.1f ms '
                    '(duration=%.1f ms, total=%d)',
                    thread.getName(), throttle_name,
                    sleep_time * 1000, duration * 1000, total)
      self.thread_sleep(sleep_time)
      if thread.exit_flag:
        break
      self._RotateCounts(throttle_name)

  def _RotateCounts(self, throttle_name):
    """Rotate the transfer counters.

    If sufficient time has passed, then rotate the counters from active to
    the prior-block of counts.

    This rotation is interlocked to ensure that multiple threads do not
    over-rotate the counts.

    Args:
      throttle_name: The name of the throttle to rotate.
    """
    self.VerifyName(throttle_name)
    self.rotate_mutex[throttle_name].acquire()
    try:
      # Re-check under the mutex: another thread may have already rotated.
      next_rotate_time = self.last_rotate[throttle_name] + self.ROTATE_PERIOD
      if next_rotate_time >= self.get_time():
        return

      for name, count in self.transferred[throttle_name].items():


        self.prior_block[throttle_name][name] = count
        self.transferred[throttle_name][name] = 0

        self.totals[throttle_name][name] += count

      self.last_rotate[throttle_name] = self.get_time()

    finally:
      self.rotate_mutex[throttle_name].release()

  def TotalTransferred(self, throttle_name):
    """Return the total transferred, and over what period.

    Args:
      throttle_name: The name of the throttle to total.

    Returns:
      A tuple of the total count and running time for the given throttle name.
    """
    total = 0
    for count in self.totals[throttle_name].values():
      total += count
    for count in self.transferred[throttle_name].values():
      total += count
    return total, self.get_time() - self.start_time
class _ThreadBase(threading.Thread):
"""Provide some basic features for the threads used in the uploader.
This abstract base class is used to provide some common features:
* Flag to ask thread to exit as soon as possible.
* Record exit/error status for the primary thread to pick up.
* Capture exceptions and record them for pickup.
* Some basic logging of thread start/stop.
* All threads are "daemon" threads.
* Friendly names for presenting to users.
Concrete sub-classes must implement PerformWork().
Either self.NAME should be set or GetFriendlyName() be overridden to
return a human-friendly name for this thread.
The run() method starts the thread and prints start/exit messages.
self.exit_flag is intended to signal that this thread should exit
when it gets the chance. PerformWork() should check self.exit_flag
whenever it has the opportunity to exit gracefully.
"""
def __init__(self):
threading.Thread.__init__(self)
self.setDaemon(True)
self.exit_flag = False
self.error = None
def run(self):
"""Perform the work of the thread."""
logging.info('[%s] %s: started', self.getName(), self.__class__.__name__)
try:
self.PerformWork()
except:
self.error = sys.exc_info()[1]
logging.exception('[%s] %s:', self.getName(), self.__class__.__name__)
logging.info('[%s] %s: exiting', self.getName(), self.__class__.__name__)
def PerformWork(self):
"""Perform the thread-specific work."""
raise NotImplementedError()
def CheckError(self):
"""If an error is present, then log it."""
if self.error:
logging.error('Error in %s: %s', self.GetFriendlyName(), self.error)
def GetFriendlyName(self):
"""Returns a human-friendly description of the thread."""
if hasattr(self, 'NAME'):
return self.NAME
return 'unknown thread'
class BulkLoaderThread(_ThreadBase):
  """A thread which transmits entities to the server application.

  This thread will read WorkItem instances from the work_queue and upload
  the entities to the server application. Progress information will be
  pushed into the progress_queue as the work is being performed.

  If a BulkLoaderThread encounters a transient error, the entities will be
  resent, if a fatal error is encountered the BulkLoaderThread exits.
  """

  def __init__(self,
               work_queue,
               throttle,
               thread_gate,
               request_manager):
    """Initialize the BulkLoaderThread instance.

    Args:
      work_queue: A queue containing WorkItems for processing.
      throttle: A Throttles to control upload bandwidth.
      thread_gate: A ThreadGate to control number of simultaneous uploads.
      request_manager: A RequestManager instance.
    """
    _ThreadBase.__init__(self)

    self.work_queue = work_queue
    self.throttle = throttle
    self.thread_gate = thread_gate
    self.request_manager = request_manager

  def PerformWork(self):
    """Perform the work of a BulkLoaderThread."""
    while not self.exit_flag:
      success = False
      # Respect the adaptive concurrency limit for each work unit; the
      # matching FinishWork is in the outermost finally below.
      self.thread_gate.StartWork()
      try:
        try:
          # Short timeout so exit_flag is re-checked about once a second.
          item = self.work_queue.get(block=True, timeout=1.0)
        except Queue.Empty:
          continue
        if item == _THREAD_SHOULD_EXIT:
          break

        logging.debug('[%s] Got work item [%d-%d]',
                      self.getName(), item.key_start, item.key_end)

        try:

          item.MarkAsSending()
          try:
            # Encode lazily and cache, so a resent item is not re-encoded.
            if item.content is None:
              item.content = self.request_manager.EncodeContent(item.rows)
            try:
              self.request_manager.PostEntities(item)
              success = True
              logging.debug(
                  '[%d-%d] Sent %d entities',
                  item.key_start, item.key_end, item.count)
              self.throttle.AddTransfer(RECORDS, item.count)
            except (db.InternalError, db.NotSavedError, db.Timeout), e:
              # Transient datastore errors: the item is requeued below.
              logging.debug('Caught non-fatal error: %s', e)
            except urllib2.HTTPError, e:
              # 403 and 5xx are treated as transient (quota/server trouble);
              # anything else is fatal for this thread.
              if e.code == 403 or (e.code >= 500 and e.code < 600):
                logging.debug('Caught HTTP error %d', e.code)
                logging.debug('%s', e.read())
              else:
                raise e

          except:
            # Record the fatal error for the primary thread, then re-raise
            # so _ThreadBase.run logs it and the thread exits.
            self.error = sys.exc_info()[1]
            logging.exception('[%s] %s: caught exception %s', self.getName(),
                              self.__class__.__name__, str(sys.exc_info()))
            raise

        finally:
          # Runs for both success and failure (including fatal errors), so
          # the item's progress state is always updated.
          if success:
            item.MarkAsSent()
            self.thread_gate.IncreaseWorkers()
            self.work_queue.task_done()
          else:
            item.MarkAsError()
            self.thread_gate.DecreaseWorkers()
            try:
              self.work_queue.reput(item, block=False)
            except Queue.Full:
              logging.error('[%s] Failed to reput work item.', self.getName())
              raise Error('Failed to reput work item')
          logging.info('[%d-%d] %s',
                       item.key_start, item.key_end, StateMessage(item.state))

      finally:
        self.thread_gate.FinishWork()


  def GetFriendlyName(self):
    """Returns a human-friendly name for this thread."""
    return 'worker [%s]' % self.getName()
class DataSourceThread(_ThreadBase):
  """A thread which reads WorkItems and pushes them into queue.

  This thread will read/consume WorkItems from a generator (produced by
  the generator factory). These WorkItems will then be pushed into the
  work_queue. Note that reading will block if/when the work_queue becomes
  full. Information on content consumed from the generator will be pushed
  into the progress_queue.
  """

  NAME = 'data source thread'

  def __init__(self,
               work_queue,
               progress_queue,
               workitem_generator_factory,
               progress_generator_factory):
    """Initialize the DataSourceThread instance.

    Args:
      work_queue: A queue containing WorkItems for processing.
      progress_queue: A queue used for tracking progress information.
      workitem_generator_factory: A factory that creates a WorkItem
        generator.
      progress_generator_factory: A factory that creates a generator which
        produces prior progress status, or None if there is no prior status
        to use.
    """
    _ThreadBase.__init__(self)

    self.work_queue = work_queue
    self.progress_queue = progress_queue
    self.workitem_generator_factory = workitem_generator_factory
    self.progress_generator_factory = progress_generator_factory
    # Running count of entities handed to the work queue.
    self.entity_count = 0

  def PerformWork(self):
    """Performs the work of a DataSourceThread."""
    if self.progress_generator_factory:
      progress_gen = self.progress_generator_factory()
    else:
      progress_gen = None

    content_gen = self.workitem_generator_factory(self.progress_queue,
                                                  progress_gen)

    self.sent_count = 0
    self.read_count = 0
    self.read_all = False

    for item in content_gen.Batches():
      # Persist "read" state before the item is made available to workers.
      item.MarkAsRead()

      # Retry the put with a short timeout so exit_flag is polled while
      # the work queue is full.
      while not self.exit_flag:
        try:
          self.work_queue.put(item, block=True, timeout=1.0)
          self.entity_count += item.count
          break
        except Queue.Full:
          pass

      if self.exit_flag:
        break

    # Only report final totals when the generator was fully consumed.
    if not self.exit_flag:
      self.read_all = True
      self.read_count = content_gen.row_count
      self.sent_count = content_gen.sent_count
def _RunningInThread(thread):
"""Return True if we are running within the specified thread."""
return threading.currentThread().getName() == thread.getName()
class ProgressDatabase(object):
  """Persistently record all progress information during an upload.

  This class wraps a very simple SQLite database which records each of
  the relevant details from the WorkItem instances. If the uploader is
  resumed, then data is replayed out of the database.
  """

  def __init__(self, db_filename, commit_periodicity=100):
    """Initialize the ProgressDatabase instance.

    Args:
      db_filename: The name of the SQLite database to use.
      commit_periodicity: How many operations to perform between commits.
    """
    self.db_filename = db_filename
    logging.info('Using progress database: %s', db_filename)
    # sqlite connections are bound to the creating thread; a separate
    # connection is opened for the progress tracker thread on demand
    # (see _OpenProgressConnection).
    self.primary_conn = sqlite3.connect(db_filename, isolation_level=None)
    self.primary_thread = threading.currentThread()

    self.progress_conn = None
    self.progress_thread = None

    self.operation_count = 0
    self.commit_periodicity = commit_periodicity

    # Highest key_end stored so far; used to assert StoreKeys is called
    # with monotonically increasing ranges.
    self.prior_key_end = None

    try:
      self.primary_conn.execute(
          """create table progress (
          id integer primary key autoincrement,
          state integer not null,
          key_start integer not null,
          key_end integer not null
          )
          """)
    except sqlite3.OperationalError, e:
      # Table already present on a resumed run; any other error is fatal.
      # NOTE(review): e.message is a Python 2-only attribute.
      if 'already exists' not in e.message:
        raise

    try:
      self.primary_conn.execute('create index i_state on progress (state)')
    except sqlite3.OperationalError, e:
      if 'already exists' not in e.message:
        raise

  def ThreadComplete(self):
    """Finalize any operations the progress thread has performed.

    The database aggregates lots of operations into a single commit, and
    this method is used to commit any pending operations as the thread
    is about to shut down.
    """
    if self.progress_conn:
      self._MaybeCommit(force_commit=True)

  def _MaybeCommit(self, force_commit=False):
    """Periodically commit changes into the SQLite database.

    Committing every operation is quite expensive, and slows down the
    operation of the script. Thus, we only commit after every N operations,
    as determined by the self.commit_periodicity value. Optionally, the
    caller can force a commit.

    Args:
      force_commit: Pass True in order for a commit to occur regardless
        of the current operation count.
    """
    self.operation_count += 1
    if force_commit or (self.operation_count % self.commit_periodicity) == 0:
      self.progress_conn.commit()

  def _OpenProgressConnection(self):
    """Possibly open a database connection for the progress tracker thread.

    If the connection is not open (for the calling thread, which is assumed
    to be the progress tracker thread), then open it. We also open a couple
    cursors for later use (and reuse).
    """
    if self.progress_conn:
      return

    # The progress connection must belong to a thread other than the one
    # that opened the primary connection.
    assert not _RunningInThread(self.primary_thread)

    self.progress_thread = threading.currentThread()

    self.progress_conn = sqlite3.connect(self.db_filename)

    self.insert_cursor = self.progress_conn.cursor()
    self.update_cursor = self.progress_conn.cursor()

  def HasUnfinishedWork(self):
    """Returns True if the database has progress information.

    Note there are two basic cases for progress information:
    1) All saved records indicate a successful upload. In this case, we
       need to skip everything transmitted so far and then send the rest.
    2) Some records for incomplete transfer are present. These need to be
       sent again, and then we resume sending after all the successful
       data.

    Returns:
      True if the database has progress information, False otherwise.

    Raises:
      ResumeError: If there is an error reading the progress database.
    """
    assert _RunningInThread(self.primary_thread)

    cursor = self.primary_conn.cursor()
    cursor.execute('select count(*) from progress')
    row = cursor.fetchone()
    if row is None:
      raise ResumeError('Error reading progress information.')

    return row[0] != 0

  def StoreKeys(self, key_start, key_end):
    """Record a new progress record, returning a key for later updates.

    The specified progress information will be persisted into the database.
    A unique key will be returned that identifies this progress state. The
    key is later used to (quickly) update this record.

    For the progress resumption to proceed properly, calls to StoreKeys
    MUST specify monotonically increasing key ranges. This will result in
    a database whereby the ID, KEY_START, and KEY_END rows are all
    increasing (rather than having ranges out of order).

    NOTE: the above precondition is NOT tested by this method (since it
    would imply an additional table read or two on each invocation).

    Args:
      key_start: The starting key of the WorkItem (inclusive)
      key_end: The end key of the WorkItem (inclusive)

    Returns:
      A string to later be used as a unique key to update this state.
    """
    self._OpenProgressConnection()

    assert _RunningInThread(self.progress_thread)
    # NOTE(review): these accept int only, while WorkItem.__init__ also
    # allows long — confirm whether long keys can reach this point.
    assert isinstance(key_start, int)
    assert isinstance(key_end, int)
    assert key_start <= key_end

    if self.prior_key_end is not None:
      assert key_start > self.prior_key_end
    self.prior_key_end = key_end

    self.insert_cursor.execute(
        'insert into progress (state, key_start, key_end) values (?, ?, ?)',
        (STATE_READ, key_start, key_end))

    progress_key = self.insert_cursor.lastrowid

    self._MaybeCommit()

    return progress_key

  def UpdateState(self, key, new_state):
    """Update a specified progress record with new information.

    Args:
      key: The key for this progress record, returned from StoreKeys
      new_state: The new state to associate with this progress record.
    """
    self._OpenProgressConnection()

    assert _RunningInThread(self.progress_thread)
    assert isinstance(new_state, int)

    self.update_cursor.execute('update progress set state=? where id=?',
                               (new_state, key))

    self._MaybeCommit()

  def GetProgressStatusGenerator(self):
    """Get a generator which returns progress information.

    The returned generator will yield a series of 4-tuples that specify
    progress information about a prior run of the uploader. The 4-tuples
    have the following values:

      progress_key: The unique key to later update this record with new
                    progress information.
      state: The last state saved for this progress record.
      key_start: The starting key of the items for uploading (inclusive).
      key_end: The ending key of the items for uploading (inclusive).

    After all incompletely-transferred records are provided, then one
    more 4-tuple will be generated:

      None
      DATA_CONSUMED_TO_HERE: A unique string value indicating this record
                             is being provided.
      None
      key_end: An integer value specifying the last data source key that
               was handled by the previous run of the uploader.

    The caller should begin uploading records which occur after key_end.

    Yields:
      Progress information as tuples (progress_key, state, key_start,
      key_end).
    """
    # A fresh connection: this generator is consumed on the data source
    # thread, not the primary or progress threads.
    conn = sqlite3.connect(self.db_filename, isolation_level=None)
    cursor = conn.cursor()

    # The row with the highest id holds the furthest point reached.
    cursor.execute('select max(id) from progress')
    batch_id = cursor.fetchone()[0]

    cursor.execute('select key_end from progress where id = ?', (batch_id,))
    key_end = cursor.fetchone()[0]

    self.prior_key_end = key_end

    # Replay every record that did not complete (state != STATE_SENT).
    cursor.execute(
        'select id, state, key_start, key_end from progress'
        '  where state != ?'
        '  order by id',
        (STATE_SENT,))

    rows = cursor.fetchall()

    for row in rows:
      if row is None:
        break

      yield row

    # Sentinel: everything up to key_end has been consumed by a prior run.
    yield None, DATA_CONSUMED_TO_HERE, None, key_end
class StubProgressDatabase(object):
  """A no-op stand-in for ProgressDatabase.

  Used when progress tracking is disabled: every operation succeeds
  without persisting anything.
  """

  def HasUnfinishedWork(self):
    """Whether the stub database has progress information (it doesn't)."""
    return False

  def StoreKeys(self, unused_key_start, unused_key_end):
    """Pretend to store a key in the stub database."""
    return 'fake-key'

  def UpdateState(self, unused_key, unused_new_state):
    """Accept and discard a progress-state update."""
    return None

  def ThreadComplete(self):
    """Finalize operations on the stub database (i.e. do nothing)."""
    return None
class ProgressTrackerThread(_ThreadBase):
  """A thread which records progress information for the upload process.

  The progress information is stored into the provided progress database.
  This class is not responsible for replaying a prior run's progress
  information out of the database. Separate mechanisms must be used to
  resume a prior upload attempt.
  """

  NAME = 'progress tracking thread'

  def __init__(self, progress_queue, progress_db):
    """Initialize the ProgressTrackerThread instance.

    Args:
      progress_queue: A Queue used for tracking progress information.
      progress_db: The database for tracking progress information; should
        be an instance of ProgressDatabase.
    """
    _ThreadBase.__init__(self)

    self.progress_queue = progress_queue
    self.db = progress_db
    self.entities_sent = 0

  def PerformWork(self):
    """Performs the work of a ProgressTrackerThread."""
    while not self.exit_flag:
      try:
        # Short timeout so exit_flag is re-checked about once a second.
        item = self.progress_queue.get(block=True, timeout=1.0)
      except Queue.Empty:
        continue
      if item == _THREAD_SHOULD_EXIT:
        break

      if item.state == STATE_READ and item.progress_key is None:
        # A brand-new item: persist it and hand back its database key.
        item.progress_key = self.db.StoreKeys(item.key_start, item.key_end)
      else:
        # An existing item changing state.
        assert item.progress_key is not None
        self.db.UpdateState(item.progress_key, item.state)

      if item.state == STATE_SENT:
        self.entities_sent += item.count

      # Wake the thread blocked in WorkItem._StateTransition, if any.
      item.progress_event.set()

      self.progress_queue.task_done()

    self.db.ThreadComplete()
def Validate(value, typ):
  """Ensure that value is non-empty and an instance of typ.

  Args:
    value: Any value; must be truthy (non-empty, non-None, non-zero).
    typ: A type, or tuple of types, that value must be an instance of.

  Raises:
    ValueError: If value is None or otherwise falsy/empty.
    TypeError: If value is not an instance of typ.
  """
  if not value:
    raise ValueError('Value should not be empty; received %s.' % value)
  if not isinstance(value, typ):
    raise TypeError('Expected a %s, but received %s (a %s).' %
                    (typ, value, value.__class__))
class Loader(object):
  """A base class for creating datastore entities from input data.

  To add a handler for bulk loading a new entity kind into your datastore,
  write a subclass of this class that calls Loader.__init__ from your
  class's __init__.

  If you need to run extra code to convert entities from the input
  data, create new properties, or otherwise modify the entities before
  they're inserted, override HandleEntity.

  See the CreateEntity method for the creation of entities from the
  (parsed) input data.
  """

  # Registry of all instantiated loaders, keyed by kind; see RegisterLoader.
  __loaders = {}
  __kind = None
  __properties = None

  def __init__(self, kind, properties):
    """Constructor.

    Populates this Loader's kind and properties map. Also registers it with
    the bulk loader, so that all you need to do is instantiate your Loader,
    and the bulkload handler will automatically use it.

    Args:
      kind: a string containing the entity kind that this loader handles

      properties: list of (name, converter) tuples.

        This is used to automatically convert the CSV columns into
        properties. The converter should be a function that takes one
        argument, a string value from the CSV file, and returns a
        correctly typed property value that should be inserted. The
        tuples in this list should match the columns in your CSV file,
        in order.

        For example:
          [('name', str),
           ('id_number', int),
           ('email', datastore_types.Email),
           ('user', users.User),
           ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
           ('description', datastore_types.Text),
           ]
    """
    Validate(kind, basestring)
    self.__kind = kind

    # Raises if the kind has no registered model class.
    db.class_for_kind(kind)

    Validate(properties, list)
    for name, fn in properties:
      Validate(name, basestring)
      assert callable(fn), (
        'Conversion function %s for property %s is not callable.' % (fn, name))

    self.__properties = properties

  @staticmethod
  def RegisterLoader(loader):
    """Record loader in the shared registry, keyed by its kind."""
    # Name mangling: this reads loader._Loader__kind, i.e. the kind that
    # Loader.__init__ stored, even when loader is a subclass instance.
    Loader.__loaders[loader.__kind] = loader

  def kind(self):
    """ Return the entity kind that this Loader handles.
    """
    return self.__kind

  def CreateEntity(self, values, key_name=None):
    """Creates a entity from a list of property values.

    Args:
      values: list/tuple of str
      key_name: if provided, the name for the (single) resulting entity

    Returns:
      list of db.Model

      The returned entities are populated with the property values from the
      argument, converted to native types using the properties map given in
      the constructor, and passed through HandleEntity. They're ready to be
      inserted.

    Raises:
      AssertionError if the number of values doesn't match the number
        of properties in the properties map.
      ValueError if any element of values is None or empty.
      TypeError if values is not a list or tuple.
    """
    Validate(values, (list, tuple))
    assert len(values) == len(self.__properties), (
      'Expected %d CSV columns, found %d.' %
      (len(self.__properties), len(values)))

    model_class = db.class_for_kind(self.__kind)

    properties = {'key_name': key_name}
    for (name, converter), val in zip(self.__properties, values):
      # Special-case bool: bool('0') would be True, so map common
      # "false" spellings to False before converting.
      if converter is bool and val.lower() in ('0', 'false', 'no'):
          val = False
      properties[name] = converter(val)

    entity = model_class(**properties)
    entities = self.HandleEntity(entity)

    if entities:
      if not isinstance(entities, (list, tuple)):
        entities = [entities]

      for entity in entities:
        if not isinstance(entity, db.Model):
          raise TypeError('Expected a db.Model, received %s (a %s).' %
                          (entity, entity.__class__))

    return entities

  def GenerateKey(self, i, values):
    """Generates a key_name to be used in creating the underlying object.

    The default implementation returns None.

    This method can be overridden to control the key generation for
    uploaded entities. The value returned should be None (to use a
    server generated numeric key), or a string which neither starts
    with a digit nor has the form __*__. (See
    http://code.google.com/appengine/docs/python/datastore/keysandentitygroups.html)

    If you generate your own string keys, keep in mind:

    1. The key name for each entity must be unique.
    2. If an entity of the same kind and key already exists in the
       datastore, it will be overwritten.

    Args:
      i: Number corresponding to this object (assume it's run in a loop,
        this is your current count.
      values: list/tuple of str.

    Returns:
      A string to be used as the key_name for an entity.
    """
    return None

  def HandleEntity(self, entity):
    """Subclasses can override this to add custom entity conversion code.

    This is called for each entity, after its properties are populated from
    CSV but before it is stored. Subclasses can override this to add custom
    entity handling code.

    The entity to be inserted should be returned. If multiple entities should
    be inserted, return a list of entities. If no entities should be inserted,
    return None or [].

    Args:
      entity: db.Model

    Returns:
      db.Model or list of db.Model
    """
    return entity

  @staticmethod
  def RegisteredLoaders():
    """Returns a dict mapping entity kind to the registered Loader instance.
    """
    return dict(Loader.__loaders)
class QueueJoinThread(threading.Thread):
  """A thread whose sole job is to join a queue, then exit.

  Queue joins offer no timeout of their own; to bound the wait, callers
  start this thread and join *it* with a timeout instead (see
  InterruptibleQueueJoin).
  """

  def __init__(self, queue):
    """Initialize a QueueJoinThread.

    Args:
      queue: The queue for this thread to join.
    """
    threading.Thread.__init__(self)
    self.queue = queue
    assert isinstance(queue, (Queue.Queue, ReQueue))

  def run(self):
    """Perform the queue join in this thread."""
    self.queue.join()
def InterruptibleQueueJoin(queue,
                           thread_local,
                           thread_gate,
                           queue_join_thread_factory=QueueJoinThread):
  """Joins a ReQueue or Queue.Queue while staying responsive to shutdown.

  A helper thread performs the (untimed) queue join; this function
  repeatedly joins that helper with a short timeout, checking for an
  interrupt flag and for dead worker threads between attempts.

  Args:
    queue: A Queue.Queue or ReQueue instance.
    thread_local: A threading.local instance which indicates interrupts.
    thread_gate: A ThreadGate instance.
    queue_join_thread_factory: Used for dependency injection.

  Returns:
    True unless the queue join is interrupted by SIGINT or worker death.
  """
  joiner = queue_join_thread_factory(queue)
  joiner.start()
  while True:
    joiner.join(timeout=.5)
    if not joiner.isAlive():
      # The queue join completed normally.
      return True
    if thread_local.shut_down:
      logging.debug('Queue join interrupted')
      return False
    # A dead worker can never call task_done() again, so the join
    # might block forever; give up in that case.
    dead_workers = [w for w in thread_gate.Threads() if not w.isAlive()]
    if dead_workers:
      return False
def ShutdownThreads(data_source_thread, work_queue, thread_gate):
  """Signals the data source and worker threads to shut down.

  Sets every thread's exit flag, releases any workers blocked on the
  thread gate, waits briefly for the data source thread to stop, and
  finally drains the work queue so nothing stays blocked on it.

  Args:
    data_source_thread: A running DataSourceThread instance.
    work_queue: The work queue.
    thread_gate: A ThreadGate instance with workers registered.
  """
  logging.info('An error occurred. Shutting down...')

  data_source_thread.exit_flag = True

  workers = thread_gate.Threads()
  for worker in workers:
    worker.exit_flag = True

  # Open the gate once per worker so none of them stay parked on it.
  for _ in workers:
    thread_gate.EnableThread()

  data_source_thread.join(timeout=3.0)
  if data_source_thread.isAlive():
    logging.warn('%s hung while trying to exit',
                 data_source_thread.GetFriendlyName())

  # Discard any remaining work so queue joins can complete.
  while not work_queue.empty():
    try:
      work_queue.get_nowait()
      work_queue.task_done()
    except Queue.Empty:
      pass
def PerformBulkUpload(app_id,
post_url,
kind,
workitem_generator_factory,
num_threads,
throttle,
progress_db,
max_queue_size=DEFAULT_QUEUE_SIZE,
request_manager_factory=RequestManager,
bulkloaderthread_factory=BulkLoaderThread,
progresstrackerthread_factory=ProgressTrackerThread,
datasourcethread_factory=DataSourceThread,
work_queue_factory=ReQueue,
progress_queue_factory=Queue.Queue):
"""Uploads data into an application using a series of HTTP POSTs.
This function will spin up a number of threads to read entities from
the data source, pass those to a number of worker ("uploader") threads
for sending to the application, and track all of the progress in a
small database in case an error or pause/termination requires a
restart/resumption of the upload process.
Args:
app_id: String containing application id.
post_url: URL to post the Entity data to.
kind: Kind of the Entity records being posted.
workitem_generator_factory: A factory that creates a WorkItem generator.
num_threads: How many uploader threads should be created.
throttle: A Throttle instance.
progress_db: The database to use for replaying/recording progress.
max_queue_size: Maximum size of the queues before they should block.
request_manager_factory: Used for dependency injection.
bulkloaderthread_factory: Used for dependency injection.
progresstrackerthread_factory: Used for dependency injection.
datasourcethread_factory: Used for dependency injection.
work_queue_factory: Used for dependency injection.
progress_queue_factory: Used for dependency injection.
Raises:
AuthenticationError: If authentication is required and fails.
"""
thread_gate = ThreadGate(True)
(unused_scheme,
host_port, url_path,
unused_query, unused_fragment) = urlparse.urlsplit(post_url)
work_queue = work_queue_factory(max_queue_size)
progress_queue = progress_queue_factory(max_queue_size)
request_manager = request_manager_factory(app_id,
host_port,
url_path,
kind,
throttle)
throttle.Register(threading.currentThread())
try:
request_manager.Authenticate()
except Exception, e:
logging.exception(e)
raise AuthenticationError('Authentication failed')
if (request_manager.credentials is not None and
not request_manager.authenticated):
raise AuthenticationError('Authentication failed')
for unused_idx in range(num_threads):
thread = bulkloaderthread_factory(work_queue,
throttle,
thread_gate,
request_manager)
throttle.Register(thread)
thread_gate.Register(thread)
progress_thread = progresstrackerthread_factory(progress_queue, progress_db)
if progress_db.HasUnfinishedWork():
logging.debug('Restarting upload using progress database')
progress_generator_factory = progress_db.GetProgressStatusGenerator
else:
progress_generator_factory = None
data_source_thread = datasourcethread_factory(work_queue,
progress_queue,
workitem_generator_factory,
progress_generator_factory)
thread_local = threading.local()
thread_local.shut_down = False
def Interrupt(unused_signum, unused_frame):
"""Shutdown gracefully in response to a signal."""
thread_local.shut_down = True
signal.signal(signal.SIGINT, Interrupt)
progress_thread.start()
data_source_thread.start()
for thread in thread_gate.Threads():
thread.start()
while not thread_local.shut_down:
data_source_thread.join(timeout=0.25)
if data_source_thread.isAlive():
for thread in list(thread_gate.Threads()) + [progress_thread]:
if not thread.isAlive():
logging.info('Unexpected thread death: %s', thread.getName())
thread_local.shut_down = True
break
else:
break
if thread_local.shut_down:
ShutdownThreads(data_source_thread, work_queue, thread_gate)
def _Join(ob, msg):
logging.debug('Waiting for %s...', msg)
if isinstance(ob, threading.Thread):
ob.join(timeout=3.0)
if ob.isAlive():
logging.debug('Joining %s failed', ob.GetFriendlyName())
else:
logging.debug('... done.')
elif isinstance(ob, (Queue.Queue, ReQueue)):
if not InterruptibleQueueJoin(ob, thread_local, thread_gate):
ShutdownThreads(data_source_thread, work_queue, thread_gate)
else:
ob.join()
logging.debug('... done.')
_Join(work_queue, 'work_queue to flush')
for unused_thread in thread_gate.Threads():
work_queue.put(_THREAD_SHOULD_EXIT)
for unused_thread in thread_gate.Threads():
thread_gate.EnableThread()
for thread in thread_gate.Threads():
_Join(thread, 'thread [%s] to terminate' % thread.getName())
thread.CheckError()
if progress_thread.isAlive():
_Join(progress_queue, 'progress_queue to finish')
else:
logging.warn('Progress thread exited prematurely')
progress_queue.put(_THREAD_SHOULD_EXIT)
_Join(progress_thread, 'progress_thread to terminate')
progress_thread.CheckError()
data_source_thread.CheckError()
total_up, duration = throttle.TotalTransferred(BANDWIDTH_UP)
s_total_up, unused_duration = throttle.TotalTransferred(HTTPS_BANDWIDTH_UP)
total_up += s_total_up
logging.info('%d entites read, %d previously transferred',
data_source_thread.read_count,
data_source_thread.sent_count)
logging.info('%d entities (%d bytes) transferred in %.1f seconds',
progress_thread.entities_sent, total_up, duration)
if (data_source_thread.read_all and
progress_thread.entities_sent + data_source_thread.sent_count >=
data_source_thread.read_count):
logging.info('All entities successfully uploaded')
else:
logging.info('Some entities not successfully uploaded')
def PrintUsageExit(code):
  """Prints usage information and exits with a status code.

  Args:
    code: Status code to pass to sys.exit() after displaying usage information.
  """
  # Equivalent to the print statement: the usage text plus a newline.
  sys.stdout.write(__doc__ % {'arg0': sys.argv[0]})
  sys.stdout.write('\n')
  sys.stdout.flush()
  sys.stderr.flush()
  sys.exit(code)
def ParseArguments(argv):
  """Parses command-line arguments.

  Prints out a help message if -h or --help is supplied.

  Args:
    argv: List of command-line arguments.

  Returns:
    The tuple returned by ProcessArguments: (app_id, url, filename,
    batch_size, kind, num_threads, bandwidth_limit, rps_limit,
    http_limit, db_filename, config_file, auth_domain), with each value
    taken from the corresponding command-line flag or its default.
  """
  opts, unused_args = getopt.getopt(
      argv[1:],
      'h',
      ['debug',
       'help',
       'url=',
       'filename=',
       'batch_size=',
       'kind=',
       'num_threads=',
       'bandwidth_limit=',
       'rps_limit=',
       'http_limit=',
       'db_filename=',
       'app_id=',
       'config_file=',
       'auth_domain=',
      ])

  # Flag defaults; overridden below by any flags that were supplied.
  url = None
  filename = None
  batch_size = DEFAULT_BATCH_SIZE
  kind = None
  num_threads = DEFAULT_THREAD_COUNT
  bandwidth_limit = DEFAULT_BANDWIDTH_LIMIT
  rps_limit = DEFAULT_RPS_LIMIT
  http_limit = DEFAULT_REQUEST_LIMIT
  db_filename = None
  app_id = None
  config_file = None
  auth_domain = 'gmail.com'

  for option, value in opts:
    if option == '--debug':
      logging.getLogger().setLevel(logging.DEBUG)
    elif option in ('-h', '--help'):
      PrintUsageExit(0)
    elif option == '--url':
      url = value
    elif option == '--filename':
      filename = value
    elif option == '--batch_size':
      batch_size = int(value)
    elif option == '--kind':
      kind = value
    elif option == '--num_threads':
      num_threads = int(value)
    elif option == '--bandwidth_limit':
      bandwidth_limit = int(value)
    elif option == '--rps_limit':
      rps_limit = int(value)
    elif option == '--http_limit':
      http_limit = int(value)
    elif option == '--db_filename':
      db_filename = value
    elif option == '--app_id':
      app_id = value
    elif option == '--config_file':
      config_file = value
    elif option == '--auth_domain':
      auth_domain = value

  # Validation and app_id inference are shared with the library entry
  # point Run() via ProcessArguments; a validation failure prints usage
  # and exits.
  return ProcessArguments(app_id=app_id,
                          url=url,
                          filename=filename,
                          batch_size=batch_size,
                          kind=kind,
                          num_threads=num_threads,
                          bandwidth_limit=bandwidth_limit,
                          rps_limit=rps_limit,
                          http_limit=http_limit,
                          db_filename=db_filename,
                          config_file=config_file,
                          auth_domain=auth_domain,
                          die_fn=lambda: PrintUsageExit(1))
def ThrottleLayout(bandwidth_limit, http_limit, rps_limit):
  """Builds the throttle-layout dict used to configure a Throttle.

  HTTPS bandwidth and request budgets are one fifth of the
  corresponding HTTP limits.

  Args:
    bandwidth_limit: Bytes per second allowed, up and down.
    http_limit: HTTP requests per second allowed.
    rps_limit: Records per second allowed.

  Returns:
    A dict mapping throttle bucket names to their limits.
  """
  https_bandwidth = bandwidth_limit / 5
  return {
      BANDWIDTH_UP: bandwidth_limit,
      BANDWIDTH_DOWN: bandwidth_limit,
      REQUESTS: http_limit,
      HTTPS_BANDWIDTH_UP: https_bandwidth,
      HTTPS_BANDWIDTH_DOWN: https_bandwidth,
      HTTPS_REQUESTS: http_limit / 5,
      RECORDS: rps_limit,
  }
def LoadConfig(config_file):
  """Loads a config file and registers any Loader classes present."""
  if config_file:
    # NOTE(review): execfile runs arbitrary code from config_file with a
    # copy of this module's globals; the config file is operator-supplied
    # and must be trusted.
    global_dict = dict(globals())
    execfile(config_file, global_dict)
    # __subclasses__ returns every direct Loader subclass, including any
    # just defined by the config file; register an instance of each.
    for cls in Loader.__subclasses__():
      Loader.RegisterLoader(cls())
def _MissingArgument(arg_name, die_fn):
"""Print error message about missing argument and die."""
print >>sys.stderr, '%s argument required' % arg_name
die_fn()
def ProcessArguments(app_id=None,
                     url=None,
                     filename=None,
                     batch_size=DEFAULT_BATCH_SIZE,
                     kind=None,
                     num_threads=DEFAULT_THREAD_COUNT,
                     bandwidth_limit=DEFAULT_BANDWIDTH_LIMIT,
                     rps_limit=DEFAULT_RPS_LIMIT,
                     http_limit=DEFAULT_REQUEST_LIMIT,
                     db_filename=None,
                     config_file=None,
                     auth_domain='gmail.com',
                     die_fn=lambda: sys.exit(1)):
  """Validates and normalizes non command-line input arguments.

  Fills in a timestamped default progress-database name, reports each
  missing required argument via die_fn, and infers app_id from the URL
  host for *.appspot.com and google.com domains when it was not given.

  Returns:
    A tuple (app_id, url, filename, batch_size, kind, num_threads,
    bandwidth_limit, rps_limit, http_limit, db_filename, config_file,
    auth_domain).
  """
  if db_filename is None:
    db_filename = time.strftime('bulkloader-progress-%Y%m%d.%H%M%S.sql3')

  if batch_size <= 0:
    sys.stderr.write('batch_size must be 1 or larger\n')
    die_fn()

  # Each missing required value is reported individually; die_fn may
  # return (as in tests), in which case checking simply continues.
  for required_name, required_value in (('url', url),
                                        ('filename', filename),
                                        ('kind', kind),
                                        ('config_file', config_file)):
    if required_value is None:
      _MissingArgument(required_name, die_fn)

  if app_id is None:
    (unused_scheme, host_port, unused_url_path,
     unused_query, unused_fragment) = urlparse.urlsplit(url)
    suffix_idx = host_port.find('.appspot.com')
    if suffix_idx > -1:
      # e.g. myapp.appspot.com -> myapp
      app_id = host_port[:suffix_idx]
    elif host_port.split(':')[0].endswith('google.com'):
      # e.g. myapp.hosted.google.com -> myapp
      app_id = host_port.split('.')[0]
    else:
      sys.stderr.write('app_id required for non appspot.com domains\n')
      die_fn()

  return (app_id, url, filename, batch_size, kind, num_threads,
          bandwidth_limit, rps_limit, http_limit, db_filename, config_file,
          auth_domain)
def _PerformBulkload(app_id=None,
                     url=None,
                     filename=None,
                     batch_size=DEFAULT_BATCH_SIZE,
                     kind=None,
                     num_threads=DEFAULT_THREAD_COUNT,
                     bandwidth_limit=DEFAULT_BANDWIDTH_LIMIT,
                     rps_limit=DEFAULT_RPS_LIMIT,
                     http_limit=DEFAULT_REQUEST_LIMIT,
                     db_filename=None,
                     config_file=None,
                     auth_domain='gmail.com'):
  """Runs the bulkloader, given the options as keyword arguments.

  Args:
    app_id: The application id.
    url: The url of the remote_api endpoint.
    filename: The name of the file containing the CSV data.
    batch_size: The number of records to send per request.
    kind: The kind of entity to transfer.
    num_threads: The number of threads to use to transfer data.
    bandwidth_limit: Maximum bytes/second to transfers.
    rps_limit: Maximum records/second to transfer.
    http_limit: Maximum requests/second for transfers.
    db_filename: The name of the SQLite3 progress database file.
    config_file: The name of the configuration file.
    auth_domain: The auth domain to use for logins and UserProperty.

  Returns:
    An exit code.
  """
  # Export the auth domain before LoadConfig executes user code from
  # config_file, so any code relying on AUTH_DOMAIN sees it.
  os.environ['AUTH_DOMAIN'] = auth_domain
  LoadConfig(config_file)

  throttle_layout = ThrottleLayout(bandwidth_limit, http_limit, rps_limit)
  throttle = Throttle(layout=throttle_layout)

  workitem_generator_factory = GetCSVGeneratorFactory(filename, batch_size)

  # The special name 'skip' disables progress persistence entirely.
  if db_filename == 'skip':
    progress_db = StubProgressDatabase()
  else:
    progress_db = ProgressDatabase(db_filename)

  # Scale the queue bound with the worker count (never below the default).
  max_queue_size = max(DEFAULT_QUEUE_SIZE, 2 * num_threads + 5)

  PerformBulkUpload(app_id,
                    url,
                    kind,
                    workitem_generator_factory,
                    num_threads,
                    throttle,
                    progress_db,
                    max_queue_size=max_queue_size)

  return 0
def Run(app_id=None,
        url=None,
        filename=None,
        batch_size=DEFAULT_BATCH_SIZE,
        kind=None,
        num_threads=DEFAULT_THREAD_COUNT,
        bandwidth_limit=DEFAULT_BANDWIDTH_LIMIT,
        rps_limit=DEFAULT_RPS_LIMIT,
        http_limit=DEFAULT_REQUEST_LIMIT,
        db_filename=None,
        auth_domain='gmail.com',
        config_file=None):
  """Sets up and runs the bulkloader, given the options as keyword arguments.

  Args:
    app_id: The application id.
    url: The url of the remote_api endpoint.
    filename: The name of the file containing the CSV data.
    batch_size: The number of records to send per request.
    kind: The kind of entity to transfer.
    num_threads: The number of threads to use to transfer data.
    bandwidth_limit: Maximum bytes/second to transfers.
    rps_limit: Maximum records/second to transfer.
    http_limit: Maximum requests/second for transfers.
    db_filename: The name of the SQLite3 progress database file.
    auth_domain: The auth domain to use for logins and UserProperty.
    config_file: The name of the configuration file.

  Returns:
    An exit code.
  """
  logging.basicConfig(
      format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')
  # Bug fix: auth_domain is now forwarded to ProcessArguments. It was
  # previously omitted, so the auth_domain unpacked from the returned
  # tuple (and used below) silently reverted to the 'gmail.com' default
  # regardless of the caller's argument. The command-line path in
  # ParseArguments already forwarded it.
  args = ProcessArguments(app_id=app_id,
                          url=url,
                          filename=filename,
                          batch_size=batch_size,
                          kind=kind,
                          num_threads=num_threads,
                          bandwidth_limit=bandwidth_limit,
                          rps_limit=rps_limit,
                          http_limit=http_limit,
                          db_filename=db_filename,
                          config_file=config_file,
                          auth_domain=auth_domain)

  (app_id, url, filename, batch_size, kind, num_threads, bandwidth_limit,
   rps_limit, http_limit, db_filename, config_file, auth_domain) = args

  return _PerformBulkload(app_id=app_id,
                          url=url,
                          filename=filename,
                          batch_size=batch_size,
                          kind=kind,
                          num_threads=num_threads,
                          bandwidth_limit=bandwidth_limit,
                          rps_limit=rps_limit,
                          http_limit=http_limit,
                          db_filename=db_filename,
                          config_file=config_file,
                          auth_domain=auth_domain)
def main(argv):
  """Runs the importer from the command line."""
  logging.basicConfig(
      level=logging.INFO,
      format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')

  args = ParseArguments(argv)
  # A None anywhere in the returned tuple indicates a required value is
  # still missing.
  if None in args:
    print >>sys.stderr, 'Invalid arguments'
    PrintUsageExit(1)

  (app_id, url, filename, batch_size, kind, num_threads,
   bandwidth_limit, rps_limit, http_limit, db_filename, config_file,
   auth_domain) = args

  return _PerformBulkload(app_id=app_id,
                          url=url,
                          filename=filename,
                          batch_size=batch_size,
                          kind=kind,
                          num_threads=num_threads,
                          bandwidth_limit=bandwidth_limit,
                          rps_limit=rps_limit,
                          http_limit=http_limit,
                          db_filename=db_filename,
                          config_file=config_file,
                          auth_domain=auth_domain)
# Command-line entry point; the process exit status is main's return
# value (0 on success, non-zero on failure).
if __name__ == '__main__':
  sys.exit(main(sys.argv))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""OS cross-platform compatibility tweaks.
This module will, on import, change some parts of the running evironment so
that other modules do not need special handling when running on different
operating systems, such as Linux/Mac OSX/Windows.
Some of these changes must be done before other modules are imported, so
always import this module first.
"""
# Force UTC *before* the time module is imported, so that time (and any
# module imported after this one) observes a UTC timezone. The module
# docstring notes this module must be imported before all others.
import os
os.environ['TZ'] = 'UTC'
import time
if hasattr(time, 'tzset'):
  # tzset is only available on some platforms (e.g. not on Windows);
  # apply the TZ environment change where it is supported.
  time.tzset()

import __builtin__

if 'WindowsError' in __builtin__.__dict__:
  # Running on Windows: re-export the builtin so other modules can
  # reference os_compat.WindowsError unconditionally.
  WindowsError = WindowsError
else:
  class WindowsError(Exception):
    """A fake Windows Error exception which should never be thrown."""

# Windows error code for "path not found", made available on all
# platforms so callers need not depend on the winerror module.
ERROR_PATH_NOT_FOUND = 3
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Utilities for generating and updating index.yaml."""
import os
import logging
from google.appengine.api import apiproxy_stub_map
from google.appengine.api import datastore_admin
from google.appengine.api import yaml_errors
from google.appengine.datastore import datastore_index
import yaml
# Marker line separating hand-maintained index definitions (above it)
# from the automatically maintained section (below it) in index.yaml.
AUTO_MARKER = '\n# AUTOGENERATED\n'

# Explanatory comment written into index.yaml immediately after
# AUTO_MARKER whenever the file is regenerated.
AUTO_COMMENT = '''
# This index.yaml is automatically updated whenever the dev_appserver
# detects that a new type of query is run. If you want to manage the
# index.yaml file manually, remove the above marker line (the line
# saying "# AUTOGENERATED"). If you want to manage some indexes
# manually, move them above the marker line. The index.yaml file is
# automatically uploaded to the admin console when you next deploy
# your application using appcfg.py.
'''
def GenerateIndexFromHistory(query_history,
                             all_indexes=None, manual_indexes=None):
  """Generate most of the text for index.yaml from the query history.

  Args:
    query_history: Query history, a dict mapping each query to the
      number of times it was run.
    all_indexes: Optional datastore_index.IndexDefinitions instance
      representing all the indexes found in the input file. May be None.
    manual_indexes: Optional datastore_index.IndexDefinitions instance
      containing indexes for which we should not generate output. May be None.

  Returns:
    A string representation that can safely be appended to an
    existing index.yaml file.
  """
  all_keys = datastore_index.IndexDefinitionsToKeys(all_indexes)
  manual_keys = datastore_index.IndexDefinitionsToKeys(manual_indexes)

  # Seed with every non-manual index from the input file at a use count
  # of zero; counts from the query history are accumulated below.
  indexes = dict((key, 0) for key in all_keys - manual_keys)

  for query, count in query_history.iteritems():
    required, kind, ancestor, props, num_eq_filters = datastore_index.CompositeIndexForQuery(query)
    if required:
      key = (kind, ancestor, props)
      # Never emit indexes the user maintains by hand.
      if key not in manual_keys:
        if key in indexes:
          indexes[key] += count
        else:
          indexes[key] = count

  # Emit a stable (sorted) listing, annotating each index with how
  # often the query history needed it.
  res = []
  for (kind, ancestor, props), count in sorted(indexes.iteritems()):
    res.append('')
    if count == 0:
      message = '# Unused in query history -- copied from input.'
    elif count == 1:
      message = '# Used once in query history.'
    else:
      message = '# Used %d times in query history.' % count
    res.append(message)
    res.append(datastore_index.IndexYamlForQuery(kind, ancestor, props))
  res.append('')

  return '\n'.join(res)
class IndexYamlUpdater(object):
"""Helper class for updating index.yaml.
This class maintains some state about the query history and the
index.yaml file in order to minimize the number of times index.yaml
is actually overwritten.
"""
index_yaml_is_manual = False
index_yaml_mtime = 0
last_history_size = 0
def __init__(self, root_path):
"""Constructor.
Args:
root_path: Path to the app's root directory.
"""
self.root_path = root_path
def UpdateIndexYaml(self, openfile=open):
"""Update index.yaml.
Args:
openfile: Used for dependency injection.
We only ever write to index.yaml if either:
- it doesn't exist yet; or
- it contains an 'AUTOGENERATED' comment.
All indexes *before* the AUTOGENERATED comment will be written
back unchanged. All indexes *after* the AUTOGENERATED comment
will be updated with the latest query counts (query counts are
reset by --clear_datastore). Indexes that aren't yet in the file
will be appended to the AUTOGENERATED section.
We keep track of some data in order to avoid doing repetitive work:
- if index.yaml is fully manual, we keep track of its mtime to
avoid parsing it over and over;
- we keep track of the number of keys in the history dict since
the last time we updated index.yaml (or decided there was
nothing to update).
"""
index_yaml_file = os.path.join(self.root_path, 'index.yaml')
try:
index_yaml_mtime = os.path.getmtime(index_yaml_file)
except os.error:
index_yaml_mtime = None
index_yaml_changed = (index_yaml_mtime != self.index_yaml_mtime)
self.index_yaml_mtime = index_yaml_mtime
datastore_stub = apiproxy_stub_map.apiproxy.GetStub('datastore_v3')
query_history = datastore_stub.QueryHistory()
history_changed = (len(query_history) != self.last_history_size)
self.last_history_size = len(query_history)
if not (index_yaml_changed or history_changed):
logging.debug('No need to update index.yaml')
return
if self.index_yaml_is_manual and not index_yaml_changed:
logging.debug('Will not update manual index.yaml')
return
if index_yaml_mtime is None:
index_yaml_data = None
else:
try:
fh = open(index_yaml_file, 'r')
except IOError:
index_yaml_data = None
else:
try:
index_yaml_data = fh.read()
finally:
fh.close()
self.index_yaml_is_manual = (index_yaml_data is not None and
AUTO_MARKER not in index_yaml_data)
if self.index_yaml_is_manual:
logging.info('Detected manual index.yaml, will not update')
return
if index_yaml_data is None:
all_indexes = None
else:
try:
all_indexes = datastore_index.ParseIndexDefinitions(index_yaml_data)
except yaml_errors.EventListenerError, e:
logging.error('Error parsing %s:\n%s', index_yaml_file, e)
return
except Exception, err:
logging.error('Error parsing %s:\n%s.%s: %s', index_yaml_file,
err.__class__.__module__, err.__class__.__name__, err)
return
if index_yaml_data is None:
manual_part, automatic_part = 'indexes:\n', ''
manual_indexes = None
else:
manual_part, automatic_part = index_yaml_data.split(AUTO_MARKER, 1)
try:
manual_indexes = datastore_index.ParseIndexDefinitions(manual_part)
except Exception, err:
logging.error('Error parsing manual part of %s: %s',
index_yaml_file, err)
return
automatic_part = GenerateIndexFromHistory(query_history,
all_indexes, manual_indexes)
try:
fh = openfile(index_yaml_file, 'w')
except IOError, err:
logging.error('Can\'t write index.yaml: %s', err)
return
try:
logging.info('Updating %s', index_yaml_file)
fh.write(manual_part)
fh.write(AUTO_MARKER)
fh.write(AUTO_COMMENT)
fh.write(automatic_part)
finally:
fh.close()
try:
self.index_yaml_mtime = os.path.getmtime(index_yaml_file)
except os.error, err:
logging.error('Can\'t stat index.yaml we just wrote: %s', err)
self.index_yaml_mtime = None
def SetupIndexes(app_id, root_path):
  """Ensure that the set of existing composite indexes matches index.yaml.

  Note: this is similar to the algorithm used by the admin console for
  the same purpose.

  Args:
    app_id: Application ID being served.
    root_path: Path to the root of the application.
  """
  index_yaml_path = os.path.join(root_path, 'index.yaml')

  # Read index.yaml; a missing or unreadable file means "no indexes".
  try:
    stream = open(index_yaml_path, 'r')
  except IOError:
    yaml_data = None
  else:
    try:
      yaml_data = stream.read()
    finally:
      stream.close()

  indexes = []
  if yaml_data is not None:
    definitions = datastore_index.ParseIndexDefinitions(yaml_data)
    if definitions is not None and definitions.indexes is not None:
      indexes = definitions.indexes

  # Key both sides by the encoded index definition so they can be diffed.
  requested_protos = datastore_admin.IndexDefinitionsToProtos(app_id, indexes)
  existing_protos = datastore_admin.GetIndices(app_id)
  requested = dict((proto.definition().Encode(), proto)
                   for proto in requested_protos)
  existing = dict((proto.definition().Encode(), proto)
                  for proto in existing_protos)

  # Create every requested index that does not yet exist, then delete
  # every existing index that is no longer requested.
  created = 0
  for encoded_key, proto in requested.iteritems():
    if encoded_key not in existing:
      datastore_admin.CreateIndex(proto)
      created += 1

  deleted = 0
  for encoded_key, proto in existing.iteritems():
    if encoded_key not in requested:
      datastore_admin.DeleteIndex(proto)
      deleted += 1

  if created or deleted:
    logging.info("Created %d and deleted %d index(es); total %d",
                 created, deleted, len(requested))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Imports CSV data over HTTP.
Usage:
%(arg0)s [flags]
--debug Show debugging information. (Optional)
--app_id=<string> Application ID of endpoint (Optional for
*.appspot.com)
--auth_domain=<domain> The auth domain to use for logging in and for
UserProperties. (Default: gmail.com)
--bandwidth_limit=<int> The maximum number of bytes per second for the
                        aggregate transfer of data to the server. Bursts
                        may exceed this, but overall transfer rate is
                        restricted to this rate. (Default 250000)
--batch_size=<int>      Number of Entity objects to include in each post to
                        the URL endpoint. The more data per row/Entity, the
                        smaller the batch size should be. (Default 10)
--config_file=<path>    File containing Model and Loader definitions.
                        (Required)
--db_filename=<path>    Specific progress database to write to, or to
                        resume from. If not supplied, then a new database
                        will be started, named:
                        bulkloader-progress-TIMESTAMP.
                        The special filename "skip" may be used to simply
                        skip reading/writing any progress information.
--filename=<path>       Path to the CSV file to import. (Required)
--http_limit=<int>      The maximum number of HTTP requests per second to
                        send to the server. (Default: 8)
--kind=<string>         Name of the Entity object kind to put in the
                        datastore. (Required)
--num_threads=<int>     Number of threads to use for uploading entities
                        (Default 10)
--rps_limit=<int> The maximum number of records per second to
transfer to the server. (Default: 20)
--url=<string> URL endpoint to post to for importing data.
(Required)
The exit status will be 0 on success, non-zero on import failure.
Works with the remote_api mix-in library for google.appengine.ext.remote_api.
Please look there for documentation about how to setup the server side.
Example:
%(arg0)s --url=http://app.appspot.com/remote_api --kind=Model \
--filename=data.csv --config_file=loader_config.py
"""
import csv
import getopt
import getpass
import logging
import new
import os
import Queue
import signal
import sys
import threading
import time
import traceback
import urllib2
import urlparse
from google.appengine.ext import db
from google.appengine.ext.remote_api import remote_api_stub
from google.appengine.tools import appengine_rpc
try:
import sqlite3
except ImportError:
pass
# Version tag for this uploader.
UPLOADER_VERSION = '1'

# Defaults for the tunable command-line flags (see ParseArguments).
DEFAULT_THREAD_COUNT = 10
DEFAULT_BATCH_SIZE = 10
DEFAULT_QUEUE_SIZE = DEFAULT_THREAD_COUNT * 10

# Sentinel placed on the work/progress queues to tell a consumer thread
# to exit (see PerformBulkUpload).
_THREAD_SHOULD_EXIT = '_THREAD_SHOULD_EXIT'

# Lifecycle states of a batch; StateMessage maps these to text.
STATE_READ = 0
STATE_SENDING = 1
STATE_SENT = 2
STATE_NOT_SENT = 3

# Minimum throttle sleep duration, in seconds (per the name; the
# consuming code lives elsewhere in this module).
MINIMUM_THROTTLE_SLEEP_DURATION = 0.001

# Progress-database state marker: all rows up to the recorded key were
# consumed by a previous run (see CSVGenerator.Batches).
DATA_CONSUMED_TO_HERE = 'DATA_CONSUMED_TO_HERE'

# Retry backoff tuning: initial delay (seconds) and growth factor, per
# the names; the retry loop lives elsewhere in this module.
INITIAL_BACKOFF = 1.0
BACKOFF_FACTOR = 2.0

# Default throttle limits; see the module docstring for the flags.
DEFAULT_BANDWIDTH_LIMIT = 250000
DEFAULT_RPS_LIMIT = 20
DEFAULT_REQUEST_LIMIT = 8

# Names of the individual throttle buckets.
BANDWIDTH_UP = 'http-bandwidth-up'
BANDWIDTH_DOWN = 'http-bandwidth-down'
REQUESTS = 'http-requests'
HTTPS_BANDWIDTH_UP = 'https-bandwidth-up'
HTTPS_BANDWIDTH_DOWN = 'https-bandwidth-down'
HTTPS_REQUESTS = 'https-requests'
RECORDS = 'records'
def StateMessage(state):
  """Converts a numeric state identifier to a status message."""
  messages = {
      STATE_READ: 'Batch read from file.',
      STATE_SENDING: 'Sending batch to server.',
      STATE_SENT: 'Batch successfully sent.',
      STATE_NOT_SENT: 'Error while sending batch.',
  }
  # Unknown states raise KeyError, matching the original lookup.
  return messages[state]
class Error(Exception):
  """Base class for all exceptions raised by this module."""
class FatalServerError(Error):
  """An unrecoverable error occurred while posting data to the server."""
class ResumeError(Error):
  """Error while trying to resume a partial upload.

  Raised when the data file disagrees with the progress database, e.g.
  a column-count mismatch or missing rows.
  """
class ConfigurationError(Error):
  """Error in the supplied configuration options."""
class AuthenticationError(Error):
  """Error while trying to authenticate with the server.

  Raised by PerformBulkUpload when authentication fails or is refused.
  """
def GetCSVGeneratorFactory(csv_filename, batch_size,
                           openfile=open, create_csv_reader=csv.reader):
  """Builds a factory that creates CSV-based WorkItem generators.

  Args:
    csv_filename: File on disk containing CSV data.
    batch_size: Maximum number of CSV rows to stash into a WorkItem.
    openfile: Used for dependency injection.
    create_csv_reader: Used for dependency injection.

  Returns:
    A callable which, given a progress queue and a progress generator,
    returns a CSVGenerator bound to this filename and batch size.
  """
  def MakeGenerator(progress_queue, progress_generator):
    """Creates a CSV generator linked to a progress generator and queue.

    Args:
      progress_queue: A ProgressQueue instance to send progress information.
      progress_generator: A generator of progress information or None.

    Returns:
      A CSVGenerator instance.
    """
    return CSVGenerator(progress_queue, progress_generator, csv_filename,
                        batch_size, openfile, create_csv_reader)

  return MakeGenerator
class CSVGenerator(object):
"""Reads a CSV file and generates WorkItems containing batches of records."""
def __init__(self,
progress_queue,
progress_generator,
csv_filename,
batch_size,
openfile,
create_csv_reader):
"""Initializes a CSV generator.
Args:
progress_queue: A queue used for tracking progress information.
progress_generator: A generator of prior progress information, or None
if there is no prior status.
csv_filename: File on disk containing CSV data.
batch_size: Maximum number of CSV rows to stash into a WorkItem.
openfile: Used for dependency injection of 'open'.
create_csv_reader: Used for dependency injection of 'csv.reader'.
"""
self.progress_queue = progress_queue
self.progress_generator = progress_generator
self.csv_filename = csv_filename
self.batch_size = batch_size
self.openfile = openfile
self.create_csv_reader = create_csv_reader
self.line_number = 1
self.column_count = None
self.read_rows = []
self.reader = None
self.row_count = 0
self.sent_count = 0
def _AdvanceTo(self, line):
"""Advance the reader to the given line.
Args:
line: A line number to advance to.
"""
while self.line_number < line:
self.reader.next()
self.line_number += 1
self.row_count += 1
self.sent_count += 1
def _ReadRows(self, key_start, key_end):
"""Attempts to read and encode rows [key_start, key_end].
The encoded rows are stored in self.read_rows.
Args:
key_start: The starting line number.
key_end: The ending line number.
Raises:
StopIteration: if the reader runs out of rows
ResumeError: if there are an inconsistent number of columns.
"""
assert self.line_number == key_start
self.read_rows = []
while self.line_number <= key_end:
row = self.reader.next()
self.row_count += 1
if self.column_count is None:
self.column_count = len(row)
else:
if self.column_count != len(row):
raise ResumeError('Column count mismatch, %d: %s' %
(self.column_count, str(row)))
self.read_rows.append((self.line_number, row))
self.line_number += 1
def _MakeItem(self, key_start, key_end, rows, progress_key=None):
"""Makes a WorkItem containing the given rows, with the given keys.
Args:
key_start: The start key for the WorkItem.
key_end: The end key for the WorkItem.
rows: A list of the rows for the WorkItem.
progress_key: The progress key for the WorkItem
Returns:
A WorkItem instance for the given batch.
"""
assert rows
item = WorkItem(self.progress_queue, rows,
key_start, key_end,
progress_key=progress_key)
return item
  def Batches(self):
    """Reads the CSV data file and generates WorkItems.

    Prior progress, if any, is replayed first: unfinished batches recorded
    in the progress database are re-read from the file and re-yielded, and
    rows marked as fully consumed are skipped. The remainder of the file is
    then read in batches of at most self.batch_size rows.

    Yields:
      Instances of class WorkItem

    Raises:
      ResumeError: If the progress database and data file indicate a different
        number of rows.
    """
    csv_file = self.openfile(self.csv_filename, 'r')
    csv_content = csv_file.read()
    if csv_content:
      has_headers = csv.Sniffer().has_header(csv_content)
    else:
      has_headers = False
    csv_file.seek(0)
    self.reader = self.create_csv_reader(csv_file, skipinitialspace=True)
    if has_headers:
      logging.info('The CSV file appears to have a header line, skipping.')
      self.reader.next()
    exhausted = False
    self.line_number = 1
    self.column_count = None
    logging.info('Starting import; maximum %d entities per post',
                 self.batch_size)
    state = None
    if self.progress_generator is not None:
      for progress_key, state, key_start, key_end in self.progress_generator:
        if key_start:
          try:
            # A recorded batch: re-read exactly those rows and re-yield
            # them under the same progress key.
            self._AdvanceTo(key_start)
            self._ReadRows(key_start, key_end)
            yield self._MakeItem(key_start,
                                 key_end,
                                 self.read_rows,
                                 progress_key=progress_key)
          except StopIteration:
            logging.error('Mismatch between data file and progress database')
            raise ResumeError(
                'Mismatch between data file and progress database')
        elif state == DATA_CONSUMED_TO_HERE:
          # Rows up to key_end were fully consumed on a prior run; skip
          # past them. Hitting EOF here is fine - clear state so the
          # fresh-read loop below does not run.
          try:
            self._AdvanceTo(key_end + 1)
          except StopIteration:
            state = None
    # Read any remaining (never-before-seen) rows in fixed-size batches.
    if self.progress_generator is None or state == DATA_CONSUMED_TO_HERE:
      while not exhausted:
        key_start = self.line_number
        key_end = self.line_number + self.batch_size - 1
        try:
          self._ReadRows(key_start, key_end)
        except StopIteration:
          # EOF mid-batch: self.read_rows holds the partial batch read so
          # far; shrink key_end to the last row actually read.
          exhausted = True
          key_end = self.line_number - 1
        if key_start <= key_end:
          yield self._MakeItem(key_start, key_end, self.read_rows)
class ReQueue(object):
  """A special thread-safe queue.

  A ReQueue allows unfinished work items to be returned with a call to
  reput().  When an item is reput, task_done() should *not* be called
  in addition, getting an item that has been reput does not increase
  the number of outstanding tasks.

  This class shares an interface with Queue.Queue and provides the
  additional Reput method.
  """

  def __init__(self,
               queue_capacity,
               requeue_capacity=None,
               queue_factory=Queue.Queue,
               get_time=time.time):
    """Initialize a ReQueue instance.

    Args:
      queue_capacity: The number of items that can be put in the ReQueue.
      requeue_capacity: The number of items that can be reput in the ReQueue.
      queue_factory: Used for dependency injection.
      get_time: Used for dependency injection.
    """
    if requeue_capacity is None:
      requeue_capacity = queue_capacity
    self.get_time = get_time
    # Two underlying queues: fresh items go to self.queue, returned
    # (reput) items go to self.requeue. get() drains requeue first.
    self.queue = queue_factory(queue_capacity)
    self.requeue = queue_factory(requeue_capacity)
    # Both condition variables share one lock; put_cond is notified when
    # an item is added (waking blocked getters), get_cond when an item is
    # removed (waking blocked putters).
    self.lock = threading.Lock()
    self.put_cond = threading.Condition(self.lock)
    self.get_cond = threading.Condition(self.lock)

  def _DoWithTimeout(self,
                     action,
                     exc,
                     wait_cond,
                     done_cond,
                     lock,
                     timeout=None,
                     block=True):
    """Performs the given action with a timeout.

    The action must be non-blocking, and raise an instance of exc on a
    recoverable failure. If the action fails with an instance of exc,
    we wait on wait_cond before trying again. Failure after the
    timeout is reached is propagated as an exception. Success is
    signalled by notifying on done_cond and returning the result of
    the action. If action raises any exception besides an instance of
    exc, it is immediately propagated.

    Args:
      action: A callable that performs a non-blocking action.
      exc: An exception type that is thrown by the action to indicate
        a recoverable error.
      wait_cond: A condition variable which should be waited on when
        action throws exc.
      done_cond: A condition variable to signal if the action returns.
      lock: The lock used by wait_cond and done_cond.
      timeout: A non-negative float indicating the maximum time to wait.
      block: Whether to block if the action cannot complete immediately.

    Returns:
      The result of the action, if it is successful.

    Raises:
      ValueError: If the timeout argument is negative.
    """
    if timeout is not None and timeout < 0.0:
      raise ValueError('\'timeout\' must not be a negative number')
    if not block:
      # Non-blocking behaves as a zero-length timeout: one attempt, no wait.
      timeout = 0.0
    result = None
    success = False
    start_time = self.get_time()
    lock.acquire()
    try:
      while not success:
        try:
          result = action()
          success = True
        except Exception, e:
          if not isinstance(e, exc):
            raise e
          if timeout is not None:
            # Shrink the remaining timeout by the time already elapsed;
            # give up once it runs out.
            elapsed_time = self.get_time() - start_time
            timeout -= elapsed_time
            if timeout <= 0.0:
              raise e
          # Wait until the opposite operation signals a state change,
          # then retry the action.
          wait_cond.wait(timeout)
    finally:
      # Notify before releasing: a successful put/get may unblock a
      # waiter on the other condition.
      if success:
        done_cond.notify()
      lock.release()
    return result

  def put(self, item, block=True, timeout=None):
    """Put an item into the requeue.

    Args:
      item: An item to add to the requeue.
      block: Whether to block if the requeue is full.
      timeout: Maximum on how long to wait until the queue is non-full.

    Raises:
      Queue.Full if the queue is full and the timeout expires.
    """

    def PutAction():
      self.queue.put(item, block=False)
    self._DoWithTimeout(PutAction,
                        Queue.Full,
                        self.get_cond,
                        self.put_cond,
                        self.lock,
                        timeout=timeout,
                        block=block)

  def reput(self, item, block=True, timeout=None):
    """Re-put an item back into the requeue.

    Re-putting an item does not increase the number of outstanding
    tasks, so the reput item should be uniquely associated with an
    item that was previously removed from the requeue and for which
    task_done has not been called.

    Args:
      item: An item to add to the requeue.
      block: Whether to block if the requeue is full.
      timeout: Maximum on how long to wait until the queue is non-full.

    Raises:
      Queue.Full if the queue is full and the timeout expires.
    """

    def ReputAction():
      self.requeue.put(item, block=False)
    self._DoWithTimeout(ReputAction,
                        Queue.Full,
                        self.get_cond,
                        self.put_cond,
                        self.lock,
                        timeout=timeout,
                        block=block)

  def get(self, block=True, timeout=None):
    """Get an item from the requeue.

    Reput items are returned in preference to fresh items, and taking a
    reput item immediately marks it done on the requeue so that only the
    main queue tracks outstanding tasks.

    Args:
      block: Whether to block if the requeue is empty.
      timeout: Maximum on how long to wait until the requeue is non-empty.

    Returns:
      An item from the requeue.

    Raises:
      Queue.Empty if the queue is empty and the timeout expires.
    """

    def GetAction():
      try:
        result = self.requeue.get(block=False)
        self.requeue.task_done()
      except Queue.Empty:
        result = self.queue.get(block=False)
      return result
    return self._DoWithTimeout(GetAction,
                               Queue.Empty,
                               self.put_cond,
                               self.get_cond,
                               self.lock,
                               timeout=timeout,
                               block=block)

  def join(self):
    """Blocks until all of the items in the requeue have been processed."""
    self.queue.join()

  def task_done(self):
    """Indicate that a previously enqueued item has been fully processed."""
    self.queue.task_done()

  def empty(self):
    """Returns true if the requeue is empty."""
    return self.queue.empty() and self.requeue.empty()

  def get_nowait(self):
    """Try to get an item from the queue without blocking."""
    return self.get(block=False)
class ThrottleHandler(urllib2.BaseHandler):
  """A urllib2 handler for http and https requests that adds to a throttle."""

  def __init__(self, throttle):
    """Initialize a ThrottleHandler.

    Args:
      throttle: A Throttle instance to call for bandwidth and http/https request
        throttling.
    """
    self.throttle = throttle

  def AddRequest(self, throttle_name, req):
    """Add to bandwidth throttle for given request.

    The estimate covers the request line, all headers, and the body.

    Args:
      throttle_name: The name of the bandwidth throttle to add to.
      req: The request whose size will be added to the throttle.
    """
    size = sum(len('%s: %s\n' % (key, value))
               for key, value in req.headers.iteritems())
    size += sum(len('%s: %s\n' % (key, value))
                for key, value in req.unredirected_hdrs.iteritems())
    url_path = urlparse.urlsplit(req.get_full_url())[2]
    size += len('%s %s HTTP/1.1\n' % (req.get_method(), url_path))
    data = req.get_data()
    if data:
      size += len(data)
    self.throttle.AddTransfer(throttle_name, size)

  def AddResponse(self, throttle_name, res):
    """Add to bandwidth throttle for given response.

    Reads the whole body once, then re-installs it on res.read so later
    callers still see the content. The estimate covers body and headers.

    Args:
      throttle_name: The name of the bandwidth throttle to add to.
      res: The response whose size will be added to the throttle.
    """
    content = res.read()
    res.read = lambda: content
    size = len(content)
    size += sum(len('%s: %s\n' % (key, value))
                for key, value in res.info().items())
    self.throttle.AddTransfer(throttle_name, size)

  def http_request(self, req):
    """Process an HTTP request.

    If the throttle is over quota, sleep first. Then add request size to
    throttle before returning it to be sent.

    Args:
      req: A urllib2.Request object.

    Returns:
      The request passed in.
    """
    self.throttle.Sleep()
    self.AddRequest(BANDWIDTH_UP, req)
    return req

  def https_request(self, req):
    """Process an HTTPS request.

    If the throttle is over quota, sleep first. Then add request size to
    throttle before returning it to be sent.

    Args:
      req: A urllib2.Request object.

    Returns:
      The request passed in.
    """
    self.throttle.Sleep()
    self.AddRequest(HTTPS_BANDWIDTH_UP, req)
    return req

  def http_response(self, unused_req, res):
    """Process an HTTP response.

    The size of the response is added to the bandwidth throttle and the
    request throttle is incremented by one.

    Args:
      unused_req: The urllib2 request for this response.
      res: A urllib2 response object.

    Returns:
      The response passed in.
    """
    self.AddResponse(BANDWIDTH_DOWN, res)
    self.throttle.AddTransfer(REQUESTS, 1)
    return res

  def https_response(self, unused_req, res):
    """Process an HTTPS response.

    The size of the response is added to the bandwidth throttle and the
    request throttle is incremented by one.

    Args:
      unused_req: The urllib2 request for this response.
      res: A urllib2 response object.

    Returns:
      The response passed in.
    """
    self.AddResponse(HTTPS_BANDWIDTH_DOWN, res)
    self.throttle.AddTransfer(HTTPS_REQUESTS, 1)
    return res
class ThrottledHttpRpcServer(appengine_rpc.HttpRpcServer):
  """Provides a simplified RPC-style interface for HTTP requests.

  This RPC server uses a Throttle to prevent exceeding quotas.
  """

  def __init__(self, throttle, request_manager, *args, **kwargs):
    """Initialize a ThrottledHttpRpcServer.

    Also sets request_manager.rpc_server to the ThrottledHttpRpcServer instance.

    Args:
      throttle: A Throttles instance.
      request_manager: A RequestManager instance.
      args: Positional arguments to pass through to
        appengine_rpc.HttpRpcServer.__init__
      kwargs: Keyword arguments to pass through to
        appengine_rpc.HttpRpcServer.__init__
    """
    # Stash the throttle before the base __init__ so _GetOpener (which
    # the base class may invoke) can already reach it.
    self.throttle = throttle
    appengine_rpc.HttpRpcServer.__init__(self, *args, **kwargs)
    # Give the request manager a back-reference for issuing RPCs.
    request_manager.rpc_server = self

  def _GetOpener(self):
    """Returns an OpenerDirector that supports cookies and ignores redirects.

    Returns:
      A urllib2.OpenerDirector object.
    """
    director = appengine_rpc.HttpRpcServer._GetOpener(self)
    # Route every http/https request and response through the throttle.
    director.add_handler(ThrottleHandler(self.throttle))
    return director
def ThrottledHttpRpcServerFactory(throttle, request_manager):
  """Create a factory to produce ThrottledHttpRpcServer for a given throttle.

  Args:
    throttle: A Throttle instance to use for the ThrottledHttpRpcServer.
    request_manager: A RequestManager instance.

  Returns:
    A factory to produce a ThrottledHttpRpcServer.
  """

  def MakeRpcServer(*args, **kwargs):
    """Builds a ThrottledHttpRpcServer bound to the captured throttle."""
    kwargs.update(account_type='HOSTED_OR_GOOGLE', save_cookies=True)
    return ThrottledHttpRpcServer(throttle, request_manager, *args, **kwargs)

  return MakeRpcServer
class RequestManager(object):
"""A class which wraps a connection to the server."""
source = 'google-bulkloader-%s' % UPLOADER_VERSION
user_agent = source
def __init__(self,
app_id,
host_port,
url_path,
kind,
throttle):
"""Initialize a RequestManager object.
Args:
app_id: String containing the application id for requests.
host_port: String containing the "host:port" pair; the port is optional.
url_path: partial URL (path) to post entity data to.
kind: Kind of the Entity records being posted.
throttle: A Throttle instance.
"""
self.app_id = app_id
self.host_port = host_port
self.host = host_port.split(':')[0]
if url_path and url_path[0] != '/':
url_path = '/' + url_path
self.url_path = url_path
self.kind = kind
self.throttle = throttle
self.credentials = None
throttled_rpc_server_factory = ThrottledHttpRpcServerFactory(
self.throttle, self)
logging.debug('Configuring remote_api. app_id = %s, url_path = %s, '
'servername = %s' % (app_id, url_path, host_port))
remote_api_stub.ConfigureRemoteDatastore(
app_id,
url_path,
self.AuthFunction,
servername=host_port,
rpc_server_factory=throttled_rpc_server_factory)
self.authenticated = False
def Authenticate(self):
"""Invoke authentication if necessary."""
self.rpc_server.Send(self.url_path, payload=None)
self.authenticated = True
def AuthFunction(self,
raw_input_fn=raw_input,
password_input_fn=getpass.getpass):
"""Prompts the user for a username and password.
Caches the results the first time it is called and returns the
same result every subsequent time.
Args:
raw_input_fn: Used for dependency injection.
password_input_fn: Used for dependency injection.
Returns:
A pair of the username and password.
"""
if self.credentials is not None:
return self.credentials
print 'Please enter login credentials for %s (%s)' % (
self.host, self.app_id)
email = raw_input_fn('Email: ')
if email:
password_prompt = 'Password for %s: ' % email
password = password_input_fn(password_prompt)
else:
password = None
self.credentials = (email, password)
return self.credentials
def _GetHeaders(self):
"""Constructs a dictionary of extra headers to send with a request."""
headers = {
'GAE-Uploader-Version': UPLOADER_VERSION,
'GAE-Uploader-Kind': self.kind
}
return headers
def EncodeContent(self, rows):
"""Encodes row data to the wire format.
Args:
rows: A list of pairs of a line number and a list of column values.
Returns:
A list of db.Model instances.
"""
try:
loader = Loader.RegisteredLoaders()[self.kind]
except KeyError:
logging.error('No Loader defined for kind %s.' % self.kind)
raise ConfigurationError('No Loader defined for kind %s.' % self.kind)
entities = []
for line_number, values in rows:
key = loader.GenerateKey(line_number, values)
entity = loader.CreateEntity(values, key_name=key)
entities.extend(entity)
return entities
def PostEntities(self, item):
"""Posts Entity records to a remote endpoint over HTTP.
Args:
item: A workitem containing the entities to post.
Returns:
A pair of the estimated size of the request in bytes and the response
from the server as a str.
"""
entities = item.content
db.put(entities)
class WorkItem(object):
  """Holds a unit of uploading work.

  A WorkItem represents a number of entities that need to be uploaded to
  Google App Engine. These entities are encoded in the "content" field of
  the WorkItem, and will be POST'd as-is to the server.

  The entities are identified by a range of numeric keys, inclusively. In
  the case of a resumption of an upload, or a replay to correct errors,
  these keys must be able to identify the same set of entities.

  Note that keys specify a range. The entities do not have to sequentially
  fill the entire range, they must simply bound a range of valid keys.
  """

  def __init__(self, progress_queue, rows, key_start, key_end,
               progress_key=None):
    """Initialize the WorkItem instance.

    Args:
      progress_queue: A queue used for tracking progress information.
      rows: A list of pairs of a line number and a list of column values
      key_start: The (numeric) starting key, inclusive.
      key_end: The (numeric) ending key, inclusive.
      progress_key: If this WorkItem represents state from a prior run,
        then this will be the key within the progress database.
    """
    assert isinstance(key_start, (int, long))
    assert isinstance(key_end, (int, long))
    assert key_start <= key_end

    self.state = STATE_READ
    self.progress_queue = progress_queue
    self.key_start = key_start
    self.key_end = key_end
    self.progress_key = progress_key
    self.progress_event = threading.Event()
    self.rows = rows
    self.content = None
    self.count = len(rows)

  def MarkAsRead(self):
    """Mark this WorkItem as read/consumed from the data source."""
    assert self.state == STATE_READ
    self._StateTransition(STATE_READ, blocking=True)
    assert self.progress_key is not None

  def MarkAsSending(self):
    """Mark this WorkItem as in-process on being uploaded to the server."""
    assert self.state in (STATE_READ, STATE_NOT_SENT)
    assert self.progress_key is not None
    self._StateTransition(STATE_SENDING, blocking=True)

  def MarkAsSent(self):
    """Mark this WorkItem as successfully-sent to the server."""
    assert self.state == STATE_SENDING
    assert self.progress_key is not None
    self._StateTransition(STATE_SENT, blocking=False)

  def MarkAsError(self):
    """Mark this WorkItem as requiring manual error recovery."""
    assert self.state == STATE_SENDING
    assert self.progress_key is not None
    self._StateTransition(STATE_NOT_SENT, blocking=True)

  def _StateTransition(self, new_state, blocking=False):
    """Transition the work item to a new state, storing progress information.

    Args:
      new_state: The state to transition to.
      blocking: Whether to block for the progress thread to acknowledge the
        transition.
    """
    logging.debug('[%s-%s] %s',
                  self.key_start, self.key_end, StateMessage(self.state))
    assert not self.progress_event.isSet()

    self.state = new_state
    self.progress_queue.put(self)

    if blocking:
      # Wait for the progress thread to signal that the new state was
      # recorded, then reset the event for the next transition.
      self.progress_event.wait()
      self.progress_event.clear()
def InterruptibleSleep(sleep_time):
  """Puts thread to sleep, checking this thread's exit_flag twice a second.

  Args:
    sleep_time: Time to sleep.
  """
  epsilon = .0001
  thread = threading.currentThread()
  remaining = sleep_time
  while remaining > epsilon:
    # Sleep in half-second slices so the exit_flag is polled regularly.
    interval = min(remaining, 0.5)
    time.sleep(interval)
    remaining -= interval
    if thread.exit_flag:
      return
class ThreadGate(object):
  """Manage the number of active worker threads.

  The ThreadGate limits the number of threads that are simultaneously
  uploading batches of records in order to implement adaptive rate
  control. The number of simultaneous upload threads that it takes to
  start causing timeout varies widely over the course of the day, so
  adaptive rate control allows the uploader to do many uploads while
  reducing the error rate and thus increasing the throughput.

  Initially the ThreadGate allows only one uploader thread to be active.
  For each successful upload, another thread is activated and for each
  failed upload, the number of active threads is reduced by one.
  """

  def __init__(self, enabled, sleep=InterruptibleSleep):
    """Initialize a ThreadGate.

    Args:
      enabled: Whether thread gating/throttling is active.
      sleep: Used for dependency injection of the sleep function.
    """
    self.enabled = enabled
    # enabled_count tracks how many semaphore permits exist; it is only
    # mutated under self.lock, while the semaphore itself enforces the
    # limit in StartWork/FinishWork.
    self.enabled_count = 1
    self.lock = threading.Lock()
    self.thread_semaphore = threading.Semaphore(self.enabled_count)
    self._threads = []
    # backoff_time > 0 means the last upload(s) failed with only one
    # active thread; StartWork will then sleep before proceeding.
    self.backoff_time = 0
    self.sleep = sleep

  def Register(self, thread):
    """Register a thread with the thread gate."""
    self._threads.append(thread)

  def Threads(self):
    """Yields the registered threads."""
    for thread in self._threads:
      yield thread

  def EnableThread(self):
    """Enable one more worker thread."""
    self.lock.acquire()
    try:
      self.enabled_count += 1
    finally:
      self.lock.release()
    # Release outside the lock: adds one permit to the semaphore.
    self.thread_semaphore.release()

  def EnableAllThreads(self):
    """Enable all worker threads."""
    for unused_idx in range(len(self._threads) - self.enabled_count):
      self.EnableThread()

  def StartWork(self):
    """Starts a critical section in which the number of workers is limited.

    If thread throttling is enabled then this method starts a critical
    section which allows self.enabled_count simultaneously operating
    threads. The critical section is ended by calling self.FinishWork().
    """
    if self.enabled:
      self.thread_semaphore.acquire()
      if self.backoff_time > 0.0:
        if not threading.currentThread().exit_flag:
          logging.info('Backing off: %.1f seconds',
                       self.backoff_time)
        self.sleep(self.backoff_time)

  def FinishWork(self):
    """Ends a critical section started with self.StartWork()."""
    if self.enabled:
      self.thread_semaphore.release()

  def IncreaseWorkers(self):
    """Informs the throttler that an item was successfully sent.

    If thread throttling is enabled, this method will cause an
    additional thread to run in the critical section.
    """
    if self.enabled:
      # A success clears any exponential backoff.
      if self.backoff_time > 0.0:
        logging.info('Resetting backoff to 0.0')
        self.backoff_time = 0.0
      do_enable = False
      self.lock.acquire()
      try:
        # Only add a permit if there is a registered thread not yet enabled.
        if self.enabled and len(self._threads) > self.enabled_count:
          do_enable = True
          self.enabled_count += 1
      finally:
        self.lock.release()
      # Release the semaphore outside the lock to avoid holding it while
      # waking a waiter.
      if do_enable:
        self.thread_semaphore.release()

  def DecreaseWorkers(self):
    """Informs the thread_gate that an item failed to send.

    If thread throttling is enabled, this method will cause the
    throttler to allow one fewer thread in the critical section. If
    there is only one thread remaining, failures will result in
    exponential backoff until there is a success.
    """
    if self.enabled:
      do_disable = False
      self.lock.acquire()
      try:
        if self.enabled:
          if self.enabled_count > 1:
            do_disable = True
            self.enabled_count -= 1
          else:
            # Down to a single worker: grow the backoff instead of
            # removing the last permit.
            if self.backoff_time == 0.0:
              self.backoff_time = INITIAL_BACKOFF
            else:
              self.backoff_time *= BACKOFF_FACTOR
      finally:
        self.lock.release()
      # Acquire outside the lock: this may block until a worker leaves
      # its critical section, effectively removing one permit.
      if do_disable:
        self.thread_semaphore.acquire()
class Throttle(object):
  """A base class for upload rate throttling.

  Transferring large number of records, too quickly, to an application
  could trigger quota limits and cause the transfer process to halt.
  In order to stay within the application's quota, we throttle the
  data transfer to a specified limit (across all transfer threads).
  This limit defaults to about half of the Google App Engine default
  for an application, but can be manually adjusted faster/slower as
  appropriate.

  This class tracks a moving average of some aspect of the transfer
  rate (bandwidth, records per second, http connections per
  second). It keeps two windows of counts of bytes transferred, on a
  per-thread basis. One block is the "current" block, and the other is
  the "prior" block. It will rotate the counts from current to prior
  when ROTATE_PERIOD has passed. Thus, the current block will
  represent from 0 seconds to ROTATE_PERIOD seconds of activity
  (determined by: time.time() - self.last_rotate). The prior block
  will always represent a full ROTATE_PERIOD.

  Sleeping is performed just before a transfer of another block, and is
  based on the counts transferred *before* the next transfer. It really
  does not matter how much will be transferred, but only that for all the
  data transferred SO FAR that we have interspersed enough pauses to
  ensure the aggregate transfer rate is within the specified limit.

  These counts are maintained on a per-thread basis, so we do not require
  any interlocks around incrementing the counts. There IS an interlock on
  the rotation of the counts because we do not want multiple threads to
  multiply-rotate the counts.

  There are various race conditions in the computation and collection
  of these counts. We do not require precise values, but simply to
  keep the overall transfer within the bandwidth limits. If a given
  pause is a little short, or a little long, then the aggregate delays
  will be correct.
  """

  # Seconds between rotations of the current/prior count windows.
  ROTATE_PERIOD = 600

  def __init__(self,
               get_time=time.time,
               thread_sleep=InterruptibleSleep,
               layout=None):
    """Initialize a Throttle.

    Args:
      get_time: Used for dependency injection of the clock.
      thread_sleep: Used for dependency injection of the sleep function.
      layout: Optional dict mapping throttle names to limits; each entry
        is registered via AddThrottles.
    """
    self.get_time = get_time
    self.thread_sleep = thread_sleep
    self.start_time = get_time()
    # Per-throttle dicts keyed by throttle name; inner dicts are keyed
    # by thread name (see Register / AddTransfer).
    self.transferred = {}
    self.prior_block = {}
    self.totals = {}
    self.throttles = {}
    self.last_rotate = {}
    self.rotate_mutex = {}
    if layout:
      self.AddThrottles(layout)

  def AddThrottle(self, name, limit):
    """Register a named throttle with the given limit.

    Args:
      name: The name of the throttle.
      limit: The limit for this throttle; per the computation in Sleep,
        this is interpreted as tokens per second.
    """
    self.throttles[name] = limit
    self.transferred[name] = {}
    self.prior_block[name] = {}
    self.totals[name] = {}
    self.last_rotate[name] = self.get_time()
    self.rotate_mutex[name] = threading.Lock()

  def AddThrottles(self, layout):
    """Register every (name, limit) pair of the given layout dict."""
    for key, value in layout.iteritems():
      self.AddThrottle(key, value)

  def Register(self, thread):
    """Register this thread with the throttler."""
    thread_name = thread.getName()
    for throttle_name in self.throttles.iterkeys():
      self.transferred[throttle_name][thread_name] = 0
      self.prior_block[throttle_name][thread_name] = 0
      self.totals[throttle_name][thread_name] = 0

  def VerifyName(self, throttle_name):
    """Raises AssertionError if throttle_name was never registered."""
    if throttle_name not in self.throttles:
      raise AssertionError('%s is not a registered throttle' % throttle_name)

  def AddTransfer(self, throttle_name, token_count):
    """Add a count to the amount this thread has transferred.

    Each time a thread transfers some data, it should call this method to
    note the amount sent. The counts may be rotated if sufficient time
    has passed since the last rotation.

    Note: this method should only be called by the BulkLoaderThread
    instances. The token count is allocated towards the
    "current thread".

    Args:
      throttle_name: The name of the throttle to add to.
      token_count: The number to add to the throttle counter.
    """
    self.VerifyName(throttle_name)
    transferred = self.transferred[throttle_name]
    transferred[threading.currentThread().getName()] += token_count

    # Opportunistically rotate the windows if a full period has elapsed.
    if self.last_rotate[throttle_name] + self.ROTATE_PERIOD < self.get_time():
      self._RotateCounts(throttle_name)

  def Sleep(self, throttle_name=None):
    """Possibly sleep in order to limit the transfer rate.

    Note that we sleep based on *prior* transfers rather than what we
    may be about to transfer. The next transfer could put us under/over
    and that will be rectified *after* that transfer. Net result is that
    the average transfer rate will remain within bounds. Spiky behavior
    or uneven rates among the threads could possibly bring the transfer
    rate above the requested limit for short durations.

    Args:
      throttle_name: The name of the throttle to sleep on.  If None or
        omitted, then sleep on all throttles.
    """
    if throttle_name is None:
      for throttle_name in self.throttles:
        self.Sleep(throttle_name=throttle_name)
      return

    self.VerifyName(throttle_name)

    thread = threading.currentThread()

    while True:
      duration = self.get_time() - self.last_rotate[throttle_name]

      # Sum the prior window; if it is non-empty, the observation window
      # covers that full period plus the current partial one, and the
      # current counts are added in as well.
      total = 0
      for count in self.prior_block[throttle_name].values():
        total += count

      if total:
        duration += self.ROTATE_PERIOD

        for count in self.transferred[throttle_name].values():
          total += count

      # Time the total *should* have taken at the limit, minus the time
      # it actually took so far, is how long we still need to pause.
      sleep_time = (float(total) / self.throttles[throttle_name]) - duration

      if sleep_time < MINIMUM_THROTTLE_SLEEP_DURATION:
        break

      logging.debug('[%s] Throttling on %s. Sleeping for %.1f ms '
                    '(duration=%.1f ms, total=%d)',
                    thread.getName(), throttle_name,
                    sleep_time * 1000, duration * 1000, total)
      self.thread_sleep(sleep_time)
      if thread.exit_flag:
        break
      self._RotateCounts(throttle_name)

  def _RotateCounts(self, throttle_name):
    """Rotate the transfer counters.

    If sufficient time has passed, then rotate the counters from active to
    the prior-block of counts.

    This rotation is interlocked to ensure that multiple threads do not
    over-rotate the counts.

    Args:
      throttle_name: The name of the throttle to rotate.
    """
    self.VerifyName(throttle_name)
    self.rotate_mutex[throttle_name].acquire()
    try:
      # Re-check under the mutex: another thread may have just rotated.
      next_rotate_time = self.last_rotate[throttle_name] + self.ROTATE_PERIOD
      if next_rotate_time >= self.get_time():
        return

      for name, count in self.transferred[throttle_name].items():


        self.prior_block[throttle_name][name] = count
        self.transferred[throttle_name][name] = 0

        self.totals[throttle_name][name] += count

      self.last_rotate[throttle_name] = self.get_time()

    finally:
      self.rotate_mutex[throttle_name].release()

  def TotalTransferred(self, throttle_name):
    """Return the total transferred, and over what period.

    Args:
      throttle_name: The name of the throttle to total.

    Returns:
      A tuple of the total count and running time for the given throttle name.
    """
    total = 0
    for count in self.totals[throttle_name].values():
      total += count
    for count in self.transferred[throttle_name].values():
      total += count
    return total, self.get_time() - self.start_time
class _ThreadBase(threading.Thread):
  """Provide some basic features for the threads used in the uploader.

  This abstract base class is used to provide some common features:

  * Flag to ask thread to exit as soon as possible.
  * Record exit/error status for the primary thread to pick up.
  * Capture exceptions and record them for pickup.
  * Some basic logging of thread start/stop.
  * All threads are "daemon" threads.
  * Friendly names for presenting to users.

  Concrete sub-classes must implement PerformWork().

  Either self.NAME should be set or GetFriendlyName() be overridden to
  return a human-friendly name for this thread.

  The run() method starts the thread and prints start/exit messages.

  self.exit_flag is intended to signal that this thread should exit
  when it gets the chance. PerformWork() should check self.exit_flag
  whenever it has the opportunity to exit gracefully.
  """

  def __init__(self):
    threading.Thread.__init__(self)
    # Daemonize so a hung worker cannot keep the process alive.
    self.daemon = True
    self.exit_flag = False
    self.error = None

  def run(self):
    """Perform the work of the thread."""
    kind = self.__class__.__name__
    logging.info('[%s] %s: started', self.getName(), kind)
    try:
      self.PerformWork()
    except:
      # Deliberately catch everything: record the error for the primary
      # thread to inspect via CheckError() rather than dying silently.
      self.error = sys.exc_info()[1]
      logging.exception('[%s] %s:', self.getName(), kind)
    logging.info('[%s] %s: exiting', self.getName(), kind)

  def PerformWork(self):
    """Perform the thread-specific work."""
    raise NotImplementedError()

  def CheckError(self):
    """If an error is present, then log it."""
    if self.error:
      logging.error('Error in %s: %s', self.GetFriendlyName(), self.error)

  def GetFriendlyName(self):
    """Returns a human-friendly description of the thread."""
    return getattr(self, 'NAME', 'unknown thread')
class BulkLoaderThread(_ThreadBase):
  """A thread which transmits entities to the server application.

  This thread will read WorkItem instances from the work_queue and upload
  the entities to the server application. Progress information will be
  pushed into the progress_queue as the work is being performed.

  If a BulkLoaderThread encounters a transient error, the entities will be
  resent, if a fatal error is encoutered the BulkLoaderThread exits.
  """

  def __init__(self,
               work_queue,
               throttle,
               thread_gate,
               request_manager):
    """Initialize the BulkLoaderThread instance.

    Args:
      work_queue: A queue containing WorkItems for processing.
      throttle: A Throttles to control upload bandwidth.
      thread_gate: A ThreadGate to control number of simultaneous uploads.
      request_manager: A RequestManager instance.
    """
    _ThreadBase.__init__(self)

    self.work_queue = work_queue
    self.throttle = throttle
    self.thread_gate = thread_gate
    self.request_manager = request_manager

  def PerformWork(self):
    """Perform the work of a BulkLoaderThread."""
    while not self.exit_flag:
      success = False
      # Enter the gated critical section; limits concurrent uploads and
      # applies any backoff sleep.
      self.thread_gate.StartWork()
      try:
        # Short timeout so exit_flag is re-checked about once a second.
        try:
          item = self.work_queue.get(block=True, timeout=1.0)
        except Queue.Empty:
          continue
        if item == _THREAD_SHOULD_EXIT:
          break

        logging.debug('[%s] Got work item [%d-%d]',
                      self.getName(), item.key_start, item.key_end)

        try:

          item.MarkAsSending()
          try:
            # Lazily encode the rows on first send; a reput item keeps
            # its already-encoded content.
            if item.content is None:
              item.content = self.request_manager.EncodeContent(item.rows)
            try:
              self.request_manager.PostEntities(item)
              success = True
              logging.debug(
                  '[%d-%d] Sent %d entities',
                  item.key_start, item.key_end, item.count)
              self.throttle.AddTransfer(RECORDS, item.count)
            except (db.InternalError, db.NotSavedError, db.Timeout), e:
              # Transient datastore errors: fall through with
              # success=False so the item is requeued below.
              logging.debug('Caught non-fatal error: %s', e)
            except urllib2.HTTPError, e:
              # 403 and 5xx are treated as retriable; anything else is
              # fatal and propagates.
              if e.code == 403 or (e.code >= 500 and e.code < 600):
                logging.debug('Caught HTTP error %d', e.code)
                logging.debug('%s', e.read())
              else:
                raise e

          except:
            self.error = sys.exc_info()[1]
            logging.exception('[%s] %s: caught exception %s', self.getName(),
                              self.__class__.__name__, str(sys.exc_info()))
            raise

        finally:
          # Always record the outcome: mark sent (and free a permit) on
          # success, otherwise mark errored, shrink the worker pool, and
          # return the item for retry.
          if success:
            item.MarkAsSent()
            self.thread_gate.IncreaseWorkers()
            self.work_queue.task_done()
          else:
            item.MarkAsError()
            self.thread_gate.DecreaseWorkers()
            try:
              self.work_queue.reput(item, block=False)
            except Queue.Full:
              logging.error('[%s] Failed to reput work item.', self.getName())
              raise Error('Failed to reput work item')
          logging.info('[%d-%d] %s',
                       item.key_start, item.key_end, StateMessage(item.state))

      finally:
        # Always leave the gated critical section.
        self.thread_gate.FinishWork()

  def GetFriendlyName(self):
    """Returns a human-friendly name for this thread."""
    return 'worker [%s]' % self.getName()
class DataSourceThread(_ThreadBase):
  """A thread which reads WorkItems and pushes them into queue.

  This thread will read/consume WorkItems from a generator (produced by
  the generator factory). These WorkItems will then be pushed into the
  work_queue. Note that reading will block if/when the work_queue becomes
  full. Information on content consumed from the generator will be pushed
  into the progress_queue.
  """

  NAME = 'data source thread'

  def __init__(self,
               work_queue,
               progress_queue,
               workitem_generator_factory,
               progress_generator_factory):
    """Initialize the DataSourceThread instance.

    Args:
      work_queue: A queue containing WorkItems for processing.
      progress_queue: A queue used for tracking progress information.
      workitem_generator_factory: A factory that creates a WorkItem generator
      progress_generator_factory: A factory that creates a generator which
        produces prior progress status, or None if there is no prior status
        to use.
    """
    _ThreadBase.__init__(self)

    self.work_queue = work_queue
    self.progress_queue = progress_queue
    self.workitem_generator_factory = workitem_generator_factory
    self.progress_generator_factory = progress_generator_factory
    self.entity_count = 0

  def PerformWork(self):
    """Performs the work of a DataSourceThread."""
    progress_gen = (self.progress_generator_factory()
                    if self.progress_generator_factory else None)

    batch_gen = self.workitem_generator_factory(self.progress_queue,
                                                progress_gen)

    self.sent_count = 0
    self.read_count = 0
    self.read_all = False

    for item in batch_gen.Batches():
      item.MarkAsRead()

      # Retry the enqueue with a short timeout so exit_flag is polled
      # about once a second instead of blocking indefinitely.
      while not self.exit_flag:
        try:
          self.work_queue.put(item, block=True, timeout=1.0)
          self.entity_count += item.count
          break
        except Queue.Full:
          pass

      if self.exit_flag:
        break

    # Only publish final counts if we ran to completion.
    if not self.exit_flag:
      self.read_all = True
      self.read_count = batch_gen.row_count
      self.sent_count = batch_gen.sent_count
def _RunningInThread(thread):
"""Return True if we are running within the specified thread."""
return threading.currentThread().getName() == thread.getName()
class ProgressDatabase(object):
  """Persistently record all progress information during an upload.

  This class wraps a very simple SQLite database which records each of
  the relevant details from the WorkItem instances. If the uploader is
  resumed, then data is replayed out of the database.

  Two distinct SQLite connections are held, because SQLite connections
  may not be shared across threads: primary_conn belongs to the thread
  that constructed this object, and progress_conn is opened lazily by
  the progress tracker thread (see _OpenProgressConnection).
  """

  def __init__(self, db_filename, commit_periodicity=100):
    """Initialize the ProgressDatabase instance.

    Args:
      db_filename: The name of the SQLite database to use.
      commit_periodicity: How many operations to perform between commits.
    """
    self.db_filename = db_filename
    logging.info('Using progress database: %s', db_filename)
    # isolation_level=None puts this connection in autocommit mode, so
    # the schema statements below take effect immediately.
    self.primary_conn = sqlite3.connect(db_filename, isolation_level=None)
    self.primary_thread = threading.currentThread()
    # Opened lazily by the progress tracker thread in
    # _OpenProgressConnection; SQLite connections are thread-bound.
    self.progress_conn = None
    self.progress_thread = None
    self.operation_count = 0
    self.commit_periodicity = commit_periodicity
    # Highest key_end recorded so far; used to assert that StoreKeys is
    # called with monotonically increasing key ranges.
    self.prior_key_end = None
    try:
      self.primary_conn.execute(
          """create table progress (
          id integer primary key autoincrement,
          state integer not null,
          key_start integer not null,
          key_end integer not null
          )
          """)
    except sqlite3.OperationalError, e:
      # The table already exists when resuming a prior run; any other
      # operational error is unexpected and re-raised.
      if 'already exists' not in e.message:
        raise
    try:
      self.primary_conn.execute('create index i_state on progress (state)')
    except sqlite3.OperationalError, e:
      # Same resume-friendly handling as the table creation above.
      if 'already exists' not in e.message:
        raise

  def ThreadComplete(self):
    """Finalize any operations the progress thread has performed.

    The database aggregates lots of operations into a single commit, and
    this method is used to commit any pending operations as the thread
    is about to shut down.
    """
    if self.progress_conn:
      self._MaybeCommit(force_commit=True)

  def _MaybeCommit(self, force_commit=False):
    """Periodically commit changes into the SQLite database.

    Committing every operation is quite expensive, and slows down the
    operation of the script. Thus, we only commit after every N operations,
    as determined by the self.commit_periodicity value. Optionally, the
    caller can force a commit.

    Args:
      force_commit: Pass True in order for a commit to occur regardless
        of the current operation count.
    """
    self.operation_count += 1
    if force_commit or (self.operation_count % self.commit_periodicity) == 0:
      self.progress_conn.commit()

  def _OpenProgressConnection(self):
    """Possibly open a database connection for the progress tracker thread.

    If the connection is not open (for the calling thread, which is assumed
    to be the progress tracker thread), then open it. We also open a couple
    cursors for later use (and reuse).
    """
    if self.progress_conn:
      return
    # Must never run on the thread that owns primary_conn; SQLite
    # connections are bound to the thread that created them.
    assert not _RunningInThread(self.primary_thread)
    self.progress_thread = threading.currentThread()
    self.progress_conn = sqlite3.connect(self.db_filename)
    # Dedicated cursors for the two statement shapes used below.
    self.insert_cursor = self.progress_conn.cursor()
    self.update_cursor = self.progress_conn.cursor()

  def HasUnfinishedWork(self):
    """Returns True if the database has progress information.

    Note there are two basic cases for progress information:
    1) All saved records indicate a successful upload. In this case, we
       need to skip everything transmitted so far and then send the rest.
    2) Some records for incomplete transfer are present. These need to be
       sent again, and then we resume sending after all the successful
       data.

    Returns:
      True if the database has progress information, False otherwise.

    Raises:
      ResumeError: If there is an error reading the progress database.
    """
    assert _RunningInThread(self.primary_thread)
    cursor = self.primary_conn.cursor()
    cursor.execute('select count(*) from progress')
    row = cursor.fetchone()
    if row is None:
      raise ResumeError('Error reading progress information.')
    return row[0] != 0

  def StoreKeys(self, key_start, key_end):
    """Record a new progress record, returning a key for later updates.

    The specified progress information will be persisted into the database.
    A unique key will be returned that identifies this progress state. The
    key is later used to (quickly) update this record.

    For the progress resumption to proceed properly, calls to StoreKeys
    MUST specify monotonically increasing key ranges. This will result in
    a database whereby the ID, KEY_START, and KEY_END rows are all
    increasing (rather than having ranges out of order).

    NOTE: the above precondition is NOT tested by this method (since it
    would imply an additional table read or two on each invocation).

    Args:
      key_start: The starting key of the WorkItem (inclusive)
      key_end: The end key of the WorkItem (inclusive)

    Returns:
      A string to later be used as a unique key to update this state.
    """
    self._OpenProgressConnection()
    assert _RunningInThread(self.progress_thread)
    assert isinstance(key_start, int)
    assert isinstance(key_end, int)
    assert key_start <= key_end
    # Enforce the monotonically-increasing contract relative to the last
    # range seen (either stored here or replayed during resume).
    if self.prior_key_end is not None:
      assert key_start > self.prior_key_end
    self.prior_key_end = key_end
    self.insert_cursor.execute(
        'insert into progress (state, key_start, key_end) values (?, ?, ?)',
        (STATE_READ, key_start, key_end))
    # The autoincrement row id doubles as the progress key.
    progress_key = self.insert_cursor.lastrowid
    self._MaybeCommit()
    return progress_key

  def UpdateState(self, key, new_state):
    """Update a specified progress record with new information.

    Args:
      key: The key for this progress record, returned from StoreKeys
      new_state: The new state to associate with this progress record.
    """
    self._OpenProgressConnection()
    assert _RunningInThread(self.progress_thread)
    assert isinstance(new_state, int)
    self.update_cursor.execute('update progress set state=? where id=?',
                               (new_state, key))
    self._MaybeCommit()

  def GetProgressStatusGenerator(self):
    """Get a generator which returns progress information.

    The returned generator will yield a series of 4-tuples that specify
    progress information about a prior run of the uploader. The 4-tuples
    have the following values:

      progress_key: The unique key to later update this record with new
        progress information.
      state: The last state saved for this progress record.
      key_start: The starting key of the items for uploading (inclusive).
      key_end: The ending key of the items for uploading (inclusive).

    After all incompletely-transferred records are provided, then one
    more 4-tuple will be generated:

      None
      DATA_CONSUMED_TO_HERE: A unique string value indicating this record
        is being provided.
      None
      key_end: An integer value specifying the last data source key that
        was handled by the previous run of the uploader.

    The caller should begin uploading records which occur after key_end.

    Yields:
      Progress information as tuples (progress_key, state, key_start, key_end).
    """
    # A fresh connection is opened because this generator is consumed on
    # a different thread than the one that owns primary_conn.
    conn = sqlite3.connect(self.db_filename, isolation_level=None)
    cursor = conn.cursor()
    # The row with the highest id holds the furthest key range stored;
    # its key_end is where a resumed upload should continue from.
    # NOTE(review): presumably only called after HasUnfinishedWork()
    # returned True — with an empty table, fetchone()[0] is None and the
    # second query would misbehave; confirm against callers.
    cursor.execute('select max(id) from progress')
    batch_id = cursor.fetchone()[0]
    cursor.execute('select key_end from progress where id = ?', (batch_id,))
    key_end = cursor.fetchone()[0]
    self.prior_key_end = key_end
    # Replay every record that did not reach STATE_SENT so those ranges
    # get transferred again.
    cursor.execute(
        'select id, state, key_start, key_end from progress'
        '  where state != ?'
        '  order by id',
        (STATE_SENT,))
    rows = cursor.fetchall()
    for row in rows:
      # fetchall should not yield None; kept as a defensive guard.
      if row is None:
        break
      yield row
    # Sentinel tuple telling the consumer where fresh data begins.
    yield None, DATA_CONSUMED_TO_HERE, None, key_end
class StubProgressDatabase(object):
  """A no-op stand-in for ProgressDatabase that records nothing."""

  def HasUnfinishedWork(self):
    """Reports that no prior progress information exists."""
    return False

  def StoreKeys(self, unused_key_start, unused_key_end):
    """Pretends to persist a progress record; returns a placeholder key."""
    return 'fake-key'

  def UpdateState(self, unused_key, unused_new_state):
    """Accepts and discards a progress state change."""

  def ThreadComplete(self):
    """Nothing to finalize for the stub database."""
class ProgressTrackerThread(_ThreadBase):
  """Persists WorkItem progress changes into the progress database.

  This thread drains the progress queue and writes each status change it
  sees. Replaying a previous run's progress back out of the database is
  handled by separate mechanisms; this thread only records.
  """

  NAME = 'progress tracking thread'

  def __init__(self, progress_queue, progress_db):
    """Initialize the ProgressTrackerThread instance.

    Args:
      progress_queue: A Queue used for tracking progress information.
      progress_db: The database for tracking progress information; should
        be an instance of ProgressDatabase.
    """
    _ThreadBase.__init__(self)

    self.progress_queue = progress_queue
    self.db = progress_db
    self.entities_sent = 0

  def PerformWork(self):
    """Drains the progress queue until told to exit, recording each item."""
    while not self.exit_flag:
      try:
        work_item = self.progress_queue.get(block=True, timeout=1.0)
      except Queue.Empty:
        # Wake up periodically so exit_flag is honoured promptly.
        continue
      if work_item == _THREAD_SHOULD_EXIT:
        break

      if work_item.state == STATE_READ and work_item.progress_key is None:
        # First sighting of this item: allocate its progress record.
        work_item.progress_key = self.db.StoreKeys(work_item.key_start,
                                                   work_item.key_end)
      else:
        assert work_item.progress_key is not None
        self.db.UpdateState(work_item.progress_key, work_item.state)
        if work_item.state == STATE_SENT:
          self.entities_sent += work_item.count

      work_item.progress_event.set()
      self.progress_queue.task_done()

    self.db.ThreadComplete()
def Validate(value, typ):
  """Checks that value is non-empty and of the right type.

  Args:
    value: any value
    typ: a type or tuple of types

  Raises:
    ValueError: if value is None or empty.
    TypeError: if value is not an instance of typ.
  """
  if not value:
    raise ValueError('Value should not be empty; received %s.' % value)
  if not isinstance(value, typ):
    raise TypeError('Expected a %s, but received %s (a %s).' %
                    (typ, value, value.__class__))
class Loader(object):
  """Base class for turning parsed input data into datastore entities.

  To bulk load a new entity kind, subclass Loader and call
  Loader.__init__ from the subclass constructor; doing so registers the
  instance with the bulk loader so the bulkload handler picks it up
  automatically.

  Override HandleEntity to run extra conversion code, create additional
  properties, or otherwise adjust entities before they are inserted. See
  CreateEntity for how entities are built from the (parsed) input data.
  """

  __loaders = {}
  __kind = None
  __properties = None

  def __init__(self, kind, properties):
    """Records the kind and property converters for this Loader.

    Args:
      kind: a string containing the entity kind that this loader handles

      properties: list of (name, converter) tuples. Each converter is a
        one-argument callable mapping the raw CSV string for that column
        to a correctly typed property value. The tuples must appear in
        the same order as the columns of the CSV file.

        For example:
          [('name', str),
           ('id_number', int),
           ('email', datastore_types.Email),
           ('user', users.User),
           ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
           ('description', datastore_types.Text),
           ]
    """
    Validate(kind, basestring)
    self.__kind = kind
    # Fails fast if no model class is registered for this kind.
    db.class_for_kind(kind)
    Validate(properties, list)
    for prop_name, conv in properties:
      Validate(prop_name, basestring)
      assert callable(conv), (
          'Conversion function %s for property %s is not callable.' %
          (conv, prop_name))
    self.__properties = properties

  @staticmethod
  def RegisterLoader(loader):
    """Adds loader to the registry of known Loader instances, by kind."""
    Loader.__loaders[loader.__kind] = loader

  def kind(self):
    """Returns the entity kind that this Loader handles."""
    return self.__kind

  def CreateEntity(self, values, key_name=None):
    """Builds entities from one row of property values.

    Args:
      values: list/tuple of str
      key_name: if provided, the name for the (single) resulting entity

    Returns:
      list of db.Model instances, populated from the values (converted
      to native types using the properties map given in the constructor)
      and passed through HandleEntity; ready to be inserted.

    Raises:
      AssertionError: if the number of values doesn't match the number
        of properties in the properties map.
      ValueError: if any element of values is None or empty.
      TypeError: if values is not a list or tuple.
    """
    Validate(values, (list, tuple))
    assert len(values) == len(self.__properties), (
        'Expected %d CSV columns, found %d.' %
        (len(self.__properties), len(values)))

    model_class = db.class_for_kind(self.__kind)

    prop_values = {'key_name': key_name}
    for (prop_name, conv), raw_value in zip(self.__properties, values):
      if conv is bool and raw_value.lower() in ('0', 'false', 'no'):
        # bool('0') would be True; map the common "false" spellings
        # explicitly before conversion.
        raw_value = False
      prop_values[prop_name] = conv(raw_value)

    result = self.HandleEntity(model_class(**prop_values))
    if not result:
      return result
    if not isinstance(result, (list, tuple)):
      result = [result]
    for candidate in result:
      if not isinstance(candidate, db.Model):
        raise TypeError('Expected a db.Model, received %s (a %s).' %
                        (candidate, candidate.__class__))
    return result

  def GenerateKey(self, i, values):
    """Returns the key_name to use when creating the i'th entity.

    The default implementation returns None, which makes the server
    assign a numeric key. Override to control key generation: a returned
    string must neither start with a digit nor have the form __*__. (See
    http://code.google.com/appengine/docs/python/datastore/keysandentitygroups.html)

    If you generate your own string keys, keep in mind:

    1. The key name for each entity must be unique.
    2. If an entity of the same kind and key already exists in the
       datastore, it will be overwritten.

    Args:
      i: Number corresponding to this object (assume it's run in a loop,
        this is your current count.
      values: list/tuple of str.

    Returns:
      A string to be used as the key_name for an entity.
    """
    return None

  def HandleEntity(self, entity):
    """Hook for subclasses to post-process each entity before storage.

    Called for each entity after its properties are populated from the
    input row but before it is stored.

    Return the entity to insert, a list of entities to insert, or
    None/[] to insert nothing.

    Args:
      entity: db.Model

    Returns:
      db.Model or list of db.Model
    """
    return entity

  @staticmethod
  def RegisteredLoaders():
    """Returns a copy of the registry mapping kind to Loader instance."""
    return dict(Loader.__loaders)
class QueueJoinThread(threading.Thread):
  """A thread whose only job is to join a queue, then exit.

  Queue joins have no timeout of their own; running this thread and then
  joining *it* with a timeout simulates a timed queue join.
  """

  def __init__(self, queue):
    """Initialize a QueueJoinThread.

    Args:
      queue: The queue for this thread to join.
    """
    super(QueueJoinThread, self).__init__()
    assert isinstance(queue, (Queue.Queue, ReQueue))
    self.queue = queue

  def run(self):
    """Block until every item placed on the queue has been marked done."""
    self.queue.join()
def InterruptibleQueueJoin(queue,
                           thread_local,
                           thread_gate,
                           queue_join_thread_factory=QueueJoinThread):
  """Repeatedly joins the given ReQueue or Queue.Queue with short timeout.

  Between each timeout on the join, the interrupt flag and the worker
  threads are checked.

  Args:
    queue: A Queue.Queue or ReQueue instance.
    thread_local: A threading.local instance which indicates interrupts.
    thread_gate: A ThreadGate instance.
    queue_join_thread_factory: Used for dependency injection.

  Returns:
    True unless the queue join is interrupted by SIGINT or worker death.
  """
  join_thread = queue_join_thread_factory(queue)
  join_thread.start()
  while True:
    join_thread.join(timeout=.5)
    if not join_thread.isAlive():
      # The join completed: all queue items were processed.
      return True
    if thread_local.shut_down:
      logging.debug('Queue join interrupted')
      return False
    if any(not worker.isAlive() for worker in thread_gate.Threads()):
      # A worker died; the queue may never drain, so give up.
      return False
def ShutdownThreads(data_source_thread, work_queue, thread_gate):
  """Signals the worker and data source threads to stop, then drains the queue.

  Args:
    data_source_thread: A running DataSourceThread instance.
    work_queue: The work queue.
    thread_gate: A ThreadGate instance with workers registered.
  """
  logging.info('An error occurred. Shutting down...')

  data_source_thread.exit_flag = True

  for worker in thread_gate.Threads():
    worker.exit_flag = True

  # Wake any workers parked on the gate so they can observe exit_flag.
  for unused_worker in thread_gate.Threads():
    thread_gate.EnableThread()

  data_source_thread.join(timeout=3.0)
  if data_source_thread.isAlive():
    logging.warn('%s hung while trying to exit',
                 data_source_thread.GetFriendlyName())

  # Drain leftover work items so queue joins elsewhere can complete.
  while not work_queue.empty():
    try:
      work_queue.get_nowait()
    except Queue.Empty:
      pass
    else:
      work_queue.task_done()
def PerformBulkUpload(app_id,
post_url,
kind,
workitem_generator_factory,
num_threads,
throttle,
progress_db,
max_queue_size=DEFAULT_QUEUE_SIZE,
request_manager_factory=RequestManager,
bulkloaderthread_factory=BulkLoaderThread,
progresstrackerthread_factory=ProgressTrackerThread,
datasourcethread_factory=DataSourceThread,
work_queue_factory=ReQueue,
progress_queue_factory=Queue.Queue):
"""Uploads data into an application using a series of HTTP POSTs.
This function will spin up a number of threads to read entities from
the data source, pass those to a number of worker ("uploader") threads
for sending to the application, and track all of the progress in a
small database in case an error or pause/termination requires a
restart/resumption of the upload process.
Args:
app_id: String containing application id.
post_url: URL to post the Entity data to.
kind: Kind of the Entity records being posted.
workitem_generator_factory: A factory that creates a WorkItem generator.
num_threads: How many uploader threads should be created.
throttle: A Throttle instance.
progress_db: The database to use for replaying/recording progress.
max_queue_size: Maximum size of the queues before they should block.
request_manager_factory: Used for dependency injection.
bulkloaderthread_factory: Used for dependency injection.
progresstrackerthread_factory: Used for dependency injection.
datasourcethread_factory: Used for dependency injection.
work_queue_factory: Used for dependency injection.
progress_queue_factory: Used for dependency injection.
Raises:
AuthenticationError: If authentication is required and fails.
"""
thread_gate = ThreadGate(True)
(unused_scheme,
host_port, url_path,
unused_query, unused_fragment) = urlparse.urlsplit(post_url)
work_queue = work_queue_factory(max_queue_size)
progress_queue = progress_queue_factory(max_queue_size)
request_manager = request_manager_factory(app_id,
host_port,
url_path,
kind,
throttle)
throttle.Register(threading.currentThread())
try:
request_manager.Authenticate()
except Exception, e:
logging.exception(e)
raise AuthenticationError('Authentication failed')
if (request_manager.credentials is not None and
not request_manager.authenticated):
raise AuthenticationError('Authentication failed')
for unused_idx in range(num_threads):
thread = bulkloaderthread_factory(work_queue,
throttle,
thread_gate,
request_manager)
throttle.Register(thread)
thread_gate.Register(thread)
progress_thread = progresstrackerthread_factory(progress_queue, progress_db)
if progress_db.HasUnfinishedWork():
logging.debug('Restarting upload using progress database')
progress_generator_factory = progress_db.GetProgressStatusGenerator
else:
progress_generator_factory = None
data_source_thread = datasourcethread_factory(work_queue,
progress_queue,
workitem_generator_factory,
progress_generator_factory)
thread_local = threading.local()
thread_local.shut_down = False
def Interrupt(unused_signum, unused_frame):
"""Shutdown gracefully in response to a signal."""
thread_local.shut_down = True
signal.signal(signal.SIGINT, Interrupt)
progress_thread.start()
data_source_thread.start()
for thread in thread_gate.Threads():
thread.start()
while not thread_local.shut_down:
data_source_thread.join(timeout=0.25)
if data_source_thread.isAlive():
for thread in list(thread_gate.Threads()) + [progress_thread]:
if not thread.isAlive():
logging.info('Unexpected thread death: %s', thread.getName())
thread_local.shut_down = True
break
else:
break
if thread_local.shut_down:
ShutdownThreads(data_source_thread, work_queue, thread_gate)
def _Join(ob, msg):
logging.debug('Waiting for %s...', msg)
if isinstance(ob, threading.Thread):
ob.join(timeout=3.0)
if ob.isAlive():
logging.debug('Joining %s failed', ob.GetFriendlyName())
else:
logging.debug('... done.')
elif isinstance(ob, (Queue.Queue, ReQueue)):
if not InterruptibleQueueJoin(ob, thread_local, thread_gate):
ShutdownThreads(data_source_thread, work_queue, thread_gate)
else:
ob.join()
logging.debug('... done.')
_Join(work_queue, 'work_queue to flush')
for unused_thread in thread_gate.Threads():
work_queue.put(_THREAD_SHOULD_EXIT)
for unused_thread in thread_gate.Threads():
thread_gate.EnableThread()
for thread in thread_gate.Threads():
_Join(thread, 'thread [%s] to terminate' % thread.getName())
thread.CheckError()
if progress_thread.isAlive():
_Join(progress_queue, 'progress_queue to finish')
else:
logging.warn('Progress thread exited prematurely')
progress_queue.put(_THREAD_SHOULD_EXIT)
_Join(progress_thread, 'progress_thread to terminate')
progress_thread.CheckError()
data_source_thread.CheckError()
total_up, duration = throttle.TotalTransferred(BANDWIDTH_UP)
s_total_up, unused_duration = throttle.TotalTransferred(HTTPS_BANDWIDTH_UP)
total_up += s_total_up
logging.info('%d entites read, %d previously transferred',
data_source_thread.read_count,
data_source_thread.sent_count)
logging.info('%d entities (%d bytes) transferred in %.1f seconds',
progress_thread.entities_sent, total_up, duration)
if (data_source_thread.read_all and
progress_thread.entities_sent + data_source_thread.sent_count >=
data_source_thread.read_count):
logging.info('All entities successfully uploaded')
else:
logging.info('Some entities not successfully uploaded')
def PrintUsageExit(code):
  """Prints the module usage text, flushes output streams, and exits.

  Args:
    code: Status code to pass to sys.exit() after displaying usage information.
  """
  usage = __doc__ % {'arg0': sys.argv[0]}
  sys.stdout.write(usage)
  sys.stdout.write('\n')
  sys.stdout.flush()
  sys.stderr.flush()
  sys.exit(code)
def ParseArguments(argv):
  """Parses command-line arguments for the bulkloader.

  Prints out a help message if -h or --help is supplied.

  Args:
    argv: List of command-line arguments.

  Returns:
    The tuple produced by ProcessArguments for the parsed flag values.
  """
  opts, unused_args = getopt.getopt(
      argv[1:],
      'h',
      ['debug',
       'help',
       'url=',
       'filename=',
       'batch_size=',
       'kind=',
       'num_threads=',
       'bandwidth_limit=',
       'rps_limit=',
       'http_limit=',
       'db_filename=',
       'app_id=',
       'config_file=',
       'auth_domain=',
      ])

  # Flag values keyed by argument name, pre-seeded with the defaults.
  arg_dict = {
      'url': None,
      'filename': None,
      'batch_size': DEFAULT_BATCH_SIZE,
      'kind': None,
      'num_threads': DEFAULT_THREAD_COUNT,
      'bandwidth_limit': DEFAULT_BANDWIDTH_LIMIT,
      'rps_limit': DEFAULT_RPS_LIMIT,
      'http_limit': DEFAULT_REQUEST_LIMIT,
      'db_filename': None,
      'app_id': None,
      'config_file': None,
      'auth_domain': 'gmail.com',
  }
  # Flags whose values must be converted to integers.
  int_flags = ('--batch_size', '--num_threads', '--bandwidth_limit',
               '--rps_limit', '--http_limit')

  for option, value in opts:
    if option == '--debug':
      logging.getLogger().setLevel(logging.DEBUG)
    elif option in ('-h', '--help'):
      PrintUsageExit(0)
    elif option in int_flags:
      arg_dict[option[2:]] = int(value)
    else:
      # All remaining long flags are plain string assignments whose
      # argument name is the flag name without the leading dashes.
      arg_dict[option[2:]] = value

  return ProcessArguments(app_id=arg_dict['app_id'],
                          url=arg_dict['url'],
                          filename=arg_dict['filename'],
                          batch_size=arg_dict['batch_size'],
                          kind=arg_dict['kind'],
                          num_threads=arg_dict['num_threads'],
                          bandwidth_limit=arg_dict['bandwidth_limit'],
                          rps_limit=arg_dict['rps_limit'],
                          http_limit=arg_dict['http_limit'],
                          db_filename=arg_dict['db_filename'],
                          config_file=arg_dict['config_file'],
                          auth_domain=arg_dict['auth_domain'],
                          die_fn=lambda: PrintUsageExit(1))
def ThrottleLayout(bandwidth_limit, http_limit, rps_limit):
  """Builds the throttle layout mapping from the given limits.

  HTTPS bandwidth and request limits are set to one fifth of the
  corresponding plain-HTTP limits.

  Args:
    bandwidth_limit: Maximum bytes/second (up and down).
    http_limit: Maximum HTTP requests/second.
    rps_limit: Maximum records/second.

  Returns:
    A dict mapping throttle names to their limits.
  """
  https_bandwidth_limit = bandwidth_limit / 5
  https_request_limit = http_limit / 5
  return {
      BANDWIDTH_UP: bandwidth_limit,
      BANDWIDTH_DOWN: bandwidth_limit,
      REQUESTS: http_limit,
      HTTPS_BANDWIDTH_UP: https_bandwidth_limit,
      HTTPS_BANDWIDTH_DOWN: https_bandwidth_limit,
      HTTPS_REQUESTS: https_request_limit,
      RECORDS: rps_limit,
  }
def LoadConfig(config_file):
  """Loads a config file and registers any Loader classes present.

  Args:
    config_file: Path of the configuration file to execute, or a falsy
      value to skip loading entirely.
  """
  if not config_file:
    return
  # Execute the config with a copy of this module's globals so it can
  # subclass Loader and reference module-level helpers.
  global_dict = dict(globals())
  execfile(config_file, global_dict)
  for loader_class in Loader.__subclasses__():
    Loader.RegisterLoader(loader_class())
def _MissingArgument(arg_name, die_fn):
"""Print error message about missing argument and die."""
print >>sys.stderr, '%s argument required' % arg_name
die_fn()
def ProcessArguments(app_id=None,
                     url=None,
                     filename=None,
                     batch_size=DEFAULT_BATCH_SIZE,
                     kind=None,
                     num_threads=DEFAULT_THREAD_COUNT,
                     bandwidth_limit=DEFAULT_BANDWIDTH_LIMIT,
                     rps_limit=DEFAULT_RPS_LIMIT,
                     http_limit=DEFAULT_REQUEST_LIMIT,
                     db_filename=None,
                     config_file=None,
                     auth_domain='gmail.com',
                     die_fn=lambda: sys.exit(1)):
  """Validates and normalizes non command-line input arguments.

  Fills in a timestamped progress-database filename when none is given,
  checks required arguments, and derives app_id from the url host when
  possible. die_fn is invoked for any invalid or missing argument.

  Returns:
    Tuple (app_id, url, filename, batch_size, kind, num_threads,
    bandwidth_limit, rps_limit, http_limit, db_filename, config_file,
    auth_domain).
  """
  if db_filename is None:
    db_filename = time.strftime('bulkloader-progress-%Y%m%d.%H%M%S.sql3')

  if batch_size <= 0:
    sys.stderr.write('batch_size must be 1 or larger' + '\n')
    die_fn()

  # Required arguments, checked in a fixed order.
  for arg_name, arg_value in (('url', url),
                              ('filename', filename),
                              ('kind', kind),
                              ('config_file', config_file)):
    if arg_value is None:
      _MissingArgument(arg_name, die_fn)

  if app_id is None:
    # Try to derive the application id from the url's host.
    (unused_scheme, host_port, unused_url_path,
     unused_query, unused_fragment) = urlparse.urlsplit(url)
    suffix_idx = host_port.find('.appspot.com')
    if suffix_idx > -1:
      app_id = host_port[:suffix_idx]
    elif host_port.split(':')[0].endswith('google.com'):
      app_id = host_port.split('.')[0]
    else:
      sys.stderr.write('app_id required for non appspot.com domains' + '\n')
      die_fn()

  return (app_id, url, filename, batch_size, kind, num_threads,
          bandwidth_limit, rps_limit, http_limit, db_filename, config_file,
          auth_domain)
def _PerformBulkload(app_id=None,
                     url=None,
                     filename=None,
                     batch_size=DEFAULT_BATCH_SIZE,
                     kind=None,
                     num_threads=DEFAULT_THREAD_COUNT,
                     bandwidth_limit=DEFAULT_BANDWIDTH_LIMIT,
                     rps_limit=DEFAULT_RPS_LIMIT,
                     http_limit=DEFAULT_REQUEST_LIMIT,
                     db_filename=None,
                     config_file=None,
                     auth_domain='gmail.com'):
  """Runs the bulkloader with the given keyword arguments.

  Args:
    app_id: The application id.
    url: The url of the remote_api endpoint.
    filename: The name of the file containing the CSV data.
    batch_size: The number of records to send per request.
    kind: The kind of entity to transfer.
    num_threads: The number of threads to use to transfer data.
    bandwidth_limit: Maximum bytes/second to transfers.
    rps_limit: Maximum records/second to transfer.
    http_limit: Maximum requests/second for transfers.
    db_filename: The name of the SQLite3 progress database file.
    config_file: The name of the configuration file.
    auth_domain: The auth domain to use for logins and UserProperty.

  Returns:
    An exit code.
  """
  os.environ['AUTH_DOMAIN'] = auth_domain
  LoadConfig(config_file)

  layout = ThrottleLayout(bandwidth_limit, http_limit, rps_limit)
  upload_throttle = Throttle(layout=layout)

  workitem_generator_factory = GetCSVGeneratorFactory(filename, batch_size)

  # The literal filename 'skip' disables progress tracking entirely.
  if db_filename == 'skip':
    progress_db = StubProgressDatabase()
  else:
    progress_db = ProgressDatabase(db_filename)

  # Ensure the queue can always hold a couple of batches per worker.
  queue_size = max(DEFAULT_QUEUE_SIZE, 2 * num_threads + 5)

  PerformBulkUpload(app_id,
                    url,
                    kind,
                    workitem_generator_factory,
                    num_threads,
                    upload_throttle,
                    progress_db,
                    max_queue_size=queue_size)

  return 0
def Run(app_id=None,
        url=None,
        filename=None,
        batch_size=DEFAULT_BATCH_SIZE,
        kind=None,
        num_threads=DEFAULT_THREAD_COUNT,
        bandwidth_limit=DEFAULT_BANDWIDTH_LIMIT,
        rps_limit=DEFAULT_RPS_LIMIT,
        http_limit=DEFAULT_REQUEST_LIMIT,
        db_filename=None,
        auth_domain='gmail.com',
        config_file=None):
  """Sets up and runs the bulkloader with the given keyword arguments.

  Args:
    app_id: The application id.
    url: The url of the remote_api endpoint.
    filename: The name of the file containing the CSV data.
    batch_size: The number of records to send per request.
    kind: The kind of entity to transfer.
    num_threads: The number of threads to use to transfer data.
    bandwidth_limit: Maximum bytes/second to transfers.
    rps_limit: Maximum records/second to transfer.
    http_limit: Maximum requests/second for transfers.
    db_filename: The name of the SQLite3 progress database file.
    config_file: The name of the configuration file.
    auth_domain: The auth domain to use for logins and UserProperty.

  Returns:
    An exit code.
  """
  logging.basicConfig(
      format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')
  args = ProcessArguments(app_id=app_id,
                          url=url,
                          filename=filename,
                          batch_size=batch_size,
                          kind=kind,
                          num_threads=num_threads,
                          bandwidth_limit=bandwidth_limit,
                          rps_limit=rps_limit,
                          http_limit=http_limit,
                          db_filename=db_filename,
                          config_file=config_file)

  # ProcessArguments returns its values in this fixed order; forward
  # them to _PerformBulkload as keyword arguments.
  arg_names = ('app_id', 'url', 'filename', 'batch_size', 'kind',
               'num_threads', 'bandwidth_limit', 'rps_limit', 'http_limit',
               'db_filename', 'config_file', 'auth_domain')
  return _PerformBulkload(**dict(zip(arg_names, args)))
def main(argv):
  """Runs the importer from the command line.

  Args:
    argv: Command-line arguments, including the program name.

  Returns:
    An exit code.
  """
  logging.basicConfig(
      level=logging.INFO,
      format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')
  parsed = ParseArguments(argv)
  # Any unset (None) slot in the tuple means a required flag was missing.
  if None in parsed:
    sys.stderr.write('Invalid arguments\n')
    PrintUsageExit(1)
  names = ('app_id', 'url', 'filename', 'batch_size', 'kind', 'num_threads',
           'bandwidth_limit', 'rps_limit', 'http_limit', 'db_filename',
           'config_file', 'auth_domain')
  kwargs = dict(zip(names, parsed))
  return _PerformBulkload(**kwargs)
if __name__ == '__main__':
  # Propagate the bulkloader's integer exit code to the shell.
  sys.exit(main(sys.argv))
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Runs a development application server for an application.
%(script)s [options] <application root>
Application root must be the path to the application to run in this server.
Must contain a valid app.yaml or app.yml file.
Options:
--help, -h View this helpful message.
--debug, -d Use debug logging. (Default false)
--clear_datastore, -c Clear the Datastore on startup. (Default false)
--address=ADDRESS, -a ADDRESS
Address to which this server should bind. (Default
%(address)s).
--port=PORT, -p PORT Port for the server to run on. (Default %(port)s)
--datastore_path=PATH Path to use for storing Datastore file stub data.
(Default %(datastore_path)s)
--history_path=PATH Path to use for storing Datastore history.
(Default %(history_path)s)
--require_indexes Disallows queries that require composite indexes
not defined in index.yaml.
--smtp_host=HOSTNAME SMTP host to send test mail to. Leaving this
unset will disable SMTP mail sending.
(Default '%(smtp_host)s')
--smtp_port=PORT SMTP port to send test mail to.
(Default %(smtp_port)s)
--smtp_user=USER SMTP user to connect as. Stub will only attempt
to login if this field is non-empty.
(Default '%(smtp_user)s').
--smtp_password=PASSWORD Password for SMTP server.
(Default '%(smtp_password)s')
--enable_sendmail Enable sendmail when SMTP not configured.
(Default false)
--show_mail_body Log the body of emails in mail stub.
(Default false)
--auth_domain Authorization domain that this app runs in.
(Default gmail.com)
--debug_imports Enables debug logging for module imports, showing
search paths used for finding modules and any
errors encountered during the import process.
--allow_skipped_files Allow access to files matched by app.yaml's
skipped_files (default False)
--disable_static_caching Never allow the browser to cache static files.
(Default enable if expiration set in app.yaml)
"""
from google.appengine.tools import os_compat
import getopt
import logging
import os
import re
import sys
import traceback
import tempfile
def SetGlobals():
  """Set various global variables involving the 'google' package.

  Imports the App Engine submodules and binds them to module-level names
  so the rest of this file can use them. The imports are deferred into
  this function (rather than done at the top of the file) because SetPaths
  may need to adjust sys.path first to select the right API version.

  This function should not be called until sys.path has been properly set.
  """
  global yaml_errors, appcfg, appengine_rpc, dev_appserver, os_compat
  from google.appengine.api import yaml_errors
  from google.appengine.tools import appcfg
  from google.appengine.tools import appengine_rpc
  from google.appengine.tools import dev_appserver
  from google.appengine.tools import os_compat
# Server queried for SDK update checks when no override is given.
DEFAULT_ADMIN_CONSOLE_SERVER = 'appengine.google.com'
# Keys used in the option_dict produced by ParseArguments. Using named
# constants keeps flag handling and option lookup in sync.
ARG_ADDRESS = 'address'
ARG_ADMIN_CONSOLE_SERVER = 'admin_console_server'
ARG_ADMIN_CONSOLE_HOST = 'admin_console_host'
ARG_AUTH_DOMAIN = 'auth_domain'
ARG_CLEAR_DATASTORE = 'clear_datastore'
ARG_DATASTORE_PATH = 'datastore_path'
ARG_DEBUG_IMPORTS = 'debug_imports'
ARG_ENABLE_SENDMAIL = 'enable_sendmail'
ARG_SHOW_MAIL_BODY = 'show_mail_body'
ARG_HISTORY_PATH = 'history_path'
ARG_LOGIN_URL = 'login_url'
ARG_LOG_LEVEL = 'log_level'
ARG_PORT = 'port'
ARG_REQUIRE_INDEXES = 'require_indexes'
ARG_ALLOW_SKIPPED_FILES = 'allow_skipped_files'
ARG_SMTP_HOST = 'smtp_host'
ARG_SMTP_PASSWORD = 'smtp_password'
ARG_SMTP_PORT = 'smtp_port'
ARG_SMTP_USER = 'smtp_user'
ARG_STATIC_CACHING = 'static_caching'
ARG_TEMPLATE_DIR = 'template_dir'
# Root of the SDK installation, derived by walking four directory levels up
# from google/appengine/tools/os_compat.py.
SDK_PATH = os.path.dirname(
             os.path.dirname(
               os.path.dirname(
                 os.path.dirname(os_compat.__file__)
               )
             )
           )
# Default values for every option; ParseArguments starts from a copy of this.
DEFAULT_ARGS = {
  ARG_PORT: 8080,
  ARG_LOG_LEVEL: logging.INFO,
  ARG_DATASTORE_PATH: os.path.join(tempfile.gettempdir(),
                                   'dev_appserver.datastore'),
  ARG_HISTORY_PATH: os.path.join(tempfile.gettempdir(),
                                 'dev_appserver.datastore.history'),
  ARG_LOGIN_URL: '/_ah/login',
  ARG_CLEAR_DATASTORE: False,
  ARG_REQUIRE_INDEXES: False,
  ARG_TEMPLATE_DIR: os.path.join(SDK_PATH, 'templates'),
  ARG_SMTP_HOST: '',
  ARG_SMTP_PORT: 25,
  ARG_SMTP_USER: '',
  ARG_SMTP_PASSWORD: '',
  ARG_ENABLE_SENDMAIL: False,
  ARG_SHOW_MAIL_BODY: False,
  ARG_AUTH_DOMAIN: 'gmail.com',
  ARG_ADDRESS: 'localhost',
  ARG_ADMIN_CONSOLE_SERVER: DEFAULT_ADMIN_CONSOLE_SERVER,
  ARG_ADMIN_CONSOLE_HOST: None,
  ARG_ALLOW_SKIPPED_FILES: False,
  ARG_STATIC_CACHING: True,
}
# Maps an api_version string to {package name: path parts relative to the
# SDK root}. SetPaths prepends these paths to sys.path for non-default
# versions.
API_PATHS = {'1':
             {'google': (),
              'antlr3': ('lib', 'antlr3'),
              'django': ('lib', 'django'),
              'webob': ('lib', 'webob'),
              'yaml': ('lib', 'yaml', 'lib'),
              }
             }
DEFAULT_API_VERSION = '1'
# A synthetic 'test' version used by the SDK's own tests; it points one
# package at a deliberately nonexistent path.
API_PATHS['test'] = API_PATHS[DEFAULT_API_VERSION].copy()
API_PATHS['test']['_test'] = ('nonexistent', 'test', 'path')
def SetPaths(app_config_path):
  """Set the interpreter to use the specified API version.

  The app.yaml file is scanned for the api_version field and the value is
  extracted. With that information, the paths in API_PATHS are added to the
  front of sys.paths to make sure that they take precedent over any other paths
  to older versions of a package. All modules for each package set are cleared
  out of sys.modules to make sure only the newest version is used.

  Args:
    app_config_path: Path to the app.yaml file.

  Returns:
    The api_version string found in the configuration file.
  """
  api_version_re = re.compile(r'api_version:\s*(?P<api_version>[\w.]{1,32})')
  api_version = None
  app_config_file = open(app_config_path, 'r')
  try:
    # A line-by-line regex scan avoids a full YAML parse before the correct
    # yaml package version has been placed on sys.path.
    for line in app_config_file:
      re_match = api_version_re.match(line)
      if re_match:
        api_version = re_match.group('api_version')
        break
  finally:
    app_config_file.close()
  if api_version is None:
    logging.error("Application configuration file missing an 'api_version' "
                  "value:\n%s" % app_config_path)
    sys.exit(1)
  if api_version not in API_PATHS:
    logging.error("Value of %r for 'api_version' from the application "
                  "configuration file is not valid:\n%s" %
                  (api_version, app_config_path))
    sys.exit(1)
  if api_version == DEFAULT_API_VERSION:
    # The default version's packages are already importable; nothing to do.
    return DEFAULT_API_VERSION
  # Reuse the module-level SDK_PATH rather than recomputing the same
  # four-level dirname chain here (the two computations were identical).
  for pkg_name, path_parts in API_PATHS[api_version].iteritems():
    # Evict any already-imported modules of this package so the versioned
    # copy is picked up on the next import.
    for name in sys.modules.keys():
      if name == pkg_name or name.startswith('%s.' % pkg_name):
        del sys.modules[name]
    pkg_path = os.path.join(SDK_PATH, *path_parts)
    sys.path.insert(0, pkg_path)
  return api_version
def PrintUsageExit(code):
  """Print the module usage message and terminate the process.

  Args:
    code: Status code to pass to sys.exit() after displaying usage information.
  """
  # The module docstring doubles as the usage template; fill in the defaults
  # and the name this script was invoked as.
  substitutions = DEFAULT_ARGS.copy()
  substitutions['script'] = os.path.basename(sys.argv[0])
  usage_text = sys.modules['__main__'].__doc__ % substitutions
  sys.stdout.write(usage_text + '\n')
  sys.stdout.flush()
  sys.exit(code)
def ParseArguments(argv):
"""Parses command-line arguments.
Args:
argv: Command-line arguments, including the executable name, used to
execute this application.
Returns:
Tuple (args, option_dict) where:
args: List of command-line arguments following the executable name.
option_dict: Dictionary of parsed flags that maps keys from DEFAULT_ARGS
to their values, which are either pulled from the defaults, or from
command-line flags.
"""
option_dict = DEFAULT_ARGS.copy()
try:
opts, args = getopt.gnu_getopt(
argv[1:],
'a:cdhp:',
[ 'address=',
'admin_console_server=',
'admin_console_host=',
'allow_skipped_files',
'auth_domain=',
'clear_datastore',
'datastore_path=',
'debug',
'debug_imports',
'enable_sendmail',
'disable_static_caching',
'show_mail_body',
'help',
'history_path=',
'port=',
'require_indexes',
'smtp_host=',
'smtp_password=',
'smtp_port=',
'smtp_user=',
'template_dir=',
])
except getopt.GetoptError, e:
print >>sys.stderr, 'Error: %s' % e
PrintUsageExit(1)
for option, value in opts:
if option in ('-h', '--help'):
PrintUsageExit(0)
if option in ('-d', '--debug'):
option_dict[ARG_LOG_LEVEL] = logging.DEBUG
if option in ('-p', '--port'):
try:
option_dict[ARG_PORT] = int(value)
if not (65535 > option_dict[ARG_PORT] > 0):
raise ValueError
except ValueError:
print >>sys.stderr, 'Invalid value supplied for port'
PrintUsageExit(1)
if option in ('-a', '--address'):
option_dict[ARG_ADDRESS] = value
if option == '--datastore_path':
option_dict[ARG_DATASTORE_PATH] = os.path.abspath(value)
if option == '--history_path':
option_dict[ARG_HISTORY_PATH] = os.path.abspath(value)
if option in ('-c', '--clear_datastore'):
option_dict[ARG_CLEAR_DATASTORE] = True
if option == '--require_indexes':
option_dict[ARG_REQUIRE_INDEXES] = True
if option == '--smtp_host':
option_dict[ARG_SMTP_HOST] = value
if option == '--smtp_port':
try:
option_dict[ARG_SMTP_PORT] = int(value)
if not (65535 > option_dict[ARG_SMTP_PORT] > 0):
raise ValueError
except ValueError:
print >>sys.stderr, 'Invalid value supplied for SMTP port'
PrintUsageExit(1)
if option == '--smtp_user':
option_dict[ARG_SMTP_USER] = value
if option == '--smtp_password':
option_dict[ARG_SMTP_PASSWORD] = value
if option == '--enable_sendmail':
option_dict[ARG_ENABLE_SENDMAIL] = True
if option == '--show_mail_body':
option_dict[ARG_SHOW_MAIL_BODY] = True
if option == '--auth_domain':
option_dict['_DEFAULT_ENV_AUTH_DOMAIN'] = value
if option == '--debug_imports':
option_dict['_ENABLE_LOGGING'] = True
if option == '--template_dir':
option_dict[ARG_TEMPLATE_DIR] = value
if option == '--admin_console_server':
option_dict[ARG_ADMIN_CONSOLE_SERVER] = value.strip()
if option == '--admin_console_host':
option_dict[ARG_ADMIN_CONSOLE_HOST] = value
if option == '--allow_skipped_files':
option_dict[ARG_ALLOW_SKIPPED_FILES] = True
if option == '--disable_static_caching':
option_dict[ARG_STATIC_CACHING] = False
return args, option_dict
def MakeRpcServer(option_dict):
  """Create a new HttpRpcServer.

  Creates a new HttpRpcServer to check for updates to the SDK.

  Args:
    option_dict: The dict of command line options.

  Returns:
    A HttpRpcServer.
  """
  def _DummyCredentials():
    # The update check never actually logs in, so placeholder credentials
    # are sufficient.
    return ('unused_email', 'unused_password')

  console_server = option_dict[ARG_ADMIN_CONSOLE_SERVER]
  console_host = option_dict[ARG_ADMIN_CONSOLE_HOST]
  server = appengine_rpc.HttpRpcServer(
      console_server,
      _DummyCredentials,
      appcfg.GetUserAgent(),
      appcfg.GetSourceName(),
      host_override=console_host)
  server.authenticated = True
  return server
def main(argv):
  """Runs the development application server.

  Args:
    argv: Command-line arguments, including the program name.

  Returns:
    An exit code: 0 on clean shutdown, 1 on any startup or runtime error.
  """
  args, option_dict = ParseArguments(argv)
  # Exactly one positional argument is expected: the application root.
  if len(args) != 1:
    print >>sys.stderr, 'Invalid arguments'
    PrintUsageExit(1)
  root_path = args[0]
  # Accept either app.yaml or app.yml; the first one found wins.
  for suffix in ('yaml', 'yml'):
    path = os.path.join(root_path, 'app.%s' % suffix)
    if os.path.exists(path):
      # SetPaths must run before SetGlobals: it arranges sys.path for the
      # app's declared api_version before the SDK packages are imported.
      api_version = SetPaths(path)
      break
  else:
    logging.error("Application configuration file not found in %s" % root_path)
    return 1
  SetGlobals()
  dev_appserver.API_VERSION = api_version
  # Private keys set by ParseArguments are transferred into dev_appserver
  # module state rather than passed along as stub options.
  if '_DEFAULT_ENV_AUTH_DOMAIN' in option_dict:
    auth_domain = option_dict['_DEFAULT_ENV_AUTH_DOMAIN']
    dev_appserver.DEFAULT_ENV['AUTH_DOMAIN'] = auth_domain
  if '_ENABLE_LOGGING' in option_dict:
    enable_logging = option_dict['_ENABLE_LOGGING']
    dev_appserver.HardenedModulesHook.ENABLE_LOGGING = enable_logging
  log_level = option_dict[ARG_LOG_LEVEL]
  port = option_dict[ARG_PORT]
  datastore_path = option_dict[ARG_DATASTORE_PATH]
  login_url = option_dict[ARG_LOGIN_URL]
  template_dir = option_dict[ARG_TEMPLATE_DIR]
  serve_address = option_dict[ARG_ADDRESS]
  require_indexes = option_dict[ARG_REQUIRE_INDEXES]
  allow_skipped_files = option_dict[ARG_ALLOW_SKIPPED_FILES]
  static_caching = option_dict[ARG_STATIC_CACHING]
  logging.basicConfig(
    level=log_level,
    format='%(levelname)-8s %(asctime)s %(filename)s] %(message)s')
  config = None
  try:
    config, matcher = dev_appserver.LoadAppConfig(root_path, {})
  except yaml_errors.EventListenerError, e:
    logging.error('Fatal error when loading application configuration:\n' +
                  str(e))
    return 1
  except dev_appserver.InvalidAppConfigError, e:
    logging.error('Application configuration file invalid:\n%s', e)
    return 1
  # An empty admin_console_server disables the (opt-in) SDK update check.
  if option_dict[ARG_ADMIN_CONSOLE_SERVER] != '':
    server = MakeRpcServer(option_dict)
    update_check = appcfg.UpdateCheck(server, config)
    update_check.CheckSupportedVersion()
    if update_check.AllowedToCheckForUpdates():
      update_check.CheckForUpdates()
  try:
    dev_appserver.SetupStubs(config.application, **option_dict)
  except:
    # Bare except is deliberate here: any stub setup failure should be
    # reported and turned into a non-zero exit rather than a traceback.
    exc_type, exc_value, exc_traceback = sys.exc_info()
    logging.error(str(exc_type) + ': ' + str(exc_value))
    logging.debug(''.join(traceback.format_exception(
        exc_type, exc_value, exc_traceback)))
    return 1
  http_server = dev_appserver.CreateServer(
      root_path,
      login_url,
      port,
      template_dir,
      sdk_dir=SDK_PATH,
      serve_address=serve_address,
      require_indexes=require_indexes,
      allow_skipped_files=allow_skipped_files,
      static_caching=static_caching)
  logging.info('Running application %s on port %d: http://%s:%d',
               config.application, port, serve_address, port)
  try:
    try:
      http_server.serve_forever()
    except KeyboardInterrupt:
      # Ctrl-C is the normal way to stop the server; treat it as success.
      logging.info('Server interrupted by user, terminating')
    except:
      exc_info = sys.exc_info()
      info_string = '\n'.join(traceback.format_exception(*exc_info))
      logging.error('Error encountered:\n%s\nNow terminating.', info_string)
      return 1
  finally:
    # Always release the listening socket, even on error paths.
    http_server.server_close()
  return 0
if __name__ == '__main__':
  sys.exit(main(sys.argv))
else:
  # Imported as a module (e.g. by tests or wrapper scripts): sys.path is
  # assumed to be configured already, so bind the SDK globals immediately.
  SetGlobals()
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Tool for deploying apps to an app server.
Currently, the application only uploads new appversions. To do this, it first
walks the directory tree rooted at the path the user specifies, adding all the
files it finds to a list. It then uploads the application configuration
(app.yaml) to the server using HTTP, followed by uploading each of the files.
It then commits the transaction with another request.
The bulk of this work is handled by the AppVersionUpload class, which exposes
methods to add to the list of files, fetch a list of modified files, upload
files, and commit or rollback the transaction.
"""
import calendar
import datetime
import getpass
import logging
import mimetypes
import optparse
import os
import re
import sha
import sys
import tempfile
import time
import urllib2
import google
import yaml
from google.appengine.cron import groctimespecification
from google.appengine.api import appinfo
from google.appengine.api import croninfo
from google.appengine.api import validation
from google.appengine.api import yaml_errors
from google.appengine.api import yaml_object
from google.appengine.datastore import datastore_index
from google.appengine.tools import appengine_rpc
# Maximum number of files to batch into a single clonefiles/cloneblobs call.
MAX_FILES_TO_CLONE = 100
# Wire-format separators used by BuildClonePostBody: records are newline
# separated, fields within a record are pipe separated.
LIST_DELIMITER = "\n"
TUPLE_DELIMITER = "|"
# Location of the SDK VERSION file, relative to the 'google' package.
VERSION_FILE = "../VERSION"
# Timeout (seconds) for the /api/updatecheck request.
UPDATE_CHECK_TIMEOUT = 3
# Name of the per-user opt-in/nag-state file stored in the home directory.
NAG_FILE = ".appcfg_nag"
MAX_LOG_LEVEL = 4
# Module-wide verbosity switch consulted by StatusUpdate; 0 silences output.
verbosity = 1
# This SDK only supports the python runtime; restrict the validator to it.
appinfo.AppInfoExternal.ATTRIBUTES[appinfo.RUNTIME] = "python"
# Allow tests to widen the accepted api_version set via the environment.
_api_versions = os.environ.get('GOOGLE_TEST_API_VERSIONS', '1')
_options = validation.Options(*_api_versions.split(','))
appinfo.AppInfoExternal.ATTRIBUTES[appinfo.API_VERSION] = _options
del _api_versions, _options
def StatusUpdate(msg):
  """Write a status message to stderr, honoring the module verbosity.

  Args:
    msg: The string to print.
  """
  # verbosity is a module-level switch; anything above 0 enables output.
  if verbosity <= 0:
    return
  sys.stderr.write('%s\n' % msg)
def GetMimeTypeIfStaticFile(config, filename):
  """Looks up the mime type for 'filename'.

  Uses the handlers in 'config' to determine if the file should
  be treated as a static file.

  Args:
    config: The app.yaml object to check the filename against.
    filename: The name of the file.

  Returns:
    The mime type string. For example, 'text/plain' or 'image/gif'.
    None if this is not a static file.
  """
  for handler in config.handlers:
    handler_type = handler.GetHandlerType()
    if handler_type not in ("static_dir", "static_files"):
      continue
    # static_dir handlers match everything under the directory; static_files
    # handlers carry an explicit upload regex.
    if handler_type == "static_dir":
      pattern = os.path.join(re.escape(handler.GetHandler()), ".*")
    else:
      pattern = handler.upload
    if not re.match(pattern, filename):
      continue
    # First matching static handler decides the type.
    if handler.mime_type is not None:
      return handler.mime_type
    guess = mimetypes.guess_type(filename)[0]
    if guess is not None:
      return guess
    fallback = "application/octet-stream"
    sys.stderr.write("Could not guess mimetype for %s. Using %s.\n"
                     % (filename, fallback))
    return fallback
  return None
def BuildClonePostBody(file_tuples):
  """Build the post body for the /api/clone{files,blobs} urls.

  Args:
    file_tuples: A list of tuples. Each tuple should contain the entries
      appropriate for the endpoint in question.

  Returns:
    A string containing the properly delimited tuples.
  """
  # Each tuple becomes "path|field|field..."; records are newline-joined.
  records = [TUPLE_DELIMITER.join([tup[0]] + list(tup[1:]))
             for tup in file_tuples]
  return LIST_DELIMITER.join(records)
class NagFile(validation.Validated):
  """A validated YAML class to represent the user's nag preferences.

  Attributes:
    timestamp: The timestamp of the last nag.
    opt_in: True if the user wants to check for updates on dev_appserver
      start. False if not. May be None if we have not asked the user yet.
  """
  # Schema consumed by the validation framework; opt_in is Optional so an
  # absent value round-trips as None (meaning "not asked yet").
  ATTRIBUTES = {
      "timestamp": validation.TYPE_FLOAT,
      "opt_in": validation.Optional(validation.TYPE_BOOL),
  }
  @staticmethod
  def Load(nag_file):
    """Load a single NagFile object where one and only one is expected.

    Args:
      nag_file: A file-like object or string containing the yaml data to parse.

    Returns:
      A NagFile instance.
    """
    return yaml_object.BuildSingleObject(NagFile, nag_file)
def GetVersionObject(isfile=os.path.isfile, open_fn=open):
  """Gets the version of the SDK by parsing the VERSION file.

  Args:
    isfile: used for testing.
    open_fn: Used for testing.

  Returns:
    A Yaml object or None if the VERSION file does not exist.
  """
  # The VERSION file lives one level above the 'google' package directory.
  version_filename = os.path.join(os.path.dirname(google.__file__),
                                  VERSION_FILE)
  if not isfile(version_filename):
    logging.error("Could not find version file at %s", version_filename)
    return None
  fh = open_fn(version_filename, "r")
  try:
    return yaml.safe_load(fh)
  finally:
    fh.close()
def RetryWithBackoff(initial_delay, backoff_factor, max_tries, callable):
  """Calls a function multiple times, backing off more and more each time.

  The function is invoked at most max_tries times. Previously the loop
  condition re-invoked the callable one extra time after the retry budget
  was exhausted and reported False even when that final call succeeded;
  it also slept pointlessly after the last failure. Both are fixed here.

  Args:
    initial_delay: Initial delay after first try, in seconds.
    backoff_factor: Delay will be multiplied by this factor after each try.
    max_tries: Maximum number of tries.
    callable: The method to call, will pass no arguments. (Name kept for
      backward compatibility even though it shadows the builtin.)

  Returns:
    True if the function succeeded in one of its tries.

  Raises:
    Whatever the function raises--an exception will immediately stop retries.
  """
  delay = initial_delay
  while max_tries > 0:
    if callable():
      return True
    max_tries -= 1
    if max_tries > 0:
      # Only sleep when another attempt will actually follow.
      StatusUpdate("Will check again in %s seconds." % delay)
      time.sleep(delay)
      delay *= backoff_factor
  return False
class UpdateCheck(object):
  """Determines if the local SDK is the latest version.

  Nags the user when there are updates to the SDK. As the SDK becomes
  more out of date, the language in the nagging gets stronger. We
  store a little yaml file in the user's home directory so that we nag
  the user only once a week.

  The yaml file has the following field:
    'timestamp': Last time we nagged the user in seconds since the epoch.

  Attributes:
    server: An AbstractRpcServer instance used to check for the latest SDK.
    config: The app's AppInfoExternal.  Needed to determine which api_version
      the app is using.
  """
  def __init__(self,
               server,
               config,
               isdir=os.path.isdir,
               isfile=os.path.isfile,
               open_fn=open):
    """Create a new UpdateCheck.

    Args:
      server: The AbstractRpcServer to use.
      config: The yaml object that specifies the configuration of this
        application.
      isdir: Replacement for os.path.isdir (for testing).
      isfile: Replacement for os.path.isfile (for testing).
      open_fn: Replacement for the open builtin (for testing).
    """
    self.server = server
    self.config = config
    self.isdir = isdir
    self.isfile = isfile
    self.open = open_fn
  @staticmethod
  def MakeNagFilename():
    """Returns the filename for the nag file for this user."""
    # On Windows, expanduser('~/') can fail to resolve when HOMEDRIVE is
    # unset; derive a drive letter from the stdlib's location as a fallback
    # so the second expanduser call below succeeds.
    user_homedir = os.path.expanduser("~/")
    if not os.path.isdir(user_homedir):
      drive, unused_tail = os.path.splitdrive(os.__file__)
      if drive:
        os.environ["HOMEDRIVE"] = drive
    return os.path.expanduser("~/" + NAG_FILE)
  def _ParseVersionFile(self):
    """Parse the local VERSION file.

    Returns:
      A Yaml object or None if the file does not exist.
    """
    return GetVersionObject(isfile=self.isfile, open_fn=self.open)
  def CheckSupportedVersion(self):
    """Determines if the app's api_version is supported by the SDK.

    Uses the api_version field from the AppInfoExternal to determine if
    the SDK supports that api_version.

    Raises:
      SystemExit if the api_version is not supported.
    """
    version = self._ParseVersionFile()
    if version is None:
      # Missing VERSION file is non-fatal here; we simply cannot verify.
      logging.error("Could not determine if the SDK supports the api_version "
                    "requested in app.yaml.")
      return
    if self.config.api_version not in version["api_versions"]:
      logging.critical("The api_version specified in app.yaml (%s) is not "
                       "supported by this release of the SDK. The supported "
                       "api_versions are %s.",
                       self.config.api_version, version["api_versions"])
      sys.exit(1)
  def CheckForUpdates(self):
    """Queries the server for updates and nags the user if appropriate.

    Queries the server for the latest SDK version at the same time reporting
    the local SDK version. The server will respond with a yaml document
    containing the fields:
      "release": The name of the release (e.g. 1.2).
      "timestamp": The time the release was created (YYYY-MM-DD HH:MM AM/PM TZ).
      "api_versions": A list of api_version strings (e.g. ['1', 'beta']).

    We will nag the user with increasing severity if:
    - There is a new release.
    - There is a new release with a new api_version.
    - There is a new release that does not support the api_version named in
      self.config.
    """
    version = self._ParseVersionFile()
    if version is None:
      logging.info("Skipping update check")
      return
    logging.info("Checking for updates to the SDK.")
    try:
      response = self.server.Send("/api/updatecheck",
                                  timeout=UPDATE_CHECK_TIMEOUT,
                                  release=version["release"],
                                  timestamp=version["timestamp"],
                                  api_versions=version["api_versions"])
    except urllib2.URLError, e:
      # Network trouble must never block the developer; log and move on.
      logging.info("Update check failed: %s", e)
      return
    latest = yaml.safe_load(response)
    if latest["release"] == version["release"]:
      logging.info("The SDK is up to date.")
      return
    api_versions = latest["api_versions"]
    if self.config.api_version not in api_versions:
      # Strongest nag: the app's api_version is gone from the latest SDK.
      self._Nag(
          "The api version you are using (%s) is obsolete! You should\n"
          "upgrade your SDK and test that your code works with the new\n"
          "api version." % self.config.api_version,
          latest, version, force=True)
      return
    if self.config.api_version != api_versions[len(api_versions) - 1]:
      # Still supported, but no longer the newest api_version.
      self._Nag(
          "The api version you are using (%s) is deprecated. You should\n"
          "upgrade your SDK to try the new functionality." %
          self.config.api_version, latest, version)
      return
    self._Nag("There is a new release of the SDK available.",
              latest, version)
  def _ParseNagFile(self):
    """Parses the nag file.

    Returns:
      A NagFile if the file was present else None.
    """
    nag_filename = UpdateCheck.MakeNagFilename()
    if self.isfile(nag_filename):
      fh = self.open(nag_filename, "r")
      try:
        nag = NagFile.Load(fh)
      finally:
        fh.close()
      return nag
    return None
  def _WriteNagFile(self, nag):
    """Writes the NagFile to the user's nag file.

    If the destination path does not exist, this method will log an error
    and fail silently.

    Args:
      nag: The NagFile to write.
    """
    nagfilename = UpdateCheck.MakeNagFilename()
    try:
      fh = self.open(nagfilename, "w")
      try:
        fh.write(nag.ToYAML())
      finally:
        fh.close()
    except (OSError, IOError), e:
      # A broken home directory should not abort the tool.
      logging.error("Could not write nag file to %s. Error: %s", nagfilename, e)
  def _Nag(self, msg, latest, version, force=False):
    """Prints a nag message and updates the nag file's timestamp.

    Because we don't want to nag the user everytime, we store a simple
    yaml document in the user's home directory. If the timestamp in this
    doc is over a week old, we'll nag the user. And when we nag the user,
    we update the timestamp in this doc.

    Args:
      msg: The formatted message to print to the user.
      latest: The yaml document received from the server.
      version: The local yaml version document.
      force: If True, always nag the user, ignoring the nag file.
    """
    nag = self._ParseNagFile()
    if nag and not force:
      # Rate-limit to at most one nag per week.
      last_nag = datetime.datetime.fromtimestamp(nag.timestamp)
      if datetime.datetime.now() - last_nag < datetime.timedelta(weeks=1):
        logging.debug("Skipping nag message")
        return
    if nag is None:
      nag = NagFile()
    nag.timestamp = time.time()
    self._WriteNagFile(nag)
    print "****************************************************************"
    print msg
    print "-----------"
    print "Latest SDK:"
    print yaml.dump(latest)
    print "-----------"
    print "Your SDK:"
    print yaml.dump(version)
    print "-----------"
    print "Please visit http://code.google.com/appengine for the latest SDK"
    print "****************************************************************"
  def AllowedToCheckForUpdates(self, input_fn=raw_input):
    """Determines if the user wants to check for updates.

    On startup, the dev_appserver wants to check for updates to the SDK.
    Because this action reports usage to Google when the user is not
    otherwise communicating with Google (e.g. pushing a new app version),
    the user must opt in.

    If the user does not have a nag file, we will query the user and
    save the response in the nag file. Subsequent calls to this function
    will re-use that response.

    Args:
      input_fn: used to collect user input. This is for testing only.

    Returns:
      True if the user wants to check for updates. False otherwise.
    """
    nag = self._ParseNagFile()
    if nag is None:
      nag = NagFile()
      nag.timestamp = time.time()
    if nag.opt_in is None:
      # First run: ask once and persist the answer; default (empty input)
      # is treated as "yes".
      answer = input_fn("Allow dev_appserver to check for updates on startup? "
                        "(Y/n): ")
      answer = answer.strip().lower()
      if answer == "n" or answer == "no":
        print ("dev_appserver will not check for updates on startup. To "
               "change this setting, edit %s" % UpdateCheck.MakeNagFilename())
        nag.opt_in = False
      else:
        print ("dev_appserver will check for updates on startup. To change "
               "this setting, edit %s" % UpdateCheck.MakeNagFilename())
        nag.opt_in = True
      self._WriteNagFile(nag)
    return nag.opt_in
class IndexDefinitionUpload(object):
  """Provides facilities to upload index definitions to the hosting service."""

  def __init__(self, server, config, definitions):
    """Creates a new DatastoreIndexUpload.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: The AppInfoExternal object derived from the app.yaml file.
      definitions: An IndexDefinitions object.
    """
    self.server = server
    self.config = config
    self.definitions = definitions

  def DoUpload(self):
    """Serializes the index definitions and sends them to the server."""
    StatusUpdate("Uploading index definitions.")
    payload = self.definitions.ToYAML()
    self.server.Send("/api/datastore/index/add",
                     app_id=self.config.application,
                     version=self.config.version,
                     payload=payload)
class CronEntryUpload(object):
  """Provides facilities to upload cron entries to the hosting service."""

  def __init__(self, server, config, cron):
    """Creates a new CronEntryUpload.

    Args:
      server: The RPC server to use. Should be an instance of a subclass of
        AbstractRpcServer
      config: The AppInfoExternal object derived from the app.yaml file.
      cron: The CronInfoExternal object loaded from the cron.yaml file.
    """
    self.server = server
    self.config = config
    self.cron = cron

  def DoUpload(self):
    """Serializes the cron entries and sends them to the server."""
    StatusUpdate("Uploading cron entries.")
    payload = self.cron.ToYAML()
    self.server.Send("/api/datastore/cron/update",
                     app_id=self.config.application,
                     version=self.config.version,
                     payload=payload)
class IndexOperation(object):
  """Provide facilities for writing Index operation commands."""

  def __init__(self, server, config):
    """Creates a new IndexOperation.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
    """
    self.server = server
    self.config = config

  def DoDiff(self, definitions):
    """Retrieve diff file from the server.

    Args:
      definitions: datastore_index.IndexDefinitions as loaded from users
        index.yaml file.

    Returns:
      A pair of datastore_index.IndexDefinitions objects. The first record
      is the set of indexes that are present in the index.yaml file but missing
      from the server. The second record is the set of indexes that are
      present on the server but missing from the index.yaml file (indicating
      that these indexes should probably be vacuumed).
    """
    StatusUpdate("Fetching index definitions diff.")
    yaml_payload = definitions.ToYAML()
    response = self.server.Send("/api/datastore/index/diff",
                                app_id=self.config.application,
                                payload=yaml_payload)
    return datastore_index.ParseMultipleIndexDefinitions(response)

  def DoDelete(self, definitions):
    """Delete indexes from the server.

    Args:
      definitions: Index definitions to delete from datastore.

    Returns:
      A single datastore_index.IndexDefinitions containing indexes that were
      not deleted, probably because they were already removed. This may
      be normal behavior as there is a potential race condition between fetching
      the index-diff and sending deletion confirmation through.
    """
    StatusUpdate("Deleting selected index definitions.")
    yaml_payload = definitions.ToYAML()
    response = self.server.Send("/api/datastore/index/delete",
                                app_id=self.config.application,
                                payload=yaml_payload)
    return datastore_index.ParseIndexDefinitions(response)
class VacuumIndexesOperation(IndexOperation):
"""Provide facilities to request the deletion of datastore indexes."""
  def __init__(self, server, config, force,
               confirmation_fn=raw_input):
    """Creates a new VacuumIndexesOperation.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
      force: True to force deletion of indexes, else False.
      confirmation_fn: Function used for getting input from the user;
        overridable for testing (defaults to raw_input).
    """
    super(VacuumIndexesOperation, self).__init__(server, config)
    # When True, indexes are deleted without per-index confirmation; may
    # also be flipped on at runtime when the user answers 'a' (all).
    self.force = force
    self.confirmation_fn = confirmation_fn
def GetConfirmation(self, index):
"""Get confirmation from user to delete an index.
This method will enter an input loop until the user provides a
response it is expecting. Valid input is one of three responses:
y: Confirm deletion of index.
n: Do not delete index.
a: Delete all indexes without asking for further confirmation.
If the user enters nothing at all, the default action is to skip
that index and do not delete.
If the user selects 'a', as a side effect, the 'force' flag is set.
Args:
index: Index to confirm.
Returns:
True if user enters 'y' or 'a'. False if user enter 'n'.
"""
while True:
print "This index is no longer defined in your index.yaml file."
print
print index.ToYAML()
print
confirmation = self.confirmation_fn(
"Are you sure you want to delete this index? (N/y/a): ")
confirmation = confirmation.strip().lower()
if confirmation == "y":
return True
elif confirmation == "n" or not confirmation:
return False
elif confirmation == "a":
self.force = True
return True
else:
print "Did not understand your response."
def DoVacuum(self, definitions):
"""Vacuum indexes in datastore.
This method will query the server to determine which indexes are not
being used according to the user's local index.yaml file. Once it has
made this determination, it confirms with the user which unused indexes
should be deleted. Once confirmation for each index is receives, it
deletes those indexes.
Because another user may in theory delete the same indexes at the same
time as the user, there is a potential race condition. In this rare cases,
some of the indexes previously confirmed for deletion will not be found.
The user is notified which indexes these were.
Args:
definitions: datastore_index.IndexDefinitions as loaded from users
index.yaml file.
"""
unused_new_indexes, notused_indexes = self.DoDiff(definitions)
deletions = datastore_index.IndexDefinitions(indexes=[])
if notused_indexes.indexes is not None:
for index in notused_indexes.indexes:
if self.force or self.GetConfirmation(index):
deletions.indexes.append(index)
if deletions.indexes:
not_deleted = self.DoDelete(deletions)
if not_deleted.indexes:
not_deleted_count = len(not_deleted.indexes)
if not_deleted_count == 1:
warning_message = ("An index was not deleted. Most likely this is "
"because it no longer exists.\n\n")
else:
warning_message = ("%d indexes were not deleted. Most likely this "
"is because they no longer exist.\n\n"
% not_deleted_count)
for index in not_deleted.indexes:
warning_message += index.ToYAML()
logging.warning(warning_message)
class LogsRequester(object):
  """Provide facilities to export request logs."""

  def __init__(self, server, config, output_file,
               num_days, append, severity, now):
    """Constructor.

    Args:
      server: The RPC server to use. Should be an instance of HttpRpcServer
        or TestRpcServer.
      config: appinfo.AppInfoExternal configuration object.
      output_file: Output file name, or '-' for stdout.
      num_days: Number of days worth of logs to export; 0 for all available.
      append: True if appending to an existing file.
      severity: App log severity to request (0-4); None for no app logs.
      now: POSIX timestamp used for calculating valid dates for num_days.
    """
    self.server = server
    self.config = config
    self.output_file = output_file
    self.append = append
    self.num_days = num_days
    self.severity = severity
    # Logs are requested for the ".1" minor version of the configured version.
    self.version_id = self.config.version + ".1"
    self.sentinel = None
    self.write_mode = "w"
    if self.append:
      # In append mode, remember the newest record already present so the
      # download can stop as soon as it reaches previously-saved data.
      self.sentinel = FindSentinel(self.output_file)
      self.write_mode = "a"
    self.valid_dates = None
    if self.num_days:
      # Build a regex matching request-log lines whose bracketed date falls
      # within the last num_days days, in Pacific time (the timezone the
      # server uses for log timestamps). Both numeric and abbreviated month
      # formats are accepted.
      patterns = []
      now = PacificTime(now)
      for i in xrange(self.num_days):
        then = time.gmtime(now - 24*3600 * i)
        patterns.append(re.escape(time.strftime("%d/%m/%Y", then)))
        patterns.append(re.escape(time.strftime("%d/%b/%Y", then)))
      self.valid_dates = re.compile(r"[^[]+\[(" + "|".join(patterns) + r"):")

  def DownloadLogs(self):
    """Download the requested logs.

    This will write the logs to the file designated by
    self.output_file, or to stdout if the filename is '-'.
    Multiple roundtrips to the server may be made.
    """
    StatusUpdate("Downloading request logs for %s %s." %
                 (self.config.application, self.version_id))
    # Records arrive newest-first, so they are buffered in a temp file and
    # copied out in reverse at the end to produce chronological output.
    tf = tempfile.TemporaryFile()
    offset = None
    try:
      while True:
        try:
          offset = self.RequestLogLines(tf, offset)
          if not offset:
            break
        except KeyboardInterrupt:
          # Keep whatever was fetched so far rather than discarding it.
          StatusUpdate("Keyboard interrupt; saving data downloaded so far.")
          break
      StatusUpdate("Copying request logs to %r." % self.output_file)
      if self.output_file == "-":
        of = sys.stdout
      else:
        try:
          of = open(self.output_file, self.write_mode)
        except IOError, err:
          StatusUpdate("Can't write %r: %s." % (self.output_file, err))
          sys.exit(1)
      try:
        line_count = CopyReversedLines(tf, of)
      finally:
        of.flush()
        if of is not sys.stdout:
          of.close()
    finally:
      tf.close()
    StatusUpdate("Copied %d records." % line_count)

  def RequestLogLines(self, tf, offset):
    """Make a single roundtrip to the server.

    Args:
      tf: Writable binary stream to which the log lines returned by
        the server are written, stripped of headers, and excluding
        lines skipped due to self.sentinel or self.valid_dates filtering.
      offset: Offset string for a continued request; None for the first.

    Returns:
      The offset string to be used for the next request, if another
      request should be issued; or None, if not.
    """
    logging.info("Request with offset %r.", offset)
    kwds = {"app_id": self.config.application,
            "version": self.version_id,
            "limit": 100,
            }
    if offset:
      kwds["offset"] = offset
    if self.severity is not None:
      kwds["severity"] = str(self.severity)
    response = self.server.Send("/api/request_logs", payload=None, **kwds)
    # Embedded app-log lines are carriage-return separated; turn them into
    # NUL bytes so each record stays a single line here (CopyReversedLines
    # later renders NULs as "\n\t" continuation lines).
    response = response.replace("\r", "\0")
    lines = response.splitlines()
    logging.info("Received %d bytes, %d records.", len(response), len(lines))
    offset = None
    # A leading '#' header may carry the continuation offset.
    if lines and lines[0].startswith("#"):
      match = re.match(r"^#\s*next_offset=(\S+)\s*$", lines[0])
      del lines[0]
      if match:
        offset = match.group(1)
    # A trailing '#' line is a footer; drop it.
    if lines and lines[-1].startswith("#"):
      del lines[-1]
    valid_dates = self.valid_dates
    sentinel = self.sentinel
    len_sentinel = None
    if sentinel:
      len_sentinel = len(sentinel)
    for line in lines:
      # Stop entirely (return None) on reaching the append-mode sentinel or
      # a record older than the requested date window.
      if ((sentinel and
           line.startswith(sentinel) and
           line[len_sentinel : len_sentinel+1] in ("", "\0")) or
          (valid_dates and not valid_dates.match(line))):
        return None
      tf.write(line + "\n")
    if not lines:
      return None
    return offset
def PacificTime(now):
  """Helper to return the number of seconds between UTC and Pacific time.

  This is needed to compute today's date in Pacific time (more
  specifically: Mountain View local time), which is how request logs
  are reported. (Google servers always report times in Mountain View
  local time, regardless of where they are physically located.)

  This takes (post-2006) US DST into account. Pacific time is either
  8 hours or 7 hours west of UTC, depending on whether DST is in
  effect. Since 2007, US DST starts on the second Sunday in March,
  and ends on the first Sunday in November. (Reference:
  http://aa.usno.navy.mil/faq/docs/daylight_time.php.)

  Note that the server doesn't report its local time (the HTTP Date
  header uses UTC), and the client's local time is irrelevant.

  Args:
    now: A posix timestamp giving current UTC time.

  Returns:
    A pseudo-posix timestamp giving current Pacific time. Passing
    this through time.gmtime() will produce a tuple in Pacific local
    time.
  """
  # Shift to PST (UTC-8) first, then add back an hour if that moment
  # falls inside the daylight-saving window.
  pacific = now - 8 * 3600
  if IsPacificDST(pacific):
    pacific += 3600
  return pacific
def IsPacificDST(now):
  """Helper for PacificTime to decide whether now is Pacific DST (PDT).

  Args:
    now: A pseudo-posix timestamp giving current time in PST.

  Returns:
    True if now falls within the range of DST, False otherwise.
  """
  day_seconds = 24 * 3600
  sunday = 6
  pst_tuple = time.gmtime(now)
  year = pst_tuple[0]
  # The post-2006 US DST rules implemented here only apply from 2007 on.
  assert year >= 2007
  # DST begins at 2:00 on the second Sunday in March: start from March 8
  # (the earliest possible second Sunday) and walk forward to a Sunday.
  dst_start = calendar.timegm((year, 3, 8, 2, 0, 0, 0, 0, 0))
  while time.gmtime(dst_start).tm_wday != sunday:
    dst_start += day_seconds
  # DST ends at 2:00 on the first Sunday in November.
  dst_end = calendar.timegm((year, 11, 1, 2, 0, 0, 0, 0, 0))
  while time.gmtime(dst_end).tm_wday != sunday:
    dst_end += day_seconds
  return dst_start <= now < dst_end
def CopyReversedLines(instream, outstream, blocksize=2**16):
  r"""Copy lines from input stream to output stream in reverse order.

  As a special feature, null bytes in the input are turned into
  newlines followed by tabs in the output, but these "sub-lines"
  separated by null bytes are not reversed. E.g. If the input is
  "A\0B\nC\0D\n", the output is "C\n\tD\nA\n\tB\n".

  Args:
    instream: A seekable stream open for reading in binary mode.
    outstream: A stream open for writing; doesn't have to be seekable or binary.
    blocksize: Optional block size for buffering, for unit testing.

  Returns:
    The number of lines copied.
  """
  line_count = 0
  instream.seek(0, 2)
  last_block = instream.tell() // blocksize
  # Lines spanning a block boundary are carried backwards via 'spillover':
  # the first (possibly partial) line of each block is glued onto the
  # preceding block's data on the next iteration.
  spillover = ""
  # Walk the blocks from the end of the file towards the start.
  for iblock in xrange(last_block + 1, -1, -1):
    instream.seek(iblock * blocksize)
    data = instream.read(blocksize)
    lines = data.splitlines(True)
    # Rejoin this block's trailing partial line with the spillover from
    # the (later) block processed on the previous iteration.
    lines[-1:] = "".join(lines[-1:] + [spillover]).splitlines(True)
    if lines and not lines[-1].endswith("\n"):
      # Ensure the file's very last line is newline-terminated.
      lines[-1] += "\n"
    lines.reverse()
    if lines and iblock > 0:
      # The first line of this block may be incomplete; defer it to the
      # next (earlier) block instead of writing it now.
      spillover = lines.pop()
    if lines:
      line_count += len(lines)
      # NUL bytes mark embedded sub-lines; render them as "\n\t" without
      # reversing them (see docstring).
      data = "".join(lines).replace("\0", "\n\t")
      outstream.write(data)
  return line_count
def FindSentinel(filename, blocksize=2**16):
  """Return the sentinel line from the output file.

  Args:
    filename: The filename of the output file. (We'll read this file.)
    blocksize: Optional block size for buffering, for unit testing.

  Returns:
    The contents of the last line in the file that doesn't start with
    a tab, with its trailing newline stripped; or None if the file
    couldn't be opened or no such line could be found by inspecting
    the last 'blocksize' bytes of the file.
  """
  if filename == "-":
    # Appending makes no sense for stdout output.
    StatusUpdate("Can't combine --append with output to stdout.")
    sys.exit(2)
  try:
    fp = open(filename, "rb")
  except IOError, err:
    StatusUpdate("Append mode disabled: can't read %r: %s." % (filename, err))
    return None
  try:
    # Only inspect the tail of the file.
    fp.seek(0, 2)
    fp.seek(max(0, fp.tell() - blocksize))
    lines = fp.readlines()
    # The first line read may be a partial line after seeking; discard it.
    del lines[:1]
    sentinel = None
    for line in lines:
      # Tab-prefixed lines are app-log continuations, not request records;
      # keep the last non-continuation line seen.
      if not line.startswith("\t"):
        sentinel = line
    if not sentinel:
      StatusUpdate("Append mode disabled: can't find sentinel in %r." %
                   filename)
      return None
    return sentinel.rstrip("\n")
  finally:
    fp.close()
class AppVersionUpload(object):
"""Provides facilities to upload a new appversion to the hosting service.
Attributes:
server: The AbstractRpcServer to use for the upload.
config: The AppInfoExternal object derived from the app.yaml file.
app_id: The application string from 'config'.
version: The version string from 'config'.
files: A dictionary of files to upload to the server, mapping path to
hash of the file contents.
in_transaction: True iff a transaction with the server has started.
An AppVersionUpload can do only one transaction at a time.
deployed: True iff the Deploy method has been called.
"""
def __init__(self, server, config):
"""Creates a new AppVersionUpload.
Args:
server: The RPC server to use. Should be an instance of HttpRpcServer or
TestRpcServer.
config: An AppInfoExternal object that specifies the configuration for
this application.
"""
self.server = server
self.config = config
self.app_id = self.config.application
self.version = self.config.version
self.files = {}
self.in_transaction = False
self.deployed = False
def _Hash(self, content):
"""Compute the hash of the content.
Args:
content: The data to hash as a string.
Returns:
The string representation of the hash.
"""
h = sha.new(content).hexdigest()
return "%s_%s_%s_%s_%s" % (h[0:8], h[8:16], h[16:24], h[24:32], h[32:40])
def AddFile(self, path, file_handle):
"""Adds the provided file to the list to be pushed to the server.
Args:
path: The path the file should be uploaded as.
file_handle: A stream containing data to upload.
"""
assert not self.in_transaction, "Already in a transaction."
assert file_handle is not None
reason = appinfo.ValidFilename(path)
if reason:
logging.error(reason)
return
pos = file_handle.tell()
content_hash = self._Hash(file_handle.read())
file_handle.seek(pos, 0)
self.files[path] = content_hash
def Begin(self):
"""Begins the transaction, returning a list of files that need uploading.
All calls to AddFile must be made before calling Begin().
Returns:
A list of pathnames for files that should be uploaded using UploadFile()
before Commit() can be called.
"""
assert not self.in_transaction, "Already in a transaction."
StatusUpdate("Initiating update.")
self.server.Send("/api/appversion/create", app_id=self.app_id,
version=self.version, payload=self.config.ToYAML())
self.in_transaction = True
files_to_clone = []
blobs_to_clone = []
for path, content_hash in self.files.iteritems():
mime_type = GetMimeTypeIfStaticFile(self.config, path)
if mime_type is not None:
blobs_to_clone.append((path, content_hash, mime_type))
else:
files_to_clone.append((path, content_hash))
files_to_upload = {}
def CloneFiles(url, files, file_type):
"""Sends files to the given url.
Args:
url: the server URL to use.
files: a list of files
file_type: the type of the files
"""
if not files:
return
StatusUpdate("Cloning %d %s file%s." %
(len(files), file_type, len(files) != 1 and "s" or ""))
for i in xrange(0, len(files), MAX_FILES_TO_CLONE):
if i > 0 and i % MAX_FILES_TO_CLONE == 0:
StatusUpdate("Cloned %d files." % i)
chunk = files[i:min(len(files), i + MAX_FILES_TO_CLONE)]
result = self.server.Send(url,
app_id=self.app_id, version=self.version,
payload=BuildClonePostBody(chunk))
if result:
files_to_upload.update(dict(
(f, self.files[f]) for f in result.split(LIST_DELIMITER)))
CloneFiles("/api/appversion/cloneblobs", blobs_to_clone, "static")
CloneFiles("/api/appversion/clonefiles", files_to_clone, "application")
logging.info("Files to upload: " + str(files_to_upload))
self.files = files_to_upload
return sorted(files_to_upload.iterkeys())
def UploadFile(self, path, file_handle):
"""Uploads a file to the hosting service.
Must only be called after Begin().
The path provided must be one of those that were returned by Begin().
Args:
path: The path the file is being uploaded as.
file_handle: A file-like object containing the data to upload.
Raises:
KeyError: The provided file is not amongst those to be uploaded.
"""
assert self.in_transaction, "Begin() must be called before UploadFile()."
if path not in self.files:
raise KeyError("File '%s' is not in the list of files to be uploaded."
% path)
del self.files[path]
mime_type = GetMimeTypeIfStaticFile(self.config, path)
if mime_type is not None:
self.server.Send("/api/appversion/addblob", app_id=self.app_id,
version=self.version, path=path, content_type=mime_type,
payload=file_handle.read())
else:
self.server.Send("/api/appversion/addfile", app_id=self.app_id,
version=self.version, path=path,
payload=file_handle.read())
def Commit(self):
"""Commits the transaction, making the new app version available.
All the files returned by Begin() must have been uploaded with UploadFile()
before Commit() can be called.
This tries the new 'deploy' method; if that fails it uses the old 'commit'.
Raises:
Exception: Some required files were not uploaded.
"""
assert self.in_transaction, "Begin() must be called before Commit()."
if self.files:
raise Exception("Not all required files have been uploaded.")
try:
self.Deploy()
if not RetryWithBackoff(1, 2, 8, self.IsReady):
logging.warning("Version still not ready to serve, aborting.")
raise Exception("Version not ready.")
self.StartServing()
except urllib2.HTTPError, e:
if e.code != 404:
raise
StatusUpdate("Closing update.")
self.server.Send("/api/appversion/commit", app_id=self.app_id,
version=self.version)
self.in_transaction = False
def Deploy(self):
"""Deploys the new app version but does not make it default.
All the files returned by Begin() must have been uploaded with UploadFile()
before Deploy() can be called.
Raises:
Exception: Some required files were not uploaded.
"""
assert self.in_transaction, "Begin() must be called before Deploy()."
if self.files:
raise Exception("Not all required files have been uploaded.")
StatusUpdate("Deploying new version.")
self.server.Send("/api/appversion/deploy", app_id=self.app_id,
version=self.version)
self.deployed = True
def IsReady(self):
"""Check if the new app version is ready to serve traffic.
Raises:
Exception: Deploy has not yet been called.
Returns:
True if the server returned the app is ready to serve.
"""
assert self.deployed, "Deploy() must be called before IsReady()."
StatusUpdate("Checking if new version is ready to serve.")
result = self.server.Send("/api/appversion/isready", app_id=self.app_id,
version=self.version)
return result == "1"
def StartServing(self):
"""Start serving with the newly created version.
Raises:
Exception: Deploy has not yet been called.
"""
assert self.deployed, "Deploy() must be called before IsReady()."
StatusUpdate("Closing update: new version is ready to start serving.")
self.server.Send("/api/appversion/startserving",
app_id=self.app_id, version=self.version)
def Rollback(self):
"""Rolls back the transaction if one is in progress."""
if not self.in_transaction:
return
StatusUpdate("Rolling back the update.")
self.server.Send("/api/appversion/rollback", app_id=self.app_id,
version=self.version)
self.in_transaction = False
self.files = {}
def DoUpload(self, paths, max_size, openfunc):
"""Uploads a new appversion with the given config and files to the server.
Args:
paths: An iterator that yields the relative paths of the files to upload.
max_size: The maximum size file to upload.
openfunc: A function that takes a path and returns a file-like object.
"""
logging.info("Reading app configuration.")
path = ""
try:
StatusUpdate("Scanning files on local disk.")
num_files = 0
for path in paths:
file_handle = openfunc(path)
try:
if self.config.skip_files.match(path):
logging.info("Ignoring file '%s': File matches ignore regex.",
path)
else:
file_length = GetFileLength(file_handle)
if file_length > max_size:
logging.error("Ignoring file '%s': Too long "
"(max %d bytes, file is %d bytes)",
path, max_size, file_length)
else:
logging.info("Processing file '%s'", path)
self.AddFile(path, file_handle)
finally:
file_handle.close()
num_files += 1
if num_files % 500 == 0:
StatusUpdate("Scanned %d files." % num_files)
except KeyboardInterrupt:
logging.info("User interrupted. Aborting.")
raise
except EnvironmentError, e:
logging.error("An error occurred processing file '%s': %s. Aborting.",
path, e)
raise
try:
missing_files = self.Begin()
if missing_files:
StatusUpdate("Uploading %d files." % len(missing_files))
num_files = 0
for missing_file in missing_files:
logging.info("Uploading file '%s'" % missing_file)
file_handle = openfunc(missing_file)
try:
self.UploadFile(missing_file, file_handle)
finally:
file_handle.close()
num_files += 1
if num_files % 500 == 0:
StatusUpdate("Uploaded %d files." % num_files)
self.Commit()
except KeyboardInterrupt:
logging.info("User interrupted. Aborting.")
self.Rollback()
raise
except:
logging.exception("An unexpected error occurred. Aborting.")
self.Rollback()
raise
logging.info("Done!")
def FileIterator(base, separator=os.path.sep):
  """Walks a directory tree, returning all the files. Follows symlinks.

  Args:
    base: The base path to search for files under.
    separator: Path separator used by the running system's platform.

  Yields:
    Paths of files found, relative to base.
  """
  # Iterative depth-first walk; directories still to visit are kept as
  # paths relative to 'base'.
  pending = [""]
  while pending:
    subdir = pending.pop()
    for item in os.listdir(os.path.join(base, subdir)):
      relpath = os.path.join(subdir, item)
      target = os.path.join(base, relpath)
      if os.path.isdir(target):
        pending.append(relpath)
      elif os.path.isfile(target):
        if separator == "\\":
          # Normalize Windows separators so uploaded paths use '/'.
          relpath = relpath.replace("\\", "/")
        yield relpath
def GetFileLength(fh):
  """Returns the length of the file represented by fh.

  This function is capable of finding the length of any seekable stream,
  unlike os.fstat, which only works on file streams.

  Args:
    fh: The stream to get the length of.

  Returns:
    The length of the stream.
  """
  current = fh.tell()
  try:
    # Seek to the end; the resulting offset is the stream length.
    fh.seek(0, 2)
    return fh.tell()
  finally:
    # Always restore the caller's stream position.
    fh.seek(current, 0)
def GetUserAgent(get_version=GetVersionObject,
                 get_platform=appengine_rpc.GetPlatformToken):
  """Determines the value of the 'User-agent' header to use for HTTP requests.

  If the 'APPCFG_SDK_NAME' environment variable is present, that will be
  used as the first product token in the user-agent.

  Args:
    get_version: Used for testing.
    get_platform: Used for testing.

  Returns:
    String containing the 'user-agent' header value, which includes the SDK
    version, the platform information, and the version of Python;
    e.g., "appcfg_py/1.0.1 Darwin/9.2.0 Python/2.5.2".
  """
  tokens = []
  # An explicit SDK name from the environment overrides the release token.
  sdk_name = os.environ.get("APPCFG_SDK_NAME")
  if sdk_name:
    tokens.append(sdk_name)
  else:
    version = get_version()
    release = "unknown" if version is None else version["release"]
    tokens.append("appcfg_py/%s" % release)
  tokens.append(get_platform())
  tokens.append("Python/%s" % ".".join(str(i) for i in sys.version_info))
  return " ".join(tokens)
def GetSourceName(get_version=GetVersionObject):
  """Gets the name of this source version.

  Args:
    get_version: Used for testing.

  Returns:
    A string of the form "Google-appcfg-<release>", with "unknown" as the
    release when no version information is available.
  """
  version = get_version()
  if version is None:
    return "Google-appcfg-unknown"
  return "Google-appcfg-%s" % (version["release"],)
class AppCfgApp(object):
"""Singleton class to wrap AppCfg tool functionality.
This class is responsible for parsing the command line and executing
the desired action on behalf of the user. Processing files and
communicating with the server is handled by other classes.
Attributes:
actions: A dictionary mapping action names to Action objects.
action: The Action specified on the command line.
parser: An instance of optparse.OptionParser.
options: The command line options parsed by 'parser'.
argv: The original command line as a list.
args: The positional command line args left over after parsing the options.
raw_input_fn: Function used for getting raw user input, like email.
password_input_fn: Function used for getting user password.
error_fh: Unexpected HTTPErrors are printed to this file handle.
Attributes for testing:
parser_class: The class to use for parsing the command line. Because
OptionsParser will exit the program when there is a parse failure, it
is nice to subclass OptionsParser and catch the error before exiting.
"""
def __init__(self, argv, parser_class=optparse.OptionParser,
             rpc_server_class=appengine_rpc.HttpRpcServer,
             raw_input_fn=raw_input,
             password_input_fn=getpass.getpass,
             error_fh=sys.stderr,
             update_check_class=UpdateCheck):
  """Initializer. Parses the cmdline and selects the Action to use.

  Initializes all of the attributes described in the class docstring.
  Prints help or error messages if there is an error parsing the cmdline.

  Args:
    argv: The list of arguments passed to this program.
    parser_class: Options parser to use for this application.
    rpc_server_class: RPC server class to use for this application.
    raw_input_fn: Function used for getting user email.
    password_input_fn: Function used for getting user password.
    error_fh: Unexpected HTTPErrors are printed to this file handle.
    update_check_class: UpdateCheck class (can be replaced for testing).
  """
  self.parser_class = parser_class
  self.argv = argv
  self.rpc_server_class = rpc_server_class
  self.raw_input_fn = raw_input_fn
  self.password_input_fn = password_input_fn
  self.error_fh = error_fh
  self.update_check_class = update_check_class
  # First parse: a generic parser (with every action's options registered)
  # just to identify which action was requested.
  self.parser = self._GetOptionParser()
  for action in self.actions.itervalues():
    action.options(self, self.parser)
  self.options, self.args = self.parser.parse_args(argv[1:])
  if len(self.args) < 1:
    self._PrintHelpAndExit()
  if self.args[0] not in self.actions:
    self.parser.error("Unknown action '%s'\n%s" %
                      (self.args[0], self.parser.get_description()))
  action_name = self.args.pop(0)
  self.action = self.actions[action_name]
  # Second parse: re-parse with an action-specific parser so help text and
  # options match the chosen action.
  self.parser, self.options = self._MakeSpecificParser(self.action)
  if self.options.help:
    self._PrintHelpAndExit()
  # Map the -q/-v/--noisy verbosity constant onto the logging level.
  if self.options.verbose == 2:
    logging.getLogger().setLevel(logging.INFO)
  elif self.options.verbose == 3:
    logging.getLogger().setLevel(logging.DEBUG)
  # Also expose verbosity to module-level helpers (e.g. StatusUpdate).
  global verbosity
  verbosity = self.options.verbose
def Run(self):
  """Executes the requested action.

  Catches any HTTPErrors raised by the action and prints them to stderr.

  Returns:
    0 on success; 1 if the server or the yaml parser reported an error.
  """
  try:
    self.action(self)
  except urllib2.HTTPError, e:
    # Show the server's error body verbatim so the user sees the cause.
    body = e.read()
    print >>self.error_fh, ("Error %d: --- begin server output ---\n"
                            "%s\n--- end server output ---" %
                            (e.code, body.rstrip("\n")))
    return 1
  except yaml_errors.EventListenerError, e:
    print >>self.error_fh, ("Error parsing yaml file:\n%s" % e)
    return 1
  return 0
def _GetActionDescriptions(self):
  """Returns a formatted string containing the short_descs for all actions."""
  # One "  name: description" line per action, in alphabetical order.
  entries = []
  for action_name in sorted(self.actions.keys()):
    entries.append("  %s: %s\n" % (action_name,
                                   self.actions[action_name].short_desc))
  return "".join(entries)
def _GetOptionParser(self):
  """Creates an OptionParser with generic usage and description strings.

  Returns:
    An OptionParser instance.
  """

  class Formatter(optparse.IndentedHelpFormatter):
    """Custom help formatter that does not reformat the description."""

    def format_description(self, description):
      """Very simple formatter."""
      return description + "\n"

  desc = self._GetActionDescriptions()
  desc = ("Action must be one of:\n%s"
          "Use 'help <action>' for a detailed description.") % desc
  # conflict_handler="resolve" lets action-specific options later override
  # these generic ones without raising an optparse conflict error.
  parser = self.parser_class(usage="%prog [options] <action>",
                             description=desc,
                             formatter=Formatter(),
                             conflict_handler="resolve")
  # -h is re-registered with store_true so help can be deferred until the
  # action-specific parser exists (see __init__ / _PrintHelpAndExit).
  parser.add_option("-h", "--help", action="store_true",
                    dest="help", help="Show the help message and exit.")
  # Verbosity constants: 0=errors only, 1=default, 2=info, 3=debug.
  parser.add_option("-q", "--quiet", action="store_const", const=0,
                    dest="verbose", help="Print errors only.")
  parser.add_option("-v", "--verbose", action="store_const", const=2,
                    dest="verbose", default=1,
                    help="Print info level logs.")
  parser.add_option("--noisy", action="store_const", const=3,
                    dest="verbose", help="Print all logs.")
  parser.add_option("-s", "--server", action="store", dest="server",
                    default="appengine.google.com",
                    metavar="SERVER", help="The server to connect to.")
  parser.add_option("-e", "--email", action="store", dest="email",
                    metavar="EMAIL", default=None,
                    help="The username to use. Will prompt if omitted.")
  parser.add_option("-H", "--host", action="store", dest="host",
                    metavar="HOST", default=None,
                    help="Overrides the Host header sent with all RPCs.")
  parser.add_option("--no_cookies", action="store_false",
                    dest="save_cookies", default=True,
                    help="Do not save authentication cookies to local disk.")
  parser.add_option("--passin", action="store_true",
                    dest="passin", default=False,
                    help="Read the login password from stdin.")
  return parser
def _MakeSpecificParser(self, action):
  """Creates a new parser with documentation specific to 'action'.

  Args:
    action: An Action instance to be used when initializing the new parser.

  Returns:
    A tuple containing:
    parser: An instance of OptionsParser customized to 'action'.
    options: The command line options after re-parsing.
  """
  # Start from the generic parser, then overlay the action's usage text,
  # description and extra options before re-parsing the command line.
  specific_parser = self._GetOptionParser()
  specific_parser.set_usage(action.usage)
  specific_parser.set_description("%s\n%s" % (action.short_desc,
                                              action.long_desc))
  action.options(self, specific_parser)
  options, unused_args = specific_parser.parse_args(self.argv[1:])
  return specific_parser, options
def _PrintHelpAndExit(self, exit_code=2):
  """Prints the parser's help message and exits the program.

  Args:
    exit_code: The integer code to pass to sys.exit(). Defaults to 2 (the
      conventional usage-error code); Help passes 0 for explicit help.
  """
  self.parser.print_help()
  sys.exit(exit_code)
def _GetRpcServer(self):
  """Returns an instance of an AbstractRpcServer.

  Returns:
    A new AbstractRpcServer, on which RPC calls can be made.
  """

  def GetUserCredentials():
    """Prompts the user for a username and password."""
    email = self.options.email
    if email is None:
      email = self.raw_input_fn("Email: ")
    password_prompt = "Password for %s: " % email
    # With --passin, the password comes from stdin instead of a tty prompt.
    if self.options.passin:
      password = self.raw_input_fn(password_prompt)
    else:
      password = self.password_input_fn(password_prompt)
    return (email, password)

  # Development-server shortcut: skip real authentication entirely when
  # talking to a local dev_appserver.
  if self.options.host and self.options.host == "localhost":
    email = self.options.email
    if email is None:
      email = "test@example.com"
    logging.info("Using debug user %s. Override with --email" % email)
    server = self.rpc_server_class(
        self.options.server,
        lambda: (email, "password"),
        GetUserAgent(),
        GetSourceName(),
        host_override=self.options.host,
        save_cookies=self.options.save_cookies)
    # The dev server does not check credentials, so mark as authenticated.
    server.authenticated = True
    return server

  # With --passin, stdin can only supply the password once, so allow a
  # single authentication attempt instead of the usual three.
  if self.options.passin:
    auth_tries = 1
  else:
    auth_tries = 3
  return self.rpc_server_class(self.options.server, GetUserCredentials,
                               GetUserAgent(), GetSourceName(),
                               host_override=self.options.host,
                               save_cookies=self.options.save_cookies,
                               auth_tries=auth_tries,
                               account_type="HOSTED_OR_GOOGLE")
def _FindYaml(self, basepath, file_name):
  """Find yaml files in application directory.

  Args:
    basepath: Base application directory.
    file_name: Filename without extension to search for.

  Returns:
    Path to located yaml file if one exists, else None.
  """
  if not os.path.isdir(basepath):
    self.parser.error("Not a directory: %s" % basepath)
  # Prefer the ".yaml" spelling; fall back to ".yml".
  for extension in (".yaml", ".yml"):
    candidate = os.path.join(basepath, file_name + extension)
    if os.path.isfile(candidate):
      return candidate
  return None
def _ParseAppYaml(self, basepath):
  """Parses the app.yaml file.

  Args:
    basepath: the directory of the application.

  Returns:
    An AppInfoExternal object.
  """
  appyaml_filename = self._FindYaml(basepath, "app")
  if appyaml_filename is None:
    # app.yaml is mandatory; parser.error exits the program.
    self.parser.error("Directory does not contain an app.yaml "
                      "configuration file.")
  fh = open(appyaml_filename, "r")
  try:
    return appinfo.LoadSingleAppInfo(fh)
  finally:
    fh.close()
def _ParseIndexYaml(self, basepath):
  """Parses the index.yaml file.

  Args:
    basepath: the directory of the application.

  Returns:
    A single parsed yaml file or None if the file does not exist.
  """
  # index.yaml is optional, so a missing file is not an error.
  file_name = self._FindYaml(basepath, "index")
  if file_name is None:
    return None
  fh = open(file_name, "r")
  try:
    return datastore_index.ParseIndexDefinitions(fh)
  finally:
    fh.close()
def _ParseCronYaml(self, basepath):
  """Parses the cron.yaml file.

  Args:
    basepath: the directory of the application.

  Returns:
    A CronInfoExternal object, or None if the file does not exist.
  """
  # cron.yaml is optional, so a missing file is not an error.
  file_name = self._FindYaml(basepath, "cron")
  if file_name is None:
    return None
  fh = open(file_name, "r")
  try:
    return croninfo.LoadSingleCron(fh)
  finally:
    fh.close()
def Help(self):
  """Prints help for a specific action.

  Expects self.args[0] to contain the name of the action in question.
  Exits the program after printing the help message.
  """
  if len(self.args) != 1 or self.args[0] not in self.actions:
    self.parser.error("Expected a single action argument. Must be one of:\n" +
                      self._GetActionDescriptions())
  chosen_action = self.actions[self.args[0]]
  # Build the action-specific parser so its full usage text is printed.
  self.parser, unused_options = self._MakeSpecificParser(chosen_action)
  self._PrintHelpAndExit(exit_code=0)
def Update(self):
  """Updates and deploys a new appversion."""
  if len(self.args) != 1:
    self.parser.error("Expected a single <directory> argument.")
  basepath = self.args[0]
  appyaml = self._ParseAppYaml(basepath)
  rpc_server = self._GetRpcServer()
  # Check for a newer SDK before uploading.
  updatecheck = self.update_check_class(rpc_server, appyaml)
  updatecheck.CheckForUpdates()
  appversion = AppVersionUpload(rpc_server, appyaml)
  appversion.DoUpload(FileIterator(basepath), self.options.max_size,
                      lambda path: open(os.path.join(basepath, path), "rb"))
  # Upload index definitions, if an index.yaml is present. An index failure
  # is non-fatal here because the app itself has already been updated.
  index_defs = self._ParseIndexYaml(basepath)
  if index_defs:
    index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
    try:
      index_upload.DoUpload()
    except urllib2.HTTPError, e:
      StatusUpdate("Error %d: --- begin server output ---\n"
                   "%s\n--- end server output ---" %
                   (e.code, e.read().rstrip("\n")))
      print >> self.error_fh, (
          "Your app was updated, but there was an error updating your "
          "indexes. Please retry later with appcfg.py update_indexes.")
  # Upload cron definitions, if a cron.yaml is present.
  cron_entries = self._ParseCronYaml(basepath)
  if cron_entries:
    cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
    cron_upload.DoUpload()
def _UpdateOptions(self, parser):
  """Adds update-specific options to 'parser'.

  Args:
    parser: An instance of OptionsParser.
  """
  # Default is 10 MB (10485760 bytes).
  parser.add_option("-S", "--max_size", dest="max_size", type="int",
                    metavar="SIZE", default=10485760,
                    help="Maximum size of a file to upload.")
def VacuumIndexes(self):
"""Deletes unused indexes."""
if len(self.args) != 1:
self.parser.error("Expected a single <directory> argument.")
basepath = self.args[0]
config = self._ParseAppYaml(basepath)
index_defs = self._ParseIndexYaml(basepath)
if index_defs is None:
index_defs = datastore_index.IndexDefinitions()
rpc_server = self._GetRpcServer()
vacuum = VacuumIndexesOperation(rpc_server,
config,
self.options.force_delete)
vacuum.DoVacuum(index_defs)
def _VacuumIndexesOptions(self, parser):
"""Adds vacuum_indexes-specific options to 'parser'.
Args:
parser: An instance of OptionsParser.
"""
parser.add_option("-f", "--force", action="store_true", dest="force_delete",
default=False,
help="Force deletion without being prompted.")
def UpdateCron(self):
"""Updates any new or changed cron definitions."""
if len(self.args) != 1:
self.parser.error("Expected a single <directory> argument.")
basepath = self.args[0]
appyaml = self._ParseAppYaml(basepath)
rpc_server = self._GetRpcServer()
cron_entries = self._ParseCronYaml(basepath)
if cron_entries:
cron_upload = CronEntryUpload(rpc_server, appyaml, cron_entries)
cron_upload.DoUpload()
def UpdateIndexes(self):
"""Updates indexes."""
if len(self.args) != 1:
self.parser.error("Expected a single <directory> argument.")
basepath = self.args[0]
appyaml = self._ParseAppYaml(basepath)
rpc_server = self._GetRpcServer()
index_defs = self._ParseIndexYaml(basepath)
if index_defs:
index_upload = IndexDefinitionUpload(rpc_server, appyaml, index_defs)
index_upload.DoUpload()
def Rollback(self):
"""Does a rollback of any existing transaction for this app version."""
if len(self.args) != 1:
self.parser.error("Expected a single <directory> argument.")
basepath = self.args[0]
appyaml = self._ParseAppYaml(basepath)
appversion = AppVersionUpload(self._GetRpcServer(), appyaml)
appversion.in_transaction = True
appversion.Rollback()
def RequestLogs(self):
"""Write request logs to a file."""
if len(self.args) != 2:
self.parser.error(
"Expected a <directory> argument and an <output_file> argument.")
if (self.options.severity is not None and
not 0 <= self.options.severity <= MAX_LOG_LEVEL):
self.parser.error(
"Severity range is 0 (DEBUG) through %s (CRITICAL)." % MAX_LOG_LEVEL)
if self.options.num_days is None:
self.options.num_days = int(not self.options.append)
basepath = self.args[0]
appyaml = self._ParseAppYaml(basepath)
rpc_server = self._GetRpcServer()
logs_requester = LogsRequester(rpc_server, appyaml, self.args[1],
self.options.num_days,
self.options.append,
self.options.severity,
time.time())
logs_requester.DownloadLogs()
def _RequestLogsOptions(self, parser):
"""Adds request_logs-specific options to 'parser'.
Args:
parser: An instance of OptionsParser.
"""
parser.add_option("-n", "--num_days", type="int", dest="num_days",
action="store", default=None,
help="Number of days worth of log data to get. "
"The cut-off point is midnight UTC. "
"Use 0 to get all available logs. "
"Default is 1, unless --append is also given; "
"then the default is 0.")
parser.add_option("-a", "--append", dest="append",
action="store_true", default=False,
help="Append to existing file.")
parser.add_option("--severity", type="int", dest="severity",
action="store", default=None,
help="Severity of app-level log messages to get. "
"The range is 0 (DEBUG) through 4 (CRITICAL). "
"If omitted, only request logs are returned.")
  def CronInfo(self, now=None, output=sys.stdout):
    """Displays information about cron definitions.

    For each job in cron.yaml, writes the job's description, URL and
    schedule to 'output', followed by the job's next few run times
    (count controlled by --num_runs).

    Args:
      now: used for testing.
      output: Used for testing.
    """
    if len(self.args) != 1:
      self.parser.error("Expected a single <directory> argument.")
    if now is None:
      now = datetime.datetime.now()
    basepath = self.args[0]
    cron_entries = self._ParseCronYaml(basepath)
    if cron_entries:
      for entry in cron_entries.cron:
        description = entry.description
        if not description:
          description = "<no description>"
        print >>output, "\n%s:\nURL: %s\nSchedule: %s" % (description,
                                                          entry.url,
                                                          entry.schedule)
        # Ask the groc schedule parser for the next num_runs run times
        # after 'now', and print each with its offset from now.
        schedule = groctimespecification.GrocTimeSpecification(entry.schedule)
        matches = schedule.GetMatches(now, self.options.num_runs)
        for match in matches:
          print >>output, "%s, %s from now" % (
              match.strftime("%Y-%m-%d %H:%M:%S"), match - now)
def _CronInfoOptions(self, parser):
"""Adds cron_info-specific options to 'parser'.
Args:
parser: An instance of OptionsParser.
"""
parser.add_option("-n", "--num_runs", type="int", dest="num_runs",
action="store", default=5,
help="Number of runs of each cron job to display"
"Default is 5")
class Action(object):
"""Contains information about a command line action.
Attributes:
function: The name of a function defined on AppCfg or its subclasses
that will perform the appropriate action.
usage: A command line usage string.
short_desc: A one-line description of the action.
long_desc: A detailed description of the action. Whitespace and
formatting will be preserved.
options: A function that will add extra options to a given OptionParser
object.
"""
def __init__(self, function, usage, short_desc, long_desc="",
options=lambda obj, parser: None):
"""Initializer for the class attributes."""
self.function = function
self.usage = usage
self.short_desc = short_desc
self.long_desc = long_desc
self.options = options
def __call__(self, appcfg):
"""Invoke this Action on the specified AppCfg.
This calls the function of the appropriate name on AppCfg, and
respects polymophic overrides."""
method = getattr(appcfg, self.function)
return method()
actions = {
"help": Action(
function="Help",
usage="%prog help <action>",
short_desc="Print help for a specific action."),
"update": Action(
function="Update",
usage="%prog [options] update <directory>",
options=_UpdateOptions,
short_desc="Create or update an app version.",
long_desc="""
Specify a directory that contains all of the files required by
the app, and appcfg.py will create/update the app version referenced
in the app.yaml file at the top level of that directory. appcfg.py
will follow symlinks and recursively upload all files to the server.
Temporary or source control files (e.g. foo~, .svn/*) will be skipped."""),
"update_cron": Action(
function="UpdateCron",
usage="%prog [options] update_cron <directory>",
short_desc="Update application cron definitions.",
long_desc="""
The 'update_cron' command will update any new, removed or changed cron
definitions from the cron.yaml file."""),
"update_indexes": Action(
function="UpdateIndexes",
usage="%prog [options] update_indexes <directory>",
short_desc="Update application indexes.",
long_desc="""
The 'update_indexes' command will add additional indexes which are not currently
in production as well as restart any indexes that were not completed."""),
"vacuum_indexes": Action(
function="VacuumIndexes",
usage="%prog [options] vacuum_indexes <directory>",
options=_VacuumIndexesOptions,
short_desc="Delete unused indexes from application.",
long_desc="""
The 'vacuum_indexes' command will help clean up indexes which are no longer
in use. It does this by comparing the local index configuration with
indexes that are actually defined on the server. If any indexes on the
server do not exist in the index configuration file, the user is given the
option to delete them."""),
"rollback": Action(
function="Rollback",
usage="%prog [options] rollback <directory>",
short_desc="Rollback an in-progress update.",
long_desc="""
The 'update' command requires a server-side transaction. Use 'rollback'
if you get an error message about another transaction being in progress
and you are sure that there is no such transaction."""),
"request_logs": Action(
function="RequestLogs",
usage="%prog [options] request_logs <directory> <output_file>",
options=_RequestLogsOptions,
short_desc="Write request logs in Apache common log format.",
long_desc="""
The 'request_logs' command exports the request logs from your application
to a file. It will write Apache common log format records ordered
chronologically. If output file is '-' stdout will be written."""),
"cron_info": Action(
function="CronInfo",
usage="%prog [options] cron_info <directory>",
options=_CronInfoOptions,
short_desc="Display information about cron jobs.",
long_desc="""
The 'cron_info' command will display the next 'number' runs (default 5) for
each cron job defined in the cron.yaml file."""),
}
def main(argv):
  """Configures logging, runs AppCfgApp, and exits with its result code."""
  log_format = ("%(asctime)s %(levelname)s %(filename)s:"
                "%(lineno)s %(message)s ")
  logging.basicConfig(format=log_format)
  try:
    exit_code = AppCfgApp(argv).Run()
  except KeyboardInterrupt:
    StatusUpdate("Interrupted.")
    sys.exit(1)
  else:
    if exit_code:
      sys.exit(exit_code)
# Script entry point: pass the full argv (including the program name) to main.
if __name__ == "__main__":
  main(sys.argv)
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Constants used by both the bulkload server-side mixin handler and the
command-line client.
"""
# Name of the request parameter carrying the entity kind to load.
KIND_PARAM = 'kind'
# Name of the request parameter carrying the CSV payload.
CSV_PARAM = 'csv'
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
""" Constants used by both the bulkload server-side mixin handler and the
command-line client.
"""
# Name of the request parameter carrying the entity kind to load.
KIND_PARAM = 'kind'
# Name of the request parameter carrying the CSV payload.
CSV_PARAM = 'csv'
| Python |
#!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""A mix-in handler for bulk loading data into an application.
For complete documentation, see the Tools and Libraries section of the
documentation.
To use this in your app, first write a script, e.g. bulkload.py, that
instantiates a Loader for each entity kind you want to import and call
bulkload.main(instance). For example:
person = bulkload.Loader(
'Person',
[('name', str),
('email', datastore_types.Email),
('cool', bool), # ('0', 'False', 'No', '')=False, otherwise bool(value)
('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
])
if __name__ == '__main__':
bulkload.main(person)
See the Loader class for more information. Then, add a handler for it in your
app.yaml, e.g.:
handlers:
- url: /load
script: bulkload.py
login: admin
Finally, deploy your app and run bulkloader.py. For example, to load the
file people.csv into a dev_appserver running on your local machine:
./bulkloader.py --filename people.csv --kind Person --cookie ... \
--url http://localhost:8080/load
The kind parameter is used to look up the Loader instance that will be used.
The bulkload handler should usually be admin_only, so that non-admins can't use
the shell to modify your app's data. The bulkload client uses the cookie
parameter to piggyback its HTTP requests on your login session. A GET request
to the URL specified for your bulkload script will give you a cookie parameter
you can use (/load in the example above). If your bulkload handler is not
admin_only, you may omit the cookie parameter.
If you want to do extra processing before the entities are stored, you can
subclass Loader and override HandleEntity. HandleEntity is called once with
each entity that is imported from the CSV data. You can return one or more
entities from HandleEntity to be stored in its place, or None if nothing
should be stored.
For example, this loads calendar events and stores them as
datastore_entities.Event entities. It also populates their author field with a
reference to the corresponding datastore_entities.Contact entity. If no Contact
entity exists yet for the given author, it creates one and stores it first.
class EventLoader(bulkload.Loader):
def __init__(self):
    bulkload.Loader.__init__(self, 'Event',
[('title', str),
('creator', str),
('where', str),
('startTime', lambda x:
datetime.datetime.fromtimestamp(float(x))),
])
def HandleEntity(self, entity):
event = datastore_entities.Event(entity.title)
event.update(entity)
creator = event['creator']
if creator:
contact = datastore.Query('Contact', {'title': creator}).Get(1)
if not contact:
contact = [datastore_entities.Contact(creator)]
datastore.Put(contact[0])
event['author'] = contact[0].key()
return event
if __name__ == '__main__':
bulkload.main(EventLoader())
"""
import Cookie
import StringIO
import csv
import httplib
import os
import traceback
import google
import wsgiref.handlers
from google.appengine.api import datastore
from google.appengine.ext import webapp
from google.appengine.ext.bulkload import constants
def Validate(value, type):
  """Checks that value is non-empty and an instance of the given type.

  Args:
    value: any value
    type: a type or tuple of types

  Raises:
    ValueError: if value is None or otherwise falsy.
    TypeError: if value is not an instance of the given type.
  """
  if not value:
    raise ValueError('Value should not be empty; received %s.' % value)
  if not isinstance(value, type):
    raise TypeError('Expected a %s, but received %s (a %s).' %
                    (type, value, value.__class__))
class Loader(object):
  """A base class for creating datastore entities from input data.
  To add a handler for bulk loading a new entity kind into your datastore,
  write a subclass of this class that calls Loader.__init__ from your
  class's __init__.
  If you need to run extra code to convert entities from the input
  data, create new properties, or otherwise modify the entities before
  they're inserted, override HandleEntity.
  See the CreateEntity method for the creation of entities from the
  (parsed) input data.
  """
  # Class-level registry mapping entity kind -> Loader instance; populated
  # by __init__ and exposed through RegisteredLoaders(). The double
  # underscore name-mangles these so subclasses cannot clobber them.
  __loaders = {}
  # Entity kind handled by this instance; set in __init__.
  __kind = None
  # List of (property name, converter callable) tuples; set in __init__.
  __properties = None
  def __init__(self, kind, properties):
    """ Constructor.
    Populates this Loader's kind and properties map. Also registers it with
    the bulk loader, so that all you need to do is instantiate your Loader,
    and the bulkload handler will automatically use it.
    Args:
      kind: a string containing the entity kind that this loader handles
      properties: list of (name, converter) tuples.
        This is used to automatically convert the CSV columns into
        properties. The converter should be a function that takes one
        argument, a string value from the CSV file, and returns a correctly
        typed property value that should be inserted. The tuples in this
        list should match the columns in your CSV file, in order.
        For example:
          [('name', str),
           ('id_number', int),
           ('email', datastore_types.Email),
           ('user', users.User),
           ('birthdate', lambda x: datetime.datetime.fromtimestamp(float(x))),
           ('description', datastore_types.Text),
           ]
    """
    Validate(kind, basestring)
    self.__kind = kind
    Validate(properties, list)
    for name, fn in properties:
      Validate(name, basestring)
      assert callable(fn), (
        'Conversion function %s for property %s is not callable.' % (fn, name))
    self.__properties = properties
    # Register this instance so the BulkLoad handler can look it up by kind.
    Loader.__loaders[kind] = self
  def kind(self):
    """ Return the entity kind that this Loader handles.
    """
    return self.__kind
  def CreateEntity(self, values, key_name=None):
    """ Creates an entity from a list of property values.
    Args:
      values: list/tuple of str
      key_name: if provided, the name for the (single) resulting Entity
    Returns:
      list of datastore.Entity
      The returned entities are populated with the property values from the
      argument, converted to native types using the properties map given in
      the constructor, and passed through HandleEntity. They're ready to be
      inserted.
    Raises:
      AssertionError if the number of values doesn't match the number
      of properties in the properties map.
    """
    Validate(values, (list, tuple))
    assert len(values) == len(self.__properties), (
      'Expected %d CSV columns, found %d.' %
      (len(self.__properties), len(values)))
    entity = datastore.Entity(self.__kind, name=key_name)
    for (name, converter), val in zip(self.__properties, values):
      # Special-case bool: bool('False') would be True, so treat the
      # strings '0', 'false' and 'no' (any case) as False before converting.
      if converter is bool and val.lower() in ('0', 'false', 'no'):
        val = False
      entity[name] = converter(val)
    # HandleEntity may replace the entity with one or more substitutes,
    # or suppress it entirely by returning None.
    entities = self.HandleEntity(entity)
    if entities is not None:
      if not isinstance(entities, (list, tuple)):
        entities = [entities]
      for entity in entities:
        if not isinstance(entity, datastore.Entity):
          raise TypeError('Expected a datastore.Entity, received %s (a %s).' %
                          (entity, entity.__class__))
    return entities
  def HandleEntity(self, entity):
    """ Subclasses can override this to add custom entity conversion code.
    This is called for each entity, after its properties are populated from
    CSV but before it is stored. Subclasses can override this to add custom
    entity handling code.
    The entity to be inserted should be returned. If multiple entities should
    be inserted, return a list of entities. If no entities should be inserted,
    return None or [].
    Args:
      entity: datastore.Entity
    Returns:
      datastore.Entity or list of datastore.Entity
    """
    # Default implementation: store the entity unchanged.
    return entity
  @staticmethod
  def RegisteredLoaders():
    """ Returns a dict (copy) of kind -> Loader instances that have been
    created.
    """
    return dict(Loader.__loaders)
class BulkLoad(webapp.RequestHandler):
  """A handler for bulk load requests.
  This class contains handlers for the bulkloading process. One for
  GET to provide cookie information for the upload script, and one
  handler for a POST request to upload the entities.
  In the POST request, the body contains the data representing the
  entities' property values. The original format was a sequences of
  lines of comma-separated values (and is handled by the Load
  method). The current (version 1) format is a binary format described
  in the Tools and Libraries section of the documentation, and is
  handled by the LoadV1 method).
  NOTE(review): no LoadV1 method is defined in this file — confirm whether
  the docstring describes a method provided elsewhere.
  """
  def get(self):
    """ Handle a GET. Just show an info page.
    """
    page = self.InfoPage(self.request.uri)
    self.response.out.write(page)
  def post(self):
    """ Handle a POST. Reads CSV data, converts to entities, and stores them.
    """
    self.response.headers['Content-Type'] = 'text/plain'
    # Load() returns (HTTP status code, response body).
    response, output = self.Load(self.request.get(constants.KIND_PARAM),
                                 self.request.get(constants.CSV_PARAM))
    self.response.set_status(response)
    self.response.out.write(output)
  def InfoPage(self, uri):
    """ Renders an information page with the POST endpoint and cookie flag.
    Args:
      uri: a string containing the request URI
    Returns:
      A string with the contents of the info page to be displayed
    """
    page = """
<!DOCTYPE html PUBLIC "-//W3C//DTD XHTML 1.0 Strict//EN"
"http://www.w3.org/TR/xhtml1/DTD/xhtml1-strict.dtd">
<html><head>
<title>Bulk Loader</title>
</head><body>"""
    page += ('The bulk load endpoint is: <a href="%s">%s</a><br />\n' %
             (uri, uri))
    cookies = os.environ.get('HTTP_COOKIE', None)
    if cookies:
      cookie = Cookie.BaseCookie(cookies)
      # ACSID is the production login cookie; dev_appserver_login is the
      # development server's equivalent. Report whichever is present first.
      for param in ['ACSID', 'dev_appserver_login']:
        value = cookie.get(param)
        if value:
          page += ("Pass this flag to the client: --cookie='%s=%s'\n" %
                   (param, value.value))
          break
    else:
      page += 'No cookie found!\n'
    page += '</body></html>'
    return page
  def IterRows(self, reader):
    """ Yields a tuple of a line number and row for each row of the CSV data.
    Args:
      reader: a csv reader for the input data.
    """
    # Line numbers are 1-based to match what users see in their CSV file.
    line_num = 1
    for columns in reader:
      yield (line_num, columns)
      line_num += 1
  def LoadEntities(self, iter, loader, key_format=None):
    """Generates entities and loads them into the datastore. Returns
    a tuple of HTTP code and string reply.
    Args:
      iter: an iterator yielding pairs of a line number and row contents.
      loader: the Loader instance used to convert each row into entities.
      key_format: a format string to convert a line number into an
        entity id. If None, then entity ID's are automatically generated.
    """
    entities = []
    output = []
    for line_num, columns in iter:
      key_name = None
      if key_format is not None:
        key_name = key_format % line_num
      if columns:
        try:
          output.append('\nLoading from line %d...' % line_num)
          new_entities = loader.CreateEntity(columns, key_name=key_name)
          if new_entities:
            entities.extend(new_entities)
          output.append('done.')
        except:
          # Fail fast on any conversion error: report the traceback for the
          # offending line back to the client and store nothing at all.
          stacktrace = traceback.format_exc()
          output.append('error:\n%s' % stacktrace)
          return (httplib.BAD_REQUEST, ''.join(output))
    # All rows converted successfully; store everything in one batch.
    datastore.Put(entities)
    return (httplib.OK, ''.join(output))
  def Load(self, kind, data):
    """Parses CSV data, uses a Loader to convert to entities, and stores them.
    On error, fails fast. Returns a "bad request" HTTP response code and
    includes the traceback in the output.
    Args:
      kind: a string containing the entity kind that this loader handles
      data: a string containing the CSV data to load
    Returns:
      tuple (response code, output) where:
        response code: integer HTTP response code to return
        output: string containing the HTTP response body
    """
    # The Python 2 csv module works on byte strings, so encode unicode
    # request data as UTF-8 before parsing.
    data = data.encode('utf-8')
    Validate(kind, basestring)
    Validate(data, basestring)
    output = []
    try:
      loader = Loader.RegisteredLoaders()[kind]
    except KeyError:
      output.append('Error: no Loader defined for kind %s.' % kind)
      return (httplib.BAD_REQUEST, ''.join(output))
    buffer = StringIO.StringIO(data)
    reader = csv.reader(buffer, skipinitialspace=True)
    try:
      # Raise the per-field size cap for large values; field_size_limit is
      # missing on very old Python versions, so ignore it when unavailable.
      csv.field_size_limit(800000)
    except AttributeError:
      pass
    return self.LoadEntities(self.IterRows(reader), loader)
def main(*loaders):
  """Starts the bulk upload CGI application.

  Args:
    loaders: One or more Loader instances.

  Raises:
    TypeError: If no arguments are given, or if any argument is not a
      Loader instance.
  """
  if not loaders:
    raise TypeError('Expected at least one argument.')
  for candidate in loaders:
    if not isinstance(candidate, Loader):
      raise TypeError('Expected a Loader instance; received %r' % candidate)
  app = webapp.WSGIApplication([('.*', BulkLoad)])
  wsgiref.handlers.CGIHandler().run(app)
# Script entry point. Running this module directly calls main() with no
# Loaders, which raises TypeError — apps are expected to write their own
# bulkload script that passes configured Loader instances to main().
if __name__ == '__main__':
  main()
| Python |
Subsets and Splits
SQL Console for ajibawa-2023/Python-Code-Large
Provides a useful breakdown of language distribution in the training data, showing which languages have the most samples and helping identify potential imbalances across different language groups.