id
int32
0
252k
repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
51
19.8k
code_tokens
list
docstring
stringlengths
3
17.3k
docstring_tokens
list
sha
stringlengths
40
40
url
stringlengths
87
242
22,200
timknip/pyswf
swf/stream.py
SWFStream.readRGBA
def readRGBA(self): """ Read a RGBA color """ self.reset_bits_pending(); r = self.readUI8() g = self.readUI8() b = self.readUI8() a = self.readUI8() return (a << 24) | (r << 16) | (g << 8) | b
python
def readRGBA(self): self.reset_bits_pending(); r = self.readUI8() g = self.readUI8() b = self.readUI8() a = self.readUI8() return (a << 24) | (r << 16) | (g << 8) | b
[ "def", "readRGBA", "(", "self", ")", ":", "self", ".", "reset_bits_pending", "(", ")", "r", "=", "self", ".", "readUI8", "(", ")", "g", "=", "self", ".", "readUI8", "(", ")", "b", "=", "self", ".", "readUI8", "(", ")", "a", "=", "self", ".", "r...
Read a RGBA color
[ "Read", "a", "RGBA", "color" ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/stream.py#L355-L362
22,201
timknip/pyswf
swf/stream.py
SWFStream.readString
def readString(self): """ Read a string """ s = self.f.read(1) string = b"" while ord(s) > 0: string += s s = self.f.read(1) return string.decode()
python
def readString(self): s = self.f.read(1) string = b"" while ord(s) > 0: string += s s = self.f.read(1) return string.decode()
[ "def", "readString", "(", "self", ")", ":", "s", "=", "self", ".", "f", ".", "read", "(", "1", ")", "string", "=", "b\"\"", "while", "ord", "(", "s", ")", ">", "0", ":", "string", "+=", "s", "s", "=", "self", ".", "f", ".", "read", "(", "1"...
Read a string
[ "Read", "a", "string" ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/stream.py#L368-L375
22,202
timknip/pyswf
swf/stream.py
SWFStream.readFILTER
def readFILTER(self): """ Read a SWFFilter """ filterId = self.readUI8() filter = SWFFilterFactory.create(filterId) filter.parse(self) return filter
python
def readFILTER(self): filterId = self.readUI8() filter = SWFFilterFactory.create(filterId) filter.parse(self) return filter
[ "def", "readFILTER", "(", "self", ")", ":", "filterId", "=", "self", ".", "readUI8", "(", ")", "filter", "=", "SWFFilterFactory", ".", "create", "(", "filterId", ")", "filter", ".", "parse", "(", "self", ")", "return", "filter" ]
Read a SWFFilter
[ "Read", "a", "SWFFilter" ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/stream.py#L377-L382
22,203
timknip/pyswf
swf/stream.py
SWFStream.readFILTERLIST
def readFILTERLIST(self): """ Read a length-prefixed list of FILTERs """ number = self.readUI8() return [self.readFILTER() for _ in range(number)]
python
def readFILTERLIST(self): number = self.readUI8() return [self.readFILTER() for _ in range(number)]
[ "def", "readFILTERLIST", "(", "self", ")", ":", "number", "=", "self", ".", "readUI8", "(", ")", "return", "[", "self", ".", "readFILTER", "(", ")", "for", "_", "in", "range", "(", "number", ")", "]" ]
Read a length-prefixed list of FILTERs
[ "Read", "a", "length", "-", "prefixed", "list", "of", "FILTERs" ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/stream.py#L384-L387
22,204
timknip/pyswf
swf/stream.py
SWFStream.readBUTTONCONDACTIONSs
def readBUTTONCONDACTIONSs(self): """ Read zero or more button-condition actions """ out = [] while 1: action = self.readBUTTONCONDACTION() if action: out.append(action) else: break return out
python
def readBUTTONCONDACTIONSs(self): out = [] while 1: action = self.readBUTTONCONDACTION() if action: out.append(action) else: break return out
[ "def", "readBUTTONCONDACTIONSs", "(", "self", ")", ":", "out", "=", "[", "]", "while", "1", ":", "action", "=", "self", ".", "readBUTTONCONDACTION", "(", ")", "if", "action", ":", "out", ".", "append", "(", "action", ")", "else", ":", "break", "return"...
Read zero or more button-condition actions
[ "Read", "zero", "or", "more", "button", "-", "condition", "actions" ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/stream.py#L427-L436
22,205
timknip/pyswf
swf/stream.py
SWFStream.readtag_header
def readtag_header(self): """ Read a tag header """ pos = self.tell() tag_type_and_length = self.readUI16() tag_length = tag_type_and_length & 0x003f if tag_length == 0x3f: # The SWF10 spec sez that this is a signed int. # Shouldn't it be an unsigned int? tag_length = self.readSI32(); return SWFRecordHeader(tag_type_and_length >> 6, tag_length, self.tell() - pos)
python
def readtag_header(self): pos = self.tell() tag_type_and_length = self.readUI16() tag_length = tag_type_and_length & 0x003f if tag_length == 0x3f: # The SWF10 spec sez that this is a signed int. # Shouldn't it be an unsigned int? tag_length = self.readSI32(); return SWFRecordHeader(tag_type_and_length >> 6, tag_length, self.tell() - pos)
[ "def", "readtag_header", "(", "self", ")", ":", "pos", "=", "self", ".", "tell", "(", ")", "tag_type_and_length", "=", "self", ".", "readUI16", "(", ")", "tag_length", "=", "tag_type_and_length", "&", "0x003f", "if", "tag_length", "==", "0x3f", ":", "# The...
Read a tag header
[ "Read", "a", "tag", "header" ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/stream.py#L459-L468
22,206
timknip/pyswf
swf/tag.py
SWFTimelineContainer.get_dependencies
def get_dependencies(self): """ Returns the character ids this tag refers to """ s = super(SWFTimelineContainer, self).get_dependencies() for dt in self.all_tags_of_type(DefinitionTag): s.update(dt.get_dependencies()) return s
python
def get_dependencies(self): s = super(SWFTimelineContainer, self).get_dependencies() for dt in self.all_tags_of_type(DefinitionTag): s.update(dt.get_dependencies()) return s
[ "def", "get_dependencies", "(", "self", ")", ":", "s", "=", "super", "(", "SWFTimelineContainer", ",", "self", ")", ".", "get_dependencies", "(", ")", "for", "dt", "in", "self", ".", "all_tags_of_type", "(", "DefinitionTag", ")", ":", "s", ".", "update", ...
Returns the character ids this tag refers to
[ "Returns", "the", "character", "ids", "this", "tag", "refers", "to" ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/tag.py#L151-L156
22,207
timknip/pyswf
swf/tag.py
SWFTimelineContainer.all_tags_of_type
def all_tags_of_type(self, type_or_types, recurse_into_sprites = True): """ Generator for all tags of the given type_or_types. Generates in breadth-first order, optionally including all sub-containers. """ for t in self.tags: if isinstance(t, type_or_types): yield t if recurse_into_sprites: for t in self.tags: # recurse into nested sprites if isinstance(t, SWFTimelineContainer): for containedtag in t.all_tags_of_type(type_or_types): yield containedtag
python
def all_tags_of_type(self, type_or_types, recurse_into_sprites = True): for t in self.tags: if isinstance(t, type_or_types): yield t if recurse_into_sprites: for t in self.tags: # recurse into nested sprites if isinstance(t, SWFTimelineContainer): for containedtag in t.all_tags_of_type(type_or_types): yield containedtag
[ "def", "all_tags_of_type", "(", "self", ",", "type_or_types", ",", "recurse_into_sprites", "=", "True", ")", ":", "for", "t", "in", "self", ".", "tags", ":", "if", "isinstance", "(", "t", ",", "type_or_types", ")", ":", "yield", "t", "if", "recurse_into_sp...
Generator for all tags of the given type_or_types. Generates in breadth-first order, optionally including all sub-containers.
[ "Generator", "for", "all", "tags", "of", "the", "given", "type_or_types", "." ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/tag.py#L197-L211
22,208
timknip/pyswf
swf/tag.py
SWFTimelineContainer.build_dictionary
def build_dictionary(self): """ Return a dictionary of characterIds to their defining tags. """ d = {} for t in self.all_tags_of_type(DefinitionTag, recurse_into_sprites = False): if t.characterId in d: #print 'redefinition of characterId %d:' % (t.characterId) #print ' was:', d[t.characterId] #print 'redef:', t raise ValueError('illegal redefinition of character') d[t.characterId] = t return d
python
def build_dictionary(self): d = {} for t in self.all_tags_of_type(DefinitionTag, recurse_into_sprites = False): if t.characterId in d: #print 'redefinition of characterId %d:' % (t.characterId) #print ' was:', d[t.characterId] #print 'redef:', t raise ValueError('illegal redefinition of character') d[t.characterId] = t return d
[ "def", "build_dictionary", "(", "self", ")", ":", "d", "=", "{", "}", "for", "t", "in", "self", ".", "all_tags_of_type", "(", "DefinitionTag", ",", "recurse_into_sprites", "=", "False", ")", ":", "if", "t", ".", "characterId", "in", "d", ":", "#print 're...
Return a dictionary of characterIds to their defining tags.
[ "Return", "a", "dictionary", "of", "characterIds", "to", "their", "defining", "tags", "." ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/tag.py#L213-L225
22,209
timknip/pyswf
swf/tag.py
SWFTimelineContainer.collect_sound_streams
def collect_sound_streams(self): """ Return a list of sound streams in this timeline and its children. The streams are returned in order with respect to the timeline. A stream is returned as a list: the first element is the tag which introduced that stream; other elements are the tags which made up the stream body (if any). """ rc = [] current_stream = None # looking in all containers for frames for tag in self.all_tags_of_type((TagSoundStreamHead, TagSoundStreamBlock)): if isinstance(tag, TagSoundStreamHead): # we have a new stream current_stream = [ tag ] rc.append(current_stream) if isinstance(tag, TagSoundStreamBlock): # we have a frame for the current stream current_stream.append(tag) return rc
python
def collect_sound_streams(self): rc = [] current_stream = None # looking in all containers for frames for tag in self.all_tags_of_type((TagSoundStreamHead, TagSoundStreamBlock)): if isinstance(tag, TagSoundStreamHead): # we have a new stream current_stream = [ tag ] rc.append(current_stream) if isinstance(tag, TagSoundStreamBlock): # we have a frame for the current stream current_stream.append(tag) return rc
[ "def", "collect_sound_streams", "(", "self", ")", ":", "rc", "=", "[", "]", "current_stream", "=", "None", "# looking in all containers for frames", "for", "tag", "in", "self", ".", "all_tags_of_type", "(", "(", "TagSoundStreamHead", ",", "TagSoundStreamBlock", ")",...
Return a list of sound streams in this timeline and its children. The streams are returned in order with respect to the timeline. A stream is returned as a list: the first element is the tag which introduced that stream; other elements are the tags which made up the stream body (if any).
[ "Return", "a", "list", "of", "sound", "streams", "in", "this", "timeline", "and", "its", "children", ".", "The", "streams", "are", "returned", "in", "order", "with", "respect", "to", "the", "timeline", "." ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/tag.py#L227-L247
22,210
timknip/pyswf
swf/tag.py
SWFTimelineContainer.collect_video_streams
def collect_video_streams(self): """ Return a list of video streams in this timeline and its children. The streams are returned in order with respect to the timeline. A stream is returned as a list: the first element is the tag which introduced that stream; other elements are the tags which made up the stream body (if any). """ rc = [] streams_by_id = {} # scan first for all streams for t in self.all_tags_of_type(TagDefineVideoStream): stream = [ t ] streams_by_id[t.characterId] = stream rc.append(stream) # then find the frames for t in self.all_tags_of_type(TagVideoFrame): # we have a frame for the /named/ stream assert t.streamId in streams_by_id streams_by_id[t.streamId].append(t) return rc
python
def collect_video_streams(self): rc = [] streams_by_id = {} # scan first for all streams for t in self.all_tags_of_type(TagDefineVideoStream): stream = [ t ] streams_by_id[t.characterId] = stream rc.append(stream) # then find the frames for t in self.all_tags_of_type(TagVideoFrame): # we have a frame for the /named/ stream assert t.streamId in streams_by_id streams_by_id[t.streamId].append(t) return rc
[ "def", "collect_video_streams", "(", "self", ")", ":", "rc", "=", "[", "]", "streams_by_id", "=", "{", "}", "# scan first for all streams", "for", "t", "in", "self", ".", "all_tags_of_type", "(", "TagDefineVideoStream", ")", ":", "stream", "=", "[", "t", "]"...
Return a list of video streams in this timeline and its children. The streams are returned in order with respect to the timeline. A stream is returned as a list: the first element is the tag which introduced that stream; other elements are the tags which made up the stream body (if any).
[ "Return", "a", "list", "of", "video", "streams", "in", "this", "timeline", "and", "its", "children", ".", "The", "streams", "are", "returned", "in", "order", "with", "respect", "to", "the", "timeline", "." ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/tag.py#L249-L273
22,211
timknip/pyswf
swf/export.py
SVGExporter.export
def export(self, swf, force_stroke=False): """ Exports the specified SWF to SVG. @param swf The SWF. @param force_stroke Whether to force strokes on non-stroked fills. """ self.svg = self._e.svg(version=SVG_VERSION) self.force_stroke = force_stroke self.defs = self._e.defs() self.root = self._e.g() self.svg.append(self.defs) self.svg.append(self.root) self.shape_exporter.defs = self.defs self._num_filters = 0 self.fonts = dict([(x.characterId,x) for x in swf.all_tags_of_type(TagDefineFont)]) self.fontInfos = dict([(x.characterId,x) for x in swf.all_tags_of_type(TagDefineFontInfo)]) # GO! super(SVGExporter, self).export(swf, force_stroke) # Setup svg @width, @height and @viewBox # and add the optional margin self.bounds = SVGBounds(self.svg) self.svg.set("width", "%dpx" % round(self.bounds.width)) self.svg.set("height", "%dpx" % round(self.bounds.height)) if self._margin > 0: self.bounds.grow(self._margin) vb = [self.bounds.minx, self.bounds.miny, self.bounds.width, self.bounds.height] self.svg.set("viewBox", "%s" % " ".join(map(str,vb))) # Return the SVG as StringIO return self._serialize()
python
def export(self, swf, force_stroke=False): self.svg = self._e.svg(version=SVG_VERSION) self.force_stroke = force_stroke self.defs = self._e.defs() self.root = self._e.g() self.svg.append(self.defs) self.svg.append(self.root) self.shape_exporter.defs = self.defs self._num_filters = 0 self.fonts = dict([(x.characterId,x) for x in swf.all_tags_of_type(TagDefineFont)]) self.fontInfos = dict([(x.characterId,x) for x in swf.all_tags_of_type(TagDefineFontInfo)]) # GO! super(SVGExporter, self).export(swf, force_stroke) # Setup svg @width, @height and @viewBox # and add the optional margin self.bounds = SVGBounds(self.svg) self.svg.set("width", "%dpx" % round(self.bounds.width)) self.svg.set("height", "%dpx" % round(self.bounds.height)) if self._margin > 0: self.bounds.grow(self._margin) vb = [self.bounds.minx, self.bounds.miny, self.bounds.width, self.bounds.height] self.svg.set("viewBox", "%s" % " ".join(map(str,vb))) # Return the SVG as StringIO return self._serialize()
[ "def", "export", "(", "self", ",", "swf", ",", "force_stroke", "=", "False", ")", ":", "self", ".", "svg", "=", "self", ".", "_e", ".", "svg", "(", "version", "=", "SVG_VERSION", ")", "self", ".", "force_stroke", "=", "force_stroke", "self", ".", "de...
Exports the specified SWF to SVG. @param swf The SWF. @param force_stroke Whether to force strokes on non-stroked fills.
[ "Exports", "the", "specified", "SWF", "to", "SVG", "." ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/export.py#L514-L546
22,212
timknip/pyswf
swf/export.py
SingleShapeSVGExporterMixin.export
def export(self, swf, shape, **export_opts): """ Exports the specified shape of the SWF to SVG. @param swf The SWF. @param shape Which shape to export, either by characterId(int) or as a Tag object. """ # If `shape` is given as int, find corresponding shape tag. if isinstance(shape, Tag): shape_tag = shape else: shapes = [x for x in swf.all_tags_of_type((TagDefineShape, TagDefineSprite)) if x.characterId == shape] if len(shapes): shape_tag = shapes[0] else: raise Exception("Shape %s not found" % shape) from swf.movie import SWF # find a typical use of this shape example_place_objects = [x for x in swf.all_tags_of_type(TagPlaceObject) if x.hasCharacter and x.characterId == shape_tag.characterId] if len(example_place_objects): place_object = example_place_objects[0] characters = swf.build_dictionary() ids_to_export = place_object.get_dependencies() ids_exported = set() tags_to_export = [] # this had better form a dag! while len(ids_to_export): id = ids_to_export.pop() if id in ids_exported or id not in characters: continue tag = characters[id] ids_to_export.update(tag.get_dependencies()) tags_to_export.append(tag) ids_exported.add(id) tags_to_export.reverse() tags_to_export.append(place_object) else: place_object = TagPlaceObject() place_object.hasCharacter = True place_object.characterId = shape_tag.characterId tags_to_export = [ shape_tag, place_object ] stunt_swf = SWF() stunt_swf.tags = tags_to_export return super(SingleShapeSVGExporterMixin, self).export(stunt_swf, **export_opts)
python
def export(self, swf, shape, **export_opts): # If `shape` is given as int, find corresponding shape tag. if isinstance(shape, Tag): shape_tag = shape else: shapes = [x for x in swf.all_tags_of_type((TagDefineShape, TagDefineSprite)) if x.characterId == shape] if len(shapes): shape_tag = shapes[0] else: raise Exception("Shape %s not found" % shape) from swf.movie import SWF # find a typical use of this shape example_place_objects = [x for x in swf.all_tags_of_type(TagPlaceObject) if x.hasCharacter and x.characterId == shape_tag.characterId] if len(example_place_objects): place_object = example_place_objects[0] characters = swf.build_dictionary() ids_to_export = place_object.get_dependencies() ids_exported = set() tags_to_export = [] # this had better form a dag! while len(ids_to_export): id = ids_to_export.pop() if id in ids_exported or id not in characters: continue tag = characters[id] ids_to_export.update(tag.get_dependencies()) tags_to_export.append(tag) ids_exported.add(id) tags_to_export.reverse() tags_to_export.append(place_object) else: place_object = TagPlaceObject() place_object.hasCharacter = True place_object.characterId = shape_tag.characterId tags_to_export = [ shape_tag, place_object ] stunt_swf = SWF() stunt_swf.tags = tags_to_export return super(SingleShapeSVGExporterMixin, self).export(stunt_swf, **export_opts)
[ "def", "export", "(", "self", ",", "swf", ",", "shape", ",", "*", "*", "export_opts", ")", ":", "# If `shape` is given as int, find corresponding shape tag.", "if", "isinstance", "(", "shape", ",", "Tag", ")", ":", "shape_tag", "=", "shape", "else", ":", "shap...
Exports the specified shape of the SWF to SVG. @param swf The SWF. @param shape Which shape to export, either by characterId(int) or as a Tag object.
[ "Exports", "the", "specified", "shape", "of", "the", "SWF", "to", "SVG", "." ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/export.py#L827-L876
22,213
timknip/pyswf
swf/export.py
FrameSVGExporterMixin.export
def export(self, swf, frame, **export_opts): """ Exports a frame of the specified SWF to SVG. @param swf The SWF. @param frame Which frame to export, by 0-based index (int) """ self.wanted_frame = frame return super(FrameSVGExporterMixin, self).export(swf, *export_opts)
python
def export(self, swf, frame, **export_opts): self.wanted_frame = frame return super(FrameSVGExporterMixin, self).export(swf, *export_opts)
[ "def", "export", "(", "self", ",", "swf", ",", "frame", ",", "*", "*", "export_opts", ")", ":", "self", ".", "wanted_frame", "=", "frame", "return", "super", "(", "FrameSVGExporterMixin", ",", "self", ")", ".", "export", "(", "swf", ",", "*", "export_o...
Exports a frame of the specified SWF to SVG. @param swf The SWF. @param frame Which frame to export, by 0-based index (int)
[ "Exports", "a", "frame", "of", "the", "specified", "SWF", "to", "SVG", "." ]
3740cc80d7650156831e728ea0d408819e5671eb
https://github.com/timknip/pyswf/blob/3740cc80d7650156831e728ea0d408819e5671eb/swf/export.py#L879-L886
22,214
scikit-hep/probfit
probfit/plotting.py
_get_args_and_errors
def _get_args_and_errors(self, minuit=None, args=None, errors=None): """ consistent algorithm to get argument and errors 1) get it from minuit if minuit is available 2) if not get it from args and errors 2.1) if args is dict parse it. 3) if all else fail get it from self.last_arg """ ret_arg = None ret_error = None if minuit is not None: # case 1 ret_arg = minuit.args ret_error = minuit.errors return ret_arg, ret_error # no minuit specified use args and errors if args is not None: if isinstance(args, dict): ret_arg = parse_arg(self, args) else: ret_arg = args else: # case 3 ret_arg = self.last_arg if errors is not None: ret_error = errors return ret_arg, ret_error
python
def _get_args_and_errors(self, minuit=None, args=None, errors=None): ret_arg = None ret_error = None if minuit is not None: # case 1 ret_arg = minuit.args ret_error = minuit.errors return ret_arg, ret_error # no minuit specified use args and errors if args is not None: if isinstance(args, dict): ret_arg = parse_arg(self, args) else: ret_arg = args else: # case 3 ret_arg = self.last_arg if errors is not None: ret_error = errors return ret_arg, ret_error
[ "def", "_get_args_and_errors", "(", "self", ",", "minuit", "=", "None", ",", "args", "=", "None", ",", "errors", "=", "None", ")", ":", "ret_arg", "=", "None", "ret_error", "=", "None", "if", "minuit", "is", "not", "None", ":", "# case 1", "ret_arg", "...
consistent algorithm to get argument and errors 1) get it from minuit if minuit is available 2) if not get it from args and errors 2.1) if args is dict parse it. 3) if all else fail get it from self.last_arg
[ "consistent", "algorithm", "to", "get", "argument", "and", "errors", "1", ")", "get", "it", "from", "minuit", "if", "minuit", "is", "available", "2", ")", "if", "not", "get", "it", "from", "args", "and", "errors", "2", ".", "1", ")", "if", "args", "i...
de3593798ea3877dd2785062bed6877dd9058a02
https://github.com/scikit-hep/probfit/blob/de3593798ea3877dd2785062bed6877dd9058a02/probfit/plotting.py#L28-L55
22,215
scikit-hep/probfit
probfit/plotting.py
draw_residual
def draw_residual(x, y, yerr, xerr, show_errbars=True, ax=None, zero_line=True, grid=True, **kwargs): """Draw a residual plot on the axis. By default, if show_errbars if True, residuals are drawn as blue points with errorbars with no endcaps. If show_errbars is False, residuals are drawn as a bar graph with black bars. **Arguments** - **x** array of numbers, x-coordinates - **y** array of numbers, y-coordinates - **yerr** array of numbers, the uncertainty on the y-values - **xerr** array of numbers, the uncertainty on the x-values - **show_errbars** If True, draw the data as a bar plot, else as an errorbar plot - **ax** Optional matplotlib axis instance on which to draw the plot - **zero_line** If True, draw a red line at :math:`y = 0` along the full extent in :math:`x` - **grid** If True, draw gridlines - **kwargs** passed to ``ax.errorbar`` (if ``show_errbars`` is True) or ``ax.bar`` (if ``show_errbars`` if False) **Returns** The matplotlib axis instance the plot was drawn on. """ from matplotlib import pyplot as plt ax = plt.gca() if ax is None else ax if show_errbars: plotopts = dict(fmt='b.', capsize=0) plotopts.update(kwargs) pp = ax.errorbar(x, y, yerr, xerr, zorder=0, **plotopts) else: plotopts = dict(color='k') plotopts.update(kwargs) pp = ax.bar(x - xerr, y, width=2*xerr, **plotopts) if zero_line: ax.plot([x[0] - xerr[0], x[-1] + xerr[-1]], [0, 0], 'r-', zorder=2) # Take the `grid` kwarg to mean 'add a grid if True'; if grid is False and # we called ax.grid(False) then any existing grid on ax would be turned off if grid: ax.grid(grid) return ax
python
def draw_residual(x, y, yerr, xerr, show_errbars=True, ax=None, zero_line=True, grid=True, **kwargs): from matplotlib import pyplot as plt ax = plt.gca() if ax is None else ax if show_errbars: plotopts = dict(fmt='b.', capsize=0) plotopts.update(kwargs) pp = ax.errorbar(x, y, yerr, xerr, zorder=0, **plotopts) else: plotopts = dict(color='k') plotopts.update(kwargs) pp = ax.bar(x - xerr, y, width=2*xerr, **plotopts) if zero_line: ax.plot([x[0] - xerr[0], x[-1] + xerr[-1]], [0, 0], 'r-', zorder=2) # Take the `grid` kwarg to mean 'add a grid if True'; if grid is False and # we called ax.grid(False) then any existing grid on ax would be turned off if grid: ax.grid(grid) return ax
[ "def", "draw_residual", "(", "x", ",", "y", ",", "yerr", ",", "xerr", ",", "show_errbars", "=", "True", ",", "ax", "=", "None", ",", "zero_line", "=", "True", ",", "grid", "=", "True", ",", "*", "*", "kwargs", ")", ":", "from", "matplotlib", "impor...
Draw a residual plot on the axis. By default, if show_errbars if True, residuals are drawn as blue points with errorbars with no endcaps. If show_errbars is False, residuals are drawn as a bar graph with black bars. **Arguments** - **x** array of numbers, x-coordinates - **y** array of numbers, y-coordinates - **yerr** array of numbers, the uncertainty on the y-values - **xerr** array of numbers, the uncertainty on the x-values - **show_errbars** If True, draw the data as a bar plot, else as an errorbar plot - **ax** Optional matplotlib axis instance on which to draw the plot - **zero_line** If True, draw a red line at :math:`y = 0` along the full extent in :math:`x` - **grid** If True, draw gridlines - **kwargs** passed to ``ax.errorbar`` (if ``show_errbars`` is True) or ``ax.bar`` (if ``show_errbars`` if False) **Returns** The matplotlib axis instance the plot was drawn on.
[ "Draw", "a", "residual", "plot", "on", "the", "axis", "." ]
de3593798ea3877dd2785062bed6877dd9058a02
https://github.com/scikit-hep/probfit/blob/de3593798ea3877dd2785062bed6877dd9058a02/probfit/plotting.py#L135-L193
22,216
scikit-hep/probfit
probfit/plotting.py
draw_pdf
def draw_pdf(f, arg, bound, bins=100, scale=1.0, density=True, normed_pdf=False, ax=None, **kwds): """ draw pdf with given argument and bounds. **Arguments** * **f** your pdf. The first argument is assumed to be independent variable * **arg** argument can be tuple or list * **bound** tuple(xmin,xmax) * **bins** number of bins to plot pdf. Default 100. * **scale** multiply pdf by given number. Default 1.0. * **density** plot density instead of expected count in each bin (pdf*bin width). Default True. * **normed_pdf** Normalize pdf in given bound. Default False * The rest of keyword argument will be pass to pyplot.plot **Returns** x, y of what's being plot """ edges = np.linspace(bound[0], bound[1], bins) return draw_pdf_with_edges(f, arg, edges, ax=ax, scale=scale, density=density, normed_pdf=normed_pdf, **kwds)
python
def draw_pdf(f, arg, bound, bins=100, scale=1.0, density=True, normed_pdf=False, ax=None, **kwds): edges = np.linspace(bound[0], bound[1], bins) return draw_pdf_with_edges(f, arg, edges, ax=ax, scale=scale, density=density, normed_pdf=normed_pdf, **kwds)
[ "def", "draw_pdf", "(", "f", ",", "arg", ",", "bound", ",", "bins", "=", "100", ",", "scale", "=", "1.0", ",", "density", "=", "True", ",", "normed_pdf", "=", "False", ",", "ax", "=", "None", ",", "*", "*", "kwds", ")", ":", "edges", "=", "np",...
draw pdf with given argument and bounds. **Arguments** * **f** your pdf. The first argument is assumed to be independent variable * **arg** argument can be tuple or list * **bound** tuple(xmin,xmax) * **bins** number of bins to plot pdf. Default 100. * **scale** multiply pdf by given number. Default 1.0. * **density** plot density instead of expected count in each bin (pdf*bin width). Default True. * **normed_pdf** Normalize pdf in given bound. Default False * The rest of keyword argument will be pass to pyplot.plot **Returns** x, y of what's being plot
[ "draw", "pdf", "with", "given", "argument", "and", "bounds", "." ]
de3593798ea3877dd2785062bed6877dd9058a02
https://github.com/scikit-hep/probfit/blob/de3593798ea3877dd2785062bed6877dd9058a02/probfit/plotting.py#L519-L550
22,217
jmcarp/betfair.py
betfair/utils.py
get_chunks
def get_chunks(sequence, chunk_size): """Split sequence into chunks. :param list sequence: :param int chunk_size: """ return [ sequence[idx:idx + chunk_size] for idx in range(0, len(sequence), chunk_size) ]
python
def get_chunks(sequence, chunk_size): return [ sequence[idx:idx + chunk_size] for idx in range(0, len(sequence), chunk_size) ]
[ "def", "get_chunks", "(", "sequence", ",", "chunk_size", ")", ":", "return", "[", "sequence", "[", "idx", ":", "idx", "+", "chunk_size", "]", "for", "idx", "in", "range", "(", "0", ",", "len", "(", "sequence", ")", ",", "chunk_size", ")", "]" ]
Split sequence into chunks. :param list sequence: :param int chunk_size:
[ "Split", "sequence", "into", "chunks", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L19-L28
22,218
jmcarp/betfair.py
betfair/utils.py
get_kwargs
def get_kwargs(kwargs): """Get all keys and values from dictionary where key is not `self`. :param dict kwargs: Input parameters """ return { key: value for key, value in six.iteritems(kwargs) if key != 'self' }
python
def get_kwargs(kwargs): return { key: value for key, value in six.iteritems(kwargs) if key != 'self' }
[ "def", "get_kwargs", "(", "kwargs", ")", ":", "return", "{", "key", ":", "value", "for", "key", ",", "value", "in", "six", ".", "iteritems", "(", "kwargs", ")", "if", "key", "!=", "'self'", "}" ]
Get all keys and values from dictionary where key is not `self`. :param dict kwargs: Input parameters
[ "Get", "all", "keys", "and", "values", "from", "dictionary", "where", "key", "is", "not", "self", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L31-L39
22,219
jmcarp/betfair.py
betfair/utils.py
check_status_code
def check_status_code(response, codes=None): """Check HTTP status code and raise exception if incorrect. :param Response response: HTTP response :param codes: List of accepted codes or callable :raises: ApiError if code invalid """ codes = codes or [httplib.OK] checker = ( codes if callable(codes) else lambda resp: resp.status_code in codes ) if not checker(response): raise exceptions.ApiError(response, response.json())
python
def check_status_code(response, codes=None): codes = codes or [httplib.OK] checker = ( codes if callable(codes) else lambda resp: resp.status_code in codes ) if not checker(response): raise exceptions.ApiError(response, response.json())
[ "def", "check_status_code", "(", "response", ",", "codes", "=", "None", ")", ":", "codes", "=", "codes", "or", "[", "httplib", ".", "OK", "]", "checker", "=", "(", "codes", "if", "callable", "(", "codes", ")", "else", "lambda", "resp", ":", "resp", "...
Check HTTP status code and raise exception if incorrect. :param Response response: HTTP response :param codes: List of accepted codes or callable :raises: ApiError if code invalid
[ "Check", "HTTP", "status", "code", "and", "raise", "exception", "if", "incorrect", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L42-L56
22,220
jmcarp/betfair.py
betfair/utils.py
result_or_error
def result_or_error(response): """Get `result` field from Betfair response or raise exception if not found. :param Response response: :raises: ApiError if no results passed """ data = response.json() result = data.get('result') if result is not None: return result raise exceptions.ApiError(response, data)
python
def result_or_error(response): data = response.json() result = data.get('result') if result is not None: return result raise exceptions.ApiError(response, data)
[ "def", "result_or_error", "(", "response", ")", ":", "data", "=", "response", ".", "json", "(", ")", "result", "=", "data", ".", "get", "(", "'result'", ")", "if", "result", "is", "not", "None", ":", "return", "result", "raise", "exceptions", ".", "Api...
Get `result` field from Betfair response or raise exception if not found. :param Response response: :raises: ApiError if no results passed
[ "Get", "result", "field", "from", "Betfair", "response", "or", "raise", "exception", "if", "not", "found", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L59-L70
22,221
jmcarp/betfair.py
betfair/utils.py
make_payload
def make_payload(base, method, params): """Build Betfair JSON-RPC payload. :param str base: Betfair base ("Sports" or "Account") :param str method: Betfair endpoint :param dict params: Request parameters """ payload = { 'jsonrpc': '2.0', 'method': '{base}APING/v1.0/{method}'.format(**locals()), 'params': utils.serialize_dict(params), 'id': 1, } return payload
python
def make_payload(base, method, params): payload = { 'jsonrpc': '2.0', 'method': '{base}APING/v1.0/{method}'.format(**locals()), 'params': utils.serialize_dict(params), 'id': 1, } return payload
[ "def", "make_payload", "(", "base", ",", "method", ",", "params", ")", ":", "payload", "=", "{", "'jsonrpc'", ":", "'2.0'", ",", "'method'", ":", "'{base}APING/v1.0/{method}'", ".", "format", "(", "*", "*", "locals", "(", ")", ")", ",", "'params'", ":", ...
Build Betfair JSON-RPC payload. :param str base: Betfair base ("Sports" or "Account") :param str method: Betfair endpoint :param dict params: Request parameters
[ "Build", "Betfair", "JSON", "-", "RPC", "payload", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L100-L113
22,222
jmcarp/betfair.py
betfair/utils.py
requires_login
def requires_login(func, *args, **kwargs): """Decorator to check that the user is logged in. Raises `BetfairError` if instance variable `session_token` is absent. """ self = args[0] if self.session_token: return func(*args, **kwargs) raise exceptions.NotLoggedIn()
python
def requires_login(func, *args, **kwargs): self = args[0] if self.session_token: return func(*args, **kwargs) raise exceptions.NotLoggedIn()
[ "def", "requires_login", "(", "func", ",", "*", "args", ",", "*", "*", "kwargs", ")", ":", "self", "=", "args", "[", "0", "]", "if", "self", ".", "session_token", ":", "return", "func", "(", "*", "args", ",", "*", "*", "kwargs", ")", "raise", "ex...
Decorator to check that the user is logged in. Raises `BetfairError` if instance variable `session_token` is absent.
[ "Decorator", "to", "check", "that", "the", "user", "is", "logged", "in", ".", "Raises", "BetfairError", "if", "instance", "variable", "session_token", "is", "absent", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/utils.py#L117-L124
22,223
jmcarp/betfair.py
betfair/price.py
nearest_price
def nearest_price(price, cutoffs=CUTOFFS): """Returns the nearest Betfair odds value to price. Adapted from Anton Zemlyanov's AlgoTrader project (MIT licensed). https://github.com/AlgoTrader/betfair-sports-api/blob/master/lib/betfair_price.js :param float price: Approximate Betfair price (i.e. decimal odds value) :param tuple cutoffs: Optional tuple of (cutoff, step) pairs :returns: The nearest Befair price :rtype: float """ if price <= MIN_PRICE: return MIN_PRICE if price > MAX_PRICE: return MAX_PRICE price = as_dec(price) for cutoff, step in cutoffs: if price < cutoff: break step = as_dec(step) return float((price * step).quantize(2, ROUND_HALF_UP) / step)
python
def nearest_price(price, cutoffs=CUTOFFS): if price <= MIN_PRICE: return MIN_PRICE if price > MAX_PRICE: return MAX_PRICE price = as_dec(price) for cutoff, step in cutoffs: if price < cutoff: break step = as_dec(step) return float((price * step).quantize(2, ROUND_HALF_UP) / step)
[ "def", "nearest_price", "(", "price", ",", "cutoffs", "=", "CUTOFFS", ")", ":", "if", "price", "<=", "MIN_PRICE", ":", "return", "MIN_PRICE", "if", "price", ">", "MAX_PRICE", ":", "return", "MAX_PRICE", "price", "=", "as_dec", "(", "price", ")", "for", "...
Returns the nearest Betfair odds value to price. Adapted from Anton Zemlyanov's AlgoTrader project (MIT licensed). https://github.com/AlgoTrader/betfair-sports-api/blob/master/lib/betfair_price.js :param float price: Approximate Betfair price (i.e. decimal odds value) :param tuple cutoffs: Optional tuple of (cutoff, step) pairs :returns: The nearest Befair price :rtype: float
[ "Returns", "the", "nearest", "Betfair", "odds", "value", "to", "price", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/price.py#L49-L70
22,224
jmcarp/betfair.py
betfair/betfair.py
Betfair.login
def login(self, username, password): """Log in to Betfair. Sets `session_token` if successful. :param str username: Username :param str password: Password :raises: BetfairLoginError """ response = self.session.post( os.path.join(self.identity_url, 'certlogin'), cert=self.cert_file, data=urllib.urlencode({ 'username': username, 'password': password, }), headers={ 'X-Application': self.app_key, 'Content-Type': 'application/x-www-form-urlencoded', }, timeout=self.timeout, ) utils.check_status_code(response, [httplib.OK]) data = response.json() if data.get('loginStatus') != 'SUCCESS': raise exceptions.LoginError(response, data) self.session_token = data['sessionToken']
python
def login(self, username, password): response = self.session.post( os.path.join(self.identity_url, 'certlogin'), cert=self.cert_file, data=urllib.urlencode({ 'username': username, 'password': password, }), headers={ 'X-Application': self.app_key, 'Content-Type': 'application/x-www-form-urlencoded', }, timeout=self.timeout, ) utils.check_status_code(response, [httplib.OK]) data = response.json() if data.get('loginStatus') != 'SUCCESS': raise exceptions.LoginError(response, data) self.session_token = data['sessionToken']
[ "def", "login", "(", "self", ",", "username", ",", "password", ")", ":", "response", "=", "self", ".", "session", ".", "post", "(", "os", ".", "path", ".", "join", "(", "self", ".", "identity_url", ",", "'certlogin'", ")", ",", "cert", "=", "self", ...
Log in to Betfair. Sets `session_token` if successful. :param str username: Username :param str password: Password :raises: BetfairLoginError
[ "Log", "in", "to", "Betfair", ".", "Sets", "session_token", "if", "successful", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L93-L117
22,225
jmcarp/betfair.py
betfair/betfair.py
Betfair.list_market_profit_and_loss
def list_market_profit_and_loss( self, market_ids, include_settled_bets=False, include_bsp_bets=None, net_of_commission=None): """Retrieve profit and loss for a given list of markets. :param list market_ids: List of markets to calculate profit and loss :param bool include_settled_bets: Option to include settled bets :param bool include_bsp_bets: Option to include BSP bets :param bool net_of_commission: Option to return profit and loss net of users current commission rate for this market including any special tariffs """ return self.make_api_request( 'Sports', 'listMarketProfitAndLoss', utils.get_kwargs(locals()), model=models.MarketProfitAndLoss, )
python
def list_market_profit_and_loss( self, market_ids, include_settled_bets=False, include_bsp_bets=None, net_of_commission=None): return self.make_api_request( 'Sports', 'listMarketProfitAndLoss', utils.get_kwargs(locals()), model=models.MarketProfitAndLoss, )
[ "def", "list_market_profit_and_loss", "(", "self", ",", "market_ids", ",", "include_settled_bets", "=", "False", ",", "include_bsp_bets", "=", "None", ",", "net_of_commission", "=", "None", ")", ":", "return", "self", ".", "make_api_request", "(", "'Sports'", ",",...
Retrieve profit and loss for a given list of markets. :param list market_ids: List of markets to calculate profit and loss :param bool include_settled_bets: Option to include settled bets :param bool include_bsp_bets: Option to include BSP bets :param bool net_of_commission: Option to return profit and loss net of users current commission rate for this market including any special tariffs
[ "Retrieve", "profit", "and", "loss", "for", "a", "given", "list", "of", "markets", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L284-L301
22,226
jmcarp/betfair.py
betfair/betfair.py
Betfair.iter_list_market_book
def iter_list_market_book(self, market_ids, chunk_size, **kwargs): """Split call to `list_market_book` into separate requests. :param list market_ids: List of market IDs :param int chunk_size: Number of records per chunk :param dict kwargs: Arguments passed to `list_market_book` """ return itertools.chain(*( self.list_market_book(market_chunk, **kwargs) for market_chunk in utils.get_chunks(market_ids, chunk_size) ))
python
def iter_list_market_book(self, market_ids, chunk_size, **kwargs): return itertools.chain(*( self.list_market_book(market_chunk, **kwargs) for market_chunk in utils.get_chunks(market_ids, chunk_size) ))
[ "def", "iter_list_market_book", "(", "self", ",", "market_ids", ",", "chunk_size", ",", "*", "*", "kwargs", ")", ":", "return", "itertools", ".", "chain", "(", "*", "(", "self", ".", "list_market_book", "(", "market_chunk", ",", "*", "*", "kwargs", ")", ...
Split call to `list_market_book` into separate requests. :param list market_ids: List of market IDs :param int chunk_size: Number of records per chunk :param dict kwargs: Arguments passed to `list_market_book`
[ "Split", "call", "to", "list_market_book", "into", "separate", "requests", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L305-L315
22,227
jmcarp/betfair.py
betfair/betfair.py
Betfair.iter_list_market_profit_and_loss
def iter_list_market_profit_and_loss( self, market_ids, chunk_size, **kwargs): """Split call to `list_market_profit_and_loss` into separate requests. :param list market_ids: List of market IDs :param int chunk_size: Number of records per chunk :param dict kwargs: Arguments passed to `list_market_profit_and_loss` """ return itertools.chain(*( self.list_market_profit_and_loss(market_chunk, **kwargs) for market_chunk in utils.get_chunks(market_ids, chunk_size) ))
python
def iter_list_market_profit_and_loss( self, market_ids, chunk_size, **kwargs): return itertools.chain(*( self.list_market_profit_and_loss(market_chunk, **kwargs) for market_chunk in utils.get_chunks(market_ids, chunk_size) ))
[ "def", "iter_list_market_profit_and_loss", "(", "self", ",", "market_ids", ",", "chunk_size", ",", "*", "*", "kwargs", ")", ":", "return", "itertools", ".", "chain", "(", "*", "(", "self", ".", "list_market_profit_and_loss", "(", "market_chunk", ",", "*", "*",...
Split call to `list_market_profit_and_loss` into separate requests. :param list market_ids: List of market IDs :param int chunk_size: Number of records per chunk :param dict kwargs: Arguments passed to `list_market_profit_and_loss`
[ "Split", "call", "to", "list_market_profit_and_loss", "into", "separate", "requests", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L317-L328
22,228
jmcarp/betfair.py
betfair/betfair.py
Betfair.place_orders
def place_orders(self, market_id, instructions, customer_ref=None): """Place new orders into market. This operation is atomic in that all orders will be placed or none will be placed. :param str market_id: The market id these orders are to be placed on :param list instructions: List of `PlaceInstruction` objects :param str customer_ref: Optional order identifier string """ return self.make_api_request( 'Sports', 'placeOrders', utils.get_kwargs(locals()), model=models.PlaceExecutionReport, )
python
def place_orders(self, market_id, instructions, customer_ref=None): return self.make_api_request( 'Sports', 'placeOrders', utils.get_kwargs(locals()), model=models.PlaceExecutionReport, )
[ "def", "place_orders", "(", "self", ",", "market_id", ",", "instructions", ",", "customer_ref", "=", "None", ")", ":", "return", "self", ".", "make_api_request", "(", "'Sports'", ",", "'placeOrders'", ",", "utils", ".", "get_kwargs", "(", "locals", "(", ")",...
Place new orders into market. This operation is atomic in that all orders will be placed or none will be placed. :param str market_id: The market id these orders are to be placed on :param list instructions: List of `PlaceInstruction` objects :param str customer_ref: Optional order identifier string
[ "Place", "new", "orders", "into", "market", ".", "This", "operation", "is", "atomic", "in", "that", "all", "orders", "will", "be", "placed", "or", "none", "will", "be", "placed", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L384-L397
22,229
jmcarp/betfair.py
betfair/betfair.py
Betfair.update_orders
def update_orders(self, market_id, instructions, customer_ref=None): """Update non-exposure changing fields. :param str market_id: The market id these orders are to be placed on :param list instructions: List of `UpdateInstruction` objects :param str customer_ref: Optional order identifier string """ return self.make_api_request( 'Sports', 'updateOrders', utils.get_kwargs(locals()), model=models.UpdateExecutionReport, )
python
def update_orders(self, market_id, instructions, customer_ref=None): return self.make_api_request( 'Sports', 'updateOrders', utils.get_kwargs(locals()), model=models.UpdateExecutionReport, )
[ "def", "update_orders", "(", "self", ",", "market_id", ",", "instructions", ",", "customer_ref", "=", "None", ")", ":", "return", "self", ".", "make_api_request", "(", "'Sports'", ",", "'updateOrders'", ",", "utils", ".", "get_kwargs", "(", "locals", "(", ")...
Update non-exposure changing fields. :param str market_id: The market id these orders are to be placed on :param list instructions: List of `UpdateInstruction` objects :param str customer_ref: Optional order identifier string
[ "Update", "non", "-", "exposure", "changing", "fields", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L432-L444
22,230
jmcarp/betfair.py
betfair/betfair.py
Betfair.transfer_funds
def transfer_funds(self, from_, to, amount): """Transfer funds between the UK Exchange and Australian Exchange wallets. :param Wallet from_: Source wallet :param Wallet to: Destination wallet :param float amount: Amount to transfer """ return self.make_api_request( 'Account', 'transferFunds', utils.get_kwargs(locals()), model=models.TransferResponse, )
python
def transfer_funds(self, from_, to, amount): return self.make_api_request( 'Account', 'transferFunds', utils.get_kwargs(locals()), model=models.TransferResponse, )
[ "def", "transfer_funds", "(", "self", ",", "from_", ",", "to", ",", "amount", ")", ":", "return", "self", ".", "make_api_request", "(", "'Account'", ",", "'transferFunds'", ",", "utils", ".", "get_kwargs", "(", "locals", "(", ")", ")", ",", "model", "=",...
Transfer funds between the UK Exchange and Australian Exchange wallets. :param Wallet from_: Source wallet :param Wallet to: Destination wallet :param float amount: Amount to transfer
[ "Transfer", "funds", "between", "the", "UK", "Exchange", "and", "Australian", "Exchange", "wallets", "." ]
116df2fdc512575d1b4c4f1749d4a5bf98e519ff
https://github.com/jmcarp/betfair.py/blob/116df2fdc512575d1b4c4f1749d4a5bf98e519ff/betfair/betfair.py#L505-L517
22,231
edmondburnett/twitter-text-python
ttp/ttp.py
Parser.parse
def parse(self, text, html=True): '''Parse the text and return a ParseResult instance.''' self._urls = [] self._users = [] self._lists = [] self._tags = [] reply = REPLY_REGEX.match(text) reply = reply.groups(0)[0] if reply is not None else None parsed_html = self._html(text) if html else self._text(text) return ParseResult(self._urls, self._users, reply, self._lists, self._tags, parsed_html)
python
def parse(self, text, html=True): '''Parse the text and return a ParseResult instance.''' self._urls = [] self._users = [] self._lists = [] self._tags = [] reply = REPLY_REGEX.match(text) reply = reply.groups(0)[0] if reply is not None else None parsed_html = self._html(text) if html else self._text(text) return ParseResult(self._urls, self._users, reply, self._lists, self._tags, parsed_html)
[ "def", "parse", "(", "self", ",", "text", ",", "html", "=", "True", ")", ":", "self", ".", "_urls", "=", "[", "]", "self", ".", "_users", "=", "[", "]", "self", ".", "_lists", "=", "[", "]", "self", ".", "_tags", "=", "[", "]", "reply", "=", ...
Parse the text and return a ParseResult instance.
[ "Parse", "the", "text", "and", "return", "a", "ParseResult", "instance", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L125-L137
22,232
edmondburnett/twitter-text-python
ttp/ttp.py
Parser._text
def _text(self, text): '''Parse a Tweet without generating HTML.''' URL_REGEX.sub(self._parse_urls, text) USERNAME_REGEX.sub(self._parse_users, text) LIST_REGEX.sub(self._parse_lists, text) HASHTAG_REGEX.sub(self._parse_tags, text) return None
python
def _text(self, text): '''Parse a Tweet without generating HTML.''' URL_REGEX.sub(self._parse_urls, text) USERNAME_REGEX.sub(self._parse_users, text) LIST_REGEX.sub(self._parse_lists, text) HASHTAG_REGEX.sub(self._parse_tags, text) return None
[ "def", "_text", "(", "self", ",", "text", ")", ":", "URL_REGEX", ".", "sub", "(", "self", ".", "_parse_urls", ",", "text", ")", "USERNAME_REGEX", ".", "sub", "(", "self", ".", "_parse_users", ",", "text", ")", "LIST_REGEX", ".", "sub", "(", "self", "...
Parse a Tweet without generating HTML.
[ "Parse", "a", "Tweet", "without", "generating", "HTML", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L139-L145
22,233
edmondburnett/twitter-text-python
ttp/ttp.py
Parser._html
def _html(self, text): '''Parse a Tweet and generate HTML.''' html = URL_REGEX.sub(self._parse_urls, text) html = USERNAME_REGEX.sub(self._parse_users, html) html = LIST_REGEX.sub(self._parse_lists, html) return HASHTAG_REGEX.sub(self._parse_tags, html)
python
def _html(self, text): '''Parse a Tweet and generate HTML.''' html = URL_REGEX.sub(self._parse_urls, text) html = USERNAME_REGEX.sub(self._parse_users, html) html = LIST_REGEX.sub(self._parse_lists, html) return HASHTAG_REGEX.sub(self._parse_tags, html)
[ "def", "_html", "(", "self", ",", "text", ")", ":", "html", "=", "URL_REGEX", ".", "sub", "(", "self", ".", "_parse_urls", ",", "text", ")", "html", "=", "USERNAME_REGEX", ".", "sub", "(", "self", ".", "_parse_users", ",", "html", ")", "html", "=", ...
Parse a Tweet and generate HTML.
[ "Parse", "a", "Tweet", "and", "generate", "HTML", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L147-L152
22,234
edmondburnett/twitter-text-python
ttp/ttp.py
Parser._parse_urls
def _parse_urls(self, match): '''Parse URLs.''' mat = match.group(0) # Fix a bug in the regex concerning www...com and www.-foo.com domains # TODO fix this in the regex instead of working around it here domain = match.group(5) if domain[0] in '.-': return mat # Only allow IANA one letter domains that are actually registered if len(domain) == 5 \ and domain[-4:].lower() in ('.com', '.org', '.net') \ and not domain.lower() in IANA_ONE_LETTER_DOMAINS: return mat # Check for urls without http(s) pos = mat.find('http') if pos != -1: pre, url = mat[:pos], mat[pos:] full_url = url # Find the www and force https:// else: pos = mat.lower().find('www') pre, url = mat[:pos], mat[pos:] full_url = 'https://%s' % url if self._include_spans: span = match.span(0) # add an offset if pre is e.g. ' ' span = (span[0] + len(pre), span[1]) self._urls.append((url, span)) else: self._urls.append(url) if self._html: return '%s%s' % (pre, self.format_url(full_url, self._shorten_url(escape(url))))
python
def _parse_urls(self, match): '''Parse URLs.''' mat = match.group(0) # Fix a bug in the regex concerning www...com and www.-foo.com domains # TODO fix this in the regex instead of working around it here domain = match.group(5) if domain[0] in '.-': return mat # Only allow IANA one letter domains that are actually registered if len(domain) == 5 \ and domain[-4:].lower() in ('.com', '.org', '.net') \ and not domain.lower() in IANA_ONE_LETTER_DOMAINS: return mat # Check for urls without http(s) pos = mat.find('http') if pos != -1: pre, url = mat[:pos], mat[pos:] full_url = url # Find the www and force https:// else: pos = mat.lower().find('www') pre, url = mat[:pos], mat[pos:] full_url = 'https://%s' % url if self._include_spans: span = match.span(0) # add an offset if pre is e.g. ' ' span = (span[0] + len(pre), span[1]) self._urls.append((url, span)) else: self._urls.append(url) if self._html: return '%s%s' % (pre, self.format_url(full_url, self._shorten_url(escape(url))))
[ "def", "_parse_urls", "(", "self", ",", "match", ")", ":", "mat", "=", "match", ".", "group", "(", "0", ")", "# Fix a bug in the regex concerning www...com and www.-foo.com domains", "# TODO fix this in the regex instead of working around it here", "domain", "=", "match", "...
Parse URLs.
[ "Parse", "URLs", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L155-L195
22,235
edmondburnett/twitter-text-python
ttp/ttp.py
Parser._parse_users
def _parse_users(self, match): '''Parse usernames.''' # Don't parse lists here if match.group(2) is not None: return match.group(0) mat = match.group(0) if self._include_spans: self._users.append((mat[1:], match.span(0))) else: self._users.append(mat[1:]) if self._html: return self.format_username(mat[0:1], mat[1:])
python
def _parse_users(self, match): '''Parse usernames.''' # Don't parse lists here if match.group(2) is not None: return match.group(0) mat = match.group(0) if self._include_spans: self._users.append((mat[1:], match.span(0))) else: self._users.append(mat[1:]) if self._html: return self.format_username(mat[0:1], mat[1:])
[ "def", "_parse_users", "(", "self", ",", "match", ")", ":", "# Don't parse lists here", "if", "match", ".", "group", "(", "2", ")", "is", "not", "None", ":", "return", "match", ".", "group", "(", "0", ")", "mat", "=", "match", ".", "group", "(", "0",...
Parse usernames.
[ "Parse", "usernames", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L197-L211
22,236
edmondburnett/twitter-text-python
ttp/ttp.py
Parser._parse_lists
def _parse_lists(self, match): '''Parse lists.''' # Don't parse usernames here if match.group(4) is None: return match.group(0) pre, at_char, user, list_name = match.groups() list_name = list_name[1:] if self._include_spans: self._lists.append((user, list_name, match.span(0))) else: self._lists.append((user, list_name)) if self._html: return '%s%s' % (pre, self.format_list(at_char, user, list_name))
python
def _parse_lists(self, match): '''Parse lists.''' # Don't parse usernames here if match.group(4) is None: return match.group(0) pre, at_char, user, list_name = match.groups() list_name = list_name[1:] if self._include_spans: self._lists.append((user, list_name, match.span(0))) else: self._lists.append((user, list_name)) if self._html: return '%s%s' % (pre, self.format_list(at_char, user, list_name))
[ "def", "_parse_lists", "(", "self", ",", "match", ")", ":", "# Don't parse usernames here", "if", "match", ".", "group", "(", "4", ")", "is", "None", ":", "return", "match", ".", "group", "(", "0", ")", "pre", ",", "at_char", ",", "user", ",", "list_na...
Parse lists.
[ "Parse", "lists", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L213-L228
22,237
edmondburnett/twitter-text-python
ttp/ttp.py
Parser._parse_tags
def _parse_tags(self, match): '''Parse hashtags.''' mat = match.group(0) # Fix problems with the regex capturing stuff infront of the # tag = None for i in '#\uff03': pos = mat.rfind(i) if pos != -1: tag = i break pre, text = mat[:pos], mat[pos + 1:] if self._include_spans: span = match.span(0) # add an offset if pre is e.g. ' ' span = (span[0] + len(pre), span[1]) self._tags.append((text, span)) else: self._tags.append(text) if self._html: return '%s%s' % (pre, self.format_tag(tag, text))
python
def _parse_tags(self, match): '''Parse hashtags.''' mat = match.group(0) # Fix problems with the regex capturing stuff infront of the # tag = None for i in '#\uff03': pos = mat.rfind(i) if pos != -1: tag = i break pre, text = mat[:pos], mat[pos + 1:] if self._include_spans: span = match.span(0) # add an offset if pre is e.g. ' ' span = (span[0] + len(pre), span[1]) self._tags.append((text, span)) else: self._tags.append(text) if self._html: return '%s%s' % (pre, self.format_tag(tag, text))
[ "def", "_parse_tags", "(", "self", ",", "match", ")", ":", "mat", "=", "match", ".", "group", "(", "0", ")", "# Fix problems with the regex capturing stuff infront of the #", "tag", "=", "None", "for", "i", "in", "'#\\uff03'", ":", "pos", "=", "mat", ".", "r...
Parse hashtags.
[ "Parse", "hashtags", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L230-L253
22,238
edmondburnett/twitter-text-python
ttp/ttp.py
Parser._shorten_url
def _shorten_url(self, text): '''Shorten a URL and make sure to not cut of html entities.''' if len(text) > self._max_url_length and self._max_url_length != -1: text = text[0:self._max_url_length - 3] amp = text.rfind('&') close = text.rfind(';') if amp != -1 and (close == -1 or close < amp): text = text[0:amp] return text + '...' else: return text
python
def _shorten_url(self, text): '''Shorten a URL and make sure to not cut of html entities.''' if len(text) > self._max_url_length and self._max_url_length != -1: text = text[0:self._max_url_length - 3] amp = text.rfind('&') close = text.rfind(';') if amp != -1 and (close == -1 or close < amp): text = text[0:amp] return text + '...' else: return text
[ "def", "_shorten_url", "(", "self", ",", "text", ")", ":", "if", "len", "(", "text", ")", ">", "self", ".", "_max_url_length", "and", "self", ".", "_max_url_length", "!=", "-", "1", ":", "text", "=", "text", "[", "0", ":", "self", ".", "_max_url_leng...
Shorten a URL and make sure to not cut of html entities.
[ "Shorten", "a", "URL", "and", "make", "sure", "to", "not", "cut", "of", "html", "entities", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L255-L268
22,239
edmondburnett/twitter-text-python
ttp/ttp.py
Parser.format_list
def format_list(self, at_char, user, list_name): '''Return formatted HTML for a list.''' return '<a href="https://twitter.com/%s/lists/%s">%s%s/%s</a>' \ % (user, list_name, at_char, user, list_name)
python
def format_list(self, at_char, user, list_name): '''Return formatted HTML for a list.''' return '<a href="https://twitter.com/%s/lists/%s">%s%s/%s</a>' \ % (user, list_name, at_char, user, list_name)
[ "def", "format_list", "(", "self", ",", "at_char", ",", "user", ",", "list_name", ")", ":", "return", "'<a href=\"https://twitter.com/%s/lists/%s\">%s%s/%s</a>'", "%", "(", "user", ",", "list_name", ",", "at_char", ",", "user", ",", "list_name", ")" ]
Return formatted HTML for a list.
[ "Return", "formatted", "HTML", "for", "a", "list", "." ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/ttp.py#L281-L284
22,240
edmondburnett/twitter-text-python
ttp/utils.py
follow_shortlinks
def follow_shortlinks(shortlinks): """Follow redirects in list of shortlinks, return dict of resulting URLs""" links_followed = {} for shortlink in shortlinks: url = shortlink request_result = requests.get(url) redirect_history = request_result.history # history might look like: # (<Response [301]>, <Response [301]>) # where each response object has a URL all_urls = [] for redirect in redirect_history: all_urls.append(redirect.url) # append the final URL that we finish with all_urls.append(request_result.url) links_followed[shortlink] = all_urls return links_followed
python
def follow_shortlinks(shortlinks): links_followed = {} for shortlink in shortlinks: url = shortlink request_result = requests.get(url) redirect_history = request_result.history # history might look like: # (<Response [301]>, <Response [301]>) # where each response object has a URL all_urls = [] for redirect in redirect_history: all_urls.append(redirect.url) # append the final URL that we finish with all_urls.append(request_result.url) links_followed[shortlink] = all_urls return links_followed
[ "def", "follow_shortlinks", "(", "shortlinks", ")", ":", "links_followed", "=", "{", "}", "for", "shortlink", "in", "shortlinks", ":", "url", "=", "shortlink", "request_result", "=", "requests", ".", "get", "(", "url", ")", "redirect_history", "=", "request_re...
Follow redirects in list of shortlinks, return dict of resulting URLs
[ "Follow", "redirects", "in", "list", "of", "shortlinks", "return", "dict", "of", "resulting", "URLs" ]
2a23ced35bfd34c4bc4b7148afd85771e9eb8669
https://github.com/edmondburnett/twitter-text-python/blob/2a23ced35bfd34c4bc4b7148afd85771e9eb8669/ttp/utils.py#L8-L24
22,241
cloudendpoints/endpoints-python
endpoints/resource_container.py
_GetFieldAttributes
def _GetFieldAttributes(field): """Decomposes field into the needed arguments to pass to the constructor. This can be used to create copies of the field or to compare if two fields are "equal" (since __eq__ is not implemented on messages.Field). Args: field: A ProtoRPC message field (potentially to be copied). Raises: TypeError: If the field is not an instance of messages.Field. Returns: A pair of relevant arguments to be passed to the constructor for the field type. The first element is a list of positional arguments for the constructor and the second is a dictionary of keyword arguments. """ if not isinstance(field, messages.Field): raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,)) positional_args = [] kwargs = { 'required': field.required, 'repeated': field.repeated, 'variant': field.variant, 'default': field._Field__default, # pylint: disable=protected-access } if isinstance(field, messages.MessageField): # Message fields can't have a default kwargs.pop('default') if not isinstance(field, message_types.DateTimeField): positional_args.insert(0, field.message_type) elif isinstance(field, messages.EnumField): positional_args.insert(0, field.type) return positional_args, kwargs
python
def _GetFieldAttributes(field): if not isinstance(field, messages.Field): raise TypeError('Field %r to be copied not a ProtoRPC field.' % (field,)) positional_args = [] kwargs = { 'required': field.required, 'repeated': field.repeated, 'variant': field.variant, 'default': field._Field__default, # pylint: disable=protected-access } if isinstance(field, messages.MessageField): # Message fields can't have a default kwargs.pop('default') if not isinstance(field, message_types.DateTimeField): positional_args.insert(0, field.message_type) elif isinstance(field, messages.EnumField): positional_args.insert(0, field.type) return positional_args, kwargs
[ "def", "_GetFieldAttributes", "(", "field", ")", ":", "if", "not", "isinstance", "(", "field", ",", "messages", ".", "Field", ")", ":", "raise", "TypeError", "(", "'Field %r to be copied not a ProtoRPC field.'", "%", "(", "field", ",", ")", ")", "positional_args...
Decomposes field into the needed arguments to pass to the constructor. This can be used to create copies of the field or to compare if two fields are "equal" (since __eq__ is not implemented on messages.Field). Args: field: A ProtoRPC message field (potentially to be copied). Raises: TypeError: If the field is not an instance of messages.Field. Returns: A pair of relevant arguments to be passed to the constructor for the field type. The first element is a list of positional arguments for the constructor and the second is a dictionary of keyword arguments.
[ "Decomposes", "field", "into", "the", "needed", "arguments", "to", "pass", "to", "the", "constructor", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/resource_container.py#L142-L178
22,242
cloudendpoints/endpoints-python
endpoints/resource_container.py
_CompareFields
def _CompareFields(field, other_field): """Checks if two ProtoRPC fields are "equal". Compares the arguments, rather than the id of the elements (which is the default __eq__ behavior) as well as the class of the fields. Args: field: A ProtoRPC message field to be compared. other_field: A ProtoRPC message field to be compared. Returns: Boolean indicating whether the fields are equal. """ field_attrs = _GetFieldAttributes(field) other_field_attrs = _GetFieldAttributes(other_field) if field_attrs != other_field_attrs: return False return field.__class__ == other_field.__class__
python
def _CompareFields(field, other_field): field_attrs = _GetFieldAttributes(field) other_field_attrs = _GetFieldAttributes(other_field) if field_attrs != other_field_attrs: return False return field.__class__ == other_field.__class__
[ "def", "_CompareFields", "(", "field", ",", "other_field", ")", ":", "field_attrs", "=", "_GetFieldAttributes", "(", "field", ")", "other_field_attrs", "=", "_GetFieldAttributes", "(", "other_field", ")", "if", "field_attrs", "!=", "other_field_attrs", ":", "return"...
Checks if two ProtoRPC fields are "equal". Compares the arguments, rather than the id of the elements (which is the default __eq__ behavior) as well as the class of the fields. Args: field: A ProtoRPC message field to be compared. other_field: A ProtoRPC message field to be compared. Returns: Boolean indicating whether the fields are equal.
[ "Checks", "if", "two", "ProtoRPC", "fields", "are", "equal", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/resource_container.py#L181-L198
22,243
cloudendpoints/endpoints-python
endpoints/resource_container.py
ResourceContainer.combined_message_class
def combined_message_class(self): """A ProtoRPC message class with both request and parameters fields. Caches the result in a local private variable. Uses _CopyField to create copies of the fields from the existing request and parameters classes since those fields are "owned" by the message classes. Raises: TypeError: If a field name is used in both the request message and the parameters but the two fields do not represent the same type. Returns: Value of combined message class for this property. """ if self.__combined_message_class is not None: return self.__combined_message_class fields = {} # We don't need to preserve field.number since this combined class is only # used for the protorpc remote.method and is not needed for the API config. # The only place field.number matters is in parameterOrder, but this is set # based on container.parameters_message_class which will use the field # numbers originally passed in. # Counter for fields. field_number = 1 for field in self.body_message_class.all_fields(): fields[field.name] = _CopyField(field, number=field_number) field_number += 1 for field in self.parameters_message_class.all_fields(): if field.name in fields: if not _CompareFields(field, fields[field.name]): raise TypeError('Field %r contained in both parameters and request ' 'body, but the fields differ.' % (field.name,)) else: # Skip a field that's already there. continue fields[field.name] = _CopyField(field, number=field_number) field_number += 1 self.__combined_message_class = type('CombinedContainer', (messages.Message,), fields) return self.__combined_message_class
python
def combined_message_class(self): if self.__combined_message_class is not None: return self.__combined_message_class fields = {} # We don't need to preserve field.number since this combined class is only # used for the protorpc remote.method and is not needed for the API config. # The only place field.number matters is in parameterOrder, but this is set # based on container.parameters_message_class which will use the field # numbers originally passed in. # Counter for fields. field_number = 1 for field in self.body_message_class.all_fields(): fields[field.name] = _CopyField(field, number=field_number) field_number += 1 for field in self.parameters_message_class.all_fields(): if field.name in fields: if not _CompareFields(field, fields[field.name]): raise TypeError('Field %r contained in both parameters and request ' 'body, but the fields differ.' % (field.name,)) else: # Skip a field that's already there. continue fields[field.name] = _CopyField(field, number=field_number) field_number += 1 self.__combined_message_class = type('CombinedContainer', (messages.Message,), fields) return self.__combined_message_class
[ "def", "combined_message_class", "(", "self", ")", ":", "if", "self", ".", "__combined_message_class", "is", "not", "None", ":", "return", "self", ".", "__combined_message_class", "fields", "=", "{", "}", "# We don't need to preserve field.number since this combined class...
A ProtoRPC message class with both request and parameters fields. Caches the result in a local private variable. Uses _CopyField to create copies of the fields from the existing request and parameters classes since those fields are "owned" by the message classes. Raises: TypeError: If a field name is used in both the request message and the parameters but the two fields do not represent the same type. Returns: Value of combined message class for this property.
[ "A", "ProtoRPC", "message", "class", "with", "both", "request", "and", "parameters", "fields", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/resource_container.py#L58-L100
22,244
cloudendpoints/endpoints-python
endpoints/resource_container.py
ResourceContainer.add_to_cache
def add_to_cache(cls, remote_info, container): # pylint: disable=g-bad-name """Adds a ResourceContainer to a cache tying it to a protorpc method. Args: remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding to a method. container: An instance of ResourceContainer. Raises: TypeError: if the container is not an instance of cls. KeyError: if the remote method has been reference by a container before. This created remote method should never occur because a remote method is created once. """ if not isinstance(container, cls): raise TypeError('%r not an instance of %r, could not be added to cache.' % (container, cls)) if remote_info in cls.__remote_info_cache: raise KeyError('Cache has collision but should not.') cls.__remote_info_cache[remote_info] = container
python
def add_to_cache(cls, remote_info, container): # pylint: disable=g-bad-name if not isinstance(container, cls): raise TypeError('%r not an instance of %r, could not be added to cache.' % (container, cls)) if remote_info in cls.__remote_info_cache: raise KeyError('Cache has collision but should not.') cls.__remote_info_cache[remote_info] = container
[ "def", "add_to_cache", "(", "cls", ",", "remote_info", ",", "container", ")", ":", "# pylint: disable=g-bad-name", "if", "not", "isinstance", "(", "container", ",", "cls", ")", ":", "raise", "TypeError", "(", "'%r not an instance of %r, could not be added to cache.'", ...
Adds a ResourceContainer to a cache tying it to a protorpc method. Args: remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding to a method. container: An instance of ResourceContainer. Raises: TypeError: if the container is not an instance of cls. KeyError: if the remote method has been reference by a container before. This created remote method should never occur because a remote method is created once.
[ "Adds", "a", "ResourceContainer", "to", "a", "cache", "tying", "it", "to", "a", "protorpc", "method", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/resource_container.py#L103-L122
22,245
cloudendpoints/endpoints-python
endpoints/resource_container.py
ResourceContainer.get_request_message
def get_request_message(cls, remote_info): # pylint: disable=g-bad-name """Gets request message or container from remote info. Args: remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding to a method. Returns: Either an instance of the request type from the remote or the ResourceContainer that was cached with the remote method. """ if remote_info in cls.__remote_info_cache: return cls.__remote_info_cache[remote_info] else: return remote_info.request_type()
python
def get_request_message(cls, remote_info): # pylint: disable=g-bad-name if remote_info in cls.__remote_info_cache: return cls.__remote_info_cache[remote_info] else: return remote_info.request_type()
[ "def", "get_request_message", "(", "cls", ",", "remote_info", ")", ":", "# pylint: disable=g-bad-name", "if", "remote_info", "in", "cls", ".", "__remote_info_cache", ":", "return", "cls", ".", "__remote_info_cache", "[", "remote_info", "]", "else", ":", "return", ...
Gets request message or container from remote info. Args: remote_info: Instance of protorpc.remote._RemoteMethodInfo corresponding to a method. Returns: Either an instance of the request type from the remote or the ResourceContainer that was cached with the remote method.
[ "Gets", "request", "message", "or", "container", "from", "remote", "info", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/resource_container.py#L125-L139
22,246
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_is_auth_info_available
def _is_auth_info_available(): """Check if user auth info has been set in environment variables.""" return (_ENDPOINTS_USER_INFO in os.environ or (_ENV_AUTH_EMAIL in os.environ and _ENV_AUTH_DOMAIN in os.environ) or _ENV_USE_OAUTH_SCOPE in os.environ)
python
def _is_auth_info_available(): return (_ENDPOINTS_USER_INFO in os.environ or (_ENV_AUTH_EMAIL in os.environ and _ENV_AUTH_DOMAIN in os.environ) or _ENV_USE_OAUTH_SCOPE in os.environ)
[ "def", "_is_auth_info_available", "(", ")", ":", "return", "(", "_ENDPOINTS_USER_INFO", "in", "os", ".", "environ", "or", "(", "_ENV_AUTH_EMAIL", "in", "os", ".", "environ", "and", "_ENV_AUTH_DOMAIN", "in", "os", ".", "environ", ")", "or", "_ENV_USE_OAUTH_SCOPE"...
Check if user auth info has been set in environment variables.
[ "Check", "if", "user", "auth", "info", "has", "been", "set", "in", "environment", "variables", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L151-L155
22,247
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_get_token
def _get_token( request=None, allowed_auth_schemes=('OAuth', 'Bearer'), allowed_query_keys=('bearer_token', 'access_token')): """Get the auth token for this request. Auth token may be specified in either the Authorization header or as a query param (either access_token or bearer_token). We'll check in this order: 1. Authorization header. 2. bearer_token query param. 3. access_token query param. Args: request: The current request, or None. Returns: The token in the request or None. """ allowed_auth_schemes = _listlike_guard( allowed_auth_schemes, 'allowed_auth_schemes', iterable_only=True) # Check if the token is in the Authorization header. auth_header = os.environ.get('HTTP_AUTHORIZATION') if auth_header: for auth_scheme in allowed_auth_schemes: if auth_header.startswith(auth_scheme): return auth_header[len(auth_scheme) + 1:] # If an auth header was specified, even if it's an invalid one, we won't # look for the token anywhere else. return None # Check if the token is in the query string. if request: allowed_query_keys = _listlike_guard( allowed_query_keys, 'allowed_query_keys', iterable_only=True) for key in allowed_query_keys: token, _ = request.get_unrecognized_field_info(key) if token: return token
python
def _get_token( request=None, allowed_auth_schemes=('OAuth', 'Bearer'), allowed_query_keys=('bearer_token', 'access_token')): allowed_auth_schemes = _listlike_guard( allowed_auth_schemes, 'allowed_auth_schemes', iterable_only=True) # Check if the token is in the Authorization header. auth_header = os.environ.get('HTTP_AUTHORIZATION') if auth_header: for auth_scheme in allowed_auth_schemes: if auth_header.startswith(auth_scheme): return auth_header[len(auth_scheme) + 1:] # If an auth header was specified, even if it's an invalid one, we won't # look for the token anywhere else. return None # Check if the token is in the query string. if request: allowed_query_keys = _listlike_guard( allowed_query_keys, 'allowed_query_keys', iterable_only=True) for key in allowed_query_keys: token, _ = request.get_unrecognized_field_info(key) if token: return token
[ "def", "_get_token", "(", "request", "=", "None", ",", "allowed_auth_schemes", "=", "(", "'OAuth'", ",", "'Bearer'", ")", ",", "allowed_query_keys", "=", "(", "'bearer_token'", ",", "'access_token'", ")", ")", ":", "allowed_auth_schemes", "=", "_listlike_guard", ...
Get the auth token for this request. Auth token may be specified in either the Authorization header or as a query param (either access_token or bearer_token). We'll check in this order: 1. Authorization header. 2. bearer_token query param. 3. access_token query param. Args: request: The current request, or None. Returns: The token in the request or None.
[ "Get", "the", "auth", "token", "for", "this", "request", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L248-L285
22,248
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_get_id_token_user
def _get_id_token_user(token, issuers, audiences, allowed_client_ids, time_now, cache): """Get a User for the given id token, if the token is valid. Args: token: The id_token to check. issuers: dict of Issuers audiences: List of audiences that are acceptable. allowed_client_ids: List of client IDs that are acceptable. time_now: The current time as a long (eg. long(time.time())). cache: Cache to use (eg. the memcache module). Returns: A User if the token is valid, None otherwise. """ # Verify that the token is valid before we try to extract anything from it. # This verifies the signature and some of the basic info in the token. for issuer_key, issuer in issuers.items(): issuer_cert_uri = convert_jwks_uri(issuer.jwks_uri) try: parsed_token = _verify_signed_jwt_with_certs( token, time_now, cache, cert_uri=issuer_cert_uri) except Exception: # pylint: disable=broad-except _logger.debug( 'id_token verification failed for issuer %s', issuer_key, exc_info=True) continue issuer_values = _listlike_guard(issuer.issuer, 'issuer', log_warning=False) if isinstance(audiences, _Mapping): audiences = audiences[issuer_key] if _verify_parsed_token( parsed_token, issuer_values, audiences, allowed_client_ids, # There's some special handling we do for Google issuers. # ESP doesn't do this, and it's both unnecessary and invalid for other issuers. # So we'll turn it off except in the Google issuer case. is_legacy_google_auth=(issuer.issuer == _ISSUERS)): email = parsed_token['email'] # The token might have an id, but it's a Gaia ID that's been # obfuscated with the Focus key, rather than the AppEngine (igoogle) # key. If the developer ever put this email into the user DB # and retrieved the ID from that, it'd be different from the ID we'd # return here, so it's safer to not return the ID. # Instead, we'll only return the email. return users.User(email)
python
def _get_id_token_user(token, issuers, audiences, allowed_client_ids, time_now, cache): # Verify that the token is valid before we try to extract anything from it. # This verifies the signature and some of the basic info in the token. for issuer_key, issuer in issuers.items(): issuer_cert_uri = convert_jwks_uri(issuer.jwks_uri) try: parsed_token = _verify_signed_jwt_with_certs( token, time_now, cache, cert_uri=issuer_cert_uri) except Exception: # pylint: disable=broad-except _logger.debug( 'id_token verification failed for issuer %s', issuer_key, exc_info=True) continue issuer_values = _listlike_guard(issuer.issuer, 'issuer', log_warning=False) if isinstance(audiences, _Mapping): audiences = audiences[issuer_key] if _verify_parsed_token( parsed_token, issuer_values, audiences, allowed_client_ids, # There's some special handling we do for Google issuers. # ESP doesn't do this, and it's both unnecessary and invalid for other issuers. # So we'll turn it off except in the Google issuer case. is_legacy_google_auth=(issuer.issuer == _ISSUERS)): email = parsed_token['email'] # The token might have an id, but it's a Gaia ID that's been # obfuscated with the Focus key, rather than the AppEngine (igoogle) # key. If the developer ever put this email into the user DB # and retrieved the ID from that, it'd be different from the ID we'd # return here, so it's safer to not return the ID. # Instead, we'll only return the email. return users.User(email)
[ "def", "_get_id_token_user", "(", "token", ",", "issuers", ",", "audiences", ",", "allowed_client_ids", ",", "time_now", ",", "cache", ")", ":", "# Verify that the token is valid before we try to extract anything from it.", "# This verifies the signature and some of the basic info ...
Get a User for the given id token, if the token is valid. Args: token: The id_token to check. issuers: dict of Issuers audiences: List of audiences that are acceptable. allowed_client_ids: List of client IDs that are acceptable. time_now: The current time as a long (eg. long(time.time())). cache: Cache to use (eg. the memcache module). Returns: A User if the token is valid, None otherwise.
[ "Get", "a", "User", "for", "the", "given", "id", "token", "if", "the", "token", "is", "valid", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L288-L330
22,249
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_process_scopes
def _process_scopes(scopes): """Parse a scopes list into a set of all scopes and a set of sufficient scope sets. scopes: A list of strings, each of which is a space-separated list of scopes. Examples: ['scope1'] ['scope1', 'scope2'] ['scope1', 'scope2 scope3'] Returns: all_scopes: a set of strings, each of which is one scope to check for sufficient_scopes: a set of sets of strings; each inner set is a set of scopes which are sufficient for access. Example: {{'scope1'}, {'scope2', 'scope3'}} """ all_scopes = set() sufficient_scopes = set() for scope_set in scopes: scope_set_scopes = frozenset(scope_set.split()) all_scopes.update(scope_set_scopes) sufficient_scopes.add(scope_set_scopes) return all_scopes, sufficient_scopes
python
def _process_scopes(scopes): all_scopes = set() sufficient_scopes = set() for scope_set in scopes: scope_set_scopes = frozenset(scope_set.split()) all_scopes.update(scope_set_scopes) sufficient_scopes.add(scope_set_scopes) return all_scopes, sufficient_scopes
[ "def", "_process_scopes", "(", "scopes", ")", ":", "all_scopes", "=", "set", "(", ")", "sufficient_scopes", "=", "set", "(", ")", "for", "scope_set", "in", "scopes", ":", "scope_set_scopes", "=", "frozenset", "(", "scope_set", ".", "split", "(", ")", ")", ...
Parse a scopes list into a set of all scopes and a set of sufficient scope sets. scopes: A list of strings, each of which is a space-separated list of scopes. Examples: ['scope1'] ['scope1', 'scope2'] ['scope1', 'scope2 scope3'] Returns: all_scopes: a set of strings, each of which is one scope to check for sufficient_scopes: a set of sets of strings; each inner set is a set of scopes which are sufficient for access. Example: {{'scope1'}, {'scope2', 'scope3'}}
[ "Parse", "a", "scopes", "list", "into", "a", "set", "of", "all", "scopes", "and", "a", "set", "of", "sufficient", "scope", "sets", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L342-L362
22,250
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_are_scopes_sufficient
def _are_scopes_sufficient(authorized_scopes, sufficient_scopes): """Check if a list of authorized scopes satisfies any set of sufficient scopes. Args: authorized_scopes: a list of strings, return value from oauth.get_authorized_scopes sufficient_scopes: a set of sets of strings, return value from _process_scopes """ for sufficient_scope_set in sufficient_scopes: if sufficient_scope_set.issubset(authorized_scopes): return True return False
python
def _are_scopes_sufficient(authorized_scopes, sufficient_scopes): for sufficient_scope_set in sufficient_scopes: if sufficient_scope_set.issubset(authorized_scopes): return True return False
[ "def", "_are_scopes_sufficient", "(", "authorized_scopes", ",", "sufficient_scopes", ")", ":", "for", "sufficient_scope_set", "in", "sufficient_scopes", ":", "if", "sufficient_scope_set", ".", "issubset", "(", "authorized_scopes", ")", ":", "return", "True", "return", ...
Check if a list of authorized scopes satisfies any set of sufficient scopes. Args: authorized_scopes: a list of strings, return value from oauth.get_authorized_scopes sufficient_scopes: a set of sets of strings, return value from _process_scopes
[ "Check", "if", "a", "list", "of", "authorized", "scopes", "satisfies", "any", "set", "of", "sufficient", "scopes", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L365-L375
22,251
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_set_bearer_user_vars
def _set_bearer_user_vars(allowed_client_ids, scopes): """Validate the oauth bearer token and set endpoints auth user variables. If the bearer token is valid, this sets ENDPOINTS_USE_OAUTH_SCOPE. This provides enough information that our endpoints.get_current_user() function can get the user. Args: allowed_client_ids: List of client IDs that are acceptable. scopes: List of acceptable scopes. """ all_scopes, sufficient_scopes = _process_scopes(scopes) try: authorized_scopes = oauth.get_authorized_scopes(sorted(all_scopes)) except oauth.Error: _logger.debug('Unable to get authorized scopes.', exc_info=True) return if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes): _logger.warning('Authorized scopes did not satisfy scope requirements.') return client_id = oauth.get_client_id(authorized_scopes) # The client ID must be in allowed_client_ids. If allowed_client_ids is # empty, don't allow any client ID. If allowed_client_ids is set to # SKIP_CLIENT_ID_CHECK, all client IDs will be allowed. if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and client_id not in allowed_client_ids): _logger.warning('Client ID is not allowed: %s', client_id) return os.environ[_ENV_USE_OAUTH_SCOPE] = ' '.join(authorized_scopes) _logger.debug('get_current_user() will return user from matched oauth_user.')
python
def _set_bearer_user_vars(allowed_client_ids, scopes): all_scopes, sufficient_scopes = _process_scopes(scopes) try: authorized_scopes = oauth.get_authorized_scopes(sorted(all_scopes)) except oauth.Error: _logger.debug('Unable to get authorized scopes.', exc_info=True) return if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes): _logger.warning('Authorized scopes did not satisfy scope requirements.') return client_id = oauth.get_client_id(authorized_scopes) # The client ID must be in allowed_client_ids. If allowed_client_ids is # empty, don't allow any client ID. If allowed_client_ids is set to # SKIP_CLIENT_ID_CHECK, all client IDs will be allowed. if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and client_id not in allowed_client_ids): _logger.warning('Client ID is not allowed: %s', client_id) return os.environ[_ENV_USE_OAUTH_SCOPE] = ' '.join(authorized_scopes) _logger.debug('get_current_user() will return user from matched oauth_user.')
[ "def", "_set_bearer_user_vars", "(", "allowed_client_ids", ",", "scopes", ")", ":", "all_scopes", ",", "sufficient_scopes", "=", "_process_scopes", "(", "scopes", ")", "try", ":", "authorized_scopes", "=", "oauth", ".", "get_authorized_scopes", "(", "sorted", "(", ...
Validate the oauth bearer token and set endpoints auth user variables. If the bearer token is valid, this sets ENDPOINTS_USE_OAUTH_SCOPE. This provides enough information that our endpoints.get_current_user() function can get the user. Args: allowed_client_ids: List of client IDs that are acceptable. scopes: List of acceptable scopes.
[ "Validate", "the", "oauth", "bearer", "token", "and", "set", "endpoints", "auth", "user", "variables", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L379-L410
22,252
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_set_bearer_user_vars_local
def _set_bearer_user_vars_local(token, allowed_client_ids, scopes): """Validate the oauth bearer token on the dev server. Since the functions in the oauth module return only example results in local development, this hits the tokeninfo endpoint and attempts to validate the token. If it's valid, we'll set _ENV_AUTH_EMAIL and _ENV_AUTH_DOMAIN so we can get the user from the token. Args: token: String with the oauth token to validate. allowed_client_ids: List of client IDs that are acceptable. scopes: List of acceptable scopes. """ # Get token info from the tokeninfo endpoint. result = urlfetch.fetch( '%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token}))) if result.status_code != 200: try: error_description = json.loads(result.content)['error_description'] except (ValueError, KeyError): error_description = '' _logger.error('Token info endpoint returned status %s: %s', result.status_code, error_description) return token_info = json.loads(result.content) # Validate email. if 'email' not in token_info: _logger.warning('Oauth token doesn\'t include an email address.') return if token_info.get('email_verified') != 'true': _logger.warning('Oauth token email isn\'t verified.') return # Validate client ID. client_id = token_info.get('azp') if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and client_id not in allowed_client_ids): _logger.warning('Client ID is not allowed: %s', client_id) return # Verify at least one of the scopes matches. _, sufficient_scopes = _process_scopes(scopes) authorized_scopes = token_info.get('scope', '').split(' ') if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes): _logger.warning('Oauth token scopes don\'t match any acceptable scopes.') return os.environ[_ENV_AUTH_EMAIL] = token_info['email'] os.environ[_ENV_AUTH_DOMAIN] = '' _logger.debug('Local dev returning user from token.')
python
def _set_bearer_user_vars_local(token, allowed_client_ids, scopes): # Get token info from the tokeninfo endpoint. result = urlfetch.fetch( '%s?%s' % (_TOKENINFO_URL, urllib.urlencode({'access_token': token}))) if result.status_code != 200: try: error_description = json.loads(result.content)['error_description'] except (ValueError, KeyError): error_description = '' _logger.error('Token info endpoint returned status %s: %s', result.status_code, error_description) return token_info = json.loads(result.content) # Validate email. if 'email' not in token_info: _logger.warning('Oauth token doesn\'t include an email address.') return if token_info.get('email_verified') != 'true': _logger.warning('Oauth token email isn\'t verified.') return # Validate client ID. client_id = token_info.get('azp') if (list(allowed_client_ids) != SKIP_CLIENT_ID_CHECK and client_id not in allowed_client_ids): _logger.warning('Client ID is not allowed: %s', client_id) return # Verify at least one of the scopes matches. _, sufficient_scopes = _process_scopes(scopes) authorized_scopes = token_info.get('scope', '').split(' ') if not _are_scopes_sufficient(authorized_scopes, sufficient_scopes): _logger.warning('Oauth token scopes don\'t match any acceptable scopes.') return os.environ[_ENV_AUTH_EMAIL] = token_info['email'] os.environ[_ENV_AUTH_DOMAIN] = '' _logger.debug('Local dev returning user from token.')
[ "def", "_set_bearer_user_vars_local", "(", "token", ",", "allowed_client_ids", ",", "scopes", ")", ":", "# Get token info from the tokeninfo endpoint.", "result", "=", "urlfetch", ".", "fetch", "(", "'%s?%s'", "%", "(", "_TOKENINFO_URL", ",", "urllib", ".", "urlencode...
Validate the oauth bearer token on the dev server. Since the functions in the oauth module return only example results in local development, this hits the tokeninfo endpoint and attempts to validate the token. If it's valid, we'll set _ENV_AUTH_EMAIL and _ENV_AUTH_DOMAIN so we can get the user from the token. Args: token: String with the oauth token to validate. allowed_client_ids: List of client IDs that are acceptable. scopes: List of acceptable scopes.
[ "Validate", "the", "oauth", "bearer", "token", "on", "the", "dev", "server", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L413-L463
22,253
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_verify_parsed_token
def _verify_parsed_token(parsed_token, issuers, audiences, allowed_client_ids, is_legacy_google_auth=True): """Verify a parsed user ID token. Args: parsed_token: The parsed token information. issuers: A list of allowed issuers audiences: The allowed audiences. allowed_client_ids: The allowed client IDs. Returns: True if the token is verified, False otherwise. """ # Verify the issuer. if parsed_token.get('iss') not in issuers: _logger.warning('Issuer was not valid: %s', parsed_token.get('iss')) return False # Check audiences. aud = parsed_token.get('aud') if not aud: _logger.warning('No aud field in token') return False # Special legacy handling if aud == cid. This occurs with iOS and browsers. # As long as audience == client_id and cid is allowed, we need to accept # the audience for compatibility. cid = parsed_token.get('azp') audience_allowed = (aud in audiences) or (is_legacy_google_auth and aud == cid) if not audience_allowed: _logger.warning('Audience not allowed: %s', aud) return False # Check allowed client IDs, for legacy auth. if is_legacy_google_auth: if list(allowed_client_ids) == SKIP_CLIENT_ID_CHECK: _logger.warning('Client ID check can\'t be skipped for ID tokens. ' 'Id_token cannot be verified.') return False elif not cid or cid not in allowed_client_ids: _logger.warning('Client ID is not allowed: %s', cid) return False if 'email' not in parsed_token: return False return True
python
def _verify_parsed_token(parsed_token, issuers, audiences, allowed_client_ids, is_legacy_google_auth=True): # Verify the issuer. if parsed_token.get('iss') not in issuers: _logger.warning('Issuer was not valid: %s', parsed_token.get('iss')) return False # Check audiences. aud = parsed_token.get('aud') if not aud: _logger.warning('No aud field in token') return False # Special legacy handling if aud == cid. This occurs with iOS and browsers. # As long as audience == client_id and cid is allowed, we need to accept # the audience for compatibility. cid = parsed_token.get('azp') audience_allowed = (aud in audiences) or (is_legacy_google_auth and aud == cid) if not audience_allowed: _logger.warning('Audience not allowed: %s', aud) return False # Check allowed client IDs, for legacy auth. if is_legacy_google_auth: if list(allowed_client_ids) == SKIP_CLIENT_ID_CHECK: _logger.warning('Client ID check can\'t be skipped for ID tokens. ' 'Id_token cannot be verified.') return False elif not cid or cid not in allowed_client_ids: _logger.warning('Client ID is not allowed: %s', cid) return False if 'email' not in parsed_token: return False return True
[ "def", "_verify_parsed_token", "(", "parsed_token", ",", "issuers", ",", "audiences", ",", "allowed_client_ids", ",", "is_legacy_google_auth", "=", "True", ")", ":", "# Verify the issuer.", "if", "parsed_token", ".", "get", "(", "'iss'", ")", "not", "in", "issuers...
Verify a parsed user ID token. Args: parsed_token: The parsed token information. issuers: A list of allowed issuers audiences: The allowed audiences. allowed_client_ids: The allowed client IDs. Returns: True if the token is verified, False otherwise.
[ "Verify", "a", "parsed", "user", "ID", "token", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L470-L514
22,254
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_get_cert_expiration_time
def _get_cert_expiration_time(headers): """Get the expiration time for a cert, given the response headers. Get expiration time from the headers in the result. If we can't get a time from the headers, this returns 0, indicating that the cert shouldn't be cached. Args: headers: A dict containing the response headers from the request to get certs. Returns: An integer with the number of seconds the cert should be cached. This value is guaranteed to be >= 0. """ # Check the max age of the cert. cache_control = headers.get('Cache-Control', '') # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2 indicates only # a comma-separated header is valid, so it should be fine to split this on # commas. for entry in cache_control.split(','): match = _MAX_AGE_REGEX.match(entry) if match: cache_time_seconds = int(match.group(1)) break else: return 0 # Subtract the cert's age. age = headers.get('Age') if age is not None: try: age = int(age) except ValueError: age = 0 cache_time_seconds -= age return max(0, cache_time_seconds)
python
def _get_cert_expiration_time(headers): # Check the max age of the cert. cache_control = headers.get('Cache-Control', '') # http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2 indicates only # a comma-separated header is valid, so it should be fine to split this on # commas. for entry in cache_control.split(','): match = _MAX_AGE_REGEX.match(entry) if match: cache_time_seconds = int(match.group(1)) break else: return 0 # Subtract the cert's age. age = headers.get('Age') if age is not None: try: age = int(age) except ValueError: age = 0 cache_time_seconds -= age return max(0, cache_time_seconds)
[ "def", "_get_cert_expiration_time", "(", "headers", ")", ":", "# Check the max age of the cert.", "cache_control", "=", "headers", ".", "get", "(", "'Cache-Control'", ",", "''", ")", "# http://www.w3.org/Protocols/rfc2616/rfc2616-sec4.html#sec4.2 indicates only", "# a comma-separ...
Get the expiration time for a cert, given the response headers. Get expiration time from the headers in the result. If we can't get a time from the headers, this returns 0, indicating that the cert shouldn't be cached. Args: headers: A dict containing the response headers from the request to get certs. Returns: An integer with the number of seconds the cert should be cached. This value is guaranteed to be >= 0.
[ "Get", "the", "expiration", "time", "for", "a", "cert", "given", "the", "response", "headers", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L524-L561
22,255
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_get_cached_certs
def _get_cached_certs(cert_uri, cache): """Get certs from cache if present; otherwise, gets from URI and caches them. Args: cert_uri: URI from which to retrieve certs if cache is stale or empty. cache: Cache of pre-fetched certs. Returns: The retrieved certs. """ certs = cache.get(cert_uri, namespace=_CERT_NAMESPACE) if certs is None: _logger.debug('Cert cache miss for %s', cert_uri) try: result = urlfetch.fetch(cert_uri) except AssertionError: # This happens in unit tests. Act as if we couldn't get any certs. return None if result.status_code == 200: certs = json.loads(result.content) expiration_time_seconds = _get_cert_expiration_time(result.headers) if expiration_time_seconds: cache.set(cert_uri, certs, time=expiration_time_seconds, namespace=_CERT_NAMESPACE) else: _logger.error( 'Certs not available, HTTP request returned %d', result.status_code) return certs
python
def _get_cached_certs(cert_uri, cache): certs = cache.get(cert_uri, namespace=_CERT_NAMESPACE) if certs is None: _logger.debug('Cert cache miss for %s', cert_uri) try: result = urlfetch.fetch(cert_uri) except AssertionError: # This happens in unit tests. Act as if we couldn't get any certs. return None if result.status_code == 200: certs = json.loads(result.content) expiration_time_seconds = _get_cert_expiration_time(result.headers) if expiration_time_seconds: cache.set(cert_uri, certs, time=expiration_time_seconds, namespace=_CERT_NAMESPACE) else: _logger.error( 'Certs not available, HTTP request returned %d', result.status_code) return certs
[ "def", "_get_cached_certs", "(", "cert_uri", ",", "cache", ")", ":", "certs", "=", "cache", ".", "get", "(", "cert_uri", ",", "namespace", "=", "_CERT_NAMESPACE", ")", "if", "certs", "is", "None", ":", "_logger", ".", "debug", "(", "'Cert cache miss for %s'"...
Get certs from cache if present; otherwise, gets from URI and caches them. Args: cert_uri: URI from which to retrieve certs if cache is stale or empty. cache: Cache of pre-fetched certs. Returns: The retrieved certs.
[ "Get", "certs", "from", "cache", "if", "present", ";", "otherwise", "gets", "from", "URI", "and", "caches", "them", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L564-L593
22,256
cloudendpoints/endpoints-python
endpoints/users_id_token.py
_verify_signed_jwt_with_certs
def _verify_signed_jwt_with_certs( jwt, time_now, cache, cert_uri=_DEFAULT_CERT_URI): """Verify a JWT against public certs. See http://self-issued.info/docs/draft-jones-json-web-token.html. The PyCrypto library included with Google App Engine is severely limited and so you have to use it very carefully to verify JWT signatures. The first issue is that the library can't read X.509 files, so we make a call to a special URI that has the public cert in modulus/exponent form in JSON. The second issue is that the RSA.verify method doesn't work, at least for how the JWT tokens are signed, so we have to manually verify the signature of the JWT, which means hashing the signed part of the JWT and comparing that to the signature that's been encrypted with the public key. Args: jwt: string, A JWT. time_now: The current time, as a long (eg. long(time.time())). cache: Cache to use (eg. the memcache module). cert_uri: string, URI to get cert modulus and exponent in JSON format. Returns: dict, The deserialized JSON payload in the JWT. Raises: _AppIdentityError: if any checks are failed. """ segments = jwt.split('.') if len(segments) != 3: # Note that anywhere we print the jwt or its json body, we need to use # %r instead of %s, so that non-printable characters are escaped safely. raise _AppIdentityError('Token is not an id_token (Wrong number of ' 'segments)') signed = '%s.%s' % (segments[0], segments[1]) signature = _urlsafe_b64decode(segments[2]) # pycrypto only deals in integers, so we have to convert the string of bytes # into a long. lsignature = long(signature.encode('hex'), 16) # Verify expected header. header_body = _urlsafe_b64decode(segments[0]) try: header = json.loads(header_body) except: raise _AppIdentityError("Can't parse header") if header.get('alg') != 'RS256': raise _AppIdentityError('Unexpected encryption algorithm: %r' % header.get('alg')) # Formerly we would parse the token body here. # However, it's not safe to do that without first checking the signature. 
certs = _get_cached_certs(cert_uri, cache) if certs is None: raise _AppIdentityError( 'Unable to retrieve certs needed to verify the signed JWT') # Verify that we were able to load the Crypto libraries, before we try # to use them. if not _CRYPTO_LOADED: raise _AppIdentityError('Unable to load pycrypto library. Can\'t verify ' 'id_token signature. See http://www.pycrypto.org ' 'for more information on pycrypto.') # SHA256 hash of the already 'signed' segment from the JWT. Since a SHA256 # hash, will always have length 64. local_hash = SHA256.new(signed).hexdigest() # Check signature. verified = False for keyvalue in certs['keyvalues']: try: modulus = _b64_to_long(keyvalue['modulus']) exponent = _b64_to_long(keyvalue['exponent']) key = RSA.construct((modulus, exponent)) # Encrypt, and convert to a hex string. hexsig = '%064x' % key.encrypt(lsignature, '')[0] # Make sure we have only last 64 base64 chars hexsig = hexsig[-64:] # Check the signature on 'signed' by encrypting 'signature' with the # public key and confirming the result matches the SHA256 hash of # 'signed'. hmac.compare_digest(a, b) is used to avoid timing attacks. verified = hmac.compare_digest(hexsig, local_hash) if verified: break except Exception, e: # pylint: disable=broad-except # Log the exception for debugging purpose. _logger.debug( 'Signature verification error: %s; continuing with the next cert.', e) continue if not verified: raise _AppIdentityError('Invalid token signature') # Parse token. json_body = _urlsafe_b64decode(segments[1]) try: parsed = json.loads(json_body) except: raise _AppIdentityError("Can't parse token body") # Check creation timestamp. iat = parsed.get('iat') if iat is None: raise _AppIdentityError('No iat field in token') earliest = iat - _CLOCK_SKEW_SECS # Check expiration timestamp. 
exp = parsed.get('exp') if exp is None: raise _AppIdentityError('No exp field in token') if exp >= time_now + _MAX_TOKEN_LIFETIME_SECS: raise _AppIdentityError('exp field too far in future') latest = exp + _CLOCK_SKEW_SECS if time_now < earliest: raise _AppIdentityError('Token used too early, %d < %d' % (time_now, earliest)) if time_now > latest: raise _AppIdentityError('Token used too late, %d > %d' % (time_now, latest)) return parsed
python
def _verify_signed_jwt_with_certs( jwt, time_now, cache, cert_uri=_DEFAULT_CERT_URI): segments = jwt.split('.') if len(segments) != 3: # Note that anywhere we print the jwt or its json body, we need to use # %r instead of %s, so that non-printable characters are escaped safely. raise _AppIdentityError('Token is not an id_token (Wrong number of ' 'segments)') signed = '%s.%s' % (segments[0], segments[1]) signature = _urlsafe_b64decode(segments[2]) # pycrypto only deals in integers, so we have to convert the string of bytes # into a long. lsignature = long(signature.encode('hex'), 16) # Verify expected header. header_body = _urlsafe_b64decode(segments[0]) try: header = json.loads(header_body) except: raise _AppIdentityError("Can't parse header") if header.get('alg') != 'RS256': raise _AppIdentityError('Unexpected encryption algorithm: %r' % header.get('alg')) # Formerly we would parse the token body here. # However, it's not safe to do that without first checking the signature. certs = _get_cached_certs(cert_uri, cache) if certs is None: raise _AppIdentityError( 'Unable to retrieve certs needed to verify the signed JWT') # Verify that we were able to load the Crypto libraries, before we try # to use them. if not _CRYPTO_LOADED: raise _AppIdentityError('Unable to load pycrypto library. Can\'t verify ' 'id_token signature. See http://www.pycrypto.org ' 'for more information on pycrypto.') # SHA256 hash of the already 'signed' segment from the JWT. Since a SHA256 # hash, will always have length 64. local_hash = SHA256.new(signed).hexdigest() # Check signature. verified = False for keyvalue in certs['keyvalues']: try: modulus = _b64_to_long(keyvalue['modulus']) exponent = _b64_to_long(keyvalue['exponent']) key = RSA.construct((modulus, exponent)) # Encrypt, and convert to a hex string. 
hexsig = '%064x' % key.encrypt(lsignature, '')[0] # Make sure we have only last 64 base64 chars hexsig = hexsig[-64:] # Check the signature on 'signed' by encrypting 'signature' with the # public key and confirming the result matches the SHA256 hash of # 'signed'. hmac.compare_digest(a, b) is used to avoid timing attacks. verified = hmac.compare_digest(hexsig, local_hash) if verified: break except Exception, e: # pylint: disable=broad-except # Log the exception for debugging purpose. _logger.debug( 'Signature verification error: %s; continuing with the next cert.', e) continue if not verified: raise _AppIdentityError('Invalid token signature') # Parse token. json_body = _urlsafe_b64decode(segments[1]) try: parsed = json.loads(json_body) except: raise _AppIdentityError("Can't parse token body") # Check creation timestamp. iat = parsed.get('iat') if iat is None: raise _AppIdentityError('No iat field in token') earliest = iat - _CLOCK_SKEW_SECS # Check expiration timestamp. exp = parsed.get('exp') if exp is None: raise _AppIdentityError('No exp field in token') if exp >= time_now + _MAX_TOKEN_LIFETIME_SECS: raise _AppIdentityError('exp field too far in future') latest = exp + _CLOCK_SKEW_SECS if time_now < earliest: raise _AppIdentityError('Token used too early, %d < %d' % (time_now, earliest)) if time_now > latest: raise _AppIdentityError('Token used too late, %d > %d' % (time_now, latest)) return parsed
[ "def", "_verify_signed_jwt_with_certs", "(", "jwt", ",", "time_now", ",", "cache", ",", "cert_uri", "=", "_DEFAULT_CERT_URI", ")", ":", "segments", "=", "jwt", ".", "split", "(", "'.'", ")", "if", "len", "(", "segments", ")", "!=", "3", ":", "# Note that a...
Verify a JWT against public certs. See http://self-issued.info/docs/draft-jones-json-web-token.html. The PyCrypto library included with Google App Engine is severely limited and so you have to use it very carefully to verify JWT signatures. The first issue is that the library can't read X.509 files, so we make a call to a special URI that has the public cert in modulus/exponent form in JSON. The second issue is that the RSA.verify method doesn't work, at least for how the JWT tokens are signed, so we have to manually verify the signature of the JWT, which means hashing the signed part of the JWT and comparing that to the signature that's been encrypted with the public key. Args: jwt: string, A JWT. time_now: The current time, as a long (eg. long(time.time())). cache: Cache to use (eg. the memcache module). cert_uri: string, URI to get cert modulus and exponent in JSON format. Returns: dict, The deserialized JSON payload in the JWT. Raises: _AppIdentityError: if any checks are failed.
[ "Verify", "a", "JWT", "against", "public", "certs", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L603-L732
22,257
cloudendpoints/endpoints-python
endpoints/users_id_token.py
get_verified_jwt
def get_verified_jwt( providers, audiences, check_authorization_header=True, check_query_arg=True, request=None, cache=memcache): """ This function will extract, verify, and parse a JWT token from the Authorization header or access_token query argument. The JWT is assumed to contain an issuer and audience claim, as well as issued-at and expiration timestamps. The signature will be cryptographically verified, the claims and timestamps will be checked, and the resulting parsed JWT body is returned. If at any point the JWT is missing or found to be invalid, the return result will be None. Arguments: providers - An iterable of dicts each containing 'issuer' and 'cert_uri' keys audiences - An iterable of valid audiences check_authorization_header - Boolean; check 'Authorization: Bearer' header check_query_arg - Boolean; check 'access_token' query arg request - Must be the request object if check_query_arg is true; otherwise ignored. cache - In testing, override the certificate cache """ if not (check_authorization_header or check_query_arg): raise ValueError( 'Either check_authorization_header or check_query_arg must be True.') if check_query_arg and request is None: raise ValueError( 'Cannot check query arg without request object.') schemes = ('Bearer',) if check_authorization_header else () keys = ('access_token',) if check_query_arg else () token = _get_token( request=request, allowed_auth_schemes=schemes, allowed_query_keys=keys) if token is None: return None time_now = long(time.time()) for provider in providers: parsed_token = _parse_and_verify_jwt( token, time_now, (provider['issuer'],), audiences, provider['cert_uri'], cache) if parsed_token is not None: return parsed_token return None
python
def get_verified_jwt( providers, audiences, check_authorization_header=True, check_query_arg=True, request=None, cache=memcache): if not (check_authorization_header or check_query_arg): raise ValueError( 'Either check_authorization_header or check_query_arg must be True.') if check_query_arg and request is None: raise ValueError( 'Cannot check query arg without request object.') schemes = ('Bearer',) if check_authorization_header else () keys = ('access_token',) if check_query_arg else () token = _get_token( request=request, allowed_auth_schemes=schemes, allowed_query_keys=keys) if token is None: return None time_now = long(time.time()) for provider in providers: parsed_token = _parse_and_verify_jwt( token, time_now, (provider['issuer'],), audiences, provider['cert_uri'], cache) if parsed_token is not None: return parsed_token return None
[ "def", "get_verified_jwt", "(", "providers", ",", "audiences", ",", "check_authorization_header", "=", "True", ",", "check_query_arg", "=", "True", ",", "request", "=", "None", ",", "cache", "=", "memcache", ")", ":", "if", "not", "(", "check_authorization_heade...
This function will extract, verify, and parse a JWT token from the Authorization header or access_token query argument. The JWT is assumed to contain an issuer and audience claim, as well as issued-at and expiration timestamps. The signature will be cryptographically verified, the claims and timestamps will be checked, and the resulting parsed JWT body is returned. If at any point the JWT is missing or found to be invalid, the return result will be None. Arguments: providers - An iterable of dicts each containing 'issuer' and 'cert_uri' keys audiences - An iterable of valid audiences check_authorization_header - Boolean; check 'Authorization: Bearer' header check_query_arg - Boolean; check 'access_token' query arg request - Must be the request object if check_query_arg is true; otherwise ignored. cache - In testing, override the certificate cache
[ "This", "function", "will", "extract", "verify", "and", "parse", "a", "JWT", "token", "from", "the", "Authorization", "header", "or", "access_token", "query", "argument", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/users_id_token.py#L750-L794
22,258
cloudendpoints/endpoints-python
endpoints/directory_list_generator.py
DirectoryListGenerator.__item_descriptor
def __item_descriptor(self, config): """Builds an item descriptor for a service configuration. Args: config: A dictionary containing the service configuration to describe. Returns: A dictionary that describes the service configuration. """ descriptor = { 'kind': 'discovery#directoryItem', 'icons': { 'x16': 'https://www.gstatic.com/images/branding/product/1x/' 'googleg_16dp.png', 'x32': 'https://www.gstatic.com/images/branding/product/1x/' 'googleg_32dp.png', }, 'preferred': True, } description = config.get('description') root_url = config.get('root') name = config.get('name') version = config.get('api_version') relative_path = '/apis/{0}/{1}/rest'.format(name, version) if description: descriptor['description'] = description descriptor['name'] = name descriptor['version'] = version descriptor['discoveryLink'] = '.{0}'.format(relative_path) root_url_port = urlparse.urlparse(root_url).port original_path = self.__request.reconstruct_full_url( port_override=root_url_port) descriptor['discoveryRestUrl'] = '{0}/{1}/{2}/rest'.format( original_path, name, version) if name and version: descriptor['id'] = '{0}:{1}'.format(name, version) return descriptor
python
def __item_descriptor(self, config): descriptor = { 'kind': 'discovery#directoryItem', 'icons': { 'x16': 'https://www.gstatic.com/images/branding/product/1x/' 'googleg_16dp.png', 'x32': 'https://www.gstatic.com/images/branding/product/1x/' 'googleg_32dp.png', }, 'preferred': True, } description = config.get('description') root_url = config.get('root') name = config.get('name') version = config.get('api_version') relative_path = '/apis/{0}/{1}/rest'.format(name, version) if description: descriptor['description'] = description descriptor['name'] = name descriptor['version'] = version descriptor['discoveryLink'] = '.{0}'.format(relative_path) root_url_port = urlparse.urlparse(root_url).port original_path = self.__request.reconstruct_full_url( port_override=root_url_port) descriptor['discoveryRestUrl'] = '{0}/{1}/{2}/rest'.format( original_path, name, version) if name and version: descriptor['id'] = '{0}:{1}'.format(name, version) return descriptor
[ "def", "__item_descriptor", "(", "self", ",", "config", ")", ":", "descriptor", "=", "{", "'kind'", ":", "'discovery#directoryItem'", ",", "'icons'", ":", "{", "'x16'", ":", "'https://www.gstatic.com/images/branding/product/1x/'", "'googleg_16dp.png'", ",", "'x32'", "...
Builds an item descriptor for a service configuration. Args: config: A dictionary containing the service configuration to describe. Returns: A dictionary that describes the service configuration.
[ "Builds", "an", "item", "descriptor", "for", "a", "service", "configuration", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/directory_list_generator.py#L56-L99
22,259
cloudendpoints/endpoints-python
endpoints/directory_list_generator.py
DirectoryListGenerator.__directory_list_descriptor
def __directory_list_descriptor(self, configs): """Builds a directory list for an API. Args: configs: List of dicts containing the service configurations to list. Returns: A dictionary that can be deserialized into JSON in discovery list format. Raises: ApiConfigurationError: If there's something wrong with the API configuration, such as a multiclass API decorated with different API descriptors (see the docstring for api()), or a repeated method signature. """ descriptor = { 'kind': 'discovery#directoryList', 'discoveryVersion': 'v1', } items = [] for config in configs: item_descriptor = self.__item_descriptor(config) if item_descriptor: items.append(item_descriptor) if items: descriptor['items'] = items return descriptor
python
def __directory_list_descriptor(self, configs): descriptor = { 'kind': 'discovery#directoryList', 'discoveryVersion': 'v1', } items = [] for config in configs: item_descriptor = self.__item_descriptor(config) if item_descriptor: items.append(item_descriptor) if items: descriptor['items'] = items return descriptor
[ "def", "__directory_list_descriptor", "(", "self", ",", "configs", ")", ":", "descriptor", "=", "{", "'kind'", ":", "'discovery#directoryList'", ",", "'discoveryVersion'", ":", "'v1'", ",", "}", "items", "=", "[", "]", "for", "config", "in", "configs", ":", ...
Builds a directory list for an API. Args: configs: List of dicts containing the service configurations to list. Returns: A dictionary that can be deserialized into JSON in discovery list format. Raises: ApiConfigurationError: If there's something wrong with the API configuration, such as a multiclass API decorated with different API descriptors (see the docstring for api()), or a repeated method signature.
[ "Builds", "a", "directory", "list", "for", "an", "API", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/directory_list_generator.py#L101-L130
22,260
cloudendpoints/endpoints-python
endpoints/directory_list_generator.py
DirectoryListGenerator.get_directory_list_doc
def get_directory_list_doc(self, configs): """JSON dict description of a protorpc.remote.Service in list format. Args: configs: Either a single dict or a list of dicts containing the service configurations to list. Returns: dict, The directory list document as a JSON dict. """ if not isinstance(configs, (tuple, list)): configs = [configs] util.check_list_type(configs, dict, 'configs', allow_none=False) return self.__directory_list_descriptor(configs)
python
def get_directory_list_doc(self, configs): if not isinstance(configs, (tuple, list)): configs = [configs] util.check_list_type(configs, dict, 'configs', allow_none=False) return self.__directory_list_descriptor(configs)
[ "def", "get_directory_list_doc", "(", "self", ",", "configs", ")", ":", "if", "not", "isinstance", "(", "configs", ",", "(", "tuple", ",", "list", ")", ")", ":", "configs", "=", "[", "configs", "]", "util", ".", "check_list_type", "(", "configs", ",", ...
JSON dict description of a protorpc.remote.Service in list format. Args: configs: Either a single dict or a list of dicts containing the service configurations to list. Returns: dict, The directory list document as a JSON dict.
[ "JSON", "dict", "description", "of", "a", "protorpc", ".", "remote", ".", "Service", "in", "list", "format", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/directory_list_generator.py#L132-L148
22,261
cloudendpoints/endpoints-python
endpoints/directory_list_generator.py
DirectoryListGenerator.pretty_print_config_to_json
def pretty_print_config_to_json(self, configs): """JSON string description of a protorpc.remote.Service in a discovery doc. Args: configs: Either a single dict or a list of dicts containing the service configurations to list. Returns: string, The directory list document as a JSON string. """ descriptor = self.get_directory_list_doc(configs) return json.dumps(descriptor, sort_keys=True, indent=2, separators=(',', ': '))
python
def pretty_print_config_to_json(self, configs): descriptor = self.get_directory_list_doc(configs) return json.dumps(descriptor, sort_keys=True, indent=2, separators=(',', ': '))
[ "def", "pretty_print_config_to_json", "(", "self", ",", "configs", ")", ":", "descriptor", "=", "self", ".", "get_directory_list_doc", "(", "configs", ")", "return", "json", ".", "dumps", "(", "descriptor", ",", "sort_keys", "=", "True", ",", "indent", "=", ...
JSON string description of a protorpc.remote.Service in a discovery doc. Args: configs: Either a single dict or a list of dicts containing the service configurations to list. Returns: string, The directory list document as a JSON string.
[ "JSON", "string", "description", "of", "a", "protorpc", ".", "remote", ".", "Service", "in", "a", "discovery", "doc", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/directory_list_generator.py#L150-L162
22,262
cloudendpoints/endpoints-python
endpoints/errors.py
RequestError.__format_error
def __format_error(self, error_list_tag):
  """Format this error into a JSON response.

  Args:
    error_list_tag: A string specifying the name of the tag to use for the
      error list.

  Returns:
    A dict containing the reformatted JSON error response.
  """
  # Inner error record: the standard domain/reason/message triple plus any
  # subclass-supplied extra fields.
  detail = {'domain': self.domain(),
            'reason': self.reason(),
            'message': self.message()}
  detail.update(self.extra_fields() or {})
  # Outer envelope expected by API clients.
  envelope = {error_list_tag: [detail],
              'code': self.status_code(),
              'message': self.message()}
  return {'error': envelope}
python
def __format_error(self, error_list_tag): error = {'domain': self.domain(), 'reason': self.reason(), 'message': self.message()} error.update(self.extra_fields() or {}) return {'error': {error_list_tag: [error], 'code': self.status_code(), 'message': self.message()}}
[ "def", "__format_error", "(", "self", ",", "error_list_tag", ")", ":", "error", "=", "{", "'domain'", ":", "self", ".", "domain", "(", ")", ",", "'reason'", ":", "self", ".", "reason", "(", ")", ",", "'message'", ":", "self", ".", "message", "(", ")"...
Format this error into a JSON response. Args: error_list_tag: A string specifying the name of the tag to use for the error list. Returns: A dict containing the reformatted JSON error response.
[ "Format", "this", "error", "into", "a", "JSON", "response", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/errors.py#L100-L116
22,263
cloudendpoints/endpoints-python
endpoints/errors.py
RequestError.rest_error
def rest_error(self):
  """Format this error into a response to a REST request.

  Returns:
    A string containing the reformatted error response.
  """
  # REST responses use the 'errors' list tag; render deterministically.
  return json.dumps(self.__format_error('errors'),
                    indent=1, sort_keys=True)
python
def rest_error(self): error_json = self.__format_error('errors') return json.dumps(error_json, indent=1, sort_keys=True)
[ "def", "rest_error", "(", "self", ")", ":", "error_json", "=", "self", ".", "__format_error", "(", "'errors'", ")", "return", "json", ".", "dumps", "(", "error_json", ",", "indent", "=", "1", ",", "sort_keys", "=", "True", ")" ]
Format this error into a response to a REST request. Returns: A string containing the reformatted error response.
[ "Format", "this", "error", "into", "a", "response", "to", "a", "REST", "request", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/errors.py#L118-L125
22,264
cloudendpoints/endpoints-python
endpoints/errors.py
BackendError._get_status_code
def _get_status_code(self, http_status): """Get the HTTP status code from an HTTP status string. Args: http_status: A string containing a HTTP status code and reason. Returns: An integer with the status code number from http_status. """ try: return int(http_status.split(' ', 1)[0]) except TypeError: _logger.warning('Unable to find status code in HTTP status %r.', http_status) return 500
python
def _get_status_code(self, http_status): try: return int(http_status.split(' ', 1)[0]) except TypeError: _logger.warning('Unable to find status code in HTTP status %r.', http_status) return 500
[ "def", "_get_status_code", "(", "self", ",", "http_status", ")", ":", "try", ":", "return", "int", "(", "http_status", ".", "split", "(", "' '", ",", "1", ")", "[", "0", "]", ")", "except", "TypeError", ":", "_logger", ".", "warning", "(", "'Unable to ...
Get the HTTP status code from an HTTP status string. Args: http_status: A string containing a HTTP status code and reason. Returns: An integer with the status code number from http_status.
[ "Get", "the", "HTTP", "status", "code", "from", "an", "HTTP", "status", "string", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/errors.py#L239-L253
22,265
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager.process_api_config_response
def process_api_config_response(self, config_json):
  """Parses a JSON API config and registers methods for dispatch.

  Side effects:
    Parses method name, etc. for all methods and updates the indexing
    data structures (self._configs and the rest-method list) with the
    information.

  Args:
    config_json: A dict, the JSON body of the getApiConfigs response.
  """
  with self._config_lock:
    # The Discovery API config is always registered alongside the user's
    # configs so discovery requests can be dispatched too.
    self._add_discovery_config()
    for config in config_json.get('items', []):
      lookup_key = config.get('name', ''), config.get('version', '')
      self._configs[lookup_key] = config

    for config in self._configs.itervalues():
      name = config.get('name', '')
      path_version = config.get('path_version', '')
      # Removed the unused local 'api_version' that the original extracted
      # but never read.
      sorted_methods = self._get_sorted_methods(config.get('methods', {}))

      for method_name, method in sorted_methods:
        self._save_rest_method(method_name, name, path_version, method)
python
def process_api_config_response(self, config_json): with self._config_lock: self._add_discovery_config() for config in config_json.get('items', []): lookup_key = config.get('name', ''), config.get('version', '') self._configs[lookup_key] = config for config in self._configs.itervalues(): name = config.get('name', '') api_version = config.get('api_version', '') path_version = config.get('path_version', '') sorted_methods = self._get_sorted_methods(config.get('methods', {})) for method_name, method in sorted_methods: self._save_rest_method(method_name, name, path_version, method)
[ "def", "process_api_config_response", "(", "self", ",", "config_json", ")", ":", "with", "self", ".", "_config_lock", ":", "self", ".", "_add_discovery_config", "(", ")", "for", "config", "in", "config_json", ".", "get", "(", "'items'", ",", "[", "]", ")", ...
Parses a JSON API config and registers methods for dispatch. Side effects: Parses method name, etc. for all methods and updates the indexing data structures with the information. Args: config_json: A dict, the JSON body of the getApiConfigs response.
[ "Parses", "a", "JSON", "API", "config", "and", "registers", "methods", "for", "dispatch", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L53-L77
22,266
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager._get_sorted_methods
def _get_sorted_methods(self, methods):
  """Get a copy of 'methods' sorted the way they would be on the live server.

  Args:
    methods: JSON configuration of an API's methods.

  Returns:
    The same configuration with the methods sorted based on what order
    they'll be checked by the server.
  """
  if not methods:
    return methods

  # Comparison function we'll use to sort the methods:
  def _sorted_methods_comparison(method_info1, method_info2):
    """Sort method info by path and http_method.

    Args:
      method_info1: Method name and info for the first method to compare.
      method_info2: Method name and info for the method to compare to.

    Returns:
      Negative if the first method should come first, positive if the
      first method should come after the second.  Zero if they're
      equivalent.
    """

    def _score_path(path):
      """Calculate the score for this path, used for comparisons.

      Higher scores have priority, and if scores are equal, the path text
      is sorted alphabetically.  Scores are based on the number and
      location of the constant parts of the path.  The server has some
      special handling for variables with regexes, which we don't handle
      here.

      Args:
        path: The request path that we're calculating a score for.

      Returns:
        The score for the given path.
      """
      # One bit per path segment: 1 for a constant segment, 0 for a
      # '{variable}' segment, so more/earlier constants score higher.
      score = 0
      parts = path.split('/')
      for part in parts:
        score <<= 1
        if not part or part[0] != '{':
          # Found a constant.
          score += 1
      # Shift by 31 instead of 32 because some (!) versions of Python like
      # to convert the int to a long if we shift by 32, and the sorted()
      # function that uses this blows up if it receives anything but an
      # int.
      score <<= 31 - len(parts)
      return score

    # Higher path scores come first.
    path_score1 = _score_path(method_info1[1].get('path', ''))
    path_score2 = _score_path(method_info2[1].get('path', ''))
    if path_score1 != path_score2:
      return path_score2 - path_score1

    # Compare by path text next, sorted alphabetically.
    path_result = cmp(method_info1[1].get('path', ''),
                      method_info2[1].get('path', ''))
    if path_result != 0:
      return path_result

    # All else being equal, sort by HTTP method.
    method_result = cmp(method_info1[1].get('httpMethod', ''),
                        method_info2[1].get('httpMethod', ''))
    return method_result

  # NOTE: `cmp` and passing a comparison function positionally to sorted()
  # are Python 2 only; porting to Python 3 would need
  # functools.cmp_to_key.
  return sorted(methods.items(), _sorted_methods_comparison)
python
def _get_sorted_methods(self, methods): if not methods: return methods # Comparison function we'll use to sort the methods: def _sorted_methods_comparison(method_info1, method_info2): """Sort method info by path and http_method. Args: method_info1: Method name and info for the first method to compare. method_info2: Method name and info for the method to compare to. Returns: Negative if the first method should come first, positive if the first method should come after the second. Zero if they're equivalent. """ def _score_path(path): """Calculate the score for this path, used for comparisons. Higher scores have priority, and if scores are equal, the path text is sorted alphabetically. Scores are based on the number and location of the constant parts of the path. The server has some special handling for variables with regexes, which we don't handle here. Args: path: The request path that we're calculating a score for. Returns: The score for the given path. """ score = 0 parts = path.split('/') for part in parts: score <<= 1 if not part or part[0] != '{': # Found a constant. score += 1 # Shift by 31 instead of 32 because some (!) versions of Python like # to convert the int to a long if we shift by 32, and the sorted() # function that uses this blows up if it receives anything but an int. score <<= 31 - len(parts) return score # Higher path scores come first. path_score1 = _score_path(method_info1[1].get('path', '')) path_score2 = _score_path(method_info2[1].get('path', '')) if path_score1 != path_score2: return path_score2 - path_score1 # Compare by path text next, sorted alphabetically. path_result = cmp(method_info1[1].get('path', ''), method_info2[1].get('path', '')) if path_result != 0: return path_result # All else being equal, sort by HTTP method. method_result = cmp(method_info1[1].get('httpMethod', ''), method_info2[1].get('httpMethod', '')) return method_result return sorted(methods.items(), _sorted_methods_comparison)
[ "def", "_get_sorted_methods", "(", "self", ",", "methods", ")", ":", "if", "not", "methods", ":", "return", "methods", "# Comparison function we'll use to sort the methods:", "def", "_sorted_methods_comparison", "(", "method_info1", ",", "method_info2", ")", ":", "\"\"\...
Get a copy of 'methods' sorted the way they would be on the live server. Args: methods: JSON configuration of an API's methods. Returns: The same configuration with the methods sorted based on what order they'll be checked by the server.
[ "Get", "a", "copy", "of", "methods", "sorted", "the", "way", "they", "would", "be", "on", "the", "live", "server", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L79-L150
22,267
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager._get_path_params
def _get_path_params(match):
  """Gets path parameters from a regular expression match.

  Args:
    match: A regular expression Match object for a path.

  Returns:
    A dictionary containing the variable names converted from base64.
  """
  # Group names were base32-mangled when the path regex was compiled;
  # reverse that mangling and URL-decode each captured value.
  return {ApiConfigManager._from_safe_path_param_name(group_name):
              urllib.unquote_plus(raw_value)
          for group_name, raw_value in match.groupdict().iteritems()}
python
def _get_path_params(match): result = {} for var_name, value in match.groupdict().iteritems(): actual_var_name = ApiConfigManager._from_safe_path_param_name(var_name) result[actual_var_name] = urllib.unquote_plus(value) return result
[ "def", "_get_path_params", "(", "match", ")", ":", "result", "=", "{", "}", "for", "var_name", ",", "value", "in", "match", ".", "groupdict", "(", ")", ".", "iteritems", "(", ")", ":", "actual_var_name", "=", "ApiConfigManager", ".", "_from_safe_path_param_n...
Gets path parameters from a regular expression match. Args: match: A regular expression Match object for a path. Returns: A dictionary containing the variable names converted from base64.
[ "Gets", "path", "parameters", "from", "a", "regular", "expression", "match", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L153-L166
22,268
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager.lookup_rest_method
def lookup_rest_method(self, path, request_uri, http_method):
  """Look up the rest method at call time.

  The method is looked up in self._rest_methods, the list it is saved
  in for SaveRestMethod.

  Args:
    path: A string containing the path from the URL of the request.
    request_uri: A string containing the full request URI, used when a
      method is configured to match against the URI instead of the path.
    http_method: A string containing HTTP method of the request.

  Returns:
    Tuple of (<method name>, <method>, <params>)
    Where:
      <method name> is the string name of the method that was matched.
      <method> is the descriptor as specified in the API configuration.
      <params> is a dict of path parameters matched in the rest request,
      or (None, None, None) when nothing matches.
  """
  verb = http_method.lower()
  with self._config_lock:
    for path_regex, unused_pattern, methods_by_verb in self._rest_methods:
      candidate = methods_by_verb.get(verb)
      if candidate is None:
        continue
      # Some methods are configured to match the full request URI rather
      # than just the path.
      target = request_uri if candidate[1].get('useRequestUri') else path
      match = path_regex.match(target)
      if match:
        method_name, method = candidate
        return method_name, method, self._get_path_params(match)
  _logger.warn('No endpoint found for path: %r, method: %r',
               path, http_method)
  return None, None, None
python
def lookup_rest_method(self, path, request_uri, http_method): method_key = http_method.lower() with self._config_lock: for compiled_path_pattern, unused_path, methods in self._rest_methods: if method_key not in methods: continue candidate_method_info = methods[method_key] match_against = request_uri if candidate_method_info[1].get('useRequestUri') else path match = compiled_path_pattern.match(match_against) if match: params = self._get_path_params(match) method_name, method = candidate_method_info break else: _logger.warn('No endpoint found for path: %r, method: %r', path, http_method) method_name = None method = None params = None return method_name, method, params
[ "def", "lookup_rest_method", "(", "self", ",", "path", ",", "request_uri", ",", "http_method", ")", ":", "method_key", "=", "http_method", ".", "lower", "(", ")", "with", "self", ".", "_config_lock", ":", "for", "compiled_path_pattern", ",", "unused_path", ","...
Look up the rest method at call time. The method is looked up in self._rest_methods, the list it is saved in for SaveRestMethod. Args: path: A string containing the path from the URL of the request. http_method: A string containing HTTP method of the request. Returns: Tuple of (<method name>, <method>, <params>) Where: <method name> is the string name of the method that was matched. <method> is the descriptor as specified in the API configuration. -and- <params> is a dict of path parameters matched in the rest request.
[ "Look", "up", "the", "rest", "method", "at", "call", "time", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L168-L202
22,269
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager._add_discovery_config
def _add_discovery_config(self):
  """Add the Discovery configuration to our list of configs.

  This should only be called with self._config_lock.  The code here
  assumes the lock is held.
  """
  api_config = discovery_service.DiscoveryService.API_CONFIG
  # Keyed by (name, version), the same scheme used for user configs.
  self._configs[(api_config['name'], api_config['version'])] = api_config
python
def _add_discovery_config(self): lookup_key = (discovery_service.DiscoveryService.API_CONFIG['name'], discovery_service.DiscoveryService.API_CONFIG['version']) self._configs[lookup_key] = discovery_service.DiscoveryService.API_CONFIG
[ "def", "_add_discovery_config", "(", "self", ")", ":", "lookup_key", "=", "(", "discovery_service", ".", "DiscoveryService", ".", "API_CONFIG", "[", "'name'", "]", ",", "discovery_service", ".", "DiscoveryService", ".", "API_CONFIG", "[", "'version'", "]", ")", ...
Add the Discovery configuration to our list of configs. This should only be called with self._config_lock. The code here assumes the lock is held.
[ "Add", "the", "Discovery", "configuration", "to", "our", "list", "of", "configs", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L204-L212
22,270
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager.save_config
def save_config(self, lookup_key, config):
  """Save a configuration to the cache of configs.

  Args:
    lookup_key: A string containing the cache lookup key.
    config: The dict containing the configuration to save to the cache.
  """
  # Serialize writes with the readers of self._configs.
  with self._config_lock:
    self._configs[lookup_key] = config
python
def save_config(self, lookup_key, config): with self._config_lock: self._configs[lookup_key] = config
[ "def", "save_config", "(", "self", ",", "lookup_key", ",", "config", ")", ":", "with", "self", ".", "_config_lock", ":", "self", ".", "_configs", "[", "lookup_key", "]", "=", "config" ]
Save a configuration to the cache of configs. Args: lookup_key: A string containing the cache lookup key. config: The dict containing the configuration to save to the cache.
[ "Save", "a", "configuration", "to", "the", "cache", "of", "configs", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L214-L222
22,271
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager._from_safe_path_param_name
def _from_safe_path_param_name(safe_parameter): """Takes a safe regex group name and converts it back to the original value. Only alphanumeric characters and underscore are allowed in variable name tokens, and numeric are not allowed as the first character. The safe_parameter is a base32 representation of the actual value. Args: safe_parameter: A string that was generated by _to_safe_path_param_name. Returns: A string, the parameter matched from the URL template. """ assert safe_parameter.startswith('_') safe_parameter_as_base32 = safe_parameter[1:] padding_length = - len(safe_parameter_as_base32) % 8 padding = '=' * padding_length return base64.b32decode(safe_parameter_as_base32 + padding)
python
def _from_safe_path_param_name(safe_parameter): assert safe_parameter.startswith('_') safe_parameter_as_base32 = safe_parameter[1:] padding_length = - len(safe_parameter_as_base32) % 8 padding = '=' * padding_length return base64.b32decode(safe_parameter_as_base32 + padding)
[ "def", "_from_safe_path_param_name", "(", "safe_parameter", ")", ":", "assert", "safe_parameter", ".", "startswith", "(", "'_'", ")", "safe_parameter_as_base32", "=", "safe_parameter", "[", "1", ":", "]", "padding_length", "=", "-", "len", "(", "safe_parameter_as_ba...
Takes a safe regex group name and converts it back to the original value. Only alphanumeric characters and underscore are allowed in variable name tokens, and numeric are not allowed as the first character. The safe_parameter is a base32 representation of the actual value. Args: safe_parameter: A string that was generated by _to_safe_path_param_name. Returns: A string, the parameter matched from the URL template.
[ "Takes", "a", "safe", "regex", "group", "name", "and", "converts", "it", "back", "to", "the", "original", "value", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L245-L264
22,272
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager._compile_path_pattern
def _compile_path_pattern(pattern):
  r"""Generates a compiled regex pattern for a path pattern.

  e.g. '/MyApi/v1/notes/{id}' returns
    re.compile(r'/MyApi/v1/notes/(?P<id>[^/?#\[\]{}]*)')

  Args:
    pattern: A string, the parameterized path pattern to be checked.

  Returns:
    A compiled regex object to match this path pattern.
  """

  def _expand_braced_variable(match):
    """Replaces a {variable} with a named regex group matching it.

    The group name is the base32 form of the variable name, prefixed with
    an underscore, because message variable names in URL patterns
    (e.g. via {x.y}) may contain characters such as '.' that are not
    legal in a regex group name.

    Args:
      match: A regex match object, the matching regex group as sent by
        re.sub().

    Returns:
      A string regex to match the variable by name, if the full pattern
      was matched; otherwise the matched text unchanged.
    """
    if not match.lastindex > 1:
      return match.group(0)
    group_name = ApiConfigManager._to_safe_path_param_name(match.group(2))
    return '%s(?P<%s>%s)' % (match.group(1), group_name, _PATH_VALUE_PATTERN)

  expanded = re.sub('(/|^){(%s)}(?=/|$|:)' % _PATH_VARIABLE_PATTERN,
                    _expand_braced_variable, pattern)
  # Allow an optional trailing slash and anchor at end of string.
  return re.compile(expanded + '/?$')
python
def _compile_path_pattern(pattern): r"""Generates a compiled regex pattern for a path pattern. e.g. '/MyApi/v1/notes/{id}' returns re.compile(r'/MyApi/v1/notes/(?P<id>[^/?#\[\]{}]*)') Args: pattern: A string, the parameterized path pattern to be checked. Returns: A compiled regex object to match this path pattern. """ def replace_variable(match): """Replaces a {variable} with a regex to match it by name. Changes the string corresponding to the variable name to the base32 representation of the string, prepended by an underscore. This is necessary because we can have message variable names in URL patterns (e.g. via {x.y}) but the character '.' can't be in a regex group name. Args: match: A regex match object, the matching regex group as sent by re.sub(). Returns: A string regex to match the variable by name, if the full pattern was matched. """ if match.lastindex > 1: var_name = ApiConfigManager._to_safe_path_param_name(match.group(2)) return '%s(?P<%s>%s)' % (match.group(1), var_name, _PATH_VALUE_PATTERN) return match.group(0) pattern = re.sub('(/|^){(%s)}(?=/|$|:)' % _PATH_VARIABLE_PATTERN, replace_variable, pattern) return re.compile(pattern + '/?$')
[ "def", "_compile_path_pattern", "(", "pattern", ")", ":", "def", "replace_variable", "(", "match", ")", ":", "\"\"\"Replaces a {variable} with a regex to match it by name.\n\n Changes the string corresponding to the variable name to the base32\n representation of the string, prepen...
r"""Generates a compiled regex pattern for a path pattern. e.g. '/MyApi/v1/notes/{id}' returns re.compile(r'/MyApi/v1/notes/(?P<id>[^/?#\[\]{}]*)') Args: pattern: A string, the parameterized path pattern to be checked. Returns: A compiled regex object to match this path pattern.
[ "r", "Generates", "a", "compiled", "regex", "pattern", "for", "a", "path", "pattern", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L267-L304
22,273
cloudendpoints/endpoints-python
endpoints/api_config_manager.py
ApiConfigManager._save_rest_method
def _save_rest_method(self, method_name, api_name, version, method):
  """Store Rest api methods in a list for lookup at call time.

  self._rest_methods is a list of tuples:
    [(<compiled_path>, <path_pattern>, <method_dict>), ...]
  where:
    <compiled_path> is a compiled regex to match against the incoming URL,
    <path_pattern> is the original (uncompiled) path pattern, kept so
      duplicate paths can be detected on insertion, and
    <method_dict> maps lowercase httpMethod -> (method_name, method).

  Different methods may share a path but differ by HTTP verb; at call
  time the compiled regexes are scanned in order and the matching entry's
  method_dict is consulted for the request's verb.

  Args:
    method_name: A string containing the name of the API method.
    api_name: A string containing the name of the API.
    version: A string containing the version of the API.
    method: A dict containing the method descriptor (as in the api config
      file).
  """
  path_pattern = '/'.join((api_name, version, method.get('path', '')))
  http_method = method.get('httpMethod', '').lower()
  # If this path is already registered, just add (or replace) the entry
  # for this HTTP verb on the existing record.
  for unused_regex, existing_path, methods_by_verb in self._rest_methods:
    if existing_path == path_pattern:
      methods_by_verb[http_method] = method_name, method
      return
  # First time we see this path: compile it and append a new record.
  self._rest_methods.append(
      (self._compile_path_pattern(path_pattern),
       path_pattern,
       {http_method: (method_name, method)}))
python
def _save_rest_method(self, method_name, api_name, version, method): path_pattern = '/'.join((api_name, version, method.get('path', ''))) http_method = method.get('httpMethod', '').lower() for _, path, methods in self._rest_methods: if path == path_pattern: methods[http_method] = method_name, method break else: self._rest_methods.append( (self._compile_path_pattern(path_pattern), path_pattern, {http_method: (method_name, method)}))
[ "def", "_save_rest_method", "(", "self", ",", "method_name", ",", "api_name", ",", "version", ",", "method", ")", ":", "path_pattern", "=", "'/'", ".", "join", "(", "(", "api_name", ",", "version", ",", "method", ".", "get", "(", "'path'", ",", "''", "...
Store Rest api methods in a list for lookup at call time. The list is self._rest_methods, a list of tuples: [(<compiled_path>, <path_pattern>, <method_dict>), ...] where: <compiled_path> is a compiled regex to match against the incoming URL <path_pattern> is a string representing the original path pattern, checked on insertion to prevent duplicates. -and- <method_dict> is a dict of httpMethod => (method_name, method) This structure is a bit complex, it supports use in two contexts: Creation time: - SaveRestMethod is called repeatedly, each method will have a path, which we want to be compiled for fast lookup at call time - We want to prevent duplicate incoming path patterns, so store the un-compiled path, not counting on a compiled regex being a stable comparison as it is not documented as being stable for this use. - Need to store the method that will be mapped at calltime. - Different methods may have the same path but different http method. Call time: - Quickly scan through the list attempting .match(path) on each compiled regex to find the path that matches. - When a path is matched, look up the API method from the request and get the method name and method config for the matching API method and method name. Args: method_name: A string containing the name of the API method. api_name: A string containing the name of the API. version: A string containing the version of the API. method: A dict containing the method descriptor (as in the api config file).
[ "Store", "Rest", "api", "methods", "in", "a", "list", "for", "lookup", "at", "call", "time", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/api_config_manager.py#L306-L350
22,274
cloudendpoints/endpoints-python
endpoints/apiserving.py
api_server
def api_server(api_services, **kwargs):
  """Create an api_server.

  The primary function of this method is to set up the WSGIApplication
  instance for the service handlers described by the services passed in.
  Additionally, it registers each API in ApiConfigRegistry for later use
  in the BackendService.getApiConfigs() (API config enumeration service).
  It also configures service control.

  Args:
    api_services: List of protorpc.remote.Service classes implementing the
      API or a list of _ApiDecorator instances that decorate the service
      classes for an API.
    **kwargs: Passed through to protorpc.wsgi.service.service_handlers
      except:
        protocols - ProtoRPC protocols are not supported, and are
          disallowed.

  Returns:
    A new WSGIApplication that serves the API backend and config registry.

  Raises:
    TypeError: if protocols are configured (this feature is not supported).
  """
  # Disallow protocol configuration for now, Lily is json-only.
  if 'protocols' in kwargs:
    raise TypeError("__init__() got an unexpected keyword argument 'protocols'")

  # NOTE(review): imported inside the function rather than at module top —
  # presumably to avoid an import cycle with the package; confirm.
  from . import _logger as endpoints_logger
  from . import __version__ as endpoints_version

  endpoints_logger.info('Initializing Endpoints Framework version %s',
                        endpoints_version)

  # Construct the api serving app
  apis_app = _ApiServer(api_services, **kwargs)
  dispatcher = endpoints_dispatcher.EndpointsDispatcherMiddleware(apis_app)

  # Determine the service name; without it service control cannot be
  # configured, so the bare dispatcher is returned.
  service_name = os.environ.get('ENDPOINTS_SERVICE_NAME')
  if not service_name:
    _logger.warn('Did not specify the ENDPOINTS_SERVICE_NAME environment'
                 ' variable so service control is disabled.  Please specify'
                 ' the name of service in ENDPOINTS_SERVICE_NAME to enable'
                 ' it.')
    return dispatcher

  # If we're using a local server, just return the dispatcher now to bypass
  # control client.
  if control_wsgi.running_on_devserver():
    _logger.warn('Running on local devserver, so service control is disabled.')
    return dispatcher

  # Deferred import: endpoints_management is only needed when service
  # control is actually enabled.
  from endpoints_management import _logger as management_logger
  from endpoints_management import __version__ as management_version

  management_logger.info('Initializing Endpoints Management Framework version %s',
                         management_version)

  # The DEFAULT 'config' should be tuned so that it's always OK for python
  # App Engine workloads.  The config can be adjusted, but that's probably
  # unnecessary on App Engine.
  controller = control_client.Loaders.DEFAULT.load(service_name)

  # Start the GAE background thread that powers the control client's cache.
  control_client.use_gae_thread()
  controller.start()

  return control_wsgi.add_all(
      dispatcher,
      app_identity.get_application_id(),
      controller)
python
def api_server(api_services, **kwargs): # Disallow protocol configuration for now, Lily is json-only. if 'protocols' in kwargs: raise TypeError("__init__() got an unexpected keyword argument 'protocols'") from . import _logger as endpoints_logger from . import __version__ as endpoints_version endpoints_logger.info('Initializing Endpoints Framework version %s', endpoints_version) # Construct the api serving app apis_app = _ApiServer(api_services, **kwargs) dispatcher = endpoints_dispatcher.EndpointsDispatcherMiddleware(apis_app) # Determine the service name service_name = os.environ.get('ENDPOINTS_SERVICE_NAME') if not service_name: _logger.warn('Did not specify the ENDPOINTS_SERVICE_NAME environment' ' variable so service control is disabled. Please specify' ' the name of service in ENDPOINTS_SERVICE_NAME to enable' ' it.') return dispatcher # If we're using a local server, just return the dispatcher now to bypass # control client. if control_wsgi.running_on_devserver(): _logger.warn('Running on local devserver, so service control is disabled.') return dispatcher from endpoints_management import _logger as management_logger from endpoints_management import __version__ as management_version management_logger.info('Initializing Endpoints Management Framework version %s', management_version) # The DEFAULT 'config' should be tuned so that it's always OK for python # App Engine workloads. The config can be adjusted, but that's probably # unnecessary on App Engine. controller = control_client.Loaders.DEFAULT.load(service_name) # Start the GAE background thread that powers the control client's cache. control_client.use_gae_thread() controller.start() return control_wsgi.add_all( dispatcher, app_identity.get_application_id(), controller)
[ "def", "api_server", "(", "api_services", ",", "*", "*", "kwargs", ")", ":", "# Disallow protocol configuration for now, Lily is json-only.", "if", "'protocols'", "in", "kwargs", ":", "raise", "TypeError", "(", "\"__init__() got an unexpected keyword argument 'protocols'\"", ...
Create an api_server. The primary function of this method is to set up the WSGIApplication instance for the service handlers described by the services passed in. Additionally, it registers each API in ApiConfigRegistry for later use in the BackendService.getApiConfigs() (API config enumeration service). It also configures service control. Args: api_services: List of protorpc.remote.Service classes implementing the API or a list of _ApiDecorator instances that decorate the service classes for an API. **kwargs: Passed through to protorpc.wsgi.service.service_handlers except: protocols - ProtoRPC protocols are not supported, and are disallowed. Returns: A new WSGIApplication that serves the API backend and config registry. Raises: TypeError: if protocols are configured (this feature is not supported).
[ "Create", "an", "api_server", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/apiserving.py#L541-L606
22,275
cloudendpoints/endpoints-python
endpoints/apiserving.py
ApiConfigRegistry.register_backend
def register_backend(self, config_contents): """Register a single API and its config contents. Args: config_contents: Dict containing API configuration. """ if config_contents is None: return self.__register_class(config_contents) self.__api_configs.append(config_contents) self.__register_methods(config_contents)
python
def register_backend(self, config_contents): if config_contents is None: return self.__register_class(config_contents) self.__api_configs.append(config_contents) self.__register_methods(config_contents)
[ "def", "register_backend", "(", "self", ",", "config_contents", ")", ":", "if", "config_contents", "is", "None", ":", "return", "self", ".", "__register_class", "(", "config_contents", ")", "self", ".", "__api_configs", ".", "append", "(", "config_contents", ")"...
Register a single API and its config contents. Args: config_contents: Dict containing API configuration.
[ "Register", "a", "single", "API", "and", "its", "config", "contents", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/apiserving.py#L197-L207
22,276
cloudendpoints/endpoints-python
endpoints/apiserving.py
ApiConfigRegistry.__register_class
def __register_class(self, parsed_config): """Register the class implementing this config, so we only add it once. Args: parsed_config: The JSON object with the API configuration being added. Raises: ApiConfigurationError: If the class has already been registered. """ methods = parsed_config.get('methods') if not methods: return # Determine the name of the class that implements this configuration. service_classes = set() for method in methods.itervalues(): rosy_method = method.get('rosyMethod') if rosy_method and '.' in rosy_method: method_class = rosy_method.split('.', 1)[0] service_classes.add(method_class) for service_class in service_classes: if service_class in self.__registered_classes: raise api_exceptions.ApiConfigurationError( 'API class %s has already been registered.' % service_class) self.__registered_classes.add(service_class)
python
def __register_class(self, parsed_config): methods = parsed_config.get('methods') if not methods: return # Determine the name of the class that implements this configuration. service_classes = set() for method in methods.itervalues(): rosy_method = method.get('rosyMethod') if rosy_method and '.' in rosy_method: method_class = rosy_method.split('.', 1)[0] service_classes.add(method_class) for service_class in service_classes: if service_class in self.__registered_classes: raise api_exceptions.ApiConfigurationError( 'API class %s has already been registered.' % service_class) self.__registered_classes.add(service_class)
[ "def", "__register_class", "(", "self", ",", "parsed_config", ")", ":", "methods", "=", "parsed_config", ".", "get", "(", "'methods'", ")", "if", "not", "methods", ":", "return", "# Determine the name of the class that implements this configuration.", "service_classes", ...
Register the class implementing this config, so we only add it once. Args: parsed_config: The JSON object with the API configuration being added. Raises: ApiConfigurationError: If the class has already been registered.
[ "Register", "the", "class", "implementing", "this", "config", "so", "we", "only", "add", "it", "once", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/apiserving.py#L209-L234
22,277
cloudendpoints/endpoints-python
endpoints/apiserving.py
ApiConfigRegistry.__register_methods
def __register_methods(self, parsed_config): """Register all methods from the given api config file. Methods are stored in a map from method_name to rosyMethod, the name of the ProtoRPC method to be called on the backend. If no rosyMethod was specified the value will be None. Args: parsed_config: The JSON object with the API configuration being added. """ methods = parsed_config.get('methods') if not methods: return for method_name, method in methods.iteritems(): self.__api_methods[method_name] = method.get('rosyMethod')
python
def __register_methods(self, parsed_config): methods = parsed_config.get('methods') if not methods: return for method_name, method in methods.iteritems(): self.__api_methods[method_name] = method.get('rosyMethod')
[ "def", "__register_methods", "(", "self", ",", "parsed_config", ")", ":", "methods", "=", "parsed_config", ".", "get", "(", "'methods'", ")", "if", "not", "methods", ":", "return", "for", "method_name", ",", "method", "in", "methods", ".", "iteritems", "(", ...
Register all methods from the given api config file. Methods are stored in a map from method_name to rosyMethod, the name of the ProtoRPC method to be called on the backend. If no rosyMethod was specified the value will be None. Args: parsed_config: The JSON object with the API configuration being added.
[ "Register", "all", "methods", "from", "the", "given", "api", "config", "file", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/apiserving.py#L236-L251
22,278
cloudendpoints/endpoints-python
endpoints/apiserving.py
_ApiServer.__register_services
def __register_services(api_name_version_map, api_config_registry): """Register & return a list of each URL and class that handles that URL. This finds every service class in api_name_version_map, registers it with the given ApiConfigRegistry, builds the URL for that class, and adds the URL and its factory to a list that's returned. Args: api_name_version_map: A mapping from (api name, api version) to a list of service factories, as returned by __create_name_version_map. api_config_registry: The ApiConfigRegistry where service classes will be registered. Returns: A list of (URL, service_factory) for each service class in api_name_version_map. Raises: ApiConfigurationError: If a Service class appears more than once in api_name_version_map. This could happen if one class is used to implement multiple APIs. """ generator = api_config.ApiConfigGenerator() protorpc_services = [] for service_factories in api_name_version_map.itervalues(): service_classes = [service_factory.service_class for service_factory in service_factories] config_dict = generator.get_config_dict(service_classes) api_config_registry.register_backend(config_dict) for service_factory in service_factories: protorpc_class_name = service_factory.service_class.__name__ root = '%s%s' % (service_factory.service_class.api_info.base_path, protorpc_class_name) if any(service_map[0] == root or service_map[1] == service_factory for service_map in protorpc_services): raise api_config.ApiConfigurationError( 'Can\'t reuse the same class in multiple APIs: %s' % protorpc_class_name) protorpc_services.append((root, service_factory)) return protorpc_services
python
def __register_services(api_name_version_map, api_config_registry): generator = api_config.ApiConfigGenerator() protorpc_services = [] for service_factories in api_name_version_map.itervalues(): service_classes = [service_factory.service_class for service_factory in service_factories] config_dict = generator.get_config_dict(service_classes) api_config_registry.register_backend(config_dict) for service_factory in service_factories: protorpc_class_name = service_factory.service_class.__name__ root = '%s%s' % (service_factory.service_class.api_info.base_path, protorpc_class_name) if any(service_map[0] == root or service_map[1] == service_factory for service_map in protorpc_services): raise api_config.ApiConfigurationError( 'Can\'t reuse the same class in multiple APIs: %s' % protorpc_class_name) protorpc_services.append((root, service_factory)) return protorpc_services
[ "def", "__register_services", "(", "api_name_version_map", ",", "api_config_registry", ")", ":", "generator", "=", "api_config", ".", "ApiConfigGenerator", "(", ")", "protorpc_services", "=", "[", "]", "for", "service_factories", "in", "api_name_version_map", ".", "it...
Register & return a list of each URL and class that handles that URL. This finds every service class in api_name_version_map, registers it with the given ApiConfigRegistry, builds the URL for that class, and adds the URL and its factory to a list that's returned. Args: api_name_version_map: A mapping from (api name, api version) to a list of service factories, as returned by __create_name_version_map. api_config_registry: The ApiConfigRegistry where service classes will be registered. Returns: A list of (URL, service_factory) for each service class in api_name_version_map. Raises: ApiConfigurationError: If a Service class appears more than once in api_name_version_map. This could happen if one class is used to implement multiple APIs.
[ "Register", "&", "return", "a", "list", "of", "each", "URL", "and", "class", "that", "handles", "that", "URL", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/apiserving.py#L392-L432
22,279
cloudendpoints/endpoints-python
endpoints/apiserving.py
_ApiServer.__is_json_error
def __is_json_error(self, status, headers): """Determine if response is an error. Args: status: HTTP status code. headers: Dictionary of (lowercase) header name to value. Returns: True if the response was an error, else False. """ content_header = headers.get('content-type', '') content_type, unused_params = cgi.parse_header(content_header) return (status.startswith('400') and content_type.lower() in _ALL_JSON_CONTENT_TYPES)
python
def __is_json_error(self, status, headers): content_header = headers.get('content-type', '') content_type, unused_params = cgi.parse_header(content_header) return (status.startswith('400') and content_type.lower() in _ALL_JSON_CONTENT_TYPES)
[ "def", "__is_json_error", "(", "self", ",", "status", ",", "headers", ")", ":", "content_header", "=", "headers", ".", "get", "(", "'content-type'", ",", "''", ")", "content_type", ",", "unused_params", "=", "cgi", ".", "parse_header", "(", "content_header", ...
Determine if response is an error. Args: status: HTTP status code. headers: Dictionary of (lowercase) header name to value. Returns: True if the response was an error, else False.
[ "Determine", "if", "response", "is", "an", "error", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/apiserving.py#L434-L447
22,280
cloudendpoints/endpoints-python
endpoints/apiserving.py
_ApiServer.__write_error
def __write_error(self, status_code, error_message=None): """Return the HTTP status line and body for a given error code and message. Args: status_code: HTTP status code to be returned. error_message: Error message to be returned. Returns: Tuple (http_status, body): http_status: HTTP status line, e.g. 200 OK. body: Body of the HTTP request. """ if error_message is None: error_message = httplib.responses[status_code] status = '%d %s' % (status_code, httplib.responses[status_code]) message = EndpointsErrorMessage( state=EndpointsErrorMessage.State.APPLICATION_ERROR, error_message=error_message) return status, self.__PROTOJSON.encode_message(message)
python
def __write_error(self, status_code, error_message=None): if error_message is None: error_message = httplib.responses[status_code] status = '%d %s' % (status_code, httplib.responses[status_code]) message = EndpointsErrorMessage( state=EndpointsErrorMessage.State.APPLICATION_ERROR, error_message=error_message) return status, self.__PROTOJSON.encode_message(message)
[ "def", "__write_error", "(", "self", ",", "status_code", ",", "error_message", "=", "None", ")", ":", "if", "error_message", "is", "None", ":", "error_message", "=", "httplib", ".", "responses", "[", "status_code", "]", "status", "=", "'%d %s'", "%", "(", ...
Return the HTTP status line and body for a given error code and message. Args: status_code: HTTP status code to be returned. error_message: Error message to be returned. Returns: Tuple (http_status, body): http_status: HTTP status line, e.g. 200 OK. body: Body of the HTTP request.
[ "Return", "the", "HTTP", "status", "line", "and", "body", "for", "a", "given", "error", "code", "and", "message", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/apiserving.py#L449-L467
22,281
cloudendpoints/endpoints-python
endpoints/apiserving.py
_ApiServer.protorpc_to_endpoints_error
def protorpc_to_endpoints_error(self, status, body): """Convert a ProtoRPC error to the format expected by Google Endpoints. If the body does not contain an ProtoRPC message in state APPLICATION_ERROR the status and body will be returned unchanged. Args: status: HTTP status of the response from the backend body: JSON-encoded error in format expected by Endpoints frontend. Returns: Tuple of (http status, body) """ try: rpc_error = self.__PROTOJSON.decode_message(remote.RpcStatus, body) except (ValueError, messages.ValidationError): rpc_error = remote.RpcStatus() if rpc_error.state == remote.RpcStatus.State.APPLICATION_ERROR: # Try to map to HTTP error code. error_class = _ERROR_NAME_MAP.get(rpc_error.error_name) if error_class: status, body = self.__write_error(error_class.http_status, rpc_error.error_message) return status, body
python
def protorpc_to_endpoints_error(self, status, body): try: rpc_error = self.__PROTOJSON.decode_message(remote.RpcStatus, body) except (ValueError, messages.ValidationError): rpc_error = remote.RpcStatus() if rpc_error.state == remote.RpcStatus.State.APPLICATION_ERROR: # Try to map to HTTP error code. error_class = _ERROR_NAME_MAP.get(rpc_error.error_name) if error_class: status, body = self.__write_error(error_class.http_status, rpc_error.error_message) return status, body
[ "def", "protorpc_to_endpoints_error", "(", "self", ",", "status", ",", "body", ")", ":", "try", ":", "rpc_error", "=", "self", ".", "__PROTOJSON", ".", "decode_message", "(", "remote", ".", "RpcStatus", ",", "body", ")", "except", "(", "ValueError", ",", "...
Convert a ProtoRPC error to the format expected by Google Endpoints. If the body does not contain an ProtoRPC message in state APPLICATION_ERROR the status and body will be returned unchanged. Args: status: HTTP status of the response from the backend body: JSON-encoded error in format expected by Endpoints frontend. Returns: Tuple of (http status, body)
[ "Convert", "a", "ProtoRPC", "error", "to", "the", "format", "expected", "by", "Google", "Endpoints", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/apiserving.py#L469-L494
22,282
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware._add_dispatcher
def _add_dispatcher(self, path_regex, dispatch_function): """Add a request path and dispatch handler. Args: path_regex: A string regex, the path to match against incoming requests. dispatch_function: The function to call for these requests. The function should take (request, start_response) as arguments and return the contents of the response body. """ self._dispatchers.append((re.compile(path_regex), dispatch_function))
python
def _add_dispatcher(self, path_regex, dispatch_function): self._dispatchers.append((re.compile(path_regex), dispatch_function))
[ "def", "_add_dispatcher", "(", "self", ",", "path_regex", ",", "dispatch_function", ")", ":", "self", ".", "_dispatchers", ".", "append", "(", "(", "re", ".", "compile", "(", "path_regex", ")", ",", "dispatch_function", ")", ")" ]
Add a request path and dispatch handler. Args: path_regex: A string regex, the path to match against incoming requests. dispatch_function: The function to call for these requests. The function should take (request, start_response) as arguments and return the contents of the response body.
[ "Add", "a", "request", "path", "and", "dispatch", "handler", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L103-L112
22,283
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.dispatch
def dispatch(self, request, start_response): """Handles dispatch to apiserver handlers. This typically ends up calling start_response and returning the entire body of the response. Args: request: An ApiRequest, the request from the user. start_response: A function with semantics defined in PEP-333. Returns: A string, the body of the response. """ # Check if this matches any of our special handlers. dispatched_response = self.dispatch_non_api_requests(request, start_response) if dispatched_response is not None: return dispatched_response # Call the service. try: return self.call_backend(request, start_response) except errors.RequestError as error: return self._handle_request_error(request, error, start_response)
python
def dispatch(self, request, start_response): # Check if this matches any of our special handlers. dispatched_response = self.dispatch_non_api_requests(request, start_response) if dispatched_response is not None: return dispatched_response # Call the service. try: return self.call_backend(request, start_response) except errors.RequestError as error: return self._handle_request_error(request, error, start_response)
[ "def", "dispatch", "(", "self", ",", "request", ",", "start_response", ")", ":", "# Check if this matches any of our special handlers.", "dispatched_response", "=", "self", ".", "dispatch_non_api_requests", "(", "request", ",", "start_response", ")", "if", "dispatched_res...
Handles dispatch to apiserver handlers. This typically ends up calling start_response and returning the entire body of the response. Args: request: An ApiRequest, the request from the user. start_response: A function with semantics defined in PEP-333. Returns: A string, the body of the response.
[ "Handles", "dispatch", "to", "apiserver", "handlers", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L149-L172
22,284
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.dispatch_non_api_requests
def dispatch_non_api_requests(self, request, start_response): """Dispatch this request if this is a request to a reserved URL. If the request matches one of our reserved URLs, this calls start_response and returns the response body. This also handles OPTIONS CORS requests. Args: request: An ApiRequest, the request from the user. start_response: A function with semantics defined in PEP-333. Returns: None if the request doesn't match one of the reserved URLs this handles. Otherwise, returns the response body. """ for path_regex, dispatch_function in self._dispatchers: if path_regex.match(request.relative_url): return dispatch_function(request, start_response) if request.http_method == 'OPTIONS': cors_handler = self._create_cors_handler(request) if cors_handler.allow_cors_request: # The server returns 200 rather than 204, for some reason. return util.send_wsgi_response('200', [], '', start_response, cors_handler) return None
python
def dispatch_non_api_requests(self, request, start_response): for path_regex, dispatch_function in self._dispatchers: if path_regex.match(request.relative_url): return dispatch_function(request, start_response) if request.http_method == 'OPTIONS': cors_handler = self._create_cors_handler(request) if cors_handler.allow_cors_request: # The server returns 200 rather than 204, for some reason. return util.send_wsgi_response('200', [], '', start_response, cors_handler) return None
[ "def", "dispatch_non_api_requests", "(", "self", ",", "request", ",", "start_response", ")", ":", "for", "path_regex", ",", "dispatch_function", "in", "self", ".", "_dispatchers", ":", "if", "path_regex", ".", "match", "(", "request", ".", "relative_url", ")", ...
Dispatch this request if this is a request to a reserved URL. If the request matches one of our reserved URLs, this calls start_response and returns the response body. This also handles OPTIONS CORS requests. Args: request: An ApiRequest, the request from the user. start_response: A function with semantics defined in PEP-333. Returns: None if the request doesn't match one of the reserved URLs this handles. Otherwise, returns the response body.
[ "Dispatch", "this", "request", "if", "this", "is", "a", "request", "to", "a", "reserved", "URL", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L174-L200
22,285
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.verify_response
def verify_response(response, status_code, content_type=None): """Verifies that a response has the expected status and content type. Args: response: The ResponseTuple to be checked. status_code: An int, the HTTP status code to be compared with response status. content_type: A string with the acceptable Content-Type header value. None allows any content type. Returns: True if both status_code and content_type match, else False. """ status = int(response.status.split(' ', 1)[0]) if status != status_code: return False if content_type is None: return True for header, value in response.headers: if header.lower() == 'content-type': return value == content_type # If we fall through to here, the verification has failed, so return False. return False
python
def verify_response(response, status_code, content_type=None): status = int(response.status.split(' ', 1)[0]) if status != status_code: return False if content_type is None: return True for header, value in response.headers: if header.lower() == 'content-type': return value == content_type # If we fall through to here, the verification has failed, so return False. return False
[ "def", "verify_response", "(", "response", ",", "status_code", ",", "content_type", "=", "None", ")", ":", "status", "=", "int", "(", "response", ".", "status", ".", "split", "(", "' '", ",", "1", ")", "[", "0", "]", ")", "if", "status", "!=", "statu...
Verifies that a response has the expected status and content type. Args: response: The ResponseTuple to be checked. status_code: An int, the HTTP status code to be compared with response status. content_type: A string with the acceptable Content-Type header value. None allows any content type. Returns: True if both status_code and content_type match, else False.
[ "Verifies", "that", "a", "response", "has", "the", "expected", "status", "and", "content", "type", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L246-L271
22,286
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.prepare_backend_environ
def prepare_backend_environ(self, host, method, relative_url, headers, body, source_ip, port): """Build an environ object for the backend to consume. Args: host: A string containing the host serving the request. method: A string containing the HTTP method of the request. relative_url: A string containing path and query string of the request. headers: A list of (key, value) tuples where key and value are both strings. body: A string containing the request body. source_ip: The source IP address for the request. port: The port to which to direct the request. Returns: An environ object with all the information necessary for the backend to process the request. """ if isinstance(body, unicode): body = body.encode('ascii') url = urlparse.urlsplit(relative_url) if port != 80: host = '%s:%s' % (host, port) else: host = host environ = {'CONTENT_LENGTH': str(len(body)), 'PATH_INFO': url.path, 'QUERY_STRING': url.query, 'REQUEST_METHOD': method, 'REMOTE_ADDR': source_ip, 'SERVER_NAME': host, 'SERVER_PORT': str(port), 'SERVER_PROTOCOL': 'HTTP/1.1', 'wsgi.version': (1, 0), 'wsgi.url_scheme': 'http', 'wsgi.errors': cStringIO.StringIO(), 'wsgi.multithread': True, 'wsgi.multiprocess': True, 'wsgi.input': cStringIO.StringIO(body)} util.put_headers_in_environ(headers, environ) environ['HTTP_HOST'] = host return environ
python
def prepare_backend_environ(self, host, method, relative_url, headers, body, source_ip, port): if isinstance(body, unicode): body = body.encode('ascii') url = urlparse.urlsplit(relative_url) if port != 80: host = '%s:%s' % (host, port) else: host = host environ = {'CONTENT_LENGTH': str(len(body)), 'PATH_INFO': url.path, 'QUERY_STRING': url.query, 'REQUEST_METHOD': method, 'REMOTE_ADDR': source_ip, 'SERVER_NAME': host, 'SERVER_PORT': str(port), 'SERVER_PROTOCOL': 'HTTP/1.1', 'wsgi.version': (1, 0), 'wsgi.url_scheme': 'http', 'wsgi.errors': cStringIO.StringIO(), 'wsgi.multithread': True, 'wsgi.multiprocess': True, 'wsgi.input': cStringIO.StringIO(body)} util.put_headers_in_environ(headers, environ) environ['HTTP_HOST'] = host return environ
[ "def", "prepare_backend_environ", "(", "self", ",", "host", ",", "method", ",", "relative_url", ",", "headers", ",", "body", ",", "source_ip", ",", "port", ")", ":", "if", "isinstance", "(", "body", ",", "unicode", ")", ":", "body", "=", "body", ".", "...
Build an environ object for the backend to consume. Args: host: A string containing the host serving the request. method: A string containing the HTTP method of the request. relative_url: A string containing path and query string of the request. headers: A list of (key, value) tuples where key and value are both strings. body: A string containing the request body. source_ip: The source IP address for the request. port: The port to which to direct the request. Returns: An environ object with all the information necessary for the backend to process the request.
[ "Build", "an", "environ", "object", "for", "the", "backend", "to", "consume", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L273-L315
22,287
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.handle_backend_response
def handle_backend_response(self, orig_request, backend_request, response_status, response_headers, response_body, method_config, start_response): """Handle backend response, transforming output as needed. This calls start_response and returns the response body. Args: orig_request: An ApiRequest, the original request from the user. backend_request: An ApiRequest, the transformed request that was sent to the backend handler. response_status: A string, the status from the response. response_headers: A dict, the headers from the response. response_body: A string, the body of the response. method_config: A dict, the API config of the method to be called. start_response: A function with semantics defined in PEP-333. Returns: A string containing the response body. """ # Verify that the response is json. If it isn't treat, the body as an # error message and wrap it in a json error response. for header, value in response_headers: if (header.lower() == 'content-type' and not value.lower().startswith('application/json')): return self.fail_request(orig_request, 'Non-JSON reply: %s' % response_body, start_response) self.check_error_response(response_body, response_status) # Check if the response from the API was empty. Empty REST responses # generate a HTTP 204. empty_response = self.check_empty_response(orig_request, method_config, start_response) if empty_response is not None: return empty_response body = self.transform_rest_response(response_body) cors_handler = self._create_cors_handler(orig_request) return util.send_wsgi_response(response_status, response_headers, body, start_response, cors_handler=cors_handler)
python
def handle_backend_response(self, orig_request, backend_request, response_status, response_headers, response_body, method_config, start_response): # Verify that the response is json. If it isn't treat, the body as an # error message and wrap it in a json error response. for header, value in response_headers: if (header.lower() == 'content-type' and not value.lower().startswith('application/json')): return self.fail_request(orig_request, 'Non-JSON reply: %s' % response_body, start_response) self.check_error_response(response_body, response_status) # Check if the response from the API was empty. Empty REST responses # generate a HTTP 204. empty_response = self.check_empty_response(orig_request, method_config, start_response) if empty_response is not None: return empty_response body = self.transform_rest_response(response_body) cors_handler = self._create_cors_handler(orig_request) return util.send_wsgi_response(response_status, response_headers, body, start_response, cors_handler=cors_handler)
[ "def", "handle_backend_response", "(", "self", ",", "orig_request", ",", "backend_request", ",", "response_status", ",", "response_headers", ",", "response_body", ",", "method_config", ",", "start_response", ")", ":", "# Verify that the response is json. If it isn't treat, t...
Handle backend response, transforming output as needed. This calls start_response and returns the response body. Args: orig_request: An ApiRequest, the original request from the user. backend_request: An ApiRequest, the transformed request that was sent to the backend handler. response_status: A string, the status from the response. response_headers: A dict, the headers from the response. response_body: A string, the body of the response. method_config: A dict, the API config of the method to be called. start_response: A function with semantics defined in PEP-333. Returns: A string containing the response body.
[ "Handle", "backend", "response", "transforming", "output", "as", "needed", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L415-L457
22,288
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.fail_request
def fail_request(self, orig_request, message, start_response): """Write an immediate failure response to outfile, no redirect. This calls start_response and returns the error body. Args: orig_request: An ApiRequest, the original request from the user. message: A string containing the error message to be displayed to user. start_response: A function with semantics defined in PEP-333. Returns: A string containing the body of the error response. """ cors_handler = self._create_cors_handler(orig_request) return util.send_wsgi_error_response( message, start_response, cors_handler=cors_handler)
python
def fail_request(self, orig_request, message, start_response): cors_handler = self._create_cors_handler(orig_request) return util.send_wsgi_error_response( message, start_response, cors_handler=cors_handler)
[ "def", "fail_request", "(", "self", ",", "orig_request", ",", "message", ",", "start_response", ")", ":", "cors_handler", "=", "self", ".", "_create_cors_handler", "(", "orig_request", ")", "return", "util", ".", "send_wsgi_error_response", "(", "message", ",", ...
Write an immediate failure response to outfile, no redirect. This calls start_response and returns the error body. Args: orig_request: An ApiRequest, the original request from the user. message: A string containing the error message to be displayed to user. start_response: A function with semantics defined in PEP-333. Returns: A string containing the body of the error response.
[ "Write", "an", "immediate", "failure", "response", "to", "outfile", "no", "redirect", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L459-L474
22,289
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.lookup_rest_method
def lookup_rest_method(self, orig_request): """Looks up and returns rest method for the currently-pending request. Args: orig_request: An ApiRequest, the original request from the user. Returns: A tuple of (method descriptor, parameters), or (None, None) if no method was found for the current request. """ method_name, method, params = self.config_manager.lookup_rest_method( orig_request.path, orig_request.request_uri, orig_request.http_method) orig_request.method_name = method_name return method, params
python
def lookup_rest_method(self, orig_request): method_name, method, params = self.config_manager.lookup_rest_method( orig_request.path, orig_request.request_uri, orig_request.http_method) orig_request.method_name = method_name return method, params
[ "def", "lookup_rest_method", "(", "self", ",", "orig_request", ")", ":", "method_name", ",", "method", ",", "params", "=", "self", ".", "config_manager", ".", "lookup_rest_method", "(", "orig_request", ".", "path", ",", "orig_request", ".", "request_uri", ",", ...
Looks up and returns rest method for the currently-pending request. Args: orig_request: An ApiRequest, the original request from the user. Returns: A tuple of (method descriptor, parameters), or (None, None) if no method was found for the current request.
[ "Looks", "up", "and", "returns", "rest", "method", "for", "the", "currently", "-", "pending", "request", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L476-L489
22,290
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.transform_request
def transform_request(self, orig_request, params, method_config): """Transforms orig_request to apiserving request. This method uses orig_request to determine the currently-pending request and returns a new transformed request ready to send to the backend. This method accepts a rest-style or RPC-style request. Args: orig_request: An ApiRequest, the original request from the user. params: A dictionary containing path parameters for rest requests, or None for an RPC request. method_config: A dict, the API config of the method to be called. Returns: An ApiRequest that's a copy of the current request, modified so it can be sent to the backend. The path is updated and parts of the body or other properties may also be changed. """ method_params = method_config.get('request', {}).get('parameters', {}) request = self.transform_rest_request(orig_request, params, method_params) request.path = method_config.get('rosyMethod', '') return request
python
def transform_request(self, orig_request, params, method_config): method_params = method_config.get('request', {}).get('parameters', {}) request = self.transform_rest_request(orig_request, params, method_params) request.path = method_config.get('rosyMethod', '') return request
[ "def", "transform_request", "(", "self", ",", "orig_request", ",", "params", ",", "method_config", ")", ":", "method_params", "=", "method_config", ".", "get", "(", "'request'", ",", "{", "}", ")", ".", "get", "(", "'parameters'", ",", "{", "}", ")", "re...
Transforms orig_request to apiserving request. This method uses orig_request to determine the currently-pending request and returns a new transformed request ready to send to the backend. This method accepts a rest-style or RPC-style request. Args: orig_request: An ApiRequest, the original request from the user. params: A dictionary containing path parameters for rest requests, or None for an RPC request. method_config: A dict, the API config of the method to be called. Returns: An ApiRequest that's a copy of the current request, modified so it can be sent to the backend. The path is updated and parts of the body or other properties may also be changed.
[ "Transforms", "orig_request", "to", "apiserving", "request", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L491-L512
22,291
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware._add_message_field
def _add_message_field(self, field_name, value, params): """Converts a . delimitied field name to a message field in parameters. This adds the field to the params dict, broken out so that message parameters appear as sub-dicts within the outer param. For example: {'a.b.c': ['foo']} becomes: {'a': {'b': {'c': ['foo']}}} Args: field_name: A string containing the '.' delimitied name to be converted into a dictionary. value: The value to be set. params: The dictionary holding all the parameters, where the value is eventually set. """ if '.' not in field_name: params[field_name] = value return root, remaining = field_name.split('.', 1) sub_params = params.setdefault(root, {}) self._add_message_field(remaining, value, sub_params)
python
def _add_message_field(self, field_name, value, params): if '.' not in field_name: params[field_name] = value return root, remaining = field_name.split('.', 1) sub_params = params.setdefault(root, {}) self._add_message_field(remaining, value, sub_params)
[ "def", "_add_message_field", "(", "self", ",", "field_name", ",", "value", ",", "params", ")", ":", "if", "'.'", "not", "in", "field_name", ":", "params", "[", "field_name", "]", "=", "value", "return", "root", ",", "remaining", "=", "field_name", ".", "...
Converts a . delimitied field name to a message field in parameters. This adds the field to the params dict, broken out so that message parameters appear as sub-dicts within the outer param. For example: {'a.b.c': ['foo']} becomes: {'a': {'b': {'c': ['foo']}}} Args: field_name: A string containing the '.' delimitied name to be converted into a dictionary. value: The value to be set. params: The dictionary holding all the parameters, where the value is eventually set.
[ "Converts", "a", ".", "delimitied", "field", "name", "to", "a", "message", "field", "in", "parameters", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L514-L538
22,292
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware._update_from_body
def _update_from_body(self, destination, source): """Updates the dictionary for an API payload with the request body. The values from the body should override those already in the payload, but for nested fields (message objects) the values can be combined recursively. Args: destination: A dictionary containing an API payload parsed from the path and query parameters in a request. source: A dictionary parsed from the body of the request. """ for key, value in source.iteritems(): destination_value = destination.get(key) if isinstance(value, dict) and isinstance(destination_value, dict): self._update_from_body(destination_value, value) else: destination[key] = value
python
def _update_from_body(self, destination, source): for key, value in source.iteritems(): destination_value = destination.get(key) if isinstance(value, dict) and isinstance(destination_value, dict): self._update_from_body(destination_value, value) else: destination[key] = value
[ "def", "_update_from_body", "(", "self", ",", "destination", ",", "source", ")", ":", "for", "key", ",", "value", "in", "source", ".", "iteritems", "(", ")", ":", "destination_value", "=", "destination", ".", "get", "(", "key", ")", "if", "isinstance", "...
Updates the dictionary for an API payload with the request body. The values from the body should override those already in the payload, but for nested fields (message objects) the values can be combined recursively. Args: destination: A dictionary containing an API payload parsed from the path and query parameters in a request. source: A dictionary parsed from the body of the request.
[ "Updates", "the", "dictionary", "for", "an", "API", "payload", "with", "the", "request", "body", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L540-L557
22,293
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.transform_rest_request
def transform_rest_request(self, orig_request, params, method_parameters): """Translates a Rest request into an apiserving request. This makes a copy of orig_request and transforms it to apiserving format (moving request parameters to the body). The request can receive values from the path, query and body and combine them before sending them along to the backend. In cases of collision, objects from the body take precedence over those from the query, which in turn take precedence over those from the path. In the case that a repeated value occurs in both the query and the path, those values can be combined, but if that value also occurred in the body, it would override any other values. In the case of nested values from message fields, non-colliding values from subfields can be combined. For example, if '?a.c=10' occurs in the query string and "{'a': {'b': 11}}" occurs in the body, then they will be combined as { 'a': { 'b': 11, 'c': 10, } } before being sent to the backend. Args: orig_request: An ApiRequest, the original request from the user. params: A dict with URL path parameters extracted by the config_manager lookup. method_parameters: A dictionary containing the API configuration for the parameters for the request. Returns: A copy of the current request that's been modified so it can be sent to the backend. The body is updated to include parameters from the URL. """ request = orig_request.copy() body_json = {} # Handle parameters from the URL path. for key, value in params.iteritems(): # Values need to be in a list to interact with query parameter values # and to account for case of repeated parameters body_json[key] = [value] # Add in parameters from the query string. if request.parameters: # For repeated elements, query and path work together for key, value in request.parameters.iteritems(): if key in body_json: body_json[key] = value + body_json[key] else: body_json[key] = value # Validate all parameters we've merged so far and convert any '.' 
delimited # parameters to nested parameters. We don't use iteritems since we may # modify body_json within the loop. For instance, 'a.b' is not a valid key # and would be replaced with 'a'. for key, value in body_json.items(): current_parameter = method_parameters.get(key, {}) repeated = current_parameter.get('repeated', False) if not repeated: body_json[key] = body_json[key][0] # Order is important here. Parameter names are dot-delimited in # parameters instead of nested in dictionaries as a message field is, so # we need to call transform_parameter_value on them before calling # _add_message_field. body_json[key] = parameter_converter.transform_parameter_value( key, body_json[key], current_parameter) # Remove the old key and try to convert to nested message value message_value = body_json.pop(key) self._add_message_field(key, message_value, body_json) # Add in values from the body of the request. if request.body_json: self._update_from_body(body_json, request.body_json) request.body_json = body_json request.body = json.dumps(request.body_json) return request
python
def transform_rest_request(self, orig_request, params, method_parameters): request = orig_request.copy() body_json = {} # Handle parameters from the URL path. for key, value in params.iteritems(): # Values need to be in a list to interact with query parameter values # and to account for case of repeated parameters body_json[key] = [value] # Add in parameters from the query string. if request.parameters: # For repeated elements, query and path work together for key, value in request.parameters.iteritems(): if key in body_json: body_json[key] = value + body_json[key] else: body_json[key] = value # Validate all parameters we've merged so far and convert any '.' delimited # parameters to nested parameters. We don't use iteritems since we may # modify body_json within the loop. For instance, 'a.b' is not a valid key # and would be replaced with 'a'. for key, value in body_json.items(): current_parameter = method_parameters.get(key, {}) repeated = current_parameter.get('repeated', False) if not repeated: body_json[key] = body_json[key][0] # Order is important here. Parameter names are dot-delimited in # parameters instead of nested in dictionaries as a message field is, so # we need to call transform_parameter_value on them before calling # _add_message_field. body_json[key] = parameter_converter.transform_parameter_value( key, body_json[key], current_parameter) # Remove the old key and try to convert to nested message value message_value = body_json.pop(key) self._add_message_field(key, message_value, body_json) # Add in values from the body of the request. if request.body_json: self._update_from_body(body_json, request.body_json) request.body_json = body_json request.body = json.dumps(request.body_json) return request
[ "def", "transform_rest_request", "(", "self", ",", "orig_request", ",", "params", ",", "method_parameters", ")", ":", "request", "=", "orig_request", ".", "copy", "(", ")", "body_json", "=", "{", "}", "# Handle parameters from the URL path.", "for", "key", ",", ...
Translates a Rest request into an apiserving request. This makes a copy of orig_request and transforms it to apiserving format (moving request parameters to the body). The request can receive values from the path, query and body and combine them before sending them along to the backend. In cases of collision, objects from the body take precedence over those from the query, which in turn take precedence over those from the path. In the case that a repeated value occurs in both the query and the path, those values can be combined, but if that value also occurred in the body, it would override any other values. In the case of nested values from message fields, non-colliding values from subfields can be combined. For example, if '?a.c=10' occurs in the query string and "{'a': {'b': 11}}" occurs in the body, then they will be combined as { 'a': { 'b': 11, 'c': 10, } } before being sent to the backend. Args: orig_request: An ApiRequest, the original request from the user. params: A dict with URL path parameters extracted by the config_manager lookup. method_parameters: A dictionary containing the API configuration for the parameters for the request. Returns: A copy of the current request that's been modified so it can be sent to the backend. The body is updated to include parameters from the URL.
[ "Translates", "a", "Rest", "request", "into", "an", "apiserving", "request", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L559-L645
22,294
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.check_error_response
def check_error_response(self, body, status): """Raise an exception if the response from the backend was an error. Args: body: A string containing the backend response body. status: A string containing the backend response status. Raises: BackendError if the response is an error. """ status_code = int(status.split(' ', 1)[0]) if status_code >= 300: raise errors.BackendError(body, status)
python
def check_error_response(self, body, status): status_code = int(status.split(' ', 1)[0]) if status_code >= 300: raise errors.BackendError(body, status)
[ "def", "check_error_response", "(", "self", ",", "body", ",", "status", ")", ":", "status_code", "=", "int", "(", "status", ".", "split", "(", "' '", ",", "1", ")", "[", "0", "]", ")", "if", "status_code", ">=", "300", ":", "raise", "errors", ".", ...
Raise an exception if the response from the backend was an error. Args: body: A string containing the backend response body. status: A string containing the backend response status. Raises: BackendError if the response is an error.
[ "Raise", "an", "exception", "if", "the", "response", "from", "the", "backend", "was", "an", "error", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L647-L659
22,295
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.check_empty_response
def check_empty_response(self, orig_request, method_config, start_response): """If the response from the backend is empty, return a HTTP 204 No Content. Args: orig_request: An ApiRequest, the original request from the user. method_config: A dict, the API config of the method to be called. start_response: A function with semantics defined in PEP-333. Returns: If the backend response was empty, this returns a string containing the response body that should be returned to the user. If the backend response wasn't empty, this returns None, indicating that we should not exit early with a 204. """ response_config = method_config.get('response', {}).get('body') if response_config == 'empty': # The response to this function should be empty. We should return a 204. # Note that it's possible that the backend returned something, but we'll # ignore it. This matches the behavior in the Endpoints server. cors_handler = self._create_cors_handler(orig_request) return util.send_wsgi_no_content_response(start_response, cors_handler)
python
def check_empty_response(self, orig_request, method_config, start_response): response_config = method_config.get('response', {}).get('body') if response_config == 'empty': # The response to this function should be empty. We should return a 204. # Note that it's possible that the backend returned something, but we'll # ignore it. This matches the behavior in the Endpoints server. cors_handler = self._create_cors_handler(orig_request) return util.send_wsgi_no_content_response(start_response, cors_handler)
[ "def", "check_empty_response", "(", "self", ",", "orig_request", ",", "method_config", ",", "start_response", ")", ":", "response_config", "=", "method_config", ".", "get", "(", "'response'", ",", "{", "}", ")", ".", "get", "(", "'body'", ")", "if", "respons...
If the response from the backend is empty, return a HTTP 204 No Content. Args: orig_request: An ApiRequest, the original request from the user. method_config: A dict, the API config of the method to be called. start_response: A function with semantics defined in PEP-333. Returns: If the backend response was empty, this returns a string containing the response body that should be returned to the user. If the backend response wasn't empty, this returns None, indicating that we should not exit early with a 204.
[ "If", "the", "response", "from", "the", "backend", "is", "empty", "return", "a", "HTTP", "204", "No", "Content", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L661-L681
22,296
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware.transform_rest_response
def transform_rest_response(self, response_body): """Translates an apiserving REST response so it's ready to return. Currently, the only thing that needs to be fixed here is indentation, so it's consistent with what the live app will return. Args: response_body: A string containing the backend response. Returns: A reformatted version of the response JSON. """ body_json = json.loads(response_body) return json.dumps(body_json, indent=1, sort_keys=True)
python
def transform_rest_response(self, response_body): body_json = json.loads(response_body) return json.dumps(body_json, indent=1, sort_keys=True)
[ "def", "transform_rest_response", "(", "self", ",", "response_body", ")", ":", "body_json", "=", "json", ".", "loads", "(", "response_body", ")", "return", "json", ".", "dumps", "(", "body_json", ",", "indent", "=", "1", ",", "sort_keys", "=", "True", ")" ...
Translates an apiserving REST response so it's ready to return. Currently, the only thing that needs to be fixed here is indentation, so it's consistent with what the live app will return. Args: response_body: A string containing the backend response. Returns: A reformatted version of the response JSON.
[ "Translates", "an", "apiserving", "REST", "response", "so", "it", "s", "ready", "to", "return", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L683-L696
22,297
cloudendpoints/endpoints-python
endpoints/endpoints_dispatcher.py
EndpointsDispatcherMiddleware._handle_request_error
def _handle_request_error(self, orig_request, error, start_response): """Handle a request error, converting it to a WSGI response. Args: orig_request: An ApiRequest, the original request from the user. error: A RequestError containing information about the error. start_response: A function with semantics defined in PEP-333. Returns: A string containing the response body. """ headers = [('Content-Type', 'application/json')] status_code = error.status_code() body = error.rest_error() response_status = '%d %s' % (status_code, httplib.responses.get(status_code, 'Unknown Error')) cors_handler = self._create_cors_handler(orig_request) return util.send_wsgi_response(response_status, headers, body, start_response, cors_handler=cors_handler)
python
def _handle_request_error(self, orig_request, error, start_response): headers = [('Content-Type', 'application/json')] status_code = error.status_code() body = error.rest_error() response_status = '%d %s' % (status_code, httplib.responses.get(status_code, 'Unknown Error')) cors_handler = self._create_cors_handler(orig_request) return util.send_wsgi_response(response_status, headers, body, start_response, cors_handler=cors_handler)
[ "def", "_handle_request_error", "(", "self", ",", "orig_request", ",", "error", ",", "start_response", ")", ":", "headers", "=", "[", "(", "'Content-Type'", ",", "'application/json'", ")", "]", "status_code", "=", "error", ".", "status_code", "(", ")", "body",...
Handle a request error, converting it to a WSGI response. Args: orig_request: An ApiRequest, the original request from the user. error: A RequestError containing information about the error. start_response: A function with semantics defined in PEP-333. Returns: A string containing the response body.
[ "Handle", "a", "request", "error", "converting", "it", "to", "a", "WSGI", "response", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/endpoints_dispatcher.py#L698-L718
22,298
cloudendpoints/endpoints-python
endpoints/_endpointscfg_impl.py
_WriteFile
def _WriteFile(output_path, name, content): """Write given content to a file in a given directory. Args: output_path: The directory to store the file in. name: The name of the file to store the content in. content: The content to write to the file.close Returns: The full path to the written file. """ path = os.path.join(output_path, name) with open(path, 'wb') as f: f.write(content) return path
python
def _WriteFile(output_path, name, content): path = os.path.join(output_path, name) with open(path, 'wb') as f: f.write(content) return path
[ "def", "_WriteFile", "(", "output_path", ",", "name", ",", "content", ")", ":", "path", "=", "os", ".", "path", ".", "join", "(", "output_path", ",", "name", ")", "with", "open", "(", "path", ",", "'wb'", ")", "as", "f", ":", "f", ".", "write", "...
Write given content to a file in a given directory. Args: output_path: The directory to store the file in. name: The name of the file to store the content in. content: The content to write to the file.close Returns: The full path to the written file.
[ "Write", "given", "content", "to", "a", "file", "in", "a", "given", "directory", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/_endpointscfg_impl.py#L138-L152
22,299
cloudendpoints/endpoints-python
endpoints/_endpointscfg_impl.py
GenApiConfig
def GenApiConfig(service_class_names, config_string_generator=None, hostname=None, application_path=None, **additional_kwargs): """Write an API configuration for endpoints annotated ProtoRPC services. Args: service_class_names: A list of fully qualified ProtoRPC service classes. config_string_generator: A generator object that produces API config strings using its pretty_print_config_to_json method. hostname: A string hostname which will be used as the default version hostname. If no hostname is specificied in the @endpoints.api decorator, this value is the fallback. application_path: A string with the path to the AppEngine application. Raises: TypeError: If any service classes don't inherit from remote.Service. messages.DefinitionNotFoundError: If a service can't be found. Returns: A map from service names to a string containing the API configuration of the service in JSON format. """ # First, gather together all the different APIs implemented by these # classes. There may be fewer APIs than service classes. Each API is # uniquely identified by (name, version). Order needs to be preserved here, # so APIs that were listed first are returned first. 
api_service_map = collections.OrderedDict() resolved_services = [] for service_class_name in service_class_names: module_name, base_service_class_name = service_class_name.rsplit('.', 1) module = __import__(module_name, fromlist=base_service_class_name) service = getattr(module, base_service_class_name) if hasattr(service, 'get_api_classes'): resolved_services.extend(service.get_api_classes()) elif (not isinstance(service, type) or not issubclass(service, remote.Service)): raise TypeError('%s is not a ProtoRPC service' % service_class_name) else: resolved_services.append(service) for resolved_service in resolved_services: services = api_service_map.setdefault( (resolved_service.api_info.name, resolved_service.api_info.api_version), []) services.append(resolved_service) # If hostname isn't specified in the API or on the command line, we'll # try to build it from information in app.yaml. app_yaml_hostname = _GetAppYamlHostname(application_path) service_map = collections.OrderedDict() config_string_generator = ( config_string_generator or api_config.ApiConfigGenerator()) for api_info, services in api_service_map.iteritems(): assert services, 'An API must have at least one ProtoRPC service' # Only override hostname if None. Hostname will be the same for all # services within an API, since it's stored in common info. hostname = services[0].api_info.hostname or hostname or app_yaml_hostname # Map each API by name-version. service_map['%s-%s' % api_info] = ( config_string_generator.pretty_print_config_to_json( services, hostname=hostname, **additional_kwargs)) return service_map
python
def GenApiConfig(service_class_names, config_string_generator=None, hostname=None, application_path=None, **additional_kwargs): # First, gather together all the different APIs implemented by these # classes. There may be fewer APIs than service classes. Each API is # uniquely identified by (name, version). Order needs to be preserved here, # so APIs that were listed first are returned first. api_service_map = collections.OrderedDict() resolved_services = [] for service_class_name in service_class_names: module_name, base_service_class_name = service_class_name.rsplit('.', 1) module = __import__(module_name, fromlist=base_service_class_name) service = getattr(module, base_service_class_name) if hasattr(service, 'get_api_classes'): resolved_services.extend(service.get_api_classes()) elif (not isinstance(service, type) or not issubclass(service, remote.Service)): raise TypeError('%s is not a ProtoRPC service' % service_class_name) else: resolved_services.append(service) for resolved_service in resolved_services: services = api_service_map.setdefault( (resolved_service.api_info.name, resolved_service.api_info.api_version), []) services.append(resolved_service) # If hostname isn't specified in the API or on the command line, we'll # try to build it from information in app.yaml. app_yaml_hostname = _GetAppYamlHostname(application_path) service_map = collections.OrderedDict() config_string_generator = ( config_string_generator or api_config.ApiConfigGenerator()) for api_info, services in api_service_map.iteritems(): assert services, 'An API must have at least one ProtoRPC service' # Only override hostname if None. Hostname will be the same for all # services within an API, since it's stored in common info. hostname = services[0].api_info.hostname or hostname or app_yaml_hostname # Map each API by name-version. service_map['%s-%s' % api_info] = ( config_string_generator.pretty_print_config_to_json( services, hostname=hostname, **additional_kwargs)) return service_map
[ "def", "GenApiConfig", "(", "service_class_names", ",", "config_string_generator", "=", "None", ",", "hostname", "=", "None", ",", "application_path", "=", "None", ",", "*", "*", "additional_kwargs", ")", ":", "# First, gather together all the different APIs implemented b...
Write an API configuration for endpoints annotated ProtoRPC services. Args: service_class_names: A list of fully qualified ProtoRPC service classes. config_string_generator: A generator object that produces API config strings using its pretty_print_config_to_json method. hostname: A string hostname which will be used as the default version hostname. If no hostname is specified in the @endpoints.api decorator, this value is the fallback. application_path: A string with the path to the AppEngine application. Raises: TypeError: If any service classes don't inherit from remote.Service. messages.DefinitionNotFoundError: If a service can't be found. Returns: A map from service names to a string containing the API configuration of the service in JSON format.
[ "Write", "an", "API", "configuration", "for", "endpoints", "annotated", "ProtoRPC", "services", "." ]
00dd7c7a52a9ee39d5923191c2604b8eafdb3f24
https://github.com/cloudendpoints/endpoints-python/blob/00dd7c7a52a9ee39d5923191c2604b8eafdb3f24/endpoints/_endpointscfg_impl.py#L155-L218