repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.WriteFile | def WriteFile(self, printer):
"""Write the services in this registry to out."""
self.Validate()
client_info = self.__client_info
printer('"""Generated client library for %s version %s."""',
client_info.package, client_info.version)
printer('# NOTE: This file is autogenerated and should not be edited '
'by hand.')
printer('from %s import base_api', self.__base_files_package)
if self.__root_package:
import_prefix = 'from {0} '.format(self.__root_package)
else:
import_prefix = ''
printer('%simport %s as messages', import_prefix,
client_info.messages_rule_name)
printer()
printer()
printer('class %s(base_api.BaseApiClient):',
client_info.client_class_name)
with printer.Indent():
printer(
'"""Generated client library for service %s version %s."""',
client_info.package, client_info.version)
printer()
printer('MESSAGES_MODULE = messages')
printer('BASE_URL = {0!r}'.format(client_info.base_url))
printer()
printer('_PACKAGE = {0!r}'.format(client_info.package))
printer('_SCOPES = {0!r}'.format(
client_info.scopes or
['https://www.googleapis.com/auth/userinfo.email']))
printer('_VERSION = {0!r}'.format(client_info.version))
printer('_CLIENT_ID = {0!r}'.format(client_info.client_id))
printer('_CLIENT_SECRET = {0!r}'.format(client_info.client_secret))
printer('_USER_AGENT = {0!r}'.format(client_info.user_agent))
printer('_CLIENT_CLASS_NAME = {0!r}'.format(
client_info.client_class_name))
printer('_URL_VERSION = {0!r}'.format(client_info.url_version))
printer('_API_KEY = {0!r}'.format(client_info.api_key))
printer()
printer("def __init__(self, url='', credentials=None,")
with printer.Indent(indent=' '):
printer('get_credentials=True, http=None, model=None,')
printer('log_request=False, log_response=False,')
printer('credentials_args=None, default_global_params=None,')
printer('additional_http_headers=None, '
'response_encoding=None):')
with printer.Indent():
printer('"""Create a new %s handle."""', client_info.package)
printer('url = url or self.BASE_URL')
printer(
'super(%s, self).__init__(', client_info.client_class_name)
printer(' url, credentials=credentials,')
printer(' get_credentials=get_credentials, http=http, '
'model=model,')
printer(' log_request=log_request, '
'log_response=log_response,')
printer(' credentials_args=credentials_args,')
printer(' default_global_params=default_global_params,')
printer(' additional_http_headers=additional_http_headers,')
printer(' response_encoding=response_encoding)')
for name in self.__service_method_info_map.keys():
printer('self.%s = self.%s(self)',
name, self.__GetServiceClassName(name))
for name, method_info in self.__service_method_info_map.items():
self.__WriteSingleService(
printer, name, method_info, client_info.client_class_name) | python | def WriteFile(self, printer):
"""Write the services in this registry to out."""
self.Validate()
client_info = self.__client_info
printer('"""Generated client library for %s version %s."""',
client_info.package, client_info.version)
printer('# NOTE: This file is autogenerated and should not be edited '
'by hand.')
printer('from %s import base_api', self.__base_files_package)
if self.__root_package:
import_prefix = 'from {0} '.format(self.__root_package)
else:
import_prefix = ''
printer('%simport %s as messages', import_prefix,
client_info.messages_rule_name)
printer()
printer()
printer('class %s(base_api.BaseApiClient):',
client_info.client_class_name)
with printer.Indent():
printer(
'"""Generated client library for service %s version %s."""',
client_info.package, client_info.version)
printer()
printer('MESSAGES_MODULE = messages')
printer('BASE_URL = {0!r}'.format(client_info.base_url))
printer()
printer('_PACKAGE = {0!r}'.format(client_info.package))
printer('_SCOPES = {0!r}'.format(
client_info.scopes or
['https://www.googleapis.com/auth/userinfo.email']))
printer('_VERSION = {0!r}'.format(client_info.version))
printer('_CLIENT_ID = {0!r}'.format(client_info.client_id))
printer('_CLIENT_SECRET = {0!r}'.format(client_info.client_secret))
printer('_USER_AGENT = {0!r}'.format(client_info.user_agent))
printer('_CLIENT_CLASS_NAME = {0!r}'.format(
client_info.client_class_name))
printer('_URL_VERSION = {0!r}'.format(client_info.url_version))
printer('_API_KEY = {0!r}'.format(client_info.api_key))
printer()
printer("def __init__(self, url='', credentials=None,")
with printer.Indent(indent=' '):
printer('get_credentials=True, http=None, model=None,')
printer('log_request=False, log_response=False,')
printer('credentials_args=None, default_global_params=None,')
printer('additional_http_headers=None, '
'response_encoding=None):')
with printer.Indent():
printer('"""Create a new %s handle."""', client_info.package)
printer('url = url or self.BASE_URL')
printer(
'super(%s, self).__init__(', client_info.client_class_name)
printer(' url, credentials=credentials,')
printer(' get_credentials=get_credentials, http=http, '
'model=model,')
printer(' log_request=log_request, '
'log_response=log_response,')
printer(' credentials_args=credentials_args,')
printer(' default_global_params=default_global_params,')
printer(' additional_http_headers=additional_http_headers,')
printer(' response_encoding=response_encoding)')
for name in self.__service_method_info_map.keys():
printer('self.%s = self.%s(self)',
name, self.__GetServiceClassName(name))
for name, method_info in self.__service_method_info_map.items():
self.__WriteSingleService(
printer, name, method_info, client_info.client_class_name) | [
"def",
"WriteFile",
"(",
"self",
",",
"printer",
")",
":",
"self",
".",
"Validate",
"(",
")",
"client_info",
"=",
"self",
".",
"__client_info",
"printer",
"(",
"'\"\"\"Generated client library for %s version %s.\"\"\"'",
",",
"client_info",
".",
"package",
",",
"c... | Write the services in this registry to out. | [
"Write",
"the",
"services",
"in",
"this",
"registry",
"to",
"out",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L195-L261 | train | 207,700 |
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.__CreateRequestType | def __CreateRequestType(self, method_description, body_type=None):
"""Create a request type for this method."""
schema = {}
schema['id'] = self.__names.ClassName('%sRequest' % (
self.__names.ClassName(method_description['id'], separator='.'),))
schema['type'] = 'object'
schema['properties'] = collections.OrderedDict()
if 'parameterOrder' not in method_description:
ordered_parameters = list(method_description.get('parameters', []))
else:
ordered_parameters = method_description['parameterOrder'][:]
for k in method_description['parameters']:
if k not in ordered_parameters:
ordered_parameters.append(k)
for parameter_name in ordered_parameters:
field_name = self.__names.CleanName(parameter_name)
field = dict(method_description['parameters'][parameter_name])
if 'type' not in field:
raise ValueError('No type found in parameter %s' % field)
schema['properties'][field_name] = field
if body_type is not None:
body_field_name = self.__GetRequestField(
method_description, body_type)
if body_field_name in schema['properties']:
raise ValueError('Failed to normalize request resource name')
if 'description' not in body_type:
body_type['description'] = (
'A %s resource to be passed as the request body.' % (
self.__GetRequestType(body_type),))
schema['properties'][body_field_name] = body_type
self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
return schema['id'] | python | def __CreateRequestType(self, method_description, body_type=None):
"""Create a request type for this method."""
schema = {}
schema['id'] = self.__names.ClassName('%sRequest' % (
self.__names.ClassName(method_description['id'], separator='.'),))
schema['type'] = 'object'
schema['properties'] = collections.OrderedDict()
if 'parameterOrder' not in method_description:
ordered_parameters = list(method_description.get('parameters', []))
else:
ordered_parameters = method_description['parameterOrder'][:]
for k in method_description['parameters']:
if k not in ordered_parameters:
ordered_parameters.append(k)
for parameter_name in ordered_parameters:
field_name = self.__names.CleanName(parameter_name)
field = dict(method_description['parameters'][parameter_name])
if 'type' not in field:
raise ValueError('No type found in parameter %s' % field)
schema['properties'][field_name] = field
if body_type is not None:
body_field_name = self.__GetRequestField(
method_description, body_type)
if body_field_name in schema['properties']:
raise ValueError('Failed to normalize request resource name')
if 'description' not in body_type:
body_type['description'] = (
'A %s resource to be passed as the request body.' % (
self.__GetRequestType(body_type),))
schema['properties'][body_field_name] = body_type
self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
return schema['id'] | [
"def",
"__CreateRequestType",
"(",
"self",
",",
"method_description",
",",
"body_type",
"=",
"None",
")",
":",
"schema",
"=",
"{",
"}",
"schema",
"[",
"'id'",
"]",
"=",
"self",
".",
"__names",
".",
"ClassName",
"(",
"'%sRequest'",
"%",
"(",
"self",
".",
... | Create a request type for this method. | [
"Create",
"a",
"request",
"type",
"for",
"this",
"method",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L269-L300 | train | 207,701 |
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.__CreateVoidResponseType | def __CreateVoidResponseType(self, method_description):
"""Create an empty response type."""
schema = {}
method_name = self.__names.ClassName(
method_description['id'], separator='.')
schema['id'] = self.__names.ClassName('%sResponse' % method_name)
schema['type'] = 'object'
schema['description'] = 'An empty %s response.' % method_name
self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
return schema['id'] | python | def __CreateVoidResponseType(self, method_description):
"""Create an empty response type."""
schema = {}
method_name = self.__names.ClassName(
method_description['id'], separator='.')
schema['id'] = self.__names.ClassName('%sResponse' % method_name)
schema['type'] = 'object'
schema['description'] = 'An empty %s response.' % method_name
self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
return schema['id'] | [
"def",
"__CreateVoidResponseType",
"(",
"self",
",",
"method_description",
")",
":",
"schema",
"=",
"{",
"}",
"method_name",
"=",
"self",
".",
"__names",
".",
"ClassName",
"(",
"method_description",
"[",
"'id'",
"]",
",",
"separator",
"=",
"'.'",
")",
"schem... | Create an empty response type. | [
"Create",
"an",
"empty",
"response",
"type",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L302-L311 | train | 207,702 |
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.__NeedRequestType | def __NeedRequestType(self, method_description, request_type):
"""Determine if this method needs a new request type created."""
if not request_type:
return True
method_id = method_description.get('id', '')
if method_id in self.__unelidable_request_methods:
return True
message = self.__message_registry.LookupDescriptorOrDie(request_type)
if message is None:
return True
field_names = [x.name for x in message.fields]
parameters = method_description.get('parameters', {})
for param_name, param_info in parameters.items():
if (param_info.get('location') != 'path' or
self.__names.CleanName(param_name) not in field_names):
break
else:
return False
return True | python | def __NeedRequestType(self, method_description, request_type):
"""Determine if this method needs a new request type created."""
if not request_type:
return True
method_id = method_description.get('id', '')
if method_id in self.__unelidable_request_methods:
return True
message = self.__message_registry.LookupDescriptorOrDie(request_type)
if message is None:
return True
field_names = [x.name for x in message.fields]
parameters = method_description.get('parameters', {})
for param_name, param_info in parameters.items():
if (param_info.get('location') != 'path' or
self.__names.CleanName(param_name) not in field_names):
break
else:
return False
return True | [
"def",
"__NeedRequestType",
"(",
"self",
",",
"method_description",
",",
"request_type",
")",
":",
"if",
"not",
"request_type",
":",
"return",
"True",
"method_id",
"=",
"method_description",
".",
"get",
"(",
"'id'",
",",
"''",
")",
"if",
"method_id",
"in",
"... | Determine if this method needs a new request type created. | [
"Determine",
"if",
"this",
"method",
"needs",
"a",
"new",
"request",
"type",
"created",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L313-L331 | train | 207,703 |
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.__MaxSizeToInt | def __MaxSizeToInt(self, max_size):
"""Convert max_size to an int."""
size_groups = re.match(r'(?P<size>\d+)(?P<unit>.B)?$', max_size)
if size_groups is None:
raise ValueError('Could not parse maxSize')
size, unit = size_groups.group('size', 'unit')
shift = 0
if unit is not None:
unit_dict = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
shift = unit_dict.get(unit.upper())
if shift is None:
raise ValueError('Unknown unit %s' % unit)
return int(size) * (1 << shift) | python | def __MaxSizeToInt(self, max_size):
"""Convert max_size to an int."""
size_groups = re.match(r'(?P<size>\d+)(?P<unit>.B)?$', max_size)
if size_groups is None:
raise ValueError('Could not parse maxSize')
size, unit = size_groups.group('size', 'unit')
shift = 0
if unit is not None:
unit_dict = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}
shift = unit_dict.get(unit.upper())
if shift is None:
raise ValueError('Unknown unit %s' % unit)
return int(size) * (1 << shift) | [
"def",
"__MaxSizeToInt",
"(",
"self",
",",
"max_size",
")",
":",
"size_groups",
"=",
"re",
".",
"match",
"(",
"r'(?P<size>\\d+)(?P<unit>.B)?$'",
",",
"max_size",
")",
"if",
"size_groups",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Could not parse maxSize'",
... | Convert max_size to an int. | [
"Convert",
"max_size",
"to",
"an",
"int",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L333-L345 | train | 207,704 |
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.__ComputeUploadConfig | def __ComputeUploadConfig(self, media_upload_config, method_id):
"""Fill out the upload config for this method."""
config = base_api.ApiUploadInfo()
if 'maxSize' in media_upload_config:
config.max_size = self.__MaxSizeToInt(
media_upload_config['maxSize'])
if 'accept' not in media_upload_config:
logging.warn(
'No accept types found for upload configuration in '
'method %s, using */*', method_id)
config.accept.extend([
str(a) for a in media_upload_config.get('accept', '*/*')])
for accept_pattern in config.accept:
if not _MIME_PATTERN_RE.match(accept_pattern):
logging.warn('Unexpected MIME type: %s', accept_pattern)
protocols = media_upload_config.get('protocols', {})
for protocol in ('simple', 'resumable'):
media = protocols.get(protocol, {})
for attr in ('multipart', 'path'):
if attr in media:
setattr(config, '%s_%s' % (protocol, attr), media[attr])
return config | python | def __ComputeUploadConfig(self, media_upload_config, method_id):
"""Fill out the upload config for this method."""
config = base_api.ApiUploadInfo()
if 'maxSize' in media_upload_config:
config.max_size = self.__MaxSizeToInt(
media_upload_config['maxSize'])
if 'accept' not in media_upload_config:
logging.warn(
'No accept types found for upload configuration in '
'method %s, using */*', method_id)
config.accept.extend([
str(a) for a in media_upload_config.get('accept', '*/*')])
for accept_pattern in config.accept:
if not _MIME_PATTERN_RE.match(accept_pattern):
logging.warn('Unexpected MIME type: %s', accept_pattern)
protocols = media_upload_config.get('protocols', {})
for protocol in ('simple', 'resumable'):
media = protocols.get(protocol, {})
for attr in ('multipart', 'path'):
if attr in media:
setattr(config, '%s_%s' % (protocol, attr), media[attr])
return config | [
"def",
"__ComputeUploadConfig",
"(",
"self",
",",
"media_upload_config",
",",
"method_id",
")",
":",
"config",
"=",
"base_api",
".",
"ApiUploadInfo",
"(",
")",
"if",
"'maxSize'",
"in",
"media_upload_config",
":",
"config",
".",
"max_size",
"=",
"self",
".",
"_... | Fill out the upload config for this method. | [
"Fill",
"out",
"the",
"upload",
"config",
"for",
"this",
"method",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L347-L369 | train | 207,705 |
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.__ComputeMethodInfo | def __ComputeMethodInfo(self, method_description, request, response,
request_field):
"""Compute the base_api.ApiMethodInfo for this method."""
relative_path = self.__names.NormalizeRelativePath(
''.join((self.__client_info.base_path,
method_description['path'])))
method_id = method_description['id']
ordered_params = []
for param_name in method_description.get('parameterOrder', []):
param_info = method_description['parameters'][param_name]
if param_info.get('required', False):
ordered_params.append(param_name)
method_info = base_api.ApiMethodInfo(
relative_path=relative_path,
method_id=method_id,
http_method=method_description['httpMethod'],
description=util.CleanDescription(
method_description.get('description', '')),
query_params=[],
path_params=[],
ordered_params=ordered_params,
request_type_name=self.__names.ClassName(request),
response_type_name=self.__names.ClassName(response),
request_field=request_field,
)
flat_path = method_description.get('flatPath', None)
if flat_path is not None:
flat_path = self.__names.NormalizeRelativePath(
self.__client_info.base_path + flat_path)
if flat_path != relative_path:
method_info.flat_path = flat_path
if method_description.get('supportsMediaUpload', False):
method_info.upload_config = self.__ComputeUploadConfig(
method_description.get('mediaUpload'), method_id)
method_info.supports_download = method_description.get(
'supportsMediaDownload', False)
self.__all_scopes.update(method_description.get('scopes', ()))
for param, desc in method_description.get('parameters', {}).items():
param = self.__names.CleanName(param)
location = desc['location']
if location == 'query':
method_info.query_params.append(param)
elif location == 'path':
method_info.path_params.append(param)
else:
raise ValueError(
'Unknown parameter location %s for parameter %s' % (
location, param))
method_info.path_params.sort()
method_info.query_params.sort()
return method_info | python | def __ComputeMethodInfo(self, method_description, request, response,
request_field):
"""Compute the base_api.ApiMethodInfo for this method."""
relative_path = self.__names.NormalizeRelativePath(
''.join((self.__client_info.base_path,
method_description['path'])))
method_id = method_description['id']
ordered_params = []
for param_name in method_description.get('parameterOrder', []):
param_info = method_description['parameters'][param_name]
if param_info.get('required', False):
ordered_params.append(param_name)
method_info = base_api.ApiMethodInfo(
relative_path=relative_path,
method_id=method_id,
http_method=method_description['httpMethod'],
description=util.CleanDescription(
method_description.get('description', '')),
query_params=[],
path_params=[],
ordered_params=ordered_params,
request_type_name=self.__names.ClassName(request),
response_type_name=self.__names.ClassName(response),
request_field=request_field,
)
flat_path = method_description.get('flatPath', None)
if flat_path is not None:
flat_path = self.__names.NormalizeRelativePath(
self.__client_info.base_path + flat_path)
if flat_path != relative_path:
method_info.flat_path = flat_path
if method_description.get('supportsMediaUpload', False):
method_info.upload_config = self.__ComputeUploadConfig(
method_description.get('mediaUpload'), method_id)
method_info.supports_download = method_description.get(
'supportsMediaDownload', False)
self.__all_scopes.update(method_description.get('scopes', ()))
for param, desc in method_description.get('parameters', {}).items():
param = self.__names.CleanName(param)
location = desc['location']
if location == 'query':
method_info.query_params.append(param)
elif location == 'path':
method_info.path_params.append(param)
else:
raise ValueError(
'Unknown parameter location %s for parameter %s' % (
location, param))
method_info.path_params.sort()
method_info.query_params.sort()
return method_info | [
"def",
"__ComputeMethodInfo",
"(",
"self",
",",
"method_description",
",",
"request",
",",
"response",
",",
"request_field",
")",
":",
"relative_path",
"=",
"self",
".",
"__names",
".",
"NormalizeRelativePath",
"(",
"''",
".",
"join",
"(",
"(",
"self",
".",
... | Compute the base_api.ApiMethodInfo for this method. | [
"Compute",
"the",
"base_api",
".",
"ApiMethodInfo",
"for",
"this",
"method",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L371-L421 | train | 207,706 |
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.__GetRequestField | def __GetRequestField(self, method_description, body_type):
"""Determine the request field for this method."""
body_field_name = self.__BodyFieldName(body_type)
if body_field_name in method_description.get('parameters', {}):
body_field_name = self.__names.FieldName(
'%s_resource' % body_field_name)
# It's exceedingly unlikely that we'd get two name collisions, which
# means it's bound to happen at some point.
while body_field_name in method_description.get('parameters', {}):
body_field_name = self.__names.FieldName(
'%s_body' % body_field_name)
return body_field_name | python | def __GetRequestField(self, method_description, body_type):
"""Determine the request field for this method."""
body_field_name = self.__BodyFieldName(body_type)
if body_field_name in method_description.get('parameters', {}):
body_field_name = self.__names.FieldName(
'%s_resource' % body_field_name)
# It's exceedingly unlikely that we'd get two name collisions, which
# means it's bound to happen at some point.
while body_field_name in method_description.get('parameters', {}):
body_field_name = self.__names.FieldName(
'%s_body' % body_field_name)
return body_field_name | [
"def",
"__GetRequestField",
"(",
"self",
",",
"method_description",
",",
"body_type",
")",
":",
"body_field_name",
"=",
"self",
".",
"__BodyFieldName",
"(",
"body_type",
")",
"if",
"body_field_name",
"in",
"method_description",
".",
"get",
"(",
"'parameters'",
","... | Determine the request field for this method. | [
"Determine",
"the",
"request",
"field",
"for",
"this",
"method",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L431-L442 | train | 207,707 |
google/apitools | apitools/gen/service_registry.py | ServiceRegistry.AddServiceFromResource | def AddServiceFromResource(self, service_name, methods):
"""Add a new service named service_name with the given methods."""
service_name = self.__names.CleanName(service_name)
method_descriptions = methods.get('methods', {})
method_info_map = collections.OrderedDict()
items = sorted(method_descriptions.items())
for method_name, method_description in items:
method_name = self.__names.MethodName(method_name)
# NOTE: According to the discovery document, if the request or
# response is present, it will simply contain a `$ref`.
body_type = method_description.get('request')
if body_type is None:
request_type = None
else:
request_type = self.__GetRequestType(body_type)
if self.__NeedRequestType(method_description, request_type):
request = self.__CreateRequestType(
method_description, body_type=body_type)
request_field = self.__GetRequestField(
method_description, body_type)
else:
request = request_type
request_field = base_api.REQUEST_IS_BODY
if 'response' in method_description:
response = method_description['response']['$ref']
else:
response = self.__CreateVoidResponseType(method_description)
method_info_map[method_name] = self.__ComputeMethodInfo(
method_description, request, response, request_field)
nested_services = methods.get('resources', {})
services = sorted(nested_services.items())
for subservice_name, submethods in services:
new_service_name = '%s_%s' % (service_name, subservice_name)
self.AddServiceFromResource(new_service_name, submethods)
self.__RegisterService(service_name, method_info_map) | python | def AddServiceFromResource(self, service_name, methods):
"""Add a new service named service_name with the given methods."""
service_name = self.__names.CleanName(service_name)
method_descriptions = methods.get('methods', {})
method_info_map = collections.OrderedDict()
items = sorted(method_descriptions.items())
for method_name, method_description in items:
method_name = self.__names.MethodName(method_name)
# NOTE: According to the discovery document, if the request or
# response is present, it will simply contain a `$ref`.
body_type = method_description.get('request')
if body_type is None:
request_type = None
else:
request_type = self.__GetRequestType(body_type)
if self.__NeedRequestType(method_description, request_type):
request = self.__CreateRequestType(
method_description, body_type=body_type)
request_field = self.__GetRequestField(
method_description, body_type)
else:
request = request_type
request_field = base_api.REQUEST_IS_BODY
if 'response' in method_description:
response = method_description['response']['$ref']
else:
response = self.__CreateVoidResponseType(method_description)
method_info_map[method_name] = self.__ComputeMethodInfo(
method_description, request, response, request_field)
nested_services = methods.get('resources', {})
services = sorted(nested_services.items())
for subservice_name, submethods in services:
new_service_name = '%s_%s' % (service_name, subservice_name)
self.AddServiceFromResource(new_service_name, submethods)
self.__RegisterService(service_name, method_info_map) | [
"def",
"AddServiceFromResource",
"(",
"self",
",",
"service_name",
",",
"methods",
")",
":",
"service_name",
"=",
"self",
".",
"__names",
".",
"CleanName",
"(",
"service_name",
")",
"method_descriptions",
"=",
"methods",
".",
"get",
"(",
"'methods'",
",",
"{",... | Add a new service named service_name with the given methods. | [
"Add",
"a",
"new",
"service",
"named",
"service_name",
"with",
"the",
"given",
"methods",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/service_registry.py#L444-L483 | train | 207,708 |
google/apitools | apitools/base/py/gzip.py | compress | def compress(data, compresslevel=9):
"""Compress data in one shot and return the compressed string.
Optional argument is the compression level, in range of 0-9.
"""
buf = io.BytesIO()
with GzipFile(fileobj=buf, mode='wb', compresslevel=compresslevel) as f:
f.write(data)
return buf.getvalue() | python | def compress(data, compresslevel=9):
"""Compress data in one shot and return the compressed string.
Optional argument is the compression level, in range of 0-9.
"""
buf = io.BytesIO()
with GzipFile(fileobj=buf, mode='wb', compresslevel=compresslevel) as f:
f.write(data)
return buf.getvalue() | [
"def",
"compress",
"(",
"data",
",",
"compresslevel",
"=",
"9",
")",
":",
"buf",
"=",
"io",
".",
"BytesIO",
"(",
")",
"with",
"GzipFile",
"(",
"fileobj",
"=",
"buf",
",",
"mode",
"=",
"'wb'",
",",
"compresslevel",
"=",
"compresslevel",
")",
"as",
"f"... | Compress data in one shot and return the compressed string.
Optional argument is the compression level, in range of 0-9. | [
"Compress",
"data",
"in",
"one",
"shot",
"and",
"return",
"the",
"compressed",
"string",
".",
"Optional",
"argument",
"is",
"the",
"compression",
"level",
"in",
"range",
"of",
"0",
"-",
"9",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/gzip.py#L602-L609 | train | 207,709 |
google/apitools | apitools/base/py/gzip.py | decompress | def decompress(data):
"""Decompress a gzip compressed string in one shot.
Return the decompressed string.
"""
with GzipFile(fileobj=io.BytesIO(data)) as f:
return f.read() | python | def decompress(data):
"""Decompress a gzip compressed string in one shot.
Return the decompressed string.
"""
with GzipFile(fileobj=io.BytesIO(data)) as f:
return f.read() | [
"def",
"decompress",
"(",
"data",
")",
":",
"with",
"GzipFile",
"(",
"fileobj",
"=",
"io",
".",
"BytesIO",
"(",
"data",
")",
")",
"as",
"f",
":",
"return",
"f",
".",
"read",
"(",
")"
] | Decompress a gzip compressed string in one shot.
Return the decompressed string. | [
"Decompress",
"a",
"gzip",
"compressed",
"string",
"in",
"one",
"shot",
".",
"Return",
"the",
"decompressed",
"string",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/gzip.py#L612-L617 | train | 207,710 |
google/apitools | apitools/base/py/gzip.py | GzipFile.rewind | def rewind(self):
'''Return the uncompressed stream file position indicator to the
beginning of the file'''
if self.mode != READ:
raise OSError("Can't rewind in write mode")
self.fileobj.seek(0)
self._new_member = True
self.extrabuf = b""
self.extrasize = 0
self.extrastart = 0
self.offset = 0 | python | def rewind(self):
'''Return the uncompressed stream file position indicator to the
beginning of the file'''
if self.mode != READ:
raise OSError("Can't rewind in write mode")
self.fileobj.seek(0)
self._new_member = True
self.extrabuf = b""
self.extrasize = 0
self.extrastart = 0
self.offset = 0 | [
"def",
"rewind",
"(",
"self",
")",
":",
"if",
"self",
".",
"mode",
"!=",
"READ",
":",
"raise",
"OSError",
"(",
"\"Can't rewind in write mode\"",
")",
"self",
".",
"fileobj",
".",
"seek",
"(",
"0",
")",
"self",
".",
"_new_member",
"=",
"True",
"self",
"... | Return the uncompressed stream file position indicator to the
beginning of the file | [
"Return",
"the",
"uncompressed",
"stream",
"file",
"position",
"indicator",
"to",
"the",
"beginning",
"of",
"the",
"file"
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/gzip.py#L516-L526 | train | 207,711 |
google/apitools | apitools/base/py/http_wrapper.py | _Httplib2Debuglevel | def _Httplib2Debuglevel(http_request, level, http=None):
"""Temporarily change the value of httplib2.debuglevel, if necessary.
If http_request has a `loggable_body` distinct from `body`, then we
need to prevent httplib2 from logging the full body. This sets
httplib2.debuglevel for the duration of the `with` block; however,
that alone won't change the value of existing HTTP connections. If
an httplib2.Http object is provided, we'll also change the level on
any cached connections attached to it.
Args:
http_request: a Request we're logging.
level: (int) the debuglevel for logging.
http: (optional) an httplib2.Http whose connections we should
set the debuglevel on.
Yields:
None.
"""
if http_request.loggable_body is None:
yield
return
old_level = httplib2.debuglevel
http_levels = {}
httplib2.debuglevel = level
if http is not None:
for connection_key, connection in http.connections.items():
# httplib2 stores two kinds of values in this dict, connection
# classes and instances. Since the connection types are all
# old-style classes, we can't easily distinguish by connection
# type -- so instead we use the key pattern.
if ':' not in connection_key:
continue
http_levels[connection_key] = connection.debuglevel
connection.set_debuglevel(level)
yield
httplib2.debuglevel = old_level
if http is not None:
for connection_key, old_level in http_levels.items():
if connection_key in http.connections:
http.connections[connection_key].set_debuglevel(old_level) | python | def _Httplib2Debuglevel(http_request, level, http=None):
"""Temporarily change the value of httplib2.debuglevel, if necessary.
If http_request has a `loggable_body` distinct from `body`, then we
need to prevent httplib2 from logging the full body. This sets
httplib2.debuglevel for the duration of the `with` block; however,
that alone won't change the value of existing HTTP connections. If
an httplib2.Http object is provided, we'll also change the level on
any cached connections attached to it.
Args:
http_request: a Request we're logging.
level: (int) the debuglevel for logging.
http: (optional) an httplib2.Http whose connections we should
set the debuglevel on.
Yields:
None.
"""
if http_request.loggable_body is None:
yield
return
old_level = httplib2.debuglevel
http_levels = {}
httplib2.debuglevel = level
if http is not None:
for connection_key, connection in http.connections.items():
# httplib2 stores two kinds of values in this dict, connection
# classes and instances. Since the connection types are all
# old-style classes, we can't easily distinguish by connection
# type -- so instead we use the key pattern.
if ':' not in connection_key:
continue
http_levels[connection_key] = connection.debuglevel
connection.set_debuglevel(level)
yield
httplib2.debuglevel = old_level
if http is not None:
for connection_key, old_level in http_levels.items():
if connection_key in http.connections:
http.connections[connection_key].set_debuglevel(old_level) | [
"def",
"_Httplib2Debuglevel",
"(",
"http_request",
",",
"level",
",",
"http",
"=",
"None",
")",
":",
"if",
"http_request",
".",
"loggable_body",
"is",
"None",
":",
"yield",
"return",
"old_level",
"=",
"httplib2",
".",
"debuglevel",
"http_levels",
"=",
"{",
"... | Temporarily change the value of httplib2.debuglevel, if necessary.
If http_request has a `loggable_body` distinct from `body`, then we
need to prevent httplib2 from logging the full body. This sets
httplib2.debuglevel for the duration of the `with` block; however,
that alone won't change the value of existing HTTP connections. If
an httplib2.Http object is provided, we'll also change the level on
any cached connections attached to it.
Args:
http_request: a Request we're logging.
level: (int) the debuglevel for logging.
http: (optional) an httplib2.Http whose connections we should
set the debuglevel on.
Yields:
None. | [
"Temporarily",
"change",
"the",
"value",
"of",
"httplib2",
".",
"debuglevel",
"if",
"necessary",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/http_wrapper.py#L76-L116 | train | 207,712 |
google/apitools | apitools/base/py/http_wrapper.py | RebuildHttpConnections | def RebuildHttpConnections(http):
"""Rebuilds all http connections in the httplib2.Http instance.
httplib2 overloads the map in http.connections to contain two different
types of values:
{ scheme string: connection class } and
{ scheme + authority string : actual http connection }
Here we remove all of the entries for actual connections so that on the
next request httplib2 will rebuild them from the connection types.
Args:
http: An httplib2.Http instance.
"""
if getattr(http, 'connections', None):
for conn_key in list(http.connections.keys()):
if ':' in conn_key:
del http.connections[conn_key] | python | def RebuildHttpConnections(http):
"""Rebuilds all http connections in the httplib2.Http instance.
httplib2 overloads the map in http.connections to contain two different
types of values:
{ scheme string: connection class } and
{ scheme + authority string : actual http connection }
Here we remove all of the entries for actual connections so that on the
next request httplib2 will rebuild them from the connection types.
Args:
http: An httplib2.Http instance.
"""
if getattr(http, 'connections', None):
for conn_key in list(http.connections.keys()):
if ':' in conn_key:
del http.connections[conn_key] | [
"def",
"RebuildHttpConnections",
"(",
"http",
")",
":",
"if",
"getattr",
"(",
"http",
",",
"'connections'",
",",
"None",
")",
":",
"for",
"conn_key",
"in",
"list",
"(",
"http",
".",
"connections",
".",
"keys",
"(",
")",
")",
":",
"if",
"':'",
"in",
"... | Rebuilds all http connections in the httplib2.Http instance.
httplib2 overloads the map in http.connections to contain two different
types of values:
{ scheme string: connection class } and
{ scheme + authority string : actual http connection }
Here we remove all of the entries for actual connections so that on the
next request httplib2 will rebuild them from the connection types.
Args:
http: An httplib2.Http instance. | [
"Rebuilds",
"all",
"http",
"connections",
"in",
"the",
"httplib2",
".",
"Http",
"instance",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/http_wrapper.py#L228-L244 | train | 207,713 |
google/apitools | apitools/base/py/http_wrapper.py | HandleExceptionsAndRebuildHttpConnections | def HandleExceptionsAndRebuildHttpConnections(retry_args):
"""Exception handler for http failures.
This catches known failures and rebuilds the underlying HTTP connections.
Args:
retry_args: An ExceptionRetryArgs tuple.
"""
# If the server indicates how long to wait, use that value. Otherwise,
# calculate the wait time on our own.
retry_after = None
# Transport failures
if isinstance(retry_args.exc, (http_client.BadStatusLine,
http_client.IncompleteRead,
http_client.ResponseNotReady)):
logging.debug('Caught HTTP error %s, retrying: %s',
type(retry_args.exc).__name__, retry_args.exc)
elif isinstance(retry_args.exc, socket.error):
logging.debug('Caught socket error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, socket.gaierror):
logging.debug(
'Caught socket address error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, socket.timeout):
logging.debug(
'Caught socket timeout error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, httplib2.ServerNotFoundError):
logging.debug(
'Caught server not found error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, ValueError):
# oauth2client tries to JSON-decode the response, which can result
# in a ValueError if the response was invalid. Until that is fixed in
# oauth2client, need to handle it here.
logging.debug('Response content was invalid (%s), retrying',
retry_args.exc)
elif (isinstance(retry_args.exc, TokenRefreshError) and
hasattr(retry_args.exc, 'status') and
(retry_args.exc.status == TOO_MANY_REQUESTS or
retry_args.exc.status >= 500)):
logging.debug(
'Caught transient credential refresh error (%s), retrying',
retry_args.exc)
elif isinstance(retry_args.exc, exceptions.RequestError):
logging.debug('Request returned no response, retrying')
# API-level failures
elif isinstance(retry_args.exc, exceptions.BadStatusCodeError):
logging.debug('Response returned status %s, retrying',
retry_args.exc.status_code)
elif isinstance(retry_args.exc, exceptions.RetryAfterError):
logging.debug('Response returned a retry-after header, retrying')
retry_after = retry_args.exc.retry_after
else:
raise retry_args.exc
RebuildHttpConnections(retry_args.http)
logging.debug('Retrying request to url %s after exception %s',
retry_args.http_request.url, retry_args.exc)
time.sleep(
retry_after or util.CalculateWaitForRetry(
retry_args.num_retries, max_wait=retry_args.max_retry_wait)) | python | def HandleExceptionsAndRebuildHttpConnections(retry_args):
"""Exception handler for http failures.
This catches known failures and rebuilds the underlying HTTP connections.
Args:
retry_args: An ExceptionRetryArgs tuple.
"""
# If the server indicates how long to wait, use that value. Otherwise,
# calculate the wait time on our own.
retry_after = None
# Transport failures
if isinstance(retry_args.exc, (http_client.BadStatusLine,
http_client.IncompleteRead,
http_client.ResponseNotReady)):
logging.debug('Caught HTTP error %s, retrying: %s',
type(retry_args.exc).__name__, retry_args.exc)
elif isinstance(retry_args.exc, socket.error):
logging.debug('Caught socket error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, socket.gaierror):
logging.debug(
'Caught socket address error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, socket.timeout):
logging.debug(
'Caught socket timeout error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, httplib2.ServerNotFoundError):
logging.debug(
'Caught server not found error, retrying: %s', retry_args.exc)
elif isinstance(retry_args.exc, ValueError):
# oauth2client tries to JSON-decode the response, which can result
# in a ValueError if the response was invalid. Until that is fixed in
# oauth2client, need to handle it here.
logging.debug('Response content was invalid (%s), retrying',
retry_args.exc)
elif (isinstance(retry_args.exc, TokenRefreshError) and
hasattr(retry_args.exc, 'status') and
(retry_args.exc.status == TOO_MANY_REQUESTS or
retry_args.exc.status >= 500)):
logging.debug(
'Caught transient credential refresh error (%s), retrying',
retry_args.exc)
elif isinstance(retry_args.exc, exceptions.RequestError):
logging.debug('Request returned no response, retrying')
# API-level failures
elif isinstance(retry_args.exc, exceptions.BadStatusCodeError):
logging.debug('Response returned status %s, retrying',
retry_args.exc.status_code)
elif isinstance(retry_args.exc, exceptions.RetryAfterError):
logging.debug('Response returned a retry-after header, retrying')
retry_after = retry_args.exc.retry_after
else:
raise retry_args.exc
RebuildHttpConnections(retry_args.http)
logging.debug('Retrying request to url %s after exception %s',
retry_args.http_request.url, retry_args.exc)
time.sleep(
retry_after or util.CalculateWaitForRetry(
retry_args.num_retries, max_wait=retry_args.max_retry_wait)) | [
"def",
"HandleExceptionsAndRebuildHttpConnections",
"(",
"retry_args",
")",
":",
"# If the server indicates how long to wait, use that value. Otherwise,",
"# calculate the wait time on our own.",
"retry_after",
"=",
"None",
"# Transport failures",
"if",
"isinstance",
"(",
"retry_args"... | Exception handler for http failures.
This catches known failures and rebuilds the underlying HTTP connections.
Args:
retry_args: An ExceptionRetryArgs tuple. | [
"Exception",
"handler",
"for",
"http",
"failures",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/http_wrapper.py#L252-L310 | train | 207,714 |
google/apitools | apitools/base/py/http_wrapper.py | _MakeRequestNoRetry | def _MakeRequestNoRetry(http, http_request, redirections=5,
check_response_func=CheckResponse):
"""Send http_request via the given http.
This wrapper exists to handle translation between the plain httplib2
request/response types and the Request and Response types above.
Args:
http: An httplib2.Http instance, or a http multiplexer that delegates to
an underlying http, for example, HTTPMultiplexer.
http_request: A Request to send.
redirections: (int, default 5) Number of redirects to follow.
check_response_func: Function to validate the HTTP response.
Arguments are (Response, response content, url).
Returns:
A Response object.
Raises:
RequestError if no response could be parsed.
"""
connection_type = None
# Handle overrides for connection types. This is used if the caller
# wants control over the underlying connection for managing callbacks
# or hash digestion.
if getattr(http, 'connections', None):
url_scheme = parse.urlsplit(http_request.url).scheme
if url_scheme and url_scheme in http.connections:
connection_type = http.connections[url_scheme]
# Custom printing only at debuglevel 4
new_debuglevel = 4 if httplib2.debuglevel == 4 else 0
with _Httplib2Debuglevel(http_request, new_debuglevel, http=http):
info, content = http.request(
str(http_request.url), method=str(http_request.http_method),
body=http_request.body, headers=http_request.headers,
redirections=redirections, connection_type=connection_type)
if info is None:
raise exceptions.RequestError()
response = Response(info, content, http_request.url)
check_response_func(response)
return response | python | def _MakeRequestNoRetry(http, http_request, redirections=5,
check_response_func=CheckResponse):
"""Send http_request via the given http.
This wrapper exists to handle translation between the plain httplib2
request/response types and the Request and Response types above.
Args:
http: An httplib2.Http instance, or a http multiplexer that delegates to
an underlying http, for example, HTTPMultiplexer.
http_request: A Request to send.
redirections: (int, default 5) Number of redirects to follow.
check_response_func: Function to validate the HTTP response.
Arguments are (Response, response content, url).
Returns:
A Response object.
Raises:
RequestError if no response could be parsed.
"""
connection_type = None
# Handle overrides for connection types. This is used if the caller
# wants control over the underlying connection for managing callbacks
# or hash digestion.
if getattr(http, 'connections', None):
url_scheme = parse.urlsplit(http_request.url).scheme
if url_scheme and url_scheme in http.connections:
connection_type = http.connections[url_scheme]
# Custom printing only at debuglevel 4
new_debuglevel = 4 if httplib2.debuglevel == 4 else 0
with _Httplib2Debuglevel(http_request, new_debuglevel, http=http):
info, content = http.request(
str(http_request.url), method=str(http_request.http_method),
body=http_request.body, headers=http_request.headers,
redirections=redirections, connection_type=connection_type)
if info is None:
raise exceptions.RequestError()
response = Response(info, content, http_request.url)
check_response_func(response)
return response | [
"def",
"_MakeRequestNoRetry",
"(",
"http",
",",
"http_request",
",",
"redirections",
"=",
"5",
",",
"check_response_func",
"=",
"CheckResponse",
")",
":",
"connection_type",
"=",
"None",
"# Handle overrides for connection types. This is used if the caller",
"# wants control ... | Send http_request via the given http.
This wrapper exists to handle translation between the plain httplib2
request/response types and the Request and Response types above.
Args:
http: An httplib2.Http instance, or a http multiplexer that delegates to
an underlying http, for example, HTTPMultiplexer.
http_request: A Request to send.
redirections: (int, default 5) Number of redirects to follow.
check_response_func: Function to validate the HTTP response.
Arguments are (Response, response content, url).
Returns:
A Response object.
Raises:
RequestError if no response could be parsed. | [
"Send",
"http_request",
"via",
"the",
"given",
"http",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/http_wrapper.py#L359-L403 | train | 207,715 |
google/apitools | apitools/base/py/http_wrapper.py | Request.body | def body(self, value):
"""Sets the request body; handles logging and length measurement."""
self.__body = value
if value is not None:
# Avoid calling len() which cannot exceed 4GiB in 32-bit python.
body_length = getattr(
self.__body, 'length', None) or len(self.__body)
self.headers['content-length'] = str(body_length)
else:
self.headers.pop('content-length', None)
# This line ensures we don't try to print large requests.
if not isinstance(value, (type(None), six.string_types)):
self.loggable_body = '<media body>' | python | def body(self, value):
"""Sets the request body; handles logging and length measurement."""
self.__body = value
if value is not None:
# Avoid calling len() which cannot exceed 4GiB in 32-bit python.
body_length = getattr(
self.__body, 'length', None) or len(self.__body)
self.headers['content-length'] = str(body_length)
else:
self.headers.pop('content-length', None)
# This line ensures we don't try to print large requests.
if not isinstance(value, (type(None), six.string_types)):
self.loggable_body = '<media body>' | [
"def",
"body",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"__body",
"=",
"value",
"if",
"value",
"is",
"not",
"None",
":",
"# Avoid calling len() which cannot exceed 4GiB in 32-bit python.",
"body_length",
"=",
"getattr",
"(",
"self",
".",
"__body",
",",
... | Sets the request body; handles logging and length measurement. | [
"Sets",
"the",
"request",
"body",
";",
"handles",
"logging",
"and",
"length",
"measurement",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/http_wrapper.py#L147-L159 | train | 207,716 |
google/apitools | apitools/base/py/http_wrapper.py | Response.length | def length(self):
"""Return the length of this response.
We expose this as an attribute since using len() directly can fail
for responses larger than sys.maxint.
Returns:
Response length (as int or long)
"""
def ProcessContentRange(content_range):
_, _, range_spec = content_range.partition(' ')
byte_range, _, _ = range_spec.partition('/')
start, _, end = byte_range.partition('-')
return int(end) - int(start) + 1
if '-content-encoding' in self.info and 'content-range' in self.info:
# httplib2 rewrites content-length in the case of a compressed
# transfer; we can't trust the content-length header in that
# case, but we *can* trust content-range, if it's present.
return ProcessContentRange(self.info['content-range'])
elif 'content-length' in self.info:
return int(self.info.get('content-length'))
elif 'content-range' in self.info:
return ProcessContentRange(self.info['content-range'])
return len(self.content) | python | def length(self):
"""Return the length of this response.
We expose this as an attribute since using len() directly can fail
for responses larger than sys.maxint.
Returns:
Response length (as int or long)
"""
def ProcessContentRange(content_range):
_, _, range_spec = content_range.partition(' ')
byte_range, _, _ = range_spec.partition('/')
start, _, end = byte_range.partition('-')
return int(end) - int(start) + 1
if '-content-encoding' in self.info and 'content-range' in self.info:
# httplib2 rewrites content-length in the case of a compressed
# transfer; we can't trust the content-length header in that
# case, but we *can* trust content-range, if it's present.
return ProcessContentRange(self.info['content-range'])
elif 'content-length' in self.info:
return int(self.info.get('content-length'))
elif 'content-range' in self.info:
return ProcessContentRange(self.info['content-range'])
return len(self.content) | [
"def",
"length",
"(",
"self",
")",
":",
"def",
"ProcessContentRange",
"(",
"content_range",
")",
":",
"_",
",",
"_",
",",
"range_spec",
"=",
"content_range",
".",
"partition",
"(",
"' '",
")",
"byte_range",
",",
"_",
",",
"_",
"=",
"range_spec",
".",
"... | Return the length of this response.
We expose this as an attribute since using len() directly can fail
for responses larger than sys.maxint.
Returns:
Response length (as int or long) | [
"Return",
"the",
"length",
"of",
"this",
"response",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/http_wrapper.py#L174-L198 | train | 207,717 |
google/apitools | apitools/base/py/buffered_stream.py | BufferedStream.read | def read(self, size=None): # pylint: disable=invalid-name
"""Reads from the buffer."""
if size is None or size < 0:
raise exceptions.NotYetImplementedError(
'Illegal read of size %s requested on BufferedStream. '
'Wrapped stream %s is at position %s-%s, '
'%s bytes remaining.' %
(size, self.__stream, self.__start_pos, self.__end_pos,
self._bytes_remaining))
data = ''
if self._bytes_remaining:
size = min(size, self._bytes_remaining)
data = self.__buffered_data[
self.__buffer_pos:self.__buffer_pos + size]
self.__buffer_pos += size
return data | python | def read(self, size=None): # pylint: disable=invalid-name
"""Reads from the buffer."""
if size is None or size < 0:
raise exceptions.NotYetImplementedError(
'Illegal read of size %s requested on BufferedStream. '
'Wrapped stream %s is at position %s-%s, '
'%s bytes remaining.' %
(size, self.__stream, self.__start_pos, self.__end_pos,
self._bytes_remaining))
data = ''
if self._bytes_remaining:
size = min(size, self._bytes_remaining)
data = self.__buffered_data[
self.__buffer_pos:self.__buffer_pos + size]
self.__buffer_pos += size
return data | [
"def",
"read",
"(",
"self",
",",
"size",
"=",
"None",
")",
":",
"# pylint: disable=invalid-name",
"if",
"size",
"is",
"None",
"or",
"size",
"<",
"0",
":",
"raise",
"exceptions",
".",
"NotYetImplementedError",
"(",
"'Illegal read of size %s requested on BufferedStrea... | Reads from the buffer. | [
"Reads",
"from",
"the",
"buffer",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/buffered_stream.py#L58-L74 | train | 207,718 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.WriteProtoFile | def WriteProtoFile(self, printer):
"""Write the messages file to out as proto."""
self.Validate()
extended_descriptor.WriteMessagesFile(
self.__file_descriptor, self.__package, self.__client_info.version,
printer) | python | def WriteProtoFile(self, printer):
"""Write the messages file to out as proto."""
self.Validate()
extended_descriptor.WriteMessagesFile(
self.__file_descriptor, self.__package, self.__client_info.version,
printer) | [
"def",
"WriteProtoFile",
"(",
"self",
",",
"printer",
")",
":",
"self",
".",
"Validate",
"(",
")",
"extended_descriptor",
".",
"WriteMessagesFile",
"(",
"self",
".",
"__file_descriptor",
",",
"self",
".",
"__package",
",",
"self",
".",
"__client_info",
".",
... | Write the messages file to out as proto. | [
"Write",
"the",
"messages",
"file",
"to",
"out",
"as",
"proto",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L113-L118 | train | 207,719 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.WriteFile | def WriteFile(self, printer):
"""Write the messages file to out."""
self.Validate()
extended_descriptor.WritePythonFile(
self.__file_descriptor, self.__package, self.__client_info.version,
printer) | python | def WriteFile(self, printer):
"""Write the messages file to out."""
self.Validate()
extended_descriptor.WritePythonFile(
self.__file_descriptor, self.__package, self.__client_info.version,
printer) | [
"def",
"WriteFile",
"(",
"self",
",",
"printer",
")",
":",
"self",
".",
"Validate",
"(",
")",
"extended_descriptor",
".",
"WritePythonFile",
"(",
"self",
".",
"__file_descriptor",
",",
"self",
".",
"__package",
",",
"self",
".",
"__client_info",
".",
"versio... | Write the messages file to out. | [
"Write",
"the",
"messages",
"file",
"to",
"out",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L120-L125 | train | 207,720 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.__RegisterDescriptor | def __RegisterDescriptor(self, new_descriptor):
"""Register the given descriptor in this registry."""
if not isinstance(new_descriptor, (
extended_descriptor.ExtendedMessageDescriptor,
extended_descriptor.ExtendedEnumDescriptor)):
raise ValueError('Cannot add descriptor of type %s' % (
type(new_descriptor),))
full_name = self.__ComputeFullName(new_descriptor.name)
if full_name in self.__message_registry:
raise ValueError(
'Attempt to re-register descriptor %s' % full_name)
if full_name not in self.__nascent_types:
raise ValueError('Directly adding types is not supported')
new_descriptor.full_name = full_name
self.__message_registry[full_name] = new_descriptor
if isinstance(new_descriptor,
extended_descriptor.ExtendedMessageDescriptor):
self.__current_env.message_types.append(new_descriptor)
elif isinstance(new_descriptor,
extended_descriptor.ExtendedEnumDescriptor):
self.__current_env.enum_types.append(new_descriptor)
self.__unknown_types.discard(full_name)
self.__nascent_types.remove(full_name) | python | def __RegisterDescriptor(self, new_descriptor):
"""Register the given descriptor in this registry."""
if not isinstance(new_descriptor, (
extended_descriptor.ExtendedMessageDescriptor,
extended_descriptor.ExtendedEnumDescriptor)):
raise ValueError('Cannot add descriptor of type %s' % (
type(new_descriptor),))
full_name = self.__ComputeFullName(new_descriptor.name)
if full_name in self.__message_registry:
raise ValueError(
'Attempt to re-register descriptor %s' % full_name)
if full_name not in self.__nascent_types:
raise ValueError('Directly adding types is not supported')
new_descriptor.full_name = full_name
self.__message_registry[full_name] = new_descriptor
if isinstance(new_descriptor,
extended_descriptor.ExtendedMessageDescriptor):
self.__current_env.message_types.append(new_descriptor)
elif isinstance(new_descriptor,
extended_descriptor.ExtendedEnumDescriptor):
self.__current_env.enum_types.append(new_descriptor)
self.__unknown_types.discard(full_name)
self.__nascent_types.remove(full_name) | [
"def",
"__RegisterDescriptor",
"(",
"self",
",",
"new_descriptor",
")",
":",
"if",
"not",
"isinstance",
"(",
"new_descriptor",
",",
"(",
"extended_descriptor",
".",
"ExtendedMessageDescriptor",
",",
"extended_descriptor",
".",
"ExtendedEnumDescriptor",
")",
")",
":",
... | Register the given descriptor in this registry. | [
"Register",
"the",
"given",
"descriptor",
"in",
"this",
"registry",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L142-L164 | train | 207,721 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.AddEnumDescriptor | def AddEnumDescriptor(self, name, description,
enum_values, enum_descriptions):
"""Add a new EnumDescriptor named name with the given enum values."""
message = extended_descriptor.ExtendedEnumDescriptor()
message.name = self.__names.ClassName(name)
message.description = util.CleanDescription(description)
self.__DeclareDescriptor(message.name)
for index, (enum_name, enum_description) in enumerate(
zip(enum_values, enum_descriptions)):
enum_value = extended_descriptor.ExtendedEnumValueDescriptor()
enum_value.name = self.__names.NormalizeEnumName(enum_name)
if enum_value.name != enum_name:
message.enum_mappings.append(
extended_descriptor.ExtendedEnumDescriptor.JsonEnumMapping(
python_name=enum_value.name, json_name=enum_name))
self.__AddImport('from %s import encoding' %
self.__base_files_package)
enum_value.number = index
enum_value.description = util.CleanDescription(
enum_description or '<no description>')
message.values.append(enum_value)
self.__RegisterDescriptor(message) | python | def AddEnumDescriptor(self, name, description,
enum_values, enum_descriptions):
"""Add a new EnumDescriptor named name with the given enum values."""
message = extended_descriptor.ExtendedEnumDescriptor()
message.name = self.__names.ClassName(name)
message.description = util.CleanDescription(description)
self.__DeclareDescriptor(message.name)
for index, (enum_name, enum_description) in enumerate(
zip(enum_values, enum_descriptions)):
enum_value = extended_descriptor.ExtendedEnumValueDescriptor()
enum_value.name = self.__names.NormalizeEnumName(enum_name)
if enum_value.name != enum_name:
message.enum_mappings.append(
extended_descriptor.ExtendedEnumDescriptor.JsonEnumMapping(
python_name=enum_value.name, json_name=enum_name))
self.__AddImport('from %s import encoding' %
self.__base_files_package)
enum_value.number = index
enum_value.description = util.CleanDescription(
enum_description or '<no description>')
message.values.append(enum_value)
self.__RegisterDescriptor(message) | [
"def",
"AddEnumDescriptor",
"(",
"self",
",",
"name",
",",
"description",
",",
"enum_values",
",",
"enum_descriptions",
")",
":",
"message",
"=",
"extended_descriptor",
".",
"ExtendedEnumDescriptor",
"(",
")",
"message",
".",
"name",
"=",
"self",
".",
"__names",... | Add a new EnumDescriptor named name with the given enum values. | [
"Add",
"a",
"new",
"EnumDescriptor",
"named",
"name",
"with",
"the",
"given",
"enum",
"values",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L196-L217 | train | 207,722 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.__DeclareMessageAlias | def __DeclareMessageAlias(self, schema, alias_for):
"""Declare schema as an alias for alias_for."""
# TODO(craigcitro): This is a hack. Remove it.
message = extended_descriptor.ExtendedMessageDescriptor()
message.name = self.__names.ClassName(schema['id'])
message.alias_for = alias_for
self.__DeclareDescriptor(message.name)
self.__AddImport('from %s import extra_types' %
self.__base_files_package)
self.__RegisterDescriptor(message) | python | def __DeclareMessageAlias(self, schema, alias_for):
"""Declare schema as an alias for alias_for."""
# TODO(craigcitro): This is a hack. Remove it.
message = extended_descriptor.ExtendedMessageDescriptor()
message.name = self.__names.ClassName(schema['id'])
message.alias_for = alias_for
self.__DeclareDescriptor(message.name)
self.__AddImport('from %s import extra_types' %
self.__base_files_package)
self.__RegisterDescriptor(message) | [
"def",
"__DeclareMessageAlias",
"(",
"self",
",",
"schema",
",",
"alias_for",
")",
":",
"# TODO(craigcitro): This is a hack. Remove it.",
"message",
"=",
"extended_descriptor",
".",
"ExtendedMessageDescriptor",
"(",
")",
"message",
".",
"name",
"=",
"self",
".",
"__na... | Declare schema as an alias for alias_for. | [
"Declare",
"schema",
"as",
"an",
"alias",
"for",
"alias_for",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L219-L228 | train | 207,723 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.__AddAdditionalProperties | def __AddAdditionalProperties(self, message, schema, properties):
"""Add an additionalProperties field to message."""
additional_properties_info = schema['additionalProperties']
entries_type_name = self.__AddAdditionalPropertyType(
message.name, additional_properties_info)
description = util.CleanDescription(
additional_properties_info.get('description'))
if description is None:
description = 'Additional properties of type %s' % message.name
attrs = {
'items': {
'$ref': entries_type_name,
},
'description': description,
'type': 'array',
}
field_name = 'additionalProperties'
message.fields.append(self.__FieldDescriptorFromProperties(
field_name, len(properties) + 1, attrs))
self.__AddImport('from %s import encoding' % self.__base_files_package)
message.decorators.append(
'encoding.MapUnrecognizedFields(%r)' % field_name) | python | def __AddAdditionalProperties(self, message, schema, properties):
"""Add an additionalProperties field to message."""
additional_properties_info = schema['additionalProperties']
entries_type_name = self.__AddAdditionalPropertyType(
message.name, additional_properties_info)
description = util.CleanDescription(
additional_properties_info.get('description'))
if description is None:
description = 'Additional properties of type %s' % message.name
attrs = {
'items': {
'$ref': entries_type_name,
},
'description': description,
'type': 'array',
}
field_name = 'additionalProperties'
message.fields.append(self.__FieldDescriptorFromProperties(
field_name, len(properties) + 1, attrs))
self.__AddImport('from %s import encoding' % self.__base_files_package)
message.decorators.append(
'encoding.MapUnrecognizedFields(%r)' % field_name) | [
"def",
"__AddAdditionalProperties",
"(",
"self",
",",
"message",
",",
"schema",
",",
"properties",
")",
":",
"additional_properties_info",
"=",
"schema",
"[",
"'additionalProperties'",
"]",
"entries_type_name",
"=",
"self",
".",
"__AddAdditionalPropertyType",
"(",
"me... | Add an additionalProperties field to message. | [
"Add",
"an",
"additionalProperties",
"field",
"to",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L230-L251 | train | 207,724 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.AddDescriptorFromSchema | def AddDescriptorFromSchema(self, schema_name, schema):
"""Add a new MessageDescriptor named schema_name based on schema."""
# TODO(craigcitro): Is schema_name redundant?
if self.__GetDescriptor(schema_name):
return
if schema.get('enum'):
self.__DeclareEnum(schema_name, schema)
return
if schema.get('type') == 'any':
self.__DeclareMessageAlias(schema, 'extra_types.JsonValue')
return
if schema.get('type') != 'object':
raise ValueError('Cannot create message descriptors for type %s' %
schema.get('type'))
message = extended_descriptor.ExtendedMessageDescriptor()
message.name = self.__names.ClassName(schema['id'])
message.description = util.CleanDescription(schema.get(
'description', 'A %s object.' % message.name))
self.__DeclareDescriptor(message.name)
with self.__DescriptorEnv(message):
properties = schema.get('properties', {})
for index, (name, attrs) in enumerate(sorted(properties.items())):
field = self.__FieldDescriptorFromProperties(
name, index + 1, attrs)
message.fields.append(field)
if field.name != name:
message.field_mappings.append(
type(message).JsonFieldMapping(
python_name=field.name, json_name=name))
self.__AddImport(
'from %s import encoding' % self.__base_files_package)
if 'additionalProperties' in schema:
self.__AddAdditionalProperties(message, schema, properties)
self.__RegisterDescriptor(message) | python | def AddDescriptorFromSchema(self, schema_name, schema):
"""Add a new MessageDescriptor named schema_name based on schema."""
# TODO(craigcitro): Is schema_name redundant?
if self.__GetDescriptor(schema_name):
return
if schema.get('enum'):
self.__DeclareEnum(schema_name, schema)
return
if schema.get('type') == 'any':
self.__DeclareMessageAlias(schema, 'extra_types.JsonValue')
return
if schema.get('type') != 'object':
raise ValueError('Cannot create message descriptors for type %s' %
schema.get('type'))
message = extended_descriptor.ExtendedMessageDescriptor()
message.name = self.__names.ClassName(schema['id'])
message.description = util.CleanDescription(schema.get(
'description', 'A %s object.' % message.name))
self.__DeclareDescriptor(message.name)
with self.__DescriptorEnv(message):
properties = schema.get('properties', {})
for index, (name, attrs) in enumerate(sorted(properties.items())):
field = self.__FieldDescriptorFromProperties(
name, index + 1, attrs)
message.fields.append(field)
if field.name != name:
message.field_mappings.append(
type(message).JsonFieldMapping(
python_name=field.name, json_name=name))
self.__AddImport(
'from %s import encoding' % self.__base_files_package)
if 'additionalProperties' in schema:
self.__AddAdditionalProperties(message, schema, properties)
self.__RegisterDescriptor(message) | [
"def",
"AddDescriptorFromSchema",
"(",
"self",
",",
"schema_name",
",",
"schema",
")",
":",
"# TODO(craigcitro): Is schema_name redundant?",
"if",
"self",
".",
"__GetDescriptor",
"(",
"schema_name",
")",
":",
"return",
"if",
"schema",
".",
"get",
"(",
"'enum'",
")... | Add a new MessageDescriptor named schema_name based on schema. | [
"Add",
"a",
"new",
"MessageDescriptor",
"named",
"schema_name",
"based",
"on",
"schema",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L253-L286 | train | 207,725 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.__AddAdditionalPropertyType | def __AddAdditionalPropertyType(self, name, property_schema):
"""Add a new nested AdditionalProperty message."""
new_type_name = 'AdditionalProperty'
property_schema = dict(property_schema)
# We drop the description here on purpose, so the resulting
# messages are less repetitive.
property_schema.pop('description', None)
description = 'An additional property for a %s object.' % name
schema = {
'id': new_type_name,
'type': 'object',
'description': description,
'properties': {
'key': {
'type': 'string',
'description': 'Name of the additional property.',
},
'value': property_schema,
},
}
self.AddDescriptorFromSchema(new_type_name, schema)
return new_type_name | python | def __AddAdditionalPropertyType(self, name, property_schema):
"""Add a new nested AdditionalProperty message."""
new_type_name = 'AdditionalProperty'
property_schema = dict(property_schema)
# We drop the description here on purpose, so the resulting
# messages are less repetitive.
property_schema.pop('description', None)
description = 'An additional property for a %s object.' % name
schema = {
'id': new_type_name,
'type': 'object',
'description': description,
'properties': {
'key': {
'type': 'string',
'description': 'Name of the additional property.',
},
'value': property_schema,
},
}
self.AddDescriptorFromSchema(new_type_name, schema)
return new_type_name | [
"def",
"__AddAdditionalPropertyType",
"(",
"self",
",",
"name",
",",
"property_schema",
")",
":",
"new_type_name",
"=",
"'AdditionalProperty'",
"property_schema",
"=",
"dict",
"(",
"property_schema",
")",
"# We drop the description here on purpose, so the resulting",
"# messa... | Add a new nested AdditionalProperty message. | [
"Add",
"a",
"new",
"nested",
"AdditionalProperty",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L288-L309 | train | 207,726 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.__AddEntryType | def __AddEntryType(self, entry_type_name, entry_schema, parent_name):
"""Add a type for a list entry."""
entry_schema.pop('description', None)
description = 'Single entry in a %s.' % parent_name
schema = {
'id': entry_type_name,
'type': 'object',
'description': description,
'properties': {
'entry': {
'type': 'array',
'items': entry_schema,
},
},
}
self.AddDescriptorFromSchema(entry_type_name, schema)
return entry_type_name | python | def __AddEntryType(self, entry_type_name, entry_schema, parent_name):
"""Add a type for a list entry."""
entry_schema.pop('description', None)
description = 'Single entry in a %s.' % parent_name
schema = {
'id': entry_type_name,
'type': 'object',
'description': description,
'properties': {
'entry': {
'type': 'array',
'items': entry_schema,
},
},
}
self.AddDescriptorFromSchema(entry_type_name, schema)
return entry_type_name | [
"def",
"__AddEntryType",
"(",
"self",
",",
"entry_type_name",
",",
"entry_schema",
",",
"parent_name",
")",
":",
"entry_schema",
".",
"pop",
"(",
"'description'",
",",
"None",
")",
"description",
"=",
"'Single entry in a %s.'",
"%",
"parent_name",
"schema",
"=",
... | Add a type for a list entry. | [
"Add",
"a",
"type",
"for",
"a",
"list",
"entry",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L311-L327 | train | 207,727 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.__FieldDescriptorFromProperties | def __FieldDescriptorFromProperties(self, name, index, attrs):
"""Create a field descriptor for these attrs."""
field = descriptor.FieldDescriptor()
field.name = self.__names.CleanName(name)
field.number = index
field.label = self.__ComputeLabel(attrs)
new_type_name_hint = self.__names.ClassName(
'%sValue' % self.__names.ClassName(name))
type_info = self.__GetTypeInfo(attrs, new_type_name_hint)
field.type_name = type_info.type_name
field.variant = type_info.variant
if 'default' in attrs:
# TODO(craigcitro): Correctly handle non-primitive default values.
default = attrs['default']
if not (field.type_name == 'string' or
field.variant == messages.Variant.ENUM):
default = str(json.loads(default))
if field.variant == messages.Variant.ENUM:
default = self.__names.NormalizeEnumName(default)
field.default_value = default
extended_field = extended_descriptor.ExtendedFieldDescriptor()
extended_field.name = field.name
extended_field.description = util.CleanDescription(
attrs.get('description', 'A %s attribute.' % field.type_name))
extended_field.field_descriptor = field
return extended_field | python | def __FieldDescriptorFromProperties(self, name, index, attrs):
"""Create a field descriptor for these attrs."""
field = descriptor.FieldDescriptor()
field.name = self.__names.CleanName(name)
field.number = index
field.label = self.__ComputeLabel(attrs)
new_type_name_hint = self.__names.ClassName(
'%sValue' % self.__names.ClassName(name))
type_info = self.__GetTypeInfo(attrs, new_type_name_hint)
field.type_name = type_info.type_name
field.variant = type_info.variant
if 'default' in attrs:
# TODO(craigcitro): Correctly handle non-primitive default values.
default = attrs['default']
if not (field.type_name == 'string' or
field.variant == messages.Variant.ENUM):
default = str(json.loads(default))
if field.variant == messages.Variant.ENUM:
default = self.__names.NormalizeEnumName(default)
field.default_value = default
extended_field = extended_descriptor.ExtendedFieldDescriptor()
extended_field.name = field.name
extended_field.description = util.CleanDescription(
attrs.get('description', 'A %s attribute.' % field.type_name))
extended_field.field_descriptor = field
return extended_field | [
"def",
"__FieldDescriptorFromProperties",
"(",
"self",
",",
"name",
",",
"index",
",",
"attrs",
")",
":",
"field",
"=",
"descriptor",
".",
"FieldDescriptor",
"(",
")",
"field",
".",
"name",
"=",
"self",
".",
"__names",
".",
"CleanName",
"(",
"name",
")",
... | Create a field descriptor for these attrs. | [
"Create",
"a",
"field",
"descriptor",
"for",
"these",
"attrs",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L329-L354 | train | 207,728 |
google/apitools | apitools/gen/message_registry.py | MessageRegistry.__GetTypeInfo | def __GetTypeInfo(self, attrs, name_hint):
"""Return a TypeInfo object for attrs, creating one if needed."""
type_ref = self.__names.ClassName(attrs.get('$ref'))
type_name = attrs.get('type')
if not (type_ref or type_name):
raise ValueError('No type found for %s' % attrs)
if type_ref:
self.__AddIfUnknown(type_ref)
# We don't actually know this is a message -- it might be an
# enum. However, we can't check that until we've created all the
# types, so we come back and fix this up later.
return TypeInfo(
type_name=type_ref, variant=messages.Variant.MESSAGE)
if 'enum' in attrs:
enum_name = '%sValuesEnum' % name_hint
return self.__DeclareEnum(enum_name, attrs)
if 'format' in attrs:
type_info = self.PRIMITIVE_FORMAT_MAP.get(attrs['format'])
if type_info is None:
# If we don't recognize the format, the spec says we fall back
# to just using the type name.
if type_name in self.PRIMITIVE_TYPE_INFO_MAP:
return self.PRIMITIVE_TYPE_INFO_MAP[type_name]
raise ValueError('Unknown type/format "%s"/"%s"' % (
attrs['format'], type_name))
if type_info.type_name.startswith((
'apitools.base.protorpclite.message_types.',
'message_types.')):
self.__AddImport(
'from %s import message_types as _message_types' %
self.__protorpc_package)
if type_info.type_name.startswith('extra_types.'):
self.__AddImport(
'from %s import extra_types' % self.__base_files_package)
return type_info
if type_name in self.PRIMITIVE_TYPE_INFO_MAP:
type_info = self.PRIMITIVE_TYPE_INFO_MAP[type_name]
if type_info.type_name.startswith('extra_types.'):
self.__AddImport(
'from %s import extra_types' % self.__base_files_package)
return type_info
if type_name == 'array':
items = attrs.get('items')
if not items:
raise ValueError('Array type with no item type: %s' % attrs)
entry_name_hint = self.__names.ClassName(
items.get('title') or '%sListEntry' % name_hint)
entry_label = self.__ComputeLabel(items)
if entry_label == descriptor.FieldDescriptor.Label.REPEATED:
parent_name = self.__names.ClassName(
items.get('title') or name_hint)
entry_type_name = self.__AddEntryType(
entry_name_hint, items.get('items'), parent_name)
return TypeInfo(type_name=entry_type_name,
variant=messages.Variant.MESSAGE)
return self.__GetTypeInfo(items, entry_name_hint)
elif type_name == 'any':
self.__AddImport('from %s import extra_types' %
self.__base_files_package)
return self.PRIMITIVE_TYPE_INFO_MAP['any']
elif type_name == 'object':
# TODO(craigcitro): Think of a better way to come up with names.
if not name_hint:
raise ValueError(
'Cannot create subtype without some name hint')
schema = dict(attrs)
schema['id'] = name_hint
self.AddDescriptorFromSchema(name_hint, schema)
self.__AddIfUnknown(name_hint)
return TypeInfo(
type_name=name_hint, variant=messages.Variant.MESSAGE)
raise ValueError('Unknown type: %s' % type_name) | python | def __GetTypeInfo(self, attrs, name_hint):
"""Return a TypeInfo object for attrs, creating one if needed."""
type_ref = self.__names.ClassName(attrs.get('$ref'))
type_name = attrs.get('type')
if not (type_ref or type_name):
raise ValueError('No type found for %s' % attrs)
if type_ref:
self.__AddIfUnknown(type_ref)
# We don't actually know this is a message -- it might be an
# enum. However, we can't check that until we've created all the
# types, so we come back and fix this up later.
return TypeInfo(
type_name=type_ref, variant=messages.Variant.MESSAGE)
if 'enum' in attrs:
enum_name = '%sValuesEnum' % name_hint
return self.__DeclareEnum(enum_name, attrs)
if 'format' in attrs:
type_info = self.PRIMITIVE_FORMAT_MAP.get(attrs['format'])
if type_info is None:
# If we don't recognize the format, the spec says we fall back
# to just using the type name.
if type_name in self.PRIMITIVE_TYPE_INFO_MAP:
return self.PRIMITIVE_TYPE_INFO_MAP[type_name]
raise ValueError('Unknown type/format "%s"/"%s"' % (
attrs['format'], type_name))
if type_info.type_name.startswith((
'apitools.base.protorpclite.message_types.',
'message_types.')):
self.__AddImport(
'from %s import message_types as _message_types' %
self.__protorpc_package)
if type_info.type_name.startswith('extra_types.'):
self.__AddImport(
'from %s import extra_types' % self.__base_files_package)
return type_info
if type_name in self.PRIMITIVE_TYPE_INFO_MAP:
type_info = self.PRIMITIVE_TYPE_INFO_MAP[type_name]
if type_info.type_name.startswith('extra_types.'):
self.__AddImport(
'from %s import extra_types' % self.__base_files_package)
return type_info
if type_name == 'array':
items = attrs.get('items')
if not items:
raise ValueError('Array type with no item type: %s' % attrs)
entry_name_hint = self.__names.ClassName(
items.get('title') or '%sListEntry' % name_hint)
entry_label = self.__ComputeLabel(items)
if entry_label == descriptor.FieldDescriptor.Label.REPEATED:
parent_name = self.__names.ClassName(
items.get('title') or name_hint)
entry_type_name = self.__AddEntryType(
entry_name_hint, items.get('items'), parent_name)
return TypeInfo(type_name=entry_type_name,
variant=messages.Variant.MESSAGE)
return self.__GetTypeInfo(items, entry_name_hint)
elif type_name == 'any':
self.__AddImport('from %s import extra_types' %
self.__base_files_package)
return self.PRIMITIVE_TYPE_INFO_MAP['any']
elif type_name == 'object':
# TODO(craigcitro): Think of a better way to come up with names.
if not name_hint:
raise ValueError(
'Cannot create subtype without some name hint')
schema = dict(attrs)
schema['id'] = name_hint
self.AddDescriptorFromSchema(name_hint, schema)
self.__AddIfUnknown(name_hint)
return TypeInfo(
type_name=name_hint, variant=messages.Variant.MESSAGE)
raise ValueError('Unknown type: %s' % type_name) | [
"def",
"__GetTypeInfo",
"(",
"self",
",",
"attrs",
",",
"name_hint",
")",
":",
"type_ref",
"=",
"self",
".",
"__names",
".",
"ClassName",
"(",
"attrs",
".",
"get",
"(",
"'$ref'",
")",
")",
"type_name",
"=",
"attrs",
".",
"get",
"(",
"'type'",
")",
"i... | Return a TypeInfo object for attrs, creating one if needed. | [
"Return",
"a",
"TypeInfo",
"object",
"for",
"attrs",
"creating",
"one",
"if",
"needed",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/message_registry.py#L383-L461 | train | 207,729 |
google/apitools | apitools/gen/gen_client.py | _GetDiscoveryDocFromFlags | def _GetDiscoveryDocFromFlags(args):
"""Get the discovery doc from flags."""
if args.discovery_url:
try:
return util.FetchDiscoveryDoc(args.discovery_url)
except exceptions.CommunicationError:
raise exceptions.GeneratedClientError(
'Could not fetch discovery doc')
infile = os.path.expanduser(args.infile) or '/dev/stdin'
with io.open(infile, encoding='utf8') as f:
return json.loads(util.ReplaceHomoglyphs(f.read())) | python | def _GetDiscoveryDocFromFlags(args):
"""Get the discovery doc from flags."""
if args.discovery_url:
try:
return util.FetchDiscoveryDoc(args.discovery_url)
except exceptions.CommunicationError:
raise exceptions.GeneratedClientError(
'Could not fetch discovery doc')
infile = os.path.expanduser(args.infile) or '/dev/stdin'
with io.open(infile, encoding='utf8') as f:
return json.loads(util.ReplaceHomoglyphs(f.read())) | [
"def",
"_GetDiscoveryDocFromFlags",
"(",
"args",
")",
":",
"if",
"args",
".",
"discovery_url",
":",
"try",
":",
"return",
"util",
".",
"FetchDiscoveryDoc",
"(",
"args",
".",
"discovery_url",
")",
"except",
"exceptions",
".",
"CommunicationError",
":",
"raise",
... | Get the discovery doc from flags. | [
"Get",
"the",
"discovery",
"doc",
"from",
"flags",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/gen_client.py#L43-L54 | train | 207,730 |
google/apitools | apitools/gen/gen_client.py | _GetCodegenFromFlags | def _GetCodegenFromFlags(args):
"""Create a codegen object from flags."""
discovery_doc = _GetDiscoveryDocFromFlags(args)
names = util.Names(
args.strip_prefix,
args.experimental_name_convention,
args.experimental_capitalize_enums)
if args.client_json:
try:
with io.open(args.client_json, encoding='utf8') as client_json:
f = json.loads(util.ReplaceHomoglyphs(client_json.read()))
web = f.get('installed', f.get('web', {}))
client_id = web.get('client_id')
client_secret = web.get('client_secret')
except IOError:
raise exceptions.NotFoundError(
'Failed to open client json file: %s' % args.client_json)
else:
client_id = args.client_id
client_secret = args.client_secret
if not client_id:
logging.warning('No client ID supplied')
client_id = ''
if not client_secret:
logging.warning('No client secret supplied')
client_secret = ''
client_info = util.ClientInfo.Create(
discovery_doc, args.scope, client_id, client_secret,
args.user_agent, names, args.api_key)
outdir = os.path.expanduser(args.outdir) or client_info.default_directory
if os.path.exists(outdir) and not args.overwrite:
raise exceptions.ConfigurationValueError(
'Output directory exists, pass --overwrite to replace '
'the existing files.')
if not os.path.exists(outdir):
os.makedirs(outdir)
return gen_client_lib.DescriptorGenerator(
discovery_doc, client_info, names, args.root_package, outdir,
base_package=args.base_package,
protorpc_package=args.protorpc_package,
init_wildcards_file=(args.init_file == 'wildcards'),
use_proto2=args.experimental_proto2_output,
unelidable_request_methods=args.unelidable_request_methods,
apitools_version=args.apitools_version) | python | def _GetCodegenFromFlags(args):
"""Create a codegen object from flags."""
discovery_doc = _GetDiscoveryDocFromFlags(args)
names = util.Names(
args.strip_prefix,
args.experimental_name_convention,
args.experimental_capitalize_enums)
if args.client_json:
try:
with io.open(args.client_json, encoding='utf8') as client_json:
f = json.loads(util.ReplaceHomoglyphs(client_json.read()))
web = f.get('installed', f.get('web', {}))
client_id = web.get('client_id')
client_secret = web.get('client_secret')
except IOError:
raise exceptions.NotFoundError(
'Failed to open client json file: %s' % args.client_json)
else:
client_id = args.client_id
client_secret = args.client_secret
if not client_id:
logging.warning('No client ID supplied')
client_id = ''
if not client_secret:
logging.warning('No client secret supplied')
client_secret = ''
client_info = util.ClientInfo.Create(
discovery_doc, args.scope, client_id, client_secret,
args.user_agent, names, args.api_key)
outdir = os.path.expanduser(args.outdir) or client_info.default_directory
if os.path.exists(outdir) and not args.overwrite:
raise exceptions.ConfigurationValueError(
'Output directory exists, pass --overwrite to replace '
'the existing files.')
if not os.path.exists(outdir):
os.makedirs(outdir)
return gen_client_lib.DescriptorGenerator(
discovery_doc, client_info, names, args.root_package, outdir,
base_package=args.base_package,
protorpc_package=args.protorpc_package,
init_wildcards_file=(args.init_file == 'wildcards'),
use_proto2=args.experimental_proto2_output,
unelidable_request_methods=args.unelidable_request_methods,
apitools_version=args.apitools_version) | [
"def",
"_GetCodegenFromFlags",
"(",
"args",
")",
":",
"discovery_doc",
"=",
"_GetDiscoveryDocFromFlags",
"(",
"args",
")",
"names",
"=",
"util",
".",
"Names",
"(",
"args",
".",
"strip_prefix",
",",
"args",
".",
"experimental_name_convention",
",",
"args",
".",
... | Create a codegen object from flags. | [
"Create",
"a",
"codegen",
"object",
"from",
"flags",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/gen_client.py#L57-L105 | train | 207,731 |
google/apitools | apitools/gen/gen_client.py | GenerateClient | def GenerateClient(args):
"""Driver for client code generation."""
codegen = _GetCodegenFromFlags(args)
if codegen is None:
logging.error('Failed to create codegen, exiting.')
return 128
_WriteGeneratedFiles(args, codegen)
if args.init_file != 'none':
_WriteInit(codegen) | python | def GenerateClient(args):
"""Driver for client code generation."""
codegen = _GetCodegenFromFlags(args)
if codegen is None:
logging.error('Failed to create codegen, exiting.')
return 128
_WriteGeneratedFiles(args, codegen)
if args.init_file != 'none':
_WriteInit(codegen) | [
"def",
"GenerateClient",
"(",
"args",
")",
":",
"codegen",
"=",
"_GetCodegenFromFlags",
"(",
"args",
")",
"if",
"codegen",
"is",
"None",
":",
"logging",
".",
"error",
"(",
"'Failed to create codegen, exiting.'",
")",
"return",
"128",
"_WriteGeneratedFiles",
"(",
... | Driver for client code generation. | [
"Driver",
"for",
"client",
"code",
"generation",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/gen_client.py#L150-L160 | train | 207,732 |
google/apitools | apitools/gen/gen_client.py | GeneratePipPackage | def GeneratePipPackage(args):
"""Generate a client as a pip-installable tarball."""
discovery_doc = _GetDiscoveryDocFromFlags(args)
package = discovery_doc['name']
original_outdir = os.path.expanduser(args.outdir)
args.outdir = os.path.join(
args.outdir, 'apitools/clients/%s' % package)
args.root_package = 'apitools.clients.%s' % package
codegen = _GetCodegenFromFlags(args)
if codegen is None:
logging.error('Failed to create codegen, exiting.')
return 1
_WriteGeneratedFiles(args, codegen)
_WriteInit(codegen)
with util.Chdir(original_outdir):
_WriteSetupPy(codegen)
with util.Chdir('apitools'):
_WriteIntermediateInit(codegen)
with util.Chdir('clients'):
_WriteIntermediateInit(codegen) | python | def GeneratePipPackage(args):
"""Generate a client as a pip-installable tarball."""
discovery_doc = _GetDiscoveryDocFromFlags(args)
package = discovery_doc['name']
original_outdir = os.path.expanduser(args.outdir)
args.outdir = os.path.join(
args.outdir, 'apitools/clients/%s' % package)
args.root_package = 'apitools.clients.%s' % package
codegen = _GetCodegenFromFlags(args)
if codegen is None:
logging.error('Failed to create codegen, exiting.')
return 1
_WriteGeneratedFiles(args, codegen)
_WriteInit(codegen)
with util.Chdir(original_outdir):
_WriteSetupPy(codegen)
with util.Chdir('apitools'):
_WriteIntermediateInit(codegen)
with util.Chdir('clients'):
_WriteIntermediateInit(codegen) | [
"def",
"GeneratePipPackage",
"(",
"args",
")",
":",
"discovery_doc",
"=",
"_GetDiscoveryDocFromFlags",
"(",
"args",
")",
"package",
"=",
"discovery_doc",
"[",
"'name'",
"]",
"original_outdir",
"=",
"os",
".",
"path",
".",
"expanduser",
"(",
"args",
".",
"outdi... | Generate a client as a pip-installable tarball. | [
"Generate",
"a",
"client",
"as",
"a",
"pip",
"-",
"installable",
"tarball",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/gen/gen_client.py#L163-L184 | train | 207,733 |
google/apitools | apitools/base/py/stream_slice.py | StreamSlice.read | def read(self, size=None): # pylint: disable=missing-docstring
"""Read at most size bytes from this slice.
Compared to other streams, there is one case where we may
unexpectedly raise an exception on read: if the underlying stream
is exhausted (i.e. returns no bytes on read), and the size of this
slice indicates we should still be able to read more bytes, we
raise exceptions.StreamExhausted.
Args:
size: If provided, read no more than size bytes from the stream.
Returns:
The bytes read from this slice.
Raises:
exceptions.StreamExhausted
"""
if size is not None:
read_size = min(size, self.__remaining_bytes)
else:
read_size = self.__remaining_bytes
data = self.__stream.read(read_size)
if read_size > 0 and not data:
raise exceptions.StreamExhausted(
'Not enough bytes in stream; expected %d, exhausted '
'after %d' % (
self.__max_bytes,
self.__max_bytes - self.__remaining_bytes))
self.__remaining_bytes -= len(data)
return data | python | def read(self, size=None): # pylint: disable=missing-docstring
"""Read at most size bytes from this slice.
Compared to other streams, there is one case where we may
unexpectedly raise an exception on read: if the underlying stream
is exhausted (i.e. returns no bytes on read), and the size of this
slice indicates we should still be able to read more bytes, we
raise exceptions.StreamExhausted.
Args:
size: If provided, read no more than size bytes from the stream.
Returns:
The bytes read from this slice.
Raises:
exceptions.StreamExhausted
"""
if size is not None:
read_size = min(size, self.__remaining_bytes)
else:
read_size = self.__remaining_bytes
data = self.__stream.read(read_size)
if read_size > 0 and not data:
raise exceptions.StreamExhausted(
'Not enough bytes in stream; expected %d, exhausted '
'after %d' % (
self.__max_bytes,
self.__max_bytes - self.__remaining_bytes))
self.__remaining_bytes -= len(data)
return data | [
"def",
"read",
"(",
"self",
",",
"size",
"=",
"None",
")",
":",
"# pylint: disable=missing-docstring",
"if",
"size",
"is",
"not",
"None",
":",
"read_size",
"=",
"min",
"(",
"size",
",",
"self",
".",
"__remaining_bytes",
")",
"else",
":",
"read_size",
"=",
... | Read at most size bytes from this slice.
Compared to other streams, there is one case where we may
unexpectedly raise an exception on read: if the underlying stream
is exhausted (i.e. returns no bytes on read), and the size of this
slice indicates we should still be able to read more bytes, we
raise exceptions.StreamExhausted.
Args:
size: If provided, read no more than size bytes from the stream.
Returns:
The bytes read from this slice.
Raises:
exceptions.StreamExhausted | [
"Read",
"at",
"most",
"size",
"bytes",
"from",
"this",
"slice",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/stream_slice.py#L48-L79 | train | 207,734 |
google/apitools | ez_setup.py | update_md5 | def update_md5(filenames):
"""Update our built-in md5 registry"""
import re
for name in filenames:
base = os.path.basename(name)
f = open(name,'rb')
md5_data[base] = md5(f.read()).hexdigest()
f.close()
data = [" %r: %r,\n" % it for it in md5_data.items()]
data.sort()
repl = "".join(data)
import inspect
srcfile = inspect.getsourcefile(sys.modules[__name__])
f = open(srcfile, 'rb'); src = f.read(); f.close()
match = re.search("\nmd5_data = {\n([^}]+)}", src)
if not match:
print >>sys.stderr, "Internal error!"
sys.exit(2)
src = src[:match.start(1)] + repl + src[match.end(1):]
f = open(srcfile,'w')
f.write(src)
f.close() | python | def update_md5(filenames):
"""Update our built-in md5 registry"""
import re
for name in filenames:
base = os.path.basename(name)
f = open(name,'rb')
md5_data[base] = md5(f.read()).hexdigest()
f.close()
data = [" %r: %r,\n" % it for it in md5_data.items()]
data.sort()
repl = "".join(data)
import inspect
srcfile = inspect.getsourcefile(sys.modules[__name__])
f = open(srcfile, 'rb'); src = f.read(); f.close()
match = re.search("\nmd5_data = {\n([^}]+)}", src)
if not match:
print >>sys.stderr, "Internal error!"
sys.exit(2)
src = src[:match.start(1)] + repl + src[match.end(1):]
f = open(srcfile,'w')
f.write(src)
f.close() | [
"def",
"update_md5",
"(",
"filenames",
")",
":",
"import",
"re",
"for",
"name",
"in",
"filenames",
":",
"base",
"=",
"os",
".",
"path",
".",
"basename",
"(",
"name",
")",
"f",
"=",
"open",
"(",
"name",
",",
"'rb'",
")",
"md5_data",
"[",
"base",
"]"... | Update our built-in md5 registry | [
"Update",
"our",
"built",
"-",
"in",
"md5",
"registry"
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/ez_setup.py#L232-L259 | train | 207,735 |
google/apitools | apitools/base/py/batch.py | BatchApiRequest.Add | def Add(self, service, method, request, global_params=None):
"""Add a request to the batch.
Args:
service: A class inheriting base_api.BaseApiService.
method: A string indicated desired method from the service. See
the example in the class docstring.
request: An input message appropriate for the specified
service.method.
global_params: Optional additional parameters to pass into
method.PrepareHttpRequest.
Returns:
None
"""
# Retrieve the configs for the desired method and service.
method_config = service.GetMethodConfig(method)
upload_config = service.GetUploadConfig(method)
# Prepare the HTTP Request.
http_request = service.PrepareHttpRequest(
method_config, request, global_params=global_params,
upload_config=upload_config)
# Create the request and add it to our master list.
api_request = self.ApiCall(
http_request, self.retryable_codes, service, method_config)
self.api_requests.append(api_request) | python | def Add(self, service, method, request, global_params=None):
"""Add a request to the batch.
Args:
service: A class inheriting base_api.BaseApiService.
method: A string indicated desired method from the service. See
the example in the class docstring.
request: An input message appropriate for the specified
service.method.
global_params: Optional additional parameters to pass into
method.PrepareHttpRequest.
Returns:
None
"""
# Retrieve the configs for the desired method and service.
method_config = service.GetMethodConfig(method)
upload_config = service.GetUploadConfig(method)
# Prepare the HTTP Request.
http_request = service.PrepareHttpRequest(
method_config, request, global_params=global_params,
upload_config=upload_config)
# Create the request and add it to our master list.
api_request = self.ApiCall(
http_request, self.retryable_codes, service, method_config)
self.api_requests.append(api_request) | [
"def",
"Add",
"(",
"self",
",",
"service",
",",
"method",
",",
"request",
",",
"global_params",
"=",
"None",
")",
":",
"# Retrieve the configs for the desired method and service.",
"method_config",
"=",
"service",
".",
"GetMethodConfig",
"(",
"method",
")",
"upload_... | Add a request to the batch.
Args:
service: A class inheriting base_api.BaseApiService.
method: A string indicated desired method from the service. See
the example in the class docstring.
request: An input message appropriate for the specified
service.method.
global_params: Optional additional parameters to pass into
method.PrepareHttpRequest.
Returns:
None | [
"Add",
"a",
"request",
"to",
"the",
"batch",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/batch.py#L157-L185 | train | 207,736 |
google/apitools | apitools/base/py/batch.py | BatchApiRequest.Execute | def Execute(self, http, sleep_between_polls=5, max_retries=5,
max_batch_size=None, batch_request_callback=None):
"""Execute all of the requests in the batch.
Args:
http: httplib2.Http object for use in the request.
sleep_between_polls: Integer number of seconds to sleep between
polls.
max_retries: Max retries. Any requests that have not succeeded by
this number of retries simply report the last response or
exception, whatever it happened to be.
max_batch_size: int, if specified requests will be split in batches
of given size.
batch_request_callback: function of (http_response, exception) passed
to BatchHttpRequest which will be run on any given results.
Returns:
List of ApiCalls.
"""
requests = [request for request in self.api_requests
if not request.terminal_state]
batch_size = max_batch_size or len(requests)
for attempt in range(max_retries):
if attempt:
time.sleep(sleep_between_polls)
for i in range(0, len(requests), batch_size):
# Create a batch_http_request object and populate it with
# incomplete requests.
batch_http_request = BatchHttpRequest(
batch_url=self.batch_url,
callback=batch_request_callback,
response_encoding=self.response_encoding
)
for request in itertools.islice(requests,
i, i + batch_size):
batch_http_request.Add(
request.http_request, request.HandleResponse)
batch_http_request.Execute(http)
if hasattr(http.request, 'credentials'):
if any(request.authorization_failed
for request in itertools.islice(requests,
i, i + batch_size)):
http.request.credentials.refresh(http)
# Collect retryable requests.
requests = [request for request in self.api_requests if not
request.terminal_state]
if not requests:
break
return self.api_requests | python | def Execute(self, http, sleep_between_polls=5, max_retries=5,
max_batch_size=None, batch_request_callback=None):
"""Execute all of the requests in the batch.
Args:
http: httplib2.Http object for use in the request.
sleep_between_polls: Integer number of seconds to sleep between
polls.
max_retries: Max retries. Any requests that have not succeeded by
this number of retries simply report the last response or
exception, whatever it happened to be.
max_batch_size: int, if specified requests will be split in batches
of given size.
batch_request_callback: function of (http_response, exception) passed
to BatchHttpRequest which will be run on any given results.
Returns:
List of ApiCalls.
"""
requests = [request for request in self.api_requests
if not request.terminal_state]
batch_size = max_batch_size or len(requests)
for attempt in range(max_retries):
if attempt:
time.sleep(sleep_between_polls)
for i in range(0, len(requests), batch_size):
# Create a batch_http_request object and populate it with
# incomplete requests.
batch_http_request = BatchHttpRequest(
batch_url=self.batch_url,
callback=batch_request_callback,
response_encoding=self.response_encoding
)
for request in itertools.islice(requests,
i, i + batch_size):
batch_http_request.Add(
request.http_request, request.HandleResponse)
batch_http_request.Execute(http)
if hasattr(http.request, 'credentials'):
if any(request.authorization_failed
for request in itertools.islice(requests,
i, i + batch_size)):
http.request.credentials.refresh(http)
# Collect retryable requests.
requests = [request for request in self.api_requests if not
request.terminal_state]
if not requests:
break
return self.api_requests | [
"def",
"Execute",
"(",
"self",
",",
"http",
",",
"sleep_between_polls",
"=",
"5",
",",
"max_retries",
"=",
"5",
",",
"max_batch_size",
"=",
"None",
",",
"batch_request_callback",
"=",
"None",
")",
":",
"requests",
"=",
"[",
"request",
"for",
"request",
"in... | Execute all of the requests in the batch.
Args:
http: httplib2.Http object for use in the request.
sleep_between_polls: Integer number of seconds to sleep between
polls.
max_retries: Max retries. Any requests that have not succeeded by
this number of retries simply report the last response or
exception, whatever it happened to be.
max_batch_size: int, if specified requests will be split in batches
of given size.
batch_request_callback: function of (http_response, exception) passed
to BatchHttpRequest which will be run on any given results.
Returns:
List of ApiCalls. | [
"Execute",
"all",
"of",
"the",
"requests",
"in",
"the",
"batch",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/batch.py#L187-L240 | train | 207,737 |
google/apitools | apitools/base/py/batch.py | BatchHttpRequest._ConvertHeaderToId | def _ConvertHeaderToId(header):
"""Convert a Content-ID header value to an id.
Presumes the Content-ID header conforms to the format that
_ConvertIdToHeader() returns.
Args:
header: A string indicating the Content-ID header value.
Returns:
The extracted id value.
Raises:
BatchError if the header is not in the expected format.
"""
if not (header.startswith('<') or header.endswith('>')):
raise exceptions.BatchError(
'Invalid value for Content-ID: %s' % header)
if '+' not in header:
raise exceptions.BatchError(
'Invalid value for Content-ID: %s' % header)
_, request_id = header[1:-1].rsplit('+', 1)
return urllib_parse.unquote(request_id) | python | def _ConvertHeaderToId(header):
"""Convert a Content-ID header value to an id.
Presumes the Content-ID header conforms to the format that
_ConvertIdToHeader() returns.
Args:
header: A string indicating the Content-ID header value.
Returns:
The extracted id value.
Raises:
BatchError if the header is not in the expected format.
"""
if not (header.startswith('<') or header.endswith('>')):
raise exceptions.BatchError(
'Invalid value for Content-ID: %s' % header)
if '+' not in header:
raise exceptions.BatchError(
'Invalid value for Content-ID: %s' % header)
_, request_id = header[1:-1].rsplit('+', 1)
return urllib_parse.unquote(request_id) | [
"def",
"_ConvertHeaderToId",
"(",
"header",
")",
":",
"if",
"not",
"(",
"header",
".",
"startswith",
"(",
"'<'",
")",
"or",
"header",
".",
"endswith",
"(",
"'>'",
")",
")",
":",
"raise",
"exceptions",
".",
"BatchError",
"(",
"'Invalid value for Content-ID: %... | Convert a Content-ID header value to an id.
Presumes the Content-ID header conforms to the format that
_ConvertIdToHeader() returns.
Args:
header: A string indicating the Content-ID header value.
Returns:
The extracted id value.
Raises:
BatchError if the header is not in the expected format. | [
"Convert",
"a",
"Content",
"-",
"ID",
"header",
"value",
"to",
"an",
"id",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/batch.py#L294-L317 | train | 207,738 |
google/apitools | apitools/base/py/batch.py | BatchHttpRequest._SerializeRequest | def _SerializeRequest(self, request):
"""Convert a http_wrapper.Request object into a string.
Args:
request: A http_wrapper.Request to serialize.
Returns:
The request as a string in application/http format.
"""
# Construct status line
parsed = urllib_parse.urlsplit(request.url)
request_line = urllib_parse.urlunsplit(
('', '', parsed.path, parsed.query, ''))
if not isinstance(request_line, six.text_type):
request_line = request_line.decode('utf-8')
status_line = u' '.join((
request.http_method,
request_line,
u'HTTP/1.1\n'
))
major, minor = request.headers.get(
'content-type', 'application/json').split('/')
msg = mime_nonmultipart.MIMENonMultipart(major, minor)
# MIMENonMultipart adds its own Content-Type header.
# Keep all of the other headers in `request.headers`.
for key, value in request.headers.items():
if key == 'content-type':
continue
msg[key] = value
msg['Host'] = parsed.netloc
msg.set_unixfrom(None)
if request.body is not None:
msg.set_payload(request.body)
# Serialize the mime message.
str_io = six.StringIO()
# maxheaderlen=0 means don't line wrap headers.
gen = generator.Generator(str_io, maxheaderlen=0)
gen.flatten(msg, unixfrom=False)
body = str_io.getvalue()
return status_line + body | python | def _SerializeRequest(self, request):
"""Convert a http_wrapper.Request object into a string.
Args:
request: A http_wrapper.Request to serialize.
Returns:
The request as a string in application/http format.
"""
# Construct status line
parsed = urllib_parse.urlsplit(request.url)
request_line = urllib_parse.urlunsplit(
('', '', parsed.path, parsed.query, ''))
if not isinstance(request_line, six.text_type):
request_line = request_line.decode('utf-8')
status_line = u' '.join((
request.http_method,
request_line,
u'HTTP/1.1\n'
))
major, minor = request.headers.get(
'content-type', 'application/json').split('/')
msg = mime_nonmultipart.MIMENonMultipart(major, minor)
# MIMENonMultipart adds its own Content-Type header.
# Keep all of the other headers in `request.headers`.
for key, value in request.headers.items():
if key == 'content-type':
continue
msg[key] = value
msg['Host'] = parsed.netloc
msg.set_unixfrom(None)
if request.body is not None:
msg.set_payload(request.body)
# Serialize the mime message.
str_io = six.StringIO()
# maxheaderlen=0 means don't line wrap headers.
gen = generator.Generator(str_io, maxheaderlen=0)
gen.flatten(msg, unixfrom=False)
body = str_io.getvalue()
return status_line + body | [
"def",
"_SerializeRequest",
"(",
"self",
",",
"request",
")",
":",
"# Construct status line",
"parsed",
"=",
"urllib_parse",
".",
"urlsplit",
"(",
"request",
".",
"url",
")",
"request_line",
"=",
"urllib_parse",
".",
"urlunsplit",
"(",
"(",
"''",
",",
"''",
... | Convert a http_wrapper.Request object into a string.
Args:
request: A http_wrapper.Request to serialize.
Returns:
The request as a string in application/http format. | [
"Convert",
"a",
"http_wrapper",
".",
"Request",
"object",
"into",
"a",
"string",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/batch.py#L319-L363 | train | 207,739 |
google/apitools | apitools/base/py/batch.py | BatchHttpRequest._DeserializeResponse | def _DeserializeResponse(self, payload):
"""Convert string into Response and content.
Args:
payload: Header and body string to be deserialized.
Returns:
A Response object
"""
# Strip off the status line.
status_line, payload = payload.split('\n', 1)
_, status, _ = status_line.split(' ', 2)
# Parse the rest of the response.
parser = email_parser.Parser()
msg = parser.parsestr(payload)
# Get the headers.
info = dict(msg)
info['status'] = status
# Create Response from the parsed headers.
content = msg.get_payload()
return http_wrapper.Response(info, content, self.__batch_url) | python | def _DeserializeResponse(self, payload):
"""Convert string into Response and content.
Args:
payload: Header and body string to be deserialized.
Returns:
A Response object
"""
# Strip off the status line.
status_line, payload = payload.split('\n', 1)
_, status, _ = status_line.split(' ', 2)
# Parse the rest of the response.
parser = email_parser.Parser()
msg = parser.parsestr(payload)
# Get the headers.
info = dict(msg)
info['status'] = status
# Create Response from the parsed headers.
content = msg.get_payload()
return http_wrapper.Response(info, content, self.__batch_url) | [
"def",
"_DeserializeResponse",
"(",
"self",
",",
"payload",
")",
":",
"# Strip off the status line.",
"status_line",
",",
"payload",
"=",
"payload",
".",
"split",
"(",
"'\\n'",
",",
"1",
")",
"_",
",",
"status",
",",
"_",
"=",
"status_line",
".",
"split",
... | Convert string into Response and content.
Args:
payload: Header and body string to be deserialized.
Returns:
A Response object | [
"Convert",
"string",
"into",
"Response",
"and",
"content",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/batch.py#L365-L389 | train | 207,740 |
google/apitools | apitools/base/py/batch.py | BatchHttpRequest.Add | def Add(self, request, callback=None):
"""Add a new request.
Args:
request: A http_wrapper.Request to add to the batch.
callback: A callback to be called for this response, of the
form callback(response, exception). The first parameter is the
deserialized response object. The second is an
apiclient.errors.HttpError exception object if an HTTP error
occurred while processing the request, or None if no errors
occurred.
Returns:
None
"""
handler = RequestResponseAndHandler(request, None, callback)
self.__request_response_handlers[self._NewId()] = handler | python | def Add(self, request, callback=None):
"""Add a new request.
Args:
request: A http_wrapper.Request to add to the batch.
callback: A callback to be called for this response, of the
form callback(response, exception). The first parameter is the
deserialized response object. The second is an
apiclient.errors.HttpError exception object if an HTTP error
occurred while processing the request, or None if no errors
occurred.
Returns:
None
"""
handler = RequestResponseAndHandler(request, None, callback)
self.__request_response_handlers[self._NewId()] = handler | [
"def",
"Add",
"(",
"self",
",",
"request",
",",
"callback",
"=",
"None",
")",
":",
"handler",
"=",
"RequestResponseAndHandler",
"(",
"request",
",",
"None",
",",
"callback",
")",
"self",
".",
"__request_response_handlers",
"[",
"self",
".",
"_NewId",
"(",
... | Add a new request.
Args:
request: A http_wrapper.Request to add to the batch.
callback: A callback to be called for this response, of the
form callback(response, exception). The first parameter is the
deserialized response object. The second is an
apiclient.errors.HttpError exception object if an HTTP error
occurred while processing the request, or None if no errors
occurred.
Returns:
None | [
"Add",
"a",
"new",
"request",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/batch.py#L401-L417 | train | 207,741 |
google/apitools | apitools/base/py/batch.py | BatchHttpRequest._Execute | def _Execute(self, http):
"""Serialize batch request, send to server, process response.
Args:
http: A httplib2.Http object to be used to make the request with.
Raises:
httplib2.HttpLib2Error if a transport error has occured.
apiclient.errors.BatchError if the response is the wrong format.
"""
message = mime_multipart.MIMEMultipart('mixed')
# Message should not write out its own headers.
setattr(message, '_write_headers', lambda self: None)
# Add all the individual requests.
for key in self.__request_response_handlers:
msg = mime_nonmultipart.MIMENonMultipart('application', 'http')
msg['Content-Transfer-Encoding'] = 'binary'
msg['Content-ID'] = self._ConvertIdToHeader(key)
body = self._SerializeRequest(
self.__request_response_handlers[key].request)
msg.set_payload(body)
message.attach(msg)
request = http_wrapper.Request(self.__batch_url, 'POST')
request.body = message.as_string()
request.headers['content-type'] = (
'multipart/mixed; boundary="%s"') % message.get_boundary()
response = http_wrapper.MakeRequest(http, request)
if response.status_code >= 300:
raise exceptions.HttpError.FromResponse(response)
# Prepend with a content-type header so Parser can handle it.
header = 'content-type: %s\r\n\r\n' % response.info['content-type']
content = response.content
if isinstance(content, bytes) and self.__response_encoding:
content = response.content.decode(self.__response_encoding)
parser = email_parser.Parser()
mime_response = parser.parsestr(header + content)
if not mime_response.is_multipart():
raise exceptions.BatchError(
'Response not in multipart/mixed format.')
for part in mime_response.get_payload():
request_id = self._ConvertHeaderToId(part['Content-ID'])
response = self._DeserializeResponse(part.get_payload())
# Disable protected access because namedtuple._replace(...)
# is not actually meant to be protected.
# pylint: disable=protected-access
self.__request_response_handlers[request_id] = (
self.__request_response_handlers[request_id]._replace(
response=response)) | python | def _Execute(self, http):
"""Serialize batch request, send to server, process response.
Args:
http: A httplib2.Http object to be used to make the request with.
Raises:
httplib2.HttpLib2Error if a transport error has occured.
apiclient.errors.BatchError if the response is the wrong format.
"""
message = mime_multipart.MIMEMultipart('mixed')
# Message should not write out its own headers.
setattr(message, '_write_headers', lambda self: None)
# Add all the individual requests.
for key in self.__request_response_handlers:
msg = mime_nonmultipart.MIMENonMultipart('application', 'http')
msg['Content-Transfer-Encoding'] = 'binary'
msg['Content-ID'] = self._ConvertIdToHeader(key)
body = self._SerializeRequest(
self.__request_response_handlers[key].request)
msg.set_payload(body)
message.attach(msg)
request = http_wrapper.Request(self.__batch_url, 'POST')
request.body = message.as_string()
request.headers['content-type'] = (
'multipart/mixed; boundary="%s"') % message.get_boundary()
response = http_wrapper.MakeRequest(http, request)
if response.status_code >= 300:
raise exceptions.HttpError.FromResponse(response)
# Prepend with a content-type header so Parser can handle it.
header = 'content-type: %s\r\n\r\n' % response.info['content-type']
content = response.content
if isinstance(content, bytes) and self.__response_encoding:
content = response.content.decode(self.__response_encoding)
parser = email_parser.Parser()
mime_response = parser.parsestr(header + content)
if not mime_response.is_multipart():
raise exceptions.BatchError(
'Response not in multipart/mixed format.')
for part in mime_response.get_payload():
request_id = self._ConvertHeaderToId(part['Content-ID'])
response = self._DeserializeResponse(part.get_payload())
# Disable protected access because namedtuple._replace(...)
# is not actually meant to be protected.
# pylint: disable=protected-access
self.__request_response_handlers[request_id] = (
self.__request_response_handlers[request_id]._replace(
response=response)) | [
"def",
"_Execute",
"(",
"self",
",",
"http",
")",
":",
"message",
"=",
"mime_multipart",
".",
"MIMEMultipart",
"(",
"'mixed'",
")",
"# Message should not write out its own headers.",
"setattr",
"(",
"message",
",",
"'_write_headers'",
",",
"lambda",
"self",
":",
"... | Serialize batch request, send to server, process response.
Args:
http: A httplib2.Http object to be used to make the request with.
Raises:
httplib2.HttpLib2Error if a transport error has occured.
apiclient.errors.BatchError if the response is the wrong format. | [
"Serialize",
"batch",
"request",
"send",
"to",
"server",
"process",
"response",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/batch.py#L419-L477 | train | 207,742 |
google/apitools | apitools/base/py/batch.py | BatchHttpRequest.Execute | def Execute(self, http):
"""Execute all the requests as a single batched HTTP request.
Args:
http: A httplib2.Http object to be used with the request.
Returns:
None
Raises:
BatchError if the response is the wrong format.
"""
self._Execute(http)
for key in self.__request_response_handlers:
response = self.__request_response_handlers[key].response
callback = self.__request_response_handlers[key].handler
exception = None
if response.status_code >= 300:
exception = exceptions.HttpError.FromResponse(response)
if callback is not None:
callback(response, exception)
if self.__callback is not None:
self.__callback(response, exception) | python | def Execute(self, http):
"""Execute all the requests as a single batched HTTP request.
Args:
http: A httplib2.Http object to be used with the request.
Returns:
None
Raises:
BatchError if the response is the wrong format.
"""
self._Execute(http)
for key in self.__request_response_handlers:
response = self.__request_response_handlers[key].response
callback = self.__request_response_handlers[key].handler
exception = None
if response.status_code >= 300:
exception = exceptions.HttpError.FromResponse(response)
if callback is not None:
callback(response, exception)
if self.__callback is not None:
self.__callback(response, exception) | [
"def",
"Execute",
"(",
"self",
",",
"http",
")",
":",
"self",
".",
"_Execute",
"(",
"http",
")",
"for",
"key",
"in",
"self",
".",
"__request_response_handlers",
":",
"response",
"=",
"self",
".",
"__request_response_handlers",
"[",
"key",
"]",
".",
"respon... | Execute all the requests as a single batched HTTP request.
Args:
http: A httplib2.Http object to be used with the request.
Returns:
None
Raises:
BatchError if the response is the wrong format. | [
"Execute",
"all",
"the",
"requests",
"as",
"a",
"single",
"batched",
"HTTP",
"request",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/batch.py#L479-L506 | train | 207,743 |
google/apitools | apitools/base/protorpclite/messages.py | find_definition | def find_definition(name, relative_to=None, importer=__import__):
"""Find definition by name in module-space.
The find algorthm will look for definitions by name relative to a
message definition or by fully qualfied name. If no definition is
found relative to the relative_to parameter it will do the same
search against the container of relative_to. If relative_to is a
nested Message, it will search its message_definition(). If that
message has no message_definition() it will search its module. If
relative_to is a module, it will attempt to look for the
containing module and search relative to it. If the module is a
top-level module, it will look for the a message using a fully
qualified name. If no message is found then, the search fails and
DefinitionNotFoundError is raised.
For example, when looking for any definition 'foo.bar.ADefinition'
relative to an actual message definition abc.xyz.SomeMessage:
find_definition('foo.bar.ADefinition', SomeMessage)
It is like looking for the following fully qualified names:
abc.xyz.SomeMessage. foo.bar.ADefinition
abc.xyz. foo.bar.ADefinition
abc. foo.bar.ADefinition
foo.bar.ADefinition
When resolving the name relative to Message definitions and modules, the
algorithm searches any Messages or sub-modules found in its path.
Non-Message values are not searched.
A name that begins with '.' is considered to be a fully qualified
name. The name is always searched for from the topmost package.
For example, assume two message types:
abc.xyz.SomeMessage
xyz.SomeMessage
Searching for '.xyz.SomeMessage' relative to 'abc' will resolve to
'xyz.SomeMessage' and not 'abc.xyz.SomeMessage'. For this kind of name,
the relative_to parameter is effectively ignored and always set to None.
For more information about package name resolution, please see:
http://code.google.com/apis/protocolbuffers/docs/proto.html#packages
Args:
name: Name of definition to find. May be fully qualified or relative
name.
relative_to: Search for definition relative to message definition or
module. None will cause a fully qualified name search.
importer: Import function to use for resolving modules.
Returns:
Enum or Message class definition associated with name.
Raises:
DefinitionNotFoundError if no definition is found in any search path.
"""
# Check parameters.
if not (relative_to is None or
isinstance(relative_to, types.ModuleType) or
isinstance(relative_to, type) and
issubclass(relative_to, Message)):
raise TypeError(
'relative_to must be None, Message definition or module.'
' Found: %s' % relative_to)
name_path = name.split('.')
# Handle absolute path reference.
if not name_path[0]:
relative_to = None
name_path = name_path[1:]
def search_path():
"""Performs a single iteration searching the path from relative_to.
This is the function that searches up the path from a relative object.
fully.qualified.object . relative.or.nested.Definition
---------------------------->
^
|
this part of search --+
Returns:
Message or Enum at the end of name_path, else None.
"""
next_part = relative_to
for node in name_path:
# Look for attribute first.
attribute = getattr(next_part, node, None)
if attribute is not None:
next_part = attribute
else:
# If module, look for sub-module.
if (next_part is None or
isinstance(next_part, types.ModuleType)):
if next_part is None:
module_name = node
else:
module_name = '%s.%s' % (next_part.__name__, node)
try:
fromitem = module_name.split('.')[-1]
next_part = importer(module_name, '', '',
[str(fromitem)])
except ImportError:
return None
else:
return None
if not isinstance(next_part, types.ModuleType):
if not (isinstance(next_part, type) and
issubclass(next_part, (Message, Enum))):
return None
return next_part
while True:
found = search_path()
if isinstance(found, type) and issubclass(found, (Enum, Message)):
return found
else:
# Find next relative_to to search against.
#
# fully.qualified.object . relative.or.nested.Definition
# <---------------------
# ^
# |
# does this part of search
if relative_to is None:
# Fully qualified search was done. Nothing found. Fail.
raise DefinitionNotFoundError(
'Could not find definition for %s' % name)
else:
if isinstance(relative_to, types.ModuleType):
# Find parent module.
module_path = relative_to.__name__.split('.')[:-1]
if not module_path:
relative_to = None
else:
# Should not raise ImportError. If it does...
# weird and unexpected. Propagate.
relative_to = importer(
'.'.join(module_path), '', '', [module_path[-1]])
elif (isinstance(relative_to, type) and
issubclass(relative_to, Message)):
parent = relative_to.message_definition()
if parent is None:
last_module_name = relative_to.__module__.split(
'.')[-1]
relative_to = importer(
relative_to.__module__, '', '', [last_module_name])
else:
relative_to = parent | python | def find_definition(name, relative_to=None, importer=__import__):
"""Find definition by name in module-space.
The find algorthm will look for definitions by name relative to a
message definition or by fully qualfied name. If no definition is
found relative to the relative_to parameter it will do the same
search against the container of relative_to. If relative_to is a
nested Message, it will search its message_definition(). If that
message has no message_definition() it will search its module. If
relative_to is a module, it will attempt to look for the
containing module and search relative to it. If the module is a
top-level module, it will look for the a message using a fully
qualified name. If no message is found then, the search fails and
DefinitionNotFoundError is raised.
For example, when looking for any definition 'foo.bar.ADefinition'
relative to an actual message definition abc.xyz.SomeMessage:
find_definition('foo.bar.ADefinition', SomeMessage)
It is like looking for the following fully qualified names:
abc.xyz.SomeMessage. foo.bar.ADefinition
abc.xyz. foo.bar.ADefinition
abc. foo.bar.ADefinition
foo.bar.ADefinition
When resolving the name relative to Message definitions and modules, the
algorithm searches any Messages or sub-modules found in its path.
Non-Message values are not searched.
A name that begins with '.' is considered to be a fully qualified
name. The name is always searched for from the topmost package.
For example, assume two message types:
abc.xyz.SomeMessage
xyz.SomeMessage
Searching for '.xyz.SomeMessage' relative to 'abc' will resolve to
'xyz.SomeMessage' and not 'abc.xyz.SomeMessage'. For this kind of name,
the relative_to parameter is effectively ignored and always set to None.
For more information about package name resolution, please see:
http://code.google.com/apis/protocolbuffers/docs/proto.html#packages
Args:
name: Name of definition to find. May be fully qualified or relative
name.
relative_to: Search for definition relative to message definition or
module. None will cause a fully qualified name search.
importer: Import function to use for resolving modules.
Returns:
Enum or Message class definition associated with name.
Raises:
DefinitionNotFoundError if no definition is found in any search path.
"""
# Check parameters.
if not (relative_to is None or
isinstance(relative_to, types.ModuleType) or
isinstance(relative_to, type) and
issubclass(relative_to, Message)):
raise TypeError(
'relative_to must be None, Message definition or module.'
' Found: %s' % relative_to)
name_path = name.split('.')
# Handle absolute path reference.
if not name_path[0]:
relative_to = None
name_path = name_path[1:]
def search_path():
"""Performs a single iteration searching the path from relative_to.
This is the function that searches up the path from a relative object.
fully.qualified.object . relative.or.nested.Definition
---------------------------->
^
|
this part of search --+
Returns:
Message or Enum at the end of name_path, else None.
"""
next_part = relative_to
for node in name_path:
# Look for attribute first.
attribute = getattr(next_part, node, None)
if attribute is not None:
next_part = attribute
else:
# If module, look for sub-module.
if (next_part is None or
isinstance(next_part, types.ModuleType)):
if next_part is None:
module_name = node
else:
module_name = '%s.%s' % (next_part.__name__, node)
try:
fromitem = module_name.split('.')[-1]
next_part = importer(module_name, '', '',
[str(fromitem)])
except ImportError:
return None
else:
return None
if not isinstance(next_part, types.ModuleType):
if not (isinstance(next_part, type) and
issubclass(next_part, (Message, Enum))):
return None
return next_part
while True:
found = search_path()
if isinstance(found, type) and issubclass(found, (Enum, Message)):
return found
else:
# Find next relative_to to search against.
#
# fully.qualified.object . relative.or.nested.Definition
# <---------------------
# ^
# |
# does this part of search
if relative_to is None:
# Fully qualified search was done. Nothing found. Fail.
raise DefinitionNotFoundError(
'Could not find definition for %s' % name)
else:
if isinstance(relative_to, types.ModuleType):
# Find parent module.
module_path = relative_to.__name__.split('.')[:-1]
if not module_path:
relative_to = None
else:
# Should not raise ImportError. If it does...
# weird and unexpected. Propagate.
relative_to = importer(
'.'.join(module_path), '', '', [module_path[-1]])
elif (isinstance(relative_to, type) and
issubclass(relative_to, Message)):
parent = relative_to.message_definition()
if parent is None:
last_module_name = relative_to.__module__.split(
'.')[-1]
relative_to = importer(
relative_to.__module__, '', '', [last_module_name])
else:
relative_to = parent | [
"def",
"find_definition",
"(",
"name",
",",
"relative_to",
"=",
"None",
",",
"importer",
"=",
"__import__",
")",
":",
"# Check parameters.",
"if",
"not",
"(",
"relative_to",
"is",
"None",
"or",
"isinstance",
"(",
"relative_to",
",",
"types",
".",
"ModuleType",... | Find definition by name in module-space.
The find algorithm will look for definitions by name relative to a
message definition or by fully qualified name. If no definition is
found relative to the relative_to parameter it will do the same
search against the container of relative_to. If relative_to is a
nested Message, it will search its message_definition(). If that
message has no message_definition() it will search its module. If
relative_to is a module, it will attempt to look for the
containing module and search relative to it. If the module is a
top-level module, it will look for the a message using a fully
qualified name. If no message is found then, the search fails and
DefinitionNotFoundError is raised.
For example, when looking for any definition 'foo.bar.ADefinition'
relative to an actual message definition abc.xyz.SomeMessage:
find_definition('foo.bar.ADefinition', SomeMessage)
It is like looking for the following fully qualified names:
abc.xyz.SomeMessage. foo.bar.ADefinition
abc.xyz. foo.bar.ADefinition
abc. foo.bar.ADefinition
foo.bar.ADefinition
When resolving the name relative to Message definitions and modules, the
algorithm searches any Messages or sub-modules found in its path.
Non-Message values are not searched.
A name that begins with '.' is considered to be a fully qualified
name. The name is always searched for from the topmost package.
For example, assume two message types:
abc.xyz.SomeMessage
xyz.SomeMessage
Searching for '.xyz.SomeMessage' relative to 'abc' will resolve to
'xyz.SomeMessage' and not 'abc.xyz.SomeMessage'. For this kind of name,
the relative_to parameter is effectively ignored and always set to None.
For more information about package name resolution, please see:
http://code.google.com/apis/protocolbuffers/docs/proto.html#packages
Args:
name: Name of definition to find. May be fully qualified or relative
name.
relative_to: Search for definition relative to message definition or
module. None will cause a fully qualified name search.
importer: Import function to use for resolving modules.
Returns:
Enum or Message class definition associated with name.
Raises:
DefinitionNotFoundError if no definition is found in any search path. | [
"Find",
"definition",
"by",
"name",
"in",
"module",
"-",
"space",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1845-L2003 | train | 207,744 |
def definition_name(cls):
    """Compute the dot-separated, fully qualified name of this definition.

    The name combines the definition's package, the scope of any
    enclosing definition, and the class name itself.  The package
    portion normally comes from the module name, but a module may
    override it with a module-level 'package' attribute:

        package = 'some.alternate.package'

        class MyMessage(Message):
            ...

        >>> MyMessage.definition_name()
        some.alternate.package.MyMessage

    Returns:
        Dot-separated fully qualified name of definition.
    """
    scope = cls.outer_definition_name()
    if scope is None:
        # No package and no outer definition: just the bare class name.
        return six.text_type(cls.__name__)
    return u'%s.%s' % (scope, cls.__name__)
"def",
"definition_name",
"(",
"cls",
")",
":",
"outer_definition_name",
"=",
"cls",
".",
"outer_definition_name",
"(",
")",
"if",
"outer_definition_name",
"is",
"None",
":",
"return",
"six",
".",
"text_type",
"(",
"cls",
".",
"__name__",
")",
"return",
"u'%s.... | Helper method for creating definition name.
Names will be generated to include the classes package name,
scope (if the class is nested in another definition) and class
name.
By default, the package name for a definition is derived from
its module name. However, this value can be overriden by
placing a 'package' attribute in the module that contains the
definition class. For example:
package = 'some.alternate.package'
class MyMessage(Message):
...
>>> MyMessage.definition_name()
some.alternate.package.MyMessage
Returns:
Dot-separated fully qualified name of definition. | [
"Helper",
"method",
"for",
"creating",
"definition",
"name",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L227-L254 | train | 207,745 |
def outer_definition_name(cls):
    """Compute the name of the scope containing this definition.

    Returns:
        The fully qualified name of the enclosing definition when this
        definition is nested; otherwise the package name of the module
        that declares it.
    """
    container = cls.message_definition()
    if not container:
        # Not nested inside another Message: fall back to the package.
        return util.get_package_for_module(cls.__module__)
    return container.definition_name()
"def",
"outer_definition_name",
"(",
"cls",
")",
":",
"outer_definition",
"=",
"cls",
".",
"message_definition",
"(",
")",
"if",
"not",
"outer_definition",
":",
"return",
"util",
".",
"get_package_for_module",
"(",
"cls",
".",
"__module__",
")",
"return",
"outer... | Helper method for creating outer definition name.
Returns:
If definition is nested, will return the outer definitions
name, else the package name. | [
"Helper",
"method",
"for",
"creating",
"outer",
"definition",
"name",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L256-L267 | train | 207,746 |
def definition_package(cls):
    """Determine the package this definition belongs to.

    Returns:
        Name of package that definition belongs to.
    """
    container = cls.message_definition()
    if not container:
        # Top-level definition: the package is derived from the module.
        return util.get_package_for_module(cls.__module__)
    return container.definition_package()
"def",
"definition_package",
"(",
"cls",
")",
":",
"outer_definition",
"=",
"cls",
".",
"message_definition",
"(",
")",
"if",
"not",
"outer_definition",
":",
"return",
"util",
".",
"get_package_for_module",
"(",
"cls",
".",
"__module__",
")",
"return",
"outer_de... | Helper method for creating creating the package of a definition.
Returns:
Name of package that definition belongs to. | [
"Helper",
"method",
"for",
"creating",
"creating",
"the",
"package",
"of",
"a",
"definition",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L269-L278 | train | 207,747 |
def to_dict(cls):
    """Make dictionary version of enumerated class.

    Dictionary created this way can be used with def_num.

    Returns:
        A dict mapping each enum entry's name (str) to its number (int).
    """
    # Dict comprehension is the idiomatic form of dict((k, v) for ...);
    # iterating cls directly already invokes iter(), so the explicit
    # iter() call in the original was redundant.
    return {item.name: item.number for item in cls}
"def",
"to_dict",
"(",
"cls",
")",
":",
"return",
"dict",
"(",
"(",
"item",
".",
"name",
",",
"item",
".",
"number",
")",
"for",
"item",
"in",
"iter",
"(",
"cls",
")",
")"
] | Make dictionary version of enumerated class.
Dictionary created this way can be used with def_num.
Returns:
A dict (name) -> number | [
"Make",
"dictionary",
"version",
"of",
"enumerated",
"class",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L526-L534 | train | 207,748 |
def check_initialized(self):
    """Verify that every required field of this message is set.

    Nested message values are checked recursively so that a missing
    required field anywhere in the tree is reported.

    Raises:
        ValidationError: If the message (or a nested message) is missing
            a required field.  The error is annotated once with the name
            of the outermost message type that failed.
    """
    for name, field in self.__by_name.items():
        value = getattr(self, name)
        if value is None:
            if field.required:
                raise ValidationError(
                    "Message %s is missing required field %s" %
                    (type(self).__name__, name))
            continue
        try:
            is_submessage = (isinstance(field, MessageField) and
                             issubclass(field.message_type, Message))
            if is_submessage:
                # Recurse into nested messages; repeated fields are
                # checked element by element.
                items = value if field.repeated else [value]
                for item in items:
                    field.value_to_message(item).check_initialized()
        except ValidationError as err:
            # Tag the error with the outermost failing message name once.
            if not hasattr(err, 'message_name'):
                err.message_name = type(self).__name__
            raise
"def",
"check_initialized",
"(",
"self",
")",
":",
"for",
"name",
",",
"field",
"in",
"self",
".",
"__by_name",
".",
"items",
"(",
")",
":",
"value",
"=",
"getattr",
"(",
"self",
",",
"name",
")",
"if",
"value",
"is",
"None",
":",
"if",
"field",
".... | Check class for initialization status.
Check that all required fields are initialized
Raises:
ValidationError: If message is not initialized. | [
"Check",
"class",
"for",
"initialization",
"status",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L799-L829 | train | 207,749 |
def get_assigned_value(self, name):
    """Get the underlying assigned value of an attribute.

    Unlike normal attribute access, this does not fall back to the
    field's default when no value has been assigned.

    Args:
        name: Name of attribute to get.

    Returns:
        Value of attribute, or None if it has not been set.

    Raises:
        AttributeError: If the message type has no field by that name.
    """
    cls = type(self)
    try:
        field = cls.field_by_name(name)
    except KeyError:
        raise AttributeError(
            'Message %s has no field %s' % (cls.__name__, name))
    return self.__tags.get(field.number)
"def",
"get_assigned_value",
"(",
"self",
",",
"name",
")",
":",
"message_type",
"=",
"type",
"(",
"self",
")",
"try",
":",
"field",
"=",
"message_type",
".",
"field_by_name",
"(",
"name",
")",
"except",
"KeyError",
":",
"raise",
"AttributeError",
"(",
"'M... | Get the assigned value of an attribute.
Get the underlying value of an attribute. If value has not
been set, will not return the default for the field.
Args:
name: Name of attribute to get.
Returns:
Value of attribute, None if it has not been set. | [
"Get",
"the",
"assigned",
"value",
"of",
"an",
"attribute",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L879-L898 | train | 207,750 |
def reset(self, name):
    """Reset a field's assigned value to its default state.

    After a reset, a repeated field becomes an empty list and any other
    field reverts to its default or None.

    Args:
        name: Name of field to reset.

    Raises:
        AttributeError: If the message type has no field by that name.
    """
    cls = type(self)
    try:
        field = cls.field_by_name(name)
    except KeyError:
        if name not in cls.__by_name:
            raise AttributeError(
                'Message %s has no field %s' % (cls.__name__, name))
    if field.repeated:
        # Repeated fields reset to an empty list that still validates
        # every element added to it.
        self.__tags[field.number] = FieldList(field, [])
    else:
        self.__tags.pop(field.number, None)
"def",
"reset",
"(",
"self",
",",
"name",
")",
":",
"message_type",
"=",
"type",
"(",
"self",
")",
"try",
":",
"field",
"=",
"message_type",
".",
"field_by_name",
"(",
"name",
")",
"except",
"KeyError",
":",
"if",
"name",
"not",
"in",
"message_type",
"... | Reset assigned value for field.
Resetting a field will return it to its default value or None.
Args:
name: Name of field to reset. | [
"Reset",
"assigned",
"value",
"for",
"field",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L900-L918 | train | 207,751 |
def get_unrecognized_field_info(self, key, value_default=None,
                                variant_default=None):
    """Look up the value and variant of an unknown field on this message.

    Args:
        key: The name or number of the field to retrieve.
        value_default: Value to return if the key isn't found.
        variant_default: Variant to return if the key isn't found.

    Returns:
        A (value, variant) pair, as previously recorded by
        set_unrecognized_field, or the supplied defaults.
    """
    try:
        value, variant = self.__unrecognized_fields[key]
    except KeyError:
        value, variant = value_default, variant_default
    return value, variant
"def",
"get_unrecognized_field_info",
"(",
"self",
",",
"key",
",",
"value_default",
"=",
"None",
",",
"variant_default",
"=",
"None",
")",
":",
"value",
",",
"variant",
"=",
"self",
".",
"__unrecognized_fields",
".",
"get",
"(",
"key",
",",
"(",
"value_defa... | Get the value and variant of an unknown field in this message.
Args:
key: The name or number of the field to retrieve.
value_default: Value to be returned if the key isn't found.
variant_default: Value to be returned as variant if the key isn't
found.
Returns:
(value, variant), where value and variant are whatever was passed
to set_unrecognized_field. | [
"Get",
"the",
"value",
"and",
"variant",
"of",
"an",
"unknown",
"field",
"in",
"this",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L924-L940 | train | 207,752 |
def set_unrecognized_field(self, key, value, variant):
    """Record an unrecognized field encountered while decoding a message.

    Args:
        key: The name or number used to refer to this unknown value.
        value: The value of the field.
        variant: Type information needed to interpret the value or
            re-encode it.

    Raises:
        TypeError: If the variant is not an instance of messages.Variant.
    """
    if isinstance(variant, Variant):
        self.__unrecognized_fields[key] = value, variant
    else:
        raise TypeError('Variant type %s is not valid.' % variant)
"def",
"set_unrecognized_field",
"(",
"self",
",",
"key",
",",
"value",
",",
"variant",
")",
":",
"if",
"not",
"isinstance",
"(",
"variant",
",",
"Variant",
")",
":",
"raise",
"TypeError",
"(",
"'Variant type %s is not valid.'",
"%",
"variant",
")",
"self",
... | Set an unrecognized field, used when decoding a message.
Args:
key: The name or number used to refer to this unknown value.
value: The value of the field.
variant: Type information needed to interpret the value or re-encode
it.
Raises:
TypeError: If the variant is not an instance of messages.Variant. | [
"Set",
"an",
"unrecognized",
"field",
"used",
"when",
"decoding",
"a",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L942-L956 | train | 207,753 |
def append(self, value):
    """Validate *value* against this list's field, then append it."""
    # Reject invalid elements before mutating the underlying list.
    owning_field = self.__field
    owning_field.validate_element(value)
    return list.append(self, value)
"def",
"append",
"(",
"self",
",",
"value",
")",
":",
"self",
".",
"__field",
".",
"validate_element",
"(",
"value",
")",
"return",
"list",
".",
"append",
"(",
"self",
",",
"value",
")"
] | Validate item appending to list. | [
"Validate",
"item",
"appending",
"to",
"list",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1140-L1143 | train | 207,754 |
def extend(self, sequence):
    """Validate the whole *sequence*, then extend the list with it."""
    # The field validates the sequence as a unit (type and elements).
    owning_field = self.__field
    owning_field.validate(sequence)
    return list.extend(self, sequence)
"def",
"extend",
"(",
"self",
",",
"sequence",
")",
":",
"self",
".",
"__field",
".",
"validate",
"(",
"sequence",
")",
"return",
"list",
".",
"extend",
"(",
"self",
",",
"sequence",
")"
] | Validate extension of list. | [
"Validate",
"extension",
"of",
"list",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1145-L1148 | train | 207,755 |
def insert(self, index, value):
    """Validate *value* against this list's field, then insert it."""
    # Reject invalid elements before mutating the underlying list.
    owning_field = self.__field
    owning_field.validate_element(value)
    return list.insert(self, index, value)
"def",
"insert",
"(",
"self",
",",
"index",
",",
"value",
")",
":",
"self",
".",
"__field",
".",
"validate_element",
"(",
"value",
")",
"return",
"list",
".",
"insert",
"(",
"self",
",",
"index",
",",
"value",
")"
] | Validate item insertion to list. | [
"Validate",
"item",
"insertion",
"to",
"list",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1150-L1153 | train | 207,756 |
def validate_element(self, value):
    """Validate a single element of this field.

    Unlike validate(), this operates on an individual value of a
    repeated field rather than on the whole sequence.

    Args:
        value: Value to validate.

    Returns:
        The value cast to the expected type; ints are accepted for
        float fields and converted.

    Raises:
        ValidationError: If value is not of the expected type.
    """
    if isinstance(value, self.type):
        return value
    # Authorize int values as float.
    if isinstance(value, six.integer_types) and self.type == float:
        return float(value)
    if value is None:
        if self.required:
            raise ValidationError('Required field is missing')
        return value
    try:
        name = self.name
    except AttributeError:
        # Field not yet bound to a message: report the field class name.
        raise ValidationError('Expected type %s for %s, '
                              'found %s (type %s)' %
                              (self.type, self.__class__.__name__,
                               value, type(value)))
    raise ValidationError(
        'Expected type %s for field %s, found %s (type %s)' %
        (self.type, name, value, type(value)))
"def",
"validate_element",
"(",
"self",
",",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"self",
".",
"type",
")",
":",
"# Authorize int values as float.",
"if",
"isinstance",
"(",
"value",
",",
"six",
".",
"integer_types",
")",
"and",
... | Validate single element of field.
This is different from validate in that it is used on individual
values of repeated fields.
Args:
value: Value to validate.
Returns:
The value casted in the expected type.
Raises:
ValidationError if value is not expected type. | [
"Validate",
"single",
"element",
"of",
"field",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1305-L1341 | train | 207,757 |
def __validate(self, value, validate_element):
    """Validate a field value using an element-level validator.

    Args:
        value: Value to validate.  For repeated fields this must be a
            list or tuple (or None).
        validate_element: Function used to validate individual elements.

    Returns:
        The validated value; for repeated fields, a new list of
        validated elements.

    Raises:
        ValidationError: If value is not of the expected shape/type.
    """
    if not self.repeated:
        return validate_element(value)
    if isinstance(value, (list, tuple)):
        # Validate element-by-element; None entries are never allowed
        # inside a repeated field.
        validated = []
        for element in value:
            if element is None:
                try:
                    name = self.name
                except AttributeError:
                    raise ValidationError(
                        'Repeated values for %s '
                        'may not be None' % self.__class__.__name__)
                else:
                    raise ValidationError(
                        'Repeated values for field %s '
                        'may not be None' % name)
            validated.append(validate_element(element))
        return validated
    if value is not None:
        # Repeated fields only accept a list or tuple (or None).
        try:
            name = self.name
        except AttributeError:
            raise ValidationError('%s is repeated. Found: %s' % (
                self.__class__.__name__, value))
        else:
            raise ValidationError(
                'Field %s is repeated. Found: %s' % (name, value))
    return value
"def",
"__validate",
"(",
"self",
",",
"value",
",",
"validate_element",
")",
":",
"if",
"not",
"self",
".",
"repeated",
":",
"return",
"validate_element",
"(",
"value",
")",
"else",
":",
"# Must be a list or tuple, may not be a string.",
"if",
"isinstance",
"(",
... | Internal validation function.
Validate an internal value using a function to validate
individual elements.
Args:
value: Value to validate.
validate_element: Function to use to validate individual elements.
Raises:
ValidationError if value is not expected type. | [
"Internal",
"validation",
"function",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1343-L1386 | train | 207,758 |
def validate_element(self, value):
    """Validate StringField allowing for str and unicode.

    A bytes value is considered valid only if it decodes as UTF-8;
    any other value is delegated to the base Field validation.

    Raises:
        ValidationError: If a bytes value is not valid UTF-8.
    """
    # If value is str it is considered valid. Satisfies "required=True".
    if isinstance(value, bytes):
        try:
            six.text_type(value, 'UTF-8')
        except UnicodeDecodeError as err:
            try:
                name = self.name
            except AttributeError:
                # BUG FIX: the original unconditionally executed
                # `validation_error.field_name = self.name` after this
                # branch, which re-raised AttributeError for unbound
                # fields so the ValidationError below was never raised.
                validation_error = ValidationError(
                    'Field encountered non-UTF-8 string %r: %s' % (value,
                                                                   err))
            else:
                validation_error = ValidationError(
                    'Field %s encountered non-UTF-8 string %r: %s' % (
                        name, value, err))
                # Only annotate with the field name when it exists.
                validation_error.field_name = name
            raise validation_error
    else:
        return super(StringField, self).validate_element(value)
    return value
"def",
"validate_element",
"(",
"self",
",",
"value",
")",
":",
"# If value is str is it considered valid. Satisfies \"required=True\".",
"if",
"isinstance",
"(",
"value",
",",
"bytes",
")",
":",
"try",
":",
"six",
".",
"text_type",
"(",
"value",
",",
"'UTF-8'",
... | Validate StringField allowing for str and unicode.
Raises:
ValidationError if a str value is not UTF-8. | [
"Validate",
"StringField",
"allowing",
"for",
"str",
"and",
"unicode",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1519-L1544 | train | 207,759 |
def type(self):
    """Message type used for field.

    The type name given at construction time is resolved to an actual
    Message subclass on first access and cached for later lookups.

    Raises:
        FieldDefinitionError: If the resolved definition is not a valid
            Message subclass.
    """
    if self.__type is None:
        # Resolve lazily so forward references by name are supported.
        resolved = find_definition(
            self.__type_name, self.message_definition())
        if not (resolved is not Message and
                isinstance(resolved, type) and
                issubclass(resolved, Message)):
            raise FieldDefinitionError(
                'Invalid message class: %s' % resolved)
        self.__type = resolved
    return self.__type
"def",
"type",
"(",
"self",
")",
":",
"if",
"self",
".",
"__type",
"is",
"None",
":",
"message_type",
"=",
"find_definition",
"(",
"self",
".",
"__type_name",
",",
"self",
".",
"message_definition",
"(",
")",
")",
"if",
"not",
"(",
"message_type",
"is",
... | Message type used for field. | [
"Message",
"type",
"used",
"for",
"field",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1655-L1666 | train | 207,760 |
google/apitools | apitools/base/protorpclite/messages.py | MessageField.value_from_message | def value_from_message(self, message):
"""Convert a message to a value instance.
Used by deserializers to convert from underlying messages to
value of expected user type.
Args:
message: A message instance of type self.message_type.
Returns:
Value of self.message_type.
"""
if not isinstance(message, self.message_type):
raise DecodeError('Expected type %s, got %s: %r' %
(self.message_type.__name__,
type(message).__name__,
message))
return message | python | def value_from_message(self, message):
"""Convert a message to a value instance.
Used by deserializers to convert from underlying messages to
value of expected user type.
Args:
message: A message instance of type self.message_type.
Returns:
Value of self.message_type.
"""
if not isinstance(message, self.message_type):
raise DecodeError('Expected type %s, got %s: %r' %
(self.message_type.__name__,
type(message).__name__,
message))
return message | [
"def",
"value_from_message",
"(",
"self",
",",
"message",
")",
":",
"if",
"not",
"isinstance",
"(",
"message",
",",
"self",
".",
"message_type",
")",
":",
"raise",
"DecodeError",
"(",
"'Expected type %s, got %s: %r'",
"%",
"(",
"self",
".",
"message_type",
"."... | Convert a message to a value instance.
Used by deserializers to convert from underlying messages to
value of expected user type.
Args:
message: A message instance of type self.message_type.
Returns:
Value of self.message_type. | [
"Convert",
"a",
"message",
"to",
"a",
"value",
"instance",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1678-L1695 | train | 207,761 |
google/apitools | apitools/base/protorpclite/messages.py | MessageField.value_to_message | def value_to_message(self, value):
"""Convert a value instance to a message.
Used by serializers to convert Python user types to underlying
messages for transmission.
Args:
value: A value of type self.type.
Returns:
An instance of type self.message_type.
"""
if not isinstance(value, self.type):
raise EncodeError('Expected type %s, got %s: %r' %
(self.type.__name__,
type(value).__name__,
value))
return value | python | def value_to_message(self, value):
"""Convert a value instance to a message.
Used by serializers to convert Python user types to underlying
messages for transmission.
Args:
value: A value of type self.type.
Returns:
An instance of type self.message_type.
"""
if not isinstance(value, self.type):
raise EncodeError('Expected type %s, got %s: %r' %
(self.type.__name__,
type(value).__name__,
value))
return value | [
"def",
"value_to_message",
"(",
"self",
",",
"value",
")",
":",
"if",
"not",
"isinstance",
"(",
"value",
",",
"self",
".",
"type",
")",
":",
"raise",
"EncodeError",
"(",
"'Expected type %s, got %s: %r'",
"%",
"(",
"self",
".",
"type",
".",
"__name__",
",",... | Convert a value instance to a message.
Used by serializers to convert Python user types to underlying
messages for transmission.
Args:
value: A value of type self.type.
Returns:
An instance of type self.message_type. | [
"Convert",
"a",
"value",
"instance",
"to",
"a",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1697-L1714 | train | 207,762 |
google/apitools | apitools/base/protorpclite/messages.py | EnumField.validate_default_element | def validate_default_element(self, value):
"""Validate default element of Enum field.
Enum fields allow for delayed resolution of default values
when the type of the field has not been resolved. The default
value of a field may be a string or an integer. If the Enum
type of the field has been resolved, the default value is
validated against that type.
Args:
value: Value to validate.
Raises:
ValidationError if value is not expected message type.
"""
if isinstance(value, (six.string_types, six.integer_types)):
# Validation of the value does not happen for delayed resolution
# enumerated types. Ignore if type is not yet resolved.
if self.__type:
self.__type(value)
return value
return super(EnumField, self).validate_default_element(value) | python | def validate_default_element(self, value):
"""Validate default element of Enum field.
Enum fields allow for delayed resolution of default values
when the type of the field has not been resolved. The default
value of a field may be a string or an integer. If the Enum
type of the field has been resolved, the default value is
validated against that type.
Args:
value: Value to validate.
Raises:
ValidationError if value is not expected message type.
"""
if isinstance(value, (six.string_types, six.integer_types)):
# Validation of the value does not happen for delayed resolution
# enumerated types. Ignore if type is not yet resolved.
if self.__type:
self.__type(value)
return value
return super(EnumField, self).validate_default_element(value) | [
"def",
"validate_default_element",
"(",
"self",
",",
"value",
")",
":",
"if",
"isinstance",
"(",
"value",
",",
"(",
"six",
".",
"string_types",
",",
"six",
".",
"integer_types",
")",
")",
":",
"# Validation of the value does not happen for delayed resolution",
"# en... | Validate default element of Enum field.
Enum fields allow for delayed resolution of default values
when the type of the field has not been resolved. The default
value of a field may be a string or an integer. If the Enum
type of the field has been resolved, the default value is
validated against that type.
Args:
value: Value to validate.
Raises:
ValidationError if value is not expected message type. | [
"Validate",
"default",
"element",
"of",
"Enum",
"field",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1786-L1809 | train | 207,763 |
google/apitools | apitools/base/protorpclite/messages.py | EnumField.type | def type(self):
"""Enum type used for field."""
if self.__type is None:
found_type = find_definition(
self.__type_name, self.message_definition())
if not (found_type is not Enum and
isinstance(found_type, type) and
issubclass(found_type, Enum)):
raise FieldDefinitionError(
'Invalid enum type: %s' % found_type)
self.__type = found_type
return self.__type | python | def type(self):
"""Enum type used for field."""
if self.__type is None:
found_type = find_definition(
self.__type_name, self.message_definition())
if not (found_type is not Enum and
isinstance(found_type, type) and
issubclass(found_type, Enum)):
raise FieldDefinitionError(
'Invalid enum type: %s' % found_type)
self.__type = found_type
return self.__type | [
"def",
"type",
"(",
"self",
")",
":",
"if",
"self",
".",
"__type",
"is",
"None",
":",
"found_type",
"=",
"find_definition",
"(",
"self",
".",
"__type_name",
",",
"self",
".",
"message_definition",
"(",
")",
")",
"if",
"not",
"(",
"found_type",
"is",
"n... | Enum type used for field. | [
"Enum",
"type",
"used",
"for",
"field",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1812-L1824 | train | 207,764 |
google/apitools | apitools/base/protorpclite/messages.py | EnumField.default | def default(self):
"""Default for enum field.
Will cause resolution of Enum type and unresolved default value.
"""
try:
return self.__resolved_default
except AttributeError:
resolved_default = super(EnumField, self).default
if isinstance(resolved_default, (six.string_types,
six.integer_types)):
# pylint:disable=not-callable
resolved_default = self.type(resolved_default)
self.__resolved_default = resolved_default
return self.__resolved_default | python | def default(self):
"""Default for enum field.
Will cause resolution of Enum type and unresolved default value.
"""
try:
return self.__resolved_default
except AttributeError:
resolved_default = super(EnumField, self).default
if isinstance(resolved_default, (six.string_types,
six.integer_types)):
# pylint:disable=not-callable
resolved_default = self.type(resolved_default)
self.__resolved_default = resolved_default
return self.__resolved_default | [
"def",
"default",
"(",
"self",
")",
":",
"try",
":",
"return",
"self",
".",
"__resolved_default",
"except",
"AttributeError",
":",
"resolved_default",
"=",
"super",
"(",
"EnumField",
",",
"self",
")",
".",
"default",
"if",
"isinstance",
"(",
"resolved_default"... | Default for enum field.
Will cause resolution of Enum type and unresolved default value. | [
"Default",
"for",
"enum",
"field",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/protorpclite/messages.py#L1827-L1841 | train | 207,765 |
google/apitools | apitools/base/py/encoding_helper.py | MessageToJson | def MessageToJson(message, include_fields=None):
"""Convert the given message to JSON."""
result = _ProtoJsonApiTools.Get().encode_message(message)
return _IncludeFields(result, message, include_fields) | python | def MessageToJson(message, include_fields=None):
"""Convert the given message to JSON."""
result = _ProtoJsonApiTools.Get().encode_message(message)
return _IncludeFields(result, message, include_fields) | [
"def",
"MessageToJson",
"(",
"message",
",",
"include_fields",
"=",
"None",
")",
":",
"result",
"=",
"_ProtoJsonApiTools",
".",
"Get",
"(",
")",
".",
"encode_message",
"(",
"message",
")",
"return",
"_IncludeFields",
"(",
"result",
",",
"message",
",",
"incl... | Convert the given message to JSON. | [
"Convert",
"the",
"given",
"message",
"to",
"JSON",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L115-L118 | train | 207,766 |
google/apitools | apitools/base/py/encoding_helper.py | DictToAdditionalPropertyMessage | def DictToAdditionalPropertyMessage(properties, additional_property_type,
sort_items=False):
"""Convert the given dictionary to an AdditionalProperty message."""
items = properties.items()
if sort_items:
items = sorted(items)
map_ = []
for key, value in items:
map_.append(additional_property_type.AdditionalProperty(
key=key, value=value))
return additional_property_type(additionalProperties=map_) | python | def DictToAdditionalPropertyMessage(properties, additional_property_type,
sort_items=False):
"""Convert the given dictionary to an AdditionalProperty message."""
items = properties.items()
if sort_items:
items = sorted(items)
map_ = []
for key, value in items:
map_.append(additional_property_type.AdditionalProperty(
key=key, value=value))
return additional_property_type(additionalProperties=map_) | [
"def",
"DictToAdditionalPropertyMessage",
"(",
"properties",
",",
"additional_property_type",
",",
"sort_items",
"=",
"False",
")",
":",
"items",
"=",
"properties",
".",
"items",
"(",
")",
"if",
"sort_items",
":",
"items",
"=",
"sorted",
"(",
"items",
")",
"ma... | Convert the given dictionary to an AdditionalProperty message. | [
"Convert",
"the",
"given",
"dictionary",
"to",
"an",
"AdditionalProperty",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L137-L147 | train | 207,767 |
google/apitools | apitools/base/py/encoding_helper.py | MessageToRepr | def MessageToRepr(msg, multiline=False, **kwargs):
"""Return a repr-style string for a protorpc message.
protorpc.Message.__repr__ does not return anything that could be considered
python code. Adding this function lets us print a protorpc message in such
a way that it could be pasted into code later, and used to compare against
other things.
Args:
msg: protorpc.Message, the message to be repr'd.
multiline: bool, True if the returned string should have each field
assignment on its own line.
**kwargs: {str:str}, Additional flags for how to format the string.
Known **kwargs:
shortstrings: bool, True if all string values should be
truncated at 100 characters, since when mocking the contents
typically don't matter except for IDs, and IDs are usually
less than 100 characters.
no_modules: bool, True if the long module name should not be printed with
each type.
Returns:
str, A string of valid python (assuming the right imports have been made)
that recreates the message passed into this function.
"""
# TODO(jasmuth): craigcitro suggests a pretty-printer from apitools/gen.
indent = kwargs.get('indent', 0)
def IndentKwargs(kwargs):
kwargs = dict(kwargs)
kwargs['indent'] = kwargs.get('indent', 0) + 4
return kwargs
if isinstance(msg, list):
s = '['
for item in msg:
if multiline:
s += '\n' + ' ' * (indent + 4)
s += MessageToRepr(
item, multiline=multiline, **IndentKwargs(kwargs)) + ','
if multiline:
s += '\n' + ' ' * indent
s += ']'
return s
if isinstance(msg, messages.Message):
s = type(msg).__name__ + '('
if not kwargs.get('no_modules'):
s = msg.__module__ + '.' + s
names = sorted([field.name for field in msg.all_fields()])
for name in names:
field = msg.field_by_name(name)
if multiline:
s += '\n' + ' ' * (indent + 4)
value = getattr(msg, field.name)
s += field.name + '=' + MessageToRepr(
value, multiline=multiline, **IndentKwargs(kwargs)) + ','
if multiline:
s += '\n' + ' ' * indent
s += ')'
return s
if isinstance(msg, six.string_types):
if kwargs.get('shortstrings') and len(msg) > 100:
msg = msg[:100]
if isinstance(msg, datetime.datetime):
class SpecialTZInfo(datetime.tzinfo):
def __init__(self, offset):
super(SpecialTZInfo, self).__init__()
self.offset = offset
def __repr__(self):
s = 'TimeZoneOffset(' + repr(self.offset) + ')'
if not kwargs.get('no_modules'):
s = 'apitools.base.protorpclite.util.' + s
return s
msg = datetime.datetime(
msg.year, msg.month, msg.day, msg.hour, msg.minute, msg.second,
msg.microsecond, SpecialTZInfo(msg.tzinfo.utcoffset(0)))
return repr(msg) | python | def MessageToRepr(msg, multiline=False, **kwargs):
"""Return a repr-style string for a protorpc message.
protorpc.Message.__repr__ does not return anything that could be considered
python code. Adding this function lets us print a protorpc message in such
a way that it could be pasted into code later, and used to compare against
other things.
Args:
msg: protorpc.Message, the message to be repr'd.
multiline: bool, True if the returned string should have each field
assignment on its own line.
**kwargs: {str:str}, Additional flags for how to format the string.
Known **kwargs:
shortstrings: bool, True if all string values should be
truncated at 100 characters, since when mocking the contents
typically don't matter except for IDs, and IDs are usually
less than 100 characters.
no_modules: bool, True if the long module name should not be printed with
each type.
Returns:
str, A string of valid python (assuming the right imports have been made)
that recreates the message passed into this function.
"""
# TODO(jasmuth): craigcitro suggests a pretty-printer from apitools/gen.
indent = kwargs.get('indent', 0)
def IndentKwargs(kwargs):
kwargs = dict(kwargs)
kwargs['indent'] = kwargs.get('indent', 0) + 4
return kwargs
if isinstance(msg, list):
s = '['
for item in msg:
if multiline:
s += '\n' + ' ' * (indent + 4)
s += MessageToRepr(
item, multiline=multiline, **IndentKwargs(kwargs)) + ','
if multiline:
s += '\n' + ' ' * indent
s += ']'
return s
if isinstance(msg, messages.Message):
s = type(msg).__name__ + '('
if not kwargs.get('no_modules'):
s = msg.__module__ + '.' + s
names = sorted([field.name for field in msg.all_fields()])
for name in names:
field = msg.field_by_name(name)
if multiline:
s += '\n' + ' ' * (indent + 4)
value = getattr(msg, field.name)
s += field.name + '=' + MessageToRepr(
value, multiline=multiline, **IndentKwargs(kwargs)) + ','
if multiline:
s += '\n' + ' ' * indent
s += ')'
return s
if isinstance(msg, six.string_types):
if kwargs.get('shortstrings') and len(msg) > 100:
msg = msg[:100]
if isinstance(msg, datetime.datetime):
class SpecialTZInfo(datetime.tzinfo):
def __init__(self, offset):
super(SpecialTZInfo, self).__init__()
self.offset = offset
def __repr__(self):
s = 'TimeZoneOffset(' + repr(self.offset) + ')'
if not kwargs.get('no_modules'):
s = 'apitools.base.protorpclite.util.' + s
return s
msg = datetime.datetime(
msg.year, msg.month, msg.day, msg.hour, msg.minute, msg.second,
msg.microsecond, SpecialTZInfo(msg.tzinfo.utcoffset(0)))
return repr(msg) | [
"def",
"MessageToRepr",
"(",
"msg",
",",
"multiline",
"=",
"False",
",",
"*",
"*",
"kwargs",
")",
":",
"# TODO(jasmuth): craigcitro suggests a pretty-printer from apitools/gen.",
"indent",
"=",
"kwargs",
".",
"get",
"(",
"'indent'",
",",
"0",
")",
"def",
"IndentKw... | Return a repr-style string for a protorpc message.
protorpc.Message.__repr__ does not return anything that could be considered
python code. Adding this function lets us print a protorpc message in such
a way that it could be pasted into code later, and used to compare against
other things.
Args:
msg: protorpc.Message, the message to be repr'd.
multiline: bool, True if the returned string should have each field
assignment on its own line.
**kwargs: {str:str}, Additional flags for how to format the string.
Known **kwargs:
shortstrings: bool, True if all string values should be
truncated at 100 characters, since when mocking the contents
typically don't matter except for IDs, and IDs are usually
less than 100 characters.
no_modules: bool, True if the long module name should not be printed with
each type.
Returns:
str, A string of valid python (assuming the right imports have been made)
that recreates the message passed into this function. | [
"Return",
"a",
"repr",
"-",
"style",
"string",
"for",
"a",
"protorpc",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L160-L248 | train | 207,768 |
google/apitools | apitools/base/py/encoding_helper.py | _IncludeFields | def _IncludeFields(encoded_message, message, include_fields):
"""Add the requested fields to the encoded message."""
if include_fields is None:
return encoded_message
result = json.loads(encoded_message)
for field_name in include_fields:
try:
value = _GetField(message, field_name.split('.'))
nullvalue = None
if isinstance(value, list):
nullvalue = []
except KeyError:
raise exceptions.InvalidDataError(
'No field named %s in message of type %s' % (
field_name, type(message)))
_SetField(result, field_name.split('.'), nullvalue)
return json.dumps(result) | python | def _IncludeFields(encoded_message, message, include_fields):
"""Add the requested fields to the encoded message."""
if include_fields is None:
return encoded_message
result = json.loads(encoded_message)
for field_name in include_fields:
try:
value = _GetField(message, field_name.split('.'))
nullvalue = None
if isinstance(value, list):
nullvalue = []
except KeyError:
raise exceptions.InvalidDataError(
'No field named %s in message of type %s' % (
field_name, type(message)))
_SetField(result, field_name.split('.'), nullvalue)
return json.dumps(result) | [
"def",
"_IncludeFields",
"(",
"encoded_message",
",",
"message",
",",
"include_fields",
")",
":",
"if",
"include_fields",
"is",
"None",
":",
"return",
"encoded_message",
"result",
"=",
"json",
".",
"loads",
"(",
"encoded_message",
")",
"for",
"field_name",
"in",... | Add the requested fields to the encoded message. | [
"Add",
"the",
"requested",
"fields",
"to",
"the",
"encoded",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L265-L281 | train | 207,769 |
google/apitools | apitools/base/py/encoding_helper.py | _DecodeUnknownFields | def _DecodeUnknownFields(message, encoded_message):
"""Rewrite unknown fields in message into message.destination."""
destination = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
if destination is None:
return message
pair_field = message.field_by_name(destination)
if not isinstance(pair_field, messages.MessageField):
raise exceptions.InvalidDataFromServerError(
'Unrecognized fields must be mapped to a compound '
'message type.')
pair_type = pair_field.message_type
# TODO(craigcitro): Add more error checking around the pair
# type being exactly what we suspect (field names, etc).
if isinstance(pair_type.value, messages.MessageField):
new_values = _DecodeUnknownMessages(
message, json.loads(encoded_message), pair_type)
else:
new_values = _DecodeUnrecognizedFields(message, pair_type)
setattr(message, destination, new_values)
# We could probably get away with not setting this, but
# why not clear it?
setattr(message, '_Message__unrecognized_fields', {})
return message | python | def _DecodeUnknownFields(message, encoded_message):
"""Rewrite unknown fields in message into message.destination."""
destination = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
if destination is None:
return message
pair_field = message.field_by_name(destination)
if not isinstance(pair_field, messages.MessageField):
raise exceptions.InvalidDataFromServerError(
'Unrecognized fields must be mapped to a compound '
'message type.')
pair_type = pair_field.message_type
# TODO(craigcitro): Add more error checking around the pair
# type being exactly what we suspect (field names, etc).
if isinstance(pair_type.value, messages.MessageField):
new_values = _DecodeUnknownMessages(
message, json.loads(encoded_message), pair_type)
else:
new_values = _DecodeUnrecognizedFields(message, pair_type)
setattr(message, destination, new_values)
# We could probably get away with not setting this, but
# why not clear it?
setattr(message, '_Message__unrecognized_fields', {})
return message | [
"def",
"_DecodeUnknownFields",
"(",
"message",
",",
"encoded_message",
")",
":",
"destination",
"=",
"_UNRECOGNIZED_FIELD_MAPPINGS",
".",
"get",
"(",
"type",
"(",
"message",
")",
")",
"if",
"destination",
"is",
"None",
":",
"return",
"message",
"pair_field",
"="... | Rewrite unknown fields in message into message.destination. | [
"Rewrite",
"unknown",
"fields",
"in",
"message",
"into",
"message",
".",
"destination",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L392-L414 | train | 207,770 |
google/apitools | apitools/base/py/encoding_helper.py | _DecodeUnknownMessages | def _DecodeUnknownMessages(message, encoded_message, pair_type):
"""Process unknown fields in encoded_message of a message type."""
field_type = pair_type.value.type
new_values = []
all_field_names = [x.name for x in message.all_fields()]
for name, value_dict in six.iteritems(encoded_message):
if name in all_field_names:
continue
value = PyValueToMessage(field_type, value_dict)
if pair_type.value.repeated:
value = _AsMessageList(value)
new_pair = pair_type(key=name, value=value)
new_values.append(new_pair)
return new_values | python | def _DecodeUnknownMessages(message, encoded_message, pair_type):
"""Process unknown fields in encoded_message of a message type."""
field_type = pair_type.value.type
new_values = []
all_field_names = [x.name for x in message.all_fields()]
for name, value_dict in six.iteritems(encoded_message):
if name in all_field_names:
continue
value = PyValueToMessage(field_type, value_dict)
if pair_type.value.repeated:
value = _AsMessageList(value)
new_pair = pair_type(key=name, value=value)
new_values.append(new_pair)
return new_values | [
"def",
"_DecodeUnknownMessages",
"(",
"message",
",",
"encoded_message",
",",
"pair_type",
")",
":",
"field_type",
"=",
"pair_type",
".",
"value",
".",
"type",
"new_values",
"=",
"[",
"]",
"all_field_names",
"=",
"[",
"x",
".",
"name",
"for",
"x",
"in",
"m... | Process unknown fields in encoded_message of a message type. | [
"Process",
"unknown",
"fields",
"in",
"encoded_message",
"of",
"a",
"message",
"type",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L417-L430 | train | 207,771 |
google/apitools | apitools/base/py/encoding_helper.py | _DecodeUnrecognizedFields | def _DecodeUnrecognizedFields(message, pair_type):
"""Process unrecognized fields in message."""
new_values = []
codec = _ProtoJsonApiTools.Get()
for unknown_field in message.all_unrecognized_fields():
# TODO(craigcitro): Consider validating the variant if
# the assignment below doesn't take care of it. It may
# also be necessary to check it in the case that the
# type has multiple encodings.
value, _ = message.get_unrecognized_field_info(unknown_field)
value_type = pair_type.field_by_name('value')
if isinstance(value_type, messages.MessageField):
decoded_value = DictToMessage(value, pair_type.value.message_type)
else:
decoded_value = codec.decode_field(
pair_type.value, value)
try:
new_pair_key = str(unknown_field)
except UnicodeEncodeError:
new_pair_key = protojson.ProtoJson().decode_field(
pair_type.key, unknown_field)
new_pair = pair_type(key=new_pair_key, value=decoded_value)
new_values.append(new_pair)
return new_values | python | def _DecodeUnrecognizedFields(message, pair_type):
"""Process unrecognized fields in message."""
new_values = []
codec = _ProtoJsonApiTools.Get()
for unknown_field in message.all_unrecognized_fields():
# TODO(craigcitro): Consider validating the variant if
# the assignment below doesn't take care of it. It may
# also be necessary to check it in the case that the
# type has multiple encodings.
value, _ = message.get_unrecognized_field_info(unknown_field)
value_type = pair_type.field_by_name('value')
if isinstance(value_type, messages.MessageField):
decoded_value = DictToMessage(value, pair_type.value.message_type)
else:
decoded_value = codec.decode_field(
pair_type.value, value)
try:
new_pair_key = str(unknown_field)
except UnicodeEncodeError:
new_pair_key = protojson.ProtoJson().decode_field(
pair_type.key, unknown_field)
new_pair = pair_type(key=new_pair_key, value=decoded_value)
new_values.append(new_pair)
return new_values | [
"def",
"_DecodeUnrecognizedFields",
"(",
"message",
",",
"pair_type",
")",
":",
"new_values",
"=",
"[",
"]",
"codec",
"=",
"_ProtoJsonApiTools",
".",
"Get",
"(",
")",
"for",
"unknown_field",
"in",
"message",
".",
"all_unrecognized_fields",
"(",
")",
":",
"# TO... | Process unrecognized fields in message. | [
"Process",
"unrecognized",
"fields",
"in",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L433-L456 | train | 207,772 |
google/apitools | apitools/base/py/encoding_helper.py | _EncodeUnknownFields | def _EncodeUnknownFields(message):
"""Remap unknown fields in message out of message.source."""
source = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
if source is None:
return message
# CopyProtoMessage uses _ProtoJsonApiTools, which uses this message. Use
# the vanilla protojson-based copy function to avoid infinite recursion.
result = _CopyProtoMessageVanillaProtoJson(message)
pairs_field = message.field_by_name(source)
if not isinstance(pairs_field, messages.MessageField):
raise exceptions.InvalidUserInputError(
'Invalid pairs field %s' % pairs_field)
pairs_type = pairs_field.message_type
value_field = pairs_type.field_by_name('value')
value_variant = value_field.variant
pairs = getattr(message, source)
codec = _ProtoJsonApiTools.Get()
for pair in pairs:
encoded_value = codec.encode_field(value_field, pair.value)
result.set_unrecognized_field(pair.key, encoded_value, value_variant)
setattr(result, source, [])
return result | python | def _EncodeUnknownFields(message):
"""Remap unknown fields in message out of message.source."""
source = _UNRECOGNIZED_FIELD_MAPPINGS.get(type(message))
if source is None:
return message
# CopyProtoMessage uses _ProtoJsonApiTools, which uses this message. Use
# the vanilla protojson-based copy function to avoid infinite recursion.
result = _CopyProtoMessageVanillaProtoJson(message)
pairs_field = message.field_by_name(source)
if not isinstance(pairs_field, messages.MessageField):
raise exceptions.InvalidUserInputError(
'Invalid pairs field %s' % pairs_field)
pairs_type = pairs_field.message_type
value_field = pairs_type.field_by_name('value')
value_variant = value_field.variant
pairs = getattr(message, source)
codec = _ProtoJsonApiTools.Get()
for pair in pairs:
encoded_value = codec.encode_field(value_field, pair.value)
result.set_unrecognized_field(pair.key, encoded_value, value_variant)
setattr(result, source, [])
return result | [
"def",
"_EncodeUnknownFields",
"(",
"message",
")",
":",
"source",
"=",
"_UNRECOGNIZED_FIELD_MAPPINGS",
".",
"get",
"(",
"type",
"(",
"message",
")",
")",
"if",
"source",
"is",
"None",
":",
"return",
"message",
"# CopyProtoMessage uses _ProtoJsonApiTools, which uses t... | Remap unknown fields in message out of message.source. | [
"Remap",
"unknown",
"fields",
"in",
"message",
"out",
"of",
"message",
".",
"source",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L464-L485 | train | 207,773 |
google/apitools | apitools/base/py/encoding_helper.py | _SafeEncodeBytes | def _SafeEncodeBytes(field, value):
"""Encode the bytes in value as urlsafe base64."""
try:
if field.repeated:
result = [base64.urlsafe_b64encode(byte) for byte in value]
else:
result = base64.urlsafe_b64encode(value)
complete = True
except TypeError:
result = value
complete = False
return CodecResult(value=result, complete=complete) | python | def _SafeEncodeBytes(field, value):
"""Encode the bytes in value as urlsafe base64."""
try:
if field.repeated:
result = [base64.urlsafe_b64encode(byte) for byte in value]
else:
result = base64.urlsafe_b64encode(value)
complete = True
except TypeError:
result = value
complete = False
return CodecResult(value=result, complete=complete) | [
"def",
"_SafeEncodeBytes",
"(",
"field",
",",
"value",
")",
":",
"try",
":",
"if",
"field",
".",
"repeated",
":",
"result",
"=",
"[",
"base64",
".",
"urlsafe_b64encode",
"(",
"byte",
")",
"for",
"byte",
"in",
"value",
"]",
"else",
":",
"result",
"=",
... | Encode the bytes in value as urlsafe base64. | [
"Encode",
"the",
"bytes",
"in",
"value",
"as",
"urlsafe",
"base64",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L488-L499 | train | 207,774 |
google/apitools | apitools/base/py/encoding_helper.py | _SafeDecodeBytes | def _SafeDecodeBytes(unused_field, value):
"""Decode the urlsafe base64 value into bytes."""
try:
result = base64.urlsafe_b64decode(str(value))
complete = True
except TypeError:
result = value
complete = False
return CodecResult(value=result, complete=complete) | python | def _SafeDecodeBytes(unused_field, value):
"""Decode the urlsafe base64 value into bytes."""
try:
result = base64.urlsafe_b64decode(str(value))
complete = True
except TypeError:
result = value
complete = False
return CodecResult(value=result, complete=complete) | [
"def",
"_SafeDecodeBytes",
"(",
"unused_field",
",",
"value",
")",
":",
"try",
":",
"result",
"=",
"base64",
".",
"urlsafe_b64decode",
"(",
"str",
"(",
"value",
")",
")",
"complete",
"=",
"True",
"except",
"TypeError",
":",
"result",
"=",
"value",
"complet... | Decode the urlsafe base64 value into bytes. | [
"Decode",
"the",
"urlsafe",
"base64",
"value",
"into",
"bytes",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L502-L510 | train | 207,775 |
google/apitools | apitools/base/py/encoding_helper.py | _ProcessUnknownEnums | def _ProcessUnknownEnums(message, encoded_message):
"""Add unknown enum values from encoded_message as unknown fields.
ProtoRPC diverges from the usual protocol buffer behavior here and
doesn't allow unknown fields. Throwing on unknown fields makes it
impossible to let servers add new enum values and stay compatible
with older clients, which isn't reasonable for us. We simply store
unrecognized enum values as unknown fields, and all is well.
Args:
message: Proto message we've decoded thus far.
encoded_message: JSON string we're decoding.
Returns:
message, with any unknown enums stored as unrecognized fields.
"""
if not encoded_message:
return message
decoded_message = json.loads(six.ensure_str(encoded_message))
for field in message.all_fields():
if (isinstance(field, messages.EnumField) and
field.name in decoded_message and
message.get_assigned_value(field.name) is None):
message.set_unrecognized_field(
field.name, decoded_message[field.name], messages.Variant.ENUM)
return message | python | def _ProcessUnknownEnums(message, encoded_message):
"""Add unknown enum values from encoded_message as unknown fields.
ProtoRPC diverges from the usual protocol buffer behavior here and
doesn't allow unknown fields. Throwing on unknown fields makes it
impossible to let servers add new enum values and stay compatible
with older clients, which isn't reasonable for us. We simply store
unrecognized enum values as unknown fields, and all is well.
Args:
message: Proto message we've decoded thus far.
encoded_message: JSON string we're decoding.
Returns:
message, with any unknown enums stored as unrecognized fields.
"""
if not encoded_message:
return message
decoded_message = json.loads(six.ensure_str(encoded_message))
for field in message.all_fields():
if (isinstance(field, messages.EnumField) and
field.name in decoded_message and
message.get_assigned_value(field.name) is None):
message.set_unrecognized_field(
field.name, decoded_message[field.name], messages.Variant.ENUM)
return message | [
"def",
"_ProcessUnknownEnums",
"(",
"message",
",",
"encoded_message",
")",
":",
"if",
"not",
"encoded_message",
":",
"return",
"message",
"decoded_message",
"=",
"json",
".",
"loads",
"(",
"six",
".",
"ensure_str",
"(",
"encoded_message",
")",
")",
"for",
"fi... | Add unknown enum values from encoded_message as unknown fields.
ProtoRPC diverges from the usual protocol buffer behavior here and
doesn't allow unknown fields. Throwing on unknown fields makes it
impossible to let servers add new enum values and stay compatible
with older clients, which isn't reasonable for us. We simply store
unrecognized enum values as unknown fields, and all is well.
Args:
message: Proto message we've decoded thus far.
encoded_message: JSON string we're decoding.
Returns:
message, with any unknown enums stored as unrecognized fields. | [
"Add",
"unknown",
"enum",
"values",
"from",
"encoded_message",
"as",
"unknown",
"fields",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L513-L538 | train | 207,776 |
google/apitools | apitools/base/py/encoding_helper.py | _ProcessUnknownMessages | def _ProcessUnknownMessages(message, encoded_message):
"""Store any remaining unknown fields as strings.
ProtoRPC currently ignores unknown values for which no type can be
determined (and logs a "No variant found" message). For the purposes
of reserializing, this is quite harmful (since it throws away
information). Here we simply add those as unknown fields of type
string (so that they can easily be reserialized).
Args:
message: Proto message we've decoded thus far.
encoded_message: JSON string we're decoding.
Returns:
message, with any remaining unrecognized fields saved.
"""
if not encoded_message:
return message
decoded_message = json.loads(six.ensure_str(encoded_message))
message_fields = [x.name for x in message.all_fields()] + list(
message.all_unrecognized_fields())
missing_fields = [x for x in decoded_message.keys()
if x not in message_fields]
for field_name in missing_fields:
message.set_unrecognized_field(field_name, decoded_message[field_name],
messages.Variant.STRING)
return message | python | def _ProcessUnknownMessages(message, encoded_message):
"""Store any remaining unknown fields as strings.
ProtoRPC currently ignores unknown values for which no type can be
determined (and logs a "No variant found" message). For the purposes
of reserializing, this is quite harmful (since it throws away
information). Here we simply add those as unknown fields of type
string (so that they can easily be reserialized).
Args:
message: Proto message we've decoded thus far.
encoded_message: JSON string we're decoding.
Returns:
message, with any remaining unrecognized fields saved.
"""
if not encoded_message:
return message
decoded_message = json.loads(six.ensure_str(encoded_message))
message_fields = [x.name for x in message.all_fields()] + list(
message.all_unrecognized_fields())
missing_fields = [x for x in decoded_message.keys()
if x not in message_fields]
for field_name in missing_fields:
message.set_unrecognized_field(field_name, decoded_message[field_name],
messages.Variant.STRING)
return message | [
"def",
"_ProcessUnknownMessages",
"(",
"message",
",",
"encoded_message",
")",
":",
"if",
"not",
"encoded_message",
":",
"return",
"message",
"decoded_message",
"=",
"json",
".",
"loads",
"(",
"six",
".",
"ensure_str",
"(",
"encoded_message",
")",
")",
"message_... | Store any remaining unknown fields as strings.
ProtoRPC currently ignores unknown values for which no type can be
determined (and logs a "No variant found" message). For the purposes
of reserializing, this is quite harmful (since it throws away
information). Here we simply add those as unknown fields of type
string (so that they can easily be reserialized).
Args:
message: Proto message we've decoded thus far.
encoded_message: JSON string we're decoding.
Returns:
message, with any remaining unrecognized fields saved. | [
"Store",
"any",
"remaining",
"unknown",
"fields",
"as",
"strings",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L541-L567 | train | 207,777 |
google/apitools | apitools/base/py/encoding_helper.py | AddCustomJsonEnumMapping | def AddCustomJsonEnumMapping(enum_type, python_name, json_name,
package=None): # pylint: disable=unused-argument
"""Add a custom wire encoding for a given enum value.
This is primarily used in generated code, to handle enum values
which happen to be Python keywords.
Args:
enum_type: (messages.Enum) An enum type
python_name: (basestring) Python name for this value.
json_name: (basestring) JSON name to be used on the wire.
package: (NoneType, optional) No effect, exists for legacy compatibility.
"""
if not issubclass(enum_type, messages.Enum):
raise exceptions.TypecheckError(
'Cannot set JSON enum mapping for non-enum "%s"' % enum_type)
if python_name not in enum_type.names():
raise exceptions.InvalidDataError(
'Enum value %s not a value for type %s' % (python_name, enum_type))
field_mappings = _JSON_ENUM_MAPPINGS.setdefault(enum_type, {})
_CheckForExistingMappings('enum', enum_type, python_name, json_name)
field_mappings[python_name] = json_name | python | def AddCustomJsonEnumMapping(enum_type, python_name, json_name,
package=None): # pylint: disable=unused-argument
"""Add a custom wire encoding for a given enum value.
This is primarily used in generated code, to handle enum values
which happen to be Python keywords.
Args:
enum_type: (messages.Enum) An enum type
python_name: (basestring) Python name for this value.
json_name: (basestring) JSON name to be used on the wire.
package: (NoneType, optional) No effect, exists for legacy compatibility.
"""
if not issubclass(enum_type, messages.Enum):
raise exceptions.TypecheckError(
'Cannot set JSON enum mapping for non-enum "%s"' % enum_type)
if python_name not in enum_type.names():
raise exceptions.InvalidDataError(
'Enum value %s not a value for type %s' % (python_name, enum_type))
field_mappings = _JSON_ENUM_MAPPINGS.setdefault(enum_type, {})
_CheckForExistingMappings('enum', enum_type, python_name, json_name)
field_mappings[python_name] = json_name | [
"def",
"AddCustomJsonEnumMapping",
"(",
"enum_type",
",",
"python_name",
",",
"json_name",
",",
"package",
"=",
"None",
")",
":",
"# pylint: disable=unused-argument",
"if",
"not",
"issubclass",
"(",
"enum_type",
",",
"messages",
".",
"Enum",
")",
":",
"raise",
"... | Add a custom wire encoding for a given enum value.
This is primarily used in generated code, to handle enum values
which happen to be Python keywords.
Args:
enum_type: (messages.Enum) An enum type
python_name: (basestring) Python name for this value.
json_name: (basestring) JSON name to be used on the wire.
package: (NoneType, optional) No effect, exists for legacy compatibility. | [
"Add",
"a",
"custom",
"wire",
"encoding",
"for",
"a",
"given",
"enum",
"value",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L579-L600 | train | 207,778 |
google/apitools | apitools/base/py/encoding_helper.py | AddCustomJsonFieldMapping | def AddCustomJsonFieldMapping(message_type, python_name, json_name,
package=None): # pylint: disable=unused-argument
"""Add a custom wire encoding for a given message field.
This is primarily used in generated code, to handle enum values
which happen to be Python keywords.
Args:
message_type: (messages.Message) A message type
python_name: (basestring) Python name for this value.
json_name: (basestring) JSON name to be used on the wire.
package: (NoneType, optional) No effect, exists for legacy compatibility.
"""
if not issubclass(message_type, messages.Message):
raise exceptions.TypecheckError(
'Cannot set JSON field mapping for '
'non-message "%s"' % message_type)
try:
_ = message_type.field_by_name(python_name)
except KeyError:
raise exceptions.InvalidDataError(
'Field %s not recognized for type %s' % (
python_name, message_type))
field_mappings = _JSON_FIELD_MAPPINGS.setdefault(message_type, {})
_CheckForExistingMappings('field', message_type, python_name, json_name)
field_mappings[python_name] = json_name | python | def AddCustomJsonFieldMapping(message_type, python_name, json_name,
package=None): # pylint: disable=unused-argument
"""Add a custom wire encoding for a given message field.
This is primarily used in generated code, to handle enum values
which happen to be Python keywords.
Args:
message_type: (messages.Message) A message type
python_name: (basestring) Python name for this value.
json_name: (basestring) JSON name to be used on the wire.
package: (NoneType, optional) No effect, exists for legacy compatibility.
"""
if not issubclass(message_type, messages.Message):
raise exceptions.TypecheckError(
'Cannot set JSON field mapping for '
'non-message "%s"' % message_type)
try:
_ = message_type.field_by_name(python_name)
except KeyError:
raise exceptions.InvalidDataError(
'Field %s not recognized for type %s' % (
python_name, message_type))
field_mappings = _JSON_FIELD_MAPPINGS.setdefault(message_type, {})
_CheckForExistingMappings('field', message_type, python_name, json_name)
field_mappings[python_name] = json_name | [
"def",
"AddCustomJsonFieldMapping",
"(",
"message_type",
",",
"python_name",
",",
"json_name",
",",
"package",
"=",
"None",
")",
":",
"# pylint: disable=unused-argument",
"if",
"not",
"issubclass",
"(",
"message_type",
",",
"messages",
".",
"Message",
")",
":",
"r... | Add a custom wire encoding for a given message field.
This is primarily used in generated code, to handle enum values
which happen to be Python keywords.
Args:
message_type: (messages.Message) A message type
python_name: (basestring) Python name for this value.
json_name: (basestring) JSON name to be used on the wire.
package: (NoneType, optional) No effect, exists for legacy compatibility. | [
"Add",
"a",
"custom",
"wire",
"encoding",
"for",
"a",
"given",
"message",
"field",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L603-L628 | train | 207,779 |
google/apitools | apitools/base/py/encoding_helper.py | GetCustomJsonEnumMapping | def GetCustomJsonEnumMapping(enum_type, python_name=None, json_name=None):
"""Return the appropriate remapping for the given enum, or None."""
return _FetchRemapping(enum_type, 'enum',
python_name=python_name, json_name=json_name,
mappings=_JSON_ENUM_MAPPINGS) | python | def GetCustomJsonEnumMapping(enum_type, python_name=None, json_name=None):
"""Return the appropriate remapping for the given enum, or None."""
return _FetchRemapping(enum_type, 'enum',
python_name=python_name, json_name=json_name,
mappings=_JSON_ENUM_MAPPINGS) | [
"def",
"GetCustomJsonEnumMapping",
"(",
"enum_type",
",",
"python_name",
"=",
"None",
",",
"json_name",
"=",
"None",
")",
":",
"return",
"_FetchRemapping",
"(",
"enum_type",
",",
"'enum'",
",",
"python_name",
"=",
"python_name",
",",
"json_name",
"=",
"json_name... | Return the appropriate remapping for the given enum, or None. | [
"Return",
"the",
"appropriate",
"remapping",
"for",
"the",
"given",
"enum",
"or",
"None",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L631-L635 | train | 207,780 |
google/apitools | apitools/base/py/encoding_helper.py | GetCustomJsonFieldMapping | def GetCustomJsonFieldMapping(message_type, python_name=None, json_name=None):
"""Return the appropriate remapping for the given field, or None."""
return _FetchRemapping(message_type, 'field',
python_name=python_name, json_name=json_name,
mappings=_JSON_FIELD_MAPPINGS) | python | def GetCustomJsonFieldMapping(message_type, python_name=None, json_name=None):
"""Return the appropriate remapping for the given field, or None."""
return _FetchRemapping(message_type, 'field',
python_name=python_name, json_name=json_name,
mappings=_JSON_FIELD_MAPPINGS) | [
"def",
"GetCustomJsonFieldMapping",
"(",
"message_type",
",",
"python_name",
"=",
"None",
",",
"json_name",
"=",
"None",
")",
":",
"return",
"_FetchRemapping",
"(",
"message_type",
",",
"'field'",
",",
"python_name",
"=",
"python_name",
",",
"json_name",
"=",
"j... | Return the appropriate remapping for the given field, or None. | [
"Return",
"the",
"appropriate",
"remapping",
"for",
"the",
"given",
"field",
"or",
"None",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L638-L642 | train | 207,781 |
google/apitools | apitools/base/py/encoding_helper.py | _FetchRemapping | def _FetchRemapping(type_name, mapping_type, python_name=None, json_name=None,
mappings=None):
"""Common code for fetching a key or value from a remapping dict."""
if python_name and json_name:
raise exceptions.InvalidDataError(
'Cannot specify both python_name and json_name '
'for %s remapping' % mapping_type)
if not (python_name or json_name):
raise exceptions.InvalidDataError(
'Must specify either python_name or json_name for %s remapping' % (
mapping_type,))
field_remappings = mappings.get(type_name, {})
if field_remappings:
if python_name:
return field_remappings.get(python_name)
elif json_name:
if json_name in list(field_remappings.values()):
return [k for k in field_remappings
if field_remappings[k] == json_name][0]
return None | python | def _FetchRemapping(type_name, mapping_type, python_name=None, json_name=None,
mappings=None):
"""Common code for fetching a key or value from a remapping dict."""
if python_name and json_name:
raise exceptions.InvalidDataError(
'Cannot specify both python_name and json_name '
'for %s remapping' % mapping_type)
if not (python_name or json_name):
raise exceptions.InvalidDataError(
'Must specify either python_name or json_name for %s remapping' % (
mapping_type,))
field_remappings = mappings.get(type_name, {})
if field_remappings:
if python_name:
return field_remappings.get(python_name)
elif json_name:
if json_name in list(field_remappings.values()):
return [k for k in field_remappings
if field_remappings[k] == json_name][0]
return None | [
"def",
"_FetchRemapping",
"(",
"type_name",
",",
"mapping_type",
",",
"python_name",
"=",
"None",
",",
"json_name",
"=",
"None",
",",
"mappings",
"=",
"None",
")",
":",
"if",
"python_name",
"and",
"json_name",
":",
"raise",
"exceptions",
".",
"InvalidDataError... | Common code for fetching a key or value from a remapping dict. | [
"Common",
"code",
"for",
"fetching",
"a",
"key",
"or",
"value",
"from",
"a",
"remapping",
"dict",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L645-L664 | train | 207,782 |
google/apitools | apitools/base/py/encoding_helper.py | _CheckForExistingMappings | def _CheckForExistingMappings(mapping_type, message_type,
python_name, json_name):
"""Validate that no mappings exist for the given values."""
if mapping_type == 'field':
getter = GetCustomJsonFieldMapping
elif mapping_type == 'enum':
getter = GetCustomJsonEnumMapping
remapping = getter(message_type, python_name=python_name)
if remapping is not None and remapping != json_name:
raise exceptions.InvalidDataError(
'Cannot add mapping for %s "%s", already mapped to "%s"' % (
mapping_type, python_name, remapping))
remapping = getter(message_type, json_name=json_name)
if remapping is not None and remapping != python_name:
raise exceptions.InvalidDataError(
'Cannot add mapping for %s "%s", already mapped to "%s"' % (
mapping_type, json_name, remapping)) | python | def _CheckForExistingMappings(mapping_type, message_type,
python_name, json_name):
"""Validate that no mappings exist for the given values."""
if mapping_type == 'field':
getter = GetCustomJsonFieldMapping
elif mapping_type == 'enum':
getter = GetCustomJsonEnumMapping
remapping = getter(message_type, python_name=python_name)
if remapping is not None and remapping != json_name:
raise exceptions.InvalidDataError(
'Cannot add mapping for %s "%s", already mapped to "%s"' % (
mapping_type, python_name, remapping))
remapping = getter(message_type, json_name=json_name)
if remapping is not None and remapping != python_name:
raise exceptions.InvalidDataError(
'Cannot add mapping for %s "%s", already mapped to "%s"' % (
mapping_type, json_name, remapping)) | [
"def",
"_CheckForExistingMappings",
"(",
"mapping_type",
",",
"message_type",
",",
"python_name",
",",
"json_name",
")",
":",
"if",
"mapping_type",
"==",
"'field'",
":",
"getter",
"=",
"GetCustomJsonFieldMapping",
"elif",
"mapping_type",
"==",
"'enum'",
":",
"getter... | Validate that no mappings exist for the given values. | [
"Validate",
"that",
"no",
"mappings",
"exist",
"for",
"the",
"given",
"values",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L667-L683 | train | 207,783 |
google/apitools | apitools/base/py/encoding_helper.py | _AsMessageList | def _AsMessageList(msg):
"""Convert the provided list-as-JsonValue to a list."""
# This really needs to live in extra_types, but extra_types needs
# to import this file to be able to register codecs.
# TODO(craigcitro): Split out a codecs module and fix this ugly
# import.
from apitools.base.py import extra_types
def _IsRepeatedJsonValue(msg):
"""Return True if msg is a repeated value as a JsonValue."""
if isinstance(msg, extra_types.JsonArray):
return True
if isinstance(msg, extra_types.JsonValue) and msg.array_value:
return True
return False
if not _IsRepeatedJsonValue(msg):
raise ValueError('invalid argument to _AsMessageList')
if isinstance(msg, extra_types.JsonValue):
msg = msg.array_value
if isinstance(msg, extra_types.JsonArray):
msg = msg.entries
return msg | python | def _AsMessageList(msg):
"""Convert the provided list-as-JsonValue to a list."""
# This really needs to live in extra_types, but extra_types needs
# to import this file to be able to register codecs.
# TODO(craigcitro): Split out a codecs module and fix this ugly
# import.
from apitools.base.py import extra_types
def _IsRepeatedJsonValue(msg):
"""Return True if msg is a repeated value as a JsonValue."""
if isinstance(msg, extra_types.JsonArray):
return True
if isinstance(msg, extra_types.JsonValue) and msg.array_value:
return True
return False
if not _IsRepeatedJsonValue(msg):
raise ValueError('invalid argument to _AsMessageList')
if isinstance(msg, extra_types.JsonValue):
msg = msg.array_value
if isinstance(msg, extra_types.JsonArray):
msg = msg.entries
return msg | [
"def",
"_AsMessageList",
"(",
"msg",
")",
":",
"# This really needs to live in extra_types, but extra_types needs",
"# to import this file to be able to register codecs.",
"# TODO(craigcitro): Split out a codecs module and fix this ugly",
"# import.",
"from",
"apitools",
".",
"base",
"."... | Convert the provided list-as-JsonValue to a list. | [
"Convert",
"the",
"provided",
"list",
"-",
"as",
"-",
"JsonValue",
"to",
"a",
"list",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L709-L731 | train | 207,784 |
google/apitools | apitools/base/py/encoding_helper.py | _IsMap | def _IsMap(message, field):
"""Returns whether the "field" is actually a map-type."""
value = message.get_assigned_value(field.name)
if not isinstance(value, messages.Message):
return False
try:
additional_properties = value.field_by_name('additionalProperties')
except KeyError:
return False
else:
return additional_properties.repeated | python | def _IsMap(message, field):
"""Returns whether the "field" is actually a map-type."""
value = message.get_assigned_value(field.name)
if not isinstance(value, messages.Message):
return False
try:
additional_properties = value.field_by_name('additionalProperties')
except KeyError:
return False
else:
return additional_properties.repeated | [
"def",
"_IsMap",
"(",
"message",
",",
"field",
")",
":",
"value",
"=",
"message",
".",
"get_assigned_value",
"(",
"field",
".",
"name",
")",
"if",
"not",
"isinstance",
"(",
"value",
",",
"messages",
".",
"Message",
")",
":",
"return",
"False",
"try",
"... | Returns whether the "field" is actually a map-type. | [
"Returns",
"whether",
"the",
"field",
"is",
"actually",
"a",
"map",
"-",
"type",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L734-L744 | train | 207,785 |
google/apitools | apitools/base/py/encoding_helper.py | UnrecognizedFieldIter | def UnrecognizedFieldIter(message, _edges=()): # pylint: disable=invalid-name
"""Yields the locations of unrecognized fields within "message".
If a sub-message is found to have unrecognized fields, that sub-message
will not be searched any further. We prune the search of the sub-message
because we assume it is malformed and further checks will not yield
productive errors.
Args:
message: The Message instance to search.
_edges: Internal arg for passing state.
Yields:
(edges_to_message, field_names):
edges_to_message: List[ProtoEdge], The edges (relative to "message")
describing the path to the sub-message where the unrecognized
fields were found.
field_names: List[Str], The names of the field(s) that were
unrecognized in the sub-message.
"""
if not isinstance(message, messages.Message):
# This is a primitive leaf, no errors found down this path.
return
field_names = message.all_unrecognized_fields()
if field_names:
# This message is malformed. Stop recursing and report it.
yield _edges, field_names
return
# Recurse through all fields in the current message.
for field in message.all_fields():
value = message.get_assigned_value(field.name)
if field.repeated:
for i, item in enumerate(value):
repeated_edge = ProtoEdge(EdgeType.REPEATED, field.name, i)
iter_ = UnrecognizedFieldIter(item, _edges + (repeated_edge,))
for (e, y) in iter_:
yield e, y
elif _IsMap(message, field):
for key, item in _MapItems(message, field):
map_edge = ProtoEdge(EdgeType.MAP, field.name, key)
iter_ = UnrecognizedFieldIter(item, _edges + (map_edge,))
for (e, y) in iter_:
yield e, y
else:
scalar_edge = ProtoEdge(EdgeType.SCALAR, field.name, None)
iter_ = UnrecognizedFieldIter(value, _edges + (scalar_edge,))
for (e, y) in iter_:
yield e, y | python | def UnrecognizedFieldIter(message, _edges=()): # pylint: disable=invalid-name
"""Yields the locations of unrecognized fields within "message".
If a sub-message is found to have unrecognized fields, that sub-message
will not be searched any further. We prune the search of the sub-message
because we assume it is malformed and further checks will not yield
productive errors.
Args:
message: The Message instance to search.
_edges: Internal arg for passing state.
Yields:
(edges_to_message, field_names):
edges_to_message: List[ProtoEdge], The edges (relative to "message")
describing the path to the sub-message where the unrecognized
fields were found.
field_names: List[Str], The names of the field(s) that were
unrecognized in the sub-message.
"""
if not isinstance(message, messages.Message):
# This is a primitive leaf, no errors found down this path.
return
field_names = message.all_unrecognized_fields()
if field_names:
# This message is malformed. Stop recursing and report it.
yield _edges, field_names
return
# Recurse through all fields in the current message.
for field in message.all_fields():
value = message.get_assigned_value(field.name)
if field.repeated:
for i, item in enumerate(value):
repeated_edge = ProtoEdge(EdgeType.REPEATED, field.name, i)
iter_ = UnrecognizedFieldIter(item, _edges + (repeated_edge,))
for (e, y) in iter_:
yield e, y
elif _IsMap(message, field):
for key, item in _MapItems(message, field):
map_edge = ProtoEdge(EdgeType.MAP, field.name, key)
iter_ = UnrecognizedFieldIter(item, _edges + (map_edge,))
for (e, y) in iter_:
yield e, y
else:
scalar_edge = ProtoEdge(EdgeType.SCALAR, field.name, None)
iter_ = UnrecognizedFieldIter(value, _edges + (scalar_edge,))
for (e, y) in iter_:
yield e, y | [
"def",
"UnrecognizedFieldIter",
"(",
"message",
",",
"_edges",
"=",
"(",
")",
")",
":",
"# pylint: disable=invalid-name",
"if",
"not",
"isinstance",
"(",
"message",
",",
"messages",
".",
"Message",
")",
":",
"# This is a primitive leaf, no errors found down this path.",... | Yields the locations of unrecognized fields within "message".
If a sub-message is found to have unrecognized fields, that sub-message
will not be searched any further. We prune the search of the sub-message
because we assume it is malformed and further checks will not yield
productive errors.
Args:
message: The Message instance to search.
_edges: Internal arg for passing state.
Yields:
(edges_to_message, field_names):
edges_to_message: List[ProtoEdge], The edges (relative to "message")
describing the path to the sub-message where the unrecognized
fields were found.
field_names: List[Str], The names of the field(s) that were
unrecognized in the sub-message. | [
"Yields",
"the",
"locations",
"of",
"unrecognized",
"fields",
"within",
"message",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L757-L806 | train | 207,786 |
google/apitools | apitools/base/py/encoding_helper.py | _ProtoJsonApiTools.decode_field | def decode_field(self, field, value):
"""Decode the given JSON value.
Args:
field: a messages.Field for the field we're decoding.
value: a python value we'd like to decode.
Returns:
A value suitable for assignment to field.
"""
for decoder in _GetFieldCodecs(field, 'decoder'):
result = decoder(field, value)
value = result.value
if result.complete:
return value
if isinstance(field, messages.MessageField):
field_value = self.decode_message(
field.message_type, json.dumps(value))
elif isinstance(field, messages.EnumField):
value = GetCustomJsonEnumMapping(
field.type, json_name=value) or value
try:
field_value = super(
_ProtoJsonApiTools, self).decode_field(field, value)
except messages.DecodeError:
if not isinstance(value, six.string_types):
raise
field_value = None
else:
field_value = super(
_ProtoJsonApiTools, self).decode_field(field, value)
return field_value | python | def decode_field(self, field, value):
"""Decode the given JSON value.
Args:
field: a messages.Field for the field we're decoding.
value: a python value we'd like to decode.
Returns:
A value suitable for assignment to field.
"""
for decoder in _GetFieldCodecs(field, 'decoder'):
result = decoder(field, value)
value = result.value
if result.complete:
return value
if isinstance(field, messages.MessageField):
field_value = self.decode_message(
field.message_type, json.dumps(value))
elif isinstance(field, messages.EnumField):
value = GetCustomJsonEnumMapping(
field.type, json_name=value) or value
try:
field_value = super(
_ProtoJsonApiTools, self).decode_field(field, value)
except messages.DecodeError:
if not isinstance(value, six.string_types):
raise
field_value = None
else:
field_value = super(
_ProtoJsonApiTools, self).decode_field(field, value)
return field_value | [
"def",
"decode_field",
"(",
"self",
",",
"field",
",",
"value",
")",
":",
"for",
"decoder",
"in",
"_GetFieldCodecs",
"(",
"field",
",",
"'decoder'",
")",
":",
"result",
"=",
"decoder",
"(",
"field",
",",
"value",
")",
"value",
"=",
"result",
".",
"valu... | Decode the given JSON value.
Args:
field: a messages.Field for the field we're decoding.
value: a python value we'd like to decode.
Returns:
A value suitable for assignment to field. | [
"Decode",
"the",
"given",
"JSON",
"value",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L314-L345 | train | 207,787 |
google/apitools | apitools/base/py/encoding_helper.py | _ProtoJsonApiTools.encode_field | def encode_field(self, field, value):
"""Encode the given value as JSON.
Args:
field: a messages.Field for the field we're encoding.
value: a value for field.
Returns:
A python value suitable for json.dumps.
"""
for encoder in _GetFieldCodecs(field, 'encoder'):
result = encoder(field, value)
value = result.value
if result.complete:
return value
if isinstance(field, messages.EnumField):
if field.repeated:
remapped_value = [GetCustomJsonEnumMapping(
field.type, python_name=e.name) or e.name for e in value]
else:
remapped_value = GetCustomJsonEnumMapping(
field.type, python_name=value.name)
if remapped_value:
return remapped_value
if (isinstance(field, messages.MessageField) and
not isinstance(field, message_types.DateTimeField)):
value = json.loads(self.encode_message(value))
return super(_ProtoJsonApiTools, self).encode_field(field, value) | python | def encode_field(self, field, value):
"""Encode the given value as JSON.
Args:
field: a messages.Field for the field we're encoding.
value: a value for field.
Returns:
A python value suitable for json.dumps.
"""
for encoder in _GetFieldCodecs(field, 'encoder'):
result = encoder(field, value)
value = result.value
if result.complete:
return value
if isinstance(field, messages.EnumField):
if field.repeated:
remapped_value = [GetCustomJsonEnumMapping(
field.type, python_name=e.name) or e.name for e in value]
else:
remapped_value = GetCustomJsonEnumMapping(
field.type, python_name=value.name)
if remapped_value:
return remapped_value
if (isinstance(field, messages.MessageField) and
not isinstance(field, message_types.DateTimeField)):
value = json.loads(self.encode_message(value))
return super(_ProtoJsonApiTools, self).encode_field(field, value) | [
"def",
"encode_field",
"(",
"self",
",",
"field",
",",
"value",
")",
":",
"for",
"encoder",
"in",
"_GetFieldCodecs",
"(",
"field",
",",
"'encoder'",
")",
":",
"result",
"=",
"encoder",
"(",
"field",
",",
"value",
")",
"value",
"=",
"result",
".",
"valu... | Encode the given value as JSON.
Args:
field: a messages.Field for the field we're encoding.
value: a value for field.
Returns:
A python value suitable for json.dumps. | [
"Encode",
"the",
"given",
"value",
"as",
"JSON",
"."
] | f3745a7ea535aa0e88b0650c16479b696d6fd446 | https://github.com/google/apitools/blob/f3745a7ea535aa0e88b0650c16479b696d6fd446/apitools/base/py/encoding_helper.py#L361-L388 | train | 207,788 |
pricingassistant/mrq | mrq/utils.py | load_class_by_path | def load_class_by_path(taskpath):
""" Given a taskpath, returns the main task class. """
return getattr(
importlib.import_module(
re.sub(
r"\.[^.]+$",
"",
taskpath)),
re.sub(
r"^.*\.",
"",
taskpath)) | python | def load_class_by_path(taskpath):
""" Given a taskpath, returns the main task class. """
return getattr(
importlib.import_module(
re.sub(
r"\.[^.]+$",
"",
taskpath)),
re.sub(
r"^.*\.",
"",
taskpath)) | [
"def",
"load_class_by_path",
"(",
"taskpath",
")",
":",
"return",
"getattr",
"(",
"importlib",
".",
"import_module",
"(",
"re",
".",
"sub",
"(",
"r\"\\.[^.]+$\"",
",",
"\"\"",
",",
"taskpath",
")",
")",
",",
"re",
".",
"sub",
"(",
"r\"^.*\\.\"",
",",
"\"... | Given a taskpath, returns the main task class. | [
"Given",
"a",
"taskpath",
"returns",
"the",
"main",
"task",
"class",
"."
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/utils.py#L105-L117 | train | 207,789 |
pricingassistant/mrq | mrq/job.py | queue_raw_jobs | def queue_raw_jobs(queue, params_list, **kwargs):
""" Queue some jobs on a raw queue """
from .queue import Queue
queue_obj = Queue(queue)
queue_obj.enqueue_raw_jobs(params_list, **kwargs) | python | def queue_raw_jobs(queue, params_list, **kwargs):
""" Queue some jobs on a raw queue """
from .queue import Queue
queue_obj = Queue(queue)
queue_obj.enqueue_raw_jobs(params_list, **kwargs) | [
"def",
"queue_raw_jobs",
"(",
"queue",
",",
"params_list",
",",
"*",
"*",
"kwargs",
")",
":",
"from",
".",
"queue",
"import",
"Queue",
"queue_obj",
"=",
"Queue",
"(",
"queue",
")",
"queue_obj",
".",
"enqueue_raw_jobs",
"(",
"params_list",
",",
"*",
"*",
... | Queue some jobs on a raw queue | [
"Queue",
"some",
"jobs",
"on",
"a",
"raw",
"queue"
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L663-L668 | train | 207,790 |
pricingassistant/mrq | mrq/job.py | queue_jobs | def queue_jobs(main_task_path, params_list, queue=None, batch_size=1000):
""" Queue multiple jobs on a regular queue """
if len(params_list) == 0:
return []
if queue is None:
task_def = context.get_current_config().get("tasks", {}).get(main_task_path) or {}
queue = task_def.get("queue", "default")
from .queue import Queue
queue_obj = Queue(queue)
if queue_obj.is_raw:
raise Exception("Can't queue regular jobs on a raw queue")
all_ids = []
for params_group in group_iter(params_list, n=batch_size):
context.metric("jobs.status.queued", len(params_group))
# Insert the job in MongoDB
job_ids = Job.insert([{
"path": main_task_path,
"params": params,
"queue": queue,
"datequeued": datetime.datetime.utcnow(),
"status": "queued"
} for params in params_group], w=1, return_jobs=False)
all_ids += job_ids
queue_obj.notify(len(all_ids))
set_queues_size({queue: len(all_ids)})
return all_ids | python | def queue_jobs(main_task_path, params_list, queue=None, batch_size=1000):
""" Queue multiple jobs on a regular queue """
if len(params_list) == 0:
return []
if queue is None:
task_def = context.get_current_config().get("tasks", {}).get(main_task_path) or {}
queue = task_def.get("queue", "default")
from .queue import Queue
queue_obj = Queue(queue)
if queue_obj.is_raw:
raise Exception("Can't queue regular jobs on a raw queue")
all_ids = []
for params_group in group_iter(params_list, n=batch_size):
context.metric("jobs.status.queued", len(params_group))
# Insert the job in MongoDB
job_ids = Job.insert([{
"path": main_task_path,
"params": params,
"queue": queue,
"datequeued": datetime.datetime.utcnow(),
"status": "queued"
} for params in params_group], w=1, return_jobs=False)
all_ids += job_ids
queue_obj.notify(len(all_ids))
set_queues_size({queue: len(all_ids)})
return all_ids | [
"def",
"queue_jobs",
"(",
"main_task_path",
",",
"params_list",
",",
"queue",
"=",
"None",
",",
"batch_size",
"=",
"1000",
")",
":",
"if",
"len",
"(",
"params_list",
")",
"==",
"0",
":",
"return",
"[",
"]",
"if",
"queue",
"is",
"None",
":",
"task_def",... | Queue multiple jobs on a regular queue | [
"Queue",
"multiple",
"jobs",
"on",
"a",
"regular",
"queue"
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L685-L719 | train | 207,791 |
pricingassistant/mrq | mrq/job.py | Job.fetch | def fetch(self, start=False, full_data=True):
""" Get the current job data and possibly flag it as started. """
if self.id is None:
return self
if full_data is True:
fields = None
elif isinstance(full_data, dict):
fields = full_data
else:
fields = {
"_id": 0,
"path": 1,
"params": 1,
"status": 1,
"retry_count": 1,
}
if start:
self.datestarted = datetime.datetime.utcnow()
self.set_data(self.collection.find_and_modify(
{
"_id": self.id,
"status": {"$nin": ["cancel", "abort", "maxretries"]}
},
{"$set": {
"status": "started",
"datestarted": self.datestarted,
"worker": self.worker.id
},
"$unset": {
"dateexpires": 1 # we don't want started jobs to expire unexpectedly
}},
projection=fields)
)
context.metric("jobs.status.started")
else:
self.set_data(self.collection.find_one({
"_id": self.id
}, projection=fields))
if self.data is None:
context.log.info(
"Job %s not found in MongoDB or status was cancelled!" %
self.id)
self.stored = True
return self | python | def fetch(self, start=False, full_data=True):
""" Get the current job data and possibly flag it as started. """
if self.id is None:
return self
if full_data is True:
fields = None
elif isinstance(full_data, dict):
fields = full_data
else:
fields = {
"_id": 0,
"path": 1,
"params": 1,
"status": 1,
"retry_count": 1,
}
if start:
self.datestarted = datetime.datetime.utcnow()
self.set_data(self.collection.find_and_modify(
{
"_id": self.id,
"status": {"$nin": ["cancel", "abort", "maxretries"]}
},
{"$set": {
"status": "started",
"datestarted": self.datestarted,
"worker": self.worker.id
},
"$unset": {
"dateexpires": 1 # we don't want started jobs to expire unexpectedly
}},
projection=fields)
)
context.metric("jobs.status.started")
else:
self.set_data(self.collection.find_one({
"_id": self.id
}, projection=fields))
if self.data is None:
context.log.info(
"Job %s not found in MongoDB or status was cancelled!" %
self.id)
self.stored = True
return self | [
"def",
"fetch",
"(",
"self",
",",
"start",
"=",
"False",
",",
"full_data",
"=",
"True",
")",
":",
"if",
"self",
".",
"id",
"is",
"None",
":",
"return",
"self",
"if",
"full_data",
"is",
"True",
":",
"fields",
"=",
"None",
"elif",
"isinstance",
"(",
... | Get the current job data and possibly flag it as started. | [
"Get",
"the",
"current",
"job",
"data",
"and",
"possibly",
"flag",
"it",
"as",
"started",
"."
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L93-L144 | train | 207,792 |
pricingassistant/mrq | mrq/job.py | Job.save | def save(self):
""" Persists the current job metadata to MongoDB. Will be called at each worker report. """
if not self.saved and self.data and "progress" in self.data:
# TODO should we save more fields?
self.collection.update({"_id": self.id}, {"$set": {
"progress": self.data["progress"]
}})
self.saved = True | python | def save(self):
""" Persists the current job metadata to MongoDB. Will be called at each worker report. """
if not self.saved and self.data and "progress" in self.data:
# TODO should we save more fields?
self.collection.update({"_id": self.id}, {"$set": {
"progress": self.data["progress"]
}})
self.saved = True | [
"def",
"save",
"(",
"self",
")",
":",
"if",
"not",
"self",
".",
"saved",
"and",
"self",
".",
"data",
"and",
"\"progress\"",
"in",
"self",
".",
"data",
":",
"# TODO should we save more fields?",
"self",
".",
"collection",
".",
"update",
"(",
"{",
"\"_id\"",... | Persists the current job metadata to MongoDB. Will be called at each worker report. | [
"Persists",
"the",
"current",
"job",
"metadata",
"to",
"MongoDB",
".",
"Will",
"be",
"called",
"at",
"each",
"worker",
"report",
"."
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L177-L185 | train | 207,793 |
pricingassistant/mrq | mrq/job.py | Job.insert | def insert(cls, jobs_data, queue=None, statuses_no_storage=None, return_jobs=True, w=None, j=None):
""" Insert a job into MongoDB """
now = datetime.datetime.utcnow()
for data in jobs_data:
if data["status"] == "started":
data["datestarted"] = now
no_storage = (statuses_no_storage is not None) and ("started" in statuses_no_storage)
if no_storage and return_jobs:
for data in jobs_data:
data["_id"] = ObjectId() # Give the job a temporary ID
else:
inserted = context.connections.mongodb_jobs.mrq_jobs.insert(
jobs_data,
manipulate=True,
w=w,
j=j
)
if return_jobs:
jobs = []
for data in jobs_data:
job = cls(data["_id"], queue=queue)
job.set_data(data)
job.statuses_no_storage = statuses_no_storage
job.stored = (not no_storage)
if data["status"] == "started":
job.datestarted = data["datestarted"]
jobs.append(job)
return jobs
else:
return inserted | python | def insert(cls, jobs_data, queue=None, statuses_no_storage=None, return_jobs=True, w=None, j=None):
""" Insert a job into MongoDB """
now = datetime.datetime.utcnow()
for data in jobs_data:
if data["status"] == "started":
data["datestarted"] = now
no_storage = (statuses_no_storage is not None) and ("started" in statuses_no_storage)
if no_storage and return_jobs:
for data in jobs_data:
data["_id"] = ObjectId() # Give the job a temporary ID
else:
inserted = context.connections.mongodb_jobs.mrq_jobs.insert(
jobs_data,
manipulate=True,
w=w,
j=j
)
if return_jobs:
jobs = []
for data in jobs_data:
job = cls(data["_id"], queue=queue)
job.set_data(data)
job.statuses_no_storage = statuses_no_storage
job.stored = (not no_storage)
if data["status"] == "started":
job.datestarted = data["datestarted"]
jobs.append(job)
return jobs
else:
return inserted | [
"def",
"insert",
"(",
"cls",
",",
"jobs_data",
",",
"queue",
"=",
"None",
",",
"statuses_no_storage",
"=",
"None",
",",
"return_jobs",
"=",
"True",
",",
"w",
"=",
"None",
",",
"j",
"=",
"None",
")",
":",
"now",
"=",
"datetime",
".",
"datetime",
".",
... | Insert a job into MongoDB | [
"Insert",
"a",
"job",
"into",
"MongoDB"
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L188-L221 | train | 207,794 |
pricingassistant/mrq | mrq/job.py | Job._attach_original_exception | def _attach_original_exception(self, exc):
""" Often, a retry will be raised inside an "except" block.
This Keep track of the first exception for debugging purposes """
original_exception = sys.exc_info()
if original_exception[0] is not None:
exc.original_exception = original_exception | python | def _attach_original_exception(self, exc):
""" Often, a retry will be raised inside an "except" block.
This Keep track of the first exception for debugging purposes """
original_exception = sys.exc_info()
if original_exception[0] is not None:
exc.original_exception = original_exception | [
"def",
"_attach_original_exception",
"(",
"self",
",",
"exc",
")",
":",
"original_exception",
"=",
"sys",
".",
"exc_info",
"(",
")",
"if",
"original_exception",
"[",
"0",
"]",
"is",
"not",
"None",
":",
"exc",
".",
"original_exception",
"=",
"original_exception... | Often, a retry will be raised inside an "except" block.
This Keep track of the first exception for debugging purposes | [
"Often",
"a",
"retry",
"will",
"be",
"raised",
"inside",
"an",
"except",
"block",
".",
"This",
"Keep",
"track",
"of",
"the",
"first",
"exception",
"for",
"debugging",
"purposes"
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L223-L229 | train | 207,795 |
pricingassistant/mrq | mrq/job.py | Job.retry | def retry(self, queue=None, delay=None, max_retries=None):
""" Marks the current job as needing to be retried. Interrupts it. """
max_retries = max_retries
if max_retries is None:
max_retries = self.max_retries
if self.data.get("retry_count", 0) >= max_retries:
raise MaxRetriesInterrupt()
exc = RetryInterrupt()
exc.queue = queue or self.queue or self.data.get("queue") or "default"
exc.retry_count = self.data.get("retry_count", 0) + 1
exc.delay = delay
if exc.delay is None:
exc.delay = self.retry_delay
self._attach_original_exception(exc)
raise exc | python | def retry(self, queue=None, delay=None, max_retries=None):
""" Marks the current job as needing to be retried. Interrupts it. """
max_retries = max_retries
if max_retries is None:
max_retries = self.max_retries
if self.data.get("retry_count", 0) >= max_retries:
raise MaxRetriesInterrupt()
exc = RetryInterrupt()
exc.queue = queue or self.queue or self.data.get("queue") or "default"
exc.retry_count = self.data.get("retry_count", 0) + 1
exc.delay = delay
if exc.delay is None:
exc.delay = self.retry_delay
self._attach_original_exception(exc)
raise exc | [
"def",
"retry",
"(",
"self",
",",
"queue",
"=",
"None",
",",
"delay",
"=",
"None",
",",
"max_retries",
"=",
"None",
")",
":",
"max_retries",
"=",
"max_retries",
"if",
"max_retries",
"is",
"None",
":",
"max_retries",
"=",
"self",
".",
"max_retries",
"if",... | Marks the current job as needing to be retried. Interrupts it. | [
"Marks",
"the",
"current",
"job",
"as",
"needing",
"to",
"be",
"retried",
".",
"Interrupts",
"it",
"."
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L231-L251 | train | 207,796 |
pricingassistant/mrq | mrq/job.py | Job.requeue | def requeue(self, queue=None, retry_count=0):
""" Requeues the current job. Doesn't interrupt it """
if not queue:
if not self.data or not self.data.get("queue"):
self.fetch(full_data={"_id": 0, "queue": 1, "path": 1})
queue = self.data["queue"]
self._save_status("queued", updates={
"queue": queue,
"datequeued": datetime.datetime.utcnow(),
"retry_count": retry_count
}) | python | def requeue(self, queue=None, retry_count=0):
""" Requeues the current job. Doesn't interrupt it """
if not queue:
if not self.data or not self.data.get("queue"):
self.fetch(full_data={"_id": 0, "queue": 1, "path": 1})
queue = self.data["queue"]
self._save_status("queued", updates={
"queue": queue,
"datequeued": datetime.datetime.utcnow(),
"retry_count": retry_count
}) | [
"def",
"requeue",
"(",
"self",
",",
"queue",
"=",
"None",
",",
"retry_count",
"=",
"0",
")",
":",
"if",
"not",
"queue",
":",
"if",
"not",
"self",
".",
"data",
"or",
"not",
"self",
".",
"data",
".",
"get",
"(",
"\"queue\"",
")",
":",
"self",
".",
... | Requeues the current job. Doesn't interrupt it | [
"Requeues",
"the",
"current",
"job",
".",
"Doesn",
"t",
"interrupt",
"it"
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L263-L275 | train | 207,797 |
pricingassistant/mrq | mrq/job.py | Job.perform | def perform(self):
""" Loads and starts the main task for this job, the saves the result. """
if self.data is None:
return
context.log.debug("Starting %s(%s)" % (self.data["path"], self.data["params"]))
task_class = load_class_by_path(self.data["path"])
self.task = task_class()
self.task.is_main_task = True
if not self.task.max_concurrency:
result = self.task.run_wrapped(self.data["params"])
else:
if self.task.max_concurrency > 1:
raise NotImplementedError()
lock = None
try:
# TODO: implement a semaphore
lock = context.connections.redis.lock(self.redis_max_concurrency_key, timeout=self.timeout + 5)
if not lock.acquire(blocking=True, blocking_timeout=0):
raise MaxConcurrencyInterrupt()
result = self.task.run_wrapped(self.data["params"])
finally:
try:
if lock:
lock.release()
except LockError:
pass
self.save_success(result)
if context.get_current_config().get("trace_greenlets"):
# TODO: this is not the exact greenlet_time measurement because it doesn't
# take into account the last switch's time. This is why we force a last switch.
# This does cause a performance overhead. Instead, we should print the
# last timing directly from the trace() function in context?
# pylint: disable=protected-access
gevent.sleep(0)
current_greenlet = gevent.getcurrent()
t = (datetime.datetime.utcnow() - self.datestarted).total_seconds()
context.log.debug(
"Job %s success: %0.6fs total, %0.6fs in greenlet, %s switches" %
(self.id,
t,
current_greenlet._trace_time,
current_greenlet._trace_switches - 1)
)
else:
context.log.debug("Job %s success: %0.6fs total" % (
self.id, (datetime.datetime.utcnow() -
self.datestarted).total_seconds()
))
return result | python | def perform(self):
""" Loads and starts the main task for this job, the saves the result. """
if self.data is None:
return
context.log.debug("Starting %s(%s)" % (self.data["path"], self.data["params"]))
task_class = load_class_by_path(self.data["path"])
self.task = task_class()
self.task.is_main_task = True
if not self.task.max_concurrency:
result = self.task.run_wrapped(self.data["params"])
else:
if self.task.max_concurrency > 1:
raise NotImplementedError()
lock = None
try:
# TODO: implement a semaphore
lock = context.connections.redis.lock(self.redis_max_concurrency_key, timeout=self.timeout + 5)
if not lock.acquire(blocking=True, blocking_timeout=0):
raise MaxConcurrencyInterrupt()
result = self.task.run_wrapped(self.data["params"])
finally:
try:
if lock:
lock.release()
except LockError:
pass
self.save_success(result)
if context.get_current_config().get("trace_greenlets"):
# TODO: this is not the exact greenlet_time measurement because it doesn't
# take into account the last switch's time. This is why we force a last switch.
# This does cause a performance overhead. Instead, we should print the
# last timing directly from the trace() function in context?
# pylint: disable=protected-access
gevent.sleep(0)
current_greenlet = gevent.getcurrent()
t = (datetime.datetime.utcnow() - self.datestarted).total_seconds()
context.log.debug(
"Job %s success: %0.6fs total, %0.6fs in greenlet, %s switches" %
(self.id,
t,
current_greenlet._trace_time,
current_greenlet._trace_switches - 1)
)
else:
context.log.debug("Job %s success: %0.6fs total" % (
self.id, (datetime.datetime.utcnow() -
self.datestarted).total_seconds()
))
return result | [
"def",
"perform",
"(",
"self",
")",
":",
"if",
"self",
".",
"data",
"is",
"None",
":",
"return",
"context",
".",
"log",
".",
"debug",
"(",
"\"Starting %s(%s)\"",
"%",
"(",
"self",
".",
"data",
"[",
"\"path\"",
"]",
",",
"self",
".",
"data",
"[",
"\... | Loads and starts the main task for this job, the saves the result. | [
"Loads",
"and",
"starts",
"the",
"main",
"task",
"for",
"this",
"job",
"the",
"saves",
"the",
"result",
"."
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L277-L345 | train | 207,798 |
pricingassistant/mrq | mrq/job.py | Job.wait | def wait(self, poll_interval=1, timeout=None, full_data=False):
""" Wait for this job to finish. """
end_time = None
if timeout:
end_time = time.time() + timeout
while end_time is None or time.time() < end_time:
job_data = self.collection.find_one({
"_id": ObjectId(self.id),
"status": {"$nin": ["started", "queued"]}
}, projection=({
"_id": 0,
"result": 1,
"status": 1
} if not full_data else None))
if job_data:
return job_data
time.sleep(poll_interval)
raise Exception("Waited for job result for %s seconds, timeout." % timeout) | python | def wait(self, poll_interval=1, timeout=None, full_data=False):
""" Wait for this job to finish. """
end_time = None
if timeout:
end_time = time.time() + timeout
while end_time is None or time.time() < end_time:
job_data = self.collection.find_one({
"_id": ObjectId(self.id),
"status": {"$nin": ["started", "queued"]}
}, projection=({
"_id": 0,
"result": 1,
"status": 1
} if not full_data else None))
if job_data:
return job_data
time.sleep(poll_interval)
raise Exception("Waited for job result for %s seconds, timeout." % timeout) | [
"def",
"wait",
"(",
"self",
",",
"poll_interval",
"=",
"1",
",",
"timeout",
"=",
"None",
",",
"full_data",
"=",
"False",
")",
":",
"end_time",
"=",
"None",
"if",
"timeout",
":",
"end_time",
"=",
"time",
".",
"time",
"(",
")",
"+",
"timeout",
"while",... | Wait for this job to finish. | [
"Wait",
"for",
"this",
"job",
"to",
"finish",
"."
] | d0a5a34de9cba38afa94fb7c9e17f9b570b79a50 | https://github.com/pricingassistant/mrq/blob/d0a5a34de9cba38afa94fb7c9e17f9b570b79a50/mrq/job.py#L347-L369 | train | 207,799 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.