idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
def _NormalizeDiscoveryUrls(discovery_url):
    """Expands a few abbreviations into full discovery urls.

    Args:
      discovery_url: a full url, or an 'api.version' abbreviation.

    Returns:
      A list of candidate discovery-document urls.

    Raises:
      ValueError: if discovery_url is not a url and has no '.' separator.
    """
    if discovery_url.startswith('http'):
        return [discovery_url]
    elif '.' not in discovery_url:
        # BUG FIX: the original never supplied the %s argument, so the
        # raised message contained a literal '%s'.
        raise ValueError(
            'Unrecognized value "%s" for discovery url' % discovery_url)
    api_name, _, api_version = discovery_url.partition('.')
    return [
        'https://www.googleapis.com/discovery/v1/apis/%s/%s/rest' % (
            api_name, api_version),
        'https://%s.googleapis.com/$discovery/rest?version=%s' % (
            api_name, api_version),
    ]
def _Gunzip(gzipped_content):
    """Returns gunzipped content from gzipped contents.

    Args:
      gzipped_content: bytes of a complete gzip stream.

    Returns:
      The decompressed bytes.
    """
    # Decompress in memory instead of round-tripping through a named
    # temporary file (which also failed on Windows, where a file opened
    # by NamedTemporaryFile cannot be reopened by gzip.open).
    return gzip.decompress(gzipped_content)
def _GetURLContent(url):
    """Download and return the raw content of url.

    Transparently gunzips the body when the server reports
    Content-Encoding: gzip.
    """
    response = urllib_request.urlopen(url)
    body = response.read()
    if response.info().get('Content-Encoding') == 'gzip':
        return _Gunzip(body)
    return body
def FetchDiscoveryDoc(discovery_url, retries=5):
    """Fetch the discovery document at the given url.

    Args:
      discovery_url: url or abbreviation of the discovery document.
      retries: number of attempts per candidate url.

    Returns:
      The parsed discovery document (a dict).

    Raises:
      CommunicationError: if no candidate url yielded a document.
    """
    discovery_urls = _NormalizeDiscoveryUrls(discovery_url)
    last_exception = None
    for url in discovery_urls:
        for _ in range(retries):
            try:
                content = _GetURLContent(url)
                if isinstance(content, bytes):
                    content = content.decode('utf8')
                # BUG FIX: return on the first successful fetch. The
                # original only broke out of the retry loop and then
                # re-fetched every remaining candidate url, returning
                # the *last* success instead of the first.
                return json.loads(content)
            except (urllib_error.HTTPError, urllib_error.URLError) as e:
                logging.info(
                    'Attempting to fetch discovery doc again after "%s"', e)
                last_exception = e
    raise CommunicationError(
        'Could not find discovery doc at any of %s: %s' % (
            discovery_urls, last_exception))
def __StripName(self, name):
    """Remove the first matching strip_prefix entry from name, if any."""
    if not name:
        return name
    matching = (p for p in self.__strip_prefixes if name.startswith(p))
    prefix = next(matching, None)
    if prefix is None:
        return name
    return name[len(prefix):]
def CleanName(name):
    """Perform generic name cleaning.

    Replaces illegal characters with '_', prefixes names that start with
    a digit, suffixes keywords, and guards double-underscore prefixes.
    """
    name = re.sub(r'[^_A-Za-z0-9]', '_', name)
    if name[0].isdigit():
        name = '_' + name
    while keyword.iskeyword(name):
        name += '_'
    if name.startswith('__'):
        name = 'f' + name
    return name
def NormalizeRelativePath(path):
    """Normalize camelCase entries in path.

    Components of the form '{param}' have their inner name cleaned via
    Names.CleanName; all other components pass through unchanged.
    """
    template = re.compile(r'{[A-Za-z0-9_]+}$')
    parts = []
    for component in path.split('/'):
        if template.match(component):
            parts.append('{%s}' % Names.CleanName(component[1:-1]))
        else:
            parts.append(component)
    return '/'.join(parts)
def ClassName(self, name, separator='_'):
    """Generate a valid class name from name."""
    if name is None:
        return name
    # Names from the protorpc runtime are passed through untouched.
    protected_prefixes = (
        'protorpc.', 'message_types.',
        'apitools.base.protorpclite.',
        'apitools.base.protorpclite.message_types.')
    if name.startswith(protected_prefixes):
        return name
    stripped = self.__StripName(name)
    camel = self.__ToCamel(stripped, separator=separator)
    return self.CleanName(camel)
def MethodName(self, name, separator='_'):
    """Generate a valid method name from name."""
    if name is None:
        return None
    camel = Names.__ToCamel(name, separator=separator)
    return Names.CleanName(camel)
def FieldName(self, name):
    """Generate a valid field name from name."""
    name = self.__StripName(name)
    convention = self.__name_convention
    if convention == 'LOWER_CAMEL':
        name = Names.__ToLowerCamel(name)
    elif convention == 'LOWER_WITH_UNDER':
        name = Names.__FromCamel(name)
    return Names.CleanName(name)
def Create(cls, discovery_doc,
           scope_ls, client_id, client_secret, user_agent, names, api_key):
    """Create a new ClientInfo object from a discovery document."""
    # Union of the scopes declared in the doc and those requested.
    auth_scopes = set(
        discovery_doc.get('auth', {}).get('oauth2', {}).get('scopes', {}))
    auth_scopes.update(scope_ls)
    package = discovery_doc['name']
    url_version = discovery_doc['version']
    base_url, base_path = _ComputePaths(package, url_version, discovery_doc)
    client_info = {
        'package': package,
        'version': NormalizeVersion(discovery_doc['version']),
        'url_version': url_version,
        'scopes': sorted(auth_scopes),
        'client_id': client_id,
        'client_secret': client_secret,
        'user_agent': user_agent,
        'api_key': api_key,
        'base_url': base_url,
        'base_path': base_path,
    }
    client_info['client_class_name'] = '%s%s' % (
        names.ClassName(client_info['package']),
        names.ClassName(client_info['version']))
    return cls(**client_info)
def CommentContext(self):
    """Print without any argument formatting.

    Context-manager body: while active, the comment-context flag is True.
    BUG FIX: restoration now happens in a finally clause, so the previous
    mode is restored even when the with-body raises (the original left
    comment mode permanently enabled on exception).
    """
    old_context = self.__comment_context
    self.__comment_context = True
    try:
        yield
    finally:
        self.__comment_context = old_context
def _RegisterCredentialsMethod(method, position=None):
    """Register a new method for fetching credentials.

    Args:
      method: callable taking (client_info, **kwds), returning
        credentials or None.
      position: optional index in _CREDENTIALS_METHODS; when omitted or
        past the end, the method is appended.

    Returns:
      method, unchanged, so this is usable as a decorator.
    """
    count = len(_CREDENTIALS_METHODS)
    insert_at = count if position is None else min(position, count)
    _CREDENTIALS_METHODS.insert(insert_at, method)
    return method
def GetCredentials(package_name, scopes, client_id, client_secret,
                   user_agent, credentials_filename=None, api_key=None,
                   client=None, oauth2client_args=None, **kwds):
    """Attempt to get credentials, using an oauth dance as the last resort."""
    scopes = util.NormalizeScopes(scopes)
    client_info = {
        'client_id': client_id,
        'client_secret': client_secret,
        'scope': ' '.join(sorted(scopes)),
        'user_agent': user_agent or '%s-generated/0.1' % package_name,
    }
    # Try each registered credentials method in priority order.
    for method in _CREDENTIALS_METHODS:
        credentials = method(client_info, **kwds)
        if credentials is not None:
            return credentials
    # Last resort: the interactive oauth flow, cached in a token file.
    token_path = credentials_filename or os.path.expanduser(
        '~/.apitools.token')
    credentials = CredentialsFromFile(
        token_path, client_info, oauth2client_args=oauth2client_args)
    if credentials is not None:
        return credentials
    raise exceptions.CredentialsError('Could not create valid credentials')
def ServiceAccountCredentialsFromFile(filename, scopes, user_agent=None):
    """Use the credentials in filename to create a token for scopes.

    Args:
      filename: path to a JSON service-account keyfile ('~' is expanded).
      scopes: iterable of OAuth scopes to request.
      user_agent: optional user agent to attach to the credentials.

    Returns:
      Service-account credentials, or None when the newer oauth2client
      path returns None.

    Raises:
      exceptions.CredentialsError: on the older oauth2client path, when
        the keyfile is not of service-account type.
    """
    filename = os.path.expanduser(filename)
    # NOTE(review): lexicographic string comparison — e.g. '1.10.0'
    # compares *less* than '1.5.2'. Confirm against the oauth2client
    # versions actually supported.
    if oauth2client.__version__ > '1.5.2':
        # Newer oauth2client parses the keyfile itself.
        credentials = (
            service_account.ServiceAccountCredentials.from_json_keyfile_name(
                filename, scopes=scopes))
        if credentials is not None:
            if user_agent is not None:
                credentials.user_agent = user_agent
        return credentials
    else:
        # Older oauth2client: parse the JSON keyfile by hand and use the
        # private _ServiceAccountCredentials constructor.
        with open(filename) as keyfile:
            service_account_info = json.load(keyfile)
        account_type = service_account_info.get('type')
        if account_type != oauth2client.client.SERVICE_ACCOUNT:
            raise exceptions.CredentialsError(
                'Invalid service account credentials: %s' % (filename,))
        credentials = service_account._ServiceAccountCredentials(
            service_account_id=service_account_info['client_id'],
            service_account_email=service_account_info['client_email'],
            private_key_id=service_account_info['private_key_id'],
            private_key_pkcs8_text=service_account_info['private_key'],
            scopes=scopes, user_agent=user_agent)
        return credentials
def ServiceAccountCredentialsFromP12File(
        service_account_name, private_key_filename, scopes, user_agent):
    """Create a new credential from the named .p12 keyfile.

    Args:
      service_account_name: service account email/id for the key.
      private_key_filename: path to the .p12 keyfile ('~' is expanded).
      scopes: iterable of OAuth scopes; normalized before use.
      user_agent: user agent attached to the credentials.

    Returns:
      Signed-JWT service-account credentials.
    """
    private_key_filename = os.path.expanduser(private_key_filename)
    scopes = util.NormalizeScopes(scopes)
    # NOTE(review): lexicographic version compare ('1.10' < '1.5.2') —
    # confirm against supported oauth2client versions.
    if oauth2client.__version__ > '1.5.2':
        credentials = (
            service_account.ServiceAccountCredentials.from_p12_keyfile(
                service_account_name, private_key_filename, scopes=scopes))
        if credentials is not None:
            credentials.user_agent = user_agent
        return credentials
    else:
        # Older oauth2client path: build the signed-JWT credentials from
        # the raw key bytes.
        with open(private_key_filename, 'rb') as key_file:
            return oauth2client.client.SignedJwtAssertionCredentials(
                service_account_name, key_file.read(), scopes,
                user_agent=user_agent)
def _GceMetadataRequest(relative_url, use_metadata_ip=False):
    """Request the given url from the GCE metadata service."""
    if use_metadata_ip:
        base_url = os.environ.get('GCE_METADATA_IP', '169.254.169.254')
    else:
        base_url = os.environ.get(
            'GCE_METADATA_ROOT', 'metadata.google.internal')
    url = 'http://' + base_url + '/computeMetadata/v1/' + relative_url
    # Bypass any configured proxies; the metadata server is link-local.
    opener = urllib.request.build_opener(urllib.request.ProxyHandler({}))
    request = urllib.request.Request(
        url, headers={'Metadata-Flavor': 'Google'})
    try:
        return opener.open(request)
    except urllib.error.URLError as e:
        raise exceptions.CommunicationError(
            'Could not reach metadata service: %s' % e.reason)
def _GetRunFlowFlags(args=None):
    """Retrieves command line flags based on gflags module."""
    # Imported lazily: argparse is only needed on this code path.
    import argparse
    parser = argparse.ArgumentParser(parents=[tools.argparser])
    flags, _ = parser.parse_known_args(args=args)
    # Mirror any gflags FLAGS settings onto the argparse namespace.
    for attr in ('auth_host_name', 'auth_host_port'):
        if hasattr(FLAGS, attr):
            setattr(flags, attr, getattr(FLAGS, attr))
    if hasattr(FLAGS, 'auth_local_webserver'):
        flags.noauth_local_webserver = (not FLAGS.auth_local_webserver)
    return flags
def CredentialsFromFile(path, client_info, oauth2client_args=None):
    """Read credentials from a file.

    Loads cached credentials from the store at *path*; if missing or
    invalid, runs the interactive OAuth flow (retrying on authorization
    failures) and caches the result.

    Args:
      path: path of the credential-store file.
      client_info: dict with 'client_id', 'user_agent' and 'scope' keys.
      oauth2client_args: optional argv passed to the oauth2client flow.

    Returns:
      Valid credentials, or None if the retry loop was exhausted.

    Raises:
      exceptions.CredentialsError: on an httplib2 communication error.
    """
    user_agent = client_info['user_agent']
    scope_key = client_info['scope']
    if not isinstance(scope_key, six.string_types):
        scope_key = ':'.join(scope_key)
    # The storage key identifies a (client, agent, scopes) triple in the
    # shared credential store.
    storage_key = client_info['client_id'] + user_agent + scope_key
    if _NEW_FILESTORE:
        credential_store = multiprocess_file_storage.MultiprocessFileStorage(
            path, storage_key)
    else:
        credential_store = multistore_file.get_credential_storage_custom_string_key(
            path, storage_key)
    if hasattr(FLAGS, 'auth_local_webserver'):
        FLAGS.auth_local_webserver = False
    credentials = credential_store.get()
    if credentials is None or credentials.invalid:
        print('Generating new OAuth credentials ...')
        for _ in range(20):
            # Retry on authorization failure; a communication error is
            # fatal and re-raised as CredentialsError.
            try:
                flow = oauth2client.client.OAuth2WebServerFlow(**client_info)
                flags = _GetRunFlowFlags(args=oauth2client_args)
                credentials = tools.run_flow(flow, credential_store, flags)
                break
            except (oauth2client.client.FlowExchangeError, SystemExit) as e:
                print('Invalid authorization: %s' % (e,))
            except httplib2.HttpLib2Error as e:
                print('Communication error: %s' % (e,))
                raise exceptions.CredentialsError(
                    'Communication error creating credentials: %s' % e)
    return credentials
def GetUserinfo(credentials, http=None):
    """Get the userinfo associated with the given credentials."""
    client = http or httplib2.Http()
    response, content = client.request(_GetUserinfoUrl(credentials))
    if response.status == http_client.BAD_REQUEST:
        # The access token may be stale; refresh once and retry.
        credentials.refresh(client)
        response, content = client.request(_GetUserinfoUrl(credentials))
    return json.loads(content or '{}')
def _GetServiceAccountCredentials(
        client_info, service_account_name=None,
        service_account_keyfile=None,
        service_account_json_keyfile=None, **unused_kwds):
    """Returns ServiceAccountCredentials from give file.

    Returns None if no service account information was provided.
    """
    # Name and p12 keyfile must be provided together (an XOR check).
    if bool(service_account_name) != bool(service_account_keyfile):
        raise exceptions.CredentialsError(
            'Service account name or keyfile provided without the other')
    scopes = client_info['scope'].split()
    user_agent = client_info['user_agent']
    if service_account_json_keyfile:
        return ServiceAccountCredentialsFromFile(
            service_account_json_keyfile, scopes, user_agent=user_agent)
    if service_account_name is not None:
        return ServiceAccountCredentialsFromP12File(
            service_account_name, service_account_keyfile, scopes,
            user_agent)
def _GetApplicationDefaultCredentials(
        client_info, skip_application_default_credentials=False,
        **unused_kwds):
    """Returns ADC with right scopes.

    Args:
      client_info: dict containing at least a space-separated 'scope'.
      skip_application_default_credentials: if True, do nothing.

    Returns:
      Scoped application-default credentials, or None.
    """
    scopes = client_info['scope'].split()
    if skip_application_default_credentials:
        return None
    gc = oauth2client.client.GoogleCredentials
    with cache_file_lock:
        try:
            # Search only well-known ADC files; avoids hitting the GCE
            # metadata server.
            credentials = gc._implicit_credentials_from_files()
        except oauth2client.client.ApplicationDefaultCredentialsError:
            return None
    cp = 'https://www.googleapis.com/auth/cloud-platform'
    if credentials is None:
        return None
    # Rescope unless these are plain GoogleCredentials being asked for
    # something other than the cloud-platform scope.
    if not isinstance(credentials, gc) or cp in scopes:
        return credentials.create_scoped(scopes)
    return None
def _CheckCacheFileForMatch(self, cache_filename, scopes):
    """Checks the cache file to see if it matches the given credentials.

    Args:
      cache_filename: path of the cache file to check.
      scopes: scopes the credentials must cover, or falsy to accept
        whatever is cached.

    Returns:
      The cached scopes list on a match, otherwise None.
    """
    creds = {
        'scopes': sorted(list(scopes)) if scopes else None,
        'svc_acct_name': self.__service_account_name,
    }
    cache_file = _MultiProcessCacheFile(cache_filename)
    try:
        cached_creds_str = cache_file.LockedRead()
        if not cached_creds_str:
            return None
        cached_creds = json.loads(cached_creds_str)
        if creds['svc_acct_name'] == cached_creds['svc_acct_name']:
            if creds['scopes'] in (None, cached_creds['scopes']):
                return cached_creds['scopes']
    except Exception:
        # Best-effort cache: any read/parse failure is a miss. Narrowed
        # from a bare `except:` so SystemExit / KeyboardInterrupt are no
        # longer swallowed (the explicit KeyboardInterrupt re-raise is
        # now redundant and removed).
        pass
def _WriteCacheFile(self, cache_filename, scopes):
    """Writes the credential metadata to the cache file.

    Args:
      cache_filename: path of the cache file to write.
      scopes: scopes covered by the credentials being cached.
    """
    creds = {'scopes': sorted(list(scopes)),
             'svc_acct_name': self.__service_account_name}
    creds_str = json.dumps(creds)
    cache_file = _MultiProcessCacheFile(cache_filename)
    try:
        cache_file.LockedWrite(creds_str)
    except Exception:
        # Best-effort write: narrowed from a bare `except:` so
        # SystemExit / KeyboardInterrupt are no longer swallowed.
        pass
def _ScopesFromMetadataServer(self, scopes):
    """Returns instance scopes based on GCE metadata server."""
    if not util.DetectGce():
        raise exceptions.ResourceUnavailableError(
            'GCE credentials requested outside a GCE instance')
    if not self.GetServiceAccount(self.__service_account_name):
        raise exceptions.ResourceUnavailableError(
            'GCE credentials requested but service account '
            '%s does not exist.' % self.__service_account_name)
    if not scopes:
        return self.GetInstanceScopes()
    requested = util.NormalizeScopes(scopes)
    available = self.GetInstanceScopes()
    if requested > available:
        raise exceptions.CredentialsError(
            'Instance did not have access to scopes %s' % (
                sorted(list(requested - available)),))
    # Note: returns the caller's scopes as given (not the normalized set),
    # matching the original behavior.
    return scopes
def _do_refresh_request(self, unused_http_request):
    """Refresh self.access_token by querying the metadata server.

    On communication failure the credential is marked invalid (and
    persisted, if a store is attached) before the error propagates.
    """
    relative_url = 'instance/service-accounts/{0}/token'.format(
        self.__service_account_name)
    try:
        response = _GceMetadataRequest(relative_url)
    except exceptions.CommunicationError:
        self.invalid = True
        if self.store:
            self.store.locked_put(self)
        raise
    content = response.read()
    try:
        credential_info = json.loads(content)
    except ValueError:
        raise exceptions.CredentialsError(
            'Could not parse response as JSON: %s' % content)
    self.access_token = credential_info['access_token']
    if 'expires_in' in credential_info:
        expires_in = int(credential_info['expires_in'])
        # Absolute expiry computed from the relative lifetime.
        self.token_expiry = (
            datetime.timedelta(seconds=expires_in) +
            datetime.datetime.utcnow())
    else:
        self.token_expiry = None
    self.invalid = False
    if self.store:
        self.store.locked_put(self)
def _refresh(self, _):
    """Refresh self.access_token via the App Engine app_identity API."""
    # Imported lazily: only available inside the App Engine runtime.
    from google.appengine.api import app_identity
    try:
        result = app_identity.get_access_token(self._scopes)
    except app_identity.Error as err:
        raise exceptions.CredentialsError(str(err))
    self.access_token = result[0]
def _ProcessLockAcquired(self):
    """Context-manager body: acquire the process lock with a timeout.

    Yields:
      True if the lock was acquired within the timeout, else False.

    BUG FIX: the flag is initialized before the try block. Previously,
    if acquire() itself raised, the finally clause referenced an unbound
    name and a NameError masked the real exception.
    """
    is_locked = False
    try:
        is_locked = self._process_lock.acquire(timeout=self._lock_timeout)
        yield is_locked
    finally:
        if is_locked:
            self._process_lock.release()
def LockedRead(self):
    """Acquire an interprocess lock and return the cache file contents.

    Returns:
      The decoded file contents, or None when the file could not be
      created or the process lock could not be acquired.
    """
    with self._thread_lock:
        if not self._EnsureFileExists():
            return None
        with self._process_lock_getter() as acquired_plock:
            if not acquired_plock:
                return None
            with open(self._filename, 'rb') as f:
                return f.read().decode(encoding=self._encoding)
def LockedWrite(self, cache_data):
    """Acquire an interprocess lock and write a string to the cache file.

    Returns:
      True when the write completed; False when the file could not be
      created or the process lock could not be acquired.
    """
    if isinstance(cache_data, six.text_type):
        cache_data = cache_data.encode(encoding=self._encoding)
    with self._thread_lock:
        if not self._EnsureFileExists():
            return False
        with self._process_lock_getter() as acquired_plock:
            if not acquired_plock:
                return False
            with open(self._filename, 'wb') as f:
                f.write(cache_data)
            return True
def _EnsureFileExists(self):
    """Touch the cache file; return True on success, False on error."""
    if os.path.exists(self._filename):
        return True
    # Create with owner-only permissions (umask 0o177 -> mode 0600).
    previous_umask = os.umask(0o177)
    try:
        open(self._filename, 'a+b').close()
    except OSError:
        return False
    finally:
        os.umask(previous_umask)
    return True
def YieldFromList(
        service, request, global_params=None, limit=None, batch_size=100,
        method='List', field='items', predicate=None,
        current_token_attribute='pageToken',
        next_token_attribute='nextPageToken',
        batch_size_attribute='maxResults'):
    """Make a series of List requests, keeping track of page tokens.

    Args:
      service: apitools service object exposing *method*.
      request: protorpc request message; copied, never mutated.
      global_params: per-request global parameters, passed through.
      limit: maximum number of items to yield, or None for no limit.
      batch_size: requested page size (the server may return fewer).
      method: name of the list method on *service*.
      field: repeated field on the response holding the items.
      predicate: optional filter applied before yielding each item.
      current_token_attribute: request attribute carrying the page token.
      next_token_attribute: response attribute carrying the next token.
      batch_size_attribute: request attribute carrying the page size;
        when falsy, no page size is set on the request.

    Yields:
      Items from each response page, filtered by *predicate*, up to
      *limit* items in total.
    """
    # Copy so the caller's request message is never mutated.
    request = encoding.CopyProtoMessage(request)
    setattr(request, current_token_attribute, None)
    while limit is None or limit:
        if batch_size_attribute:
            if batch_size is None:
                request_batch_size = None
            else:
                # Never request more than the remaining limit.
                request_batch_size = min(batch_size, limit or batch_size)
            setattr(request, batch_size_attribute, request_batch_size)
        response = getattr(service, method)(request,
                                            global_params=global_params)
        items = getattr(response, field)
        if predicate:
            items = list(filter(predicate, items))
        for item in items:
            yield item
            if limit is None:
                continue
            limit -= 1
            if not limit:
                return
        token = getattr(response, next_token_attribute)
        if not token:
            return
        setattr(request, current_token_attribute, token)
def __PrintDocstring(self, printer, method_info, method_name, name):
    """Print a docstring for a service method."""
    if method_info.description:
        description = util.CleanDescription(method_info.description)
        # NOTE(review): the partition below re-reads the *uncleaned*
        # description, discarding the CleanDescription result above —
        # confirm this is intended.
        first_line, newline, remaining = method_info.description.partition(
            '\n')
        if not first_line.endswith('.'):
            first_line = '%s.' % first_line
        description = '%s%s%s' % (first_line, newline, remaining)
    else:
        description = '%s method for the %s service.' % (method_name, name)
    with printer.CommentContext():
        # NOTE(review): only the raw-string prefix 'r' is printed here and
        # the computed description is never emitted — this line looks
        # truncated in this copy of the source; verify against upstream.
        printer('r')
def __WriteProtoServiceDeclaration(self, printer, name, method_info_map):
    """Write a single service declaration to a proto file."""
    printer()
    printer('service %s {', self.__GetServiceClassName(name))
    with printer.Indent():
        for method_name, method_info in method_info_map.items():
            # Leave room for the '// ' comment prefix.
            wrap_width = printer.CalculateWidth() - 3
            for comment_line in textwrap.wrap(method_info.description,
                                              wrap_width):
                printer('// %s', comment_line)
            printer('rpc %s (%s) returns (%s);',
                    method_name,
                    method_info.request_type_name,
                    method_info.response_type_name)
    printer('}')
def WriteProtoFile(self, printer):
    """Write the services in this registry to out as proto."""
    self.Validate()
    client_info = self.__client_info
    printer('// Generated services for %s version %s.',
            client_info.package, client_info.version)
    printer()
    printer('syntax = "proto2";')
    printer('package %s;', self.__package)
    printer('import "%s";', client_info.messages_proto_file_name)
    printer()
    for service_name, method_info_map in (
            self.__service_method_info_map.items()):
        self.__WriteProtoServiceDeclaration(
            printer, service_name, method_info_map)
def WriteFile(self, printer):
    """Write the services in this registry to out.

    Emits the full generated client module: header comment, imports, the
    client class with its constants and __init__, one service-binding
    line per service, and finally each service class.
    """
    self.Validate()
    client_info = self.__client_info
    # Empty format string: prints the module docstring in comment context.
    printer('', client_info.package, client_info.version)
    printer('# NOTE: This file is autogenerated and should not be edited '
            'by hand.')
    printer('from %s import base_api', self.__base_files_package)
    if self.__root_package:
        import_prefix = 'from {0} '.format(self.__root_package)
    else:
        import_prefix = ''
    printer('%simport %s as messages', import_prefix,
            client_info.messages_rule_name)
    printer()
    printer()
    printer('class %s(base_api.BaseApiClient):',
            client_info.client_class_name)
    with printer.Indent():
        printer('', client_info.package, client_info.version)
        printer()
        printer('MESSAGES_MODULE = messages')
        printer('BASE_URL = {0!r}'.format(client_info.base_url))
        printer()
        printer('_PACKAGE = {0!r}'.format(client_info.package))
        printer('_SCOPES = {0!r}'.format(
            client_info.scopes or
            ['https://www.googleapis.com/auth/userinfo.email']))
        printer('_VERSION = {0!r}'.format(client_info.version))
        printer('_CLIENT_ID = {0!r}'.format(client_info.client_id))
        printer('_CLIENT_SECRET = {0!r}'.format(client_info.client_secret))
        printer('_USER_AGENT = {0!r}'.format(client_info.user_agent))
        printer('_CLIENT_CLASS_NAME = {0!r}'.format(
            client_info.client_class_name))
        printer('_URL_VERSION = {0!r}'.format(client_info.url_version))
        printer('_API_KEY = {0!r}'.format(client_info.api_key))
        printer()
        printer("def __init__(self, url='', credentials=None,")
        # NOTE(review): whitespace inside the string literals below was
        # collapsed in this copy of the source; the indent widths are
        # reconstructed to align generated continuation lines — verify
        # against the upstream generator.
        with printer.Indent(indent='             '):
            printer('get_credentials=True, http=None, model=None,')
            printer('log_request=False, log_response=False,')
            printer('credentials_args=None, default_global_params=None,')
            printer('additional_http_headers=None, '
                    'response_encoding=None):')
        with printer.Indent():
            printer('', client_info.package)
            printer('url = url or self.BASE_URL')
            printer('super(%s, self).__init__(',
                    client_info.client_class_name)
            printer('    url, credentials=credentials,')
            printer('    get_credentials=get_credentials, http=http, '
                    'model=model,')
            printer('    log_request=log_request, '
                    'log_response=log_response,')
            printer('    credentials_args=credentials_args,')
            printer('    default_global_params=default_global_params,')
            printer('    additional_http_headers=additional_http_headers,')
            printer('    response_encoding=response_encoding)')
            for name in self.__service_method_info_map.keys():
                printer('self.%s = self.%s(self)',
                        name, self.__GetServiceClassName(name))
    for name, method_info in self.__service_method_info_map.items():
        self.__WriteSingleService(
            printer, name, method_info, client_info.client_class_name)
def __CreateRequestType(self, method_description, body_type=None):
    """Create a request type for this method.

    Builds an object schema from the method's parameters (preserving
    parameterOrder when given), appends the request body as an extra
    field when body_type is provided, registers the schema, and returns
    its id.
    """
    schema = {}
    schema['id'] = self.__names.ClassName('%sRequest' % (
        self.__names.ClassName(method_description['id'], separator='.'),))
    schema['type'] = 'object'
    schema['properties'] = collections.OrderedDict()
    if 'parameterOrder' not in method_description:
        ordered_parameters = list(method_description.get('parameters', []))
    else:
        ordered_parameters = method_description['parameterOrder'][:]
        # Any parameters not named in parameterOrder follow it.
        for k in method_description['parameters']:
            if k not in ordered_parameters:
                ordered_parameters.append(k)
    for parameter_name in ordered_parameters:
        field_name = self.__names.CleanName(parameter_name)
        field = dict(method_description['parameters'][parameter_name])
        if 'type' not in field:
            raise ValueError('No type found in parameter %s' % field)
        schema['properties'][field_name] = field
    if body_type is not None:
        body_field_name = self.__GetRequestField(
            method_description, body_type)
        if body_field_name in schema['properties']:
            raise ValueError('Failed to normalize request resource name')
        if 'description' not in body_type:
            body_type['description'] = (
                'A %s resource to be passed as the request body.' % (
                    self.__GetRequestType(body_type),))
        schema['properties'][body_field_name] = body_type
    self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
    return schema['id']
def __CreateVoidResponseType(self, method_description):
    """Create an empty response type."""
    method_name = self.__names.ClassName(
        method_description['id'], separator='.')
    schema = {
        'id': self.__names.ClassName('%sResponse' % method_name),
        'type': 'object',
        'description': 'An empty %s response.' % method_name,
    }
    self.__message_registry.AddDescriptorFromSchema(schema['id'], schema)
    return schema['id']
def __NeedRequestType(self, method_description, request_type):
    """Determine if this method needs a new request type created.

    A dedicated request type is needed unless every declared parameter
    is a path parameter already present as a field on the existing
    request_type message.
    """
    if not request_type:
        return True
    method_id = method_description.get('id', '')
    if method_id in self.__unelidable_request_methods:
        return True
    message = self.__message_registry.LookupDescriptorOrDie(request_type)
    if message is None:
        return True
    field_names = [x.name for x in message.fields]
    parameters = method_description.get('parameters', {})
    # for/else: the else runs only when no parameter failed the check.
    for param_name, param_info in parameters.items():
        if (param_info.get('location') != 'path' or
                self.__names.CleanName(param_name) not in field_names):
            break
    else:
        return False
    return True
def __MaxSizeToInt(self, max_size):
    """Convert a human-readable size like '5MB' into an int of bytes."""
    match = re.match(r'(?P<size>\d+)(?P<unit>.B)?$', max_size)
    if match is None:
        raise ValueError('Could not parse maxSize')
    size, unit = match.group('size', 'unit')
    if unit is None:
        return int(size)
    shift = {'KB': 10, 'MB': 20, 'GB': 30, 'TB': 40}.get(unit.upper())
    if shift is None:
        raise ValueError('Unknown unit %s' % unit)
    return int(size) << shift
def __ComputeUploadConfig(self, media_upload_config, method_id):
    """Fill out the upload config for this method.

    Args:
      media_upload_config: the 'mediaUpload' section of a discovery doc.
      method_id: id of the method, used in log messages.

    Returns:
      A populated base_api.ApiUploadInfo.
    """
    config = base_api.ApiUploadInfo()
    if 'maxSize' in media_upload_config:
        config.max_size = self.__MaxSizeToInt(
            media_upload_config['maxSize'])
    if 'accept' not in media_upload_config:
        # logging.warn is a deprecated alias of logging.warning.
        logging.warning(
            'No accept types found for upload configuration in '
            'method %s, using */*', method_id)
    # BUG FIX: the fallback must be a *list* containing '*/*'; the bare
    # string '*/*' used previously was iterated character-by-character,
    # yielding accept entries '*', '/', '*'.
    config.accept.extend(
        [str(a) for a in media_upload_config.get('accept', ['*/*'])])
    for accept_pattern in config.accept:
        if not _MIME_PATTERN_RE.match(accept_pattern):
            logging.warning('Unexpected MIME type: %s', accept_pattern)
    protocols = media_upload_config.get('protocols', {})
    for protocol in ('simple', 'resumable'):
        media = protocols.get(protocol, {})
        for attr in ('multipart', 'path'):
            if attr in media:
                setattr(config, '%s_%s' % (protocol, attr), media[attr])
    return config
def __ComputeMethodInfo(self, method_description, request, response,
                        request_field):
    """Compute the base_api.ApiMethodInfo for this method.

    Normalizes the method's path, collects required ordered params,
    classifies each parameter as query or path, and records upload /
    download support and scopes.
    """
    relative_path = self.__names.NormalizeRelativePath(
        ''.join((self.__client_info.base_path,
                 method_description['path'])))
    method_id = method_description['id']
    ordered_params = []
    # Only *required* parameters from parameterOrder are kept ordered.
    for param_name in method_description.get('parameterOrder', []):
        param_info = method_description['parameters'][param_name]
        if param_info.get('required', False):
            ordered_params.append(param_name)
    method_info = base_api.ApiMethodInfo(
        relative_path=relative_path,
        method_id=method_id,
        http_method=method_description['httpMethod'],
        description=util.CleanDescription(
            method_description.get('description', '')),
        query_params=[],
        path_params=[],
        ordered_params=ordered_params,
        request_type_name=self.__names.ClassName(request),
        response_type_name=self.__names.ClassName(response),
        request_field=request_field,
    )
    flat_path = method_description.get('flatPath', None)
    if flat_path is not None:
        flat_path = self.__names.NormalizeRelativePath(
            self.__client_info.base_path + flat_path)
        # Only record flat_path when it actually differs.
        if flat_path != relative_path:
            method_info.flat_path = flat_path
    if method_description.get('supportsMediaUpload', False):
        method_info.upload_config = self.__ComputeUploadConfig(
            method_description.get('mediaUpload'), method_id)
    method_info.supports_download = method_description.get(
        'supportsMediaDownload', False)
    self.__all_scopes.update(method_description.get('scopes', ()))
    for param, desc in method_description.get('parameters', {}).items():
        param = self.__names.CleanName(param)
        location = desc['location']
        if location == 'query':
            method_info.query_params.append(param)
        elif location == 'path':
            method_info.path_params.append(param)
        else:
            raise ValueError(
                'Unknown parameter location %s for parameter %s' % (
                    location, param))
    method_info.path_params.sort()
    method_info.query_params.sort()
    return method_info
def __GetRequestField(self, method_description, body_type):
    """Determine the request field for this method."""
    params = method_description.get('parameters', {})
    body_field_name = self.__BodyFieldName(body_type)
    if body_field_name in params:
        body_field_name = self.__names.FieldName(
            '%s_resource' % body_field_name)
    # In the unlikely case the '_resource' name also collides, keep
    # suffixing until it is unique.
    while body_field_name in params:
        body_field_name = self.__names.FieldName(
            '%s_body' % body_field_name)
    return body_field_name
def AddServiceFromResource(self, service_name, methods):
    """Add a new service named service_name with the given methods.

    Recurses into nested 'resources', registering each sub-resource as
    '<service>_<subservice>'.
    """
    service_name = self.__names.CleanName(service_name)
    method_descriptions = methods.get('methods', {})
    method_info_map = collections.OrderedDict()
    items = sorted(method_descriptions.items())
    for method_name, method_description in items:
        method_name = self.__names.MethodName(method_name)
        # Decide whether a dedicated request type is needed, or whether
        # the body type itself can serve as the request.
        body_type = method_description.get('request')
        if body_type is None:
            request_type = None
        else:
            request_type = self.__GetRequestType(body_type)
        if self.__NeedRequestType(method_description, request_type):
            request = self.__CreateRequestType(
                method_description, body_type=body_type)
            request_field = self.__GetRequestField(
                method_description, body_type)
        else:
            request = request_type
            request_field = base_api.REQUEST_IS_BODY
        # Methods with no declared response get an empty response type.
        if 'response' in method_description:
            response = method_description['response']['$ref']
        else:
            response = self.__CreateVoidResponseType(method_description)
        method_info_map[method_name] = self.__ComputeMethodInfo(
            method_description, request, response, request_field)
    # Recurse into nested sub-resources.
    nested_services = methods.get('resources', {})
    services = sorted(nested_services.items())
    for subservice_name, submethods in services:
        new_service_name = '%s_%s' % (service_name, subservice_name)
        self.AddServiceFromResource(new_service_name, submethods)
    self.__RegisterService(service_name, method_info_map)
def compress(data, compresslevel=9):
    """Compress data in one shot and return the compressed bytes.

    Optional argument is the compression level, in range of 0-9.
    """
    out = io.BytesIO()
    with GzipFile(fileobj=out, mode='wb', compresslevel=compresslevel) as gz:
        gz.write(data)
    return out.getvalue()
18,845 | def decompress ( data ) : with GzipFile ( fileobj = io . BytesIO ( data ) ) as f : return f . read ( ) | Decompress a gzip compressed string in one shot . Return the decompressed string . |
18,846 | def rewind ( self ) : if self . mode != READ : raise OSError ( "Can't rewind in write mode" ) self . fileobj . seek ( 0 ) self . _new_member = True self . extrabuf = b"" self . extrasize = 0 self . extrastart = 0 self . offset = 0 | Return the uncompressed stream file position indicator to the beginning of the file |
18,847 | def _Httplib2Debuglevel ( http_request , level , http = None ) : if http_request . loggable_body is None : yield return old_level = httplib2 . debuglevel http_levels = { } httplib2 . debuglevel = level if http is not None : for connection_key , connection in http . connections . items ( ) : if ':' not in connection_key : continue http_levels [ connection_key ] = connection . debuglevel connection . set_debuglevel ( level ) yield httplib2 . debuglevel = old_level if http is not None : for connection_key , old_level in http_levels . items ( ) : if connection_key in http . connections : http . connections [ connection_key ] . set_debuglevel ( old_level ) | Temporarily change the value of httplib2 . debuglevel if necessary . |
18,848 | def RebuildHttpConnections ( http ) : if getattr ( http , 'connections' , None ) : for conn_key in list ( http . connections . keys ( ) ) : if ':' in conn_key : del http . connections [ conn_key ] | Rebuilds all http connections in the httplib2 . Http instance . |
18,849 | def HandleExceptionsAndRebuildHttpConnections ( retry_args ) : retry_after = None if isinstance ( retry_args . exc , ( http_client . BadStatusLine , http_client . IncompleteRead , http_client . ResponseNotReady ) ) : logging . debug ( 'Caught HTTP error %s, retrying: %s' , type ( retry_args . exc ) . __name__ , retry_args . exc ) elif isinstance ( retry_args . exc , socket . error ) : logging . debug ( 'Caught socket error, retrying: %s' , retry_args . exc ) elif isinstance ( retry_args . exc , socket . gaierror ) : logging . debug ( 'Caught socket address error, retrying: %s' , retry_args . exc ) elif isinstance ( retry_args . exc , socket . timeout ) : logging . debug ( 'Caught socket timeout error, retrying: %s' , retry_args . exc ) elif isinstance ( retry_args . exc , httplib2 . ServerNotFoundError ) : logging . debug ( 'Caught server not found error, retrying: %s' , retry_args . exc ) elif isinstance ( retry_args . exc , ValueError ) : logging . debug ( 'Response content was invalid (%s), retrying' , retry_args . exc ) elif ( isinstance ( retry_args . exc , TokenRefreshError ) and hasattr ( retry_args . exc , 'status' ) and ( retry_args . exc . status == TOO_MANY_REQUESTS or retry_args . exc . status >= 500 ) ) : logging . debug ( 'Caught transient credential refresh error (%s), retrying' , retry_args . exc ) elif isinstance ( retry_args . exc , exceptions . RequestError ) : logging . debug ( 'Request returned no response, retrying' ) elif isinstance ( retry_args . exc , exceptions . BadStatusCodeError ) : logging . debug ( 'Response returned status %s, retrying' , retry_args . exc . status_code ) elif isinstance ( retry_args . exc , exceptions . RetryAfterError ) : logging . debug ( 'Response returned a retry-after header, retrying' ) retry_after = retry_args . exc . retry_after else : raise retry_args . exc RebuildHttpConnections ( retry_args . http ) logging . debug ( 'Retrying request to url %s after exception %s' , retry_args . http_request . 
url , retry_args . exc ) time . sleep ( retry_after or util . CalculateWaitForRetry ( retry_args . num_retries , max_wait = retry_args . max_retry_wait ) ) | Exception handler for http failures . |
18,850 | def _MakeRequestNoRetry ( http , http_request , redirections = 5 , check_response_func = CheckResponse ) : connection_type = None if getattr ( http , 'connections' , None ) : url_scheme = parse . urlsplit ( http_request . url ) . scheme if url_scheme and url_scheme in http . connections : connection_type = http . connections [ url_scheme ] new_debuglevel = 4 if httplib2 . debuglevel == 4 else 0 with _Httplib2Debuglevel ( http_request , new_debuglevel , http = http ) : info , content = http . request ( str ( http_request . url ) , method = str ( http_request . http_method ) , body = http_request . body , headers = http_request . headers , redirections = redirections , connection_type = connection_type ) if info is None : raise exceptions . RequestError ( ) response = Response ( info , content , http_request . url ) check_response_func ( response ) return response | Send http_request via the given http . |
18,851 | def body ( self , value ) : self . __body = value if value is not None : body_length = getattr ( self . __body , 'length' , None ) or len ( self . __body ) self . headers [ 'content-length' ] = str ( body_length ) else : self . headers . pop ( 'content-length' , None ) if not isinstance ( value , ( type ( None ) , six . string_types ) ) : self . loggable_body = '<media body>' | Sets the request body ; handles logging and length measurement . |
18,852 | def length ( self ) : def ProcessContentRange ( content_range ) : _ , _ , range_spec = content_range . partition ( ' ' ) byte_range , _ , _ = range_spec . partition ( '/' ) start , _ , end = byte_range . partition ( '-' ) return int ( end ) - int ( start ) + 1 if '-content-encoding' in self . info and 'content-range' in self . info : return ProcessContentRange ( self . info [ 'content-range' ] ) elif 'content-length' in self . info : return int ( self . info . get ( 'content-length' ) ) elif 'content-range' in self . info : return ProcessContentRange ( self . info [ 'content-range' ] ) return len ( self . content ) | Return the length of this response . |
18,853 | def read ( self , size = None ) : if size is None or size < 0 : raise exceptions . NotYetImplementedError ( 'Illegal read of size %s requested on BufferedStream. ' 'Wrapped stream %s is at position %s-%s, ' '%s bytes remaining.' % ( size , self . __stream , self . __start_pos , self . __end_pos , self . _bytes_remaining ) ) data = '' if self . _bytes_remaining : size = min ( size , self . _bytes_remaining ) data = self . __buffered_data [ self . __buffer_pos : self . __buffer_pos + size ] self . __buffer_pos += size return data | Reads from the buffer . |
18,854 | def WriteProtoFile ( self , printer ) : self . Validate ( ) extended_descriptor . WriteMessagesFile ( self . __file_descriptor , self . __package , self . __client_info . version , printer ) | Write the messages file to out as proto . |
18,855 | def WriteFile ( self , printer ) : self . Validate ( ) extended_descriptor . WritePythonFile ( self . __file_descriptor , self . __package , self . __client_info . version , printer ) | Write the messages file to out . |
18,856 | def __RegisterDescriptor ( self , new_descriptor ) : if not isinstance ( new_descriptor , ( extended_descriptor . ExtendedMessageDescriptor , extended_descriptor . ExtendedEnumDescriptor ) ) : raise ValueError ( 'Cannot add descriptor of type %s' % ( type ( new_descriptor ) , ) ) full_name = self . __ComputeFullName ( new_descriptor . name ) if full_name in self . __message_registry : raise ValueError ( 'Attempt to re-register descriptor %s' % full_name ) if full_name not in self . __nascent_types : raise ValueError ( 'Directly adding types is not supported' ) new_descriptor . full_name = full_name self . __message_registry [ full_name ] = new_descriptor if isinstance ( new_descriptor , extended_descriptor . ExtendedMessageDescriptor ) : self . __current_env . message_types . append ( new_descriptor ) elif isinstance ( new_descriptor , extended_descriptor . ExtendedEnumDescriptor ) : self . __current_env . enum_types . append ( new_descriptor ) self . __unknown_types . discard ( full_name ) self . __nascent_types . remove ( full_name ) | Register the given descriptor in this registry . |
18,857 | def AddEnumDescriptor ( self , name , description , enum_values , enum_descriptions ) : message = extended_descriptor . ExtendedEnumDescriptor ( ) message . name = self . __names . ClassName ( name ) message . description = util . CleanDescription ( description ) self . __DeclareDescriptor ( message . name ) for index , ( enum_name , enum_description ) in enumerate ( zip ( enum_values , enum_descriptions ) ) : enum_value = extended_descriptor . ExtendedEnumValueDescriptor ( ) enum_value . name = self . __names . NormalizeEnumName ( enum_name ) if enum_value . name != enum_name : message . enum_mappings . append ( extended_descriptor . ExtendedEnumDescriptor . JsonEnumMapping ( python_name = enum_value . name , json_name = enum_name ) ) self . __AddImport ( 'from %s import encoding' % self . __base_files_package ) enum_value . number = index enum_value . description = util . CleanDescription ( enum_description or '<no description>' ) message . values . append ( enum_value ) self . __RegisterDescriptor ( message ) | Add a new EnumDescriptor named name with the given enum values . |
18,858 | def __DeclareMessageAlias ( self , schema , alias_for ) : message = extended_descriptor . ExtendedMessageDescriptor ( ) message . name = self . __names . ClassName ( schema [ 'id' ] ) message . alias_for = alias_for self . __DeclareDescriptor ( message . name ) self . __AddImport ( 'from %s import extra_types' % self . __base_files_package ) self . __RegisterDescriptor ( message ) | Declare schema as an alias for alias_for . |
18,859 | def __AddAdditionalProperties ( self , message , schema , properties ) : additional_properties_info = schema [ 'additionalProperties' ] entries_type_name = self . __AddAdditionalPropertyType ( message . name , additional_properties_info ) description = util . CleanDescription ( additional_properties_info . get ( 'description' ) ) if description is None : description = 'Additional properties of type %s' % message . name attrs = { 'items' : { '$ref' : entries_type_name , } , 'description' : description , 'type' : 'array' , } field_name = 'additionalProperties' message . fields . append ( self . __FieldDescriptorFromProperties ( field_name , len ( properties ) + 1 , attrs ) ) self . __AddImport ( 'from %s import encoding' % self . __base_files_package ) message . decorators . append ( 'encoding.MapUnrecognizedFields(%r)' % field_name ) | Add an additionalProperties field to message . |
18,860 | def AddDescriptorFromSchema ( self , schema_name , schema ) : if self . __GetDescriptor ( schema_name ) : return if schema . get ( 'enum' ) : self . __DeclareEnum ( schema_name , schema ) return if schema . get ( 'type' ) == 'any' : self . __DeclareMessageAlias ( schema , 'extra_types.JsonValue' ) return if schema . get ( 'type' ) != 'object' : raise ValueError ( 'Cannot create message descriptors for type %s' % schema . get ( 'type' ) ) message = extended_descriptor . ExtendedMessageDescriptor ( ) message . name = self . __names . ClassName ( schema [ 'id' ] ) message . description = util . CleanDescription ( schema . get ( 'description' , 'A %s object.' % message . name ) ) self . __DeclareDescriptor ( message . name ) with self . __DescriptorEnv ( message ) : properties = schema . get ( 'properties' , { } ) for index , ( name , attrs ) in enumerate ( sorted ( properties . items ( ) ) ) : field = self . __FieldDescriptorFromProperties ( name , index + 1 , attrs ) message . fields . append ( field ) if field . name != name : message . field_mappings . append ( type ( message ) . JsonFieldMapping ( python_name = field . name , json_name = name ) ) self . __AddImport ( 'from %s import encoding' % self . __base_files_package ) if 'additionalProperties' in schema : self . __AddAdditionalProperties ( message , schema , properties ) self . __RegisterDescriptor ( message ) | Add a new MessageDescriptor named schema_name based on schema . |
18,861 | def __AddAdditionalPropertyType ( self , name , property_schema ) : new_type_name = 'AdditionalProperty' property_schema = dict ( property_schema ) property_schema . pop ( 'description' , None ) description = 'An additional property for a %s object.' % name schema = { 'id' : new_type_name , 'type' : 'object' , 'description' : description , 'properties' : { 'key' : { 'type' : 'string' , 'description' : 'Name of the additional property.' , } , 'value' : property_schema , } , } self . AddDescriptorFromSchema ( new_type_name , schema ) return new_type_name | Add a new nested AdditionalProperty message . |
18,862 | def __AddEntryType ( self , entry_type_name , entry_schema , parent_name ) : entry_schema . pop ( 'description' , None ) description = 'Single entry in a %s.' % parent_name schema = { 'id' : entry_type_name , 'type' : 'object' , 'description' : description , 'properties' : { 'entry' : { 'type' : 'array' , 'items' : entry_schema , } , } , } self . AddDescriptorFromSchema ( entry_type_name , schema ) return entry_type_name | Add a type for a list entry . |
18,863 | def __FieldDescriptorFromProperties ( self , name , index , attrs ) : field = descriptor . FieldDescriptor ( ) field . name = self . __names . CleanName ( name ) field . number = index field . label = self . __ComputeLabel ( attrs ) new_type_name_hint = self . __names . ClassName ( '%sValue' % self . __names . ClassName ( name ) ) type_info = self . __GetTypeInfo ( attrs , new_type_name_hint ) field . type_name = type_info . type_name field . variant = type_info . variant if 'default' in attrs : default = attrs [ 'default' ] if not ( field . type_name == 'string' or field . variant == messages . Variant . ENUM ) : default = str ( json . loads ( default ) ) if field . variant == messages . Variant . ENUM : default = self . __names . NormalizeEnumName ( default ) field . default_value = default extended_field = extended_descriptor . ExtendedFieldDescriptor ( ) extended_field . name = field . name extended_field . description = util . CleanDescription ( attrs . get ( 'description' , 'A %s attribute.' % field . type_name ) ) extended_field . field_descriptor = field return extended_field | Create a field descriptor for these attrs . |
18,864 | def __GetTypeInfo ( self , attrs , name_hint ) : type_ref = self . __names . ClassName ( attrs . get ( '$ref' ) ) type_name = attrs . get ( 'type' ) if not ( type_ref or type_name ) : raise ValueError ( 'No type found for %s' % attrs ) if type_ref : self . __AddIfUnknown ( type_ref ) return TypeInfo ( type_name = type_ref , variant = messages . Variant . MESSAGE ) if 'enum' in attrs : enum_name = '%sValuesEnum' % name_hint return self . __DeclareEnum ( enum_name , attrs ) if 'format' in attrs : type_info = self . PRIMITIVE_FORMAT_MAP . get ( attrs [ 'format' ] ) if type_info is None : if type_name in self . PRIMITIVE_TYPE_INFO_MAP : return self . PRIMITIVE_TYPE_INFO_MAP [ type_name ] raise ValueError ( 'Unknown type/format "%s"/"%s"' % ( attrs [ 'format' ] , type_name ) ) if type_info . type_name . startswith ( ( 'apitools.base.protorpclite.message_types.' , 'message_types.' ) ) : self . __AddImport ( 'from %s import message_types as _message_types' % self . __protorpc_package ) if type_info . type_name . startswith ( 'extra_types.' ) : self . __AddImport ( 'from %s import extra_types' % self . __base_files_package ) return type_info if type_name in self . PRIMITIVE_TYPE_INFO_MAP : type_info = self . PRIMITIVE_TYPE_INFO_MAP [ type_name ] if type_info . type_name . startswith ( 'extra_types.' ) : self . __AddImport ( 'from %s import extra_types' % self . __base_files_package ) return type_info if type_name == 'array' : items = attrs . get ( 'items' ) if not items : raise ValueError ( 'Array type with no item type: %s' % attrs ) entry_name_hint = self . __names . ClassName ( items . get ( 'title' ) or '%sListEntry' % name_hint ) entry_label = self . __ComputeLabel ( items ) if entry_label == descriptor . FieldDescriptor . Label . REPEATED : parent_name = self . __names . ClassName ( items . get ( 'title' ) or name_hint ) entry_type_name = self . __AddEntryType ( entry_name_hint , items . 
get ( 'items' ) , parent_name ) return TypeInfo ( type_name = entry_type_name , variant = messages . Variant . MESSAGE ) return self . __GetTypeInfo ( items , entry_name_hint ) elif type_name == 'any' : self . __AddImport ( 'from %s import extra_types' % self . __base_files_package ) return self . PRIMITIVE_TYPE_INFO_MAP [ 'any' ] elif type_name == 'object' : if not name_hint : raise ValueError ( 'Cannot create subtype without some name hint' ) schema = dict ( attrs ) schema [ 'id' ] = name_hint self . AddDescriptorFromSchema ( name_hint , schema ) self . __AddIfUnknown ( name_hint ) return TypeInfo ( type_name = name_hint , variant = messages . Variant . MESSAGE ) raise ValueError ( 'Unknown type: %s' % type_name ) | Return a TypeInfo object for attrs creating one if needed . |
18,865 | def _GetDiscoveryDocFromFlags ( args ) : if args . discovery_url : try : return util . FetchDiscoveryDoc ( args . discovery_url ) except exceptions . CommunicationError : raise exceptions . GeneratedClientError ( 'Could not fetch discovery doc' ) infile = os . path . expanduser ( args . infile ) or '/dev/stdin' with io . open ( infile , encoding = 'utf8' ) as f : return json . loads ( util . ReplaceHomoglyphs ( f . read ( ) ) ) | Get the discovery doc from flags . |
18,866 | def _GetCodegenFromFlags ( args ) : discovery_doc = _GetDiscoveryDocFromFlags ( args ) names = util . Names ( args . strip_prefix , args . experimental_name_convention , args . experimental_capitalize_enums ) if args . client_json : try : with io . open ( args . client_json , encoding = 'utf8' ) as client_json : f = json . loads ( util . ReplaceHomoglyphs ( client_json . read ( ) ) ) web = f . get ( 'installed' , f . get ( 'web' , { } ) ) client_id = web . get ( 'client_id' ) client_secret = web . get ( 'client_secret' ) except IOError : raise exceptions . NotFoundError ( 'Failed to open client json file: %s' % args . client_json ) else : client_id = args . client_id client_secret = args . client_secret if not client_id : logging . warning ( 'No client ID supplied' ) client_id = '' if not client_secret : logging . warning ( 'No client secret supplied' ) client_secret = '' client_info = util . ClientInfo . Create ( discovery_doc , args . scope , client_id , client_secret , args . user_agent , names , args . api_key ) outdir = os . path . expanduser ( args . outdir ) or client_info . default_directory if os . path . exists ( outdir ) and not args . overwrite : raise exceptions . ConfigurationValueError ( 'Output directory exists, pass --overwrite to replace ' 'the existing files.' ) if not os . path . exists ( outdir ) : os . makedirs ( outdir ) return gen_client_lib . DescriptorGenerator ( discovery_doc , client_info , names , args . root_package , outdir , base_package = args . base_package , protorpc_package = args . protorpc_package , init_wildcards_file = ( args . init_file == 'wildcards' ) , use_proto2 = args . experimental_proto2_output , unelidable_request_methods = args . unelidable_request_methods , apitools_version = args . apitools_version ) | Create a codegen object from flags . |
18,867 | def GenerateClient ( args ) : codegen = _GetCodegenFromFlags ( args ) if codegen is None : logging . error ( 'Failed to create codegen, exiting.' ) return 128 _WriteGeneratedFiles ( args , codegen ) if args . init_file != 'none' : _WriteInit ( codegen ) | Driver for client code generation . |
18,868 | def GeneratePipPackage ( args ) : discovery_doc = _GetDiscoveryDocFromFlags ( args ) package = discovery_doc [ 'name' ] original_outdir = os . path . expanduser ( args . outdir ) args . outdir = os . path . join ( args . outdir , 'apitools/clients/%s' % package ) args . root_package = 'apitools.clients.%s' % package codegen = _GetCodegenFromFlags ( args ) if codegen is None : logging . error ( 'Failed to create codegen, exiting.' ) return 1 _WriteGeneratedFiles ( args , codegen ) _WriteInit ( codegen ) with util . Chdir ( original_outdir ) : _WriteSetupPy ( codegen ) with util . Chdir ( 'apitools' ) : _WriteIntermediateInit ( codegen ) with util . Chdir ( 'clients' ) : _WriteIntermediateInit ( codegen ) | Generate a client as a pip - installable tarball . |
18,869 | def read ( self , size = None ) : if size is not None : read_size = min ( size , self . __remaining_bytes ) else : read_size = self . __remaining_bytes data = self . __stream . read ( read_size ) if read_size > 0 and not data : raise exceptions . StreamExhausted ( 'Not enough bytes in stream; expected %d, exhausted ' 'after %d' % ( self . __max_bytes , self . __max_bytes - self . __remaining_bytes ) ) self . __remaining_bytes -= len ( data ) return data | Read at most size bytes from this slice . |
18,870 | def update_md5 ( filenames ) : import re for name in filenames : base = os . path . basename ( name ) f = open ( name , 'rb' ) md5_data [ base ] = md5 ( f . read ( ) ) . hexdigest ( ) f . close ( ) data = [ " %r: %r,\n" % it for it in md5_data . items ( ) ] data . sort ( ) repl = "" . join ( data ) import inspect srcfile = inspect . getsourcefile ( sys . modules [ __name__ ] ) f = open ( srcfile , 'rb' ) src = f . read ( ) f . close ( ) match = re . search ( "\nmd5_data = {\n([^}]+)}" , src ) if not match : print >> sys . stderr , "Internal error!" sys . exit ( 2 ) src = src [ : match . start ( 1 ) ] + repl + src [ match . end ( 1 ) : ] f = open ( srcfile , 'w' ) f . write ( src ) f . close ( ) | Update our built - in md5 registry |
18,871 | def Add ( self , service , method , request , global_params = None ) : method_config = service . GetMethodConfig ( method ) upload_config = service . GetUploadConfig ( method ) http_request = service . PrepareHttpRequest ( method_config , request , global_params = global_params , upload_config = upload_config ) api_request = self . ApiCall ( http_request , self . retryable_codes , service , method_config ) self . api_requests . append ( api_request ) | Add a request to the batch . |
18,872 | def Execute ( self , http , sleep_between_polls = 5 , max_retries = 5 , max_batch_size = None , batch_request_callback = None ) : requests = [ request for request in self . api_requests if not request . terminal_state ] batch_size = max_batch_size or len ( requests ) for attempt in range ( max_retries ) : if attempt : time . sleep ( sleep_between_polls ) for i in range ( 0 , len ( requests ) , batch_size ) : batch_http_request = BatchHttpRequest ( batch_url = self . batch_url , callback = batch_request_callback , response_encoding = self . response_encoding ) for request in itertools . islice ( requests , i , i + batch_size ) : batch_http_request . Add ( request . http_request , request . HandleResponse ) batch_http_request . Execute ( http ) if hasattr ( http . request , 'credentials' ) : if any ( request . authorization_failed for request in itertools . islice ( requests , i , i + batch_size ) ) : http . request . credentials . refresh ( http ) requests = [ request for request in self . api_requests if not request . terminal_state ] if not requests : break return self . api_requests | Execute all of the requests in the batch . |
18,873 | def _ConvertHeaderToId ( header ) : if not ( header . startswith ( '<' ) or header . endswith ( '>' ) ) : raise exceptions . BatchError ( 'Invalid value for Content-ID: %s' % header ) if '+' not in header : raise exceptions . BatchError ( 'Invalid value for Content-ID: %s' % header ) _ , request_id = header [ 1 : - 1 ] . rsplit ( '+' , 1 ) return urllib_parse . unquote ( request_id ) | Convert a Content - ID header value to an id . |
18,874 | def _SerializeRequest ( self , request ) : parsed = urllib_parse . urlsplit ( request . url ) request_line = urllib_parse . urlunsplit ( ( '' , '' , parsed . path , parsed . query , '' ) ) if not isinstance ( request_line , six . text_type ) : request_line = request_line . decode ( 'utf-8' ) status_line = u' ' . join ( ( request . http_method , request_line , u'HTTP/1.1\n' ) ) major , minor = request . headers . get ( 'content-type' , 'application/json' ) . split ( '/' ) msg = mime_nonmultipart . MIMENonMultipart ( major , minor ) for key , value in request . headers . items ( ) : if key == 'content-type' : continue msg [ key ] = value msg [ 'Host' ] = parsed . netloc msg . set_unixfrom ( None ) if request . body is not None : msg . set_payload ( request . body ) str_io = six . StringIO ( ) gen = generator . Generator ( str_io , maxheaderlen = 0 ) gen . flatten ( msg , unixfrom = False ) body = str_io . getvalue ( ) return status_line + body | Convert a http_wrapper . Request object into a string . |
18,875 | def _DeserializeResponse ( self , payload ) : status_line , payload = payload . split ( '\n' , 1 ) _ , status , _ = status_line . split ( ' ' , 2 ) parser = email_parser . Parser ( ) msg = parser . parsestr ( payload ) info = dict ( msg ) info [ 'status' ] = status content = msg . get_payload ( ) return http_wrapper . Response ( info , content , self . __batch_url ) | Convert string into Response and content . |
18,876 | def Add ( self , request , callback = None ) : handler = RequestResponseAndHandler ( request , None , callback ) self . __request_response_handlers [ self . _NewId ( ) ] = handler | Add a new request . |
18,877 | def _Execute ( self , http ) : message = mime_multipart . MIMEMultipart ( 'mixed' ) setattr ( message , '_write_headers' , lambda self : None ) for key in self . __request_response_handlers : msg = mime_nonmultipart . MIMENonMultipart ( 'application' , 'http' ) msg [ 'Content-Transfer-Encoding' ] = 'binary' msg [ 'Content-ID' ] = self . _ConvertIdToHeader ( key ) body = self . _SerializeRequest ( self . __request_response_handlers [ key ] . request ) msg . set_payload ( body ) message . attach ( msg ) request = http_wrapper . Request ( self . __batch_url , 'POST' ) request . body = message . as_string ( ) request . headers [ 'content-type' ] = ( 'multipart/mixed; boundary="%s"' ) % message . get_boundary ( ) response = http_wrapper . MakeRequest ( http , request ) if response . status_code >= 300 : raise exceptions . HttpError . FromResponse ( response ) header = 'content-type: %s\r\n\r\n' % response . info [ 'content-type' ] content = response . content if isinstance ( content , bytes ) and self . __response_encoding : content = response . content . decode ( self . __response_encoding ) parser = email_parser . Parser ( ) mime_response = parser . parsestr ( header + content ) if not mime_response . is_multipart ( ) : raise exceptions . BatchError ( 'Response not in multipart/mixed format.' ) for part in mime_response . get_payload ( ) : request_id = self . _ConvertHeaderToId ( part [ 'Content-ID' ] ) response = self . _DeserializeResponse ( part . get_payload ( ) ) self . __request_response_handlers [ request_id ] = ( self . __request_response_handlers [ request_id ] . _replace ( response = response ) ) | Serialize batch request send to server process response . |
18,878 | def Execute ( self , http ) : self . _Execute ( http ) for key in self . __request_response_handlers : response = self . __request_response_handlers [ key ] . response callback = self . __request_response_handlers [ key ] . handler exception = None if response . status_code >= 300 : exception = exceptions . HttpError . FromResponse ( response ) if callback is not None : callback ( response , exception ) if self . __callback is not None : self . __callback ( response , exception ) | Execute all the requests as a single batched HTTP request . |
18,879 | def find_definition ( name , relative_to = None , importer = __import__ ) : if not ( relative_to is None or isinstance ( relative_to , types . ModuleType ) or isinstance ( relative_to , type ) and issubclass ( relative_to , Message ) ) : raise TypeError ( 'relative_to must be None, Message definition or module.' ' Found: %s' % relative_to ) name_path = name . split ( '.' ) if not name_path [ 0 ] : relative_to = None name_path = name_path [ 1 : ] def search_path ( ) : next_part = relative_to for node in name_path : attribute = getattr ( next_part , node , None ) if attribute is not None : next_part = attribute else : if ( next_part is None or isinstance ( next_part , types . ModuleType ) ) : if next_part is None : module_name = node else : module_name = '%s.%s' % ( next_part . __name__ , node ) try : fromitem = module_name . split ( '.' ) [ - 1 ] next_part = importer ( module_name , '' , '' , [ str ( fromitem ) ] ) except ImportError : return None else : return None if not isinstance ( next_part , types . ModuleType ) : if not ( isinstance ( next_part , type ) and issubclass ( next_part , ( Message , Enum ) ) ) : return None return next_part while True : found = search_path ( ) if isinstance ( found , type ) and issubclass ( found , ( Enum , Message ) ) : return found else : if relative_to is None : raise DefinitionNotFoundError ( 'Could not find definition for %s' % name ) else : if isinstance ( relative_to , types . ModuleType ) : module_path = relative_to . __name__ . split ( '.' ) [ : - 1 ] if not module_path : relative_to = None else : relative_to = importer ( '.' . join ( module_path ) , '' , '' , [ module_path [ - 1 ] ] ) elif ( isinstance ( relative_to , type ) and issubclass ( relative_to , Message ) ) : parent = relative_to . message_definition ( ) if parent is None : last_module_name = relative_to . __module__ . split ( '.' ) [ - 1 ] relative_to = importer ( relative_to . 
__module__ , '' , '' , [ last_module_name ] ) else : relative_to = parent | Find definition by name in module - space . |
18,880 | def definition_name ( cls ) : outer_definition_name = cls . outer_definition_name ( ) if outer_definition_name is None : return six . text_type ( cls . __name__ ) return u'%s.%s' % ( outer_definition_name , cls . __name__ ) | Helper method for creating definition name . |
18,881 | def outer_definition_name ( cls ) : outer_definition = cls . message_definition ( ) if not outer_definition : return util . get_package_for_module ( cls . __module__ ) return outer_definition . definition_name ( ) | Helper method for creating outer definition name . |
18,882 | def definition_package ( cls ) : outer_definition = cls . message_definition ( ) if not outer_definition : return util . get_package_for_module ( cls . __module__ ) return outer_definition . definition_package ( ) | Helper method for creating creating the package of a definition . |
18,883 | def to_dict ( cls ) : return dict ( ( item . name , item . number ) for item in iter ( cls ) ) | Make dictionary version of enumerated class . |
18,884 | def check_initialized ( self ) : for name , field in self . __by_name . items ( ) : value = getattr ( self , name ) if value is None : if field . required : raise ValidationError ( "Message %s is missing required field %s" % ( type ( self ) . __name__ , name ) ) else : try : if ( isinstance ( field , MessageField ) and issubclass ( field . message_type , Message ) ) : if field . repeated : for item in value : item_message_value = field . value_to_message ( item ) item_message_value . check_initialized ( ) else : message_value = field . value_to_message ( value ) message_value . check_initialized ( ) except ValidationError as err : if not hasattr ( err , 'message_name' ) : err . message_name = type ( self ) . __name__ raise | Check class for initialization status . |
18,885 | def get_assigned_value ( self , name ) : message_type = type ( self ) try : field = message_type . field_by_name ( name ) except KeyError : raise AttributeError ( 'Message %s has no field %s' % ( message_type . __name__ , name ) ) return self . __tags . get ( field . number ) | Get the assigned value of an attribute . |
18,886 | def reset ( self , name ) : message_type = type ( self ) try : field = message_type . field_by_name ( name ) except KeyError : if name not in message_type . __by_name : raise AttributeError ( 'Message %s has no field %s' % ( message_type . __name__ , name ) ) if field . repeated : self . __tags [ field . number ] = FieldList ( field , [ ] ) else : self . __tags . pop ( field . number , None ) | Reset assigned value for field . |
18,887 | def get_unrecognized_field_info ( self , key , value_default = None , variant_default = None ) : value , variant = self . __unrecognized_fields . get ( key , ( value_default , variant_default ) ) return value , variant | Get the value and variant of an unknown field in this message . |
18,888 | def set_unrecognized_field ( self , key , value , variant ) : if not isinstance ( variant , Variant ) : raise TypeError ( 'Variant type %s is not valid.' % variant ) self . __unrecognized_fields [ key ] = value , variant | Set an unrecognized field used when decoding a message . |
18,889 | def append ( self , value ) : self . __field . validate_element ( value ) return list . append ( self , value ) | Validate item appending to list . |
18,890 | def extend ( self , sequence ) : self . __field . validate ( sequence ) return list . extend ( self , sequence ) | Validate extension of list . |
18,891 | def insert ( self , index , value ) : self . __field . validate_element ( value ) return list . insert ( self , index , value ) | Validate item insertion to list . |
18,892 | def validate_element ( self , value ) : if not isinstance ( value , self . type ) : if isinstance ( value , six . integer_types ) and self . type == float : return float ( value ) if value is None : if self . required : raise ValidationError ( 'Required field is missing' ) else : try : name = self . name except AttributeError : raise ValidationError ( 'Expected type %s for %s, ' 'found %s (type %s)' % ( self . type , self . __class__ . __name__ , value , type ( value ) ) ) else : raise ValidationError ( 'Expected type %s for field %s, found %s (type %s)' % ( self . type , name , value , type ( value ) ) ) return value | Validate single element of field . |
18,893 | def __validate ( self , value , validate_element ) : if not self . repeated : return validate_element ( value ) else : if isinstance ( value , ( list , tuple ) ) : result = [ ] for element in value : if element is None : try : name = self . name except AttributeError : raise ValidationError ( 'Repeated values for %s ' 'may not be None' % self . __class__ . __name__ ) else : raise ValidationError ( 'Repeated values for field %s ' 'may not be None' % name ) result . append ( validate_element ( element ) ) return result elif value is not None : try : name = self . name except AttributeError : raise ValidationError ( '%s is repeated. Found: %s' % ( self . __class__ . __name__ , value ) ) else : raise ValidationError ( 'Field %s is repeated. Found: %s' % ( name , value ) ) return value | Internal validation function . |
18,894 | def validate_element ( self , value ) : if isinstance ( value , bytes ) : try : six . text_type ( value , 'UTF-8' ) except UnicodeDecodeError as err : try : _ = self . name except AttributeError : validation_error = ValidationError ( 'Field encountered non-UTF-8 string %r: %s' % ( value , err ) ) else : validation_error = ValidationError ( 'Field %s encountered non-UTF-8 string %r: %s' % ( self . name , value , err ) ) validation_error . field_name = self . name raise validation_error else : return super ( StringField , self ) . validate_element ( value ) return value | Validate StringField allowing for str and unicode . |
18,895 | def type ( self ) : if self . __type is None : message_type = find_definition ( self . __type_name , self . message_definition ( ) ) if not ( message_type is not Message and isinstance ( message_type , type ) and issubclass ( message_type , Message ) ) : raise FieldDefinitionError ( 'Invalid message class: %s' % message_type ) self . __type = message_type return self . __type | Message type used for field . |
18,896 | def value_from_message ( self , message ) : if not isinstance ( message , self . message_type ) : raise DecodeError ( 'Expected type %s, got %s: %r' % ( self . message_type . __name__ , type ( message ) . __name__ , message ) ) return message | Convert a message to a value instance . |
18,897 | def value_to_message ( self , value ) : if not isinstance ( value , self . type ) : raise EncodeError ( 'Expected type %s, got %s: %r' % ( self . type . __name__ , type ( value ) . __name__ , value ) ) return value | Convert a value instance to a message . |
18,898 | def validate_default_element ( self , value ) : if isinstance ( value , ( six . string_types , six . integer_types ) ) : if self . __type : self . __type ( value ) return value return super ( EnumField , self ) . validate_default_element ( value ) | Validate default element of Enum field . |
18,899 | def type ( self ) : if self . __type is None : found_type = find_definition ( self . __type_name , self . message_definition ( ) ) if not ( found_type is not Enum and isinstance ( found_type , type ) and issubclass ( found_type , Enum ) ) : raise FieldDefinitionError ( 'Invalid enum type: %s' % found_type ) self . __type = found_type return self . __type | Enum type used for field . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.