idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
18,700
def search_artist(self, artist_name, quiet=False, limit=9):
    """Search for an artist by name.

    :param artist_name: name to search for.
    :param quiet: when true, pick the first hit without prompting.
    :param limit: maximum number of candidates to fetch.
    :return: an Artist object.
    :raises SearchNotFound: when no artist matches.
    """
    result = self.search(artist_name, search_type=100, limit=limit)
    if result['result']['artistCount'] <= 0:
        LOG.warning('Artist %s not existed!', artist_name)
        raise SearchNotFound('Artist {} not existed.'.format(artist_name))
    artists = result['result']['artists']
    if not quiet:
        return self.display.select_one_artist(artists)
    top = artists[0]
    return Artist(top['id'], top['name'])
Search for an artist by artist name.
18,701
def search_playlist(self, playlist_name, quiet=False, limit=9):
    """Search for a playlist by name.

    :param playlist_name: name to search for.
    :param quiet: when true, pick the first hit without prompting.
    :param limit: maximum number of candidates to fetch.
    :return: a Playlist object.
    :raises SearchNotFound: when no playlist matches.
    """
    result = self.search(playlist_name, search_type=1000, limit=limit)
    if result['result']['playlistCount'] <= 0:
        LOG.warning('Playlist %s not existed!', playlist_name)
        raise SearchNotFound('playlist {} not existed'.format(playlist_name))
    playlists = result['result']['playlists']
    if not quiet:
        return self.display.select_one_playlist(playlists)
    top = playlists[0]
    return Playlist(top['id'], top['name'])
Search playlist by playlist name .
18,702
def search_user(self, user_name, quiet=False, limit=9):
    """Search for a user by name.

    :param user_name: name to search for.
    :param quiet: when true, pick the first hit without prompting.
    :param limit: maximum number of candidates to fetch.
    :return: a User object.
    :raises SearchNotFound: when no user matches.
    """
    result = self.search(user_name, search_type=1002, limit=limit)
    if result['result']['userprofileCount'] <= 0:
        LOG.warning('User %s not existed!', user_name)
        raise SearchNotFound('user {} not existed'.format(user_name))
    users = result['result']['userprofiles']
    if not quiet:
        return self.display.select_one_user(users)
    top = users[0]
    return User(top['userId'], top['nickname'])
Search user by user name .
18,703
def get_user_playlists(self, user_id, limit=1000):
    """Fetch a user's playlists and let the display layer pick one.

    :param user_id: numeric id of the user.
    :param limit: maximum number of playlists to fetch.
    """
    url = 'http://music.163.com/weapi/user/playlist?csrf_token='
    params = {
        'offset': 0,
        'uid': user_id,
        'limit': limit,
        'csrf_token': '',
    }
    result = self.post_request(url, params)
    return self.display.select_one_playlist(result['playlist'])
Get all of a user's playlists.
18,704
def get_playlist_songs(self, playlist_id, limit=1000):
    """Return every track of a playlist as Song objects.

    :param playlist_id: numeric id of the playlist.
    :param limit: maximum number of tracks to fetch.
    """
    url = 'http://music.163.com/weapi/v3/playlist/detail?csrf_token='
    params = {
        'id': playlist_id,
        'offset': 0,
        'total': True,
        'limit': limit,
        'n': 1000,
        'csrf_token': '',
    }
    result = self.post_request(url, params)
    tracks = result['playlist']['tracks']
    return [Song(track['id'], track['name']) for track in tracks]
Get all of a playlist's songs.
18,705
def get_album_songs(self, album_id):
    """Return every song on an album as Song objects."""
    url = 'http://music.163.com/api/album/{}/'.format(album_id)
    result = self.get_request(url)
    album_tracks = result['album']['songs']
    return [Song(item['id'], item['name']) for item in album_tracks]
Get all of an album's songs.
18,706
def get_artists_hot_songs(self, artist_id):
    """Return an artist's hot songs as Song objects."""
    url = 'http://music.163.com/api/artist/{}'.format(artist_id)
    result = self.get_request(url)
    return [Song(item['id'], item['name']) for item in result['hotSongs']]
Get an artist's top-50 songs.
18,707
def get_song_url(self, song_id, bit_rate=320000):
    """Resolve the download address of a song.

    :param song_id: numeric id of the song.
    :param bit_rate: requested bit rate in bps.
    :raises SongNotAvailable: when the service returns no url
        (copyright-restricted track).
    """
    url = 'http://music.163.com/weapi/song/enhance/player/url?csrf_token='
    params = {'ids': [song_id], 'br': bit_rate, 'csrf_token': ''}
    result = self.post_request(url, params)
    song_url = result['data'][0]['url']
    if song_url is not None:
        return song_url
    LOG.warning('Song %s is not available due to copyright issue. => %s',
                song_id, result)
    raise SongNotAvailable(
        'Song {} is not available due to copyright issue.'.format(song_id))
Get a song s download address .
18,708
def get_song_lyric(self, song_id):
    """Return a song's LRC lyric text, or a placeholder when missing."""
    url = ('http://music.163.com/api/song/lyric'
           '?os=osx&id={}&lv=-1&kv=-1&tv=-1').format(song_id)
    result = self.get_request(url)
    if 'lrc' in result and result['lrc']['lyric'] is not None:
        return result['lrc']['lyric']
    return 'Lyric not found.'
Get a song s lyric .
18,709
def get_song_by_url(self, song_url, song_name, folder, lyric_info):
    """Download a song (and optionally its lyric) into *folder*.

    :param song_url: direct download address of the track.
    :param song_name: display name, used as the file name stem.
    :param folder: destination directory, created if missing.
    :param lyric_info: LRC text to save under ``folder/lyric``, or a
        falsy value to skip the lyric file.
    """
    if not os.path.exists(folder):
        os.makedirs(folder)
    # Windows file systems forbid a handful of characters; sanitize once
    # so the mp3 AND the lyric file share the same stem (the original
    # code sanitized only the mp3 path).
    valid_name = song_name
    if sys.platform == 'win32' or sys.platform == 'cygwin':
        valid_name = re.sub(r'[<>:"/\\|?*]', '', song_name)
        if valid_name != song_name:
            click.echo('{} will be saved as: {}.mp3'.format(
                song_name, valid_name))
    fpath = os.path.join(folder, valid_name + '.mp3')
    if not os.path.exists(fpath):
        resp = self.download_session.get(
            song_url, timeout=self.timeout, stream=True)
        # content-length may be absent; fall back to 0 instead of
        # crashing on int(None).
        length = int(resp.headers.get('content-length') or 0)
        label = 'Downloading {} {}kb'.format(song_name, int(length / 1024))
        with click.progressbar(length=length, label=label) as progressbar:
            with open(fpath, 'wb') as song_file:
                for chunk in resp.iter_content(chunk_size=1024):
                    if chunk:
                        song_file.write(chunk)
                        progressbar.update(1024)
    if lyric_info:
        folder = os.path.join(folder, 'lyric')
        if not os.path.exists(folder):
            os.makedirs(folder)
        # Use the sanitized stem and an explicit encoding so non-ASCII
        # lyrics save reliably regardless of locale.
        fpath = os.path.join(folder, valid_name + '.lrc')
        with open(fpath, 'w', encoding='utf-8') as lyric_file:
            lyric_file.write(lyric_info)
Download a song and save it to disk .
18,710
def login(self):
    """Login entrance: prompt for credentials and persist the session.

    Usernames matching the phone-number regex go through the cellphone
    login endpoint; anything else is treated as an email login.  On
    success the session cookies are saved and the account uid is written
    to ``person_info_path``; a rejected password exits with code 1.
    """
    username = click.prompt('Please enter your email or phone number')
    password = click.prompt('Please enter your password', hide_input=True)
    # Landline (0xx...) or mobile (1[34578]...) number pattern —
    # presumably mainland-China formats; verify against the service.
    pattern = re.compile(r'^0\d{2,3}\d{7,8}$|^1[34578]\d{9}$')
    if pattern.match(username):
        url = 'https://music.163.com/weapi/login/cellphone'
        params = {'phone': username,
                  'password': hashlib.md5(
                      password.encode('utf-8')).hexdigest(),
                  'rememberLogin': 'true'}
    else:
        url = 'https://music.163.com/weapi/login?csrf_token='
        params = {'username': username,
                  'password': hashlib.md5(
                      password.encode('utf-8')).hexdigest(),
                  'rememberLogin': 'true'}
    try:
        result = self.post_request(url, params)
    except PostRequestIllegal:
        click.echo('Password Error!')
        sys.exit(1)
    self.session.cookies.save()
    uid = result['account']['id']
    with open(person_info_path, 'w') as person_info:
        person_info.write(str(uid))
Login entrance .
18,711
def to_representation(self, instance):
    """Serialize *instance* into a GeoJSON ``Feature`` mapping.

    Builds an OrderedDict with (in order) an optional "id", "type",
    "geometry", an optional "bbox", and "properties" holding the
    remaining serializer fields.
    """
    feature = OrderedDict()
    fields = list(self.fields.values())
    # Optional id from Meta.id_field; removed from the property fields
    # so it is not serialized twice.
    if self.Meta.id_field:
        field = self.fields[self.Meta.id_field]
        value = field.get_attribute(instance)
        feature["id"] = field.to_representation(value)
        fields.remove(field)
    feature["type"] = "Feature"
    # Required geometry from Meta.geo_field.
    field = self.fields[self.Meta.geo_field]
    geo_value = field.get_attribute(instance)
    feature["geometry"] = field.to_representation(geo_value)
    fields.remove(field)
    # Bounding box: computed from the geometry's extent (auto_bbox), or
    # taken from a dedicated bbox field when configured.
    if self.Meta.auto_bbox and geo_value:
        feature["bbox"] = geo_value.extent
    elif self.Meta.bbox_geo_field:
        field = self.fields[self.Meta.bbox_geo_field]
        value = field.get_attribute(instance)
        feature["bbox"] = value.extent if hasattr(value, 'extent') else None
        fields.remove(field)
    feature["properties"] = self.get_properties(instance, fields)
    return feature
Serialize objects - > primitives .
18,712
def get_properties(self, instance, fields):
    """Collect the GeoJSON ``properties`` mapping from *fields*.

    Write-only fields are skipped; a ``None`` attribute maps to ``None``
    without invoking the field's ``to_representation``.
    """
    properties = OrderedDict()
    readable = (f for f in fields if not f.write_only)
    for field in readable:
        raw = field.get_attribute(instance)
        if raw is None:
            properties[field.field_name] = None
        else:
            properties[field.field_name] = field.to_representation(raw)
    return properties
Get the feature metadata which will be used for the GeoJSON properties key .
18,713
def to_internal_value(self, data):
    """Strip GeoJSON formatting before delegating to the parent parser."""
    payload = data
    if 'properties' in payload:
        payload = self.unformat_geojson(payload)
    return super(GeoFeatureModelSerializer, self).to_internal_value(payload)
Override the parent method to first remove the GeoJSON formatting
18,714
def unformat_geojson(self, feature):
    """Flatten a GeoJSON feature into a dict keyed by serializer fields.

    The feature's "properties" become the base mapping; "geometry" and
    (when configured) "bbox" are folded in under the Meta field names.
    """
    attrs = feature["properties"]
    if 'geometry' in feature:
        attrs[self.Meta.geo_field] = feature['geometry']
    bbox_field = self.Meta.bbox_geo_field
    if bbox_field and 'bbox' in feature:
        attrs[bbox_field] = Polygon.from_bbox(feature['bbox'])
    return attrs
This function should return a dictionary containing keys which maps to serializer fields .
18,715
def ready(self):
    """Map every GeoDjango model field type to GeometryField in DRF."""
    from django.contrib.gis.db import models
    from rest_framework.serializers import ModelSerializer
    from .fields import GeometryField
    # Older DRF exposes the mapping as _field_mapping.mapping; newer
    # versions use serializer_field_mapping.
    try:
        field_mapping = ModelSerializer._field_mapping.mapping
    except AttributeError:
        field_mapping = ModelSerializer.serializer_field_mapping
    geo_field_types = (
        models.GeometryField,
        models.PointField,
        models.LineStringField,
        models.PolygonField,
        models.MultiPointField,
        models.MultiLineStringField,
        models.MultiPolygonField,
        models.GeometryCollectionField,
    )
    field_mapping.update(
        {model_field: GeometryField for model_field in geo_field_types})
update Django Rest Framework serializer mappings
18,716
def dist_to_deg(self, distance, latitude):
    """Convert a ground distance to an angular distance in degrees.

    :param distance: distance in meters.
    :param latitude: latitude in degrees; only its magnitude matters.
    :return: the equivalent arc at that latitude, in degrees.
    """
    lat = abs(latitude)  # idiomatic replacement for the sign-flip conditional
    rad2deg = 180 / pi
    earth_radius = 6378160.0
    # Crude spherical correction: shrink the effective radius toward the
    # poles so a fixed ground distance spans more degrees of longitude.
    latitude_correction = 0.5 * (1 + cos(lat * pi / 180))
    return distance / (earth_radius * latitude_correction) * rad2deg
distance: distance in meters; latitude: latitude in degrees.
18,717
def _SetCredentials(self, **kwds):
    """Fetch credentials for this client, honoring keyword overrides."""
    args = dict(
        api_key=self._API_KEY,
        client=self,
        client_id=self._CLIENT_ID,
        client_secret=self._CLIENT_SECRET,
        package_name=self._PACKAGE,
        scopes=self._SCOPES,
        user_agent=self._USER_AGENT,
    )
    args.update(kwds)
    # Imported lazily, matching the original's local import.
    from apitools.base.py import credentials_lib
    self._credentials = credentials_lib.GetCredentials(**args)
Fetch credentials and set them for this client .
18,718
def JsonResponseModel(self):
    """Context manager: temporarily return raw JSON instead of proto.

    Fix: the original generator restored the previous model only on a
    clean exit; an exception raised in the ``with`` body (thrown into the
    generator at the yield point) skipped the restore.  try/finally makes
    the restore unconditional.
    """
    previous = self.response_type_model
    self.__response_type_model = 'json'
    try:
        yield self.__response_type_model
    finally:
        self.__response_type_model = previous
In this context return raw JSON instead of proto .
18,719
def ProcessRequest(self, method_config, request):
    """Hook for pre-processing of requests.

    Optionally logs the call, then hands the request back untouched.
    """
    if self.log_request:
        logging.info('Calling method %s with %s: %s',
                     method_config.method_id,
                     method_config.request_type_name,
                     request)
    return request
Hook for pre - processing of requests .
18,720
def ProcessHttpRequest(self, http_request):
    """Hook for pre-processing of http requests.

    Merges in any additional headers and, when request logging is on,
    emits the method, url, headers and body.
    """
    http_request.headers.update(self.additional_http_headers)
    if self.log_request:
        logging.info('Making http %s to %s',
                     http_request.http_method, http_request.url)
        logging.info('Headers: %s', pprint.pformat(http_request.headers))
        body = http_request.body
        if body:
            # loggable_body takes precedence when set.
            logging.info('Body:\n%s', http_request.loggable_body or body)
        else:
            logging.info('Body: (none)')
    return http_request
Hook for pre - processing of http requests .
18,721
def DeserializeMessage(self, response_type, data):
    """Deserialize JSON *data* into a message of *response_type*.

    :raises exceptions.InvalidDataFromServerError: when decoding fails.
    """
    try:
        return encoding.JsonToMessage(response_type, data)
    except (exceptions.InvalidDataFromServerError,
            messages.ValidationError, ValueError) as e:
        raise exceptions.InvalidDataFromServerError(
            'Error decoding response "%s" as type %s: %s' % (
                data, response_type.__name__, e))
Deserialize the given data as method_config . response_type .
18,722
def FinalizeTransferUrl(self, url):
    """Attach the API key (when one is set) to a transfer url."""
    builder = _UrlBuilder.FromUrl(url)
    api_key = self.global_params.key
    if api_key:
        builder.query_params['key'] = api_key
    return builder.url
Modify the url for a given transfer based on auth and version .
18,723
def GetMethodConfig(self, method):
    """Return the (cached) method config for *method*.

    :raises KeyError: if *method* is unknown or has no config attached.
    """
    cached = self._method_configs.get(method)
    if cached:
        return cached
    func = getattr(self, method, None)
    if func is None:
        raise KeyError(method)
    config_factory = getattr(func, 'method_config', None)
    if config_factory is None:
        raise KeyError(method)
    config = config_factory()
    self._method_configs[method] = config
    return config
Returns service cached method config for given method .
18,724
def __CombineGlobalParams(self, global_params, default_params):
    """Merge the given params over *default_params* into a fresh object.

    Unassigned and empty-container values are left unset on the result.
    """
    client = self.__client
    util.Typecheck(global_params, (type(None), client.params_type))
    merged = client.params_type()
    explicit = global_params or client.params_type()
    for field in merged.all_fields():
        chosen = explicit.get_assigned_value(field.name)
        if chosen is None:
            chosen = default_params.get_assigned_value(field.name)
        if chosen not in (None, [], ()):
            setattr(merged, field.name, chosen)
    return merged
Combine the given params with the defaults .
18,725
def __FinalUrlValue(self, value, field):
    """Encode *value* for use in a URL.

    BytesField values are urlsafe-base64 encoded; text is utf8-encoded,
    bytes are decoded, and datetimes are ISO-formatted.  Everything else
    passes through unchanged.
    """
    if isinstance(field, messages.BytesField) and value is not None:
        return base64.urlsafe_b64encode(value)
    if isinstance(value, six.text_type):
        return value.encode('utf8')
    if isinstance(value, six.binary_type):
        return value.decode('utf8')
    if isinstance(value, datetime.datetime):
        return value.isoformat()
    return value
Encode value for the URL using field to skip encoding for bytes .
18,726
def __ConstructQueryParams(self, query_params, request, global_params):
    """Construct a dictionary of query parameters for this request."""
    # Fold explicit global params over the client defaults.
    global_params = self.__CombineGlobalParams(
        global_params, self.__client.global_params)
    global_param_names = util.MapParamNames(
        [x.name for x in self.__client.params_type.all_fields()],
        self.__client.params_type)
    global_params_type = type(global_params)
    query_info = dict(
        (param,
         self.__FinalUrlValue(getattr(global_params, param),
                              getattr(global_params_type, param)))
        for param in global_param_names)
    # Per-method query params from the request override nothing above
    # (names are disjoint after MapParamNames).
    query_param_names = util.MapParamNames(query_params, type(request))
    request_type = type(request)
    query_info.update(
        (param,
         self.__FinalUrlValue(getattr(request, param, None),
                              getattr(request_type, param)))
        for param in query_param_names)
    # Drop unset values, normalize prettyPrint, then map attribute names
    # back to wire parameter names.
    query_info = dict((k, v) for k, v in query_info.items()
                      if v is not None)
    query_info = self.__EncodePrettyPrint(query_info)
    query_info = util.MapRequestParams(query_info, type(request))
    return query_info
Construct a dictionary of query parameters for this request .
18,727
def __FinalizeRequest(self, http_request, url_builder):
    """Make final adjustments; over-long GETs become POSTs."""
    too_long = (http_request.http_method == 'GET'
                and len(http_request.url) > _MAX_URL_LENGTH)
    if too_long:
        # Tunnel the GET through a POST so the query string travels in
        # the body rather than an over-long URL.
        http_request.http_method = 'POST'
        http_request.headers['x-http-method-override'] = 'GET'
        http_request.headers['content-type'] = (
            'application/x-www-form-urlencoded')
        http_request.body = url_builder.query
        url_builder.query_params = {}
    http_request.url = url_builder.url
Make any final general adjustments to the request .
18,728
def __ProcessHttpResponse(self, method_config, http_response, request):
    """Process the given http response into a message (or raw JSON).

    Raises HttpError for any status other than OK/CREATED/NO_CONTENT.
    """
    if http_response.status_code not in (http_client.OK,
                                         http_client.CREATED,
                                         http_client.NO_CONTENT):
        raise exceptions.HttpError.FromResponse(
            http_response, method_config=method_config, request=request)
    if http_response.status_code == http_client.NO_CONTENT:
        # Substitute an empty JSON object so deserialization below
        # still succeeds.
        http_response = http_wrapper.Response(
            info=http_response.info, content='{}',
            request_url=http_response.request_url)
    content = http_response.content
    # NOTE(review): this reads self._client while the rest of the class
    # uses the name-mangled self.__client — confirm both resolve to the
    # same underlying attribute.
    if self._client.response_encoding and isinstance(content, bytes):
        content = content.decode(self._client.response_encoding)
    if self.__client.response_type_model == 'json':
        # Raw JSON was requested (see JsonResponseModel); skip decoding.
        return content
    response_type = _LoadClass(method_config.response_type_name,
                               self.__client.MESSAGES_MODULE)
    return self.__client.DeserializeMessage(response_type, content)
Process the given http response .
18,729
def __SetBaseHeaders(self, http_request, client):
    """Fill in the basic headers (user-agent, accept, accept-encoding)."""
    default_agent = 'apitools-client/1.0'
    headers = http_request.headers
    headers['user-agent'] = client.user_agent or default_agent
    headers['accept'] = 'application/json'
    headers['accept-encoding'] = 'gzip, deflate'
Fill in the basic headers on http_request .
18,730
def __SetBody(self, http_request, method_config, request, upload):
    """Fill in the JSON body on http_request, if the method has one."""
    if not method_config.request_field:
        return
    request_type = _LoadClass(
        method_config.request_type_name, self.__client.MESSAGES_MODULE)
    if method_config.request_field == REQUEST_IS_BODY:
        # The whole request message is the body.
        body_value = request
        body_type = request_type
    else:
        # Only a single (message) field of the request is the body.
        body_value = getattr(request, method_config.request_field)
        body_field = request_type.field_by_name(
            method_config.request_field)
        util.Typecheck(body_field, messages.MessageField)
        body_type = body_field.type
    # A missing body is replaced with an empty message instance.
    body_value = body_value or body_type()
    if upload and not body_value:
        # For uploads, skip serializing an empty placeholder body.
        return
    util.Typecheck(body_value, body_type)
    http_request.headers['content-type'] = 'application/json'
    http_request.body = self.__client.SerializeMessage(body_value)
Fill in the body on http_request .
18,731
def PrepareHttpRequest(self, method_config, request, global_params=None,
                       upload=None, upload_config=None, download=None):
    """Prepare (but do not send) an HTTP request for *method_config*.

    Typechecks and pre-processes the request, sets headers and body,
    builds the url (query params plus any upload/download adjustments),
    then hands the result to the client's ProcessHttpRequest hook.
    """
    request_type = _LoadClass(
        method_config.request_type_name, self.__client.MESSAGES_MODULE)
    util.Typecheck(request, request_type)
    request = self.__client.ProcessRequest(method_config, request)
    http_request = http_wrapper.Request(
        http_method=method_config.http_method)
    self.__SetBaseHeaders(http_request, self.__client)
    self.__SetBody(http_request, method_config, request, upload)
    url_builder = _UrlBuilder(
        self.__client.url, relative_path=method_config.relative_path)
    url_builder.query_params = self.__ConstructQueryParams(
        method_config.query_params, request, global_params)
    # Transfers may rewrite the url or headers before finalization.
    if upload is not None:
        upload.ConfigureRequest(upload_config, http_request, url_builder)
    if download is not None:
        download.ConfigureRequest(http_request, url_builder)
    # Built last so it can use the (possibly transfer-adjusted) value.
    url_builder.relative_path = self.__ConstructRelativePath(
        method_config, request,
        relative_path=url_builder.relative_path)
    self.__FinalizeRequest(http_request, url_builder)
    return self.__client.ProcessHttpRequest(http_request)
Prepares an HTTP request to be sent .
18,732
def _RunMethod(self, method_config, request, global_params=None,
               upload=None, upload_config=None, download=None):
    """Call this method with request, handling upload/download transfers.

    Downloads short-circuit (returning None); an upload may perform the
    HTTP exchange itself, in which case the normal request is skipped.
    """
    if upload is not None and download is not None:
        raise exceptions.NotYetImplementedError(
            'Cannot yet use both upload and download at once')
    http_request = self.PrepareHttpRequest(
        method_config, request, global_params, upload, upload_config,
        download)
    # Downloads are fully handled by the transfer object.
    if download is not None:
        download.InitializeDownload(http_request, client=self.client)
        return
    http_response = None
    if upload is not None:
        # The upload may complete the request and return the response.
        http_response = upload.InitializeUpload(
            http_request, client=self.client)
    if http_response is None:
        http = self.__client.http
        # Uploads may supply a dedicated connection for byte transfers.
        if upload and upload.bytes_http:
            http = upload.bytes_http
        opts = {
            'retries': self.__client.num_retries,
            'max_retry_wait': self.__client.max_retry_wait,
        }
        if self.__client.check_response_func:
            opts['check_response_func'] = (
                self.__client.check_response_func)
        if self.__client.retry_func:
            opts['retry_func'] = self.__client.retry_func
        http_response = http_wrapper.MakeRequest(
            http, http_request, **opts)
    return self.ProcessHttpResponse(method_config, http_response, request)
Call this method with request .
18,733
def ProcessHttpResponse(self, method_config, http_response, request=None):
    """Convert an HTTP response to the expected message type."""
    decoded = self.__ProcessHttpResponse(
        method_config, http_response, request)
    return self.__client.ProcessResponse(method_config, decoded)
Convert an HTTP response to the expected message type .
18,734
def describe_enum_value(enum_value):
    """Build an EnumValueDescriptor for a single Enum value instance."""
    descriptor = EnumValueDescriptor()
    descriptor.name = six.text_type(enum_value.name)
    descriptor.number = enum_value.number
    return descriptor
Build descriptor for Enum instance .
18,735
def describe_enum(enum_definition):
    """Build an EnumDescriptor for an Enum class."""
    descriptor = EnumDescriptor()
    # Only the leaf of the dotted definition name is used.
    descriptor.name = enum_definition.definition_name().split('.')[-1]
    values = [describe_enum_value(enum_definition.lookup_by_number(number))
              for number in enum_definition.numbers()]
    if values:
        descriptor.values = values
    return descriptor
Build descriptor for Enum class .
18,736
def describe_field(field_definition):
    """Build a FieldDescriptor for a single Field instance."""
    field_descriptor = FieldDescriptor()
    field_descriptor.name = field_definition.name
    field_descriptor.number = field_definition.number
    field_descriptor.variant = field_definition.variant
    # Enum and message fields carry the referenced type's full name.
    if isinstance(field_definition, messages.EnumField):
        field_descriptor.type_name = (
            field_definition.type.definition_name())
    if isinstance(field_definition, messages.MessageField):
        field_descriptor.type_name = (
            field_definition.message_type.definition_name())
    # Defaults are serialized to strings via a per-field-type table.
    if field_definition.default is not None:
        field_descriptor.default_value = _DEFAULT_TO_STRING_MAP[
            type(field_definition)](field_definition.default)
    # Label: repeated wins over required; everything else is optional.
    if field_definition.repeated:
        field_descriptor.label = FieldDescriptor.Label.REPEATED
    elif field_definition.required:
        field_descriptor.label = FieldDescriptor.Label.REQUIRED
    else:
        field_descriptor.label = FieldDescriptor.Label.OPTIONAL
    return field_descriptor
Build descriptor for Field instance .
18,737
def describe_message(message_definition):
    """Build a MessageDescriptor for a Message class.

    Includes fields (sorted by number) plus any nested message and enum
    definitions listed in __messages__ / __enums__.
    """
    message_descriptor = MessageDescriptor()
    message_descriptor.name = (
        message_definition.definition_name().split('.')[-1])
    fields = sorted(message_definition.all_fields(),
                    key=lambda v: v.number)
    if fields:
        message_descriptor.fields = [
            describe_field(field) for field in fields]
    try:
        nested_messages = message_definition.__messages__
    except AttributeError:
        # No nested messages declared.
        pass
    else:
        message_descriptors = []
        for name in nested_messages:
            value = getattr(message_definition, name)
            message_descriptors.append(describe_message(value))
        message_descriptor.message_types = message_descriptors
    try:
        nested_enums = message_definition.__enums__
    except AttributeError:
        # No nested enums declared.
        pass
    else:
        enum_descriptors = []
        for name in nested_enums:
            value = getattr(message_definition, name)
            enum_descriptors.append(describe_enum(value))
        message_descriptor.enum_types = enum_descriptors
    return message_descriptor
Build descriptor for Message class .
18,738
def describe_file(module):
    """Build a FileDescriptor from a Python module's messages and enums."""
    descriptor = FileDescriptor()
    descriptor.package = util.get_package_for_module(module)
    if not descriptor.package:
        descriptor.package = None
    message_descriptors = []
    enum_descriptors = []
    for name in sorted(dir(module)):
        value = getattr(module, name)
        if not isinstance(value, type):
            continue
        if issubclass(value, messages.Message):
            message_descriptors.append(describe_message(value))
        elif issubclass(value, messages.Enum):
            enum_descriptors.append(describe_enum(value))
    if message_descriptors:
        descriptor.message_types = message_descriptors
    if enum_descriptors:
        descriptor.enum_types = enum_descriptors
    return descriptor
Build a file from a specified Python module .
18,739
def describe_file_set(modules):
    """Build a FileSet descriptor from an iterable of Python modules."""
    descriptor = FileSet()
    file_descriptors = [describe_file(module) for module in modules]
    if file_descriptors:
        descriptor.files = file_descriptors
    return descriptor
Build a file set from the specified Python modules.
18,740
def describe(value):
    """Describe any supported value as a descriptor; None if unsupported."""
    if isinstance(value, types.ModuleType):
        return describe_file(value)
    if isinstance(value, messages.Field):
        return describe_field(value)
    if isinstance(value, messages.Enum):
        # An Enum *instance* is a single value, not the whole class.
        return describe_enum_value(value)
    if isinstance(value, type):
        if issubclass(value, messages.Message):
            return describe_message(value)
        if issubclass(value, messages.Enum):
            return describe_enum(value)
    return None
Describe any value as a descriptor .
18,741
def import_descriptor_loader(definition_name, importer=__import__):
    """Find objects by importing modules as needed.

    Tries, in order: importing *definition_name* as a module, resolving
    it via messages.find_definition, and finally recursing on the parent
    name and searching the parent descriptor's values/fields for the
    leaf component.
    """
    if definition_name.startswith('.'):
        definition_name = definition_name[1:]
    if not definition_name.startswith('.'):
        leaf = definition_name.split('.')[-1]
        if definition_name:
            try:
                module = importer(definition_name, '', '', [leaf])
            except ImportError:
                # Not importable as a module; fall through to lookup.
                pass
            else:
                return describe(module)
    try:
        return describe(messages.find_definition(
            definition_name, importer=__import__))
    except messages.DefinitionNotFoundError as err:
        # Try resolving the parent and searching it for the leaf name.
        split_name = definition_name.rsplit('.', 1)
        if len(split_name) > 1:
            parent, child = split_name
            try:
                parent_definition = import_descriptor_loader(
                    parent, importer=importer)
            except messages.DefinitionNotFoundError:
                pass
            else:
                if isinstance(parent_definition, EnumDescriptor):
                    search_list = parent_definition.values or []
                elif isinstance(parent_definition, MessageDescriptor):
                    search_list = parent_definition.fields or []
                else:
                    search_list = []
                for definition in search_list:
                    if definition.name == child:
                        return definition
        # Re-raise the original lookup failure.
        raise err
Find objects by importing modules as needed .
18,742
def lookup_descriptor(self, definition_name):
    """Look up a descriptor by name, caching loader results.

    :raises messages.DefinitionNotFoundError: when the name is unknown
        and no loader is configured.
    """
    try:
        return self.__descriptors[definition_name]
    except KeyError:
        pass
    if not self.__descriptor_loader:
        raise messages.DefinitionNotFoundError(
            'Could not find definition for %s' % definition_name)
    definition = self.__descriptor_loader(definition_name)
    self.__descriptors[definition_name] = definition
    return definition
Lookup descriptor by name .
18,743
def lookup_package(self, definition_name):
    """Walk up the dotted name until a FileDescriptor yields a package.

    Returns None when no enclosing file descriptor is found.
    """
    name = definition_name
    while True:
        descriptor = self.lookup_descriptor(name)
        if isinstance(descriptor, FileDescriptor):
            return descriptor.package
        dot = name.rfind('.')
        if dot < 0:
            return None
        name = name[:dot]
Determines the package name for any definition .
18,744
def _load_json_module():
    """Try 'json' then 'simplejson'; return the first compatible module.

    A module lacking JSONEncoder is rejected as incompatible.  If none
    loads, the first ImportError encountered is re-raised.
    """
    first_import_error = None
    for module_name in ['json', 'simplejson']:
        try:
            module = __import__(module_name, {}, {}, 'json')
            if hasattr(module, 'JSONEncoder'):
                return module
            message = ('json library "%s" is not compatible with ProtoRPC'
                       % module_name)
            logging.warning(message)
            raise ImportError(message)
        except ImportError as err:
            if not first_import_error:
                first_import_error = err
    logging.error('Must use valid json library (json or simplejson)')
    raise first_import_error
Try to load a valid json module .
18,745
def default(self, value):
    """Return a JSON-encodable structure for *value*.

    Enums become strings, py3 bytes are utf8-decoded, and Messages are
    turned into dicts of their assigned and unrecognized fields; other
    types defer to the base encoder.
    """
    if isinstance(value, messages.Enum):
        return str(value)
    if six.PY3 and isinstance(value, bytes):
        return value.decode('utf8')
    if isinstance(value, messages.Message):
        result = {}
        for field in value.all_fields():
            item = value.get_assigned_value(field.name)
            # Skip unset and empty-container values.
            if item not in (None, [], ()):
                result[field.name] = (
                    self.__protojson_protocol.encode_field(field, item))
        # Preserve unrecognized fields verbatim.
        for unknown_key in value.all_unrecognized_fields():
            unrecognized_field, _ = value.get_unrecognized_field_info(
                unknown_key)
            result[unknown_key] = unrecognized_field
        return result
    return super(MessageJSONEncoder, self).default(value)
Return dictionary instance from a message object .
18,746
def encode_message(self, message):
    """Encode a Message instance to a JSON string.

    The message is validated first, so missing required fields raise
    before serialization begins.
    """
    message.check_initialized()
    encoded = json.dumps(message, cls=MessageJSONEncoder,
                         protojson_protocol=self)
    return encoded
Encode Message instance to JSON string .
18,747
def decode_message(self, message_type, encoded_message):
    """Merge a JSON string into a new instance of *message_type*."""
    text = six.ensure_str(encoded_message)
    if not text.strip():
        # A blank payload decodes to an empty (default) message.
        return message_type()
    dictionary = json.loads(text)
    message = self.__decode_dictionary(message_type, dictionary)
    message.check_initialized()
    return message
Merge JSON structure to Message instance .
18,748
def __find_variant(self, value):
    """Find the messages.Variant type that describes *value*.

    Returns None for unsupported types.  For lists/tuples the result is
    the widest rankable variant among the elements
    (INT64 < DOUBLE < STRING).
    """
    if isinstance(value, bool):
        return messages.Variant.BOOL
    elif isinstance(value, six.integer_types):
        return messages.Variant.INT64
    elif isinstance(value, float):
        return messages.Variant.DOUBLE
    elif isinstance(value, six.string_types):
        return messages.Variant.STRING
    elif isinstance(value, (list, tuple)):
        variant_priority = [None,
                            messages.Variant.INT64,
                            messages.Variant.DOUBLE,
                            messages.Variant.STRING]
        chosen_priority = 0
        for v in value:
            variant = self.__find_variant(v)
            try:
                priority = variant_priority.index(variant)
            except ValueError:
                # Fix: list.index raises ValueError (not IndexError) for
                # a missing element, so an unrankable variant (e.g. BOOL
                # from a nested bool) previously escaped uncaught.
                priority = -1
            if priority > chosen_priority:
                chosen_priority = priority
        return variant_priority[chosen_priority]
    # Unrecognized type.
    return None
Find the messages . Variant type that describes this value .
18,749
def __decode_dictionary(self, message_type, dictionary):
    """Merge a decoded JSON dictionary into a new *message_type* message.

    Unknown keys become unrecognized fields when a variant can be
    inferred; explicit nulls reset the field; enum decode failures also
    fall back to unrecognized fields.
    """
    message = message_type()
    for key, value in six.iteritems(dictionary):
        if value is None:
            # Explicit null resets the field (ignored if nonexistent).
            try:
                message.reset(key)
            except AttributeError:
                pass
            continue
        try:
            field = message.field_by_name(key)
        except KeyError:
            # No such field: retain the raw value when rankable.
            variant = self.__find_variant(value)
            if variant:
                message.set_unrecognized_field(key, value, variant)
            continue
        if field.repeated:
            # Scalars are promoted to single-element lists.
            if not isinstance(value, list):
                value = [value]
            valid_value = [self.decode_field(field, item)
                           for item in value]
            setattr(message, field.name, valid_value)
            continue
        # An empty list on a non-repeated field is silently ignored.
        if value == []:
            continue
        try:
            setattr(message, field.name, self.decode_field(field, value))
        except messages.DecodeError:
            # Unexpected enum values are kept as unrecognized fields;
            # any other decode failure propagates.
            if not isinstance(field, messages.EnumField):
                raise
            variant = self.__find_variant(value)
            if variant:
                message.set_unrecognized_field(key, value, variant)
    return message
Merge dictionary in to message .
18,750
def WriteSetupPy(self, out):
    """Write a setup.py for upload to PyPI."""
    printer = self._GetPrinter(out)
    year = datetime.datetime.now().year
    # License header.
    printer('# Copyright %s Google Inc. All Rights Reserved.' % year)
    printer('#')
    printer('# Licensed under the Apache License, Version 2.0 (the'
            '"License");')
    printer('# you may not use this file except in compliance with '
            'the License.')
    printer('# You may obtain a copy of the License at')
    printer('#')
    printer('# http://www.apache.org/licenses/LICENSE-2.0')
    printer('#')
    printer('# Unless required by applicable law or agreed to in writing, '
            'software')
    printer('# distributed under the License is distributed on an "AS IS" '
            'BASIS,')
    printer('# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either '
            'express or implied.')
    printer('# See the License for the specific language governing '
            'permissions and')
    printer('# limitations under the License.')
    printer()
    printer('import setuptools')
    # Pin the generated client to a compatible apitools release.
    printer('REQUIREMENTS = [')
    with printer.Indent(indent=' '):
        parts = self.apitools_version.split('.')
        major = parts.pop(0)
        minor = parts.pop(0)
        printer('"google-apitools>=%s,~=%s.%s",',
                self.apitools_version, major, minor)
        printer('"httplib2>=0.9",')
        printer('"oauth2client>=1.4.12",')
    printer(']')
    printer('_PACKAGE = "apitools.clients.%s"' % self.__package)
    printer()
    printer('setuptools.setup(')
    with printer.Indent(indent=' '):
        printer('name="google-apitools-%s-%s",',
                self.__package, self.__version)
        printer('version="%s.%s",',
                self.apitools_version, self.__revision)
        printer('description="Autogenerated apitools library for %s",' % (
            self.__package,))
        printer('url="https://github.com/google/apitools",')
        printer('author="Craig Citro",')
        printer('author_email="craigcitro@google.com",')
        printer('packages=setuptools.find_packages(),')
        printer('install_requires=REQUIREMENTS,')
        printer('classifiers=[')
        with printer.Indent(indent=' '):
            printer('"Programming Language :: Python :: 2.7",')
            printer('"License :: OSI Approved :: Apache Software '
                    'License",')
        printer('],')
        printer('license="Apache 2.0",')
        printer('keywords="apitools apitools-%s %s",' % (
            self.__package, self.__package))
    printer(')')
Write a setup . py for upload to PyPI .
18,751
def DownloadProgressPrinter(response, unused_download):
    """Print download progress based on a chunk response."""
    info = response.info
    if 'content-range' in info:
        message = 'Received %s' % info['content-range']
    else:
        message = 'Received %d bytes' % response.length
    print(message)
Print download progress based on response .
18,752
def _Initialize(self, http, url):
    """Initialize this download by setting self.__http and self.__url.

    Only sets the http client if one is not already present.
    """
    self.EnsureUninitialized()
    if self.http is None:
        # Fall back to a default http client when none is supplied.
        self.__http = http or http_wrapper.GetHttp()
    self.__url = url
Initialize this download by setting self . http and self . url .
18,753
def FromFile(cls, filename, overwrite=False, auto_transfer=True, **kwds):
    """Create a new download object from a filename.

    Raises:
      exceptions.InvalidUserInputError: if the file already exists and
        overwrite was not requested.
    """
    path = os.path.expanduser(filename)
    if os.path.exists(path) and not overwrite:
        raise exceptions.InvalidUserInputError(
            'File %s exists and overwrite not specified' % path)
    stream = open(path, 'wb')
    return cls(stream, close_stream=True,
               auto_transfer=auto_transfer, **kwds)
Create a new download object from a filename .
18,754
def FromStream(cls, stream, auto_transfer=True, total_size=None, **kwds):
    """Create a new Download object from a stream."""
    return cls(stream,
               auto_transfer=auto_transfer,
               total_size=total_size,
               **kwds)
Create a new Download object from a stream .
18,755
def FromData(cls, stream, json_data, http=None, auto_transfer=None, **kwds):
    """Create a new Download object from a stream and serialized data.

    Raises:
      exceptions.InvalidDataError: if json_data lacks required keys.
    """
    info = json.loads(json_data)
    missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
    if missing_keys:
        raise exceptions.InvalidDataError(
            'Invalid serialization data, missing keys: %s' % (
                ', '.join(missing_keys)))
    download = cls.FromStream(stream, **kwds)
    # Prefer the caller's auto_transfer, else the serialized value.
    if auto_transfer is None:
        download.auto_transfer = info['auto_transfer']
    else:
        download.auto_transfer = auto_transfer
    # Restore private (name-mangled) progress state.
    setattr(download, '_Download__progress', info['progress'])
    setattr(download, '_Download__total_size', info['total_size'])
    download._Initialize(http, info['url'])
    return download
Create a new Download object from a stream and serialized data .
18,756
def __SetTotal(self, info):
    """Set the total size from info's content-range, else default to 0."""
    if 'content-range' in info:
        _, _, total = info['content-range'].rpartition('/')
        # '*' means the server did not report a total size.
        if total != '*':
            self.__total_size = int(total)
    # Even when unknown, default to 0 so the download is initialized.
    if self.total_size is None:
        self.__total_size = 0
Sets the total size based off info if possible otherwise 0 .
18,757
def InitializeDownload(self, http_request, http=None, client=None):
    """Initialize this download by making a request.

    Args:
      http_request: The request to use to initialize this download.
      http: The http instance to use; one of http or client must be given.
      client: If provided, its http will be used and transfer URLs
        will be finalized through it.
    """
    self.EnsureUninitialized()
    if http is None and client is None:
        raise exceptions.UserError('Must provide client or http.')
    http = http or client.http
    if client is not None:
        http_request.url = client.FinalizeTransferUrl(http_request.url)
    url = http_request.url
    if self.auto_transfer:
        # Kick off the first ranged request immediately.
        end_byte = self.__ComputeEndByte(0)
        self.__SetRangeHeader(http_request, 0, end_byte)
        response = http_wrapper.MakeRequest(
            self.bytes_http or http, http_request)
        if response.status_code not in self._ACCEPTABLE_STATUSES:
            raise exceptions.HttpError.FromResponse(response)
        self.__initial_response = response
        self.__SetTotal(response.info)
        url = response.info.get('content-location', response.request_url)
    if client is not None:
        url = client.FinalizeTransferUrl(url)
    self._Initialize(http, url)
    # Unless the user manually requested chunks, stream the rest now.
    if self.auto_transfer:
        self.StreamInChunks()
Initialize this download by making a request .
18,758
def __NormalizeStartEnd(self, start, end=None):
    """Normalize start and end values based on total size."""
    if end is None:
        # Negative start counts back from the end of the file.
        if start < 0:
            start = max(0, start + self.total_size)
        return start, self.total_size - 1
    if start < 0:
        raise exceptions.TransferInvalidError(
            'Cannot have end index with negative start index ' +
            '[start=%d, end=%d]' % (start, end))
    elif start >= self.total_size:
        raise exceptions.TransferInvalidError(
            'Cannot have start index greater than total size ' +
            '[start=%d, total_size=%d]' % (start, self.total_size))
    end = min(end, self.total_size - 1)
    if end < start:
        raise exceptions.TransferInvalidError(
            'Range requested with end[%s] < start[%s]' % (end, start))
    return start, end
Normalizes start and end values based on total size .
18,759
def __ComputeEndByte(self, start, end=None, use_chunks=True):
    """Compute the last byte to fetch for this request.

    Clamps the caller-provided end to the chunk boundary (when
    use_chunks) and to the known total size, whichever is smaller.
    """
    end_byte = end
    # With a negative start and unknown size we cannot clamp anything.
    if start < 0 and not self.total_size:
        return end_byte
    if use_chunks:
        alternate = start + self.chunksize - 1
        end_byte = alternate if end_byte is None else min(end_byte,
                                                          alternate)
    if self.total_size:
        alternate = self.total_size - 1
        end_byte = alternate if end_byte is None else min(end_byte,
                                                          alternate)
    return end_byte
Compute the last byte to fetch for this request .
18,760
def __GetChunk(self, start, end, additional_headers=None):
    """Retrieve a chunk and return the full response."""
    self.EnsureInitialized()
    request = http_wrapper.Request(url=self.url)
    self.__SetRangeHeader(request, start, end=end)
    if additional_headers is not None:
        request.headers.update(additional_headers)
    return http_wrapper.MakeRequest(
        self.bytes_http, request,
        retry_func=self.retry_func,
        retries=self.num_retries)
Retrieve a chunk and return the full response .
18,761
def GetRange(self, start, end=None, additional_headers=None,
             use_chunks=True):
    """Retrieve a given byte range from this download, inclusive.

    Writes the data through __ProcessResponse as it arrives, looping
    chunk by chunk until the requested range is exhausted.
    """
    self.EnsureInitialized()
    progress_end_normalized = False
    if self.total_size is not None:
        progress, end_byte = self.__NormalizeStartEnd(start, end)
        progress_end_normalized = True
    else:
        # Total size unknown until the first response arrives.
        progress = start
        end_byte = end
    while (not progress_end_normalized or end_byte is None or
           progress <= end_byte):
        end_byte = self.__ComputeEndByte(progress, end=end_byte,
                                         use_chunks=use_chunks)
        response = self.__GetChunk(
            progress, end_byte, additional_headers=additional_headers)
        if not progress_end_normalized:
            # First response tells us the size; normalize now.
            self.__SetTotal(response.info)
            progress, end_byte = self.__NormalizeStartEnd(start, end)
            progress_end_normalized = True
        response = self.__ProcessResponse(response)
        progress += response.length
        if response.length == 0:
            if response.status_code == http_client.OK:
                return
            raise exceptions.TransferRetryError(
                'Zero bytes unexpectedly returned in download response')
Retrieve a given byte range from this download inclusive .
18,762
def StreamInChunks(self, callback=None, finish_callback=None,
                   additional_headers=None):
    """Stream the entire download in chunks."""
    self.StreamMedia(callback=callback,
                     finish_callback=finish_callback,
                     additional_headers=additional_headers,
                     use_chunks=True)
Stream the entire download in chunks .
18,763
def StreamMedia(self, callback=None, finish_callback=None,
                additional_headers=None, use_chunks=True):
    """Stream the entire download, invoking callbacks along the way."""
    callback = callback or self.progress_callback
    finish_callback = finish_callback or self.finish_callback
    self.EnsureInitialized()
    while True:
        # Reuse the response from InitializeDownload, if one is pending.
        if self.__initial_response is not None:
            response = self.__initial_response
            self.__initial_response = None
        else:
            end_byte = self.__ComputeEndByte(self.progress,
                                             use_chunks=use_chunks)
            response = self.__GetChunk(
                self.progress, end_byte,
                additional_headers=additional_headers)
        if self.total_size is None:
            self.__SetTotal(response.info)
        response = self.__ProcessResponse(response)
        self._ExecuteCallback(callback, response)
        # A plain 200 (non-partial) response means we got everything.
        if (response.status_code == http_client.OK or
                self.progress >= self.total_size):
            break
    self._ExecuteCallback(finish_callback, response)
Stream the entire download .
18,764
def FromFile(cls, filename, mime_type=None, auto_transfer=True,
             gzip_encoded=False, **kwds):
    """Create a new Upload object from a filename.

    Raises:
      exceptions.NotFoundError: if the file does not exist.
      exceptions.InvalidUserInputError: if mime_type is not given and
        cannot be guessed from the filename.
    """
    path = os.path.expanduser(filename)
    if not os.path.exists(path):
        raise exceptions.NotFoundError('Could not find file %s' % path)
    if not mime_type:
        mime_type, _ = mimetypes.guess_type(path)
        if mime_type is None:
            raise exceptions.InvalidUserInputError(
                'Could not guess mime type for %s' % path)
    size = os.stat(path).st_size
    return cls(open(path, 'rb'), mime_type,
               total_size=size, close_stream=True,
               auto_transfer=auto_transfer,
               gzip_encoded=gzip_encoded, **kwds)
Create a new Upload object from a filename .
18,765
def FromStream(cls, stream, mime_type, total_size=None, auto_transfer=True,
               gzip_encoded=False, **kwds):
    """Create a new Upload object from a stream.

    Raises:
      exceptions.InvalidUserInputError: if mime_type is None.
    """
    if mime_type is None:
        raise exceptions.InvalidUserInputError(
            'No mime_type specified for stream')
    return cls(stream, mime_type,
               total_size=total_size,
               close_stream=False,
               auto_transfer=auto_transfer,
               gzip_encoded=gzip_encoded,
               **kwds)
Create a new Upload object from a stream .
18,766
def FromData(cls, stream, json_data, http, auto_transfer=None,
             gzip_encoded=False, **kwds):
    """Create a new Upload of stream from serialized json_data and http.

    Restores the resumable-upload state from the server before
    returning, and optionally resumes streaming immediately.
    """
    info = json.loads(json_data)
    missing_keys = cls._REQUIRED_SERIALIZATION_KEYS - set(info.keys())
    if missing_keys:
        raise exceptions.InvalidDataError(
            'Invalid serialization data, missing keys: %s' % (
                ', '.join(missing_keys)))
    if 'total_size' in kwds:
        raise exceptions.InvalidUserInputError(
            'Cannot override total_size on serialized Upload')
    upload = cls.FromStream(stream, info['mime_type'],
                            total_size=info.get('total_size'),
                            gzip_encoded=gzip_encoded, **kwds)
    if isinstance(stream, io.IOBase) and not stream.seekable():
        raise exceptions.InvalidUserInputError(
            'Cannot restart resumable upload on non-seekable stream')
    if auto_transfer is None:
        upload.auto_transfer = info['auto_transfer']
    else:
        upload.auto_transfer = auto_transfer
    upload.strategy = RESUMABLE_UPLOAD
    upload._Initialize(http, info['url'])
    upload.RefreshResumableUploadState()
    upload.EnsureInitialized()
    if upload.auto_transfer:
        upload.StreamInChunks()
    return upload
Create a new Upload of stream from serialized json_data and http .
18,767
def __SetDefaultUploadStrategy(self, upload_config, http_request):
    """Determine and set the default upload strategy for this upload.

    A strategy already set by the user is left untouched.
    """
    if upload_config.resumable_path is None:
        self.strategy = SIMPLE_UPLOAD
    if self.strategy is not None:
        return
    strategy = SIMPLE_UPLOAD
    # Large payloads go resumable.
    if (self.total_size is not None and
            self.total_size > _RESUMABLE_UPLOAD_THRESHOLD):
        strategy = RESUMABLE_UPLOAD
    # Metadata body present but multipart unsupported -> resumable.
    if http_request.body and not upload_config.simple_multipart:
        strategy = RESUMABLE_UPLOAD
    if not upload_config.simple_path:
        strategy = RESUMABLE_UPLOAD
    self.strategy = strategy
Determine and set the default upload strategy for this upload .
18,768
def ConfigureRequest(self, upload_config, http_request, url_builder):
    """Configure the request and url for this upload."""
    # Validate total size and MIME type against the configuration.
    if (self.total_size and upload_config.max_size and
            self.total_size > upload_config.max_size):
        raise exceptions.InvalidUserInputError(
            'Upload too big: %s larger than max size %s' % (
                self.total_size, upload_config.max_size))
    if not util.AcceptableMimeType(upload_config.accept, self.mime_type):
        raise exceptions.InvalidUserInputError(
            'MIME type %s does not match any accepted MIME ranges %s' % (
                self.mime_type, upload_config.accept))
    self.__SetDefaultUploadStrategy(upload_config, http_request)
    if self.strategy == SIMPLE_UPLOAD:
        url_builder.relative_path = upload_config.simple_path
        if http_request.body:
            url_builder.query_params['uploadType'] = 'multipart'
            self.__ConfigureMultipartRequest(http_request)
        else:
            url_builder.query_params['uploadType'] = 'media'
            self.__ConfigureMediaRequest(http_request)
        if self.__gzip_encoded:
            # Compress the whole (small) body in one pass.
            http_request.headers['Content-Encoding'] = 'gzip'
            http_request.body = (
                compression.CompressStream(
                    six.BytesIO(http_request.body))[0].read())
    else:
        url_builder.relative_path = upload_config.resumable_path
        url_builder.query_params['uploadType'] = 'resumable'
        self.__ConfigureResumableRequest(http_request)
Configure the request and url for this upload .
18,769
def __ConfigureMediaRequest(self, http_request):
    """Configure http_request as a simple request for this upload."""
    http_request.headers['content-type'] = self.mime_type
    http_request.body = self.stream.read()
    # Keep logged output free of raw media bytes.
    http_request.loggable_body = '<media body>'
Configure http_request as a simple request for this upload .
18,770
def __ConfigureMultipartRequest(self, http_request):
    """Configure http_request as a multipart request for this upload.

    Builds a multipart/related body with the metadata (existing request
    body) as part one and the media as part two, then rewrites the
    loggable body to hide the raw media bytes.
    """
    msg_root = mime_multipart.MIMEMultipart('related')
    # Suppress writing the top-level multipart headers; we emit our own.
    setattr(msg_root, '_write_headers', lambda self: None)
    # Part 1: metadata (the original request body).
    msg = mime_nonmultipart.MIMENonMultipart(
        *http_request.headers['content-type'].split('/'))
    msg.set_payload(http_request.body)
    msg_root.attach(msg)
    # Part 2: the media itself.
    msg = mime_nonmultipart.MIMENonMultipart(*self.mime_type.split('/'))
    msg['Content-Transfer-Encoding'] = 'binary'
    msg.set_payload(self.stream.read())
    msg_root.attach(msg)
    # Flatten the message to bytes.
    fp = six.BytesIO()
    if six.PY3:
        generator_class = MultipartBytesGenerator
    else:
        generator_class = email_generator.Generator
    g = generator_class(fp, mangle_from_=False)
    g.flatten(msg_root, unixfrom=False)
    http_request.body = fp.getvalue()
    multipart_boundary = msg_root.get_boundary()
    http_request.headers['content-type'] = (
        'multipart/related; boundary=%r' % multipart_boundary)
    if isinstance(multipart_boundary, six.text_type):
        multipart_boundary = multipart_boundary.encode('ascii')
    # Replace the media payload in the loggable body with a placeholder.
    body_components = http_request.body.split(multipart_boundary)
    headers, _, _ = body_components[-2].partition(b'\n\n')
    body_components[-2] = b'\n\n'.join([headers, b'<media body>\n\n--'])
    http_request.loggable_body = multipart_boundary.join(body_components)
Configure http_request as a multipart request for this upload .
18,771
def RefreshResumableUploadState(self):
    """Talk to the server and refresh the state of this resumable upload.

    Issues an empty 'bytes */*' PUT and interprets the response to set
    progress, completion and stream position.
    """
    if self.strategy != RESUMABLE_UPLOAD:
        return
    self.EnsureInitialized()
    refresh_request = http_wrapper.Request(
        url=self.url, http_method='PUT',
        headers={'Content-Range': 'bytes */*'})
    refresh_response = http_wrapper.MakeRequest(
        self.http, refresh_request, redirections=0,
        retries=self.num_retries)
    range_header = self._GetRangeHeaderFromResponse(refresh_response)
    if refresh_response.status_code in (http_client.OK,
                                        http_client.CREATED):
        # Server already has everything.
        self.__complete = True
        self.__progress = self.total_size
        self.stream.seek(self.progress)
        self.__final_response = refresh_response
    elif refresh_response.status_code == http_wrapper.RESUME_INCOMPLETE:
        # No range header means nothing has been received yet.
        if range_header is None:
            self.__progress = 0
        else:
            self.__progress = self.__GetLastByte(range_header) + 1
        self.stream.seek(self.progress)
    else:
        raise exceptions.HttpError.FromResponse(refresh_response)
Talk to the server and refresh the state of this resumable upload .
18,772
def InitializeUpload(self, http_request, http=None, client=None):
    """Initialize this upload from the given http_request.

    Returns None for non-resumable uploads; otherwise the init
    response, or the StreamInChunks result under auto_transfer.
    """
    if self.strategy is None:
        raise exceptions.UserError(
            'No upload strategy set; did you call ConfigureRequest?')
    if http is None and client is None:
        raise exceptions.UserError('Must provide client or http.')
    # Only resumable uploads need an initialization round trip.
    if self.strategy != RESUMABLE_UPLOAD:
        return
    http = http or client.http
    if client is not None:
        http_request.url = client.FinalizeTransferUrl(http_request.url)
    self.EnsureUninitialized()
    http_response = http_wrapper.MakeRequest(http, http_request,
                                             retries=self.num_retries)
    if http_response.status_code != http_client.OK:
        raise exceptions.HttpError.FromResponse(http_response)
    self.__server_chunk_granularity = http_response.info.get(
        'X-Goog-Upload-Chunk-Granularity')
    url = http_response.info['location']
    if client is not None:
        url = client.FinalizeTransferUrl(url)
    self._Initialize(http, url)
    # Under auto_transfer, send the data immediately.
    if self.auto_transfer:
        return self.StreamInChunks()
    return http_response
Initialize this upload from the given http_request .
18,773
def StreamMedia(self, callback=None, finish_callback=None,
                additional_headers=None):
    """Send this resumable upload in a single request."""
    return self.__StreamMedia(
        callback=callback,
        finish_callback=finish_callback,
        additional_headers=additional_headers,
        use_chunks=False)
Send this resumable upload in a single request .
18,774
def __SendMediaRequest(self, request, end):
    """Request helper function for SendMediaBody & SendChunk.

    Args:
      request: The request to send.
      end: The last byte (exclusive) this request was expected to reach.

    Returns:
      The response, after re-seeking the stream if the server accepted
      fewer bytes than were sent.
    """
    def CheckResponse(response):
        if response is None:
            # BUGFIX: the old message formatted response.request_url
            # while response was None, raising AttributeError instead
            # of RequestError; report the request's url instead.
            raise exceptions.RequestError(
                'Request to url %s did not return a response.' %
                request.url)
    response = http_wrapper.MakeRequest(
        self.bytes_http, request,
        retry_func=self.retry_func,
        retries=self.num_retries,
        check_response_func=CheckResponse)
    if response.status_code == http_wrapper.RESUME_INCOMPLETE:
        last_byte = self.__GetLastByte(
            self._GetRangeHeaderFromResponse(response))
        # Server accepted fewer bytes than sent: rewind for the retry.
        if last_byte + 1 != end:
            self.stream.seek(last_byte + 1)
    return response
Request helper function for SendMediaBody & SendChunk .
18,775
def __SendMediaBody(self, start, additional_headers=None):
    """Send the entire media stream in a single request."""
    self.EnsureInitialized()
    if self.total_size is None:
        raise exceptions.TransferInvalidError(
            'Total size must be known for SendMediaBody')
    body_stream = stream_slice.StreamSlice(
        self.stream, self.total_size - start)
    request = http_wrapper.Request(url=self.url, http_method='PUT',
                                   body=body_stream)
    request.headers['Content-Type'] = self.mime_type
    if start == self.total_size:
        # Nothing left to send: signal completion with an empty range.
        range_string = 'bytes */%s' % self.total_size
    else:
        range_string = 'bytes %s-%s/%s' % (
            start, self.total_size - 1, self.total_size)
    request.headers['Content-Range'] = range_string
    if additional_headers:
        request.headers.update(additional_headers)
    return self.__SendMediaRequest(request, self.total_size)
Send the entire media stream in a single request .
18,776
def __SendChunk(self, start, additional_headers=None):
    """Send the specified chunk."""
    self.EnsureInitialized()
    no_log_body = self.total_size is None
    request = http_wrapper.Request(url=self.url, http_method='PUT')
    if self.__gzip_encoded:
        request.headers['Content-Encoding'] = 'gzip'
        body_stream, read_length, exhausted = compression.CompressStream(
            self.stream, self.chunksize)
        end = start + read_length
        # Once the input is exhausted we know the total size.
        if self.total_size is None and exhausted:
            self.__total_size = end
    elif self.total_size is None:
        # Buffer the chunk so end-of-stream (and thus total size) can
        # be detected as we go.
        body_stream = buffered_stream.BufferedStream(
            self.stream, start, self.chunksize)
        end = body_stream.stream_end_position
        if body_stream.stream_exhausted:
            self.__total_size = end
        body_stream = body_stream.read(self.chunksize)
    else:
        end = min(start + self.chunksize, self.total_size)
        body_stream = stream_slice.StreamSlice(self.stream, end - start)
    request.body = body_stream
    request.headers['Content-Type'] = self.mime_type
    if no_log_body:
        request.loggable_body = '<media body>'
    if self.total_size is None:
        # Total size still unknown: use an open-ended range.
        range_string = 'bytes %s-%s/*' % (start, end - 1)
    elif end == start:
        range_string = 'bytes */%s' % self.total_size
    else:
        range_string = 'bytes %s-%s/%s' % (start, end - 1,
                                           self.total_size)
    request.headers['Content-Range'] = range_string
    if additional_headers:
        request.headers.update(additional_headers)
    return self.__SendMediaRequest(request, end)
Send the specified chunk .
18,777
def CompressStream(in_stream, length=None, compresslevel=2,
                   chunksize=16777216):
    """Compress an input stream into a file-like buffer.

    Reads in_stream chunk by chunk, gzip-compressing into a
    StreamingBuffer until at least `length` compressed bytes have been
    produced or the input is exhausted (whichever comes first).

    Returns:
      A (out_stream, in_read, in_exhausted) tuple: the compressed
      buffer, how many input bytes were consumed, and whether the
      input ran dry.
    """
    in_read = 0
    in_exhausted = False
    out_stream = StreamingBuffer()
    with gzip.GzipFile(mode='wb', fileobj=out_stream,
                       compresslevel=compresslevel) as compress_stream:
        while not length or out_stream.length < length:
            data = in_stream.read(chunksize)
            data_length = len(data)
            compress_stream.write(data)
            in_read += data_length
            # A short read means the input stream is exhausted.
            if data_length < chunksize:
                in_exhausted = True
                break
    return out_stream, in_read, in_exhausted
Compresses an input stream into a file - like buffer .
18,778
def read(self, size=None):
    """Read at most size bytes from this buffer.

    Bytes read from this buffer are consumed and unavailable for
    future reads.  size defaults to everything buffered.
    """
    if size is None:
        size = self.__size
    pieces = []
    while size > 0 and self.__buf:
        data = self.__buf.popleft()
        size -= len(data)
        pieces.append(data)
    if size < 0:
        # Overshot: split the last piece, pushing the remainder back.
        pieces[-1], remainder = pieces[-1][:size], pieces[-1][size:]
        self.__buf.appendleft(remainder)
    ret = b''.join(pieces)
    self.__size -= len(ret)
    return ret
Read at most size bytes from this buffer .
18,779
def _WriteFile(file_descriptor, package, version, proto_printer):
    """Write the given extended file descriptor to the printer."""
    proto_printer.PrintPreamble(package, version, file_descriptor)
    _PrintEnums(proto_printer, file_descriptor.enum_types)
    _PrintMessages(proto_printer, file_descriptor.message_types)
    # Emit custom JSON mappings for enums first, then messages.
    custom_json_mappings = _FetchCustomMappings(file_descriptor.enum_types)
    custom_json_mappings.extend(
        _FetchCustomMappings(file_descriptor.message_types))
    for mapping in custom_json_mappings:
        proto_printer.PrintCustomJsonMapping(mapping)
Write the given extended file descriptor to the printer .
18,780
def WriteMessagesFile(file_descriptor, package, version, printer):
    """Write the given extended file descriptor as a messages file."""
    _WriteFile(file_descriptor, package, version,
               _Proto2Printer(printer))
Write the given extended file descriptor to out as a message file .
18,781
def WritePythonFile(file_descriptor, package, version, printer):
    """Write the given extended file descriptor as Python (protorpc)."""
    _WriteFile(file_descriptor, package, version,
               _ProtoRpcPrinter(printer))
Write the given extended file descriptor to out .
18,782
def _FetchCustomMappings(descriptor_ls):
    """Find and return all custom mappings for descriptors in descriptor_ls."""
    custom_mappings = []
    for descriptor in descriptor_ls:
        if isinstance(descriptor, ExtendedEnumDescriptor):
            custom_mappings.extend(
                _FormatCustomJsonMapping('Enum', m, descriptor)
                for m in descriptor.enum_mappings)
        elif isinstance(descriptor, ExtendedMessageDescriptor):
            custom_mappings.extend(
                _FormatCustomJsonMapping('Field', m, descriptor)
                for m in descriptor.field_mappings)
            # Recurse into nested enum and message declarations.
            custom_mappings.extend(
                _FetchCustomMappings(descriptor.enum_types))
            custom_mappings.extend(
                _FetchCustomMappings(descriptor.message_types))
    return custom_mappings
Find and return all custom mappings for descriptors in descriptor_ls .
18,783
def _PrintEnums(proto_printer, enum_types):
    """Print all enums to the given proto_printer, sorted by name."""
    for enum_type in sorted(enum_types, key=operator.attrgetter('name')):
        proto_printer.PrintEnum(enum_type)
Print all enums to the given proto_printer .
18,784
def __PrintMessageCommentLines(self, message_type):
    """Print the description of this message as // comments."""
    description = message_type.description or '%s message type.' % (
        message_type.name)
    # Reserve 3 columns for the '// ' prefix.
    width = self.__printer.CalculateWidth() - 3
    for line in textwrap.wrap(description, width):
        self.__printer('// %s', line)
    PrintIndentedDescriptions(self.__printer, message_type.enum_types,
                              'Enums', prefix='// ')
    PrintIndentedDescriptions(self.__printer, message_type.message_types,
                              'Messages', prefix='// ')
    PrintIndentedDescriptions(self.__printer, message_type.fields,
                              'Fields', prefix='// ')
Print the description of this message .
18,785
def __PrintAdditionalImports(self, imports):
    """Print additional imports needed for protorpc.

    Non-google imports are printed first, each group sorted and
    followed by a blank line.
    """
    google_imports = [x for x in imports if 'google' in x]
    other_imports = [x for x in imports if 'google' not in x]
    for group in (other_imports, google_imports):
        if not group:
            continue
        for import_ in sorted(group):
            self.__printer(import_)
        self.__printer()
Print additional imports needed for protorpc .
18,786
def positional(max_positional_args):
    """A decorator that declares only the first N arguments may be positional.

    May be used as @positional(N) with an explicit count, or directly as
    @positional, in which case the count is inferred from the decorated
    function's signature (number of args without defaults).

    Raises:
      TypeError: (at call time) if the wrapped function receives more
        than max_positional_args positional arguments.
      ValueError: if used directly on a function with no keyword args.
    """
    def positional_decorator(wrapped):
        @functools.wraps(wrapped)
        def positional_wrapper(*args, **kwargs):
            if len(args) > max_positional_args:
                plural_s = ''
                if max_positional_args != 1:
                    plural_s = 's'
                raise TypeError(
                    '%s() takes at most %d positional argument%s '
                    '(%d given)' % (wrapped.__name__, max_positional_args,
                                    plural_s, len(args)))
            return wrapped(*args, **kwargs)
        return positional_wrapper
    if isinstance(max_positional_args, six.integer_types):
        return positional_decorator
    else:
        # Used directly as @positional: infer the count.
        # BUGFIX: inspect.getargspec was removed in Python 3.11;
        # getfullargspec is the compatible replacement (its first four
        # fields match getargspec's).
        args, _, _, defaults = inspect.getfullargspec(
            max_positional_args)[:4]
        if defaults is None:
            raise ValueError(
                'Functions with no keyword arguments must specify '
                'max_positional_args')
        return positional(len(args) - len(defaults))(max_positional_args)
A decorator that declares only the first N arguments may be positional .
18,787
def get_package_for_module(module):
    """Get package name for a module.

    Args:
      module: Module (or name of a module in sys.modules) to inspect.

    Returns:
      The module's 'package' attribute if present; else, for __main__,
      a name derived from the file name; else the module __name__.
      None when a named module is not in sys.modules.
    """
    if isinstance(module, six.string_types):
        try:
            module = sys.modules[module]
        except KeyError:
            return None
    try:
        return six.text_type(module.package)
    except AttributeError:
        if module.__name__ == '__main__':
            try:
                file_name = module.__file__
            except AttributeError:
                pass
            else:
                # Derive a package name from the script's file name.
                base_name = os.path.basename(file_name)
                split_name = os.path.splitext(base_name)
                if len(split_name) == 1:
                    return six.text_type(base_name)
                return u'.'.join(split_name[:-1])
        return six.text_type(module.__name__)
Get package name for a module .
18,788
def decode_datetime(encoded_datetime):
    """Decode a DateTimeField parameter from a string to a python datetime.

    Args:
      encoded_datetime: ISO-8601-style string, optionally with a time
        zone suffix matched by _TIME_ZONE_RE.

    Returns:
      A naive datetime when no zone is given, else a datetime carrying
      a TimeZoneOffset tzinfo.
    """
    # Split off any time zone suffix; parse the rest case-insensitively.
    time_zone_match = _TIME_ZONE_RE.search(encoded_datetime)
    if time_zone_match:
        time_string = encoded_datetime[:time_zone_match.start(1)].upper()
    else:
        time_string = encoded_datetime.upper()
    # Fractional seconds are optional in the input.
    if '.' in time_string:
        format_string = '%Y-%m-%dT%H:%M:%S.%f'
    else:
        format_string = '%Y-%m-%dT%H:%M:%S'
    decoded_datetime = datetime.datetime.strptime(time_string,
                                                  format_string)
    if not time_zone_match:
        return decoded_datetime
    # 'Z' means UTC; otherwise apply the signed hh:mm offset.
    if time_zone_match.group('z'):
        offset_minutes = 0
    else:
        sign = time_zone_match.group('sign')
        hours, minutes = [int(value) for value in
                          time_zone_match.group('hours', 'minutes')]
        offset_minutes = hours * 60 + minutes
        if sign == '-':
            offset_minutes *= -1
    return datetime.datetime(decoded_datetime.year,
                             decoded_datetime.month,
                             decoded_datetime.day,
                             decoded_datetime.hour,
                             decoded_datetime.minute,
                             decoded_datetime.second,
                             decoded_datetime.microsecond,
                             TimeZoneOffset(offset_minutes))
Decode a DateTimeField parameter from a string to a python datetime .
18,789
def value_from_message(self, message):
    """Convert a DateTimeMessage to a datetime.

    Returns:
      A UTC-naive datetime when the message has no time_zone_offset,
      otherwise a tz-aware datetime in that offset.
    """
    message = super(DateTimeField, self).value_from_message(message)
    if message.time_zone_offset is None:
        return datetime.datetime.utcfromtimestamp(
            message.milliseconds / 1000.0)
    # Shift milliseconds to UTC before attaching the offset tzinfo.
    milliseconds = message.milliseconds - 60000 * message.time_zone_offset
    timezone = util.TimeZoneOffset(message.time_zone_offset)
    return datetime.datetime.fromtimestamp(milliseconds / 1000.0,
                                           tz=timezone)
Convert DateTimeMessage to a datetime .
18,790
def DetectGce():
    """Determine whether or not we're running on GCE.

    Probes the metadata server; any URL error means "not on GCE".

    Returns:
      True iff we're running on a GCE instance.
    """
    metadata_url = 'http://{}'.format(
        os.environ.get('GCE_METADATA_ROOT', 'metadata.google.internal'))
    try:
        o = urllib_request.build_opener(urllib_request.ProxyHandler({})).open(
            urllib_request.Request(
                metadata_url, headers={'Metadata-Flavor': 'Google'}))
    except urllib_error.URLError:
        return False
    try:
        return (o.getcode() == http_client.OK and
                o.headers.get('metadata-flavor') == 'Google')
    finally:
        # BUGFIX: close the metadata response so the connection is not
        # leaked.
        o.close()
Determine whether or not we re running on GCE .
18,791
def NormalizeScopes(scope_spec):
    """Normalize scope_spec to a set of strings.

    Args:
      scope_spec: A space-separated string of scopes, or an iterable
        of scopes.

    Returns:
      A set of scope strings.

    Raises:
      exceptions.TypecheckError: for any other input type.
    """
    # BUGFIX: collections.Iterable was removed in Python 3.10; the abc
    # module hosts the ABCs.
    from collections import abc as collections_abc
    if isinstance(scope_spec, six.string_types):
        return set(scope_spec.split(' '))
    elif isinstance(scope_spec, collections_abc.Iterable):
        return set(scope_spec)
    raise exceptions.TypecheckError(
        'NormalizeScopes expected string or iterable, found %s' % (
            type(scope_spec),))
Normalize scope_spec to a set of strings .
18,792
def CalculateWaitForRetry(retry_attempt, max_wait=60):
    """Calculate the amount of time to wait before a retry attempt.

    Exponential backoff (2 ** retry_attempt) with +/-25% jitter,
    clamped to the range [1, max_wait].
    """
    base_wait = 2 ** retry_attempt
    jitter = base_wait / 4.0
    wait_time = base_wait + random.uniform(-jitter, jitter)
    return max(1, min(wait_time, max_wait))
Calculates amount of time to wait before a retry attempt .
18,793
def AcceptableMimeType(accept_patterns, mime_type):
    """Return True iff mime_type is acceptable for one of accept_patterns.

    Patterns may use '*' as a wildcard for either component (or the
    whole type); patterns with parameters (';') are unsupported.
    """
    if '/' not in mime_type:
        raise exceptions.InvalidUserInputError(
            'Invalid MIME type: "%s"' % mime_type)
    unsupported_patterns = [p for p in accept_patterns if ';' in p]
    if unsupported_patterns:
        raise exceptions.GeneratedClientError(
            'MIME patterns with parameter unsupported: "%s"' % ', '.join(
                unsupported_patterns))

    def MimeTypeMatches(pattern, mime_type):
        """Return True iff mime_type matches a single pattern."""
        # '*' is shorthand for '*/*'.
        if pattern == '*':
            pattern = '*/*'
        return all(accept in ('*', provided)
                   for accept, provided
                   in zip(pattern.split('/'), mime_type.split('/')))

    return any(MimeTypeMatches(pattern, mime_type)
               for pattern in accept_patterns)
Return True iff mime_type is acceptable for one of accept_patterns .
18,794
def MapParamNames(params, request_type):
    """Reverse parameter remappings for URL construction.

    Falls back to the original name when no custom mapping exists.
    """
    return [encoding.GetCustomJsonFieldMapping(request_type, json_name=p) or p
            for p in params]
Reverse parameter remappings for URL construction .
18,795
def _JsonValueToPythonValue(json_value):
    """Convert the given JsonValue to a Python value."""
    util.Typecheck(json_value, JsonValue)
    _ValidateJsonValue(json_value)
    if json_value.is_null:
        return None
    # Exactly one field is assigned on a validated JsonValue.
    entries = [(f, json_value.get_assigned_value(f.name))
               for f in json_value.all_fields()]
    assigned_entries = [(f, value) for f, value in entries
                        if value is not None]
    field, value = assigned_entries[0]
    if not isinstance(field, messages.MessageField):
        return value
    elif field.message_type is JsonObject:
        return _JsonObjectToPythonValue(value)
    elif field.message_type is JsonArray:
        return _JsonArrayToPythonValue(value)
Convert the given JsonValue to a json string .
18,796
def _PythonValueToJsonValue(py_value):
    """Convert the given python value to a JsonValue.

    Args:
        py_value: A Python value: None, bool, string, number, dict, or
            iterable.

    Returns:
        The equivalent JsonValue message.

    Raises:
        exceptions.InvalidDataError: If py_value cannot be represented
            as a JsonValue.
    """
    if py_value is None:
        return JsonValue(is_null=True)
    # bool must be tested before Number: bool is a subclass of int.
    if isinstance(py_value, bool):
        return JsonValue(boolean_value=py_value)
    if isinstance(py_value, six.string_types):
        return JsonValue(string_value=py_value)
    if isinstance(py_value, numbers.Number):
        if isinstance(py_value, six.integer_types):
            # NOTE(review): the bounds check is strict, so exactly
            # _MININT64/_MAXINT64 fall through to double_value -- confirm
            # whether the endpoints should be representable as integers.
            if _MININT64 < py_value < _MAXINT64:
                return JsonValue(integer_value=py_value)
        return JsonValue(double_value=float(py_value))
    if isinstance(py_value, dict):
        return JsonValue(object_value=_PythonValueToJsonObject(py_value))
    # collections.Iterable was removed from the collections module in
    # Python 3.10; use collections.abc when available, falling back to
    # the old location on Python 2.
    try:
        from collections import abc as collections_abc
    except ImportError:  # Python 2
        import collections as collections_abc
    if isinstance(py_value, collections_abc.Iterable):
        return JsonValue(array_value=_PythonValueToJsonArray(py_value))
    raise exceptions.InvalidDataError(
        'Cannot convert "%s" to JsonValue' % py_value)
Convert the given python value to a JsonValue .
18,797
def _EncodeInt64Field(field, value):
    """Handle the special case of int64 as a string.

    Args:
        field: The message field being encoded.
        value: The value (or repeated list of values) assigned to it.

    Returns:
        An encoding.CodecResult; complete is True only for (u)int64
        variants, in which case the value(s) are stringified.
    """
    int64_variants = (messages.Variant.INT64, messages.Variant.UINT64)
    if field.variant not in int64_variants:
        # Not our special case; let other codecs handle it.
        return encoding.CodecResult(value=value, complete=False)
    if field.repeated:
        encoded = [str(item) for item in value]
    else:
        encoded = str(value)
    return encoding.CodecResult(value=encoded, complete=True)
Handle the special case of int64 as a string .
18,798
def _EncodeDateField(field, value):
    """Encoder for datetime.date objects.

    Args:
        field: The message field being encoded.
        value: A datetime.date (or repeated list of them).

    Returns:
        An encoding.CodecResult with the date(s) rendered via isoformat()
        (YYYY-MM-DD); always marked complete.
    """
    if field.repeated:
        encoded = [day.isoformat() for day in value]
    else:
        encoded = value.isoformat()
    return encoding.CodecResult(value=encoded, complete=True)
Encoder for datetime . date objects .
18,799
def ReplaceHomoglyphs(s):
    """Returns s with unicode homoglyphs replaced by ascii equivalents.

    Args:
        s: The string to process.

    Returns:
        The string with known homoglyphs mapped to ASCII equivalents,
        other ASCII characters kept as-is, and remaining characters
        rendered via the unicode-escape codec (or '?' as a last resort).
    """
    # Preferred ASCII renderings for specific unicode characters.
    homoglyphs = {
        '\xa0': ' ',
        '\u00e3': '',
        '\u00a0': ' ',
        '\u00a9': '(C)',
        '\u00ae': '(R)',
        '\u2014': '-',
        '\u2018': "'",
        '\u2019': "'",
        '\u201c': '"',
        '\u201d': '"',
        '\u2026': '...',
        '\u2e3a': '-',
    }

    def _Asciify(char):
        """Return the ASCII rendering of a single character."""
        if char in homoglyphs:
            return homoglyphs[char]
        try:
            char.encode('ascii')
        except UnicodeError:
            pass
        else:
            return char
        # No explicit equivalent: fall back to the escaped representation.
        try:
            return char.encode('unicode-escape').decode('ascii')
        except UnicodeError:
            return '?'

    return ''.join(_Asciify(c) for c in s)
Returns s with unicode homoglyphs replaced by ascii equivalents .