idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
35,600 | def get_range_by_effective_partition_key ( self , effective_partition_key_value ) : if _CollectionRoutingMap . MinimumInclusiveEffectivePartitionKey == effective_partition_key_value : return self . _orderedPartitionKeyRanges [ 0 ] if _CollectionRoutingMap . MaximumExclusiveEffectivePartitionKey == effective_partition_k... | Gets the range containing the given partition key |
35,601 | def get_range_by_partition_key_range_id ( self , partition_key_range_id ) : t = self . _rangeById . get ( partition_key_range_id ) if t is None : return None return t [ 0 ] | Gets the partition key range given the partition key range id |
35,602 | def get_overlapping_ranges ( self , provided_partition_key_ranges ) : if isinstance ( provided_partition_key_ranges , routing_range . _Range ) : return self . get_overlapping_ranges ( [ provided_partition_key_ranges ] ) minToPartitionRange = { } sortedLow = [ ( r . min , not r . isMinInclusive ) for r in self . _ordere... | Gets the partition key ranges overlapping the provided ranges |
35,603 | def GetAuthorizationHeader ( cosmos_client , verb , path , resource_id_or_fullname , is_name_based , resource_type , headers ) : if resource_id_or_fullname is not None and not is_name_based : resource_id_or_fullname = resource_id_or_fullname . lower ( ) if cosmos_client . master_key : return __GetAuthorizationTokenUsin... | Gets the authorization header . |
35,604 | def __GetAuthorizationTokenUsingMasterKey ( verb , resource_id_or_fullname , resource_type , headers , master_key ) : key = base64 . b64decode ( master_key ) text = '{verb}\n{resource_type}\n{resource_id_or_fullname}\n{x_date}\n{http_date}\n' . format ( verb = ( verb . lower ( ) or '' ) , resource_type = ( resource_typ... | Gets the authorization token using master_key . |
35,605 | def __GetAuthorizationTokenUsingResourceTokens ( resource_tokens , path , resource_id_or_fullname ) : if resource_tokens and len ( resource_tokens ) > 0 : if not path and not resource_id_or_fullname : return next ( six . itervalues ( resource_tokens ) ) if resource_tokens . get ( resource_id_or_fullname ) : return reso... | Get the authorization token using resource_tokens . |
35,606 | def parse_session_token ( response_headers ) : session_token = '' if http_constants . HttpHeaders . SessionToken in response_headers : session_token = response_headers [ http_constants . HttpHeaders . SessionToken ] id_to_sessionlsn = { } if session_token is not '' : token_pairs = session_token . split ( ',' ) for toke... | Extracts session token from response headers and parses |
35,607 | def generate_vector_color_map ( self ) : vector_stops = [ ] if type ( self . data ) == str : self . data = geojson_to_dict_list ( self . data ) for row in self . data : color = color_map ( row [ self . color_property ] , self . color_stops , self . color_default ) vector_stops . append ( [ row [ self . data_join_proper... | Generate color stops array for use with match expression in mapbox template |
35,608 | def generate_vector_numeric_map ( self , numeric_property ) : vector_stops = [ ] function_type = getattr ( self , '{}_function_type' . format ( numeric_property ) ) lookup_property = getattr ( self , '{}_property' . format ( numeric_property ) ) numeric_stops = getattr ( self , '{}_stops' . format ( numeric_property ) ... | Generate stops array for use with match expression in mapbox template |
35,609 | def check_vector_template ( self ) : if self . vector_url is not None and self . vector_layer_name is not None : self . template = 'vector_' + self . template self . vector_source = True else : self . vector_source = False | Determines if features are defined as vector source based on MapViz arguments . |
35,610 | def as_iframe ( self , html_data ) : srcdoc = html_data . replace ( '"' , "'" ) return ( '<iframe id="{div_id}", srcdoc="{srcdoc}" style="width: {width}; ' 'height: {height};"></iframe>' . format ( div_id = self . div_id , srcdoc = srcdoc , width = self . width , height = self . height ) ) | Build the HTML representation for the mapviz . |
35,611 | def add_unique_template_variables ( self , options ) : options . update ( dict ( geojson_data = json . dumps ( self . data , ensure_ascii = False ) , colorProperty = self . color_property , colorType = self . color_function_type , colorStops = self . color_stops , strokeWidth = self . stroke_width , strokeColor = self ... | Update map template variables specific to circle visual |
35,612 | def add_unique_template_variables ( self , options ) : options . update ( dict ( colorProperty = self . color_property , colorStops = self . color_stops , colorType = self . color_function_type , radiusType = self . radius_function_type , defaultColor = self . color_default , defaultRadius = self . radius_default , rad... | Update map template variables specific to graduated circle visual |
35,613 | def add_unique_template_variables ( self , options ) : options . update ( dict ( colorStops = self . color_stops , colorDefault = self . color_default , radiusStops = self . radius_stops , clusterRadius = self . clusterRadius , clusterMaxZoom = self . clusterMaxZoom , strokeWidth = self . stroke_width , strokeColor = s... | Update map template variables specific to a clustered circle visual |
35,614 | def add_unique_template_variables ( self , options ) : options . update ( dict ( image = self . image , coordinates = self . coordinates ) ) | Update map template variables specific to image visual |
35,615 | def add_unique_template_variables ( self , options ) : options . update ( dict ( tiles_url = self . tiles_url , tiles_size = self . tiles_size , tiles_minzoom = self . tiles_minzoom , tiles_maxzoom = self . tiles_maxzoom , tiles_bounds = self . tiles_bounds if self . tiles_bounds else 'undefined' ) ) | Update map template variables specific to a raster visual |
35,616 | def add_unique_template_variables ( self , options ) : if self . line_stroke in [ "dashed" , "--" ] : self . line_dash_array = [ 6 , 4 ] elif self . line_stroke in [ "dotted" , ":" ] : self . line_dash_array = [ 0.5 , 4 ] elif self . line_stroke in [ "dash dot" , "-." ] : self . line_dash_array = [ 6 , 4 , 0.5 , 4 ] el... | Update map template variables specific to linestring visual |
35,617 | def row_to_geojson ( row , lon , lat , precision , date_format = 'epoch' ) : row_json = json . loads ( row . to_json ( date_format = date_format , date_unit = 's' ) ) return geojson . Feature ( geometry = geojson . Point ( ( round ( row_json [ lon ] , precision ) , round ( row_json [ lat ] , precision ) ) ) , propertie... | Convert a pandas dataframe row to a geojson format object . Converts all datetimes to epoch seconds . |
35,618 | def scale_between ( minval , maxval , numStops ) : scale = [ ] if numStops < 2 : return [ minval , maxval ] elif maxval < minval : raise ValueError ( ) else : domain = maxval - minval interval = float ( domain ) / float ( numStops ) for i in range ( numStops ) : scale . append ( round ( minval + interval * i , 2 ) ) re... | Scale a min and max value to equal interval domain with numStops discrete values |
35,619 | def create_radius_stops ( breaks , min_radius , max_radius ) : num_breaks = len ( breaks ) radius_breaks = scale_between ( min_radius , max_radius , num_breaks ) stops = [ ] for i , b in enumerate ( breaks ) : stops . append ( [ b , radius_breaks [ i ] ] ) return stops | Convert a data breaks into a radius ramp |
35,620 | def create_weight_stops ( breaks ) : num_breaks = len ( breaks ) weight_breaks = scale_between ( 0 , 1 , num_breaks ) stops = [ ] for i , b in enumerate ( breaks ) : stops . append ( [ b , weight_breaks [ i ] ] ) return stops | Convert data breaks into a heatmap - weight ramp |
35,621 | def create_color_stops ( breaks , colors = 'RdYlGn' , color_ramps = color_ramps ) : num_breaks = len ( breaks ) stops = [ ] if isinstance ( colors , list ) : if len ( colors ) == 0 or len ( colors ) != num_breaks : raise ValueError ( 'custom color list must be of same length as breaks list' ) for color in colors : try ... | Convert a list of breaks into color stops using colors from colorBrewer or a custom list of color values in RGB RGBA HSL CSS text or HEX format . See www . colorbrewer2 . org for a list of color options to pass |
35,622 | def numeric_map ( lookup , numeric_stops , default = 0.0 ) : if len ( numeric_stops ) == 0 : return default match_map = dict ( ( x , y ) for ( x , y ) in numeric_stops ) if lookup in match_map . keys ( ) : return match_map . get ( lookup ) if isinstance ( lookup , ( int , float , complex ) ) : try : stops , values = zi... | Return a number value interpolated from given numeric_stops |
35,623 | def load_yaml_from_docstring ( docstring ) : split_lines = trim_docstring ( docstring ) . split ( "\n" ) for index , line in enumerate ( split_lines ) : line = line . strip ( ) if line . startswith ( "---" ) : cut_from = index break else : return { } yaml_string = "\n" . join ( split_lines [ cut_from : ] ) yaml_string ... | Loads YAML from docstring . |
35,624 | def load_operations_from_docstring ( docstring ) : doc_data = load_yaml_from_docstring ( docstring ) return { key : val for key , val in iteritems ( doc_data ) if key in PATH_KEYS or key . startswith ( "x-" ) } | Return a dictionary of OpenAPI operations parsed from a a docstring . |
35,625 | def get_fields ( schema , exclude_dump_only = False ) : if hasattr ( schema , "fields" ) : fields = schema . fields elif hasattr ( schema , "_declared_fields" ) : fields = copy . deepcopy ( schema . _declared_fields ) else : raise ValueError ( "{!r} doesn't have either `fields` or `_declared_fields`." . format ( schema... | Return fields from schema |
35,626 | def warn_if_fields_defined_in_meta ( fields , Meta ) : if getattr ( Meta , "fields" , None ) or getattr ( Meta , "additional" , None ) : declared_fields = set ( fields . keys ( ) ) if ( set ( getattr ( Meta , "fields" , set ( ) ) ) > declared_fields or set ( getattr ( Meta , "additional" , set ( ) ) ) > declared_fields... | Warns user that fields defined in Meta . fields or Meta . additional will be ignored |
35,627 | def filter_excluded_fields ( fields , Meta , exclude_dump_only ) : exclude = list ( getattr ( Meta , "exclude" , [ ] ) ) if exclude_dump_only : exclude . extend ( getattr ( Meta , "dump_only" , [ ] ) ) filtered_fields = OrderedDict ( ( key , value ) for key , value in fields . items ( ) if key not in exclude ) return f... | Filter fields that should be ignored in the OpenAPI spec |
35,628 | def get_unique_schema_name ( components , name , counter = 0 ) : if name not in components . _schemas : return name if not counter : warnings . warn ( "Multiple schemas resolved to the name {}. The name has been modified. " "Either manually add each of the schemas with a different name or " "provide a custom schema_nam... | Function to generate a unique name based on the provided name and names already in the spec . Will append a number to the name to make it unique if the name is already in the spec . |
35,629 | def build_reference ( component_type , openapi_major_version , component_name ) : return { "$ref" : "#/{}{}/{}" . format ( "components/" if openapi_major_version >= 3 else "" , COMPONENT_SUBSECTIONS [ openapi_major_version ] [ component_type ] , component_name , ) } | Return path to reference |
35,630 | def deepupdate ( original , update ) : for key , value in original . items ( ) : if key not in update : update [ key ] = value elif isinstance ( value , dict ) : deepupdate ( value , update [ key ] ) return update | Recursively update a dict . |
35,631 | def _observed_name ( field , name ) : if MARSHMALLOW_VERSION_INFO [ 0 ] < 3 : dump_to = getattr ( field , "dump_to" , None ) load_from = getattr ( field , "load_from" , None ) return dump_to or load_from or name return field . data_key or name | Adjust field name to reflect dump_to and load_from attributes . |
35,632 | def map_to_openapi_type ( self , * args ) : if len ( args ) == 1 and args [ 0 ] in self . field_mapping : openapi_type_field = self . field_mapping [ args [ 0 ] ] elif len ( args ) == 2 : openapi_type_field = args else : raise TypeError ( "Pass core marshmallow field type or (type, fmt) pair." ) def inner ( field_type ... | Decorator to set mapping for custom fields . |
35,633 | def field2type_and_format ( self , field ) : for field_class in type ( field ) . __mro__ : if field_class in self . field_mapping : type_ , fmt = self . field_mapping [ field_class ] break else : warnings . warn ( "Field of type {} does not inherit from marshmallow.Field." . format ( type ( field ) ) , UserWarning , ) ... | Return the dictionary of OpenAPI type and format based on the field type |
35,634 | def field2default ( self , field ) : ret = { } if "doc_default" in field . metadata : ret [ "default" ] = field . metadata [ "doc_default" ] else : default = field . missing if default is not marshmallow . missing and not callable ( default ) : ret [ "default" ] = default return ret | Return the dictionary containing the field s default value |
35,635 | def field2choices ( self , field , ** kwargs ) : attributes = { } comparable = [ validator . comparable for validator in field . validators if hasattr ( validator , "comparable" ) ] if comparable : attributes [ "enum" ] = comparable else : choices = [ OrderedSet ( validator . choices ) for validator in field . validato... | Return the dictionary of OpenAPI field attributes for valid choices definition |
35,636 | def field2read_only ( self , field , ** kwargs ) : attributes = { } if field . dump_only : attributes [ "readOnly" ] = True return attributes | Return the dictionary of OpenAPI field attributes for a dump_only field . |
35,637 | def field2write_only ( self , field , ** kwargs ) : attributes = { } if field . load_only and self . openapi_version . major >= 3 : attributes [ "writeOnly" ] = True return attributes | Return the dictionary of OpenAPI field attributes for a load_only field . |
35,638 | def field2nullable ( self , field , ** kwargs ) : attributes = { } if field . allow_none : attributes [ "x-nullable" if self . openapi_version . major < 3 else "nullable" ] = True return attributes | Return the dictionary of OpenAPI field attributes for a nullable field . |
35,639 | def metadata2properties ( self , field ) : metadata = { key . replace ( "_" , "-" ) if key . startswith ( "x_" ) else key : value for key , value in iteritems ( field . metadata ) } ret = { key : value for key , value in metadata . items ( ) if key in _VALID_PROPERTIES or key . startswith ( _VALID_PREFIX ) } return ret | Return a dictionary of properties extracted from field Metadata |
35,640 | def resolve_nested_schema ( self , schema ) : schema_instance = resolve_schema_instance ( schema ) schema_key = make_schema_key ( schema_instance ) if schema_key not in self . refs : schema_cls = self . resolve_schema_class ( schema ) name = self . schema_name_resolver ( schema_cls ) if not name : try : json_schema = s... | Return the Open API representation of a marshmallow Schema . |
35,641 | def property2parameter ( self , prop , name = "body" , required = False , multiple = False , location = None , default_in = "body" , ) : openapi_default_in = __location_map__ . get ( default_in , default_in ) openapi_location = __location_map__ . get ( location , openapi_default_in ) ret = { "in" : openapi_location , "... | Return the Parameter Object definition for a JSON Schema property . |
35,642 | def get_ref_dict ( self , schema ) : schema_key = make_schema_key ( schema ) ref_schema = build_reference ( "schema" , self . openapi_version . major , self . refs [ schema_key ] ) if getattr ( schema , "many" , False ) : return { "type" : "array" , "items" : ref_schema } return ref_schema | Method to create a dictionary containing a JSON reference to the schema in the spec |
35,643 | def clean_operations ( operations , openapi_major_version ) : invalid = { key for key in set ( iterkeys ( operations ) ) - set ( VALID_METHODS [ openapi_major_version ] ) if not key . startswith ( "x-" ) } if invalid : raise APISpecError ( "One or more HTTP methods are invalid: {}" . format ( ", " . join ( invalid ) ) ... | Ensure that all parameters with in equal to path are also required as required by the OpenAPI specification as well as normalizing any references to global parameters . Also checks for invalid HTTP methods . |
35,644 | def schema ( self , name , component = None , ** kwargs ) : if name in self . _schemas : raise DuplicateComponentNameError ( 'Another schema with name "{}" is already registered.' . format ( name ) ) component = component or { } ret = component . copy ( ) for plugin in self . _plugins : try : ret . update ( plugin . sc... | Add a new schema to the spec . |
35,645 | def parameter ( self , component_id , location , component = None , ** kwargs ) : if component_id in self . _parameters : raise DuplicateComponentNameError ( 'Another parameter with name "{}" is already registered.' . format ( component_id ) ) component = component or { } ret = component . copy ( ) ret . setdefault ( "... | Add a parameter which can be referenced . |
35,646 | def response ( self , component_id , component = None , ** kwargs ) : if component_id in self . _responses : raise DuplicateComponentNameError ( 'Another response with name "{}" is already registered.' . format ( component_id ) ) component = component or { } ret = component . copy ( ) for plugin in self . _plugins : tr... | Add a response which can be referenced . |
35,647 | def security_scheme ( self , component_id , component ) : if component_id in self . _security_schemes : raise DuplicateComponentNameError ( 'Another security scheme with name "{}" is already registered.' . format ( component_id ) ) self . _security_schemes [ component_id ] = component return self | Add a security scheme which can be referenced . |
35,648 | def path ( self , path = None , operations = None , summary = None , description = None , ** kwargs ) : operations = operations or OrderedDict ( ) for plugin in self . plugins : try : ret = plugin . path_helper ( path = path , operations = operations , ** kwargs ) except PluginMethodNotImplementedError : continue if re... | Add a new path object to the spec . |
35,649 | def resolver ( schema ) : name = schema . __name__ if name . endswith ( "Schema" ) : return name [ : - 6 ] or name return name | Default implementation of a schema name resolver function |
35,650 | def resolve_schema_in_request_body ( self , request_body ) : content = request_body [ "content" ] for content_type in content : schema = content [ content_type ] [ "schema" ] content [ content_type ] [ "schema" ] = self . openapi . resolve_schema_dict ( schema ) | Function to resolve a schema in a requestBody object - modifies then response dict to convert Marshmallow Schema object or class into dict |
35,651 | def resolve_schema ( self , data ) : if not isinstance ( data , dict ) : return if "schema" in data : data [ "schema" ] = self . openapi . resolve_schema_dict ( data [ "schema" ] ) if self . openapi_version . major >= 3 : if "content" in data : for content_type in data [ "content" ] : schema = data [ "content" ] [ cont... | Function to resolve a schema in a parameter or response - modifies the corresponding dict to convert Marshmallow Schema object or class into dict |
35,652 | def warn_if_schema_already_in_spec ( self , schema_key ) : if schema_key in self . openapi . refs : warnings . warn ( "{} has already been added to the spec. Adding it twice may " "cause references to not resolve properly." . format ( schema_key [ 0 ] ) , UserWarning , ) | Method to warn the user if the schema has already been added to the spec . |
35,653 | def size_in_bytes ( insize ) : if insize is None or insize . strip ( ) == '' : raise ValueError ( 'no string specified' ) units = { 'k' : 1024 , 'm' : 1024 ** 2 , 'g' : 1024 ** 3 , 't' : 1024 ** 4 , 'p' : 1024 ** 5 , } match = re . search ( '^\s*([0-9\.]+)\s*([kmgtp])?' , insize , re . I ) if match is None : raise Valu... | Returns the size in bytes from strings such as 5 mb into 5242880 . |
35,654 | def main ( ) : signal . signal ( signal . SIGINT , signal_handler ) global offset global arguments arguments = docopt ( __doc__ , version = __version__ ) if arguments [ '--debug' ] : logger . level = logging . DEBUG elif arguments [ '--error' ] : logger . level = logging . ERROR get_config ( ) logger . info ( 'Soundclo... | Main function parses the URL from command line arguments |
35,655 | def get_config ( ) : global token config = configparser . ConfigParser ( ) config . read ( os . path . join ( os . path . expanduser ( '~' ) , '.config/scdl/scdl.cfg' ) ) try : token = config [ 'scdl' ] [ 'auth_token' ] path = config [ 'scdl' ] [ 'path' ] except : logger . error ( 'Are you sure scdl.cfg is in $HOME/.co... | Reads the music download filepath from scdl . cfg |
35,656 | def get_item ( track_url , client_id = CLIENT_ID ) : try : item_url = url [ 'resolve' ] . format ( track_url ) r = requests . get ( item_url , params = { 'client_id' : client_id } ) logger . debug ( r . url ) if r . status_code == 403 : return get_item ( track_url , ALT_CLIENT_ID ) item = r . json ( ) no_tracks = item ... | Fetches metadata for a track or playlist |
35,657 | def who_am_i ( ) : me = url [ 'me' ] . format ( token ) r = requests . get ( me , params = { 'client_id' : CLIENT_ID } ) r . raise_for_status ( ) current_user = r . json ( ) logger . debug ( me ) logger . info ( 'Hello {0}!' . format ( current_user [ 'username' ] ) ) return current_user | Display username from current token and check for validity |
35,658 | def remove_files ( ) : logger . info ( "Removing local track files that were not downloaded..." ) files = [ f for f in os . listdir ( '.' ) if os . path . isfile ( f ) ] for f in files : if f not in fileToKeep : os . remove ( f ) | Removes any pre - existing tracks that were not just downloaded |
35,659 | def get_track_info ( track_id ) : logger . info ( 'Retrieving more info on the track' ) info_url = url [ "trackinfo" ] . format ( track_id ) r = requests . get ( info_url , params = { 'client_id' : CLIENT_ID } , stream = True ) item = r . json ( ) logger . debug ( item ) return item | Fetches track info from Soundcloud given a track_id |
35,660 | def already_downloaded ( track , title , filename ) : global arguments already_downloaded = False if os . path . isfile ( filename ) : already_downloaded = True if arguments [ '--flac' ] and can_convert ( filename ) and os . path . isfile ( filename [ : - 4 ] + ".flac" ) : already_downloaded = True if arguments [ '--do... | Returns True if the file has already been downloaded |
35,661 | def in_download_archive ( track ) : global arguments if not arguments [ '--download-archive' ] : return archive_filename = arguments . get ( '--download-archive' ) try : with open ( archive_filename , 'a+' , encoding = 'utf-8' ) as file : logger . debug ( 'Contents of {0}:' . format ( archive_filename ) ) file . seek (... | Returns True if a track_id exists in the download archive |
35,662 | def record_download_archive ( track ) : global arguments if not arguments [ '--download-archive' ] : return archive_filename = arguments . get ( '--download-archive' ) try : with open ( archive_filename , 'a' , encoding = 'utf-8' ) as file : file . write ( '{0}' . format ( track [ 'id' ] ) + '\n' ) except IOError as io... | Write the track_id in the download archive |
35,663 | def unicode_compact ( func ) : @ wraps ( func ) def wrapped ( self , text , * a , ** kw ) : if not isinstance ( text , six . text_type ) : text = text . decode ( 'utf-8' ) return func ( self , text , * a , ** kw ) return wrapped | Make sure the first parameter of the decorated method to be a unicode object . |
35,664 | def reply_webapi ( self , text , attachments = None , as_user = True , in_thread = None ) : if in_thread is None : in_thread = 'thread_ts' in self . body if in_thread : self . send_webapi ( text , attachments = attachments , as_user = as_user , thread_ts = self . thread_ts ) else : text = self . gen_reply ( text ) self... | Send a reply to the sender using Web API |
35,665 | def send_webapi ( self , text , attachments = None , as_user = True , thread_ts = None ) : self . _client . send_message ( self . _body [ 'channel' ] , text , attachments = attachments , as_user = as_user , thread_ts = thread_ts ) | Send a reply using Web API |
35,666 | def reply ( self , text , in_thread = None ) : if in_thread is None : in_thread = 'thread_ts' in self . body if in_thread : self . send ( text , thread_ts = self . thread_ts ) else : text = self . gen_reply ( text ) self . send ( text ) | Send a reply to the sender using RTM API |
35,667 | def direct_reply ( self , text ) : channel_id = self . _client . open_dm_channel ( self . _get_user_id ( ) ) self . _client . rtm_send_message ( channel_id , text ) | Send a reply via direct message using RTM API |
35,668 | def send ( self , text , thread_ts = None ) : self . _client . rtm_send_message ( self . _body [ 'channel' ] , text , thread_ts = thread_ts ) | Send a reply using RTM API |
35,669 | def react ( self , emojiname ) : self . _client . react_to_message ( emojiname = emojiname , channel = self . _body [ 'channel' ] , timestamp = self . _body [ 'ts' ] ) | React to a message using the web api |
35,670 | def default_reply ( * args , ** kwargs ) : invoked = bool ( not args or kwargs ) matchstr = kwargs . pop ( 'matchstr' , r'^.*$' ) flags = kwargs . pop ( 'flags' , 0 ) if not invoked : func = args [ 0 ] def wrapper ( func ) : PluginsManager . commands [ 'default_reply' ] [ re . compile ( matchstr , flags ) ] = func logg... | Decorator declaring the wrapped function to the default reply hanlder . |
35,671 | def get_previous_price ( self , currency , date_obj ) : start = date_obj . strftime ( '%Y-%m-%d' ) end = date_obj . strftime ( '%Y-%m-%d' ) url = ( 'https://api.coindesk.com/v1/bpi/historical/close.json' '?start={}&end={}¤cy={}' . format ( start , end , currency ) ) response = requests . get ( url ) if response .... | Get Price for one bit coin on given date |
35,672 | def get_previous_price_list ( self , currency , start_date , end_date ) : start = start_date . strftime ( '%Y-%m-%d' ) end = end_date . strftime ( '%Y-%m-%d' ) url = ( 'https://api.coindesk.com/v1/bpi/historical/close.json' '?start={}&end={}¤cy={}' . format ( start , end , currency ) ) response = requests . get (... | Get List of prices between two dates |
35,673 | def convert_to_btc ( self , amount , currency ) : if isinstance ( amount , Decimal ) : use_decimal = True else : use_decimal = self . _force_decimal url = 'https://api.coindesk.com/v1/bpi/currentprice/{}.json' . format ( currency ) response = requests . get ( url ) if response . status_code == 200 : data = response . j... | Convert X amount to Bit Coins |
35,674 | def convert_btc_to_cur ( self , coins , currency ) : if isinstance ( coins , Decimal ) : use_decimal = True else : use_decimal = self . _force_decimal url = 'https://api.coindesk.com/v1/bpi/currentprice/{}.json' . format ( currency ) response = requests . get ( url ) if response . status_code == 200 : data = response .... | Convert X bit coins to valid currency amount |
35,675 | def convert_to_btc_on ( self , amount , currency , date_obj ) : if isinstance ( amount , Decimal ) : use_decimal = True else : use_decimal = self . _force_decimal start = date_obj . strftime ( '%Y-%m-%d' ) end = date_obj . strftime ( '%Y-%m-%d' ) url = ( 'https://api.coindesk.com/v1/bpi/historical/close.json' '?start={... | Convert X amount to BTC based on given date rate |
35,676 | def rate_limit ( f ) : def new_f ( * args , ** kwargs ) : errors = 0 while True : resp = f ( * args , ** kwargs ) if resp . status_code == 200 : errors = 0 return resp elif resp . status_code == 401 : try : resp . raise_for_status ( ) except requests . HTTPError as e : message = "\nThis is a protected or locked account... | A decorator to handle rate limiting from the Twitter API . If a rate limit error is encountered we will sleep until we can issue the API call again . |
35,677 | def catch_timeout ( f ) : def new_f ( self , * args , ** kwargs ) : try : return f ( self , * args , ** kwargs ) except ( requests . exceptions . ReadTimeout , requests . packages . urllib3 . exceptions . ReadTimeoutError ) as e : log . warning ( "caught read timeout: %s" , e ) self . connect ( ) return f ( self , * ar... | A decorator to handle read timeouts from Twitter . |
35,678 | def catch_gzip_errors ( f ) : def new_f ( self , * args , ** kwargs ) : try : return f ( self , * args , ** kwargs ) except requests . exceptions . ContentDecodingError as e : log . warning ( "caught gzip error: %s" , e ) self . connect ( ) return f ( self , * args , ** kwargs ) return new_f | A decorator to handle gzip encoding errors which have been known to happen during hydration . |
35,679 | def interruptible_sleep ( t , event = None ) : log . info ( "sleeping %s" , t ) if event is None : time . sleep ( t ) return False else : return not event . wait ( t ) | Sleeps for a specified duration optionally stopping early for event . |
35,680 | def filter_protected ( f ) : def new_f ( self , * args , ** kwargs ) : for obj in f ( self , * args , ** kwargs ) : if self . protected == False : if 'user' in obj and obj [ 'user' ] [ 'protected' ] : continue elif 'protected' in obj and obj [ 'protected' ] : continue yield obj return new_f | filter_protected will filter out protected tweets and users unless explicitly requested not to . |
35,681 | def tweets_files ( string , path ) : for filename in os . listdir ( path ) : if re . match ( string , filename ) and ".jsonl" in filename : f = gzip . open if ".gz" in filename else open yield path + filename , f Ellipsis | Iterates over json files in path . |
35,682 | def extract ( json_object , args , csv_writer ) : found = [ [ ] ] for attribute in args . attributes : item = attribute . getElement ( json_object ) if len ( item ) == 0 : for row in found : row . append ( "NA" ) else : found1 = [ ] for value in item : if value is None : value = "NA" new = copy . deepcopy ( found ) for... | Extract and write found attributes . |
35,683 | def add ( from_user , from_id , to_user , to_id , type ) : "adds a relation to the graph" if options . users and to_user : G . add_node ( from_user , screen_name = from_user ) G . add_node ( to_user , screen_name = to_user ) if G . has_edge ( from_user , to_user ) : weight = G [ from_user ] [ to_user ] [ 'weight' ] + 1... | adds a relation to the graph |
35,684 | def timeline ( self , user_id = None , screen_name = None , max_id = None , since_id = None , max_pages = None ) : if user_id and screen_name : raise ValueError ( 'only user_id or screen_name may be passed' ) if screen_name : screen_name = screen_name . lstrip ( '@' ) id = screen_name or str ( user_id ) id_type = "scre... | Returns a collection of the most recent tweets posted by the user indicated by the user_id or screen_name parameter . Provide a user_id or screen_name . |
35,685 | def follower_ids ( self , user ) : user = str ( user ) user = user . lstrip ( '@' ) url = 'https://api.twitter.com/1.1/followers/ids.json' if re . match ( r'^\d+$' , user ) : params = { 'user_id' : user , 'cursor' : - 1 } else : params = { 'screen_name' : user , 'cursor' : - 1 } while params [ 'cursor' ] != 0 : try : r... | Returns Twitter user id lists for the specified user s followers . A user can be a specific using their screen_name or user_id |
35,686 | def filter ( self , track = None , follow = None , locations = None , event = None , record_keepalive = False ) : if locations is not None : if type ( locations ) == list : locations = ',' . join ( locations ) locations = locations . replace ( '\\' , '' ) url = 'https://stream.twitter.com/1.1/statuses/filter.json' para... | Returns an iterator for tweets that match a given filter track from the livestream of tweets happening right now . |
35,687 | def sample ( self , event = None , record_keepalive = False ) : url = 'https://stream.twitter.com/1.1/statuses/sample.json' params = { "stall_warning" : True } headers = { 'accept-encoding' : 'deflate, gzip' } errors = 0 while True : try : log . info ( "connecting to sample stream" ) resp = self . post ( url , params ,... | Returns a small random sample of all public statuses . The Tweets returned by the default access level are the same so if two different clients connect to this endpoint they will see the same Tweets . |
def dehydrate(self, iterator):
    """Yield the string id of each tweet in an iterator of tweet JSON lines.

    Lines that cannot be parsed (or lack an ``id_str`` key) are logged and
    skipped so one bad record does not abort the whole stream.
    """
    for raw in iterator:
        try:
            tweet = json.loads(raw)
            yield tweet['id_str']
        except Exception as err:
            # best-effort: report the bad line and keep going
            log.error("uhoh: %s\n" % err)
35,689 | def hydrate ( self , iterator ) : ids = [ ] url = "https://api.twitter.com/1.1/statuses/lookup.json" for tweet_id in iterator : tweet_id = str ( tweet_id ) tweet_id = tweet_id . strip ( ) ids . append ( tweet_id ) if len ( ids ) == 100 : log . info ( "hydrating %s ids" , len ( ids ) ) resp = self . post ( url , data = ... | Pass in an iterator of tweet ids and get back an iterator for the decoded JSON for each corresponding tweet . |
def retweets(self, tweet_id):
    """Yield up to the last 100 retweets of the tweet with the given id."""
    log.info("retrieving retweets of %s", tweet_id)
    # single literal; identical at runtime to the original's adjacent-literal form
    url = "https://api.twitter.com/1.1/statuses/retweets/{}.json".format(tweet_id)
    response = self.get(url, params={"count": 100})
    for retweet in response.json():
        yield retweet
def trends_available(self):
    """Return the list of locations for which Twitter tracks trends.

    Performs a GET against the trends/available endpoint and returns the
    decoded JSON payload. Any HTTP error raised by ``self.get`` propagates
    to the caller unchanged.
    """
    # The original wrapped self.get in `try/except HTTPError as e: raise e`,
    # which only re-raises the same exception — dead code, removed.
    url = 'https://api.twitter.com/1.1/trends/available.json'
    resp = self.get(url)
    return resp.json()
35,692 | def trends_place ( self , woeid , exclude = None ) : url = 'https://api.twitter.com/1.1/trends/place.json' params = { 'id' : woeid } if exclude : params [ 'exclude' ] = exclude try : resp = self . get ( url , params = params , allow_404 = True ) except requests . exceptions . HTTPError as e : if e . response . status_c... | Returns recent Twitter trends for the specified WOEID . If exclude == hashtags Twitter will remove hashtag trends from the response . |
35,693 | def replies ( self , tweet , recursive = False , prune = ( ) ) : yield tweet screen_name = tweet [ 'user' ] [ 'screen_name' ] tweet_id = tweet [ 'id_str' ] log . info ( "looking for replies to: %s" , tweet_id ) for reply in self . search ( "to:%s" % screen_name , since_id = tweet_id ) : if reply [ 'in_reply_to_status_i... | replies returns a generator of tweets that are replies for a given tweet . It includes the original tweet . If you would like to fetch the replies to the replies use recursive = True which will do a depth - first recursive walk of the replies . It also walk up the reply chain if you supply a tweet that is itself a repl... |
35,694 | def list_members ( self , list_id = None , slug = None , owner_screen_name = None , owner_id = None ) : assert list_id or ( slug and ( owner_screen_name or owner_id ) ) url = 'https://api.twitter.com/1.1/lists/members.json' params = { 'cursor' : - 1 } if list_id : params [ 'list_id' ] = list_id else : params [ 'slug' ]... | Returns the members of a list . |
35,695 | def connect ( self ) : if not ( self . consumer_key and self . consumer_secret and self . access_token and self . access_token_secret ) : raise RuntimeError ( "MissingKeys" ) if self . client : log . info ( "closing existing http session" ) self . client . close ( ) if self . last_response : log . info ( "closing last ... | Sets up the HTTP session to talk to Twitter . If one is active it is closed and another one is opened . |
35,696 | def get_keys ( self ) : env = os . environ . get if not self . consumer_key : self . consumer_key = env ( 'CONSUMER_KEY' ) if not self . consumer_secret : self . consumer_secret = env ( 'CONSUMER_SECRET' ) if not self . access_token : self . access_token = env ( 'ACCESS_TOKEN' ) if not self . access_token_secret : self... | Get the Twitter API keys . Order of precedence is command line environment config file . Return True if all the keys were found and False if not . |
35,697 | def validate_keys ( self ) : url = 'https://api.twitter.com/1.1/account/verify_credentials.json' keys_present = self . consumer_key and self . consumer_secret and self . access_token and self . access_token_secret if keys_present : try : self . connect ( ) self . get ( url ) except requests . HTTPError as e : if e . re... | Validate the keys provided are authentic credentials . |
def init_app(self, app, **kwargs):
    """Attach dynaconf-backed settings to a Flask app.

    ``kwargs`` holds initial dynaconf configuration; it is merged into the
    stored kwargs before the settings object is built.
    """
    self.kwargs.update(kwargs)
    settings = self.dynaconf_instance
    if not settings:
        # no pre-built instance supplied — build one lazily from our kwargs
        settings = LazySettings(**self.kwargs)
    self.settings = settings
    app.config = self.make_config(app)
    app.dynaconf = self.settings
def get(self, key, default=None):
    """Look up ``key`` in the dynaconf settings, falling back to app config.

    The fallback value from ``Config.get`` is passed as the default to the
    dynaconf lookup, so dynaconf wins when the key exists there; otherwise
    the runtime app-config value (or ``default``) is returned.
    """
    fallback = Config.get(self, key, default)
    return self._settings.get(key, fallback)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.