idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
223,300
def load_arguments(self, command):
    """Load the arguments for the specified command."""
    from knack.arguments import ArgumentsContext
    self.cli_ctx.raise_event(EVENT_CMDLOADER_LOAD_ARGUMENTS,
                             cmd_tbl=self.command_table, command=command)
    try:
        self.command_table[command].load_arguments()
    except KeyError:
        return
    # ensure global 'cmd' is ignored
    with ArgumentsContext(self, '') as c:
        c.ignore('cmd')
    self._apply_parameter_info(command, self.command_table[command])
Load the arguments for the specified command
130
7
223,301
def create_command(self, name, operation, **kwargs):
    """Construct the command object that can then be added to the command table."""
    if not isinstance(operation, six.string_types):
        raise ValueError("Operation must be a string. Got '{}'".format(operation))
    name = ' '.join(name.split())
    client_factory = kwargs.get('client_factory', None)

    def _command_handler(command_args):
        # resolve the handler lazily, create the client (if any), then invoke
        op = CLICommandsLoader._get_op_handler(operation)
        client = client_factory(command_args) if client_factory else None
        result = op(client, **command_args) if client else op(**command_args)
        return result

    def arguments_loader():
        return list(extract_args_from_signature(
            CLICommandsLoader._get_op_handler(operation),
            excluded_params=self.excluded_command_handler_args))

    def description_loader():
        return extract_full_summary_from_signature(
            CLICommandsLoader._get_op_handler(operation))

    kwargs['arguments_loader'] = arguments_loader
    kwargs['description_loader'] = description_loader
    cmd = self.command_cls(self.cli_ctx, name, _command_handler, **kwargs)
    return cmd
Constructs the command object that can then be added to the command table
303
14
223,302
def _get_op_handler ( operation ) : try : mod_to_import , attr_path = operation . split ( '#' ) op = import_module ( mod_to_import ) for part in attr_path . split ( '.' ) : op = getattr ( op , part ) if isinstance ( op , types . FunctionType ) : return op return six . get_method_function ( op ) except ( ValueError , AttributeError ) : raise ValueError ( "The operation '{}' is invalid." . format ( operation ) )
Import and load the operation handler
122
6
223,303
def command(self, name, handler_name, **kwargs):
    """Register a command into the command table."""
    import copy
    command_name = '{} {}'.format(self.group_name, name) if self.group_name else name
    command_kwargs = copy.deepcopy(self.group_kwargs)
    command_kwargs.update(kwargs)
    # don't inherit deprecation info from command group
    command_kwargs['deprecate_info'] = kwargs.get('deprecate_info', None)
    self.command_loader._populate_command_group_table_with_subgroups(  # pylint: disable=protected-access
        ' '.join(command_name.split()[:-1]))
    self.command_loader.command_table[command_name] = self.command_loader.create_command(
        command_name, self.operations_tmpl.format(handler_name), **command_kwargs)
Register a command into the command table
217
7
223,304
def _rudimentary_get_command ( self , args ) : nouns = [ ] command_names = self . commands_loader . command_table . keys ( ) for arg in args : if arg and arg [ 0 ] != '-' : nouns . append ( arg ) else : break def _find_args ( args ) : search = ' ' . join ( args ) . lower ( ) return next ( ( x for x in command_names if x . startswith ( search ) ) , False ) # since the command name may be immediately followed by a positional arg, strip those off while nouns and not _find_args ( nouns ) : del nouns [ - 1 ] # ensure the command string is case-insensitive for i in range ( len ( nouns ) ) : args [ i ] = args [ i ] . lower ( ) return ' ' . join ( nouns )
Rudimentary parsing to get the command
191
8
223,305
def get_completion_args(self, is_completion=False, comp_line=None):  # pylint: disable=no-self-use
    """Get the args that will be used for tab completion, if completion is active."""
    is_completion = is_completion or os.environ.get(ARGCOMPLETE_ENV_NAME)
    comp_line = comp_line or os.environ.get('COMP_LINE')
    # The first item is the exe name so ignore that.
    return comp_line.split()[1:] if is_completion and comp_line else None
Get the args that will be used for tab completion, if completion is active.
121
15
223,306
def out(self, obj, formatter=None, out_file=None):  # pylint: disable=no-self-use
    """Produce the output using the command result.

    Writes straight to `out_file`; returns nothing.
    """
    if not isinstance(obj, CommandResultItem):
        raise TypeError('Expected {} got {}'.format(CommandResultItem.__name__, type(obj)))
    import platform
    import colorama
    if platform.system() == 'Windows':
        # wrap the stream so ANSI escapes render on Windows consoles
        out_file = colorama.AnsiToWin32(out_file).stream
    output = formatter(obj)
    try:
        print(output, file=out_file, end='')
    except IOError as ex:
        if ex.errno == errno.EPIPE:
            pass  # downstream pipe closed; nothing to do
        else:
            raise
    except UnicodeEncodeError:
        # fall back to an ASCII-safe rendering
        print(output.encode('ascii', 'ignore').decode('utf-8', 'ignore'),
              file=out_file, end='')
Produces the output using the command result . The method does not return a result as the output is written straight to the output file .
198
27
223,307
def update_status_with_media(self, **params):  # pragma: no cover
    """Update the authenticating user's status and attach media (deprecated).

    Creates a Tweet with a picture attached; use Twython.upload_media instead.
    """
    warnings.warn(
        'This method is deprecated. You should use Twython.upload_media instead.',
        TwythonDeprecationWarning,
        stacklevel=2)
    return self.post('statuses/update_with_media', params=params)
Updates the authenticating user's current status and attaches media for upload. In other words, it creates a Tweet with a picture attached.
76
27
223,308
def create_metadata(self, **params):
    """Add metadata to a media element, e.g. image descriptions for the visually impaired."""
    params = json.dumps(params)
    return self.post("https://upload.twitter.com/1.1/media/metadata/create.json", params=params)
Adds metadata to a media element such as image descriptions for visually impaired .
51
14
223,309
def _get_error_message ( self , response ) : error_message = 'An error occurred processing your request.' try : content = response . json ( ) # {"errors":[{"code":34,"message":"Sorry, # that page does not exist"}]} error_message = content [ 'errors' ] [ 0 ] [ 'message' ] except TypeError : error_message = content [ 'errors' ] except ValueError : # bad json data from Twitter for an error pass except ( KeyError , IndexError ) : # missing data so fallback to default message pass return error_message
Parse and return the first error message
123
8
223,310
def request(self, endpoint, method='GET', params=None, version='1.1', json_encoded=False):
    """Return dict of response received from Twitter's API."""
    if endpoint.startswith('http://'):
        raise TwythonError('api.twitter.com is restricted to SSL/TLS traffic.')
    # In case they want to pass a full Twitter URL
    # i.e. https://api.twitter.com/1.1/search/tweets.json
    if endpoint.startswith('https://'):
        url = endpoint
    else:
        url = '%s/%s.json' % (self.api_url % version, endpoint)
    content = self._request(url, method=method, params=params,
                            api_call=url, json_encoded=json_encoded)
    return content
Return dict of response received from Twitter's API
178
9
223,311
def get_lastfunction_header(self, header, default_return_value=None):
    """Return a specific header from the last API call (default if absent)."""
    if self._last_call is None:
        raise TwythonError('This function must be called after an API call. \
It delivers header information.')
    return self._last_call['headers'].get(header, default_return_value)
Returns a specific header from the last API call This will return None if the header is not present
77
19
223,312
def get_authentication_tokens(self, callback_url=None, force_login=False, screen_name=''):
    """Return a dict including an authorization URL (auth_url) to direct a user to."""
    if self.oauth_version != 1:
        raise TwythonError('This method can only be called when your \
OAuth version is 1.0.')
    request_args = {}
    if callback_url:
        request_args['oauth_callback'] = callback_url
    response = self.client.get(self.request_token_url, params=request_args)
    if response.status_code == 401:
        raise TwythonAuthError(response.content, error_code=response.status_code)
    elif response.status_code != 200:
        raise TwythonError(response.content, error_code=response.status_code)
    request_tokens = dict(parse_qsl(response.content.decode('utf-8')))
    if not request_tokens:
        raise TwythonError('Unable to decode request tokens.')
    oauth_callback_confirmed = request_tokens.get('oauth_callback_confirmed') == 'true'
    auth_url_params = {
        'oauth_token': request_tokens['oauth_token'],
    }
    if force_login:
        auth_url_params.update({
            'force_login': force_login,
            'screen_name': screen_name
        })
    # Use old-style callback argument if server didn't accept new-style
    if callback_url and not oauth_callback_confirmed:
        auth_url_params['oauth_callback'] = self.callback_url
    request_tokens['auth_url'] = self.authenticate_url + '?' + urlencode(auth_url_params)
    return request_tokens
Returns a dict including an authorization URL auth_url to direct a user to
398
15
223,313
def obtain_access_token(self):
    """Return an OAuth 2 access token to make OAuth 2 authenticated read-only calls."""
    if self.oauth_version != 2:
        raise TwythonError('This method can only be called when your \
OAuth version is 2.0.')
    data = {'grant_type': 'client_credentials'}
    basic_auth = HTTPBasicAuth(self.app_key, self.app_secret)
    try:
        response = self.client.post(self.request_token_url,
                                    data=data, auth=basic_auth)
        content = response.content.decode('utf-8')
        try:
            content = content.json()
        except AttributeError:
            content = json.loads(content)
        access_token = content['access_token']
    except (KeyError, ValueError, requests.exceptions.RequestException):
        raise TwythonAuthError('Unable to obtain OAuth 2 access token.')
    else:
        return access_token
Returns an OAuth 2 access token to make OAuth 2 authenticated read - only calls .
198
18
223,314
def construct_api_url(api_url, **params):
    """Construct a Twitter API url, encoded with parameters."""
    querystring = []
    params, _ = _transparent_params(params or {})
    params = requests.utils.to_key_val_list(params)
    for (k, v) in params:
        querystring.append('%s=%s' % (Twython.encode(k), quote_plus(Twython.encode(v))))
    return '%s?%s' % (api_url, '&'.join(querystring))
Construct a Twitter API url encoded with parameters
122
8
223,315
def cursor(self, function, return_pages=False, **params):
    """Return a generator for results that match a specified query.

    Fix: a generator must terminate with ``return``; ``raise StopIteration``
    inside a generator body is converted to RuntimeError since Python 3.7
    (PEP 479), so the original code crashed instead of ending iteration.
    """
    if not callable(function):
        raise TypeError('.cursor() takes a Twython function as its first \
argument. Did you provide the result of a \
function call?')
    if not hasattr(function, 'iter_mode'):
        raise TwythonError('Unable to create generator for Twython \
method "%s"' % function.__name__)
    while True:
        content = function(**params)
        if not content:
            return  # no (more) results: end iteration (was: raise StopIteration)
        if hasattr(function, 'iter_key'):
            results = content.get(function.iter_key)
        else:
            results = content
        if return_pages:
            yield results
        else:
            for result in results:
                yield result
        if function.iter_mode == 'cursor' and content['next_cursor_str'] == '0':
            return  # cursor exhausted (was: raise StopIteration)
        try:
            if function.iter_mode == 'id':
                # Set max_id in params to one less than lowest tweet id
                if hasattr(function, 'iter_metadata'):
                    # Get supplied next max_id
                    metadata = content.get(function.iter_metadata)
                    if 'next_results' in metadata:
                        next_results = urlsplit(metadata['next_results'])
                        params = dict(parse_qsl(next_results.query))
                    else:
                        # No more results (was: raise StopIteration)
                        return
                else:
                    # Twitter gives tweets in reverse chronological order:
                    params['max_id'] = str(int(content[-1]['id_str']) - 1)
            elif function.iter_mode == 'cursor':
                params['cursor'] = content['next_cursor_str']
        except (TypeError, ValueError):  # pragma: no cover
            raise TwythonError('Unable to generate next page of search \
results, `page` is not a number.')
        except (KeyError, AttributeError):  # pragma: no cover
            raise TwythonError('Unable to generate next page of search \
results, content has unexpected structure.')
Returns a generator for results that match a specified query .
458
11
223,316
def _request(self, url, method='GET', params=None):
    """Internal stream request handling."""
    self.connected = True
    retry_counter = 0
    method = method.lower()
    func = getattr(self.client, method)
    params, _ = _transparent_params(params)

    def _send(retry_counter):
        requests_args = {}
        for k, v in self.client_args.items():
            # Maybe this should be set as a class
            # variable and only done once?
            if k in ('timeout', 'allow_redirects', 'verify'):
                requests_args[k] = v
        while self.connected:
            try:
                if method == 'get':
                    requests_args['params'] = params
                else:
                    requests_args['data'] = params
                response = func(url, **requests_args)
            except requests.exceptions.Timeout:
                self.on_timeout()
            else:
                if response.status_code != 200:
                    self.on_error(response.status_code, response.content)
                    if self.retry_count and (self.retry_count - retry_counter) > 0:
                        time.sleep(self.retry_in)
                        retry_counter += 1
                        _send(retry_counter)
                return response

    while self.connected:
        response = _send(retry_counter)
        for line in response.iter_lines(self.chunk_size):
            if not self.connected:
                break
            if line:
                try:
                    if is_py3:
                        line = line.decode('utf-8')
                    data = json.loads(line)
                except ValueError:  # pragma: no cover
                    self.on_error(response.status_code, 'Unable to decode response, \
not valid JSON.')
                else:
                    if self.on_success(data):  # pragma: no cover
                        for message_type in self.handlers:
                            if message_type in data:
                                handler = getattr(self, 'on_' + message_type, None)
                                if handler and callable(handler) and not handler(data.get(message_type)):
                                    break
        response.close()
Internal stream request handling
462
4
223,317
def optimize(self):
    """Sort algorithm implementations by speed."""
    # load benchmarks results
    with open(LIBRARIES_FILE, 'r') as f:
        libs_data = json.load(f)
    # optimize
    for alg, libs_names in libs_data.items():
        libs = self.get_libs(alg)
        if not libs:
            continue
        # drop slow libs
        self.libs[alg] = [
            lib for lib in libs
            if [lib.module_name, lib.func_name] in libs_names
        ]
        # sort libs by speed
        self.libs[alg].sort(
            key=lambda lib: libs_names.index([lib.module_name, lib.func_name]))
Sort algorithm implementations by speed .
162
6
223,318
def clone(self):
    """Clone library manager prototype."""
    obj = self.__class__()
    obj.libs = deepcopy(self.libs)
    return obj
Clone library manager prototype
30
5
223,319
def normalized_distance(self, *sequences):
    """Get distance from 0 to 1."""
    return float(self.distance(*sequences)) / self.maximum(*sequences)
Get distance from 0 to 1
30
6
223,320
def external_answer(self, *sequences):
    """Try to get answer from known external libraries."""
    # if this feature disabled
    if not getattr(self, 'external', False):
        return
    # all external libs doesn't support test_func
    if hasattr(self, 'test_func') and self.test_func is not self._ident:
        return
    # try to get external libs for algorithm
    libs = libraries.get_libs(self.__class__.__name__)
    for lib in libs:
        # if conditions not satisfied
        if not lib.check_conditions(self, *sequences):
            continue
        # if library is not installed yet
        if not lib.get_function():
            continue
        prepared_sequences = lib.prepare(*sequences)
        # fail side libraries silently and try next libs
        try:
            return lib.func(*prepared_sequences)
        except Exception:
            pass
Try to get answer from known external libraries .
181
9
223,321
def _ident ( * elements ) : try : # for hashable elements return len ( set ( elements ) ) == 1 except TypeError : # for unhashable elements for e1 , e2 in zip ( elements , elements [ 1 : ] ) : if e1 != e2 : return False return True
Return True if all sequences are equal .
64
8
223,322
def _get_sequences ( self , * sequences ) : # by words if not self . qval : return [ s . split ( ) for s in sequences ] # by chars if self . qval == 1 : return sequences # by n-grams return [ find_ngrams ( s , self . qval ) for s in sequences ]
Prepare sequences .
73
4
223,323
def _get_counters ( self , * sequences ) : # already Counters if all ( isinstance ( s , Counter ) for s in sequences ) : return sequences return [ Counter ( s ) for s in self . _get_sequences ( * sequences ) ]
Prepare sequences and convert it to Counters .
56
10
223,324
def _count_counters ( self , counter ) : if getattr ( self , 'as_set' , False ) : return len ( set ( counter ) ) else : return sum ( counter . values ( ) )
Return all elements count from Counter
46
6
223,325
def make_inst2():
    """creates example data set 2"""
    I, d = multidict({1: 45, 2: 20, 3: 30, 4: 30})   # demand
    J, M = multidict({1: 35, 2: 50, 3: 40})          # capacity
    c = {(1, 1): 8, (1, 2): 9, (1, 3): 14,           # {(customer,factory) : cost<float>}
         (2, 1): 6, (2, 2): 12, (2, 3): 9,
         (3, 1): 10, (3, 2): 13, (3, 3): 16,
         (4, 1): 9, (4, 2): 7, (4, 3): 5,
         }
    return I, J, c, d, M
creates example data set 2
185
6
223,326
def addCuts(self, checkonly):
    """add cuts if necessary and return whether model is feasible"""
    cutsadded = False
    edges = []
    x = self.model.data
    for (i, j) in x:
        if self.model.getVal(x[i, j]) > .5:
            if i != V[0] and j != V[0]:
                edges.append((i, j))
    G = networkx.Graph()
    G.add_edges_from(edges)
    Components = list(networkx.connected_components(G))
    for S in Components:
        S_card = len(S)
        q_sum = sum(q[i] for i in S)
        NS = int(math.ceil(float(q_sum) / Q))
        S_edges = [(i, j) for i in S for j in S if i < j and (i, j) in edges]
        if S_card >= 3 and (len(S_edges) >= S_card or NS > 1):
            cutsadded = True
            if checkonly:
                break
            else:
                self.model.addCons(
                    quicksum(x[i, j] for i in S for j in S if j > i) <= S_card - NS)
                print("adding cut for", S_edges)
    return cutsadded
add cuts if necessary and return whether model is feasible
281
10
223,327
def distCEIL2D(x1, y1, x2, y2):
    """returns smallest integer not less than the distance of two points"""
    xdiff = x2 - x1
    ydiff = y2 - y1
    return int(math.ceil(math.sqrt(xdiff * xdiff + ydiff * ydiff)))
returns smallest integer not less than the distance of two points
63
12
223,328
def read_atsplib(filename):
    """basic function for reading a ATSP problem on the TSPLIB format

    NOTE: only works for explicit matrices.

    Fixes: open a gzipped file in *text* mode ('rt') -- ``gzip.open`` with
    mode 'r' yields bytes in Python 3, so the ``str`` searches below raised
    TypeError -- and close the file handle via a context manager.
    """
    if filename[-3:] == ".gz":
        with gzip.open(filename, 'rt') as f:
            data = f.readlines()
    else:
        with open(filename, 'r') as f:
            data = f.readlines()

    for line in data:
        if line.find("DIMENSION") >= 0:
            n = int(line.split()[1])
            break
    else:
        raise IOError("'DIMENSION' keyword not found in file '%s'" % filename)

    for line in data:
        if line.find("EDGE_WEIGHT_TYPE") >= 0:
            if line.split()[1] == "EXPLICIT":
                break
    else:
        raise IOError("'EDGE_WEIGHT_TYPE' is not 'EXPLICIT' in file '%s'" % filename)

    for k, line in enumerate(data):
        if line.find("EDGE_WEIGHT_SECTION") >= 0:
            break
    else:
        raise IOError("'EDGE_WEIGHT_SECTION' not found in file '%s'" % filename)

    c = {}
    # flatten list of distances
    dist = []
    for line in data[k + 1:]:
        if line.find("EOF") >= 0:
            break
        for val in line.split():
            dist.append(int(val))

    # rebuild the (1-indexed) n x n cost matrix from the flat list
    k = 0
    for i in range(n):
        for j in range(n):
            c[i + 1, j + 1] = dist[k]
            k += 1
    return n, c
basic function for reading a ATSP problem on the TSPLIB format
358
14
223,329
def multidict(D):
    """creates a multidictionary

    Given a dict mapping keys to scalars or to equal-length sequences,
    return ``[keys, d0, d1, ...]`` where ``di[k] == D[k][i]`` (or
    ``d0[k] == D[k]`` for scalar values).

    Fix: catch only ``TypeError`` (raised by ``len()`` on a scalar value)
    instead of a bare ``except`` that would also hide unrelated errors.
    """
    keys = list(D.keys())
    if len(keys) == 0:
        return [[]]
    try:
        N = len(D[keys[0]])
        islist = True
    except TypeError:
        # scalar values: a single dict of the values themselves
        N = 1
        islist = False
    dlist = [dict() for d in range(N)]
    for k in keys:
        if islist:
            for i in range(N):
                dlist[i][k] = D[k][i]
        else:
            dlist[0][k] = D[k]
    return [keys] + dlist
creates a multidictionary
128
6
223,330
def register_plugin_module(mod):
    """Find plugins in given module"""
    for k, v in load_plugins_from_module(mod).items():
        if k:
            # some loaders yield a sequence of names; take the first
            if isinstance(k, (list, tuple)):
                k = k[0]
            global_registry[k] = v
Find plugins in given module
61
5
223,331
def register_plugin_dir(path):
    """Find plugins in given directory"""
    import glob
    for f in glob.glob(path + '/*.py'):
        for k, v in load_plugins_from_module(f).items():
            if k:
                global_registry[k] = v
Find plugins in given directory
60
5
223,332
def describe(self):
    """Information about this parameter"""
    desc = {
        'name': self.name,
        'description': self.description,
        # the Parameter might not have a type at all
        'type': self.type or 'unknown',
    }
    for attr in ['min', 'max', 'allowed', 'default']:
        v = getattr(self, attr)
        if v is not None:
            desc[attr] = v
    return desc
Information about this parameter
96
4
223,333
def validate(self, value):
    """Does value meet parameter requirements?"""
    if self.type is not None:
        value = coerce(self.type, value)
    if self.min is not None and value < self.min:
        raise ValueError('%s=%s is less than %s' % (self.name, value, self.min))
    if self.max is not None and value > self.max:
        raise ValueError('%s=%s is greater than %s' % (self.name, value, self.max))
    if self.allowed is not None and value not in self.allowed:
        raise ValueError('%s=%s is not one of the allowed values: %s' % (
            self.name, value, ','.join(map(str, self.allowed))))
    return value
Does value meet parameter requirements?
175
6
223,334
def describe(self):
    """Basic information about this entry"""
    if isinstance(self._plugin, list):
        pl = [p.name for p in self._plugin]
    elif isinstance(self._plugin, dict):
        pl = {k: classname(v) for k, v in self._plugin.items()}
    else:
        pl = self._plugin if isinstance(self._plugin, str) else self._plugin.name
    return {
        'name': self._name,
        'container': self._container,
        'plugin': pl,
        'description': self._description,
        'direct_access': self._direct_access,
        'user_parameters': [u.describe() for u in self._user_parameters],
        'metadata': self._metadata,
        'args': self._open_args
    }
Basic information about this entry
188
5
223,335
def get(self, **user_parameters):
    """Instantiate the DataSource for the given parameters"""
    plugin, open_args = self._create_open_args(user_parameters)
    data_source = plugin(**open_args)
    data_source.catalog_object = self._catalog
    data_source.name = self.name
    data_source.description = self._description
    data_source.cat = self._catalog
    return data_source
Instantiate the DataSource for the given parameters
92
9
223,336
def _load(self, reload=False):
    """Load text of catalog file and pass to parse"""
    if self.autoreload or reload:
        # First, we load from YAML, failing if syntax errors are found
        options = self.storage_options or {}
        if hasattr(self.path, 'path') or hasattr(self.path, 'read'):
            # already an open-file-like object
            file_open = self.path
            self.path = make_path_posix(
                getattr(self.path, 'path', getattr(self.path, 'name', 'file')))
        else:
            file_open = open_files(self.path, mode='rb', **options)
            assert len(file_open) == 1
            file_open = file_open[0]
        self._dir = get_dir(self.path)
        with file_open as f:
            text = f.read().decode()
        if "!template " in text:
            logger.warning("Use of '!template' deprecated - fixing")
            text = text.replace('!template ', '')
        self.parse(text)
Load text of catalog file and pass to parse
234
11
223,337
def parse(self, text):
    """Create entries from catalog text"""
    self.text = text
    data = yaml_load(self.text)
    if data is None:
        raise exceptions.CatalogException('No YAML data in file')
    # Second, we validate the schema and semantics
    context = dict(root=self._dir)
    result = CatalogParser(data, context=context, getenv=self.getenv,
                           getshell=self.getshell)
    if result.errors:
        raise exceptions.ValidationError(
            "Catalog '{}' has validation errors:\n\n{}"
            "".format(self.path, "\n".join(result.errors)), result.errors)
    cfg = result.data
    self._entries = {}
    for entry in cfg['data_sources']:
        entry._catalog = self
        self._entries[entry.name] = entry
    self.metadata = cfg.get('metadata', {})
    self.name = self.name or cfg.get('name') or self.name_from_path
    self.description = self.description or cfg.get('description')
Create entries from catalog text
248
5
223,338
def name_from_path(self):
    """If catalog file is named "catalog", take name from parent directory."""
    name = os.path.splitext(os.path.basename(self.path))[0]
    if name == 'catalog':
        name = os.path.basename(os.path.dirname(self.path))
    return name.replace('.', '_')
If catalog is named catalog take name from parent directory
76
10
223,339
def get(self):
    """Access one source's info."""
    head = self.request.headers
    name = self.get_argument('name')
    if self.auth.allow_connect(head):
        if 'source_id' in head:
            cat = self._cache.get(head['source_id'])
        else:
            cat = self._catalog
        try:
            source = cat[name]
        except KeyError:
            msg = 'No such entry'
            raise tornado.web.HTTPError(status_code=404, log_message=msg,
                                        reason=msg)
        if self.auth.allow_access(head, source, self._catalog):
            info = source.describe()
            info['name'] = name
            source_info = dict(source=info)
            self.write(msgpack.packb(source_info, use_bin_type=True))
            return
    msg = 'Access forbidden'
    raise tornado.web.HTTPError(status_code=403, log_message=msg, reason=msg)
Access one source's info.
215
6
223,340
def preprocess(cls, cat):
    """Function to run on each cat input: open path strings as catalogs."""
    if isinstance(cat, str):
        cat = intake.open_catalog(cat)
    return cat
Function to run on each cat input
32
7
223,341
def expand_nested(self, cats):
    """Populate widget with nested catalogs"""
    down = '│'
    right = '└──'

    def get_children(parent):
        return [e() for e in parent._entries.values()
                if e._container == 'catalog']

    if len(cats) == 0:
        return
    cat = cats[0]
    old = list(self.options.items())
    name = next(k for k, v in old if v == cat)
    index = next(i for i, (k, v) in enumerate(old) if v == cat)
    if right in name:
        # already a nested entry: extend its tree-drawing prefix
        prefix = f'{name.split(right)[0]}{down} {right}'
    else:
        prefix = right
    children = get_children(cat)
    for i, child in enumerate(children):
        old.insert(index + i + 1, (f'{prefix} {child.name}', child))
    self.widget.options = dict(old)
Populate widget with nested catalogs
212
7
223,342
def collapse_nested(self, cats, max_nestedness=10):
    """Collapse any items that are nested under cats.

    max_nestedness acts as a fail-safe to prevent infinite looping.
    """
    children = []
    removed = set()
    nestedness = max_nestedness
    old = list(self.widget.options.values())
    nested = [cat for cat in old if getattr(cat, 'cat') is not None]
    parents = {cat.cat for cat in nested}
    parents_to_remove = cats
    while len(parents_to_remove) > 0 and nestedness > 0:
        for cat in nested:
            if cat.cat in parents_to_remove:
                children.append(cat)
        removed = removed.union(parents_to_remove)
        nested = [cat for cat in nested if cat not in children]
        parents_to_remove = {c for c in children if c in parents - removed}
        nestedness -= 1
    self.remove(children)
Collapse any items that are nested under cats . max_nestedness acts as a fail - safe to prevent infinite looping .
184
27
223,343
def remove_selected(self, *args):
    """Remove the selected catalog, plus any catalogs nested under it.

    Accepts arbitrary positional args so it can be wired to button callbacks.
    """
    self.collapse_nested(self.selected)
    self.remove(self.selected)
Remove the selected catalog - allow the passing of arbitrary args so that buttons work . Also remove any nested catalogs .
30
23
223,344
def add(self, key, source):
    """Add the persisted source to the store under the given key"""
    from intake.catalog.local import LocalCatalogEntry
    try:
        with self.fs.open(self.path, 'rb') as f:
            data = yaml.safe_load(f)
    except IOError:
        # store file does not exist yet: start empty
        data = {'sources': {}}
    ds = source._yaml()['sources'][source.name]
    data['sources'][key] = ds
    with self.fs.open(self.path, 'wb') as fo:
        fo.write(yaml.dump(data, default_flow_style=False).encode())
    self._entries[key] = LocalCatalogEntry(
        name=ds['metadata']['original_name'],
        direct_access=True, cache=[], parameters=[],
        catalog_dir=None, **data['sources'][key])
Add the persisted source to the store under the given key
207
11
223,345
def get_tok(self, source):
    """Get string token from object"""
    if isinstance(source, str):
        return source
    if isinstance(source, CatalogEntry):
        return source._metadata.get('original_tok', source._tok)
    if isinstance(source, DataSource):
        return source.metadata.get('original_tok', source._tok)
    raise IndexError
Get string token from object
86
5
223,346
def remove(self, source, delfiles=True):
    """Remove a dataset from the persist store"""
    source = self.get_tok(source)
    with self.fs.open(self.path, 'rb') as f:
        data = yaml.safe_load(f.read().decode())
    data['sources'].pop(source, None)
    with self.fs.open(self.path, 'wb') as fo:
        fo.write(yaml.dump(data, default_flow_style=False).encode())
    if delfiles:
        path = posixpath.join(self.pdir, source)
        try:
            self.fs.rm(path, True)
        except Exception as e:
            # best-effort delete of on-disk data
            logger.debug("Failed to delete persisted data dir %s" % path)
    self._entries.pop(source, None)
Remove a dataset from the persist store
183
7
223,347
def backtrack(self, source):
    """Given a unique key in the store, recreate the original source"""
    key = self.get_tok(source)
    s = self[key]()
    meta = s.metadata['original_source']
    cls = meta['cls']
    args = meta['args']
    kwargs = meta['kwargs']
    cls = import_name(cls)
    sout = cls(*args, **kwargs)
    sout.metadata = s.metadata['original_metadata']
    sout.name = s.metadata['original_name']
    return sout
Given a unique key in the store recreate original source
127
10
223,348
def refresh(self, key):
    """Recreate and re-persist the source for the given unique ID"""
    s0 = self[key]
    s = self.backtrack(key)
    s.persist(**s0.metadata['persist_kwargs'])
Recreate and re - persist the source for the given unique ID
44
14
223,349
def cats(self, cats):
    """Set sources from a list of cats"""
    sources = []
    for cat in coerce_to_list(cats):
        sources.extend([entry for entry in cat._entries.values()
                        if entry._container != 'catalog'])
    self.items = sources
Set sources from a list of cats
60
7
223,350
def main(argv=None):
    """Execute the intake command line program."""
    from intake.cli.bootstrap import main as _main
    return _main('Intake Catalog CLI', subcommands.all, argv or sys.argv)
Execute the intake command line program .
47
8
223,351
def _load_metadata ( self ) : if self . _schema is None : self . _schema = self . _get_schema ( ) self . datashape = self . _schema . datashape self . dtype = self . _schema . dtype self . shape = self . _schema . shape self . npartitions = self . _schema . npartitions self . metadata . update ( self . _schema . extra_metadata )
load metadata only if needed
103
5
223,352
def yaml(self, with_plugin=False):
    """Return YAML representation of this data-source"""
    from yaml import dump
    data = self._yaml(with_plugin=with_plugin)
    return dump(data, default_flow_style=False)
Return YAML representation of this data - source
47
10
223,353
def discover(self):
    """Open resource and populate the source attributes."""
    self._load_metadata()
    return dict(datashape=self.datashape, dtype=self.dtype,
                shape=self.shape, npartitions=self.npartitions,
                metadata=self.metadata)
Open resource and populate the source attributes .
57
8
223,354
def read_chunked(self):
    """Return iterator over container fragments of data source"""
    self._load_metadata()
    for i in range(self.npartitions):
        yield self._get_partition(i)
Return iterator over container fragments of data source
41
8
223,355
def read_partition(self, i):
    """Return a part of the data corresponding to i-th partition.

    Raises IndexError when i is outside [0, npartitions).
    """
    self._load_metadata()
    if i < 0 or i >= self.npartitions:
        raise IndexError('%d is out of range' % i)
    return self._get_partition(i)
Return a part of the data corresponding to i - th partition .
58
13
223,356
def plot(self):
    """Returns a hvPlot object to provide a high-level plotting API."""
    try:
        from hvplot import hvPlot
    except ImportError:
        raise ImportError("The intake plotting API requires hvplot."
                          "hvplot may be installed with:\n\n"
                          "`conda install -c pyviz hvplot` or "
                          "`pip install hvplot`.")
    metadata = self.metadata.get('plot', {})
    fields = self.metadata.get('fields', {})
    for attrs in fields.values():
        # hvplot expects ranges as tuples
        if 'range' in attrs:
            attrs['range'] = tuple(attrs['range'])
    metadata['fields'] = fields
    plots = self.metadata.get('plots', {})
    return hvPlot(self, custom_plots=plots, **metadata)
Returns a hvPlot object to provide a high - level plotting API .
182
15
223,357
def persist ( self , ttl = None , * * kwargs ) : from . . container import container_map from . . container . persist import PersistStore import time if 'original_tok' in self . metadata : raise ValueError ( 'Cannot persist a source taken from the persist ' 'store' ) method = container_map [ self . container ] . _persist store = PersistStore ( ) out = method ( self , path = store . getdir ( self ) , * * kwargs ) out . description = self . description metadata = { 'timestamp' : time . time ( ) , 'original_metadata' : self . metadata , 'original_source' : self . __getstate__ ( ) , 'original_name' : self . name , 'original_tok' : self . _tok , 'persist_kwargs' : kwargs , 'ttl' : ttl , 'cat' : { } if self . cat is None else self . cat . __getstate__ ( ) } out . metadata = metadata out . name = self . name store . add ( self . _tok , out ) return out
Save data from this source to local persistent storage
250
9
223,358
def export ( self , path , * * kwargs ) : from . . container import container_map import time method = container_map [ self . container ] . _persist # may need to create path - access file-system method out = method ( self , path = path , * * kwargs ) out . description = self . description metadata = { 'timestamp' : time . time ( ) , 'original_metadata' : self . metadata , 'original_source' : self . __getstate__ ( ) , 'original_name' : self . name , 'original_tok' : self . _tok , 'persist_kwargs' : kwargs } out . metadata = metadata out . name = self . name return out . yaml ( )
Save this data for sharing with other people
166
8
223,359
def load_user_catalog ( ) : cat_dir = user_data_dir ( ) if not os . path . isdir ( cat_dir ) : return Catalog ( ) else : return YAMLFilesCatalog ( cat_dir )
Return a catalog for the platform - specific user Intake directory
52
11
223,360
def load_global_catalog ( ) : cat_dir = global_data_dir ( ) if not os . path . isdir ( cat_dir ) : return Catalog ( ) else : return YAMLFilesCatalog ( cat_dir )
Return a catalog for the environment - specific Intake directory
52
10
223,361
def global_data_dir ( ) : prefix = False if VIRTUALENV_VAR in os . environ : prefix = os . environ [ VIRTUALENV_VAR ] elif CONDA_VAR in os . environ : prefix = sys . prefix elif which ( 'conda' ) : # conda exists but is not activated prefix = conda_prefix ( ) if prefix : # conda and virtualenv use Linux-style directory pattern return make_path_posix ( os . path . join ( prefix , 'share' , 'intake' ) ) else : return appdirs . site_data_dir ( appname = 'intake' , appauthor = 'intake' )
Return the global Intake catalog dir for the current environment
157
10
223,362
def load_combo_catalog ( ) : user_dir = user_data_dir ( ) global_dir = global_data_dir ( ) desc = 'Generated from data packages found on your intake search path' cat_dirs = [ ] if os . path . isdir ( user_dir ) : cat_dirs . append ( user_dir + '/*.yaml' ) cat_dirs . append ( user_dir + '/*.yml' ) if os . path . isdir ( global_dir ) : cat_dirs . append ( global_dir + '/*.yaml' ) cat_dirs . append ( global_dir + '/*.yml' ) for path_dir in conf . get ( 'catalog_path' , [ ] ) : if path_dir != '' : if not path_dir . endswith ( ( 'yaml' , 'yml' ) ) : cat_dirs . append ( path_dir + '/*.yaml' ) cat_dirs . append ( path_dir + '/*.yml' ) else : cat_dirs . append ( path_dir ) return YAMLFilesCatalog ( cat_dirs , name = 'builtin' , description = desc )
Load a union of the user and global catalogs for convenience
275
12
223,363
def from_dict ( cls , entries , * * kwargs ) : from dask . base import tokenize cat = cls ( * * kwargs ) cat . _entries = entries cat . _tok = tokenize ( kwargs , entries ) return cat
Create Catalog from the given set of entries
60
8
223,364
def reload ( self ) : if time . time ( ) - self . updated > self . ttl : self . force_reload ( )
Reload catalog if sufficient time has passed
30
8
223,365
def filter ( self , func ) : return Catalog . from_dict ( { key : entry for key , entry in self . items ( ) if func ( entry ) } )
Create a Catalog of a subset of entries based on a condition
36
12
223,366
def walk ( self , sofar = None , prefix = None , depth = 2 ) : out = sofar if sofar is not None else { } prefix = [ ] if prefix is None else prefix for name , item in self . _entries . items ( ) : if item . _container == 'catalog' and depth > 1 : # recurse with default open parameters try : item ( ) . walk ( out , prefix + [ name ] , depth - 1 ) except Exception as e : print ( e ) pass # ignore inability to descend n = '.' . join ( prefix + [ name ] ) out [ n ] = item return out
Get all entries in this catalog and sub - catalogs
135
11
223,367
def serialize ( self ) : import yaml output = { "metadata" : self . metadata , "sources" : { } , "name" : self . name } for key , entry in self . items ( ) : output [ "sources" ] [ key ] = entry . _captured_init_kwargs return yaml . dump ( output )
Produce YAML version of this catalog .
77
10
223,368
def save ( self , url , storage_options = None ) : from dask . bytes import open_files with open_files ( [ url ] , * * ( storage_options or { } ) , mode = 'wt' ) [ 0 ] as f : f . write ( self . serialize ( ) )
Output this catalog to a file as YAML
66
10
223,369
def reset ( self ) : self . _page_cache . clear ( ) self . _direct_lookup_cache . clear ( ) self . _page_offset = 0 self . complete = self . _catalog . page_size is None
Clear caches to force a reload .
52
7
223,370
def cached_items ( self ) : for item in six . iteritems ( self . _page_cache ) : yield item for item in six . iteritems ( self . _direct_lookup_cache ) : yield item
Iterate over items that are already cached . Perform no requests .
47
13
223,371
def _get_http_args ( self , params ) : # Add the auth headers to any other headers headers = self . http_args . get ( 'headers' , { } ) if self . auth is not None : auth_headers = self . auth . get_headers ( ) headers . update ( auth_headers ) # build new http args with these headers http_args = self . http_args . copy ( ) if self . _source_id is not None : headers [ 'source_id' ] = self . _source_id http_args [ 'headers' ] = headers # Merge in any params specified by the caller. merged_params = http_args . get ( 'params' , { } ) merged_params . update ( params ) http_args [ 'params' ] = merged_params return http_args
Return a copy of the http_args
176
8
223,372
def _load ( self ) : # This will not immediately fetch any sources (entries). It will lazily # fetch sources from the server in paginated blocks when this Catalog # is iterated over. It will fetch specific sources when they are # accessed in this Catalog via __getitem__. if self . page_size is None : # Fetch all source info. params = { } else : # Just fetch the metadata now; fetch source info later in pages. params = { 'page_offset' : 0 , 'page_size' : 0 } http_args = self . _get_http_args ( params ) response = requests . get ( self . info_url , * * http_args ) try : response . raise_for_status ( ) except requests . HTTPError as err : six . raise_from ( RemoteCatalogError ( "Failed to fetch metadata." ) , err ) info = msgpack . unpackb ( response . content , * * unpack_kwargs ) self . metadata = info [ 'metadata' ] # The intake server now always provides a length, but the server may be # running an older version of intake. self . _len = info . get ( 'length' ) self . _entries . reset ( ) # If we are paginating (page_size is not None) and the server we are # working with is new enough to support pagination, info['sources'] # should be empty. If either of those things is not true, # info['sources'] will contain all the entries and we should cache them # now. if info [ 'sources' ] : # Signal that we are not paginating, even if we were asked to. self . _page_size = None self . _entries . _page_cache . update ( { source [ 'name' ] : RemoteCatalogEntry ( url = self . url , getenv = self . getenv , getshell = self . getshell , auth = self . auth , http_args = self . http_args , * * source ) for source in info [ 'sources' ] } )
Fetch metadata from remote . Entries are fetched lazily .
445
14
223,373
def nice_join ( seq , sep = ", " , conjunction = "or" ) : seq = [ str ( x ) for x in seq ] if len ( seq ) <= 1 or conjunction is None : return sep . join ( seq ) else : return "%s %s %s" % ( sep . join ( seq [ : - 1 ] ) , conjunction , seq [ - 1 ] )
Join together sequences of strings into English - friendly phrases using a conjunction when appropriate .
82
16
223,374
def output_notebook ( inline = True , logo = False ) : try : import hvplot except ImportError : raise ImportError ( "The intake plotting API requires hvplot." "hvplot may be installed with:\n\n" "`conda install -c pyviz hvplot` or " "`pip install hvplot`." ) import holoviews as hv return hv . extension ( 'bokeh' , inline = inline , logo = logo )
Load the notebook extension
107
4
223,375
def open_catalog ( uri = None , * * kwargs ) : driver = kwargs . pop ( 'driver' , None ) if driver is None : if uri : if ( ( isinstance ( uri , str ) and "*" in uri ) or ( ( isinstance ( uri , ( list , tuple ) ) ) and len ( uri ) > 1 ) ) : # glob string or list of files/globs driver = 'yaml_files_cat' elif isinstance ( uri , ( list , tuple ) ) and len ( uri ) == 1 : uri = uri [ 0 ] if "*" in uri [ 0 ] : # single glob string in a list driver = 'yaml_files_cat' else : # single filename in a list driver = 'yaml_file_cat' elif isinstance ( uri , str ) : # single URL if uri . startswith ( 'intake:' ) : # server driver = 'intake_remote' else : if uri . endswith ( ( '.yml' , '.yaml' ) ) : driver = 'yaml_file_cat' else : uri = uri . rstrip ( '/' ) + '/*.y*ml' driver = 'yaml_files_cat' else : # empty cat driver = 'catalog' if driver not in registry : raise ValueError ( 'Unknown catalog driver (%s), supply one of: %s' % ( driver , list ( sorted ( registry ) ) ) ) return registry [ driver ] ( uri , * * kwargs )
Create a Catalog object
350
4
223,376
def _persist ( source , path , * * kwargs ) : try : from intake_parquet import ParquetSource except ImportError : raise ImportError ( "Please install intake-parquet to use persistence" " on dataframe container sources." ) try : df = source . to_dask ( ) except NotImplementedError : import dask . dataframe as dd df = dd . from_pandas ( source . read ( ) , 1 ) df . to_parquet ( path , * * kwargs ) source = ParquetSource ( path , meta = { } ) return source
Save dataframe to local persistent store
129
7
223,377
def save_conf ( fn = None ) : if fn is None : fn = cfile ( ) try : os . makedirs ( os . path . dirname ( fn ) ) except ( OSError , IOError ) : pass with open ( fn , 'w' ) as f : yaml . dump ( conf , f )
Save current configuration to file as YAML
72
9
223,378
def load_conf ( fn = None ) : if fn is None : fn = cfile ( ) if os . path . isfile ( fn ) : with open ( fn ) as f : try : conf . update ( yaml_load ( f ) ) except Exception as e : logger . warning ( 'Failure to load config file "{fn}": {e}' '' . format ( fn = fn , e = e ) )
Update global config from YAML file
90
8
223,379
def intake_path_dirs ( path ) : if isinstance ( path , ( list , tuple ) ) : return path import re pattern = re . compile ( ";" if os . name == 'nt' else r"(?<!:):(?![:/])" ) return pattern . split ( path )
Return a list of directories from the intake path .
65
10
223,380
def load_env ( ) : # environment variables take precedence over conf file for key , envvar in [ [ 'cache_dir' , 'INTAKE_CACHE_DIR' ] , [ 'catalog_path' , 'INTAKE_PATH' ] , [ 'persist_path' , 'INTAKE_PERSIST_PATH' ] ] : if envvar in os . environ : conf [ key ] = make_path_posix ( os . environ [ envvar ] ) conf [ 'catalog_path' ] = intake_path_dirs ( conf [ 'catalog_path' ] ) for key , envvar in [ [ 'cache_disabled' , 'INTAKE_DISABLE_CACHING' ] , [ 'cache_download_progress' , 'INTAKE_CACHE_PROGRESS' ] ] : if envvar in os . environ : conf [ key ] = os . environ [ envvar ] . lower ( ) in [ 'true' , 't' , 'y' , 'yes' ] if 'INTAKE_LOG_LEVEL' in os . environ : conf [ 'logging' ] = os . environ [ 'INTAKE_LOG_LEVEL' ]
Analyse environment variables and update conf accordingly
276
11
223,381
def source ( self , source ) : BaseView . source . fset ( self , source ) if self . main_pane : self . main_pane . object = self . contents self . label_pane . object = self . label
When the source gets updated update the pane object
52
9
223,382
def contents ( self ) : if not self . _source : return ' ' * 100 # HACK - make sure that area is big contents = self . source . describe ( ) return pretty_describe ( contents )
String representation of the source's description
45
7
223,383
def _get_parts_of_format_string ( resolved_string , literal_texts , format_specs ) : _text = resolved_string bits = [ ] if literal_texts [ - 1 ] != '' and _text . endswith ( literal_texts [ - 1 ] ) : _text = _text [ : - len ( literal_texts [ - 1 ] ) ] literal_texts = literal_texts [ : - 1 ] format_specs = format_specs [ : - 1 ] for i , literal_text in enumerate ( literal_texts ) : if literal_text != '' : if literal_text not in _text : raise ValueError ( ( "Resolved string must match pattern. " "'{}' not found." . format ( literal_text ) ) ) bit , _text = _text . split ( literal_text , 1 ) if bit : bits . append ( bit ) elif i == 0 : continue else : try : format_spec = _validate_format_spec ( format_specs [ i - 1 ] ) bits . append ( _text [ 0 : format_spec ] ) _text = _text [ format_spec : ] except : if i == len ( format_specs ) - 1 : format_spec = _validate_format_spec ( format_specs [ i ] ) bits . append ( _text [ : - format_spec ] ) bits . append ( _text [ - format_spec : ] ) _text = [ ] else : _validate_format_spec ( format_specs [ i - 1 ] ) if _text : bits . append ( _text ) if len ( bits ) > len ( [ fs for fs in format_specs if fs is not None ] ) : bits = bits [ 1 : ] return bits
Inner function of reverse_format returns the resolved value for each field in pattern .
392
17
223,384
def reverse_formats ( format_string , resolved_strings ) : from string import Formatter fmt = Formatter ( ) # get the fields from the format_string field_names = [ i [ 1 ] for i in fmt . parse ( format_string ) if i [ 1 ] ] # itialize the args dict with an empty dict for each field args = { field_name : [ ] for field_name in field_names } for resolved_string in resolved_strings : for field , value in reverse_format ( format_string , resolved_string ) . items ( ) : args [ field ] . append ( value ) return args
Reverse the string method format for a list of strings .
135
13
223,385
def reverse_format ( format_string , resolved_string ) : from string import Formatter from datetime import datetime fmt = Formatter ( ) args = { } # ensure that format_string is in posix format format_string = make_path_posix ( format_string ) # split the string into bits literal_texts , field_names , format_specs , conversions = zip ( * fmt . parse ( format_string ) ) if not any ( field_names ) : return { } for i , conversion in enumerate ( conversions ) : if conversion : raise ValueError ( ( 'Conversion not allowed. Found on {}.' . format ( field_names [ i ] ) ) ) # ensure that resolved string is in posix format resolved_string = make_path_posix ( resolved_string ) # get a list of the parts that matter bits = _get_parts_of_format_string ( resolved_string , literal_texts , format_specs ) for i , ( field_name , format_spec ) in enumerate ( zip ( field_names , format_specs ) ) : if field_name : try : if format_spec . startswith ( '%' ) : args [ field_name ] = datetime . strptime ( bits [ i ] , format_spec ) elif format_spec [ - 1 ] in list ( 'bcdoxX' ) : args [ field_name ] = int ( bits [ i ] ) elif format_spec [ - 1 ] in list ( 'eEfFgGn' ) : args [ field_name ] = float ( bits [ i ] ) elif format_spec [ - 1 ] == '%' : args [ field_name ] = float ( bits [ i ] [ : - 1 ] ) / 100 else : args [ field_name ] = fmt . format_field ( bits [ i ] , format_spec ) except : args [ field_name ] = bits [ i ] return args
Reverse the string method format .
427
8
223,386
def path_to_glob ( path ) : from string import Formatter fmt = Formatter ( ) if not isinstance ( path , str ) : return path # Get just the real bits of the urlpath literal_texts = [ i [ 0 ] for i in fmt . parse ( path ) ] # Only use a star for first empty string in literal_texts index_of_empty = [ i for i , lt in enumerate ( literal_texts ) if lt == '' and i != 0 ] glob = '*' . join ( [ literal_texts [ i ] for i in range ( len ( literal_texts ) ) if i not in index_of_empty ] ) return glob
Convert pattern style paths to glob style paths
152
9
223,387
def path_to_pattern ( path , metadata = None ) : if not isinstance ( path , str ) : return pattern = path if metadata : cache = metadata . get ( 'cache' ) if cache : regex = next ( c . get ( 'regex' ) for c in cache if c . get ( 'argkey' ) == 'urlpath' ) pattern = pattern . split ( regex ) [ - 1 ] return pattern
Remove source information from path when using caching
91
9
223,388
def get_partition ( url , headers , source_id , container , partition ) : accepted_formats = list ( serializer . format_registry . keys ( ) ) accepted_compression = list ( serializer . compression_registry . keys ( ) ) payload = dict ( action = 'read' , source_id = source_id , accepted_formats = accepted_formats , accepted_compression = accepted_compression ) if partition is not None : payload [ 'partition' ] = partition try : resp = requests . post ( urljoin ( url , '/v1/source' ) , data = msgpack . packb ( payload , use_bin_type = True ) , * * headers ) if resp . status_code != 200 : raise Exception ( 'Error reading data' ) msg = msgpack . unpackb ( resp . content , * * unpack_kwargs ) format = msg [ 'format' ] compression = msg [ 'compression' ] compressor = serializer . compression_registry [ compression ] encoder = serializer . format_registry [ format ] chunk = encoder . decode ( compressor . decompress ( msg [ 'data' ] ) , container ) return chunk finally : if resp is not None : resp . close ( )
Serializable function for fetching a data source partition
272
10
223,389
def flatten ( iterable ) : # likely not used iterable = iter ( iterable ) while True : try : item = next ( iterable ) except StopIteration : break if isinstance ( item , six . string_types ) : yield item continue try : data = iter ( item ) iterable = itertools . chain ( data , iterable ) except : yield item
Flatten an arbitrarily deep list
80
6
223,390
def clamp ( value , lower = 0 , upper = sys . maxsize ) : return max ( lower , min ( upper , value ) )
Clamp float between given range
29
6
223,391
def expand_templates ( pars , context , return_left = False , client = False , getenv = True , getshell = True ) : all_vars = set ( context ) out = _expand ( pars , context , all_vars , client , getenv , getshell ) if return_left : return out , all_vars return out
Render variables in context into the set of parameters with jinja2 .
77
15
223,392
def merge_pars ( params , user_inputs , spec_pars , client = False , getenv = True , getshell = True ) : context = params . copy ( ) for par in spec_pars : val = user_inputs . get ( par . name , par . default ) if val is not None : if isinstance ( val , six . string_types ) : val = expand_defaults ( val , getenv = getenv , getshell = getshell , client = client ) context [ par . name ] = par . validate ( val ) context . update ( { k : v for k , v in user_inputs . items ( ) if k not in context } ) out , left = expand_templates ( params , context , True , client , getenv , getshell ) context = { k : v for k , v in context . items ( ) if k in left } for par in spec_pars : if par . name in context : # coerces to type context [ par . name ] = par . validate ( context [ par . name ] ) left . remove ( par . name ) params . update ( out ) user_inputs = expand_templates ( user_inputs , context , False , client , getenv , getshell ) params . update ( { k : v for k , v in user_inputs . items ( ) if k in left } ) params . pop ( 'CATALOG_DIR' ) for k , v in params . copy ( ) . items ( ) : # final validation/coersion for sp in [ p for p in spec_pars if p . name == k ] : params [ k ] = sp . validate ( params [ k ] ) return params
Produce open arguments by merging various inputs
370
8
223,393
def coerce ( dtype , value ) : if dtype is None : return value if type ( value ) . __name__ == dtype : return value op = COERCION_RULES [ dtype ] return op ( ) if value is None else op ( value )
Convert a value to a specific type .
59
9
223,394
def open_remote ( url , entry , container , user_parameters , description , http_args , page_size = None , auth = None , getenv = None , getshell = None ) : from intake . container import container_map if url . startswith ( 'intake://' ) : url = url [ len ( 'intake://' ) : ] payload = dict ( action = 'open' , name = entry , parameters = user_parameters , available_plugins = list ( plugin_registry . keys ( ) ) ) req = requests . post ( urljoin ( url , '/v1/source' ) , data = msgpack . packb ( payload , use_bin_type = True ) , * * http_args ) if req . ok : response = msgpack . unpackb ( req . content , * * unpack_kwargs ) if 'plugin' in response : pl = response [ 'plugin' ] pl = [ pl ] if isinstance ( pl , str ) else pl # Direct access for p in pl : if p in plugin_registry : source = plugin_registry [ p ] ( * * response [ 'args' ] ) proxy = False break else : proxy = True else : proxy = True if proxy : response . pop ( 'container' ) response . update ( { 'name' : entry , 'parameters' : user_parameters } ) if container == 'catalog' : response . update ( { 'auth' : auth , 'getenv' : getenv , 'getshell' : getshell , 'page_size' : page_size # TODO ttl? # TODO storage_options? } ) source = container_map [ container ] ( url , http_args , * * response ) source . description = description return source else : raise Exception ( 'Server error: %d, %s' % ( req . status_code , req . reason ) )
Create either local direct data source or remote streamed source
411
10
223,395
def _persist ( source , path , encoder = None ) : import posixpath from dask . bytes import open_files import dask import pickle import json from intake . source . textfiles import TextFilesSource encoder = { None : str , 'str' : str , 'json' : json . dumps , 'pickle' : pickle . dumps } [ encoder ] try : b = source . to_dask ( ) except NotImplementedError : import dask . bag as db b = db . from_sequence ( source . read ( ) , npartitions = 1 ) files = open_files ( posixpath . join ( path , 'part.*' ) , mode = 'wt' , num = b . npartitions ) dwrite = dask . delayed ( write_file ) out = [ dwrite ( part , f , encoder ) for part , f in zip ( b . to_delayed ( ) , files ) ] dask . compute ( out ) s = TextFilesSource ( posixpath . join ( path , 'part.*' ) ) return s
Save list to files using encoding
236
6
223,396
def _ipython_display_ ( self ) : contents = self . describe ( ) display ( { # noqa: F821 'application/json' : contents , 'text/plain' : pretty_describe ( contents ) } , metadata = { 'application/json' : { 'root' : contents [ "name" ] } } , raw = True )
Display the entry as a rich object in an IPython session .
78
13
223,397
def autodiscover ( path = None , plugin_prefix = 'intake_' ) : plugins = { } for importer , name , ispkg in pkgutil . iter_modules ( path = path ) : if name . startswith ( plugin_prefix ) : t = time . time ( ) new_plugins = load_plugins_from_module ( name ) for plugin_name , plugin in new_plugins . items ( ) : if plugin_name in plugins : orig_path = inspect . getfile ( plugins [ plugin_name ] ) new_path = inspect . getfile ( plugin ) warnings . warn ( 'Plugin name collision for "%s" from' '\n %s' '\nand' '\n %s' '\nKeeping plugin from first location.' % ( plugin_name , orig_path , new_path ) ) else : plugins [ plugin_name ] = plugin logger . debug ( "Import %s took: %7.2f s" % ( name , time . time ( ) - t ) ) return plugins
Scan for Intake plugin packages and return a dict of plugins .
225
12
223,398
def load_plugins_from_module ( module_name ) : plugins = { } try : if module_name . endswith ( '.py' ) : import imp mod = imp . load_source ( 'module.name' , module_name ) else : mod = importlib . import_module ( module_name ) except Exception as e : logger . debug ( "Import module <{}> failed: {}" . format ( module_name , e ) ) return { } for _ , cls in inspect . getmembers ( mod , inspect . isclass ) : # Don't try to register plugins imported into this module elsewhere if issubclass ( cls , ( Catalog , DataSource ) ) : plugins [ cls . name ] = cls return plugins
Imports a module and returns dictionary of discovered Intake plugins .
161
12
223,399
def _set_pattern_columns ( self , path_column ) : try : # CategoricalDtype allows specifying known categories when # creating objects. It was added in pandas 0.21.0. from pandas . api . types import CategoricalDtype _HAS_CDT = True except ImportError : _HAS_CDT = False col = self . _dataframe [ path_column ] paths = col . cat . categories column_by_field = { field : col . cat . codes . map ( dict ( enumerate ( values ) ) ) . astype ( "category" if not _HAS_CDT else CategoricalDtype ( set ( values ) ) ) for field , values in reverse_formats ( self . pattern , paths ) . items ( ) } self . _dataframe = self . _dataframe . assign ( * * column_by_field )
Get a column of values for each field in pattern
194
10