idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
28,900
def accel_quit ( self , * args ) : procs = self . notebook_manager . get_running_fg_processes_count ( ) tabs = self . notebook_manager . get_n_pages ( ) notebooks = self . notebook_manager . get_n_notebooks ( ) prompt_cfg = self . settings . general . get_boolean ( 'prompt-on-quit' ) prompt_tab_cfg = self . settings . ...
Callback to prompt the user whether to quit Guake or not .
28,901
def accel_reset_terminal ( self , * args ) : HidePrevention ( self . window ) . prevent ( ) current_term = self . get_notebook ( ) . get_current_terminal ( ) current_term . reset ( True , True ) HidePrevention ( self . window ) . allow ( ) return True
Callback to reset and clean the terminal
28,902
def accel_zoom_in ( self , * args ) : for term in self . get_notebook ( ) . iter_terminals ( ) : term . increase_font_size ( ) return True
Callback to zoom in .
28,903
def accel_zoom_out ( self , * args ) : for term in self . get_notebook ( ) . iter_terminals ( ) : term . decrease_font_size ( ) return True
Callback to zoom out .
28,904
def accel_increase_height ( self , * args ) : height = self . settings . general . get_int ( 'window-height' ) self . settings . general . set_int ( 'window-height' , min ( height + 2 , 100 ) ) return True
Callback to increase height .
28,905
def accel_decrease_height ( self , * args ) : height = self . settings . general . get_int ( 'window-height' ) self . settings . general . set_int ( 'window-height' , max ( height - 2 , 0 ) ) return True
Callback to decrease height .
28,906
def accel_increase_transparency ( self , * args ) : transparency = self . settings . styleBackground . get_int ( 'transparency' ) if int ( transparency ) - 2 > 0 : self . settings . styleBackground . set_int ( 'transparency' , int ( transparency ) - 2 ) return True
Callback to increase transparency .
28,907
def accel_decrease_transparency ( self , * args ) : transparency = self . settings . styleBackground . get_int ( 'transparency' ) if int ( transparency ) + 2 < MAX_TRANSPARENCY : self . settings . styleBackground . set_int ( 'transparency' , int ( transparency ) + 2 ) return True
Callback to decrease transparency .
28,908
def accel_toggle_transparency ( self , * args ) : self . transparency_toggled = not self . transparency_toggled self . settings . styleBackground . triggerOnChangedValue ( self . settings . styleBackground , 'transparency' ) return True
Callback to toggle transparency .
28,909
def accel_prev ( self , * args ) : if self . get_notebook ( ) . get_current_page ( ) == 0 : self . get_notebook ( ) . set_current_page ( self . get_notebook ( ) . get_n_pages ( ) - 1 ) else : self . get_notebook ( ) . prev_page ( ) return True
Callback to go to the previous tab . Called by the accel key .
28,910
def accel_next ( self , * args ) : if self . get_notebook ( ) . get_current_page ( ) + 1 == self . get_notebook ( ) . get_n_pages ( ) : self . get_notebook ( ) . set_current_page ( 0 ) else : self . get_notebook ( ) . next_page ( ) return True
Callback to go to the next tab . Called by the accel key .
28,911
def accel_move_tab_left ( self , * args ) : pos = self . get_notebook ( ) . get_current_page ( ) if pos != 0 : self . move_tab ( pos , pos - 1 ) return True
Callback to move a tab to the left
28,912
def accel_move_tab_right ( self , * args ) : pos = self . get_notebook ( ) . get_current_page ( ) if pos != self . get_notebook ( ) . get_n_pages ( ) - 1 : self . move_tab ( pos , pos + 1 ) return True
Callback to move a tab to the right
28,913
def accel_rename_current_tab ( self , * args ) : page_num = self . get_notebook ( ) . get_current_page ( ) page = self . get_notebook ( ) . get_nth_page ( page_num ) self . get_notebook ( ) . get_tab_label ( page ) . on_rename ( None ) return True
Callback to show the rename tab dialog . Called by the accel key .
28,914
def accel_toggle_hide_on_lose_focus ( self , * args ) : if self . settings . general . get_boolean ( 'window-losefocus' ) : self . settings . general . set_boolean ( 'window-losefocus' , False ) else : self . settings . general . set_boolean ( 'window-losefocus' , True ) return True
Callback toggle whether the window should hide when it loses focus . Called by the accel key .
28,915
def recompute_tabs_titles ( self ) : use_vte_titles = self . settings . general . get_boolean ( "use-vte-titles" ) if not use_vte_titles : return for terminal in self . get_notebook ( ) . iter_terminals ( ) : page_num = self . get_notebook ( ) . page_num ( terminal . get_parent ( ) ) self . get_notebook ( ) . rename_pa...
Updates labels on all tabs . This is required when self . abbreviate changes
28,916
def compute_tab_title ( self , vte ) : vte_title = vte . get_window_title ( ) or _ ( "Terminal" ) try : current_directory = vte . get_current_directory ( ) if self . abbreviate and vte_title . endswith ( current_directory ) : parts = current_directory . split ( '/' ) parts = [ s [ : 1 ] for s in parts [ : - 1 ] ] + [ p...
Abbreviate and cut vte terminal title when necessary
28,917
def close_tab ( self , * args ) : prompt_cfg = self . settings . general . get_int ( 'prompt-on-close-tab' ) self . get_notebook ( ) . delete_page_current ( prompt = prompt_cfg )
Closes the current tab .
28,918
def rename_tab_uuid ( self , term_uuid , new_text , user_set = True ) : term_uuid = uuid . UUID ( term_uuid ) page_index , = ( index for index , t in enumerate ( self . get_notebook ( ) . iter_terminals ( ) ) if t . get_uuid ( ) == term_uuid ) self . get_notebook ( ) . rename_page ( page_index , new_text , user_set )
Rename an already added tab by its UUID
28,919
def get_selected_uuidtab ( self ) : page_num = self . get_notebook ( ) . get_current_page ( ) terminals = self . get_notebook ( ) . get_terminals_for_page ( page_num ) return str ( terminals [ 0 ] . get_uuid ( ) )
Returns the uuid of the current selected terminal
28,920
def search_on_web ( self , * args ) : current_term = self . get_notebook ( ) . get_current_terminal ( ) if current_term . get_has_selection ( ) : current_term . copy_clipboard ( ) guake_clipboard = Gtk . Clipboard . get_default ( self . window . get_display ( ) ) search_query = guake_clipboard . wait_for_text ( ) searc...
Search for the selected text on the web
28,921
def execute_hook ( self , event_name ) : hook = self . settings . hooks . get_string ( '{!s}' . format ( event_name ) ) if hook is not None and hook != "" : hook = hook . split ( ) try : subprocess . Popen ( hook ) except OSError as oserr : if oserr . errno == 8 : log . error ( "Hook execution failed! Check shebang at ...
Execute shell commands related to current event_name
28,922
def get_resource_dirs ( resource ) : dirs = [ os . path . join ( dir , resource ) for dir in itertools . chain ( GLib . get_system_data_dirs ( ) , GUAKE_THEME_DIR , GLib . get_user_data_dir ( ) ) ] dirs += [ os . path . join ( os . path . expanduser ( "~" ) , ".{}" . format ( resource ) ) ] return [ Path ( dir ) for di...
Returns a list of all known resource dirs for a given resource .
28,923
def configure_terminal ( self ) : client = self . guake . settings . general word_chars = client . get_string ( 'word-chars' ) if word_chars : self . set_word_char_exceptions ( word_chars ) self . set_audible_bell ( client . get_boolean ( 'use-audible-bell' ) ) self . set_sensitive ( True ) cursor_blink_mode = self . g...
Sets all customized properties on the terminal
28,924
def is_file_on_local_server ( self , text ) -> Tuple [ Optional [ Path ] , Optional [ int ] , Optional [ int ] ] : lineno = None colno = None py_func = None m = re . compile ( r"(.*)\:(\d+)\:(\d+)$" ) . match ( text ) if m : text = m . group ( 1 ) lineno = m . group ( 2 ) colno = m . group ( 3 ) else : m = re . compile...
Test if the provided text matches a file on local server
28,925
def button_press ( self , terminal , event ) : self . matched_value = '' if ( Vte . MAJOR_VERSION , Vte . MINOR_VERSION ) >= ( 0 , 46 ) : matched_string = self . match_check_event ( event ) else : matched_string = self . match_check ( int ( event . x / self . get_char_width ( ) ) , int ( event . y / self . get_char_hei...
Handles the button press event in the terminal widget . If any match string is caught another application is open to handle the matched resource uri .
28,926
def delete_shell ( self , pid ) : try : os . kill ( pid , signal . SIGHUP ) except OSError : pass num_tries = 30 while num_tries > 0 : try : if os . waitpid ( pid , os . WNOHANG ) [ 0 ] != 0 : break except OSError : break sleep ( 0.1 ) num_tries -= 1 if num_tries == 0 : try : os . kill ( pid , signal . SIGKILL ) os . w...
This function will kill the shell on a tab trying to send a sigterm and if it doesn t work a sigkill . Between these two signals we have a timeout of 3 seconds so is recommended to call this in another thread . This doesn t change any thing in UI so you can use python s start_new_thread .
28,927
def is_valid_value ( value , type ) : if isinstance ( type , GraphQLNonNull ) : of_type = type . of_type if value is None : return [ u'Expected "{}", found null.' . format ( type ) ] return is_valid_value ( value , of_type ) if value is None : return _empty_list if isinstance ( type , GraphQLList ) : item_type = type ....
Given a type and any value return True if that value is valid .
28,928
def get_unique_schema_id ( schema ) : assert isinstance ( schema , GraphQLSchema ) , ( "Must receive a GraphQLSchema as schema. Received {}" ) . format ( repr ( schema ) ) if schema not in _cached_schemas : _cached_schemas [ schema ] = sha1 ( str ( schema ) . encode ( "utf-8" ) ) . hexdigest ( ) return _cached_schemas ...
Get a unique id given a GraphQLSchema
28,929
def get_unique_document_id ( query_str ) : assert isinstance ( query_str , string_types ) , ( "Must receive a string as query_str. Received {}" ) . format ( repr ( query_str ) ) if query_str not in _cached_queries : _cached_queries [ query_str ] = sha1 ( str ( query_str ) . encode ( "utf-8" ) ) . hexdigest ( ) return _...
Get a unique id given a query_string
28,930
def get_key_for_schema_and_document_string ( self , schema , request_string ) : if self . use_consistent_hash : schema_id = get_unique_schema_id ( schema ) document_id = get_unique_document_id ( request_string ) return hash ( ( schema_id , document_id ) ) return hash ( ( schema , request_string ) )
This method returns a unique key given a schema and a request_string
28,931
def get_suggested_type_names ( schema , output_type , field_name ) : if isinstance ( output_type , ( GraphQLInterfaceType , GraphQLUnionType ) ) : suggested_object_types = [ ] interface_usage_count = OrderedDict ( ) for possible_type in schema . get_possible_types ( output_type ) : if not possible_type . fields . get (...
Go through all of the implementations of type as well as the interfaces that they implement . If any of those types include the provided field suggest them sorted by how often the type is referenced starting with Interfaces .
28,932
def get_suggested_field_names ( schema , graphql_type , field_name ) : if isinstance ( graphql_type , ( GraphQLInterfaceType , GraphQLObjectType ) ) : possible_field_names = list ( graphql_type . fields . keys ( ) ) return suggestion_list ( field_name , possible_field_names ) return [ ]
For the field name provided determine if there are any similar field names that may be the result of a typo .
28,933
def from_code ( cls , schema , code , uptodate = None , extra_namespace = None , ) : if isinstance ( code , string_types ) : filename = "<document>" code = compile ( code , filename , "exec" ) namespace = { "__file__" : code . co_filename } exec ( code , namespace ) if extra_namespace : namespace . update ( extra_names...
Creates a GraphQLDocument object from compiled code and the globals . This is used by the loaders and schema to create a document object .
28,934
def suggestion_list ( inp , options ) : options_by_distance = OrderedDict ( ) input_threshold = len ( inp ) / 2 for option in options : distance = lexical_distance ( inp , option ) threshold = max ( input_threshold , len ( option ) / 2 , 1 ) if distance <= threshold : options_by_distance [ option ] = distance return so...
Given an invalid input string and a list of valid options returns a filtered list of valid options sorted based on their similarity with the input .
28,935
def lexical_distance ( a , b ) : d = [ [ i ] for i in range ( len ( a ) + 1 ) ] or [ ] d_len = len ( d ) or 1 for i in range ( d_len ) : for j in range ( 1 , len ( b ) + 1 ) : if i == 0 : d [ i ] . append ( j ) else : d [ i ] . append ( 0 ) for i in range ( 1 , len ( a ) + 1 ) : for j in range ( 1 , len ( b ) + 1 ) : c...
Computes the lexical distance between strings A and B . The distance between two strings is given by counting the minimum number of edits needed to transform string A into string B . An edit can be an insertion deletion or substitution of a single character or a swap of two adjacent characters . This distance can be us...
28,936
def get_complete_version ( version = None ) : if version is None : from graphql import VERSION as version else : assert len ( version ) == 5 assert version [ 3 ] in ( "alpha" , "beta" , "rc" , "final" ) return version
Returns a tuple of the graphql version . If version argument is non - empty then checks for correctness of the tuple provided .
28,937
def read_token ( source , from_position ) : body = source . body body_length = len ( body ) position = position_after_whitespace ( body , from_position ) if position >= body_length : return Token ( TokenKind . EOF , position , position ) code = char_code_at ( body , position ) if code : if code < 0x0020 and code not in...
Gets the next token from the source starting at the given position .
28,938
def position_after_whitespace ( body , start_position ) : body_length = len ( body ) position = start_position while position < body_length : code = char_code_at ( body , position ) if code in ignored_whitespace_characters : position += 1 elif code == 35 : position += 1 while position < body_length : code = char_code_a...
Reads from body starting at start_position until it finds a non - whitespace or commented character then returns the position of that character for lexing .
28,939
def read_number ( source , start , first_code ) : r code = first_code body = source . body position = start is_float = False if code == 45 : position += 1 code = char_code_at ( body , position ) if code == 48 : position += 1 code = char_code_at ( body , position ) if code is not None and 48 <= code <= 57 : raise GraphQ...
r Reads a number token from the source file either a float or an int depending on whether a decimal point appears .
28,940
def read_string ( source , start ) : body = source . body body_length = len ( body ) position = start + 1 chunk_start = position code = 0 value = [ ] append = value . append while position < body_length : code = char_code_at ( body , position ) if code in ( None , 0x000A , 0x000D , 34 , ) : break if code < 0x0020 and c...
Reads a string token from the source file .
28,941
def read_name ( source , position ) : body = source . body body_length = len ( body ) end = position + 1 while end != body_length : code = char_code_at ( body , end ) if not ( code is not None and ( code == 95 or 48 <= code <= 57 or 65 <= code <= 90 or 97 <= code <= 122 ) ) : break end += 1 return Token ( TokenKind . N...
Reads an alphanumeric + underscore name from the source .
28,942
def complete_value ( exe_context , return_type , field_asts , info , path , result , ) : if is_thenable ( result ) : return Promise . resolve ( result ) . then ( lambda resolved : complete_value ( exe_context , return_type , field_asts , info , path , resolved ) , lambda error : Promise . rejected ( GraphQLLocatedError...
Implements the instructions for completeValue as defined in the Field entries section of the spec .
28,943
def complete_list_value ( exe_context , return_type , field_asts , info , path , result , ) : assert isinstance ( result , Iterable ) , ( "User Error: expected iterable, but did not find one " + "for field {}.{}." ) . format ( info . parent_type , info . field_name ) item_type = return_type . of_type completed_results ...
Complete a list value by completing each item in the list with the inner type
28,944
def complete_leaf_value ( return_type , path , result , ) : assert hasattr ( return_type , "serialize" ) , "Missing serialize method on type" serialized_result = return_type . serialize ( result ) if serialized_result is None : raise GraphQLError ( ( 'Expected a value of type "{}" but ' + "received: {}" ) . format ( re...
Complete a Scalar or Enum by serializing to a valid value returning null if serialization is not possible .
28,945
def complete_abstract_value ( exe_context , return_type , field_asts , info , path , result , ) : runtime_type = None if isinstance ( return_type , ( GraphQLInterfaceType , GraphQLUnionType ) ) : if return_type . resolve_type : runtime_type = return_type . resolve_type ( result , info ) else : runtime_type = get_defaul...
Complete an value of an abstract type by determining the runtime type of that value then completing based on that type .
28,946
def complete_object_value ( exe_context , return_type , field_asts , info , path , result , ) : if return_type . is_type_of and not return_type . is_type_of ( result , info ) : raise GraphQLError ( u'Expected value of type "{}" but got: {}.' . format ( return_type , type ( result ) . __name__ ) , field_asts , ) subfiel...
Complete an Object value by evaluating all sub - selections .
28,947
def complete_nonnull_value ( exe_context , return_type , field_asts , info , path , result , ) : completed = complete_value ( exe_context , return_type . of_type , field_asts , info , path , result ) if completed is None : raise GraphQLError ( "Cannot return null for non-nullable field {}.{}." . format ( info . parent_...
Complete a NonNull value by completing the inner type
28,948
def value_from_ast ( value_ast , type , variables = None ) : if isinstance ( type , GraphQLNonNull ) : return value_from_ast ( value_ast , type . of_type , variables ) if value_ast is None : return None if isinstance ( value_ast , ast . Variable ) : variable_name = value_ast . name . value if not variables or variable_...
Given a type and a value AST node known to match this type build a runtime value .
28,949
def ast_to_code ( ast , indent = 0 ) : code = [ ] def append ( line ) : code . append ( ( " " * indent ) + line ) if isinstance ( ast , Node ) : append ( "ast.{}(" . format ( ast . __class__ . __name__ ) ) indent += 1 for i , k in enumerate ( ast . _fields , 1 ) : v = getattr ( ast , k ) append ( "{}={}," . format (...
Converts an ast into a python code representation of the AST .
28,950
def snake ( s ) : if len ( s ) < 2 : return s . lower ( ) out = s [ 0 ] . lower ( ) for c in s [ 1 : ] : if c . isupper ( ) : out += "_" c = c . lower ( ) out += c return out
Convert from title or camelCase to snake_case .
28,951
def make_post_request ( self , url , auth , json_payload ) : response = requests . post ( url , auth = auth , json = json_payload ) return response . json ( )
This function executes the request with the provided json payload and return the json response
28,952
def get_field_def ( schema , parent_type , field_ast , ) : name = field_ast . name . value if name == "__schema" and schema . get_query_type ( ) == parent_type : return SchemaMetaFieldDef elif name == "__type" and schema . get_query_type ( ) == parent_type : return TypeMetaFieldDef elif name == "__typename" and isinsta...
Not exactly the same as the executor s definition of get_field_def in this statically evaluated environment we do not always have an Object type and need to handle Interface and Union types .
28,953
def _find_conflicts_within_selection_set ( context , cached_fields_and_fragment_names , compared_fragments , parent_type , selection_set , ) : conflicts = [ ] field_map , fragment_names = _get_fields_and_fragments_names ( context , cached_fields_and_fragment_names , parent_type , selection_set ) _collect_conflicts_with...
Find all conflicts found within a selection set including those found via spreading in fragments .
28,954
def _find_conflicts_between_sub_selection_sets ( context , cached_fields_and_fragment_names , compared_fragments , are_mutually_exclusive , parent_type1 , selection_set1 , parent_type2 , selection_set2 , ) : conflicts = [ ] field_map1 , fragment_names1 = _get_fields_and_fragments_names ( context , cached_fields_and_fra...
Find all conflicts found between two selection sets .
28,955
def _find_conflict ( context , cached_fields_and_fragment_names , compared_fragments , parent_fields_are_mutually_exclusive , response_name , field1 , field2 , ) : parent_type1 , ast1 , def1 = field1 parent_type2 , ast2 , def2 = field2 are_mutually_exclusive = parent_fields_are_mutually_exclusive or ( parent_type1 != p...
Determines if there is a conflict between two particular fields .
28,956
def _get_referenced_fields_and_fragment_names ( context , cached_fields_and_fragment_names , fragment , ) : cached = cached_fields_and_fragment_names . get ( fragment . selection_set ) if cached : return cached fragment_type = type_from_ast ( context . get_schema ( ) , fragment . type_condition ) return _get_fields_and...
Given a reference to a fragment return the represented collection of fields as well as a list of nested fragment names referenced via fragment spreads .
28,957
def _subfield_conflicts ( conflicts , response_name , ast1 , ast2 , ) : if conflicts : return ( ( response_name , [ conflict [ 0 ] for conflict in conflicts ] ) , tuple ( itertools . chain ( [ ast1 ] , * [ conflict [ 1 ] for conflict in conflicts ] ) ) , tuple ( itertools . chain ( [ ast2 ] , * [ conflict [ 2 ] for con...
Given a series of Conflicts which occurred between two sub - fields generate a single Conflict .
28,958
def collect_fields ( ctx , runtime_type , selection_set , fields , prev_fragment_names , ) : for selection in selection_set . selections : directives = selection . directives if isinstance ( selection , ast . Field ) : if not should_include_node ( ctx , directives ) : continue name = get_field_entry_key ( selection ) f...
Given a selectionSet adds all of the fields in that selection to the passed in map of fields and returns it at the end .
28,959
def should_include_node ( ctx , directives ) : if directives : skip_ast = None for directive in directives : if directive . name . value == GraphQLSkipDirective . name : skip_ast = directive break if skip_ast : args = get_argument_values ( GraphQLSkipDirective . args , skip_ast . arguments , ctx . variable_values ) if ...
Determines if a field should be included based on the
28,960
def default_resolve_fn ( source , info , ** args ) : name = info . field_name if isinstance ( source , dict ) : property = source . get ( name ) else : property = getattr ( source , name , None ) if callable ( property ) : return property ( ) return property
If a resolve function is not given then a default resolve behavior is used which takes the property of the source object of the same name as the field and returns it as the result or if it s a function returns the result of calling that function .
28,961
def get_variable_values ( schema , definition_asts , inputs , ) : if inputs is None : inputs = { } values = { } for def_ast in definition_asts : var_name = def_ast . variable . name . value var_type = type_from_ast ( schema , def_ast . type ) value = inputs . get ( var_name ) if not is_input_type ( var_type ) : raise G...
Prepares an object map of variables of the correct type based on the provided variable definitions and arbitrary input . If the input cannot be parsed to match the variable definitions a GraphQLError will be thrown .
28,962
def get_argument_values ( arg_defs , arg_asts , variables = None , ) : if not arg_defs : return { } if arg_asts : arg_ast_map = { arg . name . value : arg for arg in arg_asts } else : arg_ast_map = { } result = { } for name , arg_def in arg_defs . items ( ) : arg_type = arg_def . type arg_ast = arg_ast_map . get ( name...
Prepares an object map of argument values given a list of argument definitions and list of argument AST nodes .
28,963
def coerce_value ( type , value ) : if isinstance ( type , GraphQLNonNull ) : return coerce_value ( type . of_type , value ) if value is None : return None if isinstance ( type , GraphQLList ) : item_type = type . of_type if not isinstance ( value , string_types ) and isinstance ( value , Iterable ) : return [ coerce_v...
Given a type and any value return a runtime value coerced to match the type .
28,964
def parse ( source , ** kwargs ) : options = { "no_location" : False , "no_source" : False } options . update ( kwargs ) if isinstance ( source , string_types ) : source_obj = Source ( source ) else : source_obj = source parser = Parser ( source_obj , options ) return parse_document ( parser )
Given a GraphQL source parses it into a Document .
28,965
def loc ( parser , start ) : if parser . options [ "no_location" ] : return None if parser . options [ "no_source" ] : return Loc ( start , parser . prev_end ) return Loc ( start , parser . prev_end , parser . source )
Returns a location object used to identify the place in the source that created a given parsed object .
28,966
def advance ( parser ) : prev_end = parser . token . end parser . prev_end = prev_end parser . token = parser . lexer . next_token ( prev_end )
Moves the internal parser object to the next lexed token .
28,967
def skip ( parser , kind ) : match = parser . token . kind == kind if match : advance ( parser ) return match
If the next token is of the given kind return true after advancing the parser . Otherwise do not change the parser state and throw an error .
28,968
def expect ( parser , kind ) : token = parser . token if token . kind == kind : advance ( parser ) return token raise GraphQLSyntaxError ( parser . source , token . start , u"Expected {}, found {}" . format ( get_token_kind_desc ( kind ) , get_token_desc ( token ) ) , )
If the next token is of the given kind return that token after advancing the parser . Otherwise do not change the parser state and return False .
28,969
def expect_keyword ( parser , value ) : token = parser . token if token . kind == TokenKind . NAME and token . value == value : advance ( parser ) return token raise GraphQLSyntaxError ( parser . source , token . start , u'Expected "{}", found {}' . format ( value , get_token_desc ( token ) ) , )
If the next token is a keyword with the given value return that token after advancing the parser . Otherwise do not change the parser state and return False .
28,970
def unexpected ( parser , at_token = None ) : token = at_token or parser . token return GraphQLSyntaxError ( parser . source , token . start , u"Unexpected {}" . format ( get_token_desc ( token ) ) )
Helper function for creating an error when an unexpected lexed token is encountered .
28,971
def any ( parser , open_kind , parse_fn , close_kind ) : expect ( parser , open_kind ) nodes = [ ] while not skip ( parser , close_kind ) : nodes . append ( parse_fn ( parser ) ) return nodes
Returns a possibly empty list of parse nodes determined by the parse_fn . This list begins with a lex token of openKind and ends with a lex token of closeKind . Advances the parser to the next lex token after the closing token .
28,972
def parse_name ( parser ) : token = expect ( parser , TokenKind . NAME ) return ast . Name ( value = token . value , loc = loc ( parser , token . start ) )
Converts a name lex token into a name parse node .
28,973
def mean ( series ) : if np . issubdtype ( series . dtype , np . number ) : return series . mean ( ) else : return np . nan
Returns the mean of a series .
28,974
def first ( series , order_by = None ) : if order_by is not None : series = order_series_by ( series , order_by ) first_s = series . iloc [ 0 ] return first_s
Returns the first value of a series .
28,975
def last ( series , order_by = None ) : if order_by is not None : series = order_series_by ( series , order_by ) last_s = series . iloc [ series . size - 1 ] return last_s
Returns the last value of a series .
28,976
def nth ( series , n , order_by = None ) : if order_by is not None : series = order_series_by ( series , order_by ) try : return series . iloc [ n ] except : return np . nan
Returns the nth value of a series .
28,977
def median ( series ) : if np . issubdtype ( series . dtype , np . number ) : return series . median ( ) else : return np . nan
Returns the median value of a series .
28,978
def var ( series ) : if np . issubdtype ( series . dtype , np . number ) : return series . var ( ) else : return np . nan
Returns the variance of values in a series .
28,979
def sd ( series ) : if np . issubdtype ( series . dtype , np . number ) : return series . std ( ) else : return np . nan
Returns the standard deviation of values in a series .
28,980
def get_join_parameters ( join_kwargs ) : by = join_kwargs . get ( 'by' , None ) suffixes = join_kwargs . get ( 'suffixes' , ( '_x' , '_y' ) ) if isinstance ( by , tuple ) : left_on , right_on = by elif isinstance ( by , list ) : by = [ x if isinstance ( x , tuple ) else ( x , x ) for x in by ] left_on , right_on = ( l...
Convenience function to determine the columns to join the right and left DataFrames on as well as any suffixes for the columns .
28,981
def inner_join ( df , other , ** kwargs ) : left_on , right_on , suffixes = get_join_parameters ( kwargs ) joined = df . merge ( other , how = 'inner' , left_on = left_on , right_on = right_on , suffixes = suffixes ) return joined
Joins on values present in both DataFrames .
28,982
def anti_join ( df , other , ** kwargs ) : left_on , right_on , suffixes = get_join_parameters ( kwargs ) if not right_on : right_on = [ col_name for col_name in df . columns . values . tolist ( ) if col_name in other . columns . values . tolist ( ) ] left_on = right_on elif not isinstance ( right_on , ( list , tuple )...
Returns all of the rows in the left DataFrame that do not have a match in the right DataFrame .
28,983
def bind_rows ( df , other , join = 'outer' , ignore_index = False ) : df = pd . concat ( [ df , other ] , join = join , ignore_index = ignore_index , axis = 0 ) return df
Binds DataFrames vertically stacking them together . This is equivalent to pd . concat with axis = 0 .
28,984
def arrange ( df , * args , ** kwargs ) : flat_args = [ a for a in flatten ( args ) ] series = [ df [ arg ] if isinstance ( arg , str ) else df . iloc [ : , arg ] if isinstance ( arg , int ) else pd . Series ( arg ) for arg in flat_args ] sorter = pd . concat ( series , axis = 1 ) . reset_index ( drop = True ) sorter =...
Calls pandas . DataFrame . sort_values to sort a DataFrame according to criteria .
28,985
def rename ( df , ** kwargs ) : return df . rename ( columns = { v : k for k , v in kwargs . items ( ) } )
Renames columns where keyword argument values are the current names of columns and keys are the new names .
28,986
def convert_type ( df , columns ) : out_df = df . copy ( ) for col in columns : column_values = pd . Series ( out_df [ col ] . unique ( ) ) column_values = column_values [ ~ column_values . isnull ( ) ] if len ( column_values ) == 0 : continue if set ( column_values . values ) < { 'True' , 'False' } : out_df [ col ] = ...
Helper function that attempts to convert columns into their appropriate data type .
28,987
def spread ( df , key , values , convert = False ) : columns = df . columns . tolist ( ) id_cols = [ col for col in columns if not col in [ key , values ] ] temp_index = [ '' for i in range ( len ( df ) ) ] for id_col in id_cols : temp_index += df [ id_col ] . map ( str ) out_df = df . assign ( temp_index = temp_index ...
Transforms a long DataFrame into a wide format using a key and value column .
28,988
def separate ( df , column , into , sep = "[\W_]+" , remove = True , convert = False , extra = 'drop' , fill = 'right' ) : assert isinstance ( into , ( tuple , list ) ) if isinstance ( sep , ( tuple , list ) ) : inds = [ 0 ] + list ( sep ) if len ( inds ) > len ( into ) : if extra == 'drop' : inds = inds [ : len ( into...
Splits columns into multiple columns .
28,989
def unite ( df , colname , * args , ** kwargs ) : to_unite = list ( [ a for a in flatten ( args ) ] ) sep = kwargs . get ( 'sep' , '_' ) remove = kwargs . get ( 'remove' , True ) na_action = kwargs . get ( 'na_action' , 'maintain' ) if na_action == 'maintain' : df [ colname ] = df [ to_unite ] . apply ( lambda x : np ....
Does the inverse of separate joining columns together by a specified separator .
28,990
def validate_set_ops ( df , other ) : if df . columns . values . tolist ( ) != other . columns . values . tolist ( ) : not_in_df = [ col for col in other . columns if col not in df . columns ] not_in_other = [ col for col in df . columns if col not in other . columns ] error_string = 'Error: not compatible.' if len ( n...
Helper function to ensure that DataFrames are valid for set operations . Columns must be the same name in the same order and indices must be of the same dimension with the same names .
28,991
def union ( df , other , index = False , keep = 'first' ) : validate_set_ops ( df , other ) stacked = df . append ( other ) if index : stacked_reset_indexes = stacked . reset_index ( ) index_cols = [ col for col in stacked_reset_indexes . columns if col not in df . columns ] index_name = df . index . names return_df = ...
Returns rows that appear in either DataFrame .
28,992
def intersect ( df , other , index = False , keep = 'first' ) : validate_set_ops ( df , other ) if index : df_reset_index = df . reset_index ( ) other_reset_index = other . reset_index ( ) index_cols = [ col for col in df_reset_index . columns if col not in df . columns ] df_index_names = df . index . names return_df =...
Returns rows that appear in both DataFrames .
28,993
def transmute ( df , * keep_columns , ** kwargs ) : keep_cols = [ ] for col in flatten ( keep_columns ) : try : keep_cols . append ( col . name ) except : if isinstance ( col , str ) : keep_cols . append ( col ) elif isinstance ( col , int ) : keep_cols . append ( df . columns [ col ] ) df = df . assign ( ** kwargs ) c...
Creates columns and then returns those new columns and optionally specified original columns from the DataFrame .
28,994
def coalesce ( * series ) : series = [ pd . Series ( s ) for s in series ] coalescer = pd . concat ( series , axis = 1 ) min_nonna = np . argmin ( pd . isnull ( coalescer ) . values , axis = 1 ) min_nonna = [ coalescer . columns [ i ] for i in min_nonna ] return coalescer . lookup ( np . arange ( coalescer . shape [ 0 ...
Takes the first non-NaN value, in order, across the specified series, returning a new series. Mimics the coalesce function in dplyr and SQL.
28,995
def case_when ( * conditions ) : lengths = [ ] for logical , outcome in conditions : if isinstance ( logical , collections . Iterable ) : lengths . append ( len ( logical ) ) if isinstance ( outcome , collections . Iterable ) and not isinstance ( outcome , str ) : lengths . append ( len ( outcome ) ) unique_lengths = n...
Functions as a switch statement creating a new series out of logical conditions specified by 2 - item lists where the left - hand item is the logical condition and the right - hand item is the value where that condition is true .
28,996
def if_else ( condition , when_true , otherwise ) : if not isinstance ( when_true , collections . Iterable ) or isinstance ( when_true , str ) : when_true = np . repeat ( when_true , len ( condition ) ) if not isinstance ( otherwise , collections . Iterable ) or isinstance ( otherwise , str ) : otherwise = np . repeat ...
Wraps creation of a series based on if - else conditional logic into a function call .
28,997
def na_if(series, *values):
    """Return a copy of *series* where entries matching any of *values* are NaN.

    Mirrors dplyr's ``na_if``. The input is never mutated: the original
    in-place assignment (``series[series.isin(values)] = np.nan``) could
    write through to the caller's data, because ``pd.Series(series)`` may
    share its buffer with an existing Series on older pandas versions.

    Parameters
    ----------
    series : list-like or pd.Series
        Values to scan.
    *values
        Scalar values whose occurrences are replaced with ``np.nan``.

    Returns
    -------
    pd.Series
        New series with matching entries set to NaN.
    """
    series = pd.Series(series)
    # .mask builds a new Series (NaN where the condition holds), leaving
    # the caller's data untouched.
    return series.mask(series.isin(values))
If values in a series match any of the specified values, change them to np.nan.
28,998
def between(series, a, b, inclusive=False):
    """Boolean mask of which rows of *series* lie between *a* and *b*.

    Parameters
    ----------
    series : pd.Series
        Values to test.
    a, b
        Lower and upper bound.
    inclusive : bool, default False
        When truthy the bounds themselves count as "between".

    Returns
    -------
    pd.Series of bool

    Note: the original ``if inclusive == True / elif inclusive == False``
    chain left the result unbound (UnboundLocalError) for truthy non-bool
    values of ``inclusive``; any truthy value now selects the inclusive
    comparison.
    """
    if inclusive:
        return (series >= a) & (series <= b)
    return (series > a) & (series < b)
Returns a boolean series specifying whether rows of the input series are between values a and b .
28,999
def seek(self, pos):
    """Reposition the underlying file at byte offset ``pos`` and reset all
    buffer and tokenizer state so parsing restarts cleanly from there."""
    if self.debug:
        logging.debug('seek: %r' % pos)
    self.fp.seek(pos)
    # Reset read-buffer state to the new position.
    self.bufpos, self.buf, self.charpos = pos, b'', 0
    # Reset tokenizer state back to the main parse mode.
    self._parse1 = self._parse_main
    self._curtoken, self._curtokenpos = b'', 0
    self._tokens = []
Seeks the parser to the given position .