idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
18,500 | def append_to_arg_count ( self , data ) : assert data in '-0123456789' current = self . _arg if data == '-' : assert current is None or current == '-' result = data elif current is None : result = data else : result = "%s%s" % ( current , data ) self . input_processor . arg = result | Add digit to the input argument . |
18,501 | def _get_keys ( self , read , input_records ) : for i in range ( read . value ) : ir = input_records [ i ] if ir . EventType in EventTypes : ev = getattr ( ir . Event , EventTypes [ ir . EventType ] ) if type ( ev ) == KEY_EVENT_RECORD and ev . KeyDown : for key_press in self . _event_to_key_presses ( ev ) : yield key_press elif type ( ev ) == MOUSE_EVENT_RECORD : for key_press in self . _handle_mouse ( ev ) : yield key_press | Generator that yields KeyPress objects from the input records . |
18,502 | def _event_to_key_presses ( self , ev ) : assert type ( ev ) == KEY_EVENT_RECORD and ev . KeyDown result = None u_char = ev . uChar . UnicodeChar ascii_char = u_char . encode ( 'utf-8' ) if u_char == '\x00' : if ev . VirtualKeyCode in self . keycodes : result = KeyPress ( self . keycodes [ ev . VirtualKeyCode ] , '' ) else : if ascii_char in self . mappings : if self . mappings [ ascii_char ] == Keys . ControlJ : u_char = '\n' result = KeyPress ( self . mappings [ ascii_char ] , u_char ) else : result = KeyPress ( u_char , u_char ) if ( ev . ControlKeyState & self . LEFT_CTRL_PRESSED or ev . ControlKeyState & self . RIGHT_CTRL_PRESSED ) and result : if result . key == Keys . Left : result . key = Keys . ControlLeft if result . key == Keys . Right : result . key = Keys . ControlRight if result . key == Keys . Up : result . key = Keys . ControlUp if result . key == Keys . Down : result . key = Keys . ControlDown if ev . ControlKeyState & self . SHIFT_PRESSED and result : if result . key == Keys . Tab : result . key = Keys . BackTab if ( ev . ControlKeyState & self . LEFT_CTRL_PRESSED or ev . ControlKeyState & self . RIGHT_CTRL_PRESSED ) and result and result . data == ' ' : result = KeyPress ( Keys . ControlSpace , ' ' ) if ( ev . ControlKeyState & self . LEFT_CTRL_PRESSED or ev . ControlKeyState & self . RIGHT_CTRL_PRESSED ) and result and result . key == Keys . ControlJ : return [ KeyPress ( Keys . Escape , '' ) , result ] if result : meta_pressed = ev . ControlKeyState & self . LEFT_ALT_PRESSED if meta_pressed : return [ KeyPress ( Keys . Escape , '' ) , result ] else : return [ result ] else : return [ ] | For this KEY_EVENT_RECORD return a list of KeyPress instances . |
18,503 | def _handle_mouse ( self , ev ) : FROM_LEFT_1ST_BUTTON_PRESSED = 0x1 result = [ ] if ev . ButtonState == FROM_LEFT_1ST_BUTTON_PRESSED : for event_type in [ MouseEventType . MOUSE_DOWN , MouseEventType . MOUSE_UP ] : data = ';' . join ( [ event_type , str ( ev . MousePosition . X ) , str ( ev . MousePosition . Y ) ] ) result . append ( KeyPress ( Keys . WindowsMouseEvent , data ) ) return result | Handle mouse events . Return a list of KeyPress instances . |
18,504 | def tokenize_regex ( input ) : p = re . compile ( r , re . VERBOSE ) tokens = [ ] while input : m = p . match ( input ) if m : token , input = input [ : m . end ( ) ] , input [ m . end ( ) : ] if not token . isspace ( ) : tokens . append ( token ) else : raise Exception ( 'Could not tokenize input regex.' ) return tokens | Takes a string representing a regular expression as input and tokenizes it . |
18,505 | def parse_regex ( regex_tokens ) : tokens = [ ')' ] + regex_tokens [ : : - 1 ] def wrap ( lst ) : if len ( lst ) == 1 : return lst [ 0 ] else : return Sequence ( lst ) def _parse ( ) : or_list = [ ] result = [ ] def wrapped_result ( ) : if or_list == [ ] : return wrap ( result ) else : or_list . append ( result ) return Any ( [ wrap ( i ) for i in or_list ] ) while tokens : t = tokens . pop ( ) if t . startswith ( '(?P<' ) : variable = Variable ( _parse ( ) , varname = t [ 4 : - 1 ] ) result . append ( variable ) elif t in ( '*' , '*?' ) : greedy = ( t == '*' ) result [ - 1 ] = Repeat ( result [ - 1 ] , greedy = greedy ) elif t in ( '+' , '+?' ) : greedy = ( t == '+' ) result [ - 1 ] = Repeat ( result [ - 1 ] , min_repeat = 1 , greedy = greedy ) elif t in ( '?' , '??' ) : if result == [ ] : raise Exception ( 'Nothing to repeat.' + repr ( tokens ) ) else : greedy = ( t == '?' ) result [ - 1 ] = Repeat ( result [ - 1 ] , min_repeat = 0 , max_repeat = 1 , greedy = greedy ) elif t == '|' : or_list . append ( result ) result = [ ] elif t in ( '(' , '(?:' ) : result . append ( _parse ( ) ) elif t == '(?!' : result . append ( Lookahead ( _parse ( ) , negative = True ) ) elif t == '(?=' : result . append ( Lookahead ( _parse ( ) , negative = False ) ) elif t == ')' : return wrapped_result ( ) elif t . startswith ( '#' ) : pass elif t . startswith ( '{' ) : raise Exception ( '{}-style repitition not yet supported' % t ) elif t . startswith ( '(?' ) : raise Exception ( '%r not supported' % t ) elif t . isspace ( ) : pass else : result . append ( Regex ( t ) ) raise Exception ( "Expecting ')' token" ) result = _parse ( ) if len ( tokens ) != 0 : raise Exception ( "Unmatched parantheses." ) else : return result | Takes a list of tokens from the tokenizer and returns a parse tree . |
18,506 | def _divide_heigths ( self , cli , write_position ) : if not self . children : return [ ] given_dimensions = self . get_dimensions ( cli ) if self . get_dimensions else None def get_dimension_for_child ( c , index ) : if given_dimensions and given_dimensions [ index ] is not None : return given_dimensions [ index ] else : return c . preferred_height ( cli , write_position . width , write_position . extended_height ) dimensions = [ get_dimension_for_child ( c , index ) for index , c in enumerate ( self . children ) ] sum_dimensions = sum_layout_dimensions ( dimensions ) if sum_dimensions . min > write_position . extended_height : return sizes = [ d . min for d in dimensions ] child_generator = take_using_weights ( items = list ( range ( len ( dimensions ) ) ) , weights = [ d . weight for d in dimensions ] ) i = next ( child_generator ) while sum ( sizes ) < min ( write_position . extended_height , sum_dimensions . preferred ) : if sizes [ i ] < dimensions [ i ] . preferred : sizes [ i ] += 1 i = next ( child_generator ) if not any ( [ cli . is_returning , cli . is_exiting , cli . is_aborting ] ) : while sum ( sizes ) < min ( write_position . height , sum_dimensions . max ) : if sizes [ i ] < dimensions [ i ] . max : sizes [ i ] += 1 i = next ( child_generator ) return sizes | Return the heights for all rows . Or None when there is not enough space . |
18,507 | def _divide_widths ( self , cli , width ) : if not self . children : return [ ] given_dimensions = self . get_dimensions ( cli ) if self . get_dimensions else None def get_dimension_for_child ( c , index ) : if given_dimensions and given_dimensions [ index ] is not None : return given_dimensions [ index ] else : return c . preferred_width ( cli , width ) dimensions = [ get_dimension_for_child ( c , index ) for index , c in enumerate ( self . children ) ] sum_dimensions = sum_layout_dimensions ( dimensions ) if sum_dimensions . min > width : return sizes = [ d . min for d in dimensions ] child_generator = take_using_weights ( items = list ( range ( len ( dimensions ) ) ) , weights = [ d . weight for d in dimensions ] ) i = next ( child_generator ) while sum ( sizes ) < min ( width , sum_dimensions . preferred ) : if sizes [ i ] < dimensions [ i ] . preferred : sizes [ i ] += 1 i = next ( child_generator ) while sum ( sizes ) < min ( width , sum_dimensions . max ) : if sizes [ i ] < dimensions [ i ] . max : sizes [ i ] += 1 i = next ( child_generator ) return sizes | Return the widths for all columns . Or None when there is not enough space . |
18,508 | def input_line_to_visible_line ( self ) : result = { } for k , v in self . visible_line_to_input_line . items ( ) : if v in result : result [ v ] = min ( result [ v ] , k ) else : result [ v ] = k return result | Return the dictionary mapping the line numbers of the input buffer to the lines of the screen . When a line spans several rows at the screen the first row appears in the dictionary . |
18,509 | def last_visible_line ( self , before_scroll_offset = False ) : if before_scroll_offset : return self . displayed_lines [ - 1 - self . applied_scroll_offsets . bottom ] else : return self . displayed_lines [ - 1 ] | Like first_visible_line but for the last visible line . |
18,510 | def center_visible_line ( self , before_scroll_offset = False , after_scroll_offset = False ) : return ( self . first_visible_line ( after_scroll_offset ) + ( self . last_visible_line ( before_scroll_offset ) - self . first_visible_line ( after_scroll_offset ) ) // 2 ) | Like first_visible_line but for the center visible line . |
18,511 | def _merge_dimensions ( dimension , preferred = None , dont_extend = False ) : dimension = dimension or LayoutDimension ( ) if dimension . preferred_specified : preferred = dimension . preferred if preferred is not None : if dimension . max : preferred = min ( preferred , dimension . max ) if dimension . min : preferred = max ( preferred , dimension . min ) if dont_extend and preferred is not None : max_ = min ( dimension . max , preferred ) else : max_ = dimension . max return LayoutDimension ( min = dimension . min , max = max_ , preferred = preferred , weight = dimension . weight ) | Take the LayoutDimension from this Window class and the received preferred size from the UIControl and return a LayoutDimension to report to the parent container . |
18,512 | def _get_ui_content ( self , cli , width , height ) : def get_content ( ) : return self . content . create_content ( cli , width = width , height = height ) key = ( cli . render_counter , width , height ) return self . _ui_content_cache . get ( key , get_content ) | Create a UIContent instance . |
18,513 | def _get_digraph_char ( self , cli ) : " Return `False`, or the Digraph symbol to be used. " if cli . quoted_insert : return '^' if cli . vi_state . waiting_for_digraph : if cli . vi_state . digraph_symbol1 : return cli . vi_state . digraph_symbol1 return '?' return False | Return False or the Digraph symbol to be used . |
18,514 | def _highlight_digraph ( self , cli , new_screen ) : digraph_char = self . _get_digraph_char ( cli ) if digraph_char : cpos = new_screen . cursor_position new_screen . data_buffer [ cpos . y ] [ cpos . x ] = _CHAR_CACHE [ digraph_char , Token . Digraph ] | When we are in Vi digraph mode put a question mark underneath the cursor . |
18,515 | def _show_input_processor_key_buffer ( self , cli , new_screen ) : key_buffer = cli . input_processor . key_buffer if key_buffer and _in_insert_mode ( cli ) and not cli . is_done : data = key_buffer [ - 1 ] . data if get_cwidth ( data ) == 1 : cpos = new_screen . cursor_position new_screen . data_buffer [ cpos . y ] [ cpos . x ] = _CHAR_CACHE [ data , Token . PartialKeyBinding ] | When the user is typing a key binding that consists of several keys display the last pressed key if the user is in insert mode and the key is meaningful to be displayed . E . g . Some people want to bind jj to escape in Vi insert mode . But the first j needs to be displayed in order to get some feedback . |
18,516 | def _copy_margin ( self , cli , lazy_screen , new_screen , write_position , move_x , width ) : xpos = write_position . xpos + move_x ypos = write_position . ypos margin_write_position = WritePosition ( xpos , ypos , width , write_position . height ) self . _copy_body ( cli , lazy_screen , new_screen , margin_write_position , 0 , width ) | Copy characters from the margin screen to the real screen . |
18,517 | def _mouse_handler ( self , cli , mouse_event ) : if mouse_event . event_type == MouseEventType . SCROLL_DOWN : self . _scroll_down ( cli ) elif mouse_event . event_type == MouseEventType . SCROLL_UP : self . _scroll_up ( cli ) | Mouse handler . Called when the UI control doesn t handle this particular event . |
18,518 | def _scroll_up ( self , cli ) : " Scroll window up. " info = self . render_info if info . vertical_scroll > 0 : if info . cursor_position . y >= info . window_height - 1 - info . configured_scroll_offsets . bottom : self . content . move_cursor_up ( cli ) self . vertical_scroll -= 1 | Scroll window up . |
18,519 | def update ( self , key_vals = None , overwrite = True ) : if not key_vals : return write_items = self . _update ( key_vals , overwrite ) self . _root . _root_set ( self . _path , write_items ) self . _root . _write ( commit = True ) | Locked keys will be overwritten unless overwrite = False . |
18,520 | def _encode ( self , value , path_from_root ) : if isinstance ( value , dict ) : json_value = { } for key , value in six . iteritems ( value ) : json_value [ key ] = self . _encode ( value , path_from_root + ( key , ) ) return json_value else : path = "." . join ( path_from_root ) if util . is_pandas_data_frame ( value ) : return util . encode_data_frame ( path , value , self . _run ) else : friendly_value , converted = util . json_friendly ( data_types . val_to_json ( path , value ) ) json_value , compressed = util . maybe_compress_summary ( friendly_value , util . get_h5_typename ( value ) ) if compressed : self . write_h5 ( path_from_root , friendly_value ) return json_value | Normalize compress and encode sub - objects for backend storage . |
18,521 | def scroll_one_line_down ( event ) : w = find_window_for_buffer_name ( event . cli , event . cli . current_buffer_name ) b = event . cli . current_buffer if w : if w . render_info : info = w . render_info if w . vertical_scroll < info . content_height - info . window_height : if info . cursor_position . y <= info . configured_scroll_offsets . top : b . cursor_position += b . document . get_cursor_down_position ( ) w . vertical_scroll += 1 | scroll_offset + = 1 |
18,522 | def scroll_one_line_up ( event ) : w = find_window_for_buffer_name ( event . cli , event . cli . current_buffer_name ) b = event . cli . current_buffer if w : if w . render_info : info = w . render_info if w . vertical_scroll > 0 : first_line_height = info . get_height_for_line ( info . first_visible_line ( ) ) cursor_up = info . cursor_position . y - ( info . window_height - 1 - first_line_height - info . configured_scroll_offsets . bottom ) for _ in range ( max ( 0 , cursor_up ) ) : b . cursor_position += b . document . get_cursor_up_position ( ) w . vertical_scroll -= 1 | scroll_offset - = 1 |
18,523 | def create_content ( self , cli , width , height ) : complete_state = cli . current_buffer . complete_state if complete_state : completions = complete_state . current_completions index = complete_state . complete_index menu_width = self . _get_menu_width ( width , complete_state ) menu_meta_width = self . _get_menu_meta_width ( width - menu_width , complete_state ) show_meta = self . _show_meta ( complete_state ) def get_line ( i ) : c = completions [ i ] is_current_completion = ( i == index ) result = self . _get_menu_item_tokens ( c , is_current_completion , menu_width ) if show_meta : result += self . _get_menu_item_meta_tokens ( c , is_current_completion , menu_meta_width ) return result return UIContent ( get_line = get_line , cursor_position = Point ( x = 0 , y = index or 0 ) , line_count = len ( completions ) , default_char = Char ( ' ' , self . token ) ) return UIContent ( ) | Create a UIContent object for this control . |
18,524 | def _get_menu_width ( self , max_width , complete_state ) : return min ( max_width , max ( self . MIN_WIDTH , max ( get_cwidth ( c . display ) for c in complete_state . current_completions ) + 2 ) ) | Return the width of the main column . |
18,525 | def _get_menu_meta_width ( self , max_width , complete_state ) : if self . _show_meta ( complete_state ) : return min ( max_width , max ( get_cwidth ( c . display_meta ) for c in complete_state . current_completions ) + 2 ) else : return 0 | Return the width of the meta column . |
18,526 | def create_content ( self , cli , width , height ) : complete_state = cli . current_buffer . complete_state column_width = self . _get_column_width ( complete_state ) self . _render_pos_to_completion = { } def grouper ( n , iterable , fillvalue = None ) : " grouper(3, 'ABCDEFG', 'x') args = [ iter ( iterable ) ] * n return zip_longest ( fillvalue = fillvalue , * args ) def is_current_completion ( completion ) : " Returns True when this completion is the currently selected one. " return complete_state . complete_index is not None and c == complete_state . current_completion HORIZONTAL_MARGIN_REQUIRED = 3 if complete_state : column_width = min ( width - HORIZONTAL_MARGIN_REQUIRED , column_width ) if column_width > self . suggested_max_column_width : column_width //= ( column_width // self . suggested_max_column_width ) visible_columns = max ( 1 , ( width - self . _required_margin ) // column_width ) columns_ = list ( grouper ( height , complete_state . current_completions ) ) rows_ = list ( zip ( * columns_ ) ) selected_column = ( complete_state . complete_index or 0 ) // height self . scroll = min ( selected_column , max ( self . scroll , selected_column - visible_columns + 1 ) ) render_left_arrow = self . scroll > 0 render_right_arrow = self . scroll < len ( rows_ [ 0 ] ) - visible_columns tokens_for_line = [ ] for row_index , row in enumerate ( rows_ ) : tokens = [ ] middle_row = row_index == len ( rows_ ) // 2 if render_left_arrow : tokens += [ ( Token . Scrollbar , '<' if middle_row else ' ' ) ] for column_index , c in enumerate ( row [ self . scroll : ] [ : visible_columns ] ) : if c is not None : tokens += self . _get_menu_item_tokens ( c , is_current_completion ( c ) , column_width ) for x in range ( column_width ) : self . _render_pos_to_completion [ ( column_index * column_width + x , row_index ) ] = c else : tokens += [ ( self . token . Completion , ' ' * column_width ) ] tokens += [ ( self . token . 
Completion , ' ' ) ] if render_right_arrow : tokens += [ ( Token . Scrollbar , '>' if middle_row else ' ' ) ] tokens_for_line . append ( tokens ) else : tokens = [ ] self . _rendered_rows = height self . _rendered_columns = visible_columns self . _total_columns = len ( columns_ ) self . _render_left_arrow = render_left_arrow self . _render_right_arrow = render_right_arrow self . _render_width = column_width * visible_columns + render_left_arrow + render_right_arrow + 1 def get_line ( i ) : return tokens_for_line [ i ] return UIContent ( get_line = get_line , line_count = len ( rows_ ) ) | Create a UIContent object for this menu . |
18,527 | def _get_column_width ( self , complete_state ) : return max ( get_cwidth ( c . display ) for c in complete_state . current_completions ) + 1 | Return the width of each column . |
18,528 | def mouse_handler ( self , cli , mouse_event ) : b = cli . current_buffer def scroll_left ( ) : b . complete_previous ( count = self . _rendered_rows , disable_wrap_around = True ) self . scroll = max ( 0 , self . scroll - 1 ) def scroll_right ( ) : b . complete_next ( count = self . _rendered_rows , disable_wrap_around = True ) self . scroll = min ( self . _total_columns - self . _rendered_columns , self . scroll + 1 ) if mouse_event . event_type == MouseEventType . SCROLL_DOWN : scroll_right ( ) elif mouse_event . event_type == MouseEventType . SCROLL_UP : scroll_left ( ) elif mouse_event . event_type == MouseEventType . MOUSE_UP : x = mouse_event . position . x y = mouse_event . position . y if x == 0 : if self . _render_left_arrow : scroll_left ( ) elif x == self . _render_width - 1 : if self . _render_right_arrow : scroll_right ( ) else : completion = self . _render_pos_to_completion . get ( ( x , y ) ) if completion : b . apply_completion ( completion ) | Handle scoll and click events . |
18,529 | def preferred_width ( self , cli , max_available_width ) : if cli . current_buffer . complete_state : state = cli . current_buffer . complete_state return 2 + max ( get_cwidth ( c . display_meta ) for c in state . current_completions ) else : return 0 | Report the width of the longest meta text as the preferred width of this control . |
18,530 | def set_text ( self , text ) : assert isinstance ( text , six . string_types ) self . set_data ( ClipboardData ( text ) ) | Shortcut for setting plain text on clipboard . |
18,531 | def reset ( self ) : self . counter += 1 local_counter = self . counter def timer_timeout ( ) : if self . counter == local_counter and self . running : self . callback ( ) self . loop . call_later ( self . timeout , timer_timeout ) | Reset the timeout . Starts a new timer . |
18,532 | def sentry_reraise ( exc ) : sentry_exc ( exc ) six . reraise ( type ( exc ) , exc , sys . exc_info ( ) [ 2 ] ) | Re - raise an exception after logging it to Sentry |
18,533 | def vendor_import ( name ) : parent_dir = os . path . abspath ( os . path . dirname ( __file__ ) ) vendor_dir = os . path . join ( parent_dir , 'vendor' ) sys . path . insert ( 1 , vendor_dir ) return import_module ( name ) | This enables us to use the vendor directory for packages we don t depend on |
18,534 | def ensure_matplotlib_figure ( obj ) : import matplotlib from matplotlib . figure import Figure if obj == matplotlib . pyplot : obj = obj . gcf ( ) elif not isinstance ( obj , Figure ) : if hasattr ( obj , "figure" ) : obj = obj . figure if not isinstance ( obj , Figure ) : raise ValueError ( "Only matplotlib.pyplot or matplotlib.pyplot.Figure objects are accepted." ) if not obj . gca ( ) . has_data ( ) : raise ValueError ( "You attempted to log an empty plot, pass a figure directly or ensure the global plot isn't closed." ) return obj | Extract the current figure from a matplotlib object or return the object if it s a figure . raises ValueError if the object can t be converted . |
18,535 | def json_friendly ( obj ) : converted = True typename = get_full_typename ( obj ) if is_tf_tensor_typename ( typename ) : obj = obj . eval ( ) elif is_pytorch_tensor_typename ( typename ) : try : if obj . requires_grad : obj = obj . detach ( ) except AttributeError : pass try : obj = obj . data except RuntimeError : pass if obj . size ( ) : obj = obj . numpy ( ) else : return obj . item ( ) , True if np and isinstance ( obj , np . ndarray ) : if obj . size == 1 : obj = obj . flatten ( ) [ 0 ] elif obj . size <= 32 : obj = obj . tolist ( ) elif np and isinstance ( obj , np . generic ) : obj = obj . item ( ) elif isinstance ( obj , bytes ) : obj = obj . decode ( 'utf-8' ) elif isinstance ( obj , ( datetime , date ) ) : obj = obj . isoformat ( ) else : converted = False if getsizeof ( obj ) > VALUE_BYTES_LIMIT : logger . warning ( "Object %s is %i bytes" , obj , getsizeof ( obj ) ) return obj , converted | Convert an object into something that s more becoming of JSON |
18,536 | def launch_browser ( attempt_launch_browser = True ) : _DISPLAY_VARIABLES = [ 'DISPLAY' , 'WAYLAND_DISPLAY' , 'MIR_SOCKET' ] _WEBBROWSER_NAMES_BLACKLIST = [ 'www-browser' , 'lynx' , 'links' , 'elinks' , 'w3m' ] import webbrowser launch_browser = attempt_launch_browser if launch_browser : if ( 'linux' in sys . platform and not any ( os . getenv ( var ) for var in _DISPLAY_VARIABLES ) ) : launch_browser = False try : browser = webbrowser . get ( ) if ( hasattr ( browser , 'name' ) and browser . name in _WEBBROWSER_NAMES_BLACKLIST ) : launch_browser = False except webbrowser . Error : launch_browser = False return launch_browser | Decide if we should launch a browser |
18,537 | def parse_tfjob_config ( ) : if os . getenv ( "TF_CONFIG" ) : try : return json . loads ( os . environ [ "TF_CONFIG" ] ) except ValueError : return False else : return False | Attempts to parse TFJob config returning False if it can t find it |
18,538 | def parse_sm_config ( ) : sagemaker_config = "/opt/ml/input/config/hyperparameters.json" if os . path . exists ( sagemaker_config ) : conf = { } conf [ "sagemaker_training_job_name" ] = os . getenv ( 'TRAINING_JOB_NAME' ) for k , v in six . iteritems ( json . load ( open ( sagemaker_config ) ) ) : cast = v . strip ( '"' ) if os . getenv ( "WANDB_API_KEY" ) is None and k == "wandb_api_key" : os . environ [ "WANDB_API_KEY" ] = cast else : if re . match ( r'^[-\d]+$' , cast ) : cast = int ( cast ) elif re . match ( r'^[-.\d]+$' , cast ) : cast = float ( cast ) conf [ k ] = cast return conf else : return False | Attempts to parse SageMaker configuration returning False if it can t find it |
18,539 | def write_netrc ( host , entity , key ) : if len ( key ) != 40 : click . secho ( 'API-key must be exactly 40 characters long: {} ({} chars)' . format ( key , len ( key ) ) ) return None try : normalized_host = host . split ( "/" ) [ - 1 ] . split ( ":" ) [ 0 ] print ( "Appending key for %s to your netrc file: %s" % ( normalized_host , os . path . expanduser ( '~/.netrc' ) ) ) machine_line = 'machine %s' % normalized_host path = os . path . expanduser ( '~/.netrc' ) orig_lines = None try : with open ( path ) as f : orig_lines = f . read ( ) . strip ( ) . split ( '\n' ) except ( IOError , OSError ) as e : pass with open ( path , 'w' ) as f : if orig_lines : skip = 0 for line in orig_lines : if machine_line in line : skip = 2 elif skip : skip -= 1 else : f . write ( '%s\n' % line ) f . write ( textwrap . dedent ( ) . format ( host = normalized_host , entity = entity , key = key ) ) os . chmod ( os . path . expanduser ( '~/.netrc' ) , stat . S_IRUSR | stat . S_IWUSR ) return True except IOError as e : click . secho ( "Unable to read ~/.netrc" , fg = "red" ) return None | Add our host and key to . netrc |
18,540 | def request_with_retry ( func , * args , ** kwargs ) : max_retries = kwargs . pop ( 'max_retries' , 30 ) sleep = 2 retry_count = 0 while True : try : response = func ( * args , ** kwargs ) response . raise_for_status ( ) return response except ( requests . exceptions . ConnectionError , requests . exceptions . HTTPError , requests . exceptions . Timeout ) as e : if retry_count == max_retries : return e retry_count += 1 delay = sleep + random . random ( ) * 0.25 * sleep if isinstance ( e , requests . exceptions . HTTPError ) and e . response . status_code == 429 : logger . info ( "Rate limit exceeded, retrying in %s seconds" % delay ) else : logger . warning ( 'requests_with_retry encountered retryable exception: %s. args: %s, kwargs: %s' , e , args , kwargs ) time . sleep ( delay ) sleep *= 2 if sleep > MAX_SLEEP_SECONDS : sleep = MAX_SLEEP_SECONDS except requests . exceptions . RequestException as e : logger . error ( response . json ( ) [ 'error' ] ) logger . exception ( 'requests_with_retry encountered unretryable exception: %s' , e ) return e | Perform a requests http call retrying with exponential backoff . |
18,541 | def find_runner ( program ) : if os . path . isfile ( program ) and not os . access ( program , os . X_OK ) : try : opened = open ( program ) except PermissionError : return None first_line = opened . readline ( ) . strip ( ) if first_line . startswith ( '#!' ) : return shlex . split ( first_line [ 2 : ] ) if program . endswith ( '.py' ) : return [ sys . executable ] return None | Return a command that will run program . |
18,542 | def downsample ( values , target_length ) : assert target_length > 1 values = list ( values ) if len ( values ) < target_length : return values ratio = float ( len ( values ) - 1 ) / ( target_length - 1 ) result = [ ] for i in range ( target_length ) : result . append ( values [ int ( i * ratio ) ] ) return result | Downsamples 1d values to target_length including start and end . |
18,543 | def image_from_docker_args ( args ) : bool_args = [ "-t" , "--tty" , "--rm" , "--privileged" , "--oom-kill-disable" , "--no-healthcheck" , "-i" , "--interactive" , "--init" , "--help" , "--detach" , "-d" , "--sig-proxy" , "-it" , "-itd" ] last_flag = - 2 last_arg = "" possible_images = [ ] if len ( args ) > 0 and args [ 0 ] == "run" : args . pop ( 0 ) for i , arg in enumerate ( args ) : if arg . startswith ( "-" ) : last_flag = i last_arg = arg elif "@sha256:" in arg : possible_images . append ( arg ) elif docker_image_regex ( arg ) : if last_flag == i - 2 : possible_images . append ( arg ) elif "=" in last_arg : possible_images . append ( arg ) elif last_arg in bool_args and last_flag == i - 1 : possible_images . append ( arg ) most_likely = None for img in possible_images : if ":" in img or "@" in img or "/" in img : most_likely = img break if most_likely == None and len ( possible_images ) > 0 : most_likely = possible_images [ 0 ] return most_likely | This scans docker run args and attempts to find the most likely docker image argument . If excludes any argments that start with a dash and the argument after it if it isn t a boolean switch . This can be improved we currently fallback gracefully when this fails . |
18,544 | def load_yaml ( file ) : if hasattr ( yaml , "full_load" ) : return yaml . full_load ( file ) else : return yaml . load ( file ) | If pyyaml > 5 . 1 use full_load to avoid warning |
18,545 | def image_id_from_k8s ( ) : token_path = "/var/run/secrets/kubernetes.io/serviceaccount/token" if os . path . exists ( token_path ) : k8s_server = "https://{}:{}/api/v1/namespaces/default/pods/{}" . format ( os . getenv ( "KUBERNETES_SERVICE_HOST" ) , os . getenv ( "KUBERNETES_PORT_443_TCP_PORT" ) , os . getenv ( "HOSTNAME" ) ) try : res = requests . get ( k8s_server , verify = "/var/run/secrets/kubernetes.io/serviceaccount/ca.crt" , timeout = 3 , headers = { "Authorization" : "Bearer {}" . format ( open ( token_path ) . read ( ) ) } ) res . raise_for_status ( ) except requests . RequestException : return None try : return res . json ( ) [ "status" ] [ "containerStatuses" ] [ 0 ] [ "imageID" ] . strip ( "docker-pullable://" ) except ( ValueError , KeyError , IndexError ) : logger . exception ( "Error checking kubernetes for image id" ) return None | Pings the k8s metadata service for the image id |
18,546 | def stopwatch_now ( ) : if six . PY2 : now = time . time ( ) else : now = time . monotonic ( ) return now | Get a timevalue for interval comparisons |
18,547 | def focus ( self , cli , buffer_name ) : assert isinstance ( buffer_name , six . text_type ) self . focus_stack = [ buffer_name ] | Focus the buffer with the given name . |
18,548 | def push_focus ( self , cli , buffer_name ) : assert isinstance ( buffer_name , six . text_type ) self . focus_stack . append ( buffer_name ) | Push buffer on the focus stack . |
18,549 | def pop_focus ( self , cli ) : if len ( self . focus_stack ) > 1 : self . focus_stack . pop ( ) else : raise IndexError ( 'Cannot pop last item from the focus stack.' ) | Pop buffer from the focus stack . |
18,550 | def read ( self , size = - 1 ) : bites = self . file . read ( size ) self . bytes_read += len ( bites ) self . callback ( len ( bites ) , self . bytes_read ) return bites | Read bytes and call the callback |
18,551 | def on_train_begin ( self , ** kwargs ) : "Call watch method to log model topology, gradients & weights" super ( ) . on_train_begin ( ) if not WandbCallback . watch_called : WandbCallback . watch_called = True wandb . watch ( self . learn . model , log = self . log ) | Call watch method to log model topology gradients & weights |
18,552 | def on_epoch_end ( self , epoch , smooth_loss , last_metrics , ** kwargs ) : "Logs training loss, validation loss and custom metrics & log prediction samples & save model" if self . save_model : current = self . get_monitor_value ( ) if current is not None and self . operator ( current , self . best ) : print ( f'Better model found at epoch {epoch} with {self.monitor} value: {current}.' ) self . best = current with self . model_path . open ( 'wb' ) as model_file : self . learn . save ( model_file ) if self . show_results : self . learn . show_results ( ) wandb . log ( { "Prediction Samples" : plt } , commit = False ) logs = { name : stat for name , stat in list ( zip ( self . learn . recorder . names , [ epoch , smooth_loss ] + last_metrics ) ) [ 1 : ] } wandb . log ( logs ) if self . show_results : plt . close ( 'all' ) | Logs training loss validation loss and custom metrics & log prediction samples & save model |
def column(self, key):
    """Yield the value of *key* for each row that has it; rows missing the
    key are skipped silently."""
    for entry in self.rows:
        if key in entry:
            yield entry[key]
def add(self, row={}, step=None):
    """Adds or updates a history step.

    With step=None the row merges into the current step (written immediately
    unless batched). An explicit step may only equal or advance the counter.
    """
    # NOTE(review): collections.Mapping is the pre-3.3 spelling
    # (collections.abc.Mapping today); also row={} is a mutable default —
    # harmless here because it is never mutated, but worth confirming.
    if not isinstance(row, collections.Mapping):
        raise wandb.Error('history.add expects dict-like object')
    if step is None:
        self.update(row)
        if not self.batched:
            self._write()
    else:
        if not isinstance(step, numbers.Integral):
            raise wandb.Error("Step must be an integer, not {}".format(step))
        elif step < self._steps:
            # Rewriting history isn't supported; drop the row with a warning.
            warnings.warn("Adding to old History rows isn't currently supported. Dropping.", wandb.WandbWarning)
            return
        elif step == self._steps:
            pass  # same step: just merge into the current row below
        elif self.batched:
            raise wandb.Error("Can't log to a particular History step ({}) while in batched mode.".format(step))
        else:
            # Advancing: flush the current row first, then move the counter.
            self._write()
            self._steps = step
        self.update(row)
def update(self, new_vals):
    """Merge *new_vals* into the current step's row without writing to disk.

    Duplicate keys trigger a warning; the last write wins.
    """
    for key, value in six.iteritems(new_vals):
        key = key.strip()
        if key in self.row:
            warnings.warn(
                "Adding history key ({}) that is already set in this step".format(key),
                wandb.WandbWarning)
        self.row[key] = value
def step(self, compute=True):
    """Context manager to gradually build a history row, then commit it at the end."""
    if self.batched:
        raise wandb.Error("Nested History step contexts aren't supported")
    self.batched = True
    self.compute = compute
    # Hand the History object to the `with` body; everything after the yield
    # runs when the context exits.
    yield self
    if compute:
        self._write()
    # NOTE(review): this rebinds the *local* `compute`, not self.compute, and
    # self.batched is never reset to False — looks like a transcription loss
    # (likely meant `self.compute = True; self.batched = False`); confirm
    # against upstream before relying on this.
    compute = True
def _index(self, row):
    """Append *row* to the in-memory row list without writing it to disk."""
    self.rows.append(row)
    # Updating a set with a dict iterates its keys.
    self._keys.update(row)
    self._steps += 1
def wandb_pty(resize=True):
    """Open a PTY in raw mode and optionally register for window-size changes.

    Returns (master_fd, slave_fd).
    """
    master_fd, slave_fd = pty.openpty()
    # Raw mode can fail when not attached to a real terminal; best effort.
    try:
        tty.setraw(master_fd)
    except termios.error:
        pass
    # NOTE(review): SIGWINCH_HANDLER is a module global presumably installed
    # elsewhere; only used when resize is requested and a handler exists.
    if resize and SIGWINCH_HANDLER is not None:
        SIGWINCH_HANDLER.add_fd(master_fd)
    return master_fd, slave_fd
def spawn_reader_writer(get_data_fn, put_data_fn):
    """Start a daemon thread pumping data from a source into a sink.

    Each chunk from get_data_fn is forwarded to put_data_fn, including the
    final falsy chunk (e.g. b'' at EOF), after which the loop stops.
    Returns the started thread.
    """
    def _pump():
        while True:
            chunk = get_data_fn()
            put_data_fn(chunk)
            if not chunk:
                return

    thread = threading.Thread(target=_pump)
    thread.daemon = True
    thread.start()
    return thread
def restore(self):
    """Undo the redirection: point the original fd back at the real file."""
    # Flush anything buffered in the redirect target before switching back.
    self.redir_file.flush()
    os.dup2(self.orig_file.fileno(), self._from_fd)
def negotiate(self, data):
    """Handle a telnet subnegotiation payload (command byte + data)."""
    command, payload = data[:1], data[1:]
    assert isinstance(command, bytes)
    if command == NAWS:
        # Window-size update from the client.
        self.naws(payload)
    else:
        logger.info('Negotiate (%r got bytes)', len(data))
def _parse_coroutine(self):
    """Parser state machine. Every `yield` expression returns the next byte."""
    while True:
        d = yield
        if d == int2byte(0):
            pass  # NUL bytes are padding in the telnet protocol; drop them.
        elif d == IAC:
            # "Interpret As Command" escape: the next byte decides the action.
            d2 = yield
            if d2 == IAC:
                # Escaped 0xFF: a literal data byte.
                self.received_data(d2)
            elif d2 in (NOP, DM, BRK, IP, AO, AYT, EC, EL, GA):
                # Single-byte commands with no operand.
                self.command_received(d2, None)
            elif d2 in (DO, DONT, WILL, WONT):
                # Option negotiation: one operand byte follows.
                d3 = yield
                self.command_received(d2, d3)
            elif d2 == SB:
                # Subnegotiation: collect bytes until IAC SE.
                data = []
                while True:
                    d3 = yield
                    if d3 == IAC:
                        d4 = yield
                        if d4 == SE:
                            break
                        else:
                            data.append(d4)
                    else:
                        data.append(d3)
                self.negotiate(b''.join(data))
        else:
            # Plain data byte.
            self.received_data(d)
def feed(self, data):
    """Feed raw bytes into the telnet parser coroutine, one byte at a time."""
    assert isinstance(data, binary_type)
    send = self._parser.send
    for byte in iterbytes(data):
        send(int2byte(byte))
def create(cls, api, run_id=None, project=None, username=None):
    """Create a run for the given project via the upsertBucket mutation and
    return a Run wrapper around the response."""
    run_id = run_id or util.generate_id()
    project = project or api.settings.get("project")
    # NOTE(review): the GraphQL mutation document appears to have been lost —
    # gql() is called with no query string, which cannot work; restore the
    # upsertBucket mutation text from upstream before relying on this.
    mutation = gql()
    variables = {'entity': username, 'project': project, 'name': run_id}
    res = api.client.execute(mutation, variable_values=variables)
    res = res['upsertBucket']['bucket']
    # Build a Run with placeholder config/metrics; the backend fills them in.
    return Run(api.client,
               res["project"]["entity"]["name"],
               res["project"]["name"],
               res["name"],
               {"id": res["id"], "config": "{}", "systemMetrics": "{}",
                "summaryMetrics": "{}", "tags": [], "description": None,
                "state": "running"})
def _exec(self, query, **kwargs):
    """Execute *query* against the cloud backend.

    The run's entity/project/name are supplied as default variables; any
    keyword arguments extend or override them.
    """
    variables = dict(entity=self.username, project=self.project, name=self.name)
    variables.update(kwargs)
    return self.client.execute(query, variable_values=variables)
def _get_closest_ansi_color(r, g, b, exclude=()):
    """Return the name of the ANSI color closest to the given RGB triple.

    Names listed in *exclude* (a tuple) are never returned.
    """
    assert isinstance(exclude, tuple)
    # Saturated colors should never collapse to a gray/black/white.
    if abs(r - g) + abs(g - b) + abs(b - r) > 30:
        exclude = exclude + ('ansilightgray', 'ansidarkgray', 'ansiwhite', 'ansiblack')
    best_name = 'ansidefault'
    best_dist = 257 * 257 * 3  # larger than any possible squared distance
    for name, (r2, g2, b2) in ANSI_COLORS_TO_RGB.items():
        if name == 'ansidefault' or name in exclude:
            continue
        dist = (r - r2) ** 2 + (g - g2) ** 2 + (b - b2) ** 2
        if dist < best_dist:
            best_dist = dist
            best_name = name
    return best_name
def _get_size(fileno):
    """Return (rows, columns) of the pseudo terminal behind *fileno*."""
    import fcntl
    import termios
    # TIOCGWINSZ fills four shorts: rows, cols, xpixel, ypixel.
    winsz = array.array(b'h' if six.PY2 else u'h', [0, 0, 0, 0])
    fcntl.ioctl(fileno, termios.TIOCGWINSZ, winsz)
    return winsz[0], winsz[1]
def _colors_to_code(self, fg_color, bg_color):
    " Return a tuple with the vt100 values that represent this color. "
    # One-element list used as a mutable cell: the nested `get` records which
    # ANSI foreground was chosen so the background lookup can exclude it
    # (keeps text readable when both map to the same 16-color slot).
    fg_ansi = [()]

    def get(color, bg):
        # Resolve one color (fg or bg) to a tuple of vt100 code components.
        table = BG_ANSI_COLORS if bg else FG_ANSI_COLORS
        if color is None:
            return ()
        elif color in table:
            # Direct ANSI color name.
            return (table[color],)
        else:
            try:
                rgb = self._color_name_to_rgb(color)
            except ValueError:
                # Unknown color name: emit nothing rather than failing.
                return ()
            if self.ansi_colors_only():
                if bg:
                    # Avoid fg==bg collisions unless explicitly identical.
                    if fg_color != bg_color:
                        exclude = (fg_ansi[0],)
                    else:
                        exclude = ()
                    code, name = _16_bg_colors.get_code(rgb, exclude=exclude)
                    return (code,)
                else:
                    code, name = _16_fg_colors.get_code(rgb)
                    fg_ansi[0] = name
                    return (code,)
            elif self.true_color:
                # 24-bit color: 38/48;2;r;g;b.
                r, g, b = rgb
                return (48 if bg else 38, 2, r, g, b)
            else:
                # 256-color palette: 38/48;5;index.
                return (48 if bg else 38, 5, _256_colors[rgb])
    result = []
    result.extend(get(fg_color, False))
    result.extend(get(bg_color, True))
    # NOTE(review): returns a (lazy on py3) map object of strings, not a list.
    return map(six.text_type, result)
def set_attributes(self, attrs):
    """Emit the escape sequence for *attrs*, picking the appropriate cache."""
    use_true_color = self.true_color() and not self.ansi_colors_only()
    cache = (self._escape_code_cache_true_color if use_true_color
             else self._escape_code_cache)
    self.write_raw(cache[attrs])
def watch(models, criterion=None, log="gradients", log_freq=100):
    """Hook into torch model(s) to collect gradients and the topology.

    log: one of "gradients" (default), "parameters", "all", or None.
    log_freq: log every N batches.
    Returns a list of TorchGraph objects, one per model.
    """
    global watch_called
    if run is None:
        raise ValueError("You must call `wandb.init` before calling watch")
    # Hooks can only be installed once per process.
    if watch_called:
        raise ValueError("You can only call `wandb.watch` once per process. If you want to watch multiple models, pass them in as a tuple.")
    watch_called = True
    # Translate the `log` mode into the two boolean switches.
    log_parameters = False
    log_gradients = True
    if log == "all":
        log_parameters = True
    elif log == "parameters":
        log_parameters = True
        log_gradients = False
    elif log is None:
        log_gradients = False
    if not isinstance(models, (tuple, list)):
        models = (models,)
    graphs = []
    prefix = ''
    for idx, model in enumerate(models):
        # The first model logs without a prefix; later ones get "graph_N".
        if idx > 0:
            prefix = "graph_%i" % idx
        run.history.torch.add_log_hooks_to_pytorch_module(
            model, log_parameters=log_parameters, log_gradients=log_gradients,
            prefix=prefix, log_freq=log_freq)
        graph = wandb_torch.TorchGraph.hook_torch(model, criterion, graph_idx=idx)
        graphs.append(graph)
    return graphs
def restore(name, run_path=None, replace=False, root="."):
    """Download *name* from cloud storage into the run directory if absent.

    name: file name to restore.
    run_path: optional "entity/project/run_id" path; defaults to the global run.
    replace: when True, always re-download even if the file exists locally.
    root: download directory (ignored when a global run provides run.dir).

    Returns an open file object for the local file, or None when the run
    has no file by that name.
    Raises ValueError when neither run_path nor a global run is available.
    """
    if run_path is None and run is None:
        raise ValueError("You must call `wandb.init` before calling restore or specify a run_path")
    api = Api()
    api_run = api.run(run_path or run.path)
    root = run.dir if run else root
    path = os.path.join(root, name)
    # BUG FIX: the original assigned os.path.exists(...) -- a bool -- to
    # `path` and then called open(path, "r"), which could never succeed.
    # Keep the joined path and test existence separately.
    if os.path.exists(path) and not replace:
        return open(path, "r")
    files = api_run.files([name])
    if len(files) == 0:
        return None
    return files[0].download(root=root, replace=True)
def monitor(options={}):
    """Starts syncing with W&B if you're in Jupyter; displays live charts.

    Kept as a context manager for legacy reasons — the `with` form is a no-op
    and deprecated.
    """
    try:
        from IPython.display import display
    except ImportError:
        # Outside IPython: make display a no-op so Monitor still constructs.
        def display(stuff):
            return None

    class Monitor():
        def __init__(self, options={}):
            # Only functional inside a Jupyter notebook (WANDB_JUPYTER set).
            if os.getenv("WANDB_JUPYTER"):
                display(jupyter.Run())
            else:
                self.rm = False
                termerror("wandb.monitor is only functional in Jupyter notebooks")

        def __enter__(self):
            termlog("DEPRECATED: with wandb.monitor(): is deprecated, just call wandb.monitor() to see live results.")
            pass

        def __exit__(self, *args):
            pass
    return Monitor(options)
def log(row=None, commit=True, *args, **kargs):
    """Log a dict to the global run's history.

    With commit=False, values accumulate into the current step until a later
    committing call writes them.
    """
    if run is None:
        raise ValueError("You must call `wandb.init` in the same process before calling log")
    if row is None:
        row = {}
    recorder = run.history.add if commit else run.history.update
    recorder(row, *args, **kargs)
def reset_env(exclude=()):
    """Remove WANDB_* environment variables (used in Jupyter notebooks).

    exclude: iterable of variable names to keep.
    Returns True when variables were cleared (a run had been initialized),
    False otherwise.
    """
    # FIX: the original used a mutable default argument (exclude=[]); it was
    # never mutated, but an immutable tuple default is the safe idiom and is
    # behaviorally identical for every caller.
    if not os.getenv(env.INITED):
        return False
    # Snapshot the keys before deleting to avoid mutating while iterating.
    for key in list(os.environ):
        if key.startswith('WANDB_') and key not in exclude:
            del os.environ[key]
    return True
def try_to_set_up_global_logging():
    """Try to set up the global W&B debug log.

    Adds a stream handler in debug mode and always attempts a file handler
    writing GLOBAL_LOG_FNAME. Returns False when the log file cannot be
    opened, True otherwise.
    """
    root = logging.getLogger()
    root.setLevel(logging.DEBUG)
    formatter = logging.Formatter(
        '%(asctime)s %(levelname)-7s %(threadName)-10s:%(process)d '
        '[%(filename)s:%(funcName)s():%(lineno)s] %(message)s')

    def _attach(handler):
        # Common setup shared by both handlers.
        handler.setLevel(logging.DEBUG)
        handler.setFormatter(formatter)
        root.addHandler(handler)

    if env.is_debug():
        _attach(logging.StreamHandler())
    try:
        _attach(logging.FileHandler(GLOBAL_LOG_FNAME, mode='w'))
    except IOError as e:
        termerror('Failed to set up logging: {}'.format(e))
        return False
    return True
def sagemaker_auth(overrides=None, path="."):
    """Write a secrets.env file with the W&B ApiKey and any extra secrets.

    overrides: optional dict of env-var name -> value; WANDB_API_KEY is
        filled in from the current Api() when not supplied. A dict passed by
        the caller is still updated in place, as before.
    path: directory in which to create secrets.env.

    Raises ValueError when no API key can be found.
    """
    # BUG FIX: the original used a mutable default argument (overrides={})
    # *and mutated it*, so the resolved API key leaked into the shared
    # default dict across calls. A None sentinel fixes that while keeping
    # the behavior identical for callers that pass their own dict.
    if overrides is None:
        overrides = {}
    api_key = overrides.get(env.API_KEY, Api().api_key)
    if api_key is None:
        raise ValueError("Can't find W&B ApiKey, set the WANDB_API_KEY env variable or run `wandb login`")
    overrides[env.API_KEY] = api_key
    with open(os.path.join(path, "secrets.env"), "w") as file:
        for k, v in six.iteritems(overrides):
            file.write("{}={}\n".format(k, v))
def style_from_dict(style_dict, include_defaults=True):
    """Create a Style instance from a mapping of Token -> style string."""
    assert isinstance(style_dict, Mapping)
    if include_defaults:
        # Layer the caller's styles on top of the built-in extensions.
        s2 = {}
        s2.update(DEFAULT_STYLE_EXTENSIONS)
        s2.update(style_dict)
        style_dict = s2
    token_to_attrs = {}
    # Sorting guarantees parent tokens are processed before their children,
    # so inheritance lookups below always find the ancestor first.
    for ttype, styledef in sorted(style_dict.items()):
        attrs = DEFAULT_ATTRS
        if 'noinherit' not in styledef:
            # Walk up the token hierarchy to the nearest styled ancestor.
            for i in range(1, len(ttype) + 1):
                try:
                    attrs = token_to_attrs[ttype[:-i]]
                except KeyError:
                    pass
                else:
                    break
        # Apply each whitespace-separated style fragment in order.
        for part in styledef.split():
            if part == 'noinherit':
                pass
            elif part == 'bold':
                attrs = attrs._replace(bold=True)
            elif part == 'nobold':
                attrs = attrs._replace(bold=False)
            elif part == 'italic':
                attrs = attrs._replace(italic=True)
            elif part == 'noitalic':
                attrs = attrs._replace(italic=False)
            elif part == 'underline':
                attrs = attrs._replace(underline=True)
            elif part == 'nounderline':
                attrs = attrs._replace(underline=False)
            elif part == 'blink':
                attrs = attrs._replace(blink=True)
            elif part == 'noblink':
                attrs = attrs._replace(blink=False)
            elif part == 'reverse':
                attrs = attrs._replace(reverse=True)
            elif part == 'noreverse':
                attrs = attrs._replace(reverse=False)
            elif part in ('roman', 'sans', 'mono'):
                pass  # Pygments compatibility; ignored here.
            elif part.startswith('border:'):
                pass  # Pygments compatibility; ignored here.
            elif part.startswith('bg:'):
                attrs = attrs._replace(bgcolor=_colorformat(part[3:]))
            else:
                attrs = attrs._replace(color=_colorformat(part))
        token_to_attrs[ttype] = attrs
    return _StyleFromDict(token_to_attrs)
def rename_file(self, old_save_name, new_save_name, new_path):
    """Re-key our progress tracking from the old name to the new name/path.

    This only updates the name and path we use to track the file's size and
    upload progress; nothing is renamed on the backend or re-uploaded.
    """
    # Drop the stale entry if we were tracking it.
    self._files.pop(old_save_name, None)
    self.update_file(new_save_name, new_path)
def register(name):
    """Decorator factory: store the decorated handler under *name* in the
    _readline_commands registry and return it unchanged."""
    assert isinstance(name, six.text_type)

    def decorator(handler):
        assert callable(handler)
        _readline_commands[name] = handler
        return handler

    return decorator
def beginning_of_line(event):
    """Move the cursor to the first character of the current line."""
    buffer = event.current_buffer
    offset = buffer.document.get_start_of_line_position(after_whitespace=False)
    buffer.cursor_position += offset
def end_of_line(event):
    """Move the cursor to the end of the current line."""
    buffer = event.current_buffer
    offset = buffer.document.get_end_of_line_position()
    buffer.cursor_position += offset
def forward_char(event):
    """Move the cursor *arg* characters to the right."""
    buffer = event.current_buffer
    step = buffer.document.get_cursor_right_position(count=event.arg)
    buffer.cursor_position += step
def backward_char(event):
    """Move the cursor *arg* characters to the left."""
    buffer = event.current_buffer
    step = buffer.document.get_cursor_left_position(count=event.arg)
    buffer.cursor_position += step
def forward_word(event):
    """Move forward to the end of the next word (letters and digits)."""
    buffer = event.current_buffer
    offset = buffer.document.find_next_word_ending(count=event.arg)
    # No next word (offset falsy/None): leave the cursor where it is.
    if offset:
        buffer.cursor_position += offset
def backward_word(event):
    """Move back to the start of the current or previous word."""
    buffer = event.current_buffer
    offset = buffer.document.find_previous_word_beginning(count=event.arg)
    # No previous word (offset falsy/None): leave the cursor where it is.
    if offset:
        buffer.cursor_position += offset
def accept_line(event):
    """Accept the line regardless of where the cursor is."""
    buffer = event.current_buffer
    buffer.accept_action.validate_and_handle(event.cli, buffer)
def end_of_history(event):
    """Move to the end of the input history, i.e. the line being entered."""
    buff = event.current_buffer
    # A huge count guarantees we land on the newest entry.
    buff.history_forward(count=10 ** 100)
    buff.go_to_history(len(buff._working_lines) - 1)
def reverse_search_history(event):
    """Start incremental search backwards through the history.

    Searching begins at the current line and moves up as necessary.
    """
    event.cli.current_search_state.direction = IncrementalSearchDirection.BACKWARD
    event.cli.push_focus(SEARCH_BUFFER)
def delete_char(event):
    """Delete *arg* characters after the cursor; beep when nothing deleted."""
    removed = event.current_buffer.delete(count=event.arg)
    if not removed:
        event.cli.output.bell()
def backward_delete_char(event):
    """Delete the character behind the cursor; beep when nothing deleted.

    A negative argument deletes in front of the cursor instead.
    """
    buffer = event.current_buffer
    if event.arg < 0:
        removed = buffer.delete(count=-event.arg)
    else:
        removed = buffer.delete_before_cursor(count=event.arg)
    if not removed:
        event.cli.output.bell()
def kill_line(event):
    """Kill from the cursor to the end of the line; the text goes to the
    clipboard.

    With a negative argument, kill back to the start of the line instead.
    At an end-of-line the newline itself is deleted (joining lines).
    """
    buff = event.current_buffer
    if event.arg < 0:
        deleted = buff.delete_before_cursor(count=-buff.document.get_start_of_line_position())
    elif buff.document.current_char == '\n':
        deleted = buff.delete(1)
    else:
        deleted = buff.delete(count=buff.document.get_end_of_line_position())
    event.cli.clipboard.set_text(deleted)
def kill_word(event):
    """Kill from point to the end of the current (or next) word.

    Word boundaries match forward-word; the killed text goes to the clipboard.
    """
    buff = event.current_buffer
    offset = buff.document.find_next_word_ending(count=event.arg)
    if offset:
        event.cli.clipboard.set_text(buff.delete(count=offset))
def unix_word_rubout(event, WORD=True):
    """Kill the word behind point, using whitespace as the word boundary.

    Usually bound to ControlW. Repeated invocations append to the clipboard
    so a single yank restores everything killed.
    """
    buff = event.current_buffer
    pos = buff.document.find_start_of_previous_word(count=event.arg, WORD=WORD)
    if pos is None:
        # No previous word found: kill everything back to the buffer start.
        pos = -buff.cursor_position
    if pos:
        deleted = buff.delete_before_cursor(count=-pos)
        # On repeat, prepend to what is already on the clipboard so
        # consecutive rubouts accumulate into one yankable chunk.
        if event.is_repeat:
            deleted += event.cli.clipboard.get_data().text
        event.cli.clipboard.set_text(deleted)
    else:
        # Nothing to delete (cursor at buffer start).
        event.cli.output.bell()
def delete_horizontal_space(event):
    """Delete all spaces and tabs around point."""
    buff = event.current_buffer
    before = buff.document.text_before_cursor
    after = buff.document.text_after_cursor
    # Count trailing whitespace before the cursor and leading after it.
    n_before = len(before) - len(before.rstrip('\t '))
    n_after = len(after) - len(after.lstrip('\t '))
    buff.delete_before_cursor(count=n_before)
    buff.delete(count=n_after)
def unix_line_discard(event):
    """Kill backwards from the cursor to the beginning of the current line."""
    buff = event.current_buffer
    if buff.document.cursor_position_col == 0 and buff.document.cursor_position > 0:
        # Already at column 0: delete the newline before us instead.
        buff.delete_before_cursor(count=1)
    else:
        deleted = buff.delete_before_cursor(count=-buff.document.get_start_of_line_position())
        event.cli.clipboard.set_text(deleted)
def yank(event):
    """Paste the clipboard contents before the cursor (emacs-style yank)."""
    data = event.cli.clipboard.get_data()
    event.current_buffer.paste_clipboard_data(
        data, count=event.arg, paste_mode=PasteMode.EMACS)
def yank_last_arg(event):
    """Yank the last word of the previous history line.

    Like yank-nth-arg, but without an explicit argument the last word of
    each line is yanked.
    """
    n = event.arg if event.arg_present else None
    event.current_buffer.yank_last_arg(n)
def yank_pop(event):
    """Rotate the kill ring and replace the just-yanked text with the new top.

    Only effective immediately following a yank or yank-pop (tracked via
    document_before_paste); otherwise it does nothing.
    """
    buff = event.current_buffer
    previous_doc = buff.document_before_paste
    if previous_doc is None:
        return
    clipboard = event.cli.clipboard
    # Undo the previous paste, rotate the ring, and paste the new top.
    buff.document = previous_doc
    clipboard.rotate()
    buff.paste_clipboard_data(clipboard.get_data(), paste_mode=PasteMode.EMACS)
def print_last_kbd_macro(event):
    """Print the last keyboard macro, one key per line."""
    def show_macro():
        for key in event.cli.input_processor.macro:
            print(key)
    # run_in_terminal temporarily restores the terminal so plain print works.
    event.cli.run_in_terminal(show_macro)
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.