idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
18,400
def find_enclosing_bracket_right(self, left_ch, right_ch, end_pos=None):
    """
    Find the enclosing right bracket after the cursor.

    Scans forward from the cursor, tracking nesting depth, and returns
    the position of the matching `right_ch` relative to the cursor.
    Returns 0 when the cursor is already on `right_ch`, and None when
    no match is found before `end_pos`.
    """
    if self.current_char == right_ch:
        return 0

    # Clamp the scan window to the end of the text.
    limit = len(self.text) if end_pos is None else min(len(self.text), end_pos)

    depth = 1
    for pos in range(self.cursor_position + 1, limit):
        ch = self.text[pos]
        if ch == left_ch:
            depth += 1
        elif ch == right_ch:
            depth -= 1
        if depth == 0:
            return pos - self.cursor_position
Find the right bracket enclosing current position . Return the relative position to the cursor position .
18,401
def find_enclosing_bracket_left(self, left_ch, right_ch, start_pos=None):
    """
    Find the enclosing left bracket before the cursor.

    Scans backwards from the cursor, tracking nesting depth, and
    returns the position of the matching `left_ch` relative to the
    cursor (a non-positive number).  Returns 0 when the cursor is
    already on `left_ch`, and None when no match is found at or after
    `start_pos`.
    """
    if self.current_char == left_ch:
        return 0

    # Clamp the scan window to the start of the text.
    lower = 0 if start_pos is None else max(0, start_pos)

    depth = 1
    for pos in range(self.cursor_position - 1, lower - 1, -1):
        ch = self.text[pos]
        if ch == right_ch:
            depth += 1
        elif ch == left_ch:
            depth -= 1
        if depth == 0:
            return pos - self.cursor_position
Find the left bracket enclosing current position . Return the relative position to the cursor position .
18,402
def find_matching_bracket_position(self, start_pos=None, end_pos=None):
    """
    Return the relative cursor position of the bracket matching the
    one under the cursor, for any of (), [], {} and <>.  Returns 0
    when the cursor is not on a bracket or when no match is found.
    """
    for pair in ('()', '[]', '{}', '<>'):
        opener, closer = pair
        if self.current_char == opener:
            return self.find_enclosing_bracket_right(
                opener, closer, end_pos=end_pos) or 0
        if self.current_char == closer:
            return self.find_enclosing_bracket_left(
                opener, closer, start_pos=start_pos) or 0
    return 0
Return relative cursor position of matching [ ( { or < bracket .
18,403
def get_start_of_line_position(self, after_whitespace=False):
    """
    Relative cursor movement needed to reach the start of this line.

    When `after_whitespace` is True, the target is the first
    non-whitespace character of the line instead of column 0.
    """
    if not after_whitespace:
        return -len(self.current_line_before_cursor)

    line = self.current_line
    indent = len(line) - len(line.lstrip())
    return indent - self.cursor_position_col
Relative position for the start of this line .
18,404
def empty_line_count_at_the_end(self):
    """Return the number of blank (empty or whitespace-only) lines at the end of the document."""
    count = 0
    for line in reversed(self.lines):
        if line and not line.isspace():
            break
        count += 1
    return count
Return number of empty lines at the end of the document .
18,405
def insert_after(self, text):
    """
    Return a new Document with `text` appended to the end of the buffer.

    Cursor position and selection are unchanged; since nothing before
    them moved, they stay in sync automatically.
    """
    return Document(
        text=self.text + text,
        cursor_position=self.cursor_position,
        selection=self.selection)
Create a new document with this text inserted after the buffer . It keeps selection ranges and cursor position in sync .
18,406
def insert_before(self, text):
    """
    Return a new Document with `text` prepended to the buffer.

    The cursor position and any selection anchor are both shifted by
    len(text) so that they keep pointing at the same characters.
    """
    shift = len(text)
    selection_state = self.selection

    if selection_state:
        # Rebuild the selection with its anchor shifted by the insert.
        selection_state = SelectionState(
            original_cursor_position=selection_state.original_cursor_position + shift,
            type=selection_state.type)

    return Document(
        text=text + self.text,
        cursor_position=self.cursor_position + shift,
        selection=selection_state)
Create a new document with this text inserted before the buffer . It keeps selection ranges and cursor position in sync .
18,407
def load_key_bindings(get_search_state=None,
                      enable_abort_and_exit_bindings=False,
                      enable_system_bindings=False,
                      enable_search=False,
                      enable_open_in_editor=False,
                      enable_extra_page_navigation=False,
                      enable_auto_suggest_bindings=False):
    """
    Create a Registry object that contains the default key bindings.

    Every `enable_*` argument can be a bool or a CLIFilter; the
    optional binding groups are wrapped in ConditionalRegistry objects
    so they can be switched on and off dynamically.
    """
    assert get_search_state is None or callable(get_search_state)

    # Accept both plain booleans and CLIFilters for every option.
    enable_abort_and_exit_bindings = to_cli_filter(enable_abort_and_exit_bindings)
    enable_system_bindings = to_cli_filter(enable_system_bindings)
    enable_search = to_cli_filter(enable_search)
    enable_open_in_editor = to_cli_filter(enable_open_in_editor)
    enable_extra_page_navigation = to_cli_filter(enable_extra_page_navigation)
    enable_auto_suggest_bindings = to_cli_filter(enable_auto_suggest_bindings)

    return MergedRegistry([
        load_basic_bindings(),
        load_mouse_bindings(),
        ConditionalRegistry(load_abort_and_exit_bindings(),
                            enable_abort_and_exit_bindings),
        ConditionalRegistry(load_basic_system_bindings(),
                            enable_system_bindings),

        # Emacs bindings.
        load_emacs_bindings(),
        ConditionalRegistry(load_emacs_open_in_editor_bindings(),
                            enable_open_in_editor),
        ConditionalRegistry(
            load_emacs_search_bindings(get_search_state=get_search_state),
            enable_search),
        ConditionalRegistry(load_emacs_system_bindings(),
                            enable_system_bindings),
        ConditionalRegistry(load_extra_emacs_page_navigation_bindings(),
                            enable_extra_page_navigation),

        # Vi bindings.
        load_vi_bindings(get_search_state=get_search_state),
        ConditionalRegistry(load_vi_open_in_editor_bindings(),
                            enable_open_in_editor),
        ConditionalRegistry(
            load_vi_search_bindings(get_search_state=get_search_state),
            enable_search),
        ConditionalRegistry(load_vi_system_bindings(),
                            enable_system_bindings),
        ConditionalRegistry(load_extra_vi_page_navigation_bindings(),
                            enable_extra_page_navigation),

        # Auto suggestions.
        ConditionalRegistry(load_auto_suggestion_bindings(),
                            enable_auto_suggest_bindings),
    ])
Create a Registry object that contains the default key bindings .
18,408
def load_key_bindings_for_prompt(**kw):
    """
    Create a Registry with the default key bindings for an input
    prompt: abort/exit, search and auto-suggestion bindings are
    enabled unless the caller explicitly overrides them.
    """
    for option in ('enable_abort_and_exit_bindings',
                   'enable_search',
                   'enable_auto_suggest_bindings'):
        kw.setdefault(option, True)
    return load_key_bindings(**kw)
Create a Registry object with the defaults key bindings for an input prompt .
18,409
def _split_multiline_prompt ( get_prompt_tokens ) : def has_before_tokens ( cli ) : for token , char in get_prompt_tokens ( cli ) : if '\n' in char : return True return False def before ( cli ) : result = [ ] found_nl = False for token , char in reversed ( explode_tokens ( get_prompt_tokens ( cli ) ) ) : if found_nl : result . insert ( 0 , ( token , char ) ) elif char == '\n' : found_nl = True return result def first_input_line ( cli ) : result = [ ] for token , char in reversed ( explode_tokens ( get_prompt_tokens ( cli ) ) ) : if char == '\n' : break else : result . insert ( 0 , ( token , char ) ) return result return has_before_tokens , before , first_input_line
Take a get_prompt_tokens function and return three new functions instead . One that tells whether this prompt consists of multiple lines ; one that returns the tokens to be shown on the lines above the input ; and another one with the tokens to be shown at the first line of the input .
18,410
def prompt(message='', **kwargs):
    """
    Get input from the user and return it.

    Options that concern *running* the application (rather than
    building it) are popped from `kwargs` first; everything else is
    forwarded to `create_prompt_application`.
    """
    run_options = {
        'patch_stdout': kwargs.pop('patch_stdout', False),
        'return_asyncio_coroutine': kwargs.pop('return_asyncio_coroutine', False),
        'true_color': kwargs.pop('true_color', False),
        'refresh_interval': kwargs.pop('refresh_interval', 0),
        'eventloop': kwargs.pop('eventloop', None),
    }
    application = create_prompt_application(message, **kwargs)
    return run_application(application, **run_options)
Get input from the user and return it .
18,411
# Run a prompt-toolkit Application and return its result.
#
# Builds a CommandLineInterface around `application`; optionally starts a
# background thread that requests a redraw every `refresh_interval` seconds
# (stopped through the `done` flag when the CLI stops), optionally patches
# stdout, and either runs synchronously (returning `result.text` for
# Document results) or builds an asyncio coroutine via exec_().
#
# NOTE(review): `textwrap.dedent()` is called with no argument here -- the
# original source-code string for the asyncio coroutine appears to have
# been lost when this file was flattened; as written, this call raises
# TypeError on the return_asyncio_coroutine path. Restore the original
# coroutine source before using that path.
def run_application ( application , patch_stdout = False , return_asyncio_coroutine = False , true_color = False , refresh_interval = 0 , eventloop = None ) : assert isinstance ( application , Application ) if return_asyncio_coroutine : eventloop = create_asyncio_eventloop ( ) else : eventloop = eventloop or create_eventloop ( ) cli = CommandLineInterface ( application = application , eventloop = eventloop , output = create_output ( true_color = true_color ) ) if refresh_interval : done = [ False ] def start_refresh_loop ( cli ) : def run ( ) : while not done [ 0 ] : time . sleep ( refresh_interval ) cli . request_redraw ( ) t = threading . Thread ( target = run ) t . daemon = True t . start ( ) def stop_refresh_loop ( cli ) : done [ 0 ] = True cli . on_start += start_refresh_loop cli . on_stop += stop_refresh_loop patch_context = cli . patch_stdout_context ( raw = True ) if patch_stdout else DummyContext ( ) if return_asyncio_coroutine : exec_context = { 'patch_context' : patch_context , 'cli' : cli , 'Document' : Document } exec_ ( textwrap . dedent ( ) , exec_context ) return exec_context [ 'prompt_coro' ] ( ) else : try : with patch_context : result = cli . run ( ) if isinstance ( result , Document ) : return result . text return result finally : eventloop . close ( )
Run a prompt toolkit application .
18,412
def _create_ansi_color_dict ( color_cls ) : " Create a table that maps the 16 named ansi colors to their Windows code. " return { 'ansidefault' : color_cls . BLACK , 'ansiblack' : color_cls . BLACK , 'ansidarkgray' : color_cls . BLACK | color_cls . INTENSITY , 'ansilightgray' : color_cls . GRAY , 'ansiwhite' : color_cls . GRAY | color_cls . INTENSITY , 'ansidarkred' : color_cls . RED , 'ansidarkgreen' : color_cls . GREEN , 'ansibrown' : color_cls . YELLOW , 'ansidarkblue' : color_cls . BLUE , 'ansipurple' : color_cls . MAGENTA , 'ansiteal' : color_cls . CYAN , 'ansired' : color_cls . RED | color_cls . INTENSITY , 'ansigreen' : color_cls . GREEN | color_cls . INTENSITY , 'ansiyellow' : color_cls . YELLOW | color_cls . INTENSITY , 'ansiblue' : color_cls . BLUE | color_cls . INTENSITY , 'ansifuchsia' : color_cls . MAGENTA | color_cls . INTENSITY , 'ansiturquoise' : color_cls . CYAN | color_cls . INTENSITY , }
Create a table that maps the 16 named ansi colors to their Windows code .
18,413
def _winapi(self, func, *a, **kw):
    """
    Flush the output buffer, then call the given Win32 API function.

    When `_DEBUG_RENDER_OUTPUT` is set, the call and its arguments are
    written to the debug log.  An `ArgumentError` from the call is
    swallowed (and logged in debug mode), returning None.
    """
    self.flush()

    if _DEBUG_RENDER_OUTPUT:
        self.LOG.write(('%r' % func.__name__).encode('utf-8') + b'\n')
        self.LOG.write(b' ' + ', '.join(['%r' % i for i in a]).encode('utf-8') + b'\n')
        self.LOG.write(b' ' + ', '.join(['%r' % type(i) for i in a]).encode('utf-8') + b'\n')
        self.LOG.flush()

    try:
        return func(*a, **kw)
    except ArgumentError as e:
        if _DEBUG_RENDER_OUTPUT:
            self.LOG.write(
                (' Error in %r %r %s\n' % (func.__name__, e, e)).encode('utf-8'))
Flush and call win API function .
18,414
def get_win32_screen_buffer_info(self):
    """
    Return the screen buffer info of the current console.

    Raises NoConsoleScreenBufferError when the win32 call fails, e.g.
    when the output is not attached to a real console.
    """
    self.flush()
    sbinfo = CONSOLE_SCREEN_BUFFER_INFO()
    if windll.kernel32.GetConsoleScreenBufferInfo(self.hconsole, byref(sbinfo)):
        return sbinfo
    raise NoConsoleScreenBufferError
Return Screen buffer info .
18,415
def enter_alternate_screen(self):
    """
    Switch to the alternate screen buffer (win32).

    Creates a new console screen buffer, makes it active and remembers
    it as `self.hconsole`.  No-op when already in the alternate screen.
    """
    if self._in_alternate_screen:
        return

    GENERIC_READ = 0x80000000
    GENERIC_WRITE = 0x40000000

    # Create a new screen buffer and make it the active one.
    handle = self._winapi(windll.kernel32.CreateConsoleScreenBuffer,
                          GENERIC_READ | GENERIC_WRITE,
                          DWORD(0), None, DWORD(1), None)
    self._winapi(windll.kernel32.SetConsoleActiveScreenBuffer, handle)

    self.hconsole = handle
    self._in_alternate_screen = True
Go to alternate screen buffer .
18,416
def quit_alternate_screen(self):
    """
    Make stdout the active console buffer again and close the
    alternate buffer (win32).  No-op when not in the alternate screen.
    """
    if not self._in_alternate_screen:
        return

    stdout = self._winapi(windll.kernel32.GetStdHandle, STD_OUTPUT_HANDLE)
    self._winapi(windll.kernel32.SetConsoleActiveScreenBuffer, stdout)
    self._winapi(windll.kernel32.CloseHandle, self.hconsole)

    self.hconsole = stdout
    self._in_alternate_screen = False
Make stdout again the active buffer .
18,417
def win32_refresh_window(cls):
    """
    Call the win32 API to redraw the whole console window.

    Useful when something else painted over our screen content.
    """
    handle = windll.kernel32.GetConsoleWindow()

    RDW_INVALIDATE = 0x0001
    windll.user32.RedrawWindow(handle, None, None, c_uint(RDW_INVALIDATE))
Call win32 API to refresh the whole Window .
18,418
def _build_color_table():
    """
    Build the color conversion table: each entry maps a reference
    (r, g, b) value to the corresponding foreground and background
    console color attributes.
    """
    FG = FOREGROUND_COLOR
    BG = BACKROUND_COLOR

    return [
        # Normal intensity.
        (0x00, 0x00, 0x00, FG.BLACK, BG.BLACK),
        (0x00, 0x00, 0xaa, FG.BLUE, BG.BLUE),
        (0x00, 0xaa, 0x00, FG.GREEN, BG.GREEN),
        (0x00, 0xaa, 0xaa, FG.CYAN, BG.CYAN),
        (0xaa, 0x00, 0x00, FG.RED, BG.RED),
        (0xaa, 0x00, 0xaa, FG.MAGENTA, BG.MAGENTA),
        (0xaa, 0xaa, 0x00, FG.YELLOW, BG.YELLOW),
        (0x88, 0x88, 0x88, FG.GRAY, BG.GRAY),

        # High intensity.
        (0x44, 0x44, 0xff, FG.BLUE | FG.INTENSITY, BG.BLUE | BG.INTENSITY),
        (0x44, 0xff, 0x44, FG.GREEN | FG.INTENSITY, BG.GREEN | BG.INTENSITY),
        (0x44, 0xff, 0xff, FG.CYAN | FG.INTENSITY, BG.CYAN | BG.INTENSITY),
        (0xff, 0x44, 0x44, FG.RED | FG.INTENSITY, BG.RED | BG.INTENSITY),
        (0xff, 0x44, 0xff, FG.MAGENTA | FG.INTENSITY, BG.MAGENTA | BG.INTENSITY),
        (0xff, 0xff, 0x44, FG.YELLOW | FG.INTENSITY, BG.YELLOW | BG.INTENSITY),
        (0x44, 0x44, 0x44, FG.BLACK | FG.INTENSITY, BG.BLACK | BG.INTENSITY),
        (0xff, 0xff, 0xff, FG.GRAY | FG.INTENSITY, BG.GRAY | BG.INTENSITY),
    ]
Build a color conversion table mapping reference RGB values to the 16 Windows console color attributes .
18,419
# Add distribution statistics on a tensor's elements to the current
# History entry as a wandb.Histogram.
#
# Flow (kept verbatim; the CUDA fallbacks are order-sensitive):
# 1. Nested tuples/lists of tensors are flattened and concatenated into
#    a single 1-D tensor; anything without a `.shape` raises TypeError.
# 2. Returns early when there is no history row or it isn't computing.
# 3. Half-precision tensors are upcast to float before `view(-1)`.
# 4. The `not hasattr(flat, "detach")` branch targets very old torch
#    versions (pre-0.4 Variables); there the raw numpy values are logged.
# 5. On the first CUDA tensor, probes whether `histc` works on the GPU
#    (some builds raise "_th_histc is not implemented"); on failure the
#    tensor is moved to the CPU for all subsequent calls.
# 6. Finally builds an explicit (counts, bin-edges) histogram with
#    `histc` + `linspace` over [min, max] using `self._num_bins` bins.
def log_tensor_stats ( self , tensor , name ) : if ( isinstance ( tensor , tuple ) or isinstance ( tensor , list ) ) : while ( isinstance ( tensor , tuple ) or isinstance ( tensor , list ) ) and ( isinstance ( tensor [ 0 ] , tuple ) or isinstance ( tensor [ 0 ] , list ) ) : tensor = [ item for sublist in tensor for item in sublist ] tensor = torch . cat ( [ t . view ( - 1 ) for t in tensor ] ) if not hasattr ( tensor , 'shape' ) : cls = type ( tensor ) raise TypeError ( 'Expected Tensor, not {}.{}' . format ( cls . __module__ , cls . __name__ ) ) history = self . _history ( ) if history is None or not history . compute : return if isinstance ( tensor , torch . HalfTensor ) : tensor = tensor . clone ( ) . type ( torch . FloatTensor ) . detach ( ) flat = tensor . view ( - 1 ) if not hasattr ( flat , "detach" ) : tensor = flat . cpu ( ) . clone ( ) . numpy ( ) history . row . update ( { name : wandb . Histogram ( tensor ) } ) return if flat . is_cuda : if self . _is_cuda_histc_supported is None : self . _is_cuda_histc_supported = True check = torch . cuda . FloatTensor ( 1 ) . fill_ ( 0 ) try : check = flat . histc ( bins = self . _num_bins ) except RuntimeError as e : if str ( e ) . startswith ( "_th_histc is not implemented" ) : self . _is_cuda_histc_supported = False if not self . _is_cuda_histc_supported : flat = flat . cpu ( ) . clone ( ) . detach ( ) if isinstance ( flat , torch . cuda . HalfTensor ) : flat = flat . clone ( ) . type ( torch . cuda . FloatTensor ) . detach ( ) if isinstance ( flat , torch . HalfTensor ) : flat = flat . clone ( ) . type ( torch . FloatTensor ) . detach ( ) tmin = flat . min ( ) . item ( ) tmax = flat . max ( ) . item ( ) tensor = flat . histc ( bins = self . _num_bins , min = tmin , max = tmax ) tensor = tensor . cpu ( ) . clone ( ) . detach ( ) bins = torch . linspace ( tmin , tmax , steps = self . _num_bins + 1 ) history . row . update ( { name : wandb . Histogram ( np_histogram = ( tensor . tolist ( ) , bins . tolist ( ) ) ) } )
Add distribution statistics on a tensor's elements to the current History entry
18,420
def get_height_for_line(self, lineno, width):
    """
    Return the number of rows the given line needs when rendered in a
    space of the given width.  Results are memoized per (lineno, width).
    """
    key = lineno, width
    try:
        return self._line_heights[key]
    except KeyError:
        text = token_list_to_text(self.get_line(lineno))
        height = self.get_height_for_text(text, width)
        self._line_heights[key] = height
        return height
Return the height that a given line would need if it is rendered in a space with the given width .
18,421
def preferred_width(self, cli, max_available_width):
    """
    Return the preferred width for this control: the display width of
    its longest line.
    """
    text = token_list_to_text(self._get_tokens_cached(cli))
    return max(get_cwidth(line) for line in text.split('\n'))
Return the preferred width for this control . That is the width of the longest line .
18,422
def mouse_handler(self, cli, mouse_event):
    """
    Handle a mouse event on this token-list control.

    If the clicked token carries a handler (third tuple element), call
    it; otherwise return NotImplemented so the event bubbles up to the
    next handler.
    """
    if self._tokens:
        all_lines = list(split_lines(self._tokens))
        try:
            line_tokens = all_lines[mouse_event.position.y]
        except IndexError:
            return NotImplemented

        # Walk the tokens on this line until we pass the click column.
        xpos = mouse_event.position.x
        count = 0
        for item in line_tokens:
            count += len(item[1])
            if count >= xpos:
                if len(item) >= 3:
                    return item[2](cli, mouse_event)
                break

    return NotImplemented
Handle mouse events .
18,423
def _get_tokens_for_line_func ( self , cli , document ) : def get_tokens_for_line ( ) : return self . lexer . lex_document ( cli , document ) return self . _token_cache . get ( document . text , get_tokens_for_line )
Create a function that returns the tokens for a given line .
18,424
# Create a UIContent object for this buffer control.
#
# When "preview search" is active (a search buffer with text exists),
# the displayed document is what the buffer *would* look like for the
# current search input; otherwise the buffer's real document is used.
# Lines are rendered through the cached processed-line function (also
# stored on self for the mouse handler), which maps source columns to
# display columns via translate_rowcol.  Each rendered line gets a
# trailing default-char space appended.  Finally, when this buffer has
# focus, the completion-menu position is derived either from the
# user-supplied `menu_position` callable or from the completion state
# (taking the smaller of the current and original cursor positions so
# the menu stays anchored at the start of the completed word).
def create_content ( self , cli , width , height ) : buffer = self . _buffer ( cli ) def preview_now ( ) : return bool ( self . preview_search ( cli ) and cli . buffers [ self . search_buffer_name ] . text ) if preview_now ( ) : if self . get_search_state : ss = self . get_search_state ( cli ) else : ss = cli . search_state document = buffer . document_for_search ( SearchState ( text = cli . current_buffer . text , direction = ss . direction , ignore_case = ss . ignore_case ) ) else : document = buffer . document get_processed_line = self . _create_get_processed_line_func ( cli , document ) self . _last_get_processed_line = get_processed_line def translate_rowcol ( row , col ) : " Return the content column for this coordinate. " return Point ( y = row , x = get_processed_line ( row ) . source_to_display ( col ) ) def get_line ( i ) : " Return the tokens for a given line number. " tokens = get_processed_line ( i ) . tokens tokens = tokens + [ ( self . default_char . token , ' ' ) ] return tokens content = UIContent ( get_line = get_line , line_count = document . line_count , cursor_position = translate_rowcol ( document . cursor_position_row , document . cursor_position_col ) , default_char = self . default_char ) if cli . current_buffer_name == self . buffer_name : menu_position = self . menu_position ( cli ) if self . menu_position else None if menu_position is not None : assert isinstance ( menu_position , int ) menu_row , menu_col = buffer . document . translate_index_to_position ( menu_position ) content . menu_position = translate_rowcol ( menu_row , menu_col ) elif buffer . complete_state : menu_row , menu_col = buffer . document . translate_index_to_position ( min ( buffer . cursor_position , buffer . complete_state . original_document . cursor_position ) ) content . menu_position = translate_rowcol ( menu_row , menu_col ) else : content . menu_position = None return content
Create a UIContent .
18,425
# Mouse handler for this buffer control.
#
# When focused: translates the click's display coordinates back to a
# source index via the last processed-line mapping, then
# - MOUSE_DOWN: clears any selection and moves the cursor there;
# - MOUSE_UP: if released more than one character away from the cursor,
#   extends a character selection to the release point (drag-select);
#   otherwise treats it as a click, and two clicks within 0.3s select
#   the word under the cursor (double-click);
# - any other event type returns NotImplemented so it bubbles up.
# When not focused: a MOUSE_UP focuses this buffer if `focus_on_click`
# allows it; otherwise NotImplemented is returned.
def mouse_handler ( self , cli , mouse_event ) : buffer = self . _buffer ( cli ) position = mouse_event . position if self . has_focus ( cli ) : if self . _last_get_processed_line : processed_line = self . _last_get_processed_line ( position . y ) xpos = processed_line . display_to_source ( position . x ) index = buffer . document . translate_row_col_to_index ( position . y , xpos ) if mouse_event . event_type == MouseEventType . MOUSE_DOWN : buffer . exit_selection ( ) buffer . cursor_position = index elif mouse_event . event_type == MouseEventType . MOUSE_UP : if abs ( buffer . cursor_position - index ) > 1 : buffer . start_selection ( selection_type = SelectionType . CHARACTERS ) buffer . cursor_position = index double_click = self . _last_click_timestamp and time . time ( ) - self . _last_click_timestamp < .3 self . _last_click_timestamp = time . time ( ) if double_click : start , end = buffer . document . find_boundaries_of_current_word ( ) buffer . cursor_position += start buffer . start_selection ( selection_type = SelectionType . CHARACTERS ) buffer . cursor_position += end - start else : return NotImplemented else : if self . focus_on_click ( cli ) and mouse_event . event_type == MouseEventType . MOUSE_UP : cli . focus ( self . buffer_name ) else : return NotImplemented
Mouse handler for this control .
18,426
def _get_arg_tokens(cli):
    """Return the tokens for the "(arg: N)" repeat-count prompt."""
    arg = cli.input_processor.arg
    return [
        (Token.Prompt.Arg, '(arg: '),
        (Token.Prompt.Arg.Text, str(arg)),
        (Token.Prompt.Arg, ') '),
    ]
Tokens for the arg - prompt .
18,427
def from_message(cls, message='> '):
    """Create a default prompt that always shows the same static message text."""
    assert isinstance(message, text_type)

    def get_message_tokens(cli):
        return [(Token.Prompt, message)]

    return cls(get_message_tokens)
Create a default prompt with a static message text .
18,428
def write_jsonl_file(fname, data):
    """
    Write `data` (a list of JSON-line strings) to `fname`, one
    stripped row per line.  Blank rows are skipped.  Non-list input is
    rejected with a warning and nothing is written.
    """
    if not isinstance(data, list):
        print('warning: malformed json data for file', fname)
        return

    with open(fname, 'w') as of:
        for row in data:
            stripped = row.strip()
            if stripped:
                of.write('%s\n' % stripped)
Writes a jsonl file .
18,429
def received_winch(self):
    """
    Notify the event loop that SIGWINCH (terminal resize) was received.

    The actual callback runs via `call_from_executor`, so it executes
    inside the event loop rather than in the signal handler itself.
    """
    def process_winch():
        if self._callbacks:
            self._callbacks.terminal_size_changed()

    self.call_from_executor(process_winch)
Notify the event loop that SIGWINCH has been received
18,430
def add_reader(self, fd, callback):
    """Register `callback` to be run when file descriptor `fd` becomes readable."""
    fd = fd_to_int(fd)
    self._read_fds[fd] = callback
    self.selector.register(fd)
Add read file descriptor to the event loop .
18,431
def remove_reader(self, fd):
    """Unregister the read callback for file descriptor `fd`, if one is registered."""
    fd = fd_to_int(fd)
    if fd in self._read_fds:
        del self._read_fds[fd]
        self.selector.unregister(fd)
Remove read file descriptor from the event loop .
18,432
def memoized(maxsize=1024):
    """
    Memoization decorator for immutable classes and pure functions.

    Results are kept in a `SimpleCache`, so at most `maxsize` distinct
    call signatures are retained.
    """
    cache = SimpleCache(maxsize=maxsize)

    def decorator(obj):
        @wraps(obj)
        def new_callable(*a, **kw):
            key = (a, tuple(kw.items()))

            def create_new():
                return obj(*a, **kw)

            return cache.get(key, create_new)
        return new_callable
    return decorator
Memoization decorator for immutable classes and pure functions .
18,433
def get(self, key, getter_func):
    """
    Return the cached value for `key`.  On a miss, call `getter_func`,
    store the result and return it.  When the cache grows beyond
    `maxsize`, the oldest inserted key is evicted (FIFO).
    """
    try:
        return self._data[key]
    except KeyError:
        value = getter_func()
        self._data[key] = value
        self._keys.append(key)

        # Evict the oldest entry once we exceed the maximum size.
        if len(self._data) > self.maxsize:
            oldest = self._keys.popleft()
            if oldest in self._data:
                del self._data[oldest]

        return value
Get object from the cache . If not found call getter_func to resolve it and put that on the top of the cache instead .
18,434
def add_binding(self, *keys, **kwargs):
    """
    Decorator for annotating key bindings.

    :param filter: bool or CLIFilter deciding when the binding is active.
    :param eager: bool or CLIFilter; when active, match without waiting
        for longer key sequences.
    :param save_before: callable(event) deciding whether to save the
        buffer state before the handler runs.
    :param invalidate_ui: accepted (and converted) for backwards
        compatibility, but otherwise unused.
    """
    filter = to_cli_filter(kwargs.pop('filter', True))
    eager = to_cli_filter(kwargs.pop('eager', False))
    save_before = kwargs.pop('save_before', lambda e: True)

    # Popped and converted only for backwards compatibility; the value
    # itself is not used.
    to_cli_filter(kwargs.pop('invalidate_ui', True))

    assert not kwargs
    assert keys
    assert all(isinstance(k, (Key, text_type)) for k in keys), \
        'Key bindings should consist of Key and string (unicode) instances.'
    assert callable(save_before)

    if isinstance(filter, Never):
        # The binding could never become active: don't register at all.
        def decorator(func):
            return func
    else:
        def decorator(func):
            self.key_bindings.append(_Binding(
                keys, func, filter=filter, eager=eager,
                save_before=save_before))
            self._clear_cache()
            return func

    return decorator
Decorator for annotating key bindings .
18,435
def remove_binding(self, function):
    """
    Remove the key binding whose handler is `function`.

    Raises ValueError when no such binding is registered.
    """
    assert callable(function)

    for binding in self.key_bindings:
        if binding.handler == function:
            self.key_bindings.remove(binding)
            self._clear_cache()
            return

    raise ValueError('Binding not found: %r' % (function, ))
Remove a key binding .
18,436
def _update_cache(self):
    """
    Rebuild our filtered copy when the wrapped registry (or the extra
    registry) changed, as detected through their version counters.
    """
    expected_version = (self.registry._version, self._extra_registry._version)

    if self._last_version != expected_version:
        rebuilt = Registry()

        for reg in (self.registry, self._extra_registry):
            for b in reg.key_bindings:
                # AND our own filter into every copied binding.
                rebuilt.key_bindings.append(_Binding(
                    keys=b.keys,
                    handler=b.handler,
                    filter=self.filter & b.filter,
                    eager=b.eager,
                    save_before=b.save_before))

        self._registry2 = rebuilt
        self._last_version = expected_version
If the original registry was changed . Update our copy version .
18,437
def _update_cache(self):
    """
    Rebuild the merged registry when one of the wrapped registries (or
    the extra registry) changed, detected via their version counters.
    """
    expected_version = (
        tuple(r._version for r in self.registries) +
        (self._extra_registry._version, ))

    if self._last_version != expected_version:
        merged = Registry()
        for reg in self.registries:
            merged.key_bindings.extend(reg.key_bindings)
        merged.key_bindings.extend(self._extra_registry.key_bindings)

        self._registry2 = merged
        self._last_version = expected_version
If one of the original registries was changed . Update our merged version .
18,438
def nest(thing):
    """
    Flatten `thing` with TensorFlow's `nest` utility when TensorFlow
    is importable; otherwise just wrap the object in a one-element list.
    """
    tfutil = util.get_module('tensorflow.python.util')
    if tfutil:
        return tfutil.nest.flatten(thing)
    return [thing]
Use TensorFlow's nest function if available ; otherwise just wrap the object in a list
18,439
# Convert a wandb datatype to its JSON representation.
#
# Handles, in order: matplotlib figures (rendered to a PIL Image when
# they contain images, otherwise converted as plots), plotly figures,
# homogeneous sequences of IterableMedia (Image/Audio/Html/Object3D,
# written to files under the run directory and keyed by `key` and
# `step`; mixed media types raise ValueError), Histograms, Graphs
# (summary mode only; history mode raises), and Tables.  Anything else
# is returned unchanged.
#
# NOTE(review): `isinstance(val, collections.Sequence)` relies on the
# pre-3.10 alias; on modern Python this must be
# `collections.abc.Sequence` -- confirm the supported Python range
# before running this on 3.10+.
def val_to_json ( key , val , mode = "summary" , step = None ) : converted = val typename = util . get_full_typename ( val ) if util . is_matplotlib_typename ( typename ) : val = util . ensure_matplotlib_figure ( val ) if any ( len ( ax . images ) > 0 for ax in val . axes ) : PILImage = util . get_module ( "PIL.Image" , required = "Logging plots with images requires pil: pip install pillow" ) buf = six . BytesIO ( ) val . savefig ( buf ) val = Image ( PILImage . open ( buf ) ) else : converted = util . convert_plots ( val ) elif util . is_plotly_typename ( typename ) : converted = util . convert_plots ( val ) if isinstance ( val , IterableMedia ) : val = [ val ] if isinstance ( val , collections . Sequence ) and len ( val ) > 0 : is_media = [ isinstance ( v , IterableMedia ) for v in val ] if all ( is_media ) : cwd = wandb . run . dir if wandb . run else "." if step is None : step = "summary" if isinstance ( val [ 0 ] , Image ) : converted = Image . transform ( val , cwd , "{}_{}.jpg" . format ( key , step ) ) elif isinstance ( val [ 0 ] , Audio ) : converted = Audio . transform ( val , cwd , key , step ) elif isinstance ( val [ 0 ] , Html ) : converted = Html . transform ( val , cwd , key , step ) elif isinstance ( val [ 0 ] , Object3D ) : converted = Object3D . transform ( val , cwd , key , step ) elif any ( is_media ) : raise ValueError ( "Mixed media types in the same list aren't supported" ) elif isinstance ( val , Histogram ) : converted = Histogram . transform ( val ) elif isinstance ( val , Graph ) : if mode == "history" : raise ValueError ( "Graphs are only supported in summary" ) converted = Graph . transform ( val ) elif isinstance ( val , Table ) : converted = Table . transform ( val ) return converted
Converts a wandb datatype to its JSON representation
18,440
def to_json(payload, mode="history"):
    """
    Recursively convert every value in `payload` (a possibly nested
    dict) to its JSON representation, in place, and return it.
    """
    for key, val in six.iteritems(payload):
        if isinstance(val, dict):
            payload[key] = to_json(val, mode)
        else:
            payload[key] = val_to_json(key, val, mode,
                                       step=payload.get("_step"))
    return payload
Converts all keys in a potentially nested array into their JSON representation
18,441
def guess_mode(self, data):
    """
    Guess the image mode a numpy array represents from its shape:
    2-D means grayscale, a trailing dimension of 3 means RGB and 4
    means RGBA.  Raises ValueError for any other shape.
    """
    if data.ndim == 2:
        return "L"
    if data.shape[-1] == 3:
        return "RGB"
    if data.shape[-1] == 4:
        return "RGBA"
    raise ValueError(
        "Un-supported shape for image conversion %s" % list(data.shape))
Guess what type of image the np . array is representing
18,442
# Combine a list of wandb Image objects into a single horizontal sprite
# image on disk and return its meta information (width, height, count,
# optional grouping and captions).
#
# The number of logged images is capped twice: by Image.MAX_IMAGES and
# by Image.MAX_DIMENSION // width (the sprite's total width may not
# exceed 65500 pixels); both caps log a warning.  The sprite is written
# to <out_dir>/media/images/<fname> with transparency=0.
# NOTE(review): all images are assumed to share the size of images[0]
# -- images of differing sizes would overlap in the sprite.
def transform ( images , out_dir , fname ) : from PIL import Image as PILImage base = os . path . join ( out_dir , "media" , "images" ) width , height = images [ 0 ] . image . size num_images_to_log = len ( images ) if num_images_to_log > Image . MAX_IMAGES : logging . warn ( "The maximum number of images to store per step is %i." % Image . MAX_IMAGES ) num_images_to_log = Image . MAX_IMAGES if width * num_images_to_log > Image . MAX_DIMENSION : max_images_by_dimension = Image . MAX_DIMENSION // width logging . warn ( "The maximum total width for all images in a collection is 65500, or {} images, each with a width of {} pixels. Only logging the first {} images." . format ( max_images_by_dimension , width , max_images_by_dimension ) ) num_images_to_log = max_images_by_dimension total_width = width * num_images_to_log sprite = PILImage . new ( mode = 'RGB' , size = ( total_width , height ) , color = ( 0 , 0 , 0 ) ) for i , image in enumerate ( images [ : num_images_to_log ] ) : location = width * i sprite . paste ( image . image , ( location , 0 ) ) util . mkdir_exists_ok ( base ) sprite . save ( os . path . join ( base , fname ) , transparency = 0 ) meta = { "width" : width , "height" : height , "count" : num_images_to_log , "_type" : "images" } grouping = images [ 0 ] . grouping if grouping : meta [ "grouping" ] = grouping captions = Image . captions ( images [ : num_images_to_log ] ) if captions : meta [ "captions" ] = captions return meta
Combines a list of images into a single sprite returning meta information
18,443
def _handle_command(self, command):
    """
    Handle a command entered by the client.

    The callback runs in a separate executor thread so the event loop
    is not blocked; when it finishes, `done` is scheduled back on the
    event loop to reset and redraw the prompt.
    """
    logger.info('Handle command %r', command)

    def in_executor():
        self.handling_command = True
        try:
            if self.callback is not None:
                self.callback(self, command)
        finally:
            # Always hand control back to the event loop.
            self.server.call_from_executor(done)

    def done():
        self.handling_command = False

        # Reset and redraw the prompt, unless the connection was
        # closed while the command was running.
        if not self.closed:
            self.cli.reset()
            self.cli.buffers[DEFAULT_BUFFER].reset()
            self.cli.renderer.request_absolute_cursor_position()
            self.vt100_output.flush()
            self.cli._redraw()

    self.server.run_in_executor(in_executor)
Handle command . This will run in a separate thread in order not to block the event loop .
18,444
def erase_screen(self):
    """Erase the output screen and move the cursor back to the top-left corner."""
    output = self.vt100_output
    output.erase_screen()
    output.cursor_goto(0, 0)
    output.flush()
Erase output screen .
18,445
def send(self, data):
    """
    Send text to the client, translating bare newlines into the CR/LF
    pairs that a telnet terminal expects.
    """
    assert isinstance(data, text_type)
    self.stdout.write(data.replace('\n', '\r\n'))
    self.stdout.flush()
Send text to the client .
18,446
def _process_callbacks ( self ) : os . read ( self . _schedule_pipe [ 0 ] , 1024 ) calls_from_executor , self . _calls_from_executor = self . _calls_from_executor , [ ] for c in calls_from_executor : c ( )
Process callbacks from call_from_executor in eventloop .
18,447
def run(self):
    """
    Run the event loop for the telnet server.

    Repeatedly select()s over the listen socket, the scheduling pipe
    and all client connections that are not currently busy handling a
    command, dispatching each readable fd to the matching handler.
    """
    listen_socket = self.create_socket(self.host, self.port)
    logger.info('Listening for telnet connections on %s port %r',
                self.host, self.port)

    try:
        while True:
            # Drop closed connections, and skip connections that are
            # busy running a command in an executor thread.
            self.connections = set(
                c for c in self.connections if not c.closed)
            idle = set(c for c in self.connections if not c.handling_command)

            read_list = ([listen_socket, self._schedule_pipe[0]] +
                         [c.conn for c in idle])
            read, _, _ = select.select(read_list, [], [])

            for s in read:
                if s == listen_socket:
                    self._accept(listen_socket)
                elif s == self._schedule_pipe[0]:
                    self._process_callbacks()
                else:
                    self._handle_incoming_data(s)
    finally:
        listen_socket.close()
Run the eventloop for the telnet server .
18,448
def _accept(self, listen_socket):
    """Accept a new incoming telnet connection and register it."""
    conn, addr = listen_socket.accept()
    connection = TelnetConnection(conn, addr, self.application, self,
                                  encoding=self.encoding)
    self.connections.add(connection)

    logger.info('New connection %r %r', *addr)
Accept new incoming connection .
18,449
def _handle_incoming_data ( self , conn ) : connection = [ c for c in self . connections if c . conn == conn ] [ 0 ] data = conn . recv ( 1024 ) if data : connection . feed ( data ) else : self . connections . remove ( connection )
Handle incoming data on socket .
18,450
def execute(self, *args, **kwargs):
    """
    Wrapper around `client.execute` that logs failures.

    HTTP errors are logged (including any Gorilla server error found
    in the response body) and then re-raised with the original
    traceback.
    """
    try:
        return self.client.execute(*args, **kwargs)
    except requests.exceptions.HTTPError as err:
        res = err.response
        logger.error("%s response executing GraphQL." % res.status_code)
        logger.error(res.text)
        self.display_gorilla_error_if_found(res)
        six.reraise(*sys.exc_info())
Wrapper around execute that logs in cases of failure .
18,451
def save_pip(self, out_dir):
    """
    Save the current working set of pip packages to
    `out_dir`/requirements.txt, one "name==version" line per package.
    Failures are logged rather than raised.
    """
    try:
        import pkg_resources

        requirements = sorted(
            "%s==%s" % (dist.key, dist.version)
            for dist in iter(pkg_resources.working_set))
        with open(os.path.join(out_dir, 'requirements.txt'), 'w') as f:
            f.write("\n".join(requirements))
    except Exception:
        logger.error("Error saving pip packages")
Saves the current working set of pip packages to requirements . txt
18,452
def save_patches ( self , out_dir ) : if not self . git . enabled : return False try : root = self . git . root if self . git . dirty : patch_path = os . path . join ( out_dir , 'diff.patch' ) if self . git . has_submodule_diff : with open ( patch_path , 'wb' ) as patch : subprocess . check_call ( [ 'git' , 'diff' , '--submodule=diff' , 'HEAD' ] , stdout = patch , cwd = root , timeout = 5 ) else : with open ( patch_path , 'wb' ) as patch : subprocess . check_call ( [ 'git' , 'diff' , 'HEAD' ] , stdout = patch , cwd = root , timeout = 5 ) upstream_commit = self . git . get_upstream_fork_point ( ) if upstream_commit and upstream_commit != self . git . repo . head . commit : sha = upstream_commit . hexsha upstream_patch_path = os . path . join ( out_dir , 'upstream_diff_{}.patch' . format ( sha ) ) if self . git . has_submodule_diff : with open ( upstream_patch_path , 'wb' ) as upstream_patch : subprocess . check_call ( [ 'git' , 'diff' , '--submodule=diff' , sha ] , stdout = upstream_patch , cwd = root , timeout = 5 ) else : with open ( upstream_patch_path , 'wb' ) as upstream_patch : subprocess . check_call ( [ 'git' , 'diff' , sha ] , stdout = upstream_patch , cwd = root , timeout = 5 ) except ( subprocess . CalledProcessError , subprocess . TimeoutExpired ) : logger . error ( 'Error generating diff' )
Save the current state of this repository to one or more patches .
18,453
def list_projects(self, entity=None):
    """Lists projects in W&B scoped by entity.

    Returns the flattened list of project nodes for *entity*, falling
    back to the configured default entity.
    """
    query = gql()
    return self._flatten_edges(self.gql(query, variable_values={
        'entity': entity or self.settings('entity')})['models'])
Lists projects in W&B scoped by entity .
18,454
def list_runs(self, project, entity=None):
    """Lists runs in W&B scoped by project.

    Returns the flattened list of run ("bucket") nodes for the given
    project/entity, falling back to the configured defaults.
    """
    query = gql()
    return self._flatten_edges(self.gql(query, variable_values={
        'entity': entity or self.settings('entity'),
        'model': project or self.settings('project')})['model']['buckets'])
Lists runs in W&B scoped by project .
18,455
def launch_run ( self , command , project = None , entity = None , run_id = None ) : query = gql ( ) patch = BytesIO ( ) if self . git . dirty : self . git . repo . git . execute ( [ 'git' , 'diff' ] , output_stream = patch ) patch . seek ( 0 ) cwd = "." if self . git . enabled : cwd = cwd + os . getcwd ( ) . replace ( self . git . repo . working_dir , "" ) return self . gql ( query , variable_values = { 'entity' : entity or self . settings ( 'entity' ) , 'model' : project or self . settings ( 'project' ) , 'command' : command , 'runId' : run_id , 'patch' : patch . read ( ) . decode ( "utf8" ) , 'cwd' : cwd } )
Launch a run in the cloud .
18,456
def run_config(self, project, run=None, entity=None):
    """Get the relevant configs for a run.

    Returns a tuple ``(commit, config, patch, metadata)``.

    Raises:
        ValueError: when the project/run cannot be found.
    """
    query = gql()
    response = self.gql(query, variable_values={
        'name': project, 'run': run, 'entity': entity})
    # Fixed: identity comparison instead of `== None`.
    if response['model'] is None:
        raise ValueError("Run {}/{}/{} not found".format(entity, project, run))
    run = response['model']['bucket']
    commit = run['commit']
    patch = run['patch']
    config = json.loads(run['config'] or '{}')
    if len(run['files']['edges']) > 0:
        # NOTE(review): presumably the first file edge is the run's
        # metadata JSON — verify against the (elided) GraphQL query.
        url = run['files']['edges'][0]['node']['url']
        res = requests.get(url)
        res.raise_for_status()
        metadata = res.json()
    else:
        metadata = {}
    return (commit, config, patch, metadata)
Get the relevant configs for a run
18,457
def run_resume_status ( self , entity , project_name , name ) : query = gql ( ) response = self . gql ( query , variable_values = { 'entity' : entity , 'project' : project_name , 'name' : name , } ) if 'model' not in response or 'bucket' not in response [ 'model' ] : return None project = response [ 'model' ] self . set_setting ( 'project' , project_name ) if 'entity' in project : self . set_setting ( 'entity' , project [ 'entity' ] [ 'name' ] ) return project [ 'bucket' ]
Check if a run exists and get resume information .
18,458
def upsert_run ( self , id = None , name = None , project = None , host = None , group = None , tags = None , config = None , description = None , entity = None , state = None , repo = None , job_type = None , program_path = None , commit = None , sweep_name = None , summary_metrics = None , num_retries = None ) : mutation = gql ( ) if config is not None : config = json . dumps ( config ) if not description : description = None kwargs = { } if num_retries is not None : kwargs [ 'num_retries' ] = num_retries variable_values = { 'id' : id , 'entity' : entity or self . settings ( 'entity' ) , 'name' : name , 'project' : project , 'groupName' : group , 'tags' : tags , 'description' : description , 'config' : config , 'commit' : commit , 'host' : host , 'debug' : env . is_debug ( ) , 'repo' : repo , 'program' : program_path , 'jobType' : job_type , 'state' : state , 'sweep' : sweep_name , 'summaryMetrics' : summary_metrics } response = self . gql ( mutation , variable_values = variable_values , ** kwargs ) run = response [ 'upsertBucket' ] [ 'bucket' ] project = run . get ( 'project' ) if project : self . set_setting ( 'project' , project [ 'name' ] ) entity = project . get ( 'entity' ) if entity : self . set_setting ( 'entity' , entity [ 'name' ] ) return response [ 'upsertBucket' ] [ 'bucket' ]
Update a run
18,459
def upload_urls ( self , project , files , run = None , entity = None , description = None ) : query = gql ( ) run_id = run or self . settings ( 'run' ) entity = entity or self . settings ( 'entity' ) query_result = self . gql ( query , variable_values = { 'name' : project , 'run' : run_id , 'entity' : entity , 'description' : description , 'files' : [ file for file in files ] } ) run = query_result [ 'model' ] [ 'bucket' ] if run : result = { file [ 'name' ] : file for file in self . _flatten_edges ( run [ 'files' ] ) } return run [ 'id' ] , result else : raise CommError ( "Run does not exist {}/{}/{}." . format ( entity , project , run_id ) )
Generate temporary resumable upload URLs .
18,460
def download_file(self, url):
    """Initiate a streaming download.

    Returns a tuple of (declared content length in bytes, the streaming
    requests Response object).
    """
    resp = requests.get(url, stream=True)
    resp.raise_for_status()
    size = int(resp.headers.get('content-length', 0))
    return (size, resp)
Initiate a streaming download
18,461
def upload_file(self, url, file, callback=None, extra_headers={}):
    """Uploads a file to W&B with failure resumption.

    Arguments:
        url: pre-signed destination upload url.
        file: an open file object with a ``name`` attribute.
        callback: optional progress callback passed through to Progress.
        extra_headers: extra HTTP headers (copied immediately, so the
            mutable default is never mutated).

    Returns the requests Response, or re-raises on failure.
    """
    extra_headers = extra_headers.copy()
    response = None
    # Empty files are rejected up front.
    if os.stat(file.name).st_size == 0:
        raise CommError("%s is an empty file" % file.name)
    try:
        progress = Progress(file, callback=callback)
        response = requests.put(url, data=progress, headers=extra_headers)
        response.raise_for_status()
    except requests.exceptions.RequestException as e:
        # Ask the server how much it received; these statuses are treated
        # as transient so the caller's retry logic can resume the upload.
        total = progress.len
        status = self._status_request(url, total)
        if status.status_code in (308, 408, 500, 502, 503, 504):
            util.sentry_reraise(retry.TransientException(exc=e))
        else:
            util.sentry_reraise(e)
    return response
Uploads a file to W&B with failure resumption
18,462
def register_agent ( self , host , sweep_id = None , project_name = None ) : mutation = gql ( ) if project_name is None : project_name = self . settings ( 'project' ) def no_retry_400 ( e ) : if not isinstance ( e , requests . HTTPError ) : return True if e . response . status_code != 400 : return True body = json . loads ( e . response . content ) raise UsageError ( body [ 'errors' ] [ 0 ] [ 'message' ] ) response = self . gql ( mutation , variable_values = { 'host' : host , 'entityName' : self . settings ( "entity" ) , 'projectName' : project_name , 'sweep' : sweep_id } , check_retry_fn = no_retry_400 ) return response [ 'createAgent' ] [ 'agent' ]
Register a new agent
18,463
def agent_heartbeat(self, agent_id, metrics, run_states):
    """Notify server about agent state, receive commands.

    Arguments:
        agent_id: id of this agent as registered with the backend.
        metrics: system metrics dict (JSON-encoded for transport).
        run_states: mapping of run id -> state (JSON-encoded for transport).

    Returns the list of commands the server wants executed; an empty list
    on communication failure.
    """
    mutation = gql()
    try:
        response = self.gql(mutation, variable_values={
            'id': agent_id,
            'metrics': json.dumps(metrics),
            'runState': json.dumps(run_states)})
    except Exception as e:
        # NOTE(review): assumes e.args[0] is a literal-eval'able dict
        # carrying a "message" key — confirm against the GraphQL client.
        message = ast.literal_eval(e.args[0])["message"]
        logger.error('Error communicating with W&B: %s', message)
        return []
    else:
        return json.loads(response['agentHeartbeat']['commands'])
Notify server about agent state receive commands .
18,464
def upsert_sweep ( self , config ) : mutation = gql ( ) def no_retry_400_or_404 ( e ) : if not isinstance ( e , requests . HTTPError ) : return True if e . response . status_code != 400 and e . response . status_code != 404 : return True body = json . loads ( e . response . content ) raise UsageError ( body [ 'errors' ] [ 0 ] [ 'message' ] ) response = self . gql ( mutation , variable_values = { 'config' : yaml . dump ( config ) , 'description' : config . get ( "description" ) , 'entityName' : self . settings ( "entity" ) , 'projectName' : self . settings ( "project" ) } , check_retry_fn = no_retry_400_or_404 ) return response [ 'upsertSweep' ] [ 'sweep' ] [ 'name' ]
Upsert a sweep object .
18,465
def file_current(self, fname, md5):
    """Checksum a file and compare the md5 with the known md5."""
    if not os.path.isfile(fname):
        return False
    return util.md5_file(fname) == md5
Checksum a file and compare the md5 with the known md5
18,466
def pull(self, project, run=None, entity=None):
    """Download files from W&B.

    Resolves the project/run slug, fetches download urls for every file
    in the run, writes each file locally, and returns the HTTP responses
    of the files actually downloaded.
    """
    project, run = self.parse_slug(project, run=run)
    urls = self.download_urls(project, run, entity)
    responses = []
    for name in urls:
        _, resp = self.download_write_file(urls[name])
        if resp:
            responses.append(resp)
    return responses
Download files from W&B
18,467
def push(self, files, run=None, entity=None, project=None, description=None, force=True, progress=False):
    """Uploads multiple files to W&B.

    Arguments:
        files: dict of save-name -> open file, or a list of file names.
        run: run id (defaults to the current run id).
        entity / project: scope for the upload.
        description: optional description stored with the upload urls.
        force: kept for interface compatibility (unused here).
        progress: False, a callable(bytes_read, total), or a stream to
            render a click progress bar into.

    Returns the list of HTTP responses, one per uploaded file.

    Raises:
        CommError: when no project is configured.
    """
    if project is None:
        project = self.get_project()
    if project is None:
        raise CommError("No project configured.")
    if run is None:
        run = self.current_run_id

    run_id, result = self.upload_urls(project, files, run, entity, description)
    responses = []
    for file_name, file_info in result.items():
        try:
            # Server paths use '/' separators; normalize for this OS.
            normal_name = os.path.join(*file_name.split("/"))
            open_file = files[normal_name] if isinstance(files, dict) else open(normal_name, "rb")
        except IOError:
            print("%s does not exist" % file_name)
            continue
        try:
            if progress:
                if hasattr(progress, '__call__'):
                    responses.append(self.upload_file_retry(
                        file_info['url'], open_file, progress))
                else:
                    length = os.fstat(open_file.fileno()).st_size
                    with click.progressbar(file=progress, length=length,
                                           label='Uploading file: %s' % (file_name),
                                           fill_char=click.style('&', fg='green')) as bar:
                        responses.append(self.upload_file_retry(
                            file_info['url'], open_file,
                            lambda bites, _: bar.update(bites)))
            else:
                responses.append(self.upload_file_retry(file_info['url'], open_file))
        finally:
            # Fixed: close the file even when the upload raises, so a
            # failed push does not leak file handles.
            open_file.close()
    return responses
Uploads multiple files to W&B
18,468
def get_file_stream_api(self):
    """This creates a new file pusher thread. Call start to initiate the thread that talks to W&B.

    The FileStreamApi instance is created lazily and cached; a current
    run id is required.
    """
    if not self._file_stream_api:
        if self._current_run_id is None:
            raise UsageError('Must have a current run to use file stream API.')
        self._file_stream_api = FileStreamApi(self, self._current_run_id)
    return self._file_stream_api
This creates a new file pusher thread . Call start to initiate the thread that talks to W&B
18,469
def _status_request(self, url, length):
    """Ask google how much we've uploaded.

    Sends a zero-length PUT with a ``bytes */<total>`` Content-Range, the
    resumable-upload status probe, and returns the raw response.
    """
    probe_headers = {
        'Content-Length': '0',
        'Content-Range': 'bytes */%i' % length,
    }
    return requests.put(url=url, headers=probe_headers)
Ask Google how much we 've uploaded so far .
18,470
def get_common_complete_suffix(document, completions):
    """Return the common prefix for all completions."""

    def doesnt_change_before_cursor(completion):
        # The slice of the completion that would replace existing text.
        end = completion.text[:-completion.start_position]
        return document.text_before_cursor.endswith(end)

    # Keep only completions that leave the text before the cursor intact.
    unchanged = [c for c in completions if doesnt_change_before_cursor(c)]
    if len(unchanged) != len(completions):
        # Some completion rewrites earlier text: nothing safe to insert.
        return ''

    def get_suffix(completion):
        return completion.text[-completion.start_position:]

    return _commonprefix([get_suffix(c) for c in unchanged])
Return the common prefix for all completions .
18,471
def get_width(self, cli, ui_content):
    """Width to report to the `Window`."""
    prompt_text = token_list_to_text(self.get_prompt_tokens(cli))
    return get_cwidth(prompt_text)
Width to report to the Window .
18,472
def prompt_for_project ( ctx , entity ) : result = ctx . invoke ( projects , entity = entity , display = False ) try : if len ( result ) == 0 : project = click . prompt ( "Enter a name for your first project" ) project = api . upsert_project ( project , entity = entity ) [ "name" ] else : project_names = [ project [ "name" ] for project in result ] question = { 'type' : 'list' , 'name' : 'project_name' , 'message' : "Which project should we use?" , 'choices' : project_names + [ "Create New" ] } result = whaaaaat . prompt ( [ question ] ) if result : project = result [ 'project_name' ] else : project = "Create New" if project == "Create New" : project = click . prompt ( "Enter a name for your new project" , value_proc = api . format_project ) project = api . upsert_project ( project , entity = entity ) [ "name" ] except wandb . apis . CommError as e : raise ClickException ( str ( e ) ) return project
Ask the user for a project creating one if necessary .
18,473
def cli(ctx):
    """Weights & Biases.

    Sets up global logging; when invoked without a subcommand, prints the
    top-level help text instead.
    """
    wandb.try_to_set_up_global_logging()
    if ctx.invoked_subcommand is not None:
        return
    click.echo(ctx.get_help())
Weights & Biases .
18,474
def _load_values(self):
    """Load config.yaml from the run directory if available."""
    config_file = self._config_path()
    # Nothing to do without a run directory or an existing config file.
    if config_file is None or not os.path.isfile(config_file):
        return
    self._load_file(config_file)
Load config . yaml from the run directory if available .
18,475
def load_json(self, json):
    """Loads existing config from JSON.

    Each entry maps a key to ``{'value': ..., 'desc': ...}``; the special
    ``wandb_version`` key is metadata and is skipped.  NOTE: the parameter
    name shadows the stdlib ``json`` module inside this method.
    """
    for key, entry in json.items():
        if key == "wandb_version":
            continue
        self._items[key] = entry.get('value')
        self._descriptions[key] = entry.get('desc')
Loads existing config from JSON
18,476
def persist(self):
    """Stores the current configuration for pushing to W&B."""
    target = self._config_path()
    if target is None:
        # No run directory to write into.
        return
    with open(target, "w") as fh:
        fh.write(str(self))
Stores the current configuration for pushing to W&B
18,477
def get_upstream_fork_point(self):
    """Get the most recent ancestor of HEAD that occurs on an upstream branch.

    First considers the current branch's tracking branch; if there is
    none, falls back to the tracking branches of all local branches.
    Returns the most recent common ancestor commit, or None when it
    cannot be determined (no repo, detached HEAD, or a git error).
    """
    possible_relatives = []
    try:
        if not self.repo:
            return None
        try:
            active_branch = self.repo.active_branch
        except (TypeError, ValueError):
            # A detached HEAD has no active branch.
            logger.debug("git is in a detached head state")
            return None
        else:
            tracking_branch = active_branch.tracking_branch()
            if tracking_branch:
                possible_relatives.append(tracking_branch.commit)

        if not possible_relatives:
            # No upstream on the current branch: consider every local
            # branch that tracks a remote.
            for branch in self.repo.branches:
                tracking_branch = branch.tracking_branch()
                if tracking_branch is not None:
                    possible_relatives.append(tracking_branch.commit)

        head = self.repo.head
        most_recent_ancestor = None
        for possible_relative in possible_relatives:
            # Keep the most recent merge-base across all candidates.
            for ancestor in self.repo.merge_base(head, possible_relative):
                if most_recent_ancestor is None:
                    most_recent_ancestor = ancestor
                elif self.repo.is_ancestor(most_recent_ancestor, ancestor):
                    most_recent_ancestor = ancestor
        return most_recent_ancestor
    except exc.GitCommandError as e:
        logger.debug("git remote upstream fork point could not be found")
        # Fixed: GitCommandError has no `.message` attribute on Python 3;
        # logging str(e) avoids an AttributeError inside the handler.
        logger.debug(str(e))
        return None
Get the most recent ancestor of HEAD that occurs on an upstream branch .
18,478
def create_text_object_decorator ( registry ) : assert isinstance ( registry , BaseRegistry ) operator_given = ViWaitingForTextObjectMode ( ) navigation_mode = ViNavigationMode ( ) selection_mode = ViSelectionMode ( ) def text_object_decorator ( * keys , ** kw ) : filter = kw . pop ( 'filter' , Always ( ) ) no_move_handler = kw . pop ( 'no_move_handler' , False ) no_selection_handler = kw . pop ( 'no_selection_handler' , False ) eager = kw . pop ( 'eager' , False ) assert not kw def decorator ( text_object_func ) : assert callable ( text_object_func ) @ registry . add_binding ( * keys , filter = operator_given & filter , eager = eager ) def _ ( event ) : vi_state = event . cli . vi_state event . _arg = ( vi_state . operator_arg or 1 ) * ( event . arg or 1 ) text_obj = text_object_func ( event ) if text_obj is not None : assert isinstance ( text_obj , TextObject ) vi_state . operator_func ( event , text_obj ) event . cli . vi_state . operator_func = None event . cli . vi_state . operator_arg = None if not no_move_handler : @ registry . add_binding ( * keys , filter = ~ operator_given & filter & navigation_mode , eager = eager ) def _ ( event ) : " Move handler for navigation mode. " text_object = text_object_func ( event ) event . current_buffer . cursor_position += text_object . start if not no_selection_handler : @ registry . add_binding ( * keys , filter = ~ operator_given & filter & selection_mode , eager = eager ) def _ ( event ) : " Move handler for selection mode. " text_object = text_object_func ( event ) buff = event . current_buffer if text_object . end : start , end = text_object . operator_range ( buff . document ) start += buff . cursor_position end += buff . cursor_position buff . selection_state . original_cursor_position = start buff . cursor_position = end if text_object . type == TextObjectType . LINEWISE : buff . selection_state . type = SelectionType . LINES else : buff . selection_state . type = SelectionType . CHARACTERS else : event . 
current_buffer . cursor_position += text_object . start return text_object_func return decorator return text_object_decorator
Create a decorator that can be used to register Vi text object implementations .
18,479
def create_operator_decorator ( registry ) : assert isinstance ( registry , BaseRegistry ) operator_given = ViWaitingForTextObjectMode ( ) navigation_mode = ViNavigationMode ( ) selection_mode = ViSelectionMode ( ) def operator_decorator ( * keys , ** kw ) : filter = kw . pop ( 'filter' , Always ( ) ) eager = kw . pop ( 'eager' , False ) assert not kw def decorator ( operator_func ) : @ registry . add_binding ( * keys , filter = ~ operator_given & filter & navigation_mode , eager = eager ) def _ ( event ) : event . cli . vi_state . operator_func = operator_func event . cli . vi_state . operator_arg = event . arg @ registry . add_binding ( * keys , filter = ~ operator_given & filter & selection_mode , eager = eager ) def _ ( event ) : buff = event . current_buffer selection_state = buff . selection_state if selection_state . type == SelectionType . LINES : text_obj_type = TextObjectType . LINEWISE elif selection_state . type == SelectionType . BLOCK : text_obj_type = TextObjectType . BLOCK else : text_obj_type = TextObjectType . INCLUSIVE text_object = TextObject ( selection_state . original_cursor_position - buff . cursor_position , type = text_obj_type ) operator_func ( event , text_object ) buff . selection_state = None return operator_func return decorator return operator_decorator
Create a decorator that can be used for registering Vi operators .
18,480
def load_vi_open_in_editor_bindings():
    """Pressing 'v' in navigation mode will open the buffer in an external editor."""
    registry = Registry()
    handler = get_by_name('edit-and-execute-command')
    registry.add_binding('v', filter=ViNavigationMode())(handler)
    return registry
Pressing v in navigation mode will open the buffer in an external editor .
18,481
def cut(self, buffer):
    """Turn text object into `ClipboardData` instance."""
    from_, to = self.operator_range(buffer.document)

    # Make the positions absolute within the buffer text.
    from_ += buffer.cursor_position
    to += buffer.cursor_position

    # NOTE(review): looks like `operator_range` returns an exclusive end
    # position, hence the -1 to make it inclusive — confirm against the
    # operator_range implementation.
    to -= 1

    document = Document(buffer.text, to, SelectionState(
        original_cursor_position=from_, type=self.selection_type))

    new_document, clipboard_data = document.cut_selection()
    return new_document, clipboard_data
Turn text object into ClipboardData instance .
18,482
def get_sync_start_position(self, document, lineno):
    """Scan backwards, and find a possible position to start.

    Walks up at most ``MAX_BACKWARDS`` lines from *lineno* looking for a
    line matching the sync pattern; returns a (row, column) pair.  With
    no match, falls back to the document start for small line numbers and
    to ``(lineno, 0)`` otherwise.
    """
    pattern = self._compiled_pattern
    lines = document.lines

    lowest = max(-1, lineno - self.MAX_BACKWARDS)
    for row in range(lineno, lowest, -1):
        found = pattern.match(lines[row])
        if found is not None:
            return row, found.start()

    # No sync position found within range.
    if lineno < self.FROM_START_IF_NO_SYNC_POS_FOUND:
        return 0, 0
    return lineno, 0
Scan backwards and find a possible position to start .
18,483
def from_filename(cls, filename, sync_from_start=True):
    """Create a Lexer from a filename.

    Picks a pygments lexer based on the filename; falls back to a plain
    SimpleLexer (no highlighting) when none is registered.
    """
    # Local imports: pygments is an optional dependency.
    from pygments.util import ClassNotFound
    from pygments.lexers import get_lexer_for_filename

    try:
        pygments_lexer = get_lexer_for_filename(filename)
    except ClassNotFound:
        return SimpleLexer()
    else:
        return cls(pygments_lexer.__class__, sync_from_start=sync_from_start)
Create a Lexer from a filename .
18,484
def _bisearch ( ucs , table ) : lbound = 0 ubound = len ( table ) - 1 if ucs < table [ 0 ] [ 0 ] or ucs > table [ ubound ] [ 1 ] : return 0 while ubound >= lbound : mid = ( lbound + ubound ) // 2 if ucs > table [ mid ] [ 1 ] : lbound = mid + 1 elif ucs < table [ mid ] [ 0 ] : ubound = mid - 1 else : return 1 return 0
Auxiliary function for binary search in interval table .
18,485
def wcwidth(wc):
    r"""Given one unicode character, return its printable length on a terminal.

    Returns 0 for zero-width code points, -1 for non-printable control
    characters, 1 for normal characters, and 2 for East-Asian wide ones.
    """
    ucs = ord(wc)

    # NUL and various zero-width / directional formatting code points
    # occupy no terminal cells.
    if (ucs == 0 or
            ucs == 0x034F or
            0x200B <= ucs <= 0x200F or
            ucs == 0x2028 or
            ucs == 0x2029 or
            0x202A <= ucs <= 0x202E or
            0x2060 <= ucs <= 0x2063):
        return 0

    # C0 and C1 control characters are non-printable.
    if ucs < 32 or 0x07F <= ucs < 0x0A0:
        return -1

    # Combining characters have zero width.
    if _bisearch(ucs, ZERO_WIDTH):
        return 0

    # East-Asian wide characters take two cells, everything else one.
    return 1 + _bisearch(ucs, WIDE_EASTASIAN)
Given one unicode character , return its printable length on a terminal .
18,486
def wcswidth(pwcs, n=None):
    """Given a unicode string, return its printable length on a terminal.

    Only the first *n* characters are measured when *n* is given.
    Returns -1 as soon as any non-printable character is encountered.
    """
    limit = len(pwcs) if n is None else n
    total = 0
    for ch in pwcs[:limit]:
        w = wcwidth(ch)
        if w < 0:
            # Non-printable character: the whole string is unmeasurable.
            return -1
        total += w
    return total
Given a unicode string return its printable length on a terminal .
18,487
def _per_file_event_handler ( self ) : file_event_handler = PatternMatchingEventHandler ( ) file_event_handler . on_created = self . _on_file_created file_event_handler . on_modified = self . _on_file_modified file_event_handler . on_moved = self . _on_file_moved file_event_handler . _patterns = [ os . path . join ( self . _watch_dir , os . path . normpath ( '*' ) ) ] file_event_handler . _ignore_patterns = [ '*/.*' , '*.tmp' , os . path . join ( self . _run . dir , OUTPUT_FNAME ) ] for glob in self . _api . settings ( "ignore_globs" ) : file_event_handler . _ignore_patterns . append ( os . path . join ( self . _run . dir , glob ) ) return file_event_handler
Create a Watchdog file event handler that does different things for every file
18,488
def _get_file_event_handler ( self , file_path , save_name ) : self . _file_pusher . update_file ( save_name , file_path ) if save_name not in self . _file_event_handlers : if save_name == 'wandb-history.jsonl' : self . _file_event_handlers [ 'wandb-history.jsonl' ] = FileEventHandlerTextStream ( file_path , 'wandb-history.jsonl' , self . _api ) elif save_name == 'wandb-events.jsonl' : self . _file_event_handlers [ 'wandb-events.jsonl' ] = FileEventHandlerTextStream ( file_path , 'wandb-events.jsonl' , self . _api ) elif 'tfevents' in save_name or 'graph.pbtxt' in save_name : self . _file_event_handlers [ save_name ] = FileEventHandlerThrottledOverwrite ( file_path , save_name , self . _api , self . _file_pusher ) elif save_name == config . FNAME : self . _file_event_handlers [ save_name ] = FileEventHandlerConfig ( file_path , save_name , self . _api , self . _file_pusher , self . _run ) elif save_name == 'wandb-summary.json' : self . _run . summary . load ( ) self . _api . get_file_stream_api ( ) . set_file_policy ( save_name , OverwriteFilePolicy ( ) ) self . _file_event_handlers [ save_name ] = FileEventHandlerSummary ( file_path , save_name , self . _api , self . _file_pusher , self . _run ) elif save_name . startswith ( 'media/' ) : self . _file_event_handlers [ save_name ] = FileEventHandlerOverwrite ( file_path , save_name , self . _api , self . _file_pusher ) else : Handler = FileEventHandlerOverwriteDeferred for policy , globs in six . iteritems ( self . _user_file_policies ) : if policy == "end" : continue for g in globs : if any ( save_name in p for p in glob . glob ( os . path . join ( self . _run . dir , g ) ) ) : if policy == "live" : Handler = FileEventHandlerThrottledOverwriteMinWait self . _file_event_handlers [ save_name ] = Handler ( file_path , save_name , self . _api , self . _file_pusher ) return self . _file_event_handlers [ save_name ]
Get or create an event handler for a particular file .
18,489
def mirror_stdout_stderr ( self ) : fs_api = self . _api . get_file_stream_api ( ) io_wrap . SimpleTee ( sys . stdout , streaming_log . TextStreamPusher ( fs_api , OUTPUT_FNAME , prepend_timestamp = True ) ) io_wrap . SimpleTee ( sys . stderr , streaming_log . TextStreamPusher ( fs_api , OUTPUT_FNAME , prepend_timestamp = True , line_prepend = 'ERROR' ) )
Simple STDOUT and STDERR mirroring used by _init_jupyter
18,490
def _get_stdout_stderr_streams ( self ) : if six . PY2 or not hasattr ( sys . stdout , "buffer" ) : if hasattr ( sys . stdout , "fileno" ) and sys . stdout . isatty ( ) : try : stdout = os . fdopen ( sys . stdout . fileno ( ) , "w+" , 0 ) stderr = os . fdopen ( sys . stderr . fileno ( ) , "w+" , 0 ) except OSError : stdout = sys . stdout stderr = sys . stderr else : stdout = sys . stdout stderr = sys . stderr else : try : stdout = sys . stdout . buffer . raw stderr = sys . stderr . buffer . raw except AttributeError : stdout = sys . stdout . buffer stderr = sys . stderr . buffer output_log_path = os . path . join ( self . _run . dir , OUTPUT_FNAME ) self . _output_log = WriteSerializingFile ( open ( output_log_path , 'wb' ) ) stdout_streams = [ stdout , self . _output_log ] stderr_streams = [ stderr , self . _output_log ] if self . _cloud : fs_api = self . _api . get_file_stream_api ( ) self . _stdout_stream = streaming_log . TextStreamPusher ( fs_api , OUTPUT_FNAME , prepend_timestamp = True ) self . _stderr_stream = streaming_log . TextStreamPusher ( fs_api , OUTPUT_FNAME , line_prepend = 'ERROR' , prepend_timestamp = True ) stdout_streams . append ( self . _stdout_stream ) stderr_streams . append ( self . _stderr_stream ) return stdout_streams , stderr_streams
Sets up STDOUT and STDERR streams . Only call this once .
18,491
def _close_stdout_stderr_streams(self):
    """Close output-capturing stuff. This also flushes anything left in the buffers."""
    # tee_file is only set when we created the pipe/pty ourselves.
    if self._stdout_tee.tee_file is not None:
        self._stdout_tee.tee_file.close()
    if self._stderr_tee.tee_file is not None:
        self._stderr_tee.tee_file.close()

    # Stop the tee threads and wait until their buffers have drained.
    self._stdout_tee.close_join()
    self._stderr_tee.close_join()

    if self._cloud:
        # Close the pushers that stream output to the backend.
        self._stdout_stream.close()
        self._stderr_stream.close()

    self._output_log.f.close()
    self._output_log = None
Close output - capturing stuff . This also flushes anything left in the buffers .
18,492
def shutdown(self, exitcode=0):
    """Stops system stats, streaming handlers, and uploads files without output, used by wandb.monitor."""
    logger.info("shutting down system stats and metadata service")
    self._system_stats.shutdown()
    self._meta.shutdown()
    if self._cloud:
        logger.info("stopping streaming files and file change observer")
        # Stop the file watcher first, then finish syncing with the final
        # exit code.
        self._stop_file_observer()
        self._end_file_syncing(exitcode)
    self._run.history.close()
Stops system stats streaming handlers and uploads files without output used by wandb . monitor
18,493
def run_user_process ( self , program , args , env ) : stdout_streams , stderr_streams = self . _get_stdout_stderr_streams ( ) if sys . platform == "win32" : self . _stdout_tee = io_wrap . Tee . pipe ( * stdout_streams ) self . _stderr_tee = io_wrap . Tee . pipe ( * stderr_streams ) else : self . _stdout_tee = io_wrap . Tee . pty ( * stdout_streams ) self . _stderr_tee = io_wrap . Tee . pty ( * stderr_streams ) command = [ program ] + list ( args ) runner = util . find_runner ( program ) if runner : command = runner + command command = ' ' . join ( six . moves . shlex_quote ( arg ) for arg in command ) self . _stdout_stream . write_string ( command + "\n\n" ) try : self . proc = subprocess . Popen ( command , env = env , stdout = self . _stdout_tee . tee_file , stderr = self . _stderr_tee . tee_file , shell = True , ) self . _run . pid = self . proc . pid except ( OSError , IOError ) : raise Exception ( 'Could not find program: %s' % command ) self . _sync_etc ( )
Launch a user process capture its output and sync its files to the backend .
18,494
def wrap_existing_process ( self , pid , stdout_read_fd , stderr_read_fd , port = None ) : stdout_read_file = os . fdopen ( stdout_read_fd , 'rb' ) stderr_read_file = os . fdopen ( stderr_read_fd , 'rb' ) stdout_streams , stderr_streams = self . _get_stdout_stderr_streams ( ) self . _stdout_tee = io_wrap . Tee ( stdout_read_file , * stdout_streams ) self . _stderr_tee = io_wrap . Tee ( stderr_read_file , * stderr_streams ) self . proc = Process ( pid ) self . _run . pid = pid logger . info ( "wrapping existing process %i" % pid ) try : self . init_run ( ) except LaunchError as e : logger . exception ( "catostrophic launch error" ) wandb . termerror ( str ( e ) ) util . sentry_exc ( e ) self . _socket . launch_error ( ) return if io_wrap . SIGWINCH_HANDLER is not None : io_wrap . SIGWINCH_HANDLER . add_fd ( stdout_read_fd ) io_wrap . SIGWINCH_HANDLER . add_fd ( stderr_read_fd ) logger . info ( "informing user process we are ready to proceed" ) self . _socket . ready ( ) self . _sync_etc ( headless = True )
Do syncing etc . for an already - running process .
18,495
def token_list_len(tokenlist):
    """Return the amount of characters in this token list."""
    ZeroWidthEscape = Token.ZeroWidthEscape
    # Zero-width escape sequences contribute no visible characters.
    return sum(len(text) for token, text in tokenlist
               if token != ZeroWidthEscape)
Return the amount of characters in this token list .
18,496
def token_list_to_text(tokenlist):
    """Concatenate all the text parts again."""
    ZeroWidthEscape = Token.ZeroWidthEscape
    # Zero-width escape sequences are dropped from the visible text.
    return ''.join(text for token, text in tokenlist
                   if token != ZeroWidthEscape)
Concatenate all the text parts again .
18,497
def iter_token_lines(tokenlist):
    """Iterator that yields tokenlists for each line."""
    current_line = []
    for token, char in explode_tokens(tokenlist):
        current_line.append((token, char))
        if char == '\n':
            yield current_line
            current_line = []
    # The (possibly empty) remainder after the last newline.
    yield current_line
Iterator that yields tokenlists for each line .
18,498
def _process ( self ) : buffer = self . key_buffer retry = False while True : if retry : retry = False else : buffer . append ( ( yield ) ) if buffer : is_prefix_of_longer_match = self . _is_prefix_of_longer_match ( buffer ) matches = self . _get_matches ( buffer ) eager_matches = [ m for m in matches if m . eager ( self . _cli_ref ( ) ) ] if eager_matches : matches = eager_matches is_prefix_of_longer_match = False if not is_prefix_of_longer_match and matches : self . _call_handler ( matches [ - 1 ] , key_sequence = buffer [ : ] ) del buffer [ : ] elif not is_prefix_of_longer_match and not matches : retry = True found = False for i in range ( len ( buffer ) , 0 , - 1 ) : matches = self . _get_matches ( buffer [ : i ] ) if matches : self . _call_handler ( matches [ - 1 ] , key_sequence = buffer [ : i ] ) del buffer [ : i ] found = True break if not found : del buffer [ : 1 ]
Coroutine implementing the key match algorithm . Key strokes are sent into this generator and it calls the appropriate handlers .
18,499
def arg(self):
    """Repetition argument.

    Returns -1 for the special '-' argument, otherwise the typed count
    (defaulting to 1 when nothing was typed).  Absurdly large counts are
    clamped back to 1 as a safety valve against accidental huge repeats.
    """
    if self._arg == '-':
        return -1

    result = int(self._arg or 1)
    # Fixed: `result` is already an int; the redundant int() cast removed.
    if result >= 1000000:
        result = 1
    return result
Repetition argument .