idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
18,600
def insert_comment(event):
    """Comment all lines (no numeric argument) or uncomment them (with a
    numeric argument), then accept the input."""
    buff = event.current_buffer

    if event.arg == 1:
        def toggle(line):
            return '#' + line
    else:
        def toggle(line):
            # Strip a leading '#' if present; leave other lines untouched.
            return line[1:] if line.startswith('#') else line

    buff.document = Document(
        text='\n'.join(toggle(l) for l in buff.text.splitlines()),
        cursor_position=0)
    buff.accept_action.validate_and_handle(event.cli, buff)
Without numeric argument comment all lines . With numeric argument uncomment all lines . In any case accept the input .
18,601
def operate_and_get_next(event):
    """Accept the current line for execution and fetch the next history line
    (relative to the current one) for editing."""
    buff = event.current_buffer
    next_index = buff.working_index + 1

    # Accept the input first; the callback below restores the working index
    # so the next prompt shows the following history entry.
    buff.accept_action.validate_and_handle(event.cli, buff)

    def set_working_index():
        if next_index < len(buff._working_lines):
            buff.working_index = next_index

    event.cli.pre_run_callables.append(set_working_index)
Accept the current line for execution and fetch the next line relative to the current line from the history for editing .
18,602
def edit_and_execute(event):
    """Open the current command line in an external editor, then accept the
    result."""
    buffer_ = event.current_buffer
    buffer_.open_in_editor(event.cli)
    buffer_.accept_action.validate_and_handle(event.cli, buffer_)
Invoke an editor on the current command line and accept the result .
18,603
def load_auto_suggestion_bindings():
    """Build and return key bindings for accepting auto-suggestion text."""
    registry = Registry()
    handle = registry.add_binding

    # Only active when a suggestion exists and the cursor is at the end.
    suggestion_available = Condition(
        lambda cli: (cli.current_buffer.suggestion is not None and
                     cli.current_buffer.document.is_cursor_at_the_end))

    @handle(Keys.ControlF, filter=suggestion_available)
    @handle(Keys.ControlE, filter=suggestion_available)
    @handle(Keys.Right, filter=suggestion_available)
    def _(event):
        " Accept suggestion. "
        buff = event.current_buffer
        suggestion = buff.suggestion
        if suggestion:
            buff.insert_text(suggestion.text)

    return registry
Key bindings for accepting auto suggestion text .
18,604
def set_columns(self, types):
    """Set the column types.

    Args:
        types: iterable of ``(column_name, type)`` pairs; each type must be a
            key of ``TYPE_TO_TYPESTRING``.

    Raises:
        wandb.Error: if called more than once, if a type is invalid, or if
            ``types`` is not an iterable of pairs.
    """
    if self._types:
        raise wandb.Error('TypedTable.set_columns called more than once.')
    # Materialize first: the original iterated `types` twice (validation,
    # then typemap construction), which silently broke for generators.
    try:
        types = list(types)
        for key, type_ in types:
            if type_ not in TYPE_TO_TYPESTRING:
                raise wandb.Error(
                    'TypedTable.set_columns received invalid type ({}) for key "{}".\n Valid types: {}'.format(
                        type_, key, '[%s]' % ', '.join(VALID_TYPE_NAMES)))
    except TypeError:
        raise wandb.Error(
            'TypedTable.set_columns requires iterable of (column_name, type) pairs.')
    self._types = dict(types)
    self._output.add({
        'typemap': {k: TYPE_TO_TYPESTRING[type_] for k, type_ in types},
        'columns': [t[0] for t in types],
    })
Set the column types
18,605
def rows_above_layout(self):
    """Return the number of rows visible in the terminal above the layout.

    Raises:
        HeightIsUnknownError: when the height cannot be determined.
    """
    if self._in_alternate_screen:
        return 0
    if self._min_available_height > 0:
        total_rows = self.output.get_size().rows
        previous_height = self._last_screen.height if self._last_screen else 0
        return total_rows - max(self._min_available_height, previous_height)
    raise HeightIsUnknownError('Rows above layout is unknown.')
Return the number of rows visible in the terminal above the layout .
18,606
def render(self, cli, layout, is_done=False):
    """Render the current interface to the output.

    *is_done* is set for the final render, after the input was accepted or
    aborted.
    """
    output = self.output

    # Enter alternate screen / bracketed paste mode once, lazily.
    if self.use_alternate_screen and not self._in_alternate_screen:
        self._in_alternate_screen = True
        output.enter_alternate_screen()
    if not self._bracketed_paste_enabled:
        self.output.enable_bracketed_paste()
        self._bracketed_paste_enabled = True

    # Toggle mouse support according to the current application state.
    needs_mouse_support = self.mouse_support(cli)
    if needs_mouse_support and not self._mouse_support_enabled:
        output.enable_mouse_support()
        self._mouse_support_enabled = True
    elif not needs_mouse_support and self._mouse_support_enabled:
        output.disable_mouse_support()
        self._mouse_support_enabled = False

    size = output.get_size()
    screen = Screen()
    screen.show_cursor = False
    mouse_handlers = MouseHandlers()

    # Height available to the layout (0 for the final render).
    if is_done:
        height = 0
    else:
        height = self._last_screen.height if self._last_screen else 0
        height = max(self._min_available_height, height)

    # Invalidate the previous screen on resize or style change.
    if self._last_size != size:
        self._last_screen = None
    if self.style.invalidation_hash() != self._last_style_hash:
        self._last_screen = None
        self._attrs_for_token = None
    if self._attrs_for_token is None:
        self._attrs_for_token = _TokenToAttrsCache(self.style.get_attrs_for_token)
    self._last_style_hash = self.style.invalidation_hash()

    layout.write_to_screen(cli, screen, mouse_handlers, WritePosition(
        xpos=0,
        ypos=0,
        width=size.columns,
        height=(size.rows if self.use_alternate_screen else height),
        extended_height=size.rows,
    ))

    if cli.is_aborting or cli.is_exiting:
        screen.replace_all_tokens(Token.Aborted)

    # Emit only the difference with the previous screen.
    self._cursor_pos, self._last_token = _output_screen_diff(
        output, screen, self._cursor_pos, self._last_screen,
        self._last_token, is_done,
        use_alternate_screen=self.use_alternate_screen,
        attrs_for_token=self._attrs_for_token,
        size=size,
        previous_width=(self._last_size.columns if self._last_size else 0))
    self._last_screen = screen
    self._last_size = size
    self.mouse_handlers = mouse_handlers

    # Keep the terminal title in sync with the application.
    new_title = cli.terminal_title
    if new_title != self._last_title:
        if new_title is None:
            self.output.clear_title()
        else:
            self.output.set_title(new_title)
        self._last_title = new_title

    output.flush()
Render the current interface to the output .
18,607
def clear(self):
    """Clear the screen, move the cursor to (0, 0) and repaint from scratch."""
    self.erase()
    out = self.output
    out.erase_screen()
    out.cursor_goto(0, 0)
    out.flush()
    self.request_absolute_cursor_position()
Clear screen and go to 0 0
18,608
def close(self):
    """Close both pipe file descriptors."""
    os.close(self._r)
    os.close(self._w)
    # Drop the fds so they cannot be reused after closing.
    self._r = None
    self._w = None
Close pipe fds .
18,609
def replace_all_tokens(self, token):
    """Give every character currently on the screen the given token."""
    for row in self.data_buffer.values():
        for x, char in row.items():
            row[x] = _CHAR_CACHE[char.char, token]
For all the characters in the screen . Set the token to the given token .
18,610
def _handle_response ( self , response ) : if isinstance ( response , Exception ) : logging . error ( "dropped chunk %s" % response ) elif response . json ( ) . get ( "limits" ) : parsed = response . json ( ) self . _api . dynamic_settings . update ( parsed [ "limits" ] )
Logs dropped chunks and updates dynamic settings
18,611
def push(self, filename, data):
    """Queue a chunk of *filename* for upload to the streaming endpoint."""
    chunk = Chunk(filename, data)
    self._queue.put(chunk)
Push a chunk of a file to the streaming endpoint .
18,612
def finish(self, exitcode):
    """Shut down cleanly: enqueue a Finish marker carrying *exitcode*, then
    wait for the worker thread to drain the queue."""
    self._queue.put(self.Finish(exitcode))
    self._thread.join()
Cleans up .
18,613
def shell(cmd):
    """Run ``docker`` with *cmd*; return its stripped stdout on success, or
    None on any error (non-zero exit, or docker not installed)."""
    try:
        output = subprocess.check_output(['docker'] + cmd,
                                         stderr=subprocess.STDOUT)
        return output.decode('utf8').strip()
    except (subprocess.CalledProcessError, OSError):
        # OSError covers a missing docker binary: the docstring promises
        # None on error, but previously FileNotFoundError propagated.
        return None
Simple wrapper for calling docker returning None on error and the output on success
18,614
def auth_token(registry, repo):
    """Hit the root of a v2 docker registry and negotiate an auth token.

    Returns the JSON token payload, or an empty dict when the registry did
    not request bearer authentication.
    """
    auth_info = auth_config.resolve_authconfig(registry)
    if auth_info:
        lowered = {k.lower(): v for k, v in six.iteritems(auth_info)}
        auth_info = (lowered.get("username"), lowered.get("password"))
    response = requests.get("https://{}/v2/".format(registry), timeout=3)
    if response.headers.get("www-authenticate"):
        try:
            info = www_authenticate.parse(response.headers['www-authenticate'])
        except ValueError:
            info = {}
    else:
        log.error("Received {} when attempting to authenticate with {}".format(
            response, registry))
        info = {}
    if info.get("bearer"):
        res = requests.get(
            info["bearer"]["realm"] +
            "?service={}&scope=repository:{}:pull".format(
                info["bearer"]["service"], repo),
            auth=auth_info, timeout=3)
        res.raise_for_status()
        return res.json()
    return {}
Makes a request to the root of a v2 docker registry to get the auth url .
18,615
def image_id_from_registry(image_name):
    """Get the docker image digest id from a public or private registry.

    Returns "<registry>/<repository>@<digest>", or None on failure.
    """
    registry, repository, tag = parse(image_name)
    res = None
    try:
        token = auth_token(registry, repository).get("token")
        # Docker Hub serves the registry API from a different host.
        if registry == "index.docker.io":
            registry = "registry-1.docker.io"
        res = requests.head(
            "https://{}/v2/{}/manifests/{}".format(registry, repository, tag),
            headers={
                "Authorization": "Bearer {}".format(token),
                "Accept": "application/vnd.docker.distribution.manifest.v2+json",
            },
            timeout=5)
        res.raise_for_status()
    except requests.RequestException:
        # BUG FIX: `res` stays None when the failure happened before the
        # HEAD request; previously this log line raised NameError.
        log.error("Received {} when attempting to get digest for {}".format(
            res, image_name))
        return None
    return "@".join([registry + "/" + repository,
                     res.headers["Docker-Content-Digest"]])
Get the docker id from a public or private registry
18,616
def escape(self, varname, value):
    """Escape *value* so it fits in the place of *varname* in the grammar."""
    func = self.escape_funcs.get(varname)
    if func is None:
        return value
    return func(value)
Escape value to fit in the place of this variable into the grammar .
18,617
def unescape(self, varname, value):
    """Unescape *value* using the unescape function registered for
    *varname*, if any."""
    func = self.unescape_funcs.get(varname)
    if func is None:
        return value
    return func(value)
Unescape value .
18,618
def _transform_prefix(cls, root_node, create_group_func):
    """Yield regular expressions matching every prefix of the grammar
    described by *root_node*."""
    def transform(node):
        # Each branch yields regexes matching prefixes of this node type.
        if isinstance(node, Any):
            for child in node.children:
                for regex in transform(child):
                    yield '(?:%s)?' % regex
        elif isinstance(node, Sequence):
            # Full matches of children[:i], then a prefix of children[i].
            for i in range(len(node.children)):
                complete = [cls._transform(c, create_group_func)
                            for c in node.children[:i]]
                for regex in transform(node.children[i]):
                    yield '(?:%s)' % (''.join(complete) + regex)
        elif isinstance(node, Regex):
            yield '(?:%s)?' % node.regex
        elif isinstance(node, Lookahead):
            if node.negative:
                yield '(?!%s)' % cls._transform(node.childnode, create_group_func)
            else:
                raise Exception('Positive lookahead not yet supported.')
        elif isinstance(node, Variable):
            for regex in transform(node.childnode):
                yield '(?P<%s>%s)' % (create_group_func(node), regex)
        elif isinstance(node, Repeat):
            prefix = cls._transform(node.childnode, create_group_func)
            for regex in transform(node.childnode):
                if node.max_repeat:
                    repeat_sign = '{,%i}' % (node.max_repeat - 1)
                else:
                    repeat_sign = '*'
                yield '(?:%s)%s%s(?:%s)?' % (
                    prefix, repeat_sign,
                    '' if node.greedy else '?', regex)
        else:
            raise TypeError('Got %r' % node)

    for regex in transform(root_node):
        yield '^%s$' % regex
Yield all the regular expressions matching a prefix of the grammar defined by the Node instance .
18,619
def trailing_input(self):
    """Return a MatchVariable describing trailing input, if any.

    Trailing input is input at the end that no longer matches the grammar,
    but whose removal would leave a valid string.  Returns None when the
    whole string matched.
    """
    spans = []
    for compiled_re, re_match in self._re_matches:
        for group_name, group_index in compiled_re.groupindex.items():
            if group_name == _INVALID_TRAILING_INPUT:
                spans.append(re_match.regs[group_index])
    if spans:
        # Take the latest start/end over all patterns.
        span = [max(s[0] for s in spans), max(s[1] for s in spans)]
        value = self.string[span[0]:span[1]]
        return MatchVariable('<trailing_input>', value, span)
Get the MatchVariable instance representing trailing input if there is any . Trailing input is input at the end that does not match the grammar anymore but when this is removed from the end of the input the input would be a valid string .
18,620
def end_nodes(self):
    """Yield MatchVariable instances for every node whose match ends exactly
    at the end of the input string."""
    for varname, reg in self._nodes_to_regs():
        start, stop = reg[0], reg[1]
        if stop == len(self.string):
            text = self._unescape(varname, self.string[start:stop])
            yield MatchVariable(varname, text, (start, stop))
Yields MatchVariable instances for all the nodes having their end position at the end of the input string .
18,621
def reset(self, mode=InputMode.INSERT):
    """Reset the state and go back to *mode* (INSERT by default)."""
    self.input_mode = mode
    # Forget any pending digraph and operator state.
    self.waiting_for_digraph = False
    self.operator_func = None
    self.operator_arg = None
Reset state and go back to the given mode (INSERT by default).
18,622
def add_buffer(self, name, buffer, focus=False):
    """Insert a new buffer under *name*, wiring up its async completion and
    auto-suggestion handlers; optionally focus it."""
    assert isinstance(buffer, Buffer)
    self.buffers[name] = buffer
    if focus:
        self.buffers.focus(name)

    auto_suggest_function = self._create_auto_suggest_function(buffer)
    completer_function = self._create_async_completer(buffer)
    self._async_completers[name] = completer_function

    def on_text_insert(_):
        # Trigger asynchronous completion / suggestion while typing.
        if buffer.completer and buffer.complete_while_typing():
            completer_function()
        if buffer.auto_suggest:
            auto_suggest_function()

    buffer.on_text_insert += on_text_insert

    def buffer_changed(_):
        self.on_buffer_changed.fire()

    buffer.on_text_changed += buffer_changed
Insert a new buffer .
18,623
def terminal_title(self):
    """Title to display in the terminal; None keeps the original title."""
    title = self.application.get_title()
    assert title is None or isinstance(title, six.text_type)
    return title
Return the current title to be displayed in the terminal . When this in None the terminal title remains the original .
18,624
def reset(self, reset_current_buffer=False):
    """Reset everything in order to read the next input.

    *reset_current_buffer* is accepted for API compatibility.
    """
    self._exit_flag = False
    self._abort_flag = False
    self._return_value = None

    self.renderer.reset()
    self.input_processor.reset()
    self.layout.reset()
    self.vi_state.reset()

    self.search_state = SearchState(
        ignore_case=Condition(lambda: self.is_ignoring_case))
    self.on_reset.fire()
Reset everything for reading the next input .
18,625
def invalidate(self):
    """Thread-safe way of requesting a repaint from the event loop."""
    if self._invalidated:
        return
    self._invalidated = True
    self.on_invalidate.fire()
    if self.eventloop is None:
        return

    def redraw():
        self._invalidated = False
        self._redraw()

    # Optionally postpone the redraw to collapse bursts of invalidations.
    if self.max_render_postpone_time:
        deadline = time.time() + self.max_render_postpone_time
    else:
        deadline = None
    self.eventloop.call_from_executor(redraw, _max_postpone_until=deadline)
Thread safe way of sending a repaint trigger to the input event loop .
18,626
def _on_resize ( self ) : self . renderer . erase ( leave_alternate_screen = False , erase_title = False ) self . renderer . request_absolute_cursor_position ( ) self . _redraw ( )
When the window size changes we erase the current output and request again the cursor position . When the CPR answer arrives the output is drawn again .
18,627
def _pre_run ( self , pre_run = None ) : " Called during `run`. " if pre_run : pre_run ( ) for c in self . pre_run_callables : c ( ) del self . pre_run_callables [ : ]
Called during run .
18,628
def run(self, reset_current_buffer=False, pre_run=None):
    """Read input from the command line: run the event loop until a return
    value has been set, then return it."""
    assert pre_run is None or callable(pre_run)
    try:
        self._is_running = True
        self.on_start.fire()
        self.reset()
        self._pre_run(pre_run)
        with self.input.raw_mode():
            self.renderer.request_absolute_cursor_position()
            self._redraw()
            self.eventloop.run(self.input, self.create_eventloop_callbacks())
    finally:
        # Render in the 'done' state if the loop ended without one.
        if not self.is_done:
            self._exit_flag = True
            self._redraw()
        self.renderer.reset()
        self.on_stop.fire()
        self._is_running = False
    return self.return_value()
Read input from the command line . This runs the eventloop until a return value has been set .
18,629
def exit(self):
    """Handle Control-D: set the exit flag and apply the application's
    on_exit action."""
    on_exit = self.application.on_exit
    self._exit_flag = True
    self._redraw()

    if on_exit == AbortAction.RAISE_EXCEPTION:
        def raise_eof():
            raise EOFError()
        self._set_return_callable(raise_eof)
    elif on_exit == AbortAction.RETRY:
        self.reset()
        self.renderer.request_absolute_cursor_position()
        self.current_buffer.reset()
    elif on_exit == AbortAction.RETURN_NONE:
        self.set_return_value(None)
Set exit . When Control - D has been pressed .
18,630
def abort(self):
    """Handle Control-C: set the abort flag and apply the application's
    on_abort action."""
    on_abort = self.application.on_abort
    self._abort_flag = True
    self._redraw()

    if on_abort == AbortAction.RAISE_EXCEPTION:
        def raise_interrupt():
            raise KeyboardInterrupt()
        self._set_return_callable(raise_interrupt)
    elif on_abort == AbortAction.RETRY:
        self.reset()
        self.renderer.request_absolute_cursor_position()
        self.current_buffer.reset()
    elif on_abort == AbortAction.RETURN_NONE:
        self.set_return_value(None)
Set abort . When Control - C has been pressed .
18,631
def run_in_terminal(self, func, render_cli_done=False, cooked_mode=True):
    """Run *func* on the terminal above the prompt and return its result.

    When *render_cli_done* is set, the interface is first rendered in its
    'done' state; otherwise the current output is simply erased.
    """
    if render_cli_done:
        self._return_value = True
        self._redraw()
        self.renderer.reset()
    else:
        self.renderer.erase()
    self._return_value = None

    if cooked_mode:
        # Run with the terminal restored to normal (cooked) mode.
        with self.input.cooked_mode():
            result = func()
    else:
        result = func()

    self.renderer.reset()
    self.renderer.request_absolute_cursor_position()
    self._redraw()
    return result
Run function on the terminal above the prompt .
18,632
def run_application_generator(self, coroutine, render_cli_done=False):
    """EXPERIMENTAL: like ``run_in_terminal``, but *coroutine* is a
    generator function that may yield Application instances to run."""
    if render_cli_done:
        self._return_value = True
        self._redraw()
        self.renderer.reset()
    else:
        self.renderer.erase()
    self._return_value = None

    g = coroutine()
    assert isinstance(g, types.GeneratorType)

    def step_next(send_value=None):
        " Execute next step of the coroutine."
        try:
            with self.input.cooked_mode():
                result = g.send(send_value)
        except StopIteration:
            done()
        except BaseException:
            # Restore the terminal before propagating; the original bare
            # `except:` hid the exception type (same behavior, explicit).
            done()
            raise
        else:
            assert isinstance(result, Application)
            self.run_sub_application(result, done_callback=step_next,
                                     _from_application_generator=True)

    def done():
        self.renderer.reset()
        self.renderer.request_absolute_cursor_position()
        self._redraw()

    step_next()
EXPERIMENTAL Like run_in_terminal but takes a generator that can yield Application instances .
18,633
def patch_stdout_context(self, raw=False, patch_stdout=True, patch_stderr=True):
    """Return a context manager that replaces sys.stdout (and optionally
    sys.stderr) with a proxy printing above the prompt, without destroying
    the renderer output."""
    proxy = self.stdout_proxy(raw=raw)
    return _PatchStdoutContext(
        proxy, patch_stdout=patch_stdout, patch_stderr=patch_stderr)
Return a context manager that will replace sys.stdout with a proxy that makes sure that all printed text will appear above the prompt and that it doesn't destroy the output from the renderer.
18,634
def _active_cli ( self ) : cli = self . cli while cli . _sub_cli : cli = cli . _sub_cli return cli
Return the active CommandLineInterface .
18,635
def feed_key(self, key_press):
    """Feed one KeyPress to the active CommandLineInterface."""
    assert isinstance(key_press, KeyPress)
    cli = self._active_cli
    # Ignore input once the interface is done (return value already set).
    if not cli.is_done:
        cli.input_processor.feed(key_press)
        cli.input_processor.process_keys()
Feed a key press to the CommandLineInterface .
18,636
def set_mouse_handler_for_range(self, x_min, x_max, y_min, y_max, handler=None):
    """Set *handler* for every cell in the half-open region
    [x_min, x_max) x [y_min, y_max)."""
    for x in range(x_min, x_max):
        for y in range(y_min, y_max):
            self.mouse_handlers[x, y] = handler
Set mouse handler for a region .
18,637
def send_message(self, options):
    """Send a message to the wandb process changing the policy of saved
    files; used internally by wandb.save."""
    if not options.get("save_policy"):
        raise ValueError("Only configuring save_policy is supported")
    if self.socket:
        self.socket.send(options)
    elif self._jupyter_agent:
        # In Jupyter there is no socket; talk to the agent directly.
        self._jupyter_agent.start()
        self._jupyter_agent.rm.update_user_file_policy(options["save_policy"])
    else:
        wandb.termerror("wandb.init hasn't been called, can't configure run")
Sends a message to the wandb process changing the policy of saved files . This is primarily used internally by wandb . save
18,638
def from_environment_or_defaults(cls, environment=None):
    """Create a Run, taking values from *environment* (default: os.environ)
    where possible."""
    if environment is None:
        environment = os.environ
    run_id = environment.get(env.RUN_ID)
    resume = environment.get(env.RESUME)
    storage_id = environment.get(env.RUN_STORAGE_ID)
    mode = environment.get(env.MODE)

    disabled = InternalApi().disabled()
    if not mode and disabled:
        mode = "dryrun"
    elif disabled and mode != "dryrun":
        wandb.termlog(
            "WARNING: WANDB_MODE is set to run, but W&B was disabled. Run `wandb on` to remove this message")
    elif disabled:
        wandb.termlog(
            'W&B is disabled in this directory. Run `wandb on` to enable cloud syncing.')

    group = environment.get(env.RUN_GROUP)
    job_type = environment.get(env.JOB_TYPE)
    run_dir = environment.get(env.RUN_DIR)
    sweep_id = environment.get(env.SWEEP_ID)
    program = environment.get(env.PROGRAM)
    description = environment.get(env.DESCRIPTION)
    args = env.get_args()
    wandb_dir = env.get_dir()
    tags = env.get_tags()
    config = Config.from_environment_or_defaults()

    run = cls(run_id, mode, run_dir, group, job_type, config, sweep_id,
              storage_id, program=program, description=description,
              args=args, wandb_dir=wandb_dir, tags=tags, resume=resume)
    return run
Create a Run object taking values from the local environment where possible .
18,639
def upload_debug(self):
    """Upload the debug log for this run to cloud storage, if it exists."""
    if not os.path.exists(self.log_fname):
        return
    api = InternalApi()
    api.set_current_run_id(self.id)
    pusher = FilePusher(api)
    pusher.update_file("wandb-debug.log", self.log_fname)
    pusher.file_changed("wandb-debug.log", self.log_fname)
    pusher.finish()
Uploads the debug log to cloud storage
18,640
def enable_logging(self):
    """Enable logging to the global debug log, tagging each record with this
    run's id so records from multiple processes can be distinguished."""
    handler = logging.FileHandler(self.log_fname)
    handler.setLevel(logging.INFO)
    run_id = self.id

    class WBFilter(logging.Filter):
        # Inject the run id into every record for the formatter below.
        def filter(self, record):
            record.run_id = run_id
            return True

    formatter = logging.Formatter(
        '%(asctime)s %(levelname)-7s %(threadName)-10s:%(process)d '
        '[%(run_id)s:%(filename)s:%(funcName)s():%(lineno)s] %(message)s')
    handler.setFormatter(formatter)
    handler.addFilter(WBFilter())
    logging.getLogger().addHandler(handler)
Enable logging to the global debug log. This adds a run_id to the log in case of multiple processes on the same machine.
18,641
def display_completions_like_readline(event):
    """Readline-style tab completion: insert the sole match or the common
    suffix, otherwise display all completions above the prompt."""
    b = event.current_buffer
    if b.completer is None:
        return

    complete_event = CompleteEvent(completion_requested=True)
    completions = list(b.completer.get_completions(b.document, complete_event))
    common_suffix = get_common_complete_suffix(b.document, completions)

    if len(completions) == 1:
        # Exactly one match: replace what was typed so far with it.
        b.delete_before_cursor(-completions[0].start_position)
        b.insert_text(completions[0].text)
    elif common_suffix:
        b.insert_text(common_suffix)
    elif completions:
        _display_completions_like_readline(event.cli, completions)
Key binding handler for readline - style tab completion . This is meant to be as similar as possible to the way how readline displays completions .
18,642
def _display_completions_like_readline(cli, completions):
    """Display the completions in columns above the prompt, asking for
    confirmation when there are too many to fit on one page and paginating
    through them."""
    from prompt_toolkit.shortcuts import create_confirm_application
    assert isinstance(completions, list)

    # Layout: column width, column count and completions per page.
    term_size = cli.output.get_size()
    term_width = term_size.columns
    term_height = term_size.rows
    max_compl_width = min(
        term_width,
        max(get_cwidth(c.text) for c in completions) + 1)
    column_count = max(1, term_width // max_compl_width)
    completions_per_page = column_count * (term_height - 1)
    page_count = int(math.ceil(len(completions) / float(completions_per_page)))

    def display(page):
        # Display one page of completions, column-major like readline.
        page_completions = completions[
            page * completions_per_page:(page + 1) * completions_per_page]
        page_row_count = int(math.ceil(len(page_completions) / float(column_count)))
        page_columns = [
            page_completions[i * page_row_count:(i + 1) * page_row_count]
            for i in range(column_count)]
        result = []
        for r in range(page_row_count):
            for c in range(column_count):
                try:
                    result.append(page_columns[c][r].text.ljust(max_compl_width))
                except IndexError:
                    pass
            result.append('\n')
        cli.output.write(''.join(result))
        cli.output.flush()

    def run():
        if len(completions) > completions_per_page:
            # BUG FIX: the prompt read "(y on n)"; readline says "(y or n)".
            message = 'Display all {} possibilities? (y or n) '.format(
                len(completions))
            confirm = yield create_confirm_application(message)
            if confirm:
                for page in range(page_count):
                    display(page)
                    if page != page_count - 1:
                        show_more = yield _create_more_application()
                        if not show_more:
                            return
            else:
                cli.output.write('\n')
                cli.output.flush()
        else:
            display(0)

    cli.run_application_generator(run, render_cli_done=True)
Display the list of completions in columns above the prompt . This will ask for a confirmation if there are too many completions to fit on a single page and provide a paginator to walk through them .
18,643
def to_simple_filter(bool_or_filter):
    """Accept either a bool or a SimpleFilter instance and return a
    SimpleFilter."""
    if isinstance(bool_or_filter, bool):
        return _always if bool_or_filter else _never
    if isinstance(bool_or_filter, SimpleFilter):
        return bool_or_filter
    raise TypeError(
        'Expecting a bool or a SimpleFilter instance. Got %r' % bool_or_filter)
Accept both booleans and CLIFilters as input and turn it into a SimpleFilter .
18,644
def to_cli_filter(bool_or_filter):
    """Accept either a bool or a CLIFilter instance and return a CLIFilter."""
    if isinstance(bool_or_filter, bool):
        return _always if bool_or_filter else _never
    if isinstance(bool_or_filter, CLIFilter):
        return bool_or_filter
    raise TypeError(
        'Expecting a bool or a CLIFilter instance. Got %r' % bool_or_filter)
Accept both booleans and CLIFilters as input and turn it into a CLIFilter .
18,645
def read(self, count=1024):
    """Read up to *count* bytes from stdin and return the decoded text.

    Returns the empty string when the input is closed or at EOF.
    """
    if self.closed:
        # BUG FIX: previously returned b'' here, inconsistent with the
        # str return type of every other path.
        return ''
    try:
        data = os.read(self.stdin_fd, count)
        if data == b'':
            self.closed = True
            return ''
    except OSError:
        # Happens e.g. when the input is closed concurrently.
        data = b''
    return self._stdin_decoder.decode(data)
Read the input and return it as a string .
18,646
def add_string(self, data):
    """Split *data* into complete lines, buffering any partial trailing
    line; return the list of completed lines."""
    lines = []
    while data:
        match = self._line_end_re.search(data)
        chunk = data if match is None else data[:match.end()]
        data = data[len(chunk):]
        # A buffered trailing '\r' not followed by '\n' ends the previous
        # line (bare carriage return).
        if (self._buf and self._buf[-1].endswith(b('\r'))
                and not chunk.startswith(b('\n'))):
            lines.append(self._finish_line())
        self._buf.append(chunk)
        if chunk.endswith(b('\n')):
            lines.append(self._finish_line())
    return lines
Process some data splitting it into complete lines and buffering the rest
18,647
def write(self, message, cur_time=None):
    """Buffer *message* and push each completed line to the pusher,
    optionally prefixed with a line prepend string and a UTC timestamp."""
    if cur_time is None:
        cur_time = time.time()
    for line in self._line_buffer.add_string(message):
        stamp = ''
        if self._prepend_timestamp:
            stamp = datetime.datetime.utcfromtimestamp(cur_time).isoformat() + ' '
        self._fsapi.push(self._filename,
                         u'{}{}{}'.format(self._line_prepend, stamp, line))
Write some text to the pusher .
18,648
def stream_tfevents(path, file_api, step=0):
    """Parse the tfevents file at *path* and stream its rows to *file_api*;
    return the last row (as a JSON-safe dict)."""
    last_step = 0
    row = {}
    buffer = []
    last_row = {}
    for summary in tf.train.summary_iterator(path):
        parsed = tf_summary_to_dict(summary)
        if last_step != parsed["tensorflow_step"]:
            step += 1
            last_step = parsed["tensorflow_step"]
            # Flush the accumulated row when the step changes.
            if len(row) > 0:
                last_row = to_json(row)
                buffer.append(Chunk(
                    "wandb-history.jsonl",
                    util.json_dumps_safer_history(to_json(row))))
        row.update(parsed)
    file_api._send(buffer)
    return last_row
Parses and streams a tfevents file to the server
18,649
def agent_run(args):
    """A version of `wandb run` used by the agent to launch a program."""
    run = wandb.wandb_run.Run.from_environment_or_defaults()
    run.enable_logging()

    api = wandb.apis.InternalApi()
    api.set_current_run_id(run.id)

    # Fall back to the cwd when there is no git root.
    root = api.git.root
    if not root:
        root = os.path.abspath(os.getcwd())
    host = socket.gethostname()
    remote_url = 'file://{}{}'.format(host, root)

    run.save(program=args['program'], api=api)
    env = dict(os.environ)
    run.set_environment(env)

    try:
        rm = wandb.run_manager.RunManager(api, run)
    except wandb.run_manager.Error:
        exc_type, exc_value, exc_traceback = sys.exc_info()
        wandb.termerror(
            'An Exception was raised during setup, see %s for full traceback.'
            % util.get_log_file_path())
        wandb.termerror(exc_value)
        if 'permission' in str(exc_value):
            wandb.termerror(
                'Are you sure you provided the correct API key to "wandb login"?')
        lines = traceback.format_exception(exc_type, exc_value, exc_traceback)
        logging.error('\n'.join(lines))
    else:
        rm.run_user_process(args['program'], args['args'], env)
A version of wandb run that the agent uses to run things .
18,650
def patch(save=True, tensorboardX=tensorboardX_loaded):
    """Monkey-patch tensorboard (or tensorboardX) so every event is logged
    both to tfevents files and to wandb; tfevents files and graphs are
    saved to wandb by default."""
    global Summary, Event
    if tensorboardX:
        tensorboard_module = "tensorboardX.writer"
        if tensorflow_loaded:
            wandb.termlog(
                "Found TensorboardX and tensorflow, pass tensorboardX=False to patch regular tensorboard.")
        from tensorboardX.proto.summary_pb2 import Summary
        from tensorboardX.proto.event_pb2 import Event
    else:
        tensorboard_module = "tensorflow.python.summary.writer.writer"
        from tensorflow.summary import Summary, Event

    writers = set()

    def _add_event(self, event, step, walltime=None):
        # Replacement for SummaryToEventTransformer._add_event that also
        # forwards the event to wandb.
        event.wall_time = time.time() if walltime is None else walltime
        if step is not None:
            event.step = int(step)
        try:
            if hasattr(self.event_writer._ev_writer, "_file_name"):
                name = self.event_writer._ev_writer._file_name
            else:
                name = self.event_writer._ev_writer.FileName().decode("utf-8")
            writers.add(name)
            # Derive the namespace from the writer path relative to the
            # common log directory of all writers seen so far.
            log_dir = os.path.dirname(os.path.commonprefix(list(writers)))
            filename = os.path.basename(name)
            namespace = name.replace(filename, "").replace(log_dir, "").strip(os.sep)
            if save:
                wandb.save(name, base_path=log_dir)
                wandb.save(os.path.join(log_dir, "*.pbtxt"), base_path=log_dir)
            log(event, namespace=namespace, step=event.step)
        except Exception as e:
            wandb.termerror("Unable to log event %s" % e)
        self.event_writer.add_event(event)

    writer = wandb.util.get_module(tensorboard_module)
    writer.SummaryToEventTransformer._add_event = _add_event
Monkeypatches tensorboard or tensorboardX so that all events are logged to tfevents files and wandb . We save the tfevents files and graphs to wandb by default .
18,651
def tf_summary_to_dict(tf_summary_str_or_pb, namespace=""):
    """Convert a TensorBoard Summary (proto, Event, or serialized string)
    into a dict of wandb-loggable values, with tags prefixed by
    *namespace*."""
    values = {}
    if isinstance(tf_summary_str_or_pb, Summary):
        summary_pb = tf_summary_str_or_pb
    elif isinstance(tf_summary_str_or_pb, Event):
        summary_pb = tf_summary_str_or_pb.summary
        values["global_step"] = tf_summary_str_or_pb.step
        values["_timestamp"] = tf_summary_str_or_pb.wall_time
    else:
        summary_pb = Summary()
        summary_pb.ParseFromString(tf_summary_str_or_pb)

    for value in summary_pb.value:
        kind = value.WhichOneof("value")
        if kind == "simple_value":
            values[namespaced_tag(value.tag, namespace)] = value.simple_value
        elif kind == "image":
            from PIL import Image
            image = wandb.Image(Image.open(
                six.BytesIO(value.image.encoded_image_string)))
            tag_idx = value.tag.rsplit('/', 1)
            if len(tag_idx) > 1 and tag_idx[1].isdigit():
                # Multiple images logged under one tag, e.g. "img/0".
                tag, idx = tag_idx
                values.setdefault(history_image_key(tag), []).append(image)
            else:
                values[history_image_key(value.tag)] = image
        elif kind == "histo":
            # Extend the first/last bucket edges, which TF leaves open-ended.
            first = value.histo.bucket_limit[0] + \
                value.histo.bucket_limit[0] - value.histo.bucket_limit[1]
            last = value.histo.bucket_limit[-2] + \
                value.histo.bucket_limit[-2] - value.histo.bucket_limit[-3]
            np_histogram = (list(value.histo.bucket),
                            [first] + value.histo.bucket_limit[:-1] + [last])
            # BUG FIX: pass `namespace`, consistent with the other kinds
            # (previously histograms ignored the namespace prefix).
            values[namespaced_tag(value.tag, namespace)] = wandb.Histogram(
                np_histogram=np_histogram)
    return values
Convert a Tensorboard Summary to a dictionary
18,652
def _get_search_text ( self , cli ) : if self . preview_search ( cli ) and cli . buffers [ self . search_buffer_name ] . text : return cli . buffers [ self . search_buffer_name ] . text else : return self . get_search_state ( cli ) . text
The text we are searching for .
18,653
def listen(self, max_seconds=30):
    """Wait up to `max_seconds` for a status message from the W&B process.

    Reads a NUL-terminated JSON message from the socket and interprets
    its 'status' field.

    Returns:
        (True, None) when status is 'done';
        (True, message_dict) when status is 'ready';
        (False, None) on 'launch_error', timeout, socket error or bad JSON.
    """
    if not self.connection:
        self.connect()
    start = time.time()
    # select() also watches the socket for error conditions.
    conn, _, err = select([self.connection], [], [self.connection], max_seconds)
    try:
        if len(err) > 0:
            raise socket.error("Couldn't open socket")
        message = b''
        while True:
            if time.time() - start > max_seconds:
                raise socket.error(
                    "Timeout of %s seconds waiting for W&B process" % max_seconds)
            res = self.connection.recv(1024)
            term = res.find(b'\0')  # a NUL byte terminates the message
            if term != -1:
                message += res[:term]
                break
            else:
                message += res
        message = json.loads(message.decode('utf8'))
        if message['status'] == 'done':
            return True, None
        elif message['status'] == 'ready':
            return True, message
        elif message['status'] == 'launch_error':
            return False, None
        else:
            raise socket.error("Invalid status: %s" % message['status'])
    except (socket.error, ValueError) as e:
        # Report to Sentry but degrade gracefully for the caller.
        util.sentry_exc(e)
        return False, None
Waits to receive up to two bytes for up to max_seconds
18,654
def _calc_adu(self):
    """Extend ADU computation to apply Demand Adjustment Factors (DAFs).

    After the standard ADU calculation, multiplies this buffer's ADU by
    the product of all directly-applicable DAFs, then explodes the extra
    demand of explosion-flagged DAFs down to components.
    """
    res = super()._calc_adu()
    self.ensure_one()
    # DAFs that apply directly to this buffer.
    dafs_to_apply = self.env['ddmrp.adjustment'].search(
        self._daf_to_apply_domain())
    if dafs_to_apply:
        daf = 1
        values = dafs_to_apply.mapped('value')
        for val in values:
            daf *= val
        prev = self.adu
        self.adu *= daf
        _logger.debug(
            "DAF=%s applied to %s. ADU: %s -> %s" %
            (daf, self.name, prev, self.adu))
    # DAFs whose additional demand must be exploded to components.
    dafs_to_explode = self.env['ddmrp.adjustment'].search(
        self._daf_to_apply_domain(False))
    for daf in dafs_to_explode:
        prev = self.adu
        increased_demand = prev * daf.value - prev
        self.explode_demand_to_components(
            daf, increased_demand, self.product_uom)
    return res
Apply DAFs if existing for the buffer .
18,655
def cron_ddmrp_adu(self, automatic=False):
    """Cron extension: re-apply DAF-originated extra demand after the ADU update.

    Clears previously generated adjustment-demand records, runs the
    standard ADU cron, then adds the extra demand of every adjustment
    whose date range covers today.
    """
    # Remove stale DAF-generated demand before recomputing.
    self.env['ddmrp.adjustment.demand'].search([]).unlink()
    super().cron_ddmrp_adu(automatic)
    today = fields.Date.today()
    for op in self.search([]).filtered('extra_demand_ids'):
        # Only demand records active today contribute.
        to_add = sum(op.extra_demand_ids.filtered(
            lambda r: r.date_start <= today <= r.date_end
        ).mapped('extra_demand'))
        if to_add:
            op.adu += to_add
            _logger.debug(
                "DAFs-originated demand applied. %s: ADU += %s" %
                (op.name, to_add))
Apply extra demand originated by Demand Adjustment Factors to components after the cron update of all the buffers .
18,656
def _compute_dlt(self):
    """Extend DLT computation to apply Lead Time Adjustment Factors (LTAFs).

    After the standard DLT calculation, multiplies each record's DLT by
    the product of all LTAFs currently applicable to it.
    """
    res = super()._compute_dlt()
    for rec in self:
        ltaf_to_apply = self.env['ddmrp.adjustment'].search(
            rec._ltaf_to_apply_domain())
        if ltaf_to_apply:
            ltaf = 1
            values = ltaf_to_apply.mapped('value')
            for val in values:
                ltaf *= val
            prev = rec.dlt
            rec.dlt *= ltaf
            _logger.debug(
                "LTAF=%s applied to %s. DLT: %s -> %s" %
                (ltaf, rec.name, prev, rec.dlt))
    return res
Apply Lead Time Adj Factor if existing
18,657
def flatten(nested_dict, separator="_", root_keys_to_ignore=frozenset()):
    """Flatten a nested dictionary into a single-level dictionary.

    Nested dict keys and list/set/tuple indices are joined with
    `separator`.  Top-level keys listed in `root_keys_to_ignore` are
    skipped entirely, which avoids unnecessary work on deep objects.

    Args:
        nested_dict: the dictionary to flatten.
        separator: string inserted between joined key components.
        root_keys_to_ignore: top-level keys to drop.  (FIX: default was a
            mutable ``set()``; it is only ever read, so ``frozenset()``
            is behavior-identical and safe.)

    Returns:
        A new flat dict.  Falsy leaves (empty containers, None, 0, '')
        are stored verbatim.
    """
    assert isinstance(nested_dict, dict), "flatten requires a dictionary input"
    assert isinstance(separator, six.string_types), "separator must be string"

    flattened_dict = dict()

    def _flatten(object_, key):
        # Falsy values are leaves and are kept as-is.
        if not object_:
            flattened_dict[key] = object_
        elif isinstance(object_, dict):
            for object_key in object_:
                # root_keys_to_ignore only applies at the top level
                # (where key is still None).
                if not (not key and object_key in root_keys_to_ignore):
                    _flatten(object_[object_key],
                             _construct_key(key, separator, object_key))
        elif isinstance(object_, (list, set, tuple)):
            for index, item in enumerate(object_):
                _flatten(item, _construct_key(key, separator, index))
        else:
            flattened_dict[key] = object_

    _flatten(nested_dict, None)
    return flattened_dict
Flattens a dictionary with nested structure to a dictionary with no hierarchy Consider ignoring keys that you are not interested in to prevent unnecessary processing This is specially true for very deep objects
18,658
def check_if_numbers_are_consecutive(list_):
    """Return True if every adjacent pair in `list_` increases by exactly 1.

    Empty and single-element lists are trivially consecutive.
    """
    # The original's `True if cond else False` is redundant -- the
    # comparison already yields a bool.
    return all(second - first == 1
               for first, second in zip(list_[:-1], list_[1:]))
Returns True if numbers in the list are consecutive
18,659
def strip(text):
    """Remove every color code defined on `Colors` from `text`."""
    for name, code in vars(Colors).items():
        # Skip dunders and this helper itself; everything else is
        # assumed to be a color-code string.
        if name.startswith("__") or name == 'strip':
            continue
        text = text.replace(code, '')
    return text
Strips all color codes from a text .
18,660
def vprintf(self, alevel, format, *args):
    """printf that only emits when the object's verbosity is >= `alevel`."""
    verbosity = self._verbosity
    if verbosity and verbosity >= alevel:
        sys.stdout.write(format % args)
A verbosity - aware printf .
18,661
def density(a_M, *args, **kwargs):
    """Return (actual density, binary density) of matrix `a_M`.

    An optional mask matrix may be passed as the first extra positional
    argument; otherwise an all-ones mask is used.  NOTE: `a_M` is masked
    *in place* (original behavior preserved).  `kwargs` is accepted but
    unused.
    """
    rows, cols = a_M.shape
    a_Mmask = args[0] if len(args) else ones((rows, cols))
    a_M *= a_Mmask  # in place: zero out everything outside the mask
    f_area = float(size(nonzero(a_Mmask)[0]))
    f_binaryMass = float(size(nonzero(a_M)[0]))
    f_actualMass = a_M.sum()
    return f_actualMass / f_area, f_binaryMass / f_area
ARGS a_M matrix to analyze
18,662
def cdf(arr, **kwargs):
    """Unnormalized cumulative distribution of the values in `arr`.

    Extra keyword arguments are forwarded to numpy.histogram
    (e.g. ``bins=``, ``range=``).
    """
    counts = histogram(arr, **kwargs)[0]
    return cumsum(counts)
ARGS arr array to calculate cumulative distribution function
18,663
def array2DIndices_enumerate(arr):
    """Explicitly list all (row, col) index pairs of a 2D shape.

    `arr` is a (rows, cols) pair; the result is a (rows*cols, 2) float
    array enumerating indices in row-major order.
    """
    rows, cols = arr[0], arr[1]
    arr_index = zeros((rows * cols, 2))
    position = 0
    for row in arange(0, rows):
        for col in arange(0, cols):
            arr_index[position] = array([row, col])
            position += 1
    return arr_index
DESC Given a 2D array defined by arr prepare an explicit list of the indices .
18,664
def toc(*args, **kwargs):
    """Stop the timer started by tic() (port of the MatLAB function).

    Keyword handling (first matching key in kwargs order wins):
      sysprint=<fmt>  -> return fmt % elapsed
      default=<any>   -> return a canned "Elapsed time" string
    With no recognized keyword the elapsed seconds (float) are returned.
    """
    global Gtic_start
    elapsed = time.time() - Gtic_start
    for key, value in kwargs.items():
        if key == 'sysprint':
            return value % elapsed
        if key == 'default':
            return "Elapsed time = %f seconds." % elapsed
    return elapsed
Port of the MatLAB function of same name
18,665
def base10toN(num, n):
    """Convert `num` to a base-`n` string.

    Digits 10..35 are rendered as letters 'a'..'z'; a remainder >= 36 is
    rendered as '(<value>)'.  Returns '' for num == 0 (original behavior
    preserved).  The digit array is printed as a debug side effect,
    mirroring the original.
    """
    num_rep = {10: 'a', 11: 'b', 12: 'c', 13: 'd', 14: 'e', 15: 'f',
               16: 'g', 17: 'h', 18: 'i', 19: 'j', 20: 'k', 21: 'l',
               22: 'm', 23: 'n', 24: 'o', 25: 'p', 26: 'q', 27: 'r',
               28: 's', 29: 't', 30: 'u', 31: 'v', 32: 'w', 33: 'x',
               34: 'y', 35: 'z'}
    new_num_string = ''
    new_num_arr = array(())
    current = num
    while current != 0:
        remainder = current % n
        if 36 > remainder > 9:
            remainder_string = num_rep[remainder]
        elif remainder >= 36:
            remainder_string = '(' + str(remainder) + ')'
        else:
            remainder_string = str(remainder)
        new_num_string = remainder_string + new_num_string
        new_num_arr = r_[remainder, new_num_arr]
        # BUG FIX: floor division.  The original `current / n` is true
        # division on Python 3, producing floats so the loop never
        # reaches 0 cleanly and float remainders break the digit lookup.
        current = current // n
    print(new_num_arr)
    return new_num_string
Change a num to a base - n number . Up to base - 36 is supported without special notation .
18,666
def list_i2str(ilist):
    """Convert a list of integers into a list of their string forms."""
    # Comprehension replaces the original manual append loop.
    return [str(el) for el in ilist]
Convert an integer list into a string list .
18,667
def shellne(command):
    """Run `command` in the underlying shell and return its stdout.

    stderr is not captured (it echoes to the console).  Raises
    RuntimeError when the command exits with a non-zero status.
    """
    pipe = os.popen(command)
    output = pipe.read()
    status = pipe.close()  # None on success, exit status otherwise
    if status:
        raise RuntimeError('%s failed w/ exit code %d' % (command, status))
    return output
Runs commands on the underlying shell ; any stderr is echo d to the console .
18,668
def find(pattern, root=os.curdir):
    """Collect all hits from locate() into a list (helper around locate).

    Returns a list of matching paths, or None when there are no matches
    (preserving the original contract).
    """
    # The original accumulated a newline-joined string (quadratic
    # concatenation) only to re-split it and pop the empty trailing
    # element; building the list directly is equivalent for any path
    # that does not itself contain a newline.
    hits = [F for F in locate(pattern, root)]
    return hits if hits else None
Helper around locate
18,669
def match(self, *args):
    """Decide whether to enter a case suite (switch/case helper).

    An empty argument list (the default case) or a previous fall-through
    always matches.  A value hit records fall-through state so that
    subsequent cases also match.
    """
    if self.fall or not args:
        return True
    if self.value in args:
        self.fall = True
        return True
    return False
Indicate whether or not to enter a case suite
18,670
def process_slice(self, b_rot90=None):
    """Process the single 2D slice held in self._Mnp_2Dslice.

    Args:
        b_rot90: when truthy, rotate the slice 90 degrees first.
    """
    if b_rot90:
        self._Mnp_2Dslice = np.rot90(self._Mnp_2Dslice)
    # Apply the configured intensity transform, if any.
    if self.func == 'invertIntensities':
        self.invert_slice_intensities()
Processes a single slice .
18,671
def slice_save(self, astr_outputFile):
    """Save the current 2D slice to `astr_outputFile`.

    The output format is taken from the file extension: 'dcm' writes the
    pixels back into the originating DICOM object (and therefore
    requires a DICOM input); every other extension is handed to
    pylab.imsave.
    """
    self._log('Outputfile = %s\n' % astr_outputFile)
    fformat = astr_outputFile.split('.')[-1]
    if fformat == 'dcm':
        if self._dcm:
            # Copy the possibly-transformed pixels back into the source
            # dataset before serializing.
            self._dcm.pixel_array.flat = self._Mnp_2Dslice.flat
            self._dcm.PixelData = self._dcm.pixel_array.tostring()
            self._dcm.save_as(astr_outputFile)
        else:
            raise ValueError(
                'dcm output format only available for DICOM files')
    else:
        pylab.imsave(astr_outputFile, self._Mnp_2Dslice,
                     format=fformat, cmap=cm.Greys_r)
Saves a single slice .
18,672
def run(self):
    """Run the DICOM conversion based on internal state.

    Logs the common DICOM header tags (warning on missing ones), then
    converts either a single slice or, for 3D data, all slices with
    optional reslicing along each dimension.
    """
    self._log('Converting DICOM image.\n')

    def _log_header_tag(tag):
        # Log one DCM header attribute; on a missing tag, log a notice
        # and warn without aborting (same behavior for every tag).
        try:
            self._log('%s: %s\n' % (tag, getattr(self._dcm, tag)))
        except AttributeError:
            self._log('%s: %s\n' % (tag, '%s not found in DCM header.' % tag))
            error.warn(self, '%sTag' % tag)

    # The original repeated this try/except block six times verbatim.
    for tag in ('PatientName', 'PatientAge', 'PatientSex', 'PatientID',
                'SeriesDescription', 'ProtocolName'):
        _log_header_tag(tag)

    if self._b_convertMiddleSlice:
        self._log('Converting middle slice in DICOM series: %d\n'
                  % self._sliceToConvert)

    l_rot90 = [True, True, False]
    misc.mkdir(self._str_outputDir)
    if not self._b_3D:
        str_outputFile = '%s/%s.%s' % (self._str_outputDir,
                                       self._str_outputFileStem,
                                       self._str_outputFileType)
        self.process_slice()
        self.slice_save(str_outputFile)
    if self._b_3D:
        rotCount = 0
        if self._b_reslice:
            for dim in ['x', 'y', 'z']:
                self.dim_save(dimension=dim, makeSubDir=True,
                              rot90=l_rot90[rotCount],
                              indexStart=0, indexStop=-1)
                rotCount += 1
        else:
            self.dim_save(dimension='z', makeSubDir=False, rot90=False,
                          indexStart=0, indexStop=-1)
Runs the DICOM conversion based on internal state .
18,673
def run(self):
    """Run the NIfTI conversion based on internal state.

    Handles 3D and 4D input volumes; optionally restricts the work to
    the middle frame and/or middle slice, and optionally reslices along
    all three dimensions.
    """
    self._log('About to perform NifTI to %s conversion...\n'
              % self._str_outputFileType)
    frames = 1
    frameStart = 0
    frameEnd = 0
    sliceStart = 0
    sliceEnd = 0
    if self._b_4D:
        self._log('4D volume detected.\n')
        frames = self._Vnp_4DVol.shape[3]
    if self._b_3D:
        self._log('3D volume detected.\n')
    if self._b_convertMiddleFrame:
        self._frameToConvert = int(frames / 2)
    # -1 means "convert all frames".
    if self._frameToConvert == -1:
        frameEnd = frames
    else:
        frameStart = self._frameToConvert
        frameEnd = self._frameToConvert + 1
    for f in range(frameStart, frameEnd):
        if self._b_4D:
            # Extract the 3D volume for this frame.
            self._Vnp_3DVol = self._Vnp_4DVol[:, :, :, f]
        slices = self._Vnp_3DVol.shape[2]
        if self._b_convertMiddleSlice:
            self._sliceToConvert = int(slices / 2)
        # -1 likewise means "all slices".
        if self._sliceToConvert == -1:
            sliceEnd = -1
        else:
            sliceStart = self._sliceToConvert
            sliceEnd = self._sliceToConvert + 1
        misc.mkdir(self._str_outputDir)
        if self._b_reslice:
            for dim in ['x', 'y', 'z']:
                self.dim_save(dimension=dim, makeSubDir=True,
                              indexStart=sliceStart, indexStop=sliceEnd,
                              rot90=True)
        else:
            self.dim_save(dimension='z', makeSubDir=False,
                          indexStart=sliceStart, indexStop=sliceEnd,
                          rot90=True)
Runs the NIfTI conversion based on internal state .
18,674
def get_logger(name):
    """Return a logger that writes INFO records to the module-level log_path.

    BUG FIX: the original attached a new FileHandler on *every* call, so
    repeated calls with the same name duplicated every log line; the
    handler is now added only once.
    """
    logger = logging.getLogger(name)
    logger.setLevel(logging.INFO)
    if not logger.handlers:
        file_handler = logging.FileHandler(log_path)
        file_handler.setLevel(logging.INFO)
        formatter = logging.Formatter(
            '%(asctime)s %(name)12s %(levelname)8s %(lineno)s %(message)s',
            datefmt='%m/%d/%Y %I:%M:%S %p')
        file_handler.setFormatter(formatter)
        logger.addHandler(file_handler)
    return logger
Return a logger with a file handler .
18,675
def timeit(method):
    """Decorator reporting how long the wrapped call took (whole seconds)."""
    from functools import wraps  # local import keeps the block self-contained

    @wraps(method)  # BUG FIX: preserve the wrapped function's metadata
    def wrapper(*args, **kwargs):
        start = time.time()
        result = method(*args, **kwargs)
        end = time.time()
        click.echo('Cost {}s'.format(int(end - start)))
        return result
    return wrapper
Compute the download time .
18,676
def login(method):
    """Decorator ensuring the crawler is logged in before `method` runs.

    Reuses the saved cookie while it has not expired; otherwise performs
    a fresh login.  Exits the process on a request failure.
    """
    from functools import wraps  # local import keeps the block self-contained

    @wraps(method)  # BUG FIX: preserve the wrapped function's metadata
    def wrapper(*args, **kwargs):
        crawler = args[0].crawler
        try:
            if os.path.isfile(cookie_path):
                with open(cookie_path, 'r') as cookie_file:
                    cookie = cookie_file.read()
                # The cookie file embeds an ISO date; ISO dates compare
                # correctly as strings.
                expire_time = re.compile(r'\d{4}-\d{2}-\d{2}').findall(cookie)
                now = time.strftime('%Y-%m-%d', time.localtime(time.time()))
                if expire_time[0] > now:
                    crawler.session.cookies.load()
                else:
                    crawler.login()
            else:
                crawler.login()
        except RequestException:
            click.echo('Maybe password error, please try again.')
            sys.exit(1)
        result = method(*args, **kwargs)
        return result
    return wrapper
Require user to login .
18,677
def download_song_by_search(self, song_name):
    """Search NetEase for `song_name` and download the selected song."""
    try:
        song = self.crawler.search_song(song_name, self.quiet)
    except RequestException as exc:
        click.echo(exc)
        return
    self.download_song_by_id(song.song_id, song.song_name, self.folder)
Download a song by its name .
18,678
def download_song_by_id(self, song_id, song_name, folder='.'):
    """Resolve a song's URL (and optionally its lyric) and save it under `folder`."""
    try:
        url = self.crawler.get_song_url(song_id)
        lyric_info = self.crawler.get_song_lyric(song_id) if self.lyric else None
        # Strip characters that would break the output file name.
        sanitized = song_name.replace('/', '').replace('.', '')
        self.crawler.get_song_by_url(url, sanitized, folder, lyric_info)
    except RequestException as exc:
        click.echo(exc)
Download a song by id and save it to disk .
18,679
def download_playlist_by_search(self, playlist_name):
    """Search for a playlist by name and download its songs."""
    try:
        playlist = self.crawler.search_playlist(playlist_name, self.quiet)
    except RequestException as exc:
        click.echo(exc)
        return
    self.download_playlist_by_id(playlist.playlist_id, playlist.playlist_name)
Download a playlist's songs by its name.
18,680
def download_playlist_by_id(self, playlist_id, playlist_name):
    """Download every song of playlist `playlist_id` into a named subfolder."""
    try:
        songs = self.crawler.get_playlist_songs(playlist_id)
    except RequestException as exc:
        click.echo(exc)
        return
    folder = os.path.join(self.folder, playlist_name)
    for song in songs:
        self.download_song_by_id(song.song_id, song.song_name, folder)
Download a playlist's songs by its id.
18,681
def download_person_playlists(self):
    """Download the logged-in user's playlists, including private ones.

    The user id is read from the person-info file persisted at login time.
    """
    with open(person_info_path, 'r') as person_info:
        user_id = int(person_info.read())
    self.download_user_playlists_by_id(user_id)
Download a person's playlists, including private playlists.
18,682
def signal_handler(sign, frame):
    """Handle Ctrl+C: log the signal, say goodbye and exit cleanly."""
    LOG.info('%s => %s', sign, frame)
    click.echo('Bye')
    sys.exit(0)
Capture Ctrl + C .
18,683
def cli(ctx, timeout, proxy, output, quiet, lyric, again):
    # Click group entry point: construct the NetEase downloader once and
    # share it with the sub-commands via the click context object.
    # NOTE(review): kept as comments -- adding a docstring would change
    # the command's `--help` output.
    ctx.obj = NetEase(timeout, proxy, output, quiet, lyric, again)
A command tool to download NetEase - Music s songs .
18,684
def song(netease, name, id):
    # Click command: download one song by name (search) and/or by id;
    # both options may be given, each triggering its own download.
    # NOTE: `id` shadows the builtin but matches the CLI option name.
    if name:
        netease.download_song_by_search(name)
    if id:
        netease.download_song_by_id(id, 'song' + str(id))
Download a song by name or id .
18,685
def album(netease, name, id):
    # Click command: download an album's songs by name (search) and/or by
    # id; both options may be given.
    # NOTE: `id` shadows the builtin but matches the CLI option name.
    if name:
        netease.download_album_by_search(name)
    if id:
        netease.download_album_by_id(id, 'album' + str(id))
Download an album's songs by name or id.
18,686
def artist(netease, name, id):
    # Click command: download an artist's hot songs by name (search)
    # and/or by id; both options may be given.
    # NOTE: `id` shadows the builtin but matches the CLI option name.
    if name:
        netease.download_artist_by_search(name)
    if id:
        netease.download_artist_by_id(id, 'artist' + str(id))
Download an artist's hot songs by name or id.
18,687
def playlist(netease, name, id):
    # Click command: download a playlist's songs by name (search) and/or
    # by id; both options may be given.
    # NOTE: `id` shadows the builtin but matches the CLI option name.
    if name:
        netease.download_playlist_by_search(name)
    if id:
        netease.download_playlist_by_id(id, 'playlist' + str(id))
Download a playlist's songs by name or id.
18,688
def user(netease, name, id):
    # Click command: download a user's playlists by name (search) and/or
    # by user id; both options may be given.
    # NOTE: `id` shadows the builtin but matches the CLI option name.
    if name:
        netease.download_user_playlists_by_search(name)
    if id:
        netease.download_user_playlists_by_id(id)
Download a user's playlists by name or id.
18,689
def select_one_song(songs):
    """Show song search results and let the user pick one.

    A single result is chosen automatically.  Returns a Song.
    """
    if len(songs) == 1:
        select_i = 0
    else:
        table = PrettyTable(['Sequence', 'Song Name', 'Artist Name'])
        for i, song in enumerate(songs, 1):
            table.add_row([i, song['name'], song['ar'][0]['name']])
        click.echo(table)
        select_i = click.prompt('Select one song', type=int, default=1)
        while select_i < 1 or select_i > len(songs):
            select_i = click.prompt('Error Select! Select Again', type=int)
    # select_i == 0 indexes -1, i.e. the only element (preserved quirk).
    chosen = songs[select_i - 1]
    return Song(chosen['id'], chosen['name'])
Display the songs returned by search api .
18,690
def select_one_album(albums):
    """Show album search results and let the user pick one.

    A single result is chosen automatically.  Returns an Album.
    """
    if len(albums) == 1:
        select_i = 0
    else:
        table = PrettyTable(['Sequence', 'Album Name', 'Artist Name'])
        for i, album in enumerate(albums, 1):
            table.add_row([i, album['name'], album['artist']['name']])
        click.echo(table)
        select_i = click.prompt('Select one album', type=int, default=1)
        while select_i < 1 or select_i > len(albums):
            select_i = click.prompt('Error Select! Select Again', type=int)
    # select_i == 0 indexes -1, i.e. the only element (preserved quirk).
    chosen = albums[select_i - 1]
    return Album(chosen['id'], chosen['name'])
Display the albums returned by search api .
18,691
def select_one_artist(artists):
    """Show artist search results and let the user pick one.

    A single result is chosen automatically.  Returns an Artist.
    """
    if len(artists) == 1:
        select_i = 0
    else:
        table = PrettyTable(['Sequence', 'Artist Name'])
        for i, artist in enumerate(artists, 1):
            table.add_row([i, artist['name']])
        click.echo(table)
        select_i = click.prompt('Select one artist', type=int, default=1)
        while select_i < 1 or select_i > len(artists):
            select_i = click.prompt('Error Select! Select Again', type=int)
    # select_i == 0 indexes -1, i.e. the only element (preserved quirk).
    chosen = artists[select_i - 1]
    return Artist(chosen['id'], chosen['name'])
Display the artists returned by search api .
18,692
def select_one_playlist(playlists):
    """Show playlists (search results or a user's list) and let the user pick one.

    A single entry is chosen automatically.  Returns a Playlist.
    """
    if len(playlists) == 1:
        select_i = 0
    else:
        table = PrettyTable(['Sequence', 'Name'])
        for i, playlist in enumerate(playlists, 1):
            table.add_row([i, playlist['name']])
        click.echo(table)
        select_i = click.prompt('Select one playlist', type=int, default=1)
        while select_i < 1 or select_i > len(playlists):
            select_i = click.prompt('Error Select! Select Again', type=int)
    # select_i == 0 indexes -1, i.e. the only element (preserved quirk).
    chosen = playlists[select_i - 1]
    return Playlist(chosen['id'], chosen['name'])
Display the playlists returned by search api or user playlist .
18,693
def select_one_user(users):
    """Show user search results and let the user pick one.

    A single result is chosen automatically.  Returns a User.
    """
    if len(users) == 1:
        select_i = 0
    else:
        table = PrettyTable(['Sequence', 'Name'])
        for i, user in enumerate(users, 1):
            table.add_row([i, user['nickname']])
        click.echo(table)
        select_i = click.prompt('Select one user', type=int, default=1)
        while select_i < 1 or select_i > len(users):
            select_i = click.prompt('Error Select! Select Again', type=int)
    # select_i == 0 indexes -1, i.e. the only element (preserved quirk).
    chosen = users[select_i - 1]
    return User(chosen['userId'], chosen['nickname'])
Display the users returned by search api .
18,694
def exception_handle(method):
    """Decorator translating low-level requests errors into logged, friendlier ones."""
    from functools import wraps  # local import keeps the block self-contained

    @wraps(method)  # BUG FIX: preserve the wrapped function's metadata
    def wrapper(*args, **kwargs):
        try:
            return method(*args, **kwargs)
        except ProxyError:
            LOG.exception('ProxyError when try to get %s.', args)
            raise ProxyError('A proxy error occurred.')
        except ConnectionException:
            LOG.exception('ConnectionError when try to get %s.', args)
            raise ConnectionException('DNS failure, refused connection, etc.')
        except Timeout:
            LOG.exception('Timeout when try to get %s', args)
            raise Timeout('The request timed out.')
        except RequestException:
            LOG.exception('RequestException when try to get %s.', args)
            raise RequestException('Please check out your network.')
    return wrapper
Handle exception raised by requests library .
18,695
def get_request(self, url):
    """GET `url` and return the decoded JSON payload.

    Raises:
        GetRequestIllegal: when the API answers with a non-200 code.
    """
    response = self.session.get(url, timeout=self.timeout,
                                proxies=self.proxies)
    payload = response.json()
    if payload['code'] == 200:
        return payload
    LOG.error('Return %s when try to get %s', payload, url)
    raise GetRequestIllegal(payload)
Send a get request .
18,696
def post_request(self, url, params):
    """POST encrypted `params` to `url` and return the decoded JSON payload.

    Raises:
        PostRequestIllegal: when the API answers with a non-200 code.
    """
    response = self.session.post(url, data=encrypted_request(params),
                                 timeout=self.timeout,
                                 proxies=self.proxies)
    payload = response.json()
    if payload['code'] == 200:
        return payload
    LOG.error('Return %s when try to post %s => %s', payload, url, params)
    raise PostRequestIllegal(payload)
Send a post request .
18,697
def search(self, search_content, search_type, limit=9):
    """Common entry point for the NetEase cloudsearch web API."""
    url = 'http://music.163.com/weapi/cloudsearch/get/web?csrf_token='
    payload = {
        's': search_content,
        'type': search_type,
        'offset': 0,
        'sub': 'false',
        'limit': limit,
    }
    return self.post_request(url, payload)
Search entrance .
18,698
def search_song(self, song_name, quiet=False, limit=9):
    """Search a song by name.

    In quiet mode the top hit is returned directly; otherwise the user
    picks one from a table.

    Raises:
        SearchNotFound: when no song matches.
    """
    result = self.search(song_name, search_type=1, limit=limit)
    if result['result']['songCount'] <= 0:
        LOG.warning('Song %s not existed!', song_name)
        raise SearchNotFound('Song {} not existed.'.format(song_name))
    songs = result['result']['songs']
    if quiet:
        top = songs[0]
        return Song(top['id'], top['name'])
    return self.display.select_one_song(songs)
Search song by song name .
18,699
def search_album(self, album_name, quiet=False, limit=9):
    """Search an album by name.

    In quiet mode the top hit is returned directly; otherwise the user
    picks one from a table.

    Raises:
        SearchNotFound: when no album matches.
    """
    result = self.search(album_name, search_type=10, limit=limit)
    if result['result']['albumCount'] <= 0:
        LOG.warning('Album %s not existed!', album_name)
        raise SearchNotFound('Album {} not existed'.format(album_name))
    albums = result['result']['albums']
    if quiet:
        top = albums[0]
        return Album(top['id'], top['name'])
    return self.display.select_one_album(albums)
Search album by album name .