idx int64 0 63k | question stringlengths 61 4.03k | target stringlengths 6 1.23k |
|---|---|---|
16,300 | def _install_nukeassist ( use_threaded_wrapper ) : import nuke if "--nukeassist" not in nuke . rawArgs : raise ImportError def threaded_wrapper ( func , * args , ** kwargs ) : return nuke . executeInMainThreadWithResult ( func , args , kwargs ) _common_setup ( "NukeAssist" , threaded_wrapper , use_threaded_wrapper ) | Helper function to The Foundry NukeAssist support |
16,301 | def _install_hiero ( use_threaded_wrapper ) : import hiero import nuke if "--hiero" not in nuke . rawArgs : raise ImportError def threaded_wrapper ( func , * args , ** kwargs ) : return hiero . core . executeInMainThreadWithResult ( func , args , kwargs ) _common_setup ( "Hiero" , threaded_wrapper , use_threaded_wrapper ) | Helper function to The Foundry Hiero support |
16,302 | def _install_blender ( use_threaded_wrapper ) : import bpy qml_to_blender = queue . Queue ( ) blender_to_qml = queue . Queue ( ) def threaded_wrapper ( func , * args , ** kwargs ) : qml_to_blender . put ( ( func , args , kwargs ) ) return blender_to_qml . get ( ) class PyblishQMLOperator ( bpy . types . Operator ) : bl_idname = "wm.pyblish_qml_timer" bl_label = "Pyblish QML Timer Operator" _timer = None def modal ( self , context , event ) : if event . type == 'TIMER' : try : func , args , kwargs = qml_to_blender . get_nowait ( ) except queue . Empty : pass else : result = func ( * args , ** kwargs ) blender_to_qml . put ( result ) return { 'PASS_THROUGH' } def execute ( self , context ) : wm = context . window_manager self . _timer = wm . event_timer_add ( 0.01 , context . window ) wm . modal_handler_add ( self ) return { 'RUNNING_MODAL' } def cancel ( self , context ) : wm = context . window_manager wm . event_timer_remove ( self . _timer ) log . info ( "Registering Blender + Pyblish operator" ) bpy . utils . register_class ( PyblishQMLOperator ) bpy . ops . wm . pyblish_qml_timer ( ) _state [ "QmlToBlenderQueue" ] = qml_to_blender _state [ "BlenderToQmlQueue" ] = blender_to_qml _common_setup ( "Blender" , threaded_wrapper , use_threaded_wrapper ) | Blender is a special snowflake |
16,303 | def install ( self , host ) : print ( "Installing.." ) if self . _state [ "installed" ] : return if self . is_headless ( ) : log . info ( "Headless host" ) return print ( "aboutToQuit.." ) self . app . aboutToQuit . connect ( self . _on_application_quit ) if host == "Maya" : print ( "Maya host.." ) window = { widget . objectName ( ) : widget for widget in self . app . topLevelWidgets ( ) } [ "MayaWindow" ] else : window = self . find_window ( ) print ( "event filter.." ) event_filter = self . EventFilter ( window ) window . installEventFilter ( event_filter ) for signal in SIGNALS_TO_REMOVE_EVENT_FILTER : pyblish . api . register_callback ( signal , self . uninstall ) log . info ( "Installed event filter" ) self . window = window self . _state [ "installed" ] = True self . _state [ "eventFilter" ] = event_filter | Setup common to all Qt - based hosts |
16,304 | def find_window ( self ) : window = self . app . activeWindow ( ) while True : parent_window = window . parent ( ) if parent_window : window = parent_window else : break return window | Get top window in host |
16,305 | def tab_should_insert_whitespace ( ) : b = get_app ( ) . current_buffer before_cursor = b . document . current_line_before_cursor return bool ( b . text and ( not before_cursor or before_cursor . isspace ( ) ) ) | When the tab key is pressed with only whitespace character before the cursor do autocompletion . Otherwise insert indentation . |
16,306 | def load_sidebar_bindings ( python_input ) : bindings = KeyBindings ( ) handle = bindings . add sidebar_visible = Condition ( lambda : python_input . show_sidebar ) @ handle ( 'up' , filter = sidebar_visible ) @ handle ( 'c-p' , filter = sidebar_visible ) @ handle ( 'k' , filter = sidebar_visible ) def _ ( event ) : " Go to previous option. " python_input . selected_option_index = ( ( python_input . selected_option_index - 1 ) % python_input . option_count ) @ handle ( 'down' , filter = sidebar_visible ) @ handle ( 'c-n' , filter = sidebar_visible ) @ handle ( 'j' , filter = sidebar_visible ) def _ ( event ) : " Go to next option. " python_input . selected_option_index = ( ( python_input . selected_option_index + 1 ) % python_input . option_count ) @ handle ( 'right' , filter = sidebar_visible ) @ handle ( 'l' , filter = sidebar_visible ) @ handle ( ' ' , filter = sidebar_visible ) def _ ( event ) : " Select next value for current option. " option = python_input . selected_option option . activate_next ( ) @ handle ( 'left' , filter = sidebar_visible ) @ handle ( 'h' , filter = sidebar_visible ) def _ ( event ) : " Select previous value for current option. " option = python_input . selected_option option . activate_previous ( ) @ handle ( 'c-c' , filter = sidebar_visible ) @ handle ( 'c-d' , filter = sidebar_visible ) @ handle ( 'c-d' , filter = sidebar_visible ) @ handle ( 'enter' , filter = sidebar_visible ) @ handle ( 'escape' , filter = sidebar_visible ) def _ ( event ) : " Hide sidebar. " python_input . show_sidebar = False event . app . layout . focus_last ( ) return bindings | Load bindings for the navigation in the sidebar . |
16,307 | def auto_newline ( buffer ) : r insert_text = buffer . insert_text if buffer . document . current_line_after_cursor : insert_text ( '\n' ) else : current_line = buffer . document . current_line_before_cursor . rstrip ( ) insert_text ( '\n' ) unindent = current_line . rstrip ( ) . endswith ( ' pass' ) current_line2 = current_line [ 4 : ] if unindent else current_line for c in current_line2 : if c . isspace ( ) : insert_text ( c ) else : break if current_line [ - 1 : ] == ':' : for x in range ( 4 ) : insert_text ( ' ' ) | r Insert \ n at the cursor position . Also add necessary padding . |
16,308 | def _path_completer_grammar ( self ) : if self . _path_completer_grammar_cache is None : self . _path_completer_grammar_cache = self . _create_path_completer_grammar ( ) return self . _path_completer_grammar_cache | Return the grammar for matching paths inside strings inside Python code . |
16,309 | def get_completions ( self , document , complete_event ) : if complete_event . completion_requested or self . _complete_path_while_typing ( document ) : for c in self . _path_completer . get_completions ( document , complete_event ) : yield c if self . _path_completer_grammar . match ( document . text_before_cursor ) : return if complete_event . completion_requested or self . _complete_python_while_typing ( document ) : script = get_jedi_script_from_document ( document , self . get_locals ( ) , self . get_globals ( ) ) if script : try : completions = script . completions ( ) except TypeError : pass except UnicodeDecodeError : pass except AttributeError : pass except ValueError : pass except KeyError : pass except IOError : pass except AssertionError : pass except SystemError : pass except NotImplementedError : pass except Exception : pass else : for c in completions : yield Completion ( c . name_with_symbols , len ( c . complete ) - len ( c . name_with_symbols ) , display = c . name_with_symbols ) | Get Python completions . |
16,310 | def _inputhook_tk ( inputhook_context ) : import _tkinter from six . moves import tkinter root = tkinter . _default_root def wait_using_filehandler ( ) : stop = [ False ] def done ( * a ) : stop [ 0 ] = True root . createfilehandler ( inputhook_context . fileno ( ) , _tkinter . READABLE , done ) while root . dooneevent ( _tkinter . ALL_EVENTS ) : if stop [ 0 ] : break root . deletefilehandler ( inputhook_context . fileno ( ) ) def wait_using_polling ( ) : while not inputhook_context . input_is_ready ( ) : while root . dooneevent ( _tkinter . ALL_EVENTS | _tkinter . DONT_WAIT ) : pass time . sleep ( .01 ) if root is not None : if hasattr ( root , 'createfilehandler' ) : wait_using_filehandler ( ) else : wait_using_polling ( ) | Inputhook for Tk . Run the Tk eventloop until prompt - toolkit needs to process the next input . |
16,311 | def run_config ( repl , config_file = '~/.ptpython/config.py' ) : assert isinstance ( repl , PythonInput ) assert isinstance ( config_file , six . text_type ) config_file = os . path . expanduser ( config_file ) def enter_to_continue ( ) : six . moves . input ( '\nPress ENTER to continue...' ) if not os . path . exists ( config_file ) : print ( 'Impossible to read %r' % config_file ) enter_to_continue ( ) return try : namespace = { } with open ( config_file , 'rb' ) as f : code = compile ( f . read ( ) , config_file , 'exec' ) six . exec_ ( code , namespace , namespace ) if 'configure' in namespace : namespace [ 'configure' ] ( repl ) except Exception : traceback . print_exc ( ) enter_to_continue ( ) | Execute REPL config file . |
16,312 | def _load_start_paths ( self ) : " Start the Read-Eval-Print Loop. " if self . _startup_paths : for path in self . _startup_paths : if os . path . exists ( path ) : with open ( path , 'rb' ) as f : code = compile ( f . read ( ) , path , 'exec' ) six . exec_ ( code , self . get_globals ( ) , self . get_locals ( ) ) else : output = self . app . output output . write ( 'WARNING | File not found: {}\n\n' . format ( path ) ) | Start the Read - Eval - Print Loop . |
16,313 | def _execute ( self , line ) : output = self . app . output if '' not in sys . path : sys . path . insert ( 0 , '' ) def compile_with_flags ( code , mode ) : " Compile code with the right compiler flags. " return compile ( code , '<stdin>' , mode , flags = self . get_compiler_flags ( ) , dont_inherit = True ) if line . lstrip ( ) . startswith ( '\x1a' ) : self . app . exit ( ) elif line . lstrip ( ) . startswith ( '!' ) : os . system ( line [ 1 : ] ) else : try : code = compile_with_flags ( line , 'eval' ) result = eval ( code , self . get_globals ( ) , self . get_locals ( ) ) locals = self . get_locals ( ) locals [ '_' ] = locals [ '_%i' % self . current_statement_index ] = result if result is not None : out_prompt = self . get_output_prompt ( ) try : result_str = '%r\n' % ( result , ) except UnicodeDecodeError : result_str = '%s\n' % repr ( result ) . decode ( 'utf-8' ) line_sep = '\n' + ' ' * fragment_list_width ( out_prompt ) result_str = line_sep . join ( result_str . splitlines ( ) ) + '\n' if self . enable_syntax_highlighting : formatted_output = merge_formatted_text ( [ out_prompt , PygmentsTokens ( list ( _lex_python_result ( result_str ) ) ) , ] ) else : formatted_output = FormattedText ( out_prompt + [ ( '' , result_str ) ] ) print_formatted_text ( formatted_output , style = self . _current_style , style_transformation = self . style_transformation , include_default_pygments_style = False ) except SyntaxError : code = compile_with_flags ( line , 'exec' ) six . exec_ ( code , self . get_globals ( ) , self . get_locals ( ) ) output . flush ( ) | Evaluate the line and print the result . |
16,314 | def interactive_shell ( ) : print ( 'You should be able to read and update the "counter[0]" variable from this shell.' ) try : yield from embed ( globals = globals ( ) , return_asyncio_coroutine = True , patch_stdout = True ) except EOFError : loop . stop ( ) | Coroutine that starts a Python REPL from which we can access the global counter variable . |
16,315 | def validate ( self , document ) : if document . text . startswith ( '\x1a' ) : return try : if self . get_compiler_flags : flags = self . get_compiler_flags ( ) else : flags = 0 compile ( document . text , '<input>' , 'exec' , flags = flags , dont_inherit = True ) except SyntaxError as e : index = document . translate_row_col_to_index ( e . lineno - 1 , ( e . offset or 1 ) - 1 ) raise ValidationError ( index , 'Syntax Error' ) except TypeError as e : raise ValidationError ( 0 , str ( e ) ) except ValueError as e : raise ValidationError ( 0 , 'Syntax Error: %s' % e ) | Check input for Python syntax errors . |
16,316 | def main ( port = 8222 ) : loop = asyncio . get_event_loop ( ) environ = { 'hello' : 'world' } def create_server ( ) : return MySSHServer ( lambda : environ ) print ( 'Listening on :%i' % port ) print ( 'To connect, do "ssh localhost -p %i"' % port ) loop . run_until_complete ( asyncssh . create_server ( create_server , '' , port , server_host_keys = [ '/etc/ssh/ssh_host_dsa_key' ] ) ) loop . run_forever ( ) | Example that starts the REPL through an SSH server . |
16,317 | def _get_size ( self ) : if self . _chan is None : return Size ( rows = 20 , columns = 79 ) else : width , height , pixwidth , pixheight = self . _chan . get_terminal_size ( ) return Size ( rows = height , columns = width ) | Callable that returns the current Size required by Vt100_Output . |
16,318 | def connection_made ( self , chan ) : self . _chan = chan f = asyncio . ensure_future ( self . cli . run_async ( ) ) def done ( _ ) : chan . close ( ) self . _chan = None f . add_done_callback ( done ) | Client connected run repl in coroutine . |
16,319 | def configure ( repl ) : repl . show_signature = True repl . show_docstring = False repl . show_meta_enter_message = True repl . completion_visualisation = CompletionVisualisation . POP_UP repl . completion_menu_scroll_offset = 0 repl . show_line_numbers = False repl . show_status_bar = True repl . show_sidebar_help = True repl . highlight_matching_parenthesis = True repl . wrap_lines = True repl . enable_mouse_support = True repl . complete_while_typing = True repl . vi_mode = False repl . paste_mode = False repl . prompt_style = 'classic' repl . insert_blank_line_after_output = False repl . enable_history_search = False repl . enable_auto_suggest = False repl . enable_open_in_editor = True repl . enable_system_bindings = True repl . confirm_exit = True repl . enable_input_validation = True repl . use_code_colorscheme ( 'pastie' ) repl . color_depth = 'DEPTH_8_BIT' repl . enable_syntax_highlighting = True | Configuration method . This is called during the start - up of ptpython . |
16,320 | def has_unclosed_brackets ( text ) : stack = [ ] text = re . sub ( r , '' , text ) for c in reversed ( text ) : if c in '])}' : stack . append ( c ) elif c in '[({' : if stack : if ( ( c == '[' and stack [ - 1 ] == ']' ) or ( c == '{' and stack [ - 1 ] == '}' ) or ( c == '(' and stack [ - 1 ] == ')' ) ) : stack . pop ( ) else : return True return False | Starting at the end of the string . If we find an opening bracket for which we didn t had a closing one yet return True . |
16,321 | def document_is_multiline_python ( document ) : def ends_in_multiline_string ( ) : delims = _multiline_string_delims . findall ( document . text ) opening = None for delim in delims : if opening is None : opening = delim elif delim == opening : opening = None return bool ( opening ) if '\n' in document . text or ends_in_multiline_string ( ) : return True def line_ends_with_colon ( ) : return document . current_line . rstrip ( ) [ - 1 : ] == ':' if line_ends_with_colon ( ) or ( document . is_cursor_at_the_end and has_unclosed_brackets ( document . text_before_cursor ) ) or document . text . startswith ( '@' ) : return True elif document . text_before_cursor [ - 1 : ] == '\\' : return True return False | Determine whether this is a multiline Python document . |
16,322 | def if_mousedown ( handler ) : def handle_if_mouse_down ( mouse_event ) : if mouse_event . event_type == MouseEventType . MOUSE_DOWN : return handler ( mouse_event ) else : return NotImplemented return handle_if_mouse_down | Decorator for mouse handlers . Only handle event when the user pressed mouse down . |
16,323 | def _create_popup_window ( title , body ) : assert isinstance ( title , six . text_type ) assert isinstance ( body , Container ) return Frame ( body = body , title = title ) | Return the layout for a pop - up window . It consists of a title bar showing the title text and a body layout . The window is surrounded by borders . |
16,324 | def get_new_document ( self , cursor_pos = None ) : lines = [ ] if self . original_document . text_before_cursor : lines . append ( self . original_document . text_before_cursor ) for line_no in sorted ( self . selected_lines ) : lines . append ( self . history_lines [ line_no ] ) if self . original_document . text_after_cursor : lines . append ( self . original_document . text_after_cursor ) text = '\n' . join ( lines ) if cursor_pos is not None and cursor_pos > len ( text ) : cursor_pos = len ( text ) return Document ( text , cursor_pos ) | Create a Document instance that contains the resulting text . |
16,325 | def _default_buffer_pos_changed ( self , _ ) : if self . app . current_buffer == self . default_buffer : try : line_no = self . default_buffer . document . cursor_position_row - self . history_mapping . result_line_offset if line_no < 0 : raise IndexError history_lineno = sorted ( self . history_mapping . selected_lines ) [ line_no ] except IndexError : pass else : self . history_buffer . cursor_position = self . history_buffer . document . translate_row_col_to_index ( history_lineno , 0 ) | When the cursor changes in the default buffer . Synchronize with history buffer . |
16,326 | def _history_buffer_pos_changed ( self , _ ) : if self . app . current_buffer == self . history_buffer : line_no = self . history_buffer . document . cursor_position_row if line_no in self . history_mapping . selected_lines : default_lineno = sorted ( self . history_mapping . selected_lines ) . index ( line_no ) + self . history_mapping . result_line_offset self . default_buffer . cursor_position = self . default_buffer . document . translate_row_col_to_index ( default_lineno , 0 ) | When the cursor changes in the history buffer . Synchronize . |
16,327 | def python_sidebar ( python_input ) : def get_text_fragments ( ) : tokens = [ ] def append_category ( category ) : tokens . extend ( [ ( 'class:sidebar' , ' ' ) , ( 'class:sidebar.title' , ' %-36s' % category . title ) , ( 'class:sidebar' , '\n' ) , ] ) def append ( index , label , status ) : selected = index == python_input . selected_option_index @ if_mousedown def select_item ( mouse_event ) : python_input . selected_option_index = index @ if_mousedown def goto_next ( mouse_event ) : " Select item and go to next value. " python_input . selected_option_index = index option = python_input . selected_option option . activate_next ( ) sel = ',selected' if selected else '' tokens . append ( ( 'class:sidebar' + sel , ' >' if selected else ' ' ) ) tokens . append ( ( 'class:sidebar.label' + sel , '%-24s' % label , select_item ) ) tokens . append ( ( 'class:sidebar.status' + sel , ' ' , select_item ) ) tokens . append ( ( 'class:sidebar.status' + sel , '%s' % status , goto_next ) ) if selected : tokens . append ( ( '[SetCursorPosition]' , '' ) ) tokens . append ( ( 'class:sidebar.status' + sel , ' ' * ( 13 - len ( status ) ) , goto_next ) ) tokens . append ( ( 'class:sidebar' , '<' if selected else '' ) ) tokens . append ( ( 'class:sidebar' , '\n' ) ) i = 0 for category in python_input . options : append_category ( category ) for option in category . options : append ( i , option . title , '%s' % option . get_current_value ( ) ) i += 1 tokens . pop ( ) return tokens class Control ( FormattedTextControl ) : def move_cursor_down ( self ) : python_input . selected_option_index += 1 def move_cursor_up ( self ) : python_input . selected_option_index -= 1 return Window ( Control ( get_text_fragments ) , style = 'class:sidebar' , width = Dimension . exact ( 43 ) , height = Dimension ( min = 3 ) , scroll_offsets = ScrollOffsets ( top = 1 , bottom = 1 ) ) | Create the Layout for the sidebar with the configurable options . |
16,328 | def python_sidebar_navigation ( python_input ) : def get_text_fragments ( ) : tokens = [ ] tokens . extend ( [ ( 'class:sidebar' , ' ' ) , ( 'class:sidebar.key' , '[Arrows]' ) , ( 'class:sidebar' , ' ' ) , ( 'class:sidebar.description' , 'Navigate' ) , ( 'class:sidebar' , ' ' ) , ( 'class:sidebar.key' , '[Enter]' ) , ( 'class:sidebar' , ' ' ) , ( 'class:sidebar.description' , 'Hide menu' ) , ] ) return tokens return Window ( FormattedTextControl ( get_text_fragments ) , style = 'class:sidebar' , width = Dimension . exact ( 43 ) , height = Dimension . exact ( 1 ) ) | Create the Layout showing the navigation information for the sidebar . |
16,329 | def python_sidebar_help ( python_input ) : token = 'class:sidebar.helptext' def get_current_description ( ) : i = 0 for category in python_input . options : for option in category . options : if i == python_input . selected_option_index : return option . description i += 1 return '' def get_help_text ( ) : return [ ( token , get_current_description ( ) ) ] return ConditionalContainer ( content = Window ( FormattedTextControl ( get_help_text ) , style = token , height = Dimension ( min = 3 ) ) , filter = ShowSidebar ( python_input ) & Condition ( lambda : python_input . show_sidebar_help ) & ~ is_done ) | Create the Layout for the help text for the current item in the sidebar . |
16,330 | def signature_toolbar ( python_input ) : def get_text_fragments ( ) : result = [ ] append = result . append Signature = 'class:signature-toolbar' if python_input . signatures : sig = python_input . signatures [ 0 ] append ( ( Signature , ' ' ) ) try : append ( ( Signature , sig . full_name ) ) except IndexError : return [ ] append ( ( Signature + ',operator' , '(' ) ) try : enumerated_params = enumerate ( sig . params ) except AttributeError : return [ ] for i , p in enumerated_params : description = ( p . description if p else '*' ) sig_index = getattr ( sig , 'index' , 0 ) if i == sig_index : append ( ( Signature + ',current-name' , str ( description ) ) ) else : append ( ( Signature , str ( description ) ) ) append ( ( Signature + ',operator' , ', ' ) ) if sig . params : result . pop ( ) append ( ( Signature + ',operator' , ')' ) ) append ( ( Signature , ' ' ) ) return result return ConditionalContainer ( content = Window ( FormattedTextControl ( get_text_fragments ) , height = Dimension . exact ( 1 ) ) , filter = HasSignature ( python_input ) & ~ ( has_completions & ( show_completions_menu ( python_input ) | show_multi_column_completions_menu ( python_input ) ) ) & ShowSignature ( python_input ) & ~ is_done ) | Return the Layout for the signature . |
16,331 | def status_bar ( python_input ) : TB = 'class:status-toolbar' @ if_mousedown def toggle_paste_mode ( mouse_event ) : python_input . paste_mode = not python_input . paste_mode @ if_mousedown def enter_history ( mouse_event ) : python_input . enter_history ( ) def get_text_fragments ( ) : python_buffer = python_input . default_buffer result = [ ] append = result . append append ( ( TB , ' ' ) ) result . extend ( get_inputmode_fragments ( python_input ) ) append ( ( TB , ' ' ) ) append ( ( TB , '%i/%i ' % ( python_buffer . working_index + 1 , len ( python_buffer . _working_lines ) ) ) ) app = get_app ( ) if not python_input . vi_mode and app . current_buffer == python_input . search_buffer : append ( ( TB , '[Ctrl-G] Cancel search [Enter] Go to this position.' ) ) elif bool ( app . current_buffer . selection_state ) and not python_input . vi_mode : append ( ( TB , '[Ctrl-W] Cut [Meta-W] Copy [Ctrl-Y] Paste [Ctrl-G] Cancel' ) ) else : result . extend ( [ ( TB + ' class:key' , '[F3]' , enter_history ) , ( TB , ' History ' , enter_history ) , ( TB + ' class:key' , '[F6]' , toggle_paste_mode ) , ( TB , ' ' , toggle_paste_mode ) , ] ) if python_input . paste_mode : append ( ( TB + ' class:paste-mode-on' , 'Paste mode (on)' , toggle_paste_mode ) ) else : append ( ( TB , 'Paste mode' , toggle_paste_mode ) ) return result return ConditionalContainer ( content = Window ( content = FormattedTextControl ( get_text_fragments ) , style = TB ) , filter = ~ is_done & renderer_height_is_known & Condition ( lambda : python_input . show_status_bar and not python_input . show_exit_confirmation ) ) | Create the Layout for the status bar . |
16,332 | def exit_confirmation ( python_input , style = 'class:exit-confirmation' ) : def get_text_fragments ( ) : return [ ( style , '\n %s ([y]/n)' % python_input . exit_message ) , ( '[SetCursorPosition]' , '' ) , ( style , ' \n' ) , ] visible = ~ is_done & Condition ( lambda : python_input . show_exit_confirmation ) return ConditionalContainer ( content = Window ( FormattedTextControl ( get_text_fragments ) , style = style ) , filter = visible ) | Create Layout for the exit message . |
16,333 | def meta_enter_message ( python_input ) : def get_text_fragments ( ) : return [ ( 'class:accept-message' , ' [Meta+Enter] Execute ' ) ] def extra_condition ( ) : " Only show when... " b = python_input . default_buffer return ( python_input . show_meta_enter_message and ( not b . document . is_cursor_at_the_end or python_input . accept_input_on_enter is None ) and '\n' in b . text ) visible = ~ is_done & has_focus ( DEFAULT_BUFFER ) & Condition ( extra_condition ) return ConditionalContainer ( content = Window ( FormattedTextControl ( get_text_fragments ) ) , filter = visible ) | Create the Layout for the Meta + Enter message . |
16,334 | def activate_next ( self , _previous = False ) : current = self . get_current_value ( ) options = sorted ( self . values . keys ( ) ) try : index = options . index ( current ) except ValueError : index = 0 if _previous : index -= 1 else : index += 1 next_option = options [ index % len ( options ) ] self . values [ next_option ] ( ) | Activate next value . |
16,335 | def selected_option ( self ) : " Return the currently selected option. " i = 0 for category in self . options : for o in category . options : if i == self . selected_option_index : return o else : i += 1 | Return the currently selected option . |
16,336 | def get_compiler_flags ( self ) : flags = 0 for value in self . get_globals ( ) . values ( ) : if isinstance ( value , __future__ . _Feature ) : flags |= value . compiler_flag return flags | Give the current compiler flags by looking for _Feature instances in the globals . |
16,337 | def install_code_colorscheme ( self , name , style_dict ) : assert isinstance ( name , six . text_type ) assert isinstance ( style_dict , dict ) self . code_styles [ name ] = style_dict | Install a new code color scheme . |
16,338 | def install_ui_colorscheme ( self , name , style_dict ) : assert isinstance ( name , six . text_type ) assert isinstance ( style_dict , dict ) self . ui_styles [ name ] = style_dict | Install a new UI color scheme . |
16,339 | def _create_application ( self ) : return Application ( input = self . input , output = self . output , layout = self . ptpython_layout . layout , key_bindings = merge_key_bindings ( [ load_python_bindings ( self ) , load_auto_suggest_bindings ( ) , load_sidebar_bindings ( self ) , load_confirm_exit_bindings ( self ) , ConditionalKeyBindings ( load_open_in_editor_bindings ( ) , Condition ( lambda : self . enable_open_in_editor ) ) , ConditionalKeyBindings ( self . extra_key_bindings , Condition ( lambda : not self . show_sidebar ) ) ] ) , color_depth = lambda : self . color_depth , paste_mode = Condition ( lambda : self . paste_mode ) , mouse_support = Condition ( lambda : self . enable_mouse_support ) , style = DynamicStyle ( lambda : self . _current_style ) , style_transformation = self . style_transformation , include_default_pygments_style = False , reverse_vi_search_direction = True ) | Create an Application instance . |
16,340 | def _create_buffer ( self ) : python_buffer = Buffer ( name = DEFAULT_BUFFER , complete_while_typing = Condition ( lambda : self . complete_while_typing ) , enable_history_search = Condition ( lambda : self . enable_history_search ) , tempfile_suffix = '.py' , history = self . history , completer = ThreadedCompleter ( self . _completer ) , validator = ConditionalValidator ( self . _validator , Condition ( lambda : self . enable_input_validation ) ) , auto_suggest = ConditionalAutoSuggest ( ThreadedAutoSuggest ( AutoSuggestFromHistory ( ) ) , Condition ( lambda : self . enable_auto_suggest ) ) , accept_handler = self . _accept_handler , on_text_changed = self . _on_input_timeout ) return python_buffer | Create the Buffer for the Python input . |
16,341 | def _on_input_timeout ( self , buff ) : assert isinstance ( buff , Buffer ) app = self . app if self . _get_signatures_thread_running : return self . _get_signatures_thread_running = True document = buff . document def run ( ) : script = get_jedi_script_from_document ( document , self . get_locals ( ) , self . get_globals ( ) ) if script : try : signatures = script . call_signatures ( ) except ValueError : signatures = [ ] except Exception : signatures = [ ] else : try : if signatures : signatures [ 0 ] . params except AttributeError : pass else : signatures = [ ] self . _get_signatures_thread_running = False if buff . text == document . text : self . signatures = signatures if signatures : string = signatures [ 0 ] . docstring ( ) if not isinstance ( string , six . text_type ) : string = string . decode ( 'utf-8' ) self . docstring_buffer . reset ( document = Document ( string , cursor_position = 0 ) ) else : self . docstring_buffer . reset ( ) app . invalidate ( ) else : self . _on_input_timeout ( buff ) get_event_loop ( ) . run_in_executor ( run ) | When there is no input activity in another thread get the signature of the current code . |
16,342 | def enter_history ( self ) : app = get_app ( ) app . vi_state . input_mode = InputMode . NAVIGATION def done ( f ) : result = f . result ( ) if result is not None : self . default_buffer . text = result app . vi_state . input_mode = InputMode . INSERT history = History ( self , self . default_buffer . document ) future = run_coroutine_in_terminal ( history . app . run_async ) future . add_done_callback ( done ) | Display the history . |
16,343 | def get_all_code_styles ( ) : result = dict ( ( name , style_from_pygments_cls ( get_style_by_name ( name ) ) ) for name in get_all_styles ( ) ) result [ 'win32' ] = Style . from_dict ( win32_code_style ) return result | Return a mapping from style names to their classes . |
16,344 | def initialize_extensions ( shell , extensions ) : try : iter ( extensions ) except TypeError : pass else : for ext in extensions : try : shell . extension_manager . load_extension ( ext ) except : ipy_utils . warn . warn ( "Error in loading extension: %s" % ext + "\nCheck your config files in %s" % ipy_utils . path . get_ipython_dir ( ) ) shell . showtraceback ( ) | Partial copy of InteractiveShellApp . init_extensions from IPython . |
16,345 | def paths_for_download ( self ) : if self . _paths_for_download is None : queries = list ( ) try : for sra in self . gsm . relations [ 'SRA' ] : query = sra . split ( "=" ) [ - 1 ] if 'SRX' not in query : raise ValueError ( "Sample looks like it is not an SRA: %s" % query ) logger . info ( "Query: %s" % query ) queries . append ( query ) except KeyError : raise NoSRARelationException ( 'No relation called SRA for %s' % self . gsm . get_accession ( ) ) df = DataFrame ( columns = [ 'download_path' ] ) for query in queries : searchdata = Entrez . esearch ( db = 'sra' , term = query , usehistory = 'y' , retmode = 'json' ) answer = json . loads ( searchdata . read ( ) ) ids = answer [ "esearchresult" ] [ "idlist" ] if len ( ids ) != 1 : raise ValueError ( "There should be one and only one ID per SRX" ) number_of_trials = 10 wait_time = 30 for trial in range ( number_of_trials ) : try : results = Entrez . efetch ( db = "sra" , id = ids [ 0 ] , rettype = "runinfo" , retmode = "text" ) . read ( ) break except HTTPError as httperr : if "502" in str ( httperr ) : logger . warn ( ( "%s, trial %i out of %i, waiting " "for %i seconds." ) % ( str ( httperr ) , trial , number_of_trials , wait_time ) ) time . sleep ( wait_time ) elif httperr . code == 429 : try : header_wait_time = int ( httperr . headers [ "Retry-After" ] ) except : header_wait_time = wait_time logger . warn ( ( "%s, trial %i out of %i, waiting " "for %i seconds." ) % ( str ( httperr ) , trial , number_of_trials , header_wait_time ) ) time . sleep ( header_wait_time ) else : raise httperr try : df_tmp = DataFrame ( [ i . split ( ',' ) for i in results . split ( '\n' ) if i != '' ] [ 1 : ] , columns = [ i . split ( ',' ) for i in results . split ( '\n' ) if i != '' ] [ 0 ] ) except IndexError : logger . error ( ( "SRA is empty (ID: %s, query: %s). " "Check if it is publicly available." ) % ( ids [ 0 ] , query ) ) continue try : df_tmp [ 'download_path' ] except KeyError as e : logger . error ( 'KeyError: ' + str ( e ) + '\n' ) logger . error ( str ( results ) + '\n' ) df = concat ( [ df , df_tmp ] , sort = True ) self . _paths_for_download = [ path for path in df [ 'download_path' ] ] return self . _paths_for_download | List of URLs available for downloading . |
16,346 | def download ( self ) : self . downloaded_paths = list ( ) for path in self . paths_for_download : downloaded_path = list ( ) utils . mkdir_p ( os . path . abspath ( self . directory ) ) sra_run = path . split ( "/" ) [ - 1 ] logger . info ( "Analysing %s" % sra_run ) url = type ( self ) . FTP_ADDRESS_TPL . format ( range_subdir = sra_run [ : 6 ] , file_dir = sra_run ) logger . debug ( "URL: %s" , url ) filepath = os . path . abspath ( os . path . join ( self . directory , "%s.sra" % sra_run ) ) utils . download_from_url ( url , filepath , aspera = self . aspera , silent = self . silent , force = self . force ) if self . filetype in ( "fasta" , "fastq" ) : if utils . which ( 'fastq-dump' ) is None : logger . error ( "fastq-dump command not found" ) ftype = "" if self . filetype == "fasta" : ftype = " --fasta " cmd = "fastq-dump" if utils . which ( 'parallel-fastq-dump' ) is None : cmd += " %s --outdir %s %s" else : logger . debug ( "Using parallel fastq-dump" ) cmd = " parallel-fastq-dump --threads %s" cmd = cmd % self . threads cmd += " %s --outdir %s -s %s" cmd = cmd % ( ftype , self . directory , filepath ) for fqoption , fqvalue in iteritems ( self . fastq_dump_options ) : if fqvalue : cmd += ( " --%s %s" % ( fqoption , fqvalue ) ) elif fqvalue is None : cmd += ( " --%s" % fqoption ) logger . debug ( cmd ) process = sp . Popen ( cmd , stdout = sp . PIPE , stderr = sp . PIPE , shell = True ) logger . info ( "Converting to %s/%s*.%s.gz\n" % ( self . directory , sra_run , self . filetype ) ) pout , perr = process . communicate ( ) downloaded_path = glob . glob ( os . path . join ( self . directory , "%s*.%s.gz" % ( sra_run , self . filetype ) ) ) elif self . filetype == 'sra' : downloaded_path = glob . glob ( os . path . join ( self . directory , "%s*.%s" % ( sra_run , self . filetype ) ) ) else : downloaded_path = glob . glob ( os . path . join ( self . directory , "%s*" % sra_run ) ) logger . error ( "Filetype %s not supported." % self . filetype ) if not self . keep_sra and self . filetype != 'sra' : os . unlink ( filepath ) self . downloaded_paths += downloaded_path return self . downloaded_paths | Download SRA files . |
16,347 | def add_log_file ( path ) : logfile_handler = RotatingFileHandler ( path , maxBytes = 50000 , backupCount = 2 ) formatter = logging . Formatter ( fmt = '%(asctime)s %(levelname)s %(module)s - %(message)s' , datefmt = "%d-%b-%Y %H:%M:%S" ) logfile_handler . setFormatter ( formatter ) geoparse_logger . addHandler ( logfile_handler ) | Add log file . |
16,348 | def _sra_download_worker ( * args ) : gsm = args [ 0 ] [ 0 ] email = args [ 0 ] [ 1 ] dirpath = args [ 0 ] [ 2 ] kwargs = args [ 0 ] [ 3 ] return ( gsm . get_accession ( ) , gsm . download_SRA ( email , dirpath , ** kwargs ) ) | A worker to download SRA files . |
16,349 | def _supplementary_files_download_worker ( * args ) : gsm = args [ 0 ] [ 0 ] download_sra = args [ 0 ] [ 1 ] email = args [ 0 ] [ 2 ] dirpath = args [ 0 ] [ 3 ] sra_kwargs = args [ 0 ] [ 4 ] return ( gsm . get_accession ( ) , gsm . download_supplementary_files ( directory = dirpath , download_sra = download_sra , email = email , ** sra_kwargs ) ) | A worker to download supplementary files . |
16,350 | def get_metadata_attribute ( self , metaname ) : metadata_value = self . metadata . get ( metaname , None ) if metadata_value is None : raise NoMetadataException ( "No metadata attribute named %s" % metaname ) if not isinstance ( metadata_value , list ) : raise TypeError ( "Metadata is not a list and it should be." ) if len ( metadata_value ) > 1 : return metadata_value else : return metadata_value [ 0 ] | Get the metadata attribute by the name . |
16,351 | def _get_metadata_as_string ( self ) : metalist = [ ] for metaname , meta in iteritems ( self . metadata ) : message = "Single value in metadata dictionary should be a list!" assert isinstance ( meta , list ) , message for data in meta : if data : metalist . append ( "!%s_%s = %s" % ( self . geotype . capitalize ( ) , metaname , data ) ) return "\n" . join ( metalist ) | Get the metadata as SOFT formatted string . |
16,352 | def to_soft ( self , path_or_handle , as_gzip = False ) : if isinstance ( path_or_handle , str ) : if as_gzip : with gzip . open ( path_or_handle , 'wt' ) as outfile : outfile . write ( self . _get_object_as_soft ( ) ) else : with open ( path_or_handle , 'w' ) as outfile : outfile . write ( self . _get_object_as_soft ( ) ) else : path_or_handle . write ( self . _get_object_as_soft ( ) ) | Save the object in a SOFT format . |
16,353 | def head ( self ) : summary = list ( ) summary . append ( "%s %s" % ( self . geotype , self . name ) + "\n" ) summary . append ( " - Metadata:" + "\n" ) summary . append ( "\n" . join ( self . _get_metadata_as_string ( ) . split ( "\n" ) [ : 5 ] ) + "\n" ) summary . append ( "\n" ) summary . append ( " - Columns:" + "\n" ) summary . append ( self . columns . to_string ( ) + "\n" ) summary . append ( "\n" ) summary . append ( " - Table:" + "\n" ) summary . append ( "\t" . join ( [ "Index" ] + self . table . columns . tolist ( ) ) + "\n" ) summary . append ( self . table . head ( ) . to_string ( header = None ) + "\n" ) summary . append ( " " * 40 + "..." + " " * 40 + "\n" ) summary . append ( " " * 40 + "..." + " " * 40 + "\n" ) summary . append ( " " * 40 + "..." + " " * 40 + "\n" ) summary . append ( self . table . tail ( ) . to_string ( header = None ) + "\n" ) return "\n" . join ( [ str ( s ) for s in summary ] ) | Print short description of the object . |
16,354 | def _get_object_as_soft ( self ) : soft = [ "^%s = %s" % ( self . geotype , self . name ) , self . _get_metadata_as_string ( ) , self . _get_columns_as_string ( ) , self . _get_table_as_string ( ) ] return "\n" . join ( soft ) | Get the object as SOFT formated string . |
16,355 | def _get_table_as_string ( self ) : tablelist = [ ] tablelist . append ( "!%s_table_begin" % self . geotype . lower ( ) ) tablelist . append ( "\t" . join ( self . table . columns ) ) for idx , row in self . table . iterrows ( ) : tablelist . append ( "\t" . join ( map ( str , row ) ) ) tablelist . append ( "!%s_table_end" % self . geotype . lower ( ) ) return "\n" . join ( tablelist ) | Get table as SOFT formated string . |
16,356 | def _get_columns_as_string ( self ) : columnslist = [ ] for rowidx , row in self . columns . iterrows ( ) : columnslist . append ( "#%s = %s" % ( rowidx , row . description ) ) return "\n" . join ( columnslist ) | Returns columns as SOFT formated string . |
16,357 | def annotate ( self , gpl , annotation_column , gpl_on = "ID" , gsm_on = "ID_REF" , in_place = False ) : if isinstance ( gpl , GPL ) : annotation_table = gpl . table elif isinstance ( gpl , DataFrame ) : annotation_table = gpl else : raise TypeError ( "gpl should be a GPL object or a pandas.DataFrame" ) annotated = self . table . merge ( annotation_table [ [ gpl_on , annotation_column ] ] , left_on = gsm_on , right_on = gpl_on ) del annotated [ gpl_on ] if in_place : self . table = annotated return None else : return annotated | Annotate GSM with provided GPL |
16,358 | def annotate_and_average ( self , gpl , expression_column , group_by_column , rename = True , force = False , merge_on_column = None , gsm_on = None , gpl_on = None ) : if gpl . name != self . metadata [ 'platform_id' ] [ 0 ] and not force : raise KeyError ( "Platforms from GSM (%s) and from GPL (%s)" % ( gpl . name , self . metadata [ 'platform_id' ] ) + " are incompatible. Use force=True to use this GPL." ) if merge_on_column is None and gpl_on is None and gsm_on is None : raise Exception ( "You have to provide one of the two: " "merge_on_column or gpl_on and gsm_on parameters" ) if merge_on_column : logger . info ( "merge_on_column is not None. Using this option." ) tmp_data = self . table . merge ( gpl . table , on = merge_on_column , how = 'outer' ) tmp_data = tmp_data . groupby ( group_by_column ) . mean ( ) [ [ expression_column ] ] else : if gpl_on is None or gsm_on is None : raise Exception ( "Please provide both gpl_on and gsm_on or " "provide merge_on_column only" ) tmp_data = self . table . merge ( gpl . table , left_on = gsm_on , right_on = gpl_on , how = 'outer' ) tmp_data = tmp_data . groupby ( group_by_column ) . mean ( ) [ [ expression_column ] ] if rename : tmp_data . columns = [ self . name ] return tmp_data | Annotate GSM table with provided GPL . |
16,359 | def download_supplementary_files ( self , directory = "./" , download_sra = True , email = None , sra_kwargs = None ) : directory_path = os . path . abspath ( os . path . join ( directory , "%s_%s_%s" % ( 'Supp' , self . get_accession ( ) , re . sub ( r'[\s\*\?\(\),\.;]' , '_' , self . metadata [ 'title' ] [ 0 ] ) ) ) ) utils . mkdir_p ( os . path . abspath ( directory_path ) ) downloaded_paths = dict ( ) if sra_kwargs is None : sra_kwargs = { } blacklist = ( 'NONE' , ) for metakey , metavalue in iteritems ( self . metadata ) : if 'supplementary_file' in metakey : assert len ( metavalue ) == 1 and metavalue != '' if metavalue [ 0 ] in blacklist : logger . warn ( "%s value is blacklisted as '%s' - skipping" % ( metakey , metavalue [ 0 ] ) ) continue if 'sra' not in metavalue [ 0 ] : download_path = os . path . abspath ( os . path . join ( directory , os . path . join ( directory_path , metavalue [ 0 ] . split ( "/" ) [ - 1 ] ) ) ) try : utils . download_from_url ( metavalue [ 0 ] , download_path ) downloaded_paths [ metavalue [ 0 ] ] = download_path except Exception as err : logger . error ( "Cannot download %s supplementary file (%s)" % ( self . get_accession ( ) , err ) ) if download_sra : try : downloaded_files = self . download_SRA ( email , directory = directory , ** sra_kwargs ) downloaded_paths . update ( downloaded_files ) except Exception as err : logger . error ( "Cannot download %s SRA file (%s)" % ( self . get_accession ( ) , err ) ) return downloaded_paths | Download all supplementary data available for the sample . |
16,360 | def download_SRA ( self , email , directory = './' , ** kwargs ) : downloader = SRADownloader ( self , email , directory , ** kwargs ) return { "SRA" : downloader . download ( ) } | Download RAW data as SRA file . |
16,361 | def _get_object_as_soft ( self ) : soft = [ "^%s = %s" % ( self . geotype , self . name ) , self . _get_metadata_as_string ( ) ] return "\n" . join ( soft ) | Get the object as SOFT formatted string . |
16,362 | def _get_object_as_soft ( self ) : soft = [ ] if self . database is not None : soft . append ( self . database . _get_object_as_soft ( ) ) soft += [ "^%s = %s" % ( self . geotype , self . name ) , self . _get_metadata_as_string ( ) ] for subset in self . subsets . values ( ) : soft . append ( subset . _get_object_as_soft ( ) ) soft += [ "^%s = %s" % ( self . geotype , self . name ) , self . _get_columns_as_string ( ) , self . _get_table_as_string ( ) ] return "\n" . join ( soft ) | Return object as SOFT formatted string . |
16,363 | def phenotype_data ( self ) : if self . _phenotype_data is None : pheno_data = { } for gsm_name , gsm in iteritems ( self . gsms ) : tmp = { } for key , value in iteritems ( gsm . metadata ) : if len ( value ) == 0 : tmp [ key ] = np . nan elif key . startswith ( "characteristics_" ) : for i , char in enumerate ( value ) : char = re . split ( ":\s+" , char ) char_type , char_value = [ char [ 0 ] , ": " . join ( char [ 1 : ] ) ] tmp [ key + "." + str ( i ) + "." + char_type ] = char_value else : tmp [ key ] = "," . join ( value ) pheno_data [ gsm_name ] = tmp self . _phenotype_data = DataFrame ( pheno_data ) . T return self . _phenotype_data | Get the phenotype data for each of the sample . |
16,364 | def merge_and_average ( self , platform , expression_column , group_by_column , force = False , merge_on_column = None , gsm_on = None , gpl_on = None ) : if isinstance ( platform , str ) : gpl = self . gpls [ platform ] elif isinstance ( platform , GPL ) : gpl = platform else : raise ValueError ( "Platform has to be of type GPL or string with " "key for platform in GSE" ) data = [ ] for gsm in self . gsms . values ( ) : if gpl . name == gsm . metadata [ 'platform_id' ] [ 0 ] : data . append ( gsm . annotate_and_average ( gpl = gpl , merge_on_column = merge_on_column , expression_column = expression_column , group_by_column = group_by_column , force = force , gpl_on = gpl_on , gsm_on = gsm_on ) ) if len ( data ) == 0 : logger . warning ( "No samples for the platform were found\n" ) return None elif len ( data ) == 1 : return data [ 0 ] else : return data [ 0 ] . join ( data [ 1 : ] ) | Merge and average GSE samples . |
16,365 | def pivot_samples ( self , values , index = "ID_REF" ) : data = [ ] for gsm in self . gsms . values ( ) : tmp_data = gsm . table . copy ( ) tmp_data [ "name" ] = gsm . name data . append ( tmp_data ) ndf = concat ( data ) . pivot ( index = index , values = values , columns = "name" ) return ndf | Pivot samples by specified column . |
16,366 | def pivot_and_annotate ( self , values , gpl , annotation_column , gpl_on = "ID" , gsm_on = "ID_REF" ) : if isinstance ( gpl , GPL ) : annotation_table = gpl . table elif isinstance ( gpl , DataFrame ) : annotation_table = gpl else : raise TypeError ( "gpl should be a GPL object or a pandas.DataFrame" ) pivoted_samples = self . pivot_samples ( values = values , index = gsm_on ) ndf = pivoted_samples . reset_index ( ) . merge ( annotation_table [ [ gpl_on , annotation_column ] ] , left_on = gsm_on , right_on = gpl_on ) . set_index ( gsm_on ) del ndf [ gpl_on ] ndf . columns . name = 'name' return ndf | Annotate GSM with provided GPL . |
16,367 | def download_supplementary_files ( self , directory = 'series' , download_sra = True , email = None , sra_kwargs = None , nproc = 1 ) : if sra_kwargs is None : sra_kwargs = dict ( ) if directory == 'series' : dirpath = os . path . abspath ( self . get_accession ( ) + "_Supp" ) utils . mkdir_p ( dirpath ) else : dirpath = os . path . abspath ( directory ) utils . mkdir_p ( dirpath ) downloaded_paths = dict ( ) if nproc == 1 : downloaded_paths = dict ( ) for gsm in itervalues ( self . gsms ) : logger . info ( "Downloading SRA files for %s series\n" % gsm . name ) paths = gsm . download_supplementary_files ( email = email , download_sra = download_sra , directory = dirpath , sra_kwargs = sra_kwargs ) downloaded_paths [ gsm . name ] = paths elif nproc > 1 : downloaders = list ( ) for gsm in itervalues ( self . gsms ) : downloaders . append ( [ gsm , download_sra , email , dirpath , sra_kwargs ] ) p = Pool ( nproc ) results = p . map ( _supplementary_files_download_worker , downloaders ) downloaded_paths = dict ( results ) else : raise ValueError ( "Nproc should be non-negative: %s" % str ( nproc ) ) return downloaded_paths | Download supplementary data . |
16,368 | def download_SRA ( self , email , directory = 'series' , filterby = None , nproc = 1 , ** kwargs ) : if directory == 'series' : dirpath = os . path . abspath ( self . get_accession ( ) + "_SRA" ) utils . mkdir_p ( dirpath ) else : dirpath = os . path . abspath ( directory ) utils . mkdir_p ( dirpath ) if filterby is not None : gsms_to_use = [ gsm for gsm in self . gsms . values ( ) if filterby ( gsm ) ] else : gsms_to_use = self . gsms . values ( ) if nproc == 1 : downloaded_paths = dict ( ) for gsm in gsms_to_use : logger . info ( "Downloading SRA files for %s series\n" % gsm . name ) downloaded_paths [ gsm . name ] = gsm . download_SRA ( email = email , directory = dirpath , ** kwargs ) elif nproc > 1 : downloaders = list ( ) for gsm in gsms_to_use : downloaders . append ( [ gsm , email , dirpath , kwargs ] ) p = Pool ( nproc ) results = p . map ( _sra_download_worker , downloaders ) downloaded_paths = dict ( results ) else : raise ValueError ( "Nproc should be non-negative: %s" % str ( nproc ) ) return downloaded_paths | Download SRA files for each GSM in series . |
16,369 | def _get_object_as_soft ( self ) : soft = [ ] if self . database is not None : soft . append ( self . database . _get_object_as_soft ( ) ) soft += [ "^%s = %s" % ( self . geotype , self . name ) , self . _get_metadata_as_string ( ) ] for gsm in itervalues ( self . gsms ) : soft . append ( gsm . _get_object_as_soft ( ) ) for gpl in itervalues ( self . gpls ) : soft . append ( gpl . _get_object_as_soft ( ) ) return "\n" . join ( soft ) | Get object as SOFT formatted string . |
16,370 | def destination ( self ) : return os . path . join ( os . path . abspath ( self . outdir ) , self . filename ) | Get the destination path . |
16,371 | def download ( self , force = False , silent = False ) : def _download ( ) : if self . url . startswith ( "http" ) : self . _download_http ( silent = silent ) elif self . url . startswith ( "ftp" ) : self . _download_ftp ( silent = silent ) else : raise ValueError ( "Invalid URL %s" % self . url ) logger . debug ( "Moving %s to %s" % ( self . _temp_file_name , self . destination ) ) shutil . move ( self . _temp_file_name , self . destination ) logger . debug ( "Successfully downloaded %s" % self . url ) try : is_already_downloaded = os . path . isfile ( self . destination ) if is_already_downloaded : if force : try : os . remove ( self . destination ) except Exception : logger . error ( "Cannot delete %s" % self . destination ) logger . info ( "Downloading %s to %s" % ( self . url , self . destination ) ) logger . debug ( "Downloading %s to %s" % ( self . url , self . _temp_file_name ) ) _download ( ) else : logger . info ( ( "File %s already exist. Use force=True if you" " would like to overwrite it." ) % self . destination ) else : _download ( ) finally : try : os . remove ( self . _temp_file_name ) except OSError : pass | Download from URL . |
16,372 | def download_aspera ( self , user , host , silent = False ) : aspera_home = os . environ . get ( "ASPERA_HOME" , None ) if not aspera_home : raise ValueError ( "environment variable $ASPERA_HOME not set" ) if not os . path . exists ( aspera_home ) : raise ValueError ( "$ASPERA_HOME directory {} does not exist" . format ( aspera_home ) ) ascp = os . path . join ( aspera_home , "connect/bin/ascp" ) key = os . path . join ( aspera_home , "connect/etc/asperaweb_id_dsa.openssh" ) if not os . path . exists ( ascp ) : raise ValueError ( "could not find ascp binary" ) if not os . path . exists ( key ) : raise ValueError ( "could not find openssh key" ) parsed_url = urlparse ( self . url ) cmd = "{} -i {} -k1 -T -l400m {}@{}:{} {}" . format ( ascp , key , user , host , parsed_url . path , self . _temp_file_name ) logger . debug ( cmd ) try : pr = sp . Popen ( cmd , shell = True , stdout = sp . PIPE , stderr = sp . PIPE ) stdout , stderr = pr . communicate ( ) if not silent : logger . debug ( "Aspera stdout: " + str ( stdout ) ) logger . debug ( "Aspera stderr: " + str ( stderr ) ) if pr . returncode == 0 : logger . debug ( "Moving %s to %s" % ( self . _temp_file_name , self . destination ) ) shutil . move ( self . _temp_file_name , self . destination ) logger . debug ( "Successfully downloaded %s" % self . url ) else : logger . error ( "Failed to download %s using Aspera Connect" % self . url ) finally : try : os . remove ( self . _temp_file_name ) except OSError : pass | Download file with Aspera Connect . |
16,373 | def md5sum ( filename , blocksize = 8192 ) : with open ( filename , 'rb' ) as fh : m = hashlib . md5 ( ) while True : data = fh . read ( blocksize ) if not data : break m . update ( data ) return m . hexdigest ( ) | Get the MD5 checksum of a file . |
16,374 | def get_GEO ( geo = None , filepath = None , destdir = "./" , how = 'full' , annotate_gpl = False , geotype = None , include_data = False , silent = False , aspera = False , partial = None ) : if geo is None and filepath is None : raise Exception ( "You have to specify filename or GEO accession!" ) if geo is not None and filepath is not None : raise Exception ( "You can specify filename or GEO accession - not both!" ) if silent : logger . setLevel ( 100 ) if filepath is None : filepath , geotype = get_GEO_file ( geo , destdir = destdir , how = how , annotate_gpl = annotate_gpl , include_data = include_data , silent = silent , aspera = aspera ) else : if geotype is None : geotype = path . basename ( filepath ) [ : 3 ] logger . info ( "Parsing %s: " % filepath ) if geotype . upper ( ) == "GSM" : return parse_GSM ( filepath ) elif geotype . upper ( ) == "GSE" : return parse_GSE ( filepath ) elif geotype . upper ( ) == 'GPL' : return parse_GPL ( filepath , partial = partial ) elif geotype . upper ( ) == 'GDS' : return parse_GDS ( filepath ) else : raise ValueError ( ( "Unknown GEO type: %s. Available types: GSM, GSE, " "GPL and GDS." ) % geotype . upper ( ) ) | Get the GEO entry . |
16,375 | def parse_metadata ( lines ) : meta = defaultdict ( list ) for line in lines : line = line . rstrip ( ) if line . startswith ( "!" ) : if "_table_begin" in line or "_table_end" in line : continue key , value = __parse_entry ( line ) meta [ key ] . append ( value ) return dict ( meta ) | Parse list of lines with metadata information from SOFT file . |
16,376 | def parse_columns ( lines ) : data = [ ] index = [ ] for line in lines : line = line . rstrip ( ) if line . startswith ( "#" ) : tmp = __parse_entry ( line ) data . append ( tmp [ 1 ] ) index . append ( tmp [ 0 ] ) return DataFrame ( data , index = index , columns = [ 'description' ] ) | Parse list of lines with columns description from SOFT file . |
16,377 | def parse_GDS_columns ( lines , subsets ) : data = [ ] index = [ ] for line in lines : line = line . rstrip ( ) if line . startswith ( "#" ) : tmp = __parse_entry ( line ) data . append ( tmp [ 1 ] ) index . append ( tmp [ 0 ] ) df = DataFrame ( data , index = index , columns = [ 'description' ] ) subset_ids = defaultdict ( dict ) for subsetname , subset in iteritems ( subsets ) : for expid in subset . metadata [ "sample_id" ] [ 0 ] . split ( "," ) : try : subset_type = subset . get_type ( ) subset_ids [ subset_type ] [ expid ] = subset . metadata [ 'description' ] [ 0 ] except Exception as err : logger . error ( "Error processing subsets: %s for subset %s" % ( subset . get_type ( ) , subsetname ) ) return df . join ( DataFrame ( subset_ids ) ) | Parse list of line with columns description from SOFT file of GDS . |
16,378 | def parse_table_data ( lines ) : data = "\n" . join ( [ i . rstrip ( ) for i in lines if not i . startswith ( ( "^" , "!" , "#" ) ) and i . rstrip ( ) ] ) if data : return read_csv ( StringIO ( data ) , index_col = None , sep = "\t" ) else : return DataFrame ( ) | Parse list of lines from SOFT file into DataFrame . |
16,379 | def parse_GSM ( filepath , entry_name = None ) : if isinstance ( filepath , str ) : with utils . smart_open ( filepath ) as f : soft = [ ] has_table = False for line in f : if "_table_begin" in line or ( not line . startswith ( ( "^" , "!" , "#" ) ) ) : has_table = True soft . append ( line . rstrip ( ) ) else : soft = [ ] has_table = False for line in filepath : if "_table_begin" in line or ( not line . startswith ( ( "^" , "!" , "#" ) ) ) : has_table = True soft . append ( line . rstrip ( ) ) if entry_name is None : sets = [ i for i in soft if i . startswith ( "^" ) ] if len ( sets ) > 1 : raise Exception ( "More than one entry in GPL" ) if len ( sets ) == 0 : raise NoEntriesException ( "No entries found. Check the if accession is correct!" ) entry_name = parse_entry_name ( sets [ 0 ] ) columns = parse_columns ( soft ) metadata = parse_metadata ( soft ) if has_table : table_data = parse_table_data ( soft ) else : table_data = DataFrame ( ) gsm = GSM ( name = entry_name , table = table_data , metadata = metadata , columns = columns ) return gsm | Parse GSM entry from SOFT file . |
16,380 | def parse_GPL ( filepath , entry_name = None , partial = None ) : gsms = { } gses = { } gpl_soft = [ ] has_table = False gpl_name = entry_name database = None if isinstance ( filepath , str ) : with utils . smart_open ( filepath ) as soft : groupper = groupby ( soft , lambda x : x . startswith ( "^" ) ) for is_new_entry , group in groupper : if is_new_entry : entry_type , entry_name = __parse_entry ( next ( group ) ) logger . debug ( "%s: %s" % ( entry_type . upper ( ) , entry_name ) ) if entry_type == "SERIES" : is_data , data_group = next ( groupper ) gse_metadata = parse_metadata ( data_group ) gses [ entry_name ] = GSE ( name = entry_name , metadata = gse_metadata ) elif entry_type == "SAMPLE" : if partial and entry_name not in partial : continue is_data , data_group = next ( groupper ) gsms [ entry_name ] = parse_GSM ( data_group , entry_name ) elif entry_type == "DATABASE" : is_data , data_group = next ( groupper ) database_metadata = parse_metadata ( data_group ) database = GEODatabase ( name = entry_name , metadata = database_metadata ) elif entry_type == "PLATFORM" or entry_type == "Annotation" : gpl_name = entry_name is_data , data_group = next ( groupper ) has_gpl_name = gpl_name or gpl_name is None for line in data_group : if ( "_table_begin" in line or not line . startswith ( ( "^" , "!" , "#" ) ) ) : has_table = True if not has_gpl_name : if match ( "!Annotation_platform\s*=\s*" , line ) : gpl_name = split ( "\s*=\s*" , line ) [ - 1 ] . strip ( ) has_gpl_name = True gpl_soft . append ( line ) else : raise RuntimeError ( "Cannot parse {etype}. Unknown for GPL." . format ( etype = entry_type ) ) else : for line in filepath : if "_table_begin" in line or ( not line . startswith ( ( "^" , "!" , "#" ) ) ) : has_table = True gpl_soft . append ( line . rstrip ( ) ) columns = None try : columns = parse_columns ( gpl_soft ) except Exception : pass metadata = parse_metadata ( gpl_soft ) if has_table : table_data = parse_table_data ( gpl_soft ) else : table_data = DataFrame ( ) gpl = GPL ( name = gpl_name , gses = gses , gsms = gsms , table = table_data , metadata = metadata , columns = columns , database = database ) for gse_id , gse in gpl . gses . items ( ) : for gsm_id in gse . metadata . get ( "sample_id" , [ ] ) : if gsm_id in gpl . gsms : gpl . gses [ gse_id ] . gsms [ gsm_id ] = gpl . gsms [ gsm_id ] return gpl | Parse GPL entry from SOFT file . |
16,381 | def parse_GSE ( filepath ) : gpls = { } gsms = { } series_counter = 0 database = None metadata = { } gse_name = None with utils . smart_open ( filepath ) as soft : groupper = groupby ( soft , lambda x : x . startswith ( "^" ) ) for is_new_entry , group in groupper : if is_new_entry : entry_type , entry_name = __parse_entry ( next ( group ) ) logger . debug ( "%s: %s" % ( entry_type . upper ( ) , entry_name ) ) if entry_type == "SERIES" : gse_name = entry_name series_counter += 1 if series_counter > 1 : raise Exception ( "GSE file should contain only one series entry!" ) is_data , data_group = next ( groupper ) message = ( "The key is not False, probably there is an " "error in the SOFT file" ) assert not is_data , message metadata = parse_metadata ( data_group ) elif entry_type == "SAMPLE" : is_data , data_group = next ( groupper ) gsms [ entry_name ] = parse_GSM ( data_group , entry_name ) elif entry_type == "PLATFORM" : is_data , data_group = next ( groupper ) gpls [ entry_name ] = parse_GPL ( data_group , entry_name ) elif entry_type == "DATABASE" : is_data , data_group = next ( groupper ) database_metadata = parse_metadata ( data_group ) database = GEODatabase ( name = entry_name , metadata = database_metadata ) else : logger . error ( "Cannot recognize type %s" % entry_type ) gse = GSE ( name = gse_name , metadata = metadata , gpls = gpls , gsms = gsms , database = database ) return gse | Parse GSE SOFT file . |
16,382 | def parse_GDS ( filepath ) : dataset_lines = [ ] subsets = { } database = None dataset_name = None with utils . smart_open ( filepath ) as soft : groupper = groupby ( soft , lambda x : x . startswith ( "^" ) ) for is_new_entry , group in groupper : if is_new_entry : entry_type , entry_name = __parse_entry ( next ( group ) ) logger . debug ( "%s: %s" % ( entry_type . upper ( ) , entry_name ) ) if entry_type == "SUBSET" : is_data , data_group = next ( groupper ) message = ( "The key is not False, probably there is an " "error in the SOFT file" ) assert not is_data , message subset_metadata = parse_metadata ( data_group ) subsets [ entry_name ] = GDSSubset ( name = entry_name , metadata = subset_metadata ) elif entry_type == "DATABASE" : is_data , data_group = next ( groupper ) message = ( "The key is not False, probably there is an " "error in the SOFT file" ) assert not is_data , message database_metadata = parse_metadata ( data_group ) database = GEODatabase ( name = entry_name , metadata = database_metadata ) elif entry_type == "DATASET" : is_data , data_group = next ( groupper ) dataset_name = entry_name for line in data_group : dataset_lines . append ( line . rstrip ( ) ) else : logger . error ( "Cannot recognize type %s" % entry_type ) metadata = parse_metadata ( dataset_lines ) columns = parse_GDS_columns ( dataset_lines , subsets ) table = parse_table_data ( dataset_lines ) return GDS ( name = dataset_name , metadata = metadata , columns = columns , table = table , subsets = subsets , database = database ) | Parse GDS SOFT file . |
16,383 | def download_from_url ( url , destination_path , force = False , aspera = False , silent = False ) : if aspera and url . startswith ( "http" ) : logger . warn ( "Aspera Connect allows only FTP servers - falling back to " "normal download" ) aspera = False try : fn = Downloader ( url , outdir = os . path . dirname ( destination_path ) ) if aspera : fn . download_aspera ( user = "anonftp" , host = "ftp-trace.ncbi.nlm.nih.gov" , silent = silent ) else : fn . download ( silent = silent , force = force ) except URLError : logger . error ( "Cannot find file %s" % url ) | Download file from remote server . |
16,384 | def smart_open ( filepath ) : if filepath [ - 2 : ] == "gz" : mode = "rt" fopen = gzip . open else : mode = "r" fopen = open if sys . version_info [ 0 ] < 3 : fh = fopen ( filepath , mode ) else : fh = fopen ( filepath , mode , errors = "ignore" ) try : yield fh except IOError : fh . close ( ) finally : fh . close ( ) | Open file intelligently depending on the source and python version . |
16,385 | def bandit ( self , choice_rewards ) : return max ( choice_rewards , key = lambda a : np . mean ( choice_rewards [ a ] ) ) | Return the choice to take next using multi - armed bandit |
16,386 | def select ( self , choice_scores ) : choice_rewards = { } for choice , scores in choice_scores . items ( ) : if choice not in self . choices : continue choice_rewards [ choice ] = self . compute_rewards ( scores ) return self . bandit ( choice_rewards ) | Select the next best choice to make |
16,387 | def compute_rewards ( self , scores ) : if len ( scores ) > self . k : scores = np . copy ( scores ) inds = np . argsort ( scores ) [ : - self . k ] scores [ inds ] = np . nan return list ( scores ) | Retain the K best scores and replace the rest with nans |
16,388 | def compute_rewards ( self , scores ) : k = self . k m = max ( len ( scores ) - k , 0 ) best_scores = sorted ( scores ) [ - k - 1 : ] velocities = np . diff ( best_scores ) nans = np . full ( m , np . nan ) return list ( velocities ) + list ( nans ) | Compute the velocity of the best scores |
16,389 | def predict ( self , X ) : if np . random . random ( ) < self . POU : return Uniform ( self . tunables ) . predict ( X ) return super ( GPEiVelocity , self ) . predict ( X ) | Use the POU value we computed in fit to choose randomly between GPEi and uniform random selection . |
16,390 | def compute_rewards ( self , scores ) : for i in range ( len ( scores ) ) : if i >= self . k : scores [ i ] = 0. return scores | Retain the K most recent scores and replace the rest with zeros |
16,391 | def select ( self , choice_scores ) : min_num_scores = min ( [ len ( s ) for s in choice_scores . values ( ) ] ) if min_num_scores >= K_MIN : logger . info ( '{klass}: using Best K bandit selection' . format ( klass = type ( self ) . __name__ ) ) reward_func = self . compute_rewards else : logger . warning ( '{klass}: Not enough choices to do K-selection; using plain UCB1' . format ( klass = type ( self ) . __name__ ) ) reward_func = super ( RecentKReward , self ) . compute_rewards choice_rewards = { } for choice , scores in choice_scores . items ( ) : if choice not in self . choices : continue choice_rewards [ choice ] = reward_func ( scores ) return self . bandit ( choice_rewards ) | Use the top k learner s scores for usage in rewards for the bandit calculation |
16,392 | def compute_rewards ( self , scores ) : recent_scores = scores [ : - self . k - 2 : - 1 ] velocities = [ recent_scores [ i ] - recent_scores [ i + 1 ] for i in range ( len ( recent_scores ) - 1 ) ] zeros = ( len ( scores ) - self . k ) * [ 0 ] return velocities + zeros | Compute the velocity of thte k + 1 most recent scores . |
16,393 | def select ( self , choice_scores ) : alg_scores = { } for algorithm , choices in self . by_algorithm . items ( ) : if not set ( choices ) & set ( choice_scores . keys ( ) ) : continue sublists = [ choice_scores . get ( c , [ ] ) for c in choices ] alg_scores [ algorithm ] = sum ( sublists , [ ] ) best_algorithm = self . bandit ( alg_scores ) best_subset = self . by_algorithm [ best_algorithm ] normal_ucb1 = UCB1 ( choices = best_subset ) return normal_ucb1 . select ( choice_scores ) | Groups the frozen sets by algorithm and first chooses an algorithm based on the traditional UCB1 criteria . |
16,394 | def _generate_grid ( self ) : grid_axes = [ ] for _ , param in self . tunables : grid_axes . append ( param . get_grid_axis ( self . grid_width ) ) return grid_axes | Get the all possible values for each of the tunables . |
16,395 | def _candidates_from_grid ( self , n = 1000 ) : used_vectors = set ( tuple ( v ) for v in self . X ) grid_size = self . grid_width ** len ( self . tunables ) if len ( used_vectors ) == grid_size : return None all_vectors = set ( itertools . product ( * self . _grid_axes ) ) remaining_vectors = all_vectors - used_vectors candidates = np . array ( list ( map ( np . array , remaining_vectors ) ) ) np . random . shuffle ( candidates ) return candidates [ 0 : n ] | Get unused candidates from the grid or parameters . |
16,396 | def _random_candidates ( self , n = 1000 ) : candidates = np . zeros ( ( n , len ( self . tunables ) ) ) for i , tunable in enumerate ( self . tunables ) : param = tunable [ 1 ] lo , hi = param . range if param . is_integer : column = np . random . randint ( lo , hi + 1 , size = n ) else : diff = hi - lo column = lo + diff * np . random . rand ( n ) candidates [ : , i ] = column return candidates | Generate a matrix of random parameters column by column . |
16,397 | def _create_candidates ( self , n = 1000 ) : if self . grid : return self . _candidates_from_grid ( n ) else : return self . _random_candidates ( n ) | Generate random hyperparameter vectors |
16,398 | def propose ( self , n = 1 ) : proposed_params = [ ] for i in range ( n ) : candidate_params = self . _create_candidates ( ) if candidate_params is None : return None predictions = self . predict ( candidate_params ) idx = self . _acquire ( predictions ) params = { } for i in range ( candidate_params [ idx , : ] . shape [ 0 ] ) : inverse_transformed = self . tunables [ i ] [ 1 ] . inverse_transform ( candidate_params [ idx , i ] ) params [ self . tunables [ i ] [ 0 ] ] = inverse_transformed proposed_params . append ( params ) return params if n == 1 else proposed_params | Use the trained model to propose a new set of parameters . |
16,399 | def add ( self , X , y ) : if isinstance ( X , dict ) : X = [ X ] y = [ y ] for i in range ( len ( X ) ) : each = X [ i ] if y [ i ] > self . _best_score : self . _best_score = y [ i ] self . _best_hyperparams = X [ i ] vectorized = [ ] for tunable in self . tunables : vectorized . append ( each [ tunable [ 0 ] ] ) if self . X_raw is not None : self . X_raw = np . append ( self . X_raw , np . array ( [ vectorized ] , dtype = object ) , axis = 0 , ) else : self . X_raw = np . array ( [ vectorized ] , dtype = object ) self . y_raw = np . append ( self . y_raw , y ) x_transformed = np . array ( [ ] , dtype = np . float64 ) if len ( self . X_raw . shape ) > 1 and self . X_raw . shape [ 1 ] > 0 : x_transformed = self . tunables [ 0 ] [ 1 ] . fit_transform ( self . X_raw [ : , 0 ] , self . y_raw , ) . astype ( float ) for i in range ( 1 , self . X_raw . shape [ 1 ] ) : transformed = self . tunables [ i ] [ 1 ] . fit_transform ( self . X_raw [ : , i ] , self . y_raw , ) . astype ( float ) x_transformed = np . column_stack ( ( x_transformed , transformed ) ) self . fit ( x_transformed , self . y_raw ) | Add data about known tunable hyperparameter configurations and scores . |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.