idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
231,000
def disable(self):
    """Disable this container and, recursively, every widget inside it."""
    self._enabled = False
    # Only children that support the enable/disable protocol are touched.
    for item in self.children:
        if not isinstance(item, (Container, Widget)):
            continue
        item.disable()
Disable all the widgets in this container
38
7
231,001
def enable(self):
    """Enable this container and, recursively, every widget inside it."""
    self._enabled = True
    # Only children that support the enable/disable protocol are touched.
    for item in self.children:
        if not isinstance(item, (Container, Widget)):
            continue
        item.enable()
Enable all the widgets in this container
38
7
231,002
def exit_full_screen(self):
    """Change from full screen back to windowed mode and remove the key binding."""
    self.tk.attributes("-fullscreen", False)
    self._full_screen = False
    # Drop the escape-key handler that was registered when entering full screen.
    self.events.remove_event("<FullScreen.Escape>")
Change from full screen to windowed mode and remove key binding
48
12
231,003
def _set_propagation ( self , width , height ) : if width is None : width = 0 if height is None : height = 0 # set the propagate value propagate_function = self . tk . pack_propagate if self . layout == "grid" : propagate_function = self . tk . grid_propagate propagate_value = True # if height or width > 0 need to stop propagation if isinstance ( width , int ) : if width > 0 : propagate_value = False if isinstance ( height , int ) : if height > 0 : propagate_value = False # if you specify a height or width you must specify they other # (unless its a fill) if isinstance ( width , int ) and isinstance ( height , int ) : if ( width == 0 and height > 0 ) or ( height == 0 and width > 0 ) : utils . error_format ( "You must specify a width and a height for {}" . format ( self . description ) ) propagate_function ( propagate_value )
Set the propagation value of the tk widget dependent on the width and height
218
15
231,004
def load(self, addr, ty):
    """Load a value from memory into a new VEX temporary register.

    :param addr: VexValue holding the address to load from.
    :param ty: VEX type of the value to load.
    :return: VexValue wrapping the loaded temporary.
    """
    rdt = self.irsb_c.load(addr.rdt, ty)
    return VexValue(self.irsb_c, rdt)
Load a value from memory into a VEX temporary register .
44
12
231,005
def constant(self, val, ty):
    """Create a constant of VEX type `ty` from the plain value `val`.

    :return: VexValue wrapping the constant.
    :raises Exception: if `val` is already a wrapped value.
    """
    # NOTE(review): the guard rejects VexValues that are not IRExprs, but the
    # message implies IRExprs should be rejected too — confirm intent.
    if isinstance(val, VexValue) and not isinstance(val, IRExpr):
        raise Exception('Constant cannot be made from VexValue or IRExpr')
    rdt = self.irsb_c.mkconst(val, ty)
    return VexValue(self.irsb_c, rdt)
Creates a constant as a VexValue
85
9
231,006
def put(self, val, reg):
    """Commit a value from a VEX temporary into a machine register.

    :param val: VexValue to store.
    :param reg: register name/identifier, resolved to an offset via the arch.
    """
    offset = self.lookup_register(self.irsb_c.irsb.arch, reg)
    self.irsb_c.put(val.rdt, offset)
Puts a value from a VEX temporary register into a machine register. This is how the results of operations done to registers get committed to the machine's state.
48
33
231,007
def put_conditional(self, cond, valiftrue, valiffalse, reg):
    """Like ``put``, but selects which value to commit based on a condition.

    :param cond: VexValue condition; nonzero selects `valiftrue`.
    :param valiftrue: VexValue written when the condition holds.
    :param valiffalse: VexValue written otherwise.
    :param reg: destination register name/identifier.
    """
    chosen = self.irsb_c.ite(cond.rdt, valiftrue.rdt, valiffalse.rdt)
    reg_offset = self.lookup_register(self.irsb_c.irsb.arch, reg)
    self.irsb_c.put(chosen, reg_offset)
Like put except it checks a condition to decide what to put in the destination register .
87
17
231,008
def store(self, val, addr):
    """Store a VexValue in memory at the specified address.

    :param val: VexValue to store.
    :param addr: VexValue holding the destination address.
    """
    self.irsb_c.store(addr.rdt, val.rdt)
Store a VexValue in memory at the specified location.
29
13
231,009
def jump(self, condition, to_addr, jumpkind=JumpKind.Boring, ip_offset=None):
    """Jump to `to_addr` under `condition` (branches, calls, returns, etc.).

    :param condition: VexValue condition, or a falsy value for an
        unconditional (default-exit) jump.
    :param to_addr: destination as a VexValue, an int, or a raw RdTmp.
    :param jumpkind: VEX jumpkind for the exit.
    :param ip_offset: register-file offset of the IP; defaults to the arch's.
    :raises ValueError: if `to_addr` has an unsupported type.
    """
    to_addr_ty = None
    if isinstance(to_addr, VexValue):
        # Unpack a VV
        to_addr_rdt = to_addr.rdt
        to_addr_ty = to_addr.ty
    elif isinstance(to_addr, int):
        # Direct jump to an int, make an RdT and Ty
        to_addr_ty = vex_int_class(self.irsb_c.irsb.arch.bits).type
        to_addr = self.constant(to_addr, to_addr_ty)  # TODO archinfo may be changing
        to_addr_rdt = to_addr.rdt
    elif isinstance(to_addr, RdTmp):
        # An RdT; just get the Ty of the arch's pointer type
        to_addr_ty = vex_int_class(self.irsb_c.irsb.arch.bits).type
        to_addr_rdt = to_addr
    else:
        raise ValueError("Jump destination has unknown type: " + repr(type(to_addr)))
    if not condition:
        # This is the default exit.
        self.irsb_c.irsb.jumpkind = jumpkind
        self.irsb_c.irsb.next = to_addr_rdt
    else:
        # add another exit
        # EDG says: We should make sure folks set ArchXYZ.ip_offset like they're supposed to
        if ip_offset is None:
            ip_offset = self.arch.ip_offset
        assert ip_offset is not None
        # Conditional branch: the added exit is taken when the condition is
        # FALSE (fall-through), while the IRSB's default exit is the target.
        negated_condition_rdt = self.ite(condition, self.constant(0, condition.ty), self.constant(1, condition.ty))
        direct_exit_target = self.constant(self.addr + (self.bitwidth // 8), to_addr_ty)
        self.irsb_c.add_exit(negated_condition_rdt, direct_exit_target.rdt, jumpkind, ip_offset)
        self.irsb_c.irsb.jumpkind = jumpkind
        self.irsb_c.irsb.next = to_addr_rdt
Jump to a specified destination under the specified condition . Used for branches jumps calls returns etc .
493
18
231,010
def register(lifter, arch_name):
    """Register a Lifter or Postprocessor for use by pyvex on `arch_name`.

    Lifters get priority in registration order; postprocessors run in
    registration order. A class deriving from both is registered as both.
    """
    if issubclass(lifter, Lifter):
        l.debug("Registering lifter %s for architecture %s.", lifter.__name__, arch_name)
        lifters[arch_name].append(lifter)
    if issubclass(lifter, Postprocessor):
        l.debug("Registering postprocessor %s for architecture %s.", lifter.__name__, arch_name)
        postprocessors[arch_name].append(lifter)
Registers a Lifter or Postprocessor to be used by pyvex . Lifters are are given priority based on the order in which they are registered . Postprocessors will be run in registration order .
118
42
231,011
def child_expressions(self):
    """A list of every expression this expression transitively evaluates."""
    found = []
    for slot in self.__slots__:
        value = getattr(self, slot)
        if not isinstance(value, IRExpr):
            continue
        found.append(value)
        found.extend(value.child_expressions)
    return found
A list of all of the expressions that this expression ends up evaluating .
63
14
231,012
def constants(self):
    """A list of every constant this expression transitively uses."""
    found = []
    for slot in self.__slots__:
        value = getattr(self, slot)
        if isinstance(value, IRExpr):
            # Sub-expressions contribute their own constants.
            found.extend(value.constants)
        elif isinstance(value, IRConst):
            found.append(value)
    return found
A list of all of the constants that this expression ends up using .
68
14
231,013
def expressions(self):
    """Iterate over every expression in the IRSB, ending with `next`."""
    for statement in self.statements:
        yield from statement.expressions
    yield self.next
Return an iterator of all expressions contained in the IRSB .
28
12
231,014
def instructions(self):
    """The number of instructions in this block (computed lazily and cached)."""
    if self._instructions is None:
        if self.statements is None:
            self._instructions = 0
        else:
            # Each IMark statement marks the start of one instruction.
            self._instructions = len([s for s in self.statements if type(s) is stmt.IMark])
    return self._instructions
The number of instructions in this block
70
7
231,015
def instruction_addresses(self):
    """Addresses of the instructions in this block (computed lazily and cached)."""
    if self._instruction_addresses is None:
        if self.statements is None:
            self._instruction_addresses = []
        else:
            # Each IMark carries the instruction address plus a delta.
            self._instruction_addresses = [s.addr + s.delta for s in self.statements if type(s) is stmt.IMark]
    return self._instruction_addresses
Addresses of instructions in this block .
87
8
231,016
def size(self):
    """The size of this block in bytes (sum of IMark lengths; cached)."""
    if self._size is None:
        imark_lengths = (s.len for s in self.statements if type(s) is stmt.IMark)
        self._size = sum(imark_lengths)
    return self._size
The size of this block in bytes
47
7
231,017
def operations(self):
    """A list of all operations done by the IRSB, as libVEX enum names."""
    # Only expressions that carry an 'op' attribute represent operations.
    return [e.op for e in self.expressions if hasattr(e, 'op')]
A list of all operations done by the IRSB as libVEX enum names
38
16
231,018
def constant_jump_targets(self):
    """The set of statically-known jump targets of this basic block."""
    targets = set()
    if self.exit_statements:
        targets.update(s.dst.value for _, _, s in self.exit_statements)
    default_target = self.default_exit_target
    if default_target is not None:
        targets.add(default_target)
    return targets
A set of the static jump targets of the basic block .
82
12
231,019
def constant_jump_targets_and_jumpkinds(self):
    """Map each statically-known jump target of this block to its jumpkind."""
    mapping = dict()
    if self.exit_statements:
        for _, _, exit_stmt in self.exit_statements:
            mapping[exit_stmt.dst.value] = exit_stmt.jumpkind
    default_target = self.default_exit_target
    if default_target is not None:
        mapping[default_target] = self.jumpkind
    return mapping
A dict of the static jump targets of the basic block to their jumpkind .
96
16
231,020
def _pp_str(self):
    """Return the pretty-printed IRSB as a single string."""
    sa = []
    sa.append("IRSB {")
    if self.statements is not None:
        sa.append("   %s" % self.tyenv)
    sa.append("")
    if self.statements is not None:
        for i, s in enumerate(self.statements):
            # Statements that reference registers are printed with the
            # register's symbolic name instead of its raw offset.
            if isinstance(s, stmt.Put):
                stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.offset, s.data.result_size(self.tyenv) // 8))
            elif isinstance(s, stmt.WrTmp) and isinstance(s.data, expr.Get):
                stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.data.offset, s.data.result_size(self.tyenv) // 8))
            elif isinstance(s, stmt.Exit):
                stmt_str = s.__str__(reg_name=self.arch.translate_register_name(s.offsIP, self.arch.bits // 8))
            else:
                stmt_str = s.__str__()
            sa.append("   %02d | %s" % (i, stmt_str))
    else:
        sa.append("   Statements are omitted.")
    # The default exit: where control flows when no side-exit is taken.
    sa.append("   NEXT: PUT(%s) = %s; %s" % (self.arch.translate_register_name(self.offsIP), self.next, self.jumpkind))
    sa.append("}")
    return '\n'.join(sa)
Return the pretty - printed IRSB .
371
8
231,021
def _is_defaultexit_direct_jump ( self ) : if not ( self . jumpkind == 'Ijk_InvalICache' or self . jumpkind == 'Ijk_Boring' or self . jumpkind == 'Ijk_Call' ) : return False target = self . default_exit_target return target is not None
Checks if the default of this IRSB a direct jump or not .
77
15
231,022
def lookup(self, tmp):
    """Return the type of temporary variable `tmp` as an enum string.

    :raises IndexError: if `tmp` is not a valid temporary number.
    """
    # Valid temporaries are numbered 0 .. types_used - 1. The original
    # check used `tmp > self.types_used`, which let tmp == types_used slip
    # through to an unlogged IndexError from the list access below.
    if tmp < 0 or tmp >= self.types_used:
        l.debug("Invalid temporary number %d", tmp)
        raise IndexError(tmp)
    return self.types[tmp]
Return the type of temporary variable tmp as an enum string
48
11
231,023
def _lift(self, data, bytes_offset=None, max_bytes=None, max_inst=None, opt_level=1, traceflags=None, allow_arch_optimizations=None, strict_block_end=None, skip_stmts=False, collect_data_refs=False):
    """Wrapper around the lift method on Lifters; not to be overridden.

    Stashes all lifting parameters on the instance, creates an empty IRSB,
    invokes the subclass's lift(), and returns the populated IRSB.
    """
    irsb = IRSB.empty_block(self.arch, self.addr)
    self.data = data
    self.bytes_offset = bytes_offset
    self.opt_level = opt_level
    self.traceflags = traceflags
    self.allow_arch_optimizations = allow_arch_optimizations
    self.strict_block_end = strict_block_end
    self.collect_data_refs = collect_data_refs
    self.max_inst = max_inst
    self.max_bytes = max_bytes
    self.skip_stmts = skip_stmts
    self.irsb = irsb
    # lift() is implemented by the concrete Lifter subclass and fills self.irsb.
    self.lift()
    return self.irsb
Wrapper around the lift method on Lifters . Should not be overridden in child classes .
211
19
231,024
def exp_backoff(attempt, cap=3600, base=300):
    """Return an exponential backoff delay: min(cap, base * 2 ** attempt).

    Implemented without evaluating huge powers of two for large attempts.
    """
    # Beyond this many attempts the uncapped delay would exceed `cap`.
    highest_uncapped = math.log(cap / base, 2)
    return base * 2 ** attempt if attempt <= highest_uncapped else cap
Exponential backoff time
71
5
231,025
def get_proxy(self, proxy_address):
    """Return the full proxy name for the host:port of `proxy_address`.

    Returns None when `proxy_address` is empty or its host:port is unknown.
    """
    if not proxy_address:
        return None
    key = extract_proxy_hostport(proxy_address)
    return self.proxies_by_hostport.get(key, None)
Return the complete proxy name associated with the hostport of the given proxy_address. If proxy_address is unknown or empty, return None.
51
30
231,026
def mark_dead(self, proxy, _time=None):
    """Mark a proxy as dead and schedule its next re-check via backoff.

    :param proxy: proxy name; must already be in self.proxies.
    :param _time: current-time override (for tests); defaults to time.time().
    """
    if proxy not in self.proxies:
        # NOTE(review): logger.warn is a deprecated alias of logger.warning.
        logger.warn("Proxy <%s> was not found in proxies list" % proxy)
        return
    if proxy in self.good:
        logger.debug("GOOD proxy became DEAD: <%s>" % proxy)
    else:
        logger.debug("Proxy <%s> is DEAD" % proxy)
    self.unchecked.discard(proxy)
    self.good.discard(proxy)
    self.dead.add(proxy)
    now = _time or time.time()
    state = self.proxies[proxy]
    # Backoff grows with the number of consecutive failures so far.
    state.backoff_time = self.backoff(state.failed_attempts)
    state.next_check = now + state.backoff_time
    state.failed_attempts += 1
Mark a proxy as dead
172
5
231,027
def mark_good(self, proxy):
    """Mark a proxy as good and reset its failure counter."""
    if proxy not in self.proxies:
        # NOTE(review): logger.warn is a deprecated alias of logger.warning.
        logger.warn("Proxy <%s> was not found in proxies list" % proxy)
        return
    if proxy not in self.good:
        logger.debug("Proxy <%s> is GOOD" % proxy)
    self.unchecked.discard(proxy)
    self.dead.discard(proxy)
    self.good.add(proxy)
    # A success wipes the backoff history.
    self.proxies[proxy].failed_attempts = 0
Mark a proxy as good
101
5
231,028
def reanimate(self, _time=None):
    """Move dead proxies whose backoff window has expired back to unchecked.

    :param _time: current-time override (for tests); defaults to time.time().
    :return: number of proxies reanimated.
    """
    revived = 0
    now = _time or time.time()
    # Iterate over a snapshot since we mutate self.dead in the loop.
    for proxy in list(self.dead):
        state = self.proxies[proxy]
        assert state.next_check is not None
        if state.next_check > now:
            continue
        self.dead.remove(proxy)
        self.unchecked.add(proxy)
        revived += 1
    return revived
Move dead proxies to unchecked if a backoff timeout passes
94
11
231,029
def reset(self):
    """Move every dead proxy back into the unchecked pool."""
    self.unchecked.update(self.dead)
    self.dead.clear()
Mark all dead proxies as unchecked
32
6
231,030
def on_change(self, path, event_type):
    """Respond to a file-system change: reload keywords for the collection at `path`.

    :param event_type: watchdog event type — currently unused here.
    """
    # I can do all this work in a sql statement, but
    # for debugging it's easier to do it in stages.
    sql = """SELECT collection_id FROM collection_table WHERE path == ? """
    cursor = self._execute(sql, (path,))
    results = cursor.fetchall()
    # there should always be exactly one result, but
    # there's no harm in using a loop to process the
    # single result
    for result in results:
        collection_id = result[0]
        # remove all keywords in this collection, then re-parse the file
        sql = """DELETE from keyword_table WHERE collection_id == ? """
        cursor = self._execute(sql, (collection_id,))
        self._load_keywords(collection_id, path=path)
Respond to changes in the file system
171
8
231,031
def _load_keywords ( self , collection_id , path = None , libdoc = None ) : if libdoc is None and path is None : raise ( Exception ( "You must provide either a path or libdoc argument" ) ) if libdoc is None : libdoc = LibraryDocumentation ( path ) if len ( libdoc . keywords ) > 0 : for keyword in libdoc . keywords : self . _add_keyword ( collection_id , keyword . name , keyword . doc , keyword . args )
Load a collection of keywords
109
5
231,032
def add_file(self, path):
    """Add a resource file or library file at `path` to the database."""
    libdoc = LibraryDocumentation(path)
    # Files without keywords contribute nothing, so skip them entirely.
    if len(libdoc.keywords) > 0:
        if libdoc.doc.startswith("Documentation for resource file"):
            # bah! The file doesn't have any file-level documentation
            # and libdoc substitutes some placeholder text.
            libdoc.doc = ""
        collection_id = self.add_collection(path, libdoc.name, libdoc.type, libdoc.doc, libdoc.version, libdoc.scope, libdoc.named_args, libdoc.doc_format)
        self._load_keywords(collection_id, libdoc=libdoc)
Add a resource file or library file to the database
147
10
231,033
def add_library(self, name):
    """Add an installed Robot library (by import name) to the database."""
    libdoc = LibraryDocumentation(name)
    # Libraries without keywords contribute nothing, so skip them.
    if len(libdoc.keywords) > 0:
        # FIXME: figure out the path to the library file
        collection_id = self.add_collection(None, libdoc.name, libdoc.type, libdoc.doc, libdoc.version, libdoc.scope, libdoc.named_args, libdoc.doc_format)
        self._load_keywords(collection_id, libdoc=libdoc)
Add a library to the database
111
6
231,034
def add_folder(self, dirname, watch=True):
    """Recursively add all supported files in a folder to the database.

    Subfolders are added with watch=False; a single watchdog observer is
    scheduled (recursively) on the top-level folder only.
    """
    ignore_file = os.path.join(dirname, ".rfhubignore")
    exclude_patterns = []
    try:
        with open(ignore_file, "r") as f:
            exclude_patterns = []
            for line in f.readlines():
                line = line.strip()
                if (re.match(r'^\s*#', line)):
                    continue
                if len(line.strip()) > 0:
                    exclude_patterns.append(line)
    except:
        # should probably warn the user?
        pass
    # NOTE(review): exclude_patterns is collected above but never consulted
    # below, so .rfhubignore currently has no effect — confirm whether
    # filtering was intended here.
    for filename in os.listdir(dirname):
        path = os.path.join(dirname, filename)
        (basename, ext) = os.path.splitext(filename.lower())
        try:
            if (os.path.isdir(path)):
                # Recurse into readable, non-hidden directories.
                if (not basename.startswith(".")):
                    if os.access(path, os.R_OK):
                        self.add_folder(path, watch=False)
            else:
                if (ext in (".xml", ".robot", ".txt", ".py", ".tsv")):
                    if os.access(path, os.R_OK):
                        self.add(path)
        except Exception as e:
            # I really need to get the logging situation figured out.
            print("bummer:", str(e))
    # FIXME:
    # instead of passing a flag around, I should just keep track
    # of which folders we're watching, and don't add watchers for
    # any subfolders. That will work better in the case where
    # the user accidentally starts up the hub giving the same
    # folder, or a folder and its children, on the command line...
    if watch:
        # add watcher on normalized path
        dirname = os.path.abspath(dirname)
        event_handler = WatchdogHandler(self, dirname)
        self.observer.schedule(event_handler, dirname, recursive=True)
Recursively add all files in a folder to the database
441
12
231,035
def add_installed_libraries(self, extra_libs=["Selenium2Library", "SudsLibrary", "RequestsLibrary"]):
    """Add every robot library found in robot.libraries, plus `extra_libs`.

    Libraries matching _should_ignore() are skipped; failures are logged
    and do not abort the scan.
    """
    # NOTE(review): mutable default argument; benign here since the list is
    # never mutated, but a tuple or None default would be safer.
    libdir = os.path.dirname(robot.libraries.__file__)
    loaded = []
    for filename in os.listdir(libdir):
        if filename.endswith(".py") or filename.endswith(".pyc"):
            libname, ext = os.path.splitext(filename)
            if (libname.lower() not in loaded and not self._should_ignore(libname)):
                try:
                    self.add(libname)
                    loaded.append(libname.lower())
                except Exception as e:
                    # need a better way to log this...
                    self.log.debug("unable to add library: " + str(e))
    # I hate how I implemented this, but I don't think there's
    # any way to find out which installed python packages are
    # robot libraries.
    for library in extra_libs:
        if (library.lower() not in loaded and not self._should_ignore(library)):
            try:
                self.add(library)
                loaded.append(library.lower())
            except Exception as e:
                self.log.debug("unable to add external library %s: %s" % (library, str(e)))
Add any installed libraries that we can find
302
8
231,036
def get_collection(self, collection_id):
    """Get a specific collection as a dict keyed by column name.

    `collection_id` may be a numeric id or a name pattern (matched via LIKE).
    """
    sql = """SELECT collection.collection_id, collection.type, collection.name, collection.path, collection.doc, collection.version, collection.scope, collection.namedargs, collection.doc_format FROM collection_table as collection WHERE collection_id == ? OR collection.name like ? """
    cursor = self._execute(sql, (collection_id, collection_id))
    # need to handle the case where we get more than one result...
    sql_result = cursor.fetchone()
    # NOTE(review): fetchone() returns None for an unknown id, which makes
    # the subscripts below raise TypeError — confirm intended behavior.
    # (Removed an unreachable `return sql_result` that followed this return.)
    return {
        "collection_id": sql_result[0],
        "type": sql_result[1],
        "name": sql_result[2],
        "path": sql_result[3],
        "doc": sql_result[4],
        "version": sql_result[5],
        "scope": sql_result[6],
        "namedargs": sql_result[7],
        "doc_format": sql_result[8],
    }
Get a specific collection
228
4
231,037
def get_keyword(self, collection_id, name):
    """Get a specific keyword from a collection; returns {} if not found."""
    sql = """SELECT keyword.name, keyword.args, keyword.doc FROM keyword_table as keyword WHERE keyword.collection_id == ? AND keyword.name like ? """
    cursor = self._execute(sql, (collection_id, name))
    # We're going to assume no library has duplicate keywords
    # While that in theory _could_ happen, it never _should_,
    # and you get what you deserve if it does.
    row = cursor.fetchone()
    if row is None:
        return {}
    return {
        "name": row[0],
        "args": json.loads(row[1]),
        "doc": row[2],
        "collection_id": collection_id,
    }
Get a specific keyword from a library
167
7
231,038
def _looks_like_libdoc_file ( self , name ) : # inefficient since we end up reading the file twice, # but it's fast enough for our purposes, and prevents # us from doing a full parse of files that are obviously # not libdoc files if name . lower ( ) . endswith ( ".xml" ) : with open ( name , "r" ) as f : # read the first few lines; if we don't see # what looks like libdoc data, return false data = f . read ( 200 ) index = data . lower ( ) . find ( "<keywordspec " ) if index > 0 : return True return False
Return true if an xml file looks like a libdoc file
140
12
231,039
def _looks_like_resource_file ( self , name ) : # inefficient since we end up reading the file twice, # but it's fast enough for our purposes, and prevents # us from doing a full parse of files that are obviously # not robot files if ( re . search ( r'__init__.(txt|robot|html|tsv)$' , name ) ) : # These are initialize files, not resource files return False found_keyword_table = False if ( name . lower ( ) . endswith ( ".robot" ) or name . lower ( ) . endswith ( ".txt" ) or name . lower ( ) . endswith ( ".tsv" ) ) : with open ( name , "r" ) as f : data = f . read ( ) for match in re . finditer ( r'^\*+\s*(Test Cases?|(?:User )?Keywords?)' , data , re . MULTILINE | re . IGNORECASE ) : if ( re . match ( r'Test Cases?' , match . group ( 1 ) , re . IGNORECASE ) ) : # if there's a test case table, it's not a keyword file return False if ( not found_keyword_table and re . match ( r'(User )?Keywords?' , match . group ( 1 ) , re . IGNORECASE ) ) : found_keyword_table = True return found_keyword_table
Return true if the file has a keyword table but not a testcase table
321
15
231,040
def _should_ignore ( self , name ) : _name = name . lower ( ) return ( _name . startswith ( "deprecated" ) or _name . startswith ( "_" ) or _name in ( "remote" , "reserved" , "dialogs_py" , "dialogs_ipy" , "dialogs_jy" ) )
Return True if a given library name should be ignored
82
10
231,041
def _execute(self, *args):
    """Execute an SQL query and return the cursor that ran it."""
    cursor = self.db.cursor()
    cursor.execute(*args)
    return cursor
Execute an SQL query
28
5
231,042
def _glob_to_sql ( self , string ) : # What's with the chr(1) and chr(2) nonsense? It's a trick to # hide \* and \? from the * and ? substitutions. This trick # depends on the substitutiones being done in order. chr(1) # and chr(2) were picked because I know those characters # almost certainly won't be in the input string table = ( ( r'\\' , chr ( 1 ) ) , ( r'\*' , chr ( 2 ) ) , ( r'\?' , chr ( 3 ) ) , ( r'%' , r'\%' ) , ( r'?' , '_' ) , ( r'*' , '%' ) , ( chr ( 1 ) , r'\\' ) , ( chr ( 2 ) , r'\*' ) , ( chr ( 3 ) , r'\?' ) ) for ( a , b ) in table : string = string . replace ( a , b ) string = string [ 1 : ] if string . startswith ( "^" ) else "%" + string string = string [ : - 1 ] if string . endswith ( "$" ) else string + "%" return string
Convert glob - like wildcards to SQL wildcards
278
11
231,043
def doc():
    """Render the home page: library list plus the left-hand nav panel."""
    kwdb = current_app.kwdb
    libraries = get_collections(kwdb, libtype="library")
    resource_files = get_collections(kwdb, libtype="resource")
    hierarchy = get_navpanel_data(kwdb)
    return flask.render_template("home.html", data={"libraries": libraries, "version": __version__, "libdoc": None, "hierarchy": hierarchy, "resource_files": resource_files})
Show a list of libraries along with the nav panel on the left
124
13
231,044
def index():
    """Render a list of available libraries and resource files."""
    kwdb = current_app.kwdb
    libraries = get_collections(kwdb, libtype="library")
    resource_files = get_collections(kwdb, libtype="resource")
    return flask.render_template("libraryNames.html", data={"libraries": libraries, "version": __version__, "resource_files": resource_files})
Show a list of available libraries and resource files
97
9
231,045
def search ( ) : pattern = flask . request . args . get ( 'pattern' , "*" ) . strip ( ) . lower ( ) # if the pattern contains "in:<collection>" (eg: in:builtin), # filter results to only that (or those) collections # This was kind-of hacked together, but seems to work well enough collections = [ c [ "name" ] . lower ( ) for c in current_app . kwdb . get_collections ( ) ] words = [ ] filters = [ ] if pattern . startswith ( "name:" ) : pattern = pattern [ 5 : ] . strip ( ) mode = "name" else : mode = "both" for word in pattern . split ( " " ) : if word . lower ( ) . startswith ( "in:" ) : filters . extend ( [ name for name in collections if name . startswith ( word [ 3 : ] ) ] ) else : words . append ( word ) pattern = " " . join ( words ) keywords = [ ] for keyword in current_app . kwdb . search ( pattern , mode ) : kw = list ( keyword ) collection_id = kw [ 0 ] collection_name = kw [ 1 ] . lower ( ) if len ( filters ) == 0 or collection_name in filters : url = flask . url_for ( ".doc_for_library" , collection_id = kw [ 0 ] , keyword = kw [ 2 ] ) row_id = "row-%s.%s" % ( keyword [ 1 ] . lower ( ) , keyword [ 2 ] . lower ( ) . replace ( " " , "-" ) ) keywords . append ( { "collection_id" : keyword [ 0 ] , "collection_name" : keyword [ 1 ] , "name" : keyword [ 2 ] , "synopsis" : keyword [ 3 ] , "version" : __version__ , "url" : url , "row_id" : row_id } ) keywords . sort ( key = lambda kw : kw [ "name" ] ) return flask . render_template ( "search.html" , data = { "keywords" : keywords , "version" : __version__ , "pattern" : pattern } )
Show all keywords that match a pattern
487
7
231,046
def get_collections(kwdb, libtype="*"):
    """Get the list of collections from kwdb, adding a doc URL to each."""
    collections = kwdb.get_collections(libtype=libtype)
    for result in collections:
        url = flask.url_for(".doc_for_library", collection_id=result["collection_id"])
        result["url"] = url
    return collections
Get list of collections from kwdb then add urls necessary for hyperlinks
79
16
231,047
def get_navpanel_data(kwdb):
    """Get the nav-panel hierarchy from kwdb, adding hyperlink URLs.

    Each library and each of its keywords gets a "url" entry.
    """
    data = kwdb.get_keyword_hierarchy()
    for library in data:
        library["url"] = flask.url_for(".doc_for_library", collection_id=library["collection_id"])
        for keyword in library["keywords"]:
            url = flask.url_for(".doc_for_library", collection_id=library["collection_id"], keyword=keyword["name"])
            keyword["url"] = url
    return data
Get navpanel data from kwdb and add urls necessary for hyperlinks
127
16
231,048
def doc_to_html(doc, doc_format="ROBOT"):
    """Convert Robot Framework documentation markup to HTML."""
    # Imported lazily so merely importing this module doesn't require robot.
    from robot.libdocpkg.htmlwriter import DocToHtml
    return DocToHtml(doc_format)(doc)
Convert documentation to HTML
47
5
231,049
def start(self):
    """Start the app: flask dev server in debug mode, tornado otherwise."""
    if self.args.debug:
        self.app.run(port=self.args.port, debug=self.args.debug, host=self.args.interface)
    else:
        root = "http://%s:%s" % (self.args.interface, self.args.port)
        print("tornado web server running on " + root)
        self.shutdown_requested = False
        http_server = HTTPServer(WSGIContainer(self.app))
        http_server.listen(port=self.args.port, address=self.args.interface)
        # SIGINT only sets a flag; the periodic callback below polls it so
        # the IOLoop can be stopped cleanly from its own thread.
        signal.signal(signal.SIGINT, self.signal_handler)
        tornado.ioloop.PeriodicCallback(self.check_shutdown_flag, 500).start()
        tornado.ioloop.IOLoop.instance().start()
Start the app
188
3
231,050
def check_shutdown_flag(self):
    """Stop the tornado IOLoop if the shutdown flag has been set."""
    if self.shutdown_requested:
        tornado.ioloop.IOLoop.instance().stop()
        print("web server stopped.")
Shutdown the server if the flag has been set
44
10
231,051
def coords(obj):
    """Yield every coordinate tuple from a Feature, FeatureCollection, or Geometry."""
    # Handle the recursive FeatureCollection case first.
    if 'features' in obj:
        for feature in obj['features']:
            # For Python 2 compatibility
            # See https://www.reddit.com/r/learnpython/comments/4rc15s/yield_from_and_python_27/  # noqa: E501
            for coord in coords(feature):
                yield coord
        return
    if isinstance(obj, (tuple, list)):
        coordinates = obj
    elif 'geometry' in obj:
        coordinates = obj['geometry']['coordinates']
    else:
        coordinates = obj.get('coordinates', obj)
    for element in coordinates:
        if isinstance(element, (float, int)):
            # Innermost level: the sequence itself is a single coordinate.
            yield tuple(coordinates)
            break
        for coord in coords(element):
            yield coord
Yields the coordinates from a Feature or Geometry .
176
12
231,052
def map_tuples(func, obj):
    """Return a copy of `obj` with `func` applied to every coordinate.

    Each transformed coordinate is coerced to a tuple; container objects
    (Feature, FeatureCollection, GeometryCollection) recurse via
    map_geometries.
    """
    geom_type = obj['type']
    if geom_type == 'Point':
        new_coords = tuple(func(obj['coordinates']))
    elif geom_type in ['LineString', 'MultiPoint']:
        new_coords = [tuple(func(c)) for c in obj['coordinates']]
    elif geom_type in ['MultiLineString', 'Polygon']:
        new_coords = [[tuple(func(c)) for c in ring] for ring in obj['coordinates']]
    elif geom_type == 'MultiPolygon':
        new_coords = [[[tuple(func(c)) for c in ring] for ring in polygon] for polygon in obj['coordinates']]
    elif geom_type in ['Feature', 'FeatureCollection', 'GeometryCollection']:
        return map_geometries(lambda g: map_tuples(func, g), obj)
    else:
        raise ValueError("Invalid geometry object %s" % repr(obj))
    return {'type': geom_type, 'coordinates': new_coords}
Returns the mapped coordinates from a Geometry after applying the provided function to each coordinate .
264
17
231,053
def map_geometries(func, obj):
    """Return the result of passing every simple geometry in `obj` through `func`.

    Containers (GeometryCollection, Feature, FeatureCollection) are rebuilt
    with their geometries mapped; null geometries stay None.
    """
    simple_types = [
        'Point',
        'LineString',
        'MultiPoint',
        'MultiLineString',
        'Polygon',
        'MultiPolygon',
    ]
    kind = obj['type']
    if kind in simple_types:
        return func(obj)
    if kind == 'GeometryCollection':
        mapped = [func(geom) if geom else None for geom in obj['geometries']]
        return {'type': kind, 'geometries': mapped}
    if kind == 'Feature':
        geometry = func(obj['geometry']) if obj['geometry'] else None
        return {'type': kind, 'geometry': geometry, 'properties': obj['properties']}
    if kind == 'FeatureCollection':
        return {'type': kind, 'features': [map_geometries(func, feat) for feat in obj['features']]}
    raise ValueError("Invalid GeoJSON object %s" % repr(obj))
Returns the result of passing every geometry in the given geojson object through func .
275
17
231,054
def to_instance(cls, ob, default=None, strict=False):
    """Encode a GeoJSON dict into a GeoJSON object.

    Assumes the caller knows that the dict should satisfy a GeoJSON type.

    :param default: zero-arg factory used when `ob` is None.
    :param strict: raise ValueError instead of returning `ob` unchanged
        when it cannot be coerced.
    """
    if ob is None and default is not None:
        instance = default()
    elif isinstance(ob, GeoJSON):
        instance = ob
    else:
        mapping = to_mapping(ob)
        d = {}
        for k in mapping:
            d[k] = mapping[k]
        try:
            type_ = d.pop("type")
            try:
                type_ = str(type_)
            except UnicodeEncodeError:
                # If the type contains non-ascii characters, we can assume
                # it's not a valid GeoJSON type.
                # BUG FIX: .format() was previously applied to the exception
                # instance (`raise AttributeError("...").format(type_)`),
                # which raised an unrelated AttributeError instead of the
                # intended message.
                raise AttributeError("{0} is not a GeoJSON type".format(type_))
            geojson_factory = getattr(geojson.factory, type_)
            instance = geojson_factory(**d)
        except (AttributeError, KeyError) as invalid:
            if strict:
                msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
                msg %= (ob, invalid)
                raise ValueError(msg)
            instance = ob
    return instance
Encode a GeoJSON dict into an GeoJSON object . Assumes the caller knows that the dict should satisfy a GeoJSON type .
237
27
231,055
def check_list_errors(self, checkFunc, lst):
    """Validation helper: run `checkFunc` on each item, keep truthy results."""
    # Items that validate cleanly yield a falsy result and are dropped.
    return [error for error in map(checkFunc, lst) if error]
Validation helper function .
57
5
231,056
def run_only_once(self, keyword):
    """Run `keyword` in exactly one of the parallel pabot processes.

    Other processes block on the lock until the first finishes, then either
    pass through (if it PASSED) or raise (if it FAILED). The keyword's own
    return value is not propagated.
    """
    lock_name = 'pabot_run_only_once_%s' % keyword
    try:
        self.acquire_lock(lock_name)
        passed = self.get_parallel_value_for_key(lock_name)
        if passed != '':
            # Another process already ran the keyword.
            if passed == 'FAILED':
                raise AssertionError('Keyword failed in other process')
            return
        BuiltIn().run_keyword(keyword)
        self.set_parallel_value_for_key(lock_name, 'PASSED')
    except:
        # Record the failure for the other processes, then re-raise.
        self.set_parallel_value_for_key(lock_name, 'FAILED')
        raise
    finally:
        self.release_lock(lock_name)
Runs a keyword only once in one of the parallel processes. As the keyword will be called in only one process, and the return value could basically be anything, Run Only Once can't return the actual return value. If the keyword fails, Run Only Once fails. Other processes executing Run Only Once wait at this keyword until the actual command has been executed. NOTE! This is a potential "shoot yourself in the knee" keyword. Especially note that all the namespace changes are only visible in the process that actually executed the keyword. Also note that this might lead to odd situations if used inside other keywords. Also, at this point keywords are identified as the same if they have the same name.
165
138
231,057
def set_parallel_value_for_key(self, key, value):
    """Set a globally available key/value visible to all pabot processes."""
    if self._remotelib:
        # Delegate to the shared pabot server when running remotely.
        self._remotelib.run_keyword('set_parallel_value_for_key', [key, value], {})
    else:
        _PabotLib.set_parallel_value_for_key(self, key, value)
Set a globally available key and value that can be accessed from all the pabot processes .
88
19
231,058
def get_parallel_value_for_key(self, key):
    """Get the value for `key`; returns an empty string when unset."""
    if self._remotelib:
        # Delegate to the shared pabot server when running remotely.
        return self._remotelib.run_keyword('get_parallel_value_for_key', [key], {})
    return _PabotLib.get_parallel_value_for_key(self, key)
Get the value for a key . If there is no value for the key then empty string is returned .
82
21
231,059
def acquire_lock ( self , name ) : if self . _remotelib : try : while not self . _remotelib . run_keyword ( 'acquire_lock' , [ name , self . _my_id ] , { } ) : time . sleep ( 0.1 ) logger . debug ( 'waiting for lock to release' ) return True except RuntimeError : logger . warn ( 'no connection' ) self . __remotelib = None return _PabotLib . acquire_lock ( self , name , self . _my_id )
Wait for a lock with name . This will prevent other processes from acquiring the lock with the name while it is held . Thus they will wait in the position where they are acquiring the lock until the process that has it releases it .
125
46
231,060
def release_lock ( self , name ) : if self . _remotelib : self . _remotelib . run_keyword ( 'release_lock' , [ name , self . _my_id ] , { } ) else : _PabotLib . release_lock ( self , name , self . _my_id )
Release a lock with name . This will enable others to acquire the lock .
75
15
231,061
def release_locks ( self ) : if self . _remotelib : self . _remotelib . run_keyword ( 'release_locks' , [ self . _my_id ] , { } ) else : _PabotLib . release_locks ( self , self . _my_id )
Release all locks called by instance .
69
7
231,062
def acquire_value_set ( self , * tags ) : setname = self . _acquire_value_set ( * tags ) if setname is None : raise ValueError ( "Could not aquire a value set" ) return setname
Reserve a set of values for this execution . No other process can reserve the same set of values while the set is reserved . Acquired value set needs to be released after use to allow other processes to access it . Add tags to limit the possible value sets that this returns .
52
56
231,063
def get_value_from_set ( self , key ) : #TODO: This should be done locally. # We do not really need to call centralised server if the set is already # reserved as the data there is immutable during execution key = key . lower ( ) if self . _remotelib : while True : value = self . _remotelib . run_keyword ( 'get_value_from_set' , [ key , self . _my_id ] , { } ) if value : return value time . sleep ( 0.1 ) logger . debug ( 'waiting for a value' ) else : return _PabotLib . get_value_from_set ( self , key , self . _my_id )
Get a value from previously reserved value set .
162
9
231,064
def release_value_set ( self ) : if self . _remotelib : self . _remotelib . run_keyword ( 'release_value_set' , [ self . _my_id ] , { } ) else : _PabotLib . release_value_set ( self , self . _my_id )
Release a reserved value set so that other executions can use it also .
75
14
231,065
def install_all_patches ( ) : from . import mysqldb from . import psycopg2 from . import strict_redis from . import sqlalchemy from . import tornado_http from . import urllib from . import urllib2 from . import requests mysqldb . install_patches ( ) psycopg2 . install_patches ( ) strict_redis . install_patches ( ) sqlalchemy . install_patches ( ) tornado_http . install_patches ( ) urllib . install_patches ( ) urllib2 . install_patches ( ) requests . install_patches ( )
A convenience method that installs all available hooks .
142
9
231,066
def install_patches ( patchers = 'all' ) : if patchers is None or patchers == 'all' : install_all_patches ( ) return if not _valid_args ( patchers ) : raise ValueError ( 'patchers argument must be None, "all", or a list' ) for patch_func_name in patchers : logging . info ( 'Loading client hook %s' , patch_func_name ) patch_func = _load_symbol ( patch_func_name ) logging . info ( 'Applying client hook %s' , patch_func_name ) patch_func ( )
Usually called from middleware to install client hooks specified in the client_hooks section of the configuration .
135
21
231,067
def install_client_interceptors ( client_interceptors = ( ) ) : if not _valid_args ( client_interceptors ) : raise ValueError ( 'client_interceptors argument must be a list' ) from . . http_client import ClientInterceptors for client_interceptor in client_interceptors : logging . info ( 'Loading client interceptor %s' , client_interceptor ) interceptor_class = _load_symbol ( client_interceptor ) logging . info ( 'Adding client interceptor %s' , client_interceptor ) ClientInterceptors . append ( interceptor_class ( ) )
Install client interceptors for the patchers .
139
9
231,068
def _load_symbol ( name ) : module_name , key = name . rsplit ( '.' , 1 ) try : module = importlib . import_module ( module_name ) except ImportError as err : # it's possible the symbol is a class method module_name , class_name = module_name . rsplit ( '.' , 1 ) module = importlib . import_module ( module_name ) cls = getattr ( module , class_name , None ) if cls : attr = getattr ( cls , key , None ) else : raise err else : attr = getattr ( module , key , None ) if not callable ( attr ) : raise ValueError ( '%s is not callable (was %r)' % ( name , attr ) ) return attr
Load a symbol by name .
175
6
231,069
def span_in_stack_context ( span ) : if not isinstance ( opentracing . tracer . scope_manager , TornadoScopeManager ) : raise RuntimeError ( 'scope_manager is not TornadoScopeManager' ) # Enter the newly created stack context so we have # storage available for Span activation. context = tracer_stack_context ( ) entered_context = _TracerEnteredStackContext ( context ) if span is None : return entered_context opentracing . tracer . scope_manager . activate ( span , False ) assert opentracing . tracer . active_span is not None assert opentracing . tracer . active_span is span return entered_context
Create Tornado s StackContext that stores the given span in the thread - local request context . This function is intended for use in Tornado applications based on IOLoop although will work fine in single - threaded apps like Flask albeit with more overhead .
150
48
231,070
def traced_function ( func = None , name = None , on_start = None , require_active_trace = False ) : if func is None : return functools . partial ( traced_function , name = name , on_start = on_start , require_active_trace = require_active_trace ) if name : operation_name = name else : operation_name = func . __name__ @ functools . wraps ( func ) def decorator ( * args , * * kwargs ) : parent_span = get_current_span ( ) if parent_span is None and require_active_trace : return func ( * args , * * kwargs ) span = utils . start_child_span ( operation_name = operation_name , parent = parent_span ) if callable ( on_start ) : on_start ( span , * args , * * kwargs ) # We explicitly invoke deactivation callback for the StackContext, # because there are scenarios when it gets retained forever, for # example when a Periodic Callback is scheduled lazily while in the # scope of a tracing StackContext. with span_in_stack_context ( span ) as deactivate_cb : try : res = func ( * args , * * kwargs ) # Tornado co-routines usually return futures, so we must wait # until the future is completed, in order to accurately # capture the function's execution time. if tornado . concurrent . is_future ( res ) : def done_callback ( future ) : deactivate_cb ( ) exception = future . exception ( ) if exception is not None : span . log ( event = 'exception' , payload = exception ) span . set_tag ( 'error' , 'true' ) span . finish ( ) res . add_done_callback ( done_callback ) else : deactivate_cb ( ) span . finish ( ) return res except Exception as e : deactivate_cb ( ) span . log ( event = 'exception' , payload = e ) span . set_tag ( 'error' , 'true' ) span . finish ( ) raise return decorator
A decorator that enables tracing of the wrapped function or Tornado co - routine provided there is a parent span already established .
457
24
231,071
def start_child_span ( operation_name , tracer = None , parent = None , tags = None ) : tracer = tracer or opentracing . tracer return tracer . start_span ( operation_name = operation_name , child_of = parent . context if parent else None , tags = tags )
Start a new span as a child of parent_span . If parent_span is None start a new root span .
70
24
231,072
def before_request ( request , tracer = None ) : if tracer is None : # pragma: no cover tracer = opentracing . tracer # we need to prepare tags upfront, mainly because RPC_SERVER tag must be # set when starting the span, to support Zipkin's one-span-per-RPC model tags_dict = { tags . SPAN_KIND : tags . SPAN_KIND_RPC_SERVER , tags . HTTP_URL : request . full_url , } remote_ip = request . remote_ip if remote_ip : tags_dict [ tags . PEER_HOST_IPV4 ] = remote_ip caller_name = request . caller_name if caller_name : tags_dict [ tags . PEER_SERVICE ] = caller_name remote_port = request . remote_port if remote_port : tags_dict [ tags . PEER_PORT ] = remote_port operation = request . operation try : carrier = { } for key , value in six . iteritems ( request . headers ) : carrier [ key ] = value parent_ctx = tracer . extract ( format = Format . HTTP_HEADERS , carrier = carrier ) except Exception as e : logging . exception ( 'trace extract failed: %s' % e ) parent_ctx = None span = tracer . start_span ( operation_name = operation , child_of = parent_ctx , tags = tags_dict ) return span
Attempts to extract a tracing span from incoming request . If no tracing context is passed in the headers or the data cannot be parsed a new root span is started .
316
32
231,073
def _parse_wsgi_headers ( wsgi_environ ) : prefix = 'HTTP_' p_len = len ( prefix ) # use .items() despite suspected memory pressure bc GC occasionally # collects wsgi_environ.iteritems() during iteration. headers = { key [ p_len : ] . replace ( '_' , '-' ) . lower ( ) : val for ( key , val ) in wsgi_environ . items ( ) if key . startswith ( prefix ) } return headers
HTTP headers are presented in WSGI environment with HTTP_ prefix . This method finds those headers removes the prefix converts underscores to dashes and converts to lower case .
113
32
231,074
def append ( cls , interceptor ) : cls . _check ( interceptor ) cls . _interceptors . append ( interceptor )
Add interceptor to the end of the internal list .
32
11
231,075
def insert ( cls , index , interceptor ) : cls . _check ( interceptor ) cls . _interceptors . insert ( index , interceptor )
Add interceptor to the given index in the internal list .
36
12
231,076
def singleton ( func ) : @ functools . wraps ( func ) def wrapper ( * args , * * kwargs ) : if wrapper . __call_state__ == CALLED : return ret = func ( * args , * * kwargs ) wrapper . __call_state__ = CALLED return ret def reset ( ) : wrapper . __call_state__ = NOT_CALLED wrapper . reset = reset reset ( ) # save original func to be able to patch and restore multiple times from # unit tests wrapper . __original_func = func return wrapper
This decorator allows you to make sure that a function is called once and only once . Note that recursive functions will still work .
120
26
231,077
def smooth_image ( image , sigma , sigma_in_physical_coordinates = True , FWHM = False , max_kernel_width = 32 ) : if image . components == 1 : return _smooth_image_helper ( image , sigma , sigma_in_physical_coordinates , FWHM , max_kernel_width ) else : imagelist = utils . split_channels ( image ) newimages = [ ] for image in imagelist : newimage = _smooth_image_helper ( image , sigma , sigma_in_physical_coordinates , FWHM , max_kernel_width ) newimages . append ( newimage ) return utils . merge_channels ( newimages )
Smooth an image
162
4
231,078
def build_template ( initial_template = None , image_list = None , iterations = 3 , gradient_step = 0.2 , * * kwargs ) : wt = 1.0 / len ( image_list ) if initial_template is None : initial_template = image_list [ 0 ] * 0 for i in range ( len ( image_list ) ) : initial_template = initial_template + image_list [ i ] * wt xavg = initial_template . clone ( ) for i in range ( iterations ) : for k in range ( len ( image_list ) ) : w1 = registration ( xavg , image_list [ k ] , type_of_transform = 'SyN' , * * kwargs ) if k == 0 : wavg = iio . image_read ( w1 [ 'fwdtransforms' ] [ 0 ] ) * wt xavgNew = w1 [ 'warpedmovout' ] * wt else : wavg = wavg + iio . image_read ( w1 [ 'fwdtransforms' ] [ 0 ] ) * wt xavgNew = xavgNew + w1 [ 'warpedmovout' ] * wt print ( wavg . abs ( ) . mean ( ) ) wscl = ( - 1.0 ) * gradient_step wavg = wavg * wscl wavgfn = mktemp ( suffix = '.nii.gz' ) iio . image_write ( wavg , wavgfn ) xavg = apply_transforms ( xavg , xavg , wavgfn ) return xavg
Estimate an optimal template from an input image_list
371
11
231,079
def resample_image ( image , resample_params , use_voxels = False , interp_type = 1 ) : if image . components == 1 : inimage = image . clone ( 'float' ) outimage = image . clone ( 'float' ) rsampar = 'x' . join ( [ str ( rp ) for rp in resample_params ] ) args = [ image . dimension , inimage , outimage , rsampar , int ( use_voxels ) , interp_type ] processed_args = utils . _int_antsProcessArguments ( args ) libfn = utils . get_lib_fn ( 'ResampleImage' ) libfn ( processed_args ) outimage = outimage . clone ( image . pixeltype ) return outimage else : raise ValueError ( 'images with more than 1 component not currently supported' )
Resample image by spacing or number of voxels with various interpolators . Works with multi - channel images .
191
23
231,080
def apply_ants_transform ( transform , data , data_type = "point" , reference = None , * * kwargs ) : return transform . apply ( data , data_type , reference , * * kwargs )
Apply ANTsTransform to data
49
6
231,081
def compose_ants_transforms ( transform_list ) : precision = transform_list [ 0 ] . precision dimension = transform_list [ 0 ] . dimension for tx in transform_list : if precision != tx . precision : raise ValueError ( 'All transforms must have the same precision' ) if dimension != tx . dimension : raise ValueError ( 'All transforms must have the same dimension' ) tx_ptr_list = list ( reversed ( [ tf . pointer for tf in transform_list ] ) ) libfn = utils . get_lib_fn ( 'composeTransforms%s' % ( transform_list [ 0 ] . _libsuffix ) ) itk_composed_tx = libfn ( tx_ptr_list , precision , dimension ) return ANTsTransform ( precision = precision , dimension = dimension , transform_type = 'CompositeTransform' , pointer = itk_composed_tx )
Compose multiple ANTsTransform s together
195
8
231,082
def transform_index_to_physical_point ( image , index ) : if not isinstance ( image , iio . ANTsImage ) : raise ValueError ( 'image must be ANTsImage type' ) if isinstance ( index , np . ndarray ) : index = index . tolist ( ) if not isinstance ( index , ( tuple , list ) ) : raise ValueError ( 'index must be tuple or list' ) if len ( index ) != image . dimension : raise ValueError ( 'len(index) != image.dimension' ) index = [ i + 1 for i in index ] ndim = image . dimension ptype = image . pixeltype libfn = utils . get_lib_fn ( 'TransformIndexToPhysicalPoint%s%i' % ( utils . short_ptype ( ptype ) , ndim ) ) point = libfn ( image . pointer , [ list ( index ) ] ) return np . array ( point [ 0 ] )
Get spatial point from index of an image .
209
9
231,083
def invert ( self ) : libfn = utils . get_lib_fn ( 'inverseTransform%s' % ( self . _libsuffix ) ) inv_tx_ptr = libfn ( self . pointer ) new_tx = ANTsTransform ( precision = self . precision , dimension = self . dimension , transform_type = self . transform_type , pointer = inv_tx_ptr ) return new_tx
Invert the transform
91
4
231,084
def apply ( self , data , data_type = 'point' , reference = None , * * kwargs ) : if data_type == 'point' : return self . apply_to_point ( data ) elif data_type == 'vector' : return self . apply_to_vector ( data ) elif data_type == 'image' : return self . apply_to_image ( data , reference , * * kwargs )
Apply transform to data
96
4
231,085
def apply_to_point ( self , point ) : libfn = utils . get_lib_fn ( 'transformPoint%s' % ( self . _libsuffix ) ) return tuple ( libfn ( self . pointer , point ) )
Apply transform to a point
53
5
231,086
def apply_to_vector ( self , vector ) : if isinstance ( vector , np . ndarray ) : vector = vector . tolist ( ) libfn = utils . get_lib_fn ( 'transformVector%s' % ( self . _libsuffix ) ) return np . asarray ( libfn ( self . pointer , vector ) )
Apply transform to a vector
77
5
231,087
def plot_hist ( image , threshold = 0. , fit_line = False , normfreq = True , ## plot label arguments title = None , grid = True , xlabel = None , ylabel = None , ## other plot arguments facecolor = 'green' , alpha = 0.75 ) : img_arr = image . numpy ( ) . flatten ( ) img_arr = img_arr [ np . abs ( img_arr ) > threshold ] if normfreq != False : normfreq = 1. if normfreq == True else normfreq n , bins , patches = plt . hist ( img_arr , 50 , normed = normfreq , facecolor = facecolor , alpha = alpha ) if fit_line : # add a 'best fit' line y = mlab . normpdf ( bins , img_arr . mean ( ) , img_arr . std ( ) ) l = plt . plot ( bins , y , 'r--' , linewidth = 1 ) if xlabel is not None : plt . xlabel ( xlabel ) if ylabel is not None : plt . ylabel ( ylabel ) if title is not None : plt . title ( title ) plt . grid ( grid ) plt . show ( )
Plot a histogram from an ANTsImage
274
9
231,088
def morphology ( image , operation , radius , mtype = 'binary' , value = 1 , shape = 'ball' , radius_is_parametric = False , thickness = 1 , lines = 3 , include_center = False ) : if image . components > 1 : raise ValueError ( 'multichannel images not yet supported' ) _sflag_dict = { 'ball' : 1 , 'box' : 2 , 'cross' : 3 , 'annulus' : 4 , 'polygon' : 5 } sFlag = _sflag_dict . get ( shape , 0 ) if sFlag == 0 : raise ValueError ( 'invalid element shape' ) radius_is_parametric = radius_is_parametric * 1 include_center = include_center * 1 if ( mtype == 'binary' ) : if ( operation == 'dilate' ) : if ( sFlag == 5 ) : ret = iMath ( image , 'MD' , radius , value , sFlag , lines ) else : ret = iMath ( image , 'MD' , radius , value , sFlag , radius_is_parametric , thickness , include_center ) elif ( operation == 'erode' ) : if ( sFlag == 5 ) : ret = iMath ( image , 'ME' , radius , value , sFlag , lines ) else : ret = iMath ( image , 'ME' , radius , value , sFlag , radius_is_parametric , thickness , include_center ) elif ( operation == 'open' ) : if ( sFlag == 5 ) : ret = iMath ( image , 'MO' , radius , value , sFlag , lines ) else : ret = iMath ( image , 'MO' , radius , value , sFlag , radius_is_parametric , thickness , include_center ) elif ( operation == 'close' ) : if ( sFlag == 5 ) : ret = iMath ( image , 'MC' , radius , value , sFlag , lines ) else : ret = iMath ( image , 'MC' , radius , value , sFlag , radius_is_parametric , thickness , include_center ) else : raise ValueError ( 'Invalid morphology operation' ) elif ( mtype == 'grayscale' ) : if ( operation == 'dilate' ) : ret = iMath ( image , 'GD' , radius ) elif ( operation == 'erode' ) : ret = iMath ( image , 'GE' , radius ) elif ( operation == 'open' ) : ret = iMath ( image , 'GO' , radius ) elif ( operation == 'close' ) : ret = iMath ( image , 'GC' , radius ) else : raise ValueError ( 'Invalid morphology operation' ) else : raise ValueError ( 'Invalid morphology type' ) return ret
Apply morphological operations to an image
607
7
231,089
def rgb_to_vector ( image ) : if image . pixeltype != 'unsigned char' : image = image . clone ( 'unsigned char' ) idim = image . dimension libfn = utils . get_lib_fn ( 'RgbToVector%i' % idim ) new_ptr = libfn ( image . pointer ) new_img = iio . ANTsImage ( pixeltype = image . pixeltype , dimension = image . dimension , components = 3 , pointer = new_ptr , is_rgb = False ) return new_img
Convert an RGB ANTsImage to a Vector ANTsImage
119
13
231,090
def vector_to_rgb ( image ) : if image . pixeltype != 'unsigned char' : image = image . clone ( 'unsigned char' ) idim = image . dimension libfn = utils . get_lib_fn ( 'VectorToRgb%i' % idim ) new_ptr = libfn ( image . pointer ) new_img = iio . ANTsImage ( pixeltype = image . pixeltype , dimension = image . dimension , components = 3 , pointer = new_ptr , is_rgb = True ) return new_img
Convert an Vector ANTsImage to a RGB ANTsImage
120
13
231,091
def quantile ( image , q , nonzero = True ) : img_arr = image . numpy ( ) if isinstance ( q , ( list , tuple ) ) : q = [ qq * 100. if qq <= 1. else qq for qq in q ] if nonzero : img_arr = img_arr [ img_arr > 0 ] vals = [ np . percentile ( img_arr , qq ) for qq in q ] return tuple ( vals ) elif isinstance ( q , ( float , int ) ) : if q <= 1. : q = q * 100. if nonzero : img_arr = img_arr [ img_arr > 0 ] return np . percentile ( img_arr [ img_arr > 0 ] , q ) else : raise ValueError ( 'q argument must be list/tuple or float/int' )
Get the quantile values from an ANTsImage
188
10
231,092
def bandpass_filter_matrix ( matrix , tr = 1 , lowf = 0.01 , highf = 0.1 , order = 3 ) : from scipy . signal import butter , filtfilt def butter_bandpass ( lowcut , highcut , fs , order ) : nyq = 0.5 * fs low = lowcut / nyq high = highcut / nyq b , a = butter ( order , [ low , high ] , btype = 'band' ) return b , a def butter_bandpass_filter ( data , lowcut , highcut , fs , order ) : b , a = butter_bandpass ( lowcut , highcut , fs , order = order ) y = filtfilt ( b , a , data ) return y fs = 1 / tr # sampling rate based on tr nsamples = matrix . shape [ 0 ] ncolumns = matrix . shape [ 1 ] matrixOut = matrix . copy ( ) for k in range ( ncolumns ) : matrixOut [ : , k ] = butter_bandpass_filter ( matrix [ : , k ] , lowf , highf , fs , order = order ) return matrixOut
Bandpass filter the input time series image
254
8
231,093
def compcor ( boldImage , ncompcor = 4 , quantile = 0.975 , mask = None , filter_type = False , degree = 2 ) : def compute_tSTD ( M , quantile , x = 0 , axis = 0 ) : stdM = np . std ( M , axis = axis ) # set bad values to x stdM [ stdM == 0 ] = x stdM [ np . isnan ( stdM ) ] = x tt = round ( quantile * 100 ) threshold_std = np . percentile ( stdM , tt ) # threshold_std = quantile( stdM, quantile ) return { 'tSTD' : stdM , 'threshold_std' : threshold_std } if mask is None : temp = utils . slice_image ( boldImage , axis = boldImage . dimension - 1 , idx = 0 ) mask = utils . get_mask ( temp ) imagematrix = core . timeseries_to_matrix ( boldImage , mask ) temp = compute_tSTD ( imagematrix , quantile , 0 ) tsnrmask = core . make_image ( mask , temp [ 'tSTD' ] ) tsnrmask = utils . threshold_image ( tsnrmask , temp [ 'threshold_std' ] , temp [ 'tSTD' ] . max ( ) ) M = core . timeseries_to_matrix ( boldImage , tsnrmask ) components = None basis = np . array ( [ ] ) if filter_type in ( 'polynomial' , False ) : M , basis = regress_poly ( degree , M ) # M = M / compute_tSTD(M, 1.)['tSTD'] # "The covariance matrix C = MMT was constructed and decomposed into its # principal components using a singular value decomposition." u , _ , _ = linalg . svd ( M , full_matrices = False ) if components is None : components = u [ : , : ncompcor ] else : components = np . hstack ( ( components , u [ : , : ncompcor ] ) ) if components is None and ncompcor > 0 : raise ValueError ( 'No components found' ) return { 'components' : components , 'basis' : basis }
Compute noise components from the input image
498
8
231,094
def n3_bias_field_correction ( image , downsample_factor = 3 ) : outimage = image . clone ( ) args = [ image . dimension , image , outimage , downsample_factor ] processed_args = pargs . _int_antsProcessArguments ( args ) libfn = utils . get_lib_fn ( 'N3BiasFieldCorrection' ) libfn ( processed_args ) return outimage
N3 Bias Field Correction
95
6
231,095
def n4_bias_field_correction ( image , mask = None , shrink_factor = 4 , convergence = { 'iters' : [ 50 , 50 , 50 , 50 ] , 'tol' : 1e-07 } , spline_param = 200 , verbose = False , weight_mask = None ) : if image . pixeltype != 'float' : image = image . clone ( 'float' ) iters = convergence [ 'iters' ] tol = convergence [ 'tol' ] if mask is None : mask = get_mask ( image ) N4_CONVERGENCE_1 = '[%s, %.10f]' % ( 'x' . join ( [ str ( it ) for it in iters ] ) , tol ) N4_SHRINK_FACTOR_1 = str ( shrink_factor ) if ( not isinstance ( spline_param , ( list , tuple ) ) ) or ( len ( spline_param ) == 1 ) : N4_BSPLINE_PARAMS = '[%i]' % spline_param elif ( isinstance ( spline_param , ( list , tuple ) ) ) and ( len ( spline_param ) == image . dimension ) : N4_BSPLINE_PARAMS = '[%s]' % ( 'x' . join ( [ str ( sp ) for sp in spline_param ] ) ) else : raise ValueError ( 'Length of splineParam must either be 1 or dimensionality of image' ) if weight_mask is not None : if not isinstance ( weight_mask , iio . ANTsImage ) : raise ValueError ( 'Weight Image must be an antsImage' ) outimage = image . clone ( ) kwargs = { 'd' : outimage . dimension , 'i' : image , 'w' : weight_mask , 's' : N4_SHRINK_FACTOR_1 , 'c' : N4_CONVERGENCE_1 , 'b' : N4_BSPLINE_PARAMS , 'x' : mask , 'o' : outimage , 'v' : int ( verbose ) } processed_args = pargs . _int_antsProcessArguments ( kwargs ) libfn = utils . get_lib_fn ( 'N4BiasFieldCorrection' ) libfn ( processed_args ) return outimage
N4 Bias Field Correction
523
6
231,096
def abp_n4 ( image , intensity_truncation = ( 0.025 , 0.975 , 256 ) , mask = None , usen3 = False ) : if ( not isinstance ( intensity_truncation , ( list , tuple ) ) ) or ( len ( intensity_truncation ) != 3 ) : raise ValueError ( 'intensity_truncation must be list/tuple with 3 values' ) outimage = iMath ( image , 'TruncateIntensity' , intensity_truncation [ 0 ] , intensity_truncation [ 1 ] , intensity_truncation [ 2 ] ) if usen3 == True : outimage = n3_bias_field_correction ( outimage , 4 ) outimage = n3_bias_field_correction ( outimage , 2 ) return outimage else : outimage = n4_bias_field_correction ( outimage , mask ) return outimage
Truncate outlier intensities and bias correct with the N4 algorithm .
207
16
231,097
def image_mutual_information ( image1 , image2 ) : if ( image1 . pixeltype != 'float' ) or ( image2 . pixeltype != 'float' ) : raise ValueError ( 'Both images must have float pixeltype' ) if image1 . dimension != image2 . dimension : raise ValueError ( 'Both images must have same dimension' ) libfn = utils . get_lib_fn ( 'antsImageMutualInformation%iD' % image1 . dimension ) return libfn ( image1 . pointer , image2 . pointer )
Compute mutual information between two ANTsImage types
120
10
231,098
def get_mask ( image , low_thresh = None , high_thresh = None , cleanup = 2 ) : cleanup = int ( cleanup ) if isinstance ( image , iio . ANTsImage ) : if image . pixeltype != 'float' : image = image . clone ( 'float' ) if low_thresh is None : low_thresh = image . mean ( ) if high_thresh is None : high_thresh = image . max ( ) mask_image = threshold_image ( image , low_thresh , high_thresh ) if cleanup > 0 : mask_image = iMath ( mask_image , 'ME' , cleanup ) mask_image = iMath ( mask_image , 'GetLargestComponent' ) mask_image = iMath ( mask_image , 'MD' , cleanup ) mask_image = iMath ( mask_image , 'FillHoles' ) . threshold_image ( 1 , 2 ) while ( ( mask_image . min ( ) == mask_image . max ( ) ) and ( cleanup > 0 ) ) : cleanup = cleanup - 1 mask_image = threshold_image ( image , low_thresh , high_thresh ) if cleanup > 0 : mask_image = iMath ( mask_image , 'ME' , cleanup ) mask_image = iMath ( mask_image , 'MD' , cleanup ) mask_image = iMath ( mask_image , 'FillHoles' ) . threshold_image ( 1 , 2 ) #if cleanup == 0: # clustlab = label_clusters(mask_image, 1) # mask_image = threshold_image(clustlab, 1, 1) return mask_image
Get a binary mask image from the given image after thresholding
365
12
231,099
def label_image_centroids ( image , physical = False , convex = True , verbose = False ) : d = image . shape if len ( d ) != 3 : raise ValueError ( 'image must be 3 dimensions' ) xcoords = np . asarray ( np . arange ( d [ 0 ] ) . tolist ( ) * ( d [ 1 ] * d [ 2 ] ) ) ycoords = np . asarray ( np . repeat ( np . arange ( d [ 1 ] ) , d [ 0 ] ) . tolist ( ) * d [ 2 ] ) zcoords = np . asarray ( np . repeat ( np . arange ( d [ 1 ] ) , d [ 0 ] * d [ 2 ] ) ) labels = image . numpy ( ) mylabels = np . sort ( np . unique ( labels [ labels > 0 ] ) ) . astype ( 'int' ) n_labels = len ( mylabels ) xc = np . zeros ( n_labels ) yc = np . zeros ( n_labels ) zc = np . zeros ( n_labels ) if convex : for i in mylabels : idx = ( labels == i ) . flatten ( ) xc [ i - 1 ] = np . mean ( xcoords [ idx ] ) yc [ i - 1 ] = np . mean ( ycoords [ idx ] ) zc [ i - 1 ] = np . mean ( zcoords [ idx ] ) else : for i in mylabels : idx = ( labels == i ) . flatten ( ) xci = xcoords [ idx ] yci = ycoords [ idx ] zci = zcoords [ idx ] dist = np . zeros ( len ( xci ) ) for j in range ( len ( xci ) ) : dist [ j ] = np . mean ( np . sqrt ( ( xci [ j ] - xci ) ** 2 + ( yci [ j ] - yci ) ** 2 + ( zci [ j ] - zci ) ** 2 ) ) mid = np . where ( dist == np . min ( dist ) ) xc [ i - 1 ] = xci [ mid ] yc [ i - 1 ] = yci [ mid ] zc [ i - 1 ] = zci [ mid ] centroids = np . vstack ( [ xc , yc , zc ] ) . T #if physical: # centroids = tio.transform_index_to_physical_point(image, centroids) return { 'labels' : mylabels , 'vertices' : centroids }
Converts a label image to coordinates summarizing their positions
588
11