idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
43,100
def reanimate(self, _time=None):
    """Move dead proxies back to the unchecked pool once their backoff expires.

    :param _time: clock override for testing; defaults to time.time().
    :return: number of proxies reanimated.
    """
    n_reanimated = 0
    # Bug fix: `_time or time.time()` misread an explicit 0 timestamp as
    # "not given"; compare against None instead.
    now = time.time() if _time is None else _time
    for proxy in list(self.dead):
        state = self.proxies[proxy]
        assert state.next_check is not None
        if state.next_check <= now:
            self.dead.remove(proxy)
            self.unchecked.add(proxy)
            n_reanimated += 1
    return n_reanimated
Move dead proxies to unchecked if a backoff timeout passes
43,101
def reset(self):
    """Mark every dead proxy as unchecked again."""
    for stale_proxy in list(self.dead):
        self.unchecked.add(stale_proxy)
        self.dead.remove(stale_proxy)
Mark all dead proxies as unchecked
43,102
def on_change(self, path, event_type):
    """Respond to a file system change by reloading keywords for *path*.

    NOTE(review): the SQL string literals were lost in this copy of the
    source; the statements below were reconstructed from the surrounding
    schema usage (collection_table / keyword_table) -- confirm against
    the original project before relying on them.
    """
    sql = """SELECT collection_id FROM collection_table WHERE path == ?"""
    cursor = self._execute(sql, (path,))
    results = cursor.fetchall()
    for result in results:
        collection_id = result[0]
        # Drop the stale keywords for this collection, then re-read
        # them from disk.
        sql = """DELETE FROM keyword_table WHERE collection_id == ?"""
        cursor = self._execute(sql, (collection_id,))
        self._load_keywords(collection_id, path=path)
Respond to changes in the file system
43,103
def _load_keywords ( self , collection_id , path = None , libdoc = None ) : if libdoc is None and path is None : raise ( Exception ( "You must provide either a path or libdoc argument" ) ) if libdoc is None : libdoc = LibraryDocumentation ( path ) if len ( libdoc . keywords ) > 0 : for keyword in libdoc . keywords : self . _add_keyword ( collection_id , keyword . name , keyword . doc , keyword . args )
Load a collection of keywords
43,104
def add_file(self, path):
    """Add a resource file or library file to the database."""
    libdoc = LibraryDocumentation(path)
    if not libdoc.keywords:
        return
    # A boilerplate auto-generated doc adds no value; blank it out.
    if libdoc.doc.startswith("Documentation for resource file"):
        libdoc.doc = ""
    collection_id = self.add_collection(
        path, libdoc.name, libdoc.type, libdoc.doc, libdoc.version,
        libdoc.scope, libdoc.named_args, libdoc.doc_format)
    self._load_keywords(collection_id, libdoc=libdoc)
Add a resource file or library file to the database
43,105
def add_library(self, name):
    """Add a library (by importable name) to the database."""
    libdoc = LibraryDocumentation(name)
    if not libdoc.keywords:
        return
    collection_id = self.add_collection(
        None, libdoc.name, libdoc.type, libdoc.doc, libdoc.version,
        libdoc.scope, libdoc.named_args, libdoc.doc_format)
    self._load_keywords(collection_id, libdoc=libdoc)
Add a library to the database
43,106
def add_folder(self, dirname, watch=True):
    """Recursively add all supported files in *dirname* to the database.

    :param dirname: directory to scan
    :param watch: when True, also register a watchdog observer on the
        (absolute) directory so future changes are picked up.
    """
    ignore_file = os.path.join(dirname, ".rfhubignore")
    exclude_patterns = []
    try:
        with open(ignore_file, "r") as f:
            for line in f.readlines():
                line = line.strip()
                if re.match(r'^\s*#', line):
                    continue
                if len(line) > 0:
                    exclude_patterns.append(line)
    except OSError:
        # No readable .rfhubignore file; nothing to exclude.
        # (The original swallowed every exception with a bare `except`.)
        pass
    # NOTE(review): exclude_patterns is collected but never applied below,
    # matching the original behavior -- TODO: filter files against it.
    for filename in os.listdir(dirname):
        path = os.path.join(dirname, filename)
        (basename, ext) = os.path.splitext(filename.lower())
        try:
            if os.path.isdir(path):
                if not basename.startswith("."):
                    if os.access(path, os.R_OK):
                        self.add_folder(path, watch=False)
            else:
                if ext in (".xml", ".robot", ".txt", ".py", ".tsv"):
                    if os.access(path, os.R_OK):
                        self.add(path)
        except Exception as e:
            print("bummer:", str(e))
    if watch:
        # Only the top-level folder is watched; recursion passes watch=False.
        dirname = os.path.abspath(dirname)
        event_handler = WatchdogHandler(self, dirname)
        self.observer.schedule(event_handler, dirname, recursive=True)
Recursively add all files in a folder to the database
43,107
def add_installed_libraries(self, extra_libs=None):
    """Add any installed robot libraries that we can find.

    :param extra_libs: optional iterable of extra library names to try;
        defaults to a few well-known external libraries.
    """
    if extra_libs is None:
        # Bug fix: avoid a mutable default argument; the original default
        # list is preserved.
        extra_libs = ["Selenium2Library", "SudsLibrary", "RequestsLibrary"]
    libdir = os.path.dirname(robot.libraries.__file__)
    loaded = []
    for filename in os.listdir(libdir):
        if filename.endswith(".py") or filename.endswith(".pyc"):
            libname, ext = os.path.splitext(filename)
            if (libname.lower() not in loaded
                    and not self._should_ignore(libname)):
                try:
                    self.add(libname)
                    loaded.append(libname.lower())
                except Exception as e:
                    self.log.debug("unable to add library: " + str(e))
    for library in extra_libs:
        if (library.lower() not in loaded
                and not self._should_ignore(library)):
            try:
                self.add(library)
                loaded.append(library.lower())
            except Exception as e:
                self.log.debug("unable to add external library %s: %s"
                               % (library, str(e)))
Add any installed libraries that we can find
43,108
def get_collection(self, collection_id):
    """Get a specific collection as a dictionary.

    NOTE(review): the SQL literal was lost in this copy of the source and
    has been reconstructed from the row unpacking below -- confirm
    against the original project.
    """
    sql = """SELECT collection.collection_id, collection.type,
                    collection.name, collection.path,
                    collection.doc, collection.version,
                    collection.scope, collection.namedargs,
                    collection.doc_format
             FROM collection_table as collection
             WHERE collection.collection_id == ? OR collection.name like ?
    """
    cursor = self._execute(sql, (collection_id, collection_id))
    sql_result = cursor.fetchone()
    # Bug fix: the original had an unreachable second `return sql_result`
    # after this return; it has been removed.
    return {
        "collection_id": sql_result[0],
        "type": sql_result[1],
        "name": sql_result[2],
        "path": sql_result[3],
        "doc": sql_result[4],
        "version": sql_result[5],
        "scope": sql_result[6],
        "namedargs": sql_result[7],
        "doc_format": sql_result[8],
    }
Get a specific collection
43,109
def get_keyword(self, collection_id, name):
    """Get a specific keyword from a collection; {} when not found.

    NOTE(review): the SQL literal was lost in this copy of the source and
    has been reconstructed from the row unpacking below -- confirm
    against the original project.
    """
    sql = """SELECT keyword.name, keyword.args, keyword.doc
             FROM keyword_table as keyword
             WHERE keyword.collection_id == ?
               AND keyword.name like ?
    """
    cursor = self._execute(sql, (collection_id, name))
    row = cursor.fetchone()
    if row is not None:
        return {"name": row[0],
                # args are stored JSON-encoded
                "args": json.loads(row[1]),
                "doc": row[2],
                "collection_id": collection_id}
    return {}
Get a specific keyword from a library
43,110
def _looks_like_libdoc_file ( self , name ) : if name . lower ( ) . endswith ( ".xml" ) : with open ( name , "r" ) as f : data = f . read ( 200 ) index = data . lower ( ) . find ( "<keywordspec " ) if index > 0 : return True return False
Return true if an xml file looks like a libdoc file
43,111
def _looks_like_resource_file ( self , name ) : if ( re . search ( r'__init__.(txt|robot|html|tsv)$' , name ) ) : return False found_keyword_table = False if ( name . lower ( ) . endswith ( ".robot" ) or name . lower ( ) . endswith ( ".txt" ) or name . lower ( ) . endswith ( ".tsv" ) ) : with open ( name , "r" ) as f : data = f . read ( ) for match in re . finditer ( r'^\*+\s*(Test Cases?|(?:User )?Keywords?)' , data , re . MULTILINE | re . IGNORECASE ) : if ( re . match ( r'Test Cases?' , match . group ( 1 ) , re . IGNORECASE ) ) : return False if ( not found_keyword_table and re . match ( r'(User )?Keywords?' , match . group ( 1 ) , re . IGNORECASE ) ) : found_keyword_table = True return found_keyword_table
Return true if the file has a keyword table but not a testcase table
43,112
def _should_ignore ( self , name ) : _name = name . lower ( ) return ( _name . startswith ( "deprecated" ) or _name . startswith ( "_" ) or _name in ( "remote" , "reserved" , "dialogs_py" , "dialogs_ipy" , "dialogs_jy" ) )
Return True if a given library name should be ignored
43,113
def _execute ( self , * args ) : cursor = self . db . cursor ( ) cursor . execute ( * args ) return cursor
Execute an SQL query
43,114
def _glob_to_sql ( self , string ) : table = ( ( r'\\' , chr ( 1 ) ) , ( r'\*' , chr ( 2 ) ) , ( r'\?' , chr ( 3 ) ) , ( r'%' , r'\%' ) , ( r'?' , '_' ) , ( r'*' , '%' ) , ( chr ( 1 ) , r'\\' ) , ( chr ( 2 ) , r'\*' ) , ( chr ( 3 ) , r'\?' ) ) for ( a , b ) in table : string = string . replace ( a , b ) string = string [ 1 : ] if string . startswith ( "^" ) else "%" + string string = string [ : - 1 ] if string . endswith ( "$" ) else string + "%" return string
Convert glob - like wildcards to SQL wildcards
43,115
def doc():
    """Show a list of libraries along with the nav panel on the left."""
    kwdb = current_app.kwdb
    libraries = get_collections(kwdb, libtype="library")
    resource_files = get_collections(kwdb, libtype="resource")
    hierarchy = get_navpanel_data(kwdb)
    payload = {"libraries": libraries,
               "version": __version__,
               "libdoc": None,
               "hierarchy": hierarchy,
               "resource_files": resource_files}
    return flask.render_template("home.html", data=payload)
Show a list of libraries along with the nav panel on the left
43,116
def index():
    """Show a list of available libraries and resource files."""
    kwdb = current_app.kwdb
    libraries = get_collections(kwdb, libtype="library")
    resource_files = get_collections(kwdb, libtype="resource")
    payload = {"libraries": libraries,
               "version": __version__,
               "resource_files": resource_files}
    return flask.render_template("libraryNames.html", data=payload)
Show a list of available libraries and resource files
43,117
def search():
    """Show all keywords that match a pattern.

    Supports a leading ``name:`` prefix (search names only) and
    ``in:<collection>`` words that restrict results to matching
    collections.
    """
    pattern = flask.request.args.get('pattern', "*").strip().lower()
    collections = [c["name"].lower()
                   for c in current_app.kwdb.get_collections()]

    words = []
    filters = []
    if pattern.startswith("name:"):
        pattern = pattern[5:].strip()
        mode = "name"
    else:
        mode = "both"

    for word in pattern.split(" "):
        if word.lower().startswith("in:"):
            filters.extend(name for name in collections
                           if name.startswith(word[3:]))
        else:
            words.append(word)
    pattern = " ".join(words)

    keywords = []
    for row in current_app.kwdb.search(pattern, mode):
        collection_name = row[1].lower()
        if filters and collection_name not in filters:
            continue
        url = flask.url_for(".doc_for_library",
                            collection_id=row[0], keyword=row[2])
        row_id = "row-%s.%s" % (row[1].lower(),
                                row[2].lower().replace(" ", "-"))
        keywords.append({"collection_id": row[0],
                         "collection_name": row[1],
                         "name": row[2],
                         "synopsis": row[3],
                         "version": __version__,
                         "url": url,
                         "row_id": row_id})
    keywords.sort(key=lambda entry: entry["name"])
    return flask.render_template("search.html",
                                 data={"keywords": keywords,
                                       "version": __version__,
                                       "pattern": pattern})
Show all keywords that match a pattern
43,118
def get_collections(kwdb, libtype="*"):
    """Get the collections from kwdb, adding the url each hyperlink needs."""
    collections = kwdb.get_collections(libtype=libtype)
    for entry in collections:
        entry["url"] = flask.url_for(".doc_for_library",
                                     collection_id=entry["collection_id"])
    return collections
Get list of collections from kwdb then add urls necessary for hyperlinks
43,119
def get_navpanel_data(kwdb):
    """Get navpanel data from kwdb, adding the urls hyperlinks need."""
    data = kwdb.get_keyword_hierarchy()
    for library in data:
        cid = library["collection_id"]
        library["url"] = flask.url_for(".doc_for_library", collection_id=cid)
        for keyword in library["keywords"]:
            keyword["url"] = flask.url_for(".doc_for_library",
                                           collection_id=cid,
                                           keyword=keyword["name"])
    return data
Get navpanel data from kwdb and add urls necessary for hyperlinks
43,120
def doc_to_html(doc, doc_format="ROBOT"):
    """Convert documentation text to HTML using robot's libdoc writer."""
    from robot.libdocpkg.htmlwriter import DocToHtml
    writer = DocToHtml(doc_format)
    return writer(doc)
Convert documentation to HTML
43,121
def start(self):
    """Start the app: Flask's debug server, or tornado in production."""
    if self.args.debug:
        self.app.run(port=self.args.port, debug=self.args.debug,
                     host=self.args.interface)
        return
    root = "http://%s:%s" % (self.args.interface, self.args.port)
    print("tornado web server running on " + root)
    self.shutdown_requested = False
    http_server = HTTPServer(WSGIContainer(self.app))
    http_server.listen(port=self.args.port, address=self.args.interface)
    # SIGINT flips the shutdown flag; a periodic callback polls it.
    signal.signal(signal.SIGINT, self.signal_handler)
    tornado.ioloop.PeriodicCallback(self.check_shutdown_flag, 500).start()
    tornado.ioloop.IOLoop.instance().start()
Start the app
43,122
def check_shutdown_flag(self):
    """Stop the tornado IOLoop once the shutdown flag has been set."""
    if not self.shutdown_requested:
        return
    tornado.ioloop.IOLoop.instance().stop()
    print("web server stopped.")
Shutdown the server if the flag has been set
43,123
def coords(obj):
    """Yield every coordinate tuple found in a Feature or Geometry."""
    if 'features' in obj:
        # FeatureCollection: recurse into each feature.
        for feature in obj['features']:
            for coord in coords(feature):
                yield coord
        return
    if isinstance(obj, (tuple, list)):
        coordinates = obj
    elif 'geometry' in obj:
        coordinates = obj['geometry']['coordinates']
    else:
        coordinates = obj.get('coordinates', obj)
    for item in coordinates:
        if isinstance(item, (float, int)):
            # A flat list of numbers is a single position.
            yield tuple(coordinates)
            break
        for nested in coords(item):
            yield nested
Yields the coordinates from a Feature or Geometry .
43,124
def map_tuples(func, obj):
    """Return the Geometry with *func* applied to each coordinate.

    Feature-like objects are delegated to map_geometries.
    """
    geom_type = obj['type']
    if geom_type == 'Point':
        coordinates = tuple(func(obj['coordinates']))
    elif geom_type in ('LineString', 'MultiPoint'):
        coordinates = [tuple(func(c)) for c in obj['coordinates']]
    elif geom_type in ('MultiLineString', 'Polygon'):
        coordinates = [[tuple(func(c)) for c in ring]
                       for ring in obj['coordinates']]
    elif geom_type == 'MultiPolygon':
        coordinates = [[[tuple(func(c)) for c in ring] for ring in poly]
                       for poly in obj['coordinates']]
    elif geom_type in ('Feature', 'FeatureCollection', 'GeometryCollection'):
        return map_geometries(lambda g: map_tuples(func, g), obj)
    else:
        raise ValueError("Invalid geometry object %s" % repr(obj))
    return {'type': geom_type, 'coordinates': coordinates}
Returns the mapped coordinates from a Geometry after applying the provided function to each coordinate .
43,125
def map_geometries(func, obj):
    """Pass every geometry in the given geojson object through *func*."""
    obj_type = obj['type']
    if obj_type in ('Point', 'LineString', 'MultiPoint',
                    'MultiLineString', 'Polygon', 'MultiPolygon'):
        return func(obj)
    if obj_type == 'GeometryCollection':
        geometries = [func(g) if g else None for g in obj['geometries']]
        return {'type': obj_type, 'geometries': geometries}
    if obj_type == 'Feature':
        geometry = func(obj['geometry']) if obj['geometry'] else None
        return {'type': obj_type,
                'geometry': geometry,
                'properties': obj['properties']}
    if obj_type == 'FeatureCollection':
        features = [map_geometries(func, feat) for feat in obj['features']]
        return {'type': obj_type, 'features': features}
    raise ValueError("Invalid GeoJSON object %s" % repr(obj))
Returns the result of passing every geometry in the given geojson object through func .
43,126
def to_instance(cls, ob, default=None, strict=False):
    """Encode a GeoJSON dict into a GeoJSON object.

    Assumes the caller knows that the dict should satisfy a GeoJSON type.

    :param default: factory used when *ob* is None.
    :param strict: raise ValueError instead of returning *ob* unchanged
        when it cannot be coerced.
    """
    if ob is None and default is not None:
        instance = default()
    elif isinstance(ob, GeoJSON):
        instance = ob
    else:
        mapping = to_mapping(ob)
        d = {}
        for k in mapping:
            d[k] = mapping[k]
        try:
            type_ = d.pop("type")
            try:
                type_ = str(type_)
            except UnicodeEncodeError:
                # Bug fix: the original called .format() on the raised
                # AttributeError object instead of on the message string.
                raise AttributeError(
                    "{0} is not a GeoJSON type".format(type_))
            geojson_factory = getattr(geojson.factory, type_)
            instance = geojson_factory(**d)
        except (AttributeError, KeyError) as invalid:
            if strict:
                msg = "Cannot coerce %r into a valid GeoJSON structure: %s"
                msg %= (ob, invalid)
                raise ValueError(msg)
            instance = ob
    return instance
Encode a GeoJSON dict into an GeoJSON object . Assumes the caller knows that the dict should satisfy a GeoJSON type .
43,127
def check_list_errors(self, checkFunc, lst):
    """Validation helper: apply *checkFunc* to each item and collect the
    truthy error results."""
    return [error
            for error in (checkFunc(item) for item in lst)
            if error]
Validation helper function .
43,128
def run_only_once(self, keyword):
    """Run *keyword* only once across all parallel pabot processes.

    The first process to grab the lock executes the keyword and records
    PASSED/FAILED; every other process either returns silently or raises
    if the recorded state is FAILED.  The keyword's return value is not
    propagated.
    """
    lock_name = 'pabot_run_only_once_%s' % keyword
    try:
        self.acquire_lock(lock_name)
        previous_state = self.get_parallel_value_for_key(lock_name)
        if previous_state != '':
            # Some other process already ran (or failed) the keyword.
            if previous_state == 'FAILED':
                raise AssertionError('Keyword failed in other process')
            return
        BuiltIn().run_keyword(keyword)
        self.set_parallel_value_for_key(lock_name, 'PASSED')
    except:  # noqa: E722 -- deliberately bare, to record even interrupts
        self.set_parallel_value_for_key(lock_name, 'FAILED')
        raise
    finally:
        self.release_lock(lock_name)
Runs a keyword only once in one of the parallel processes . Because the keyword is executed in only one process , its return value could be anything , so Run Only Once can't propagate the actual return value . If the keyword fails , Run Only Once fails . Other processes executing Run Only Once wait at this keyword until the actual command has finished . NOTE! This is a potential shoot - yourself - in - the - knee keyword . In particular , any namespace changes are visible only in the process that actually executed the keyword , and odd situations may arise if it is used inside other keywords . Also , at this point keywords are identified as the same solely by name .
43,129
def set_parallel_value_for_key(self, key, value):
    """Set a globally available key/value visible to all pabot processes."""
    if not self._remotelib:
        _PabotLib.set_parallel_value_for_key(self, key, value)
    else:
        self._remotelib.run_keyword('set_parallel_value_for_key',
                                    [key, value], {})
Set a globally available key and value that can be accessed from all the pabot processes .
43,130
def get_parallel_value_for_key(self, key):
    """Get the value for *key*; empty string when the key is unset."""
    if not self._remotelib:
        return _PabotLib.get_parallel_value_for_key(self, key)
    return self._remotelib.run_keyword('get_parallel_value_for_key',
                                       [key], {})
Get the value for a key . If there is no value for the key then empty string is returned .
43,131
def acquire_lock(self, name):
    """Wait for and acquire the lock *name*.

    Other processes trying to acquire the same lock block until the
    holder releases it.

    :return: True once the lock is held.
    """
    if self._remotelib:
        try:
            while not self._remotelib.run_keyword('acquire_lock',
                                                  [name, self._my_id], {}):
                time.sleep(0.1)
                logger.debug('waiting for lock to release')
            return True
        except RuntimeError:
            logger.warn('no connection')
            # Bug fix: the original assigned to the mangled name
            # `self.__remotelib`, so the broken connection was never
            # actually cleared.
            self._remotelib = None
    return _PabotLib.acquire_lock(self, name, self._my_id)
Wait for a lock with name . This will prevent other processes from acquiring the lock with the name while it is held . Thus they will wait in the position where they are acquiring the lock until the process that has it releases it .
43,132
def release_lock(self, name):
    """Release lock *name* so other processes can acquire it."""
    if not self._remotelib:
        _PabotLib.release_lock(self, name, self._my_id)
    else:
        self._remotelib.run_keyword('release_lock',
                                    [name, self._my_id], {})
Release a lock with name . This will enable others to acquire the lock .
43,133
def release_locks(self):
    """Release all locks held by this instance."""
    if not self._remotelib:
        _PabotLib.release_locks(self, self._my_id)
    else:
        self._remotelib.run_keyword('release_locks', [self._my_id], {})
Release all locks called by instance .
43,134
def acquire_value_set(self, *tags):
    """Reserve a value set (optionally limited by *tags*) for this execution.

    The set must be released after use so other processes can access it.

    :return: the name of the acquired value set.
    :raises ValueError: when no value set could be acquired.
    """
    setname = self._acquire_value_set(*tags)
    if setname is None:
        # Typo fixed in the message: "aquire" -> "acquire".
        raise ValueError("Could not acquire a value set")
    return setname
Reserve a set of values for this execution . No other process can reserve the same set of values while the set is reserved . Acquired value set needs to be released after use to allow other processes to access it . Add tags to limit the possible value sets that this returns .
43,135
def get_value_from_set(self, key):
    """Get a value from a previously reserved value set.

    When running against the remote pabot lib, polls until the value
    becomes available.
    """
    key = key.lower()
    if not self._remotelib:
        return _PabotLib.get_value_from_set(self, key, self._my_id)
    while True:
        value = self._remotelib.run_keyword('get_value_from_set',
                                            [key, self._my_id], {})
        if value:
            return value
        time.sleep(0.1)
        logger.debug('waiting for a value')
Get a value from previously reserved value set .
43,136
def release_value_set(self):
    """Release the reserved value set so other executions can use it."""
    if not self._remotelib:
        _PabotLib.release_value_set(self, self._my_id)
    else:
        self._remotelib.run_keyword('release_value_set', [self._my_id], {})
Release a reserved value set so that other executions can use it also .
43,137
def install_all_patches():
    """A convenience method that installs all available hooks."""
    from . import mysqldb
    from . import psycopg2
    from . import strict_redis
    from . import sqlalchemy
    from . import tornado_http
    from . import urllib
    from . import urllib2
    from . import requests

    for hook_module in (mysqldb, psycopg2, strict_redis, sqlalchemy,
                        tornado_http, urllib, urllib2, requests):
        hook_module.install_patches()
A convenience method that installs all available hooks .
43,138
def install_patches(patchers='all'):
    """Install the client hooks named in *patchers*.

    Usually called from middleware using the client_hooks section of the
    configuration.  None or 'all' installs every available hook.
    """
    if patchers is None or patchers == 'all':
        install_all_patches()
        return
    if not _valid_args(patchers):
        raise ValueError('patchers argument must be None, "all", or a list')
    for patch_func_name in patchers:
        logging.info('Loading client hook %s', patch_func_name)
        patch_func = _load_symbol(patch_func_name)
        logging.info('Applying client hook %s', patch_func_name)
        patch_func()
Usually called from middleware to install client hooks specified in the client_hooks section of the configuration .
43,139
def install_client_interceptors(client_interceptors=()):
    """Install client interceptors for the patchers.

    :param client_interceptors: iterable of dotted names of interceptor
        classes to instantiate and register.
    """
    if not _valid_args(client_interceptors):
        raise ValueError('client_interceptors argument must be a list')
    from ..http_client import ClientInterceptors
    for dotted_name in client_interceptors:
        logging.info('Loading client interceptor %s', dotted_name)
        interceptor_class = _load_symbol(dotted_name)
        logging.info('Adding client interceptor %s', dotted_name)
        ClientInterceptors.append(interceptor_class())
Install client interceptors for the patchers .
43,140
def _load_symbol ( name ) : module_name , key = name . rsplit ( '.' , 1 ) try : module = importlib . import_module ( module_name ) except ImportError as err : module_name , class_name = module_name . rsplit ( '.' , 1 ) module = importlib . import_module ( module_name ) cls = getattr ( module , class_name , None ) if cls : attr = getattr ( cls , key , None ) else : raise err else : attr = getattr ( module , key , None ) if not callable ( attr ) : raise ValueError ( '%s is not callable (was %r)' % ( name , attr ) ) return attr
Load a symbol by name .
43,141
def span_in_stack_context(span):
    """Create Tornado's StackContext holding *span* in the thread-local
    request context.

    Intended for Tornado apps based on IOLoop; works (with more
    overhead) in single-threaded apps like Flask.

    :raises RuntimeError: if the tracer's scope manager is not a
        TornadoScopeManager.
    """
    scope_manager = opentracing.tracer.scope_manager
    if not isinstance(scope_manager, TornadoScopeManager):
        raise RuntimeError('scope_manager is not TornadoScopeManager')

    # Enter the new stack context so the request context is visible.
    entered_context = _TracerEnteredStackContext(tracer_stack_context())
    if span is None:
        return entered_context

    scope_manager.activate(span, False)
    assert opentracing.tracer.active_span is not None
    assert opentracing.tracer.active_span is span
    return entered_context
Create Tornado s StackContext that stores the given span in the thread - local request context . This function is intended for use in Tornado applications based on IOLoop although will work fine in single - threaded apps like Flask albeit with more overhead .
43,142
def traced_function(func=None, name=None, on_start=None,
                    require_active_trace=False):
    """Decorator that traces the wrapped function or Tornado co-routine.

    :param name: operation name (defaults to the function's __name__).
    :param on_start: optional callback invoked with (span, *args,
        **kwargs) right after the span starts.
    :param require_active_trace: when True, run untraced unless a parent
        span is already active.
    """
    if func is None:
        # Called with arguments: @traced_function(name=...).
        return functools.partial(traced_function, name=name,
                                 on_start=on_start,
                                 require_active_trace=require_active_trace)

    operation_name = name if name else func.__name__

    @functools.wraps(func)
    def decorator(*args, **kwargs):
        parent_span = get_current_span()
        if parent_span is None and require_active_trace:
            return func(*args, **kwargs)

        span = utils.start_child_span(operation_name=operation_name,
                                      parent=parent_span)
        if callable(on_start):
            on_start(span, *args, **kwargs)

        # The span stays active for the duration of the call; for a
        # future it stays open until the future completes.
        with span_in_stack_context(span) as deactivate_cb:
            try:
                res = func(*args, **kwargs)
                if tornado.concurrent.is_future(res):
                    def done_callback(future):
                        deactivate_cb()
                        exception = future.exception()
                        if exception is not None:
                            span.log(event='exception', payload=exception)
                            span.set_tag('error', 'true')
                        span.finish()
                    res.add_done_callback(done_callback)
                else:
                    deactivate_cb()
                    span.finish()
                return res
            except Exception as e:
                deactivate_cb()
                span.log(event='exception', payload=e)
                span.set_tag('error', 'true')
                span.finish()
                raise

    return decorator
A decorator that enables tracing of the wrapped function or Tornado co - routine provided there is a parent span already established .
43,143
def start_child_span(operation_name, tracer=None, parent=None, tags=None):
    """Start a new span as a child of *parent*; a root span when parent
    is None."""
    active_tracer = tracer or opentracing.tracer
    child_of = parent.context if parent else None
    return active_tracer.start_span(operation_name=operation_name,
                                    child_of=child_of, tags=tags)
Start a new span as a child of parent_span . If parent_span is None start a new root span .
43,144
def before_request(request, tracer=None):
    """Extract a tracing span from an incoming request.

    If no tracing context is present in the headers, or it cannot be
    parsed, a new root span is started.
    """
    if tracer is None:
        tracer = opentracing.tracer

    span_tags = {
        tags.SPAN_KIND: tags.SPAN_KIND_RPC_SERVER,
        tags.HTTP_URL: request.full_url,
    }
    remote_ip = request.remote_ip
    if remote_ip:
        span_tags[tags.PEER_HOST_IPV4] = remote_ip
    caller_name = request.caller_name
    if caller_name:
        span_tags[tags.PEER_SERVICE] = caller_name
    remote_port = request.remote_port
    if remote_port:
        span_tags[tags.PEER_PORT] = remote_port

    try:
        carrier = {}
        for key, value in six.iteritems(request.headers):
            carrier[key] = value
        parent_ctx = tracer.extract(format=Format.HTTP_HEADERS,
                                    carrier=carrier)
    except Exception as e:
        # A malformed carrier should not break the request: fall back to
        # a fresh root span.
        logging.exception('trace extract failed: %s' % e)
        parent_ctx = None

    return tracer.start_span(operation_name=request.operation,
                             child_of=parent_ctx, tags=span_tags)
Attempts to extract a tracing span from incoming request . If no tracing context is passed in the headers or the data cannot be parsed a new root span is started .
43,145
def _parse_wsgi_headers ( wsgi_environ ) : prefix = 'HTTP_' p_len = len ( prefix ) headers = { key [ p_len : ] . replace ( '_' , '-' ) . lower ( ) : val for ( key , val ) in wsgi_environ . items ( ) if key . startswith ( prefix ) } return headers
HTTP headers are presented in WSGI environment with HTTP_ prefix . This method finds those headers removes the prefix converts underscores to dashes and converts to lower case .
43,146
def append(cls, interceptor):
    """Validate *interceptor* and add it to the end of the internal list."""
    cls._check(interceptor)
    cls._interceptors.append(interceptor)
Add interceptor to the end of the internal list .
43,147
def insert(cls, index, interceptor):
    """Validate *interceptor* and add it at *index* in the internal list."""
    cls._check(interceptor)
    cls._interceptors.insert(index, interceptor)
Add interceptor to the given index in the internal list .
43,148
def singleton(func):
    """Ensure *func* runs once and only once; later calls return None.

    The wrapper exposes ``reset()`` to re-arm the function.  Recursive
    functions still work, because the call state flips only after the
    first call completes.
    """
    @functools.wraps(func)
    def wrapper(*args, **kwargs):
        if wrapper.__call_state__ == CALLED:
            return None
        result = func(*args, **kwargs)
        wrapper.__call_state__ = CALLED
        return result

    def reset():
        wrapper.__call_state__ = NOT_CALLED

    wrapper.reset = reset
    reset()
    wrapper.__original_func = func
    return wrapper
This decorator allows you to make sure that a function is called once and only once . Note that recursive functions will still work .
43,149
def smooth_image(image, sigma, sigma_in_physical_coordinates=True,
                 FWHM=False, max_kernel_width=32):
    """Smooth an image (channel-wise for multi-channel images).

    :param sigma: smoothing sigma (interpreted as FWHM when *FWHM* is True)
    :param sigma_in_physical_coordinates: interpret sigma in physical
        units rather than voxels.
    """
    if image.components == 1:
        return _smooth_image_helper(image, sigma,
                                    sigma_in_physical_coordinates,
                                    FWHM, max_kernel_width)
    # Multi-channel: smooth each channel independently and re-merge.
    # (Bug fix: the original loop variable shadowed the `image` parameter.)
    channels = utils.split_channels(image)
    smoothed = [_smooth_image_helper(channel, sigma,
                                     sigma_in_physical_coordinates,
                                     FWHM, max_kernel_width)
                for channel in channels]
    return utils.merge_channels(smoothed)
Smooth an image
43,150
def build_template(initial_template=None, image_list=None, iterations=3,
                   gradient_step=0.2, **kwargs):
    """Estimate an optimal template from *image_list*.

    Starts from *initial_template* (or the weighted mean of the inputs)
    and iteratively registers every image to the current average with
    SyN, nudging the average along the mean inverse warp each iteration.
    """
    weight = 1.0 / len(image_list)
    if initial_template is None:
        initial_template = image_list[0] * 0
        for img in image_list:
            initial_template = initial_template + img * weight

    xavg = initial_template.clone()
    for _ in range(iterations):
        for k in range(len(image_list)):
            reg = registration(xavg, image_list[k],
                               type_of_transform='SyN', **kwargs)
            warp = iio.image_read(reg['fwdtransforms'][0]) * weight
            warped = reg['warpedmovout'] * weight
            if k == 0:
                wavg = warp
                xavgNew = warped
            else:
                wavg = wavg + warp
                xavgNew = xavgNew + warped
        print(wavg.abs().mean())
        # Step the template against the mean warp.
        wavg = wavg * ((-1.0) * gradient_step)
        wavg_file = mktemp(suffix='.nii.gz')
        iio.image_write(wavg, wavg_file)
        xavg = apply_transforms(xavg, xavg, wavg_file)
    return xavg
Estimate an optimal template from an input image_list
43,151
def resample_image(image, resample_params, use_voxels=False, interp_type=1):
    """Resample *image* by spacing or number of voxels.

    :param resample_params: per-dimension spacing (or voxel counts when
        *use_voxels* is True).
    :param interp_type: interpolator code passed to ResampleImage.
    :raises ValueError: for multi-channel images (not supported).
    """
    if image.components != 1:
        raise ValueError('images with more than 1 component not currently supported')
    inimage = image.clone('float')
    outimage = image.clone('float')
    spacing_arg = 'x'.join(str(p) for p in resample_params)
    call_args = [image.dimension, inimage, outimage, spacing_arg,
                 int(use_voxels), interp_type]
    libfn = utils.get_lib_fn('ResampleImage')
    libfn(utils._int_antsProcessArguments(call_args))
    return outimage.clone(image.pixeltype)
Resample image by spacing or number of voxels with various interpolators . Works with multi - channel images .
43,152
def apply_ants_transform(transform, data, data_type="point",
                         reference=None, **kwargs):
    """Apply an ANTsTransform to *data* (a point, vector, or image).

    Thin functional wrapper around ``transform.apply``.
    """
    return transform.apply(data, data_type, reference, **kwargs)
Apply ANTsTransform to data
43,153
def compose_ants_transforms(transform_list):
    """Compose multiple ANTsTransforms into one CompositeTransform.

    All transforms must share precision and dimension.  The pointer list
    is reversed for ITK's composition order.

    :raises ValueError: on mismatched precision or dimension.
    """
    first = transform_list[0]
    precision = first.precision
    dimension = first.dimension
    for tx in transform_list:
        if tx.precision != precision:
            raise ValueError('All transforms must have the same precision')
        if tx.dimension != dimension:
            raise ValueError('All transforms must have the same dimension')
    pointers = [tf.pointer for tf in reversed(transform_list)]
    libfn = utils.get_lib_fn('composeTransforms%s' % first._libsuffix)
    composed = libfn(pointers, precision, dimension)
    return ANTsTransform(precision=precision, dimension=dimension,
                         transform_type='CompositeTransform',
                         pointer=composed)
Compose multiple ANTsTransform s together
43,154
def transform_index_to_physical_point(image, index):
    """Get the spatial point for a (0-based) voxel index of *image*.

    :raises ValueError: on bad argument types or a dimension mismatch.
    """
    if not isinstance(image, iio.ANTsImage):
        raise ValueError('image must be ANTsImage type')
    if isinstance(index, np.ndarray):
        index = index.tolist()
    if not isinstance(index, (tuple, list)):
        raise ValueError('index must be tuple or list')
    if len(index) != image.dimension:
        raise ValueError('len(index) != image.dimension')
    # The underlying ITK call is 1-based.
    one_based = [i + 1 for i in index]
    libfn = utils.get_lib_fn('TransformIndexToPhysicalPoint%s%i'
                             % (utils.short_ptype(image.pixeltype),
                                image.dimension))
    point = libfn(image.pointer, [list(one_based)])
    return np.array(point[0])
Get spatial point from index of an image .
43,155
def invert(self):
    """Return a new ANTsTransform that is the inverse of this one."""
    libfn = utils.get_lib_fn('inverseTransform%s' % self._libsuffix)
    inverse_ptr = libfn(self.pointer)
    return ANTsTransform(precision=self.precision,
                         dimension=self.dimension,
                         transform_type=self.transform_type,
                         pointer=inverse_ptr)
Invert the transform
43,156
def apply(self, data, data_type='point', reference=None, **kwargs):
    """Apply this transform to *data* (a point, vector, or image).

    Returns None for an unrecognized data_type, matching the original
    if/elif chain.
    """
    dispatch = {
        'point': lambda: self.apply_to_point(data),
        'vector': lambda: self.apply_to_vector(data),
        'image': lambda: self.apply_to_image(data, reference, **kwargs),
    }
    handler = dispatch.get(data_type)
    return handler() if handler else None
Apply transform to data
43,157
def apply_to_point(self, point):
    """Apply this transform to a point; returns a tuple."""
    libfn = utils.get_lib_fn('transformPoint%s' % self._libsuffix)
    transformed = libfn(self.pointer, point)
    return tuple(transformed)
Apply transform to a point
43,158
def apply_to_vector(self, vector):
    """Apply this transform to a vector; returns an ndarray."""
    if isinstance(vector, np.ndarray):
        vector = vector.tolist()
    libfn = utils.get_lib_fn('transformVector%s' % self._libsuffix)
    transformed = libfn(self.pointer, vector)
    return np.asarray(transformed)
Apply transform to a vector
43,159
def plot_hist(image, threshold=0., fit_line=False, normfreq=True,
              title=None, grid=True, xlabel=None, ylabel=None,
              facecolor='green', alpha=0.75):
    """Plot a histogram of an ANTsImage's intensity values.

    Arguments
    ---------
    image : ANTsImage whose values are plotted
    threshold : only values with abs(value) > threshold are included
    fit_line : overlay a normal pdf fit to the data's mean/std
    normfreq : normalize the histogram (density) when truthy
    title, grid, xlabel, ylabel, facecolor, alpha : plot cosmetics
    """
    img_arr = image.numpy().flatten()
    img_arr = img_arr[np.abs(img_arr) > threshold]

    if normfreq != False:
        normfreq = 1. if normfreq == True else normfreq
    # `normed` was removed from matplotlib (3.x); `density` is the replacement
    n, bins, patches = plt.hist(img_arr, 50, density=bool(normfreq),
                                facecolor=facecolor, alpha=alpha)

    if fit_line:
        # mlab.normpdf was removed from matplotlib; compute the pdf directly
        mu, sd = img_arr.mean(), img_arr.std()
        y = np.exp(-0.5 * ((bins - mu) / sd) ** 2) / (sd * np.sqrt(2 * np.pi))
        plt.plot(bins, y, 'r--', linewidth=1)

    if xlabel is not None:
        plt.xlabel(xlabel)
    if ylabel is not None:
        plt.ylabel(ylabel)
    if title is not None:
        plt.title(title)
    plt.grid(grid)
    plt.show()
Plot a histogram from an ANTsImage
43,160
def morphology(image, operation, radius, mtype='binary', value=1, shape='ball',
               radius_is_parametric=False, thickness=1, lines=3, include_center=False):
    """Apply a morphological operation to a single-channel image.

    Arguments
    ---------
    operation : 'dilate', 'erode', 'open', or 'close'
    radius : structuring-element radius
    mtype : 'binary' or 'grayscale'
    value : label value operated on (binary morphology only)
    shape : structuring element -- 'ball', 'box', 'cross', 'annulus', 'polygon'
        (binary only; the grayscale branch passes only the radius)
    radius_is_parametric, thickness, include_center : element options for
        non-polygon shapes; `lines` is used only when shape == 'polygon'

    Returns the transformed image; raises ValueError on invalid inputs.
    """
    if image.components > 1:
        raise ValueError('multichannel images not yet supported')
    _sflag_dict = {'ball': 1, 'box': 2, 'cross': 3, 'annulus': 4, 'polygon': 5}
    sFlag = _sflag_dict.get(shape, 0)
    if sFlag == 0:
        raise ValueError('invalid element shape')
    # convert booleans to the 0/1 integers the iMath backend expects
    radius_is_parametric = radius_is_parametric * 1
    include_center = include_center * 1
    if (mtype == 'binary'):
        if (operation == 'dilate'):
            if (sFlag == 5):
                # polygon elements take a line count instead of parametric options
                ret = iMath(image, 'MD', radius, value, sFlag, lines)
            else:
                ret = iMath(image, 'MD', radius, value, sFlag, radius_is_parametric, thickness, include_center)
        elif (operation == 'erode'):
            if (sFlag == 5):
                ret = iMath(image, 'ME', radius, value, sFlag, lines)
            else:
                ret = iMath(image, 'ME', radius, value, sFlag, radius_is_parametric, thickness, include_center)
        elif (operation == 'open'):
            if (sFlag == 5):
                ret = iMath(image, 'MO', radius, value, sFlag, lines)
            else:
                ret = iMath(image, 'MO', radius, value, sFlag, radius_is_parametric, thickness, include_center)
        elif (operation == 'close'):
            if (sFlag == 5):
                ret = iMath(image, 'MC', radius, value, sFlag, lines)
            else:
                ret = iMath(image, 'MC', radius, value, sFlag, radius_is_parametric, thickness, include_center)
        else:
            raise ValueError('Invalid morphology operation')
    elif (mtype == 'grayscale'):
        # grayscale morphology ignores shape/value options and uses only radius
        if (operation == 'dilate'):
            ret = iMath(image, 'GD', radius)
        elif (operation == 'erode'):
            ret = iMath(image, 'GE', radius)
        elif (operation == 'open'):
            ret = iMath(image, 'GO', radius)
        elif (operation == 'close'):
            ret = iMath(image, 'GC', radius)
        else:
            raise ValueError('Invalid morphology operation')
    else:
        raise ValueError('Invalid morphology type')
    return ret
Apply morphological operations to an image
43,161
def rgb_to_vector(image):
    """Convert an RGB ANTsImage into a 3-component vector ANTsImage."""
    if image.pixeltype != 'unsigned char':
        image = image.clone('unsigned char')
    convert_fn = utils.get_lib_fn('RgbToVector%i' % image.dimension)
    converted_ptr = convert_fn(image.pointer)
    return iio.ANTsImage(pixeltype=image.pixeltype,
                         dimension=image.dimension,
                         components=3,
                         pointer=converted_ptr,
                         is_rgb=False)
Convert an RGB ANTsImage to a Vector ANTsImage
43,162
def vector_to_rgb(image):
    """Convert a 3-component vector ANTsImage into an RGB ANTsImage."""
    if image.pixeltype != 'unsigned char':
        image = image.clone('unsigned char')
    convert_fn = utils.get_lib_fn('VectorToRgb%i' % image.dimension)
    converted_ptr = convert_fn(image.pointer)
    return iio.ANTsImage(pixeltype=image.pixeltype,
                         dimension=image.dimension,
                         components=3,
                         pointer=converted_ptr,
                         is_rgb=True)
Convert an Vector ANTsImage to a RGB ANTsImage
43,163
def quantile(image, q, nonzero=True):
    """Return quantile value(s) of an ANTsImage's voxel intensities.

    Arguments
    ---------
    image : ANTsImage (anything with a .numpy() method)
    q : float/int or list/tuple of them; values <= 1 are treated as
        fractions and scaled to percentiles
    nonzero : if True, only strictly-positive voxels are considered

    Returns a tuple for list/tuple q, otherwise a scalar.

    Raises ValueError when q is not a number or list/tuple of numbers.
    """
    img_arr = image.numpy()
    # apply the nonzero filter once, for both branches
    if nonzero:
        img_arr = img_arr[img_arr > 0]
    if isinstance(q, (list, tuple)):
        q = [qq * 100. if qq <= 1. else qq for qq in q]
        return tuple(np.percentile(img_arr, qq) for qq in q)
    elif isinstance(q, (float, int)):
        if q <= 1.:
            q = q * 100.
        # bug fix: the scalar branch previously filtered to positive voxels
        # unconditionally (and twice), ignoring nonzero=False
        return np.percentile(img_arr, q)
    else:
        raise ValueError('q argument must be list/tuple or float/int')
Get the quantile values from an ANTsImage
43,164
def bandpass_filter_matrix(matrix, tr=1, lowf=0.01, highf=0.1, order=3):
    """Bandpass-filter each column of a time-by-feature matrix.

    Arguments
    ---------
    matrix : 2D numpy array with time along axis 0
    tr : sampling interval (time per row); sampling frequency is 1/tr
    lowf, highf : band edges, in the same frequency units as 1/tr
    order : Butterworth filter order

    Returns a filtered copy; the input matrix is not modified.
    """
    from scipy.signal import butter, filtfilt

    fs = 1.0 / tr
    nyq = 0.5 * fs
    # design the Butterworth bandpass once (was re-derived per column)
    b, a = butter(order, [lowf / nyq, highf / nyq], btype='band')

    filtered = matrix.copy()
    for col in range(matrix.shape[1]):
        # zero-phase filtering, column by column
        filtered[:, col] = filtfilt(b, a, matrix[:, col])
    return filtered
Bandpass filter the input time series image
43,165
def compcor(boldImage, ncompcor=4, quantile=0.975, mask=None, filter_type=False, degree=2):
    """Compute CompCor noise components from a BOLD time-series image.

    Voxels whose temporal standard deviation is at or above the given
    quantile form the noise pool; the top `ncompcor` left singular vectors
    of that pool's (optionally polynomial-detrended) time series are the
    components.

    Returns a dict with 'components' (time x ncompcor array) and 'basis'
    (the detrending basis; empty array when no detrending was applied).

    NOTE(review): `regress_poly` is defined elsewhere in this module.
    NOTE(review): `components` is always None when the hstack branch is
    reached, so that branch is currently unreachable.
    """
    def compute_tSTD(M, quantile, x=0, axis=0):
        # temporal std per voxel; zeros/NaNs replaced by x so they never
        # dominate the percentile threshold
        stdM = np.std(M, axis=axis)
        stdM[stdM == 0] = x
        stdM[np.isnan(stdM)] = x
        tt = round(quantile * 100)
        threshold_std = np.percentile(stdM, tt)
        return {'tSTD': stdM, 'threshold_std': threshold_std}
    if mask is None:
        # derive a mask from the first volume along the last (time) axis
        temp = utils.slice_image(boldImage, axis=boldImage.dimension - 1, idx=0)
        mask = utils.get_mask(temp)
    imagematrix = core.timeseries_to_matrix(boldImage, mask)
    temp = compute_tSTD(imagematrix, quantile, 0)
    tsnrmask = core.make_image(mask, temp['tSTD'])
    tsnrmask = utils.threshold_image(tsnrmask, temp['threshold_std'], temp['tSTD'].max())
    M = core.timeseries_to_matrix(boldImage, tsnrmask)
    components = None
    basis = np.array([])
    if filter_type in ('polynomial', False):
        M, basis = regress_poly(degree, M)
    u, _, _ = linalg.svd(M, full_matrices=False)
    if components is None:
        components = u[:, :ncompcor]
    else:
        components = np.hstack((components, u[:, :ncompcor]))
    if components is None and ncompcor > 0:
        raise ValueError('No components found')
    return {'components': components, 'basis': basis}
Compute noise components from the input image
43,166
def n3_bias_field_correction(image, downsample_factor=3):
    """Run N3 bias field correction; returns the corrected image."""
    outimage = image.clone()
    arglist = [image.dimension, image, outimage, downsample_factor]
    processed = pargs._int_antsProcessArguments(arglist)
    correct_fn = utils.get_lib_fn('N3BiasFieldCorrection')
    correct_fn(processed)
    return outimage
N3 Bias Field Correction
43,167
def n4_bias_field_correction(image, mask=None, shrink_factor=4,
                             convergence=None, spline_param=200,
                             verbose=False, weight_mask=None):
    """N4 bias field correction.

    Arguments
    ---------
    image : ANTsImage to correct (cast to float if needed)
    mask : optional mask image; computed with get_mask when omitted
    shrink_factor : downsampling factor passed to N4
    convergence : dict with 'iters' (list of ints) and 'tol' (float);
        defaults to {'iters': [50, 50, 50, 50], 'tol': 1e-07}
    spline_param : scalar spline distance, or a list/tuple with one value
        per image dimension
    verbose : pass verbosity flag to the compiled routine
    weight_mask : optional ANTsImage of voxel weights

    Returns the bias-corrected image.
    """
    # avoid the shared-mutable-default-argument pitfall (was a dict literal)
    if convergence is None:
        convergence = {'iters': [50, 50, 50, 50], 'tol': 1e-07}
    if image.pixeltype != 'float':
        image = image.clone('float')
    iters = convergence['iters']
    tol = convergence['tol']
    if mask is None:
        mask = get_mask(image)
    N4_CONVERGENCE_1 = '[%s, %.10f]' % ('x'.join([str(it) for it in iters]), tol)
    N4_SHRINK_FACTOR_1 = str(shrink_factor)
    if (not isinstance(spline_param, (list, tuple))) or (len(spline_param) == 1):
        if isinstance(spline_param, (list, tuple)):
            # bug fix: a length-1 list/tuple would crash the '%i' formatting
            spline_param = spline_param[0]
        N4_BSPLINE_PARAMS = '[%i]' % spline_param
    elif (isinstance(spline_param, (list, tuple))) and (len(spline_param) == image.dimension):
        N4_BSPLINE_PARAMS = '[%s]' % ('x'.join([str(sp) for sp in spline_param]))
    else:
        raise ValueError('Length of splineParam must either be 1 or dimensionality of image')
    if weight_mask is not None:
        if not isinstance(weight_mask, iio.ANTsImage):
            raise ValueError('Weight Image must be an antsImage')
    outimage = image.clone()
    kwargs = {'d': outimage.dimension, 'i': image, 'w': weight_mask,
              's': N4_SHRINK_FACTOR_1, 'c': N4_CONVERGENCE_1,
              'b': N4_BSPLINE_PARAMS, 'x': mask, 'o': outimage,
              'v': int(verbose)}
    processed_args = pargs._int_antsProcessArguments(kwargs)
    libfn = utils.get_lib_fn('N4BiasFieldCorrection')
    libfn(processed_args)
    return outimage
N4 Bias Field Correction
43,168
def abp_n4(image, intensity_truncation=(0.025, 0.975, 256), mask=None, usen3=False):
    """Truncate outlier intensities, then bias-correct with N4 (or N3 twice)."""
    if (not isinstance(intensity_truncation, (list, tuple))) or (len(intensity_truncation) != 3):
        raise ValueError('intensity_truncation must be list/tuple with 3 values')

    lo, hi, nbins = intensity_truncation
    truncated = iMath(image, 'TruncateIntensity', lo, hi, nbins)
    if usen3 == True:
        # legacy path: two N3 passes at decreasing downsample factors
        truncated = n3_bias_field_correction(truncated, 4)
        return n3_bias_field_correction(truncated, 2)
    return n4_bias_field_correction(truncated, mask)
Truncate outlier intensities and bias correct with the N4 algorithm .
43,169
def image_mutual_information(image1, image2):
    """Compute the mutual information between two float ANTsImages."""
    if (image1.pixeltype != 'float') or (image2.pixeltype != 'float'):
        raise ValueError('Both images must have float pixeltype')
    if image1.dimension != image2.dimension:
        raise ValueError('Both images must have same dimension')
    mi_fn = utils.get_lib_fn('antsImageMutualInformation%iD' % image1.dimension)
    return mi_fn(image1.pointer, image2.pointer)
Compute mutual information between two ANTsImage types
43,170
def get_mask(image, low_thresh=None, high_thresh=None, cleanup=2):
    """Threshold an image into a binary mask, optionally cleaning it up.

    low_thresh defaults to the image mean and high_thresh to the image max.
    With cleanup > 0 the mask is eroded, reduced to its largest connected
    component, dilated, and hole-filled. If that empties the mask, the
    threshold/cleanup is retried with progressively smaller cleanup values
    (the retry path skips the largest-component step).
    """
    cleanup = int(cleanup)
    if isinstance(image, iio.ANTsImage):
        if image.pixeltype != 'float':
            image = image.clone('float')
    if low_thresh is None:
        low_thresh = image.mean()
    if high_thresh is None:
        high_thresh = image.max()
    mask_image = threshold_image(image, low_thresh, high_thresh)
    if cleanup > 0:
        mask_image = iMath(mask_image, 'ME', cleanup)  # erode
        mask_image = iMath(mask_image, 'GetLargestComponent')
        mask_image = iMath(mask_image, 'MD', cleanup)  # dilate
        mask_image = iMath(mask_image, 'FillHoles').threshold_image(1, 2)
    # if cleanup removed every voxel (flat mask), back off and retry
    while ((mask_image.min() == mask_image.max()) and (cleanup > 0)):
        cleanup = cleanup - 1
        mask_image = threshold_image(image, low_thresh, high_thresh)
        if cleanup > 0:
            # NOTE(review): the retry path omits 'GetLargestComponent' --
            # confirm this asymmetry with upstream before changing it
            mask_image = iMath(mask_image, 'ME', cleanup)
            mask_image = iMath(mask_image, 'MD', cleanup)
            mask_image = iMath(mask_image, 'FillHoles').threshold_image(1, 2)
    return mask_image
Get a binary mask image from the given image after thresholding
43,171
def label_image_centroids(image, physical=False, convex=True, verbose=False):
    """Compute one centroid per positive label in a 3D label image.

    convex=True: centroid is the mean voxel coordinate of the label.
    convex=False: centroid is the label voxel with minimal mean distance to
    all other voxels of the same label (a medoid).

    Returns {'labels': sorted positive label values, 'vertices': N x 3 array}.

    NOTE(review): `physical` and `verbose` are accepted but never used here.
    NOTE(review): zcoords is built from np.arange(d[1]); for a non-cubic
    image this looks like it should be np.arange(d[2]) repeated d[0]*d[1]
    times -- confirm against upstream before relying on z centroids.
    NOTE(review): the coordinate arrays cycle x fastest (Fortran-style)
    while labels.flatten() is C-ordered -- verify the intended convention.
    NOTE(review): xc[i - 1] assumes labels are consecutive 1..N.
    """
    d = image.shape
    if len(d) != 3:
        raise ValueError('image must be 3 dimensions')
    xcoords = np.asarray(np.arange(d[0]).tolist() * (d[1] * d[2]))
    ycoords = np.asarray(np.repeat(np.arange(d[1]), d[0]).tolist() * d[2])
    zcoords = np.asarray(np.repeat(np.arange(d[1]), d[0] * d[2]))
    labels = image.numpy()
    mylabels = np.sort(np.unique(labels[labels > 0])).astype('int')
    n_labels = len(mylabels)
    xc = np.zeros(n_labels)
    yc = np.zeros(n_labels)
    zc = np.zeros(n_labels)
    if convex:
        for i in mylabels:
            idx = (labels == i).flatten()
            xc[i - 1] = np.mean(xcoords[idx])
            yc[i - 1] = np.mean(ycoords[idx])
            zc[i - 1] = np.mean(zcoords[idx])
    else:
        for i in mylabels:
            idx = (labels == i).flatten()
            xci = xcoords[idx]
            yci = ycoords[idx]
            zci = zcoords[idx]
            dist = np.zeros(len(xci))
            for j in range(len(xci)):
                # mean distance from voxel j to all voxels of this label
                dist[j] = np.mean(np.sqrt((xci[j] - xci) ** 2 + (yci[j] - yci) ** 2 + (zci[j] - zci) ** 2))
            mid = np.where(dist == np.min(dist))
            xc[i - 1] = xci[mid]
            yc[i - 1] = yci[mid]
            zc[i - 1] = zci[mid]
    centroids = np.vstack([xc, yc, zc]).T
    return {'labels': mylabels, 'vertices': centroids}
Converts a label image to coordinates summarizing their positions
43,172
def transform(self, X, y=None):
    """Produce multi-resolution copies of an ANTsImage at self.levels."""
    multires_fn = utils.get_lib_fn('multiResolutionAntsImage%s' % X._libsuffix)
    level_ptrs = multires_fn(X.pointer, self.levels)

    results = []
    for ptr in level_ptrs:
        level_img = iio.ANTsImage(pixeltype=X.pixeltype,
                                  dimension=X.dimension,
                                  components=X.components,
                                  pointer=ptr)
        if self.keep_shape:
            # resample back to the input grid so all outputs share its shape
            level_img = level_img.resample_image_to_target(X)
        results.append(level_img)
    return results
Generate a set of multi - resolution ANTsImage types
43,173
def transform(self, X, y=None):
    """Blur X with a gradient anisotropic diffusion filter using
    self.iters iterations and self.conductance."""
    blur_fn = utils.get_lib_fn('locallyBlurAntsImage%s' % X._libsuffix)
    blurred_ptr = blur_fn(X.pointer, self.iters, self.conductance)
    return iio.ANTsImage(pixeltype=X.pixeltype,
                         dimension=X.dimension,
                         components=X.components,
                         pointer=blurred_ptr)
Locally blur an image by applying a gradient anisotropic diffusion filter .
43,174
def get_data(name=None):
    """Return path(s) to bundled example data.

    With name=None, return all .nii.gz/.jpg/.csv files under data_path.
    Otherwise return the path whose basename (or basename + 'slice')
    matches `name`, raising ValueError if none does.
    """
    entries = os.listdir(data_path)
    if name is None:
        return [os.path.join(data_path, f) for f in entries
                if f.endswith(('.nii.gz', '.jpg', '.csv'))]
    datapath = None
    for fname in entries:
        stem = fname.split('.')[0]
        if (name == stem) or ((name + 'slice') == stem):
            datapath = os.path.join(data_path, fname)
    if datapath is None:
        raise ValueError('File doesnt exist. Options: ', os.listdir(data_path))
    return datapath
Get ANTsPy test data filename
43,175
def convolve_image(image, kernel_image, crop=True):
    """Convolve `image` with `kernel_image`.

    Both inputs must be ANTsImages. The work is done in float and the
    result is cast back to the input's original pixeltype. With crop=True
    the kernel is cropped to its mask and out-of-mask voxels are replaced
    by the mean of the in-mask voxels.
    """
    if not isinstance(image, iio.ANTsImage):
        raise ValueError('image must be ANTsImage type')
    if not isinstance(kernel_image, iio.ANTsImage):
        raise ValueError('kernel must be ANTsImage type')
    orig_ptype = image.pixeltype
    if image.pixeltype != 'float':
        image = image.clone('float')
    if kernel_image.pixeltype != 'float':
        kernel_image = kernel_image.clone('float')
    if crop:
        kernel_image_mask = utils.get_mask(kernel_image)
        kernel_image = utils.crop_image(kernel_image, kernel_image_mask)
        kernel_image_mask = utils.crop_image(kernel_image_mask, kernel_image_mask)
        # fill voxels outside the kernel mask with the in-mask mean
        kernel_image[kernel_image_mask == 0] = kernel_image[kernel_image_mask == 1].mean()
    libfn = utils.get_lib_fn('convolveImageF%i' % image.dimension)
    conv_itk_image = libfn(image.pointer, kernel_image.pointer)
    conv_ants_image = iio.ANTsImage(pixeltype=image.pixeltype, dimension=image.dimension,
                                    components=image.components, pointer=conv_itk_image)
    if orig_ptype != 'float':
        # restore the caller's original pixel type
        conv_ants_image = conv_ants_image.clone(orig_ptype)
    return conv_ants_image
Convolve one image with another
43,176
def ndimage_to_list(image):
    """Split an n-D ANTsImage into a list of (n-1)-D slices along the last axis.

    Each slice inherits the sub-origin, sub-spacing, and the upper-left
    (n-1)x(n-1) block of the direction matrix from the input image.
    """
    inpixeltype = image.pixeltype  # NOTE(review): unused locals kept as-is
    dimension = image.dimension
    components = 1
    imageShape = image.shape
    nSections = imageShape[dimension - 1]  # number of slices along last axis
    subdimension = dimension - 1
    suborigin = iio.get_origin(image)[0:subdimension]
    subspacing = iio.get_spacing(image)[0:subdimension]
    # upper-left (n-1)x(n-1) block of the direction matrix
    subdirection = np.eye(subdimension)
    for i in range(subdimension):
        subdirection[i, :] = iio.get_direction(image)[i, 0:subdimension]
    subdim = image.shape[0:subdimension]
    imagelist = []
    for i in range(nSections):
        img = utils.slice_image(image, axis=subdimension, idx=i)
        iio.set_spacing(img, subspacing)
        iio.set_origin(img, suborigin)
        iio.set_direction(img, subdirection)
        imagelist.append(img)
    return imagelist
Split a n dimensional ANTsImage into a list of n - 1 dimensional ANTsImages
43,177
def _int_antsProcessArguments(args):
    """Flatten a dict or list of arguments into the flat string list the
    compiled ANTs entry points expect.

    dict form: each key becomes '--key' ('-k' for single letters); values
    may be ANTsImages (converted to pointer strings), lists, or scalars.
    list form: elements are converted in order; None entries are skipped.
    """
    p_args = []
    if isinstance(args, dict):
        for argname, argval in args.items():
            if '-MULTINAME-' in argname:
                # strip the suffix used to allow repeated option names in a dict
                argname = argname[:argname.find('-MULTINAME-')]
            if argval is not None:
                if len(argname) > 1:
                    p_args.append('--%s' % argname)
                else:
                    p_args.append('-%s' % argname)
                if isinstance(argval, iio.ANTsImage):
                    p_args.append(_ptrstr(argval.pointer))
                elif isinstance(argval, list):
                    for av in argval:
                        if isinstance(av, iio.ANTsImage):
                            av = _ptrstr(av.pointer)
                        p_args.append(av)
                else:
                    p_args.append(str(argval))
    elif isinstance(args, list):
        for arg in args:
            # bug fix: a None entry previously re-appended the prior value
            # (or raised NameError when it came first); now it is skipped
            if arg is None:
                continue
            if isinstance(arg, iio.ANTsImage):
                p_args.append(_ptrstr(arg.pointer))
            else:
                p_args.append(str(arg))
    return p_args
Convert a dict or list of arguments into the flat string list expected by the compiled ANTs entry points. Needs better input validation.
43,178
def initialize_eigenanatomy(initmat, mask=None, initlabels=None, nreps=1, smoothing=0):
    """Build an initialization list for sparseDecom / sparseDecom2.

    Arguments
    ---------
    initmat : ANTsImage label image (converted to a one-row-per-label
        matrix over the mask) or a classes-by-voxels numpy matrix
    mask : optional mask selecting the voxels represented by initmat
    initlabels : explicit label values when initmat is an image
    nreps : number of repeats of each class in the output list
    smoothing : optional smoothing of per-label images before extraction

    Returns {'initlist': list of vector images, 'mask': mask image,
    'enames': one name per initlist entry}.
    """
    if isinstance(initmat, iio.ANTsImage):
        if mask is not None:
            selectvec = mask > 0
        else:
            selectvec = initmat > 0
        initmatvec = initmat[selectvec]
        if initlabels is None:
            ulabs = np.sort(np.unique(initmatvec))
            ulabs = ulabs[ulabs > 0]
        else:
            ulabs = initlabels
        nvox = len(initmatvec)
        temp = np.zeros((len(ulabs), nvox))
        for x in range(len(ulabs)):
            # binary image of this label (tolerance band for float labels)
            timg = utils.threshold_image(initmat, ulabs[x] - 1e-4, ulabs[x] + 1e-4)
            if smoothing > 0:
                timg = utils.smooth_image(timg, smoothing)
            temp[x, :] = timg[selectvec]
        initmat = temp
    nclasses = initmat.shape[0]
    classlabels = ['init%i' % i for i in range(nclasses)]
    initlist = []
    if mask is None:
        # dummy single-row mask matching the matrix layout
        maskmat = np.zeros(initmat.shape)
        maskmat[0, :] = 1
        mask = core.from_numpy(maskmat.astype('float32'))
    eanatnames = ['A'] * (nclasses * nreps)
    ct = 0
    for i in range(nclasses):
        vecimg = mask.clone('float')
        vecimg[mask == 1] = initmat[i, :]
        for _ in range(nreps):
            initlist.append(vecimg)
            # bug fix: the old index `ct + nr - 1` started at -1, clobbering
            # the last entry and misaligning names with initlist
            eanatnames[ct] = str(classlabels[i])
            ct += 1
    return {'initlist': initlist, 'mask': mask, 'enames': eanatnames}
InitializeEigenanatomy is a helper function to initialize sparseDecom and sparseDecom2 . Can be used to estimate sparseness parameters per eigenvector . The user then only chooses nvecs and optional regularization parameters .
43,179
def eig_seg(mask, img_list, apply_segmentation_to_images=False, cthresh=0, smooth=1):
    """Segment a mask by argmax over a list (or matrix) of images.

    At each voxel inside `mask`, the segmentation label is the (1-based)
    index of the image with the largest absolute value; voxels whose max is
    effectively zero stay 0. Optionally smooths each image first, removes
    small clusters afterwards, and masks the input images by their winning
    label.
    """
    maskvox = mask > 0
    maskseg = mask.clone()
    maskseg[maskvox] = 0
    if isinstance(img_list, np.ndarray):
        mydata = img_list
    elif isinstance(img_list, (tuple, list)):
        mydata = core.image_list_to_matrix(img_list, mask)
    if (smooth > 0):
        for i in range(mydata.shape[0]):
            # smooth each row as an image, then read it back into the matrix
            temp_img = core.make_image(mask, mydata[i, :], pixeltype='float')
            temp_img = utils.smooth_image(temp_img, smooth, sigma_in_physical_coordinates=True)
            mydata[i, :] = temp_img[mask >= 0.5]
    segids = np.argmax(np.abs(mydata), axis=0) + 1
    segmax = np.max(np.abs(mydata), axis=0)
    # zero out voxels whose winning value is numerically negligible
    maskseg[maskvox] = (segids * (segmax > 1e-09))
    if cthresh > 0:
        # NOTE(review): range(int(maskseg.max())) runs kk = 0..max-1, so the
        # top label is never cluster-thresholded -- confirm intended
        for kk in range(int(maskseg.max())):
            timg = utils.threshold_image(maskseg, kk, kk)
            timg = utils.label_clusters(timg, cthresh)
            timg = utils.threshold_image(timg, 1, 1e15) * float(kk)
            maskseg[maskseg == kk] = timg[maskseg == kk]
    if (apply_segmentation_to_images) and (not isinstance(img_list, np.ndarray)):
        # zero each input image outside the voxels it won (mutates img_list)
        for i in range(len(img_list)):
            img = img_list[i]
            img[maskseg != float(i)] = 0
            img_list[i] = img
    return maskseg
Segment a mask into regions based on the max value in an image list . At a given voxel the segmentation label will contain the index to the image that has the largest value . If the 3rd image has the greatest value the segmentation label will be 3 at that voxel .
43,180
def label_stats(image, label_image):
    """Compute per-label statistics of `image` over `label_image` regions."""
    intensity_img = image.clone('float')
    labels_img = label_image.clone('unsigned int')
    stats_fn = utils.get_lib_fn('labelStats%iD' % image.dimension)
    raw_stats = stats_fn(intensity_img.pointer, labels_img.pointer)
    return pd.DataFrame(raw_stats)
Get label statistics from image
43,181
def spacing(self):
    """Return the image's voxel spacing."""
    spacing_fn = utils.get_lib_fn('getSpacing%s' % self._libsuffix)
    return spacing_fn(self.pointer)
Get image spacing
43,182
def set_spacing(self, new_spacing):
    """Set the image's voxel spacing; expects one value per dimension."""
    if not isinstance(new_spacing, (tuple, list)):
        raise ValueError('arg must be tuple or list')
    if len(new_spacing) != self.dimension:
        raise ValueError('must give a spacing value for each dimension (%i)' % self.dimension)
    setter_fn = utils.get_lib_fn('setSpacing%s' % self._libsuffix)
    setter_fn(self.pointer, new_spacing)
Set image spacing
43,183
def origin(self):
    """Return the image's origin."""
    origin_fn = utils.get_lib_fn('getOrigin%s' % self._libsuffix)
    return origin_fn(self.pointer)
Get image origin
43,184
def set_origin(self, new_origin):
    """Set the image's origin; expects one value per dimension."""
    if not isinstance(new_origin, (tuple, list)):
        raise ValueError('arg must be tuple or list')
    if len(new_origin) != self.dimension:
        raise ValueError('must give a origin value for each dimension (%i)' % self.dimension)
    setter_fn = utils.get_lib_fn('setOrigin%s' % self._libsuffix)
    setter_fn(self.pointer, new_origin)
Set image origin
43,185
def direction(self):
    """Return the image's direction matrix."""
    direction_fn = utils.get_lib_fn('getDirection%s' % self._libsuffix)
    return direction_fn(self.pointer)
Get image direction
43,186
def set_direction(self, new_direction):
    """Set the image's direction matrix; accepts ndarray, tuple, or list."""
    if isinstance(new_direction, (tuple, list)):
        new_direction = np.asarray(new_direction)
    if not isinstance(new_direction, np.ndarray):
        raise ValueError('arg must be np.ndarray or tuple or list')
    if len(new_direction) != self.dimension:
        raise ValueError('must give a origin value for each dimension (%i)' % self.dimension)
    setter_fn = utils.get_lib_fn('setDirection%s' % self._libsuffix)
    setter_fn(self.pointer, new_direction)
Set image direction
43,187
def astype(self, dtype):
    """Cast this image to a numpy datatype, returning a clone."""
    if dtype not in _supported_dtypes:
        raise ValueError('Datatype %s not supported. Supported types are %s' % (dtype, _supported_dtypes))
    # map the numpy dtype name to the corresponding ITK pixel type
    return self.clone(_npy_to_itk_map[dtype])
Cast & clone an ANTsImage to a given numpy datatype .
43,188
def new_image_like(self, data):
    """Return a new ANTsImage with this image's header and `data` as voxels."""
    if not isinstance(data, np.ndarray):
        raise ValueError('data must be a numpy array')
    if self.has_components:
        mismatch = (data.shape[-1] != self.components) or (data.shape[:-1] != self.shape)
        reported_shape = data.shape[1:]
    else:
        mismatch = data.shape != self.shape
        reported_shape = data.shape
    if mismatch:
        raise ValueError('given array shape (%s) and image array shape (%s) do not match' % (reported_shape, self.shape))
    return iio2.from_numpy(data, origin=self.origin, spacing=self.spacing,
                           direction=self.direction, has_components=self.has_components)
Create a new ANTsImage with the same header information but with a new image array .
43,189
def to_file(self, filename):
    """Write this image to `filename` ('~' is expanded)."""
    target = os.path.expanduser(filename)
    write_fn = utils.get_lib_fn('toFile%s' % self._libsuffix)
    write_fn(self.pointer, target)
Write the ANTsImage to file
43,190
def apply(self, fn):
    """Apply `fn` to this image's array and wrap the result in a new image."""
    transformed = fn(self.numpy())
    return self.new_image_like(transformed)
Apply an arbitrary function to ANTsImage .
43,191
def sum(self, axis=None, keepdims=False):
    """Sum of voxel values, optionally along an axis."""
    values = self.numpy()
    return np.sum(values, axis=axis, keepdims=keepdims)
Return sum along specified axis
43,192
def range(self, axis=None):
    """Return a (min, max) tuple of voxel values, optionally along an axis."""
    lo = self.min(axis=axis)
    hi = self.max(axis=axis)
    return (lo, hi)
Return range tuple along specified axis
43,193
def argrange(self, axis=None):
    """Return (argmin, argmax); stacked column-wise when an axis is given."""
    lo_idx = self.argmin(axis=axis)
    hi_idx = self.argmax(axis=axis)
    if axis is None:
        return (lo_idx, hi_idx)
    return np.stack([lo_idx, hi_idx]).T
Return argrange along specified axis
43,194
def unique(self, sort=False):
    """Return the unique voxel values; optionally sorted."""
    distinct = np.unique(self.numpy())
    return np.sort(distinct) if sort else distinct
Return unique set of values in image
43,195
def uniquekeys(self, metakey=None):
    """Return unique values for `metakey`, or all of them when it is None."""
    if metakey is None:
        return self._uniquekeys
    if metakey not in self.metakeys():
        raise ValueError('metakey %s does not exist' % metakey)
    return self._uniquekeys[metakey]
Get keys for a given metakey
43,196
def label_clusters(image, min_cluster_size=50, min_thresh=1e-6, max_thresh=1, fully_connected=False):
    """Uniquely label (1..N) connected components with more than
    min_cluster_size voxels in the thresholded image."""
    clust = threshold_image(image, min_thresh, max_thresh)
    arglist = [image.dimension, clust, clust, min_cluster_size, int(fully_connected)]
    processed = _int_antsProcessArguments(arglist)
    cluster_fn = utils.get_lib_fn('LabelClustersUniquely')
    cluster_fn(processed)
    return clust
This will give a unique ID to each connected component 1 through N of size > min_cluster_size
43,197
def make_points_image(pts, mask, radius=5):
    """Create a label image from physical-space points.

    Each row of `pts` is one physical point; point r is painted as label
    r+1 at its voxel index within `mask` (points outside the image are
    skipped), then the labels are grayscale-dilated by `radius`.
    """
    powers_lblimg = mask * 0
    npts = len(pts)
    dim = mask.dimension
    if pts.shape[1] != dim:
        raise ValueError('points dimensionality should match that of images')
    for r in range(npts):
        pt = pts[r, :]
        idx = tio.transform_physical_point_to_index(mask, pt.tolist()).astype(int)
        # bug fix: an index equal to the image size is out of bounds, so the
        # upper-bound check must be strict (was `idx <= mask.shape`)
        in_image = np.all(idx >= 0) and np.all(idx < mask.shape)
        if in_image:
            if dim == 3:
                powers_lblimg[idx[0], idx[1], idx[2]] = r + 1
            elif dim == 2:
                powers_lblimg[idx[0], idx[1]] = r + 1
    return utils.morphology(powers_lblimg, 'dilate', radius, 'grayscale')
Create label image from physical space points
43,198
def weingarten_image_curvature(image, sigma=1.0, opt='mean'):
    """Estimate image curvature using the Weingarten map.

    opt : 'mean' (default), 'gaussian', or 'characterize'
    2D inputs are embedded into a 10-slice 3D stack (the 2D data copied
    into slices 1..6) before calling the compiled routine, and slice 4 of
    the 3D result is returned with the input's header information.
    """
    if image.dimension not in {2, 3}:
        raise ValueError('image must be 2D or 3D')
    if image.dimension == 2:
        d = image.shape
        temp = np.zeros(list(d) + [10])
        for k in range(1, 7):
            # replicate the 2D data into interior slices of the stack
            voxvals = image[:d[0], :d[1]]
            temp[:d[0], :d[1], k] = voxvals
        temp = core.from_numpy(temp)
        myspc = image.spacing
        # third-axis spacing: reuse the smallest in-plane spacing
        myspc = list(myspc) + [min(myspc)]
        temp.set_spacing(myspc)
        temp = temp.clone('float')
    else:
        temp = image.clone('float')
    # backend option codes (mapping inferred from defaults): 0 for 'mean',
    # 6 for 'gaussian', 5 for 'characterize'; unknown opts fall back to 0
    optnum = 0
    if opt == 'gaussian':
        optnum = 6
    if opt == 'characterize':
        optnum = 5
    libfn = utils.get_lib_fn('weingartenImageCurvature')
    mykout = libfn(temp.pointer, sigma, optnum)
    mykout = iio.ANTsImage(pixeltype=image.pixeltype, dimension=3,
                           components=image.components, pointer=mykout)
    if image.dimension == 3:
        return mykout
    elif image.dimension == 2:
        # extract the central slice and restore the 2D header
        subarr = core.from_numpy(mykout.numpy()[:, :, 4])
        return core.copy_image_info(image, subarr)
Uses the weingarten map to estimate image mean or gaussian curvature
43,199
def from_numpy(data, origin=None, spacing=None, direction=None,
               has_components=False, is_rgb=False):
    """Create an ANTsImage from a numpy array (float64 is cast to float32)."""
    if data.dtype.name == 'float64':
        data = data.astype('float32')
    # transpose + copy so the backend receives contiguous memory
    return _from_numpy(data.T.copy(), origin, spacing, direction,
                       has_components, is_rgb)
Create an ANTsImage object from a numpy array