idx
int64
0
63k
question
stringlengths
61
4.03k
target
stringlengths
6
1.23k
46,700
def register_neurite_feature(name, func):
    """Register a feature to be applied to neurites.

    Raises:
        NeuroMError: if *name* is already registered in NEURITEFEATURES.

    NOTE(review): the wrapper is stored in NEURONFEATURES although the
    collision check is against NEURITEFEATURES — confirm this asymmetry
    is intentional.
    """
    if name in NEURITEFEATURES:
        raise NeuroMError('Attempt to hide registered feature %s' % name)

    def _applied(neurites, neurite_type=_ntype.all):
        # Map func over every neurite of the requested type.
        return [func(n) for n in _ineurites(neurites, filt=_is_type(neurite_type))]

    NEURONFEATURES[name] = _applied
Register a feature to be applied to neurites
46,701
def get(feature, obj, **kwargs):
    """Obtain a feature from a set of morphology objects.

    Neurite features take precedence over neuron features when the
    same name exists in both registries.
    """
    if feature in NEURITEFEATURES:
        feature_fun = NEURITEFEATURES[feature]
    else:
        feature_fun = NEURONFEATURES[feature]
    return _np.array(list(feature_fun(obj, **kwargs)))
Obtain a feature from a set of morphology objects
46,702
def _get_doc():
    """Get a description of all the known available features."""
    def docstring_of(func):
        # Indent the feature's own docstring below its bullet entry.
        text = ':\n'
        if func.__doc__:
            text += _indent(func.__doc__, 2)
        return text

    lines = ['\nNeurite features (neurite, neuron, neuron population):']
    for feature, func in sorted(NEURITEFEATURES.items()):
        lines.append(_INDENT + '- ' + feature + docstring_of(func))
    lines.append('\nNeuron features (neuron, neuron population):')
    for feature, func in sorted(NEURONFEATURES.items()):
        lines.append(_INDENT + '- ' + feature + docstring_of(func))
    return '\n'.join(lines)
Get a description of all the known available features
46,703
def read(filename, remove_duplicates=False, data_wrapper=DataWrapper):
    """Read an HDF5 morphology file and return data wrapped by *data_wrapper*.

    Args:
        filename: path to an H5V1 or H5V2 morphology file.
        remove_duplicates: strip duplicated section first points when True.
        data_wrapper: wrapper class for the resulting raw data block.
    """
    with h5py.File(filename, mode='r') as h5file:
        version = get_version(h5file)
        if version == 'H5V1':
            points, groups = _unpack_v1(h5file)
        elif version == 'H5V2':
            # Prefer the most processed stage present in the file.
            stage = next(s for s in ('repaired', 'unraveled', 'raw')
                         if s in h5file['neuron1'])
            points, groups = _unpack_v2(h5file, stage=stage)

    if remove_duplicates:
        points, groups = _remove_duplicate_points(points, groups)

    neuron_builder = BlockNeuronBuilder()
    # The file stores diameters; the builder expects radii.
    points[:, POINT_DIAMETER] /= 2

    # Each group row pairs with the start of the next group
    # (or the end of the points array for the last group).
    bounds = zip_longest(groups, groups[1:, GPFIRST], fillvalue=len(points))
    for section_id, ((start, section_type, parent_id), end) in enumerate(bounds):
        neuron_builder.add_section(section_id, int(parent_id),
                                   int(section_type), points[start:end])

    return neuron_builder.get_datawrapper(version, data_wrapper=data_wrapper)
Read a file and return the data wrapped in a data_wrapper
46,704
def _remove_duplicate_points(points, groups):
    """Remove duplicated section first points from a points/groups pair.

    A section whose parent is not the soma repeats its parent's last
    point as its own first point; those repeats are dropped and the
    group start offsets are shifted accordingly.
    """
    group_initial_ids = groups[:, GPFIRST]

    shift = np.zeros(len(group_initial_ids))  # per-group start-offset correction
    duplicate_ids = []

    for group_pos, group in enumerate(groups):
        first_id, section_type, parent_id = group[GPFIRST], group[GTYPE], group[GPID]
        # Roots (pid == -1), type-1 sections and children of type-1
        # sections keep their first point.
        if parent_id != -1 and section_type != 1 and groups[parent_id][GTYPE] != 1:
            duplicate_ids.append(first_id)
            shift[group_pos + 1:] += 1

    groups[:, GPFIRST] = groups[:, GPFIRST] - shift
    points = np.delete(points, duplicate_ids, axis=0)
    return points, groups
Removes the duplicate points from the beginning of a section if they are present in points - groups representation .
46,705
def _unpack_v1 ( h5file ) : points = np . array ( h5file [ 'points' ] ) groups = np . array ( h5file [ 'structure' ] ) return points , groups
Unpack groups from HDF5 v1 file
46,706
def _unpack_v2 ( h5file , stage ) : points = np . array ( h5file [ 'neuron1/%s/points' % stage ] ) groups_stage = stage if stage != 'unraveled' else 'raw' groups = np . array ( h5file [ 'neuron1/structure/%s' % groups_stage ] ) stypes = np . array ( h5file [ 'neuron1/structure/sectiontype' ] ) groups = np . hstack ( [ groups , stypes ] ) groups [ : , [ 1 , 2 ] ] = groups [ : , [ 2 , 1 ] ] return points , groups
Unpack groups from HDF5 v2 file
46,707
def fit_results_to_dict(fit_results, min_bound=None, max_bound=None):
    """Create a JSON-comparable dict from a FitResults object.

    Args:
        fit_results: object with `type` (scipy distribution name) and
            `params` attributes.
        min_bound, max_bound: optional bounds, only added when the
            distribution does not define its own 'min'/'max'.
    """
    type_map = {'norm': 'normal', 'expon': 'exponential', 'uniform': 'uniform'}
    param_map = {
        # scipy params are (loc, scale): uniform is on [loc, loc + scale].
        'uniform': lambda p: [('min', p[0]), ('max', p[0] + p[1])],
        'norm': lambda p: [('mu', p[0]), ('sigma', p[1])],
        # The exponential rate is the inverse of the scale.
        'expon': lambda p: [('lambda', 1.0 / p[1])],
    }

    result = OrderedDict({'type': type_map[fit_results.type]})
    result.update(param_map[fit_results.type](fit_results.params))

    if min_bound is not None and 'min' not in result:
        result['min'] = min_bound
    if max_bound is not None and 'max' not in result:
        result['max'] = max_bound

    return result
Create a JSON - comparable dict from a FitResults object
46,708
def fit(data, distribution='norm'):
    """Calculate the parameters of a fit of a distribution to a data set.

    Returns a FitResults holding the fitted parameters and the
    Kolmogorov-Smirnov test outcome.
    """
    fitted_params = getattr(_st, distribution).fit(data)
    return FitResults(fitted_params,
                      _st.kstest(data, distribution, fitted_params),
                      distribution)
Calculate the parameters of a fit of a distribution to a data set
46,709
def optimal_distribution(data, distr_to_check=('norm', 'expon', 'uniform')):
    """Fit each candidate distribution and return the best fit.

    The winner is the fit with the minimal KS distance (first entry of
    the fit's `errs`).
    """
    return min((fit(data, d) for d in distr_to_check),
               key=lambda result: result.errs[0])
Calculate the parameters of a fit of different distributions to a data set and return the distribution with the minimal KS distance.
46,710
def scalar_stats(data, functions=('min', 'max', 'mean', 'std')):
    """Calculate stats from the given numpy function names.

    Returns a dict mapping each function name to its value over *data*.
    """
    return {func: getattr(np, func)(data) for func in functions}
Calculate the stats from the given numpy functions
46,711
def total_score(paired_dats, p=2, test=StatTests.ks):
    """p-norm of the statistical-test distances over all paired datasets."""
    distances = np.array([compare_two(first, second, test=test).dist
                          for first, second in paired_dats])
    return np.linalg.norm(distances, p)
Calculates the p - norm of the distances that have been calculated from the statistical test that has been applied on all the paired datasets .
46,712
def iter_neurites(obj, mapfun=None, filt=None, neurite_order=NeuriteIter.FileOrder):
    """Iterate over the neurites of a neurite, neuron or neuron population.

    Args:
        obj: a Neurite, an object with a `neurites` attribute, or an
            iterable of neurites.
        mapfun: optional callable applied to each yielded neurite.
        filt: optional predicate selecting neurites.
        neurite_order: NeuriteIter.NRN sorts neurites by NRN_ORDER.
    """
    if isinstance(obj, Neurite):
        neurites = (obj,)
    elif hasattr(obj, 'neurites'):
        neurites = obj.neurites
    else:
        neurites = obj

    if neurite_order == NeuriteIter.NRN:
        # Unknown types sort after every known NRN_ORDER entry.
        last_position = max(NRN_ORDER.values()) + 1
        neurites = sorted(neurites,
                          key=lambda n: NRN_ORDER.get(n.type, last_position))

    iterator = iter(neurites) if filt is None else filter(filt, neurites)
    return iterator if mapfun is None else map(mapfun, iterator)
Iterator to a neurite, neuron or neuron population
46,713
def iter_sections(neurites, iterator_type=Tree.ipreorder, neurite_filter=None,
                  neurite_order=NeuriteIter.FileOrder):
    """Iterate over the sections in a neurite, neuron or neuron population."""
    selected = iter_neurites(neurites, filt=neurite_filter,
                             neurite_order=neurite_order)
    return chain.from_iterable(iterator_type(neurite.root_node)
                               for neurite in selected)
Iterator to the sections in a neurite, neuron or neuron population.
46,714
def iter_segments(obj, neurite_filter=None, neurite_order=NeuriteIter.FileOrder):
    """Return an iterator to the segments in a collection of neurites.

    A segment is a pair of consecutive points within a section.
    """
    if isinstance(obj, Section):
        sections = iter((obj,))
    else:
        sections = iter(iter_sections(obj, neurite_filter=neurite_filter,
                                      neurite_order=neurite_order))
    return chain.from_iterable(zip(sec.points[:-1], sec.points[1:])
                               for sec in sections)
Return an iterator to the segments in a collection of neurites
46,715
def graft_neuron(root_section):
    """Return a neuron whose single neurite starts at *root_section*.

    The soma is built from the section's first point only.
    """
    assert isinstance(root_section, Section)
    soma = Soma(root_section.points[:1])
    return Neuron(soma=soma, neurites=[Neurite(root_section)])
Returns a neuron starting at root_section
46,716
def points(self):
    """Return an unordered array with all the points in this neurite."""
    # Sections repeat their parent's last point, so each section
    # contributes points[1:] and the root's very first point is
    # re-inserted once at the front.
    collected = [pt
                 for section in self.root_node.ipreorder()
                 for pt in section.points[1:, COLS.XYZR]]
    collected.insert(0, self.root_node.points[0][COLS.XYZR])
    return np.array(collected)
Return unordered array with all the points in this neurite
46,717
def transform(self, trans):
    """Return a copy of this neurite with a 3D transformation applied."""
    transformed = deepcopy(self)
    for section in transformed.iter_sections():
        section.points[:, 0:3] = trans(section.points[:, 0:3])
    return transformed
Return a copy of this neurite with a 3D transformation applied
46,718
def iter_sections(self, order=Tree.ipreorder, neurite_order=NeuriteIter.FileOrder):
    """Iterate over this object's section nodes in the given order."""
    return iter_sections(self, iterator_type=order, neurite_order=neurite_order)
iteration over section nodes
46,719
def eval_stats(values, mode):
    """Extract a summary statistic from an array of values.

    'raw' returns the values as a list; 'total' is an alias for 'sum';
    any other mode is looked up as a numpy function. Returns None when
    the numpy call rejects the input.
    """
    if mode == 'raw':
        return values.tolist()
    if mode == 'total':
        mode = 'sum'
    try:
        return getattr(np, mode)(values, axis=0)
    except ValueError:
        # e.g. ragged input numpy cannot reduce
        return None
Extract a summary statistic from an array or list of values
46,720
def _stat_name ( feat_name , stat_mode ) : if feat_name [ - 1 ] == 's' : feat_name = feat_name [ : - 1 ] if feat_name == 'soma_radii' : feat_name = 'soma_radius' if stat_mode == 'raw' : return feat_name return '%s_%s' % ( stat_mode , feat_name )
Set stat name based on feature name and stat mode
46,721
def extract_stats(neurons, config):
    """Extract stats from neurons according to a config dictionary.

    config['neurite'] maps feature names to stat modes, computed per
    neurite type from config['neurite_type']; config['neuron'] maps
    neuron-level feature names to stat modes.
    """
    stats = defaultdict(dict)

    for feature_name, modes in config['neurite'].items():
        for neurite_type in config['neurite_type']:
            neurite_type = _NEURITE_MAP[neurite_type]
            for mode in modes:
                stat_name = _stat_name(feature_name, mode)
                stat = eval_stats(nm.get(feature_name, neurons,
                                         neurite_type=neurite_type), mode)
                if stat is None or not stat.shape:
                    stats[neurite_type.name][stat_name] = stat
                else:
                    # 3-vector stats are split into per-axis entries.
                    assert stat.shape in ((3, ), ), \
                        'Statistic must create a 1x3 result'
                    for i, axis in enumerate('XYZ'):
                        stats[neurite_type.name][stat_name + '_' + axis] = stat[i]

    for feature_name, modes in config['neuron'].items():
        for mode in modes:
            stat_name = _stat_name(feature_name, mode)
            stats[stat_name] = eval_stats(nm.get(feature_name, neurons), mode)

    return stats
Extract stats from neurons
46,722
def get_header(results):
    """Extract headers using the first value in *results* as the template.

    Nested dict values contribute 'outer:inner' compound headers.
    """
    header = ['name']
    template = next(iter(results.values()))
    for key, value in template.items():
        if isinstance(value, dict):
            header.extend('%s:%s' % (key, metric) for metric in value)
        else:
            header.append(key)
    return header
Extracts the headers using the first value in the dict as the template
46,723
def generate_flattened_dict(headers, results):
    """Yield one flat row per entry in *results*, ordered by *headers*."""
    for name, values in results.items():
        row = []
        for header in headers:
            if header == 'name':
                row.append(name)
            elif ':' in header:
                # Compound header: look up inside the nested dict.
                neurite_type, metric = header.split(':')
                row.append(values[neurite_type][metric])
            else:
                row.append(values[header])
        yield row
extract from results the fields in the headers list
46,724
def add_child(self, tree):
    """Attach *tree* as a child of this node and return it."""
    tree.parent = self
    self.children.append(tree)
    return tree
Add a child to the list of this tree's children
46,725
def ipreorder(self):
    """Depth-first pre-order iteration of tree nodes."""
    # Explicit stack; children are pushed reversed so the leftmost
    # child is visited first.
    stack = [self]
    while stack:
        node = stack.pop()
        stack.extend(reversed(node.children))
        yield node
Depth - first pre - order iteration of tree nodes
46,726
def ipostorder(self):
    """Depth-first post-order iteration of tree nodes."""
    stack = [self]
    visited = set()
    while stack:
        node = stack[-1]
        if node not in visited:
            # First visit: push children, yield later on the way back up.
            visited.add(node)
            stack.extend(reversed(node.children))
        else:
            stack.pop()
            yield node
Depth - first post - order iteration of tree nodes
46,727
def deprecated(fun_name=None, msg=""):
    """Issue a deprecation warning whenever the decorated function is called.

    Args:
        fun_name: name shown in the warning; defaults to the function's
            own __name__.
        msg: extra text appended to the warning.
    """
    def _deprecated(fun):
        @wraps(fun)
        def _wrapper(*args, **kwargs):
            shown_name = fun.__name__ if fun_name is None else fun_name
            _warn_deprecated('Call to deprecated function %s. %s'
                             % (shown_name, msg))
            return fun(*args, **kwargs)
        return _wrapper
    return _deprecated
Issue a deprecation warning for a function
46,728
def check_wrapper(fun):
    """Decorate a checking function so its result carries a readable title."""
    @wraps(fun)
    def _wrapper(*args, **kwargs):
        result = fun(*args, **kwargs)
        # Derive the title from the function name: 'has_axon' -> 'Has axon'.
        result.title = fun.__name__.replace('_', ' ').capitalize()
        return result
    return _wrapper
Decorate a checking function
46,729
def run(self, path):
    """Test a bunch of files and return a summary JSON report."""
    separator = '=' * 40
    summary = {}
    all_passed = True
    for morph_file in utils.get_files_by_path(path):
        L.info(separator)
        status, file_summary = self._check_file(morph_file)
        all_passed &= status
        if file_summary is not None:
            summary.update(file_summary)
    L.info(separator)
    return {'files': summary,
            'STATUS': 'PASS' if all_passed else 'FAIL'}
Test a bunch of files and return a summary JSON report
46,730
def _do_check(self, obj, check_module, check_str):
    """Run the check named *check_str* from *check_module* on *obj*.

    Extra arguments configured under options[check_str] are forwarded:
    a list is splatted, a scalar is passed as a single argument.
    """
    check_fun = check_wrapper(getattr(check_module, check_str))
    opts = self._config['options']
    if check_str in opts:
        fargs = opts[check_str]
        if isinstance(fargs, list):
            out = check_fun(obj, *fargs)
        else:
            out = check_fun(obj, fargs)
    else:
        out = check_fun(obj)

    try:
        if out.info:
            L.debug('%s: %d failing ids detected: %s',
                    out.title, len(out.info), out.info)
    except TypeError:
        # out.info may not support truth-testing / len
        pass

    return out
Run a check function on obj
46,731
def _check_loop(self, obj, check_mod_str):
    """Run all configured checks from one check module against *obj*."""
    check_module = self._check_modules[check_mod_str]
    summary = OrderedDict()
    result = True
    for check in self._config['checks'][check_mod_str]:
        outcome = self._do_check(obj, check_module, check)
        summary[outcome.title] = outcome.status
        result &= outcome.status
    return result, summary
Run all the checks in a check_module
46,732
def _check_file(self, f):
    """Run structural and neuron checks on a single morphology file.

    Returns (overall status, {filename: per-check summary}).
    """
    L.info('File: %s', f)

    full_result = True
    full_summary = OrderedDict()

    try:
        data = load_data(f)
    except Exception as e:  # pylint: disable=broad-except
        L.error('Failed to load data... skipping tests for this file')
        L.error(e.args)
        return False, {f: OrderedDict([('ALL', False)])}

    try:
        result, summary = self._check_loop(data, 'structural_checks')
        full_result &= result
        full_summary.update(summary)

        nrn = fst_core.FstNeuron(data)
        result, summary = self._check_loop(nrn, 'neuron_checks')
        full_result &= result
        full_summary.update(summary)
    except Exception as e:  # pylint: disable=broad-except
        L.error('Check failed: %s', str(type(e)) + str(e.args))
        full_result = False

    full_summary['ALL'] = full_result
    for msg, status in full_summary.items():
        self._log_msg(msg, status)

    return full_result, {f: full_summary}
Run tests on a morphology file
46,733
def _log_msg(self, msg, ok):
    """Log a check outcome at INFO (pass) or ERROR (fail) level."""
    if self._config['color']:
        green, red, end = '\033[92m', '\033[91m', '\033[0m'
    else:
        green = red = end = ''
    L.log(logging.INFO if ok else logging.ERROR,
          '%35s %s' + end, msg,
          green + 'PASS' if ok else red + 'FAIL')
Helper to log message to the right level
46,734
def _sanitize_config ( config ) : if 'checks' in config : checks = config [ 'checks' ] if 'structural_checks' not in checks : checks [ 'structural_checks' ] = [ ] if 'neuron_checks' not in checks : checks [ 'neuron_checks' ] = [ ] else : raise ConfigError ( 'Need to have "checks" in the config' ) if 'options' not in config : L . debug ( 'Using default options' ) config [ 'options' ] = { } if 'color' not in config : config [ 'color' ] = False return config
check that the config has the correct keys; add missing keys if necessary
46,735
def read(filename, data_wrapper=DataWrapper):
    """Read an SWC file and return wrapped raw data.

    Single-row files are reshaped to 2D so column selection works.
    """
    data = np.loadtxt(filename)
    if len(np.shape(data)) == 1:
        data = np.reshape(data, (1, -1))
    # Reorder columns into the internal (X, Y, Z, R, TYPE, ID, P) layout.
    data = data[:, [X, Y, Z, R, TYPE, ID, P]]
    return data_wrapper(data, 'SWC', None)
Read an SWC file and return the data in the internal format.
46,736
def _merge_sections ( sec_a , sec_b ) : sec_b . ids = list ( sec_a . ids ) + list ( sec_b . ids [ 1 : ] ) sec_b . ntype = sec_a . ntype sec_b . pid = sec_a . pid sec_a . ids = [ ] sec_a . pid = - 1 sec_a . ntype = 0
Merge two sections
46,737
def _section_end_points(structure_block, id_map):
    """Get the set of section end-point indices.

    End points are soma points that parent neurite points, plus every
    point whose child count differs from one (branch or terminal).
    """
    soma_mask = structure_block[:, TYPE] == POINT_TYPE.SOMA
    soma_ids = structure_block[soma_mask, ID]
    neurite_rows = structure_block[~soma_mask, :]

    # Soma points that are parents of neurite points end their section.
    soma_end_pts = set(id_map[id_]
                       for id_ in soma_ids[np.in1d(soma_ids, neurite_rows[:, PID])])

    n_children = defaultdict(int)
    for row in structure_block:
        n_children[row[PID]] += 1

    end_pts = set(i for i, row in enumerate(structure_block)
                  if n_children[row[ID]] != 1)

    return end_pts.union(soma_end_pts)
Get the section end - points
46,738
def _extract_sections(data_block):
    """Make a list of sections from an SWC-style data wrapper block.

    Walks the structure rows, starting a new section at gaps and at
    recorded section end-points, then stitches gap sections onto their
    parents.
    """
    # FIX: np.int was removed in NumPy 1.24; the builtin int is the
    # correct (and equivalent) dtype here.
    structure_block = data_block[:, COLS.TYPE:COLS.COL_COUNT].astype(int)

    # SWC ids may be arbitrary; map them to row positions.
    id_map = {-1: -1}
    for i, row in enumerate(structure_block):
        id_map[row[ID]] = i

    sec_end_pts = _section_end_points(structure_block, id_map)

    gap_sections = set()
    sections = []

    def new_section():
        # Append and return a fresh, empty section.
        sections.append(DataBlockSection())
        return sections[-1]

    curr_section = new_section()
    parent_section = {-1: -1}

    for row in structure_block:
        row_id = id_map[row[ID]]
        parent_id = id_map[row[PID]]
        if not curr_section.ids:
            # A section is seeded with its parent point id.
            curr_section.ids.append(parent_id)
            curr_section.ntype = row[TYPE]
        gap = parent_id != curr_section.ids[-1]
        if gap:
            sec_end_pts.add(row_id)
        else:
            curr_section.ids.append(row_id)
        if row_id in sec_end_pts:
            parent_section[curr_section.ids[-1]] = len(sections) - 1
            if gap:
                # Start a detached section; remember the previous one as a gap.
                curr_section = new_section()
                curr_section.ids.extend((parent_id, row_id))
                curr_section.ntype = row[TYPE]
                gap_sections.add(len(sections) - 2)
            elif row_id != len(data_block) - 1:
                curr_section = new_section()

    for sec in sections:
        # Resolve each section's parent from its first point.
        if sec.ids:
            sec.pid = parent_section[sec.ids[0]]
        # Sections following a gap are merged back onto their parent.
        if sec.pid in gap_sections:
            _merge_sections(sections[sec.pid], sec)

    return sections
Make a list of sections from an SWC - style data wrapper block
46,739
def neurite_root_section_ids(self):
    """Get the section IDs of the initial neurite sections.

    These are the non-soma sections whose parent section is the soma.
    """
    secs = self.sections
    return [i for i, sec in enumerate(secs)
            if sec.pid > -1 and (secs[sec.pid].ntype == POINT_TYPE.SOMA
                                 and sec.ntype != POINT_TYPE.SOMA)]
Get the section IDs of the initial neurite sections
46,740
def soma_points(self):
    """Get the rows of the data block that belong to the soma."""
    block = self.data_block
    return block[block[:, COLS.TYPE] == POINT_TYPE.SOMA]
Get the soma points
46,741
def add_section(self, id_, parent_id, section_type, points):
    """Add a section to the builder.

    Raises:
        AssertionError: if *id_* was already added.
    """
    assert id_ not in self.sections, 'id %s already exists in sections' % id_
    self.sections[id_] = BlockNeuronBuilder.BlockSection(parent_id,
                                                         section_type,
                                                         points)
add a section
46,742
def _make_datablock(self):
    """Make a data block and a section list as required by DataWrapper.

    Sections are laid out contiguously in id order; every point's
    parent defaults to the previous row, and each section's first
    point is re-parented onto the last row of its parent section.
    """
    section_ids = sorted(self.sections)

    # Map each section id to the row index of its final point.
    id_to_insert_id = {}
    row_count = 0
    for section_id in section_ids:
        row_count += len(self.sections[section_id].points)
        id_to_insert_id[section_id] = row_count - 1

    # FIX: np.float was removed in NumPy 1.24; the builtin float is the
    # correct (and equivalent) dtype here.
    datablock = np.empty((row_count, COLS.COL_COUNT), dtype=float)
    datablock[:, COLS.ID] = np.arange(len(datablock))
    datablock[:, COLS.P] = datablock[:, COLS.ID] - 1

    sections = []
    insert_index = 0
    for id_ in section_ids:
        sec = self.sections[id_]
        points, section_type, parent_id = sec.points, sec.section_type, sec.parent_id
        idx = slice(insert_index, insert_index + len(points))
        datablock[idx, COLS.XYZR] = points
        datablock[idx, COLS.TYPE] = section_type
        # First row of the section points at the end of its parent section.
        datablock[idx.start, COLS.P] = id_to_insert_id.get(parent_id, ROOT_ID)
        sections.append(DataBlockSection(idx, section_type, parent_id))
        insert_index = idx.stop

    return datablock, sections
Make a data_block and sections list as required by DataWrapper
46,743
def _check_consistency(self):
    """Look for obvious errors in the collected sections.

    Currently only logs when the soma count differs from one.
    """
    type_count = defaultdict(int)
    for _, section in sorted(self.sections.items()):
        type_count[section.section_type] += 1

    if type_count[POINT_TYPE.SOMA] != 1:
        L.info('Have %d somas, expected 1', type_count[POINT_TYPE.SOMA])
see if the sections have obvious errors
46,744
def get_datawrapper(self, file_format='BlockNeuronBuilder', data_wrapper=DataWrapper):
    """Build and return a DataWrapper from the accumulated sections."""
    self._check_consistency()
    datablock, sections = self._make_datablock()
    return data_wrapper(datablock, file_format, sections)
returns a DataWrapper
46,745
def _is_morphology_file ( filepath ) : return ( os . path . isfile ( filepath ) and os . path . splitext ( filepath ) [ 1 ] . lower ( ) in ( '.swc' , '.h5' , '.asc' ) )
Check if filepath is a file with one of morphology file extensions .
46,746
def get_morph_files(directory):
    """Get a list of all morphology files in *directory*."""
    candidates = (os.path.join(directory, entry) for entry in os.listdir(directory))
    return [path for path in candidates if _is_morphology_file(path)]
Get a list of all morphology files in a directory
46,747
def get_files_by_path(path):
    """Get a file or set of files from a file path.

    Raises:
        IOError: if *path* is neither a file nor a directory.
    """
    if os.path.isfile(path):
        return [path]
    if os.path.isdir(path):
        return get_morph_files(path)
    raise IOError('Invalid data path %s' % path)
Get a file or set of files from a file path
46,748
def load_neuron(handle, reader=None):
    """Build section trees from an h5 or swc file (or open handle)."""
    rdw = load_data(handle, reader)
    # Only a real file path yields a usable neuron name.
    if isinstance(handle, StringType):
        name = os.path.splitext(os.path.basename(handle))[0]
    else:
        name = None
    return FstNeuron(rdw, name)
Build section trees from an h5 or swc file
46,749
def load_neurons(neurons, neuron_loader=load_neuron, name=None,
                 population_class=Population, ignored_exceptions=()):
    """Create a population from morphologies in a directory or file list.

    Args:
        neurons: directory path, or a list/tuple of file names.
        neuron_loader: callable loading a single morphology file.
        name: population name; derived from the input when None.
        population_class: class used to hold the loaded neurons.
        ignored_exceptions: NeuroMError subclasses to skip instead of raise.
    """
    if isinstance(neurons, (list, tuple)):
        files = neurons
        name = name if name is not None else 'Population'
    elif isinstance(neurons, StringType):
        files = get_files_by_path(neurons)
        name = name if name is not None else os.path.basename(neurons)

    ignored_exceptions = tuple(ignored_exceptions)
    pop = []
    for filename in files:
        try:
            pop.append(neuron_loader(filename))
        except NeuroMError as e:
            if isinstance(e, ignored_exceptions):
                L.info('Ignoring exception "%s" for file %s',
                       e, os.path.basename(filename))
                continue
            raise

    return population_class(pop, name=name)
Create a population object from all morphologies in a directory or from morphologies in a list of file names
46,750
def _get_file ( handle ) : if not isinstance ( handle , IOBase ) : return handle fd , temp_file = tempfile . mkstemp ( str ( uuid . uuid4 ( ) ) , prefix = 'neurom-' ) os . close ( fd ) with open ( temp_file , 'w' ) as fd : handle . seek ( 0 ) shutil . copyfileobj ( handle , fd ) return temp_file
Returns the filename of the file to read
46,751
def load_data(handle, reader=None):
    """Unpack data into a raw data wrapper.

    Raises:
        NeuroMError: when no loader exists for the requested format.
        RawDataError: when the underlying reader fails.
    """
    if not reader:
        # Infer the format from the file extension.
        reader = os.path.splitext(handle)[1][1:].lower()

    if reader not in _READERS:
        raise NeuroMError('Do not have a loader for "%s" extension' % reader)

    filename = _get_file(handle)
    try:
        return _READERS[reader](filename)
    except Exception as e:
        L.exception('Error reading file %s, using "%s" loader', filename, reader)
        raise RawDataError('Error reading file %s:\n%s' % (filename, str(e)))
Unpack data into a raw data wrapper
46,752
def _load_h5(filename):
    """Read *filename* with the HDF5 reader.

    The import is deferred so h5py is only required when HDF5 files
    are actually read.
    """
    from neurom.io import hdf5
    return hdf5.read(filename, remove_duplicates=False, data_wrapper=DataWrapper)
Delay loading of h5py until it is needed
46,753
def _filepath ( self , name ) : if self . file_ext is None : candidates = glob . glob ( os . path . join ( self . directory , name + ".*" ) ) try : return next ( filter ( _is_morphology_file , candidates ) ) except StopIteration : raise NeuroMError ( "Can not find morphology file for '%s' " % name ) else : return os . path . join ( self . directory , name + self . file_ext )
File path to name morphology file .
46,754
def draw(obj, mode='2d', **kwargs):
    """Draw a morphology object.

    Args:
        obj: a Neuron, Tree/Neurite, or Soma.
        mode: one of MODES (e.g. '2d', '3d', 'dendrogram').
        **kwargs: forwarded to the plotter; 'output_path' triggers saving.

    Raises:
        InvalidDrawModeError: for an unknown *mode*.
        NotDrawableError: when *obj* has no registered plotter.
    """
    if mode not in MODES:
        raise InvalidDrawModeError('Invalid drawing mode %s' % mode)

    if mode in ('2d', 'dendrogram'):
        fig, ax = common.get_figure()
    else:
        fig, ax = common.get_figure(params={'projection': '3d'})

    if isinstance(obj, Neuron):
        tag = 'neuron'
    elif isinstance(obj, (Tree, Neurite)):
        tag = 'tree'
    elif isinstance(obj, Soma):
        tag = 'soma'
    else:
        raise NotDrawableError('draw not implemented for %s' % obj.__class__)

    viewer = '%s_%s' % (tag, mode)
    try:
        plotter = _VIEWERS[viewer]
    except KeyError:
        raise NotDrawableError('No drawer for class %s, mode=%s'
                               % (obj.__class__, mode))

    output_path = kwargs.pop('output_path', None)
    plotter(ax, obj, **kwargs)

    # The dendrogram viewer handles its own styling.
    if mode != 'dendrogram':
        common.plot_style(fig=fig, ax=ax, **kwargs)

    if output_path:
        common.save_plot(fig=fig, output_path=output_path, **kwargs)

    return fig, ax
Draw a morphology object
46,755
def population_feature_values(pops, feature):
    """Extract feature values per population.

    Per-neuron list/array results are flattened into a single list.
    """
    values_per_pop = []
    for pop in pops:
        per_neuron = [getattr(neuron, 'get_' + feature)() for neuron in pop.neurons]
        if any(isinstance(v, (list, np.ndarray)) for v in per_neuron):
            per_neuron = list(chain(*per_neuron))
        values_per_pop.append(per_neuron)
    return values_per_pop
Extracts feature values per population
46,756
def get_segment(neuron, section_id, segment_id):
    """Get an XYZR segment given section and segment ids.

    Returns the two consecutive section points forming the segment.
    """
    section = neuron.sections[section_id]
    return section.points[segment_id:segment_id + 2][:, COLS.XYZR]
Get a segment given a section and segment id
46,757
def extract_data(data_path, feature):
    """Load neurons from *data_path*, extract *feature* and fit it.

    Returns the optimal distribution fit over the pooled feature values.
    """
    population = nm.load_neurons(data_path)
    pooled = list(chain(*(nm.get(feature, n) for n in population)))
    return stats.optimal_distribution(pooled)
Loads a list of neurons, extracts the feature and transforms the fitted distribution into the correct format. Returns the optimal distribution, the corresponding parameters, and the minimum and maximum values.
46,758
def bifurcation_partition(bif_point):
    """Calculate the partition at a bifurcation point.

    The partition is the ratio of the larger to the smaller subtree
    size, counted in sections.
    """
    assert len(bif_point.children) == 2, \
        'A bifurcation point must have exactly 2 children'
    left = float(sum(1 for _ in bif_point.children[0].ipreorder()))
    right = float(sum(1 for _ in bif_point.children[1].ipreorder()))
    return max(left, right) / min(left, right)
Calculate the partition at a bifurcation point
46,759
def partition_pair(bif_point):
    """Calculate the pair of subtree sizes at a bifurcation point."""
    left = float(sum(1 for _ in bif_point.children[0].ipreorder()))
    right = float(sum(1 for _ in bif_point.children[1].ipreorder()))
    return (left, right)
Calculate the partition pairs at a bifurcation point
46,760
def _match_section(section, match):
    """Check whether the section's type token is in the match dictionary.

    Only the first 5 entries are inspected; returns the mapped value,
    or None when nothing matches.
    """
    for i in range(5):
        if i >= len(section):
            return None
        if isinstance(section[i], StringType) and section[i] in match:
            return match[section[i]]
    return None
checks whether the type of section is in the match dictionary
46,761
def _parse_section(token_iter):
    """Build the tree structure defined by a stream of s-expression tokens.

    Sub-expressions matching UNWANTED_SECTIONS are dropped.
    """
    sexp = []
    for token in token_iter:
        if token == '(':
            nested = _parse_section(token_iter)
            if not _match_section(nested, UNWANTED_SECTIONS):
                sexp.append(nested)
        elif token == ')':
            return sexp
        else:
            sexp.append(token)
    return sexp
take a stream of tokens and create the tree structure that is defined by the s - expressions
46,762
def _parse_sections(morph_fd):
    """Return a list of all wanted top-level sections in the file."""
    sections = []
    token_iter = _get_tokens(morph_fd)
    for token in token_iter:
        if token == '(':
            section = _parse_section(token_iter)
            if not _match_section(section, UNWANTED_SECTIONS):
                sections.append(section)
    return sections
returns array of all the sections that exist
46,763
def _flatten_subsection(subsection, _type, offset, parent):
    """Flatten a nested subsection into (x, y, z, r, type, id, pid) rows.

    Args:
        subsection: nested list of point rows and split branches.
        _type: section type applied to every yielded row.
        offset: id assigned to the next yielded row.
        parent: id of the parent of the first yielded row.

    Point rows have 4 members (plus an optional 'Sn' tag); a nested
    list marks a branch split, with '|' separating sibling branches.
    """
    for row in subsection:
        # Markers with no geometric meaning.
        if row in ('Low', 'Generated', 'High', ):
            continue
        if isinstance(row[0], StringType):
            if len(row) in (4, 5, ):
                if len(row) == 5:
                    assert row[4][0] == 'S', \
                        'Only known usage of a fifth member is Sn, found: %s' % row[4][0]
                # The file stores diameters; halve to get radii.
                yield (float(row[0]), float(row[1]), float(row[2]),
                       float(row[3]) / 2., _type, offset, parent)
                parent = offset
                offset += 1
        elif isinstance(row[0], list):
            # A split: every branch hangs off the point just before it.
            split_parent = offset - 1
            start_offset = 0

            slices = []
            start = 0
            for i, value in enumerate(row):
                if value == '|':
                    slices.append(slice(start + start_offset, i))
                    start = i + 1
            slices.append(slice(start + start_offset, len(row)))

            for split_slice in slices:
                for flat_row in _flatten_subsection(row[split_slice], _type,
                                                    offset, split_parent):
                    offset += 1
                    yield flat_row
Flatten a subsection from its nested version
46,764
def _extract_section(section):
    """Get the flattened contents of a top-level section.

    Returns an Nx7 array of rows, or None for unwanted section types.
    """
    if len(section) == 1:
        assert section[0] == 'Sections', \
            ('Only known usage of a single Section content is "Sections", found %s' %
             section[0])
        return None

    # The type token may appear in the first or second sub-expression.
    _type = WANTED_SECTIONS.get(section[0][0], None)
    start = 1
    if _type is None:
        _type = WANTED_SECTIONS.get(section[1][0], None)
        if _type is None:
            return None
        start = 2

    # The soma has no parent point; neurites parent on their first point.
    parent = -1 if _type == POINT_TYPE.SOMA else 0
    rows = _flatten_subsection(section[start:], _type, offset=0, parent=parent)
    return np.array([row for row in rows])
Find top level sections and get their flat contents and append them all
46,765
def _sections_to_raw_data(sections):
    """Convert a list of sections into neurom's raw data array.

    The soma is placed first; each neurite block follows with its ids
    and parent ids shifted into the global id space, and its root
    re-parented onto the soma's last point.
    """
    soma = None
    neurites = []
    for section in sections:
        block = _extract_section(section)
        if block is None:
            continue
        if block[0][COLS.TYPE] == POINT_TYPE.SOMA:
            assert soma is None, 'Multiple somas defined in file'
            soma = block
        else:
            neurites.append(block)

    assert soma is not None, 'Missing CellBody element (ie. soma)'

    total_length = len(soma) + sum(len(neurite) for neurite in neurites)
    raw_data = np.zeros((total_length, 7, ), dtype=np.float64)

    pos = len(soma)
    raw_data[0:pos, :] = soma
    for neurite in neurites:
        end = pos + len(neurite)
        raw_data[pos:end, :] = neurite
        # Shift local ids into the global id space.
        raw_data[pos:end, COLS.P] += pos
        raw_data[pos:end, COLS.ID] += pos
        # Re-parent the neurite root onto the last soma point.
        raw_data[pos, COLS.P] = len(soma) - 1
        pos = end

    return raw_data
convert list of sections into the raw_data format used in neurom
46,766
def read(morph_file, data_wrapper=DataWrapper):
    """Return wrapped raw data for a Neurolucida .asc file.

    NOTE: experimental reader; see the runtime warning below.
    """
    msg = ('This is an experimental reader. '
           'There are no guarantees regarding ability to parse '
           'Neurolucida .asc files or correctness of output.')
    warnings.warn(msg)
    L.warning(msg)

    with open(morph_file, encoding='utf-8', errors='replace') as morph_fd:
        sections = _parse_sections(morph_fd)
    raw_data = _sections_to_raw_data(sections)
    return data_wrapper(raw_data, 'NL-ASCII')
return a raw_data np . array with the full neuron and the format of the file suitable to be wrapped by DataWrapper
46,767
def stats(data):
    """Dictionary with summary stats for *data*.

    Keys: len, mean, sum, std, min, max.
    """
    return {'len': len(data),
            'mean': np.mean(data),
            'sum': np.sum(data),
            'std': np.std(data),
            'min': np.min(data),
            'max': np.max(data)}
Dictionary with summary stats for data
46,768
def get_config(config, default_config):
    """Load configuration from file if given, else use the default path.

    Raises:
        ConfigError: if the file is not valid YAML.
    """
    if not config:
        logging.warning('Using default config: %s', default_config)
        config = default_config

    try:
        with open(config, 'r') as config_file:
            # FIX: yaml.load() without an explicit Loader is unsafe on
            # untrusted input and requires a Loader in PyYAML >= 6;
            # safe_load is the recommended call for config files.
            return yaml.safe_load(config_file)
    except (yaml.reader.ReaderError,
            yaml.parser.ParserError,
            yaml.scanner.ScannerError) as e:
        raise ConfigError('Invalid yaml file: \n %s' % str(e))
Load configuration from file if in config else use default
46,769
def soma_surface_area(nrn, neurite_type=NeuriteType.soma):
    """Get the surface area of a neuron's soma.

    The soma is modeled as a sphere of the soma radius.
    """
    assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
    return 4 * math.pi * nrn.soma.radius ** 2
Get the surface area of a neuron's soma.
46,770
def soma_surface_areas(nrn_pop, neurite_type=NeuriteType.soma):
    """Get the surface areas of the somata in a population of neurons."""
    nrns = neuron_population(nrn_pop)
    assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
    return [soma_surface_area(neuron) for neuron in nrns]
Get the surface areas of the somata in a population of neurons
46,771
def soma_radii(nrn_pop, neurite_type=NeuriteType.soma):
    """Get the radii of the somata of a population of neurons."""
    assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
    return [neuron.soma.radius for neuron in neuron_population(nrn_pop)]
Get the radii of the somata of a population of neurons
46,772
def trunk_section_lengths(nrn, neurite_type=NeuriteType.all):
    """List of lengths of trunk sections of neurites in a neuron."""
    is_wanted = is_type(neurite_type)
    return [morphmath.section_length(neurite.root_node.points)
            for neurite in nrn.neurites if is_wanted(neurite)]
list of lengths of trunk sections of neurites in a neuron
46,773
def trunk_origin_radii(nrn, neurite_type=NeuriteType.all):
    """Radii of the first points of trunk sections of neurites in a neuron."""
    is_wanted = is_type(neurite_type)
    return [neurite.root_node.points[0][COLS.R]
            for neurite in nrn.neurites if is_wanted(neurite)]
radii of the trunk sections of neurites in a neuron
46,774
def trunk_origin_azimuths(nrn, neurite_type=NeuriteType.all):
    """Get a list of all the trunk origin azimuths of a neuron or population.

    The azimuth is arctan2(z, x) of the vector from the soma center to the
    first point of the trunk section.
    """
    wanted = is_type(neurite_type)
    nrns = neuron_population(nrn)

    def _azimuth(points, soma):
        # vector from the soma center to the section's first point
        direction = morphmath.vector(points[0], soma.center)
        return np.arctan2(direction[COLS.Z], direction[COLS.X])

    return [_azimuth(neurite.root_node.points, neuron.soma)
            for neuron in nrns
            for neurite in neuron.neurites if wanted(neurite)]
Get a list of all the trunk origin azimuths of a neuron or population
46,775
def trunk_origin_elevations(nrn, neurite_type=NeuriteType.all):
    """Get a list of all the trunk origin elevations of a neuron or population.

    The elevation is arcsin(y / |v|) of the vector v from the soma center to
    the first trunk point.

    Raises:
        ValueError: when that vector has (almost) zero norm.
    """
    wanted = is_type(neurite_type)
    nrns = neuron_population(nrn)

    def _elevation(points, soma):
        direction = morphmath.vector(points[0], soma.center)
        length = np.linalg.norm(direction)
        # guard against a degenerate (zero-length) soma-to-trunk vector
        if length < np.finfo(type(length)).eps:
            raise ValueError("Norm of vector between soma center and section is almost zero.")
        return np.arcsin(direction[COLS.Y] / length)

    return [_elevation(neurite.root_node.points, neuron.soma)
            for neuron in nrns
            for neurite in neuron.neurites if wanted(neurite)]
Get a list of all the trunk origin elevations of a neuron or population
46,776
def trunk_vectors(nrn, neurite_type=NeuriteType.all):
    """Calculate the vectors between all the trunks of the neuron and the soma center."""
    wanted = is_type(neurite_type)
    nrns = neuron_population(nrn)
    vectors = [morphmath.vector(neurite.root_node.points[0], neuron.soma.center)
               for neuron in nrns
               for neurite in neuron.neurites if wanted(neurite)]
    return np.array(vectors)
Calculates the vectors between all the trunks of the neuron and the soma center .
46,777
def trunk_angles(nrn, neurite_type=NeuriteType.all):
    """Calculate the angles between all the trunks of the neuron.

    The angles are defined on the x-y plane; the trunks are sorted by their
    angle from the y axis, anti-clockwise, and each trunk's angle to its
    predecessor in that order is returned (the first wraps around to the
    last).  Returns [] when no trunk matches *neurite_type*.
    """
    vectors = trunk_vectors(nrn, neurite_type=neurite_type)
    # no matching trunks -> nothing to compare
    if not vectors.size:
        return []

    def _sort_angle(p1, p2):
        """Signed angle difference between 2D points p1 and p2 (atan2(y, x))."""
        ang1 = np.arctan2(*p1[::-1])
        ang2 = np.arctan2(*p2[::-1])
        return (ang1 - ang2)

    # sort trunks by their (normalized) x-y angle relative to the +y axis
    order = np.argsort(np.array([_sort_angle(i / np.linalg.norm(i), [0, 1])
                                 for i in vectors[:, 0:2]]))

    # keep only the x-y components, in sorted order
    ordered_vectors = vectors[order][:, [COLS.X, COLS.Y]]

    # angle between each trunk and the previous one; index -1 wraps around,
    # closing the circle
    return [morphmath.angle_between_vectors(ordered_vectors[i], ordered_vectors[i - 1])
            for i, _ in enumerate(ordered_vectors)]
Calculates the angles between all the trunks of the neuron. The angles are defined on the x-y plane, and the trunks are sorted by their angle from the y axis, anti-clockwise.
46,778
def sholl_crossings(neurites, center, radii):
    """Count, for each radius in *radii*, the segments of *neurites* that
    cross the sphere of that radius centred at *center*."""
    def _crossings(neurite, radius):
        """Number of segments of *neurite* crossing the sphere of *radius*."""
        r2 = radius ** 2
        total = 0
        for begin, end in iter_segments(neurite):
            d_begin = morphmath.point_dist2(center, begin)
            d_end = morphmath.point_dist2(center, end)
            # a segment crosses when r**2 lies between its squared
            # endpoint distances (in either direction)
            if d_begin <= r2 <= d_end or d_end <= r2 <= d_begin:
                total += 1
        return total

    return np.array([sum(_crossings(neurite, radius) for neurite in iter_neurites(neurites))
                     for radius in radii])
calculate crossings of neurites
46,779
def sholl_frequency(nrn, neurite_type=NeuriteType.all, step_size=10):
    """Perform Sholl frequency calculations on a population of neurites.

    Sums, for concentric radii spaced by *step_size*, the segment crossings
    of every (filtered) neurite of every neuron.  Radii start at the
    smallest soma radius of the population and extend to the largest
    bounding-box coordinate magnitude.
    """
    nrns = neuron_population(nrn)
    neurite_filter = is_type(neurite_type)

    min_soma_edge = float('Inf')
    max_radii = 0
    neurites_list = []
    for neuron in nrns:
        # collect (neurite, soma center) pairs so crossings are measured
        # from each neuron's own soma
        neurites_list.extend(((neurites, neuron.soma.center)
                              for neurites in neuron.neurites
                              if neurite_filter(neurites)))

        min_soma_edge = min(min_soma_edge, neuron.soma.radius)
        # NOTE(review): using max |bounding box coordinate| as the outer
        # radius assumes the morphology sits near the origin -- TODO confirm
        max_radii = max(max_radii, np.max(np.abs(bounding_box(neuron))))

    radii = np.arange(min_soma_edge, max_radii + step_size, step_size)
    ret = np.zeros_like(radii)
    # accumulate crossings over all neurites at every radius
    for neurites, center in neurites_list:
        ret += sholl_crossings(neurites, center, radii)
    return ret
perform Sholl frequency calculations on a population of neurites
46,780
def dist_points(bin_edges, d):
    """Evaluate a distribution description at the bin centers.

    *d* is a dict with a 'type' key (dispatched through DISTS) or None.
    Returns (values-or-None, bin_centers).
    """
    centers = bin_centers(bin_edges)
    values = d
    if values is not None:
        values = DISTS[values['type']](values, centers)
    return values, centers
Return an array of values according to a distribution
46,781
def calc_limits(data, dist=None, padding=0.25):
    """Calculate a suitable (min, max) range for a histogram.

    The range covers both *data* and, when given, the 'min'/'max' entries of
    the distribution description *dist*; it is then widened by *padding*
    times its width on each side.
    """
    # BUG FIX: the previous code seeded the running maximum with
    # sys.float_info.min, which is the smallest POSITIVE float (~2.2e-308),
    # not the most negative one -- for all-negative data the computed max
    # was clamped to ~0.  Infinities are the correct neutral elements.
    dmin = float('inf') if dist is None else dist.get('min', float('inf'))
    dmax = float('-inf') if dist is None else dist.get('max', float('-inf'))
    _min = min(min(data), dmin)
    _max = max(max(data), dmax)

    pad = padding * (_max - _min)
    return _min - pad, _max + pad
Calculate a suitable range for a histogram
46,782
def load_neurite_features(filepath):
    """Unpack relevant data into a nested dict:
    feature name -> neurite-type name -> list of values."""
    stuff = defaultdict(lambda: defaultdict(list))
    for nrn in nm.load_neurons(filepath):
        for neurite_type in NEURITES_:
            # e.g. 'NeuriteType.axon' -> 'axon'
            type_name = str(neurite_type).split('.')[1]
            for feature in FEATURES:
                stuff[feature][type_name].extend(
                    nm.get(feature, nrn, neurite_type=neurite_type))
    return stuff
Unpack relevant data into megadict
46,783
def main(data_dir, mtype_file):
    """Build per-(feature, neurite-type) histogram plots for the morphologies
    in *data_dir*, overlaying the model distributions from *mtype_file*.

    Returns:
        list of Plot objects (one per feature/type with data).
    """
    stuff = load_neurite_features(data_dir)
    # close the config file deterministically instead of leaking the handle
    with open(mtype_file) as mtype_fd:
        sim_params = json.load(mtype_fd)

    _plots = []

    for feat, d in stuff.items():
        for typ, data in d.items():
            dist = sim_params['components'][typ].get(feat, None)
            print('Type = %s, Feature = %s, Distribution = %s' % (typ, feat, dist))
            if not data:
                print("No data found for feature %s (%s)" % (feat, typ))
                continue

            num_bins = 100
            limits = calc_limits(data, dist)
            bin_edges = np.linspace(limits[0], limits[1], num_bins + 1)
            # 'density' replaces the 'normed' keyword, which was deprecated
            # and then removed in NumPy 1.24
            histo = np.histogram(data, bin_edges, density=True)
            print('PLOT LIMITS:', limits)

            plot = Plot(*view_utils.get_figure(new_fig=True, subplot=111))
            plot.ax.set_xlim(*limits)
            plot.ax.bar(histo[1][:-1], histo[0], width=bin_widths(histo[1]))
            dp, bc = dist_points(histo[1], dist)
            if dp is not None:
                plot.ax.plot(bc, dp, 'r*')
            plot.ax.set_title('%s (%s)' % (feat, typ))
            _plots.append(plot)
    return _plots
Run the stuff
46,784
def extract_density(population, plane='xy', bins=100, neurite_type=NeuriteType.basal_dendrite):
    """Extract the 2d histogram of the segment midpoint coordinates
    projected on the selected plane ('xy', 'xz', 'yz', ...)."""
    midpoints = get_feat('segment_midpoints', population, neurite_type=neurite_type)
    axis0 = 'xyz'.index(plane[0])
    axis1 = 'xyz'.index(plane[1])
    horiz = midpoints[:, axis0]
    vert = midpoints[:, axis1]
    return np.histogram2d(np.array(horiz), np.array(vert), bins=(bins, bins))
Extracts the 2d histogram of the center coordinates of segments in the selected plane .
46,785
def plot_density(population, bins=100, new_fig=True, subplot=111, levels=None,
                 plane='xy', colorlabel='Nodes per unit area', labelfontsize=16,
                 color_map='Reds', no_colorbar=False, threshold=0.01,
                 neurite_type=NeuriteType.basal_dendrite, **kwargs):
    """Plot the 2d histogram of the center coordinates of segments in the
    selected plane.

    Cells with counts below *threshold* are masked and rendered white; the
    rest is drawn as filled contours using *color_map*.  Extra keyword
    arguments are forwarded to common.plot_style.
    """
    fig, ax = common.get_figure(new_fig=new_fig, subplot=subplot)

    H1, xedges1, yedges1 = extract_density(population, plane=plane, bins=bins,
                                           neurite_type=neurite_type)

    # hide low-count cells so they show as background
    mask = H1 < threshold
    H2 = np.ma.masked_array(H1, mask)

    # masked cells are painted white by the colormap
    getattr(plt.cm, color_map).set_bad(color='white', alpha=None)

    # contour at the bin centers; transpose because histogram2d returns
    # H[x, y] while contourf expects Z[y, x]
    plots = ax.contourf((xedges1[:-1] + xedges1[1:]) / 2,
                        (yedges1[:-1] + yedges1[1:]) / 2,
                        np.transpose(H2), cmap=getattr(plt.cm, color_map),
                        levels=levels)

    if not no_colorbar:
        cbar = plt.colorbar(plots)
        cbar.ax.set_ylabel(colorlabel, fontsize=labelfontsize)

    # default axis labels follow the chosen plane
    kwargs['title'] = kwargs.get('title', '')
    kwargs['xlabel'] = kwargs.get('xlabel', plane[0])
    kwargs['ylabel'] = kwargs.get('ylabel', plane[1])

    return common.plot_style(fig=fig, ax=ax, **kwargs)
Plots the 2d histogram of the center coordinates of segments in the selected plane .
46,786
def plot_neuron_on_density(population, bins=100, new_fig=True, subplot=111,
                           levels=None, plane='xy',
                           colorlabel='Nodes per unit area', labelfontsize=16,
                           color_map='Reds', no_colorbar=False, threshold=0.01,
                           neurite_type=NeuriteType.basal_dendrite, **kwargs):
    """Plot the 2d histogram of the center coordinates of segments in the
    selected plane and superimpose the view of the first neurite of the
    collection."""
    _, ax = common.get_figure(new_fig=new_fig)

    # draw the first neurite on the same axes, then overlay the density
    view.plot_tree(ax, population.neurites[0])

    return plot_density(population, plane=plane, bins=bins, new_fig=False,
                        subplot=subplot, colorlabel=colorlabel,
                        labelfontsize=labelfontsize, levels=levels,
                        color_map=color_map, no_colorbar=no_colorbar,
                        threshold=threshold, neurite_type=neurite_type,
                        **kwargs)
Plots the 2d histogram of the center coordinates of segments in the selected plane and superimposes the view of the first neurite of the collection .
46,787
def is_monotonic(neurite, tol):
    """Check if a neurite tree is radius-monotonic.

    True when, within every section and across each parent/child boundary,
    the radius never grows by more than *tol* from root to tip.
    """
    for node in neurite.iter_sections():
        pts = node.points
        # radii within the section must not increase beyond tol
        for prev_pt, next_pt in zip(pts, pts[1:]):
            if next_pt[COLS.R] > prev_pt[COLS.R] + tol:
                return False
        # the section's first radius must not exceed the parent's last one
        if (node.parent is not None
                and pts[0][COLS.R] > node.parent.points[-1][COLS.R] + tol):
            return False
    return True
Check if neurite tree is monotonic
46,788
def is_flat(neurite, tol, method='tolerance'):
    """Check if a neurite is flat using the given method.

    'tolerance': flat when any principal-direction extent is below *tol*.
    'ratio': flat when the ratio of the two smallest extents is below *tol*.
    """
    ext = principal_direction_extent(neurite.points[:, COLS.XYZ])

    assert method in ('tolerance', 'ratio'), "Method must be one of 'tolerance', 'ratio'"

    if method == 'ratio':
        smallest, second = np.sort(ext)[:2]
        return smallest / second < float(tol)
    return any(ext < float(tol))
Check if neurite is flat using the given method
46,789
def is_back_tracking(neurite):
    """Check if a neurite process backtracks to a previous node.

    Back-tracking takes place when a daughter of a branching process goes
    back and either overlaps with a previous point, or lies inside the
    cylindrical volume of an earlier segment of the same section.
    """
    def pair(segs):
        """Consecutive (point, next-point) pairs of a point list."""
        return zip(segs, segs[1:])

    def coords(node):
        """x, y, z coordinates of a point row."""
        return node[COLS.XYZ]

    def max_radius(seg):
        """Larger of the two endpoint radii of a segment."""
        return max(seg[0][COLS.R], seg[1][COLS.R])

    def is_not_zero_seg(seg):
        """True when the segment has non-zero length."""
        return not np.allclose(coords(seg[0]), coords(seg[1]))

    def is_in_the_same_verse(seg1, seg2):
        """True when the two segment vectors point the same way (dot >= 0)."""
        v1 = coords(seg2[1]) - coords(seg2[0])
        v2 = coords(seg1[1]) - coords(seg1[0])
        return np.dot(v1, v2) >= 0

    def is_seg2_within_seg1_radius(dist, seg1, seg2):
        """True when *dist* is within the sum of the segments' max radii."""
        return dist <= max_radius(seg1) + max_radius(seg2)

    def is_seg1_overlapping_with_seg2(seg1, seg2):
        """True when the endpoint of seg1 lies close to the body of seg2."""
        # endpoints of seg2 and its center C
        s1 = coords(seg2[0])
        s2 = coords(seg2[1])
        C = 0.5 * (s1 + s2)
        # endpoint of seg1, and the vector from C to it
        P = coords(seg1[1])
        CP = P - C
        # axis vector of seg2 and the projection of CP onto it
        S1S2 = s2 - s1
        prj = mm.vector_projection(CP, S1S2)
        # orthogonal distance from P to seg2's axis must be within the radii
        if not is_seg2_within_seg1_radius(np.linalg.norm(CP - prj), seg1, seg2):
            return False
        # and the projection must fall within (roughly) half the segment
        # length from its center; 0.55 gives some tolerance
        return np.linalg.norm(prj) < 0.55 * np.linalg.norm(S1S2)

    def is_inside_cylinder(seg1, seg2):
        """seg1 back-tracks into seg2: opposite verse AND overlapping."""
        return not is_in_the_same_verse(seg1, seg2) and is_seg1_overlapping_with_seg2(seg1, seg2)

    # only sections with at least two segments can back-track on themselves
    section_itr = (snode for snode in neurite.iter_sections() if snode.points.shape[0] > 2)
    for snode in section_itr:
        # drop zero-length segments before pairwise comparison
        segment_pairs = list(filter(is_not_zero_seg, pair(snode.points)))
        # compare every segment against all earlier segments of the section
        for i, seg1 in enumerate(segment_pairs[1:]):
            for seg2 in segment_pairs[0: i + 1]:
                if is_inside_cylinder(seg1, seg2):
                    return True
    return False
Check if a neurite process backtracks to a previous node . Back - tracking takes place when a daughter of a branching process goes back and either overlaps with a previous point or lies inside the cylindrical volume of the latter .
46,790
def get_flat_neurites(neuron, tol=0.1, method='ratio'):
    """Return the neurites of *neuron* that are flat within tolerance *tol*."""
    return [neurite for neurite in neuron.neurites
            if is_flat(neurite, tol, method)]
Check if a neuron has neurites that are flat within a tolerance
46,791
def get_nonmonotonic_neurites(neuron, tol=1e-6):
    """Return the neurites of *neuron* that are not radius-monotonic
    (within tolerance *tol*)."""
    return [neurite for neurite in neuron.neurites
            if not is_monotonic(neurite, tol)]
Get neurites that are not monotonic
46,792
def segment_centre_of_mass(seg):
    """Calculate and return the centre of mass of a segment.

    The segment is treated as a conical frustum with end radii r0, r1; the
    centroid fraction along the axis is
    (r0^2 + 2*r0*r1 + 3*r1^2) / (4 * (r0^2 + r0*r1 + r1^2)).
    """
    h = mm.segment_length(seg)
    r0 = seg[0][COLS.R]
    r1 = seg[1][COLS.R]
    frac = (r0 * r0 + 2 * r0 * r1 + 3 * r1 * r1) / (4 * (r0 * r0 + r0 * r1 + r1 * r1))
    # NOTE(review): the fraction is divided by the segment length before
    # scaling the axis vector, as in the original -- verify intended units
    return seg[0][COLS.XYZ] + (frac / h) * (seg[1][COLS.XYZ] - seg[0][COLS.XYZ])
Calculate and return centre of mass of a segment .
46,793
def neurite_centre_of_mass(neurite):
    """Calculate and return the volume-weighted centre of mass of a neurite."""
    # BUG FIX: np.array(map(...)) in Python 3 wraps the map object in a
    # 0-d object array instead of materializing the values, breaking the
    # arithmetic below.  Materialize the segments once and use list
    # comprehensions instead.
    segments = list(nm.iter_segments(neurite))
    seg_vol = np.array([mm.segment_volume(seg) for seg in segments])
    seg_centre_of_mass = np.array([segment_centre_of_mass(seg) for seg in segments])

    # centre of mass is the volume-weighted average of segment centroids
    seg_centre_of_mass = seg_centre_of_mass * seg_vol[:, np.newaxis]
    centre_of_mass = np.sum(seg_centre_of_mass, axis=0)
    total_volume = np.sum(seg_vol)
    return centre_of_mass / total_volume
Calculate and return centre of mass of a neurite .
46,794
def distance_sqr(point, seg):
    """Squared Euclidean distance from *point* to the centre of mass of *seg*."""
    diff = np.subtract(point, segment_centre_of_mass(seg))
    return sum(pow(diff, 2))
Calculate and return square Euclidian distance from given point to centre of mass of given segment .
46,795
def radius_of_gyration(neurite):
    """Calculate and return the radius of gyration of a given neurite.

    Computed as the RMS distance of the segment centres of mass from the
    neurite's centre of mass (unweighted over segments).
    """
    centre_mass = neurite_centre_of_mass(neurite)
    dist_sqr = [distance_sqr(centre_mass, seg) for seg in nm.iter_segments(neurite)]
    return np.sqrt(np.sum(dist_sqr) / len(dist_sqr))
Calculate and return radius of gyration of a given neurite .
46,796
def view(input_file, plane, backend):
    """A simple neuron viewer: render *input_file* with the chosen backend."""
    if backend == 'matplotlib':
        from neurom.viewer import draw
        kwargs = {'mode': '3d' if plane == '3d' else '2d'}
        if plane != '3d':
            kwargs['plane'] = plane
        draw(load_neuron(input_file), **kwargs)
        # matplotlib needs an explicit show() to open the window
        import matplotlib.pyplot as plt
        plt.show()
    else:
        from neurom.view.plotly import draw
        draw(load_neuron(input_file), plane=plane)
A simple neuron viewer
46,797
def generate_annotation(result, settings):
    """Generate the annotation for a given checker result.

    Returns an empty string when the check passed; otherwise a
    MUK_ANNOTATION block listing every offending point.
    """
    if result.status:
        return ""

    header = ("\n\n"
              "({label} ; MUK_ANNOTATION\n"
              " (Color {color}) ; MUK_ANNOTATION\n"
              " (Name \"{name}\") ; MUK_ANNOTATION").format(**settings)
    footer = ") ; MUK_ANNOTATION\n"

    lines = [header]
    # one annotation line per offending point, across all info entries
    lines.extend(" ({0} {1} {2} 0.50) ; MUK_ANNOTATION".format(
        p[COLS.X], p[COLS.Y], p[COLS.Z])
        for _, _points in result.info for p in _points)
    lines.append(footer)
    return '\n'.join(lines)
Generate the annotation for a given checker
46,798
def annotate(results, settings):
    """Concatenate the annotations of all checkers, skipping empty ones."""
    parts = []
    for result, setting in zip(results, settings):
        annotation = generate_annotation(result, setting)
        if annotation:
            parts.append(annotation)
    return '\n'.join(parts)
Concatenate the annotations of all checkers
46,799
def as_point(row):
    """Create a Point (x, y, z, radius, type) from a data block row."""
    x, y, z, r = row[COLS.X], row[COLS.Y], row[COLS.Z], row[COLS.R]
    return Point(x, y, z, r, int(row[COLS.TYPE]))
Create a Point from a data block row