idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
234,600
def taper_rate(p0, p1):
    """Compute the taper rate between points ``p0`` and ``p1``.

    The taper rate is twice the radius difference divided by the
    distance between the two points.
    """
    radius_delta = abs(p0[COLS.R] - p1[COLS.R])
    return 2 * radius_delta / point_dist(p0, p1)
Compute the taper rate between points p0 and p1
47
13
234,601
def principal_direction_extent(points):
    """Calculate the extent of a set of 3D points along each principal axis.

    The points are centered on their mean before PCA; the extent along a
    component is the span of the scalar projections onto that component.
    """
    # center the points around 0.0
    points = np.copy(points)
    points -= np.mean(points, axis=0)

    # principal components
    _, eigv = pca(points)

    extent = np.zeros(3)
    for i in range(eigv.shape[1]):
        # orthogonal projection onto the direction of the v component
        scalar_projs = np.sort(np.array([np.dot(p, eigv[:, i]) for p in points]))
        extent[i] = scalar_projs[-1]
        if scalar_projs[0] < 0.:
            extent -= scalar_projs[0]
    return extent
Calculate the extent of a set of 3D points .
165
13
234,602
def stylize(ax, name, feature):
    """Apply stylization modifications to a plot axis.

    Sets the y-axis label to ``feature`` and the title to ``name``.
    """
    ax.set_ylabel(feature)
    ax.set_title(name, fontsize='small')
Stylization modifications to the plots
35
7
234,603
def plot_feature(feature, cell):
    """Plot a feature histogram for ``cell`` and return the figure.

    Fix: the original called ``stylize(ax, cell.name, feature)``
    unconditionally, raising AttributeError when ``cell`` is None even
    though the histogram step was guarded. Styling is now applied only
    when a cell is available.
    """
    fig = pl.figure()
    ax = fig.add_subplot(111)
    if cell is not None:
        try:
            histogram(cell, feature, ax)
        except ValueError:
            # some features cannot be histogrammed; leave the axis empty
            pass
        stylize(ax, cell.name, feature)
    return fig
Plot a feature
63
3
234,604
def path_end_to_end_distance(neurite):
    """Calculate and return end-to-end-distance of a given neurite.

    Measured as the largest distance between the trunk (first point of
    the root node) and any leaf end point.
    """
    trunk = neurite.root_node.points[0]
    leaf_dists = (morphmath.point_dist(leaf.points[-1], trunk)
                  for leaf in neurite.root_node.ileaf())
    return max(leaf_dists)
Calculate and return end - to - end - distance of a given neurite .
64
18
234,605
def make_end_to_end_distance_plot(nb_segments, end_to_end_distance, neurite_type):
    """Plot end-to-end distance vs number of segments."""
    plt.figure()
    plt.plot(nb_segments, end_to_end_distance)
    plt.title(neurite_type)
    plt.xlabel('Number of segments')
    plt.ylabel('End-to-end distance')
    plt.show()
Plot end - to - end distance vs number of segments
101
11
234,606
def calculate_and_plot_end_to_end_distance(neurite):
    """Calculate and plot the end-to-end distance vs the number of segments
    for an increasingly larger part of a given neurite."""
    def _dist(seg):
        '''Distance between segmenr end and trunk'''
        return morphmath.point_dist(seg[1], neurite.root_node.points[0])

    end_to_end_distance = [_dist(s) for s in nm.iter_segments(neurite)]
    segment_counts = np.arange(len(end_to_end_distance)) + 1
    make_end_to_end_distance_plot(segment_counts, end_to_end_distance, neurite.type)
Calculate and plot the end - to - end distance vs the number of segments for an increasingly larger part of a given neurite .
138
28
234,607
def tree_type_checker(*ref):
    """Build a tree type checker functor.

    Returns a predicate accepting a tree; the predicate is True when the
    tree's type is one of ``ref``, or always True when ``NeuriteType.all``
    is among ``ref``.
    """
    ref = tuple(ref)
    if NeuriteType.all in ref:
        def check_tree_type(_):
            '''Always returns true'''
            return True
    else:
        def check_tree_type(tree):
            '''Check whether tree has the same type as ref

            Returns:
                True if ref in the same type as tree.type or ref is NeuriteType.all
            '''
            return tree.type in ref
    return check_tree_type
Tree type checker functor
106
6
234,608
def dendrite_filter(n):
    """Select only dendrites (basal or apical)."""
    return n.type in (NeuriteType.basal_dendrite, NeuriteType.apical_dendrite)
Select only dendrites
41
5
234,609
def plot_somas(somas):
    """Plot set of somas on same figure as spheres, each with different color."""
    _, ax = common.get_figure(new_fig=True, subplot=111,
                              params={'projection': '3d', 'aspect': 'equal'})
    for soma in somas:
        common.plot_sphere(ax, soma.center, soma.radius,
                           color=random_color(), alpha=1)
    plt.show()
Plot set of somas on same figure as spheres each with different color
95
14
234,610
def _max_recursion_depth ( obj ) : neurites = obj . neurites if hasattr ( obj , 'neurites' ) else [ obj ] return max ( sum ( 1 for _ in neu . iter_sections ( ) ) for neu in neurites )
Estimate recursion depth which is defined as the number of nodes in a tree
60
16
234,611
def _total_rectangles ( tree ) : return sum ( len ( sec . children ) + sec . points . shape [ 0 ] - 1 for sec in tree . iter_sections ( ) )
Calculate the total number of segments that are required for the dendrogram . There is a vertical line for each segment and two horizontal line at each branching point
41
33
234,612
def _n_rectangles(obj):
    """Calculate the total number of rectangles with respect to the type
    of the object (neuron/population vs single tree)."""
    if hasattr(obj, 'neurites'):
        return sum(_total_rectangles(neu) for neu in obj.neurites)
    return _total_rectangles(obj)
Calculate the total number of rectangles with respect to the type of the object
52
17
234,613
def _square_segment ( radius , origin ) : return np . array ( ( ( origin [ 0 ] - radius , origin [ 1 ] - radius ) , ( origin [ 0 ] - radius , origin [ 1 ] + radius ) , ( origin [ 0 ] + radius , origin [ 1 ] + radius ) , ( origin [ 0 ] + radius , origin [ 1 ] - radius ) ) )
Vertices for a square
83
5
234,614
def _vertical_segment ( old_offs , new_offs , spacing , radii ) : return np . array ( ( ( new_offs [ 0 ] - radii [ 0 ] , old_offs [ 1 ] + spacing [ 1 ] ) , ( new_offs [ 0 ] - radii [ 1 ] , new_offs [ 1 ] ) , ( new_offs [ 0 ] + radii [ 1 ] , new_offs [ 1 ] ) , ( new_offs [ 0 ] + radii [ 0 ] , old_offs [ 1 ] + spacing [ 1 ] ) ) )
Vertices for a vertical rectangle
127
6
234,615
def _horizontal_segment ( old_offs , new_offs , spacing , diameter ) : return np . array ( ( ( old_offs [ 0 ] , old_offs [ 1 ] + spacing [ 1 ] ) , ( new_offs [ 0 ] , old_offs [ 1 ] + spacing [ 1 ] ) , ( new_offs [ 0 ] , old_offs [ 1 ] + spacing [ 1 ] - diameter ) , ( old_offs [ 0 ] , old_offs [ 1 ] + spacing [ 1 ] - diameter ) ) )
Vertices of a horizontal rectangle
116
6
234,616
def _spacingx(node, max_dims, xoffset, xspace):
    """Determine the x spacing of the current node depending on the number
    of the leaves of the tree.

    NOTE: mutates ``max_dims[0]`` when the computed spacing exceeds it.
    """
    x_spacing = _n_terminations(node) * xspace
    if x_spacing > max_dims[0]:
        max_dims[0] = x_spacing
    return xoffset - x_spacing / 2.
Determine the spacing of the current node depending on the number of the leaves of the tree
73
19
234,617
def _update_offsets ( start_x , spacing , terminations , offsets , length ) : return ( start_x + spacing [ 0 ] * terminations / 2. , offsets [ 1 ] + spacing [ 1 ] * 2. + length )
Update the offsets
53
3
234,618
def _max_diameter(tree):
    """Find max diameter in tree (twice the largest point radius)."""
    max_radius = max(max(node.points[:, COLS.R]) for node in tree.ipreorder())
    return 2. * max_radius
Find max diameter in tree
41
5
234,619
def _generate_dendro(self, current_section, spacing, offsets):
    """Recursive function for dendrogram line computations.

    Walks the children of ``current_section``, emitting one vertical
    rectangle per segment and one horizontal rectangle per bifurcation
    into ``self._rectangles``; tracks the overall extent in ``self._max_dims``.
    """
    max_dims = self._max_dims
    start_x = _spacingx(current_section, max_dims, offsets[0], spacing[0])

    for child in current_section.children:
        segments = child.points
        # number of leaves in child
        terminations = _n_terminations(child)
        # segement lengths
        seg_lengths = np.linalg.norm(np.subtract(segments[:-1, COLS.XYZ],
                                                 segments[1:, COLS.XYZ]), axis=1)
        # segment radii
        radii = (np.vstack((segments[:-1, COLS.R], segments[1:, COLS.R])).T
                 if self._show_diameters
                 else np.zeros((seg_lengths.shape[0], 2)))

        y_offset = offsets[1]
        for seg_idx, seg_len in enumerate(seg_lengths):
            # offset update for the vertical segments
            new_offsets = _update_offsets(start_x, spacing, terminations,
                                          (offsets[0], y_offset), seg_len)
            # segments are drawn vertically, thus only y_offset changes from init offsets
            self._rectangles[self._n] = _vertical_segment((offsets[0], y_offset),
                                                          new_offsets, spacing,
                                                          radii[seg_idx, :])
            self._n += 1
            y_offset = new_offsets[1]

        if y_offset + spacing[1] * 2 + sum(seg_lengths) > max_dims[1]:
            max_dims[1] = y_offset + spacing[1] * 2. + sum(seg_lengths)
        self._max_dims = max_dims

        # recursive call to self.
        self._generate_dendro(child, spacing, new_offsets)

        # update the starting position for the next child
        start_x += terminations * spacing[0]

        # write the horizontal lines only for bifurcations, where the are actual horizontal
        # lines and not zero ones
        if offsets[0] != new_offsets[0]:
            # horizontal segment. Thickness is either 0 if show_diameters is false
            # or 1. if show_diameters is true
            self._rectangles[self._n] = _horizontal_segment(offsets, new_offsets,
                                                            spacing, 0.)
            self._n += 1
Recursive function for dendrogram line computations
572
10
234,620
def types(self):
    """Returns an iterator over the types of the neurites in the object.

    If the object is a tree, a single value is returned.
    """
    if hasattr(self._obj, 'neurites'):
        neurites = self._obj.neurites
    else:
        neurites = (self._obj,)
    return (neu.type for neu in neurites)
Returns an iterator over the types of the neurites in the object . If the object is a tree then one value is returned .
52
26
234,621
def register_neurite_feature(name, func):
    """Register a feature to be applied to neurites.

    Raises NeuroMError if ``name`` collides with an already registered
    neurite feature; otherwise stores a wrapper in NEURONFEATURES that
    maps ``func`` over the filtered neurites.
    """
    if name in NEURITEFEATURES:
        raise NeuroMError('Attempt to hide registered feature %s' % name)

    def _fun(neurites, neurite_type=_ntype.all):
        '''Wrap neurite function from outer scope and map into list'''
        return list(func(n) for n in _ineurites(neurites, filt=_is_type(neurite_type)))

    NEURONFEATURES[name] = _fun
Register a feature to be applied to neurites
119
9
234,622
def get(feature, obj, **kwargs):
    """Obtain a feature from a set of morphology objects.

    Looks the feature up first among neurite features, then neuron features.
    """
    if feature in NEURITEFEATURES:
        feature_fn = NEURITEFEATURES[feature]
    else:
        feature_fn = NEURONFEATURES[feature]
    return _np.array(list(feature_fn(obj, **kwargs)))
Obtain a feature from a set of morphology objects
63
10
234,623
def _get_doc():
    """Get a description of all the known available features."""
    def get_docstring(func):
        '''extract doctstring, if possible'''
        docstring = ':\n'
        if func.__doc__:
            docstring += _indent(func.__doc__, 2)
        return docstring

    ret = ['\nNeurite features (neurite, neuron, neuron population):']
    ret.extend(_INDENT + '- ' + feature + get_docstring(func)
               for feature, func in sorted(NEURITEFEATURES.items()))
    ret.append('\nNeuron features (neuron, neuron population):')
    ret.extend(_INDENT + '- ' + feature + get_docstring(func)
               for feature, func in sorted(NEURONFEATURES.items()))
    return '\n'.join(ret)
Get a description of all the known available features
189
9
234,624
def read(filename, remove_duplicates=False, data_wrapper=DataWrapper):
    """Read an HDF5 morphology file and return the data wrapped by ``data_wrapper``.

    Supports H5V1 and H5V2 layouts; for V2 the best available stage
    ('repaired' > 'unraveled' > 'raw') is chosen.
    """
    with h5py.File(filename, mode='r') as h5file:
        version = get_version(h5file)
        if version == 'H5V1':
            points, groups = _unpack_v1(h5file)
        elif version == 'H5V2':
            stg = next(s for s in ('repaired', 'unraveled', 'raw')
                       if s in h5file['neuron1'])
            points, groups = _unpack_v2(h5file, stage=stg)

    if remove_duplicates:
        points, groups = _remove_duplicate_points(points, groups)

    neuron_builder = BlockNeuronBuilder()
    points[:, POINT_DIAMETER] /= 2  # Store radius, not diameter
    section_rows = zip_longest(groups, groups[1:, GPFIRST], fillvalue=len(points))
    for id_, row in enumerate(section_rows):
        (point_start, section_type, parent_id), point_end = row
        neuron_builder.add_section(id_, int(parent_id), int(section_type),
                                   points[point_start:point_end])

    return neuron_builder.get_datawrapper(version, data_wrapper=data_wrapper)
Read a file and return the data wrapped in a data_wrapper
315
11
234,625
def _remove_duplicate_points(points, groups):
    """Removes the duplicate points from the beginning of a section,
    if they are present in points-groups representation."""
    group_initial_ids = groups[:, GPFIRST]

    to_be_reduced = np.zeros(len(group_initial_ids))
    to_be_removed = []

    for ig, g in enumerate(groups):
        iid, typ, pid = g[GPFIRST], g[GTYPE], g[GPID]
        # Remove first point from sections that are
        # not the root section, a soma, or a child of a soma
        if pid != -1 and typ != 1 and groups[pid][GTYPE] != 1:
            # Remove duplicate from list of points
            to_be_removed.append(iid)
            # Reduce the id of the following sections
            # in groups structure by one
            to_be_reduced[ig + 1:] += 1

    groups[:, GPFIRST] = groups[:, GPFIRST] - to_be_reduced
    points = np.delete(points, to_be_removed, axis=0)
    return points, groups
Removes the duplicate points from the beginning of a section if they are present in points - groups representation .
237
21
234,626
def _unpack_v1 ( h5file ) : points = np . array ( h5file [ 'points' ] ) groups = np . array ( h5file [ 'structure' ] ) return points , groups
Unpack groups from HDF5 v1 file
48
10
234,627
def _unpack_v2 ( h5file , stage ) : points = np . array ( h5file [ 'neuron1/%s/points' % stage ] ) # from documentation: The /neuron1/structure/unraveled reuses /neuron1/structure/raw groups_stage = stage if stage != 'unraveled' else 'raw' groups = np . array ( h5file [ 'neuron1/structure/%s' % groups_stage ] ) stypes = np . array ( h5file [ 'neuron1/structure/sectiontype' ] ) groups = np . hstack ( [ groups , stypes ] ) groups [ : , [ 1 , 2 ] ] = groups [ : , [ 2 , 1 ] ] return points , groups
Unpack groups from HDF5 v2 file
173
10
234,628
def fit_results_to_dict(fit_results, min_bound=None, max_bound=None):
    """Create a JSON-comparable dict from a FitResults object.

    Maps scipy distribution names to human-readable ones and their raw
    parameters to named entries; optional bounds are added only if the
    distribution did not already define 'min'/'max'.
    """
    type_map = {'norm': 'normal', 'expon': 'exponential', 'uniform': 'uniform'}
    param_map = {
        'uniform': lambda p: [('min', p[0]), ('max', p[0] + p[1])],
        'norm': lambda p: [('mu', p[0]), ('sigma', p[1])],
        'expon': lambda p: [('lambda', 1.0 / p[1])],
    }

    result = OrderedDict({'type': type_map[fit_results.type]})
    result.update(param_map[fit_results.type](fit_results.params))

    if min_bound is not None and 'min' not in result:
        result['min'] = min_bound
    if max_bound is not None and 'max' not in result:
        result['max'] = max_bound
    return result
Create a JSON - comparable dict from a FitResults object
256
11
234,629
def fit(data, distribution='norm'):
    """Calculate the parameters of a fit of a distribution to a data set.

    Returns a FitResults holding the fitted parameters and the
    Kolmogorov-Smirnov test result.
    """
    params = getattr(_st, distribution).fit(data)
    ks_result = _st.kstest(data, distribution, params)
    return FitResults(params, ks_result, distribution)
Calculate the parameters of a fit of a distribution to a data set
49
15
234,630
def optimal_distribution(data, distr_to_check=('norm', 'expon', 'uniform')):
    """Calculate the parameters of a fit of different distributions to a data
    set and return the distribution with the minimal KS-distance."""
    fit_results = [fit(data, d) for d in distr_to_check]
    return min(fit_results, key=lambda result: result.errs[0])
Calculate the parameters of a fit of different distributions to a data set and returns the distribution of the minimal ks - distance .
73
27
234,631
def scalar_stats(data, functions=('min', 'max', 'mean', 'std')):
    """Calculate the stats from the given numpy function names over ``data``."""
    return {func: getattr(np, func)(data) for func in functions}
Calculate the stats from the given numpy functions
55
11
234,632
def total_score(paired_dats, p=2, test=StatTests.ks):
    """Calculates the p-norm of the distances that have been calculated from
    the statistical test that has been applied on all the paired datasets."""
    scores = np.array([compare_two(fL1, fL2, test=test).dist
                       for fL1, fL2 in paired_dats])
    return np.linalg.norm(scores, p)
Calculates the p - norm of the distances that have been calculated from the statistical test that has been applied on all the paired datasets .
77
28
234,633
def iter_neurites(obj, mapfun=None, filt=None, neurite_order=NeuriteIter.FileOrder):
    """Iterator over the neurites of a neurite, neuron or neuron population.

    Optionally filters with ``filt``, maps with ``mapfun``, and sorts
    neurites in NRN simulator order when requested.
    """
    if isinstance(obj, Neurite):
        neurites = (obj,)
    elif hasattr(obj, 'neurites'):
        neurites = obj.neurites
    else:
        neurites = obj

    if neurite_order == NeuriteIter.NRN:
        last_position = max(NRN_ORDER.values()) + 1
        neurites = sorted(neurites,
                          key=lambda neurite: NRN_ORDER.get(neurite.type, last_position))

    neurite_iter = iter(neurites) if filt is None else filter(filt, neurites)
    return neurite_iter if mapfun is None else map(mapfun, neurite_iter)
Iterator to a neurite neuron or neuron population
180
9
234,634
def iter_sections(neurites, iterator_type=Tree.ipreorder, neurite_filter=None,
                  neurite_order=NeuriteIter.FileOrder):
    """Iterator over the sections in a neurite, neuron or neuron population."""
    selected = iter_neurites(neurites, filt=neurite_filter,
                             neurite_order=neurite_order)
    return chain.from_iterable(iterator_type(neurite.root_node)
                               for neurite in selected)
Iterator to the sections in a neurite neuron or neuron population .
90
13
234,635
def iter_segments(obj, neurite_filter=None, neurite_order=NeuriteIter.FileOrder):
    """Return an iterator to the segments in a collection of neurites.

    Each segment is a (start_point, end_point) pair of consecutive points
    within a section.
    """
    if isinstance(obj, Section):
        sections = iter((obj,))
    else:
        sections = iter(iter_sections(obj, neurite_filter=neurite_filter,
                                      neurite_order=neurite_order))
    return chain.from_iterable(zip(sec.points[:-1], sec.points[1:])
                               for sec in sections)
Return an iterator to the segments in a collection of neurites
105
12
234,636
def graft_neuron(root_section):
    """Returns a neuron starting at ``root_section``.

    The soma is built from the first point of the root section.
    """
    assert isinstance(root_section, Section)
    soma = Soma(root_section.points[:1])
    return Neuron(soma=soma, neurites=[Neurite(root_section)])
Returns a neuron starting at root_section
56
8
234,637
def points(self):
    """Return unordered array with all the points in this neurite."""
    # add all points in a section except the first one, which is a duplicate
    _pts = [v for s in self.root_node.ipreorder()
            for v in s.points[1:, COLS.XYZR]]
    # except for the very first point, which is not a duplicate
    _pts.insert(0, self.root_node.points[0][COLS.XYZR])
    return np.array(_pts)
Return unordered array with all the points in this neurite
109
12
234,638
def transform(self, trans):
    """Return a copy of this neurite with a 3D transformation applied.

    ``trans`` is a callable mapping an (N, 3) XYZ array to a new (N, 3) array.
    """
    clone = deepcopy(self)
    for section in clone.iter_sections():
        section.points[:, 0:3] = trans(section.points[:, 0:3])
    return clone
Return a copy of this neurite with a 3D transformation applied
52
13
234,639
def iter_sections(self, order=Tree.ipreorder, neurite_order=NeuriteIter.FileOrder):
    """Iteration over section nodes, delegating to the module-level helper."""
    return iter_sections(self, iterator_type=order, neurite_order=neurite_order)
iteration over section nodes
52
5
234,640
def eval_stats(values, mode):
    """Extract a summary statistic from an array of values.

    'raw' returns the values as a plain list; 'total' is an alias for
    numpy's 'sum'; any other mode names a numpy reduction applied along
    axis 0. Returns None when the reduction raises ValueError.
    """
    if mode == 'raw':
        return values.tolist()
    if mode == 'total':
        mode = 'sum'
    try:
        return getattr(np, mode)(values, axis=0)
    except ValueError:
        pass
    return None
Extract a summary statistic from an array of list of values
60
12
234,641
def _stat_name ( feat_name , stat_mode ) : if feat_name [ - 1 ] == 's' : feat_name = feat_name [ : - 1 ] if feat_name == 'soma_radii' : feat_name = 'soma_radius' if stat_mode == 'raw' : return feat_name return '%s_%s' % ( stat_mode , feat_name )
Set stat name based on feature name and stat mode
94
10
234,642
def extract_stats(neurons, config):
    """Extract stats from neurons according to ``config``.

    Per-neurite stats whose value is a length-3 array are expanded into
    X/Y/Z-suffixed scalar entries.
    """
    stats = defaultdict(dict)
    for ns, modes in config['neurite'].items():
        for n in config['neurite_type']:
            n = _NEURITE_MAP[n]
            for mode in modes:
                stat_name = _stat_name(ns, mode)
                stat = eval_stats(nm.get(ns, neurons, neurite_type=n), mode)
                if stat is None or not stat.shape:
                    stats[n.name][stat_name] = stat
                else:
                    assert stat.shape in ((3,),), 'Statistic must create a 1x3 result'
                    for i, suffix in enumerate('XYZ'):
                        compound_stat_name = stat_name + '_' + suffix
                        stats[n.name][compound_stat_name] = stat[i]

    for ns, modes in config['neuron'].items():
        for mode in modes:
            stat_name = _stat_name(ns, mode)
            stats[stat_name] = eval_stats(nm.get(ns, neurons), mode)

    return stats
Extract stats from neurons
257
5
234,643
def get_header(results):
    """Extracts the headers, using the first value in the dict as the template.

    Nested dict values expand to 'key:metric' column names.
    """
    header = ['name']
    template = next(iter(results.values()))
    for key, value in template.items():
        if isinstance(value, dict):
            header.extend('%s:%s' % (key, metric) for metric in value.keys())
        else:
            header.append(key)
    return header
Extracts the headers using the first value in the dict as the template
86
15
234,644
def generate_flattened_dict(headers, results):
    """Extract from ``results`` the fields in the ``headers`` list,
    yielding one flat row per result entry."""
    for name, values in results.items():
        row = []
        for header in headers:
            if header == 'name':
                row.append(name)
            elif ':' in header:
                neurite_type, metric = header.split(':')
                row.append(values[neurite_type][metric])
            else:
                row.append(values[header])
        yield row
extract from results the fields in the headers list
96
10
234,645
def add_child(self, tree):
    """Add a child to the list of this tree's children and return it."""
    tree.parent = self
    self.children.append(tree)
    return tree
Add a child to the list of this tree s children
25
11
234,646
def ipreorder(self):
    """Depth-first pre-order iteration of tree nodes."""
    stack = deque((self,))
    while stack:
        current = stack.pop()
        stack.extend(reversed(current.children))
        yield current
Depth - first pre - order iteration of tree nodes
47
10
234,647
def ipostorder(self):
    """Depth-first post-order iteration of tree nodes."""
    stack = [self, ]
    visited = set()
    while stack:
        current = stack[-1]
        if current not in visited:
            # first encounter: push children, yield on second visit
            visited.add(current)
            stack.extend(reversed(current.children))
        else:
            stack.pop()
            yield current
Depth - first post - order iteration of tree nodes
71
10
234,648
def deprecated(fun_name=None, msg=""):
    """Issue a deprecation warning for a function (decorator factory)."""
    def _deprecated(fun):
        '''Issue a deprecation warning for a function'''
        @wraps(fun)
        def _wrapper(*args, **kwargs):
            '''Issue deprecation warning and forward arguments to fun'''
            name = fun_name if fun_name is not None else fun.__name__
            _warn_deprecated('Call to deprecated function %s. %s' % (name, msg))
            return fun(*args, **kwargs)
        return _wrapper
    return _deprecated
Issue a deprecation warning for a function
129
9
234,649
def check_wrapper(fun):
    """Decorate a checking function so its result carries a readable title."""
    @wraps(fun)
    def _wrapper(*args, **kwargs):
        '''Sets the title property of the result of running a checker'''
        title = fun.__name__.replace('_', ' ').capitalize()
        result = fun(*args, **kwargs)
        result.title = title
        return result
    return _wrapper
Decorate a checking function
88
5
234,650
def run(self, path):
    """Test a bunch of files and return a summary JSON report."""
    SEPARATOR = '=' * 40
    summary = {}
    res = True

    for _f in utils.get_files_by_path(path):
        L.info(SEPARATOR)
        status, summ = self._check_file(_f)
        res &= status
        if summ is not None:
            summary.update(summ)

    L.info(SEPARATOR)
    status = 'PASS' if res else 'FAIL'
    return {'files': summary, 'STATUS': status}
Test a bunch of files and return a summary JSON report
114
11
234,651
def _do_check(self, obj, check_module, check_str):
    """Run a check function on ``obj``, passing configured options if any."""
    opts = self._config['options']
    checker = check_wrapper(getattr(check_module, check_str))
    if check_str in opts:
        fargs = opts[check_str]
        if isinstance(fargs, list):
            out = checker(obj, *fargs)
        else:
            out = checker(obj, fargs)
    else:
        out = checker(obj)

    try:
        if out.info:
            L.debug('%s: %d failing ids detected: %s',
                    out.title, len(out.info), out.info)
    except TypeError:  # pragma: no cover
        pass

    return out
Run a check function on obj
187
6
234,652
def _check_loop(self, obj, check_mod_str):
    """Run all the checks in a check_module; return (all_passed, summary)."""
    check_module = self._check_modules[check_mod_str]
    checks = self._config['checks'][check_mod_str]

    result = True
    summary = OrderedDict()
    for check in checks:
        ok = self._do_check(obj, check_module, check)
        summary[ok.title] = ok.status
        result &= ok.status

    return result, summary
Run all the checks in a check_module
105
9
234,653
def _check_file(self, f):
    """Run tests on a morphology file; return (passed, {file: summary})."""
    L.info('File: %s', f)

    full_result = True
    full_summary = OrderedDict()
    try:
        data = load_data(f)
    except Exception as e:  # pylint: disable=W0703
        L.error('Failed to load data... skipping tests for this file')
        L.error(e.args)
        return False, {f: OrderedDict([('ALL', False)])}

    try:
        result, summary = self._check_loop(data, 'structural_checks')
        full_result &= result
        full_summary.update(summary)

        nrn = fst_core.FstNeuron(data)
        result, summary = self._check_loop(nrn, 'neuron_checks')
        full_result &= result
        full_summary.update(summary)
    except Exception as e:  # pylint: disable=W0703
        L.error('Check failed: %s', str(type(e)) + str(e.args))
        full_result = False

    full_summary['ALL'] = full_result
    for m, s in full_summary.items():
        self._log_msg(m, s)

    return full_result, {f: full_summary}
Run tests on a morphology file
293
6
234,654
def _log_msg(self, msg, ok):
    """Helper to log message to the right level (INFO on pass, ERROR on fail)."""
    if self._config['color']:
        CGREEN, CRED, CEND = '\033[92m', '\033[91m', '\033[0m'
    else:
        CGREEN = CRED = CEND = ''

    LOG_LEVELS = {False: logging.ERROR, True: logging.INFO}
    # pylint: disable=logging-not-lazy
    L.log(LOG_LEVELS[ok], '%35s %s' + CEND,
          msg, CGREEN + 'PASS' if ok else CRED + 'FAIL')
Helper to log message to the right level
144
8
234,655
def _sanitize_config ( config ) : if 'checks' in config : checks = config [ 'checks' ] if 'structural_checks' not in checks : checks [ 'structural_checks' ] = [ ] if 'neuron_checks' not in checks : checks [ 'neuron_checks' ] = [ ] else : raise ConfigError ( 'Need to have "checks" in the config' ) if 'options' not in config : L . debug ( 'Using default options' ) config [ 'options' ] = { } if 'color' not in config : config [ 'color' ] = False return config
check that the config has the correct keys add missing keys if necessary
135
13
234,656
def read(filename, data_wrapper=DataWrapper):
    """Read an SWC file and return the data wrapped by ``data_wrapper``.

    Single-row files are reshaped to 2D before column reordering.
    """
    data = np.loadtxt(filename)
    if len(np.shape(data)) == 1:
        data = np.reshape(data, (1, -1))
    data = data[:, [X, Y, Z, R, TYPE, ID, P]]
    return data_wrapper(data, 'SWC', None)
Read an SWC file and return a tuple of data format .
88
13
234,657
def _merge_sections ( sec_a , sec_b ) : sec_b . ids = list ( sec_a . ids ) + list ( sec_b . ids [ 1 : ] ) sec_b . ntype = sec_a . ntype sec_b . pid = sec_a . pid sec_a . ids = [ ] sec_a . pid = - 1 sec_a . ntype = 0
Merge two sections
95
4
234,658
def _section_end_points(structure_block, id_map):
    """Get the section end-points.

    End points are soma points referenced as parents by neurites, plus
    every point with zero or more than one child (leaf or multifurcation).
    """
    soma_idx = structure_block[:, TYPE] == POINT_TYPE.SOMA
    soma_ids = structure_block[soma_idx, ID]
    neurite_idx = structure_block[:, TYPE] != POINT_TYPE.SOMA
    neurite_rows = structure_block[neurite_idx, :]
    soma_end_pts = set(id_map[id_]
                       for id_ in soma_ids[np.in1d(soma_ids, neurite_rows[:, PID])])

    # end points have either no children or more than one
    # ie: leaf or multifurcation nodes
    n_children = defaultdict(int)
    for row in structure_block:
        n_children[row[PID]] += 1

    end_pts = set(i for i, row in enumerate(structure_block)
                  if n_children[row[ID]] != 1)

    return end_pts.union(soma_end_pts)
Get the section end - points
240
6
234,659
def _extract_sections(data_block):
    """Make a list of sections from an SWC-style data wrapper block.

    Fix: ``np.int`` was deprecated in NumPy 1.20 and removed in 1.24;
    use the builtin ``int`` (same meaning) for the astype call.
    """
    structure_block = data_block[:, COLS.TYPE:COLS.COL_COUNT].astype(int)

    # SWC ID -> structure_block position
    id_map = {-1: -1}
    for i, row in enumerate(structure_block):
        id_map[row[ID]] = i

    # end points have either no children, more than one, or are the start
    # of a new gap
    sec_end_pts = _section_end_points(structure_block, id_map)

    # a 'gap' is when a section has part of it's segments interleaved
    # with those of another section
    gap_sections = set()

    sections = []

    def new_section():
        '''new_section'''
        sections.append(DataBlockSection())
        return sections[-1]

    curr_section = new_section()
    parent_section = {-1: -1}

    for row in structure_block:
        row_id = id_map[row[ID]]
        parent_id = id_map[row[PID]]
        if not curr_section.ids:
            # first in section point is parent
            curr_section.ids.append(parent_id)
            curr_section.ntype = row[TYPE]
        gap = parent_id != curr_section.ids[-1]

        # If parent is not the previous point, create a section end-point.
        # Else add the point to this section
        if gap:
            sec_end_pts.add(row_id)
        else:
            curr_section.ids.append(row_id)

        if row_id in sec_end_pts:
            parent_section[curr_section.ids[-1]] = len(sections) - 1
            # Parent-child discontinuity section
            if gap:
                curr_section = new_section()
                curr_section.ids.extend((parent_id, row_id))
                curr_section.ntype = row[TYPE]
                gap_sections.add(len(sections) - 2)
            elif row_id != len(data_block) - 1:
                # avoid creating an extra DataBlockSection for last row if it's a leaf
                curr_section = new_section()

    for sec in sections:
        # get the section parent ID from the id of the first point.
        if sec.ids:
            sec.pid = parent_section[sec.ids[0]]

        # join gap sections and "disable" first half
        if sec.pid in gap_sections:
            _merge_sections(sections[sec.pid], sec)

    # TODO find a way to remove empty sections. Currently they are
    # required to maintain tree integrity.
    return sections
Make a list of sections from an SWC - style data wrapper block
604
14
234,660
def neurite_root_section_ids(self):
    """Get the section IDs of the initial neurite sections.

    A root neurite section is a non-soma section whose parent is a soma
    section.
    """
    sec = self.sections
    return [i for i, ss in enumerate(sec)
            if ss.pid > -1 and (sec[ss.pid].ntype == POINT_TYPE.SOMA and
                                ss.ntype != POINT_TYPE.SOMA)]
Get the section IDs of the initial neurite sections
72
11
234,661
def soma_points(self):
    """Get the soma points (rows of the data block typed as soma)."""
    db = self.data_block
    soma_mask = db[:, COLS.TYPE] == POINT_TYPE.SOMA
    return db[soma_mask]
Get the soma points
37
5
234,662
def add_section(self, id_, parent_id, section_type, points):
    """Add a section, asserting that its id is not already registered."""
    # L.debug('Adding section %d, with parent %d, of type: %d with count: %d',
    #         id_, parent_id, section_type, len(points))
    assert id_ not in self.sections, 'id %s already exists in sections' % id_
    self.sections[id_] = BlockNeuronBuilder.BlockSection(parent_id, section_type, points)
add a section
110
3
234,663
def _make_datablock(self):
    """Make a data_block and sections list as required by DataWrapper.

    Fix: ``np.float`` was deprecated in NumPy 1.20 and removed in 1.24;
    use the builtin ``float`` (same dtype) for the empty allocation.
    """
    section_ids = sorted(self.sections)

    # create all insertion id's, this needs to be done ahead of time
    # as some of the children may have a lower id than their parents
    id_to_insert_id = {}
    row_count = 0
    for section_id in section_ids:
        row_count += len(self.sections[section_id].points)
        id_to_insert_id[section_id] = row_count - 1

    datablock = np.empty((row_count, COLS.COL_COUNT), dtype=float)
    datablock[:, COLS.ID] = np.arange(len(datablock))
    datablock[:, COLS.P] = datablock[:, COLS.ID] - 1

    sections = []
    insert_index = 0
    for id_ in section_ids:
        sec = self.sections[id_]
        points, section_type, parent_id = sec.points, sec.section_type, sec.parent_id
        idx = slice(insert_index, insert_index + len(points))
        datablock[idx, COLS.XYZR] = points
        datablock[idx, COLS.TYPE] = section_type
        datablock[idx.start, COLS.P] = id_to_insert_id.get(parent_id, ROOT_ID)
        sections.append(DataBlockSection(idx, section_type, parent_id))
        insert_index = idx.stop

    return datablock, sections
Make a data_block and sections list as required by DataWrapper
356
14
234,664
def _check_consistency(self):
    """See if the sections have obvious errors (currently: soma count != 1)."""
    type_count = defaultdict(int)
    for _, section in sorted(self.sections.items()):
        type_count[section.section_type] += 1

    if type_count[POINT_TYPE.SOMA] != 1:
        L.info('Have %d somas, expected 1', type_count[POINT_TYPE.SOMA])
see if the sections have obvious errors
92
7
234,665
def get_datawrapper(self, file_format='BlockNeuronBuilder', data_wrapper=DataWrapper):
    """Returns a DataWrapper built from the accumulated sections."""
    self._check_consistency()
    datablock, sections = self._make_datablock()
    return data_wrapper(datablock, file_format, sections)
returns a DataWrapper
71
6
234,666
def _is_morphology_file ( filepath ) : return ( os . path . isfile ( filepath ) and os . path . splitext ( filepath ) [ 1 ] . lower ( ) in ( '.swc' , '.h5' , '.asc' ) )
Check if filepath is a file with one of morphology file extensions .
61
14
234,667
def get_morph_files(directory):
    """Get a list of all morphology files in a directory."""
    candidates = (os.path.join(directory, m) for m in os.listdir(directory))
    return list(filter(_is_morphology_file, candidates))
Get a list of all morphology files in a directory
52
10
234,668
def get_files_by_path(path):
    """Get a file or set of files from a file path.

    A plain file is returned as a one-element list; a directory yields
    all morphology files inside it; anything else raises IOError.
    """
    if os.path.isfile(path):
        return [path]
    if os.path.isdir(path):
        return get_morph_files(path)
    raise IOError('Invalid data path %s' % path)
Get a file or set of files from a file path
61
11
234,669
def load_neuron(handle, reader=None):
    """Build section trees from an h5 or swc file.

    The neuron name is derived from the file basename when ``handle``
    is a path string.
    """
    rdw = load_data(handle, reader)
    if isinstance(handle, StringType):
        name = os.path.splitext(os.path.basename(handle))[0]
    else:
        name = None
    return FstNeuron(rdw, name)
Build section trees from an h5 or swc file
75
11
234,670
def load_neurons(neurons, neuron_loader=load_neuron, name=None,
                 population_class=Population, ignored_exceptions=()):
    """Create a population object from all morphologies in a directory
    or from morphologies in a list of file names.

    NeuroMError subclasses listed in ``ignored_exceptions`` are logged
    and skipped; any other NeuroMError propagates.
    """
    if isinstance(neurons, (list, tuple)):
        files = neurons
        name = name if name is not None else 'Population'
    elif isinstance(neurons, StringType):
        files = get_files_by_path(neurons)
        name = name if name is not None else os.path.basename(neurons)

    ignored_exceptions = tuple(ignored_exceptions)
    pop = []
    for f in files:
        try:
            pop.append(neuron_loader(f))
        except NeuroMError as e:
            if isinstance(e, ignored_exceptions):
                L.info('Ignoring exception "%s" for file %s', e, os.path.basename(f))
                continue
            raise

    return population_class(pop, name=name)
Create a population object from all morphologies in a directory or from morphologies in a list of file names
201
22
234,671
def _get_file(handle):
    """Return a filename for *handle*.

    Plain path strings are returned unchanged; file-like objects are copied
    into a fresh temporary file whose name is returned.
    """
    if not isinstance(handle, IOBase):
        return handle

    fd, temp_file = tempfile.mkstemp(str(uuid.uuid4()), prefix='neurom-')
    os.close(fd)
    with open(temp_file, 'w') as out:
        handle.seek(0)
        shutil.copyfileobj(handle, out)
    return temp_file
Returns the filename of the file to read
101
8
234,672
def load_data(handle, reader=None):
    """Load a morphology file into a raw data wrapper.

    The reader is inferred from the file extension unless given explicitly.

    Raises:
        NeuroMError: when no loader exists for the extension.
        RawDataError: when the file cannot be parsed.
    """
    if not reader:
        reader = os.path.splitext(handle)[1][1:].lower()

    if reader not in _READERS:
        raise NeuroMError('Do not have a loader for "%s" extension' % reader)

    filename = _get_file(handle)
    try:
        return _READERS[reader](filename)
    except Exception as e:
        L.exception('Error reading file %s, using "%s" loader', filename, reader)
        raise RawDataError('Error reading file %s:\n%s' % (filename, str(e)))
Unpack data into a raw data wrapper
141
8
234,673
def _load_h5(filename):
    """Read an HDF5 morphology file.

    The hdf5 module (and hence h5py) is imported lazily so it is only
    required when an .h5 file is actually loaded.
    """
    from neurom.io import hdf5
    return hdf5.read(filename, remove_duplicates=False, data_wrapper=DataWrapper)
Delay loading of h5py until it is needed
44
11
234,674
def _filepath(self, name):
    """Resolve the morphology file path for *name*.

    With a configured extension the path is joined directly; otherwise the
    directory is globbed for any morphology file matching the name.

    Raises:
        NeuroMError: when no matching morphology file is found.
    """
    if self.file_ext is not None:
        return os.path.join(self.directory, name + self.file_ext)

    candidates = glob.glob(os.path.join(self.directory, name + ".*"))
    try:
        return next(filter(_is_morphology_file, candidates))
    except StopIteration:
        raise NeuroMError("Can not find morphology file for '%s' " % name)
File path to name morphology file .
106
7
234,675
def draw(obj, mode='2d', **kwargs):
    """Draw a morphology object (Neuron, Tree/Neurite or Soma).

    Raises:
        InvalidDrawModeError: for an unknown *mode*.
        NotDrawableError: when *obj* has no registered viewer.
    """
    if mode not in MODES:
        raise InvalidDrawModeError('Invalid drawing mode %s' % mode)

    # 3d plots need a dedicated projection; 2d and dendrogram share a plain figure
    if mode in ('2d', 'dendrogram'):
        fig, ax = common.get_figure()
    else:
        fig, ax = common.get_figure(params={'projection': '3d'})

    if isinstance(obj, Neuron):
        tag = 'neuron'
    elif isinstance(obj, (Tree, Neurite)):
        tag = 'tree'
    elif isinstance(obj, Soma):
        tag = 'soma'
    else:
        raise NotDrawableError('draw not implemented for %s' % obj.__class__)

    viewer = '%s_%s' % (tag, mode)
    try:
        plotter = _VIEWERS[viewer]
    except KeyError:
        raise NotDrawableError('No drawer for class %s, mode=%s' % (obj.__class__, mode))

    output_path = kwargs.pop('output_path', None)
    plotter(ax, obj, **kwargs)

    if mode != 'dendrogram':
        common.plot_style(fig=fig, ax=ax, **kwargs)
    if output_path:
        common.save_plot(fig=fig, output_path=output_path, **kwargs)
    return fig, ax
Draw a morphology object
325
4
234,676
def population_feature_values(pops, feature):
    """Extract feature values per population.

    Returns one list of values per population; when neurons return sequences
    of values (rather than scalars) the per-neuron lists are flattened.
    """
    values_per_pop = []
    for pop in pops:
        per_neuron = [getattr(neuron, 'get_' + feature)() for neuron in pop.neurons]
        # ugly hack to chain in case of list of lists
        if any(isinstance(v, (list, np.ndarray)) for v in per_neuron):
            per_neuron = list(chain(*per_neuron))
        values_per_pop.append(per_neuron)
    return values_per_pop
Extracts feature values per population
124
7
234,677
def get_segment(neuron, section_id, segment_id):
    """Return the two-point (x, y, z, r) segment *segment_id* of section
    *section_id* in *neuron*.
    """
    section = neuron.sections[section_id]
    return section.points[segment_id:segment_id + 2][:, COLS.XYZR]
Get a segment given a section and segment id
52
9
234,678
def extract_data(data_path, feature):
    """Load neurons from *data_path*, extract *feature* and fit a distribution.

    Returns the optimal distribution, its parameters and the min/max values.
    """
    population = nm.load_neurons(data_path)
    per_neuron = [nm.get(feature, neuron) for neuron in population]
    flattened = list(chain(*per_neuron))
    return stats.optimal_distribution(flattened)
Loads a list of neurons, extracts feature and transforms the fitted distribution in the correct format. Returns the optimal distribution, corresponding parameters, minimum and maximum values.
70
30
234,679
def bifurcation_partition(bif_point):
    """Partition at a bifurcation point: the ratio of the larger to the
    smaller subtree size (node count) of the two children.
    """
    assert len(bif_point.children) == 2, 'A bifurcation point must have exactly 2 children'
    sizes = [float(sum(1 for _ in child.ipreorder()))
             for child in bif_point.children]
    return max(sizes) / min(sizes)
Calculate the partition at a bifurcation point
109
12
234,680
def partition_pair(bif_point):
    """Return the subtree sizes (as floats) of the two children of *bif_point*."""
    first = float(sum(1 for _ in bif_point.children[0].ipreorder()))
    second = float(sum(1 for _ in bif_point.children[1].ipreorder()))
    return (first, second)
Calculate the partition pairs at a bifurcation point
71
13
234,681
def _match_section(section, match):
    """Return the *match* value of the first string element found among the
    first five entries of *section*, or None if there is none.
    """
    # TODO: rewrite this so it is more clear, and handles sets & dictionaries for matching
    for i in range(5):
        if i >= len(section):
            return None
        element = section[i]
        if isinstance(element, StringType) and element in match:
            return match[element]
    return None
checks whether the type of section is in the match dictionary
79
11
234,682
def _parse_section(token_iter):
    """Recursively build the tree for one parenthesised s-expression,
    dropping nested sections matching UNWANTED_SECTIONS.
    """
    sexp = []
    for token in token_iter:
        if token == ')':
            return sexp
        if token == '(':
            child = _parse_section(token_iter)
            if not _match_section(child, UNWANTED_SECTIONS):
                sexp.append(child)
        else:
            sexp.append(token)
    return sexp
take a stream of tokens and create the tree structure that is defined by the s - expressions
97
18
234,683
def _parse_sections(morph_fd):
    """Return all wanted top-level s-expression sections found in the file."""
    sections = []
    token_iter = _get_tokens(morph_fd)
    for token in token_iter:
        # every '(' at this level opens a top-level section
        if token == '(':
            section = _parse_section(token_iter)
            if not _match_section(section, UNWANTED_SECTIONS):
                sections.append(section)
    return sections
returns array of all the sections that exist
87
9
234,684
def _flatten_subsection(subsection, _type, offset, parent):
    """Yield flattened (x, y, z, r, type, id, parent_id) rows for a subsection.

    Rows led by a string are points (4 or 5 values; the stored diameter is
    halved into a radius). Rows led by a list are '|'-separated splits,
    recursed into with the point before the split as their parent.
    """
    for row in subsection:
        # TODO: Figure out what these correspond to in neurolucida
        if row in ('Low', 'Generated', 'High',):
            continue
        if isinstance(row[0], StringType):
            if len(row) in (4, 5,):
                if len(row) == 5:
                    assert row[4][0] == 'S', \
                        'Only known usage of a fifth member is Sn, found: %s' % row[4][0]
                yield (float(row[0]), float(row[1]), float(row[2]),
                       float(row[3]) / 2., _type, offset, parent)
                parent = offset
                offset += 1
        elif isinstance(row[0], list):
            split_parent = offset - 1
            start_offset = 0
            # carve the row into slices delimited by '|'
            slices = []
            start = 0
            for i, value in enumerate(row):
                if value == '|':
                    slices.append(slice(start + start_offset, i))
                    start = i + 1
            slices.append(slice(start + start_offset, len(row)))
            for split_slice in slices:
                for flat_row in _flatten_subsection(row[split_slice], _type,
                                                    offset, split_parent):
                    offset += 1
                    yield flat_row
Flatten a subsection from its nested version
309
8
234,685
def _extract_section(section):
    """Flatten one top-level section into a numpy array of point rows.

    Returns None for skipped sections or sections of unwanted type.
    """
    # sections with only one element will be skipped,
    if len(section) == 1:
        assert section[0] == 'Sections', \
            ('Only known usage of a single Section content is "Sections", found %s' %
             section[0])
        return None

    # try and detect type
    _type = WANTED_SECTIONS.get(section[0][0], None)
    start = 1

    # CellBody often has [['"CellBody"'], ['CellBody'] as its first two elements
    if _type is None:
        _type = WANTED_SECTIONS.get(section[1][0], None)
        if _type is None:  # can't determine the type
            return None
        start = 2

    parent = -1 if _type == POINT_TYPE.SOMA else 0
    subsection_iter = _flatten_subsection(section[start:], _type, offset=0,
                                          parent=parent)
    return np.array([row for row in subsection_iter])
Find top level sections and get their flat contents and append them all
226
13
234,686
def _sections_to_raw_data(sections):
    """Assemble soma and neurite sections into neurom's raw_data block.

    The soma occupies the first rows; each neurite's ids and parent ids are
    shifted by its row offset and its root is re-parented onto the last
    soma point.
    """
    soma = None
    neurites = []
    for section in sections:
        neurite = _extract_section(section)
        if neurite is None:
            continue
        if neurite[0][COLS.TYPE] == POINT_TYPE.SOMA:
            assert soma is None, 'Multiple somas defined in file'
            soma = neurite
        else:
            neurites.append(neurite)

    assert soma is not None, 'Missing CellBody element (ie. soma)'

    total_length = len(soma) + sum(len(neurite) for neurite in neurites)
    ret = np.zeros((total_length, 7,), dtype=np.float64)

    pos = len(soma)
    ret[0:pos, :] = soma
    for neurite in neurites:
        end = pos + len(neurite)
        ret[pos:end, :] = neurite
        ret[pos:end, COLS.P] += pos
        ret[pos:end, COLS.ID] += pos
        # TODO: attach the neurite at the closest point on the soma
        ret[pos, COLS.P] = len(soma) - 1
        pos = end
    return ret
convert list of sections into the raw_data format used in neurom
271
15
234,687
def read(morph_file, data_wrapper=DataWrapper):
    """Parse a Neurolucida .asc file into a data wrapper.

    Experimental: no guarantees on parsing ability or output correctness.
    """
    msg = ('This is an experimental reader. '
           'There are no guarantees regarding ability to parse '
           'Neurolucida .asc files or correctness of output.')
    warnings.warn(msg)
    L.warning(msg)

    with open(morph_file, encoding='utf-8', errors='replace') as morph_fd:
        sections = _parse_sections(morph_fd)
    raw_data = _sections_to_raw_data(sections)
    return data_wrapper(raw_data, 'NL-ASCII')
return a raw_data np . array with the full neuron and the format of the file suitable to be wrapped by DataWrapper
132
26
234,688
def stats(data):
    """Summary statistics (len, mean, sum, std, min, max) for *data*."""
    summary = {'len': len(data)}
    summary['mean'] = np.mean(data)
    summary['sum'] = np.sum(data)
    summary['std'] = np.std(data)
    summary['min'] = np.min(data)
    summary['max'] = np.max(data)
    return summary
Dictionary with summary stats for data
72
7
234,689
def get_config(config, default_config):
    """Load a YAML configuration file, falling back to *default_config*.

    Parameters:
        config: path to a YAML file, or a falsy value to use the default.
        default_config: path used (with a warning) when *config* is falsy.

    Raises:
        ConfigError: when the file is not valid YAML.
    """
    if not config:
        logging.warning('Using default config: %s', default_config)
        config = default_config

    try:
        with open(config, 'r') as config_file:
            # safe_load avoids arbitrary Python object construction from the
            # file; yaml.load without a Loader is unsafe and deprecated
            # since PyYAML 5.1
            return yaml.safe_load(config_file)
    except yaml.YAMLError as e:
        # YAMLError covers the previously-listed Reader/Parser/Scanner errors
        raise ConfigError('Invalid yaml file: \n %s' % str(e))
Load configuration from file if in config else use default
116
10
234,690
def soma_surface_area(nrn, neurite_type=NeuriteType.soma):
    """Surface area of a neuron's soma, modelled as a sphere of radius
    ``nrn.soma.radius``.
    """
    assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
    return 4 * math.pi * nrn.soma.radius ** 2
Get the surface area of a neuron s soma .
65
11
234,691
def soma_surface_areas(nrn_pop, neurite_type=NeuriteType.soma):
    """Soma surface areas for every neuron in a population."""
    nrns = neuron_population(nrn_pop)
    assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
    return [soma_surface_area(neuron) for neuron in nrns]
Get the surface areas of the somata in a population of neurons
83
13
234,692
def soma_radii(nrn_pop, neurite_type=NeuriteType.soma):
    """Soma radii for every neuron in a population."""
    assert neurite_type == NeuriteType.soma, 'Neurite type must be soma'
    return [neuron.soma.radius for neuron in neuron_population(nrn_pop)]
Get the radii of the somata of a population of neurons
78
13
234,693
def trunk_section_lengths(nrn, neurite_type=NeuriteType.all):
    """Lengths of the trunk (root) sections of a neuron's selected neurites."""
    wanted = is_type(neurite_type)
    return [morphmath.section_length(neurite.root_node.points)
            for neurite in nrn.neurites if wanted(neurite)]
list of lengths of trunk sections of neurites in a neuron
72
12
234,694
def trunk_origin_radii(nrn, neurite_type=NeuriteType.all):
    """Radii of the first trunk point of each selected neurite."""
    wanted = is_type(neurite_type)
    return [neurite.root_node.points[0][COLS.R]
            for neurite in nrn.neurites if wanted(neurite)]
radii of the trunk sections of neurites in a neuron
73
12
234,695
def trunk_origin_azimuths(nrn, neurite_type=NeuriteType.all):
    """Trunk origin azimuths (arctan2 of the z/x components of the vector
    from the trunk start to the soma center) for a neuron or population.
    """
    wanted = is_type(neurite_type)
    nrns = neuron_population(nrn)

    def _azimuth(section, soma):
        '''Azimuth of a section'''
        vector = morphmath.vector(section[0], soma.center)
        return np.arctan2(vector[COLS.Z], vector[COLS.X])

    return [_azimuth(neurite.root_node.points, neuron.soma)
            for neuron in nrns
            for neurite in neuron.neurites if wanted(neurite)]
Get a list of all the trunk origin azimuths of a neuron or population
156
17
234,696
def trunk_origin_elevations(nrn, neurite_type=NeuriteType.all):
    """Trunk origin elevations (arcsin of the normalized y component of the
    vector from the trunk start to the soma center) for a neuron or population.

    Raises:
        ValueError: when a trunk starts (almost) at the soma center.
    """
    wanted = is_type(neurite_type)
    nrns = neuron_population(nrn)

    def _elevation(section, soma):
        '''Elevation of a section'''
        vector = morphmath.vector(section[0], soma.center)
        norm_vector = np.linalg.norm(vector)
        if norm_vector >= np.finfo(type(norm_vector)).eps:
            return np.arcsin(vector[COLS.Y] / norm_vector)
        raise ValueError("Norm of vector between soma center and section is almost zero.")

    return [_elevation(neurite.root_node.points, neuron.soma)
            for neuron in nrns
            for neurite in neuron.neurites if wanted(neurite)]
Get a list of all the trunk origin elevations of a neuron or population
203
15
234,697
def trunk_vectors(nrn, neurite_type=NeuriteType.all):
    """Vectors between the first point of each selected trunk and the soma
    center, for a neuron or population.
    """
    wanted = is_type(neurite_type)
    nrns = neuron_population(nrn)
    return np.array([morphmath.vector(neurite.root_node.points[0], neuron.soma.center)
                     for neuron in nrns
                     for neurite in neuron.neurites if wanted(neurite)])
Calculates the vectors between all the trunks of the neuron and the soma center .
99
19
234,698
def trunk_angles(nrn, neurite_type=NeuriteType.all):
    """Angles between consecutive trunks of the neuron.

    Angles are measured in the x-y plane, with trunks ordered from the
    y axis, anticlockwise.
    """
    vectors = trunk_vectors(nrn, neurite_type=neurite_type)
    # In order to avoid the failure of the process in case the neurite_type does not exist
    if not vectors.size:
        return []

    def _sort_angle(p1, p2):
        """Angle between p1-p2 to sort vectors"""
        ang1 = np.arctan2(*p1[::-1])
        ang2 = np.arctan2(*p2[::-1])
        return ang1 - ang2

    # Sorting angles according to x-y plane
    order = np.argsort(np.array([_sort_angle(v / np.linalg.norm(v), [0, 1])
                                 for v in vectors[:, 0:2]]))
    ordered_vectors = vectors[order][:, [COLS.X, COLS.Y]]

    return [morphmath.angle_between_vectors(ordered_vectors[i], ordered_vectors[i - 1])
            for i, _ in enumerate(ordered_vectors)]
Calculates the angles between all the trunks of the neuron. The angles are defined on the x-y plane and the trees are sorted from the y axis, anticlockwise.
272
39
234,699
def sholl_crossings(neurites, center, radii):
    """Count neurite segments crossing each radius in *radii* around *center*."""
    def _count_crossings(neurite, radius):
        '''count_crossings of segments in neurite with radius'''
        r2 = radius ** 2
        count = 0
        for start, end in iter_segments(neurite):
            start_dist2 = morphmath.point_dist2(center, start)
            end_dist2 = morphmath.point_dist2(center, end)
            # a segment crosses the sphere when its endpoints straddle the radius
            if start_dist2 <= r2 <= end_dist2 or end_dist2 <= r2 <= start_dist2:
                count += 1
        return count

    return np.array([sum(_count_crossings(neurite, r)
                         for neurite in iter_neurites(neurites))
                     for r in radii])
calculate crossings of neurites
176
7