idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
234,700
def sholl_frequency ( nrn , neurite_type = NeuriteType . all , step_size = 10 ) : nrns = neuron_population ( nrn ) neurite_filter = is_type ( neurite_type ) min_soma_edge = float ( 'Inf' ) max_radii = 0 neurites_list = [ ] for neuron in nrns : neurites_list . extend ( ( ( neurites , neuron . soma . center ) for neurites in neuron . neurites if neurite_filter ( neurites ) ) ) min_soma_edge = min ( min_soma_edge , neuron . soma . radius ) max_radii = max ( max_radii , np . max ( np . abs ( bounding_box ( neuron ) ) ) ) radii = np . arange ( min_soma_edge , max_radii + step_size , step_size ) ret = np . zeros_like ( radii ) for neurites , center in neurites_list : ret += sholl_crossings ( neurites , center , radii ) return ret
perform Sholl frequency calculations on a population of neurites
246
12
234,701
def dist_points ( bin_edges , d ) : bc = bin_centers ( bin_edges ) if d is not None : d = DISTS [ d [ 'type' ] ] ( d , bc ) return d , bc
Return an array of values according to a distribution
52
9
234,702
def calc_limits ( data , dist = None , padding = 0.25 ) : dmin = sys . float_info . max if dist is None else dist . get ( 'min' , sys . float_info . max ) dmax = sys . float_info . min if dist is None else dist . get ( 'max' , sys . float_info . min ) _min = min ( min ( data ) , dmin ) _max = max ( max ( data ) , dmax ) padding = padding * ( _max - _min ) return _min - padding , _max + padding
Calculate a suitable range for a histogram
127
10
234,703
def load_neurite_features ( filepath ) : stuff = defaultdict ( lambda : defaultdict ( list ) ) nrns = nm . load_neurons ( filepath ) # unpack data into arrays for nrn in nrns : for t in NEURITES_ : for feat in FEATURES : stuff [ feat ] [ str ( t ) . split ( '.' ) [ 1 ] ] . extend ( nm . get ( feat , nrn , neurite_type = t ) ) return stuff
Unpack relevant data into megadict
111
8
234,704
def main ( data_dir , mtype_file ) : # pylint: disable=too-many-locals # data structure to store results stuff = load_neurite_features ( data_dir ) sim_params = json . load ( open ( mtype_file ) ) # load histograms, distribution parameter sets and figures into arrays. # To plot figures, do # plots[i].fig.show() # To modify an axis, do # plots[i].ax.something() _plots = [ ] for feat , d in stuff . items ( ) : for typ , data in d . items ( ) : dist = sim_params [ 'components' ] [ typ ] . get ( feat , None ) print ( 'Type = %s, Feature = %s, Distribution = %s' % ( typ , feat , dist ) ) # if no data available, skip this feature if not data : print ( "No data found for feature %s (%s)" % ( feat , typ ) ) continue # print 'DATA', data num_bins = 100 limits = calc_limits ( data , dist ) bin_edges = np . linspace ( limits [ 0 ] , limits [ 1 ] , num_bins + 1 ) histo = np . histogram ( data , bin_edges , normed = True ) print ( 'PLOT LIMITS:' , limits ) # print 'DATA:', data # print 'BIN HEIGHT', histo[0] plot = Plot ( * view_utils . get_figure ( new_fig = True , subplot = 111 ) ) plot . ax . set_xlim ( * limits ) plot . ax . bar ( histo [ 1 ] [ : - 1 ] , histo [ 0 ] , width = bin_widths ( histo [ 1 ] ) ) dp , bc = dist_points ( histo [ 1 ] , dist ) # print 'BIN CENTERS:', bc, len(bc) if dp is not None : # print 'DIST POINTS:', dp, len(dp) plot . ax . plot ( bc , dp , 'r*' ) plot . ax . set_title ( '%s (%s)' % ( feat , typ ) ) _plots . append ( plot ) return _plots
Run the stuff
496
3
234,705
def extract_density ( population , plane = 'xy' , bins = 100 , neurite_type = NeuriteType . basal_dendrite ) : segment_midpoints = get_feat ( 'segment_midpoints' , population , neurite_type = neurite_type ) horiz = segment_midpoints [ : , 'xyz' . index ( plane [ 0 ] ) ] vert = segment_midpoints [ : , 'xyz' . index ( plane [ 1 ] ) ] return np . histogram2d ( np . array ( horiz ) , np . array ( vert ) , bins = ( bins , bins ) )
Extracts the 2d histogram of the center coordinates of segments in the selected plane .
139
19
234,706
def plot_density ( population , # pylint: disable=too-many-arguments, too-many-locals bins = 100 , new_fig = True , subplot = 111 , levels = None , plane = 'xy' , colorlabel = 'Nodes per unit area' , labelfontsize = 16 , color_map = 'Reds' , no_colorbar = False , threshold = 0.01 , neurite_type = NeuriteType . basal_dendrite , * * kwargs ) : fig , ax = common . get_figure ( new_fig = new_fig , subplot = subplot ) H1 , xedges1 , yedges1 = extract_density ( population , plane = plane , bins = bins , neurite_type = neurite_type ) mask = H1 < threshold # mask = H1==0 H2 = np . ma . masked_array ( H1 , mask ) getattr ( plt . cm , color_map ) . set_bad ( color = 'white' , alpha = None ) plots = ax . contourf ( ( xedges1 [ : - 1 ] + xedges1 [ 1 : ] ) / 2 , ( yedges1 [ : - 1 ] + yedges1 [ 1 : ] ) / 2 , np . transpose ( H2 ) , # / np.max(H2), cmap = getattr ( plt . cm , color_map ) , levels = levels ) if not no_colorbar : cbar = plt . colorbar ( plots ) cbar . ax . set_ylabel ( colorlabel , fontsize = labelfontsize ) kwargs [ 'title' ] = kwargs . get ( 'title' , '' ) kwargs [ 'xlabel' ] = kwargs . get ( 'xlabel' , plane [ 0 ] ) kwargs [ 'ylabel' ] = kwargs . get ( 'ylabel' , plane [ 1 ] ) return common . plot_style ( fig = fig , ax = ax , * * kwargs )
Plots the 2d histogram of the center coordinates of segments in the selected plane .
455
18
234,707
def plot_neuron_on_density ( population , # pylint: disable=too-many-arguments bins = 100 , new_fig = True , subplot = 111 , levels = None , plane = 'xy' , colorlabel = 'Nodes per unit area' , labelfontsize = 16 , color_map = 'Reds' , no_colorbar = False , threshold = 0.01 , neurite_type = NeuriteType . basal_dendrite , * * kwargs ) : _ , ax = common . get_figure ( new_fig = new_fig ) view . plot_tree ( ax , population . neurites [ 0 ] ) return plot_density ( population , plane = plane , bins = bins , new_fig = False , subplot = subplot , colorlabel = colorlabel , labelfontsize = labelfontsize , levels = levels , color_map = color_map , no_colorbar = no_colorbar , threshold = threshold , neurite_type = neurite_type , * * kwargs )
Plots the 2d histogram of the center coordinates of segments in the selected plane and superimposes the view of the first neurite of the collection .
229
32
234,708
def is_monotonic ( neurite , tol ) : for node in neurite . iter_sections ( ) : # check that points in section satisfy monotonicity sec = node . points for point_id in range ( len ( sec ) - 1 ) : if sec [ point_id + 1 ] [ COLS . R ] > sec [ point_id ] [ COLS . R ] + tol : return False # Check that section boundary points satisfy monotonicity if ( node . parent is not None and sec [ 0 ] [ COLS . R ] > node . parent . points [ - 1 ] [ COLS . R ] + tol ) : return False return True
Check if neurite tree is monotonic
144
9
234,709
def is_flat ( neurite , tol , method = 'tolerance' ) : ext = principal_direction_extent ( neurite . points [ : , COLS . XYZ ] ) assert method in ( 'tolerance' , 'ratio' ) , "Method must be one of 'tolerance', 'ratio'" if method == 'ratio' : sorted_ext = np . sort ( ext ) return sorted_ext [ 0 ] / sorted_ext [ 1 ] < float ( tol ) return any ( ext < float ( tol ) )
Check if neurite is flat using the given method
120
10
234,710
def get_flat_neurites ( neuron , tol = 0.1 , method = 'ratio' ) : return [ n for n in neuron . neurites if is_flat ( n , tol , method ) ]
Check if a neuron has neurites that are flat within a tolerance
49
13
234,711
def get_nonmonotonic_neurites ( neuron , tol = 1e-6 ) : return [ n for n in neuron . neurites if not is_monotonic ( n , tol ) ]
Get neurites that are not monotonic
47
9
234,712
def segment_centre_of_mass ( seg ) : h = mm . segment_length ( seg ) r0 = seg [ 0 ] [ COLS . R ] r1 = seg [ 1 ] [ COLS . R ] num = r0 * r0 + 2 * r0 * r1 + 3 * r1 * r1 denom = 4 * ( r0 * r0 + r0 * r1 + r1 * r1 ) centre_of_mass_z_loc = num / denom return seg [ 0 ] [ COLS . XYZ ] + ( centre_of_mass_z_loc / h ) * ( seg [ 1 ] [ COLS . XYZ ] - seg [ 0 ] [ COLS . XYZ ] )
Calculate and return centre of mass of a segment .
169
12
234,713
def neurite_centre_of_mass ( neurite ) : centre_of_mass = np . zeros ( 3 ) total_volume = 0 seg_vol = np . array ( map ( mm . segment_volume , nm . iter_segments ( neurite ) ) ) seg_centre_of_mass = np . array ( map ( segment_centre_of_mass , nm . iter_segments ( neurite ) ) ) # multiply array of scalars with array of arrays # http://stackoverflow.com/questions/5795700/multiply-numpy-array-of-scalars-by-array-of-vectors seg_centre_of_mass = seg_centre_of_mass * seg_vol [ : , np . newaxis ] centre_of_mass = np . sum ( seg_centre_of_mass , axis = 0 ) total_volume = np . sum ( seg_vol ) return centre_of_mass / total_volume
Calculate and return centre of mass of a neurite .
227
13
234,714
def distance_sqr ( point , seg ) : centre_of_mass = segment_centre_of_mass ( seg ) return sum ( pow ( np . subtract ( point , centre_of_mass ) , 2 ) )
Calculate and return square Euclidian distance from given point to centre of mass of given segment .
51
20
234,715
def radius_of_gyration ( neurite ) : centre_mass = neurite_centre_of_mass ( neurite ) sum_sqr_distance = 0 N = 0 dist_sqr = [ distance_sqr ( centre_mass , s ) for s in nm . iter_segments ( neurite ) ] sum_sqr_distance = np . sum ( dist_sqr ) N = len ( dist_sqr ) return np . sqrt ( sum_sqr_distance / N )
Calculate and return radius of gyration of a given neurite .
111
15
234,716
def view ( input_file , plane , backend ) : if backend == 'matplotlib' : from neurom . viewer import draw kwargs = { 'mode' : '3d' if plane == '3d' else '2d' , } if plane != '3d' : kwargs [ 'plane' ] = plane draw ( load_neuron ( input_file ) , * * kwargs ) else : from neurom . view . plotly import draw draw ( load_neuron ( input_file ) , plane = plane ) if backend == 'matplotlib' : import matplotlib . pyplot as plt plt . show ( )
A simple neuron viewer
144
4
234,717
def generate_annotation ( result , settings ) : if result . status : return "" header = ( "\n\n" "({label} ; MUK_ANNOTATION\n" " (Color {color}) ; MUK_ANNOTATION\n" " (Name \"{name}\") ; MUK_ANNOTATION" ) . format ( * * settings ) points = [ p for _ , _points in result . info for p in _points ] annotations = ( " ({0} {1} {2} 0.50) ; MUK_ANNOTATION" . format ( p [ COLS . X ] , p [ COLS . Y ] , p [ COLS . Z ] ) for p in points ) footer = ") ; MUK_ANNOTATION\n" return '\n' . join ( chain . from_iterable ( ( [ header ] , annotations , [ footer ] ) ) )
Generate the annotation for a given checker
200
9
234,718
def annotate ( results , settings ) : annotations = ( generate_annotation ( result , setting ) for result , setting in zip ( results , settings ) ) return '\n' . join ( annot for annot in annotations if annot )
Concatenate the annotations of all checkers
49
10
234,719
def as_point ( row ) : return Point ( row [ COLS . X ] , row [ COLS . Y ] , row [ COLS . Z ] , row [ COLS . R ] , int ( row [ COLS . TYPE ] ) )
Create a Point from a data block row
54
8
234,720
def create_superuser ( self , email , password , * * extra_fields ) : extra_fields . setdefault ( 'is_staff' , True ) extra_fields . setdefault ( 'is_superuser' , True ) if extra_fields . get ( 'is_staff' ) is not True : raise ValueError ( 'Superuser must have is_staff=True.' ) if extra_fields . get ( 'is_superuser' ) is not True : raise ValueError ( 'Superuser must have is_superuser=True.' ) return self . _create_user ( email , password , * * extra_fields )
Save new User with is_staff and is_superuser set to True
136
15
234,721
def load_file_contents ( file_path , as_list = True ) : abs_file_path = join ( HERE , file_path ) with open ( abs_file_path , encoding = 'utf-8' ) as file_pointer : if as_list : return file_pointer . read ( ) . splitlines ( ) return file_pointer . read ( )
Load file as string or list
81
6
234,722
def clean_password2 ( self ) : password1 = self . cleaned_data . get ( 'password1' ) password2 = self . cleaned_data . get ( 'password2' ) if password1 and password2 and password1 != password2 : self . add_error ( 'password1' , forms . ValidationError ( self . error_messages [ 'password_mismatch' ] , code = 'password_mismatch' , ) ) return password2
Check wether password 1 and password 2 are equivalent
102
10
234,723
def _post_clean ( self ) : super ( ) . _post_clean ( ) # updates self.instance with form data password = self . cleaned_data . get ( 'password1' ) if password : try : password_validation . validate_password ( password , self . instance ) except ValidationError as error : self . add_error ( 'password1' , error )
Run password validaton after clean methods
82
7
234,724
def clean ( self ) : super ( ) . clean ( ) self . email = self . __class__ . objects . normalize_email ( self . email )
Override default clean method to normalize email .
34
9
234,725
def normalize_feature_inputs ( ctx , param , value ) : for feature_like in value or ( '-' , ) : try : with click . open_file ( feature_like ) as src : for feature in iter_features ( iter ( src ) ) : yield feature except IOError : coords = list ( coords_from_query ( feature_like ) ) yield { 'type' : 'Feature' , 'properties' : { } , 'geometry' : { 'type' : 'Point' , 'coordinates' : coords } }
Click callback that normalizes feature input values .
122
9
234,726
def iter_features ( geojsonfile , func = None ) : func = func or ( lambda x : x ) first_line = next ( geojsonfile ) # Does the geojsonfile contain RS-delimited JSON sequences? if first_line . startswith ( u'\x1e' ) : text_buffer = first_line . strip ( u'\x1e' ) for line in geojsonfile : if line . startswith ( u'\x1e' ) : if text_buffer : obj = json . loads ( text_buffer ) if 'coordinates' in obj : obj = to_feature ( obj ) newfeat = func ( obj ) if newfeat : yield newfeat text_buffer = line . strip ( u'\x1e' ) else : text_buffer += line # complete our parsing with a for-else clause. else : obj = json . loads ( text_buffer ) if 'coordinates' in obj : obj = to_feature ( obj ) newfeat = func ( obj ) if newfeat : yield newfeat # If not, it may contains LF-delimited GeoJSON objects or a single # multi-line pretty-printed GeoJSON object. else : # Try to parse LF-delimited sequences of features or feature # collections produced by, e.g., `jq -c ...`. try : obj = json . loads ( first_line ) if obj [ 'type' ] == 'Feature' : newfeat = func ( obj ) if newfeat : yield newfeat for line in geojsonfile : newfeat = func ( json . loads ( line ) ) if newfeat : yield newfeat elif obj [ 'type' ] == 'FeatureCollection' : for feat in obj [ 'features' ] : newfeat = func ( feat ) if newfeat : yield newfeat elif 'coordinates' in obj : newfeat = func ( to_feature ( obj ) ) if newfeat : yield newfeat for line in geojsonfile : newfeat = func ( to_feature ( json . loads ( line ) ) ) if newfeat : yield newfeat # Indented or pretty-printed GeoJSON features or feature # collections will fail out of the try clause above since # they'll have no complete JSON object on their first line. # To handle these, we slurp in the entire file and parse its # text. except ValueError : text = "" . join ( chain ( [ first_line ] , geojsonfile ) ) obj = json . loads ( text ) if obj [ 'type' ] == 'Feature' : newfeat = func ( obj ) if newfeat : yield newfeat elif obj [ 'type' ] == 'FeatureCollection' : for feat in obj [ 'features' ] : newfeat = func ( feat ) if newfeat : yield newfeat elif 'coordinates' in obj : newfeat = func ( to_feature ( obj ) ) if newfeat : yield newfeat
Extract GeoJSON features from a text file object .
635
11
234,727
def iter_query ( query ) : try : itr = click . open_file ( query ) . readlines ( ) except IOError : itr = [ query ] return itr
Accept a filename stream or string . Returns an iterator over lines of the query .
39
16
234,728
def normalize_feature_objects ( feature_objs ) : for obj in feature_objs : if hasattr ( obj , "__geo_interface__" ) and 'type' in obj . __geo_interface__ . keys ( ) and obj . __geo_interface__ [ 'type' ] == 'Feature' : yield obj . __geo_interface__ elif isinstance ( obj , dict ) and 'type' in obj and obj [ 'type' ] == 'Feature' : yield obj else : raise ValueError ( "Did not recognize object {0}" "as GeoJSON Feature" . format ( obj ) )
Takes an iterable of GeoJSON - like Feature mappings or an iterable of objects with a geo interface and normalizes it to the former .
137
31
234,729
async def api ( self , endpoint , params = None , test = False ) : data = { } url = "{}/{}" . format ( self . _api , endpoint ) try : async with async_timeout . timeout ( 8 , loop = self . _loop ) : response = await self . _session . get ( url , auth = self . _auth , headers = HEADERS , params = params ) if response . status == 200 : self . _authenticated = True self . _connected = True if not test : data = await response . json ( ) elif response . status == 401 : self . _authenticated = False self . _connected = True except asyncio . TimeoutError as error : self . _authenticated , self . _connected = False , False if not test : _LOGGER . warning ( "Timeouterror connecting to Traccar, %s" , error ) except aiohttp . ClientError as error : self . _authenticated , self . _connected = False , False if not test : _LOGGER . warning ( "Error connecting to Traccar, %s" , error ) except socket . gaierror as error : self . _authenticated , self . _connected = False , False if not test : _LOGGER . warning ( "Error connecting to Traccar, %s" , error ) except TypeError as error : self . _authenticated , self . _connected = False , False if not test : _LOGGER . warning ( "Error connecting to Traccar, %s" , error ) except Exception as error : # pylint: disable=broad-except self . _authenticated , self . _connected = False , False if not test : _LOGGER . warning ( "Error connecting to Traccar, %s" , error ) return data
Comunicate with the API .
384
7
234,730
async def runcli ( ) : async with aiohttp . ClientSession ( ) as session : host = input ( "IP: " ) username = input ( "Username: " ) password = input ( "Password: " ) print ( "\n\n\n" ) data = API ( LOOP , session , username , password , host ) await data . test_connection ( ) print ( "Authenticated:" , data . authenticated ) if data . authenticated : await data . get_device_info ( ) print ( "Authentication:" , data . authenticated ) print ( "Geofences:" , data . geofences ) print ( "Devices:" , data . devices ) print ( "Positions:" , data . positions ) print ( "Device info:" , data . device_info )
Debug of pytraccar .
168
7
234,731
def restore ( s , t ) : t = ( c for c in t ) return '' . join ( next ( t ) if not is_blacksquare ( c ) else c for c in s )
s is the source string it can contain . t is the target it s smaller than s by the number of . s in s
44
26
234,732
def default ( event , data ) : messages = defaultdict ( lambda : 'Avast:' ) messages [ 'start' ] = 'Thar she blows!' messages [ 'tag' ] = 'Thar she blows!' messages [ 'stop' ] = 'Away into the depths:' messages [ 'destroy' ] = 'Away into the depths:' messages [ 'delete' ] = 'Away into the depths:' status = get_status ( event ) message = messages [ status ] + ' %s/%s' log . info ( message , status , get_id ( event ) ) log . debug ( '"data": %s' , form_json ( data ) )
The default handler prints basic event info .
144
8
234,733
def table ( tab ) : global open_tables if tab in open_tables : yield open_tables [ tab ] else : open_tables [ tab ] = iptc . Table ( tab ) open_tables [ tab ] . refresh ( ) open_tables [ tab ] . autocommit = False yield open_tables [ tab ] open_tables [ tab ] . commit ( ) del open_tables [ tab ]
Access IPTables transactionally in a uniform way .
97
11
234,734
def format_symbol ( self , symbol , link_resolver ) : if not symbol : return '' if isinstance ( symbol , FieldSymbol ) : return '' # pylint: disable=unused-variable out = self . _format_symbol ( symbol ) template = self . get_template ( 'symbol_wrapper.html' ) return template . render ( { 'symbol' : symbol , 'formatted_doc' : out } )
Format a symbols . Symbol
98
5
234,735
def add_comment ( self , comment ) : if not comment : return self . __comments [ comment . name ] = comment self . comment_added_signal ( self , comment )
Add a comment to the database .
39
7
234,736
def touch ( fname ) : orig_mtime = get_mtime ( fname ) while get_mtime ( fname ) == orig_mtime : pathlib . Path ( fname ) . touch ( )
Mimics the touch command
47
6
234,737
def debug ( self , message , domain = None ) : if domain is None : domain = self . extension_name debug ( message , domain )
Shortcut function for utils . loggable . debug
30
12
234,738
def info ( self , message , domain = None ) : if domain is None : domain = self . extension_name info ( message , domain )
Shortcut function for utils . loggable . info
30
12
234,739
def parse_config ( self , config ) : prefix = self . argument_prefix self . sources = config . get_sources ( prefix ) self . smart_sources = [ self . _get_smart_filename ( s ) for s in self . sources ] self . index = config . get_index ( prefix ) self . source_roots = OrderedSet ( config . get_paths ( '%s_source_roots' % prefix ) ) for arg , dest in list ( self . paths_arguments . items ( ) ) : val = config . get_paths ( arg ) setattr ( self , dest , val ) for arg , dest in list ( self . path_arguments . items ( ) ) : val = config . get_path ( arg ) setattr ( self , dest , val ) self . formatter . parse_config ( config )
Override this making sure to chain up first if your extension adds its own custom command line arguments or you want to do any further processing on the automatically added arguments .
185
32
234,740
def add_attrs ( self , symbol , * * kwargs ) : for key , val in kwargs . items ( ) : symbol . add_extension_attribute ( self . extension_name , key , val )
Helper for setting symbol extension attributes
49
6
234,741
def get_attr ( self , symbol , attrname ) : return symbol . extension_attributes . get ( self . extension_name , { } ) . get ( attrname , None )
Helper for getting symbol extension attributes
42
6
234,742
def add_index_argument ( cls , group ) : prefix = cls . argument_prefix group . add_argument ( '--%s-index' % prefix , action = "store" , dest = "%s_index" % prefix , help = ( "Name of the %s root markdown file, can be None" % ( cls . extension_name ) ) )
Subclasses may call this to add an index argument .
82
11
234,743
def add_sources_argument ( cls , group , allow_filters = True , prefix = None , add_root_paths = False ) : prefix = prefix or cls . argument_prefix group . add_argument ( "--%s-sources" % prefix , action = "store" , nargs = "+" , dest = "%s_sources" % prefix . replace ( '-' , '_' ) , help = "%s source files to parse" % prefix ) if allow_filters : group . add_argument ( "--%s-source-filters" % prefix , action = "store" , nargs = "+" , dest = "%s_source_filters" % prefix . replace ( '-' , '_' ) , help = "%s source files to ignore" % prefix ) if add_root_paths : group . add_argument ( "--%s-source-roots" % prefix , action = "store" , nargs = "+" , dest = "%s_source_roots" % prefix . replace ( '-' , '_' ) , help = "%s source root directories allowing files " "to be referenced relatively to those" % prefix )
Subclasses may call this to add sources and source_filters arguments .
259
15
234,744
def add_path_argument ( cls , group , argname , dest = None , help_ = None ) : prefixed = '%s-%s' % ( cls . argument_prefix , argname ) if dest is None : dest = prefixed . replace ( '-' , '_' ) final_dest = dest [ len ( cls . argument_prefix ) + 1 : ] else : final_dest = dest dest = '%s_%s' % ( cls . argument_prefix , dest ) group . add_argument ( '--%s' % prefixed , action = 'store' , dest = dest , help = help_ ) cls . path_arguments [ dest ] = final_dest
Subclasses may call this to expose a path argument .
156
11
234,745
def add_paths_argument ( cls , group , argname , dest = None , help_ = None ) : prefixed = '%s-%s' % ( cls . argument_prefix , argname ) if dest is None : dest = prefixed . replace ( '-' , '_' ) final_dest = dest [ len ( cls . argument_prefix ) + 1 : ] else : final_dest = dest dest = '%s_%s' % ( cls . argument_prefix , dest ) group . add_argument ( '--%s' % prefixed , action = 'store' , nargs = '+' , dest = dest , help = help_ ) cls . paths_arguments [ dest ] = final_dest
Subclasses may call this to expose a paths argument .
164
11
234,746
def create_symbol ( self , * args , * * kwargs ) : if not kwargs . get ( 'project_name' ) : kwargs [ 'project_name' ] = self . project . project_name sym = self . app . database . create_symbol ( * args , * * kwargs ) if sym : # pylint: disable=unidiomatic-typecheck if type ( sym ) != Symbol : self . _created_symbols [ sym . filename ] . add ( sym . unique_name ) return sym
Extensions that discover and create instances of symbols . Symbol should do this through this method as it will keep an index of these which can be used when generating a naive index .
121
35
234,747
def format_page ( self , page , link_resolver , output ) : debug ( 'Formatting page %s' % page . link . ref , 'formatting' ) if output : actual_output = os . path . join ( output , 'html' ) if not os . path . exists ( actual_output ) : os . makedirs ( actual_output ) else : actual_output = None page . format ( self . formatter , link_resolver , actual_output )
Called by project . Project . format_page to leave full control to extensions over the formatting of the pages they are responsible of .
105
27
234,748
def add_subproject ( self , fname , conf_path ) : config = Config ( conf_file = conf_path ) proj = Project ( self . app , dependency_map = self . dependency_map ) proj . parse_name_from_config ( config ) proj . parse_config ( config ) proj . setup ( ) self . subprojects [ fname ] = proj
Creates and adds a new subproject .
86
9
234,749
def _no_duplicates_constructor ( loader , node , deep = False ) : mapping = { } for key_node , value_node in node . value : key = loader . construct_object ( key_node , deep = deep ) value = loader . construct_object ( value_node , deep = deep ) if key in mapping : raise ConstructorError ( "while constructing a mapping" , node . start_mark , "found duplicate key (%s)" % key , key_node . start_mark ) mapping [ key ] = value return loader . construct_mapping ( node , deep )
Check for duplicate keys .
128
5
234,750
def resolve_symbols ( self , tree , database , link_resolver ) : self . typed_symbols = self . __get_empty_typed_symbols ( ) all_syms = OrderedSet ( ) for sym_name in self . symbol_names : sym = database . get_symbol ( sym_name ) self . __query_extra_symbols ( sym , all_syms , tree , link_resolver , database ) if tree . project . is_toplevel : page_path = self . link . ref else : page_path = self . project_name + '/' + self . link . ref if self . meta . get ( "auto-sort" , True ) : all_syms = sorted ( all_syms , key = lambda x : x . unique_name ) for sym in all_syms : sym . update_children_comments ( ) self . __resolve_symbol ( sym , link_resolver , page_path ) self . symbol_names . add ( sym . unique_name ) # Always put symbols with no parent at the end no_parent_syms = self . by_parent_symbols . pop ( None , None ) if no_parent_syms : self . by_parent_symbols [ None ] = no_parent_syms for sym_type in [ ClassSymbol , AliasSymbol , InterfaceSymbol , StructSymbol ] : syms = self . typed_symbols [ sym_type ] . symbols if not syms : continue if self . title is None : self . title = syms [ 0 ] . display_name if self . comment is None : self . comment = Comment ( name = self . name ) self . comment . short_description = syms [ 0 ] . comment . short_description self . comment . title = syms [ 0 ] . comment . title break
When this method is called the page s symbol names are queried from database and added to lists of actual symbols sorted by symbol class .
414
27
234,751
def walk ( self , parent = None ) : if parent is None : yield self . root parent = self . root for cpage_name in parent . subpages : cpage = self . __all_pages [ cpage_name ] yield cpage for page in self . walk ( parent = cpage ) : yield page
Generator that yields pages in infix order
68
9
234,752
def get_extension_classes ( ) : res = [ SyntaxHighlightingExtension , SearchExtension , TagExtension , DevhelpExtension , LicenseExtension , GitUploadExtension , EditOnGitHubExtension ] if sys . version_info [ 1 ] >= 5 : res += [ DBusExtension ] try : from hotdoc . extensions . c . c_extension import CExtension res += [ CExtension ] except ImportError : pass try : from hotdoc . extensions . gi . gi_extension import GIExtension res += [ GIExtension ] except ImportError : pass return res
Hotdoc s setuptools entry point
136
8
234,753
def register_functions ( lib , ignore_errors ) : def register ( item ) : return register_function ( lib , item , ignore_errors ) for f in functionList : register ( f )
Register function prototypes with a libclang library instance .
42
11
234,754
def from_offset ( tu , file , offset ) : return conf . lib . clang_getLocationForOffset ( tu , file , offset )
Retrieve a SourceLocation from a given character offset .
31
11
234,755
def get_tokens ( tu , extent ) : tokens_memory = POINTER ( Token ) ( ) tokens_count = c_uint ( ) conf . lib . clang_tokenize ( tu , extent , byref ( tokens_memory ) , byref ( tokens_count ) ) count = int ( tokens_count . value ) # If we get no tokens, no memory was allocated. Be sure not to return # anything and potentially call a destructor on nothing. if count < 1 : return tokens_array = cast ( tokens_memory , POINTER ( Token * count ) ) . contents token_group = TokenGroup ( tu , tokens_memory , tokens_count ) for i in xrange ( 0 , count ) : token = Token ( ) token . int_data = tokens_array [ i ] . int_data token . ptr_data = tokens_array [ i ] . ptr_data token . _tu = tu token . _group = token_group yield token
Helper method to return all tokens in an extent .
207
10
234,756
def from_value ( value ) : result = TokenKind . _value_map . get ( value , None ) if result is None : raise ValueError ( 'Unknown TokenKind: %d' % value ) return result
Obtain a registered TokenKind instance from its value .
46
11
234,757
def register ( value , name ) : if value in TokenKind . _value_map : raise ValueError ( 'TokenKind already registered: %d' % value ) kind = TokenKind ( value , name ) TokenKind . _value_map [ value ] = kind setattr ( TokenKind , name , kind )
Register a new TokenKind enumeration .
66
8
234,758
def canonical ( self ) : if not hasattr ( self , '_canonical' ) : self . _canonical = conf . lib . clang_getCanonicalCursor ( self ) return self . _canonical
Return the canonical Cursor corresponding to this Cursor .
48
11
234,759
def result_type ( self ) : if not hasattr ( self , '_result_type' ) : self . _result_type = conf . lib . clang_getResultType ( self . type ) return self . _result_type
Retrieve the Type of the result for this Cursor .
52
12
234,760
def underlying_typedef_type ( self ) : if not hasattr ( self , '_underlying_type' ) : assert self . kind . is_declaration ( ) self . _underlying_type = conf . lib . clang_getTypedefDeclUnderlyingType ( self ) return self . _underlying_type
Return the underlying type of a typedef declaration .
73
10
234,761
def enum_type ( self ) : if not hasattr ( self , '_enum_type' ) : assert self . kind == CursorKind . ENUM_DECL self . _enum_type = conf . lib . clang_getEnumDeclIntegerType ( self ) return self . _enum_type
Return the integer type of an enum declaration .
67
9
234,762
def enum_value ( self ) : if not hasattr ( self , '_enum_value' ) : assert self . kind == CursorKind . ENUM_CONSTANT_DECL # Figure out the underlying type of the enum to know if it # is a signed or unsigned quantity. underlying_type = self . type if underlying_type . kind == TypeKind . ENUM : underlying_type = underlying_type . get_declaration ( ) . enum_type if underlying_type . kind in ( TypeKind . CHAR_U , TypeKind . UCHAR , TypeKind . CHAR16 , TypeKind . CHAR32 , TypeKind . USHORT , TypeKind . UINT , TypeKind . ULONG , TypeKind . ULONGLONG , TypeKind . UINT128 ) : self . _enum_value = conf . lib . clang_getEnumConstantDeclUnsignedValue ( self ) else : self . _enum_value = conf . lib . clang_getEnumConstantDeclValue ( self ) return self . _enum_value
Return the value of an enum constant .
227
8
234,763
def hash ( self ) : if not hasattr ( self , '_hash' ) : self . _hash = conf . lib . clang_hashCursor ( self ) return self . _hash
Returns a hash of the cursor as an int .
42
10
234,764
def semantic_parent ( self ) : if not hasattr ( self , '_semantic_parent' ) : self . _semantic_parent = conf . lib . clang_getCursorSemanticParent ( self ) return self . _semantic_parent
Return the semantic parent for this cursor .
56
8
234,765
def lexical_parent ( self ) : if not hasattr ( self , '_lexical_parent' ) : self . _lexical_parent = conf . lib . clang_getCursorLexicalParent ( self ) return self . _lexical_parent
Return the lexical parent for this cursor .
57
9
234,766
def referenced ( self ) : if not hasattr ( self , '_referenced' ) : self . _referenced = conf . lib . clang_getCursorReferenced ( self ) return self . _referenced
For a cursor that is a reference returns a cursor representing the entity that it references .
51
17
234,767
def brief_comment ( self ) : r = conf . lib . clang_Cursor_getBriefCommentText ( self ) if not r : return None return str ( r )
Returns the brief comment text associated with that Cursor
39
10
234,768
def raw_comment ( self ) : r = conf . lib . clang_Cursor_getRawCommentText ( self ) if not r : return None return str ( r )
Returns the raw comment text associated with that Cursor
38
10
234,769
def get_arguments ( self ) : num_args = conf . lib . clang_Cursor_getNumArguments ( self ) for i in xrange ( 0 , num_args ) : yield conf . lib . clang_Cursor_getArgument ( self , i )
Return an iterator for accessing the arguments of this cursor .
62
11
234,770
def get_children ( self ) : # FIXME: Expose iteration from CIndex, PR6125. def visitor ( child , parent , children ) : # FIXME: Document this assertion in API. # FIXME: There should just be an isNull method. assert child != conf . lib . clang_getNullCursor ( ) # Create reference to TU so it isn't GC'd before Cursor. child . _tu = self . _tu children . append ( child ) return 1 # continue children = [ ] conf . lib . clang_visitChildren ( self , callbacks [ 'cursor_visit' ] ( visitor ) , children ) return iter ( children )
Return an iterator for accessing the children of this cursor .
146
11
234,771
def walk_preorder ( self ) : yield self for child in self . get_children ( ) : for descendant in child . walk_preorder ( ) : yield descendant
Depth - first preorder walk over the cursor and its descendants .
36
13
234,772
def is_anonymous ( self ) : if self . kind == CursorKind . FIELD_DECL : return self . type . get_declaration ( ) . is_anonymous ( ) return conf . lib . clang_Cursor_isAnonymous ( self )
Check if the record is anonymous .
58
7
234,773
def argument_types ( self ) : class ArgumentsIterator ( collections . Sequence ) : def __init__ ( self , parent ) : self . parent = parent self . length = None def __len__ ( self ) : if self . length is None : self . length = conf . lib . clang_getNumArgTypes ( self . parent ) return self . length def __getitem__ ( self , key ) : # FIXME Support slice objects. if not isinstance ( key , int ) : raise TypeError ( "Must supply a non-negative int." ) if key < 0 : raise IndexError ( "Only non-negative indexes are accepted." ) if key >= len ( self ) : raise IndexError ( "Index greater than container length: " "%d > %d" % ( key , len ( self ) ) ) result = conf . lib . clang_getArgType ( self . parent , key ) if result . kind == TypeKind . INVALID : raise IndexError ( "Argument could not be retrieved." ) return result assert self . kind == TypeKind . FUNCTIONPROTO return ArgumentsIterator ( self )
Retrieve a container for the non - variadic arguments for this type .
240
15
234,774
def element_type ( self ) : result = conf . lib . clang_getElementType ( self ) if result . kind == TypeKind . INVALID : raise Exception ( 'Element type not available on this type.' ) return result
Retrieve the Type of elements within this Type .
50
10
234,775
def element_count ( self ) : result = conf . lib . clang_getNumElements ( self ) if result < 0 : raise Exception ( 'Type does not have elements.' ) return result
Retrieve the number of elements in this type .
42
10
234,776
def is_function_variadic ( self ) : assert self . kind == TypeKind . FUNCTIONPROTO return conf . lib . clang_isFunctionTypeVariadic ( self )
Determine whether this function Type is a variadic function type .
40
14
234,777
def get_fields ( self ) : def visitor ( field , children ) : assert field != conf . lib . clang_getNullCursor ( ) # Create reference to TU so it isn't GC'd before Cursor. field . _tu = self . _tu fields . append ( field ) return 1 # continue fields = [ ] conf . lib . clang_Type_visitFields ( self , callbacks [ 'fields_visit' ] ( visitor ) , fields ) return iter ( fields )
Return an iterator for accessing the fields of this type .
108
11
234,778
def parse ( self , path , args = None , unsaved_files = None , options = 0 ) : return TranslationUnit . from_source ( path , args , unsaved_files , options , self )
Load the translation unit from the given source code file by running clang and generating the AST before loading . Additional command line parameters can be passed to clang via the args parameter .
44
36
234,779
def from_ast_file ( cls , filename , index = None ) : if index is None : index = Index . create ( ) ptr = conf . lib . clang_createTranslationUnit ( index , filename ) if not ptr : raise TranslationUnitLoadError ( filename ) return cls ( ptr = ptr , index = index )
Create a TranslationUnit instance from a saved AST file .
70
11
234,780
def get_includes ( self ) : def visitor ( fobj , lptr , depth , includes ) : if depth > 0 : loc = lptr . contents includes . append ( FileInclusion ( loc . file , File ( fobj ) , loc , depth ) ) # Automatically adapt CIndex/ctype pointers to python objects includes = [ ] conf . lib . clang_getInclusions ( self , callbacks [ 'translation_unit_includes' ] ( visitor ) , includes ) return iter ( includes )
Return an iterable sequence of FileInclusion objects that describe the sequence of inclusions in a translation unit . The first object in this sequence is always the input file . Note that this method will not recursively iterate over header files included through precompiled headers .
108
55
234,781
def get_location ( self , filename , position ) : f = self . get_file ( filename ) if isinstance ( position , int ) : return SourceLocation . from_offset ( self , f , position ) return SourceLocation . from_position ( self , f , position [ 0 ] , position [ 1 ] )
Obtain a SourceLocation for a file in this translation unit .
67
13
234,782
def get_extent ( self , filename , locations ) : f = self . get_file ( filename ) if len ( locations ) < 2 : raise Exception ( 'Must pass object with at least 2 elements' ) start_location , end_location = locations if hasattr ( start_location , '__len__' ) : start_location = SourceLocation . from_position ( self , f , start_location [ 0 ] , start_location [ 1 ] ) elif isinstance ( start_location , int ) : start_location = SourceLocation . from_offset ( self , f , start_location ) if hasattr ( end_location , '__len__' ) : end_location = SourceLocation . from_position ( self , f , end_location [ 0 ] , end_location [ 1 ] ) elif isinstance ( end_location , int ) : end_location = SourceLocation . from_offset ( self , f , end_location ) assert isinstance ( start_location , SourceLocation ) assert isinstance ( end_location , SourceLocation ) return SourceRange . from_locations ( start_location , end_location )
Obtain a SourceRange from this translation unit .
243
10
234,783
def reparse ( self , unsaved_files = None , options = 0 ) : if unsaved_files is None : unsaved_files = [ ] unsaved_files_array = 0 if len ( unsaved_files ) : unsaved_files_array = ( _CXUnsavedFile * len ( unsaved_files ) ) ( ) for i , ( name , value ) in enumerate ( unsaved_files ) : if not isinstance ( value , str ) : # FIXME: It would be great to support an efficient version # of this, one day. value = value . read ( ) print ( value ) if not isinstance ( value , str ) : raise TypeError ( 'Unexpected unsaved file contents.' ) unsaved_files_array [ i ] . name = name unsaved_files_array [ i ] . contents = value unsaved_files_array [ i ] . length = len ( value ) ptr = conf . lib . clang_reparseTranslationUnit ( self , len ( unsaved_files ) , unsaved_files_array , options )
Reparse an already parsed translation unit .
233
8
234,784
def save ( self , filename ) : options = conf . lib . clang_defaultSaveOptions ( self ) result = int ( conf . lib . clang_saveTranslationUnit ( self , filename , options ) ) if result != 0 : raise TranslationUnitSaveError ( result , 'Error saving TranslationUnit.' )
Saves the TranslationUnit to a file .
65
9
234,785
def codeComplete ( self , path , line , column , unsaved_files = None , include_macros = False , include_code_patterns = False , include_brief_comments = False ) : options = 0 if include_macros : options += 1 if include_code_patterns : options += 2 if include_brief_comments : options += 4 if unsaved_files is None : unsaved_files = [ ] unsaved_files_array = 0 if len ( unsaved_files ) : unsaved_files_array = ( _CXUnsavedFile * len ( unsaved_files ) ) ( ) for i , ( name , value ) in enumerate ( unsaved_files ) : if not isinstance ( value , str ) : # FIXME: It would be great to support an efficient version # of this, one day. value = value . read ( ) print ( value ) if not isinstance ( value , str ) : raise TypeError ( 'Unexpected unsaved file contents.' ) unsaved_files_array [ i ] . name = c_string_p ( name ) unsaved_files_array [ i ] . contents = c_string_p ( value ) unsaved_files_array [ i ] . length = len ( value ) ptr = conf . lib . clang_codeCompleteAt ( self , path , line , column , unsaved_files_array , len ( unsaved_files ) , options ) if ptr : return CodeCompletionResults ( ptr ) return None
Code complete in this translation unit .
326
7
234,786
def get_tokens ( self , locations = None , extent = None ) : if locations is not None : extent = SourceRange ( start = locations [ 0 ] , end = locations [ 1 ] ) return TokenGroup . get_tokens ( self , extent )
Obtain tokens in this translation unit .
57
8
234,787
def name ( self ) : return str ( conf . lib . clang_getCString ( conf . lib . clang_getFileName ( self ) ) )
Return the complete file and path name of the file .
35
11
234,788
def arguments ( self ) : length = conf . lib . clang_CompileCommand_getNumArgs ( self . cmd ) for i in xrange ( length ) : yield str ( conf . lib . clang_CompileCommand_getArg ( self . cmd , i ) )
Get an iterable object providing each argument in the command line for the compiler invocation as a _CXString .
60
23
234,789
def fromDirectory ( buildDir ) : errorCode = c_uint ( ) try : cdb = conf . lib . clang_CompilationDatabase_fromDirectory ( buildDir , byref ( errorCode ) ) except CompilationDatabaseError as e : raise CompilationDatabaseError ( int ( errorCode . value ) , "CompilationDatabase loading failed" ) return cdb
Builds a CompilationDatabase from the database found in buildDir
78
13
234,790
def get_klass_parents ( gi_name ) : res = [ ] parents = __HIERARCHY_GRAPH . predecessors ( gi_name ) if not parents : return [ ] __get_parent_link_recurse ( parents [ 0 ] , res ) return res
Returns a sorted list of qualified symbols representing the parents of the klass - like symbol named gi_name
62
22
234,791
def get_klass_children ( gi_name ) : res = { } children = __HIERARCHY_GRAPH . successors ( gi_name ) for gi_name in children : ctype_name = ALL_GI_TYPES [ gi_name ] qs = QualifiedSymbol ( type_tokens = [ Link ( None , ctype_name , ctype_name ) ] ) qs . add_extension_attribute ( 'gi-extension' , 'type_desc' , SymbolTypeDesc ( [ ] , gi_name , ctype_name , 0 ) ) res [ ctype_name ] = qs return res
Returns a dict of qualified symbols representing the children of the klass - like symbol named gi_name
148
21
234,792
def type_description_from_node ( gi_node ) : ctype_name , gi_name , array_nesting = unnest_type ( gi_node ) cur_ns = get_namespace ( gi_node ) if ctype_name is not None : type_tokens = __type_tokens_from_cdecl ( ctype_name ) else : type_tokens = __type_tokens_from_gitype ( cur_ns , gi_name ) namespaced = '%s.%s' % ( cur_ns , gi_name ) if namespaced in ALL_GI_TYPES : gi_name = namespaced return SymbolTypeDesc ( type_tokens , gi_name , ctype_name , array_nesting )
Parse a typed node returns a usable description
182
9
234,793
def is_introspectable ( name , language ) : if name in FUNDAMENTALS [ language ] : return True if name not in __TRANSLATED_NAMES [ language ] : return False return True
Do not call this before caching the nodes
47
8
234,794
def get_markdown_files ( self , dir_ ) : md_files = OrderedSet ( ) for root , _ , files in os . walk ( dir_ ) : for name in files : split = os . path . splitext ( name ) if len ( split ) == 1 : continue if split [ 1 ] in ( '.markdown' , '.md' , '.yaml' ) : md_files . add ( os . path . join ( root , name ) ) return md_files
Get all the markdown files in a folder recursively
107
12
234,795
def get ( self , key , default = None ) : if key in self . __cli : return self . __cli [ key ] if key in self . __config : return self . __config . get ( key ) if key in self . __defaults : return self . __defaults . get ( key ) return default
Get the value for key .
68
6
234,796
def get_index ( self , prefix = '' ) : if prefix : prefixed = '%s_index' % prefix else : prefixed = 'index' if prefixed in self . __cli and self . __cli [ prefixed ] : index = self . __cli . get ( prefixed ) from_conf = False else : index = self . __config . get ( prefixed ) from_conf = True return self . __abspath ( index , from_conf )
Retrieve the absolute path to an index according to prefix .
102
12
234,797
def get_path ( self , key , rel_to_cwd = False , rel_to_conf = False ) : if key in self . __cli : path = self . __cli [ key ] from_conf = False else : path = self . __config . get ( key ) from_conf = True if not isinstance ( path , str ) : return None res = self . __abspath ( path , from_conf ) if rel_to_cwd : return os . path . relpath ( res , self . __invoke_dir ) if rel_to_conf : return os . path . relpath ( res , self . __conf_dir ) return self . __abspath ( path , from_conf )
Retrieve a path from the config resolving it against the invokation directory or the configuration file directory depending on whether it was passed through the command - line or the configuration file .
157
36
234,798
def get_paths ( self , key ) : final_paths = [ ] if key in self . __cli : paths = self . __cli [ key ] or [ ] from_conf = False else : paths = self . __config . get ( key ) or [ ] from_conf = True for path in flatten_list ( paths ) : final_path = self . __abspath ( path , from_conf ) if final_path : final_paths . append ( final_path ) return final_paths
Same as ConfigParser . get_path for a list of paths .
113
14
234,799
def get_sources ( self , prefix = '' ) : prefix = prefix . replace ( '-' , '_' ) prefixed = '%s_sources' % prefix if prefixed in self . __cli : sources = self . __cli . get ( prefixed ) from_conf = False else : sources = self . __config . get ( prefixed ) from_conf = True if sources is None : return OrderedSet ( ) sources = self . __resolve_patterns ( sources , from_conf ) prefixed = '%s_source_filters' % prefix if prefixed in self . __cli : filters = self . __cli . get ( prefixed ) from_conf = False else : filters = self . __config . get ( prefixed ) from_conf = True if filters is None : return sources sources -= self . __resolve_patterns ( filters , from_conf ) return sources
Retrieve a set of absolute paths to sources according to prefix
196
12