idx
int64
0
251k
question
stringlengths
53
3.53k
target
stringlengths
5
1.23k
len_question
int64
20
893
len_target
int64
3
238
245,900
def hist(hists, stacked=True, reverse=False, xpadding=0, ypadding=.1,
         yerror_in_padding=True, logy=None, snap=True, axes=None, **kwargs):
    """Make a matplotlib hist plot from a ROOT histogram stack or list
    of histograms.

    Returns the matplotlib proxy (or list of proxies, one per input
    histogram) produced by ``_hist``.
    """
    if axes is None:
        axes = plt.gca()
    if logy is None:
        logy = axes.get_yscale() == 'log'
    curr_xlim = axes.get_xlim()
    curr_ylim = axes.get_ylim()
    was_empty = not axes.has_data()
    returns = []
    if isinstance(hists, _Hist):
        # This is a single plottable object.
        returns = _hist(hists, axes=axes, logy=logy, **kwargs)
        _set_bounds(hists, axes=axes, was_empty=was_empty,
                    prev_xlim=curr_xlim, prev_ylim=curr_ylim,
                    xpadding=xpadding, ypadding=ypadding,
                    yerror_in_padding=yerror_in_padding,
                    snap=snap, logy=logy)
    elif stacked:
        # draw the top histogram first so its edges don't cover the
        # histograms beneath it in the stack
        if not reverse:
            hists = list(hists)[::-1]
        for i, h in enumerate(hists):
            # BUGFIX: removed dead local `kwargs_local = kwargs.copy()`
            # which was assigned but never used
            if i == len(hists) - 1:
                low = h.Clone()
                low.Reset()
            else:
                low = sum(hists[i + 1:])
            high = h + low
            high.alpha = getattr(h, 'alpha', None)
            proxy = _hist(high, bottom=low, axes=axes, logy=logy, **kwargs)
            returns.append(proxy)
        if not reverse:
            returns = returns[::-1]
        _set_bounds(sum(hists), axes=axes, was_empty=was_empty,
                    prev_xlim=curr_xlim, prev_ylim=curr_ylim,
                    xpadding=xpadding, ypadding=ypadding,
                    yerror_in_padding=yerror_in_padding,
                    snap=snap, logy=logy)
    else:
        for h in _maybe_reversed(hists, reverse):
            returns.append(_hist(h, axes=axes, logy=logy, **kwargs))
        if reverse:
            returns = returns[::-1]
        # bound the axes using the histogram with the largest maximum
        _set_bounds(hists[max(range(len(hists)),
                              key=lambda idx: hists[idx].max())],
                    axes=axes, was_empty=was_empty,
                    prev_xlim=curr_xlim, prev_ylim=curr_ylim,
                    xpadding=xpadding, ypadding=ypadding,
                    yerror_in_padding=yerror_in_padding,
                    snap=snap, logy=logy)
    return returns
Make a matplotlib hist plot from a ROOT histogram stack or list of histograms .
679
20
245,901
def errorbar(hists, xerr=True, yerr=True, xpadding=0, ypadding=.1,
             xerror_in_padding=True, yerror_in_padding=True,
             emptybins=True, snap=True, axes=None, **kwargs):
    """Make a matplotlib errorbar plot from a ROOT histogram or graph,
    or a list of histograms and graphs.
    """
    if axes is None:
        axes = plt.gca()
    curr_xlim = axes.get_xlim()
    curr_ylim = axes.get_ylim()
    was_empty = not axes.has_data()
    if isinstance(hists, (_Hist, _Graph1DBase)):
        # This is a single plottable object.
        returns = _errorbar(hists, xerr, yerr, axes=axes,
                            emptybins=emptybins, **kwargs)
        # NOTE(review): unlike hist(), prev_xlim is not forwarded here
        # -- confirm intended
        _set_bounds(hists, axes=axes, was_empty=was_empty,
                    prev_ylim=curr_ylim,
                    xpadding=xpadding, ypadding=ypadding,
                    xerror_in_padding=xerror_in_padding,
                    yerror_in_padding=yerror_in_padding,
                    snap=snap)
    else:
        # recurse on each plottable in the collection
        returns = [errorbar(h, xerr=xerr, yerr=yerr, axes=axes,
                            xpadding=xpadding, ypadding=ypadding,
                            xerror_in_padding=xerror_in_padding,
                            yerror_in_padding=yerror_in_padding,
                            snap=snap, emptybins=emptybins, **kwargs)
                   for h in hists]
    return returns
Make a matplotlib errorbar plot from a ROOT histogram or graph or list of histograms and graphs .
357
24
245,902
def step(h, logy=None, axes=None, **kwargs):
    """Make a matplotlib step plot from a ROOT histogram."""
    if axes is None:
        axes = plt.gca()
    if logy is None:
        logy = axes.get_yscale() == 'log'
    _set_defaults(h, kwargs, ['common', 'line'])
    if kwargs.get('color') is None:
        kwargs['color'] = h.GetLineColor('mpl')
    # append one trailing zero so the final bin is closed by the step
    contents = np.array(list(h.y()) + [0.])
    if logy:
        # clamp to strictly positive values for log-scale axes
        np.clip(contents, 1E-300, 1E300, out=contents)
    return axes.step(list(h.xedges()), contents, where='post', **kwargs)
Make a matplotlib step plot from a ROOT histogram .
185
14
245,903
def fill_between(a, b, logy=None, axes=None, **kwargs):
    """Fill the region between two 1D histograms."""
    if axes is None:
        axes = plt.gca()
    if logy is None:
        logy = axes.get_yscale() == 'log'
    if not isinstance(a, _Hist) or not isinstance(b, _Hist):
        raise TypeError("fill_between only operates on 1D histograms")
    a.check_compatibility(b, check_edges=True)
    x, top, bottom = [], [], []
    for abin, bbin in zip(a.bins(overflow=False), b.bins(overflow=False)):
        hi = max(abin.value, bbin.value)
        lo = min(abin.value, bbin.value)
        # two points per bin: one at each bin edge
        x.extend([abin.x.low, abin.x.high])
        top.extend([hi, hi])
        bottom.extend([lo, lo])
    x = np.array(x)
    top = np.array(top)
    bottom = np.array(bottom)
    if logy:
        # clamp to strictly positive values for log-scale axes
        np.clip(top, 1E-300, 1E300, out=top)
        np.clip(bottom, 1E-300, 1E300, out=bottom)
    return axes.fill_between(x, top, bottom, **kwargs)
Fill the region between two histograms or graphs .
310
10
245,904
def hist2d(h, axes=None, colorbar=False, **kwargs):
    """Draw a 2D matplotlib histogram plot from a 2D ROOT histogram."""
    if axes is None:
        axes = plt.gca()
    X, Y = np.meshgrid(list(h.x()), list(h.y()))
    weights = np.array(h.z()).T.ravel()
    # axes.hist2d returns (counts, xedges, yedges, Image)
    result = axes.hist2d(X.ravel(), Y.ravel(), weights=weights,
                         bins=(list(h.xedges()), list(h.yedges())),
                         **kwargs)
    if colorbar:
        plt.colorbar(result[-1], ax=axes)
    return result
Draw a 2D matplotlib histogram plot from a 2D ROOT histogram .
203
19
245,905
def imshow(h, axes=None, colorbar=False, **kwargs):
    """Draw a matplotlib imshow plot from a 2D ROOT histogram."""
    kwargs.setdefault('aspect', 'auto')
    if axes is None:
        axes = plt.gca()
    # image extent spans the full axis ranges of the histogram
    extent = [h.xedges(1), h.xedges(h.nbins(0) + 1),
              h.yedges(1), h.yedges(h.nbins(1) + 1)]
    axis_image = axes.imshow(np.array(h.z()).T, extent=extent,
                             interpolation='nearest', origin='lower',
                             **kwargs)
    if colorbar:
        plt.colorbar(axis_image, ax=axes)
    return axis_image
Draw a matplotlib imshow plot from a 2D ROOT histogram .
177
17
245,906
def contour(h, axes=None, zoom=None, label_contour=False, **kwargs):
    """Draw a matplotlib contour plot from a 2D ROOT histogram.

    If ``zoom`` is given it is applied (via scipy.ndimage) to smooth or
    resample the histogram contents before contouring; a sequence gives
    a separate zoom factor per axis.
    """
    if axes is None:
        axes = plt.gca()
    x = np.array(list(h.x()))
    y = np.array(list(h.y()))
    z = np.array(h.z()).T
    if zoom is not None:
        from scipy import ndimage
        if hasattr(zoom, '__iter__'):
            # separate zoom factor per axis
            zoom = list(zoom)
            x = ndimage.zoom(x, zoom[0])
            y = ndimage.zoom(y, zoom[1])
        else:
            x = ndimage.zoom(x, zoom)
            y = ndimage.zoom(y, zoom)
        z = ndimage.zoom(z, zoom)
    result = axes.contour(x, y, z, **kwargs)
    if label_contour:
        plt.clabel(result)
    return result
Draw a matplotlib contour plot from a 2D ROOT histogram .
228
17
245,907
def _post_init(self):
    """Standard rootpy ``_post_init``: initialize both new Trees and
    Trees retrieved from a File.
    """
    if not hasattr(self, '_buffer'):
        # only create a buffer if a model was not given to __init__
        self._buffer = TreeBuffer()
    self.read_branches_on_demand = False
    self._branch_cache = {}
    self._current_entry = 0
    self._always_read = []
    self.userdata = UserData()
    self._inited = True
The standard rootpy _post_init method that is used to initialize both new Trees and Trees retrieved from a File .
101
24
245,908
def always_read(self, branches):
    """Always read these branches, even in caching mode.

    Useful when iterating over an input tree and writing to an output
    tree that shares the same TreeBuffer: branches listed here are read
    from disk on every entry even if never accessed directly.

    Raises TypeError if ``branches`` is not a list or tuple.
    """
    # isinstance is the idiomatic check and also accepts list/tuple
    # subclasses (backward compatible with `type(...) not in (...)`)
    if not isinstance(branches, (list, tuple)):
        raise TypeError("branches must be a list or tuple")
    self._always_read = branches
Always read these branches even when in caching mode . Maybe you have caching enabled and there are branches you want to be updated for each entry even though you never access them directly . This is useful if you are iterating over an input tree and writing to an output tree sharing the same TreeBuffer and you want a direct copy of certain branches . If you have caching enabled but these branches are not specified here and never accessed then they will never be read from disk so the values of branches in memory will remain unchanged .
46
102
245,909
def branch_type(cls, branch):
    """Return the string representation for the type of a branch."""
    typename = branch.GetClassName()
    if typename:
        return typename
    # no class name: derive the type from the first leaf
    leaf = branch.GetListOfLeaves()[0]
    typename = leaf.GetTypeName()
    # does the leaf hold multiple elements?
    leaf_count = leaf.GetLeafCount()
    length = leaf_count.GetMaximum() if leaf_count else leaf.GetLen()
    if length > 1:
        typename = '{0}[{1:d}]'.format(typename, length)
    return typename
Return the string representation for the type of a branch
124
10
245,910
def create_buffer(self, ignore_unsupported=False):
    """Create this tree's TreeBuffer from its activated branches."""
    bufferdict = OrderedDict()
    for branch in self.iterbranches():
        name = branch.GetName()
        # only include activated branches
        if not self.GetBranchStatus(name):
            continue
        if not BaseTree.branch_is_supported(branch):
            log.warning("ignore unsupported branch `{0}`".format(name))
            continue
        bufferdict[name] = Tree.branch_type(branch)
    self.set_buffer(
        TreeBuffer(bufferdict, ignore_unsupported=ignore_unsupported))
Create this tree's TreeBuffer
140
6
245,911
def create_branches(self, branches):
    """Create branches from a TreeBuffer or a dict mapping names to
    type names.
    """
    if not isinstance(branches, TreeBuffer):
        # coerce a plain mapping into a TreeBuffer
        branches = TreeBuffer(branches)
    self.set_buffer(branches, create_branches=True)
Create branches from a TreeBuffer or dict mapping names to type names
44
13
245,912
def update_buffer(self, treebuffer, transfer_objects=False):
    """Merge items from a TreeBuffer into this Tree's TreeBuffer."""
    buf = self._buffer
    buf.update(treebuffer)
    if transfer_objects:
        # also transfer ownership of the contained objects
        buf.set_objects(treebuffer)
Merge items from a TreeBuffer into this Tree's TreeBuffer
44
13
245,913
def set_buffer(self, treebuffer, branches=None, ignore_branches=None,
               create_branches=False, visible=True, ignore_missing=False,
               ignore_duplicates=False, transfer_objects=False):
    """Set the Tree buffer.

    Either create new branches from the buffer entries
    (``create_branches=True``) or bind existing branch addresses to the
    buffer values.
    """
    # determine branches to keep while preserving branch order
    if branches is None:
        branches = treebuffer.keys()
    if ignore_branches is not None:
        branches = [b for b in branches if b not in ignore_branches]
    if create_branches:
        for name in branches:
            value = treebuffer[name]
            if self.has_branch(name):
                if ignore_duplicates:
                    log.warning(
                        "Skipping entry in buffer with the same name "
                        "as an existing branch: `{0}`".format(name))
                    continue
                raise ValueError(
                    "Attempting to create two branches "
                    "with the same name: `{0}`".format(name))
            if isinstance(value, Scalar):
                self.Branch(name, value,
                            '{0}/{1}'.format(name, value.type))
            elif isinstance(value, Array):
                length = value.length_name or len(value)
                self.Branch(name, value,
                            '{0}[{2}]/{1}'.format(name, value.type, length))
            else:
                self.Branch(name, value)
    else:
        for name in branches:
            value = treebuffer[name]
            if self.has_branch(name):
                self.SetBranchAddress(name, value)
            elif not ignore_missing:
                raise ValueError(
                    "Attempting to set address for "
                    "branch `{0}` which does not exist".format(name))
            else:
                log.warning(
                    "Skipping entry in buffer for which no "
                    "corresponding branch in the "
                    "tree exists: `{0}`".format(name))
    if visible:
        # expose the kept entries through this tree's own buffer
        newbuffer = TreeBuffer()
        for branch in branches:
            if branch in treebuffer:
                newbuffer[branch] = treebuffer[branch]
        newbuffer.set_objects(treebuffer)
        self.update_buffer(newbuffer, transfer_objects=transfer_objects)
Set the Tree buffer
479
4
245,914
def glob(self, patterns, exclude=None):
    """Return a list of branch names matching any pattern in
    ``patterns``, dropping names that also match a pattern in
    ``exclude``. Both arguments may be a string or a list of strings.
    """
    if isinstance(patterns, string_types):
        patterns = [patterns]
    if isinstance(exclude, string_types):
        exclude = [exclude]
    matches = []
    for pattern in patterns:
        matches += fnmatch.filter(self.iterbranchnames(), pattern)
    if exclude is not None:
        for exclude_pattern in exclude:
            matches = [name for name in matches
                       if not fnmatch.fnmatch(name, exclude_pattern)]
    return matches
Return a list of branch names that match pattern . Exclude all matched branch names which also match a pattern in exclude . exclude may be a string or list of strings .
110
34
245,915
def CopyTree(self, selection, *args, **kwargs):
    """Copy the tree, supporting a rootpy.tree.cut.Cut selection in
    addition to a simple string.
    """
    # str() turns a Cut into the plain selection string ROOT expects
    return super(BaseTree, self).CopyTree(
        str(selection), *args, **kwargs)
Copy the tree while supporting a rootpy . tree . cut . Cut selection in addition to a simple string .
44
22
245,916
def to_array(self, *args, **kwargs):
    """Convert this tree into a NumPy structured array."""
    # imported lazily so root_numpy is only required when used
    from root_numpy import tree2array
    return tree2array(self, *args, **kwargs)
Convert this tree into a NumPy structured array
42
10
245,917
def color_key(tkey):
    """Return a colorized TKey name given its type."""
    name = tkey.GetName()
    classname = tkey.GetClassName()
    # first matching class pattern determines the color
    color = next((col for regex, col in _COLOR_MATCHER
                  if regex.match(classname)), None)
    if color is not None:
        return colored(name, color=color)
    return name
Function which returns a colorized TKey name given its type
67
12
245,918
def cov(m, y=None, rowvar=1, bias=0, ddof=None, weights=None,
        repeat_weights=0):
    """Estimate a covariance matrix given data, with optional weights.

    Mirrors numpy.cov but supports per-observation ``weights``. If
    ``repeat_weights`` is true, each weight is an integral repetition
    count of its observation; otherwise weights are normalized to unit
    sum. Raises ValueError for non-integer ``ddof`` or a non-positive
    weight sum.
    """
    import numpy as np
    # Check inputs
    if ddof is not None and ddof != int(ddof):
        raise ValueError("ddof must be integer")
    X = np.array(m, ndmin=2, dtype=float)
    if X.size == 0:
        # handle empty arrays
        return np.array(m)
    if X.shape[0] == 1:
        rowvar = 1
    if rowvar:
        axis = 0
        tup = (slice(None), np.newaxis)
    else:
        axis = 1
        tup = (np.newaxis, slice(None))
    if y is not None:
        # BUGFIX: np.array(..., copy=False) raises under NumPy >= 2.0;
        # a plain np.array call is equivalent here (concatenate below
        # copies anyway)
        y = np.array(y, ndmin=2, dtype=float)
        X = np.concatenate((X, y), axis)
    if ddof is None:
        # default matches numpy.cov: unbiased unless bias is set
        ddof = 1 if bias == 0 else 0
    if weights is not None:
        weights = np.array(weights, dtype=float)
        weights_sum = weights.sum()
        if weights_sum <= 0:
            raise ValueError("sum of weights is non-positive")
        X -= np.average(X, axis=1 - axis, weights=weights)[tup]
        if repeat_weights:
            # each weight represents a number of repetitions of an
            # observation, so the total sample size is known and both
            # the unbiased and biased weighted covariances are defined
            fact = weights_sum - ddof
        else:
            # normalize weights so they sum to unity; the unbiased
            # weighted covariance is not defined unless the weights are
            # integral frequencies (repeat-type)
            weights /= weights_sum
            fact = (1. - np.power(weights, 2).sum())
    else:
        weights = 1
        X -= X.mean(axis=1 - axis)[tup]
        N = X.shape[1] if rowvar else X.shape[0]
        fact = float(N - ddof)
    if not rowvar:
        return (np.dot(weights * X.T, X.conj()) / fact).squeeze()
    return (np.dot(weights * X, X.T.conj()) / fact).squeeze()
Estimate a covariance matrix given data .
512
9
245,919
def corrcoef(x, y=None, rowvar=1, bias=0, ddof=None, weights=None,
             repeat_weights=0):
    """Return correlation coefficients (weighted-cov analogue of
    numpy.corrcoef).
    """
    import numpy as np
    c = cov(x, y, rowvar, bias, ddof, weights, repeat_weights)
    if c.size == 0:
        # handle empty arrays
        return c
    try:
        d = np.diag(c)
    except ValueError:
        # scalar covariance
        return 1
    return c / np.sqrt(np.multiply.outer(d, d))
Return correlation coefficients .
118
4
245,920
def safe(self, parentheses=True):
    """Return a string representation with special characters replaced
    by safer characters for use in file names.
    """
    if not self:
        return ""
    s = str(self)
    # order matters: multi-character operators before their prefixes
    for token, replacement in (
            ("**", "_pow_"), ("*", "_mul_"), ("/", "_div_"),
            ("==", "_eq_"), ("<=", "_leq_"), (">=", "_geq_"),
            ("<", "_lt_"), (">", "_gt_"),
            ("&&", "_and_"), ("||", "_or_"), ("!", "not_")):
        s = s.replace(token, replacement)
    if parentheses:
        s = s.replace("(", "L").replace(")", "R")
    else:
        s = s.replace("(", "").replace(")", "")
    return s.replace(" ", "")
Returns a string representation with special characters replaced by safer characters for use in file names .
252
17
245,921
def latex(self):
    """Return a string representation for use in LaTeX."""
    if not self:
        return ""
    s = str(self)
    s = s.replace("==", " = ")
    # BUGFIX: raw strings avoid the invalid "\l"/"\g" escape sequences
    # (DeprecationWarning, and a SyntaxError in future Python versions);
    # the produced text is unchanged
    s = s.replace("<=", r" \leq ")
    s = s.replace(">=", r" \geq ")
    s = s.replace("&&", r" \text{ and } ")
    s = s.replace("||", r" \text{ or } ")
    return s
Returns a string representation for use in LaTeX
103
9
245,922
def replace(self, name, newname):
    """Replace all occurrences of ``name`` with ``newname``.

    Returns None if either argument does not start like a valid
    identifier, otherwise a new Cut.
    """
    identifier = r"[a-zA-Z]\w*"
    if not re.match(identifier, name):
        return None
    if not re.match(identifier, newname):
        return None

    def _substitute(match):
        # only replace the named group within the full match
        return match.group(0).replace(match.group('name'), newname)

    pattern = re.compile(r"(\W|^)(?P<name>" + name + r")(\W|$)")
    return Cut(re.sub(pattern, _substitute, str(self)))
Replace all occurrences of name with newname
138
9
245,923
def save_image(self, image_file):
    """Save the current matplotlib figure to ``image_file``."""
    self.ensure_pyplot()
    command = 'plt.gcf().savefig("%s")' % image_file
    # bookmark the current directory, move into the save directory,
    # save the figure, then restore and drop the bookmark
    self.process_input_line('bookmark ipy_thisdir', store_history=False)
    self.process_input_line('cd -b ipy_savedir', store_history=False)
    self.process_input_line(command, store_history=False)
    self.process_input_line('cd -b ipy_thisdir', store_history=False)
    self.process_input_line('bookmark -d ipy_thisdir', store_history=False)
    self.clear_cout()
Saves the image file to disk .
176
8
245,924
def decorate(self, other=None, **kwargs):
    """Apply style options to a Plottable object."""
    if 'color' in kwargs:
        # `color` conflicts with the individual *color attributes
        incompatible = [attr for attr in
                        ('linecolor', 'fillcolor', 'markercolor')
                        if attr in kwargs]
        if incompatible:
            raise ValueError(
                "Setting both the `color` and the `{0}` attribute{1} "
                "is ambiguous. Please set only one.".format(
                    ', '.join(incompatible),
                    's' if len(incompatible) != 1 else ''))
    if other is not None:
        # start from the decorators of `other`, overridden by kwargs
        decor = other.decorators
        if 'color' in kwargs:
            decor.pop('linecolor', None)
            decor.pop('fillcolor', None)
            decor.pop('markercolor', None)
        decor.update(kwargs)
        kwargs = decor
    # dispatch table from decoration key to setter method name
    setter_names = {
        'markerstyle': 'SetMarkerStyle',
        'markercolor': 'SetMarkerColor',
        'markersize': 'SetMarkerSize',
        'fillcolor': 'SetFillColor',
        'fillstyle': 'SetFillStyle',
        'linecolor': 'SetLineColor',
        'linestyle': 'SetLineStyle',
        'linewidth': 'SetLineWidth',
        'color': 'SetColor',
    }
    for key, value in kwargs.items():
        if key in Plottable.EXTRA_ATTRS_DEPRECATED:
            newkey = Plottable.EXTRA_ATTRS_DEPRECATED[key]
            warnings.warn(
                "`{0}` is deprecated and will be removed in "
                "future versions. Use `{1}` instead".format(
                    key, newkey),
                DeprecationWarning)
            key = newkey
        if key in Plottable.EXTRA_ATTRS:
            setattr(self, key, value)
        elif key in setter_names:
            getattr(self, setter_names[key])(value)
        else:
            raise AttributeError(
                "unknown decoration attribute: `{0}`".format(key))
    return self
Apply style options to a Plottable object .
501
10
245,925
def getitem(self, index):
    """Direct access without going through self.selection."""
    if index >= getattr(self.tree, self.size):
        raise IndexError(index)
    caching = self.__cache_objects
    if caching and index in self.__cache:
        return self.__cache[index]
    item = self.tree_object_cls(self.tree, self.name, self.prefix, index)
    if caching:
        self.__cache[index] = item
    return item
direct access without going through self . selection
97
8
245,926
def configure_defaults():
    """Executed immediately after ROOT's finalSetup."""
    log.debug("configure_defaults()")
    global initialized
    initialized = True
    if use_rootpy_handler:
        # Need to do it again here, since it is overridden by ROOT.
        set_error_handler(python_logging_error_handler)
    if os.environ.get('ROOTPY_BATCH', False) or IN_NOSETESTS:
        ROOT.gROOT.SetBatch(True)
        log.debug('ROOT is running in batch mode')
    ROOT.gErrorIgnoreLevel = 0
    this_dll = C.CDLL(None)
    try:
        auto_dict = C.c_int.in_dll(this_dll, "G__EnableAutoDictionary")
    except ValueError:
        pass
    else:
        # Disable automatic dictionary generation
        auto_dict.value = 0
    # TODO(pwaller): idea, `execfile("userdata/initrc.py")` here?
    # note: that wouldn't allow the user to override the default
    # canvas size, for example.
    for init in _initializations:
        init()
This function is executed immediately after ROOT's finalSetup
248
11
245,927
def rp_module_level_in_stack():
    """Return True if we're during a rootpy module-level import."""
    from traceback import extract_stack
    from rootpy import _ROOTPY_SOURCE_PATH
    # any <module>-level frame originating from the rootpy source tree
    return any(
        filename.startswith(_ROOTPY_SOURCE_PATH)
        for filename, _, func, _ in extract_stack()
        if func == "<module>")
Returns true if we're during a rootpy import
89
10
245,928
def monitor_deletion():
    """Check for correct deletion of weakref-able objects.

    Returns ``(monitor, is_alive)``: register an object under a name
    with ``monitor(item, name)``, then query ``is_alive(name)``.
    """
    monitors = {}

    def monitor(item, name):
        # the weakref callback drops the entry once the referent dies
        def on_dead(weakref):
            del monitors[name]
        monitors[name] = ref(item, on_dead)

    def is_alive(name):
        return monitors.get(name, None) is not None

    return monitor, is_alive
Function for checking for correct deletion of weakref - able objects .
89
13
245,929
def canvases_with(drawable):
    """Return a list of all canvases where ``drawable`` has been
    painted.
    """
    return [canvas for canvas in ROOT.gROOT.GetListOfCanvases()
            if drawable in find_all_primitives(canvas)]
Return a list of all canvases where drawable has been painted .
45
14
245,930
def tick_length_pixels(pad, xaxis, yaxis, xlength, ylength=None):
    """Set the axes tick lengths in pixels."""
    if ylength is None:
        ylength = xlength
    # ROOT tick lengths are expressed as fractions of the pad size
    xaxis.SetTickLength(xlength / float(pad.height_pixels))
    yaxis.SetTickLength(ylength / float(pad.width_pixels))
Set the axes tick lengths in pixels
78
7
245,931
def reset(self):
    """Reset the value to the default."""
    if not self.resetable:
        return
    for i in range(len(self)):
        self[i] = self.default
Reset the value to the default
31
7
245,932
def minimize(func, minimizer_type=None, minimizer_algo=None,
             strategy=None, retry=0, scan=False, print_level=None):
    """Minimize a RooAbsReal function.

    Falls back to ROOT's default minimizer type/algorithm/strategy/print
    level where arguments are None. On failure, retries up to ``retry``
    times, escalating the strategy (max 2). Returns the Minimizer.
    """
    llog = log['minimize']
    min_opts = ROOT.Math.MinimizerOptions
    if minimizer_type is None:
        minimizer_type = min_opts.DefaultMinimizerType()
    if minimizer_algo is None:
        minimizer_algo = min_opts.DefaultMinimizerAlgo()
    if strategy is None:
        strategy = min_opts.DefaultStrategy()
    if print_level is None:
        print_level = min_opts.DefaultPrintLevel()
    if print_level < 0:
        # silence RooFit messages while minimizing; restored below
        msg_service = ROOT.RooMsgService.instance()
        msg_level = msg_service.globalKillBelow()
        msg_service.setGlobalKillBelow(ROOT.RooFit.FATAL)
    minim = Minimizer(func)
    minim.setPrintLevel(print_level)
    minim.setStrategy(strategy)
    if scan:
        llog.info("running scan algorithm ...")
        minim.minimize('Minuit2', 'Scan')
    llog.info("minimizing with {0} {1} using strategy {2}".format(
        minimizer_type, minimizer_algo, strategy))
    status = minim.minimize(minimizer_type, minimizer_algo)
    iretry = 0
    while iretry < retry and status not in (0, 1):
        # BUGFIX: iretry was never incremented, so a persistently
        # failing fit with retry > 0 would loop forever
        iretry += 1
        if strategy < 2:
            strategy += 1
            minim.setStrategy(strategy)
        llog.warning(
            "minimization failed with status {0:d}".format(status))
        llog.info(
            "retrying minimization with strategy {0:d}".format(strategy))
        status = minim.minimize(minimizer_type, minimizer_algo)
    if status in (0, 1):
        llog.info("found minimum")
    else:
        llog.warning(
            "minimization failed with status {0:d}".format(status))
    if print_level < 0:
        msg_service.setGlobalKillBelow(msg_level)
    return minim
Minimize a RooAbsReal function
483
8
245,933
def make_string(obj):
    """Return ``obj`` if it is a string; otherwise attempt to figure
    out the name of the type.
    """
    if inspect.isclass(obj):
        if issubclass(obj, Object):
            # rootpy wrapper: use the underlying ROOT class name
            return obj._ROOT.__name__
        if issubclass(obj, string_types):
            return 'string'
        return obj.__name__
    if not isinstance(obj, string_types):
        raise TypeError("expected string or class")
    return obj
If obj is a string return that otherwise attempt to figure out the name of a type .
84
18
245,934
def ensure_built(self, headers=None):
    """Make sure that a dictionary exists for this type."""
    if not self.params:
        # non-template type: nothing to generate
        return
    for child in self.params:
        child.ensure_built(headers=headers)
    if headers is None:
        headers = self.guess_headers
    generate(str(self), headers,
             has_iterators=self.name in HAS_ITERATORS)
Make sure that a dictionary exists for this type .
75
10
245,935
def guess_headers(self):
    """Attempt to guess what headers may be required in order to use
    this type, including guess_headers of all children recursively.
    """
    name = self.name.replace("*", "")
    headers = []
    if name in KNOWN_TYPES:
        headers.append(KNOWN_TYPES[name])
    elif name in STL:
        headers.append('<{0}>'.format(name))
    elif hasattr(ROOT, name) and name.startswith("T"):
        headers.append('<{0}.h>'.format(name))
    elif '::' in name:
        headers.append('<{0}.h>'.format(name.replace('::', '/')))
    elif name == 'allocator':
        headers.append('<memory>')
    else:
        try:
            # is this just a basic type?
            CPPGrammar.BASIC_TYPE.parseString(name, parseAll=True)
        except ParseException:
            # nope... I don't know what it is
            log.warning(
                "unable to guess headers required for {0}".format(name))
    if self.params:
        for child in self.params:
            headers.extend(child.guess_headers)
    # remove duplicates
    return list(set(headers))
Attempt to guess what headers may be required in order to use this type . Returns guess_headers of all children recursively .
273
26
245,936
def cls(self):
    """Return the class definition for this type."""
    # TODO: register the resulting type?
    args = ", ".join(str(param) for param in self.params)
    return SmartTemplate(self.name)(args)
Return the class definition for this type
40
7
245,937
def from_string(cls, string):
    """Parse ``string`` into a CPPType instance."""
    cls.TYPE.setParseAction(cls.make)
    try:
        return cls.TYPE.parseString(string, parseAll=True)[0]
    except ParseException:
        # log before propagating so the offending input is recorded
        log.error("Failed to parse '{0}'".format(string))
        raise
Parse string into a CPPType instance
73
9
245,938
def callback(cfunc):
    """Turn a ctypes CFUNCTYPE instance into a value which can be
    passed into PyROOT.

    ROOT wants a c_voidp whose addressof() equals the call site of the
    target function; this indirection achieves that.
    """
    address = C.cast(cfunc, C.c_voidp).value
    return C.c_voidp.from_address(address)
Turn a ctypes CFUNCTYPE instance into a value which can be passed into PyROOT
72
21
245,939
def objectproxy_realaddress(obj):
    """Obtain a real address as an integer from an objectproxy."""
    voidp = QROOT.TPython.ObjectProxy_AsVoidPtr(obj)
    # addressof on a c_char viewing the buffer yields the raw address
    return C.addressof(C.c_char.from_buffer(voidp))
Obtain a real address as an integer from an objectproxy .
51
13
245,940
def set_style(style, mpl=False, **kwargs):
    """Set the current plotting style.

    If ``mpl`` is False, accept either a style name or a TStyle
    instance. If ``mpl`` is True, accept either a style name or a
    matplotlib rcParams-like dictionary.
    """
    if not mpl:
        # ROOT style
        if isinstance(style, string_types):
            style = get_style(style, **kwargs)
        log.info("using ROOT style '{0}'".format(style.GetName()))
        style.cd()
        return
    import matplotlib
    if isinstance(style, string_types):
        style_dictionary = get_style(style, mpl=True, **kwargs)
        log.info("using matplotlib style '{0}'".format(style))
    elif isinstance(style, dict):
        style_dictionary = style
        log.info("using user-defined matplotlib style")
    else:
        raise TypeError("style must be a matplotlib style name or dict")
    for key, value in style_dictionary.items():
        matplotlib.rcParams[key] = value
If mpl is False accept either style name or a TStyle instance . If mpl is True accept either style name or a matplotlib . rcParams - like dictionary
224
36
245,941
def cd_previous(self):
    """cd to the gDirectory that was current before this file was
    opened. Returns True if the previous directory was entered.
    """
    prev = self._prev_dir
    if prev is None or isinstance(prev, ROOT.TROOT):
        return False
    if isinstance(prev, ROOT.TFile):
        if prev.IsOpen() and prev.IsWritable():
            prev.cd()
            return True
        return False
    if not prev.IsWritable():
        # avoid warning from ROOT stating file is not writable
        return False
    prev_file = prev.GetFile()
    if prev_file and prev_file.IsOpen():
        prev.cd()
        return True
    return False
cd to the gDirectory before this file was open .
169
11
245,942
def Close(self, *args):
    """Like ROOT's Close, but revert to the gDirectory that was current
    before this file was opened.
    """
    super(_DirectoryBase, self).Close(*args)
    return self.cd_previous()
Like ROOT's Close but reverts to the gDirectory before this file was opened .
32
18
245,943
def keys(self, latest=False):
    """Return a list of the keys in this directory.

    If ``latest`` is True, return only the key with the highest cycle
    for each name.
    """
    if not latest:
        return [asrootpy(key) for key in self.GetListOfKeys()]
    newest = {}
    for key in self.keys():
        name = key.GetName()
        # keep only the highest-cycle key per name
        if name not in newest or key.GetCycle() > newest[name].GetCycle():
            newest[name] = key
    return newest.values()
Return a list of the keys in this directory .
99
10
245,944
def Get(self, path, rootpy=True, **kwargs):
    """Return the requested object, cast as its corresponding rootpy
    subclass if one exists and ``rootpy=True``; otherwise return the
    unadulterated TObject. Raises DoesNotExist if nothing is found.
    """
    thing = super(_DirectoryBase, self).Get(path)
    if not thing:
        raise DoesNotExist
    # Ensure that the file we took the object from is alive at least as
    # long as the object being taken from it.
    # Note, Python does *not* own `thing`, it is ROOT's responsibility
    # to delete it in the C++ sense (SetOwnership is False). ROOT will
    # delete the object when the TFile's destructor is run, so the file
    # must not be destructed while `thing` is still referenced; the
    # weak-referent-based keepalive guarantees that.
    keepalive(thing, self)
    return asrootpy(thing, **kwargs) if rootpy else thing
Return the requested object cast as its corresponding subclass in rootpy if one exists and rootpy = True otherwise return the unadulterated TObject .
218
32
245,945
def GetKey(self, path, cycle=9999, rootpy=True, **kwargs):
    """Like TDirectory's GetKey, optionally casting the key with
    asrootpy. Raises DoesNotExist if the key is not found.
    """
    key = super(_DirectoryBase, self).GetKey(path, cycle)
    if not key:
        raise DoesNotExist
    return asrootpy(key, **kwargs) if rootpy else key
Override TDirectory's GetKey and also handle accessing keys nested arbitrarily deep in subdirectories .
71
20
245,946
def mkdir(self, path, title="", recurse=False):
    """Make a new directory.

    If ``recurse`` is True, create parent directories as required.
    Return the newly created TDirectory.
    """
    head, tail = os.path.split(os.path.normpath(path))
    if not tail:
        raise ValueError("invalid directory name: {0}".format(path))
    with preserve_current_directory():
        dest = self
        if recurse:
            # walk down the parent chain, creating as needed
            for parent_dir in head.split(os.path.sep):
                try:
                    newdest = dest.GetDirectory(parent_dir)
                    dest = newdest
                except DoesNotExist:
                    dest = dest.mkdir(parent_dir)
        elif head != "":
            dest = dest.GetDirectory(head)
        if tail in dest:
            raise ValueError("{0} already exists".format(path))
        newdir = asrootpy(
            super(_DirectoryBase, dest).mkdir(tail, title))
        return newdir
Make a new directory . If recurse is True create parent directories as required . Return the newly created TDirectory .
201
24
245,947
def rm(self, path, cycle=';*'):
    """Delete an object at ``path`` relative to this directory."""
    with preserve_current_directory():
        dirname, objname = os.path.split(os.path.normpath(path))
        target = self
        if dirname:
            target = self.Get(dirname)
        target.Delete(objname + cycle)
Delete an object at path relative to this directory
75
9
245,948
def copytree(self, dest_dir, src=None, newname=None,
             exclude=None, overwrite=False):
    """Copy this directory, one of its subdirectories, or a single
    contained object into another directory.

    Parameters
    ----------
    dest_dir : TDirectory or str
        Destination directory (created if a string naming a missing one).
    src : str, optional
        Path of the object/directory to copy; this directory if None.
    newname : str, optional
        Name to give the copy in the destination.
    exclude : callable, optional
        ``exclude(path, name)`` returning True skips that entry.
    overwrite : bool
        Allow overwriting existing objects in the destination.
    """
    def copy_object(obj, dest, name=None):
        # Write a single object into ``dest`` under ``name``.
        if name is None:
            name = obj.GetName()
        if not overwrite and name in dest:
            raise ValueError(
                "{0} already exists in {1} and `overwrite=False`".format(
                    name, dest._path))
        dest.cd()
        if isinstance(obj, ROOT.R.TTree):
            # trees must be cloned into the destination before writing
            new_obj = obj.CloneTree(-1, "fast")
            new_obj.Write(name, ROOT.R.TObject.kOverwrite)
        else:
            obj.Write(name, ROOT.R.TObject.kOverwrite)
    with preserve_current_directory():
        if isinstance(src, string_types):
            src = asrootpy(self.Get(src))
        else:
            src = self
        if isinstance(dest_dir, string_types):
            try:
                dest_dir = asrootpy(self.GetDirectory(dest_dir))
            except DoesNotExist:
                dest_dir = self.mkdir(dest_dir)
        if isinstance(src, ROOT.R.TDirectory):
            # Copy a directory
            cp_name = newname if newname is not None else src.GetName()
            # See if the directory already exists
            if cp_name not in dest_dir:
                # Destination directory doesn't exist, so make a new one
                new_dir = dest_dir.mkdir(cp_name)
            else:
                new_dir = dest_dir.get(cp_name)
            # Copy everything in the src directory to the destination
            for (path, dirnames, objects) in src.walk(maxdepth=0):
                # Copy all the objects
                for object_name in objects:
                    if exclude and exclude(path, object_name):
                        continue
                    thing = src.Get(object_name)
                    copy_object(thing, new_dir)
                for dirname in dirnames:
                    if exclude and exclude(path, dirname):
                        continue
                    rdir = src.GetDirectory(dirname)
                    # Recursively copy objects in subdirectories
                    rdir.copytree(
                        new_dir, exclude=exclude, overwrite=overwrite)
        else:
            # Copy an object
            copy_object(src, dest_dir, name=newname)
Copy this directory or just one contained object into another directory .
516
12
245,949
def find(self, regexp, negate_regexp=False, class_pattern=None,
         find_fnc=re.search, refresh_cache=False):
    """Yield ``(full_path, match)`` pairs for objects whose path matches
    ``regexp`` (or does not match, with ``negate_regexp=True``).

    ``class_pattern`` optionally restricts results to objects whose ROOT
    class name matches an fnmatch-style pattern.  ``find_fnc`` is the
    regex function applied to each candidate path.  The directory
    contents are cached; pass ``refresh_cache=True`` to rebuild.
    """
    if refresh_cache or not hasattr(self, 'cache'):
        self._populate_cache()
    b = self.cache
    split_regexp = regexp.split('/')
    # traverse as deep as possible in the cache
    # special case if the first character is not the root, i.e. not ""
    if split_regexp[0] == '':
        for d in split_regexp:
            if d in b:
                b = b[d]
            else:
                # stop at the deepest literal prefix of the pattern
                break
    else:
        b = b['']
    # perform the search
    for path, (obj, classname) in b['obj']:
        if class_pattern:
            if not fnmatch(classname, class_pattern):
                continue
        joined_path = os.path.join(*['/', path, obj])
        result = find_fnc(regexp, joined_path)
        # XOR lets ``negate_regexp`` invert the match condition
        if (result is not None) ^ negate_regexp:
            yield joined_path, result
yield the full path of the matching regular expression and the match itself
246
14
245,950
def start_new_gui_thread():
    """Attempt to start a new GUI thread if possible.

    Requires that the event-processing machinery (``_processRootEvents``)
    was set up when rootwait was imported; raises AssertionError if a
    GUI thread is already running.
    """
    PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
    if PyGUIThread is not None:
        # is_alive() replaces the isAlive() alias removed in Python 3.9
        assert not PyGUIThread.is_alive(), "GUI thread already running!"
    assert _processRootEvents, (
        "GUI thread wasn't started when rootwait was imported, "
        "so it can't be restarted")
    ROOT.keeppolling = 1
    ROOT.PyGUIThread = threading.Thread(
        None, _processRootEvents, None, (ROOT,))
    ROOT.PyGUIThread.finishSchedule = _finishSchedule
    # the daemon attribute replaces the deprecated setDaemon()
    ROOT.PyGUIThread.daemon = True
    ROOT.PyGUIThread.start()
    log.debug("successfully started a new GUI thread")
Attempt to start a new GUI thread if possible .
174
10
245,951
def stop_gui_thread():
    """Try to stop the GUI thread.

    Returns True if a running GUI thread was stopped, False otherwise.
    """
    PyGUIThread = getattr(ROOT, 'PyGUIThread', None)
    # is_alive() replaces the isAlive() alias removed in Python 3.9
    if PyGUIThread is None or not PyGUIThread.is_alive():
        # fixed typo in log message ("runnng")
        log.debug("no existing GUI thread is running")
        return False
    ROOT.keeppolling = 0
    try:
        PyGUIThread.finishSchedule()
    except AttributeError:
        # best-effort: thread may predate finishSchedule being attached
        log.debug("unable to call finishSchedule() on PyGUIThread")
    PyGUIThread.join()
    log.debug("successfully stopped the existing GUI thread")
    return True
Try to stop the GUI thread . If it was running returns True otherwise False .
130
16
245,952
def wait_for_zero_canvases(middle_mouse_close=False):
    """Run the ROOT application loop until all visible canvases are
    closed, or CTRL-c is pressed.

    With ``middle_mouse_close=True``, a middle-click event handler is
    attached so canvases can be closed with the middle mouse button.
    """
    if not __ACTIVE:
        # interactive machinery unavailable; delegate to the fallback
        wait_failover(wait_for_zero_canvases)
        return
    @dispatcher
    def count_canvases():
        """
        Count the number of active canvases and finish gApplication.Run()
        if there are none remaining.
        incpy.ignore
        """
        if not get_visible_canvases():
            try:
                ROOT.gSystem.ExitLoop()
            except AttributeError:
                # We might be exiting and ROOT.gROOT will raise an AttributeError
                pass
    @dispatcher
    def exit_application_loop():
        """
        Signal handler for CTRL-c to cause gApplication.Run() to finish.
        incpy.ignore
        """
        ROOT.gSystem.ExitLoop()
    # Handle CTRL-c
    sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True)
    sh.Add()
    sh.Connect("Notified()", "TPyDispatcher",
               exit_application_loop, "Dispatch()")
    visible_canvases = get_visible_canvases()
    for canvas in visible_canvases:
        log.debug("waiting for canvas {0} to close".format(canvas.GetName()))
        canvas.Update()
        if middle_mouse_close:
            attach_event_handler(canvas)
        if not getattr(canvas, "_py_close_dispatcher_attached", False):
            # Attach a handler only once to each canvas
            canvas._py_close_dispatcher_attached = True
            canvas.Connect("Closed()", "TPyDispatcher",
                           count_canvases, "Dispatch()")
        # the dispatcher must outlive the canvas it is connected to
        keepalive(canvas, count_canvases)
    if visible_canvases and not ROOT.gROOT.IsBatch():
        run_application_until_done()
        # Disconnect from canvases
        for canvas in visible_canvases:
            if getattr(canvas, "_py_close_dispatcher_attached", False):
                canvas._py_close_dispatcher_attached = False
                canvas.Disconnect("Closed()", count_canvases, "Dispatch()")
Wait for all canvases to be closed or CTRL - c .
493
13
245,953
def wait_for_frame(frame):
    """Run the ROOT application loop until ``frame`` (a TGMainFrame) is
    closed, or CTRL-c is pressed."""
    if not frame:
        # It's already closed or maybe we're in batch mode
        return
    @dispatcher
    def close():
        # finish gApplication.Run() when the frame closes
        ROOT.gSystem.ExitLoop()
    if not getattr(frame, "_py_close_dispatcher_attached", False):
        # attach the close handler only once per frame
        frame._py_close_dispatcher_attached = True
        frame.Connect("CloseWindow()", "TPyDispatcher", close, "Dispatch()")
    @dispatcher
    def exit_application_loop():
        """
        Signal handler for CTRL-c to cause gApplication.Run() to finish.
        incpy.ignore
        """
        ROOT.gSystem.ExitLoop()
    # Handle CTRL-c
    sh = ROOT.TSignalHandler(ROOT.kSigInterrupt, True)
    sh.Add()
    sh.Connect("Notified()", "TPyDispatcher",
               exit_application_loop, "Dispatch()")
    if not ROOT.gROOT.IsBatch():
        run_application_until_done()
        # Need to disconnect to prevent close handler from running when python
        # teardown has already commenced.
        frame.Disconnect("CloseWindow()", close, "Dispatch()")
wait until a TGMainFrame is closed or ctrl - c
272
13
245,954
def wait_for_browser_close(b):
    """Block until the TBrowser ``b`` is closed.

    Does nothing when ``b`` is falsy (e.g. already closed).
    """
    if not b:
        return
    if not __ACTIVE:
        wait_failover(wait_for_browser_close)
        return
    wait_for_frame(b.GetBrowserImp().GetMainFrame())
Can be used to wait until a TBrowser is closed
55
12
245,955
def log_trace(logger, level=logging.DEBUG, show_enter=True, show_exit=True):
    """Return a decorator that logs a statement on function entry and
    exit (with arguments, result and elapsed time).

    Messages are emitted on a child of ``logger`` named after the
    decorated function, indented by the global ``trace_depth``.
    """
    def wrap(function):
        # log via a child logger named after the wrapped function
        l = logger.getChild(function.__name__).log
        @wraps(function)
        def thunk(*args, **kwargs):
            global trace_depth
            trace_depth.value += 1
            try:
                start = time()
                if show_enter:
                    l(level, "{0}> {1} {2}".format(
                        " " * trace_depth.value, args, kwargs))
                try:
                    result = function(*args, **kwargs)
                except:
                    # on error, log the exception instance as the "result"
                    # and re-raise (bare except: the exception still
                    # propagates)
                    _, result, _ = sys.exc_info()
                    raise
                finally:
                    if show_exit:
                        l(level, "{0}< return {1} [{2:.2f} sec]".format(
                            " " * trace_depth.value, result, time() - start))
            finally:
                # always restore the indentation depth
                trace_depth.value -= 1
            return result
        return thunk
    return wrap
log a statement on function entry and exit
219
8
245,956
def log_stack(logger, level=logging.INFO, limit=None, frame=None):
    """Emit the current call stack on ``logger`` at ``level``.

    ``limit`` caps the number of frames; ``frame`` defaults to the
    caller's frame.  Re-entrant calls are suppressed via the
    ``showing_stack`` guard.
    """
    if showing_stack.inside:
        # already emitting a stack trace; avoid infinite recursion
        return
    showing_stack.inside = True
    try:
        if frame is None:
            frame = sys._getframe(1)
        formatted = "".join(traceback.format_stack(frame, limit))
        for raw_line in formatted.split("\n"):
            if raw_line.strip():
                # strip the two-space indent traceback adds
                logger.log(level, raw_line[2:])
    finally:
        showing_stack.inside = False
Display the current stack on logger .
117
7
245,957
def showdeletion(self, *objects):
    """Arrange for a stack trace to be recorded when each given ROOT
    TObject is deleted."""
    from ..memory import showdeletion as S
    for obj in objects:
        S.monitor_object_cleanup(obj)
Record a stack trace at the point when an ROOT TObject is deleted
39
16
245,958
def trace(self, level=logging.DEBUG, show_enter=True, show_exit=True):
    """Return a decorator that logs function entry and exit (with
    argument and return values) at ``level`` on this logger."""
    from . import log_trace
    return log_trace(self, level, show_enter, show_exit)
Functions decorated with this function show function entry and exit with values defaults to debug log level .
47
19
245,959
def frame_unique(f):
    """Return a (filename, function name, line number) triple uniquely
    identifying frame ``f``'s current line of execution."""
    code = f.f_code
    return code.co_filename, code.co_name, f.f_lineno
A tuple representing a value which is unique to a given frame s line of execution
35
16
245,960
def show_stack_depth(self, record, frame):
    """Compute the maximum stack depth to show, as requested by any
    matching hooks in the logger hierarchy.

    Returns -1 if no hook matches, or if a once-only hook has already
    fired for this (regex, code line, logger name) combination.
    """
    logger = self
    depths = [-1]
    # NOTE(review): msg is computed but never used below -- record.msg
    # (the raw format string) is matched instead; confirm intent.
    msg = record.getMessage()
    # For each logger in the hierarchy
    while logger:
        to_match = getattr(logger, "show_stack_regexes", ())
        for regex, depth, once, min_level in to_match:
            if record.levelno < min_level:
                continue
            if not regex.match(record.msg):
                continue
            # Only for a given regex, line number and logger
            unique = regex, self.frame_unique(frame), record.name
            if once:
                if unique in logger.shown_stack_frames:
                    # We've shown this one already.
                    continue
                # Prevent this stack frame from being shown again
                logger.shown_stack_frames.add(unique)
            depths.append(depth)
        logger = logger.parent
    return max(depths)
Compute the maximum stack depth to show requested by any hooks returning - 1 if there are none matching or if we ve already emitted one for the line of code referred to .
188
35
245,961
def getChild(self, suffix):
    """Return the child logger named ``suffix``.

    Adapted from CPython 2.7, additionally stripping a duplicated
    ``self.name`` prefix and a duplicated trailing module name before
    delegating to the manager.  ``suffix=None`` returns this logger.
    """
    if suffix is None:
        return self
    if self.root is not self:
        dup_prefix = self.name + "."
        if suffix.startswith(dup_prefix):
            # Remove duplicate prefix
            suffix = suffix[len(dup_prefix):]
        parts = suffix.split(".")
        if len(parts) > 1 and parts[-1] == parts[-2]:
            # If we have a submodule's name equal to the parent's name,
            # omit it.
            suffix = ".".join(parts[:-1])
        suffix = '.'.join((self.name, suffix))
    return self.manager.getLogger(suffix)
Taken from CPython 2 . 7 modified to remove duplicate prefix and suffixes
162
16
245,962
def method_file_check(f):
    """Decorator for methods that require a writable ROOT File to be
    currently open; raises RuntimeError otherwise."""
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        current = ROOT.gDirectory
        # gDirectory is gROOT (a TROOT) or null when no file is open
        if isinstance(current, ROOT.TROOT) or not current:
            raise RuntimeError(
                "You must first create a File before calling {0}.{1}".format(
                    self.__class__.__name__, _get_qualified_name(f)))
        if not current.IsWritable():
            raise RuntimeError(
                "Calling {0}.{1} requires that the "
                "current File is writable".format(
                    self.__class__.__name__, _get_qualified_name(f)))
        return f(self, *args, **kwargs)
    return wrapper
A decorator to check that a TFile as been created before f is called . This function can decorate methods .
182
24
245,963
def chainable(f):
    """Decorator making a void method fluent: call ``f``, then return
    ``self`` so calls can be chained."""
    @wraps(f)
    def wrapper(self, *args, **kwargs):
        # perform the action, then hand back the instance
        f(self, *args, **kwargs)
        return self
    return wrapper
Decorator which causes a void function to return self
53
11
245,964
def snake_case_methods(cls, debug=False):
    """Class decorator adding snake_case aliases for the CamelCase
    methods of the ROOT base class.

    ``cls`` must subclass a ROOT class and define the ``_ROOT`` class
    variable.  Returns ``cls`` (possibly with new attributes set).
    """
    if not CONVERT_SNAKE_CASE:
        return cls
    # get the ROOT base class
    root_base = cls._ROOT
    members = inspect.getmembers(root_base)
    # filter out any methods that already exist in lower and uppercase forms
    # i.e. TDirectory::cd and Cd...
    names = {}
    for name, member in members:
        lower_name = name.lower()
        if lower_name in names:
            del names[lower_name]
        else:
            names[lower_name] = None
    for name, member in members:
        if name.lower() not in names:
            continue
        # Don't touch special methods or methods without cap letters
        if name[0] == '_' or name.islower():
            continue
        # Is this a method of the ROOT base class?
        if not inspect.ismethod(member) and not inspect.isfunction(member):
            continue
        # convert CamelCase to snake_case
        new_name = camel_to_snake(name)
        # Use a __dict__ lookup rather than getattr because we _want_ to
        # obtain the _descriptor_, and not what the descriptor gives us
        # when it is `getattr`'d.
        value = None
        skip = False
        for c in cls.mro():
            # skip methods that are already overridden
            if new_name in c.__dict__:
                skip = True
                break
            if name in c.__dict__:
                value = c.__dict__[name]
                break
        # <neo>Woah, a use for for-else</neo>
        else:
            # Weird. Maybe the item lives somewhere else, such as on the
            # metaclass?
            value = getattr(cls, name)
        if skip:
            continue
        setattr(cls, new_name, value)
    return cls
A class decorator adding snake_case methods that alias capitalized ROOT methods . cls must subclass a ROOT class and define the _ROOT class variable .
404
34
245,965
def sync(lock):
    """Synchronization decorator factory: the wrapped function runs
    while holding ``lock``."""
    def decorate(f):
        @wraps(f)
        def locked(*args, **kwargs):
            # acquire/release via the context-manager protocol
            with lock:
                return f(*args, **kwargs)
        return locked
    return decorate
A synchronization decorator
64
4
245,966
def as_ufloat(roorealvar):
    """Cast a RooRealVar to an ``uncertainties.ufloat``.

    Values that are already uncertainties objects are returned
    unchanged.
    """
    if isinstance(roorealvar, (U.AffineScalarFunc, U.Variable)):
        return roorealvar
    # NOTE(review): the single-tuple ufloat((val, err)) call is the legacy
    # uncertainties 1.x API; newer releases expect ufloat(val, err).
    # Confirm which uncertainties version this codebase supports before
    # changing.
    return U.ufloat((roorealvar.getVal(), roorealvar.getError()))
Cast a RooRealVar to an uncertainties . ufloat
66
12
245,967
def correlated_values(param_names, roofitresult):
    """Return correlated parameter values from a RooFitResult.

    Parameters
    ----------
    param_names : sequence of str
        Names of the parameters to return, in the desired order.
    roofitresult : RooFitResult
        Fit result providing values, errors and the correlation matrix.

    Returns
    -------
    tuple of uncertainties values, correlated via the fit's
    correlation matrix, ordered as ``param_names``.
    """
    pars = roofitresult.floatParsFinal()
    pars = [pars[i] for i in range(pars.getSize())]
    parnames = [p.GetName() for p in pars]
    values = [(p.getVal(), p.getError()) for p in pars]
    matrix = asrootpy(roofitresult.correlationMatrix()).to_numpy()
    uvalues = U.correlated_values_norm(values, matrix.tolist())
    uvalues = dict(zip(parnames, uvalues))
    # The previous ``assert all(...)`` referenced the generator-scoped
    # variable ``n`` in its message, which raises NameError (not the
    # intended AssertionError) on failure under Python 3; check each
    # name explicitly instead.
    for n in parnames:
        assert n in uvalues, (
            "name {0} isn't in parameter list {1}".format(n, parnames))
    # Return a tuple in the order it was asked for
    return tuple(uvalues[n] for n in param_names)
Return symbolic values from a RooFitResult taking into account covariance
239
14
245,968
def checkattr(metacls, attr, value):
    """Validate a TreeModel class attribute.

    Only instances of ``rootpy.tree.treetypes.Column``, or classes
    inheriting from ``ROOT.TObject`` / ``ROOT.ObjectProxy``, are
    permitted; raises SyntaxError or TypeError otherwise.  Methods,
    properties and standard dunder attributes are ignored.
    """
    if not isinstance(value, (
            types.MethodType,
            types.FunctionType,
            classmethod,
            staticmethod,
            property)):
        # ignore attributes every plain class has, plus metaclass markers
        if attr in dir(type('dummy', (object,), {})) + [
                '__metaclass__', '__qualname__']:
            return
        if attr.startswith('_'):
            raise SyntaxError(
                "TreeModel attribute `{0}` "
                "must not start with `_`".format(attr))
        if not inspect.isclass(value):
            # non-class values must be Column instances
            if not isinstance(value, Column):
                raise TypeError(
                    "TreeModel attribute `{0}` "
                    "must be an instance of "
                    "`rootpy.tree.treetypes.Column`".format(attr))
            return
        if not issubclass(value, (ROOT.TObject, ROOT.ObjectProxy)):
            raise TypeError(
                "TreeModel attribute `{0}` must inherit "
                "from `ROOT.TObject` or `ROOT.ObjectProxy`".format(attr))
Only allow class attributes that are instances of rootpy . types . Column ROOT . TObject or ROOT . ObjectProxy
255
26
245,969
def prefix(cls, name):
    """Return a new TreeModel whose class attribute names are all
    prefixed with ``name``."""
    attrs = {name + attr: value for attr, value in cls.get_attrs()}
    return TreeModelMeta(
        '_'.join([name, cls.__name__]), (TreeModel,), attrs)
Create a new TreeModel where class attribute names are prefixed with name
72
14
245,970
def get_attrs(cls):
    """Return all data class attributes as (name, value) pairs, ordered
    by definition (``idx``), with the attribute name breaking ties."""
    # attributes that every plain new-style class carries are excluded
    ignore = dir(type('dummy', (object,), {})) + ['__metaclass__']
    callables = (types.FunctionType, types.MethodType,
                 classmethod, staticmethod, property)
    attrs = [
        (attr_name, attr_value)
        for attr_name, attr_value in inspect.getmembers(cls)
        if attr_name not in ignore
        and not isinstance(attr_value, callables)]
    # sort by idx and use attribute name to break ties
    attrs.sort(key=lambda attr: (getattr(attr[1], 'idx', -1), attr[0]))
    return attrs
Get all class attributes ordered by definition
147
7
245,971
def to_struct(cls, name=None):
    """Compile this TreeModel's basic (Column) attributes into a C
    struct via ROOT's interpreter.

    Returns the generated struct class from the ROOT namespace, or
    None when there are no Column attributes or compilation fails.
    """
    if name is None:
        name = cls.__name__
    # only plain Column attributes become struct members
    basic_attrs = dict([(attr_name, value)
                        for attr_name, value in cls.get_attrs()
                        if isinstance(value, Column)])
    if not basic_attrs:
        return None
    src = 'struct {0} {{'.format(name)
    for attr_name, value in basic_attrs.items():
        src += '{0} {1};'.format(value.type.typename, attr_name)
    src += '};'
    # ProcessLine returns nonzero on a compilation error
    if ROOT.gROOT.ProcessLine(src) != 0:
        return None
    return getattr(ROOT, name, None)
Convert the TreeModel into a compiled C struct
168
10
245,972
def id_to_name(id):
    """Return a printable name for PDG ID ``id``, falling back to the
    ID's repr when no name is known."""
    return pdgid_names.get(id) or repr(id)
Convert a PDG ID to a printable string .
35
12
245,973
def id_to_root_name(id):
    """Return a ROOT-markup name for PDG ID ``id``, falling back to the
    ID's repr when no name is known."""
    return root_names.get(id) or repr(id)
Convert a PDG ID to a string with root markup .
34
13
245,974
def new_closure(vals):
    """Build a tuple of closure cells holding the values in ``vals``.

    Works by eval-ing a nested lambda whose inner function closes over
    one generated argument per value.
    """
    names = ','.join('x%i' % i for i in range(len(vals)))
    outer = eval("lambda %s:lambda:(%s)" % (names, names))
    inner = outer(*vals)
    if sys.version_info[0] >= 3:
        return inner.__closure__
    return inner.func_closure
Build a new closure
90
4
245,975
def _inject_closure_values_fix_closures(c, injected, **kwargs):
    """Recursively fix closures: extend every MAKE_CLOSURE site in code
    object ``c`` so that the cells named in ``injected`` are also
    captured, then fix the nested code object itself.

    Iterates in reverse so in-place insertions don't shift the indices
    of sites not yet processed.
    """
    code = c.code
    orig_len = len(code)
    for iback, (opcode, value) in enumerate(reversed(code)):
        i = orig_len - iback - 1
        if opcode != MAKE_CLOSURE:
            continue
        # the constant holding the nested code object precedes the opcode
        codeobj = code[i - 1 - OPCODE_OFFSET]
        assert codeobj[0] == byteplay.LOAD_CONST
        # ... preceded by the BUILD_TUPLE gathering the closure cells
        build_tuple = code[i - 2 - OPCODE_OFFSET]
        assert build_tuple[0] == byteplay.BUILD_TUPLE
        n_closed = build_tuple[1]
        load_closures = code[
            i - 2 - OPCODE_OFFSET - n_closed:i - 2 - OPCODE_OFFSET]
        assert all(o == byteplay.LOAD_CLOSURE for o, _ in load_closures)
        # load the injected cells and grow the tuple accordingly
        newlcs = [(byteplay.LOAD_CLOSURE, inj) for inj in injected]
        code[i - 2 - OPCODE_OFFSET] = (
            byteplay.BUILD_TUPLE, n_closed + len(injected))
        code[i - 2 - OPCODE_OFFSET:i - 2 - OPCODE_OFFSET] = newlcs
        # recurse into the nested code object
        _inject_closure_values_fix_code(codeobj[1], injected, **kwargs)
Recursively fix closures
323
5
245,976
def _inject_closure_values_fix_code(c, injected, **kwargs):
    """Fix a byteplay code object ``c`` so globals named in ``kwargs``
    resolve to injected closure cells; recurses into nested closures.

    Returns ``c`` (modified in place).
    """
    # Add more closure variables
    c.freevars += injected
    # Replace LOAD_GLOBAL with LOAD_DEREF (fetch from closure cells)
    # for named variables
    for i, (opcode, value) in enumerate(c.code):
        if opcode == byteplay.LOAD_GLOBAL and value in kwargs:
            c.code[i] = byteplay.LOAD_DEREF, value
    _inject_closure_values_fix_closures(c, injected, **kwargs)
    return c
Fix code objects recursively fixing any closures
143
9
245,977
def inject_closure_values(func, **kwargs):
    """Return a new function identical to ``func`` except that it acts
    as though the globals named in ``kwargs`` were closed over with the
    given values.

    Handles plain functions, properties (each accessor is fixed) and
    staticmethod/classmethod wrappers (the wrapper type is preserved).
    """
    wrapped_by = None
    if isinstance(func, property):
        fget, fset, fdel = func.fget, func.fset, func.fdel
        if fget:
            fget = fix_func(fget, **kwargs)
        if fset:
            fset = fix_func(fset, **kwargs)
        if fdel:
            fdel = fix_func(fdel, **kwargs)
        return type(func)(fget, fset, fdel)
    elif isinstance(func, (staticmethod, classmethod)):
        # Record the wrapper type *before* unwrapping: the previous code
        # took type(func) after ``func = func.__func__``, which made
        # ``wrapped_by`` the plain function type and broke re-wrapping.
        wrapped_by = type(func)
        func = func.__func__
    newfunc = _inject_closure_values(func, **kwargs)
    if wrapped_by:
        # restore the staticmethod/classmethod wrapper
        newfunc = wrapped_by(newfunc)
    return newfunc
Returns a new function identical to the previous one except that it acts as though global variables named in kwargs have been closed over with the values specified in the kwargs dictionary .
203
37
245,978
def axes(self, ndim=1, xlimits=None, ylimits=None, zlimits=None,
         xbins=1, ybins=1, zbins=1):
    """Create and return axes on this pad.

    Draws an empty dummy histogram of dimensionality ``ndim`` with the
    requested limits/binning and returns ``(xaxis, yaxis)`` for 1D or
    ``(xaxis, yaxis, zaxis)`` for 2D/3D.  The ``*bins`` arguments may be
    a bin count or an iterable of bin edges.  Raises ValueError when
    ``ndim`` is not 1, 2 or 3.
    """
    if xlimits is None:
        xlimits = (0, 1)
    if ylimits is None:
        ylimits = (0, 1)
    if zlimits is None:
        zlimits = (0, 1)
    if ndim == 1:
        from .hist import Hist
        hist = Hist(1, xlimits[0], xlimits[1])
    elif ndim == 2:
        from .hist import Hist2D
        hist = Hist2D(1, xlimits[0], xlimits[1], 1, ylimits[0], ylimits[1])
    elif ndim == 3:
        from .hist import Hist3D
        hist = Hist3D(1, xlimits[0], xlimits[1],
                      1, ylimits[0], ylimits[1],
                      1, zlimits[0], zlimits[1])
    else:
        raise ValueError("ndim must be 1, 2, or 3")
    # draw the dummy histogram on this pad; the pad is made current
    with self:
        hist.Draw('AXIS')
        xaxis = hist.xaxis
        yaxis = hist.yaxis
        # explicit edges take precedence over a bare bin count
        if isinstance(xbins, (list, tuple)):
            xbins = array('d', xbins)
        if hasattr(xbins, '__iter__'):
            xaxis.Set(len(xbins) - 1, xbins)
        else:
            xaxis.Set(xbins, *xlimits)
        if ndim > 1:
            if isinstance(ybins, (list, tuple)):
                ybins = array('d', ybins)
            if hasattr(ybins, '__iter__'):
                yaxis.Set(len(ybins) - 1, ybins)
            else:
                yaxis.Set(ybins, *ylimits)
        else:
            # 1D: the y axis only carries the display range
            yaxis.limits = ylimits
            yaxis.range_user = ylimits
        if ndim > 1:
            zaxis = hist.zaxis
            if ndim == 3:
                if isinstance(zbins, (list, tuple)):
                    zbins = array('d', zbins)
                if hasattr(zbins, '__iter__'):
                    zaxis.Set(len(zbins) - 1, zbins)
                else:
                    zaxis.Set(zbins, *zlimits)
            else:
                # 2D: the z axis only carries the display range
                zaxis.limits = zlimits
                zaxis.range_user = zlimits
            return xaxis, yaxis, zaxis
        return xaxis, yaxis
Create and return axes on this pad
577
7
245,979
def root2hdf5(rfile, hfile, rpath='', entries=-1, userfunc=None,
              show_progress=False, ignore_exception=False, **kwargs):
    """Convert all trees in a ROOT file into tables in an HDF5 file.

    ``rfile``/``hfile`` may be open files or filenames (files opened
    here are closed here).  ``rpath`` restricts the walk to a
    subdirectory; ``userfunc`` may transform each input tree into one
    or more output trees; with ``ignore_exception=True`` failed tree
    conversions are logged instead of raised.  Extra ``kwargs`` are
    forwarded to ``tree2hdf5``.
    """
    own_rootfile = False
    if isinstance(rfile, string_types):
        rfile = root_open(rfile)
        own_rootfile = True
    own_h5file = False
    if isinstance(hfile, string_types):
        hfile = tables_open(filename=hfile, mode="w", title="Data")
        own_h5file = True
    for dirpath, dirnames, treenames in rfile.walk(
            rpath, class_ref=QROOT.TTree):
        # skip directories w/o trees
        if not treenames:
            continue
        treenames.sort()
        # mirror the ROOT directory structure as HDF5 groups
        group_where = '/' + os.path.dirname(dirpath)
        group_name = os.path.basename(dirpath)
        if not group_name:
            group = hfile.root
        elif TABLES_NEW_API:
            group = hfile.create_group(group_where, group_name,
                                       createparents=True)
        else:
            # pre-3.0 PyTables camelCase API
            group = hfile.createGroup(group_where, group_name)
        ntrees = len(treenames)
        log.info(
            "Will convert {0:d} tree{1} in {2}".format(
                ntrees, 's' if ntrees != 1 else '',
                os.path.join(group_where, group_name)))
        for treename in treenames:
            input_tree = rfile.Get(os.path.join(dirpath, treename))
            if userfunc is not None:
                # user-produced trees are written into a temporary file
                tmp_file = TemporaryFile()
                # call user-defined function on tree and get output trees
                log.info("Calling user function on tree '{0}'".format(
                    input_tree.GetName()))
                trees = userfunc(input_tree)
                if not isinstance(trees, list):
                    trees = [trees]
            else:
                trees = [input_tree]
                tmp_file = None
            for tree in trees:
                try:
                    tree2hdf5(tree, hfile, group=group,
                              entries=entries,
                              show_progress=show_progress,
                              **kwargs)
                except Exception as e:
                    if ignore_exception:
                        log.error(
                            "Failed to convert tree '{0}': {1}".format(
                                tree.GetName(), str(e)))
                    else:
                        raise
            input_tree.Delete()
            if userfunc is not None:
                # clean up the user-produced trees and their temp file
                for tree in trees:
                    tree.Delete()
                tmp_file.Close()
    if own_h5file:
        hfile.close()
    if own_rootfile:
        rfile.Close()
Convert all trees in a ROOT file into tables in an HDF5 file .
611
18
245,980
def Reverse(self, copy=False):
    """Reverse the order of the graph's points.

    Modifies this graph in place unless ``copy=True``, in which case a
    reversed clone is returned instead.
    """
    npoints = self.GetN()
    graph = self.Clone() if copy else self
    X, Y = self.GetX(), self.GetY()
    EXlow, EXhigh = self.GetEXlow(), self.GetEXhigh()
    EYlow, EYhigh = self.GetEYlow(), self.GetEYhigh()
    for i in range(npoints):
        j = npoints - 1 - i
        graph.SetPoint(i, X[j], Y[j])
        graph.SetPointError(i, EXlow[j], EXhigh[j], EYlow[j], EYhigh[j])
    return graph
Reverse the order of the points
167
8
245,981
def Shift(self, value, copy=False):
    """Shift the graph horizontally by ``value``.

    Modifies this graph in place unless ``copy=True``, in which case a
    shifted clone is returned instead.
    """
    npoints = self.GetN()
    graph = self.Clone() if copy else self
    X, Y = self.GetX(), self.GetY()
    EXlow, EXhigh = self.GetEXlow(), self.GetEXhigh()
    EYlow, EYhigh = self.GetEYlow(), self.GetEYhigh()
    for i in range(npoints):
        graph.SetPoint(i, X[i] + value, Y[i])
        graph.SetPointError(i, EXlow[i], EXhigh[i], EYlow[i], EYhigh[i])
    return graph
Shift the graph left or right by value
163
8
245,982
def Integrate(self):
    """Integrate the graph using the trapezoidal rule."""
    xvals = self.GetX()
    yvals = self.GetY()
    total = 0.
    for i in range(self.GetN() - 1):
        # area of the trapezoid between consecutive points
        total += (xvals[i + 1] - xvals[i]) * (yvals[i] + yvals[i + 1]) / 2.
    return total
Integrate using the trapezoidal method
76
8
245,983
def Append(self, other):
    """Append the points of ``other`` onto the end of this graph.

    Point errors are copied as well when this graph supports them.
    """
    start = len(self)
    self.Set(start + len(other))
    # graphs without errors (plain TGraph) lack SetPointError
    has_errors = hasattr(self, 'SetPointError')
    for offset, point in enumerate(other):
        i = start + offset
        self.SetPoint(i, point.x.value, point.y.value)
        if has_errors:
            self.SetPointError(
                i,
                point.x.error_low, point.x.error_hi,
                point.y.error_low, point.y.error_hi)
Append points from another graph
150
6
245,984
def keepalive(nurse, *patients):
    """Keep ``patients`` alive at least as long as ``nurse``, via the
    module's weak-key registry.

    Unhashable nurses or patients cannot be tracked and are logged as
    warnings instead.
    """
    if DISABLED:
        return
    if not hashable(nurse):
        log.warning(
            "Unable to keep objects alive for lifetime of "
            "unhashable object {0}".format(nurse))
        return
    kept = []
    for patient in patients:
        if hashable(patient):
            log.debug(
                "Keeping {0} alive for lifetime of {1}".format(
                    patient, nurse))
            kept.append(patient)
        else:
            log.warning(
                "Unable to keep unhashable object {0} "
                "alive for lifetime of {1}".format(patient, nurse))
    KEEPALIVE.setdefault(nurse, set()).update(kept)
Keep patients alive at least as long as nurse is around using a WeakKeyDictionary .
168
18
245,985
def canonify_slice(s, n):
    """Return a canonical ``slice`` equivalent to ``s`` over ``n`` bins.

    ``s`` may be a slice or any integral index; an integer selects the
    single-bin slice ``[s:s+1]``.  Negative and None endpoints are
    normalized (start defaults to 0, stop to ``n``, step to 1).
    """
    import numbers
    # numbers.Integral covers int, Python 2 long and numpy integer
    # types, replacing the Python-2-only (int, long) check
    if isinstance(s, numbers.Integral):
        return canonify_slice(slice(s, s + 1, None), n)
    start = s.start % n if s.start is not None else 0
    stop = s.stop % n if s.stop is not None else n
    step = s.step if s.step is not None else 1
    return slice(start, stop, step)
Convert a slice object into a canonical form to simplify treatment in histogram bin content and edge slicing .
98
21
245,986
def bin_to_edge_slice(s, n):
    """Convert a bin slice into a bin edge slice.

    ``n`` is the number of bins.  The returned slice selects the edges
    bounding the bins selected by ``s`` (after canonicalization).
    """
    s = canonify_slice(s, n)
    start = s.start
    stop = s.stop
    if start > stop:
        # normalize a reversed range so start < stop
        _stop = start + 1
        start = stop + 1
        stop = _stop
    # include the left edge of the first selected bin
    start = max(start - 1, 0)
    step = abs(s.step)
    if stop <= 1 or start >= n - 1 or stop == start + 1:
        # degenerate selection: fall back to the outermost edges
        return slice(0, None, min(step, n - 2))
    s = slice(start, stop, abs(s.step))
    if len(range(*s.indices(n - 1))) < 2:
        # guarantee at least two edges are selected
        return slice(start, stop, stop - start - 1)
    return s
Convert a bin slice into a bin edge slice .
155
11
245,987
def histogram(data, *args, **kwargs):
    """Create and fill a one-dimensional histogram from ``data``.

    A ``binning`` keyword triggers automatic binning; all other
    arguments are passed to the Hist constructor.  Returns
    ``(lower_bin_edges, hist)``.
    """
    from .autobinning import autobinning
    if kwargs.pop('dim', 1) != 1:
        raise NotImplementedError
    if 'binning' in kwargs:
        # derive the binning automatically from the data
        args = autobinning(data, kwargs.pop('binning'))
    histo = Hist(*args, **kwargs)
    for value in data:
        histo.Fill(value)
    return list(histo.xedgesl()), histo
Create and fill a one - dimensional histogram .
130
10
245,988
def overflow(self):
    """Return True if this BinProxy refers to an underflow or overflow
    bin along any axis."""
    indices = self.hist.xyz(self.idx)
    return any(
        indices[axis] == 0 or indices[axis] == self.hist.nbins(axis) + 1
        for axis in range(self.hist.GetDimension()))
Returns true if this BinProxy is for an overflow bin
66
11
245,989
def xyz(self, idx):
    """Return ``(binx, biny, binz)`` for global bin number ``idx``.

    GetBinXYZ's output-parameter signature is unusable from Python 3,
    so the decomposition is performed manually.  Axes beyond the
    histogram's dimensionality yield 0.
    """
    nx = self.GetNbinsX() + 2
    ny = self.GetNbinsY() + 2
    ndim = self.GetDimension()
    if ndim > 3:
        raise NotImplementedError
    # peel off one axis at a time with integer division
    rest, binx = divmod(idx, nx)
    biny = rest % ny if ndim > 1 else 0
    binz = rest // ny if ndim > 2 else 0
    return binx, biny, binz
return binx biny binz corresponding to the global bin number
200
13
245,990
def nbins(self, axis=0, overflow=False):
    """Return the number of bins along ``axis`` (0=x, 1=y, 2=z),
    including the two flow bins when ``overflow`` is True."""
    if axis == 0:
        count = self.GetNbinsX()
    elif axis == 1:
        count = self.GetNbinsY()
    elif axis == 2:
        count = self.GetNbinsZ()
    else:
        raise ValueError("axis must be 0, 1, or 2")
    return count + 2 if overflow else count
Get the number of bins along an axis
102
8
245,991
def bins_range(self, axis=0, overflow=False):
    """Return a range of bin indices for iterating along ``axis``.

    Real bins are 1..nbins; with ``overflow=True`` the underflow (0)
    and overflow (nbins+1) bins are included.
    """
    count = self.nbins(axis=axis, overflow=False)
    if overflow:
        return range(0, count + 2)
    return range(1, count + 1)
Return a range of bin indices for iterating along an axis
68
12
245,992
def uniform_binned(self, name=None):
    """Return a new histogram with constant-width bins along all axes,
    using this histogram's bin indices as the new bin edges.

    Bin contents, errors, decorations and the entry count are copied.
    """
    nx = self.GetNbinsX()
    dim = self.GetDimension()
    if dim == 1:
        new_hist = Hist(nx, 0, nx, name=name, type=self.TYPE)
    elif dim == 2:
        ny = self.GetNbinsY()
        new_hist = Hist2D(nx, 0, nx, ny, 0, ny,
                          name=name, type=self.TYPE)
    else:
        ny = self.GetNbinsY()
        nz = self.GetNbinsZ()
        new_hist = Hist3D(nx, 0, nx, ny, 0, ny, nz, 0, nz,
                          name=name, type=self.TYPE)
    # copy over the bin contents and errors
    for outbin, inbin in zip(new_hist.bins(), self.bins()):
        outbin.value = inbin.value
        outbin.error = inbin.error
    new_hist.decorate(self)
    new_hist.entries = self.entries
    return new_hist
Return a new histogram with constant width bins along all axes by using the bin indices as the bin edges of the new histogram .
294
27
245,993
def underflow(self, axis=0):
    """Return the underflow content along ``axis``.

    For a 1D histogram this is a scalar; for 2D a list over the other
    axis (including flow bins); for 3D a nested list over the two other
    axes.  Raises ValueError for an invalid axis.
    """
    if axis not in range(3):
        raise ValueError("axis must be 0, 1, or 2")
    if self.DIM == 1:
        return self.GetBinContent(0)
    elif self.DIM == 2:
        def idx(i):
            # build the bin index list with 0 inserted at ``axis``
            arg = [i]
            arg.insert(axis, 0)
            return arg
        return [
            self.GetBinContent(*idx(i))
            for i in self.bins_range(axis=(axis + 1) % 2, overflow=True)]
    elif self.DIM == 3:
        # the two remaining axes index the returned nested list
        axes = [0, 1, 2]
        axes.remove(axis)
        axis2, axis3 = axes
        def idx(i, j):
            # build the bin index list with 0 inserted at ``axis``
            arg = [i, j]
            arg.insert(axis, 0)
            return arg
        return [[
            self.GetBinContent(*idx(i, j))
            for i in self.bins_range(axis=axis2, overflow=True)]
            for j in self.bins_range(axis=axis3, overflow=True)]
Return the underflow for the given axis .
239
9
245,994
def lowerbound(self, axis=0):
    """Return the lower bound of the binning along ``axis``.

    Raises ValueError for an out-of-range axis and TypeError for a
    non-integer axis.
    """
    if not 0 <= axis < self.GetDimension():
        raise ValueError(
            "axis must be a non-negative integer less than "
            "the dimensionality of the histogram")
    try:
        attr = ('xedges', 'yedges', 'zedges')[axis]
    except TypeError:
        raise TypeError("axis must be an integer")
    # first edge of the first real (non-underflow) bin
    return getattr(self, attr)(1)
Get the lower bound of the binning along an axis
105
11
245,995
def bounds(self, axis=0):
    """Return the (lower, upper) bounds of the binning along ``axis``.

    Raises ValueError for an out-of-range axis and TypeError for a
    non-integer axis.
    """
    if not 0 <= axis < self.GetDimension():
        raise ValueError(
            "axis must be a non-negative integer less than "
            "the dimensionality of the histogram")
    try:
        attr = ('xedges', 'yedges', 'zedges')[axis]
    except TypeError:
        raise TypeError("axis must be an integer")
    edge = getattr(self, attr)
    # first edge of the first real bin, last edge of the last real bin
    return edge(1), edge(-2)
Get the lower and upper bounds of the binning along an axis
134
13
245,996
def check_compatibility(self, other, check_edges=False, precision=1E-7):
    """Test whether two histograms are considered compatible.

    Compatibility requires the same dimensionality, the same total
    size, and the same number of bins along each axis.  If
    ``check_edges`` is True the bin edges must also agree within
    ``precision``.

    Raises
    ------
    TypeError
        If the dimensionalities differ.
    ValueError
        If the sizes, per-axis bin counts, or (optionally) edges differ.
    """
    if self.GetDimension() != other.GetDimension():
        raise TypeError("histogram dimensionalities do not match")
    if len(self) != len(other):
        raise ValueError("histogram sizes do not match")
    for axis in range(self.GetDimension()):
        if self.nbins(axis=axis) != other.nbins(axis=axis):
            raise ValueError(
                "numbers of bins along axis {0:d} do not match".format(axis))
    if check_edges:
        for axis in range(self.GetDimension()):
            # generator form: avoid materializing the comparison list
            if not all(abs(l - r) < precision
                       for l, r in zip(self._edges(axis),
                                       other._edges(axis))):
                raise ValueError(
                    "edges do not match along axis {0:d}".format(axis))
Test whether two histograms are considered compatible by the number of dimensions , the number of bins along each axis , and optionally the bin edges .
227
25
245,997
def fill_array(self, array, weights=None):
    """Fill this histogram with a NumPy array (optionally weighted).

    Prefers root_numpy's ``fill_hist`` and falls back to the older
    ``fill_array`` entry point; re-raises ImportError (after logging)
    when root_numpy is not importable at all.
    """
    try:
        from root_numpy import fill_hist as fill_func
    except ImportError:
        try:
            from root_numpy import fill_array as fill_func
        except ImportError:
            log.critical(
                "root_numpy is needed for Hist*.fill_array. "
                "Is it installed and importable?")
            raise
    fill_func(self, array, weights=weights)
Fill this histogram with a NumPy array
93
9
245,998
def fill_view(self, view):
    """Add the contents (and Sumw2 entries) of a view of another
    histogram into this one.

    For every (x, y, z) bin index in ``view.points`` the destination
    bin is located via FindBin using the *coordinates* of the source
    bin centers — so the two histograms need not share identical
    binning (presumably; confirm against callers).
    """
    other = view.hist
    # cache bound methods once; the loop below may touch many bins
    _other_x_center = other.axis(0).GetBinCenter
    _other_y_center = other.axis(1).GetBinCenter
    _other_z_center = other.axis(2).GetBinCenter
    _other_get = other.GetBinContent
    # bypass any GetBin override on the subclass to get the raw
    # global bin index from the base implementation
    _other_get_bin = super(_HistBase, other).GetBin
    other_sum_w2 = other.GetSumw2()
    _other_sum_w2_at = other_sum_w2.At
    _find = self.FindBin
    sum_w2 = self.GetSumw2()
    _sum_w2_at = sum_w2.At
    _sum_w2_setat = sum_w2.SetAt
    _set = self.SetBinContent
    _get = self.GetBinContent
    for x, y, z in view.points:
        # destination bin found by source bin-center coordinates
        idx = _find(_other_x_center(x),
                    _other_y_center(y),
                    _other_z_center(z))
        other_idx = _other_get_bin(x, y, z)
        # accumulate bin content and the squared-weight sum in place
        _set(idx, _get(idx) + _other_get(other_idx))
        _sum_w2_setat(_sum_w2_at(idx) + _other_sum_w2_at(other_idx), idx)
Fill this histogram from a view of another histogram
330
11
245,999
def get_sum_w2(self, ix, iy=0, iz=0):
    """Return the stored sum of squared weights for bin (ix, iy, iz).

    Raises RuntimeError when Sumw2 storage was never enabled, and
    IndexError when the flattened bin index is out of range.
    """
    if self.GetSumw2N() == 0:
        raise RuntimeError(
            "Attempting to access Sumw2 in histogram "
            "where weights were not stored")
    # under/overflow bins are part of the flattened index space
    nx = self.nbins(axis=0, overflow=True)
    ny = self.nbins(axis=1, overflow=True)
    # Horner form of nx*ny*iz + nx*iy + ix
    flat = ix + nx * (iy + ny * iz)
    if not 0 <= flat < self.GetSumw2N():
        raise IndexError("bin index out of range")
    return self.GetSumw2().At(flat)
Obtain the true number of entries in the bin weighted by w^2
159
15