idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
36,000
def onesided_cl_to_dlnl(cl):
    """Compute the delta-loglikelihood value that corresponds to a
    one-sided upper limit at the given confidence level.

    Parameters
    ----------
    cl : float
        Confidence level (e.g. 0.95).

    Returns
    -------
    float
        Delta-loglikelihood offset from the best-fit point.
    """
    tail_prob = 1.0 - cl
    # Number of gaussian sigmas for a one-sided interval at this CL.
    nsigma = np.sqrt(2.) * special.erfinv(1 - 2 * tail_prob)
    return 0.5 * nsigma ** 2
Compute the delta-loglikelihood value that corresponds to an upper limit at the given confidence level.
36,001
def split_bin_edges(edges, npts=2):
    """Subdivide an array of bin edges by splitting each bin into
    ``npts`` equal subintervals.

    Parameters
    ----------
    edges : `~numpy.ndarray`
        1-D array of bin edges.
    npts : int
        Number of subintervals per bin.  Values below 2 return the
        input array unchanged.

    Returns
    -------
    `~numpy.ndarray`
        Sorted array of unique subdivided edges.
    """
    if npts < 2:
        return edges
    lo = edges[:-1, None]
    hi = edges[1:, None]
    fractions = np.linspace(0.0, 1.0, npts + 1)[None, :]
    grid = lo + (hi - lo) * fractions
    return np.unique(grid.ravel())
Subdivide an array of bins by splitting each bin into npts subintervals .
36,002
def extend_array ( edges , binsz , lo , hi ) : numlo = int ( np . ceil ( ( edges [ 0 ] - lo ) / binsz ) ) numhi = int ( np . ceil ( ( hi - edges [ - 1 ] ) / binsz ) ) edges = copy . deepcopy ( edges ) if numlo > 0 : edges_lo = np . linspace ( edges [ 0 ] - numlo * binsz , edges [ 0 ] , numlo + 1 ) edges = np . concaten...
Extend an array to encompass lo and hi values .
36,003
def fits_recarray_to_dict ( table ) : cols = { } for icol , col in enumerate ( table . columns . names ) : col_data = table . data [ col ] if type ( col_data [ 0 ] ) == np . float32 : cols [ col ] = np . array ( col_data , dtype = float ) elif type ( col_data [ 0 ] ) == np . float64 : cols [ col ] = np . array ( col_da...
Convert a FITS recarray to a python dictionary .
36,004
def merge_dict ( d0 , d1 , add_new_keys = False , append_arrays = False ) : if d1 is None : return d0 elif d0 is None : return d1 elif d0 is None and d1 is None : return { } od = { } for k , v in d0 . items ( ) : t0 = None t1 = None if k in d0 : t0 = type ( d0 [ k ] ) if k in d1 : t1 = type ( d1 [ k ] ) if k not in d1 ...
Recursively merge the contents of python dictionary d0 with the contents of another python dictionary d1 .
36,005
def tolist ( x ) : if isinstance ( x , list ) : return map ( tolist , x ) elif isinstance ( x , dict ) : return dict ( ( tolist ( k ) , tolist ( v ) ) for k , v in x . items ( ) ) elif isinstance ( x , np . ndarray ) or isinstance ( x , np . number ) : return tolist ( x . tolist ( ) ) elif isinstance ( x , OrderedDict ...
Convenience function that takes in a nested structure of lists and dictionaries and converts everything to its base objects. This is useful for dumping a file to yaml.
36,006
def make_gaussian_kernel ( sigma , npix = 501 , cdelt = 0.01 , xpix = None , ypix = None ) : sigma /= cdelt def fn ( t , s ) : return 1. / ( 2 * np . pi * s ** 2 ) * np . exp ( - t ** 2 / ( s ** 2 * 2.0 ) ) dxy = make_pixel_distance ( npix , xpix , ypix ) k = fn ( dxy , sigma ) k /= ( np . sum ( k ) * np . radians ( cd...
Make kernel for a 2D gaussian .
36,007
def make_disk_kernel(radius, npix=501, cdelt=0.01, xpix=None, ypix=None):
    """Make a kernel for a 2D disk.

    Parameters
    ----------
    radius : float
        Disk radius in the same angular units as ``cdelt``.
    npix : int
        Kernel image size in pixels.
    cdelt : float
        Pixel size in angular units.
    xpix, ypix : float, optional
        Pixel coordinates of the disk center.
    """
    # Convert the disk radius from angular units to pixels.
    radius /= cdelt

    def step(dist, edge):
        # 1 inside the disk, 0 outside (0.5 exactly on the edge).
        return 0.5 * (np.sign(edge - dist) + 1.0)

    pix_dist = make_pixel_distance(npix, xpix, ypix)
    kernel = step(pix_dist, radius)
    # Normalize so the kernel integrates to unity (per steradian).
    kernel /= (np.sum(kernel) * np.radians(cdelt) ** 2)
    return kernel
Make kernel for a 2D disk .
36,008
def make_cdisk_kernel ( psf , sigma , npix , cdelt , xpix , ypix , psf_scale_fn = None , normalize = False ) : sigma /= 0.8246211251235321 dtheta = psf . dtheta egy = psf . energies x = make_pixel_distance ( npix , xpix , ypix ) x *= cdelt k = np . zeros ( ( len ( egy ) , npix , npix ) ) for i in range ( len ( egy ) ) ...
Make a kernel for a PSF - convolved 2D disk .
36,009
def make_radial_kernel ( psf , fn , sigma , npix , cdelt , xpix , ypix , psf_scale_fn = None , normalize = False , klims = None , sparse = False ) : if klims is None : egy = psf . energies else : egy = psf . energies [ klims [ 0 ] : klims [ 1 ] + 1 ] ang_dist = make_pixel_distance ( npix , xpix , ypix ) * cdelt max_ang...
Make a kernel for a general radially symmetric 2D function .
36,010
def make_psf_kernel ( psf , npix , cdelt , xpix , ypix , psf_scale_fn = None , normalize = False ) : egy = psf . energies x = make_pixel_distance ( npix , xpix , ypix ) x *= cdelt k = np . zeros ( ( len ( egy ) , npix , npix ) ) for i in range ( len ( egy ) ) : k [ i ] = psf . eval ( i , x , scale_fn = psf_scale_fn ) i...
Generate a kernel for a point - source .
36,011
def overlap_slices ( large_array_shape , small_array_shape , position ) : edges_min = [ int ( pos - small_shape // 2 ) for ( pos , small_shape ) in zip ( position , small_array_shape ) ] edges_max = [ int ( pos + ( small_shape - small_shape // 2 ) ) for ( pos , small_shape ) in zip ( position , small_array_shape ) ] sl...
Modified version of ~astropy . nddata . utils . overlap_slices .
36,012
def make_library ( ** kwargs ) : library_yaml = kwargs . pop ( 'library' , 'models/library.yaml' ) comp_yaml = kwargs . pop ( 'comp' , 'config/binning.yaml' ) basedir = kwargs . pop ( 'basedir' , os . path . abspath ( '.' ) ) model_man = kwargs . get ( 'ModelManager' , ModelManager ( basedir = basedir ) ) model_comp_di...
Build and return a ModelManager object and fill the associated model library
36,013
def edisp_disable_list(self):
    """Return the list of sources for which energy dispersion should
    be disabled."""
    return [comp.info.source_name
            for comp in self.model_components.values()
            if comp.edisp_disable]
Return the list of sources for which energy dispersion should be turned off
36,014
def make_model_rois ( self , components , name_factory ) : ret_dict = { } master_roi_source_info = { } sub_comp_sources = { } for comp_name , model_comp in self . model_components . items ( ) : comp_info = model_comp . info if comp_info . components is None : master_roi_source_info [ comp_name ] = model_comp else : sub...
Make the fermipy roi_model objects for each of a set of binning components
36,015
def read_model_yaml(self, modelkey):
    """Read the yaml file describing the diffuse components of a model.

    Parameters
    ----------
    modelkey : str
        Key identifying the model; resolved to a file path by the
        name factory.

    Returns
    -------
    dict
        Parsed contents of the model yaml file.
    """
    model_yaml = self._name_factory.model_yaml(modelkey=modelkey,
                                               fullpath=True)
    # Use a context manager so the file handle is always closed
    # (the original left the file object open).
    with open(model_yaml) as f:
        return yaml.safe_load(f)
Read the yaml file for the diffuse components
36,016
def make_library ( self , diffuse_yaml , catalog_yaml , binning_yaml ) : ret_dict = { } components_dict = Component . build_from_yamlfile ( binning_yaml ) diffuse_ret_dict = make_diffuse_comp_info_dict ( GalpropMapManager = self . _gmm , DiffuseModelManager = self . _dmm , library = diffuse_yaml , components = componen...
Build up the library of all the components
36,017
def make_model_info ( self , modelkey ) : model = self . read_model_yaml ( modelkey ) sources = model [ 'sources' ] components = OrderedDict ( ) spec_model_yaml = self . _name_factory . fullpath ( localpath = model [ 'spectral_models' ] ) self . _spec_lib . update ( yaml . safe_load ( open ( spec_model_yaml ) ) ) for s...
Build a dictionary with the information for a particular model .
36,018
def get_sub_comp_info ( source_info , comp ) : sub_comps = source_info . get ( 'components' , None ) if sub_comps is None : return source_info . copy ( ) moving = source_info . get ( 'moving' , False ) selection_dependent = source_info . get ( 'selection_dependent' , False ) if selection_dependent : key = comp . make_k...
Build and return information about a sub - component for a particular selection
36,019
def replace_aliases(cut_dict, aliases):
    """Substitute aliases in a cut dictionary in place.

    Every occurrence of an alias name inside each cut string is
    replaced by the alias definition wrapped in parentheses.  The
    dictionary is modified in place; nothing is returned.
    """
    for cut_name in cut_dict:
        expanded = cut_dict[cut_name]
        for alias, definition in aliases.items():
            expanded = expanded.replace(alias, '(%s)' % definition)
        cut_dict[cut_name] = expanded
Substitute aliases in a cut dictionary .
36,020
def get_files ( files , extnames = [ '.root' ] ) : files_out = [ ] for f in files : mime = mimetypes . guess_type ( f ) if os . path . splitext ( f ) [ 1 ] in extnames : files_out += [ f ] elif mime [ 0 ] == 'text/plain' : files_out += list ( np . loadtxt ( f , unpack = True , dtype = 'str' ) ) else : raise Exception (...
Extract a list of file paths from a list containing both paths and file lists with one path per line .
36,021
def get_cuts_from_xml ( xmlfile ) : root = ElementTree . ElementTree ( file = xmlfile ) . getroot ( ) event_maps = root . findall ( 'EventMap' ) alias_maps = root . findall ( 'AliasDict' ) [ 0 ] event_classes = { } event_types = { } event_aliases = { } for m in event_maps : if m . attrib [ 'altName' ] == 'EVENT_CLASS' ...
Extract event selection strings from the XML file .
36,022
def set_event_list ( tree , selection = None , fraction = None , start_fraction = None ) : import ROOT elist = rand_str ( ) if selection is None : cuts = '' else : cuts = selection if fraction is None or fraction >= 1.0 : n = tree . Draw ( ">>%s" % elist , cuts , "goff" ) tree . SetEventList ( ROOT . gDirectory . Get (...
Set the event list for a tree or chain .
36,023
def localize ( self , name , ** kwargs ) : timer = Timer . create ( start = True ) name = self . roi . get_source_by_name ( name ) . name schema = ConfigSchema ( self . defaults [ 'localize' ] , optimizer = self . defaults [ 'optimizer' ] ) schema . add_option ( 'use_cache' , True ) schema . add_option ( 'prefix' , '' ...
Find the best - fit position of a source . Localization is performed in two steps . First a TS map is computed centered on the source with half - width set by dtheta_max . A fit is then performed to the maximum TS peak in this map . The source position is then further refined by scanning the likelihood in the vicinity ...
36,024
def _fit_position_tsmap ( self , name , ** kwargs ) : prefix = kwargs . get ( 'prefix' , '' ) dtheta_max = kwargs . get ( 'dtheta_max' , 0.5 ) zmin = kwargs . get ( 'zmin' , - 3.0 ) kw = { 'map_size' : 2.0 * dtheta_max , 'write_fits' : kwargs . get ( 'write_fits' , False ) , 'write_npy' : kwargs . get ( 'write_npy' , F...
Localize a source from its TS map .
36,025
def get_lsf_status ( ) : status_count = { 'RUN' : 0 , 'PEND' : 0 , 'SUSP' : 0 , 'USUSP' : 0 , 'NJOB' : 0 , 'UNKNWN' : 0 } try : subproc = subprocess . Popen ( [ 'bjobs' ] , stdout = subprocess . PIPE , stderr = subprocess . PIPE ) subproc . stderr . close ( ) output = subproc . stdout . readlines ( ) except OSError : r...
Count and print the number of jobs in various LSF states
36,026
def build_bsub_command(command_template, lsf_args):
    """Build and return an LSF ``bsub`` batch command template.

    Parameters
    ----------
    command_template : str or None
        The command to wrap; if None an empty string is returned.
    lsf_args : dict
        Mapping of bsub option names to values.  A value of None
        emits a bare flag with no argument.
    """
    if command_template is None:
        return ""
    pieces = ['bsub -o {logfile}']
    for flag, value in lsf_args.items():
        pieces.append('-%s' % flag)
        if value is not None:
            pieces.append('%s' % value)
    pieces.append('%s' % command_template)
    return ' '.join(pieces)
Build and return a lsf batch command template
36,027
def dispatch_job_hook ( self , link , key , job_config , logfile , stream = sys . stdout ) : full_sub_dict = job_config . copy ( ) if self . _no_batch : full_command = "%s >& %s" % ( link . command_template ( ) . format ( ** full_sub_dict ) , logfile ) else : full_sub_dict [ 'logfile' ] = logfile full_command_template ...
Send a single job to the LSF batch
36,028
def submit_jobs ( self , link , job_dict = None , job_archive = None , stream = sys . stdout ) : if link is None : return JobStatus . no_job if job_dict is None : job_keys = link . jobs . keys ( ) else : job_keys = sorted ( job_dict . keys ( ) ) unsubmitted_jobs = job_keys unsubmitted_jobs . reverse ( ) failed = False ...
Submit all the jobs in job_dict
36,029
def create_sc_table ( scfile , colnames = None ) : if utils . is_fits_file ( scfile ) and colnames is None : return create_table_from_fits ( scfile , 'SC_DATA' ) if utils . is_fits_file ( scfile ) : files = [ scfile ] else : files = [ line . strip ( ) for line in open ( scfile , 'r' ) ] tables = [ create_table_from_fit...
Load an FT2 file from a file or list of files .
36,030
def create_table_from_fits ( fitsfile , hduname , colnames = None ) : if colnames is None : return Table . read ( fitsfile , hduname ) cols = [ ] with fits . open ( fitsfile , memmap = True ) as h : for k in colnames : data = h [ hduname ] . data . field ( k ) cols += [ Column ( name = k , data = data ) ] return Table ...
Memory efficient function for loading a table from a FITS file .
36,031
def get_spectral_index(src, egy):
    """Compute the local spectral index of a source at energy ``egy``.

    The index is estimated from a two-point finite difference of the
    spectrum evaluated at energies bracketing ``egy``.  Returns NaN if
    either flux evaluation is non-positive.
    """
    delta = 1E-5
    flux_lo = src.spectrum()(pyLike.dArg(egy * (1 - delta)))
    flux_hi = src.spectrum()(pyLike.dArg(egy * (1 + delta)))
    if flux_lo > 0 and flux_hi > 0:
        return (np.log10(flux_lo / flux_hi) /
                np.log10((1 - delta) / (1 + delta)))
    return np.nan
Compute the local spectral index of a source .
36,032
def create ( cls , infile , config = None , params = None , mask = None ) : infile = os . path . abspath ( infile ) roi_file , roi_data = utils . load_data ( infile ) if config is None : config = roi_data [ 'config' ] validate = False else : validate = True gta = cls ( config , validate = validate ) gta . setup ( init_...
Create a new instance of GTAnalysis from an analysis output file generated with ~fermipy . GTAnalysis . write_roi . By default the new instance will inherit the configuration of the saved analysis instance . The configuration may be overridden by passing a configuration file path with the config argument .
36,033
def clone(self, config, **kwargs):
    """Make a clone of this analysis instance with the given
    configuration, copying the current ROI model into it."""
    new_instance = GTAnalysis(config, **kwargs)
    # Deep-copy so the clone's ROI is independent of this instance.
    new_instance._roi = copy.deepcopy(self.roi)
    return new_instance
Make a clone of this analysis instance .
36,034
def set_random_seed(self, seed):
    """Set the seed for the numpy random number generator.

    The seed is also recorded in the analysis configuration so it is
    persisted with the saved state.
    """
    self.config['mc']['seed'] = seed
    np.random.seed(seed)
Set the seed for the random number generator
36,035
def reload_source(self, name, init_source=True):
    """Delete and reload a source in the model.

    This updates the spatial model of the source to the one defined
    in the XML model, then re-synchronizes the summed likelihood
    model with the first component.
    """
    for component in self.components:
        component.reload_source(name)
    if init_source:
        self._init_source(name)
    self.like.model = self.like.components[0].model
Delete and reload a source in the model . This will update the spatial model of this source to the one defined in the XML model .
36,036
def set_source_morphology ( self , name , ** kwargs ) : name = self . roi . get_source_by_name ( name ) . name src = self . roi [ name ] spatial_model = kwargs . get ( 'spatial_model' , src [ 'SpatialModel' ] ) spatial_pars = kwargs . get ( 'spatial_pars' , { } ) use_pylike = kwargs . get ( 'use_pylike' , True ) psf_sc...
Set the spatial model of a source .
36,037
def set_source_spectrum ( self , name , spectrum_type = 'PowerLaw' , spectrum_pars = None , update_source = True ) : name = self . roi . get_source_by_name ( name ) . name src = self . roi [ name ] spectrum_pars = { } if spectrum_pars is None else spectrum_pars if ( self . roi [ name ] [ 'SpectrumType' ] == 'PowerLaw' ...
Set the spectral model of a source . This function can be used to change the spectral type of a source or modify its spectral parameters . If called with spectrum_type = FileFunction and spectrum_pars = None the source spectrum will be replaced with a FileFunction with the same differential flux distribution as the ori...
36,038
def set_source_dnde ( self , name , dnde , update_source = True ) : name = self . roi . get_source_by_name ( name ) . name if self . roi [ name ] [ 'SpectrumType' ] != 'FileFunction' : msg = 'Wrong spectral type: %s' % self . roi [ name ] [ 'SpectrumType' ] self . logger . error ( msg ) raise Exception ( msg ) xy = sel...
Set the differential flux distribution of a source with the FileFunction spectral type .
36,039
def get_source_dnde ( self , name ) : name = self . roi . get_source_by_name ( name ) . name if self . roi [ name ] [ 'SpectrumType' ] != 'FileFunction' : src = self . components [ 0 ] . like . logLike . getSource ( str ( name ) ) spectrum = src . spectrum ( ) file_function = pyLike . FileFunction_cast ( spectrum ) log...
Return differential flux distribution of a source . For sources with FileFunction spectral type this returns the internal differential flux array .
36,040
def _create_filefunction ( self , name , spectrum_pars ) : spectrum_pars = { } if spectrum_pars is None else spectrum_pars if 'loge' in spectrum_pars : loge = spectrum_pars . get ( 'loge' ) else : ebinsz = ( self . log_energies [ - 1 ] - self . log_energies [ 0 ] ) / self . enumbins loge = utils . extend_array ( self ....
Replace the spectrum of an existing source with a FileFunction .
36,041
def stage_output ( self ) : if self . workdir == self . outdir : return elif not os . path . isdir ( self . workdir ) : self . logger . error ( 'Working directory does not exist.' ) return regex = self . config [ 'fileio' ] [ 'outdir_regex' ] savefits = self . config [ 'fileio' ] [ 'savefits' ] files = os . listdir ( s...
Copy data products to final output directory .
36,042
def stage_input ( self ) : if self . workdir == self . outdir : return elif not os . path . isdir ( self . workdir ) : self . logger . error ( 'Working directory does not exist.' ) return self . logger . info ( 'Staging files to %s' , self . workdir ) files = [ os . path . join ( self . outdir , f ) for f in os . listd...
Copy input files to working directory .
36,043
def _create_likelihood(self, srcmdl=None):
    """Instantiate the likelihood object for each component and
    combine them into a SummedLikelihood."""
    self._like = SummedLikelihood()
    for component in self.components:
        component._create_binned_analysis(srcmdl)
        self._like.addComponent(component.like)
    self.like.model = self.like.components[0].model
    # Any cached fit state was built against the old likelihood.
    self._fitcache = None
    self._init_roi_model()
Instantiate the likelihood object for each component and create a SummedLikelihood .
36,044
def generate_model(self, model_name=None):
    """Generate model maps for all components.

    Parameters
    ----------
    model_name : str, optional
        Unique identifier for the model.  If None the model maps are
        generated using the current parameters of the ROI.
    """
    # The enumerate index in the original was unused; iterate directly.
    for component in self._components:
        component.generate_model(model_name=model_name)
Generate model maps for all components . model_name should be a unique identifier for the model . If model_name is None then the model maps will be generated using the current parameters of the ROI .
36,045
def set_energy_range ( self , logemin , logemax ) : if logemin is None : logemin = self . log_energies [ 0 ] else : imin = int ( utils . val_to_edge ( self . log_energies , logemin ) [ 0 ] ) logemin = self . log_energies [ imin ] if logemax is None : logemax = self . log_energies [ - 1 ] else : imax = int ( utils . val...
Set the energy bounds of the analysis . This restricts the evaluation of the likelihood to the data that falls in this range . Input values will be rounded to the closest bin edge value . If either argument is None then the lower or upper bound of the analysis instance will be used .
36,046
def model_counts_map(self, name=None, exclude=None, use_mask=False):
    """Return the model counts map summed over all analysis components.

    ``name`` may select a single source or a list of sources; by
    default the sum over all sources in the ROI is returned.  The
    ``exclude`` parameter can be used to omit one or more components
    when generating the model map.
    """
    component_maps = [c.model_counts_map(name, exclude, use_mask=use_mask)
                      for c in self.components]
    return skymap.coadd_maps(self.geom, component_maps)
Return the model counts map for a single source a list of sources or for the sum of all sources in the ROI . The exclude parameter can be used to exclude one or more components when generating the model map .
36,047
def model_counts_spectrum ( self , name , logemin = None , logemax = None , summed = False , weighted = False ) : if logemin is None : logemin = self . log_energies [ 0 ] if logemax is None : logemax = self . log_energies [ - 1 ] if summed : cs = np . zeros ( self . enumbins ) imin = utils . val_to_bin_bounded ( self ....
Return the predicted number of model counts versus energy for a given source and energy range . If summed = True return the counts spectrum summed over all components otherwise return a list of model spectra . If weighted = True return the weighted version of the counts spectrum
36,048
def get_sources(self, cuts=None, distance=None, skydir=None,
                minmax_ts=None, minmax_npred=None, exclude=None,
                square=False):
    """Retrieve the list of sources in the ROI satisfying the given
    selections.

    The coordinate system used for the spatial selection is taken
    from the binning configuration.
    """
    coordsys = self.config['binning']['coordsys']
    return self.roi.get_sources(skydir, distance, cuts,
                                minmax_ts, minmax_npred,
                                exclude, square, coordsys)
Retrieve list of sources in the ROI satisfying the given selections .
36,049
def add_source ( self , name , src_dict , free = None , init_source = True , save_source_maps = True , use_pylike = True , use_single_psf = False , ** kwargs ) : if self . roi . has_source ( name ) : msg = 'Source %s already exists.' % name self . logger . error ( msg ) raise Exception ( msg ) loglevel = kwargs . pop (...
Add a source to the ROI model . This function may be called either before or after ~fermipy . gtanalysis . GTAnalysis . setup .
36,050
def add_sources_from_roi(self, names, roi, free=False, **kwargs):
    """Add multiple sources to the current ROI model, copying their
    definitions from another ROI model.

    Parameters
    ----------
    names : list
        Names of the sources to copy.
    roi : object
        The ROI model to copy the source definitions from.
    free : bool
        Initialize the copied sources with free normalizations.
    """
    for src_name in names:
        source_data = roi[src_name].data
        self.add_source(src_name, source_data, free=free, **kwargs)
Add multiple sources to the current ROI model copied from another ROI model .
36,051
def delete_source ( self , name , save_template = True , delete_source_map = False , build_fixed_wts = True , ** kwargs ) : if not self . roi . has_source ( name ) : self . logger . error ( 'No source with name: %s' , name ) return loglevel = kwargs . pop ( 'loglevel' , self . loglevel ) self . logger . log ( loglevel ...
Delete a source from the ROI model .
36,052
def delete_sources ( self , cuts = None , distance = None , skydir = None , minmax_ts = None , minmax_npred = None , exclude = None , square = False , names = None ) : srcs = self . roi . get_sources ( skydir = skydir , distance = distance , cuts = cuts , minmax_ts = minmax_ts , minmax_npred = minmax_npred , exclude = ...
Delete sources in the ROI model satisfying the given selection criteria .
36,053
def free_sources_by_name ( self , names , free = True , pars = None , ** kwargs ) : if names is None : return names = [ names ] if not isinstance ( names , list ) else names names = [ self . roi . get_source_by_name ( t ) . name for t in names ] srcs = [ s for s in self . roi . sources if s . name in names ] for s in s...
Free all sources with names matching names .
36,054
def set_parameter ( self , name , par , value , true_value = True , scale = None , bounds = None , error = None , update_source = True ) : name = self . roi . get_source_by_name ( name ) . name idx = self . like . par_index ( name , par ) current_bounds = list ( self . like . model [ idx ] . getBounds ( ) ) if scale is...
Update the value of a parameter . Parameter bounds will automatically be adjusted to encompass the new parameter value .
36,055
def set_parameter_scale ( self , name , par , scale ) : name = self . roi . get_source_by_name ( name ) . name idx = self . like . par_index ( name , par ) current_bounds = list ( self . like . model [ idx ] . getBounds ( ) ) current_scale = self . like . model [ idx ] . getScale ( ) current_value = self . like [ idx ]...
Update the scale of a parameter while keeping its value constant .
36,056
def set_parameter_bounds(self, name, par, bounds):
    """Set the bounds on the scaled value of a parameter.

    Parameters
    ----------
    name : str
        Source name.
    par : str
        Parameter name.
    bounds : list
        Two-element sequence with lower and upper bounds.
    """
    par_index = self.like.par_index(name, par)
    self.like[par_index].setBounds(*bounds)
    # Propagate the change into the internal source dictionaries.
    self._sync_params(name)
Set the bounds on the scaled value of a parameter .
36,057
def set_parameter_error(self, name, par, error):
    """Set the error on the value of a parameter.

    Parameters
    ----------
    name : str
        Source name.
    par : str
        Parameter name.
    error : float
        Parameter error.
    """
    par_index = self.like.par_index(name, par)
    self.like[par_index].setError(error)
    # Propagate the change into the internal source dictionaries.
    self._sync_params(name)
Set the error on the value of a parameter .
36,058
def get_source_name(self, name):
    """Return the name of a source as it is defined in the
    pyLikelihood model object.

    If the given name is not present in the likelihood object it is
    resolved through the ROI model (e.g. an association alias).
    """
    if name in self.like.sourceNames():
        return name
    return self.roi.get_source_by_name(name).name
Return the name of a source as it is defined in the pyLikelihood model object .
36,059
def constrain_norms(self, srcNames, cov_scale=1.0):
    """Constrain the normalizations of one or more sources by adding
    gaussian priors with sigma equal to the parameter error times a
    scaling factor.

    Sources whose normalization is fixed or has no error estimate
    are skipped.
    """
    for src_name in srcNames:
        norm_par = self.like.normPar(src_name)
        err = norm_par.error()
        val = norm_par.getValue()
        # Skip fixed parameters and those with no error estimate.
        if norm_par.error() == 0.0 or not norm_par.isFree():
            continue
        self.add_gauss_prior(src_name, norm_par.getName(),
                             val, err * cov_scale)
Constrain the normalizations of one or more sources by adding gaussian priors with sigma equal to the parameter error times a scaling factor .
36,060
def remove_priors(self):
    """Clear all priors from the spectral parameters of every source
    in the ROI."""
    for source in self.roi.sources:
        spectrum = self.like[source.name].funcs["Spectrum"]
        for param in spectrum.params.values():
            param.removePrior()
Clear all priors .
36,061
def _create_optObject ( self , ** kwargs ) : optimizer = kwargs . get ( 'optimizer' , self . config [ 'optimizer' ] [ 'optimizer' ] ) if optimizer . upper ( ) == 'MINUIT' : optObject = pyLike . Minuit ( self . like . logLike ) elif optimizer . upper ( ) == 'NEWMINUIT' : optObject = pyLike . NewMinuit ( self . like . lo...
Make MINUIT or NewMinuit type optimizer object
36,062
def load_xml(self, xmlfile):
    """Load the model definition from an XML file.

    Each analysis component is reloaded from the XML model and the
    internal source dictionaries are re-synchronized.
    """
    self.logger.info('Loading XML')
    for component in self.components:
        component.load_xml(xmlfile)
    for src_name in self.like.sourceNames():
        self.update_source(src_name)
    # The cached fit state no longer matches the new model.
    self._fitcache = None
    self.logger.info('Finished Loading XML')
Load model definition from XML .
36,063
def load_parameters_from_yaml ( self , yamlfile , update_sources = False ) : d = utils . load_yaml ( yamlfile ) for src , src_pars in d . items ( ) : for par_name , par_dict in src_pars . items ( ) : if par_name in [ 'SpectrumType' ] : continue par_value = par_dict . get ( 'value' , None ) par_error = par_dict . get ( ...
Load model parameters from yaml
36,064
def _restore_counts_maps ( self ) : for c in self . components : c . restore_counts_maps ( ) if hasattr ( self . like . components [ 0 ] . logLike , 'setCountsMap' ) : self . _init_roi_model ( ) else : self . write_xml ( 'tmp' ) self . _like = SummedLikelihood ( ) for i , c in enumerate ( self . _components ) : c . _cr...
Revert counts maps to their state prior to injecting any simulated components .
36,065
def simulate_source ( self , src_dict = None ) : self . _fitcache = None if src_dict is None : src_dict = { } else : src_dict = copy . deepcopy ( src_dict ) skydir = wcs_utils . get_target_skydir ( src_dict , self . roi . skydir ) src_dict . setdefault ( 'ra' , skydir . ra . deg ) src_dict . setdefault ( 'dec' , skydir...
Inject simulated source counts into the data .
36,066
def simulate_roi ( self , name = None , randomize = True , restore = False ) : self . logger . info ( 'Simulating ROI' ) self . _fitcache = None if restore : self . logger . info ( 'Restoring' ) self . _restore_counts_maps ( ) self . logger . info ( 'Finished' ) return for c in self . components : c . simulate_roi ( na...
Generate a simulation of the ROI using the current best - fit model and replace the data counts cube with this simulation . The simulation is created by generating an array of Poisson random numbers with expectation values drawn from the model cube of the binned analysis instance . This function will update the counts ...
36,067
def load_roi ( self , infile , reload_sources = False , params = None , mask = None ) : infile = utils . resolve_path ( infile , workdir = self . workdir ) roi_file , roi_data = utils . load_data ( infile , workdir = self . workdir ) self . logger . info ( 'Loading ROI file: %s' , roi_file ) key_map = { 'dfde' : 'dnde'...
This function reloads the analysis state from a previously saved instance generated with ~fermipy . gtanalysis . GTAnalysis . write_roi .
36,068
def write_roi ( self , outfile = None , save_model_map = False , ** kwargs ) : make_plots = kwargs . get ( 'make_plots' , False ) save_weight_map = kwargs . get ( 'save_weight_map' , False ) if outfile is None : pathprefix = os . path . join ( self . config [ 'fileio' ] [ 'workdir' ] , 'results' ) elif not os . path . ...
Write current state of the analysis to a file . This method writes an XML model definition a ROI dictionary and a FITS source catalog file . A previously saved analysis state can be reloaded from the ROI dictionary file with the ~fermipy . gtanalysis . GTAnalysis . load_roi method .
36,069
def make_plots ( self , prefix , mcube_map = None , ** kwargs ) : if mcube_map is None : mcube_map = self . model_counts_map ( ) plotter = plotting . AnalysisPlotter ( self . config [ 'plotting' ] , fileio = self . config [ 'fileio' ] , logging = self . config [ 'logging' ] ) plotter . run ( self , mcube_map , prefix =...
Make diagnostic plots using the current ROI model .
36,070
def update_source ( self , name , paramsonly = False , reoptimize = False , ** kwargs ) : npts = self . config [ 'gtlike' ] [ 'llscan_npts' ] optimizer = kwargs . get ( 'optimizer' , self . config [ 'optimizer' ] ) sd = self . get_src_model ( name , paramsonly , reoptimize , npts , optimizer = optimizer ) src = self . ...
Update the dictionary for this source .
36,071
def compute_srcprob(self, xmlfile=None, overwrite=False):
    """Run the gtsrcprob application for each component using the
    current model or a user-provided XML file.

    Parameters
    ----------
    xmlfile : str, optional
        Path to an XML model definition; if None the current model
        is used.
    overwrite : bool
        Overwrite any existing srcprob output.
    """
    # The enumerate index in the original was unused; iterate directly.
    for component in self.components:
        # Diffuse response columns must exist before gtsrcprob runs.
        component._diffrsp_app(xmlfile=xmlfile)
        component._srcprob_app(xmlfile=xmlfile, overwrite=overwrite)
Run the gtsrcprob app with the current model or a user provided xmlfile
36,072
def reload_source ( self , name ) : src = self . roi . get_source_by_name ( name ) if hasattr ( self . like . logLike , 'loadSourceMap' ) : self . like . logLike . loadSourceMap ( str ( name ) , True , False ) srcmap_utils . delete_source_map ( self . files [ 'srcmap' ] , name ) self . like . logLike . saveSourceMaps (...
Recompute the source map for a single source in the model .
36,073
def reload_sources(self, names):
    """Recompute the source maps for a list of sources in the model.

    Attempts a bulk reload first; if that is unavailable or fails,
    falls back to reloading each source individually.

    Parameters
    ----------
    names : list
        Names of the sources to reload.
    """
    try:
        self.like.logLike.loadSourceMaps(names, True, True)
        self._scale_srcmap(self._src_expscale, check_header=False,
                           names=names)
    # Narrowed from a bare `except:` so KeyboardInterrupt/SystemExit
    # are not swallowed by the fallback path.
    except Exception:
        for name in names:
            self.reload_source(name)
Recompute the source map for a list of sources in the model .
36,074
def _create_source ( self , src ) : if src [ 'SpatialType' ] == 'SkyDirFunction' : pylike_src = pyLike . PointSource ( self . like . logLike . observation ( ) ) pylike_src . setDir ( src . skydir . ra . deg , src . skydir . dec . deg , False , False ) elif src [ 'SpatialType' ] == 'SpatialMap' : filepath = str ( utils ...
Create a pyLikelihood Source object from a ~fermipy . roi_model . Model object .
36,075
def set_exposure_scale ( self , name , scale = None ) : name = self . roi . get_source_by_name ( name ) . name if scale is None and name not in self . _src_expscale : return elif scale is None : scale = self . _src_expscale . get ( name , 1.0 ) else : self . _src_expscale [ name ] = scale self . _scale_srcmap ( { name ...
Set the exposure correction of a source .
36,076
def set_energy_range ( self , logemin , logemax ) : if logemin is None : logemin = self . log_energies [ 0 ] if logemax is None : logemax = self . log_energies [ - 1 ] imin = int ( utils . val_to_edge ( self . log_energies , logemin ) [ 0 ] ) imax = int ( utils . val_to_edge ( self . log_energies , logemax ) [ 0 ] ) if...
Set the energy range of the analysis .
36,077
def counts_map ( self ) : try : if isinstance ( self . like , gtutils . SummedLikelihood ) : cmap = self . like . components [ 0 ] . logLike . countsMap ( ) p_method = cmap . projection ( ) . method ( ) else : cmap = self . like . logLike . countsMap ( ) p_method = cmap . projection ( ) . method ( ) except Exception : ...
Return 3 - D counts map for this component as a Map object .
36,078
def weight_map ( self ) : if isinstance ( self . like , gtutils . SummedLikelihood ) : cmap = self . like . components [ 0 ] . logLike . countsMap ( ) try : p_method = cmap . projection ( ) . method ( ) except AttributeError : p_method = 0 try : if self . like . components [ 0 ] . logLike . has_weights ( ) : wmap = sel...
Return 3 - D weights map for this component as a Map object .
36,079
def model_counts_spectrum ( self , name , logemin , logemax , weighted = False ) : try : cs = np . array ( self . like . logLike . modelCountsSpectrum ( str ( name ) , weighted ) ) except ( TypeError , NotImplementedError ) : cs = np . array ( self . like . logLike . modelCountsSpectrum ( str ( name ) ) ) imin = utils ...
Return the model counts spectrum of a source .
36,080
def setup ( self , overwrite = False , ** kwargs ) : loglevel = kwargs . get ( 'loglevel' , self . loglevel ) self . logger . log ( loglevel , 'Running setup for component %s' , self . name ) use_external_srcmap = self . config [ 'gtlike' ] [ 'use_external_srcmap' ] if not use_external_srcmap : self . _select_data ( ov...
Run pre - processing step for this component . This will generate all of the auxiliary files needed to instantiate a likelihood object . By default this function will skip any steps for which the output file already exists .
36,081
def _scale_srcmap ( self , scale_map , check_header = True , names = None ) : srcmap = fits . open ( self . files [ 'srcmap' ] ) for hdu in srcmap [ 1 : ] : if hdu . name not in scale_map : continue if names is not None and hdu . name not in names : continue scale = scale_map [ hdu . name ] if scale < 1e-20 : self . lo...
Apply exposure corrections to the source map file .
36,082
def _make_scaled_srcmap ( self ) : self . logger . info ( 'Computing scaled source map.' ) bexp0 = fits . open ( self . files [ 'bexpmap_roi' ] ) bexp1 = fits . open ( self . config [ 'gtlike' ] [ 'bexpmap' ] ) srcmap = fits . open ( self . config [ 'gtlike' ] [ 'srcmap' ] ) if bexp0 [ 0 ] . data . shape != bexp1 [ 0 ]...
Make an exposure cube with the same binning as the counts map .
36,083
def simulate_roi ( self , name = None , clear = True , randomize = True ) : cm = self . counts_map ( ) data = cm . data m = self . model_counts_map ( name ) if clear : data . fill ( 0.0 ) if randomize : if m . data . min ( ) < 0. : self . logger . warning ( 'At least on negative value found in model map.' ' Changing it...
Simulate the whole ROI or inject a simulation of one or more model components into the data .
36,084
def _update_srcmap_file ( self , sources , overwrite = True ) : if not os . path . isfile ( self . files [ 'srcmap' ] ) : return hdulist = fits . open ( self . files [ 'srcmap' ] ) hdunames = [ hdu . name . upper ( ) for hdu in hdulist ] srcmaps = { } for src in sources : if src . name . upper ( ) in hdunames and not o...
Check the contents of the source map file and generate source maps for any components that are not present .
36,085
def _create_srcmap ( self , name , src , ** kwargs ) : psf_scale_fn = kwargs . get ( 'psf_scale_fn' , None ) skydir = src . skydir spatial_model = src [ 'SpatialModel' ] spatial_width = src [ 'SpatialWidth' ] xpix , ypix = self . geom . to_image ( ) . coord_to_pix ( skydir ) exp = self . _bexp . interp_by_coord ( ( sky...
Generate the source map for a source .
36,086
def _update_srcmap ( self , name , src , ** kwargs ) : k = self . _create_srcmap ( name , src , ** kwargs ) scale = self . _src_expscale . get ( name , 1.0 ) k *= scale self . like . logLike . sourceMap ( str ( name ) ) . model ( ) self . like . logLike . setSourceMapImage ( str ( name ) , np . ravel ( k ) ) self . lik...
Update the source map for an existing source in memory .
36,087
def generate_model ( self , model_name = None , outfile = None ) : if model_name is not None : model_name = os . path . splitext ( model_name ) [ 0 ] if model_name is None or model_name == '' : srcmdl = self . files [ 'srcmdl' ] else : srcmdl = self . get_model_path ( model_name ) if not os . path . isfile ( srcmdl ) :...
Generate a counts model map from an XML model file using gtmodel .
36,088
def write_xml(self, xmlfile):
    """Write the XML model for this analysis component.

    Parameters
    ----------
    xmlfile : str
        Model name or path; resolved to a full path via
        ``get_model_path`` before writing.
    """
    # Resolve the model name to a concrete path in the working directory.
    resolved = self.get_model_path(xmlfile)
    self.logger.info('Writing %s...', resolved)
    # Delegate serialization to the underlying likelihood object.
    self.like.writeXml(str(resolved))
Write the XML model for this analysis component .
36,089
def get_model_path(self, name):
    """Infer the path to the XML model file for *name*.

    Any extension on *name* is dropped and replaced with ``.xml``,
    with the component's configured file suffix inserted before the
    extension.  The result is resolved against the configured working
    directory.

    Parameters
    ----------
    name : str
        Model name, with or without an extension.

    Returns
    -------
    str
        Resolved path to the XML model file.
    """
    # Strip whatever extension the caller supplied; output is always .xml.
    stem = os.path.splitext(name)[0]
    xmlfile = stem + self.config['file_suffix'] + '.xml'
    return utils.resolve_path(xmlfile,
                              workdir=self.config['fileio']['workdir'])
Infer the path to the XML model name .
36,090
def _tscube_app ( self , xmlfile ) : xmlfile = self . get_model_path ( xmlfile ) outfile = os . path . join ( self . config [ 'fileio' ] [ 'workdir' ] , 'tscube%s.fits' % ( self . config [ 'file_suffix' ] ) ) kw = dict ( cmap = self . files [ 'ccube' ] , expcube = self . files [ 'ltcube' ] , bexpmap = self . files [ 'b...
Run gttscube as an application .
36,091
def _diffrsp_app ( self , xmlfile = None , ** kwargs ) : loglevel = kwargs . get ( 'loglevel' , self . loglevel ) self . logger . log ( loglevel , 'Computing diffuse repsonce for component %s.' , self . name ) srcmdl_file = self . files [ 'srcmdl' ] if xmlfile is not None : srcmdl_file = self . get_model_path ( xmlfile...
Compute the diffuse response
36,092
def _srcprob_app ( self , xmlfile = None , overwrite = False , ** kwargs ) : loglevel = kwargs . get ( 'loglevel' , self . loglevel ) self . logger . log ( loglevel , 'Computing src probability for component %s.' , self . name ) srcmdl_file = self . files [ 'srcmdl' ] if xmlfile is not None : srcmdl_file = self . get_m...
Run srcprob for an analysis component as an application
36,093
def purge_dict(idict):
    """Return a copy of *idict* with all null-valued items removed.

    An item is kept only if ``is_null`` reports its value as non-null;
    the input dictionary is not modified.
    """
    return {key: val for key, val in idict.items() if not is_null(val)}
Remove null items from a dictionary
36,094
def main(cls):
    """Hook to run this Chain from the command line.

    Builds the chain via the class factory, parses ``sys.argv``,
    executes the chain (honoring ``--dry_run``), and finalizes.
    """
    instance = cls.create()
    cli_args = instance._run_argparser(sys.argv[1:])
    dry = cli_args.dry_run
    instance._run_chain(sys.stdout, dry)
    instance._finalize(dry)
Hook to run this Chain from the command line
36,095
def _set_link ( self , linkname , cls , ** kwargs ) : val_copy = purge_dict ( kwargs . copy ( ) ) sub_link_prefix = val_copy . pop ( 'link_prefix' , '' ) link_prefix = self . link_prefix + sub_link_prefix create_args = dict ( linkname = linkname , link_prefix = link_prefix , job_archive = val_copy . pop ( 'job_archive'...
Transfer option kwargs to a Link object, optionally building the Link if needed .
36,096
def _set_links_job_archive(self):
    """Propagate this chain's job archive to every child link."""
    # Hoist the attribute read out of the loop; all links share one archive.
    archive = self._job_archive
    for child in self._links.values():
        child._job_archive = archive
Pass self . _job_archive along to links
36,097
def _run_chain ( self , stream = sys . stdout , dry_run = False , stage_files = True , force_run = False , resubmit_failed = False ) : self . _set_links_job_archive ( ) failed = False if self . _file_stage is not None : input_file_mapping , output_file_mapping = self . _map_scratch_files ( self . sub_files ) if stage_f...
Run all the links in the chain
36,098
def clear_jobs(self, recursive=True):
    """Empty the job dictionary.

    Parameters
    ----------
    recursive : bool
        When True (default), also clear the jobs of every child link
        before clearing this object's own ``jobs`` mapping.
    """
    if recursive:
        for child in self._links.values():
            child.clear_jobs(recursive)
    self.jobs.clear()
Clear a dictionary with all the jobs
36,099
def check_links_status ( self , fail_running = False , fail_pending = False ) : status_vector = JobStatusVector ( ) for link in self . _links . values ( ) : key = JobDetails . make_fullkey ( link . full_linkname ) link_status = link . check_job_status ( key , fail_running = fail_running , fail_pending = fail_pending ) ...
Check the status of all the jobs run from the Link objects in this Chain and return a status flag that summarizes that .