idx
int64
0
63k
question
stringlengths
53
5.28k
target
stringlengths
5
805
35,900
def make_catalog_comp_dict ( ** kwargs ) : library_yamlfile = kwargs . pop ( 'library' , 'models/library.yaml' ) csm = kwargs . pop ( 'CatalogSourceManager' , CatalogSourceManager ( ** kwargs ) ) if library_yamlfile is None or library_yamlfile == 'None' : yamldict = { } else : yamldict = yaml . safe_load ( open ( libra...
Build and return the information about the catalog components
35,901
def read_catalog_info_yaml ( self , splitkey ) : catalog_info_yaml = self . _name_factory . catalog_split_yaml ( sourcekey = splitkey , fullpath = True ) yaml_dict = yaml . safe_load ( open ( catalog_info_yaml ) ) yaml_dict [ 'catalog_file' ] = os . path . expandvars ( yaml_dict [ 'catalog_file' ] ) yaml_dict [ 'catalo...
Read the yaml file for a particular split key
35,902
def build_catalog_info ( self , catalog_info ) : cat = SourceFactory . build_catalog ( ** catalog_info ) catalog_info [ 'catalog' ] = cat catalog_info [ 'catalog_table' ] = cat . table catalog_info [ 'roi_model' ] = SourceFactory . make_fermipy_roi_model_from_catalogs ( [ cat ] ) catalog_info [ 'srcmdl_name' ] = self ....
Build a CatalogInfo object
35,903
def catalog_components(self, catalog_name, split_ver):
    """Return the sorted list of merged component keys for a catalog/split version.

    The catalog name and split version are combined into the
    "<name>_<version>" key used by ``self._split_comp_info_dicts``.
    """
    lookup_key = "%s_%s" % (catalog_name, split_ver)
    comp_dict = self._split_comp_info_dicts[lookup_key]
    return sorted(comp_dict.keys())
Return the set of merged components for a particular split key
35,904
def split_comp_info(self, catalog_name, split_ver, split_key):
    """Return the component info for one split key of a catalog/split version."""
    merged_key = "%s_%s" % (catalog_name, split_ver)
    return self._split_comp_info_dicts[merged_key][split_key]
Return the info for a particular split key
35,905
def make_catalog_comp_info_dict ( self , catalog_sources ) : catalog_ret_dict = { } split_ret_dict = { } for key , value in catalog_sources . items ( ) : if value is None : continue if value [ 'model_type' ] != 'catalog' : continue versions = value [ 'versions' ] for version in versions : ver_key = "%s_%s" % ( key , ve...
Make the information about the catalog components
35,906
def extract_images_from_tscube ( infile , outfile ) : inhdulist = fits . open ( infile ) wcs = pywcs . WCS ( inhdulist [ 0 ] . header ) map_shape = inhdulist [ 0 ] . data . shape t_eng = Table . read ( infile , "EBOUNDS" ) t_scan = Table . read ( infile , "SCANDATA" ) t_fit = Table . read ( infile , "FITDATA" ) n_ebin ...
Extract data from table HDUs in TSCube file and convert them to FITS images
35,907
def truncate_array ( array1 , array2 , position ) : slices = [ ] for i in range ( array1 . ndim ) : xmin = 0 xmax = array1 . shape [ i ] dxlo = array1 . shape [ i ] // 2 dxhi = array1 . shape [ i ] - dxlo if position [ i ] - dxlo < 0 : xmin = max ( dxlo - position [ i ] , 0 ) if position [ i ] + dxhi > array2 . shape [...
Truncate array1 by finding the overlap with array2 when the array1 center is located at the given position in array2 .
35,908
def _sum_wrapper(fn):
    """Wrap *fn* so list arguments are aggregated row-wise and the results summed.

    Non-list arguments are first cast to lists by ``_cast_args_to_list``;
    the wrapped function then applies ``fn`` to each argument group and
    returns the sum of the results.
    """
    def wrapper(*args, **kwargs):
        rows = zip(*_cast_args_to_list(args))
        # sum() starts from 0, matching the original accumulator.
        return sum(fn(*row, **kwargs) for row in rows)
    return wrapper
Wrapper to perform row - wise aggregation of list arguments and pass them to a function . The return value of the function is summed over the argument groups . Non - list arguments will be automatically cast to a list .
35,909
def _amplitude_bounds ( counts , bkg , model ) : if isinstance ( counts , list ) : counts = np . concatenate ( [ t . flat for t in counts ] ) bkg = np . concatenate ( [ t . flat for t in bkg ] ) model = np . concatenate ( [ t . flat for t in model ] ) s_model = np . sum ( model ) s_counts = np . sum ( counts ) sn = bkg...
Compute bounds for the root of _f_cash_root_cython .
35,910
def _root_amplitude_brentq ( counts , bkg , model , root_fn = _f_cash_root ) : amplitude_min , amplitude_max = _amplitude_bounds ( counts , bkg , model ) if not np . sum ( counts ) > 0 : return amplitude_min , 0 args = ( counts , bkg , model ) if root_fn ( 0.0 , * args ) < 0 : return 0.0 , 1 with warnings . catch_warni...
Fit amplitude by finding roots using Brent algorithm .
35,911
def poisson_log_like(counts, model):
    """Compute the Poisson log-likelihood for the given counts and model arrays.

    Returns ``model - counts * log(model)`` element-wise, with the log term
    applied only where ``counts > 0``.
    """
    out = np.array(model)
    positive = counts > 0
    out[positive] -= counts[positive] * np.log(model[positive])
    return out
Compute the Poisson log - likelihood function for the given counts and model arrays .
35,912
def f_cash(x, counts, bkg, model):
    """Cash statistic wrapper: twice the Poisson log-likelihood of
    ``bkg + x * model`` evaluated against ``counts``."""
    total_model = bkg + x * model
    return 2.0 * poisson_log_like(counts, total_model)
Wrapper for cash statistics that defines the model function .
35,913
def _ts_value_newton ( position , counts , bkg , model , C_0_map ) : extract_fn = _collect_wrapper ( extract_large_array ) truncate_fn = _collect_wrapper ( extract_small_array ) counts_slice = extract_fn ( counts , model , position ) bkg_slice = extract_fn ( bkg , model , position ) C_0_map_slice = extract_fn ( C_0_map...
Compute TS value at a given pixel position using the newton method .
35,914
def tsmap ( self , prefix = '' , ** kwargs ) : timer = Timer . create ( start = True ) schema = ConfigSchema ( self . defaults [ 'tsmap' ] ) schema . add_option ( 'loglevel' , logging . INFO ) schema . add_option ( 'map_skydir' , None , '' , astropy . coordinates . SkyCoord ) schema . add_option ( 'map_size' , 1.0 ) sc...
Generate a spatial TS map for a source component with properties defined by the model argument . The TS map will have the same geometry as the ROI . The output of this method is a dictionary containing ~fermipy . skymap . Map objects with the TS and amplitude of the best - fit test source . By default this method will ...
35,915
def tscube ( self , prefix = '' , ** kwargs ) : self . logger . info ( 'Generating TS cube' ) schema = ConfigSchema ( self . defaults [ 'tscube' ] ) schema . add_option ( 'make_plots' , True ) schema . add_option ( 'write_fits' , True ) schema . add_option ( 'write_npy' , True ) config = schema . create_config ( self ....
Generate a spatial TS map for a source component with properties defined by the model argument . This method uses the gttscube ST application for source fitting and will simultaneously fit the test source normalization as well as the normalizations of any background components that are currently free . The output of th...
35,916
def compute_ps_counts ( ebins , exp , psf , bkg , fn , egy_dim = 0 , spatial_model = 'PointSource' , spatial_size = 1E-3 ) : ewidth = utils . edge_to_width ( ebins ) ectr = np . exp ( utils . edge_to_center ( np . log ( ebins ) ) ) r68 = psf . containment_angle ( ectr , fraction = 0.68 ) if spatial_model != 'PointSourc...
Calculate the observed signal and background counts given models for the exposure background intensity PSF and source flux .
35,917
def create_psf ( event_class , event_type , dtheta , egy , cth ) : irf = create_irf ( event_class , event_type ) theta = np . degrees ( np . arccos ( cth ) ) m = np . zeros ( ( len ( dtheta ) , len ( egy ) , len ( cth ) ) ) for i , x in enumerate ( egy ) : for j , y in enumerate ( theta ) : m [ : , i , j ] = irf . psf ...
Create an array of PSF response values versus energy and inclination angle .
35,918
def create_edisp ( event_class , event_type , erec , egy , cth ) : irf = create_irf ( event_class , event_type ) theta = np . degrees ( np . arccos ( cth ) ) v = np . zeros ( ( len ( erec ) , len ( egy ) , len ( cth ) ) ) m = ( erec [ : , None ] / egy [ None , : ] < 3.0 ) & ( erec [ : , None ] / egy [ None , : ] > 0.33...
Create an array of energy response values versus energy and inclination angle .
35,919
def create_aeff ( event_class , event_type , egy , cth ) : irf = create_irf ( event_class , event_type ) irf . aeff ( ) . setPhiDependence ( False ) theta = np . degrees ( np . arccos ( cth ) ) m = np . zeros ( ( len ( egy ) , len ( cth ) ) ) for i , x in enumerate ( egy ) : for j , y in enumerate ( theta ) : m [ i , j...
Create an array of effective areas versus energy and incidence angle . Binning in energy and incidence angle is controlled with the egy and cth input parameters .
35,920
def calc_exp ( skydir , ltc , event_class , event_types , egy , cth_bins , npts = None ) : if npts is None : npts = int ( np . ceil ( np . max ( cth_bins [ 1 : ] - cth_bins [ : - 1 ] ) / 0.025 ) ) exp = np . zeros ( ( len ( egy ) , len ( cth_bins ) - 1 ) ) cth_bins = utils . split_bin_edges ( cth_bins , npts ) cth = ed...
Calculate the exposure on a 2D grid of energy and incidence angle .
35,921
def create_avg_rsp ( rsp_fn , skydir , ltc , event_class , event_types , x , egy , cth_bins , npts = None ) : if npts is None : npts = int ( np . ceil ( np . max ( cth_bins [ 1 : ] - cth_bins [ : - 1 ] ) / 0.05 ) ) wrsp = np . zeros ( ( len ( x ) , len ( egy ) , len ( cth_bins ) - 1 ) ) exps = np . zeros ( ( len ( egy ...
Calculate the weighted response function .
35,922
def create_avg_psf(skydir, ltc, event_class, event_types, dtheta, egy,
                   cth_bins, npts=None):
    """Generate an exposure-weighted PSF model averaged over incidence angle.

    Thin wrapper that delegates to ``create_avg_rsp`` using ``create_psf``
    as the response function.
    """
    return create_avg_rsp(create_psf, skydir, ltc, event_class,
                          event_types, dtheta, egy, cth_bins, npts)
Generate model for exposure - weighted PSF averaged over incidence angle .
35,923
def create_avg_edisp(skydir, ltc, event_class, event_types, erec, egy,
                     cth_bins, npts=None):
    """Generate an exposure-weighted DRM averaged over incidence angle.

    Thin wrapper that delegates to ``create_avg_rsp`` using ``create_edisp``
    as the response function.
    """
    return create_avg_rsp(create_edisp, skydir, ltc, event_class,
                          event_types, erec, egy, cth_bins, npts)
Generate model for exposure - weighted DRM averaged over incidence angle .
35,924
def create_wtd_psf ( skydir , ltc , event_class , event_types , dtheta , egy_bins , cth_bins , fn , nbin = 64 , npts = 1 ) : egy_bins = np . exp ( utils . split_bin_edges ( np . log ( egy_bins ) , npts ) ) etrue_bins = 10 ** np . linspace ( 1.0 , 6.5 , nbin * 5.5 + 1 ) etrue = 10 ** utils . edge_to_center ( np . log10 ...
Create an exposure - and dispersion - weighted PSF model for a source with spectral parameterization fn . The calculation performed by this method accounts for the influence of energy dispersion on the PSF .
35,925
def calc_drm ( skydir , ltc , event_class , event_types , egy_bins , cth_bins , nbin = 64 ) : npts = int ( np . ceil ( 128. / bins_per_dec ( egy_bins ) ) ) egy_bins = np . exp ( utils . split_bin_edges ( np . log ( egy_bins ) , npts ) ) etrue_bins = 10 ** np . linspace ( 1.0 , 6.5 , nbin * 5.5 + 1 ) egy = 10 ** utils ....
Calculate the detector response matrix .
35,926
def calc_counts ( skydir , ltc , event_class , event_types , egy_bins , cth_bins , fn , npts = 1 ) : egy_bins = np . exp ( utils . split_bin_edges ( np . log ( egy_bins ) , npts ) ) exp = calc_exp ( skydir , ltc , event_class , event_types , egy_bins , cth_bins ) dnde = fn . dnde ( egy_bins ) cnts = loglog_quad ( egy_b...
Calculate the expected counts vs . true energy and incidence angle for a source with spectral parameterization fn .
35,927
def calc_counts_edisp ( skydir , ltc , event_class , event_types , egy_bins , cth_bins , fn , nbin = 16 , npts = 1 ) : egy_bins = np . exp ( utils . split_bin_edges ( np . log ( egy_bins ) , npts ) ) etrue_bins = 10 ** np . linspace ( 1.0 , 6.5 , nbin * 5.5 + 1 ) drm = calc_drm ( skydir , ltc , event_class , event_type...
Calculate the expected counts vs . observed energy and true incidence angle for a source with spectral parameterization fn .
35,928
def calc_wtd_exp(skydir, ltc, event_class, event_types, egy_bins, cth_bins,
                 fn, nbin=16):
    """Calculate the effective (spectrum-weighted) exposure.

    The effective exposure is the ratio of the predicted counts (including
    energy dispersion) to the model flux integrated over each energy bin.
    """
    flux = fn.flux(egy_bins[:-1], egy_bins[1:])
    counts = calc_counts_edisp(skydir, ltc, event_class, event_types,
                               egy_bins, cth_bins, fn, nbin=nbin)
    return counts / flux[:, None]
Calculate the effective exposure .
35,929
def eval ( self , ebin , dtheta , scale_fn = None ) : if scale_fn is None and self . scale_fn is not None : scale_fn = self . scale_fn if scale_fn is None : scale_factor = 1.0 else : dtheta = dtheta / scale_fn ( self . energies [ ebin ] ) scale_factor = 1. / scale_fn ( self . energies [ ebin ] ) ** 2 vals = 10 ** np . ...
Evaluate the PSF at the given energy bin index .
35,930
def interp ( self , energies , dtheta , scale_fn = None ) : if scale_fn is None and self . scale_fn : scale_fn = self . scale_fn log_energies = np . log10 ( energies ) shape = ( energies * dtheta ) . shape scale_factor = np . ones ( shape ) if scale_fn is not None : dtheta = dtheta / scale_fn ( energies ) scale_factor ...
Evaluate the PSF model at an array of energies and angular separations .
35,931
def interp_bin ( self , egy_bins , dtheta , scale_fn = None ) : npts = 4 egy_bins = np . exp ( utils . split_bin_edges ( np . log ( egy_bins ) , npts ) ) egy = np . exp ( utils . edge_to_center ( np . log ( egy_bins ) ) ) log_energies = np . log10 ( egy ) vals = self . interp ( egy [ None , : ] , dtheta [ : , None ] , ...
Evaluate the bin - averaged PSF model over the energy bins egy_bins .
35,932
def containment_angle ( self , energies = None , fraction = 0.68 , scale_fn = None ) : if energies is None : energies = self . energies vals = self . interp ( energies [ np . newaxis , : ] , self . dtheta [ : , np . newaxis ] , scale_fn = scale_fn ) dtheta = np . radians ( self . dtheta [ : , np . newaxis ] * np . ones...
Evaluate the PSF containment angle at a sequence of energies .
35,933
def containment_angle_bin(self, egy_bins, fraction=0.68, scale_fn=None):
    """Evaluate the PSF containment angle averaged over energy bins.

    Interpolates the bin-averaged PSF over ``egy_bins`` and passes the
    angle grid (in radians, broadcast to the PSF value shape) to
    ``self._calc_containment``.
    """
    psf_vals = self.interp_bin(egy_bins, self.dtheta, scale_fn=scale_fn)
    dtheta_rad = np.radians(self.dtheta[:, np.newaxis] * np.ones(psf_vals.shape))
    return self._calc_containment(dtheta_rad, psf_vals, fraction)
Evaluate the PSF containment angle averaged over energy bins .
35,934
def create ( cls , skydir , ltc , event_class , event_types , energies , cth_bins = None , ndtheta = 500 , use_edisp = False , fn = None , nbin = 64 ) : if isinstance ( event_types , int ) : event_types = bitmask_to_bits ( event_types ) if fn is None : fn = spectrum . PowerLaw ( [ 1E-13 , - 2.0 ] ) dtheta = np . logspa...
Create a PSFModel object . This class can be used to evaluate the exposure - weighted PSF for a source with a given observing profile and energy distribution .
35,935
def remove_file(filepath, dry_run=False):
    """Remove the file at *filepath*.

    In dry-run mode only print the equivalent ``rm`` command.  A missing
    file is silently ignored.
    """
    if dry_run:
        sys.stdout.write("rm %s\n" % filepath)
        return
    try:
        os.remove(filepath)
    except OSError:
        # Already gone (or never existed) -- nothing to do.
        pass
Remove the file at filepath
35,936
def clean_job(logfile, outfiles, dry_run=False):
    """Remove the log file and every output file produced by a failed job."""
    remove_file(logfile, dry_run)
    for path in outfiles.values():
        remove_file(path, dry_run)
Removes log file and files created by failed jobs .
35,937
def check_log ( logfile , exited = 'Exited with exit code' , successful = 'Successfully completed' ) : if not os . path . exists ( logfile ) : return JobStatus . ready if exited in open ( logfile ) . read ( ) : return JobStatus . failed elif successful in open ( logfile ) . read ( ) : return JobStatus . done return Job...
Check a log file to determine status of LSF job
35,938
def check_job(cls, job_details):
    """Check the status of a specific job by scanning its log file."""
    return check_log(job_details.logfile,
                     cls.string_exited,
                     cls.string_successful)
Check the status of a specific job
35,939
def dispatch_job_hook(self, link, key, job_config, logfile, stream=sys.stdout):
    """Hook to dispatch a single job; concrete interfaces must override this."""
    raise NotImplementedError("SysInterface.dispatch_job_hook")
Hook to dispatch a single job
35,940
def dispatch_job ( self , link , key , job_archive , stream = sys . stdout ) : try : job_details = link . jobs [ key ] except KeyError : print ( key , link . jobs ) job_config = job_details . job_config link . update_args ( job_config ) logfile = job_config [ 'logfile' ] try : self . dispatch_job_hook ( link , key , jo...
Function to dispatch a single job
35,941
def submit_jobs ( self , link , job_dict = None , job_archive = None , stream = sys . stdout ) : failed = False if job_dict is None : job_dict = link . jobs for job_key , job_details in sorted ( job_dict . items ( ) ) : job_config = job_details . job_config if job_details . status == JobStatus . failed : clean_job ( jo...
Run the Link with all of the items job_dict as input .
35,942
def clean_jobs ( self , link , job_dict = None , clean_all = False ) : failed = False if job_dict is None : job_dict = link . jobs for job_details in job_dict . values ( ) : if job_details . status == JobStatus . failed or clean_all : clean_job ( job_details . logfile , { } , self . _dry_run ) job_details . status = Jo...
Clean up all the jobs associated with this link .
35,943
def get_spatial_type ( spatial_model ) : if spatial_model in [ 'SkyDirFunction' , 'PointSource' , 'Gaussian' ] : return 'SkyDirFunction' elif spatial_model in [ 'SpatialMap' ] : return 'SpatialMap' elif spatial_model in [ 'RadialGaussian' , 'RadialDisk' ] : try : import pyLikelihood if hasattr ( pyLikelihood , 'RadialG...
Translate a spatial model string to a spatial type .
35,944
def create_pars_from_dict ( name , pars_dict , rescale = True , update_bounds = False ) : o = get_function_defaults ( name ) pars_dict = pars_dict . copy ( ) for k in o . keys ( ) : if not k in pars_dict : continue v = pars_dict [ k ] if not isinstance ( v , dict ) : v = { 'name' : k , 'value' : v } o [ k ] . update ( ...
Create a dictionary for the parameters of a function .
35,945
def make_parameter_dict ( pdict , fixed_par = False , rescale = True , update_bounds = False ) : o = copy . deepcopy ( pdict ) o . setdefault ( 'scale' , 1.0 ) if rescale : value , scale = utils . scale_parameter ( o [ 'value' ] * o [ 'scale' ] ) o [ 'value' ] = np . abs ( value ) * np . sign ( o [ 'value' ] ) o [ 'sca...
Update a parameter dictionary . This function will automatically set the parameter scale and bounds if they are not defined . Bounds are also adjusted to ensure that they encompass the parameter value .
35,946
def cast_pars_dict ( pars_dict ) : o = { } for pname , pdict in pars_dict . items ( ) : o [ pname ] = { } for k , v in pdict . items ( ) : if k == 'free' : o [ pname ] [ k ] = bool ( int ( v ) ) elif k == 'name' : o [ pname ] [ k ] = v else : o [ pname ] [ k ] = float ( v ) return o
Cast the bool and float elements of a parameters dict to the appropriate python types .
35,947
def do_gather ( flist ) : hlist = [ ] nskip = 3 for fname in flist : fin = fits . open ( fname ) if len ( hlist ) == 0 : if fin [ 1 ] . name == 'SKYMAP' : nskip = 4 start = 0 else : start = nskip for h in fin [ start : ] : hlist . append ( h ) hdulistout = fits . HDUList ( hlist ) return hdulistout
Gather all the HDUs from a list of files
35,948
def main_browse ( ) : parser = argparse . ArgumentParser ( usage = "job_archive.py [options]" , description = "Browse a job archive" ) parser . add_argument ( '--jobs' , action = 'store' , dest = 'job_archive_table' , type = str , default = 'job_archive_temp2.fits' , help = "Job archive file" ) parser . add_argument ( ...
Entry point for command line use for browsing a JobArchive
35,949
def n_waiting(self):
    """Return the number of jobs in any of the waiting states."""
    waiting_states = (JobStatus.no_job, JobStatus.unknown,
                      JobStatus.not_ready, JobStatus.ready)
    return sum(self._counters[state] for state in waiting_states)
Return the number of jobs in various waiting states
35,950
def n_failed(self):
    """Return the number of jobs that failed, fully or partially."""
    n_full = self._counters[JobStatus.failed]
    n_partial = self._counters[JobStatus.partial_failed]
    return n_full + n_partial
Return the number of failed jobs
35,951
def get_status ( self ) : if self . n_total == 0 : return JobStatus . no_job elif self . n_done == self . n_total : return JobStatus . done elif self . n_failed > 0 : if self . n_failed > self . n_total / 4. : return JobStatus . failed return JobStatus . partial_failed elif self . n_running > 0 : return JobStatus . run...
Return an overall status based on the number of jobs in various states .
35,952
def make_tables ( job_dict ) : col_dbkey = Column ( name = 'dbkey' , dtype = int ) col_jobname = Column ( name = 'jobname' , dtype = 'S64' ) col_jobkey = Column ( name = 'jobkey' , dtype = 'S64' ) col_appname = Column ( name = 'appname' , dtype = 'S64' ) col_logfile = Column ( name = 'logfile' , dtype = 'S256' ) col_jo...
Build and return an astropy . table . Table to store JobDetails
35,953
def get_file_ids ( self , file_archive , creator = None , status = FileStatus . no_file ) : file_dict = copy . deepcopy ( self . file_dict ) if self . sub_file_dict is not None : file_dict . update ( self . sub_file_dict . file_dict ) infiles = file_dict . input_files outfiles = file_dict . output_files rmfiles = file_...
Fill the file id arrays from the file lists
35,954
def get_file_paths ( self , file_archive , file_id_array ) : full_list = [ ] status_dict = { } full_list += file_archive . get_file_paths ( file_id_array [ self . infile_ids ] ) full_list += file_archive . get_file_paths ( file_id_array [ self . outfile_ids ] ) full_list += file_archive . get_file_paths ( file_id_array...
Get the full paths of the files used by this object from the the id arrays
35,955
def _fill_array_from_list ( the_list , the_array ) : for i , val in enumerate ( the_list ) : the_array [ i ] = val return the_array
Fill an array from a list
35,956
def make_dict(cls, table):
    """Build a dict mapping dbkey -> JobDetails from an astropy.table.Table."""
    details_iter = (cls.create_from_row(row) for row in table)
    return {details.dbkey: details for details in details_iter}
Build a dictionary map int to JobDetails from an astropy . table . Table
35,957
def check_status_logfile(self, checker_func):
    """Update and return this job's status by applying *checker_func* to its logfile."""
    new_status = checker_func(self.logfile)
    self.status = new_status
    return new_status
Check on the status of this particular job using the logfile
35,958
def _read_table_file ( self , table_file ) : self . _table_file = table_file if os . path . exists ( self . _table_file ) : self . _table = Table . read ( self . _table_file , hdu = 'JOB_ARCHIVE' ) self . _table_ids = Table . read ( self . _table_file , hdu = 'FILE_IDS' ) else : self . _table , self . _table_ids = JobD...
Read an astropy . table . Table from table_file to set up the JobArchive
35,959
def get_details(self, jobname, jobkey):
    """Get the JobDetails for a particular job instance from the cache."""
    cache_key = JobDetails.make_fullkey(jobname, jobkey)
    return self._cache[cache_key]
Get the JobDetails associated to a particular job instance
35,960
def register_job ( self , job_details ) : try : job_details_old = self . get_details ( job_details . jobname , job_details . jobkey ) if job_details_old . status <= JobStatus . running : job_details_old . status = job_details . status job_details_old . update_table_row ( self . _table , job_details_old . dbkey - 1 ) jo...
Register a job in this JobArchive
35,961
def register_jobs ( self , job_dict ) : njobs = len ( job_dict ) sys . stdout . write ( "Registering %i total jobs: " % njobs ) for i , job_details in enumerate ( job_dict . values ( ) ) : if i % 10 == 0 : sys . stdout . write ( '.' ) sys . stdout . flush ( ) self . register_job ( job_details ) sys . stdout . write ( '...
Register a bunch of jobs in this archive
35,962
def register_job_from_link ( self , link , key , ** kwargs ) : job_config = kwargs . get ( 'job_config' , None ) if job_config is None : job_config = link . args status = kwargs . get ( 'status' , JobStatus . unknown ) job_details = JobDetails ( jobname = link . linkname , jobkey = key , appname = link . appname , logf...
Register a job in the JobArchive from a Link object
35,963
def update_job(self, job_details):
    """Update the archived copy of a job with a new timestamp and status.

    Returns the (updated) archived JobDetails instance.
    """
    archived = self.get_details(job_details.jobname, job_details.jobkey)
    archived.timestamp = job_details.timestamp
    archived.status = job_details.status
    # dbkey is 1-based while table rows are 0-based.
    archived.update_table_row(self._table, archived.dbkey - 1)
    return archived
Update a job in the JobArchive
35,964
def remove_jobs ( self , mask ) : jobnames = self . table [ mask ] [ 'jobname' ] jobkey = self . table [ mask ] [ 'jobkey' ] self . table [ mask ] [ 'status' ] = JobStatus . removed for jobname , jobkey in zip ( jobnames , jobkey ) : fullkey = JobDetails . make_fullkey ( jobname , jobkey ) self . _cache . pop ( fullkey...
Mark all jobs that match a mask as removed
35,965
def build_temp_job_archive ( cls ) : try : os . unlink ( 'job_archive_temp.fits' ) os . unlink ( 'file_archive_temp.fits' ) except OSError : pass cls . _archive = cls ( job_archive_table = 'job_archive_temp.fits' , file_archive_table = 'file_archive_temp.fits' , base_path = os . path . abspath ( '.' ) + '/' ) return cl...
Build and return a JobArchive using default locations of persistent files .
35,966
def update_job_status ( self , checker_func ) : njobs = len ( self . cache . keys ( ) ) status_vect = np . zeros ( ( 8 ) , int ) sys . stdout . write ( "Updating status of %i jobs: " % njobs ) sys . stdout . flush ( ) for i , key in enumerate ( self . cache . keys ( ) ) : if i % 200 == 0 : sys . stdout . write ( '.' ) ...
Update the status of all the jobs in the archive
35,967
def build_archive(cls, **kwargs):
    """Return the singleton JobArchive instance, constructing it on first use."""
    if cls._archive is None:
        cls._archive = cls(**kwargs)
    return cls._archive
Return the singleton JobArchive instance building it if needed
35,968
def elapsed_time(self):
    """Get the elapsed time, including the running interval if the timer is active."""
    if self._t0 is None:
        return self._time
    return self._time + self._get_time()
Get the elapsed time .
35,969
def make_spatialmap_source(name, Spatial_Filename, spectrum):
    """Construct and return a fermipy.roi_model.Source with SpatialMap type."""
    data = {'Spatial_Filename': Spatial_Filename,
            'ra': 0.0,
            'dec': 0.0,
            'SpatialType': 'SpatialMap',
            'Source_Name': name}
    if spectrum is not None:
        data.update(spectrum)
    return roi_model.Source(name, data)
Construct and return a fermipy . roi_model . Source object
35,970
def make_mapcube_source(name, Spatial_Filename, spectrum):
    """Construct and return a fermipy.roi_model.MapCubeSource object."""
    data = {'Spatial_Filename': Spatial_Filename}
    if spectrum is not None:
        data.update(spectrum)
    return roi_model.MapCubeSource(name, data)
Construct and return a fermipy . roi_model . MapCubeSource object
35,971
def make_isotropic_source(name, Spectrum_Filename, spectrum):
    """Construct and return a fermipy.roi_model.IsoSource object."""
    data = {'Spectrum_Filename': Spectrum_Filename}
    if spectrum is not None:
        data.update(spectrum)
    return roi_model.IsoSource(name, data)
Construct and return a fermipy . roi_model . IsoSource object
35,972
def make_composite_source(name, spectrum):
    """Construct and return a fermipy.roi_model.CompositeSource object."""
    data = {'SpatialType': 'CompositeSource',
            'SpatialModel': 'CompositeSource',
            'SourceType': 'CompositeSource'}
    if spectrum is not None:
        data.update(spectrum)
    return roi_model.CompositeSource(name, data)
Construct and return a fermipy . roi_model . CompositeSource object
35,973
def make_catalog_sources(catalog_roi_model, source_names):
    """Return the subset of sources in catalog_roi_model named in source_names."""
    return {name: catalog_roi_model[name] for name in source_names}
Construct and return dictionary of sources that are a subset of sources in catalog_roi_model .
35,974
def make_sources ( comp_key , comp_dict ) : srcdict = OrderedDict ( ) try : comp_info = comp_dict . info except AttributeError : comp_info = comp_dict try : spectrum = comp_dict . spectrum except AttributeError : spectrum = None model_type = comp_info . model_type if model_type == 'PointSource' : srcdict [ comp_key ] =...
Make dictionary mapping component keys to a source or set of sources
35,975
def add_sources(self, source_info_dict):
    """Add all of the sources in source_info_dict to this factory."""
    self._source_info_dict.update(source_info_dict)
    for comp_key, comp_value in source_info_dict.items():
        self._sources.update(make_sources(comp_key, comp_value))
Add all of the sources in source_info_dict to this factory
35,976
def build_catalog ( ** kwargs ) : catalog_type = kwargs . get ( 'catalog_type' ) catalog_file = kwargs . get ( 'catalog_file' ) catalog_extdir = kwargs . get ( 'catalog_extdir' ) if catalog_type == '2FHL' : return catalog . Catalog2FHL ( fitsfile = catalog_file , extdir = catalog_extdir ) elif catalog_type == '3FGL' : ...
Build a fermipy . catalog . Catalog object
35,977
def make_fermipy_roi_model_from_catalogs(cataloglist):
    """Build a full-sky fermipy ROIModel from a list of fermipy.catalog.Catalog objects."""
    model_config = {'catalogs': cataloglist, 'src_roiwidth': 360.}
    return roi_model.ROIModel(model_config,
                              skydir=SkyCoord(0.0, 0.0, unit='deg'))
Build and return a fermipy . roi_model . ROIModel object from a list of fermipy . catalog . Catalog objects
35,978
def make_roi ( cls , sources = None ) : if sources is None : sources = { } src_fact = cls ( ) src_fact . add_sources ( sources ) ret_model = roi_model . ROIModel ( { } , skydir = SkyCoord ( 0.0 , 0.0 , unit = 'deg' ) ) for source in src_fact . sources . values ( ) : ret_model . load_source ( source , build_index = Fals...
Build and return a fermipy . roi_model . ROIModel object from a dict with information about the sources
35,979
def copy_selected_sources(cls, roi, source_names):
    """Build a new ROIModel by copying the named sources from *roi*.

    Sources that cannot be copied are silently skipped (best-effort copy).
    """
    new_roi = cls.make_roi()
    for src_name in source_names:
        try:
            src_copy = roi.copy_source(src_name)
        except Exception:
            continue
        new_roi.load_source(src_copy, build_index=False)
    return new_roi
Build and return a fermipy . roi_model . ROIModel object by copying selected sources from another such object
35,980
def build_from_yamlfile(yamlfile):
    """Build a MktimeFilterDict from the 'aliases' and 'selections' sections of a yaml file.

    Uses ``yaml.safe_load`` -- consistent with the rest of this module and
    safe on untrusted input, unlike the bare ``yaml.load`` used before --
    and closes the file handle deterministically via a context manager.
    """
    with open(yamlfile) as f:
        d = yaml.safe_load(f)
    return MktimeFilterDict(d['aliases'], d['selections'])
Build a list of components from a yaml file
35,981
def collect_jobs ( dirs , runscript , overwrite = False , max_job_age = 90 ) : jobs = [ ] for dirname in sorted ( dirs ) : o = dict ( cfgfile = os . path . join ( dirname , 'config.yaml' ) , logfile = os . path . join ( dirname , os . path . splitext ( runscript ) [ 0 ] + '.log' ) , runscript = os . path . join ( dirna...
Construct a list of job dictionaries .
35,982
def delete_source_map ( srcmap_file , names , logger = None ) : with fits . open ( srcmap_file ) as hdulist : hdunames = [ hdu . name . upper ( ) for hdu in hdulist ] if not isinstance ( names , list ) : names = [ names ] for name in names : if not name . upper ( ) in hdunames : continue del hdulist [ name . upper ( ) ...
Delete a map from a binned analysis source map file if it exists .
35,983
def get_offsets(self, pix):
    """Get the offset of the first pixel in each dimension in the global system.

    The leading dimension always starts at 0; each remaining dimension is
    centered on the corresponding entry of *pix*.
    """
    offsets = [0]
    for dim in range(1, self.ndim):
        npix = int(self.shape[dim])
        offsets.append(int(pix[dim - 1]) - npix // 2)
    return offsets
Get offset of the first pixel in each dimension in the global coordinate system .
35,984
def shift_to_coords ( self , pix , fill_value = np . nan ) : pix_offset = self . get_offsets ( pix ) dpix = np . zeros ( len ( self . shape ) - 1 ) for i in range ( len ( self . shape ) - 1 ) : x = self . rebin * ( pix [ i ] - pix_offset [ i + 1 ] ) + ( self . rebin - 1.0 ) / 2. dpix [ i ] = x - self . _pix_ref [ i ] p...
Create a new map that is shifted to the pixel coordinates pix .
35,985
def create_map(self, pix):
    """Create a new map shifted to pixel coordinates *pix*.

    Finite pixels of the shifted second map override the first map; any
    remaining non-finite pixels are zeroed.
    """
    base = self._m0.shift_to_coords(pix)
    overlay = self._m1.shift_to_coords(pix)
    good = np.isfinite(overlay)
    base[good] = overlay[good]
    base[~np.isfinite(base)] = 0
    return base
Create a new map with reference pixel coordinates shifted to the pixel coordinates pix .
35,986
def render_pep440(vcs):
    """Convert a git release tag into a PEP 440 compliant version string.

    ``None`` passes through; a plain tag is returned unchanged; any
    additional ``-``-separated fields become a ``+``-prefixed local
    version segment joined with dots.
    """
    if vcs is None:
        return None
    head, *extra = vcs.split('-')
    if not extra:
        return head
    return head + '+' + '.'.join(extra)
Convert git release tag into a form that is PEP440 compliant .
35,987
def read_release_version ( ) : import re dirname = os . path . abspath ( os . path . dirname ( __file__ ) ) try : f = open ( os . path . join ( dirname , "_version.py" ) , "rt" ) for line in f . readlines ( ) : m = re . match ( "__version__ = '([^']+)'" , line ) if m : ver = m . group ( 1 ) return ver except : return N...
Read the release version from _version . py .
35,988
def write_release_version(version):
    """Write the release version to _version.py next to this module.

    Uses a ``with`` block so the file handle is closed even if the write
    fails (the previous open/write/close sequence leaked the handle on
    error).
    """
    dirname = os.path.abspath(os.path.dirname(__file__))
    with open(os.path.join(dirname, "_version.py"), "wt") as f:
        f.write("__version__ = '%s'\n" % version)
Write the release version to _version . py .
35,989
def make_full_path(basedir, outkey, origname):
    """Make a full file path by combining basedir, a key subdirectory, and a name.

    The '.fits' suffix of the original basename is tagged with the key:
    'x.fits' -> '<basedir>/<outkey>/x_<outkey>.fits'.
    """
    tagged_name = os.path.basename(origname).replace('.fits', '_%s.fits' % outkey)
    return os.path.join(basedir, outkey, tagged_name)
Make a full file path by combining tokens
35,990
def init_matplotlib_backend(backend=None):
    """Initialize the matplotlib backend.

    When no DISPLAY is available the backend is forced to the
    non-interactive Agg; otherwise *backend* is applied if given.
    """
    import matplotlib
    if 'DISPLAY' not in os.environ:
        matplotlib.use('Agg')
    elif backend is not None:
        matplotlib.use(backend)
This function initializes the matplotlib backend . When no DISPLAY is available the backend is automatically set to Agg .
35,991
def load_data ( infile , workdir = None ) : infile = resolve_path ( infile , workdir = workdir ) infile , ext = os . path . splitext ( infile ) if os . path . isfile ( infile + '.npy' ) : infile += '.npy' elif os . path . isfile ( infile + '.yaml' ) : infile += '.yaml' else : raise Exception ( 'Input file does not exis...
Load python data structure from either a YAML or numpy file .
35,992
def resolve_file_path_list ( pathlist , workdir , prefix = '' , randomize = False ) : files = [ ] with open ( pathlist , 'r' ) as f : files = [ line . strip ( ) for line in f ] newfiles = [ ] for f in files : f = os . path . expandvars ( f ) if os . path . isfile ( f ) : newfiles += [ f ] else : newfiles += [ os . path...
Resolve the path of each file name in the file pathlist and write the updated paths to a new file .
35,993
def collect_dirs ( path , max_depth = 1 , followlinks = True ) : if not os . path . isdir ( path ) : return [ ] o = [ path ] if max_depth == 0 : return o for subdir in os . listdir ( path ) : subdir = os . path . join ( path , subdir ) if not os . path . isdir ( subdir ) : continue o += [ subdir ] if os . path . islink...
Recursively find directories under the given path .
35,994
def match_regex_list(patterns, string):
    """Return True if *string* matches at least one regex pattern in *patterns*."""
    return any(re.findall(pattern, string) for pattern in patterns)
Perform a regex match of a string against a list of patterns . Returns true if the string matches at least one pattern in the list .
35,995
def find_rows_by_string ( tab , names , colnames = [ 'assoc' ] ) : mask = np . empty ( len ( tab ) , dtype = bool ) mask . fill ( False ) names = [ name . lower ( ) . replace ( ' ' , '' ) for name in names ] for colname in colnames : if colname not in tab . columns : continue col = tab [ [ colname ] ] . copy ( ) col [ ...
Find the rows in a table tab that match at least one of the strings in names . This method ignores whitespace and case when matching strings .
35,996
def separation_cos_angle(lon0, lat0, lon1, lat1):
    """Evaluate the cosine of the angular separation between two directions.

    Inputs are longitudes/latitudes in radians (spherical law of cosines).
    """
    sin_term = np.sin(lat1) * np.sin(lat0)
    cos_term = np.cos(lat1) * np.cos(lat0) * np.cos(lon1 - lon0)
    return sin_term + cos_term
Evaluate the cosine of the angular separation between two direction vectors .
35,997
def angle_to_cartesian(lon, lat):
    """Convert spherical coordinates (radians) to cartesian unit vectors.

    Returns an array of shape (N, 3) with columns (x, y, z).
    """
    colat = np.array(np.pi / 2. - lat)
    x = np.sin(colat) * np.cos(lon)
    y = np.sin(colat) * np.sin(lon)
    z = np.cos(colat)
    return np.vstack((x, y, z)).T
Convert spherical coordinates to cartesian unit vectors .
35,998
def cov_to_correlation(cov):
    """Compute the correlation matrix from a covariance matrix.

    Rows/columns with zero or non-finite variance produce NaN entries.
    """
    err = np.sqrt(np.diag(cov))
    inv_err = np.ones_like(err) * np.nan
    valid = np.isfinite(err) & (err != 0)
    inv_err[valid] = 1. / err[valid]
    return np.array(cov) * np.outer(inv_err, inv_err)
Compute the correlation matrix given the covariance matrix .
35,999
def ellipse_to_cov ( sigma_maj , sigma_min , theta ) : cth = np . cos ( theta ) sth = np . sin ( theta ) covxx = cth ** 2 * sigma_maj ** 2 + sth ** 2 * sigma_min ** 2 covyy = sth ** 2 * sigma_maj ** 2 + cth ** 2 * sigma_min ** 2 covxy = cth * sth * sigma_maj ** 2 - cth * sth * sigma_min ** 2 return np . array ( [ [ cov...
Compute the covariance matrix in two variables x and y given the std . deviation along the semi - major and semi - minor axes and the rotation angle of the error ellipse .