repo
stringlengths
7
55
path
stringlengths
4
127
func_name
stringlengths
1
88
original_string
stringlengths
75
19.8k
language
stringclasses
1 value
code
stringlengths
75
19.8k
code_tokens
listlengths
20
707
docstring
stringlengths
3
17.3k
docstring_tokens
listlengths
3
222
sha
stringlengths
40
40
url
stringlengths
87
242
partition
stringclasses
1 value
idx
int64
0
252k
fermiPy/fermipy
fermipy/utils.py
onesided_cl_to_dlnl
def onesided_cl_to_dlnl(cl):
    """Compute the delta-loglikelihood value that corresponds to an
    upper limit at the given one-sided confidence level.

    Parameters
    ----------
    cl : float
        Confidence level.

    Returns
    -------
    dlnl : float
        Delta-loglikelihood value with respect to the maximum of the
        likelihood function.
    """
    alpha = 1.0 - cl
    # One-sided gaussian significance (z-score) for this confidence level.
    zscore = np.sqrt(2.) * special.erfinv(1 - 2 * alpha)
    return 0.5 * zscore ** 2
python
def onesided_cl_to_dlnl(cl): """Compute the delta-loglikehood values that corresponds to an upper limit of the given confidence level. Parameters ---------- cl : float Confidence level. Returns ------- dlnl : float Delta-loglikelihood value with respect to the maximum of the likelihood function. """ alpha = 1.0 - cl return 0.5 * np.power(np.sqrt(2.) * special.erfinv(1 - 2 * alpha), 2.)
[ "def", "onesided_cl_to_dlnl", "(", "cl", ")", ":", "alpha", "=", "1.0", "-", "cl", "return", "0.5", "*", "np", ".", "power", "(", "np", ".", "sqrt", "(", "2.", ")", "*", "special", ".", "erfinv", "(", "1", "-", "2", "*", "alpha", ")", ",", "2."...
Compute the delta-loglikehood values that corresponds to an upper limit of the given confidence level. Parameters ---------- cl : float Confidence level. Returns ------- dlnl : float Delta-loglikelihood value with respect to the maximum of the likelihood function.
[ "Compute", "the", "delta", "-", "loglikehood", "values", "that", "corresponds", "to", "an", "upper", "limit", "of", "the", "given", "confidence", "level", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L576-L592
train
36,000
fermiPy/fermipy
fermipy/utils.py
split_bin_edges
def split_bin_edges(edges, npts=2):
    """Subdivide an array of bins by splitting each bin into ``npts``
    subintervals.

    Parameters
    ----------
    edges : `~numpy.ndarray`
        Bin edge array.

    npts : int
        Number of intervals into which each bin will be subdivided.

    Returns
    -------
    edges : `~numpy.ndarray`
        Subdivided bin edge array.
    """
    # Nothing to do for fewer than two subintervals.
    if npts < 2:
        return edges

    # Fractional positions of the subdivision points within each bin.
    frac = np.linspace(0.0, 1.0, npts + 1)[None, :]
    lo = edges[:-1, None]
    hi = edges[1:, None]
    subdivided = lo + (hi - lo) * frac
    # unique() both sorts and drops the duplicated interior bin edges.
    return np.unique(subdivided.ravel())
python
def split_bin_edges(edges, npts=2): """Subdivide an array of bins by splitting each bin into ``npts`` subintervals. Parameters ---------- edges : `~numpy.ndarray` Bin edge array. npts : int Number of intervals into which each bin will be subdivided. Returns ------- edges : `~numpy.ndarray` Subdivided bin edge array. """ if npts < 2: return edges x = (edges[:-1, None] + (edges[1:, None] - edges[:-1, None]) * np.linspace(0.0, 1.0, npts + 1)[None, :]) return np.unique(np.ravel(x))
[ "def", "split_bin_edges", "(", "edges", ",", "npts", "=", "2", ")", ":", "if", "npts", "<", "2", ":", "return", "edges", "x", "=", "(", "edges", "[", ":", "-", "1", ",", "None", "]", "+", "(", "edges", "[", "1", ":", ",", "None", "]", "-", ...
Subdivide an array of bins by splitting each bin into ``npts`` subintervals. Parameters ---------- edges : `~numpy.ndarray` Bin edge array. npts : int Number of intervals into which each bin will be subdivided. Returns ------- edges : `~numpy.ndarray` Subdivided bin edge array.
[ "Subdivide", "an", "array", "of", "bins", "by", "splitting", "each", "bin", "into", "npts", "subintervals", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1010-L1034
train
36,001
fermiPy/fermipy
fermipy/utils.py
extend_array
def extend_array(edges, binsz, lo, hi):
    """Extend an array to encompass lo and hi values."""
    # Number of whole bins needed below and above the current range.
    n_below = int(np.ceil((edges[0] - lo) / binsz))
    n_above = int(np.ceil((hi - edges[-1]) / binsz))

    # Work on a copy so the caller's array is never mutated.
    out = copy.deepcopy(edges)
    if n_below > 0:
        below = np.linspace(out[0] - n_below * binsz, out[0], n_below + 1)
        out = np.concatenate((below[:-1], out))
    if n_above > 0:
        above = np.linspace(out[-1], out[-1] + n_above * binsz, n_above + 1)
        out = np.concatenate((out, above[1:]))
    return out
python
def extend_array(edges, binsz, lo, hi): """Extend an array to encompass lo and hi values.""" numlo = int(np.ceil((edges[0] - lo) / binsz)) numhi = int(np.ceil((hi - edges[-1]) / binsz)) edges = copy.deepcopy(edges) if numlo > 0: edges_lo = np.linspace(edges[0] - numlo * binsz, edges[0], numlo + 1) edges = np.concatenate((edges_lo[:-1], edges)) if numhi > 0: edges_hi = np.linspace(edges[-1], edges[-1] + numhi * binsz, numhi + 1) edges = np.concatenate((edges, edges_hi[1:])) return edges
[ "def", "extend_array", "(", "edges", ",", "binsz", ",", "lo", ",", "hi", ")", ":", "numlo", "=", "int", "(", "np", ".", "ceil", "(", "(", "edges", "[", "0", "]", "-", "lo", ")", "/", "binsz", ")", ")", "numhi", "=", "int", "(", "np", ".", "...
Extend an array to encompass lo and hi values.
[ "Extend", "an", "array", "to", "encompass", "lo", "and", "hi", "values", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1087-L1102
train
36,002
fermiPy/fermipy
fermipy/utils.py
fits_recarray_to_dict
def fits_recarray_to_dict(table):
    """Convert a FITS recarray to a python dictionary.

    Parameters
    ----------
    table : FITS table HDU
        Object exposing ``columns.names`` and ``data`` (e.g. an
        `~astropy.io.fits.BinTableHDU`).

    Returns
    -------
    cols : dict
        Dictionary mapping column name to a numpy array of the column
        values, with numpy scalar dtypes normalized to builtin python
        types (float, str, int).

    Raises
    ------
    Exception
        If a column's element type is not one of the recognized kinds.
    """
    cols = {}
    for col in table.columns.names:
        col_data = table.data[col]
        first = col_data[0]
        # isinstance against the abstract numpy scalar bases replaces the
        # original exact type() comparisons: np.floating covers
        # float32/float64 and np.integer generalizes the int16-only branch
        # to all integer widths (previously int32/int64 columns raised).
        if isinstance(first, np.floating):
            cols[col] = np.array(col_data, dtype=float)
        elif isinstance(first, str):
            # np.str_ subclasses str, so numpy unicode scalars land here too.
            cols[col] = np.array(col_data, dtype=str)
        elif isinstance(first, np.bytes_):
            # np.string_ was removed in numpy 2.0; np.bytes_ is its alias.
            cols[col] = np.array(col_data, dtype=str)
        elif isinstance(first, np.integer):
            cols[col] = np.array(col_data, dtype=int)
        elif isinstance(first, np.ndarray):
            cols[col] = np.array(col_data)
        else:
            # Report the element type (the original reported the type of the
            # whole column array, which was always the same and unhelpful).
            raise Exception(
                'Unrecognized column type: %s %s' % (col, str(type(first))))
    return cols
python
def fits_recarray_to_dict(table): """Convert a FITS recarray to a python dictionary.""" cols = {} for icol, col in enumerate(table.columns.names): col_data = table.data[col] if type(col_data[0]) == np.float32: cols[col] = np.array(col_data, dtype=float) elif type(col_data[0]) == np.float64: cols[col] = np.array(col_data, dtype=float) elif type(col_data[0]) == str: cols[col] = np.array(col_data, dtype=str) elif type(col_data[0]) == np.string_: cols[col] = np.array(col_data, dtype=str) elif type(col_data[0]) == np.int16: cols[col] = np.array(col_data, dtype=int) elif type(col_data[0]) == np.ndarray: cols[col] = np.array(col_data) else: raise Exception( 'Unrecognized column type: %s %s' % (col, str(type(col_data)))) return cols
[ "def", "fits_recarray_to_dict", "(", "table", ")", ":", "cols", "=", "{", "}", "for", "icol", ",", "col", "in", "enumerate", "(", "table", ".", "columns", ".", "names", ")", ":", "col_data", "=", "table", ".", "data", "[", "col", "]", "if", "type", ...
Convert a FITS recarray to a python dictionary.
[ "Convert", "a", "FITS", "recarray", "to", "a", "python", "dictionary", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1111-L1134
train
36,003
fermiPy/fermipy
fermipy/utils.py
merge_dict
def merge_dict(d0, d1, add_new_keys=False, append_arrays=False):
    """Recursively merge the contents of python dictionary d0 with
    the contents of another python dictionary, d1.

    Parameters
    ----------
    d0 : dict
        The input dictionary.

    d1 : dict
        Dictionary to be merged with the input dictionary.

    add_new_keys : bool
        Do not skip keys that only exist in d1.

    append_arrays : bool
        If an element is a numpy array set the value of that element by
        concatenating the two arrays.
    """
    # Check the all-None case first.  In the original ordering this branch
    # was unreachable ("d1 is None" matched first) and merge_dict(None, None)
    # returned None instead of the intended empty dict.
    if d0 is None and d1 is None:
        return {}
    if d1 is None:
        return d0
    if d0 is None:
        return d1

    od = {}

    for k, v in d0.items():
        # k is always present in d0 here, so t0 needs no guard.
        t0 = type(v)
        t1 = type(d1[k]) if k in d1 else None

        if k not in d1:
            od[k] = copy.deepcopy(v)
        elif isinstance(v, dict) and isinstance(d1[k], dict):
            od[k] = merge_dict(v, d1[k], add_new_keys, append_arrays)
        elif isinstance(v, list) and isstr(d1[k]):
            # A comma-separated string overrides a list-valued entry.
            od[k] = d1[k].split(',')
        elif isinstance(v, dict) and d1[k] is None:
            od[k] = copy.deepcopy(v)
        elif isinstance(v, np.ndarray) and append_arrays:
            od[k] = np.concatenate((v, d1[k]))
        elif (v is not None and d1[k] is not None) and t0 != t1:
            # Coerce the override to d0's type when the types disagree;
            # container-type conflicts cannot be coerced safely.
            if t0 == dict or t0 == list:
                raise Exception('Conflicting types in dictionary merge for '
                                'key %s %s %s' % (k, t0, t1))
            od[k] = t0(d1[k])
        else:
            od[k] = copy.copy(d1[k])

    if add_new_keys:
        for k, v in d1.items():
            if k not in d0:
                od[k] = copy.deepcopy(v)

    return od
python
def merge_dict(d0, d1, add_new_keys=False, append_arrays=False): """Recursively merge the contents of python dictionary d0 with the contents of another python dictionary, d1. Parameters ---------- d0 : dict The input dictionary. d1 : dict Dictionary to be merged with the input dictionary. add_new_keys : str Do not skip keys that only exist in d1. append_arrays : bool If an element is a numpy array set the value of that element by concatenating the two arrays. """ if d1 is None: return d0 elif d0 is None: return d1 elif d0 is None and d1 is None: return {} od = {} for k, v in d0.items(): t0 = None t1 = None if k in d0: t0 = type(d0[k]) if k in d1: t1 = type(d1[k]) if k not in d1: od[k] = copy.deepcopy(d0[k]) elif isinstance(v, dict) and isinstance(d1[k], dict): od[k] = merge_dict(d0[k], d1[k], add_new_keys, append_arrays) elif isinstance(v, list) and isstr(d1[k]): od[k] = d1[k].split(',') elif isinstance(v, dict) and d1[k] is None: od[k] = copy.deepcopy(d0[k]) elif isinstance(v, np.ndarray) and append_arrays: od[k] = np.concatenate((v, d1[k])) elif (d0[k] is not None and d1[k] is not None) and t0 != t1: if t0 == dict or t0 == list: raise Exception('Conflicting types in dictionary merge for ' 'key %s %s %s' % (k, t0, t1)) od[k] = t0(d1[k]) else: od[k] = copy.copy(d1[k]) if add_new_keys: for k, v in d1.items(): if k not in d0: od[k] = copy.deepcopy(d1[k]) return od
[ "def", "merge_dict", "(", "d0", ",", "d1", ",", "add_new_keys", "=", "False", ",", "append_arrays", "=", "False", ")", ":", "if", "d1", "is", "None", ":", "return", "d0", "elif", "d0", "is", "None", ":", "return", "d1", "elif", "d0", "is", "None", ...
Recursively merge the contents of python dictionary d0 with the contents of another python dictionary, d1. Parameters ---------- d0 : dict The input dictionary. d1 : dict Dictionary to be merged with the input dictionary. add_new_keys : str Do not skip keys that only exist in d1. append_arrays : bool If an element is a numpy array set the value of that element by concatenating the two arrays.
[ "Recursively", "merge", "the", "contents", "of", "python", "dictionary", "d0", "with", "the", "contents", "of", "another", "python", "dictionary", "d1", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1239-L1302
train
36,004
fermiPy/fermipy
fermipy/utils.py
tolist
def tolist(x):
    """Convenience function that takes in a nested structure of lists
    and dictionaries and converts everything to its base objects.
    This is useful for dumping a file to yaml.

    (a) numpy arrays into python lists

        >>> type(tolist(np.asarray(123))) == int
        True
        >>> tolist(np.asarray([1,2,3])) == [1,2,3]
        True

    (b) numpy strings into python strings.

        >>> tolist([np.asarray('cat')])==['cat']
        True

    (c) an ordered dict to a dict

    (d) converts numbers & bools in strings to their real representation,
        (i.e. '123' -> 123)

        >>> type(tolist(np.asarray('123'))) == int
        True
        >>> type(tolist('123')) == int
        True
        >>> tolist('False') == False
        True
    """
    if isinstance(x, list):
        # Build a real list: the original returned map(tolist, x), which is
        # a lazy iterator on python 3 and defeats the yaml-dump use case.
        return [tolist(v) for v in x]
    elif isinstance(x, dict):
        # Also converts OrderedDict (a dict subclass) to a plain dict; the
        # original's separate OrderedDict branch was unreachable.
        return dict((tolist(k), tolist(v)) for k, v in x.items())
    elif isinstance(x, (np.ndarray, np.number)):
        # note, call tolist again to convert strings of numbers to numbers
        return tolist(x.tolist())
    elif isinstance(x, np.bool_):
        return bool(x)
    elif isinstance(x, str):
        # np.str_ subclasses str, so numpy/unicode strings land here too
        # (the original's np.str alias was removed from modern numpy).
        x = str(x)
        try:
            return int(x)
        except ValueError:
            try:
                return float(x)
            except ValueError:
                if x == 'True':
                    return True
                elif x == 'False':
                    return False
                else:
                    return x
    else:
        return x
python
def tolist(x): """ convenience function that takes in a nested structure of lists and dictionaries and converts everything to its base objects. This is useful for dupming a file to yaml. (a) numpy arrays into python lists >>> type(tolist(np.asarray(123))) == int True >>> tolist(np.asarray([1,2,3])) == [1,2,3] True (b) numpy strings into python strings. >>> tolist([np.asarray('cat')])==['cat'] True (c) an ordered dict to a dict >>> ordered=OrderedDict(a=1, b=2) >>> type(tolist(ordered)) == dict True (d) converts unicode to regular strings >>> type(u'a') == str False >>> type(tolist(u'a')) == str True (e) converts numbers & bools in strings to real represntation, (i.e. '123' -> 123) >>> type(tolist(np.asarray('123'))) == int True >>> type(tolist('123')) == int True >>> tolist('False') == False True """ if isinstance(x, list): return map(tolist, x) elif isinstance(x, dict): return dict((tolist(k), tolist(v)) for k, v in x.items()) elif isinstance(x, np.ndarray) or isinstance(x, np.number): # note, call tolist again to convert strings of numbers to numbers return tolist(x.tolist()) elif isinstance(x, OrderedDict): return dict(x) elif isinstance(x, np.bool_): return bool(x) elif isstr(x) or isinstance(x, np.str): x = str(x) # convert unicode & numpy strings try: return int(x) except: try: return float(x) except: if x == 'True': return True elif x == 'False': return False else: return x else: return x
[ "def", "tolist", "(", "x", ")", ":", "if", "isinstance", "(", "x", ",", "list", ")", ":", "return", "map", "(", "tolist", ",", "x", ")", "elif", "isinstance", "(", "x", ",", "dict", ")", ":", "return", "dict", "(", "(", "tolist", "(", "k", ")",...
convenience function that takes in a nested structure of lists and dictionaries and converts everything to its base objects. This is useful for dupming a file to yaml. (a) numpy arrays into python lists >>> type(tolist(np.asarray(123))) == int True >>> tolist(np.asarray([1,2,3])) == [1,2,3] True (b) numpy strings into python strings. >>> tolist([np.asarray('cat')])==['cat'] True (c) an ordered dict to a dict >>> ordered=OrderedDict(a=1, b=2) >>> type(tolist(ordered)) == dict True (d) converts unicode to regular strings >>> type(u'a') == str False >>> type(tolist(u'a')) == str True (e) converts numbers & bools in strings to real represntation, (i.e. '123' -> 123) >>> type(tolist(np.asarray('123'))) == int True >>> type(tolist('123')) == int True >>> tolist('False') == False True
[ "convenience", "function", "that", "takes", "in", "a", "nested", "structure", "of", "lists", "and", "dictionaries", "and", "converts", "everything", "to", "its", "base", "objects", ".", "This", "is", "useful", "for", "dupming", "a", "file", "to", "yaml", "."...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1316-L1383
train
36,005
fermiPy/fermipy
fermipy/utils.py
make_gaussian_kernel
def make_gaussian_kernel(sigma, npix=501, cdelt=0.01, xpix=None, ypix=None):
    """Make kernel for a 2D gaussian.

    Parameters
    ----------
    sigma : float
        Standard deviation in degrees.
    """
    # Convert the width from degrees to pixel units.
    sigma_pix = sigma / cdelt
    pix_dist = make_pixel_distance(npix, xpix, ypix)
    # 2D symmetric gaussian evaluated at each pixel's radial distance.
    norm = 1. / (2 * np.pi * sigma_pix ** 2)
    kern = norm * np.exp(-pix_dist ** 2 / (sigma_pix ** 2 * 2.0))
    # Normalize to unit integral over solid angle (sr^-1 units).
    kern /= (np.sum(kern) * np.radians(cdelt) ** 2)
    return kern
python
def make_gaussian_kernel(sigma, npix=501, cdelt=0.01, xpix=None, ypix=None): """Make kernel for a 2D gaussian. Parameters ---------- sigma : float Standard deviation in degrees. """ sigma /= cdelt def fn(t, s): return 1. / (2 * np.pi * s ** 2) * np.exp( -t ** 2 / (s ** 2 * 2.0)) dxy = make_pixel_distance(npix, xpix, ypix) k = fn(dxy, sigma) k /= (np.sum(k) * np.radians(cdelt) ** 2) return k
[ "def", "make_gaussian_kernel", "(", "sigma", ",", "npix", "=", "501", ",", "cdelt", "=", "0.01", ",", "xpix", "=", "None", ",", "ypix", "=", "None", ")", ":", "sigma", "/=", "cdelt", "def", "fn", "(", "t", ",", "s", ")", ":", "return", "1.", "/",...
Make kernel for a 2D gaussian. Parameters ---------- sigma : float Standard deviation in degrees.
[ "Make", "kernel", "for", "a", "2D", "gaussian", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1542-L1560
train
36,006
fermiPy/fermipy
fermipy/utils.py
make_disk_kernel
def make_disk_kernel(radius, npix=501, cdelt=0.01, xpix=None, ypix=None):
    """Make kernel for a 2D disk.

    Parameters
    ----------
    radius : float
        Disk radius in deg.
    """
    # Convert the radius from degrees to pixel units.
    radius_pix = radius / cdelt
    pix_dist = make_pixel_distance(npix, xpix, ypix)
    # Top-hat profile: 1 inside the radius, 0 outside (0.5 on the edge).
    kern = 0.5 * (np.sign(radius_pix - pix_dist) + 1.0)
    # Normalize to unit integral over solid angle (sr^-1 units).
    kern /= (np.sum(kern) * np.radians(cdelt) ** 2)
    return kern
python
def make_disk_kernel(radius, npix=501, cdelt=0.01, xpix=None, ypix=None): """Make kernel for a 2D disk. Parameters ---------- radius : float Disk radius in deg. """ radius /= cdelt def fn(t, s): return 0.5 * (np.sign(s - t) + 1.0) dxy = make_pixel_distance(npix, xpix, ypix) k = fn(dxy, radius) k /= (np.sum(k) * np.radians(cdelt) ** 2) return k
[ "def", "make_disk_kernel", "(", "radius", ",", "npix", "=", "501", ",", "cdelt", "=", "0.01", ",", "xpix", "=", "None", ",", "ypix", "=", "None", ")", ":", "radius", "/=", "cdelt", "def", "fn", "(", "t", ",", "s", ")", ":", "return", "0.5", "*", ...
Make kernel for a 2D disk. Parameters ---------- radius : float Disk radius in deg.
[ "Make", "kernel", "for", "a", "2D", "disk", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1563-L1581
train
36,007
fermiPy/fermipy
fermipy/utils.py
make_cdisk_kernel
def make_cdisk_kernel(psf, sigma, npix, cdelt, xpix, ypix,
                      psf_scale_fn=None, normalize=False):
    """Make a kernel for a PSF-convolved 2D disk.

    Parameters
    ----------
    psf : `~fermipy.irfs.PSFModel`

    sigma : float
        68% containment radius in degrees.
    """
    # For a uniform disk the 68% containment radius is sqrt(0.68) times
    # the disk radius, so divide to recover the disk radius.
    sigma /= 0.8246211251235321

    dtheta = psf.dtheta
    energies = psf.energies

    # Angular separation of each pixel from the kernel center, in degrees.
    ang_sep = make_pixel_distance(npix, xpix, ypix)
    ang_sep *= cdelt

    kern = np.zeros((len(energies), npix, npix))
    for idx in range(len(energies)):

        def psf_fn(t):
            return psf.eval(idx, t, scale_fn=psf_scale_fn)

        # Radial profile of the disk convolved with the PSF at this energy,
        # interpolated onto the pixel grid.
        profile = convolve2d_disk(psf_fn, dtheta, sigma)
        kern[idx] = np.interp(np.ravel(ang_sep), dtheta,
                              profile).reshape(ang_sep.shape)

    if normalize:
        kern /= (np.sum(kern, axis=0)[np.newaxis, ...] *
                 np.radians(cdelt) ** 2)

    return kern
python
def make_cdisk_kernel(psf, sigma, npix, cdelt, xpix, ypix, psf_scale_fn=None, normalize=False): """Make a kernel for a PSF-convolved 2D disk. Parameters ---------- psf : `~fermipy.irfs.PSFModel` sigma : float 68% containment radius in degrees. """ sigma /= 0.8246211251235321 dtheta = psf.dtheta egy = psf.energies x = make_pixel_distance(npix, xpix, ypix) x *= cdelt k = np.zeros((len(egy), npix, npix)) for i in range(len(egy)): def fn(t): return psf.eval(i, t, scale_fn=psf_scale_fn) psfc = convolve2d_disk(fn, dtheta, sigma) k[i] = np.interp(np.ravel(x), dtheta, psfc).reshape(x.shape) if normalize: k /= (np.sum(k, axis=0)[np.newaxis, ...] * np.radians(cdelt) ** 2) return k
[ "def", "make_cdisk_kernel", "(", "psf", ",", "sigma", ",", "npix", ",", "cdelt", ",", "xpix", ",", "ypix", ",", "psf_scale_fn", "=", "None", ",", "normalize", "=", "False", ")", ":", "sigma", "/=", "0.8246211251235321", "dtheta", "=", "psf", ".", "dtheta...
Make a kernel for a PSF-convolved 2D disk. Parameters ---------- psf : `~fermipy.irfs.PSFModel` sigma : float 68% containment radius in degrees.
[ "Make", "a", "kernel", "for", "a", "PSF", "-", "convolved", "2D", "disk", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1584-L1614
train
36,008
fermiPy/fermipy
fermipy/utils.py
make_radial_kernel
def make_radial_kernel(psf, fn, sigma, npix, cdelt, xpix, ypix,
                       psf_scale_fn=None, normalize=False,
                       klims=None, sparse=False):
    """Make a kernel for a general radially symmetric 2D function.

    Parameters
    ----------
    psf : `~fermipy.irfs.PSFModel`

    fn : callable
        Function that evaluates the kernel at a radial coordinate r.

    sigma : float
        68% containment radius in degrees.
    """
    # Select the energy planes to evaluate; klims is an inclusive index range.
    if klims is None:
        egy = psf.energies
    else:
        egy = psf.energies[klims[0]:klims[1] + 1]

    # Angular separation of each pixel from the kernel center, in degrees.
    ang_dist = make_pixel_distance(npix, xpix, ypix) * cdelt
    max_ang_dist = np.max(ang_dist) + cdelt
    #dtheta = np.linspace(0.0, (np.max(ang_dist) * 1.05)**0.5, 200)**2.0
    # z = create_kernel_function_lookup(psf, fn, sigma, egy,
    #                                   dtheta, psf_scale_fn)
    shape = (len(egy), npix, npix)
    k = np.zeros(shape)
    # Containment radii used to choose the radial sampling bounds per energy:
    # r34 sets the fine-sampling scale, r99 the outer extent.
    r99 = psf.containment_angle(energies=egy, fraction=0.997)
    r34 = psf.containment_angle(energies=egy, fraction=0.34)
    rmin = np.maximum(r34 / 4., 0.01)
    rmax = np.maximum(r99, 0.1)
    if sigma is not None:
        # Widen the bounds to account for the source extension.
        rmin = np.maximum(rmin, 0.5 * sigma)
        rmax = np.maximum(rmax, 2.0 * r34 + 3.0 * sigma)
    # No need to sample beyond the edge of the pixel grid.
    rmax = np.minimum(rmax, max_ang_dist)
    for i in range(len(egy)):
        # Oversampling factor: subdivide pixels when the pixel size is coarse
        # relative to the kernel core; capped at 8.
        rebin = min(int(np.ceil(cdelt / rmin[i])), 8)
        # Quadratic spacing concentrates radial samples near r = 0 where the
        # profile varies fastest.
        if sparse:
            dtheta = np.linspace(0.0, rmax[i]**0.5, 100)**2.0
        else:
            dtheta = np.linspace(0.0, max_ang_dist**0.5, 200)**2.0
        z = eval_radial_kernel(psf, fn, sigma, i, dtheta, psf_scale_fn)
        # Pixel distances on the oversampled grid; the (rebin - 1)/2 offset
        # keeps the kernel center fixed under subdivision.
        xdist = make_pixel_distance(npix * rebin,
                                    xpix * rebin + (rebin - 1.0) / 2.,
                                    ypix * rebin + (rebin - 1.0) / 2.)
        xdist *= cdelt / float(rebin)
        #x = val_to_pix(dtheta, np.ravel(xdist))
        if sparse:
            # Only interpolate pixels inside rmax; the rest stay zero.
            m = np.ravel(xdist) < rmax[i]
            kk = np.zeros(xdist.size)
            #kk[m] = map_coordinates(z, [x[m]], order=2, prefilter=False)
            kk[m] = np.interp(np.ravel(xdist)[m], dtheta, z)
            kk = kk.reshape(xdist.shape)
        else:
            kk = np.interp(np.ravel(xdist), dtheta, z).reshape(xdist.shape)
        # kk = map_coordinates(z, [x], order=2,
        #                      prefilter=False).reshape(xdist.shape)
        # Average the oversampled values back down to the output pixel grid.
        if rebin > 1:
            kk = sum_bins(kk, 0, rebin)
            kk = sum_bins(kk, 1, rebin)
        k[i] = kk / float(rebin)**2
    k = k.reshape((len(egy),) + ang_dist.shape)
    if normalize:
        # Normalize to unit integral over solid angle.
        k /= (np.sum(k, axis=0)[np.newaxis, ...] * np.radians(cdelt) ** 2)
    return k
python
def make_radial_kernel(psf, fn, sigma, npix, cdelt, xpix, ypix, psf_scale_fn=None, normalize=False, klims=None, sparse=False): """Make a kernel for a general radially symmetric 2D function. Parameters ---------- psf : `~fermipy.irfs.PSFModel` fn : callable Function that evaluates the kernel at a radial coordinate r. sigma : float 68% containment radius in degrees. """ if klims is None: egy = psf.energies else: egy = psf.energies[klims[0]:klims[1] + 1] ang_dist = make_pixel_distance(npix, xpix, ypix) * cdelt max_ang_dist = np.max(ang_dist) + cdelt #dtheta = np.linspace(0.0, (np.max(ang_dist) * 1.05)**0.5, 200)**2.0 # z = create_kernel_function_lookup(psf, fn, sigma, egy, # dtheta, psf_scale_fn) shape = (len(egy), npix, npix) k = np.zeros(shape) r99 = psf.containment_angle(energies=egy, fraction=0.997) r34 = psf.containment_angle(energies=egy, fraction=0.34) rmin = np.maximum(r34 / 4., 0.01) rmax = np.maximum(r99, 0.1) if sigma is not None: rmin = np.maximum(rmin, 0.5 * sigma) rmax = np.maximum(rmax, 2.0 * r34 + 3.0 * sigma) rmax = np.minimum(rmax, max_ang_dist) for i in range(len(egy)): rebin = min(int(np.ceil(cdelt / rmin[i])), 8) if sparse: dtheta = np.linspace(0.0, rmax[i]**0.5, 100)**2.0 else: dtheta = np.linspace(0.0, max_ang_dist**0.5, 200)**2.0 z = eval_radial_kernel(psf, fn, sigma, i, dtheta, psf_scale_fn) xdist = make_pixel_distance(npix * rebin, xpix * rebin + (rebin - 1.0) / 2., ypix * rebin + (rebin - 1.0) / 2.) 
xdist *= cdelt / float(rebin) #x = val_to_pix(dtheta, np.ravel(xdist)) if sparse: m = np.ravel(xdist) < rmax[i] kk = np.zeros(xdist.size) #kk[m] = map_coordinates(z, [x[m]], order=2, prefilter=False) kk[m] = np.interp(np.ravel(xdist)[m], dtheta, z) kk = kk.reshape(xdist.shape) else: kk = np.interp(np.ravel(xdist), dtheta, z).reshape(xdist.shape) # kk = map_coordinates(z, [x], order=2, # prefilter=False).reshape(xdist.shape) if rebin > 1: kk = sum_bins(kk, 0, rebin) kk = sum_bins(kk, 1, rebin) k[i] = kk / float(rebin)**2 k = k.reshape((len(egy),) + ang_dist.shape) if normalize: k /= (np.sum(k, axis=0)[np.newaxis, ...] * np.radians(cdelt) ** 2) return k
[ "def", "make_radial_kernel", "(", "psf", ",", "fn", ",", "sigma", ",", "npix", ",", "cdelt", ",", "xpix", ",", "ypix", ",", "psf_scale_fn", "=", "None", ",", "normalize", "=", "False", ",", "klims", "=", "None", ",", "sparse", "=", "False", ")", ":",...
Make a kernel for a general radially symmetric 2D function. Parameters ---------- psf : `~fermipy.irfs.PSFModel` fn : callable Function that evaluates the kernel at a radial coordinate r. sigma : float 68% containment radius in degrees.
[ "Make", "a", "kernel", "for", "a", "general", "radially", "symmetric", "2D", "function", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1663-L1738
train
36,009
fermiPy/fermipy
fermipy/utils.py
make_psf_kernel
def make_psf_kernel(psf, npix, cdelt, xpix, ypix,
                    psf_scale_fn=None, normalize=False):
    """Generate a kernel for a point-source.

    Parameters
    ----------
    psf : `~fermipy.irfs.PSFModel`

    npix : int
        Number of pixels in X and Y dimensions.

    cdelt : float
        Pixel size in degrees.

    """
    energies = psf.energies

    # Angular separation of each pixel from the kernel center, in degrees.
    ang_sep = make_pixel_distance(npix, xpix, ypix)
    ang_sep *= cdelt

    # Evaluate the PSF at every pixel for each energy plane.
    kern = np.zeros((len(energies), npix, npix))
    for idx, _ in enumerate(energies):
        kern[idx] = psf.eval(idx, ang_sep, scale_fn=psf_scale_fn)

    if normalize:
        kern /= (np.sum(kern, axis=0)[np.newaxis, ...] *
                 np.radians(cdelt) ** 2)

    return kern
python
def make_psf_kernel(psf, npix, cdelt, xpix, ypix, psf_scale_fn=None, normalize=False): """ Generate a kernel for a point-source. Parameters ---------- psf : `~fermipy.irfs.PSFModel` npix : int Number of pixels in X and Y dimensions. cdelt : float Pixel size in degrees. """ egy = psf.energies x = make_pixel_distance(npix, xpix, ypix) x *= cdelt k = np.zeros((len(egy), npix, npix)) for i in range(len(egy)): k[i] = psf.eval(i, x, scale_fn=psf_scale_fn) if normalize: k /= (np.sum(k, axis=0)[np.newaxis, ...] * np.radians(cdelt) ** 2) return k
[ "def", "make_psf_kernel", "(", "psf", ",", "npix", ",", "cdelt", ",", "xpix", ",", "ypix", ",", "psf_scale_fn", "=", "None", ",", "normalize", "=", "False", ")", ":", "egy", "=", "psf", ".", "energies", "x", "=", "make_pixel_distance", "(", "npix", ","...
Generate a kernel for a point-source. Parameters ---------- psf : `~fermipy.irfs.PSFModel` npix : int Number of pixels in X and Y dimensions. cdelt : float Pixel size in degrees.
[ "Generate", "a", "kernel", "for", "a", "point", "-", "source", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1777-L1805
train
36,010
fermiPy/fermipy
fermipy/utils.py
overlap_slices
def overlap_slices(large_array_shape, small_array_shape, position):
    """
    Modified version of `~astropy.nddata.utils.overlap_slices`.

    Get slices for the overlapping part of a small and a large array.

    Given a certain position of the center of the small array, with
    respect to the large array, tuples of slices are returned which can be
    used to extract, add or subtract the small array at the given
    position. This function takes care of the correct behavior at the
    boundaries, where the small array is cut of appropriately.

    Parameters
    ----------
    large_array_shape : tuple
        Shape of the large array.
    small_array_shape : tuple
        Shape of the small array.
    position : tuple
        Position of the small array's center, with respect
        to the large array.  Coordinates should be in the same order
        as the array shape.

    Returns
    -------
    slices_large : tuple of slices
        Slices in all directions for the large array, such that
        ``large_array[slices_large]`` extracts the region of the large array
        that overlaps with the small array.
    slices_small : slice
        Slices in all directions for the small array, such that
        ``small_array[slices_small]`` extracts the region that is inside the
        large array.
    """
    slices_large = []
    slices_small = []
    for pos, small_shape, large_shape in zip(position, small_array_shape,
                                             large_array_shape):
        # Coordinates of the small array's edges in the large array's frame.
        lo = int(pos - small_shape // 2)
        hi = int(pos + (small_shape - small_shape // 2))
        # Clip the large-array slice to its bounds ...
        slices_large.append(slice(max(0, lo), min(large_shape, hi)))
        # ... and the small-array slice to the part that landed inside.
        slices_small.append(slice(max(0, -lo),
                                  min(large_shape - lo, hi - lo)))
    return tuple(slices_large), tuple(slices_small)
python
def overlap_slices(large_array_shape, small_array_shape, position): """ Modified version of `~astropy.nddata.utils.overlap_slices`. Get slices for the overlapping part of a small and a large array. Given a certain position of the center of the small array, with respect to the large array, tuples of slices are returned which can be used to extract, add or subtract the small array at the given position. This function takes care of the correct behavior at the boundaries, where the small array is cut of appropriately. Parameters ---------- large_array_shape : tuple Shape of the large array. small_array_shape : tuple Shape of the small array. position : tuple Position of the small array's center, with respect to the large array. Coordinates should be in the same order as the array shape. Returns ------- slices_large : tuple of slices Slices in all directions for the large array, such that ``large_array[slices_large]`` extracts the region of the large array that overlaps with the small array. slices_small : slice Slices in all directions for the small array, such that ``small_array[slices_small]`` extracts the region that is inside the large array. """ # Get edge coordinates edges_min = [int(pos - small_shape // 2) for (pos, small_shape) in zip(position, small_array_shape)] edges_max = [int(pos + (small_shape - small_shape // 2)) for (pos, small_shape) in zip(position, small_array_shape)] # Set up slices slices_large = tuple(slice(max(0, edge_min), min(large_shape, edge_max)) for (edge_min, edge_max, large_shape) in zip(edges_min, edges_max, large_array_shape)) slices_small = tuple(slice(max(0, -edge_min), min(large_shape - edge_min, edge_max - edge_min)) for (edge_min, edge_max, large_shape) in zip(edges_min, edges_max, large_array_shape)) return slices_large, slices_small
[ "def", "overlap_slices", "(", "large_array_shape", ",", "small_array_shape", ",", "position", ")", ":", "# Get edge coordinates", "edges_min", "=", "[", "int", "(", "pos", "-", "small_shape", "//", "2", ")", "for", "(", "pos", ",", "small_shape", ")", "in", ...
Modified version of `~astropy.nddata.utils.overlap_slices`. Get slices for the overlapping part of a small and a large array. Given a certain position of the center of the small array, with respect to the large array, tuples of slices are returned which can be used to extract, add or subtract the small array at the given position. This function takes care of the correct behavior at the boundaries, where the small array is cut of appropriately. Parameters ---------- large_array_shape : tuple Shape of the large array. small_array_shape : tuple Shape of the small array. position : tuple Position of the small array's center, with respect to the large array. Coordinates should be in the same order as the array shape. Returns ------- slices_large : tuple of slices Slices in all directions for the large array, such that ``large_array[slices_large]`` extracts the region of the large array that overlaps with the small array. slices_small : slice Slices in all directions for the small array, such that ``small_array[slices_small]`` extracts the region that is inside the large array.
[ "Modified", "version", "of", "~astropy", ".", "nddata", ".", "utils", ".", "overlap_slices", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/utils.py#L1828-L1878
train
36,011
fermiPy/fermipy
fermipy/diffuse/model_manager.py
make_library
def make_library(**kwargs): """Build and return a ModelManager object and fill the associated model library """ library_yaml = kwargs.pop('library', 'models/library.yaml') comp_yaml = kwargs.pop('comp', 'config/binning.yaml') basedir = kwargs.pop('basedir', os.path.abspath('.')) model_man = kwargs.get('ModelManager', ModelManager(basedir=basedir)) model_comp_dict = model_man.make_library(library_yaml, library_yaml, comp_yaml) return dict(model_comp_dict=model_comp_dict, ModelManager=model_man)
python
def make_library(**kwargs): """Build and return a ModelManager object and fill the associated model library """ library_yaml = kwargs.pop('library', 'models/library.yaml') comp_yaml = kwargs.pop('comp', 'config/binning.yaml') basedir = kwargs.pop('basedir', os.path.abspath('.')) model_man = kwargs.get('ModelManager', ModelManager(basedir=basedir)) model_comp_dict = model_man.make_library(library_yaml, library_yaml, comp_yaml) return dict(model_comp_dict=model_comp_dict, ModelManager=model_man)
[ "def", "make_library", "(", "*", "*", "kwargs", ")", ":", "library_yaml", "=", "kwargs", ".", "pop", "(", "'library'", ",", "'models/library.yaml'", ")", "comp_yaml", "=", "kwargs", ".", "pop", "(", "'comp'", ",", "'config/binning.yaml'", ")", "basedir", "="...
Build and return a ModelManager object and fill the associated model library
[ "Build", "and", "return", "a", "ModelManager", "object", "and", "fill", "the", "associated", "model", "library" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/model_manager.py#L472-L484
train
36,012
fermiPy/fermipy
fermipy/diffuse/model_manager.py
ModelInfo.edisp_disable_list
def edisp_disable_list(self): """ Return the list of source for which energy dispersion should be turned off """ l = [] for model_comp in self.model_components.values(): if model_comp.edisp_disable: l += [model_comp.info.source_name] return l
python
def edisp_disable_list(self): """ Return the list of source for which energy dispersion should be turned off """ l = [] for model_comp in self.model_components.values(): if model_comp.edisp_disable: l += [model_comp.info.source_name] return l
[ "def", "edisp_disable_list", "(", "self", ")", ":", "l", "=", "[", "]", "for", "model_comp", "in", "self", ".", "model_components", ".", "values", "(", ")", ":", "if", "model_comp", ".", "edisp_disable", ":", "l", "+=", "[", "model_comp", ".", "info", ...
Return the list of source for which energy dispersion should be turned off
[ "Return", "the", "list", "of", "source", "for", "which", "energy", "dispersion", "should", "be", "turned", "off" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/model_manager.py#L66-L72
train
36,013
fermiPy/fermipy
fermipy/diffuse/model_manager.py
ModelInfo.make_model_rois
def make_model_rois(self, components, name_factory): """ Make the fermipy roi_model objects for each of a set of binning components """ ret_dict = {} # Figure out which sources need to be split by components master_roi_source_info = {} sub_comp_sources = {} for comp_name, model_comp in self.model_components.items(): comp_info = model_comp.info if comp_info.components is None: master_roi_source_info[comp_name] = model_comp else: sub_comp_sources[comp_name] = model_comp # Build the xml for the master master_roi = SourceFactory.make_roi(master_roi_source_info) master_xml_mdl = name_factory.master_srcmdl_xml( modelkey=self.model_name) print("Writing master ROI model to %s" % master_xml_mdl) master_roi.write_xml(master_xml_mdl) ret_dict['master'] = master_roi # Now deal with the components for comp in components: zcut = "zmax%i" % comp.zmax compkey = "%s_%s" % (zcut, comp.make_key( '{ebin_name}_{evtype_name}')) # name_keys = dict(zcut=zcut, # modelkey=self.model_name, # component=compkey) comp_roi_source_info = {} for comp_name, model_comp in sub_comp_sources.items(): comp_info = model_comp.info if comp_info.selection_dependent: key = comp.make_key('{ebin_name}_{evtype_name}') elif comp_info.moving: key = zcut info_clone = comp_info.components[key].clone_and_merge_sub(key) comp_roi_source_info[comp_name] =\ ModelComponent(info=info_clone, spectrum=model_comp.spectrum) # Build the xml for the component comp_roi = SourceFactory.make_roi(comp_roi_source_info) comp_xml_mdl = name_factory.comp_srcmdl_xml(modelkey=self.model_name, component=compkey) print("Writing component ROI model to %s" % comp_xml_mdl) comp_roi.write_xml(comp_xml_mdl) ret_dict[compkey] = comp_roi return ret_dict
python
def make_model_rois(self, components, name_factory): """ Make the fermipy roi_model objects for each of a set of binning components """ ret_dict = {} # Figure out which sources need to be split by components master_roi_source_info = {} sub_comp_sources = {} for comp_name, model_comp in self.model_components.items(): comp_info = model_comp.info if comp_info.components is None: master_roi_source_info[comp_name] = model_comp else: sub_comp_sources[comp_name] = model_comp # Build the xml for the master master_roi = SourceFactory.make_roi(master_roi_source_info) master_xml_mdl = name_factory.master_srcmdl_xml( modelkey=self.model_name) print("Writing master ROI model to %s" % master_xml_mdl) master_roi.write_xml(master_xml_mdl) ret_dict['master'] = master_roi # Now deal with the components for comp in components: zcut = "zmax%i" % comp.zmax compkey = "%s_%s" % (zcut, comp.make_key( '{ebin_name}_{evtype_name}')) # name_keys = dict(zcut=zcut, # modelkey=self.model_name, # component=compkey) comp_roi_source_info = {} for comp_name, model_comp in sub_comp_sources.items(): comp_info = model_comp.info if comp_info.selection_dependent: key = comp.make_key('{ebin_name}_{evtype_name}') elif comp_info.moving: key = zcut info_clone = comp_info.components[key].clone_and_merge_sub(key) comp_roi_source_info[comp_name] =\ ModelComponent(info=info_clone, spectrum=model_comp.spectrum) # Build the xml for the component comp_roi = SourceFactory.make_roi(comp_roi_source_info) comp_xml_mdl = name_factory.comp_srcmdl_xml(modelkey=self.model_name, component=compkey) print("Writing component ROI model to %s" % comp_xml_mdl) comp_roi.write_xml(comp_xml_mdl) ret_dict[compkey] = comp_roi return ret_dict
[ "def", "make_model_rois", "(", "self", ",", "components", ",", "name_factory", ")", ":", "ret_dict", "=", "{", "}", "# Figure out which sources need to be split by components", "master_roi_source_info", "=", "{", "}", "sub_comp_sources", "=", "{", "}", "for", "comp_na...
Make the fermipy roi_model objects for each of a set of binning components
[ "Make", "the", "fermipy", "roi_model", "objects", "for", "each", "of", "a", "set", "of", "binning", "components" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/model_manager.py#L127-L177
train
36,014
fermiPy/fermipy
fermipy/diffuse/model_manager.py
ModelManager.read_model_yaml
def read_model_yaml(self, modelkey): """ Read the yaml file for the diffuse components """ model_yaml = self._name_factory.model_yaml(modelkey=modelkey, fullpath=True) model = yaml.safe_load(open(model_yaml)) return model
python
def read_model_yaml(self, modelkey): """ Read the yaml file for the diffuse components """ model_yaml = self._name_factory.model_yaml(modelkey=modelkey, fullpath=True) model = yaml.safe_load(open(model_yaml)) return model
[ "def", "read_model_yaml", "(", "self", ",", "modelkey", ")", ":", "model_yaml", "=", "self", ".", "_name_factory", ".", "model_yaml", "(", "modelkey", "=", "modelkey", ",", "fullpath", "=", "True", ")", "model", "=", "yaml", ".", "safe_load", "(", "open", ...
Read the yaml file for the diffuse components
[ "Read", "the", "yaml", "file", "for", "the", "diffuse", "components" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/model_manager.py#L209-L215
train
36,015
fermiPy/fermipy
fermipy/diffuse/model_manager.py
ModelManager.make_library
def make_library(self, diffuse_yaml, catalog_yaml, binning_yaml): """ Build up the library of all the components Parameters ---------- diffuse_yaml : str Name of the yaml file with the library of diffuse component definitions catalog_yaml : str Name of the yaml file width the library of catalog split definitions binning_yaml : str Name of the yaml file with the binning definitions """ ret_dict = {} #catalog_dict = yaml.safe_load(open(catalog_yaml)) components_dict = Component.build_from_yamlfile(binning_yaml) diffuse_ret_dict = make_diffuse_comp_info_dict(GalpropMapManager=self._gmm, DiffuseModelManager=self._dmm, library=diffuse_yaml, components=components_dict) catalog_ret_dict = make_catalog_comp_dict(library=catalog_yaml, CatalogSourceManager=self._csm) ret_dict.update(diffuse_ret_dict['comp_info_dict']) ret_dict.update(catalog_ret_dict['comp_info_dict']) self._library.update(ret_dict) return ret_dict
python
def make_library(self, diffuse_yaml, catalog_yaml, binning_yaml): """ Build up the library of all the components Parameters ---------- diffuse_yaml : str Name of the yaml file with the library of diffuse component definitions catalog_yaml : str Name of the yaml file width the library of catalog split definitions binning_yaml : str Name of the yaml file with the binning definitions """ ret_dict = {} #catalog_dict = yaml.safe_load(open(catalog_yaml)) components_dict = Component.build_from_yamlfile(binning_yaml) diffuse_ret_dict = make_diffuse_comp_info_dict(GalpropMapManager=self._gmm, DiffuseModelManager=self._dmm, library=diffuse_yaml, components=components_dict) catalog_ret_dict = make_catalog_comp_dict(library=catalog_yaml, CatalogSourceManager=self._csm) ret_dict.update(diffuse_ret_dict['comp_info_dict']) ret_dict.update(catalog_ret_dict['comp_info_dict']) self._library.update(ret_dict) return ret_dict
[ "def", "make_library", "(", "self", ",", "diffuse_yaml", ",", "catalog_yaml", ",", "binning_yaml", ")", ":", "ret_dict", "=", "{", "}", "#catalog_dict = yaml.safe_load(open(catalog_yaml))", "components_dict", "=", "Component", ".", "build_from_yamlfile", "(", "binning_y...
Build up the library of all the components Parameters ---------- diffuse_yaml : str Name of the yaml file with the library of diffuse component definitions catalog_yaml : str Name of the yaml file width the library of catalog split definitions binning_yaml : str Name of the yaml file with the binning definitions
[ "Build", "up", "the", "library", "of", "all", "the", "components" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/model_manager.py#L232-L257
train
36,016
fermiPy/fermipy
fermipy/diffuse/model_manager.py
ModelManager.make_model_info
def make_model_info(self, modelkey): """ Build a dictionary with the information for a particular model. Parameters ---------- modelkey : str Key used to identify this particular model Return `ModelInfo` """ model = self.read_model_yaml(modelkey) sources = model['sources'] components = OrderedDict() spec_model_yaml = self._name_factory.fullpath(localpath=model['spectral_models']) self._spec_lib.update(yaml.safe_load(open(spec_model_yaml))) for source, source_info in sources.items(): model_type = source_info.get('model_type', None) par_overrides = source_info.get('par_overides', None) version = source_info['version'] spec_type = source_info['SpectrumType'] edisp_disable = source_info.get('edisp_disable', False) sourcekey = "%s_%s" % (source, version) if model_type == 'galprop_rings': comp_info_dict = self.gmm.diffuse_comp_info_dicts(version) def_spec_type = spec_type['default'] for comp_key, comp_info in comp_info_dict.items(): model_comp = ModelComponent(info=comp_info, spectrum=\ self._spec_lib[spec_type.get(comp_key, def_spec_type)], par_overrides=par_overrides, edisp_disable=edisp_disable) components[comp_key] = model_comp elif model_type == 'Catalog': comp_info_dict = self.csm.split_comp_info_dict(source, version) def_spec_type = spec_type['default'] for comp_key, comp_info in comp_info_dict.items(): model_comp = ModelComponent(info=comp_info, spectrum=\ self._spec_lib[spec_type.get(comp_key, def_spec_type)], par_overrides=par_overrides, edisp_disable=edisp_disable) components[comp_key] = model_comp else: comp_info = self.dmm.diffuse_comp_info(sourcekey) model_comp = ModelComponent(info=comp_info, spectrum=self._spec_lib[spec_type], par_overrides=par_overrides, edisp_disable=edisp_disable) components[sourcekey] = model_comp ret_val = ModelInfo(model_name=modelkey, model_components=components) self._models[modelkey] = ret_val return ret_val
python
def make_model_info(self, modelkey): """ Build a dictionary with the information for a particular model. Parameters ---------- modelkey : str Key used to identify this particular model Return `ModelInfo` """ model = self.read_model_yaml(modelkey) sources = model['sources'] components = OrderedDict() spec_model_yaml = self._name_factory.fullpath(localpath=model['spectral_models']) self._spec_lib.update(yaml.safe_load(open(spec_model_yaml))) for source, source_info in sources.items(): model_type = source_info.get('model_type', None) par_overrides = source_info.get('par_overides', None) version = source_info['version'] spec_type = source_info['SpectrumType'] edisp_disable = source_info.get('edisp_disable', False) sourcekey = "%s_%s" % (source, version) if model_type == 'galprop_rings': comp_info_dict = self.gmm.diffuse_comp_info_dicts(version) def_spec_type = spec_type['default'] for comp_key, comp_info in comp_info_dict.items(): model_comp = ModelComponent(info=comp_info, spectrum=\ self._spec_lib[spec_type.get(comp_key, def_spec_type)], par_overrides=par_overrides, edisp_disable=edisp_disable) components[comp_key] = model_comp elif model_type == 'Catalog': comp_info_dict = self.csm.split_comp_info_dict(source, version) def_spec_type = spec_type['default'] for comp_key, comp_info in comp_info_dict.items(): model_comp = ModelComponent(info=comp_info, spectrum=\ self._spec_lib[spec_type.get(comp_key, def_spec_type)], par_overrides=par_overrides, edisp_disable=edisp_disable) components[comp_key] = model_comp else: comp_info = self.dmm.diffuse_comp_info(sourcekey) model_comp = ModelComponent(info=comp_info, spectrum=self._spec_lib[spec_type], par_overrides=par_overrides, edisp_disable=edisp_disable) components[sourcekey] = model_comp ret_val = ModelInfo(model_name=modelkey, model_components=components) self._models[modelkey] = ret_val return ret_val
[ "def", "make_model_info", "(", "self", ",", "modelkey", ")", ":", "model", "=", "self", ".", "read_model_yaml", "(", "modelkey", ")", "sources", "=", "model", "[", "'sources'", "]", "components", "=", "OrderedDict", "(", ")", "spec_model_yaml", "=", "self", ...
Build a dictionary with the information for a particular model. Parameters ---------- modelkey : str Key used to identify this particular model Return `ModelInfo`
[ "Build", "a", "dictionary", "with", "the", "information", "for", "a", "particular", "model", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/model_manager.py#L259-L314
train
36,017
fermiPy/fermipy
fermipy/diffuse/model_manager.py
ModelManager.get_sub_comp_info
def get_sub_comp_info(source_info, comp): """Build and return information about a sub-component for a particular selection """ sub_comps = source_info.get('components', None) if sub_comps is None: return source_info.copy() moving = source_info.get('moving', False) selection_dependent = source_info.get('selection_dependent', False) if selection_dependent: key = comp.make_key('{ebin_name}_{evtype_name}') elif moving: key = "zmax%i" % comp.zmax ret_dict = source_info.copy() ret_dict.update(sub_comps[key]) return ret_dict
python
def get_sub_comp_info(source_info, comp): """Build and return information about a sub-component for a particular selection """ sub_comps = source_info.get('components', None) if sub_comps is None: return source_info.copy() moving = source_info.get('moving', False) selection_dependent = source_info.get('selection_dependent', False) if selection_dependent: key = comp.make_key('{ebin_name}_{evtype_name}') elif moving: key = "zmax%i" % comp.zmax ret_dict = source_info.copy() ret_dict.update(sub_comps[key]) return ret_dict
[ "def", "get_sub_comp_info", "(", "source_info", ",", "comp", ")", ":", "sub_comps", "=", "source_info", ".", "get", "(", "'components'", ",", "None", ")", "if", "sub_comps", "is", "None", ":", "return", "source_info", ".", "copy", "(", ")", "moving", "=", ...
Build and return information about a sub-component for a particular selection
[ "Build", "and", "return", "information", "about", "a", "sub", "-", "component", "for", "a", "particular", "selection" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/model_manager.py#L455-L469
train
36,018
fermiPy/fermipy
fermipy/validate/utils.py
replace_aliases
def replace_aliases(cut_dict, aliases): """Substitute aliases in a cut dictionary.""" for k, v in cut_dict.items(): for k0, v0 in aliases.items(): cut_dict[k] = cut_dict[k].replace(k0, '(%s)' % v0)
python
def replace_aliases(cut_dict, aliases): """Substitute aliases in a cut dictionary.""" for k, v in cut_dict.items(): for k0, v0 in aliases.items(): cut_dict[k] = cut_dict[k].replace(k0, '(%s)' % v0)
[ "def", "replace_aliases", "(", "cut_dict", ",", "aliases", ")", ":", "for", "k", ",", "v", "in", "cut_dict", ".", "items", "(", ")", ":", "for", "k0", ",", "v0", "in", "aliases", ".", "items", "(", ")", ":", "cut_dict", "[", "k", "]", "=", "cut_d...
Substitute aliases in a cut dictionary.
[ "Substitute", "aliases", "in", "a", "cut", "dictionary", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/validate/utils.py#L25-L29
train
36,019
fermiPy/fermipy
fermipy/validate/utils.py
get_files
def get_files(files, extnames=['.root']): """Extract a list of file paths from a list containing both paths and file lists with one path per line.""" files_out = [] for f in files: mime = mimetypes.guess_type(f) if os.path.splitext(f)[1] in extnames: files_out += [f] elif mime[0] == 'text/plain': files_out += list(np.loadtxt(f, unpack=True, dtype='str')) else: raise Exception('Unrecognized input type.') return files_out
python
def get_files(files, extnames=['.root']): """Extract a list of file paths from a list containing both paths and file lists with one path per line.""" files_out = [] for f in files: mime = mimetypes.guess_type(f) if os.path.splitext(f)[1] in extnames: files_out += [f] elif mime[0] == 'text/plain': files_out += list(np.loadtxt(f, unpack=True, dtype='str')) else: raise Exception('Unrecognized input type.') return files_out
[ "def", "get_files", "(", "files", ",", "extnames", "=", "[", "'.root'", "]", ")", ":", "files_out", "=", "[", "]", "for", "f", "in", "files", ":", "mime", "=", "mimetypes", ".", "guess_type", "(", "f", ")", "if", "os", ".", "path", ".", "splitext",...
Extract a list of file paths from a list containing both paths and file lists with one path per line.
[ "Extract", "a", "list", "of", "file", "paths", "from", "a", "list", "containing", "both", "paths", "and", "file", "lists", "with", "one", "path", "per", "line", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/validate/utils.py#L37-L52
train
36,020
fermiPy/fermipy
fermipy/validate/utils.py
get_cuts_from_xml
def get_cuts_from_xml(xmlfile): """Extract event selection strings from the XML file.""" root = ElementTree.ElementTree(file=xmlfile).getroot() event_maps = root.findall('EventMap') alias_maps = root.findall('AliasDict')[0] event_classes = {} event_types = {} event_aliases = {} for m in event_maps: if m.attrib['altName'] == 'EVENT_CLASS': for c in m.findall('EventCategory'): event_classes[c.attrib['name']] = strip( c.find('ShortCut').text) elif m.attrib['altName'] == 'EVENT_TYPE': for c in m.findall('EventCategory'): event_types[c.attrib['name']] = strip(c.find('ShortCut').text) for m in alias_maps.findall('Alias'): event_aliases[m.attrib['name']] = strip(m.text) replace_aliases(event_aliases, event_aliases.copy()) replace_aliases(event_aliases, event_aliases.copy()) replace_aliases(event_classes, event_aliases) replace_aliases(event_types, event_aliases) event_selections = {} event_selections.update(event_classes) event_selections.update(event_types) event_selections.update(event_aliases) return event_selections
python
def get_cuts_from_xml(xmlfile): """Extract event selection strings from the XML file.""" root = ElementTree.ElementTree(file=xmlfile).getroot() event_maps = root.findall('EventMap') alias_maps = root.findall('AliasDict')[0] event_classes = {} event_types = {} event_aliases = {} for m in event_maps: if m.attrib['altName'] == 'EVENT_CLASS': for c in m.findall('EventCategory'): event_classes[c.attrib['name']] = strip( c.find('ShortCut').text) elif m.attrib['altName'] == 'EVENT_TYPE': for c in m.findall('EventCategory'): event_types[c.attrib['name']] = strip(c.find('ShortCut').text) for m in alias_maps.findall('Alias'): event_aliases[m.attrib['name']] = strip(m.text) replace_aliases(event_aliases, event_aliases.copy()) replace_aliases(event_aliases, event_aliases.copy()) replace_aliases(event_classes, event_aliases) replace_aliases(event_types, event_aliases) event_selections = {} event_selections.update(event_classes) event_selections.update(event_types) event_selections.update(event_aliases) return event_selections
[ "def", "get_cuts_from_xml", "(", "xmlfile", ")", ":", "root", "=", "ElementTree", ".", "ElementTree", "(", "file", "=", "xmlfile", ")", ".", "getroot", "(", ")", "event_maps", "=", "root", ".", "findall", "(", "'EventMap'", ")", "alias_maps", "=", "root", ...
Extract event selection strings from the XML file.
[ "Extract", "event", "selection", "strings", "from", "the", "XML", "file", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/validate/utils.py#L84-L117
train
36,021
fermiPy/fermipy
fermipy/validate/utils.py
set_event_list
def set_event_list(tree, selection=None, fraction=None, start_fraction=None): """ Set the event list for a tree or chain. Parameters ---------- tree : `ROOT.TTree` Input tree/chain. selection : str Cut string defining the event list. fraction : float Fraction of the total file to include in the event list starting from the *end* of the file. """ import ROOT elist = rand_str() if selection is None: cuts = '' else: cuts = selection if fraction is None or fraction >= 1.0: n = tree.Draw(">>%s" % elist, cuts, "goff") tree.SetEventList(ROOT.gDirectory.Get(elist)) elif start_fraction is None: nentries = int(tree.GetEntries()) first_entry = min(int((1.0 - fraction) * nentries), nentries) n = tree.Draw(">>%s" % elist, cuts, "goff", nentries, first_entry) tree.SetEventList(ROOT.gDirectory.Get(elist)) else: nentries = int(tree.GetEntries()) first_entry = min(int(start_fraction * nentries), nentries) n = first_entry + int(nentries * fraction) n = tree.Draw(">>%s" % elist, cuts, "goff", n - first_entry, first_entry) tree.SetEventList(ROOT.gDirectory.Get(elist)) return n
python
def set_event_list(tree, selection=None, fraction=None, start_fraction=None): """ Set the event list for a tree or chain. Parameters ---------- tree : `ROOT.TTree` Input tree/chain. selection : str Cut string defining the event list. fraction : float Fraction of the total file to include in the event list starting from the *end* of the file. """ import ROOT elist = rand_str() if selection is None: cuts = '' else: cuts = selection if fraction is None or fraction >= 1.0: n = tree.Draw(">>%s" % elist, cuts, "goff") tree.SetEventList(ROOT.gDirectory.Get(elist)) elif start_fraction is None: nentries = int(tree.GetEntries()) first_entry = min(int((1.0 - fraction) * nentries), nentries) n = tree.Draw(">>%s" % elist, cuts, "goff", nentries, first_entry) tree.SetEventList(ROOT.gDirectory.Get(elist)) else: nentries = int(tree.GetEntries()) first_entry = min(int(start_fraction * nentries), nentries) n = first_entry + int(nentries * fraction) n = tree.Draw(">>%s" % elist, cuts, "goff", n - first_entry, first_entry) tree.SetEventList(ROOT.gDirectory.Get(elist)) return n
[ "def", "set_event_list", "(", "tree", ",", "selection", "=", "None", ",", "fraction", "=", "None", ",", "start_fraction", "=", "None", ")", ":", "import", "ROOT", "elist", "=", "rand_str", "(", ")", "if", "selection", "is", "None", ":", "cuts", "=", "'...
Set the event list for a tree or chain. Parameters ---------- tree : `ROOT.TTree` Input tree/chain. selection : str Cut string defining the event list. fraction : float Fraction of the total file to include in the event list starting from the *end* of the file.
[ "Set", "the", "event", "list", "for", "a", "tree", "or", "chain", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/validate/utils.py#L120-L160
train
36,022
fermiPy/fermipy
fermipy/sourcefind.py
SourceFind.localize
def localize(self, name, **kwargs): """Find the best-fit position of a source. Localization is performed in two steps. First a TS map is computed centered on the source with half-width set by ``dtheta_max``. A fit is then performed to the maximum TS peak in this map. The source position is then further refined by scanning the likelihood in the vicinity of the peak found in the first step. The size of the scan region is set to encompass the 99% positional uncertainty contour as determined from the peak fit. Parameters ---------- name : str Source name. {options} optimizer : dict Dictionary that overrides the default optimizer settings. Returns ------- localize : dict Dictionary containing results of the localization analysis. """ timer = Timer.create(start=True) name = self.roi.get_source_by_name(name).name schema = ConfigSchema(self.defaults['localize'], optimizer=self.defaults['optimizer']) schema.add_option('use_cache', True) schema.add_option('prefix', '') config = utils.create_dict(self.config['localize'], optimizer=self.config['optimizer']) config = schema.create_config(config, **kwargs) self.logger.info('Running localization for %s' % name) free_state = FreeParameterState(self) loc = self._localize(name, **config) free_state.restore() self.logger.info('Finished localization.') if config['make_plots']: self._plotter.make_localization_plots(loc, self.roi, prefix=config['prefix']) outfile = \ utils.format_filename(self.workdir, 'loc', prefix=[config['prefix'], name.lower().replace(' ', '_')]) if config['write_fits']: loc['file'] = os.path.basename(outfile) + '.fits' self._make_localize_fits(loc, outfile + '.fits', **config) if config['write_npy']: np.save(outfile + '.npy', dict(loc)) self.logger.info('Execution time: %.2f s', timer.elapsed_time) return loc
python
def localize(self, name, **kwargs): """Find the best-fit position of a source. Localization is performed in two steps. First a TS map is computed centered on the source with half-width set by ``dtheta_max``. A fit is then performed to the maximum TS peak in this map. The source position is then further refined by scanning the likelihood in the vicinity of the peak found in the first step. The size of the scan region is set to encompass the 99% positional uncertainty contour as determined from the peak fit. Parameters ---------- name : str Source name. {options} optimizer : dict Dictionary that overrides the default optimizer settings. Returns ------- localize : dict Dictionary containing results of the localization analysis. """ timer = Timer.create(start=True) name = self.roi.get_source_by_name(name).name schema = ConfigSchema(self.defaults['localize'], optimizer=self.defaults['optimizer']) schema.add_option('use_cache', True) schema.add_option('prefix', '') config = utils.create_dict(self.config['localize'], optimizer=self.config['optimizer']) config = schema.create_config(config, **kwargs) self.logger.info('Running localization for %s' % name) free_state = FreeParameterState(self) loc = self._localize(name, **config) free_state.restore() self.logger.info('Finished localization.') if config['make_plots']: self._plotter.make_localization_plots(loc, self.roi, prefix=config['prefix']) outfile = \ utils.format_filename(self.workdir, 'loc', prefix=[config['prefix'], name.lower().replace(' ', '_')]) if config['write_fits']: loc['file'] = os.path.basename(outfile) + '.fits' self._make_localize_fits(loc, outfile + '.fits', **config) if config['write_npy']: np.save(outfile + '.npy', dict(loc)) self.logger.info('Execution time: %.2f s', timer.elapsed_time) return loc
[ "def", "localize", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "timer", "=", "Timer", ".", "create", "(", "start", "=", "True", ")", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", ".", "name", "schema...
Find the best-fit position of a source. Localization is performed in two steps. First a TS map is computed centered on the source with half-width set by ``dtheta_max``. A fit is then performed to the maximum TS peak in this map. The source position is then further refined by scanning the likelihood in the vicinity of the peak found in the first step. The size of the scan region is set to encompass the 99% positional uncertainty contour as determined from the peak fit. Parameters ---------- name : str Source name. {options} optimizer : dict Dictionary that overrides the default optimizer settings. Returns ------- localize : dict Dictionary containing results of the localization analysis.
[ "Find", "the", "best", "-", "fit", "position", "of", "a", "source", ".", "Localization", "is", "performed", "in", "two", "steps", ".", "First", "a", "TS", "map", "is", "computed", "centered", "on", "the", "source", "with", "half", "-", "width", "set", ...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/sourcefind.py#L237-L301
train
36,023
fermiPy/fermipy
fermipy/sourcefind.py
SourceFind._fit_position_tsmap
def _fit_position_tsmap(self, name, **kwargs): """Localize a source from its TS map.""" prefix = kwargs.get('prefix', '') dtheta_max = kwargs.get('dtheta_max', 0.5) zmin = kwargs.get('zmin', -3.0) kw = { 'map_size': 2.0 * dtheta_max, 'write_fits': kwargs.get('write_fits', False), 'write_npy': kwargs.get('write_npy', False), 'use_pylike': kwargs.get('use_pylike', True), 'max_kernel_radius': self.config['tsmap']['max_kernel_radius'], 'loglevel': logging.DEBUG } src = self.roi.copy_source(name) if src['SpatialModel'] in ['RadialDisk', 'RadialGaussian']: kw['max_kernel_radius'] = max(kw['max_kernel_radius'], 2.0 * src['SpatialWidth']) skydir = kwargs.get('skydir', src.skydir) tsmap = self.tsmap(utils.join_strings([prefix, name.lower(). replace(' ', '_')]), model=src.data, map_skydir=skydir, exclude=[name], make_plots=False, **kw) # Find peaks with TS > 4 peaks = find_peaks(tsmap['ts'], 4.0, 0.2) peak_best = None o = {} for p in sorted(peaks, key=lambda t: t['amp'], reverse=True): xy = p['ix'], p['iy'] ts_value = tsmap['ts'].data[xy[1], xy[0]] posfit = fit_error_ellipse(tsmap['ts'], xy=xy, dpix=2, zmin=max(zmin, -ts_value * 0.5)) offset = posfit['skydir'].separation(self.roi[name].skydir).deg if posfit['fit_success'] and posfit['fit_inbounds']: peak_best = p break if peak_best is None: ts_value = np.max(tsmap['ts'].data) posfit = fit_error_ellipse(tsmap['ts'], dpix=2, zmin=max(zmin, -ts_value * 0.5)) o.update(posfit) pix = posfit['skydir'].to_pixel(self.geom.wcs) o['xpix'] = float(pix[0]) o['ypix'] = float(pix[1]) o['skydir'] = posfit['skydir'].transform_to('icrs') o['pos_offset'] = posfit['skydir'].separation( self.roi[name].skydir).deg o['loglike'] = 0.5 * posfit['zoffset'] o['tsmap'] = tsmap['ts'] return o
python
def _fit_position_tsmap(self, name, **kwargs): """Localize a source from its TS map.""" prefix = kwargs.get('prefix', '') dtheta_max = kwargs.get('dtheta_max', 0.5) zmin = kwargs.get('zmin', -3.0) kw = { 'map_size': 2.0 * dtheta_max, 'write_fits': kwargs.get('write_fits', False), 'write_npy': kwargs.get('write_npy', False), 'use_pylike': kwargs.get('use_pylike', True), 'max_kernel_radius': self.config['tsmap']['max_kernel_radius'], 'loglevel': logging.DEBUG } src = self.roi.copy_source(name) if src['SpatialModel'] in ['RadialDisk', 'RadialGaussian']: kw['max_kernel_radius'] = max(kw['max_kernel_radius'], 2.0 * src['SpatialWidth']) skydir = kwargs.get('skydir', src.skydir) tsmap = self.tsmap(utils.join_strings([prefix, name.lower(). replace(' ', '_')]), model=src.data, map_skydir=skydir, exclude=[name], make_plots=False, **kw) # Find peaks with TS > 4 peaks = find_peaks(tsmap['ts'], 4.0, 0.2) peak_best = None o = {} for p in sorted(peaks, key=lambda t: t['amp'], reverse=True): xy = p['ix'], p['iy'] ts_value = tsmap['ts'].data[xy[1], xy[0]] posfit = fit_error_ellipse(tsmap['ts'], xy=xy, dpix=2, zmin=max(zmin, -ts_value * 0.5)) offset = posfit['skydir'].separation(self.roi[name].skydir).deg if posfit['fit_success'] and posfit['fit_inbounds']: peak_best = p break if peak_best is None: ts_value = np.max(tsmap['ts'].data) posfit = fit_error_ellipse(tsmap['ts'], dpix=2, zmin=max(zmin, -ts_value * 0.5)) o.update(posfit) pix = posfit['skydir'].to_pixel(self.geom.wcs) o['xpix'] = float(pix[0]) o['ypix'] = float(pix[1]) o['skydir'] = posfit['skydir'].transform_to('icrs') o['pos_offset'] = posfit['skydir'].separation( self.roi[name].skydir).deg o['loglike'] = 0.5 * posfit['zoffset'] o['tsmap'] = tsmap['ts'] return o
[ "def", "_fit_position_tsmap", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "prefix", "=", "kwargs", ".", "get", "(", "'prefix'", ",", "''", ")", "dtheta_max", "=", "kwargs", ".", "get", "(", "'dtheta_max'", ",", "0.5", ")", "zmin", "=...
Localize a source from its TS map.
[ "Localize", "a", "source", "from", "its", "TS", "map", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/sourcefind.py#L503-L562
train
36,024
fermiPy/fermipy
fermipy/jobs/slac_impl.py
get_lsf_status
def get_lsf_status(): """Count and print the number of jobs in various LSF states """ status_count = {'RUN': 0, 'PEND': 0, 'SUSP': 0, 'USUSP': 0, 'NJOB': 0, 'UNKNWN': 0} try: subproc = subprocess.Popen(['bjobs'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) subproc.stderr.close() output = subproc.stdout.readlines() except OSError: return status_count for line in output[1:]: line = line.strip().split() # Protect against format of multiproc jobs if len(line) < 5: continue status_count['NJOB'] += 1 for k in status_count: if line[2] == k: status_count[k] += 1 return status_count
python
def get_lsf_status(): """Count and print the number of jobs in various LSF states """ status_count = {'RUN': 0, 'PEND': 0, 'SUSP': 0, 'USUSP': 0, 'NJOB': 0, 'UNKNWN': 0} try: subproc = subprocess.Popen(['bjobs'], stdout=subprocess.PIPE, stderr=subprocess.PIPE) subproc.stderr.close() output = subproc.stdout.readlines() except OSError: return status_count for line in output[1:]: line = line.strip().split() # Protect against format of multiproc jobs if len(line) < 5: continue status_count['NJOB'] += 1 for k in status_count: if line[2] == k: status_count[k] += 1 return status_count
[ "def", "get_lsf_status", "(", ")", ":", "status_count", "=", "{", "'RUN'", ":", "0", ",", "'PEND'", ":", "0", ",", "'SUSP'", ":", "0", ",", "'USUSP'", ":", "0", ",", "'NJOB'", ":", "0", ",", "'UNKNWN'", ":", "0", "}", "try", ":", "subproc", "=", ...
Count and print the number of jobs in various LSF states
[ "Count", "and", "print", "the", "number", "of", "jobs", "in", "various", "LSF", "states" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/slac_impl.py#L44-L75
train
36,025
fermiPy/fermipy
fermipy/jobs/slac_impl.py
build_bsub_command
def build_bsub_command(command_template, lsf_args): """Build and return a lsf batch command template The structure will be 'bsub -s <key> <value> <command_template>' where <key> and <value> refer to items in lsf_args """ if command_template is None: return "" full_command = 'bsub -o {logfile}' for key, value in lsf_args.items(): full_command += ' -%s' % key if value is not None: full_command += ' %s' % value full_command += ' %s' % command_template return full_command
python
def build_bsub_command(command_template, lsf_args): """Build and return a lsf batch command template The structure will be 'bsub -s <key> <value> <command_template>' where <key> and <value> refer to items in lsf_args """ if command_template is None: return "" full_command = 'bsub -o {logfile}' for key, value in lsf_args.items(): full_command += ' -%s' % key if value is not None: full_command += ' %s' % value full_command += ' %s' % command_template return full_command
[ "def", "build_bsub_command", "(", "command_template", ",", "lsf_args", ")", ":", "if", "command_template", "is", "None", ":", "return", "\"\"", "full_command", "=", "'bsub -o {logfile}'", "for", "key", ",", "value", "in", "lsf_args", ".", "items", "(", ")", ":...
Build and return a lsf batch command template The structure will be 'bsub -s <key> <value> <command_template>' where <key> and <value> refer to items in lsf_args
[ "Build", "and", "return", "a", "lsf", "batch", "command", "template" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/slac_impl.py#L78-L92
train
36,026
fermiPy/fermipy
fermipy/jobs/slac_impl.py
SlacInterface.dispatch_job_hook
def dispatch_job_hook(self, link, key, job_config, logfile, stream=sys.stdout): """Send a single job to the LSF batch Parameters ---------- link : `fermipy.jobs.chain.Link` The link used to invoke the command we are running key : str A string that identifies this particular instance of the job job_config : dict A dictionrary with the arguments for the job. Used with the self._command_template job template logfile : str The logfile for this job, may be used to check for success/ failure """ full_sub_dict = job_config.copy() if self._no_batch: full_command = "%s >& %s" % ( link.command_template().format(**full_sub_dict), logfile) else: full_sub_dict['logfile'] = logfile full_command_template = build_bsub_command( link.command_template(), self._lsf_args) full_command = full_command_template.format(**full_sub_dict) logdir = os.path.dirname(logfile) print_bsub = True if self._dry_run: if print_bsub: stream.write("%s\n" % full_command) return 0 try: os.makedirs(logdir) except OSError: pass proc = subprocess.Popen(full_command.split(), stderr=stream, stdout=stream) proc.communicate() return proc.returncode
python
def dispatch_job_hook(self, link, key, job_config, logfile, stream=sys.stdout): """Send a single job to the LSF batch Parameters ---------- link : `fermipy.jobs.chain.Link` The link used to invoke the command we are running key : str A string that identifies this particular instance of the job job_config : dict A dictionrary with the arguments for the job. Used with the self._command_template job template logfile : str The logfile for this job, may be used to check for success/ failure """ full_sub_dict = job_config.copy() if self._no_batch: full_command = "%s >& %s" % ( link.command_template().format(**full_sub_dict), logfile) else: full_sub_dict['logfile'] = logfile full_command_template = build_bsub_command( link.command_template(), self._lsf_args) full_command = full_command_template.format(**full_sub_dict) logdir = os.path.dirname(logfile) print_bsub = True if self._dry_run: if print_bsub: stream.write("%s\n" % full_command) return 0 try: os.makedirs(logdir) except OSError: pass proc = subprocess.Popen(full_command.split(), stderr=stream, stdout=stream) proc.communicate() return proc.returncode
[ "def", "dispatch_job_hook", "(", "self", ",", "link", ",", "key", ",", "job_config", ",", "logfile", ",", "stream", "=", "sys", ".", "stdout", ")", ":", "full_sub_dict", "=", "job_config", ".", "copy", "(", ")", "if", "self", ".", "_no_batch", ":", "fu...
Send a single job to the LSF batch Parameters ---------- link : `fermipy.jobs.chain.Link` The link used to invoke the command we are running key : str A string that identifies this particular instance of the job job_config : dict A dictionrary with the arguments for the job. Used with the self._command_template job template logfile : str The logfile for this job, may be used to check for success/ failure
[ "Send", "a", "single", "job", "to", "the", "LSF", "batch" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/slac_impl.py#L128-L174
train
36,027
fermiPy/fermipy
fermipy/jobs/slac_impl.py
SlacInterface.submit_jobs
def submit_jobs(self, link, job_dict=None, job_archive=None, stream=sys.stdout): """Submit all the jobs in job_dict """ if link is None: return JobStatus.no_job if job_dict is None: job_keys = link.jobs.keys() else: job_keys = sorted(job_dict.keys()) # copy & reverse the keys b/c we will be popping item off the back of # the list unsubmitted_jobs = job_keys unsubmitted_jobs.reverse() failed = False if unsubmitted_jobs: if stream != sys.stdout: sys.stdout.write('Submitting jobs (%i): ' % len(unsubmitted_jobs)) sys.stdout.flush() while unsubmitted_jobs: status = get_lsf_status() njob_to_submit = min(self._max_jobs - status['NJOB'], self._jobs_per_cycle, len(unsubmitted_jobs)) if self._dry_run: njob_to_submit = len(unsubmitted_jobs) for i in range(njob_to_submit): job_key = unsubmitted_jobs.pop() # job_details = job_dict[job_key] job_details = link.jobs[job_key] job_config = job_details.job_config if job_details.status == JobStatus.failed: clean_job(job_details.logfile, {}, self._dry_run) # clean_job(job_details.logfile, # job_details.outfiles, self.args['dry_run']) job_config['logfile'] = job_details.logfile new_job_details = self.dispatch_job( link, job_key, job_archive, stream) if new_job_details.status == JobStatus.failed: failed = True clean_job(new_job_details.logfile, new_job_details.outfiles, self._dry_run) link.jobs[job_key] = new_job_details if unsubmitted_jobs: if stream != sys.stdout: sys.stdout.write('.') sys.stdout.flush() stream.write('Sleeping %.0f seconds between submission cycles\n' % self._time_per_cycle) time.sleep(self._time_per_cycle) if failed: return JobStatus.failed if stream != sys.stdout: sys.stdout.write('!\n') return JobStatus.done
python
def submit_jobs(self, link, job_dict=None, job_archive=None, stream=sys.stdout): """Submit all the jobs in job_dict """ if link is None: return JobStatus.no_job if job_dict is None: job_keys = link.jobs.keys() else: job_keys = sorted(job_dict.keys()) # copy & reverse the keys b/c we will be popping item off the back of # the list unsubmitted_jobs = job_keys unsubmitted_jobs.reverse() failed = False if unsubmitted_jobs: if stream != sys.stdout: sys.stdout.write('Submitting jobs (%i): ' % len(unsubmitted_jobs)) sys.stdout.flush() while unsubmitted_jobs: status = get_lsf_status() njob_to_submit = min(self._max_jobs - status['NJOB'], self._jobs_per_cycle, len(unsubmitted_jobs)) if self._dry_run: njob_to_submit = len(unsubmitted_jobs) for i in range(njob_to_submit): job_key = unsubmitted_jobs.pop() # job_details = job_dict[job_key] job_details = link.jobs[job_key] job_config = job_details.job_config if job_details.status == JobStatus.failed: clean_job(job_details.logfile, {}, self._dry_run) # clean_job(job_details.logfile, # job_details.outfiles, self.args['dry_run']) job_config['logfile'] = job_details.logfile new_job_details = self.dispatch_job( link, job_key, job_archive, stream) if new_job_details.status == JobStatus.failed: failed = True clean_job(new_job_details.logfile, new_job_details.outfiles, self._dry_run) link.jobs[job_key] = new_job_details if unsubmitted_jobs: if stream != sys.stdout: sys.stdout.write('.') sys.stdout.flush() stream.write('Sleeping %.0f seconds between submission cycles\n' % self._time_per_cycle) time.sleep(self._time_per_cycle) if failed: return JobStatus.failed if stream != sys.stdout: sys.stdout.write('!\n') return JobStatus.done
[ "def", "submit_jobs", "(", "self", ",", "link", ",", "job_dict", "=", "None", ",", "job_archive", "=", "None", ",", "stream", "=", "sys", ".", "stdout", ")", ":", "if", "link", "is", "None", ":", "return", "JobStatus", ".", "no_job", "if", "job_dict", ...
Submit all the jobs in job_dict
[ "Submit", "all", "the", "jobs", "in", "job_dict" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/slac_impl.py#L176-L239
train
36,028
fermiPy/fermipy
fermipy/gtanalysis.py
create_sc_table
def create_sc_table(scfile, colnames=None): """Load an FT2 file from a file or list of files.""" if utils.is_fits_file(scfile) and colnames is None: return create_table_from_fits(scfile, 'SC_DATA') if utils.is_fits_file(scfile): files = [scfile] else: files = [line.strip() for line in open(scfile, 'r')] tables = [create_table_from_fits(f, 'SC_DATA', colnames) for f in files] return vstack(tables)
python
def create_sc_table(scfile, colnames=None): """Load an FT2 file from a file or list of files.""" if utils.is_fits_file(scfile) and colnames is None: return create_table_from_fits(scfile, 'SC_DATA') if utils.is_fits_file(scfile): files = [scfile] else: files = [line.strip() for line in open(scfile, 'r')] tables = [create_table_from_fits(f, 'SC_DATA', colnames) for f in files] return vstack(tables)
[ "def", "create_sc_table", "(", "scfile", ",", "colnames", "=", "None", ")", ":", "if", "utils", ".", "is_fits_file", "(", "scfile", ")", "and", "colnames", "is", "None", ":", "return", "create_table_from_fits", "(", "scfile", ",", "'SC_DATA'", ")", "if", "...
Load an FT2 file from a file or list of files.
[ "Load", "an", "FT2", "file", "from", "a", "file", "or", "list", "of", "files", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L134-L148
train
36,029
fermiPy/fermipy
fermipy/gtanalysis.py
create_table_from_fits
def create_table_from_fits(fitsfile, hduname, colnames=None): """Memory efficient function for loading a table from a FITS file.""" if colnames is None: return Table.read(fitsfile, hduname) cols = [] with fits.open(fitsfile, memmap=True) as h: for k in colnames: data = h[hduname].data.field(k) cols += [Column(name=k, data=data)] return Table(cols)
python
def create_table_from_fits(fitsfile, hduname, colnames=None): """Memory efficient function for loading a table from a FITS file.""" if colnames is None: return Table.read(fitsfile, hduname) cols = [] with fits.open(fitsfile, memmap=True) as h: for k in colnames: data = h[hduname].data.field(k) cols += [Column(name=k, data=data)] return Table(cols)
[ "def", "create_table_from_fits", "(", "fitsfile", ",", "hduname", ",", "colnames", "=", "None", ")", ":", "if", "colnames", "is", "None", ":", "return", "Table", ".", "read", "(", "fitsfile", ",", "hduname", ")", "cols", "=", "[", "]", "with", "fits", ...
Memory efficient function for loading a table from a FITS file.
[ "Memory", "efficient", "function", "for", "loading", "a", "table", "from", "a", "FITS", "file", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L151-L163
train
36,030
fermiPy/fermipy
fermipy/gtanalysis.py
get_spectral_index
def get_spectral_index(src, egy): """Compute the local spectral index of a source.""" delta = 1E-5 f0 = src.spectrum()(pyLike.dArg(egy * (1 - delta))) f1 = src.spectrum()(pyLike.dArg(egy * (1 + delta))) if f0 > 0 and f1 > 0: gamma = np.log10(f0 / f1) / np.log10((1 - delta) / (1 + delta)) else: gamma = np.nan return gamma
python
def get_spectral_index(src, egy): """Compute the local spectral index of a source.""" delta = 1E-5 f0 = src.spectrum()(pyLike.dArg(egy * (1 - delta))) f1 = src.spectrum()(pyLike.dArg(egy * (1 + delta))) if f0 > 0 and f1 > 0: gamma = np.log10(f0 / f1) / np.log10((1 - delta) / (1 + delta)) else: gamma = np.nan return gamma
[ "def", "get_spectral_index", "(", "src", ",", "egy", ")", ":", "delta", "=", "1E-5", "f0", "=", "src", ".", "spectrum", "(", ")", "(", "pyLike", ".", "dArg", "(", "egy", "*", "(", "1", "-", "delta", ")", ")", ")", "f1", "=", "src", ".", "spectr...
Compute the local spectral index of a source.
[ "Compute", "the", "local", "spectral", "index", "of", "a", "source", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L166-L177
train
36,031
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.create
def create(cls, infile, config=None, params=None, mask=None): """Create a new instance of GTAnalysis from an analysis output file generated with `~fermipy.GTAnalysis.write_roi`. By default the new instance will inherit the configuration of the saved analysis instance. The configuration may be overriden by passing a configuration file path with the ``config`` argument. Parameters ---------- infile : str Path to the ROI results file. config : str Path to a configuration file. This will override the configuration in the ROI results file. params : str Path to a yaml file with updated parameter values mask : str Path to a fits file with an updated mask """ infile = os.path.abspath(infile) roi_file, roi_data = utils.load_data(infile) if config is None: config = roi_data['config'] validate = False else: validate = True gta = cls(config, validate=validate) gta.setup(init_sources=False) gta.load_roi(infile, params=params, mask=mask) return gta
python
def create(cls, infile, config=None, params=None, mask=None): """Create a new instance of GTAnalysis from an analysis output file generated with `~fermipy.GTAnalysis.write_roi`. By default the new instance will inherit the configuration of the saved analysis instance. The configuration may be overriden by passing a configuration file path with the ``config`` argument. Parameters ---------- infile : str Path to the ROI results file. config : str Path to a configuration file. This will override the configuration in the ROI results file. params : str Path to a yaml file with updated parameter values mask : str Path to a fits file with an updated mask """ infile = os.path.abspath(infile) roi_file, roi_data = utils.load_data(infile) if config is None: config = roi_data['config'] validate = False else: validate = True gta = cls(config, validate=validate) gta.setup(init_sources=False) gta.load_roi(infile, params=params, mask=mask) return gta
[ "def", "create", "(", "cls", ",", "infile", ",", "config", "=", "None", ",", "params", "=", "None", ",", "mask", "=", "None", ")", ":", "infile", "=", "os", ".", "path", ".", "abspath", "(", "infile", ")", "roi_file", ",", "roi_data", "=", "utils",...
Create a new instance of GTAnalysis from an analysis output file generated with `~fermipy.GTAnalysis.write_roi`. By default the new instance will inherit the configuration of the saved analysis instance. The configuration may be overriden by passing a configuration file path with the ``config`` argument. Parameters ---------- infile : str Path to the ROI results file. config : str Path to a configuration file. This will override the configuration in the ROI results file. params : str Path to a yaml file with updated parameter values mask : str Path to a fits file with an updated mask
[ "Create", "a", "new", "instance", "of", "GTAnalysis", "from", "an", "analysis", "output", "file", "generated", "with", "~fermipy", ".", "GTAnalysis", ".", "write_roi", ".", "By", "default", "the", "new", "instance", "will", "inherit", "the", "configuration", "...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L511-L549
train
36,032
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.clone
def clone(self, config, **kwargs): """Make a clone of this analysis instance.""" gta = GTAnalysis(config, **kwargs) gta._roi = copy.deepcopy(self.roi) return gta
python
def clone(self, config, **kwargs): """Make a clone of this analysis instance.""" gta = GTAnalysis(config, **kwargs) gta._roi = copy.deepcopy(self.roi) return gta
[ "def", "clone", "(", "self", ",", "config", ",", "*", "*", "kwargs", ")", ":", "gta", "=", "GTAnalysis", "(", "config", ",", "*", "*", "kwargs", ")", "gta", ".", "_roi", "=", "copy", ".", "deepcopy", "(", "self", ".", "roi", ")", "return", "gta" ...
Make a clone of this analysis instance.
[ "Make", "a", "clone", "of", "this", "analysis", "instance", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L551-L555
train
36,033
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_random_seed
def set_random_seed(self, seed): """Set the seed for the random number generator""" self.config['mc']['seed'] = seed np.random.seed(seed)
python
def set_random_seed(self, seed): """Set the seed for the random number generator""" self.config['mc']['seed'] = seed np.random.seed(seed)
[ "def", "set_random_seed", "(", "self", ",", "seed", ")", ":", "self", ".", "config", "[", "'mc'", "]", "[", "'seed'", "]", "=", "seed", "np", ".", "random", ".", "seed", "(", "seed", ")" ]
Set the seed for the random number generator
[ "Set", "the", "seed", "for", "the", "random", "number", "generator" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L557-L560
train
36,034
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.reload_source
def reload_source(self, name, init_source=True): """Delete and reload a source in the model. This will update the spatial model of this source to the one defined in the XML model.""" for c in self.components: c.reload_source(name) if init_source: self._init_source(name) self.like.model = self.like.components[0].model
python
def reload_source(self, name, init_source=True): """Delete and reload a source in the model. This will update the spatial model of this source to the one defined in the XML model.""" for c in self.components: c.reload_source(name) if init_source: self._init_source(name) self.like.model = self.like.components[0].model
[ "def", "reload_source", "(", "self", ",", "name", ",", "init_source", "=", "True", ")", ":", "for", "c", "in", "self", ".", "components", ":", "c", ".", "reload_source", "(", "name", ")", "if", "init_source", ":", "self", ".", "_init_source", "(", "nam...
Delete and reload a source in the model. This will update the spatial model of this source to the one defined in the XML model.
[ "Delete", "and", "reload", "a", "source", "in", "the", "model", ".", "This", "will", "update", "the", "spatial", "model", "of", "this", "source", "to", "the", "one", "defined", "in", "the", "XML", "model", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L630-L641
train
36,035
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_source_morphology
def set_source_morphology(self, name, **kwargs): """Set the spatial model of a source. Parameters ---------- name : str Source name. spatial_model : str Spatial model name (PointSource, RadialGaussian, etc.). spatial_pars : dict Dictionary of spatial parameters (optional). use_cache : bool Generate the spatial model by interpolating the cached source map. use_pylike : bool """ name = self.roi.get_source_by_name(name).name src = self.roi[name] spatial_model = kwargs.get('spatial_model', src['SpatialModel']) spatial_pars = kwargs.get('spatial_pars', {}) use_pylike = kwargs.get('use_pylike', True) psf_scale_fn = kwargs.get('psf_scale_fn', None) update_source = kwargs.get('update_source', False) if hasattr(pyLike.BinnedLikelihood, 'setSourceMapImage') and not use_pylike: src.set_spatial_model(spatial_model, spatial_pars) self._update_srcmap(src.name, src, psf_scale_fn=psf_scale_fn) else: src = self.delete_source(name, loglevel=logging.DEBUG, save_template=False) src.set_spatial_model(spatial_model, spatial_pars) self.add_source(src.name, src, init_source=False, use_pylike=use_pylike, loglevel=logging.DEBUG) if update_source: self.update_source(name)
python
def set_source_morphology(self, name, **kwargs): """Set the spatial model of a source. Parameters ---------- name : str Source name. spatial_model : str Spatial model name (PointSource, RadialGaussian, etc.). spatial_pars : dict Dictionary of spatial parameters (optional). use_cache : bool Generate the spatial model by interpolating the cached source map. use_pylike : bool """ name = self.roi.get_source_by_name(name).name src = self.roi[name] spatial_model = kwargs.get('spatial_model', src['SpatialModel']) spatial_pars = kwargs.get('spatial_pars', {}) use_pylike = kwargs.get('use_pylike', True) psf_scale_fn = kwargs.get('psf_scale_fn', None) update_source = kwargs.get('update_source', False) if hasattr(pyLike.BinnedLikelihood, 'setSourceMapImage') and not use_pylike: src.set_spatial_model(spatial_model, spatial_pars) self._update_srcmap(src.name, src, psf_scale_fn=psf_scale_fn) else: src = self.delete_source(name, loglevel=logging.DEBUG, save_template=False) src.set_spatial_model(spatial_model, spatial_pars) self.add_source(src.name, src, init_source=False, use_pylike=use_pylike, loglevel=logging.DEBUG) if update_source: self.update_source(name)
[ "def", "set_source_morphology", "(", "self", ",", "name", ",", "*", "*", "kwargs", ")", ":", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", ".", "name", "src", "=", "self", ".", "roi", "[", "name", "]", "spatial_model", ...
Set the spatial model of a source. Parameters ---------- name : str Source name. spatial_model : str Spatial model name (PointSource, RadialGaussian, etc.). spatial_pars : dict Dictionary of spatial parameters (optional). use_cache : bool Generate the spatial model by interpolating the cached source map. use_pylike : bool
[ "Set", "the", "spatial", "model", "of", "a", "source", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L654-L696
train
36,036
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_source_spectrum
def set_source_spectrum(self, name, spectrum_type='PowerLaw', spectrum_pars=None, update_source=True): """Set the spectral model of a source. This function can be used to change the spectral type of a source or modify its spectral parameters. If called with spectrum_type='FileFunction' and spectrum_pars=None, the source spectrum will be replaced with a FileFunction with the same differential flux distribution as the original spectrum. Parameters ---------- name : str Source name. spectrum_type : str Spectrum type (PowerLaw, etc.). spectrum_pars : dict Dictionary of spectral parameters (optional). update_source : bool Recompute all source characteristics (flux, TS, NPred) using the new spectral model of the source. """ name = self.roi.get_source_by_name(name).name src = self.roi[name] spectrum_pars = {} if spectrum_pars is None else spectrum_pars if (self.roi[name]['SpectrumType'] == 'PowerLaw' and spectrum_type == 'LogParabola'): spectrum_pars.setdefault('beta', {'value': 0.0, 'scale': 1.0, 'min': 0.0, 'max': 1.0}) spectrum_pars.setdefault('Eb', src.spectral_pars['Scale']) spectrum_pars.setdefault('norm', src.spectral_pars['Prefactor']) if 'alpha' not in spectrum_pars: spectrum_pars['alpha'] = src.spectral_pars['Index'] spectrum_pars['alpha']['value'] *= -1.0 if spectrum_pars['alpha']['scale'] == -1.0: spectrum_pars['alpha']['value'] *= -1.0 spectrum_pars['alpha']['scale'] *= -1.0 if spectrum_type == 'FileFunction': self._create_filefunction(name, spectrum_pars) else: fn = gtutils.create_spectrum_from_dict(spectrum_type, spectrum_pars) self.like.setSpectrum(str(name), fn) # Get parameters src = self.components[0].like.logLike.getSource(str(name)) pars_dict = gtutils.get_function_pars_dict(src.spectrum()) self.roi[name]['SpectrumType'] = spectrum_type self.roi[name].set_spectral_pars(pars_dict) for c in self.components: c.roi[name]['SpectrumType'] = spectrum_type c.roi[name].set_spectral_pars(pars_dict) if update_source: self.update_source(name)
python
def set_source_spectrum(self, name, spectrum_type='PowerLaw', spectrum_pars=None, update_source=True): """Set the spectral model of a source. This function can be used to change the spectral type of a source or modify its spectral parameters. If called with spectrum_type='FileFunction' and spectrum_pars=None, the source spectrum will be replaced with a FileFunction with the same differential flux distribution as the original spectrum. Parameters ---------- name : str Source name. spectrum_type : str Spectrum type (PowerLaw, etc.). spectrum_pars : dict Dictionary of spectral parameters (optional). update_source : bool Recompute all source characteristics (flux, TS, NPred) using the new spectral model of the source. """ name = self.roi.get_source_by_name(name).name src = self.roi[name] spectrum_pars = {} if spectrum_pars is None else spectrum_pars if (self.roi[name]['SpectrumType'] == 'PowerLaw' and spectrum_type == 'LogParabola'): spectrum_pars.setdefault('beta', {'value': 0.0, 'scale': 1.0, 'min': 0.0, 'max': 1.0}) spectrum_pars.setdefault('Eb', src.spectral_pars['Scale']) spectrum_pars.setdefault('norm', src.spectral_pars['Prefactor']) if 'alpha' not in spectrum_pars: spectrum_pars['alpha'] = src.spectral_pars['Index'] spectrum_pars['alpha']['value'] *= -1.0 if spectrum_pars['alpha']['scale'] == -1.0: spectrum_pars['alpha']['value'] *= -1.0 spectrum_pars['alpha']['scale'] *= -1.0 if spectrum_type == 'FileFunction': self._create_filefunction(name, spectrum_pars) else: fn = gtutils.create_spectrum_from_dict(spectrum_type, spectrum_pars) self.like.setSpectrum(str(name), fn) # Get parameters src = self.components[0].like.logLike.getSource(str(name)) pars_dict = gtutils.get_function_pars_dict(src.spectrum()) self.roi[name]['SpectrumType'] = spectrum_type self.roi[name].set_spectral_pars(pars_dict) for c in self.components: c.roi[name]['SpectrumType'] = spectrum_type c.roi[name].set_spectral_pars(pars_dict) if update_source: self.update_source(name)
[ "def", "set_source_spectrum", "(", "self", ",", "name", ",", "spectrum_type", "=", "'PowerLaw'", ",", "spectrum_pars", "=", "None", ",", "update_source", "=", "True", ")", ":", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", "...
Set the spectral model of a source. This function can be used to change the spectral type of a source or modify its spectral parameters. If called with spectrum_type='FileFunction' and spectrum_pars=None, the source spectrum will be replaced with a FileFunction with the same differential flux distribution as the original spectrum. Parameters ---------- name : str Source name. spectrum_type : str Spectrum type (PowerLaw, etc.). spectrum_pars : dict Dictionary of spectral parameters (optional). update_source : bool Recompute all source characteristics (flux, TS, NPred) using the new spectral model of the source.
[ "Set", "the", "spectral", "model", "of", "a", "source", ".", "This", "function", "can", "be", "used", "to", "change", "the", "spectral", "type", "of", "a", "source", "or", "modify", "its", "spectral", "parameters", ".", "If", "called", "with", "spectrum_ty...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L698-L759
train
36,037
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_source_dnde
def set_source_dnde(self, name, dnde, update_source=True): """Set the differential flux distribution of a source with the FileFunction spectral type. Parameters ---------- name : str Source name. dnde : `~numpy.ndarray` Array of differential flux values (cm^{-2} s^{-1} MeV^{-1}). """ name = self.roi.get_source_by_name(name).name if self.roi[name]['SpectrumType'] != 'FileFunction': msg = 'Wrong spectral type: %s' % self.roi[name]['SpectrumType'] self.logger.error(msg) raise Exception(msg) xy = self.get_source_dnde(name) if len(dnde) != len(xy[0]): msg = 'Wrong length for dnde array: %i' % len(dnde) self.logger.error(msg) raise Exception(msg) for c in self.components: src = c.like.logLike.getSource(str(name)) spectrum = src.spectrum() file_function = pyLike.FileFunction_cast(spectrum) file_function.setSpectrum(10**xy[0], dnde) if update_source: self.update_source(name)
python
def set_source_dnde(self, name, dnde, update_source=True): """Set the differential flux distribution of a source with the FileFunction spectral type. Parameters ---------- name : str Source name. dnde : `~numpy.ndarray` Array of differential flux values (cm^{-2} s^{-1} MeV^{-1}). """ name = self.roi.get_source_by_name(name).name if self.roi[name]['SpectrumType'] != 'FileFunction': msg = 'Wrong spectral type: %s' % self.roi[name]['SpectrumType'] self.logger.error(msg) raise Exception(msg) xy = self.get_source_dnde(name) if len(dnde) != len(xy[0]): msg = 'Wrong length for dnde array: %i' % len(dnde) self.logger.error(msg) raise Exception(msg) for c in self.components: src = c.like.logLike.getSource(str(name)) spectrum = src.spectrum() file_function = pyLike.FileFunction_cast(spectrum) file_function.setSpectrum(10**xy[0], dnde) if update_source: self.update_source(name)
[ "def", "set_source_dnde", "(", "self", ",", "name", ",", "dnde", ",", "update_source", "=", "True", ")", ":", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", ".", "name", "if", "self", ".", "roi", "[", "name", "]", "[", ...
Set the differential flux distribution of a source with the FileFunction spectral type. Parameters ---------- name : str Source name. dnde : `~numpy.ndarray` Array of differential flux values (cm^{-2} s^{-1} MeV^{-1}).
[ "Set", "the", "differential", "flux", "distribution", "of", "a", "source", "with", "the", "FileFunction", "spectral", "type", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L761-L794
train
36,038
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.get_source_dnde
def get_source_dnde(self, name): """Return differential flux distribution of a source. For sources with FileFunction spectral type this returns the internal differential flux array. Returns ------- loge : `~numpy.ndarray` Array of energies at which the differential flux is evaluated (log10(E/MeV)). dnde : `~numpy.ndarray` Array of differential flux values (cm^{-2} s^{-1} MeV^{-1}) evaluated at energies in ``loge``. """ name = self.roi.get_source_by_name(name).name if self.roi[name]['SpectrumType'] != 'FileFunction': src = self.components[0].like.logLike.getSource(str(name)) spectrum = src.spectrum() file_function = pyLike.FileFunction_cast(spectrum) loge = file_function.log_energy() logdnde = file_function.log_dnde() loge = np.log10(np.exp(loge)) dnde = np.exp(logdnde) return loge, dnde else: ebinsz = (self.log_energies[-1] - self.log_energies[0]) / self.enumbins loge = utils.extend_array(self.log_energies, ebinsz, 0.5, 6.5) dnde = np.array([self.like[name].spectrum()(pyLike.dArg(10 ** egy)) for egy in loge]) return loge, dnde
python
def get_source_dnde(self, name): """Return differential flux distribution of a source. For sources with FileFunction spectral type this returns the internal differential flux array. Returns ------- loge : `~numpy.ndarray` Array of energies at which the differential flux is evaluated (log10(E/MeV)). dnde : `~numpy.ndarray` Array of differential flux values (cm^{-2} s^{-1} MeV^{-1}) evaluated at energies in ``loge``. """ name = self.roi.get_source_by_name(name).name if self.roi[name]['SpectrumType'] != 'FileFunction': src = self.components[0].like.logLike.getSource(str(name)) spectrum = src.spectrum() file_function = pyLike.FileFunction_cast(spectrum) loge = file_function.log_energy() logdnde = file_function.log_dnde() loge = np.log10(np.exp(loge)) dnde = np.exp(logdnde) return loge, dnde else: ebinsz = (self.log_energies[-1] - self.log_energies[0]) / self.enumbins loge = utils.extend_array(self.log_energies, ebinsz, 0.5, 6.5) dnde = np.array([self.like[name].spectrum()(pyLike.dArg(10 ** egy)) for egy in loge]) return loge, dnde
[ "def", "get_source_dnde", "(", "self", ",", "name", ")", ":", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", ".", "name", "if", "self", ".", "roi", "[", "name", "]", "[", "'SpectrumType'", "]", "!=", "'FileFunction'", ":"...
Return differential flux distribution of a source. For sources with FileFunction spectral type this returns the internal differential flux array. Returns ------- loge : `~numpy.ndarray` Array of energies at which the differential flux is evaluated (log10(E/MeV)). dnde : `~numpy.ndarray` Array of differential flux values (cm^{-2} s^{-1} MeV^{-1}) evaluated at energies in ``loge``.
[ "Return", "differential", "flux", "distribution", "of", "a", "source", ".", "For", "sources", "with", "FileFunction", "spectral", "type", "this", "returns", "the", "internal", "differential", "flux", "array", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L796-L835
train
36,039
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis._create_filefunction
def _create_filefunction(self, name, spectrum_pars): """Replace the spectrum of an existing source with a FileFunction.""" spectrum_pars = {} if spectrum_pars is None else spectrum_pars if 'loge' in spectrum_pars: loge = spectrum_pars.get('loge') else: ebinsz = (self.log_energies[-1] - self.log_energies[0]) / self.enumbins loge = utils.extend_array(self.log_energies, ebinsz, 0.5, 6.5) # Get the values dnde = np.zeros(len(loge)) if 'dnde' in spectrum_pars: dnde = spectrum_pars.get('dnde') else: dnde = np.array([self.like[name].spectrum()(pyLike.dArg(10 ** egy)) for egy in loge]) filename = \ os.path.join(self.workdir, '%s_filespectrum.txt' % (name.lower().replace(' ', '_'))) # Create file spectrum txt file np.savetxt(filename, np.vstack((10**loge, dnde)).T) self.like.setSpectrum(name, str('FileFunction')) self.roi[name]['Spectrum_Filename'] = filename # Update for c in self.components: src = c.like.logLike.getSource(str(name)) spectrum = src.spectrum() spectrum.getParam(str('Normalization')).setBounds(1E-3, 1E3) file_function = pyLike.FileFunction_cast(spectrum) file_function.readFunction(str(filename)) c.roi[name]['Spectrum_Filename'] = filename
python
def _create_filefunction(self, name, spectrum_pars): """Replace the spectrum of an existing source with a FileFunction.""" spectrum_pars = {} if spectrum_pars is None else spectrum_pars if 'loge' in spectrum_pars: loge = spectrum_pars.get('loge') else: ebinsz = (self.log_energies[-1] - self.log_energies[0]) / self.enumbins loge = utils.extend_array(self.log_energies, ebinsz, 0.5, 6.5) # Get the values dnde = np.zeros(len(loge)) if 'dnde' in spectrum_pars: dnde = spectrum_pars.get('dnde') else: dnde = np.array([self.like[name].spectrum()(pyLike.dArg(10 ** egy)) for egy in loge]) filename = \ os.path.join(self.workdir, '%s_filespectrum.txt' % (name.lower().replace(' ', '_'))) # Create file spectrum txt file np.savetxt(filename, np.vstack((10**loge, dnde)).T) self.like.setSpectrum(name, str('FileFunction')) self.roi[name]['Spectrum_Filename'] = filename # Update for c in self.components: src = c.like.logLike.getSource(str(name)) spectrum = src.spectrum() spectrum.getParam(str('Normalization')).setBounds(1E-3, 1E3) file_function = pyLike.FileFunction_cast(spectrum) file_function.readFunction(str(filename)) c.roi[name]['Spectrum_Filename'] = filename
[ "def", "_create_filefunction", "(", "self", ",", "name", ",", "spectrum_pars", ")", ":", "spectrum_pars", "=", "{", "}", "if", "spectrum_pars", "is", "None", "else", "spectrum_pars", "if", "'loge'", "in", "spectrum_pars", ":", "loge", "=", "spectrum_pars", "."...
Replace the spectrum of an existing source with a FileFunction.
[ "Replace", "the", "spectrum", "of", "an", "existing", "source", "with", "a", "FileFunction", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L837-L876
train
36,040
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.stage_output
def stage_output(self): """Copy data products to final output directory.""" if self.workdir == self.outdir: return elif not os.path.isdir(self.workdir): self.logger.error('Working directory does not exist.') return regex = self.config['fileio']['outdir_regex'] savefits = self.config['fileio']['savefits'] files = os.listdir(self.workdir) self.logger.info('Staging files to %s', self.outdir) fitsfiles = [] for c in self.components: for f in c.files.values(): if f is None: continue fitsfiles += [os.path.basename(f)] for f in files: wpath = os.path.join(self.workdir, f) opath = os.path.join(self.outdir, f) if not utils.match_regex_list(regex, os.path.basename(f)): continue if os.path.isfile(opath) and filecmp.cmp(wpath, opath, False): continue if not savefits and f in fitsfiles: continue self.logger.debug('Copying ' + f) self.logger.info('Copying ' + f) shutil.copy(wpath, self.outdir) self.logger.info('Finished.')
python
def stage_output(self): """Copy data products to final output directory.""" if self.workdir == self.outdir: return elif not os.path.isdir(self.workdir): self.logger.error('Working directory does not exist.') return regex = self.config['fileio']['outdir_regex'] savefits = self.config['fileio']['savefits'] files = os.listdir(self.workdir) self.logger.info('Staging files to %s', self.outdir) fitsfiles = [] for c in self.components: for f in c.files.values(): if f is None: continue fitsfiles += [os.path.basename(f)] for f in files: wpath = os.path.join(self.workdir, f) opath = os.path.join(self.outdir, f) if not utils.match_regex_list(regex, os.path.basename(f)): continue if os.path.isfile(opath) and filecmp.cmp(wpath, opath, False): continue if not savefits and f in fitsfiles: continue self.logger.debug('Copying ' + f) self.logger.info('Copying ' + f) shutil.copy(wpath, self.outdir) self.logger.info('Finished.')
[ "def", "stage_output", "(", "self", ")", ":", "if", "self", ".", "workdir", "==", "self", ".", "outdir", ":", "return", "elif", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "workdir", ")", ":", "self", ".", "logger", ".", "error", "(",...
Copy data products to final output directory.
[ "Copy", "data", "products", "to", "final", "output", "directory", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L926-L966
train
36,041
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.stage_input
def stage_input(self): """Copy input files to working directory.""" if self.workdir == self.outdir: return elif not os.path.isdir(self.workdir): self.logger.error('Working directory does not exist.') return self.logger.info('Staging files to %s', self.workdir) files = [os.path.join(self.outdir, f) for f in os.listdir(self.outdir)] regex = copy.deepcopy(self.config['fileio']['workdir_regex']) for f in files: if not os.path.isfile(f): continue if not utils.match_regex_list(regex, os.path.basename(f)): continue self.logger.debug('Copying ' + os.path.basename(f)) shutil.copy(f, self.workdir) for c in self.components: for f in c.files.values(): if f is None: continue wpath = os.path.join(self.workdir, os.path.basename(f)) opath = os.path.join(self.outdir, os.path.basename(f)) if os.path.isfile(wpath): continue elif os.path.isfile(opath): self.logger.debug('Copying ' + os.path.basename(f)) shutil.copy(opath, self.workdir) self.logger.info('Finished.')
python
def stage_input(self): """Copy input files to working directory.""" if self.workdir == self.outdir: return elif not os.path.isdir(self.workdir): self.logger.error('Working directory does not exist.') return self.logger.info('Staging files to %s', self.workdir) files = [os.path.join(self.outdir, f) for f in os.listdir(self.outdir)] regex = copy.deepcopy(self.config['fileio']['workdir_regex']) for f in files: if not os.path.isfile(f): continue if not utils.match_regex_list(regex, os.path.basename(f)): continue self.logger.debug('Copying ' + os.path.basename(f)) shutil.copy(f, self.workdir) for c in self.components: for f in c.files.values(): if f is None: continue wpath = os.path.join(self.workdir, os.path.basename(f)) opath = os.path.join(self.outdir, os.path.basename(f)) if os.path.isfile(wpath): continue elif os.path.isfile(opath): self.logger.debug('Copying ' + os.path.basename(f)) shutil.copy(opath, self.workdir) self.logger.info('Finished.')
[ "def", "stage_input", "(", "self", ")", ":", "if", "self", ".", "workdir", "==", "self", ".", "outdir", ":", "return", "elif", "not", "os", ".", "path", ".", "isdir", "(", "self", ".", "workdir", ")", ":", "self", ".", "logger", ".", "error", "(", ...
Copy input files to working directory.
[ "Copy", "input", "files", "to", "working", "directory", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L968-L1009
train
36,042
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis._create_likelihood
def _create_likelihood(self, srcmdl=None): """Instantiate the likelihood object for each component and create a SummedLikelihood.""" self._like = SummedLikelihood() for c in self.components: c._create_binned_analysis(srcmdl) self._like.addComponent(c.like) self.like.model = self.like.components[0].model self._fitcache = None self._init_roi_model()
python
def _create_likelihood(self, srcmdl=None): """Instantiate the likelihood object for each component and create a SummedLikelihood.""" self._like = SummedLikelihood() for c in self.components: c._create_binned_analysis(srcmdl) self._like.addComponent(c.like) self.like.model = self.like.components[0].model self._fitcache = None self._init_roi_model()
[ "def", "_create_likelihood", "(", "self", ",", "srcmdl", "=", "None", ")", ":", "self", ".", "_like", "=", "SummedLikelihood", "(", ")", "for", "c", "in", "self", ".", "components", ":", "c", ".", "_create_binned_analysis", "(", "srcmdl", ")", "self", "....
Instantiate the likelihood object for each component and create a SummedLikelihood.
[ "Instantiate", "the", "likelihood", "object", "for", "each", "component", "and", "create", "a", "SummedLikelihood", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1070-L1081
train
36,043
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.generate_model
def generate_model(self, model_name=None): """Generate model maps for all components. model_name should be a unique identifier for the model. If model_name is None then the model maps will be generated using the current parameters of the ROI.""" for i, c in enumerate(self._components): c.generate_model(model_name=model_name)
python
def generate_model(self, model_name=None): """Generate model maps for all components. model_name should be a unique identifier for the model. If model_name is None then the model maps will be generated using the current parameters of the ROI.""" for i, c in enumerate(self._components): c.generate_model(model_name=model_name)
[ "def", "generate_model", "(", "self", ",", "model_name", "=", "None", ")", ":", "for", "i", ",", "c", "in", "enumerate", "(", "self", ".", "_components", ")", ":", "c", ".", "generate_model", "(", "model_name", "=", "model_name", ")" ]
Generate model maps for all components. model_name should be a unique identifier for the model. If model_name is None then the model maps will be generated using the current parameters of the ROI.
[ "Generate", "model", "maps", "for", "all", "components", ".", "model_name", "should", "be", "a", "unique", "identifier", "for", "the", "model", ".", "If", "model_name", "is", "None", "then", "the", "model", "maps", "will", "be", "generated", "using", "the", ...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1142-L1149
train
36,044
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_energy_range
def set_energy_range(self, logemin, logemax): """Set the energy bounds of the analysis. This restricts the evaluation of the likelihood to the data that falls in this range. Input values will be rounded to the closest bin edge value. If either argument is None then the lower or upper bound of the analysis instance will be used. Parameters ---------- logemin : float Lower energy bound in log10(E/MeV). logemax : float Upper energy bound in log10(E/MeV). Returns ------- eminmax : array Minimum and maximum energy in log10(E/MeV). """ if logemin is None: logemin = self.log_energies[0] else: imin = int(utils.val_to_edge(self.log_energies, logemin)[0]) logemin = self.log_energies[imin] if logemax is None: logemax = self.log_energies[-1] else: imax = int(utils.val_to_edge(self.log_energies, logemax)[0]) logemax = self.log_energies[imax] self._loge_bounds = np.array([logemin, logemax]) self._roi_data['loge_bounds'] = np.copy(self.loge_bounds) for c in self.components: c.set_energy_range(logemin, logemax) return self._loge_bounds
python
def set_energy_range(self, logemin, logemax): """Set the energy bounds of the analysis. This restricts the evaluation of the likelihood to the data that falls in this range. Input values will be rounded to the closest bin edge value. If either argument is None then the lower or upper bound of the analysis instance will be used. Parameters ---------- logemin : float Lower energy bound in log10(E/MeV). logemax : float Upper energy bound in log10(E/MeV). Returns ------- eminmax : array Minimum and maximum energy in log10(E/MeV). """ if logemin is None: logemin = self.log_energies[0] else: imin = int(utils.val_to_edge(self.log_energies, logemin)[0]) logemin = self.log_energies[imin] if logemax is None: logemax = self.log_energies[-1] else: imax = int(utils.val_to_edge(self.log_energies, logemax)[0]) logemax = self.log_energies[imax] self._loge_bounds = np.array([logemin, logemax]) self._roi_data['loge_bounds'] = np.copy(self.loge_bounds) for c in self.components: c.set_energy_range(logemin, logemax) return self._loge_bounds
[ "def", "set_energy_range", "(", "self", ",", "logemin", ",", "logemax", ")", ":", "if", "logemin", "is", "None", ":", "logemin", "=", "self", ".", "log_energies", "[", "0", "]", "else", ":", "imin", "=", "int", "(", "utils", ".", "val_to_edge", "(", ...
Set the energy bounds of the analysis. This restricts the evaluation of the likelihood to the data that falls in this range. Input values will be rounded to the closest bin edge value. If either argument is None then the lower or upper bound of the analysis instance will be used. Parameters ---------- logemin : float Lower energy bound in log10(E/MeV). logemax : float Upper energy bound in log10(E/MeV). Returns ------- eminmax : array Minimum and maximum energy in log10(E/MeV).
[ "Set", "the", "energy", "bounds", "of", "the", "analysis", ".", "This", "restricts", "the", "evaluation", "of", "the", "likelihood", "to", "the", "data", "that", "falls", "in", "this", "range", ".", "Input", "values", "will", "be", "rounded", "to", "the", ...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1154-L1195
train
36,045
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.model_counts_map
def model_counts_map(self, name=None, exclude=None, use_mask=False): """Return the model counts map for a single source, a list of sources, or for the sum of all sources in the ROI. The exclude parameter can be used to exclude one or more components when generating the model map. Parameters ---------- name : str or list of str Parameter controlling the set of sources for which the model counts map will be calculated. If name=None the model map will be generated for all sources in the ROI. exclude : str or list of str List of sources that will be excluded when calculating the model map. use_mask : bool Parameter that specifies in the model counts map should include mask pixels (i.e., ones whose weights are <= 0) Returns ------- map : `~gammapy.maps.Map` """ maps = [c.model_counts_map(name, exclude, use_mask=use_mask) for c in self.components] return skymap.coadd_maps(self.geom, maps)
python
def model_counts_map(self, name=None, exclude=None, use_mask=False): """Return the model counts map for a single source, a list of sources, or for the sum of all sources in the ROI. The exclude parameter can be used to exclude one or more components when generating the model map. Parameters ---------- name : str or list of str Parameter controlling the set of sources for which the model counts map will be calculated. If name=None the model map will be generated for all sources in the ROI. exclude : str or list of str List of sources that will be excluded when calculating the model map. use_mask : bool Parameter that specifies in the model counts map should include mask pixels (i.e., ones whose weights are <= 0) Returns ------- map : `~gammapy.maps.Map` """ maps = [c.model_counts_map(name, exclude, use_mask=use_mask) for c in self.components] return skymap.coadd_maps(self.geom, maps)
[ "def", "model_counts_map", "(", "self", ",", "name", "=", "None", ",", "exclude", "=", "None", ",", "use_mask", "=", "False", ")", ":", "maps", "=", "[", "c", ".", "model_counts_map", "(", "name", ",", "exclude", ",", "use_mask", "=", "use_mask", ")", ...
Return the model counts map for a single source, a list of sources, or for the sum of all sources in the ROI. The exclude parameter can be used to exclude one or more components when generating the model map. Parameters ---------- name : str or list of str Parameter controlling the set of sources for which the model counts map will be calculated. If name=None the model map will be generated for all sources in the ROI. exclude : str or list of str List of sources that will be excluded when calculating the model map. use_mask : bool Parameter that specifies in the model counts map should include mask pixels (i.e., ones whose weights are <= 0) Returns ------- map : `~gammapy.maps.Map`
[ "Return", "the", "model", "counts", "map", "for", "a", "single", "source", "a", "list", "of", "sources", "or", "for", "the", "sum", "of", "all", "sources", "in", "the", "ROI", ".", "The", "exclude", "parameter", "can", "be", "used", "to", "exclude", "o...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1219-L1249
train
36,046
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.model_counts_spectrum
def model_counts_spectrum(self, name, logemin=None, logemax=None, summed=False, weighted=False): """Return the predicted number of model counts versus energy for a given source and energy range. If summed=True return the counts spectrum summed over all components otherwise return a list of model spectra. If weighted=True return the weighted version of the counts spectrum """ if logemin is None: logemin = self.log_energies[0] if logemax is None: logemax = self.log_energies[-1] if summed: cs = np.zeros(self.enumbins) imin = utils.val_to_bin_bounded(self.log_energies, logemin + 1E-7)[0] imax = utils.val_to_bin_bounded(self.log_energies, logemax - 1E-7)[0] + 1 for c in self.components: ecenter = 0.5 * (c.log_energies[:-1] + c.log_energies[1:]) counts = c.model_counts_spectrum(name, self.log_energies[0], self.log_energies[-1], weighted) cs += np.histogram(ecenter, weights=counts, bins=self.log_energies)[0] return cs[imin:imax] else: cs = [] for c in self.components: cs += [c.model_counts_spectrum(name, logemin, logemax, weighted=weighted)] return cs
python
def model_counts_spectrum(self, name, logemin=None, logemax=None, summed=False, weighted=False): """Return the predicted number of model counts versus energy for a given source and energy range. If summed=True return the counts spectrum summed over all components otherwise return a list of model spectra. If weighted=True return the weighted version of the counts spectrum """ if logemin is None: logemin = self.log_energies[0] if logemax is None: logemax = self.log_energies[-1] if summed: cs = np.zeros(self.enumbins) imin = utils.val_to_bin_bounded(self.log_energies, logemin + 1E-7)[0] imax = utils.val_to_bin_bounded(self.log_energies, logemax - 1E-7)[0] + 1 for c in self.components: ecenter = 0.5 * (c.log_energies[:-1] + c.log_energies[1:]) counts = c.model_counts_spectrum(name, self.log_energies[0], self.log_energies[-1], weighted) cs += np.histogram(ecenter, weights=counts, bins=self.log_energies)[0] return cs[imin:imax] else: cs = [] for c in self.components: cs += [c.model_counts_spectrum(name, logemin, logemax, weighted=weighted)] return cs
[ "def", "model_counts_spectrum", "(", "self", ",", "name", ",", "logemin", "=", "None", ",", "logemax", "=", "None", ",", "summed", "=", "False", ",", "weighted", "=", "False", ")", ":", "if", "logemin", "is", "None", ":", "logemin", "=", "self", ".", ...
Return the predicted number of model counts versus energy for a given source and energy range. If summed=True return the counts spectrum summed over all components otherwise return a list of model spectra. If weighted=True return the weighted version of the counts spectrum
[ "Return", "the", "predicted", "number", "of", "model", "counts", "versus", "energy", "for", "a", "given", "source", "and", "energy", "range", ".", "If", "summed", "=", "True", "return", "the", "counts", "spectrum", "summed", "over", "all", "components", "oth...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1251-L1286
train
36,047
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.get_sources
def get_sources(self, cuts=None, distance=None, skydir=None, minmax_ts=None, minmax_npred=None, exclude=None, square=False): """Retrieve list of sources in the ROI satisfying the given selections. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects. """ coordsys = self.config['binning']['coordsys'] return self.roi.get_sources(skydir, distance, cuts, minmax_ts, minmax_npred, exclude, square, coordsys=coordsys)
python
def get_sources(self, cuts=None, distance=None, skydir=None, minmax_ts=None, minmax_npred=None, exclude=None, square=False): """Retrieve list of sources in the ROI satisfying the given selections. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects. """ coordsys = self.config['binning']['coordsys'] return self.roi.get_sources(skydir, distance, cuts, minmax_ts, minmax_npred, exclude, square, coordsys=coordsys)
[ "def", "get_sources", "(", "self", ",", "cuts", "=", "None", ",", "distance", "=", "None", ",", "skydir", "=", "None", ",", "minmax_ts", "=", "None", ",", "minmax_npred", "=", "None", ",", "exclude", "=", "None", ",", "square", "=", "False", ")", ":"...
Retrieve list of sources in the ROI satisfying the given selections. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects.
[ "Retrieve", "list", "of", "sources", "in", "the", "ROI", "satisfying", "the", "given", "selections", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1288-L1305
train
36,048
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.add_source
def add_source(self, name, src_dict, free=None, init_source=True, save_source_maps=True, use_pylike=True, use_single_psf=False, **kwargs): """Add a source to the ROI model. This function may be called either before or after `~fermipy.gtanalysis.GTAnalysis.setup`. Parameters ---------- name : str Source name. src_dict : dict or `~fermipy.roi_model.Source` object Dictionary or source object defining the source properties (coordinates, spectral parameters, etc.). free : bool Initialize the source with a free normalization parameter. use_pylike : bool Create source maps with pyLikelihood. use_single_psf : bool Use the PSF model calculated for the ROI center. If false then a new model will be generated using the position of the source. """ if self.roi.has_source(name): msg = 'Source %s already exists.' % name self.logger.error(msg) raise Exception(msg) loglevel = kwargs.pop('loglevel', self.loglevel) self.logger.log(loglevel, 'Adding source ' + name) src = self.roi.create_source(name, src_dict, rescale=True) self.make_template(src) for c in self.components: c.add_source(name, src_dict, free=free, save_source_maps=save_source_maps, use_pylike=use_pylike, use_single_psf=use_single_psf) if self._like is None: return if self.config['gtlike']['edisp'] and src.name not in \ self.config['gtlike']['edisp_disable']: self.set_edisp_flag(src.name, True) self.like.syncSrcParams(str(name)) self.like.model = self.like.components[0].model # if free is not None: # self.free_norm(name, free, loglevel=logging.DEBUG) if init_source: self._init_source(name) self._update_roi() if self._fitcache is not None: self._fitcache.update_source(name)
python
def add_source(self, name, src_dict, free=None, init_source=True, save_source_maps=True, use_pylike=True, use_single_psf=False, **kwargs): """Add a source to the ROI model. This function may be called either before or after `~fermipy.gtanalysis.GTAnalysis.setup`. Parameters ---------- name : str Source name. src_dict : dict or `~fermipy.roi_model.Source` object Dictionary or source object defining the source properties (coordinates, spectral parameters, etc.). free : bool Initialize the source with a free normalization parameter. use_pylike : bool Create source maps with pyLikelihood. use_single_psf : bool Use the PSF model calculated for the ROI center. If false then a new model will be generated using the position of the source. """ if self.roi.has_source(name): msg = 'Source %s already exists.' % name self.logger.error(msg) raise Exception(msg) loglevel = kwargs.pop('loglevel', self.loglevel) self.logger.log(loglevel, 'Adding source ' + name) src = self.roi.create_source(name, src_dict, rescale=True) self.make_template(src) for c in self.components: c.add_source(name, src_dict, free=free, save_source_maps=save_source_maps, use_pylike=use_pylike, use_single_psf=use_single_psf) if self._like is None: return if self.config['gtlike']['edisp'] and src.name not in \ self.config['gtlike']['edisp_disable']: self.set_edisp_flag(src.name, True) self.like.syncSrcParams(str(name)) self.like.model = self.like.components[0].model # if free is not None: # self.free_norm(name, free, loglevel=logging.DEBUG) if init_source: self._init_source(name) self._update_roi() if self._fitcache is not None: self._fitcache.update_source(name)
[ "def", "add_source", "(", "self", ",", "name", ",", "src_dict", ",", "free", "=", "None", ",", "init_source", "=", "True", ",", "save_source_maps", "=", "True", ",", "use_pylike", "=", "True", ",", "use_single_psf", "=", "False", ",", "*", "*", "kwargs",...
Add a source to the ROI model. This function may be called either before or after `~fermipy.gtanalysis.GTAnalysis.setup`. Parameters ---------- name : str Source name. src_dict : dict or `~fermipy.roi_model.Source` object Dictionary or source object defining the source properties (coordinates, spectral parameters, etc.). free : bool Initialize the source with a free normalization parameter. use_pylike : bool Create source maps with pyLikelihood. use_single_psf : bool Use the PSF model calculated for the ROI center. If false then a new model will be generated using the position of the source.
[ "Add", "a", "source", "to", "the", "ROI", "model", ".", "This", "function", "may", "be", "called", "either", "before", "or", "after", "~fermipy", ".", "gtanalysis", ".", "GTAnalysis", ".", "setup", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1307-L1370
train
36,049
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.add_sources_from_roi
def add_sources_from_roi(self, names, roi, free=False, **kwargs): """Add multiple sources to the current ROI model copied from another ROI model. Parameters ---------- names : list List of str source names to add. roi : `~fermipy.roi_model.ROIModel` object The roi model from which to add sources. free : bool Initialize the source with a free normalization paramter. """ for name in names: self.add_source(name, roi[name].data, free=free, **kwargs)
python
def add_sources_from_roi(self, names, roi, free=False, **kwargs): """Add multiple sources to the current ROI model copied from another ROI model. Parameters ---------- names : list List of str source names to add. roi : `~fermipy.roi_model.ROIModel` object The roi model from which to add sources. free : bool Initialize the source with a free normalization paramter. """ for name in names: self.add_source(name, roi[name].data, free=free, **kwargs)
[ "def", "add_sources_from_roi", "(", "self", ",", "names", ",", "roi", ",", "free", "=", "False", ",", "*", "*", "kwargs", ")", ":", "for", "name", "in", "names", ":", "self", ".", "add_source", "(", "name", ",", "roi", "[", "name", "]", ".", "data"...
Add multiple sources to the current ROI model copied from another ROI model. Parameters ---------- names : list List of str source names to add. roi : `~fermipy.roi_model.ROIModel` object The roi model from which to add sources. free : bool Initialize the source with a free normalization paramter.
[ "Add", "multiple", "sources", "to", "the", "current", "ROI", "model", "copied", "from", "another", "ROI", "model", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1372-L1390
train
36,050
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.delete_source
def delete_source(self, name, save_template=True, delete_source_map=False, build_fixed_wts=True, **kwargs): """Delete a source from the ROI model. Parameters ---------- name : str Source name. save_template : bool Keep the SpatialMap FITS template associated with this source. delete_source_map : bool Delete the source map associated with this source from the source maps file. Returns ------- src : `~fermipy.roi_model.Model` The deleted source object. """ if not self.roi.has_source(name): self.logger.error('No source with name: %s', name) return loglevel = kwargs.pop('loglevel', self.loglevel) self.logger.log(loglevel, 'Deleting source %s', name) # STs require a source to be freed before deletion if self.like is not None: self.free_norm(name, loglevel=logging.DEBUG) for c in self.components: c.delete_source(name, save_template=save_template, delete_source_map=delete_source_map, build_fixed_wts=build_fixed_wts) src = self.roi.get_source_by_name(name) self.roi.delete_sources([src]) if self.like is not None: self.like.model = self.like.components[0].model self._update_roi() return src
python
def delete_source(self, name, save_template=True, delete_source_map=False, build_fixed_wts=True, **kwargs): """Delete a source from the ROI model. Parameters ---------- name : str Source name. save_template : bool Keep the SpatialMap FITS template associated with this source. delete_source_map : bool Delete the source map associated with this source from the source maps file. Returns ------- src : `~fermipy.roi_model.Model` The deleted source object. """ if not self.roi.has_source(name): self.logger.error('No source with name: %s', name) return loglevel = kwargs.pop('loglevel', self.loglevel) self.logger.log(loglevel, 'Deleting source %s', name) # STs require a source to be freed before deletion if self.like is not None: self.free_norm(name, loglevel=logging.DEBUG) for c in self.components: c.delete_source(name, save_template=save_template, delete_source_map=delete_source_map, build_fixed_wts=build_fixed_wts) src = self.roi.get_source_by_name(name) self.roi.delete_sources([src]) if self.like is not None: self.like.model = self.like.components[0].model self._update_roi() return src
[ "def", "delete_source", "(", "self", ",", "name", ",", "save_template", "=", "True", ",", "delete_source_map", "=", "False", ",", "build_fixed_wts", "=", "True", ",", "*", "*", "kwargs", ")", ":", "if", "not", "self", ".", "roi", ".", "has_source", "(", ...
Delete a source from the ROI model. Parameters ---------- name : str Source name. save_template : bool Keep the SpatialMap FITS template associated with this source. delete_source_map : bool Delete the source map associated with this source from the source maps file. Returns ------- src : `~fermipy.roi_model.Model` The deleted source object.
[ "Delete", "a", "source", "from", "the", "ROI", "model", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1392-L1438
train
36,051
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.delete_sources
def delete_sources(self, cuts=None, distance=None, skydir=None, minmax_ts=None, minmax_npred=None, exclude=None, square=False, names=None): """Delete sources in the ROI model satisfying the given selection criteria. Parameters ---------- cuts : dict Dictionary of [min,max] selections on source properties. distance : float Cut on angular distance from ``skydir``. If None then no selection will be applied. skydir : `~astropy.coordinates.SkyCoord` Reference sky coordinate for ``distance`` selection. If None then the distance selection will be applied with respect to the ROI center. minmax_ts : list Select sources that have TS in the range [min,max]. If either min or max are None then only a lower (upper) bound will be applied. If this parameter is none no selection will be applied. minmax_npred : list Select sources that have npred in the range [min,max]. If either min or max are None then only a lower (upper) bound will be applied. If this parameter is none no selection will be applied. square : bool Switch between applying a circular or square (ROI-like) selection on the maximum projected distance from the ROI center. names : list Select sources matching a name in this list. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects. """ srcs = self.roi.get_sources(skydir=skydir, distance=distance, cuts=cuts, minmax_ts=minmax_ts, minmax_npred=minmax_npred, exclude=exclude, square=square, coordsys=self.config[ 'binning']['coordsys'], names=names) for s in srcs: self.delete_source(s.name, build_fixed_wts=False) if self.like is not None: # Build fixed model weights in one pass for c in self.components: c.like.logLike.buildFixedModelWts() self._update_roi() return srcs
python
def delete_sources(self, cuts=None, distance=None, skydir=None, minmax_ts=None, minmax_npred=None, exclude=None, square=False, names=None): """Delete sources in the ROI model satisfying the given selection criteria. Parameters ---------- cuts : dict Dictionary of [min,max] selections on source properties. distance : float Cut on angular distance from ``skydir``. If None then no selection will be applied. skydir : `~astropy.coordinates.SkyCoord` Reference sky coordinate for ``distance`` selection. If None then the distance selection will be applied with respect to the ROI center. minmax_ts : list Select sources that have TS in the range [min,max]. If either min or max are None then only a lower (upper) bound will be applied. If this parameter is none no selection will be applied. minmax_npred : list Select sources that have npred in the range [min,max]. If either min or max are None then only a lower (upper) bound will be applied. If this parameter is none no selection will be applied. square : bool Switch between applying a circular or square (ROI-like) selection on the maximum projected distance from the ROI center. names : list Select sources matching a name in this list. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects. """ srcs = self.roi.get_sources(skydir=skydir, distance=distance, cuts=cuts, minmax_ts=minmax_ts, minmax_npred=minmax_npred, exclude=exclude, square=square, coordsys=self.config[ 'binning']['coordsys'], names=names) for s in srcs: self.delete_source(s.name, build_fixed_wts=False) if self.like is not None: # Build fixed model weights in one pass for c in self.components: c.like.logLike.buildFixedModelWts() self._update_roi() return srcs
[ "def", "delete_sources", "(", "self", ",", "cuts", "=", "None", ",", "distance", "=", "None", ",", "skydir", "=", "None", ",", "minmax_ts", "=", "None", ",", "minmax_npred", "=", "None", ",", "exclude", "=", "None", ",", "square", "=", "False", ",", ...
Delete sources in the ROI model satisfying the given selection criteria. Parameters ---------- cuts : dict Dictionary of [min,max] selections on source properties. distance : float Cut on angular distance from ``skydir``. If None then no selection will be applied. skydir : `~astropy.coordinates.SkyCoord` Reference sky coordinate for ``distance`` selection. If None then the distance selection will be applied with respect to the ROI center. minmax_ts : list Select sources that have TS in the range [min,max]. If either min or max are None then only a lower (upper) bound will be applied. If this parameter is none no selection will be applied. minmax_npred : list Select sources that have npred in the range [min,max]. If either min or max are None then only a lower (upper) bound will be applied. If this parameter is none no selection will be applied. square : bool Switch between applying a circular or square (ROI-like) selection on the maximum projected distance from the ROI center. names : list Select sources matching a name in this list. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects.
[ "Delete", "sources", "in", "the", "ROI", "model", "satisfying", "the", "given", "selection", "criteria", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1440-L1504
train
36,052
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.free_sources_by_name
def free_sources_by_name(self, names, free=True, pars=None, **kwargs): """Free all sources with names matching ``names``. Parameters ---------- names : list List of source names. free : bool Choose whether to free (free=True) or fix (free=False) source parameters. pars : list Set a list of parameters to be freed/fixed for each source. If none then all source parameters will be freed/fixed. If pars='norm' then only normalization parameters will be freed. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects. """ if names is None: return names = [names] if not isinstance(names, list) else names names = [self.roi.get_source_by_name(t).name for t in names] srcs = [s for s in self.roi.sources if s.name in names] for s in srcs: self.free_source(s.name, free=free, pars=pars, **kwargs) return srcs
python
def free_sources_by_name(self, names, free=True, pars=None, **kwargs): """Free all sources with names matching ``names``. Parameters ---------- names : list List of source names. free : bool Choose whether to free (free=True) or fix (free=False) source parameters. pars : list Set a list of parameters to be freed/fixed for each source. If none then all source parameters will be freed/fixed. If pars='norm' then only normalization parameters will be freed. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects. """ if names is None: return names = [names] if not isinstance(names, list) else names names = [self.roi.get_source_by_name(t).name for t in names] srcs = [s for s in self.roi.sources if s.name in names] for s in srcs: self.free_source(s.name, free=free, pars=pars, **kwargs) return srcs
[ "def", "free_sources_by_name", "(", "self", ",", "names", ",", "free", "=", "True", ",", "pars", "=", "None", ",", "*", "*", "kwargs", ")", ":", "if", "names", "is", "None", ":", "return", "names", "=", "[", "names", "]", "if", "not", "isinstance", ...
Free all sources with names matching ``names``. Parameters ---------- names : list List of source names. free : bool Choose whether to free (free=True) or fix (free=False) source parameters. pars : list Set a list of parameters to be freed/fixed for each source. If none then all source parameters will be freed/fixed. If pars='norm' then only normalization parameters will be freed. Returns ------- srcs : list A list of `~fermipy.roi_model.Model` objects.
[ "Free", "all", "sources", "with", "names", "matching", "names", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1573-L1607
train
36,053
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_parameter
def set_parameter(self, name, par, value, true_value=True, scale=None, bounds=None, error=None, update_source=True): """ Update the value of a parameter. Parameter bounds will automatically be adjusted to encompass the new parameter value. Parameters ---------- name : str Source name. par : str Parameter name. value : float Parameter value. By default this argument should be the unscaled (True) parameter value. scale : float Parameter scale (optional). Value argument is interpreted with respect to the scale parameter if it is provided. error : float Parameter error (optional). By default this argument should be the unscaled (True) parameter value. update_source : bool Update the source dictionary for the object. """ name = self.roi.get_source_by_name(name).name idx = self.like.par_index(name, par) current_bounds = list(self.like.model[idx].getBounds()) if scale is not None: self.like[idx].setScale(scale) else: scale = self.like.model[idx].getScale() if true_value: current_bounds[0] = min(current_bounds[0], value / scale) current_bounds[1] = max(current_bounds[1], value / scale) if error is not None: error = error / scale else: current_bounds[0] = min(current_bounds[0], value) current_bounds[1] = max(current_bounds[1], value) # update current bounds to encompass new value self.like[idx].setBounds(*current_bounds) if true_value: for p in self.like[idx].pars: p.setTrueValue(value) else: self.like[idx].setValue(value) if bounds is not None: if true_value: bounds[0] = min(bounds[0], value / scale) bounds[1] = max(bounds[1], value / scale) else: bounds[0] = min(bounds[0], value) bounds[1] = max(bounds[1], value) # For some reason the numerical accuracy is causing this to throw exceptions. try: if bounds is not None: self.like[idx].setBounds(*bounds) except RuntimeError: self.logger.warning( "Caught failure on setBounds for %s::%s." % (name, par)) pass if error is not None: self.like[idx].setError(error) self._sync_params(name) if update_source: self.update_source(name)
python
def set_parameter(self, name, par, value, true_value=True, scale=None, bounds=None, error=None, update_source=True): """ Update the value of a parameter. Parameter bounds will automatically be adjusted to encompass the new parameter value. Parameters ---------- name : str Source name. par : str Parameter name. value : float Parameter value. By default this argument should be the unscaled (True) parameter value. scale : float Parameter scale (optional). Value argument is interpreted with respect to the scale parameter if it is provided. error : float Parameter error (optional). By default this argument should be the unscaled (True) parameter value. update_source : bool Update the source dictionary for the object. """ name = self.roi.get_source_by_name(name).name idx = self.like.par_index(name, par) current_bounds = list(self.like.model[idx].getBounds()) if scale is not None: self.like[idx].setScale(scale) else: scale = self.like.model[idx].getScale() if true_value: current_bounds[0] = min(current_bounds[0], value / scale) current_bounds[1] = max(current_bounds[1], value / scale) if error is not None: error = error / scale else: current_bounds[0] = min(current_bounds[0], value) current_bounds[1] = max(current_bounds[1], value) # update current bounds to encompass new value self.like[idx].setBounds(*current_bounds) if true_value: for p in self.like[idx].pars: p.setTrueValue(value) else: self.like[idx].setValue(value) if bounds is not None: if true_value: bounds[0] = min(bounds[0], value / scale) bounds[1] = max(bounds[1], value / scale) else: bounds[0] = min(bounds[0], value) bounds[1] = max(bounds[1], value) # For some reason the numerical accuracy is causing this to throw exceptions. try: if bounds is not None: self.like[idx].setBounds(*bounds) except RuntimeError: self.logger.warning( "Caught failure on setBounds for %s::%s." % (name, par)) pass if error is not None: self.like[idx].setError(error) self._sync_params(name) if update_source: self.update_source(name)
[ "def", "set_parameter", "(", "self", ",", "name", ",", "par", ",", "value", ",", "true_value", "=", "True", ",", "scale", "=", "None", ",", "bounds", "=", "None", ",", "error", "=", "None", ",", "update_source", "=", "True", ")", ":", "name", "=", ...
Update the value of a parameter. Parameter bounds will automatically be adjusted to encompass the new parameter value. Parameters ---------- name : str Source name. par : str Parameter name. value : float Parameter value. By default this argument should be the unscaled (True) parameter value. scale : float Parameter scale (optional). Value argument is interpreted with respect to the scale parameter if it is provided. error : float Parameter error (optional). By default this argument should be the unscaled (True) parameter value. update_source : bool Update the source dictionary for the object.
[ "Update", "the", "value", "of", "a", "parameter", ".", "Parameter", "bounds", "will", "automatically", "be", "adjusted", "to", "encompass", "the", "new", "parameter", "value", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1702-L1784
train
36,054
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_parameter_scale
def set_parameter_scale(self, name, par, scale): """Update the scale of a parameter while keeping its value constant.""" name = self.roi.get_source_by_name(name).name idx = self.like.par_index(name, par) current_bounds = list(self.like.model[idx].getBounds()) current_scale = self.like.model[idx].getScale() current_value = self.like[idx].getValue() self.like[idx].setScale(scale) self.like[idx].setValue(current_value * current_scale / scale) self.like[idx].setBounds(current_bounds[0] * current_scale / scale, current_bounds[1] * current_scale / scale) self._sync_params(name)
python
def set_parameter_scale(self, name, par, scale): """Update the scale of a parameter while keeping its value constant.""" name = self.roi.get_source_by_name(name).name idx = self.like.par_index(name, par) current_bounds = list(self.like.model[idx].getBounds()) current_scale = self.like.model[idx].getScale() current_value = self.like[idx].getValue() self.like[idx].setScale(scale) self.like[idx].setValue(current_value * current_scale / scale) self.like[idx].setBounds(current_bounds[0] * current_scale / scale, current_bounds[1] * current_scale / scale) self._sync_params(name)
[ "def", "set_parameter_scale", "(", "self", ",", "name", ",", "par", ",", "scale", ")", ":", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", ".", "name", "idx", "=", "self", ".", "like", ".", "par_index", "(", "name", ","...
Update the scale of a parameter while keeping its value constant.
[ "Update", "the", "scale", "of", "a", "parameter", "while", "keeping", "its", "value", "constant", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1786-L1798
train
36,055
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_parameter_bounds
def set_parameter_bounds(self, name, par, bounds): """Set the bounds on the scaled value of a parameter. Parameters ---------- name : str Source name. par : str Parameter name. bounds : list Upper and lower bound. """ idx = self.like.par_index(name, par) self.like[idx].setBounds(*bounds) self._sync_params(name)
python
def set_parameter_bounds(self, name, par, bounds): """Set the bounds on the scaled value of a parameter. Parameters ---------- name : str Source name. par : str Parameter name. bounds : list Upper and lower bound. """ idx = self.like.par_index(name, par) self.like[idx].setBounds(*bounds) self._sync_params(name)
[ "def", "set_parameter_bounds", "(", "self", ",", "name", ",", "par", ",", "bounds", ")", ":", "idx", "=", "self", ".", "like", ".", "par_index", "(", "name", ",", "par", ")", "self", ".", "like", "[", "idx", "]", ".", "setBounds", "(", "*", "bounds...
Set the bounds on the scaled value of a parameter. Parameters ---------- name : str Source name. par : str Parameter name. bounds : list Upper and lower bound.
[ "Set", "the", "bounds", "on", "the", "scaled", "value", "of", "a", "parameter", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1800-L1818
train
36,056
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.set_parameter_error
def set_parameter_error(self, name, par, error): """Set the error on the value of a parameter. Parameters ---------- name : str Source name. par : str Parameter name. error : float The value for the parameter error """ idx = self.like.par_index(name, par) self.like[idx].setError(error) self._sync_params(name)
python
def set_parameter_error(self, name, par, error): """Set the error on the value of a parameter. Parameters ---------- name : str Source name. par : str Parameter name. error : float The value for the parameter error """ idx = self.like.par_index(name, par) self.like[idx].setError(error) self._sync_params(name)
[ "def", "set_parameter_error", "(", "self", ",", "name", ",", "par", ",", "error", ")", ":", "idx", "=", "self", ".", "like", ".", "par_index", "(", "name", ",", "par", ")", "self", ".", "like", "[", "idx", "]", ".", "setError", "(", "error", ")", ...
Set the error on the value of a parameter. Parameters ---------- name : str Source name. par : str Parameter name. error : float The value for the parameter error
[ "Set", "the", "error", "on", "the", "value", "of", "a", "parameter", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L1820-L1837
train
36,057
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.get_source_name
def get_source_name(self, name): """Return the name of a source as it is defined in the pyLikelihood model object.""" if name not in self.like.sourceNames(): name = self.roi.get_source_by_name(name).name return name
python
def get_source_name(self, name): """Return the name of a source as it is defined in the pyLikelihood model object.""" if name not in self.like.sourceNames(): name = self.roi.get_source_by_name(name).name return name
[ "def", "get_source_name", "(", "self", ",", "name", ")", ":", "if", "name", "not", "in", "self", ".", "like", ".", "sourceNames", "(", ")", ":", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", ".", "name", "return", "nam...
Return the name of a source as it is defined in the pyLikelihood model object.
[ "Return", "the", "name", "of", "a", "source", "as", "it", "is", "defined", "in", "the", "pyLikelihood", "model", "object", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L2160-L2165
train
36,058
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.constrain_norms
def constrain_norms(self, srcNames, cov_scale=1.0): """Constrain the normalizations of one or more sources by adding gaussian priors with sigma equal to the parameter error times a scaling factor.""" # Get the covariance matrix for name in srcNames: par = self.like.normPar(name) err = par.error() val = par.getValue() if par.error() == 0.0 or not par.isFree(): continue self.add_gauss_prior(name, par.getName(), val, err * cov_scale)
python
def constrain_norms(self, srcNames, cov_scale=1.0): """Constrain the normalizations of one or more sources by adding gaussian priors with sigma equal to the parameter error times a scaling factor.""" # Get the covariance matrix for name in srcNames: par = self.like.normPar(name) err = par.error() val = par.getValue() if par.error() == 0.0 or not par.isFree(): continue self.add_gauss_prior(name, par.getName(), val, err * cov_scale)
[ "def", "constrain_norms", "(", "self", ",", "srcNames", ",", "cov_scale", "=", "1.0", ")", ":", "# Get the covariance matrix", "for", "name", "in", "srcNames", ":", "par", "=", "self", ".", "like", ".", "normPar", "(", "name", ")", "err", "=", "par", "."...
Constrain the normalizations of one or more sources by adding gaussian priors with sigma equal to the parameter error times a scaling factor.
[ "Constrain", "the", "normalizations", "of", "one", "or", "more", "sources", "by", "adding", "gaussian", "priors", "with", "sigma", "equal", "to", "the", "parameter", "error", "times", "a", "scaling", "factor", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L2719-L2736
train
36,059
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.remove_priors
def remove_priors(self): """Clear all priors.""" for src in self.roi.sources: for par in self.like[src.name].funcs["Spectrum"].params.values(): par.removePrior()
python
def remove_priors(self): """Clear all priors.""" for src in self.roi.sources: for par in self.like[src.name].funcs["Spectrum"].params.values(): par.removePrior()
[ "def", "remove_priors", "(", "self", ")", ":", "for", "src", "in", "self", ".", "roi", ".", "sources", ":", "for", "par", "in", "self", ".", "like", "[", "src", ".", "name", "]", ".", "funcs", "[", "\"Spectrum\"", "]", ".", "params", ".", "values",...
Clear all priors.
[ "Clear", "all", "priors", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L2748-L2754
train
36,060
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis._create_optObject
def _create_optObject(self, **kwargs): """ Make MINUIT or NewMinuit type optimizer object """ optimizer = kwargs.get('optimizer', self.config['optimizer']['optimizer']) if optimizer.upper() == 'MINUIT': optObject = pyLike.Minuit(self.like.logLike) elif optimizer.upper() == 'NEWMINUIT': optObject = pyLike.NewMinuit(self.like.logLike) else: optFactory = pyLike.OptimizerFactory_instance() optObject = optFactory.create(str(optimizer), self.like.logLike) return optObject
python
def _create_optObject(self, **kwargs): """ Make MINUIT or NewMinuit type optimizer object """ optimizer = kwargs.get('optimizer', self.config['optimizer']['optimizer']) if optimizer.upper() == 'MINUIT': optObject = pyLike.Minuit(self.like.logLike) elif optimizer.upper() == 'NEWMINUIT': optObject = pyLike.NewMinuit(self.like.logLike) else: optFactory = pyLike.OptimizerFactory_instance() optObject = optFactory.create(str(optimizer), self.like.logLike) return optObject
[ "def", "_create_optObject", "(", "self", ",", "*", "*", "kwargs", ")", ":", "optimizer", "=", "kwargs", ".", "get", "(", "'optimizer'", ",", "self", ".", "config", "[", "'optimizer'", "]", "[", "'optimizer'", "]", ")", "if", "optimizer", ".", "upper", ...
Make MINUIT or NewMinuit type optimizer object
[ "Make", "MINUIT", "or", "NewMinuit", "type", "optimizer", "object" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L2756-L2769
train
36,061
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.load_xml
def load_xml(self, xmlfile): """Load model definition from XML. Parameters ---------- xmlfile : str Name of the input XML file. """ self.logger.info('Loading XML') for c in self.components: c.load_xml(xmlfile) for name in self.like.sourceNames(): self.update_source(name) self._fitcache = None self.logger.info('Finished Loading XML')
python
def load_xml(self, xmlfile): """Load model definition from XML. Parameters ---------- xmlfile : str Name of the input XML file. """ self.logger.info('Loading XML') for c in self.components: c.load_xml(xmlfile) for name in self.like.sourceNames(): self.update_source(name) self._fitcache = None self.logger.info('Finished Loading XML')
[ "def", "load_xml", "(", "self", ",", "xmlfile", ")", ":", "self", ".", "logger", ".", "info", "(", "'Loading XML'", ")", "for", "c", "in", "self", ".", "components", ":", "c", ".", "load_xml", "(", "xmlfile", ")", "for", "name", "in", "self", ".", ...
Load model definition from XML. Parameters ---------- xmlfile : str Name of the input XML file.
[ "Load", "model", "definition", "from", "XML", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3177-L3197
train
36,062
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.load_parameters_from_yaml
def load_parameters_from_yaml(self, yamlfile, update_sources=False): """Load model parameters from yaml Parameters ---------- yamlfile : str Name of the input yaml file. """ d = utils.load_yaml(yamlfile) for src, src_pars in d.items(): for par_name, par_dict in src_pars.items(): if par_name in ['SpectrumType']: continue par_value = par_dict.get('value', None) par_error = par_dict.get('error', None) par_scale = par_dict.get('scale', None) par_min = par_dict.get('min', None) par_max = par_dict.get('max', None) par_free = par_dict.get('free', None) if par_min is not None and par_max is not None: par_bounds = [par_min, par_max] else: par_bounds = None try: self.set_parameter(src, par_name, par_value, true_value=False, scale=par_scale, bounds=par_bounds, error=par_error, update_source=update_sources) except RuntimeError as msg: self.logger.warn(msg) self.logger.warn("Did not set parameter %s:%s"%(src,par_name)) continue except Exception as msg: self.logger.warn(msg) continue if par_free is not None: self.free_parameter(src, par_name, par_free) self._sync_params_state()
python
def load_parameters_from_yaml(self, yamlfile, update_sources=False): """Load model parameters from yaml Parameters ---------- yamlfile : str Name of the input yaml file. """ d = utils.load_yaml(yamlfile) for src, src_pars in d.items(): for par_name, par_dict in src_pars.items(): if par_name in ['SpectrumType']: continue par_value = par_dict.get('value', None) par_error = par_dict.get('error', None) par_scale = par_dict.get('scale', None) par_min = par_dict.get('min', None) par_max = par_dict.get('max', None) par_free = par_dict.get('free', None) if par_min is not None and par_max is not None: par_bounds = [par_min, par_max] else: par_bounds = None try: self.set_parameter(src, par_name, par_value, true_value=False, scale=par_scale, bounds=par_bounds, error=par_error, update_source=update_sources) except RuntimeError as msg: self.logger.warn(msg) self.logger.warn("Did not set parameter %s:%s"%(src,par_name)) continue except Exception as msg: self.logger.warn(msg) continue if par_free is not None: self.free_parameter(src, par_name, par_free) self._sync_params_state()
[ "def", "load_parameters_from_yaml", "(", "self", ",", "yamlfile", ",", "update_sources", "=", "False", ")", ":", "d", "=", "utils", ".", "load_yaml", "(", "yamlfile", ")", "for", "src", ",", "src_pars", "in", "d", ".", "items", "(", ")", ":", "for", "p...
Load model parameters from yaml Parameters ---------- yamlfile : str Name of the input yaml file.
[ "Load", "model", "parameters", "from", "yaml" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3211-L3247
train
36,063
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis._restore_counts_maps
def _restore_counts_maps(self): """ Revert counts maps to their state prior to injecting any simulated components. """ for c in self.components: c.restore_counts_maps() if hasattr(self.like.components[0].logLike, 'setCountsMap'): self._init_roi_model() else: self.write_xml('tmp') self._like = SummedLikelihood() for i, c in enumerate(self._components): c._create_binned_analysis() self._like.addComponent(c.like) self._init_roi_model() self.load_xml('tmp')
python
def _restore_counts_maps(self): """ Revert counts maps to their state prior to injecting any simulated components. """ for c in self.components: c.restore_counts_maps() if hasattr(self.like.components[0].logLike, 'setCountsMap'): self._init_roi_model() else: self.write_xml('tmp') self._like = SummedLikelihood() for i, c in enumerate(self._components): c._create_binned_analysis() self._like.addComponent(c.like) self._init_roi_model() self.load_xml('tmp')
[ "def", "_restore_counts_maps", "(", "self", ")", ":", "for", "c", "in", "self", ".", "components", ":", "c", ".", "restore_counts_maps", "(", ")", "if", "hasattr", "(", "self", ".", "like", ".", "components", "[", "0", "]", ".", "logLike", ",", "'setCo...
Revert counts maps to their state prior to injecting any simulated components.
[ "Revert", "counts", "maps", "to", "their", "state", "prior", "to", "injecting", "any", "simulated", "components", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3249-L3267
train
36,064
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.simulate_source
def simulate_source(self, src_dict=None): """ Inject simulated source counts into the data. Parameters ---------- src_dict : dict Dictionary defining the spatial and spectral properties of the source that will be injected. """ self._fitcache = None if src_dict is None: src_dict = {} else: src_dict = copy.deepcopy(src_dict) skydir = wcs_utils.get_target_skydir(src_dict, self.roi.skydir) src_dict.setdefault('ra', skydir.ra.deg) src_dict.setdefault('dec', skydir.dec.deg) src_dict.setdefault('SpatialModel', 'PointSource') src_dict.setdefault('SpatialWidth', 0.3) src_dict.setdefault('Index', 2.0) src_dict.setdefault('Prefactor', 1E-13) self.add_source('mcsource', src_dict, free=True, init_source=False) for c in self.components: c.simulate_roi('mcsource', clear=False) self.delete_source('mcsource') if hasattr(self.like.components[0].logLike, 'setCountsMap'): self._init_roi_model() else: self.write_xml('tmp') self._like = SummedLikelihood() for i, c in enumerate(self._components): c._create_binned_analysis('tmp.xml') self._like.addComponent(c.like) self._init_roi_model() self.load_xml('tmp')
python
def simulate_source(self, src_dict=None): """ Inject simulated source counts into the data. Parameters ---------- src_dict : dict Dictionary defining the spatial and spectral properties of the source that will be injected. """ self._fitcache = None if src_dict is None: src_dict = {} else: src_dict = copy.deepcopy(src_dict) skydir = wcs_utils.get_target_skydir(src_dict, self.roi.skydir) src_dict.setdefault('ra', skydir.ra.deg) src_dict.setdefault('dec', skydir.dec.deg) src_dict.setdefault('SpatialModel', 'PointSource') src_dict.setdefault('SpatialWidth', 0.3) src_dict.setdefault('Index', 2.0) src_dict.setdefault('Prefactor', 1E-13) self.add_source('mcsource', src_dict, free=True, init_source=False) for c in self.components: c.simulate_roi('mcsource', clear=False) self.delete_source('mcsource') if hasattr(self.like.components[0].logLike, 'setCountsMap'): self._init_roi_model() else: self.write_xml('tmp') self._like = SummedLikelihood() for i, c in enumerate(self._components): c._create_binned_analysis('tmp.xml') self._like.addComponent(c.like) self._init_roi_model() self.load_xml('tmp')
[ "def", "simulate_source", "(", "self", ",", "src_dict", "=", "None", ")", ":", "self", ".", "_fitcache", "=", "None", "if", "src_dict", "is", "None", ":", "src_dict", "=", "{", "}", "else", ":", "src_dict", "=", "copy", ".", "deepcopy", "(", "src_dict"...
Inject simulated source counts into the data. Parameters ---------- src_dict : dict Dictionary defining the spatial and spectral properties of the source that will be injected.
[ "Inject", "simulated", "source", "counts", "into", "the", "data", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3269-L3313
train
36,065
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.simulate_roi
def simulate_roi(self, name=None, randomize=True, restore=False): """Generate a simulation of the ROI using the current best-fit model and replace the data counts cube with this simulation. The simulation is created by generating an array of Poisson random numbers with expectation values drawn from the model cube of the binned analysis instance. This function will update the counts cube both in memory and in the source map file. The counts cube can be restored to its original state by calling this method with ``restore`` = True. Parameters ---------- name : str Name of the model component to be simulated. If None then the whole ROI will be simulated. restore : bool Restore the data counts cube to its original state. """ self.logger.info('Simulating ROI') self._fitcache = None if restore: self.logger.info('Restoring') self._restore_counts_maps() self.logger.info('Finished') return for c in self.components: c.simulate_roi(name=name, clear=True, randomize=randomize) if hasattr(self.like.components[0].logLike, 'setCountsMap'): self._init_roi_model() else: self.write_xml('tmp') self._like = SummedLikelihood() for i, c in enumerate(self._components): c._create_binned_analysis('tmp.xml') self._like.addComponent(c.like) self._init_roi_model() self.load_xml('tmp') self.logger.info('Finished')
python
def simulate_roi(self, name=None, randomize=True, restore=False): """Generate a simulation of the ROI using the current best-fit model and replace the data counts cube with this simulation. The simulation is created by generating an array of Poisson random numbers with expectation values drawn from the model cube of the binned analysis instance. This function will update the counts cube both in memory and in the source map file. The counts cube can be restored to its original state by calling this method with ``restore`` = True. Parameters ---------- name : str Name of the model component to be simulated. If None then the whole ROI will be simulated. restore : bool Restore the data counts cube to its original state. """ self.logger.info('Simulating ROI') self._fitcache = None if restore: self.logger.info('Restoring') self._restore_counts_maps() self.logger.info('Finished') return for c in self.components: c.simulate_roi(name=name, clear=True, randomize=randomize) if hasattr(self.like.components[0].logLike, 'setCountsMap'): self._init_roi_model() else: self.write_xml('tmp') self._like = SummedLikelihood() for i, c in enumerate(self._components): c._create_binned_analysis('tmp.xml') self._like.addComponent(c.like) self._init_roi_model() self.load_xml('tmp') self.logger.info('Finished')
[ "def", "simulate_roi", "(", "self", ",", "name", "=", "None", ",", "randomize", "=", "True", ",", "restore", "=", "False", ")", ":", "self", ".", "logger", ".", "info", "(", "'Simulating ROI'", ")", "self", ".", "_fitcache", "=", "None", "if", "restore...
Generate a simulation of the ROI using the current best-fit model and replace the data counts cube with this simulation. The simulation is created by generating an array of Poisson random numbers with expectation values drawn from the model cube of the binned analysis instance. This function will update the counts cube both in memory and in the source map file. The counts cube can be restored to its original state by calling this method with ``restore`` = True. Parameters ---------- name : str Name of the model component to be simulated. If None then the whole ROI will be simulated. restore : bool Restore the data counts cube to its original state.
[ "Generate", "a", "simulation", "of", "the", "ROI", "using", "the", "current", "best", "-", "fit", "model", "and", "replace", "the", "data", "counts", "cube", "with", "this", "simulation", ".", "The", "simulation", "is", "created", "by", "generating", "an", ...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3315-L3359
train
36,066
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.load_roi
def load_roi(self, infile, reload_sources=False, params=None, mask=None): """This function reloads the analysis state from a previously saved instance generated with `~fermipy.gtanalysis.GTAnalysis.write_roi`. Parameters ---------- infile : str reload_sources : bool Regenerate source maps for non-diffuse sources. params : str Path to a yaml file with updated parameter values mask : str Path to a fits file with an updated mask """ infile = utils.resolve_path(infile, workdir=self.workdir) roi_file, roi_data = utils.load_data(infile, workdir=self.workdir) self.logger.info('Loading ROI file: %s', roi_file) key_map = {'dfde': 'dnde', 'dfde100': 'dnde100', 'dfde1000': 'dnde1000', 'dfde10000': 'dnde10000', 'dfde_index': 'dnde_index', 'dfde100_index': 'dnde100_index', 'dfde1000_index': 'dnde1000_index', 'dfde10000_index': 'dnde10000_index', 'e2dfde': 'e2dnde', 'e2dfde100': 'e2dnde100', 'e2dfde1000': 'e2dnde1000', 'e2dfde10000': 'e2dnde10000', 'Npred': 'npred', 'Npred_wt': 'npred_wt', 'logLike': 'loglike', 'dlogLike': 'dloglike', 'emin': 'e_min', 'ectr': 'e_ctr', 'emax': 'e_max', 'logemin': 'loge_min', 'logectr': 'loge_ctr', 'logemax': 'loge_max', 'ref_dfde': 'ref_dnde', 'ref_e2dfde': 'ref_e2dnde', 'ref_dfde_emin': 'ref_dnde_e_min', 'ref_dfde_emax': 'ref_dnde_e_max', } self._roi_data = utils.update_keys(roi_data['roi'], key_map) if 'erange' in self._roi_data: self._roi_data['loge_bounds'] = self._roi_data.pop('erange') self._loge_bounds = self._roi_data.setdefault('loge_bounds', self.loge_bounds) sources = roi_data.pop('sources') sources = utils.update_keys(sources, key_map) for k0, v0 in sources.items(): for k, v in defaults.source_flux_output.items(): if k not in v0: continue if v[2] == float and isinstance(v0[k], np.ndarray): sources[k0][k], sources[k0][k + '_err'] \ = v0[k][0], v0[k][1] self.roi.load_sources(sources.values()) for i, c in enumerate(self.components): if 'src_expscale' in self._roi_data['components'][i]: c._src_expscale = 
copy.deepcopy(self._roi_data['components'] [i]['src_expscale']) self._create_likelihood(infile) self.set_energy_range(self.loge_bounds[0], self.loge_bounds[1]) if params is not None: self.load_parameters_from_yaml(params) if mask is not None: self.set_weights_map(mask, update_roi=False) if reload_sources: names = [s.name for s in self.roi.sources if not s.diffuse] self.reload_sources(names, False) self.logger.info('Finished Loading ROI')
python
def load_roi(self, infile, reload_sources=False, params=None, mask=None): """This function reloads the analysis state from a previously saved instance generated with `~fermipy.gtanalysis.GTAnalysis.write_roi`. Parameters ---------- infile : str reload_sources : bool Regenerate source maps for non-diffuse sources. params : str Path to a yaml file with updated parameter values mask : str Path to a fits file with an updated mask """ infile = utils.resolve_path(infile, workdir=self.workdir) roi_file, roi_data = utils.load_data(infile, workdir=self.workdir) self.logger.info('Loading ROI file: %s', roi_file) key_map = {'dfde': 'dnde', 'dfde100': 'dnde100', 'dfde1000': 'dnde1000', 'dfde10000': 'dnde10000', 'dfde_index': 'dnde_index', 'dfde100_index': 'dnde100_index', 'dfde1000_index': 'dnde1000_index', 'dfde10000_index': 'dnde10000_index', 'e2dfde': 'e2dnde', 'e2dfde100': 'e2dnde100', 'e2dfde1000': 'e2dnde1000', 'e2dfde10000': 'e2dnde10000', 'Npred': 'npred', 'Npred_wt': 'npred_wt', 'logLike': 'loglike', 'dlogLike': 'dloglike', 'emin': 'e_min', 'ectr': 'e_ctr', 'emax': 'e_max', 'logemin': 'loge_min', 'logectr': 'loge_ctr', 'logemax': 'loge_max', 'ref_dfde': 'ref_dnde', 'ref_e2dfde': 'ref_e2dnde', 'ref_dfde_emin': 'ref_dnde_e_min', 'ref_dfde_emax': 'ref_dnde_e_max', } self._roi_data = utils.update_keys(roi_data['roi'], key_map) if 'erange' in self._roi_data: self._roi_data['loge_bounds'] = self._roi_data.pop('erange') self._loge_bounds = self._roi_data.setdefault('loge_bounds', self.loge_bounds) sources = roi_data.pop('sources') sources = utils.update_keys(sources, key_map) for k0, v0 in sources.items(): for k, v in defaults.source_flux_output.items(): if k not in v0: continue if v[2] == float and isinstance(v0[k], np.ndarray): sources[k0][k], sources[k0][k + '_err'] \ = v0[k][0], v0[k][1] self.roi.load_sources(sources.values()) for i, c in enumerate(self.components): if 'src_expscale' in self._roi_data['components'][i]: c._src_expscale = 
copy.deepcopy(self._roi_data['components'] [i]['src_expscale']) self._create_likelihood(infile) self.set_energy_range(self.loge_bounds[0], self.loge_bounds[1]) if params is not None: self.load_parameters_from_yaml(params) if mask is not None: self.set_weights_map(mask, update_roi=False) if reload_sources: names = [s.name for s in self.roi.sources if not s.diffuse] self.reload_sources(names, False) self.logger.info('Finished Loading ROI')
[ "def", "load_roi", "(", "self", ",", "infile", ",", "reload_sources", "=", "False", ",", "params", "=", "None", ",", "mask", "=", "None", ")", ":", "infile", "=", "utils", ".", "resolve_path", "(", "infile", ",", "workdir", "=", "self", ".", "workdir",...
This function reloads the analysis state from a previously saved instance generated with `~fermipy.gtanalysis.GTAnalysis.write_roi`. Parameters ---------- infile : str reload_sources : bool Regenerate source maps for non-diffuse sources. params : str Path to a yaml file with updated parameter values mask : str Path to a fits file with an updated mask
[ "This", "function", "reloads", "the", "analysis", "state", "from", "a", "previously", "saved", "instance", "generated", "with", "~fermipy", ".", "gtanalysis", ".", "GTAnalysis", ".", "write_roi", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3519-L3610
train
36,067
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.write_roi
def write_roi(self, outfile=None, save_model_map=False, **kwargs): """Write current state of the analysis to a file. This method writes an XML model definition, a ROI dictionary, and a FITS source catalog file. A previously saved analysis state can be reloaded from the ROI dictionary file with the `~fermipy.gtanalysis.GTAnalysis.load_roi` method. Parameters ---------- outfile : str String prefix of the output files. The extension of this string will be stripped when generating the XML, YAML and npy filenames. make_plots : bool Generate diagnostic plots. save_model_map : bool Save the current counts model to a FITS file. """ # extract the results in a convenient format make_plots = kwargs.get('make_plots', False) save_weight_map = kwargs.get('save_weight_map', False) if outfile is None: pathprefix = os.path.join(self.config['fileio']['workdir'], 'results') elif not os.path.isabs(outfile): pathprefix = os.path.join(self.config['fileio']['workdir'], outfile) else: pathprefix = outfile pathprefix = utils.strip_suffix(pathprefix, ['fits', 'yaml', 'npy']) # pathprefix, ext = os.path.splitext(pathprefix) prefix = os.path.basename(pathprefix) xmlfile = pathprefix + '.xml' fitsfile = pathprefix + '.fits' npyfile = pathprefix + '.npy' self.write_xml(xmlfile) self.write_fits(fitsfile) if not self.config['gtlike']['use_external_srcmap']: for c in self.components: c.like.logLike.saveSourceMaps(str(c.files['srcmap'])) if save_model_map: self.write_model_map(prefix) if save_weight_map: self.write_weight_map(prefix) o = {} o['roi'] = copy.deepcopy(self._roi_data) o['config'] = copy.deepcopy(self.config) o['version'] = fermipy.__version__ o['stversion'] = fermipy.get_st_version() o['sources'] = {} for s in self.roi.sources: o['sources'][s.name] = copy.deepcopy(s.data) for i, c in enumerate(self.components): o['roi']['components'][i][ 'src_expscale'] = copy.deepcopy(c.src_expscale) self.logger.info('Writing %s...', npyfile) np.save(npyfile, o) if make_plots: self.make_plots(prefix, 
None, **kwargs.get('plotting', {}))
python
def write_roi(self, outfile=None, save_model_map=False, **kwargs): """Write current state of the analysis to a file. This method writes an XML model definition, a ROI dictionary, and a FITS source catalog file. A previously saved analysis state can be reloaded from the ROI dictionary file with the `~fermipy.gtanalysis.GTAnalysis.load_roi` method. Parameters ---------- outfile : str String prefix of the output files. The extension of this string will be stripped when generating the XML, YAML and npy filenames. make_plots : bool Generate diagnostic plots. save_model_map : bool Save the current counts model to a FITS file. """ # extract the results in a convenient format make_plots = kwargs.get('make_plots', False) save_weight_map = kwargs.get('save_weight_map', False) if outfile is None: pathprefix = os.path.join(self.config['fileio']['workdir'], 'results') elif not os.path.isabs(outfile): pathprefix = os.path.join(self.config['fileio']['workdir'], outfile) else: pathprefix = outfile pathprefix = utils.strip_suffix(pathprefix, ['fits', 'yaml', 'npy']) # pathprefix, ext = os.path.splitext(pathprefix) prefix = os.path.basename(pathprefix) xmlfile = pathprefix + '.xml' fitsfile = pathprefix + '.fits' npyfile = pathprefix + '.npy' self.write_xml(xmlfile) self.write_fits(fitsfile) if not self.config['gtlike']['use_external_srcmap']: for c in self.components: c.like.logLike.saveSourceMaps(str(c.files['srcmap'])) if save_model_map: self.write_model_map(prefix) if save_weight_map: self.write_weight_map(prefix) o = {} o['roi'] = copy.deepcopy(self._roi_data) o['config'] = copy.deepcopy(self.config) o['version'] = fermipy.__version__ o['stversion'] = fermipy.get_st_version() o['sources'] = {} for s in self.roi.sources: o['sources'][s.name] = copy.deepcopy(s.data) for i, c in enumerate(self.components): o['roi']['components'][i][ 'src_expscale'] = copy.deepcopy(c.src_expscale) self.logger.info('Writing %s...', npyfile) np.save(npyfile, o) if make_plots: self.make_plots(prefix, 
None, **kwargs.get('plotting', {}))
[ "def", "write_roi", "(", "self", ",", "outfile", "=", "None", ",", "save_model_map", "=", "False", ",", "*", "*", "kwargs", ")", ":", "# extract the results in a convenient format", "make_plots", "=", "kwargs", ".", "get", "(", "'make_plots'", ",", "False", ")...
Write current state of the analysis to a file. This method writes an XML model definition, a ROI dictionary, and a FITS source catalog file. A previously saved analysis state can be reloaded from the ROI dictionary file with the `~fermipy.gtanalysis.GTAnalysis.load_roi` method. Parameters ---------- outfile : str String prefix of the output files. The extension of this string will be stripped when generating the XML, YAML and npy filenames. make_plots : bool Generate diagnostic plots. save_model_map : bool Save the current counts model to a FITS file.
[ "Write", "current", "state", "of", "the", "analysis", "to", "a", "file", ".", "This", "method", "writes", "an", "XML", "model", "definition", "a", "ROI", "dictionary", "and", "a", "FITS", "source", "catalog", "file", ".", "A", "previously", "saved", "analy...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3612-L3690
train
36,068
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.make_plots
def make_plots(self, prefix, mcube_map=None, **kwargs): """Make diagnostic plots using the current ROI model.""" #mcube_maps = kwargs.pop('mcube_maps', None) if mcube_map is None: mcube_map = self.model_counts_map() plotter = plotting.AnalysisPlotter(self.config['plotting'], fileio=self.config['fileio'], logging=self.config['logging']) plotter.run(self, mcube_map, prefix=prefix, **kwargs)
python
def make_plots(self, prefix, mcube_map=None, **kwargs): """Make diagnostic plots using the current ROI model.""" #mcube_maps = kwargs.pop('mcube_maps', None) if mcube_map is None: mcube_map = self.model_counts_map() plotter = plotting.AnalysisPlotter(self.config['plotting'], fileio=self.config['fileio'], logging=self.config['logging']) plotter.run(self, mcube_map, prefix=prefix, **kwargs)
[ "def", "make_plots", "(", "self", ",", "prefix", ",", "mcube_map", "=", "None", ",", "*", "*", "kwargs", ")", ":", "#mcube_maps = kwargs.pop('mcube_maps', None)", "if", "mcube_map", "is", "None", ":", "mcube_map", "=", "self", ".", "model_counts_map", "(", ")"...
Make diagnostic plots using the current ROI model.
[ "Make", "diagnostic", "plots", "using", "the", "current", "ROI", "model", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3755-L3765
train
36,069
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.update_source
def update_source(self, name, paramsonly=False, reoptimize=False, **kwargs): """Update the dictionary for this source. Parameters ---------- name : str paramsonly : bool reoptimize : bool Re-fit background parameters in likelihood scan. """ npts = self.config['gtlike']['llscan_npts'] optimizer = kwargs.get('optimizer', self.config['optimizer']) sd = self.get_src_model(name, paramsonly, reoptimize, npts, optimizer=optimizer) src = self.roi.get_source_by_name(name) src.update_data(sd)
python
def update_source(self, name, paramsonly=False, reoptimize=False, **kwargs): """Update the dictionary for this source. Parameters ---------- name : str paramsonly : bool reoptimize : bool Re-fit background parameters in likelihood scan. """ npts = self.config['gtlike']['llscan_npts'] optimizer = kwargs.get('optimizer', self.config['optimizer']) sd = self.get_src_model(name, paramsonly, reoptimize, npts, optimizer=optimizer) src = self.roi.get_source_by_name(name) src.update_data(sd)
[ "def", "update_source", "(", "self", ",", "name", ",", "paramsonly", "=", "False", ",", "reoptimize", "=", "False", ",", "*", "*", "kwargs", ")", ":", "npts", "=", "self", ".", "config", "[", "'gtlike'", "]", "[", "'llscan_npts'", "]", "optimizer", "="...
Update the dictionary for this source. Parameters ---------- name : str paramsonly : bool reoptimize : bool Re-fit background parameters in likelihood scan.
[ "Update", "the", "dictionary", "for", "this", "source", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L3969-L3990
train
36,070
fermiPy/fermipy
fermipy/gtanalysis.py
GTAnalysis.compute_srcprob
def compute_srcprob(self,xmlfile=None, overwrite=False): """Run the gtsrcprob app with the current model or a user provided xmlfile""" for i,c in enumerate(self.components): # compute diffuse response, necessary for srcprob c._diffrsp_app(xmlfile=xmlfile) # compute srcprob c._srcprob_app(xmlfile = xmlfile, overwrite = overwrite)
python
def compute_srcprob(self,xmlfile=None, overwrite=False): """Run the gtsrcprob app with the current model or a user provided xmlfile""" for i,c in enumerate(self.components): # compute diffuse response, necessary for srcprob c._diffrsp_app(xmlfile=xmlfile) # compute srcprob c._srcprob_app(xmlfile = xmlfile, overwrite = overwrite)
[ "def", "compute_srcprob", "(", "self", ",", "xmlfile", "=", "None", ",", "overwrite", "=", "False", ")", ":", "for", "i", ",", "c", "in", "enumerate", "(", "self", ".", "components", ")", ":", "# compute diffuse response, necessary for srcprob", "c", ".", "_...
Run the gtsrcprob app with the current model or a user provided xmlfile
[ "Run", "the", "gtsrcprob", "app", "with", "the", "current", "model", "or", "a", "user", "provided", "xmlfile" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4231-L4238
train
36,071
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.reload_source
def reload_source(self, name): """Recompute the source map for a single source in the model. """ src = self.roi.get_source_by_name(name) if hasattr(self.like.logLike, 'loadSourceMap'): self.like.logLike.loadSourceMap(str(name), True, False) srcmap_utils.delete_source_map(self.files['srcmap'], name) self.like.logLike.saveSourceMaps(str(self.files['srcmap'])) self._scale_srcmap(self._src_expscale, check_header=False, names=[name]) self.like.logLike.buildFixedModelWts() else: self.write_xml('tmp') src = self.delete_source(name) self.add_source(name, src, free=True) self.load_xml('tmp')
python
def reload_source(self, name): """Recompute the source map for a single source in the model. """ src = self.roi.get_source_by_name(name) if hasattr(self.like.logLike, 'loadSourceMap'): self.like.logLike.loadSourceMap(str(name), True, False) srcmap_utils.delete_source_map(self.files['srcmap'], name) self.like.logLike.saveSourceMaps(str(self.files['srcmap'])) self._scale_srcmap(self._src_expscale, check_header=False, names=[name]) self.like.logLike.buildFixedModelWts() else: self.write_xml('tmp') src = self.delete_source(name) self.add_source(name, src, free=True) self.load_xml('tmp')
[ "def", "reload_source", "(", "self", ",", "name", ")", ":", "src", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", "if", "hasattr", "(", "self", ".", "like", ".", "logLike", ",", "'loadSourceMap'", ")", ":", "self", ".", "like", ...
Recompute the source map for a single source in the model.
[ "Recompute", "the", "source", "map", "for", "a", "single", "source", "in", "the", "model", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4528-L4545
train
36,072
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.reload_sources
def reload_sources(self, names): """Recompute the source map for a list of sources in the model. """ try: self.like.logLike.loadSourceMaps(names, True, True) # loadSourceMaps doesn't overwrite the header so we need # to ignore EXPSCALE by setting check_header=False self._scale_srcmap(self._src_expscale, check_header=False, names=names) except: for name in names: self.reload_source(name)
python
def reload_sources(self, names): """Recompute the source map for a list of sources in the model. """ try: self.like.logLike.loadSourceMaps(names, True, True) # loadSourceMaps doesn't overwrite the header so we need # to ignore EXPSCALE by setting check_header=False self._scale_srcmap(self._src_expscale, check_header=False, names=names) except: for name in names: self.reload_source(name)
[ "def", "reload_sources", "(", "self", ",", "names", ")", ":", "try", ":", "self", ".", "like", ".", "logLike", ".", "loadSourceMaps", "(", "names", ",", "True", ",", "True", ")", "# loadSourceMaps doesn't overwrite the header so we need", "# to ignore EXPSCALE by se...
Recompute the source map for a list of sources in the model.
[ "Recompute", "the", "source", "map", "for", "a", "list", "of", "sources", "in", "the", "model", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4547-L4559
train
36,073
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._create_source
def _create_source(self, src): """Create a pyLikelihood Source object from a `~fermipy.roi_model.Model` object.""" if src['SpatialType'] == 'SkyDirFunction': pylike_src = pyLike.PointSource(self.like.logLike.observation()) pylike_src.setDir(src.skydir.ra.deg, src.skydir.dec.deg, False, False) elif src['SpatialType'] == 'SpatialMap': filepath = str(utils.path_to_xmlpath(src['Spatial_Filename'])) sm = pyLike.SpatialMap(filepath) pylike_src = pyLike.DiffuseSource(sm, self.like.logLike.observation(), False) elif src['SpatialType'] == 'RadialProfile': filepath = str(utils.path_to_xmlpath(src['Spatial_Filename'])) sm = pyLike.RadialProfile(filepath) sm.setCenter(src['ra'], src['dec']) pylike_src = pyLike.DiffuseSource(sm, self.like.logLike.observation(), False) elif src['SpatialType'] == 'RadialGaussian': sm = pyLike.RadialGaussian(src.skydir.ra.deg, src.skydir.dec.deg, src.spatial_pars['Sigma']['value']) pylike_src = pyLike.DiffuseSource(sm, self.like.logLike.observation(), False) elif src['SpatialType'] == 'RadialDisk': sm = pyLike.RadialDisk(src.skydir.ra.deg, src.skydir.dec.deg, src.spatial_pars['Radius']['value']) pylike_src = pyLike.DiffuseSource(sm, self.like.logLike.observation(), False) elif src['SpatialType'] == 'MapCubeFunction': filepath = str(utils.path_to_xmlpath(src['Spatial_Filename'])) mcf = pyLike.MapCubeFunction2(filepath) pylike_src = pyLike.DiffuseSource(mcf, self.like.logLike.observation(), False) else: raise Exception('Unrecognized spatial type: %s', src['SpatialType']) if src['SpectrumType'] == 'FileFunction': fn = gtutils.create_spectrum_from_dict(src['SpectrumType'], src.spectral_pars) file_function = pyLike.FileFunction_cast(fn) filename = str(os.path.expandvars(src['Spectrum_Filename'])) file_function.readFunction(filename) elif src['SpectrumType'] == 'DMFitFunction': fn = pyLike.DMFitFunction() fn = gtutils.create_spectrum_from_dict(src['SpectrumType'], src.spectral_pars, fn) filename = str(os.path.expandvars(src['Spectrum_Filename'])) 
fn.readFunction(filename) else: fn = gtutils.create_spectrum_from_dict(src['SpectrumType'], src.spectral_pars) pylike_src.setSpectrum(fn) pylike_src.setName(str(src.name)) return pylike_src
python
def _create_source(self, src): """Create a pyLikelihood Source object from a `~fermipy.roi_model.Model` object.""" if src['SpatialType'] == 'SkyDirFunction': pylike_src = pyLike.PointSource(self.like.logLike.observation()) pylike_src.setDir(src.skydir.ra.deg, src.skydir.dec.deg, False, False) elif src['SpatialType'] == 'SpatialMap': filepath = str(utils.path_to_xmlpath(src['Spatial_Filename'])) sm = pyLike.SpatialMap(filepath) pylike_src = pyLike.DiffuseSource(sm, self.like.logLike.observation(), False) elif src['SpatialType'] == 'RadialProfile': filepath = str(utils.path_to_xmlpath(src['Spatial_Filename'])) sm = pyLike.RadialProfile(filepath) sm.setCenter(src['ra'], src['dec']) pylike_src = pyLike.DiffuseSource(sm, self.like.logLike.observation(), False) elif src['SpatialType'] == 'RadialGaussian': sm = pyLike.RadialGaussian(src.skydir.ra.deg, src.skydir.dec.deg, src.spatial_pars['Sigma']['value']) pylike_src = pyLike.DiffuseSource(sm, self.like.logLike.observation(), False) elif src['SpatialType'] == 'RadialDisk': sm = pyLike.RadialDisk(src.skydir.ra.deg, src.skydir.dec.deg, src.spatial_pars['Radius']['value']) pylike_src = pyLike.DiffuseSource(sm, self.like.logLike.observation(), False) elif src['SpatialType'] == 'MapCubeFunction': filepath = str(utils.path_to_xmlpath(src['Spatial_Filename'])) mcf = pyLike.MapCubeFunction2(filepath) pylike_src = pyLike.DiffuseSource(mcf, self.like.logLike.observation(), False) else: raise Exception('Unrecognized spatial type: %s', src['SpatialType']) if src['SpectrumType'] == 'FileFunction': fn = gtutils.create_spectrum_from_dict(src['SpectrumType'], src.spectral_pars) file_function = pyLike.FileFunction_cast(fn) filename = str(os.path.expandvars(src['Spectrum_Filename'])) file_function.readFunction(filename) elif src['SpectrumType'] == 'DMFitFunction': fn = pyLike.DMFitFunction() fn = gtutils.create_spectrum_from_dict(src['SpectrumType'], src.spectral_pars, fn) filename = str(os.path.expandvars(src['Spectrum_Filename'])) 
fn.readFunction(filename) else: fn = gtutils.create_spectrum_from_dict(src['SpectrumType'], src.spectral_pars) pylike_src.setSpectrum(fn) pylike_src.setName(str(src.name)) return pylike_src
[ "def", "_create_source", "(", "self", ",", "src", ")", ":", "if", "src", "[", "'SpatialType'", "]", "==", "'SkyDirFunction'", ":", "pylike_src", "=", "pyLike", ".", "PointSource", "(", "self", ".", "like", ".", "logLike", ".", "observation", "(", ")", ")...
Create a pyLikelihood Source object from a `~fermipy.roi_model.Model` object.
[ "Create", "a", "pyLikelihood", "Source", "object", "from", "a", "~fermipy", ".", "roi_model", ".", "Model", "object", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4629-L4695
train
36,074
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.set_exposure_scale
def set_exposure_scale(self, name, scale=None): """Set the exposure correction of a source. Parameters ---------- name : str Source name. scale : factor Exposure scale factor (1.0 = nominal exposure). """ name = self.roi.get_source_by_name(name).name if scale is None and name not in self._src_expscale: return elif scale is None: scale = self._src_expscale.get(name, 1.0) else: self._src_expscale[name] = scale self._scale_srcmap({name: scale})
python
def set_exposure_scale(self, name, scale=None): """Set the exposure correction of a source. Parameters ---------- name : str Source name. scale : factor Exposure scale factor (1.0 = nominal exposure). """ name = self.roi.get_source_by_name(name).name if scale is None and name not in self._src_expscale: return elif scale is None: scale = self._src_expscale.get(name, 1.0) else: self._src_expscale[name] = scale self._scale_srcmap({name: scale})
[ "def", "set_exposure_scale", "(", "self", ",", "name", ",", "scale", "=", "None", ")", ":", "name", "=", "self", ".", "roi", ".", "get_source_by_name", "(", "name", ")", ".", "name", "if", "scale", "is", "None", "and", "name", "not", "in", "self", "....
Set the exposure correction of a source. Parameters ---------- name : str Source name. scale : factor Exposure scale factor (1.0 = nominal exposure).
[ "Set", "the", "exposure", "correction", "of", "a", "source", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4727-L4746
train
36,075
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.set_energy_range
def set_energy_range(self, logemin, logemax): """Set the energy range of the analysis. Parameters ---------- logemin: float Lower end of energy range in log10(E/MeV). logemax : float Upper end of energy range in log10(E/MeV). """ if logemin is None: logemin = self.log_energies[0] if logemax is None: logemax = self.log_energies[-1] imin = int(utils.val_to_edge(self.log_energies, logemin)[0]) imax = int(utils.val_to_edge(self.log_energies, logemax)[0]) if imin - imax == 0: imin = int(len(self.log_energies) - 1) imax = int(len(self.log_energies) - 1) klims = self.like.logLike.klims() if imin != klims[0] or imax != klims[1]: self.like.selectEbounds(imin, imax) return np.array([self.log_energies[imin], self.log_energies[imax]])
python
def set_energy_range(self, logemin, logemax): """Set the energy range of the analysis. Parameters ---------- logemin: float Lower end of energy range in log10(E/MeV). logemax : float Upper end of energy range in log10(E/MeV). """ if logemin is None: logemin = self.log_energies[0] if logemax is None: logemax = self.log_energies[-1] imin = int(utils.val_to_edge(self.log_energies, logemin)[0]) imax = int(utils.val_to_edge(self.log_energies, logemax)[0]) if imin - imax == 0: imin = int(len(self.log_energies) - 1) imax = int(len(self.log_energies) - 1) klims = self.like.logLike.klims() if imin != klims[0] or imax != klims[1]: self.like.selectEbounds(imin, imax) return np.array([self.log_energies[imin], self.log_energies[imax]])
[ "def", "set_energy_range", "(", "self", ",", "logemin", ",", "logemax", ")", ":", "if", "logemin", "is", "None", ":", "logemin", "=", "self", ".", "log_energies", "[", "0", "]", "if", "logemax", "is", "None", ":", "logemax", "=", "self", ".", "log_ener...
Set the energy range of the analysis. Parameters ---------- logemin: float Lower end of energy range in log10(E/MeV). logemax : float Upper end of energy range in log10(E/MeV).
[ "Set", "the", "energy", "range", "of", "the", "analysis", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4755-L4785
train
36,076
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.counts_map
def counts_map(self): """Return 3-D counts map for this component as a Map object. Returns ------- map : `~fermipy.skymap.MapBase` """ try: if isinstance(self.like, gtutils.SummedLikelihood): cmap = self.like.components[0].logLike.countsMap() p_method = cmap.projection().method() else: cmap = self.like.logLike.countsMap() p_method = cmap.projection().method() except Exception: p_method = 0 if p_method == 0: # WCS z = cmap.data() z = np.array(z).reshape(self.enumbins, self.npix, self.npix) return WcsNDMap(copy.deepcopy(self.geom), z) elif p_method == 1: # HPX z = cmap.data() z = np.array(z).reshape(self.enumbins, np.max(self.geom.npix)) return HpxNDMap(copy.deepcopy(self.geom), z) else: self.logger.error('Did not recognize CountsMap type %i' % p_method, exc_info=True) return None
python
def counts_map(self): """Return 3-D counts map for this component as a Map object. Returns ------- map : `~fermipy.skymap.MapBase` """ try: if isinstance(self.like, gtutils.SummedLikelihood): cmap = self.like.components[0].logLike.countsMap() p_method = cmap.projection().method() else: cmap = self.like.logLike.countsMap() p_method = cmap.projection().method() except Exception: p_method = 0 if p_method == 0: # WCS z = cmap.data() z = np.array(z).reshape(self.enumbins, self.npix, self.npix) return WcsNDMap(copy.deepcopy(self.geom), z) elif p_method == 1: # HPX z = cmap.data() z = np.array(z).reshape(self.enumbins, np.max(self.geom.npix)) return HpxNDMap(copy.deepcopy(self.geom), z) else: self.logger.error('Did not recognize CountsMap type %i' % p_method, exc_info=True) return None
[ "def", "counts_map", "(", "self", ")", ":", "try", ":", "if", "isinstance", "(", "self", ".", "like", ",", "gtutils", ".", "SummedLikelihood", ")", ":", "cmap", "=", "self", ".", "like", ".", "components", "[", "0", "]", ".", "logLike", ".", "countsM...
Return 3-D counts map for this component as a Map object. Returns ------- map : `~fermipy.skymap.MapBase`
[ "Return", "3", "-", "D", "counts", "map", "for", "this", "component", "as", "a", "Map", "object", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4787-L4816
train
36,077
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.weight_map
def weight_map(self): """Return 3-D weights map for this component as a Map object. Returns ------- map : `~fermipy.skymap.MapBase` """ # EAC we need the try blocks b/c older versions of the ST don't have some of these functions if isinstance(self.like, gtutils.SummedLikelihood): cmap = self.like.components[0].logLike.countsMap() try: p_method = cmap.projection().method() except AttributeError: p_method = 0 try: if self.like.components[0].logLike.has_weights(): wmap = self.like.components[0].logLike.weightMap() else: wmap = None except Exception: wmap = None else: cmap = self.like.logLike.countsMap() try: p_method = cmap.projection().method() except AttributeError: p_method = 0 try: if self.like.logLike.has_weights(): wmap = self.like.logLike.weightMap() else: wmap = None except Exception: wmap = None if p_method == 0: # WCS if wmap is None: z = np.ones((self.enumbins, self.npix, self.npix)) else: z = wmap.model() z = np.array(z).reshape(self.enumbins, self.npix, self.npix) return WcsNDMap(copy.deepcopy(self._geom), z) elif p_method == 1: # HPX nhpix = np.max(self.geom.npix) if wmap is None: z = np.ones((self.enumbins, nhpix)) else: z = wmap.model() z = np.array(z).reshape(self.enumbins, nhpix) return HpxNDMap(self.geom, z) else: self.logger.error('Did not recognize CountsMap type %i' % p_method, exc_info=True) return None
python
def weight_map(self): """Return 3-D weights map for this component as a Map object. Returns ------- map : `~fermipy.skymap.MapBase` """ # EAC we need the try blocks b/c older versions of the ST don't have some of these functions if isinstance(self.like, gtutils.SummedLikelihood): cmap = self.like.components[0].logLike.countsMap() try: p_method = cmap.projection().method() except AttributeError: p_method = 0 try: if self.like.components[0].logLike.has_weights(): wmap = self.like.components[0].logLike.weightMap() else: wmap = None except Exception: wmap = None else: cmap = self.like.logLike.countsMap() try: p_method = cmap.projection().method() except AttributeError: p_method = 0 try: if self.like.logLike.has_weights(): wmap = self.like.logLike.weightMap() else: wmap = None except Exception: wmap = None if p_method == 0: # WCS if wmap is None: z = np.ones((self.enumbins, self.npix, self.npix)) else: z = wmap.model() z = np.array(z).reshape(self.enumbins, self.npix, self.npix) return WcsNDMap(copy.deepcopy(self._geom), z) elif p_method == 1: # HPX nhpix = np.max(self.geom.npix) if wmap is None: z = np.ones((self.enumbins, nhpix)) else: z = wmap.model() z = np.array(z).reshape(self.enumbins, nhpix) return HpxNDMap(self.geom, z) else: self.logger.error('Did not recognize CountsMap type %i' % p_method, exc_info=True) return None
[ "def", "weight_map", "(", "self", ")", ":", "# EAC we need the try blocks b/c older versions of the ST don't have some of these functions", "if", "isinstance", "(", "self", ".", "like", ",", "gtutils", ".", "SummedLikelihood", ")", ":", "cmap", "=", "self", ".", "like",...
Return 3-D weights map for this component as a Map object. Returns ------- map : `~fermipy.skymap.MapBase`
[ "Return", "3", "-", "D", "weights", "map", "for", "this", "component", "as", "a", "Map", "object", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4818-L4872
train
36,078
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.model_counts_spectrum
def model_counts_spectrum(self, name, logemin, logemax, weighted=False): """Return the model counts spectrum of a source. Parameters ---------- name : str Source name. """ # EAC, we need this b/c older version of the ST don't have the right signature try: cs = np.array(self.like.logLike.modelCountsSpectrum( str(name), weighted)) except (TypeError, NotImplementedError): cs = np.array(self.like.logLike.modelCountsSpectrum(str(name))) imin = utils.val_to_edge(self.log_energies, logemin)[0] imax = utils.val_to_edge(self.log_energies, logemax)[0] if imax <= imin: raise Exception('Invalid energy range.') return cs[imin:imax]
python
def model_counts_spectrum(self, name, logemin, logemax, weighted=False): """Return the model counts spectrum of a source. Parameters ---------- name : str Source name. """ # EAC, we need this b/c older version of the ST don't have the right signature try: cs = np.array(self.like.logLike.modelCountsSpectrum( str(name), weighted)) except (TypeError, NotImplementedError): cs = np.array(self.like.logLike.modelCountsSpectrum(str(name))) imin = utils.val_to_edge(self.log_energies, logemin)[0] imax = utils.val_to_edge(self.log_energies, logemax)[0] if imax <= imin: raise Exception('Invalid energy range.') return cs[imin:imax]
[ "def", "model_counts_spectrum", "(", "self", ",", "name", ",", "logemin", ",", "logemax", ",", "weighted", "=", "False", ")", ":", "# EAC, we need this b/c older version of the ST don't have the right signature", "try", ":", "cs", "=", "np", ".", "array", "(", "self...
Return the model counts spectrum of a source. Parameters ---------- name : str Source name.
[ "Return", "the", "model", "counts", "spectrum", "of", "a", "source", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L4982-L5001
train
36,079
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.setup
def setup(self, overwrite=False, **kwargs): """Run pre-processing step for this component. This will generate all of the auxiliary files needed to instantiate a likelihood object. By default this function will skip any steps for which the output file already exists. Parameters ---------- overwrite : bool Run all pre-processing steps even if the output file of that step is present in the working directory. """ loglevel = kwargs.get('loglevel', self.loglevel) self.logger.log(loglevel, 'Running setup for component %s', self.name) use_external_srcmap = self.config['gtlike']['use_external_srcmap'] # Run data selection if not use_external_srcmap: self._select_data(overwrite=overwrite, **kwargs) # Create LT Cube if self._ext_ltcube is not None: self.logger.log(loglevel, 'Using external LT cube.') else: self._create_ltcube(overwrite=overwrite, **kwargs) self.logger.debug('Loading LT Cube %s', self.files['ltcube']) self._ltc = LTCube.create(self.files['ltcube']) # Extract tmin, tmax from LT cube self._tmin = self._ltc.tstart self._tmax = self._ltc.tstop self.logger.debug('Creating PSF model') self._psf = irfs.PSFModel.create(self.roi.skydir, self._ltc, self.config['gtlike']['irfs'], self.config['selection']['evtype'], self.energies) # Bin data and create exposure cube if not use_external_srcmap: self._bin_data(overwrite=overwrite, **kwargs) self._create_expcube(overwrite=overwrite, **kwargs) # This is needed in case the exposure map is in HEALPix hpxhduname = "HPXEXPOSURES" try: self._bexp = Map.read(self.files['bexpmap'], hdu=hpxhduname) except KeyError: self._bexp = Map.read(self.files['bexpmap']) # Write ROI XML self.roi.write_xml(self.files['srcmdl'], self.config['model']) # Create source maps file if not use_external_srcmap: self._create_srcmaps(overwrite=overwrite) if not self.config['data']['cacheft1'] and os.path.isfile(self.files['ft1']): self.logger.debug('Deleting FT1 file.') os.remove(self.files['ft1']) self.logger.log(loglevel, 'Finished setup for component 
%s', self.name)
python
def setup(self, overwrite=False, **kwargs): """Run pre-processing step for this component. This will generate all of the auxiliary files needed to instantiate a likelihood object. By default this function will skip any steps for which the output file already exists. Parameters ---------- overwrite : bool Run all pre-processing steps even if the output file of that step is present in the working directory. """ loglevel = kwargs.get('loglevel', self.loglevel) self.logger.log(loglevel, 'Running setup for component %s', self.name) use_external_srcmap = self.config['gtlike']['use_external_srcmap'] # Run data selection if not use_external_srcmap: self._select_data(overwrite=overwrite, **kwargs) # Create LT Cube if self._ext_ltcube is not None: self.logger.log(loglevel, 'Using external LT cube.') else: self._create_ltcube(overwrite=overwrite, **kwargs) self.logger.debug('Loading LT Cube %s', self.files['ltcube']) self._ltc = LTCube.create(self.files['ltcube']) # Extract tmin, tmax from LT cube self._tmin = self._ltc.tstart self._tmax = self._ltc.tstop self.logger.debug('Creating PSF model') self._psf = irfs.PSFModel.create(self.roi.skydir, self._ltc, self.config['gtlike']['irfs'], self.config['selection']['evtype'], self.energies) # Bin data and create exposure cube if not use_external_srcmap: self._bin_data(overwrite=overwrite, **kwargs) self._create_expcube(overwrite=overwrite, **kwargs) # This is needed in case the exposure map is in HEALPix hpxhduname = "HPXEXPOSURES" try: self._bexp = Map.read(self.files['bexpmap'], hdu=hpxhduname) except KeyError: self._bexp = Map.read(self.files['bexpmap']) # Write ROI XML self.roi.write_xml(self.files['srcmdl'], self.config['model']) # Create source maps file if not use_external_srcmap: self._create_srcmaps(overwrite=overwrite) if not self.config['data']['cacheft1'] and os.path.isfile(self.files['ft1']): self.logger.debug('Deleting FT1 file.') os.remove(self.files['ft1']) self.logger.log(loglevel, 'Finished setup for component 
%s', self.name)
[ "def", "setup", "(", "self", ",", "overwrite", "=", "False", ",", "*", "*", "kwargs", ")", ":", "loglevel", "=", "kwargs", ".", "get", "(", "'loglevel'", ",", "self", ".", "loglevel", ")", "self", ".", "logger", ".", "log", "(", "loglevel", ",", "'...
Run pre-processing step for this component. This will generate all of the auxiliary files needed to instantiate a likelihood object. By default this function will skip any steps for which the output file already exists. Parameters ---------- overwrite : bool Run all pre-processing steps even if the output file of that step is present in the working directory.
[ "Run", "pre", "-", "processing", "step", "for", "this", "component", ".", "This", "will", "generate", "all", "of", "the", "auxiliary", "files", "needed", "to", "instantiate", "a", "likelihood", "object", ".", "By", "default", "this", "function", "will", "ski...
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5003-L5071
train
36,080
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._scale_srcmap
def _scale_srcmap(self, scale_map, check_header=True, names=None): """Apply exposure corrections to the source map file. Parameters ---------- scale_map : dict Dictionary of exposure corrections. check_header : bool Check EXPSCALE header keyword to see if an exposure correction has already been applied to this source. names : list, optional Names of sources to which the exposure correction will be applied. If None then all sources will be corrected. """ srcmap = fits.open(self.files['srcmap']) for hdu in srcmap[1:]: if hdu.name not in scale_map: continue if names is not None and hdu.name not in names: continue scale = scale_map[hdu.name] if scale < 1e-20: self.logger.warning( "The expscale parameter was zero, setting it to 1e-8") scale = 1e-8 if 'EXPSCALE' in hdu.header and check_header: old_scale = hdu.header['EXPSCALE'] else: old_scale = 1.0 hdu.data *= scale / old_scale hdu.header['EXPSCALE'] = (scale, 'Exposure correction applied to this map') srcmap.writeto(self.files['srcmap'], overwrite=True) srcmap.close() # Force reloading the map from disk for name in scale_map.keys(): self.like.logLike.eraseSourceMap(str(name)) self.like.logLike.buildFixedModelWts()
python
def _scale_srcmap(self, scale_map, check_header=True, names=None): """Apply exposure corrections to the source map file. Parameters ---------- scale_map : dict Dictionary of exposure corrections. check_header : bool Check EXPSCALE header keyword to see if an exposure correction has already been applied to this source. names : list, optional Names of sources to which the exposure correction will be applied. If None then all sources will be corrected. """ srcmap = fits.open(self.files['srcmap']) for hdu in srcmap[1:]: if hdu.name not in scale_map: continue if names is not None and hdu.name not in names: continue scale = scale_map[hdu.name] if scale < 1e-20: self.logger.warning( "The expscale parameter was zero, setting it to 1e-8") scale = 1e-8 if 'EXPSCALE' in hdu.header and check_header: old_scale = hdu.header['EXPSCALE'] else: old_scale = 1.0 hdu.data *= scale / old_scale hdu.header['EXPSCALE'] = (scale, 'Exposure correction applied to this map') srcmap.writeto(self.files['srcmap'], overwrite=True) srcmap.close() # Force reloading the map from disk for name in scale_map.keys(): self.like.logLike.eraseSourceMap(str(name)) self.like.logLike.buildFixedModelWts()
[ "def", "_scale_srcmap", "(", "self", ",", "scale_map", ",", "check_header", "=", "True", ",", "names", "=", "None", ")", ":", "srcmap", "=", "fits", ".", "open", "(", "self", ".", "files", "[", "'srcmap'", "]", ")", "for", "hdu", "in", "srcmap", "[",...
Apply exposure corrections to the source map file. Parameters ---------- scale_map : dict Dictionary of exposure corrections. check_header : bool Check EXPSCALE header keyword to see if an exposure correction has already been applied to this source. names : list, optional Names of sources to which the exposure correction will be applied. If None then all sources will be corrected.
[ "Apply", "exposure", "corrections", "to", "the", "source", "map", "file", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5357-L5401
train
36,081
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._make_scaled_srcmap
def _make_scaled_srcmap(self): """Make an exposure cube with the same binning as the counts map.""" self.logger.info('Computing scaled source map.') bexp0 = fits.open(self.files['bexpmap_roi']) bexp1 = fits.open(self.config['gtlike']['bexpmap']) srcmap = fits.open(self.config['gtlike']['srcmap']) if bexp0[0].data.shape != bexp1[0].data.shape: raise Exception('Wrong shape for input exposure map file.') bexp_ratio = bexp0[0].data / bexp1[0].data self.logger.info( 'Min/Med/Max exposure correction: %f %f %f' % (np.min(bexp_ratio), np.median( bexp_ratio), np.max(bexp_ratio))) for hdu in srcmap[1:]: if hdu.name == 'GTI': continue if hdu.name == 'EBOUNDS': continue hdu.data *= bexp_ratio srcmap.writeto(self.files['srcmap'], overwrite=True)
python
def _make_scaled_srcmap(self): """Make an exposure cube with the same binning as the counts map.""" self.logger.info('Computing scaled source map.') bexp0 = fits.open(self.files['bexpmap_roi']) bexp1 = fits.open(self.config['gtlike']['bexpmap']) srcmap = fits.open(self.config['gtlike']['srcmap']) if bexp0[0].data.shape != bexp1[0].data.shape: raise Exception('Wrong shape for input exposure map file.') bexp_ratio = bexp0[0].data / bexp1[0].data self.logger.info( 'Min/Med/Max exposure correction: %f %f %f' % (np.min(bexp_ratio), np.median( bexp_ratio), np.max(bexp_ratio))) for hdu in srcmap[1:]: if hdu.name == 'GTI': continue if hdu.name == 'EBOUNDS': continue hdu.data *= bexp_ratio srcmap.writeto(self.files['srcmap'], overwrite=True)
[ "def", "_make_scaled_srcmap", "(", "self", ")", ":", "self", ".", "logger", ".", "info", "(", "'Computing scaled source map.'", ")", "bexp0", "=", "fits", ".", "open", "(", "self", ".", "files", "[", "'bexpmap_roi'", "]", ")", "bexp1", "=", "fits", ".", ...
Make an exposure cube with the same binning as the counts map.
[ "Make", "an", "exposure", "cube", "with", "the", "same", "binning", "as", "the", "counts", "map", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5403-L5431
train
36,082
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.simulate_roi
def simulate_roi(self, name=None, clear=True, randomize=True): """Simulate the whole ROI or inject a simulation of one or more model components into the data. Parameters ---------- name : str Name of the model component to be simulated. If None then the whole ROI will be simulated. clear : bool Zero the current counts map before injecting the simulation. randomize : bool Fill with each pixel with random values drawn from a poisson distribution. If false then fill each pixel with the counts expectation value. """ cm = self.counts_map() data = cm.data m = self.model_counts_map(name) if clear: data.fill(0.0) if randomize: if m.data.min()<0.: self.logger.warning('At least on negative value found in model map.' ' Changing it/them to 0') indexcond = np.where( m.data <0. ) m.data[indexcond]=np.zeros(len(m.data[indexcond])) data += np.random.poisson(m.data).astype(float) else: data += m.data if hasattr(self.like.logLike, 'setCountsMap'): self.like.logLike.setCountsMap(np.ravel(data)) srcmap_utils.update_source_maps(self.files['srcmap'], {'PRIMARY': data}, logger=self.logger) cm.write(self.files['ccubemc'], overwrite=True, conv='fgst-ccube')
python
def simulate_roi(self, name=None, clear=True, randomize=True): """Simulate the whole ROI or inject a simulation of one or more model components into the data. Parameters ---------- name : str Name of the model component to be simulated. If None then the whole ROI will be simulated. clear : bool Zero the current counts map before injecting the simulation. randomize : bool Fill with each pixel with random values drawn from a poisson distribution. If false then fill each pixel with the counts expectation value. """ cm = self.counts_map() data = cm.data m = self.model_counts_map(name) if clear: data.fill(0.0) if randomize: if m.data.min()<0.: self.logger.warning('At least on negative value found in model map.' ' Changing it/them to 0') indexcond = np.where( m.data <0. ) m.data[indexcond]=np.zeros(len(m.data[indexcond])) data += np.random.poisson(m.data).astype(float) else: data += m.data if hasattr(self.like.logLike, 'setCountsMap'): self.like.logLike.setCountsMap(np.ravel(data)) srcmap_utils.update_source_maps(self.files['srcmap'], {'PRIMARY': data}, logger=self.logger) cm.write(self.files['ccubemc'], overwrite=True, conv='fgst-ccube')
[ "def", "simulate_roi", "(", "self", ",", "name", "=", "None", ",", "clear", "=", "True", ",", "randomize", "=", "True", ")", ":", "cm", "=", "self", ".", "counts_map", "(", ")", "data", "=", "cm", ".", "data", "m", "=", "self", ".", "model_counts_m...
Simulate the whole ROI or inject a simulation of one or more model components into the data. Parameters ---------- name : str Name of the model component to be simulated. If None then the whole ROI will be simulated. clear : bool Zero the current counts map before injecting the simulation. randomize : bool Fill with each pixel with random values drawn from a poisson distribution. If false then fill each pixel with the counts expectation value.
[ "Simulate", "the", "whole", "ROI", "or", "inject", "a", "simulation", "of", "one", "or", "more", "model", "components", "into", "the", "data", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5444-L5487
train
36,083
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._update_srcmap_file
def _update_srcmap_file(self, sources, overwrite=True): """Check the contents of the source map file and generate source maps for any components that are not present.""" if not os.path.isfile(self.files['srcmap']): return hdulist = fits.open(self.files['srcmap']) hdunames = [hdu.name.upper() for hdu in hdulist] srcmaps = {} for src in sources: if src.name.upper() in hdunames and not overwrite: continue self.logger.debug('Creating source map for %s', src.name) srcmaps[src.name] = self._create_srcmap(src.name, src) if srcmaps: self.logger.debug( 'Updating source map file for component %s.', self.name) srcmap_utils.update_source_maps(self.files['srcmap'], srcmaps, logger=self.logger) hdulist.close()
python
def _update_srcmap_file(self, sources, overwrite=True): """Check the contents of the source map file and generate source maps for any components that are not present.""" if not os.path.isfile(self.files['srcmap']): return hdulist = fits.open(self.files['srcmap']) hdunames = [hdu.name.upper() for hdu in hdulist] srcmaps = {} for src in sources: if src.name.upper() in hdunames and not overwrite: continue self.logger.debug('Creating source map for %s', src.name) srcmaps[src.name] = self._create_srcmap(src.name, src) if srcmaps: self.logger.debug( 'Updating source map file for component %s.', self.name) srcmap_utils.update_source_maps(self.files['srcmap'], srcmaps, logger=self.logger) hdulist.close()
[ "def", "_update_srcmap_file", "(", "self", ",", "sources", ",", "overwrite", "=", "True", ")", ":", "if", "not", "os", ".", "path", ".", "isfile", "(", "self", ".", "files", "[", "'srcmap'", "]", ")", ":", "return", "hdulist", "=", "fits", ".", "open...
Check the contents of the source map file and generate source maps for any components that are not present.
[ "Check", "the", "contents", "of", "the", "source", "map", "file", "and", "generate", "source", "maps", "for", "any", "components", "that", "are", "not", "present", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5527-L5551
train
36,084
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._create_srcmap
def _create_srcmap(self, name, src, **kwargs): """Generate the source map for a source.""" psf_scale_fn = kwargs.get('psf_scale_fn', None) skydir = src.skydir spatial_model = src['SpatialModel'] spatial_width = src['SpatialWidth'] xpix, ypix = self.geom.to_image().coord_to_pix(skydir) exp = self._bexp.interp_by_coord( (skydir, self._bexp.geom.axes[0].center)) cache = self._srcmap_cache.get(name, None) if cache is not None: k = cache.create_map([ypix, xpix]) else: k = srcmap_utils.make_srcmap(self._psf, exp, spatial_model, spatial_width, npix=self.npix, xpix=xpix, ypix=ypix, cdelt=self.config['binning']['binsz'], psf_scale_fn=psf_scale_fn, sparse=True) return k
python
def _create_srcmap(self, name, src, **kwargs): """Generate the source map for a source.""" psf_scale_fn = kwargs.get('psf_scale_fn', None) skydir = src.skydir spatial_model = src['SpatialModel'] spatial_width = src['SpatialWidth'] xpix, ypix = self.geom.to_image().coord_to_pix(skydir) exp = self._bexp.interp_by_coord( (skydir, self._bexp.geom.axes[0].center)) cache = self._srcmap_cache.get(name, None) if cache is not None: k = cache.create_map([ypix, xpix]) else: k = srcmap_utils.make_srcmap(self._psf, exp, spatial_model, spatial_width, npix=self.npix, xpix=xpix, ypix=ypix, cdelt=self.config['binning']['binsz'], psf_scale_fn=psf_scale_fn, sparse=True) return k
[ "def", "_create_srcmap", "(", "self", ",", "name", ",", "src", ",", "*", "*", "kwargs", ")", ":", "psf_scale_fn", "=", "kwargs", ".", "get", "(", "'psf_scale_fn'", ",", "None", ")", "skydir", "=", "src", ".", "skydir", "spatial_model", "=", "src", "[",...
Generate the source map for a source.
[ "Generate", "the", "source", "map", "for", "a", "source", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5571-L5592
train
36,085
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._update_srcmap
def _update_srcmap(self, name, src, **kwargs): """Update the source map for an existing source in memory.""" k = self._create_srcmap(name, src, **kwargs) scale = self._src_expscale.get(name, 1.0) k *= scale # Force the source map to be cached # FIXME: No longer necessary to force cacheing in ST after 11-05-02 self.like.logLike.sourceMap(str(name)).model() self.like.logLike.setSourceMapImage(str(name), np.ravel(k)) self.like.logLike.sourceMap(str(name)).model() normPar = self.like.normPar(name) if not normPar.isFree(): self.like.logLike.buildFixedModelWts()
python
def _update_srcmap(self, name, src, **kwargs): """Update the source map for an existing source in memory.""" k = self._create_srcmap(name, src, **kwargs) scale = self._src_expscale.get(name, 1.0) k *= scale # Force the source map to be cached # FIXME: No longer necessary to force cacheing in ST after 11-05-02 self.like.logLike.sourceMap(str(name)).model() self.like.logLike.setSourceMapImage(str(name), np.ravel(k)) self.like.logLike.sourceMap(str(name)).model() normPar = self.like.normPar(name) if not normPar.isFree(): self.like.logLike.buildFixedModelWts()
[ "def", "_update_srcmap", "(", "self", ",", "name", ",", "src", ",", "*", "*", "kwargs", ")", ":", "k", "=", "self", ".", "_create_srcmap", "(", "name", ",", "src", ",", "*", "*", "kwargs", ")", "scale", "=", "self", ".", "_src_expscale", ".", "get"...
Update the source map for an existing source in memory.
[ "Update", "the", "source", "map", "for", "an", "existing", "source", "in", "memory", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5594-L5609
train
36,086
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.generate_model
def generate_model(self, model_name=None, outfile=None): """Generate a counts model map from an XML model file using gtmodel. Parameters ---------- model_name : str Name of the model. If no name is given it will use the baseline model. outfile : str Override the name of the output model file. """ if model_name is not None: model_name = os.path.splitext(model_name)[0] if model_name is None or model_name == '': srcmdl = self.files['srcmdl'] else: srcmdl = self.get_model_path(model_name) if not os.path.isfile(srcmdl): raise Exception("Model file does not exist: %s", srcmdl) if model_name is None: suffix = self.config['file_suffix'] else: suffix = '_%s%s' % (model_name, self.config['file_suffix']) outfile = os.path.join(self.config['fileio']['workdir'], 'mcube%s.fits' % (suffix)) # May consider generating a custom source model file if not os.path.isfile(outfile): kw = dict(srcmaps=self.files['srcmap'], srcmdl=srcmdl, bexpmap=self.files['bexpmap'], outfile=outfile, expcube=self.files['ltcube'], irfs=self.config['gtlike']['irfs'], evtype=self.config['selection']['evtype'], edisp=bool(self.config['gtlike']['edisp']), outtype='ccube', chatter=self.config['logging']['chatter']) run_gtapp('gtmodel', self.logger, kw) else: self.logger.info('Skipping gtmodel')
python
def generate_model(self, model_name=None, outfile=None): """Generate a counts model map from an XML model file using gtmodel. Parameters ---------- model_name : str Name of the model. If no name is given it will use the baseline model. outfile : str Override the name of the output model file. """ if model_name is not None: model_name = os.path.splitext(model_name)[0] if model_name is None or model_name == '': srcmdl = self.files['srcmdl'] else: srcmdl = self.get_model_path(model_name) if not os.path.isfile(srcmdl): raise Exception("Model file does not exist: %s", srcmdl) if model_name is None: suffix = self.config['file_suffix'] else: suffix = '_%s%s' % (model_name, self.config['file_suffix']) outfile = os.path.join(self.config['fileio']['workdir'], 'mcube%s.fits' % (suffix)) # May consider generating a custom source model file if not os.path.isfile(outfile): kw = dict(srcmaps=self.files['srcmap'], srcmdl=srcmdl, bexpmap=self.files['bexpmap'], outfile=outfile, expcube=self.files['ltcube'], irfs=self.config['gtlike']['irfs'], evtype=self.config['selection']['evtype'], edisp=bool(self.config['gtlike']['edisp']), outtype='ccube', chatter=self.config['logging']['chatter']) run_gtapp('gtmodel', self.logger, kw) else: self.logger.info('Skipping gtmodel')
[ "def", "generate_model", "(", "self", ",", "model_name", "=", "None", ",", "outfile", "=", "None", ")", ":", "if", "model_name", "is", "not", "None", ":", "model_name", "=", "os", ".", "path", ".", "splitext", "(", "model_name", ")", "[", "0", "]", "...
Generate a counts model map from an XML model file using gtmodel. Parameters ---------- model_name : str Name of the model. If no name is given it will use the baseline model. outfile : str Override the name of the output model file.
[ "Generate", "a", "counts", "model", "map", "from", "an", "XML", "model", "file", "using", "gtmodel", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5611-L5660
train
36,087
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.write_xml
def write_xml(self, xmlfile): """Write the XML model for this analysis component.""" xmlfile = self.get_model_path(xmlfile) self.logger.info('Writing %s...', xmlfile) self.like.writeXml(str(xmlfile))
python
def write_xml(self, xmlfile): """Write the XML model for this analysis component.""" xmlfile = self.get_model_path(xmlfile) self.logger.info('Writing %s...', xmlfile) self.like.writeXml(str(xmlfile))
[ "def", "write_xml", "(", "self", ",", "xmlfile", ")", ":", "xmlfile", "=", "self", ".", "get_model_path", "(", "xmlfile", ")", "self", ".", "logger", ".", "info", "(", "'Writing %s...'", ",", "xmlfile", ")", "self", ".", "like", ".", "writeXml", "(", "...
Write the XML model for this analysis component.
[ "Write", "the", "XML", "model", "for", "this", "analysis", "component", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5670-L5675
train
36,088
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis.get_model_path
def get_model_path(self, name): """Infer the path to the XML model name.""" name, ext = os.path.splitext(name) ext = '.xml' xmlfile = name + self.config['file_suffix'] + ext xmlfile = utils.resolve_path(xmlfile, workdir=self.config['fileio']['workdir']) return xmlfile
python
def get_model_path(self, name): """Infer the path to the XML model name.""" name, ext = os.path.splitext(name) ext = '.xml' xmlfile = name + self.config['file_suffix'] + ext xmlfile = utils.resolve_path(xmlfile, workdir=self.config['fileio']['workdir']) return xmlfile
[ "def", "get_model_path", "(", "self", ",", "name", ")", ":", "name", ",", "ext", "=", "os", ".", "path", ".", "splitext", "(", "name", ")", "ext", "=", "'.xml'", "xmlfile", "=", "name", "+", "self", ".", "config", "[", "'file_suffix'", "]", "+", "e...
Infer the path to the XML model name.
[ "Infer", "the", "path", "to", "the", "XML", "model", "name", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5677-L5686
train
36,089
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._tscube_app
def _tscube_app(self, xmlfile): """Run gttscube as an application.""" xmlfile = self.get_model_path(xmlfile) outfile = os.path.join(self.config['fileio']['workdir'], 'tscube%s.fits' % (self.config['file_suffix'])) kw = dict(cmap=self.files['ccube'], expcube=self.files['ltcube'], bexpmap=self.files['bexpmap'], irfs=self.config['gtlike']['irfs'], evtype=self.config['selection']['evtype'], srcmdl=xmlfile, nxpix=self.npix, nypix=self.npix, binsz=self.config['binning']['binsz'], xref=float(self.roi.skydir.ra.deg), yref=float(self.roi.skydir.dec.deg), proj=self.config['binning']['proj'], stlevel=0, coordsys=self.config['binning']['coordsys'], outfile=outfile) run_gtapp('gttscube', self.logger, kw)
python
def _tscube_app(self, xmlfile): """Run gttscube as an application.""" xmlfile = self.get_model_path(xmlfile) outfile = os.path.join(self.config['fileio']['workdir'], 'tscube%s.fits' % (self.config['file_suffix'])) kw = dict(cmap=self.files['ccube'], expcube=self.files['ltcube'], bexpmap=self.files['bexpmap'], irfs=self.config['gtlike']['irfs'], evtype=self.config['selection']['evtype'], srcmdl=xmlfile, nxpix=self.npix, nypix=self.npix, binsz=self.config['binning']['binsz'], xref=float(self.roi.skydir.ra.deg), yref=float(self.roi.skydir.dec.deg), proj=self.config['binning']['proj'], stlevel=0, coordsys=self.config['binning']['coordsys'], outfile=outfile) run_gtapp('gttscube', self.logger, kw)
[ "def", "_tscube_app", "(", "self", ",", "xmlfile", ")", ":", "xmlfile", "=", "self", ".", "get_model_path", "(", "xmlfile", ")", "outfile", "=", "os", ".", "path", ".", "join", "(", "self", ".", "config", "[", "'fileio'", "]", "[", "'workdir'", "]", ...
Run gttscube as an application.
[ "Run", "gttscube", "as", "an", "application", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5688-L5711
train
36,090
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._diffrsp_app
def _diffrsp_app(self,xmlfile=None, **kwargs): """ Compute the diffuse response """ loglevel = kwargs.get('loglevel', self.loglevel) self.logger.log(loglevel, 'Computing diffuse repsonce for component %s.', self.name) # set the srcmdl srcmdl_file = self.files['srcmdl'] if xmlfile is not None: srcmdl_file = self.get_model_path(xmlfile) kw = dict(evfile=self.files['ft1'], scfile=self.data_files['scfile'], irfs = self.config['gtlike']['irfs'], evtype = self.config['selection']['evtype'], srcmdl = srcmdl_file) run_gtapp('gtdiffrsp', self.logger, kw, loglevel=loglevel) return
python
def _diffrsp_app(self,xmlfile=None, **kwargs): """ Compute the diffuse response """ loglevel = kwargs.get('loglevel', self.loglevel) self.logger.log(loglevel, 'Computing diffuse repsonce for component %s.', self.name) # set the srcmdl srcmdl_file = self.files['srcmdl'] if xmlfile is not None: srcmdl_file = self.get_model_path(xmlfile) kw = dict(evfile=self.files['ft1'], scfile=self.data_files['scfile'], irfs = self.config['gtlike']['irfs'], evtype = self.config['selection']['evtype'], srcmdl = srcmdl_file) run_gtapp('gtdiffrsp', self.logger, kw, loglevel=loglevel) return
[ "def", "_diffrsp_app", "(", "self", ",", "xmlfile", "=", "None", ",", "*", "*", "kwargs", ")", ":", "loglevel", "=", "kwargs", ".", "get", "(", "'loglevel'", ",", "self", ".", "loglevel", ")", "self", ".", "logger", ".", "log", "(", "loglevel", ",", ...
Compute the diffuse response
[ "Compute", "the", "diffuse", "response" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5713-L5734
train
36,091
fermiPy/fermipy
fermipy/gtanalysis.py
GTBinnedAnalysis._srcprob_app
def _srcprob_app(self,xmlfile=None, overwrite=False, **kwargs): """ Run srcprob for an analysis component as an application """ loglevel = kwargs.get('loglevel', self.loglevel) self.logger.log(loglevel, 'Computing src probability for component %s.', self.name) # set the srcmdl srcmdl_file = self.files['srcmdl'] if xmlfile is not None: srcmdl_file = self.get_model_path(xmlfile) # set the outfile # it's defined here and not in self.files dict # so that it is copied with the stage_output module # even if savefits is False outfile = os.path.join(self.workdir, 'ft1_srcprob{0[file_suffix]:s}.fits'.format(self.config)) kw = dict(evfile=self.files['ft1'], scfile=self.data_files['scfile'], outfile= outfile, irfs = self.config['gtlike']['irfs'], srcmdl = srcmdl_file) self.logger.debug(kw) # run gtapp for the srcprob if os.path.isfile(outfile) and not overwrite: self.logger.info('Skipping gtsrcprob') else: run_gtapp('gtsrcprob', self.logger, kw, loglevel=loglevel)
python
def _srcprob_app(self,xmlfile=None, overwrite=False, **kwargs): """ Run srcprob for an analysis component as an application """ loglevel = kwargs.get('loglevel', self.loglevel) self.logger.log(loglevel, 'Computing src probability for component %s.', self.name) # set the srcmdl srcmdl_file = self.files['srcmdl'] if xmlfile is not None: srcmdl_file = self.get_model_path(xmlfile) # set the outfile # it's defined here and not in self.files dict # so that it is copied with the stage_output module # even if savefits is False outfile = os.path.join(self.workdir, 'ft1_srcprob{0[file_suffix]:s}.fits'.format(self.config)) kw = dict(evfile=self.files['ft1'], scfile=self.data_files['scfile'], outfile= outfile, irfs = self.config['gtlike']['irfs'], srcmdl = srcmdl_file) self.logger.debug(kw) # run gtapp for the srcprob if os.path.isfile(outfile) and not overwrite: self.logger.info('Skipping gtsrcprob') else: run_gtapp('gtsrcprob', self.logger, kw, loglevel=loglevel)
[ "def", "_srcprob_app", "(", "self", ",", "xmlfile", "=", "None", ",", "overwrite", "=", "False", ",", "*", "*", "kwargs", ")", ":", "loglevel", "=", "kwargs", ".", "get", "(", "'loglevel'", ",", "self", ".", "loglevel", ")", "self", ".", "logger", "....
Run srcprob for an analysis component as an application
[ "Run", "srcprob", "for", "an", "analysis", "component", "as", "an", "application" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/gtanalysis.py#L5736-L5769
train
36,092
fermiPy/fermipy
fermipy/jobs/chain.py
purge_dict
def purge_dict(idict): """Remove null items from a dictionary """ odict = {} for key, val in idict.items(): if is_null(val): continue odict[key] = val return odict
python
def purge_dict(idict): """Remove null items from a dictionary """ odict = {} for key, val in idict.items(): if is_null(val): continue odict[key] = val return odict
[ "def", "purge_dict", "(", "idict", ")", ":", "odict", "=", "{", "}", "for", "key", ",", "val", "in", "idict", ".", "items", "(", ")", ":", "if", "is_null", "(", "val", ")", ":", "continue", "odict", "[", "key", "]", "=", "val", "return", "odict" ...
Remove null items from a dictionary
[ "Remove", "null", "items", "from", "a", "dictionary" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/chain.py#L20-L27
train
36,093
fermiPy/fermipy
fermipy/jobs/chain.py
Chain.main
def main(cls): """Hook to run this `Chain` from the command line """ chain = cls.create() args = chain._run_argparser(sys.argv[1:]) chain._run_chain(sys.stdout, args.dry_run) chain._finalize(args.dry_run)
python
def main(cls): """Hook to run this `Chain` from the command line """ chain = cls.create() args = chain._run_argparser(sys.argv[1:]) chain._run_chain(sys.stdout, args.dry_run) chain._finalize(args.dry_run)
[ "def", "main", "(", "cls", ")", ":", "chain", "=", "cls", ".", "create", "(", ")", "args", "=", "chain", ".", "_run_argparser", "(", "sys", ".", "argv", "[", "1", ":", "]", ")", "chain", ".", "_run_chain", "(", "sys", ".", "stdout", ",", "args", ...
Hook to run this `Chain` from the command line
[ "Hook", "to", "run", "this", "Chain", "from", "the", "command", "line" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/chain.py#L48-L53
train
36,094
fermiPy/fermipy
fermipy/jobs/chain.py
Chain._set_link
def _set_link(self, linkname, cls, **kwargs): """Transfer options kwargs to a `Link` object, optionally building the `Link if needed. Parameters ---------- linkname : str Unique name of this particular link cls : type Type of `Link` being created or managed """ val_copy = purge_dict(kwargs.copy()) sub_link_prefix = val_copy.pop('link_prefix', '') link_prefix = self.link_prefix + sub_link_prefix create_args = dict(linkname=linkname, link_prefix=link_prefix, job_archive=val_copy.pop('job_archive', None), file_stage=val_copy.pop('file_stage', None)) job_args = val_copy if linkname in self._links: link = self._links[linkname] link.update_args(job_args) else: link = cls.create(**create_args) self._links[link.linkname] = link logfile_default = os.path.join('logs', '%s.log' % link.full_linkname) logfile = kwargs.setdefault('logfile', logfile_default) link._register_job(JobDetails.topkey, job_args, logfile, status=JobStatus.unknown) return link
python
def _set_link(self, linkname, cls, **kwargs): """Transfer options kwargs to a `Link` object, optionally building the `Link if needed. Parameters ---------- linkname : str Unique name of this particular link cls : type Type of `Link` being created or managed """ val_copy = purge_dict(kwargs.copy()) sub_link_prefix = val_copy.pop('link_prefix', '') link_prefix = self.link_prefix + sub_link_prefix create_args = dict(linkname=linkname, link_prefix=link_prefix, job_archive=val_copy.pop('job_archive', None), file_stage=val_copy.pop('file_stage', None)) job_args = val_copy if linkname in self._links: link = self._links[linkname] link.update_args(job_args) else: link = cls.create(**create_args) self._links[link.linkname] = link logfile_default = os.path.join('logs', '%s.log' % link.full_linkname) logfile = kwargs.setdefault('logfile', logfile_default) link._register_job(JobDetails.topkey, job_args, logfile, status=JobStatus.unknown) return link
[ "def", "_set_link", "(", "self", ",", "linkname", ",", "cls", ",", "*", "*", "kwargs", ")", ":", "val_copy", "=", "purge_dict", "(", "kwargs", ".", "copy", "(", ")", ")", "sub_link_prefix", "=", "val_copy", ".", "pop", "(", "'link_prefix'", ",", "''", ...
Transfer options kwargs to a `Link` object, optionally building the `Link if needed. Parameters ---------- linkname : str Unique name of this particular link cls : type Type of `Link` being created or managed
[ "Transfer", "options", "kwargs", "to", "a", "Link", "object", "optionally", "building", "the", "Link", "if", "needed", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/chain.py#L96-L128
train
36,095
fermiPy/fermipy
fermipy/jobs/chain.py
Chain._set_links_job_archive
def _set_links_job_archive(self): """Pass self._job_archive along to links""" for link in self._links.values(): link._job_archive = self._job_archive
python
def _set_links_job_archive(self): """Pass self._job_archive along to links""" for link in self._links.values(): link._job_archive = self._job_archive
[ "def", "_set_links_job_archive", "(", "self", ")", ":", "for", "link", "in", "self", ".", "_links", ".", "values", "(", ")", ":", "link", ".", "_job_archive", "=", "self", ".", "_job_archive" ]
Pass self._job_archive along to links
[ "Pass", "self", ".", "_job_archive", "along", "to", "links" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/chain.py#L130-L133
train
36,096
fermiPy/fermipy
fermipy/jobs/chain.py
Chain._run_chain
def _run_chain(self, stream=sys.stdout, dry_run=False, stage_files=True, force_run=False, resubmit_failed=False): """Run all the links in the chain Parameters ----------- stream : `file` Stream to print to, Must have 'write' function dry_run : bool Print commands but do not run them stage_files : bool Stage files to and from the scratch area force_run : bool Run jobs, even if they are marked as done resubmit_failed : bool Resubmit failed jobs """ self._set_links_job_archive() failed = False if self._file_stage is not None: input_file_mapping, output_file_mapping = self._map_scratch_files( self.sub_files) if stage_files: self._file_stage.make_scratch_dirs(input_file_mapping, dry_run) self._file_stage.make_scratch_dirs( output_file_mapping, dry_run) self._stage_input_files(input_file_mapping, dry_run) for link in self._links.values(): logfile = os.path.join('logs', "%s.log" % link.full_linkname) link._archive_self(logfile, status=JobStatus.unknown) key = JobDetails.make_fullkey(link.full_linkname) if hasattr(link, 'check_status'): link.check_status(stream, no_wait=True, check_once=True, do_print=False) else: pass link_status = link.check_job_status(key) if link_status in [JobStatus.done]: if not force_run: print ("Skipping done link", link.full_linkname) continue elif link_status in [JobStatus.running]: if not force_run and not resubmit_failed: print ("Skipping running link", link.full_linkname) continue elif link_status in [JobStatus.failed, JobStatus.partial_failed]: if not resubmit_failed: print ("Skipping failed link", link.full_linkname) continue print ("Running link ", link.full_linkname) link.run_with_log(dry_run=dry_run, stage_files=False, resubmit_failed=resubmit_failed) link_status = link.check_jobs_status() link._set_status_self(status=link_status) if link_status in [JobStatus.failed, JobStatus.partial_failed]: print ("Stoping chain execution at failed link %s" % link.full_linkname) failed = True break # elif link_status in [JobStatus.partial_failed]: # 
print ("Resubmitting partially failed link %s" % # link.full_linkname) # link.run_with_log(dry_run=dry_run, stage_files=False, # resubmit_failed=resubmit_failed) # link_status = link.check_jobs_status() # link._set_status_self(status=link_status) # if link_status in [JobStatus.partial_failed]: # print ("Stoping chain execution: resubmission failed %s" % # link.full_linkname) # failed = True # break if self._file_stage is not None and stage_files and not failed: self._stage_output_files(output_file_mapping, dry_run) chain_status = self.check_links_status() print ("Chain status: %s" % (JOB_STATUS_STRINGS[chain_status])) if chain_status == 5: job_status = 0 else: job_status = -1 self._write_status_to_log(job_status, stream) self._set_status_self(status=chain_status) if self._job_archive: self._job_archive.file_archive.update_file_status() self._job_archive.write_table_file()
python
def _run_chain(self, stream=sys.stdout, dry_run=False, stage_files=True, force_run=False, resubmit_failed=False): """Run all the links in the chain Parameters ----------- stream : `file` Stream to print to, Must have 'write' function dry_run : bool Print commands but do not run them stage_files : bool Stage files to and from the scratch area force_run : bool Run jobs, even if they are marked as done resubmit_failed : bool Resubmit failed jobs """ self._set_links_job_archive() failed = False if self._file_stage is not None: input_file_mapping, output_file_mapping = self._map_scratch_files( self.sub_files) if stage_files: self._file_stage.make_scratch_dirs(input_file_mapping, dry_run) self._file_stage.make_scratch_dirs( output_file_mapping, dry_run) self._stage_input_files(input_file_mapping, dry_run) for link in self._links.values(): logfile = os.path.join('logs', "%s.log" % link.full_linkname) link._archive_self(logfile, status=JobStatus.unknown) key = JobDetails.make_fullkey(link.full_linkname) if hasattr(link, 'check_status'): link.check_status(stream, no_wait=True, check_once=True, do_print=False) else: pass link_status = link.check_job_status(key) if link_status in [JobStatus.done]: if not force_run: print ("Skipping done link", link.full_linkname) continue elif link_status in [JobStatus.running]: if not force_run and not resubmit_failed: print ("Skipping running link", link.full_linkname) continue elif link_status in [JobStatus.failed, JobStatus.partial_failed]: if not resubmit_failed: print ("Skipping failed link", link.full_linkname) continue print ("Running link ", link.full_linkname) link.run_with_log(dry_run=dry_run, stage_files=False, resubmit_failed=resubmit_failed) link_status = link.check_jobs_status() link._set_status_self(status=link_status) if link_status in [JobStatus.failed, JobStatus.partial_failed]: print ("Stoping chain execution at failed link %s" % link.full_linkname) failed = True break # elif link_status in [JobStatus.partial_failed]: # 
print ("Resubmitting partially failed link %s" % # link.full_linkname) # link.run_with_log(dry_run=dry_run, stage_files=False, # resubmit_failed=resubmit_failed) # link_status = link.check_jobs_status() # link._set_status_self(status=link_status) # if link_status in [JobStatus.partial_failed]: # print ("Stoping chain execution: resubmission failed %s" % # link.full_linkname) # failed = True # break if self._file_stage is not None and stage_files and not failed: self._stage_output_files(output_file_mapping, dry_run) chain_status = self.check_links_status() print ("Chain status: %s" % (JOB_STATUS_STRINGS[chain_status])) if chain_status == 5: job_status = 0 else: job_status = -1 self._write_status_to_log(job_status, stream) self._set_status_self(status=chain_status) if self._job_archive: self._job_archive.file_archive.update_file_status() self._job_archive.write_table_file()
[ "def", "_run_chain", "(", "self", ",", "stream", "=", "sys", ".", "stdout", ",", "dry_run", "=", "False", ",", "stage_files", "=", "True", ",", "force_run", "=", "False", ",", "resubmit_failed", "=", "False", ")", ":", "self", ".", "_set_links_job_archive"...
Run all the links in the chain Parameters ----------- stream : `file` Stream to print to, Must have 'write' function dry_run : bool Print commands but do not run them stage_files : bool Stage files to and from the scratch area force_run : bool Run jobs, even if they are marked as done resubmit_failed : bool Resubmit failed jobs
[ "Run", "all", "the", "links", "in", "the", "chain" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/chain.py#L135-L234
train
36,097
fermiPy/fermipy
fermipy/jobs/chain.py
Chain.clear_jobs
def clear_jobs(self, recursive=True): """Clear a dictionary with all the jobs If recursive is True this will include jobs from all internal `Link` """ if recursive: for link in self._links.values(): link.clear_jobs(recursive) self.jobs.clear()
python
def clear_jobs(self, recursive=True): """Clear a dictionary with all the jobs If recursive is True this will include jobs from all internal `Link` """ if recursive: for link in self._links.values(): link.clear_jobs(recursive) self.jobs.clear()
[ "def", "clear_jobs", "(", "self", ",", "recursive", "=", "True", ")", ":", "if", "recursive", ":", "for", "link", "in", "self", ".", "_links", ".", "values", "(", ")", ":", "link", ".", "clear_jobs", "(", "recursive", ")", "self", ".", "jobs", ".", ...
Clear a dictionary with all the jobs If recursive is True this will include jobs from all internal `Link`
[ "Clear", "a", "dictionary", "with", "all", "the", "jobs" ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/chain.py#L236-L244
train
36,098
fermiPy/fermipy
fermipy/jobs/chain.py
Chain.check_links_status
def check_links_status(self, fail_running=False, fail_pending=False): """"Check the status of all the jobs run from the `Link` objects in this `Chain` and return a status flag that summarizes that. Parameters ---------- fail_running : `bool` If True, consider running jobs as failed fail_pending : `bool` If True, consider pending jobs as failed Returns ------- status : `JobStatus` Job status flag that summarizes the status of all the jobs, """ status_vector = JobStatusVector() for link in self._links.values(): key = JobDetails.make_fullkey(link.full_linkname) link_status = link.check_job_status(key, fail_running=fail_running, fail_pending=fail_pending) status_vector[link_status] += 1 return status_vector.get_status()
python
def check_links_status(self, fail_running=False, fail_pending=False): """"Check the status of all the jobs run from the `Link` objects in this `Chain` and return a status flag that summarizes that. Parameters ---------- fail_running : `bool` If True, consider running jobs as failed fail_pending : `bool` If True, consider pending jobs as failed Returns ------- status : `JobStatus` Job status flag that summarizes the status of all the jobs, """ status_vector = JobStatusVector() for link in self._links.values(): key = JobDetails.make_fullkey(link.full_linkname) link_status = link.check_job_status(key, fail_running=fail_running, fail_pending=fail_pending) status_vector[link_status] += 1 return status_vector.get_status()
[ "def", "check_links_status", "(", "self", ",", "fail_running", "=", "False", ",", "fail_pending", "=", "False", ")", ":", "status_vector", "=", "JobStatusVector", "(", ")", "for", "link", "in", "self", ".", "_links", ".", "values", "(", ")", ":", "key", ...
Check the status of all the jobs run from the `Link` objects in this `Chain` and return a status flag that summarizes that. Parameters ---------- fail_running : `bool` If True, consider running jobs as failed fail_pending : `bool` If True, consider pending jobs as failed Returns ------- status : `JobStatus` Job status flag that summarizes the status of all the jobs,
[ "Check", "the", "status", "of", "all", "the", "jobs", "run", "from", "the", "Link", "objects", "in", "this", "Chain", "and", "return", "a", "status", "flag", "that", "summarizes", "that", "." ]
9df5e7e3728307fd58c5bba36fd86783c39fbad4
https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/chain.py#L290-L320
train
36,099