repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1
value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1
value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
fermiPy/fermipy | fermipy/skymap.py | Map.xypix_to_ipix | def xypix_to_ipix(self, xypix, colwise=False):
"""Return the flattened pixel indices from an array multi-dimensional
pixel indices.
Parameters
----------
xypix : list
List of pixel indices in the order (LON,LAT,ENERGY).
colwise : bool
Use column-wise pixel indexing.
"""
return np.ravel_multi_index(xypix, self.npix,
order='F' if colwise else 'C',
mode='raise') | python | def xypix_to_ipix(self, xypix, colwise=False):
"""Return the flattened pixel indices from an array multi-dimensional
pixel indices.
Parameters
----------
xypix : list
List of pixel indices in the order (LON,LAT,ENERGY).
colwise : bool
Use column-wise pixel indexing.
"""
return np.ravel_multi_index(xypix, self.npix,
order='F' if colwise else 'C',
mode='raise') | [
"def",
"xypix_to_ipix",
"(",
"self",
",",
"xypix",
",",
"colwise",
"=",
"False",
")",
":",
"return",
"np",
".",
"ravel_multi_index",
"(",
"xypix",
",",
"self",
".",
"npix",
",",
"order",
"=",
"'F'",
"if",
"colwise",
"else",
"'C'",
",",
"mode",
"=",
"... | Return the flattened pixel indices from an array multi-dimensional
pixel indices.
Parameters
----------
xypix : list
List of pixel indices in the order (LON,LAT,ENERGY).
colwise : bool
Use column-wise pixel indexing. | [
"Return",
"the",
"flattened",
"pixel",
"indices",
"from",
"an",
"array",
"multi",
"-",
"dimensional",
"pixel",
"indices",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L259-L273 | train | 36,200 |
fermiPy/fermipy | fermipy/skymap.py | Map.ipix_to_xypix | def ipix_to_xypix(self, ipix, colwise=False):
"""Return array multi-dimensional pixel indices from flattened index.
Parameters
----------
colwise : bool
Use column-wise pixel indexing.
"""
return np.unravel_index(ipix, self.npix,
order='F' if colwise else 'C') | python | def ipix_to_xypix(self, ipix, colwise=False):
"""Return array multi-dimensional pixel indices from flattened index.
Parameters
----------
colwise : bool
Use column-wise pixel indexing.
"""
return np.unravel_index(ipix, self.npix,
order='F' if colwise else 'C') | [
"def",
"ipix_to_xypix",
"(",
"self",
",",
"ipix",
",",
"colwise",
"=",
"False",
")",
":",
"return",
"np",
".",
"unravel_index",
"(",
"ipix",
",",
"self",
".",
"npix",
",",
"order",
"=",
"'F'",
"if",
"colwise",
"else",
"'C'",
")"
] | Return array multi-dimensional pixel indices from flattened index.
Parameters
----------
colwise : bool
Use column-wise pixel indexing. | [
"Return",
"array",
"multi",
"-",
"dimensional",
"pixel",
"indices",
"from",
"flattened",
"index",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L275-L284 | train | 36,201 |
fermiPy/fermipy | fermipy/skymap.py | Map.ipix_swap_axes | def ipix_swap_axes(self, ipix, colwise=False):
""" Return the transposed pixel index from the pixel xy coordinates
if colwise is True (False) this assumes the original index was
in column wise scheme
"""
xy = self.ipix_to_xypix(ipix, colwise)
return self.xypix_to_ipix(xy, not colwise) | python | def ipix_swap_axes(self, ipix, colwise=False):
""" Return the transposed pixel index from the pixel xy coordinates
if colwise is True (False) this assumes the original index was
in column wise scheme
"""
xy = self.ipix_to_xypix(ipix, colwise)
return self.xypix_to_ipix(xy, not colwise) | [
"def",
"ipix_swap_axes",
"(",
"self",
",",
"ipix",
",",
"colwise",
"=",
"False",
")",
":",
"xy",
"=",
"self",
".",
"ipix_to_xypix",
"(",
"ipix",
",",
"colwise",
")",
"return",
"self",
".",
"xypix_to_ipix",
"(",
"xy",
",",
"not",
"colwise",
")"
] | Return the transposed pixel index from the pixel xy coordinates
if colwise is True (False) this assumes the original index was
in column wise scheme | [
"Return",
"the",
"transposed",
"pixel",
"index",
"from",
"the",
"pixel",
"xy",
"coordinates"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L286-L293 | train | 36,202 |
fermiPy/fermipy | fermipy/skymap.py | Map.get_map_values | def get_map_values(self, lons, lats, ibin=None):
"""Return the map values corresponding to a set of coordinates.
Parameters
----------
lons : array-like
'Longitudes' (RA or GLON)
lats : array-like
'Latitidues' (DEC or GLAT)
ibin : int or array-like
Extract data only for a given energy bin. None -> extract data for all bins
Returns
----------
vals : numpy.ndarray((n))
Values of pixels in the flattened map, np.nan used to flag
coords outside of map
"""
pix_idxs = self.get_pixel_indices(lons, lats, ibin)
idxs = copy.copy(pix_idxs)
m = np.empty_like(idxs[0], dtype=bool)
m.fill(True)
for i, p in enumerate(pix_idxs):
m &= (pix_idxs[i] >= 0) & (pix_idxs[i] < self._npix[i])
idxs[i][~m] = 0
vals = self.counts.T[idxs]
vals[~m] = np.nan
return vals | python | def get_map_values(self, lons, lats, ibin=None):
"""Return the map values corresponding to a set of coordinates.
Parameters
----------
lons : array-like
'Longitudes' (RA or GLON)
lats : array-like
'Latitidues' (DEC or GLAT)
ibin : int or array-like
Extract data only for a given energy bin. None -> extract data for all bins
Returns
----------
vals : numpy.ndarray((n))
Values of pixels in the flattened map, np.nan used to flag
coords outside of map
"""
pix_idxs = self.get_pixel_indices(lons, lats, ibin)
idxs = copy.copy(pix_idxs)
m = np.empty_like(idxs[0], dtype=bool)
m.fill(True)
for i, p in enumerate(pix_idxs):
m &= (pix_idxs[i] >= 0) & (pix_idxs[i] < self._npix[i])
idxs[i][~m] = 0
vals = self.counts.T[idxs]
vals[~m] = np.nan
return vals | [
"def",
"get_map_values",
"(",
"self",
",",
"lons",
",",
"lats",
",",
"ibin",
"=",
"None",
")",
":",
"pix_idxs",
"=",
"self",
".",
"get_pixel_indices",
"(",
"lons",
",",
"lats",
",",
"ibin",
")",
"idxs",
"=",
"copy",
".",
"copy",
"(",
"pix_idxs",
")",... | Return the map values corresponding to a set of coordinates.
Parameters
----------
lons : array-like
'Longitudes' (RA or GLON)
lats : array-like
'Latitidues' (DEC or GLAT)
ibin : int or array-like
Extract data only for a given energy bin. None -> extract data for all bins
Returns
----------
vals : numpy.ndarray((n))
Values of pixels in the flattened map, np.nan used to flag
coords outside of map | [
"Return",
"the",
"map",
"values",
"corresponding",
"to",
"a",
"set",
"of",
"coordinates",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L351-L382 | train | 36,203 |
fermiPy/fermipy | fermipy/skymap.py | HpxMap.create_from_hdu | def create_from_hdu(cls, hdu, ebins):
""" Creates and returns an HpxMap object from a FITS HDU.
hdu : The FITS
ebins : Energy bin edges [optional]
"""
hpx = HPX.create_from_hdu(hdu, ebins)
colnames = hdu.columns.names
cnames = []
if hpx.conv.convname == 'FGST_SRCMAP_SPARSE':
pixs = hdu.data.field('PIX')
chans = hdu.data.field('CHANNEL')
keys = chans * hpx.npix + pixs
vals = hdu.data.field('VALUE')
nebin = len(ebins)
data = np.zeros((nebin, hpx.npix))
data.flat[keys] = vals
else:
for c in colnames:
if c.find(hpx.conv.colstring) == 0:
cnames.append(c)
nebin = len(cnames)
data = np.ndarray((nebin, hpx.npix))
for i, cname in enumerate(cnames):
data[i, 0:] = hdu.data.field(cname)
return cls(data, hpx) | python | def create_from_hdu(cls, hdu, ebins):
""" Creates and returns an HpxMap object from a FITS HDU.
hdu : The FITS
ebins : Energy bin edges [optional]
"""
hpx = HPX.create_from_hdu(hdu, ebins)
colnames = hdu.columns.names
cnames = []
if hpx.conv.convname == 'FGST_SRCMAP_SPARSE':
pixs = hdu.data.field('PIX')
chans = hdu.data.field('CHANNEL')
keys = chans * hpx.npix + pixs
vals = hdu.data.field('VALUE')
nebin = len(ebins)
data = np.zeros((nebin, hpx.npix))
data.flat[keys] = vals
else:
for c in colnames:
if c.find(hpx.conv.colstring) == 0:
cnames.append(c)
nebin = len(cnames)
data = np.ndarray((nebin, hpx.npix))
for i, cname in enumerate(cnames):
data[i, 0:] = hdu.data.field(cname)
return cls(data, hpx) | [
"def",
"create_from_hdu",
"(",
"cls",
",",
"hdu",
",",
"ebins",
")",
":",
"hpx",
"=",
"HPX",
".",
"create_from_hdu",
"(",
"hdu",
",",
"ebins",
")",
"colnames",
"=",
"hdu",
".",
"columns",
".",
"names",
"cnames",
"=",
"[",
"]",
"if",
"hpx",
".",
"co... | Creates and returns an HpxMap object from a FITS HDU.
hdu : The FITS
ebins : Energy bin edges [optional] | [
"Creates",
"and",
"returns",
"an",
"HpxMap",
"object",
"from",
"a",
"FITS",
"HDU",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L430-L456 | train | 36,204 |
fermiPy/fermipy | fermipy/skymap.py | HpxMap.create_from_hdulist | def create_from_hdulist(cls, hdulist, **kwargs):
""" Creates and returns an HpxMap object from a FITS HDUList
extname : The name of the HDU with the map data
ebounds : The name of the HDU with the energy bin data
"""
extname = kwargs.get('hdu', hdulist[1].name)
ebins = fits_utils.find_and_read_ebins(hdulist)
return cls.create_from_hdu(hdulist[extname], ebins) | python | def create_from_hdulist(cls, hdulist, **kwargs):
""" Creates and returns an HpxMap object from a FITS HDUList
extname : The name of the HDU with the map data
ebounds : The name of the HDU with the energy bin data
"""
extname = kwargs.get('hdu', hdulist[1].name)
ebins = fits_utils.find_and_read_ebins(hdulist)
return cls.create_from_hdu(hdulist[extname], ebins) | [
"def",
"create_from_hdulist",
"(",
"cls",
",",
"hdulist",
",",
"*",
"*",
"kwargs",
")",
":",
"extname",
"=",
"kwargs",
".",
"get",
"(",
"'hdu'",
",",
"hdulist",
"[",
"1",
"]",
".",
"name",
")",
"ebins",
"=",
"fits_utils",
".",
"find_and_read_ebins",
"(... | Creates and returns an HpxMap object from a FITS HDUList
extname : The name of the HDU with the map data
ebounds : The name of the HDU with the energy bin data | [
"Creates",
"and",
"returns",
"an",
"HpxMap",
"object",
"from",
"a",
"FITS",
"HDUList"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L459-L467 | train | 36,205 |
fermiPy/fermipy | fermipy/skymap.py | HpxMap.sum_over_energy | def sum_over_energy(self):
""" Reduce a counts cube to a counts map """
# We sum over axis 0 in the array, and drop the energy binning in the
# hpx object
return HpxMap(np.sum(self.counts, axis=0), self.hpx.copy_and_drop_energy()) | python | def sum_over_energy(self):
""" Reduce a counts cube to a counts map """
# We sum over axis 0 in the array, and drop the energy binning in the
# hpx object
return HpxMap(np.sum(self.counts, axis=0), self.hpx.copy_and_drop_energy()) | [
"def",
"sum_over_energy",
"(",
"self",
")",
":",
"# We sum over axis 0 in the array, and drop the energy binning in the",
"# hpx object",
"return",
"HpxMap",
"(",
"np",
".",
"sum",
"(",
"self",
".",
"counts",
",",
"axis",
"=",
"0",
")",
",",
"self",
".",
"hpx",
... | Reduce a counts cube to a counts map | [
"Reduce",
"a",
"counts",
"cube",
"to",
"a",
"counts",
"map"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L579-L583 | train | 36,206 |
fermiPy/fermipy | fermipy/skymap.py | HpxMap.interpolate | def interpolate(self, lon, lat, egy=None, interp_log=True):
"""Interpolate map values.
Parameters
----------
interp_log : bool
Interpolate the z-coordinate in logspace.
"""
if self.data.ndim == 1:
theta = np.pi / 2. - np.radians(lat)
phi = np.radians(lon)
return hp.pixelfunc.get_interp_val(self.counts, theta,
phi, nest=self.hpx.nest)
else:
return self._interpolate_cube(lon, lat, egy, interp_log) | python | def interpolate(self, lon, lat, egy=None, interp_log=True):
"""Interpolate map values.
Parameters
----------
interp_log : bool
Interpolate the z-coordinate in logspace.
"""
if self.data.ndim == 1:
theta = np.pi / 2. - np.radians(lat)
phi = np.radians(lon)
return hp.pixelfunc.get_interp_val(self.counts, theta,
phi, nest=self.hpx.nest)
else:
return self._interpolate_cube(lon, lat, egy, interp_log) | [
"def",
"interpolate",
"(",
"self",
",",
"lon",
",",
"lat",
",",
"egy",
"=",
"None",
",",
"interp_log",
"=",
"True",
")",
":",
"if",
"self",
".",
"data",
".",
"ndim",
"==",
"1",
":",
"theta",
"=",
"np",
".",
"pi",
"/",
"2.",
"-",
"np",
".",
"r... | Interpolate map values.
Parameters
----------
interp_log : bool
Interpolate the z-coordinate in logspace. | [
"Interpolate",
"map",
"values",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L615-L631 | train | 36,207 |
fermiPy/fermipy | fermipy/skymap.py | HpxMap._interpolate_cube | def _interpolate_cube(self, lon, lat, egy=None, interp_log=True):
"""Perform interpolation on a healpix cube. If egy is None
then interpolation will be performed on the existing energy
planes.
"""
shape = np.broadcast(lon, lat, egy).shape
lon = lon * np.ones(shape)
lat = lat * np.ones(shape)
theta = np.pi / 2. - np.radians(lat)
phi = np.radians(lon)
vals = []
for i, _ in enumerate(self.hpx.evals):
v = hp.pixelfunc.get_interp_val(self.counts[i], theta,
phi, nest=self.hpx.nest)
vals += [np.expand_dims(np.array(v, ndmin=1), -1)]
vals = np.concatenate(vals, axis=-1)
if egy is None:
return vals.T
egy = egy * np.ones(shape)
if interp_log:
xvals = utils.val_to_pix(np.log(self.hpx.evals), np.log(egy))
else:
xvals = utils.val_to_pix(self.hpx.evals, egy)
vals = vals.reshape((-1, vals.shape[-1]))
xvals = np.ravel(xvals)
v = map_coordinates(vals, [np.arange(vals.shape[0]), xvals],
order=1)
return v.reshape(shape) | python | def _interpolate_cube(self, lon, lat, egy=None, interp_log=True):
"""Perform interpolation on a healpix cube. If egy is None
then interpolation will be performed on the existing energy
planes.
"""
shape = np.broadcast(lon, lat, egy).shape
lon = lon * np.ones(shape)
lat = lat * np.ones(shape)
theta = np.pi / 2. - np.radians(lat)
phi = np.radians(lon)
vals = []
for i, _ in enumerate(self.hpx.evals):
v = hp.pixelfunc.get_interp_val(self.counts[i], theta,
phi, nest=self.hpx.nest)
vals += [np.expand_dims(np.array(v, ndmin=1), -1)]
vals = np.concatenate(vals, axis=-1)
if egy is None:
return vals.T
egy = egy * np.ones(shape)
if interp_log:
xvals = utils.val_to_pix(np.log(self.hpx.evals), np.log(egy))
else:
xvals = utils.val_to_pix(self.hpx.evals, egy)
vals = vals.reshape((-1, vals.shape[-1]))
xvals = np.ravel(xvals)
v = map_coordinates(vals, [np.arange(vals.shape[0]), xvals],
order=1)
return v.reshape(shape) | [
"def",
"_interpolate_cube",
"(",
"self",
",",
"lon",
",",
"lat",
",",
"egy",
"=",
"None",
",",
"interp_log",
"=",
"True",
")",
":",
"shape",
"=",
"np",
".",
"broadcast",
"(",
"lon",
",",
"lat",
",",
"egy",
")",
".",
"shape",
"lon",
"=",
"lon",
"*... | Perform interpolation on a healpix cube. If egy is None
then interpolation will be performed on the existing energy
planes. | [
"Perform",
"interpolation",
"on",
"a",
"healpix",
"cube",
".",
"If",
"egy",
"is",
"None",
"then",
"interpolation",
"will",
"be",
"performed",
"on",
"the",
"existing",
"energy",
"planes",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L633-L667 | train | 36,208 |
fermiPy/fermipy | fermipy/skymap.py | HpxMap.expanded_counts_map | def expanded_counts_map(self):
""" return the full counts map """
if self.hpx._ipix is None:
return self.counts
output = np.zeros(
(self.counts.shape[0], self.hpx._maxpix), self.counts.dtype)
for i in range(self.counts.shape[0]):
output[i][self.hpx._ipix] = self.counts[i]
return output | python | def expanded_counts_map(self):
""" return the full counts map """
if self.hpx._ipix is None:
return self.counts
output = np.zeros(
(self.counts.shape[0], self.hpx._maxpix), self.counts.dtype)
for i in range(self.counts.shape[0]):
output[i][self.hpx._ipix] = self.counts[i]
return output | [
"def",
"expanded_counts_map",
"(",
"self",
")",
":",
"if",
"self",
".",
"hpx",
".",
"_ipix",
"is",
"None",
":",
"return",
"self",
".",
"counts",
"output",
"=",
"np",
".",
"zeros",
"(",
"(",
"self",
".",
"counts",
".",
"shape",
"[",
"0",
"]",
",",
... | return the full counts map | [
"return",
"the",
"full",
"counts",
"map"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L687-L696 | train | 36,209 |
fermiPy/fermipy | fermipy/skymap.py | HpxMap.explicit_counts_map | def explicit_counts_map(self, pixels=None):
""" return a counts map with explicit index scheme
Parameters
----------
pixels : `np.ndarray` or None
If set, grab only those pixels.
If none, grab only non-zero pixels
"""
# No pixel index, so build one
if self.hpx._ipix is None:
if self.data.ndim == 2:
summed = self.counts.sum(0)
if pixels is None:
nz = summed.nonzero()[0]
else:
nz = pixels
data_out = np.vstack(self.data[i].flat[nz]
for i in range(self.data.shape[0]))
else:
if pixels is None:
nz = self.data.nonzero()[0]
else:
nz = pixels
data_out = self.data[nz]
return (nz, data_out)
else:
if pixels is None:
return (self.hpx._ipix, self.data)
# FIXME, can we catch this
raise RuntimeError(
'HPX.explicit_counts_map called with pixels for a map that already has pixels') | python | def explicit_counts_map(self, pixels=None):
""" return a counts map with explicit index scheme
Parameters
----------
pixels : `np.ndarray` or None
If set, grab only those pixels.
If none, grab only non-zero pixels
"""
# No pixel index, so build one
if self.hpx._ipix is None:
if self.data.ndim == 2:
summed = self.counts.sum(0)
if pixels is None:
nz = summed.nonzero()[0]
else:
nz = pixels
data_out = np.vstack(self.data[i].flat[nz]
for i in range(self.data.shape[0]))
else:
if pixels is None:
nz = self.data.nonzero()[0]
else:
nz = pixels
data_out = self.data[nz]
return (nz, data_out)
else:
if pixels is None:
return (self.hpx._ipix, self.data)
# FIXME, can we catch this
raise RuntimeError(
'HPX.explicit_counts_map called with pixels for a map that already has pixels') | [
"def",
"explicit_counts_map",
"(",
"self",
",",
"pixels",
"=",
"None",
")",
":",
"# No pixel index, so build one",
"if",
"self",
".",
"hpx",
".",
"_ipix",
"is",
"None",
":",
"if",
"self",
".",
"data",
".",
"ndim",
"==",
"2",
":",
"summed",
"=",
"self",
... | return a counts map with explicit index scheme
Parameters
----------
pixels : `np.ndarray` or None
If set, grab only those pixels.
If none, grab only non-zero pixels | [
"return",
"a",
"counts",
"map",
"with",
"explicit",
"index",
"scheme"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L698-L729 | train | 36,210 |
fermiPy/fermipy | fermipy/skymap.py | HpxMap.sparse_counts_map | def sparse_counts_map(self):
""" return a counts map with sparse index scheme
"""
if self.hpx._ipix is None:
flatarray = self.data.flattern()
else:
flatarray = self.expanded_counts_map()
nz = flatarray.nonzero()[0]
data_out = flatarray[nz]
return (nz, data_out) | python | def sparse_counts_map(self):
""" return a counts map with sparse index scheme
"""
if self.hpx._ipix is None:
flatarray = self.data.flattern()
else:
flatarray = self.expanded_counts_map()
nz = flatarray.nonzero()[0]
data_out = flatarray[nz]
return (nz, data_out) | [
"def",
"sparse_counts_map",
"(",
"self",
")",
":",
"if",
"self",
".",
"hpx",
".",
"_ipix",
"is",
"None",
":",
"flatarray",
"=",
"self",
".",
"data",
".",
"flattern",
"(",
")",
"else",
":",
"flatarray",
"=",
"self",
".",
"expanded_counts_map",
"(",
")",... | return a counts map with sparse index scheme | [
"return",
"a",
"counts",
"map",
"with",
"sparse",
"index",
"scheme"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/skymap.py#L731-L740 | train | 36,211 |
fermiPy/fermipy | fermipy/sensitivity.py | SensitivityCalc.diff_flux_threshold | def diff_flux_threshold(self, skydir, fn, ts_thresh, min_counts):
"""Compute the differential flux threshold for a point source at
position ``skydir`` with spectral parameterization ``fn``.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinates at which the sensitivity will be evaluated.
fn : `~fermipy.spectrum.SpectralFunction`
ts_thresh : float
Threshold on the detection test statistic (TS).
min_counts : float
Threshold on the minimum number of counts.
"""
sig, bkg, bkg_fit = self.compute_counts(skydir, fn)
norms = irfs.compute_norm(sig, bkg, ts_thresh,
min_counts, sum_axes=[2, 3],
rebin_axes=[10, 1],
bkg_fit=bkg_fit)
npred = np.squeeze(np.apply_over_axes(np.sum, norms * sig, [2, 3]))
norms = np.squeeze(norms)
flux = norms * fn.flux(self.ebins[:-1], self.ebins[1:])
eflux = norms * fn.eflux(self.ebins[:-1], self.ebins[1:])
dnde = norms * fn.dnde(self.ectr)
e2dnde = self.ectr**2 * dnde
return dict(e_min=self.ebins[:-1], e_max=self.ebins[1:],
e_ref=self.ectr,
npred=npred, flux=flux, eflux=eflux,
dnde=dnde, e2dnde=e2dnde) | python | def diff_flux_threshold(self, skydir, fn, ts_thresh, min_counts):
"""Compute the differential flux threshold for a point source at
position ``skydir`` with spectral parameterization ``fn``.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinates at which the sensitivity will be evaluated.
fn : `~fermipy.spectrum.SpectralFunction`
ts_thresh : float
Threshold on the detection test statistic (TS).
min_counts : float
Threshold on the minimum number of counts.
"""
sig, bkg, bkg_fit = self.compute_counts(skydir, fn)
norms = irfs.compute_norm(sig, bkg, ts_thresh,
min_counts, sum_axes=[2, 3],
rebin_axes=[10, 1],
bkg_fit=bkg_fit)
npred = np.squeeze(np.apply_over_axes(np.sum, norms * sig, [2, 3]))
norms = np.squeeze(norms)
flux = norms * fn.flux(self.ebins[:-1], self.ebins[1:])
eflux = norms * fn.eflux(self.ebins[:-1], self.ebins[1:])
dnde = norms * fn.dnde(self.ectr)
e2dnde = self.ectr**2 * dnde
return dict(e_min=self.ebins[:-1], e_max=self.ebins[1:],
e_ref=self.ectr,
npred=npred, flux=flux, eflux=eflux,
dnde=dnde, e2dnde=e2dnde) | [
"def",
"diff_flux_threshold",
"(",
"self",
",",
"skydir",
",",
"fn",
",",
"ts_thresh",
",",
"min_counts",
")",
":",
"sig",
",",
"bkg",
",",
"bkg_fit",
"=",
"self",
".",
"compute_counts",
"(",
"skydir",
",",
"fn",
")",
"norms",
"=",
"irfs",
".",
"comput... | Compute the differential flux threshold for a point source at
position ``skydir`` with spectral parameterization ``fn``.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinates at which the sensitivity will be evaluated.
fn : `~fermipy.spectrum.SpectralFunction`
ts_thresh : float
Threshold on the detection test statistic (TS).
min_counts : float
Threshold on the minimum number of counts. | [
"Compute",
"the",
"differential",
"flux",
"threshold",
"for",
"a",
"point",
"source",
"at",
"position",
"skydir",
"with",
"spectral",
"parameterization",
"fn",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/sensitivity.py#L194-L230 | train | 36,212 |
fermiPy/fermipy | fermipy/hpx_utils.py | coords_to_vec | def coords_to_vec(lon, lat):
""" Converts longitute and latitude coordinates to a unit 3-vector
return array(3,n) with v_x[i],v_y[i],v_z[i] = directional cosines
"""
phi = np.radians(lon)
theta = (np.pi / 2) - np.radians(lat)
sin_t = np.sin(theta)
cos_t = np.cos(theta)
xVals = sin_t * np.cos(phi)
yVals = sin_t * np.sin(phi)
zVals = cos_t
# Stack them into the output array
out = np.vstack((xVals, yVals, zVals)).swapaxes(0, 1)
return out | python | def coords_to_vec(lon, lat):
""" Converts longitute and latitude coordinates to a unit 3-vector
return array(3,n) with v_x[i],v_y[i],v_z[i] = directional cosines
"""
phi = np.radians(lon)
theta = (np.pi / 2) - np.radians(lat)
sin_t = np.sin(theta)
cos_t = np.cos(theta)
xVals = sin_t * np.cos(phi)
yVals = sin_t * np.sin(phi)
zVals = cos_t
# Stack them into the output array
out = np.vstack((xVals, yVals, zVals)).swapaxes(0, 1)
return out | [
"def",
"coords_to_vec",
"(",
"lon",
",",
"lat",
")",
":",
"phi",
"=",
"np",
".",
"radians",
"(",
"lon",
")",
"theta",
"=",
"(",
"np",
".",
"pi",
"/",
"2",
")",
"-",
"np",
".",
"radians",
"(",
"lat",
")",
"sin_t",
"=",
"np",
".",
"sin",
"(",
... | Converts longitute and latitude coordinates to a unit 3-vector
return array(3,n) with v_x[i],v_y[i],v_z[i] = directional cosines | [
"Converts",
"longitute",
"and",
"latitude",
"coordinates",
"to",
"a",
"unit",
"3",
"-",
"vector"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L57-L73 | train | 36,213 |
fermiPy/fermipy | fermipy/hpx_utils.py | get_pixel_size_from_nside | def get_pixel_size_from_nside(nside):
""" Returns an estimate of the pixel size from the HEALPix nside coordinate
This just uses a lookup table to provide a nice round number for each
HEALPix order.
"""
order = int(np.log2(nside))
if order < 0 or order > 13:
raise ValueError('HEALPix order must be between 0 to 13 %i' % order)
return HPX_ORDER_TO_PIXSIZE[order] | python | def get_pixel_size_from_nside(nside):
""" Returns an estimate of the pixel size from the HEALPix nside coordinate
This just uses a lookup table to provide a nice round number for each
HEALPix order.
"""
order = int(np.log2(nside))
if order < 0 or order > 13:
raise ValueError('HEALPix order must be between 0 to 13 %i' % order)
return HPX_ORDER_TO_PIXSIZE[order] | [
"def",
"get_pixel_size_from_nside",
"(",
"nside",
")",
":",
"order",
"=",
"int",
"(",
"np",
".",
"log2",
"(",
"nside",
")",
")",
"if",
"order",
"<",
"0",
"or",
"order",
">",
"13",
":",
"raise",
"ValueError",
"(",
"'HEALPix order must be between 0 to 13 %i'",... | Returns an estimate of the pixel size from the HEALPix nside coordinate
This just uses a lookup table to provide a nice round number for each
HEALPix order. | [
"Returns",
"an",
"estimate",
"of",
"the",
"pixel",
"size",
"from",
"the",
"HEALPix",
"nside",
"coordinate"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L76-L86 | train | 36,214 |
fermiPy/fermipy | fermipy/hpx_utils.py | hpx_to_axes | def hpx_to_axes(h, npix):
""" Generate a sequence of bin edge vectors corresponding to the
axes of a HPX object."""
x = h.ebins
z = np.arange(npix[-1] + 1)
return x, z | python | def hpx_to_axes(h, npix):
""" Generate a sequence of bin edge vectors corresponding to the
axes of a HPX object."""
x = h.ebins
z = np.arange(npix[-1] + 1)
return x, z | [
"def",
"hpx_to_axes",
"(",
"h",
",",
"npix",
")",
":",
"x",
"=",
"h",
".",
"ebins",
"z",
"=",
"np",
".",
"arange",
"(",
"npix",
"[",
"-",
"1",
"]",
"+",
"1",
")",
"return",
"x",
",",
"z"
] | Generate a sequence of bin edge vectors corresponding to the
axes of a HPX object. | [
"Generate",
"a",
"sequence",
"of",
"bin",
"edge",
"vectors",
"corresponding",
"to",
"the",
"axes",
"of",
"a",
"HPX",
"object",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L89-L95 | train | 36,215 |
fermiPy/fermipy | fermipy/hpx_utils.py | parse_hpxregion | def parse_hpxregion(region):
"""Parse the HPX_REG header keyword into a list of tokens."""
m = re.match(r'([A-Za-z\_]*?)\((.*?)\)', region)
if m is None:
raise Exception('Failed to parse hpx region string.')
if not m.group(1):
return re.split(',', m.group(2))
else:
return [m.group(1)] + re.split(',', m.group(2)) | python | def parse_hpxregion(region):
"""Parse the HPX_REG header keyword into a list of tokens."""
m = re.match(r'([A-Za-z\_]*?)\((.*?)\)', region)
if m is None:
raise Exception('Failed to parse hpx region string.')
if not m.group(1):
return re.split(',', m.group(2))
else:
return [m.group(1)] + re.split(',', m.group(2)) | [
"def",
"parse_hpxregion",
"(",
"region",
")",
":",
"m",
"=",
"re",
".",
"match",
"(",
"r'([A-Za-z\\_]*?)\\((.*?)\\)'",
",",
"region",
")",
"if",
"m",
"is",
"None",
":",
"raise",
"Exception",
"(",
"'Failed to parse hpx region string.'",
")",
"if",
"not",
"m",
... | Parse the HPX_REG header keyword into a list of tokens. | [
"Parse",
"the",
"HPX_REG",
"header",
"keyword",
"into",
"a",
"list",
"of",
"tokens",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L226-L236 | train | 36,216 |
fermiPy/fermipy | fermipy/hpx_utils.py | upix_to_pix | def upix_to_pix(upix):
"""Get the nside from a unique pixel number."""
nside = np.power(2, np.floor(np.log2(upix / 4)) / 2).astype(int)
pix = upix - 4 * np.power(nside, 2)
return pix, nside | python | def upix_to_pix(upix):
"""Get the nside from a unique pixel number."""
nside = np.power(2, np.floor(np.log2(upix / 4)) / 2).astype(int)
pix = upix - 4 * np.power(nside, 2)
return pix, nside | [
"def",
"upix_to_pix",
"(",
"upix",
")",
":",
"nside",
"=",
"np",
".",
"power",
"(",
"2",
",",
"np",
".",
"floor",
"(",
"np",
".",
"log2",
"(",
"upix",
"/",
"4",
")",
")",
"/",
"2",
")",
".",
"astype",
"(",
"int",
")",
"pix",
"=",
"upix",
"-... | Get the nside from a unique pixel number. | [
"Get",
"the",
"nside",
"from",
"a",
"unique",
"pixel",
"number",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L244-L248 | train | 36,217 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.create_hpx | def create_hpx(cls, nside, nest, coordsys='CEL', order=-1, ebins=None,
region=None, conv=HPX_Conv('FGST_CCUBE'), pixels=None):
"""Create a HPX object.
Parameters
----------
nside : int
HEALPix nside paramter
nest : bool
True for HEALPix "NESTED" indexing scheme, False for "RING" scheme.
coordsys : str
"CEL" or "GAL"
order : int
nside = 2**order
ebins : `~numpy.ndarray`
Energy bin edges
region : str
Allows for partial-sky mappings
conv : `HPX_Conv`
Object defining the convention for column names and the like
pixels : `np.array` or `None`
For use with 'EXPLICIT' region string
"""
return cls(nside, nest, coordsys, order, ebins,
region=region, conv=conv, pixels=pixels) | python | def create_hpx(cls, nside, nest, coordsys='CEL', order=-1, ebins=None,
region=None, conv=HPX_Conv('FGST_CCUBE'), pixels=None):
"""Create a HPX object.
Parameters
----------
nside : int
HEALPix nside paramter
nest : bool
True for HEALPix "NESTED" indexing scheme, False for "RING" scheme.
coordsys : str
"CEL" or "GAL"
order : int
nside = 2**order
ebins : `~numpy.ndarray`
Energy bin edges
region : str
Allows for partial-sky mappings
conv : `HPX_Conv`
Object defining the convention for column names and the like
pixels : `np.array` or `None`
For use with 'EXPLICIT' region string
"""
return cls(nside, nest, coordsys, order, ebins,
region=region, conv=conv, pixels=pixels) | [
"def",
"create_hpx",
"(",
"cls",
",",
"nside",
",",
"nest",
",",
"coordsys",
"=",
"'CEL'",
",",
"order",
"=",
"-",
"1",
",",
"ebins",
"=",
"None",
",",
"region",
"=",
"None",
",",
"conv",
"=",
"HPX_Conv",
"(",
"'FGST_CCUBE'",
")",
",",
"pixels",
"=... | Create a HPX object.
Parameters
----------
nside : int
HEALPix nside paramter
nest : bool
True for HEALPix "NESTED" indexing scheme, False for "RING" scheme.
coordsys : str
"CEL" or "GAL"
order : int
nside = 2**order
ebins : `~numpy.ndarray`
Energy bin edges
region : str
Allows for partial-sky mappings
conv : `HPX_Conv`
Object defining the convention for column names and the like
pixels : `np.array` or `None`
For use with 'EXPLICIT' region string | [
"Create",
"a",
"HPX",
"object",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L423-L454 | train | 36,218 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.identify_HPX_convention | def identify_HPX_convention(header):
""" Identify the convention used to write this file """
# Hopefully the file contains the HPX_CONV keyword specifying
# the convention used
try:
return header['HPX_CONV']
except KeyError:
pass
indxschm = header.get('INDXSCHM', None)
# Try based on the EXTNAME keyword
extname = header.get('EXTNAME', None)
if extname == 'HPXEXPOSURES':
return 'FGST_BEXPCUBE'
elif extname == 'SKYMAP2':
if 'COORDTYPE' in header.keys():
return 'GALPROP'
else:
return 'GALPROP2'
# Check for the INDXSCHM keyword
if indxschm == 'SPARSE':
return 'FGST_SRCMAP_SPARSE'
# Check the name of the first column
colname = header['TTYPE1']
if colname == 'PIX':
colname = header['TTYPE2']
if colname == 'KEY':
return 'FGST_SRCMAP_SPARSE'
elif colname == 'ENERGY1':
return 'FGST_TEMPLATE'
elif colname == 'COSBINS':
return 'FGST_LTCUBE'
elif colname == 'Bin0':
return 'GALPROP'
elif colname in ['CHANNEL1', 'Bin 0']:
if extname == 'SKYMAP':
return 'FGST_CCUBE'
else:
return 'FGST_SRCMAP'
else:
raise ValueError("Could not identify HEALPix convention") | python | def identify_HPX_convention(header):
""" Identify the convention used to write this file """
# Hopefully the file contains the HPX_CONV keyword specifying
# the convention used
try:
return header['HPX_CONV']
except KeyError:
pass
indxschm = header.get('INDXSCHM', None)
# Try based on the EXTNAME keyword
extname = header.get('EXTNAME', None)
if extname == 'HPXEXPOSURES':
return 'FGST_BEXPCUBE'
elif extname == 'SKYMAP2':
if 'COORDTYPE' in header.keys():
return 'GALPROP'
else:
return 'GALPROP2'
# Check for the INDXSCHM keyword
if indxschm == 'SPARSE':
return 'FGST_SRCMAP_SPARSE'
# Check the name of the first column
colname = header['TTYPE1']
if colname == 'PIX':
colname = header['TTYPE2']
if colname == 'KEY':
return 'FGST_SRCMAP_SPARSE'
elif colname == 'ENERGY1':
return 'FGST_TEMPLATE'
elif colname == 'COSBINS':
return 'FGST_LTCUBE'
elif colname == 'Bin0':
return 'GALPROP'
elif colname in ['CHANNEL1', 'Bin 0']:
if extname == 'SKYMAP':
return 'FGST_CCUBE'
else:
return 'FGST_SRCMAP'
else:
raise ValueError("Could not identify HEALPix convention") | [
"def",
"identify_HPX_convention",
"(",
"header",
")",
":",
"# Hopefully the file contains the HPX_CONV keyword specifying",
"# the convention used",
"try",
":",
"return",
"header",
"[",
"'HPX_CONV'",
"]",
"except",
"KeyError",
":",
"pass",
"indxschm",
"=",
"header",
".",
... | Identify the convention used to write this file | [
"Identify",
"the",
"convention",
"used",
"to",
"write",
"this",
"file"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L457-L501 | train | 36,219 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.make_header | def make_header(self):
""" Builds and returns FITS header for this HEALPix map """
cards = [fits.Card("TELESCOP", "GLAST"),
fits.Card("INSTRUME", "LAT"),
fits.Card(self._conv.coordsys, self._coordsys),
fits.Card("PIXTYPE", "HEALPIX"),
fits.Card("ORDERING", self.ordering),
fits.Card("ORDER", self._order),
fits.Card("NSIDE", self._nside),
fits.Card("FIRSTPIX", 0),
fits.Card("LASTPIX", self._maxpix - 1),
fits.Card("HPX_CONV", self._conv.convname)]
if self._coordsys == "CEL":
cards.append(fits.Card("EQUINOX", 2000.0,
"Equinox of RA & DEC specifications"))
if self._region is not None:
cards.append(fits.Card("HPX_REG", self._region))
cards.append(fits.Card("INDXSCHM", "PARTIAL"))
elif self._ipix is not None:
cards.append(fits.Card("INDXSCHM", "EXPLICIT"))
else:
if self._conv.convname in ['FGST_SRCMAP_SPARSE']:
cards.append(fits.Card("INDXSCHM", "SPARSE"))
else:
cards.append(fits.Card("INDXSCHM", "IMPLICIT"))
header = fits.Header(cards)
return header | python | def make_header(self):
""" Builds and returns FITS header for this HEALPix map """
cards = [fits.Card("TELESCOP", "GLAST"),
fits.Card("INSTRUME", "LAT"),
fits.Card(self._conv.coordsys, self._coordsys),
fits.Card("PIXTYPE", "HEALPIX"),
fits.Card("ORDERING", self.ordering),
fits.Card("ORDER", self._order),
fits.Card("NSIDE", self._nside),
fits.Card("FIRSTPIX", 0),
fits.Card("LASTPIX", self._maxpix - 1),
fits.Card("HPX_CONV", self._conv.convname)]
if self._coordsys == "CEL":
cards.append(fits.Card("EQUINOX", 2000.0,
"Equinox of RA & DEC specifications"))
if self._region is not None:
cards.append(fits.Card("HPX_REG", self._region))
cards.append(fits.Card("INDXSCHM", "PARTIAL"))
elif self._ipix is not None:
cards.append(fits.Card("INDXSCHM", "EXPLICIT"))
else:
if self._conv.convname in ['FGST_SRCMAP_SPARSE']:
cards.append(fits.Card("INDXSCHM", "SPARSE"))
else:
cards.append(fits.Card("INDXSCHM", "IMPLICIT"))
header = fits.Header(cards)
return header | [
"def",
"make_header",
"(",
"self",
")",
":",
"cards",
"=",
"[",
"fits",
".",
"Card",
"(",
"\"TELESCOP\"",
",",
"\"GLAST\"",
")",
",",
"fits",
".",
"Card",
"(",
"\"INSTRUME\"",
",",
"\"LAT\"",
")",
",",
"fits",
".",
"Card",
"(",
"self",
".",
"_conv",
... | Builds and returns FITS header for this HEALPix map | [
"Builds",
"and",
"returns",
"FITS",
"header",
"for",
"this",
"HEALPix",
"map"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L574-L603 | train | 36,220 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.make_hdu | def make_hdu(self, data, **kwargs):
""" Builds and returns a FITs HDU with input data
data : The data begin stored
Keyword arguments
-------------------
extname : The HDU extension name
colbase : The prefix for column names
"""
shape = data.shape
extname = kwargs.get('extname', self.conv.extname)
if shape[-1] != self._npix:
raise Exception(
"Size of data array does not match number of pixels")
cols = []
if self._ipix is not None:
cols.append(fits.Column(self.conv.idxstring, "J", array=self._ipix))
if self.conv.convname == 'FGST_SRCMAP_SPARSE':
nonzero = data.nonzero()
nfilled = len(nonzero[0])
if len(shape) == 1:
cols.append(fits.Column("PIX", "J", array=nonzero[0].astype(int)))
cols.append(fits.Column("VALUE", "E", array=data.flat[nonzero].astype(float).reshape(nfilled)))
elif len(shape) == 2:
keys = self._npix * nonzero[0] + nonzero[1]
cols.append(fits.Column("PIX", "J", array=nonzero[1].reshape(nfilled)))
cols.append(fits.Column("CHANNEL", "I", array=nonzero[0].reshape(nfilled)))
cols.append(fits.Column("VALUE", "E",
array=data.flat[keys].astype(float).reshape(nfilled)))
else:
raise Exception("HPX.write_fits only handles 1D and 2D maps")
else:
if len(shape) == 1:
cols.append(fits.Column(self.conv.colname(
indx=self.conv.firstcol), "E", array=data.astype(float)))
elif len(shape) == 2:
for i in range(shape[0]):
cols.append(fits.Column(self.conv.colname(
indx=i + self.conv.firstcol), "E", array=data[i].astype(float)))
else:
raise Exception("HPX.write_fits only handles 1D and 2D maps")
header = self.make_header()
hdu = fits.BinTableHDU.from_columns(cols, header=header, name=extname)
return hdu | python | def make_hdu(self, data, **kwargs):
""" Builds and returns a FITs HDU with input data
data : The data begin stored
Keyword arguments
-------------------
extname : The HDU extension name
colbase : The prefix for column names
"""
shape = data.shape
extname = kwargs.get('extname', self.conv.extname)
if shape[-1] != self._npix:
raise Exception(
"Size of data array does not match number of pixels")
cols = []
if self._ipix is not None:
cols.append(fits.Column(self.conv.idxstring, "J", array=self._ipix))
if self.conv.convname == 'FGST_SRCMAP_SPARSE':
nonzero = data.nonzero()
nfilled = len(nonzero[0])
if len(shape) == 1:
cols.append(fits.Column("PIX", "J", array=nonzero[0].astype(int)))
cols.append(fits.Column("VALUE", "E", array=data.flat[nonzero].astype(float).reshape(nfilled)))
elif len(shape) == 2:
keys = self._npix * nonzero[0] + nonzero[1]
cols.append(fits.Column("PIX", "J", array=nonzero[1].reshape(nfilled)))
cols.append(fits.Column("CHANNEL", "I", array=nonzero[0].reshape(nfilled)))
cols.append(fits.Column("VALUE", "E",
array=data.flat[keys].astype(float).reshape(nfilled)))
else:
raise Exception("HPX.write_fits only handles 1D and 2D maps")
else:
if len(shape) == 1:
cols.append(fits.Column(self.conv.colname(
indx=self.conv.firstcol), "E", array=data.astype(float)))
elif len(shape) == 2:
for i in range(shape[0]):
cols.append(fits.Column(self.conv.colname(
indx=i + self.conv.firstcol), "E", array=data[i].astype(float)))
else:
raise Exception("HPX.write_fits only handles 1D and 2D maps")
header = self.make_header()
hdu = fits.BinTableHDU.from_columns(cols, header=header, name=extname)
return hdu | [
"def",
"make_hdu",
"(",
"self",
",",
"data",
",",
"*",
"*",
"kwargs",
")",
":",
"shape",
"=",
"data",
".",
"shape",
"extname",
"=",
"kwargs",
".",
"get",
"(",
"'extname'",
",",
"self",
".",
"conv",
".",
"extname",
")",
"if",
"shape",
"[",
"-",
"1... | Builds and returns a FITs HDU with input data
data : The data begin stored
Keyword arguments
-------------------
extname : The HDU extension name
colbase : The prefix for column names | [
"Builds",
"and",
"returns",
"a",
"FITs",
"HDU",
"with",
"input",
"data"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L606-L656 | train | 36,221 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.write_fits | def write_fits(self, data, outfile, extname="SKYMAP", clobber=True):
""" Write input data to a FITS file
data : The data begin stored
outfile : The name of the output file
extname : The HDU extension name
clobber : True -> overwrite existing files
"""
hdu_prim = fits.PrimaryHDU()
hdu_hpx = self.make_hdu(data, extname=extname)
hl = [hdu_prim, hdu_hpx]
if self.conv.energy_hdu == 'EBOUNDS':
hdu_energy = self.make_energy_bounds_hdu()
elif self.conv.energy_hdu == 'ENERGIES':
hdu_energy = self.make_energies_hdu()
if hdu_energy is not None:
hl.append(hdu_energy)
hdulist = fits.HDUList(hl)
hdulist.writeto(outfile, overwrite=clobber) | python | def write_fits(self, data, outfile, extname="SKYMAP", clobber=True):
""" Write input data to a FITS file
data : The data begin stored
outfile : The name of the output file
extname : The HDU extension name
clobber : True -> overwrite existing files
"""
hdu_prim = fits.PrimaryHDU()
hdu_hpx = self.make_hdu(data, extname=extname)
hl = [hdu_prim, hdu_hpx]
if self.conv.energy_hdu == 'EBOUNDS':
hdu_energy = self.make_energy_bounds_hdu()
elif self.conv.energy_hdu == 'ENERGIES':
hdu_energy = self.make_energies_hdu()
if hdu_energy is not None:
hl.append(hdu_energy)
hdulist = fits.HDUList(hl)
hdulist.writeto(outfile, overwrite=clobber) | [
"def",
"write_fits",
"(",
"self",
",",
"data",
",",
"outfile",
",",
"extname",
"=",
"\"SKYMAP\"",
",",
"clobber",
"=",
"True",
")",
":",
"hdu_prim",
"=",
"fits",
".",
"PrimaryHDU",
"(",
")",
"hdu_hpx",
"=",
"self",
".",
"make_hdu",
"(",
"data",
",",
... | Write input data to a FITS file
data : The data begin stored
outfile : The name of the output file
extname : The HDU extension name
clobber : True -> overwrite existing files | [
"Write",
"input",
"data",
"to",
"a",
"FITS",
"file"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L686-L704 | train | 36,222 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.get_index_list | def get_index_list(nside, nest, region):
""" Returns the list of pixels indices for all the pixels in a region
nside : HEALPix nside parameter
nest : True for 'NESTED', False = 'RING'
region : HEALPix region string
"""
tokens = parse_hpxregion(region)
if tokens[0] == 'DISK':
vec = coords_to_vec(float(tokens[1]), float(tokens[2]))
ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])),
inclusive=False, nest=nest)
elif tokens[0] == 'DISK_INC':
vec = coords_to_vec(float(tokens[1]), float(tokens[2]))
ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])),
inclusive=True, fact=int(tokens[4]),
nest=nest)
elif tokens[0] == 'HPX_PIXEL':
nside_pix = int(tokens[2])
if tokens[1] == 'NESTED':
ipix_ring = hp.nest2ring(nside_pix, int(tokens[3]))
elif tokens[1] == 'RING':
ipix_ring = int(tokens[3])
else:
raise Exception(
"Did not recognize ordering scheme %s" % tokens[1])
ilist = match_hpx_pixel(nside, nest, nside_pix, ipix_ring)
else:
raise Exception(
"HPX.get_index_list did not recognize region type %s" % tokens[0])
return ilist | python | def get_index_list(nside, nest, region):
""" Returns the list of pixels indices for all the pixels in a region
nside : HEALPix nside parameter
nest : True for 'NESTED', False = 'RING'
region : HEALPix region string
"""
tokens = parse_hpxregion(region)
if tokens[0] == 'DISK':
vec = coords_to_vec(float(tokens[1]), float(tokens[2]))
ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])),
inclusive=False, nest=nest)
elif tokens[0] == 'DISK_INC':
vec = coords_to_vec(float(tokens[1]), float(tokens[2]))
ilist = hp.query_disc(nside, vec[0], np.radians(float(tokens[3])),
inclusive=True, fact=int(tokens[4]),
nest=nest)
elif tokens[0] == 'HPX_PIXEL':
nside_pix = int(tokens[2])
if tokens[1] == 'NESTED':
ipix_ring = hp.nest2ring(nside_pix, int(tokens[3]))
elif tokens[1] == 'RING':
ipix_ring = int(tokens[3])
else:
raise Exception(
"Did not recognize ordering scheme %s" % tokens[1])
ilist = match_hpx_pixel(nside, nest, nside_pix, ipix_ring)
else:
raise Exception(
"HPX.get_index_list did not recognize region type %s" % tokens[0])
return ilist | [
"def",
"get_index_list",
"(",
"nside",
",",
"nest",
",",
"region",
")",
":",
"tokens",
"=",
"parse_hpxregion",
"(",
"region",
")",
"if",
"tokens",
"[",
"0",
"]",
"==",
"'DISK'",
":",
"vec",
"=",
"coords_to_vec",
"(",
"float",
"(",
"tokens",
"[",
"1",
... | Returns the list of pixels indices for all the pixels in a region
nside : HEALPix nside parameter
nest : True for 'NESTED', False = 'RING'
region : HEALPix region string | [
"Returns",
"the",
"list",
"of",
"pixels",
"indices",
"for",
"all",
"the",
"pixels",
"in",
"a",
"region"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L707-L738 | train | 36,223 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.get_ref_dir | def get_ref_dir(region, coordsys):
""" Finds and returns the reference direction for a given
HEALPix region string.
region : a string describing a HEALPix region
coordsys : coordinate system, GAL | CEL
"""
if region is None:
if coordsys == "GAL":
c = SkyCoord(0., 0., frame=Galactic, unit="deg")
elif coordsys == "CEL":
c = SkyCoord(0., 0., frame=ICRS, unit="deg")
return c
tokens = parse_hpxregion(region)
if tokens[0] in ['DISK', 'DISK_INC']:
if coordsys == "GAL":
c = SkyCoord(float(tokens[1]), float(
tokens[2]), frame=Galactic, unit="deg")
elif coordsys == "CEL":
c = SkyCoord(float(tokens[1]), float(
tokens[2]), frame=ICRS, unit="deg")
return c
elif tokens[0] == 'HPX_PIXEL':
nside_pix = int(tokens[2])
ipix_pix = int(tokens[3])
if tokens[1] == 'NESTED':
nest_pix = True
elif tokens[1] == 'RING':
nest_pix = False
else:
raise Exception(
"Did not recognize ordering scheme %s" % tokens[1])
theta, phi = hp.pix2ang(nside_pix, ipix_pix, nest_pix)
lat = np.degrees((np.pi / 2) - theta)
lon = np.degrees(phi)
if coordsys == "GAL":
c = SkyCoord(lon, lat, frame=Galactic, unit="deg")
elif coordsys == "CEL":
c = SkyCoord(lon, lat, frame=ICRS, unit="deg")
return c
else:
raise Exception(
"HPX.get_ref_dir did not recognize region type %s" % tokens[0])
return None | python | def get_ref_dir(region, coordsys):
""" Finds and returns the reference direction for a given
HEALPix region string.
region : a string describing a HEALPix region
coordsys : coordinate system, GAL | CEL
"""
if region is None:
if coordsys == "GAL":
c = SkyCoord(0., 0., frame=Galactic, unit="deg")
elif coordsys == "CEL":
c = SkyCoord(0., 0., frame=ICRS, unit="deg")
return c
tokens = parse_hpxregion(region)
if tokens[0] in ['DISK', 'DISK_INC']:
if coordsys == "GAL":
c = SkyCoord(float(tokens[1]), float(
tokens[2]), frame=Galactic, unit="deg")
elif coordsys == "CEL":
c = SkyCoord(float(tokens[1]), float(
tokens[2]), frame=ICRS, unit="deg")
return c
elif tokens[0] == 'HPX_PIXEL':
nside_pix = int(tokens[2])
ipix_pix = int(tokens[3])
if tokens[1] == 'NESTED':
nest_pix = True
elif tokens[1] == 'RING':
nest_pix = False
else:
raise Exception(
"Did not recognize ordering scheme %s" % tokens[1])
theta, phi = hp.pix2ang(nside_pix, ipix_pix, nest_pix)
lat = np.degrees((np.pi / 2) - theta)
lon = np.degrees(phi)
if coordsys == "GAL":
c = SkyCoord(lon, lat, frame=Galactic, unit="deg")
elif coordsys == "CEL":
c = SkyCoord(lon, lat, frame=ICRS, unit="deg")
return c
else:
raise Exception(
"HPX.get_ref_dir did not recognize region type %s" % tokens[0])
return None | [
"def",
"get_ref_dir",
"(",
"region",
",",
"coordsys",
")",
":",
"if",
"region",
"is",
"None",
":",
"if",
"coordsys",
"==",
"\"GAL\"",
":",
"c",
"=",
"SkyCoord",
"(",
"0.",
",",
"0.",
",",
"frame",
"=",
"Galactic",
",",
"unit",
"=",
"\"deg\"",
")",
... | Finds and returns the reference direction for a given
HEALPix region string.
region : a string describing a HEALPix region
coordsys : coordinate system, GAL | CEL | [
"Finds",
"and",
"returns",
"the",
"reference",
"direction",
"for",
"a",
"given",
"HEALPix",
"region",
"string",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L741-L785 | train | 36,224 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.make_wcs | def make_wcs(self, naxis=2, proj='CAR', energies=None, oversample=2):
""" Make a WCS projection appropirate for this HPX pixelization
"""
w = WCS(naxis=naxis)
skydir = self.get_ref_dir(self._region, self.coordsys)
if self.coordsys == 'CEL':
w.wcs.ctype[0] = 'RA---%s' % (proj)
w.wcs.ctype[1] = 'DEC--%s' % (proj)
w.wcs.crval[0] = skydir.ra.deg
w.wcs.crval[1] = skydir.dec.deg
elif self.coordsys == 'GAL':
w.wcs.ctype[0] = 'GLON-%s' % (proj)
w.wcs.ctype[1] = 'GLAT-%s' % (proj)
w.wcs.crval[0] = skydir.galactic.l.deg
w.wcs.crval[1] = skydir.galactic.b.deg
else:
raise Exception('Unrecognized coordinate system.')
pixsize = get_pixel_size_from_nside(self.nside)
roisize = self.get_region_size(self._region)
allsky = False
if roisize > 45:
roisize = 90
allsky = True
npixels = int(2. * roisize / pixsize) * oversample
crpix = npixels / 2.
if allsky:
w.wcs.crpix[0] = 2 * crpix
npix = (2 * npixels, npixels)
else:
w.wcs.crpix[0] = crpix
npix = (npixels, npixels)
w.wcs.crpix[1] = crpix
w.wcs.cdelt[0] = -pixsize / oversample
w.wcs.cdelt[1] = pixsize / oversample
if naxis == 3:
w.wcs.crpix[2] = 1
w.wcs.ctype[2] = 'Energy'
if energies is not None:
w.wcs.crval[2] = 10 ** energies[0]
w.wcs.cdelt[2] = 10 ** energies[1] - 10 ** energies[0]
w = WCS(w.to_header())
wcs_proj = WCSProj(w, npix)
return wcs_proj | python | def make_wcs(self, naxis=2, proj='CAR', energies=None, oversample=2):
""" Make a WCS projection appropirate for this HPX pixelization
"""
w = WCS(naxis=naxis)
skydir = self.get_ref_dir(self._region, self.coordsys)
if self.coordsys == 'CEL':
w.wcs.ctype[0] = 'RA---%s' % (proj)
w.wcs.ctype[1] = 'DEC--%s' % (proj)
w.wcs.crval[0] = skydir.ra.deg
w.wcs.crval[1] = skydir.dec.deg
elif self.coordsys == 'GAL':
w.wcs.ctype[0] = 'GLON-%s' % (proj)
w.wcs.ctype[1] = 'GLAT-%s' % (proj)
w.wcs.crval[0] = skydir.galactic.l.deg
w.wcs.crval[1] = skydir.galactic.b.deg
else:
raise Exception('Unrecognized coordinate system.')
pixsize = get_pixel_size_from_nside(self.nside)
roisize = self.get_region_size(self._region)
allsky = False
if roisize > 45:
roisize = 90
allsky = True
npixels = int(2. * roisize / pixsize) * oversample
crpix = npixels / 2.
if allsky:
w.wcs.crpix[0] = 2 * crpix
npix = (2 * npixels, npixels)
else:
w.wcs.crpix[0] = crpix
npix = (npixels, npixels)
w.wcs.crpix[1] = crpix
w.wcs.cdelt[0] = -pixsize / oversample
w.wcs.cdelt[1] = pixsize / oversample
if naxis == 3:
w.wcs.crpix[2] = 1
w.wcs.ctype[2] = 'Energy'
if energies is not None:
w.wcs.crval[2] = 10 ** energies[0]
w.wcs.cdelt[2] = 10 ** energies[1] - 10 ** energies[0]
w = WCS(w.to_header())
wcs_proj = WCSProj(w, npix)
return wcs_proj | [
"def",
"make_wcs",
"(",
"self",
",",
"naxis",
"=",
"2",
",",
"proj",
"=",
"'CAR'",
",",
"energies",
"=",
"None",
",",
"oversample",
"=",
"2",
")",
":",
"w",
"=",
"WCS",
"(",
"naxis",
"=",
"naxis",
")",
"skydir",
"=",
"self",
".",
"get_ref_dir",
"... | Make a WCS projection appropirate for this HPX pixelization | [
"Make",
"a",
"WCS",
"projection",
"appropirate",
"for",
"this",
"HPX",
"pixelization"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L805-L854 | train | 36,225 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.get_sky_coords | def get_sky_coords(self):
""" Get the sky coordinates of all the pixels in this pixelization """
if self._ipix is None:
theta, phi = hp.pix2ang(
self._nside, list(range(self._npix)), self._nest)
else:
theta, phi = hp.pix2ang(self._nside, self._ipix, self._nest)
lat = np.degrees((np.pi / 2) - theta)
lon = np.degrees(phi)
return np.vstack([lon, lat]).T | python | def get_sky_coords(self):
""" Get the sky coordinates of all the pixels in this pixelization """
if self._ipix is None:
theta, phi = hp.pix2ang(
self._nside, list(range(self._npix)), self._nest)
else:
theta, phi = hp.pix2ang(self._nside, self._ipix, self._nest)
lat = np.degrees((np.pi / 2) - theta)
lon = np.degrees(phi)
return np.vstack([lon, lat]).T | [
"def",
"get_sky_coords",
"(",
"self",
")",
":",
"if",
"self",
".",
"_ipix",
"is",
"None",
":",
"theta",
",",
"phi",
"=",
"hp",
".",
"pix2ang",
"(",
"self",
".",
"_nside",
",",
"list",
"(",
"range",
"(",
"self",
".",
"_npix",
")",
")",
",",
"self"... | Get the sky coordinates of all the pixels in this pixelization | [
"Get",
"the",
"sky",
"coordinates",
"of",
"all",
"the",
"pixels",
"in",
"this",
"pixelization"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L856-L866 | train | 36,226 |
fermiPy/fermipy | fermipy/hpx_utils.py | HPX.skydir_to_pixel | def skydir_to_pixel(self, skydir):
"""Return the pixel index of a SkyCoord object."""
if self.coordsys in ['CEL', 'EQU']:
skydir = skydir.transform_to('icrs')
lon = skydir.ra.deg
lat = skydir.dec.deg
else:
skydir = skydir.transform_to('galactic')
lon = skydir.l.deg
lat = skydir.b.deg
return self.get_pixel_indices(lat, lon) | python | def skydir_to_pixel(self, skydir):
"""Return the pixel index of a SkyCoord object."""
if self.coordsys in ['CEL', 'EQU']:
skydir = skydir.transform_to('icrs')
lon = skydir.ra.deg
lat = skydir.dec.deg
else:
skydir = skydir.transform_to('galactic')
lon = skydir.l.deg
lat = skydir.b.deg
return self.get_pixel_indices(lat, lon) | [
"def",
"skydir_to_pixel",
"(",
"self",
",",
"skydir",
")",
":",
"if",
"self",
".",
"coordsys",
"in",
"[",
"'CEL'",
",",
"'EQU'",
"]",
":",
"skydir",
"=",
"skydir",
".",
"transform_to",
"(",
"'icrs'",
")",
"lon",
"=",
"skydir",
".",
"ra",
".",
"deg",
... | Return the pixel index of a SkyCoord object. | [
"Return",
"the",
"pixel",
"index",
"of",
"a",
"SkyCoord",
"object",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L882-L893 | train | 36,227 |
fermiPy/fermipy | fermipy/hpx_utils.py | HpxToWcsMapping.write_to_fitsfile | def write_to_fitsfile(self, fitsfile, clobber=True):
"""Write this mapping to a FITS file, to avoid having to recompute it
"""
from fermipy.skymap import Map
hpx_header = self._hpx.make_header()
index_map = Map(self.ipixs, self.wcs)
mult_map = Map(self.mult_val, self.wcs)
prim_hdu = index_map.create_primary_hdu()
mult_hdu = index_map.create_image_hdu()
for key in ['COORDSYS', 'ORDERING', 'PIXTYPE',
'ORDERING', 'ORDER', 'NSIDE',
'FIRSTPIX', 'LASTPIX']:
prim_hdu.header[key] = hpx_header[key]
mult_hdu.header[key] = hpx_header[key]
hdulist = fits.HDUList([prim_hdu, mult_hdu])
hdulist.writeto(fitsfile, overwrite=clobber) | python | def write_to_fitsfile(self, fitsfile, clobber=True):
"""Write this mapping to a FITS file, to avoid having to recompute it
"""
from fermipy.skymap import Map
hpx_header = self._hpx.make_header()
index_map = Map(self.ipixs, self.wcs)
mult_map = Map(self.mult_val, self.wcs)
prim_hdu = index_map.create_primary_hdu()
mult_hdu = index_map.create_image_hdu()
for key in ['COORDSYS', 'ORDERING', 'PIXTYPE',
'ORDERING', 'ORDER', 'NSIDE',
'FIRSTPIX', 'LASTPIX']:
prim_hdu.header[key] = hpx_header[key]
mult_hdu.header[key] = hpx_header[key]
hdulist = fits.HDUList([prim_hdu, mult_hdu])
hdulist.writeto(fitsfile, overwrite=clobber) | [
"def",
"write_to_fitsfile",
"(",
"self",
",",
"fitsfile",
",",
"clobber",
"=",
"True",
")",
":",
"from",
"fermipy",
".",
"skymap",
"import",
"Map",
"hpx_header",
"=",
"self",
".",
"_hpx",
".",
"make_header",
"(",
")",
"index_map",
"=",
"Map",
"(",
"self"... | Write this mapping to a FITS file, to avoid having to recompute it | [
"Write",
"this",
"mapping",
"to",
"a",
"FITS",
"file",
"to",
"avoid",
"having",
"to",
"recompute",
"it"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L953-L969 | train | 36,228 |
def create_from_fitsfile(cls, fitsfile):
    """Build a mapping object by reading a previously written FITS file.
    """
    from fermipy.skymap import Map
    idx_map = Map.create_from_fits(fitsfile)
    weight_map = Map.create_from_fits(fitsfile, hdu=1)
    hdulist = fits.open(fitsfile)
    hpx = HPX.create_from_hdu(hdulist[0])
    data = dict(ipixs=idx_map.counts,
                mult_val=weight_map.counts,
                npix=weight_map.counts.shape)
    return cls(hpx, idx_map.wcs, data)
"def",
"create_from_fitsfile",
"(",
"cls",
",",
"fitsfile",
")",
":",
"from",
"fermipy",
".",
"skymap",
"import",
"Map",
"index_map",
"=",
"Map",
".",
"create_from_fits",
"(",
"fitsfile",
")",
"mult_map",
"=",
"Map",
".",
"create_from_fits",
"(",
"fitsfile",
... | Read a fits file and use it to make a mapping | [
"Read",
"a",
"fits",
"file",
"and",
"use",
"it",
"to",
"make",
"a",
"mapping"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L972-L983 | train | 36,229 |
def fill_wcs_map_from_hpx_data(self, hpx_data, wcs_data, normalize=True):
    """Fill a WCS map in place from HEALPix data, using the
    pre-calculated pixel mappings stored on this object.

    Parameters
    ----------
    hpx_data : `~numpy.ndarray`
        Input HEALPix data; 1-D, or 2-D with the energy axis last.
    wcs_data : `~numpy.ndarray`
        Output WCS array being filled; must have exactly one more axis
        than ``hpx_data`` (two spatial axes first, energy axis last).
    normalize : bool
        If True, preserve the map integral by weighting each WCS pixel
        with the pre-computed multiplicity correction (``self._mult_val``).

    Raises
    ------
    ValueError
        If the dimensionality or the energy-axis sizes of the inputs
        do not match.
    """
    hpx_naxis = len(hpx_data.shape)
    wcs_naxis = len(wcs_data.shape)
    if hpx_naxis + 1 != wcs_naxis:
        raise ValueError("HPX.fill_wcs_map_from_hpx_data: HPX naxis should be 1 less that WCS naxis: %i, %i" % (hpx_naxis, wcs_naxis))
    if hpx_naxis == 2:
        if hpx_data.shape[1] != wcs_data.shape[2]:
            # BUG FIX: the original formatted hpx_naxis[1]/wcs_naxis[2],
            # which are plain ints; subscripting them raised TypeError
            # instead of this intended ValueError message.
            raise ValueError("HPX.fill_wcs_map_from_hpx_data: size of energy axes don't match: %i, %i" % (hpx_data.shape[1], wcs_data.shape[2]))
    # HEALPix pixel index for each valid (flattened) WCS pixel.
    lmap_valid = self._lmap[self._valid]
    wcs_layer_shape = wcs_data.shape[0] * wcs_data.shape[1]
    if hpx_naxis == 2:
        # Fill one energy layer at a time.
        # (Also removed 'orig_value', an unused leftover in the original.)
        for i in range(hpx_data.shape[1]):
            wcs_data_layer = np.zeros(wcs_layer_shape)
            wcs_data_layer[self._valid] = hpx_data[:, i][lmap_valid]
            if normalize:
                wcs_data_layer *= self._mult_val
            wcs_data[:, :, i].flat = wcs_data_layer
    else:
        wcs_data_flat = np.zeros(wcs_layer_shape)
        wcs_data_flat[self._valid] = hpx_data[lmap_valid]
        if normalize:
            wcs_data_flat *= self._mult_val
        wcs_data.flat = wcs_data_flat
"def",
"fill_wcs_map_from_hpx_data",
"(",
"self",
",",
"hpx_data",
",",
"wcs_data",
",",
"normalize",
"=",
"True",
")",
":",
"# FIXME, there really ought to be a better way to do this",
"hpx_naxis",
"=",
"len",
"(",
"hpx_data",
".",
"shape",
")",
"wcs_naxis",
"=",
"... | Fills the wcs map from the hpx data using the pre-calculated
mappings
hpx_data : the input HEALPix data
wcs_data : the data array being filled
normalize : True -> perserve integral by splitting HEALPix values between bins | [
"Fills",
"the",
"wcs",
"map",
"from",
"the",
"hpx",
"data",
"using",
"the",
"pre",
"-",
"calculated",
"mappings"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L985-L1020 | train | 36,230 |
def make_wcs_data_from_hpx_data(self, hpx_data, wcs, normalize=True):
    """Allocate a WCS-shaped array and fill it from HEALPix data using
    the pre-calculated mappings.

    hpx_data : the input HEALPix data
    wcs : the WCS object (its ``npix`` attribute gives the output shape)
    normalize : True -> preserve the integral by splitting HEALPix
        values between bins

    Returns the newly created, filled WCS data array.
    """
    out = np.zeros(wcs.npix)
    self.fill_wcs_map_from_hpx_data(hpx_data, out, normalize)
    return out
"def",
"make_wcs_data_from_hpx_data",
"(",
"self",
",",
"hpx_data",
",",
"wcs",
",",
"normalize",
"=",
"True",
")",
":",
"wcs_data",
"=",
"np",
".",
"zeros",
"(",
"wcs",
".",
"npix",
")",
"self",
".",
"fill_wcs_map_from_hpx_data",
"(",
"hpx_data",
",",
"wc... | Creates and fills a wcs map from the hpx data using the pre-calculated
mappings
hpx_data : the input HEALPix data
wcs : the WCS object
normalize : True -> perserve integral by splitting HEALPix values between bins | [
"Creates",
"and",
"fills",
"a",
"wcs",
"map",
"from",
"the",
"hpx",
"data",
"using",
"the",
"pre",
"-",
"calculated",
"mappings"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/hpx_utils.py#L1022-L1032 | train | 36,231 |
def _get_enum_bins(configfile):
    """Get the number of energy bins in the SED.

    Parameters
    ----------
    configfile : str
        Fermipy configuration file.

    Returns
    -------
    nbins : int
        The number of energy bins
    """
    # Use a context manager so the config file handle is closed
    # promptly (the original left the handle to the garbage collector).
    with open(configfile) as fin:
        config = yaml.safe_load(fin)
    emin = config['selection']['emin']
    emax = config['selection']['emax']
    # Number of decades spanned by the selection, times bins per decade.
    ndec = np.log10(emax) - np.log10(emin)
    binsperdec = config['binning']['binsperdec']
    nebins = int(np.round(binsperdec * ndec))
    return nebins
"def",
"_get_enum_bins",
"(",
"configfile",
")",
":",
"config",
"=",
"yaml",
".",
"safe_load",
"(",
"open",
"(",
"configfile",
")",
")",
"emin",
"=",
"config",
"[",
"'selection'",
"]",
"[",
"'emin'",
"]",
"emax",
"=",
"config",
"[",
"'selection'",
"]",
... | Get the number of energy bin in the SED
Parameters
----------
configfile : str
Fermipy configuration file.
Returns
-------
nbins : int
The number of energy bins | [
"Get",
"the",
"number",
"of",
"energy",
"bin",
"in",
"the",
"SED"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_collect.py#L33-L59 | train | 36,232 |
def fill_output_table(filelist, hdu, collist, nbins):
    """Fill the arrays from the files in filelist

    Parameters
    ----------
    filelist : list
        List of the files to get data from.
    hdu : str
        Name of the HDU containing the table with the input data.
    collist : list
        List of the column names
    nbins : int
        Number of bins in the input data arrays

    Returns
    -------
    table : astropy.table.Table
        A table with all the requested data extracted.
    """
    nfiles = len(filelist)
    shape = (nbins, nfiles)
    outdict = {}
    for c in collist:
        outdict[c['name']] = np.ndarray(shape)
    sys.stdout.write('Working on %i files: ' % nfiles)
    sys.stdout.flush()
    for i, f in enumerate(filelist):
        sys.stdout.write('.')
        sys.stdout.flush()
        tab = Table.read(f, hdu)
        for c in collist:
            cname = c['name']
            outdict[cname][:, i] = tab[cname].data
    sys.stdout.write('!\n')
    outcols = []
    for c in collist:
        cname = c['name']
        # BUG FIX: np.float (an alias for the builtin float) was
        # removed in NumPy 1.24; np.float64 is the equivalent dtype.
        if 'unit' in c:
            col = Column(data=outdict[cname], name=cname,
                         dtype=np.float64, shape=nfiles, unit=c['unit'])
        else:
            col = Column(data=outdict[cname], name=cname,
                         dtype=np.float64, shape=nfiles)
        outcols.append(col)
    tab = Table(data=outcols)
    return tab
"def",
"fill_output_table",
"(",
"filelist",
",",
"hdu",
",",
"collist",
",",
"nbins",
")",
":",
"nfiles",
"=",
"len",
"(",
"filelist",
")",
"shape",
"=",
"(",
"nbins",
",",
"nfiles",
")",
"outdict",
"=",
"{",
"}",
"for",
"c",
"in",
"collist",
":",
... | Fill the arrays from the files in filelist
Parameters
----------
filelist : list
List of the files to get data from.
hdu : str
Name of the HDU containing the table with the input data.
colllist : list
List of the column names
nbins : int
Number of bins in the input data arrays
Returns
-------
table : astropy.table.Table
A table with all the requested data extracted. | [
"Fill",
"the",
"arrays",
"from",
"the",
"files",
"in",
"filelist"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_collect.py#L63-L115 | train | 36,233 |
def vstack_tables(filelist, hdus):
    """vstack a set of HDUs from a set of files

    Parameters
    ----------
    filelist : list
        List of the files to get data from.
    hdus : list
        Names of the HDUs containing the tables with the input data.

    Returns
    -------
    out_tables : list
        A list with the table with all the requested data extracted.
    out_names : list
        A list with the names of the tables.
    """
    n_files = len(filelist)
    stacked_tables = []
    stacked_names = []
    for hdu_name in hdus:
        sys.stdout.write('Working on %i files for %s: ' % (n_files, hdu_name))
        sys.stdout.flush()
        pieces = []
        for fname in filelist:
            # A file missing this HDU is marked 'x' and skipped.
            try:
                pieces.append(Table.read(fname, hdu_name))
                sys.stdout.write('.')
            except KeyError:
                sys.stdout.write('x')
            sys.stdout.flush()
        sys.stdout.write('!\n')
        if pieces:
            stacked_tables.append(vstack(pieces))
            stacked_names.append(hdu_name)
    return (stacked_tables, stacked_names)
"def",
"vstack_tables",
"(",
"filelist",
",",
"hdus",
")",
":",
"nfiles",
"=",
"len",
"(",
"filelist",
")",
"out_tables",
"=",
"[",
"]",
"out_names",
"=",
"[",
"]",
"for",
"hdu",
"in",
"hdus",
":",
"sys",
".",
"stdout",
".",
"write",
"(",
"'Working o... | vstack a set of HDUs from a set of files
Parameters
----------
filelist : list
List of the files to get data from.
hdus : list
Names of the HDU containing the table with the input data.
Returns
-------
out_tables : list
A list with the table with all the requested data extracted.
out_names : list
A list with the names of the tables. | [
"vstack",
"a",
"set",
"of",
"HDUs",
"from",
"a",
"set",
"of",
"files"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_collect.py#L118-L160 | train | 36,234 |
def collect_summary_stats(data):
    """Collect summary statistics from an array.

    This creates a dictionary of output arrays of summary statistics,
    with the input array dimension reduced by one (statistics are
    taken along axis 0).

    Parameters
    ----------
    data : `numpy.ndarray`
        Array with the collected input data

    Returns
    -------
    output : dict
        Dictionary of `np.ndarray` with the summary data.
        These include mean, std, median, and 4 quantiles
        (0.025, 0.16, 0.84, 0.975).
    """
    # DOC FIX: the original docstring listed 0.86 for the third
    # quantile; the code has always computed the 84th percentile.
    mean = np.mean(data, axis=0)
    std = np.std(data, axis=0)
    median = np.median(data, axis=0)
    q02, q16, q84, q97 = np.percentile(data, [2.5, 16, 84, 97.5], axis=0)
    o = dict(mean=mean,
             std=std,
             median=median,
             q02=q02,
             q16=q16,
             q84=q84,
             q97=q97)
    return o
"def",
"collect_summary_stats",
"(",
"data",
")",
":",
"mean",
"=",
"np",
".",
"mean",
"(",
"data",
",",
"axis",
"=",
"0",
")",
"std",
"=",
"np",
".",
"std",
"(",
"data",
",",
"axis",
"=",
"0",
")",
"median",
"=",
"np",
".",
"median",
"(",
"dat... | Collect summary statisitics from an array
This creates a dictionry of output arrays of summary
statistics, with the input array dimension reducted by one.
Parameters
----------
data : `numpy.ndarray`
Array with the collected input data
Returns
-------
output : dict
Dictionary of `np.ndarray` with the summary data.
These include mean, std, median, and 4 quantiles (0.025, 0.16, 0.86, 0.975). | [
"Collect",
"summary",
"statisitics",
"from",
"an",
"array"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_collect.py#L163-L197 | train | 36,235 |
def add_summary_stats_to_table(table_in, table_out, colnames):
    """Collect summary statistics from an input table and append them
    to an output table.

    Parameters
    ----------
    table_in : `astropy.table.Table`
        Table with the input data.
    table_out : `astropy.table.Table`
        Table with the output data.
    colnames : list
        List of the column names to get summary statistics for.
    """
    for colname in colnames:
        in_col = table_in[colname]
        # One new output column per statistic, named '<col>_<stat>'.
        for stat_name, stat_val in collect_summary_stats(in_col.data).items():
            new_col = Column(data=np.vstack([stat_val]),
                             name="%s_%s" % (colname, stat_name),
                             dtype=in_col.dtype,
                             shape=stat_val.shape,
                             unit=in_col.unit)
            table_out.add_column(new_col)
table_out.add_column(col_out) | [
"def",
"add_summary_stats_to_table",
"(",
"table_in",
",",
"table_out",
",",
"colnames",
")",
":",
"for",
"col",
"in",
"colnames",
":",
"col_in",
"=",
"table_in",
"[",
"col",
"]",
"stats",
"=",
"collect_summary_stats",
"(",
"col_in",
".",
"data",
")",
"for",... | Collect summary statisitics from an input table and add them to an output table
Parameters
----------
table_in : `astropy.table.Table`
Table with the input data.
table_out : `astropy.table.Table`
Table with the output data.
colnames : list
List of the column names to get summary statistics for. | [
"Collect",
"summary",
"statisitics",
"from",
"an",
"input",
"table",
"and",
"add",
"them",
"to",
"an",
"output",
"table"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_collect.py#L200-L223 | train | 36,236 |
def summarize_sed_results(sed_table):
    """Build a stats summary table for a table that has all the SED results """
    # Per-row flux/TS columns that are replaced by their statistics.
    cols_to_drop = ['dnde', 'dnde_err', 'dnde_errp', 'dnde_errn', 'dnde_ul',
                    'e2dnde', 'e2dnde_err', 'e2dnde_errp', 'e2dnde_errn',
                    'e2dnde_ul',
                    'norm', 'norm_err', 'norm_errp', 'norm_errn', 'norm_ul',
                    'ts']
    # Columns whose summary statistics are appended to the output.
    cols_to_summarize = ['dnde', 'dnde_ul',
                         'e2dnde', 'e2dnde_ul',
                         'norm', 'norm_ul']
    summary = Table(sed_table[0])
    summary.remove_columns(cols_to_drop)
    add_summary_stats_to_table(sed_table, summary, cols_to_summarize)
    return summary
"def",
"summarize_sed_results",
"(",
"sed_table",
")",
":",
"del_cols",
"=",
"[",
"'dnde'",
",",
"'dnde_err'",
",",
"'dnde_errp'",
",",
"'dnde_errn'",
",",
"'dnde_ul'",
",",
"'e2dnde'",
",",
"'e2dnde_err'",
",",
"'e2dnde_errp'",
",",
"'e2dnde_errn'",
",",
"'e2dn... | Build a stats summary table for a table that has all the SED results | [
"Build",
"a",
"stats",
"summary",
"table",
"for",
"a",
"table",
"that",
"has",
"all",
"the",
"SED",
"results"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_collect.py#L226-L239 | train | 36,237 |
def update_base_dict(self, yamlfile):
    """Update the values in the baseline dictionary used to resolve names.

    Parameters
    ----------
    yamlfile : str
        Path to a YAML file whose top-level mapping is merged into
        ``self.base_dict``.
    """
    # Use a context manager so the file handle is closed promptly
    # (the original relied on the garbage collector to close it).
    with open(yamlfile) as fin:
        self.base_dict.update(**yaml.safe_load(fin))
"def",
"update_base_dict",
"(",
"self",
",",
"yamlfile",
")",
":",
"self",
".",
"base_dict",
".",
"update",
"(",
"*",
"*",
"yaml",
".",
"safe_load",
"(",
"open",
"(",
"yamlfile",
")",
")",
")"
] | Update the values in baseline dictionary used to resolve names | [
"Update",
"the",
"values",
"in",
"baseline",
"dictionary",
"used",
"to",
"resolve",
"names"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/name_policy.py#L59-L62 | train | 36,238 |
def _format_from_dict(self, format_string, **kwargs):
    """Format a file name from the baseline dictionary merged with the
    keyword overrides; resolve to a full path when ``fullpath`` is set."""
    fmt_args = self.base_dict.copy()
    fmt_args.update(**kwargs)
    name = format_string.format(**fmt_args)
    if kwargs.get('fullpath', False):
        name = self.fullpath(localpath=name)
    return name
"def",
"_format_from_dict",
"(",
"self",
",",
"format_string",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs_copy",
"=",
"self",
".",
"base_dict",
".",
"copy",
"(",
")",
"kwargs_copy",
".",
"update",
"(",
"*",
"*",
"kwargs",
")",
"localpath",
"=",
"format_s... | Return a formatted file name dictionary components | [
"Return",
"a",
"formatted",
"file",
"name",
"dictionary",
"components"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/name_policy.py#L64-L71 | train | 36,239 |
def sim_sedfile(self, **kwargs):
    """Return the name for the simulated SED file for a particular target.
    """
    # Default the seed placeholder if the caller did not supply one.
    kwargs.setdefault('seed', 'SEED')
    return self._format_from_dict(NameFactory.sim_sedfile_format, **kwargs)
"def",
"sim_sedfile",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"'seed'",
"not",
"in",
"kwargs",
":",
"kwargs",
"[",
"'seed'",
"]",
"=",
"'SEED'",
"return",
"self",
".",
"_format_from_dict",
"(",
"NameFactory",
".",
"sim_sedfile_format",
",",
... | Return the name for the simulated SED file for a particular target | [
"Return",
"the",
"name",
"for",
"the",
"simulated",
"SED",
"file",
"for",
"a",
"particular",
"target"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/name_policy.py#L118-L123 | train | 36,240 |
def stamp(self, **kwargs):
    """Return the path for a stamp file for a scatter gather job."""
    fmt_args = dict(self.base_dict)
    fmt_args.update(**kwargs)
    return NameFactory.stamp_format.format(**fmt_args)
"def",
"stamp",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs_copy",
"=",
"self",
".",
"base_dict",
".",
"copy",
"(",
")",
"kwargs_copy",
".",
"update",
"(",
"*",
"*",
"kwargs",
")",
"return",
"NameFactory",
".",
"stamp_format",
".",
"format"... | Return the path for a stamp file for a scatter gather job | [
"Return",
"the",
"path",
"for",
"a",
"stamp",
"file",
"for",
"a",
"scatter",
"gather",
"job"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/name_policy.py#L125-L129 | train | 36,241 |
def resolve_targetfile(self, args, require_sim_name=False):  # x
    """Get the name of the target list file based on the job arguments.

    Returns a ``(targetfile, sim)`` tuple; both elements are None when
    a required argument is missing.
    """
    ttype = args.get('ttype')
    if is_null(ttype):
        sys.stderr.write('Target type must be specified')
        return (None, None)
    sim = args.get('sim')
    if is_null(sim):
        if require_sim_name:
            sys.stderr.write('Simulation scenario must be specified')
            return (None, None)
        sim = None
    name_keys = dict(target_type=ttype,
                     targetlist='target_list.yaml',
                     sim_name=sim,
                     fullpath=True)
    if sim is None:
        targetfile = self.targetfile(**name_keys)
    else:
        targetfile = self.sim_targetfile(**name_keys)
    # An explicit 'targetfile' argument overrides the derived name.
    override = args.get('targetfile')
    if is_not_null(override):
        targetfile = override
    return (targetfile, sim)
"def",
"resolve_targetfile",
"(",
"self",
",",
"args",
",",
"require_sim_name",
"=",
"False",
")",
":",
"# x",
"ttype",
"=",
"args",
".",
"get",
"(",
"'ttype'",
")",
"if",
"is_null",
"(",
"ttype",
")",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'... | Get the name of the targetfile based on the job arguments | [
"Get",
"the",
"name",
"of",
"the",
"targetfile",
"based",
"on",
"the",
"job",
"arguments"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/name_policy.py#L138-L166 | train | 36,242 |
def resolve_randconfig(self, args):
    """Get the name of the random-direction configuration file based on
    the job arguments."""
    ttype = args.get('ttype')
    if is_null(ttype):
        sys.stderr.write('Target type must be specified')
        return None
    derived = self.randconfig(target_type=ttype, fullpath=True)
    # An explicit 'rand_config' argument overrides the derived name.
    override = args.get('rand_config')
    return override if is_not_null(override) else derived
"def",
"resolve_randconfig",
"(",
"self",
",",
"args",
")",
":",
"ttype",
"=",
"args",
".",
"get",
"(",
"'ttype'",
")",
"if",
"is_null",
"(",
"ttype",
")",
":",
"sys",
".",
"stderr",
".",
"write",
"(",
"'Target type must be specified'",
")",
"return",
"N... | Get the name of the specturm file based on the job arguments | [
"Get",
"the",
"name",
"of",
"the",
"specturm",
"file",
"based",
"on",
"the",
"job",
"arguments"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/name_policy.py#L168-L180 | train | 36,243 |
def convert_sed_cols(tab):
    """Cast SED column names to lowercase."""
    # Rename each column in place: lowercase it and replace the
    # legacy 'dfde' stem with 'dnde'.
    for old_name in list(tab.columns.keys()):
        new_name = old_name.lower().replace('dfde', 'dnde')
        if tab.columns[old_name].name != new_name:
            tab.columns[old_name].name = new_name
    return tab
"def",
"convert_sed_cols",
"(",
"tab",
")",
":",
"# Update Column names",
"for",
"colname",
"in",
"list",
"(",
"tab",
".",
"columns",
".",
"keys",
"(",
")",
")",
":",
"newname",
"=",
"colname",
".",
"lower",
"(",
")",
"newname",
"=",
"newname",
".",
"r... | Cast SED column names to lowercase. | [
"Cast",
"SED",
"column",
"names",
"to",
"lowercase",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L38-L51 | train | 36,244 |
def derivative(self, x, der=1):
    """Evaluate a derivative of the spline at an array of input values.

    x : the inputs
    der : the order of derivative
    """
    from scipy.interpolate import splev
    spline_rep = self._sp
    return splev(x, spline_rep, der=der)
"def",
"derivative",
"(",
"self",
",",
"x",
",",
"der",
"=",
"1",
")",
":",
"from",
"scipy",
".",
"interpolate",
"import",
"splev",
"return",
"splev",
"(",
"x",
",",
"self",
".",
"_sp",
",",
"der",
"=",
"der",
")"
] | return the derivative a an array of input values
x : the inputs
der : the order of derivative | [
"return",
"the",
"derivative",
"a",
"an",
"array",
"of",
"input",
"values"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L112-L119 | train | 36,245 |
fermiPy/fermipy | fermipy/castro.py | LnLFn._compute_mle | def _compute_mle(self):
"""Compute the maximum likelihood estimate.
Calls `scipy.optimize.brentq` to find the roots of the derivative.
"""
min_y = np.min(self._interp.y)
if self._interp.y[0] == min_y:
self._mle = self._interp.x[0]
elif self._interp.y[-1] == min_y:
self._mle = self._interp.x[-1]
else:
argmin_y = np.argmin(self._interp.y)
ix0 = max(argmin_y - 4, 0)
ix1 = min(argmin_y + 4, len(self._interp.x) - 1)
while np.sign(self._interp.derivative(self._interp.x[ix0])) == \
np.sign(self._interp.derivative(self._interp.x[ix1])):
ix0 += 1
self._mle = scipy.optimize.brentq(self._interp.derivative,
self._interp.x[ix0],
self._interp.x[ix1],
xtol=1e-10 *
np.median(self._interp.x)) | python | def _compute_mle(self):
"""Compute the maximum likelihood estimate.
Calls `scipy.optimize.brentq` to find the roots of the derivative.
"""
min_y = np.min(self._interp.y)
if self._interp.y[0] == min_y:
self._mle = self._interp.x[0]
elif self._interp.y[-1] == min_y:
self._mle = self._interp.x[-1]
else:
argmin_y = np.argmin(self._interp.y)
ix0 = max(argmin_y - 4, 0)
ix1 = min(argmin_y + 4, len(self._interp.x) - 1)
while np.sign(self._interp.derivative(self._interp.x[ix0])) == \
np.sign(self._interp.derivative(self._interp.x[ix1])):
ix0 += 1
self._mle = scipy.optimize.brentq(self._interp.derivative,
self._interp.x[ix0],
self._interp.x[ix1],
xtol=1e-10 *
np.median(self._interp.x)) | [
"def",
"_compute_mle",
"(",
"self",
")",
":",
"min_y",
"=",
"np",
".",
"min",
"(",
"self",
".",
"_interp",
".",
"y",
")",
"if",
"self",
".",
"_interp",
".",
"y",
"[",
"0",
"]",
"==",
"min_y",
":",
"self",
".",
"_mle",
"=",
"self",
".",
"_interp... | Compute the maximum likelihood estimate.
Calls `scipy.optimize.brentq` to find the roots of the derivative. | [
"Compute",
"the",
"maximum",
"likelihood",
"estimate",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L193-L216 | train | 36,246 |
fermiPy/fermipy | fermipy/castro.py | LnLFn.getDeltaLogLike | def getDeltaLogLike(self, dlnl, upper=True):
"""Find the point at which the log-likelihood changes by a
given value with respect to its value at the MLE."""
mle_val = self.mle()
# A little bit of paranoia to avoid zeros
if mle_val <= 0.:
mle_val = self._interp.xmin
if mle_val <= 0.:
mle_val = self._interp.x[1]
log_mle = np.log10(mle_val)
lnl_max = self.fn_mle()
# This ultra-safe code to find an absolute maximum
# fmax = self.fn_mle()
# m = (fmax-self.interp.y > 0.1+dlnl) & (self.interp.x>self._mle)
# if sum(m) == 0:
# xmax = self.interp.x[-1]*10
# else:
# xmax = self.interp.x[m][0]
# Matt has found that it is faster to use an interpolator
# than an actual root-finder to find the root,
# probably b/c of python overhead.
# That would be something like this:
# rf = lambda x: self._interp(x)+dlnl-lnl_max
# return opt.brentq(rf,self._mle,self._interp.xmax,
# xtol=1e-10*np.abs(self._mle))
if upper:
x = np.logspace(log_mle, np.log10(self._interp.xmax), 100)
retVal = np.interp(dlnl, self.interp(x) - lnl_max, x)
else:
x = np.linspace(self._interp.xmin, self._mle, 100)
retVal = np.interp(dlnl, self.interp(x)[::-1] - lnl_max, x[::-1])
return retVal | python | def getDeltaLogLike(self, dlnl, upper=True):
"""Find the point at which the log-likelihood changes by a
given value with respect to its value at the MLE."""
mle_val = self.mle()
# A little bit of paranoia to avoid zeros
if mle_val <= 0.:
mle_val = self._interp.xmin
if mle_val <= 0.:
mle_val = self._interp.x[1]
log_mle = np.log10(mle_val)
lnl_max = self.fn_mle()
# This ultra-safe code to find an absolute maximum
# fmax = self.fn_mle()
# m = (fmax-self.interp.y > 0.1+dlnl) & (self.interp.x>self._mle)
# if sum(m) == 0:
# xmax = self.interp.x[-1]*10
# else:
# xmax = self.interp.x[m][0]
# Matt has found that it is faster to use an interpolator
# than an actual root-finder to find the root,
# probably b/c of python overhead.
# That would be something like this:
# rf = lambda x: self._interp(x)+dlnl-lnl_max
# return opt.brentq(rf,self._mle,self._interp.xmax,
# xtol=1e-10*np.abs(self._mle))
if upper:
x = np.logspace(log_mle, np.log10(self._interp.xmax), 100)
retVal = np.interp(dlnl, self.interp(x) - lnl_max, x)
else:
x = np.linspace(self._interp.xmin, self._mle, 100)
retVal = np.interp(dlnl, self.interp(x)[::-1] - lnl_max, x[::-1])
return retVal | [
"def",
"getDeltaLogLike",
"(",
"self",
",",
"dlnl",
",",
"upper",
"=",
"True",
")",
":",
"mle_val",
"=",
"self",
".",
"mle",
"(",
")",
"# A little bit of paranoia to avoid zeros",
"if",
"mle_val",
"<=",
"0.",
":",
"mle_val",
"=",
"self",
".",
"_interp",
".... | Find the point at which the log-likelihood changes by a
given value with respect to its value at the MLE. | [
"Find",
"the",
"point",
"at",
"which",
"the",
"log",
"-",
"likelihood",
"changes",
"by",
"a",
"given",
"value",
"with",
"respect",
"to",
"its",
"value",
"at",
"the",
"MLE",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L235-L270 | train | 36,247 |
fermiPy/fermipy | fermipy/castro.py | ReferenceSpec.build_ebound_table | def build_ebound_table(self):
""" Build and return an EBOUNDS table with the encapsulated data.
"""
cols = [
Column(name="E_MIN", dtype=float, data=self._emin, unit='MeV'),
Column(name="E_MAX", dtype=float, data=self._emax, unit='MeV'),
Column(name="E_REF", dtype=float, data=self._eref, unit='MeV'),
Column(name="REF_DNDE", dtype=float, data=self._ref_dnde,
unit='ph / (MeV cm2 s)'),
Column(name="REF_FLUX", dtype=float, data=self._ref_flux,
unit='ph / (cm2 s)'),
Column(name="REF_EFLUX", dtype=float, data=self._ref_eflux,
unit='MeV / (cm2 s)'),
Column(name="REF_NPRED", dtype=float, data=self._ref_npred,
unit='ph')
]
tab = Table(data=cols)
return tab | python | def build_ebound_table(self):
""" Build and return an EBOUNDS table with the encapsulated data.
"""
cols = [
Column(name="E_MIN", dtype=float, data=self._emin, unit='MeV'),
Column(name="E_MAX", dtype=float, data=self._emax, unit='MeV'),
Column(name="E_REF", dtype=float, data=self._eref, unit='MeV'),
Column(name="REF_DNDE", dtype=float, data=self._ref_dnde,
unit='ph / (MeV cm2 s)'),
Column(name="REF_FLUX", dtype=float, data=self._ref_flux,
unit='ph / (cm2 s)'),
Column(name="REF_EFLUX", dtype=float, data=self._ref_eflux,
unit='MeV / (cm2 s)'),
Column(name="REF_NPRED", dtype=float, data=self._ref_npred,
unit='ph')
]
tab = Table(data=cols)
return tab | [
"def",
"build_ebound_table",
"(",
"self",
")",
":",
"cols",
"=",
"[",
"Column",
"(",
"name",
"=",
"\"E_MIN\"",
",",
"dtype",
"=",
"float",
",",
"data",
"=",
"self",
".",
"_emin",
",",
"unit",
"=",
"'MeV'",
")",
",",
"Column",
"(",
"name",
"=",
"\"E... | Build and return an EBOUNDS table with the encapsulated data. | [
"Build",
"and",
"return",
"an",
"EBOUNDS",
"table",
"with",
"the",
"encapsulated",
"data",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L443-L460 | train | 36,248 |
fermiPy/fermipy | fermipy/castro.py | CastroData_Base.derivative | def derivative(self, x, der=1):
"""Return the derivate of the log-like summed over the energy
bins
Parameters
----------
x : `~numpy.ndarray`
Array of N x M values
der : int
Order of the derivate
Returns
-------
der_val : `~numpy.ndarray`
Array of negative log-likelihood values.
"""
if len(x.shape) == 1:
der_val = np.zeros((1))
else:
der_val = np.zeros((x.shape[1:]))
for i, xv in enumerate(x):
der_val += self._loglikes[i].interp.derivative(xv, der=der)
return der_val | python | def derivative(self, x, der=1):
"""Return the derivate of the log-like summed over the energy
bins
Parameters
----------
x : `~numpy.ndarray`
Array of N x M values
der : int
Order of the derivate
Returns
-------
der_val : `~numpy.ndarray`
Array of negative log-likelihood values.
"""
if len(x.shape) == 1:
der_val = np.zeros((1))
else:
der_val = np.zeros((x.shape[1:]))
for i, xv in enumerate(x):
der_val += self._loglikes[i].interp.derivative(xv, der=der)
return der_val | [
"def",
"derivative",
"(",
"self",
",",
"x",
",",
"der",
"=",
"1",
")",
":",
"if",
"len",
"(",
"x",
".",
"shape",
")",
"==",
"1",
":",
"der_val",
"=",
"np",
".",
"zeros",
"(",
"(",
"1",
")",
")",
"else",
":",
"der_val",
"=",
"np",
".",
"zero... | Return the derivate of the log-like summed over the energy
bins
Parameters
----------
x : `~numpy.ndarray`
Array of N x M values
der : int
Order of the derivate
Returns
-------
der_val : `~numpy.ndarray`
Array of negative log-likelihood values. | [
"Return",
"the",
"derivate",
"of",
"the",
"log",
"-",
"like",
"summed",
"over",
"the",
"energy",
"bins"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L721-L745 | train | 36,249 |
fermiPy/fermipy | fermipy/castro.py | CastroData_Base.mles | def mles(self):
""" return the maximum likelihood estimates for each of the energy bins
"""
mle_vals = np.ndarray((self._nx))
for i in range(self._nx):
mle_vals[i] = self._loglikes[i].mle()
return mle_vals | python | def mles(self):
""" return the maximum likelihood estimates for each of the energy bins
"""
mle_vals = np.ndarray((self._nx))
for i in range(self._nx):
mle_vals[i] = self._loglikes[i].mle()
return mle_vals | [
"def",
"mles",
"(",
"self",
")",
":",
"mle_vals",
"=",
"np",
".",
"ndarray",
"(",
"(",
"self",
".",
"_nx",
")",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"_nx",
")",
":",
"mle_vals",
"[",
"i",
"]",
"=",
"self",
".",
"_loglikes",
"[",
... | return the maximum likelihood estimates for each of the energy bins | [
"return",
"the",
"maximum",
"likelihood",
"estimates",
"for",
"each",
"of",
"the",
"energy",
"bins"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L747-L753 | train | 36,250 |
fermiPy/fermipy | fermipy/castro.py | CastroData_Base.ts_vals | def ts_vals(self):
""" returns test statistic values for each energy bin
"""
ts_vals = np.ndarray((self._nx))
for i in range(self._nx):
ts_vals[i] = self._loglikes[i].TS()
return ts_vals | python | def ts_vals(self):
""" returns test statistic values for each energy bin
"""
ts_vals = np.ndarray((self._nx))
for i in range(self._nx):
ts_vals[i] = self._loglikes[i].TS()
return ts_vals | [
"def",
"ts_vals",
"(",
"self",
")",
":",
"ts_vals",
"=",
"np",
".",
"ndarray",
"(",
"(",
"self",
".",
"_nx",
")",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"_nx",
")",
":",
"ts_vals",
"[",
"i",
"]",
"=",
"self",
".",
"_loglikes",
"[",
... | returns test statistic values for each energy bin | [
"returns",
"test",
"statistic",
"values",
"for",
"each",
"energy",
"bin"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L764-L771 | train | 36,251 |
fermiPy/fermipy | fermipy/castro.py | CastroData_Base.chi2_vals | def chi2_vals(self, x):
"""Compute the difference in the log-likelihood between the
MLE in each energy bin and the normalization predicted by a
global best-fit model. This array can be summed to get a
goodness-of-fit chi2 for the model.
Parameters
----------
x : `~numpy.ndarray`
An array of normalizations derived from a global fit to
all energy bins.
Returns
-------
chi2_vals : `~numpy.ndarray`
An array of chi2 values for each energy bin.
"""
chi2_vals = np.ndarray((self._nx))
for i in range(self._nx):
mle = self._loglikes[i].mle()
nll0 = self._loglikes[i].interp(mle)
nll1 = self._loglikes[i].interp(x[i])
chi2_vals[i] = 2.0 * np.abs(nll0 - nll1)
return chi2_vals | python | def chi2_vals(self, x):
"""Compute the difference in the log-likelihood between the
MLE in each energy bin and the normalization predicted by a
global best-fit model. This array can be summed to get a
goodness-of-fit chi2 for the model.
Parameters
----------
x : `~numpy.ndarray`
An array of normalizations derived from a global fit to
all energy bins.
Returns
-------
chi2_vals : `~numpy.ndarray`
An array of chi2 values for each energy bin.
"""
chi2_vals = np.ndarray((self._nx))
for i in range(self._nx):
mle = self._loglikes[i].mle()
nll0 = self._loglikes[i].interp(mle)
nll1 = self._loglikes[i].interp(x[i])
chi2_vals[i] = 2.0 * np.abs(nll0 - nll1)
return chi2_vals | [
"def",
"chi2_vals",
"(",
"self",
",",
"x",
")",
":",
"chi2_vals",
"=",
"np",
".",
"ndarray",
"(",
"(",
"self",
".",
"_nx",
")",
")",
"for",
"i",
"in",
"range",
"(",
"self",
".",
"_nx",
")",
":",
"mle",
"=",
"self",
".",
"_loglikes",
"[",
"i",
... | Compute the difference in the log-likelihood between the
MLE in each energy bin and the normalization predicted by a
global best-fit model. This array can be summed to get a
goodness-of-fit chi2 for the model.
Parameters
----------
x : `~numpy.ndarray`
An array of normalizations derived from a global fit to
all energy bins.
Returns
-------
chi2_vals : `~numpy.ndarray`
An array of chi2 values for each energy bin. | [
"Compute",
"the",
"difference",
"in",
"the",
"log",
"-",
"likelihood",
"between",
"the",
"MLE",
"in",
"each",
"energy",
"bin",
"and",
"the",
"normalization",
"predicted",
"by",
"a",
"global",
"best",
"-",
"fit",
"model",
".",
"This",
"array",
"can",
"be",
... | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L773-L799 | train | 36,252 |
fermiPy/fermipy | fermipy/castro.py | CastroData_Base.fitNormalization | def fitNormalization(self, specVals, xlims):
"""Fit the normalization given a set of spectral values that
define a spectral shape
This version is faster, and solves for the root of the derivatvie
Parameters
----------
specVals : an array of (nebin values that define a spectral shape
xlims : fit limits
returns the best-fit normalization value
"""
from scipy.optimize import brentq
def fDeriv(x): return self.norm_derivative(specVals, x)
try:
result = brentq(fDeriv, xlims[0], xlims[1])
except:
check_underflow = self.__call__(specVals * xlims[0]) < \
self.__call__(specVals * xlims[1])
if check_underflow.any():
return xlims[0]
else:
return xlims[1]
return result | python | def fitNormalization(self, specVals, xlims):
"""Fit the normalization given a set of spectral values that
define a spectral shape
This version is faster, and solves for the root of the derivatvie
Parameters
----------
specVals : an array of (nebin values that define a spectral shape
xlims : fit limits
returns the best-fit normalization value
"""
from scipy.optimize import brentq
def fDeriv(x): return self.norm_derivative(specVals, x)
try:
result = brentq(fDeriv, xlims[0], xlims[1])
except:
check_underflow = self.__call__(specVals * xlims[0]) < \
self.__call__(specVals * xlims[1])
if check_underflow.any():
return xlims[0]
else:
return xlims[1]
return result | [
"def",
"fitNormalization",
"(",
"self",
",",
"specVals",
",",
"xlims",
")",
":",
"from",
"scipy",
".",
"optimize",
"import",
"brentq",
"def",
"fDeriv",
"(",
"x",
")",
":",
"return",
"self",
".",
"norm_derivative",
"(",
"specVals",
",",
"x",
")",
"try",
... | Fit the normalization given a set of spectral values that
define a spectral shape
This version is faster, and solves for the root of the derivatvie
Parameters
----------
specVals : an array of (nebin values that define a spectral shape
xlims : fit limits
returns the best-fit normalization value | [
"Fit",
"the",
"normalization",
"given",
"a",
"set",
"of",
"spectral",
"values",
"that",
"define",
"a",
"spectral",
"shape"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L847-L872 | train | 36,253 |
fermiPy/fermipy | fermipy/castro.py | CastroData_Base.fitNorm_v2 | def fitNorm_v2(self, specVals):
"""Fit the normalization given a set of spectral values
that define a spectral shape.
This version uses `scipy.optimize.fmin`.
Parameters
----------
specVals : an array of (nebin values that define a spectral shape
xlims : fit limits
Returns
-------
norm : float
Best-fit normalization value
"""
from scipy.optimize import fmin
def fToMin(x): return self.__call__(specVals * x)
result = fmin(fToMin, 0., disp=False, xtol=1e-6)
return result | python | def fitNorm_v2(self, specVals):
"""Fit the normalization given a set of spectral values
that define a spectral shape.
This version uses `scipy.optimize.fmin`.
Parameters
----------
specVals : an array of (nebin values that define a spectral shape
xlims : fit limits
Returns
-------
norm : float
Best-fit normalization value
"""
from scipy.optimize import fmin
def fToMin(x): return self.__call__(specVals * x)
result = fmin(fToMin, 0., disp=False, xtol=1e-6)
return result | [
"def",
"fitNorm_v2",
"(",
"self",
",",
"specVals",
")",
":",
"from",
"scipy",
".",
"optimize",
"import",
"fmin",
"def",
"fToMin",
"(",
"x",
")",
":",
"return",
"self",
".",
"__call__",
"(",
"specVals",
"*",
"x",
")",
"result",
"=",
"fmin",
"(",
"fToM... | Fit the normalization given a set of spectral values
that define a spectral shape.
This version uses `scipy.optimize.fmin`.
Parameters
----------
specVals : an array of (nebin values that define a spectral shape
xlims : fit limits
Returns
-------
norm : float
Best-fit normalization value | [
"Fit",
"the",
"normalization",
"given",
"a",
"set",
"of",
"spectral",
"values",
"that",
"define",
"a",
"spectral",
"shape",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L874-L894 | train | 36,254 |
fermiPy/fermipy | fermipy/castro.py | CastroData_Base.fit_spectrum | def fit_spectrum(self, specFunc, initPars, freePars=None):
""" Fit for the free parameters of a spectral function
Parameters
----------
specFunc : `~fermipy.spectrum.SpectralFunction`
The Spectral Function
initPars : `~numpy.ndarray`
The initial values of the parameters
freePars : `~numpy.ndarray`
Boolean array indicating which parameters should be free in
the fit.
Returns
-------
params : `~numpy.ndarray`
Best-fit parameters.
spec_vals : `~numpy.ndarray`
The values of the best-fit spectral model in each energy bin.
ts_spec : float
The TS of the best-fit spectrum
chi2_vals : `~numpy.ndarray`
Array of chi-squared values for each energy bin.
chi2_spec : float
Global chi-squared value for the sum of all energy bins.
pval_spec : float
p-value of chi-squared for the best-fit spectrum.
"""
if not isinstance(specFunc, SEDFunctor):
specFunc = self.create_functor(specFunc, initPars,
scale=specFunc.scale)
if freePars is None:
freePars = np.empty(len(initPars), dtype=bool)
freePars.fill(True)
initPars = np.array(initPars)
freePars = np.array(freePars)
def fToMin(x):
xp = np.array(specFunc.params)
xp[freePars] = x
return self.__call__(specFunc(xp))
result = fmin(fToMin, initPars[freePars], disp=False, xtol=1e-6)
out_pars = specFunc.params
out_pars[freePars] = np.array(result)
spec_vals = specFunc(out_pars)
spec_npred = np.zeros(len(spec_vals))
if isinstance(specFunc, spectrum.SEDFluxFunctor):
spec_npred = spec_vals * self.refSpec.ref_npred / self.refSpec.ref_flux
elif isinstance(specFunc, spectrum.SEDEFluxFunctor):
spec_npred = spec_vals * self.refSpec.ref_npred / self.refSpec.ref_eflux
ts_spec = self.TS_spectrum(spec_vals)
chi2_vals = self.chi2_vals(spec_vals)
chi2_spec = np.sum(chi2_vals)
pval_spec = stats.distributions.chi2.sf(chi2_spec, len(spec_vals))
return dict(params=out_pars, spec_vals=spec_vals,
spec_npred=spec_npred,
ts_spec=ts_spec, chi2_spec=chi2_spec,
chi2_vals=chi2_vals, pval_spec=pval_spec) | python | def fit_spectrum(self, specFunc, initPars, freePars=None):
""" Fit for the free parameters of a spectral function
Parameters
----------
specFunc : `~fermipy.spectrum.SpectralFunction`
The Spectral Function
initPars : `~numpy.ndarray`
The initial values of the parameters
freePars : `~numpy.ndarray`
Boolean array indicating which parameters should be free in
the fit.
Returns
-------
params : `~numpy.ndarray`
Best-fit parameters.
spec_vals : `~numpy.ndarray`
The values of the best-fit spectral model in each energy bin.
ts_spec : float
The TS of the best-fit spectrum
chi2_vals : `~numpy.ndarray`
Array of chi-squared values for each energy bin.
chi2_spec : float
Global chi-squared value for the sum of all energy bins.
pval_spec : float
p-value of chi-squared for the best-fit spectrum.
"""
if not isinstance(specFunc, SEDFunctor):
specFunc = self.create_functor(specFunc, initPars,
scale=specFunc.scale)
if freePars is None:
freePars = np.empty(len(initPars), dtype=bool)
freePars.fill(True)
initPars = np.array(initPars)
freePars = np.array(freePars)
def fToMin(x):
xp = np.array(specFunc.params)
xp[freePars] = x
return self.__call__(specFunc(xp))
result = fmin(fToMin, initPars[freePars], disp=False, xtol=1e-6)
out_pars = specFunc.params
out_pars[freePars] = np.array(result)
spec_vals = specFunc(out_pars)
spec_npred = np.zeros(len(spec_vals))
if isinstance(specFunc, spectrum.SEDFluxFunctor):
spec_npred = spec_vals * self.refSpec.ref_npred / self.refSpec.ref_flux
elif isinstance(specFunc, spectrum.SEDEFluxFunctor):
spec_npred = spec_vals * self.refSpec.ref_npred / self.refSpec.ref_eflux
ts_spec = self.TS_spectrum(spec_vals)
chi2_vals = self.chi2_vals(spec_vals)
chi2_spec = np.sum(chi2_vals)
pval_spec = stats.distributions.chi2.sf(chi2_spec, len(spec_vals))
return dict(params=out_pars, spec_vals=spec_vals,
spec_npred=spec_npred,
ts_spec=ts_spec, chi2_spec=chi2_spec,
chi2_vals=chi2_vals, pval_spec=pval_spec) | [
"def",
"fit_spectrum",
"(",
"self",
",",
"specFunc",
",",
"initPars",
",",
"freePars",
"=",
"None",
")",
":",
"if",
"not",
"isinstance",
"(",
"specFunc",
",",
"SEDFunctor",
")",
":",
"specFunc",
"=",
"self",
".",
"create_functor",
"(",
"specFunc",
",",
"... | Fit for the free parameters of a spectral function
Parameters
----------
specFunc : `~fermipy.spectrum.SpectralFunction`
The Spectral Function
initPars : `~numpy.ndarray`
The initial values of the parameters
freePars : `~numpy.ndarray`
Boolean array indicating which parameters should be free in
the fit.
Returns
-------
params : `~numpy.ndarray`
Best-fit parameters.
spec_vals : `~numpy.ndarray`
The values of the best-fit spectral model in each energy bin.
ts_spec : float
The TS of the best-fit spectrum
chi2_vals : `~numpy.ndarray`
Array of chi-squared values for each energy bin.
chi2_spec : float
Global chi-squared value for the sum of all energy bins.
pval_spec : float
p-value of chi-squared for the best-fit spectrum. | [
"Fit",
"for",
"the",
"free",
"parameters",
"of",
"a",
"spectral",
"function"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L896-L968 | train | 36,255 |
fermiPy/fermipy | fermipy/castro.py | CastroData_Base.build_scandata_table | def build_scandata_table(self):
"""Build an `astropy.table.Table` object from these data.
"""
shape = self._norm_vals.shape
col_norm = Column(name="norm", dtype=float)
col_normv = Column(name="norm_scan", dtype=float,
shape=shape)
col_dll = Column(name="dloglike_scan", dtype=float,
shape=shape)
tab = Table(data=[col_norm, col_normv, col_dll])
tab.add_row({"norm": 1.,
"norm_scan": self._norm_vals,
"dloglike_scan": -1 * self._nll_vals})
return tab | python | def build_scandata_table(self):
"""Build an `astropy.table.Table` object from these data.
"""
shape = self._norm_vals.shape
col_norm = Column(name="norm", dtype=float)
col_normv = Column(name="norm_scan", dtype=float,
shape=shape)
col_dll = Column(name="dloglike_scan", dtype=float,
shape=shape)
tab = Table(data=[col_norm, col_normv, col_dll])
tab.add_row({"norm": 1.,
"norm_scan": self._norm_vals,
"dloglike_scan": -1 * self._nll_vals})
return tab | [
"def",
"build_scandata_table",
"(",
"self",
")",
":",
"shape",
"=",
"self",
".",
"_norm_vals",
".",
"shape",
"col_norm",
"=",
"Column",
"(",
"name",
"=",
"\"norm\"",
",",
"dtype",
"=",
"float",
")",
"col_normv",
"=",
"Column",
"(",
"name",
"=",
"\"norm_s... | Build an `astropy.table.Table` object from these data. | [
"Build",
"an",
"astropy",
".",
"table",
".",
"Table",
"object",
"from",
"these",
"data",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L975-L988 | train | 36,256 |
fermiPy/fermipy | fermipy/castro.py | CastroData.create_from_yamlfile | def create_from_yamlfile(cls, yamlfile):
"""Create a Castro data object from a yaml file contains
the likelihood data."""
data = load_yaml(yamlfile)
nebins = len(data)
emin = np.array([data[i]['emin'] for i in range(nebins)])
emax = np.array([data[i]['emax'] for i in range(nebins)])
ref_flux = np.array([data[i]['flux'][1] for i in range(nebins)])
ref_eflux = np.array([data[i]['eflux'][1] for i in range(nebins)])
conv = np.array([data[i]['eflux2npred'] for i in range(nebins)])
ref_npred = conv*ref_eflux
ones = np.ones(ref_flux.shape)
ref_spec = ReferenceSpec(emin, emax, ones, ref_flux, ref_eflux, ref_npred)
norm_data = np.array([data[i]['eflux'] for i in range(nebins)])
ll_data = np.array([data[i]['logLike'] for i in range(nebins)])
max_ll = ll_data.max(1)
nll_data = (max_ll - ll_data.T).T
return cls(norm_data, nll_data, ref_spec, 'eflux') | python | def create_from_yamlfile(cls, yamlfile):
"""Create a Castro data object from a yaml file contains
the likelihood data."""
data = load_yaml(yamlfile)
nebins = len(data)
emin = np.array([data[i]['emin'] for i in range(nebins)])
emax = np.array([data[i]['emax'] for i in range(nebins)])
ref_flux = np.array([data[i]['flux'][1] for i in range(nebins)])
ref_eflux = np.array([data[i]['eflux'][1] for i in range(nebins)])
conv = np.array([data[i]['eflux2npred'] for i in range(nebins)])
ref_npred = conv*ref_eflux
ones = np.ones(ref_flux.shape)
ref_spec = ReferenceSpec(emin, emax, ones, ref_flux, ref_eflux, ref_npred)
norm_data = np.array([data[i]['eflux'] for i in range(nebins)])
ll_data = np.array([data[i]['logLike'] for i in range(nebins)])
max_ll = ll_data.max(1)
nll_data = (max_ll - ll_data.T).T
return cls(norm_data, nll_data, ref_spec, 'eflux') | [
"def",
"create_from_yamlfile",
"(",
"cls",
",",
"yamlfile",
")",
":",
"data",
"=",
"load_yaml",
"(",
"yamlfile",
")",
"nebins",
"=",
"len",
"(",
"data",
")",
"emin",
"=",
"np",
".",
"array",
"(",
"[",
"data",
"[",
"i",
"]",
"[",
"'emin'",
"]",
"for... | Create a Castro data object from a yaml file contains
the likelihood data. | [
"Create",
"a",
"Castro",
"data",
"object",
"from",
"a",
"yaml",
"file",
"contains",
"the",
"likelihood",
"data",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L1095-L1112 | train | 36,257 |
fermiPy/fermipy | fermipy/castro.py | CastroData.create_from_flux_points | def create_from_flux_points(cls, txtfile):
"""Create a Castro data object from a text file containing a
sequence of differential flux points."""
tab = Table.read(txtfile, format='ascii.ecsv')
dnde_unit = u.ph / (u.MeV * u.cm ** 2 * u.s)
loge = np.log10(np.array(tab['e_ref'].to(u.MeV)))
norm = np.array(tab['norm'].to(dnde_unit))
norm_errp = np.array(tab['norm_errp'].to(dnde_unit))
norm_errn = np.array(tab['norm_errn'].to(dnde_unit))
norm_err = 0.5 * (norm_errp + norm_errn)
dloge = loge[1:] - loge[:-1]
dloge = np.insert(dloge, 0, dloge[0])
emin = 10 ** (loge - dloge * 0.5)
emax = 10 ** (loge + dloge * 0.5)
ectr = 10 ** loge
deltae = emax - emin
flux = norm * deltae
eflux = norm * deltae * ectr
ones = np.ones(flux.shape)
ref_spec = ReferenceSpec(emin, emax, ones, ones, ones, ones)
spec_data = SpecData(ref_spec, norm, flux, eflux, norm_err)
stephi = np.linspace(0, 1, 11)
steplo = -np.linspace(0, 1, 11)[1:][::-1]
loscale = 3 * norm_err
hiscale = 3 * norm_err
loscale[loscale > norm] = norm[loscale > norm]
norm_vals_hi = norm[:, np.newaxis] + \
stephi[np.newaxis, :] * hiscale[:, np.newaxis]
norm_vals_lo = norm[:, np.newaxis] + \
steplo[np.newaxis, :] * loscale[:, np.newaxis]
norm_vals = np.hstack((norm_vals_lo, norm_vals_hi))
nll_vals = 0.5 * \
(norm_vals - norm[:, np.newaxis]) ** 2 / \
norm_err[:, np.newaxis] ** 2
norm_vals *= flux[:, np.newaxis] / norm[:, np.newaxis]
return cls(norm_vals, nll_vals, spec_data, 'flux') | python | def create_from_flux_points(cls, txtfile):
"""Create a Castro data object from a text file containing a
sequence of differential flux points."""
tab = Table.read(txtfile, format='ascii.ecsv')
dnde_unit = u.ph / (u.MeV * u.cm ** 2 * u.s)
loge = np.log10(np.array(tab['e_ref'].to(u.MeV)))
norm = np.array(tab['norm'].to(dnde_unit))
norm_errp = np.array(tab['norm_errp'].to(dnde_unit))
norm_errn = np.array(tab['norm_errn'].to(dnde_unit))
norm_err = 0.5 * (norm_errp + norm_errn)
dloge = loge[1:] - loge[:-1]
dloge = np.insert(dloge, 0, dloge[0])
emin = 10 ** (loge - dloge * 0.5)
emax = 10 ** (loge + dloge * 0.5)
ectr = 10 ** loge
deltae = emax - emin
flux = norm * deltae
eflux = norm * deltae * ectr
ones = np.ones(flux.shape)
ref_spec = ReferenceSpec(emin, emax, ones, ones, ones, ones)
spec_data = SpecData(ref_spec, norm, flux, eflux, norm_err)
stephi = np.linspace(0, 1, 11)
steplo = -np.linspace(0, 1, 11)[1:][::-1]
loscale = 3 * norm_err
hiscale = 3 * norm_err
loscale[loscale > norm] = norm[loscale > norm]
norm_vals_hi = norm[:, np.newaxis] + \
stephi[np.newaxis, :] * hiscale[:, np.newaxis]
norm_vals_lo = norm[:, np.newaxis] + \
steplo[np.newaxis, :] * loscale[:, np.newaxis]
norm_vals = np.hstack((norm_vals_lo, norm_vals_hi))
nll_vals = 0.5 * \
(norm_vals - norm[:, np.newaxis]) ** 2 / \
norm_err[:, np.newaxis] ** 2
norm_vals *= flux[:, np.newaxis] / norm[:, np.newaxis]
return cls(norm_vals, nll_vals, spec_data, 'flux') | [
"def",
"create_from_flux_points",
"(",
"cls",
",",
"txtfile",
")",
":",
"tab",
"=",
"Table",
".",
"read",
"(",
"txtfile",
",",
"format",
"=",
"'ascii.ecsv'",
")",
"dnde_unit",
"=",
"u",
".",
"ph",
"/",
"(",
"u",
".",
"MeV",
"*",
"u",
".",
"cm",
"**... | Create a Castro data object from a text file containing a
sequence of differential flux points. | [
"Create",
"a",
"Castro",
"data",
"object",
"from",
"a",
"text",
"file",
"containing",
"a",
"sequence",
"of",
"differential",
"flux",
"points",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L1115-L1159 | train | 36,258 |
fermiPy/fermipy | fermipy/castro.py | CastroData.create_from_tables | def create_from_tables(cls, norm_type='eflux',
tab_s="SCANDATA",
tab_e="EBOUNDS"):
"""Create a CastroData object from two tables
Parameters
----------
norm_type : str
Type of normalization to use. Valid options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
tab_s : str
table scan data
tab_e : str
table energy binning and normalization data
Returns
-------
castro : `~fermipy.castro.CastroData`
"""
if norm_type in ['flux', 'eflux', 'dnde']:
norm_vals = np.array(tab_s['norm_scan'] *
tab_e['ref_%s' % norm_type][:, np.newaxis])
elif norm_type == "norm":
norm_vals = np.array(tab_s['norm_scan'])
else:
raise Exception('Unrecognized normalization type: %s' % norm_type)
nll_vals = -np.array(tab_s['dloglike_scan'])
rs = ReferenceSpec.create_from_table(tab_e)
return cls(norm_vals, nll_vals, rs, norm_type) | python | def create_from_tables(cls, norm_type='eflux',
tab_s="SCANDATA",
tab_e="EBOUNDS"):
"""Create a CastroData object from two tables
Parameters
----------
norm_type : str
Type of normalization to use. Valid options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
tab_s : str
table scan data
tab_e : str
table energy binning and normalization data
Returns
-------
castro : `~fermipy.castro.CastroData`
"""
if norm_type in ['flux', 'eflux', 'dnde']:
norm_vals = np.array(tab_s['norm_scan'] *
tab_e['ref_%s' % norm_type][:, np.newaxis])
elif norm_type == "norm":
norm_vals = np.array(tab_s['norm_scan'])
else:
raise Exception('Unrecognized normalization type: %s' % norm_type)
nll_vals = -np.array(tab_s['dloglike_scan'])
rs = ReferenceSpec.create_from_table(tab_e)
return cls(norm_vals, nll_vals, rs, norm_type) | [
"def",
"create_from_tables",
"(",
"cls",
",",
"norm_type",
"=",
"'eflux'",
",",
"tab_s",
"=",
"\"SCANDATA\"",
",",
"tab_e",
"=",
"\"EBOUNDS\"",
")",
":",
"if",
"norm_type",
"in",
"[",
"'flux'",
",",
"'eflux'",
",",
"'dnde'",
"]",
":",
"norm_vals",
"=",
"... | Create a CastroData object from two tables
Parameters
----------
norm_type : str
Type of normalization to use. Valid options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
tab_s : str
table scan data
tab_e : str
table energy binning and normalization data
Returns
-------
castro : `~fermipy.castro.CastroData` | [
"Create",
"a",
"CastroData",
"object",
"from",
"two",
"tables"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L1162-L1200 | train | 36,259 |
fermiPy/fermipy | fermipy/castro.py | CastroData.create_from_fits | def create_from_fits(cls, fitsfile, norm_type='eflux',
hdu_scan="SCANDATA",
hdu_energies="EBOUNDS",
irow=None):
"""Create a CastroData object from a tscube FITS file.
Parameters
----------
fitsfile : str
Name of the fits file
norm_type : str
Type of normalization to use. Valid options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
hdu_scan : str
Name of the FITS HDU with the scan data
hdu_energies : str
Name of the FITS HDU with the energy binning and
normalization data
irow : int or None
If none, then this assumes that there is a single row in
the scan data table Otherwise, this specifies which row of
the table to use
Returns
-------
castro : `~fermipy.castro.CastroData`
"""
if irow is not None:
tab_s = Table.read(fitsfile, hdu=hdu_scan)[irow]
else:
tab_s = Table.read(fitsfile, hdu=hdu_scan)
tab_e = Table.read(fitsfile, hdu=hdu_energies)
tab_s = convert_sed_cols(tab_s)
tab_e = convert_sed_cols(tab_e)
return cls.create_from_tables(norm_type, tab_s, tab_e) | python | def create_from_fits(cls, fitsfile, norm_type='eflux',
hdu_scan="SCANDATA",
hdu_energies="EBOUNDS",
irow=None):
"""Create a CastroData object from a tscube FITS file.
Parameters
----------
fitsfile : str
Name of the fits file
norm_type : str
Type of normalization to use. Valid options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
hdu_scan : str
Name of the FITS HDU with the scan data
hdu_energies : str
Name of the FITS HDU with the energy binning and
normalization data
irow : int or None
If none, then this assumes that there is a single row in
the scan data table Otherwise, this specifies which row of
the table to use
Returns
-------
castro : `~fermipy.castro.CastroData`
"""
if irow is not None:
tab_s = Table.read(fitsfile, hdu=hdu_scan)[irow]
else:
tab_s = Table.read(fitsfile, hdu=hdu_scan)
tab_e = Table.read(fitsfile, hdu=hdu_energies)
tab_s = convert_sed_cols(tab_s)
tab_e = convert_sed_cols(tab_e)
return cls.create_from_tables(norm_type, tab_s, tab_e) | [
"def",
"create_from_fits",
"(",
"cls",
",",
"fitsfile",
",",
"norm_type",
"=",
"'eflux'",
",",
"hdu_scan",
"=",
"\"SCANDATA\"",
",",
"hdu_energies",
"=",
"\"EBOUNDS\"",
",",
"irow",
"=",
"None",
")",
":",
"if",
"irow",
"is",
"not",
"None",
":",
"tab_s",
... | Create a CastroData object from a tscube FITS file.
Parameters
----------
fitsfile : str
Name of the fits file
norm_type : str
Type of normalization to use. Valid options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
hdu_scan : str
Name of the FITS HDU with the scan data
hdu_energies : str
Name of the FITS HDU with the energy binning and
normalization data
irow : int or None
If none, then this assumes that there is a single row in
the scan data table Otherwise, this specifies which row of
the table to use
Returns
-------
castro : `~fermipy.castro.CastroData` | [
"Create",
"a",
"CastroData",
"object",
"from",
"a",
"tscube",
"FITS",
"file",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L1203-L1250 | train | 36,260 |
fermiPy/fermipy | fermipy/castro.py | CastroData.create_from_sedfile | def create_from_sedfile(cls, fitsfile, norm_type='eflux'):
"""Create a CastroData object from an SED fits file
Parameters
----------
fitsfile : str
Name of the fits file
norm_type : str
Type of normalization to use, options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
Returns
-------
castro : `~fermipy.castro.CastroData`
"""
tab_s = Table.read(fitsfile, hdu=1)
tab_s = convert_sed_cols(tab_s)
if norm_type in ['flux', 'eflux', 'dnde']:
ref_colname = 'ref_%s' % norm_type
norm_vals = np.array(tab_s['norm_scan'] *
tab_s[ref_colname][:, np.newaxis])
elif norm_type == "norm":
norm_vals = np.array(tab_s['norm_scan'])
else:
raise ValueError('Unrecognized normalization type: %s' % norm_type)
nll_vals = -np.array(tab_s['dloglike_scan'])
ref_spec = ReferenceSpec.create_from_table(tab_s)
spec_data = SpecData(ref_spec, tab_s['norm'], tab_s['norm_err'])
return cls(norm_vals, nll_vals, spec_data, norm_type) | python | def create_from_sedfile(cls, fitsfile, norm_type='eflux'):
"""Create a CastroData object from an SED fits file
Parameters
----------
fitsfile : str
Name of the fits file
norm_type : str
Type of normalization to use, options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
Returns
-------
castro : `~fermipy.castro.CastroData`
"""
tab_s = Table.read(fitsfile, hdu=1)
tab_s = convert_sed_cols(tab_s)
if norm_type in ['flux', 'eflux', 'dnde']:
ref_colname = 'ref_%s' % norm_type
norm_vals = np.array(tab_s['norm_scan'] *
tab_s[ref_colname][:, np.newaxis])
elif norm_type == "norm":
norm_vals = np.array(tab_s['norm_scan'])
else:
raise ValueError('Unrecognized normalization type: %s' % norm_type)
nll_vals = -np.array(tab_s['dloglike_scan'])
ref_spec = ReferenceSpec.create_from_table(tab_s)
spec_data = SpecData(ref_spec, tab_s['norm'], tab_s['norm_err'])
return cls(norm_vals, nll_vals, spec_data, norm_type) | [
"def",
"create_from_sedfile",
"(",
"cls",
",",
"fitsfile",
",",
"norm_type",
"=",
"'eflux'",
")",
":",
"tab_s",
"=",
"Table",
".",
"read",
"(",
"fitsfile",
",",
"hdu",
"=",
"1",
")",
"tab_s",
"=",
"convert_sed_cols",
"(",
"tab_s",
")",
"if",
"norm_type",... | Create a CastroData object from an SED fits file
Parameters
----------
fitsfile : str
Name of the fits file
norm_type : str
Type of normalization to use, options are:
* norm : Normalization w.r.t. to test source
* flux : Flux of the test source ( ph cm^-2 s^-1 )
* eflux: Energy Flux of the test source ( MeV cm^-2 s^-1 )
* npred: Number of predicted photons (Not implemented)
* dnde : Differential flux of the test source ( ph cm^-2 s^-1
MeV^-1 )
Returns
-------
castro : `~fermipy.castro.CastroData` | [
"Create",
"a",
"CastroData",
"object",
"from",
"an",
"SED",
"fits",
"file"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L1253-L1290 | train | 36,261 |
fermiPy/fermipy | fermipy/castro.py | CastroData.spectrum_loglike | def spectrum_loglike(self, specType, params, scale=1E3):
""" return the log-likelihood for a particular spectrum
Parameters
----------
specTypes : str
The type of spectrum to try
params : array-like
The spectral parameters
scale : float
The energy scale or 'pivot' energy
"""
sfn = self.create_functor(specType, scale)[0]
return self.__call__(sfn(params)) | python | def spectrum_loglike(self, specType, params, scale=1E3):
""" return the log-likelihood for a particular spectrum
Parameters
----------
specTypes : str
The type of spectrum to try
params : array-like
The spectral parameters
scale : float
The energy scale or 'pivot' energy
"""
sfn = self.create_functor(specType, scale)[0]
return self.__call__(sfn(params)) | [
"def",
"spectrum_loglike",
"(",
"self",
",",
"specType",
",",
"params",
",",
"scale",
"=",
"1E3",
")",
":",
"sfn",
"=",
"self",
".",
"create_functor",
"(",
"specType",
",",
"scale",
")",
"[",
"0",
"]",
"return",
"self",
".",
"__call__",
"(",
"sfn",
"... | return the log-likelihood for a particular spectrum
Parameters
----------
specTypes : str
The type of spectrum to try
params : array-like
The spectral parameters
scale : float
The energy scale or 'pivot' energy | [
"return",
"the",
"log",
"-",
"likelihood",
"for",
"a",
"particular",
"spectrum"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L1320-L1335 | train | 36,262 |
fermiPy/fermipy | fermipy/castro.py | CastroData.create_functor | def create_functor(self, specType, initPars=None, scale=1E3):
"""Create a functor object that computes normalizations in a
sequence of energy bins for a given spectral model.
Parameters
----------
specType : str
The type of spectrum to use. This can be a string
corresponding to the spectral model class name or a
`~fermipy.spectrum.SpectralFunction` object.
initPars : `~numpy.ndarray`
Arrays of parameter values with which the spectral
function will be initialized.
scale : float
The 'pivot energy' or energy scale to use for the spectrum
Returns
-------
fn : `~fermipy.spectrum.SEDFunctor`
A functor object.
"""
emin = self._refSpec.emin
emax = self._refSpec.emax
fn = SpectralFunction.create_functor(specType,
self.norm_type,
emin,
emax,
scale=scale)
if initPars is None:
if specType == 'PowerLaw':
initPars = np.array([5e-13, -2.0])
elif specType == 'LogParabola':
initPars = np.array([5e-13, -2.0, 0.0])
elif specType == 'PLExpCutoff':
initPars = np.array([5e-13, -1.0, 1E4])
fn.params = initPars
return fn | python | def create_functor(self, specType, initPars=None, scale=1E3):
"""Create a functor object that computes normalizations in a
sequence of energy bins for a given spectral model.
Parameters
----------
specType : str
The type of spectrum to use. This can be a string
corresponding to the spectral model class name or a
`~fermipy.spectrum.SpectralFunction` object.
initPars : `~numpy.ndarray`
Arrays of parameter values with which the spectral
function will be initialized.
scale : float
The 'pivot energy' or energy scale to use for the spectrum
Returns
-------
fn : `~fermipy.spectrum.SEDFunctor`
A functor object.
"""
emin = self._refSpec.emin
emax = self._refSpec.emax
fn = SpectralFunction.create_functor(specType,
self.norm_type,
emin,
emax,
scale=scale)
if initPars is None:
if specType == 'PowerLaw':
initPars = np.array([5e-13, -2.0])
elif specType == 'LogParabola':
initPars = np.array([5e-13, -2.0, 0.0])
elif specType == 'PLExpCutoff':
initPars = np.array([5e-13, -1.0, 1E4])
fn.params = initPars
return fn | [
"def",
"create_functor",
"(",
"self",
",",
"specType",
",",
"initPars",
"=",
"None",
",",
"scale",
"=",
"1E3",
")",
":",
"emin",
"=",
"self",
".",
"_refSpec",
".",
"emin",
"emax",
"=",
"self",
".",
"_refSpec",
".",
"emax",
"fn",
"=",
"SpectralFunction"... | Create a functor object that computes normalizations in a
sequence of energy bins for a given spectral model.
Parameters
----------
specType : str
The type of spectrum to use. This can be a string
corresponding to the spectral model class name or a
`~fermipy.spectrum.SpectralFunction` object.
initPars : `~numpy.ndarray`
Arrays of parameter values with which the spectral
function will be initialized.
scale : float
The 'pivot energy' or energy scale to use for the spectrum
Returns
-------
fn : `~fermipy.spectrum.SEDFunctor`
A functor object. | [
"Create",
"a",
"functor",
"object",
"that",
"computes",
"normalizations",
"in",
"a",
"sequence",
"of",
"energy",
"bins",
"for",
"a",
"given",
"spectral",
"model",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L1377-L1419 | train | 36,263 |
fermiPy/fermipy | fermipy/castro.py | TSCube.find_and_refine_peaks | def find_and_refine_peaks(self, threshold, min_separation=1.0,
use_cumul=False):
"""Run a simple peak-finding algorithm, and fit the peaks to
paraboloids to extract their positions and error ellipses.
Parameters
----------
threshold : float
Peak threshold in TS.
min_separation : float
Radius of region size in degrees. Sets the minimum allowable
separation between peaks.
use_cumul : bool
If true, used the cumulative TS map (i.e., the TS summed
over the energy bins) instead of the TS Map from the fit
to and index=2 powerlaw.
Returns
-------
peaks : list
List of dictionaries containing the location and amplitude of
each peak. Output of `~fermipy.sourcefind.find_peaks`
"""
if use_cumul:
theMap = self._ts_cumul
else:
theMap = self._tsmap
peaks = find_peaks(theMap, threshold, min_separation)
for peak in peaks:
o, skydir = fit_error_ellipse(theMap, (peak['ix'], peak['iy']),
dpix=2)
peak['fit_loc'] = o
peak['fit_skydir'] = skydir
if o['fit_success']:
skydir = peak['fit_skydir']
else:
skydir = peak['skydir']
return peaks | python | def find_and_refine_peaks(self, threshold, min_separation=1.0,
use_cumul=False):
"""Run a simple peak-finding algorithm, and fit the peaks to
paraboloids to extract their positions and error ellipses.
Parameters
----------
threshold : float
Peak threshold in TS.
min_separation : float
Radius of region size in degrees. Sets the minimum allowable
separation between peaks.
use_cumul : bool
If true, used the cumulative TS map (i.e., the TS summed
over the energy bins) instead of the TS Map from the fit
to and index=2 powerlaw.
Returns
-------
peaks : list
List of dictionaries containing the location and amplitude of
each peak. Output of `~fermipy.sourcefind.find_peaks`
"""
if use_cumul:
theMap = self._ts_cumul
else:
theMap = self._tsmap
peaks = find_peaks(theMap, threshold, min_separation)
for peak in peaks:
o, skydir = fit_error_ellipse(theMap, (peak['ix'], peak['iy']),
dpix=2)
peak['fit_loc'] = o
peak['fit_skydir'] = skydir
if o['fit_success']:
skydir = peak['fit_skydir']
else:
skydir = peak['skydir']
return peaks | [
"def",
"find_and_refine_peaks",
"(",
"self",
",",
"threshold",
",",
"min_separation",
"=",
"1.0",
",",
"use_cumul",
"=",
"False",
")",
":",
"if",
"use_cumul",
":",
"theMap",
"=",
"self",
".",
"_ts_cumul",
"else",
":",
"theMap",
"=",
"self",
".",
"_tsmap",
... | Run a simple peak-finding algorithm, and fit the peaks to
paraboloids to extract their positions and error ellipses.
Parameters
----------
threshold : float
Peak threshold in TS.
min_separation : float
Radius of region size in degrees. Sets the minimum allowable
separation between peaks.
use_cumul : bool
If true, used the cumulative TS map (i.e., the TS summed
over the energy bins) instead of the TS Map from the fit
to and index=2 powerlaw.
Returns
-------
peaks : list
List of dictionaries containing the location and amplitude of
each peak. Output of `~fermipy.sourcefind.find_peaks` | [
"Run",
"a",
"simple",
"peak",
"-",
"finding",
"algorithm",
"and",
"fit",
"the",
"peaks",
"to",
"paraboloids",
"to",
"extract",
"their",
"positions",
"and",
"error",
"ellipses",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/castro.py#L1624-L1666 | train | 36,264 |
fermiPy/fermipy | fermipy/scripts/cluster_sources.py | make_lat_lons | def make_lat_lons(cvects):
""" Convert from directional cosines to latitidue and longitude
Parameters
----------
cvects : directional cosine (i.e., x,y,z component) values
returns (np.ndarray(2,nsrc)) with the directional cosine (i.e., x,y,z component) values
"""
lats = np.degrees(np.arcsin(cvects[2]))
lons = np.degrees(np.arctan2(cvects[0], cvects[1]))
return np.hstack([lats, lons]) | python | def make_lat_lons(cvects):
""" Convert from directional cosines to latitidue and longitude
Parameters
----------
cvects : directional cosine (i.e., x,y,z component) values
returns (np.ndarray(2,nsrc)) with the directional cosine (i.e., x,y,z component) values
"""
lats = np.degrees(np.arcsin(cvects[2]))
lons = np.degrees(np.arctan2(cvects[0], cvects[1]))
return np.hstack([lats, lons]) | [
"def",
"make_lat_lons",
"(",
"cvects",
")",
":",
"lats",
"=",
"np",
".",
"degrees",
"(",
"np",
".",
"arcsin",
"(",
"cvects",
"[",
"2",
"]",
")",
")",
"lons",
"=",
"np",
".",
"degrees",
"(",
"np",
".",
"arctan2",
"(",
"cvects",
"[",
"0",
"]",
",... | Convert from directional cosines to latitidue and longitude
Parameters
----------
cvects : directional cosine (i.e., x,y,z component) values
returns (np.ndarray(2,nsrc)) with the directional cosine (i.e., x,y,z component) values | [
"Convert",
"from",
"directional",
"cosines",
"to",
"latitidue",
"and",
"longitude"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L20-L31 | train | 36,265 |
fermiPy/fermipy | fermipy/scripts/cluster_sources.py | fill_edge_matrix | def fill_edge_matrix(nsrcs, match_dict):
""" Create and fill a matrix with the graph 'edges' between sources.
Parameters
----------
nsrcs : int
number of sources (used to allocate the size of the matrix)
match_dict : dict((int,int):float)
Each entry gives a pair of source indices, and the
corresponding measure (either distance or sigma)
Returns
-------
e_matrix : `~numpy.ndarray`
numpy.ndarray((nsrcs,nsrcs)) filled with zeros except for the
matches, which are filled with the edge measures (either
distances or sigmas)
"""
e_matrix = np.zeros((nsrcs, nsrcs))
for k, v in match_dict.items():
e_matrix[k[0], k[1]] = v
return e_matrix | python | def fill_edge_matrix(nsrcs, match_dict):
""" Create and fill a matrix with the graph 'edges' between sources.
Parameters
----------
nsrcs : int
number of sources (used to allocate the size of the matrix)
match_dict : dict((int,int):float)
Each entry gives a pair of source indices, and the
corresponding measure (either distance or sigma)
Returns
-------
e_matrix : `~numpy.ndarray`
numpy.ndarray((nsrcs,nsrcs)) filled with zeros except for the
matches, which are filled with the edge measures (either
distances or sigmas)
"""
e_matrix = np.zeros((nsrcs, nsrcs))
for k, v in match_dict.items():
e_matrix[k[0], k[1]] = v
return e_matrix | [
"def",
"fill_edge_matrix",
"(",
"nsrcs",
",",
"match_dict",
")",
":",
"e_matrix",
"=",
"np",
".",
"zeros",
"(",
"(",
"nsrcs",
",",
"nsrcs",
")",
")",
"for",
"k",
",",
"v",
"in",
"match_dict",
".",
"items",
"(",
")",
":",
"e_matrix",
"[",
"k",
"[",
... | Create and fill a matrix with the graph 'edges' between sources.
Parameters
----------
nsrcs : int
number of sources (used to allocate the size of the matrix)
match_dict : dict((int,int):float)
Each entry gives a pair of source indices, and the
corresponding measure (either distance or sigma)
Returns
-------
e_matrix : `~numpy.ndarray`
numpy.ndarray((nsrcs,nsrcs)) filled with zeros except for the
matches, which are filled with the edge measures (either
distances or sigmas) | [
"Create",
"and",
"fill",
"a",
"matrix",
"with",
"the",
"graph",
"edges",
"between",
"sources",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L132-L154 | train | 36,266 |
fermiPy/fermipy | fermipy/scripts/cluster_sources.py | make_rev_dict_unique | def make_rev_dict_unique(cdict):
""" Make a reverse dictionary
Parameters
----------
in_dict : dict(int:dict(int:True))
A dictionary of clusters. Each cluster is a source index and
the dictionary of other sources in the cluster.
Returns
-------
rev_dict : dict(int:dict(int:True))
A dictionary pointing from source index to the clusters it is
included in.
"""
rev_dict = {}
for k, v in cdict.items():
if k in rev_dict:
rev_dict[k][k] = True
else:
rev_dict[k] = {k: True}
for vv in v.keys():
if vv in rev_dict:
rev_dict[vv][k] = True
else:
rev_dict[vv] = {k: True}
return rev_dict | python | def make_rev_dict_unique(cdict):
""" Make a reverse dictionary
Parameters
----------
in_dict : dict(int:dict(int:True))
A dictionary of clusters. Each cluster is a source index and
the dictionary of other sources in the cluster.
Returns
-------
rev_dict : dict(int:dict(int:True))
A dictionary pointing from source index to the clusters it is
included in.
"""
rev_dict = {}
for k, v in cdict.items():
if k in rev_dict:
rev_dict[k][k] = True
else:
rev_dict[k] = {k: True}
for vv in v.keys():
if vv in rev_dict:
rev_dict[vv][k] = True
else:
rev_dict[vv] = {k: True}
return rev_dict | [
"def",
"make_rev_dict_unique",
"(",
"cdict",
")",
":",
"rev_dict",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"cdict",
".",
"items",
"(",
")",
":",
"if",
"k",
"in",
"rev_dict",
":",
"rev_dict",
"[",
"k",
"]",
"[",
"k",
"]",
"=",
"True",
"else",
... | Make a reverse dictionary
Parameters
----------
in_dict : dict(int:dict(int:True))
A dictionary of clusters. Each cluster is a source index and
the dictionary of other sources in the cluster.
Returns
-------
rev_dict : dict(int:dict(int:True))
A dictionary pointing from source index to the clusters it is
included in. | [
"Make",
"a",
"reverse",
"dictionary"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L157-L184 | train | 36,267 |
fermiPy/fermipy | fermipy/scripts/cluster_sources.py | make_clusters | def make_clusters(span_tree, cut_value):
""" Find clusters from the spanning tree
Parameters
----------
span_tree : a sparse nsrcs x nsrcs array
Filled with zeros except for the active edges, which are filled with the
edge measures (either distances or sigmas
cut_value : float
Value used to cluster group. All links with measures above this calue will be cut.
returns dict(int:[int,...])
A dictionary of clusters. Each cluster is a source index and the list of other sources in the cluster.
"""
iv0, iv1 = span_tree.nonzero()
# This is the dictionary of all the pairings for each source
match_dict = {}
for i0, i1 in zip(iv0, iv1):
d = span_tree[i0, i1]
# Cut on the link distance
if d > cut_value:
continue
imin = int(min(i0, i1))
imax = int(max(i0, i1))
if imin in match_dict:
match_dict[imin][imax] = True
else:
match_dict[imin] = {imax: True}
working = True
while working:
working = False
rev_dict = make_rev_dict_unique(match_dict)
k_sort = rev_dict.keys()
k_sort.sort()
for k in k_sort:
v = rev_dict[k]
# Multiple mappings
if len(v) > 1:
working = True
v_sort = v.keys()
v_sort.sort()
cluster_idx = v_sort[0]
for vv in v_sort[1:]:
try:
to_merge = match_dict.pop(vv)
except:
continue
try:
match_dict[cluster_idx].update(to_merge)
match_dict[cluster_idx][vv] = True
except:
continue
# remove self references
try:
match_dict[cluster_idx].pop(cluster_idx)
except:
pass
# Convert to a int:list dictionary
cdict = {}
for k, v in match_dict.items():
cdict[k] = v.keys()
# make the reverse dictionary
rdict = make_reverse_dict(cdict)
return cdict, rdict | python | def make_clusters(span_tree, cut_value):
""" Find clusters from the spanning tree
Parameters
----------
span_tree : a sparse nsrcs x nsrcs array
Filled with zeros except for the active edges, which are filled with the
edge measures (either distances or sigmas
cut_value : float
Value used to cluster group. All links with measures above this calue will be cut.
returns dict(int:[int,...])
A dictionary of clusters. Each cluster is a source index and the list of other sources in the cluster.
"""
iv0, iv1 = span_tree.nonzero()
# This is the dictionary of all the pairings for each source
match_dict = {}
for i0, i1 in zip(iv0, iv1):
d = span_tree[i0, i1]
# Cut on the link distance
if d > cut_value:
continue
imin = int(min(i0, i1))
imax = int(max(i0, i1))
if imin in match_dict:
match_dict[imin][imax] = True
else:
match_dict[imin] = {imax: True}
working = True
while working:
working = False
rev_dict = make_rev_dict_unique(match_dict)
k_sort = rev_dict.keys()
k_sort.sort()
for k in k_sort:
v = rev_dict[k]
# Multiple mappings
if len(v) > 1:
working = True
v_sort = v.keys()
v_sort.sort()
cluster_idx = v_sort[0]
for vv in v_sort[1:]:
try:
to_merge = match_dict.pop(vv)
except:
continue
try:
match_dict[cluster_idx].update(to_merge)
match_dict[cluster_idx][vv] = True
except:
continue
# remove self references
try:
match_dict[cluster_idx].pop(cluster_idx)
except:
pass
# Convert to a int:list dictionary
cdict = {}
for k, v in match_dict.items():
cdict[k] = v.keys()
# make the reverse dictionary
rdict = make_reverse_dict(cdict)
return cdict, rdict | [
"def",
"make_clusters",
"(",
"span_tree",
",",
"cut_value",
")",
":",
"iv0",
",",
"iv1",
"=",
"span_tree",
".",
"nonzero",
"(",
")",
"# This is the dictionary of all the pairings for each source",
"match_dict",
"=",
"{",
"}",
"for",
"i0",
",",
"i1",
"in",
"zip",... | Find clusters from the spanning tree
Parameters
----------
span_tree : a sparse nsrcs x nsrcs array
Filled with zeros except for the active edges, which are filled with the
edge measures (either distances or sigmas
cut_value : float
Value used to cluster group. All links with measures above this calue will be cut.
returns dict(int:[int,...])
A dictionary of clusters. Each cluster is a source index and the list of other sources in the cluster. | [
"Find",
"clusters",
"from",
"the",
"spanning",
"tree"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L187-L258 | train | 36,268 |
fermiPy/fermipy | fermipy/scripts/cluster_sources.py | select_from_cluster | def select_from_cluster(idx_key, idx_list, measure_vect):
""" Select a single source from a cluster and make it the new cluster key
Parameters
----------
idx_key : int
index of the current key for a cluster
idx_list : [int,...]
list of the other source indices in the cluster
measure_vect : np.narray((nsrc),float)
vector of the measure used to select the best source in the cluster
returns best_idx:out_list
where best_idx is the index of the best source in the cluster and
out_list is the list of all the other indices
"""
best_idx = idx_key
best_measure = measure_vect[idx_key]
out_list = [idx_key] + idx_list
for idx, measure in zip(idx_list, measure_vect[idx_list]):
if measure < best_measure:
best_idx = idx
best_measure = measure
out_list.remove(best_idx)
return best_idx, out_list | python | def select_from_cluster(idx_key, idx_list, measure_vect):
""" Select a single source from a cluster and make it the new cluster key
Parameters
----------
idx_key : int
index of the current key for a cluster
idx_list : [int,...]
list of the other source indices in the cluster
measure_vect : np.narray((nsrc),float)
vector of the measure used to select the best source in the cluster
returns best_idx:out_list
where best_idx is the index of the best source in the cluster and
out_list is the list of all the other indices
"""
best_idx = idx_key
best_measure = measure_vect[idx_key]
out_list = [idx_key] + idx_list
for idx, measure in zip(idx_list, measure_vect[idx_list]):
if measure < best_measure:
best_idx = idx
best_measure = measure
out_list.remove(best_idx)
return best_idx, out_list | [
"def",
"select_from_cluster",
"(",
"idx_key",
",",
"idx_list",
",",
"measure_vect",
")",
":",
"best_idx",
"=",
"idx_key",
"best_measure",
"=",
"measure_vect",
"[",
"idx_key",
"]",
"out_list",
"=",
"[",
"idx_key",
"]",
"+",
"idx_list",
"for",
"idx",
",",
"mea... | Select a single source from a cluster and make it the new cluster key
Parameters
----------
idx_key : int
index of the current key for a cluster
idx_list : [int,...]
list of the other source indices in the cluster
measure_vect : np.narray((nsrc),float)
vector of the measure used to select the best source in the cluster
returns best_idx:out_list
where best_idx is the index of the best source in the cluster and
out_list is the list of all the other indices | [
"Select",
"a",
"single",
"source",
"from",
"a",
"cluster",
"and",
"make",
"it",
"the",
"new",
"cluster",
"key"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L261-L287 | train | 36,269 |
fermiPy/fermipy | fermipy/scripts/cluster_sources.py | count_sources_in_cluster | def count_sources_in_cluster(n_src, cdict, rev_dict):
""" Make a vector of sources in each cluster
Parameters
----------
n_src : number of sources
cdict : dict(int:[int,])
A dictionary of clusters. Each cluster is a source index and
the list of other source in the cluster.
rev_dict : dict(int:int)
A single valued dictionary pointing from source index to
cluster key for each source in a cluster. Note that the key
does not point to itself.
Returns
----------
`np.ndarray((n_src),int)' with the number of in the cluster a given source
belongs to.
"""
ret_val = np.zeros((n_src), int)
for i in range(n_src):
try:
key = rev_dict[i]
except KeyError:
key = i
try:
n = len(cdict[key])
except:
n = 0
ret_val[i] = n
return ret_val | python | def count_sources_in_cluster(n_src, cdict, rev_dict):
""" Make a vector of sources in each cluster
Parameters
----------
n_src : number of sources
cdict : dict(int:[int,])
A dictionary of clusters. Each cluster is a source index and
the list of other source in the cluster.
rev_dict : dict(int:int)
A single valued dictionary pointing from source index to
cluster key for each source in a cluster. Note that the key
does not point to itself.
Returns
----------
`np.ndarray((n_src),int)' with the number of in the cluster a given source
belongs to.
"""
ret_val = np.zeros((n_src), int)
for i in range(n_src):
try:
key = rev_dict[i]
except KeyError:
key = i
try:
n = len(cdict[key])
except:
n = 0
ret_val[i] = n
return ret_val | [
"def",
"count_sources_in_cluster",
"(",
"n_src",
",",
"cdict",
",",
"rev_dict",
")",
":",
"ret_val",
"=",
"np",
".",
"zeros",
"(",
"(",
"n_src",
")",
",",
"int",
")",
"for",
"i",
"in",
"range",
"(",
"n_src",
")",
":",
"try",
":",
"key",
"=",
"rev_d... | Make a vector of sources in each cluster
Parameters
----------
n_src : number of sources
cdict : dict(int:[int,])
A dictionary of clusters. Each cluster is a source index and
the list of other source in the cluster.
rev_dict : dict(int:int)
A single valued dictionary pointing from source index to
cluster key for each source in a cluster. Note that the key
does not point to itself.
Returns
----------
`np.ndarray((n_src),int)' with the number of in the cluster a given source
belongs to. | [
"Make",
"a",
"vector",
"of",
"sources",
"in",
"each",
"cluster"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L317-L350 | train | 36,270 |
def find_dist_to_centroids(cluster_dict, cvects, weights=None):
    """Find each cluster's centroid and every member's distance to it.

    Parameters
    ----------
    cluster_dict : dict(int:[int,...])
        Each cluster is a seed source index mapped to the list of the
        other source indices in the cluster.
    cvects : np.ndarray(3, nsrc)
        Directional cosines (x, y, z components) of all the sources.
    weights : np.ndarray(nsrc) or None
        Per-source weights; None for equal weighting.

    Returns
    -------
    distances : np.ndarray(nsrc)
        Distance of each source to the centroid of its cluster;
        0 for unclustered sources.
    cent_dict : dict(int:np.ndarray(2))
        Centroid sky position for each cluster key.
    """
    n_src = cvects.shape[1]
    distances = np.zeros(n_src)
    cent_dict = {}
    for seed, members in cluster_dict.items():
        # The seed itself belongs to the cluster.
        cluster_idx = [seed] + members
        cluster_dists, centroid = find_dist_to_centroid(
            cvects, cluster_idx, weights)
        distances[cluster_idx] = cluster_dists
        cent_dict[seed] = make_lat_lons(centroid)
    return distances, cent_dict
return distances, cent_dict | python | def find_dist_to_centroids(cluster_dict, cvects, weights=None):
""" Find the centroids and the distances to the centroid for all sources in a set of clusters
Parameters
----------
cluster_dict : dict(int:[int,...])
Each cluster is a source index and the list of other sources in the cluster.
cvects : np.ndarray(3,nsrc)
Directional cosines (i.e., x,y,z component) values of all the sources
weights : ~numpy.ndarray(nsrc) with the weights to use. None for equal weighting
Returns
----------
distances : ~numpy.ndarray(nsrc) with the distances to the centroid of the cluster. 0 for unclustered sources
cent_dict : dict(int:numpy.ndarray(2)), dictionary for the centroid locations
"""
distances = np.zeros((cvects.shape[1]))
cent_dict = {}
for k, v in cluster_dict.items():
l = [k] + v
distances[l], centroid = find_dist_to_centroid(cvects, l, weights)
cent_dict[k] = make_lat_lons(centroid)
return distances, cent_dict | [
"def",
"find_dist_to_centroids",
"(",
"cluster_dict",
",",
"cvects",
",",
"weights",
"=",
"None",
")",
":",
"distances",
"=",
"np",
".",
"zeros",
"(",
"(",
"cvects",
".",
"shape",
"[",
"1",
"]",
")",
")",
"cent_dict",
"=",
"{",
"}",
"for",
"k",
",",
... | Find the centroids and the distances to the centroid for all sources in a set of clusters
Parameters
----------
cluster_dict : dict(int:[int,...])
Each cluster is a source index and the list of other sources in the cluster.
cvects : np.ndarray(3,nsrc)
Directional cosines (i.e., x,y,z component) values of all the sources
weights : ~numpy.ndarray(nsrc) with the weights to use. None for equal weighting
Returns
----------
distances : ~numpy.ndarray(nsrc) with the distances to the centroid of the cluster. 0 for unclustered sources
cent_dict : dict(int:numpy.ndarray(2)), dictionary for the centroid locations | [
"Find",
"the",
"centroids",
"and",
"the",
"distances",
"to",
"the",
"centroid",
"for",
"all",
"sources",
"in",
"a",
"set",
"of",
"clusters"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L372-L397 | train | 36,271 |
def select_from_clusters(cluster_dict, measure_vect):
    """Re-key every cluster on its best source.

    Parameters
    ----------
    cluster_dict : dict(int:[int,...])
        Each cluster is a seed source index mapped to the list of the
        other source indices in the cluster.
    measure_vect : np.ndarray(nsrc, float)
        Measure used to pick the best source in each cluster.

    Returns
    -------
    dict(int:[int,...])
        New dictionary of clusters keyed by the best source in each
        cluster.
    """
    # select_from_cluster yields (best_idx, other_indices) pairs, which
    # dict() assembles directly into the re-keyed mapping.
    return dict(
        select_from_cluster(seed, members, measure_vect)
        for seed, members in cluster_dict.items()
    )
return out_dict | python | def select_from_clusters(cluster_dict, measure_vect):
""" Select a single source from each cluster and make it the new cluster key
cluster_dict : dict(int:[int,])
A dictionary of clusters. Each cluster is a source index and the list of other source in the cluster.
measure_vect : np.narray((nsrc),float)
vector of the measure used to select the best source in the cluster
returns dict(int:[int,...])
New dictionary of clusters keyed by the best source in each cluster
"""
out_dict = {}
for idx_key, idx_list in cluster_dict.items():
out_idx, out_list = select_from_cluster(
idx_key, idx_list, measure_vect)
out_dict[out_idx] = out_list
return out_dict | [
"def",
"select_from_clusters",
"(",
"cluster_dict",
",",
"measure_vect",
")",
":",
"out_dict",
"=",
"{",
"}",
"for",
"idx_key",
",",
"idx_list",
"in",
"cluster_dict",
".",
"items",
"(",
")",
":",
"out_idx",
",",
"out_list",
"=",
"select_from_cluster",
"(",
"... | Select a single source from each cluster and make it the new cluster key
cluster_dict : dict(int:[int,])
A dictionary of clusters. Each cluster is a source index and the list of other source in the cluster.
measure_vect : np.narray((nsrc),float)
vector of the measure used to select the best source in the cluster
returns dict(int:[int,...])
New dictionary of clusters keyed by the best source in each cluster | [
"Select",
"a",
"single",
"source",
"from",
"each",
"cluster",
"and",
"make",
"it",
"the",
"new",
"cluster",
"key"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L400-L417 | train | 36,272 |
def make_reverse_dict(in_dict, warn=True):
    """Build a source-to-cluster lookup from a cluster dictionary.

    Parameters
    ----------
    in_dict : dict(int:[int,...])
        Each cluster is a seed source index mapped to the list of the
        other source indices in the cluster.
    warn : bool
        If True, print a warning whenever a source appears in more than
        one cluster (the last cluster seen wins).

    Returns
    -------
    dict(int:int)
        Maps each clustered source index to its cluster key.  Note that
        cluster keys do not map to themselves.
    """
    reverse = {}
    for cluster_key, members in in_dict.items():
        for member in members:
            # A repeated member means two clusters claim the same source.
            if member in reverse and warn:
                print("Dictionary collision %i" % member)
            reverse[member] = cluster_key
    return reverse
""" Build a reverse dictionary from a cluster dictionary
Parameters
----------
in_dict : dict(int:[int,])
A dictionary of clusters. Each cluster is a source index and
the list of other source in the cluster.
Returns
-------
out_dict : dict(int:int)
A single valued dictionary pointing from source index to
cluster key for each source in a cluster. Note that the key
does not point to itself.
"""
out_dict = {}
for k, v in in_dict.items():
for vv in v:
if vv in out_dict:
if warn:
print("Dictionary collision %i" % vv)
out_dict[vv] = k
return out_dict | [
"def",
"make_reverse_dict",
"(",
"in_dict",
",",
"warn",
"=",
"True",
")",
":",
"out_dict",
"=",
"{",
"}",
"for",
"k",
",",
"v",
"in",
"in_dict",
".",
"items",
"(",
")",
":",
"for",
"vv",
"in",
"v",
":",
"if",
"vv",
"in",
"out_dict",
":",
"if",
... | Build a reverse dictionary from a cluster dictionary
Parameters
----------
in_dict : dict(int:[int,])
A dictionary of clusters. Each cluster is a source index and
the list of other source in the cluster.
Returns
-------
out_dict : dict(int:int)
A single valued dictionary pointing from source index to
cluster key for each source in a cluster. Note that the key
does not point to itself. | [
"Build",
"a",
"reverse",
"dictionary",
"from",
"a",
"cluster",
"dictionary"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L420-L443 | train | 36,273 |
def make_dict_from_vector(in_array):
    """Convert a cluster-membership array back into a cluster dictionary.

    Parameters
    ----------
    in_array : `np.ndarray`
        For each source, the index of its cluster seed, or -1 if the
        source does not belong to a cluster.

    Returns
    -------
    dict(int:[int,...])
        Dictionary of clusters keyed by the seed source of each cluster.
    """
    out_dict = {}
    for src_idx, seed in enumerate(in_array):
        # Negative entries mark unclustered sources.
        if seed < 0:
            continue
        out_dict.setdefault(seed, []).append(src_idx)
    return out_dict
""" Converts the cluster membership array stored in a fits file back to a dictionary
Parameters
----------
in_array : `np.ndarray'
An array filled with the index of the seed of a cluster if a source belongs to a cluster,
and with -1 if it does not.
Returns
-------
returns dict(int:[int,...])
Dictionary of clusters keyed by the best source in each cluster
"""
out_dict = {}
for i, k in enumerate(in_array):
if k < 0:
continue
try:
out_dict[k].append(i)
except KeyError:
out_dict[k] = [i]
return out_dict | [
"def",
"make_dict_from_vector",
"(",
"in_array",
")",
":",
"out_dict",
"=",
"{",
"}",
"for",
"i",
",",
"k",
"in",
"enumerate",
"(",
"in_array",
")",
":",
"if",
"k",
"<",
"0",
":",
"continue",
"try",
":",
"out_dict",
"[",
"k",
"]",
".",
"append",
"(... | Converts the cluster membership array stored in a fits file back to a dictionary
Parameters
----------
in_array : `np.ndarray'
An array filled with the index of the seed of a cluster if a source belongs to a cluster,
and with -1 if it does not.
Returns
-------
returns dict(int:[int,...])
Dictionary of clusters keyed by the best source in each cluster | [
"Converts",
"the",
"cluster",
"membership",
"array",
"stored",
"in",
"a",
"fits",
"file",
"back",
"to",
"a",
"dictionary"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L494-L516 | train | 36,274 |
def filter_and_copy_table(tab, to_remove):
    """Return a copy of a FITS table with the given rows removed.

    Parameters
    ----------
    tab : FITS Table object
        Input table.
    to_remove : [int, ...]
        Row indices to drop from the table.

    Returns
    -------
    FITS Table object
        New table containing only the surviving rows.
    """
    # Boolean-mask indexing copies the surviving rows into a new table.
    keep = np.ones((len(tab)), '?')
    keep[to_remove] = False
    return tab[keep]
""" Filter and copy a FITS table.
Parameters
----------
tab : FITS Table object
to_remove : [int ...}
list of indices to remove from the table
returns FITS Table object
"""
nsrcs = len(tab)
mask = np.zeros((nsrcs), '?')
mask[to_remove] = True
inv_mask = np.invert(mask)
out_tab = tab[inv_mask]
return out_tab | [
"def",
"filter_and_copy_table",
"(",
"tab",
",",
"to_remove",
")",
":",
"nsrcs",
"=",
"len",
"(",
"tab",
")",
"mask",
"=",
"np",
".",
"zeros",
"(",
"(",
"nsrcs",
")",
",",
"'?'",
")",
"mask",
"[",
"to_remove",
"]",
"=",
"True",
"inv_mask",
"=",
"np... | Filter and copy a FITS table.
Parameters
----------
tab : FITS Table object
to_remove : [int ...}
list of indices to remove from the table
returns FITS Table object | [
"Filter",
"and",
"copy",
"a",
"FITS",
"table",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/cluster_sources.py#L519-L536 | train | 36,275 |
def baseline_roi_fit(gta, make_plots=False, minmax_npred=(1e3, np.inf)):
    """Do baseline fitting for a target Region of Interest.

    Parameters
    ----------
    gta : `fermipy.gtanalysis.GTAnalysis`
        The analysis object.
    make_plots : bool
        Flag to make standard analysis plots.
    minmax_npred : tuple or list
        Range of number of predicted counts for which to free sources
        in the initial fitting.
    """
    # Start from a fully fixed model and save it as the reference ROI.
    gta.free_sources(False)
    gta.write_roi('base_roi', make_plots=make_plots)
    # Free only the brightest sources (by predicted counts) and optimize.
    # BUGFIX: the minmax_npred argument was previously ignored in favor of
    # a hard-coded [1e3, inf] range; the default is also now an immutable
    # tuple rather than a mutable list.
    gta.free_sources(True, minmax_npred=minmax_npred)
    gta.optimize()
    gta.free_sources(False)
    gta.print_roi()
"""Do baseline fitting for a target Region of Interest
Parameters
----------
gta : `fermipy.gtaanalysis.GTAnalysis`
The analysis object
make_plots : bool
Flag to make standard analysis plots
minmax_npred : tuple or list
Range of number of predicted coutns for which to free sources in initial fitting.
"""
gta.free_sources(False)
gta.write_roi('base_roi', make_plots=make_plots)
gta.free_sources(True, minmax_npred=[1e3, np.inf])
gta.optimize()
gta.free_sources(False)
gta.print_roi() | [
"def",
"baseline_roi_fit",
"(",
"gta",
",",
"make_plots",
"=",
"False",
",",
"minmax_npred",
"=",
"[",
"1e3",
",",
"np",
".",
"inf",
"]",
")",
":",
"gta",
".",
"free_sources",
"(",
"False",
")",
"gta",
".",
"write_roi",
"(",
"'base_roi'",
",",
"make_pl... | Do baseline fitting for a target Region of Interest
Parameters
----------
gta : `fermipy.gtaanalysis.GTAnalysis`
The analysis object
make_plots : bool
Flag to make standard analysis plots
minmax_npred : tuple or list
Range of number of predicted counts for which to free sources in initial fitting. | [
"Do",
"baseline",
"fitting",
"for",
"a",
"target",
"Region",
"of",
"Interest"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/analysis_utils.py#L14-L36 | train | 36,276 |
def localize_sources(gta, **kwargs):
    """Relocalize the point sources in the region of interest.

    Parameters
    ----------
    gta : `fermipy.gtanalysis.GTAnalysis`
        The analysis object.
    kwargs :
        Passed through to ``gta.localize``.
    """
    # Work from the brightest (highest TS) point source downwards.
    by_ts = sorted(gta.roi.sources, key=lambda s: s['ts'], reverse=True)
    for src in by_ts:
        if src['SpatialModel'] != 'PointSource':
            continue
        # Skip sources too close to the ROI edge.
        if src['offset_roi_edge'] > -0.1:
            continue
        gta.localize(src.name, **kwargs)
    gta.optimize()
    gta.print_roi()
"""Relocalize sources in the region of interest
Parameters
----------
gta : `fermipy.gtaanalysis.GTAnalysis`
The analysis object
kwargs :
These are passed to the gta.localize function
"""
# Localize all point sources
for src in sorted(gta.roi.sources, key=lambda t: t['ts'], reverse=True):
# for s in gta.roi.sources:
if not src['SpatialModel'] == 'PointSource':
continue
if src['offset_roi_edge'] > -0.1:
continue
gta.localize(src.name, **kwargs)
gta.optimize()
gta.print_roi() | [
"def",
"localize_sources",
"(",
"gta",
",",
"*",
"*",
"kwargs",
")",
":",
"# Localize all point sources",
"for",
"src",
"in",
"sorted",
"(",
"gta",
".",
"roi",
".",
"sources",
",",
"key",
"=",
"lambda",
"t",
":",
"t",
"[",
"'ts'",
"]",
",",
"reverse",
... | Relocalize sources in the region of interest
Parameters
----------
gta : `fermipy.gtaanalysis.GTAnalysis`
The analysis object
kwargs :
These are passed to the gta.localize function | [
"Relocalize",
"sources",
"in",
"the",
"region",
"of",
"interest"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/analysis_utils.py#L39-L64 | train | 36,277 |
def add_source_get_correlated(gta, name, src_dict, correl_thresh=0.25, non_null_src=False):
    """Add a source and get the set of correlated sources.

    Parameters
    ----------
    gta : `fermipy.gtanalysis.GTAnalysis`
        The analysis object.
    name : str
        Name of the source we are adding.
    src_dict : dict
        Dictionary of the source parameters.
    correl_thresh : float
        Threshold on |correlation| for considering a source correlated.
    non_null_src : bool
        If True, don't zero the source after fitting.

    Returns
    -------
    cdict : dict
        Dictionary with names and correlation factors of correlated sources.
    test_src_name : str
        Name of the test source.
    """
    # If the source already exists, null it out and fit a separate copy.
    if gta.roi.has_source(name):
        gta.zero_source(name)
        gta.update_source(name)
        test_src_name = "%s_test" % name
    else:
        test_src_name = name

    gta.add_source(test_src_name, src_dict)
    gta.free_norm(test_src_name)
    gta.free_shape(test_src_name, free=False)
    fit_result = gta.fit(covar=True)

    # Pull the correlation row for the test source, restricted to
    # normalization parameters.
    mask = fit_result['is_norm']
    src_names = np.array(fit_result['src_names'])[mask]
    idx = (src_names == test_src_name).argmax()
    correl_vals = fit_result['correlation'][idx][mask]

    cdict = {}
    for src_name, correl_val in zip(src_names, correl_vals):
        if src_name == name:
            continue
        # BUGFIX: use the correl_thresh argument instead of the
        # hard-coded 0.25 it duplicated.
        if np.fabs(correl_val) > correl_thresh:
            cdict[src_name] = correl_val

    # Restore the ROI to its pre-test state unless asked not to.
    if not non_null_src:
        gta.zero_source(test_src_name)
        gta.fit(covar=True)
    return cdict, test_src_name
"""Add a source and get the set of correlated sources
Parameters
----------
gta : `fermipy.gtaanalysis.GTAnalysis`
The analysis object
name : str
Name of the source we are adding
src_dict : dict
Dictionary of the source parameters
correl_thresh : float
Threshold for considering a source to be correlated
non_null_src : bool
If True, don't zero the source
Returns
-------
cdict : dict
Dictionary with names and correlation factors of correlated sources
test_src_name : bool
Name of the test source
"""
if gta.roi.has_source(name):
gta.zero_source(name)
gta.update_source(name)
test_src_name = "%s_test" % name
else:
test_src_name = name
gta.add_source(test_src_name, src_dict)
gta.free_norm(test_src_name)
gta.free_shape(test_src_name, free=False)
fit_result = gta.fit(covar=True)
mask = fit_result['is_norm']
src_names = np.array(fit_result['src_names'])[mask]
idx = (src_names == test_src_name).argmax()
correl_vals = fit_result['correlation'][idx][mask]
cdict = {}
for src_name, correl_val in zip(src_names, correl_vals):
if src_name == name:
continue
if np.fabs(correl_val) > 0.25:
cdict[src_name] = correl_val
if not non_null_src:
gta.zero_source(test_src_name)
gta.fit(covar=True)
return cdict, test_src_name | [
"def",
"add_source_get_correlated",
"(",
"gta",
",",
"name",
",",
"src_dict",
",",
"correl_thresh",
"=",
"0.25",
",",
"non_null_src",
"=",
"False",
")",
":",
"if",
"gta",
".",
"roi",
".",
"has_source",
"(",
"name",
")",
":",
"gta",
".",
"zero_source",
"(... | Add a source and get the set of correlated sources
Parameters
----------
gta : `fermipy.gtaanalysis.GTAnalysis`
The analysis object
name : str
Name of the source we are adding
src_dict : dict
Dictionary of the source parameters
correl_thresh : float
Threshold for considering a source to be correlated
non_null_src : bool
If True, don't zero the source
Returns
-------
cdict : dict
Dictionary with names and correlation factors of correlated sources
test_src_name : bool
Name of the test source | [
"Add",
"a",
"source",
"and",
"get",
"the",
"set",
"of",
"correlated",
"sources"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/analysis_utils.py#L67-L127 | train | 36,278 |
def copy_analysis_files(cls, orig_dir, dest_dir, copyfiles):
    """Copy files matching a list of glob patterns from orig_dir to dest_dir.

    Parameters
    ----------
    cls : type
        Unused in the body; present because this is invoked as a classmethod.
    orig_dir : str
        Source directory.
    dest_dir : str
        Destination directory.
    copyfiles : [str, ...]
        Glob patterns (relative to orig_dir) selecting the files to copy.
    """
    for pattern in copyfiles:
        for match in glob.glob(os.path.join(orig_dir, pattern)):
            basename = os.path.basename(match)
            src_path = os.path.join(orig_dir, basename)
            dst_path = os.path.join(dest_dir, basename)
            try:
                copyfile(src_path, dst_path)
            except IOError:
                # Best effort: report the failure and keep copying.
                sys.stderr.write("WARNING: failed to copy %s\n" % src_path)
sys.stderr.write("WARNING: failed to copy %s\n" % orig_path) | python | def copy_analysis_files(cls, orig_dir, dest_dir, copyfiles):
""" Copy a list of files from orig_dir to dest_dir"""
for pattern in copyfiles:
glob_path = os.path.join(orig_dir, pattern)
files = glob.glob(glob_path)
for ff in files:
f = os.path.basename(ff)
orig_path = os.path.join(orig_dir, f)
dest_path = os.path.join(dest_dir, f)
try:
copyfile(orig_path, dest_path)
except IOError:
sys.stderr.write("WARNING: failed to copy %s\n" % orig_path) | [
"def",
"copy_analysis_files",
"(",
"cls",
",",
"orig_dir",
",",
"dest_dir",
",",
"copyfiles",
")",
":",
"for",
"pattern",
"in",
"copyfiles",
":",
"glob_path",
"=",
"os",
".",
"path",
".",
"join",
"(",
"orig_dir",
",",
"pattern",
")",
"files",
"=",
"glob"... | Copy a list of files from orig_dir to dest_dir | [
"Copy",
"a",
"list",
"of",
"files",
"from",
"orig_dir",
"to",
"dest_dir"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_sim.py#L66-L78 | train | 36,279 |
def copy_target_dir(cls, orig_dir, dest_dir, roi_baseline, extracopy):
    """Create and populate a directory for target analysis.

    Parameters
    ----------
    cls : type
        Class providing ``copyfiles`` and ``copy_analysis_files``.
    orig_dir : str
        Source directory.
    dest_dir : str
        Destination directory (created if missing).
    roi_baseline : str
        Basename of the baseline ROI files to copy.
    extracopy : list
        Extra glob patterns to copy; ignored unless it is a list.
    """
    try:
        os.makedirs(dest_dir)
    except OSError:
        # Destination already exists.
        pass
    patterns = ['%s.fits' % roi_baseline,
                '%s.npy' % roi_baseline,
                '%s_*.xml' % roi_baseline]
    patterns += cls.copyfiles
    if isinstance(extracopy, list):
        patterns += extracopy
    cls.copy_analysis_files(orig_dir, dest_dir, patterns)
""" Create and populate directoris for target analysis
"""
try:
os.makedirs(dest_dir)
except OSError:
pass
copyfiles = ['%s.fits' % roi_baseline,
'%s.npy' % roi_baseline,
'%s_*.xml' % roi_baseline] + cls.copyfiles
if isinstance(extracopy, list):
copyfiles += extracopy
cls.copy_analysis_files(orig_dir, dest_dir, copyfiles) | [
"def",
"copy_target_dir",
"(",
"cls",
",",
"orig_dir",
",",
"dest_dir",
",",
"roi_baseline",
",",
"extracopy",
")",
":",
"try",
":",
"os",
".",
"makedirs",
"(",
"dest_dir",
")",
"except",
"OSError",
":",
"pass",
"copyfiles",
"=",
"[",
"'%s.fits'",
"%",
"... | Create and populate directoris for target analysis | [
"Create",
"and",
"populate",
"directoris",
"for",
"target",
"analysis"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_sim.py#L81-L95 | train | 36,280 |
def _make_wcsgeom_from_config(config):
    """Build a `WcsGeom` object from a fermipy configuration dict.

    Parameters
    ----------
    config : dict
        Fermipy configuration; reads the 'binning' and 'selection'
        sections.

    Returns
    -------
    `WcsGeom`
        Geometry matching the configured ROI binning.
    """
    binning = config['binning']
    selection = config['selection']
    binsz = binning['binsz']
    # Number of pixels along each axis for the requested ROI width.
    npix = int(np.round(binning['roiwidth'] / binsz))
    skydir = SkyCoord(selection['ra'] * u.deg, selection['dec'] * u.deg)
    return WcsGeom.create(npix=npix,
                          binsz=binsz,
                          proj=binning.get('proj', 'AIT'),
                          coordsys=binning.get('coordsys', 'GAL'),
                          skydir=skydir)
"""Build a `WCS.Geom` object from a fermipy coniguration file"""
binning = config['binning']
binsz = binning['binsz']
coordsys = binning.get('coordsys', 'GAL')
roiwidth = binning['roiwidth']
proj = binning.get('proj', 'AIT')
ra = config['selection']['ra']
dec = config['selection']['dec']
npix = int(np.round(roiwidth / binsz))
skydir = SkyCoord(ra * u.deg, dec * u.deg)
wcsgeom = WcsGeom.create(npix=npix, binsz=binsz,
proj=proj, coordsys=coordsys,
skydir=skydir)
return wcsgeom | [
"def",
"_make_wcsgeom_from_config",
"(",
"config",
")",
":",
"binning",
"=",
"config",
"[",
"'binning'",
"]",
"binsz",
"=",
"binning",
"[",
"'binsz'",
"]",
"coordsys",
"=",
"binning",
".",
"get",
"(",
"'coordsys'",
",",
"'GAL'",
")",
"roiwidth",
"=",
"binn... | Build a `WCS.Geom` object from a fermipy coniguration file | [
"Build",
"a",
"WCS",
".",
"Geom",
"object",
"from",
"a",
"fermipy",
"coniguration",
"file"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_sim.py#L176-L192 | train | 36,281 |
def _build_skydir_dict(wcsgeom, rand_config):
    """Build a dictionary of random directions laid out on a grid.

    Parameters
    ----------
    wcsgeom : WCS geometry object
        Geometry used to convert pixel offsets into sky coordinates.
    rand_config : dict
        Expects keys 'step_x', 'step_y', 'max_x', 'max_y' (grid spacing
        and half-extent, presumably in the map's angular units -- TODO
        confirm), 'seed' (index of the first direction kept) and
        'nsims' (upper slice bound).

    Returns
    -------
    dict(int : dict)
        Maps key (seed + offset) to dict(ra=..., dec=...) in degrees.
    """
    step_x = rand_config['step_x']
    step_y = rand_config['step_y']
    max_x = rand_config['max_x']
    max_y = rand_config['max_y']
    seed = rand_config['seed']
    nsims = rand_config['nsims']
    # Convert angular steps / extents to pixel units with the WCS pixel
    # scale.  The y step is negated, presumably to compensate for the
    # sign of cdelt[1] -- TODO confirm.
    cdelt = wcsgeom.wcs.wcs.cdelt
    pixstep_x = step_x / cdelt[0]
    pixstep_y = -1. * step_y / cdelt[1]
    pixmax_x = max_x / cdelt[0]
    # NOTE(review): uses cdelt[0]; by symmetry with pixstep_y this looks
    # like it should be cdelt[1] -- confirm.
    pixmax_y = max_y / cdelt[0]
    nstep_x = int(np.ceil(2. * pixmax_x / pixstep_x)) + 1
    nstep_y = int(np.ceil(2. * pixmax_y / pixstep_y)) + 1
    # Build a pixel grid centered on the map center.
    center = np.array(wcsgeom._center_pix)
    grid = np.meshgrid(np.linspace(-1 * pixmax_x, pixmax_x, nstep_x),
                       np.linspace(-1 * pixmax_y, pixmax_y, nstep_y))
    grid[0] += center[0]
    grid[1] += center[1]
    test_grid = wcsgeom.pix_to_coord(grid)
    # NOTE(review): the first grid axis is fed to SkyCoord as Galactic
    # longitude and the second as latitude despite the variable names
    # suggesting the opposite -- confirm the intended (l, b) ordering.
    glat_vals = test_grid[0].flat
    glon_vals = test_grid[1].flat
    conv_vals = SkyCoord(glat_vals * u.deg, glon_vals *
                         u.deg, frame=Galactic).transform_to(ICRS)
    # Keep only the [seed:nsims] slice of the flattened grid, so the
    # output has at most (nsims - seed) directions.
    ra_vals = conv_vals.ra.deg[seed:nsims]
    dec_vals = conv_vals.dec.deg[seed:nsims]
    o_dict = {}
    for i, (ra, dec) in enumerate(zip(ra_vals, dec_vals)):
        key = i + seed
        o_dict[key] = dict(ra=ra, dec=dec)
    return o_dict
"""Build a dictionary of random directions"""
step_x = rand_config['step_x']
step_y = rand_config['step_y']
max_x = rand_config['max_x']
max_y = rand_config['max_y']
seed = rand_config['seed']
nsims = rand_config['nsims']
cdelt = wcsgeom.wcs.wcs.cdelt
pixstep_x = step_x / cdelt[0]
pixstep_y = -1. * step_y / cdelt[1]
pixmax_x = max_x / cdelt[0]
pixmax_y = max_y / cdelt[0]
nstep_x = int(np.ceil(2. * pixmax_x / pixstep_x)) + 1
nstep_y = int(np.ceil(2. * pixmax_y / pixstep_y)) + 1
center = np.array(wcsgeom._center_pix)
grid = np.meshgrid(np.linspace(-1 * pixmax_x, pixmax_x, nstep_x),
np.linspace(-1 * pixmax_y, pixmax_y, nstep_y))
grid[0] += center[0]
grid[1] += center[1]
test_grid = wcsgeom.pix_to_coord(grid)
glat_vals = test_grid[0].flat
glon_vals = test_grid[1].flat
conv_vals = SkyCoord(glat_vals * u.deg, glon_vals *
u.deg, frame=Galactic).transform_to(ICRS)
ra_vals = conv_vals.ra.deg[seed:nsims]
dec_vals = conv_vals.dec.deg[seed:nsims]
o_dict = {}
for i, (ra, dec) in enumerate(zip(ra_vals, dec_vals)):
key = i + seed
o_dict[key] = dict(ra=ra, dec=dec)
return o_dict | [
"def",
"_build_skydir_dict",
"(",
"wcsgeom",
",",
"rand_config",
")",
":",
"step_x",
"=",
"rand_config",
"[",
"'step_x'",
"]",
"step_y",
"=",
"rand_config",
"[",
"'step_y'",
"]",
"max_x",
"=",
"rand_config",
"[",
"'max_x'",
"]",
"max_y",
"=",
"rand_config",
... | Build a dictionary of random directions | [
"Build",
"a",
"dictionary",
"of",
"random",
"directions"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_sim.py#L195-L233 | train | 36,282 |
def _clone_config_and_srcmaps(config_path, seed):
    """Clone the analysis configuration and source maps for a given seed.

    Parameters
    ----------
    config_path : str
        Path to the original fermipy YAML configuration.
    seed : int
        Seed number used to tag the cloned files.

    Returns
    -------
    str
        Path to the cloned configuration file.
    """
    workdir = os.path.dirname(config_path)
    new_config_path = config_path.replace('.yaml', '_%06i.yaml' % seed)
    config = load_yaml(config_path)

    for i, comp in enumerate(config.get('components', [config])):
        comp_name = "%02i" % i
        comp.setdefault('gtlike', {})
        orig_srcmap = os.path.abspath(
            os.path.join(workdir, 'srcmap_%s.fits' % comp_name))
        new_srcmap = os.path.abspath(
            os.path.join(workdir, 'srcmap_%06i_%s.fits' % (seed, comp_name)))
        # Point the cloned component at its own copy of the source map.
        comp['gtlike']['srcmap'] = new_srcmap
        comp['gtlike']['use_external_srcmap'] = True
        copyfile(orig_srcmap, new_srcmap)

    write_yaml(config, new_config_path)
    return new_config_path
"""Clone the configuration"""
workdir = os.path.dirname(config_path)
new_config_path = config_path.replace('.yaml', '_%06i.yaml' % seed)
config = load_yaml(config_path)
comps = config.get('components', [config])
for i, comp in enumerate(comps):
comp_name = "%02i" % i
if 'gtlike' not in comp:
comp['gtlike'] = {}
orig_srcmap = os.path.abspath(os.path.join(workdir, 'srcmap_%s.fits' % (comp_name)))
new_srcmap = os.path.abspath(os.path.join(workdir, 'srcmap_%06i_%s.fits' % (seed, comp_name)))
comp['gtlike']['srcmap'] = os.path.abspath(os.path.join(workdir, 'srcmap_%06i_%s.fits' % (seed, comp_name)))
comp['gtlike']['use_external_srcmap'] = True
copyfile(orig_srcmap, new_srcmap)
write_yaml(config, new_config_path)
return new_config_path | [
"def",
"_clone_config_and_srcmaps",
"(",
"config_path",
",",
"seed",
")",
":",
"workdir",
"=",
"os",
".",
"path",
".",
"dirname",
"(",
"config_path",
")",
"new_config_path",
"=",
"config_path",
".",
"replace",
"(",
"'.yaml'",
",",
"'_%06i.yaml'",
"%",
"seed",
... | Clone the configuration | [
"Clone",
"the",
"configuration"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_sim.py#L276-L293 | train | 36,283 |
def _run_simulation(gta, roi_baseline,
                    injected_name, test_sources, current_seed, seed, non_null_src):
    """Simulate and fit one realization of this analysis.

    Parameters
    ----------
    gta : analysis object
        Fermipy-style analysis object driving the simulation.
    roi_baseline : str
        NOTE(review): currently unused in the body; the baseline file
        name is hard-coded below -- confirm.
    injected_name : str or None
        Name of an injected source to zero out after simulating.
    test_sources : list of dict
        Each entry provides a 'name' and a 'source_model' for a test source.
    current_seed : int
        Seed index used in the baseline / refit file names.
    seed : int
        Random seed for the simulation and output file names.
    non_null_src : bool
        Passed through to add_source_get_correlated; if True the test
        source is not zeroed after fitting.
    """
    # Restore the pre-simulation ROI and generate a random realization.
    gta.load_roi('sim_baseline_%06i.npy' % current_seed)
    gta.set_random_seed(seed)
    gta.simulate_roi()
    if injected_name:
        gta.zero_source(injected_name)
    gta.optimize()
    # Pick up any significant new sources near the ROI center.
    gta.find_sources(sqrt_ts_threshold=5.0, search_skydir=gta.roi.skydir,
                     search_minmax_radius=[1.0, np.nan])
    gta.optimize()
    gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
    gta.fit(covar=True)
    gta.write_roi('sim_refit_%06i' % current_seed)
    for test_source in test_sources:
        test_source_name = test_source['name']
        sedfile = "sed_%s_%06i.fits" % (test_source_name, seed)
        # NOTE(review): test_src_name is returned but never used below.
        correl_dict, test_src_name = add_source_get_correlated(gta, test_source_name,
                                                               test_source['source_model'],
                                                               correl_thresh=0.25,
                                                               non_null_src=non_null_src)
        # Write the list of correlated sources
        correl_yaml = os.path.join(gta.workdir,
                                   "correl_%s_%06i.yaml" % (test_source_name, seed))
        write_yaml(correl_dict, correl_yaml)
        # Free only the normalizations of the correlated sources for the SED.
        gta.free_sources(False)
        for src_name in correl_dict.keys():
            gta.free_source(src_name, pars='norm')
        # NOTE(review): 'pkey' is not defined anywhere in this function,
        # so this line raises NameError as written -- confirm the
        # intended prefix (possibly test_source_name).
        gta.sed(test_source_name, prefix=pkey, outfile=sedfile)
        # Set things back to how they were
        gta.delete_source(test_source_name)
        gta.load_xml('sim_refit_%06i' % current_seed)
injected_name, test_sources, current_seed, seed, non_null_src):
"""Simulate a realization of this analysis"""
gta.load_roi('sim_baseline_%06i.npy' % current_seed)
gta.set_random_seed(seed)
gta.simulate_roi()
if injected_name:
gta.zero_source(injected_name)
gta.optimize()
gta.find_sources(sqrt_ts_threshold=5.0, search_skydir=gta.roi.skydir,
search_minmax_radius=[1.0, np.nan])
gta.optimize()
gta.free_sources(skydir=gta.roi.skydir, distance=1.0, pars='norm')
gta.fit(covar=True)
gta.write_roi('sim_refit_%06i' % current_seed)
for test_source in test_sources:
test_source_name = test_source['name']
sedfile = "sed_%s_%06i.fits" % (test_source_name, seed)
correl_dict, test_src_name = add_source_get_correlated(gta, test_source_name,
test_source['source_model'],
correl_thresh=0.25,
non_null_src=non_null_src)
# Write the list of correlated sources
correl_yaml = os.path.join(gta.workdir,
"correl_%s_%06i.yaml" % (test_source_name, seed))
write_yaml(correl_dict, correl_yaml)
gta.free_sources(False)
for src_name in correl_dict.keys():
gta.free_source(src_name, pars='norm')
gta.sed(test_source_name, prefix=pkey, outfile=sedfile)
# Set things back to how they were
gta.delete_source(test_source_name)
gta.load_xml('sim_refit_%06i' % current_seed) | [
"def",
"_run_simulation",
"(",
"gta",
",",
"roi_baseline",
",",
"injected_name",
",",
"test_sources",
",",
"current_seed",
",",
"seed",
",",
"non_null_src",
")",
":",
"gta",
".",
"load_roi",
"(",
"'sim_baseline_%06i.npy'",
"%",
"current_seed",
")",
"gta",
".",
... | Simulate a realization of this analysis | [
"Simulate",
"a",
"realization",
"of",
"this",
"analysis"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/target_sim.py#L297-L334 | train | 36,284 |
fermiPy/fermipy | fermipy/scripts/merit_skimmer.py | get_branches | def get_branches(aliases):
"""Get unique branch names from an alias dictionary."""
ignore = ['pow', 'log10', 'sqrt', 'max']
branches = []
for k, v in aliases.items():
tokens = re.sub('[\(\)\+\*\/\,\=\<\>\&\!\-\|]', ' ', v).split()
for t in tokens:
if bool(re.search(r'^\d', t)) or len(t) <= 3:
continue
if bool(re.search(r'[a-zA-Z]', t)) and t not in ignore:
branches += [t]
return list(set(branches)) | python | def get_branches(aliases):
"""Get unique branch names from an alias dictionary."""
ignore = ['pow', 'log10', 'sqrt', 'max']
branches = []
for k, v in aliases.items():
tokens = re.sub('[\(\)\+\*\/\,\=\<\>\&\!\-\|]', ' ', v).split()
for t in tokens:
if bool(re.search(r'^\d', t)) or len(t) <= 3:
continue
if bool(re.search(r'[a-zA-Z]', t)) and t not in ignore:
branches += [t]
return list(set(branches)) | [
"def",
"get_branches",
"(",
"aliases",
")",
":",
"ignore",
"=",
"[",
"'pow'",
",",
"'log10'",
",",
"'sqrt'",
",",
"'max'",
"]",
"branches",
"=",
"[",
"]",
"for",
"k",
",",
"v",
"in",
"aliases",
".",
"items",
"(",
")",
":",
"tokens",
"=",
"re",
".... | Get unique branch names from an alias dictionary. | [
"Get",
"unique",
"branch",
"names",
"from",
"an",
"alias",
"dictionary",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/merit_skimmer.py#L24-L41 | train | 36,285 |
fermiPy/fermipy | fermipy/scripts/merit_skimmer.py | load_friend_chains | def load_friend_chains(chain, friend_chains, txt, nfiles=None):
"""Load a list of trees from a file and add them as friends to the
chain."""
if re.search('.root?', txt) is not None:
c = ROOT.TChain(chain.GetName())
c.SetDirectory(0)
c.Add(txt)
friend_chains.append(c)
chain.AddFriend(c, rand_str())
return
files = np.loadtxt(txt, unpack=True, dtype='str')
if files.ndim == 0:
files = np.array([files])
if nfiles is not None:
files = files[:nfiles]
print("Loading %i files..." % len(files))
c = ROOT.TChain(chain.GetName())
c.SetDirectory(0)
for f in files:
c.Add(f)
friend_chains.append(c)
chain.AddFriend(c, rand_str())
return | python | def load_friend_chains(chain, friend_chains, txt, nfiles=None):
"""Load a list of trees from a file and add them as friends to the
chain."""
if re.search('.root?', txt) is not None:
c = ROOT.TChain(chain.GetName())
c.SetDirectory(0)
c.Add(txt)
friend_chains.append(c)
chain.AddFriend(c, rand_str())
return
files = np.loadtxt(txt, unpack=True, dtype='str')
if files.ndim == 0:
files = np.array([files])
if nfiles is not None:
files = files[:nfiles]
print("Loading %i files..." % len(files))
c = ROOT.TChain(chain.GetName())
c.SetDirectory(0)
for f in files:
c.Add(f)
friend_chains.append(c)
chain.AddFriend(c, rand_str())
return | [
"def",
"load_friend_chains",
"(",
"chain",
",",
"friend_chains",
",",
"txt",
",",
"nfiles",
"=",
"None",
")",
":",
"if",
"re",
".",
"search",
"(",
"'.root?'",
",",
"txt",
")",
"is",
"not",
"None",
":",
"c",
"=",
"ROOT",
".",
"TChain",
"(",
"chain",
... | Load a list of trees from a file and add them as friends to the
chain. | [
"Load",
"a",
"list",
"of",
"trees",
"from",
"a",
"file",
"and",
"add",
"them",
"as",
"friends",
"to",
"the",
"chain",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/scripts/merit_skimmer.py#L103-L129 | train | 36,286 |
fermiPy/fermipy | fermipy/fits_utils.py | find_and_read_ebins | def find_and_read_ebins(hdulist):
""" Reads and returns the energy bin edges.
This works for both the CASE where the energies are in the ENERGIES HDU
and the case where they are in the EBOUND HDU
"""
from fermipy import utils
ebins = None
if 'ENERGIES' in hdulist:
hdu = hdulist['ENERGIES']
ectr = hdu.data.field(hdu.columns[0].name)
ebins = np.exp(utils.center_to_edge(np.log(ectr)))
elif 'EBOUNDS' in hdulist:
hdu = hdulist['EBOUNDS']
emin = hdu.data.field('E_MIN') / 1E3
emax = hdu.data.field('E_MAX') / 1E3
ebins = np.append(emin, emax[-1])
return ebins | python | def find_and_read_ebins(hdulist):
""" Reads and returns the energy bin edges.
This works for both the CASE where the energies are in the ENERGIES HDU
and the case where they are in the EBOUND HDU
"""
from fermipy import utils
ebins = None
if 'ENERGIES' in hdulist:
hdu = hdulist['ENERGIES']
ectr = hdu.data.field(hdu.columns[0].name)
ebins = np.exp(utils.center_to_edge(np.log(ectr)))
elif 'EBOUNDS' in hdulist:
hdu = hdulist['EBOUNDS']
emin = hdu.data.field('E_MIN') / 1E3
emax = hdu.data.field('E_MAX') / 1E3
ebins = np.append(emin, emax[-1])
return ebins | [
"def",
"find_and_read_ebins",
"(",
"hdulist",
")",
":",
"from",
"fermipy",
"import",
"utils",
"ebins",
"=",
"None",
"if",
"'ENERGIES'",
"in",
"hdulist",
":",
"hdu",
"=",
"hdulist",
"[",
"'ENERGIES'",
"]",
"ectr",
"=",
"hdu",
".",
"data",
".",
"field",
"(... | Reads and returns the energy bin edges.
This works for both the CASE where the energies are in the ENERGIES HDU
and the case where they are in the EBOUND HDU | [
"Reads",
"and",
"returns",
"the",
"energy",
"bin",
"edges",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/fits_utils.py#L23-L40 | train | 36,287 |
fermiPy/fermipy | fermipy/fits_utils.py | read_energy_bounds | def read_energy_bounds(hdu):
""" Reads and returns the energy bin edges from a FITs HDU
"""
nebins = len(hdu.data)
ebin_edges = np.ndarray((nebins + 1))
try:
ebin_edges[0:-1] = np.log10(hdu.data.field("E_MIN")) - 3.
ebin_edges[-1] = np.log10(hdu.data.field("E_MAX")[-1]) - 3.
except KeyError:
ebin_edges[0:-1] = np.log10(hdu.data.field("energy_MIN"))
ebin_edges[-1] = np.log10(hdu.data.field("energy_MAX")[-1])
return ebin_edges | python | def read_energy_bounds(hdu):
""" Reads and returns the energy bin edges from a FITs HDU
"""
nebins = len(hdu.data)
ebin_edges = np.ndarray((nebins + 1))
try:
ebin_edges[0:-1] = np.log10(hdu.data.field("E_MIN")) - 3.
ebin_edges[-1] = np.log10(hdu.data.field("E_MAX")[-1]) - 3.
except KeyError:
ebin_edges[0:-1] = np.log10(hdu.data.field("energy_MIN"))
ebin_edges[-1] = np.log10(hdu.data.field("energy_MAX")[-1])
return ebin_edges | [
"def",
"read_energy_bounds",
"(",
"hdu",
")",
":",
"nebins",
"=",
"len",
"(",
"hdu",
".",
"data",
")",
"ebin_edges",
"=",
"np",
".",
"ndarray",
"(",
"(",
"nebins",
"+",
"1",
")",
")",
"try",
":",
"ebin_edges",
"[",
"0",
":",
"-",
"1",
"]",
"=",
... | Reads and returns the energy bin edges from a FITs HDU | [
"Reads",
"and",
"returns",
"the",
"energy",
"bin",
"edges",
"from",
"a",
"FITs",
"HDU"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/fits_utils.py#L43-L54 | train | 36,288 |
fermiPy/fermipy | fermipy/fits_utils.py | read_spectral_data | def read_spectral_data(hdu):
""" Reads and returns the energy bin edges, fluxes and npreds from
a FITs HDU
"""
ebins = read_energy_bounds(hdu)
fluxes = np.ndarray((len(ebins)))
try:
fluxes[0:-1] = hdu.data.field("E_MIN_FL")
fluxes[-1] = hdu.data.field("E_MAX_FL")[-1]
npreds = hdu.data.field("NPRED")
except:
fluxes = np.ones((len(ebins)))
npreds = np.ones((len(ebins)))
return ebins, fluxes, npreds | python | def read_spectral_data(hdu):
""" Reads and returns the energy bin edges, fluxes and npreds from
a FITs HDU
"""
ebins = read_energy_bounds(hdu)
fluxes = np.ndarray((len(ebins)))
try:
fluxes[0:-1] = hdu.data.field("E_MIN_FL")
fluxes[-1] = hdu.data.field("E_MAX_FL")[-1]
npreds = hdu.data.field("NPRED")
except:
fluxes = np.ones((len(ebins)))
npreds = np.ones((len(ebins)))
return ebins, fluxes, npreds | [
"def",
"read_spectral_data",
"(",
"hdu",
")",
":",
"ebins",
"=",
"read_energy_bounds",
"(",
"hdu",
")",
"fluxes",
"=",
"np",
".",
"ndarray",
"(",
"(",
"len",
"(",
"ebins",
")",
")",
")",
"try",
":",
"fluxes",
"[",
"0",
":",
"-",
"1",
"]",
"=",
"h... | Reads and returns the energy bin edges, fluxes and npreds from
a FITs HDU | [
"Reads",
"and",
"returns",
"the",
"energy",
"bin",
"edges",
"fluxes",
"and",
"npreds",
"from",
"a",
"FITs",
"HDU"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/fits_utils.py#L57-L70 | train | 36,289 |
fermiPy/fermipy | fermipy/fits_utils.py | make_energies_hdu | def make_energies_hdu(energy_vals, extname="ENERGIES"):
""" Builds and returns a FITs HDU with the energy values
extname : The HDU extension name
"""
cols = [fits.Column("Energy", "D", unit='MeV', array=energy_vals)]
hdu = fits.BinTableHDU.from_columns(cols, name=extname)
return hdu | python | def make_energies_hdu(energy_vals, extname="ENERGIES"):
""" Builds and returns a FITs HDU with the energy values
extname : The HDU extension name
"""
cols = [fits.Column("Energy", "D", unit='MeV', array=energy_vals)]
hdu = fits.BinTableHDU.from_columns(cols, name=extname)
return hdu | [
"def",
"make_energies_hdu",
"(",
"energy_vals",
",",
"extname",
"=",
"\"ENERGIES\"",
")",
":",
"cols",
"=",
"[",
"fits",
".",
"Column",
"(",
"\"Energy\"",
",",
"\"D\"",
",",
"unit",
"=",
"'MeV'",
",",
"array",
"=",
"energy_vals",
")",
"]",
"hdu",
"=",
... | Builds and returns a FITs HDU with the energy values
extname : The HDU extension name | [
"Builds",
"and",
"returns",
"a",
"FITs",
"HDU",
"with",
"the",
"energy",
"values"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/fits_utils.py#L73-L80 | train | 36,290 |
fermiPy/fermipy | fermipy/fits_utils.py | read_projection_from_fits | def read_projection_from_fits(fitsfile, extname=None):
"""
Load a WCS or HPX projection.
"""
f = fits.open(fitsfile)
nhdu = len(f)
# Try and get the energy bounds
try:
ebins = find_and_read_ebins(f)
except:
ebins = None
if extname is None:
# If there is an image in the Primary HDU we can return a WCS-based
# projection
if f[0].header['NAXIS'] != 0:
proj = WCS(f[0].header)
return proj, f, f[0]
else:
if f[extname].header['XTENSION'] == 'IMAGE':
proj = WCS(f[extname].header)
return proj, f, f[extname]
elif extname in ['SKYMAP', 'SKYMAP2']:
proj = HPX.create_from_hdu(f[extname], ebins)
return proj, f, f[extname]
elif f[extname].header['XTENSION'] == 'BINTABLE':
try:
if f[extname].header['PIXTYPE'] == 'HEALPIX':
proj = HPX.create_from_hdu(f[extname], ebins)
return proj, f, f[extname]
except:
pass
return None, f, None
# Loop on HDU and look for either an image or a table with HEALPix data
for i in range(1, nhdu):
# if there is an image we can return a WCS-based projection
if f[i].header['XTENSION'] == 'IMAGE':
proj = WCS(f[i].header)
return proj, f, f[i]
elif f[i].header['XTENSION'] == 'BINTABLE':
if f[i].name in ['SKYMAP', 'SKYMAP2']:
proj = HPX.create_from_hdu(f[i], ebins)
return proj, f, f[i]
try:
if f[i].header['PIXTYPE'] == 'HEALPIX':
proj = HPX.create_from_hdu(f[i], ebins)
return proj, f, f[i]
except:
pass
return None, f, None | python | def read_projection_from_fits(fitsfile, extname=None):
"""
Load a WCS or HPX projection.
"""
f = fits.open(fitsfile)
nhdu = len(f)
# Try and get the energy bounds
try:
ebins = find_and_read_ebins(f)
except:
ebins = None
if extname is None:
# If there is an image in the Primary HDU we can return a WCS-based
# projection
if f[0].header['NAXIS'] != 0:
proj = WCS(f[0].header)
return proj, f, f[0]
else:
if f[extname].header['XTENSION'] == 'IMAGE':
proj = WCS(f[extname].header)
return proj, f, f[extname]
elif extname in ['SKYMAP', 'SKYMAP2']:
proj = HPX.create_from_hdu(f[extname], ebins)
return proj, f, f[extname]
elif f[extname].header['XTENSION'] == 'BINTABLE':
try:
if f[extname].header['PIXTYPE'] == 'HEALPIX':
proj = HPX.create_from_hdu(f[extname], ebins)
return proj, f, f[extname]
except:
pass
return None, f, None
# Loop on HDU and look for either an image or a table with HEALPix data
for i in range(1, nhdu):
# if there is an image we can return a WCS-based projection
if f[i].header['XTENSION'] == 'IMAGE':
proj = WCS(f[i].header)
return proj, f, f[i]
elif f[i].header['XTENSION'] == 'BINTABLE':
if f[i].name in ['SKYMAP', 'SKYMAP2']:
proj = HPX.create_from_hdu(f[i], ebins)
return proj, f, f[i]
try:
if f[i].header['PIXTYPE'] == 'HEALPIX':
proj = HPX.create_from_hdu(f[i], ebins)
return proj, f, f[i]
except:
pass
return None, f, None | [
"def",
"read_projection_from_fits",
"(",
"fitsfile",
",",
"extname",
"=",
"None",
")",
":",
"f",
"=",
"fits",
".",
"open",
"(",
"fitsfile",
")",
"nhdu",
"=",
"len",
"(",
"f",
")",
"# Try and get the energy bounds",
"try",
":",
"ebins",
"=",
"find_and_read_eb... | Load a WCS or HPX projection. | [
"Load",
"a",
"WCS",
"or",
"HPX",
"projection",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/fits_utils.py#L125-L176 | train | 36,291 |
fermiPy/fermipy | fermipy/fits_utils.py | write_tables_to_fits | def write_tables_to_fits(filepath, tablelist, clobber=False,
namelist=None, cardslist=None, hdu_list=None):
"""
Write some astropy.table.Table objects to a single fits file
"""
outhdulist = [fits.PrimaryHDU()]
rmlist = []
for i, table in enumerate(tablelist):
ft_name = "%s._%i" % (filepath, i)
rmlist.append(ft_name)
try:
os.unlink(ft_name)
except:
pass
table.write(ft_name, format="fits")
ft_in = fits.open(ft_name)
if namelist:
ft_in[1].name = namelist[i]
if cardslist:
for k, v in cardslist[i].items():
ft_in[1].header[k] = v
ft_in[1].update()
outhdulist += [ft_in[1]]
if hdu_list is not None:
for h in hdu_list:
outhdulist.append(h)
fits.HDUList(outhdulist).writeto(filepath, overwrite=clobber)
for rm in rmlist:
os.unlink(rm) | python | def write_tables_to_fits(filepath, tablelist, clobber=False,
namelist=None, cardslist=None, hdu_list=None):
"""
Write some astropy.table.Table objects to a single fits file
"""
outhdulist = [fits.PrimaryHDU()]
rmlist = []
for i, table in enumerate(tablelist):
ft_name = "%s._%i" % (filepath, i)
rmlist.append(ft_name)
try:
os.unlink(ft_name)
except:
pass
table.write(ft_name, format="fits")
ft_in = fits.open(ft_name)
if namelist:
ft_in[1].name = namelist[i]
if cardslist:
for k, v in cardslist[i].items():
ft_in[1].header[k] = v
ft_in[1].update()
outhdulist += [ft_in[1]]
if hdu_list is not None:
for h in hdu_list:
outhdulist.append(h)
fits.HDUList(outhdulist).writeto(filepath, overwrite=clobber)
for rm in rmlist:
os.unlink(rm) | [
"def",
"write_tables_to_fits",
"(",
"filepath",
",",
"tablelist",
",",
"clobber",
"=",
"False",
",",
"namelist",
"=",
"None",
",",
"cardslist",
"=",
"None",
",",
"hdu_list",
"=",
"None",
")",
":",
"outhdulist",
"=",
"[",
"fits",
".",
"PrimaryHDU",
"(",
"... | Write some astropy.table.Table objects to a single fits file | [
"Write",
"some",
"astropy",
".",
"table",
".",
"Table",
"objects",
"to",
"a",
"single",
"fits",
"file"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/fits_utils.py#L179-L209 | train | 36,292 |
fermiPy/fermipy | fermipy/residmap.py | get_source_kernel | def get_source_kernel(gta, name, kernel=None):
"""Get the PDF for the given source."""
sm = []
zs = 0
for c in gta.components:
z = c.model_counts_map(name).data.astype('float')
if kernel is not None:
shape = (z.shape[0],) + kernel.shape
z = np.apply_over_axes(np.sum, z, axes=[1, 2]) * np.ones(
shape) * kernel[np.newaxis, :, :]
zs += np.sum(z)
else:
zs += np.sum(z)
sm.append(z)
sm2 = 0
for i, m in enumerate(sm):
sm[i] /= zs
sm2 += np.sum(sm[i] ** 2)
for i, m in enumerate(sm):
sm[i] /= sm2
return sm | python | def get_source_kernel(gta, name, kernel=None):
"""Get the PDF for the given source."""
sm = []
zs = 0
for c in gta.components:
z = c.model_counts_map(name).data.astype('float')
if kernel is not None:
shape = (z.shape[0],) + kernel.shape
z = np.apply_over_axes(np.sum, z, axes=[1, 2]) * np.ones(
shape) * kernel[np.newaxis, :, :]
zs += np.sum(z)
else:
zs += np.sum(z)
sm.append(z)
sm2 = 0
for i, m in enumerate(sm):
sm[i] /= zs
sm2 += np.sum(sm[i] ** 2)
for i, m in enumerate(sm):
sm[i] /= sm2
return sm | [
"def",
"get_source_kernel",
"(",
"gta",
",",
"name",
",",
"kernel",
"=",
"None",
")",
":",
"sm",
"=",
"[",
"]",
"zs",
"=",
"0",
"for",
"c",
"in",
"gta",
".",
"components",
":",
"z",
"=",
"c",
".",
"model_counts_map",
"(",
"name",
")",
".",
"data"... | Get the PDF for the given source. | [
"Get",
"the",
"PDF",
"for",
"the",
"given",
"source",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/residmap.py#L190-L215 | train | 36,293 |
fermiPy/fermipy | fermipy/residmap.py | ResidMapGenerator.residmap | def residmap(self, prefix='', **kwargs):
"""Generate 2-D spatial residual maps using the current ROI
model and the convolution kernel defined with the `model`
argument.
Parameters
----------
prefix : str
String that will be prefixed to the output residual map files.
{options}
Returns
-------
maps : dict
A dictionary containing the `~fermipy.utils.Map` objects
for the residual significance and amplitude.
"""
timer = Timer.create(start=True)
self.logger.info('Generating residual maps')
schema = ConfigSchema(self.defaults['residmap'])
config = schema.create_config(self.config['residmap'], **kwargs)
# Defining default properties of test source model
config['model'].setdefault('Index', 2.0)
config['model'].setdefault('SpectrumType', 'PowerLaw')
config['model'].setdefault('SpatialModel', 'PointSource')
config['model'].setdefault('Prefactor', 1E-13)
o = self._make_residual_map(prefix, **config)
if config['make_plots']:
plotter = plotting.AnalysisPlotter(self.config['plotting'],
fileio=self.config['fileio'],
logging=self.config['logging'])
plotter.make_residmap_plots(o, self.roi)
self.logger.info('Finished residual maps')
outfile = utils.format_filename(self.workdir, 'residmap',
prefix=[o['name']])
if config['write_fits']:
o['file'] = os.path.basename(outfile) + '.fits'
self._make_residmap_fits(o, outfile + '.fits')
if config['write_npy']:
np.save(outfile + '.npy', o)
self.logger.info('Execution time: %.2f s', timer.elapsed_time)
return o | python | def residmap(self, prefix='', **kwargs):
"""Generate 2-D spatial residual maps using the current ROI
model and the convolution kernel defined with the `model`
argument.
Parameters
----------
prefix : str
String that will be prefixed to the output residual map files.
{options}
Returns
-------
maps : dict
A dictionary containing the `~fermipy.utils.Map` objects
for the residual significance and amplitude.
"""
timer = Timer.create(start=True)
self.logger.info('Generating residual maps')
schema = ConfigSchema(self.defaults['residmap'])
config = schema.create_config(self.config['residmap'], **kwargs)
# Defining default properties of test source model
config['model'].setdefault('Index', 2.0)
config['model'].setdefault('SpectrumType', 'PowerLaw')
config['model'].setdefault('SpatialModel', 'PointSource')
config['model'].setdefault('Prefactor', 1E-13)
o = self._make_residual_map(prefix, **config)
if config['make_plots']:
plotter = plotting.AnalysisPlotter(self.config['plotting'],
fileio=self.config['fileio'],
logging=self.config['logging'])
plotter.make_residmap_plots(o, self.roi)
self.logger.info('Finished residual maps')
outfile = utils.format_filename(self.workdir, 'residmap',
prefix=[o['name']])
if config['write_fits']:
o['file'] = os.path.basename(outfile) + '.fits'
self._make_residmap_fits(o, outfile + '.fits')
if config['write_npy']:
np.save(outfile + '.npy', o)
self.logger.info('Execution time: %.2f s', timer.elapsed_time)
return o | [
"def",
"residmap",
"(",
"self",
",",
"prefix",
"=",
"''",
",",
"*",
"*",
"kwargs",
")",
":",
"timer",
"=",
"Timer",
".",
"create",
"(",
"start",
"=",
"True",
")",
"self",
".",
"logger",
".",
"info",
"(",
"'Generating residual maps'",
")",
"schema",
"... | Generate 2-D spatial residual maps using the current ROI
model and the convolution kernel defined with the `model`
argument.
Parameters
----------
prefix : str
String that will be prefixed to the output residual map files.
{options}
Returns
-------
maps : dict
A dictionary containing the `~fermipy.utils.Map` objects
for the residual significance and amplitude. | [
"Generate",
"2",
"-",
"D",
"spatial",
"residual",
"maps",
"using",
"the",
"current",
"ROI",
"model",
"and",
"the",
"convolution",
"kernel",
"defined",
"with",
"the",
"model",
"argument",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/residmap.py#L225-L279 | train | 36,294 |
fermiPy/fermipy | fermipy/jobs/factory.py | LinkFactory.create | def create(appname, **kwargs):
"""Create a `Link` of a particular class, using the kwargs as options"""
if appname in LinkFactory._class_dict:
return LinkFactory._class_dict[appname].create(**kwargs)
else:
raise KeyError(
"Could not create object associated to app %s" % appname) | python | def create(appname, **kwargs):
"""Create a `Link` of a particular class, using the kwargs as options"""
if appname in LinkFactory._class_dict:
return LinkFactory._class_dict[appname].create(**kwargs)
else:
raise KeyError(
"Could not create object associated to app %s" % appname) | [
"def",
"create",
"(",
"appname",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"appname",
"in",
"LinkFactory",
".",
"_class_dict",
":",
"return",
"LinkFactory",
".",
"_class_dict",
"[",
"appname",
"]",
".",
"create",
"(",
"*",
"*",
"kwargs",
")",
"else",
":... | Create a `Link` of a particular class, using the kwargs as options | [
"Create",
"a",
"Link",
"of",
"a",
"particular",
"class",
"using",
"the",
"kwargs",
"as",
"options"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/factory.py#L25-L32 | train | 36,295 |
fermiPy/fermipy | fermipy/diffuse/name_policy.py | NameFactory._replace_none | def _replace_none(self, aDict):
""" Replace all None values in a dict with 'none' """
for k, v in aDict.items():
if v is None:
aDict[k] = 'none' | python | def _replace_none(self, aDict):
""" Replace all None values in a dict with 'none' """
for k, v in aDict.items():
if v is None:
aDict[k] = 'none' | [
"def",
"_replace_none",
"(",
"self",
",",
"aDict",
")",
":",
"for",
"k",
",",
"v",
"in",
"aDict",
".",
"items",
"(",
")",
":",
"if",
"v",
"is",
"None",
":",
"aDict",
"[",
"k",
"]",
"=",
"'none'"
] | Replace all None values in a dict with 'none' | [
"Replace",
"all",
"None",
"values",
"in",
"a",
"dict",
"with",
"none"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/name_policy.py#L136-L140 | train | 36,296 |
fermiPy/fermipy | fermipy/diffuse/name_policy.py | NameFactory.irfs | def irfs(self, **kwargs):
""" Get the name of IFRs associted with a particular dataset
"""
dsval = kwargs.get('dataset', self.dataset(**kwargs))
tokens = dsval.split('_')
irf_name = "%s_%s_%s" % (DATASET_DICTIONARY['%s_%s' % (tokens[0], tokens[1])],
EVCLASS_NAME_DICTIONARY[tokens[3]],
kwargs.get('irf_ver'))
return irf_name | python | def irfs(self, **kwargs):
""" Get the name of IFRs associted with a particular dataset
"""
dsval = kwargs.get('dataset', self.dataset(**kwargs))
tokens = dsval.split('_')
irf_name = "%s_%s_%s" % (DATASET_DICTIONARY['%s_%s' % (tokens[0], tokens[1])],
EVCLASS_NAME_DICTIONARY[tokens[3]],
kwargs.get('irf_ver'))
return irf_name | [
"def",
"irfs",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"dsval",
"=",
"kwargs",
".",
"get",
"(",
"'dataset'",
",",
"self",
".",
"dataset",
"(",
"*",
"*",
"kwargs",
")",
")",
"tokens",
"=",
"dsval",
".",
"split",
"(",
"'_'",
")",
"irf_name"... | Get the name of IFRs associted with a particular dataset | [
"Get",
"the",
"name",
"of",
"IFRs",
"associted",
"with",
"a",
"particular",
"dataset"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/name_policy.py#L152-L160 | train | 36,297 |
fermiPy/fermipy | fermipy/diffuse/name_policy.py | NameFactory.dataset | def dataset(self, **kwargs):
""" Return a key that specifies the data selection
"""
kwargs_copy = self.base_dict.copy()
kwargs_copy.update(**kwargs)
self._replace_none(kwargs_copy)
try:
return NameFactory.dataset_format.format(**kwargs_copy)
except KeyError:
return None | python | def dataset(self, **kwargs):
""" Return a key that specifies the data selection
"""
kwargs_copy = self.base_dict.copy()
kwargs_copy.update(**kwargs)
self._replace_none(kwargs_copy)
try:
return NameFactory.dataset_format.format(**kwargs_copy)
except KeyError:
return None | [
"def",
"dataset",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs_copy",
"=",
"self",
".",
"base_dict",
".",
"copy",
"(",
")",
"kwargs_copy",
".",
"update",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"_replace_none",
"(",
"kwargs_copy",
")",
... | Return a key that specifies the data selection | [
"Return",
"a",
"key",
"that",
"specifies",
"the",
"data",
"selection"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/name_policy.py#L167-L176 | train | 36,298 |
fermiPy/fermipy | fermipy/diffuse/name_policy.py | NameFactory.component | def component(self, **kwargs):
""" Return a key that specifies data the sub-selection
"""
kwargs_copy = self.base_dict.copy()
kwargs_copy.update(**kwargs)
self._replace_none(kwargs_copy)
try:
return NameFactory.component_format.format(**kwargs_copy)
except KeyError:
return None | python | def component(self, **kwargs):
""" Return a key that specifies data the sub-selection
"""
kwargs_copy = self.base_dict.copy()
kwargs_copy.update(**kwargs)
self._replace_none(kwargs_copy)
try:
return NameFactory.component_format.format(**kwargs_copy)
except KeyError:
return None | [
"def",
"component",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs_copy",
"=",
"self",
".",
"base_dict",
".",
"copy",
"(",
")",
"kwargs_copy",
".",
"update",
"(",
"*",
"*",
"kwargs",
")",
"self",
".",
"_replace_none",
"(",
"kwargs_copy",
")",
... | Return a key that specifies data the sub-selection | [
"Return",
"a",
"key",
"that",
"specifies",
"data",
"the",
"sub",
"-",
"selection"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/name_policy.py#L178-L187 | train | 36,299 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.