repo stringlengths 7 55 | path stringlengths 4 127 | func_name stringlengths 1 88 | original_string stringlengths 75 19.8k | language stringclasses 1 value | code stringlengths 75 19.8k | code_tokens listlengths 20 707 | docstring stringlengths 3 17.3k | docstring_tokens listlengths 3 222 | sha stringlengths 40 40 | url stringlengths 87 242 | partition stringclasses 1 value | idx int64 0 252k |
|---|---|---|---|---|---|---|---|---|---|---|---|---|
limdauto/drf_openapi | drf_openapi/codec.py | _get_parameters | def _get_parameters(link, encoding):
"""
Generates Swagger Parameter Item object.
"""
parameters = []
properties = {}
required = []
for field in link.fields:
parser = OpenApiFieldParser(link, field)
if parser.location == 'form':
if encoding in ('multipart/form-data', 'application/x-www-form-urlencoded'):
# 'formData' in swagger MUST be one of these media types.
parameters.append(parser.as_parameter())
else:
# Expand coreapi fields with location='form' into a single swagger
# parameter, with a schema containing multiple properties.
properties[field.name] = parser.as_schema_property()
if field.required:
required.append(field.name)
elif parser.location == 'body':
parameters.append(parser.as_body_parameter(encoding))
else:
parameters.append(parser.as_parameter())
if properties:
parameter = {
'name': 'data',
'in': 'body',
'schema': {
'type': 'object',
'properties': properties
}
}
if required:
parameter['schema']['required'] = required
parameters.append(parameter)
return parameters | python | def _get_parameters(link, encoding):
"""
Generates Swagger Parameter Item object.
"""
parameters = []
properties = {}
required = []
for field in link.fields:
parser = OpenApiFieldParser(link, field)
if parser.location == 'form':
if encoding in ('multipart/form-data', 'application/x-www-form-urlencoded'):
# 'formData' in swagger MUST be one of these media types.
parameters.append(parser.as_parameter())
else:
# Expand coreapi fields with location='form' into a single swagger
# parameter, with a schema containing multiple properties.
properties[field.name] = parser.as_schema_property()
if field.required:
required.append(field.name)
elif parser.location == 'body':
parameters.append(parser.as_body_parameter(encoding))
else:
parameters.append(parser.as_parameter())
if properties:
parameter = {
'name': 'data',
'in': 'body',
'schema': {
'type': 'object',
'properties': properties
}
}
if required:
parameter['schema']['required'] = required
parameters.append(parameter)
return parameters | [
"def",
"_get_parameters",
"(",
"link",
",",
"encoding",
")",
":",
"parameters",
"=",
"[",
"]",
"properties",
"=",
"{",
"}",
"required",
"=",
"[",
"]",
"for",
"field",
"in",
"link",
".",
"fields",
":",
"parser",
"=",
"OpenApiFieldParser",
"(",
"link",
"... | Generates Swagger Parameter Item object. | [
"Generates",
"Swagger",
"Parameter",
"Item",
"object",
"."
] | 1673c6e039eec7f089336a83bdc31613f32f7e21 | https://github.com/limdauto/drf_openapi/blob/1673c6e039eec7f089336a83bdc31613f32f7e21/drf_openapi/codec.py#L216-L254 | train | 35,800 |
demianbrecht/sanction | sanction/__init__.py | Client.auth_uri | def auth_uri(self, redirect_uri=None, scope=None, scope_delim=None,
state=None, **kwargs):
""" Builds the auth URI for the authorization endpoint
:param scope: (optional) The `scope` parameter to pass for
authorization. The format should match that expected by
the provider (i.e. Facebook expects comma-delimited,
while Google expects space-delimited)
:param state: (optional) The `state` parameter to pass for
authorization. If the provider follows the OAuth 2.0
spec, this will be returned to your `redirect_uri` after
authorization. Generally used for CSRF protection.
:param **kwargs: Any other querystring parameters to be passed to the
provider.
"""
kwargs.update({
'client_id': self.client_id,
'response_type': 'code',
})
if scope is not None:
kwargs['scope'] = scope
if state is not None:
kwargs['state'] = state
if redirect_uri is not None:
kwargs['redirect_uri'] = redirect_uri
return '%s?%s' % (self.auth_endpoint, urlencode(kwargs)) | python | def auth_uri(self, redirect_uri=None, scope=None, scope_delim=None,
state=None, **kwargs):
""" Builds the auth URI for the authorization endpoint
:param scope: (optional) The `scope` parameter to pass for
authorization. The format should match that expected by
the provider (i.e. Facebook expects comma-delimited,
while Google expects space-delimited)
:param state: (optional) The `state` parameter to pass for
authorization. If the provider follows the OAuth 2.0
spec, this will be returned to your `redirect_uri` after
authorization. Generally used for CSRF protection.
:param **kwargs: Any other querystring parameters to be passed to the
provider.
"""
kwargs.update({
'client_id': self.client_id,
'response_type': 'code',
})
if scope is not None:
kwargs['scope'] = scope
if state is not None:
kwargs['state'] = state
if redirect_uri is not None:
kwargs['redirect_uri'] = redirect_uri
return '%s?%s' % (self.auth_endpoint, urlencode(kwargs)) | [
"def",
"auth_uri",
"(",
"self",
",",
"redirect_uri",
"=",
"None",
",",
"scope",
"=",
"None",
",",
"scope_delim",
"=",
"None",
",",
"state",
"=",
"None",
",",
"*",
"*",
"kwargs",
")",
":",
"kwargs",
".",
"update",
"(",
"{",
"'client_id'",
":",
"self",... | Builds the auth URI for the authorization endpoint
:param scope: (optional) The `scope` parameter to pass for
authorization. The format should match that expected by
the provider (i.e. Facebook expects comma-delimited,
while Google expects space-delimited)
:param state: (optional) The `state` parameter to pass for
authorization. If the provider follows the OAuth 2.0
spec, this will be returned to your `redirect_uri` after
authorization. Generally used for CSRF protection.
:param **kwargs: Any other querystring parameters to be passed to the
provider. | [
"Builds",
"the",
"auth",
"URI",
"for",
"the",
"authorization",
"endpoint"
] | 08575f149c7ece20c902148c4d1576ab162ae0a8 | https://github.com/demianbrecht/sanction/blob/08575f149c7ece20c902148c4d1576ab162ae0a8/sanction/__init__.py#L63-L93 | train | 35,801 |
fermiPy/fermipy | fermipy/diffuse/fitting.py | build_srcdict | def build_srcdict(gta, prop):
"""Build a dictionary that maps from source name to the value of a source property
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
prop : str
The name of the property we are mapping
Returns
-------
odict : dict
Dictionary that maps from source name to the value of the specified property
"""
o = {}
for s in gta.roi.sources:
o[s.name] = s[prop]
return o | python | def build_srcdict(gta, prop):
"""Build a dictionary that maps from source name to the value of a source property
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
prop : str
The name of the property we are mapping
Returns
-------
odict : dict
Dictionary that maps from source name to the value of the specified property
"""
o = {}
for s in gta.roi.sources:
o[s.name] = s[prop]
return o | [
"def",
"build_srcdict",
"(",
"gta",
",",
"prop",
")",
":",
"o",
"=",
"{",
"}",
"for",
"s",
"in",
"gta",
".",
"roi",
".",
"sources",
":",
"o",
"[",
"s",
".",
"name",
"]",
"=",
"s",
"[",
"prop",
"]",
"return",
"o"
] | Build a dictionary that maps from source name to the value of a source property
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
prop : str
The name of the property we are mapping
Returns
-------
odict : dict
Dictionary that maps from source name to the value of the specified property | [
"Build",
"a",
"dictionary",
"that",
"maps",
"from",
"source",
"name",
"to",
"the",
"value",
"of",
"a",
"source",
"property"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/fitting.py#L33-L56 | train | 35,802 |
fermiPy/fermipy | fermipy/diffuse/fitting.py | get_src_names | def get_src_names(gta):
"""Build and return a list of source name
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
Returns
-------
l : list
Names of the source
"""
o = []
for s in gta.roi.sources:
o += [s.name]
return sorted(o) | python | def get_src_names(gta):
"""Build and return a list of source name
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
Returns
-------
l : list
Names of the source
"""
o = []
for s in gta.roi.sources:
o += [s.name]
return sorted(o) | [
"def",
"get_src_names",
"(",
"gta",
")",
":",
"o",
"=",
"[",
"]",
"for",
"s",
"in",
"gta",
".",
"roi",
".",
"sources",
":",
"o",
"+=",
"[",
"s",
".",
"name",
"]",
"return",
"sorted",
"(",
"o",
")"
] | Build and return a list of source name
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
Returns
-------
l : list
Names of the source | [
"Build",
"and",
"return",
"a",
"list",
"of",
"source",
"name"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/fitting.py#L59-L79 | train | 35,803 |
fermiPy/fermipy | fermipy/diffuse/fitting.py | set_wts_get_npred_wt | def set_wts_get_npred_wt(gta, maskname):
"""Set a weights file and get the weighted npred for all the sources
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
maskname : str
The path to the file with the mask
Returns
-------
odict : dict
Dictionary mapping from source name to weighted npred
"""
if is_null(maskname):
maskname = None
gta.set_weights_map(maskname)
for name in gta.like.sourceNames():
gta._init_source(name)
gta._update_roi()
return build_srcdict(gta, 'npred_wt') | python | def set_wts_get_npred_wt(gta, maskname):
"""Set a weights file and get the weighted npred for all the sources
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
maskname : str
The path to the file with the mask
Returns
-------
odict : dict
Dictionary mapping from source name to weighted npred
"""
if is_null(maskname):
maskname = None
gta.set_weights_map(maskname)
for name in gta.like.sourceNames():
gta._init_source(name)
gta._update_roi()
return build_srcdict(gta, 'npred_wt') | [
"def",
"set_wts_get_npred_wt",
"(",
"gta",
",",
"maskname",
")",
":",
"if",
"is_null",
"(",
"maskname",
")",
":",
"maskname",
"=",
"None",
"gta",
".",
"set_weights_map",
"(",
"maskname",
")",
"for",
"name",
"in",
"gta",
".",
"like",
".",
"sourceNames",
"... | Set a weights file and get the weighted npred for all the sources
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
maskname : str
The path to the file with the mask
Returns
-------
odict : dict
Dictionary mapping from source name to weighted npred | [
"Set",
"a",
"weights",
"file",
"and",
"get",
"the",
"weighted",
"npred",
"for",
"all",
"the",
"sources"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/fitting.py#L82-L109 | train | 35,804 |
fermiPy/fermipy | fermipy/diffuse/fitting.py | snapshot | def snapshot(gta, plotter, key, do_weighted=True, make_plots=True):
"""Take a snapshot of the ROI
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
plotter : `fermipy.plotting.AnalysisPlotter`
The object that makes the plots
key : str
Key for this snapshot, used to create filenames
do_weighted : bool
If True, include weighted version of outputs
make_plots : bool
If True, make plots
"""
gta.write_roi(key, save_model_map=True, make_plots=make_plots, save_weight_map=do_weighted)
if make_plots:
o = gta.residmap(key)
plotter.make_residmap_plots(o, gta.roi)
if do_weighted:
gta.make_plots("%s_wt"%key, weighted=True)
o = gta.residmap("%s_wt"%key, use_weights=True)
plotter.make_residmap_plots(o, gta.roi) | python | def snapshot(gta, plotter, key, do_weighted=True, make_plots=True):
"""Take a snapshot of the ROI
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
plotter : `fermipy.plotting.AnalysisPlotter`
The object that makes the plots
key : str
Key for this snapshot, used to create filenames
do_weighted : bool
If True, include weighted version of outputs
make_plots : bool
If True, make plots
"""
gta.write_roi(key, save_model_map=True, make_plots=make_plots, save_weight_map=do_weighted)
if make_plots:
o = gta.residmap(key)
plotter.make_residmap_plots(o, gta.roi)
if do_weighted:
gta.make_plots("%s_wt"%key, weighted=True)
o = gta.residmap("%s_wt"%key, use_weights=True)
plotter.make_residmap_plots(o, gta.roi) | [
"def",
"snapshot",
"(",
"gta",
",",
"plotter",
",",
"key",
",",
"do_weighted",
"=",
"True",
",",
"make_plots",
"=",
"True",
")",
":",
"gta",
".",
"write_roi",
"(",
"key",
",",
"save_model_map",
"=",
"True",
",",
"make_plots",
"=",
"make_plots",
",",
"s... | Take a snapshot of the ROI
Parameters
----------
gta : `fermipy.GTAnalysis`
The analysis object
plotter : `fermipy.plotting.AnalysisPlotter`
The object that makes the plots
key : str
Key for this snapshot, used to create filenames
do_weighted : bool
If True, include weighted version of outputs
make_plots : bool
If True, make plots | [
"Take",
"a",
"snapshot",
"of",
"the",
"ROI"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/fitting.py#L112-L142 | train | 35,805 |
fermiPy/fermipy | fermipy/diffuse/fitting.py | get_unchanged | def get_unchanged(src_list, npred_dict_new,
npred_dict_old,
npred_threshold=1e4,
frac_threshold=0.9):
"""Compare two dictionarys of npreds, and get the list of sources
than have changed less that set thresholds
Parameters
----------
src_list : list
List of sources to examine
npred_dict_new : dict
Dictionary mapping source name to npred for the current weights file
npred_dict_old : dict
Dictionary mapping source name to npred for the previous weights file
npred_threshold : float
Minimum value of npred above which to consider sources changed
frac_threshold : float
Value of npred_old / npred_new above which to consider sources unchanged
Returns
-------
l : list
Names of 'unchanged' sources
"""
o = []
for s in src_list:
npred_new = npred_dict_new[s]
if npred_new < npred_threshold:
o += [s]
continue
if npred_dict_old is None:
npred_old = 0.
else:
npred_old = npred_dict_old[s]
frac = npred_old / npred_new
if frac > frac_threshold:
o += [s]
return o | python | def get_unchanged(src_list, npred_dict_new,
npred_dict_old,
npred_threshold=1e4,
frac_threshold=0.9):
"""Compare two dictionarys of npreds, and get the list of sources
than have changed less that set thresholds
Parameters
----------
src_list : list
List of sources to examine
npred_dict_new : dict
Dictionary mapping source name to npred for the current weights file
npred_dict_old : dict
Dictionary mapping source name to npred for the previous weights file
npred_threshold : float
Minimum value of npred above which to consider sources changed
frac_threshold : float
Value of npred_old / npred_new above which to consider sources unchanged
Returns
-------
l : list
Names of 'unchanged' sources
"""
o = []
for s in src_list:
npred_new = npred_dict_new[s]
if npred_new < npred_threshold:
o += [s]
continue
if npred_dict_old is None:
npred_old = 0.
else:
npred_old = npred_dict_old[s]
frac = npred_old / npred_new
if frac > frac_threshold:
o += [s]
return o | [
"def",
"get_unchanged",
"(",
"src_list",
",",
"npred_dict_new",
",",
"npred_dict_old",
",",
"npred_threshold",
"=",
"1e4",
",",
"frac_threshold",
"=",
"0.9",
")",
":",
"o",
"=",
"[",
"]",
"for",
"s",
"in",
"src_list",
":",
"npred_new",
"=",
"npred_dict_new",... | Compare two dictionarys of npreds, and get the list of sources
than have changed less that set thresholds
Parameters
----------
src_list : list
List of sources to examine
npred_dict_new : dict
Dictionary mapping source name to npred for the current weights file
npred_dict_old : dict
Dictionary mapping source name to npred for the previous weights file
npred_threshold : float
Minimum value of npred above which to consider sources changed
frac_threshold : float
Value of npred_old / npred_new above which to consider sources unchanged
Returns
-------
l : list
Names of 'unchanged' sources | [
"Compare",
"two",
"dictionarys",
"of",
"npreds",
"and",
"get",
"the",
"list",
"of",
"sources",
"than",
"have",
"changed",
"less",
"that",
"set",
"thresholds"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/fitting.py#L145-L191 | train | 35,806 |
fermiPy/fermipy | fermipy/jobs/native_impl.py | NativeInterface.dispatch_job_hook | def dispatch_job_hook(self, link, key, job_config, logfile, stream=sys.stdout):
"""Send a single job to be executed
Parameters
----------
link : `fermipy.jobs.chain.Link`
The link used to invoke the command we are running
key : str
A string that identifies this particular instance of the job
job_config : dict
A dictionrary with the arguments for the job. Used with
the self._command_template job template
logfile : str
The logfile for this job, may be used to check for success/ failure
"""
full_sub_dict = job_config.copy()
full_command = "%s >& %s" % (
link.command_template().format(**full_sub_dict), logfile)
logdir = os.path.dirname(logfile)
if self._dry_run:
sys.stdout.write("%s\n" % full_command)
else:
try:
os.makedirs(logdir)
except OSError:
pass
os.system(full_command) | python | def dispatch_job_hook(self, link, key, job_config, logfile, stream=sys.stdout):
"""Send a single job to be executed
Parameters
----------
link : `fermipy.jobs.chain.Link`
The link used to invoke the command we are running
key : str
A string that identifies this particular instance of the job
job_config : dict
A dictionrary with the arguments for the job. Used with
the self._command_template job template
logfile : str
The logfile for this job, may be used to check for success/ failure
"""
full_sub_dict = job_config.copy()
full_command = "%s >& %s" % (
link.command_template().format(**full_sub_dict), logfile)
logdir = os.path.dirname(logfile)
if self._dry_run:
sys.stdout.write("%s\n" % full_command)
else:
try:
os.makedirs(logdir)
except OSError:
pass
os.system(full_command) | [
"def",
"dispatch_job_hook",
"(",
"self",
",",
"link",
",",
"key",
",",
"job_config",
",",
"logfile",
",",
"stream",
"=",
"sys",
".",
"stdout",
")",
":",
"full_sub_dict",
"=",
"job_config",
".",
"copy",
"(",
")",
"full_command",
"=",
"\"%s >& %s\"",
"%",
... | Send a single job to be executed
Parameters
----------
link : `fermipy.jobs.chain.Link`
The link used to invoke the command we are running
key : str
A string that identifies this particular instance of the job
job_config : dict
A dictionrary with the arguments for the job. Used with
the self._command_template job template
logfile : str
The logfile for this job, may be used to check for success/ failure | [
"Send",
"a",
"single",
"job",
"to",
"be",
"executed"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/native_impl.py#L39-L72 | train | 35,807 |
fermiPy/fermipy | fermipy/logger.py | log_level | def log_level(level):
"""This is a function that returns a python like
level from a HEASOFT like level.
"""
levels_dict = {0: 50,
1: 40,
2: 30,
3: 20,
4: 10}
if not isinstance(level, int):
level = int(level)
if level > 4:
level = 4
return levels_dict[level] | python | def log_level(level):
"""This is a function that returns a python like
level from a HEASOFT like level.
"""
levels_dict = {0: 50,
1: 40,
2: 30,
3: 20,
4: 10}
if not isinstance(level, int):
level = int(level)
if level > 4:
level = 4
return levels_dict[level] | [
"def",
"log_level",
"(",
"level",
")",
":",
"levels_dict",
"=",
"{",
"0",
":",
"50",
",",
"1",
":",
"40",
",",
"2",
":",
"30",
",",
"3",
":",
"20",
",",
"4",
":",
"10",
"}",
"if",
"not",
"isinstance",
"(",
"level",
",",
"int",
")",
":",
"le... | This is a function that returns a python like
level from a HEASOFT like level. | [
"This",
"is",
"a",
"function",
"that",
"returns",
"a",
"python",
"like",
"level",
"from",
"a",
"HEASOFT",
"like",
"level",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/logger.py#L11-L28 | train | 35,808 |
fermiPy/fermipy | fermipy/logger.py | Logger.setup | def setup(config=None, logfile=None):
"""This method sets up the default configuration of the
logger. Once this method is called all subsequent instances
Logger instances will inherit this configuration."""
if config is None:
configpath = os.path.join(fermipy.PACKAGE_ROOT, 'config',
'logging.yaml')
with open(configpath, 'r') as f:
config = yaml.load(f)
# Update configuration
if logfile:
for name, h in config['handlers'].items():
if 'file_handler' in name:
config['handlers'][name]['filename'] = logfile
logging.config.dictConfig(config) | python | def setup(config=None, logfile=None):
"""This method sets up the default configuration of the
logger. Once this method is called all subsequent instances
Logger instances will inherit this configuration."""
if config is None:
configpath = os.path.join(fermipy.PACKAGE_ROOT, 'config',
'logging.yaml')
with open(configpath, 'r') as f:
config = yaml.load(f)
# Update configuration
if logfile:
for name, h in config['handlers'].items():
if 'file_handler' in name:
config['handlers'][name]['filename'] = logfile
logging.config.dictConfig(config) | [
"def",
"setup",
"(",
"config",
"=",
"None",
",",
"logfile",
"=",
"None",
")",
":",
"if",
"config",
"is",
"None",
":",
"configpath",
"=",
"os",
".",
"path",
".",
"join",
"(",
"fermipy",
".",
"PACKAGE_ROOT",
",",
"'config'",
",",
"'logging.yaml'",
")",
... | This method sets up the default configuration of the
logger. Once this method is called all subsequent instances
Logger instances will inherit this configuration. | [
"This",
"method",
"sets",
"up",
"the",
"default",
"configuration",
"of",
"the",
"logger",
".",
"Once",
"this",
"method",
"is",
"called",
"all",
"subsequent",
"instances",
"Logger",
"instances",
"will",
"inherit",
"this",
"configuration",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/logger.py#L36-L53 | train | 35,809 |
fermiPy/fermipy | fermipy/logger.py | Logger.configure | def configure(name, logfile, loglevel=logging.DEBUG):
"""Create a python logger instance and configure it.
Parameters
----------
name : str
Logger name.
logfile : str
Path to the log file.
loglevel : int
Default log level for STDOUT.
"""
# logging.config.dictConfig({
# 'version': 1,
# 'disable_existing_loggers': False})
logger = logging.getLogger(name)
# Don't propagate to root logger
logger.propagate = False
logger.setLevel(logging.DEBUG)
datefmt = '%Y-%m-%d %H:%M:%S'
format_stream = ('%(asctime)s %(levelname)-8s'
'%(name)s.%(funcName)s(): %(message)s')
format_file = ('%(asctime)s %(levelname)-8s'
'%(name)s.%(funcName)s(): %(message)s')
# format_file = ('%(asctime)s %(levelname)-8s '
# '%(name)s.%(funcName)s() '
# '[%(filename)s:%(lineno)d]: %(message)s')
if not logger.handlers:
# Add a file handler
if logfile is not None:
logfile = logfile.replace('.log', '') + '.log'
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter(format_file, datefmt))
logger.addHandler(fh)
# Add a stream handler
ch = logging.StreamHandler()
ch.setLevel(loglevel)
ch.setFormatter(logging.Formatter(format_stream, datefmt))
logger.addHandler(ch)
else:
logger.handlers[-1].setLevel(loglevel)
return logger | python | def configure(name, logfile, loglevel=logging.DEBUG):
"""Create a python logger instance and configure it.
Parameters
----------
name : str
Logger name.
logfile : str
Path to the log file.
loglevel : int
Default log level for STDOUT.
"""
# logging.config.dictConfig({
# 'version': 1,
# 'disable_existing_loggers': False})
logger = logging.getLogger(name)
# Don't propagate to root logger
logger.propagate = False
logger.setLevel(logging.DEBUG)
datefmt = '%Y-%m-%d %H:%M:%S'
format_stream = ('%(asctime)s %(levelname)-8s'
'%(name)s.%(funcName)s(): %(message)s')
format_file = ('%(asctime)s %(levelname)-8s'
'%(name)s.%(funcName)s(): %(message)s')
# format_file = ('%(asctime)s %(levelname)-8s '
# '%(name)s.%(funcName)s() '
# '[%(filename)s:%(lineno)d]: %(message)s')
if not logger.handlers:
# Add a file handler
if logfile is not None:
logfile = logfile.replace('.log', '') + '.log'
fh = logging.FileHandler(logfile)
fh.setLevel(logging.DEBUG)
fh.setFormatter(logging.Formatter(format_file, datefmt))
logger.addHandler(fh)
# Add a stream handler
ch = logging.StreamHandler()
ch.setLevel(loglevel)
ch.setFormatter(logging.Formatter(format_stream, datefmt))
logger.addHandler(ch)
else:
logger.handlers[-1].setLevel(loglevel)
return logger | [
"def",
"configure",
"(",
"name",
",",
"logfile",
",",
"loglevel",
"=",
"logging",
".",
"DEBUG",
")",
":",
"# logging.config.dictConfig({",
"# 'version': 1,",
"# 'disable_existing_loggers': False})",
"logger",
"=",
"logging",
".",
"getLog... | Create a python logger instance and configure it.
Parameters
----------
name : str
Logger name.
logfile : str
Path to the log file.
loglevel : int
Default log level for STDOUT. | [
"Create",
"a",
"python",
"logger",
"instance",
"and",
"configure",
"it",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/logger.py#L62-L114 | train | 35,810 |
fermiPy/fermipy | fermipy/jobs/link.py | extract_arguments | def extract_arguments(args, defaults):
"""Extract a set of arguments from a large dictionary
Parameters
----------
args : dict
Dictionary with the arguments values to use
defaults : dict
Dictionary with all the argument to extract, and default values for each
Returns
-------
out_dict : dict
A dictionary with only the extracted arguments
"""
out_dict = convert_option_dict_to_dict(defaults)
for key in defaults.keys():
mapped_val = args.get(key, None)
if mapped_val is None:
pass
else:
out_dict[key] = mapped_val
return out_dict | python | def extract_arguments(args, defaults):
"""Extract a set of arguments from a large dictionary
Parameters
----------
args : dict
Dictionary with the arguments values to use
defaults : dict
Dictionary with all the argument to extract, and default values for each
Returns
-------
out_dict : dict
A dictionary with only the extracted arguments
"""
out_dict = convert_option_dict_to_dict(defaults)
for key in defaults.keys():
mapped_val = args.get(key, None)
if mapped_val is None:
pass
else:
out_dict[key] = mapped_val
return out_dict | [
"def",
"extract_arguments",
"(",
"args",
",",
"defaults",
")",
":",
"out_dict",
"=",
"convert_option_dict_to_dict",
"(",
"defaults",
")",
"for",
"key",
"in",
"defaults",
".",
"keys",
"(",
")",
":",
"mapped_val",
"=",
"args",
".",
"get",
"(",
"key",
",",
... | Extract a set of arguments from a large dictionary
Parameters
----------
args : dict
Dictionary with the arguments values to use
defaults : dict
Dictionary with all the argument to extract, and default values for each
Returns
-------
out_dict : dict
A dictionary with only the extracted arguments | [
"Extract",
"a",
"set",
"of",
"arguments",
"from",
"a",
"large",
"dictionary"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L24-L50 | train | 35,811 |
fermiPy/fermipy | fermipy/jobs/link.py | check_files | def check_files(filelist,
file_stage_manager=None,
return_found=True,
return_missing=True):
"""Check that all files in a list exist
Parameters
----------
filelist : list
The list of files we are checking for.
file_stage_manager : `fermipy.jobs.file_archive.FileStageManager`
A object that maps files to scratch space if needed.
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
"""
found = []
missing = []
none_count = 0
for fname in filelist:
if fname is None:
none_count += 1
continue
if fname[0] == '@':
fname = fname[1:]
if os.path.exists(fname):
found.append(fname)
continue
if os.path.exists(fname + '.gz'):
found.append(fname)
continue
if file_stage_manager is not None:
fname = file_stage_manager.get_scratch_path(fname)
if os.path.exists(fname):
found.append(fname)
continue
missing.append(fname)
if return_found and return_missing:
return found, missing
elif return_found:
return found
elif return_missing:
return missing
return None | python | def check_files(filelist,
file_stage_manager=None,
return_found=True,
return_missing=True):
"""Check that all files in a list exist
Parameters
----------
filelist : list
The list of files we are checking for.
file_stage_manager : `fermipy.jobs.file_archive.FileStageManager`
A object that maps files to scratch space if needed.
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
"""
found = []
missing = []
none_count = 0
for fname in filelist:
if fname is None:
none_count += 1
continue
if fname[0] == '@':
fname = fname[1:]
if os.path.exists(fname):
found.append(fname)
continue
if os.path.exists(fname + '.gz'):
found.append(fname)
continue
if file_stage_manager is not None:
fname = file_stage_manager.get_scratch_path(fname)
if os.path.exists(fname):
found.append(fname)
continue
missing.append(fname)
if return_found and return_missing:
return found, missing
elif return_found:
return found
elif return_missing:
return missing
return None | [
"def",
"check_files",
"(",
"filelist",
",",
"file_stage_manager",
"=",
"None",
",",
"return_found",
"=",
"True",
",",
"return_missing",
"=",
"True",
")",
":",
"found",
"=",
"[",
"]",
"missing",
"=",
"[",
"]",
"none_count",
"=",
"0",
"for",
"fname",
"in",... | Check that all files in a list exist
Parameters
----------
filelist : list
The list of files we are checking for.
file_stage_manager : `fermipy.jobs.file_archive.FileStageManager`
A object that maps files to scratch space if needed.
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None` | [
"Check",
"that",
"all",
"files",
"in",
"a",
"list",
"exist"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L53-L111 | train | 35,812 |
fermiPy/fermipy | fermipy/jobs/link.py | add_argument | def add_argument(parser, dest, info):
""" Add an argument to an `argparse.ArgumentParser` object
Parameters
----------
parser : `argparse.ArgumentParser`
The parser in question
dest : str
The destination for the argument
info : `tuple`
The information associated with the argument in question.
"""
default, helpstr, typeinfo = info
if dest == 'args':
parser.add_argument('args', nargs='+', default=None, help=helpstr)
elif typeinfo == list:
parser.add_argument('--%s' % dest, action='append', help=helpstr)
elif typeinfo == bool:
parser.add_argument('--%s' % dest, action='store_true', help=helpstr)
else:
parser.add_argument('--%s' % dest, action='store', type=typeinfo,
default=default, help=helpstr) | python | def add_argument(parser, dest, info):
""" Add an argument to an `argparse.ArgumentParser` object
Parameters
----------
parser : `argparse.ArgumentParser`
The parser in question
dest : str
The destination for the argument
info : `tuple`
The information associated with the argument in question.
"""
default, helpstr, typeinfo = info
if dest == 'args':
parser.add_argument('args', nargs='+', default=None, help=helpstr)
elif typeinfo == list:
parser.add_argument('--%s' % dest, action='append', help=helpstr)
elif typeinfo == bool:
parser.add_argument('--%s' % dest, action='store_true', help=helpstr)
else:
parser.add_argument('--%s' % dest, action='store', type=typeinfo,
default=default, help=helpstr) | [
"def",
"add_argument",
"(",
"parser",
",",
"dest",
",",
"info",
")",
":",
"default",
",",
"helpstr",
",",
"typeinfo",
"=",
"info",
"if",
"dest",
"==",
"'args'",
":",
"parser",
".",
"add_argument",
"(",
"'args'",
",",
"nargs",
"=",
"'+'",
",",
"default"... | Add an argument to an `argparse.ArgumentParser` object
Parameters
----------
parser : `argparse.ArgumentParser`
The parser in question
dest : str
The destination for the argument
info : `tuple`
The information associated with the argument in question. | [
"Add",
"an",
"argument",
"to",
"an",
"argparse",
".",
"ArgumentParser",
"object"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L114-L139 | train | 35,813 |
fermiPy/fermipy | fermipy/jobs/link.py | convert_dict_to_option_dict | def convert_dict_to_option_dict(input_dict):
"""Convert a simple key-value dictionary to a dictionary of options tuples"""
ret_dict = {}
for key, value in input_dict.items():
ret_dict[key] = convert_value_to_option_tuple(value)
return ret_dict | python | def convert_dict_to_option_dict(input_dict):
"""Convert a simple key-value dictionary to a dictionary of options tuples"""
ret_dict = {}
for key, value in input_dict.items():
ret_dict[key] = convert_value_to_option_tuple(value)
return ret_dict | [
"def",
"convert_dict_to_option_dict",
"(",
"input_dict",
")",
":",
"ret_dict",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"input_dict",
".",
"items",
"(",
")",
":",
"ret_dict",
"[",
"key",
"]",
"=",
"convert_value_to_option_tuple",
"(",
"value",
")",
... | Convert a simple key-value dictionary to a dictionary of options tuples | [
"Convert",
"a",
"simple",
"key",
"-",
"value",
"dictionary",
"to",
"a",
"dictionary",
"of",
"options",
"tuples"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L164-L169 | train | 35,814 |
fermiPy/fermipy | fermipy/jobs/link.py | convert_option_dict_to_dict | def convert_option_dict_to_dict(option_dict):
"""Convert a dictionary of options tuples to a simple key-value dictionary"""
ret_dict = {}
for key, value in option_dict.items():
if is_null(value):
ret_dict[key] = None
elif isinstance(value, tuple):
ret_dict[key] = value[0]
else:
ret_dict[key] = value
return ret_dict | python | def convert_option_dict_to_dict(option_dict):
"""Convert a dictionary of options tuples to a simple key-value dictionary"""
ret_dict = {}
for key, value in option_dict.items():
if is_null(value):
ret_dict[key] = None
elif isinstance(value, tuple):
ret_dict[key] = value[0]
else:
ret_dict[key] = value
return ret_dict | [
"def",
"convert_option_dict_to_dict",
"(",
"option_dict",
")",
":",
"ret_dict",
"=",
"{",
"}",
"for",
"key",
",",
"value",
"in",
"option_dict",
".",
"items",
"(",
")",
":",
"if",
"is_null",
"(",
"value",
")",
":",
"ret_dict",
"[",
"key",
"]",
"=",
"Non... | Convert a dictionary of options tuples to a simple key-value dictionary | [
"Convert",
"a",
"dictionary",
"of",
"options",
"tuples",
"to",
"a",
"simple",
"key",
"-",
"value",
"dictionary"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L172-L182 | train | 35,815 |
fermiPy/fermipy | fermipy/jobs/link.py | reduce_by_keys | def reduce_by_keys(orig_dict, keys, default=None):
"""Reduce a dictionary by selecting a set of keys """
ret = {}
for key in keys:
ret[key] = orig_dict.get(key, default)
return ret | python | def reduce_by_keys(orig_dict, keys, default=None):
"""Reduce a dictionary by selecting a set of keys """
ret = {}
for key in keys:
ret[key] = orig_dict.get(key, default)
return ret | [
"def",
"reduce_by_keys",
"(",
"orig_dict",
",",
"keys",
",",
"default",
"=",
"None",
")",
":",
"ret",
"=",
"{",
"}",
"for",
"key",
"in",
"keys",
":",
"ret",
"[",
"key",
"]",
"=",
"orig_dict",
".",
"get",
"(",
"key",
",",
"default",
")",
"return",
... | Reduce a dictionary by selecting a set of keys | [
"Reduce",
"a",
"dictionary",
"by",
"selecting",
"a",
"set",
"of",
"keys"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L184-L189 | train | 35,816 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.construct_docstring | def construct_docstring(options):
"""Construct a docstring for a set of options"""
s = "\nParameters\n"
s += "----------\n\n"
for key, opt in options.items():
s += "%s : %s\n %s [%s]\n" % (key, str(opt[2]),
str(opt[1]), str(opt[0]))
return s | python | def construct_docstring(options):
"""Construct a docstring for a set of options"""
s = "\nParameters\n"
s += "----------\n\n"
for key, opt in options.items():
s += "%s : %s\n %s [%s]\n" % (key, str(opt[2]),
str(opt[1]), str(opt[0]))
return s | [
"def",
"construct_docstring",
"(",
"options",
")",
":",
"s",
"=",
"\"\\nParameters\\n\"",
"s",
"+=",
"\"----------\\n\\n\"",
"for",
"key",
",",
"opt",
"in",
"options",
".",
"items",
"(",
")",
":",
"s",
"+=",
"\"%s : %s\\n %s [%s]\\n\"",
"%",
"(",
"key",
... | Construct a docstring for a set of options | [
"Construct",
"a",
"docstring",
"for",
"a",
"set",
"of",
"options"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L292-L299 | train | 35,817 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.register_class | def register_class(cls):
"""Regsiter this class in the `LinkFactory` """
if cls.appname in LinkFactory._class_dict:
return
LinkFactory.register(cls.appname, cls) | python | def register_class(cls):
"""Regsiter this class in the `LinkFactory` """
if cls.appname in LinkFactory._class_dict:
return
LinkFactory.register(cls.appname, cls) | [
"def",
"register_class",
"(",
"cls",
")",
":",
"if",
"cls",
".",
"appname",
"in",
"LinkFactory",
".",
"_class_dict",
":",
"return",
"LinkFactory",
".",
"register",
"(",
"cls",
".",
"appname",
",",
"cls",
")"
] | Regsiter this class in the `LinkFactory` | [
"Regsiter",
"this",
"class",
"in",
"the",
"LinkFactory"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L313-L317 | train | 35,818 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._fill_argparser | def _fill_argparser(self, parser):
"""Fill an `argparser.ArgumentParser` with the options from this chain
"""
for key, val in self._options.items():
add_argument(parser, key, val) | python | def _fill_argparser(self, parser):
"""Fill an `argparser.ArgumentParser` with the options from this chain
"""
for key, val in self._options.items():
add_argument(parser, key, val) | [
"def",
"_fill_argparser",
"(",
"self",
",",
"parser",
")",
":",
"for",
"key",
",",
"val",
"in",
"self",
".",
"_options",
".",
"items",
"(",
")",
":",
"add_argument",
"(",
"parser",
",",
"key",
",",
"val",
")"
] | Fill an `argparser.ArgumentParser` with the options from this chain | [
"Fill",
"an",
"argparser",
".",
"ArgumentParser",
"with",
"the",
"options",
"from",
"this",
"chain"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L346-L350 | train | 35,819 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._run_argparser | def _run_argparser(self, argv):
"""Initialize a link with a set of arguments using an `argparser.ArgumentParser`
"""
if self._parser is None:
raise ValueError('Link was not given a parser on initialization')
args = self._parser.parse_args(argv)
self.update_args(args.__dict__)
return args | python | def _run_argparser(self, argv):
"""Initialize a link with a set of arguments using an `argparser.ArgumentParser`
"""
if self._parser is None:
raise ValueError('Link was not given a parser on initialization')
args = self._parser.parse_args(argv)
self.update_args(args.__dict__)
return args | [
"def",
"_run_argparser",
"(",
"self",
",",
"argv",
")",
":",
"if",
"self",
".",
"_parser",
"is",
"None",
":",
"raise",
"ValueError",
"(",
"'Link was not given a parser on initialization'",
")",
"args",
"=",
"self",
".",
"_parser",
".",
"parse_args",
"(",
"argv... | Initialize a link with a set of arguments using an `argparser.ArgumentParser` | [
"Initialize",
"a",
"link",
"with",
"a",
"set",
"of",
"arguments",
"using",
"an",
"argparser",
".",
"ArgumentParser"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L352-L359 | train | 35,820 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._update_sub_file_dict | def _update_sub_file_dict(self, sub_files):
"""Update a file dict with information from self"""
sub_files.file_dict.clear()
for job_details in self.jobs.values():
if job_details.file_dict is not None:
sub_files.update(job_details.file_dict)
if job_details.sub_file_dict is not None:
sub_files.update(job_details.sub_file_dict) | python | def _update_sub_file_dict(self, sub_files):
"""Update a file dict with information from self"""
sub_files.file_dict.clear()
for job_details in self.jobs.values():
if job_details.file_dict is not None:
sub_files.update(job_details.file_dict)
if job_details.sub_file_dict is not None:
sub_files.update(job_details.sub_file_dict) | [
"def",
"_update_sub_file_dict",
"(",
"self",
",",
"sub_files",
")",
":",
"sub_files",
".",
"file_dict",
".",
"clear",
"(",
")",
"for",
"job_details",
"in",
"self",
".",
"jobs",
".",
"values",
"(",
")",
":",
"if",
"job_details",
".",
"file_dict",
"is",
"n... | Update a file dict with information from self | [
"Update",
"a",
"file",
"dict",
"with",
"information",
"from",
"self"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L368-L375 | train | 35,821 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._pre_run_checks | def _pre_run_checks(self, stream=sys.stdout, dry_run=False):
"""Do some checks before running this link
This checks if input and output files are present.
If input files are missing this will raise `OSError` if dry_run is False
If all output files are present this return False.
Parameters
-----------
stream : `file`
Stream that this function will print to,
Must have 'write' function
dry_run : bool
Print command but do not run it
Returns
-------
status : bool
True if it is ok to proceed with running the link
"""
input_missing = self.check_input_files(return_found=False)
if input_missing:
if dry_run:
stream.write("Input files are missing: %s: %i\n" %
(self.linkname, len(input_missing)))
else:
print (self.args)
raise OSError("Input files are missing: %s" % input_missing)
output_found, output_missing = self.check_output_files()
if output_found and not output_missing:
stream.write("All output files for %s already exist: %i %i %i\n" %
(self.linkname, len(output_found),
len(output_missing), len(self.files.output_files)))
if dry_run:
pass
else:
pass
# return False
return True | python | def _pre_run_checks(self, stream=sys.stdout, dry_run=False):
"""Do some checks before running this link
This checks if input and output files are present.
If input files are missing this will raise `OSError` if dry_run is False
If all output files are present this return False.
Parameters
-----------
stream : `file`
Stream that this function will print to,
Must have 'write' function
dry_run : bool
Print command but do not run it
Returns
-------
status : bool
True if it is ok to proceed with running the link
"""
input_missing = self.check_input_files(return_found=False)
if input_missing:
if dry_run:
stream.write("Input files are missing: %s: %i\n" %
(self.linkname, len(input_missing)))
else:
print (self.args)
raise OSError("Input files are missing: %s" % input_missing)
output_found, output_missing = self.check_output_files()
if output_found and not output_missing:
stream.write("All output files for %s already exist: %i %i %i\n" %
(self.linkname, len(output_found),
len(output_missing), len(self.files.output_files)))
if dry_run:
pass
else:
pass
# return False
return True | [
"def",
"_pre_run_checks",
"(",
"self",
",",
"stream",
"=",
"sys",
".",
"stdout",
",",
"dry_run",
"=",
"False",
")",
":",
"input_missing",
"=",
"self",
".",
"check_input_files",
"(",
"return_found",
"=",
"False",
")",
"if",
"input_missing",
":",
"if",
"dry_... | Do some checks before running this link
This checks if input and output files are present.
If input files are missing this will raise `OSError` if dry_run is False
If all output files are present this return False.
Parameters
-----------
stream : `file`
Stream that this function will print to,
Must have 'write' function
dry_run : bool
Print command but do not run it
Returns
-------
status : bool
True if it is ok to proceed with running the link | [
"Do",
"some",
"checks",
"before",
"running",
"this",
"link"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L377-L419 | train | 35,822 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._create_job_details | def _create_job_details(self, key, job_config, logfile, status):
"""Create a `JobDetails` for a single job
Parameters
----------
key : str
Key used to identify this particular job
job_config : dict
Dictionary with arguements passed to this particular job
logfile : str
Name of the associated log file
status : int
Current status of the job
Returns
-------
job_details : `fermipy.jobs.JobDetails`
Object with the details about a particular job.
"""
self.update_args(job_config)
job_details = JobDetails(jobname=self.full_linkname,
jobkey=key,
appname=self.appname,
logfile=logfile,
job_config=job_config,
timestamp=get_timestamp(),
file_dict=copy.deepcopy(self.files),
sub_file_dict=copy.deepcopy(self.sub_files),
status=status)
return job_details | python | def _create_job_details(self, key, job_config, logfile, status):
"""Create a `JobDetails` for a single job
Parameters
----------
key : str
Key used to identify this particular job
job_config : dict
Dictionary with arguements passed to this particular job
logfile : str
Name of the associated log file
status : int
Current status of the job
Returns
-------
job_details : `fermipy.jobs.JobDetails`
Object with the details about a particular job.
"""
self.update_args(job_config)
job_details = JobDetails(jobname=self.full_linkname,
jobkey=key,
appname=self.appname,
logfile=logfile,
job_config=job_config,
timestamp=get_timestamp(),
file_dict=copy.deepcopy(self.files),
sub_file_dict=copy.deepcopy(self.sub_files),
status=status)
return job_details | [
"def",
"_create_job_details",
"(",
"self",
",",
"key",
",",
"job_config",
",",
"logfile",
",",
"status",
")",
":",
"self",
".",
"update_args",
"(",
"job_config",
")",
"job_details",
"=",
"JobDetails",
"(",
"jobname",
"=",
"self",
".",
"full_linkname",
",",
... | Create a `JobDetails` for a single job
Parameters
----------
key : str
Key used to identify this particular job
job_config : dict
Dictionary with arguements passed to this particular job
logfile : str
Name of the associated log file
status : int
Current status of the job
Returns
-------
job_details : `fermipy.jobs.JobDetails`
Object with the details about a particular job. | [
"Create",
"a",
"JobDetails",
"for",
"a",
"single",
"job"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L427-L461 | train | 35,823 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._map_scratch_files | def _map_scratch_files(self, file_dict):
"""Build and return the mapping for copying files to and from scratch area"""
if self._file_stage is None:
return ({}, {})
input_files = file_dict.input_files_to_stage
output_files = file_dict.output_files_to_stage
input_file_mapping = self._file_stage.map_files(input_files)
output_file_mapping = self._file_stage.map_files(output_files)
self._update_file_args(input_file_mapping)
self._update_file_args(output_file_mapping)
return input_file_mapping, output_file_mapping | python | def _map_scratch_files(self, file_dict):
"""Build and return the mapping for copying files to and from scratch area"""
if self._file_stage is None:
return ({}, {})
input_files = file_dict.input_files_to_stage
output_files = file_dict.output_files_to_stage
input_file_mapping = self._file_stage.map_files(input_files)
output_file_mapping = self._file_stage.map_files(output_files)
self._update_file_args(input_file_mapping)
self._update_file_args(output_file_mapping)
return input_file_mapping, output_file_mapping | [
"def",
"_map_scratch_files",
"(",
"self",
",",
"file_dict",
")",
":",
"if",
"self",
".",
"_file_stage",
"is",
"None",
":",
"return",
"(",
"{",
"}",
",",
"{",
"}",
")",
"input_files",
"=",
"file_dict",
".",
"input_files_to_stage",
"output_files",
"=",
"file... | Build and return the mapping for copying files to and from scratch area | [
"Build",
"and",
"return",
"the",
"mapping",
"for",
"copying",
"files",
"to",
"and",
"from",
"scratch",
"area"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L463-L473 | train | 35,824 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._update_file_args | def _update_file_args(self, file_mapping):
"""Adjust the arguments to deal with staging files to the scratch area"""
for key, value in self.args.items():
new_value = file_mapping.get(value, value)
if new_value != value:
self.args[key] = new_value | python | def _update_file_args(self, file_mapping):
"""Adjust the arguments to deal with staging files to the scratch area"""
for key, value in self.args.items():
new_value = file_mapping.get(value, value)
if new_value != value:
self.args[key] = new_value | [
"def",
"_update_file_args",
"(",
"self",
",",
"file_mapping",
")",
":",
"for",
"key",
",",
"value",
"in",
"self",
".",
"args",
".",
"items",
"(",
")",
":",
"new_value",
"=",
"file_mapping",
".",
"get",
"(",
"value",
",",
"value",
")",
"if",
"new_value"... | Adjust the arguments to deal with staging files to the scratch area | [
"Adjust",
"the",
"arguments",
"to",
"deal",
"with",
"staging",
"files",
"to",
"the",
"scratch",
"area"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L475-L480 | train | 35,825 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._stage_input_files | def _stage_input_files(self, file_mapping, dry_run=True):
"""Stage the input files to the scratch area and adjust the arguments accordingly"""
# print ("Staging input ", file_mapping)
if self._file_stage is None:
return
self._file_stage.copy_to_scratch(file_mapping, dry_run) | python | def _stage_input_files(self, file_mapping, dry_run=True):
"""Stage the input files to the scratch area and adjust the arguments accordingly"""
# print ("Staging input ", file_mapping)
if self._file_stage is None:
return
self._file_stage.copy_to_scratch(file_mapping, dry_run) | [
"def",
"_stage_input_files",
"(",
"self",
",",
"file_mapping",
",",
"dry_run",
"=",
"True",
")",
":",
"# print (\"Staging input \", file_mapping)",
"if",
"self",
".",
"_file_stage",
"is",
"None",
":",
"return",
"self",
".",
"_file_stage",
".",
"copy_to_scratch",
"... | Stage the input files to the scratch area and adjust the arguments accordingly | [
"Stage",
"the",
"input",
"files",
"to",
"the",
"scratch",
"area",
"and",
"adjust",
"the",
"arguments",
"accordingly"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L482-L487 | train | 35,826 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._stage_output_files | def _stage_output_files(self, file_mapping, dry_run=True):
"""Stage the output files to the scratch area and adjust the arguments accordingly"""
# print ("Staging output ", file_mapping)
if self._file_stage is None:
return
self._file_stage.copy_from_scratch(file_mapping, dry_run) | python | def _stage_output_files(self, file_mapping, dry_run=True):
"""Stage the output files to the scratch area and adjust the arguments accordingly"""
# print ("Staging output ", file_mapping)
if self._file_stage is None:
return
self._file_stage.copy_from_scratch(file_mapping, dry_run) | [
"def",
"_stage_output_files",
"(",
"self",
",",
"file_mapping",
",",
"dry_run",
"=",
"True",
")",
":",
"# print (\"Staging output \", file_mapping)",
"if",
"self",
".",
"_file_stage",
"is",
"None",
":",
"return",
"self",
".",
"_file_stage",
".",
"copy_from_scratch",... | Stage the output files to the scratch area and adjust the arguments accordingly | [
"Stage",
"the",
"output",
"files",
"to",
"the",
"scratch",
"area",
"and",
"adjust",
"the",
"arguments",
"accordingly"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L489-L494 | train | 35,827 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._register_job | def _register_job(self, key, job_config, logfile, status):
"""Create a `JobDetails` for this link
and add it to the self.jobs dictionary.
Parameters
----------
key : str
Key used to identify this particular job
job_config : dict
Dictionary with arguments passed to this particular job
logfile : str
Name of the associated log file
status : int
Current status of the job
Returns
-------
job_details : `fermipy.jobs.JobDetails`
Object with the details about this particular job.
"""
job_details = self._create_job_details(
key, job_config, logfile, status)
self.jobs[job_details.fullkey] = job_details
return job_details | python | def _register_job(self, key, job_config, logfile, status):
"""Create a `JobDetails` for this link
and add it to the self.jobs dictionary.
Parameters
----------
key : str
Key used to identify this particular job
job_config : dict
Dictionary with arguments passed to this particular job
logfile : str
Name of the associated log file
status : int
Current status of the job
Returns
-------
job_details : `fermipy.jobs.JobDetails`
Object with the details about this particular job.
"""
job_details = self._create_job_details(
key, job_config, logfile, status)
self.jobs[job_details.fullkey] = job_details
return job_details | [
"def",
"_register_job",
"(",
"self",
",",
"key",
",",
"job_config",
",",
"logfile",
",",
"status",
")",
":",
"job_details",
"=",
"self",
".",
"_create_job_details",
"(",
"key",
",",
"job_config",
",",
"logfile",
",",
"status",
")",
"self",
".",
"jobs",
"... | Create a `JobDetails` for this link
and add it to the self.jobs dictionary.
Parameters
----------
key : str
Key used to identify this particular job
job_config : dict
Dictionary with arguments passed to this particular job
logfile : str
Name of the associated log file
status : int
Current status of the job
Returns
-------
job_details : `fermipy.jobs.JobDetails`
Object with the details about this particular job. | [
"Create",
"a",
"JobDetails",
"for",
"this",
"link",
"and",
"add",
"it",
"to",
"the",
"self",
".",
"jobs",
"dictionary",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L549-L577 | train | 35,828 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._register_self | def _register_self(self, logfile, key=JobDetails.topkey, status=JobStatus.unknown):
"""Runs this link, captures output to logfile,
and records the job in self.jobs"""
fullkey = JobDetails.make_fullkey(self.full_linkname, key)
if fullkey in self.jobs:
job_details = self.jobs[fullkey]
job_details.status = status
else:
job_details = self._register_job(key, self.args, logfile, status) | python | def _register_self(self, logfile, key=JobDetails.topkey, status=JobStatus.unknown):
"""Runs this link, captures output to logfile,
and records the job in self.jobs"""
fullkey = JobDetails.make_fullkey(self.full_linkname, key)
if fullkey in self.jobs:
job_details = self.jobs[fullkey]
job_details.status = status
else:
job_details = self._register_job(key, self.args, logfile, status) | [
"def",
"_register_self",
"(",
"self",
",",
"logfile",
",",
"key",
"=",
"JobDetails",
".",
"topkey",
",",
"status",
"=",
"JobStatus",
".",
"unknown",
")",
":",
"fullkey",
"=",
"JobDetails",
".",
"make_fullkey",
"(",
"self",
".",
"full_linkname",
",",
"key",... | Runs this link, captures output to logfile,
and records the job in self.jobs | [
"Runs",
"this",
"link",
"captures",
"output",
"to",
"logfile",
"and",
"records",
"the",
"job",
"in",
"self",
".",
"jobs"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L579-L587 | train | 35,829 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._archive_self | def _archive_self(self, logfile, key=JobDetails.topkey, status=JobStatus.unknown):
"""Write info about a job run by this `Link` to the job archive"""
self._register_self(logfile, key, status)
if self._job_archive is None:
return
self._job_archive.register_jobs(self.get_jobs()) | python | def _archive_self(self, logfile, key=JobDetails.topkey, status=JobStatus.unknown):
"""Write info about a job run by this `Link` to the job archive"""
self._register_self(logfile, key, status)
if self._job_archive is None:
return
self._job_archive.register_jobs(self.get_jobs()) | [
"def",
"_archive_self",
"(",
"self",
",",
"logfile",
",",
"key",
"=",
"JobDetails",
".",
"topkey",
",",
"status",
"=",
"JobStatus",
".",
"unknown",
")",
":",
"self",
".",
"_register_self",
"(",
"logfile",
",",
"key",
",",
"status",
")",
"if",
"self",
"... | Write info about a job run by this `Link` to the job archive | [
"Write",
"info",
"about",
"a",
"job",
"run",
"by",
"this",
"Link",
"to",
"the",
"job",
"archive"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L589-L594 | train | 35,830 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._set_status_self | def _set_status_self(self, key=JobDetails.topkey, status=JobStatus.unknown):
"""Set the status of this job, both in self.jobs and
in the `JobArchive` if it is present. """
fullkey = JobDetails.make_fullkey(self.full_linkname, key)
if fullkey in self.jobs:
self.jobs[fullkey].status = status
if self._job_archive:
self._job_archive.register_job(self.jobs[fullkey])
else:
self._register_self('dummy.log', key, status) | python | def _set_status_self(self, key=JobDetails.topkey, status=JobStatus.unknown):
"""Set the status of this job, both in self.jobs and
in the `JobArchive` if it is present. """
fullkey = JobDetails.make_fullkey(self.full_linkname, key)
if fullkey in self.jobs:
self.jobs[fullkey].status = status
if self._job_archive:
self._job_archive.register_job(self.jobs[fullkey])
else:
self._register_self('dummy.log', key, status) | [
"def",
"_set_status_self",
"(",
"self",
",",
"key",
"=",
"JobDetails",
".",
"topkey",
",",
"status",
"=",
"JobStatus",
".",
"unknown",
")",
":",
"fullkey",
"=",
"JobDetails",
".",
"make_fullkey",
"(",
"self",
".",
"full_linkname",
",",
"key",
")",
"if",
... | Set the status of this job, both in self.jobs and
in the `JobArchive` if it is present. | [
"Set",
"the",
"status",
"of",
"this",
"job",
"both",
"in",
"self",
".",
"jobs",
"and",
"in",
"the",
"JobArchive",
"if",
"it",
"is",
"present",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L596-L605 | train | 35,831 |
fermiPy/fermipy | fermipy/jobs/link.py | Link._write_status_to_log | def _write_status_to_log(self, return_code, stream=sys.stdout):
"""Write the status of this job to a log stream.
This is used to check on job completion."""
stream.write("Timestamp: %i\n" % get_timestamp())
if return_code == 0:
stream.write("%s\n" % self._interface.string_successful)
else:
stream.write("%s %i\n" %
(self._interface.string_exited, return_code)) | python | def _write_status_to_log(self, return_code, stream=sys.stdout):
"""Write the status of this job to a log stream.
This is used to check on job completion."""
stream.write("Timestamp: %i\n" % get_timestamp())
if return_code == 0:
stream.write("%s\n" % self._interface.string_successful)
else:
stream.write("%s %i\n" %
(self._interface.string_exited, return_code)) | [
"def",
"_write_status_to_log",
"(",
"self",
",",
"return_code",
",",
"stream",
"=",
"sys",
".",
"stdout",
")",
":",
"stream",
".",
"write",
"(",
"\"Timestamp: %i\\n\"",
"%",
"get_timestamp",
"(",
")",
")",
"if",
"return_code",
"==",
"0",
":",
"stream",
"."... | Write the status of this job to a log stream.
This is used to check on job completion. | [
"Write",
"the",
"status",
"of",
"this",
"job",
"to",
"a",
"log",
"stream",
".",
"This",
"is",
"used",
"to",
"check",
"on",
"job",
"completion",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L607-L615 | train | 35,832 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.get_failed_jobs | def get_failed_jobs(self, fail_running=False, fail_pending=False):
"""Return a dictionary with the subset of jobs that are marked as failed
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
failed_jobs : dict
Dictionary mapping from job key to `JobDetails` for the failed jobs.
"""
failed_jobs = {}
for job_key, job_details in self.jobs.items():
if job_details.status == JobStatus.failed:
failed_jobs[job_key] = job_details
elif job_details.status == JobStatus.partial_failed:
failed_jobs[job_key] = job_details
elif fail_running and job_details.status == JobStatus.running:
failed_jobs[job_key] = job_details
elif fail_pending and job_details.status <= JobStatus.pending:
failed_jobs[job_key] = job_details
return failed_jobs | python | def get_failed_jobs(self, fail_running=False, fail_pending=False):
"""Return a dictionary with the subset of jobs that are marked as failed
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
failed_jobs : dict
Dictionary mapping from job key to `JobDetails` for the failed jobs.
"""
failed_jobs = {}
for job_key, job_details in self.jobs.items():
if job_details.status == JobStatus.failed:
failed_jobs[job_key] = job_details
elif job_details.status == JobStatus.partial_failed:
failed_jobs[job_key] = job_details
elif fail_running and job_details.status == JobStatus.running:
failed_jobs[job_key] = job_details
elif fail_pending and job_details.status <= JobStatus.pending:
failed_jobs[job_key] = job_details
return failed_jobs | [
"def",
"get_failed_jobs",
"(",
"self",
",",
"fail_running",
"=",
"False",
",",
"fail_pending",
"=",
"False",
")",
":",
"failed_jobs",
"=",
"{",
"}",
"for",
"job_key",
",",
"job_details",
"in",
"self",
".",
"jobs",
".",
"items",
"(",
")",
":",
"if",
"jo... | Return a dictionary with the subset of jobs that are marked as failed
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
failed_jobs : dict
Dictionary mapping from job key to `JobDetails` for the failed jobs. | [
"Return",
"a",
"dictionary",
"with",
"the",
"subset",
"of",
"jobs",
"that",
"are",
"marked",
"as",
"failed"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L648-L675 | train | 35,833 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.check_job_status | def check_job_status(self, key=JobDetails.topkey,
fail_running=False,
fail_pending=False,
force_check=False):
"""Check the status of a particular job
By default this checks the status of the top-level job, but
can by made to drill into the sub-jobs.
Parameters
----------
key : str
Key associated to the job in question
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
force_check : `bool`
Drill into status of individual jobs` instead of using top level job only
Returns
-------
status : `JobStatus`
Job status flag
"""
if key in self.jobs:
status = self.jobs[key].status
if status in [JobStatus.unknown, JobStatus.ready,
JobStatus.pending, JobStatus.running] or force_check:
status = self._interface.check_job(self.jobs[key])
if status == JobStatus.running and fail_running:
status = JobStatus.failed
if status == JobStatus.pending and fail_pending:
status = JobStatus.failed
self.jobs[key].status = status
if self._job_archive:
self._job_archive.register_job(self.jobs[key])
else:
status = JobStatus.no_job
return status | python | def check_job_status(self, key=JobDetails.topkey,
fail_running=False,
fail_pending=False,
force_check=False):
"""Check the status of a particular job
By default this checks the status of the top-level job, but
can by made to drill into the sub-jobs.
Parameters
----------
key : str
Key associated to the job in question
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
force_check : `bool`
Drill into status of individual jobs` instead of using top level job only
Returns
-------
status : `JobStatus`
Job status flag
"""
if key in self.jobs:
status = self.jobs[key].status
if status in [JobStatus.unknown, JobStatus.ready,
JobStatus.pending, JobStatus.running] or force_check:
status = self._interface.check_job(self.jobs[key])
if status == JobStatus.running and fail_running:
status = JobStatus.failed
if status == JobStatus.pending and fail_pending:
status = JobStatus.failed
self.jobs[key].status = status
if self._job_archive:
self._job_archive.register_job(self.jobs[key])
else:
status = JobStatus.no_job
return status | [
"def",
"check_job_status",
"(",
"self",
",",
"key",
"=",
"JobDetails",
".",
"topkey",
",",
"fail_running",
"=",
"False",
",",
"fail_pending",
"=",
"False",
",",
"force_check",
"=",
"False",
")",
":",
"if",
"key",
"in",
"self",
".",
"jobs",
":",
"status",... | Check the status of a particular job
By default this checks the status of the top-level job, but
can by made to drill into the sub-jobs.
Parameters
----------
key : str
Key associated to the job in question
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
force_check : `bool`
Drill into status of individual jobs` instead of using top level job only
Returns
-------
status : `JobStatus`
Job status flag | [
"Check",
"the",
"status",
"of",
"a",
"particular",
"job"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L677-L721 | train | 35,834 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.check_jobs_status | def check_jobs_status(self,
fail_running=False,
fail_pending=False):
"""Check the status of all the jobs run from this link
and return a status flag that summarizes that.
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
status : `JobStatus`
Job status flag that summarizes the status of all the jobs,
"""
n_failed = 0
n_partial = 0
n_passed = 0
n_total = 0
for job_details in self.jobs.values():
n_total += 1
if job_details.status in [JobStatus.failed, JobStatus.partial_failed]:
n_failed += 1
elif fail_running and job_details.status == JobStatus.running:
n_failed += 1
elif fail_pending and job_details.status == JobStatus.pending:
n_failed += 1
elif job_details.status == JobStatus.done:
n_passed += 1
if n_failed > 0:
return JobStatus.failed
elif n_passed == n_total:
return JobStatus.done
elif n_passed > 0:
return JobStatus.running
return JobStatus.pending | python | def check_jobs_status(self,
fail_running=False,
fail_pending=False):
"""Check the status of all the jobs run from this link
and return a status flag that summarizes that.
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
status : `JobStatus`
Job status flag that summarizes the status of all the jobs,
"""
n_failed = 0
n_partial = 0
n_passed = 0
n_total = 0
for job_details in self.jobs.values():
n_total += 1
if job_details.status in [JobStatus.failed, JobStatus.partial_failed]:
n_failed += 1
elif fail_running and job_details.status == JobStatus.running:
n_failed += 1
elif fail_pending and job_details.status == JobStatus.pending:
n_failed += 1
elif job_details.status == JobStatus.done:
n_passed += 1
if n_failed > 0:
return JobStatus.failed
elif n_passed == n_total:
return JobStatus.done
elif n_passed > 0:
return JobStatus.running
return JobStatus.pending | [
"def",
"check_jobs_status",
"(",
"self",
",",
"fail_running",
"=",
"False",
",",
"fail_pending",
"=",
"False",
")",
":",
"n_failed",
"=",
"0",
"n_partial",
"=",
"0",
"n_passed",
"=",
"0",
"n_total",
"=",
"0",
"for",
"job_details",
"in",
"self",
".",
"job... | Check the status of all the jobs run from this link
and return a status flag that summarizes that.
Parameters
----------
fail_running : `bool`
If True, consider running jobs as failed
fail_pending : `bool`
If True, consider pending jobs as failed
Returns
-------
status : `JobStatus`
Job status flag that summarizes the status of all the jobs, | [
"Check",
"the",
"status",
"of",
"all",
"the",
"jobs",
"run",
"from",
"this",
"link",
"and",
"return",
"a",
"status",
"flag",
"that",
"summarizes",
"that",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L723-L767 | train | 35,835 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.check_input_files | def check_input_files(self,
return_found=True,
return_missing=True):
"""Check if input files exist.
Parameters
----------
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
"""
all_input_files = self.files.chain_input_files + self.sub_files.chain_input_files
return check_files(all_input_files, self._file_stage,
return_found, return_missing) | python | def check_input_files(self,
return_found=True,
return_missing=True):
"""Check if input files exist.
Parameters
----------
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
"""
all_input_files = self.files.chain_input_files + self.sub_files.chain_input_files
return check_files(all_input_files, self._file_stage,
return_found, return_missing) | [
"def",
"check_input_files",
"(",
"self",
",",
"return_found",
"=",
"True",
",",
"return_missing",
"=",
"True",
")",
":",
"all_input_files",
"=",
"self",
".",
"files",
".",
"chain_input_files",
"+",
"self",
".",
"sub_files",
".",
"chain_input_files",
"return",
... | Check if input files exist.
Parameters
----------
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None` | [
"Check",
"if",
"input",
"files",
"exist",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L797-L821 | train | 35,836 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.check_output_files | def check_output_files(self,
return_found=True,
return_missing=True):
"""Check if output files exist.
Parameters
----------
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
"""
all_output_files = self.files.chain_output_files + \
self.sub_files.chain_output_files
return check_files(all_output_files, self._file_stage,
return_found, return_missing) | python | def check_output_files(self,
return_found=True,
return_missing=True):
"""Check if output files exist.
Parameters
----------
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None`
"""
all_output_files = self.files.chain_output_files + \
self.sub_files.chain_output_files
return check_files(all_output_files, self._file_stage,
return_found, return_missing) | [
"def",
"check_output_files",
"(",
"self",
",",
"return_found",
"=",
"True",
",",
"return_missing",
"=",
"True",
")",
":",
"all_output_files",
"=",
"self",
".",
"files",
".",
"chain_output_files",
"+",
"self",
".",
"sub_files",
".",
"chain_output_files",
"return"... | Check if output files exist.
Parameters
----------
return_found : list
A list with the paths of the files that were found.
return_missing : list
A list with the paths of the files that were missing.
Returns
-------
found : list
List of the found files, if requested, otherwise `None`
missing : list
List of the missing files, if requested, otherwise `None` | [
"Check",
"if",
"output",
"files",
"exist",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L823-L848 | train | 35,837 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.missing_output_files | def missing_output_files(self):
"""Make and return a dictionary of the missing output files.
This returns a dictionary mapping
filepath to list of links that produce the file as output.
"""
missing = self.check_output_files(return_found=False)
ret_dict = {}
for miss_file in missing:
ret_dict[miss_file] = [self.linkname]
return ret_dict | python | def missing_output_files(self):
"""Make and return a dictionary of the missing output files.
This returns a dictionary mapping
filepath to list of links that produce the file as output.
"""
missing = self.check_output_files(return_found=False)
ret_dict = {}
for miss_file in missing:
ret_dict[miss_file] = [self.linkname]
return ret_dict | [
"def",
"missing_output_files",
"(",
"self",
")",
":",
"missing",
"=",
"self",
".",
"check_output_files",
"(",
"return_found",
"=",
"False",
")",
"ret_dict",
"=",
"{",
"}",
"for",
"miss_file",
"in",
"missing",
":",
"ret_dict",
"[",
"miss_file",
"]",
"=",
"[... | Make and return a dictionary of the missing output files.
This returns a dictionary mapping
filepath to list of links that produce the file as output. | [
"Make",
"and",
"return",
"a",
"dictionary",
"of",
"the",
"missing",
"output",
"files",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L862-L872 | train | 35,838 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.formatted_command | def formatted_command(self):
"""Build and return the formatted command for this `Link`.
This is exactly the command as called from the Unix command line.
"""
# FIXME, this isn't really great as it force you to have all the arguments
command_template = self.command_template()
format_dict = self.args.copy()
for key, value in format_dict.items():
# protect whitespace
if isinstance(value, list):
outstr = ""
if key == 'args':
outkey = ""
else:
outkey = "--%s "
for lval in value:
outstr += ' '
outstr += outkey
outstr += lval
format_dict[key] = '"%s"' % outstr
elif isinstance(value, str) and value.find(' ') >= 0 and key != 'args':
format_dict[key] = '"%s"' % value
elif value is None:
format_dict[key] = 'none'
command = command_template.format(**format_dict)
return command | python | def formatted_command(self):
"""Build and return the formatted command for this `Link`.
This is exactly the command as called from the Unix command line.
"""
# FIXME, this isn't really great as it force you to have all the arguments
command_template = self.command_template()
format_dict = self.args.copy()
for key, value in format_dict.items():
# protect whitespace
if isinstance(value, list):
outstr = ""
if key == 'args':
outkey = ""
else:
outkey = "--%s "
for lval in value:
outstr += ' '
outstr += outkey
outstr += lval
format_dict[key] = '"%s"' % outstr
elif isinstance(value, str) and value.find(' ') >= 0 and key != 'args':
format_dict[key] = '"%s"' % value
elif value is None:
format_dict[key] = 'none'
command = command_template.format(**format_dict)
return command | [
"def",
"formatted_command",
"(",
"self",
")",
":",
"# FIXME, this isn't really great as it force you to have all the arguments",
"command_template",
"=",
"self",
".",
"command_template",
"(",
")",
"format_dict",
"=",
"self",
".",
"args",
".",
"copy",
"(",
")",
"for",
... | Build and return the formatted command for this `Link`.
This is exactly the command as called from the Unix command line. | [
"Build",
"and",
"return",
"the",
"formatted",
"command",
"for",
"this",
"Link",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L874-L902 | train | 35,839 |
fermiPy/fermipy | fermipy/jobs/link.py | Link.run_with_log | def run_with_log(self, dry_run=False, stage_files=True, resubmit_failed=False):
"""Runs this link with output sent to a pre-defined logfile
Parameters
-----------
dry_run : bool
Print command but do not run it.
stage_files : bool
Copy files to and from scratch staging area.
resubmit_failed : bool
Flag for sub-classes to resubmit failed jobs.
"""
fullkey = JobDetails.make_fullkey(self.full_linkname)
job_details = self.jobs[fullkey]
odir = os.path.dirname(job_details.logfile)
try:
os.makedirs(odir)
except OSError:
pass
ostream = open(job_details.logfile, 'w')
self.run(ostream, dry_run, stage_files, resubmit_failed) | python | def run_with_log(self, dry_run=False, stage_files=True, resubmit_failed=False):
"""Runs this link with output sent to a pre-defined logfile
Parameters
-----------
dry_run : bool
Print command but do not run it.
stage_files : bool
Copy files to and from scratch staging area.
resubmit_failed : bool
Flag for sub-classes to resubmit failed jobs.
"""
fullkey = JobDetails.make_fullkey(self.full_linkname)
job_details = self.jobs[fullkey]
odir = os.path.dirname(job_details.logfile)
try:
os.makedirs(odir)
except OSError:
pass
ostream = open(job_details.logfile, 'w')
self.run(ostream, dry_run, stage_files, resubmit_failed) | [
"def",
"run_with_log",
"(",
"self",
",",
"dry_run",
"=",
"False",
",",
"stage_files",
"=",
"True",
",",
"resubmit_failed",
"=",
"False",
")",
":",
"fullkey",
"=",
"JobDetails",
".",
"make_fullkey",
"(",
"self",
".",
"full_linkname",
")",
"job_details",
"=",
... | Runs this link with output sent to a pre-defined logfile
Parameters
-----------
dry_run : bool
Print command but do not run it.
stage_files : bool
Copy files to and from scratch staging area.
resubmit_failed : bool
Flag for sub-classes to resubmit failed jobs. | [
"Runs",
"this",
"link",
"with",
"output",
"sent",
"to",
"a",
"pre",
"-",
"defined",
"logfile"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/jobs/link.py#L959-L982 | train | 35,840 |
fermiPy/fermipy | fermipy/wcs_utils.py | create_wcs | def create_wcs(skydir, coordsys='CEL', projection='AIT',
cdelt=1.0, crpix=1., naxis=2, energies=None):
"""Create a WCS object.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinate of the WCS reference point.
coordsys : str
projection : str
cdelt : float or (float,float)
In the first case the same value is used for x and y axes
crpix : float or (float,float)
In the first case the same value is used for x and y axes
naxis : {2, 3}
Number of dimensions of the projection.
energies : array-like
Array of energies that defines the third dimension if naxis=3.
"""
w = WCS(naxis=naxis)
if coordsys == 'CEL':
w.wcs.ctype[0] = 'RA---%s' % (projection)
w.wcs.ctype[1] = 'DEC--%s' % (projection)
w.wcs.crval[0] = skydir.icrs.ra.deg
w.wcs.crval[1] = skydir.icrs.dec.deg
elif coordsys == 'GAL':
w.wcs.ctype[0] = 'GLON-%s' % (projection)
w.wcs.ctype[1] = 'GLAT-%s' % (projection)
w.wcs.crval[0] = skydir.galactic.l.deg
w.wcs.crval[1] = skydir.galactic.b.deg
else:
raise Exception('Unrecognized coordinate system.')
try:
w.wcs.crpix[0] = crpix[0]
w.wcs.crpix[1] = crpix[1]
except:
w.wcs.crpix[0] = crpix
w.wcs.crpix[1] = crpix
try:
w.wcs.cdelt[0] = cdelt[0]
w.wcs.cdelt[1] = cdelt[1]
except:
w.wcs.cdelt[0] = -cdelt
w.wcs.cdelt[1] = cdelt
w = WCS(w.to_header())
if naxis == 3 and energies is not None:
w.wcs.crpix[2] = 1
w.wcs.crval[2] = energies[0]
w.wcs.cdelt[2] = energies[1] - energies[0]
w.wcs.ctype[2] = 'Energy'
w.wcs.cunit[2] = 'MeV'
return w | python | def create_wcs(skydir, coordsys='CEL', projection='AIT',
cdelt=1.0, crpix=1., naxis=2, energies=None):
"""Create a WCS object.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinate of the WCS reference point.
coordsys : str
projection : str
cdelt : float or (float,float)
In the first case the same value is used for x and y axes
crpix : float or (float,float)
In the first case the same value is used for x and y axes
naxis : {2, 3}
Number of dimensions of the projection.
energies : array-like
Array of energies that defines the third dimension if naxis=3.
"""
w = WCS(naxis=naxis)
if coordsys == 'CEL':
w.wcs.ctype[0] = 'RA---%s' % (projection)
w.wcs.ctype[1] = 'DEC--%s' % (projection)
w.wcs.crval[0] = skydir.icrs.ra.deg
w.wcs.crval[1] = skydir.icrs.dec.deg
elif coordsys == 'GAL':
w.wcs.ctype[0] = 'GLON-%s' % (projection)
w.wcs.ctype[1] = 'GLAT-%s' % (projection)
w.wcs.crval[0] = skydir.galactic.l.deg
w.wcs.crval[1] = skydir.galactic.b.deg
else:
raise Exception('Unrecognized coordinate system.')
try:
w.wcs.crpix[0] = crpix[0]
w.wcs.crpix[1] = crpix[1]
except:
w.wcs.crpix[0] = crpix
w.wcs.crpix[1] = crpix
try:
w.wcs.cdelt[0] = cdelt[0]
w.wcs.cdelt[1] = cdelt[1]
except:
w.wcs.cdelt[0] = -cdelt
w.wcs.cdelt[1] = cdelt
w = WCS(w.to_header())
if naxis == 3 and energies is not None:
w.wcs.crpix[2] = 1
w.wcs.crval[2] = energies[0]
w.wcs.cdelt[2] = energies[1] - energies[0]
w.wcs.ctype[2] = 'Energy'
w.wcs.cunit[2] = 'MeV'
return w | [
"def",
"create_wcs",
"(",
"skydir",
",",
"coordsys",
"=",
"'CEL'",
",",
"projection",
"=",
"'AIT'",
",",
"cdelt",
"=",
"1.0",
",",
"crpix",
"=",
"1.",
",",
"naxis",
"=",
"2",
",",
"energies",
"=",
"None",
")",
":",
"w",
"=",
"WCS",
"(",
"naxis",
... | Create a WCS object.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky coordinate of the WCS reference point.
coordsys : str
projection : str
cdelt : float or (float,float)
In the first case the same value is used for x and y axes
crpix : float or (float,float)
In the first case the same value is used for x and y axes
naxis : {2, 3}
Number of dimensions of the projection.
energies : array-like
Array of energies that defines the third dimension if naxis=3. | [
"Create",
"a",
"WCS",
"object",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/wcs_utils.py#L123-L182 | train | 35,841 |
fermiPy/fermipy | fermipy/wcs_utils.py | wcs_add_energy_axis | def wcs_add_energy_axis(wcs, energies):
"""Copy a WCS object, and add on the energy axis.
Parameters
----------
wcs : `~astropy.wcs.WCS`
WCS
energies : array-like
Array of energies.
"""
if wcs.naxis != 2:
raise Exception(
'wcs_add_energy_axis, input WCS naxis != 2 %i' % wcs.naxis)
w = WCS(naxis=3)
w.wcs.crpix[0] = wcs.wcs.crpix[0]
w.wcs.crpix[1] = wcs.wcs.crpix[1]
w.wcs.ctype[0] = wcs.wcs.ctype[0]
w.wcs.ctype[1] = wcs.wcs.ctype[1]
w.wcs.crval[0] = wcs.wcs.crval[0]
w.wcs.crval[1] = wcs.wcs.crval[1]
w.wcs.cdelt[0] = wcs.wcs.cdelt[0]
w.wcs.cdelt[1] = wcs.wcs.cdelt[1]
w = WCS(w.to_header())
w.wcs.crpix[2] = 1
w.wcs.crval[2] = energies[0]
w.wcs.cdelt[2] = energies[1] - energies[0]
w.wcs.ctype[2] = 'Energy'
return w | python | def wcs_add_energy_axis(wcs, energies):
"""Copy a WCS object, and add on the energy axis.
Parameters
----------
wcs : `~astropy.wcs.WCS`
WCS
energies : array-like
Array of energies.
"""
if wcs.naxis != 2:
raise Exception(
'wcs_add_energy_axis, input WCS naxis != 2 %i' % wcs.naxis)
w = WCS(naxis=3)
w.wcs.crpix[0] = wcs.wcs.crpix[0]
w.wcs.crpix[1] = wcs.wcs.crpix[1]
w.wcs.ctype[0] = wcs.wcs.ctype[0]
w.wcs.ctype[1] = wcs.wcs.ctype[1]
w.wcs.crval[0] = wcs.wcs.crval[0]
w.wcs.crval[1] = wcs.wcs.crval[1]
w.wcs.cdelt[0] = wcs.wcs.cdelt[0]
w.wcs.cdelt[1] = wcs.wcs.cdelt[1]
w = WCS(w.to_header())
w.wcs.crpix[2] = 1
w.wcs.crval[2] = energies[0]
w.wcs.cdelt[2] = energies[1] - energies[0]
w.wcs.ctype[2] = 'Energy'
return w | [
"def",
"wcs_add_energy_axis",
"(",
"wcs",
",",
"energies",
")",
":",
"if",
"wcs",
".",
"naxis",
"!=",
"2",
":",
"raise",
"Exception",
"(",
"'wcs_add_energy_axis, input WCS naxis != 2 %i'",
"%",
"wcs",
".",
"naxis",
")",
"w",
"=",
"WCS",
"(",
"naxis",
"=",
... | Copy a WCS object, and add on the energy axis.
Parameters
----------
wcs : `~astropy.wcs.WCS`
WCS
energies : array-like
Array of energies. | [
"Copy",
"a",
"WCS",
"object",
"and",
"add",
"on",
"the",
"energy",
"axis",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/wcs_utils.py#L185-L212 | train | 35,842 |
fermiPy/fermipy | fermipy/wcs_utils.py | sky_to_offset | def sky_to_offset(skydir, lon, lat, coordsys='CEL', projection='AIT'):
"""Convert sky coordinates to a projected offset. This function
is the inverse of offset_to_sky."""
w = create_wcs(skydir, coordsys, projection)
skycrd = np.vstack((lon, lat)).T
if len(skycrd) == 0:
return skycrd
return w.wcs_world2pix(skycrd, 0) | python | def sky_to_offset(skydir, lon, lat, coordsys='CEL', projection='AIT'):
"""Convert sky coordinates to a projected offset. This function
is the inverse of offset_to_sky."""
w = create_wcs(skydir, coordsys, projection)
skycrd = np.vstack((lon, lat)).T
if len(skycrd) == 0:
return skycrd
return w.wcs_world2pix(skycrd, 0) | [
"def",
"sky_to_offset",
"(",
"skydir",
",",
"lon",
",",
"lat",
",",
"coordsys",
"=",
"'CEL'",
",",
"projection",
"=",
"'AIT'",
")",
":",
"w",
"=",
"create_wcs",
"(",
"skydir",
",",
"coordsys",
",",
"projection",
")",
"skycrd",
"=",
"np",
".",
"vstack",... | Convert sky coordinates to a projected offset. This function
is the inverse of offset_to_sky. | [
"Convert",
"sky",
"coordinates",
"to",
"a",
"projected",
"offset",
".",
"This",
"function",
"is",
"the",
"inverse",
"of",
"offset_to_sky",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/wcs_utils.py#L229-L239 | train | 35,843 |
fermiPy/fermipy | fermipy/wcs_utils.py | skydir_to_pix | def skydir_to_pix(skydir, wcs):
"""Convert skydir object to pixel coordinates.
Gracefully handles 0-d coordinate arrays.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
wcs : `~astropy.wcs.WCS`
Returns
-------
xp, yp : `numpy.ndarray`
The pixel coordinates
"""
if len(skydir.shape) > 0 and len(skydir) == 0:
return [np.empty(0), np.empty(0)]
return skydir.to_pixel(wcs, origin=0) | python | def skydir_to_pix(skydir, wcs):
"""Convert skydir object to pixel coordinates.
Gracefully handles 0-d coordinate arrays.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
wcs : `~astropy.wcs.WCS`
Returns
-------
xp, yp : `numpy.ndarray`
The pixel coordinates
"""
if len(skydir.shape) > 0 and len(skydir) == 0:
return [np.empty(0), np.empty(0)]
return skydir.to_pixel(wcs, origin=0) | [
"def",
"skydir_to_pix",
"(",
"skydir",
",",
"wcs",
")",
":",
"if",
"len",
"(",
"skydir",
".",
"shape",
")",
">",
"0",
"and",
"len",
"(",
"skydir",
")",
"==",
"0",
":",
"return",
"[",
"np",
".",
"empty",
"(",
"0",
")",
",",
"np",
".",
"empty",
... | Convert skydir object to pixel coordinates.
Gracefully handles 0-d coordinate arrays.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
wcs : `~astropy.wcs.WCS`
Returns
-------
xp, yp : `numpy.ndarray`
The pixel coordinates | [
"Convert",
"skydir",
"object",
"to",
"pixel",
"coordinates",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/wcs_utils.py#L254-L274 | train | 35,844 |
fermiPy/fermipy | fermipy/wcs_utils.py | pix_to_skydir | def pix_to_skydir(xpix, ypix, wcs):
"""Convert pixel coordinates to a skydir object.
Gracefully handles 0-d coordinate arrays.
Always returns a celestial coordinate.
Parameters
----------
xpix : `numpy.ndarray`
ypix : `numpy.ndarray`
wcs : `~astropy.wcs.WCS`
"""
xpix = np.array(xpix)
ypix = np.array(ypix)
if xpix.ndim > 0 and len(xpix) == 0:
return SkyCoord(np.empty(0), np.empty(0), unit='deg',
frame='icrs')
return SkyCoord.from_pixel(xpix, ypix, wcs,
origin=0).transform_to('icrs') | python | def pix_to_skydir(xpix, ypix, wcs):
"""Convert pixel coordinates to a skydir object.
Gracefully handles 0-d coordinate arrays.
Always returns a celestial coordinate.
Parameters
----------
xpix : `numpy.ndarray`
ypix : `numpy.ndarray`
wcs : `~astropy.wcs.WCS`
"""
xpix = np.array(xpix)
ypix = np.array(ypix)
if xpix.ndim > 0 and len(xpix) == 0:
return SkyCoord(np.empty(0), np.empty(0), unit='deg',
frame='icrs')
return SkyCoord.from_pixel(xpix, ypix, wcs,
origin=0).transform_to('icrs') | [
"def",
"pix_to_skydir",
"(",
"xpix",
",",
"ypix",
",",
"wcs",
")",
":",
"xpix",
"=",
"np",
".",
"array",
"(",
"xpix",
")",
"ypix",
"=",
"np",
".",
"array",
"(",
"ypix",
")",
"if",
"xpix",
".",
"ndim",
">",
"0",
"and",
"len",
"(",
"xpix",
")",
... | Convert pixel coordinates to a skydir object.
Gracefully handles 0-d coordinate arrays.
Always returns a celestial coordinate.
Parameters
----------
xpix : `numpy.ndarray`
ypix : `numpy.ndarray`
wcs : `~astropy.wcs.WCS` | [
"Convert",
"pixel",
"coordinates",
"to",
"a",
"skydir",
"object",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/wcs_utils.py#L277-L300 | train | 35,845 |
fermiPy/fermipy | fermipy/wcs_utils.py | wcs_to_axes | def wcs_to_axes(w, npix):
"""Generate a sequence of bin edge vectors corresponding to the
axes of a WCS object."""
npix = npix[::-1]
x = np.linspace(-(npix[0]) / 2., (npix[0]) / 2.,
npix[0] + 1) * np.abs(w.wcs.cdelt[0])
y = np.linspace(-(npix[1]) / 2., (npix[1]) / 2.,
npix[1] + 1) * np.abs(w.wcs.cdelt[1])
if w.wcs.naxis == 2:
return x, y
cdelt2 = np.log10((w.wcs.cdelt[2] + w.wcs.crval[2]) / w.wcs.crval[2])
z = (np.linspace(0, npix[2], npix[2] + 1)) * cdelt2
z += np.log10(w.wcs.crval[2])
return x, y, z | python | def wcs_to_axes(w, npix):
"""Generate a sequence of bin edge vectors corresponding to the
axes of a WCS object."""
npix = npix[::-1]
x = np.linspace(-(npix[0]) / 2., (npix[0]) / 2.,
npix[0] + 1) * np.abs(w.wcs.cdelt[0])
y = np.linspace(-(npix[1]) / 2., (npix[1]) / 2.,
npix[1] + 1) * np.abs(w.wcs.cdelt[1])
if w.wcs.naxis == 2:
return x, y
cdelt2 = np.log10((w.wcs.cdelt[2] + w.wcs.crval[2]) / w.wcs.crval[2])
z = (np.linspace(0, npix[2], npix[2] + 1)) * cdelt2
z += np.log10(w.wcs.crval[2])
return x, y, z | [
"def",
"wcs_to_axes",
"(",
"w",
",",
"npix",
")",
":",
"npix",
"=",
"npix",
"[",
":",
":",
"-",
"1",
"]",
"x",
"=",
"np",
".",
"linspace",
"(",
"-",
"(",
"npix",
"[",
"0",
"]",
")",
"/",
"2.",
",",
"(",
"npix",
"[",
"0",
"]",
")",
"/",
... | Generate a sequence of bin edge vectors corresponding to the
axes of a WCS object. | [
"Generate",
"a",
"sequence",
"of",
"bin",
"edge",
"vectors",
"corresponding",
"to",
"the",
"axes",
"of",
"a",
"WCS",
"object",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/wcs_utils.py#L353-L372 | train | 35,846 |
fermiPy/fermipy | fermipy/wcs_utils.py | get_cel_to_gal_angle | def get_cel_to_gal_angle(skydir):
"""Calculate the rotation angle in radians between the longitude
axes of a local projection in celestial and galactic coordinates.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Direction of projection center.
Returns
-------
angle : float
Rotation angle in radians.
"""
wcs0 = create_wcs(skydir, coordsys='CEL')
wcs1 = create_wcs(skydir, coordsys='GAL')
x, y = SkyCoord.to_pixel(SkyCoord.from_pixel(1.0, 0.0, wcs0), wcs1)
return np.arctan2(y, x) | python | def get_cel_to_gal_angle(skydir):
"""Calculate the rotation angle in radians between the longitude
axes of a local projection in celestial and galactic coordinates.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Direction of projection center.
Returns
-------
angle : float
Rotation angle in radians.
"""
wcs0 = create_wcs(skydir, coordsys='CEL')
wcs1 = create_wcs(skydir, coordsys='GAL')
x, y = SkyCoord.to_pixel(SkyCoord.from_pixel(1.0, 0.0, wcs0), wcs1)
return np.arctan2(y, x) | [
"def",
"get_cel_to_gal_angle",
"(",
"skydir",
")",
":",
"wcs0",
"=",
"create_wcs",
"(",
"skydir",
",",
"coordsys",
"=",
"'CEL'",
")",
"wcs1",
"=",
"create_wcs",
"(",
"skydir",
",",
"coordsys",
"=",
"'GAL'",
")",
"x",
",",
"y",
"=",
"SkyCoord",
".",
"to... | Calculate the rotation angle in radians between the longitude
axes of a local projection in celestial and galactic coordinates.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Direction of projection center.
Returns
-------
angle : float
Rotation angle in radians. | [
"Calculate",
"the",
"rotation",
"angle",
"in",
"radians",
"between",
"the",
"longitude",
"axes",
"of",
"a",
"local",
"projection",
"in",
"celestial",
"and",
"galactic",
"coordinates",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/wcs_utils.py#L407-L424 | train | 35,847 |
fermiPy/fermipy | fermipy/wcs_utils.py | extract_mapcube_region | def extract_mapcube_region(infile, skydir, outfile, maphdu=0):
"""Extract a region out of an all-sky mapcube file.
Parameters
----------
infile : str
Path to mapcube file.
skydir : `~astropy.coordinates.SkyCoord`
"""
h = fits.open(os.path.expandvars(infile))
npix = 200
shape = list(h[maphdu].data.shape)
shape[1] = 200
shape[2] = 200
wcs = WCS(h[maphdu].header)
skywcs = WCS(h[maphdu].header, naxis=[1, 2])
coordsys = get_coordsys(skywcs)
region_wcs = wcs.deepcopy()
if coordsys == 'CEL':
region_wcs.wcs.crval[0] = skydir.ra.deg
region_wcs.wcs.crval[1] = skydir.dec.deg
elif coordsys == 'GAL':
region_wcs.wcs.crval[0] = skydir.galactic.l.deg
region_wcs.wcs.crval[1] = skydir.galactic.b.deg
else:
raise Exception('Unrecognized coordinate system.')
region_wcs.wcs.crpix[0] = npix // 2 + 0.5
region_wcs.wcs.crpix[1] = npix // 2 + 0.5
from reproject import reproject_interp
data, footprint = reproject_interp(h, region_wcs.to_header(),
hdu_in=maphdu,
shape_out=shape)
hdu_image = fits.PrimaryHDU(data, header=region_wcs.to_header())
hdulist = fits.HDUList([hdu_image, h['ENERGIES']])
hdulist.writeto(outfile, clobber=True) | python | def extract_mapcube_region(infile, skydir, outfile, maphdu=0):
"""Extract a region out of an all-sky mapcube file.
Parameters
----------
infile : str
Path to mapcube file.
skydir : `~astropy.coordinates.SkyCoord`
"""
h = fits.open(os.path.expandvars(infile))
npix = 200
shape = list(h[maphdu].data.shape)
shape[1] = 200
shape[2] = 200
wcs = WCS(h[maphdu].header)
skywcs = WCS(h[maphdu].header, naxis=[1, 2])
coordsys = get_coordsys(skywcs)
region_wcs = wcs.deepcopy()
if coordsys == 'CEL':
region_wcs.wcs.crval[0] = skydir.ra.deg
region_wcs.wcs.crval[1] = skydir.dec.deg
elif coordsys == 'GAL':
region_wcs.wcs.crval[0] = skydir.galactic.l.deg
region_wcs.wcs.crval[1] = skydir.galactic.b.deg
else:
raise Exception('Unrecognized coordinate system.')
region_wcs.wcs.crpix[0] = npix // 2 + 0.5
region_wcs.wcs.crpix[1] = npix // 2 + 0.5
from reproject import reproject_interp
data, footprint = reproject_interp(h, region_wcs.to_header(),
hdu_in=maphdu,
shape_out=shape)
hdu_image = fits.PrimaryHDU(data, header=region_wcs.to_header())
hdulist = fits.HDUList([hdu_image, h['ENERGIES']])
hdulist.writeto(outfile, clobber=True) | [
"def",
"extract_mapcube_region",
"(",
"infile",
",",
"skydir",
",",
"outfile",
",",
"maphdu",
"=",
"0",
")",
":",
"h",
"=",
"fits",
".",
"open",
"(",
"os",
".",
"path",
".",
"expandvars",
"(",
"infile",
")",
")",
"npix",
"=",
"200",
"shape",
"=",
"... | Extract a region out of an all-sky mapcube file.
Parameters
----------
infile : str
Path to mapcube file.
skydir : `~astropy.coordinates.SkyCoord` | [
"Extract",
"a",
"region",
"out",
"of",
"an",
"all",
"-",
"sky",
"mapcube",
"file",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/wcs_utils.py#L448-L493 | train | 35,848 |
fermiPy/fermipy | fermipy/diffuse/utils.py | readlines | def readlines(arg):
"""Read lines from a file into a list.
Removes whitespace and lines that start with '#'
"""
fin = open(arg)
lines_in = fin.readlines()
fin.close()
lines_out = []
for line in lines_in:
line = line.strip()
if not line or line[0] == '#':
continue
lines_out.append(line)
return lines_out | python | def readlines(arg):
"""Read lines from a file into a list.
Removes whitespace and lines that start with '#'
"""
fin = open(arg)
lines_in = fin.readlines()
fin.close()
lines_out = []
for line in lines_in:
line = line.strip()
if not line or line[0] == '#':
continue
lines_out.append(line)
return lines_out | [
"def",
"readlines",
"(",
"arg",
")",
":",
"fin",
"=",
"open",
"(",
"arg",
")",
"lines_in",
"=",
"fin",
".",
"readlines",
"(",
")",
"fin",
".",
"close",
"(",
")",
"lines_out",
"=",
"[",
"]",
"for",
"line",
"in",
"lines_in",
":",
"line",
"=",
"line... | Read lines from a file into a list.
Removes whitespace and lines that start with '#' | [
"Read",
"lines",
"from",
"a",
"file",
"into",
"a",
"list",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/utils.py#L11-L25 | train | 35,849 |
fermiPy/fermipy | fermipy/diffuse/utils.py | create_inputlist | def create_inputlist(arglist):
"""Read lines from a file and makes a list of file names.
Removes whitespace and lines that start with '#'
Recursively read all files with the extension '.lst'
"""
lines = []
if isinstance(arglist, list):
for arg in arglist:
if os.path.splitext(arg)[1] == '.lst':
lines += readlines(arg)
else:
lines.append(arg)
elif is_null(arglist):
pass
else:
if os.path.splitext(arglist)[1] == '.lst':
lines += readlines(arglist)
else:
lines.append(arglist)
return lines | python | def create_inputlist(arglist):
"""Read lines from a file and makes a list of file names.
Removes whitespace and lines that start with '#'
Recursively read all files with the extension '.lst'
"""
lines = []
if isinstance(arglist, list):
for arg in arglist:
if os.path.splitext(arg)[1] == '.lst':
lines += readlines(arg)
else:
lines.append(arg)
elif is_null(arglist):
pass
else:
if os.path.splitext(arglist)[1] == '.lst':
lines += readlines(arglist)
else:
lines.append(arglist)
return lines | [
"def",
"create_inputlist",
"(",
"arglist",
")",
":",
"lines",
"=",
"[",
"]",
"if",
"isinstance",
"(",
"arglist",
",",
"list",
")",
":",
"for",
"arg",
"in",
"arglist",
":",
"if",
"os",
".",
"path",
".",
"splitext",
"(",
"arg",
")",
"[",
"1",
"]",
... | Read lines from a file and makes a list of file names.
Removes whitespace and lines that start with '#'
Recursively read all files with the extension '.lst' | [
"Read",
"lines",
"from",
"a",
"file",
"and",
"makes",
"a",
"list",
"of",
"file",
"names",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/utils.py#L28-L48 | train | 35,850 |
fermiPy/fermipy | fermipy/validate/tools.py | Validator.init | def init(self):
"""Initialize histograms."""
evclass_shape = [16, 40, 10]
evtype_shape = [16, 16, 40, 10]
evclass_psf_shape = [16, 40, 10, 100]
evtype_psf_shape = [16, 16, 40, 10, 100]
self._hists_eff = dict()
self._hists = dict(evclass_on=np.zeros(evclass_shape),
evclass_off=np.zeros(evclass_shape),
evclass_alpha=np.zeros([16, 40, 1]),
evtype_on=np.zeros(evtype_shape),
evtype_off=np.zeros(evtype_shape),
evtype_alpha=np.zeros([16, 1, 40, 1]),
evclass_psf_on=np.zeros(evclass_psf_shape),
evclass_psf_off=np.zeros(evclass_psf_shape),
evtype_psf_on=np.zeros(evtype_psf_shape),
evtype_psf_off=np.zeros(evtype_psf_shape),
) | python | def init(self):
"""Initialize histograms."""
evclass_shape = [16, 40, 10]
evtype_shape = [16, 16, 40, 10]
evclass_psf_shape = [16, 40, 10, 100]
evtype_psf_shape = [16, 16, 40, 10, 100]
self._hists_eff = dict()
self._hists = dict(evclass_on=np.zeros(evclass_shape),
evclass_off=np.zeros(evclass_shape),
evclass_alpha=np.zeros([16, 40, 1]),
evtype_on=np.zeros(evtype_shape),
evtype_off=np.zeros(evtype_shape),
evtype_alpha=np.zeros([16, 1, 40, 1]),
evclass_psf_on=np.zeros(evclass_psf_shape),
evclass_psf_off=np.zeros(evclass_psf_shape),
evtype_psf_on=np.zeros(evtype_psf_shape),
evtype_psf_off=np.zeros(evtype_psf_shape),
) | [
"def",
"init",
"(",
"self",
")",
":",
"evclass_shape",
"=",
"[",
"16",
",",
"40",
",",
"10",
"]",
"evtype_shape",
"=",
"[",
"16",
",",
"16",
",",
"40",
",",
"10",
"]",
"evclass_psf_shape",
"=",
"[",
"16",
",",
"40",
",",
"10",
",",
"100",
"]",
... | Initialize histograms. | [
"Initialize",
"histograms",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/validate/tools.py#L145-L164 | train | 35,851 |
fermiPy/fermipy | fermipy/validate/tools.py | Validator.create_hist | def create_hist(self, evclass, evtype, xsep, energy, ctheta,
fill_sep=False, fill_evtype=False):
"""Load into a histogram."""
nevt = len(evclass)
ebin = utils.val_to_bin(self._energy_bins, energy)
scale = self._psf_scale[ebin]
vals = [energy, ctheta]
bins = [self._energy_bins, self._ctheta_bins]
if fill_sep:
vals += [xsep]
bins += [self._xsep_bins]
if fill_evtype:
loopv = [self._evclass_bins[:-1], self._evtype_bins[:-1]]
shape = [16, 16] + [len(b) - 1 for b in bins]
else:
loopv = [self._evclass_bins[:-1]]
shape = [16] + [len(b) - 1 for b in bins]
h = np.zeros(shape)
for t in itertools.product(*loopv):
m = (evclass[:, int(t[0])] == True)
if fill_evtype:
m &= (evtype[:, int(t[1])] == True)
if not np.sum(m):
continue
z = np.vstack(vals)
z = z[:, m]
if fill_evtype:
h[int(t[0]), int(t[1])] += np.histogramdd(z.T, bins=bins)[0]
else:
h[int(t[0])] += np.histogramdd(z.T, bins=bins)[0]
return h | python | def create_hist(self, evclass, evtype, xsep, energy, ctheta,
fill_sep=False, fill_evtype=False):
"""Load into a histogram."""
nevt = len(evclass)
ebin = utils.val_to_bin(self._energy_bins, energy)
scale = self._psf_scale[ebin]
vals = [energy, ctheta]
bins = [self._energy_bins, self._ctheta_bins]
if fill_sep:
vals += [xsep]
bins += [self._xsep_bins]
if fill_evtype:
loopv = [self._evclass_bins[:-1], self._evtype_bins[:-1]]
shape = [16, 16] + [len(b) - 1 for b in bins]
else:
loopv = [self._evclass_bins[:-1]]
shape = [16] + [len(b) - 1 for b in bins]
h = np.zeros(shape)
for t in itertools.product(*loopv):
m = (evclass[:, int(t[0])] == True)
if fill_evtype:
m &= (evtype[:, int(t[1])] == True)
if not np.sum(m):
continue
z = np.vstack(vals)
z = z[:, m]
if fill_evtype:
h[int(t[0]), int(t[1])] += np.histogramdd(z.T, bins=bins)[0]
else:
h[int(t[0])] += np.histogramdd(z.T, bins=bins)[0]
return h | [
"def",
"create_hist",
"(",
"self",
",",
"evclass",
",",
"evtype",
",",
"xsep",
",",
"energy",
",",
"ctheta",
",",
"fill_sep",
"=",
"False",
",",
"fill_evtype",
"=",
"False",
")",
":",
"nevt",
"=",
"len",
"(",
"evclass",
")",
"ebin",
"=",
"utils",
"."... | Load into a histogram. | [
"Load",
"into",
"a",
"histogram",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/validate/tools.py#L319-L358 | train | 35,852 |
fermiPy/fermipy | fermipy/validate/tools.py | Validator.calc_eff | def calc_eff(self):
"""Calculate the efficiency."""
hists = self.hists
hists_out = self._hists_eff
cth_axis_idx = dict(evclass=2, evtype=3)
for k in ['evclass', 'evtype']:
if k == 'evclass':
ns0 = hists['evclass_on'][4][None, ...]
nb0 = hists['evclass_off'][4][None, ...]
else:
ns0 = hists['evclass_on'][4][None, None, ...]
nb0 = hists['evclass_off'][4][None, None, ...]
eff, eff_var = calc_eff(ns0, nb0,
hists['%s_on' % k], hists['%s_off' % k],
hists['%s_alpha' % k])
hists_out['%s_cth_eff' % k] = eff
hists_out['%s_cth_eff_var' % k] = eff_var
eff, eff_var = calc_eff(ns0, nb0,
hists['%s_on' % k], hists['%s_off' % k],
hists['%s_alpha' % k],
sum_axes=[cth_axis_idx[k]])
hists_out['%s_eff' % k] = np.squeeze(eff)
hists_out['%s_eff_var' % k] = np.squeeze(eff_var) | python | def calc_eff(self):
"""Calculate the efficiency."""
hists = self.hists
hists_out = self._hists_eff
cth_axis_idx = dict(evclass=2, evtype=3)
for k in ['evclass', 'evtype']:
if k == 'evclass':
ns0 = hists['evclass_on'][4][None, ...]
nb0 = hists['evclass_off'][4][None, ...]
else:
ns0 = hists['evclass_on'][4][None, None, ...]
nb0 = hists['evclass_off'][4][None, None, ...]
eff, eff_var = calc_eff(ns0, nb0,
hists['%s_on' % k], hists['%s_off' % k],
hists['%s_alpha' % k])
hists_out['%s_cth_eff' % k] = eff
hists_out['%s_cth_eff_var' % k] = eff_var
eff, eff_var = calc_eff(ns0, nb0,
hists['%s_on' % k], hists['%s_off' % k],
hists['%s_alpha' % k],
sum_axes=[cth_axis_idx[k]])
hists_out['%s_eff' % k] = np.squeeze(eff)
hists_out['%s_eff_var' % k] = np.squeeze(eff_var) | [
"def",
"calc_eff",
"(",
"self",
")",
":",
"hists",
"=",
"self",
".",
"hists",
"hists_out",
"=",
"self",
".",
"_hists_eff",
"cth_axis_idx",
"=",
"dict",
"(",
"evclass",
"=",
"2",
",",
"evtype",
"=",
"3",
")",
"for",
"k",
"in",
"[",
"'evclass'",
",",
... | Calculate the efficiency. | [
"Calculate",
"the",
"efficiency",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/validate/tools.py#L360-L387 | train | 35,853 |
fermiPy/fermipy | fermipy/validate/tools.py | Validator.calc_containment | def calc_containment(self):
"""Calculate PSF containment."""
hists = self.hists
hists_out = self._hists_eff
quantiles = [0.34, 0.68, 0.90, 0.95]
cth_axis_idx = dict(evclass=2, evtype=3)
for k in ['evclass']: # ,'evtype']:
print(k)
non = hists['%s_psf_on' % k]
noff = hists['%s_psf_off' % k]
alpha = hists['%s_alpha' % k][..., None]
if k == 'evclass':
sep = self._sep_bins[None, :, None, 1:]
else:
sep = self._sep_bins[None, None, :, None, 1:]
qval, qerr = calc_quantiles(sep, non, noff, alpha, quantiles)
for i, q in enumerate(quantiles):
hists_out['%s_cth_q%2i' % (k, q * 100)] = qval[i]
hists_out['%s_cth_q%2i_err' % (k, q * 100)] = qerr[i]
non = np.sum(non, axis=cth_axis_idx[k])
noff = np.sum(noff, axis=cth_axis_idx[k])
alpha = np.squeeze(alpha, axis=cth_axis_idx[k])
sep = np.squeeze(sep, axis=cth_axis_idx[k])
qval, qerr = calc_quantiles(sep, non, noff, alpha, quantiles)
for i, q in enumerate(quantiles):
hists_out['%s_q%2i' % (k, q * 100)] = qval[i]
hists_out['%s_q%2i_err' % (k, q * 100)] = qerr[i] | python | def calc_containment(self):
"""Calculate PSF containment."""
hists = self.hists
hists_out = self._hists_eff
quantiles = [0.34, 0.68, 0.90, 0.95]
cth_axis_idx = dict(evclass=2, evtype=3)
for k in ['evclass']: # ,'evtype']:
print(k)
non = hists['%s_psf_on' % k]
noff = hists['%s_psf_off' % k]
alpha = hists['%s_alpha' % k][..., None]
if k == 'evclass':
sep = self._sep_bins[None, :, None, 1:]
else:
sep = self._sep_bins[None, None, :, None, 1:]
qval, qerr = calc_quantiles(sep, non, noff, alpha, quantiles)
for i, q in enumerate(quantiles):
hists_out['%s_cth_q%2i' % (k, q * 100)] = qval[i]
hists_out['%s_cth_q%2i_err' % (k, q * 100)] = qerr[i]
non = np.sum(non, axis=cth_axis_idx[k])
noff = np.sum(noff, axis=cth_axis_idx[k])
alpha = np.squeeze(alpha, axis=cth_axis_idx[k])
sep = np.squeeze(sep, axis=cth_axis_idx[k])
qval, qerr = calc_quantiles(sep, non, noff, alpha, quantiles)
for i, q in enumerate(quantiles):
hists_out['%s_q%2i' % (k, q * 100)] = qval[i]
hists_out['%s_q%2i_err' % (k, q * 100)] = qerr[i] | [
"def",
"calc_containment",
"(",
"self",
")",
":",
"hists",
"=",
"self",
".",
"hists",
"hists_out",
"=",
"self",
".",
"_hists_eff",
"quantiles",
"=",
"[",
"0.34",
",",
"0.68",
",",
"0.90",
",",
"0.95",
"]",
"cth_axis_idx",
"=",
"dict",
"(",
"evclass",
"... | Calculate PSF containment. | [
"Calculate",
"PSF",
"containment",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/validate/tools.py#L398-L430 | train | 35,854 |
fermiPy/fermipy | fermipy/config.py | update_from_schema | def update_from_schema(cfg, cfgin, schema):
"""Update configuration dictionary ``cfg`` with the contents of
``cfgin`` using the ``schema`` dictionary to determine the valid
input keys.
Parameters
----------
cfg : dict
Configuration dictionary to be updated.
cfgin : dict
New configuration dictionary that will be merged with ``cfg``.
schema : dict
Configuration schema defining the valid configuration keys and
their types.
Returns
-------
cfgout : dict
"""
cfgout = copy.deepcopy(cfg)
for k, v in schema.items():
if k not in cfgin:
continue
if isinstance(v, dict):
cfgout.setdefault(k, {})
cfgout[k] = update_from_schema(cfg[k], cfgin[k], v)
elif v[2] is dict:
cfgout[k] = utils.merge_dict(cfg[k], cfgin[k], add_new_keys=True)
else:
cfgout[k] = cfgin[k]
return cfgout | python | def update_from_schema(cfg, cfgin, schema):
"""Update configuration dictionary ``cfg`` with the contents of
``cfgin`` using the ``schema`` dictionary to determine the valid
input keys.
Parameters
----------
cfg : dict
Configuration dictionary to be updated.
cfgin : dict
New configuration dictionary that will be merged with ``cfg``.
schema : dict
Configuration schema defining the valid configuration keys and
their types.
Returns
-------
cfgout : dict
"""
cfgout = copy.deepcopy(cfg)
for k, v in schema.items():
if k not in cfgin:
continue
if isinstance(v, dict):
cfgout.setdefault(k, {})
cfgout[k] = update_from_schema(cfg[k], cfgin[k], v)
elif v[2] is dict:
cfgout[k] = utils.merge_dict(cfg[k], cfgin[k], add_new_keys=True)
else:
cfgout[k] = cfgin[k]
return cfgout | [
"def",
"update_from_schema",
"(",
"cfg",
",",
"cfgin",
",",
"schema",
")",
":",
"cfgout",
"=",
"copy",
".",
"deepcopy",
"(",
"cfg",
")",
"for",
"k",
",",
"v",
"in",
"schema",
".",
"items",
"(",
")",
":",
"if",
"k",
"not",
"in",
"cfgin",
":",
"con... | Update configuration dictionary ``cfg`` with the contents of
``cfgin`` using the ``schema`` dictionary to determine the valid
input keys.
Parameters
----------
cfg : dict
Configuration dictionary to be updated.
cfgin : dict
New configuration dictionary that will be merged with ``cfg``.
schema : dict
Configuration schema defining the valid configuration keys and
their types.
Returns
-------
cfgout : dict | [
"Update",
"configuration",
"dictionary",
"cfg",
"with",
"the",
"contents",
"of",
"cfgin",
"using",
"the",
"schema",
"dictionary",
"to",
"determine",
"the",
"valid",
"input",
"keys",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/config.py#L76-L111 | train | 35,855 |
fermiPy/fermipy | fermipy/config.py | Configurable.write_config | def write_config(self, outfile):
"""Write the configuration dictionary to an output file."""
utils.write_yaml(self.config, outfile, default_flow_style=False) | python | def write_config(self, outfile):
"""Write the configuration dictionary to an output file."""
utils.write_yaml(self.config, outfile, default_flow_style=False) | [
"def",
"write_config",
"(",
"self",
",",
"outfile",
")",
":",
"utils",
".",
"write_yaml",
"(",
"self",
".",
"config",
",",
"outfile",
",",
"default_flow_style",
"=",
"False",
")"
] | Write the configuration dictionary to an output file. | [
"Write",
"the",
"configuration",
"dictionary",
"to",
"an",
"output",
"file",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/config.py#L252-L254 | train | 35,856 |
fermiPy/fermipy | fermipy/config.py | ConfigManager.create | def create(cls, configfile):
"""Create a configuration dictionary from a yaml config file.
This function will first populate the dictionary with defaults
taken from pre-defined configuration files. The configuration
dictionary is then updated with the user-defined configuration
file. Any settings defined by the user will take precedence
over the default settings."""
# populate config dictionary with an initial set of values
# config_logging = ConfigManager.load('logging.yaml')
config = {}
if config['fileio']['outdir'] is None:
config['fileio']['outdir'] = os.path.abspath(
os.path.dirname(configfile))
user_config = cls.load(configfile)
config = utils.merge_dict(config, user_config, True)
config['fileio']['outdir'] = os.path.abspath(
config['fileio']['outdir'])
return config | python | def create(cls, configfile):
"""Create a configuration dictionary from a yaml config file.
This function will first populate the dictionary with defaults
taken from pre-defined configuration files. The configuration
dictionary is then updated with the user-defined configuration
file. Any settings defined by the user will take precedence
over the default settings."""
# populate config dictionary with an initial set of values
# config_logging = ConfigManager.load('logging.yaml')
config = {}
if config['fileio']['outdir'] is None:
config['fileio']['outdir'] = os.path.abspath(
os.path.dirname(configfile))
user_config = cls.load(configfile)
config = utils.merge_dict(config, user_config, True)
config['fileio']['outdir'] = os.path.abspath(
config['fileio']['outdir'])
return config | [
"def",
"create",
"(",
"cls",
",",
"configfile",
")",
":",
"# populate config dictionary with an initial set of values",
"# config_logging = ConfigManager.load('logging.yaml')",
"config",
"=",
"{",
"}",
"if",
"config",
"[",
"'fileio'",
"]",
"[",
"'outdir'",
"]",
"is",
"N... | Create a configuration dictionary from a yaml config file.
This function will first populate the dictionary with defaults
taken from pre-defined configuration files. The configuration
dictionary is then updated with the user-defined configuration
file. Any settings defined by the user will take precedence
over the default settings. | [
"Create",
"a",
"configuration",
"dictionary",
"from",
"a",
"yaml",
"config",
"file",
".",
"This",
"function",
"will",
"first",
"populate",
"the",
"dictionary",
"with",
"defaults",
"taken",
"from",
"pre",
"-",
"defined",
"configuration",
"files",
".",
"The",
"c... | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/config.py#L269-L291 | train | 35,857 |
fermiPy/fermipy | fermipy/merge_utils.py | update_null_primary | def update_null_primary(hdu_in, hdu=None):
""" 'Update' a null primary HDU
This actually just checks hdu exists and creates it from hdu_in if it does not.
"""
if hdu is None:
hdu = fits.PrimaryHDU(header=hdu_in.header)
else:
hdu = hdu_in
hdu.header.remove('FILENAME')
return hdu | python | def update_null_primary(hdu_in, hdu=None):
""" 'Update' a null primary HDU
This actually just checks hdu exists and creates it from hdu_in if it does not.
"""
if hdu is None:
hdu = fits.PrimaryHDU(header=hdu_in.header)
else:
hdu = hdu_in
hdu.header.remove('FILENAME')
return hdu | [
"def",
"update_null_primary",
"(",
"hdu_in",
",",
"hdu",
"=",
"None",
")",
":",
"if",
"hdu",
"is",
"None",
":",
"hdu",
"=",
"fits",
".",
"PrimaryHDU",
"(",
"header",
"=",
"hdu_in",
".",
"header",
")",
"else",
":",
"hdu",
"=",
"hdu_in",
"hdu",
".",
... | 'Update' a null primary HDU
This actually just checks hdu exists and creates it from hdu_in if it does not. | [
"Update",
"a",
"null",
"primary",
"HDU"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L13-L23 | train | 35,858 |
fermiPy/fermipy | fermipy/merge_utils.py | update_primary | def update_primary(hdu_in, hdu=None):
""" 'Update' a primary HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu
"""
if hdu is None:
hdu = fits.PrimaryHDU(data=hdu_in.data, header=hdu_in.header)
else:
hdu.data += hdu_in.data
return hdu | python | def update_primary(hdu_in, hdu=None):
""" 'Update' a primary HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu
"""
if hdu is None:
hdu = fits.PrimaryHDU(data=hdu_in.data, header=hdu_in.header)
else:
hdu.data += hdu_in.data
return hdu | [
"def",
"update_primary",
"(",
"hdu_in",
",",
"hdu",
"=",
"None",
")",
":",
"if",
"hdu",
"is",
"None",
":",
"hdu",
"=",
"fits",
".",
"PrimaryHDU",
"(",
"data",
"=",
"hdu_in",
".",
"data",
",",
"header",
"=",
"hdu_in",
".",
"header",
")",
"else",
":"... | 'Update' a primary HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu | [
"Update",
"a",
"primary",
"HDU"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L26-L36 | train | 35,859 |
fermiPy/fermipy | fermipy/merge_utils.py | update_image | def update_image(hdu_in, hdu=None):
""" 'Update' an image HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu
"""
if hdu is None:
hdu = fits.ImageHDU(
data=hdu_in.data, header=hdu_in.header, name=hdu_in.name)
else:
hdu.data += hdu_in.data
return hdu | python | def update_image(hdu_in, hdu=None):
""" 'Update' an image HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu
"""
if hdu is None:
hdu = fits.ImageHDU(
data=hdu_in.data, header=hdu_in.header, name=hdu_in.name)
else:
hdu.data += hdu_in.data
return hdu | [
"def",
"update_image",
"(",
"hdu_in",
",",
"hdu",
"=",
"None",
")",
":",
"if",
"hdu",
"is",
"None",
":",
"hdu",
"=",
"fits",
".",
"ImageHDU",
"(",
"data",
"=",
"hdu_in",
".",
"data",
",",
"header",
"=",
"hdu_in",
".",
"header",
",",
"name",
"=",
... | 'Update' an image HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this adds the data in hdu_in to hdu | [
"Update",
"an",
"image",
"HDU"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L39-L50 | train | 35,860 |
fermiPy/fermipy | fermipy/merge_utils.py | update_ebounds | def update_ebounds(hdu_in, hdu=None):
""" 'Update' the EBOUNDS HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this raises an exception if it doesn not match hdu_in
"""
if hdu is None:
hdu = fits.BinTableHDU(
data=hdu_in.data, header=hdu_in.header, name=hdu_in.name)
else:
for col in ['CHANNEL', 'E_MIN', 'E_MAX']:
if (hdu.data[col] != hdu_in.data[col]).any():
raise ValueError("Energy bounds do not match : %s %s" %
(hdu.data[col], hdu_in.data[col]))
return hdu | python | def update_ebounds(hdu_in, hdu=None):
""" 'Update' the EBOUNDS HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this raises an exception if it doesn not match hdu_in
"""
if hdu is None:
hdu = fits.BinTableHDU(
data=hdu_in.data, header=hdu_in.header, name=hdu_in.name)
else:
for col in ['CHANNEL', 'E_MIN', 'E_MAX']:
if (hdu.data[col] != hdu_in.data[col]).any():
raise ValueError("Energy bounds do not match : %s %s" %
(hdu.data[col], hdu_in.data[col]))
return hdu | [
"def",
"update_ebounds",
"(",
"hdu_in",
",",
"hdu",
"=",
"None",
")",
":",
"if",
"hdu",
"is",
"None",
":",
"hdu",
"=",
"fits",
".",
"BinTableHDU",
"(",
"data",
"=",
"hdu_in",
".",
"data",
",",
"header",
"=",
"hdu_in",
".",
"header",
",",
"name",
"=... | 'Update' the EBOUNDS HDU
This checks hdu exists and creates it from hdu_in if it does not.
If hdu does exist, this raises an exception if it doesn not match hdu_in | [
"Update",
"the",
"EBOUNDS",
"HDU"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L53-L67 | train | 35,861 |
fermiPy/fermipy | fermipy/merge_utils.py | merge_all_gti_data | def merge_all_gti_data(datalist_in, nrows, first):
""" Merge together all the GTI data
Parameters
-------
datalist_in : list of `astropy.io.fits.BinTableHDU` data
The GTI data that is being merged
nrows : `~numpy.ndarray` of ints
Array with the number of nrows for each object in datalist_in
first : `astropy.io.fits.BinTableHDU`
BinTableHDU to use as a template
Returns
-------
out_hdu : `astropy.io.fits.BinTableHDU`
BinTableHDU with the merge GTIs
"""
max_row = nrows.cumsum()
min_row = max_row - nrows
out_hdu = fits.BinTableHDU.from_columns(
first.columns, header=first.header, nrows=nrows.sum())
for (imin, imax, data_in) in zip(min_row, max_row, datalist_in):
for col in first.columns:
out_hdu.data[col.name][imin:imax] = data_in[col.name]
return out_hdu | python | def merge_all_gti_data(datalist_in, nrows, first):
""" Merge together all the GTI data
Parameters
-------
datalist_in : list of `astropy.io.fits.BinTableHDU` data
The GTI data that is being merged
nrows : `~numpy.ndarray` of ints
Array with the number of nrows for each object in datalist_in
first : `astropy.io.fits.BinTableHDU`
BinTableHDU to use as a template
Returns
-------
out_hdu : `astropy.io.fits.BinTableHDU`
BinTableHDU with the merge GTIs
"""
max_row = nrows.cumsum()
min_row = max_row - nrows
out_hdu = fits.BinTableHDU.from_columns(
first.columns, header=first.header, nrows=nrows.sum())
for (imin, imax, data_in) in zip(min_row, max_row, datalist_in):
for col in first.columns:
out_hdu.data[col.name][imin:imax] = data_in[col.name]
return out_hdu | [
"def",
"merge_all_gti_data",
"(",
"datalist_in",
",",
"nrows",
",",
"first",
")",
":",
"max_row",
"=",
"nrows",
".",
"cumsum",
"(",
")",
"min_row",
"=",
"max_row",
"-",
"nrows",
"out_hdu",
"=",
"fits",
".",
"BinTableHDU",
".",
"from_columns",
"(",
"first",... | Merge together all the GTI data
Parameters
-------
datalist_in : list of `astropy.io.fits.BinTableHDU` data
The GTI data that is being merged
nrows : `~numpy.ndarray` of ints
Array with the number of nrows for each object in datalist_in
first : `astropy.io.fits.BinTableHDU`
BinTableHDU to use as a template
Returns
-------
out_hdu : `astropy.io.fits.BinTableHDU`
BinTableHDU with the merge GTIs | [
"Merge",
"together",
"all",
"the",
"GTI",
"data"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L87-L116 | train | 35,862 |
fermiPy/fermipy | fermipy/merge_utils.py | extract_gti_data | def extract_gti_data(hdu_in):
""" Extract some GTI related data
Parameters
-------
hdu_in : `astropy.io.fits.BinTableHDU`
The GTI data
Returns
-------
data : `astropy.io.fits.BinTableHDU` data
exposure : float
Exposure value taken from FITS header
tstop : float
TSTOP value taken from FITS header
"""
data = hdu_in.data
exposure = hdu_in.header['EXPOSURE']
tstop = hdu_in.header['TSTOP']
return (data, exposure, tstop) | python | def extract_gti_data(hdu_in):
""" Extract some GTI related data
Parameters
-------
hdu_in : `astropy.io.fits.BinTableHDU`
The GTI data
Returns
-------
data : `astropy.io.fits.BinTableHDU` data
exposure : float
Exposure value taken from FITS header
tstop : float
TSTOP value taken from FITS header
"""
data = hdu_in.data
exposure = hdu_in.header['EXPOSURE']
tstop = hdu_in.header['TSTOP']
return (data, exposure, tstop) | [
"def",
"extract_gti_data",
"(",
"hdu_in",
")",
":",
"data",
"=",
"hdu_in",
".",
"data",
"exposure",
"=",
"hdu_in",
".",
"header",
"[",
"'EXPOSURE'",
"]",
"tstop",
"=",
"hdu_in",
".",
"header",
"[",
"'TSTOP'",
"]",
"return",
"(",
"data",
",",
"exposure",
... | Extract some GTI related data
Parameters
-------
hdu_in : `astropy.io.fits.BinTableHDU`
The GTI data
Returns
-------
data : `astropy.io.fits.BinTableHDU` data
exposure : float
Exposure value taken from FITS header
tstop : float
TSTOP value taken from FITS header | [
"Extract",
"some",
"GTI",
"related",
"data"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L119-L141 | train | 35,863 |
fermiPy/fermipy | fermipy/merge_utils.py | update_hpx_skymap_allsky | def update_hpx_skymap_allsky(map_in, map_out):
""" 'Update' a HEALPix skymap
This checks map_out exists and creates it from map_in if it does not.
If map_out does exist, this adds the data in map_in to map_out
"""
if map_out is None:
in_hpx = map_in.hpx
out_hpx = HPX.create_hpx(in_hpx.nside, in_hpx.nest, in_hpx.coordsys,
None, in_hpx.ebins, None, in_hpx.conv, None)
data_out = map_in.expanded_counts_map()
print(data_out.shape, data_out.sum())
map_out = HpxMap(data_out, out_hpx)
else:
map_out.data += map_in.expanded_counts_map()
return map_out | python | def update_hpx_skymap_allsky(map_in, map_out):
""" 'Update' a HEALPix skymap
This checks map_out exists and creates it from map_in if it does not.
If map_out does exist, this adds the data in map_in to map_out
"""
if map_out is None:
in_hpx = map_in.hpx
out_hpx = HPX.create_hpx(in_hpx.nside, in_hpx.nest, in_hpx.coordsys,
None, in_hpx.ebins, None, in_hpx.conv, None)
data_out = map_in.expanded_counts_map()
print(data_out.shape, data_out.sum())
map_out = HpxMap(data_out, out_hpx)
else:
map_out.data += map_in.expanded_counts_map()
return map_out | [
"def",
"update_hpx_skymap_allsky",
"(",
"map_in",
",",
"map_out",
")",
":",
"if",
"map_out",
"is",
"None",
":",
"in_hpx",
"=",
"map_in",
".",
"hpx",
"out_hpx",
"=",
"HPX",
".",
"create_hpx",
"(",
"in_hpx",
".",
"nside",
",",
"in_hpx",
".",
"nest",
",",
... | 'Update' a HEALPix skymap
This checks map_out exists and creates it from map_in if it does not.
If map_out does exist, this adds the data in map_in to map_out | [
"Update",
"a",
"HEALPix",
"skymap"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L144-L159 | train | 35,864 |
def merge_wcs_counts_cubes(filelist):
    """Merge all the files in filelist, assuming that they are WCS counts cubes.

    Parameters
    ----------
    filelist : list
        Paths of the FITS files to merge.

    Returns
    -------
    `~astropy.io.fits.HDUList`
        HDU list with the merged primary, EBOUNDS and GTI HDUs.
    """
    out_prim = None
    out_ebounds = None

    datalist_gti = []
    exposure_sum = 0.
    nfiles = len(filelist)
    ngti = np.zeros(nfiles, int)
    date_end = None

    for i, filename in enumerate(filelist):
        fin = fits.open(filename)
        # Simple progress indicator: one dot per file.
        sys.stdout.write('.')
        sys.stdout.flush()
        if i == 0:
            out_prim = update_primary(fin[0], out_prim)
        out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)
        (gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
        datalist_gti.append(gti_data)
        exposure_sum += exposure
        ngti[i] = len(gti_data)
        if i == nfiles - 1:
            # Read DATE-END before the file may be closed.  Handling this
            # in its own `if` (not an elif of the i == 0 branch) fixes the
            # single-file case, where the first file is also the last and
            # date_end was previously never assigned (NameError below).
            date_end = fin[0].header['DATE-END']
        if i == 0:
            # Keep the first file open: its GTI HDU is the template used
            # when merging the GTI data below.
            first = fin
        else:
            # Close every other file (including the last one, which the
            # previous version leaked).
            fin.close()

    out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
    out_gti.header['EXPOSURE'] = exposure_sum
    # tstop comes from the last file processed.
    out_gti.header['TSTOP'] = tstop

    hdulist = [out_prim, out_ebounds, out_gti]
    for hdu in hdulist:
        hdu.header['DATE-END'] = date_end

    out_prim.update_header()
    sys.stdout.write("!\n")

    return fits.HDUList(hdulist)
""" Merge all the files in filelist, assuming that they WCS counts cubes
"""
out_prim = None
out_ebounds = None
datalist_gti = []
exposure_sum = 0.
nfiles = len(filelist)
ngti = np.zeros(nfiles, int)
for i, filename in enumerate(filelist):
fin = fits.open(filename)
sys.stdout.write('.')
sys.stdout.flush()
if i == 0:
out_prim = update_primary(fin[0], out_prim)
out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)
(gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
datalist_gti.append(gti_data)
exposure_sum += exposure
ngti[i] = len(gti_data)
if i == 0:
first = fin
elif i == nfiles - 1:
date_end = fin[0].header['DATE-END']
else:
fin.close()
out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
out_gti.header['EXPOSURE'] = exposure_sum
out_gti.header['TSTOP'] = tstop
hdulist = [out_prim, out_ebounds, out_gti]
for hdu in hdulist:
hdu.header['DATE-END'] = date_end
out_prim.update_header()
sys.stdout.write("!\n")
return fits.HDUList(hdulist) | [
"def",
"merge_wcs_counts_cubes",
"(",
"filelist",
")",
":",
"out_prim",
"=",
"None",
"out_ebounds",
"=",
"None",
"datalist_gti",
"=",
"[",
"]",
"exposure_sum",
"=",
"0.",
"nfiles",
"=",
"len",
"(",
"filelist",
")",
"ngti",
"=",
"np",
".",
"zeros",
"(",
"... | Merge all the files in filelist, assuming that they WCS counts cubes | [
"Merge",
"all",
"the",
"files",
"in",
"filelist",
"assuming",
"that",
"they",
"WCS",
"counts",
"cubes"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L162-L201 | train | 35,865 |
def merge_hpx_counts_cubes(filelist):
    """Merge all the files in filelist, assuming that they are HEALPix counts cubes.

    Parameters
    ----------
    filelist : list
        Paths of the FITS files to merge.

    Returns
    -------
    `~astropy.io.fits.HDUList`
        HDU list with the merged primary, SKYMAP, energy-binning and
        (when available) GTI HDUs.
    """
    out_prim = None
    out_skymap = None
    out_ebounds = None

    datalist_gti = []
    exposure_sum = 0.
    nfiles = len(filelist)
    ngti = np.zeros(nfiles, int)

    out_name = None
    date_end = None

    for i, filename in enumerate(filelist):
        fin = fits.open(filename)
        # Simple progress indicator: one dot per file.
        sys.stdout.write('.')
        sys.stdout.flush()
        if i == 0:
            out_prim = update_null_primary(fin[0], out_prim)
            out_name = fin[1].name

        map_in = HpxMap.create_from_hdulist(fin)
        out_skymap = update_hpx_skymap_allsky(map_in, out_skymap)
        if i == 0:
            # The energy binning may be stored as either EBOUNDS or ENERGIES.
            try:
                out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)
            except KeyError:
                out_ebounds = update_energies(fin["ENERGIES"], out_ebounds)
        # GTI information is optional.
        try:
            (gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
            datalist_gti.append(gti_data)
            exposure_sum += exposure
            ngti[i] = len(gti_data)
        except KeyError:
            pass

        if i == nfiles - 1:
            # Read DATE-END before the file may be closed.  Handling this in
            # its own `if` (not an elif of the i == 0 branch) fixes the
            # single-file case, where the first file is also the last and
            # date_end was previously never assigned (NameError below).
            try:
                date_end = fin[0].header['DATE-END']
            except KeyError:
                date_end = None
        if i == 0:
            # Keep the first file open: its GTI HDU is the template used
            # when merging the GTI data below.
            first = fin
        else:
            # Close every other file (including the last one, which the
            # previous version leaked).
            fin.close()

    out_skymap_hdu = out_skymap.create_image_hdu("SKYMAP")

    hdulist = [out_prim, out_skymap_hdu, out_ebounds]

    if len(datalist_gti) > 0:
        out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
        out_gti.header['EXPOSURE'] = exposure_sum
        # tstop comes from the last file that actually carried a GTI HDU.
        out_gti.header['TSTOP'] = tstop
        hdulist.append(out_gti)

    for hdu in hdulist:
        if date_end:
            hdu.header['DATE-END'] = date_end

    out_prim.update_header()
    sys.stdout.write("!\n")

    return fits.HDUList(hdulist)
""" Merge all the files in filelist, assuming that they HEALPix counts cubes
"""
out_prim = None
out_skymap = None
out_ebounds = None
datalist_gti = []
exposure_sum = 0.
nfiles = len(filelist)
ngti = np.zeros(nfiles, int)
out_name = None
for i, filename in enumerate(filelist):
fin = fits.open(filename)
sys.stdout.write('.')
sys.stdout.flush()
if i == 0:
out_prim = update_null_primary(fin[0], out_prim)
out_name = fin[1].name
map_in = HpxMap.create_from_hdulist(fin)
out_skymap = update_hpx_skymap_allsky(map_in, out_skymap)
if i == 0:
try:
out_ebounds = update_ebounds(fin["EBOUNDS"], out_ebounds)
except KeyError:
out_ebounds = update_energies(fin["ENERGIES"], out_ebounds)
try:
(gti_data, exposure, tstop) = extract_gti_data(fin["GTI"])
datalist_gti.append(gti_data)
exposure_sum += exposure
ngti[i] = len(gti_data)
except KeyError:
pass
if i == 0:
first = fin
elif i == nfiles - 1:
try:
date_end = fin[0].header['DATE-END']
except KeyError:
date_end = None
else:
fin.close()
out_skymap_hdu = out_skymap.create_image_hdu("SKYMAP")
hdulist = [out_prim, out_skymap_hdu, out_ebounds]
if len(datalist_gti) > 0:
out_gti = merge_all_gti_data(datalist_gti, ngti, first['GTI'])
out_gti.header['EXPOSURE'] = exposure_sum
out_gti.header['TSTOP'] = tstop
hdulist.append(out_gti)
for hdu in hdulist:
if date_end:
hdu.header['DATE-END'] = date_end
out_prim.update_header()
sys.stdout.write("!\n")
return fits.HDUList(hdulist) | [
"def",
"merge_hpx_counts_cubes",
"(",
"filelist",
")",
":",
"out_prim",
"=",
"None",
"out_skymap",
"=",
"None",
"out_ebounds",
"=",
"None",
"datalist_gti",
"=",
"[",
"]",
"exposure_sum",
"=",
"0.",
"nfiles",
"=",
"len",
"(",
"filelist",
")",
"ngti",
"=",
"... | Merge all the files in filelist, assuming that they HEALPix counts cubes | [
"Merge",
"all",
"the",
"files",
"in",
"filelist",
"assuming",
"that",
"they",
"HEALPix",
"counts",
"cubes"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/merge_utils.py#L204-L268 | train | 35,866 |
def _write_xml(xmlfile, srcs):
    """Save the ROI model as an XML file.

    Parameters
    ----------
    xmlfile : str
        Path of the output XML file.
    srcs : iterable
        Source objects; each must provide a ``write_xml(parent)`` method.
    """
    root = ElementTree.Element('source_library')
    root.set('title', 'source_library')

    for src in srcs:
        src.write_xml(root)

    # Use a context manager so the file handle is closed (and the contents
    # flushed to disk) even if prettify_xml raises; the previous version
    # leaked the open handle.
    with open(xmlfile, 'w') as output_file:
        output_file.write(utils.prettify_xml(root))
"""Save the ROI model as an XML """
root = ElementTree.Element('source_library')
root.set('title', 'source_library')
for src in srcs:
src.write_xml(root)
output_file = open(xmlfile, 'w')
output_file.write(utils.prettify_xml(root)) | [
"def",
"_write_xml",
"(",
"xmlfile",
",",
"srcs",
")",
":",
"root",
"=",
"ElementTree",
".",
"Element",
"(",
"'source_library'",
")",
"root",
".",
"set",
"(",
"'title'",
",",
"'source_library'",
")",
"for",
"src",
"in",
"srcs",
":",
"src",
".",
"write_xm... | Save the ROI model as an XML | [
"Save",
"the",
"ROI",
"model",
"as",
"an",
"XML"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/gt_srcmap_partial.py#L129-L138 | train | 35,867 |
def _handle_component(sourcekey, comp_dict):
    """Build the source objects for one diffuse component and write its XML.

    The component key, when present, is appended to ``sourcekey`` to form
    the full source name.  A status line is printed for the known model
    types before the XML file is written.
    """
    if comp_dict.comp_key is None:
        fullkey = sourcekey
    else:
        fullkey = "%s_%s" % (sourcekey, comp_dict.comp_key)

    srcdict = make_sources(fullkey, comp_dict)

    # Pick the filename to report based on the model type; unknown types
    # are written silently, exactly as before.
    model_type = comp_dict.model_type
    if model_type == 'IsoSource':
        report_file = comp_dict.Spectral_Filename
    elif model_type == 'MapCubeSource':
        report_file = comp_dict.Spatial_Filename
    else:
        report_file = None

    if report_file is not None:
        print("Writing xml for %s to %s: %s %s" % (fullkey,
                                                   comp_dict.srcmdl_name,
                                                   model_type,
                                                   report_file))

    SrcmapsDiffuse_SG._write_xml(comp_dict.srcmdl_name, srcdict.values())
"""Make the source objects and write the xml for a component
"""
if comp_dict.comp_key is None:
fullkey = sourcekey
else:
fullkey = "%s_%s" % (sourcekey, comp_dict.comp_key)
srcdict = make_sources(fullkey, comp_dict)
if comp_dict.model_type == 'IsoSource':
print("Writing xml for %s to %s: %s %s" % (fullkey,
comp_dict.srcmdl_name,
comp_dict.model_type,
comp_dict.Spectral_Filename))
elif comp_dict.model_type == 'MapCubeSource':
print("Writing xml for %s to %s: %s %s" % (fullkey,
comp_dict.srcmdl_name,
comp_dict.model_type,
comp_dict.Spatial_Filename))
SrcmapsDiffuse_SG._write_xml(comp_dict.srcmdl_name, srcdict.values()) | [
"def",
"_handle_component",
"(",
"sourcekey",
",",
"comp_dict",
")",
":",
"if",
"comp_dict",
".",
"comp_key",
"is",
"None",
":",
"fullkey",
"=",
"sourcekey",
"else",
":",
"fullkey",
"=",
"\"%s_%s\"",
"%",
"(",
"sourcekey",
",",
"comp_dict",
".",
"comp_key",
... | Make the source objects and write the xml for a component | [
"Make",
"the",
"source",
"objects",
"and",
"write",
"the",
"xml",
"for",
"a",
"component"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/gt_srcmap_partial.py#L141-L159 | train | 35,868 |
def find_peaks(input_map, threshold, min_separation=0.5):
    """Find local maxima in a 2-D map.

    A peak must have amplitude above ``threshold`` and lie at least
    ``min_separation`` degrees from any peak of larger amplitude.  Peak
    finding is done with `~scipy.ndimage.filters.maximum_filter`.

    Parameters
    ----------
    input_map : `~gammapy.maps.WcsMap`
        Map to search.
    threshold : float
        Minimum peak amplitude.
    min_separation : float
        Radius of region size in degrees; sets the minimum allowable
        separation between peaks.

    Returns
    -------
    peaks : list
        Dictionaries with the pixel indices, sky direction and amplitude
        of each peak, sorted by decreasing amplitude.
    """
    data = input_map.data

    cdelt = max(input_map.geom.wcs.wcs.cdelt)
    # Never allow a separation below two pixels.
    min_separation = max(min_separation, 2 * cdelt)

    region_size_pix = max(3, int(min_separation / cdelt))

    # Circular footprint with radius min_separation (in degrees).
    deltaxy = utils.make_pixel_distance(region_size_pix * 2 + 3)
    deltaxy *= cdelt
    footprint = deltaxy < min_separation

    local_max = maximum_filter(data, footprint=footprint) == data
    local_max[data < threshold] = False

    labeled, _ = scipy.ndimage.label(local_max)

    peaks = []
    # NB: numpy axis 0 is y and axis 1 is x.
    for yslice, xslice in scipy.ndimage.find_objects(labeled):
        ix, iy = xslice.start, yslice.start
        peaks.append({'ix': ix,
                      'iy': iy,
                      'skydir': SkyCoord.from_pixel(ix, iy,
                                                    input_map.geom.wcs),
                      'amp': data[iy, ix]})

    peaks.sort(key=lambda peak: peak['amp'], reverse=True)
    return peaks
"""Find peaks in a 2-D map object that have amplitude larger than
`threshold` and lie a distance at least `min_separation` from another
peak of larger amplitude. The implementation of this method uses
`~scipy.ndimage.filters.maximum_filter`.
Parameters
----------
input_map : `~gammapy.maps.WcsMap`
threshold : float
min_separation : float
Radius of region size in degrees. Sets the minimum allowable
separation between peaks.
Returns
-------
peaks : list
List of dictionaries containing the location and amplitude of
each peak.
"""
data = input_map.data
cdelt = max(input_map.geom.wcs.wcs.cdelt)
min_separation = max(min_separation, 2 * cdelt)
region_size_pix = int(min_separation / cdelt)
region_size_pix = max(3, region_size_pix)
deltaxy = utils.make_pixel_distance(region_size_pix * 2 + 3)
deltaxy *= max(input_map.geom.wcs.wcs.cdelt)
region = deltaxy < min_separation
local_max = maximum_filter(data, footprint=region) == data
local_max[data < threshold] = False
labeled, num_objects = scipy.ndimage.label(local_max)
slices = scipy.ndimage.find_objects(labeled)
peaks = []
for s in slices:
skydir = SkyCoord.from_pixel(s[1].start, s[0].start,
input_map.geom.wcs)
peaks.append({'ix': s[1].start,
'iy': s[0].start,
'skydir': skydir,
'amp': data[s[0].start, s[1].start]})
return sorted(peaks, key=lambda t: t['amp'], reverse=True) | [
"def",
"find_peaks",
"(",
"input_map",
",",
"threshold",
",",
"min_separation",
"=",
"0.5",
")",
":",
"data",
"=",
"input_map",
".",
"data",
"cdelt",
"=",
"max",
"(",
"input_map",
".",
"geom",
".",
"wcs",
".",
"wcs",
".",
"cdelt",
")",
"min_separation",
... | Find peaks in a 2-D map object that have amplitude larger than
`threshold` and lie a distance at least `min_separation` from another
peak of larger amplitude. The implementation of this method uses
`~scipy.ndimage.filters.maximum_filter`.
Parameters
----------
input_map : `~gammapy.maps.WcsMap`
threshold : float
min_separation : float
Radius of region size in degrees. Sets the minimum allowable
separation between peaks.
Returns
-------
peaks : list
List of dictionaries containing the location and amplitude of
each peak. | [
"Find",
"peaks",
"in",
"a",
"2",
"-",
"D",
"map",
"object",
"that",
"have",
"amplitude",
"larger",
"than",
"threshold",
"and",
"lie",
"a",
"distance",
"at",
"least",
"min_separation",
"from",
"another",
"peak",
"of",
"larger",
"amplitude",
".",
"The",
"imp... | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/sourcefind_utils.py#L153-L203 | train | 35,869 |
def estimate_pos_and_err_parabolic(tsvals):
    """Solve for the position and uncertainty of a source in one dimension,
    assuming the maximum is nearby and the errors are parabolic.

    Parameters
    ----------
    tsvals : `~numpy.ndarray`
        TS values at the maximum-TS pixel and at the pixel on either side.

    Returns
    -------
    tuple
        Position and uncertainty of the source, in pixel units relative
        to the center of the maximum pixel.
    """
    left, center, right = tsvals[0], tsvals[1], tsvals[2]
    # Parabola through the three samples: linear term and curvature.
    slope = right - left
    curvature = 2. * center - left - right
    offset = slope / (2 * curvature)
    uncertainty = np.sqrt(2 / curvature)
    return offset, uncertainty
"""Solve for the position and uncertainty of source in one dimension
assuming that you are near the maximum and the errors are parabolic
Parameters
----------
tsvals : `~numpy.ndarray`
The TS values at the maximum TS, and for each pixel on either side
Returns
-------
The position and uncertainty of the source, in pixel units
w.r.t. the center of the maximum pixel
"""
a = tsvals[2] - tsvals[0]
bc = 2. * tsvals[1] - tsvals[0] - tsvals[2]
s = a / (2 * bc)
err = np.sqrt(2 / bc)
return s, err | [
"def",
"estimate_pos_and_err_parabolic",
"(",
"tsvals",
")",
":",
"a",
"=",
"tsvals",
"[",
"2",
"]",
"-",
"tsvals",
"[",
"0",
"]",
"bc",
"=",
"2.",
"*",
"tsvals",
"[",
"1",
"]",
"-",
"tsvals",
"[",
"0",
"]",
"-",
"tsvals",
"[",
"2",
"]",
"s",
"... | Solve for the position and uncertainty of source in one dimension
assuming that you are near the maximum and the errors are parabolic
Parameters
----------
tsvals : `~numpy.ndarray`
The TS values at the maximum TS, and for each pixel on either side
Returns
-------
The position and uncertainty of the source, in pixel units
w.r.t. the center of the maximum pixel | [
"Solve",
"for",
"the",
"position",
"and",
"uncertainty",
"of",
"source",
"in",
"one",
"dimension",
"assuming",
"that",
"you",
"are",
"near",
"the",
"maximum",
"and",
"the",
"errors",
"are",
"parabolic"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/sourcefind_utils.py#L206-L225 | train | 35,870 |
def refine_peak(tsmap, pix):
    """Refine a peak position assuming the maximum is nearby and the
    errors are parabolic.

    Parameters
    ----------
    tsmap : `~numpy.ndarray`
        Array with the TS data.
    pix : tuple
        (x, y) pixel indices of the peak.

    Returns
    -------
    tuple
        ``((xval, yval), (xerr, yerr))`` — position and uncertainty in
        pixel units relative to the center of the maximum pixel.  An
        error of -1 flags a peak on the map edge.
    """
    # Note the annoying WCS convention: numpy axis 1 is x, axis 0 is y.
    nx = tsmap.shape[1]
    ny = tsmap.shape[0]

    def _refine_axis(index, npix, profile):
        # Edge pixels cannot be refined; flag them with err = -1.
        if index == 0 or index == npix - 1:
            return float(index), -1
        val, err = estimate_pos_and_err_parabolic(profile)
        return val + float(index), err

    xval, xerr = _refine_axis(pix[0], nx,
                              tsmap[pix[1], pix[0] - 1:pix[0] + 2])
    yval, yerr = _refine_axis(pix[1], ny,
                              tsmap[pix[1] - 1:pix[1] + 2, pix[0]])

    return (xval, yval), (xerr, yerr)
"""Solve for the position and uncertainty of source assuming that you
are near the maximum and the errors are parabolic
Parameters
----------
tsmap : `~numpy.ndarray`
Array with the TS data.
Returns
-------
The position and uncertainty of the source, in pixel units
w.r.t. the center of the maximum pixel
"""
# Note the annoying WCS convention
nx = tsmap.shape[1]
ny = tsmap.shape[0]
if pix[0] == 0 or pix[0] == (nx - 1):
xval = float(pix[0])
xerr = -1
else:
x_arr = tsmap[pix[1], pix[0] - 1:pix[0] + 2]
xval, xerr = estimate_pos_and_err_parabolic(x_arr)
xval += float(pix[0])
if pix[1] == 0 or pix[1] == (ny - 1):
yval = float(pix[1])
yerr = -1
else:
y_arr = tsmap[pix[1] - 1:pix[1] + 2, pix[0]]
yval, yerr = estimate_pos_and_err_parabolic(y_arr)
yval += float(pix[1])
return (xval, yval), (xerr, yerr) | [
"def",
"refine_peak",
"(",
"tsmap",
",",
"pix",
")",
":",
"# Note the annoying WCS convention",
"nx",
"=",
"tsmap",
".",
"shape",
"[",
"1",
"]",
"ny",
"=",
"tsmap",
".",
"shape",
"[",
"0",
"]",
"if",
"pix",
"[",
"0",
"]",
"==",
"0",
"or",
"pix",
"[... | Solve for the position and uncertainty of source assuming that you
are near the maximum and the errors are parabolic
Parameters
----------
tsmap : `~numpy.ndarray`
Array with the TS data.
Returns
-------
The position and uncertainty of the source, in pixel units
w.r.t. the center of the maximum pixel | [
"Solve",
"for",
"the",
"position",
"and",
"uncertainty",
"of",
"source",
"assuming",
"that",
"you",
"are",
"near",
"the",
"maximum",
"and",
"the",
"errors",
"are",
"parabolic"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/sourcefind_utils.py#L228-L263 | train | 35,871 |
def spectral_pars_from_catalog(cat):
    """Create spectral parameters from 3FGL catalog columns.

    Starts from the default parameter dictionaries for the catalog's
    spectrum type, fills in the catalog values, applies the sign/scale
    conventions of the XML model, and wraps each parameter with
    ``make_parameter_dict``.
    """
    spectrum_type = cat['SpectrumType']
    pars = get_function_defaults(spectrum_type)

    # Catalog values are stored positionally; match them to parameter names.
    for idx, par_name in enumerate(get_function_par_names(spectrum_type)):
        pars[par_name]['value'] = cat['param_values'][idx]

    if spectrum_type == 'PowerLaw':
        # Catalog indices are positive; the XML convention uses scale = -1.
        pars['Index']['value'] *= -1.0
        pars['Index']['scale'] = -1.0
        pars['Scale']['scale'] = 1.0
        # Keep the fit bounds at least one unit away from the value.
        pars['Index']['max'] = max(5.0, pars['Index']['value'] + 1.0)
        pars['Index']['min'] = min(0.0, pars['Index']['value'] - 1.0)

        pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
        pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
        pars['Index'] = make_parameter_dict(pars['Index'], False, False)
    elif spectrum_type == 'LogParabola':
        pars['norm'] = make_parameter_dict(pars['norm'], False, True)
        pars['Eb'] = make_parameter_dict(pars['Eb'], True, False)
        pars['alpha'] = make_parameter_dict(pars['alpha'], False, False)
        pars['beta'] = make_parameter_dict(pars['beta'], False, False)
    elif spectrum_type in ('PLSuperExpCutoff', 'PLSuperExpCutoff2'):
        # The two cutoff variants share everything except the last parameter.
        pars['Index1']['value'] *= -1.0
        pars['Index1']['scale'] = -1.0
        pars['Index2']['scale'] = 1.0

        pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
        pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
        pars['Index1'] = make_parameter_dict(pars['Index1'], False, False)
        pars['Index2'] = make_parameter_dict(pars['Index2'], False, False)
        if spectrum_type == 'PLSuperExpCutoff':
            pars['Cutoff'] = make_parameter_dict(pars['Cutoff'], False, True)
        else:
            pars['Expfactor'] = make_parameter_dict(pars['Expfactor'],
                                                    False, True)
    else:
        raise Exception('Unsupported spectral type:' + spectrum_type)

    return pars
"""Create spectral parameters from 3FGL catalog columns."""
spectrum_type = cat['SpectrumType']
pars = get_function_defaults(cat['SpectrumType'])
par_idxs = {k: i for i, k in
enumerate(get_function_par_names(cat['SpectrumType']))}
for k in pars:
pars[k]['value'] = cat['param_values'][par_idxs[k]]
if spectrum_type == 'PowerLaw':
pars['Index']['value'] *= -1.0
pars['Index']['scale'] = -1.0
pars['Scale']['scale'] = 1.0
pars['Index']['max'] = max(5.0, pars['Index']['value'] + 1.0)
pars['Index']['min'] = min(0.0, pars['Index']['value'] - 1.0)
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index'] = make_parameter_dict(pars['Index'], False, False)
elif spectrum_type == 'LogParabola':
pars['norm'] = make_parameter_dict(pars['norm'], False, True)
pars['Eb'] = make_parameter_dict(pars['Eb'], True, False)
pars['alpha'] = make_parameter_dict(pars['alpha'], False, False)
pars['beta'] = make_parameter_dict(pars['beta'], False, False)
elif spectrum_type == 'PLSuperExpCutoff':
pars['Index1']['value'] *= -1.0
pars['Index1']['scale'] = -1.0
pars['Index2']['scale'] = 1.0
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index1'] = make_parameter_dict(pars['Index1'], False, False)
pars['Index2'] = make_parameter_dict(pars['Index2'], False, False)
pars['Cutoff'] = make_parameter_dict(pars['Cutoff'], False, True)
elif spectrum_type == 'PLSuperExpCutoff2':
pars['Index1']['value'] *= -1.0
pars['Index1']['scale'] = -1.0
pars['Index2']['scale'] = 1.0
pars['Prefactor'] = make_parameter_dict(pars['Prefactor'])
pars['Scale'] = make_parameter_dict(pars['Scale'], True, False)
pars['Index1'] = make_parameter_dict(pars['Index1'], False, False)
pars['Index2'] = make_parameter_dict(pars['Index2'], False, False)
pars['Expfactor'] = make_parameter_dict(pars['Expfactor'], False, True)
else:
raise Exception('Unsupported spectral type:' + spectrum_type)
return pars | [
"def",
"spectral_pars_from_catalog",
"(",
"cat",
")",
":",
"spectrum_type",
"=",
"cat",
"[",
"'SpectrumType'",
"]",
"pars",
"=",
"get_function_defaults",
"(",
"cat",
"[",
"'SpectrumType'",
"]",
")",
"par_idxs",
"=",
"{",
"k",
":",
"i",
"for",
"i",
",",
"k"... | Create spectral parameters from 3FGL catalog columns. | [
"Create",
"spectral",
"parameters",
"from",
"3FGL",
"catalog",
"columns",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L239-L292 | train | 35,872 |
def is_free(self):
    """Return True if any of the spectral model parameters is set to free,
    else False.

    A parameter is considered free when its dictionary carries a truthy
    ``'free'`` entry; parameters without the key default to fixed.
    """
    # any() expresses the intent directly and short-circuits, replacing
    # the roundabout numpy array/sum/bool construction.
    return any(bool(par.get('free', False))
               for par in self.spectral_pars.values())
""" returns True if any of the spectral model parameters is set to free, else False
"""
return bool(np.array([int(value.get("free", False)) for key, value in self.spectral_pars.items()]).sum()) | [
"def",
"is_free",
"(",
"self",
")",
":",
"return",
"bool",
"(",
"np",
".",
"array",
"(",
"[",
"int",
"(",
"value",
".",
"get",
"(",
"\"free\"",
",",
"False",
")",
")",
"for",
"key",
",",
"value",
"in",
"self",
".",
"spectral_pars",
".",
"items",
... | returns True if any of the spectral model parameters is set to free, else False | [
"returns",
"True",
"if",
"any",
"of",
"the",
"spectral",
"model",
"parameters",
"is",
"set",
"to",
"free",
"else",
"False"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L398-L401 | train | 35,873 |
def set_position(self, skydir):
    """Set the position of the source.

    Parameters
    ----------
    skydir : `~astropy.coordinates.SkyCoord` or sequence
        New position; an (ra, dec) pair in degrees is also accepted.
    """
    # Coerce (ra, dec) pairs into a SkyCoord.
    if not isinstance(skydir, SkyCoord):
        skydir = SkyCoord(ra=skydir[0], dec=skydir[1], unit=u.deg)

    # Collapse array-valued coordinates to their first element.
    if not skydir.isscalar:
        skydir = np.ravel(skydir)[0]

    self._set_radec(np.array([skydir.icrs.ra.deg, skydir.icrs.dec.deg]))
self._set_radec(radec) | python | def set_position(self, skydir):
"""
Set the position of the source.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
"""
if not isinstance(skydir, SkyCoord):
skydir = SkyCoord(ra=skydir[0], dec=skydir[1], unit=u.deg)
if not skydir.isscalar:
skydir = np.ravel(skydir)[0]
radec = np.array([skydir.icrs.ra.deg, skydir.icrs.dec.deg])
self._set_radec(radec) | [
"def",
"set_position",
"(",
"self",
",",
"skydir",
")",
":",
"if",
"not",
"isinstance",
"(",
"skydir",
",",
"SkyCoord",
")",
":",
"skydir",
"=",
"SkyCoord",
"(",
"ra",
"=",
"skydir",
"[",
"0",
"]",
",",
"dec",
"=",
"skydir",
"[",
"1",
"]",
",",
"... | Set the position of the source.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord` | [
"Set",
"the",
"position",
"of",
"the",
"source",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L869-L886 | train | 35,874 |
def skydir(self):
    """Return a SkyCoord representation of the source position.

    Returns
    -------
    skydir : `~astropy.coordinates.SkyCoord`
    """
    ra_deg, dec_deg = self.radec[0], self.radec[1]
    return SkyCoord(ra_deg * u.deg, dec_deg * u.deg)
"""Return a SkyCoord representation of the source position.
Returns
-------
skydir : `~astropy.coordinates.SkyCoord`
"""
return SkyCoord(self.radec[0] * u.deg, self.radec[1] * u.deg) | [
"def",
"skydir",
"(",
"self",
")",
":",
"return",
"SkyCoord",
"(",
"self",
".",
"radec",
"[",
"0",
"]",
"*",
"u",
".",
"deg",
",",
"self",
".",
"radec",
"[",
"1",
"]",
"*",
"u",
".",
"deg",
")"
] | Return a SkyCoord representation of the source position.
Returns
-------
skydir : `~astropy.coordinates.SkyCoord` | [
"Return",
"a",
"SkyCoord",
"representation",
"of",
"the",
"source",
"position",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L951-L958 | train | 35,875 |
def create_from_dict(cls, src_dict, roi_skydir=None, rescale=False):
    """Create a source object from a python dictionary.

    Parameters
    ----------
    src_dict : dict
        Dictionary defining the properties of the source.
    roi_skydir : `~astropy.coordinates.SkyCoord`, optional
        ROI center passed to ``wcs_utils.get_target_skydir`` to resolve
        the source position.
    rescale : bool, optional
        Forwarded to ``create_pars_from_dict`` when building the
        spectral parameters.

    Raises
    ------
    Exception
        If neither ``'name'`` nor ``'Source_Name'`` is present.
    """
    # Work on a copy so the caller's dictionary is never mutated.
    src_dict = copy.deepcopy(src_dict)
    # Fill in defaults for the minimal set of model properties.
    src_dict.setdefault('SpatialModel', 'PointSource')
    src_dict.setdefault('Spectrum_Filename', None)
    src_dict.setdefault('SpectrumType', 'PowerLaw')
    src_dict['SpatialType'] = get_spatial_type(src_dict['SpatialModel'])

    spectrum_type = src_dict['SpectrumType']
    spatial_type = src_dict['SpatialType']

    spectral_pars = src_dict.pop('spectral_pars', {})
    spatial_pars = src_dict.pop('spatial_pars', {})

    if not spectral_pars:
        # No explicit parameter dict: pull parameter values from the
        # top-level keys and free the normalization parameter by default.
        spectral_pars = extract_pars_from_dict(spectrum_type, src_dict)
        norm_par_name = get_function_norm_par_name(spectrum_type)
        if norm_par_name is not None:
            spectral_pars[norm_par_name].setdefault('free', True)

    if not spatial_pars:
        spatial_pars = extract_pars_from_dict(spatial_type, src_dict)
        # Position and normalization are handled separately; drop them
        # from the spatial parameter set.
        for k in ['RA', 'DEC', 'Prefactor']:
            if k in spatial_pars:
                del spatial_pars[k]

    spectral_pars = create_pars_from_dict(spectrum_type, spectral_pars,
                                          rescale)
    # Spatial parameters are never rescaled.
    spatial_pars = create_pars_from_dict(spatial_type, spatial_pars,
                                         False)

    # Accept 'file' as an alias for the spectrum filename.
    if 'file' in src_dict:
        src_dict['Spectrum_Filename'] = src_dict.pop('file')

    # DMFitFunction requires a spectrum file; fall back to the bundled table.
    if spectrum_type == 'DMFitFunction' and src_dict['Spectrum_Filename'] is None:
        src_dict['Spectrum_Filename'] = os.path.join('$FERMIPY_DATA_DIR',
                                                     'gammamc_dif.dat')

    src_dict['spectral_pars'] = cast_pars_dict(spectral_pars)
    src_dict['spatial_pars'] = cast_pars_dict(spatial_pars)

    # The source name may arrive as 'name' or 'Source_Name'.
    if 'name' in src_dict:
        name = src_dict['name']
        src_dict['Source_Name'] = src_dict.pop('name')
    elif 'Source_Name' in src_dict:
        name = src_dict['Source_Name']
    else:
        raise Exception('Source name undefined.')

    skydir = wcs_utils.get_target_skydir(src_dict, roi_skydir)
    src_dict['RAJ2000'] = skydir.ra.deg
    src_dict['DEJ2000'] = skydir.dec.deg

    radec = np.array([skydir.ra.deg, skydir.dec.deg])

    return cls(name, src_dict, radec=radec)
"""Create a source object from a python dictionary.
Parameters
----------
src_dict : dict
Dictionary defining the properties of the source.
"""
src_dict = copy.deepcopy(src_dict)
src_dict.setdefault('SpatialModel', 'PointSource')
src_dict.setdefault('Spectrum_Filename', None)
src_dict.setdefault('SpectrumType', 'PowerLaw')
src_dict['SpatialType'] = get_spatial_type(src_dict['SpatialModel'])
spectrum_type = src_dict['SpectrumType']
spatial_type = src_dict['SpatialType']
spectral_pars = src_dict.pop('spectral_pars', {})
spatial_pars = src_dict.pop('spatial_pars', {})
if not spectral_pars:
spectral_pars = extract_pars_from_dict(spectrum_type, src_dict)
norm_par_name = get_function_norm_par_name(spectrum_type)
if norm_par_name is not None:
spectral_pars[norm_par_name].setdefault('free', True)
if not spatial_pars:
spatial_pars = extract_pars_from_dict(spatial_type, src_dict)
for k in ['RA', 'DEC', 'Prefactor']:
if k in spatial_pars:
del spatial_pars[k]
spectral_pars = create_pars_from_dict(spectrum_type, spectral_pars,
rescale)
spatial_pars = create_pars_from_dict(spatial_type, spatial_pars,
False)
if 'file' in src_dict:
src_dict['Spectrum_Filename'] = src_dict.pop('file')
if spectrum_type == 'DMFitFunction' and src_dict['Spectrum_Filename'] is None:
src_dict['Spectrum_Filename'] = os.path.join('$FERMIPY_DATA_DIR',
'gammamc_dif.dat')
src_dict['spectral_pars'] = cast_pars_dict(spectral_pars)
src_dict['spatial_pars'] = cast_pars_dict(spatial_pars)
if 'name' in src_dict:
name = src_dict['name']
src_dict['Source_Name'] = src_dict.pop('name')
elif 'Source_Name' in src_dict:
name = src_dict['Source_Name']
else:
raise Exception('Source name undefined.')
skydir = wcs_utils.get_target_skydir(src_dict, roi_skydir)
src_dict['RAJ2000'] = skydir.ra.deg
src_dict['DEJ2000'] = skydir.dec.deg
radec = np.array([skydir.ra.deg, skydir.dec.deg])
return cls(name, src_dict, radec=radec) | [
"def",
"create_from_dict",
"(",
"cls",
",",
"src_dict",
",",
"roi_skydir",
"=",
"None",
",",
"rescale",
"=",
"False",
")",
":",
"src_dict",
"=",
"copy",
".",
"deepcopy",
"(",
"src_dict",
")",
"src_dict",
".",
"setdefault",
"(",
"'SpatialModel'",
",",
"'Poi... | Create a source object from a python dictionary.
Parameters
----------
src_dict : dict
Dictionary defining the properties of the source. | [
"Create",
"a",
"source",
"object",
"from",
"a",
"python",
"dictionary",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L965-L1028 | train | 35,876 |
def create_from_xmlfile(cls, xmlfile, extdir=None):
    """Create a Source object from an XML file.

    Parameters
    ----------
    xmlfile : str
        Path to XML file.
    extdir : str, optional
        Path to the extended source archive.
    """
    tree = ElementTree.ElementTree(file=xmlfile)
    srcs = tree.getroot().findall('source')
    if not srcs:
        raise Exception('No sources found.')
    # Only the first source element in the file is used.
    return cls.create_from_xml(srcs[0], extdir=extdir)
"""Create a Source object from an XML file.
Parameters
----------
xmlfile : str
Path to XML file.
extdir : str
Path to the extended source archive.
"""
root = ElementTree.ElementTree(file=xmlfile).getroot()
srcs = root.findall('source')
if len(srcs) == 0:
raise Exception('No sources found.')
return cls.create_from_xml(srcs[0], extdir=extdir) | [
"def",
"create_from_xmlfile",
"(",
"cls",
",",
"xmlfile",
",",
"extdir",
"=",
"None",
")",
":",
"root",
"=",
"ElementTree",
".",
"ElementTree",
"(",
"file",
"=",
"xmlfile",
")",
".",
"getroot",
"(",
")",
"srcs",
"=",
"root",
".",
"findall",
"(",
"'sour... | Create a Source object from an XML file.
Parameters
----------
xmlfile : str
Path to XML file.
extdir : str
Path to the extended source archive. | [
"Create",
"a",
"Source",
"object",
"from",
"an",
"XML",
"file",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1031-L1046 | train | 35,877 |
fermiPy/fermipy | fermipy/roi_model.py | Source.write_xml | def write_xml(self, root):
"""Write this source to an XML node."""
if not self.extended:
try:
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='PointSource'))
except TypeError as msg:
print (self['Source_Name'], self)
raise TypeError(msg)
spat_el = ElementTree.SubElement(source_element, 'spatialModel')
spat_el.set('type', 'SkyDirFunction')
elif self['SpatialType'] == 'SpatialMap':
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='DiffuseSource'))
filename = utils.path_to_xmlpath(self['Spatial_Filename'])
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(map_based_integral='True',
type='SpatialMap',
file=filename))
else:
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='DiffuseSource'))
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(type=self['SpatialType']))
for k, v in self.spatial_pars.items():
utils.create_xml_element(spat_el, 'parameter', v)
el = ElementTree.SubElement(source_element, 'spectrum')
stype = self['SpectrumType'].strip()
el.set('type', stype)
if self['Spectrum_Filename'] is not None:
filename = utils.path_to_xmlpath(self['Spectrum_Filename'])
el.set('file', filename)
for k, v in self.spectral_pars.items():
utils.create_xml_element(el, 'parameter', v) | python | def write_xml(self, root):
"""Write this source to an XML node."""
if not self.extended:
try:
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='PointSource'))
except TypeError as msg:
print (self['Source_Name'], self)
raise TypeError(msg)
spat_el = ElementTree.SubElement(source_element, 'spatialModel')
spat_el.set('type', 'SkyDirFunction')
elif self['SpatialType'] == 'SpatialMap':
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='DiffuseSource'))
filename = utils.path_to_xmlpath(self['Spatial_Filename'])
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(map_based_integral='True',
type='SpatialMap',
file=filename))
else:
source_element = utils.create_xml_element(root, 'source',
dict(name=self['Source_Name'],
type='DiffuseSource'))
spat_el = utils.create_xml_element(source_element, 'spatialModel',
dict(type=self['SpatialType']))
for k, v in self.spatial_pars.items():
utils.create_xml_element(spat_el, 'parameter', v)
el = ElementTree.SubElement(source_element, 'spectrum')
stype = self['SpectrumType'].strip()
el.set('type', stype)
if self['Spectrum_Filename'] is not None:
filename = utils.path_to_xmlpath(self['Spectrum_Filename'])
el.set('file', filename)
for k, v in self.spectral_pars.items():
utils.create_xml_element(el, 'parameter', v) | [
"def",
"write_xml",
"(",
"self",
",",
"root",
")",
":",
"if",
"not",
"self",
".",
"extended",
":",
"try",
":",
"source_element",
"=",
"utils",
".",
"create_xml_element",
"(",
"root",
",",
"'source'",
",",
"dict",
"(",
"name",
"=",
"self",
"[",
"'Source... | Write this source to an XML node. | [
"Write",
"this",
"source",
"to",
"an",
"XML",
"node",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1156-L1201 | train | 35,878 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.clear | def clear(self):
"""Clear the contents of the ROI."""
self._srcs = []
self._diffuse_srcs = []
self._src_dict = collections.defaultdict(list)
self._src_radius = [] | python | def clear(self):
"""Clear the contents of the ROI."""
self._srcs = []
self._diffuse_srcs = []
self._src_dict = collections.defaultdict(list)
self._src_radius = [] | [
"def",
"clear",
"(",
"self",
")",
":",
"self",
".",
"_srcs",
"=",
"[",
"]",
"self",
".",
"_diffuse_srcs",
"=",
"[",
"]",
"self",
".",
"_src_dict",
"=",
"collections",
".",
"defaultdict",
"(",
"list",
")",
"self",
".",
"_src_radius",
"=",
"[",
"]"
] | Clear the contents of the ROI. | [
"Clear",
"the",
"contents",
"of",
"the",
"ROI",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1411-L1416 | train | 35,879 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.create_source | def create_source(self, name, src_dict, build_index=True,
merge_sources=True, rescale=True):
"""Add a new source to the ROI model from a dictionary or an
existing source object.
Parameters
----------
name : str
src_dict : dict or `~fermipy.roi_model.Source`
Returns
-------
src : `~fermipy.roi_model.Source`
"""
src_dict = copy.deepcopy(src_dict)
if isinstance(src_dict, dict):
src_dict['name'] = name
src = Model.create_from_dict(src_dict, self.skydir,
rescale=rescale)
else:
src = src_dict
src.set_name(name)
if isinstance(src, Source):
src.set_roi_direction(self.skydir)
src.set_roi_geom(self.geom)
self.load_source(src, build_index=build_index,
merge_sources=merge_sources)
return self.get_source_by_name(name) | python | def create_source(self, name, src_dict, build_index=True,
merge_sources=True, rescale=True):
"""Add a new source to the ROI model from a dictionary or an
existing source object.
Parameters
----------
name : str
src_dict : dict or `~fermipy.roi_model.Source`
Returns
-------
src : `~fermipy.roi_model.Source`
"""
src_dict = copy.deepcopy(src_dict)
if isinstance(src_dict, dict):
src_dict['name'] = name
src = Model.create_from_dict(src_dict, self.skydir,
rescale=rescale)
else:
src = src_dict
src.set_name(name)
if isinstance(src, Source):
src.set_roi_direction(self.skydir)
src.set_roi_geom(self.geom)
self.load_source(src, build_index=build_index,
merge_sources=merge_sources)
return self.get_source_by_name(name) | [
"def",
"create_source",
"(",
"self",
",",
"name",
",",
"src_dict",
",",
"build_index",
"=",
"True",
",",
"merge_sources",
"=",
"True",
",",
"rescale",
"=",
"True",
")",
":",
"src_dict",
"=",
"copy",
".",
"deepcopy",
"(",
"src_dict",
")",
"if",
"isinstanc... | Add a new source to the ROI model from a dictionary or an
existing source object.
Parameters
----------
name : str
src_dict : dict or `~fermipy.roi_model.Source`
Returns
-------
src : `~fermipy.roi_model.Source` | [
"Add",
"a",
"new",
"source",
"to",
"the",
"ROI",
"model",
"from",
"a",
"dictionary",
"or",
"an",
"existing",
"source",
"object",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1517-L1552 | train | 35,880 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.load_sources | def load_sources(self, sources):
"""Delete all sources in the ROI and load the input source list."""
self.clear()
for s in sources:
if isinstance(s, dict):
s = Model.create_from_dict(s)
self.load_source(s, build_index=False)
self._build_src_index() | python | def load_sources(self, sources):
"""Delete all sources in the ROI and load the input source list."""
self.clear()
for s in sources:
if isinstance(s, dict):
s = Model.create_from_dict(s)
self.load_source(s, build_index=False)
self._build_src_index() | [
"def",
"load_sources",
"(",
"self",
",",
"sources",
")",
":",
"self",
".",
"clear",
"(",
")",
"for",
"s",
"in",
"sources",
":",
"if",
"isinstance",
"(",
"s",
",",
"dict",
")",
":",
"s",
"=",
"Model",
".",
"create_from_dict",
"(",
"s",
")",
"self",
... | Delete all sources in the ROI and load the input source list. | [
"Delete",
"all",
"sources",
"in",
"the",
"ROI",
"and",
"load",
"the",
"input",
"source",
"list",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1558-L1568 | train | 35,881 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.load_source | def load_source(self, src, build_index=True, merge_sources=True,
**kwargs):
"""
Load a single source.
Parameters
----------
src : `~fermipy.roi_model.Source`
Source object that will be added to the ROI.
merge_sources : bool
When a source matches an existing source in the model
update that source with the properties of the new source.
build_index : bool
Re-make the source index after loading this source.
"""
src = copy.deepcopy(src)
name = src.name.replace(' ', '').lower()
min_sep = kwargs.get('min_separation', None)
if min_sep is not None:
sep = src.skydir.separation(self._src_skydir).deg
if len(sep) > 0 and np.min(sep) < min_sep:
return
match_srcs = self.match_source(src)
if len(match_srcs) == 1:
# self.logger.debug('Found matching source for %s : %s',
# src.name, match_srcs[0].name)
if merge_sources:
match_srcs[0].update_from_source(src)
else:
match_srcs[0].add_name(src.name)
self._add_source_alias(src.name.replace(' ', '').lower(),
match_srcs[0])
return
elif len(match_srcs) > 2:
raise Exception('Multiple sources with name %s' % name)
self._add_source_alias(src.name, src)
for name in src.names:
self._add_source_alias(name.replace(' ', '').lower(), src)
if isinstance(src, Source):
self._srcs.append(src)
else:
self._diffuse_srcs.append(src)
if build_index:
self._build_src_index() | python | def load_source(self, src, build_index=True, merge_sources=True,
**kwargs):
"""
Load a single source.
Parameters
----------
src : `~fermipy.roi_model.Source`
Source object that will be added to the ROI.
merge_sources : bool
When a source matches an existing source in the model
update that source with the properties of the new source.
build_index : bool
Re-make the source index after loading this source.
"""
src = copy.deepcopy(src)
name = src.name.replace(' ', '').lower()
min_sep = kwargs.get('min_separation', None)
if min_sep is not None:
sep = src.skydir.separation(self._src_skydir).deg
if len(sep) > 0 and np.min(sep) < min_sep:
return
match_srcs = self.match_source(src)
if len(match_srcs) == 1:
# self.logger.debug('Found matching source for %s : %s',
# src.name, match_srcs[0].name)
if merge_sources:
match_srcs[0].update_from_source(src)
else:
match_srcs[0].add_name(src.name)
self._add_source_alias(src.name.replace(' ', '').lower(),
match_srcs[0])
return
elif len(match_srcs) > 2:
raise Exception('Multiple sources with name %s' % name)
self._add_source_alias(src.name, src)
for name in src.names:
self._add_source_alias(name.replace(' ', '').lower(), src)
if isinstance(src, Source):
self._srcs.append(src)
else:
self._diffuse_srcs.append(src)
if build_index:
self._build_src_index() | [
"def",
"load_source",
"(",
"self",
",",
"src",
",",
"build_index",
"=",
"True",
",",
"merge_sources",
"=",
"True",
",",
"*",
"*",
"kwargs",
")",
":",
"src",
"=",
"copy",
".",
"deepcopy",
"(",
"src",
")",
"name",
"=",
"src",
".",
"name",
".",
"repla... | Load a single source.
Parameters
----------
src : `~fermipy.roi_model.Source`
Source object that will be added to the ROI.
merge_sources : bool
When a source matches an existing source in the model
update that source with the properties of the new source.
build_index : bool
Re-make the source index after loading this source. | [
"Load",
"a",
"single",
"source",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1575-L1634 | train | 35,882 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.match_source | def match_source(self, src):
"""Look for source or sources in the model that match the
given source. Sources are matched by name and any association
columns defined in the assoc_xmatch_columns parameter.
"""
srcs = []
names = [src.name]
for col in self.config['assoc_xmatch_columns']:
if col in src.assoc and src.assoc[col]:
names += [src.assoc[col]]
for name in names:
name = name.replace(' ', '').lower()
if name not in self._src_dict:
continue
srcs += [s for s in self._src_dict[name] if s not in srcs]
return srcs | python | def match_source(self, src):
"""Look for source or sources in the model that match the
given source. Sources are matched by name and any association
columns defined in the assoc_xmatch_columns parameter.
"""
srcs = []
names = [src.name]
for col in self.config['assoc_xmatch_columns']:
if col in src.assoc and src.assoc[col]:
names += [src.assoc[col]]
for name in names:
name = name.replace(' ', '').lower()
if name not in self._src_dict:
continue
srcs += [s for s in self._src_dict[name] if s not in srcs]
return srcs | [
"def",
"match_source",
"(",
"self",
",",
"src",
")",
":",
"srcs",
"=",
"[",
"]",
"names",
"=",
"[",
"src",
".",
"name",
"]",
"for",
"col",
"in",
"self",
".",
"config",
"[",
"'assoc_xmatch_columns'",
"]",
":",
"if",
"col",
"in",
"src",
".",
"assoc",... | Look for source or sources in the model that match the
given source. Sources are matched by name and any association
columns defined in the assoc_xmatch_columns parameter. | [
"Look",
"for",
"source",
"or",
"sources",
"in",
"the",
"model",
"that",
"match",
"the",
"given",
"source",
".",
"Sources",
"are",
"matched",
"by",
"name",
"and",
"any",
"association",
"columns",
"defined",
"in",
"the",
"assoc_xmatch_columns",
"parameter",
"."
... | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1636-L1655 | train | 35,883 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.load | def load(self, **kwargs):
"""Load both point source and diffuse components."""
coordsys = kwargs.get('coordsys', 'CEL')
extdir = kwargs.get('extdir', self.extdir)
srcname = kwargs.get('srcname', None)
self.clear()
self.load_diffuse_srcs()
for c in self.config['catalogs']:
if isinstance(c, catalog.Catalog):
self.load_existing_catalog(c)
continue
extname = os.path.splitext(c)[1]
if extname != '.xml':
self.load_fits_catalog(c, extdir=extdir, coordsys=coordsys,
srcname=srcname)
elif extname == '.xml':
self.load_xml(c, extdir=extdir, coordsys=coordsys)
else:
raise Exception('Unrecognized catalog file extension: %s' % c)
for c in self.config['sources']:
if 'name' not in c:
raise Exception(
'No name field in source dictionary:\n ' + str(c))
self.create_source(c['name'], c, build_index=False)
self._build_src_index() | python | def load(self, **kwargs):
"""Load both point source and diffuse components."""
coordsys = kwargs.get('coordsys', 'CEL')
extdir = kwargs.get('extdir', self.extdir)
srcname = kwargs.get('srcname', None)
self.clear()
self.load_diffuse_srcs()
for c in self.config['catalogs']:
if isinstance(c, catalog.Catalog):
self.load_existing_catalog(c)
continue
extname = os.path.splitext(c)[1]
if extname != '.xml':
self.load_fits_catalog(c, extdir=extdir, coordsys=coordsys,
srcname=srcname)
elif extname == '.xml':
self.load_xml(c, extdir=extdir, coordsys=coordsys)
else:
raise Exception('Unrecognized catalog file extension: %s' % c)
for c in self.config['sources']:
if 'name' not in c:
raise Exception(
'No name field in source dictionary:\n ' + str(c))
self.create_source(c['name'], c, build_index=False)
self._build_src_index() | [
"def",
"load",
"(",
"self",
",",
"*",
"*",
"kwargs",
")",
":",
"coordsys",
"=",
"kwargs",
".",
"get",
"(",
"'coordsys'",
",",
"'CEL'",
")",
"extdir",
"=",
"kwargs",
".",
"get",
"(",
"'extdir'",
",",
"self",
".",
"extdir",
")",
"srcname",
"=",
"kwar... | Load both point source and diffuse components. | [
"Load",
"both",
"point",
"source",
"and",
"diffuse",
"components",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1657-L1690 | train | 35,884 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.create_from_roi_data | def create_from_roi_data(cls, datafile):
"""Create an ROI model."""
data = np.load(datafile).flat[0]
roi = cls()
roi.load_sources(data['sources'].values())
return roi | python | def create_from_roi_data(cls, datafile):
"""Create an ROI model."""
data = np.load(datafile).flat[0]
roi = cls()
roi.load_sources(data['sources'].values())
return roi | [
"def",
"create_from_roi_data",
"(",
"cls",
",",
"datafile",
")",
":",
"data",
"=",
"np",
".",
"load",
"(",
"datafile",
")",
".",
"flat",
"[",
"0",
"]",
"roi",
"=",
"cls",
"(",
")",
"roi",
".",
"load_sources",
"(",
"data",
"[",
"'sources'",
"]",
"."... | Create an ROI model. | [
"Create",
"an",
"ROI",
"model",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1706-L1713 | train | 35,885 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.create | def create(cls, selection, config, **kwargs):
"""Create an ROIModel instance."""
if selection['target'] is not None:
return cls.create_from_source(selection['target'],
config, **kwargs)
else:
target_skydir = wcs_utils.get_target_skydir(selection)
return cls.create_from_position(target_skydir, config, **kwargs) | python | def create(cls, selection, config, **kwargs):
"""Create an ROIModel instance."""
if selection['target'] is not None:
return cls.create_from_source(selection['target'],
config, **kwargs)
else:
target_skydir = wcs_utils.get_target_skydir(selection)
return cls.create_from_position(target_skydir, config, **kwargs) | [
"def",
"create",
"(",
"cls",
",",
"selection",
",",
"config",
",",
"*",
"*",
"kwargs",
")",
":",
"if",
"selection",
"[",
"'target'",
"]",
"is",
"not",
"None",
":",
"return",
"cls",
".",
"create_from_source",
"(",
"selection",
"[",
"'target'",
"]",
",",... | Create an ROIModel instance. | [
"Create",
"an",
"ROIModel",
"instance",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1716-L1724 | train | 35,886 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.create_from_position | def create_from_position(cls, skydir, config, **kwargs):
"""Create an ROIModel instance centered on a sky direction.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction on which the ROI will be centered.
config : dict
Model configuration dictionary.
"""
coordsys = kwargs.pop('coordsys', 'CEL')
roi = cls(config, skydir=skydir, coordsys=coordsys, **kwargs)
return roi | python | def create_from_position(cls, skydir, config, **kwargs):
"""Create an ROIModel instance centered on a sky direction.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction on which the ROI will be centered.
config : dict
Model configuration dictionary.
"""
coordsys = kwargs.pop('coordsys', 'CEL')
roi = cls(config, skydir=skydir, coordsys=coordsys, **kwargs)
return roi | [
"def",
"create_from_position",
"(",
"cls",
",",
"skydir",
",",
"config",
",",
"*",
"*",
"kwargs",
")",
":",
"coordsys",
"=",
"kwargs",
".",
"pop",
"(",
"'coordsys'",
",",
"'CEL'",
")",
"roi",
"=",
"cls",
"(",
"config",
",",
"skydir",
"=",
"skydir",
"... | Create an ROIModel instance centered on a sky direction.
Parameters
----------
skydir : `~astropy.coordinates.SkyCoord`
Sky direction on which the ROI will be centered.
config : dict
Model configuration dictionary. | [
"Create",
"an",
"ROIModel",
"instance",
"centered",
"on",
"a",
"sky",
"direction",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1727-L1742 | train | 35,887 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.create_from_source | def create_from_source(cls, name, config, **kwargs):
"""Create an ROI centered on the given source."""
coordsys = kwargs.pop('coordsys', 'CEL')
roi = cls(config, src_radius=None, src_roiwidth=None,
srcname=name, **kwargs)
src = roi.get_source_by_name(name)
return cls.create_from_position(src.skydir, config,
coordsys=coordsys, **kwargs) | python | def create_from_source(cls, name, config, **kwargs):
"""Create an ROI centered on the given source."""
coordsys = kwargs.pop('coordsys', 'CEL')
roi = cls(config, src_radius=None, src_roiwidth=None,
srcname=name, **kwargs)
src = roi.get_source_by_name(name)
return cls.create_from_position(src.skydir, config,
coordsys=coordsys, **kwargs) | [
"def",
"create_from_source",
"(",
"cls",
",",
"name",
",",
"config",
",",
"*",
"*",
"kwargs",
")",
":",
"coordsys",
"=",
"kwargs",
".",
"pop",
"(",
"'coordsys'",
",",
"'CEL'",
")",
"roi",
"=",
"cls",
"(",
"config",
",",
"src_radius",
"=",
"None",
","... | Create an ROI centered on the given source. | [
"Create",
"an",
"ROI",
"centered",
"on",
"the",
"given",
"source",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1745-L1755 | train | 35,888 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.get_source_by_name | def get_source_by_name(self, name):
"""Return a single source in the ROI with the given name. The
input name string can match any of the strings in the names
property of the source object. Case and whitespace are
ignored when matching name strings. If no sources are found
or multiple sources then an exception is thrown.
Parameters
----------
name : str
Name string.
Returns
-------
srcs : `~fermipy.roi_model.Model`
A source object.
"""
srcs = self.get_sources_by_name(name)
if len(srcs) == 1:
return srcs[0]
elif len(srcs) == 0:
raise Exception('No source matching name: ' + name)
elif len(srcs) > 1:
raise Exception('Multiple sources matching name: ' + name) | python | def get_source_by_name(self, name):
"""Return a single source in the ROI with the given name. The
input name string can match any of the strings in the names
property of the source object. Case and whitespace are
ignored when matching name strings. If no sources are found
or multiple sources then an exception is thrown.
Parameters
----------
name : str
Name string.
Returns
-------
srcs : `~fermipy.roi_model.Model`
A source object.
"""
srcs = self.get_sources_by_name(name)
if len(srcs) == 1:
return srcs[0]
elif len(srcs) == 0:
raise Exception('No source matching name: ' + name)
elif len(srcs) > 1:
raise Exception('Multiple sources matching name: ' + name) | [
"def",
"get_source_by_name",
"(",
"self",
",",
"name",
")",
":",
"srcs",
"=",
"self",
".",
"get_sources_by_name",
"(",
"name",
")",
"if",
"len",
"(",
"srcs",
")",
"==",
"1",
":",
"return",
"srcs",
"[",
"0",
"]",
"elif",
"len",
"(",
"srcs",
")",
"==... | Return a single source in the ROI with the given name. The
input name string can match any of the strings in the names
property of the source object. Case and whitespace are
ignored when matching name strings. If no sources are found
or multiple sources then an exception is thrown.
Parameters
----------
name : str
Name string.
Returns
-------
srcs : `~fermipy.roi_model.Model`
A source object. | [
"Return",
"a",
"single",
"source",
"in",
"the",
"ROI",
"with",
"the",
"given",
"name",
".",
"The",
"input",
"name",
"string",
"can",
"match",
"any",
"of",
"the",
"strings",
"in",
"the",
"names",
"property",
"of",
"the",
"source",
"object",
".",
"Case",
... | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1771-L1796 | train | 35,889 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.get_sources_by_name | def get_sources_by_name(self, name):
"""Return a list of sources in the ROI matching the given
name. The input name string can match any of the strings in
the names property of the source object. Case and whitespace
are ignored when matching name strings.
Parameters
----------
name : str
Returns
-------
srcs : list
A list of `~fermipy.roi_model.Model` objects.
"""
index_name = name.replace(' ', '').lower()
if index_name in self._src_dict:
return list(self._src_dict[index_name])
else:
raise Exception('No source matching name: ' + name) | python | def get_sources_by_name(self, name):
"""Return a list of sources in the ROI matching the given
name. The input name string can match any of the strings in
the names property of the source object. Case and whitespace
are ignored when matching name strings.
Parameters
----------
name : str
Returns
-------
srcs : list
A list of `~fermipy.roi_model.Model` objects.
"""
index_name = name.replace(' ', '').lower()
if index_name in self._src_dict:
return list(self._src_dict[index_name])
else:
raise Exception('No source matching name: ' + name) | [
"def",
"get_sources_by_name",
"(",
"self",
",",
"name",
")",
":",
"index_name",
"=",
"name",
".",
"replace",
"(",
"' '",
",",
"''",
")",
".",
"lower",
"(",
")",
"if",
"index_name",
"in",
"self",
".",
"_src_dict",
":",
"return",
"list",
"(",
"self",
"... | Return a list of sources in the ROI matching the given
name. The input name string can match any of the strings in
the names property of the source object. Case and whitespace
are ignored when matching name strings.
Parameters
----------
name : str
Returns
-------
srcs : list
A list of `~fermipy.roi_model.Model` objects. | [
"Return",
"a",
"list",
"of",
"sources",
"in",
"the",
"ROI",
"matching",
"the",
"given",
"name",
".",
"The",
"input",
"name",
"string",
"can",
"match",
"any",
"of",
"the",
"strings",
"in",
"the",
"names",
"property",
"of",
"the",
"source",
"object",
".",
... | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1798-L1819 | train | 35,890 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.load_fits_catalog | def load_fits_catalog(self, name, **kwargs):
"""Load sources from a FITS catalog file.
Parameters
----------
name : str
Catalog name or path to a catalog FITS file.
"""
# EAC split this function to make it easier to load an existing catalog
cat = catalog.Catalog.create(name)
self.load_existing_catalog(cat, **kwargs) | python | def load_fits_catalog(self, name, **kwargs):
"""Load sources from a FITS catalog file.
Parameters
----------
name : str
Catalog name or path to a catalog FITS file.
"""
# EAC split this function to make it easier to load an existing catalog
cat = catalog.Catalog.create(name)
self.load_existing_catalog(cat, **kwargs) | [
"def",
"load_fits_catalog",
"(",
"self",
",",
"name",
",",
"*",
"*",
"kwargs",
")",
":",
"# EAC split this function to make it easier to load an existing catalog",
"cat",
"=",
"catalog",
".",
"Catalog",
".",
"create",
"(",
"name",
")",
"self",
".",
"load_existing_ca... | Load sources from a FITS catalog file.
Parameters
----------
name : str
Catalog name or path to a catalog FITS file. | [
"Load",
"sources",
"from",
"a",
"FITS",
"catalog",
"file",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L1940-L1951 | train | 35,891 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel._build_src_index | def _build_src_index(self):
"""Build an indices for fast lookup of a source given its name
or coordinates."""
self._srcs = sorted(self._srcs, key=lambda t: t['offset'])
nsrc = len(self._srcs)
radec = np.zeros((2, nsrc))
for i, src in enumerate(self._srcs):
radec[:, i] = src.radec
self._src_skydir = SkyCoord(ra=radec[0], dec=radec[1], unit=u.deg)
self._src_radius = self._src_skydir.separation(self.skydir) | python | def _build_src_index(self):
"""Build an indices for fast lookup of a source given its name
or coordinates."""
self._srcs = sorted(self._srcs, key=lambda t: t['offset'])
nsrc = len(self._srcs)
radec = np.zeros((2, nsrc))
for i, src in enumerate(self._srcs):
radec[:, i] = src.radec
self._src_skydir = SkyCoord(ra=radec[0], dec=radec[1], unit=u.deg)
self._src_radius = self._src_skydir.separation(self.skydir) | [
"def",
"_build_src_index",
"(",
"self",
")",
":",
"self",
".",
"_srcs",
"=",
"sorted",
"(",
"self",
".",
"_srcs",
",",
"key",
"=",
"lambda",
"t",
":",
"t",
"[",
"'offset'",
"]",
")",
"nsrc",
"=",
"len",
"(",
"self",
".",
"_srcs",
")",
"radec",
"=... | Build an indices for fast lookup of a source given its name
or coordinates. | [
"Build",
"an",
"indices",
"for",
"fast",
"lookup",
"of",
"a",
"source",
"given",
"its",
"name",
"or",
"coordinates",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L2087-L2099 | train | 35,892 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.write_xml | def write_xml(self, xmlfile, config=None):
"""Save the ROI model as an XML file."""
root = ElementTree.Element('source_library')
root.set('title', 'source_library')
for s in self._srcs:
s.write_xml(root)
if config is not None:
srcs = self.create_diffuse_srcs(config)
diffuse_srcs = {s.name: s for s in srcs}
for s in self._diffuse_srcs:
src = copy.deepcopy(diffuse_srcs.get(s.name, s))
src.update_spectral_pars(s.spectral_pars)
src.write_xml(root)
else:
for s in self._diffuse_srcs:
s.write_xml(root)
output_file = open(xmlfile, 'w')
output_file.write(utils.prettify_xml(root)) | python | def write_xml(self, xmlfile, config=None):
"""Save the ROI model as an XML file."""
root = ElementTree.Element('source_library')
root.set('title', 'source_library')
for s in self._srcs:
s.write_xml(root)
if config is not None:
srcs = self.create_diffuse_srcs(config)
diffuse_srcs = {s.name: s for s in srcs}
for s in self._diffuse_srcs:
src = copy.deepcopy(diffuse_srcs.get(s.name, s))
src.update_spectral_pars(s.spectral_pars)
src.write_xml(root)
else:
for s in self._diffuse_srcs:
s.write_xml(root)
output_file = open(xmlfile, 'w')
output_file.write(utils.prettify_xml(root)) | [
"def",
"write_xml",
"(",
"self",
",",
"xmlfile",
",",
"config",
"=",
"None",
")",
":",
"root",
"=",
"ElementTree",
".",
"Element",
"(",
"'source_library'",
")",
"root",
".",
"set",
"(",
"'title'",
",",
"'source_library'",
")",
"for",
"s",
"in",
"self",
... | Save the ROI model as an XML file. | [
"Save",
"the",
"ROI",
"model",
"as",
"an",
"XML",
"file",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L2101-L2122 | train | 35,893 |
fermiPy/fermipy | fermipy/roi_model.py | ROIModel.create_table | def create_table(self, names=None):
"""Create an astropy Table object with the contents of the ROI model.
"""
scan_shape = (1,)
for src in self._srcs:
scan_shape = max(scan_shape, src['dloglike_scan'].shape)
tab = create_source_table(scan_shape)
for s in self._srcs:
if names is not None and s.name not in names:
continue
s.add_to_table(tab)
return tab | python | def create_table(self, names=None):
"""Create an astropy Table object with the contents of the ROI model.
"""
scan_shape = (1,)
for src in self._srcs:
scan_shape = max(scan_shape, src['dloglike_scan'].shape)
tab = create_source_table(scan_shape)
for s in self._srcs:
if names is not None and s.name not in names:
continue
s.add_to_table(tab)
return tab | [
"def",
"create_table",
"(",
"self",
",",
"names",
"=",
"None",
")",
":",
"scan_shape",
"=",
"(",
"1",
",",
")",
"for",
"src",
"in",
"self",
".",
"_srcs",
":",
"scan_shape",
"=",
"max",
"(",
"scan_shape",
",",
"src",
"[",
"'dloglike_scan'",
"]",
".",
... | Create an astropy Table object with the contents of the ROI model. | [
"Create",
"an",
"astropy",
"Table",
"object",
"with",
"the",
"contents",
"of",
"the",
"ROI",
"model",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L2184-L2198 | train | 35,894 |
def write_fits(self, fitsfile):
    """Write the ROI model to a FITS file.

    Parameters
    ----------
    fitsfile : str
        Path of the output FITS file.
    """
    hdulist = [fits.PrimaryHDU(),
               fits.table_to_hdu(self.create_table())]
    fits_utils.write_hdus(hdulist, fitsfile)
"def",
"write_fits",
"(",
"self",
",",
"fitsfile",
")",
":",
"tab",
"=",
"self",
".",
"create_table",
"(",
")",
"hdu_data",
"=",
"fits",
".",
"table_to_hdu",
"(",
"tab",
")",
"hdus",
"=",
"[",
"fits",
".",
"PrimaryHDU",
"(",
")",
",",
"hdu_data",
"]"... | Write the ROI model to a FITS file. | [
"Write",
"the",
"ROI",
"model",
"to",
"a",
"FITS",
"file",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L2200-L2206 | train | 35,895 |
def write_ds9region(self, region, *args, **kwargs):
    """Write the ROI to a ds9-compatible region file.

    The region lines are produced by ``to_ds9``; all extra positional and
    keyword arguments are forwarded to that method, so see its
    documentation for the accepted options.

    Parameters
    ----------
    region : str
        Name of the output region file.
    """
    contents = "\n".join(self.to_ds9(*args, **kwargs))
    with open(region, 'w') as outfile:
        outfile.write(contents)
"def",
"write_ds9region",
"(",
"self",
",",
"region",
",",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
":",
"lines",
"=",
"self",
".",
"to_ds9",
"(",
"*",
"args",
",",
"*",
"*",
"kwargs",
")",
"with",
"open",
"(",
"region",
",",
"'w'",
")",
"as",
... | Create a ds9 compatible region file from the ROI.
It calls the `to_ds9` method and write the result to the region file. Only the file name is required.
All other parameters will be forwarded to the `to_ds9` method, see the documentation of that method
for all accepted parameters and options.
Parameters
----------
region : str
name of the region file (string) | [
"Create",
"a",
"ds9",
"compatible",
"region",
"file",
"from",
"the",
"ROI",
"."
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/roi_model.py#L2253-L2267 | train | 35,896 |
def select_extended(cat_table):
    """Return a boolean mask selecting the extended-source rows of a catalog table.

    Catalogs that carry an ``Extended_Source_Name`` column mark extended
    sources with a non-blank name; catalogs without that column are
    expected to provide a boolean ``Extended`` column instead.
    """
    try:
        names = cat_table['Extended_Source_Name'].data
    except KeyError:
        # Fall back to the explicit boolean column.
        return cat_table['Extended']
    return np.array([bool(name.strip()) for name in names], bool)
"def",
"select_extended",
"(",
"cat_table",
")",
":",
"try",
":",
"l",
"=",
"[",
"len",
"(",
"row",
".",
"strip",
"(",
")",
")",
">",
"0",
"for",
"row",
"in",
"cat_table",
"[",
"'Extended_Source_Name'",
"]",
".",
"data",
"]",
"return",
"np",
".",
"... | Select only rows representing extended sources from a catalog table | [
"Select",
"only",
"rows",
"representing",
"extended",
"sources",
"from",
"a",
"catalog",
"table"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/catalog_src_manager.py#L23-L30 | train | 35,897 |
def make_mask(cat_table, cut):
    """Build a boolean mask selecting the rows of ``cat_table`` that pass a cut.

    Parameters
    ----------
    cat_table : table-like
        Catalog table indexed by column name.
    cut : dict
        Must contain ``cut_var`` (column name); ``min_val`` and ``max_val``
        are optional inclusive bounds — a missing bound is treated as open.
    """
    column = cut['cut_var']
    lo = cut.get('min_val', None)
    hi = cut.get('max_val', None)
    nsrc = len(cat_table)
    # An absent bound selects everything on that side.
    lo_mask = np.ones((nsrc), bool) if lo is None else cat_table[column] >= lo
    hi_mask = np.ones((nsrc), bool) if hi is None else cat_table[column] <= hi
    return lo_mask * hi_mask
"def",
"make_mask",
"(",
"cat_table",
",",
"cut",
")",
":",
"cut_var",
"=",
"cut",
"[",
"'cut_var'",
"]",
"min_val",
"=",
"cut",
".",
"get",
"(",
"'min_val'",
",",
"None",
")",
"max_val",
"=",
"cut",
".",
"get",
"(",
"'max_val'",
",",
"None",
")",
... | Mask a bit mask selecting the rows that pass a selection | [
"Mask",
"a",
"bit",
"mask",
"selecting",
"the",
"rows",
"that",
"pass",
"a",
"selection"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/catalog_src_manager.py#L33-L50 | train | 35,898 |
def select_sources(cat_table, cuts):
    """Return the names of catalog rows passing a set of cuts.

    Each entry of ``cuts`` is either the string ``'mask_extended'`` /
    ``'select_extended'`` (handled by the corresponding helper) or a cut
    dict understood by ``make_mask``.  The stripped ``Source_Name`` values
    of the surviving rows are returned as a list.
    """
    mask = np.ones((len(cat_table)), bool)
    for cut in cuts:
        if cut == 'mask_extended':
            mask *= mask_extended(cat_table)
        elif cut == 'select_extended':
            mask *= select_extended(cat_table)
        else:
            mask *= make_mask(cat_table, cut)
    return [src_name.strip() for src_name in cat_table['Source_Name'][mask]]
"def",
"select_sources",
"(",
"cat_table",
",",
"cuts",
")",
":",
"nsrc",
"=",
"len",
"(",
"cat_table",
")",
"full_mask",
"=",
"np",
".",
"ones",
"(",
"(",
"nsrc",
")",
",",
"bool",
")",
"for",
"cut",
"in",
"cuts",
":",
"if",
"cut",
"==",
"'mask_ex... | Select only rows passing a set of cuts from catalog table | [
"Select",
"only",
"rows",
"passing",
"a",
"set",
"of",
"cuts",
"from",
"catalog",
"table"
] | 9df5e7e3728307fd58c5bba36fd86783c39fbad4 | https://github.com/fermiPy/fermipy/blob/9df5e7e3728307fd58c5bba36fd86783c39fbad4/fermipy/diffuse/catalog_src_manager.py#L53-L67 | train | 35,899 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.